[caffe-contrib] 01/362: Imported Upstream version 0.0~rc2+96179ca

Zhou Mo cdluminate-guest at moszumanska.debian.org
Tue May 3 09:24:04 UTC 2016


This is an automated email from the git hooks/post-receive script.

cdluminate-guest pushed a commit to branch master
in repository caffe-contrib.

commit e76a44fe6cc557f609e04a870e996cd8d767b18e
Author: Zhou Mo <cdluminate at gmail.com>
Date:   Thu Jul 9 00:56:02 2015 +0000

    Imported Upstream version 0.0~rc2+96179ca
---
 .Doxyfile                                          |  2335 +++
 .gitignore                                         |    93 +
 .travis.yml                                        |    47 +
 CMakeLists.txt                                     |    69 +
 CONTRIBUTORS.md                                    |    19 +
 INSTALL.md                                         |     7 +
 LICENSE                                            |    44 +
 Makefile                                           |   631 +
 Makefile.config.example                            |    93 +
 README.md                                          |    34 +
 caffe.cloc                                         |    53 +
 cmake/ConfigGen.cmake                              |   104 +
 cmake/Cuda.cmake                                   |   254 +
 cmake/Dependencies.cmake                           |   157 +
 cmake/External/gflags.cmake                        |    56 +
 cmake/External/glog.cmake                          |    56 +
 cmake/Misc.cmake                                   |    52 +
 cmake/Modules/FindAtlas.cmake                      |    52 +
 cmake/Modules/FindGFlags.cmake                     |    50 +
 cmake/Modules/FindGlog.cmake                       |    48 +
 cmake/Modules/FindLAPACK.cmake                     |   190 +
 cmake/Modules/FindLMDB.cmake                       |    28 +
 cmake/Modules/FindLevelDB.cmake                    |    44 +
 cmake/Modules/FindMKL.cmake                        |   110 +
 cmake/Modules/FindMatlabMex.cmake                  |    48 +
 cmake/Modules/FindNumPy.cmake                      |    58 +
 cmake/Modules/FindOpenBLAS.cmake                   |    62 +
 cmake/Modules/FindSnappy.cmake                     |    28 +
 cmake/Modules/FindvecLib.cmake                     |    34 +
 cmake/ProtoBuf.cmake                               |    90 +
 cmake/Summary.cmake                                |   168 +
 cmake/Targets.cmake                                |   173 +
 cmake/Templates/CaffeConfig.cmake.in               |    58 +
 cmake/Templates/CaffeConfigVersion.cmake.in        |    11 +
 cmake/Templates/caffe_config.h.in                  |    32 +
 cmake/Utils.cmake                                  |   381 +
 cmake/lint.cmake                                   |    50 +
 data/cifar10/get_cifar10.sh                        |    19 +
 data/ilsvrc12/get_ilsvrc_aux.sh                    |    21 +
 data/mnist/get_mnist.sh                            |    24 +
 docs/CMakeLists.txt                                |   106 +
 docs/CNAME                                         |     1 +
 docs/README.md                                     |     5 +
 docs/_config.yml                                   |     7 +
 docs/_layouts/default.html                         |    62 +
 docs/development.md                                |   120 +
 docs/images/GitHub-Mark-64px.png                   |   Bin 0 -> 2625 bytes
 docs/images/caffeine-icon.png                      |   Bin 0 -> 954 bytes
 docs/index.md                                      |   106 +
 docs/install_apt.md                                |    50 +
 docs/install_osx.md                                |   128 +
 docs/install_yum.md                                |    45 +
 docs/installation.md                               |   127 +
 docs/model_zoo.md                                  |    70 +
 docs/performance_hardware.md                       |    73 +
 docs/stylesheets/pygment_trac.css                  |    69 +
 docs/stylesheets/reset.css                         |    21 +
 docs/stylesheets/styles.css                        |   348 +
 docs/tutorial/convolution.md                       |    13 +
 docs/tutorial/data.md                              |    78 +
 docs/tutorial/fig/.gitignore                       |     0
 docs/tutorial/fig/backward.jpg                     |   Bin 0 -> 105017 bytes
 docs/tutorial/fig/forward.jpg                      |   Bin 0 -> 71957 bytes
 docs/tutorial/fig/forward_backward.png             |   Bin 0 -> 57267 bytes
 docs/tutorial/fig/layer.jpg                        |   Bin 0 -> 54757 bytes
 docs/tutorial/fig/logreg.jpg                       |   Bin 0 -> 42966 bytes
 docs/tutorial/forward_backward.md                  |    37 +
 docs/tutorial/index.md                             |    51 +
 docs/tutorial/interfaces.md                        |   279 +
 docs/tutorial/layers.md                            |   525 +
 docs/tutorial/loss.md                              |    51 +
 docs/tutorial/net_layer_blob.md                    |   168 +
 docs/tutorial/solver.md                            |   271 +
 examples/00-classification.ipynb                   | 13187 +++++++++++++
 examples/01-learning-lenet.ipynb                   |  5196 +++++
 examples/02-brewing-logreg.ipynb                   |  5771 ++++++
 examples/03-fine-tuning.ipynb                      |   947 +
 examples/CMakeLists.txt                            |    31 +
 examples/cifar10/cifar10_full.prototxt             |   154 +
 examples/cifar10/cifar10_full_solver.prototxt      |    26 +
 examples/cifar10/cifar10_full_solver_lr1.prototxt  |    26 +
 examples/cifar10/cifar10_full_solver_lr2.prototxt  |    26 +
 examples/cifar10/cifar10_full_train_test.prototxt  |   220 +
 examples/cifar10/cifar10_quick.prototxt            |   147 +
 examples/cifar10/cifar10_quick_solver.prototxt     |    25 +
 examples/cifar10/cifar10_quick_solver_lr1.prototxt |    25 +
 examples/cifar10/cifar10_quick_train_test.prototxt |   222 +
 examples/cifar10/convert_cifar_data.cpp            |   109 +
 examples/cifar10/create_cifar10.sh                 |    19 +
 examples/cifar10/readme.md                         |    98 +
 examples/cifar10/train_full.sh                     |    16 +
 examples/cifar10/train_quick.sh                    |    11 +
 examples/cpp_classification/classification.cpp     |   255 +
 examples/cpp_classification/readme.md              |    77 +
 examples/detection.ipynb                           |  8392 ++++++++
 examples/feature_extraction/imagenet_val.prototxt  |   238 +
 examples/feature_extraction/readme.md              |    74 +
 examples/finetune_flickr_style/assemble_data.py    |    98 +
 examples/finetune_flickr_style/flickr_style.csv.gz |   Bin 0 -> 2178982 bytes
 examples/finetune_flickr_style/readme.md           |   165 +
 examples/finetune_flickr_style/style_names.txt     |    20 +
 .../pascal_finetune_solver.prototxt                |    13 +
 .../pascal_finetune_trainval_test.prototxt         |   391 +
 .../nonlinear_auto_test.prototxt                   |    54 +
 .../nonlinear_auto_train.prototxt                  |    54 +
 .../hdf5_classification/nonlinear_solver.prototxt  |    15 +
 .../nonlinear_train_val.prototxt                   |    98 +
 examples/hdf5_classification/solver.prototxt       |    15 +
 examples/hdf5_classification/train_val.prototxt    |    68 +
 examples/imagenet/create_imagenet.sh               |    57 +
 examples/imagenet/make_imagenet_mean.sh            |    12 +
 examples/imagenet/readme.md                        |   105 +
 examples/imagenet/resume_training.sh               |     5 +
 examples/imagenet/train_caffenet.sh                |     4 +
 examples/images/cat.jpg                            |   Bin 0 -> 140391 bytes
 examples/images/cat_gray.jpg                       |   Bin 0 -> 92726 bytes
 examples/images/fish-bike.jpg                      |   Bin 0 -> 47638 bytes
 examples/mnist/convert_mnist_data.cpp              |   198 +
 examples/mnist/create_mnist.sh                     |    21 +
 examples/mnist/lenet.prototxt                      |   128 +
 examples/mnist/lenet_auto_solver.prototxt          |    24 +
 examples/mnist/lenet_consolidated_solver.prototxt  |   263 +
 examples/mnist/lenet_multistep_solver.prototxt     |    29 +
 examples/mnist/lenet_solver.prototxt               |    25 +
 examples/mnist/lenet_stepearly_solver.prototxt     |    28 +
 examples/mnist/lenet_train_test.prototxt           |   168 +
 examples/mnist/mnist_autoencoder.prototxt          |   323 +
 examples/mnist/mnist_autoencoder_solver.prototxt   |    19 +
 .../mnist_autoencoder_solver_adagrad.prototxt      |    17 +
 .../mnist_autoencoder_solver_nesterov.prototxt     |    20 +
 examples/mnist/readme.md                           |   287 +
 examples/mnist/train_lenet.sh                      |     3 +
 examples/mnist/train_lenet_consolidated.sh         |     4 +
 examples/mnist/train_mnist_autoencoder.sh          |     4 +
 examples/mnist/train_mnist_autoencoder_adagrad.sh  |     4 +
 examples/mnist/train_mnist_autoencoder_nesterov.sh |     4 +
 examples/net_surgery.ipynb                         |  6911 +++++++
 .../net_surgery/bvlc_caffenet_full_conv.prototxt   |   216 +
 examples/net_surgery/conv.prototxt                 |    26 +
 examples/pycaffe/caffenet.py                       |    55 +
 examples/pycaffe/layers/pyloss.py                  |    37 +
 examples/pycaffe/linreg.prototxt                   |    60 +
 examples/siamese/convert_mnist_siamese_data.cpp    |   123 +
 examples/siamese/create_mnist_siamese.sh           |    21 +
 examples/siamese/mnist_siamese.ipynb               |  1909 ++
 examples/siamese/mnist_siamese.prototxt            |   113 +
 examples/siamese/mnist_siamese_solver.prototxt     |    25 +
 examples/siamese/mnist_siamese_train_test.prototxt |   349 +
 examples/siamese/readme.md                         |   187 +
 examples/siamese/train_mnist_siamese.sh            |     5 +
 examples/web_demo/app.py                           |   227 +
 examples/web_demo/exifutil.py                      |    39 +
 examples/web_demo/readme.md                        |    41 +
 examples/web_demo/requirements.txt                 |     6 +
 examples/web_demo/templates/index.html             |   138 +
 include/caffe/blob.hpp                             |   280 +
 include/caffe/caffe.hpp                            |    19 +
 include/caffe/common.hpp                           |   172 +
 include/caffe/common_layers.hpp                    |   611 +
 include/caffe/data_layers.hpp                      |   327 +
 include/caffe/data_transformer.hpp                 |   151 +
 include/caffe/filler.hpp                           |   296 +
 include/caffe/internal_thread.hpp                  |    42 +
 include/caffe/layer.hpp                            |   470 +
 include/caffe/layer_factory.hpp                    |   127 +
 include/caffe/loss_layers.hpp                      |   768 +
 include/caffe/net.hpp                              |   268 +
 include/caffe/neuron_layers.hpp                    |   809 +
 include/caffe/python_layer.hpp                     |    67 +
 include/caffe/solver.hpp                           |   150 +
 include/caffe/syncedmem.hpp                        |    73 +
 include/caffe/test/test_caffe_main.hpp             |    78 +
 include/caffe/test/test_gradient_check_util.hpp    |   260 +
 include/caffe/util/benchmark.hpp                   |    52 +
 include/caffe/util/cudnn.hpp                       |   132 +
 include/caffe/util/db.hpp                          |    54 +
 include/caffe/util/db_leveldb.hpp                  |    73 +
 include/caffe/util/db_lmdb.hpp                     |    91 +
 include/caffe/util/device_alternate.hpp            |   102 +
 include/caffe/util/im2col.hpp                      |    32 +
 include/caffe/util/insert_splits.hpp               |    26 +
 include/caffe/util/io.hpp                          |   159 +
 include/caffe/util/math_functions.hpp              |   280 +
 include/caffe/util/mkl_alternate.hpp               |    97 +
 include/caffe/util/rng.hpp                         |    43 +
 include/caffe/util/upgrade_proto.hpp               |    64 +
 include/caffe/vision_layers.hpp                    |   524 +
 matlab/+caffe/+test/test_net.m                     |    96 +
 matlab/+caffe/+test/test_solver.m                  |    45 +
 matlab/+caffe/Blob.m                               |    78 +
 matlab/+caffe/Layer.m                              |    32 +
 matlab/+caffe/Net.m                                |   133 +
 matlab/+caffe/Solver.m                             |    56 +
 matlab/+caffe/get_net.m                            |    37 +
 matlab/+caffe/get_solver.m                         |    10 +
 matlab/+caffe/imagenet/ilsvrc_2012_mean.mat        |   Bin 0 -> 606799 bytes
 matlab/+caffe/io.m                                 |    33 +
 matlab/+caffe/private/CHECK.m                      |     7 +
 matlab/+caffe/private/CHECK_FILE_EXIST.m           |     7 +
 matlab/+caffe/private/caffe_.cpp                   |   546 +
 matlab/+caffe/private/is_valid_handle.m            |    27 +
 matlab/+caffe/reset_all.m                          |     8 +
 matlab/+caffe/run_tests.m                          |    19 +
 matlab/+caffe/set_device.m                         |    11 +
 matlab/+caffe/set_mode_cpu.m                       |     7 +
 matlab/+caffe/set_mode_gpu.m                       |     7 +
 matlab/CMakeLists.txt                              |    72 +
 matlab/demo/classification_demo.m                  |   147 +
 matlab/hdf5creation/.gitignore                     |     2 +
 matlab/hdf5creation/demo.m                         |    64 +
 matlab/hdf5creation/store2hdf5.m                   |    59 +
 models/bvlc_alexnet/deploy.prototxt                |   276 +
 models/bvlc_alexnet/readme.md                      |    25 +
 models/bvlc_alexnet/solver.prototxt                |    14 +
 models/bvlc_alexnet/train_val.prototxt             |   384 +
 models/bvlc_googlenet/deploy.prototxt              |  2156 ++
 models/bvlc_googlenet/quick_solver.prototxt        |    15 +
 models/bvlc_googlenet/readme.md                    |    32 +
 models/bvlc_googlenet/solver.prototxt              |    16 +
 models/bvlc_googlenet/train_val.prototxt           |  2433 +++
 models/bvlc_reference_caffenet/deploy.prototxt     |   212 +
 models/bvlc_reference_caffenet/readme.md           |    25 +
 models/bvlc_reference_caffenet/solver.prototxt     |    14 +
 models/bvlc_reference_caffenet/train_val.prototxt  |   400 +
 .../bvlc_reference_rcnn_ilsvrc13/deploy.prototxt   |   207 +
 models/bvlc_reference_rcnn_ilsvrc13/readme.md      |    20 +
 models/finetune_flickr_style/deploy.prototxt       |   342 +
 models/finetune_flickr_style/readme.md             |    24 +
 models/finetune_flickr_style/solver.prototxt       |    17 +
 models/finetune_flickr_style/train_val.prototxt    |   388 +
 python/CMakeLists.txt                              |    34 +
 python/caffe/__init__.py                           |     7 +
 python/caffe/_caffe.cpp                            |   301 +
 python/caffe/classifier.py                         |    97 +
 python/caffe/detector.py                           |   216 +
 python/caffe/draw.py                               |   213 +
 python/caffe/imagenet/ilsvrc_2012_mean.npy         |   Bin 0 -> 1572944 bytes
 python/caffe/io.py                                 |   379 +
 python/caffe/net_spec.py                           |   204 +
 python/caffe/pycaffe.py                            |   281 +
 python/caffe/test/test_net.py                      |    80 +
 python/caffe/test/test_net_spec.py                 |    67 +
 python/caffe/test/test_python_layer.py             |    63 +
 python/caffe/test/test_solver.py                   |    52 +
 python/classify.py                                 |   138 +
 python/detect.py                                   |   173 +
 python/draw_net.py                                 |    45 +
 python/requirements.txt                            |    17 +
 scripts/build_docs.sh                              |    20 +
 scripts/copy_notebook.py                           |    32 +
 scripts/cpp_lint.py                                |  4868 +++++
 scripts/deploy_docs.sh                             |    50 +
 scripts/download_model_binary.py                   |    76 +
 scripts/download_model_from_gist.sh                |    24 +
 scripts/gather_examples.sh                         |    29 +
 scripts/travis/travis_build_and_test.sh            |    34 +
 scripts/travis/travis_install.sh                   |    69 +
 scripts/travis/travis_setup_makefile_config.sh     |    23 +
 scripts/upload_model_to_gist.sh                    |    38 +
 src/caffe/CMakeLists.txt                           |    36 +
 src/caffe/blob.cpp                                 |   495 +
 src/caffe/common.cpp                               |   271 +
 src/caffe/data_transformer.cpp                     |   529 +
 src/caffe/internal_thread.cpp                      |    40 +
 src/caffe/layer_factory.cpp                        |   179 +
 src/caffe/layers/absval_layer.cpp                  |    45 +
 src/caffe/layers/absval_layer.cu                   |    34 +
 src/caffe/layers/accuracy_layer.cpp                |    91 +
 src/caffe/layers/argmax_layer.cpp                  |    63 +
 src/caffe/layers/base_conv_layer.cpp               |   298 +
 src/caffe/layers/base_data_layer.cpp               |    89 +
 src/caffe/layers/base_data_layer.cu                |    30 +
 src/caffe/layers/bnll_layer.cpp                    |    48 +
 src/caffe/layers/bnll_layer.cu                     |    60 +
 src/caffe/layers/concat_layer.cpp                  |    98 +
 src/caffe/layers/concat_layer.cu                   |    71 +
 src/caffe/layers/contrastive_loss_layer.cpp        |   121 +
 src/caffe/layers/contrastive_loss_layer.cu         |   111 +
 src/caffe/layers/conv_layer.cpp                    |    76 +
 src/caffe/layers/conv_layer.cu                     |    64 +
 src/caffe/layers/cudnn_conv_layer.cpp              |   130 +
 src/caffe/layers/cudnn_conv_layer.cu               |   160 +
 src/caffe/layers/cudnn_pooling_layer.cpp           |    50 +
 src/caffe/layers/cudnn_pooling_layer.cu            |    45 +
 src/caffe/layers/cudnn_relu_layer.cpp              |    46 +
 src/caffe/layers/cudnn_relu_layer.cu               |    57 +
 src/caffe/layers/cudnn_sigmoid_layer.cpp           |    46 +
 src/caffe/layers/cudnn_sigmoid_layer.cu            |    47 +
 src/caffe/layers/cudnn_softmax_layer.cpp           |    50 +
 src/caffe/layers/cudnn_softmax_layer.cu            |    48 +
 src/caffe/layers/cudnn_tanh_layer.cpp              |    46 +
 src/caffe/layers/cudnn_tanh_layer.cu               |    48 +
 src/caffe/layers/data_layer.cpp                    |   126 +
 src/caffe/layers/deconv_layer.cpp                  |    79 +
 src/caffe/layers/deconv_layer.cu                   |    64 +
 src/caffe/layers/dropout_layer.cpp                 |    78 +
 src/caffe/layers/dropout_layer.cu                  |    77 +
 src/caffe/layers/dummy_data_layer.cpp              |   115 +
 src/caffe/layers/eltwise_layer.cpp                 |   161 +
 src/caffe/layers/eltwise_layer.cu                  |   135 +
 src/caffe/layers/euclidean_loss_layer.cpp          |    57 +
 src/caffe/layers/euclidean_loss_layer.cu           |    44 +
 src/caffe/layers/exp_layer.cpp                     |    69 +
 src/caffe/layers/exp_layer.cu                      |    44 +
 src/caffe/layers/filter_layer.cpp                  |   127 +
 src/caffe/layers/filter_layer.cu                   |    70 +
 src/caffe/layers/flatten_layer.cpp                 |    44 +
 src/caffe/layers/hdf5_data_layer.cpp               |   167 +
 src/caffe/layers/hdf5_data_layer.cu                |    53 +
 src/caffe/layers/hdf5_output_layer.cpp             |    77 +
 src/caffe/layers/hdf5_output_layer.cu              |    43 +
 src/caffe/layers/hinge_loss_layer.cpp              |    82 +
 src/caffe/layers/im2col_layer.cpp                  |    95 +
 src/caffe/layers/im2col_layer.cu                   |    37 +
 src/caffe/layers/image_data_layer.cpp              |   159 +
 src/caffe/layers/infogain_loss_layer.cpp           |   110 +
 src/caffe/layers/inner_product_layer.cpp           |   129 +
 src/caffe/layers/inner_product_layer.cu            |    56 +
 src/caffe/layers/log_layer.cpp                     |    87 +
 src/caffe/layers/log_layer.cu                      |    57 +
 src/caffe/layers/loss_layer.cpp                    |    33 +
 src/caffe/layers/lrn_layer.cpp                     |   259 +
 src/caffe/layers/lrn_layer.cu                      |   203 +
 src/caffe/layers/memory_data_layer.cpp             |   121 +
 .../layers/multinomial_logistic_loss_layer.cpp     |    67 +
 src/caffe/layers/mvn_layer.cpp                     |   145 +
 src/caffe/layers/mvn_layer.cu                      |   124 +
 src/caffe/layers/neuron_layer.cpp                  |    16 +
 src/caffe/layers/pooling_layer.cpp                 |   319 +
 src/caffe/layers/pooling_layer.cu                  |   387 +
 src/caffe/layers/power_layer.cpp                   |   104 +
 src/caffe/layers/power_layer.cu                    |    87 +
 src/caffe/layers/prelu_layer.cpp                   |   140 +
 src/caffe/layers/prelu_layer.cu                    |   128 +
 src/caffe/layers/reduction_layer.cpp               |   132 +
 src/caffe/layers/reduction_layer.cu                |    93 +
 src/caffe/layers/relu_layer.cpp                    |    46 +
 src/caffe/layers/relu_layer.cu                     |    65 +
 src/caffe/layers/reshape_layer.cpp                 |    95 +
 .../layers/sigmoid_cross_entropy_loss_layer.cpp    |    80 +
 .../layers/sigmoid_cross_entropy_loss_layer.cu     |    37 +
 src/caffe/layers/sigmoid_layer.cpp                 |    49 +
 src/caffe/layers/sigmoid_layer.cu                  |    62 +
 src/caffe/layers/silence_layer.cpp                 |    27 +
 src/caffe/layers/silence_layer.cu                  |    28 +
 src/caffe/layers/slice_layer.cpp                   |   120 +
 src/caffe/layers/slice_layer.cu                    |    71 +
 src/caffe/layers/softmax_layer.cpp                 |    96 +
 src/caffe/layers/softmax_layer.cu                  |   149 +
 src/caffe/layers/softmax_loss_layer.cpp            |   130 +
 src/caffe/layers/softmax_loss_layer.cu             |   125 +
 src/caffe/layers/split_layer.cpp                   |    60 +
 src/caffe/layers/split_layer.cu                    |    38 +
 src/caffe/layers/spp_layer.cpp                     |   193 +
 src/caffe/layers/tanh_layer.cpp                    |    46 +
 src/caffe/layers/tanh_layer.cu                     |    59 +
 src/caffe/layers/threshold_layer.cpp               |    34 +
 src/caffe/layers/threshold_layer.cu                |    33 +
 src/caffe/layers/window_data_layer.cpp             |   466 +
 src/caffe/net.cpp                                  |   852 +
 src/caffe/proto/caffe.proto                        |  1109 ++
 src/caffe/solver.cpp                               |   783 +
 src/caffe/syncedmem.cpp                            |   113 +
 src/caffe/test/CMakeLists.txt                      |    36 +
 src/caffe/test/test_accuracy_layer.cpp             |   231 +
 src/caffe/test/test_argmax_layer.cpp               |   168 +
 src/caffe/test/test_benchmark.cpp                  |    90 +
 src/caffe/test/test_blob.cpp                       |   294 +
 src/caffe/test/test_caffe_main.cpp                 |    40 +
 src/caffe/test/test_common.cpp                     |    66 +
 src/caffe/test/test_concat_layer.cpp               |   176 +
 src/caffe/test/test_contrastive_loss_layer.cpp     |   146 +
 src/caffe/test/test_convolution_layer.cpp          |   699 +
 src/caffe/test/test_data/generate_sample_data.py   |    53 +
 src/caffe/test/test_data/sample_data.h5            |   Bin 0 -> 11824 bytes
 src/caffe/test/test_data/sample_data_2_gzip.h5     |   Bin 0 -> 15446 bytes
 src/caffe/test/test_data/sample_data_list.txt      |     2 +
 src/caffe/test/test_data_layer.cpp                 |   427 +
 src/caffe/test/test_data_transformer.cpp           |   355 +
 src/caffe/test/test_db.cpp                         |   134 +
 src/caffe/test/test_deconvolution_layer.cpp        |   158 +
 src/caffe/test/test_dummy_data_layer.cpp           |   193 +
 src/caffe/test/test_eltwise_layer.cpp              |   209 +
 src/caffe/test/test_euclidean_loss_layer.cpp       |    91 +
 src/caffe/test/test_filler.cpp                     |   243 +
 src/caffe/test/test_filter_layer.cpp               |   128 +
 src/caffe/test/test_flatten_layer.cpp              |   109 +
 src/caffe/test/test_gradient_based_solver.cpp      |   561 +
 src/caffe/test/test_hdf5_output_layer.cpp          |   120 +
 src/caffe/test/test_hdf5data_layer.cpp             |   135 +
 src/caffe/test/test_hinge_loss_layer.cpp           |    76 +
 src/caffe/test/test_im2col_kernel.cu               |   125 +
 src/caffe/test/test_im2col_layer.cpp               |   118 +
 src/caffe/test/test_image_data_layer.cpp           |   179 +
 src/caffe/test/test_infogain_loss_layer.cpp        |    70 +
 src/caffe/test/test_inner_product_layer.cpp        |   113 +
 src/caffe/test/test_internal_thread.cpp            |    23 +
 src/caffe/test/test_io.cpp                         |   422 +
 src/caffe/test/test_layer_factory.cpp              |    35 +
 src/caffe/test/test_lrn_layer.cpp                  |   250 +
 src/caffe/test/test_math_functions.cpp             |   246 +
 src/caffe/test/test_maxpool_dropout_layers.cpp     |   127 +
 src/caffe/test/test_memory_data_layer.cpp          |   296 +
 .../test/test_multinomial_logistic_loss_layer.cpp  |    61 +
 src/caffe/test/test_mvn_layer.cpp                  |   169 +
 src/caffe/test/test_net.cpp                        |  2375 +++
 src/caffe/test/test_neuron_layer.cpp               |   842 +
 src/caffe/test/test_platform.cpp                   |    57 +
 src/caffe/test/test_pooling_layer.cpp              |  1182 ++
 src/caffe/test/test_power_layer.cpp                |   170 +
 src/caffe/test/test_protobuf.cpp                   |    29 +
 src/caffe/test/test_random_number_generator.cpp    |   521 +
 src/caffe/test/test_reduction_layer.cpp            |   297 +
 src/caffe/test/test_reshape_layer.cpp              |   280 +
 .../test/test_sigmoid_cross_entropy_loss_layer.cpp |   122 +
 src/caffe/test/test_slice_layer.cpp                |   189 +
 src/caffe/test/test_softmax_layer.cpp              |   149 +
 src/caffe/test/test_softmax_with_loss_layer.cpp    |   110 +
 src/caffe/test/test_solver.cpp                     |   108 +
 src/caffe/test/test_split_layer.cpp                |  1045 +
 src/caffe/test/test_spp_layer.cpp                  |   131 +
 src/caffe/test/test_stochastic_pooling.cpp         |   176 +
 src/caffe/test/test_syncedmem.cpp                  |   126 +
 src/caffe/test/test_tanh_layer.cpp                 |   101 +
 src/caffe/test/test_threshold_layer.cpp            |    98 +
 src/caffe/test/test_upgrade_proto.cpp              |  2909 +++
 src/caffe/test/test_util_blas.cpp                  |   134 +
 src/caffe/util/benchmark.cpp                       |   168 +
 src/caffe/util/cudnn.cpp                           |    23 +
 src/caffe/util/db.cpp                              |    31 +
 src/caffe/util/db_leveldb.cpp                      |    21 +
 src/caffe/util/db_lmdb.cpp                         |    51 +
 src/caffe/util/im2col.cpp                          |    83 +
 src/caffe/util/im2col.cu                           |   144 +
 src/caffe/util/insert_splits.cpp                   |   144 +
 src/caffe/util/io.cpp                              |   306 +
 src/caffe/util/math_functions.cpp                  |   397 +
 src/caffe/util/math_functions.cu                   |   465 +
 src/caffe/util/upgrade_proto.cpp                   |   940 +
 src/gtest/CMakeLists.txt                           |     5 +
 src/gtest/gtest-all.cpp                            |  9117 +++++++++
 src/gtest/gtest.h                                  | 19537 +++++++++++++++++++
 src/gtest/gtest_main.cc                            |    39 +
 tools/CMakeLists.txt                               |    29 +
 tools/caffe.cpp                                    |   311 +
 tools/compute_image_mean.cpp                       |   119 +
 tools/convert_imageset.cpp                         |   152 +
 tools/device_query.cpp                             |     7 +
 tools/extra/extract_seconds.py                     |    64 +
 tools/extra/launch_resize_and_crop_images.sh       |    24 +
 tools/extra/parse_log.py                           |   196 +
 tools/extra/parse_log.sh                           |    46 +
 tools/extra/plot_log.gnuplot.example               |    69 +
 tools/extra/plot_training_log.py.example           |   187 +
 tools/extra/resize_and_crop_images.py              |   109 +
 tools/extract_features.cpp                         |   189 +
 tools/finetune_net.cpp                             |     7 +
 tools/net_speed_benchmark.cpp                      |     7 +
 tools/test_net.cpp                                 |     7 +
 tools/train_net.cpp                                |     7 +
 tools/upgrade_net_proto_binary.cpp                 |    49 +
 tools/upgrade_net_proto_text.cpp                   |    55 +
 462 files changed, 144865 insertions(+)

diff --git a/.Doxyfile b/.Doxyfile
new file mode 100644
index 0000000..432c9c6
--- /dev/null
+++ b/.Doxyfile
@@ -0,0 +1,2335 @@
+# Doxyfile 1.8.8
+
+# This file describes the settings to be used by the documentation system
+# doxygen (www.doxygen.org) for a project.
+#
+# All text after a double hash (##) is considered a comment and is placed in
+# front of the TAG it is preceding.
+#
+# All text after a single hash (#) is considered a comment and will be ignored.
+# The format is:
+# TAG = value [value, ...]
+# For lists, items can also be appended using:
+# TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (\" \").
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# This tag specifies the encoding used for all characters in the config file
+# that follow. The default is UTF-8 which is also the encoding used for all text
+# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
+# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
+# for the list of possible encodings.
+# The default value is: UTF-8.
+
+DOXYFILE_ENCODING      = UTF-8
+
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
+# double-quotes, unless you are using Doxywizard) that should identify the
+# project for which the documentation is generated. This name is used in the
+# title of most generated pages and in a few other places.
+# The default value is: My Project.
+
+PROJECT_NAME           = "Caffe"
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
+# could be handy for archiving the generated documentation or if some version
+# control system is used.
+
+PROJECT_NUMBER         =
+
+# Using the PROJECT_BRIEF tag one can provide an optional one line description
+# for a project that appears at the top of each page and should give the viewer a
+# quick idea about the purpose of the project. Keep the description short.
+
+PROJECT_BRIEF          =
+
+# With the PROJECT_LOGO tag one can specify a logo or icon that is included in
+# the documentation. The maximum height of the logo should not exceed 55 pixels
+# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo
+# to the output directory.
+
+PROJECT_LOGO           =
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
+# into which the generated documentation will be written. If a relative path is
+# entered, it will be relative to the location where doxygen was started. If
+# left blank the current directory will be used.
+
+OUTPUT_DIRECTORY       = ./doxygen/
+
+# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub-
+# directories (in 2 levels) under the output directory of each output format and
+# will distribute the generated files over these directories. Enabling this
+# option can be useful when feeding doxygen a huge amount of source files, where
+# putting all generated files in the same directory would otherwise cause
+# performance problems for the file system.
+# The default value is: NO.
+
+CREATE_SUBDIRS         = NO
+
+# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
+# characters to appear in the names of generated files. If set to NO, non-ASCII
+# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
+# U+3044.
+# The default value is: NO.
+
+ALLOW_UNICODE_NAMES    = NO
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
+# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
+# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
+# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
+# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
+# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
+# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
+# Ukrainian and Vietnamese.
+# The default value is: English.
+
+OUTPUT_LANGUAGE        = English
+
+# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member
+# descriptions after the members that are listed in the file and class
+# documentation (similar to Javadoc). Set to NO to disable this.
+# The default value is: YES.
+
+BRIEF_MEMBER_DESC      = YES
+
+# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief
+# description of a member or function before the detailed description
+#
+# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+# The default value is: YES.
+
+REPEAT_BRIEF           = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator that is
+# used to form the text in various listings. Each string in this list, if found
+# as the leading text of the brief description, will be stripped from the text
+# and the result, after processing the whole list, is used as the annotated
+# text. Otherwise, the brief description is used as-is. If left blank, the
+# following values are used ($name is automatically replaced with the name of
+# the entity): The $name class, The $name widget, The $name file, is, provides,
+# specifies, contains, represents, a, an and the.
+
+ABBREVIATE_BRIEF       =
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# doxygen will generate a detailed section even if there is only a brief
+# description.
+# The default value is: NO.
+
+ALWAYS_DETAILED_SEC    = NO
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+# The default value is: NO.
+
+INLINE_INHERITED_MEMB  = NO
+
+# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path
+# before file names in the file list and in the header files. If set to NO the
+# shortest path that makes the file name unique will be used.
+# The default value is: YES.
+
+FULL_PATH_NAMES        = YES
+
+# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
+# Stripping is only done if one of the specified strings matches the left-hand
+# part of the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the path to
+# strip.
+#
+# Note that you can specify absolute paths here, but also relative paths, which
+# will be relative from the directory where doxygen is started.
+# This tag requires that the tag FULL_PATH_NAMES is set to YES.
+
+STRIP_FROM_PATH        =
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
+# path mentioned in the documentation of a class, which tells the reader which
+# header file to include in order to use a class. If left blank only the name of
+# the header file containing the class definition is used. Otherwise one should
+# specify the list of include paths that are normally passed to the compiler
+# using the -I flag.
+
+STRIP_FROM_INC_PATH    =
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
+# less readable) file names. This can be useful if your file system doesn't
+# support long names like on DOS, Mac, or CD-ROM.
+# The default value is: NO.
+
+SHORT_NAMES            = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
+# first line (until the first dot) of a Javadoc-style comment as the brief
+# description. If set to NO, the Javadoc-style will behave just like regular Qt-
+# style comments (thus requiring an explicit @brief command for a brief
+# description.)
+# The default value is: NO.
+
+JAVADOC_AUTOBRIEF      = NO
+
+# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
+# line (until the first dot) of a Qt-style comment as the brief description. If
+# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
+# requiring an explicit \brief command for a brief description.)
+# The default value is: NO.
+
+QT_AUTOBRIEF           = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
+# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
+# a brief description. This used to be the default behavior. The new default is
+# to treat a multi-line C++ comment block as a detailed description. Set this
+# tag to YES if you prefer the old behavior instead.
+#
+# Note that setting this tag to YES also means that Rational Rose comments are
+# not recognized any more.
+# The default value is: NO.
+
+MULTILINE_CPP_IS_BRIEF = NO
+
+# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
+# documentation from any documented member that it re-implements.
+# The default value is: YES.
+
+INHERIT_DOCS           = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a
+# new page for each member. If set to NO, the documentation of a member will be
+# part of the file/class/namespace that contains it.
+# The default value is: NO.
+
+SEPARATE_MEMBER_PAGES  = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
+# uses this value to replace tabs by spaces in code fragments.
+# Minimum value: 1, maximum value: 16, default value: 4.
+
+TAB_SIZE               = 8
+
+# This tag can be used to specify a number of aliases that act as commands in
+# the documentation. An alias has the form:
+# name=value
+# For example adding
+# "sideeffect=@par Side Effects:\n"
+# will allow you to put the command \sideeffect (or @sideeffect) in the
+# documentation, which will result in a user-defined paragraph with heading
+# "Side Effects:". You can put \n's in the value part of an alias to insert
+# newlines.
+
+ALIASES                =
+
+# This tag can be used to specify a number of word-keyword mappings (TCL only).
+# A mapping has the form "name=value". For example adding "class=itcl::class"
+# will allow you to use the command class in the itcl::class meaning.
+
+TCL_SUBST              =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
+# only. Doxygen will then generate output that is more tailored for C. For
+# instance, some of the names that are used will be different. The list of all
+# members will be omitted, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_FOR_C  = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
+# Python sources only. Doxygen will then generate output that is more tailored
+# for that language. For instance, namespaces will be presented as packages,
+# qualified scopes will look different, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_JAVA   = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources. Doxygen will then generate output that is tailored for Fortran.
+# The default value is: NO.
+
+OPTIMIZE_FOR_FORTRAN   = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for VHDL.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_VHDL   = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension, and
+# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
+# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran:
+# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran:
+# Fortran. In the latter case the parser tries to guess whether the code is fixed
+# or free formatted code, this is the default for Fortran type files), VHDL. For
+# instance to make doxygen treat .inc files as Fortran files (default is PHP),
+# and .f files as C (default is Fortran), use: inc=Fortran f=C.
+#
+# Note: For files without an extension you can use no_extension as a placeholder.
+#
+# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
+# the files are not read by doxygen.
+
+EXTENSION_MAPPING      =
+
+# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
+# according to the Markdown format, which allows for more readable
+# documentation. See http://daringfireball.net/projects/markdown/ for details.
+# The output of markdown processing is further processed by doxygen, so you can
+# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
+# case of backward compatibility issues.
+# The default value is: YES.
+
+MARKDOWN_SUPPORT       = YES
+
+# When enabled doxygen tries to link words that correspond to documented
+# classes, or namespaces to their corresponding documentation. Such a link can
+# be prevented in individual cases by putting a % sign in front of the word
+# or globally by setting AUTOLINK_SUPPORT to NO.
+# The default value is: YES.
+
+AUTOLINK_SUPPORT       = YES
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should set this
+# tag to YES in order to let doxygen match function declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string);
+# versus func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+# The default value is: NO.
+
+BUILTIN_STL_SUPPORT    = NO
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+# The default value is: NO.
+
+CPP_CLI_SUPPORT        = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
+# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
+# will parse them like normal C++ but will assume all classes use public instead
+# of private inheritance when no explicit protection keyword is present.
+# The default value is: NO.
+
+SIP_SUPPORT            = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate
+# getter and setter methods for a property. Setting this option to YES will make
+# doxygen replace the get and set methods by a property in the documentation.
+# This will only work if the methods are indeed getting or setting a simple
+# type. If this is not the case, or you want to show the methods anyway, you
+# should set this option to NO.
+# The default value is: YES.
+
+IDL_PROPERTY_SUPPORT   = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES, then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+# The default value is: NO.
+
+DISTRIBUTE_GROUP_DOC   = NO
+
+# Set the SUBGROUPING tag to YES to allow class member groups of the same type
+# (for instance a group of public functions) to be put as a subgroup of that
+# type (e.g. under the Public Functions section). Set it to NO to prevent
+# subgrouping. Alternatively, this can be done per class using the
+# \nosubgrouping command.
+# The default value is: YES.
+
+SUBGROUPING            = YES
+
+# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
+# are shown inside the group in which they are included (e.g. using \ingroup)
+# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
+# and RTF).
+#
+# Note that this feature does not work in combination with
+# SEPARATE_MEMBER_PAGES.
+# The default value is: NO.
+
+INLINE_GROUPED_CLASSES = NO
+
+# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
+# with only public data fields or simple typedef fields will be shown inline in
+# the documentation of the scope in which they are defined (i.e. file,
+# namespace, or group documentation), provided this scope is documented. If set
+# to NO, structs, classes, and unions are shown on a separate page (for HTML and
+# Man pages) or section (for LaTeX and RTF).
+# The default value is: NO.
+
+INLINE_SIMPLE_STRUCTS  = NO
+
+# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
+# enum is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class. And the struct will be named TypeS. This can typically be
+# useful for C code in case the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+# The default value is: NO.
+
+TYPEDEF_HIDES_STRUCT   = NO
+
+# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
+# cache is used to resolve symbols given their name and scope. Since this can be
+# an expensive process and often the same symbol appears multiple times in the
+# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
+# doxygen will become slower. If the cache is too large, memory is wasted. The
+# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
+# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
+# symbols. At the end of a run doxygen will report the cache usage and suggest
+# the optimal cache size from a speed point of view.
+# Minimum value: 0, maximum value: 9, default value: 0.
+
+LOOKUP_CACHE_SIZE      = 0
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
+# documentation are documented, even if no documentation was available. Private
+# class members and static file members will be hidden unless the
+# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
+# Note: This will also disable the warnings about undocumented members that are
+# normally produced when WARNINGS is set to YES.
+# The default value is: NO.
+
+EXTRACT_ALL            = NO
+
+# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will
+# be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PRIVATE        = NO
+
+# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal
+# scope will be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PACKAGE        = NO
+
+# If the EXTRACT_STATIC tag is set to YES all static members of a file will be
+# included in the documentation.
+# The default value is: NO.
+
+EXTRACT_STATIC         = NO
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined
+# locally in source files will be included in the documentation. If set to NO
+# only classes defined in header files are included. Does not have any effect
+# for Java sources.
+# The default value is: YES.
+
+EXTRACT_LOCAL_CLASSES  = YES
+
+# This flag is only useful for Objective-C code. When set to YES local methods,
+# which are defined in the implementation section but not in the interface, are
+# included in the documentation. If set to NO only methods in the interface are
+# included.
+# The default value is: NO.
+
+EXTRACT_LOCAL_METHODS  = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base name of
+# the file that contains the anonymous namespace. By default, anonymous namespaces
+# are hidden.
+# The default value is: NO.
+
+EXTRACT_ANON_NSPACES   = NO
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
+# undocumented members inside documented classes or files. If set to NO these
+# members will be included in the various overviews, but no documentation
+# section is generated. This option has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_MEMBERS     = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy. If set
+# to NO these classes will be included in the various overviews. This option has
+# no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_CLASSES     = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
+# (class|struct|union) declarations. If set to NO these declarations will be
+# included in the documentation.
+# The default value is: NO.
+
+HIDE_FRIEND_COMPOUNDS  = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
+# documentation blocks found inside the body of a function. If set to NO these
+# blocks will be appended to the function's detailed documentation block.
+# The default value is: NO.
+
+HIDE_IN_BODY_DOCS      = NO
+
+# The INTERNAL_DOCS tag determines if documentation that is typed after a
+# \internal command is included. If the tag is set to NO then the documentation
+# will be excluded. Set it to YES to include the internal documentation.
+# The default value is: NO.
+
+INTERNAL_DOCS          = NO
+
+# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
+# names in lower-case letters. If set to YES upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+# The default value is: system dependent.
+
+CASE_SENSE_NAMES       = YES
+
+# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
+# their full class and namespace scopes in the documentation. If set to YES the
+# scope will be hidden.
+# The default value is: NO.
+
+HIDE_SCOPE_NAMES       = NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
+# the files that are included by a file in the documentation of that file.
+# The default value is: YES.
+
+SHOW_INCLUDE_FILES     = YES
+
+# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
+# grouped member an include statement to the documentation, telling the reader
+# which file to include in order to use the member.
+# The default value is: NO.
+
+SHOW_GROUPED_MEMB_INC  = NO
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
+# files with double quotes in the documentation rather than with sharp brackets.
+# The default value is: NO.
+
+FORCE_LOCAL_INCLUDES   = NO
+
+# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
+# documentation for inline members.
+# The default value is: YES.
+
+INLINE_INFO            = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
+# (detailed) documentation of file and class members alphabetically by member
+# name. If set to NO the members will appear in declaration order.
+# The default value is: YES.
+
+SORT_MEMBER_DOCS       = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
+# descriptions of file, namespace and class members alphabetically by member
+# name. If set to NO the members will appear in declaration order. Note that
+# this will also influence the order of the classes in the class list.
+# The default value is: NO.
+
+SORT_BRIEF_DOCS        = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
+# (brief and detailed) documentation of class members so that constructors and
+# destructors are listed first. If set to NO the constructors will appear in the
+# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
+# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
+# member documentation.
+# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
+# detailed member documentation.
+# The default value is: NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
+# of group names into alphabetical order. If set to NO the group names will
+# appear in their defined order.
+# The default value is: NO.
+
+SORT_GROUP_NAMES       = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
+# fully-qualified names, including namespaces. If set to NO, the class list will
+# be sorted only by class name, not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the alphabetical
+# list.
+# The default value is: NO.
+
+SORT_BY_SCOPE_NAME     = NO
+
+# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
+# type resolution of all parameters of a function it will reject a match between
+# the prototype and the implementation of a member function even if there is
+# only one candidate or it is obvious which candidate to choose by doing a
+# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
+# accept a match between prototype and implementation in such cases.
+# The default value is: NO.
+
+STRICT_PROTO_MATCHING  = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the
+# todo list. This list is created by putting \todo commands in the
+# documentation.
+# The default value is: YES.
+
+GENERATE_TODOLIST      = YES
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the
+# test list. This list is created by putting \test commands in the
+# documentation.
+# The default value is: YES.
+
+GENERATE_TESTLIST      = YES
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
+# list. This list is created by putting \bug commands in the documentation.
+# The default value is: YES.
+
+GENERATE_BUGLIST       = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
+# the deprecated list. This list is created by putting \deprecated commands in
+# the documentation.
+# The default value is: YES.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional documentation
+# sections, marked by \if <section_label> ... \endif and \cond <section_label>
+# ... \endcond blocks.
+
+ENABLED_SECTIONS       =
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
+# initial value of a variable or macro / define can have for it to appear in the
+# documentation. If the initializer consists of more lines than specified here
+# it will be hidden. Use a value of 0 to hide initializers completely. The
+# appearance of the value of individual variables and macros / defines can be
+# controlled using \showinitializer or \hideinitializer command in the
+# documentation regardless of this setting.
+# Minimum value: 0, maximum value: 10000, default value: 30.
+
+MAX_INITIALIZER_LINES  = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
+# the bottom of the documentation of classes and structs. If set to YES the list
+# will mention the files that were used to generate the documentation.
+# The default value is: YES.
+
+SHOW_USED_FILES        = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
+# will remove the Files entry from the Quick Index and from the Folder Tree View
+# (if specified).
+# The default value is: YES.
+
+SHOW_FILES             = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
+# page. This will remove the Namespaces entry from the Quick Index and from the
+# Folder Tree View (if specified).
+# The default value is: YES.
+
+SHOW_NAMESPACES        = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output is used as
+# the file version. For an example see the documentation.
+
+FILE_VERSION_FILTER    =
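+
+# For illustration only (assuming the sources live in a git checkout), a filter
+# could take each file's version from git; doxygen appends the file name after
+# the trailing --, so the version becomes the short hash of the last commit
+# that touched the file:
+#
+#   FILE_VERSION_FILTER = "git log -n 1 --pretty=format:%h --"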
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option. You can
+# optionally specify a file name after the option, if omitted DoxygenLayout.xml
+# will be used as the name of the layout file.
+#
+# Note that if you run doxygen from a directory containing a file called
+# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
+# tag is left empty.
+
+LAYOUT_FILE            =
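+
+# For example (shown only as a sketch), the default layout can be written out
+# once and then customized:
+#
+#   doxygen -l DoxygenLayout.xml
+#   LAYOUT_FILE = DoxygenLayout.xml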
+
+# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
+# the reference definitions. This must be a list of .bib files. The .bib
+# extension is automatically appended if omitted. This requires the bibtex tool
+# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
+# For LaTeX the style of the bibliography can be controlled using
+# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
+# search path. See also \cite for info how to create references.
+
+CITE_BIB_FILES         =
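+
+# As a sketch, assuming a bibliography file docs/references.bib existed in this
+# tree (none ships with it), one could write:
+#
+#   CITE_BIB_FILES = docs/references.bib
+#
+# and then reference its entries from the sources with \cite{entry_name}.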
+
+#---------------------------------------------------------------------------
+# Configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated to
+# standard output by doxygen. If QUIET is set to YES this implies that the
+# messages are off.
+# The default value is: NO.
+
+QUIET                  = YES
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated to standard error ( stderr) by doxygen. If WARNINGS is set to YES
+# this implies that the warnings are on.
+#
+# Tip: Turn warnings on while writing the documentation.
+# The default value is: YES.
+
+WARNINGS               = YES
+
+# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate
+# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
+# will automatically be disabled.
+# The default value is: YES.
+
+WARN_IF_UNDOCUMENTED   = NO
+
+# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some parameters
+# in a documented function, or documenting parameters that don't exist or using
+# markup commands wrongly.
+# The default value is: YES.
+
+WARN_IF_DOC_ERROR      = YES
+
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
+# are documented, but have no documentation for their parameters or return
+# value. If set to NO doxygen will only warn about wrong or incomplete parameter
+# documentation, but not about the absence of documentation.
+# The default value is: NO.
+
+WARN_NO_PARAMDOC       = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that doxygen
+# can produce. The string should contain the $file, $line, and $text tags, which
+# will be replaced by the file and line number from which the warning originated
+# and the warning text. Optionally the format may contain $version, which will
+# be replaced by the version of the file (if it could be obtained via
+# FILE_VERSION_FILTER)
+# The default value is: $file:$line: $text.
+
+WARN_FORMAT            = "$file:$line: $text"
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning and error
+# messages should be written. If left blank the output is written to standard
+# error (stderr).
+
+WARN_LOGFILE           =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag is used to specify the files and/or directories that contain
+# documented source files. You may enter file names like myfile.cpp or
+# directories like /usr/src/myproject. Separate the files or directories with
+# spaces.
+# Note: If this tag is empty the current directory is searched.
+
+INPUT                  = ./include/caffe \
+                         ./src/caffe
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
+# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
+# documentation (see: http://www.gnu.org/software/libiconv) for the list of
+# possible encodings.
+# The default value is: UTF-8.
+
+INPUT_ENCODING         = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank the
+# following patterns are tested: *.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii,
+# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp,
+# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown,
+# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf,
+# *.qsf, *.as and *.js.
+
+FILE_PATTERNS          =
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories should
+# be searched for input files as well.
+# The default value is: NO.
+
+RECURSIVE              = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+#
+# Note that relative paths are relative to the directory from which doxygen is
+# run.
+
+EXCLUDE                = ./src/caffe/test/ \
+                         ./include/caffe/test/
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix file system feature) are excluded
+# from the input.
+# The default value is: NO.
+
+EXCLUDE_SYMLINKS       = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories.
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS       =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
+
+EXCLUDE_SYMBOLS        =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH           =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS       =
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE      = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH             =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+
+INPUT_FILTER           =
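+
+# A minimal illustration (not enabled here): the filter is any program that
+# takes the file name as its argument and writes the transformed text to
+# standard output. For instance, sources kept in Latin-1 could be transcoded on
+# the fly with:
+#
+#   INPUT_FILTER = "iconv -f latin1 -t utf8"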
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+
+FILTER_PATTERNS        =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER ) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES    = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERNS (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you host a project on, for instance, GitHub
+# and want to reuse the introduction page also for the doxygen output.
+
+USE_MDFILE_AS_MAINPAGE =
+
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# VERBATIM_HEADERS is also set to NO.
+# The default value is: NO.
+
+SOURCE_BROWSER         = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# classes and enums directly into the documentation.
+# The default value is: NO.
+
+INLINE_SOURCES         = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
+# special comment blocks from generated source code fragments. Normal C, C++ and
+# Fortran comments will always remain visible.
+# The default value is: YES.
+
+STRIP_CODE_COMMENTS    = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
+# function all documented functions referencing it will be listed.
+# The default value is: NO.
+
+REFERENCED_BY_RELATION = NO
+
+# If the REFERENCES_RELATION tag is set to YES then for each documented function
+# all documented entities called/used by that function will be listed.
+# The default value is: NO.
+
+REFERENCES_RELATION    = NO
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
+# to YES, then the hyperlinks from functions in REFERENCES_RELATION and
+# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
+# link to the documentation.
+# The default value is: YES.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
+# source code will show a tooltip with additional information such as prototype,
+# brief description and links to the definition and documentation. Since this
+# will make the HTML file larger and loading of large files a bit slower, you
+# can opt to disable this feature.
+# The default value is: YES.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+SOURCE_TOOLTIPS        = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code will
+# point to the HTML generated by the htags(1) tool instead of doxygen built-in
+# source browser. The htags tool is part of GNU's global source tagging system
+# (see http://www.gnu.org/software/global/global.html). You will need version
+# 4.8.6 or higher.
+#
+# To use it do the following:
+# - Install the latest version of global
+# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
+# - Make sure the INPUT points to the root of the source tree
+# - Run doxygen as normal
+#
+# Doxygen will invoke htags (and that will in turn invoke gtags), so these
+# tools must be available from the command line (i.e. in the search path).
+#
+# The result: instead of the source browser generated by doxygen, the links to
+# source code will now point to the output of htags.
+# The default value is: NO.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+USE_HTAGS              = NO
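+
+# As a sketch of the steps listed above (assuming GNU global is installed and
+# INPUT points at the root of the source tree), switching to htags-based source
+# links would amount to:
+#
+#   SOURCE_BROWSER = YES
+#   USE_HTAGS      = YES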
+
+# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
+# verbatim copy of the header file for each class for which an include is
+# specified. Set to NO to disable this.
+# See also: Section \class.
+# The default value is: YES.
+
+VERBATIM_HEADERS       = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
+# compounds will be generated. Enable this if the project contains a lot of
+# classes, structs, unions or interfaces.
+# The default value is: YES.
+
+ALPHABETICAL_INDEX     = YES
+
+# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
+# which the alphabetical index list will be split.
+# Minimum value: 1, maximum value: 20, default value: 5.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+COLS_IN_ALPHA_INDEX    = 5
+
+# In case all classes in a project start with a common prefix, all classes will
+# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
+# can be used to specify a prefix (or a list of prefixes) that should be ignored
+# while generating the index headers.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+IGNORE_PREFIX          =
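+
+# A hypothetical example of the format (the classes in this project do not
+# share such a prefix): if every class were named CaffeSomething, the entries
+# could be indexed under their real initial instead of all under C with:
+#
+#   IGNORE_PREFIX = Caffe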
+
+#---------------------------------------------------------------------------
+# Configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output
+# The default value is: YES.
+
+GENERATE_HTML          = YES
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_OUTPUT            = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
+# generated HTML page (for example: .htm, .php, .asp).
+# The default value is: .html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FILE_EXTENSION    = .html
+
+# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
+# each generated HTML page. If the tag is left blank doxygen will generate a
+# standard header.
+#
+# To get valid HTML, the header file must include any scripts and style sheets
+# that doxygen needs, which depend on the configuration options used (e.g. the
+# setting GENERATE_TREEVIEW). It is highly recommended to start with a default
+# header using
+# doxygen -w html new_header.html new_footer.html new_stylesheet.css
+# YourConfigFile
+# and then modify the file new_header.html. See also section "Doxygen usage"
+# for information on how to generate the default header that doxygen normally
+# uses.
+# Note: The header is subject to change so you typically have to regenerate the
+# default header when upgrading to a newer version of doxygen. For a description
+# of the possible markers and block names see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_HEADER            =
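+
+# Following the doxygen -w html command shown above, a customized header would
+# then be wired in by pointing this tag at the edited file, e.g. (illustrative
+# only, no custom header ships with this configuration):
+#
+#   HTML_HEADER = new_header.html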
+
+# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
+# generated HTML page. If the tag is left blank doxygen will generate a standard
+# footer. See HTML_HEADER for more information on how to generate a default
+# footer and what special commands can be used inside the footer. See also
+# section "Doxygen usage" for information on how to generate the default footer
+# that doxygen normally uses.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FOOTER            =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
+# sheet that is used by each HTML page. It can be used to fine-tune the look of
+# the HTML output. If left blank doxygen will generate a default style sheet.
+# See also section "Doxygen usage" for information on how to generate the style
+# sheet that doxygen normally uses.
+# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
+# it is more robust and this tag (HTML_STYLESHEET) will in the future become
+# obsolete.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_STYLESHEET        =
+
+# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# cascading style sheets that are included after the standard style sheets
+# created by doxygen. Using this option one can overrule certain style aspects.
+# This is preferred over using HTML_STYLESHEET since it does not replace the
+# standard style sheet and is therefore more robust against future updates.
+# Doxygen will copy the style sheet files to the output directory.
+# Note: The order of the extra stylesheet files is of importance (e.g. the last
+# stylesheet in the list overrules the setting of the previous ones in the
+# list). For an example see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_STYLESHEET  =
+
+# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the HTML output directory. Note
+# that these files will be copied to the base HTML output directory. Use the
+# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
+# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
+# files will be copied as-is; there are no commands or markers available.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_FILES       =
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
+# will adjust the colors in the stylesheet and background images according to
+# this color. Hue is specified as an angle on a colorwheel, see
+# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
+# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
+# purple, and 360 is red again.
+# Minimum value: 0, maximum value: 359, default value: 220.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_HUE    = 220
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
+# in the HTML output. For a value of 0 the output will use grayscales only. A
+# value of 255 will produce the most vivid colors.
+# Minimum value: 0, maximum value: 255, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_SAT    = 100
+
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
+# luminance component of the colors in the HTML output. Values below 100
+# gradually make the output lighter, whereas values above 100 make the output
+# darker. The value divided by 100 is the actual gamma applied, so 80 represents
+# a gamma of 0.8, the value 220 represents a gamma of 2.2, and 100 does not
+# change the gamma.
+# Minimum value: 40, maximum value: 240, default value: 80.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_GAMMA  = 80
+
+# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
+# page will contain the date and time when the page was generated. Setting this
+# to NO can help when comparing the output of multiple runs.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_TIMESTAMP         = YES
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_DYNAMIC_SECTIONS  = NO
+
+# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
+# shown in the various tree structured indices initially; the user can expand
+# and collapse entries dynamically later on. Doxygen will expand the tree to
+# such a level that at most the specified number of entries are visible (unless
+# a fully collapsed tree already exceeds this amount). So setting the number of
+# entries to 1 will produce a fully collapsed tree by default. 0 is a special
+# value representing an infinite number of entries and will result in a fully
+# expanded tree by default.
+# Minimum value: 0, maximum value: 9999, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_INDEX_NUM_ENTRIES = 100
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files will be
+# generated that can be used as input for Apple's Xcode 3 integrated development
+# environment (see: http://developer.apple.com/tools/xcode/), introduced with
+# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
+# Makefile in the HTML output directory. Running make will produce the docset in
+# that directory and running make install will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
+# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
+# for more information.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_DOCSET        = NO
+
+# This tag determines the name of the docset feed. A documentation feed provides
+# an umbrella under which multiple documentation sets from a single provider
+# (such as a company or product suite) can be grouped.
+# The default value is: Doxygen generated docs.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_FEEDNAME        = "Doxygen generated docs"
+
+# This tag specifies a string that should uniquely identify the documentation
+# set bundle. This should be a reverse domain-name style string, e.g.
+# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_BUNDLE_ID       = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
+# the documentation publisher. This should be a reverse domain-name style
+# string, e.g. com.mycompany.MyDocSet.documentation.
+# The default value is: org.doxygen.Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+# The default value is: Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_NAME  = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
+# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
+# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
+# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
+# Windows.
+#
+# The HTML Help Workshop contains a compiler that can convert all HTML output
+# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
+# files are now used as the Windows 98 help format, and will replace the old
+# Windows help format (.hlp) on all Windows platforms in the future. Compressed
+# HTML files also contain an index, a table of contents, and you can search for
+# words in the documentation. The HTML workshop also contains a viewer for
+# compressed HTML files.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_HTMLHELP      = NO
+
+# The CHM_FILE tag can be used to specify the file name of the resulting .chm
+# file. You can add a path in front of the file if the result should not be
+# written to the html output directory.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_FILE               =
+
+# The HHC_LOCATION tag can be used to specify the location (absolute path
+# including file name) of the HTML help compiler ( hhc.exe). If non-empty
+# doxygen will try to run the HTML help compiler on the generated index.hhp.
+# The file has to be specified with full path.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+HHC_LOCATION           =
+
+# The GENERATE_CHI flag controls whether a separate .chi index file is generated
+# (YES) or whether the index is included in the master .chm file (NO).
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+GENERATE_CHI           = NO
+
+# The CHM_INDEX_ENCODING is used to encode HtmlHelp index ( hhk), content ( hhc)
+# and project file content.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_INDEX_ENCODING     =
+
+# The BINARY_TOC flag controls whether a binary table of contents is generated (
+# YES) or a normal table of contents ( NO) in the .chm file. Furthermore it
+# enables the Previous and Next buttons.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+BINARY_TOC             = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members to
+# the table of contents of the HTML help documentation and to the tree view.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+TOC_EXPAND             = NO
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
+# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
+# (.qch) of the generated HTML documentation.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_QHP           = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
+# the file name of the resulting .qch file. The path specified is relative to
+# the HTML output folder.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QCH_FILE               =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
+# Project output. For more information please see Qt Help Project / Namespace
+# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_NAMESPACE          = org.doxygen.Project
+
+# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
+# Help Project output. For more information please see Qt Help Project / Virtual
+# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
+# folders).
+# The default value is: doc.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_VIRTUAL_FOLDER     = doc
+
+# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
+# filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_NAME   =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_ATTRS  =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. Qt Help Project / Filter Attributes (see:
+# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_SECT_FILTER_ATTRS  =
+
+# The QHG_LOCATION tag can be used to specify the location of Qt's
+# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
+# generated .qhp file.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHG_LOCATION           =
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
+# generated that, together with the HTML files, form an Eclipse help plugin. To
+# install this plugin and make it available under the help contents menu in
+# Eclipse, the contents of the directory containing the HTML and XML files need
+# to be copied into the plugins directory of Eclipse. The name of the directory
+# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
+# After copying, Eclipse needs to be restarted before the help appears.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_ECLIPSEHELP   = NO
+
+# A unique identifier for the Eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have this
+# name. Each documentation set should have its own identifier.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
+
+ECLIPSE_DOC_ID         = org.doxygen.Project
+
+# If you want full control over the layout of the generated HTML pages it might
+# be necessary to disable the index and replace it with your own. The
+# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
+# of each HTML page. A value of NO enables the index and the value YES disables
+# it. Since the tabs in the index contain the same information as the navigation
+# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+DISABLE_INDEX          = NO
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information. If the tag
+# value is set to YES, a side panel will be generated containing a tree-like
+# index structure (just like the one that is generated for HTML Help). For this
+# to work a browser that supports JavaScript, DHTML, CSS and frames is required
+# (i.e. any modern browser). Windows users are probably better off using the
+# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can
+# further fine-tune the look of the index. As an example, the default style
+# sheet generated by doxygen has an example that shows how to put an image at
+# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
+# the same information as the tab index, you could consider setting
+# DISABLE_INDEX to YES when enabling this option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_TREEVIEW      = NO
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
+# doxygen will group on one line in the generated HTML documentation.
+#
+# Note that a value of 0 will completely suppress the enum values from appearing
+# in the overview section.
+# Minimum value: 0, maximum value: 20, default value: 4.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+ENUM_VALUES_PER_LINE   = 4
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
+# to set the initial width (in pixels) of the frame in which the tree is shown.
+# Minimum value: 0, maximum value: 1500, default value: 250.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+TREEVIEW_WIDTH         = 250
+
+# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to
+# external symbols imported via tag files in a separate window.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+EXT_LINKS_IN_WINDOW    = NO
+
+# Use this tag to change the font size of LaTeX formulas included as images in
+# the HTML documentation. When you change the font size after a successful
+# doxygen run you need to manually remove any form_*.png images from the HTML
+# output directory to force them to be regenerated.
+# Minimum value: 8, maximum value: 50, default value: 10.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_FONTSIZE       = 10
+
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are not
+# supported properly for IE 6.0, but are supported on all modern browsers.
+#
+# Note that when changing this option you need to delete any form_*.png files in
+# the HTML output directory before the changes have effect.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_TRANSPARENT    = YES
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# http://www.mathjax.org) which uses client side Javascript for the rendering
+# instead of using prerendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want the formulas to look prettier in the HTML output. When
+# enabled you may also need to install MathJax separately and configure the path
+# to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+USE_MATHJAX            = NO
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. See the MathJax site (see:
+# http://docs.mathjax.org/en/latest/output.html) for more details.
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility), NativeMML (i.e. MathML) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_FORMAT         = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the HTML
+# output directory using the MATHJAX_RELPATH option. The destination directory
+# should contain the MathJax.js script. For instance, if the mathjax directory
+# is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
+# Content Delivery Network so you can quickly see the result without installing
+# MathJax. However, it is strongly recommended to install a local copy of
+# MathJax from http://www.mathjax.org before deployment.
+# The default value is: http://cdn.mathjax.org/mathjax/latest.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_RELPATH        = http://www.mathjax.org/mathjax
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
+# extension names that should be enabled during MathJax rendering. For example
+# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_EXTENSIONS     =
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
+# of code that will be used on startup of the MathJax code. See the MathJax site
+# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE       =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow, then
+# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
+# key> to jump into the search results window, the results can be navigated
+# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
+# the search. The filter options can be selected when the cursor is inside the
+# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
+# to select a filter and <Enter> or <escape> to activate or cancel the filter
+# option.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+SEARCHENGINE           = YES
+
+# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
+# implemented using a web server instead of a web client using Javascript. There
+# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
+# setting. When disabled, doxygen will generate a PHP script for searching and
+# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
+# and searching needs to be provided by external tools. See the section
+# "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SERVER_BASED_SEARCH    = NO
+
+# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
+# script for searching. Instead the search results are written to an XML file
+# which needs to be processed by an external indexer. Doxygen will invoke an
+# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
+# search results.
+#
+# Doxygen ships with an example indexer ( doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: http://xapian.org/).
+#
+# See the section "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH        = NO
+
+# The SEARCHENGINE_URL should point to a search engine hosted by a web server
+# which will return the search results when EXTERNAL_SEARCH is enabled.
+#
+# Doxygen ships with an example indexer ( doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: http://xapian.org/). See the section "External Indexing and
+# Searching" for details.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHENGINE_URL       =
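+
+# Purely as an illustration of the expected value (no such server is set up
+# here), the URL would normally point at a deployed doxysearch.cgi, e.g.:
+#
+#   SEARCHENGINE_URL = http://example.org/cgi-bin/doxysearch.cgi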
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
+# search data is written to a file for indexing by an external tool. With the
+# SEARCHDATA_FILE tag the name of this file can be specified.
+# The default file is: searchdata.xml.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHDATA_FILE        = searchdata.xml
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
+# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
+# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
+# projects and redirect the results back to the right project.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH_ID     =
+
+# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
+# projects other than the one defined by this configuration file, but that are
+# all added to the same external search index. Each project needs to have a
+# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id to
+# a relative location where the documentation can be found. The format is:
+# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTRA_SEARCH_MAPPINGS  =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES doxygen will generate LaTeX output.
+# The default value is: YES.
+
+GENERATE_LATEX         = YES
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_OUTPUT           = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked.
+#
+# Note that when enabling USE_PDFLATEX this option is only used for generating
+# bitmaps for formulas in the HTML output, but not in the Makefile that is
+# written to the output directory.
+# The default file is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_CMD_NAME         = latex
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
+# index for LaTeX.
+# The default file is: makeindex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+MAKEINDEX_CMD_NAME     = makeindex
+
+# If the COMPACT_LATEX tag is set to YES doxygen generates more compact LaTeX
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+COMPACT_LATEX          = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used by the
+# printer.
+# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
+# 14 inches) and executive (7.25 x 10.5 inches).
+# The default value is: a4.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PAPER_TYPE             = a4
+
+# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
+# that should be included in the LaTeX output. To get the times font for
+# instance you can specify
+# EXTRA_PACKAGES=times
+# If left blank no extra packages will be included.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+EXTRA_PACKAGES         = amsmath \
+                         amsfonts \
+                         xr
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
+# generated LaTeX document. The header should contain everything until the first
+# chapter. If it is left blank doxygen will generate a standard header. See
+# section "Doxygen usage" for information on how to let doxygen write the
+# default header to a separate file.
+#
+# Note: Only use a user-defined header if you know what you are doing! The
+# following commands have a special meaning inside the header: $title,
+# $datetime, $date, $doxygenversion, $projectname, $projectnumber,
+# $projectbrief, $projectlogo. Doxygen will replace $title with the empty string;
+# for the replacement values of the other commands the user is referred to
+# HTML_HEADER.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HEADER           =
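+
+# A sketch of the usual workflow (the file names below are arbitrary): write
+# out the default header once, edit it, then point this tag at the result:
+#
+#   doxygen -w latex my_header.tex my_footer.tex my_style.sty .Doxyfile
+#   LATEX_HEADER = my_header.tex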
+
+# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
+# generated LaTeX document. The footer should contain everything after the last
+# chapter. If it is left blank doxygen will generate a standard footer. See
+# LATEX_HEADER for more information on how to generate a default footer and what
+# special commands can be used inside the footer.
+#
+# Note: Only use a user-defined footer if you know what you are doing!
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_FOOTER           =
+
+# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the LATEX_OUTPUT output
+# directory. Note that the files will be copied as-is; there are no commands or
+# markers available.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_FILES      =
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
+# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
+# contain links (just like the HTML output) instead of page references. This
+# makes the output suitable for online browsing using a PDF viewer.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PDF_HYPERLINKS         = YES
+
+# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
+# the PDF file directly from the LaTeX files. Set this option to YES to get a
+# higher quality PDF documentation.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+USE_PDFLATEX           = YES
+
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
+# command to the generated LaTeX files. This will instruct LaTeX to keep running
+# if errors occur, instead of asking the user for help. This option is also used
+# when generating formulas in HTML.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BATCHMODE        = NO
+
+# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
+# index chapters (such as File Index, Compound Index, etc.) in the output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HIDE_INDICES     = NO
+
+# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
+# code with syntax highlighting in the LaTeX output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_SOURCE_CODE      = NO
+
+# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
+# bibliography, e.g. plainnat, or ieeetr. See
+# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
+# The default value is: plain.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BIB_STYLE        = plain
+
+#---------------------------------------------------------------------------
+# Configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES doxygen will generate RTF output. The
+# RTF output is optimized for Word 97 and may not look too pretty with other RTF
+# readers/editors.
+# The default value is: NO.
+
+GENERATE_RTF           = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: rtf.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_OUTPUT             = rtf
+
+# If the COMPACT_RTF tag is set to YES doxygen generates more compact RTF
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+COMPACT_RTF            = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
+# contain hyperlink fields. The RTF file will contain links (just like the HTML
+# output) instead of page references. This makes the output suitable for online
+# browsing using Word or some other Word compatible readers that support those
+# fields.
+#
+# Note: WordPad (write) and others do not support links.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_HYPERLINKS         = NO
+
+# Load stylesheet definitions from file. Syntax is similar to doxygen's config
+# file, i.e. a series of assignments. You only have to provide replacements,
+# missing definitions are set to their default value.
+#
+# See also section "Doxygen usage" for information on how to generate the
+# default style sheet that doxygen normally uses.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_STYLESHEET_FILE    =
+
+# Set optional variables used in the generation of an RTF document. Syntax is
+# similar to doxygen's config file. A template extensions file can be generated
+# using doxygen -e rtf extensionFile.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_EXTENSIONS_FILE    =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES doxygen will generate man pages for
+# classes and files.
+# The default value is: NO.
+
+GENERATE_MAN           = NO
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it. A directory man3 will be created inside the directory specified by
+# MAN_OUTPUT.
+# The default directory is: man.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_OUTPUT             = man
+
+# The MAN_EXTENSION tag determines the extension that is added to the generated
+# man pages. In case the manual section does not start with a number, the number
+# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
+# optional.
+# The default value is: .3.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_EXTENSION          = .3
+
+# The MAN_SUBDIR tag determines the name of the directory created within
+# MAN_OUTPUT in which the man pages are placed. It defaults to man followed by
+# MAN_EXTENSION with the initial . removed.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_SUBDIR             =
+
+# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
+# will generate one additional man file for each entity documented in the real
+# man page(s). These additional files only source the real man page, but without
+# them the man command would be unable to find the correct page.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_LINKS              = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES doxygen will generate an XML file that
+# captures the structure of the code including all documentation.
+# The default value is: NO.
+
+GENERATE_XML           = NO
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: xml.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_OUTPUT             = xml
+
+# If the XML_PROGRAMLISTING tag is set to YES doxygen will dump the program
+# listings (including syntax highlighting and cross-referencing information) to
+# the XML output. Note that enabling this will significantly increase the size
+# of the XML output.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_PROGRAMLISTING     = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the DOCBOOK output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_DOCBOOK tag is set to YES doxygen will generate Docbook files
+# that can be used to generate PDF.
+# The default value is: NO.
+
+GENERATE_DOCBOOK       = NO
+
+# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
+# front of it.
+# The default directory is: docbook.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_OUTPUT         = docbook
+
+# If the DOCBOOK_PROGRAMLISTING tag is set to YES doxygen will include the
+# program listings (including syntax highlighting and cross-referencing
+# information) in the DOCBOOK output. Note that enabling this will significantly
+# increase the size of the DOCBOOK output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_PROGRAMLISTING = NO
+
+#---------------------------------------------------------------------------
+# Configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES doxygen will generate an AutoGen
+# Definitions (see http://autogen.sf.net) file that captures the structure of
+# the code including all documentation. Note that this feature is still
+# experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_AUTOGEN_DEF   = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES doxygen will generate a Perl module
+# file that captures the structure of the code including all documentation.
+#
+# Note that this feature is still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_PERLMOD       = NO
+
+# If the PERLMOD_LATEX tag is set to YES doxygen will generate the necessary
+# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
+# output from the Perl module output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_LATEX          = NO
+
+# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be nicely
+# formatted so it can be parsed by a human reader. This is useful if you want to
+# understand what is going on. On the other hand, if this tag is set to NO the
+# size of the Perl module output will be much smaller and Perl will parse it
+# just the same.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_PRETTY         = YES
+
+# The names of the make variables in the generated doxyrules.make file are
+# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
+# so different doxyrules.make files included by the same Makefile don't
+# overwrite each other's variables.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_MAKEVAR_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES doxygen will evaluate all
+# C-preprocessor directives found in the sources and include files.
+# The default value is: YES.
+
+ENABLE_PREPROCESSING   = YES
+
+# If the MACRO_EXPANSION tag is set to YES doxygen will expand all macro names
+# in the source code. If set to NO only conditional compilation will be
+# performed. Macro expansion can be done in a controlled way by setting
+# EXPAND_ONLY_PREDEF to YES.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+MACRO_EXPANSION        = NO
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
+# the macro expansion is limited to the macros specified with the PREDEFINED and
+# EXPAND_AS_DEFINED tags.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_ONLY_PREDEF     = NO
+
+# If the SEARCH_INCLUDES tag is set to YES the include files in the
+# INCLUDE_PATH will be searched if a #include is found.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SEARCH_INCLUDES        = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by the
+# preprocessor.
+# This tag requires that the tag SEARCH_INCLUDES is set to YES.
+
+INCLUDE_PATH           =
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will be
+# used.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+INCLUDE_FILE_PATTERNS  =
+
+# The PREDEFINED tag can be used to specify one or more macro names that are
+# defined before the preprocessor is started (similar to the -D option of e.g.
+# gcc). The argument of the tag is a list of macros of the form: name or
+# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
+# is assumed. To prevent a macro definition from being undefined via #undef or
+# recursively expanded use the := operator instead of the = operator.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
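+# As an illustration only (these values are not set by this configuration),
+# the build macros used elsewhere in this tree could be pre-defined like so:
+# PREDEFINED = CPU_ONLY USE_CUDNN=1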
+
+PREDEFINED             =
+
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
+# tag can be used to specify a list of macro names that should be expanded. The
+# macro definition that is found in the sources will be used. Use the PREDEFINED
+# tag if you want to use a different macro definition that overrules the
+# definition found in the source code.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_AS_DEFINED      =
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
+# remove all references to function-like macros that are alone on a line, have
+# an all uppercase name, and do not end with a semicolon. Such function macros
+# are typically used for boiler-plate code, and will confuse the parser if not
+# removed.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SKIP_FUNCTION_MACROS   = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES tag can be used to specify one or more tag files. For each tag
+# file the location of the external documentation should be added. The format of
+# a tag file without this location is as follows:
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where loc1 and loc2 can be relative or absolute paths or URLs. See the
+# section "Linking to external documentation" for more information about the use
+# of tag files.
+# Note: Each tag file must have a unique name (where the name does NOT include
+# the path). If a tag file is not located in the directory in which doxygen is
+# run, you must also specify the path to the tagfile here.
+
+TAGFILES               =
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
+# tag file that is based on the input files it reads. See section "Linking to
+# external documentation" for more information about the usage of tag files.
+
+GENERATE_TAGFILE       =
+
+# If the ALLEXTERNALS tag is set to YES all external classes will be listed in the
+# class index. If set to NO only the inherited external classes will be listed.
+# The default value is: NO.
+
+ALLEXTERNALS           = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed in
+# the modules index. If set to NO, only the current project's groups will be
+# listed.
+# The default value is: YES.
+
+EXTERNAL_GROUPS        = YES
+
+# If the EXTERNAL_PAGES tag is set to YES all external pages will be listed in
+# the related pages index. If set to NO, only the current project's pages will
+# be listed.
+# The default value is: YES.
+
+EXTERNAL_PAGES         = YES
+
+# The PERL_PATH should be the absolute path and name of the perl script
+# interpreter (i.e. the result of 'which perl').
+# The default file (with absolute path) is: /usr/bin/perl.
+
+PERL_PATH              = /usr/bin/perl
+
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+
+# If the CLASS_DIAGRAMS tag is set to YES doxygen will generate a class diagram
+# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
+# NO turns the diagrams off. Note that this option also works with HAVE_DOT
+# disabled, but it is recommended to install and use dot, since it yields more
+# powerful graphs.
+# The default value is: YES.
+
+CLASS_DIAGRAMS         = YES
+
+# You can define message sequence charts within doxygen comments using the \msc
+# command. Doxygen will then run the mscgen tool (see:
+# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
+# documentation. The MSCGEN_PATH tag allows you to specify the directory where
+# the mscgen tool resides. If left empty the tool is assumed to be found in the
+# default search path.
+
+MSCGEN_PATH            =
+
+# You can include diagrams made with dia in doxygen documentation. Doxygen will
+# then run dia to produce the diagram and insert it in the documentation. The
+# DIA_PATH tag allows you to specify the directory where the dia binary resides.
+# If left empty dia is assumed to be found in the default search path.
+
+DIA_PATH               =
+
+# If set to YES, the inheritance and collaboration graphs will hide inheritance
+# and usage relations if the target is undocumented or is not a class.
+# The default value is: YES.
+
+HIDE_UNDOC_RELATIONS   = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz (see:
+# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
+# Bell Labs. The other options in this section have no effect if this option is
+# set to NO.
+# The default value is: NO.
+
+HAVE_DOT               = NO
+
+# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
+# to run in parallel. When set to 0 doxygen will base this on the number of
+# processors available in the system. You can set it explicitly to a value
+# larger than 0 to get control over the balance between CPU load and processing
+# speed.
+# Minimum value: 0, maximum value: 32, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_NUM_THREADS        = 0
+
+# When you want a different font in the dot files that doxygen
+# generates you can specify the font name using DOT_FONTNAME. You need to make
+# sure dot is able to find the font, which can be done by putting it in a
+# standard location or by setting the DOTFONTPATH environment variable or by
+# setting DOT_FONTPATH to the directory containing the font.
+# The default value is: Helvetica.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTNAME           = Helvetica
+
+# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
+# dot graphs.
+# Minimum value: 4, maximum value: 24, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTSIZE           = 10
+
+# By default doxygen will tell dot to use the default font as specified with
+# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
+# the path where dot can find it using this tag.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTPATH           =
+
+# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
+# each documented class showing the direct and indirect inheritance relations.
+# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CLASS_GRAPH            = YES
+
+# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
+# graph for each documented class showing the direct and indirect implementation
+# dependencies (inheritance, containment, and class reference variables) of the
+# class with other documented classes.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+COLLABORATION_GRAPH    = YES
+
+# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
+# groups, showing the direct groups dependencies.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GROUP_GRAPHS           = YES
+
+# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LOOK               = NO
+
+# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
+# class node. If there are many fields or methods and many nodes the graph may
+# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
+# number of items for each type to make the size more manageable. Set this to 0
+# for no limit. Note that the threshold may be exceeded by 50% before the limit
+# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
+# but if the number exceeds 15, the total amount of fields shown is limited to
+# 10.
+# Minimum value: 0, maximum value: 100, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LIMIT_NUM_FIELDS   = 10
+
+# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
+# collaboration graphs will show the relations between templates and their
+# instances.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+TEMPLATE_RELATIONS     = NO
+
+# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
+# YES then doxygen will generate a graph for each documented file showing the
+# direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDE_GRAPH          = YES
+
+# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
+# set to YES then doxygen will generate a graph for each documented file showing
+# the direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDED_BY_GRAPH      = YES
+
+# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable call graphs for selected
+# functions only using the \callgraph command.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALL_GRAPH             = NO
+
+# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable caller graphs for selected
+# functions only using the \callergraph command.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALLER_GRAPH           = NO
+
+# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a
+# graphical hierarchy of all classes instead of a textual one.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GRAPHICAL_HIERARCHY    = YES
+
+# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
+# dependencies a directory has on other directories in a graphical way. The
+# dependency relations are determined by the #include relations between the
+# files in the directories.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DIRECTORY_GRAPH        = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot.
+# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
+# to make the SVG files visible in IE 9+ (other browsers do not have this
+# requirement).
+# Possible values are: png, jpg, gif and svg.
+# The default value is: png.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_IMAGE_FORMAT       = png
+
+# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
+# enable generation of interactive SVG images that allow zooming and panning.
+#
+# Note that this requires a modern browser other than Internet Explorer. Tested
+# and working are Firefox, Chrome, Safari, and Opera.
+# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
+# the SVG files visible. Older versions of IE do not have SVG support.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
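+# Illustration only (not this project's settings): interactive SVG output
+# would combine DOT_IMAGE_FORMAT = svg with INTERACTIVE_SVG = YES and, for
+# IE 9+, HTML_FILE_EXTENSION set to xhtml as noted above.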
+
+INTERACTIVE_SVG        = NO
+
+# The DOT_PATH tag can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_PATH               =
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the \dotfile
+# command).
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOTFILE_DIRS           =
+
+# The MSCFILE_DIRS tag can be used to specify one or more directories that
+# contain msc files that are included in the documentation (see the \mscfile
+# command).
+
+MSCFILE_DIRS           =
+
+# The DIAFILE_DIRS tag can be used to specify one or more directories that
+# contain dia files that are included in the documentation (see the \diafile
+# command).
+
+DIAFILE_DIRS           =
+
+# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the
+# path where java can find the plantuml.jar file. If left blank, it is assumed
+# PlantUML is not used or called during a preprocessing step. Doxygen will
+# generate a warning when it encounters a \startuml command in this case and
+# will not generate output for the diagram.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+PLANTUML_JAR_PATH      =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
+# that will be shown in the graph. If the number of nodes in a graph becomes
+# larger than this value, doxygen will truncate the graph, which is visualized
+# by representing a node as a red box. Note that if the number of direct
+# children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note that
+# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+# Minimum value: 0, maximum value: 10000, default value: 50.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_GRAPH_MAX_NODES    = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
+# generated by dot. A depth value of 3 means that only nodes reachable from the
+# root by following a path via at most 3 edges will be shown. Nodes that lay
+# further from the root node will be omitted. Note that setting this option to 1
+# or 2 may greatly reduce the computation time needed for large code bases. Also
+# note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+# Minimum value: 0, maximum value: 1000, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+MAX_DOT_GRAPH_DEPTH    = 0
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not seem
+# to support this out of the box.
+#
+# Warning: Depending on the platform used, enabling this option may lead to
+# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
+# read).
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_TRANSPARENT        = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10) support
+# this, this feature is disabled by default.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_MULTI_TARGETS      = YES
+
+# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
+# explaining the meaning of the various boxes and arrows in the dot generated
+# graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GENERATE_LEGEND        = YES
+
+# If the DOT_CLEANUP tag is set to YES doxygen will remove the intermediate dot
+# files that are used to generate the various graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_CLEANUP            = YES
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..28f2aca
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,93 @@
+## General
+
+# Compiled Object files
+*.slo
+*.lo
+*.o
+*.cuo
+
+# Compiled Dynamic libraries
+*.so
+*.dylib
+
+# Compiled Static libraries
+*.lai
+*.la
+*.a
+
+# Compiled protocol buffers
+*.pb.h
+*.pb.cc
+*_pb2.py
+
+# Compiled python
+*.pyc
+
+# Compiled MATLAB
+*.mex*
+
+# IPython notebook checkpoints
+.ipynb_checkpoints
+
+# Editor temporaries
+*.swp
+*~
+
+# Sublime Text settings
+*.sublime-workspace
+*.sublime-project
+
+# Eclipse Project settings
+*.*project
+.settings
+
+# QtCreator files
+*.user
+
+# PyCharm files
+.idea
+
+# OSX dir files
+.DS_Store
+
+## Caffe
+
+# User's build configuration
+Makefile.config
+
+# Data and models are either
+# 1. reference, and not casually committed
+# 2. custom, and live on their own unless they're deliberately contributed
+data/*
+models/*
+*.caffemodel
+*.solverstate
+*.binaryproto
+*leveldb
+*lmdb
+
+# build, distribute, and bins (+ python proto bindings)
+build
+.build_debug/*
+.build_release/*
+distribute/*
+*.testbin
+*.bin
+python/caffe/proto/
+cmake_build
+.cmake_build
+
+# Generated documentation
+docs/_site
+docs/gathered
+_site
+doxygen
+docs/dev
+
+# LevelDB files
+*.sst
+*.ldb
+LOCK
+LOG*
+CURRENT
+MANIFEST-*
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..955aa8c
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,47 @@
+# Use a build matrix to run four builds in parallel:
+# with and without CUDA, each built once with CMake and once with make.
+env:
+  matrix:
+    - WITH_CUDA=false WITH_CMAKE=false
+    - WITH_CUDA=false WITH_CMAKE=true
+    - WITH_CUDA=true WITH_CMAKE=false
+    - WITH_CUDA=true WITH_CMAKE=true
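+# A single matrix entry can be reproduced locally as a rough sketch, assuming
+# the setup performed by the install/before_script steps below has been done:
+#   WITH_CUDA=false WITH_CMAKE=true ./scripts/travis/travis_build_and_test.sh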
+
+language: cpp
+
+# Cache Ubuntu apt packages.
+cache: apt
+
+compiler: gcc
+
+before_install:
+  - export NUM_THREADS=4
+  - export SCRIPTS=./scripts/travis
+
+install:
+  - sudo -E $SCRIPTS/travis_install.sh
+
+before_script:
+  - export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib:/usr/local/cuda/lib64
+  - export PATH=/home/travis/miniconda/bin:$PATH
+  - if ! $WITH_CMAKE; then $SCRIPTS/travis_setup_makefile_config.sh; fi
+
+script: $SCRIPTS/travis_build_and_test.sh
+
+notifications:
+# Emails are sent to the committer's git-configured email address by default,
+# but only if they have access to the repository.  To enable Travis on your
+# public fork of Caffe, just go to travis-ci.org and flip the switch on for
+# your Caffe fork.  To configure your git email address, use:
+#     git config --global user.email me@example.com
+  email:
+    on_success: always
+    on_failure: always
+
+# IRC notifications disabled by default.
+# Uncomment next 5 lines to send notifications to chat.freenode.net#caffe
+#   irc:
+#     channels:
+#       - "chat.freenode.net#caffe"
+#     template:
+#       - "%{repository}/%{branch} (%{commit} - %{author}): %{message}"
diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..e202350
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,69 @@
+cmake_minimum_required(VERSION 2.8.7)
+
+# ---[ Caffe project
+project(Caffe C CXX)
+
+# ---[ Using cmake scripts and modules
+list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake/Modules)
+
+include(ExternalProject)
+
+include(cmake/Utils.cmake)
+include(cmake/Targets.cmake)
+include(cmake/Misc.cmake)
+include(cmake/Summary.cmake)
+include(cmake/ConfigGen.cmake)
+
+# ---[ Options
+caffe_option(CPU_ONLY  "Build Caffe without CUDA support" OFF) # TODO: rename to USE_CUDA
+caffe_option(USE_CUDNN "Build Caffe with cuDNN libary support" ON IF NOT CPU_ONLY)
+caffe_option(BUILD_SHARED_LIBS "Build shared libraries" ON)
+caffe_option(BUILD_python "Build Python wrapper" ON)
+set(python_version "2" CACHE STRING "Specify which python version to use")
+caffe_option(BUILD_matlab "Build Matlab wrapper" OFF IF UNIX OR APPLE)
+caffe_option(BUILD_docs   "Build documentation" ON IF UNIX OR APPLE)
+caffe_option(BUILD_python_layer "Build the Caffe python layer" ON)
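+# Configure-time example (the values shown are illustrative, not the defaults):
+#   cmake -DCPU_ONLY=ON -DBUILD_docs=OFF <path-to-caffe-source>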
+
+# ---[ Dependencies
+include(cmake/Dependencies.cmake)
+
+# ---[ Flags
+if(UNIX OR APPLE)
+  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC -Wall")
+endif()
+
+if(USE_libstdcpp)
+  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libstdc++")
+  message("-- Warning: forcing libstdc++ (controlled by USE_libstdcpp option in cmake)")
+endif()
+
+add_definitions(-DGTEST_USE_OWN_TR1_TUPLE)
+
+# ---[ Warnings
+caffe_warnings_disable(CMAKE_CXX_FLAGS -Wno-sign-compare -Wno-uninitialized)
+
+# ---[ Config generation
+configure_file(cmake/Templates/caffe_config.h.in "${PROJECT_BINARY_DIR}/caffe_config.h")
+
+# ---[ Includes
+set(Caffe_INCLUDE_DIR ${PROJECT_SOURCE_DIR}/include)
+include_directories(${Caffe_INCLUDE_DIR} ${PROJECT_BINARY_DIR})
+include_directories(BEFORE src) # This is needed for gtest.
+
+# ---[ Subdirectories
+add_subdirectory(src/gtest)
+add_subdirectory(src/caffe)
+add_subdirectory(tools)
+add_subdirectory(examples)
+add_subdirectory(python)
+add_subdirectory(matlab)
+add_subdirectory(docs)
+
+# ---[ Linter target
+add_custom_target(lint COMMAND ${CMAKE_COMMAND} -P ${PROJECT_SOURCE_DIR}/cmake/lint.cmake)
+
+# ---[ Configuration summary
+caffe_print_configuration_summary()
+
+# ---[ Export configs generation
+caffe_generate_export_configs()
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
new file mode 100644
index 0000000..8db66ea
--- /dev/null
+++ b/CONTRIBUTORS.md
@@ -0,0 +1,19 @@
+# Contributors
+
+Caffe is developed by a core set of BVLC members and the open-source community.
+
+We thank all of our [contributors](https://github.com/BVLC/caffe/graphs/contributors)!
+
+**For the detailed history of contributions** of a given file, try
+
+    git blame file
+
+to see line-by-line credits and
+
+    git log --follow file
+
+to see the change log even across renames and rewrites.
+
+Please refer to the [acknowledgements](http://caffe.berkeleyvision.org/#acknowledgements) on the Caffe site for further details.
+
+**Copyright** is held by the original contributor according to the versioning history; see LICENSE.
diff --git a/INSTALL.md b/INSTALL.md
new file mode 100644
index 0000000..42fcf02
--- /dev/null
+++ b/INSTALL.md
@@ -0,0 +1,7 @@
+# Installation
+
+See http://caffe.berkeleyvision.org/installation.html for the latest
+installation instructions.
+
+Check the issue tracker in case you need help:
+https://github.com/BVLC/caffe/issues
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..d69d16f
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,44 @@
+COPYRIGHT
+
+All contributions by the University of California:
+Copyright (c) 2014, 2015, The Regents of the University of California (Regents)
+All rights reserved.
+
+All other contributions:
+Copyright (c) 2014, 2015, the respective contributors
+All rights reserved.
+
+Caffe uses a shared copyright model: each contributor holds copyright over
+their contributions to Caffe. The project versioning records all such
+contribution and copyright details. If a contributor wants to further mark
+their specific copyright on a particular contribution, they should indicate
+their copyright solely in the commit message of the change when it is
+committed.
+
+LICENSE
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met: 
+
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer. 
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution. 
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+CONTRIBUTION AGREEMENT
+
+By contributing to the BVLC/caffe repository through pull-request, comment,
+or otherwise, the contributor releases their content to the
+license and copyright terms herein.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..e4e66df
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,631 @@
+PROJECT := caffe
+
+CONFIG_FILE := Makefile.config
+# Explicitly check for the config file, otherwise make -k will proceed anyway.
+ifeq ($(wildcard $(CONFIG_FILE)),)
+$(error $(CONFIG_FILE) not found. See $(CONFIG_FILE).example.)
+endif
+include $(CONFIG_FILE)
+
+BUILD_DIR_LINK := $(BUILD_DIR)
+ifeq ($(RELEASE_BUILD_DIR),)
+	RELEASE_BUILD_DIR := .$(BUILD_DIR)_release
+endif
+ifeq ($(DEBUG_BUILD_DIR),)
+	DEBUG_BUILD_DIR := .$(BUILD_DIR)_debug
+endif
+
+DEBUG ?= 0
+ifeq ($(DEBUG), 1)
+	BUILD_DIR := $(DEBUG_BUILD_DIR)
+	OTHER_BUILD_DIR := $(RELEASE_BUILD_DIR)
+else
+	BUILD_DIR := $(RELEASE_BUILD_DIR)
+	OTHER_BUILD_DIR := $(DEBUG_BUILD_DIR)
+endif
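+# For example, a debug build can be requested either by setting "DEBUG := 1"
+# in Makefile.config or by passing it on the command line, e.g. "make all DEBUG=1".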
+
+# All of the directories containing code.
+SRC_DIRS := $(shell find * -type d -exec bash -c "find {} -maxdepth 1 \
+	\( -name '*.cpp' -o -name '*.proto' \) | grep -q ." \; -print)
+
+# The target shared library name
+LIB_BUILD_DIR := $(BUILD_DIR)/lib
+STATIC_NAME := $(LIB_BUILD_DIR)/lib$(PROJECT).a
+DYNAMIC_NAME := $(LIB_BUILD_DIR)/lib$(PROJECT).so
+
+##############################
+# Get all source files
+##############################
+# CXX_SRCS are the source files excluding the test ones.
+CXX_SRCS := $(shell find src/$(PROJECT) ! -name "test_*.cpp" -name "*.cpp")
+# CU_SRCS are the cuda source files
+CU_SRCS := $(shell find src/$(PROJECT) ! -name "test_*.cu" -name "*.cu")
+# TEST_SRCS are the test source files
+TEST_MAIN_SRC := src/$(PROJECT)/test/test_caffe_main.cpp
+TEST_SRCS := $(shell find src/$(PROJECT) -name "test_*.cpp")
+TEST_SRCS := $(filter-out $(TEST_MAIN_SRC), $(TEST_SRCS))
+TEST_CU_SRCS := $(shell find src/$(PROJECT) -name "test_*.cu")
+GTEST_SRC := src/gtest/gtest-all.cpp
+# TOOL_SRCS are the source files for the tool binaries
+TOOL_SRCS := $(shell find tools -name "*.cpp")
+# EXAMPLE_SRCS are the source files for the example binaries
+EXAMPLE_SRCS := $(shell find examples -name "*.cpp")
+# BUILD_INCLUDE_DIR contains any generated header files we want to include.
+BUILD_INCLUDE_DIR := $(BUILD_DIR)/src
+# PROTO_SRCS are the protocol buffer definitions
+PROTO_SRC_DIR := src/$(PROJECT)/proto
+PROTO_SRCS := $(wildcard $(PROTO_SRC_DIR)/*.proto)
+# PROTO_BUILD_DIR will contain the .cc and obj files generated from
+# PROTO_SRCS; PROTO_BUILD_INCLUDE_DIR will contain the .h header files
+PROTO_BUILD_DIR := $(BUILD_DIR)/$(PROTO_SRC_DIR)
+PROTO_BUILD_INCLUDE_DIR := $(BUILD_INCLUDE_DIR)/$(PROJECT)/proto
+# NONGEN_CXX_SRCS includes all source/header files except those generated
+# automatically (e.g., by proto).
+NONGEN_CXX_SRCS := $(shell find \
+	src/$(PROJECT) \
+	include/$(PROJECT) \
+	python/$(PROJECT) \
+	matlab/+$(PROJECT)/private \
+	examples \
+	tools \
+	-name "*.cpp" -or -name "*.hpp" -or -name "*.cu" -or -name "*.cuh")
+LINT_SCRIPT := scripts/cpp_lint.py
+LINT_OUTPUT_DIR := $(BUILD_DIR)/.lint
+LINT_EXT := lint.txt
+LINT_OUTPUTS := $(addsuffix .$(LINT_EXT), $(addprefix $(LINT_OUTPUT_DIR)/, $(NONGEN_CXX_SRCS)))
+EMPTY_LINT_REPORT := $(BUILD_DIR)/.$(LINT_EXT)
+NONEMPTY_LINT_REPORT := $(BUILD_DIR)/$(LINT_EXT)
+# PY$(PROJECT)_SRC is the python wrapper for $(PROJECT)
+PY$(PROJECT)_SRC := python/$(PROJECT)/_$(PROJECT).cpp
+PY$(PROJECT)_SO := python/$(PROJECT)/_$(PROJECT).so
+PY$(PROJECT)_HXX := include/$(PROJECT)/python_layer.hpp
+# MAT$(PROJECT)_SRC is the mex entrance point of matlab package for $(PROJECT)
+MAT$(PROJECT)_SRC := matlab/+$(PROJECT)/private/$(PROJECT)_.cpp
+ifneq ($(MATLAB_DIR),)
+	MAT_SO_EXT := $(shell $(MATLAB_DIR)/bin/mexext)
+endif
+MAT$(PROJECT)_SO := matlab/+$(PROJECT)/private/$(PROJECT)_.$(MAT_SO_EXT)
+
+##############################
+# Derive generated files
+##############################
+# The generated files for protocol buffers
+PROTO_GEN_HEADER_SRCS := $(addprefix $(PROTO_BUILD_DIR)/, \
+		$(notdir ${PROTO_SRCS:.proto=.pb.h}))
+PROTO_GEN_HEADER := $(addprefix $(PROTO_BUILD_INCLUDE_DIR)/, \
+		$(notdir ${PROTO_SRCS:.proto=.pb.h}))
+PROTO_GEN_CC := $(addprefix $(BUILD_DIR)/, ${PROTO_SRCS:.proto=.pb.cc})
+PY_PROTO_BUILD_DIR := python/$(PROJECT)/proto
+PY_PROTO_INIT := python/$(PROJECT)/proto/__init__.py
+PROTO_GEN_PY := $(foreach file,${PROTO_SRCS:.proto=_pb2.py}, \
+		$(PY_PROTO_BUILD_DIR)/$(notdir $(file)))
+# The objects corresponding to the source files
+# These objects will be linked into the final shared library, so we
+# exclude the tool, example, and test objects.
+CXX_OBJS := $(addprefix $(BUILD_DIR)/, ${CXX_SRCS:.cpp=.o})
+CU_OBJS := $(addprefix $(BUILD_DIR)/cuda/, ${CU_SRCS:.cu=.o})
+PROTO_OBJS := ${PROTO_GEN_CC:.cc=.o}
+OBJS := $(PROTO_OBJS) $(CXX_OBJS) $(CU_OBJS)
+# tool, example, and test objects
+TOOL_OBJS := $(addprefix $(BUILD_DIR)/, ${TOOL_SRCS:.cpp=.o})
+TOOL_BUILD_DIR := $(BUILD_DIR)/tools
+TEST_CXX_BUILD_DIR := $(BUILD_DIR)/src/$(PROJECT)/test
+TEST_CU_BUILD_DIR := $(BUILD_DIR)/cuda/src/$(PROJECT)/test
+TEST_CXX_OBJS := $(addprefix $(BUILD_DIR)/, ${TEST_SRCS:.cpp=.o})
+TEST_CU_OBJS := $(addprefix $(BUILD_DIR)/cuda/, ${TEST_CU_SRCS:.cu=.o})
+TEST_OBJS := $(TEST_CXX_OBJS) $(TEST_CU_OBJS)
+GTEST_OBJ := $(addprefix $(BUILD_DIR)/, ${GTEST_SRC:.cpp=.o})
+EXAMPLE_OBJS := $(addprefix $(BUILD_DIR)/, ${EXAMPLE_SRCS:.cpp=.o})
+# Output files for automatic dependency generation
+DEPS := ${CXX_OBJS:.o=.d} ${CU_OBJS:.o=.d} ${TEST_CXX_OBJS:.o=.d} \
+	${TEST_CU_OBJS:.o=.d} $(BUILD_DIR)/${MAT$(PROJECT)_SO:.$(MAT_SO_EXT)=.d}
+# tool, example, and test bins
+TOOL_BINS := ${TOOL_OBJS:.o=.bin}
+EXAMPLE_BINS := ${EXAMPLE_OBJS:.o=.bin}
+# symlinks to tool bins without the ".bin" extension
+TOOL_BIN_LINKS := ${TOOL_BINS:.bin=}
+# Put the test binaries in build/test for convenience.
+TEST_BIN_DIR := $(BUILD_DIR)/test
+TEST_CU_BINS := $(addsuffix .testbin,$(addprefix $(TEST_BIN_DIR)/, \
+		$(foreach obj,$(TEST_CU_OBJS),$(basename $(notdir $(obj))))))
+TEST_CXX_BINS := $(addsuffix .testbin,$(addprefix $(TEST_BIN_DIR)/, \
+		$(foreach obj,$(TEST_CXX_OBJS),$(basename $(notdir $(obj))))))
+TEST_BINS := $(TEST_CXX_BINS) $(TEST_CU_BINS)
+# TEST_ALL_BIN is the test binary that links caffe dynamically.
+TEST_ALL_BIN := $(TEST_BIN_DIR)/test_all.testbin
+
+##############################
+# Derive compiler warning dump locations
+##############################
+WARNS_EXT := warnings.txt
+CXX_WARNS := $(addprefix $(BUILD_DIR)/, ${CXX_SRCS:.cpp=.o.$(WARNS_EXT)})
+CU_WARNS := $(addprefix $(BUILD_DIR)/cuda/, ${CU_SRCS:.cu=.o.$(WARNS_EXT)})
+TOOL_WARNS := $(addprefix $(BUILD_DIR)/, ${TOOL_SRCS:.cpp=.o.$(WARNS_EXT)})
+EXAMPLE_WARNS := $(addprefix $(BUILD_DIR)/, ${EXAMPLE_SRCS:.cpp=.o.$(WARNS_EXT)})
+TEST_WARNS := $(addprefix $(BUILD_DIR)/, ${TEST_SRCS:.cpp=.o.$(WARNS_EXT)})
+TEST_CU_WARNS := $(addprefix $(BUILD_DIR)/cuda/, ${TEST_CU_SRCS:.cu=.o.$(WARNS_EXT)})
+ALL_CXX_WARNS := $(CXX_WARNS) $(TOOL_WARNS) $(EXAMPLE_WARNS) $(TEST_WARNS)
+ALL_CU_WARNS := $(CU_WARNS) $(TEST_CU_WARNS)
+ALL_WARNS := $(ALL_CXX_WARNS) $(ALL_CU_WARNS)
+
+EMPTY_WARN_REPORT := $(BUILD_DIR)/.$(WARNS_EXT)
+NONEMPTY_WARN_REPORT := $(BUILD_DIR)/$(WARNS_EXT)
+
+##############################
+# Derive include and lib directories
+##############################
+CUDA_INCLUDE_DIR := $(CUDA_DIR)/include
+
+CUDA_LIB_DIR :=
+# add <cuda>/lib64 only if it exists
+ifneq ("$(wildcard $(CUDA_DIR)/lib64)","")
+	CUDA_LIB_DIR += $(CUDA_DIR)/lib64
+endif
+CUDA_LIB_DIR += $(CUDA_DIR)/lib
+
+INCLUDE_DIRS += $(BUILD_INCLUDE_DIR) ./src ./include
+ifneq ($(CPU_ONLY), 1)
+	INCLUDE_DIRS += $(CUDA_INCLUDE_DIR)
+	LIBRARY_DIRS += $(CUDA_LIB_DIR)
+	LIBRARIES := cudart cublas curand
+endif
+LIBRARIES += glog gflags protobuf leveldb snappy \
+	lmdb boost_system hdf5_hl hdf5 m \
+	opencv_core opencv_highgui opencv_imgproc
+PYTHON_LIBRARIES := boost_python python2.7
+WARNINGS := -Wall -Wno-sign-compare
+
+##############################
+# Set build directories
+##############################
+
+DISTRIBUTE_DIR ?= distribute
+DISTRIBUTE_SUBDIRS := $(DISTRIBUTE_DIR)/bin $(DISTRIBUTE_DIR)/lib
+DIST_ALIASES := dist
+ifneq ($(strip $(DISTRIBUTE_DIR)),distribute)
+		DIST_ALIASES += distribute
+endif
+
+ALL_BUILD_DIRS := $(sort $(BUILD_DIR) $(addprefix $(BUILD_DIR)/, $(SRC_DIRS)) \
+	$(addprefix $(BUILD_DIR)/cuda/, $(SRC_DIRS)) \
+	$(LIB_BUILD_DIR) $(TEST_BIN_DIR) $(PY_PROTO_BUILD_DIR) $(LINT_OUTPUT_DIR) \
+	$(DISTRIBUTE_SUBDIRS) $(PROTO_BUILD_INCLUDE_DIR))
+
+##############################
+# Set directory for Doxygen-generated documentation
+##############################
+DOXYGEN_CONFIG_FILE ?= ./.Doxyfile
+# should be the same as OUTPUT_DIRECTORY in the .Doxyfile
+DOXYGEN_OUTPUT_DIR ?= ./doxygen
+DOXYGEN_COMMAND ?= doxygen
+# All the files that might have Doxygen documentation.
+DOXYGEN_SOURCES := $(shell find \
+	src/$(PROJECT) \
+	include/$(PROJECT) \
+	python/ \
+	matlab/ \
+	examples \
+	tools \
+	-name "*.cpp" -or -name "*.hpp" -or -name "*.cu" -or -name "*.cuh" -or \
+        -name "*.py" -or -name "*.m")
+DOXYGEN_SOURCES += $(DOXYGEN_CONFIG_FILE)
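+# As a usage note, "make docs" (defined below) runs $(DOXYGEN_COMMAND) and then
+# symlinks the generated HTML from $(DOXYGEN_OUTPUT_DIR)/html into docs/doxygen.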
+
+
+##############################
+# Configure build
+##############################
+
+# Determine platform
+UNAME := $(shell uname -s)
+ifeq ($(UNAME), Linux)
+	LINUX := 1
+else ifeq ($(UNAME), Darwin)
+	OSX := 1
+endif
+
+# Linux
+ifeq ($(LINUX), 1)
+	CXX ?= /usr/bin/g++
+	GCCVERSION := $(shell $(CXX) -dumpversion | cut -f1,2 -d.)
+	# older versions of gcc are too dumb to build boost with -Wuninitialized
+	ifeq ($(shell echo $(GCCVERSION) \< 4.6 | bc), 1)
+		WARNINGS += -Wno-uninitialized
+	endif
+	# boost::thread is reasonably called boost_thread (compare OS X)
+	# We will also explicitly add stdc++ to the link target.
+	LIBRARIES += boost_thread stdc++
+endif
+
+# OS X:
+# clang++ instead of g++
+# libstdc++ for NVCC compatibility on OS X >= 10.9 with CUDA < 7.0
+ifeq ($(OSX), 1)
+	CXX := /usr/bin/clang++
+	ifneq ($(CPU_ONLY), 1)
+		CUDA_VERSION := $(shell $(CUDA_DIR)/bin/nvcc -V | grep -o 'release \d' | grep -o '\d')
+		ifeq ($(shell echo $(CUDA_VERSION) \< 7.0 | bc), 1)
+			CXXFLAGS += -stdlib=libstdc++
+			LINKFLAGS += -stdlib=libstdc++
+		endif
+		# clang throws this warning for cuda headers
+		WARNINGS += -Wno-unneeded-internal-declaration
+	endif
+	# gtest needs to use its own tuple to not conflict with clang
+	COMMON_FLAGS += -DGTEST_USE_OWN_TR1_TUPLE=1
+	# boost::thread is called boost_thread-mt to mark multithreading on OS X
+	LIBRARIES += boost_thread-mt
+	# we need to explicitly ask for the rpath to be obeyed
+	DYNAMIC_FLAGS := -install_name @rpath/libcaffe.so
+	ORIGIN := @loader_path
+else
+	ORIGIN := \$$ORIGIN
+endif
+
+# Custom compiler
+ifdef CUSTOM_CXX
+	CXX := $(CUSTOM_CXX)
+endif
+
+# Static linking
+ifneq (,$(findstring clang++,$(CXX)))
+	STATIC_LINK_COMMAND := -Wl,-force_load $(STATIC_NAME)
+else ifneq (,$(findstring g++,$(CXX)))
+	STATIC_LINK_COMMAND := -Wl,--whole-archive $(STATIC_NAME) -Wl,--no-whole-archive
+else
+  # The following line must not be indented with a tab, since we are not inside a target
+  $(error Cannot static link with the $(CXX) compiler)
+endif
+
+# Debugging
+ifeq ($(DEBUG), 1)
+	COMMON_FLAGS += -DDEBUG -g -O0
+	NVCCFLAGS += -G
+else
+	COMMON_FLAGS += -DNDEBUG -O2
+endif
+
+# cuDNN acceleration configuration.
+ifeq ($(USE_CUDNN), 1)
+	LIBRARIES += cudnn
+	COMMON_FLAGS += -DUSE_CUDNN
+endif
+
+# CPU-only configuration
+ifeq ($(CPU_ONLY), 1)
+	OBJS := $(PROTO_OBJS) $(CXX_OBJS)
+	TEST_OBJS := $(TEST_CXX_OBJS)
+	TEST_BINS := $(TEST_CXX_BINS)
+	ALL_WARNS := $(ALL_CXX_WARNS)
+	TEST_FILTER := --gtest_filter="-*GPU*"
+	COMMON_FLAGS += -DCPU_ONLY
+endif
+
+# Python layer support
+ifeq ($(WITH_PYTHON_LAYER), 1)
+	COMMON_FLAGS += -DWITH_PYTHON_LAYER
+	LIBRARIES += $(PYTHON_LIBRARIES)
+endif
+
+# BLAS configuration (default = ATLAS)
+BLAS ?= atlas
+ifeq ($(BLAS), mkl)
+	# MKL
+	LIBRARIES += mkl_rt
+	COMMON_FLAGS += -DUSE_MKL
+	MKL_DIR ?= /opt/intel/mkl
+	BLAS_INCLUDE ?= $(MKL_DIR)/include
+	BLAS_LIB ?= $(MKL_DIR)/lib $(MKL_DIR)/lib/intel64
+else ifeq ($(BLAS), open)
+	# OpenBLAS
+	LIBRARIES += openblas
+else
+	# ATLAS
+	ifeq ($(LINUX), 1)
+		ifeq ($(BLAS), atlas)
+			# Linux simply has cblas and atlas
+			LIBRARIES += cblas atlas
+		endif
+	else ifeq ($(OSX), 1)
+		# OS X packages atlas as the vecLib framework
+		LIBRARIES += cblas
+		# 10.10 has accelerate while 10.9 has veclib
+		XCODE_CLT_VER := $(shell pkgutil --pkg-info=com.apple.pkg.CLTools_Executables | grep -o 'version: 6')
+		ifneq (,$(findstring version: 6,$(XCODE_CLT_VER)))
+			BLAS_INCLUDE ?= /System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Headers/
+			LDFLAGS += -framework Accelerate
+		else
+			BLAS_INCLUDE ?= /System/Library/Frameworks/vecLib.framework/Versions/Current/Headers/
+			LDFLAGS += -framework vecLib
+		endif
+	endif
+endif
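+# For instance, OpenBLAS can be selected either with "BLAS := open" in
+# Makefile.config or on the command line via "make all BLAS=open".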
+INCLUDE_DIRS += $(BLAS_INCLUDE)
+LIBRARY_DIRS += $(BLAS_LIB)
+
+LIBRARY_DIRS += $(LIB_BUILD_DIR)
+
+# Automatic dependency generation (nvcc is handled separately)
+CXXFLAGS += -MMD -MP
+
+# Complete build flags.
+COMMON_FLAGS += $(foreach includedir,$(INCLUDE_DIRS),-I$(includedir))
+CXXFLAGS += -pthread -fPIC $(COMMON_FLAGS) $(WARNINGS)
+NVCCFLAGS += -ccbin=$(CXX) -Xcompiler -fPIC $(COMMON_FLAGS)
+# mex may invoke an older gcc that is too liberal with -Wuninitialized
+MATLAB_CXXFLAGS := $(CXXFLAGS) -Wno-uninitialized
+LINKFLAGS += -pthread -fPIC $(COMMON_FLAGS) $(WARNINGS)
+
+USE_PKG_CONFIG ?= 0
+ifeq ($(USE_PKG_CONFIG), 1)
+	PKG_CONFIG := $(shell pkg-config opencv --libs)
+else
+	PKG_CONFIG :=
+endif
+LDFLAGS += $(foreach librarydir,$(LIBRARY_DIRS),-L$(librarydir)) $(PKG_CONFIG) \
+		$(foreach library,$(LIBRARIES),-l$(library))
+PYTHON_LDFLAGS := $(LDFLAGS) $(foreach library,$(PYTHON_LIBRARIES),-l$(library))
+
+# 'superclean' target recursively* deletes all files ending with an extension
+# in $(SUPERCLEAN_EXTS) below.  This may be useful if you've built older
+# versions of Caffe that do not place all generated files in a location known
+# to the 'clean' target.
+#
+# 'supercleanlist' will list the files to be deleted by make superclean.
+#
+# * Recursive with the exception that symbolic links are never followed, per the
+# default behavior of 'find'.
+SUPERCLEAN_EXTS := .so .a .o .bin .testbin .pb.cc .pb.h _pb2.py .cuo
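+# Example usage:
+#   make supercleanlist   # preview the files that would be deleted
+#   make superclean       # delete them (also runs the regular 'clean')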
+
+# Set the sub-targets of the 'everything' target.
+EVERYTHING_TARGETS := all py$(PROJECT) test warn lint
+# Only build matcaffe as part of "everything" if MATLAB_DIR is specified.
+ifneq ($(MATLAB_DIR),)
+	EVERYTHING_TARGETS += mat$(PROJECT)
+endif
+
+##############################
+# Define build targets
+##############################
+.PHONY: all test clean docs linecount lint lintclean tools examples $(DIST_ALIASES) \
+	py mat py$(PROJECT) mat$(PROJECT) proto runtest \
+	superclean supercleanlist supercleanfiles warn everything
+
+all: $(STATIC_NAME) $(DYNAMIC_NAME) tools examples
+
+everything: $(EVERYTHING_TARGETS)
+
+linecount:
+	cloc --read-lang-def=$(PROJECT).cloc \
+		src/$(PROJECT) include/$(PROJECT) tools examples \
+		python matlab
+
+lint: $(EMPTY_LINT_REPORT)
+
+lintclean:
+	@ $(RM) -r $(LINT_OUTPUT_DIR) $(EMPTY_LINT_REPORT) $(NONEMPTY_LINT_REPORT)
+
+docs: $(DOXYGEN_OUTPUT_DIR)
+	@ cd ./docs ; ln -sfn ../$(DOXYGEN_OUTPUT_DIR)/html doxygen
+
+$(DOXYGEN_OUTPUT_DIR): $(DOXYGEN_CONFIG_FILE) $(DOXYGEN_SOURCES)
+	$(DOXYGEN_COMMAND) $(DOXYGEN_CONFIG_FILE)
+
+$(EMPTY_LINT_REPORT): $(LINT_OUTPUTS) | $(BUILD_DIR)
+	@ cat $(LINT_OUTPUTS) > $@
+	@ if [ -s "$@" ]; then \
+		cat $@; \
+		mv $@ $(NONEMPTY_LINT_REPORT); \
+		echo "Found one or more lint errors."; \
+		exit 1; \
+	  fi; \
+	  $(RM) $(NONEMPTY_LINT_REPORT); \
+	  echo "No lint errors!";
+
+$(LINT_OUTPUTS): $(LINT_OUTPUT_DIR)/%.lint.txt : % $(LINT_SCRIPT) | $(LINT_OUTPUT_DIR)
+	@ mkdir -p $(dir $@)
+	@ python $(LINT_SCRIPT) $< 2>&1 \
+		| grep -v "^Done processing " \
+		| grep -v "^Total errors found: 0" \
+		> $@ \
+		|| true
+
+test: $(TEST_ALL_BIN) $(TEST_ALL_DYNLINK_BIN) $(TEST_BINS)
+
+tools: $(TOOL_BINS) $(TOOL_BIN_LINKS)
+
+examples: $(EXAMPLE_BINS)
+
+py$(PROJECT): py
+
+py: $(PY$(PROJECT)_SO) $(PROTO_GEN_PY)
+
+$(PY$(PROJECT)_SO): $(PY$(PROJECT)_SRC) $(PY$(PROJECT)_HXX) | $(DYNAMIC_NAME)
+	@ echo CXX/LD -o $@ $<
+	$(Q)$(CXX) -shared -o $@ $(PY$(PROJECT)_SRC) \
+		-o $@ $(LINKFLAGS) -l$(PROJECT) $(PYTHON_LDFLAGS) \
+		-Wl,-rpath,$(ORIGIN)/../../build/lib
+
+mat$(PROJECT): mat
+
+mat: $(MAT$(PROJECT)_SO)
+
+$(MAT$(PROJECT)_SO): $(MAT$(PROJECT)_SRC) $(STATIC_NAME)
+	@ if [ -z "$(MATLAB_DIR)" ]; then \
+		echo "MATLAB_DIR must be specified in $(CONFIG_FILE)" \
+			"to build mat$(PROJECT)."; \
+		exit 1; \
+	fi
+	@ echo MEX $<
+	$(Q)$(MATLAB_DIR)/bin/mex $(MAT$(PROJECT)_SRC) \
+			CXX="$(CXX)" \
+			CXXFLAGS="\$$CXXFLAGS $(MATLAB_CXXFLAGS)" \
+			CXXLIBS="\$$CXXLIBS $(STATIC_LINK_COMMAND) $(LDFLAGS)" -output $@
+	@ if [ -f "$(PROJECT)_.d" ]; then \
+		mv -f $(PROJECT)_.d $(BUILD_DIR)/${MAT$(PROJECT)_SO:.$(MAT_SO_EXT)=.d}; \
+	fi
+
+runtest: $(TEST_ALL_BIN)
+	$(TOOL_BUILD_DIR)/caffe
+	$(TEST_ALL_BIN) $(TEST_GPUID) --gtest_shuffle $(TEST_FILTER)
+
+pytest: py
+	cd python; python -m unittest discover -s caffe/test
+	
+mattest: mat
+	cd matlab; $(MATLAB_DIR)/bin/matlab -nodisplay -r 'caffe.run_tests(), exit()'
+
+warn: $(EMPTY_WARN_REPORT)
+
+$(EMPTY_WARN_REPORT): $(ALL_WARNS) | $(BUILD_DIR)
+	@ cat $(ALL_WARNS) > $@
+	@ if [ -s "$@" ]; then \
+		cat $@; \
+		mv $@ $(NONEMPTY_WARN_REPORT); \
+		echo "Compiler produced one or more warnings."; \
+		exit 1; \
+	  fi; \
+	  $(RM) $(NONEMPTY_WARN_REPORT); \
+	  echo "No compiler warnings!";
+
+$(ALL_WARNS): %.o.$(WARNS_EXT) : %.o
+
+$(BUILD_DIR_LINK): $(BUILD_DIR)/.linked
+
+# Create a target ".linked" in this BUILD_DIR to tell Make that the "build" link
+# is currently correct, then delete the one in the OTHER_BUILD_DIR in case it
+# exists and $(DEBUG) is toggled later.
+$(BUILD_DIR)/.linked:
+	@ mkdir -p $(BUILD_DIR)
+	@ $(RM) $(OTHER_BUILD_DIR)/.linked
+	@ $(RM) -r $(BUILD_DIR_LINK)
+	@ ln -s $(BUILD_DIR) $(BUILD_DIR_LINK)
+	@ touch $@
+
+$(ALL_BUILD_DIRS): | $(BUILD_DIR_LINK)
+	@ mkdir -p $@
+
+$(DYNAMIC_NAME): $(OBJS) | $(LIB_BUILD_DIR)
+	@ echo LD -o $@
+	$(Q)$(CXX) -shared -o $@ $(OBJS) $(LINKFLAGS) $(LDFLAGS) $(DYNAMIC_FLAGS)
+
+$(STATIC_NAME): $(OBJS) | $(LIB_BUILD_DIR)
+	@ echo AR -o $@
+	$(Q)ar rcs $@ $(OBJS)
+
+$(BUILD_DIR)/%.o: %.cpp | $(ALL_BUILD_DIRS)
+	@ echo CXX $<
+	$(Q)$(CXX) $< $(CXXFLAGS) -c -o $@ 2> $@.$(WARNS_EXT) \
+		|| (cat $@.$(WARNS_EXT); exit 1)
+	@ cat $@.$(WARNS_EXT)
+
+$(PROTO_BUILD_DIR)/%.pb.o: $(PROTO_BUILD_DIR)/%.pb.cc $(PROTO_GEN_HEADER) \
+		| $(PROTO_BUILD_DIR)
+	@ echo CXX $<
+	$(Q)$(CXX) $< $(CXXFLAGS) -c -o $@ 2> $@.$(WARNS_EXT) \
+		|| (cat $@.$(WARNS_EXT); exit 1)
+	@ cat $@.$(WARNS_EXT)
+
+$(BUILD_DIR)/cuda/%.o: %.cu | $(ALL_BUILD_DIRS)
+	@ echo NVCC $<
+	$(Q)$(CUDA_DIR)/bin/nvcc $(NVCCFLAGS) $(CUDA_ARCH) -M $< -o ${@:.o=.d} \
+		-odir $(@D)
+	$(Q)$(CUDA_DIR)/bin/nvcc $(NVCCFLAGS) $(CUDA_ARCH) -c $< -o $@ 2> $@.$(WARNS_EXT) \
+		|| (cat $@.$(WARNS_EXT); exit 1)
+	@ cat $@.$(WARNS_EXT)
+
+$(TEST_ALL_BIN): $(TEST_MAIN_SRC) $(TEST_OBJS) $(GTEST_OBJ) \
+		| $(DYNAMIC_NAME) $(TEST_BIN_DIR)
+	@ echo CXX/LD -o $@ $<
+	$(Q)$(CXX) $(TEST_MAIN_SRC) $(TEST_OBJS) $(GTEST_OBJ) \
+		-o $@ $(LINKFLAGS) $(LDFLAGS) -l$(PROJECT) -Wl,-rpath,$(ORIGIN)/../lib
+
+$(TEST_CU_BINS): $(TEST_BIN_DIR)/%.testbin: $(TEST_CU_BUILD_DIR)/%.o \
+	$(GTEST_OBJ) | $(DYNAMIC_NAME) $(TEST_BIN_DIR)
+	@ echo LD $<
+	$(Q)$(CXX) $(TEST_MAIN_SRC) $< $(GTEST_OBJ) \
+		-o $@ $(LINKFLAGS) $(LDFLAGS) -l$(PROJECT) -Wl,-rpath,$(ORIGIN)/../lib
+
+$(TEST_CXX_BINS): $(TEST_BIN_DIR)/%.testbin: $(TEST_CXX_BUILD_DIR)/%.o \
+	$(GTEST_OBJ) | $(DYNAMIC_NAME) $(TEST_BIN_DIR)
+	@ echo LD $<
+	$(Q)$(CXX) $(TEST_MAIN_SRC) $< $(GTEST_OBJ) \
+		-o $@ $(LINKFLAGS) $(LDFLAGS) -l$(PROJECT) -Wl,-rpath,$(ORIGIN)/../lib
+
+# Target for extension-less symlinks to tool binaries with extension '*.bin'.
+$(TOOL_BUILD_DIR)/%: $(TOOL_BUILD_DIR)/%.bin | $(TOOL_BUILD_DIR)
+	@ $(RM) $@
+	@ ln -s $(abspath $<) $@
+
+$(TOOL_BINS): %.bin : %.o | $(DYNAMIC_NAME)
+	@ echo CXX/LD -o $@
+	$(Q)$(CXX) $< -o $@ $(LINKFLAGS) -l$(PROJECT) $(LDFLAGS) \
+		-Wl,-rpath,$(ORIGIN)/../lib
+
+$(EXAMPLE_BINS): %.bin : %.o | $(DYNAMIC_NAME)
+	@ echo CXX/LD -o $@
+	$(Q)$(CXX) $< -o $@ $(LINKFLAGS) -l$(PROJECT) $(LDFLAGS) \
+		-Wl,-rpath,$(ORIGIN)/../../lib
+
+proto: $(PROTO_GEN_CC) $(PROTO_GEN_HEADER)
+
+$(PROTO_BUILD_DIR)/%.pb.cc $(PROTO_BUILD_DIR)/%.pb.h : \
+		$(PROTO_SRC_DIR)/%.proto | $(PROTO_BUILD_DIR)
+	@ echo PROTOC $<
+	$(Q)protoc --proto_path=$(PROTO_SRC_DIR) --cpp_out=$(PROTO_BUILD_DIR) $<
+
+$(PY_PROTO_BUILD_DIR)/%_pb2.py : $(PROTO_SRC_DIR)/%.proto \
+		$(PY_PROTO_INIT) | $(PY_PROTO_BUILD_DIR)
+	@ echo PROTOC \(python\) $<
+	$(Q)protoc --proto_path=$(PROTO_SRC_DIR) --python_out=$(PY_PROTO_BUILD_DIR) $<
+
+$(PY_PROTO_INIT): | $(PY_PROTO_BUILD_DIR)
+	touch $(PY_PROTO_INIT)
+
+clean:
+	@- $(RM) -rf $(ALL_BUILD_DIRS)
+	@- $(RM) -rf $(OTHER_BUILD_DIR)
+	@- $(RM) -rf $(BUILD_DIR_LINK)
+	@- $(RM) -rf $(DISTRIBUTE_DIR)
+	@- $(RM) $(PY$(PROJECT)_SO)
+	@- $(RM) $(MAT$(PROJECT)_SO)
+
+supercleanfiles:
+	$(eval SUPERCLEAN_FILES := $(strip \
+			$(foreach ext,$(SUPERCLEAN_EXTS), $(shell find . -name '*$(ext)' \
+			-not -path './data/*'))))
+
+supercleanlist: supercleanfiles
+	@ \
+	if [ -z "$(SUPERCLEAN_FILES)" ]; then \
+		echo "No generated files found."; \
+	else \
+		echo $(SUPERCLEAN_FILES) | tr ' ' '\n'; \
+	fi
+
+superclean: clean supercleanfiles
+	@ \
+	if [ -z "$(SUPERCLEAN_FILES)" ]; then \
+		echo "No generated files found."; \
+	else \
+		echo "Deleting the following generated files:"; \
+		echo $(SUPERCLEAN_FILES) | tr ' ' '\n'; \
+		$(RM) $(SUPERCLEAN_FILES); \
+	fi
+
+$(DIST_ALIASES): $(DISTRIBUTE_DIR)
+
+$(DISTRIBUTE_DIR): all py | $(DISTRIBUTE_SUBDIRS)
+	# add include
+	cp -r include $(DISTRIBUTE_DIR)/
+	mkdir -p $(DISTRIBUTE_DIR)/include/caffe/proto
+	cp $(PROTO_GEN_HEADER_SRCS) $(DISTRIBUTE_DIR)/include/caffe/proto
+	# add tool and example binaries
+	cp $(TOOL_BINS) $(DISTRIBUTE_DIR)/bin
+	cp $(EXAMPLE_BINS) $(DISTRIBUTE_DIR)/bin
+	# add libraries
+	cp $(STATIC_NAME) $(DISTRIBUTE_DIR)/lib
+	cp $(DYNAMIC_NAME) $(DISTRIBUTE_DIR)/lib
+	# add python - it's not the standard way, indeed...
+	cp -r python $(DISTRIBUTE_DIR)/python
+
+-include $(DEPS)
diff --git a/Makefile.config.example b/Makefile.config.example
new file mode 100644
index 0000000..a873502
--- /dev/null
+++ b/Makefile.config.example
@@ -0,0 +1,93 @@
+## Refer to http://caffe.berkeleyvision.org/installation.html
+# Contributions simplifying and improving our build system are welcome!
+
+# cuDNN acceleration switch (uncomment to build with cuDNN).
+# USE_CUDNN := 1
+
+# CPU-only switch (uncomment to build without GPU support).
+# CPU_ONLY := 1
+
+# To customize your choice of compiler, uncomment and set the following.
+# N.B. the default for Linux is g++ and the default for OSX is clang++
+# CUSTOM_CXX := g++
+
+# CUDA directory contains bin/ and lib/ directories that we need.
+CUDA_DIR := /usr/local/cuda
+# On Ubuntu 14.04, if cuda tools are installed via
+# "sudo apt-get install nvidia-cuda-toolkit" then use this instead:
+# CUDA_DIR := /usr
+
+# CUDA architecture setting: going with all of them.
+# For CUDA < 6.0, comment the *_50 lines for compatibility.
+CUDA_ARCH := -gencode arch=compute_20,code=sm_20 \
+		-gencode arch=compute_20,code=sm_21 \
+		-gencode arch=compute_30,code=sm_30 \
+		-gencode arch=compute_35,code=sm_35 \
+		-gencode arch=compute_50,code=sm_50 \
+		-gencode arch=compute_50,code=compute_50
+
+# BLAS choice:
+# atlas for ATLAS (default)
+# mkl for MKL
+# open for OpenBLAS
+BLAS := atlas
+# Custom (MKL/ATLAS/OpenBLAS) include and lib directories.
+# Leave commented to accept the defaults for your choice of BLAS
+# (which should work)!
+# BLAS_INCLUDE := /path/to/your/blas
+# BLAS_LIB := /path/to/your/blas
+
+# Homebrew puts openblas in a directory that is not on the standard search path
+# BLAS_INCLUDE := $(shell brew --prefix openblas)/include
+# BLAS_LIB := $(shell brew --prefix openblas)/lib
+
+# This is required only if you will compile the matlab interface.
+# MATLAB directory should contain the mex binary in /bin.
+# MATLAB_DIR := /usr/local
+# MATLAB_DIR := /Applications/MATLAB_R2012b.app
+
+# NOTE: this is required only if you will compile the python interface.
+# We need to be able to find Python.h and numpy/arrayobject.h.
+PYTHON_INCLUDE := /usr/include/python2.7 \
+		/usr/lib/python2.7/dist-packages/numpy/core/include
+# Anaconda Python distribution is quite popular. Include path:
+# Verify anaconda location, sometimes it's in root.
+# ANACONDA_HOME := $(HOME)/anaconda
+# PYTHON_INCLUDE := $(ANACONDA_HOME)/include \
+		# $(ANACONDA_HOME)/include/python2.7 \
+		# $(ANACONDA_HOME)/lib/python2.7/site-packages/numpy/core/include \
+
+# We need to be able to find libpythonX.X.so or .dylib.
+PYTHON_LIB := /usr/lib
+# PYTHON_LIB := $(ANACONDA_HOME)/lib
+
+# Homebrew installs numpy in a non-standard path (keg-only)
+# PYTHON_INCLUDE += $(dir $(shell python -c 'import numpy.core; print(numpy.core.__file__)'))/include
+# PYTHON_LIB += $(shell brew --prefix numpy)/lib
+
+# Uncomment to support layers written in Python (will link against Python libs)
+# WITH_PYTHON_LAYER := 1
+
+# Whatever else you find you need goes here.
+INCLUDE_DIRS := $(PYTHON_INCLUDE) /usr/local/include
+LIBRARY_DIRS := $(PYTHON_LIB) /usr/local/lib /usr/lib
+
+# If Homebrew is installed at a non-standard location (for example your home directory) and you use it for general dependencies
+# INCLUDE_DIRS += $(shell brew --prefix)/include
+# LIBRARY_DIRS += $(shell brew --prefix)/lib
+
+# Uncomment to use `pkg-config` to specify OpenCV library paths.
+# (Usually not necessary -- OpenCV libraries are normally installed in one of the above $LIBRARY_DIRS.)
+# USE_PKG_CONFIG := 1
+
+BUILD_DIR := build
+DISTRIBUTE_DIR := distribute
+
+# Uncomment for debugging. Does not work on OSX due to https://github.com/BVLC/caffe/issues/171
+# DEBUG := 1
+
+# The ID of the GPU that 'make runtest' will use to run unit tests.
+TEST_GPUID := 0
+
+# enable pretty build (comment to see full commands)
+Q ?= @
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..ebec286
--- /dev/null
+++ b/README.md
@@ -0,0 +1,34 @@
+# Caffe
+
+Caffe is a deep learning framework made with expression, speed, and modularity in mind.
+It is developed by the Berkeley Vision and Learning Center ([BVLC](http://bvlc.eecs.berkeley.edu)) and community contributors.
+
+Check out the [project site](http://caffe.berkeleyvision.org) for all the details like
+
+- [DIY Deep Learning for Vision with Caffe](https://docs.google.com/presentation/d/1UeKXVgRvvxg9OUdh_UiC5G71UMscNPlvArsWER41PsU/edit#slide=id.p)
+- [Tutorial Documentation](http://caffe.berkeleyvision.org/tutorial/)
+- [BVLC reference models](http://caffe.berkeleyvision.org/model_zoo.html) and the [community model zoo](https://github.com/BVLC/caffe/wiki/Model-Zoo)
+- [Installation instructions](http://caffe.berkeleyvision.org/installation.html)
+
+and step-by-step examples.
+
+[![Join the chat at https://gitter.im/BVLC/caffe](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/BVLC/caffe?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+
+Please join the [caffe-users group](https://groups.google.com/forum/#!forum/caffe-users) or [gitter chat](https://gitter.im/BVLC/caffe) to ask questions and talk about methods and models.
+Framework development discussions and thorough bug reports are collected on [Issues](https://github.com/BVLC/caffe/issues).
+
+Happy brewing!
+
+## License and Citation
+
+Caffe is released under the [BSD 2-Clause license](https://github.com/BVLC/caffe/blob/master/LICENSE).
+The BVLC reference models are released for unrestricted use.
+
+Please cite Caffe in your publications if it helps your research:
+
+    @article{jia2014caffe,
+      Author = {Jia, Yangqing and Shelhamer, Evan and Donahue, Jeff and Karayev, Sergey and Long, Jonathan and Girshick, Ross and Guadarrama, Sergio and Darrell, Trevor},
+      Journal = {arXiv preprint arXiv:1408.5093},
+      Title = {Caffe: Convolutional Architecture for Fast Feature Embedding},
+      Year = {2014}
+    }
diff --git a/caffe.cloc b/caffe.cloc
new file mode 100644
index 0000000..a36ab61
--- /dev/null
+++ b/caffe.cloc
@@ -0,0 +1,53 @@
+Bourne Shell
+    filter remove_matches ^\s*#
+    filter remove_inline #.*$
+    extension sh
+    script_exe sh
+C
+    filter remove_matches ^\s*//
+    filter call_regexp_common C
+    filter remove_inline //.*$
+    extension c
+    extension ec
+    extension pgc
+C++
+    filter remove_matches ^\s*//
+    filter remove_inline //.*$
+    filter call_regexp_common C
+    extension C
+    extension cc
+    extension cpp
+    extension cxx
+    extension pcc
+C/C++ Header
+    filter remove_matches ^\s*//
+    filter call_regexp_common C
+    filter remove_inline //.*$
+    extension H
+    extension h
+    extension hh
+    extension hpp
+CUDA
+    filter remove_matches ^\s*//
+    filter remove_inline //.*$
+    filter call_regexp_common C
+    extension cu
+Python
+    filter remove_matches ^\s*#
+    filter docstring_to_C
+    filter call_regexp_common C
+    filter remove_inline #.*$
+    extension py
+make
+    filter remove_matches ^\s*#
+    filter remove_inline #.*$
+    extension Gnumakefile
+    extension Makefile
+    extension am
+    extension gnumakefile
+    extension makefile
+    filename Gnumakefile
+    filename Makefile
+    filename gnumakefile
+    filename makefile
+    script_exe make
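
The file above is a cloc language-definition file; a typical invocation that counts Caffe's code with it would be something like this, run from the repository root:

    cloc --read-lang-def=caffe.cloc .
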
diff --git a/cmake/ConfigGen.cmake b/cmake/ConfigGen.cmake
new file mode 100644
index 0000000..566d6ca
--- /dev/null
+++ b/cmake/ConfigGen.cmake
@@ -0,0 +1,104 @@
+
+################################################################################################
+# Helper function to fetch caffe includes which will be passed to dependent projects
+# Usage:
+#   caffe_get_current_includes(<includes_list_variable>)
+function(caffe_get_current_includes includes_variable)
+  get_property(current_includes DIRECTORY PROPERTY INCLUDE_DIRECTORIES)
+  caffe_convert_absolute_paths(current_includes)
+
+  # remove at most one ${PROJECT_BINARY_DIR} include added for caffe_config.h
+  list(FIND current_includes ${PROJECT_BINARY_DIR} __index)
+  list(REMOVE_AT current_includes ${__index})
+
+  # removing numpy includes (since not required for client libs)
+  set(__toremove "")
+  foreach(__i ${current_includes})
+    if(${__i} MATCHES "python")
+      list(APPEND __toremove ${__i})
+    endif()
+  endforeach()
+  if(__toremove)
+    list(REMOVE_ITEM current_includes ${__toremove})
+  endif()
+
+  caffe_list_unique(current_includes)
+  set(${includes_variable} ${current_includes} PARENT_SCOPE)
+endfunction()
+
+################################################################################################
+# Helper function to get all list items that begin with given prefix
+# Usage:
+#   caffe_get_items_with_prefix(<prefix> <list_variable> <output_variable>)
+function(caffe_get_items_with_prefix prefix list_variable output_variable)
+  set(__result "")
+  foreach(__e ${${list_variable}})
+    if(__e MATCHES "^${prefix}.*")
+      list(APPEND __result ${__e})
+    endif()
+  endforeach()
+  set(${output_variable} ${__result} PARENT_SCOPE)
+endfunction()
+
+################################################################################################
+# Function for generating Caffe build- and install-tree export config files
+# Usage:
+#  caffe_generate_export_configs()
+function(caffe_generate_export_configs)
+  set(install_cmake_suffix "share/Caffe")
+
+  # ---[ Configure build-tree CaffeConfig.cmake file ]---
+  caffe_get_current_includes(Caffe_INCLUDE_DIRS)
+
+  set(Caffe_DEFINITIONS "")
+  if(NOT HAVE_CUDA)
+    set(HAVE_CUDA FALSE)
+    list(APPEND Caffe_DEFINITIONS -DCPU_ONLY)
+  endif()
+
+  if(NOT HAVE_CUDNN)
+    set(HAVE_CUDNN FALSE)
+  else()
+    list(APPEND Caffe_DEFINITIONS -DUSE_CUDNN)
+  endif()
+
+  if(BLAS STREQUAL "MKL" OR BLAS STREQUAL "mkl")
+    list(APPEND Caffe_DEFINITIONS -DUSE_MKL)
+  endif()
+
+  configure_file("cmake/Templates/CaffeConfig.cmake.in" "${PROJECT_BINARY_DIR}/CaffeConfig.cmake" @ONLY)
+
+  # Add targets to the build-tree export set
+  export(TARGETS caffe proto FILE "${PROJECT_BINARY_DIR}/CaffeTargets.cmake")
+  export(PACKAGE Caffe)
+
+  # ---[ Configure install-tree CaffeConfig.cmake file ]---
+
+  # remove source and build dir includes
+  caffe_get_items_with_prefix(${PROJECT_SOURCE_DIR} Caffe_INCLUDE_DIRS __insource)
+  caffe_get_items_with_prefix(${PROJECT_BINARY_DIR} Caffe_INCLUDE_DIRS __inbinary)
+  list(REMOVE_ITEM Caffe_INCLUDE_DIRS ${__insource} ${__inbinary})
+
+  # add `install` include folder
+  set(lines
+     "get_filename_component(__caffe_include \"\${Caffe_CMAKE_DIR}/../../include\" ABSOLUTE)\n"
+     "list(APPEND Caffe_INCLUDE_DIRS \${__caffe_include})\n"
+     "unset(__caffe_include)\n")
+  string(REPLACE ";" "" Caffe_INSTALL_INCLUDE_DIR_APPEND_COMMAND ${lines})
+
+  configure_file("cmake/Templates/CaffeConfig.cmake.in" "${PROJECT_BINARY_DIR}/cmake/CaffeConfig.cmake" @ONLY)
+
+  # Install the CaffeConfig.cmake and export set to use with install-tree
+  install(FILES "${PROJECT_BINARY_DIR}/cmake/CaffeConfig.cmake" DESTINATION ${install_cmake_suffix})
+  install(EXPORT CaffeTargets DESTINATION ${install_cmake_suffix})
+
+  # ---[ Configure and install version file ]---
+
+  # TODO: The lines below are commented out because Caffe doesn't declare its version in headers.
+  # When the declarations are added, modify `caffe_extract_caffe_version()` macro and uncomment
+
+  # configure_file(cmake/Templates/CaffeConfigVersion.cmake.in "${PROJECT_BINARY_DIR}/CaffeConfigVersion.cmake" @ONLY)
+  # install(FILES "${PROJECT_BINARY_DIR}/CaffeConfigVersion.cmake" DESTINATION ${install_cmake_suffix})
+endfunction()
+
+
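
For context, a downstream project would consume the exported configuration roughly as sketched below; Caffe_LIBRARIES is assumed to be provided by the generated CaffeConfig.cmake alongside the Caffe_INCLUDE_DIRS and Caffe_DEFINITIONS set up above, and the target and source names are placeholders:

    # minimal consumer sketch (names are illustrative)
    find_package(Caffe REQUIRED)
    include_directories(${Caffe_INCLUDE_DIRS})
    add_definitions(${Caffe_DEFINITIONS})

    add_executable(classifier classifier.cpp)
    target_link_libraries(classifier ${Caffe_LIBRARIES})
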
diff --git a/cmake/Cuda.cmake b/cmake/Cuda.cmake
new file mode 100644
index 0000000..ff58d31
--- /dev/null
+++ b/cmake/Cuda.cmake
@@ -0,0 +1,254 @@
+if(CPU_ONLY)
+  return()
+endif()
+
+# Known NVIDIA GPU architectures Caffe can be compiled for.
+# This list will be used for CUDA_ARCH_NAME = All option
+set(Caffe_known_gpu_archs "20 21(20) 30 35 50")
+
+################################################################################################
+# A function for automatic detection of installed GPUs (if autodetection is enabled)
+# Usage:
+#   caffe_detect_installed_gpus(out_variable)
+function(caffe_detect_installed_gpus out_variable)
+  if(NOT CUDA_gpu_detect_output)
+    set(__cufile ${PROJECT_BINARY_DIR}/detect_cuda_archs.cu)
+
+    file(WRITE ${__cufile} ""
+      "#include <cstdio>\n"
+      "int main()\n"
+      "{\n"
+      "  int count = 0;\n"
+      "  if (cudaSuccess != cudaGetDeviceCount(&count)) return -1;\n"
+      "  if (count == 0) return -1;\n"
+      "  for (int device = 0; device < count; ++device)\n"
+      "  {\n"
+      "    cudaDeviceProp prop;\n"
+      "    if (cudaSuccess == cudaGetDeviceProperties(&prop, device))\n"
+      "      std::printf(\"%d.%d \", prop.major, prop.minor);\n"
+      "  }\n"
+      "  return 0;\n"
+      "}\n")
+
+    execute_process(COMMAND "${CUDA_NVCC_EXECUTABLE}" "--run" "${__cufile}"
+                    WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/CMakeFiles/"
+                    RESULT_VARIABLE __nvcc_res OUTPUT_VARIABLE __nvcc_out
+                    ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+    if(__nvcc_res EQUAL 0)
+      string(REPLACE "2.1" "2.1(2.0)" __nvcc_out "${__nvcc_out}")
+      set(CUDA_gpu_detect_output ${__nvcc_out} CACHE INTERNAL "Returned GPU architectures from caffe_detect_gpus tool" FORCE)
+    endif()
+  endif()
+
+  if(NOT CUDA_gpu_detect_output)
+    message(STATUS "Automatic GPU detection failed. Building for all known architectures.")
+    set(${out_variable} ${Caffe_known_gpu_archs} PARENT_SCOPE)
+  else()
+    set(${out_variable} ${CUDA_gpu_detect_output} PARENT_SCOPE)
+  endif()
+endfunction()
+
+
+################################################################################################
+# Function for selecting GPU arch flags for nvcc based on CUDA_ARCH_NAME
+# Usage:
+#   caffe_select_nvcc_arch_flags(out_variable)
+function(caffe_select_nvcc_arch_flags out_variable)
+  # List of arch names
+  set(__archs_names "Fermi" "Kepler" "Maxwell" "All" "Manual")
+  set(__archs_name_default "All")
+  if(NOT CMAKE_CROSSCOMPILING)
+    list(APPEND __archs_names "Auto")
+    set(__archs_name_default "Auto")
+  endif()
+
+  # set CUDA_ARCH_NAME strings (so it will be shown as a drop-down list in cmake-gui)
+  set(CUDA_ARCH_NAME ${__archs_name_default} CACHE STRING "Select target NVIDIA GPU architecture.")
+  set_property( CACHE CUDA_ARCH_NAME PROPERTY STRINGS "" ${__archs_names} )
+  mark_as_advanced(CUDA_ARCH_NAME)
+
+  # verify CUDA_ARCH_NAME value
+  if(NOT ";${__archs_names};" MATCHES ";${CUDA_ARCH_NAME};")
+    string(REPLACE ";" ", " __archs_names "${__archs_names}")
+    message(FATAL_ERROR "Only ${__archs_names} architeture names are supported.")
+  endif()
+
+  if(${CUDA_ARCH_NAME} STREQUAL "Manual")
+    set(CUDA_ARCH_BIN ${Caffe_known_gpu_archs} CACHE STRING "Specify 'real' GPU architectures to build binaries for, BIN(PTX) format is supported")
+    set(CUDA_ARCH_PTX "50"                     CACHE STRING "Specify 'virtual' PTX architectures to build PTX intermediate code for")
+    mark_as_advanced(CUDA_ARCH_BIN CUDA_ARCH_PTX)
+  else()
+    unset(CUDA_ARCH_BIN CACHE)
+    unset(CUDA_ARCH_PTX CACHE)
+  endif()
+
+  if(${CUDA_ARCH_NAME} STREQUAL "Fermi")
+    set(__cuda_arch_bin "20 21(20)")
+  elseif(${CUDA_ARCH_NAME} STREQUAL "Kepler")
+    set(__cuda_arch_bin "30 35")
+  elseif(${CUDA_ARCH_NAME} STREQUAL "Maxwell")
+    set(__cuda_arch_bin "50")
+  elseif(${CUDA_ARCH_NAME} STREQUAL "All")
+    set(__cuda_arch_bin ${Caffe_known_gpu_archs})
+  elseif(${CUDA_ARCH_NAME} STREQUAL "Auto")
+    caffe_detect_installed_gpus(__cuda_arch_bin)
+  else()  # (${CUDA_ARCH_NAME} STREQUAL "Manual")
+    set(__cuda_arch_bin ${CUDA_ARCH_BIN})
+  endif()
+
+  # remove dots and convert to lists
+  string(REGEX REPLACE "\\." "" __cuda_arch_bin "${__cuda_arch_bin}")
+  string(REGEX REPLACE "\\." "" __cuda_arch_ptx "${CUDA_ARCH_PTX}")
+  string(REGEX MATCHALL "[0-9()]+" __cuda_arch_bin "${__cuda_arch_bin}")
+  string(REGEX MATCHALL "[0-9]+"   __cuda_arch_ptx "${__cuda_arch_ptx}")
+  caffe_list_unique(__cuda_arch_bin __cuda_arch_ptx)
+
+  set(__nvcc_flags "")
+  set(__nvcc_archs_readable "")
+
+  # Tell NVCC to add binaries for the specified GPUs
+  foreach(__arch ${__cuda_arch_bin})
+    if(__arch MATCHES "([0-9]+)\\(([0-9]+)\\)")
+      # User explicitly specified PTX for the concrete BIN
+      list(APPEND __nvcc_flags -gencode arch=compute_${CMAKE_MATCH_2},code=sm_${CMAKE_MATCH_1})
+      list(APPEND __nvcc_archs_readable sm_${CMAKE_MATCH_1})
+    else()
+      # User didn't explicitly specify PTX for the concrete BIN, we assume PTX=BIN
+      list(APPEND __nvcc_flags -gencode arch=compute_${__arch},code=sm_${__arch})
+      list(APPEND __nvcc_archs_readable sm_${__arch})
+    endif()
+  endforeach()
+
+  # Tell NVCC to add PTX intermediate code for the specified architectures
+  foreach(__arch ${__cuda_arch_ptx})
+    list(APPEND __nvcc_flags -gencode arch=compute_${__arch},code=compute_${__arch})
+    list(APPEND __nvcc_archs_readable compute_${__arch})
+  endforeach()
+
+  string(REPLACE ";" " " __nvcc_archs_readable "${__nvcc_archs_readable}")
+  set(${out_variable}          ${__nvcc_flags}          PARENT_SCOPE)
+  set(${out_variable}_readable ${__nvcc_archs_readable} PARENT_SCOPE)
+endfunction()
+
+################################################################################################
+# Short command for CUDA compilation
+# Usage:
+#   caffe_cuda_compile(<objlist_variable> <cuda_files>)
+macro(caffe_cuda_compile objlist_variable)
+  foreach(var CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG)
+    set(${var}_backup_in_cuda_compile_ "${${var}}")
+
+    # we remove /EHa as it generates warnings under windows
+    string(REPLACE "/EHa" "" ${var} "${${var}}")
+
+  endforeach()
+
+  if(UNIX OR APPLE)
+    list(APPEND CUDA_NVCC_FLAGS -Xcompiler -fPIC)
+  endif()
+
+  if(APPLE)
+    list(APPEND CUDA_NVCC_FLAGS -Xcompiler -Wno-unused-function)
+  endif()
+
+  cuda_compile(cuda_objcs ${ARGN})
+
+  foreach(var CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG)
+    set(${var} "${${var}_backup_in_cuda_compile_}")
+    unset(${var}_backup_in_cuda_compile_)
+  endforeach()
+
+  set(${objlist_variable} ${cuda_objcs})
+endmacro()
+
+################################################################################################
+# Short command for cuDNN detection. We believe it will soon be part of the CUDA toolkit distribution,
+# which is why this is a macro here rather than a separate FindcuDNN.cmake file.
+# Usage:
+#   detect_cuDNN()
+function(detect_cuDNN)
+  set(CUDNN_ROOT "" CACHE PATH "CUDNN root folder")
+
+  find_path(CUDNN_INCLUDE cudnn.h
+            PATHS ${CUDNN_ROOT} $ENV{CUDNN_ROOT} ${CUDA_TOOLKIT_INCLUDE}
+            DOC "Path to cuDNN include directory." )
+
+  get_filename_component(__libpath_hist ${CUDA_CUDART_LIBRARY} PATH)
+  find_library(CUDNN_LIBRARY NAMES libcudnn.so # libcudnn_static.a
+                             PATHS ${CUDNN_ROOT} $ENV{CUDNN_ROOT} ${CUDNN_INCLUDE} ${__libpath_hist}
+                             DOC "Path to cuDNN library.")
+
+  if(CUDNN_INCLUDE AND CUDNN_LIBRARY)
+    set(HAVE_CUDNN  TRUE PARENT_SCOPE)
+    set(CUDNN_FOUND TRUE PARENT_SCOPE)
+
+    mark_as_advanced(CUDNN_INCLUDE CUDNN_LIBRARY CUDNN_ROOT)
+    message(STATUS "Found cuDNN (include: ${CUDNN_INCLUDE}, library: ${CUDNN_LIBRARY})")
+  endif()
+endfunction()
+
+
+################################################################################################
+###  Non macro section
+################################################################################################
+
+find_package(CUDA 5.5 QUIET)
+find_cuda_helper_libs(curand)  # cmake 2.8.7 compatibility: it doesn't search for curand
+
+if(NOT CUDA_FOUND)
+  return()
+endif()
+
+set(HAVE_CUDA TRUE)
+message(STATUS "CUDA detected: " ${CUDA_VERSION})
+include_directories(SYSTEM ${CUDA_INCLUDE_DIRS})
+list(APPEND Caffe_LINKER_LIBS ${CUDA_CUDART_LIBRARY}
+                              ${CUDA_curand_LIBRARY} ${CUDA_CUBLAS_LIBRARIES})
+
+# cudnn detection
+if(USE_CUDNN)
+  detect_cuDNN()
+  if(HAVE_CUDNN)
+    add_definitions(-DUSE_CUDNN)
+    include_directories(SYSTEM ${CUDNN_INCLUDE})
+    list(APPEND Caffe_LINKER_LIBS ${CUDNN_LIBRARY})
+  endif()
+endif()
+
+# setting nvcc arch flags
+caffe_select_nvcc_arch_flags(NVCC_FLAGS_EXTRA)
+list(APPEND CUDA_NVCC_FLAGS ${NVCC_FLAGS_EXTRA})
+message(STATUS "Added CUDA NVCC flags for: ${NVCC_FLAGS_EXTRA_readable}")
+
+# Boost 1.55 workaround, see https://svn.boost.org/trac/boost/ticket/9392 or
+# https://github.com/ComputationalRadiationPhysics/picongpu/blob/master/src/picongpu/CMakeLists.txt
+if(Boost_VERSION EQUAL 105500)
+  message(STATUS "Cuda + Boost 1.55: Applying noinline work around")
+  # avoid warning for CMake >= 2.8.12
+  set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} \"-DBOOST_NOINLINE=__attribute__((noinline))\" ")
+endif()
+
+# disable some nvcc diagnostics that appear in boost, glog, gflags, opencv, etc.
+foreach(diag cc_clobber_ignored integer_sign_change useless_using_declaration set_but_not_used)
+  list(APPEND CUDA_NVCC_FLAGS -Xcudafe --diag_suppress=${diag})
+endforeach()
+
+# setting default testing device
+if(NOT CUDA_TEST_DEVICE)
+  set(CUDA_TEST_DEVICE -1)
+endif()
+
+mark_as_advanced(CUDA_BUILD_CUBIN CUDA_BUILD_EMULATION CUDA_VERBOSE_BUILD)
+mark_as_advanced(CUDA_SDK_ROOT_DIR CUDA_SEPARABLE_COMPILATION)
+
+# Handle clang/libc++ issue
+if(APPLE)
+  caffe_detect_darwin_version(OSX_VERSION)
+
+  # OSX 10.9 and higher uses clang/libc++ by default, which is incompatible with old CUDA toolkits
+  if(OSX_VERSION VERSION_GREATER 10.8)
+    # enabled by default if and only if CUDA version is less than 7.0
+    caffe_option(USE_libstdcpp "Use libstdc++ instead of libc++" (CUDA_VERSION VERSION_LESS 7.0))
+  endif()
+endif()
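
As a sketch of how the architecture selection above plays out, a hypothetical manual configuration for a single Kepler device would be invoked and expanded roughly as follows:

    cmake -DCUDA_ARCH_NAME=Manual -DCUDA_ARCH_BIN="35" -DCUDA_ARCH_PTX="35" ..
    # caffe_select_nvcc_arch_flags() then emits (readable form: "sm_35 compute_35"):
    #   -gencode arch=compute_35,code=sm_35
    #   -gencode arch=compute_35,code=compute_35
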
diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake
new file mode 100644
index 0000000..7cae5c9
--- /dev/null
+++ b/cmake/Dependencies.cmake
@@ -0,0 +1,157 @@
+# This list is required for static linking and exported to CaffeConfig.cmake
+set(Caffe_LINKER_LIBS "")
+
+# ---[ Boost
+find_package(Boost 1.46 REQUIRED COMPONENTS system thread)
+include_directories(SYSTEM ${Boost_INCLUDE_DIR})
+list(APPEND Caffe_LINKER_LIBS ${Boost_LIBRARIES})
+
+# ---[ Threads
+find_package(Threads REQUIRED)
+list(APPEND Caffe_LINKER_LIBS ${CMAKE_THREAD_LIBS_INIT})
+
+# ---[ Google-glog
+include("cmake/External/glog.cmake")
+include_directories(SYSTEM ${GLOG_INCLUDE_DIRS})
+list(APPEND Caffe_LINKER_LIBS ${GLOG_LIBRARIES})
+
+# ---[ Google-gflags
+include("cmake/External/gflags.cmake")
+include_directories(SYSTEM ${GFLAGS_INCLUDE_DIRS})
+list(APPEND Caffe_LINKER_LIBS ${GFLAGS_LIBRARIES})
+
+# ---[ Google-protobuf
+include(cmake/ProtoBuf.cmake)
+
+# ---[ HDF5
+find_package(HDF5 COMPONENTS HL REQUIRED)
+include_directories(SYSTEM ${HDF5_INCLUDE_DIRS} ${HDF5_HL_INCLUDE_DIR})
+list(APPEND Caffe_LINKER_LIBS ${HDF5_LIBRARIES})
+
+# ---[ LMDB
+find_package(LMDB REQUIRED)
+include_directories(SYSTEM ${LMDB_INCLUDE_DIR})
+list(APPEND Caffe_LINKER_LIBS ${LMDB_LIBRARIES})
+
+# ---[ LevelDB
+find_package(LevelDB REQUIRED)
+include_directories(SYSTEM ${LevelDB_INCLUDE})
+list(APPEND Caffe_LINKER_LIBS ${LevelDB_LIBRARIES})
+
+# ---[ Snappy
+find_package(Snappy REQUIRED)
+include_directories(SYSTEM ${Snappy_INCLUDE_DIR})
+list(APPEND Caffe_LINKER_LIBS ${Snappy_LIBRARIES})
+
+# ---[ CUDA
+include(cmake/Cuda.cmake)
+if(NOT HAVE_CUDA)
+  if(CPU_ONLY)
+    message("-- CUDA is disabled. Building without it...")
+  else()
+    message("-- CUDA is not detected by cmake. Building without it...")
+  endif()
+
+  # TODO: remove this non-cross-platform define in the future. Use caffe_config.h instead.
+  add_definitions(-DCPU_ONLY)
+endif()
+
+# ---[ OpenCV
+find_package(OpenCV QUIET COMPONENTS core highgui imgproc imgcodecs)
+if(NOT OpenCV_FOUND) # if not OpenCV 3.x, then imgcodecs are not found
+  find_package(OpenCV REQUIRED COMPONENTS core highgui imgproc)
+endif()
+include_directories(SYSTEM ${OpenCV_INCLUDE_DIRS})
+list(APPEND Caffe_LINKER_LIBS ${OpenCV_LIBS})
+message(STATUS "OpenCV found (${OpenCV_CONFIG_PATH})")
+
+# ---[ BLAS
+if(NOT APPLE)
+  set(BLAS "Atlas" CACHE STRING "Selected BLAS library")
+  set_property(CACHE BLAS PROPERTY STRINGS "Atlas;Open;MKL")
+
+  if(BLAS STREQUAL "Atlas" OR BLAS STREQUAL "atlas")
+    find_package(Atlas REQUIRED)
+    include_directories(SYSTEM ${Atlas_INCLUDE_DIR})
+    list(APPEND Caffe_LINKER_LIBS ${Atlas_LIBRARIES})
+  elseif(BLAS STREQUAL "Open" OR BLAS STREQUAL "open")
+    find_package(OpenBLAS REQUIRED)
+    include_directories(SYSTEM ${OpenBLAS_INCLUDE_DIR})
+    list(APPEND Caffe_LINKER_LIBS ${OpenBLAS_LIB})
+  elseif(BLAS STREQUAL "MKL" OR BLAS STREQUAL "mkl")
+    find_package(MKL REQUIRED)
+    include_directories(SYSTEM ${MKL_INCLUDE_DIR})
+    list(APPEND Caffe_LINKER_LIBS ${MKL_LIBRARIES})
+    add_definitions(-DUSE_MKL)
+  endif()
+elseif(APPLE)
+  find_package(vecLib REQUIRED)
+  include_directories(SYSTEM ${vecLib_INCLUDE_DIR})
+  list(APPEND Caffe_LINKER_LIBS ${vecLib_LINKER_LIBS})
+endif()
+
+# ---[ Python
+if(BUILD_python)
+  if(NOT "${python_version}" VERSION_LESS "3.0.0")
+    # use python3
+    find_package(PythonInterp 3.0)
+    find_package(PythonLibs 3.0)
+    find_package(NumPy 1.7.1)
+    # Find the matching boost python implementation
+    set(version ${PYTHONLIBS_VERSION_STRING})
+    
+    STRING( REPLACE "." "" boost_py_version ${version} )
+    find_package(Boost 1.46 COMPONENTS "python-py${boost_py_version}")
+    set(Boost_PYTHON_FOUND ${Boost_PYTHON-PY${boost_py_version}_FOUND})
+    
+    while(NOT "${version}" STREQUAL "" AND NOT Boost_PYTHON_FOUND)
+      STRING( REGEX REPLACE "([0-9.]+).[0-9]+" "\\1" version ${version} )
+      STRING( REGEX MATCHALL "([0-9.]+).[0-9]+" has_more_version ${version} )
+      if("${has_more_version}" STREQUAL "")
+        break()
+      endif()
+      
+      STRING( REPLACE "." "" boost_py_version ${version} )
+      find_package(Boost 1.46 COMPONENTS "python-py${boost_py_version}")
+      set(Boost_PYTHON_FOUND ${Boost_PYTHON-PY${boost_py_version}_FOUND})
+    endwhile()
+    if(NOT Boost_PYTHON_FOUND)
+      find_package(Boost 1.46 COMPONENTS python)
+    endif()
+  else()
+    # disable Python 3 search
+    find_package(PythonInterp 2.7)
+    find_package(PythonLibs 2.7)
+    find_package(NumPy 1.7.1)
+    find_package(Boost 1.46 COMPONENTS python)
+  endif()
+  if(PYTHONLIBS_FOUND AND NUMPY_FOUND AND Boost_PYTHON_FOUND)
+    set(HAVE_PYTHON TRUE)
+    if(BUILD_python_layer)
+      add_definitions(-DWITH_PYTHON_LAYER)
+      include_directories(SYSTEM ${PYTHON_INCLUDE_DIRS} ${NUMPY_INCLUDE_DIR} ${Boost_INCLUDE_DIRS})
+      list(APPEND Caffe_LINKER_LIBS ${PYTHON_LIBRARIES} ${Boost_LIBRARIES})
+    endif()
+  endif()
+endif()
+
+# ---[ Matlab
+if(BUILD_matlab)
+  find_package(MatlabMex)
+  if(MATLABMEX_FOUND)
+    set(HAVE_MATLAB TRUE)
+  endif()
+
+  # sudo apt-get install liboctave-dev
+  find_program(Octave_compiler NAMES mkoctfile DOC "Octave C++ compiler")
+
+  if(HAVE_MATLAB AND Octave_compiler)
+    set(Matlab_build_mex_using "Matlab" CACHE STRING "Select Matlab or Octave if both detected")
+    set_property(CACHE Matlab_build_mex_using PROPERTY STRINGS "Matlab;Octave")
+  endif()
+endif()
+
+# ---[ Doxygen
+if(BUILD_docs)
+  find_package(Doxygen)
+endif()
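
A usage sketch of the switches wired up above (values are examples only): a CPU-only build against OpenBLAS instead of the default ATLAS could be configured as

    cmake -DCPU_ONLY=ON -DBLAS=Open ..
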
diff --git a/cmake/External/gflags.cmake b/cmake/External/gflags.cmake
new file mode 100644
index 0000000..e3dba04
--- /dev/null
+++ b/cmake/External/gflags.cmake
@@ -0,0 +1,56 @@
+if (NOT __GFLAGS_INCLUDED) # guard against multiple includes
+  set(__GFLAGS_INCLUDED TRUE)
+
+  # use the system-wide gflags if present
+  find_package(GFlags)
+  if (GFLAGS_FOUND)
+    set(GFLAGS_EXTERNAL FALSE)
+  else()
+    # gflags will use pthreads if it's available in the system, so we must link with it
+    find_package(Threads)
+
+    # build directory
+    set(gflags_PREFIX ${CMAKE_BINARY_DIR}/external/gflags-prefix)
+    # install directory
+    set(gflags_INSTALL ${CMAKE_BINARY_DIR}/external/gflags-install)
+
+    # we build gflags statically, but want to link it into the caffe shared library
+    # this requires position-independent code
+    if (UNIX)
+        set(GFLAGS_EXTRA_COMPILER_FLAGS "-fPIC")
+    endif()
+
+    set(GFLAGS_CXX_FLAGS ${CMAKE_CXX_FLAGS} ${GFLAGS_EXTRA_COMPILER_FLAGS})
+    set(GFLAGS_C_FLAGS ${CMAKE_C_FLAGS} ${GFLAGS_EXTRA_COMPILER_FLAGS})
+
+    ExternalProject_Add(gflags
+      PREFIX ${gflags_PREFIX}
+      GIT_REPOSITORY "https://github.com/gflags/gflags.git"
+      GIT_TAG "v2.1.2"
+      UPDATE_COMMAND ""
+      INSTALL_DIR ${gflags_INSTALL}
+      CMAKE_ARGS -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
+                 -DCMAKE_INSTALL_PREFIX=${gflags_INSTALL}
+                 -DBUILD_SHARED_LIBS=OFF
+                 -DBUILD_STATIC_LIBS=ON
+                 -DBUILD_PACKAGING=OFF
+                 -DBUILD_TESTING=OFF
+                 -DBUILD_NC_TESTS=OFF
+                 -DBUILD_CONFIG_TESTS=OFF
+                 -DINSTALL_HEADERS=ON
+                 -DCMAKE_C_FLAGS=${GFLAGS_C_FLAGS}
+                 -DCMAKE_CXX_FLAGS=${GFLAGS_CXX_FLAGS}
+      LOG_DOWNLOAD 1
+      LOG_INSTALL 1
+      )
+
+    set(GFLAGS_FOUND TRUE)
+    set(GFLAGS_INCLUDE_DIRS ${gflags_INSTALL}/include)
+    set(GFLAGS_LIBRARIES ${gflags_INSTALL}/lib/libgflags.a ${CMAKE_THREAD_LIBS_INIT})
+    set(GFLAGS_LIBRARY_DIRS ${gflags_INSTALL}/lib)
+    set(GFLAGS_EXTERNAL TRUE)
+
+    list(APPEND external_project_dependencies gflags)
+  endif()
+
+endif()
diff --git a/cmake/External/glog.cmake b/cmake/External/glog.cmake
new file mode 100644
index 0000000..a44672f
--- /dev/null
+++ b/cmake/External/glog.cmake
@@ -0,0 +1,56 @@
+# glog depends on gflags
+include("cmake/External/gflags.cmake")
+
+if (NOT __GLOG_INCLUDED)
+  set(__GLOG_INCLUDED TRUE)
+
+  # try the system-wide glog first
+  find_package(Glog)
+  if (GLOG_FOUND)
+      set(GLOG_EXTERNAL FALSE)
+  else()
+    # fetch and build glog from github
+
+    # build directory
+    set(glog_PREFIX ${CMAKE_BINARY_DIR}/external/glog-prefix)
+    # install directory
+    set(glog_INSTALL ${CMAKE_BINARY_DIR}/external/glog-install)
+
+    # we build glog statically, but want to link it into the caffe shared library
+    # this requires position-independent code
+    if (UNIX)
+      set(GLOG_EXTRA_COMPILER_FLAGS "-fPIC")
+    endif()
+
+    set(GLOG_CXX_FLAGS ${CMAKE_CXX_FLAGS} ${GLOG_EXTRA_COMPILER_FLAGS})
+    set(GLOG_C_FLAGS ${CMAKE_C_FLAGS} ${GLOG_EXTRA_COMPILER_FLAGS})
+
+    # depend on gflags if we're also building it
+    if (GFLAGS_EXTERNAL)
+      set(GLOG_DEPENDS gflags)
+    endif()
+
+    ExternalProject_Add(glog
+      DEPENDS ${GLOG_DEPENDS}
+      PREFIX ${glog_PREFIX}
+      GIT_REPOSITORY "https://github.com/google/glog"
+      GIT_TAG "v0.3.4"
+      UPDATE_COMMAND ""
+      INSTALL_DIR ${glog_INSTALL}
+      CONFIGURE_COMMAND env "CFLAGS=${GLOG_C_FLAGS}" "CXXFLAGS=${GLOG_CXX_FLAGS}" ${glog_PREFIX}/src/glog/configure --prefix=${glog_INSTALL} --enable-shared=no --enable-static=yes --with-gflags=${GFLAGS_LIBRARY_DIRS}/..
+      LOG_DOWNLOAD 1
+      LOG_CONFIGURE 1
+      LOG_INSTALL 1
+      )
+
+    set(GLOG_FOUND TRUE)
+    set(GLOG_INCLUDE_DIRS ${glog_INSTALL}/include)
+    set(GLOG_LIBRARIES ${GFLAGS_LIBRARIES} ${glog_INSTALL}/lib/libglog.a)
+    set(GLOG_LIBRARY_DIRS ${glog_INSTALL}/lib)
+    set(GLOG_EXTERNAL TRUE)
+
+    list(APPEND external_project_dependencies glog)
+  endif()
+
+endif()
+
diff --git a/cmake/Misc.cmake b/cmake/Misc.cmake
new file mode 100644
index 0000000..7676754
--- /dev/null
+++ b/cmake/Misc.cmake
@@ -0,0 +1,52 @@
+# ---[ Configuration types
+set(CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "Possible configurations" FORCE)
+mark_as_advanced(CMAKE_CONFIGURATION_TYPES)
+
+if(DEFINED CMAKE_BUILD_TYPE)
+  set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS ${CMAKE_CONFIGURATION_TYPES})
+endif()
+
+# --[ If user doesn't specify build type then assume release
+if("${CMAKE_BUILD_TYPE}" STREQUAL "")
+  set(CMAKE_BUILD_TYPE Release)
+endif()
+
+if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
+  set(CMAKE_COMPILER_IS_CLANGXX TRUE)
+endif()
+
+# ---[ Solution folders
+caffe_option(USE_PROJECT_FOLDERS "IDE Solution folders" (MSVC_IDE OR CMAKE_GENERATOR MATCHES Xcode) )
+
+if(USE_PROJECT_FOLDERS)
+  set_property(GLOBAL PROPERTY USE_FOLDERS ON)
+  set_property(GLOBAL PROPERTY PREDEFINED_TARGETS_FOLDER "CMakeTargets")
+endif()
+
+# ---[ Install options
+if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
+  set(CMAKE_INSTALL_PREFIX "${PROJECT_BINARY_DIR}/install" CACHE PATH "Default install path" FORCE)
+endif()
+
+# ---[ RPATH settings
+set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE CACHE BOOLEAN "Use link paths for shared library rpath")
+set(CMAKE_MACOSX_RPATH TRUE)
+
+list(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES ${CMAKE_INSTALL_PREFIX}/lib __is_system_dir)
+if(${__is_system_dir} STREQUAL -1)
+  set(CMAKE_INSTALL_RPATH ${CMAKE_INSTALL_PREFIX}/lib)
+endif()
+
+# ---[ Funny target
+if(UNIX OR APPLE)
+  add_custom_target(symlink_to_build COMMAND "ln" "-sf" "${PROJECT_BINARY_DIR}" "${PROJECT_SOURCE_DIR}/build"
+                                     COMMENT "Adding symlink: <caffe_root>/build -> ${PROJECT_BINARY_DIR}" )
+endif()
+
+# ---[ Set debug postfix
+set(Caffe_DEBUG_POSTFIX "-d")
+
+set(CAffe_POSTFIX "")
+if(CMAKE_BUILD_TYPE MATCHES "Debug")
+  set(CAffe_POSTFIX ${Caffe_DEBUG_POSTFIX})
+endif()
diff --git a/cmake/Modules/FindAtlas.cmake b/cmake/Modules/FindAtlas.cmake
new file mode 100644
index 0000000..6e15643
--- /dev/null
+++ b/cmake/Modules/FindAtlas.cmake
@@ -0,0 +1,52 @@
+# Find the Atlas (and Lapack) libraries
+#
+# The following variables are optionally searched for defaults
+#  Atlas_ROOT_DIR:            Base directory where all Atlas components are found
+#
+# The following are set after configuration is done:
+#  Atlas_FOUND
+#  Atlas_INCLUDE_DIRS
+#  Atlas_LIBRARIES
+#  Atlas_LIBRARY_DIRS
+
+set(Atlas_INCLUDE_SEARCH_PATHS
+  /usr/include/atlas
+  /usr/include/atlas-base
+  $ENV{Atlas_ROOT_DIR}
+  $ENV{Atlas_ROOT_DIR}/include
+)
+
+set(Atlas_LIB_SEARCH_PATHS
+  /usr/lib/atlas
+  /usr/lib/atlas-base
+  $ENV{Atlas_ROOT_DIR}
+  $ENV{Atlas_ROOT_DIR}/lib
+)
+
+find_path(Atlas_CBLAS_INCLUDE_DIR   NAMES cblas.h   PATHS ${Atlas_INCLUDE_SEARCH_PATHS})
+find_path(Atlas_CLAPACK_INCLUDE_DIR NAMES clapack.h PATHS ${Atlas_INCLUDE_SEARCH_PATHS})
+
+find_library(Atlas_CBLAS_LIBRARY NAMES  ptcblas_r ptcblas cblas_r cblas PATHS ${Atlas_LIB_SEARCH_PATHS})
+find_library(Atlas_BLAS_LIBRARY NAMES   atlas_r   atlas                 PATHS ${Atlas_LIB_SEARCH_PATHS})
+find_library(Atlas_LAPACK_LIBRARY NAMES alapack_r alapack lapack_atlas  PATHS ${Atlas_LIB_SEARCH_PATHS})
+
+set(LOOKED_FOR
+  Atlas_CBLAS_INCLUDE_DIR
+  Atlas_CLAPACK_INCLUDE_DIR
+
+  Atlas_CBLAS_LIBRARY
+  Atlas_BLAS_LIBRARY
+  Atlas_LAPACK_LIBRARY
+)
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(Atlas DEFAULT_MSG ${LOOKED_FOR})
+
+if(ATLAS_FOUND)
+  set(Atlas_INCLUDE_DIR ${Atlas_CBLAS_INCLUDE_DIR} ${Atlas_CLAPACK_INCLUDE_DIR})
+  set(Atlas_LIBRARIES ${Atlas_LAPACK_LIBRARY} ${Atlas_CBLAS_LIBRARY} ${Atlas_BLAS_LIBRARY})
+  mark_as_advanced(${LOOKED_FOR})
+
+  message(STATUS "Found Atlas (include: ${Atlas_CBLAS_INCLUDE_DIR}, library: ${Atlas_BLAS_LIBRARY})")
+endif(ATLAS_FOUND)
+
diff --git a/cmake/Modules/FindGFlags.cmake b/cmake/Modules/FindGFlags.cmake
new file mode 100644
index 0000000..29b60f0
--- /dev/null
+++ b/cmake/Modules/FindGFlags.cmake
@@ -0,0 +1,50 @@
+# - Try to find GFLAGS
+#
+# The following variables are optionally searched for defaults
+#  GFLAGS_ROOT_DIR:            Base directory where all GFLAGS components are found
+#
+# The following are set after configuration is done:
+#  GFLAGS_FOUND
+#  GFLAGS_INCLUDE_DIRS
+#  GFLAGS_LIBRARIES
+#  GFLAGS_LIBRARY_DIRS
+
+include(FindPackageHandleStandardArgs)
+
+set(GFLAGS_ROOT_DIR "" CACHE PATH "Folder containing gflags")
+
+# We are testing only a couple of files in the include directories
+if(WIN32)
+    find_path(GFLAGS_INCLUDE_DIR gflags/gflags.h
+        PATHS ${GFLAGS_ROOT_DIR}/src/windows)
+else()
+    find_path(GFLAGS_INCLUDE_DIR gflags/gflags.h
+        PATHS ${GFLAGS_ROOT_DIR})
+endif()
+
+if(MSVC)
+    find_library(GFLAGS_LIBRARY_RELEASE
+        NAMES libgflags
+        PATHS ${GFLAGS_ROOT_DIR}
+        PATH_SUFFIXES Release)
+
+    find_library(GFLAGS_LIBRARY_DEBUG
+        NAMES libgflags-debug
+        PATHS ${GFLAGS_ROOT_DIR}
+        PATH_SUFFIXES Debug)
+
+    set(GFLAGS_LIBRARY optimized ${GFLAGS_LIBRARY_RELEASE} debug ${GFLAGS_LIBRARY_DEBUG})
+else()
+    find_library(GFLAGS_LIBRARY gflags)
+endif()
+
+find_package_handle_standard_args(GFlags DEFAULT_MSG GFLAGS_INCLUDE_DIR GFLAGS_LIBRARY)
+
+
+if(GFLAGS_FOUND)
+    set(GFLAGS_INCLUDE_DIRS ${GFLAGS_INCLUDE_DIR})
+    set(GFLAGS_LIBRARIES ${GFLAGS_LIBRARY})
+    message(STATUS "Found gflags  (include: ${GFLAGS_INCLUDE_DIR}, library: ${GFLAGS_LIBRARY})")
+    mark_as_advanced(GFLAGS_LIBRARY_DEBUG GFLAGS_LIBRARY_RELEASE
+                     GFLAGS_LIBRARY GFLAGS_INCLUDE_DIR GFLAGS_ROOT_DIR)
+endif()
diff --git a/cmake/Modules/FindGlog.cmake b/cmake/Modules/FindGlog.cmake
new file mode 100644
index 0000000..99abbe4
--- /dev/null
+++ b/cmake/Modules/FindGlog.cmake
@@ -0,0 +1,48 @@
+# - Try to find Glog
+#
+# The following variables are optionally searched for defaults
+#  GLOG_ROOT_DIR:            Base directory where all GLOG components are found
+#
+# The following are set after configuration is done:
+#  GLOG_FOUND
+#  GLOG_INCLUDE_DIRS
+#  GLOG_LIBRARIES
+#  GLOG_LIBRARY_DIRS
+
+include(FindPackageHandleStandardArgs)
+
+set(GLOG_ROOT_DIR "" CACHE PATH "Folder containing Google glog")
+
+if(WIN32)
+    find_path(GLOG_INCLUDE_DIR glog/logging.h
+        PATHS ${GLOG_ROOT_DIR}/src/windows)
+else()
+    find_path(GLOG_INCLUDE_DIR glog/logging.h
+        PATHS ${GLOG_ROOT_DIR})
+endif()
+
+if(MSVC)
+    find_library(GLOG_LIBRARY_RELEASE libglog_static
+        PATHS ${GLOG_ROOT_DIR}
+        PATH_SUFFIXES Release)
+
+    find_library(GLOG_LIBRARY_DEBUG libglog_static
+        PATHS ${GLOG_ROOT_DIR}
+        PATH_SUFFIXES Debug)
+
+    set(GLOG_LIBRARY optimized ${GLOG_LIBRARY_RELEASE} debug ${GLOG_LIBRARY_DEBUG})
+else()
+    find_library(GLOG_LIBRARY glog
+        PATHS ${GLOG_ROOT_DIR}
+        PATH_SUFFIXES lib lib64)
+endif()
+
+find_package_handle_standard_args(Glog DEFAULT_MSG GLOG_INCLUDE_DIR GLOG_LIBRARY)
+
+if(GLOG_FOUND)
+  set(GLOG_INCLUDE_DIRS ${GLOG_INCLUDE_DIR})
+  set(GLOG_LIBRARIES ${GLOG_LIBRARY})
+  message(STATUS "Found glog    (include: ${GLOG_INCLUDE_DIR}, library: ${GLOG_LIBRARY})")
+  mark_as_advanced(GLOG_ROOT_DIR GLOG_LIBRARY_RELEASE GLOG_LIBRARY_DEBUG
+                                 GLOG_LIBRARY GLOG_INCLUDE_DIR)
+endif()
diff --git a/cmake/Modules/FindLAPACK.cmake b/cmake/Modules/FindLAPACK.cmake
new file mode 100644
index 0000000..9641c45
--- /dev/null
+++ b/cmake/Modules/FindLAPACK.cmake
@@ -0,0 +1,190 @@
+# - Find LAPACK library
+# This module finds an installed fortran library that implements the LAPACK
+# linear-algebra interface (see http://www.netlib.org/lapack/).
+#
+# The approach follows that taken for the autoconf macro file, acx_lapack.m4
+# (distributed at http://ac-archive.sourceforge.net/ac-archive/acx_lapack.html).
+#
+# This module sets the following variables:
+#  LAPACK_FOUND - set to true if a library implementing the LAPACK interface is found
+#  LAPACK_LIBRARIES - list of libraries (using full path name) for LAPACK
+
+# Note: I do not think it is a good idea to mix up different BLAS/LAPACK versions.
+# Hence, this script wants to find a LAPACK library matching your BLAS library.
+
+# Do nothing if LAPACK was found before
+IF(NOT LAPACK_FOUND)
+
+SET(LAPACK_LIBRARIES)
+SET(LAPACK_INFO)
+
+IF(LAPACK_FIND_QUIETLY OR NOT LAPACK_FIND_REQUIRED)
+  FIND_PACKAGE(BLAS)
+ELSE(LAPACK_FIND_QUIETLY OR NOT LAPACK_FIND_REQUIRED)
+  FIND_PACKAGE(BLAS REQUIRED)
+ENDIF(LAPACK_FIND_QUIETLY OR NOT LAPACK_FIND_REQUIRED)
+
+# Old search lapack script
+include(CheckFortranFunctionExists)
+
+macro(Check_Lapack_Libraries LIBRARIES _prefix _name _flags _list _blas)
+  # This macro checks for the existence of the combination of fortran libraries
+  # given by _list.  If the combination is found, this macro checks (using the
+  # Check_Fortran_Function_Exists macro) whether we can link against that library
+  # combination using the name of a routine given by _name using the linker
+  # flags given by _flags.  If the combination of libraries is found and passes
+  # the link test, LIBRARIES is set to the list of complete library paths that
+  # have been found.  Otherwise, LIBRARIES is set to FALSE.
+  # N.B. _prefix is the prefix applied to the names of all cached variables that
+  # are generated internally and marked advanced by this macro.
+  set(_libraries_work TRUE)
+  set(${LIBRARIES})
+  set(_combined_name)
+  foreach(_library ${_list})
+    set(_combined_name ${_combined_name}_${_library})
+    if(_libraries_work)
+      if (WIN32)
+        find_library(${_prefix}_${_library}_LIBRARY
+          NAMES ${_library} PATHS ENV LIB PATHS ENV PATH)
+      else (WIN32)
+        if(APPLE)
+          find_library(${_prefix}_${_library}_LIBRARY
+            NAMES ${_library}
+            PATHS /usr/local/lib /usr/lib /usr/local/lib64 /usr/lib64
+            ENV DYLD_LIBRARY_PATH)
+        else(APPLE)
+          find_library(${_prefix}_${_library}_LIBRARY
+            NAMES ${_library}
+            PATHS /usr/local/lib /usr/lib /usr/local/lib64 /usr/lib64
+            ENV LD_LIBRARY_PATH)
+        endif(APPLE)
+      endif(WIN32)
+      mark_as_advanced(${_prefix}_${_library}_LIBRARY)
+      set(${LIBRARIES} ${${LIBRARIES}} ${${_prefix}_${_library}_LIBRARY})
+      set(_libraries_work ${${_prefix}_${_library}_LIBRARY})
+    endif(_libraries_work)
+  endforeach(_library ${_list})
+  if(_libraries_work)
+    # Test this combination of libraries.
+    set(CMAKE_REQUIRED_LIBRARIES ${_flags} ${${LIBRARIES}} ${_blas})
+    if (CMAKE_Fortran_COMPILER_WORKS)
+      check_fortran_function_exists(${_name} ${_prefix}${_combined_name}_WORKS)
+    else (CMAKE_Fortran_COMPILER_WORKS)
+      check_function_exists("${_name}_" ${_prefix}${_combined_name}_WORKS)
+    endif (CMAKE_Fortran_COMPILER_WORKS)
+    set(CMAKE_REQUIRED_LIBRARIES)
+    mark_as_advanced(${_prefix}${_combined_name}_WORKS)
+    set(_libraries_work ${${_prefix}${_combined_name}_WORKS})
+  endif(_libraries_work)
+  if(NOT _libraries_work)
+    set(${LIBRARIES} FALSE)
+  endif(NOT _libraries_work)
+endmacro(Check_Lapack_Libraries)
+
+
+if(BLAS_FOUND)
+
+  # Intel MKL
+  IF((NOT LAPACK_INFO) AND (BLAS_INFO STREQUAL "mkl"))
+    IF(MKL_LAPACK_LIBRARIES)
+      SET(LAPACK_LIBRARIES ${MKL_LAPACK_LIBRARIES} ${MKL_LIBRARIES})
+    ELSE(MKL_LAPACK_LIBRARIES)
+      SET(LAPACK_LIBRARIES ${MKL_LIBRARIES})
+    ENDIF(MKL_LAPACK_LIBRARIES)
+    SET(LAPACK_INCLUDE_DIR ${MKL_INCLUDE_DIR})
+    SET(LAPACK_INFO "mkl")
+  ENDIF()
+
+  # OpenBlas
+  IF((NOT LAPACK_INFO) AND (BLAS_INFO STREQUAL "open"))
+    SET(CMAKE_REQUIRED_LIBRARIES ${BLAS_LIBRARIES})
+    check_function_exists("cheev_" OPEN_LAPACK_WORKS)
+    if(OPEN_LAPACK_WORKS)
+      SET(LAPACK_INFO "open")
+    else()
+      message(STATUS "It seems OpenBlas has not been compiled with Lapack support")
+    endif()
+  endif()
+
+  # GotoBlas
+  IF((NOT LAPACK_INFO) AND (BLAS_INFO STREQUAL "goto"))
+    SET(CMAKE_REQUIRED_LIBRARIES ${BLAS_LIBRARIES})
+    check_function_exists("cheev_" GOTO_LAPACK_WORKS)
+    if(GOTO_LAPACK_WORKS)
+      SET(LAPACK_INFO "goto")
+    else()
+      message(STATUS "It seems GotoBlas has not been compiled with Lapack support")
+    endif()
+  endif()
+
+  # ACML
+  IF((NOT LAPACK_INFO) AND (BLAS_INFO STREQUAL "acml"))
+    SET(CMAKE_REQUIRED_LIBRARIES ${BLAS_LIBRARIES})
+    check_function_exists("cheev_" ACML_LAPACK_WORKS)
+    if(ACML_LAPACK_WORKS)
+      SET(LAPACK_INFO "acml")
+    else()
+      message(STATUS "Strangely, this ACML library does not support Lapack?!")
+    endif()
+  endif()
+
+  # Accelerate
+  IF((NOT LAPACK_INFO) AND (BLAS_INFO STREQUAL "accelerate"))
+    SET(CMAKE_REQUIRED_LIBRARIES ${BLAS_LIBRARIES})
+    check_function_exists("cheev_" ACCELERATE_LAPACK_WORKS)
+    if(ACCELERATE_LAPACK_WORKS)
+      SET(LAPACK_INFO "accelerate")
+    else()
+      message(STATUS "Strangely, this Accelerate library does not support Lapack?!")
+    endif()
+  endif()
+
+  # vecLib
+  IF((NOT LAPACK_INFO) AND (BLAS_INFO STREQUAL "veclib"))
+    SET(CMAKE_REQUIRED_LIBRARIES ${BLAS_LIBRARIES})
+    check_function_exists("cheev_" VECLIB_LAPACK_WORKS)
+    if(VECLIB_LAPACK_WORKS)
+      SET(LAPACK_INFO "veclib")
+    else()
+      message(STATUS "Strangely, this vecLib library does not support Lapack?!")
+    endif()
+  endif()
+
+  # Generic LAPACK library?
+  IF((NOT LAPACK_INFO) AND (BLAS_INFO STREQUAL "generic"))
+    check_lapack_libraries(
+      LAPACK_LIBRARIES
+      LAPACK
+      cheev
+      ""
+      "lapack"
+      "${BLAS_LIBRARIES}"
+      )
+    if(LAPACK_LIBRARIES)
+      SET(LAPACK_INFO "generic")
+    endif(LAPACK_LIBRARIES)
+  endif()
+
+else(BLAS_FOUND)
+  message(STATUS "LAPACK requires BLAS")
+endif(BLAS_FOUND)
+
+if(LAPACK_INFO)
+  set(LAPACK_FOUND TRUE)
+else(LAPACK_INFO)
+  set(LAPACK_FOUND FALSE)
+endif(LAPACK_INFO)
+
+IF (NOT LAPACK_FOUND AND LAPACK_FIND_REQUIRED)
+  message(FATAL_ERROR "Cannot find a library with LAPACK API. Please specify library location.")
+ENDIF (NOT LAPACK_FOUND AND LAPACK_FIND_REQUIRED)
+IF(NOT LAPACK_FIND_QUIETLY)
+  IF(LAPACK_FOUND)
+    MESSAGE(STATUS "Found a library with LAPACK API. (${LAPACK_INFO})")
+  ELSE(LAPACK_FOUND)
+    MESSAGE(STATUS "Cannot find a library with LAPACK API. Not using LAPACK.")
+  ENDIF(LAPACK_FOUND)
+ENDIF(NOT LAPACK_FIND_QUIETLY)
+
+# Do nothing if LAPACK was found before
+ENDIF(NOT LAPACK_FOUND)
diff --git a/cmake/Modules/FindLMDB.cmake b/cmake/Modules/FindLMDB.cmake
new file mode 100644
index 0000000..8a817fd
--- /dev/null
+++ b/cmake/Modules/FindLMDB.cmake
@@ -0,0 +1,28 @@
+# Try to find the LMDB libraries and headers
+#  LMDB_FOUND - system has LMDB lib
+#  LMDB_INCLUDE_DIR - the LMDB include directory
+#  LMDB_LIBRARIES - Libraries needed to use LMDB
+
+# FindCWD based on FindGMP by:
+# Copyright (c) 2006, Laurent Montel, <montel at kde.org>
+#
+# Redistribution and use is allowed according to the terms of the BSD license.
+
+# Adapted from FindCWD by:
+# Copyright 2013 Conrad Steenberg <conrad.steenberg at gmail.com>
+# Aug 31, 2013
+
+find_path(LMDB_INCLUDE_DIR NAMES  lmdb.h PATHS "$ENV{LMDB_DIR}/include")
+find_library(LMDB_LIBRARIES NAMES lmdb   PATHS "$ENV{LMDB_DIR}/lib" )
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(LMDB DEFAULT_MSG LMDB_INCLUDE_DIR LMDB_LIBRARIES)
+
+if(LMDB_FOUND)
+  message(STATUS "Found lmdb    (include: ${LMDB_INCLUDE_DIR}, library: ${LMDB_LIBRARIES})")
+  mark_as_advanced(LMDB_INCLUDE_DIR LMDB_LIBRARIES)
+
+  caffe_parse_header(${LMDB_INCLUDE_DIR}/lmdb.h
+                     LMDB_VERSION_LINES MDB_VERSION_MAJOR MDB_VERSION_MINOR MDB_VERSION_PATCH)
+  set(LMDB_VERSION "${MDB_VERSION_MAJOR}.${MDB_VERSION_MINOR}.${MDB_VERSION_PATCH}")
+endif()
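
Since the module above only consults the LMDB_DIR environment variable as a hint, a non-system LMDB install can be pointed to along these lines (the path is an example):

    LMDB_DIR=/opt/lmdb cmake ..
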
diff --git a/cmake/Modules/FindLevelDB.cmake b/cmake/Modules/FindLevelDB.cmake
new file mode 100644
index 0000000..97f08ac
--- /dev/null
+++ b/cmake/Modules/FindLevelDB.cmake
@@ -0,0 +1,44 @@
+# - Find LevelDB
+#
+#  LevelDB_INCLUDES  - List of LevelDB includes
+#  LevelDB_LIBRARIES - List of libraries when using LevelDB.
+#  LevelDB_FOUND     - True if LevelDB found.
+
+# Look for the header file.
+find_path(LevelDB_INCLUDE NAMES leveldb/db.h
+                          PATHS $ENV{LEVELDB_ROOT}/include /opt/local/include /usr/local/include /usr/include
+                          DOC "Path in which the file leveldb/db.h is located." )
+
+# Look for the library.
+find_library(LevelDB_LIBRARY NAMES leveldb
+                             PATHS /usr/lib $ENV{LEVELDB_ROOT}/lib
+                             DOC "Path to leveldb library." )
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(LevelDB DEFAULT_MSG LevelDB_INCLUDE LevelDB_LIBRARY)
+
+if(LEVELDB_FOUND)
+  message(STATUS "Found LevelDB (include: ${LevelDB_INCLUDE}, library: ${LevelDB_LIBRARY})")
+  set(LevelDB_INCLUDES ${LevelDB_INCLUDE})
+  set(LevelDB_LIBRARIES ${LevelDB_LIBRARY})
+  mark_as_advanced(LevelDB_INCLUDE LevelDB_LIBRARY)
+
+  if(EXISTS "${LevelDB_INCLUDE}/leveldb/db.h")
+    file(STRINGS "${LevelDB_INCLUDE}/leveldb/db.h" __version_lines
+           REGEX "static const int k[^V]+Version[ \t]+=[ \t]+[0-9]+;")
+
+    foreach(__line ${__version_lines})
+      if(__line MATCHES "[^k]+kMajorVersion[ \t]+=[ \t]+([0-9]+);")
+        set(LEVELDB_VERSION_MAJOR ${CMAKE_MATCH_1})
+      elseif(__line MATCHES "[^k]+kMinorVersion[ \t]+=[ \t]+([0-9]+);")
+        set(LEVELDB_VERSION_MINOR ${CMAKE_MATCH_1})
+      endif()
+    endforeach()
+
+    if(LEVELDB_VERSION_MAJOR AND LEVELDB_VERSION_MINOR)
+      set(LEVELDB_VERSION "${LEVELDB_VERSION_MAJOR}.${LEVELDB_VERSION_MINOR}")
+    endif()
+
+    caffe_clear_vars(__line __version_lines)
+  endif()
+endif()
diff --git a/cmake/Modules/FindMKL.cmake b/cmake/Modules/FindMKL.cmake
new file mode 100644
index 0000000..d2012db
--- /dev/null
+++ b/cmake/Modules/FindMKL.cmake
@@ -0,0 +1,110 @@
+# Find the MKL libraries
+#
+# Options:
+#
+#   MKL_USE_SINGLE_DYNAMIC_LIBRARY  : use single dynamic library interface
+#   MKL_USE_STATIC_LIBS             : use static libraries
+#   MKL_MULTI_THREADED              : use multi-threading
+#
+# This module defines the following variables:
+#
+#   MKL_FOUND            : True if MKL is found
+#   MKL_INCLUDE_DIR      : include directory
+#   MKL_LIBRARIES        : the libraries to link against.
+
+
+# ---[ Options
+caffe_option(MKL_USE_SINGLE_DYNAMIC_LIBRARY "Use single dynamic library interface" ON)
+caffe_option(MKL_USE_STATIC_LIBS "Use static libraries" OFF IF NOT MKL_USE_SINGLE_DYNAMIC_LIBRARY)
+caffe_option(MKL_MULTI_THREADED  "Use multi-threading"   ON IF NOT MKL_USE_SINGLE_DYNAMIC_LIBRARY)
+
+# ---[ Root folders
+set(INTEL_ROOT "/opt/intel" CACHE PATH "Folder containing Intel libraries")
+find_path(MKL_ROOT include/mkl.h PATHS $ENV{MKL_ROOT} ${INTEL_ROOT}/mkl
+                                   DOC "Folder containing MKL")
+
+# ---[ Find include dir
+find_path(MKL_INCLUDE_DIR mkl.h PATHS ${MKL_ROOT} PATH_SUFFIXES include)
+set(__looked_for MKL_INCLUDE_DIR)
+
+# ---[ Find libraries
+if(CMAKE_SIZEOF_VOID_P EQUAL 4)
+  set(__path_suffixes lib lib/ia32)
+else()
+  set(__path_suffixes lib lib/intel64)
+endif()
+
+set(__mkl_libs "")
+if(MKL_USE_SINGLE_DYNAMIC_LIBRARY)
+  list(APPEND __mkl_libs rt)
+else()
+  if(CMAKE_SIZEOF_VOID_P EQUAL 4)
+    if(WIN32)
+      list(APPEND __mkl_libs intel_c)
+    else()
+      list(APPEND __mkl_libs intel gf)
+    endif()
+  else()
+    list(APPEND __mkl_libs intel_lp64 gf_lp64)
+  endif()
+
+  if(MKL_MULTI_THREADED)
+    list(APPEND __mkl_libs intel_thread)
+  else()
+     list(APPEND __mkl_libs sequential)
+  endif()
+
+  list(APPEND __mkl_libs core cdft_core)
+endif()
+
+
+foreach (__lib ${__mkl_libs})
+  set(__mkl_lib "mkl_${__lib}")
+  string(TOUPPER ${__mkl_lib} __mkl_lib_upper)
+
+  if(MKL_USE_STATIC_LIBS)
+    set(__mkl_lib "lib${__mkl_lib}.a")
+  endif()
+
+  find_library(${__mkl_lib_upper}_LIBRARY
+        NAMES ${__mkl_lib}
+        PATHS ${MKL_ROOT} "${MKL_INCLUDE_DIR}/.."
+        PATH_SUFFIXES ${__path_suffixes}
+        DOC "The path to Intel(R) MKL ${__mkl_lib} library")
+  mark_as_advanced(${__mkl_lib_upper}_LIBRARY)
+
+  list(APPEND __looked_for ${__mkl_lib_upper}_LIBRARY)
+  list(APPEND MKL_LIBRARIES ${${__mkl_lib_upper}_LIBRARY})
+endforeach()
+
+
+if(NOT MKL_USE_SINGLE_DYNAMIC_LIBRARY)
+  if (MKL_USE_STATIC_LIBS)
+    set(__iomp5_libs iomp5 libiomp5mt.lib)
+  else()
+    set(__iomp5_libs iomp5 libiomp5md.lib)
+  endif()
+
+  if(WIN32)
+    find_path(INTEL_INCLUDE_DIR omp.h PATHS ${INTEL_ROOT} PATH_SUFFIXES include)
+    list(APPEND __looked_for INTEL_INCLUDE_DIR)
+  endif()
+
+  find_library(MKL_RTL_LIBRARY ${__iomp5_libs}
+     PATHS ${INTEL_RTL_ROOT} ${INTEL_ROOT}/compiler ${MKL_ROOT}/.. ${MKL_ROOT}/../compiler
+     PATH_SUFFIXES ${__path_suffixes}
+     DOC "Path to OpenMP runtime library")
+
+  list(APPEND __looked_for MKL_RTL_LIBRARY)
+  list(APPEND MKL_LIBRARIES ${MKL_RTL_LIBRARY})
+endif()
+
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(MKL DEFAULT_MSG ${__looked_for})
+
+if(MKL_FOUND)
+  message(STATUS "Found MKL (include: ${MKL_INCLUDE_DIR}, lib: ${MKL_LIBRARIES}")
+endif()
+
+caffe_clear_vars(__looked_for __mkl_libs __path_suffixes __lib_suffix __iomp5_libs)
diff --git a/cmake/Modules/FindMatlabMex.cmake b/cmake/Modules/FindMatlabMex.cmake
new file mode 100644
index 0000000..28ae65e
--- /dev/null
+++ b/cmake/Modules/FindMatlabMex.cmake
@@ -0,0 +1,48 @@
+# This module looks for MatlabMex compiler
+# Defines variables:
+#    Matlab_DIR    - Matlab root dir
+#    Matlab_mex    - path to mex compiler
+#    Matlab_mexext - path to mexext
+
+if(MSVC)
+  foreach(__ver "9.30" "7.14" "7.11" "7.10" "7.9" "7.8" "7.7")
+    get_filename_component(__matlab_root "[HKEY_LOCAL_MACHINE\\SOFTWARE\\MathWorks\\MATLAB\\${__ver};MATLABROOT]" ABSOLUTE)
+    if(__matlab_root)
+      break()
+    endif()
+  endforeach()
+endif()
+
+if(APPLE)
+  foreach(__ver "R2014b" "R2014a" "R2013b" "R2013a" "R2012b" "R2012a" "R2011b" "R2011a" "R2010b" "R2010a")
+    if(EXISTS /Applications/MATLAB_${__ver}.app)
+      set(__matlab_root /Applications/MATLAB_${__ver}.app)
+      break()
+    endif()
+  endforeach()
+endif()
+
+if(UNIX)
+   execute_process(COMMAND which matlab OUTPUT_STRIP_TRAILING_WHITESPACE
+                   OUTPUT_VARIABLE __out RESULT_VARIABLE __res)
+
+   if(__res MATCHES 0) # Suppress `readlink` warning if `which` returned nothing
+     execute_process(COMMAND which matlab  COMMAND xargs readlink
+                     COMMAND xargs dirname COMMAND xargs dirname COMMAND xargs echo -n
+                     OUTPUT_VARIABLE __matlab_root OUTPUT_STRIP_TRAILING_WHITESPACE)
+   endif()
+endif()
+
+
+find_path(Matlab_DIR NAMES bin/mex bin/mexext PATHS ${__matlab_root}
+                     DOC "Matlab directory" NO_DEFAULT_PATH)
+
+find_program(Matlab_mex    NAMES mex    mex.bat    HINTS ${Matlab_DIR} PATH_SUFFIXES bin NO_DEFAULT_PATH)
+find_program(Matlab_mexext NAMES mexext mexext.bat HINTS ${Matlab_DIR} PATH_SUFFIXES bin NO_DEFAULT_PATH)
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(MatlabMex DEFAULT_MSG Matlab_mex Matlab_mexext)
+
+if(MATLABMEX_FOUND)
+  mark_as_advanced(Matlab_mex Matlab_mexext)
+endif()
diff --git a/cmake/Modules/FindNumPy.cmake b/cmake/Modules/FindNumPy.cmake
new file mode 100644
index 0000000..a671494
--- /dev/null
+++ b/cmake/Modules/FindNumPy.cmake
@@ -0,0 +1,58 @@
+# - Find the NumPy libraries
+# This module finds if NumPy is installed, and sets the following variables
+# indicating where it is.
+#
+# TODO: Update to provide the libraries and paths for linking npymath lib.
+#
+#  NUMPY_FOUND               - was NumPy found
+#  NUMPY_VERSION             - the version of NumPy found as a string
+#  NUMPY_VERSION_MAJOR       - the major version number of NumPy
+#  NUMPY_VERSION_MINOR       - the minor version number of NumPy
+#  NUMPY_VERSION_PATCH       - the patch version number of NumPy
+#  NUMPY_VERSION_DECIMAL     - e.g. version 1.6.1 is 10601
+#  NUMPY_INCLUDE_DIR         - path to the NumPy include files
+
+unset(NUMPY_VERSION)
+unset(NUMPY_INCLUDE_DIR)
+
+if(PYTHONINTERP_FOUND)
+  execute_process(COMMAND "${PYTHON_EXECUTABLE}" "-c"
+    "import numpy as n; print(n.__version__); print(n.get_include());"
+    RESULT_VARIABLE __result
+    OUTPUT_VARIABLE __output
+    OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+  if(__result MATCHES 0)
+    string(REGEX REPLACE ";" "\\\\;" __values ${__output})
+    string(REGEX REPLACE "\r?\n" ";"    __values ${__values})
+    list(GET __values 0 NUMPY_VERSION)
+    list(GET __values 1 NUMPY_INCLUDE_DIR)
+
+    string(REGEX MATCH "^([0-9])+\\.([0-9])+\\.([0-9])+" __ver_check "${NUMPY_VERSION}")
+    if(NOT "${__ver_check}" STREQUAL "")
+      set(NUMPY_VERSION_MAJOR ${CMAKE_MATCH_1})
+      set(NUMPY_VERSION_MINOR ${CMAKE_MATCH_2})
+      set(NUMPY_VERSION_PATCH ${CMAKE_MATCH_3})
+      math(EXPR NUMPY_VERSION_DECIMAL
+        "(${NUMPY_VERSION_MAJOR} * 10000) + (${NUMPY_VERSION_MINOR} * 100) + ${NUMPY_VERSION_PATCH}")
+      string(REGEX REPLACE "\\\\" "/"  NUMPY_INCLUDE_DIR ${NUMPY_INCLUDE_DIR})
+    else()
+     unset(NUMPY_VERSION)
+     unset(NUMPY_INCLUDE_DIR)
+     message(STATUS "Requested NumPy version and include path, but got instead:\n${__output}\n")
+    endif()
+  endif()
+else()
+  message(STATUS "To find NumPy Python interpretator is required to be found.")
+endif()
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(NumPy REQUIRED_VARS NUMPY_INCLUDE_DIR NUMPY_VERSION
+                                        VERSION_VAR   NUMPY_VERSION)
+
+if(NUMPY_FOUND)
+  message(STATUS "NumPy ver. ${NUMPY_VERSION} found (include: ${NUMPY_INCLUDE_DIR})")
+endif()
+
+caffe_clear_vars(__result __output __error_value __values __ver_check)
+
diff --git a/cmake/Modules/FindOpenBLAS.cmake b/cmake/Modules/FindOpenBLAS.cmake
new file mode 100644
index 0000000..b843492
--- /dev/null
+++ b/cmake/Modules/FindOpenBLAS.cmake
@@ -0,0 +1,62 @@
+
+
+SET(Open_BLAS_INCLUDE_SEARCH_PATHS
+  /usr/include
+  /usr/include/openblas-base
+  /usr/local/include
+  /usr/local/include/openblas-base
+  /opt/OpenBLAS/include
+  $ENV{OpenBLAS_HOME}
+  $ENV{OpenBLAS_HOME}/include
+)
+
+SET(Open_BLAS_LIB_SEARCH_PATHS
+        /lib/
+        /lib/openblas-base
+        /lib64/
+        /usr/lib
+        /usr/lib/openblas-base
+        /usr/lib64
+        /usr/local/lib
+        /usr/local/lib64
+        /opt/OpenBLAS/lib
+        $ENV{OpenBLAS}
+        $ENV{OpenBLAS}/lib
+        $ENV{OpenBLAS_HOME}
+        $ENV{OpenBLAS_HOME}/lib
+ )
+
+FIND_PATH(OpenBLAS_INCLUDE_DIR NAMES cblas.h PATHS ${Open_BLAS_INCLUDE_SEARCH_PATHS})
+FIND_LIBRARY(OpenBLAS_LIB NAMES openblas PATHS ${Open_BLAS_LIB_SEARCH_PATHS})
+
+SET(OpenBLAS_FOUND ON)
+
+#    Check include files
+IF(NOT OpenBLAS_INCLUDE_DIR)
+    SET(OpenBLAS_FOUND OFF)
+    MESSAGE(STATUS "Could not find OpenBLAS include. Turning OpenBLAS_FOUND off")
+ENDIF()
+
+#    Check libraries
+IF(NOT OpenBLAS_LIB)
+    SET(OpenBLAS_FOUND OFF)
+    MESSAGE(STATUS "Could not find OpenBLAS lib. Turning OpenBLAS_FOUND off")
+ENDIF()
+
+IF (OpenBLAS_FOUND)
+  IF (NOT OpenBLAS_FIND_QUIETLY)
+    MESSAGE(STATUS "Found OpenBLAS libraries: ${OpenBLAS_LIB}")
+    MESSAGE(STATUS "Found OpenBLAS include: ${OpenBLAS_INCLUDE_DIR}")
+  ENDIF (NOT OpenBLAS_FIND_QUIETLY)
+ELSE (OpenBLAS_FOUND)
+  IF (OpenBLAS_FIND_REQUIRED)
+    MESSAGE(FATAL_ERROR "Could not find OpenBLAS")
+  ENDIF (OpenBLAS_FIND_REQUIRED)
+ENDIF (OpenBLAS_FOUND)
+
+MARK_AS_ADVANCED(
+    OpenBLAS_INCLUDE_DIR
+    OpenBLAS_LIB
+    OpenBLAS
+)
+
diff --git a/cmake/Modules/FindSnappy.cmake b/cmake/Modules/FindSnappy.cmake
new file mode 100644
index 0000000..eff2a86
--- /dev/null
+++ b/cmake/Modules/FindSnappy.cmake
@@ -0,0 +1,28 @@
+# Find the Snappy libraries
+#
+# The following variables are optionally searched for defaults
+#  Snappy_ROOT_DIR:    Base directory where all Snappy components are found
+#
+# The following are set after configuration is done:
+#  SNAPPY_FOUND
+#  Snappy_INCLUDE_DIR
+#  Snappy_LIBRARIES
+
+find_path(Snappy_INCLUDE_DIR NAMES snappy.h
+                             PATHS ${SNAPPY_ROOT_DIR} ${SNAPPY_ROOT_DIR}/include)
+
+find_library(Snappy_LIBRARIES NAMES snappy
+                              PATHS ${SNAPPY_ROOT_DIR} ${SNAPPY_ROOT_DIR}/lib)
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(Snappy DEFAULT_MSG Snappy_INCLUDE_DIR Snappy_LIBRARIES)
+
+if(SNAPPY_FOUND)
+  message(STATUS "Found Snappy  (include: ${Snappy_INCLUDE_DIR}, library: ${Snappy_LIBRARIES})")
+  mark_as_advanced(Snappy_INCLUDE_DIR Snappy_LIBRARIES)
+
+  caffe_parse_header(${Snappy_INCLUDE_DIR}/snappy-stubs-public.h
+                     SNAPPY_VERSION_LINES SNAPPY_MAJOR SNAPPY_MINOR SNAPPY_PATCHLEVEL)
+  set(Snappy_VERSION "${SNAPPY_MAJOR}.${SNAPPY_MINOR}.${SNAPPY_PATCHLEVEL}")
+endif()
+
diff --git a/cmake/Modules/FindvecLib.cmake b/cmake/Modules/FindvecLib.cmake
new file mode 100644
index 0000000..9600da4
--- /dev/null
+++ b/cmake/Modules/FindvecLib.cmake
@@ -0,0 +1,34 @@
+# Find the vecLib libraries, either as part of Accelerate.framework or as a standalone framework
+#
+# The following are set after configuration is done:
+#  VECLIB_FOUND
+#  vecLib_INCLUDE_DIR
+#  vecLib_LINKER_LIBS
+
+
+if(NOT APPLE)
+  return()
+endif()
+
+set(__veclib_include_suffix "Frameworks/vecLib.framework/Versions/Current/Headers")
+
+find_path(vecLib_INCLUDE_DIR vecLib.h
+          DOC "vecLib include directory"
+          PATHS /System/Library/${__veclib_include_suffix}
+                /System/Library/Frameworks/Accelerate.framework/Versions/Current/${__veclib_include_suffix}
+                /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.9.sdk/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Headers/)
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(vecLib DEFAULT_MSG vecLib_INCLUDE_DIR)
+
+if(VECLIB_FOUND)
+  if(vecLib_INCLUDE_DIR MATCHES "^/System/Library/Frameworks/vecLib.framework.*")
+    set(vecLib_LINKER_LIBS -lcblas "-framework vecLib")
+    message(STATUS "Found standalone vecLib.framework")
+  else()
+    set(vecLib_LINKER_LIBS -lcblas "-framework Accelerate")
+    message(STATUS "Found vecLib as part of Accelerate.framework")
+  endif()
+
+  mark_as_advanced(vecLib_INCLUDE_DIR)
+endif()
diff --git a/cmake/ProtoBuf.cmake b/cmake/ProtoBuf.cmake
new file mode 100644
index 0000000..fc799bd
--- /dev/null
+++ b/cmake/ProtoBuf.cmake
@@ -0,0 +1,90 @@
+# Finds Google Protocol Buffers library and compilers and extends
+# the standard cmake script with version and python generation support
+
+find_package( Protobuf REQUIRED )
+include_directories(SYSTEM ${PROTOBUF_INCLUDE_DIR})
+list(APPEND Caffe_LINKER_LIBS ${PROTOBUF_LIBRARIES})
+
+# As of Ubuntu 14.04 protoc is no longer a part of libprotobuf-dev package
+# and should be installed separately as in: sudo apt-get install protobuf-compiler
+if(EXISTS ${PROTOBUF_PROTOC_EXECUTABLE})
+  message(STATUS "Found PROTOBUF Compiler: ${PROTOBUF_PROTOC_EXECUTABLE}")
+else()
+  message(FATAL_ERROR "Could not find PROTOBUF Compiler")
+endif()
+
+if(PROTOBUF_FOUND)
+  # fetches protobuf version
+  caffe_parse_header(${PROTOBUF_INCLUDE_DIR}/google/protobuf/stubs/common.h VERSION_LINE GOOGLE_PROTOBUF_VERSION)
+  string(REGEX MATCH "([0-9])00([0-9])00([0-9])" PROTOBUF_VERSION ${GOOGLE_PROTOBUF_VERSION})
+  set(PROTOBUF_VERSION "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}.${CMAKE_MATCH_3}")
+  unset(GOOGLE_PROTOBUF_VERSION)
+endif()
+
+# place where to generate protobuf sources
+set(proto_gen_folder "${PROJECT_BINARY_DIR}/include/caffe/proto")
+include_directories(SYSTEM "${PROJECT_BINARY_DIR}/include")
+
+set(PROTOBUF_GENERATE_CPP_APPEND_PATH TRUE)
+
+################################################################################################
+# Modification of standard 'protobuf_generate_cpp()' with output dir parameter and python support
+# Usage:
+#   caffe_protobuf_generate_cpp_py(<output_dir> <srcs_var> <hdrs_var> <python_var> <proto_files>)
+function(caffe_protobuf_generate_cpp_py output_dir srcs_var hdrs_var python_var)
+  if(NOT ARGN)
+    message(SEND_ERROR "Error: caffe_protobuf_generate_cpp_py() called without any proto files")
+    return()
+  endif()
+
+  if(PROTOBUF_GENERATE_CPP_APPEND_PATH)
+    # Create an include path for each file specified
+    foreach(fil ${ARGN})
+      get_filename_component(abs_fil ${fil} ABSOLUTE)
+      get_filename_component(abs_path ${abs_fil} PATH)
+      list(FIND _protoc_include ${abs_path} _contains_already)
+      if(${_contains_already} EQUAL -1)
+        list(APPEND _protoc_include -I ${abs_path})
+      endif()
+    endforeach()
+  else()
+    set(_protoc_include -I ${CMAKE_CURRENT_SOURCE_DIR})
+  endif()
+
+  if(DEFINED PROTOBUF_IMPORT_DIRS)
+    foreach(dir ${PROTOBUF_IMPORT_DIRS})
+      get_filename_component(abs_path ${dir} ABSOLUTE)
+      list(FIND _protoc_include ${abs_path} _contains_already)
+      if(${_contains_already} EQUAL -1)
+        list(APPEND _protoc_include -I ${abs_path})
+      endif()
+    endforeach()
+  endif()
+
+  set(${srcs_var})
+  set(${hdrs_var})
+  set(${python_var})
+  foreach(fil ${ARGN})
+    get_filename_component(abs_fil ${fil} ABSOLUTE)
+    get_filename_component(fil_we ${fil} NAME_WE)
+
+    list(APPEND ${srcs_var} "${output_dir}/${fil_we}.pb.cc")
+    list(APPEND ${hdrs_var} "${output_dir}/${fil_we}.pb.h")
+    list(APPEND ${python_var} "${output_dir}/${fil_we}_pb2.py")
+
+    add_custom_command(
+      OUTPUT "${output_dir}/${fil_we}.pb.cc"
+             "${output_dir}/${fil_we}.pb.h"
+             "${output_dir}/${fil_we}_pb2.py"
+      COMMAND ${CMAKE_COMMAND} -E make_directory "${output_dir}"
+      COMMAND ${PROTOBUF_PROTOC_EXECUTABLE} --cpp_out    ${output_dir} ${_protoc_include} ${abs_fil}
+      COMMAND ${PROTOBUF_PROTOC_EXECUTABLE} --python_out ${output_dir} ${_protoc_include} ${abs_fil}
+      DEPENDS ${abs_fil}
+      COMMENT "Running C++/Python protocol buffer compiler on ${fil}" VERBATIM )
+  endforeach()
+
+  set_source_files_properties(${${srcs_var}} ${${hdrs_var}} ${${python_var}} PROPERTIES GENERATED TRUE)
+  set(${srcs_var} ${${srcs_var}} PARENT_SCOPE)
+  set(${hdrs_var} ${${hdrs_var}} PARENT_SCOPE)
+  set(${python_var} ${${python_var}} PARENT_SCOPE)
+endfunction()
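+
+# A minimal usage sketch (the variable names below are illustrative; the real
+# invocation lives in the library's own CMake lists):
+#
+#   file(GLOB proto_files ${PROJECT_SOURCE_DIR}/src/caffe/proto/*.proto)
+#   caffe_protobuf_generate_cpp_py(${proto_gen_folder} proto_srcs proto_hdrs proto_python ${proto_files})
+#   add_library(proto STATIC ${proto_srcs} ${proto_hdrs})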
diff --git a/cmake/Summary.cmake b/cmake/Summary.cmake
new file mode 100644
index 0000000..e094ac0
--- /dev/null
+++ b/cmake/Summary.cmake
@@ -0,0 +1,168 @@
+################################################################################################
+# Caffe status report function.
+# Automatically aligns the right column and selects text based on a condition.
+# Usage:
+#   caffe_status(<text>)
+#   caffe_status(<heading> <value1> [<value2> ...])
+#   caffe_status(<heading> <condition> THEN <text for TRUE> ELSE <text for FALSE> )
+function(caffe_status text)
+  set(status_cond)
+  set(status_then)
+  set(status_else)
+
+  set(status_current_name "cond")
+  foreach(arg ${ARGN})
+    if(arg STREQUAL "THEN")
+      set(status_current_name "then")
+    elseif(arg STREQUAL "ELSE")
+      set(status_current_name "else")
+    else()
+      list(APPEND status_${status_current_name} ${arg})
+    endif()
+  endforeach()
+
+  if(DEFINED status_cond)
+    set(status_placeholder_length 23)
+    string(RANDOM LENGTH ${status_placeholder_length} ALPHABET " " status_placeholder)
+    string(LENGTH "${text}" status_text_length)
+    if(status_text_length LESS status_placeholder_length)
+      string(SUBSTRING "${text}${status_placeholder}" 0 ${status_placeholder_length} status_text)
+    elseif(DEFINED status_then OR DEFINED status_else)
+      message(STATUS "${text}")
+      set(status_text "${status_placeholder}")
+    else()
+      set(status_text "${text}")
+    endif()
+
+    if(DEFINED status_then OR DEFINED status_else)
+      if(${status_cond})
+        string(REPLACE ";" " " status_then "${status_then}")
+        string(REGEX REPLACE "^[ \t]+" "" status_then "${status_then}")
+        message(STATUS "${status_text} ${status_then}")
+      else()
+        string(REPLACE ";" " " status_else "${status_else}")
+        string(REGEX REPLACE "^[ \t]+" "" status_else "${status_else}")
+        message(STATUS "${status_text} ${status_else}")
+      endif()
+    else()
+      string(REPLACE ";" " " status_cond "${status_cond}")
+      string(REGEX REPLACE "^[ \t]+" "" status_cond "${status_cond}")
+      message(STATUS "${status_text} ${status_cond}")
+    endif()
+  else()
+    message(STATUS "${text}")
+  endif()
+endfunction()
+
+
+################################################################################################
+# Function for fetching Caffe version from git and headers
+# Usage:
+#   caffe_extract_caffe_version()
+function(caffe_extract_caffe_version)
+  set(Caffe_GIT_VERSION "unknown")
+  find_package(Git)
+  if(GIT_FOUND)
+    execute_process(COMMAND ${GIT_EXECUTABLE} describe --tags --always --dirty
+                    ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE
+                    WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}"
+                    OUTPUT_VARIABLE Caffe_GIT_VERSION
+                    RESULT_VARIABLE __git_result)
+    if(NOT ${__git_result} EQUAL 0)
+      set(Caffe_GIT_VERSION "unknown")
+    endif()
+  endif()
+
+  set(Caffe_GIT_VERSION ${Caffe_GIT_VERSION} PARENT_SCOPE)
+  set(Caffe_VERSION "<TODO> (Caffe doesn't declare its version in headers)" PARENT_SCOPE)
+
+  # caffe_parse_header(${Caffe_INCLUDE_DIR}/caffe/version.hpp Caffe_VERSION_LINES CAFFE_MAJOR CAFFE_MINOR CAFFE_PATCH)
+  # set(Caffe_VERSION "${CAFFE_MAJOR}.${CAFFE_MINOR}.${CAFFE_PATCH}" PARENT_SCOPE)
+
+  # or for #define Caffe_VERSION "x.x.x"
+  # caffe_parse_header_single_define(Caffe ${Caffe_INCLUDE_DIR}/caffe/version.hpp Caffe_VERSION)
+  # set(Caffe_VERSION ${Caffe_VERSION_STRING} PARENT_SCOPE)
+
+endfunction()
+
+
+################################################################################################
+# Prints accumulated caffe configuration summary
+# Usage:
+#   caffe_print_configuration_summary()
+
+function(caffe_print_configuration_summary)
+  caffe_extract_caffe_version()
+  set(Caffe_VERSION ${Caffe_VERSION} PARENT_SCOPE)
+
+  caffe_merge_flag_lists(__flags_rel CMAKE_CXX_FLAGS_RELEASE CMAKE_CXX_FLAGS)
+  caffe_merge_flag_lists(__flags_deb CMAKE_CXX_FLAGS_DEBUG   CMAKE_CXX_FLAGS)
+
+  caffe_status("")
+  caffe_status("******************* Caffe Configuration Summary *******************")
+  caffe_status("General:")
+  caffe_status("  Version           :   ${Caffe_VERSION}")
+  caffe_status("  Git               :   ${Caffe_GIT_VERSION}")
+  caffe_status("  System            :   ${CMAKE_SYSTEM_NAME}")
+  caffe_status("  C++ compiler      :   ${CMAKE_CXX_COMPILER}")
+  caffe_status("  Release CXX flags :   ${__flags_rel}")
+  caffe_status("  Debug CXX flags   :   ${__flags_deb}")
+  caffe_status("  Build type        :   ${CMAKE_BUILD_TYPE}")
+  caffe_status("")
+  caffe_status("  BUILD_SHARED_LIBS :   ${BUILD_SHARED_LIBS}")
+  caffe_status("  BUILD_python      :   ${BUILD_python}")
+  caffe_status("  BUILD_matlab      :   ${BUILD_matlab}")
+  caffe_status("  BUILD_docs        :   ${BUILD_docs}")
+  caffe_status("  CPU_ONLY          :   ${CPU_ONLY}")
+  caffe_status("")
+  caffe_status("Dependencies:")
+  caffe_status("  BLAS              : " APPLE THEN "Yes (vecLib)" ELSE "Yes (${BLAS})")
+  caffe_status("  Boost             :   Yes (ver. ${Boost_MAJOR_VERSION}.${Boost_MINOR_VERSION})")
+  caffe_status("  glog              :   Yes")
+  caffe_status("  gflags            :   Yes")
+  caffe_status("  protobuf          : " PROTOBUF_FOUND THEN "Yes (ver. ${PROTOBUF_VERSION})" ELSE "No" )
+  caffe_status("  lmdb              : " LMDB_FOUND THEN "Yes (ver. ${LMDB_VERSION})" ELSE "No")
+  caffe_status("  Snappy            : " SNAPPY_FOUND THEN "Yes (ver. ${Snappy_VERSION})" ELSE "No" )
+  caffe_status("  LevelDB           : " LEVELDB_FOUND THEN  "Yes (ver. ${LEVELDB_VERSION})" ELSE "No")
+  caffe_status("  OpenCV            :   Yes (ver. ${OpenCV_VERSION})")
+  caffe_status("  CUDA              : " HAVE_CUDA THEN "Yes (ver. ${CUDA_VERSION})" ELSE "No" )
+  caffe_status("")
+  if(HAVE_CUDA)
+    caffe_status("NVIDIA CUDA:")
+    caffe_status("  Target GPU(s)     :   ${CUDA_ARCH_NAME}" )
+    caffe_status("  GPU arch(s)       :   ${NVCC_FLAGS_EXTRA_readable}")
+    if(USE_CUDNN)
+      caffe_status("  cuDNN             : " HAVE_CUDNN THEN "Yes" ELSE "Not found")
+    else()
+      caffe_status("  cuDNN             :   Disabled")
+    endif()
+    caffe_status("")
+  endif()
+  if(HAVE_PYTHON)
+    caffe_status("Python:")
+    caffe_status("  Interpreter       :" PYTHON_EXECUTABLE THEN "${PYTHON_EXECUTABLE} (ver. ${PYTHON_VERSION_STRING})" ELSE "No")
+    caffe_status("  Libraries         :" PYTHONLIBS_FOUND  THEN "${PYTHON_LIBRARIES} (ver ${PYTHONLIBS_VERSION_STRING})" ELSE "No")
+    caffe_status("  NumPy             :" NUMPY_FOUND  THEN "${NUMPY_INCLUDE_DIR} (ver ${NUMPY_VERSION})" ELSE "No")
+    caffe_status("")
+  endif()
+  if(BUILD_matlab)
+    caffe_status("Matlab:")
+    caffe_status("  Matlab            :" HAVE_MATLAB THEN "Yes (${Matlab_mex}, ${Matlab_mexext}" ELSE "No")
+    caffe_status("  Octave            :" Octave_compiler THEN  "Yes (${Octave_compiler})" ELSE "No")
+    if(HAVE_MATLAB AND Octave_compiler)
+      caffe_status("  Build mex using   :   ${Matlab_build_mex_using}")
+    endif()
+    caffe_status("")
+  endif()
+  if(BUILD_docs)
+    caffe_status("Documentaion:")
+    caffe_status("  Doxygen           :" DOXYGEN_FOUND THEN "${DOXYGEN_EXECUTABLE} (${DOXYGEN_VERSION})" ELSE "No")
+    caffe_status("  config_file       :   ${DOXYGEN_config_file}")
+
+    caffe_status("")
+  endif()
+  caffe_status("Install:")
+  caffe_status("  Install path      :   ${CMAKE_INSTALL_PREFIX}")
+  caffe_status("")
+endfunction()
+
diff --git a/cmake/Targets.cmake b/cmake/Targets.cmake
new file mode 100644
index 0000000..2401f25
--- /dev/null
+++ b/cmake/Targets.cmake
@@ -0,0 +1,173 @@
+################################################################################################
+# Defines the global Caffe_LINK flag. This flag is required to prevent the linker from excluding
+# objects that are not referenced directly but are registered via static constructors
+if(BUILD_SHARED_LIBS)
+  set(Caffe_LINK caffe)
+else()
+  if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
+    set(Caffe_LINK -Wl,-force_load caffe)
+  elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
+    set(Caffe_LINK -Wl,--whole-archive caffe -Wl,--no-whole-archive)
+  endif()
+endif()
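+
+# Targets that need every layer registered should link against ${Caffe_LINK}
+# instead of plain 'caffe'; an illustrative use: target_link_libraries(my_tool ${Caffe_LINK})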
+
+################################################################################################
+# Convenience command to set up a source group for IDEs that support this feature (VS, Xcode)
+# Usage:
+#   caffe_source_group(<group> GLOB[_RECURSE] <globbing_expression>)
+function(caffe_source_group group)
+  cmake_parse_arguments(CAFFE_SOURCE_GROUP "" "" "GLOB;GLOB_RECURSE" ${ARGN})
+  if(CAFFE_SOURCE_GROUP_GLOB)
+    file(GLOB srcs1 ${CAFFE_SOURCE_GROUP_GLOB})
+    source_group(${group} FILES ${srcs1})
+  endif()
+
+  if(CAFFE_SOURCE_GROUP_GLOB_RECURSE)
+    file(GLOB_RECURSE srcs2 ${CAFFE_SOURCE_GROUP_GLOB_RECURSE})
+    source_group(${group} FILES ${srcs2})
+  endif()
+endfunction()
+
+################################################################################################
+# Collecting sources from globbing and appending to output list variable
+# Usage:
+#   caffe_collect_sources(<output_variable> GLOB[_RECURSE] <globbing_expression>)
+function(caffe_collect_sources variable)
+  cmake_parse_arguments(CAFFE_COLLECT_SOURCES "" "" "GLOB;GLOB_RECURSE" ${ARGN})
+  if(CAFFE_COLLECT_SOURCES_GLOB)
+    file(GLOB srcs1 ${CAFFE_COLLECT_SOURCES_GLOB})
+    set(${variable} ${${variable}} ${srcs1})
+  endif()
+
+  if(CAFFE_COLLECT_SOURCES_GLOB_RECURSE)
+    file(GLOB_RECURSE srcs2 ${CAFFE_COLLECT_SOURCES_GLOB_RECURSE})
+    set(${variable} ${${variable}} ${srcs2})
+  endif()
+endfunction()
+
+################################################################################################
+# Short command for collecting Caffe sources (assuming the standard Caffe code tree)
+# Usage:
+#   caffe_pickup_caffe_sources(<root>)
+function(caffe_pickup_caffe_sources root)
+  # put all files in source groups (visible as subfolders in many IDEs)
+  caffe_source_group("Include"        GLOB "${root}/include/caffe/*.h*")
+  caffe_source_group("Include\\Util"  GLOB "${root}/include/caffe/util/*.h*")
+  caffe_source_group("Include"        GLOB "${PROJECT_BINARY_DIR}/caffe_config.h*")
+  caffe_source_group("Source"         GLOB "${root}/src/caffe/*.cpp")
+  caffe_source_group("Source\\Util"   GLOB "${root}/src/caffe/util/*.cpp")
+  caffe_source_group("Source\\Layers" GLOB "${root}/src/caffe/layers/*.cpp")
+  caffe_source_group("Source\\Cuda"   GLOB "${root}/src/caffe/layers/*.cu")
+  caffe_source_group("Source\\Cuda"   GLOB "${root}/src/caffe/util/*.cu")
+  caffe_source_group("Source\\Proto"  GLOB "${root}/src/caffe/proto/*.proto")
+
+  # source groups for test target
+  caffe_source_group("Include"      GLOB "${root}/include/caffe/test/test_*.h*")
+  caffe_source_group("Source"       GLOB "${root}/src/caffe/test/test_*.cpp")
+  caffe_source_group("Source\\Cuda" GLOB "${root}/src/caffe/test/test_*.cu")
+
+  # collect files
+  file(GLOB test_hdrs    ${root}/include/caffe/test/test_*.h*)
+  file(GLOB test_srcs    ${root}/src/caffe/test/test_*.cpp)
+  file(GLOB_RECURSE hdrs ${root}/include/caffe/*.h*)
+  file(GLOB_RECURSE srcs ${root}/src/caffe/*.cpp)
+  list(REMOVE_ITEM  hdrs ${test_hdrs})
+  list(REMOVE_ITEM  srcs ${test_srcs})
+
+  # add headers to make them visible in some IDEs (Qt, VS, Xcode)
+  list(APPEND srcs ${hdrs} ${PROJECT_BINARY_DIR}/caffe_config.h)
+  list(APPEND test_srcs ${test_hdrs})
+
+  # collect cuda files
+  file(GLOB    test_cuda ${root}/src/caffe/test/test_*.cu)
+  file(GLOB_RECURSE cuda ${root}/src/caffe/*.cu)
+  list(REMOVE_ITEM  cuda ${test_cuda})
+
+  # add proto to make them editable in IDEs too
+  file(GLOB_RECURSE proto_files ${root}/src/caffe/*.proto)
+  list(APPEND srcs ${proto_files})
+
+  # convert to absolute paths
+  caffe_convert_absolute_paths(srcs)
+  caffe_convert_absolute_paths(cuda)
+  caffe_convert_absolute_paths(test_srcs)
+  caffe_convert_absolute_paths(test_cuda)
+
+  # propagate to parent scope
+  set(srcs ${srcs} PARENT_SCOPE)
+  set(cuda ${cuda} PARENT_SCOPE)
+  set(test_srcs ${test_srcs} PARENT_SCOPE)
+  set(test_cuda ${test_cuda} PARENT_SCOPE)
+endfunction()
+
+################################################################################################
+# Short command for setting default target properties
+# Usage:
+#   caffe_default_properties(<target>)
+function(caffe_default_properties target)
+  set_target_properties(${target} PROPERTIES
+    DEBUG_POSTFIX ${Caffe_DEBUG_POSTFIX}
+    ARCHIVE_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/lib"
+    LIBRARY_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/lib"
+    RUNTIME_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/bin")
+  # make sure we build all external dependencies first
+  if (DEFINED external_project_dependencies)
+    add_dependencies(${target} ${external_project_dependencies})
+  endif()
+endfunction()
+
+################################################################################################
+# Short command for setting runtime directory for build target
+# Usage:
+#   caffe_set_runtime_directory(<target> <dir>)
+function(caffe_set_runtime_directory target dir)
+  set_target_properties(${target} PROPERTIES
+    RUNTIME_OUTPUT_DIRECTORY "${dir}")
+endfunction()
+
+################################################################################################
+# Short command for setting solution folder property for target
+# Usage:
+#   caffe_set_solution_folder(<target> <folder>)
+function(caffe_set_solution_folder target folder)
+  if(USE_PROJECT_FOLDERS)
+    set_target_properties(${target} PROPERTIES FOLDER "${folder}")
+  endif()
+endfunction()
+
+################################################################################################
+# Reads lines from input file, prepends source directory to each line and writes to output file
+# Usage:
+#   caffe_configure_testdatafile(<testdatafile>)
+function(caffe_configure_testdatafile file)
+  file(STRINGS ${file} __lines)
+  set(result "")
+  foreach(line ${__lines})
+    set(result "${result}${PROJECT_SOURCE_DIR}/${line}\n")
+  endforeach()
+  file(WRITE ${file}.gen.cmake ${result})
+endfunction()
+
+################################################################################################
+# Filter out all files that are not included in selected list
+# Usage:
+#   caffe_leave_only_selected_tests(<filelist_variable> <selected_list>)
+function(caffe_leave_only_selected_tests file_list)
+  if(NOT ARGN)
+    return() # blank list means leave all
+  endif()
+  string(REPLACE "," ";" __selected ${ARGN})
+  list(APPEND __selected caffe_main)
+
+  set(result "")
+  foreach(f ${${file_list}})
+    get_filename_component(name ${f} NAME_WE)
+    string(REGEX REPLACE "^test_" "" name ${name})
+    list(FIND __selected ${name} __index)
+    if(NOT __index EQUAL -1)
+      list(APPEND result ${f})
+    endif()
+  endforeach()
+  set(${file_list} ${result} PARENT_SCOPE)
+endfunction()
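+
+# Illustrative call, assuming a comma-separated selection such as a
+# BUILD_only_tests cache variable (hypothetical here):
+#   caffe_leave_only_selected_tests(test_srcs ${BUILD_only_tests})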
+
diff --git a/cmake/Templates/CaffeConfig.cmake.in b/cmake/Templates/CaffeConfig.cmake.in
new file mode 100644
index 0000000..8f23742
--- /dev/null
+++ b/cmake/Templates/CaffeConfig.cmake.in
@@ -0,0 +1,58 @@
+# Config file for the Caffe package.
+#
+# Note:
+#   Caffe and this config file depend on OpenCV,
+#   so put `find_package(OpenCV)` before searching for Caffe
+#   via `find_package(Caffe)`. All other library/include
+#   dependencies are hard-coded in this file.
+#
+# After successful configuration the following variables
+# will be defined:
+#
+#   Caffe_INCLUDE_DIRS - Caffe include directories
+#   Caffe_LIBRARIES    - libraries to link against
+#   Caffe_DEFINITIONS  - a list of definitions to pass to compiler
+#
+#   Caffe_HAVE_CUDA    - signals about CUDA support
+#   Caffe_HAVE_CUDNN   - signals about cuDNN support
+
+
+# OpenCV dependency
+
+if(NOT OpenCV_FOUND)
+  set(Caffe_OpenCV_CONFIG_PATH "@OpenCV_CONFIG_PATH@")
+  if(Caffe_OpenCV_CONFIG_PATH)
+    get_filename_component(Caffe_OpenCV_CONFIG_PATH ${Caffe_OpenCV_CONFIG_PATH} ABSOLUTE)
+
+    if(EXISTS ${Caffe_OpenCV_CONFIG_PATH} AND NOT TARGET opencv_core)
+      message(STATUS "Caffe: using OpenCV config from ${Caffe_OpenCV_CONFIG_PATH}")
+      include(${Caffe_OpenCV_CONFIG_PATH}/OpenCVModules.cmake)
+    endif()
+
+  else()
+    find_package(OpenCV REQUIRED)
+  endif()
+  unset(Caffe_OpenCV_CONFIG_PATH)
+endif()
+
+# Compute paths
+get_filename_component(Caffe_CMAKE_DIR "${CMAKE_CURRENT_LIST_FILE}" PATH)
+set(Caffe_INCLUDE_DIRS "@Caffe_INCLUDE_DIRS@")
+
+@Caffe_INSTALL_INCLUDE_DIR_APPEND_COMMAND@
+
+# Our library dependencies
+if(NOT TARGET caffe AND NOT caffe_BINARY_DIR)
+  include("${Caffe_CMAKE_DIR}/CaffeTargets.cmake")
+endif()
+
+# List of IMPORTED libs created by CaffeTargets.cmake
+set(Caffe_LIBRARIES caffe)
+
+# Definitions
+set(Caffe_DEFINITIONS "@Caffe_DEFINITIONS@")
+
+# Cuda support variables
+set(Caffe_CPU_ONLY @CPU_ONLY@)
+set(Caffe_HAVE_CUDA @HAVE_CUDA@)
+set(Caffe_HAVE_CUDNN @HAVE_CUDNN@)
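+
+# A minimal downstream sketch (hypothetical consumer project) using the
+# variables documented above:
+#
+#   find_package(Caffe REQUIRED)
+#   include_directories(${Caffe_INCLUDE_DIRS})
+#   add_definitions(${Caffe_DEFINITIONS})
+#   add_executable(classify classify.cpp)
+#   target_link_libraries(classify ${Caffe_LIBRARIES})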
diff --git a/cmake/Templates/CaffeConfigVersion.cmake.in b/cmake/Templates/CaffeConfigVersion.cmake.in
new file mode 100644
index 0000000..19f8530
--- /dev/null
+++ b/cmake/Templates/CaffeConfigVersion.cmake.in
@@ -0,0 +1,11 @@
+set(PACKAGE_VERSION "@Caffe_VERSION@")
+
+# Check whether the requested PACKAGE_FIND_VERSION is compatible
+if("${PACKAGE_VERSION}" VERSION_LESS "${PACKAGE_FIND_VERSION}")
+  set(PACKAGE_VERSION_COMPATIBLE FALSE)
+else()
+  set(PACKAGE_VERSION_COMPATIBLE TRUE)
+  if ("${PACKAGE_VERSION}" VERSION_EQUAL "${PACKAGE_FIND_VERSION}")
+    set(PACKAGE_VERSION_EXACT TRUE)
+  endif()
+endif()
diff --git a/cmake/Templates/caffe_config.h.in b/cmake/Templates/caffe_config.h.in
new file mode 100644
index 0000000..6039e8f
--- /dev/null
+++ b/cmake/Templates/caffe_config.h.in
@@ -0,0 +1,32 @@
+/* Sources directory */
+#define SOURCE_FOLDER "${PROJECT_SOURCE_DIR}"
+
+/* Binaries directory */
+#define BINARY_FOLDER "${PROJECT_BINARY_DIR}"
+
+/* NVIDIA CUDA */
+#cmakedefine HAVE_CUDA
+
+/* NVIDIA cuDNN */
+#cmakedefine HAVE_CUDNN
+#cmakedefine USE_CUDNN
+
+/* CPU-only build (no GPU support) */
+#cmakedefine CPU_ONLY
+
+/* Test device */
+#define CUDA_TEST_DEVICE ${CUDA_TEST_DEVICE}
+
+/* Temporary (TODO: remove) */
+#if 1
+  #define CMAKE_SOURCE_DIR SOURCE_FOLDER "/src/"
+  #define EXAMPLES_SOURCE_DIR BINARY_FOLDER "/examples/"
+  #define CMAKE_EXT ".gen.cmake"
+#else
+  #define CMAKE_SOURCE_DIR "src/"
+  #define EXAMPLES_SOURCE_DIR "examples/"
+  #define CMAKE_EXT ""
+#endif
+
+/* Matlab */
+#cmakedefine HAVE_MATLAB
diff --git a/cmake/Utils.cmake b/cmake/Utils.cmake
new file mode 100644
index 0000000..a1bde1a
--- /dev/null
+++ b/cmake/Utils.cmake
@@ -0,0 +1,381 @@
+################################################################################################
+# Command alias for debugging messages
+# Usage:
+#   dmsg(<message>)
+function(dmsg)
+  message(STATUS ${ARGN})
+endfunction()
+
+################################################################################################
+# Removes duplicates from list(s)
+# Usage:
+#   caffe_list_unique(<list_variable> [<list_variable>] [...])
+macro(caffe_list_unique)
+  foreach(__lst ${ARGN})
+    if(${__lst})
+      list(REMOVE_DUPLICATES ${__lst})
+    endif()
+  endforeach()
+endmacro()
+
+################################################################################################
+# Clears variables from list
+# Usage:
+#   caffe_clear_vars(<variables_list>)
+macro(caffe_clear_vars)
+  foreach(_var ${ARGN})
+    unset(${_var})
+  endforeach()
+endmacro()
+
+################################################################################################
+# Removes duplicates from string
+# Usage:
+#   caffe_string_unique(<string_variable>)
+function(caffe_string_unique __string)
+  if(${__string})
+    set(__list ${${__string}})
+    separate_arguments(__list)
+    list(REMOVE_DUPLICATES __list)
+    foreach(__e ${__list})
+      set(__str "${__str} ${__e}")
+    endforeach()
+    set(${__string} ${__str} PARENT_SCOPE)
+  endif()
+endfunction()
+
+################################################################################################
+# Prints list elements one per line
+# Usage:
+#   caffe_print_list(<list>)
+function(caffe_print_list)
+  foreach(e ${ARGN})
+    message(STATUS ${e})
+  endforeach()
+endfunction()
+
+################################################################################################
+# Function merging lists of compiler flags to single string.
+# Usage:
+#   caffe_merge_flag_lists(out_variable <list1> [<list2>] [<list3>] ...)
+function(caffe_merge_flag_lists out_var)
+  set(__result "")
+  foreach(__list ${ARGN})
+    foreach(__flag ${${__list}})
+      string(STRIP ${__flag} __flag)
+      set(__result "${__result} ${__flag}")
+    endforeach()
+  endforeach()
+  string(STRIP ${__result} __result)
+  set(${out_var} ${__result} PARENT_SCOPE)
+endfunction()
+
+################################################################################################
+# Converts all paths in list to absolute
+# Usage:
+#   caffe_convert_absolute_paths(<list_variable>)
+function(caffe_convert_absolute_paths variable)
+  set(__list "")
+  foreach(__s ${${variable}})
+    get_filename_component(__abspath ${__s} ABSOLUTE)
+    list(APPEND __list ${__abspath})
+  endforeach()
+  set(${variable} ${__list} PARENT_SCOPE)
+endfunction()
+
+################################################################################################
+# Reads set of version defines from the header file
+# Usage:
+#   caffe_parse_header(<file> <define1> <define2> <define3> ..)
+macro(caffe_parse_header FILENAME FILE_VAR)
+  set(vars_regex "")
+  set(__parent_scope OFF)
+  set(__add_cache OFF)
+  foreach(name ${ARGN})
+    if("${name}" STREQUAL "PARENT_SCOPE")
+      set(__parent_scope ON)
+    elseif("${name}" STREQUAL "CACHE")
+      set(__add_cache ON)
+    elseif(vars_regex)
+      set(vars_regex "${vars_regex}|${name}")
+    else()
+      set(vars_regex "${name}")
+    endif()
+  endforeach()
+  if(EXISTS "${FILENAME}")
+    file(STRINGS "${FILENAME}" ${FILE_VAR} REGEX "#define[ \t]+(${vars_regex})[ \t]+[0-9]+" )
+  else()
+    unset(${FILE_VAR})
+  endif()
+  foreach(name ${ARGN})
+    if(NOT "${name}" STREQUAL "PARENT_SCOPE" AND NOT "${name}" STREQUAL "CACHE")
+      if(${FILE_VAR})
+        if(${FILE_VAR} MATCHES ".+[ \t]${name}[ \t]+([0-9]+).*")
+          string(REGEX REPLACE ".+[ \t]${name}[ \t]+([0-9]+).*" "\\1" ${name} "${${FILE_VAR}}")
+        else()
+          set(${name} "")
+        endif()
+        if(__add_cache)
+          set(${name} ${${name}} CACHE INTERNAL "${name} parsed from ${FILENAME}" FORCE)
+        elseif(__parent_scope)
+          set(${name} "${${name}}" PARENT_SCOPE)
+        endif()
+      else()
+        unset(${name} CACHE)
+      endif()
+    endif()
+  endforeach()
+endmacro()
+
+################################################################################################
+# Reads single version define from the header file and parses it
+# Usage:
+#   caffe_parse_header_single_define(<library_name> <file> <define_name>)
+function(caffe_parse_header_single_define LIBNAME HDR_PATH VARNAME)
+  set(${LIBNAME}_H "")
+  if(EXISTS "${HDR_PATH}")
+    file(STRINGS "${HDR_PATH}" ${LIBNAME}_H REGEX "^#define[ \t]+${VARNAME}[ \t]+\"[^\"]*\".*$" LIMIT_COUNT 1)
+  endif()
+
+  if(${LIBNAME}_H)
+    string(REGEX REPLACE "^.*[ \t]${VARNAME}[ \t]+\"([0-9]+).*$" "\\1" ${LIBNAME}_VERSION_MAJOR "${${LIBNAME}_H}")
+    string(REGEX REPLACE "^.*[ \t]${VARNAME}[ \t]+\"[0-9]+\\.([0-9]+).*$" "\\1" ${LIBNAME}_VERSION_MINOR  "${${LIBNAME}_H}")
+    string(REGEX REPLACE "^.*[ \t]${VARNAME}[ \t]+\"[0-9]+\\.[0-9]+\\.([0-9]+).*$" "\\1" ${LIBNAME}_VERSION_PATCH "${${LIBNAME}_H}")
+    set(${LIBNAME}_VERSION_MAJOR ${${LIBNAME}_VERSION_MAJOR} ${ARGN} PARENT_SCOPE)
+    set(${LIBNAME}_VERSION_MINOR ${${LIBNAME}_VERSION_MINOR} ${ARGN} PARENT_SCOPE)
+    set(${LIBNAME}_VERSION_PATCH ${${LIBNAME}_VERSION_PATCH} ${ARGN} PARENT_SCOPE)
+    set(${LIBNAME}_VERSION_STRING "${${LIBNAME}_VERSION_MAJOR}.${${LIBNAME}_VERSION_MINOR}.${${LIBNAME}_VERSION_PATCH}" PARENT_SCOPE)
+
+    # append a TWEAK version if it exists:
+    set(${LIBNAME}_VERSION_TWEAK "")
+    if("${${LIBNAME}_H}" MATCHES "^.*[ \t]${VARNAME}[ \t]+\"[0-9]+\\.[0-9]+\\.[0-9]+\\.([0-9]+).*$")
+      set(${LIBNAME}_VERSION_TWEAK "${CMAKE_MATCH_1}" ${ARGN} PARENT_SCOPE)
+    endif()
+    if(${LIBNAME}_VERSION_TWEAK)
+      set(${LIBNAME}_VERSION_STRING "${${LIBNAME}_VERSION_STRING}.${${LIBNAME}_VERSION_TWEAK}" ${ARGN} PARENT_SCOPE)
+    else()
+      set(${LIBNAME}_VERSION_STRING "${${LIBNAME}_VERSION_STRING}" ${ARGN} PARENT_SCOPE)
+    endif()
+  endif()
+endfunction()
+
+########################################################################################################
+# An option that the user can select. Can accept a condition to control when the option is available to the user.
+# Usage:
+#   caffe_option(<option_variable> "doc string" <initial value or boolean expression> [IF <condition>])
+function(caffe_option variable description value)
+  set(__value ${value})
+  set(__condition "")
+  set(__varname "__value")
+  foreach(arg ${ARGN})
+    if(arg STREQUAL "IF" OR arg STREQUAL "if")
+      set(__varname "__condition")
+    else()
+      list(APPEND ${__varname} ${arg})
+    endif()
+  endforeach()
+  unset(__varname)
+  if("${__condition}" STREQUAL "")
+    set(__condition 2 GREATER 1)
+  endif()
+
+  if(${__condition})
+    if("${__value}" MATCHES ";")
+      if(${__value})
+        option(${variable} "${description}" ON)
+      else()
+        option(${variable} "${description}" OFF)
+      endif()
+    elseif(DEFINED ${__value})
+      if(${__value})
+        option(${variable} "${description}" ON)
+      else()
+        option(${variable} "${description}" OFF)
+      endif()
+    else()
+      option(${variable} "${description}" ${__value})
+    endif()
+  else()
+    unset(${variable} CACHE)
+  endif()
+endfunction()
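+
+# Illustrative calls (the option names below are referenced elsewhere in the
+# build; the doc strings here are examples, not the canonical ones):
+#   caffe_option(CPU_ONLY "Build Caffe without CUDA support" OFF)
+#   caffe_option(USE_CUDNN "Build Caffe with cuDNN library support" ON IF NOT CPU_ONLY)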
+
+################################################################################################
+# Utility function for comparing two lists. Used for CMake debugging purposes
+# Usage:
+#   caffe_compare_lists(<list_variable> <list2_variable> [description])
+function(caffe_compare_lists list1 list2 desc)
+  set(__list1 ${${list1}})
+  set(__list2 ${${list2}})
+  list(SORT __list1)
+  list(SORT __list2)
+  list(LENGTH __list1 __len1)
+  list(LENGTH __list2 __len2)
+
+  if(NOT ${__len1} EQUAL ${__len2})
+    message(FATAL_ERROR "Lists are not equal. ${__len1} != ${__len2}. ${desc}")
+  endif()
+
+  foreach(__i RANGE 1 ${__len1})
+    math(EXPR __index "${__i} - 1")
+    list(GET __list1 ${__index} __item1)
+    list(GET __list2 ${__index} __item2)
+    if(NOT ${__item1} STREQUAL ${__item2})
+      message(FATAL_ERROR "Lists are not equal. Differ at element ${__index}. ${desc}")
+    endif()
+  endforeach()
+endfunction()
+
+################################################################################################
+# Command for disabling warnings for different platforms (see below for gcc and VisualStudio)
+# Usage:
+#   caffe_warnings_disable(<CMAKE_[C|CXX]_FLAGS[_CONFIGURATION]> -Wshadow /wd4996 ...)
+macro(caffe_warnings_disable)
+  set(_flag_vars "")
+  set(_msvc_warnings "")
+  set(_gxx_warnings "")
+
+  foreach(arg ${ARGN})
+    if(arg MATCHES "^CMAKE_")
+      list(APPEND _flag_vars ${arg})
+    elseif(arg MATCHES "^/wd")
+      list(APPEND _msvc_warnings ${arg})
+    elseif(arg MATCHES "^-W")
+      list(APPEND _gxx_warnings ${arg})
+    endif()
+  endforeach()
+
+  if(NOT _flag_vars)
+    set(_flag_vars CMAKE_C_FLAGS CMAKE_CXX_FLAGS)
+  endif()
+
+  if(MSVC AND _msvc_warnings)
+    foreach(var ${_flag_vars})
+      foreach(warning ${_msvc_warnings})
+        set(${var} "${${var}} ${warning}")
+      endforeach()
+    endforeach()
+  elseif((CMAKE_COMPILER_IS_GNUCXX OR CMAKE_COMPILER_IS_CLANGXX) AND _gxx_warnings)
+    foreach(var ${_flag_vars})
+      foreach(warning ${_gxx_warnings})
+        if(NOT warning MATCHES "^-Wno-")
+          string(REPLACE "${warning}" "" ${var} "${${var}}")
+          string(REPLACE "-W" "-Wno-" warning "${warning}")
+        endif()
+        set(${var} "${${var}} ${warning}")
+      endforeach()
+    endforeach()
+  endif()
+  caffe_clear_vars(_flag_vars _msvc_warnings _gxx_warnings)
+endmacro()
+
+################################################################################################
+# Helper function to get the current compile definitions
+# Usage:
+#   caffe_get_current_definitions(<definitions_variable>)
+function(caffe_get_current_definitions definitions_var)
+  get_property(current_definitions DIRECTORY PROPERTY COMPILE_DEFINITIONS)
+  set(result "")
+
+  foreach(d ${current_definitions})
+    list(APPEND result -D${d})
+  endforeach()
+
+  caffe_list_unique(result)
+  set(${definitions_var} ${result} PARENT_SCOPE)
+endfunction()
+
+################################################################################################
+# Helper function to get the current includes/definitions as compiler flags
+# Usage:
+#   caffe_get_current_cflags(<cflagslist_variable>)
+function(caffe_get_current_cflags cflags_var)
+  get_property(current_includes DIRECTORY PROPERTY INCLUDE_DIRECTORIES)
+  caffe_convert_absolute_paths(current_includes)
+  caffe_get_current_definitions(cflags)
+
+  foreach(i ${current_includes})
+    list(APPEND cflags "-I${i}")
+  endforeach()
+
+  caffe_list_unique(cflags)
+  set(${cflags_var} ${cflags} PARENT_SCOPE)
+endfunction()
+
+################################################################################################
+# Helper function to parse current linker libs into link directories, libflags and OS X frameworks
+# Usage:
+#   caffe_parse_linker_libs(<Caffe_LINKER_LIBS_var> <directories_var> <libflags_var> <frameworks_var>)
+function(caffe_parse_linker_libs Caffe_LINKER_LIBS_variable folders_var flags_var frameworks_var)
+
+  set(__unspec "")
+  set(__debug "")
+  set(__optimized "")
+  set(__framework "")
+  set(__varname "__unspec")
+
+  # split libs into debug, optimized, unspecified and frameworks
+  foreach(list_elem ${${Caffe_LINKER_LIBS_variable}})
+    if(list_elem STREQUAL "debug")
+      set(__varname "__debug")
+    elseif(list_elem STREQUAL "optimized")
+      set(__varname "__optimized")
+    elseif(list_elem MATCHES "^-framework[ \t]+([^ \t].*)")
+      list(APPEND __framework -framework ${CMAKE_MATCH_1})
+    else()
+      list(APPEND ${__varname} ${list_elem})
+      set(__varname "__unspec")
+    endif()
+  endforeach()
+
+  # attach debug or optimized libs to unspecified according to current configuration
+  if(CMAKE_BUILD_TYPE MATCHES "Debug")
+    set(__libs ${__unspec} ${__debug})
+  else()
+    set(__libs ${__unspec} ${__optimized})
+  endif()
+
+  set(libflags "")
+  set(folders "")
+
+  # convert linker libraries list to link flags
+  foreach(lib ${__libs})
+    if(TARGET ${lib})
+      list(APPEND folders $<TARGET_LINKER_FILE_DIR:${lib}>)
+      list(APPEND libflags -l${lib})
+    elseif(lib MATCHES "^-l.*")
+      list(APPEND libflags ${lib})
+    elseif(IS_ABSOLUTE ${lib})
+      get_filename_component(name_we ${lib} NAME_WE)
+      get_filename_component(folder  ${lib} PATH)
+
+      string(REGEX MATCH "^lib(.*)" __match ${name_we})
+      list(APPEND libflags -l${CMAKE_MATCH_1})
+      list(APPEND folders    ${folder})
+    else()
+      message(FATAL_ERROR "Logic error. Need to update cmake script")
+    endif()
+  endforeach()
+
+  caffe_list_unique(libflags folders)
+
+  set(${folders_var} ${folders} PARENT_SCOPE)
+  set(${flags_var} ${libflags} PARENT_SCOPE)
+  set(${frameworks_var} ${__framework} PARENT_SCOPE)
+endfunction()
+
+################################################################################################
+# Helper function to detect Darwin version, e.g. 10.8, 10.9, 10.10, ....
+# Usage:
+#   caffe_detect_darwin_version(<version_variable>)
+function(caffe_detect_darwin_version output_var)
+  if(APPLE)
+    execute_process(COMMAND /usr/bin/sw_vers -productVersion
+                    RESULT_VARIABLE __sw_vers OUTPUT_VARIABLE __sw_vers_out
+                    ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+    set(${output_var} ${__sw_vers_out} PARENT_SCOPE)
+  else()
+    set(${output_var} "" PARENT_SCOPE)
+  endif()
+endfunction()
diff --git a/cmake/lint.cmake b/cmake/lint.cmake
new file mode 100644
index 0000000..70a0065
--- /dev/null
+++ b/cmake/lint.cmake
@@ -0,0 +1,50 @@
+
+set(CMAKE_SOURCE_DIR ..)
+set(LINT_COMMAND ${CMAKE_SOURCE_DIR}/scripts/cpp_lint.py)
+set(SRC_FILE_EXTENSIONS h hpp hu c cpp cu cc)
+set(EXCLUDE_FILE_EXTENSIONS pb.h pb.cc)
+set(LINT_DIRS include src/caffe examples tools python matlab)
+
+cmake_policy(SET CMP0009 NEW)  # suppress cmake warning
+
+# find all files of interest
+foreach(ext ${SRC_FILE_EXTENSIONS})
+    foreach(dir ${LINT_DIRS})
+        file(GLOB_RECURSE FOUND_FILES ${CMAKE_SOURCE_DIR}/${dir}/*.${ext})
+        set(LINT_SOURCES ${LINT_SOURCES} ${FOUND_FILES})
+    endforeach()
+endforeach()
+
+# find all files that should be excluded
+foreach(ext ${EXCLUDE_FILE_EXTENSIONS})
+    file(GLOB_RECURSE FOUND_FILES ${CMAKE_SOURCE_DIR}/*.${ext})
+    set(EXCLUDED_FILES ${EXCLUDED_FILES} ${FOUND_FILES})
+endforeach()
+
+# exclude generated pb files
+list(REMOVE_ITEM LINT_SOURCES ${EXCLUDED_FILES})
+
+execute_process(
+    COMMAND ${LINT_COMMAND} ${LINT_SOURCES}
+    ERROR_VARIABLE LINT_OUTPUT
+    ERROR_STRIP_TRAILING_WHITESPACE
+)
+
+string(REPLACE "\n" ";" LINT_OUTPUT ${LINT_OUTPUT})
+
+list(GET LINT_OUTPUT -1 LINT_RESULT)
+list(REMOVE_AT LINT_OUTPUT -1)
+string(REPLACE " " ";" LINT_RESULT ${LINT_RESULT})
+list(GET LINT_RESULT -1 NUM_ERRORS)
+if(NUM_ERRORS GREATER 0)
+    foreach(msg ${LINT_OUTPUT})
+        string(FIND ${msg} "Done" result)
+        if(result LESS 0)
+            message(STATUS ${msg})
+        endif()
+    endforeach()
+    message(FATAL_ERROR "Lint found ${NUM_ERRORS} errors!")
+else()
+    message(STATUS "Lint did not find any errors!")
+endif()
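+
+# This is a standalone script (note the relative CMAKE_SOURCE_DIR at the top);
+# run it in CMake script mode from a build directory inside the source tree,
+# e.g. (illustrative): cmake -P ../cmake/lint.cmake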
+
diff --git a/data/cifar10/get_cifar10.sh b/data/cifar10/get_cifar10.sh
new file mode 100755
index 0000000..623c848
--- /dev/null
+++ b/data/cifar10/get_cifar10.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env sh
+# This script downloads the CIFAR10 (binary version) data and unzips it.
+
+DIR="$( cd "$(dirname "$0")" ; pwd -P )"
+cd $DIR
+
+echo "Downloading..."
+
+wget --no-check-certificate http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz
+
+echo "Unzipping..."
+
+tar -xf cifar-10-binary.tar.gz && rm -f cifar-10-binary.tar.gz
+mv cifar-10-batches-bin/* . && rm -rf cifar-10-batches-bin
+
+# Creation is split out because leveldb sometimes causes segfault
+# and needs to be re-created.
+
+echo "Done."
diff --git a/data/ilsvrc12/get_ilsvrc_aux.sh b/data/ilsvrc12/get_ilsvrc_aux.sh
new file mode 100755
index 0000000..b9b85d2
--- /dev/null
+++ b/data/ilsvrc12/get_ilsvrc_aux.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env sh
+#
+# N.B. This does not download the ILSVRC12 data set, as it is gargantuan.
+# This script downloads the imagenet example auxiliary files including:
+# - the ilsvrc12 image mean, binaryproto
+# - synset ids and words
+# - Python pickle-format data of ImageNet graph structure and relative infogain
+# - the training splits with labels
+
+DIR="$( cd "$(dirname "$0")" ; pwd -P )"
+cd $DIR
+
+echo "Downloading..."
+
+wget http://dl.caffe.berkeleyvision.org/caffe_ilsvrc12.tar.gz
+
+echo "Unzipping..."
+
+tar -xf caffe_ilsvrc12.tar.gz && rm -f caffe_ilsvrc12.tar.gz
+
+echo "Done."
diff --git a/data/mnist/get_mnist.sh b/data/mnist/get_mnist.sh
new file mode 100755
index 0000000..8eb6aee
--- /dev/null
+++ b/data/mnist/get_mnist.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env sh
+# This script downloads the MNIST data and unzips it.
+
+DIR="$( cd "$(dirname "$0")" ; pwd -P )"
+cd $DIR
+
+echo "Downloading..."
+
+wget --no-check-certificate http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz
+wget --no-check-certificate http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz
+wget --no-check-certificate http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz
+wget --no-check-certificate http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz
+
+echo "Unzipping..."
+
+gunzip train-images-idx3-ubyte.gz
+gunzip train-labels-idx1-ubyte.gz
+gunzip t10k-images-idx3-ubyte.gz
+gunzip t10k-labels-idx1-ubyte.gz
+
+# Creation is split out because leveldb sometimes causes segfault
+# and needs to be re-created.
+
+echo "Done."
diff --git a/docs/CMakeLists.txt b/docs/CMakeLists.txt
new file mode 100644
index 0000000..ae47e46
--- /dev/null
+++ b/docs/CMakeLists.txt
@@ -0,0 +1,106 @@
+# Building docs script
+# Requirements:
+#   sudo apt-get install doxygen texlive ruby-dev
+#   sudo gem install jekyll execjs therubyracer
+
+if(NOT BUILD_docs OR NOT DOXYGEN_FOUND)
+  return()
+endif()
+
+#################################################################################################
+# Gather docs from <root>/examples/**/readme.md
+function(gather_readmes_as_prebuild_cmd target gathered_dir root)
+  set(full_gathered_dir ${root}/${gathered_dir})
+
+  file(GLOB_RECURSE readmes ${root}/examples/readme.md ${root}/examples/README.md)
+  foreach(file ${readmes})
+    # Only use file if it is to be included in docs.
+    file(STRINGS ${file} file_lines REGEX "include_in_docs: true")
+
+    if(file_lines)
+      # Since everything is called readme.md, rename it by its dirname.
+      file(RELATIVE_PATH file ${root} ${file})
+      get_filename_component(folder ${file} PATH)
+      set(new_filename ${full_gathered_dir}/${folder}.md)
+
+      # The folder value might be like <subfolder>/readme.md, so create the directory first.
+      get_filename_component(new_folder ${new_filename} PATH)
+      add_custom_command(TARGET ${target} PRE_BUILD
+        COMMAND ${CMAKE_COMMAND} -E make_directory ${new_folder}
+        COMMAND ln -sf ${root}/${file} ${new_filename}
+        COMMENT "Creating symlink ${new_filename} -> ${root}/${file}"
+        WORKING_DIRECTORY ${root} VERBATIM)
+    endif()
+  endforeach()
+endfunction()
+
+################################################################################################
+# Gather docs from examples/*.ipynb and add YAML front-matter.
+function(gather_notebooks_as_prebuild_cmd target gathered_dir root)
+  set(full_gathered_dir ${root}/${gathered_dir})
+
+  if(NOT PYTHON_EXECUTABLE)
+    message(STATUS "Python interpeter is not found. Can't include *.ipynb files in docs. Skipping...")
+    return()
+  endif()
+
+  file(GLOB_RECURSE notebooks ${root}/examples/*.ipynb)
+  foreach(file ${notebooks})
+    file(RELATIVE_PATH file ${root} ${file})
+    set(new_filename ${full_gathered_dir}/${file})
+
+    get_filename_component(new_folder ${new_filename} PATH)
+    add_custom_command(TARGET ${target} PRE_BUILD
+      COMMAND ${CMAKE_COMMAND} -E make_directory ${new_folder}
+      COMMAND ${PYTHON_EXECUTABLE} scripts/copy_notebook.py ${file} ${new_filename}
+      COMMENT "Copying notebook ${file} to ${new_filename}"
+      WORKING_DIRECTORY ${root} VERBATIM)
+  endforeach()
+
+  set(${outputs_var} ${outputs} PARENT_SCOPE)
+endfunction()
+
+################################################################################################
+########################## [ Non macro part ] ##################################################
+
+# Gathering is done at each 'make docs'
+file(REMOVE_RECURSE ${PROJECT_SOURCE_DIR}/docs/gathered)
+
+# Doxygen config file path
+set(DOXYGEN_config_file ${PROJECT_SOURCE_DIR}/.Doxyfile CACHE FILEPATH "Doxygen config file")
+
+# Adding docs target
+add_custom_target(docs COMMAND ${DOXYGEN_EXECUTABLE} ${DOXYGEN_config_file}
+                       WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
+                       COMMENT "Launching doxygen..." VERBATIM)
+
+# Gathering examples into docs subfolder
+gather_notebooks_as_prebuild_cmd(docs docs/gathered ${PROJECT_SOURCE_DIR})
+gather_readmes_as_prebuild_cmd(docs docs/gathered  ${PROJECT_SOURCE_DIR})
+
+# Auto detect output directory
+file(STRINGS ${DOXYGEN_config_file} config_line REGEX "OUTPUT_DIRECTORY[ \t]+=[^=].*")
+if(config_line)
+  string(REGEX MATCH "OUTPUT_DIRECTORY[ \t]+=([^=].*)" __ver_check "${config_line}")
+  string(STRIP ${CMAKE_MATCH_1} output_dir)
+  message(STATUS "Detected Doxygen OUTPUT_DIRECTORY: ${output_dir}")
+else()
+  set(output_dir ./doxygen/)
+  message(STATUS "Can't find OUTPUT_DIRECTORY in doxygen config file. Try to use default: ${output_dir}")
+endif()
+
+if(NOT IS_ABSOLUTE ${output_dir})
+  set(output_dir ${PROJECT_SOURCE_DIR}/${output_dir})
+  get_filename_component(output_dir ${output_dir} ABSOLUTE)
+endif()
+
+# Creates a symlink in the docs subfolder to the code documentation built by doxygen
+add_custom_command(TARGET docs POST_BUILD VERBATIM
+                   COMMAND ln -sfn "${output_dir}/html" doxygen
+                   WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/docs
+                   COMMENT "Creating symlink ${PROJECT_SOURCE_DIR}/docs/doxygen -> ${output_dir}/html")
+
+# for quick launch of jekyll
+add_custom_target(jekyll COMMAND jekyll serve -w -s . -d _site --port=4000
+                         WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/docs
+                         COMMENT "Launching jekyll..." VERBATIM)
diff --git a/docs/CNAME b/docs/CNAME
new file mode 100644
index 0000000..eee1ae2
--- /dev/null
+++ b/docs/CNAME
@@ -0,0 +1 @@
+caffe.berkeleyvision.org
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 0000000..8f1781e
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,5 @@
+# Caffe Documentation
+
+To generate the documentation, run `$CAFFE_ROOT/scripts/build_docs.sh`.
+
+To push your changes to the documentation to the gh-pages branch of your or the BVLC repo, run `$CAFFE_ROOT/scripts/deploy_docs.sh <repo_name>`.
diff --git a/docs/_config.yml b/docs/_config.yml
new file mode 100644
index 0000000..95aec12
--- /dev/null
+++ b/docs/_config.yml
@@ -0,0 +1,7 @@
+defaults:
+  -
+    scope:
+      path: "" # an empty string here means all files in the project
+    values:
+      layout: "default"
+
diff --git a/docs/_layouts/default.html b/docs/_layouts/default.html
new file mode 100644
index 0000000..b8efe60
--- /dev/null
+++ b/docs/_layouts/default.html
@@ -0,0 +1,62 @@
+<!doctype html>
+<html>
+  <head>
+    <!-- MathJax -->
+    <script type="text/javascript"
+      src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML">
+    </script>
+    <meta charset="utf-8">
+    <meta http-equiv="X-UA-Compatible" content="chrome=1">
+    <title>
+      Caffe {% if page contains 'title' %}| {{ page.title }}{% endif %}
+    </title>
+
+    <link rel="icon" type="image/png" href="/images/caffeine-icon.png">
+
+    <link rel="stylesheet" href="/stylesheets/reset.css">
+    <link rel="stylesheet" href="/stylesheets/styles.css">
+    <link rel="stylesheet" href="/stylesheets/pygment_trac.css">
+
+    <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
+    <!--[if lt IE 9]>
+    <script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
+    <![endif]-->
+  </head>
+  <body>
+  <script>
+    (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+    (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
+    m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+    })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+
+    ga('create', 'UA-46255508-1', 'daggerfs.com');
+    ga('send', 'pageview');
+  </script>
+    <div class="wrapper">
+      <header>
+        <h1 class="header"><a href="/">Caffe</a></h1>
+        <p class="header">
+          Deep learning framework by the <a class="header name" href="http://bvlc.eecs.berkeley.edu/">BVLC</a>
+        </p>
+        <p class="header">
+          Created by
+          <br>
+          <a class="header name" href="http://daggerfs.com/">Yangqing Jia</a>
+          <br>
+          Lead Developer
+          <br>
+          <a class="header name" href="http://imaginarynumber.net/">Evan Shelhamer</a>
+        <ul>
+          <li>
+            <a class="buttons github" href="https://github.com/BVLC/caffe">View On GitHub</a>
+          </li>
+        </ul>
+      </header>
+      <section>
+
+      {{ content }}
+
+      </section>
+  </div>
+  </body>
+</html>
diff --git a/docs/development.md b/docs/development.md
new file mode 100644
index 0000000..107c2c3
--- /dev/null
+++ b/docs/development.md
@@ -0,0 +1,120 @@
+---
+title: Developing and Contributing
+---
+# Development and Contributing
+
+Caffe is developed with the active participation of the community.<br>
+The [BVLC](http://bvlc.eecs.berkeley.edu/) brewers welcome all contributions!
+
+The exact details of contributions are recorded by versioning and cited in our [acknowledgements](http://caffe.berkeleyvision.org/#acknowledgements).
+This method is impartial and always up-to-date.
+
+## License
+
+Caffe is licensed under the terms in [LICENSE](https://github.com/BVLC/caffe/blob/master/LICENSE). By contributing to the project, you agree to the license and copyright terms therein and release your contribution under these terms.
+
+## Copyright
+
+Caffe uses a shared copyright model: each contributor holds copyright over their contributions to Caffe. The project versioning records all such contribution and copyright details.
+
+If a contributor wants to further mark their specific copyright on a particular contribution, they should indicate their copyright solely in the commit message of the change when it is committed. Do not include copyright notices in files for this purpose.
+
+### Documentation
+
+This website, written with [Jekyll](http://jekyllrb.com/), acts as the official Caffe documentation -- simply run `scripts/build_docs.sh` and view the website at `http://0.0.0.0:4000`.
+
+We prefer tutorials and examples to be documented close to where they live, in `readme.md` files.
+The `build_docs.sh` script gathers all `examples/**/readme.md` and `examples/*.ipynb` files, and makes a table of contents.
+To be included in the docs, the readme files must be annotated with [YAML front-matter](http://jekyllrb.com/docs/frontmatter/), including the flag `include_in_docs: true`.
+Similarly for IPython notebooks: simply include `"include_in_docs": true` in the `"metadata"` JSON field.
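+
+For example, an example's `readme.md` might start with front-matter along these lines (the values are illustrative; the keys match what `docs/index.md` expects):
+
+    ---
+    title: LeNet MNIST Tutorial
+    description: Train and test Caffe on the MNIST dataset.
+    category: example
+    include_in_docs: true
+    priority: 1
+    ---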
+
+Other docs, such as installation guides, are written in the `docs` directory and manually linked to from the `index.md` page.
+
+We strive to provide lots of usage examples, and to document all code in docstrings.
+We absolutely appreciate any contribution to this effort!
+
+### Versioning
+
+The `master` branch receives all new development including community contributions.
+We try to keep it in a reliable state, but it is the bleeding edge, and things do get broken every now and then.
+BVLC maintainers will periodically make releases by marking stable checkpoints as tags and maintenance branches. [Past releases](https://github.com/BVLC/caffe/releases) are catalogued online.
+
+#### Issues & Pull Request Protocol
+
+Post [Issues](https://github.com/BVLC/caffe/issues) to propose features, report [bugs], and discuss framework code.
+Large-scale development work is guided by [milestones], which are sets of Issues selected for bundling as releases.
+
+Please note that since the core developers are largely researchers, we may work on a feature in isolation for some time before releasing it to the community, so as to claim honest academic contribution.
+We do release things as soon as a reasonable technical report may be written, and we still aim to inform the community of ongoing development through Github Issues.
+
+**When you are ready to develop a feature or fix a bug, follow this protocol**:
+
+- Develop in [feature branches] with descriptive names. Branch off of the latest `master`.
+- Bring your work up-to-date by [rebasing] onto the latest `master` when done.
+(Groom your changes by [interactive rebase], if you'd like.)
+- [Pull request] your contribution to `BVLC/caffe`'s `master` branch for discussion and review.
+  - Make PRs *as soon as development begins*, to let discussion guide development.
+  - A PR is only ready for merge review when it is a fast-forward merge, and all code is documented, linted, and tested -- that means your PR must include tests!
+- When the PR satisfies the above properties, use comments to request maintainer review.
+
+The following is a poetic presentation of the protocol in code form.
+
+#### [Shelhamer's](https://github.com/shelhamer) “life of a branch in four acts”
+
+Make the `feature` branch off of the latest `bvlc/master`
+
+    git checkout master
+    git pull upstream master
+    git checkout -b feature
+    # do your work, make commits
+
+Prepare to merge by rebasing your branch on the latest `bvlc/master`
+
+    # make sure master is fresh
+    git checkout master
+    git pull upstream master
+    # rebase your branch on the tip of master
+    git checkout feature
+    git rebase master
+
+Push your branch to pull request it into `BVLC/caffe:master`
+
+    git push origin feature
+    # ...make pull request to master...
+
+Now make a pull request! You can do this from the command line (`git pull-request -b master`) if you install [hub](https://github.com/github/hub). Hub has many other magical uses.
+
+The pull request of `feature` into `master` will be a clean merge. Applause.
+
+[bugs]: https://github.com/BVLC/caffe/issues?labels=bug&page=1&state=open
+[milestones]: https://github.com/BVLC/caffe/issues?milestone=1
+[Pull request]: https://help.github.com/articles/using-pull-requests
+[interactive rebase]: https://help.github.com/articles/interactive-rebase
+[rebasing]: http://git-scm.com/book/en/Git-Branching-Rebasing
+[feature branches]: https://www.atlassian.com/git/workflows#!workflow-feature-branch
+
+**Historical note**: Caffe once relied on a two branch `master` and `dev` workflow.
+PRs from this time are still open but these will be merged into `master` or closed.
+
+### Testing
+
+Run `make runtest` to check the project tests. New code requires new tests. Pull requests that fail tests will not be accepted.
+
+The `gtest` framework we use provides many additional options, which you can access by running the test binaries directly. One of the more useful options is `--gtest_filter`, which allows you to filter tests by name:
+
+    # run all tests with CPU in the name
+    build/test/test_all.testbin --gtest_filter='*CPU*'
+
+    # run all tests without GPU in the name (note the leading minus sign)
+    build/test/test_all.testbin --gtest_filter=-'*GPU*'
+
+To get a list of all options `googletest` provides, simply pass the `--help` flag:
+
+    build/test/test_all.testbin --help
+
+### Style
+
+- **Run `make lint` to check C++ code.**
+- Wrap lines at 80 chars.
+- Follow [Google C++ style](http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml) and [Google python style](http://google-styleguide.googlecode.com/svn/trunk/pyguide.html) + [PEP 8](http://legacy.python.org/dev/peps/pep-0008/).
+- Remember that “a foolish consistency is the hobgoblin of little minds,” so use your best judgement to write the clearest code for your particular case.
diff --git a/docs/images/GitHub-Mark-64px.png b/docs/images/GitHub-Mark-64px.png
new file mode 100644
index 0000000..182a1a3
Binary files /dev/null and b/docs/images/GitHub-Mark-64px.png differ
diff --git a/docs/images/caffeine-icon.png b/docs/images/caffeine-icon.png
new file mode 100644
index 0000000..88b4a00
Binary files /dev/null and b/docs/images/caffeine-icon.png differ
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..932b3b5
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,106 @@
+---
+title: Deep Learning Framework
+---
+
+# Caffe
+
+Caffe is a deep learning framework made with expression, speed, and modularity in mind.
+It is developed by the Berkeley Vision and Learning Center ([BVLC](http://bvlc.eecs.berkeley.edu)) and by community contributors.
+[Yangqing Jia](http://daggerfs.com) created the project during his PhD at UC Berkeley.
+Caffe is released under the [BSD 2-Clause license](https://github.com/BVLC/caffe/blob/master/LICENSE).
+
+Check out our web image classification [demo](http://demo.caffe.berkeleyvision.org)!
+
+## Why Caffe?
+
+**Expressive architecture** encourages application and innovation.
+Models and optimization are defined by configuration without hard-coding.
+Switch between CPU and GPU by setting a single flag: train on a GPU machine, then deploy to commodity clusters or mobile devices.
+
+**Extensible code** fosters active development.
+In Caffe's first year, it was forked by over 1,000 developers and had many significant changes contributed back.
+Thanks to these contributors the framework tracks the state-of-the-art in both code and models.
+
+**Speed** makes Caffe perfect for research experiments and industry deployment.
+Caffe can process **over 60M images per day** with a single NVIDIA K40 GPU\*.
+That's 1 ms/image for inference and 4 ms/image for learning.
+We believe that Caffe is the fastest convnet implementation available.
+
+**Community**: Caffe already powers academic research projects, startup prototypes, and even large-scale industrial applications in vision, speech, and multimedia.
+Join our community of brewers on the [caffe-users group](https://groups.google.com/forum/#!forum/caffe-users) and [Github](https://github.com/BVLC/caffe/).
+
+<p class="footnote" markdown="1">
+\* With the ILSVRC2012-winning [SuperVision](http://www.image-net.org/challenges/LSVRC/2012/supervision.pdf) model and caching IO.
+Consult performance [details](/performance_hardware.html).
+</p>
+
+## Documentation
+
+- [DIY Deep Learning for Vision with Caffe](https://docs.google.com/presentation/d/1UeKXVgRvvxg9OUdh_UiC5G71UMscNPlvArsWER41PsU/edit#slide=id.p)<br>
+Tutorial presentation.
+- [Tutorial Documentation](/tutorial)<br>
+Practical guide and framework reference.
+- [arXiv / ACM MM '14 paper](http://arxiv.org/abs/1408.5093)<br>
+A 4-page report for the ACM Multimedia Open Source competition (arXiv:1408.5093v1).
+- [Installation instructions](/installation.html)<br>
+Tested on Ubuntu, Red Hat, OS X.
+- [Model Zoo](/model_zoo.html)<br>
+BVLC suggests a standard distribution format for Caffe models, and provides trained models.
+- [Developing & Contributing](/development.html)<br>
+Guidelines for development and contributing to Caffe.
+- [API Documentation](/doxygen/annotated.html)<br>
+Developer documentation automagically generated from code comments.
+
+### Examples
+
+{% assign examples = site.pages | where:'category','example' | sort: 'priority' %}
+{% for page in examples %}
+- <div><a href="{{page.url}}">{{page.title}}</a><br>{{page.description}}</div>
+{% endfor %}
+
+### Notebook Examples
+
+{% assign notebooks = site.pages | where:'category','notebook' | sort: 'priority' %}
+{% for page in notebooks %}
+- <div><a href="http://nbviewer.ipython.org/github/BVLC/caffe/blob/master/{{page.original_path}}">{{page.title}}</a><br>{{page.description}}</div>
+{% endfor %}
+
+## Citing Caffe
+
+Please cite Caffe in your publications if it helps your research:
+
+    @article{jia2014caffe,
+      Author = {Jia, Yangqing and Shelhamer, Evan and Donahue, Jeff and Karayev, Sergey and Long, Jonathan and Girshick, Ross and Guadarrama, Sergio and Darrell, Trevor},
+      Journal = {arXiv preprint arXiv:1408.5093},
+      Title = {Caffe: Convolutional Architecture for Fast Feature Embedding},
+      Year = {2014}
+    }
+
+If you do publish a paper where Caffe helped your research, we encourage you to update the [publications wiki](https://github.com/BVLC/caffe/wiki/Publications).
+Citations are also tracked automatically by [Google Scholar](http://scholar.google.com/scholar?oi=bibs&hl=en&cites=17333247995453974016).
+
+## Contacting Us
+
+Join the [caffe-users group](https://groups.google.com/forum/#!forum/caffe-users) to ask questions and discuss methods and models. This is where we talk about usage, installation, and applications.
+
+Framework development discussions and thorough bug reports are collected on [Issues](https://github.com/BVLC/caffe/issues).
+
+Contact [caffe-dev](mailto:caffe-dev@googlegroups.com) if you have a confidential proposal for the framework *and the ability to act on it*.
+Requests for features, explanations, or personal help will be ignored; post to [caffe-users](https://groups.google.com/forum/#!forum/caffe-users) instead.
+
+The core Caffe developers offer [consulting services](mailto:caffe-coldpress@googlegroups.com) for appropriate projects.
+
+## Acknowledgements
+
+The BVLC Caffe developers would like to thank NVIDIA for GPU donation, A9 and Amazon Web Services for a research grant in support of Caffe development and reproducible research in deep learning, and BVLC PI [Trevor Darrell](http://www.eecs.berkeley.edu/~trevor/) for guidance.
+
+The BVLC members who have contributed to Caffe are (alphabetical by first name):
+[Eric Tzeng](https://github.com/erictzeng), [Evan Shelhamer](http://imaginarynumber.net/), [Jeff Donahue](http://jeffdonahue.com/), [Jon Long](https://github.com/longjon), [Ross Girshick](http://www.cs.berkeley.edu/~rbg/), [Sergey Karayev](http://sergeykarayev.com/), [Sergio Guadarrama](http://www.eecs.berkeley.edu/~sguada/), and [Yangqing Jia](http://daggerfs.com/).
+
+The open-source community plays an important and growing role in Caffe's development.
+Check out the Github [project pulse](https://github.com/BVLC/caffe/pulse) for recent activity and the [contributors](https://github.com/BVLC/caffe/graphs/contributors) for the full list.
+
+We sincerely appreciate your interest and contributions!
+If you'd like to contribute, please read the [developing & contributing](development.html) guide.
+
+Yangqing would like to give a personal thanks to the NVIDIA Academic program for providing GPUs, [Oriol Vinyals](http://www1.icsi.berkeley.edu/~vinyals/) for discussions along the journey, and BVLC PI [Trevor Darrell](http://www.eecs.berkeley.edu/~trevor/) for advice.
diff --git a/docs/install_apt.md b/docs/install_apt.md
new file mode 100644
index 0000000..0fa205a
--- /dev/null
+++ b/docs/install_apt.md
@@ -0,0 +1,50 @@
+---
+title: Installation: Ubuntu
+---
+
+# Ubuntu Installation
+
+**General dependencies**
+
+    sudo apt-get install libprotobuf-dev libleveldb-dev libsnappy-dev libopencv-dev libhdf5-serial-dev
+    sudo apt-get install --no-install-recommends libboost-all-dev
+
+**CUDA**: Install via the NVIDIA package instead of `apt-get` to be certain of the library and driver versions.
+Install the library and latest driver separately; the driver bundled with the library is usually out-of-date.
+This can be skipped for CPU-only installation.
+
+**BLAS**: install ATLAS by `sudo apt-get install libatlas-base-dev` or install OpenBLAS or MKL for better CPU performance.
+
+**Python** (optional): if you use the default Python you will need to `sudo apt-get install` the `python-dev` package to have the Python headers for building the pycaffe interface.
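+
+For example, assuming the stock Ubuntu Python 2.7:
+
+    sudo apt-get install python-dev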
+
+**Remaining dependencies, 14.04**
+
+Everything is packaged in 14.04.
+
+    sudo apt-get install libgflags-dev libgoogle-glog-dev liblmdb-dev protobuf-compiler
+
+**Remaining dependencies, 12.04**
+
+These dependencies need manual installation in 12.04.
+
+    # glog
+    wget https://google-glog.googlecode.com/files/glog-0.3.3.tar.gz
+    tar zxvf glog-0.3.3.tar.gz
+    cd glog-0.3.3
+    ./configure
+    make && make install
+    # gflags
+    wget https://github.com/schuhschuh/gflags/archive/master.zip
+    unzip master.zip
+    cd gflags-master
+    mkdir build && cd build
+    export CXXFLAGS="-fPIC" && cmake .. && make VERBOSE=1
+    make && make install
+    # lmdb
+    git clone https://gitorious.org/mdb/mdb.git
+    cd mdb/libraries/liblmdb
+    make && make install
+
+Note that glog does not compile with the most recent gflags version (2.1), so before that is resolved you will need to build glog first, as shown above.
+
+Continue with [compilation](installation.html#compilation).
diff --git a/docs/install_osx.md b/docs/install_osx.md
new file mode 100644
index 0000000..6405d8a
--- /dev/null
+++ b/docs/install_osx.md
@@ -0,0 +1,128 @@
+---
+title: Installation: OS X
+---
+
+# OS X Installation
+
+We highly recommend using the [Homebrew](http://brew.sh/) package manager.
+Ideally you could start from a clean `/usr/local` to avoid conflicts.
+In the following, we assume that you're using Anaconda Python and Homebrew.
+
+**CUDA**: Install via the NVIDIA package that includes both CUDA and the bundled driver. **CUDA 7 is strongly suggested.** Older CUDA versions require `libstdc++`, while clang++ is the default compiler and `libc++` the default standard library on OS X 10.9+. This disagreement makes it necessary to change the compilation settings for each of the dependencies, and that process is error-prone.
+
+**Library Path**: We find that everything compiles successfully if `$LD_LIBRARY_PATH` is not set at all, and `$DYLD_FALLBACK_LIBRARY_PATH` is set to provide CUDA, Python, and other relevant libraries (e.g. `/usr/local/cuda/lib:$HOME/anaconda/lib:/usr/local/lib:/usr/lib`).
+In other `ENV` settings, things may not work as expected.
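+
+For example, a minimal sketch for your shell profile using the paths suggested above (adjust the CUDA and Anaconda locations for your system):
+
+    unset LD_LIBRARY_PATH
+    export DYLD_FALLBACK_LIBRARY_PATH=/usr/local/cuda/lib:$HOME/anaconda/lib:/usr/local/lib:/usr/lib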
+
+**General dependencies**
+
+    brew install -vd snappy leveldb gflags glog szip lmdb
+    # need the homebrew science source for OpenCV and hdf5
+    brew tap homebrew/science
+    brew install hdf5 opencv
+
+If using Anaconda Python, a modification to the OpenCV formula might be needed.
+Do `brew edit opencv` and change the lines that look like the two lines below to exactly the two lines below:
+
+      -DPYTHON_LIBRARY=#{py_prefix}/lib/libpython2.7.dylib
+      -DPYTHON_INCLUDE_DIR=#{py_prefix}/include/python2.7
+
+If using Anaconda Python, HDF5 is bundled and the `hdf5` formula can be skipped.
+
+**Remaining dependencies, with / without Python**
+
+    # with Python pycaffe needs dependencies built from source
+    brew install --build-from-source --with-python -vd protobuf
+    brew install --build-from-source -vd boost boost-python
+    # without Python the usual installation suffices
+    brew install protobuf boost
+
+**BLAS**: already installed as the [Accelerate / vecLib Framework](https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man7/Accelerate.7.html). OpenBLAS and MKL are alternatives for faster CPU computation.
+
+**Python** (optional): Anaconda is the preferred Python.
+If you decide against it, please use Homebrew.
+Check that Caffe and dependencies are linking against the same, desired Python.
+
+Continue with [compilation](installation.html#compilation).
+
+## libstdc++ installation
+
+This route is not for the faint of heart.
+For OS X 10.10 and 10.9 you should install CUDA 7 and follow the instructions above.
+If that is not an option, take a deep breath and carry on.
+
+In OS X 10.9+, clang++ is the default C++ compiler and uses `libc++` as the standard library.
+However, NVIDIA CUDA (even version 6.0) currently links only with `libstdc++`.
+This makes it necessary to change the compilation settings for each of the dependencies.
+
+We do this by modifying the Homebrew formulae before installing any packages.
+Make sure that Homebrew doesn't install any software dependencies in the background; all packages must be linked to `libstdc++`.
+
+The prerequisite Homebrew formulae are
+
+    boost snappy leveldb protobuf gflags glog szip lmdb homebrew/science/opencv
+
+For each of these formulae, run `brew edit FORMULA` and add the ENV definitions as shown:
+
+      def install
+          # ADD THE FOLLOWING:
+          ENV.append "CXXFLAGS", "-stdlib=libstdc++"
+          ENV.append "CFLAGS", "-stdlib=libstdc++"
+          ENV.append "LDFLAGS", "-stdlib=libstdc++ -lstdc++"
+          # The following is necessary because libtool likes to strip LDFLAGS:
+          ENV["CXX"] = "/usr/bin/clang++ -stdlib=libstdc++"
+          ...
+
+To edit the formulae in turn, run
+
+    for x in snappy leveldb protobuf gflags glog szip boost boost-python lmdb homebrew/science/opencv; do brew edit $x; done
+
+After this, run
+
+    for x in snappy leveldb gflags glog szip lmdb homebrew/science/opencv; do brew uninstall $x; brew install --build-from-source -vd $x; done
+    brew uninstall protobuf; brew install --build-from-source --with-python -vd protobuf
+    brew install --build-from-source -vd boost boost-python
+
+If this is not done exactly right then linking errors will trouble you.
+
+**Homebrew versioning**: note that Homebrew maintains itself as a separate git repository, and that making the above `brew edit FORMULA` changes will change files in your local copy of Homebrew's master branch. By default, this will prevent you from updating Homebrew using `brew update`, as you will get an error message like the following:
+
+    $ brew update
+    error: Your local changes to the following files would be overwritten by merge:
+      Library/Formula/lmdb.rb
+    Please, commit your changes or stash them before you can merge.
+    Aborting
+    Error: Failure while executing: git pull -q origin refs/heads/master:refs/remotes/origin/master
+
+One solution is to commit your changes to a separate Homebrew branch, run `brew update`, and rebase your changes onto the updated master. You'll have to do this both for the main Homebrew repository in `/usr/local/` and the Homebrew science repository that contains OpenCV in  `/usr/local/Library/Taps/homebrew/homebrew-science`, as follows:
+
+    cd /usr/local
+    git checkout -b caffe
+    git add .
+    git commit -m "Update Caffe dependencies to use libstdc++"
+    cd /usr/local/Library/Taps/homebrew/homebrew-science
+    git checkout -b caffe
+    git add .
+    git commit -m "Update Caffe dependencies"
+
+Then, whenever you want to update homebrew, switch back to the master branches, do the update, rebase the caffe branches onto master and fix any conflicts:
+
+    # Switch back to the homebrew master branches
+    cd /usr/local
+    git checkout master
+    cd /usr/local/Library/Taps/homebrew/homebrew-science
+    git checkout master
+
+    # Update homebrew; hopefully this works without errors!
+    brew update
+
+    # Switch back to the caffe branches with the formulae that you modified earlier
+    cd /usr/local
+    git rebase master caffe
+    # Fix any merge conflicts and commit to caffe branch
+    cd /usr/local/Library/Taps/homebrew/homebrew-science
+    git rebase master caffe
+    # Fix any merge conflicts and commit to caffe branch
+
+    # Done!
+
+At this point, you should be running the latest Homebrew packages and your Caffe-related modifications will remain in place.
diff --git a/docs/install_yum.md b/docs/install_yum.md
new file mode 100644
index 0000000..478e7d9
--- /dev/null
+++ b/docs/install_yum.md
@@ -0,0 +1,45 @@
+---
+title: Installation: RHEL / Fedora / CentOS
+---
+
+# RHEL / Fedora / CentOS Installation
+
+**General dependencies**
+
+    sudo yum install protobuf-devel leveldb-devel snappy-devel opencv-devel boost-devel hdf5-devel
+
+**Remaining dependencies, recent OS**
+
+    sudo yum install gflags-devel glog-devel lmdb-devel
+
+**Remaining dependencies, if not found**
+
+    # glog
+    wget https://google-glog.googlecode.com/files/glog-0.3.3.tar.gz
+    tar zxvf glog-0.3.3.tar.gz
+    cd glog-0.3.3
+    ./configure
+    make && make install
+    # gflags
+    wget https://github.com/schuhschuh/gflags/archive/master.zip
+    unzip master.zip
+    cd gflags-master
+    mkdir build && cd build
+    export CXXFLAGS="-fPIC" && cmake .. && make VERBOSE=1
+    make && make install
+    # lmdb
+    git clone git://gitorious.org/mdb/mdb.git
+    cd mdb/libraries/liblmdb
+    make && make install
+
+Note that glog does not compile with the most recent gflags version (2.1), so before that is resolved you will need to build glog first, as shown above.
+
+**CUDA**: Install via the NVIDIA package instead of `yum` to be certain of the library and driver versions.
+Install the library and latest driver separately; the driver bundled with the library is usually out-of-date.
+
+**BLAS**: install ATLAS by `sudo yum install atlas-devel` or install OpenBLAS or MKL for better CPU performance. For the Makefile build, uncomment and set `BLAS_LIB` accordingly, as ATLAS is usually installed under `/usr/lib[64]/atlas`.
+
+**Python** (optional): if you use the default Python you will need to `sudo yum install` the `python-devel` package to have the Python headers for building the pycaffe wrapper.
+
+Continue with [compilation](installation.html#compilation).
diff --git a/docs/installation.md b/docs/installation.md
new file mode 100644
index 0000000..144e6a3
--- /dev/null
+++ b/docs/installation.md
@@ -0,0 +1,127 @@
+---
+title: Installation
+---
+
+# Installation
+
+Prior to installing, have a glance through this guide and take note of the details for your platform.
+We install and run Caffe on Ubuntu 14.04 and 12.04, OS X 10.10 / 10.9 / 10.8, and AWS.
+The official Makefile and `Makefile.config` build are complemented by an automatic CMake build from the community.
+
+- [Prerequisites](#prerequisites)
+- [Compilation](#compilation)
+- [Hardware](#hardware)
+- Platforms: [Ubuntu guide](install_apt.html), [OS X guide](install_osx.html), and [RHEL / CentOS / Fedora guide](install_yum.html)
+
+When updating Caffe, it's best to `make clean` before re-compiling.
+
+## Prerequisites
+
+Caffe has several dependencies.
+
+* [CUDA](https://developer.nvidia.com/cuda-zone) is required for GPU mode.
+    * library version 7.0 and the latest driver version are recommended, but 6.* is fine too
+    * 5.5 and 5.0 are compatible but considered legacy
+* [BLAS](http://en.wikipedia.org/wiki/Basic_Linear_Algebra_Subprograms) via ATLAS, MKL, or OpenBLAS.
+* [Boost](http://www.boost.org/) >= 1.55
+* [OpenCV](http://opencv.org/) >= 2.4 including 3.0
+* `protobuf`, `glog`, `gflags`
+* IO libraries `hdf5`, `leveldb`, `snappy`, `lmdb`
+
+Pycaffe and Matcaffe interfaces have their own natural needs.
+
+* For Python Caffe:  `Python 2.7` or `Python 3.3+`, `numpy (>= 1.7)`, boost-provided `boost.python`
+* For MATLAB Caffe: MATLAB with the `mex` compiler.
+
+**cuDNN Caffe**: for fastest operation Caffe is accelerated by drop-in integration of [NVIDIA cuDNN](https://developer.nvidia.com/cudnn). To speed up your Caffe models, install cuDNN then uncomment the `USE_CUDNN := 1` flag in `Makefile.config` when installing Caffe. Acceleration is automatic. For now cuDNN v1 is integrated but see [PR #1731](https://github.com/BVLC/caffe/pull/1731) for v2.
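+
+A minimal sketch of flipping that switch from the shell, assuming you started from `Makefile.config.example`, where the flag is commented out as `# USE_CUDNN := 1`:
+
+    # enable cuDNN and rebuild
+    sed -i 's/^# USE_CUDNN := 1/USE_CUDNN := 1/' Makefile.config
+    make clean && make all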
+
+**CPU-only Caffe**: for cold-brewed CPU-only Caffe uncomment the `CPU_ONLY := 1` flag in `Makefile.config` to configure and build Caffe without CUDA. This is helpful for cloud or cluster deployment.
+
+### CUDA and BLAS
+
+Caffe requires the CUDA `nvcc` compiler to compile its GPU code and CUDA driver for GPU operation.
+To install CUDA, go to the [NVIDIA CUDA website](https://developer.nvidia.com/cuda-downloads) and follow installation instructions there. Install the library and the latest standalone driver separately; the driver bundled with the library is usually out-of-date. **Warning!** The 331.* CUDA driver series has a critical performance issue: do not use it.
+
+For best performance, Caffe can be accelerated by [NVIDIA cuDNN](https://developer.nvidia.com/cudnn). Register for free at the cuDNN site, install it, then continue with these installation instructions. To compile with cuDNN, set the `USE_CUDNN := 1` flag in your `Makefile.config`.
+
+Caffe requires BLAS as the backend of its matrix and vector computations.
+There are several implementations of this library. The choice is yours:
+
+* [ATLAS](http://math-atlas.sourceforge.net/): free, open source, and so the default for Caffe.
+* [Intel MKL](http://software.intel.com/en-us/intel-mkl): commercial and optimized for Intel CPUs, with a free trial and [student](http://software.intel.com/en-us/intel-education-offerings) licenses.
+    1. Install MKL.
+    2. Set `BLAS := mkl` in `Makefile.config`
+* [OpenBLAS](http://www.openblas.net/): free and open source; this optimized and parallel BLAS could require more effort to install, although it might offer a speedup.
+    1. Install OpenBLAS
+    2. Set `BLAS := open` in `Makefile.config`
+
+### Python and/or MATLAB Caffe (optional)
+
+#### Python
+
+The main requirements are `numpy` and `boost.python` (provided by boost). `pandas` is useful too and needed for some examples.
+
+You can install the dependencies with
+
+    for req in $(cat requirements.txt); do pip install $req; done
+
+but we suggest first installing the [Anaconda](https://store.continuum.io/cshop/anaconda/) Python distribution, which provides most of the necessary packages, as well as the `hdf5` library dependency.
+
+To import the `caffe` Python module after completing the installation, add the module directory to your `$PYTHONPATH` by `export PYTHONPATH=/path/to/caffe/python:$PYTHONPATH` or the like. You should not import the module in the `caffe/python/caffe` directory!
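+
+A quick way to check that the module is found, assuming your checkout lives at `/path/to/caffe` and pycaffe has been built:
+
+    export PYTHONPATH=/path/to/caffe/python:$PYTHONPATH
+    python -c "import caffe; print(caffe.__file__)"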
+
+*Caffe's Python interface works with Python 2.7. Python 3.3+ should work out of the box without protobuf support. For protobuf support please install protobuf 3.0 alpha (https://developers.google.com/protocol-buffers/). Earlier Pythons are your own adventure.*
+
+#### MATLAB
+
+Install MATLAB, and make sure that its `mex` is in your `$PATH`.
+
+*Caffe's MATLAB interface works with versions 2014a/b, 2013a/b, and 2012b.*
+
+#### Windows
+
+There is an unofficial Windows port of Caffe at [niuzhiheng/caffe:windows](https://github.com/niuzhiheng/caffe). Thanks [@niuzhiheng](https://github.com/niuzhiheng)!
+
+## Compilation
+
+Now that you have the prerequisites, edit your `Makefile.config` to change the paths for your setup. The defaults should work, but uncomment the relevant lines if using Anaconda Python.
+
+    cp Makefile.config.example Makefile.config
+    # Adjust Makefile.config (for example, if using Anaconda Python)
+    make all
+    make test
+    make runtest
+
+- For cuDNN acceleration, you should uncomment the `USE_CUDNN := 1` switch in `Makefile.config`.
+- For CPU-only Caffe, uncomment `CPU_ONLY := 1` in `Makefile.config`.
+
+To compile the Python and MATLAB wrappers do `make pycaffe` and `make matcaffe` respectively.
+Be sure to set your MATLAB and Python paths in `Makefile.config` first!
+
+**Distribution**: run `make distribute` to create a `distribute` directory with all the Caffe headers, compiled libraries, binaries, etc. needed for distribution to other machines.
+
+**Speed**: for a faster build, compile in parallel by doing `make all -j8` where 8 is the number of parallel threads for compilation (a good choice for the number of threads is the number of cores in your machine).
+
+Now that you have installed Caffe, check out the [MNIST tutorial](gathered/examples/mnist.html) and the [reference ImageNet model tutorial](gathered/examples/imagenet.html).
+
+### CMake Compilation
+
+In lieu of manually editing `Makefile.config` to configure the build, Caffe offers an unofficial CMake build thanks to @Nerei, @akosiorek, and other members of the community. It requires CMake version >= 2.8.7.
+The basic steps are as follows:
+
+    mkdir build
+    cd build
+    cmake ..
+    make all
+    make runtest
+
+See [PR #1667](https://github.com/BVLC/caffe/pull/1667) for options and details.
+
+## Hardware
+
+**Laboratory Tested Hardware**: Berkeley Vision runs Caffe with K40s, K20s, and Titans including models at ImageNet/ILSVRC scale. We also run on GTX series cards (980s and 770s) and GPU-equipped MacBook Pros. We have not encountered any trouble in-house with devices with CUDA capability >= 3.0. All reported hardware issues thus far have been due to GPU configuration, overheating, and the like.
+
+**CUDA compute capability**: devices with compute capability <= 2.0 may have to reduce CUDA thread numbers and batch sizes due to hardware constraints. Your mileage may vary.
+
+Once installed, check your times against our [reference performance numbers](performance_hardware.html) to make sure everything is configured properly.
+
+Ask hardware questions on the [caffe-users group](https://groups.google.com/forum/#!forum/caffe-users).
diff --git a/docs/model_zoo.md b/docs/model_zoo.md
new file mode 100644
index 0000000..06dc0a4
--- /dev/null
+++ b/docs/model_zoo.md
@@ -0,0 +1,70 @@
+---
+title: Model Zoo
+---
+# Caffe Model Zoo
+
+Lots of researchers and engineers have made Caffe models for different tasks with all kinds of architectures and data.
+These models are learned and applied for problems ranging from simple regression, to large-scale visual classification, to Siamese networks for image similarity, to speech and robotics applications.
+
+To help share these models, we introduce the model zoo framework:
+
+- A standard format for packaging Caffe model info.
+- Tools to upload/download model info to/from Github Gists, and to download trained `.caffemodel` binaries.
+- A central wiki page for sharing model info Gists.
+
+## Where to get trained models
+
+First of all, we bundle BVLC-trained models for unrestricted, out-of-the-box use.
+<br>
+See the [BVLC model license](#bvlc-model-license) for details.
+Each one of these can be downloaded by running `scripts/download_model_binary.py <dirname>` where `<dirname>` is specified below:
+
+- **BVLC Reference CaffeNet** in `models/bvlc_reference_caffenet`: AlexNet trained on ILSVRC 2012, with a minor variation from the version as described in [ImageNet classification with deep convolutional neural networks](http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks) by Krizhevsky et al. in NIPS 2012. (Trained by Jeff Donahue @jeffdonahue)
+- **BVLC AlexNet** in `models/bvlc_alexnet`: AlexNet trained on ILSVRC 2012, almost exactly as described in [ImageNet classification with deep convolutional neural networks](http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks) by Krizhevsky et al. in NIPS 2012. (Trained by Evan Shelhamer @shelhamer)
+- **BVLC Reference R-CNN ILSVRC-2013** in `models/bvlc_reference_rcnn_ilsvrc13`: pure Caffe implementation of [R-CNN](https://github.com/rbgirshick/rcnn) as described by Girshick et al. in CVPR 2014. (Trained by Ross Girshick @rbgirshick)
+- **BVLC GoogLeNet** in `models/bvlc_googlenet`: GoogLeNet trained on ILSVRC 2012, almost exactly as described in [Going Deeper with Convolutions](http://arxiv.org/abs/1409.4842) by Szegedy et al. in ILSVRC 2014. (Trained by Sergio Guadarrama @sguada)
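+
+For example, to fetch the reference CaffeNet model listed above, run from the Caffe root (this downloads the trained `.caffemodel` binary):
+
+    python scripts/download_model_binary.py models/bvlc_reference_caffenet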
+
+**Community models** made by Caffe users are posted to a publicly editable [wiki page](https://github.com/BVLC/caffe/wiki/Model-Zoo).
+These models are subject to conditions of their respective authors such as citation and license.
+Thank you for sharing your models!
+
+## Model info format
+
+A Caffe model is distributed as a directory containing:
+
+- Solver/model prototxt(s)
+- `readme.md` containing
+    - YAML frontmatter
+        - Caffe version used to train this model (tagged release or commit hash).
+        - [optional] file URL and SHA1 of the trained `.caffemodel`.
+        - [optional] github gist id.
+    - Information about what data the model was trained on, modeling choices, etc.
+    - License information.
+- [optional] Other helpful scripts.
+
+### Hosting model info
+
+Github Gist is a good format for model info distribution because it can contain multiple files, is versionable, and has in-browser syntax highlighting and markdown rendering.
+
+`scripts/upload_model_to_gist.sh <dirname>` uploads non-binary files in the model directory as a Github Gist and prints the Gist ID. If `gist_id` is already part of the `<dirname>/readme.md` frontmatter, then it updates the existing Gist.
+
+Try doing `scripts/upload_model_to_gist.sh models/bvlc_alexnet` to test the uploading (don't forget to delete the uploaded gist afterward).
+
+Downloading model info is done just as easily with `scripts/download_model_from_gist.sh <gist_id> <dirname>`.
+
+### Hosting trained models
+
+It is up to the user where to host the `.caffemodel` file.
+We host our BVLC-provided models on our own server.
+Dropbox also works fine (tip: make sure that `?dl=1` is appended to the end of the URL).
+
+`scripts/download_model_binary.py <dirname>` downloads the `.caffemodel` from the URL specified in the `<dirname>/readme.md` frontmatter and confirms SHA1.
+
+## BVLC model license
+
+The Caffe models bundled by the BVLC are released for unrestricted use.
+
+These models are trained on data from the [ImageNet project](http://www.image-net.org/) and training data includes internet photos that may be subject to copyright.
+
+Our present understanding as researchers is that there is no restriction placed on the open release of these learned model weights, since none of the original images are distributed in whole or in part.
+To the extent that the interpretation arises that weights are derivative works of the original copyright holder and they assert such a copyright, UC Berkeley makes no representations as to what use is allowed other than to consider our present release in the spirit of fair use in the academic mission of the university to disseminate knowledge and tools as broadly as possible without restriction.
diff --git a/docs/performance_hardware.md b/docs/performance_hardware.md
new file mode 100644
index 0000000..cdd4b36
--- /dev/null
+++ b/docs/performance_hardware.md
@@ -0,0 +1,73 @@
+---
+title: Performance and Hardware Configuration
+---
+
+# Performance and Hardware Configuration
+
+To measure performance on different NVIDIA GPUs we use CaffeNet, the Caffe reference ImageNet model.
+
+For training, each time point is 20 iterations/minibatches of 256 images for 5,120 images total. For testing, a 50,000 image validation set is classified.
+
+**Acknowledgements**: BVLC members are very grateful to NVIDIA for providing several GPUs to conduct this research.
+
+## NVIDIA K40
+
+Performance is best with ECC off and boost clock enabled. While ECC makes a negligible difference in speed, disabling it frees ~1 GB of GPU memory.
+
+Best settings with ECC off and maximum clock speed in standard Caffe:
+
+* Training is 26.5 secs / 20 iterations (5,120 images)
+* Testing is 100 secs / validation set (50,000 images)
+
+Best settings with Caffe + [cuDNN acceleration](http://nvidia.com/cudnn):
+
+* Training is 19.2 secs / 20 iterations (5,120 images)
+* Testing is 60.7 secs / validation set (50,000 images)
+
+Other settings:
+
+* ECC on, max speed: training 26.7 secs / 20 iterations, test 101 secs / validation set
+* ECC on, default speed: training 31 secs / 20 iterations, test 117 secs / validation set
+* ECC off, default speed: training 31 secs / 20 iterations, test 118 secs / validation set
+
+### K40 configuration tips
+
+For maximum K40 performance, turn off ECC and boost the clock speed (at your own risk).
+
+To turn off ECC, do
+
+    sudo nvidia-smi -i 0 --ecc-config=0    # repeat with -i x for each GPU ID
+
+then reboot.
+
+Set the "persistence" mode of the GPU settings by
+
+    sudo nvidia-smi -pm 1
+
+and then set the clock speed with
+
+    sudo nvidia-smi -i 0 -ac 3004,875    # repeat with -i x for each GPU ID
+
+but note that this configuration resets across driver reloading / rebooting. Include these commands in a boot script to initialize these settings. For a simple fix, add these commands to `/etc/rc.local` (on Ubuntu).
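+
+A minimal sketch of such a boot-script block, assuming a single GPU at index 0 and the K40 clocks shown above (adjust GPU IDs and clocks for your cards):
+
+    # restore persistence mode and application clocks at boot
+    nvidia-smi -pm 1
+    nvidia-smi -i 0 -ac 3004,875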
+
+## NVIDIA Titan
+
+Training: 26.26 secs / 20 iterations (5,120 images).
+Testing: 100 secs / validation set (50,000 images).
+
+cuDNN Training: 20.25 secs / 20 iterations (5,120 images).
+cuDNN Testing: 66.3 secs / validation set (50,000 images).
+
+
+## NVIDIA K20
+
+Training: 36.0 secs / 20 iterations (5,120 images).
+Testing: 133 secs / validation set (50,000 images).
+
+## NVIDIA GTX 770
+
+Training: 33.0 secs / 20 iterations (5,120 images).
+Testing: 129 secs / validation set (50,000 images).
+
+cuDNN Training: 24.3 secs / 20 iterations (5,120 images).
+cuDNN Testing: 104 secs / validation set (50,000 images).
diff --git a/docs/stylesheets/pygment_trac.css b/docs/stylesheets/pygment_trac.css
new file mode 100644
index 0000000..c6a6452
--- /dev/null
+++ b/docs/stylesheets/pygment_trac.css
@@ -0,0 +1,69 @@
+.highlight  { background: #ffffff; }
+.highlight .c { color: #999988; font-style: italic } /* Comment */
+.highlight .err { color: #a61717; background-color: #e3d2d2 } /* Error */
+.highlight .k { font-weight: bold } /* Keyword */
+.highlight .o { font-weight: bold } /* Operator */
+.highlight .cm { color: #999988; font-style: italic } /* Comment.Multiline */
+.highlight .cp { color: #999999; font-weight: bold } /* Comment.Preproc */
+.highlight .c1 { color: #999988; font-style: italic } /* Comment.Single */
+.highlight .cs { color: #999999; font-weight: bold; font-style: italic } /* Comment.Special */
+.highlight .gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */
+.highlight .gd .x { color: #000000; background-color: #ffaaaa } /* Generic.Deleted.Specific */
+.highlight .ge { font-style: italic } /* Generic.Emph */
+.highlight .gr { color: #aa0000 } /* Generic.Error */
+.highlight .gh { color: #999999 } /* Generic.Heading */
+.highlight .gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */
+.highlight .gi .x { color: #000000; background-color: #aaffaa } /* Generic.Inserted.Specific */
+.highlight .go { color: #888888 } /* Generic.Output */
+.highlight .gp { color: #555555 } /* Generic.Prompt */
+.highlight .gs { font-weight: bold } /* Generic.Strong */
+.highlight .gu { color: #800080; font-weight: bold; } /* Generic.Subheading */
+.highlight .gt { color: #aa0000 } /* Generic.Traceback */
+.highlight .kc { font-weight: bold } /* Keyword.Constant */
+.highlight .kd { font-weight: bold } /* Keyword.Declaration */
+.highlight .kn { font-weight: bold } /* Keyword.Namespace */
+.highlight .kp { font-weight: bold } /* Keyword.Pseudo */
+.highlight .kr { font-weight: bold } /* Keyword.Reserved */
+.highlight .kt { color: #445588; font-weight: bold } /* Keyword.Type */
+.highlight .m { color: #009999 } /* Literal.Number */
+.highlight .s { color: #d14 } /* Literal.String */
+.highlight .na { color: #008080 } /* Name.Attribute */
+.highlight .nb { color: #0086B3 } /* Name.Builtin */
+.highlight .nc { color: #445588; font-weight: bold } /* Name.Class */
+.highlight .no { color: #008080 } /* Name.Constant */
+.highlight .ni { color: #800080 } /* Name.Entity */
+.highlight .ne { color: #990000; font-weight: bold } /* Name.Exception */
+.highlight .nf { color: #990000; font-weight: bold } /* Name.Function */
+.highlight .nn { color: #555555 } /* Name.Namespace */
+.highlight .nt { color: #000080 } /* Name.Tag */
+.highlight .nv { color: #008080 } /* Name.Variable */
+.highlight .ow { font-weight: bold } /* Operator.Word */
+.highlight .w { color: #bbbbbb } /* Text.Whitespace */
+.highlight .mf { color: #009999 } /* Literal.Number.Float */
+.highlight .mh { color: #009999 } /* Literal.Number.Hex */
+.highlight .mi { color: #009999 } /* Literal.Number.Integer */
+.highlight .mo { color: #009999 } /* Literal.Number.Oct */
+.highlight .sb { color: #d14 } /* Literal.String.Backtick */
+.highlight .sc { color: #d14 } /* Literal.String.Char */
+.highlight .sd { color: #d14 } /* Literal.String.Doc */
+.highlight .s2 { color: #d14 } /* Literal.String.Double */
+.highlight .se { color: #d14 } /* Literal.String.Escape */
+.highlight .sh { color: #d14 } /* Literal.String.Heredoc */
+.highlight .si { color: #d14 } /* Literal.String.Interpol */
+.highlight .sx { color: #d14 } /* Literal.String.Other */
+.highlight .sr { color: #009926 } /* Literal.String.Regex */
+.highlight .s1 { color: #d14 } /* Literal.String.Single */
+.highlight .ss { color: #990073 } /* Literal.String.Symbol */
+.highlight .bp { color: #999999 } /* Name.Builtin.Pseudo */
+.highlight .vc { color: #008080 } /* Name.Variable.Class */
+.highlight .vg { color: #008080 } /* Name.Variable.Global */
+.highlight .vi { color: #008080 } /* Name.Variable.Instance */
+.highlight .il { color: #009999 } /* Literal.Number.Integer.Long */
+
+.type-csharp .highlight .k { color: #0000FF }
+.type-csharp .highlight .kt { color: #0000FF }
+.type-csharp .highlight .nf { color: #000000; font-weight: normal }
+.type-csharp .highlight .nc { color: #2B91AF }
+.type-csharp .highlight .nn { color: #000000 }
+.type-csharp .highlight .s { color: #A31515 }
+.type-csharp .highlight .sc { color: #A31515 }
diff --git a/docs/stylesheets/reset.css b/docs/stylesheets/reset.css
new file mode 100644
index 0000000..6020b26
--- /dev/null
+++ b/docs/stylesheets/reset.css
@@ -0,0 +1,21 @@
+/* MeyerWeb Reset */
+
+html, body, div, span, applet, object, iframe,
+h1, h2, h3, h4, h5, h6, p, blockquote, pre,
+a, abbr, acronym, address, big, cite, code,
+del, dfn, em, img, ins, kbd, q, s, samp,
+small, strike, strong, sub, sup, tt, var,
+b, u, i, center,
+dl, dt, dd, ol, ul, li,
+fieldset, form, label, legend,
+table, caption, tbody, tfoot, thead, tr, th, td,
+article, aside, canvas, details, embed,
+figure, figcaption, footer, header, hgroup,
+menu, nav, output, ruby, section, summary,
+time, mark, audio, video {
+  margin: 0;
+  padding: 0;
+  border: 0;
+  font: inherit;
+  vertical-align: baseline;
+}
diff --git a/docs/stylesheets/styles.css b/docs/stylesheets/styles.css
new file mode 100644
index 0000000..2dbedb8
--- /dev/null
+++ b/docs/stylesheets/styles.css
@@ -0,0 +1,348 @@
+@import url(http://fonts.googleapis.com/css?family=PT+Serif|Open+Sans:600,400);
+
+body {
+  padding:10px 50px 0 0;
+  font-family: 'Open Sans', sans-serif;
+  font-size: 14px;
+  color: #232323;
+  background-color: #FBFAF7;
+  margin: 0;
+  line-height: 1.5rem;
+  -webkit-font-smoothing: antialiased;
+}
+
+h1, h2, h3, h4, h5, h6 {
+  color:#232323;
+  margin:36px 0 10px;
+}
+
+p, ul, ol, table, dl {
+  margin:0 0 22px;
+}
+
+h1, h2, h3 {
+  font-family: 'PT Serif', serif;
+  line-height:1.3;
+  font-weight: normal;
+  display: block;
+  border-bottom: 1px solid #ccc;
+  padding-bottom: 5px;
+}
+
+h1 {
+  font-size: 30px;
+}
+
+h2 {
+  font-size: 24px;
+}
+
+h3 {
+  font-size: 18px;
+}
+
+h4, h5, h6 {
+  font-family: 'PT Serif', serif;
+  font-weight: 700;
+}
+
+a {
+  color:#C30000;
+  text-decoration:none;
+}
+
+a:hover {
+  text-decoration: underline;
+}
+
+a small {
+  font-size: 12px;
+}
+
+em {
+  font-style: italic;
+}
+
+strong {
+  font-weight:700;
+}
+
+ul {
+  padding-left: 25px;
+}
+
+ol {
+  list-style: decimal;
+  padding-left: 20px;
+}
+
+blockquote {
+  margin: 0;
+  padding: 0 0 0 20px;
+  font-style: italic;
+}
+
+dl, dt, dd, dl p {
+  color: #444;
+}
+
+dl dt {
+  font-weight: bold;
+}
+
+dl dd {
+  padding-left: 20px;
+  font-style: italic;
+}
+
+dl p {
+  padding-left: 20px;
+  font-style: italic;
+}
+
+hr {
+  border:0;
+  background:#ccc;
+  height:1px;
+  margin:0 0 24px;
+}
+
+/* Images */
+
+img {
+  position: relative;
+  margin: 0 auto;
+  max-width: 650px;
+  padding: 5px;
+  margin: 10px 0 32px 0;
+  border: 1px solid #ccc;
+}
+
+p img {
+  display: inline;
+  margin: 0;
+  padding: 0;
+  vertical-align: middle;
+  text-align: center;
+  border: none;
+}
+
+/* Code blocks */
+code, pre {
+  font-family: monospace;
+  color:#000;
+  font-size:12px;
+  line-height: 14px;
+}
+
+pre {
+  padding: 6px 12px;
+  background: #FDFEFB;
+  border-radius:4px;
+  border:1px solid #D7D8C8;
+  overflow: auto;
+  white-space: pre-wrap;
+  margin-bottom: 16px;
+}
+
+
+/* Tables */
+table {
+  width:100%;
+}
+
+table {
+  border: 1px solid #ccc;
+  margin-bottom: 32px;
+  text-align: left;
+ }
+
+th {
+  font-family: 'Open Sans', sans-serif;
+  font-size: 18px;
+  font-weight: normal;
+  padding: 10px;
+  background: #232323;
+  color: #FDFEFB;
+ }
+
+td {
+  padding: 10px;
+  background: #ccc;
+ }
+
+
+/* Wrapper */
+.wrapper {
+  width:960px;
+}
+
+
+/* Header */
+
+header {
+  width:170px;
+  float:left;
+  position:fixed;
+  padding: 12px 25px 22px 50px;
+  margin: 24px 25px 0 0;
+}
+
+p.header {
+  font-size: 14px;
+}
+
+h1.header {
+  font-size: 30px;
+  font-weight: 300;
+  line-height: 1.3em;
+  margin-top: 0;
+}
+
+a.name {
+  white-space: nowrap;
+}
+
+header ul {
+  list-style:none;
+  padding:0;
+}
+
+header li {
+  list-style-type: none;
+  width:132px;
+  height:15px;
+  margin-bottom: 12px;
+  line-height: 1em;
+  padding: 6px 6px 6px 7px;
+  background: #c30000;
+  border-radius:4px;
+  border:1px solid #555;
+}
+
+header li:hover {
+  background: #dd0000;
+}
+
+a.buttons {
+  color: #fff;
+  text-decoration: none;
+  font-weight: normal;
+  padding: 2px 2px 2px 22px;
+  height: 30px;
+}
+
+a.github {
+  background: url(/images/GitHub-Mark-64px.png) no-repeat center left;
+  background-size: 15%;
+}
+
+/* Section - for main page content */
+
+section {
+  width:650px;
+  float:right;
+  padding-bottom:50px;
+}
+
+p.footnote {
+  font-size: 12px;
+}
+
+
+/* Footer */
+
+footer {
+  width:170px;
+  float:left;
+  position:fixed;
+  bottom:10px;
+  padding-left: 50px;
+}
+
+@media print, screen and (max-width: 960px) {
+
+  div.wrapper {
+    width:auto;
+    margin:0;
+  }
+
+  header, section, footer {
+    float:none;
+    position:static;
+    width:auto;
+  }
+
+  footer {
+    border-top: 1px solid #ccc;
+    margin:0 84px 0 50px;
+    padding:0;
+  }
+
+  header {
+    padding-right:320px;
+  }
+
+  section {
+    padding:20px 84px 20px 50px;
+    margin:0 0 20px;
+  }
+
+  header a small {
+    display:inline;
+  }
+
+  header ul {
+    position:absolute;
+    right:130px;
+    top:84px;
+  }
+}
+
+@media print, screen and (max-width: 720px) {
+  body {
+    word-wrap:break-word;
+  }
+
+  header {
+    padding:10px 20px 0;
+    margin-right: 0;
+  }
+
+  section {
+    padding:10px 0 10px 20px;
+    margin:0 0 30px;
+  }
+
+  footer {
+    margin: 0 0 0 30px;
+  }
+
+  header ul, header p.view {
+    position:static;
+  }
+}
+
+@media print, screen and (max-width: 480px) {
+
+  header ul li.download {
+    display:none;
+  }
+
+  footer {
+    margin: 0 0 0 20px;
+  }
+
+  footer a{
+    display:block;
+  }
+
+}
+
+@media print {
+  body {
+    padding:0.4in;
+    font-size:12pt;
+    color:#444;
+  }
+}
diff --git a/docs/tutorial/convolution.md b/docs/tutorial/convolution.md
new file mode 100644
index 0000000..a02fe4e
--- /dev/null
+++ b/docs/tutorial/convolution.md
@@ -0,0 +1,13 @@
+---
+title: Convolution
+---
+# Caffeinated Convolution
+
+The Caffe strategy for convolution is to reduce the problem to matrix-matrix multiplication.
+This linear algebra computation is highly-tuned in BLAS libraries and efficiently computed on GPU devices.
+
+For more details read Yangqing's [Convolution in Caffe: a memo](https://github.com/Yangqing/caffe/wiki/Convolution-in-Caffe:-a-memo).
+
+As it turns out, this same reduction was independently explored in the context of conv. nets by
+
+> K. Chellapilla, S. Puri, P. Simard, et al. High performance convolutional neural networks for document processing. In Tenth International Workshop on Frontiers in Handwriting Recognition, 2006.
diff --git a/docs/tutorial/data.md b/docs/tutorial/data.md
new file mode 100644
index 0000000..3bf7d93
--- /dev/null
+++ b/docs/tutorial/data.md
@@ -0,0 +1,78 @@
+---
+title: Data
+---
+# Data: Ins and Outs
+
+Data flows through Caffe as [Blobs](net_layer_blob.html#blob-storage-and-communication).
+Data layers load input and save output by converting between Blobs and other formats.
+Common transformations like mean-subtraction and feature-scaling are done by data layer configuration.
+New input types are supported by developing a new data layer -- the rest of the Net follows by the modularity of the Caffe layer catalogue.
+
+This data layer definition
+
+    layer {
+      name: "mnist"
+      # Data layer loads leveldb or lmdb storage DBs for high-throughput.
+      type: "Data"
+      # the 1st top is the data itself: the name is only convention
+      top: "data"
+      # the 2nd top is the ground truth: the name is only convention
+      top: "label"
+      # the Data layer configuration
+      data_param {
+        # path to the DB
+        source: "examples/mnist/mnist_train_lmdb"
+        # type of DB: LEVELDB or LMDB (LMDB supports concurrent reads)
+        backend: LMDB
+        # batch processing improves efficiency.
+        batch_size: 64
+      }
+      # common data transformations
+      transform_param {
+        # feature scaling coefficient: this maps the [0, 255] MNIST data to [0, 1]
+        scale: 0.00390625
+      }
+    }
+
+loads the MNIST digits.
+
+**Tops and Bottoms**: A data layer makes **top** blobs to output data to the model.
+It does not have **bottom** blobs since it takes no input.
+
+**Data and Label**: a data layer has at least one top canonically named **data**.
+For ground truth a second top can be defined that is canonically named **label**.
+Both tops simply produce blobs and there is nothing inherently special about these names.
+The (data, label) pairing is a convenience for classification models.
+
+**Transformations**: data preprocessing is parametrized by transformation messages within the data layer definition.
+
+    layer {
+      name: "data"
+      type: "Data"
+      [...]
+      transform_param {
+        scale: 0.1
+        mean_file: "mean.binaryproto"
+        # for images in particular horizontal mirroring and random cropping
+        # can be done as simple data augmentations.
+        mirror: 1  # 1 = on, 0 = off
+        # crop a `crop_size` x `crop_size` patch:
+        # - at random during training
+        # - from the center during testing
+        crop_size: 227
+      }
+    }
+
+**Prefetching**: for throughput data layers fetch the next batch of data and prepare it in the background while the Net computes the current batch.
+
+**Multiple Inputs**: a Net can have multiple inputs of any number and type. Define as many data layers as needed giving each a unique name and top. Multiple inputs are useful for non-trivial ground truth: one data layer loads the actual data and the other data layer loads the ground truth in lock-step. In this arrangement both data and label can be any 4D array. Further applications of multiple inputs are found in multi-modal and sequence models. In these cases you may need to implement  [...]
+
+*Improvements to data processing to add formats, generality, or helper utilities are welcome!*
+
+## Formats
+
+Refer to the layer catalogue of [data layers](layers.html#data-layers) for close-ups on each type of data Caffe understands.
+
+## Deployment Input
+
+For on-the-fly computation deployment Nets define their inputs by `input` fields: these Nets then accept direct assignment of data for online or interactive computation.
diff --git a/docs/tutorial/fig/.gitignore b/docs/tutorial/fig/.gitignore
new file mode 100644
index 0000000..e69de29
diff --git a/docs/tutorial/fig/backward.jpg b/docs/tutorial/fig/backward.jpg
new file mode 100644
index 0000000..906b080
Binary files /dev/null and b/docs/tutorial/fig/backward.jpg differ
diff --git a/docs/tutorial/fig/forward.jpg b/docs/tutorial/fig/forward.jpg
new file mode 100644
index 0000000..ab52810
Binary files /dev/null and b/docs/tutorial/fig/forward.jpg differ
diff --git a/docs/tutorial/fig/forward_backward.png b/docs/tutorial/fig/forward_backward.png
new file mode 100644
index 0000000..d2f46c3
Binary files /dev/null and b/docs/tutorial/fig/forward_backward.png differ
diff --git a/docs/tutorial/fig/layer.jpg b/docs/tutorial/fig/layer.jpg
new file mode 100644
index 0000000..5075381
Binary files /dev/null and b/docs/tutorial/fig/layer.jpg differ
diff --git a/docs/tutorial/fig/logreg.jpg b/docs/tutorial/fig/logreg.jpg
new file mode 100644
index 0000000..480f519
Binary files /dev/null and b/docs/tutorial/fig/logreg.jpg differ
diff --git a/docs/tutorial/forward_backward.md b/docs/tutorial/forward_backward.md
new file mode 100644
index 0000000..a645f00
--- /dev/null
+++ b/docs/tutorial/forward_backward.md
@@ -0,0 +1,37 @@
+---
+title: Forward and Backward for Inference and Learning
+---
+# Forward and Backward
+
+The forward and backward passes are the essential computations of a [Net](net_layer_blob.html).
+
+<img src="fig/forward_backward.png" alt="Forward and Backward" width="480">
+
+Let's consider a simple logistic regression classifier.
+
+The **forward** pass computes the output given the input for inference.
+In forward Caffe composes the computation of each layer to compute the "function" represented by the model.
+This pass goes from bottom to top.
+
+<img src="fig/forward.jpg" alt="Forward pass" width="320">
+
+The data $$x$$ is passed through an inner product layer for $$g(x)$$ then through a softmax for $$h(g(x))$$ and softmax loss to give $$f_W(x)$$.
+
+The **backward** pass computes the gradient given the loss for learning.
+In backward Caffe reverse-composes the gradient of each layer to compute the gradient of the whole model by automatic differentiation.
+This is back-propagation.
+This pass goes from top to bottom.
+
+<img src="fig/backward.jpg" alt="Backward pass" width="320">
+
+The backward pass begins with the loss and computes the gradient with respect to the output $$\frac{\partial f_W}{\partial h}$$. The gradient with respect to the rest of the model is computed layer-by-layer through the chain rule. Layers with parameters, like the `INNER_PRODUCT` layer, compute the gradient with respect to their parameters $$\frac{\partial f_W}{\partial W_{\text{ip}}}$$ during the backward step.
+
+These computations follow immediately from defining the model: Caffe plans and carries out the forward and backward passes for you.
+
+- The `Net::Forward()` and `Net::Backward()` methods carry out the respective passes while `Layer::Forward()` and `Layer::Backward()` compute each step.
+- Every layer type has `forward_{cpu,gpu}()` and `backward_{cpu,gpu}()` methods to compute its steps according to the mode of computation. A layer may only implement CPU or GPU mode due to constraints or convenience.
+
+The [Solver](solver.html) optimizes a model by first calling forward to yield the output and loss, then calling backward to generate the gradient of the model, and then incorporating the gradient into a weight update that attempts to minimize the loss. Division of labor between the Solver, Net, and Layer keeps Caffe modular and open to development.
+
+For the details of the forward and backward steps of Caffe's layer types, refer to the [layer catalogue](layers.html).
+
diff --git a/docs/tutorial/index.md b/docs/tutorial/index.md
new file mode 100644
index 0000000..7d4e77b
--- /dev/null
+++ b/docs/tutorial/index.md
@@ -0,0 +1,51 @@
+---
+title: Caffe Tutorial
+---
+# Caffe Tutorial
+
+Caffe is a deep learning framework and this tutorial explains its philosophy, architecture, and usage.
+This is a practical guide and framework introduction, so the full frontier, context, and history of deep learning cannot be covered here.
+While explanations will be given where possible, a background in machine learning and neural networks is helpful.
+
+## Philosophy
+
+In one sip, Caffe is brewed for
+
+- Expression: models and optimizations are defined as plaintext schemas instead of code.
+- Speed: for research and industry alike speed is crucial for state-of-the-art models and massive data.
+- Modularity: new tasks and settings require flexibility and extension.
+- Openness: scientific and applied progress call for common code, reference models, and reproducibility.
+- Community: academic research, startup prototypes, and industrial applications all share strength by joint discussion and development in a BSD-2 project.
+
+and these principles direct the project.
+
+## Tour
+
+- [Nets, Layers, and Blobs](net_layer_blob.html): the anatomy of a Caffe model.
+- [Forward / Backward](forward_backward.html): the essential computations of layered compositional models.
+- [Loss](loss.html): the task to be learned is defined by the loss.
+- [Solver](solver.html): the solver coordinates model optimization.
+- [Layer Catalogue](layers.html): the layer is the fundamental unit of modeling and computation -- Caffe's catalogue includes layers for state-of-the-art models.
+- [Interfaces](interfaces.html): command line, Python, and MATLAB Caffe.
+- [Data](data.html): how to caffeinate data for model input.
+
+For a closer look at a few details:
+
+- [Caffeinated Convolution](convolution.html): how Caffe computes convolutions.
+
+## Deeper Learning
+
+There are helpful references freely online for deep learning that complement our hands-on tutorial.
+These cover introductory and advanced material, background and history, and the latest advances.
+
+The [Tutorial on Deep Learning for Vision](https://sites.google.com/site/deeplearningcvpr2014/) from CVPR '14 is a good companion tutorial for researchers.
+Once you have the framework and practice foundations from the Caffe tutorial, explore the fundamental ideas and advanced research directions in the CVPR '14 tutorial.
+
+A broad introduction is given in the free online draft of [Neural Networks and Deep Learning](http://neuralnetworksanddeeplearning.com/index.html) by Michael Nielsen. In particular the chapters on using neural nets and how backpropagation works are helpful if you are new to the subject.
+
+These recent academic tutorials cover deep learning for researchers in machine learning and vision:
+
+- [Deep Learning Tutorial](http://www.cs.nyu.edu/~yann/talks/lecun-ranzato-icml2013.pdf) by Yann LeCun (NYU, Facebook) and Marc'Aurelio Ranzato (Facebook). ICML 2013 tutorial.
+- [LISA Deep Learning Tutorial](http://deeplearning.net/tutorial/deeplearning.pdf) by the LISA Lab directed by Yoshua Bengio (U. Montréal).
+
+For an exposition of neural networks in circuits and code, check out [Understanding Neural Networks from a Programmer's Perspective](http://karpathy.github.io/neuralnets/) by Andrej Karpathy (Stanford).
diff --git a/docs/tutorial/interfaces.md b/docs/tutorial/interfaces.md
new file mode 100644
index 0000000..1296331
--- /dev/null
+++ b/docs/tutorial/interfaces.md
@@ -0,0 +1,279 @@
+---
+title: Interfaces
+---
+# Interfaces
+
+Caffe has command line, Python, and MATLAB interfaces for day-to-day usage, interfacing with research code, and rapid prototyping. While Caffe is a C++ library at heart and it exposes a modular interface for development, not every occasion calls for custom compilation. The cmdcaffe, pycaffe, and matcaffe interfaces are here for you.
+
+## Command Line
+
+The command line interface -- cmdcaffe -- is the `caffe` tool for model training, scoring, and diagnostics. Run `caffe` without any arguments for help. This tool and others are found in caffe/build/tools. (The following example calls require completing the LeNet / MNIST example first.)
+
+**Training**: `caffe train` learns models from scratch, resumes learning from saved snapshots, and fine-tunes models to new data and tasks:
+
+* All training requires a solver configuration through the `-solver solver.prototxt` argument. 
+* Resuming requires the `-snapshot model_iter_1000.solverstate` argument to load the solver snapshot. 
+* Fine-tuning requires the `-weights model.caffemodel` argument for the model initialization.
+
+For example, you can run:
+
+    # train LeNet
+    caffe train -solver examples/mnist/lenet_solver.prototxt
+    # train on GPU 2
+    caffe train -solver examples/mnist/lenet_solver.prototxt -gpu 2
+    # resume training from the half-way point snapshot
+    caffe train -solver examples/mnist/lenet_solver.prototxt -snapshot examples/mnist/lenet_iter_5000.solverstate
+
+For a full example of fine-tuning, see examples/finetuning_on_flickr_style, but the training call alone is
+
+    # fine-tune CaffeNet model weights for style recognition
+    caffe train -solver examples/finetuning_on_flickr_style/solver.prototxt -weights models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel
+
+**Testing**: `caffe test` scores models by running them in the test phase and reports the net output as its score. The net architecture must be properly defined to output an accuracy measure or loss. The per-batch score is reported and then the grand average is reported last.
+
+    # score the learned LeNet model on the validation set as defined in the
+    # model architecture lenet_train_test.prototxt
+    caffe test -model examples/mnist/lenet_train_test.prototxt -weights examples/mnist/lenet_iter_10000.caffemodel -gpu 0 -iterations 100
+
+**Benchmarking**: `caffe time` benchmarks model execution layer-by-layer through timing and synchronization. This is useful to check system performance and measure relative execution times for models.
+
+    # (These example calls require you complete the LeNet / MNIST example first.)
+    # time LeNet training on CPU for 10 iterations
+    caffe time -model examples/mnist/lenet_train_test.prototxt -iterations 10
+    # time LeNet training on GPU for the default 50 iterations
+    caffe time -model examples/mnist/lenet_train_test.prototxt -gpu 0
+    # time a model architecture with the given weights on the first GPU for 10 iterations
+    caffe time -model examples/mnist/lenet_train_test.prototxt -weights examples/mnist/lenet_iter_10000.caffemodel -gpu 0 -iterations 10
+
+**Diagnostics**: `caffe device_query` reports GPU details for reference and checking device ordinals for running on a given device in multi-GPU machines.
+
+    # query the first device
+    caffe device_query -gpu 0
+
+## Python
+
+The Python interface -- pycaffe -- is the `caffe` module and its scripts in caffe/python. `import caffe` to load models, do forward and backward, handle IO, visualize networks, and even instrument model solving. All model data, derivatives, and parameters are exposed for reading and writing.
+
+- `caffe.Net` is the central interface for loading, configuring, and running models. `caffe.Classifier` and `caffe.Detector` provide convenience interfaces for common tasks.
+- `caffe.SGDSolver` exposes the solving interface.
+- `caffe.io` handles input / output with preprocessing and protocol buffers.
+- `caffe.draw` visualizes network architectures.
+- Caffe blobs are exposed as numpy ndarrays for ease-of-use and efficiency.
+
+Tutorial IPython notebooks are found in caffe/examples: do `ipython notebook caffe/examples` to try them. For developer reference, docstrings can be found throughout the code.
+
+Compile pycaffe by `make pycaffe`. Add the module directory caffe/python to your PYTHONPATH so that `import caffe` works.
+
+## MATLAB
+
+The MATLAB interface -- matcaffe -- is the `caffe` package in caffe/matlab, with which you can integrate Caffe into your Matlab code.
+
+In MatCaffe, you can
+
+* Create multiple Nets in Matlab
+* Do forward and backward computation
+* Access any layer within a network, and any parameter blob in a layer
+* Get and set data or diff to any blob within a network, not restricted to input or output blobs
+* Save a network's parameters to file, and load parameters from file
+* Reshape a blob and reshape a network
+* Edit network parameters and do network surgery
+* Create multiple Solvers in Matlab for training
+* Resume training from solver snapshots
+* Access train net and test nets in a solver
+* Run for a certain number of iterations and give back control to Matlab
+* Intermingle arbitrary Matlab code with gradient steps
+
+An ILSVRC image classification demo is in caffe/matlab/demo/classification_demo.m (you need to download BVLC CaffeNet from [Model Zoo](http://caffe.berkeleyvision.org/model_zoo.html) to run it).
+
+### Build MatCaffe
+
+Build MatCaffe with `make all matcaffe`. After that, you may test it using `make mattest`.
+
+Common issue: if you run into error messages like `libstdc++.so.6:version 'GLIBCXX_3.4.15' not found` during `make mattest`, then it usually means that your Matlab's runtime libraries do not match your compile-time libraries. You may need to do the following before you start Matlab:
+
+    export LD_LIBRARY_PATH=/opt/intel/mkl/lib/intel64:/usr/local/cuda/lib64
+    export LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libstdc++.so.6
+
+Or the equivalent based on where things are installed on your system, and do `make mattest` again to see if the issue is fixed. Note: this issue is sometimes more complicated since during its startup Matlab may overwrite your `LD_LIBRARY_PATH` environment variable. You can run `!ldd ./matlab/+caffe/private/caffe_.mexa64` (the mex extension may differ on your system) in Matlab to see its runtime libraries, and preload your compile-time libraries by exporting them to your `LD_PRELOAD` environment variable.
+
+After successfully building and testing, add this package to the Matlab search PATH by starting `matlab` from the caffe root folder and running the following command in the Matlab command window:
+
+    addpath ./matlab
+
+You can save your Matlab search PATH by running `savepath` so that you don't have to run the command above again every time you use MatCaffe.
+
+### Use MatCaffe
+
+MatCaffe is very similar to PyCaffe in usage.
+
+The examples below show detailed usage and assume you have downloaded BVLC CaffeNet from [Model Zoo](http://caffe.berkeleyvision.org/model_zoo.html) and started `matlab` from the caffe root folder.
+
+    model = './models/bvlc_reference_caffenet/deploy.prototxt';
+    weights = './models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel';
+
+#### Set mode and device
+
+**Mode and device should always be set BEFORE you create a net or a solver.**
+
+Use CPU:
+
+    caffe.set_mode_cpu();
+
+Use GPU and specify its gpu_id:
+
+    caffe.set_mode_gpu();
+    caffe.set_device(gpu_id);
+
+#### Create a network and access its layers and blobs
+
+Create a network:
+
+    net = caffe.Net(model, weights, 'test'); % create net and load weights
+
+Or
+
+    net = caffe.Net(model, 'test'); % create net but do not load weights
+    net.copy_from(weights); % load weights
+
+which creates `net` object as
+
+      Net with properties:
+
+               layer_vec: [1x23 caffe.Layer]
+                blob_vec: [1x15 caffe.Blob]
+                  inputs: {'data'}
+                 outputs: {'prob'}
+        name2layer_index: [23x1 containers.Map]
+         name2blob_index: [15x1 containers.Map]
+             layer_names: {23x1 cell}
+              blob_names: {15x1 cell}
+
+The two `containers.Map` objects are useful to find the index of a layer or a blob by its name.
+
+You have access to every blob in this network. To fill blob 'data' with all ones:
+
+    net.blobs('data').set_data(ones(net.blobs('data').shape));
+
+To multiply all values in blob 'data' by 10:
+
+    net.blobs('data').set_data(net.blobs('data').get_data() * 10);
+
+**Be aware that since Matlab is 1-indexed and column-major, the usual 4 blob dimensions in Matlab are `[width, height, channels, num]`, and `width` is the fastest dimension. Also be aware that images are in BGR channels.** Also, Caffe uses single-precision float data. If your data is not single, `set_data` will automatically convert it to single.
+
+You also have access to every layer, so you can do network surgery. For example, to multiply conv1 parameters by 10:
+
+    net.params('conv1', 1).set_data(net.params('conv1', 1).get_data() * 10); % set weights
+    net.params('conv1', 2).set_data(net.params('conv1', 2).get_data() * 10); % set bias
+
+Alternatively, you can use
+
+    net.layers('conv1').params(1).set_data(net.layers('conv1').params(1).get_data() * 10);
+    net.layers('conv1').params(2).set_data(net.layers('conv1').params(2).get_data() * 10);
+
+To save the network you just modified:
+
+    net.save('my_net.caffemodel');
+
+To get a layer's type (string):
+
+    layer_type = net.layers('conv1').type;
+
+#### Forward and backward
+
+Forward pass can be done using `net.forward` or `net.forward_prefilled`. Function `net.forward` takes in a cell array of N-D arrays containing data of input blob(s) and outputs a cell array containing data from output blob(s). Function `net.forward_prefilled` uses existing data in input blob(s) during forward pass, takes no input and produces no output. After creating some data for input blobs like `data = rand(net.blobs('data').shape);` you can run
+    
+    res = net.forward({data});
+    prob = res{1};
+
+Or
+
+    net.blobs('data').set_data(data);
+    net.forward_prefilled();
+    prob = net.blobs('prob').get_data();
+
+Backward is similar using `net.backward` or `net.backward_prefilled` and replacing `get_data` and `set_data` with `get_diff` and `set_diff`. After creating some gradients for output blobs like `prob_diff = rand(net.blobs('prob').shape);` you can run
+
+    res = net.backward({prob_diff});
+    data_diff = res{1};
+
+Or
+
+    net.blobs('prob').set_diff(prob_diff);
+    net.backward_prefilled();
+    data_diff = net.blobs('data').get_diff();
+    
+**However, the backward computation above doesn't get correct results, because Caffe decides that the network does not need backward computation. To get correct backward results, you need to set `'force_backward: true'` in your network prototxt.**
+
+After performing forward or backward pass, you can also get the data or diff in internal blobs. For example, to extract pool5 features after forward pass:
+
+    pool5_feat = net.blobs('pool5').get_data();
+
+#### Reshape
+
+Assume you want to run 1 image at a time instead of 10:
+
+    net.blobs('data').reshape([227 227 3 1]); % reshape blob 'data'
+    net.reshape();
+
+Then the whole network is reshaped, and now `net.blobs('prob').shape` should be `[1000 1]`.
+
+#### Training
+
+Assuming you have created training and validation lmdbs following our [ImageNet Tutorial](http://caffe.berkeleyvision.org/gathered/examples/imagenet.html), create a solver and train on the ILSVRC 2012 classification dataset with:
+
+    solver = caffe.Solver('./models/bvlc_reference_caffenet/solver.prototxt');
+
+which creates `solver` object as
+
+      Solver with properties:
+
+              net: [1x1 caffe.Net]
+        test_nets: [1x1 caffe.Net]
+
+To train:
+
+    solver.solve();
+
+Or train for only 1000 iterations (so that you can do something to its net before training more iterations)
+
+    solver.step(1000);
+
+To get iteration number:
+
+    iter = solver.iter();
+
+To get its network:
+
+    train_net = solver.net;
+    test_net = solver.test_nets(1);
+
+To resume from a snapshot "your_snapshot.solverstate":
+
+    solver.restore('your_snapshot.solverstate');
+
+#### Input and output
+
+The `caffe.io` class provides basic input functions `load_image` and `read_mean`. For example, to read the ILSVRC 2012 mean file (assuming you have downloaded the ImageNet example auxiliary files by running `./data/ilsvrc12/get_ilsvrc_aux.sh`):
+
+    mean_data = caffe.io.read_mean('./data/ilsvrc12/imagenet_mean.binaryproto');
+
+To read Caffe's example image and resize it to `[width, height]`, supposing we want `width = 256; height = 256;`:
+
+    im_data = caffe.io.load_image('./examples/images/cat.jpg');
+    im_data = imresize(im_data, [width, height]); % resize using Matlab's imresize
+
+**Keep in mind that `width` is the fastest dimension and channels are BGR, which is different from the usual way that Matlab stores an image.** If you don't want to use `caffe.io.load_image` and prefer to load an image by yourself, you can do
+
+    im_data = imread('./examples/images/cat.jpg'); % read image
+    im_data = im_data(:, :, [3, 2, 1]); % convert from RGB to BGR
+    im_data = permute(im_data, [2, 1, 3]); % permute width and height
+    im_data = single(im_data); % convert to single precision
+
+Also, you may take a look at caffe/matlab/demo/classification_demo.m to see how to prepare input by taking crops from an image.
+
+We show in caffe/matlab/hdf5creation how to read and write HDF5 data with Matlab. We do not provide extra functions for data output, as Matlab itself already offers powerful output facilities.
+
+#### Clear nets and solvers
+
+Call `caffe.reset_all()` to clear all solvers and stand-alone nets you have created.
diff --git a/docs/tutorial/layers.md b/docs/tutorial/layers.md
new file mode 100644
index 0000000..806374e
--- /dev/null
+++ b/docs/tutorial/layers.md
@@ -0,0 +1,525 @@
+---
+title: Layer Catalogue
+---
+# Layers
+
+To create a Caffe model you need to define the model architecture in a protocol buffer definition file (prototxt).
+
+Caffe layers and their parameters are defined in the protocol buffer definitions for the project in [caffe.proto](https://github.com/BVLC/caffe/blob/master/src/caffe/proto/caffe.proto).
+
+### Vision Layers
+
+* Header: `./include/caffe/vision_layers.hpp`
+
+Vision layers usually take *images* as input and produce other *images* as output.
+A typical "image" in the real-world may have one color channel ($$c = 1$$), as in a grayscale image, or three color channels ($$c = 3$$) as in an RGB (red, green, blue) image.
+But in this context, the distinguishing characteristic of an image is its spatial structure: usually an image has some non-trivial height $$h > 1$$ and width $$w > 1$$.
+This 2D geometry naturally lends itself to certain decisions about how to process the input.
+In particular, most of the vision layers work by applying a particular operation to some region of the input to produce a corresponding region of the output.
+In contrast, other layers (with few exceptions) ignore the spatial structure of the input, effectively treating it as "one big vector" with dimension $$chw$$.
+
+
+#### Convolution
+
+* Layer type: `Convolution`
+* CPU implementation: `./src/caffe/layers/convolution_layer.cpp`
+* CUDA GPU implementation: `./src/caffe/layers/convolution_layer.cu`
+* Parameters (`ConvolutionParameter convolution_param`)
+    - Required
+        - `num_output` (`c_o`): the number of filters
+        - `kernel_size` (or `kernel_h` and `kernel_w`): specifies height and width of each filter
+    - Strongly Recommended
+        - `weight_filler` [default `type: 'constant' value: 0`]
+    - Optional
+        - `bias_term` [default `true`]: specifies whether to learn and apply a set of additive biases to the filter outputs
+        - `pad` (or `pad_h` and `pad_w`) [default 0]: specifies the number of pixels to (implicitly) add to each side of the input
+        - `stride` (or `stride_h` and `stride_w`) [default 1]: specifies the intervals at which to apply the filters to the input
+        - `group` (g) [default 1]: If g > 1, we restrict the connectivity of each filter to a subset of the input. Specifically, the input and output channels are separated into g groups, and the $$i$$th output group channels will be only connected to the $$i$$th input group channels.
+* Input
+    - `n * c_i * h_i * w_i`
+* Output
+    - `n * c_o * h_o * w_o`, where `h_o = (h_i + 2 * pad_h - kernel_h) / stride_h + 1` and `w_o` likewise (see the sketch below the sample).
+* Sample (as seen in `./examples/imagenet/imagenet_train_val.prototxt`)
+
+      layer {
+        name: "conv1"
+        type: "Convolution"
+        bottom: "data"
+        top: "conv1"
+        # learning rate and decay multipliers for the filters
+        param { lr_mult: 1 decay_mult: 1 }
+        # learning rate and decay multipliers for the biases
+        param { lr_mult: 2 decay_mult: 0 }
+        convolution_param {
+          num_output: 96     # learn 96 filters
+          kernel_size: 11    # each filter is 11x11
+          stride: 4          # step 4 pixels between each filter application
+          weight_filler {
+            type: "gaussian" # initialize the filters from a Gaussian
+            std: 0.01        # distribution with stdev 0.01 (default mean: 0)
+          }
+          bias_filler {
+            type: "constant" # initialize the biases to zero (0)
+            value: 0
+          }
+        }
+      }
+
+The `Convolution` layer convolves the input image with a set of learnable filters, each producing one feature map in the output image.
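+
+As a quick sanity check of the output size formula above, here is a small illustrative Python helper (not part of Caffe):
+
+    def conv_out_dim(in_dim, kernel, pad=0, stride=1):
+        """Output spatial size: (in + 2 * pad - kernel) / stride + 1."""
+        return (in_dim + 2 * pad - kernel) // stride + 1
+
+    # e.g. conv1 above: an 11x11 kernel with stride 4 and no padding on a 227x227 crop
+    print(conv_out_dim(227, kernel=11, stride=4))  # -> 55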
+
+#### Pooling
+
+* Layer type: `Pooling`
+* CPU implementation: `./src/caffe/layers/pooling_layer.cpp`
+* CUDA GPU implementation: `./src/caffe/layers/pooling_layer.cu`
+* Parameters (`PoolingParameter pooling_param`)
+    - Required
+        - `kernel_size` (or `kernel_h` and `kernel_w`): specifies height and width of each filter
+    - Optional
+        - `pool` [default MAX]: the pooling method. Currently MAX, AVE, or STOCHASTIC
+        - `pad` (or `pad_h` and `pad_w`) [default 0]: specifies the number of pixels to (implicitly) add to each side of the input
+        - `stride` (or `stride_h` and `stride_w`) [default 1]: specifies the intervals at which to apply the filters to the input
+* Input
+    - `n * c * h_i * w_i`
+* Output
+    - `n * c * h_o * w_o`, where h_o and w_o are computed in the same way as convolution.
+* Sample (as seen in `./examples/imagenet/imagenet_train_val.prototxt`)
+
+      layer {
+        name: "pool1"
+        type: "Pooling"
+        bottom: "conv1"
+        top: "pool1"
+        pooling_param {
+          pool: MAX
+          kernel_size: 3 # pool over a 3x3 region
+          stride: 2      # step two pixels (in the bottom blob) between pooling regions
+        }
+      }
+
+#### Local Response Normalization (LRN)
+
+* Layer type: `LRN`
+* CPU Implementation: `./src/caffe/layers/lrn_layer.cpp`
+* CUDA GPU Implementation: `./src/caffe/layers/lrn_layer.cu`
+* Parameters (`LRNParameter lrn_param`)
+    - Optional
+        - `local_size` [default 5]: the number of channels to sum over (for cross channel LRN) or the side length of the square region to sum over (for within channel LRN)
+        - `alpha` [default 1]: the scaling parameter (see below)
+        - `beta` [default 0.75]: the exponent (see below)
+        - `norm_region` [default `ACROSS_CHANNELS`]: whether to sum over adjacent channels (`ACROSS_CHANNELS`) or nearby spatial locations (`WITHIN_CHANNEL`)
+
+The local response normalization layer performs a kind of "lateral inhibition" by normalizing over local input regions. In `ACROSS_CHANNELS` mode, the local regions extend across nearby channels, but have no spatial extent (i.e., they have shape `local_size x 1 x 1`). In `WITHIN_CHANNEL` mode, the local regions extend spatially, but are in separate channels (i.e., they have shape `1 x local_size x local_size`). Each input value is divided by $$(1 + (\alpha/n) \sum_i x_i^2)^\beta$$, where $$n$$ is the size of each local region, and the sum is taken over the region centered at that value (zero padding is added where necessary).
+
+#### im2col
+
+`Im2col` is a helper for doing the image-to-column transformation that you most likely do not need to know about. This is used in Caffe's original convolution to do matrix multiplication by laying out all patches into a matrix.
+
+### Loss Layers
+
+Loss drives learning by comparing an output to a target and assigning a cost to minimize. The loss itself is computed by the forward pass and the gradient w.r.t. the loss is computed by the backward pass.
+
+#### Softmax
+
+* Layer type: `SoftmaxWithLoss`
+
+The softmax loss layer computes the multinomial logistic loss of the softmax of its inputs. It's conceptually identical to a softmax layer followed by a multinomial logistic loss layer, but provides a more numerically stable gradient.
+
+#### Sum-of-Squares / Euclidean
+
+* Layer type: `EuclideanLoss`
+
+The Euclidean loss layer computes the sum of squares of differences of its two inputs, $$\frac 1 {2N} \sum_{i=1}^N \| x^1_i - x^2_i \|_2^2$$.
+
+#### Hinge / Margin
+
+* Layer type: `HingeLoss`
+* CPU implementation: `./src/caffe/layers/hinge_loss_layer.cpp`
+* CUDA GPU implementation: none yet
+* Parameters (`HingeLossParameter hinge_loss_param`)
+    - Optional
+        - `norm` [default L1]: the norm used. Currently L1, L2
+* Inputs
+    - `n * c * h * w` Predictions
+    - `n * 1 * 1 * 1` Labels
+* Output
+    - `1 * 1 * 1 * 1` Computed Loss
+* Samples
+
+      # L1 Norm
+      layer {
+        name: "loss"
+        type: "HingeLoss"
+        bottom: "pred"
+        bottom: "label"
+      }
+
+      # L2 Norm
+      layer {
+        name: "loss"
+        type: "HingeLoss"
+        bottom: "pred"
+        bottom: "label"
+        top: "loss"
+        hinge_loss_param {
+          norm: L2
+        }
+      }
+
+The hinge loss layer computes a one-vs-all hinge or squared hinge loss.
+
+#### Sigmoid Cross-Entropy
+
+`SigmoidCrossEntropyLoss`
+
+#### Infogain
+
+`InfogainLoss`
+
+#### Accuracy and Top-k
+
+`Accuracy` scores the output as the accuracy of the output with respect to the target -- it is not actually a loss and has no backward step.
+
+### Activation / Neuron Layers
+
+In general, activation / neuron layers are element-wise operators, taking one bottom blob and producing one top blob of the same size. In the layers below, we will ignore the input and output sizes as they are identical:
+
+* Input
+    - n * c * h * w
+* Output
+    - n * c * h * w
+
+#### ReLU / Rectified-Linear and Leaky-ReLU
+
+* Layer type: `ReLU`
+* CPU implementation: `./src/caffe/layers/relu_layer.cpp`
+* CUDA GPU implementation: `./src/caffe/layers/relu_layer.cu`
+* Parameters (`ReLUParameter relu_param`)
+    - Optional
+        - `negative_slope` [default 0]: specifies whether to leak the negative part by multiplying it with the slope value rather than setting it to 0.
+* Sample (as seen in `./examples/imagenet/imagenet_train_val.prototxt`)
+
+      layer {
+        name: "relu1"
+        type: "ReLU"
+        bottom: "conv1"
+        top: "conv1"
+      }
+
+Given an input value x, the `ReLU` layer computes the output as x if x > 0 and negative_slope * x if x <= 0. When the negative slope parameter is not set, it is equivalent to the standard ReLU function of taking max(x, 0). It also supports in-place computation, meaning that the bottom and the top blob may be the same to conserve memory.
+
+#### Sigmoid
+
+* Layer type: `Sigmoid`
+* CPU implementation: `./src/caffe/layers/sigmoid_layer.cpp`
+* CUDA GPU implementation: `./src/caffe/layers/sigmoid_layer.cu`
+* Sample (as seen in `./examples/mnist/mnist_autoencoder.prototxt`)
+
+      layer {
+        name: "encode1neuron"
+        bottom: "encode1"
+        top: "encode1neuron"
+        type: "Sigmoid"
+      }
+
+The `Sigmoid` layer computes the output as sigmoid(x) for each input element x.
+
+#### TanH / Hyperbolic Tangent
+
+* Layer type: `TanH`
+* CPU implementation: `./src/caffe/layers/tanh_layer.cpp`
+* CUDA GPU implementation: `./src/caffe/layers/tanh_layer.cu`
+* Sample
+
+      layer {
+        name: "layer"
+        bottom: "in"
+        top: "out"
+        type: "TanH"
+      }
+
+The `TanH` layer computes the output as tanh(x) for each input element x.
+
+#### Absolute Value
+
+* Layer type: `AbsVal`
+* CPU implementation: `./src/caffe/layers/absval_layer.cpp`
+* CUDA GPU implementation: `./src/caffe/layers/absval_layer.cu`
+* Sample
+
+      layer {
+        name: "layer"
+        bottom: "in"
+        top: "out"
+        type: "AbsVal"
+      }
+
+The `AbsVal` layer computes the output as abs(x) for each input element x.
+
+#### Power
+
+* Layer type: `Power`
+* CPU implementation: `./src/caffe/layers/power_layer.cpp`
+* CUDA GPU implementation: `./src/caffe/layers/power_layer.cu`
+* Parameters (`PowerParameter power_param`)
+    - Optional
+        - `power` [default 1]
+        - `scale` [default 1]
+        - `shift` [default 0]
+* Sample
+
+      layer {
+        name: "layer"
+        bottom: "in"
+        top: "out"
+        type: "Power"
+        power_param {
+          power: 1
+          scale: 1
+          shift: 0
+        }
+      }
+
+The `Power` layer computes the output as (shift + scale * x) ^ power for each input element x.
+
+#### BNLL
+
+* Layer type: `BNLL`
+* CPU implementation: `./src/caffe/layers/bnll_layer.cpp`
+* CUDA GPU implementation: `./src/caffe/layers/bnll_layer.cu`
+* Sample
+
+      layer {
+        name: "layer"
+        bottom: "in"
+        top: "out"
+        type: "BNLL"
+      }
+
+The `BNLL` (binomial normal log likelihood) layer computes the output as log(1 + exp(x)) for each input element x.
+
+
+### Data Layers
+
+Data enters Caffe through data layers: they lie at the bottom of nets. Data can come from efficient databases (LevelDB or LMDB), directly from memory, or, when efficiency is not critical, from files on disk in HDF5 or common image formats.
+
+Common input preprocessing (mean subtraction, scaling, random cropping, and mirroring) is available by specifying `TransformationParameter`s.
+
+#### Database
+
+* Layer type: `Data`
+* Parameters
+    - Required
+        - `source`: the name of the directory containing the database
+        - `batch_size`: the number of inputs to process at one time
+    - Optional
+        - `rand_skip`: skip up to this number of inputs at the beginning; useful for asynchronous sgd
+        - `backend` [default `LEVELDB`]: choose whether to use a `LEVELDB` or `LMDB`
+
+
+
+#### In-Memory
+
+* Layer type: `MemoryData`
+* Parameters
+    - Required
+        - `batch_size`, `channels`, `height`, `width`: specify the size of input chunks to read from memory
+
+The memory data layer reads data directly from memory, without copying it. In order to use it, one must call `MemoryDataLayer::Reset` (from C++) or `Net.set_input_arrays` (from Python) in order to specify a source of contiguous data (as 4D row major array), which is read one batch-sized chunk at a time.
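+
+A rough pycaffe sketch of this flow (the prototxt name and the 32 x 3 x 227 x 227 shape are assumptions; they must match the `MemoryData` layer's `batch_size`, `channels`, `height`, and `width`):
+
+    import numpy as np
+    import caffe
+
+    net = caffe.Net('memory_data_net.prototxt', caffe.TRAIN)  # hypothetical net whose first layer is MemoryData
+    # data must be a contiguous float32 4D array (N x C x H x W); labels a float32 vector of length N
+    data = np.zeros((32, 3, 227, 227), dtype=np.float32)
+    labels = np.zeros(32, dtype=np.float32)
+    net.set_input_arrays(data, labels)
+    net.forward()  # consumes one batch_size-sized chunk of the arrays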
+
+#### HDF5 Input
+
+* Layer type: `HDF5Data`
+* Parameters
+    - Required
+        - `source`: the name of the file to read from
+        - `batch_size`
+
+#### HDF5 Output
+
+* Layer type: `HDF5Output`
+* Parameters
+    - Required
+        - `file_name`: name of file to write to
+
+The HDF5 output layer performs the opposite function of the other layers in this section: it writes its input blobs to disk.
+
+#### Images
+
+* Layer type: `ImageData`
+* Parameters
+    - Required
+        - `source`: name of a text file, with each line giving an image filename and label
+        - `batch_size`: number of images to batch together
+    - Optional
+        - `rand_skip`
+        - `shuffle` [default false]
+        - `new_height`, `new_width`: if provided, resize all images to this size
+
+#### Windows
+
+`WindowData`
+
+#### Dummy
+
+`DummyData` is for development and debugging. See `DummyDataParameter`.
+
+### Common Layers
+
+#### Inner Product
+
+* Layer type: `InnerProduct`
+* CPU implementation: `./src/caffe/layers/inner_product_layer.cpp`
+* CUDA GPU implementation: `./src/caffe/layers/inner_product_layer.cu`
+* Parameters (`InnerProductParameter inner_product_param`)
+    - Required
+        - `num_output` (`c_o`): the number of filters
+    - Strongly recommended
+        - `weight_filler` [default `type: 'constant' value: 0`]
+    - Optional
+        - `bias_filler` [default `type: 'constant' value: 0`]
+        - `bias_term` [default `true`]: specifies whether to learn and apply a set of additive biases to the filter outputs
+* Input
+    - `n * c_i * h_i * w_i`
+* Output
+    - `n * c_o * 1 * 1`
+* Sample
+
+      layer {
+        name: "fc8"
+        type: "InnerProduct"
+        # learning rate and decay multipliers for the weights
+        param { lr_mult: 1 decay_mult: 1 }
+        # learning rate and decay multipliers for the biases
+        param { lr_mult: 2 decay_mult: 0 }
+        inner_product_param {
+          num_output: 1000
+          weight_filler {
+            type: "gaussian"
+            std: 0.01
+          }
+          bias_filler {
+            type: "constant"
+            value: 0
+          }
+        }
+        bottom: "fc7"
+        top: "fc8"
+      }
+
+The `InnerProduct` layer (also usually referred to as the fully connected layer) treats the input as a simple vector and produces an output in the form of a single vector (with the blob's height and width set to 1).
+
+#### Splitting
+
+The `Split` layer is a utility layer that splits an input blob into multiple output blobs. This is used when a blob is fed into multiple other layers.
+
+#### Flattening
+
+The `Flatten` layer is a utility layer that flattens an input of shape `n * c * h * w` to a simple vector output of shape `n * (c*h*w)`.
+
+#### Reshape
+
+* Layer type: `Reshape`
+* Implementation: `./src/caffe/layers/reshape_layer.cpp`
+* Parameters (`ReshapeParameter reshape_param`)
+    - Optional: (also see detailed description below)
+        - `shape`
+
+* Input
+    - a single blob with arbitrary dimensions
+* Output
+    - the same blob, with modified dimensions, as specified by `reshape_param`
+
+* Sample
+
+        layer {
+          name: "reshape"
+          type: "Reshape"
+          bottom: "input"
+          top: "output"
+          reshape_param {
+            shape {
+              dim: 0  # copy the dimension from below
+              dim: 2
+              dim: 3
+              dim: -1 # infer it from the other dimensions
+            }
+          }
+        }
+
+The `Reshape` layer can be used to change the dimensions of its input, without changing its data. Just like the `Flatten` layer, only the dimensions are changed; no data is copied in the process.
+
+Output dimensions are specified by the `ReshapeParameter` proto. Positive numbers are used directly, setting the corresponding dimension of the output blob. In addition, two special values are accepted for any of the target dimension values:
+
+* **0** means "copy the respective dimension of the bottom layer". That is, if the bottom has 2 as its 1st dimension, the top will have 2 as its 1st dimension as well, given `dim: 0` as the 1st target dimension.
+* **-1** stands for "infer this from the other dimensions". This behavior is similar to that of -1 in *numpy*'s reshape or `[]` in *MATLAB*'s reshape: this dimension is calculated to keep the overall element count the same as in the bottom layer. At most one -1 can be used in a reshape operation.
+
+As another example, specifying `reshape_param { shape { dim: 0 dim: -1 } }` makes the layer behave in exactly the same way as the `Flatten` layer.
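+
+To make the dimension rules concrete, here is a small illustrative Python function (not Caffe code) that resolves a target shape against a bottom shape as described above:
+
+    def resolve_reshape(bottom_shape, target):
+        """Resolve 0 (copy from bottom) and -1 (infer) entries of a Reshape target shape."""
+        out = [bottom_shape[i] if d == 0 else d for i, d in enumerate(target)]
+        if -1 in out:
+            total = 1
+            for d in bottom_shape:
+                total *= d
+            known = 1
+            for d in out:
+                if d != -1:
+                    known *= d
+            out[out.index(-1)] = total // known
+        return out
+
+    print(resolve_reshape([64, 3, 28, 28], [0, 2, 3, -1]))  # -> [64, 2, 3, 392]
+    print(resolve_reshape([64, 3, 28, 28], [0, -1]))        # behaves like Flatten -> [64, 2352]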
+
+#### Concatenation
+
+* Layer type: `Concat`
+* CPU implementation: `./src/caffe/layers/concat_layer.cpp`
+* CUDA GPU implementation: `./src/caffe/layers/concat_layer.cu`
+* Parameters (`ConcatParameter concat_param`)
+    - Optional
+        - `axis` [default 1]: 0 for concatenation along num and 1 for channels.
+* Input
+    - `n_i * c_i * h * w` for each input blob i from 1 to K.
+* Output
+    - if `axis = 0`: `(n_1 + n_2 + ... + n_K) * c_1 * h * w`, and all input `c_i` should be the same.
+    - if `axis = 1`: `n_1 * (c_1 + c_2 + ... + c_K) * h * w`, and all input `n_i` should be the same.
+* Sample
+
+      layer {
+        name: "concat"
+        bottom: "in1"
+        bottom: "in2"
+        top: "out"
+        type: "Concat"
+        concat_param {
+          axis: 1
+        }
+      }
+
+The `Concat` layer is a utility layer that concatenates its multiple input blobs to one single output blob.
+
+#### Slicing
+
+The `Slice` layer is a utility layer that slices an input blob into multiple output blobs along a given dimension (currently num or channel only) with given slice indices.
+
+* Sample
+
+      layer {
+        name: "slicer_label"
+        type: "Slice"
+        bottom: "label"
+        ## Example of label with a shape N x 3 x 1 x 1
+        top: "label1"
+        top: "label2"
+        top: "label3"
+        slice_param {
+          axis: 1
+          slice_point: 1
+          slice_point: 2
+        }
+      }
+
+`axis` indicates the target axis; `slice_point` indicates indexes in the selected dimension (the number of indices must be equal to the number of top blobs minus one).
+
+
+#### Elementwise Operations
+
+`Eltwise`
+
+#### Argmax
+
+`ArgMax`
+
+#### Softmax
+
+`Softmax`
+
+#### Mean-Variance Normalization
+
+`MVN`
diff --git a/docs/tutorial/loss.md b/docs/tutorial/loss.md
new file mode 100644
index 0000000..d2d0e77
--- /dev/null
+++ b/docs/tutorial/loss.md
@@ -0,0 +1,51 @@
+---
+title: Loss
+---
+# Loss
+
+In Caffe, as in most of machine learning, learning is driven by a **loss** function (also known as an **error**, **cost**, or **objective** function).
+A loss function specifies the goal of learning by mapping parameter settings (i.e., the current network weights) to a scalar value specifying the "badness" of these parameter settings.
+Hence, the goal of learning is to find a setting of the weights that *minimizes* the loss function.
+
+The loss in Caffe is computed by the Forward pass of the network.
+Each layer takes a set of input (`bottom`) blobs and produces a set of output (`top`) blobs.
+Some of these layers' outputs may be used in the loss function.
+A typical choice of loss function for one-versus-all classification tasks is the `SoftmaxWithLoss` function, used in a network definition as follows, for example:
+
+    layer {
+      name: "loss"
+      type: "SoftmaxWithLoss"
+      bottom: "pred"
+      bottom: "label"
+      top: "loss"
+    }
+
+In a `SoftmaxWithLoss` function, the `top` blob is a scalar (empty shape) which averages the loss (computed from the predicted labels `pred` and the actual labels `label`) over the entire mini-batch.
+
+### Loss weights
+
+For nets with multiple layers producing a loss (e.g., a network that both classifies the input using a `SoftmaxWithLoss` layer and reconstructs it using a `EuclideanLoss` layer), *loss weights* can be used to specify their relative importance.
+
+By convention, Caffe layer types with the suffix `Loss` contribute to the loss function, but other layers are assumed to be purely used for intermediate computations.
+However, any layer can be used as a loss by adding a field `loss_weight: <float>` to a layer definition for each `top` blob produced by the layer.
+Layers with the suffix `Loss` have an implicit `loss_weight: 1` for the first `top` blob (and `loss_weight: 0` for any additional `top`s); other layers have an implicit `loss_weight: 0` for all `top`s.
+So, the above `SoftmaxWithLoss` layer could be equivalently written as:
+
+    layer {
+      name: "loss"
+      type: "SoftmaxWithLoss"
+      bottom: "pred"
+      bottom: "label"
+      top: "loss"
+      loss_weight: 1
+    }
+
+However, *any* layer able to backpropagate may be given a non-zero `loss_weight`, allowing one to, for example, regularize the activations produced by some intermediate layer(s) of the network if desired.
+For non-singleton outputs with an associated non-zero loss, the loss is computed simply by summing over all entries of the blob.
+
+The final loss in Caffe, then, is computed by summing the total weighted loss over the network, as in the following pseudo-code:
+
+    loss := 0
+    for layer in layers:
+      for top, loss_weight in layer.tops, layer.loss_weights:
+        loss += loss_weight * sum(top)
diff --git a/docs/tutorial/net_layer_blob.md b/docs/tutorial/net_layer_blob.md
new file mode 100644
index 0000000..e8b7bd3
--- /dev/null
+++ b/docs/tutorial/net_layer_blob.md
@@ -0,0 +1,168 @@
+---
+title: Blobs, Layers, and Nets
+---
+# Blobs, Layers, and Nets: anatomy of a Caffe model
+
+Deep networks are compositional models that are naturally represented as a collection of inter-connected layers that work on chunks of data. Caffe defines a net layer-by-layer in its own model schema. The network defines the entire model bottom-to-top from input data to loss. As data and derivatives flow through the network in the [forward and backward passes](forward_backward.html) Caffe stores, communicates, and manipulates the information as *blobs*: the blob is the standard array and unified memory interface for the framework. The layer comes next as the foundation of both model and computation. The net follows as the collection and connection of layers.
+
+[Solving](solver.html) is configured separately to decouple modeling and optimization.
+
+We will go over the details of these components in more detail.
+
+## Blob storage and communication
+
+A Blob is a wrapper over the actual data being processed and passed along by Caffe, and also under the hood provides synchronization capability between the CPU and the GPU. Mathematically, a blob is an N-dimensional array stored in a C-contiguous fashion.
+
+Caffe stores and communicates data using blobs. Blobs provide a unified memory interface holding data; e.g., batches of images, model parameters, and derivatives for optimization.
+
+Blobs conceal the computational and mental overhead of mixed CPU/GPU operation by synchronizing from the CPU host to the GPU device as needed. Memory on the host and device is allocated on demand (lazily) for efficient memory usage.
+
+The conventional blob dimensions for batches of image data are number N x channel K x height H x width W. Blob memory is row-major in layout, so the last / rightmost dimension changes fastest. For example, in a 4D blob, the value at index (n, k, h, w) is physically located at index ((n * K + k) * H + h) * W + w.
+
+- Number / N is the batch size of the data. Batch processing achieves better throughput for communication and device processing. For an ImageNet training batch of 256 images, N = 256.
+- Channel / K is the feature dimension e.g. for RGB images K = 3.
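+
+As a concrete check of the row-major indexing formula above, a tiny illustrative Python snippet:
+
+    import numpy as np
+
+    N, K, H, W = 2, 3, 4, 5
+    blob = np.arange(N * K * H * W, dtype=np.float32).reshape(N, K, H, W)  # C-contiguous, like a Caffe blob
+
+    n, k, h, w = 1, 2, 3, 4
+    flat_index = ((n * K + k) * H + h) * W + w
+    assert blob[n, k, h, w] == blob.ravel()[flat_index]  # the value at (n, k, h, w) sits at that offset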
+
+Note that although many blobs in Caffe examples are 4D with axes for image applications, it is totally valid to use blobs for non-image applications. For example, if you simply need fully-connected networks like the conventional multi-layer perceptron, use 2D blobs (shape (N, D)) and call the InnerProductLayer (which we will cover soon).
+
+Parameter blob dimensions vary according to the type and configuration of the layer. For a convolution layer with 96 filters of 11 x 11 spatial dimension and 3 inputs the blob is 96 x 3 x 11 x 11. For an inner product / fully-connected layer with 1000 output channels and 1024 input channels the parameter blob is 1000 x 1024.
+
+For custom data it may be necessary to hack your own input preparation tool or data layer. However, once your data is in, your job is done. The modularity of layers accomplishes the rest of the work for you.
+
+### Implementation Details
+
+As we are often interested in the values as well as the gradients of the blob, a Blob stores two chunks of memory, *data* and *diff*. The former is the normal data that we pass along, and the latter is the gradient computed by the network.
+
+Further, as the actual values could be stored either on the CPU or on the GPU, there are two different ways to access them: the const way, which does not change the values, and the mutable way, which changes the values:
+
+    const Dtype* cpu_data() const;
+    Dtype* mutable_cpu_data();
+
+(similarly for gpu and diff).
+
+The reason for this design is that a Blob uses a SyncedMem class to synchronize values between the CPU and GPU in order to hide the synchronization details and to minimize data transfer. A rule of thumb is: always use the const call if you do not want to change the values, and never store the pointers in your own object. Every time you work on a blob, call the functions to get the pointers, as the SyncedMem needs this to figure out when to copy data.
+
+In practice when GPUs are present, one loads data from the disk to a blob in CPU code, calls a device kernel to do GPU computation, and ferries the blob off to the next layer, ignoring low-level details while maintaining a high level of performance. As long as all layers have GPU implementations, all the intermediate data and gradients will remain in the GPU.
+
+If you want to check out when a Blob will copy data, here is an illustrative example:
+
+    // Assuming that data are on the CPU initially, and we have a blob.
+    const Dtype* foo;
+    Dtype* bar;
+    foo = blob.gpu_data(); // data copied cpu->gpu.
+    foo = blob.cpu_data(); // no data copied since both have up-to-date contents.
+    bar = blob.mutable_gpu_data(); // no data copied.
+    // ... some operations ...
+    bar = blob.mutable_gpu_data(); // no data copied when we are still on GPU.
+    foo = blob.cpu_data(); // data copied gpu->cpu, since the gpu side has modified the data
+    foo = blob.gpu_data(); // no data copied since both have up-to-date contents
+    bar = blob.mutable_cpu_data(); // still no data copied.
+    bar = blob.mutable_gpu_data(); // data copied cpu->gpu.
+    bar = blob.mutable_cpu_data(); // data copied gpu->cpu.
+
+## Layer computation and connections
+
+The layer is the essence of a model and the fundamental unit of computation. Layers convolve filters, pool, take inner products, apply nonlinearities like rectified-linear and sigmoid and other elementwise transformations, normalize, load data, and compute losses like softmax and hinge. [See the layer catalogue](layers.html) for all operations. Most of the types needed for state-of-the-art deep learning tasks are there.
+
+<img src="fig/layer.jpg" alt="A layer with bottom and top blob." width="256">
+
+A layer takes input through *bottom* connections and makes output through *top* connections.
+
+Each layer type defines three critical computations: *setup*, *forward*, and *backward*.
+
+- Setup: initialize the layer and its connections once at model initialization.
+- Forward: given input from bottom compute the output and send to the top.
+- Backward: given the gradient w.r.t. the top output, compute the gradient w.r.t. the input and send it to the bottom. A layer with parameters computes the gradient w.r.t. its parameters and stores it internally.
+
+More specifically, there are two Forward and Backward functions implemented, one for CPU and one for GPU. If you do not implement a GPU version, the layer falls back to the CPU functions as a backup option. This may come in handy if you would like to do quick experiments, although it may come with an additional data transfer cost (its inputs will be copied from GPU to CPU, and its outputs will be copied back from CPU to GPU).
+
+Layers have two key responsibilities for the operation of the network as a whole: a *forward pass* that takes the inputs and produces the outputs, and a *backward pass* that takes the gradient with respect to the output, and computes the gradients with respect to the parameters and to the inputs, which are in turn back-propagated to earlier layers. These passes are simply the composition of each layer's forward and backward.
+
+Developing custom layers requires minimal effort thanks to the compositionality of the network and the modularity of the code. Define the setup, forward, and backward for the layer and it is ready for inclusion in a net.
+
+## Net definition and operation
+
+The net jointly defines a function and its gradient by composition and auto-differentiation. The composition of every layer's output computes the function to do a given task, and the composition of every layer's backward computes the gradient from the loss to learn the task. Caffe models are end-to-end machine learning engines.
+
+The net is a set of layers connected in a computation graph -- a directed acyclic graph (DAG) to be exact. Caffe does all the bookkeeping for any DAG of layers to ensure correctness of the forward and backward passes. A typical net begins with a data layer that loads from disk and ends with a loss layer that computes the objective for a task such as classification or reconstruction.
+
+The net is defined as a set of layers and their connections in a plaintext modeling language.
+A simple logistic regression classifier
+
+<img src="fig/logreg.jpg" alt="Softmax Regression" width="256">
+
+is defined by
+
+    name: "LogReg"
+    layer {
+      name: "mnist"
+      type: "Data"
+      top: "data"
+      top: "label"
+      data_param {
+        source: "input_leveldb"
+        batch_size: 64
+      }
+    }
+    layer {
+      name: "ip"
+      type: "InnerProduct"
+      bottom: "data"
+      top: "ip"
+      inner_product_param {
+        num_output: 2
+      }
+    }
+    layer {
+      name: "loss"
+      type: "SoftmaxWithLoss"
+      bottom: "ip"
+      bottom: "label"
+      top: "loss"
+    }
+
+Model initialization is handled by `Net::Init()`. The initialization mainly does two things: scaffolding the overall DAG by creating the blobs and layers (for C++ geeks: the network will retain ownership of the blobs and layers during its lifetime), and calling the layers' `SetUp()` function. It also does a set of other bookkeeping things, such as validating the correctness of the overall network architecture. Also, during initialization the Net explains its initialization by logging to INFO as it goes:
+
+    I0902 22:52:17.931977 2079114000 net.cpp:39] Initializing net from parameters:
+    name: "LogReg"
+    [...model prototxt printout...]
+    # construct the network layer-by-layer
+    I0902 22:52:17.932152 2079114000 net.cpp:67] Creating Layer mnist
+    I0902 22:52:17.932165 2079114000 net.cpp:356] mnist -> data
+    I0902 22:52:17.932188 2079114000 net.cpp:356] mnist -> label
+    I0902 22:52:17.932200 2079114000 net.cpp:96] Setting up mnist
+    I0902 22:52:17.935807 2079114000 data_layer.cpp:135] Opening leveldb input_leveldb
+    I0902 22:52:17.937155 2079114000 data_layer.cpp:195] output data size: 64,1,28,28
+    I0902 22:52:17.938570 2079114000 net.cpp:103] Top shape: 64 1 28 28 (50176)
+    I0902 22:52:17.938593 2079114000 net.cpp:103] Top shape: 64 (64)
+    I0902 22:52:17.938611 2079114000 net.cpp:67] Creating Layer ip
+    I0902 22:52:17.938617 2079114000 net.cpp:394] ip <- data
+    I0902 22:52:17.939177 2079114000 net.cpp:356] ip -> ip
+    I0902 22:52:17.939196 2079114000 net.cpp:96] Setting up ip
+    I0902 22:52:17.940289 2079114000 net.cpp:103] Top shape: 64 2 (128)
+    I0902 22:52:17.941270 2079114000 net.cpp:67] Creating Layer loss
+    I0902 22:52:17.941305 2079114000 net.cpp:394] loss <- ip
+    I0902 22:52:17.941314 2079114000 net.cpp:394] loss <- label
+    I0902 22:52:17.941323 2079114000 net.cpp:356] loss -> loss
+    # set up the loss and configure the backward pass
+    I0902 22:52:17.941328 2079114000 net.cpp:96] Setting up loss
+    I0902 22:52:17.941328 2079114000 net.cpp:103] Top shape: (1)
+    I0902 22:52:17.941329 2079114000 net.cpp:109]     with loss weight 1
+    I0902 22:52:17.941779 2079114000 net.cpp:170] loss needs backward computation.
+    I0902 22:52:17.941787 2079114000 net.cpp:170] ip needs backward computation.
+    I0902 22:52:17.941794 2079114000 net.cpp:172] mnist does not need backward computation.
+    # determine outputs
+    I0902 22:52:17.941800 2079114000 net.cpp:208] This network produces output loss
+    # finish initialization and report memory usage
+    I0902 22:52:17.941810 2079114000 net.cpp:467] Collecting Learning Rate and Weight Decay.
+    I0902 22:52:17.941818 2079114000 net.cpp:219] Network initialization done.
+    I0902 22:52:17.941824 2079114000 net.cpp:220] Memory required for data: 201476
+
+Note that the construction of the network is device agnostic -- recall our earlier explanation that blobs and layers hide implementation details from the model definition. After construction, the network is run on either CPU or GPU by setting a single switch defined in `Caffe::mode()` and set by `Caffe::set_mode()`. Layers come with corresponding CPU and GPU routines that produce identical results (up to numerical errors, and with tests to guard it). The CPU / GPU switch is seamless and independent of the model definition.
+
+### Model format
+
+The models are defined in plaintext protocol buffer schema (prototxt) while the learned models are serialized as binary protocol buffer (binaryproto) .caffemodel files.
+
+The model format is defined by the protobuf schema in [caffe.proto](https://github.com/BVLC/caffe/blob/master/src/caffe/proto/caffe.proto). The source file is mostly self-explanatory so one is encouraged to check it out.
+
+Caffe speaks [Google Protocol Buffer](https://code.google.com/p/protobuf/) for the following strengths: minimal-size binary strings when serialized, efficient serialization, a human-readable text format compatible with the binary version, and efficient interface implementations in multiple languages, most notably C++ and Python. This all contributes to the flexibility and extensibility of modeling in Caffe.
diff --git a/docs/tutorial/solver.md b/docs/tutorial/solver.md
new file mode 100644
index 0000000..17f793e
--- /dev/null
+++ b/docs/tutorial/solver.md
@@ -0,0 +1,271 @@
+---
+title: Solver / Model Optimization
+---
+# Solver
+
+The solver orchestrates model optimization by coordinating the network's forward inference and backward gradients to form parameter updates that attempt to improve the loss.
+The responsibilities of learning are divided between the Solver for overseeing the optimization and generating parameter updates and the Net for yielding loss and gradients.
+
+The Caffe solvers are Stochastic Gradient Descent (SGD), Adaptive Gradient (ADAGRAD), and Nesterov's Accelerated Gradient (NESTEROV).
+
+The solver
+
+1. scaffolds the optimization bookkeeping and creates the training network for learning and test network(s) for evaluation.
+2. iteratively optimizes by calling forward / backward and updating parameters
+3. (periodically) evaluates the test networks
+4. snapshots the model and solver state throughout the optimization
+
+where each iteration
+
+1. calls network forward to compute the output and loss
+2. calls network backward to compute the gradients
+3. incorporates the gradients into parameter updates according to the solver method
+4. updates the solver state according to learning rate, history, and method
+
+to take the weights all the way from initialization to learned model.
+
+Like Caffe models, Caffe solvers run in CPU / GPU modes.
+
+## Methods
+
+The solver methods address the general optimization problem of loss minimization.
+For dataset $$D$$, the optimization objective is the average loss over all $$|D|$$ data instances throughout the dataset
+
+$$L(W) = \frac{1}{|D|} \sum_i^{|D|} f_W\left(X^{(i)}\right) + \lambda r(W)$$
+
+where $$f_W\left(X^{(i)}\right)$$ is the loss on data instance $$X^{(i)}$$ and $$r(W)$$ is a regularization term with weight $$\lambda$$.
+$$|D|$$ can be very large, so in practice, in each solver iteration we use a stochastic approximation of this objective, drawing a mini-batch of $$N \ll |D|$$ instances:
+
+$$L(W) \approx \frac{1}{N} \sum_i^N f_W\left(X^{(i)}\right) + \lambda r(W)$$
+
+The model computes $$f_W$$ in the forward pass and the gradient $$\nabla f_W$$ in the backward pass.
+
+The parameter update $$\Delta W$$ is formed by the solver from the error gradient $$\nabla f_W$$, the regularization gradient $$\nabla r(W)$$, and other particulars to each method.
+
+### SGD
+
+**Stochastic gradient descent** (`solver_type: SGD`) updates the weights $$ W $$ by a linear combination of the negative gradient $$ \nabla L(W) $$ and the previous weight update $$ V_t $$.
+The **learning rate** $$ \alpha $$ is the weight of the negative gradient.
+The **momentum** $$ \mu $$ is the weight of the previous update.
+
+Formally, we have the following formulas to compute the update value $$ V_{t+1} $$ and the updated weights $$ W_{t+1} $$ at iteration $$ t+1 $$, given the previous weight update $$ V_t $$ and current weights $$ W_t $$:
+
+$$
+V_{t+1} = \mu V_t - \alpha \nabla L(W_t)
+$$
+
+$$
+W_{t+1} = W_t + V_{t+1}
+$$
+
+The learning "hyperparameters" ($$\alpha$$ and $$\mu$$) might require a bit of tuning for best results.
+If you're not sure where to start, take a look at the "Rules of thumb" below, and for further information you might refer to Leon Bottou's [Stochastic Gradient Descent Tricks](http://research.microsoft.com/pubs/192769/tricks-2012.pdf) [1].
+
+[1] L. Bottou.
+    [Stochastic Gradient Descent Tricks](http://research.microsoft.com/pubs/192769/tricks-2012.pdf).
+    *Neural Networks: Tricks of the Trade*: Springer, 2012.
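+
+For intuition, a minimal NumPy sketch of the update rule above (illustrative only; Caffe's solver performs these updates on parameter blobs in C++/CUDA):
+
+    import numpy as np
+
+    def sgd_momentum_step(W, grad, V, lr=0.01, momentum=0.9):
+        """V_{t+1} = mu * V_t - alpha * grad;  W_{t+1} = W_t + V_{t+1}."""
+        V = momentum * V - lr * grad
+        return W + V, V
+
+    W = np.zeros(10)         # toy weights
+    V = np.zeros_like(W)     # momentum history, initialized to zero
+    grad = np.ones_like(W)   # stand-in for the gradient of the loss w.r.t. W
+    W, V = sgd_momentum_step(W, grad, V)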
+
+#### Rules of thumb for setting the learning rate $$ \alpha $$ and momentum $$ \mu $$
+
+A good strategy for deep learning with SGD is to initialize the learning rate $$ \alpha $$ to a value around $$ \alpha \approx 0.01 = 10^{-2} $$, and dropping it by a constant factor (e.g., 10) throughout training when the loss begins to reach an apparent "plateau", repeating this several times.
+Generally, you probably want to use a momentum $$ \mu = 0.9 $$ or similar value.
+By smoothing the weight updates across iterations, momentum tends to make deep learning with SGD both stabler and faster.
+
+This was the strategy used by Krizhevsky et al. [1] in their famously winning CNN entry to the ILSVRC-2012 competition, and Caffe makes this strategy easy to implement in a `SolverParameter`, as in our reproduction of [1] at `./examples/imagenet/alexnet_solver.prototxt`.
+
+To use a learning rate policy like this, you can put the following lines somewhere in your solver prototxt file:
+
+    base_lr: 0.01     # begin training at a learning rate of 0.01 = 1e-2
+
+    lr_policy: "step" # learning rate policy: drop the learning rate in "steps"
+                      # by a factor of gamma every stepsize iterations
+
+    gamma: 0.1        # drop the learning rate by a factor of 10
+                      # (i.e., multiply it by a factor of gamma = 0.1)
+
+    stepsize: 100000  # drop the learning rate every 100K iterations
+
+    max_iter: 350000  # train for 350K iterations total
+
+    momentum: 0.9
+
+Under the above settings, we'll always use `momentum` $$ \mu = 0.9 $$.
+We'll begin training at a `base_lr` of $$ \alpha = 0.01 = 10^{-2} $$ for the first 100,000 iterations, then multiply the learning rate by `gamma` ($$ \gamma $$) and train at $$ \alpha' = \alpha \gamma = (0.01) (0.1) = 0.001 = 10^{-3} $$ for iterations 100K-200K, then at $$ \alpha'' = 10^{-4} $$ for iterations 200K-300K, and finally train until iteration 350K (since we have `max_iter: 350000`) at $$ \alpha''' = 10^{-5} $$.
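+
+Equivalently, under the `step` policy the rate at iteration `it` is `base_lr * gamma ^ floor(it / stepsize)`; a small sketch of that schedule:
+
+    def step_lr(it, base_lr=0.01, gamma=0.1, stepsize=100000):
+        """Learning rate under the 'step' policy."""
+        return base_lr * gamma ** (it // stepsize)
+
+    for it in (0, 100000, 200000, 349999):
+        print(it, step_lr(it))  # 0.01, 0.001, 0.0001, 1e-05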
+
+Note that the momentum setting $$ \mu $$ effectively multiplies the size of your updates by a factor of $$ \frac{1}{1 - \mu} $$ after many iterations of training, so if you increase $$ \mu $$, it may be a good idea to **decrease** $$ \alpha $$ accordingly (and vice versa).
+
+For example, with $$ \mu = 0.9 $$, we have an effective update size multiplier of $$ \frac{1}{1 - 0.9} = 10 $$.
+If we increased the momentum to $$ \mu = 0.99 $$, we've increased our update size multiplier to 100, so we should drop $$ \alpha $$ (`base_lr`) by a factor of 10.
+
+Note also that the above settings are merely guidelines, and they're definitely not guaranteed to be optimal (or even work at all!) in every situation.
+If learning diverges (e.g., you start to see very large or `NaN` or `inf` loss values or outputs), try dropping the `base_lr` (e.g., `base_lr: 0.001`) and re-training, repeating this until you find a `base_lr` value that works.
+
+[1] A. Krizhevsky, I. Sutskever, and G. Hinton.
+    [ImageNet Classification with Deep Convolutional Neural Networks](http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks.pdf).
+    *Advances in Neural Information Processing Systems*, 2012.
+
+### AdaGrad
+
+The **adaptive gradient** (`solver_type: ADAGRAD`) method (Duchi et al. [1]) is a gradient-based optimization method (like SGD) that attempts to "find needles in haystacks in the form of very predictive but rarely seen features," in Duchi et al.'s words.
+Given the update information from all previous iterations $$ \left( \nabla L(W) \right)_{t'} $$ for $$ t' \in \{1, 2, ..., t\} $$,
+the update formulas proposed by [1] are as follows, specified for each component $$i$$ of the weights $$W$$:
+
+$$
+(W_{t+1})_i =
+(W_t)_i - \alpha
+\frac{\left( \nabla L(W_t) \right)_{i}}{
+    \sqrt{\sum_{t'=1}^{t} \left( \nabla L(W_{t'}) \right)_i^2}
+}
+$$
+
+Note that in practice, for weights $$ W \in \mathcal{R}^d $$, AdaGrad implementations (including the one in Caffe) use only $$ \mathcal{O}(d) $$ extra storage for the historical gradient information (rather than the $$ \mathcal{O}(dt) $$ storage that would be necessary to store each historical gradient individually).
+
+[1] J. Duchi, E. Hazan, and Y. Singer.
+    [Adaptive Subgradient Methods for Online Learning and Stochastic Optimization](http://www.magicbroom.info/Papers/DuchiHaSi10.pdf).
+    *The Journal of Machine Learning Research*, 2011.
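+
+For intuition, a minimal NumPy sketch of the per-coordinate update above (illustrative only; the small `eps` term is added here for numerical safety and is not part of the formula):
+
+    import numpy as np
+
+    def adagrad_step(W, grad, hist, lr=0.01, eps=1e-8):
+        """Scale each gradient component by the root of its accumulated squared history."""
+        hist = hist + grad ** 2                      # the O(d) running history
+        return W - lr * grad / (np.sqrt(hist) + eps), hist
+
+    W = np.zeros(10)
+    hist = np.zeros_like(W)
+    grad = np.ones_like(W)
+    W, hist = adagrad_step(W, grad, hist)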
+
+### NAG
+
+**Nesterov's accelerated gradient** (`solver_type: NESTEROV`) was proposed by Nesterov [1] as an "optimal" method of convex optimization, achieving a convergence rate of $$\mathcal{O}(1/t^2)$$ rather than $$\mathcal{O}(1/t)$$.
+Though the required assumptions to achieve the $$ \mathcal{O}(1/t^2) $$ convergence typically will not hold for deep networks trained with Caffe (e.g., due to non-smoothness and non-convexity), in practice NAG can be a very effective method for optimizing certain types of deep learning architectures, as demonstrated for deep MNIST autoencoders by Sutskever et al. [2].
+
+The weight update formulas look very similar to the SGD updates given above:
+
+$$
+V_{t+1} = \mu V_t - \alpha \nabla L(W_t + \mu V_t)
+$$
+
+$$
+W_{t+1} = W_t + V_{t+1}
+$$
+
+What distinguishes the method from SGD is the weight setting $$ W $$ on which we compute the error gradient $$ \nabla L(W) $$ -- in NAG we take the gradient on weights with added momentum $$ \nabla L(W_t + \mu V_t) $$; in SGD we simply take the gradient $$ \nabla L(W_t) $$ on the current weights themselves.
+
+[1] Y. Nesterov.
+    A Method of Solving a Convex Programming Problem with Convergence Rate $$\mathcal{O}(1/k^2)$$.
+    *Soviet Mathematics Doklady*, 1983.
+
+[2] I. Sutskever, J. Martens, G. Dahl, and G. Hinton.
+    [On the Importance of Initialization and Momentum in Deep Learning](http://www.cs.toronto.edu/~fritz/absps/momentum.pdf).
+    *Proceedings of the 30th International Conference on Machine Learning*, 2013.
+
+## Scaffolding
+
+The solver scaffolding prepares the optimization method and initializes the model to be learned in `Solver::Presolve()`.
+
+    > caffe train -solver examples/mnist/lenet_solver.prototxt
+    I0902 13:35:56.474978 16020 caffe.cpp:90] Starting Optimization
+    I0902 13:35:56.475190 16020 solver.cpp:32] Initializing solver from parameters:
+    test_iter: 100
+    test_interval: 500
+    base_lr: 0.01
+    display: 100
+    max_iter: 10000
+    lr_policy: "inv"
+    gamma: 0.0001
+    power: 0.75
+    momentum: 0.9
+    weight_decay: 0.0005
+    snapshot: 5000
+    snapshot_prefix: "examples/mnist/lenet"
+    solver_mode: GPU
+    net: "examples/mnist/lenet_train_test.prototxt"
+
+Net initialization
+
+    I0902 13:35:56.655681 16020 solver.cpp:72] Creating training net from net file: examples/mnist/lenet_train_test.prototxt
+    [...]
+    I0902 13:35:56.656740 16020 net.cpp:56] Memory required for data: 0
+    I0902 13:35:56.656791 16020 net.cpp:67] Creating Layer mnist
+    I0902 13:35:56.656811 16020 net.cpp:356] mnist -> data
+    I0902 13:35:56.656846 16020 net.cpp:356] mnist -> label
+    I0902 13:35:56.656874 16020 net.cpp:96] Setting up mnist
+    I0902 13:35:56.694052 16020 data_layer.cpp:135] Opening lmdb examples/mnist/mnist_train_lmdb
+    I0902 13:35:56.701062 16020 data_layer.cpp:195] output data size: 64,1,28,28
+    I0902 13:35:56.701146 16020 data_layer.cpp:236] Initializing prefetch
+    I0902 13:35:56.701196 16020 data_layer.cpp:238] Prefetch initialized.
+    I0902 13:35:56.701212 16020 net.cpp:103] Top shape: 64 1 28 28 (50176)
+    I0902 13:35:56.701230 16020 net.cpp:103] Top shape: 64 1 1 1 (64)
+    [...]
+    I0902 13:35:56.703737 16020 net.cpp:67] Creating Layer ip1
+    I0902 13:35:56.703753 16020 net.cpp:394] ip1 <- pool2
+    I0902 13:35:56.703778 16020 net.cpp:356] ip1 -> ip1
+    I0902 13:35:56.703797 16020 net.cpp:96] Setting up ip1
+    I0902 13:35:56.728127 16020 net.cpp:103] Top shape: 64 500 1 1 (32000)
+    I0902 13:35:56.728142 16020 net.cpp:113] Memory required for data: 5039360
+    I0902 13:35:56.728175 16020 net.cpp:67] Creating Layer relu1
+    I0902 13:35:56.728194 16020 net.cpp:394] relu1 <- ip1
+    I0902 13:35:56.728219 16020 net.cpp:345] relu1 -> ip1 (in-place)
+    I0902 13:35:56.728240 16020 net.cpp:96] Setting up relu1
+    I0902 13:35:56.728256 16020 net.cpp:103] Top shape: 64 500 1 1 (32000)
+    I0902 13:35:56.728270 16020 net.cpp:113] Memory required for data: 5167360
+    I0902 13:35:56.728287 16020 net.cpp:67] Creating Layer ip2
+    I0902 13:35:56.728304 16020 net.cpp:394] ip2 <- ip1
+    I0902 13:35:56.728333 16020 net.cpp:356] ip2 -> ip2
+    I0902 13:35:56.728356 16020 net.cpp:96] Setting up ip2
+    I0902 13:35:56.728690 16020 net.cpp:103] Top shape: 64 10 1 1 (640)
+    I0902 13:35:56.728705 16020 net.cpp:113] Memory required for data: 5169920
+    I0902 13:35:56.728734 16020 net.cpp:67] Creating Layer loss
+    I0902 13:35:56.728747 16020 net.cpp:394] loss <- ip2
+    I0902 13:35:56.728767 16020 net.cpp:394] loss <- label
+    I0902 13:35:56.728786 16020 net.cpp:356] loss -> loss
+    I0902 13:35:56.728811 16020 net.cpp:96] Setting up loss
+    I0902 13:35:56.728837 16020 net.cpp:103] Top shape: 1 1 1 1 (1)
+    I0902 13:35:56.728849 16020 net.cpp:109]     with loss weight 1
+    I0902 13:35:56.728878 16020 net.cpp:113] Memory required for data: 5169924
+
+Loss
+
+    I0902 13:35:56.728893 16020 net.cpp:170] loss needs backward computation.
+    I0902 13:35:56.728909 16020 net.cpp:170] ip2 needs backward computation.
+    I0902 13:35:56.728924 16020 net.cpp:170] relu1 needs backward computation.
+    I0902 13:35:56.728938 16020 net.cpp:170] ip1 needs backward computation.
+    I0902 13:35:56.728953 16020 net.cpp:170] pool2 needs backward computation.
+    I0902 13:35:56.728970 16020 net.cpp:170] conv2 needs backward computation.
+    I0902 13:35:56.728984 16020 net.cpp:170] pool1 needs backward computation.
+    I0902 13:35:56.728998 16020 net.cpp:170] conv1 needs backward computation.
+    I0902 13:35:56.729014 16020 net.cpp:172] mnist does not need backward computation.
+    I0902 13:35:56.729027 16020 net.cpp:208] This network produces output loss
+    I0902 13:35:56.729053 16020 net.cpp:467] Collecting Learning Rate and Weight Decay.
+    I0902 13:35:56.729071 16020 net.cpp:219] Network initialization done.
+    I0902 13:35:56.729085 16020 net.cpp:220] Memory required for data: 5169924
+    I0902 13:35:56.729277 16020 solver.cpp:156] Creating test net (#0) specified by net file: examples/mnist/lenet_train_test.prototxt
+
+Completion
+
+    I0902 13:35:56.806970 16020 solver.cpp:46] Solver scaffolding done.
+    I0902 13:35:56.806984 16020 solver.cpp:165] Solving LeNet
+
+
+## Updating Parameters
+
+The actual weight update is computed by the solver and then applied to the net parameters in `Solver::ComputeUpdateValue()`.
+The `ComputeUpdateValue` method incorporates any weight decay $$ r(W) $$ into the weight gradients (which at this point contain only the error gradients) to get the final gradient with respect to each network weight.
+These gradients are then scaled by the learning rate $$ \alpha $$, and the update to be subtracted is stored in each parameter Blob's `diff` field.
+Finally, the `Blob::Update` method is called on each parameter blob, which performs the final update (subtracting the Blob's `diff` from its `data`).
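+
+The following NumPy sketch mirrors that sequence for the plain SGD case with L2 weight decay; the names are illustrative only and are not Caffe's actual API:
+
+    import numpy as np
+
+    def compute_update_value(data, error_grad, lr=0.01, weight_decay=0.0005):
+        # fold the weight decay term r(W) into the error gradient,
+        # then scale by the learning rate alpha to get the value to subtract
+        diff = error_grad + weight_decay * data
+        return lr * diff
+
+    W = np.random.randn(4)             # a parameter blob's data
+    g = np.random.randn(4)             # stand-in for its accumulated error gradient
+    W -= compute_update_value(W, g)    # Blob::Update: data -= diff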
+
+## Snapshotting and Resuming
+
+The solver snapshots the weights and its own state during training in `Solver::Snapshot()` and `Solver::SnapshotSolverState()`.
+The weight snapshots export the learned model while the solver snapshots allow training to be resumed from a given point.
+Training is resumed by `Solver::Restore()` and `Solver::RestoreSolverState()`.
+
+Weights are saved without an extension, while solver states are saved with the `.solverstate` extension.
+Both files include an `_iter_N` suffix, where `N` is the snapshot iteration number.
+
+Snapshotting is configured by:
+
+    # The snapshot interval in iterations.
+    snapshot: 5000
+    # File path prefix for snapshotting model weights and solver state.
+    # Note: this is relative to the invocation of the `caffe` utility, not the
+    # solver definition file.
+    snapshot_prefix: "/path/to/model"
+    # Snapshot the diff along with the weights. This can help debugging training
+    # but takes more storage.
+    snapshot_diff: false
+    # A final snapshot is saved at the end of training unless
+    # this flag is set to false. The default is true.
+    snapshot_after_train: true
+
+in the solver definition prototxt.
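+
+To resume training from a saved state, the solver state file is passed back to the `caffe` tool with the `-snapshot` flag (the file name below is illustrative):
+
+    > caffe train -solver examples/mnist/lenet_solver.prototxt -snapshot examples/mnist/lenet_iter_5000.solverstate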
diff --git a/examples/00-classification.ipynb b/examples/00-classification.ipynb
new file mode 100644
index 0000000..46bbb19
--- /dev/null
+++ b/examples/00-classification.ipynb
@@ -0,0 +1,13187 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Instant Recognition with Caffe\n",
+    "\n",
+    "In this example we'll classify an image with the bundled CaffeNet model, based on the network architecture of Krizhevsky et al. for ImageNet. We'll compare CPU and GPU operation, then reach into the model to inspect features and the output.\n",
+    "\n",
+    "(These feature visualizations follow the DeCAF visualizations originally by Yangqing Jia.)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "First, import required modules, set plotting parameters, and run `./scripts/download_model_binary.py models/bvlc_reference_caffenet` to get the pretrained CaffeNet model if it hasn't already been fetched."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline\n",
+    "\n",
+    "# Make sure that caffe is on the python path:\n",
+    "caffe_root = '../'  # this file is expected to be in {caffe_root}/examples\n",
+    "import sys\n",
+    "sys.path.insert(0, caffe_root + 'python')\n",
+    "\n",
+    "import caffe\n",
+    "\n",
+    "plt.rcParams['figure.figsize'] = (10, 10)\n",
+    "plt.rcParams['image.interpolation'] = 'nearest'\n",
+    "plt.rcParams['image.cmap'] = 'gray'\n",
+    "\n",
+    "import os\n",
+    "if not os.path.isfile(caffe_root + 'models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel'):\n",
+    "    print(\"Downloading pre-trained CaffeNet model...\")\n",
+    "    !../scripts/download_model_binary.py ../models/bvlc_reference_caffenet"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Set Caffe to CPU mode, load the net in the test phase for inference, and configure input preprocessing."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "caffe.set_mode_cpu()\n",
+    "net = caffe.Net(caffe_root + 'models/bvlc_reference_caffenet/deploy.prototxt',\n",
+    "                caffe_root + 'models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel',\n",
+    "                caffe.TEST)\n",
+    "\n",
+    "# input preprocessing: 'data' is the name of the input blob == net.inputs[0]\n",
+    "transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})\n",
+    "transformer.set_transpose('data', (2,0,1))\n",
+    "transformer.set_mean('data', np.load(caffe_root + 'python/caffe/imagenet/ilsvrc_2012_mean.npy').mean(1).mean(1)) # mean pixel\n",
+    "transformer.set_raw_scale('data', 255)  # the reference model operates on images in [0,255] range instead of [0,1]\n",
+    "transformer.set_channel_swap('data', (2,1,0))  # the reference model has channels in BGR order instead of RGB"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's start with a simple classification. We'll set a batch size of 50 to demonstrate batch processing, even though we'll only be classifying one image. (Note that the batch size can also be changed on-the-fly.)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# set net to batch size of 50\n",
+    "net.blobs['data'].reshape(50,3,227,227)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Feed in the image (with some preprocessing) and classify with a forward pass."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Predicted class is #281.\n"
+     ]
+    }
+   ],
+   "source": [
+    "net.blobs['data'].data[...] = transformer.preprocess('data', caffe.io.load_image(caffe_root + 'examples/images/cat.jpg'))\n",
+    "out = net.forward()\n",
+    "print(\"Predicted class is #{}.\".format(out['prob'].argmax()))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "What did the input look like?"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "<matplotlib.image.AxesImage at 0x7f665b02ae90>"
+      ]
+     },
+     "execution_count": 7,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlIAAAJOCAYAAAB8y+mTAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvVmsbdt2HdRnvaq99ynuue/ed1/pV9i4iJPYDk5M4gQ7\n",
+       "sp0gSIICMiYSIQLxgfiAD0Dwg5QIIb744yeKkq/ABxLIwrISMDikwibGicv4xe/Z7913i3NPufde\n",
+       "xSz5GK2N3uady8/KVm5OYkb/OHudseaac4wxxxhzjtZbbz2bpsmSJUuWLFmyZMmS/eNb/qorkCxZ\n",
+       "smTJkiVL9s+rpRepZMmSJUuWLFmyO1p6kUqWLFmyZMmSJbujpRepZMmSJUuWLFmyO1p6kUqWLFmy\n",
+       "ZMmSJbujpRepZMmSJUuWLFmyO9pH8iKVZdkPZ1n2K1mW/VqWZf/pR3GNZMmSJUuWLFmyV23ZP2kd\n",
+       "qSzLCjP7VTP7QTN728x+xsx+dJqmX/4neqFkyZIlS5YsWbJXbB8FIvX7zOxL0zR9ZZqmzsz+qpn9\n",
+       "ax/BdZIlS5YsWbJkyV6pfRQvUm+Z2Vfl/19DWbJkyZIlS5Ys2e8oKz+Cc/62vsIsy1JemmTJkiVL\n",
+       "lizZPzc2TVN2rvyjeJF628w+Kf//pAVUambVxc6ay62ZmdWXO9vcvzIzs1E4W3keALNzb11Z4e3J\n",
+       "yiL8tvQjx2KanSMch98UZ06Yh++yTM6Lz7l5Wd/3oWwYYtnUn8Lxo1+/5PfT6Ocb8Vu8R1ZyS3q0\n",
+       "ezI/Xn/r1USdpJ/GcWSFF8eZnC9Hn5EXF39nZtMU6tt1XnY8hPr2Qx/LBrRLR9NkHcq8r4sidHJZ\n",
+       "1vby7YNdvrWO96JufNitVnU4DvekWa3id3VVhbJ17WV1+FyX1aKskHvdd6FO+9t9LHv+8mBmZi9v\n",
+       "bmLZ6Rjadjodw7UaP2/G8aTjJQv9M2pZMeAwvyf1KpynkhnG9o+j9yeqacdTOMcwnfx43K8x87FW\n",
+       "TKGtgw6NAfdz8ErlebhwkQ9SFv42lfcneyzPUNHe2zD04frsIzOztg3na09+XNdyPE12++7Btm+s\n",
+       "rapD2Wrl/VmuwvnqrXdKHudp+Nt3PrKmAceNfjyHvc61yPOczgDsMlA5n3Xcx1uRsc0yr3E/y8Lb\n",
+       "wGV0Kvy4gT+evMxPKKtXG647jX4+joVm42XNJtyf9S6Ube/5/dpdbMIxK++Tug7HDTIoiiL0RXmx\n",
+       "jmVVGc6zu/Q51uD7Qep+i/lxc3NrZmZP3n0av3v/nWdmZvbe157ai1+7tasvbC0/oF8LvXdYz3qv\n",
+       "Z9dyTZT7xMvKuJvQjhx9k2XaLq4rft4C85Rjycwsx1qfyzzNePMyeSZgTLRy/SwP/ZlV4bi89nat\n",
+       "LsIJq42feLUNn7c7v0/NCt9PfhzHna6x/TFcd//S59j187AGjIdw/ZtnBz/HKfx2kHPwmVkIPjEa\n",
+       "nydShjkmzbcC3ajPyd2j8Fy+euN+LPvgVx7b57/vM1ZjQeu4cJk/EyYZ6y8wdt771cexrLLQr/oq\n",
+       "wnk/FfKcyrhOyvOM9UNbtxu5JyuM9dzLiqJF+5ZrzWTyPMN8P7Z+/WEM53v5uLPrx+Fe5HVlb/+8\n",
+       "z4MP20fxIvWzZvaFLMs+Y2ZfN7N/08x+9MMHNVcXdvGJj30El0+WLFmyZMmSJbu7XTxq7OJRY2Zm\n",
+       "1cX2n+6L1DRNfZZl/6GZ/aSFvfxfTBF7yZIlS5YsWbLfifZRIFI2TdNPmNlPfKNjVlc7U6bUBNhT\n",
+       "1RgmHKBlWYQgBUbGAdnMtcUPctGc54gXWJxD3WNERSepaAWsWF0LPc6TictgwI+LM/ISuRwV25Wx\n",
+       "3gJPG1022ikfqq+ZZWiQlhHS1eb48Us3Jv1XhcCjFaDtSd0o8VrugoqnkWqOcEsOw2jltrBhGCNU\n",
+       "m4mriuehS41uQjO/13krx9M9uhI3FqD/KRcYGz8Zte4o1Pa0La8H94S4bPPoCRB3L13Acq3YZ7P+\n",
+       "BDwtJdNEyHoJ948Dz+vfZXAL5NqvqHsm49/d4YMcx8Huv6VLtarE3Yyx28KNK91vHdxsvSPh1sIF\n",
+       "2fntj27GPM+tvqgty/LojlE3QhzbOp/jmEDBpD7TM+Max49yHKH6PNe5w75eusfVxnx+/aKS/s/j\n",
+       "pPTz0s0jbqyCa5f4gOlaGUdxAVa4dzNaQCg7iaukexbOd7wJ7omX1+7aefh6aNfFlbvsNsHbZ6ut\n",
+       "u+w2cNltH+ykLLhsVuI+r9Zh3rWDXz+vwzihq+7UtvG72za4bO5PF1blpe3eXFt7G+q7f+H1HA7s\n",
+       "u1hkBdaTvtN7EsqyMwsV+78SNz77NctlDOMapXAlSMNVd1+BeZfJoIxjN9MxhjmJcV82MtYyXktc\n",
+       "q/icK90E9csmnc90I/r971u6++RZEMduKMtl/E0Z56RPykgZMDHUJZPJxqVF3Z10AVe1uODR3lpo\n",
+       "Dq9/7jWrm8oqnLeuffyxLuqyrA9hzNQy1iZQAMRTbgU9oLJQ5fQ36qMYt4zrZCv0CDYnr/zEOfp/\n",
+       "VDduzu/kWuXyWj2pEkrLqL7xOvLKlM2bq4tXdelk/5StufhI3teT/TNo9a767Q9K9jvCdm+uf/uD\n",
+       "kv2OsNc+ff+3P+j/x/bKnnBTXszIyUQCct3CkkOtiEBGwrQiB4UeHo7D30GIitylZni9ne2BCEgp\n",
+       "+MOdcK67hfC5EmL7Gjum063v6kh2nu2+sWMmUXw0bdfASkYjEDXNCJPYkfWyIyNyN7vYEiXRnZiZ\n",
+       "EzfNnDCoOzi+rI+jkyi5SxrNd6nTtNzpRcREkQOcWncu/HoiMpTJDu5Icr6SQ7Fbbr1dHU9cyHgC\n",
+       "cqHk5XEiefoYy0jQjURkQT8qnk+6rQRikUk/EX2sZAfHxuruZ8S1FCTh9ePObNJzcFfr05QIiwa+\n",
+       "xjkh03mcQJ6vhLy/CfesrBs/DghThWu0rZLISSyXHfSJbfFqxhs7Qx+4S9fxBNRzNvPQ7yC2T6Of\n",
+       "hAEykxJ2LZNf4QwgByt03XM3r8cRiZVpUscAFB4p8wq777yUOoFkPiNW457klZJ9UQ8h5Y817p0g\n",
+       "fPw8HmVO7DF2D+hsB3ps7MIP2vZeLOvuh3u8uhSU6j4QqavLWHZ5FdCpfOvnq0pwQHqFCYCSDOHC\n",
+       "+1shwu/CeS+EnL1fh7m7anydeP5BQK7aG1knunCefNA1AX0iaxfnWJxDsv5xLSilr7OItORyHNCP\n",
+       "2W8jduFlJGUL6t8BnStBzi91DWUdCyEx58vnCdfTIlc0KxxRCJpVVAzo8X6KgBWHhJCuR/R7rmu9\n",
+       "Pyi9nlh3lNjtiJ2gqfhNJqjb1YMwth4+8DHGwAdOHkUQix4IltzXI1DUeiszsA5trXIJHomPiTNl\n",
+       "57w5BQPQ/FoEx/NZYNcyUCfDdTN9TuA3xbqU32JM6uAp5fMZS7n2kiVLlixZsmTJ7mjpRSpZsmTJ\n",
+       "kiVLluyO9urIK1k2c9nlJACrfy7qHsnP6II7h7QpZk+i8hm3IG0GHJ4jO55hUVOraBqFnInK3Bfe\n",
+       "V9sF18rpRtxIdMfB3VjMCMO9fhWuEV0b6jKAuyFTNxbPK7/leeRVeYrupuX7M91yucCuBdwyVeVt\n",
+       "pe5NJ+4JQs+5wNjxEjNdMBTZsu6Ec4de9EkAwZ5Ooq2EIdvPXItwt8j1ed5eNYMwxlSDJmuPaGu4\n",
+       "1mrlbq+6gXugFngeLt1BIOOSAQgyhiZqQJn6cRYf3ANry3sdv5wN64KN8WvB9aXjuQZUvxEqYgO9\n",
+       "m0yDHODmjrJnMrG6nhpbSxL/jByKG1uKC4wM+VzhcZxmEDdKHgnlPJffG7pHZvPalm6UGKhiS5vk\n",
+       "nrA9lZJSS7oA0P8yhopySdgl8VzdfdRsU69sZnT3SfvZbnGLZh1cSzMfZPhzgms1H9xldrgG2T97\n",
+       "HsvGMtzkq0FuNq5bbb1SxRoaTCs/H3unkH7PDvO1s5TAjmaDOdlpAA7738uuqsCAv33sv715L7h7\n",
+       "skFc0BkDBcQtBddiEdcVXX857sUVWHBM+Jwoz7iAC2oWyXFxiGnwALX6uNbL/RpicIgXVnAt5+ru\n",
+       "4vnELce5Xei1MI5qcfe1iPiIwVYyYOIaKg07E08Ug4IKcYFGbS1Zfxswv+vtxstW4TP1+czMNuvg\n",
+       "0s3i8uN9yPVZ164S2k7rra+nw4nadkvX2kzmklp9+iz0hoVzScCQgQ5waP05MaCPy1L6n8EDM7oL\n",
+       "6iTrVB2DLfTZda6X3RIilSxZsmTJkiVLdkd7ZYhUkRc2DcsQTpUa4FtqMUNQ+L0QQFk2nnlrVJTm\n",
+       "Q+S1GYJgy3BV7oQVOSKhrcxlVxvr7CjJGsrDTeZk3/0xqGwP7VIJPIskW1HxRf0GbQR3RDP5gyVy\n",
+       "RmK17qZIUPad25KcOI+hRVv1rZ7H9U5snTqqnZ/ZpQuzlt8r6jCV87DSQSUkwLrsBf05TmFXq/ek\n",
+       "sbBzGmXnShLrOBtj4Te7C9+5V5AEIPqhhNlmzbhqr9+EwINON0TcQSlywuop2RztySTUmCHZhBBV\n",
+       "rsFjHASlwX0f9N5lYdzlog58CTXszT3fpY0gtmaDs415vQH36dj7eU8HhoErARzXnKkIA5HSsOqI\n",
+       "puk9WaIE/nU2Oyb8J//wQd7qMzIlWuSSIF7G8OhKUMeeqvxn0D/ujGfZBmK7ZWIxglr6n6r4gxxX\n",
+       "8t6KxITlod/HWa+gPxEUQIQoVDico2uPUrTBaWXusOtkTnS4x2XhaxIDIHqVZGD4N87XK4LZAEFa\n",
+       "C/qQL8PFG6wZU+F9TUTw8J7PyRLrnpKyiYSXGVF6Cc2vziBSGDOFwPkRYZfxxEsosXygTIJX3TKi\n",
+       "jUTJJdS+ZFCGQPId1vNxRizHeWXtivIvci2iU4WssVUTrt+d6BHw2h0PGK/5cvxrn0znsmIQOZZ6\n",
+       "EglralG7LymT4mthhbK8Ivonaz3mfyuSOA2CGNaCfrZELidFxFnfJZ6sS0F83gFW0jkZEflB0Ofo\n",
+       "EVk+YxWRc/q9lCErQ6UZDRIilSxZsmTJkiVL9tFYepFKlixZsmTJkiW7o706svk0RJeAmUXIfBCI\n",
+       "mWDaXB8qm39p6qpSbRcqC8slPgTPKZwYtahUiwLnK4XYTS9jIZBtXVLtXMnOwd2iCrwNCMB9hiSj\n",
+       "onBLz8IoiUdJqJupmJ+h1E7xuxkWuvjtQGgZeiIKD0fkVFybVYP2C4x/CddXq2rXexDQW1Elpzrv\n",
+       "oH0S/s4ctVEVewl7x2SYmbYB8GzpblQqoJeq4wR3QyXJXYuGV/arXEFUJ3aTuGeohD5KWQd9plzg\n",
+       "/p59nXsZE2hr8IDBZaAK+BPJoOz3Tnsn9Ke6NqMnVFyiUdG3keTGV+H76lLaT9e3aEVdF4TK4bIQ\n",
+       "xeSYMUDdWJFYL2OXlxACdtTlkhs6LKduVG0/N65ZNhbuHrA4r5cK9LP5H8nr/tOoWj6L6Mhm9Z15\n",
+       "DOHGmeR+FXA3Fbm7MSboyOXinumobqNq1yTqqrYR+1NcEAPmZwFtqUF9HND02u7cZbaFtlNR+73u\n",
+       "EbTRiSp5uQ/XOogLhuvd/saTex9vA5H9gITf3cmTfJuF8c9ADDOzHv2prt2hhrundBGszT2QnU9+\n",
+       "YHsDV6HohxUlXXqYL5p4G31dDBrEQbe49xMT2Oq9Y+CBlnF5lsdOdNXnVPgW1xqfE7p202W1lfuf\n",
+       "YT4NMp+oXzSKBtcIjkApdAdSKfoMpHN5JnGdVGI7PcazYCoU5qKjx7mQSwBCBmV7VYUfqIslYzfH\n",
+       "ul9x7siYzKC3NYg+KwMaaiWbT2E8qQuOuuTq7mNWhtLO3Ke4Jp2hG6gGIeOvVFkel52tCeh2iT+K\n",
+       "97iSAT2eCUZTS4hUsmTJkiVLlizZHe3Vkc2z+R405lDScGmWnfl9ppIAZ8imfGGeEctIwI4FZxCZ\n",
+       "cY5/hXPJmzl3Xxrqz6NV7ZobEa3nwB0md9Wy06VcQ+a7Be4qZ+1HG8bpTD1tUTRrIwl7LVSkq1m4\n",
+       "OpXNvahhuPTaz7G9wg5G3tZ7tOvmpSNSpxMUe0/aJxPKBPUYuOtcEosZRKBq11QdbmULUXOXakJK\n",
+       "xfdKlCcCqqGu/My/OoZuDmEnvu9uY1lO5GDQXSLIoZOwiKlirArYRD00nJpdwUAAIQdnZ8ihzEW4\n",
+       "2wrZH9u1R49cxXqoHpuZSx6Yma3Rjy8mRx8+1nyTmZl99cX74VyT7r5JRNeRhZ2u9CvVy2c7PaLN\n",
+       "M7ItCbi60+NvsdOW8ecq5hIuz4COmdr0ktjOwIpS5QfQt1nm96kE2sVfDoOiVUvCLnP95ZMQ1pHF\n",
+       "oBfpjojO6RQrl+HvzHGXraXfhwrtQgCMIt1ExHd+PNPyKHLLOXncC3KL3flw60T1AyQWDi8ddTqd\n",
+       "noW/bUCT2tGPzyM52vtmxZtW+b2mdIPGklCdoZE1oQd5uGpFfgF/i2yJVsSnxizbBMaJoiT4q+sk\n",
+       "0WwFWGfBDbEM4x6k9EJy/VUrrDUbQe6xdqisRkQ6ZQAQnBokr97pGD4fj94nx+Nc7XyuiLKsrwcR\n",
+       "zbRuwrX0GQMkaNLkmRnQXjltUVDZWxA+QDcFkSut08DsCOJ9ATrarCWLAjwWvRD14/2ZIUfnnvh4\n",
+       "dg5nvjJWSYMNeD4ZgHF+yNypiciZlBEBlwCM3/qyHzpjsmTJkiVLlixZsn8sSy9SyZIlS5YsWbJk\n",
+       "d7RX5trLimyGnEd1lnOaMTOb5N/5b+dkU5Lyzp1lCdTR7TXTswAsqrIvVEVVN55BvVp/OzFBrWCR\n",
+       "PSDVHgTQbKb7AXK0uqyg7bHfO7TO883hXhLlpYVwQSq0S2IrSZm5/ICeGJEOsRJk882FJEO9CBjo\n",
+       "Skjcfpxf6/oamjHXXvcWLr1WIOPjAf0DUrreLaqoqwv0HOwb3cLqgsX9r0QxeL0O/an3yV16gL1V\n",
+       "sRgkUw02IMdUx64r2yhRG24JdVXDzaWkWLrAMtyvTPqfSUgVzd6AHFoI2biGts7rD69i2Yr6PRei\n",
+       "twXXw+34Fa/7kW4+uGzEPdVFIra4sXG6QtzCMbez3hpqMM36Gvdipu1Tzn48Q+55b4blfFUlbLrj\n",
+       "dT7F02pARfRKLF2VXk11D9LdrEmboeIs7mby3tXdGbWtJLm5lUzMLi6TD9MNzCyD66fnyBJXTF6H\n",
+       "41f3fKAMOK8Gj3Q4x2HvxPLjMdxbzbbw8jrc/8PLZ34NJGYukGS2KySh7hTc3FkjbaXLtJRgB7jZ\n",
+       "JU7D6ns4Tu5/B7fl9KSX3zJ4B641dW0zA4Sf1jXjZs+OKCTkde94/aUbaRgkowLV83NSAbyvV9uw\n",
+       "hjRXvq6smKBbgk2cjK7XQrCRPhPwsW3FBd5zPFN3z9s65kt3c6R26PMn6mMJtQI8gmnv5yAZvGxc\n",
+       "R6qKauhC1Gdy92b5ysCgqHwWFoZgFwkoG0BYHyVQxx9aMp/o7ZP2tHhmjnFK6FoLzTh5no1wM+tc\n",
+       "Lxg0IOt5UfB55u2PSah17dA6n7GESCVLlixZsmTJkt3RXhkiNeRC3DWLW8JCURr8VaSBkaMzVXJK\n",
+       "B0hINJEAfUvN45s7fydvq9x9z9iufDX23UqPd89OwlVdUVrVVpeE8pjrLubQO0eEc8tB6L6850rc\n",
+       "p2PYYepOcxipdqs7NxdF8OYQ/Qj9dOplZ4JdpSrclqtQtlp7vzaQGNjthNiNn1S138+yQQ47c+Rk\n",
+       "XzCcXInqoR0jN72y1awBjw2yWyPJuJNdHWUShkFy8gHZUzkH3otCwpktjh2oEws5ejqzzaAavyqQ\n",
+       "t1Bb7zMncRdZaHcvCdgq7Eg1d1jcsjOvpOaGxHG17OpWl6FP1jJzqxVkKuS83/XF7zEzsw+OX41l\n",
+       "77wIYe2vr1+LZY9vQp/16P+xW+7WM7kW0TmdJgTdIuJkJloXsiNlnkj58cDzZdwFahAHyNGqWM2w\n",
+       "dpGWd4TjTADGbJfOmy3XYP6xidX2nScR1KyT6+NjJ+TgsSaCI1eHOrKqzUdlbZWpWFESQ/qpCGMx\n",
+       "HzH+BRErkMMuV6UPEK8Hyf95QKBE13lZC2Lz/rlLEpxeXIfvOp87xQqSBJQYEamRsQIyICHsOQDr\n",
+       "lcmuHujUai3XR469URDG8hbnuZWgoBNJ3pQrEPQBa9Ygj648qtgvA5Bq6X+CTqpKXqAjx8E7lE1b\n",
+       "I9jGZuhr+LyWm70G0jIVKn8S2t+evO+4yK1WlRSBlK+kePzt6CWZEdZPs/aZCTquaH05f9aZeYBW\n",
+       "KR6BU0+U3O/TZEESJtOceDHGZul9odr4IM/uzkiUlzytE8tEEoL5BOVZFNcOVdtnRg/8VfmJCS8F\n",
+       "qljPvJYaFEVCu+ZaLRlsJOsOg7B6OV+Vf+NXpYRIJUuWLFmyZMmS3dHSi1SyZMmSJUuWLNkd7RUq\n",
+       "m48fSjJM2WMpG+ZuDzNx953R1sjOUNDPiBiLns2SHDoI7FhNhPiWsONJiY1UrBYYmYTB6ZwLCoTe\n",
+       "GRF2YoLUpRZOLfBkcy/ArkqifgFV4q5TfRD25/JdmVBxVigRHnUU1d2MWkySZLeGa68WsvnuKrix\n",
+       "RB4l6o0M4ioaoEfTi6uOaus9DquF7R61gGpJfNnBtSBQeAeYl9+FNsLdIrB8eSZpZXRRAIoflZzZ\n",
+       "E7JWFXMoRg8KhaM9tbpqAwSvQQlZtom/iMYxgHrmKnsNkm/ZiO4V3KeTzNxPvv66mZndW/t9+vrT\n",
+       "r5mZ2R/9jj8Zy37j7Z8xM7Mv11+PZc/e/lJoI7SFykZ0X0YQq1Vbim48GbvUFtLpxLE9zAIAqOLt\n",
+       "x5HQHSF4JdZirmmfUCtmnrQ7XsE+bLP5NOUfOt5s6Ofn605yDiRrVtXxOVEd5yUBVtwjUVFaEj5H\n",
+       "+SoZE3T9FVopunLhYstK1UfDIStVlg5/1T03Yf3rxd29h87b6bmXtfvg5pt6J6AXTThuBc24TCgT\n",
+       "zNSQibstgws2r9UFhjaKZ4seyr73+1nvMO9ulgRsg7tt5tpCXepZ0Al1lNyGmNxdA0Wwxiq1AN93\n",
+       "0sbqIsyBuNaJPhTX8Jk+Vc9k0H6tqmFgj2avCDU8nWSc4Ny16FJNQ7gX7YEkag3BoLtT3Zi4J0qi\n",
+       "ZwYAeQCS2mJn5u4obvFxWrbx1IO2gLGoZG+6jwdxt3ONH4alGy87p98oz93o+lUGRMGAqrnuXDgM\n",
+       "93qm7E7CvqydJV174tonsVwuFoOxNFPDGb0xtYRIJUuWLFmyZMmS3dFeGSI1WT4j1vJtWXcQGQhz\n",
+       "MwVwEgtn8rTc1elOI7xhaqhpfKvl26q+GUeUyk/L3cRsRwo06ajhl0BfcqnTSMSil9BhKNXGCFbZ\n",
+       "mlf5Uk2VobO57GArKMbW9c6PQl6j21snkR6O4Vqt7BJIxou7Snlb5wZyf/Cd6cUYlLJ1F14j/LUU\n",
+       "JeYCiNWq3sayLZSXu+79WHaEsu2p905+sHsQroFdUC/E3n5kbiqv09QhJ9zB++lEdWiBxMa4Tfdr\n",
+       "Maw3E9SrqEhKx/UFVDq2Yee+P7iy+R5lp8HrlJ0Ja+cOJp+pkgcriyUBMtKlBaUdOV5lu1OjvgI+\n",
+       "2Q7t+eLFZ2LZw9feMjOz73jj22LZ937L95qZ2c//3N+JZT/3f/55MzP7rm/6ZjMz+3u/9Hb8rkA+\n",
+       "tZPmUATqUAgRNYvjWHKisYmZI1xTzOEliATaU2H3r4RV7qqHVtGnbPbXzIR5uxzrMwV09JkikjGf\n",
+       "JUPOhcRaor4z+Q0rFmWxfrJOjJizKhMQyeZCSiZneZIgB+62uQnW4IgMqKcGAFDZvpWMAadjGJ8H\n",
+       "kU55+TysCYfHHhRRYT2tFTklSgF0upIcfkRnS3l01OU9MzPbNj7/DfnybjNfkwjscm0yMys2WLvl\n",
+       "p1EK4BbBBrImEJ0aFBEDEXgGvuM/ipJnZ9AkSpbMwhTwHCkwJnMZr1Sb7+TElEmZLR450SQv4vNE\n",
+       "nzFERHLN5wfUp0LAQq/yN/RcLBU8ZpIgOdraSw69EuhXId4MPgK60e9Ji+fUoZV1F9IZo4UBq4Rt\n",
+       "IjfjSYnlJMq7RTmReUJL/JG5Oy4lfjjfSqJEyivnM17nesYAjOXzvxHUvYwoleQfxHl6IcoPs4wn\n",
+       "S0uIVLJkyZIlS5Ys2R0tvUglS5YsWbJkyZLd0V6dsnnXmb7HRU7mjJwa/qreUoRvFdm3M5pRxVKz\n",
+       "h1oU0esgUCyhxWymRXWGsI4uG8W1dwSkWQg8Sqh0bB3aHgEjF2j3eiPJWKGxUUrSYqqcZ4JZT/is\n",
+       "CTKvoM7aCCn7Bm6+VuDWA0iB9NRMM90rEPbEFbi/CQfef00I4FRAn0HGASrdiGsvh7L2G6/7ceyK\n",
+       "XIZdA7dgBl/FUXRXujb019E9a2YZ1JZbL+zhKugEgieJMhcC+g3u91pg+dJIygTZWNp/6F6amdmp\n",
+       "Vy2c8FlhZ2qQzDRbou6IuGViHk25n9SPwq0o1WWGc8zU+THs1luB1qGFs27ux7Lv/+IfDue71QTR\n",
+       "wS35Xd/zp2LZf3ATLvxTf+Mnzczszaun8bv3X5Cc7dfq8yPqpNfH+cVVbGeSUNOVXcjYabbV7HAV\n",
+       "bh64KKi0W0/YX+Y6fSUaaxH7VYMH4CoUN9rgcufhd0Iw7XlTpP/HSHYXtXHcz0ncyBNI5Jqg26By\n",
+       "Xq1m+v2hiTMF/HHeLrlWkQetJnU3dXDFZKLt1B/COL1+4W684+Nw746ibD0ykfdKaBF0pTHpgLrR\n",
+       "sD5tV04tuH/xyMzMNhtfJ/ImnLep3I3ybAhjay9jku5riUmxkgrgGAyjruHdkhw9UVldVawxd3Tu\n",
+       "ZtEt6NfixOslyOYA5ffNLrRHPZY5zquuXdZFA2XoxtN4qpFJmM8kIVdvW8cEADhdrlpIpHvIEPLP\n",
+       "Mv6wrjaitl9CfK6Ue02trmLmvqS225JgzcTcSqJnt7e9BzHEhMuzeC64NoWU74Fc8tzP6FL3cUJX\n",
+       "ch910SSICJSNcdDAJupdLTUQVcV81VC9XvoJ90d1Fsfk2kuWLFmyZMmSJfto7NUhUlk2Q5pc7fsb\n",
+       "K5tn8Tt51T0TmTjFUFdFs/Cmy7dgIZbm/kM5Ht+dkUnQPDwjZAfGXslp2DnLlmQAyZqXzzJ54waq\n",
+       "lMtuge3W3QJzA03yqs88bbWEye8Qat/WjsiU2Op02CUfTr5bZbtG6cybmwAhXV87+rW7xI60F5SK\n",
+       "irFK4s5fcv9LAAAgAElEQVSpgO753x49gpxDc+2XJRKI6za972A7oFPD9DKWEbDS+8rdz3iSewJ4\n",
+       "opV+2uM3B9m5GNTbiURMRyGx4362svsi2VPJliQ2ZqLsXaL9paoiEyVTQjnGBBGUfNCxjvMJJMoc\n",
+       "ig+2rk5+QP63N7Yf97a+GxC7r7z967Hs+btBEqHe+o7sh/7t/8LMzL7zO/9lMzP7r//Cv+91y4Fq\n",
+       "SlDEe8/fC/XU6UfgRObziAM0bJik1Hrt46SsMXYwN4dBSde2KDu3JkTUS4ti+LPbAORghgd9aCs5\n",
+       "Lad6RDzCSc6EcMdrynlj4VLiRYM3ZgrRsDwinAxrX651ubRsIJos69nhFMqOtyIJArRV5SQYnj7W\n",
+       "kgEAY5DobDNDC8K9u9o+jGWP7gGR2m1iWVaF821WnpWhB8J4+9IDUKwJ9TvVIjES0cyl+4GoUyHS\n",
+       "7hlQnUJI8eUGSLcEtrQ3uIYih3HdE5QY7b+9gRL5PSUsM2BCgy3m+TrNXJR7praOQaHK6jH/6kyp\n",
+       "H7IvKDopWgJSelYux99MQQP9P0OfkKexbJYyDSY5IZnxg+tqqDTmLnIoKkpM2Z22XZLiB0GEO0+i\n",
+       "5z8+k+2AQWgqJk5kN84xBXD5WwHJKjyL1ht/nvD+NI2o0mMtKmy5nihy2ZqjbecsIVLJkiVLlixZ\n",
+       "smR3tFeGSI39MM/MHHOjCfeBoc7yahp9nmdQIhW1GydKJ2g4NX47UixT0Cru+oSkURAtUfE/yjQI\n",
+       "IhL5CrrTG4m0CJrB9GPgILRSt1MW3uZXInQ5NrzGMtZVdz/cHHWCiPFNX0Ptm3GeE6yU0PQjOR2t\n",
+       "70IY1v3sifO8NhcBzbi8JzudCyIcvkvkznUjQnP374UwaeVjHA4QBEQfV4IWHZDPrDxobrSlcGmP\n",
+       "HVHnGyLrICaZi587R/vbyneEK/LQjJnZdWdG9UMdk/CpS7gsRT8LCXUnr22GpuKjCsJ1uF6807K1\n",
+       "4U53lHHaAUG4rJy4cYEcZ/ene7Hs5//e3zIzs9Pe0b810Mzb0/NY9hP//X9mZmZ/8E//R2Zm9mf+\n",
+       "7H8Zv/sL/+1/HK6/c/Tr6c1j1FN25Phbag69nOHP0nfoss3ax10dQ5HJPRM+0jleQgyX9qIo0qeo\n",
+       "Ar7X8GfyoQZFvcHXclkV5V4Vi3MQkZrtqVkn5c2wKqpmQtRdOZcRic8Wx3HsKPrplC4NNceapOK/\n",
+       "4BcOwpHsWyBcg+/IOe9mSwfmfYVNeCHAxNVrQa7kYu2I1GYVyu5dOEePqEfJRHxmdroMJ7q5FkmY\n",
+       "Q+BNVZeCXOwhRApBSuVDZZS60VB/IBi7S+Fobonm+/UPDyDd8MT7af8yNLIURKgASpND/mMQZIZ8\n",
+       "wRmqyXVFZDqitI0cyDrPtUQjnuRFlERAuzQ16ASkrTiH6uhag3W/Wgua2BD9Vo4mryl9TMqhtNvp\n",
+       "v+Tv6VybZr/T5uic4PNWxynnuziCYs7OYVg+4/hbXRPjOWRe0TtSC2+Y3Cgt2zQrnMN/y2tkCkll\n",
+       "OkGWlhCpZMmSJUuWLFmyO1p6kUqWLFmyZMmSJbujvTLXXj5kMyXakXmixBXFcOrMNISVpFypOpSl\n",
+       "B1WsjcLGCkECsiM8KSTeEsdpaGZBeHYW1U2fgcCocOn1pnni4CqT9pQjQjgBBZeCpx8PUD2/cf9U\n",
+       "RRVxCZcuQfKeKlFW9qt6RY19okRpqvjSnSQh7HDtVbW0iyrut17P62cIoX4o13oNauNHcaPB21cL\n",
+       "sW97CVdJKS4IunSoLJ+724fv+VnuIfl0d4wS636iOrXkdaKMtN7/1Toct9kKoZ9K0Tiv3v+xw5jQ\n",
+       "XGe4d4O4Gwq4pypxGU7wYxUCmVOxt9dxym7kZcUV2SGceNVc+vHds3D9zu/J7/rE95iZ2bO3XZW8\n",
+       "exYI+oVC2/Ulru/938IF9Pd//C+Zmdnv/3f/q/jdD//AnzAzsx//u/9bLCvXDL9XFyijMoQojnxq\n",
+       "o/Q/UzbWjbjKK6jSwwXQCOk5EoElKIMwfjEjbFOSQP1oqIckJXRvm7rRQimj35VEH68wLuukfoSe\n",
+       "65NQEDg+SskrN5xQP+f6Wz7ALVGIqw4SAwxN12ADsgEkMtwGuMP7k6wJR5BzT7JOwt09mvjA0Svd\n",
+       "3vuTAQA9pD4aCY5YgRewufIgkgevB9fvunE3GsnQvUg3HNvgArzcuLv5eBn6pJq8Tvk+BMG8ONyE\n",
+       "6++1w+iL8vpWcHeVPk2s3sGNI1SJqgtlm5205zb0ycsX7sbKseDXaKvm2uQzppC8dvtujzJ1rcEt\n",
+       "K6T4KF2T6TihW0zWThC/Yz5JlZqJ41WeXainkrN5ywrN0wkC+ihzt6CrUrKC8DkxyNiNBPQoEyHj\n",
+       "la5vWbvKuP7K8wcuu7EV+QuMRZVYGeg+10gQ5l0lFUOCCIYJ7w7S1zn6bLPy58kWbrxS7h1J6Roo\n",
+       "M8BtuZb15Hhcri1qCZFKlixZsmTJkiW7o70yRMrsNCOYxU2ivBlzV6MEdBLaRlPCGoS+5HTMBD1K\n",
+       "RnbuYkoSTGeZ4SGMJsy+CCbpboF5mEqF08ZF2diHt+NWUKfiQxIPSqbrgebkLz3M8jr2hVyrCd8X\n",
+       "ghyRqK8M2AiEtNJPH9o5aP9P2OKOQrbOgDAoD/awD+25ufYd5H4f6nRV+Y50IqFw9N3kxTp8P0g4\n",
+       "PXe9LYQzCxEaZU4uomWhUuHPoGKqaPjuwq+fQ/ahEWL7FjvRWhC+HIgUpSlGgR9jrr9ROxZhwEKs\n",
+       "jmH9tVyfIezSd7lR/E92PxQ9RBs0h1mOm9iPTvZ/HSTf6aWf483LsNP/ypd+MZbVGMer2vv/9hiQ\n",
+       "gJ2EqW+aEJ7+wfN3zczsnb/xP8XvfuzH/nMzM3v7K/9vLPtaGxCxSna1lDpQbmYEMWSQrTbYEUro\n",
+       "Ngm4bL+KenJeD73OdWqH+LUYLj3T/mQ9ZuoCmHdSkkWZBNz/mYAvjxRU+0ywx9n9KAmwEuqe9dyR\n",
+       "C8JEkVDVXAQiwWVH0YooiSLrZEcSr8pqAEWZ7aOZ6kxVImGDrqdAycaWRGDNjRgI3Zcia3LvXhh/\n",
+       "u42PqyMiP3pZfzfrLf7upAyIxOTzfo0cn0X3xMzMrkWRdwI6qONvtQvzrlr5OWrkH9VQ//VFqF8n\n",
+       "BPxyE77fXMq6cwPkrmdwjqIfkFqZhLBPCYO5SnQok3xtFPMsZI5H9Ecgxg7rQ07Su+QazBqKNIsg\n",
+       "LMZHJgRwik8Wpc4nRjvJ+geUSMn7bMY5hRFKkWS6hvcM9hFyOsZ9oQEjQKdUkDeLY8vLTh3z+qnE\n",
+       "BOYEhXYVkebaLST2Ckj8SjwiF7sw/vQ5QY+FkufLkhNP8kkW3xhzSohUsmTJkiVLlizZHS29SCVL\n",
+       "lixZsmTJkt3RXp2OVNfN4LQIJwq0P5CoqCqy/L1A0Tnh9kmPIyvTocWKej+AQpmjx8ysjGRjccVV\n",
+       "1OdYwqNFpu5GqO2qawNwY945tEgwlrU8ii+iO4TPJ8lDZSDiKRLfbcL3662T6Kh8XuRLyFhzDU09\n",
+       "NThIzlZ1cOYw82ux2Xqb9vvgZnrxwuH2p0/gMtq4jtFAXFhUeYs6nHzVeN3bTYBZ22sSYaVOqHsr\n",
+       "+fJaEGYLuXfbXajoVjSrqKy83Tlk32zhKlRCM90ozO8lbozszD4j9nWphEUSNr1ddB+XQlQnQbmQ\n",
+       "MTZAn4T5zcpK4exwjmMvisHwVX3qY98dy67fCbpQjfy2BGm/EMViKqXnhY/Jrg998hD50r7+lS/F\n",
+       "7y4/+6tmZvYjf+zPxbKf/iv/Sah3LgRg1PNGBioV2Eshm5P4qtC+pwmDe1C14EjO73ROwBWoWlAx\n",
+       "UEF/u9S2YTaERnSsBmYlOKNZFdcn8c8ObI9mQKBmjhBWoy6Vuu8ZT7P2PqFUTy2q3MxJRgLujNrA\n",
+       "YId+xjYP35meA/0v5x1Asp0k12TUL5u5auCC6aJCmLeroD6Pu/EauI83Wy+jEPSpc6rCugnfV7XP\n",
+       "ic1qjTaKLhvcna+9HsbkePS5dv32C9RDiMWoXr2WoBDM8Wbj47TEulNfyjw9hrm1v/F65nDl0SuZ\n",
+       "idspwxo2SmBFQde2rF0ci8OwdONmOk6Y61DmSQm3JbWSCqEi0I2VZbr+yBrD62N9ys+4pOaZQkgz\n",
+       "8TqdjqEvNBcrLxdpJPId1fEHdWOfCQBiftpeRcsw7Gpx9w/VMp8mn/xknuTyXGHUmmYbYQ7XC9EW\n",
+       "2yCjA92eZtJ36pXFRO3lfEpHOGcJkUqWLFmyZMmSJbujvTqy+WQ2yM6IJLJMEZSl6Gt8m9ZdQgwT\n",
+       "lV1V3JHJbwucu+Hb+kx1GfnqBK3grkJJ6cxxpfnvPEu17BxaSAIIUbQzhsQvVZR5vnYvpEOESx9e\n",
+       "ito4Qpy7gyBn2HXKJi0iG6rUzU1ETDWouQFJgJZw/bxEaGiuu+9wvufXN36t90PurDcevO6XgtRD\n",
+       "lkvdKbtQaR9j143vTkIsbU8B6cpUagD92UhY7/Z+2HVMQthcI3N7vZF8hgjd7wYnb1tUj0YWes2N\n",
+       "xb6Q3c+YEVVSqQvu/nQ8r3Gc7BzLUKYBEEOJfHbHUJZLWzmed5PvqjcgUTa1ZFBHuxWRYFiv7qOY\n",
+       "6XwUhC9OMrQ/k93tV37hp83M7Lv/yJ+NZd/2v37ezMx+uf3AT4EBVYn8BeWwNdegg3OCpsTd8TLX\n",
+       "XkeS7bn8m7nuqpWpze/D30GQpjWQqI8B6Qh1Cn323ntBsf3m5kX8biTCrXn1zqo4M9hDhUiwS9Zc\n",
+       "g1S2FqV0qu1r4EkFZM9z7gllHAEQcxFpICh6HBa+ZiNjF/O5l5D04w3Gnfy0xbpTrEO7dltHmquG\n",
+       "Ocx8TPJeKErIbA8qP9Lh3tal5DoDmjXM1liQjSHhcPXAj8/3gYje3QoBHUiIdKGtsf4pmtNskVdN\n",
+       "5nPDnHwqcdIElKqDUvmk4fpRkkP7Onxfq0eAwROaqiCiKoJSAVkvL7w/md2hQfu7M8Epo5DYYwCC\n",
+       "BMDEZ6J6aTBOFRGLY+cgkjS4nye5RpRRYLtniuXhWu1B+glS6Jke1y2RJt4JXXenmAJCEC4S9UcG\n",
+       "kWkAGNZwGWvrXZjrm60TyysikrXk32OOVbk+JSHGg6LUs6iVhSVEKlmyZMmSJUuW7I6WXqSSJUuW\n",
+       "LFmyZMnuaK9O2XwaZ2CZ670IPE4tFtUMIrFzRlR3CjqNhL1MIWNCmrhYKUlmK+gJVeILpDpqqWRD\n",
+       "ktJn3LMlKXSCHk0mpGS627IskNkyUfFuS+hYSbuo2XQ6irIukmsOoi0ywn2mOlb8rKQ8uhZIzu2V\n",
+       "sEovirgHo1bJzN0KGPfoBOjnz4K20DtP3o9lb7z+ppmZ3Y5O4mQyVOq+hDqAbN2ecF4/vuuWiWwz\n",
+       "3NeLKz/HFi6bXOD2ch1+W4oGzEQF/ExdUCQPU09kSSLOxWU8UuFX+qQmZC7QPl3Aeb6cYprcN+qd\n",
+       "1HTFCukVg2wjeiZrENofbi5i2e07QcV8Iy6TOO7EfZtRFVjGaQNtLxJM1zJfSqhdP//yz8WyP/0n\n",
+       "/j0zM/tv/sc/H8v2uGzWyljDPFoLsbiB21Dd8tEDgQ+daCzRtZLL/R9c7tzblS/heTajEmX3q/uh\n",
+       "z3Y7d6PstsEtfIEx+f7jx/G7998PivrdsBwTMxpstkyuyyNydcLhNJNMiRH3fZRAgR5RK5FSkKnb\n",
+       "gXpbkkjXGGzi11pvQrt1ngxd+Nzt/bcvq+Aiu3nulRq4v0bXFaLFtNlsZ20282Tpg6iY00V7Ovl5\n",
+       "j3DbayL3NbSfWl2n0J7mAcaLuFg6JCMeWnftDS1cgSdZ6++F39aN32sS+nWdnOiClHX6sA+/3d9C\n",
+       "C+skRGi0a+ZMpmtfE/nC9aQk/ujElr6rSuo4SZAVvq+3oR67Qsn+fNaouw9BRKO6xcO9oJs2lC2T\n",
+       "BrPOev0eNAO9x0PGayzXEJLMlaoT1z8RcutYvzMBTTp3OLV16YxakeVyXvF5tpPnyvYirJONBCBU\n",
+       "oO3kSi3BfR9Fq45eQ02M3EomiXOWEKlkyZIlS5YsWbI72quTPxjm4ZW5MYeQEszwnqchnHyblTD1\n",
+       "KYIKuiPFzqFcolncEZeSr47hjY2EZhcIw8xm8BPfiL3r4g5bw6TRnkJIjAe8sVcgnedCYixx/Kjy\n",
+       "C9iZ7oXEtwI5UjjMdntACO+tvuqH8zWCsJFIPIDsrMhID+SoNkfJCCYpiY/tn+QN/gRF2994/51Y\n",
+       "9uZr983M7Cgqwhm2uJlvJq3H9rwDAfpw3Pv1+7nqtJkjhpu193+DfFpV4f3EMF0NWz1N4dwKZnL3\n",
+       "EZuYKYKH3Z+oCJNsr2TPHmMhG2T3QxKjSmLwP6KTscI9KeulOjXVq7eVt3WbhV3Xg8aJ/UMWUJRR\n",
+       "dk0ZULpJ5k6N8zy79UCBqcOOHMc9ffrE6/bG58zM7PGv/T+x7Pt/OCBSf/F/+e9i2ddbxok7iZ+7\n",
+       "v5WEGhMI0PDrMc5jKMwLh5c7XF0nONlnsgb8WkFqIFE7IZteXAVF7VqI0muQaHebHeroJz4gdP/p\n",
+       "Y8n1OFGJXa4Vd+cqvxJlyWMZZUeUp2wnrBO1oJ7YJUNg3ErZQTvqoBE4CKJZK/oX7v+9tSegW9dB\n",
+       "jVymrr14NwR0fCV3JI6o6Po+dvUitXL/EormsiZQnkLRp7YN8/nmxsfaLcZd23muvTxfEsB7TFBK\n",
+       "iFzd9zYcX+K4k5+jP4VrDXsNngl1yjPvE/eBiJeg5P33McGcdCQiHw/ergPyAA4SxEN0RGVqSJgu\n",
+       "ZaEmclSvJAMCc8Yq6hnRMQY7ef9TVqQQqRW6R2oJFGE2gP3R5+TzZ1h4ZTpRnmIldeLzVCU2pjho\n",
+       "AyKpaw2DfCYhrI+QdalFJiXHOirKDTFAR7FcIoaz4AnmyY3jzn9xsQ6yGlsZp1f3Q3tK6aZqReka\n",
+       "WZPQ17P8p1RKF0L7OE+RsLCESCVLlixZsmTJkt3R0otUsmTJkiVLlizZHe2VufaKfJjB+ETxNPFi\n",
+       "JJjNMr8CFzyjhTEnoIJYpgRktLZkAkTlHBdUNhfCHKUzZu4ealDNlVw+fBzlqHppJJtBpfReEm+S\n",
+       "AN21klAScOJrV67jUsNloUkmL6fgRru9dp/Zs2cBRj8I3F0QWqX7QnmwJFGLxk8OCLTvlezKxLsC\n",
+       "RfdLDagbqHHXQuLkTR7sJEUg1B8CBH1qvb4kmythu4IbT3Vs6PlSYjFJ45m4TIx9O8mwH5kYGK41\n",
+       "IQITzs7FPRg9zzJ2OwYDrMXdBx0xdQHnuN/qbibZkxCzZd6GAvdie+WK0Z8fvjU05dorShdgLm5p\n",
+       "4z0TePo53CwPHn48lr18Flw6K7oHBOJ/cRu0ot543RPUvvi1nzUzs3/lu38olv3lv/sTZmZ2I/O0\n",
+       "gktFvJKeeFTcQvwJYydOQo6f4FqvNUEv5tgkUHt0Ac80c0L/7C587lzCBbAuvVIlPq/gCtX1597z\n",
+       "4O57+czdSCf0a6WJZ+HGKMU9NWByqVYcx7Fwgi0fSJSXxSjnWsRktLLWYf3R5OKcY3Xp4+TqXtDK\n",
+       "iq44M9uUwUVW5T7GHu3CcRtJZP3BixA00uxCfe8/eM2vheMKCcpgMFB38ACU6xtkQHj6MpY9gdv4\n",
+       "1Lm7b7MKAQCFEKoruJlbrhej3//dJtynw9rr28GlNLzw65dXCOhZP49lF8U91N3J9hN1uSQoIsoY\n",
+       "IbCHf83MctBHZq6eM16fki51cRXTzaVUCbreNfCo5v1hMmJdr/KlayvqU6krCt9r0t6HD8Nvb26c\n",
+       "PmFYsrv+jKtOyeYd3Jxw+46iYt4dWaa0BAQR1X7eCi7TtQQANAgAmK1d+NiqKjr7GwuwsFKiFtnF\n",
+       "hbuAL/F5I4m0qWKuGRCoX1nIGDuC+qKuRQ1QOGcJkUqWLFmyZMmSJbujvTqy+TjNCIuMgxaQJubT\n",
+       "U86ZIV+Uhl9SWXmGZmHbN2bLncM0Uh3dLYa4z1QVlirGM0VjvxiaoArYOF5zzWFn7S/fIlcwLeu0\n",
+       "AurS1KLOChJprYQ5lLUPOvltQBree9vJw0R9MhB8B1VdBvG+VwI8diFKDhypziz92tTczfuOaN+G\n",
+       "neB6e99/jJvbTpI7D4RGok99K7tFRn9LBSghkQvSVTTLe0dSZiZblxbjRNvN/GtDD9KvLcmhSliN\n",
+       "QhtCxOyQME3DiiuQnFVOIZL7Z8gJdoxQAs4rR5qYk/Ak5PA3Ph/Qgewf+O6bIdS6I2X4tQ5/qnhP\n",
+       "gjCOI4McgILI9voK5M133/cxtL0IUhd/7Pv/jVj2V//mj5uZ2U5Jsdi5a0h+zFkp/R9VsYH+FULE\n",
+       "L+NEEUQAE7DXUGuSbKWtVEruB58T9YrKxpWUISgBfXO58u9eexTQnMdPhYjdH/DXr8Vw/VkESBwL\n",
+       "S6mHOVGe6Phs8OIURDD1HMtxXaJfN2vf6V/dC0jPw/uvSdkDMzOrel/2X24CJJGvvPM2z8L3XR4a\n",
+       "eXnpO30iLL1kEWhP4fPx6HP3+bMw/18+1/UHUgtHR/iYi3ADsn9oOOYpmj3IGDoSwdZlHc+E9uiF\n",
+       "Lx6jXZJrsG5C/VaSlaBeIQBG7kmFtXUAmjL03tcN+m466FzjXw3hx3ouwVMT0UR9djEoSpBorkkl\n",
+       "M0tIW4uc43OZAWQmv4NKKXBGj4nmn+swaW5vJcgH407D/zlO24EyNYq+IyemJExYYe5W2rElpYP8\n",
+       "nqxWoS6XFw+9jZCp6UTi43B6id+iLX5WW+H5txEvRdNQ/kDznzIDiAaKAU0V2ZkOY6ztNXhCXBVn\n",
+       "LCFSyZIlS5YsWbJkd7T0IpUsWbJkyZIlS3ZHe3VJi3NPdmhmVlLFXI+ZbFkGSHNG2DxHwMtJivZf\n",
+       "E46vaupEiO4FCJOq7B0TpM7ceSQbqrYQVaw1kSx1ZNQtEb6vAG0q6Y1uLyVdNqsAVdbi2lvHMoeC\n",
+       "KxAVOyFFZoRRxbX4wTtBD2cPonI2g4xxXfUwsAmSDZSekkog880m1G+19bLDMVxrKpzsl8EtNwqJ\n",
+       "sGfSVpK9VWF2WN5XEpVVM2wi8j6Tmw5/WnFVHqFzM4r/mDpGPd29ct5yrHCMtB/1076jBtrpqARw\n",
+       "6rfI+bJlImNqlbiarihGgwC5k3t4/Ti056EQO9uRbjEhUcPdNXM3w31489IJuLvLAKnz9rcHJwe/\n",
+       "eBkS+JbiRv7ab/6KmZl9xxe+PZZ97+d+t5mZ/R+//HdiWUF9IHFB0R2VmbrgMe4ZsJH7/aoxT0+m\n",
+       "cw2/FXIoSaF5L4MXn0l6NvPEuKXA/esVyKZ054zuMt1CKfnNtzQZd9BK6zWzQPTjy73GmOlaCcqA\n",
+       "W2aY6WKFz+qWqhokXK/O7HPp2hNaBNeRShJZc524f99dJruHwd3XTL52lOvwOZegjGyNtQNj4mrr\n",
+       "c7iBu6eWwJ4BWQ6ePnEX6MubMI5ePn8Wy25uwrjbiyr56UAdOckUQIV+uDsHoQwMXEO0C9HX4+Dj\n",
+       "//ZZqFMl7s7Lh1xj9Lf4K86igYVU/RYaAZvdy7zuua5oTBTdc3ILy5wJz5XuwePUV4lnVwxi0gCs\n",
+       "8F2uCaLz5ThR8kK8Vgxo8b6+uIBrU/r4xQ1cdWeSmzPIaJBsIxkaXpvQTeJc8JqUeRhHTe1u3C10\n",
+       "ztYS5NLw2aZ9cvkA1VgSwfOciex9XK8wrhmcZGZGr+gk6/9EVXwZFMOwLOvVb3nGEiKVLFmyZMmS\n",
+       "JUt2R3t1iNSYz3Nu4Q12MN0ugFisqqMkKme6q6eKraidA+2ahMQ5MfwdJMJJEBwiUTPFWubrU5kA\n",
+       "7ghnMBnI24K0zOlwOComCsQOUkhvkWwru1CSHndbf4Nn6Oh67W/wZUGZgtnFzMyRtvCZu/6wSzwc\n",
+       "fLeeoS56TxhyOoiKPBs+IzFjh7XbOYlxAppw7H2HXyPUt5O3exJV2epBd7rY/SrRrwCJUXMf5SRb\n",
+       "C4l9ysNvVKZhADo1CNkx7kSJiArSWcYdluy+zowrkpdn0g34PMurxZxoghz1JXOnIdejKBYf9qG+\n",
+       "98z79ZOP3jIzs335ttQJBFiverzvikhyE3s8+X1vLgJK0e8hV7GS/I9tKFMEkWja4av/MJb9mT/+\n",
+       "75iZ2f/9D/6mV+CSYe2CJmEY6QY6yp5A6sAk2wAHWalSEzxeBiD5vOMk6BekPpRE++ImkJzffOiS\n",
+       "CAVDoivsuDWlFqRQ1hu/X/chBXErof6UUdFdco/+LyT/4BhhUkUYGGQi5GHOhmmJnFMmQ4NtOD4V\n",
+       "uaBC++q+79LvPYAqtaD0RIyz0RveAhXcARnYyhx6CCTw5pmkJ+B6JnkSW4TYH184+nk4hbJD57/N\n",
+       "8jAWBfS1i3UIUCnGcP1WxiuBQA026UlKF/R5APr5/Ilf6+r10MZC8m9mZfhNmTlyRUCswNzsNDSf\n",
+       "sgaKapSLIssmBm+oiwMIm4yT8Uye2IroEB5KrUgNUH4hE7SkrJnDUq6fzasbCrHGSUBHMVHZ3VHH\n",
+       "Aev+rXe7HfbhP1T9NhnXnJSjidROC6Rr7RVomvDMutz5/Fs3yCggayKDi8pSvR7ILYq+OY0+r/lO\n",
+       "oCryJVXMZf2ronSIjFM8NDXv7MBnizD1y+IbY04JkUqWLFmyZMmSJbujpRepZMmSJUuWLFmyO9qr\n",
+       "c+2ZmUky2DGSYgV2jfC9wKjRtbR0nY2KyxPbVLHnjurhVKyW71gVIfEq8ZVGYqeqmEf9HtWbImSv\n",
+       "6rAfOqwplxC/CezIBL21uACv7gd4vqnd3UMIdFB8nPo80p4T9V5A9iyFnEoXmLaL7ibtabrbVFul\n",
+       "rgIsfLl1yLZcL92idMF2QmI8HkFshPtIXYbUKmFiTTN3jykB+kQ3Sy5aH1m4hnjbbAB5fNBKRa0s\n",
+       "JIOW+0WPyqicy4GuGLmvcPeVZxKkqrYLlXpzVUqG2m9DfS5zl202BnfDpz7+RW/rLfSmhIg5oJ+o\n",
+       "nWKm93Hp2rt/X11bYezcHMKXh5euWfX6a4GofCuJZ1vcry//0i/Gst/1nX/OzMy2evNWAbKfcnEB\n",
+       "5XRtCLE5uu3hdhE3Cr8pxY3gxGMl5+L7M9tCHbvvPgmK3Z/95BteJbhZphPcE+JGy+ACKho/y/oS\n",
+       "CRxpDcwAACAASURBVLK3rq1ELaBJ5m4HV1kvbpkeY+10EDcaya6Fkl1ZB7hslVh9Rm8uBiyIjhnH\n",
+       "QqXEetABBhnQdHPclO6q/CTIwA9OYfx9YfvAz4GfHuU+tdCP+lVJrv4eEgl/sH8hdQ/fj0Ispzf2\n",
+       "0Iq7uYZ6OlwsR0lBQd0x1aLrmZVBXLtji/l343399J1AadhcScJvBCM04haamIQ8uthEH45aROIy\n",
+       "YpJp4WtHGscss8EZHSk+O9rOXVUZxsmYhXuiAVP0I1cy2Ks+rDsb1UwiZUCek6SeKImay0guz4LN\n",
+       "mmR/Wbv2YW0lfWGScT1hfg6SyL0jUX8rLtMi1I9BN2ZmWQx8UL250J4YdGCu/ch3BqVMZMhMrM/J\n",
+       "+J18pkt7rrcF+opELwwdE6irjlZy7SVLlixZsmTJkn0k9uoQqay3TJEm7uBnu6/qw7+Kb5CZENs8\n",
+       "0FIlg0HUFBJn34YdQQcl7KKr5LtwPs3NVgB9mHIlBzL8VpAL1ECjNeOuQ3bpUb0cb+utvPEzNFw5\n",
+       "pMzxVq0krBmhyLud59Aiya4VsifJrvuV73S2UNveHsKOc2/+HcRhZ2/r3P3oDpaKxpko8ZY5wvqF\n",
+       "xL3F7reqRGrAwg6rk91ni50r89X1J9nVYKfbC9JIoKWT8PO4c1BECnjGSfqkQ/9rDqcCatwjSe8S\n",
+       "Vk9Zg15C07nHUUSIfT2JYnkRj9Mwdexcc0WOmllZPjrS+PB+IFi+K8TeL25qXFPCxYEIjjJ5mH8t\n",
+       "k918D+TsZu/nW2Ecl8hdtjXfQb54AfkDkemgrEe9knDpX/wZMzP7k3/0X49lP/nLf8vMzNpGiPWo\n",
+       "cyXq8S3uI5XoNa9hiR1kp8ReIqwzZi2DPSSHIcihpSAnR5Cdf+Px12PZW3lAlih7of3F/F+bK1Fs\n",
+       "rxCwIHvQmjn0THewDMn3uh+wqz+eVBU8jO3bk8tODOiDyZgbU9aJCL4p0ohd9bRcT/uTz13+opS5\n",
+       "ex9r3MPOUZpPYE5sMYbaJz5fKR1xr9O4/jB2hsKlFn7+5hfMzOyRyJ883oe6TBJQ0gOl6o6Oel5j\n",
+       "PSsGyrrI+nuCXImgH0esSTOHBGREBkEYn3wtSLJcfczRxGKHPisk8GbAcwHPiXHSEyOIQObEkDNQ\n",
+       "Reo5LccTg5IyGf9cuyfJp8cgnw7n0zyMJ9zPW1n/ikOoy+3Rz7HbhDY0guqQWD0JwlYjeEmDkqg+\n",
+       "MPSS/w/3+DCFNSGSzqXdgwRRtAjUmG4d6dwDTT9ufEwSpaqEWB7XAM1oUjAnJZ7hsv41mH8a6xUJ\n",
+       "5fLsIoql/cnnz0GY9YcDA5XOPGN+C0uIVLJkyZIlS5Ys2R3tFXKk5jKbjoRkv+UxOBAfFLqaywqY\n",
+       "OTdJAvejENnYYgdZO4JBF2l3klxf5BkpSFMy+7SEWjN0flb1pX99AmITNynKRxqJYIivHm0tm2Wo\n",
+       "fS1CY8wXVbRe0T14Q4WEszNMlvyJQXdQzM0mdYric5Kbq8RulTmSzMwm5p8aVlIWPsvmN/qoBw2/\n",
+       "xi6iBW/keND8Rthpqk8/ylQovwznlxvFDYuimjFPU68IC+4xOTKSL4yilqPs/sjNmmV/B3KhaCr7\n",
+       "kRnHzTxkO5dQ37rZ4G9An1a58wemU+jjB2vPl9a3YefeDYLWYtelyGlJQVZpf5FB6mDw3RflAVa7\n",
+       "sEu/boWrgfMdRSbj9dceheMk/P/i7a+YmdmP/IE/Hsv++q/+VKhH4WNizCnm57/twQOJYJ7sNDkp\n",
+       "O7lf5DKcZAzlMf+cH7daA9UQhLmCsO3zWxeJ3O0LHI/xKmKFNe7TWkQdydHsBWmuc2aVlxx+4Gso\n",
+       "R2oENe148DF2fRPQwdXJ5/0B+fxOE3PSLfOqKfpIiQuVXxhx3U7m03MIse56n5SfeD/0yeaxXB/o\n",
+       "0NchqjmNS6RrEAmTT33y02Zm9pk33opln34cGvvs+v1YxjkxziYK8y8Kcge+UAaIqd1LXstjqPtR\n",
+       "BVEBGA2af5H5T2U48RLXgvBeYUx0M+kECKLyt4I0UzC478/xZwV9js8k+R6oSi7kXCJbhYwxCpEW\n",
+       "uHcHGS8lEBlFickv62VOGhDu8p7kFWROVB2TEXVRkVDkn5Rx12DtqnG/VDonynlIXkF253Dj7XpW\n",
+       "hPG8Kl3OpwQCXghyV2ZLNG/AOjblkJVRUeNhycft0MZB+KiUblGZohaekMOtSvJg3TvKcW0S5EyW\n",
+       "LFmyZMmSJftILL1IJUuWLFmyZMmS3dFemWsvmyewk3L/PAIqVHIiox5zDT+PxG490VKBm6Rw8sYU\n",
+       "nmWeuqaQEHZ8Pwq0S9VVde0UsdIzHHdx/QkQfIyq1TBw5vCTJsQgaA3NHQAxCgGyKOla01xrS/V0\n",
+       "yiTQfagK27xEKWTfCYWnVodJqOdu7fAsFeUHQT+HE1xm8qo+wgXYHjRPFfoYt+IkrrXudE6mIpSd\n",
+       "BEau4LI0zUmFjhzUU8TgAVFq73HfhyOI9a3eMLRPHMR0AczyJOLjLP8T4PHpTE4sdQFQlX5VgZwv\n",
+       "rjCqrl9diNTFBwiXlnPQjaMhvC3kHxrxAOZQNF5X7j7MQUbd7cL9PMi9ptr2cBIFflyrXnudruEe\n",
+       "/IS4pT918XEzM/uqefj7yyPJs6I2PzHXINw+GjBB124r5PRb1k2DPcL5VuJGLSAroTnkCrR/EAL2\n",
+       "fr9HPcJxlbiiGT1Cd7qZq/d3GihzJttBhbxfJPOa+X2/uBDFZBxXHbyN6y64YF7com90Uczogj/j\n",
+       "sxJi9QnZAFp1dz0J/fnmE6EKvPuemZk9kQAQur5zEKALkWk5Ia9cIY+Or/3GV8zM7LONE8u//bVv\n",
+       "NTOzX3jy6/7bnv2keRIZZCQrH2kOICxnpbox8UHqNIx8Tghhm659VYTB19cvnNh//00Q2pWp3lNR\n",
+       "HucYlSCCdXpGPsa6muliRxK7rL/IT6pjYl0z2EQCn+C2rCFD0OzdPXsNKZL26Pdr3dDtpfoLDLby\n",
+       "dfIS64gqwPdHStJoe0gz8RLeCyrrr2RhOWKdnubpPsJ5RZX+9mU47oXkxGNWjMn8ecI8fo1o17Av\n",
+       "yhou21lgEcapzBPKqvTyAKB7TtfkAzI6nMR9eoCcjLqKh9PSlauWEKlkyZIlS5YsWbI72isV5JyF\n",
+       "2sN081UwX5zm1WIWatmQcaenp+OudyaxgI8DoJNRRCUHvK0OsvvpT5A1cL525NPlsiPKYqirlkHq\n",
+       "YCaJwLfvbFHfcwhdg52w9tIEpGHM/A16GMLb+iAoGQmduYiKsk9IcKwq2WnFXaC81TP/nmxNSAZf\n",
+       "rXz3Wea4/gyRCn97eVdn2PfJI53tdAh1otDj2AqxFoTWQUmc2HX2s85jtnYvotJfd9L2AzmT3RcR\n",
+       "rpHIlIiaUuiymImqAiWTMuYQzCRMv6D4nu60WV0NMpgouhpQoloyoxP1OsrO7HQNMriclx8VfSiA\n",
+       "EvaySyWKkytPfWCmeRBsS98tt4cA/2hY8S0kBARUsBY3/vBlz//3g9/1w2Zm9pd++q/EsgqT4Sj1\n",
+       "pNhgR0TwKHIVmH/DXscELywCfsxPKXkqJ6BJha5wEL0sJf/Zs+cBMdti/q+3siYMJJE7+tasAmG/\n",
+       "lFxvnKDKP+aatVpptAXaKrjz5b1A8l9tvU43Ny9wOOaL9NehZzCAippinZIJUABFb5752PkE5til\n",
+       "kI1PGM9rQUIzIAYt8wXKEG4ajGvJf8lccG9/5aux7OE3B0TycHJi94Rx3086Uc/s9IEi5AiyUUSK\n",
+       "DPBMZFWYO66VORH1TWU9I0yla9ztbWjHWtBMX+NZICKtdkb8GO2RmBgrKWcjzSoR5LPeOiK8Joon\n",
+       "p6vRjhz9dXHf19rdPvz22fOnsezmGMbEKCj9hkiXgmkI9ig33taSujfXkruuX6KeMXcfPBaFjH8S\n",
+       "wDPxHOUQBM0kdyYFTm9uHRHk+VpBRBncsRLZn9VFaHezwXop3PyoO6RBDOh5DUDpIPCqnijmcVWR\n",
+       "6L6nJI+P8U6EXc9ZQqSSJUuWLFmyZMnuaOlFKlmyZMmSJUuW7I72anPt/Tbm3hsFSAH3ztx9Z/Jv\n",
+       "wUZ5VyS0WODEmvOJPG3VfYnEdiFgR7kb8S1mkdC91KDKhbxOhHiMriqFh+kyknYRThWIdaIbTfwt\n",
+       "VABXBeAe7pZW/G0dlMUHQMCqBJ/BB1Jq/j9qnIgrqiwA4xbibmP7hVnew0WjCszXyGu1v/a6H2/D\n",
+       "+fa3wd/Xd0tXlJI9e5DBpZrWQ0hmUgI23ZzyW+pSidxSJJcPIJh23dLVMFMnxr2oayGRMsee5tAC\n",
+       "jFyIYu+J1y+9AjVIrh38AlvBrDc4byHuLvrl2oP7R6mP0opvgYrlheS1mpBPaxS/3BFk68fvB72f\n",
+       "hw9cnZrny4SIuwPErvpQjx6F3HW/+Y++HMu+40d+IBz/v//lWLaHAvutaBBRjZ+57o6i40a5qaMQ\n",
+       "0JnrcJIAgAq58AbRvaGXdRQXDLMHHISBnHfhIiO0ikrR8crpipOAjRF6X6WMqwkXm41d5r8r/Z4w\n",
+       "2EO9+HSVFeJS5TpFF+zTp+rGx7wWN06dhd9uRJ+ngLbPZ194PV9Hn51kTeD6lIm7rcM4XoMUPcla\n",
+       "t0JftDIn6ipc94O9u2xes8+Gesg8KeAeGaTu5UCtPtGvg6tuD3droUroGdXR5bzoV9VWipkyhBTe\n",
+       "QLJ7RkoekOtQPTcVidqhb8pMyPExUEjciFGKSQMgQhvXlauoVzUJ0+JaY/+o+xBrcYUccoM8V+od\n",
+       "COiX3oabmzBmjyfXkWqgdzfJmkyX8k4Wzx7uzmYnOmLPEbwyaUYJ1oGabdJ+PB/V200NvlLcqFx2\n",
+       "Blmnnj59hr+Pve7b0P6re5694xJk9A3ypa5E27DtkK9U3LMZXZBy/R7jSTOAHKC83u6970ZQKXrR\n",
+       "kera5buFWkKkkiVLlixZsmTJ7mj/zCib8/9zwjhJ3LJbOaNizLfkeVbn8Hceps63fiqxyhkiYVBC\n",
+       "Q0FezU4SrszcXbr7oQK5MFt5PlVFp/wBdzVFoe+xS3Kcn0yJxaF++lbN63dStj+8wF/fJe6PgfjZ\n",
+       "YRc+yVt9US3zxZE8ryhZCYL6rSAiV/eCirEqFh9jeK4TJTtIIuz3BynDLhW3pmuV4EjFcGkrCavK\n",
+       "gMX9F/AvkudHlUQAoVxDfXsgIMylNGoUA0yJlSRv6k6LJEbltbZAIspa0EQc14lK7vEQEKGbJvTn\n",
+       "rpGw3hoSGr2GdVMuQHPoEcHwtt4w7FjGU808etJR94BAXVziHg6SwxIyAYVkhr9FffcHJ6f2uNYn\n",
+       "PvlGLMuevGNmZt/3LX8olv0Pf/+vm5nZSe7xcQ/18gNlMJa5ARVB6GL+uVgU+2TWT7gZqoAfAwA0\n",
+       "24BRdiG0+1R63zRABxWtGKIkh4RVo89OElbOe1EJsz8GwMzCyiHToGMcHxnE8fCBK9tPz8I5OlFi\n",
+       "3jbh3lUSkv57hjDvPl85wshLaF6x3Trs+kdZDEmUpjSHon8GNFHV3ikhkcs5NlUg6BdyPxlsoznU\n",
+       "qEQyk5jB9xEImoXVc52S83J8zB4JWDsEOSmRDSKr5UAMGV2LKQvDIBs97XAGsfbfSQ69ikijBK9w\n",
+       "kdM1Caz4WqU7KJ3D7Bz6TGBgi6KPQLUuBl9ruT5wXoV24HyC/jW78Jv9rc/nscTzQVDHBjkzux1z\n",
+       "A4pcA66ljzO2v6p0sIc/naCEnO+6xg+YJ6fOUaLjgeeLV/XjmfdWxkQFNFFV+bnGz2QNBkoc+W9P\n",
+       "6DNVlG9lvp2zhEglS5YsWbJkyZLd0dKLVLJkyZIlS5Ys2R3t1bn2sn5Guszi3yVhWF12hD1VHZnf\n",
+       "K9xLt1CWqY4QyoozbkSQ7mbJcAHjZpoglt8J3FyUS2Ij0fBBYFlqS5F0qtcnLK5aVAMTZM4Ui5HQ\n",
+       "U8St6KrQhL/H0zWOdxdci9/S7VaXDgVP05mhMC37PwdU3rWSeBbu0F4I8EU1LcroNuoFWiW5u+/p\n",
+       "spqpZoV/1dvGXKCSyDYrzrgM4jUFAu7oxhG4F3080Qc7k32iYr64PUDUrDLvL8LSowppkYBciasM\n",
+       "rr1SoPUeboTbl+E+7Qt3xdZrENsHcc9QbV/qdIsEoioL9tbHgzvo2QcffLhKVolm0X4f6vToDbjl\n",
+       "cj9vfXON9nkfHm6Dy7gTYvFqdT+05eg34Mnbv2lmZn/oD/6pWPZTP/+3zczsvVsfO0OLOQGS92lw\n",
+       "3aHS4PYVn0EZ3UxLJWz1oxO+14AKNq2TRKYjiPRTF+ZOJar3F5sL/EDuF7TFRnEZHXG+W9EsGyeQ\n",
+       "2GWsNdDvoZ5QOADjTwYv5wBdMIXM9e0muPFyWTzvQXvsM8P9WPadD78V9fDrH7swTi6392IZAzA2\n",
+       "u4tYxinDjAWTjAkqsWuC2lUd1pGVucvwsgznazKve8+MCpKqoEP7NXuD13mZjJlusUwzMDAZttzr\n",
+       "KXrR/LwF3O2bjRD7SSyX/qyoS2hLo3tOXesV66TPM5SpAjoVuzuhj5Q4T1N6P9UgpfNRMEhN+LnJ\n",
+       "fO0uQAEYJp1XYS2qG3F3877KmCzQ1rV5kMXNbZiD+UqoMtQbuwQRvhG6Q5XPjjEzq2oGW8WiqDZf\n",
+       "SsLxE6gVpbneGt3t7cndjR3c9uMEAr4MCa6nmtmhYMaEQd146P9OgzfgKpeAGhLLewl86Y6/tUvX\n",
+       "LCFSyZIlS5YsWbJkd7ZXm2vvTL66OfoR/o4S/jqdQUn8+OV74Vw9HGW4bqaEZby4DsoY7hDqWyix\n",
+       "DaiWhJByB6W7VCegC3JiJBHOQ0lDXZbtZ5z+Xt7MM24IJYY/R6jz8eg7kg477FaQo3E6zX47jLKD\n",
+       "6ahm6zsTqmPPqJ4DSfT+Bv/0RQid32xcATqqAyuxe4ICtpAIKTcxRvRNyJHsa7mvkYAut4mK7sOg\n",
+       "Oyh+kPs0cKchKCVbF3O+nVEil3FKxHSQ3RfzE06NSEIUrJMEL2D3o7kjOchHhNO/K4hkfwio0u/7\n",
+       "+Kf9+Jdh199JPUucoxN14A8+CMrHn/vsF2LZl7/0S2ZmtpK6ry4emJnZe+8EBOn+a2/G7z7+qc+E\n",
+       "Sz73fHkl0KFjochpqMvjl44mfR4E6fWL61j2ez733WZm9rM/9T/HsssmjLcDyNubynfmHSalBgz0\n",
+       "H5rDZj7vZ8kB0D2qlH8AEnvsHLktqGyPcfd8csXohx8LyM12pbt6jCFhoLcYTy9f+rh+/PTdcI7X\n",
+       "HP1Zb8Lk3V6KUjXIvjMFdAYUYGz2slsusVt/ffXA6wnU50c++bv9FAfsviXXWQ3UuZVsByvOWZlQ\n",
+       "VG2PgToy/6q4KAuJGnnYVpkjPUS916LUb4cnZjYndtOLMM1C7edBJrPwm+i6kDkZ0WHJyTdyTIgk\n",
+       "xgrBA5J/kU2cKdWT7A+UbBbWMlJ+RYIIxjPPIpt7P0IhEPlR1z+oyItSPEP2q6LB/0Vqg+uUtot9\n",
+       "MXq/FmWYV/1JPDd8FsljkjIdAjraZrtZ1Ok4hPtJ2QOqmZuZbUiOlztVAnWcpSTkGt8vCejqTaDc\n",
+       "jeZY9QUdCN7Kxxq9BNpPHiglmQpGPP9k7kaEXwJKYi7WQaWLEiKVLFmyZMmSJUv2kVh6kUqWLFmy\n",
+       "ZMmSJbujvTLX3jhNs7e4LJKtvYyEciUCZpFsqoDvElqlh0zPR0K3qFgszqvkwInwoBJBocSbiQsu\n",
+       "kuKFAEnPyyg89TKSlwETKmEZ3wnqbJMxaanAvkzkKgTMEuc5nfy4HsRuJfvVUNQ9gQA9CJkuz8Jv\n",
+       "VfWaCYJVR4Z6G6q3tT9c41pe+T5bkmj7EQrsoqPUQatooPtCXbED74lJ4VJbrKUrRF17VMBWTjL/\n",
+       "o2U8bbYcQ5bR3elFdCOqthETKDNRp/5WE4m2JybNVG0ZfBeT9npjN3AFrT/u4+R24v0XAjb0fjpx\n",
+       "mZ4QePDe++/EsvuvgYD+9Hksm/aB5H51P7iKVM/lK78elMpz7TAQ6rPCXVEvkPD09U84Yfnd94K7\n",
+       "962Vl/3ef+H3mpnZvf/rJ73uSNZ8ewx1qoUxX5XB7TSKe4peIZX7itpXqiyPITaob2GiLo8OKNzP\n",
+       "koRV/+ZEF/BG3AO4lBKG6dohEd3M7P13QvvbgxCwL0N7Xjzze3d1P7j+7omKM9vRZmeU5UHs3orf\n",
+       "44fuBWL5pegI3XbhvirdoapDX1caAIP7qATwOqrxox7t0o09CAG/XgUXVLZ3N+7zZ89wnGhLFetZ\n",
+       "u8zMsgGaRUKfoNs6BgwJPYLrWSb6bHlJHTlVRwfZWTym5Rr6UPLUW69DnZSonMNt2Y98XuiijGAT\n",
+       "0Ruzga44fXjhmqItWDHISAZvf4Iu3+T0DRK118UO7RKXHZXVMx8TQ0E3mmg7YSzWoqJOzaZWxhMp\n",
+       "Cp2Qskl5KUVvq0Lfdbhfwo23qlpqNkViuzzlSSKfRAOqQfaGScZJDr27opJ5mvN5gswaEsRAl940\n",
+       "eyajjXqzmSlDX3uYGFy0Ik/ou1vNwLHXNWNpCZFKlixZsmTJkiW7o70yRCrPM5ulMIuIgISLkhQ+\n",
+       "+yXDmhUliZ/khCwUSIbXZpGQHj2cXAnLINZJBbhxUQLyCCRGQ/eLMwrlE3PcgTE+NUJgO0OY4xv2\n",
+       "IEy4rqNisxImmVdJGXtAuCTXFcN+R5xjPzoyxLr3gn6ReJ0pERP9L1zLeNzN3pEO5poalWyP45TE\n",
+       "GOUJIpdQJauxq9T+BxKghO1sojq5H8cQ90wlMYxojh+XVSRbLqUOOCZVHZ9IoOawylfcJsuujqH2\n",
+       "vaCEHX7TiiQC1OizE8ixrV/rNZCSd5lvq29xr3NBpHrIKmheuQK7yeONE8BXV4/MzKxsnAB972FA\n",
+       "qVrsVl8KglWDCL6WecKwc5WwYFfsb5yUfoGdfibj6Yuf/WYzM/sDn/nWWPa3/9GvmJnZpgxt7CYf\n",
+       "k0RGWtlVdkBRs5myPBTrT0IAB4pSV05KXQFZymWO9ZBboGKyIn2UDul6l5+IqsyTBqBw7MQiqzCO\n",
+       "D89cfmQ8YDddiSQBVMZzkR9pauYEDNe6J+P6HsLEv+/hZ2PZJ5qQz+3FrV8rBxRTCiLF+UEUIJQB\n",
+       "TVGEBX1AorSuSTSVK2gxnhVNHDEmL3eea25bADGV8HPKOEyFI3cROWYmiMGR1rwiAVu8BJweua7J\n",
+       "yF154cEztkL/C0wfAxUE9eE0otehV7SEyJ2SqHMGwIhiOMZdI4gUn1mKSO1PgYDfjfrcCefZYJ6W\n",
+       "EoCxwnNilDIiVp1kJThiXKt0TgfCuMKuHMbMDWpm1lm4P0poz7lOYpA3IqvAnJSjZBbogToOMv8n\n",
+       "kvdFfiEGG2igWJQJ8jG53gH1RBerrMGEm5HLmjCi3b2s/1RgVzS77bieqZeAAUXqYTmTcUQsIVLJ\n",
+       "kiVLlixZsmR3tPQilSxZsmTJkiVLdkd7hUmLRcPHbM4KhpHQq2Q/emBGmzGQF7+NCuniPyR5nOj9\n",
+       "9KFfmJllqjtkdBkJ2TFWV8lx1CASuJ86QuKqqpH8tWnC8StJMrrekvTpNRojsVpcEVSFFdcCYW7t\n",
+       "BSbeVLIhv2fS3k7dM+2SsD3QFTmDUflXieokoDo83EMXppsR+vlBIwAoDER1XIFn2dui4UGNj5nn\n",
+       "lL4l8ZTStZDLEI+BBCqawhbR3SL3P8+pWaYaN9DgkgoUUW7d29qCAKsK7AaipPV+T1qQHLOqRfv8\n",
+       "+huoSO/37jI7nujmUH0a1C3XOQR3r5DCn18H188bb30ylj19EdyxFxdBFfuttzxB7gG6ZOva3WPv\n",
+       "Pwn6SGuB9ukCf/rU67l7MxCr9zfublqBNP4D3/uDseyX3v11M3MSdylulwFaNY3OIRDrb4/uHuq7\n",
+       "ZeLTBq6CuvbzXcJVWq/d3bA/kagMwq64HcgZUNXjGFiiOkqY9+oWpwu81+Oo6C7j/4DggnXlfdcg\n",
+       "IXAHV9jl5CTyP/KFbzczs0/kfk+e74MbRxNuF/BVKtk5Mq/FZUIysCZtbeg+wk1R9yBd8RpsUWGN\n",
+       "GUcfazUWMs2ewLqsRcfq2Ib7c9J5l1FbCn0t9IwMa5KuyUyGrMkZIJRvzT1xmUKNuyjcLXbC+QoZ\n",
+       "d0wcz0wYvQQWrNHvpST+pgusFL2z9Tr0BYn4Zr60dDpO4A5blaLsfWK2gyPO4fp8Gdxjtd7DfOla\n",
+       "7jGeVRavHFGXWQYIuNF6r2cD+sJQS9Je3JOcQUQzbUecX561TELdtn7eFkFGfSvuNqyPjXBFSEuY\n",
+       "0SywnlVoF+ermdmAyJJcAiYquplVFJ9zVxJpD0w0foYWosmlq+IbvyolRCpZsmTJkiVLluyO9uoQ\n",
+       "qWmcvUHH/FKy++EOc0l1tg+9rVId+uyF/CN3OjheScQWldX1twzXlLdvgg9yFK+r+f9IypuF1ZIM\n",
+       "hwYxp5KZ2cUu7DoaQakOCJ2e56tbtiuq81Z+PhJE52qvIJaCdLnpfLdwHMIuSQReIym7E6TJX72X\n",
+       "avNDITv3bImSxV/K7pf143EqEkzEMpPd50R0QlTpiSLOiOWsk4b1UqZgWu7cs0JbHox5BU1JjCDq\n",
+       "ag6zAW0tNCSZQRGyqylA2s801x5+0gL92e99t7rCQNG8hhzrmleSytM6d0iK3sjOedWEcOqbaw+1\n",
+       "/gyUz6m6fnPtyAiJuIeTX//yKoTp769dAXxVMazbx99+H65x/56TjW+fB2LtZz73LbHsX3ojkKb/\n",
+       "GpCuQoj1zB2mO/KXQKJOmq8RshONhL9vEepfr/18G1RlvfZxv7Egz3DE/TrunZxvyARw6Dyv4NDh\n",
+       "HgpySEJvL/eJaK4inJQaULL3/hiucfPYAzXyi1Cnp12QUPix3/+vxu8+kV/gWk7OpsRDKfti9oSu\n",
+       "XRwnTeMEbEqmrFaCkmIslkRkZ2tYuNe5rD81QtEPk/cTUbV3Dl5WAJ1pNFAnkpK9Tm0fZBQ8oEcD\n",
+       "cBj+LqrTmIsagLC6CH29uuf3vwQ6OYuIx2kUzeWadMBYK0v/QYH7WYuXYAWV7VrWX6J0kyCizPGm\n",
+       "aFa5CX3CDA+h3aGNDMM/7oVsD9XxWbaFnEEBIonSUBJFPRf4vlKUnkFJMu96SFIo6I3+ySNa1CqA\n",
+       "LgAAIABJREFUKe0amP9TPDLDcuwMLdEsQZ2BCHWCUuYIkNJnVw7EMOY6LfXZTSV+Vcyn/M1SkkH7\n",
+       "joFaiuYy7+sg6/n6UvQezlhCpJIlS5YsWbJkye5o6UUqWbJkyZIlS5bsjvbKXHvTNKOaR7eTJo2N\n",
+       "ySvP/F5dJq6pNDujmc3dHZG0fi7xMTWrZvDg8rwRFRSyJYtyKWNy01ySG+eAefNqCV1Sl6URcvgG\n",
+       "bolrIey2gEK17iVcBqq74VURCBiq5dSWGmuBXUGE1ySvA9sgvlVCxcMsaSzuk6oNn9Fg4r0YM4Xq\n",
+       "+WmcHWPm5HB1urncmLYLelPqMpyoQbPU1pKhE2WrCNnnlcDogOAnVYKGnorqzhjcDYXqgoGMqWLH\n",
+       "jBPoVMiKXdEzQav3TQGuZyEywge4SjIhYPc9yJYzxV4QRgfVVgruoKLyfnr2JLjb9lBCH0Wz7Aa6\n",
+       "UKpO/02f+SYzM/vggyex7NHDQFQvxS399GX47Wbrbjm6UV+TRMqvXwZF9W+Gj3OmY1YEHZ1Lca38\n",
+       "Wv84tHXtHXubhXv2svd7N0Gp/zNvfSyWvX/zG2Zm9gXRsXp2G9rx7PgUv4tf2QR9pH7y87YgAufD\n",
+       "kmysiaSjUrickLEOTAprZrZ/GlygjbpRngdV8B/5tu8yM7N/8WOuGcXsqv3JicBNA22lSd3tcLdK\n",
+       "poIKa4yqvVOVWssYqMIIAF2nuD7PKBiMCRG6xcVr4b6uL5wU/2gdxsKToyugN3BLdbpOgYDNca1u\n",
+       "fM7hQiZWDXLyJPpcq4rBBv7TaoM5OUsuznnvB1JvqMQ6XQmLfQW/4IW4R9fo/6rw+zrgXgzm9ykD\n",
+       "obodJLMDXMD6NORzqkPgw3Hvx6+oxK46ZnBzq7YXx4QJLeIIV7VJIvk8ZmCQOg2kJcSi+DwjEX8U\n",
+       "faoRz6SDqIOzTpPo4g247mkvyuYg72se96hoL1pZhuAdJ5Qvnz8zDUrU93SUpPHoz151vG6QWWPQ\n",
+       "9ZxuTFmLV0owWlpCpJIlS5YsWbJkye5orxCRymb50rJIAF6iSkqsjYCQvMIWgBgGQWl47kHenGNY\n",
+       "K8+ir8EMIdVrRaKoEAEJZ8xI0ZRpkPaQKC2tqbDrIWGzmCFIVMeWdmGHsdvuYtltFlCFw0F232y3\n",
+       "1IlkRyVbk/fYA+HoBJkwEHWnmeotSLRCdieY0s/QPNRXAJkCqFemEsAT26Why/wxCON+dNwZK7GV\n",
+       "SrVDp8gh/g46nrir9p3WRKXe0nfJOUiORCKVsB+BMxF9jmoCoqzMIALhsMdggxnCihPNkEse14eL\n",
+       "VXJPHj16HW0REjXQjBdPnezN8OxCd1BA7Mrad8mX9wJKwJxrZmYtAgkePAqyB7mQODfXgdj8/tu/\n",
+       "Gct++Zd+yczMPvs5R0m+9A9/IdQXKISZGaOPj72o5+Nak6i9f/rTnzEzsw+Q86/Yem6+h/cD0vVc\n",
+       "2nqzCuP/Ue5teL4O13hijtzeFOEaQ+bX+jjyCTYyxz/5emj36d2AAnUCYRwxnogCm5mNQKkOMk9q\n",
+       "5gQTNI/jfxRV7KmldIDMe+yEG0E9HuK3/9b3/EBow7UTtm+BuimxuUNOwEnqXoHYXQlywjGeC5pL\n",
+       "SYJJpFs8k8ESmSDqUQqJuiWJWkLYD20g7b/5+qdj2ftDQKKOgvBWGb0DgjAeSXbmeNF1nWH9Z8Lv\n",
+       "V4pSoS8ESKB3QBHWEX3SCu5N4vcOMgmqTr5Dv6+ljJIcxQwQRp5YQZ97ymQImsSsBIoIDpAEYA7V\n",
+       "bHBkZjzAIyB58KJMi1yrgjzJJMd17VK6xc7kGI1k7BkQiOcpxppK55yoVC9rcn8Mn0+3gnQdmadV\n",
+       "rgkpoLIR5JQqHZK7dfYcMTvrzdJH0jmy+Qko7nEv95rIlaBUU8lcg7Lwi7flnCVEKlmyZMmSJUuW\n",
+       "7I6WXqSSJUuWLFmyZMnuaK9W2fyMK+4M0jizzJ17UkolXC/JP6SYHa4RLxz+zIjYS30eJ5YLOTFe\n",
+       "d6mjNL8+3I0Ct/K4Ciq2M3JwNvszKyxMCaMgYooWCV2bpQikEKnXOkUXESF+ZZEzeadokUxwX03K\n",
+       "oWafZUt3q16LatNZ7pAoNUByucklXUlMBi3uzgH3opqxHukKE3ieirV6r+NfuU+E9oXDGBOz1kzQ\n",
+       "7N/lPK4U3Sm2QWBnXlZJ9AUTnvY6dkhUFtfeOCdRbleqBN3gWnKfpsWHONYKdUFcBqL29sLdbRTt\n",
+       "Pty6jtQa5FXqYr187m6kFdwjO3G3Udk6l349Au5/8dI1qBgoMYrL4gi31CDJjT/76M1w3fffNrM5\n",
+       "wfSTF6EN1+La3lMd/NL75CUGe7t5IGXQ4hF33+WDkLT5U48+5ccdA3n9Afpp6p7F7xrMl6P5PXnZ\n",
+       "B72nQdxDRyRLHmWcPtiF3zy/Fb0n0AxaSdr7YBvczJ8FYd/M7Ef/8A+bmVmH/hxaUZimpptoZlGX\n",
+       "KBNlZyaB1eWUI7GSAJgSPthB3C3M6BB/oK5AELBPknic61otCvhruKCna0m4i4CWjbhAe7jxKuEF\n",
+       "1AWU3aHjo3Ooh3t01KwM0DTSdmVol7I3XG9KA5DgKhMXdNTemmrUTYKCSDcRyghd65msa3mxxChG\n",
+       "LKStuLa7qK0k7mO65eHjurl2bbMyD/26lmCLsaAbX4N4wvebjY9dztm9eKn6I4jtku2h4LNlPLPu\n",
+       "0outMnbMoiBzYoi0EEnaDvrCpFpxGKfbjWog4q88O5hU+1wieWrgFfJMHHoq4J8Rl5RAHb5wZLKe\n",
+       "V5hbpSThLjUa5IwlRCpZsmTJkiVLluyO9krlD879Pzuzg5jhNMs0PI5RzZTSEaZ7htjMt1nN4Rfl\n",
+       "FxTBOfMpolPT8g1V35L5tRLqbxFqeXV/WJz2w/UIdQEiJSTirANyJ7/lDi5XWYFs+SZO4l0JlCwf\n",
+       "dBdKJVqtTPijCq++SdEbxR+JYu1Ite9z5H1R+2b4Me/TdAYRU2XzM+PEd4KKki2vRUQsE4SpoiRF\n",
+       "g2vJTj/DLmTKdKdLZquEEDOIQULCGf5bmOy+CE5IqO3/x96bxNqSZddhJ/rbvfb32VZWVlbHIlUs\n",
+       "UkXRNARDEiQBNmRZsOmZPTBgGzYEeGZ6JHhoTz2ybA80EAxZJmiIhA1IggSDIin2pWIWq5hVWdn/\n",
+       "/N3r322j8+CsdfaKfy9TxisSnzbOHuR/GffeiBMnTpyIs9baa1OMOp36W/HOrZetbb1fYak4tRxR\n",
+       "PCz1qnA6AkiExAttU4HafXdvH4RtV5cegTo/9chMLejHEqe6JxYGZ+fPnHPO/eDdd8K2r3zN13+7\n",
+       "wD6cc26CFeZyZehXnt3y7RXx9v79V5xzzj1414vY88qONUL6fSEo1dde9d8/vbBV+iuoU9keWp88\n",
+       "A2IyGVunrCFA328MObmVveScc25T+XYeHt0Jn5088s7i5d6tsO0TCIFXC0Oa5lh071W2X47rXtKv\n",
+       "l7CHuFeZ2/sXXvXX+9/++s+EbS8BlWyA4M3XYlcQrCAkrRz/5qXUS4PIXJNNmLyiAlzWx0xkPksp\n",
+       "8mZq/MAdGveJ3musGCAIRolzKPYMERmlfmyN5yaAXxW+8zYbm2NY/y9Y0sh+057zn9ynmDtStQ6h\n",
+       "eFmQlhrz1ODZEfpHk2zwBQj2k0FdV6LfUv+zY2KT1MsjmqJzJxJfMkE/NrDuaNR+ANdkde4/u5Ja\n",
+       "m8sa9RdTQ1+nM3/cXua1CpYRU7mf0inm2kbm6bVv+9VS+h9JA7kk5cyRDMKxMKjJim1aV5ZzUTES\n",
+       "/wlHixeZk0P5x21LDkX9O4xJPtcHTA+/o+MUu2ukrl6NGn+btVRFQDUCOvw75xyB1VIE8KPZZ2NO\n",
+       "EZGKESNGjBgxYsS4YbxQjdQQffH/Ds0Ptg05Qz07yTWnJYHqYYhSDG0/h8cYmO8FCEmOz1X9AHzh\n",
+       "G64ei+mX8kZMgEVWOuslDdb8W3C9r/WN/NvvRrj/Cu+5ikgRdSoH1ai3Uz37IHOQdH6sMDfsO7Va\n",
+       "oKZCuX+m8Mr7dpc8p59wzqX99vs4+6mT8+dve9EPBG4aK0ddLbLC/ErrKsJoL2m2V8ROEDmu0/NC\n",
    [~700 diff lines of base64-encoded payload omitted here: this part of the commit adds encoded binary data, apparently an image embedded in a JSON/notebook file, and contains no human-readable content]
+       "MMveYc+zwN0L4kciUt77/0NETv/E5n9JRP4O/v47IvJX8fe/LCK/7r1vvff3ROQ9Efn5H3WMGDFi\n",
+       "xIgRI0aMP4txVbL5Te/9Y/z9WES0xvmOiPwOfe8jCcjUpRiaVhrymstBABuIiDeK2JLc9lQVqBN7\n",
+       "q8yB/kwm5DUHouqqNmKbigEXXomShL5ATXYHpQIpct2asmyGVXXHyBH8slS5WEQkwakVOz5Noc2K\n",
+       "yDDBTVc/LaEPDQjoaU/vsfDf89QnCdRhB5JzUInyPLc+znyB4+f4186h1jLYHfAQpfYpE/u0rJmk\n",
+       "FtTDipCz9VK9C2k1jT7m0u0KpbBblGnnU0KEQPzPCWlscF1TXiVuce1IMTcBQT/ntUKqisW0IgTx\n",
+       "PIGXEysG68opJ7JnDrKvKrKH3Ybr//HZt+28qmtoE0kigCjriIDaN2ElXrqwv5yIsOpd2PRW6j2d\n",
+       "BFmDQyp1v/d+IJQPjbVzbw5iLSGYE5Rxv/ud74/bDg4DAlUCOTg/t/vq1/7a3xARkf/8v/hPx23/\n",
+       "+r/xb4mIyNd/+/fHbU+efkNERE5OjGz+9ttvi4jI44+t7R3Gybe+af2k6vUZ4NklIVITEEzf+onP\n",
+       "j9sWQHX6h9avb0LtvG/s3nm6CXICN28awnbrbvCs29uz8aSoo5KTO0IL1KcyeUEBhidEugFBvV/a\n",
+       "ue4fB5Lt3jVDpD74RpCTaE4NuTt7Hq7JvDKETTmzLRCpGdVaHB4F9PG1T/+EtRNIb7uwQoX+PIyr\n",
+       "hFS01yAPp4TIdFvIJFDxyBbyJDNFM+geLlBEkFREdk8UkSevS/TTllDqtgn9VHpGfYHwEXKfYsra\n",
+       "4Jow+m2q13YOWmyyXtM83YY+KQsrCtAMQ5qQn2kPBwpC80fIHsen6WqsCmG3Be/VxYJQ0jbcCwk9\n",
+       "ky7OUIBEiGiDoqiBimcqoOMFPElTytK0Kk3hKNOg835GUhvqisFpB/WaJU9WD/SNXS56IGxMsFaS\n",
+       "ucrZ1K21d38vjPV5anOiplsW9OzqgU5fkHWiIpsJ9WeiD0+xfurgbVnAa5OLiJSon6bsdbjd+UxE\n",
+       "pK4VVaOx3qraPEknYU4g4E78j8Cc/l+TzX0YUT+M0v7D6e4xYsSIESNGjBh/RuOqiNRj59wt7/3H\n",
+       "zrnbIqLmaQ9E5FX63l1suxS//VvvjiXqd986krc+O33R12LEiBEjRowYMf4/jQ/fXciH76rMxiWa\n",
+       "905c9UXqfxSRvyEi/xH+/fu0/b9xzv0nElJ674jI771oBz/3l98aiXsiIksQLMvS4LkG0PbgDAus\n",
+       "oJVRFIZ3K7Q8JbXxPRAvzzamGNwMAfpNQSxlgplC9jvaGYBxUyKsr9dId00ZxoXhMCnbOsCojlW0\n",
+       "FYLE9/LO4NQJSH81p8wAhedERFRlc5dY2zPAshWZG6shs6fzcSBxKgGvo8Ex+vmK0PcvG/9qapPT\n",
+       "km17mYDZKxmVIfhetT1s2wa/HUAKzVpLT5R5gMc9kVOVbF8RiX1ahmux7kxbRlMLA5sra5Mp3aH7\n",
+       "bqDOmxLE3OBaTCk9kCZQUSYS7bOLYBCck+HwEikDNi3tkdJYN0ZsTZEOzqCs3DQ2riYwXJ2Ulh6a\n",
+       "IKXy0YN747bZLIz1jFS8B6QMUkpLnj0PpN1qRkauVfjNEtpBX/rKXxg/+81/9A9EROQXfuHnxm2/\n",
+       "8Rt/V0REfuoLPzVuOzoOabz33/9g3Pbhh4Fsvd1aHkmJ4puVXadr89D2Aor5BbsYOCWAG4k6B8m8\n",
+       "pVTIvR/cExGR23eOx23Tadjfxdr6v1ViK+VqyiJc/9ksjLVyYmkfTaMUpO2mIlCLhZ1XC+VxR/PJ\n",
+       "2f2QPr0gZfHXvhCMjhePb4/bHt/7AxERef7M9KbKFBpIRThXylhLjmOsSe/q4GZYuyasmH2KuY5y\n",
+       "YC0I5S3dz6rp05BSeYnz7XHzZo5TS6FtGY2rUdyOim2UbE7TlKRIm7IDwBrzc+uoeATHQ8ZeBiLH\n",
+       "p5iTuK9VvXpFfT3TbaQKr+rhjgo62g7k9cbms0GNfDUVRgUrZaHnQCa2uE+5sELJ0GVqxQ6j9lJv\n",
+       "165tw3XvOtKPw/5SKJz3pCKu89pOunGcs+wcVO8wyyv6IoqHMkrBgqDO7iEJqB/9wGbZSIGD7pLs\n",
+       "6POFtqeFuRgoRSQtSSsSE39GyuJrDG7WgFJF+YxoMett0AqrlPpApu1KGeioX1WWLU0unz87hVRQ\n",
+       "VleD6rC/0Ka7bx/J3beVruDln/xPn8ifFj/yRco59+si8s+IyLFz7r6I/Aci8h+KyN9zzv1rInJP\n",
+       "RP6aiIj3/lvOub8nIt+SkJD+Nz0nk2PEiBEjRowYMf5/FD/yRcp7/6/8KR/95T/l+39LRP7Wj9pv\n",
+       "KwtpabWSjWRrWhnghXTTGmGtgDnTdPLOuE095FJaOc0UkVqaAmyP8tguQVkv+UDp6pO39Xgzdc7e\n",
+       "zBOUAjfkSZfqm35u3el1FbNDrAz/KpqUUHmllut6VnFX/jOXv8PPj7jW0qBMviS16w4rzJ5WKV5J\n",
+       "hiP6RO0FMuJ2vg8CLpXGqoRDyYq542rCzjUHUbutraFKBmdC/xZq7w795cUIuylWRlMitmYgirrc\n",
+       "VpUTXbk7Q3O8V7V1Wrmml0tie1zH8XsEyWnJ72pj53/rTkBfzs7MLy8HKXRLsgbq8dUnhr40HgRI\n",
+       "UsDOIImwxfdKKnVe4BrefuPNcduj+wH12Z8b+iIg7H9CxG71uDs9JfI21Kh7kh3R2o47d4OEwXJh\n",
+       "379Yh9XycW+I3DtvfVpERJ48sdXZxx+H1eKXv/zlS9uWS0M6dPzfuXNi24BiXDsM1y4jwvoFELTZ\n",
+       "zMbaIXzfVgsbEwtIKHz00aNx2+tvBVK+bMgncqIohd3PEyByut7rd0jk4Z6YnRiCtIXK943rdg4P\n",
+       "N2FF/sEP3h+37QNNOKlsQH39f/+fRUTkzc/+9Ljt2h3U4hApViDdUqAoQYnrIiIHx3dFROTw0K5/\n",
+       "vwzHb3qSbpgEUu7Zqc1/MyAxm42RslVRPCls3NHdLiK796u6QvA8mY1uD/bLTpETQhUqsObVc1NE\n",
+       "5AB+ogUhAgXmmBYPgKekhF2BZD1NWRIkHL+jAhxVqHcsp+PWaJNt2wCJVSKyiEgPAnZeKRF6/Gic\n",
+       "QwaakxOQoiualEt4WG7Ia24CNDMj6ZQRuSFl77oNKJ1H9qPgYhtVYqdron28s00nMiKMZyNiRsgh\n",
+       "kHC3U2QE1I8mqgF929cqYWJjrYZPbFPQ3K0+iTSf6uOH0bQU/rMdaVI0rRZl2Tw1wY/Ozu6LiEhV\n",
+       "2bPOY95lErnrw/xH4Oco+8BFYcNYZEUK/HgHGSg7lDK0+oKIyuYxYsSIESNGjBhXjPgiFSNGjBgx\n",
+       "YsSIccV4aabFfdeOWkcilr5KSR9DTTtdx0S0AJV6Z/CcmlUyG0uNfjMyvG3bc3ymWiREBNeUGiN4\n",
+       "IJszsbuFFk9LjDlVgB4IWveqCkvnqAaVg2oXJUzixtcLaoB6CxM83sE8MnMExePzZmNppLpTwiph\n",
+       "m4AvFYHvO8NYizT0YUuwr+tV2ZdUZIHV9mTkqXvZMXYctUpIlRx9nO9A2+F8aqjNdnStExfSRwUR\n",
+       "Jqea0vNMoofei+PUAoj1BC2rHotj11gcT/WufE9KyEC2pwfW3o8e/pGIiGSFpUeSIaS+BkotJmqW\n",
+       "TddOSflMdkyc6qeFcX1+atfkZz7/F0VE5NF9UyyfTQLxvGZ1XpC3b90ysueDj4JWUU4GneUkpLvn\n",
+       "ZLjbqGknUjsfvf+98bOv/PwviojIH/zBH9jxkR48ODAC+GuvBbLz979v+lTTabiPrx0a2XazUW0j\n",
+       "GycHMIFW0n+9tnF1fD2Mydnc2rvdhLTLfGqprcOjPZyzpfYuzmGkvGfXROkDe3T+ExQN6D2U0MXR\n",
+       "tN9iSelRpBafPbpn5wq9pQMqQGlOw9j9pLbzuXsztPnZh98ct+1fCym4azft2qUD1OhxbW7etNTi\n",
+       "/hG+d2Tfl61q5tic1Ov88PTxuO3Jo1AAsCHyto4/1rtrkOaqKpCOOT0ykpiZAoAUGN1sJagKPc0d\n",
+       "agyeEnm/UM0sSpWpgXKFx9MBURZ6pGcuNpTGQ1oqJ4P2FumhtKNCAVH3ClbWDsfqaT5p23D+KTT4\n",
+       "WNlcU8COiPWDFgORtqEKuPWOjo9r0veWWtaMmn+BjtKANN6GnlO1D2m/ji6KA3k84aIAKNAXVBSQ\n",
+       "qQkwFeoovYPTeD2KsvhecB5zcA66R09K7Gj7s3Mba0cHYU7oqVCshd4WaxV6pG8H4WtXXPpehuvu\n",
+       "s5DG9r096zLVO/N2//WtPuOp/7Nwr3t6TrVdmHdZq6yFVqP0pCTgSMztBRERqRgxYsSIESNGjCvG\n",
+       "S0Ok0kTEEcHL6Zu7Y6QhvCVOyMOnw5tmQ+WnOXzKBrHfFopIkU8cOHHSjorqLE2gXm/2BruBrIFn\n",
+       "SQKsqraJkQh1VZUQAd1LWPWlORPasSJSDztCulQSoWeyOY7ld8jmeDNnVEXPj1a/SihmFdsEKzZd\n",
+       "aOQJqf5qmX5Bpb4gduf55Td4ZvHpomcglKpD3yVCb/Uou3be0LRSDQVBJtxQWXtTh3O4EJOwqK6B\n",
+       "HJkT2TNXtWn2NVTCIBPgQVTnfk/VayqssFhqoYLnlWttNeKS0JaUVnUKuuWMfqX6EcsvwCeNVqm6\n",
+       "Oq/7sN8vvPkXx8++/V0ohlevjdu6bSB0tkSAP4Ky8CePjFh8fBSQkw1dEyWIbsh/7fpxIE0rWvTK\n",
+       "K3asH9wLxM43iOzeQ6n93g8Mfbp+/TrOy041xyq+76w/FemYV4YwrlGePoWsgbZbRGQFpK2aGPqk\n",
+       "91pP40/V2dlDbA0PxYzGs6JPRWEIo6JTGyBdAxFRJ0DV2hVJgUOJfn7DUKJPPgh+7q+8+qlx2xl0\n",
+       "Ovq1lbU3dVjpz28bmrS4OEU7rU3Xj8PnHRSuc5IayYBOdaRznM+uXWp7g/lnduOufQ/E/4tHpCyP\n",
+       "OSGbGqFZLSDUkzJjSQogwo7L1WtFxKidkDNhV4qRtM7ICTIMPJvNgdR7/HtIWYWnQIsykslpobK9\n",
+       "U5KPubDtGPUP466he0IclMKpUGjAnKmODsmOOwP+Jsa0umwMYnNXN6h0gvVrDeQwI/kBJe2z/ECO\n",
+       "QV4vw70xkNehonrdYPew67X/KSMy/nUZVeLiAUURk9T6uBmW+CX73gKJhHr4dkP3FeZfJow/O7sX\n",
+       "vk/PDn0mejE0S8dRS44CPdqXEHIomMcbJaB7KmJSXj0T21XihiR5hhbFBinNyZifeT4ZW0zPKUfH\n",
+       "e1FERCpGjBgxYsSIEeOKEV+kYsSIESNGjBgxrhgvLbXnpduRvVXYVeE/ESMg5hnBjnWA2+qWGbuA\n",
+       "R1kXSYnFlNoaQHJVJVRWfR6JbUS6TMEA3yEgI2XWEIypBsZDzTLe0AwhaLsfU3tI8RA5VNvCqUg/\n",
+       "4HNKWfQQ3xrYIFRhSTZcRt/1ZGRcVlCFz0Jfty2R85AqzYnEnuVLtJOIiEgPMTlPYVaGgpW825Ni\n",
+       "sAzhOrJmx2ggirRASer0GxCPMwKqV+vQzvm+HUth/oxMextVRaZr5xMllhsErjCvpgXXG/v+aydB\n",
+       "4+fs3Mje80OoIxNhulPiKaUFR9dq2jYSmimRoWax1+afFRGRDx789vhZBYPOtrHUpir6TkgxWYnn\n",
+       "16/ZNvVK5oKGAn07P7J06wIOoqdnIWV4+/Yb42enp6rfY2kEzUCcHJva+gV0nMrS9ruFPo+qJIuY\n",
+       "8n1LhqeaZmvqJfZv56DK/puNkb2vwwR4uzayqWpR3SKDYqcS+KQ3dnAQPt/ft3SLpvnnc2hs0T3U\n",
+       "4bNqn5TgoTFUZKYjNT0O5PDhk4fWzjfeEBGR03vfGrd5zGOZszF+gjZXpJWWoHimQHp0781PW5uW\n",
+       "IFFPyY0a81m7tfu0BIlfKC3UQNF+OrG2C1JENZkLJ5iDskTn0Ms0AiZHlyD7cnpq/A0Vj2ixi6cC\n",
+       "FJ1jyW96TK3pHHNO2l5qLjydsGI4NOsoPaRFLBvSEZtg/vNEAdnU4d6qiEucZVp4gPbSHJ7pM4um\n",
+       "vwyk9JbUybNMn1NGAVGF+umEi2KQRiPD3d6pKjjSXi1pTG1RAOCs2KVM1bGCSORINzJVQRvN2l4e\n",
+       "431gSgmKoXqi2aROje6R9iUCvGrFeep/fXbkPbcJzwlHY1c1w0g/UnB/5FTQNeDZqVplLHvVoPDA\n",
+       "U3sLrR1jvTM9LPWJpmo9aVbp0PaUKu/dD8ecIiIVI0aMGDFixIhxxXhpiFTbLSWhlVmHt+RJbqs/\n",
+       "feslrrWkWOlkVI6o6t2MEqg6qyeyb68lm3gj7sVWYYMP25hYmAhQMkLOPLpsS75WuZa9LkmBe66/\n",
+       "IXVYoFROSz5ppaNyCT6jMmCgGh2R2FVVgRYE0uMNu8+4rB+rQyrTVWmJ3AeCLaueuzSsyBvygVO1\n",
+       "9Z5Qwim8DhtCpLbwdVNUUURE+bFDclmBfEbFA/sgAG/asILJcpZ/CMenBZnUq7Dqm8xtRVqhFLsn\n",
+       "9GEFMqRvrE19ourthDCMRQPh+ydHhrR8/CSU089mdF3VL4tQrcGBFEq+hlpq7wbyesKKLHVG8m/r\n",
+       "cHKHt4A+bZ6Pn01BAPXsYejDtumeoSrPnwXEpqEbpcEqfo++9wxK4W1jfTzfCyvnYh3O8ezsyfjZ\n",
+       "rVsBudhuSWoBitHLlY0T9TrznlFSlTEmn0SMhaoioi72nWOV3lEfqqxAW9vxU4zxprNjnRzN8JmV\n",
+       "yZdAX4qJXeschQ0FwQ8qhZFNwjXJCK0oVZWcSKceBNhhZcruRyB0PyHF9A5E+aM3vjBuu3gYlM+T\n",
+       "jIotoNCcFJdL0kv1PLswRDA7DGR89p9brYCqeLtRVt8OCvjFnpH3S5S/z2Z2/NOnYeyw2rcqG3SY\n",
+       "pwpuG4ojCkKOFZ3K6P5T4nndWZ8MOEbbENKgx6T9tUDHLurwW54v0lYJ2zZ3laNjg107lf1Y0zjd\n",
+       "1iBbE5qmGYaWClXUg1XJ63Ra0o2+gkSiHi4jct0QrslQ2/E1I6HzJe+7Hwx1LiGd0AJpaqjYwUNq\n",
+       "waecacHzhLzxnH6e2Db9uyBESNXAO5oTmgGFUoWNJ49nocMztG/J11NlgijToPIPCc1dXlXxCSXv\n",
+       "0tD/BP6MTiWkZiE9fPTy/jLpe4uiJNbpyKfadiKxI3PUkEzFKH9BavvOX5Zf6OjeelFERCpGjBgx\n",
+       "YsSIEeOK8dIQqWFoR2RIRCTxWgZJ5frg3jAfZxQw6xhpwfcI4Snwqp94ysciD6w5Wv5+lqpY2uXV\n",
+       "Use5XyRaW3rT7gCZpIVtq7dOd0xnrV5vQNC4vHTQVT2hVFg5saxBiVx63TJKpuWnDN2BZ8BCbz7H\n",
+       "fsOxysJW5h36k1G9NFOndSprxuqU+7/RFRzx1sYSXltMy6CCeNTMsgQSgLaw/IHDCkv9+ML56H5t\n",
+       "VaGV40VF/K5NgfOi1V8f0Jme+iQHbyLHdV1vjXuQ66alyQAAIABJREFUgfOTkIO4A0qQZ8SvwwqO\n",
+       "lyVlrt6N1J8S0IFJeWPcNr8eUILHENCc0jms4THJPIsZEIGWynUz9OuKfL0SfC+hdt66FVC8B/dN\n",
+       "JuHps3CBPv/5z4mIyL17xgdTngsLWF5cKG+KPSR1RUyr2ibcE5MJiakCJepa4zeZ6GXYduOEUDWs\n",
+       "vg/nhqotFmHlzvepiiOqgKSIyEzbTOevIqHMUdM2l2O/E/qiHEmScFDkihRRpFnBf+8N4zJtH0Ie\n",
+       "ojCUbO9W8Adt1oY+zHFueUUiqZ0ivLhfCFWRGnzAmSGycwy8zcMPxm0V0J97f/iPx2237rwuIiLP\n",
+       "n5knmnJk8tzGnc43JfiSzIdqwBtzhIiorAR/b7VeXdrmFLkivokKwV6s7b5THZGJhP2uCS05BHK1\n",
+       "JKTzDG3KCCXScdVu7VjKh+LniSJCLWUY1E9U7yFWmhmQYcgZpkLzWqYjYa7vtrax60K/rje27fpx\n",
+       "OMeUELYewpWiaD7xYWvMu0Nvz7URCGM6mj472VcO++12JCmAEnWX51gCPSVP4X8HBMcP9uxqavjf\n",
+       "TgnpxTVmMVedID0b4Olzb2c+edHzfIrzAh+ZfPUa9EmzJakDZJtS0qRJVSSZRaIVzSMulyL2RU7n\n",
+       "w2jbCyIiUjFixIgRI0aMGFeM+CIVI0aMGDFixIhxxXhpqb28KkbStYhIAii03dq2CmmkvqfSXECG\n",
+       "xKGWZhMwSC5dVui/qoyouG7D35oB6xkKBSycEenSJ+q5Qw3Hb6ZU6i1pgN47wnYbqHgLpbsU0c5T\n",
+       "VRjn9kKugNJo6s1WEzlzq3WdDIWKKvAyZBtgTPauU3J5qZDxQKlVkGyXG8vFFRUUc4nE3um1oGNp\n",
+       "qa/3lpbT9CnLOajasSdV8AaSEfN5KNdua9vvViUZCPaugTdPibDpoF6dCo+dcI4deS05kOy9t9SK\n",
+       "oswH8FB78Nj82kqQ5zMiB49+fqTinEIBvWms/HlSBHXqnq6xpnRTKrLYA2l+swDplyD2Oe6JKn1l\n",
+       "3HbnZkgPPXzfSu21GiOhvp7B9+3i3M41Qarq5k0rf//owT0REXn6NKR7VKVcRGSJdNuKSNSqjs4l\n",
+       "5DoWWxr/mvpoqK69nOG8vV2TDvfsDMrmljqUUcOBvRn1GOz11oHEW5DadZqoJxyPcRR70LUrME7S\n",
+       "UYmbTku9M0mJWtQLtKSctcqa0LXLZlAg31jKanJ8J5zPhcmOtFBKV9KriEiB9KHuLd0nuYLTUAww\n",
+       "JE/HTR6l48XExmmzCL++ft3SyOcY2+w1pmk5TssqaXokT1N6TlXOd9J9TXNpm0NfF5RaVQJ6R7Iz\n",
+       "PVIqh3NKFeEaDyhJH0jF+/kKnowdpbFx2zfk/1aAKjCf2TzdNOEe2zY2npVYzs+YHMUQVk5P9AgU\n",
+       "kbREbB8wj/Y9p4z1X1bMRr8SfLFZq/+gXbsOKe0UOUVOuyotgot9Usy1Rcnfw31SXvYf7Acu/wdV\n",
+       "gtNtSOmnQoRy/CZBWszzPjCfqROFiMhsBheBzMZ63eE6kkq4pk3ZbaCDxIijeUJ9Ui3dR/2Kvmjp\n",
+       "mVCnmBP5vipRqMC+fl1oZ0cpYE2L9lx4lvDEcDkiIhUjRowYMWLEiHHFeGmIVFHuiaNSxqHRN077\n",
+       "TtuFN8iC3si3KI1dEGGzQqkxC8fpinlg8nQaUI8OXkKeypp7p2/TLIyH0vCdl1EtCbbvaZmkS9nr\n",
+       "Dt+mUnevIp6pks2J9OaVsEvtVZLzYKvaNUQqZ8dErFZRMXqD3oAA2BMpcQsy8kQF/xyjb7oKo5JP\n",
+       "EDBzEuT0WIk58hA0HyIqAMCmoiBETEmchMi0NUjxVdhvURKJEwKbO+X/WK2uN+TJiLZX5cG4rczC\n",
+       "qjMlRMI8/qjvgEgs67DCn5Ykl6BoSkbEeqz+JyWVy0JoNCVBuBxO5BURFpWUnYmt0pbPIDCKMnhf\n",
+       "2y05A9m2W9g5NFuMXRaEVZtEOtctkMWT64ZmPHwc3Nlfff2NcdvJSUAsnj0L0gjsq5dloW2MNFmJ\n",
+       "N8mPAE3d8Y5UnzIqddZig6Ky8+/gnaZoWkPEbnWu31BhgZL3J1NCtTDsG1p9TnNFfUm4FegIF29o\n",
+       "nzXwH0xL+36PMumMpC60DFto7tICjIT8P/tpGIslzUlr+OqVJEngl2FsbZaE5oJ4nkGstt0aOb+4\n",
+       "GaQW2ne/bvvYC9IZ9akVEQyrgD4lhbVzijHbNIycAGEgkUZF8bxTQUqSOlG5CkI6VEy0I/kN/ZvA\n",
+       "P9luIbBLBTi664T6Kcd5T4B6TwgQnOdh7KwIfVoNYV7rB7uu6r86m1CpPYpW6o3dz2fLcN8XhR1k\n",
+       "swj73ttHyT8j/ZCaYYRCxy7vV4WGeT5VX09PWYcORUM9ZQ5URqTH82QYGJkJ+yX91FEImed/JXun\n",
+       "lE3oW70m1Han52jf02cbeye2uMk87nW3kxHRIi5rUwHx2YrvJ8z/PRVF9Q5CvAnNcZgTevauxece\n",
+       "mSbmfmvRFouPdhgfFT+8R6FN29RtUahFwtEe2SRF8EREplN7trwoIiIVI0aMGDFixIhxxYgvUjFi\n",
+       "xIgRI0aMGFeMl5baS4ZMytTSXhn0ZhqCIgeo4iYErbfw1duQZs5qreq8nNpTZWnCAAHHj8rBhb1H\n",
+       "qup5yqIhKdRUE9OCUdhPCApVMnJHcLMkl2FxN/rYqece+wVeViJXZJUVc/W3DK1mKnfO8DjOoyO9\n",
+       "qUH1NpCym0xsvxXOYY80e7abcD5FQSlQ1UyhdFsL4idreygBcCBS5noTYNyDGXmddeG6a/omJ8Kk\n",
+       "9pyj80+60P+LtaVC8jzso6R06/4kpKyWiY2TFL6DQ84KxEg3qC4Mp1Y1BcjGTprGJSJmnmtRBJMT\n",
+       "Nd1n39uH9k+7NvLsmL7GveAojbhaoGCCSMTDNpzjtLIxOepikYp8jXxfR0Iumj5ZLCxVrKRwJR1z\n",
+       "2kvTA0omFrFUEI9JTdXspLtVn4hSS9o7nlLKxUTTfEgj02czVTanMaTFC44KAAqkwgaazvQcj44s\n",
+       "tann5ok8rD5mY3pma+dfgahcr8jXDymblIjtg6YnSYo5hcfdQPdpMcE92dj+elVRXpu20+L+/dD2\n",
+       "d4IuVXphKQtVvnd3TLMqffCHYV+ZtWmDPll+8MfjtvlRSClu1nRPIN2Zk96XjhmH+zklasUcRQGe\n",
+       "U3ZQ53c0/463EWlGVVCZ73ouSnDYB6vn43pryp6OX0KLadiyYnm41jx3pOiLnvShpuiTvaltew6n\n",
+       "hO3axniK+aHRwicujkDb1aNTxHxK+dmR457oaY5XFfEs5QIYeJe25AmKVF7Xql/e+JE9szbsSRt+\n",
+       "O9RMQFdfUSqKUW0nSje6F/yl2ot5xj6ZSIF1qlloY1i9bg8OrChmnAto7qygqdZS+r7eqscmUUXQ\n",
+       "9po8Fh2eN5aCpJQhnutcvKXpy4zm0wxjrd+QthRcLjoa0KoVKDsK+FFHKkaMGDFixIgR48cSLw2R\n",
+       "yiSTLDHSaa6Oz4Q+rfEG3xOJTN+0e0Juzi4CYZAWiZJl6qtk25TEqKvEg4qIxfDS2fZWfj3A9b4s\n",
+       "SQEc5OieZA2ykZxHxGa8YQ9Ukqlll2500qZIdsl8IkY63CVMNzufidhKyBGJsADJr6dVikoLtFi4\n",
+       "kTi5FJPQx0VFb+FtvnPMcOBwDh3Lk2OF33smz+NY5GGlFbbn5+YnV2AMtFgFduR/WEHFuCV59Ayr\n",
+       "yvOV7bfuwwntU6FAhRLyxBapsjmHijUpul9sAxKgXVySEm6HlVlGpEslYDZ0Xrq/nZXmSGgn/0Gs\n",
+       "yLakdqxFDjXGSYcSbRGR6RDO4aS0ld4pfPU8ESFbRcISKgkGyXvFaEquZGu7nioFUJH/nEZHK8dx\n",
+       "H8mL1l66+rUtJdABlinQ3zpav42eaapOXlgfFriH2Ydtgnbm3N5MXQnoXsvD38uloW+qvJxS/fkG\n",
+       "pHxF2HgMt1j1d8TsLYCYt4ScpxjYrM7c4+ZKSAG7A4m5Xds1Lk+C2ni+TwR83ccjkMdL6q8nwePP\n",
+       "k0xF26v8ApXkF4GAvmYRadx3N05MEuEMchMNIZF6nUr0ccJyBZhQWzZABXI0pXNNFSUmpX5VUeeB\n",
+       "0gBZYtQl1eMB4d/SXHOxCUVGFyvrwxRjrG3sHLIKxG7GXIBmsUp1irHYDnaOOebpDoUljOoMQJC4\n",
+       "1F7HGo+rflCUkqRTMMlwWf/Qhb8X53ah5nOgfiNyxKX5imqNm0bZn4QuyQgi03llZdiYESKbTxR9\n",
+       "IUcPv8W/tr8BqJsizZx9qeCTyYVCFZ6JPNcMnUon0PkA/RmzKiJSoECKpjhZg+yujhLsg6eJEEbQ\n",
+       "ypFkbp2izh5FeTxu28IzcKBrp/MeS7zsJIVeEBGRihEjRowYMWLEuGLEF6kYMWLEiBEjRowrxktL\n",
+       "7fVtKhmZFqdQjE0dQfZA9C82puKr5PE8M7j/fBXg7gWpKB/vB2XpggjtCXSGHIiNCUG2RaFK5ETi\n",
+       "g2lukhqxsURqifwRpQVk2VIKTM2FW9JgatXw1is8aXBuDqJmTrCnItwpexHjkmVEwFUSMYuvqrKz\n",
+       "Jx2dehvaMp+F82l6Sjv1AU5mjZdMoVrW7HB6nQyydsniT35NHK5tSnhzBkIfE5p9BrIvdJcGUsxN\n",
+       "kYrylJ9TpXpPxF7XqmYKQct5SKMVpKK+cYHEyxkrlypRH1pIhemFDP4ysTFNlDC7pu+Ff5PUSPR5\n",
+       "FvZTkOFxgzxLwUrx6JMMcHrRmxJzAvK4I9Xp2QypBdICSjH+2Aw1LzQ9QaRY5E8aMma+ffuWiIg8\n",
+       "g5HtrmYQUiGUitEUGGtLKRmdCegV0uYNpcU0fcipcj2ejjvVZAongetJJHavJGIa6/2YbrB+yjrV\n",
+       "4iHNml5TcBZqvrtE8cJ0aiT+Fn2XEhF1W0OzqKO5A6bSAx0rhQL6lojVzQUI6KQ35s/D3MVGvprd\n",
+       "zq+hDynd3GO/w7nNicU8pPnO3v3GuO3wOIy/+ralhS8eBCPl+b7Nia2/XDygqT1NleU0J+j0VFG6\n",
+       "VYn3XNijiuYpFQ+0UOwe2MkWYysjB4oeWlUe6/w1zQmTSbg+R/7WuO1RE/pweWFp7DJHX9Mc22Ou\n",
+       "6T2natF2SuOoHFOqNAZ2p9DUUmLFLkoV4cKaEvNTQlqBbtD72c613ipVwNqk7hpaCOAc0yjwDKH5\n",
+       "T9NtPHcOGLM9p6fUtF5Ib8lDg5HU+1W1vO9IKb/9E9eONBN1LvbOKBhpEtJnrCOoZsVcbJAgBZfT\n",
+       "w2vAeW/ofqpVNR1uA6w7plQZdnZIExSWsEF9EYp9ytRSe5oXTKlQR+eJ2lNRTvbDX5UiIhUjRowY\n",
+       "MWLEiHHFeGmI1LK5kFlpK6OhhxIqEcYHrHDKglYrgBNyQrPm07AiO1+Tsi+8k3r2zhvCKmoK1KHt\n",
+       "bFWhC7IdxdZUFcNNRV1AImcCeN1iVcOq4J2WBNubrqfVkcguEbvAiTNhcTqSk4lsDRJxRitI3e8O\n",
+       "AV1JwcQYdKkSm8P+JhNbVSiJkrnETgmAhJIlTv2iqPwfq548J1I6muJJAVmlC1hNYKixigQ53hGx\n",
+       "VMt5p5M5/SCskq4dkIRCq8raNk5KKCCzd6F34XxXWyalhr5osQqZlIf2fW0nLQg9ToxXP6Nic0rs\n",
+       "SFXJoH5SiY2eIDH1Eeuw4iu8oTXNJiAd6YS/r0rI1ok1EFb2euyUlEpK7Ur2Z0+uZ8+fo22QFSGv\n",
+       "RSXZtoTSViAPs4r1wUG4n9iTr4E/JKNPWjOf53xPQO1+REGoXBmoLssKrFG6zwTgCcrq11TWr/dx\n",
+       "RitJbd90xvMJ2gH5hZrQZy3XrwnpWp4G5G5+QnIB6PeOEPGigAL/0uaODB5uKakor6Aof/3VT9m2\n",
+       "p98L5zUHqrNvKKmqjSeE9LUo4Z8R0nTxLGybElG9h+zIxamttFUyZMdrD9cieSHDFkURrE6N884c\n",
+       "9WuvKtY2dnRuK3NSRYdMyIKuXYKiiR7ISUFzvToKdBckq7Je4hxs7lbFckbOtdR+IDQlxZxVTmxM\n",
+       "Kr6ghOac9tGrJA0zu90av7L+coPO5zzWw7mqv6CIyIC/HfnPLZdQ1C+B1uQ81yMjQKh2gSKngSYq\n",
+       "9aekW13S0bvQ5unNJtwTkwl54mGe6shRQOVu1MOvJKRpgr5jBXgBItRS6qZtkLkh9E0LtDyhT5qd\n",
+       "qUj+QB0/GmRV2MVhQNv4+acergUVlOXITtHtLFN4kraJzVM636WelPLziEjFiBEjRowYMWL8WCK+\n",
+       "SMWIESNGjBgxYlwxXlpqb1Ofy7owLRQlZzOJtAUpLU0NxlOSORs/7k9DOmbbWFpiC6JkQrBwg1RR\n",
+       "rurdHcOu4V82VOwU2ssJdlQCeEkGlfo+WrNBI9qesRgHzhHYImf6FPZkddwSmjop5dtUlblhaafR\n",
+       "LJgNgqFOTCQ+TT12mk7qLRXTAVpOmfQ6QsV2XhOQcXtSFt6HLsfQmgaXBwbOCLiy5xNKNyaq7YI2\n",
+       "CRHGM02jpoZPz+bhWJzu09TWTlpSFO4mZW9o22wXZ+O2EiTLHMfqyXg2LwPs25BmzAZpVobW9Tr6\n",
+       "wcbfegh9kc8NMk9HUialUaB3UysRlwxK97Lw282KtM1SjH8yKO2R5mHdp1wrFXbcPcNvKoK7Ly5C\n",
+       "6ulgP9wT5+d2LNWCYsJmiVRhy6rDSDcdHlpa9DEMkq9fNxNkTR+yFpWm3saUJRGRVVHf03jRlCGr\n",
+       "qGvxAhNQlQy/XLIq+WW9m65RXST0P+dxcT95Ni3GWNsuTYl8bxr6rqHUpuruFKSjNSrQ05RQNeE3\n",
+       "Tx58OG47xJhoNyjiWFnaIUVBS0aUhQukSpOBJhTMnVnPhRLoMzJyVfIwFw9oSm8kMbPCM65FQfOa\n",
+       "qoh7KmzQQgnW9tHCDt6faktV5J6gV9FB26iaUnoGbgtcAKGmtW1r/X+GvpsURgHQdPQgZBANY+6U\n",
+       "0meqZF5Cn4xuNfGagqZUnKaeuWAg16KA3rY1DbTy6DnRjel+ooWgoKFQXbyU5kscIyPT+AwE9J7m\n",
+       "/xKpr5wm4ARz4bKzOc4lSD2T3lONtNhqTYVCOO9MNeOIsuETnaetnWvo862WVGzQqrOGjbUS6euB\n",
+       "rqfSJ9i/3oyJQzu4ACDR8yctLIfn6IRSdlrI5mibG+DKUJG2l+xqy4mIJNmL0twWEZGKESNGjBgx\n",
+       "YsS4Yrw8+YOukw15TuUgpfqUa9PhtdXYKjnLAhIxNEQUx+rgeGa+WuebB+E4BPto2eUGyInv7C28\n",
+       "R+kq+0oNWpLfGdKgStRMtnYgG1a5bdt0WH0QYU2RKEXaGC1TwiADQl5Xn6yOjDLQhkh8Wv7NKyIt\n",
+       "Sa9p9Vf83+y9S68mWZYldI697Xveh1/3cI/IzMjMyqKq6C41AnqMEEJixJAZ/4ARo57xC3rCHCQm\n",
+       "/AQmzAC1oFvd0NQjs/IZERn+vM/vZW8zBntt2+umu7LQRSUX0tlSyG/Y9332OHbOMTtrrb022q6H\n",
+       "EL9zdm4taidlj65Lr5kF61qnkKwTsOpeUQ2rtoHYl1b4bYWVS8S2F/JvhGtlIWAPYftQGfp1dYl6\n",
+       "WYWtNFf4e1eZ27GiKFlkKEkEF/V1YdsegGYUuIaehOhpJu3kySaiw3V1NTm2T9IXi4X1P+/lt8ej\n",
+       "iY3LEit3as8JF1xCvJkO5KIP9/Zpadsy1HyaCNXRldNIx3K4j2NE4wli3yUhUkek/SsSxOJsRXAS\n",
+       "quEWqYv1wpC2w0H2cXbG9QexqqdyA9r/GTnU72n9LUbL9LpqFiJjRawu5c5ZX18tKSnhDz5zzq5b\n",
+       "x5BzzrVItVekbaKaWjWE7+uckK5ckIaEhK0HOKCvqK5gV+8ena9zzvW4J+29oVnqij68/rkd98ci\n",
+       "PPdHue5lave1PZd271/b/LdGu78jlHCFPnMglOzsTMbObk/O+ppkQ6v0dE67/9hWZK6nRwkAbpI5\n",
+       "s8jJFR6MwETJI4kKhB+lqeMEeJ7G8UuM/+/vbVx/u3/tnHPu1Fl1hGi2FSDBMJ4P3WDos1ohxGx1\n",
+       "kOh4IoYBSFSpVig0KaE4hPOUgOOBPvG1jqOKyAkRUQG+s+fJiP428m9Rd28AWupSQv/QxzIap7P/\n",
+       "eUK1a7WuHKG5E+5jRkzMvgYim1p7am7J2JMA3WEcKUpHsOqE5KV+JIajUZRw3jSPf3ZM10SdBde4\n",
+       "TaQtCEx1OWrmjRNYhZ7tD+TfKOKEAbXpsWvQ5+SSqiJojkVT0buA17qvtrcsC2LzECFChAgRIkSI\n",
+       "f5AIL1IhQoQIESJEiBBPjM9H7bnONY35PnWpwG1jb/BkCnFYSxSYioHjhL01BArMiQJbTELftCRA\n",
+       "HyHyvduJEFb9KpxzLgF8X5CLbAxh+0D03DgqBWLXMhcIJm8ddZRuCcaMMwha8bWUiux6wJ4J+WP0\n",
+       "8Ptg19UWQsGeKDsPeLwjH4+hg3id/GHU0VZdrx/5eWhBZWpXpXamR9Te9NG56/5aLoaLc6+JAokA\n",
+       "h3vCdgdAsCOg9dET7eC08LO1a6kC/E84MRfkCt6Bvi2JKiomoQCH2M6phNi8aaUvsrC5R/uoJ4xz\n",
+       "zp1qFYUTPQD/qGgiZ1+0z+7wYd42dkI9JSmJLdHH4km9UKxjbdfSh2MWRwJifyTiRzuV5I8Ue6UR\n",
+       "yW9mIWPrdDSqTCk9Fe+yiFfpUabbtF8PRHds4em133OBYDkXvk/696fE5nzcP/x+RX42KmhnHykt\n",
+       "8sp7UKH8pwo0O6IRlitpExVbFzSwU6hdmbJZreX7Dc0rutu+Yx8t9P8Ho5Y0UWS9vbDv3chcFJHb\n",
+       "dLd7L8dSEfutCdHjG1C2P3g5bzu+Ebpre/Vi3vbw+hs5T/KgG+DAXtVUgSFTXzDyQFNvr9lcjwq6\n",
+       "wjOK3eGX8Eo6kdxAfX48+ShFmjREZoETkiGYKoqRDKI04rOV+WhdI6Hj3Z314aYXWqqh+W+BRI3j\n",
+       "YPKREZxVQi7emgwQkXg/g/A8AgUY0ZzcAXuIqRh5rFRUY/ud6zMTVazFiidyQB/xbOnJb8vhukfQ\n",
+       "V1yMu9BKCER7deptl7I4HIk9lJSlonjKU3JjBwlKz+MeruhslYWPE52KqK09ntNVbW2tw6M6kSwF\n",
+       "gnr2hRtnHzmq3rBa4Xv0LICQfLXEvSEJxBG+WzyHRJhHo97uU4R7Nw3W19V7bKTxp/eEn+dTwrTh\n",
+       "xxEQqRAhQoQIESJEiCfGZ0OkpmFybWSrpWYQQWXe2upjXq1HJiI9nLBaKwy5mvDGHlFdpyKVFXFL\n",
+       "K7K+fezs3VIa+kKdYCcWQstqbexM2BxNuiK39/oZkSLH2BSrrqHld1W8McdaG4/S4NWVl5248bau\n",
+       "Kxk5J/n7UV0lqPL4nCYsJwZaaeghigwOu5RCOgKl6agOXgyB30Arg67X1RqtKnoVoJuIu2nEWZhT\n",
+       "/bWuGQGBrgXadAQiEFMadAShIJtjnyAizckVfxbF0srx7g7i+cT2lzogQpOtZhOIQU+t9BfP7rij\n",
+       "toWtYGqsyAdSUWalXFDLTvle24kEm0h/zjNCmBRNRSJAQuhPB5fxaTKkyQE5aHpe1UKUTkJ9r+nP\n",
+       "JPZu4J4+EcKpyJEhOLQy/oPvOGfITfzIWR92HoSSliVSjQnNUIH4o1qL+M0Kq1C2P9Dj3nirK6eO\n",
+       "8vp9PndGE/X8FksSwOOeFIRcj738ZgP7B7aQUCA2pWs9AM1LEk6/fvyv7Bh2Dh3NP/dvnHPOnRaG\n",
+       "SJ3j3O+9jZ09UKwV7DcSQtWGUdCX6GT3td6JeH1BVSFqpP9fPbNjvX/9FtdobZxnisgwSgA0G/eO\n",
+       "74kiR0Nn51THco1Jbn19itV+hFolU4SP7Bd0TqY+EWHuUkuEs7W1/8+Q0JNEZCsySA3NujaGowVK\n",
+       "Gw02/jMItcfexn+GOp5JwnYethfnTBAux5Vr4LE+jHDbJ6RrUBR1JJuASQXTdD8HxofwEyCgZvtD\n",
+       "KDnGPddkdZhjGdVLUOtzJJx2RKp/ElOty1H6ST+yeBtWA3TuWaG2C/7Rv86Z7UjdGPp6OqBSQM1J\n",
+       "CVo9g6wusJuusXNK1jK2Pbnyt0BTtQ1zSmxQe5qWkkjUidwT+qVAYE/zudqdjFR/Ua1teur3LfWZ\n",
+       "T0VApEKECBEiRIgQIZ4Y4UUqRIgQIUKECBHiifHZqL1oStxITqQVHIOLyCC2BNj67NLsnEsBCx4b\n",
+       "85bKIMb1JBhLAAuvqOCxUhuzFQoJsRPAmOyiPqqIz5GhBCi1iWDUVl3UqfBipi7WBI/2cPFWgSVT\n",
+       "kWpAzaJHN+i5sD+JnMswshAPkHFPIu5BKUiCZ3XfEBguC6NHRgilKxbnKmUUEfyMP8uU+DZQiyMZ\n",
+       "f2TwGTrtSewPF+Fo5EK6cs4NLjEnx2R1Ox9JbP3ug4ho1wsT23ZIQOhJRFg36uNkcLNSGh35gqmp\n",
+       "yQQvHKZCFRUfHtFo+Ju8cNQ9PopYgKn0CLlNq/N0RwU/0U4pKDPPxYiRqUAa8tm9mKmYCH29dsTt\n",
+       "oTDrRN+L4QDMVJ1SZVrwd722PqG0HBe0Ved/LsaqBVJZRJ7ChGXJAnhQZEzjqNhc/aSKwr6v7uVM\n",
+       "Iy61vYieUzH8OLDYV9p6Q9eT6CAjClJ/q0JVdtgevdJYJMSGKLhvmdqV6z4eiNpFn0moTfaD9LvD\n",
+       "b/6PeZt/9hWu1fr41gstXYOW3VAVAdUzrz5YEoPSkt3JxP6brVDfd/fmD/Ty65/JtncmXu9xb9kp\n",
+       "Xp2ltZniRyUDkDDCczL6QkJeO/FM31ACTkPtqNt6Fa9Tv4eLtUf7TyRE1oLHJfXrDeYx9pZ78yDt\n",
+       "M3JCEejzvCQaCXN2TxUNOsyBOhdnREWpO/04sgQD0gZKVJpQtHeg+X/Qou2k1FcajZNsZtE42r2n\n",
+       "fr2Ay/uxtuefFv71NCd1uE8ZeRs6PAvYbynLpE2mk427CvKJgtzeF+ifC/jdJXRPdMrqaV6b0BbD\n",
+       "xNQeKEi61iIX6nlB59kgaSEvjJYuMZ/sj5Kc4emZpDTfyNIGr8W4yQMO1UMmenZOqT7jOFUFtDA9\n",
+       "Tz09Az4VAZEKESJEiBAhQoR4Ynw2RCqOIxcYpJVtAAAgAElEQVQ5cqLFS2JHKZx9Km/1Kb1pqwNx\n",
+       "dbJUSx/LSmyiNPUFVt+eVphFj+NhZdpRym+sIm968ZzTdUlYWtfqNs5IE5AWEht6L6vPslzT9+DA\n",
+       "fSfXVcfs2Axh6cCICATLsb2tqzi3obf/VN+q6e1bNbN9SysyoGQjBOOeUnhTOMYeBxNsejg783XV\n",
+       "Xto6p3RQraE00spV60pxqmunteho9af17xTp4NWarla9NwRDV2JVbavvCKuJurX27HpxQ357+2be\n",
+       "toTz+Imc8rMCAsxO25rgH5wMC+ZVu80r0g3EsIM3RGICchHT/pCT4HJCHVV4Wqmth7d7kkGIzKnG\n",
+       "usI8VYT0YaUX0Tixc7b7rwL4aeJ+om2sSBd9BgSPESn9XFEQ56yu3mZjgmn9PKPxpwJQrutWFHDF\n",
+       "BjLELura17dbS0BRJIoRFP0tIyd1rX2MBbiP0S/5W46vqDKnmsdY/Q+NTQo5LBEqQlqrWu573Fs/\n",
+       "vb+WygprEsWfnX3hnHPuWNn33l4LcvInP/5y3nYAYppAvHtLCFLxDG1M1/rwQY51cWkr+LGW+WS5\n",
+       "sLE2YHIryBV+Rgfpvqt1Q6djkp3IgRbFj1ANuRc1JfY4uHcnZA/tMSfHZDGRlTI+IkK4FLlqMZ7v\n",
+       "BusvByCBntBfRanOCP08tNL/DjROEngGxNTH1DInJQF4W4l4v0XbeJrr+knr6tl19bjWuiU2BddY\n",
+       "k9VEW2tNOhqT+LvlMaYO+Uh2aQkNaTHvnEhYXSBh4DH4LclbMbEeamcRE36yiGROHJ3Np1oNI6Ua\n",
+       "q8oEKUiV55/oL/Ss0XHUkYdCguO3hAht1GqCIJ2+kf/JCaWKYVPRtsJIcK3BHNlIbUO1cyGsPxwa\n",
+       "+p78Wyy5Tq4ijARnuY+ZMEaxPhUBkQoRIkSIECFChHhihBepECFChAgRIkSIJ8Zno/a89y4i744B\n",
+       "BQ+rwSDGfBR4uhtT/qFzzrmYKItTL79ZJAbtnho4O5Mv0wixXRqvsA8WwgHGrlkwB2qHoe1YCymT\n",
+       "rwSEjx0VvNUCtTHDgxCPn18I7Hx7b/uoG/jTxESFJCrsM4izhGdR9cixWNouInhyHNQfJvtom0cx\n",
+       "5mkiGgki7yQ2KrLqBOLOiB7p8LdRJ85tCjluVVHR0l6/R/cYSnUVZ8tJ4H7O4mWmLOX80pgKZKJA\n",
+       "9fFkhUyLXKgfdgeuGhHZnpp3tm0SGqUbjRYexhWOIfBwEn88JLwnfzBQASysVBfnR4LFVvqkJxfj\n",
+       "GALMiHx08l767MwoEWWh1Bqf0sNeruGRZxqg7YG2ObRnRYh0UUDs6li8Ksc/wpWYPYaWKALMVNjx\n",
+       "KNd1e2siZvWMUj8n/puLFitVy7Sc/tYE8B/TcyyO1++zsF1pybK07ymNqKJz55yLIMC9u7O+8/y5\n",
+       "0G0qaFc3feeci3WuSVmIDQH6wDS+XNft69/N25T6ePvtr+dtV1/8COdux3j51TPnnHP390Y3X17I\n",
+       "tm/e/l5+RwWiT3t4FpGIebmW/v9wY8WQGwjUyUTcnR4wnoluy+EPFD2im0ABq2fWxHIDUCFEt+3g\n",
+       "aF8sbD7JU5wz7TfJ1EeIKBh07oETfzDfqtb32dpo5DiVPnFfWXudGpn/Puzsvja99GefcwUIOe6q\n",
+       "OLfrcZpkYA212sh92j2IZ+HtNUkwcrl3xZIKOSNB5Uie4eOo86l9rwa1Og3Wd7UwMTF1c6WIwctx\n",
+       "mW4/VJAP0DOxBqXsiZZWoX5FSVnpTJWR2/cASQkXIdb5iSpaaMWNCRQoe9Z16nY/Wr/SwvTef3xO\n",
+       "3NdOuJ6ifDZvy/XzntrJ6bPgEvviAu1yT1JKdmgrUIudzYke895APlI5+kdCc9ycIDLG9D0bg5+K\n",
+       "gEiFCBEiRIgQIUI8MT6f/YFzzsW2MkvxVtkP9qZZ1bIizmJO11afAEJaFP2gFUEKG4XEsYvr47pe\n",
+       "MR0/GtSuwPZR4VTSFaf6ApGiumpdp6JMsh9AqmtcEHKhbs9OVljr8ov5s/d3IopuOl5BPJdriAyl\n",
+       "G1K0SWpv1YcTxK60cBydigNJPFzJCifdyoqMndh7rMwY1dvtdzhfW5HFEFseJkN1nIryyYG7PUGA\n",
+       "2NGKJIMont7fta6VOtyyJUai+6OV1kKdvwk5cah151hE3cvxm8HsD9qDtCMjPInXFTlQleRjIfij\n",
+       "wmKwFYgpJTpC29GixjUQlE+MHEK8OZDFxziv3IB0cQ1FrJy4htlqI4LipCB3dKzmq5Mda3884vtX\n",
+       "87YU6eHqxO+cczcPWLkjKaDI7R6qyPsDpdorwrpYrGib7G+zNrFzDURkpHunDtFZZivizUb6ojqK\n",
+       "lyXZDyhKsbLVoDq1M0qmx2f0S4XnLGzW4mEJib0HrL5v3r/HuVFduRo1HAlpOe2kD5UZ2Z8gUSAr\n",
+       "bZl+/UbQkTU5QB+uxYG7vLCaeAe9T5Rq/nCS8yvUQuXycv4suhYk8Hi0eTLLZTUfE9I0oS/c3dn3\n",
+       "FNmrCBEtcC9KajtNBlBUbyC3d62NFpeUVo8mjnhdrmh6QvMv7klE1QZm13SyLpnnZ9w7AnXdCv3+\n",
+       "bGXI+fpujc9s7rzeS9IMC8U95k5HKNFqKeL9ZWb9uchknLw8E7uI6gub/373/c+dc4/F2TOCl1q/\n",
+       "ejhKf4qpKGaqtVvZKR4Iu6M2Hmpp4/xMvt9TEoOOJ0b1Na0/peeZx3Onb4kl8Vq7j5z9Md9OhHDN\n",
+       "iQQxnRN+O+K3NUHdKjKvCVZTVCclm4qZ4SH0sW3grF5S9RAn80hDFg/tqFUZ9BnKbv9y3CSlyh5I\n",
+       "PGvJwqBXJLCx/qfC94SE9VGhc7KNiSSy8/tUBEQqRIgQIUKECBHiifHZEKk8Kea3TOecm9RMkzQN\n",
+       "DeoZVb2tqmZpREwaBaAkTWv765CKzvXsIiBSKxh99cTBRkgXZ6M95c+rPR1rJW+pZW6rxAELFkbO\n",
+       "lCPvqZ7dYi2fp15WUHlsXP3ZVlap37/5BR0e10N5rRGMyLKMVhUHGCJ6Wn3BCiEnfYd3sF1AvcLM\n",
+       "26pO9VicBq2oy6P6e7FWC7cV5O4oq+Q0tm0VahNNxDMPSGuNS1ulRYlcT+wF/VhSDT3X6eqLdEOw\n",
+       "UJi8oS9qphmT1UKBWow7qnUYaZ+J2MwU5xnJvRnJQkPLxKWFHb9GzaeypLZWqwOCrjxQyo70ECl0\n",
+       "aC2tMCMHRAhWEyWhCoutIJKb2GwFTkB6VrQiVwPBjvru+YX0zzgx5CBBWvXD3lbTqivS2nGbjVkN\n",
+       "HI+EOup1oVHOz88/+oz1ZaqRYo1eDPPDPCczUays2XRTQxGxN2/2tBVaNkKa1CahpfGv55LQfmOg\n",
+       "jVfPTI/hsZ8SNfmayuYaD33laW+oZgadxZt339t5vpCxu78hk0y1GCCNyIRVb0soweVzucfvX9v+\n",
+       "FkCJNmoEzNojoNoPDzbW3nwPLdUzm5POzqR/7Mk4URHxlKb9CPf9WFH9OeiFRiAnMc0JM0r4SCOG\n",
+       "zwgRUdQlydgQUzV6hCbjb09rerVC0fNgBEd1c5zCX2DM5Nz/gGw2LWk0gVKf9tavzzbS/uul9YnL\n",
+       "i1fOOWMf7h9MexVDv/X65lfztg7Pp35kVAVaNmY9MqAqLev2Fvgt1TpErb0emtM0Z7sO7dekB0Rb\n",
+       "NB1ZAmEu6Oj5N0DXlpKdjZvblrSRhdZOpfp7QJN1hJH3qGs6GH1STVhFczyzJEC/WIeZ4Rh3+/fz\n",
+       "tjJRJoCskIAc5dAID9PHliz8TqDm2FnBNj0tfmv77WBdMtDcvU2EKVqUXM/3j78qBUQqRIgQIUKE\n",
+       "CBHiiRFepEKECBEiRIgQIZ4Yn8/Z3OUuoVR3FbMNk8HebhCosiLH0igV+K4juFf5Pq7/pLXWusYg\n",
+       "63Um8G2J1NwoNyFkDRpj35vArQEUWZAlwABX8CW5A2cbgTGblty2QSN0BC33oKrWqINVkOjSDQLf\n",
+       "P7/88bzp7iAurhPXdQJknJI7dgrX14isHhKI/GbKzrn5bt/fvHbOOXeR/8n8kQoFi5LSgBdCrRwq\n",
+       "a2utUxdRTbwBdFfLObyAWwcSm7tJa6IRtYfriJxCwWQ1AUHxNFGqK+jJcSL7CdhZRIQjPzv/M+ec\n",
+       "cw8ng4y7XijIkWDsvkNKbCzX1VBbl6AHB0eQOYTiTFk50IcT1ZWKQRWfiMZzo5wzOysvMqHtFEYv\n",
+       "PIt+5d96MMpqvZFrbclqQ8XBnq4/VXqG6E431wS0a1yv5RqVTmDaSYOtLn76U+kzTMUp3VBXdp4X\n",
+       "Fxc4N7vXmv5cUq0zpc9ZKK4xU0wkjtZzZ2pP6aaG6kROqdoUEGUBupGz+dNEqRLpawOJkxskcUyd\n",
+       "OdbvjjI/jK31id01zm/g/o++SyJyn6gol2xPIDbnlPDFSmibBhYTOc0hWpO0JEuIBFUEWBR+AmVR\n",
+       "kASgQL87EY2Xwkm/o7HboRZar21Hcgttd+5rWuOQHfPVHqKnc1LbDU8UjDpxeEqy+EOqdiQax892\n",
+       "BXZdS03iofPUpKGetmUgpu73Np88azXJgZMsZExebOV5saRkh3aQ+788WH+9xfjLqV7iZiV077G2\n",
+       "+SfFGE+JluuVxeL6o0hkUnkG06gZRP48T2R4ni1S228NEfdA86TWCeQKAPoXS0WUSvSUFDOgekWP\n",
+       "e8EVADSxqaDr71CAryBa8gDx+ETzf4/6rAON8VgTFSZKPJnbALYm/Px3Wi+VaXRQwKQY0N9Mj54/\n",
+       "mLu9PbuPlSR+5Zk995Mk2B+ECBEiRIgQIUL8g8RnQ6SWceFaesu7Q220jkwV51Ui1UbLR3lLnLiw\n",
+       "kC6EeKUDsRmVn5tNzFT0u1o9nz9KkfZ/VxP60aJeES9hOxhtOl4RweCTjOY61EmaKJ3/BoZ5m4WI\n",
+       "2aLYUm4VVZtIsJfAVqBPyawO+0sy22+MN/GSVxW4tSkBQmpIGGPF83BrAtfzCxEPj7mtjM5Xmhps\n",
+       "+73ef4/TtRXRAJsAruodebRJyu/qmkJrW9pGTnBZyvfYmE8FqJry6pxzmRpNHk0AvFk+x36tO1+s\n",
+       "5Hrai39v3vard/+T7HdiATxQGpy7JyGmApwpWUKMlSARbWXHTyEYHXoyGgT60lGdPi3k2JFSc1cJ\n",
+       "6rByso+O6nWtIJQcqMFUqH5/ZyjJOYTFI1cr13MjO4cjkKVXr17Z/kZdzcn3GNVR+4vnzy1d3wH1\n",
+       "ZEM+RSdevfpq3qbC1uETNarKglb4WqdrVESUqsAjyWG5tO8rYsm1sXogAquljSetk8Y2CYrc8W8T\n",
+       "IKIJjn9zZ4Lxdn/tnHMup1Tz9iD3f6D6Z1onLiOksQXSwfUPFcQ9OzNReHWSe1IQwjYAgSqvpA+f\n",
+       "yMLAAznU+pbOGfrpKIVbDQY9Hb9F//N08xT9OlDtSg+YRFHPhpJNFJFarm21nkMcnxAikaGGYk0J\n",
+       "ABmSYXJCTtTgtG0I9UXnVZuMlOwyGswJe0KOs8WAYxJqcCPHOpBxpxqRpmS++eHDL51zzr26+Np+\n",
+       "i+fE5bns78XWkj3aVsbru/1v520R8l44AUdRlSyxdho79ElChNSKJCNRetvATgHn2XlDlXIgiPFg\n",
+       "ba39KotpXKE+aUMG16mHmS2ZVMZ4xjAinMGkkgXd0wk2CRBs91QTNkKiDgvgPVD/jGxiPI5xR/Vc\n",
+       "R7ANBfUJjwQlKnE5z3vdAUgXPTqVnWHz3arV2qV2rWq705DYvVyCCSE2J4bYfQ9GyDnn0i3ZfXwi\n",
+       "AiIVIkSIECFChAjxxAgvUiFChAgRIkSIEE+Mz1hrb3SkDXPjqJC94XntJPRFmhg8PELYnJI/jjpL\n",
+       "R+Qsbp5BBrcWpXyeZQIjbgmyVcFsfkfuwIDs29E8i1a5eIV05E+k3hYl+U4MJzl+TNTKAGrl92+/\n",
+       "dc4598WVwaMFsMqIIN6uU2E9u/6CgiIvpAU8U3KiShP15+nstzWE2kqLHsgfZw16iL2oVIhaLK2t\n",
+       "FWI9DVZrbaYPPEHb8Cxy5KM19Oqsa21cV6Ab4A69LK1LenjgsD+UwrfDZJD1rhbx/OXmp/O2AnWX\n",
+       "LrdGY13v/tQ559z7/V/beUaPKa1FQWsLQNspUYsFPJ1aEtHG+HyivhvB0Tqi+1/VgMfJbX2De6b1\n",
+       "xc5X5g7eAk7PyR1/B2ftojAo/N17qSd4fm79uULtvKm1MfHsxZe4VuvPKWhpFV0eDkYjKI3DItoE\n",
+       "YmwWoCv1xsJu/S2L0nlsa+hxleJLs+Gj7+SUFKICdPZ702PsHozGUUF7T9RSg/Nrib7U3AYdE2sS\n",
+       "wr95K9fYU20uBwpwJMdyHX+nnsTGW2nXU2WU1QThfX2w+6/0fUqygHk+Q3utlnavb3ciD2jJxV6p\n",
+       "3QP5gxWg1lqiZ85Wck5Na9v2B5ljM5pPlV5X3y8W8ep97SmJIEZiS04u1h7SB3aqn/2QEkqeAUU5\n",
+       "kdv4TPdBYNy2LBmQ77XkI/cA8X49MQUOCoru3R5U9Zrq5D0goeDtrc0JF/AZ2+0l2eZyY2PyFeol\n",
+       "vrk3WcQHzOc9+SJqVYRHTvmgDB9JRYBlJJRsEClFqdc40hx+lP1uiMYce60rae1fgm7riFpvIOye\n",
+       "qLKB+m1xAYAY1RscSSBUetKjrXtKgMigH+F6dF6TLEiqolUsYpKFHDpIRRIWisNbkGQW6gep35tI\n",
+       "szN3cWpXlQA0jzwQ0RfZ2w2eUp7uk9YAjkcbT/f7v3V/LAIiFSJEiBAhQoQI8cT4bIiUc/1ct8k5\n",
+       "q8NFjghugPB8pDf9NEP6ubM3bY+370c10Zw667LaWt4wM9RV4lT/CMhAuTRUqTqKAH4isd/gZAXT\n",
+       "teYA3dRyrHzBLsqywvO0Ste09/f3gqB0ZPWwQoXz3FNaM+wR+s7EpgOqmke00j9HqrlrGaVDSvKj\n",
+       "quoQEaJtWko5PaAy/MurH83bRqzwFrkp+zRNtmsp1RXfi3gBo3YW5KysYvCBROFznTisvnNSx8e6\n",
+       "SiPBYAJlZ9PbivT+IALh5xc/o2tVEaW15+Xqz51zzu1rExRXSGQYNTmA2msWW07sDr/Bv4YqzJ/7\n",
+       "j5MdltTH1kB/BtOJz7UgFekcyEIghZ1CTJYQmq7P6epffiWWGR+uDSXMkTSwvbDV9AkWH1FGtaaO\n",
+       "SLWOPk4hTtBfl4SIqF3Fbkdu30D/Nhtzdu4hxuY0+fVaxsye0s/X6PeKZrEQXIXivKrU/ndo6o++\n",
+       "x8P/AejU5TkhfLMA3a7//l76vW/kpqj7u3POba/kt/sbS2FXQTlbGJxquWfnz6x2ZgNhe8Ju/7Og\n",
+       "mqxDUFu0L2xMrEsRmV+/l8+WW3ORXyBNP3NkiVCh1ibNCYoqLZd2T04Huf6InOU9EIOps3bPgdhr\n",
+       "V9iSYF+tS8qFoQ96TiPbXatAn1zRByAGjDDrPFYuCaWax538E9McrnPNlqxj1qg8sUhfz9u2QElb\n",
+       "QmkaFSxzQhNE4b/+xipKbFFRYL2BsJsE+x6C/u3KEjCWC2nju5vv5m1xCrE1oe8F2nUgOwO1CtdE\n",
+       "COfMnmAC+paxJcdezr3PqIZni8/JamMd6ZxNiUJ4jk2UlDBgHllR8oA+Fvi508OCZU58oXGaw4qh\n",
+       "Ohn6ul5KQoX3LNKWHV9s7BmzwNx5qAzh6wediwj11MQfjP/FkpNIgD5SAogiVj2xOSo2VxsO59wM\n",
+       "JRWE0nkwKwm3e8vVFT6OgEiFCBEiRIgQIUI8McKLVIgQIUKECBEixBPjs1F71Ti5iURnmVN42CDO\n",
+       "HqI3hlb7DsUTiZYa+o89Y/TPnqi1/QG+UF+C4qJ9LDKhHdYk9r3vBKrNSID9UIu3zJZE0ff3AhVe\n",
+       "RFd2PaAAmonEphAbKnJ9/fDr+bNoElF0lxiNZr5PdizVzp6fWZHNJBUYtTsSjdbI34fejl9hf0r7\n",
+       "LLitsePjiQS7cN3mpIActGg7GLTsIGL03ui+AdQDU7BxArqDPGD6PsMxVAhMlM1CYHFPUOx8LkQB\n",
+       "13Cj35FnzLoQmiVjv61IrvvF9h/P23739l/JeYCy7XsSIrcC2Y8xiUhBiw49CSvhmO2ouHKEv3Py\n",
+       "RUomFI2NjW4Za2mLFM7Gm40Jxh0g7o7Mxj1oue3a9tHgnAdy7E1T+fxwMgqqWGHbziD75Vb6+y1c\n",
+       "tNlj69WX4gvFLuL390J3Z+TFo4Lx49E4SxV7K03knHMlBPJv781HpkSFAIXbDwfrV2dn0hYPD0Yj\n",
+       "qrC7JyrkBJqBz1OTPFhYH4FaaNob218hxz+ijd+/M9p3rtk62f0f4MGzurDx10D4XZHfnQNFP7GP\n",
+       "DuaCBbmS7x+EMhgru56HVtr46koopsPJOkADevbZmUkLTqD2TkQ3qmxie2Zt0mmVgUfecvo/Nsa0\n",
+       "WHWHOaEiwXxZfuxs7kDLlzR3asWAnHzk5mORA/yIk4liGyceFM087REVp4W5PXkxrUApr87MFzC5\n",
+       "EX+okc5zgM8TF0F3mNsnb/f4F7/5N8455y4u0f9pH+el9MkNjeGilL9retY8BwUVl+TtpN5fPc1/\n",
+       "mgzCam/4KMUqT/F2r6tS7vXt3mj8dSHz/5JF7KDbtgX1v17+PpAoP8o+duxepHKNPRk56bw4Vuoj\n",
+       "Z/erw/joKAFA+9hm8/W8SannlKQFmf+BfEbJDsdK5ocDFRBXXz6d97kChlY+6Fp21sdnAyUxYEyk\n",
+       "Obuyqy+WXatKGnp68KXJH8ecAiIVIkSIECFChAjxxPhsiNSpHh/ZBRRwj26drWq1vs5IK0IVHrcN\n",
+       "r3TwPUoTTSFQ9FRXTe0Ujr2gSlltK0OUC3LLJaVaV0BLaFXbA/06kLN2hPfRQ2Vv2ioyVgQNJyj7\n",
+       "w7+exMldK8cgvbZrRhXg2pvxxeYnzjnnnl+9tC9CvP1AKuYjVsc91zACwjOiXtSG0upVCH5/oNpQ\n",
+       "l7JaOdR2rT1E2eyYrdYVKcFEFRCDiZa/iiwmib39q8h4mjRd1dq6LOHOTIJJFSN7xyid/ObUGCJQ\n",
+       "tZpYQMgJFIsp1WS8PPuhc865d3d/JfuorA0XTq6fmtDp2iMjF19NP04zW6VGENmmlFab6IqRHHhH\n",
+       "9IEJyF1dUx22Wq4rm6hPqtUCpfC+/l6Ems+vbEWuSE9Hx6ohLGdEZER6tAqWf/RDs5BYr2Ql3JGF\n",
+       "RgPH4O3GBNB2D+2cVPi5pOSNvmeX9/kM8FmHY1G6NsThObntK3IW0Yq0quS6rq4MEdb7mHkWYMO6\n",
+       "gNDUE9A5de+PyMV8PgahH2qXkRH63TjZ3/HINimo60e2Bg4JECeyLlhBFF1Rv5swjmq09UhOzGqn\n",
+       "caB6eXqvc0oKULF9S9/bvBSEZff2zbxNEdY0I+QK4uUBSBi7s+tfbH+h6GBLIuolENOUE0tiHc8k\n",
+       "NldHbbbCQVKMhzVKS22jY5fnGnVi57k+UeTsRELxWtqupjkmUwdyEmrfPwgi+JtvJeX9cmvC8qqW\n",
+       "c1qTJczPXv4j55xzb+++te81gqq8WBnCvMQ573d2PUMjc2tH6M9s7QMrHK41mKutB6XmVxBbn+U0\n",
+       "nztFBO08tws40DdkKwDLAq6AMWE8ayKMc84NqDwyo2qDtZeK+NlWoHd4/qSGPm9XMj6zyc4pBQNz\n",
+       "URrqpnVX++6X87Yax1AktK65YgISm2icaE/1NJ7VTmeifqLP4IHsFDKgecyEsD3FpyIgUiFChAgR\n",
+       "IkSIEE+M8CIVIkSIECFChAjxxPhs1N5D/didWWmPiNxxYxBd9yejm1T315E7bwpBY1ORAB0fx+Qj\n",
+       "0oIW+gaFKsvCfF9awOiTZ8dWiGjJCbkHLXDaG7S4hc/L/cOOfisCwJhcsU9H8TmJZ82n0R6T+9gJ\n",
+       "uunU2dug0J9+LV5IZUneTqCZDsef27Eg6KzJFVjtW9KVnJPvCOIu5bg9FVk+gdKrGoM1k1kcSv5Y\n",
+       "KorkgsvqVeUstEBsQ6LEHBD8CNE5WXe4fhQIPJpy2qb0oG1zcNkdSdj+UIlo+MXyJ/M2pWoigmnP\n",
+       "FkKR1vAROlYmRN6PAk+XJI5X190iNbGxh2B1HIxGLCKhA7LUWkALWPedbdNunBVCAdQPRsVocVX1\n",
+       "aZFjybm8fWOeNREg6/3B4P43b6WvrYmCe/5c/KbqgY6Ptnv5SlzPLy6soG4Ouuthx/5U0u5MmSq1\n",
+       "xh5QRaEePB9D4jvykXr5SmkT+L6QF4zSR3ys+3s5F6Zn9VhMCyrLMD7yllEFqp3T/kFo/kIpSPKs\n",
+       "c4mMk4zokQwTUEPUgu42y63z3t0IzXPx7AfzthiJBNWtCdojUJpcZaFuVAAr/y7XNtbvboXiYMfy\n",
+       "S9CDNzd3dp4q6KX55P6tOOCnRGN5FepSRoPexxE+PhkVLR9wTsx2pxDsc9FodZHuiW5JcB/ZF7Db\n",
+       "oVIFFaH3JNB3zrmcvL1ubmV8HsnbzWHuXhU2125XMj63J+u79U7bnatHYMxMdu6atPSb3/1r55xz\n",
+       "P3plc8jy+b8rX6fDv7j62jnn3L/z5T+dt/3d638p+4pMgH95Lue0Lo3aez9JP9lH1/O2w0nOc4lC\n",
+       "vhkJ0VUy4cj36IgEjZbm7kUsbZGQBCMFtffIVx1UYeRZ0K9UGX2zj/E92QdTux6Tdk6SgRqeacfO\n",
+       "rmvAPrLCJBBlJvPTMicJBMT2XW1JDuP0Pc4dyWZUIHrAfNp2nOyDRCXP0pIEnzFVL/vjhCo3Ki1o\n",
+       "N3n0j/QdH0VApEKECBEiRIgQIZ4Ynw2R6qvBdQt7481TFfZyqjtSLkd7M5ywghpHe9Pu2wTfp3RN\n",
+       "OLb62PbXIsXx9lZW68dnhj7oyoyFpSNWi+q06pxzLd7Se3qDP9Wywt4QwtbW8r0FbVvmstq/P/7e\n",
+       "OedcSSmfDtYI/UB13QpNdTdUQUVvF1tb6SyQknv3YCudb19/I/tjoT5E0V6tCWgFN+tECWk6QbCe\n",
+       "RHYNHWpXxZ21iTrvcq0pdQBWh3HnTFgb13bf6xau6CrOJtWjCgEnFmcD1fJ0/3OsznpOdccC81Sb\n",
+       "AH2xkuvYv6cVCYS0l6uv5Xyp/WvYX3CttxTt3xJypSn5iSM0J5JVV5YSwoe0+8EZ6pTDpqDqta6a\n",
+       "oZQZxLlpbELMHsLvlByjPdqkpL6mScmRNkoAACAASURBVBFnhHRUlay+jy2Jx3Efr56JUJ3T2gek\n",
+       "tUd0rEKrAhTWd3V16knYre7hvE0dq0caT5pIoUJ1Rp9MbE71L/E9tjrQFWZFVg8ex+qpdp+22Z4E\n",
+       "2Oo2f2whrKa1Zd/ie5RWXqHGXEaJGk4rFJAAN4Htw/2NOTavzy/xGSG36H8THUPd61XsyqjCEnYa\n",
+       "VUX11ypUIFjYPRkH2e+eaidu0Lc6sh+YgBgNhOapuFstKSqyZlD7g4RE9NpnJpq7O6DdGQnVG1h9\n",
+       "xJzqj/YcqI9NsSa0wEKA5skSVgPvrrk6AVAySmbIUSngbGVj5+5uj+sj1gF1JB+5YqOPTUgi+Otf\n",
+       "/O/zZ+drQVBXnhgBpMt/9cIcu3vYKVRHe07EiZzT1XNLCsm93JPv3vxfdnzMzw3qyo5k1xCjxl5m\n",
+       "t9W5XD6/bQ19W+F56ghV6jqtQMEiarm3LNNW5PyRxQVsURLcpySzzy420iZJTlVBgPDfnew+vQNK\n",
+       "W5aE0i0ECR9IAK/O7svM5q79UedOMEeOr0vtjNjCQF3Mra8pm+Cp2sZc4pH6bov9LKh24N/3ohQQ\n",
+       "qRAhQoQIESJEiCdGeJEKESJEiBAhQoR4Ynw2aq9tJldT8dbFmcCjw0QC9AhQXW/Q/qCnTMKywX3s\n",
+       "T9MCqu5JAKtFdcdeDvzuzpzFVwulFhnGF1qoG0mIB7qPofi6FVHcy2dGAY2dnGc1sQBXoEoPIXRE\n",
+       "8HieCSzqyWMjz9TjhNsE7tiJwagqfE3IgX2lHl0kLO4bFapmH12XFp4dBhZiCozak9Buwm/K1K5f\n",
+       "oeJuNLw5yURYGU3s9yHv7ewe7+Ga3EPsGlOyQd+pOzq5GMOJNy9I7IxLrBsuJCz/sGePOlt3LXm2\n",
+       "LGU/BWDc5MLg+ZtbOGHXVLR2o6JLO9QRlFlRkAvYiPMkUXSP2zOSsNSDxlDRq1JHzjk3RPDxooSB\n",
+       "Bv5lLbk9fwXvp4YomCUokLa1+//710IpPyNqYYF+rzQKu6NrIzK1ppSa+jk559xqJTTH48oCoHsf\n",
+       "ib2xj5QKyTZavSD+aB/aF3sSQi9BDx92JqxXOjAlymhEm+3IFX0Jv6WYlKUPEOifbWS/dUPzCpzC\n",
+       "B0oOULqdhe0ZkhzY72kFT6fD3rY93Il/04vn1sfu70QWUC6tQ5UoIFsheYVpPI0LKsY8U6Cl9T+l\n",
+       "Jc7OTBagrvSHk028BcZbRPRhhvud4j51PYmdUXmgJAG4+rMNNCfrfRyon6pqIo2NFktWcp4T+YLp\n",
+       "cFfK2vUkol7Iby+3lFjyXsbnWNuxUmTWJIUdawNn/9NAReiVKqakCPX50wLW33zzV/NnV/Cd+7M/\n",
+       "+ffnbX0m39+ujAr6YhLK6riw+adDofU0oULmuNdntSWvjFo0t8H8Q8LqGgJsrnYR+48TNe4bjA+a\n",
+       "k+JW28SOfzrJ9+Kc59MT9mdtPCfoIFPq/Oyr+bPNGmJ88kXU59P9kdz2MZ7fvrNEmZcX4i3V0bMr\n",
+       "UXkHib2LQvpxD6/EE1XscHjG8LNDs9JYljBhGyfFqLdczn5/SJSI6blD+RmfjIBIhQgRIkSIECFC\n",
+       "PDE+X629eud23lCVNRxgM3oz7rGqYHfk61t5q48GEmp7TRcm5ATIztDbikAd0kugEHd7c/j1EVYE\n",
+       "iX2/HeRtXV3HnXNuGHQFY+iTSvXu94Y0PFtrOjVBF50cN4MQMKIk4gXeuBcLW2nWqLlVNbbS2sGJ\n",
+       "ebuxc3q4l22nR26v8ta9opqALVZ4BWot9XT7kxQu7pSGvNvL23/TUL1ArOA6QuQ83vAbetNPBjmn\n",
+       "wtmKUN179Y3fOediiAI1NXUcGZGAwzG5I6c455zcvhP0mQMhR2MvK6GI6gk62DMkCSk1UR8wBjrI\n",
+       "yQ7brfSJHdkKjINcd56SsBL11/qBEyUgdhwIzZlE+Jon1naNl7YdsVwfSAh6AtJ5lpuIPYulfxSF\n",
+       "tVMPDwXPqy8gotfvrU/GmkhA3yuRcKD1r9h+QxGhga7r7k7Q1O3WRLyKel1e2j3R/TByo/srCOHi\n",
+       "Va9zn67hN1JtsARIByNtA5IROE1ff9PR9yasOnta4WtCQwWUpqRz20GU3jOqizpp7LbvCmkLRq52\n",
+       "J7l3nhDZJYTN+72hZNtncj9HWs0P6IMpxiTlWszJNm1rc4KiZHVt/TT2sq2lc1L0o1wZwn39TvpH\n",
+       "T3NMCwSsxH4nShhYlPJbnrs0dzxd8pyM33JeudekIK72AGH3SHYSuH6PcTIQgqzp79woBcZ/cqAk\n",
+       "Esx/ZW6p9lcXMmfuPSdKyHEjQuJrMCFacy0ipOff/u3/6pxzLiNh/49fiiVNFjEjsMHxDaW6gUC+\n",
+       "IZTGA0VNqOadB/qTAAbpSVitJA3ppd0ApMUTqngL5GogP5kN6hnGA83d+LfaW9/RPtPSdXuFZACr\n",
+       "L5eGoJ2dy7O7IzTXL+VefPv+m3nbopR5jMqPurudoOQZ1d9TN/yRksdSJG+MmE+GRwkz0neXC3vW\n",
+       "ePTPgd4JtLauJ/RxTmiiNlbEiuc95/845hQQqRAhQoQIESJEiCdGeJEKESJEiBAhQoR4Ynw2am9o\n",
+       "BjckBic3KNa6Xpt3xAhaKNkY7FsfBYI7HUkIB5+jvjfH5B7QfrEwqkDpAIVsJ/LJUPjex0QFQETK\n",
+       "QsQC1ELMbsfwOdmTA/vFhYjxppboRsCNKWjBjIr3JoDPM3L1jUqBT2/vrRjmm/F3cv0Md4Pm9OwO\n",
+       "PKHgaWLtlKAY5AhRaEKu6yoijEkcPSwH7JfoNvWdcXTvALP7xN7LaxQNjjIqDA2fnZREuWmkjuoQ\n",
+       "Ak5Eu8CxfejYM0yu25fkNg4qICJaal9JscxFZGLbEe0TM2Tdorht/gznYe2l9Mh6ZQ74Dv0qL8jZ\n",
+       "PJb9PVAh6xR+SwmJ8nW0MX1ZLIUC6A/w7iEoerURysjXlJSA/szO3h3U9vnC+s7ba4HMuT2rk+z7\n",
+       "8oq8ouA3paLsc3I2ryG2riqjdo8nULbkI8WCZg0VvnIRaqXoH/nTAFL/FBWowQL0Fq7fS/JAa5FQ\n",
+       "kMYsgFaxKRU3Bt2hfc4553IIqitQcUNNHjMqoqb+r07d6aMqr0hYIB+vSgvukrdainurlRicc24A\n",
+       "tZmQK7pSC+psT6yfO53k+Eei20dUY4hjuydKd3ZEWaVIZEioQPFMC57sexX8mO7uJdlmuzHaSYXS\n",
+       "JfEzKei+5dqK+w6j3JOBaLw8lXs2Uvt3EK9HrDVXUXClfcfO9wHPiWtKNnh9LfPu7mTzfwW5x5Rb\n",
+       "460gBo+Jptnv5Hv1kbya8CwY4EUYJ3b9NSjQv/nbfzlvy7yM06/Yxb6Ua+g6Oj4SJb4nsXUM6UWx\n",
+       "tP6cVzInDCf4s/XW13K0TUsVOCZ4Gk6x9SFN0Dm01k88GrlIqU8oBUsJLS0otRON3bQABQkadbU1\n",
+       "ylSd94/OkhgaSCuY2l2BvitzSl7A/ax7Sh6BB9qn5DPKKHNlgdjr89SovRyUZkuJKrXS/N6oVb0/\n",
+       "fqLniQ44Hqd/vGZxQKRChAgRIkSIECGeGp8NkVrk8VzLyznnqiPqar2kVP9E3gwPrb2tbvAi3FCa\n",
+       "uOop40duu/J5kRoitUAqaI9Vb7aly8fKKadVba11eGgFFSX6BktvxLGuyGx37+9/hf2ZKFfd2zVL\n",
+       "OyEhYAJUS11t5fvypl3RauH2XpySv5t+NW/blJLOPpI/rVoS8Iv0FMEpHiuTmBxzVcTJ6e8xbA8W\n",
+       "BQnWVdhMgvEJLubH2mp9pUDs+sFWiWg6V9W2SlHLAI+VS0/i4GiCJQXZL1QQCpedrX4ioGkTrb4f\n",
+       "DrJKnVIWJauI065HkTOH2n1sP9EDCYszW32t1hDbkmA1hTvw4f7tvO32VhIZTo3ZBKzXuE+EuuQZ\n",
+       "rg3dlLXxKl7Pc+4nKfZv6OcCVhd3VGttVESCjvXqFWrtnWyVWi7ktylSva+v39Fn0v9ubkywXiPF\n",
+       "/OUXlv6sIma2zlCEibfNqAPnX+D81Nagqig5BMhM39o+SiBhY2P9tIIT935vLvYb1JPMKXnl/Tu5\n",
+       "tnNa/WsiyRLfa47UX9HWbKugqfGcQq1zQkP1xxaoO7Yuqf4YxOuMhGdolIRQEu3tarWSUbr6WSnz\n",
+       "ybtr69cHuJdfnNl17Y/SJudnhvCfgHqORxLgYtzxPKFoYxnJvwdq1xRJBhEhommpx7X96uc93ae5\n",
+       "QAW5nUdA4kdCDvS3WgdyoAQQ/WWZW7tq8kJ0sOPrvRhimk9V0LymSglqHTBYv6uwnwnzf09VHLSi\n",
+       "wvv3lqj0b+P/Tc4j+0/mbWUrbZdSWv0ApCel8Xw4yJgtCXy5PHvlnHMuwRjSKgVynnKvi9jOqQM6\n",
+       "WrP9A9p6IjRrAPqUeGs7TS7iuTOBKHuge+Jz2VZosgElzKjFQBTZsQ6K8NLzJAVimsY8nwFhmwzN\n",
+       "7/BAzwiJ1i4wYk6MKIlJ+9BQ03MayQAFueJ3rRxjHG1b2ygibNsi1DZtaYz76Y/7HwREKkSIECFC\n",
+       "hAgR4onx2RCp1TJyzZF40UTedE9UL+vZlbymF95WVYuFfF5W9oY46ls3WQ1kQEy4cramWNYwPxzZ\n",
+       "mBErB+WMnXMuj2T11XR2npo5Hsecpg+NAmmEfAKOnvhYTWfVNHWujO6gORqohtzUYX/E3+4rWR22\n",
+       "vSEyh5O8aRdLO6eLpaxqho5QP9QE1NpdDa1W9I3bR6QfwgqTC18vwelzuqiDhiQhfZlW4s5y0oHB\n",
+       "fsKPtvzqYToXJ7CVIESoxQqLTVJ1BdF7u/4FVtATnbtqNI5kUukdVtopp8nKqmO/B4LVmR7CYWXM\n",
+       "5ncqr1qXVKEe6bqX56al+v7D75xzzh3IS7Gq1LjPjq+Z/cuFGo3aajFCu3akVdg9yAqWLTlKpNVr\n",
+       "jTTZn1xrS/qyM9RnvN8b6rI9EzSpPsoKsuLUbDQ7Z7AvgbRwNrAaRm42htx9/70gp9utwbSqUVyv\n",
+       "bNtsUwBkiu0X1OpjSXqcI1CdBVWa3wNF6sgSoGxhJ0Fp0iukot/dGepWYCwcoAeLSXuiyDUbKGqd\n",
+       "uK63dsqw/M/IpHLo5VrT2PrT+lzMB6uKtJyYO7gkZgp0bNI6fDSHdRBrXL0wPdLdh/c4JzZklT5x\n",
+       "f09WC4DMe0or73DvxvFjNDEFY5DRvKZyUU/oU4LHSEsocYwamxF1lBHzXkz6OjUljgayk4BGq0A9\n",
+       "u93BENQGfSgnVDnHsQgkcRkmLZ1f5IvoE4Tce8w7ntH5P9Tr0TyptSFZU/N3v/5b55xzZWnPqb/8\n",
+       "k/8A50v6XqBeA9U61FqDLddaBBJTwkx0vTINYgMLiZrQz7n+JF1/D4SJyxqq1pdrHQ419GBsnIrL\n",
+       "zak/t7CniBLVL5FdwKSf2bG0TqMns+yHndg/ZIXpMPV5yrUbHe5FP9oc50ZFuFFrl2xlMhi89oTI\n",
+       "6aMzelS7VWvy0W5bzJ09MRFogCGxQdnRc/RTERCpECFChAgRIkSIJ0Z4kQoRIkSIECFChHhifDZq\n",
+       "r8iWrm0o1d0JjHbzwSDziwuIs53B4zFSIqPIYLfNVqDd/Z1BwMlcL8popMnJ5zlcZNkdWN10u8Fg\n",
+       "fKc1tAhGVFF8WRC0nQvsmBAE3gEqLwhaVGHznHJM9MBDJdDhYrI6aJmKXckSQcXep8p+W6by+aIg\n",
+       "F2MIal89/w/nbX/zS0nZ/f0NhPCOaDTA2GRi7MpMxeZGo6xADzAVoJB9OpiwPhuEUoiJqswUMu05\n",
+       "UQCQLSDwaaRrBWTckOuyQvDHBxJxgw5cZEYZae24mtXbg9CifUtUDejA005g5KoxYa26Ig+RUXGa\n",
+       "rp5THSgVHmdEC19uhHr55ZvfzNsa0Awjwd1nSzj6g8bItkRjwcW/I2ftDu20KI1GU+uOxcrOKQMt\n",
+       "cE61xg5ID2+oJuFvfyH9eXUhdg7Prq7mz7Senh8p/Rqu2MeD9b/NWqgHFpaqAHy1tutRKj2l9HtN\n",
+       "sjBrEra1gNUAiXPv7uTcsw2leuM3vre+dvegzvoWHqLwLLE5YcAY70HtJWQhohQL2y+oLNs7u9cd\n",
+       "7CFiSmwpS2kTvtcqKF8s7d5pfbqJ8v8biG1XuZzn4d6owPVa6NnFmupVghY53HyYt6mFwIZoVJ0z\n",
+       "WIKwRCr6mNgYv7+R+96A7lmv7bOzjcxPzUg1GdH/UuprqlqIKaFoAgXHEgyv7ukx9WdYOyQ1EkCI\n",
+       "xp4mFXtbAoRWfuB5JUW/SknY36kVBSX5TOBUE7JT6R9Ad/WwMCDKdMQji61WIpzTX//8X8/bVON8\n",
+       "uTKbFBVWZ4Rf5Lg/TUM2NaqlaECtemsbtTYfPdG9rVJWNE+AAh6JgtViCBPP3XjuTSSm7tEmEdN3\n",
+       "oPZ60Lf9aDRuB6q4H6kmKtz+rzb2PHtzJ5Ysp4YkPUiKykjs7UFztgMnQ4EqxAMqp+vXhIaJ+pVe\n",
+       "f/koUURd8akCBKwW6obqBKr9RkKULvOBn4iASIUIESJEiBAhQjwxPhsilfjRrTN7Cz3sRQB6VZqw\n",
+       "7vpGVmKrta0gStQdy0nEnCOvNl2TdcGotdbsmDFS3FXk7Z2tqlukmMYZpzzqcW2lecRKW4Xbzjm3\n",
+       "PZMVWxETIpBqRXpbueqb9ghjypGsDq7vxKTt+TkhaBD7eVb2jurqyKsq+Xy9NEO4s6VU5F5khhL9\n",
+       "kz//j51zzt39CzVBM/SlUwNFQroGmNktyehRV7rLla2SaqBZRWaNfWy11tWjXHc534FrF8q5t50i\n",
+       "gmx+iNUXWU1EQC5HWq00sFNIva3INHU4IgG6CjBZFKtKejVLbcnora1E2L3orU+ooLgfDP0836BO\n",
+       "IgkmC4iMLxa2Iru9u5FzS6zdlxBAHyFE31AdtLyU63l4sGOphcYDCcavLuQYWq/ROecOR00ssOu5\n",
+       "+SBjjBGe7Vb6W4MkjwPV3NI7t1qSOD/62EJCEbnj0c5Ta/GxKFbT6tkkU0MFs/f3lGo/19Wza9B9\n",
+       "MEqp3ztSosqylG27e0Mur86l7a4PH6+IN7D42JHRowdKyDYlFUwSS8pXVyBkYvQTl8ip3jUQ8JhX\n",
+       "+kB9EkKzihRtBwR7Qe3fwEB2IPuDJRCui+dfztuOsCxgsa+KvLva+r+ijjmZJK7PZH8HWChUR2vr\n",
+       "xRK1TlnFDETckyXCfImECMVqJ0NjVzXbE5sTr+X4I2piblMTJ4/3er5kIXAv53mkftIDRe3JfqA/\n",
+       "SZvFSzu+1nYcSFC+WMrfN9cy/puGjJthCMuIvKqseduvf/UL2e8P7ZzOYbCr7eWccynYkWJl/elw\n",
+       "knkiQt8Zj5TEgHqtWl/TOef6DlYzZP5Zoy0iyhSagKq0EwvV9VlEdgoQ47Nxa6LzdCv3RPuhc861\n",
+       "PYxuiblQ54TF0vr1JZD9abTjqyk0Jy/oxNN3nAAhx4/Q1lybb7+D1QLbZEB4nkT2jqGCdp5PFHWc\n",
+       "uo8TCiLqkz3f709EQKRChAgRIkSIECGeGOFFKkSIECFChAgR4onx2ai9fde4ZUo0Rg9/ls6EsD3c\n",
+       "YYfeYLwkF3HecknUHiDNkqiVvcJ3tG21FAHw4SQUx76mmjut+m5Yk/hUXbzJxTVWKsrgwRNE8ynR\n",
+       "WCrA7qnWXgTPGoUO2TOrhhB6Qy7uk1MvFruGFB5IE9EoHUS8EVGQ68VL7IRgdMDCP3z+U+ecc796\n",
+       "89fzZyWojboyGN/BnXwkum0EFMreOpNXyJS9nfQd3a4xhbCUa4epQ+3tvVz3QMJOrcnGIkKn/lzk\n",
+       "OtvUcv2LzM6pBx2XpiT2hdi4I5hWIfKZCqD72rUqOiW6N1cnXDulPII/FFEwS1BGJa1VVOTZ0zWe\n",
+       "arnvP3j5I7mWxr6vsHdCzuq31wL7v7ig+n+gMT9cm7N6D+FpRKLMBbyqFkujew/wcstwQUyFqYh5\n",
+       "tTbBctMrZUDnCfEyexZdXJgYWkN9eZjaU/pWt0XExavLOYu9M4iXG+qn6n1Fw9RNEEMvye/r/QcR\n",
+       "Y19cmgD4/t03zjnnbg+yv4y+nyFRpSOKYe6TZK5VlNr/WZwLB2aqirCEK/ThYPRlAc+o6kT05aUc\n",
+       "b2zl3kVEBXW1tElB89QJiRfLrbX5FKtjOFFLz2X+2701V/zomZz7d7/9pZ3nUtpigfHaUrWB/U7u\n",
+       "sTriO+fcYoP2JBG7UjAj1Y5UR/NH4xlUaUzHmCCyj+G3xHNCiXnn1TOj+3bw7Lrd2fcaHHdJEpBd\n",
+       "r/591v5nJebTyebd5Ur28/Ag53sEJe+cc5Pek4nmP4y1ckP+WEhieHttdVKLXObdmJzdE/TTnJza\n",
+       "Rwiv7w5yrz0/pie5J+Ng8+qIhJ6RE6WU0qPsIY+Jd4hp/muQ7ED9eZgnaPYZHPVinXPOdeRj+OFG\n",
+       "ZClfPPvazmmSPlRQUtY5xlPdmbQj1vmRn1M495GTErwmecDHi+YfTTZhZ3WPOXlgSg5NEXlO3sL8\n",
+       "R5DSXA0hsnl/cmT09okIiFSIECFChAgRIsQT47MhUnH6uOL3JUTm97UhLVn5h79yboAojNEsXQiN\n",
+       "k70l93AqTZYmwC2A5viF/OCB6mqdOjmuJ9FZBrHfekFCwFK+14+2Ilan2gO5aPeDHGtsSGwO5+8Y\n",
+       "dZJ8ys0v2+4OtvrZYkWYk4VDies+kji1RcX6N2+s/tNf/FTOryc0T1P9FWlhd9oEqFK5slXVLETs\n",
+       "SFiMFUxP1eKnWOsfWdv1PVYJVGlcK20nVP9qqZXl4SK/e22V0RUl8uyYXsOdN6OVLsSrIyEHAxSL\n",
+       "EaVpq2P0kFOqMawtGqTwMtISQ+TuyfVXU8i5NtotHHvPnSFHHivnlDp5CsRspJXOAMTuYffaOefc\n",
+       "q/U/suNrunRpx7+8lJV4QX1iQP3B/cH6cwnn73Jp1iHzOpNWaQkqnKsVSUyCTUViuYbXGk7onlZ/\n",
+       "6k6uAnPnrMbeguraKcLkOSVd616ibVJCXw7kaD0fa9Q0aNuWAmlsycW8A7KX0XnmhZzL7tpQtw5j\n",
+       "dwtXdu5DisRwqnuK6gQTibj7QasC2DiZ0Wdy0dZdJylZQuDf1cbaSS0efvDlD51zzr1++/382QVQ\n",
+       "J0WGnHPu4kLa/VjZthbC6oZqFyoCfnZuiFz70GGboVndqPOZ/JYNYTQNfCInbm3hnmwK0kT63eM5\n",
+       "DnuiTQOSdybqYw72BzHEy9HaHMMzjJfdzlClczwo/uS5JXb8Bp/fO0IEce8eWkNzIqDIaqHjnHMj\n",
+       "KhmsIbK/ubX7dcK8p0kKzpkT+hk5kFuJR/ve7kHuz7MLSwrIYW0zknVDBhSzgKC+JXd2rwJ0asMY\n",
+       "/S+LOClI+n+Zco04CLUJTk8Wco1sAK/g0Eh2JhopnjsdoUpaJ/XdrVm9LHNpV651WCKxJuG6dep2\n",
+       "n9s8eToJShTzqWtNQMxdCaF6KeYwHpOKKkWPKnDI9Q+O7HQyZZ1o3k9h00JsxjjyyXwcAZEKESJE\n",
+       "iBAhQoR4YoQXqRAhQoQIESJEiCfGZ6P2+nF0nkSkUSFQ3CX56NS9+HjEVI1xhMs5iyhVFNqSYE0B\n",
+       "xQWJotV5OQPsPJAQsj0pxGeQcQx4siBs++VzoQDefTC6ScXDngrE1hCvpwsW7KnYElQUeXwkKLx4\n",
+       "aqgYqheqJqWCmlpcOSPH4BoC1Ie9/faXvxGX3RdXP5u3TepQHcn31afIOec8HMZz8tEaWhXs2XV1\n",
+       "8ODanUycmcz3keBu5VsnFsrL30VpNEIygYJ6JrD4/mhQ8G9f/518RkVO1avFj5wAIMfl4tJzsVg6\n",
+       "vooXmRb0sdBBEzywfWyw7wbC45xg5wIU6KknKgLQ+u2dib0ziFjZFXkCHRwTBO/RJ2/w203xav5s\n",
+       "CcoiItffE1yuh6PROBorEpH/6GsRtn73nYld61p+8+Lqx/O2M7iSf/+t0EdlbuemIt+IaJwB1M5i\n",
+       "QR5woBSL3GhEpQDY70kFrSMlCqjIfKb7iLPL4DPXVDVtk/3GfE7w9uEC5ftbmTv2g9H95xDAH7hA\n",
+       "q3+cPLIqqGg1DtF1LDSFO/nCvjc59Syj4tqgGZkqjiDyfeQLh+tlAX5Zwu/qWty715TEsIew/PLS\n",
+       "HOh3e6HCSpoTRvVPy1iALMfa763vXD0Tmq+pKckFDvG908QOmld7pfZstzoXexqn+htiip1PlAK1\n",
+       "H6co4D1V7EAtbdcrFUxUaAzBPvvzuQnFrWmcPgdVW+2I2kRSziK3+SebZMwUa6JlUV3hCOnHxQsa\n",
+       "rxCWZ5GJ7Z9fiYi/LO3Z5SL4GNFp1o3s7/0Ho2pfnElSEPeJUWUmHkXbKWGnR99lejxGu2bkLO7R\n",
+       "/hHJCGJQ4AXR5yO2+Z7mU8yTdW/UeoxXhcirsJ0TMKRNDjROe4wZPaZzzqUYnzn5LSbw0WJR+Bb9\n",
+       "/0BzXNPJeI4w/phujxNpk47oUQ/vN3Vkx5XJ9+m5m+OcNLHCOeeyTKsscL93fzQCIhUiRIgQIUKE\n",
+       "CPHE+HyIlPMupjfjCW/VGxKsnk6ymjw19mbcYoWZRibOTDxq/dCKsEZK7JKWRDFEfANqQ9W1rTSj\n",
+       "FKmxnb16qmA2KW1bAWfrNLIV6eEob7jVwd50G6x+mkcCTHlL9hkcuztarTpZzVAGsStL1LUjREpt\n",
+       "CthZelGIGPP4YKvKv/nlXznnnLsnF+dxgisxxO4xIUhpjmOk5OKL8+x7a6cHXNfaW/vvsSLOchIx\n",
+       "Z7AJGGg1CYShiG1FmKtrMVJuv3r50/mzm52kaXNNRE2dHulep5m6bdO9Vkdp+p6H8DIhsWELoaLW\n",
+       "euRabwvUU1vQSlctNhaU1q4lrtqIVr93gmw2KS/JdXVESCxEjnUvv31/fD1/duXF4qCYbPUbx6hN\n",
+       "d7IxkWLF/tUrc7b//ltBoqqjCatffSFo10Tn/ptfSd3FF8+/cs459/aDJSz86Z/9Y+ecczc3d/O2\n",
+       "50tpk9XKUBJFTmhRNyMsbUcuyuhv7ACtyNWMVtFKtwAiWB2sXdWepKBkCwektSWU8OJc2uy3v/79\n",
+       "vG2JFebZxsTLH64FJdCkhD63fq3JCW5k0ak64XPI/xWEyLSNzFOPTNxxjSkJWxugOSdCx/W6O7ht\n",
+       "92S/kahNBdmf5Kj1eaLxr8cdKfOikwAAIABJREFUqD3ne0KI3PUHQUIvL0yo/fvXgpgsUDtPk2mw\n",
+       "R7kUahMdYTHVupwGue+evE6m6WNh74hHkM/tnsxjUWu4nSyJwpXyfMif2fm2sOw4DFS7FbUwlzSe\n",
+       "73DZqSebklwQoZLy39ONtLsiSA1df+HlvsajXeuL5zKfZZQApclIIyW7ZJgLPrz/OztPVHko6Lcx\n",
+       "hNUdLAS4D2mfTOkeKsLLzI26mDtPzzMo1Advv9V6rmNOyRM63ZPbu6JeCRD+cfoYpVyQ1UKP/p/Q\n",
+       "XNMpclxQUg6uNSZMJ0KhwiKzcT+/qaA9PdUfXWNcn8hCpBtlLPQ0rhRpyyJGxOS4RcEJJXLcOLO2\n",
+       "41yIT0VApEKECBEiRIgQIZ4Y4UUqRIgQIUKECBHiifHZqL1piF1DPhULUGYdQXY5oPLfk2NzBHhy\n",
+       "saKCioAgu4n8hgBpDgStanHfIxzNR6JYlB7oyFCjaeC2vaCCmoAdSyq47CHALgaiFiGyq8lHRgWg\n",
+       "A/yUSPPmUlB765KKLIKCypKPRYwXS4PC9xD7xZ399mEvflTfdN/M27ICtNwWjtWFXf8AemJZGsQZ\n",
+       "QVDbVHaiR/jTHCYSYKIdh5ooGEC/q8Rg9A7eUru9eWV9cSXQetfiXKig5hdn4rfyzdvfzdsKiPIn\n",
+       "Elum8P0oS3I212SEgfzGFAJPycUXdKealvRc5BVdLKHi0hkoiGS0/ZbwVmoO5hh9Pwrd0JG3WAFf\n",
+       "ppwEoA40l2on33749fzR2WKD4xONDRftZLJtC9AC768/zNt0HK235hn0m2+Fxru8NLG5+rF49M0N\n",
+       "CdZvsL+CBNg5aMTX35tg9sc/+Qm+bxRgDtF6wkV7IUYdchIbg3pRCqQkYb0KwJcFeztJ/08Xdq+1\n",
+       "gO3uaIkiE4qVXp6ZA/b9g9yf83Pr49utiO2VAhtZHT2o79nHhXfrilz8S7lPczKHcy6Z6YOPf3s6\n",
+       "WR/XOa6mJJPVGoVcIahvSR6ggvKhI3oaXaxc233qkCgS0XhSWrQ/WX8+QD7REn11eQVRPpJXIvab\n",
+       "gzg4ZQ1CrPeQBMhIAJrIAVvdsfuEDAI1AaMksbm2DwT7MbnzjxCAZ1SxYrOVe/yBJAD3O+mL1+QZ\n",
+       "FWPuThz3HSSZ0G3PU+kTm6XM/9uTPX/yL+F2X9n496Xcdy5urVr/prEdq39fRgWidZ4+JSTBgFZA\n",
+       "aW/a7SzKnqhN1GV8pP6nhYfZnypGX+gnm5OVvps8+yTJ2M1o7MYqUZi0kLzd60WBhImWFNl4Fp4o\n",
+       "iSFXqrgxEblSlUn8cVJYTK70GSqfREjsOJ1srlEBPhtPabWL6JEoX64rjSl5CTd+onYatFgxvU/4\n",
+       "v+dVKSBSIUKECBEiRIgQT4zPhkilUelaEodtvpBVHdcB0hTPprbvVQ9IdV2YYE1F6x2J41K8YR8b\n",
+       "SuvF2+epgnNpTGmgeKuNJnrTh4v1uKV0eawwPb2tL4DsTJRqXwIlqWk10SAVszvKm271aLUkK5xn\n",
+       "Z+Z622HV6SNb1WWZnPMFraozuMdeEprVfwsH4MqO0aAZtV3p1NzqHKn+GYsDYRdB24ZRfnUgAWgC\n",
+       "4T2X1cohRvfOVtObDVyZD3ZOZa6rE7l+FsfGOM8NuXNr7cKMPCniRI5B2e/Ox1rry7YpItmSdnbA\n",
+       "yk0tFFiwXGOFk3qqYYj2LxNDBBPUbirXlpKuqOdxb9ef45zY2XectHYdrossQU6VoFpqw+GccwmE\n",
+       "xRMhpw9wTF6syH4A9+zm5nrepsJLdsqvvIyP9+8FffrJ1yT2v4MQm2tNql0BIS2ptjWt6lWMvttZ\n",
+       "P9G+y6CPoiS6cl8QMqEi2pr6RArRa92yhQHG5ES11rD695Fti7G073pDaVIgLB6iWBaC+z/4V64L\n",
+       "afWU6q2O7RlZDajLOVs9OKS4J5ldY4V+sqJ7dw3n9R98KQkA795ZG7ZIStguqf7hUbZNnqw20D1i\n",
+       "skS4vxfEbkVZAfVJvngk65DdTv5eIA2dEzDUMZ9yOOa6ehPXKUWrTSSi1hT/hFAC/dxPxBzMNfm0\n",
+       "5qAhAyp8b0mAHzu5F2c0TxyA+sf3dp8ar5bdJIrWCWIgsTX62xJJPAUjaJmiH9ZeMSx2Rsc2LTo+\n",
+       "bfxrvbaisP31rdyLmmxCOrXOSORaC6rhqo7y5P4z11r1nFiD5x8nCmldy4qYgxLX9ignAvcuYQd0\n",
+       "fCFfyh9ck1QrP3D92REd8ETP7lMrNW6XbB2yR1svbD6dqxdQklUSyXwSI8nsfGP96vYWcxwhUiVq\n",
+       "/HU02SdAotiVPwVj0lL/10oZzYkSxSJzrf9UBEQqRIgQIUKECBHiiRFepEKECBEiRIgQIZ4Yn4/a\n",
+       "y5w7T1hYLRDkIjfKboBT6yo3yqRLpDBiSl4QWpgwJm5pAfFakpMAGdRfCe+ghqBQLShKtShdBhix\n",
+       "P9GxEkDq5M6r+uSEIEP1anIJUXAQCO4hsKx78s6A23WWm9j36lyu++bBfEdG+GiV5CxdgDKJR3Jl\n",
+       "xjvyz3/zt/OWXSPQftsoZWoXuxzg58JFe5P00fedM1fiksS+6g8yUdHWHu7IrTdq4QCh+kgivu/e\n",
+       "irj6fC2eSU1jEL8KAQuiFgbQBxN5oeRw1OYil2Ov/iDWJnW7wzVyIWX4XQHFTkq7rxVovoicrVMI\n",
+       "fxee6CGInfvW2rP0cn921CdUUBlTIVWlTUYI2nPq/7u9iFzLwjxzFugf0cbapOrkunYHE3GqULkj\n",
+       "Z++vvhTarm5NqHk4gUb6gTjgv3tvgvVzdc8mIxulm78E7eScc2/fynlmlIChBXInssCOcYPYFVy9\n",
+       "3Mxuxo6l++sSg9jj2TGfPdBSfGb971RJm3S10chr+EediNpIcX6rpVAFHVMsEL3mRE9NmJOKzK6h\n",
+       "AX3TEgWl18pu51UtfSalQrJKo0zkSq0C/Q8f5F6wi7XDvFfROFng3Bui8YtS5q793u6/0rIfPlhS\n",
+       "hPr89FTwWb2ialClGd2vCuLh7dWP7OuYJyK6rh7eX1Fu439ay3mOAyW0KI1HiToOdKR66zmiJ0+3\n",
+       "0scfSJx/C6+8lqQdCxQ85j55wG+YbVXqfb02GqsFzaz9ar39gq4LyQ65tWs3yPFHohuVbh7JWy4H\n",
+       "3ZSmRIshKcp79qBC+4O+nugxrXQX+621M7VHVJz64xE9N2F+mnImq2XfbUXUqp47NdRqKfOSx8Nu\n",
+       "QfSkJj7UlVHQyvKdattvjvvYkwdWin7fkbO6Jp4UhSWKTO5xpYqONDMrPIvYHf0OrugRiegHPONS\n",
+       "eibr8HxU5eSoBceJ7vV/3EgqIFIhQoQIESJEiBBPjM+GSK0K78441R9p/SWJiO/2EOfRSjOHey4j\n",
+       "Ryqi61jshtUkZU7PtcBaCNobWoWNKnIn0aPWc/OjvdX3SNMvqCbZAUhHTr/NsPrrp4/f9BM4ZWct\n",
+       "iTghDo5Hqk0HMfyitDfz272sUpPejr9VkXNnbff8UmwFXpNTdZ+KG7IKEIeJ6hoekC46WFuXEKWS\n",
+       "h7RTF+fG2epbHZ0H/tqgLvL29r/bCfoxRLaaHMfH7vWpJ3tsoEqcFKC1lhyly6pjvaP0X00hZmfh\n",
+       "OdXfW79bxuoeLf1vvbKV0e5eExAIETtIe46EEiRwVu9GW7UosMeOvZk69lLtMA8rhEYtFmi1qoLV\n",
+       "dqKVHoT9fE8GIGwD2YmcdtKuV89tNa3ox7v3Zl3w8uUPnXPO3d5Iv1qTYF5X1RmtPlUoentjFhbv\n",
+       "bwTh+Mu//CfzNrVCONsQwjxpW9s1qmg1GoBMcNtgTE5cnQArTNK/zwbM48hoFlaptCLOkJRSUz1H\n",
+       "rRCglgQZOVxrfxoJVdPaiSnlpGeYx1jY2k8q7CekCc7TXFFhuRCxa1VZ3ykw7rROIafVN0DYuP6i\n",
+       "uoOPhNLvIEBfLmyM393JPVmSUP3uTu5jWZIA/UhO4s45T6n2GRILOhIx57kIcSdC36JM09UtJlgW\n",
+       "TCzehkB+2BnCEwP11twByrR3KzjwD/eWRHGrVSQiTlSC/QYxEjGE+iOhXwdca7GyNjnqXAT0I6UK\n",
+       "HIpwjbUdq4F7f8dWO4PWerMW0JqMbDSQ4lmQU6KCjpMJaBXXNVQhuic2I4F4vqH+F2mfoRswYYYe\n",
+       "aDw1SHgaCE07IpEl8oZEXoAo0XqWPK+2YB86qgrSzLYK9IoRaQUKSsqATdCJUP94kjZOW+vjlxup\n",
+       "ynC2kbqGx4P1UbXTOVbWJ9ZA7vvY2kQtRmKuv4ukhH6whjq1OOeIknyiP445BUQqRIgQIUKECBHi\n",
+       "iRFepEKECBEiRIgQIZ4Yn43aO9uULifYLwEXQrV4XRyrOM+gWPUPariQLf49HWjbJDBeQqJQP3tU\n",
+       "wZ+DnL1rQHt+YigWdBeBsX6S47edbcsh7Ly/Nrpjqd4qa4MR1b9iBH02TeZdUTcCu3dU5FXRTobR\n",
+       "VbBaZASjo8jiNNi1LiHsfHFFvlQfIMoEdM2u243SZyNxJp38XRC1pp5VMYmtHZzdk5jE5iqiJbG1\n",
+       "CiojFvGNcp/qWs5p8CQsxnt+7A12d7O3E9Gt6o8VPyIX5RJ69uCCiJCg7UKp4oVAwSU5Rg8D2ulI\n",
+       "PjY97tnuPX0PbtMkovQ4z4monQV8bh4J+tEXBwjrYxY1gpZ4f2POysUziD5HEqDG6i1j0Pp6LdfV\n",
+       "k2PvEcWlLy5NvH5/L6LMtkVB3zMWooJuI27h999LEeCffG1iY6UA37wxyvDVS6Fg7u9M2K50G7ES\n",
+       "s2dPAdqTvaCmXj226JzgxdZ01k86jInN2qD4mw9CI2/PzJ+mhch8opLD6pSeAO5nwbgmcTzyzAE9\n",
+       "kRFllGEe8VSgVWm5/d7G+AIJIgX5OFWzQNyucb+XpJCylOvJSGy9WAjN/+6tUfbVSY5VkLC7Bc2T\n",
+       "EN22RUH4a/rtei39aUdUiSZvRKCWIqI1VNA/kCxCPaPGrbW1mzDuKLFkQNvGK65uDaqIxl2sBZ+h\n",
+       "36goiWKAt09K1QHOFkqF2m53nez31fOX87bjd3KNJ3KlHwZp/weiMwel2Qf0VxpDfQMaq7bxd4Qv\n",
+       "XkTUoukcaE4ctT2NslqAUuw7u54sRRFezPue5BaaqDI11icLiMKZHuxVlE5Fi/V5l1Ki1IS5+0AJ\n",
+       "GCd49W0Km3dP6KczPUvzfw8ZRUelOvpJPbOs/60XmkRg4ZGMFNNvc1Bqq/LVvO0nX/2Fc865+3s5\n",
+       "jzyx6zrfyD4a8mWci6CTL+ACVTvYF039wzIaO6cO++tsWz/xvPhxBEQqRIgQIUKECBHiifHZEKnF\n",
+       "KnVJRymPeDPe39sKVkWBcWJvq5phP9Aq+XiSVd/1rYnNFGwanK1mngHF0ZfLid7M40hXlVRzZ5K3\n",
+       "2aphF1tBn1KqtZZnmqZ7mLftDoePzjPBiuGIt+BxspXJHVZdX35hq6VDJaiHuhk751yHFTS7iN/t\n",
+       "pc1Kzw7g8na+XZGLdSMr0mOlKaR2vg1W9ez6u0nl+wkhKF7rVbX22xHp33w9Qwc0idDEMpfVtCeE\n",
+       "qRtlP20tq/CIJOseyMFjBEtWjmNi1zpAFNpzCu+8ErZtKc49IpsGTTuPsNJeF2RJcSbbjrGtvqz+\n",
+       "mKEfR4hde1LFTlh9j4QmTECpGCVI0bYtkJaRVpou/tid93CUe50lhiqNQFGznByDgfSw2F0V2nHM\n",
+       "liCyvxw2EWVp/fqAPny1smu9OJPP90frp2cX4lifUFLI7IpMaJIiGy2n2mM1XQJN4nptkY7PT9Tf\n",
+       "jKmKgE+1Tqe1U4pkED5WC5uOzcb6n9p5zLX2KGGhrlEHjhaj5fJjB/6+kr/zzMba3P4FW0JIe263\n",
+       "ti2dhbdUlQHu9uqYfjzaWFsDwViROPrDeyRALOz4eh0VIS1LIKKrrf32Fi7qfN/1nsSKRBEipULl\n",
+       "lhiBAucZkWDeY+7sef7TumYtu23DaoCc3UdN2gAkX1xYAsTuVuaJ+52h/xUSetLUEhtcrUijHStx\n",
+       "GLuFIRf1IO2zPxFyDmQ7HaU9h5psHY5ynjXVK6wPcj9bQkmnUatSWJskmjyU2Xm2sKBJUpvj8gUS\n",
+       "GtCdm44qYGDu4moTLa7fJyQ2V3SesrJm93hCzhsga5qc4Jxz66Wgzefrr+dtL57JGO9GeU7dPPxy\n",
+       "/qxSNmVilgYME52nJlskZImj1h5UTtKV6J+rzPrpZikic9/L/Xrz3uafeyQxdb21U4T7PlH9P6/9\n",
+       "jxLACoyZviIrILX7oGlqIuf7T0VApEKECBEiRIgQIZ4Ynw2RitPIpaSRgmzHPezJLE4rnff2vTXe\n",
+       "IEfadoK+Y3ewt8Yab+fHxhAuX4Cjj2UfSUpcMdCEyVFtHqTJ152tCCtUkN8ujXsfsWRdLE0jcNcp\n",
+       "SmPnOVTy3noCH871ytQm4O2H387bVjkM5FJ709ZVd93wKlHezpv+u3lb26CuG6EvSyAWasJ3JF66\n",
+       "7mQf/ciIHFJtKQ1U0bGYVppqOlg31k5qphp7WyUXmewnSew+TVj11Vj1dt3t/JmCU91oy5Uo1j5B\n",
+       "WioseloykDtihRV5W5EugBxuz+0+nRq5xgmai4HOLYFxXTnYNejKqDrYvauRLtz0lNiMFflAtdsO\n",
+       "aONtZqt/rZOl5m/9RNfqdKVp3785iUapKG21lqdq9GnXWiLFPSLdTgp0ck9avgkI5NUz6c+Kwjjn\n",
+       "nAcykpCmJ0Vj14Q+rDaS/v7Dr0yP9+23v3POOXd5ZcjZw52sZguqJxij73RYkmaU1n/Yy+qzJFRH\n",
+       "tTQ5ua92GLsN9T9F5w4HQ6kjINvH04OdO2rnlYUiTbxax7Hyj41Gl1TXTVfV3WRLWEXfGtK+rJYX\n",
+       "2C9VlccYLMgKpoNx4nIl2+5u7Xzfw7ri6pm19cWFtPH+3u7rBrYTFWmfmiOQ+721k1pLkG+hy4Am\n",
+       "5qoboQ9TReQZfVNd28n22wNZTS6sRtkI5MoTwppuBW0ab23cR2qPoLYDdPztS5lDDt8YS6BWH/uT\n",
+       "ab/GGOafo30vL2U8PdTv7DyBiNcHns9USwT7gdb6624nfe3mgZBOPH9G0tdqkU/PDYt2WhP4lwB1\n",
+       "nqgmnxp8Tk7OTZ9Xzjk31lrXjzRleIwPDJ1iHqHsfwefUdcRStWizy7Ki3nbD1/9uXPOuednP5m3\n",
+       "bTdyDmdruZ9x+h/Nn/322//TOefcL17/i3lbqddD888Ea5+B2qRuoJuiZ5Eit18939A2ucc/+8Gf\n",
+       "yveJufhw8zu5rprGXyHfTzPSXDp97tp9aju1RCDkFIadOekgHdWx/FQERCpEiBAhQoQIEeKJ8fe+\n",
+       "SHnv/zvv/Tvv/f9N2/4b7/3vvff/Bv/9Z/TZP/Pe/9J7/3Pv/X/6D3XiIUKECBEiRIgQnzv+31B7\n",
+       "/71z7r91zv0PtG1yzv3zaZr+OX/Re/8Xzrn/wjn3F865L51z/7P3/k8ntiZGtF3rnDPIsgZVdKQa\n",
+       "UhlgwYigyAIC6IHccVclUsgnowVrpPY3PYmX38u+12uB1lkIpzWfGB31sERoO4PxD0jN9d4gc617\n",
+       "l5ED8dlWUjd3OxLPJxAx9vL9/YFSw0Fz7h4Mnk8uZRun36qIt26ZgpK229OxOrUuIFdeFbYmqbTd\n",
+       "o4xORUVJgD96+X5Ht2+AOHogm4QIIkJuu3ZUt22qiZZ87FQ+4RgpUpjvd2Q/EO0ffcc55zLQgwO1\n",
+       "yQBhZ5J+4n5OLOwEfXi0PpHCSbtBbSafGzycgRbNC6t/GA1Ke7BNBuhWbzTSNKo7sB1LHbJPfO6A\n",
+       "8QeIUhOy4h8H0B3kBH+sQeOmlhqcTEgXXtt5LkFZtRWlKddyfKXMnDNKT+FuFsxfbC9xDVSTElYA\n",
+       "ZUnjby3j47vvjFpWaqmj+m8LCMAPZAlwdfXMOefcHhTUVWljLc/hjk196LiX+7QhAXwGurdvTIC6\n",
+       "3Mp+Dzz1oMOzSbHW1lPH5p4sJFY4Bp/vAhQci821JhmP0/VyhWNRTcRKa+3ZtKt9guuaKaVwAhUX\n",
+       "c01Q/PtAzt7brbT1am19/R7nnLB8AML71dLu3cNOvpcSVaqVD5TaS4mKVYsHH9u2BGNoIvuTGHTw\n",
+       "SDYdE+jWiZJHpk7lFiSKVjE62snT9U8QuX8BR37nnDsi86aqLbHoWhN1WqNFR1iraBKFc87VsEc5\n",
+       "HFgoL+e5hiVKRPdaJSAt9ZMedeKiRyJq+TehSVFv8eHI1CrsXArru6OKx2GF0lKyT4a5dt9Swgrm\n",
+       "eLZfqNHuEdkfqMwiYVd0tTOgepIjXMkXNBZfvBAB+gZ95+7G6NFVKWPt+cbqb3YnmQs4UahFgkhE\n",
+       "NWFXa6GlM+o7N7dSZSGKje7Wuq+aeLNIbfyXeJ7dktXNhHnKx0ajT5A+DGS1MTuw03OvhyidyhQ+\n",
+       "qu36qfh7Ealpmv4X59zdJz76lLHCf+6c+x+naeqmafqdc+5Xzrl/+vcdI0SIECFChAgR4v+P8f9F\n",
+       "bP5fee//S+fcv3LO/dfT9P+w9x69tnRbltAMH9sed+1nnslHOtIoVVWNqhZ0aNGiyS9A4k/Qqx50\n",
+       "aYBogAQSvQIhUOELCSqzgDRkvsx85vP3u/6Y7cJH0Jhjxhwn7+WldKXkSqU1O/fc2HuHWbHWilhj\n",
+       "jjnGdCsin4jIP6fvfCeKTL0TvTQS0Wq9Nr8sQisGvOHHROI8Wys5sar8Pe5iDVJs+jP/LcjOA7lK\n",
+       "Y4EnaQZUKfaVWYLy44RkBVZ4S5/IhyfGm/7u1lep50ACVltHBNIMZF9apbzcfy8iLrCXEqpm797x\n",
+       "SKQ2rKCYbG9+gRJRCW8Hojy9Fp/MHZ3KxNdAffJEz62lFfSIN/Kejm9l8j0hAgmOz0JnI+4PCzda\n",
+       "8DYj78YLklMYDSXQFUaWEREc8gyr1FcmMkLUkATU6h7HpzJ5E1gcfZOkJpJJYnoNxOdGQ9popSmZ\n",
+       "eUP5thikdLaQSoC+ZSx/YE1GfbzHebLAatfrijmBCF/G0giJCfj5StvkJKraV9orlAmvlk5Anz2+\n",
+       "aKV1uNXfbLck3QBiZ4QV3/mZ+zrOS2g6X/M1tFJmEZG60X5akP+k+S/evn49b1tDOmCigoZ+9kTU\n",
+       "frei+9pAViChVX0D1Glc+PdMMDeh1bdVKjApNcOYbEi6owSyeAc0d7n01bKJasaE1kyzXyDJagCx\n",
+       "Seh75mu3JK+72f+PEI4YHel48DkuzfTvBXkcWmCxLC1JspgSxYIQBLsOkzcQEVkAsT1UTmzOQRpn\n",
+       "IWBXPYBYJh2/tz5Bfmmj/YA87CIgUlPkv44hcBrRY2cA8X4krzXjJ08oBBkJ1YuQYdhVjqqmyCxM\n",
+       "5F1qBUKHEwvCapsU4sjdq9c6T77dk3BjoW1SAEEn8FkiYIJT7G2YYp6850kIT75IGLnUf3vytavh\n",
+       "2dcO5PsKmYYRqHo/cL9G3yWyf4c+uaUMyzqF0DDxpW1O6EmmoGv1niQ0nk+VPqde3xA6DCS2iLXY\n",
+       "htH/HgUa3ZGEa/FMooSIpEAxi4UXakWFzvc8dy8xB3zx7M/mbZ9/9rn+gfl3TbI+Dy51H9c7KsCy\n",
+       "PkO+qoMhx9TXBiBSFbV/ApkkFni+J7b6nvhQsvl/KCI/FpE/EJHnIvLv/4rvvvt0DREiRIgQIUKE\n",
+       "+JcgPgiRmiYnI0VR9B+LyH+N/z4Tkc/pq59h2zvxf/zTF5KAZ/T5T87l6uHF+74WIkSIECFChAjx\n",
+       "/2u8/KqWl1+ZBNCvxoM+6EUqiqKn0zSZaMe/JSJW0fdfich/HkXRfyCa0vt1Efmj9+3jH/4bj2Qg\n",
+       "bziBN9LFpad2OsD+8eT45AIaPC1D0bgKJkxOgOWS0X9rartFAs+z0dMDGdSr05Y8f6Afkyz4e6q3\n",
+       "seuc2H5zp8S7qzOHLE2B9uHZT+Ztu05h5gO0rfLMYccSnmx55GmXAfokixUdHwTMmNKClqoYibA3\n",
+       "zuTheZNU2J8J65YdEQwj+FuR7pWlXWICLgcj++WextwdNWUU3YPAEZTa20G/59g4LH+21vaMkSox\n",
+       "pXERkSI2jR9/yR5awPiUsixAmBxJNOVUaUqjHT0FZl5/I5EtTcn+iIZiT6Um198uFw6tL6A9diSf\n",
+       "wg7nvl25AvMZFMDfvHVSsLVdXftvzafPdWGYxG+EdT/fDB5TA6UHMqQROlJx39/q8GzJgCxGiprt\n",
+       "FBflFtdIYxHRIo12vvUxuYcuU1Z4X18iLdjUTva8AaEzJx0tqzkpC+pjGMemLXU4eCrKSN6fPnSN\n",
+       "G0utjpRGbaBY3nZUWHLSPhYxUXr+17f5tUJFu/BxZUrtGXmY2fjrSQvJUpqswWW+im/e+DyxPdN+\n",
+       "nNI8ZartTOi2sJGzWvk4tZaLY9/WVEhVkLKzXevjKx+nd1Dl78kn9NRqGy9JM6eYfQeRsl3QZzMB\n",
+       "nBXz4Q7AKVsQwFNquxHpQOb/x0ij9BWlalE8I+bDRg+xCue7J8X26z0KdKgNKxQtHWtvky20pcrU\n",
+       "55PD3XN8n9JiCx0TOQpLmAifYUySBKF0qaWR/VqTHIRl8gQ17b2YdJQmjPuOiodMDd7oBpLTZ6Cq\n",
+       "5Kn39c8wPp+cecqyLPU+NXSeu5P2k2tKwd2CynCgdtpjPq8ofTp1uqPdzRciIkKXJc9e/pWIiNw0\n",
+       "X83bViieSdgTFP+Og88TyYR0P5G5Y2g/ff3sT+Ztb97+joiIFGeYLzunO6TofwMplrejzis5OWBI\n",
+       "b56onpY1j8OMnsUjKBif/9paPv81fR5PyUn+7H/xdPrfjL/1RSqKov9CRP41EXkQRdG3IvLvici/\n",
+       "HkXRH4iO9S9F5N8REZmm6adRFP2XIvJTUW+Of3d6H3EmRIgQIUKECBHiX4L4W1+kpmn6t9+z+T/5\n",
+       "Fd//xyLyj/+2/Tbt23ulwWmqb35MsCxQrtmf/G3VCJsFrWqOd5o9LNe+1E7gnTM0RBIDSjRgRZaR\n",
+       "C3SEV+woYW8k/e1m6UjDAkrc6YnUWQdd1RxOjj5cnf1IRERWpED99PJfERGRn3/3R7hmP1aWAhnI\n",
+       "yC8NJOeB/MdsOZeS1IItjtuayOP4Tdf76mNtq85OryGJiUTc62e80Kygup0SYTRFl0kIJVwvtJ7g\n",
+       "eHLpBis1jsVXzqZEsdsPKmzgAAAgAElEQVTzigSq3EYEp+WqoXoZlVqbYve+csJwZGTj3FcVW6zi\n",
+       "x87f42+BdpwtCPURve8m02Dl/SJeBs7+g1Omq5KmpnJ13ONV4SvddaFIQP7A0ZSb6++xY1/p9UBC\n",
+       "djvdttqQ6ndmbul+//MlVnqFr6qaSduiP5JiNtYvCyKgV5A9KEtfuW6xmu1wc4zgKyLy2WdKLP3+\n",
+       "u+/8+oEObB55G7a4F9XRV4nbM1w3KQKbJMREFQDVXq/j8lJJ7q9evZg/W0JW5OaGVK/RQStSMbY7\n",
+       "vKVijwHHyokAb+X8BSHMPcrpS8w7I3vooQ25PLk1BwQqSjgcgH7HhH6hP18+eDxve/lar21ByMnG\n",
+       "+il5/CVAlid5V2pEbNyTrMtyhbFLXxuBkh6OpOxtauxcZAKCeH2i8nso8BeQuKgO/phYr7TvsLOB\n",
+       "AScTKXAbGZm9E8XGJzkAmFJ9tnQkfq4QKfE9IiJnILEz+vgWMjJ96XPNeqUl+V1HzxioZ68KP9YE\n",
+       "nYKRqLx2L6K5vxIWgIvNUkZabA4jSQigJDk5SyzMUYMQ0RYK4AkV9Nwe9Xk2ZBjrjd/YbaLj5AeU\n",
+       "ufnNRyqFcn7h1x+hUOXlzucJk2zo6P7vUdBz2vvYNX/YnsbpRftL/e0OvoKEKn1//VPdR+vz2oj7\n",
+       "lOd+/TFI9BJ5QcuDjT47Tieau1AotSh9jP/86z/Vc3uAa2n9WWvXlRU+1xk/v6N+YkVjy9zvfwJP\n",
+       "1oiL3OyekSvGPQT2PRGUzUOECBEiRIgQIT4wwotUiBAhQoQIESLEB8ZHMy2OZSVN6wS3fIFU3EhK\n",
+       "zEjzrJYO2Z9vn4iISLl2yO5nIKWVa4KAM6RqKiKgIZNmukgTEas7pALZoNTSQkzEXMYKH04EWZei\n",
+       "eOPtzomlP3j0eyIiEhEp9tFWCxpvNwrd3oqbbKbQqUgzghMBhVeUbjDJXCYgT5NCxmzkG0FUygij\n",
+       "IiLxWXfvnEoi4pkppGRE+rN9NH6fFhuFRSNKd2XYz2pJCswgLKaxp3ZaQPrjwLpA+nkGCPhYe8ru\n",
+       "7HyBS/bvl9BH2YlDuyl0puLR4dntQmHuZe7XeHP3CxER6RO/nhbEwhpqyqaJJOKK+gmlNk0BnbXF\n",
+       "GriBPti4yWcDU+OE9HZ+9MlviYjIixe/mLedkA5NzPiW+yuGwtUDVoJGG5Z+DdWoqa8yccjcNKVO\n",
+       "ZFD76InC6BPpyLx+o+34+LGSxy8vPRVp5rqv3349b/ut3/5Hul8ilh9vFdJ/8MSPbwrMXUO6aLPh\n",
+       "sZ/72Tl0ZNAnWYl7De2an/3U9WQeX2mqbGJlcfSnntJdphi+WHifMPL48+c+7s6RDpz7GI3rAXoz\n",
+       "LZshg1qwpHlif9Dr2a49tWBGuynd/4cPde56+ewbPyfcivWazH0xP90i3bvd+H4t3behNOaXXygB\n",
+       "+JNPnvjhQVuYWj/+4Y1qek1EdrZ0aEdFCbNGGTgD67X39VmBndJtAiP1iEjRI3SOJkpjjkjfMAE9\n",
+       "zi21SarUoGPEMJ6XhadiMqS5P3vixeGvoRV1TYUilpV/TKn1qdXvNbXPHWZWvWv9ubNDCvwx7sm9\n",
+       "tFNm5+bXtTS9Q0r/ZBlMm4mAbirfOX2vzHXMdCOnpTEX1Xq/Vpl//xM8E55QG1qmOqY0egNdsubk\n",
+       "89n1nZHI/V7XKOgYqADG+nhJziNVg4IiHKOh58pMraA05g5pvqTxc1+X2j/ZFaJqdT4tSKtPoAe3\n",
+       "pXTvd880tZhWOp/3Gb07wO1kvXHJyhw6Yq/ffO/bcM9ypo9EZgLt597i/tgzQUSko+Ke90VApEKE\n",
+       "CBEiRIgQIT4wPh4iNT26V8LYpVB9rt8tMVxufLX26ZPfFBGRqvPvPbrQ1XTXfDlvS6GsfCCp8hhk\n",
+       "wxEr17b1N/PFGitXNtiJ3l1VLzb69r0glCqJ4CGVkCTAXlc9m/KR7w6r7hwq3iWRHuPRPLfImwnn\n",
+       "Odx7GUYJO8kElChX3pPaeQQ0pSBSZAsS6RLecQtCpDKoR7OHVDr/y6X+irqVC/dVstLtJTyXREQy\n",
+       "tN116/fJroeJ8j1U221l2NIqYLcH2T8njbFcz6+gFVmL1c92RarckyECvpp/VKoUxdudIwIy6Epr\n",
+       "ATmFA68+gaqMtKqOQbZuW19Btwdtn5dvnJT98ELb58nGPfESeAI+euDFC/s7/U21A2Gbrt9Qr2bt\n",
+       "fXgBD7esdFRhmIsCvO/mID6XF94m9Qmr1Nbv8ZPHivAsgeBldG++e6nI6dOnvvo3X8OavMkuUXZ9\n",
+       "IMJyBHSACyW6g7a1jUMRkb419Wq0NaGqBoR2JDVgStkjlTobUX5HJfHbjSFdhJJiPz0VFJyg7G6E\n",
+       "YvZGK3MjETv6NGEwttSGprKcE0rV4fxG8vWyEfvpZ+4Tt7u7xTX4/qxvGyLcEEpq6tnVnW/bALF6\n",
+       "89IR8bKEhyX5+u3BwJ1IxTk2XzGSoL5An00wd/B4NVRvovEnKHGfuAAEHmoRlZXHkJ8R8nqcQMqO\n",
+       "cpp3IaMwmer+5G3YQ8JlJF2X3/zJ74qIyJ988ct52w4If135/HOG+5ORU0IOkjH7fw4o0Hl1q+05\n",
+       "crHHQsddR84OaYExS2RzMUmW2MdEg7k1SZ0UvkQ7Rr2f09VGx1sf6zmtEn9OlkAET5XPE68hsZLf\n",
+       "+ZxwfdJzfnbwtn4OUv7xSOeO/Z1v/HmyWel+yg0h4TBj7TodlDX1//k5Vfs+zIGBvXNHEOuvLshR\n",
+       "BE1Laj5mKCFtxtIRuu/vb3XuXq2dbG9yIlnsz98RKFkae6GKQM4lpUxUFmv715O3Z9uZd6+fVB4t\n",
+       "RMQLYf5mBEQqRIgQIUKECBHiAyO8SIUIESJEiBAhQnxgfLTU3jQtJSXhk92two5F4ZBtV+nfT88d\n",
+       "Yrs417RI3jrstyw1fcGmoRlSNevU4cnlWmG8HMq2t6TcOhrcFztk3QP2PjFhGyS2hFNmMFfNKFVm\n",
+       "+i2pONzY1watQ7F38muYoLvUE7HZVMFZiqWCAm1MMLIRlbekNn7ozYyWCJgg8vcwGd5VpLFhXNue\n",
+       "tTNAjiQtkHpEW8Rssqowa0747MVDhaebtw6HHk/6d0Kpghxk02g2/vQ2fAPdnYdnnkY0aHlFmiGm\n",
+       "lFs1rmO1zD/BNRCMCx2Xy62TEt9+B0X7Se/XxT11fGh7UWoxgvZXTQR867HffOOkbIO7CzLhXaIv\n",
+       "JrH/1kSbY/QFK3oQcfLqQGasZvKcxn6eKe7PQDpiFdLWfe33M0e6kw0/J+xvD9JpRErkiyW+R2nk\n",
+       "O2g6/e7v/9687Y//9P8WEZEffuppTOv/Lesyoc0ePfB0Y4P0TQ+y88NL17g6nnQfmzWTrfUauQBh\n",
+       "udK2u772thsGI5t7yqQFodZUx0VE2pOeU7l4t7BhwDqzJG07y0qyzrBx3FsyQ02Q0hpJx62udexw\n",
+       "quzyoc5nxwORrbHrDvfOVMJFRAbMDyVpppnGTUpzUnVAmi32Noky09GjMW7XS5p6Jptkavflyts/\n",
+       "Rwo4JX2yWWOHiP3mFR8RLyEuzbSaHBhqTVslZJYenWv/mJbaF4ZnPq6OlaaHv3vjBQMTzoVJ1B2K\n",
+       "MgbSgOpSK0rw61+skQI6+tgZcC7Hk/b1itqrqJF2pfYyw9+EH6c9TIOJKG7z6anz585qo20S02/L\n",
+       "CQVNBZwViLA+9drGNzfeX57f6Pd6utf7k57TXe/zdJVo/4tIgX6NFPCWtPUuztE/SayxRgqwhRI4\n",
+       "NeF8TjGl0Qekx+qR0rKj9slj7UbSpVEUaDyZ+XhDWnFDo21yRGFNT+4Ulyslsbe1zzX1nMYnWgTm\n",
+       "sZGoAnYZNRVFjXgmxROlpanN3hcBkQoRIkSIECFChPjA+GiIVNM0syKsiIhgRd4Ssbs+mWIwrZJB\n",
+       "/O2FVj8g8Y0E5sQgZRapr9xNvdw8nJ48ctJnjbdfcsGaFU4H8rBrKn2rb+lN19TDyyWdE/692bnX\n",
+       "W4zjLrMrnLe/rR8hBdFMjn4lEd6wiRw3RPpWP4xOWJxifXM+WzkiVU66mrxuXvo54bddB/SLCJsT\n",
+       "yHxJ6avlCehTRuXyHf5uSFnWiN1rIjZvNvr3jwsnBb4F6shed6YbHaE0uMj8+0miiNT1nZMto1Gv\n",
+       "a7MiIiIYi9XJyd4DvKtWay9/TrByWueOSC0ea9s+++IP9fi0Wi6Wei7LpZPo+wRltTtvV1vBDqRA\n",
+       "/OyFEl8LQg4fXOm55CQxYaZdM7GXeyC8A/sTrf67Fa7PV2t5pufU0n3qB+0L7D+5yKDeTaiL+fN9\n",
+       "8kTRpBev/LoalP3HhH598vSHen3fO9L49KkWeySJj+fn3ysptCRl/0tIHfSE0tiYMTmPgSQMrG9c\n",
+       "XXq/OsBDL2UEBSvNR48cEashz9ARed3QlGHv25ZbRT06kGdzQlUyjPueUDVDrnIiWzdAAlMq6+6B\n",
+       "jrDXYY6iEFq4z7IjJSmwH251zmhAlM5i6utAlUYizBux/EiSBPEAUjSXtaNAZiQSr6Fu/CAw78AW\n",
+       "CBqjeim8CFMih1vbUQ2BJIZOU0n+/CchgkmpjTGRArsATYu2Ou7ih+7rePpr7Z+v6ftff6lyIjeE\n",
+       "YHx3rej0ckPXCqRrd3ICcgd/wjR3xMHU++0ecv+f8JDpO7+Jp6Op6NO1AokZJyroAQE+KvyZcARy\n",
+       "tky9AMX6xBKFDzwiDHS+IVTlFvepp+dUigKJiLYtMD4b3iMuo1yQJABU0Seau8yLco+xUzWEfqKP\n",
+       "sV9kCxQ1IbV7M/59/doRxuRK7+OqJoQVKFpEkgMdPDkjqI0PVKj2Gr6iLKvTDbrfvvUCiCX8B+OO\n",
+       "SPTotDfkU9q3es4P135P4oLvwrsREKkQIUKECBEiRIgPjPAiFSJEiBAhQoQI8YHx0VJ7Q9/dgwLL\n",
+       "zAiDRIREamMiFe8aWkh15fD8Hkq1NRF1swVMIyndsCg1tTKOetxmdNivWCu0OBHpsUAqRDi1BUj3\n",
+       "eHLC4B2UgNvB00gXSEckgzfxrKwK2HW9cEPTw1Gh6JHMIC210AkR9pACS3qHttdQcV+SufIWcPzu\n",
+       "uROwjTT/tn6G/bvG1QS49+yM0kOAQGtKAZoHdEw6KlMCjRFqz7FXAuCS1LbPkUa5uyZz48Hgc4XA\n",
+       "15SerBtNBb29dn2cNFaYvx38WElq5G2//9e3qinW0jk9uPgNEfFUnIjIdqlpvuHJPxARkV/81f81\n",
+       "f5YN1q4Oz/eJqfh6Ox2g/cQkxgg6Mm/evp63DTHMVcXJlmacbZJia0ottkiBkbSRdL2lVp2wPpqR\n",
+       "dUTmzmKmod4nOyMtU/qsR+rry6+/wkesZ4SCDSJ7JzCjvXnj6ZHPPtNigLHzMXkCifvs3NOi1otS\n",
+       "GveWW1jBvDci0ml3gKFo5tss7WR6XiKell4sPd1t6faIlNJHjONy4d8rQQa2dEZP+lRmoM2UgWav\n",
+       "fT0nFW/LlHIRhY0dVgw3M+IlEbVNM0moP5lZcAt3gKr2NGoOc/O8JG0rFDSUpffTznR0SBcvQhoj\n",
+       "zT3dUqMoISYKhBkOx3ZhlIo1de6xIWI92r0ntf9UTLOPUtWj9omJ0veWeoq3rlXWv9G0eALV/5hS\n",
+       "i49++++JiMiffO+aUSn0ob754q/mbTdQzL6lZ8JU63xXE3n8AJL/QKRoa6cCzTmNTDqGZhblZ20M\n",
+       "R7Qtwhw/UAowRvowS31OGqAKf6R0Ww2mvtU9daQwvoeO1oHSqA369T2ye4r2pzRWijETp97HM6R7\n",
+       "G9JbtCIw1k887nU/+1vt/5VnUaXHs2Ck/iexnjMbH8ct5i5ylri71c+zjfeJAdSPbGDyvh5wmvUO\n",
+       "vQ1vdt/jPLhQR69xTYT5KUHxRk66iCi2YbNyc9mISG19syGhq/dEQKRChAgRIkSIECE+MD6e/MHQ\n",
+       "yxj7KqCA2mxHC5hhAgGb1LbvoI68Jw+h49FW+P7jE6CTlo4R4S21AHGsIGmACqqnWeRNYnIBKcka\n",
+       "9EY2JGLnCWXaEykgl62u0lZcJgvfLUMBeBVoK82epRagKG4luiIiO7z913R887PLy9+et60LRQIe\n",
+       "kVL2L77SFd5gaE5GyupYhdY1kWMhRTFQaeiEFWlLy/S213vy0sEXWeWKSI3ka5ShaGC5YEq/Hs/a\n",
+       "kFfQhk4Nk6N0dwe9hhOtiABmzNIIIiId6nNfvCSieqLX+/jc26kXI8r/SEREHnzuK5PXUNG9Svka\n",
+       "9PyigZTF0WkbUvEtsDozJWYRkaHVbREp2tdAFtMlzoOIwLs7FEDkjqCkkd7XqSHCLPo4o5mpgJRO\n",
+       "K63DUfvWduXH70BaT9fa1jn1yQH3uCRErgKxtWByNIi/9YGQNvT1DfnEZRhvLRPlC702azru/6Z2\n",
+       "z75+I8rZ17xCxIq84zJ1oBQsidKBoL0k5Mq8KIultRf5xQGdyknCwjz09reEiOZYzZP/pBGws5RK\n",
+       "12NDs72frkAUH2k82z0rQcruyX+0yIE05X5OTfsu2XeyIg6SmBmALGfk/2eXNhBRd0DftvteEIJn\n",
+       "8hsjQxKYu7MLR5MnIM3Rwoni/VGJ4nHjaHqUYd9UABFfaUFDBNV/eetSBwmKAX7z89+Yt/0P/+c/\n",
+       "FxGRu9rHRDvgvpJ0y7fXisSPhHTUg6E0PiYilN+XC6jDT95eY2Pl+kRAN0Q4p6IokPHrg/cn846t\n",
+       "ckaucfzJz70XPRcD02IqNmghO9PR/TL0832uFBNhJam5HZSEUpokBH3vhCKovvVrrA769xFuExUV\n",
+       "MZiKeUzPmhJSQwRSSg9F/aH3flrheXNIKBMFNJ1lP0xGpgLqFNFzugI5n8Ds+dlumSkRkRHPsZGK\n",
+       "QqxQpCeNoQzFQFHuWSdJA9k8RIgQIUKECBHi7yTCi1SIECFChAgRIsQHxkdL7e3313K+dcXoxhDQ\n",
+       "yd/tBmB1rLd0u1Ni+YHI5gPg+XswOkiMnILaQ+17MIVXgvEroKJjTOkunEoUObRt5r4ppQVHCKRM\n",
+       "mcPdPVI2Y0bHAEFzNggliHO1VFIwK1bHnebKBjINLgpoW5HJYmO6GympWINE+PDyJ/O2r7/7C5yw\n",
+       "tudEBMcIBPyWIFuB7klSMj4LYnVCaSxohVSVp2C+fa3aLiOdZ4S/U4KqT42eezErNXu7JiBMbtau\n",
+       "dn13q2kRNo2doCPEei9Gym4pZfLNsz8XEZFF6f0uh0HoBBg7Xzs5egktkpbI2UluqvSkgI9r6IhY\n",
+       "Wx/1vm+IFDy2SBUQ2beJNC1qaST2PU2QUVuRGfMy0VQZE7YPvRJvp9FTJsOgv2lG0kyCBtC9VJkR\n",
+       "6mNtE1bstrTcy5euLfXDH2ra5c3Rt11c6pj54udfzds+faoabQWlkay/lQtSFkc6wlIQDNmPpvJP\n",
+       "WjgD2qlpfFyVgOon6msxNOBY78n0oBJKIyVo8ARzBx3KyfFkUN5CnyxekBn6aER1/+0sM0fpvsHG\n",
+       "Px3/NJkrgo+xCWmGHqTn8wsi7CNXklG6zVKwI811gxHliWx8ArE6L8mVYFb+p9+aUj5SYCM5MMzD\n",
+       "k1KGCf6OSFtvWlm/p9TSSue44db13iboTU0Ju0zotrhUAnL1wsn2/U7nxJHG+sWF7rdkCobdPWqT\n",
+       "Ear94+Bz/PlKC34WC7/HMead5QJ9iPpwd4IW2Ojb8lK/39CY6EGGHiKipZzmh8y8LSpA81j4PTED\n",
+       "8TIxjTOffxqooh+P3tcr+5zuyRLXUC642ED/XRApfLAULP22BwGeMoVi3aibNxI5He1q87XuV/fB\n",
+       "emMN5skbcoVoTbcw8mtM4R7Rkir9gFSqFZlMlNq0KWukedrMsI8nek7g/DhJd7L0KeknZgWeSQue\n",
+       "i4KyeYgQIUKECBEixN9JfDz5g26U08nJqal5rMWk2D1AYXfwt/XrO12R1PSWnmNVFRHSUAOl6anU\n",
+       "PY70TfgAYrlEvjKPRqiNT1QajpfUfnBELMZKZCZJisjlpa4Yb09e1v/gUq+HuK4ygjRq6FdGROz1\n",
+       "Us99aAj9KPF3QiReQ8noPAeooe+OrjZ+sVFEICNi3dVDvcZX11ouGt0jLKI0dPK2ttX6QKuVFCt3\n",
+       "JjHn8BU70Crp1VtILFDp/mqFVdz07mrC/Pwm8jRKoET/4MpX5Buspr/5/st5m5XLRiR10XcD9k+k\n",
+       "ZKx6vvz+T+ZN8VOU3ydKjk8G7y8bEKHPNy4hYGTnNHWUbAT6SFZXcnOr287P/HsRiO1t5dc4Qqk6\n",
+       "zrTtmt77pGBVxaslk0IY2cNs/oz6hJUd00pqANLB7gElELObG+1jFxdehvz6tfanH//YHQD+4i/+\n",
+       "TEREfud3nbD/5S8VfTzfOtl4hRL/mojl5h6wImXrxYysYKVJfaMHsTslREBmKRRSMQZinRJR3han\n",
+       "TOw24nnM6vGF/j3hWDmR6E35P2GkGwrLa7pWQ5qeffPVvO2Tz7U/NZUfvwTZtz7RPUZ5dsxFLhiL\n",
+       "6Yym+RjOMYYYkTWvO0ak7O/9wVW0J4yJ3Y0Tm5dLvf/LBUkiRCanAakVmmtaIKH5uaOksjBUiVbw\n",
+       "GMdJ79dv9fzxyufpfn+N/fl8KjYHAX2Pn7jX5qu/VF/Hiojdr251H4zSFigsici7dJrM/9LHZFnA\n",
+       "AYAQrgyT7BIEZwIQJUV/TSJHmgcQ28/PXMLhZq/q3fudFyWYKvrYUt9Ngdyw7AYKcGyO7+n77Un7\n",
+       "xoGKrczPktFU1FpIklNBE4jXlEyRDJIpXOzQ9u8iZwnQ0Rz7Y4AmRtYlK7lNgNKdSCYH/re7g5/7\n",
+       "ONr483ZaA7Fmk4O5GAIyOfdU9HEszjANmGPrlqR7UFDVEUqfgHifk0xCucS8m/q5jySt8L4IiFSI\n",
+       "ECFChAgRIsQHxkdDpKI4kdORBDEhCEZ6hJKkulpg3kRt5YqEEp1vdPXXib9V7yotex8Hz1uPgtUR\n",
+       "lhhjTytdLILYU6c8A6oU00oLH6edr+CWEVY19PZ/Ourx15ckiZDrKr3pzFfM35YziH8Wha/W38Jj\n",
+       "zkQQRUQmvDnTC/ksOni7/37e9vBSV0cji8lZDht+RXFM6BucrseOSuNrc5Vn3pr+m1ObFJH+Zlo6\n",
+       "0lGBN5bQNRrC1RPCaJygKIXgGon/bTZof1pCPXygiMntna9gDwfc14R4JgKBVeoTZa77y8i78dmr\n",
+       "vxYRkcutrlJK4j5Y6fTjS18RH6G70LLQIFCUmHkGWDK9JvFRUx0oFtTuB6w6sfqJSP4hQbsfar+v\n",
+       "C3NpJ6E7Q04y4vJEWFUPVNZryMnQUel6aStsE+Hzzyas8CoqdX9wqejc1198MW8rwPnjY1WVyQr4\n",
+       "ODEUsyOfuOXSOIcQySUH9w7XtWZBVCBMEy1JTUAv5lGBZmLuR47zjHg8Q85hwraE+Gsj2BQxLfVN\n",
+       "zHAa+Vh6jJdvCBFGP+Vl9c0RvnrUT2MgYcNI5f8YFCUQ0ZrEL9MZxSakZTSvNx/rtzc6d/D97Gvd\n",
+       "D89nhgCedlTqbWKmEGdltGy1An+FZCoKcF6i2O9TnGtn74/kNQqkIyLpinwLMU9CDhITjAQKUCxI\n",
+       "mgBjeLd3VG2P4+8J6avBKy1JuqJBv+8n5xIar7InRKoZ9DedyX8whAMOb1EQRxbnOVK5fn3UdkoI\n",
+       "EYvsmRX7/RSTPyDO6YDzO4K3Wbd+XwEqSt/791vIDqTUJ9tZJoC4X7jEhIR7O0hnNC3zm4CSUsYk\n",
+       "wnXn6AsZeZ3WJudzTzpI99dWhAihH1cVcxT13zL373XzGKP53BApjJeYMk3rxHhT1P5AdRtC805A\n",
+       "wtgT82yj7bOg4y8Kk1PxdwzOIrwvAiIVIkSIECFChAjxgRFepEKECBEiRIgQIT4wPlpqr8xjaUjG\n",
+       "3IioKZVGW4l/3zs8fXsHojSlm2KQjBeRw43xWmHJw/HZvM38tAymTOn7NVSh88SVmIcaEOfS4dG6\n",
+       "U6g6I2JrhPRZGrMkACB7IsVngHFTIwVTyq4DyZgh3h3Sg0VO3khIX7ASawrCcpc7sfPN3de4HieF\n",
+       "joC7sxxwJ0GXPWDUaXhXMXk4MtEO5desrDzp92K6nizB+RE8bMzyYWBoG+kmpAJjVpsHUXW1+LF/\n",
+       "H5Du4ydO9r1o9J69eOGkfCPb9pRaLJAWWKXe7ZtW2/2b7/9SRESeLN1/8Gxzhd95KqJFiXdLatN9\n",
+       "CRifSMwZjhuTsv7tnab5tiSnYYj1gP2mwmXVelzuV/tOS8dbuicZoGjqkpJlSNUwURqp7InSQlWl\n",
+       "6fUMqbWKyf5IN7965V6HBVKGHaso48BnZz52zMPtePD0vctTUKpELLWH9ByprreWliKvTatxyKms\n",
+       "e5Qe+yD/s8j+JaI+Ununyu+JXXcMdW6htG+av3u+lp7enTw9VEFWYGSvNaQgEpqnalOFJ2K3FbdM\n",
+       "xGlYgWRthQ2UnfRtdJ4nEI9j8no0RwGbV0VElkjjtnz9INsWCann416YE8Mk76Yx64ZSe3MqkI6P\n",
+       "1GKyonsHeZSE5sSp1HEcZ/7bAcVAKcZQ+4akEdD+d3siR486J2/Iw/CwU0rHiYodOjxPErpPluYe\n",
+       "SHanAZE/ncwHkjUx3vWfXOD5k9L8F6EAKE2IKoL+VN2TnbGL8DbuBW4Hkx5joDFpqUWW8LAU6EBU\n",
+       "DTOoqMnr0eaJgapiWozTuvPfWoZwYAeCSOfCHGnpKCNJEpP/oNMcB3MxoHOq9O+m9uMvOxSKTT5O\n",
+       "28Z8Kr2PSYd+Wuo9LOmWrOA80cee2rXn2Q0py5tn7NmG0ugpxmTm6dO+PuEa/BhV9atflQIiFSJE\n",
+       "iBAhQoQI8YHx0RCpRVlKS6slW3V1hFbEOL1Xr52wO8AZ20oURVz8MolJOK83US1ekaJ0H6tAFsab\n",
+       "FibWRmgF3uZtZS7iK8Ju8FVSXqioW1X5sQZbJbDQHVZdJioYExHxCCJeRyTO3VGve0WoyhKIxRi9\n",
+       "i/RERKzbVUrGLBJyuh7wpg2ickraDEaonZhYblILdA2jIRFcFQASZU7ExsiIf7RtxP3sCGHYV4oi\n",
+       "9VjVFYzSHUBwJBJrAXFAW7WLiDzcqjxCmvn3vn2hPnlHWpEZ2ZnJq2ugVHfXSp5+0zixe7PVVVhL\n",
+       "Hm4trrXv9vQ9rEdWq+AAACAASURBVKoW3v7FFuX01J+W8LNLiFDegVw9ogBBNr4ymnCt242jb/ud\n",
+       "IqwHQhPTVq8niUmkFqvvKCExRUNOU+qnuJ4GK9IDXdfF1RPsg+4X0Jcteei5JIQfqwDx3bz5RFxY\n",
+       "9XDwleODhw9FROSIJTT7gKUop+5IaiNBiTtVOkuMfjy+hwCe3OvjKGefvO0EbTElhup5f82w6q5q\n",
+       "v/8T+jCvtPfwGlwQ0rTfKxK3pAKACefEKMkScgsJwU6VeQICQYgYfUDcv349p2+/+mre9uiR9t3V\n",
+       "2vtOD4+7h48f029RqED3zu5BHL2LiPVAH8rU77/Am68nqYMI1z/GLjUQW1n73ufOtNSxG9OaPoqt\n",
+       "KEiPf3PniOjztzqvxSs/qSOQrloc/ewhSdA1LH+j7VSTJ+UCYsN9w/OkydToPmqaVw2lvD34/d+m\n",
+       "iuZ15KFpRUsjo08ziX7eJHWFohwak9Nozx0UO1DbZMm7YzhPDdX3/dq9Y0QyRxahn7hQQf9tyLx0\n",
+       "jBbYRrI/pR1L2zAlYnkD1JdqSOYiBhbJnqUeUn7tMDFZb5TOiOrNu96ROYo3FhtGn1FEQMLV5t3J\n",
+       "52SemFHOvnr23CfkahYfpQKE7t0xyBEQqRAhQoQIESJEiA+M8CIVIkSIECFChAjxgfHRUntxvpJ8\n",
+       "RXAeiIrxQJpJgNvy1NMdAhjx1bX7NR1bhXuXRBQ3r52EUkuzT97MIaVUADy0bnauBZMipdQSFrtA\n",
+       "6imJyetsj9Tj4NB+BLLb8R5krFCxeeeNk0PRC2jGVA2prePudKT701qWgeF2kEFJsFX6BVI7qcPt\n",
+       "lqookU5gFXUTUe4nUodGCm6KHOJN0Hg9aXaYplHUsoowFGs53ZKaZgqTN/V7h1tt4440PoZJv/fT\n",
+       "v/7LedsnUDn/0eeuInx+rpo9qzNX5TbNmIhg5AHpq6xwYq2pu2+gtj1rUolIg7TX7uAk9kN9wDX4\n",
+       "PVmb3HxCWjggG49Hb8/t2QOcB30P6bhhxD1piew8aFqmq70Pb0vVtDqSin21RyqAjj9stX+mQiq+\n",
+       "UEwuM/baMqJog//TPnrzZOTUQoJ/2cNO98GK6ZdXeu6stm06MtwlJtPxAbOTj9UCl09I48e0wpqW\n",
+       "tJVwLgn56pm2TkYkUsGYzVnbykj+IKLHTM6HttM0MtkchF1KBXQ4lumeiYjcQQOuIxX3zPy/jkTU\n",
+       "xjmPlIJOQAYuTEWcFLuP0KJarz2NNPRGWPfx/PLZtyIi8slnP/TvTXouXGywWsG7kfTrYqSNLKOb\n",
+       "URo9XekYKxak2I/+1FPK0pSwp7eu95Q80FRxSt6Zs5MBF/mAPJw0+tuzs4fzZzdf/ExERF7XPv+v\n",
+       "kDK8rXxMJKm2Ewnbz04JKaW7jQ4y3fOJQ7oX97imwooCE29RsD6f7q+mjp3he+zrZlnmiDxWGxR7\n",
+       "5FS8U5g/nc0rBHdE7yk2yCzdRlpIJaotMkrLm95gR1SRGHOsFUeIiLSNpQX9uDkKXiLM3ff8KvF8\n",
+       "rqiIwdgb7Oea4pxyIpbbdfTkCWlzRtsSeR9/piiUSsnDtkMKtqG5q4In7ETtZBp8Wcp3Ra+noblr\n",
+       "wPmNI7kcNIFsHiJEiBAhQoQI8XcSH89rL84kzn0Vkk1KQCwi39Yc9S34bEWrFdRYJnTqB7zVD2Ts\n",
+       "VsCzL8n8t22jZbIxSJwxv60nIIeT/15T61tqHztyNGIlOJCDeFuZYjOV9YNsN5JPVQp1VAOzOlL4\n",
+       "rktdOvVEIraF+EgKr1alGlGpfQSUboq41BQr7TW7yus1bnANNaEVZsDUEom8xsp4IHXmFCuniZCG\n",
+       "ywv1zmqpJNxKsltaJaxB/M+IbH5RKEoTtUZwdXLshJXw4e0v5m0v3mhZ88W5yxR8+lTv9cXSV4nH\n",
+       "p0/138rv53KDEloiAPc1VKHRx0pmgoIcuj/4SreDN9Rm48c/nZQMm5MqvslfpNR3DfUi5QTpWxBv\n",
+       "M6A6RGqcTlitUWl2ghL2Vexq6/te0Yfjye+JqdgvyEOqg4fZSP6DhtwMKGFfrchDDqTskVTcE6zm\n",
+       "Y1a2x4qckaZrKLoz2XtR6HGXRPbvgQj2psRM43oC0tWRz5UhRxMdKzZldUbOQPwdCbpNcc4joa6Z\n",
+       "eeuZX2fCsgpAxAl9OKGPZ4QSmfL4mpAuQ6JLKp1vgWy05L93dakIT137Niu46Aa7h36+Bc73QLIS\n",
+       "NtYePfI+ef1Kiyaa2vt/Ac/IjMrfnaDsDbpZ6PUaWrC9cG88a4uS7mENonJ5RkgftsXkLDAc1Cc1\n",
+       "/cRRsqnSOZms6ySNcY/HdwnLxcLmBEeJl0D1rpZP5m1jr+NqGFmdGh6CVNCSA2GPJkIfrMrGCkEm\n",
+       "knVA07EnpBXUNIRS2rw3Ursa2jqS1EA06G+7I/Vd+M4OqY1/ul8jpCEIabEinrRkD01DjqnU36QT\n",
+       "SkcErWphQbIrw6R9kbqOHHbws4QS/TjQsxOZjknIFWE070zfxxpI5O6O/Qfh7ECkcPPVGzo/RjFD\n",
+       "UiiOIW/CFs+nW5I6qIA0JyWhdCsds4uC7ufwLko+z8G0cSJ07H0REKkQIUKECBEiRIgPjPAiFSJE\n",
+       "iBAhQoQI8YHx0VJ703gfYp2QbmAALQV8XhARbwTc+/qG4DlAtlNKKQAjxSWk7XJUuDdByiAhsrkR\n",
+       "YSOShzbTximiYyEF2JFp7BCZZgip4wKr7EjFtTWNHqT7EjrWAXciycmMFgT8LPHrN2J5RqrsdtiW\n",
+       "oOW3t3qsltJy50ZQBcS7PXeCaXdUbH21eDBvSwDZ1q0bj+5PCs9fnnkKKAVhMxkcHm6QbkiJxGhQ\n",
+       "aV4SUdPMYkGEzTJPT3QgND45c3j+LYxJp45J5HrcjEjs59BeWpMCtqWAEjbXRDrOpGvyFZl8ZqaF\n",
+       "RdA6bs/FklJrd9o+U+Ztbcj7FHEfB3ma0kLrBciuMFfdUQHAza2SbVcN6U6tQewlBfr1Uq+1J30k\n",
+       "657M6y9yM4hlbZ1ZLExEROrKYff2Tnfy4PGn87YEmjkxFYAcYEz7eOX6RG9vNLV0du5pIdOD2lCq\n",
+       "zLJ2XW/aamT8jG0ppeBNvymjIgIjow+kN2WZFy5sMBPaOGMj1xzXA3I+keiPHXTXKD1TQ229o1yE\n",
+       "pRs5jWy0gIjmGCPvswJ+a98jQnuPdGsH/aqY0sM9HBCYAjChnZrWt11cPcTxfb8liLox90mMyYhM\n",
+       "4BsYAi83SI+SwvQAUamG9psuNWXIxsdLqIxz8YCZFk+kLWbm0+PNV74t0xRQglTp3Xffzp8lmONO\n",
+       "ZFDcDtqfFunlvG271jRfMzgpvYUGExdATCjGychtIJ5N7ZHuZUPlWXfL+3CE8dxQG/aVGUR7CirH\n",
+       "fMq6XDHSgmNPmAbmM8swRUJaeJhDMkpFDXCHKEufEzLToGKD7Pi+GbaIk8Yj0uWaTBX/6H38bq9/\n",
+       "n63NZNvHlZlwn0iDsEehyEycF2/jy60fv+2gN0YaZCPakc3N89yexXqMmu6h1Qdd71kdXq91s6Z5\n",
+       "osRzN6aUHVKwDRUADaDSRFS8ERMN530REKkQIUKECBEiRIgPjI8nfyDDrD4sIpKkuvoY37OqHIhY\n",
+       "VkEBuWEPIaw04slXkwkUaCciCmZ4Iz8cFK1ZlkTYxpKcXuAlKez8iLCLsvox8nOKQI5MC18RCBTF\n",
+       "mWxnpMS+gmLugdRk4cmUl1SaaWxzWn7ERsAkryNjQK4XvnI9P//RvV2IuCfU2bm29URE0POHimo8\n",
+       "+eR35m05Sse/f+Grup998ee4Ll9V9pBsmIiobWgfKzZPQMcGKnXNoRpvfNLVkkqDUSY8kor2cdLz\n",
+       "zEhZOQU5c6Ky2gkNH5Pa7QBSfkzl3BFIwSsgWGnNCrZYBdHqJ8LaYyBOukC9vN575ykXQG4W/sUG\n",
+       "/ncRldUu0ccWha6mB6rX3oPY/PotqY2jK6Q5raBxk5e5I4yGeo40JsSKFojsb6vv3kq9iWFqZOOB\n",
+       "ybH47es3r+dtj5+oiva333k/SdFno8RX7gNWujGpMhtgY63OCs9GrOYS6uNez6+JGCWAdyZLXeCW\n",
+       "5URUbkwVeUGIGLrHXK7PxN7epCHoZgPNJKDXPfaIgFxjjJ+OVKgCX8c1+Yk2uN8xoV4Fimb2O5VQ\n",
+       "OCOpgwr7ZV+7ttJ5JKf5LAXqxmX1BqzGREA2cJZX34YEHDGGstKPH6EtlkuSS0j07y7yOcGGTLwk\n",
+       "qYMa/fjoThXJ5a/hi545uPnFX4uIowp3pLD98rUWfhzQNiIiTayE/SXd6yTVcyqIbJzkup+2JvV8\n",
+       "oChUzyNFcn9+3B/fHRMJIZdGjh5pXLWQ1o4J/TB0MqICDFMsSQmlHiABNKF4ybIgIiJDDHXwwvvk\n",
+       "Bs+6YkHOEhkyLOT2YW4YC3IPKIGspSQT0gBN2+ckZ2Ek78ZcMUhWCOeb0Di1jM2C7r/A65MLNWL8\n",
+       "5kjzXov5ZmQpJEMuJ3MH8P5/Aorftt4my4127GXhfX2NsZDQc6puraCKZH+A0i7YFYH9Jt8TAZEK\n",
+       "ESJEiBAhQoT4wAgvUiFChAgRIkSIEB8YH49sHnX3NGN6YOXjSIISEBfZU7qhAfExJthtu9LUQkr6\n",
+       "QDNkS6RwU1adoLFkxFERkRwqsnlKsDeg0JHyc/2oEKRBxyIiJfSRcoZRQQZOEocsOyjGDoDCMzI+\n",
+       "NSJuOzlhc5jhS0/jzGrLlNrcbPRYv/3p78/b/v4f/EMREbl65GTft9eajjEl+IJ0t5peUxBPLyll\n",
+       "Vujfx4OnliwFutt5ysLkqzpqzwXSbDFpcRyRMmhJWysBfG+K9sfaIft+NG0rMoi2FCxBy6aiHlEa\n",
+       "yxSAS9I7erXX69gQAb0ApF0WmtobSB+nGVSr5tS5OnMEE9aI2s6KAUrxth4PuNeUgqkTbbOCtY2Q\n",
+       "W8mwv9XGz22NftpRas2U1xcb0keBPtNI+SYjnk4Ej0tmitWUKkY6rEfKggsbjGTdUWrrWNm98H28\n",
+       "Rpav6byvPziDjhcpxV9eaAqmbciYHNdvGmxVxUr8SMUfyQwX51kTYTlvdJxmIxeboE8QsTyC2vHp\n",
+       "xKroMDI2Q21SsTbyek/tmiTv+R766e6WU0B6/D2Nk8VcvMCGu3ptTPaekHqxNvnmW9dRs1Qlf9/G\n",
+       "JKdMZhVtylRb2jImVwYjXsdEFC6Q+kzy+3pOIk7OP5G2V4l+XV65Anm7g7ZaTanFFdJ3nBc3E/DC\n",
+       "NaA2T3R///S//e9EROS7u+fzZzuYitdETq5QPMP9agQBu6Xxl5UYf6RVF01QdqdCpaS3VK2eb03F\n",
+       "HqYfxZSNfnYH8ONbejQiCkbSGVGdUpAodklpWyxmrqzzVU9zaAFaQpYT2bvEbwumtmBc05jo0Z8j\n",
+       "Mqh+8EgdIh5c/GjedrZSna8k+n/8eppvcO7lO9d1anUs0qGkR6HEkBHZPjeyPaU20RcHclzu4O4Q\n",
+       "kyvJ3zTubhsy+UZxRkqUjQL6eauMxjq6eEPzqYhpMPqWGFpyKdFSyjUVd70nAiIVIkSIECFChAjx\n",
+       "gfHREKlxHO6ReMfRCGZE7IUP0KIkBXSQUbuJV7X4l94LaxgFFaVvy6GAu8RqnVfQp16/n9CKPJvL\n",
+       "VWmla6u/iN909a17s/SVTpIoijSM3/t5JlhFmjp0Tl53uO6MSG8nkFNb9rASRbOa0d/0DX242FzN\n",
+       "285xLldrLwmey+/3+tvbOycMN1j17A8kZwul7JsbR6QqKBYfaFVv6uAJrdLXkDiwkm8RkQbtNNzz\n",
+       "dUKJOwjbXf3Kj1/Yufn1N1jBsxL0EcTTsnTkLo/1+Nu1r5Jf7n+p103eeQ/WT3G+2k7jSKRLkGJH\n",
+       "VrbHJY50/8uVnmjJJcz4sxocObEq8pFKohN4C0ZYYZdEGL240v1G4oTN0x3OnfZrBPgkplWTIZeE\n",
+       "Po0gdsb0PbsVJvsRdTwl9Lhmv9a3L7UtHj91BMGUirdn3nZjZOOElPJR6j+PAxHpTBIDkgiM9ORA\n",
+       "RNgHrMPqd0HecDUKUAZCScyLbpp4rQjULeWiBFwpfstFLIZITQQ/nOCTN5BfWQXZgwMRy89Q4s3I\n",
+       "WYTf5FTR0kNuosh8W4JS6+qE0vCF9+vtpyq7cdqRYj8IvR2RbTMUbbQtl5DnuC5CX4Dw3WsnoLgm\n",
+       "TZIT0mG+ci15gg6t9t0tFYUUNmcTSj2hKCQiNEVwPyXzdkq3KsHyg5/8RERE/tk/+V/nz7pc56y7\n",
+       "xsfwHZwHciq2sEdLQ4VCl0D68pKKh0ymgBw1UhTAjIDas8yJ8HP/ZJkO9BP2mquAyGSTX2uGtmbk\n",
+       "eH4+kVL5gKzMCagWeyjG+H5EfpkFZCq4AGU06R7K5kQoaGJNlLNz7U8TSdcsIWfx+OqTedvrN69w\n",
+       "Lu+qrRtgFpMkSwQ06UDZjDVQnWpkdMkmVJYkQNah4GIwIPxou6b2NhlafJ+ySWa7yUirOR9GhDQV\n",
+       "QAKTkeYYawtG2Oh63xcBkQoRIkSIECFChPjACC9SIUKECBEiRIgQHxgfLbV3PO4lJXXuHOzIvnEI\n",
+       "bYX0UEV6SzFIgVnKZF+9DDNgFRGpkUeZhDQzkDbZQCl42Dmc3IJEHjFkCbg/Ix2nFBBjzKlFaEaZ\n",
+       "KaOISApC+f7kquB3B01B5VA2jijFYNxpku6Y1dFrSuPEgCUjegfe4Tq+I72nT2EMGpXeTi/fKgR+\n",
+       "BJx+IpPbZ8+U0Pp89Wze1rfarl9+49tu76B6vHYYOcOfGUHWRlS+I6LwEQrxXGSQAW5vQfBckrZV\n",
+       "j+81RMpfT3qw06236+0CZqiFk72vVvq98zNXas+fK2FyJPJ0B6i6gLbP0Ht7GWF8JBXrEanKrvd7\n",
+       "kpVISx59v3GONBJda2wkYjJy3mPfMUxAV6R7VoBEulyTZk2kaYaIlHat7xaUMhpwjJb6yYB+Okae\n",
+       "qphwnyw9wWkEwVjoak9ZGQTe1X6eXWMkbiJnIlOSJJza07bbn7ztNmc6ZjZIC7298ZRNCny+qb1P\n",
+       "2N8bGms7KGqzYr2l3rqGilfkPmFVxLWnhjk94+1qBNeY0i6Npdkp3X+qYZBLyvLHkx5/dc+VAVo8\n",
+       "pEtkmjasgdYhfT1AvbzYOGXANHbihNMoZrLq13XCPmJqEyN59w0Z5GIOyih9V2y0bVcYQyOZoUdg\n",
+       "5S4pjW50jOrVi3lbudb7GZdUUIQ0X0ZK7RPoE9FElAIQkE0+aXPplIVffvczERE50Fx/RHruEPm9\n",
+       "22NO/OHnvzZvW6EYKF158cpdD9PkiTXYNLUVoY3znByVMYcN9JxokYJryLS+xbhuKAXbNXBxIKL2\n",
+       "Gs+niDTARhDkm6ONV9+vOXWs15TGFN3W0LMzEksLkt6faJrtRGP3xSud9x9duFNDC43ArqdUNQyp\n",
+       "TyB59zQnF9Cxak9E9obafEMFACeksUdKI6eJ6TeStt/8nPCw7LqJndcVz8mYazMuNsGvqSjN+OzF\n",
+       "ggyXbdDQmLA5sKVnwTAxQf3dCIhUiBAhQoQIESLEB8ZHQ6T6rpvLIUVEsnlRRVIDIErGKb99wsOO\n",
+       "VrUdVE9PtRPbaqzcGvLriUHUNgXyReEr83qvq5SOymXT3Ei8RE4FmnG8R0oFmvWIvdb0TTjLSe12\n",
+       "gidbBCVwWoWOLc6NiMC2EE4TX/2lIKqXRKKzt+U//8ufzdu+faYlw+XaV1OGIm23WoYutNK83r3U\n",
+       "c7qmVRWUut/ckfwA/BEjWv6ul2vszttuAnITs9edveGTKnxjnmFAAs7PSJoAqGNE9+nUgkRLvmbX\n",
+       "13ru5cqv9dFG23NLyMXVRlear49eANCXUA8GwjNMvqodRiM4koo9uufIZGMQ31NCGHsgFjGhBAlW\n",
+       "SQWtvgwAOgEtGCuqq55XVVQujnLphKQeRlsRU0l4gpVYSmTzQ6vjIyU0o5gRVpMf8WNZAUhDfpHL\n",
+       "taIjd7eOHGW4P4c7WsFe6X52jfcdI9ve7n2cbkEKP2EbS5IYYbypqdgC18Xos7U7e6gZcsv+e/Y3\n",
+       "+28aGdb2wSrmhlb1hEga8XxP12DkbZZkMdmHs8de7JBjTpioeMTmm+2SEB4UdJQgT0/s9Yh5jRXb\n",
+       "D3eKdPd0rXOhDFs14HqSmBWb56POf1UoxlgusA9SjI9xvh39skffWTIhFwjrlLqciHkmjqSAPcGn\n",
+       "MCY0Zzxq237/Quew08nb/3CA6jah6TWcEhg1KDNFvR6uHWlZAP3rR0ezk1T7bJJQQUmtnxvJnL0x\n",
+       "M/QrllqY0HZtT/cV8xr7uR6PILaT/EBicgoNIVL2jENhQ0soveE0q5LkZ6yIhJRO2kE/H1hOR3Rb\n",
+       "RwUQ377QftpT4ZU97+rW+/jsiZqahAO7kqD/01ka6p4MrPYOAj65HYy4ZzxPFmif5N48CT8/qNzz\n",
+       "nGSALF+/qZ6klE2aYvM6JekczLsDFQBV8FasGv/tsiTE8j0REKkQIUKECBEiRIgPjPAiFSJEiBAh\n",
+       "QoQI8YHx0VJ7m2wjWepw2uwjyoTJRuHGlki8qyWIigQZHluQPUkxtzpp6mckCDZHms2A/YnUVGOQ\n",
+       "joeWNEYABUfMvzW9IyL29YMe4+0bN+O8utDvjQOrHSv0OwGLnITJsWYyS4aaohBkTFB4itRaWjg8\n",
+       "WsIssyai7F9/8YXu957elO77937/t/Ucz87nz/JMUzb7xpXFExh+lhUpccNlMyfIugGkXRakAJzr\n",
+       "5ylphiygWDsRBJuYGjwg3ib2az2Hsnofs2IwUjZHv9YTIOiKtG36XlMqq9yJrZebRyIi8vL2q3lb\n",
+       "VWka43hEepBMLuNe27gniDcGBp3TGiReKMl9TZpdHdIczcn7xCozXSS/79aN6pP24ZujQ/GuHszt\n",
+       "r40X07YJbTeRBpPprNWkbZUj3co+xvabGCmGmNKzg+lH0Q0bUij7Z56KurvVIoYtEfsjpNn2B09B\n",
+       "GGF0JEL7HkRxM0Pu6fgn/PbmxlMxZ2dn+B6b7Op5xpQKeJ8CuaX+Okr32+eWxuWUoX2f033v22bK\n",
+       "4i0bqc8q8kSsRd/paDznSDNklJcYQLLPoMXD6VZBMQIbulpa8u7ujX8N939z5gUYg/VnSu0skA5f\n",
+       "rzzdaRkqI73nRDfoMWfXpA5dgCrRE9l+lswb6BGzWdgJ+zakgCLSAHv2zbf6L/ShaLqQrsI8Tamd\n",
+       "HGrbLE/0g6c/EBGRVUH6XGjPqiYjYyveiTy1JZHOJ9YnitLn2gjPE+5XlvrtqV+ZGnzNKUA8Y6KR\n",
+       "2gnz/rRk/USbO0A7yDk9rfeu6sltYrBz9+81vY6rnCggtt+u8TF5vdffrrNP520b0CI2Wyf5n07Q\n",
+       "+0ug47YkzbpY592c0mhdCx2pvbtCxOZ2Qdpa5mwh0XsoHUTKH0BBqGoUZ9C9XiAFvaHsW77QL3RU\n",
+       "qGXUioq1zWz+bYiWgXuXRETp4ReT90RApEKECBEiRIgQIT4wPhoitUjjewuTbPZ18je/HUiXGZVL\n",
+       "Xm4VVZjWj+dtP//6T0VE5HTy1bytTk0GQUTkBYh1l1t9de1ppWd8VvbhMbXjqSRICp+3NZXJgwB3\n",
+       "uyACbqyrr6EjtVUgEWkB5dqTrww6rP6TiUqDeyVqJoQ+GBExWdDrN5pssfRtT57oCuPVS1cKtzLx\n",
+       "AaX7cU8efiBPW8mtiMgCqNcZEcDt+gu6Jx1WdR3JH8SR7jsjZXnjWCakCmxKAEOC689JYRYrmJTI\n",
+       "gXkJUnhENFmoQp9ab887rLoeJ96eZyslnue0xq1bRYwqrEiyxEvNF1jNbkpfrVnZb7r0c1qsFdmL\n",
+       "SYF/D1X0W0LJYlxsRirWx04/byANcEvl/6ZOnK/8vn5yqdfAqM4R++1bX1UXQESWtJoW3J9TTT5t\n",
+       "MxJgiBAhLebDRr6GtRV5ZIQcbrXNImKMvvxeUYWLSydb30CBPiPk6Hi0wgv9/3Lp53t9ravZntAK\n",
+       "K6euW78GQ1oz9v+q0McJETJ06PbOES6TDoisiISkCQxV4n0cDkZOJgRpbjNfka+A8Ew0x0xAuIuS\n",
+       "ikxmknVC39O/DXRnIm4DL0ouwLD7k9HYaTAntSS/UsD3LKOJt0ZJekrnvlhoP+7Q7iUVu0hmKC0p\n",
+       "m6NQ43zpxPJZ4iTi67+/DxGRBNfa3zrCcoLExdtXeu7ffvutXwPGWNvtaJve//NzR9g3kG6YxOez\n",
+       "5+hPh95R9yUAuyIliZ1Mx9HYg/RMCHI6mK8nzXVArpKRS+iBPpEqvynvRwSdjfDfiwhhjVAgEsNt\n",
+       "IMsYfURRCpHj68HI5uQrWKCvJT53xINeTx+xny18X4k83mMuXFHxzvocaDqkUBYrKsoCAb2pCEHq\n",
+       "4KHX+fGPexDwY/otiqcmgpjsanuaJ1sgSzbt8/hbLvW3OT1rGoznmFTM6yPalZ4dnTk5MEoG8nyZ\n",
+       "+8Arsl/9qhQQqRAhQoQIESJEiA+Mj4ZItXU7l5KKyLwk5dVfhDz/duUrHcuvjsIyAbr66HvPx67h\n",
+       "NSXMuUDZsQlYslhXB6GxjDy8THSP/f+s1tLKQEVEYogUNpR7PVVYOdJKM0KJawzhuAWV1Q44/kAc\n",
+       "DTtGQofvsFrrMl+RF+UC+/X2XAHF+MEPfzhve/1ay4mvX6qA5QWhWisI7B1oBTNNJhxKAn54m6fU\n",
+       "s/RYdWUpuXon9i+/q6Pt6IIGrNxqIGIH8nWrQU6LEkKw0D964vmUkNFo6LcVRBIrKvU3EG29duTo\n",
+       "tn+Nc7LVuver883FvXMUEUm3emHcJ7bn2j/jlL2+dDW9I+TOvA5l4nJ+vY+jQOhz8PtqonLx4Kv1\n",
+       "Ev2JS3gbeHztWLgWvJVi6cjFZgnkrPVh38HXy1CKYuFtM5p0AZEEQQeUgjhyVn7/yQ9+Mm+7g6zA\n",
+       "kby2BsC+y62jfoY6nYGvxyvTN28g/kr8neNRPz+/cO6P+cmtCBG5vdVz2m5d/qJ9D0fK5AzMY+90\n",
+       "clRvsdDjpum7CBrPU5MRLWilm4MvyD6Zo+hvnzx6NG+rgZyNwsitjsUNiKPHO0dQIvRFEsmY78+a\n",
+       "vD7tjBm5NDFjFpg0wdiI5BRaQ04xrzD6nUGmoSD0uW3AKaJ7N/MlafwJtk2EcE7ggb589Xze9tX3\n",
+       "Kk9ihzh1ErRs3AAAIABJREFUJE0A6RhGH8xj7fLKESnzXb07OHL1+qUKho781MOYjVd+PUlux4P4\n",
+       "Ld2bCXPY9B7h5mTkZwI+I6THxH5ZkDOHYCv3sdkTDvNuTPvogLB1hMjOnMvO2zrGnJySIG4JFL2v\n",
+       "fTw1xquKfEJfrrQdR6GxsMJcBOHWY+33y+a9YaTnGZDInLlf4AGaN56IzNzYiJ5dBc75QOLE9j0z\n",
+       "O01T2i+yWQO9T6Q2rmmetmTXROO0yIyHTM9p8x8lj800/dWYU0CkQoQIESJEiBAhPjDCi1SIECFC\n",
+       "hAgRIsQHxkdL7VXHXiIqq05R4tmSD5kR9e52XkJ+ttGy1mGkknj4aa1IEmC70t9ylea0Q+kyoL1C\n",
+       "iEy3Vcg6Kfyc1ufbd4+F0uEDnZOVnzeDE4V789Ob/BgpIO3WiLCEek+AfZvKocgs1eMOAwP5KLV+\n",
+       "j7IzqTnMqrx57g3w9ClUfkFw7ghiXeL7Ty48Ffj9NSD2ltXZ9VzWpZfG5rParRNQy7Xek1IoVQSS\n",
+       "5V3rqYp4/r7+1YlDzHb/WYk6w/0ZDgR3I99EmT15eY3SaUqtWYZuw2rTnaUx8J3E72uMNEKSecpo\n",
+       "JmVTqXcHuLmr/bpeIVVxIvKyZSMyKsnOc6j3w2Rxe+FplAjpvnVBREjISrCHVhKjPxGJtYXXW0Tp\n",
+       "hgXStgkR5Xsj+aM/URZVBqQRUkqBjyBgjpTaWCMFZV5aIq4EfCSyc29q8BOnOyD7ca+Pa7zBPfyN\n",
+       "y1/368J5FjTWl4ml1qnUHP20JlV0S8cxodxSKkYi5/R4hXHKaZcWKZX+PWr3/D0joEcjuTJYQu6e\n",
+       "1xhU2Ym8PGcK8dnm3NOY5nGYcMoax1hQUUIOqY2ey7ZxDC7TN5Xxs0tPC55fqIxFDmJ3RuRwS6Oy\n",
+       "rMMC93qiVMg8uRF9QZCyiSjdEiP1mFC6/X/+F38oIiIvbn+uuyjYRQCyGpSC/eFnmoq6f5+0/V+/\n",
+       "dkmI2AqEKAVvU1EVed/NURRQ5iaNQeXymM/vqeijL5QJl9DbPEH+o0hHFUQet2KQkojVlRVIYeyO\n",
+       "VGxUIgU79b6PCgVAZ+QiEU3of1TYYfdnRW4PG3zctz4mxljpDkXu3xvxrB6s7xLd4cVznTMToQII\n",
+       "pPkWdPtz/Lanc2pB3i+pnyTWP2ic2IOin+V/qP+je/SUxu5xD0eq1LBiDKagLDCfDkSpOGHuTAeS\n",
+       "/fCP3xsBkQoRIkSIECFChPjA+GiI1Klt771wlid7W/XVhwlMtiRhsFkqUfNEK82h1rfpIvffFlgx\n",
+       "ZVRqnFf6hmk+QBkRoS9ArFye+eqrQFn7RESz/Q5u4USsOx0ViYqorLODxxEjBx1KVpPZ84p8gFDC\n",
+       "ShaCkiRwcCdivZVhloQ0LbCq2ZGY436v57JZO5pi5eF5oWjSwAhGb35J/raejbrfQ0XebHApTxe+\n",
+       "gu1A2G6I7F9O5mtGhD0UBaSRv97XzRvsF+dGKwjzE4tYfHKEI33rDXXs4FZOq6RX8OKLqPw5RTsl\n",
+       "GZHy14pOZSXc0sXjdMT1TI40CSQZOhJa3R0hoEdl+kc4vWcssDqbJ/r3SojYXUZ6bjfUJzqgZX3s\n",
+       "fbLDynBROKpUNvBfWzgisztq39mTJMIZdr0g0dF81P0kuPKOVtBLoARd5fuNQMqfiGycoj37zttk\n",
+       "UepqtmdJDKAkDckvmBfiEWO9WJMwJFaYvA9DAu6hH+8Jk1Fh5MBQFJMBEfGSedtf17MkCsjx5INn\n",
+       "QqCMCCezXAGVWgPNulpTUQyUUBvylevRTwsapxH203R6/OboaMmFkezp+o08nhGxOIVg6oK1L9E/\n",
+       "CRCZkUAmql8+UrmPxZkKzCapX38LX8mJkM4M6GBOSONoJHZqu8kKJDpa/b/VuWN38G0Xl3rcn36t\n",
+       "447L+nuMCS72KJF2YHLwCf2pP1J/nkvt/Xt7lOSPMVWvGKE90+MniQvttjPSQ0hHZpdFIs0G3BAg\n",
+       "aDItY0pjB/cn7QjhwscxhDtzknDJgb63nc91pfnAHVnWRD+vqD83EJuelo4SZ4X2u5Q6RYWHEBdK\n",
+       "Zch6RECd+tYzEoKx/ua5e5gWOQo1mICPuTMn9G3CfJeSEHNi3o00x6XIClWYT1tq2BHoY80+uQug\n",
+       "tAu/hgKZgFRITBXSHfXR+25z0JvC4twsY/S+CIhUiBAhQoQIESLEB0Z4kQoRIkSIECFChPjA+HjK\n",
+       "5uVK2sEh65ujapEQh1FSkNL6yCG27579QkTu++pN0KyIB4JnRyNAkgYSCNURfpuSnkcKLZaSdiEg\n",
+       "4uZEdh5KhUJ78p/rUlOM9pTBbqfXdk/FODJlVxBMKWUlSIENBFn2ILayh535OsXka2QKzGfknffs\n",
+       "2Td6bpRavDhXONbIdhWRTmMoJUcNea3hXAZKrSQgzMf3CIPwQSIPJ+tZWekp2AypxDjmcwehGW2Y\n",
+       "khJwBL2tgqBwg6oXCevz6HF3RFiuoXz8feMFAGukhS/OnGyamKbOYLCzt/XmQlMcX3351/O2vNDU\n",
+       "Rk3pqR20ahpi+5uH4DZxYvsqW+HcyNcr1z62XCHt3Hm77vfQVmNhfeioTR2ltuA1tr4gEvtG2+nQ\n",
+       "err1pkNaOnUSqRHJC8hos2JwnJj/JOkuYdgtSxoTjW7cnnsKooOOWknETksHndhPEORuI1Qfbj2N\n",
+       "akLdTOw2AvhI4797jwZdi76dEom1QTqO032mKWXHYADf/NSYxG46UyMpUbfmNkDXaqm3nP0nYWJX\n",
+       "kaNBhrFQ3Bvjetzr1+r/uKLP6oP255wI4BM8wSIiWycg47IqtnFxo/foza03ntqLckuB6z2+u/VU\n",
+       "UAvidUE6ZqZZlC1IRd9oE6RZZX9PDal4o0/88U//wo+B1Od2o/PV82tXNq/32nafPHFfR1Pljigx\n",
+       "f4B+WZF7yrTAHMspoAG0kXpPjg4rpO9t/i19vCaFmbOR12MKWgBRQBr0iY5TdujQE+EXGeaMlvwP\n",
+       "Y/SdMjHdPZ9rlih8ySgVmYmmT3PyNTzuTAmc0l055l8aT5bGbEirbgvdsGbrc+w53EBS6OxdUXHA\n",
+       "Fqnoc+r/b15pP12tPbVbnulcyBp4Juk0UTv18FG93vs5dUe9ninR628bmkNQlFWsqP+DbsBSlT1S\n",
+       "rwOR0idoO1b+mBA5WJ/18xxYS/I9ERCpECFChAgRIkSID4yPhkg9vLqSl3smkcIbihzUzacqIr8m\n",
+       "83ViVrD56dRE4rzBW22aMgEUzummcEyE9R5v02/35OEl+nme+dt3BLfqLGdfJbyJd6SifAB5nlCf\n",
+       "DKTNMW7u/05cPTtb+PnOKsYDSR0AMSnp+CVWn0nmiNTpUt/Yb8h9+/T8OxEReXCh5MmCyHzXx1/i\n",
+       "RHy/Dx+qn+FycvTh/Ey3PXr0+byt+VZXDt++crLx6xe6n/3K5QQerfS4I3kXDpm22QlkvpLK6hdQ\n",
+       "7G3I1+046HUVTLaFO/0ipvuJVVdLxPa70w0+82OsoEb/8EKva7166te6ULmIz//e35+3/dnP/0cR\n",
+       "Ebm9++m8rTpCWZeQ09jKdIlYnkdW6u5tfIIkQITvbUpHH8zj60hkY1OCrlsqv0f/zxa+bYE10kBq\n",
+       "xxVWcQ2hGRnQ1ghoTkqIXH3a4xpopWdEaCo/L6HO3JCKdIxScFoQS5TruS+JAZ2ASLxeQDGZ+mth\n",
+       "XmOkhP3qoN6RPSFCIxBT9tozGQMjfes2FGqQT50pms9zApHI7e+JS9htNUuohhWNjOTTuVqj3Wme\n",
+       "yjDeaDqTGMdl1M3QLiPHtxVJaACROHXsF6jjviRibZwaIkWEXbR7S3IuE64npeIV8z2tgJJ0VPt9\n",
+       "rPT+FGfudTrLY8SEPs7HpUIRQ1NrLtRAnyQC+revv8CO9bem9C4iUgKJvCRfPQOCbgnNrE6Q84ip\n",
+       "eAFtUg8+T0RAicaOfAorQxPxvCjYfxHnQRImNX46EfyRIcPREgHdkLuCUNIYt53V00d0kAX680jj\n",
+       "b8T8nJIkSopjMKozVkBTSaZluYVMyZ7OHQUNJ/JYvcU8+snnTigfR507r/Ds2JCESmEekrEj/U8f\n",
+       "KJq/Pnsyb8vXevyYJoUK3n1v33h/3mNOuCxJqR5zx3dvdU6ayFdyvYUnIjlm2KvA/kiaOB3OmQvF\n",
+       "7jDWKBOwxRy7IJkMlrF4XwREKkSIECFChAgR4gMjvEiFCBEiRIgQIUJ8YHy01F6ZrCXJyeQXCsgR\n",
+       "ayuBUHxPMBckxigiFVf8O5CR4xFpuzhyGK8GjB9Ds6ii9MjrnUKAce/vlgvA7cXS92taLT0R2ydA\n",
+       "5RMr9qJpY07fAXrscUoxQ8HQ1lguCeIHFN4TOTPNYW7bOYxdLADBDn6tnz7W1Nvp4CmYA3Sevv3u\n",
+       "KxERuTh3fZSqU8i0JNPmq05TWz/6/PfnbUuQ7S+3brz6dvkS1+wpoxPg8Rc7v8dtoZpR2wuH2/Mt\n",
+       "9E5Oeq0dE7aRCiGBWWmndwmDqxxkSyIHxr22zxRzWlR/U9ekYgwy6t2Nblvkfg9/9Fiv8dMHbsb7\n",
+       "2af/qoiI/Df/0380b9vt/oVeA8HjAui/yv1+rjHaoolMg0F2tBTwGY3I7UbTniONiRZaMc3Jj5Wj\n",
+       "T56RYnEBsmVH2kInKDt3ZKQ7QasrR0pxIh0z6+sRkbj3O+1PZ2ek9o5xcqq9rxkZOSFS7AKFFz0N\n",
+       "6C1IrCv0f9a4GjF2Ot4vDKrvyMg3Q6quJtNwazJOma1XmqLn4okeaXMr6Lh+66noCJLlnMackMaI\n",
+       "ycjZCkCYgG+s/HtkV5hLn5NW1v5OUyZH0rE5O4N+V21kYzZIBgWAzLVtBhxIsyePIvpk/jF+S6rQ\n",
+       "jSmVU7sjldejn+Y5zXXQTOOU6cbSbJRanUAyjnrSEcIpV42nyv7oL/9YRES+ev5X87brg96DBmT/\n",
+       "hB5TT3+gc125eLew4PkLv3enBqbdOVElQK3g/md/ppQ+bpF6NCpIyk4EcG9I2W0htaIo72tZZFqF\n",
+       "5B6B9GE58vGtHxGmkVi6H+lxUkwfoEWW0VSTwtKB6dArpMozKtQZsR/TZxQR2e/1V9WBDOcz9N3M\n",
+       "0+yXmEejWK+/bz3dukn1OfL04Wd+AnCVL7b+TMgX9owjc/dJ/7668qKc5y80ff/Vl65LNSCV/vAh\n",
+       "dKRICyqHufjQULHRXueH/S0VheCdYOq9PbuD7ueqpPPE+0FGKdWG5pb3RUCkQoQIESJEiBAhPjA+\n",
+       "GiI1Tanksa9qu8yQJkd/RtG34C72FYxxEkvSKbAV21QTYa/XS6urnr6nb51tpW+pQ+RvsHcoFy32\n",
+       "5MOzhayAW13JcoVSfy5/xaoiolLjFiXhCREwE/gjtfCLSgj9MDWBBZVw2nKpSahcGES9fOP7PdWq\n",
+       "DrwkbyTBar4gEikqh2XAKvH1q9fzZ8a/zogc/ZNPQeIbffX1+RNFZyJa1a6gbMvl30sQX5+98NX8\n",
+       "Hqv/HxJv79McKutYuR2prDXGKmETe6lzDiSgP/hqaQDquFh6UcABdbUxefdlRsBOfFvfQyYDq49v\n",
+       "vvlq/uzxwx+LiMiPf/Sb87Ztoqvvf/QP/s1526nWlfDPv/7lvM3Uuw/k03UB0mpBxQtxc8J16woq\n",
+       "Kh1BO8f3stjvYYsxIROhBA3GDpV69yjUaKnYIQeKkRGhfAJR/AS/trRjErXek92tlyGbsjZ7g/WG\n",
+       "cDGajL57tvVzGowoSxIXKxy/r/S+LwhBtIX76fBi3rYE8bgjAvZgEgaMyAARa6h4pUR/YvJ0OxPK\n",
+       "7bekjg2ZAlZ9ntEfIrub711KqE4KNIcq4mcS8atX3/k54fpbUlHO0nPsV4+bE6oRRabYzlIbpixP\n",
+       "ytYoiWdJCPvbfBBFRHp4nHbUdgX2Z8UOTJguiw3Ow49vxP6BJFFim/dSQnqOyDokvvo/AFl98ebl\n",
+       "vG3APZuAQnz6xCfgYqGfRdT/3r7Q/b668bmjb7SPn5+7rEOawjuP8AMjiGcpqV1DNbyv9bhclJSu\n",
+       "rJ28ra2gJCGkK0PbXZFTxniyZwKNE6CZ9CiSFN6lMZTiR/KkjSNtz5T68IQ+XJKx3YR5P4v8Wm9A\n",
+       "vK4qmoDvgPpTAUIb6d8XhOYlqT4DGjw7jhXJKuB8X5GEwqLUdjfEVUQkOYw4T+8TSzw7SiooePJI\n",
+       "/V5XC0epvvxOCxBevNW54JycNQr0ses3fvy4Rb++Tel7yFyQU0SE6WlD3n3m6JBSHyvoefe+CIhU\n",
+       "iBAhQoQIESLEB0Z4kQoRIkSIECFChPjA+Gipvao6SEOkz6k3Q12H/UYjcRM511JbCb0DlrlC1Xni\n",
+       "aQSDo02fSkTEfCZTKJZPox/rAPi+opRBDciUSZzjbIxL6RExxVqH/yIoj9eNQ8YFUnQl0kg1K0bP\n",
+       "VEHaBig+LyllAG2VKfaU2anRFF0Wu7ZLV+n+jBwuIrJdKVT9plPS94mMQkcwuhsi8b18/UxERB6e\n",
+       "ubZSAnPdgUi8p5NC6qwYblodt28ojXILzR7S+0qRUiqhQfK68XvSv9V7sfjs03nbCmmJ252Tja0f\n",
+       "TbWf0zLX9EixcFK8pYP3x1fzNpP+qWDM25Ky+x//+f+ux889FfejH2pqk81IHz/8dRERuTu9mbe9\n",
+       "eq5poYYKELpJj5FRf1pAR+sGaSRT0xYR2SEX2xKN1I47kUFsDF2ahk5qOtkXWccHqY2CjHmhtt2B\n",
+       "vF+y6u9J79d26zC6EapjIRLrnNLxe22K2VnqMLqlAHjaSZCqGpDSX0TeNyxVxMUBloKKWAEfx2ju\n",
+       "FXuQejyiOml/YmVzS/1Zqm4cvW1apMp4XJvGU89KxyiA6Oi3C+j8tK2nm1rc457SMhXmuMtzT2Mc\n",
+       "oMe12ep4rXaexrYClZbU1kekwEgyaFbKTogwa/pQXBSzBW9hmHiOwzGg2VSxFpcp4ZOOj2U5E0oj\n",
+       "RRhYw5L0jrDf5898/KVIy68S0oXqdd7ZbmHQTan4FGnuA1E29uinw0Sk7N7up/f/weZpSkumiaZ7\n",
+       "zCBeRCRHmmdE8U5DxT7xTLPg+29EbL9W49hHlDIsVlCMJ21Dmwu439mjze7ISIR9S5/XlIK+WOtY\n",
+       "IwbArPZ+oJSxOWkMZELfoZ1WW0qLX+h8tzmj5xkKiZrazOj9uXLC3D2NPtZ3B22T650XAEx4Fl89\n",
+       "cArK06dQSr/yZ4c5dawX/r0ff/ZbIiLyAPpU0tPzF4Vfj0iD8fNzpEd/OG+S1VLnroTmjgKG2wUV\n",
+       "dOQomqC6J+mHTv7T/+wP5f8rAiIVIkSIECFChAjxgfHREKnD8VY6Klc/QEU5ppVJD3mCw9FXJBPI\n",
+       "w2vxN+LRSNu8IsFqksl2PRh9qZV1E8NvXVi5MKloo1y5IWJdhe/l5EmW4jpS8pqqjj3O148xiPlq\n",
+       "GVpG3ky1fjaxECvI20uSiYhLlISz/x0Ium9vv/ZzGrc4vrfJYqVv5OlOjzuSivkJZaBMdv/6G/Xr\n",
+       "e/zoR/O2N7eKfkVErH2zUwJgQeXfOa7t4ZXfpxuUc3PpcIzV/AZoRk5+eYejlRD7PlalrqCb3FGS\n",
+       "Qw2169HP3XwCDa0UEVmmKH/PvJErIJwNZAV6WoU8f/FcRET++//tn8zbfv2Zok+blV+DoaQmOSEi\n",
+       "crfTtstIAdtQDJbzWC+VbH+Bz5KYSKQoYpionwzoz6s19WusEtvJV7XW/4VI0ZLpMWhBLBMQwQXa\n",
+       "LiZ29AryCyTYLgWkBu6uHVXYQMKgIfmREn5dJXmtVbWiiAlJByRAzOyoERHhBxD2t2ufJ56/1P53\n",
+       "Ufh+7RIbUqw/AtU5O/N+cn0DQj8hElbWPAB9qCpGsvRGdUTAN3I6exJGuHdcpm6K7exJN2UmseDk\n",
+       "+RL7GTc+73VWqLKGYjn5mvWdzn+s4m7edKRSIEeUf7MqfQGZkIGKV6qT7u/80qVQUty7an/CdXl7\n",
+       "5UALuKw/ASLISE8EyYCEUPcaY+yvvv1y3vbFs5+LiEgzeH968FjvWQwSd0RZheag9+uGJCk63KeU\n",
+       "SMxxBKSbCNhzzUBLYwcIfL4g9wqkLjKgbi1BE/GAAgBSvS7RPwf2Wuyh4k2SCBXuSV/7/LM803uS\n",
+       "FexyAUV19I2YCPv5UsfkYslzIvwqF07YzlF4VJPa/u2djonuEcmf4NmVL3zsREBTC8qEZJAdERDf\n",
+       "MypAWhcoQCFJCkN9f/zUJzubkiZC82I8x6Le2/Mcsjyb3I9h3qYdELaBCPgZ1OvrE811KOzKcyKR\n",
+       "4/lcUjulkfm/MqaEvxkl5EnwPREQqRAhQoQIESJEiA+M8CIVIkSIECFChAjxgfHRUnvN0MhIxOZx\n",
+       "UiLo7Y1DwQNMGNuKzWhBFCdtk2Ewc2NKgQDl65nQDQ2OEe+PMaUCctPsIM2Y2lJFlEarDtD9IMKu\n",
+       "GYMyOrg0qJggyBwpyHSp+7ugtNve9GwGvyU30B1pWkojACnlY5ka/FA72blA/maMHDJtOyMKArql\n",
+       "tFeXK8QfC0P2+vfPv/6LedvWCHuUWjmeQHbPnKifJgrPr8igU6DoPJIC+w46Mudr6A7Ffk/uYDJ5\n",
+       "tf1k3vboqarn9nvXJznt9fjVyXVE7B6vF56ysAKEInMSY5yADIxu0lC/WiBVcLP3VMw/+0NNRTy+\n",
+       "8pTR00/0GFnikPH5JTRobj1lESHdEFNubQTZdLvS62p7V/NNkGamLJ4YT5T7/26qcc2kAbUAKZvU\n",
+       "+xu0bVVRuhnnVKTaJgOlEUdLRVN6yMZiRvf1LdTAWbNnAbXhOzKSLaCyzGrfBr2niaa9WEXdSLa3\n",
+       "JydsG2G9Ovp+O4yjhooNeozZuiUdNaSqlgsyLUY60s63JdPiHmlZTuMZKZ1TASkMxB9eenquNOY3\n",
+       "McBbkJYvL5xY3kNFuyVCfQmV+aG3/kLGw7gXCZmo1iCWcwFCj6KFA7W/mSDn525Gu4L57Ehm4Q3a\n",
+       "wI51Ovm4trZIU+/rVtATZ6RYbbvb+/Fffa99+0tQBkREvvjuF/rbwsdulmM+A8/hVPl17a/1XBIy\n",
+       "9y5jaGaxOjUoCo8fuGnuprT5Z94kC6Qxt1c+J9ipm6HzQMUJV0gVrxaeWisxdgpSm8+glXWiFOQt\n",
+       "5oJTRc4KUNRflt6eR7R3jYIC1gJcQBV8s/FU5AJzcZpyAQa05Zg+grFbU0GNqdgnCVEgQDxfrihX\n",
+       "DEV909hik2XTSiwojW/nxKbdI+az4V7xDGguRLMp0XbLzMfpBsVb5l7SUsoywfO3/H/Ze5NeWbY0\n",
+       "S2hb772f9vavy/cyX2RkZKeqbKBKwKDEEBjBBAkhZgxAjKj6AylAAjFgyqCEBFJJSAUTJBKJSWYV\n",
+       "JFCRVURGRrx4/bv9Ofc03rubmRuDvZZ9y697vigdCa4S7W9yzzV3N9t7297b7Fvf+tZ3bGPYxW8j\n",
+       "fcNBk7XgOYnnusZJvYik8ZogcMgCIhUsWLBgwYIFC3ZHe2eIVN1sXS7KqRlq3C3XRiy/ufF/b5bm\n",
+       "JUZ4M44q8RxBwFQ6GN8llSi5RXeZLuk29maa4820ye3d8qi/n+qdgDyYCiucKsqs2+Wcc1soTw96\n",
+       "Uv8MRPF+H3WgBP0pUBsuWatcgO/XVNJVNxvWepK0drRJU5KXQIm2saF5TAWm5zIcC2EaBNSmlhRu\n",
+       "pC43kXkwn337U38uId9VlUckclHlrknik1qDXZIHhRRdAm2bbtC2wryAU3iQ985M/uDRiSd0X3at\n",
+       "NlcKT7wn40nViVxqIq5Bdh9Iv5tqibb5/s+lhp3DGI8H5hFWS4+EffP1Z+2xOPWI2cmR1Zo6GXiv\n",
+       "6mqhqtggtq8snT0/8WO8rb2HWfRNxTmJZ2ijqJMD4ahqmydj1IbSWncrrJlKLh9DRmQ2k9qNQCK6\n",
+       "Pd/H4755uumWhE3r/wryIJEgIkzJPz01qYlvvvkSn9n1j3pjtN1+m6EWX5aBCNqza5VYn6kgDeCc\n",
+       "upupoSQLHFzObUzosCZC4l3OsRa21sflhmiOnzDbRlFyzEVpb6OZArCTYyQMDASRAaH56soQmS4Q\n",
+       "2W5qaMZ8yiQDScmPeA7f3k7H9pAUSMN6bigdf6s1FOlpb0RtenLl50e/a+1kzcp+z7z56ymSbOrd\n",
+       "5BznjOSeCEpGmDzqS/1FoPmLG0NYv3vmkzcSIeB/eM9XD1hFE/ktEm8w/2ZbQ3DGD0BiFxIx75PK\n",
+       "2TB1/mRsKCFRlL5UQBgioWLQt/7H+N62rU0oexiSLRIh+xdd//dQCOCsU6mVBV5f+HlyO721dmLO\n",
+       "FEKs5hQjMpWL6vZoONhpo3PO1YisNILqJCCHpxKlYVGE2UoU4LFQcolOcCwKIe9HQH2rkpJA8qwD\n",
+       "qiS5Bi1iXYicUQR9hlKWUMnziSTIFmOm1QMy1EIs8EzuClpEFLvTUakVPuvtObXB+ljLPrlFIoHW\n",
+       "k6QEkwC8O2jvIQuIVLBgwYIFCxYs2B0tvEgFCxYsWLBgwYLd0d5d0eKqdt2BwalVA2KlcJPLEuEh\n",
+       "USKtQCithChezaFiq+q8OF8m5PG8Sy0MFDQUYmkDslmvL3oWQ9+YRmDsLVV+JYxXQnumlhAYIfPe\n",
+       "SEJ7GaF9aNZIMcwCuGgnMzjzfOivO3mjxUg9PFqJxsUWOlJKwGsgiFQ3BuM2CO31QWzs9G2wYxCl\n",
+       "NYzZhe7GJ5/8yL4H8vwvvrPQ2nJKcqgoVnP8G1GgB6F3K9AqI0RzEHF7Q/v+ydFopx3OOVeh4PNm\n",
+       "IVokG3/9SoK7p6e+0LFq2xz3QABvLNxB0myS+faej0S7BMkQ86UR29mW7cbmyQW0jbpCwM1R8LIn\n",
+       "0HYJbbEis3myXjP0g4KaHSHHI2SdZhICx7g2tc2dfsffx6OhKdtfXvj7/nJi938UgcQshP4c86lA\n",
+       "weM0Fi0eJDGUooXFFZbLmhgj3HB9aWGsGqH3gWggxQiRdEUrqkDbW8Xswj5LYySRCMR/79SHRzal\n",
+       "kO1R8HgtxPoNkkyymc1xErAnlaktxywaXHI8tUAwiqELEZuhSi2azkLK05mS4vH9SCs1kDwuelMM\n",
+       "6Uj41q6P0JLsaxHUubtSsYBJFlshtvcRAinXSkFAceO1rb8I86lay3zqgvhfcl0JPQIk/liIxTHX\n",
+       "tTxwaO01AAAgAElEQVROYrB8GyHx8jenY5vjnNqb0kK1ozHDbX69LqZG91hC28/JHJ5M/bhr8IUK\n",
+       "/KqjRTV41RbqIPSbyz7RQZgnxfe0yDevoiHeAkVwi0zDc1DWlgQQ7mdM2HHOuQTn7gh5ngkPQyb2\n",
+       "SGiVYTQNe7HlmeiY5W0ClO2nJWJ7vdieSdxjdD6n0N7KJKGI4Xu3Qn82qphPwvZOLIy/tLaz4LzS\n",
+       "V0Ae1/sUYZ9opAJECm0rhu92imazjxLaZOWNrWQWbFs6gpCAEiZl2fdqhPc1pK2q/YcsIFLBggUL\n",
+       "FixYsGB3tHdHNt9UbqvyAyDMNoKqsK7ckRABF41/g14JKZoE9EpkqbcghyWqNou6Ti5ByrmksLP+\n",
+       "XdGTd0v8tJF6RRVI0Y0w5qqY5ExRYAcZL3KSJtxn/iWkDsT7d3iDzqSGXn8AD/KlvEEDJcjF+ymQ\n",
+       "Or4Sov5mScKkeC54c1/HOBabt1p0gQhk1q/xsfccf+sHv9cey/EGfzMzVOeLN548ma5srLfxvlcz\n",
+       "7ACdEPLyxYWXFmjrLsqQ9OFBTacm6zADsf7py6fW1wnSr7s2nVdAvUZ9I5vWOF8tc8wBbWEa8hmQ\n",
+       "LOecm8GDfX0l419C7Xkg9apwr2dzI5GPoPatJM4a9aE0JXe1xNhlGJvaUuNPx58455y7nPy0PZY4\n",
+       "JkCYp9vr+THOUpsT1zGIqlKAiwrAx8fWpvUMqtxAfbXWHz0yJYePocDdiIo5PbzLq2ftkRGQ2LNT\n",
+       "Qx8WSz9n+qKAn6B9KVAA9b7pOKbiVROcHI0M6ZrCw72Z2PynrEEylxpq8NJLTTIBIpBgLcZChG7v\n",
+       "8HafWN8VCQUHpOny4kV7aIQ0+UyIwkvUHytrVUwG6p1IncSYa9efdyUIbg/ny7U6QHvvhGyP9XRy\n",
+       "ZgkA3B/yjrWJ9fyqRtcOkiJAxO0IgsYkA1Wndqh/2awNVXIgqpdSbWI88HNioXUCQQrvyLp7cM9L\n",
+       "Fhwf+2O3F7b+iXTUghBM5kjKEJmCCAjDSlD/BeZCJQjr5MqjWdelyZQM0E7WmOz3Zb/AI0YTMHog\n",
+       "6vcEabpuZSdk7kJRey37Lu+TyvRkKStv4FkjQE9NWYuZEfBToH5ZZihlWzuy1ucp5k4h1Qsaf+8U\n",
+       "TYqxFjOpclDiWZ1w85JlmoJYHkm9umpD+REp1cFrSGJFgQWdyxzjGCsixXGs8JxUcjj7qihdDEkG\n",
+       "hSlTIHbCSW/rDypTPto/tBPtOmQBkQoWLFiwYMGCBbujhRepYMGCBQsWLFiwO9o7C+1tN6WrRYm4\n",
+       "ATxNrSPnnBtAs2MjWhB9hN7WohhLfZA0NvgtQyHNTkcIY4AeExYeTlUnw5uqvlYgNK8VigbZ2S2F\n",
+       "nEaCuMCjEUIvN5WpYq8AfS9w3UJVv6Fn45TsXF2hvRYym0+hyi7Q+oDnE2Xd2wiaGUKUJfFuDWK3\n",
+       "IOHO9UH6y+zd+vjch2Xef/SRnQNaHGdjCxn8ov4JzifaVlTWjTTch+K6ogG2WJLs6n+7TOyz7rH/\n",
+       "++Lqu/bYZOn1ia7WFkZqoOzdqw3aZog4X1m4Z4FioYn0san8Pbl/5n877li4N0n8b19cWMhkgASE\n",
+       "aGwh29USCRC1kK2hX9XpSmgNyQNupYU8CdX73/Y6clNAVB6P3m8PLfE9rTwcEfvf2rUYNi+FWJwh\n",
+       "9NCTEGgH8esCRPlupOrMvq/DnhQeBqFa1clfIfQyEA0qhkWU7BojHJMKjJ9Cj4e6O7GceIv1mqtm\n",
+       "DqoDFEKO7SPM1u9ZGGeO/WG5tPtP0nAthbFZeaAHDaRMCpoyjBTvkL29dSWJoC4RWpLvLWbQJ5OE\n",
+       "jgKq1Bruo3q2Vk9gHkmE8603FsbJNqQsqDo/Q3C2d1AXKpYQ2JMHXo9tcGTh1qjv71Nkw9T2O6Lq\n",
+       "tSTsdDISoKUYM9WhhbBL6kOa2dwxArbsXVgfXRn3Ldbu5NYneWwkFEqNIy08a4r6sv9jXun9n0x8\n",
+       "GH02sxAki1bP5xKWdEzUQdhNNJMYMlIVcYZ5d7SIsLdXstfx+VNJYgG3542GjvA5x2srGzULaGsS\n",
+       "FVW5qcnknHPRlu0UEjcI+qqinsb+c1Vv55qt5LlbMpSPcJuSvXn9WsjeJQsvyz1JMU8OEcV17GhV\n",
+       "rXqDu9pSWoGAY63n5dqJVe2dFU1iGZOECSUaFuT35Lca5ztgAZEKFixYsGDBggW7o70zRGq5rF13\n",
+       "YG+cNSSY9S00x5vuprL3vQgea6FkM3g4qZDjMhLrpIcNUCJ665W8LecgR68krf7mwnspV6Ki3INX\n",
+       "2xNiXVLj70Lq/yGdeiFk15nzHlEX3uIgF88ACEM9lzd9eL29gaSLLkCK1Zp48BJTedOeIk04qYVs\n",
+       "j+5uQFgUBQXnEtQQFE/z/Pihb6cgDddQAq9F/qEHr1vTxDd8w1dSYt97p7OtjeeyA6J46ceiLM2D\n",
+       "nd36c1yPDemZrvwYVpWpA2d9pGmLV0EF4norUgONP0+0tWtMVv4YidhFbIThCmrveUdSuJHY0Ahy\n",
+       "0s982xu5JxVkJNYyyJQa2AiJslyhJt2tH5N7x+IZoa5VWhj6kFGeohIPDmieXquIvUzAg+HD9tgY\n",
+       "yFIvtnnX9EDAjED6FHJ6gfl0e2Ooag8L6urWxv/0nkcnRZy49diXIkkQgaiqaEINr7dxqOEn3SKx\n",
+       "tRFyMFXxM0mrZ0q0KlA7x5p4dmwNUnotSS4j7BmsYTYSuQZ6vQoWUEV8K4klrHU37KkCORABQQ6o\n",
+       "kF7tJNQw1V72LiBmEdoeCYI2h5xFIQkDPIdmaBMJUKQnRX247tjU8wlmxolIx6AmWwM0ZTU3pK+D\n",
+       "ey0lRF0EVLeJJAEDyvpEgZyzpAUdz37fj5nWOru+9muSiFAkP+A+qdRfylT0eopS+T6ORdmcn9e1\n",
+       "QvHNzjWdc24JORsiSLFOK5giWK9f+/WhytpEVhVNqtv+a1WICt8XYneLOkFhWzrLPihAQrRkK/sK\n",
+       "j2mdPrYvlQ4RHdMkEybeKMrC9H/usdoHon5Natdv3D5yRXXwtUhtsN+1kO0PYT8ke1MaYac2Xr0/\n",
+       "hpRVcKV9j6T4HZTcUXbI7h1RKk2yqRudM/sWEKlgwYIFCxYsWLA72jtDpNZNvRM/ZXi9kVTjBG+4\n",
+       "WxG65Bu8aBS6Gh7reqNvjUCpxMXd4q2b8eC6suuvIUxWmgPl1jf43tKGaY3K5KkIR6a4bCyezhp/\n",
+       "bjfW0JqCZEjTrjsitAgvYW50CDe+B0RK+DDNEKn2EzkvxqSQWl95x3tkm6Wl5NOz7cCDrcXVWa/A\n",
+       "6SnsvA/PPTcntURw92bqz7corYbWcOiv++qVpRBnjX/Dj/sisAiUsC/e9Fnk+RrNBkib1JDbADlZ\n",
+       "SK3F1cKLKSZSk4+IZCOChA1rAYqrEAHNWC7FS177L3w9eeXPpQKG4NcVIr+RQRiucdYH1jrUeLyD\n",
+       "mGzUmJwBkTAV6Tw69WNy2oGHO5d5hXvS3UpVc/y9ySWFnzF98dKOztD/RgQ2E9/ORJb9Lbg8ERFG\n",
+       "uV+UK6iE+3Zz6z33oie1zsh9kHkaQ5LjaiKSAAe4JPTc10R1BBEiR6MulVMClFD4iEzP70v6eYY5\n",
+       "vtRK91vU5BTkrtn6+UdUSdOwiSBE4sFzyShIwRT7k3NDPyjjMLk1pCMFIp4J+tRD+nch6DRR1BxX\n",
+       "0fT3LXgwa+GIxODBJAITFUAfCrmfORGZSiQuwJchf9E54a1QHTkSBAH/Nl3hd2LeVTP7Hr+5WApM\n",
+       "Ca9eRVdbgVtBbogccm4kgv4T/bi5UT6ivymKSBFpUqHLipCpcHk6EFM1npWgqUDmFP7inFgsDJGc\n",
+       "Tv2mPR7bObSPNKIzmXBj2b56R6ZgV4hTJUHIeVJRUe6rVaXoL8SsBTlv0R+51i43DL9tOUIiponf\n",
+       "8HQbQZBKXLdOJCIA2ZdMhVtZkzGRY477pF1/i/MpD8odEPOl1UC9F9IXPuMz4YhVnKcHxkm5ZOy3\n",
+       "1klcr1XuZd8CIhUsWLBgwYIFC3ZHCy9SwYIFCxYsWLBgd7R3FtrrndY7hDnWhuv0NQ3Ww2lVZLDa\n",
+       "7cz/3REWOeG7UuQUZoCUNzOtteOhwhghiCw36I4k0q4ohpc5UvJXEkYBLJ8kAjGifVUt76UgJUc7\n",
+       "dcoAY+NrU4HCY9bak3pJBSDt0UjrQPnPF7n9tsl8SKHXtdDCEmGhy4sL6SNDFSTnW4ilqvbVaZn2\n",
+       "PRHF7m+/+9Y559zVrYX2GPqoJU14ATmH9VYgc7T9pLBwF5MLlrjXrbyEcy4rGIKUFPaUsgImdbCF\n",
+       "YvliYdDuHGT0rFLFXBB7pZ7gce5DoCmlAQobQ9a6yiTVfsvfRjon/FzQsFDchkUs3JKBbJ7K3Jkh\n",
+       "TX9bgLBby/2vEZ6pBdrHT+dbS2LgXOt1Nf0d0LrMpxVInktRyl46/B1TfsROu0XigfDF2zCbhhEj\n",
+       "kPFjDaPi86WESk+PPQFeicKU5KhrJkBIGBdruKpsrkcY1ziyhiYJ0+rtnvRBrJ9L+nvVknil/h7I\n",
+       "tiSAX0vIiIrRlGjwDfTX6net/8ORn4u3E6vJyBClDJNLWXdSkiJKhAx6Il1CuYEGVIRMQqEpQna1\n",
+       "9JVK3cOhkd05xsul7T+nrGuYSbIBwqe3QgpfIZTOGn95VzZq3p+BrROuq/rCJEmeP/f7w42EwErU\n",
+       "R6yl/wypNjuzAn/H+6RfhtRKkRBgaLORRKXnz33lAw0jbxFa1Lpy8xnbZ5N3Dn7F7a0PbWdy/yl1\n",
+       "UBQiU4P1NJG6lgwP6VxjuE+J0kmrSq7hvt5O21VWg2GpdGev2Q/FrVaqZ+GN4dNSQlYrkLKV7J0i\n",
+       "VN2VCgT8Da+v94tSI5Uk9qw6fp4cH5vUxhDP9qS2+TSd+jGeSViO/dXrdzA+DRJFlEbBubAjtcBk\n",
+       "DwnZkSA/k5qYDBHrPeG4RzKe9c783LeASAULFixYsGDBgt3R3hkilR3XzgmZrAA5uRZEqE69hycv\n",
+       "te7igh6MeR991iQSL6VaUsxMEBYQhfnTgdZLA/FcxRpZd28oIoWsQ6QCZluQnXMR2qMO31T4ciVE\n",
+       "6iJ4nCqWyTftpXhrN7feM0gaI+eV+b7QH+v6dWIbuy6EC5cz80xy1vOCN7NN7Voku+dyjpfX3sNM\n",
+       "MiMAv3rhPT0VumtV5RIb/xpIx+uJoVnnM+/FFqmdLwPZ8xJeQhJZX4/hOSqqkmz9PeynNimYztwp\n",
+       "DLnsFh4l6OdWw4sIQy4Cp0nskZtB359voEgX7sV6s5LvI11e0B+SEksZ6xHOEwkpug9PM5FaVzmL\n",
+       "qoNsn+fWNnrfcaoaHkj/VU+rIhFUPFLcnzK2MWGaeCNrgp4YESYd67ZOl3hmaeHHq981sn0JQvfR\n",
+       "2BCRCe57I0xpyh9o/ceGhG78fy1oWQwvUBNQGsf2itBuTKSpPeRK1pUTT7MCOhbtOJdIHgEiVIiA\n",
+       "aox5rYTdDpJXFnNDcDZLP3fzWEis2GSaUhAZrhMZ/wznLgV162GMXcF7LfOfySAqoIjR07pmp8P7\n",
+       "vl9Cei7nQFjum5guE1D6QtQGcO+aGshE1+5rtK+b6KIE9foyQ/Oupr7fXz77pj3G2zgc2fkK9E0J\n",
+       "9Q1uEJEJVWnpAZGOBU3vIdkllZMslh5VUlJ+i3BIXUESpTVNnzIBRDAUmUiwT+r6J2F+KvXvtvV+\n",
+       "TTiaErUJNqmcAJGzms/EWhE8PBNmEpEBYtYRBIfolJLiif42OwsAotOiXbJCQsNG6mkSla23zc7/\n",
+       "nXNuCfRrvjCkpx768/YHUv8Pki3aplcvfZLP1Y2huZRfODk1mY4Cz9YGQqPbUuY/pR7k+UsUV+Uf\n",
+       "iNJtpK9pG2ESiRuWE5S9uHvgPqoFRCpYsGDBggULFuyOFl6kggULFixYsGDB7mjvLLQXx7GLVHei\n",
+       "4yFAErydc67aeKiue2ThnkePPGT68lvRv4A68VC0WBYAhDcaFkAIKEOIQ0lvJOqqOnoMsu22MHB5\n",
+       "jdpoc4GME5C3Y1GWZe2wwTje++1q4dvUjQ2KXEPjSKSt3BI18W6nBgUP7wGqlBAkRYlLgSw7CP3c\n",
+       "P37QHuuCoE/iXCpaJAXG/f3H9+28gHhfv/zK2rTw11hOpU4h6uOdj+xaG4RWh0JsXS8BWfcFMm08\n",
+       "Gf3J0H+vX1jYj0TFnmiH1GhTJqEdDruKz5LQWpf2vRy/SRuJAYG0y1BgJiGDdjylhiPh80JCa8tb\n",
+       "H47abGyuXcxRf24o2l45tbLsGhlgbGp6qYoxCajVjjo4+iW12QhLq+7KQmpRtm1nOEIuQrg/bUN6\n",
+       "ou3GMGbPEgbqte/3WpSQT478PduUFp66vPShvb4kjzB82Ije0Xzhw01xRDLrvj5OLKrDjHJEKuON\n",
+       "NmcyT1Lc67LcJ90qAZX3pIPEgmMJT5JEvFPrDOO+XFoYYzH1YYn3H77XHru+8v3vdSUkgDCa1gls\n",
+       "a7JJUgpDn62atBDwK4Q0en2bVwwzqWL3lueTMGLe9UkekWoctUr2Nu557O83hbqTQu4hwlhNbCEb\n",
+       "+uPZUAjoHYTbciEsr1BtQSowULQ9k/BlgRA4538s6vB96BJFggGQKtHIlOAxneusydgcUAA/NCeo\n",
+       "jr6jIUR9JgkFbvCcUsV+3lc9L8Nyqt1EFXWdY1y7TLY5pI6eiv53D2vs5MRCYSPUU1xIXUFev5FE\n",
+       "Ge4ZSkAnAbsslSjv29JHWFbrzy5Xc5zfxikv/OdTIeBvlnP02faJGppuI1l3GZ5LqsVFKg2Tt7TW\n",
+       "IasH7JTDw/e3siY4FVTvq1X0lz1phXvSCM1BE4QOWUCkggULFixYsGDB7mjvDJGaT+OdNNhOzErb\n",
+       "gkhF/q22K6nB7z3yb91Xr8zTnE/9W+VoqG/p8BLE03FADFhrqBFPnwrAkRCwWy+tFLVtvBGXQhgk\n",
+       "764rXkIXnl7UERVlfg6ph9VCaJSRfav9C0TlUryqDVCqQd/eqsfwNI/Hp+2xM3iHP/rwV6Wdvj8k\n",
+       "TG6V4AjS62hoHmRv6Du2klpfP3gfSsAd8SAy//mgMC85AqGzI7X7qFR/JLW+iHqwrlpHEEGihFkh\n",
+       "iETJ6t+qTs0UdvOSSiCSC0n/Zs2uSMim9dZ7RxWItY3ca3qCmUht5F3fvh2vskWu2kMtIqKqxG17\n",
+       "m30viV6oep9EmjT9e7lS9Whvw8Fg71hL2BWi7JpV5cUj55/lIWI5iJqVzD+q929WRraew9N8/dok\n",
+       "MQYgL8cHVMGlO26Cmn2DAVK9BWnietmpq4WxK6VeHpeOku3pQe94qZjjqSjLD0d+LhZEa6Vt24bX\n",
+       "t+8vofa+Whvix3uspOAO+t80Nk9H8OYjUVZPWCdvp6KDPx8TQEpBqxKsxcVs2h6zJApbJ0QaM0l2\n",
+       "4Jx0gpI1me9/p2tINLfAGMhFJOvatV66VFtANYJSVMw7QHUe37daj1OiIzuIKKQrZJ4QbYza2myq\n",
+       "js7KFravR5AHKXTvAjqkc50yJaqAzevvoHlb1jr0/1epgUMSAryEVtEg6qGoClP8lai+Qt+WgiDf\n",
+       "Tjwi9Ob6CtdX+YUe+irVFoBwKnJHxGwwtL2Ba6KU6hE15om2c0x0SEjpHANWEVmtbQzXSOxYCdKU\n",
+       "pf7zTPaT+YH6e/wzlUyRXg+JHx1NsuEez7UjEYm2Tbb/8xpHR/ZMTLAWtCYnZRwUTWMygCa01UJu\n",
+       "P2QBkQoWLFiwYMGCBbujhRepYMGCBQsWLFiwO9o7C+1Nrxu3iQ0ezbdeg4MFYJ1zbtJ4EufoyZP2\n",
+       "2LDnm/zBxwbZffWZh0CrxuDJHsJ8GpboEzLEJSoRRVkCYs26ojECCDQqJdy2rPaOpdCnSiS0wMKo\n",
+       "mWjQOBCVyxKwvIQn8nq/yGd27NuXSOQmh2L14/HH7bGH5x8455w7PzJ9mH7XE4A7WiAU4YY+YW8n\n",
+       "WhxUYlc0Fd1Zie7HSebv05ORhRbbcwhRe4OxU6J4Do0e6in5H+EfQLG1hOcY5ewJYbnVlpHwyHIB\n",
+       "xW7hVyeOSrhyT0hAl3FPc4ZgCPGqYjiLoVpoo0I16rXA2FTHViXeIcZa9Z4OETtJqCWZUtWRqeei\n",
+       "3ycE3pMCvTXCXanevJj3U5TVM+rj2Nwterv91zBajTleJ6I2XvnxYQFk55y7vp2jTXafpghjnJ+Y\n",
+       "3leGEP18YXpDJO9z7JSwn/agRL+jzs77IxQAEMC3ElprC3lLQkWDOZaItgyLqq4QWllKdYQSYYSe\n",
+       "FOhttXVEs+js6D7aITp2mP+DrhRoxdhqOxOMiZLiN2w7lMiTrYS9ElZgUHV2//2+6OJFLG4rtIga\n",
+       "VQtSiVJsJ9jj+qJBhL8jx7bvqzpHQlgu537sXr2wAtUbjMVaikbHMcNoNsYZ7sVwqGscbcOaWE+W\n",
+       "8pn//subS+sXQjEffPhRe6yHvWYroT3urFuZ//yrrLRNUPQ/UCD5bYVv55zLcY+fvGfPqS5CaoUm\n",
+       "xaA/jcyJAdauhi8TcEVYMUIVyxmC6/TtodAd+r1ew5O3V/7ZGUsh8TYELeFmUgA6fWtTf+T3Ow3B\n",
+       "cX8uDxQUPgKxPYst3Mz1qY+/vNV2kvWE8VxLQkWJPbsR6ksmoV/nnKuWMv9W/hwL0TZsleJ1r8PY\n",
+       "zUTvazHnPi6UBtBLdkKQst8dsoBIBQsWLFiwYMGC3dHeGSJVuK7rx+bpL6eedNrt2hvkoOMJ07Nr\n",
+       "ITGeA5F6aJ4u30gnc6k/hzfRjXh/I7zpj0Esr5y95V7deA86FUmEFCTq1VZUh+HOdYdSpw5v9bOV\n",
+       "eKRMq8/tTb8L8lz/1Kttb8b2/TmUgPsjSZeGOnl/bOd4/+wT/GuI1IMzj0Rlkmocw3PdSk2uEunp\n",
+       "9D71jZu1BjWtuIL3rYTBtOe9n5549fROaqm11AUSpQq4VE/Wt3d6DqyTVCoRHB6WEoZJeowqafvA\n",
+       "/0ZrzdHDy2X8qYZbivczgxezRh06JXvTE1yJp0NyfCZeVYY04a6gRNEOy3n3fAcN81UV46eoHTgQ\n",
+       "MjmJ5UpAbdWZpU4iZRVymRNFvn/fuU7YbyVMb5HsoenqlBhoBGmYr7w6cRXbnOB96ktSxApp0oXM\n",
+       "CRJlicgpwZTef0/I4SRRbyVdOeLclX4Z6iFkY6TxD6RO3O10ht9C2V3myxUkDI6PrA8F1lUisM5k\n",
+       "6r93Kujb/MrvZyeCtMxu/LHTY0u2WOEeKxIfI9Xa6qrZmFCdXZHOAiRzRQTbWm+CXMRIRom7dn0H\n",
+       "tKtuBDlqPNmYazKJdb7wLxl/oL83t5aAcHmNGmYyn56++M45t1vrbjDwY1sLoZ7IBf9VRJj85+OR\n",
+       "RSQ4T5JYUbr67Qa31GGta9dKAsj3OI5cE6uVos/+LFrrb1CM9trJ/ed2ZpUd2M6iI3UV012kxTnn\n",
+       "hkCYmJyg7W0TYBTqQYLW9EbU9rEnF5KAkIEMLwCvi4DY6X7eov6CurX7BOekRF866EPv3KpIEAlU\n",
+       "hDsD0qN1Ojk/tpLQdH3tka2JzCfuccdYO1Sfd865LvZiVefnNWq5r3Mom68E4W9iKrXrPgHUX+op\n",
+       "fn+lvYBIBQsWLFiwYMGC3dnCi1SwYMGCBQsWLNgd7Z2F9jpp4XpSCLCE3s+gEMgaMF4tobVN6SG7\n",
+       "Y4H73//wsXPOuc+/fdoei0EsbhZSyHLlYfx7Zx4C7DkNT4EIqUVTgUAOxgZF9hDSa7ZS8HgLyDSX\n",
+       "EEyPYURT6j4a+5ACi4BWjYVxliBMrhcGOzoUQS0bC20+OfWhvdGRhSfIMS4TISyCKR6LLhdJmckc\n",
+       "RZ5FnToFnNkprA+MlHRFRTkCQX2ztnfwDcIoXSGbt2E5AUVrhBa3kYZPWTSS993OkaYM7e2/72sY\n",
+       "J231gfbVZyOBwDOEdKnE65zBvVdXPhTRFY2dASD2lRBBGbLaSr9IXtVC1tQ70gVGuDmJNSlhu/PZ\n",
+       "kdzXwchfP5O5ziKcO4RJ9GEkCQAnJz70Meib3lcC7R0l1jKUF0dQAhZybouKa3gE4dGVaPFUgMo7\n",
+       "Aq0zLNOV4sZ15dusyt41QoQ1tLh2wp/N7jWdMy201ULmOueOzgnq3sje0UWSQye1sAArHdcI6c9E\n",
+       "Cfr62hN2VYvo7AhEXAmZs6j07e2b9tgI5N1MQmBLjHumoTqsic3G9oLVAkrNCDN2JGS8pt6QxBp4\n",
+       "D0shAPcw7oVqjHX9/FC6QzZ65PuwEaIw9skYhbcbrQRALTYNtyNk/OCJKbv3oaP08ruv7af49+LC\n",
+       "iOJPn3mCuoYlqemVtrp3drFjzPFEQjsMs7+5eint9P+Mx0dyCGGcRMPHKGQtHZpOfcIH14Im7Bxj\n",
+       "faq2GxM7Gln/bxCWWkxE74skcgnBpwfI5qxaQHJ0vEN6RhWD1b5ivxYZNs02JUkjoeVGiivjWlrl\n",
+       "gjpKpST+cH9qC6jLnnDvnqeWCNd9J0HGrlXhHNJ/xBlz2SdInxgNbe+ifh4pCwN5JvVwLEsO4EIy\n",
+       "d6xSg+iiRfzH+kOyf0fCnVHz/cG9gEgFCxYsWLBgwYLd0d4ZIpXGmYtFRZxeep7ZW+gIb52bjTVz\n",
+       "tkRKuIAP3dy/LT86MrLn6wvvHTZCSqZq8py17vrmGZ8DaaoK8+pZzye+ryRO7wmp2nESQ4E726+1\n",
+       "VAh5lWq/fRBAc0EaKiBolaR/3048UfH1zSvrK34Ti+psTSXejdQEw/mUqFojJXsLT0MJlt2tb1NP\n",
+       "6pqxnfr2v954zyCR6xOl2vFg2hRm+94MKEJRmPcTQ6k8QdMVfaJXU5b7qaeFSA3QH1iLR04U6VYI\n",
+       "izT1fgcDP9/GY+/hJYVdvz+AhISMCdN1b+biwaOPlXhh9OAq8RJXG/ZfJCFIrEVKdCTeWgnF4K2o\n",
+       "XpNEqqR0eti9js3dQd+juX2p/2Wpy9YmemKcC7u1ufz3msquRVkBJwT0LpCOblfQVyAHVW33pEEh\n",
+       "yfnMUF/KRJRIyVbHrwOy52KtkhT+HyXsLqf+81zqTw5A8r5d2PVHSEmfSFLIHKToDATvkdThYmq+\n",
+       "evVbkLK1KkIf8gg6TzvjIZpra5IE2U2l54PsR2XIWR6BgM3vyaDkSKhIhIBMpEORm5bPmwphthCB\n",
+       "THUAACAASURBVOPnRCKo77bx9zuWNm0gwRFD7T/pG4mYay0SpM0B6Xv88a/btUDG3qxsnVwuLpxz\n",
+       "u7XW1qi12I2tnRvsXTfXfu0qgsR1tXxzYdfC9xXVyUGs/vAjq+wwX5HYb03PEXbodw25m898/1cr\n",
+       "P3cU1eIErA6guqWgRERJC9njibYS8XLOUJodOQXMOyp2q6wL93Otq9rW5tM5QXX8SORMDpDnD/02\n",
+       "SiixYvs+E16IoCuCx/1UE1W4d+sx9nE3oQfPWElAYf8fiio+ETPWQlWyeXsNeSbZXNCEKrxjyDO5\n",
+       "wfgksu8ymqBq++5AVEQtIFLBggULFixYsGB3tPAiFSxYsGDBggULdkd7Z6G91Wq6A9nmIBY2G9HY\n",
+       "gNp3LeGuxYQQqMGoPUDLqcDYPYToVnMjNvZzH8YZpR4yHPZFCRyw/1Zw3w4gwET1meJ9sjHhxkSK\n",
+       "WxJmV70RHmPhycHI4GTCnZuVhSJYoHchoQiyPFVZltDmji4USYnyPdoG5HUlBEa4fiWQ6QZFMJex\n",
+       "hPHQB4Ws+XcloY3yAIzLQr95roRyhCoBuyrEzZ820i/qPelYUzNGtWgO6cOwvx1RFmZYgOHGSODh\n",
+       "BBCvoONug3vSCNmYsLjSETnuqibFthxqE8dkraFAwO5K7GVEqdpRYsaakbZ3oB+jxUA30M86pJV1\n",
+       "aJ6wR9OphWcY0o1Fs2c8PH778m0RVlPHdq5A6E1R8jV0kdjesrJxzXOEbCQ5IM+wFoXZmkFlO5Z7\n",
+       "EmHO9kUVn6Tty9emVH1x6feHxdyP+w9+8Gn7GdXWZ1MLD6fOz6uzIwlP4y5r/1lIOJIttshZhNdC\n",
+       "Kxz/kyMLn5Ug4DPMu1waOThN9hMqSPKPZU32ELJ2QiKOEKqLhJYQgeR+/dxCZfOZH58BxmtgERYX\n",
+       "D/2eqXM94izXRAGEwz/49EftoadPv3XOOTcaWvh0hIQODXdzus+RFKCUAa7rupI9CRNKaRQrhNnm\n",
+       "Uxs7rvWVhOCqjMkLEipHWIzX0lDk5aUfJ93D2vUvg8J512yl4DJ6JsuvvUZe7Os4LbHva5Fzhps0\n",
+       "ZMb1qTQO7v86rn1QNDIJozG5ZWffBb6i56MxjHfoWaPHeF3VkeJepwXfOf81KagGfaSWsHiN36YD\n",
+       "JErVGgr090L3tbTHNqlOF8KI8t4RoZ07sn8tAV1oQb9ESep7Eakoit6Louh/jaLoL6Io+kkURf8B\n",
+       "jp9EUfTHURR9FkXR/xxF0ZH85u9FUfSLKIp+FkXRv/q9Vw8WLFiwYMGCBftrbL8MkSqdc/9R0zR/\n",
+       "HkXRwDn3f0VR9MfOuX/XOffHTdP8Z1EU/cfOub/rnPu7URT90Dn3bznnfuice+yc+1+iKPq1Rl+p\n",
+       "YdV246aSwj3KQRQXWYH5xL91L9eCvuBt9fl39lb74MED55xzvY6RXfsdIDyFvWl/8NDXYjo79XIJ\n",
+       "vZ4R23spUC15gaU6eKcw0h0Bq9VmP70zz204G+roikwA1VtZh2+ztj5wiLQOVAd1/x6k4q1WVOA+\n",
+       "RKwzo5ekqBMRI5LDt+LVxzjf6UjShfGmX0d2jgJeaiqIEE0zbROMXRSb50TCYrxDKAfqBmK1kqj5\n",
+       "/UZe90mypUqvP4dHPypBM+jp7HhkA38fdTre3voUd3p4WgdqDk9vLR5xRSKi9LsCGTyRe7et91Pd\n",
+       "6TlHcu9WmM+8h3ovSdSOBCWtW1K+1MEi0ik1DKloHGv6L1qtZFOOD+eJetW8RCbe9wboSJ7YeUv0\n",
+       "tZHxp7K9ElZJWq9LJVuznh3UvGWsmw3rdUldS6Q469xtEeFYPfIIbbcFTW++kfFsEUZc4oXUixtD\n",
+       "WXoja6iGnEgiqdZMIugIAT7v+Lm7nhtTOIqI0lr/CyrFi1I7EdMkJ0oqfcB1FdUkqqsLJR9DvfzU\n",
+       "UPeoAbH/1XP7Xt/vu4va5sTTCz8GH5z5fbUrqE7SP8cfzozSGeLBx/Dgh1L/8/H5Y/zUfsz5rgko\n",
+       "nFo51o6i/7cgag8GGiVADUMZV9afrCVRo9fz46o16drbGCvC0cH52EabkwvUfxsMbF4TuXnxwuQX\n",
+       "SCifLQzNpZzJQOQsRkO/355LAszlm9fOOefe4F8lVh8dAf2VPZSoj6JkXONVtY8054JIdlBJYKfG\n",
+       "JmqRbuQZx+nGfedQdYSdGp7YV7pSbYLX2FGqxz6tQu1VwmiCHeQdW6MWrUZu2BaVTuGYNbGgWjif\n",
+       "1u2L1r7/ijhxbHN5xml9zEP2vYhU0zQvm6b5c/w9c879pfMvSP+ac+7v42t/3zn3b+Dvf9059981\n",
+       "TVM2TfO1c+5z59zvf28LggULFixYsGDB/praPzfZPIqiD51zv+uc+9+dc/ebpmFO/ivn3H38/cg5\n",
+       "91R+9tT5F69gwYIFCxYsWLD/39k/F9kcYb3/3jn3HzZNM30L1msiZeju28HPoqbYEfRoWKw2E2Ix\n",
+       "NKMWQk5bQ1NGVbkvX/nwzIcfnrfHBgMP7XU7duz+mVfePR541edGmt2Ge7RAL8JHTSQkVjRPkNAW\n",
+       "gtRxITysRSNbWPRA8dqWPCltGkGLJhfCMLVAlBx8C90XPcZrKYmQxPcOQgYK584hVrKU0NLo2MPO\n",
+       "6Y4+yX4YpdU0iQQypy6HFKNs2rbbNWYgJVOpW/uQIixDrRPnLNynhF1C69ofEjCrStqUUIHcxiR5\n",
+       "S5VY1cnXIK+vVB8L3eqK6u2WhEmZ6QWIsgrBM0SjocW0ArSMqdPshKwA2cv4V1sWNxbCMuITg6El\n",
+       "L+RtWEjCLfhbIXiGAxgKSBLVfdlPYuBYN86uz9ueStHiMRW1JQHg5sbrop0fP2iPlSWI3xjD5dyI\n",
+       "3acDr7rdkTXE+ZwoYRTH8txCBkXq59O2vG6PURdIw5dMVCkTVBGQfYUhPS1Q3WDuFHJPGsQldKOj\n",
+       "3tG6trnep0ZZY9dgKDPuWkidys9crzqvm1ZZXPZJ3Lt792yvY6g6jUTZPEVou7CixbdXnmy/mkgI\n",
+       "smTBd39jVXcpajg/9NHByStrnX2RfSLCmkglLIpCFe2+js757yE81Bd19qwNWcs5sJ/UWxsnFuhe\n",
+       "yj7RCvXLb4fQiMozCcsizLg9sNe245pqKNB//8kTwwwuLlBweWFzl3N2LIrdo7HvWxQrLcBf9+bW\n",
+       "z+HqjSVHUNvudGz3kEWOtRjwBvNqvrBn52Ti11YntzBiivU/GqlW1m5fnbN1R+qFErvbxBqZky2N\n",
+       "Q8aaPVSyfYb5kR14xuxWWdjVr9qlICCxS/bpQ8rqbQhYn+cIh+s84T651ud5sk9lUfulL1JRFGXO\n",
+       "v0T9N03T/EMcfhVF0YOmaV5GUfTQOfcax585596Tnz/BsT37+ovbtszC0XHuhg8PfStYsGDBggUL\n",
+       "Fuz/W/uTf/Rn7k/+8f/hnHMuUXHOA/a9L1KRf9X7r51zP22a5r+Uj/5H59y/45z7T/HvP5Tj/20U\n",
+       "Rf+F8yG9X3XO/dmhc//O77znGkFLepn3llRFuEZqbiIkyhhN3q5UVsC/LS4n9tuTE0/QPj42ovbx\n",
+       "iMrnQGtKewslwS6Wa23pfQuCQJXpWAh7dMhSqTWX50QflBS6S2yrd1LYka4pnna348dkhwCPwnrL\n",
+       "2DyNAsQ6RW6IyKg3S+eARDz1KiYz//1vn1lklnXomKLsnKX1Ktm77YOq4+JtXr0aoh5KduaYkJSo\n",
+       "qbGXSE3XdtKurq7av8dQo1YvhJ8rIpSjjuNyKQgnajzSXYoTRXBIOhR1eCASaSaERcwZ9cg6XSJ3\n",
+       "ugChwC6oH72kVj3++LT9rARRVBGRGdK049hIrKy72BMSK+UZBBB0FQnwcp9ICuUxRTA5KDtJCZh/\n",
+       "G5EkSVA7byTJG0TTlDxuOcZ2nzoFVJGx/taVev9ck3YKqq1n6iC2a0yQPiYbyDxdAHVVz5UJIryv\n",
+       "WlfPJBysr2eP/FhvBBEcgnisxNrr25u9axUgry5lnY5ZvUEIzbxpDeaiol/teMm6Kjo471qIzYNf\n",
+       "cc45V9WS0EJl/SNDBHP89s2loR5fffOlc865Y8izpEIEb2+Gok+8r1IvcItxjaVO3z3U4nv29Iv2\n",
+       "2ArjeHn1uj3GtfDkyQe+HVJ/knXvFJG+xbrXemlMfGAyi3POnQDFUYSL+5nuE9xvKBeglRBaFXNN\n",
+       "4sGa0fs/QqKC7jWcuw8fPmqPUcV7PjdCP5fggwceXXj+3Ejs333nEwFKIYLnINEXqSYs+DZXsnbz\n",
+       "gnNM6griuorcd7CelbxOZGk+35dkIdLWFVkZPmN0nvJHssTaShmphngYTZHIwWK2+zzr9ewecioq\n",
+       "OZ577I5MA/bxXPbTASRDVo21k+f7w9/7XfeHv/e7vv9R5P7oP/+v3F9lvwyR+lvOuX/bOffPoij6\n",
+       "MY79Pefcf+Kc+wdRFP17zrmvnXP/pm9A89Moiv6Bc+6nzrnKOffvN80vqfYXLFiwYMGCBQv219S+\n",
+       "90WqaZo/cX81If3v/BW/+SPn3B/9sgv3B4lbTswLW2/gpS21CjMEOQWlaj1okUlIUJ8vE++nAz7A\n",
+       "QGqyRYiNUixS+RA5EIZG8jBbPsZOvToIV4rQWob4uqaa22f7sVVLpYzke/sCZjW4R5u1pnDuSxfQ\n",
+       "c+oKl+RQDaUe63QxDVyayzh7JZ7x7Y33UvVdmJ6gxr6JTu3wcVgt/ECa7EIkDphiSi9QzzsDqtYV\n",
+       "j5CehiJXSyANypupwM1RLtditt7rj3GJYvRFJCzwva0gko1jXTPhSJDzIehTBU+4EjSD7VMvLX4L\n",
+       "ucul5hN5CyogyLb0+uaRnRzf2+mDczZ3JxOr60XZgYF45G39q7ZavAiNwiOlDIlzzqUggvUFferk\n",
+       "Mf61ubYC5yVNFKXz80+9ZFIPrq6ucQ7zglvUQWro8XRafzABElzX+zyHXNbT1cR734rcTlD3j3yM\n",
+       "+gCnsJA+PLnn52Kvb3U9M6Av09s37THKaPREzJfCmrp2KSy7lHvcOcFvWBtxZe2doSlnp4a0V+A0\n",
+       "3RekI0qBJgmXz1X+WrOX37WHthDnPT6xOTGb+HV3S/mPhcgfjMBVES4r6wlWUv+NW2bjbE4cP7iP\n",
+       "axnqSomP0cBEOqdAH54/96yQjaBP3OOOjo0jRIT74pWhWgnWwlSQng2+dyz1FB3mUSUER9a/Wywo\n",
+       "1is1UVuJD02X9/+qrAIRqVTWs0my2PmG4EspSkKUrIs1PhQ+1MsXXrri22fGmFlsiHQZP+Z07OfH\n",
+       "mchfHEP09erK5in7kcgc5x67K/C5GzlQ+Y+8nWNS/7Wm+KnNCdbHHMr4r4EiLaWgIJ9ZusdPMCcZ\n",
+       "fTjE0apU/LoVpBZpIKBfk1tb4ytctz7Axyo3ilzvC+GqhRIxwYIFCxYsWLBgd7TwIhUsWLBgwYIF\n",
+       "C3ZHe2e19uo0crPGYNeYqatSQ6meeoit6Btkl6YeYksLSfXuIrQlCrAFINW6EQLaZjeFMhV4OgXp\n",
+       "MjlAOtwlIO8PWbeHsFxqbaeirCodtKEinDeRlEpCkUpEXhHS3wkZUtZAQhu4xm69NH+NTEnRESQh\n",
+       "AM/q93NAt3l/JN/fD1Uy/VXJlicnJ3vnY/RMQ2UtYVEU7W8ZPkGbVGphAMLksrbzbjf+xC052xlR\n",
+       "U0MmnAu7ZPvuTtucM0idcLKGjPg9hb3503qHnGnzjsZwg4aPDynwUiGeKv8adpzOfFhOpQ4SKBE3\n",
+       "IjUQQ1ahlLAUz6O1xqKYc8LucauADFK0Vhtg207PLTxAQuuob+GJ0yPf/8mlhRH7IAhvJWSVx0g2\n",
+       "WFh/1oDWG+fnfeyk/iTrL24Usvf/LiTcVaAtlLdwzuQ3VLF4A2h/I5IMXIskhdcq14G9o9+10HIX\n",
+       "CuzJTrjHt31Q2Dy5fuMTJY7fe9IeW839NXpCSmb4VO8dQ0Sdrg/pDDpCLWASgfRriBDQtrQ5GYFQ\n",
+       "3jTW9hjVG/KB/fbiGx8Oo+q2c86dnvvf3kx9aH8h4bFs4Y9lI6lXibakEsasb3z/k1MLNxWZv/5W\n",
+       "5v8UZOyupOTnp36MSemYL40IP5ujrqckFnCvZYKHc85lQ3++m4nJXzDyXm9FqR5JSysJS3Lhc/43\n",
+       "VxrG83+Px0aAJylbyc4MbWnyBmkp19eWKHNx4cNsx8d2vhHCV+NTHwJ9WNoYPnzo783r10ZAZw3B\n",
+       "I6lhmCYkjGtd0QbtUDkRjF1nX6ZHQ3tMpOrgOam0EMqzrCUBpcazQAnoNUOgssdS9qGSe9JsSOi3\n",
+       "e8K2v3njx05pHG1SjLLYsU70GNe/JjvN537PUvrGDHtmIgllfaFSHLKASAULFixYsGDBgt3R3hki\n",
+       "tZovdkSwSLqMCiHn4cW1EcLaoEG15oF55Gni33o1Jb+toC3k6U6+S+jeEVXj9QWFoVehxDaiCYp+\n",
+       "EeFRAGcBITRFGHg+XkPREnouuzXU6FWLtR/bWz1JeYoS8e9EUmLpWXf7rCto/V/P5nvXItlXPQir\n",
+       "dG7fpJjmoareOsY8ph5Bm3ZPoTshbBMdVPHL4WC/WjxrnM0ETeE4kvTpnHkxNzfm4b5N9tR6Xbw/\n",
+       "ev953unU+lpWrNO3ny58dGReItG5Ujy3Kcb9+NjPISVCX6MO4GopJN4NiN1982DnIAN3xPvbwBNT\n",
+       "suXVtUcJ3ntiY8d0ZqJ6m7WQnkHyPjs2YvWg79vZF1Sl2/fne/zBr7XHhvCOry6MFPv6q5/4/k9F\n",
+       "OmDoRSRX6EMh9QKnK3imUq/suG3LvnBmR1DKFEhD3dh9Itg8kNqZA3iaJKrWQlhtgIgcD61NGbzU\n",
+       "RDzdBOjgRurqjY79PL0Rsn9ZsyajzfFt3Ee/7Z4kQG4AoLvZyubL6Qnuu9TfKzLfB0Ua2PY4tfu0\n",
+       "Wl34PmwFYcWg3M6snT/89d9wzjn3zbc/d845txSkpYD8RjGSBAz2R/azyaVHDk6GRix3SKjJBX2i\n",
+       "AO3VxFCa41OPxI1Gx2i3zZc3QLqev7R5de/MI6aa2MCN7PTUrt/Kz8jexX2yK8+TakPRzQzXtzXB\n",
+       "v7XW6BmurzX52GZVbqHUxnpt49mipILmTzFnVliLWueNc/1MECzuT/lOoow/HxMctH26xx1KhuKx\n",
+       "XSFef28ZdVksbExaUZONJtEke+fIIRMT74g0A/2TSBDr2KqcEB+319d+T5zNbE+wWot2faKESuIn\n",
+       "yV/RrOtroK6y73bxnNT1pFIQhywgUsGCBQsWLFiwYHe08CIVLFiwYMGCBQt2R3tnob3NTematbzH\n",
+       "9QkdGoSWdj3st5bQ3hRE0SMn30sYshNSJuDbuFGiIJRVDyhlM7QWH6iDp1pIfYRPej2p61WQHGnn\n",
+       "pVaPhrYI3/L6h8J+qo5bo6/a3i0wa/3t5kBNrqg8cAys9Ahg7EpCqwyzdgTqZfhGIgHt+ZoD4zoc\n",
+       "GGTfanDFUpOM55UQDEmOrTa1qmiDKK9aQBnvz1ZJnBn+NRif59VrkbSt94QwNtWL9faT+K+wOz8e\n",
+       "DOy81EBaSl0rqvKyvpVzzkUxyeYWRqDOUwd9yITEHEGrqjuUUFBFBW6pCYh7pzA654zquExufVtu\n",
+       "h9YmhiqmMw9xJ1rzCqGCrZCze10fWlO9qQXCaJvKiL0dhM8ePPpIfuvH+Gxq15+h/l75yis2zzYW\n",
+       "YnIggDZbuylL9qfR0Brm80YI2ww4iN5NitDSUceIo3G8W2OxFNJxjcoH47Hd6yHCfIu5hTZ60KrL\n",
+       "RLOOStF5JmrzuO9FR8K9GM+OzCeqd0eVv8ZGdNc2qEl60h3KMYyT7B0Rwx0Sq09jP7fWS9MROgex\n",
+       "/OLyRXtsCpI51/C3X3/TfnZy32tVNaIEHTnq0kn9O94nSSxwR75fvZ61fYzQ933RO7rB/OAewwoP\n",
+       "zjn35vIv/WdCNq9rP4alkO2paabrn+E2TYBp9+ylavoh8QhjeHRuOk7VAcV0CzNJsgvuxbHoXV2h\n",
+       "rmEl6v2kFihV4QbhpmdPvcK8hkyPxv77D+59vHeO1xcX7bE3uJbSXVipQZ+TfN5p8hLXhFJVuj3S\n",
+       "MqC7JN+vE65TWX/48WpqIbMKNIPBwO4/6QhLCd9yzu5oKoKMzvW3kmQThiw1FEedK1WMf/XKE/T1\n",
+       "eXoEqsDRkd0nPrs13N49kFCkFhCpYMGCBQsWLFiwO9o7Q6SybscNG7s80+rrSIhwQJ0iIUynIGKm\n",
+       "8rZIJ1prLSWR/20W77998033ENFOvXoj3dl5iXQoiY2EXv0tPQwlqb2NiDXNPrFc01BbREr6RVKc\n",
+       "ol+t8y3jRISlbvbRtyXSO2fXhgwMWENPSJRjELUruT7lCZoDBOxqI5XW4bHyPvjvoXadpCnHJPm3\n",
+       "fTZPZ41xTcUzope8032qWCvZFtefTIxYTmIplYOdM8+FqFMt6Auvlcs8WcNzauRes56Wkt1zzM+5\n",
+       "kBgvQMDV37LtnGunQuymxeLv9PrDnb4459xqPUdfbK5vQejURIEaKf6TG0MkSN6kOneskhyUhJC2\n",
+       "3IAA3xFJgPORRxMSkQYhP3pyYbUbu7jXmgBwDPI0EZmLZ6/azy5eeSSkK1411VF2Kt1jnjS1XT8C\n",
+       "iiWOs2u2/vNZJXUKIZ1CQm8siHgX9bz6PZtXqxJSGyJ/EZe4n7Ke7z1+7M/fP7cGtEioradf+fgT\n",
+       "55xJuPgTYp+4RRJBX+QSMMfrqaF/RZ9VGex7CVPC5VoxFk0keyIRzqEgYt+gasMKpOjVixfyfazx\n",
+       "yObJFjNkJWttfOY9/G0mSAeaksaGyHzwwNffm8xsL1puWAHAX//+A0MLfvDJD5xzzv3885+2x4g6\n",
+       "PHr8QXssAtl6LUR5bo9a/7ADtEuRQyLcC6BpeS4I0gC1FtN9qZmRSEh0kDTBmqfOGeq7FIRxPCbC\n",
+       "K3IO+HsEQrl+RrX9r78zdfq/+Tf+hnPOucHQkNarK4/szme2J1vkxNrOfUdlEijjcHNjc+x24hGu\n",
+       "ckX5B0EVMSZNZPf69tpf//nz5+0xIty9uY0J26RJTkdUL28UdfRzskUEN/L8wfemt9Ze/m1337kK\n",
+       "UJdGEzqUDpEWrJGoUch6ktyOgxYQqWDBggULFixYsDtaeJEKFixYsGDBggW7o72z0N7p/fO22KZz\n",
+       "zs1ASpveWnjkzdr/naVS5BPwYSKsYKqXJkIiJMkwF2Vvhr6+L+y2q4TNosmiLfOWOrlzFmbZ0czA\n",
+       "uQ9pJjF8pQrbCbSwVOOj1aeSvlZQyt529kN2OwRwaKqsF1I0Em2KCz9Op6cWRqIuylz0ORbQMTk7\n",
+       "twKpLJZbCdny9WsfjunqeKLND+9bIVW2Ly2EgE5VboTWlAA/bBnoNv6ExXcI+G3BYxt/alr1pOAx\n",
+       "54SqV1MPheftdqxtb658KC4WwjJ1l1RbpgQUriHAFSBwTVTo5CDPiwI09dNWILYr7E/So6rdcww1\n",
+       "KaIlXjZSSDTz82kjmjU3Vz58shai5hxhjlMUjRXE3I1BbB30jRzNBIQTUTvvIAQ2krGmpktTW7jh\n",
+       "8rUPR8wQJnDOudOx//zemSc9P3xg4ZmXl/7v6SsLD2wb6K0J7E/CPsnhzjm3xbxqpDD4CqHnQkjZ\n",
+       "vGf8V8PYA4TZdV6lGNe8sL6OME59CRn8yic/9G2STA2GkbtCLB6f+LU1GAp5HKHKuufHvZBwP4nt\n",
+       "1bElVmyxdiRi5xrQEaJYYpsxzrO08X+GArY679eY4zOEzB7fM2Vt22KUbO5/e/XtZ+2xDpTlOw8e\n",
+       "u7ftVAjYU8T7MqkJW6BSBPfptayJx+/5/WQtqtfPXngS8cWFzZPjI7+36Z5MDTpNHuG+Qz0355w7\n",
+       "v+fvSXFgD69qT+jeoTZg2RUde3bMoX1GJW7nTHuu17M10Tkwx2Yz7BOFaUXRXr3ye20l4emf/exn\n",
+       "zrndkHlLlRBiu0MiS5ZLgeKNH1sWGffn9mO7WNqY3NyiuDD2vY17YOdAKDhP97X97t27b5fHvq/E\n",
+       "9pzPYqGUVNg79D6xCsi2ZuFnibWR2lLYuh6P/NpJZe9u1708dzMWrU/2Y3epVgWp95+3agGRChYs\n",
+       "WLBgwYIFu6O9M0SqSTP34L4hInO4wvU3lmo7eQmFV/E+RlBRdpJqG4Nkq+jDAKn4ihIwnZGok9ZB\n",
+       "Yj01JeySUF6W+6RwTVel13NIEkCvQe+jre8ln1FtvFwr0oJ2C9LDN+i1KFa3KJqQstdI9V8KAXpw\n",
+       "5j2tLsYhEhVppp92hMQ/W3qvJ7+VtGYgYupVkJT48FNTts4wjuMj86qI8KicQQEPOwZaRPKzN5Bj\n",
+       "naTkg7CriFSL8In3uTiAXBE5XIpS+NsKuDNRZyeqpYT9Cbz0SPLKqcY+FzStnELqQtCvNUjhkjnu\n",
+       "8o7/nPNU1eEPzSGilDvqzEDEqupWvkeyvR27mfr1dHVl/SkGfk7OU9/2YyGsnoD4nkhixRFQzMFo\n",
+       "vzaY26n15+9TKkrhrFdVLU3iYAkEdJr6e9Ibvm/nHaKu2tq83xzedCVk+xr+4HRpRPUV1sd6rbW2\n",
+       "gHpKO0mAbsdTXMsBUrOVxEvUQUnkfRBvHz2ydp7eg6yApKQTxezIeJKAq6hnguwKOsH9gV0/6vq/\n",
+       "88I8/QhrslxZEkEC4ruunVaeIjOE6/TU7wmf/eVP2mNXqAXH5In0se2JTGF3pSEYXB46d2ugPyc6\n",
+       "oFsS223fefrdF75tgojkXSq1A5ESRPbkBGiWwG9rzIWrq9ftsQSfawp/hj1enxNvLnxfN5KU8frC\n",
+       "d+jhQ49+CYe6lYZQFW2S3WdCmOf+SOVs55yrK3/9gdQzpSQC6wpqvykNcHNj56U6+mplaM2f/umf\n",
+       "Ouec+9GPftQeW1HZX87btqMRsn1BSQ6V3aHshLVzPKacDGR9Chv/i0vfh2FX0B/UwmUdVuecu8b+\n",
+       "H8mzu4falV2RxCDat5JEnQUkDqiwPha5AiK2a0HfciSoxPJQZBSrlmdCyhq3EnUqUEkjdfacag7I\n",
+       "IqkFRCpYsGDBggULFuyOFl6kggULFixYsGDB7mjvTtl807hSyOYDKPWenoqK7NaHGzTckkIdWInl\n",
+       "RyAWjsYGIw5BAM0PqFhb8eCdcsDOuV3F3gSK6cPhiRzbL8ZIUw2kQ7pQbxcmXovuUq/2lI7RhAAA\n",
+       "IABJREFU0GqnZxDrHDomSwltMhxYKmQLbD2RvkYIi/X7+7pUdUk1WzsHNZha6Nw5t1iwGLFBrNNL\n",
+       "Hz5Zr+yeZChCWlY2ng0IiI2M8RlInFpwkucZo8ht1BjEOkUh06JnZOcOQltKRFzjbw2Vaui1PR80\n",
+       "XRQWZ1Fhkvz1HvLeKRScA26PZerUCK1MhcTdRdhUFYBLQM9zUUDvIqRJDZpM7uEEoRWdayRWa2hv\n",
+       "gn7FUvA5z/xvL64s3DNFAdPJpYUKeggbEVrvdUXjCTo2mWgrUb06dhoeQ5FXCUFTvTgVzbBeCrX9\n",
+       "rc37zdSHviZIWFhM7Lwp9JOyxO5/E/txTYTYW2DupLcWMqwQItYixBz/11cWbrn/m540z7nRFS2y\n",
+       "DUK169JI5GOEwmIhojI8vJVw7zU02hoJ7cZgVM9EbX7DsLQQqjkHBgMkm4jGUZRRRVwSNrD+EmFs\n",
+       "JxH0eWSeFj0fDlzefN0ee/atp1IsZC9+CUJzVPt5N5O2zbAXdC4tjHoFfbSF6DNVEz/+3/z0x+2x\n",
+       "kyM/t+ZSoJgR2iy1MW6pD/jw9tbm6yn0qcZHFu78+GOv8l2JthcL1O4mFCU7/zpnCR0alqeOUoow\n",
+       "2nuPLdx8eenDh9fXtq64d+i+ZsV1bexOkFjwWkjxVwh3PXpo1/j000/9OaD3dnFhIcu8TVixflE9\n",
+       "/bPPjOxPustIlN0rhJYZHnTOudKxeoTtMYOhP19XiqDz2cmi4frkHJE8L/sUqxhoWLZE9E7pIwvQ\n",
+       "R5rE5k4XmlbjgT13Oz2/Trg/awUIavblkSibI7RbyzNhif1Z91OGbTuin9eG+6WP9YF3BbWASAUL\n",
+       "FixYsGDBgt3R3hkiFWeFqxt5g8QL39HA0nqrjX/Pa7ZG2Mydf9M9PzOy5wPUizqRtNou1IhT8dKJ\n",
+       "UvCFdCOEbZMksDdPEnu1XhO9pEPIhaauk9C4g5zg7ZxoVV/q9fF8et4IKcZaa3CFdH0lO7NDC0nh\n",
+       "7aNNnYG9adOLOQY5WOsbmTdh57135j3YzntGzryBevGXX33ZHstAznv64ll77Ae/+iv+tJLWS4Qj\n",
+       "TuwaX3zpz3OLmmuRU8I4FGb7NiYrIFgrSeufTEmoF0TsgEwAazEpAX259F4K75N+PzlAMMwpjyzD\n",
+       "HzOFWWQdupA4UAXyCK6OIoE9IJCcG6qYPxCSMY1t1z7Q+1IvlSjdrZDN6f3OZoacrYCYJd95L/lH\n",
+       "P/ptuZrv/0LQxzyLd87l++j78+TJk/ZYVQNNnVqyw8nYr9lI+v9m5dv3FITVx+8biZp1GlX0uwIi\n",
+       "1qSGnK2oQJ2K/EK7nmz9cW4NRYH69sbPZ6LeuSDdXRDqh0NDxCKMSST7Cj3cjhB2WTOyFESyB3R4\n",
+       "p9YbSMtbIcXTw+4gUaMRRMqhXp4SdlmcTJGWpmECiqRtYz7f3Bpy8u3Xfv09fylp+tf+nhWQRHhz\n",
+       "aZ999QXWq6iYm/yEzfVTJLZcvf62PbaZ+v6/urT9/OjI7/dZbm2fzv39fPLEo4VaMYBIj9bLJMLx\n",
+       "wQcfyPc8mjMUWYleKzWge4L/7UfyPCHZ/uUrjwSdn5jUB+u68V/nnCs3Ddq5ku9VaKet5xKRhdVK\n",
+       "E4oa9MGehTn203Psv7pfLBYT/GvPhDH2c6011wFKqSn83OO0JiJPHcvecXp6irZZO1mz8/ISchki\n",
+       "IXJ25r///LmprbPu6FIkFJggcHRka5fPnUY21AmQ5bEQ0DPWU+0A/VWkH3sn9yHnnBuiAoSS7Xnf\n",
+       "OV/9F3wftSYiZXxUlf5QDV61gEgFCxYsWLBgwYLd0cKLVLBgwYIFCxYs2B3tnYX2XFy6SojdW2g1\n",
+       "Fbnh+PdO/N9U/XbOuV7tIb6H90wx++yI+kjWnQbK29tUChknHu48wBM/aIRUVR+FoRolMxP2U7iT\n",
+       "nyss+3bR4q4Qy7cIRVGTxDlTpRWEtSXIa3FhqjxvNFQHku2TRzZOBbQ1bqFtcizK5hVCITdC2P3w\n",
+       "/Y98O6WvDx56leMjgUJvJh4C/frbr9tjzVvFeJ0zSPXlq5ftsRWg3wYEyCyxazUoPPvll1+0xx7i\n",
+       "+gz7OKfJA9Z9DXPRMpBxF0L2ZgiW33/zxkikfXy2lnGl6rMS0PtIbBgPDIpeLvy9u3pjulgVYOaB\n",
+       "fI+6KA0Uu1WJm3ZIC0uPMQS4FMIs1bY/Lj5uj50j3PLqpYVWNgg39BBmvhXV8eXKz49Yxprzeb0U\n",
+       "YjFg9nNRwCdhV8M9RONzUXbuAoL/7AsfFviVH/5L7WcF+lCIYnGW+fnfNBbG2/SQ2BBbv0i23coe\n",
+       "w9BXUagqvB8zrnEtGs1kk1yKJh+NfR9Pjk1Ha4OQwptLC3f1+3aPaR30Q8MtMcIsuhap8rzGHMpH\n",
+       "tv85h/WhxchZM1loCTH2iUZ9ZXy8EaLuj//inznnnLuVMEaDSgLdDfYwSQ5YlP57/Rd2T548eIQ+\n",
+       "WFiWhWFjUdv+5qkntv/0i5+3x/7wd/8m2r6vbcUxVN2n73AOJsI459zjxz6kfHwsyt4IaS5Ei2iC\n",
+       "MK7uDX/wB3+APraH2jDTCvf1s89+Yf0a+n4NB7b/ZSkL6to55gif305sP5lM/I1SBXImL2lhYpLr\n",
+       "eT/LjTwnt/vnoJ2eGC1m2PXhvqHM0xh7LFXXnXMuwyuAFiEnUV+fcUoad243tEqSvSqhc6/X5985\n",
+       "FPKHYxu7Y+hN/fj//MftMcoM6r7bSXyb+ggLNlJZg0kBel9TPAxWtSaP+RPrc3oB6oFScPjM0p14\n",
+       "KfPokAVEKliwYMGCBQsW7I72zhCpb7752t0/E3VYkO26HXurJDrV6ZtHPGz8m/ZY6n/1QNDdIcdB\n",
+       "2TsTAvAaSATfpnNRMaf3oSrW9FZ36rrhDVe9JCJS+kZM51CV0qnA2pAc6sx4XiWR16W/riINzQZv\n",
+       "+AK/8BenUn+Lsg+FqBhTHiAGCsLzO+dadHAkRNwUBNCN1Lpb3/gxLKVNBVJnf/WDD9tjM9SVWo/M\n",
+       "S+E4LqdCXoan0ev0d77j++i9hcHAvK/1moiMeUhpBiVo8T6WIEhnO8r2qKsmqrzRW8jZaCTzqkdZ\n",
+       "BztvF+jEWvq/BDl2sxL0BSTivqg4Jwm8dCEuLtGfCNefiYoxic0DqeE2xzGt/zWZ7HvaRClHfbs+\n",
+       "BLPdmUhc0NOsII3Rl5Tnck0irCjbY/qlMq83FeskirIwyNi1KHZTlT/JZEyATv3G7/4t/x1JeS5Q\n",
+       "z24g6vgkjy7lWhXboiRupO6XonZMtfueIMELJBtQ7Z+q4s45V6J2nxKGWWVAPXTOoY0cG5z4eZRt\n",
+       "rf8t8VVSqYmOsjacc1b3bAtUOZJ09RxK9K4RYjlS2GNJ//6+ZO33PrQKBL//B/+Kc865P//xP2qP\n",
+       "nQAdI1G/FPmVVllaJGEePHrPX18SS6j8TeK0c5YM9KMfWkJD1qbO6xzz9+nLrz7Duaw3I6Tm35f6\n",
+       "fzSVemCa/laQngFUtHvy7GiA8PzkJ/+sPXbvnieXnwD12XSs/0RYlOxNhOP99y3ZggR4RXNjJJvI\n",
+       "NtUmaPCaztl8oop4T5KSuCcVgioSwVKUiOhLJmgq0exMHvtEthSlmc/9+L94YVVGNqjJd//+/Z12\n",
+       "OOdcBTRnPjHU5vUrj2z/GqQcnHPuwTmiCZG+dlTov5H9iY4uRHanTDFoc0pY2BlGkGzZQe7xzKo3\n",
+       "QjYn1CUyPQn2qefPnlpfK7/uS9ljCk34OGABkQoWLFiwYMGCBbujhRepYMGCBQsWLFiwO9o7C+09\n",
+       "f37lyrlBcVnXQ6A9gRhJ3u2Kxsb9Iw+BVhLaqWoPXyphjDBep2vQMslzDK0oFEo4tdSKskAAh0LE\n",
+       "a3WBBDKO8/330QTwfZ5KcWW8txJ2rNYGO84mU/e2MdynWkRxq1ljY0JY9vHjx+0x9lX1fm7eeLh1\n",
+       "DDhXYU+G9I6lyCT1s16+NHI44VMtLsyx63b3w52vX5oCMuqo7sDthKBJCtW+vn1+55y7AWFUjzHc\n",
+       "sFrtQ/Aa2muJ0hKCcz0q0PfQZyNCU0dEQzvsl4YbnNsngDNEqOGO9XqJdsg8xcfUCtPrs6CmFrQ9\n",
+       "eUt3yreTBVpFgwvjqVpUHFuF5RkOLOL9NcS/EykyzfHcyn06gdp0LNe38bfQwhaE+jy36/cQqnnw\n",
+       "vtcd60j/O2h7I9sUSbGLjZD4oaidyH3aIHnl+lqSNxBa0zXBMeM4qIo2Qyoa9uCeoaGVDdbRtfx2\n",
+       "fM+vI93PCoQeEyHxdkBlUAX2Gve2Ay2yTMJDJJlvdxJ1UDEhFTX/tqiv6l3ho8Ta/of/wr/sP3MW\n",
+       "Kn712q/3bseHVDVkzDV+fm6hKN7jRNSxSTbWNZGiTQ/vW1juUGHusmIRYn+fdL0yYUATRs7PfVio\n",
+       "J2FpjrtSEL596nXu3hddsJuZ30/uPbBECZLtqwPagkxK0DXBdaWJKimqAXwgdIcG907XJDWgdE3S\n",
+       "ONaqhcX1x0Lxeg6lDFhSis0TXv/QtXYLoxc4r1EqXr5CuPVrn/ijFRjOEEZ9JUlELFZ8JiE7Jra8\n",
+       "emZ6U9wf+5JQsu34tl++sPNx7r56+cI5tzv+1FTTvZbPx75QGzhndPy5x91cWaJK1CY8yTMmk/eC\n",
+       "AxYQqWDBggULFixYsDvau5M/qDM3WRqZrECKp2Rfu5fwID943+oQPS48Oe/itdUfik5Qf0dqTRV4\n",
+       "Y+6LLLIRH/2b+Vq8b6pYD4XYPEFap2IP9E5VFbxCqre+EZcg75aSpkmFXiITW2UdwutJxVukxEGR\n",
+       "SA051C47EeToUF0/kmiVqE7Jggwo2VZqqFHW4NtvRYkYnvGNok+4ll6fqrDffGPkRBK/14K+sI7g\n",
+       "WsirRKfoEakHS1RtIp4+UTL9Homli4Upy29Qx1BJ2SWQi1zQBKbs3oDkrarT9D4VkaA3c3Nj12Jf\n",
+       "NV34CjXu1HNiW/TYC9Q1O8V4dofmLc2mvk1xLMhpzbkm6sxANTQjnoiVqvgeUuBv24L5p2r/RLUU\n",
+       "1eOcFaD14Dg9f+aV0kfi/dVAjmupP9cd+gSJfODblAqqcIvzjoaizoz7VQmCw7mjBPQS15jOra/z\n",
+       "Bdedqh37c3M+aR0woq7a/3aNC/qWQc5A0TyiLzNRdqfyeaRzFyTzuaiNFyDl50dAImRc3RbVGRq5\n",
+       "h0j1r2ub60m8r4pPYEWrB/z5n/+Zc85QIOecGwNN5TzRZA9KXCiJ3CoGaLUHIAHS1wpzayvX4jxW\n",
+       "2Zfrt5InFBHjtSai2E/kuJFrXUEpP5dnwr0HvgLGlexns7lfH0peJnLDa2lZU6p+71SgaMnh9qyx\n",
+       "uq7227Y/sk+MsN4VTSayzvWkex3XpM41zn89xt9om3j96UxRWqJvcg18rkr9x0d+j7984+fYRJTt\n",
+       "59inKkHpn2H9L5c2J3/nt3ySwWJpe9L02t+7kcg0EM0rBPWKUVHh5Ut/3ufPrV4hx4nIpHOG0mnk\n",
+       "5Azq9WupisGhPT61RIE+EiAauSe1C4hUsGDBggULFizY/yv2zhCp3nZXrC8Dz2UjbjW9yliaWUX+\n",
+       "N9e3Eo+GB3f/1N5IE9Zu0pJ08MQofqmVoU0SQcTyJObftgmohoqFLVinS9KU6Tmo50KUgMfI7XHO\n",
+       "uS7i1quVeRXNiiiNnYOejv6WMXyNm19DdDMSfg2F/linKM/N+2fbpoJgrOCRas25ycx/nsj1Wc9r\n",
+       "x0tCH2U4W09IPfwogTeNG6V8kA7GJJUK6ps1K60bR6JNhe4KSoC+ako0azwpmnN15ceJHm5RmIQE\n",
+       "PXGtubSAtziTY3O0RVOYOXbqTVOSQ9tUAM1grUEVEKSTqPefIq5ap3G5ECFWmHG5BBE4gPq1khRA\n",
+       "MI9FQmMFBGEky8DqVJZ7xxTh7UOyQZHj0cjzoJKddeXHJwXCUQiq+hc//UvnnHO/9Vu/2R5ra13K\n",
+       "2mVbJuJps9I70WfnnEuxFraSfm18ST9frm9sX/nkk0/wmXwf51BR2XvwhE/ObOyItGr6O2scrkS4\n",
+       "8D3Uh6tkoSwnfh33TzwiFam/C0RsK/tPlGCtpcb9OWi4UW9evWgPpYkfO+WSbkug+UATlfvy/vte\n",
+       "6kBFZbn/TKe2d7Du2Vb2nxnm6XQm/B7wNXdElzG3iQxVpSII++gvTYWTK1y/L2jap5/8yDnn3P/9\n",
+       "U5M6mIKbqjVZb258PzYbvyYfiagx90zdQ8ghVVSLPCRFkzeUH5B1Qv5TJXXiiCId4jkRfVHeFNc1\n",
+       "+aO+7f4aunfxuTMTftnx0RjX35dzuL2183F9PDz3Y1Gkdr+eQjqg3toAjFBDMZGox+eff+77Vdt+\n",
+       "3sn8HPv2qckPnAD1U+RuBgFW7qc//OGP9tqm6BstOqC+rTxcStvEEjlifdhC9thudx/hVQuIVLBg\n",
+       "wYIFCxYs2B0tvEgFCxYsWLBgwYLd0d5ZaO+oG7taQjy9MUJbQg6tHdTOC4MRCWkvhfuVZR4qvn9q\n",
+       "Kawk1tZbg0xXa9R1Y80plUsAVLtQIh7g1oXAniuEwJTETIJurAg8/qNhrCRhGMHDyCeiZtxpQ3ui\n",
+       "WA2i6kpI+SuELKYTgyx7gDQVxSQBXJXVCRGTKHgrdfV4LJYQA+vJLYSczH5puiyJf5u13RSSgdNs\n",
+       "v67ZoTpxtEPyB6dSh4yhL5IJ/THfPpUEYP23i9eWvbBCfbjTM5snw8ER+uXHaSA10jg/riVkynTy\n",
+       "RMI9lJ2YzZRY7OefhiCOoSi+ldgew1Ir9OHlpaThvpWa75yRw28PEPA13MDfKtmc5GH9Hv8egPSq\n",
+       "48/+aB/6IKCuVjYnmDyhyt5UQH751EihVE9fr63/bGevtx8yGQ39vZmLwjHnzkpCoAxBLkT+4gqh\n",
+       "bQ3Z1LhliYSbOJ4kFh91LF2aIY5S5vo1QrCzmY0r57CGWxg+11Tz6YzhYwlLMbQq5zvnfI+x70Wa\n",
+       "lIIKCMpZaBiyFEkWt2/cHpYSWmQ4TEm53Dv6mMMkWDtn82l3XyFR3/Zpzms9RlrE6enHcsx/T9fO\n",
+       "eMT77tsWSQ1V7iu6rzL0diuE/Rl+G8v3XoIA/fK1rTGe5wnU2Z1z7tNPf+Ccs8Qb3a/OsHfoPeQ9\n",
+       "1j5QuiGRWq+UlhnIPOGaaXb2hM3O+TSFn3uoPru4rnVMTLJl/7xdCbe9+M6Pic7dDZ6ZvZ4d+/LL\n",
+       "z3F9yLRI2OsIz926sfO2fZXvpQhBX0iomEvr137Nwvcxkqs2Eu7Mb/25nzx5f6cvzol0UanH9hOw\n",
+       "2hq3krxxAjqQUip4T6YrO9Y0+88ltYBIBQsWLFiwYMGC3dHeGSJ1/N6Zy4byBl1QVNHeoJmmuBFy\n",
+       "6rJk/Sf7XgVBPCV2b/DmXm0M4Xm7Jp6moc/xRnot6MsSHnQq6cJMl1YPgm+9mpLNNOpd4czezvdi\n",
+       "8ep4ulS8iiEIux2pF8i3av1eATGz169N/PLjjz1Rdi3IwTVIlEdAc779zqQODomVUUBzLojYKVGV\n",
+       "raJP9L6FWL2CcKiQWOnNKXJED5c1n5Qw+eKFJ8UOxDN6CJmGuXgQvI+KkjF5QQn1nCfPBCWht7dG\n",
+       "6vxyuV9D7VxI5CQeK3I4JTok8hf8HtPFnXNu2yZS6HyiN+XnRJsk4Zyboiahpisfoe6cVn+nEOZU\n",
+       "RF2JmB1JnTp6uM1OPUOQl3lM+kBCva6TDqQjyqUiCL4tMyH7soaVyglMgRh0+3ZPmEbPdXX5xtCC\n",
+       "9973SN+3334n3/fz/uqNIY231378n8p8JsKiXaXshSZqMAGFBHglUZeYL7quy9rPj8dPHsv3QFhV\n",
+       "QUDMe03UYIKKCuey313x3EmybYgqNOZpt3NHzltjT0x3hIH3Sbbcsx49fNAe++P/AZIlssc+xOcd\n",
+       "tKkQD/5tIrT2oRadAO4Puk8en3hki+nlzhnqtNbaiZW/aRTf1CSeFeRcColScH7oOd57D6T4vq2T\n",
+       "Lz/3tfs++dgQMQqrptG+cO9HH/nvxZGKP/pxWq8kUQkCjpu1ocQd1Ik8PTc0j2srFYmV9WI/oYdR\n",
+       "jy6kQBYH9rpDKLXKmtQHxET5+UQkOdjXRq7BPflIUP8jyB/0uhTJNZuDCK5IG/dVRZPbGSaSFMet\n",
+       "JI/NMd7v2cwI6GfnHuGeIoqi45+137f9p9MKfNo+bXumCqz6tivCylq0p0fW/7Wg3YcsIFLBggUL\n",
+       "FixYsGB3tPAiFSxYsGDBggULdkd7d2TzD09cJqS39Rp6QomExxiKawxaJRSZKhQIaFO1ndZtXT27\n",
+       "Zg8aPITq37yxmlusl6Uhmw4g7Uz0KQ7V6TPy6r4qtxIACSkSlo221q+vUMPo3n2D3TeAE5WwSih6\n",
+       "KeE2hrs0LPfFL/z5ZjMLld174Il1E6r5ythEIKwqZJ/iWrFA0fxRmu6HsTS0xjaPj/bHTuFmai/x\n",
+       "+1SJds654dD/No1EiRyhgC++/LI9RuJ5Lu0sQJ7uD0UVGm0v5xYCIAT+ArWWUgkF3X/g4eTzexae\n",
+       "I6FxPrcwWrcPbZexQdtxGzISHR1oVTHs4JxzNfRLloD4jwQed9RR0zAyQls6J/jbvtQ6TDGOqqMS\n",
+       "IbSoYQHqzFDHKxHdGwobX0odqsUS35d5wv4o2Xa58N+bL22cSGI9caLKj1AqVYSntxaeSBPf19fX\n",
+       "Fsar5/66L0Sf6s2V//wXP/tZe6ytayjJJgP0uyM1+cqaGmggrM8tZFRgPGutVwb15kHX7tPXb772\n",
+       "7S1s7NIl5rqExXgXd7S9QB8YSBhhtfHjOEpwTEJWDcjLkYSAoxrh2Y2NdQw9tEhFyxxClTMLQTHc\n",
+       "VnRtTKhpVmKPUT2fVasFZ31lsksmYdwx1kK1ldqNOJ8SlW1PthA0R4rJLqmEXT5BuC2V6gSsIlDI\n",
+       "84RciS+/tnDvPeyTqrZ9/9TvP6qB9fNf+L2F4SmtIcr9XFW0Fwhz37tv+wTDY6RnOGcEcCVKs2ad\n",
+       "hu9IfeDYaG3Iltohzx+GR3VeWRKF3RP+5r5QFRjuUv02PkeVKmPPGz/XNAFhPoc6e21zqJ+DliLJ\n",
+       "HvO579dclOJfXvrw3eWlhe/X0Bv7yU/+oj320Se/6pxz7h6I7Su5hwXoAXku9fqwTl+9tH3iNfYM\n",
+       "bfv/9mf/k3POuUeP7LnLcKN+L46+/1UpIFLBggULFixYsGB3tHeGSKVxx5ULeTOHR9rU0iR4i/1Y\n",
+       "yKnwOrZS14dvzoomkVg6kGrys5n/fA5ERj2TLby64UjVTIFI7aBK3qIDle6VWH6oqjm9BBKqx6K6\n",
+       "e4S336fPn7XH7p2wrpV5WqZAfdweI7L24oWRqEls7PTtLZ31AZnOrfWVkthfQ5XVcwxx0tj79gZk\n",
+       "T1VRZ7+VWMz0581rI+nRc8pyRfOSnc/Wcn0S4IlWOmdEzXsPTG04w/jUQgqeAelIhGxOQmda271r\n",
+       "Yn/uj554henLS/NgKDGgshIDeJh6T+j0HwmqQLK7qn2TZKxzoo/5mcGr6ktl+vv3qTpuKM1rtE+P\n",
+       "DUfe+1dEaAhErpFKAaenfs6UpZKC/d+sr0bitHPObZDyq3X1vvnGe463UuuMddoUuTlFCntXaud9\n",
+       "feW9z8ePrK5V68thjJXYPcU6nU+svTcXHh17+tzqOn7+lUdfhRPcevNKok6IoigBHQhfP/djOBCk\n",
+       "bzb3fby5NUQgy/x9f3NtxxwQkytZEwnmhPafSMNS0IcbrN1TkW65uPWfJ4nvfybob5RgXouy9PwK\n",
+       "dSJz+173DPteZt+rlx65W5ZGyv3NX/8N55xzP/6Lf9oee/zIz1Ny5xWtSECsVkSkB+iyHXPn3ApJ\n",
+       "PsuZoWTTKaRbpCbmaOj3QF1PXB99nC+SsAJRL/0+EwRUEoTI+oPHtk/0kACgCuj8ngJ3v//7v++c\n",
+       "s/qbXF/OmVK/ojVs7xtBjp5grWsNwQ0QQ1XgJoqk/WE/uBb0WaOJOjTuoXpeHlOUinN3p7IEzr2Q\n",
+       "MaEUgu4xHONDZHe2aSUo9fWNn2tZbtfaVH4sbi8tKSpGndAbkWk5RoLM7/z2b7fHWIM0Y5QqsblW\n",
+       "V6yNaM8fjjsRX+ecO0PVBk2eefTwEfpg31ujesbP/vLn7bFM+nvIAiIVLFiwYMGCBQt2RwsvUsGC\n",
+       "BQsWLFiwYHe0dxbau3zx2qUrg0cJH1dbIdFdergvPhIS8ZGH+GqBOEne05AJCbhbVUAH2W8O9fSe\n",
+       "kB5JfM+ExEldmDwzWI9aQEoYJAFZYXzCh0refgniG8nWIyEWzyceiuwIYY6EYhJh/TVAgBVo+erq\n",
+       "9V7/WXD08trCCFUNbS2QV++DfOmcczfQ4lHNqi5g9Kaj/d9XbGZbYiEWrqEiv5gb3M7xzoTsWyO8\n",
+       "SNVxDc9mmR+L+drCAxwz1VGifshECnRS7TgWsveSSsmaKIDQXgUy4UMpUGpK7DKvAHcr3M4wh97r\n",
+       "AuHLsYSKqcDOkIFzznXf6711Pg1ZUx/NzsuQ7tOnRs6MIpDSJQRC9Lrf1+KyCPd2bPxjd7ZzraMT\n",
+       "I/2uKj/vxkOb1w8eeKLq559/0R6j8vvnX37WHnvV86HCrYRl2bfJrYURGoSNN6geUMU2rxOqswsB\n",
+       "/eaN1xZTyJ7q5V0JwVArSonSHSQvOCG7MhzYzsmtFMjFXH/x0pSYmUSg5NM11r9WUejjutsL6/+r\n",
+       "C79OVVmZFIRtY7+dg2xbbf33NRRRYC2qttEXX/ow51Y69v6Hvk0PP/pBe2zDkFpla4yE6r/9L/7t\n",
+       "9hiLJM+QMKAFqjuF31dOpLg19eZS0dEqsSecnd5vj33yCTTonNkVQrU7Y4JwZIk9djQyesYzUB8S\n",
+       "2UNIxtaQVXfo+/UEIXvnnPvq81/4zyRUeYziug8fSYUF7PExtKVKGS+SzJUAzlCUVoDgXkz9Jeds\n",
+       "n3h9sV+9QEOlbTISPtNQJD/T0Bq12HT/OTvz7dRw1/JQIXXscRoy5Hl2VOlZgWGL56o8WDlf9NnJ\n",
+       "NjdKWEeYV+/nEGN3JNpepO00shdO8cyeYu/UQtbTqT92Jtpp7MNCwsjPoGz/sVzrww/93zpOVLTn\n",
+       "GDq3mwR1yAIiFSxYsGDBggULdkd7Z4jUm6+e76jJUp07Em95Xvq35Y6k9R6XHolW4Er0AAAVaUlE\n",
+       "QVRYC0ozJ/pTma/Tg8em8gd8YyYRty9EdHoQlSBNZ+eeMLhZ2dsv09UV/Wk/E/I2UaTJxMiGfOsn\n",
+       "me/m1j4jmvBAlLDnIA8rYXACku8hD2YltYGYarsWpe4cXvoERLxIlNCJVqiKcNHzb+mqrE1PQL0/\n",
+       "tl2RM3KcG/E/Hz323ulE+t3t+rbTg9F+Ueohtq+7Y0gdrMUjmYOUrvWieJ6ppHr34PVW4pH1gcBR\n",
+       "vXm9sfYyJfr65k17LALxV0nJQxAxte30JlXtOQLqmYra+1fffO2vBVXy83vmBZEIfXZPa5357z0R\n",
+       "Ze0LeLipsMKJmKr3Sa8/EeRwGft7xrn45ImlAadL1MHr2PVXQI4e3fvD9hjzPp69sEQJ1nG8vrU5\n",
+       "ydqVz1/Z93LM0xRrsxLW77NnHn16+MDaRA+2Eo94MGYNQxvrGGPx0Ycftsc2Sz9n1JtdsE7gBnIJ\n",
+       "pdSQQ1OuRX5hCc84kq1zOvXJIyo/UkLtv5a9I0n8vfg2sqSQFPeHc9g5575Dyv4p0unrcj+tXdPl\n",
+       "VyDKagLOEmrTw77KBPh2jnqCnLPKgzwJstjfkyzxe+1C9hDKVBBxds7QvELkP47iY/zW1t8hojZR\n",
+       "kgtBaXpAQsZIvPj5V1+3n62BumlSxuP7PnnhZmpSLwmGrJTKDkTzLi/tfg5Adk+lnirXEQMRcWLr\n",
+       "le3U/f/Npd8fBJBz//Sf/BPnnHPdgdZu9H+fn5v8AEnRinC/eOERUEqnPJD5f3JC6RCbE0z713qJ\n",
+       "PxMpENqnn37qnNtFrohI6T7Btmg05xgI5MWlb9tEqigQuUu7tq8kkALpSZ3A6crPmZ4mCqBqxWJq\n",
+       "8+n/ae9aYuO8rvN3ho8ZDofiQ5T40IukRUehZcduUqNwGiToInAWaZpN2gItsiiKAn2iqzabtstu\n",
+       "WnRRtJu6RVAUKQIUCbIpmrQo0rdVG3IcRVZsw6Le4lMkxfeQc7u458z5RiQdgbU4JnO+jUZ3hv9/\n",
+       "3//9z/nOd2x+WGAT4Grvd2/nvePeXQ8KqqlFfnXVf39e5RJGRt0ieetm/tvbN+7Vy9q1zqzKbkET\n",
+       "Z894UMxuz3tGWKQCgUAgEAgE9ok4SAUCgUAgEAjsE5IalG8P6KYi6bmLvajV2J6c/2lpkGvI5stz\n",
+       "QyP1kjM9WUU5kbTTib5sKjXdJcDddmVKkGo6R2YW340w3E2JGg1LpJlj/cVuPHNzsQtue3tTf+/X\n",
+       "Md0qc4Xx/Y283U5aTKbszCrCG2rubCE7srkFWAG4qu6AGpFy20v5e9M22m7QuMp1WqfkjGbuZTfe\n",
+       "prn2yI1lZs87d9xlM6jm9mKJiLLqqjPNKsCVoqWwk3S5rq7ChQXXGDG12XUiwFr9hKTabSxYbXhT\n",
+       "tW1aC6yLZYTWPO4LZLJe37CgBJ9D09PZtL9BbtSkbhxObmwuGCbRmouU3VILc3luPVC3y7mRs/Xv\n",
+       "LlzI5mkmmz5UwmaxQcclt2FycrJeZvO4o8PJ7kV1YwqROOdmsjtgeia7Oy4++3T9O9Od4Xlqis1G\n",
+       "cM3IfV2luWPtF0pQ2q6Ezfk5d8FY/czFk8TrZuZ7dnudPpXn1QaRws2PzElr797KZHwOSrAMAMs0\n",
+       "xjC3+JoGR6x6G7bUFWnzAAAeasLX2pbXc2omuzt4/ZtWV0fJ+//MYL6/UBt7j2dXTbFIwRvatuqW\n",
+       "zs0ub4MpO7O20rHu/P0IuTFblbBdof3P9p31FcoAofcoEFFddDwtqXqBNuUeDXawROUA6psc65gZ\n",
+       "OXdri6nlGRyoY/pE169fr5fV1666k6q7aPHNLboby1z6HADznl5vZMRdO6aozq49S9A8M+MBBbbf\n",
+       "dGm/87q+9OqlhjoCrg6/Tm7E8fG8jqbvu8tyq6Z7NwXAdKmLknWczN1m7eH15y44n0NGqG6lcbLk\n",
+       "66xjZa7nFqIA2L57vM+zDUwr5YPbaPctaP8/pMTHo+cyYfu//vvf6mXPXHwWAPA2uRiH1UW5RCry\n",
+       "tsZqtCYsGGdlzfuk05Il1/XZiLCua3x40AMb7Dly5Qeujj5ybmRHu0yVnonttk7YBQ0R/MznfxnJ\n",
+       "ODCPICxSgUAgEAgEAvtE08jmGxvb2OZcb/r200mWFiMsb9NbjZ2mh087EcyUdVl+oFPfdIuU/8rD\n",
+       "2fP1mBxq4JxLZnVqJ3VWqZMtWX4hf2a1cQvJ7O72cPKpKSfI8bUAJqC79auzW8PqKazaQmxrpABt\n",
+       "lpgCneqNeL617W9/5Up+c7K35LV1UvjVt76ODh8UU9g1RW4A6NBXgkZ1Xn2roJN+ScNJSx3+lmQE\n",
+       "eFbPNlVse4PgUG8LNWein70Zzc77G+m5c+cavgOAguaM29pmomz9y3qZ9bupCLe0eh8er/TsuK5Z\n",
+       "B6cp11uL1pP7xN7YurqoTqZ2/tCtWYPDeUwKLblPlpa8XaYyb3IduZ65rFRiEnm5oS1cZ5ZOWFhQ\n",
+       "+Qc45pWgOjg0oG3wNeGyFqQsrda/u3ddfsHCiVsoRNjmNhNLTan+2DGS+KjoZ8ntaiXFbgzV9FpO\n",
+       "tr17NxO1h8+SOrqu04War53Bk7k9HUTsN+mGGVI7FyO7HzfCun9XqAcM+HxdV6vXOlm/zDrY2+vW\n",
+       "7AcL2epVo79d1IwGnZWdEicNb8lqMS/pvsbj2qfZEM4QEbakef/WiTBdEF0ztHYsP107zZ3ezjzH\n",
+       "E+2xZvW2lSC0Ji3UvUhWjRXNtsAh5EZoJv47Hqpln9eTkaJ5jzF5lm19ANyddmvRSy+9BADomHXr\n",
+       "k1liWVn/ZH8ea+tzAEg1zedIeTKnv2eSNN7+wYFcFyORV8giODExAcCtIADw5pW3AABnznlY/f8q\n",
+       "2bxC+99HL+S/PX7cLVz3p/J8FiG1d732229nORG2tA0M5HnNVjV7Fn38J36yXrawkPcftrDWtP0r\n",
+       "1CdjoyMA3AoFuJwP56ns7ct1mp7KfdJGkkDmseG9486NHDDx1tWr3tbpXOfjRLbvUGs/7/uj5/P3\n",
+       "/NyzoBTbEyud3v8P5vJ1eV4Z8f40zatlteb3cw49PW90dXvdbf0lzl6yHWTzQCAQCAQCgSeCOEgF\n",
+       "AoFAIBAI7BNNI5s//dxxpJqbx0VdMYUC1yd//sjwWL1kYuJ5AMDogJMITR+ko+ymZSM2s7aKmUC7\n",
+       "K2YK9/ubaZfdGGaqZteS6We0ErHZCNALZDKtqKYKuwVXVrLJ0NxYTCK2+jIRs0V/x2VGFGaXgbn0\n",
+       "mGxtOlsFSnja35+JqkYovzHp7pnxj4zrvVgxNpuHG5JhKhG1SO2y/lmnZLibG7l+UnCTubnqykTA\n",
+       "XtUxSfpdC/V/qZjNvg8W3d1qmirbNE3qJl3xPjGya7XqZZ2qH8Zk4xZ189hYc0JfcxU/JMV0Wy9s\n",
+       "iq4nXCZ3b7HViNo7ie2slGxtrJvTKYjBNGPYPbythFXmPLYUzN3m5nlzVbIL8L4qdDNRc34+m+p7\n",
+       "+/McfvopX1cWvMAuEyOHtnEmYzWBb9KctG2lRmvM2l+i8V9SF7j19eqKzz9zY26s+Zisq7ZcH5Fj\n",
+       "jeS88IBI7KpFJEQV6FYNqkQug6ruD0V1VXD/13StbZIbr6Qk+35KMgzduzi5dVVd6jVaT0bUZaK4\n",
+       "7QlC42lzsUfHvYGCkCw4g1TsVWW92OUuoPu3JgEAJ044AbdHydttPV73lvXc/8yrTbbf7VJfS7jM\n",
+       "AQvtuk+xjo9RC1pZn0ivwzpGdVdh0V26q6r91qoJxx8suIq4uUC3aF1vKPHdXPwAsGR7MU3T2dlc\n",
+       "dmFivF72xuXsgqtS8MAzF3MiZ6lTS3y+dGkfz5O2nLlxR0b9OXVzMiv/pyrRLdRlOjbmLsDtlOdJ\n",
+       "kVxl6xqUNKPuOw7isfYPDnpWCpsfTPYfVBcgJ6E32kgH7Qmmy1SgdVKu5H5nt2hNN1zLCsGJp8dU\n",
+       "HZwzW2ypm5ldgJZIvpVU6c+cycFjNabv6JjduPEe3T/PGVuLvCYqmtFgjbJiGHjvNrqB3RNwagMH\n",
+       "5dhSfOcdz9QwNjaKj//054NsHggEAoFAIPBBo2lk8y2poq3o5zh76UlkalhdySfYgRNOGDs/mMPD\n",
+       "O3v8BGlvzkzYtM+rK07stTdhC6deJGK5WV2YMG4kU5Mt4LIyvf3NPshvTMtkYeqo5OuxsrlZndaU\n",
+       "nMlEYHtJ49DYJc1h1kFkQ8v1tEikdMv1tURvJBVVZ+3pcQKsnc7NmtJHpEcLZ+Y8cGbpaSCR63WN\n",
+       "YMrY3GBrTm7/9KyTsttVbZ6tTkWVe2hTYm25uDNf4SaFWlv/V8lyYBax1rYClalFjN4fVpd3KqAv\n",
+       "Ly/pv8t6T18SRjpksrHdi5WlrZ9YOmNBQ/y5rKpvU2xNLD7Sx4tENjclYlP9B4DNam7jGqlN26tz\n",
+       "w5u+mFK9vxGaRZKGE32aC8zIs9sNCr7S8A8AlJS8zW9g9bc6IptaTsoy5fozwyZLbJR1ntgbcank\n",
+       "V06qcbK+4n1yUsPVZylwo7NsZHvfE8zSw/ITPo7eoJKS5428y9ay0xrQwvPfAlZ6+3xcFxfzumLr\n",
+       "5+a2ElaprLahVtdWHxMjRVeo72ycbP4vLHj7zRLH+T9nlCg8TzkB76h0xNmzLmdR7s5Wim0ipW/W\n",
+       "cvuXl5y83KV7lwWRSI1lYnKdGnK4aZ/w+EubEpuXaU/UoIhOUrs2UvA2yVlcvZZD1sfO5/nP6++4\n",
+       "ykUcO+ZW2tn5vIbXN32fXlzLY/LUU+frZaPj+Xo3brxbL3vm4kUAbkEHgOmp3J+2J5RIkmZuMbef\n",
+       "pQ5aChpYU/L299oYkkXaSNkLJN1ga3aLLOGmrG2SLHNzbpEzSwyTzc3DwcFW1354BQDQ3+8WSctx\n",
+       "x3I2t+9nlW9WT+/uyHtCT9EJ3X29ud9XlvJ4zl25Uv/uwXye688+O1EvW1ZCf4n2rm4lbG+QnIj1\n",
+       "YzftyXdu385t7fZn8dxMHpNWfT7cuefW9xG1XLaR1Iqt+8FhD8qYVVJ6W8nn38mh/LuZWZepsLU4\n",
+       "NOR5V1k1fjc0zSK1tvz+LPjA0cG1dyabXYXAAeH1y1d/9I8CRwLf/Y//aXYVAgeES69dbnYVPtSI\n",
+       "g1TgieOH7042uwqBA8Lrb8RB6scF3/3PV5tdhcAB4dLrbzS7Ch9qNM21197ehkq3m+JNWXf1oR+w\n",
+       "JsazqXBs2MlhnUrALMBNsavqZqoSsdNIwQ1meSM2q6nY3FSAu2xYHdlccOWy19NwnVSkzR1YInO3\n",
+       "ud7aSFtns2qJeXNZe9HN82baZcJya5spm7sp1hSgWceooiTSjrKbR1dWzW3prtI6oVfdUv3s2lPS\n",
+       "bU+Pk3iNAM1k9zZtoxBhsKify6Uu+tvcnv7+EyiXO9HffwIlVWrv6fXfrRkpUc3+rPBbbt/Z71b3\n",
+       "At3f+ieRC2JJifLHju3UVuKAAla+BhoJmxZEwAEZ5pZixeCSEps5sKHzVK57Q/CAuiP62twtsal9\n",
+       "fLo/m6D7T3jSYhvjnh7/fUJV28DJdXNZR5nKdHmwVpnVb3HJibKf+MSLuT2aXLdNyO+nRHymV9aT\n",
+       "IROxtFPdDR3lMtrb29DZWa6TzdvJjeWuR69nS4vpw+T+LNKasOCN5VXv656U5+f6mrsHzFXK7m5z\n",
+       "y3FQgGml8Z5g6+3kgOlO+Z5gxOqhIXePzM7nNkySe0iSJqOmwI6i6jOxu/GUaj8xsdfc3K20xm2e\n",
+       "2p7U10e6Nzqe98iNt6EurRXqk+cuPpfr1ub3Wt/M/dhWIG0pva8FGwDuSku6n7KKvmU22Ja8s2wL\n",
+       "UFJdrCol/G7RVBXlCq1h/Xp21uefuS0r9LtRVfdvtb1zbWfA0NR93//eu55JyWMX3I3Zbi4gImoP\n",
+       "q/eqRlSBh+p6XCYKhtE8LPHt6po/V9Y21N1PQRSDJ7MLqEyE5V4l9G9tEAWgdWemCuuzB/PeJwMD\n",
+       "+Xr2POF92vaE8XEnzNt4Ne5ledxv3bxdL/nUp/Pz9Obtm369eXV30bPrWFd+njFVALVtYKuKDdUe\n",
+       "HBr2Oj3/sY8BAGZm3N1o7kmmERhthQOaLIFzCwUK9fbltTM/59SbSlde99Pq4uvu9efUptI8TlEW\n",
+       "i/ozgfb6IT1HlOm5n4wiQnu87Q/87DAKyF4IsnkgEAgEAoHAPtE0+YMDv2kgEAgEAoHAPrGX/EFT\n",
+       "DlKBQCAQCAQCRwHh2gsEAoFAIBDYJ+IgFQgEAoFAILBPNOUgJSIvi8g1EXlHRH6vGXUIPBmIyKSI\n",
+       "vCkil0Xkkpb1ich3RORtEfm2iPT8qOsEPnwQkb8WkSkR+T6V7Tm2IvIVXePXROSzzal1YD/YY6z/\n",
+       "SERu69q+LCKfo+9irA8pROSMiPyriPxARK6IyG9reaztx8SBH6REpAXAnwN4GcAEgF8UkY8edD0C\n",
+       "TwwJwGdSSi+klF7Ust8H8J2U0tMA/kX/Hzh8+BvkdcvYdWxFZALAzyOv8ZcB/IVw4sHAhx27jXUC\n",
+       "8Ke6tl9IKf0jEGN9BFAF8LsppWcA/BSA39Bncqztx0QzGv8igHdTSpMppSqAvwfwhSbUI/Dk8Ghk\n",
+       "w88C+Kp+/iqAnzvY6gQ+CKSU/h3Ao7kS9hrbLwD4WkqpmlKaBPAu8toPHALsMdbAzrUNxFgfaqSU\n",
+       "7qeU3tDPywDeAnAKsbYfG804SJ0CcIv+f1vLAkcDCcA/i8hrIvKrWjaQUrLkSFMABnb/08AhxF5j\n",
+       "O4y8tg2xzo8GfktEvicir5CrJ8b6iEBERgC8AOBVxNp+bDTjIBV6C0cbn0wpvQDgc8gm4k/xlynr\n",
+       "bcQcOIJ4jLGNcT/c+EsAowCeB3APwJ+8z29jrA8ZRKQC4B8A/E5K6SF/F2v7/dGMg9QdAGfo/2fQ\n",
+       "eLoNHGKklO7pvzMAvoFs8p0SkUEAEJEhANN7XyFwyLDX2D66zk9rWeCQIqU0nRQA/gruzomxPuQQ\n",
+       "kTbkQ9TfppS+qcWxth8TzThIvQZgXERGRKQdmbT2rSbUI/ABQ0TKItKlnzsBfBbA95HH98v6sy8D\n",
+       "+ObuVwgcQuw1tt8C8Asi0i4iowDGAVxqQv0CHxD0YWr4IvLaBmKsDzUkJ+R8BcDVlNKf0Vexth8T\n",
+       "B560OKW0JSK/CeCfALQAeCWl9NZB1yPwRDAA4BuaKLcVwN+llL4tIq8B+LqI/AqASQBfal4VA/uF\n",
+       "iHwNwKcB9IvILQB/AOCPscvYppSuisjXAVwFsAXg11OkUTg02GWs/xDAZ0TkeWQ3znUAvwbEWB8B\n",
+       "fBLALwF4U0Qua9lXEGv7sREpYgKBQCAQCAT2iR9r7YdAIBAIBAKB/w/iIBUIBAKBQCCwT8RBKhAI\n",
+       "BAKBQGCfiINUIBAIBAKBwD4RB6lAIBAIBAKBfSIOUoFAIBAIBAL7RBykAoFAIBAIBPaJOEgFAoFA\n",
+       "IBAI7BP/B0EEnTIvM42+AAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f66a46f8a10>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "plt.imshow(transformer.deprocess('data', net.blobs['data'].data[0]))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Adorable, but was our classification correct?"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "['n02123045 tabby, tabby cat' 'n02123159 tiger cat'\n",
+      " 'n02124075 Egyptian cat' 'n02119022 red fox, Vulpes vulpes'\n",
+      " 'n02127052 lynx, catamount']\n"
+     ]
+    }
+   ],
+   "source": [
+    "# load labels\n",
+    "imagenet_labels_filename = caffe_root + 'data/ilsvrc12/synset_words.txt'\n",
+    "try:\n",
+    "    labels = np.loadtxt(imagenet_labels_filename, str, delimiter='\\t')\n",
+    "except:\n",
+    "    !../data/ilsvrc12/get_ilsvrc_aux.sh\n",
+    "    labels = np.loadtxt(imagenet_labels_filename, str, delimiter='\\t')\n",
+    "\n",
+    "# sort top k predictions from softmax output\n",
+    "top_k = net.blobs['prob'].data[0].flatten().argsort()[-1:-6:-1]\n",
+    "print labels[top_k]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Indeed! But how long did it take?"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "1 loops, best of 3: 7.14 s per loop\n"
+     ]
+    }
+   ],
+   "source": [
+    "# CPU mode\n",
+    "net.forward()  # call once for allocation\n",
+    "%timeit net.forward()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "That's a while, even for a batch size of 50 images. Let's switch to GPU mode."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "10 loops, best of 3: 90.9 ms per loop\n"
+     ]
+    }
+   ],
+   "source": [
+    "# GPU mode\n",
+    "caffe.set_device(0)\n",
+    "caffe.set_mode_gpu()\n",
+    "net.forward()  # call once for allocation\n",
+    "%timeit net.forward()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Much better. Now let's look at the net in more detail.\n",
+    "\n",
+    "First, the layer features and their shapes (1 is the batch size, corresponding to the single input image in this example)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 25,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[('data', (50, 3, 227, 227)),\n",
+       " ('conv1', (50, 96, 55, 55)),\n",
+       " ('pool1', (50, 96, 27, 27)),\n",
+       " ('norm1', (50, 96, 27, 27)),\n",
+       " ('conv2', (50, 256, 27, 27)),\n",
+       " ('pool2', (50, 256, 13, 13)),\n",
+       " ('norm2', (50, 256, 13, 13)),\n",
+       " ('conv3', (50, 384, 13, 13)),\n",
+       " ('conv4', (50, 384, 13, 13)),\n",
+       " ('conv5', (50, 256, 13, 13)),\n",
+       " ('pool5', (50, 256, 6, 6)),\n",
+       " ('fc6', (50, 4096)),\n",
+       " ('fc7', (50, 4096)),\n",
+       " ('fc8', (50, 1000)),\n",
+       " ('prob', (50, 1000))]"
+      ]
+     },
+     "execution_count": 25,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "[(k, v.data.shape) for k, v in net.blobs.items()]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The parameters and their shapes. The parameters are `net.params['name'][0]` while biases are `net.params['name'][1]`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 26,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[('conv1', (96, 3, 11, 11)),\n",
+       " ('conv2', (256, 48, 5, 5)),\n",
+       " ('conv3', (384, 256, 3, 3)),\n",
+       " ('conv4', (384, 192, 3, 3)),\n",
+       " ('conv5', (256, 192, 3, 3)),\n",
+       " ('fc6', (4096, 9216)),\n",
+       " ('fc7', (4096, 4096)),\n",
+       " ('fc8', (1000, 4096))]"
+      ]
+     },
+     "execution_count": 26,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "[(k, v[0].data.shape) for k, v in net.params.items()]"
+   ]
+  },
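+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "(Sketch cell added for illustration; it is not part of the original run, so no output is recorded.) The bias shapes can be listed the same way by taking index 1 of each parameter list:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# bias blobs sit at index 1 of each layer's parameter list (weights are at index 0)\n",
+    "[(k, v[1].data.shape) for k, v in net.params.items()]"
+   ]
+  },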
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Helper functions for visualization"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 27,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# take an array of shape (n, height, width) or (n, height, width, channels)\n",
+    "# and visualize each (height, width) thing in a grid of size approx. sqrt(n) by sqrt(n)\n",
+    "def vis_square(data, padsize=1, padval=0):\n",
+    "    data -= data.min()\n",
+    "    data /= data.max()\n",
+    "    \n",
+    "    # force the number of filters to be square\n",
+    "    n = int(np.ceil(np.sqrt(data.shape[0])))\n",
+    "    padding = ((0, n ** 2 - data.shape[0]), (0, padsize), (0, padsize)) + ((0, 0),) * (data.ndim - 3)\n",
+    "    data = np.pad(data, padding, mode='constant', constant_values=(padval, padval))\n",
+    "    \n",
+    "    # tile the filters into an image\n",
+    "    data = data.reshape((n, n) + data.shape[1:]).transpose((0, 2, 1, 3) + tuple(range(4, data.ndim + 1)))\n",
+    "    data = data.reshape((n * data.shape[1], n * data.shape[3]) + data.shape[4:])\n",
+    "    \n",
+    "    plt.imshow(data)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The input image"
+   ]
+  },
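+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "(Sketch cell added for illustration: it repeats the `plt.imshow` display call from earlier so the input image sits next to the filter visualizations; it was not executed here, so it has no output.)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# same display call used earlier in the notebook (sketch only, not executed here)\n",
+    "plt.imshow(transformer.deprocess('data', net.blobs['data'].data[0]))"
+   ]
+  },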
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The first layer filters, `conv1`"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlEAAAJNCAYAAAARaCA+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvXm0Ldld3/er6Qz33PneN8/9ul/PaqEJi5aEwBIoBBOS\n",
+       "GDteduKV2Am2sY0hSN0tqVFLtFoSYBniZHklXthh4diEtczCEASWkDViqSWhFlLP4+s3D/fd8dwz\n",
+       "1amq/NGNrf37ft+6h1LjK8z380+/vXufOnWqdu1T99Rnf3dUVZUJIYQQQog/HvFu74AQQgghxJ9G\n",
+       "dBMlhBBCCFED3UQJIYQQQtRAN1FCCCGEEDXQTZQQQgghRA10EyWEEEIIUYNX/CYqiqJ3RFH0ZBRF\n",
+       "z0RRdM8rvX0hhBBCiG8HolcyJyqKosTMnjKzt5nZeTP7spn9laqqnnjF3kQIIYQQ4tuAV/qXqDeY\n",
+       "2bNVVZ2uqio3s181s//qFX4PIYQQQohdJ32Ft3fIzM5+U/mcmX3nNzeIokgR6UIIIYT4U0NVVRGr\n",
+       "f6Vvoia6QXr3ve80M7PPfu737S1vvtuiRiv4/1GS4YbjBOuSsC7Ph9BmPCR1/UFYUWAb9hNdmob7\n",
+       "+Q8/+g+hzfvuvxdf6I7KYLsHTTa7XdzPsgjKUYqnq9FoBuVmqwVt4m86dr//+d+3u990tzWycFuR\n",
+       "Yf948MGHoO4n/86PBeXROIc2/R5+vigKtx+neI6jCI965vpG2mximwy3ZX5bpPv/3M/i53vgZ94X\n",
+       "lKuyhDYVVllVhCe5yP/jufvs5z9nb3nTm21cFP5lNh6Pw+3kuPFxjsc4jsMP5I+vmdkv/JN/DHX3\n",
+       "3XNfUE4z7FPkNFjDHffOwjy0mZ2fg7qhuyZXLpyHNhdPn4a67uZW+P4NPMe/9Mv/Kii/933v/w//\n",
+       "/synP2Xf/dbvsarEY27uWGXNBjTJWtjPms2wL2YpGZPGO5/jIRmThn5MMrMPPBD2xY985EFok0S4\n",
+       "D1nDXdsJGTfoNRMel4r0KSPj8Kg/MjOz3/nYx+y/+IEfsNFwBG3GJX413PeunwrKP/ezH4E2W64f\n",
+       "mJldu7wSlLfJ2LmwvAB1swthXbPdhjZJg3y+UXj++tt9aFMWeN1++EMfDMrvfOc7oU27hX2vkYV1\n",
+       "CTl/UfLSRfqJT/yevf3tb7OKXLS9Lo7Dve1tt994XhJyjqMo3Ief+yiOnffd926oi13/rHDTNtjC\n",
+       "8zc9PRWU5w/vhTbPfPVRqGsU4Rvc/pbXQ5uHv/jvoe7IoUNB+YH3fgB39Dq80jdR583syDeVj9hL\n",
+       "v0YFfPZzv29mZi+eOWMvvnjEjt906hXeDSGEEEKIP1le6Zuor5jZTVEUHTezC2b2l83sr/hGb3nz\n",
+       "3WZm9tnPmR07dvQV3gUhhBBCiD95XtGbqKqqxlEU/V0z+7dmlpjZL9GZeS//9Hns8OGX/j0If8qO\n",
+       "M/xpNOt0oG5qbjZsM4U/z47J4xj/k+bWygq0qcijQSOPcTyjEf6cv7W+GZQ3NzbwheTRQGcxfGSy\n",
+       "sGcPtJlyP0mzx5DxN83AvP3O221pecnyUfiT+5g8hmAU4/C4xORn5CZ5PJK6n6iz1hS0YQc4cY92\n",
+       "kwy3/Uc/bQdbcrNO43iyORT+8dZ4jPvEHoFV7uf8OPmPjwBO3njSslbDSvKYw9wjvrwYQxP/WNfM\n",
+       "zJuFk84Q6blHEUmMj2z8o0Izs0HiHmGQ8z47j49Q5pfCn+FT8lhu0MfHI333yIs9IvY0vukc33ji\n",
+       "BmvEsaVNPFdpI+xD/lGlGT8u/rFcd20V2nTX8doe5uF5L8hj3WiCMzgmj4zyCreVuzEvTvF1wxz7\n",
+       "md+HKMExKSZ1+eilx83Hjp+w4Si30QgfP+cjfD/YNnnEP+gRHcNtv0kevbY706QuHCtL8ohx0Mf3\n",
+       "g7Eyx+uYPc6DNmQmfEGu7ZF7fJ+QNsnLz8WOHTti4/HIEnLskhT7cOXckpK4CQlxHyqb4PuBdOHC\n",
+       "HbtWE7/Hm8uLUPfUF74WlL/z8GFo87rvfyvU/dJPfDgo3/Lqu6DNba9/LdQ9+YWvQt2kvNK/RFlV\n",
+       "Vb9jZr8zSdtjR4/s3Ei8opw8eXK3d+HPHMePH9/tXfgzx4kTJ3Z7F/7McfLGG3d7F/7MccMN6ue7\n",
+       "jRLLhRBCCCFqoJsoIYQQQogavOKP8yYhd898K/eM+5unh/8RZXEF6maXwmepC4cPQpuZfegRdeZm\n",
+       "gvL0LD4/3yK+Q06ez8PrtrahbrsXOh9N53KZmS0fwn3ffzyU7hfJtPLCuSPba5vQZpt4GuXYPRsn\n",
+       "7g/Duz9pE5/Fsy1V7jl7SVw15gf46dlEHbM4IV6Pi/SIiTfFaLmICOZ3pCl6WX4qfb9HogqId5Y7\n",
+       "v2IwIk4GcalS93Eq4ncwEvfCgrgq3v0xQy+EXaPzi0tQN7dnOSgvTh2ANoM++k5dFwOyOroEbTz9\n",
+       "LeznEfFQ+r1wSvVogMd8cwOvo7WVcEwYkKn1LFJhaja83qcX0QHpzM5AnSclsQQ56Z94reG2WKRJ\n",
+       "5fyqmLUh098L9ga+TYH7ie+Pddtk+vtwGDq0Mws4nk7NoHOZeB+PeExEhbPRdrjv1H+a4PJjcQJE\n",
+       "jzMfpZgSn7MClwm3nZJoBN+HRmPiGrKxhLh3nogMzqnrQ1fPYMTJq773jVC3dflaUP6N/+NXoM1H\n",
+       "P/urUPeV7/9UUP4XP/OL0OYDH/tnUPfEVx+BuknRL1FCCCGEEDXQTZQQQgghRA10EyWEEEIIUQPd\n",
+       "RAkhhBBC1GBXxPLMybtDJybmZB2p3ipKo1defDEot59+FtrsO3Ec6paPh/lUKZEQpzooeg4jFIo9\n",
+       "TK7beyxcl+fQTZjVdPgk5n3MTIf7sHX5KrS5cPpCUL52Bdv0iOw+dhanX4fweozdWnkpCborWTic\n",
+       "Oy4oRpqx5R19HQtrjInsmo9DETptTNbVp1xga0bCPVMSbDd24YWDAQnWJBROUh2PWJAfC+D0bSYL\n",
+       "S81csKR/fzOzQQ/3YcMF0q5cwokebB23WTcZYt9xXKHg4DHs+37dr5ysNwevIYL6oIt9v7cVrsc2\n",
+       "JEGeQ3L+EreW3NwCCuLTRHJeOrg/KM+Q101N7yyWM9+3rMjfwWW4n/6aNTOLYiJV+ykhTLwm/Swf\n",
+       "uu0T0TwnEyY8ox6O+yN23pOd1z7sTONkodLJ1z7U0sysYp/Py/vEK29kk4yfJMCVTaYpXDti3Kdu\n",
+       "PGWTAIxMpplqh9+9ZUkmkZAg1oJI8fA6ElQ6sz+cbNJdW4c2X/vsF6Duh/9muNDJw7/1KWjzz96H\n",
+       "a9f+vf8rXNPvR/7lq6DNv//Vfwt1N9+J7SZFv0QJIYQQQtRAN1FCCCGEEDXQTZQQQgghRA10EyWE\n",
+       "EEIIUYNdEcun5l2CrysPe7iqe9pAcW90Nmx3+cXT0Gb13Dmo23M6FL33kEUcW4uYDp40UUD37D+C\n",
+       "iyrvP37ItUG5Ni5RDDz7h08G5Se/hCtNnz991m0I92mKJLK3ZsJjXrEocIZL041IxG9Ck3JD+ZOJ\n",
+       "3mmEO5+mYbuEpfey3XQHwkvB18OvCJ9muPWKGPA+5Zu9W0JEz9gdTyaRshRlXxXRnHhkqhOKpf58\n",
+       "mpkNh3j9FU64vXrhArTpb6PE3Z4OV22fmcfranZpAeoOuMXJuxtr0MbTJJNBGo0W1C0shynq7Q6u\n",
+       "LJ+18VpvOfl7ag7fr0NWFeh0wm0xWbq/SZKjHTlZVaAk/cWvKsCc4JhM7ChcXWlkgkiM161P1I/I\n",
+       "FTkmEjduB48LSzpvOZF8ahrPX3MKz/vIrQ5QbOO2e+S7p3BGf0rGEnZteypyIbOwd3/cqdLt94Gs\n",
+       "2lDmuPHES/kZnk+2YkFFBHTYJfJZrl4KJ6Tc+DoUuD/z6x+Duke+GH7X/eiD74Q27/+bWPeOv/NX\n",
+       "g/Jfff+PQ5sv/j+/C3VvveFHoG5S9EuUEEIIIUQNdBMlhBBCCFED3UQJIYQQQtRgV5yo4TB8vtru\n",
+       "hAGHS4fQK5jfuwfqlg7sC8o+fNPMbIWsGr2xEgYFlmN89j93YD/UNWcxSM+zZz++rpGEz/DPfP1x\n",
+       "aPPcI49hnWu3uYpBZZ3FcJ+WjxyGNjNLGO7XcO5PTgIdGbF79h8TjylKSDhcMwyobDQwpJOFbfpV\n",
+       "x2PiRDEbKHb+AQvkZGRN51IM0JsYM8nEHYcsY6Gg+AFjF3KapMw5wXPjj0NEzgOj0XL7xVZeJ3Wl\n",
+       "CwHduHYN2lw6cwbqHn34y0F5fnkJ2py86w6o67RD32hhaS+08TTabahLptBtarq6mSXcp4yENU7P\n",
+       "zwXl9gy6ODH5u3TYDcM9r17EQNzVS5ehzsMCVVnfiFxfZ95UwdxCNwZExP0pSUjncBiGZCYsVNIH\n",
+       "VhL6JKyxJNJQy3l9M8RDazRxfCl8qDMZ93skZNVrg+lEwZoIu0YrErZZVGFdQRyl0o3DLCuZ7efI\n",
+       "vR/bp5j6ozuHbc7MzUHdxdOhk3zhLLqU3/X93w11X/zcw0H5wH/zQ9DmLX/+bqj7vV/6taB819ve\n",
+       "CG3mjn0F6rqXV6BuUvRLlBBCCCFEDXQTJYQQQghRA91ECSGEEELUQDdRQgghhBA12BWxfNAP5b2R\n",
+       "kw7ZitFMjt5/y6mgPH8A5dM9RLReOX8x3J8NDAk0tro1CRP0dK+tQt2L3wil0TNPPAltLjzzAtTF\n",
+       "LqDy4CkMBd1/PAzubJNgTSZse2Gz2tn7NDOzlhM2vaBuZpaPmOwadrU4RfGaiawgaJM27C+BOHVi\n",
+       "ORG2Gd5jZcJ9VaGwGbvP58NFX2pDhG3f1dnnY9aoEz0jEnDK8MGBbNudOTKBwu1nb7sLTTbJCu0X\n",
+       "Xgxl86cf+Qa0abYxGHFqPhTLGyTk0dMmMrifCGFm1uyE7WLShysi0m6uhTL9yvmz0Gb9Igria5fD\n",
+       "iSy9Lo4jSbTz5xuTsM0xkaP9oWJKcJnj67y4XhDpOSLhrOOhGzxIPy8mCNsc9DHoMiaTHNozYd/o\n",
+       "zJAxj2y/ux0Gmm53B9BmTI5Lw0vcRMpnE2w8LISUjc2FC0vNiZSfuX2qyPjGxlg/cWZIzgvr+wXp\n",
+       "Z55hjoGx+4+EE60unEGxvJngWHnyjpuC8gtk0sry4QNQt3Ex/P7dXsc+tXQMA7H7bvLHHwf9EiWE\n",
+       "EEIIUQPdRAkhhBBC1EA3UUIIIYQQNdBNlBBCCCFEDXZFLE+d8OZXNV9f2YDX9HoowM0vh0m1HSJV\n",
+       "zx1GiSxxYunWFUwrrXIiRzML0HH5zDmoW7sUiqV+lXUzs2O3nYK6fcdCaXx2P0ltTsP7YL+i+kt1\n",
+       "mATsb58nTeFttMJU6IwI1K0It+VflzaYeI339KkXKEnqNxM9CyeEZw2ULBle4izISugsAdqvfJ6P\n",
+       "SdI5EXXNbYt8FPqZvcjKpHxG6fYrIvJpmqHoPb0YXmvLhw5Cm7WrmGK+cXUtKF++gNfHwnM4aWTB\n",
+       "rRhAFqkH2iSdPKIvDM9pfx0ngwy2UZIddEOZvktE+u2NTXw3N1thahqTzv31wWDjBlOovQTsk7rN\n",
+       "zCrWh3M/wYeI5fT9XF8nkxzynIxBjn4fj3m7g+d0xk0gYOPGNpH311bC87xFzlWrhX0/c9I2m6zg\n",
+       "r2MGE7YrmFli5gdn1iR35yYl55iN6H48Tcm5ikjiPNt32KchivpJJ9z+3kP7oM3mFbyOiijsL/tu\n",
+       "XYY26SH8Prz6pa8H5f5lvLanFjFZPd/EvjAp+iVKCCGEEKIGuokSQgghhKiBbqKEEEIIIWqwK06U\n",
+       "f6bcdAGOLEBumzyzHA3CIK3pBXzW2Z7BZ+pxM3QSGjMkWLOHYYIRCV70sEC8pnMgOosL0GaBrG7f\n",
+       "mg1D5YZkNe/eRuiPsZXeE+bUeI9gwtvpxIVfklxUGuCYOK/Ae3Fm11k13q1ozj6flbjzZRWeh5L5\n",
+       "SISh88cqoqGwbY1d3/CO1EvbIt6COy4QLnodfNiedx2uT3jC2OuYY5KkobOzvH8/tOnfiB5KsxWG\n",
+       "65XE61m9ehXqGu3w/VpTOztDF57HwNrtTfQtes6FYR7TaIB+jneNmBvXaGJw58Ke8NouC/Ru+hME\n",
+       "+eZDDA4siTDjA1WZ28QcnsL1WX8NmZmRrE2rCv86bJRM4NQUhu8344I1zcxazpPynp8Z/77ogedG\n",
+       "zh9xQ1PnnTKfjMd7hjBtsSKf2Z/SnIwlmQsh5cog7pNvl5FxuEF81X6MvpMnTbFfD7fD8TQh257b\n",
+       "i9/b185dCsoXnsVg2xtvOYnbOho6V5dIiPU8cam6zZ0/3/XQL1FCCCGEEDXQTZQQQgghRA10EyWE\n",
+       "EEIIUQPdRAkhhBBC1GBXxHIrQhEwdsZdo0l2i5hzpRMa+0QmZKtPZz5MMEUZNGqQMDqyyrgnaaKo\n",
+       "12mHMl2zTUIBWyjcdZ3cPiJhZt58Tsmq5yyMEiXnycIaIydCF+R1BZHrR2519CjB40uDJp1lWTHT\n",
+       "mwQqeoF5ONx5UoCZ2diFbY5J6GpFPrMP6cwnfJ0Xy6sm9gN6Tt01wyYPMMbDcD8rEiAbxXj9pW71\n",
+       "9xYRfvcePgx13sHdWsMg3QEJiO07CTgjQqqHhV9WJOQxc8ducQnDPuMYJ3qYl/nJJAAW6po62Txi\n",
+       "1+MEQb7FCK//iPwd7Cc1sIkQfsw1Myvz8DywPYqpWR7WJbSf7/z3eoMEXbbbGEzqXezeNo7LLGzT\n",
+       "3HFot3DcZ6L1JEGak4yfKZlwM2RBqK6OTUjxAac+KNWMC/BV7IV03G8WoOzHKQbZTfjM7DssJqmg\n",
+       "iwf2BOXLlzAQ+8LpC1C353gYArxBXre1imPQJOPL9dAvUUIIIYQQNdBNlBBCCCFEDXQTJYQQQghR\n",
+       "A91ECSGEEELUYFfE8qrwyb9sdfKQmNzv+SRZluw6IIJh2fIyNsqESQMTkouKrYvttk1SYtPMCYxE\n",
+       "hB4Qab1wIiRLl/bSuF8x3oynZ/stsZRqRuYlWXJMoozIik5Mjom8yNzMLEpdE5bwS14YOfGSTDBg\n",
+       "+LRllr7MFNLIid0s9ddIWnfuX5eRS7LE8+f3gcmnDC+kGhHSWQJ03AqvhzjFzze7hDL20EnjUYLX\n",
+       "mheazcyGo1AIZ6K+58DxY1CXkYkeXh5utFEwTojMjzMf8NiNhiiy+y5UkM+bD/GYewqSWB4RK3fc\n",
+       "D+XdcYHbjmhiuZvwQ/YhJmNXmrg+G5Fk/gkS9ZtE1J+bxQkMDXf+8h4ez5icGy/TswkbNFXcfZxJ\n",
+       "JsAwWFp/RSbh+Mkt7Dslctc7O3YZuUbhu45d/2xlhQm+H6KYiOxu8ylJhB8NyIQpN9Fqcc88NBkM\n",
+       "sF+vroSTS6bnZ6FNTl7HrslJ0S9RQgghhBA10E2UEEIIIUQNdBMlhBBCCFGDiK9I/Sf4hhFLaxNC\n",
+       "CCGE+Pakuk4irn6JEkIIIYSogW6ihBBCCCFqoJsoIYQQQoga6CZKCCGEEKIGuxK2ec8D7wzK3jRn\n",
+       "K5NT/AuJJB9HZFtOD6OrXRP9PXaBcR9634egzX333Ytv51Z277SnoM3Zp16Auj1H9gbl1iy+7ur5\n",
+       "S0F5roVtIhImOnCrficRdoUPfuhBqPtffyr8fEWJ4YL5EIMtSx+SGWHoWtLA0EN/7FgwapMEuEVV\n",
+       "uA9xhfv0wff/DNS9+6fvD7dDgu763S7UNVzYHVuRfsxWbHdhe2VBwlppKGDYicsCwygf+vCHoe59\n",
+       "D3wgKOeDLWiTk5XWi1FYx8L3Gi0MqE18qGqCAYBRjK+DlD4Snvj+D9wXlH/yvndCm0aCxy5z13FG\n",
+       "AkDZCFS6QMWCHPOSZLqO3TmNfTilmZUkNPNDDz4QlN9z37uhTUbSIQfjMDiwMTsNbTaubUJd7EJH\n",
+       "j994EtpcevwpqFs9dyEoLx85DG1aczgu3f/enw7KP/1+HG+SGK8ZP6SzIN1yjONS6dqRnEmrYjw3\n",
+       "ib/W8GWWkn72wAPh+HL/Pe+FNhura1A3vW85KO85dgDaXHjhuaBc9HrQZmZhAeoGLvw2KUmItQ+H\n",
+       "NgxsfuhDH4Q27733Xfg6N5xFpO+PCjx/aRruQ4uEiSYkmDh13yvbm+vQpt/Fvt9sh2PQh37u56DN\n",
+       "9dAvUUIIIYQQNdBNlBBCCCFEDXQTJYQQQghRA91ECSGEEELUYFfE8gjMblckVrcXaf9oS64RtGDx\n",
+       "6H5VbiatMrxYyhgT2XRmai4ojza3oQ1b/X12756g3N1ACbF0K5gn8ygTDnMizrvPXE52CCxqhOJe\n",
+       "M0FhtDOPUnVjKqxrz+Drsia+LnYna7yNK9mPiQjtVwbPe/g6xqgftpuewVXAp/btg7qta+G56W6g\n",
+       "sD013YG6tBmK1nmO/Scf4grjjUb4uoTIoBQnUadTKB0zwb/fDf/eKke4T70eCvdNJ5tHEUqkcUZk\n",
+       "+sz3hUlWkcdOHJOJCIlrlhApuCSyq7/8ic9MJ6TAeIYDkJXewCWwOTIFmTDht84mAfS2sX/uWQ7H\n",
+       "qakOCv9rly5DXVWE77jgxGgzs60BjnmesZvsYmaWNPErquH6Z2sa2/h9MjMr8vBYlQW+35i8zner\n",
+       "yHcgM/5F42i0yfiW4/utu8lCB04egTbTy/NB+fyjV6DNVGcG6tpT4bjbW8drNiKTfuJ05+uPXA5W\n",
+       "uMk0VYnHvCL9s9kKr9tWE8ekYoTbGjjBfrCN/a4ii6akZPuTol+ihBBCCCFqoJsoIYQQQoga6CZK\n",
+       "CCGEEKIGu+JEwRNJ5zJRPYc6UX7D5ME086RcO3C0rrcT5Fmqh3lTHfcc+twz53DTJFBxel/43Pvi\n",
+       "8y9Cm9S9XauNHsNWF10qS8NTn07oRM3Mhs/ZkxY6JxlxajIXJugD7MzMRhsYjDZc3wjK/bUNbJOj\n",
+       "n+PDPSuWgkhouHDPq5evQZuZOXQNZpaWwv3cRtegIF5W5A5VShyQisT7DdxnbsQTSBlmVjlHISXn\n",
+       "KiZums8ArTLi9fVxP4fOUYhj9NfSEj0wH3KagCNFIGGmEfGPvFxElAy2KfMj12S+pVns3oCON2zs\n",
+       "gjZkLKMfL2yXE3mrIM7X4v4w3Dcivs610zh27T1yKChPL6GXeeU5vI48gy72qSLHcNb+IHRhWsTd\n",
+       "ykgQY+nOexnj2OVDZc3M8irsn3GJ26bn1NHo4LXWIMHLF596NiiPujhu7DtxPCivvHgG2qyvXIW6\n",
+       "xUNhEGqDfF/kxDXKqp1/b8nJGOsdMxbk2SYhvbMzoZ/Hju52j7iwzpkd9TGENJ0i308N7AuTol+i\n",
+       "hBBCCCFqoJsoIYQQQoga6CZKCCGEEKIGuokSQgghhKjBrojlYGRCZiZqZCUxKCOWPgevmzS408Ec\n",
+       "9QnkQbaydObC/K6SwLoDr7oR6qackL5+9gK02euCH2OyIj0LkGuCrzmZmFy41bwTdPtsvIki5MgF\n",
+       "Yg43Mexvaw0F+MFW2I64tZZMoZzZngtDMhsdDLpk+DC6NgmjvHYRg+22XbjmzNIctDEiL44GYchq\n",
+       "K8YJBi3y+WIXJjoiMihj7MI8E7JPWRNFz45b3b7XI6u/k+tjUIaC/XiEfYMFRlbuGs0muGZjEoLI\n",
+       "Ajj9ZJOYjCMp25aLE2TXVeUNfMPJJuxM5UT0xvdnYxm2S5Pw4h6QsNYYBwBbXF4MyutPoqy8ehbH\n",
+       "rjvf+Pqg3JzF/tongbgeFrbJRqXxdigLj8iEjfY0Xrc+UDEhk3nSBk5gGOfhNVqRyUPFBBMDygb2\n",
+       "l30nj0Ld1z/9cFA+/+gL0OZ1t4ffF0tHDkOby48/BXWDrXBiTnsax6kix+MyybyHfIyfzwfZsolP\n",
+       "LRIK2nACen8LJx1111ahbuj7BgkFbs1j38gUtimEEEII8Z8W3UQJIYQQQtRAN1FCCCGEEDXQTZQQ\n",
+       "QgghRA12RSwHF9JZa0weplIlJAiTNjTE3L0BseaoRzeB3MrEudFmKLutE0nuO04dg7r++mZYvoZp\n",
+       "3dM3nwrKOZF0C/JpYifcRhOIrWZmLXdyym0ikRPRs78R7ntvHSXyERFLIyc+T82jCDnthFgzTAKO\n",
+       "DGVJxtWL4QrqC4u47ZvuvBXqNlbDc7VKPl9F1jnvuWO13cWk86U9e6Auc2IwS8pn+ATfrIFDQJGg\n",
+       "ZBmloXA7NYPHc0SkeO9Zj0n6ekkStctxKISOJvh4bJ4Jm5ASp06mJ5HlMXld5T8fWZF+NMb09dIf\n",
+       "BCKtFyx6HHaAjVNkWy5uPXditJnZ9Pws1DXSsE9deOY0eT9kr5OjBwUelz4ZJxByzMe4rdLJwv4a\n",
+       "MjPrb25CnZfGWx0U4JstFMsjJ0ez1Q/YBCbP1haO33e9+iaom10OE98f+9SXoc2r3vbmoDx38BC0\n",
+       "WTlzFuoq1z8LMukgIdcDu448bIJG5r4PWySdPE3JKg3u+u+TY7dGEtn91ZCSc9yeRpG9YssWTIh+\n",
+       "iRJCCCGEqIFuooQQQgghaqCbKCGEEEKIGuyOE+XcInieTJ8v73y/R40l5jH5KtaGhnTuuAvWmcLn\n",
+       "rVfPnA+308SAw0M3HIe6Zz/zpaA8HqDbMLdvKXyvbVwJnS31HiXh8awKfDbO2LoaBk0O+ugj9LbQ\n",
+       "R8jdittVgudzamke6jpLoZM0NUuC0shK6PkwPFYFCV1jLO0N/aPHH/lDaPPik09D3avf8MagvHgS\n",
+       "w1N7FfodW93wWF0lHsPF8+ehbs/evUG50ZosLK4Yhccl7+Eq51mEw0KahQ5N0sBjbgnxzlz4a9HA\n",
+       "/Rz2MHi1cl4P81A8jYQMZ0Tl8IGxFdlvFqg4Hod1BRlxKuKFlN5TJP5hMcHnY64ok5RK72qRwNG5\n",
+       "ebzWxpuhj3ftLPa7hf3oCM4cCK+Zi1fRVTHivXnKAn2yiPgyFocfejzGcXHYx3DPsgpd1MYm+k/M\n",
+       "2Wl1wrokwz48idN27TIel3gaQ09f+33fFZR/7aF/Cm1Of+nRoHz8e14NbbIZ9N4S1/dKw2NuJfbh\n",
+       "JMNjBW1IcG/LhVg2SKhlQlzKfBh+j21cuwZtBtvoj07Nhf16eg4dWupETaAkXg/9EiWEEEIIUQPd\n",
+       "RAkhhBBC1EA3UUIIIYQQNdBNlBBCCCFEDXZFLPcuJLiRE7jg13nlRPhtTbqVSfIMSY6eXb0SCoV7\n",
+       "TxyBNlMkpPPs10OBeXqOiNf7QqnzmT+4gPtEgiZTZ8mPi8nCKDfXQzmTya4FEXxbbj9ZQOb08hLU\n",
+       "zcw5OZI7QMveAAAgAElEQVSsFD4gEmkxDoV3JvwyFp2w/Y6/9hehzSf+31+Hut/45/93UL75ltug\n",
+       "zeFbT0HdvlM3BOX9R3E19heeeAbqrp0LzzMLBWUU4/BYDbaIUVliXdv1/WgG5cwoQbE0Tl3oaYt0\n",
+       "GHJqxi54tSJBrLAZkqxbks9SuTf0ormZWVng68Z5WEd8bcsyJqmHrytJHy6GO38+FhxcsOuvCj9f\n",
+       "kpFJHG2coLF2Phyn+ls46WAvmQCTNMLPvHUNg2aTaufrb3YBReiYSP9+YlKR4/nrdXHCy3DoBfSd\n",
+       "A5zNzAoXHpqwcMgJvhtyEgr64nPPQt3tb31tUD74rz8ObR77XDjp6OhrMQB4ahq/LwZrK0F5roNi\n",
+       "+9YGjqdxsrN5ncR4XCL3BRGRb/JhHydD9a+F+7m2imK5kfMw675Xmh0cp7IMP3OfnJtJ0S9RQggh\n",
+       "hBA10E2UEEIIIUQNdBMlhBBCCFED3UQJIYQQQtRgV8RybyODbEYkvYqlbk+QIF5RedAnpjNpDjfO\n",
+       "pDjPaISCaG8QJqve/udux9et4SrVl144F5RPvf5V0KZwJnt/HdOf9xCJu/Sro0+Y2NpxAnN7HgXV\n",
+       "1mwH6pozoeSYtUlyLUm8HQ/C/dzcXIc2/S4m15ZOFo4TlAkZD3/sU0H5zX/xB6HN3//FB6Hu0//m\n",
+       "Y0H5q//fJ6HNlX/3Gaib+srXg/LJ19wBbY7dfBzqOm518rXzl6ANw0vAYyI097skQdzJ0GOSsJ1O\n",
+       "YYq5/yutIpMO4hj7gk/U9zIxg0/8YGJyWOdFczOzggjp5icnsFUN2BjhdoxJ6+ZTzQkVXUUB9z1y\n",
+       "nydtYN9nh/PahctBOWnh6xaP7Ie6vlu1YHMDx7JGc+fE6wZZySFm/cXP3iH72SCfuXBp3ew7hU1E\n",
+       "8GNJRGYPRczwd7RJyv8Lj+LqBzffcWdQvuvtfw7aPPLbvx+ULzyOgvr8PhTLz6yEwnZJ+n7awGM+\n",
+       "Knae+FCRa81PPBqPMF1+sIVj+upqOMlhlOOKE515TCNvTYffPVkHJ2yV5MsuH0osF0IIIYT4T0qt\n",
+       "m6goio5EUfSpKIoei6Lo0SiK/v7L9YtRFH0iiqKnoyj6eBRFeCsshBBCCPGfAXV/icrN7Ceqqrrd\n",
+       "zP6cmf1YFEW3mtm9ZvaJqqpOmdknXy4LIYQQQvxnRy0nqqqqS2Z26eV/d6MoesLMDpnZD5nZd7/c\n",
+       "7JfN7NNGbqQq/6DUpchx94jc7zlHgPlP9OG/ex0LsaN6xQQOVn+AAXVZJ/QBllzwpJnZmcfwmbZf\n",
+       "2f3QrTdAm7WrYQjZeIDPjttT6Jx0NzeDcmNCZ6izGAbixW10HSKyyvnIuTeDPh6nXhdD17or4fPy\n",
+       "UQ+D4DLyDL/h/aqYrFZO2LsvDNv8Z+/5OWjzxi+9Heq+70d/JCifevWd0ObFRx6Duuf/4PGg/PiX\n",
+       "vgxtLp07B3XH77g5KE8voB/AaLpQ13FCgiZz9HN6g/DcjEp0JKbGZNX4Zvh+SUquY+L1JP66jSf4\n",
+       "e4+oRjkJtjR3XflAQDOzkoRDFs5tGo1YSCfWeZWJOjUTdM+KHAPmSfkBrZHhNZpvoke47VymhYMY\n",
+       "fuuvfzOzTedzjnrovbSJ7+RhYY1s/PbjNfOYwJsys9SFLLJjR52oMnNl4qbFO0ul07MLUHf14nmo\n",
+       "O/vsc0H54B0noM2Lj4dtVsgYcXDuJqhrt0KHdTjAjpexgFPbuYMm5Is0dj7Z2Lu4xh1M364zi/1u\n",
+       "dhH759RcOA7GJAB4RL57RoNddKKiKDpuZt9hZg+b2b6qqv7ITrxsZvu+1e0LIYQQQnw78i3dREVR\n",
+       "NG1m/9rMfryqquB2snrpNr/euixCCCGEEN/m1I44iKIos5duoH6lqqrfeLn6chRF+6uquhRF0QEz\n",
+       "u8Je+/lPf+4//Pvo8aN29MTRurshhBBCCLEr1LqJil4KbfklM3u8qqpf+Kb/9Ztm9tfN7CMv//c3\n",
+       "yMvtTW99s6vRD1ZCCCGE+NNF3V+i7jazv2ZmX4+i6JGX6+4zsw+b2a9FUfQ3zOy0mf0l/vJQ+oPM\n",
+       "MyYTUqvb33wR4Y9K405Ip/dwWEklTgcTpuf3LAfljEirzz99FurmDoSS89QSJkZcfu502KaN4WI+\n",
+       "fM/MzHzgX2OyJ7sj5xwXm/h5bQMFcR9YlxORj63GXpXheZ/KsMu2SMhj6U58FU8wK8DM7vr+u4Py\n",
+       "oRuOQZvf+qe/CnVPPPxIUH6V285L2zoEdXf++e8KyidXUbJ88fGnoO78My8E5QXXV66HD9KLiURa\n",
+       "ZSiWj4ZuwgIJhxwN8bzHTuLOmtg/YaKJodgdk2vGkzDxmpx37wUzmZiGz7rrPyXychnh8SwsPAYJ\n",
+       "EenLfOf+yQ4ByTy12I2fMRm32ASNOA1f15nGyQpjIlV318NJKikNcNx54krFDjqrcgJ6FaE8XBER\n",
+       "uvQTkchxGdMg1LAYkWOQTDB8Zm0cpxpkEs6V0+F3wcJ+DDjdf+PBoDzcQpm/v4ljSeYmEJVj7AcF\n",
+       "C5+eZPyk11H4OiaWj8m478f0rIXjRnuaTGRxn2/Yw++Zfg+/s8Zs3yek7uy8z9v1faq31d4bIYQQ\n",
+       "Qog/JSixXAghhBCiBrqJEkIIIYSowa4sQOxD1fyz6pi4DUmMroF/YM6dJXzW6duxLTMm0d9L4orM\n",
+       "L4ehYN2ruODixrWrUHfwptDH2SaLG29uhh5Kq4PP3QcDfF7ub5+jCZ2hyi20mZCFN5nbUHk3hpzP\n",
+       "Btn3RjN0BqICj29JHDp/rqIJfDYzs8/8Xrhw8C2vwgWB/8cHfwrqTn/tiaB8+exlaPP0pRWoS1uh\n",
+       "zzFLFovee/wI1PWc7zCacAFNCDQkUk2VEOcjdYv2EoegIIuUQtYl8Z/SDJ2WyPUP5k158gHzGrCf\n",
+       "eaUlJx4MC830KhNbSNiPZWZmuXM+UhYcWk0SBsv+5iULs3tniISnjvvYX2I3BjSIX8k+Xzl0C/Sy\n",
+       "0MwJ/l73x+kl8PzFLpm0YoHK1HFxIcv0e4YFmrptERfuOqtfh+9HXtZoYRDqcDscr4d9HL9bc+FY\n",
+       "WeX4eYc94qa68MksQ1dtPMb3YwGxnph891VuvC5yHCPiGI9doxUuJBw38buhNY2OsF8EeTgk332E\n",
+       "Cb/++Gvrv1QIIYQQ4s8uuokSQgghhKiBbqKEEEIIIWqgmyghhBBCiBpEkwRIvqJvGLH4SyGEEEKI\n",
+       "b0+q68xu0S9RQgghhBA10E2UEEIIIUQNdBMlhBBCCFED3UQJIYQQQtRgVxLL73nXPUHZrxA/NYvp\n",
+       "pJ3paagrXRLvaIgrUufjEdT59PPRiCT6kpWsW60w3fWhBx+CNu95z3uhbuQSw6eXlqBNd+US1Pn0\n",
+       "1X3Hj0GbZ//wq0H54PET0CZJMRF65cyZoDy7D1cK/8ADD0DdPfeG546veo51SRImj0+xdPIWrmg+\n",
+       "HoXpxOx8jrYxmRf3Abv6R37+w1B33333BeW+W6HezGzxwD6om94fntPTTz0FbcZd7J97Dx4OyiVJ\n",
+       "Wh6TFel98m9i6Dw+9BB+vvvvvTfcDptY4qO5zSxyccsxacP6ArwO3w0ToY3lcCMf+siHgvI9P/G/\n",
+       "4rbJ68qxS1EmKfjFCPsZbo0kNJPjGaVh34tT7ItpE5Orf/6jHw3Kf//v/Ti+HzlQ/tywU5ykeCbS\n",
+       "RljX7mBiuR8Dzcxil2Y9HmFKdLOBn/md7/zpoHzfu98NbSLyAf1RZ/2nSfYzc6sf+IR2M7MrF3Cl\n",
+       "gY2V1aC8/+ABaDO7gCsNvMd9nod+/h9BmzH5zhp23WoEfRzf+tvdsA1ZsSAjaeid2YWwvIjfRUkT\n",
+       "j50PI7//Xbhqw/vux+++Vjs85hnpB2x1gKG7/gYktX2bJLIXbgwqxrjttIHfM3ESftf+wkd/Edpc\n",
+       "D/0SJYQQQghRA91ECSGEEELUQDdRQgghhBA12BUnqtkOn7keOnE0KBd99BHOPHsa6rbcs+NmB58B\n",
+       "t6bwuX7kjAv/3NbMbGF2DuoGvS7UeZIUn7MXzuNJG/jM2XsaZmaRcxvac7hP3Y3wuXBzCl0j5nyV\n",
+       "btVv5mkwmu3w2XFEVg8f93Gl7rVLK0H56gD3aWYeV+VeOhi6WguLC9CmIsdlexC6BsNuD9owOjOz\n",
+       "Qbns4zP1xz/7B1D3nT/0fUH5NW+9G9p8/rd+F+ouPH86KB86ehTaVBGuZJ+1Q89t4K6F6+H1mIq4\n",
+       "TWy5+dKJNd51ut7r4P1Zkxg/n3dhqnLnjN4RWyGetBs734IHDmNd2giPeZLiuMFcLu/6xRk6ikmy\n",
+       "8/VXEueMKHRWxeGnjoj7w857moXjUsbcrQRfV7ljRd6OemewHdIkIn0/cXnN+QjPe3+E13syHx73\n",
+       "uf17oM2ew4eh7plvPBaUzz3/ArRZ7u783VCV+AGTBD+fuWOckO+LRh5+1zGHLyqwD/s+xC7ZhJxj\n",
+       "I86lJx8TF9bVee/OzCwjPmDD+VyNJp7jrIHXUb8fumF5TpyoFI9nFNf/PUm/RAkhhBBC1EA3UUII\n",
+       "IYQQNdBNlBBCCCFEDXQTJYQQQghRg10Ry2Pnuz36xTAw8tL5i/Caxf3LUHfi9puC8rSTgs3MogrF\n",
+       "vdSFww1JONzFFy9AXVWg3OaJiQDrw0SbMx1s08XgsKQVSvEzCzPQZv1SGNLZIGL5cIjScTF0n2Vn\n",
+       "b9fMzFrNcJ9as/hZ0iYe85m9ofx9hRzfs889C3Wnn3o6KC8sYz84SGTsqcWwL7TaKC8y+nkoJp54\n",
+       "1Sloc+7xZ6Du3/2L3wzK/8OD/wDavPZ73wR1X/n4Z4Ly5voqtGFybWcpDPdjEwoYfltEa7WSOuNh\n",
+       "ZUmCJpn7nTjLmG3bS+tmZrEXWXf2Wq1BRGgmwEO4H2mTEvk7c4J4cwrFci91m5mlsF941Asi5Xpy\n",
+       "Ig/7QF4zM+8qJ2yYZ/a3Ow5pittukKDC0hnhETHEK2bAOxIWxEik8coJ0xUR7ocknLG7tuXaYNDl\n",
+       "Ta++E+pe/71vCcpPzOM4/OzXH4c6gMj8jRb2s+EwrCtzPO/jPPx821s4xsdkskLkxu+IXFhNIrKP\n",
+       "xxN8QbBr1H9mItJHJAzaf0fHTAaPcJ/8JJHYsB8YuUZjEj47KfolSgghhBCiBrqJEkIIIYSogW6i\n",
+       "hBBCCCFqoJsoIYQQQoga7IpYfvny1aA8vSdMqn7H274LXrO0by/UrV6+FpSvnUEhff0yirpXz4Xt\n",
+       "Nrub0GbvEXy//ccOQp0nivGQ+iTl6XmShr6JYnmnHcrCbSLOD5xQ2CTp62sbeAz8CuZRMoG5a2aD\n",
+       "9VBybHTYaumYEr/vljAJ+ObvRIFz5TyuoP7sV0Nh89Kz56HN048+CnULe0MBfW4ZVytnbK2tB+Xk\n",
+       "DhTL3/jD3wt1//y+XwjKX/z1T0Kbu//y90Hd/hPHgvJwC5OWxz0US7tXw3O6cBD7KyNx6eBjIvzS\n",
+       "hGsnbLKU75gkHXuxu2Kx1Ay3C/EEaehRhm2YOBs1w2uEpRVnZCJC6la3j4gIzXczrCxIijITxD1j\n",
+       "lr5MUpv9TjAlmB1PGBNIm6piEemujvSfCQLnLSJyb6uJE1cKv9oCOX9Fjv1s9WI4vqycw++L7soG\n",
+       "1L3qLd8ZlO94/euhDUuht3/5q2Ebcn3QNHKX4F0OUY72E0TG5PMaqfNp/f6cm5llZPJARD6ehx3z\n",
+       "wn28MUs19zPNzKyC71Fsw75nWm7lD3b9Myo20WJC9EuUEEIIIUQNdBMlhBBCCFED3UQJIYQQQtRg\n",
+       "V5yoY7ffGpSX9oa+ysYldHi+9vF/A3Xrzq2KyErhwwo9gsM3Hw/Kb/zhu6HNzPw81J1/9izUeSKy\n",
+       "jvvY7ddUB32LnIRt2sF9QdE7GWZm414YGJeSZ+wV80IK/+x4MjbXw9XK003sQmsX16Du6rmw7tAt\n",
+       "R6DNoRtPYN1NJ4Py6kX0GE5/43mo27gS+nL9PnpvjKE7Dy8+9Ry0eesPvwPqbvvuu4LyFz/2e9Dm\n",
+       "5jfcAXXT06EfV5G/azqzWLd+/kpQ3t5YhzaMyskpRYnXRxSjnzORE0U6kXdoIETT+DXjNz9JFmxM\n",
+       "rg/my8xMh302aWBQaZyREEvnvYzH6KqVYzye3gdiuZMxOeawHSIWUbfJBRwyT4sdl4b7fD4o9aV9\n",
+       "YOcqrCvGxI0hgZiefIjhl1lnGurm9iz4PYA2w+0+1OXu3DxPAjK/8bkvQN2m+555/Q++HdrcePOt\n",
+       "UOehR4CFs2Zh/4xIQGXpPjMLMy2Jv+aPMbuOfT8349ctbJuc99R5fH1UPq0iCbxlFR6XlFyPLEw0\n",
+       "dQHVzMVjn3niL0CCfokSQgghhKiBbqKEEEIIIWqgmyghhBBCiBroJkoIIYQQoga7IpZvXAil2Cc+\n",
+       "+6WgvHYRQxdnyMrZh0+FAY6LJ/ZDm5tehzLv/gNhaOaTX3kM2nzm33wa6kbbKD4C5LZ0XDqJm4Su\n",
+       "jchK3Ylb3ZqJgj50jQq/bNV4v60Jwgxf2li474MhCpzjHq68fuV0KISf/sbT0GbfDXj+Dt0ayub7\n",
+       "jh+CNjfcdRvUrV8NZdCNyyvQhrEwF04oeOLzX4U2PnzPzOwH/vZfDcr/+G/dD22efvgRqDt8ZyjO\n",
+       "r/Xx2LVJ3+8shfuZ9/E8MCo30aIqUQYt2YrtrnskRHaF0EXD/sgCHCOyun1UeQF+55DORgePEwsO\n",
+       "TRIv7qJIW7AQUrefMVl9viQCder2oUqYYryzeE2hoZnhfmYkkDMj8nCSuuNCZXfsG/4zs0DFfIT9\n",
+       "GiATg1YvXYK6YT8cK/cfPwxtDt94A9TdcGcof+85tA/afO2Tn4W6J7/6taCcFzh54HXf91ao87Cg\n",
+       "UjbqVu6csjBYHyLJxHI2g6F0+16M8bwU5PNVE9wqDIf4HWbu+ykrsN/R/Fb3RcoE8Thi10y4n1WE\n",
+       "+12Ray2aaOoKR79ECSGEEELUQDdRQgghhBA10E2UEEIIIUQNdBMlhBBCCFGD3RHLXQLsvkPhCvSv\n",
+       "fdsb4DWLh1ECzGbCpOEkRmnt7Deehbpfee//GZQvPP0itDn12juh7pY33A51HiblNt2q2AVpkxOv\n",
+       "LXGrxI9Jom+rFW6730PBmKYhO3mRpSEz5vcvh/s0IqnNAxQT2+3wXK25RHEzs3OPvAB1F544E5SX\n",
+       "Du+FNnuOH8D3mw/frzM7B20YSwfC7b/w2DPQ5pP/6jeh7r/7yR8Nyq//we+FNqcfw76498ZQio2J\n",
+       "mDzs4nlPndRcNScTk0GqJvIpW22+9BMWiBHL0qy9jM32ksq1XvQkcq3Hi9FmPNG78FIuEZoLUucF\n",
+       "31aG75dlO0vVPjX+pffDawZgvj+T8t1njskkgHSCYxUTKZ+NEpDITia3jEiatafRwH0akYTryy+E\n",
+       "48TVC7iKweYtp6DujjeF3yvf85f/a2hz6OQxqHv4tz8ZlM8/fxrafJ0knXtK0qcsIhOB3PWXkFUo\n",
+       "otSdU3auSBI4zCdiE5FIXTHBvCOWOO8T/POsCW3yHI9L7iYntNq4ykejid/3/nJnSfns2k4mGF+u\n",
+       "h36JEkIIIYSogW6ihBBCCCFqoJsoIYQQQoga7IoTdey2G4NyZylcqbs/GMJrHiWBmCsvXAjK577x\n",
+       "HLS5+NxZqLvx1bcE5R/94E9Cm+XDe6Du7Au4LYB4IZlzJ4ohfr6MOAqp81d63W1o05oNj11Ojl1K\n",
+       "7pUT73NM6ESVVfjcu9HB5/XZPK68Pn8wPJ4HR0ehzdbKBtRduxQGs442utDm8lN4XtpuH6YXZ6EN\n",
+       "ozcKj9+tr7sL2nzls+g/PPq5Lwfl7/jeu6HN6nkMkd1cWQ3KSUJWNC8wEK8sXPAjCYxkpFnoFiRE\n",
+       "02BhdF6TYguhG/FzvPBE/Se6qrrzc8jrPN6jMMOgSzMW5Ef8pwL3KXJ9Pyf5kWMSODjsh69jwYhl\n",
+       "NYF0QvyuhPhr3mVibVi2rg80zYmmRQ4nOCYF9Z92Hl9Yi9nFBahrOr/y4ot4/f/BpzA088XnQpfq\n",
+       "DW97K7Q5cvutUNeeD/fh0c/g9X/hueehzuP9IDOzgnUidz2k3n8ys8x5UuBImVlMrja/LeYHjUgw\n",
+       "atxgwasOMk75EOmcXR8DdKmyZjhOjYZT0IYFcCeuY4+Ib0U0NEtYx54Q/RIlhBBCCFED3UQJIYQQ\n",
+       "QtRAN1FCCCGEEDXQTZQQQgghRA12RSzf3FwPymdeCMMue+soD+fbKKS1nST36rtfA23+2rv/FtQd\n",
+       "vfNkUH76saegzWf/7aegrreBYjfsJ5FGUxe2OepiICYNVHOyW38T339qPhSmmbResNA1H8BXThD2\n",
+       "Z2bDfrgPgx6m4flVyM3MYifJt9ooCrb3z0PdoYVOUO6TY9frbuKOOkm2u7rzuTMz294OP8/yMk4w\n",
+       "OHbTSah79NOhbPodb38TtDlwM75u4KTjzgLKkiMiXvr+MmZBfoRGMzyeIxL8Oi7IauwQfomvY+qp\n",
+       "D35kQjMLZ5wg2xNgIYElkZwTFz6bMKm7REk2d+dqOMLzMiTXAwR+RuRIJTsPxQkJzaTt3CQVHxL6\n",
+       "UiU5Vi6YcJyzo47j28jJ0TkRqPlMhJCcnKsow88842TzqWkcSy667xQzs7NPPR2UP3H5CrS5/Y2v\n",
+       "h7qjp8Lr9shtt0CbSeYFsF5ckAkMGM5KJiu5kOVWB4/BuI/fBf56LMjsASq7T9A/Gw2c3JKPwu2z\n",
+       "a2Y8wDE9dWPeeIxjEpmDA8J96dNFX6qFGjYnZlL0S5QQQgghRA10EyWEEEIIUQPdRAkhhBBC1EA3\n",
+       "UUIIIYQQNdgVsXzQDYW3mU4ou+7bsw9eMz0/B3WdxVCmS6dxheizly9A3Sc++NtBeePyOrQ5cOgA\n",
+       "1B08dBDqPBERblMn3G0TEZolanvZbbCF0qqXLIcksbyK8bhkLScBTmZGWgbyIBOMUQYdjkIxsLe1\n",
+       "Bm1G/S2oi5Nw37M2fpZOA49dNZ4kRRlJ3MdZW8P9PHADpq1feC4UWc89ien5nXncz83RtaAcMQ+S\n",
+       "OLmVS5iPJ0iENjNrNMIk4JhI61XBxODStZlsRyt3PTDPsyKSc+QkXCafw3Zy7PtsBYGBW22+JCJt\n",
+       "nqPI6hOnx+Q4lUQUzlrumDMjdgKzNSWrGmREvPYrJLAUfC8YM8oSr5mSfGZ/bXlB3cwmEsvjGD/L\n",
+       "mEjq/V44SaTdbkGbozffAHVTM+H3zOrlFWjzwh9+A+o2nIB+4PgxaDO7tAR1Hn/NmpkVNMXc9T1y\n",
+       "0WRpOA5Pz+CElCHpU74vFETYHvVR9I5Jv/Z03PE1Mxu5iU7JAPtdbxvfr3KTWwryuj6ZaFG5CUtx\n",
+       "So4BSXdnE1AmRb9ECSGEEELUQDdRQgghhBA10E2UEEIIIUQNdsWJgmen7jntuMLnxJeunIe6wfnQ\n",
+       "ZRh1MVCxGuGz3IN79gflW267DdqQfErrb2MIqIdlHjZdUNlgC58BZyR8snJBZaMuvn/qfIvxCI9d\n",
+       "M8PT7I9KRfwHhl9tvmQr0ke4D6n3NNiq2RXuQ1GE/kpVkSA4EmLnAwbLyXIKLXGugXe5zMy2ifKx\n",
+       "5/DeoJyT1+XkuX7aCsPhRmQldBbE6l2KJJns76HEOW0x82wM369y1yTVXpgf585zQfoLe51XaCZZ\n",
+       "ZH1jBcMTfeiqmdmg13dNsL+ygMPIOTut6Wlok6QkaNYd47SJDs8kcaIFu0aZquIcmjjCc8xWsodd\n",
+       "IB5TTII7I+feMLepmsCJihIMa2Tnz7tF21s4Lran21B30LlMM3Po2W6trULdcDOsu3wa92luGR1a\n",
+       "hIQeE0/KH6pihO+XO6+PuVWsX/sA52KE482AOFEZjdINmZllTlR4TtvexTX+XdB3obXjnIQsb+K+\n",
+       "V3l4XCLy3ZeR689/P/1x0C9RQgghhBA10E2UEEIIIUQNdBMlhBBCCFED3UQJIYQQQtQgmkT4e0Xf\n",
+       "MKJKoxBCCCHEtyVVxROp9UuUEEIIIUQNdBMlhBBCCFED3UQJIYQQQtRAN1FCCCGEEDXYlcTyf/AT\n",
+       "PxbuRNYMynOLy/CauaVFqJuZXQgrSIzy1voG1A1dGurGNUypHbg2Zpi6+w//t38EbX787/5dqFva\n",
+       "vy8oz84vQJtRDxNZzz//fLif6+tk2weD8jRZTXxuGevWr14OyoP1a9DmZz/6i1B377vuDcosfTkl\n",
+       "q2s3XDJ32sRE4STDpGyLw2NekEj4gq0sn4ft8v4A2rz/p98Ddffee19QzkiiN7MLI1cbTxKxbWbV\n",
+       "BPMsSpLgX7q+WJKV3h968CNQ92P/8/8Svo4kHfdJYrH/NHOL89Bmfhmv0c7cbFBOyTkekZT9sVtV\n",
+       "Pc+xzQP3vzco33vfe6FNSlZsj126e9bGfYoicv5c1/Ppz2Y8wd+fmiIfQpuiwH7w4M+8Pyh/+Oc/\n",
+       "DG26qzi+rZy7GJRjN76amS0fPoz7GYXHqtHBVRSG3TWos3HYX0iouTEn92dc/3zPu+6BNlGKG8tc\n",
+       "6v5ggMdzRPpLw60KMdXBxPliiOPE1lp4jLM2pm43W3iM3//AB4LyfffdC20aUyw9O+yPTbKfLbcP\n",
+       "UYLjVH+An2U8DM/VmBynkqy24PvszzzwIWhzz333Q13lxmv2q00cYa3fhzHZpxE5773t8HubTZxr\n",
+       "tvCYT02Fx/zn/8n/TvaUo1+ihBBCCCFqoJsoIYQQQoga6CZKCCGEEKIGu+JEZWn4/LgzG/oVS/v2\n",
+       "wms607jidum8idUrK9Bm9fJlqFu/GrYbbOMq4H7FdjOzzvws1HmGfXSphqPw2XTaJKuVE9fHP/Md\n",
+       "9fAZd7MRPstNiMPDIsKGg3A/8wF6MIxiGO5TOcaVtEfEC+k7xyRrESeqgcel0XQuVQP9lUaGr6uy\n",
+       "8Pyl5WRdfeievbNn6t5/MsO/RqhTY7it1J0v5oBU5c7uzZgcc0Z3M/Q7+r1taJMT/6A9Fa7QnhDX\n",
+       "qD2D7kaz7V+H5y8f4zXD3KKdGGxvYSVz6MqwLiEOX0TOe1V4Dw23XRE3LYH+ieeTHU9PQY5J1ka/\n",
+       "w3ezPMfzycQ+74qZ4fvlxBmqxuH2m8QxG5c7u38RuUSbTXSNet1wvN7oYh+eW0bvdHlP+L3CfLKr\n",
+       "5xI7lkcAACAASURBVK/ifmXhvs8u4/dATrw+T498z1x68QzUbVwOv5+619BDG22G10yTeG/ze9FR\n",
+       "nFoIv0cbM+i9Nefw82Ut4qv6Ngme47HvQxWOb8wt9P06I85Zcwq/Q/zr8hF+Pxnpi8MJzt/10C9R\n",
+       "QgghhBA10E2UEEIIIUQNdBMlhBBCCFED3UQJIYQQQtRgV8TyaSeuzbmAyNkFDIfMhyh+rV0JAyIv\n",
+       "v3gO2lw6cxbqet3NoNwg8uIcCQ5kAYOe0RAlTu+xseDQQYpSbG8rrGPBoXEaSqsNIts1SaCbl0+9\n",
+       "pH89hi7AjQVdDono6YVU4t/amEjqXjlsEJGWCc3tTig076y1/tE+hJ+HSeRMRI592B0Rd5lsXrk9\n",
+       "S4hkCds2szIPD2A04Sfc7m64Mp6rNukv7ZmWK3egTZNNFnDBgXGC11qSYR+K87AvFMXO4aUks8+6\n",
+       "G3hddTfD678ggaM+rPXldwhKWYbnxQdBmpm1p8Nj1ewQITYj16h/d3IIGiTsNnGTYgYkdLEkwv2U\n",
+       "C9dsTeN1tb16Beqqwl8zeI6rCcJnfSCvmVlvGye8bG6G57S9gOPy0VMnoW6wGm7r3JPPQ5txiWPQ\n",
+       "oVuOB+UWGWMH2zg2e2689Raom92LwdKzS6H8HZPxptsNxfK1SzipamsFA5QH264vsKBLEu5r40km\n",
+       "euAY5CcrsGxhnNBAJuqwSTlkXEzS8FrL88nE8l6XTEqZEP0SJYQQQghRA91ECSGEEELUQDdRQggh\n",
+       "hBA10E2UEEIIIUQNdkcsnw/F6s50KJqPByi2XXUrk5uZnX7y6aB84XkUBTfWULhrO4FyahYl2ekl\n",
+       "lBVZnaciqcJeRGbC5pisSD3YDqXf/hYm3qYuDTltoISYNvA0+5TYgoimjM5suO9Dst9lQSRElwCd\n",
+       "M9mVSPnDXiiDDpwUbGbW20Cps+Vk8zQlKfGEwq1qzlYYZ4J4EYWfmUqPTKBMwm2lRCJNMlLnks7H\n",
+       "TAYleJG93UEJeIFMfFjauycod2ZmoA07xnEc7mdOhG0qcbuUdi56h5y4HcXdfIj9069GwJLyU7Ji\n",
+       "AUsM92TsGLju4leaNzPL853PX0lmY7SYWO7HgG183aCHwnZ7NhyHM7LafUWOS+6u20YHx9Nogr/X\n",
+       "x2SSyuraKtS1psPx+4Y78LynMfbrx7/4haB84TSmhd/6xjug7uCJQ0F5ewPH4aIgArPj2T98Civj\n",
+       "p6EqaoV9aOHIAWiz/4YjQXnxxiPQZunkYajrrYfj5yZZ5aNHktyH3Z1XtGBp/Z6CrQRQ4rWdu++Q\n",
+       "mBjp7Br1gehxxlaAwP2KyNg8KfolSgghhBCiBt/STVQURUkURY9EUfRbL5cXoyj6RBRFT0dR9PEo\n",
+       "inb+6UYIIYQQ4k8h3+ovUT9uZo/bfwyIuNfMPlFV1Skz++TLZSGEEEKI/+yo7URFUXTYzH7AzD5o\n",
+       "Zj/5cvUPmdl3v/zvXzazTxu5kcrS8Hn12K2gvHbpArzfc489CXXnng2fJ69fw3CxrInPTWfmQ59j\n",
+       "z9GD0GbvUXwOPTs3B3WeEVk1One+UUxC+liYWN+tVj7o43PpyD0rZr6Od0DMzCrzzslkTs28C4dj\n",
+       "q5ePR+g2jZwDNSSfZUCCH/suLHFMwtPYs/jEOUnsuDB8eGhS4Lkalfj5xlH4uhYJnowSslq5C6Ns\n",
+       "tNCpYSvZe6dtOEb3h7F8YH9Qbjcx4LA1jb5T29XFZNX43gCPSzIK++eItBkQr867hUPiNnkysk+t\n",
+       "KfQPp9z1z8JavcNnZtZz1+OIeEWjIfbPYT/s+yxAcpLu6ccRMzMW0emdRBasycYS7zaxUMk4xb+7\n",
+       "++7ztcn7JSmOQZ41Eg4ZZ3hgvAO1uIiBlV/4zU9D3dd//0tB+eR3YCDnnW9+Le6Y24UzT6N72yWe\n",
+       "FGyGeDfXiOt78fkwIPrqGQyR3tpYC8rekTQzm13G47L/5NGgvHz0ELSZW8YHSM02em4e/51iZlY4\n",
+       "36lk/ZwmcIYNS3I9lsSlqrzDSvpdSjxX5hJPyrfyS9Q/MrN3mtk3X7H7qqq6/PK/L5vZvm9h+0II\n",
+       "IYQQ37bUuomKougHzexKVVWPGF3gwqyqqsomX21DCCGEEOJPFXUf532Xmf1QFEU/YC/9ojwbRdGv\n",
+       "mNnlKIr2V1V1KYqiA2aGCy2Z2cd+67f+w79vOnXKbrvjzpq7IYQQQgixO9S6iaqq6t1m9m4zsyiK\n",
+       "vtvMfqqqqv8+iqKfNbO/bmYfefm/v8Fe/wN/4S/U21shhBBCiG8TXqmwzT96bPdhM/u1KIr+hpmd\n",
+       "NrO/xFuH4mN3fT0oXzp7Hl5y+RwGo/W23Wre0yiWLu1Hue7k7TcH5UM3omA4NY8S+ZhI40CFTzf9\n",
+       "qupMwCvJytJVEdax4MfMCYUVEfCYJOuDH9n7M1qw0jt+lpSI0D6osLeNEnnOAke9zEuCCgc9rIP3\n",
+       "ZwGgDNc3x2MUodmxiiJ/jklIZ0yC39ymmiTgsNPGOi+WN0sUxBknTp0Kym0ijOZjnCwwdGGXUYRD\n",
+       "BwvE7PbD81eN8diNqDQetivJefCceeo5qIvJCNdoh/3Th0yamZXsGnWTDnIyHoyHbKJF7trg5419\n",
+       "IiehIv0uJZNU/LYiEuAak8/nJ2202lPQhtX5z1eRay1tTxB2S+To4ydvhLo9+8MQyacefgLafPnj\n",
+       "n4G6hf3hmP49P/L90ObEbTdB3cO/+7mgfO5ZFL337NlZ/73pdfjE5S1/8R1Qt3wonOg0NYXX/7YL\n",
+       "v9y8gA991i9dhbprV0J5f5uMw8MhTjooJvh+YEHBHv99ZYZh1GYGAyP7XovJGFtOEIibEsGfTaKY\n",
+       "lG/5Jqqqqs+Y2Wde/veqmb3tW92mEEIIIcS3O0osF0IIIYSogW6ihBBCCCFqoJsoIYQQQogavFJi\n",
+       "+R+LgZPZNq+F6atrV1GSy4coD0/NhFLs4r4laHPsFpTGj58KZcXWLErkwz7Kn+sr61DnYYJomoXi\n",
+       "XDlGIbUgKb9Zw6VuEyHOC9NMoGbKqk8QJj48JXKr1DeIRD69iMez4+TdJEPRtCBCs09b9inuZmaD\n",
+       "LvaN3lY46WC4hQIlo3RieU7S15m7GMWFr4A2XuY3M2u2QiG8KEjifILHKrZwJzKyWjljYW8owCYp\n",
+       "nr+BS5c3MytcyjZLHu9tbELd+moosno528z4xAcnoGZEoPZskRULxjlex/6cMvGaXWtJIzxXvk+b\n",
+       "mXU6KOq3Gu78kc8yGuIx9zBpNmbSeLLzsYrIMfcTO3zyuRmfNOIHGD+JxIxPpvHMzC1A3fQM1p17\n",
+       "/IWg/IXf+Ti0SRIcY9/y34azwu/6njdCm6e/iitjPPy7nw/KjRRF74UDe6HO86WPfQrqRkTi9mnd\n",
+       "M4vYz1rzYap4ZxZT9ztNsmqCO+8FWy2ArAphxc5iOfsOKX1UJOl3LK3fr0LBVqWAbZtZ5cbFiIzD\n",
+       "7A1jMsZOin6JEkIIIYSogW6ihBBCCCFqoJsoIYQQQoga7IoT5cM111ZWgnI+wufE3n8yM5tdCp8L\n",
+       "Hzx+GNrsP4Z1rZlwFfduF32ZDedpmZmtXl2FOk/WxNDDLAvrBmQVd+aYtJxf0SSvg0fMLLSThUO6\n",
+       "ANCEpRISVi6GvlqSoZOxtYVuzNyePUF5wZXNzKZm8Lm+98na7tyZmQ1JAGd7K9xW0d/ZOTEzy9wq\n",
+       "9cUIn8Uzf827BqMcz2dCvKWxc4SY98I8t9LCfUgn8GDMzBpNF5YY4flrNnE/R+44bK1tQJvVFQz3\n",
+       "21wPr5mYeAwp8eOmpsL9zKZ2DhMtSdhnMcK6rWvhvve7W9Cmt4XunffcpkhAX3uaOFGzYZ/tzM1D\n",
+       "m0lWkWeOEqvzvuEkQZ5mZuU43BbzD9mmEud8FcSpYfvpabXx+t+8ip7bs197LHy/MY6Lb3jHm6Du\n",
+       "9d//5qC8TsbzT/6r34a6K2cvB+U3/ZcYhZhM7ezUnLz1FNT1iau5djF8v80XV6DNylNngzJz+DIS\n",
+       "0tlZcH1xAfticwoDVdO4Xv+ENiUZOwvctt9WRV4Xx8TZc85uSVyufIzbmiCj87rolyghhBBCiBro\n",
+       "JkoIIYQQoga6iRJCCCGEqIFuooQQQggharArYvkQJN9Q/mLBYSxIc3YhDGKbJpJcRILRer0wYGxj\n",
+       "FSXZ9asolg+IwOxhkmzkwr22N1FkHfZ3DhPtkBBEHy5WkTDDMZGcYR+JmMi4cvFSUM7JivQFCRP1\n",
+       "q3AzAXduGc/xzFwY3Nki0iMLsSzc+zXSycLUOu2wv4wLlFbHY7QQxy4olK06nuZ4uY2d5JgTKXdI\n",
+       "AvG85xknk8nD49ztp00mzvsgxkEP++uA9OFxHvbZhg+eNB4U6kNIm82dz1+TiN7TczgRYW5hMSgz\n",
+       "ETon4ZfFKDx2Q3IMctL3Y9cXyhG5Zsqdxd2I5B2yMNjE9QU2+SNtYF/0ryvGZNsxEZjdOfWBtWaT\n",
+       "/bVeEuH32gqOw2O3Xze/9lZoc9dbXg91uZsc8elf/wS0efoRDNt8zd2vC8r7ThyANisrl6HOE8/g\n",
+       "d9F+sq2bvuc7g3KrhQGnTfc9w67j3jp+z/jJUcM+iu0jMt7kQzw3HjqBoQzrKjrJYeew3djYhC0W\n",
+       "Ph1eowWbVEWuozidMG2aoF+ihBBCCCFqoJsoIYQQQoga6CZKCCGEEKIGuokSQgghhKjBrojllYsH\n",
+       "bbZDIbQVozycEbG04UTSOEP5LB+h6Nl1KbGrl65AGyaNxkTeg30iieXm0le7qyhLstTt1nQo2M+M\n",
+       "URovncjKUobTFgqNkC5LhFFG4lag9+9vZjbq434O3H5tXMZ063PPPAd1DSdQduZx8sDMIq703nFC\n",
+       "8dQ0CsaM1AmNjRQvEZ4c7SRcIi/6fm+GoveAJKvHhtJj6qRKlujL2NrwafJ43iuy89suhX44IKvP\n",
+       "E2Oz7VKTmVjOrpmWe12S7ixezy4uQl17CqXcKTdZoTM7C20yIl775P+cJKQPtvH6y0fhOfXXgplZ\n",
+       "f2vnSStGTnE+JJNG3ESWRgOPQUISqCPXz9g+DXooHadu+0lCtl3t3D9HfexTXhQ2M5teCMfFozcd\n",
+       "hTYJmUjyB//uy0H5G//+a9Dmlrtuh7obX3NbUN7q4ooM7PvCc+0iyufnT78Adf1BOKaWJRv33QQR\n",
+       "kszt5XMzs0Yn/G5tke+GlFxrabzzigFsDMr9JBUygSJNSBq5H3cjHANHZDUC31/I/B6LiNxeEgF9\n",
+       "UvRLlBBCCCFEDXQTJYQQQghRA91ECSGEEELUYFecqNh5Nd6TiNgzdRIYl7XDwMY4wmfAvS4+q964\n",
+       "turaEI+I3F42Jgj8yxoY+BdZuO/jAT7jzvvoNvgnt351djMMa0wb+Lw3iUhAXhZ6DN5ruB6dudAf\n",
+       "iWYwGNWHoJqZmQvgYyGdPeKFDZ0nUZBAvq114pi57ff76G4xGs6ra7dIGCVzm1wAJ3mET0PevFs0\n",
+       "bKB74D0tM7OqCs9pzDZO8EF6PiTUzMxIf4HATyIbtDvoMkZR2K+a5PNl5PP54L6SBIDCa8i2ezl+\n",
+       "vq0V50CuoJ9XEQ8tSsLP3GygT5KQgSNN3OdjDhhxlOD9I2wzHhFfxp2btMn8LuL1OS+EqDiWZXiO\n",
+       "m81wzCvGeG2XxG2CfTJsQ74KbO+B/UG51cEx6LnH0a983tUdOnoY2tz6mldB3cZm+P2wtrICbTrt\n",
+       "nb8b2m08dtPT2IeiyoWlkvPuSSI8xwUZg6oqHAdzch2XpG94H5CRkdBj/6qS+Z3EwYrcK8cFG7/x\n",
+       "/fw1kpIQYjY2T5g1TdEvUUIIIYQQNdBNlBBCCCFEDXQTJYQQQghRA91ECSGEEELUIGKrzf+JvmE0\n",
+       "oQErhBBCCPFtQFVVREnXL1FCCCGEELXQTZQQQgghRA10EyWEEEIIUQPdRAkhhBBC1GBXEsvf+573\n",
+       "BGWfTmyGiand9S2oGw/DFOPFg4egTcut2G5mtr25EZSHPVyVu8FWjXe7+eCDD0GTBx64H+q8j7ax\n",
+       "ge/HfPulPXuCcquNqeLXLoUrg29v40roUzO4Sr1fvbs/wNd95EMfhrqf/KmfCMosTbuVkVTqOLxf\n",
+       "j2J2/07SbONw+wlJpY8qPFeJe117CpPk/9ZP/CjUvetd7wrK21skzZ585oPHjwflqZkZaLO5tg51\n",
+       "w62wL2xewzTkfIjnpjUd9uvmNL7fQw89CHV/+3/6G0G5v70NbbY3sX/6hPmYnL+UJOq33HFnx6U5\n",
+       "RVKw3evSJvb9hz4YXn/33f8BaBORsSR18cQlS22nq7+HrytIo5gkJI9ztw8kMjkhx/ODH3hfUH7g\n",
+       "/vdBG5Y4n7jxlNmwBUmgrtyqAuzYxSlubZiHfWN9C8fqfh9XI/jlf/4vg/J999wDbSKSQm9u1YKE\n",
+       "fMK4gcdzdSVcqeLwTTdCmyvPncNtZeH220v4nVKSz/zgh382KL/nvndBm4qkkftUb3b+fKp5FWGi\n",
+       "f5yS8+cuo+YUjmVZG4954d7v3X8b++I973sP1BVuZYWogceuIuN35L6PWmM8vkY+c+mS20vyvRpV\n",
+       "+Dqf4P+hj/w8vt910C9RQgghhBA10E2UEEIIIUQNdBMlhBBCCFGDXXGixu75Y8Mt1Z2k6FakDazr\n",
+       "rYe+yiZZjb1JVpZvdkIfqLdNXJUhrhrdYM/nHUWBz6HXroUOVmceVx3fd+AA1EV5uA9Pfe0JaNN1\n",
+       "rsFNr74d2rSnOlC3eilcyZ6t6s7wK8Sz5NRxic+cc7eKO1uZnK2uPY7C/SLKiVXk/awMjx1bPZzR\n",
+       "aIb9bHN1hI3IOU7S8O+RtIl9hfky/vgVJToubAX1Zjt0htrT2Kco1EXzTejy726fSH+psK505z3P\n",
+       "8bqKx1iXlk1X3rl/lmw75LOMi7C/VAXxiogTWbrzUNLPS1wjV45J358k9Ji1iGKyn/5csc+X4Ovi\n",
+       "JOz7VUScGtKHE+eYxBnxbKoW1MG2iWvI3K04CvtwXuD1vzCFHuj5tefDfSKOaZLh9VGOwjEgJu5f\n",
+       "TvbTE5HfLCrmc7kq3vXd+5FLlr0scn2DjYtj8sLBCK8tDxkWwXeKyTGIidcXD8LvzCRCL7RKsL9U\n",
+       "7npgoz7zOcuo/u9J+iVKCCGEEKIGuokSQgghhKiBbqKEEEIIIWqgmyghhBBCiBrsilgO5pyTFX1A\n",
+       "n5lBwJqZ2UYRiuTbaxvQZm4vhrxN71sOyvkAJcTtdZTNJ7nl7HYxnHF6aT4oHz68H9qsnb8CdU//\n",
+       "4eNBuWzg6Xrd298clJttFOmf/erjUJcPBkF5bs8StKE4MTEjgqoRSa90wnRRoUzoxV0zszQObcUW\n",
+       "OQZM6fQy9mA0JK3I65zdPiYidJniOzac6D01jTJ/l4RYei942B9Am9EA65acAN+c2lncNcPAyCgh\n",
+       "nZqIl/64UDGZCMVwrbN9YjMK3Fn1QZCMioTo0SHOvR1/f1Lngvti0vGYCO0bRkQsT+g+hJRkAgWT\n",
+       "jmFTZNv0mvGuMr2Od5bpmw0UtjMiY+M+kQ9T4D5k7niuDTEw9ujSCajbunotKKdtvGYiMoFp6F63\n",
+       "PHUDtOkReR8hkjzrZzATgWzK9QU2LyE2MnnHlfMBHvOcfNcyadxTxXitRXF4jLOEHPP+NaiLx+H3\n",
+       "aMQuNtLP/DjFRgR2Hvzr/jjolyghhBBCiBroJkoIIYQQoga6iRJCCCGEqIFuooQQQggharArYnlV\n",
+       "OvnLSWslkV1n9ixAXePcxaB89cxlaDM9h6vGTy+F25qam4c23S2UFSdxB30aupnZnuXw/c4+8Ty0\n",
+       "Of/Ci1A3f3hPUD71htdAm9iJes/8wTegzXgTZffF/aFcX0wkRmK6fEYSk5mkF1VhVxsTCXhcEInb\n",
+       "CZRZguJnTLpx5ITmkojsjCoP3y8fopDOLprY9VlfNjOLiThb5GEacpdMaGBiuRetqdRN8EncTZJm\n",
+       "XxHxMkp8EjCe4wZJac+cqJu18PpIiHScujRiL8QzmCxdEBHaJ7IzKZdppn41Auaist0c+wGO9P04\n",
+       "2TnxuqzwDZlsnvidYAnN5EP7yR8JEZOZ/O3T1jOSJM3ezxMTeTgfk0kcrXDyTPcKTsqZWZqDuuFa\n",
+       "OA5W5Fy153GS0cWvPhqUb57H75RLJHUboKnY+PkK82MXtvErPsQRO76kvxRu20QitxGZwDCBeF1E\n",
+       "5Pp3YnlMJl6U22u4sYEbB6dwRYaC9mu3n9S4J8L9BBNXrod+iRJCCCGEqIFuooQQQgghaqCbKCGE\n",
+       "EEKIGuyKEzUehx5IFYW7kRM/Z3YRn0PvObo3KF94Dr2iKy+chbrFw4eCcnuR+FZNDPzMB+gWeaY7\n",
+       "GHZ56YUzQXljFZ8B77/tJNTtOXksKBcxuiNXnwk/X9VFh6dFVivv9ULnKyMhlowEnicz/wlf5x+p\n",
+       "R8TvyJjXA9siTgaLVHP+AQv3nISiwA+Tkefs/tN4F+h6dX4/WdhmQXyLrBme0ySd7Pw1XMCgDwk1\n",
+       "M2uSsNv2MPTVIhYcSBwFr4EkKR6DhAV+QofBJh7mDCUk7NM7X+UEeYcv7cLOvgULIfUBnBXztNgx\n",
+       "8G2Il1KQfS99UGlBvCni5/jtU0+SeEs+rBScrJc2hnWwHXxdTpyvLAv7ft5Ff7WxgG6TuT6ck/Db\n",
+       "pROHoe5L58NQ5w4JNK5IAC/CnLadryMWBuvHWOoxModu7Jyokl2z5HUTOFFVjL5jloaeVJqTMOEu\n",
+       "usxmYbuouQwtygS/1/ylFdEAXoRl5E6KfokSQgghhKiBbqKEEEIIIWqgmyghhBBCiBroJkoIIYQQ\n",
+       "oga7IpbHzuSsXLjXmASAJWTF5oM3hit1X3wOJfJzT52GuvWLl4LyNBHLm018v6IcQZ2nv7EFdYN+\n",
+       "PygvnzgCbVrLe6Aua4RBiEkXRc+NFy4E5Wvnz+G292Kg4sFTNwblhT1L0IbhRV0ftHe9Og8L5IuI\n",
+       "4Vu6EDQWPBcRsTxxAmxRThi26bbPHFkmUHs5OiYrmjOp2q/iPhphH2NiqQ+2TCYM25zqhH2BhVhW\n",
+       "RPAduWs0IfvEwlL9avMx2TYLcGTHeCdY4KAP1jRD+TSOUAZn++nPQ1Hh5+X+rd8vbMTez8NkcBYc\n",
+       "6MVyfw2Z8eMbJ+F+5Tm5rsjf3b6vR0ySn0DcZXmjBelTmdt+3sPJNEYmyjSnQ/F59fR5aHPna+6C\n",
+       "uu5WLyhXQxxLWD/zsEDHiJw/mL9A5PrYvHyOm6EBp66Kyeds/J7kaowTFO4zL3ZvXYI2VW8V6lL3\n",
+       "nVy1MWyzinA8rfw1STI0Kzo5ov7vSfolSgghhBCiBrqJEkIIIYSogW6ihBBCCCFqoJsoIYQQQoga\n",
+       "7IpY7uNB+068jlsojA36KNwuLIcp5kdvvQHaXD6NsvmlF8ME8eUjmFL7/7P35kG7ZHd93++c7n6W\n",
+       "933vOvtotJtBCyCHYjEklIJLJrjsAipOYZykQjmUnVRYIpBBMyONFrTMSEKKMEvFxAUFruBAERvj\n",
+       "KiqWTBlMygKBUQAhDRJCMxotc2e5c+99l2fpPufkj3uhdL6/79XbtCTeK/n7qVLp9pnT3ae7T/fT\n",
+       "7/N8zvfEzgt/zeb407XaeMnx1B23V8s7573EvbPj5e9uv5bkfvdfvMvV+eP/+DvV8pf/ja9xdb7m\n",
+       "2/6GK9s7d75a/v1//x5XhwNpyESWLOTdHKVclopLJyKHvjKwdHIipCds18hE2gL7azs/MzlLZMbd\n",
+       "ZZIyvl37vrHZ1P06k4R0EoLtkuPziERoM7MOhXQiu2MaupkZjvWIRIjFmQjMzFIPx0cGjfRbn2Ic\n",
+       "UG4d8ece61OJpIOjTNuSQQ4tOek9SKuxkH6QyP0AbfCp/yQNncIk+eNnn6eSPJG/nYhMLfnj20n7\n",
+       "cDj+2cmeG8zXLiDAx+TXOzhcubJzz6qfw08/QgbhkLT+FoT07b5PSG8aMhsBUNjsDuTB5Aa3UOkZ\n",
+       "Bw+wGkygbqAO6z9kY8d3M2vZ8a3rgVbDvk8nj3NyfKdqsXxofBr6sCWNghPBBuWwk4XP/b8I+iZK\n",
+       "CCGEEGICeokSQgghhJiAXqKEEEIIISZwIk5UA7PZry5drpbT0/WymdmpS2dc2e7pOtzrzJ23ujq3\n",
+       "Pvt2V/b0hXr7V558wtXpzvj9FeJAIO2Onz381Pk6SDMO/vfXJ9//EVf2gd98b7X8qY991NX55u/5\n",
+       "jmr5b37v/+DqPPbJx13Zu//pL1TLV0gdxphfjjHsz8xP/k5UKjqjOboi7DfugXkF+Ns4ayghgAvT\n",
+       "zrwT1S387/MGrkFP3J/NhoRRwnqLXe/GtS0J9wSXifkkDPSd5gvvgCxJG4rV7WQhnZkEI243te+0\n",
+       "OfL+E5PhSqqdsjHOAtM2WKgkButRn4R0mAh9j81sTx1B8KRYEGs3JlyUhiCyhsJqmUh1ZL1EQh1d\n",
+       "E9j9h/4fqROJd+a2TcpYbmef6362u+uDGC8/6QMc9+6oXdRLf+KDH9fEdzp9583V8hGpMyYclvUp\n",
+       "VuhcJhIO6frCSH8NHaxCHFPq+o0Io4zZ3/95famuE70rmvf8Z2Za1L7zwD4viAfauOMZ9+T/LLI2\n",
+       "9U2UEEIIIcQU9BIlhBBCCDEBvUQJIYQQQkxAL1FCCCGEEBM4EbF8d6cWc9e7tSB+cFDPmm1mdvGT\n",
+       "XhRcQEDlTbedc3XuesHdrmyzen+1fLR/ydU5TULXupYJmjXnQCI3Mzu8ULf90sf97OGXHvOS4+nb\n",
+       "a+Hub/3Aq1ydl/ztb6yW3/cb73V1/vU7ftqVNdtaAvzql73U1bF/+X+7IifOEpGWB7+hID7u/R1n\n",
+       "R4+BhDwaERrRzaTi5Yj9sVBQFlQIIvSwJW0iIiSKnt2CiOyd31+/rYM753MiuxO6WS2WL5ZELCfB\n",
+       "r01Xr9eSczBsfdjmCkT9QATRNPhz1W/r817C8dLzNnlptWNmMoi6fU9CQgcS/AidKpFA1UD09llb\n",
+       "P2YXDQtwPf7ZEsi91jKhGUT9ofhzl4ipyyRjJJM2+JBTlpB5/P2eSEhnR8Jg16u67+/C54eZ2eHF\n",
+       "fVfWna2fp3HmB9McPfaUKzt1Rx1MfLg6cHWaEeI8C9ZM5FzhJaUDbkaI7C6w1swJ6WxgAutnR5Wp\n",
+       "WAAAIABJREFU1hw/sKMt/j4yKMudP09hxw8MSKF+nrEA18gGWsDnCgtrZedl7MAjhr6JEkIIIYSY\n",
+       "gF6ihBBCCCEmoJcoIYQQQogJ6CVKCCGEEGICJyKWF6uFzOVeLbeu115QO7p0xZVdulBLgMtdL9fu\n",
+       "nb/Jl90CM0Qnv79+7VNp24UXGJHLF7yseOWpup2LHX/an/81X+HK7rz7OdXy8pabXZ1f+fGfq5Z/\n",
+       "+1/+mqtzmrT76//O366Wy/x4sfVazWpp/DzvsB7xFJn8jWIgT4QmwjaIujgz+vVAgZHNzo4p6mZm\n",
+       "h/u1bJqJSLslad2pr4+naYlYPvP9JUH6+Tr4wRiMDOnZKfnzGYnkjAIzE8sT84nhmmIi/NX9sVTx\n",
+       "gAV+48Bq689vP0JITyRdfsbalOFaEf92p/P9ZQ4i+Sz6OrEc/yhmEnAmKeM4ZiOwm42Ap5jdM4Xs\n",
+       "D73nltwzTBD3+/fHx4R7fF7Pd/25Gw788zuA/D3f80Lz4eN+ANNpmL1iu/UDCljKt9s/OT42EAEv\n",
+       "F90yBpYfu/dr24KNZ3aNmY094viiEbEcBiuEuf8sGqL/3C6Q5M7keprDj7MKZHJ+2Xo0Tn4c+iZK\n",
+       "CCGEEGICeokSQgghhJiAXqKEEEIIISZwIk4UBn7NwfnY2fO/mx7te+fjyhN1SObOKT8b9N6eDxM8\n",
+       "d/sd1fLmymVXZ+g3riyPCDgbBr/eLXfdWi3PSZDnQLyMC5+oAzgv/u4fuDoXH/1UtfzlX+ndqud+\n",
+       "xYtc2QEc36c+8rCrw8AgNhpmRoI0ne9E6qRMwgvhp/CGrJeJZ+N+QR+nhbjsUBZGOV/4stmsDqhk\n",
+       "LlUiv7tj5mELYZhmZi0JqNuCE9UEf+4YqU+fcdmMh2Y6dSOzRwcL7oNZ46mjwPpLXZaJO4IMW/+M\n",
+       "GIioleAiD70P++yiP745eD27M+9ydJ3vi3Pw3BZzEmY6ImyTuWos7BZvNXbPMAWkQCgnu1bcLQRf\n",
+       "jvTXdkRQcWJuY0ueE+ARWuc9wrz12yrgO3YkpPPgaf9ZsASnlN0zgQlyuH8SKsueE3mEn+OCO1nC\n",
+       "Mfm8IgaULyGbYu4U0tjKlaVQH3NmfZGdg4why35/rEW+y5JnEmsD2dZY9E2UEEIIIcQE9BIlhBBC\n",
+       "CDEBvUQJIYQQQkxAL1FCCCGEEBMIY0MIP2c7HJv8JoQQQghxA1AwAfQa+iZKCCGEEGICk1+iQghn\n",
+       "Qwi/FEL4YAjhAyGErw0hnA8hvDuE8KEQwrtCCGc/l40VQgghhLhR+Gy+ifpRM/vVUsoLzewrzOwh\n",
+       "M7vHzN5dSrnbzH7t2rIQQgghxBcdk5yoEMIZM3tfKeV5UP6Qmb20lHIhhHC7mf16KeUFUEdOlBBC\n",
+       "CCG+YLieEzU1sfy5ZvZECOFnzOwlZvYfzezlZnZbKeXCtToXzOw2tvI99766Wk65Tixtgk9/nWef\n",
+       "ohxjXcaSq7eFJcLWh12yPzed+QTo3Vwnst73tre7Ove+7gddGUbANmxGepY4C2nEDYltLbBeIe+o\n",
+       "kSTXDjC7NotsfdPr3+nKXnXvfdVySH7F1JLjg7azc5AGf41DV1+/YetTf/Pgr9WZc/UvyVcu+zTr\n",
+       "t731QVf2X7/iF6rlsvD7e+7sIVf2V7oPVsu35qdcnac3/nb4k+GFdTuLT7M+2/gU5XleV8uXyC/n\n",
+       "73zzq13Za++pr982+EdAJinROdXXJhSSvkz6Hs6qzr76xj5sZhZgjnYSMmxveNNbquU3/n1/PV18\n",
+       "t/l7pGG3Y/RtyvB4weRzM7NINlZc2jqZkZ6cu/t//DXV8n2v8c+WkP2JaaAs9L5NkawXIDG8IWnP\n",
+       "BZ8bZpZmddsH8qmSOn+u3vz6H6mWX3v/q1ydjvyRH2F2h46k2bN0cHwOZnIdMrkOqakPyG/ZbCDP\n",
+       "5je+8W3V8ite+XpXJ0TfdjazAdLiTA7kHmKfYVg2j/56tsWvtyj1/f/db/tJV+e1P/I/urKAH8lk\n",
+       "f7Ehsy3Euqwh/a6QEHw8DYVUyuS1J8OKb/jun/cbvw5Tf85rzewrzewnSylfaWaHBj/dlatfcelb\n",
+       "JyGEEEJ8UTL1m6iPm9nHSym/c235l8zsXjN7LIRweynlsRDCHWb2OFv5N3/zN/7838961rPtrmd+\n",
+       "ycRmCCGEEEKcDJNeoq69JD0aQri7lPIhM3uZmf3Rtf99p5m95dr//zJb/xu+4aXVcjp+7kYhhBBC\n",
+       "iBuKqd9EmZl9r5n9nyGEmZl9xMz+vpk1ZvaLIYTvMrOHzezb6Zrwu3MLM2C3xH+aBz9D9KypvZCe\n",
+       "uD9mS1dylPeq5RT8LOCFOANtJLPb43rkN/wAv5czB4ToB04E6YmD0cCs8TgTu5nZQByz4mYBJ/sf\n",
+       "QSEuQNMQB2uod5CYx0A8lAjb7+is52Smd3C1MpkhnpGgby4b71Ld0T7iyp7X/mm1PN94c+JT/d2u\n",
+       "7NHhOdVyRtnBzO7KH3dl59vauVoFf14YEVyRZu77fma/8kdwCzI5n3TMCG6LeVNktQn9MZB+EJg4\n",
+       "gbtq2LEwByPBMvFniLuBp47d7PSZ4CqR60KeU/gIaAdyDla+v3RQj3RFy5H4K/BXcJmTc0f8HLf/\n",
+       "7OsE9hc2OFEDu7fJczjjs4r018I+Q6CDBtKHyWPQ75/4cqzvD9A/2PMtQ50ZuS5dJp8X8FnQkc+L\n",
+       "lvT9OGIA2jbtubIA17QhTtSQib8Gz/lCvKlCrnuBE1rYB2v2rz3kI3I0k1+iSim/b2ZfTf7Ty6Y3\n",
+       "RwghhBDiCwMllgshhBBCTEAvUUIIIYQQE9BLlBBCCCHEBD4bsXwyTsJLtVjWBS+aLcLGle3YQbVM\n",
+       "MjOtLX49tPkOiJyZXEqY2TBC3mV+aAuCOAtYw1BCM+9GsoBKdA4zEQUDseYiypIjxEgzs9Cg9Egq\n",
+       "kcIMInLX+fNbiJA+X9YDAw4OfPBkYcI9tKGN48TrM3G/XrYLrs5N4aIri9BlH1s/19X5g6Ov8mVD\n",
+       "rRU+Y/Goq7Oc/Y4vg3ZGIoMyIvzdVEg/zzMvmyd4VOQtCfcjYnCLhjjZnxvkcLWhf2EiewCw4EIY\n",
+       "wBCI7Jpm/lgyBK/SOuQ6BDjmmHyb2uH4GzAwuZccXxzqkxc3/py3RwtXNjuC0GPSztIS+XsXg1hJ\n",
+       "aO4IczcmIg8PROIGWTiyJFYasorSMRvIQu4HfJ6xz5kRHXYg0jqbwGMLgyEyGRyBz/2WyNnsmR7h\n",
+       "82FO+uuMCf5jzOstecbiMRNBPNBz10Md8ownn6N5hFheSEA0G6AxFn0TJYQQQggxAb1ECSGEEEJM\n",
+       "QC9RQgghhBATOBEnyuB3bnQGtuZ/W2Vlc/j9ugv+t/hF8L/BbmCi26bxv62uzXsh63y8V0O9JQiH\n",
+       "Yz+/ssmFsYRNJJzg+MgczDYkEmaGE7yOdKIyeBlstYG4DQa+U+iIy0F8BHQiLj/ufaTTt553Zd28\n",
+       "vlb56PigVDOzvfbpavl8fNLVmZGgySe2d1bLf7j5Glfn99Zf78ouhGdXy8/rP+Lq7JDAT6cVjryA\n",
+       "EXyHQHyy0pDw2ba+NszACv0h2V+9faqvkDBBN7E22Z/fEHMGSaAieBmZhEOmPR/uu9mBcN8FcXhI\n",
+       "AGeAmYvbre/nzFvyG/fniWghblLiZvDPrfbIlzVX6smvw5aEEs78s6QrtZuayPPU2uOdPfYXvfOR\n",
+       "zCxAJ8rEccGA46tgPeJusnBP2BZzeEZkUdK40cb8dU/oRJF+3cB9y/bPHLoZlM2Ia8QmdEanlVF6\n",
+       "5kTVfWgY/Llr2OdTB/c/CQ7NxKEd9Rhk4aWkXWPRN1FCCCGEEBPQS5QQQgghxAT0EiWEEEIIMQG9\n",
+       "RAkhhBBCTOBExPIC724JhO1DO+vWudKec2WnygKWD1ydmXmhuMXAMfOBnEMmkjObRR2gXltAGZuJ\n",
+       "iV4CbJr68pCcQrOMgiELSjt+9vAxs5Cb+aAylmWYibC5WNTXOBH5vI1eaD66XF/T1aGXrJ//zC9z\n",
+       "ZWlbX/eD9TixfN7UQvFO9H0jDf62eXqoxfKHt893dfbDKVd2e/OJavn59mFX56b4tCvbhwuWgj93\n",
+       "jGZdH18kj4A433VlA8jmiYSXZtKvUXxmoXnsfnD1xtx7bKZ3dq+BSJ6Xa1enJ2J5f7ouWy3J/mZE\n",
+       "TO7rc7w48DfyzOZ+vREwnzmCdIxyv5lZTOTRDyJ56EmfItchwL0WB3Jv5+PvP9bORI3tuqwhA27o\n",
+       "Wq5w7ENvxGrsQQhksiKLsBzg3kpErm9ciK3fTkuCNPGzL5DPC/ZZZGQAiiORZwJsPjS+35XEBhTB\n",
+       "NSXhzEZCcjHstrAPNhZeSoJlx6JvooQQQgghJqCXKCGEEEKICeglSgghhBBiAnqJEkIIIYSYwImI\n",
+       "5RGkuFWphbSn7Ca3zlODL5tDkvNd5WFX5674CVe2AJF8t3iJlJmJiZrdAEvdPt4BtMCkQ7DyiA9n\n",
+       "2UYk17YkeRgrjhEHzbed+ZTdzEuyAeTPzQER/ve8yHrlqVqqPkPSye94zl2u7P/7d++p90/kzDGw\n",
+       "8HU28OGJdHu1fBD3XJ1bF4+4she176+Wv6z9PVen7a64soP4jHp58DI4YwbSb+p93y9bUraot1+I\n",
+       "fJ5IJnOG/oF92oxIsubvBzY4wu2f3Z5s9vmu7nt54ftiv/Cy+Xq3Llvv+aTlofX9LEJCeYQBMWZ8\n",
+       "sALCnhFUnI+4TFLpZ0yKhyR3OrWCPz7Ue6nUPebxQi4xzx2HwS3kmZtI8niEhOvCUrhZ46Hv8XTy\n",
+       "4/snu1YsB3zARzPZthsYROpEcjD42RtIHXqrjRl5RA7GDRrBB4KZFTKoIgeQ1Hu/8SaymTGgD5Nn\n",
+       "C50Zg7RrLPomSgghhBBiAnqJEkIIIYSYgF6ihBBCCCEmcCJO1CzULlMfl9Xypng35uPlGa4sQ4hd\n",
+       "2/rfSM8VH1S4LLUTNSveiRjI76Yb8y6DbxQrwt/U/e+7kfhWBXyOPPj13LaC/42bzoCNv4WPdKLw\n",
+       "tTuQmbSZV7Dd1Occ/YTrrYftfPaLv8TV2X/isit78uHahXv2C+72Gyc0eDqDv0WOkg/NvJJOV8vL\n",
+       "uO/qPHvugzS/qv1/6zrRe1OPN94HvJBvrZZX2beJEcFbalc+oLbtvKPQdHXfbxbeicrGAjjrfl2I\n",
+       "U5PT8b7amNnZM5NVmCcV63oDXnQzS8QZ6ru6rJ/59XriRLWh7uvDmuyvOf7+IwYI9V5yU5+soSPb\n",
+       "JgGjOdXtjHPvfDHHLC/BMSPP4RSPv8aRhQIzryei20S8F5axCP2DdTvmFvl+RbyeEcolc9pCYEGh\n",
+       "GM48Yj3WAPKIRVUs0xPli8YQEnum1zcgUxQLCbYeMgS/EuFqIGGbAfoeLpv5QE4zszCif14PfRMl\n",
+       "hBBCCDEBvUQJIYQQQkxAL1FCCCGEEBPQS5QQQgghxARORCy/eX6pWt7ta9F8n4Rvfarc4squpFq4\n",
+       "ZeFwgQjbAdLoUH69Wua31Y+QWwPZFsqRLDyNZUFGw3ayUEIQd8n+mTsYYFtMqKSA/MnWSmw2djAa\n",
+       "u85LyGnrRdbdvVpgnhOh+ZE/8sJ219bb7/Z8n2LMrRZuIxE2981L3Idhp1o+N/cDGp7XfdCV3dXV\n",
+       "Inkhgv+F4Pv+xXxbXTCMu34oqcbBC8YNkc1ns/r42P0RSKijT2P160V2z8B5QNmWwa4Vm5EeyzCc\n",
+       "8rqFkOZJvFYjnrBbr5CE2mGMmUxuZBbAm9DeJV0/Gwm7BcG+IcIve1BlkOmHBbn/iXCPsAE37M/8\n",
+       "DH0D+8rVQnpRq6WWDG7JZGAObosOphlz+7Ftk7a3GDRLNhVBtGYC/kDW7Jv6mm7JOcf9m5mVMUHT\n",
+       "LLASykryz/1h4wds5S10WtLRExmIEJu6X7fzjavTzI5cWdv5+2Es+iZKCCGEEGICeokSQgghhJiA\n",
+       "XqKEEEIIISaglyghhBBCiAmcUGL5YbW819aJ05lInWyW8ydDndp8Pjzl6nQkInUD2ypEct6QpOpM\n",
+       "UsxdHRY8DGIiE73ZhOJYkQmpBunEhYifCacFv04bxhBA5kuJ5CgTqdOJ5ESkZTOvz3dqkXx75dDV\n",
+       "ufzkRVd29tab692NTKRtIM12yD49/0o558qO4l61vBt9X2yClxcvhzrp/Enz2/5Yeb4re7KvB1Xs\n",
+       "setAyPNaEC+Dl/kbFrvfg3Df+vNiTNR1CfBEWiV9NmD/GBNZTu9PdkPW9ZpEEqF7f/+3m/qYU+tv\n",
+       "2tyTxHKQaZs1s6XJ+cQqRB5umFgOfR1nPjAzKy0RaRdwb5PnRiB/d+OMDMQdNiPnyhHHCdsZ+gt1\n",
+       "wVl3wQEFpG/Q4O+2PsmJnAM2gAGZsdRt8kwPBQcLkec3bIuNg2CDP4ah3nZPXgESOS/tmMR5In9j\n",
+       "GnlJpMOSew3F8rRdujpDIa8vDczIMHiJfEGS1UPy9caib6KEEEIIISaglyghhBBCiAnoJUoIIYQQ\n",
+       "YgIn4kStmvr3/0WsfYtbhsf9Sq1/37t5qAMNaSBf8a7Iuqu9kC354X3T+oS6zJwkgKop8Fs/m7k7\n",
+       "EElpgPDJMcGhkfx2XYhrlOE39RiZyODJXnJxdSI5lgAeA5+znmwLztXRlSu+DnFxds/XgZg5j3OG\n",
+       "GvQWyCXvsz9XeNZn5vc3kHN8AUIzHy3PdnU+0T/LbyvVrtjp9AnfUMIW+n5e+jA6NrM7ujch+/UC\n",
+       "8QgxAJMFI7KgVxpoeAxUR2RyDNwzZUv8p7W//+foXCXvkzEtK4IH0qz9/sJmxP1HgydJNaiXG+L+\n",
+       "kNshdxDyyDZOpE987hZ2/zfHO1Fsd8zEwfudPfepezeiDobRmvl+xcKSR3w0WEt6KOvl6HhRFRYu\n",
+       "ckkjwzbRByT3Iw1QHhF2ayzsGl0xFi7KnD1YbSAeUxm8R1ia+t7KgXhokQTNxnGfDwx9EyWEEEII\n",
+       "MQG9RAkhhBBCTEAvUUIIIYQQE9BLlBBCCCHEBE5ELL9stRQbwKSbNX5m+Xnad2WnQaIezItmPSnb\n",
+       "gqmXSWJdoTODj7AHSeBYUzCgksh1RJx34ZokVa4B4Y/OZE/EPdxWM+LQru7g+BnGmXiZBghBI2GN\n",
+       "AxF18bxsN77OfMdvq1vWYvDB0QFpqWcGxm1DhPRTwcvt6F2eLpd8FRI0d6Wcr5bXtufqLAcvR54f\n",
+       "HquW77RP+jYR+nk9Y3q2HV+JSfjQPZn3XVhwX64lTpbZx0T2gPfMCHM3kH7uxFYzCxAiOSNieSTZ\n",
+       "e3GoD7qdsZnlScPgmRCJJNuSNiA5suMjO4TzUNhza8SzhCVP4kCWaxWhEtkdewZhHRK22Takndgu\n",
+       "Uod50MS9Jm1gQjoOjvAbyjRlGRj88WG4r5lZLgmWfT/D5zwOwDGz69juOAjA12ADPUZkiXKxHEYL\n",
+       "sEtFRzmEugwHQpmZFXLucPNNJoOxejIwqJ3+KqRvooQQQgghJqCXKCGEEEKICeglSgghhBBiAnqJ\n",
+       "EkIIIYSYQBgz+/TndIdokQshhBBC3MAUN9LrKvomSgghhBBiAnqJEkIIIYSYgF6ihBBCCCEmcCJh\n",
+       "m/e89Xvqgm0dlhhJ8FxY+YCsdrWslpuDpa9z6Gdjtw3sj+WkdX6m5zKrZ66/9xde4ercd/8PkY3V\n",
+       "BBKe1jb+mEuG0DUS0rfFUEA2jXxLQvMiBJWRk/CWNz/gyl7/8lfXbSROXUvezVsMoxv8z8stSSrE\n",
+       "oLvE3vvZTPawTHLg7J5/8sO+7DX319sJLIiVBKNCqFwmddi2LNb9OpOgy1R8qFy0OnQ0mF/vrW96\n",
+       "kyv7odfdUy1jEOzVbfuywYUJkoBMV2IWoJSF2DYkDDJjKCBRKR/84bdUy699x3e4OjHtujKD54Zt\n",
+       "SeDoZuGKQqrvURrI2/jrUGZ1eHCY+WdLbH3A8P2v/qlq+TX3/KCrMw8+FbSzVbW8LCtfp/jQWrwK\n",
+       "pfHPzk0hgaoQaDwE9rHir/EPvfknq+VXPuCPj+aE4v7JdcjkueQejaTvN9m3fbhSP/d32tO+TRt/\n",
+       "v7/uba+rll9xzxtcHQx+NjNLVj8TjpL/7NvmuqwYCeR0JWYRnvsdee53LAAUQo//jze93NX5mX/4\n",
+       "Pa7MhV8Gf39kcm/jR8FA0n23JCR709bnZdX6czeQUNdtqOs9+GZ/ra6HvokSQgghhJiAXqKEEEII\n",
+       "ISaglyghhBBCiAnoJUoIIYQQYgInIpbjbOShq2UzlmkVBi+I5XVdjwnb1ntBNIJYymZCD9mLnoUa\n",
+       "6Liib4OThbMX6QqZBRxFyGj+HKR1LY0uThORLniRte3qY85MkiWUBs4VkboHInUmEAO7xu8vDWSW\n",
+       "ehSTmdDsPUg363ii2rMngswfiNRtROJsYBZ1djZx5vWr9er1YkOuMRHu3aTtadz1cw4n64vkVDUg\n",
+       "Y6JIf7Xs+HYyaZ10fbdiCMcfXyhkEMlABPFtXZaPvFheVqQs1dvHQQ9mZmHmJe4mwjGTP10TDvQg\n",
+       "bKOX5IvrCGYBylL22w4DG6wAx0OuMbvhM/R2HBQwFhxIY8bFcny+DOTZ1RLp2InziRxL78u6WH9e\n",
+       "tNnfo+uV/7xAZtFL1WzESw+fY53584Ii+UDuvYj9zswCXNNCtm2kLzYjLik5de76saht9nmf4fNi\n",
+       "IB2hJwMYNjBQZ0s+j5mQvqWDIcahb6KEEEIIISaglyghhBBCiAnoJUoIIYQQYgJ6iRJCCCGEmMCJ\n",
+       "iOXWQQo12GalEAEvkYRdENIDEb8LEfdKX8tn3Fn18qCR9FPXJpawCzIdFf6IirxY1iLplU9tXJ1h\n",
+       "U28rkmRXI+IlhraiwH09EkiAxGu1SI4ltccLjQ2RuAu852ffDawl1zhsMSl73PEZtAv3b2YWmFCc\n",
+       "6vVaJtcSh7OBBOjUkf1FMqjCxS+PFOdxLZYWTu6HCJ2YDcagnR83Rlcj9yj22RFiayTJzmVDHnGQ\n",
+       "Rh6Ozrgq/cEpVzaAkB7J4AibX/ZtAFE3Rj/Qw9qZLwPWwYvziTy8mlJvf0b6MEuJx0EV7Dkc2Xog\n",
+       "OTtB/erWSNmYOmwWA6hBEqhb8qxOff3wSEQsn5N7bQb9sz8kifNENkca8vCKTGiGNPnIHrKY6E+u\n",
+       "cU+OD+81dh9nco3LcPz1a0hfxIEkkQ1Iiayd8Pwm56DHQU5mtm3r87luvUTek1k9+ub4++966Jso\n",
+       "IYQQQogJ6CVKCCGEEGICeokSQgghhJjAyYRtNhjOBuGbbFZn8ntymNcBZ2lBgjWXxD/Y1mWBeRMt\n",
+       "mRmc/Abr6rDQSihDv8TMrJ2R39SHul0HT11xVebL+rfchhwK/bUeXp9Z8BwDf6smOpL1JJmtLMCD\n",
+       "WzBB6PgwurgmfoefyN7mELIWiEvFQFeLXXEWjOq8NxJm2LGAw77ui30hftDMuzADBhySUDlGhrZH\n",
+       "sj8WiBegw7CwTRak6ToaC1Qk95VzImjwI2y6950/9Cyktw5P3OzvuTqbKze7su22dhRj4wMWF2d8\n",
+       "O5seXMbs3UYXyEvoo3++MY9obXVnn5tfryFhwhF9R/Ioo0Ga4Hgyp2bcX+u+VibXHYNfmQdaiEPb\n",
+       "b+p7bT477bdN+lC/Xz9gmuTPZxvYtamZBX/d2WedRXSEfZ0ergMLo2RPL/SWIvGImZ87Bvbx6AJU\n",
+       "mW9FtpWgXk+8tw3x3o6cE0WuJwnbHBS2KYQQQgjxl4teooQQQgghJqCXKCGEEEKICeglSgghhBBi\n",
+       "AicilhcQy0uopcpEHNmWWI7o96Z06OqEgQSxgagXV14KLGzG7Y5I6se0ycysAek3E1GwJWF7B0/U\n",
+       "G1sf+Y2fvb1er93x2x42/lgKCMVM4GQEFK+JSF9av63VTn3u1uf9evsLcl6gXct9L4yeuug7DOSw\n",
+       "WjuMDNuMKFAT8ZqJrCCIx0QC+Q59EGOEW3BG+oaZD4NEj3xLxVIPXvdCZn8PpA04YKIlf38xAZZE\n",
+       "I/oaVK7FbZNNIyRsMxEJOPV12bD1YvnqYNeXrWohfb7w56Bb+mdQ7iGMkjyTxri820IejGHpijDM\n",
+       "d0X6YhNXfj1oA/sLO5lvQwIpt5BBDmMeLzTAlfUX2BYbdNCTZ17b1gM0FrMdV2e4tO/LDupnTrdz\n",
+       "3jdzOP6jdEZCVllAZYAA3ib4fo15zZEMDEqFtAn2NyNdKhr5vCBit6vDPvygnWxgwkDCp1Ek35AR\n",
+       "UytStu7qc7WKvk5pyf7GffxR9E2UEEIIIcQE9BIlhBBCCDEBvUQJIYQQQkxAL1FCCCGEEBM4GbEc\n",
+       "Z22GWc5pgikxLwOIyCX5d8JEhOKAKc0tSwsm60WSsg0QR85yqre/XHihsSGC6MHTIDkSCfH0TbVI\n",
+       "19sB2T9JygY5Gmf3vh4tbou0qTR+f2lZn4Mnz/iu99hZkgTc19u/zbxYvjwis83D4IVuZAovzoYe\n",
+       "yKzumaV8o6AZSUR68WXDwVP1aizxntwRcV6fq4YMTGBgyxsyoznVe0FcDeRWiERud0fDJHJijeNp\n",
+       "wMR0RiDZ/CWRRxyUpZ4kGA+sDGakJ4+NlMg5QGGaSLqRXAekRJ9cvyb9JWP6cuP7XSQJ6aXUUvyM\n",
+       "3NuJ9P0E5z2RwRjsuejrsMRydj/AxljCfuP7wqyt75l0QJLjD/x5yaV+NnfLW/0O9/2AAmRObppM\n",
+       "Ljs+q4ZCZj+AUQANmSViTZ77mGzOZs+gdxqLrwfc57rx9HrXJjJQZwtJ4yuSPL6a+WceJpZvOiKW\n",
+       "02koRg48IuibKCGEEEKICeglSgghhBBiAnqJEkIIIYSYwIk4URl+v8W8v8iCNclvxwP8nluMhJll\n",
+       "EsCJgYONd3EicSKoCIKQ3/BbmG16PvP+0/5F3/ajg3qm9TO3+HbO9+qTd3SZuDgsMBJ+O87kd3cG\n",
+       "eguZ/A7ekrIEZVfI7+AXSXjaDK7x2Za0MxAnCjY//q8FnEGd7Y+4GxDq1nc+rDHsnHVlEfpLpt6d\n",
+       "dzdKrveHAX3XA9UJFqzJHJMINylzVXjYJqxH3LTYsXsNm3S8k8EDY4lLCfdx0/p7pp35cx77up81\n",
+       "cxKo2q5dWdtiiiVzR45P+xvIfWzE2VvD8wwDVs3M0kACKmPtmCyKPweB3A/BRoRtUtPN8/gjAAAg\n",
+       "AElEQVQVoD4guR+gL0ZyDrrO+zL9lfra7Abvpq5IoLF1ddjtqb1bXJWDx57w6wEzcm8XFrYJx1zI\n",
+       "cxHd1MDuK/PnYINhqcxZYm7jiMuXGhakW+8vkX6A/pOZ2RE8E47m/lgOmRMFrmhPZLzAPLA87vOP\n",
+       "oW+ihBBCCCEmoJcoIYQQQogJ6CVKCCGEEGICeokSQgghhJjAiYjlKGiXBuVTIgoz2a2rBbGYvQjJ\n",
+       "vMSSQHYjomfZelkRAwcZgUQVtk29v+3aS4BXLvm2tzBL/N553yYMKt2siVgevIBXtvV6XMr15BGi\n",
+       "4JC8WBqH+piXa3+ezh75bTUgUC5WpJ0kLBFl2jGhb2ZEnCVSbs7+HGPAYSFhf2VxxpU1IMXSQRWd\n",
+       "H1CQwZwfI15fXbGuR7MvSYhkBpmX9fNRkOtQiFDsHPUR4nUk9zELPc1dfa+18yuuznKX9MVZfa1m\n",
+       "S7/t+a4fyBLnIJtHL6QXMjjC1WGCOAZrmlmyOdTx90ciM9mXVK83hJWr0yUi3OPlY64yexC7Ov6c\n",
+       "u2BkM8swsCOSoMSyJue4R8Hfn5ejjS97/t0vrpabA38smycuuDKkJUHBLf28qNtZ3Anmgr+rQ+6Z\n",
+       "ptR9GJ8jZiQc1syGEWHMA5G4E6w3kM2sSSDm0azui0wiPyQBw0cwiIt9trfk3LWfxfdJ+iZKCCGE\n",
+       "EGICk1+iQgj3hhD+KITwhyGEnw8hzEMI50MI7w4hfCiE8K4Qgh/TLYQQQgjxRcCkl6gQwnPM7B+Y\n",
+       "2VeWUr7czBoz+w4zu8fM3l1KudvMfu3ashBCCCHEFx1Tv4m6Yma9me2EEFoz2zGzT5rZt5jZz16r\n",
+       "87Nm9m2fdQuFEEIIIW5AJonlpZSLIYS3m9nHzGxlZv+mlPLuEMJtpZQ/M+wumNltfAu1vIcCXGAS\n",
+       "MBGfMwrpLZmBm0hrYV4LfpHJi0RIa/oRMi2R8kKpj3e9IoIhSW3tTtfbWp72296ua2l1WHtpjgSk\n",
+       "u2a2JEGcMWRM9Cbnjpyn+WFd7xaSPL5cHbmykOr1zuz7bc82rA31OR8bSIv+ZKHX0+8Pe8tA1svt\n",
+       "3JU10NcbMqiiRN83UK5lgigjgmSMffN6FDiBLOm8jEg/Zno4Czp34vqIWeQDEXet9YJ/nINYvrfv\n",
+       "6zREGk8wCKDx8nK34xPLA4jluSUzK5DUdEfyfSOQPlXg3A1GZmRoWMI1lI0U2VsY3MIToY8/vkSs\n",
+       "49CQQSpwfCzkf3tArl+3Vy1v/BgA2zv3TFd2dvfmavmj//5XXZ1dMlgAiVSc941vYKBFId91LODz\n",
+       "aSh+/yzgHm+2hAnmZpaJIB5HfN8ykGdQD/L+hnzOHHW+D+9D4vzRzPfhNfnMHBoccEOeU+QZayNn\n",
+       "7GBM/Tnv+Wb2cjN7jpndaWZ7IYT/vmrT1Tkgxg35EkIIIYT4AmNqxMFXmdl/KKU8ZWYWQvgXZvZ1\n",
+       "ZvZYCOH2UspjIYQ7zOxxtvJ7/tXv/fm/7/rSO+wZX3b7xGYIIYQQQpwMU1+iHjKz+0MISzNbm9nL\n",
+       "zOy9ZnZoZt9pZm+59v+/zFb+um/9ymp51OSUQgghhBA3EFOdqN8PIfycmf2uXZ0m/ffM7KfM7JSZ\n",
+       "/WII4bvM7GEz+3a+PngS6NCQ31ZZEBtmkLHfjqlJ0cBv+MR/IBNg20B+P0aYz5UgqDCRn1+bmW97\n",
+       "09S/3TKP4eiwDsRjDkEgv0NjeGmgM6h7MLcvZxas6c/B4rAumyW/vzM4272ZGQScdgNxMg7J8aFf\n",
+       "MTJsE72eSPyOTCQe1wKyHnOpcAb6wgJdyezoeIswr4gRCzpRxO9iG0PHi9Uh3mJpIBCXhW2S+xYd\n",
+       "rFHZnkSOYfd2aWtHKS6I/0gcpQJ9PTbErZiRkMcG9teQdjKxB+jMB10mEmzrLhX1QkhILnwcNOQh\n",
+       "GIlPgpcmZhKWPOKjpmR/kRviA/Y9eGfkedPEHVfWxd16O+ReO7d33pVd+tAfV8sHj33U1bn5y57h\n",
+       "ypCBPGMDC4iFS8qCSlt4vnTkBulZkKdrgN92Js+EllwHZEs+e7ZNvd6K1FkRb3ELQZpbEvZZiLuF\n",
+       "z6WGHF9L0oRnn4UTNTmxvJTyVjN7KxRftKvfSgkhhBBCfFGjxHIhhBBCiAnoJUoIIYQQYgJ6iRJC\n",
+       "CCGEmMBkJ+qzIYD56N7kSEoYC3DzgX8kGJEIm7Gt5bYyI1JZTyRgJv0CTI5MKNxSwZgEcM5qAa6Q\n",
+       "YMQ81G1vOl+nIWJyxhnNR4rJHTh5THpkcnQLoZm2JpIlaydK+cQ9j+RatdAG0n0oBYYiJCIhWvaN\n",
+       "KCAwRjIIgQ98gEEObLZ7ImwbhB6WkWLksIagWXKv5Z4EfsK9VmibfFHG606ucSDnqpnVZYmNxsB1\n",
+       "ohevY8fEeVjGESpmZr0XxDFwMLNBB0Q2bzrYVufvdTq4BSgDkbqDP2Y0ywORghM19WuZlweqHh9e\n",
+       "2JLgxzEDVyIJ8uw3/rw0oW5n23oBHuuYmaVV3XYWCry9dNGV7T/6aLW8d5MPh9y781ZXhrBBKi0N\n",
+       "xMRnM5GjoS8uyOChQp5duOmEI4XMrCWvBSWQ5yDQk/DLDYjl68Zfl03jgzS3VsvmmQxyYJ+H7t2C\n",
+       "dDs2fmlElu910TdRQgghhBAT0EuUEEIIIcQE9BIlhBBCCDEBvUQJIYQQQkwg0HTiz+cOmSEuhBBC\n",
+       "CHGDUnCqlWvomyghhBBCiAnoJUoIIYQQYgJ6iRJCCCGEmMCJhG3+t9/1r6rlnGEme/LLY9f6ULn5\n",
+       "bL9e7q6Q9Y5cWdPV22Kzo7fBB8Z1UPa6N77T1fn+t3ynK8OAQRbSF0jaJU5Azeo0wU3Z7mEBlTAj\n",
+       "fcg+zOzBV/8TV/Zjb39Vtbzu/Q6PNn6HmJm3WvvwtkRDLOvjG0iQJ9a5CqxHggr/6Y/56/eOBx+s\n",
+       "t8JmWSfBgU0Ht1JLguCwjpmlWNfLJHBwIDfEBsJSBxKQ+frv/z5X9vKXf3+13G/Wfn/Zh/s1MNP6\n",
+       "YmfX1dnZ3XNlS6i3s/DBeom0PUG78uDvx3tfc3+1/H3/6D5Xh90PfvJ3EppJUl0LXHemSLCu2DQY\n",
+       "/Oq33ZIV3/b2H6mW77v/J3ybkg+jjLh9MpM9m90+QlBoSv55enjlk67s4pOPVMur1WVXZ0au+z/7\n",
+       "v95bLT/wpn/g6uTG30cJw0Qx0NXMSiLBxKXuw6n3oZn9xj8HB3w2wnbMzNLg2/C//dgrquW3v+EH\n",
+       "fDtJIGYL129JEiO7FvsiqeM7ukE2syXyvEkkgBcDov/n+3/UVbnvvntc2WJWn6vNwaGr8+TK39t3\n",
+       "f+kLquXDjz7i6lzp/bNrdv6majniAZuZsRBS+JB80wNv8etdB30TJYQQQggxAb1ECSGEEEJMQC9R\n",
+       "QgghhBAT0EuUEEIIIcQETkQsTyDqDWlZLWc6e7jfTpdBUqOzZHvxcgazvTdMLI9E+Bsxk3VDZtNO\n",
+       "IAY2dCZ78j4L1WLxdQIIcSzKNBA502AG86aM6woof2+3/pxkJibC8TWkTU3DZg+HWblbf+7oBNyh\n",
+       "3l8gAwUY21Xdp6J5MTGQ61dy3fZAxNZAhNQC52Eg/QBFWjOz5MRyIlAS1oe1LLxdezmzJxI+SuNh\n",
+       "sXR12uiPedbVx9x2XuZtgj/mHuR2NugACUTOZtm+zrdl8jnpZylhn/LtjkTmxXs0kmscR2QQFyLg\n",
+       "l+SvOz4/6eAIMvABmx6i76/LxY4r29up+0YX/DN3vvTrIeTxRsfJuDueHR8+PM0swGAB9jnTk+eg\n",
+       "E8szeVbSZ17Nwdq3KZJ7Zg6bYu1cGPZFv78++/VSggE3ZEX2KAnkfCIDuUe38CzJre9Ttz/rVld2\n",
+       "9PhT1fKnHvUDGm7/6r/qyjKI5P3hU65OIQN8Qjv9+yR9EyWEEEIIMQG9RAkhhBBCTEAvUUIIIYQQ\n",
+       "E9BLlBBCCCHEBE5ELMek3wTp2ZlIbLF4+buAdkgl8saLs/NwAOt56TgSWZFJxg4i86FsmpkkS+Th\n",
+       "WEA6ZMI9booFbLNtgxzJZXBPP9Q7IF6rlUzkbzgvLZPIyfENKMkyeZiccywpI/9eGNa1WN6ywQpE\n",
+       "OraCCfBE5ifSakGxlMjKmRi3Ge3P471rMzPbrusE/6PDA1eHXAbrZiDOkx02RM7EpOpu7sVyljiP\n",
+       "wjQKo4xC+kFLBgGQNUfUIcfHJ3X360WQgMm56+Lx/ZP1c5SlzcwySPkl+Dr9iGcQ2Z3F4JPHT+3d\n",
+       "XC2f3j3t6rhEf0IkKepMGm/hemVyHdilGUYMDMjmxec+18dcBiImHz/myEL0aeiBrli3i81YgM/F\n",
+       "hqSaswEMW7jXevKcGsYb/hWb3n+O3vWM51XLD/3BB1ydM2SwQjmsn1Nnn3OXq9Ps+L546QMfqpZP\n",
+       "n/IDYLbk0cwE/7HomyghhBBCiAnoJUoIIYQQYgJ6iRJCCCGEmMCJOFENOEj4WzELrGta7zt17QaW\n",
+       "j1ydWbvvyhZNXS+SYM1AfgRmLoOrQ9yiCL9ps9DMSDywkrAOma3c5c6R2cvJLOex1OeczXrOSAmc\n",
+       "KCJO9MRfKXDMGQ/uOuDmUyIOAXEbEmx/IKGgdH+bVb1MfJKh98fc9HB8JHQ1MC8EyoqRkE4y03oD\n",
+       "fYp5aIwC/YyFnrJcvRZCM5njwrwC9FUSdXhIGV73Ec4Xm8m+sPsB9kcDMkmfQmePuWPM2cP9dWR/\n",
+       "Y/6abcl6ibShgQvIjoW6hW5/xF8zfx/NoFrbMI9whPNF/CByyL6dxBnK5PldQt0XWPxuIs/Boa+f\n",
+       "lXnwblMkfQ9hj6AmkP45ok6BG2LW+TrsntnA59N2IP2c9MYxnw6333azK3v8I49Uy09f8Q7m1/5X\n",
+       "L3Zl7/uVf1Mtd3fe4urkw0NX1ub6JMc94lv1/l1ijHN5PfRNlBBCCCHEBPQSJYQQQggxAb1ECSGE\n",
+       "EEJMQC9RQgghhBATOBGxHIVwDNsrJGBt3nkhbTGrZ6RfNFdcnWXj5bNZU8vDRkLeWLZYJIIm0hQf\n",
+       "1la2uH0msjJ5+PhGhYQSqZdBiQtqBcI2I2k3A2cUR1HZzCyQ4ECU6TPz+Iipi5Is86fZ7OH498GY\n",
+       "WcjNzDIExvGgQn9CC4qzRCyPPZPN6/OeWehi60VWPH9l5K28hIC62cwro0w2X+6cqpvUkH5Orl+/\n",
+       "hXudCJzD2gfipm0tf243PmzX7Z+UYX814zPeu22R9ZwUT54HNNsTn2+k8wdmUAORtIkeH4rlVJz3\n",
+       "23f3CKnTNLukYS3UYYHDI9IaSR12PgPck4Gcg8gEf+ghzAXPJHwyZxDLM3tWHj9whQYak+NLMECj\n",
+       "IYNwBmj8iljyhQ24gQcok8hZeGmiz9iaSAbcfPijtVj+X/zdv+PqPP7Hf+rKHvnjP6mWv/lv/nVX\n",
+       "56O/9duubLasn2+p89cqHRAhndQbi76JEkIIIYSYgF6ihBBCCCEmoJcoIYQQQogJ6CVKCCGEEGIC\n",
+       "JyKWtx2I3Q2kjEYv0qFEbma2012BZZJYjhK5mXVNLakGMgN2IhmtLFkZiSwB1vl2JLGYJCu7WdVJ\n",
+       "BG0s9SUMpN0NkSUNxHIr47pChihpdk6osImyIvFM2fEhgb33s/2RNceQh3rNgQw6YCLrYHU9lkBP\n",
+       "xyWker1ApM7AZrLHjbH4bMLe6TPVckMEeCZZtl09YCEysZz0hc1BPSBkQ65xv/bSOCbTY79jsMRy\n",
+       "JsnibASRdEa2nj88cj+SCPEWTlVHBeoRYjkpY33RjUcZyA1CjO2AK5J2RrJeDJi6T64Dm2kAYNeP\n",
+       "RWVHlLhJ12BtwANk57OQ2R1Sqo8v47NzJHTGC5Lyj7fIwIT0ppbd2cwRbBYDHDDRsH5AZf7j++cn\n",
+       "H/2EK3vxN3xdtby9dNnVee+vvsuVff1/8y3V8ubIf7Z/4sMfcWUv+aZvrNv02BOuzozNKsJmbhiJ\n",
+       "vokSQgghhJiAXqKEEEIIISaglyghhBBCiAmciBPVtHW4XgvzaTfRJ4fNO+9EYQBn2/jfTWPrZ2yO\n",
+       "EcpIoKLzA8wsNMefrtiTYETM6GPqD/ktPoAn1TG/w/2+69uYqe9Ur8eC2Rgd/HZM8iPNyKzjmJJJ\n",
+       "9QfiB7gwQaYVkLYX9MnGOlGwXmSOGekHLtCQhegRR6HB60fcAzpBPNRrWEgnYe/s6Wp5ufCznLcz\n",
+       "0ofhgLZrf19tjrx/uNoHJ2rl6/Qbf78H6ENj7j3mZGVyPvFM0duROCYYOBjI/mZz385Ti7psOSP3\n",
+       "Oo0KrcmJJdQSLwuuFQsOJYKXf+aRkMeS/XUvARwl9oAb8UkTaeAoc33qZdb3SdNd4K/hspkV6sLW\n",
+       "ZYmGbR4P68O0n0EPTSw0c6jbxJ9vxL3DTZGuEYv31zr2gQicvfUWVxYgBfRD73u/q/OSl369K1uc\n",
+       "q93N9/4//9bV+Sv/2UtcWYIDPHrqoqtz7ll3ubLDlQ/gHIu+iRJCCCGEmIBeooQQQgghJqCXKCGE\n",
+       "EEKICeglSgghhBBiAicilncgljcgJjKxfDE7cGVdrEXytvGhfSGSkDcI82RSJ84UbsbFVUc/d0Wt\n",
+       "1aJu2RKxNHmZN0DIGwZrmhnxSlm4IJmVG2dCb0ccm3mBOhKpMxOBGk9xIOuxmdcbkGJzT2RXFrZ3\n",
+       "vOdN2WIwIQmjDCSYDWVeds5pHiaeK3IOAhNg8byMDNtcLGuRfLl7ytfZ8bI5BqEemr8fMVjTzGx1\n",
+       "uF8tX7n4tF9vtXZlXVffD3PSTgcx8NmgA8wXZKeOOc64KeKH2017Xjq+86a67TsL3382W//Mc20i\n",
+       "+2NXvYBVzQIy2Zp4/9FzwMrgBLJnZzPmDiRBpYU+zyDck4rzZH8wiCOR0MVA+kuGE5ESu0ePP77t\n",
+       "QJ4JpB6K5D2R3XtoOx17QspaOFctCZpuycCgng5qgP2R5+JjDz9cLT/vxXe7Oom09P3/4T3V8nO/\n",
+       "9Hmuzt6tN7myh/6gFtfveMadrk5v4wKUx6JvooQQQgghJqCXKCGEEEKICeglSgghhBBiAnqJEkII\n",
+       "IYSYwImI5QtIFo+hTsHtohfE59FLq7OmljEjEcbYDO1IIbbkwFYbMZO1EUG8pEVd0C9cnSbt+fUg\n",
+       "/Tya37YzCtlM9uZThoNLbT9ebDUjqcljrG4zd+7YmWSJxbj5thsj13sZm8qnhC12IXZ4pJ0oY9JU\n",
+       "enp8mKLMhFhfhDJmGtHPzczarhafZws/EKKbeTl62NYDNJh0zAZebNd1QvkRiOZmZv3a3+/DrL5H\n",
+       "mrm/Z5CGpCo3TKA+dkv8WHCQw6kdfz/eetZL+c+8/dyx613a9zMyIIE938gxZ+gwTJZmj7IGBHRW\n",
+       "hw3+wHurIyeYjM/wbWIfR0QQHyDlO9JAdt+vt5A0njKb3YEl3NfnsyWDlRpybZA+kJkOyHOpz595\n",
+       "2cwsY6o52T27ftg3mLhPn85sFgpgfehnI7jpzjuq5YEc7yf/9BFXdiekis92/PV85MN/6vd3883V\n",
+       "ciSfF0eX/b3WtdNfhfRNlBBCCCHEBPQSJYQQQggxAb1ECSGEEEJM4EScqLatHYjG6uUZCc3sGu/s\n",
+       "NFb/Ns3cJh4hCYdNPCI2mzf//Rjq9GS9vn5XjdSb2vUbG2q/IpOwzQbayWZZt0BCSK0OOMwjwv6u\n",
+       "rojhkCyodMyGSLAe+b08g//A1abjPY3rrOjAEDvmk7DZ3zFglP7GzjblQkFJCCnpi5i8ODYrrkBo\n",
+       "Zr8h1514IakfPuOymVnJxBWBw+laf3zDmEDTEZ2qIaIWy5kMsK1C7n/mUrXQdur+MNcI6rWkUjPi\n",
+       "+Mips03x59y5haQOCxjGIuZbsecLPoeZ5xPJPeO2TZ5vmQRbYv9k4ZeFOFFpqJ0oFl7ckE+MWaif\n",
+       "lYX4o2O+jRjMu4aFnWN4nhX2DMJnAhHDAilroKWBhG0Gcg7KCOeybf1zCre0/6QP2z17/pwri4v6\n",
+       "M/JTH/+Eq7N7yn9mLqBs/+nLrs6889chjpH2roO+iRJCCCGEmIBeooQQQgghJqCXKCGEEEKICegl\n",
+       "SgghhBBiAicjlsda1JtBuGYTiLwYiBwZ64SxwCQ9JitikB6ROpngO2qm5+KFxhiWUOLrMGk8BhDg\n",
+       "ihf33AzYJBSNSfIxQtmIWcjNzBoQpiNJeSOTsVtCeZeG6PltoVieiUQ+kB1iaF6/JcI9YQ3NZCGW\n",
+       "iSSxtgHr+G03WMnMGeE0hJSF7UETWIgdY7uCQQbkOvStP1cYsro+OvTbJpJ609aC6M6p065OgPBd\n",
+       "M7MWwja7BQmaBSIR2yOT+eF8sgBJlMjNzLoGpVxfZ0XOwZMXa7n16NDf6+vN2pW5/c/9fTwk34YG\n",
+       "2kWfZSQN1h0zk9ZpYCQ8h8l63fFZjRZJXyzsmZdxYIBvUxrIczBhSCdpp/lBOP70kXPABn8ARwP5\n",
+       "uGWDmuBeHljYrhs44+uwQNwGrvuMDHJgA7ToDoBAHnoFHlRzEpo5kPv26MqlavkUkcjbuRfEDy7X\n",
+       "Yb6zzj83Ahn002/JdR+JvokSQgghhJiAXqKEEEIIISaglyghhBBCiAnoJUoIIYQQYgJh7Oz2n7Md\n",
+       "MvtbCCGEEOIGpRQ++krfRAkhhBBCTEAvUUIIIYQQE9BLlBBCCCHEBE4kbPPVP3R/tRy3dUBdIAFk\n",
+       "Nvc/R3Z31MF9w8wHnh2ufJDfgPsjaWYxkTA6CLJ88HVvdXXue8O9rgyy6CyT085m+DaY+ZydFpzl\n",
+       "vCEzfjfZBwDOoGw+rFyd+9/+Dlf2vT/x8rpNJISNzUxuUEarsLC9jMdDZisnM8TjlmLwQZ7v/L5/\n",
+       "7Mp++JV139zO/HrbOUnSXEAoaPR1WNZmhEsTN2Rm+Q0JjFvX9ULvN/7af/w6V/Z9D76xXg87p/kg\n",
+       "TzNzCaqBVGLHh/O4u6BbM+tIaGWBAD6SuWhvv/+Bavnlr/1BVyeQ+FIsYXWoKwrtbEiaYWQJhxBU\n",
+       "mgZ/PwbzZW97w09Uy6965WtGtTNDG1jM7ED+fs5wj8wbv+bZhb9+C+jr28H31ycPfcjiO97y+mr5\n",
+       "NT/kr19m4Z5t3RlI9i1dL6HSQkIlC4YQszKyXm78eu+8/x9Vy/fdex9pkyuygs998lnUww1xaeXb\n",
+       "tOoXrgz77Om5v/9Pk+fboq3r/fADb3B1HnjFD7iy9qBuw+Kyb9Ns35cVCJHt574v9qf9PbPZq+tt\n",
+       "F+RZPSOfIRDw/ZoH/Wff9dA3UUIIIYQQE9BLlBBCCCHEBPQSJYQQQggxAb1ECSGEEEJM4ETEcgNB\n",
+       "GzOsyspLZKX3cl3Yr2XFfMYLalT0zMdLzuaEZhv3ykkkwABlhcjfTJj20jaRZLHx5GAiOb4GhN/G\n",
+       "vGDIKIbHQiRLds4L7o8cCxHn8RREsp4xCRgldTozuWfb1edhvUPkxR0/4/dmD87fgsiLTMZe1/fC\n",
+       "4ogMaDgg1xS21RUyMIGAM62H7K97S+TvCH2WDSjAfm5mFgIOfGDiNVkPu3V7/M0XiBQcyU3rbhEm\n",
+       "95pvU9OAzE/6Hc5ab+ZF4dj4axVH9E+aU8xubtiUE6rNLJHjC3ADzohYvkPK8KxvkxfL12vfTCQS\n",
+       "Bb4hFydv6411DRmo07CLCn2YnBc6qAJOVWiIkE5WQ2Zk5EVPrPimhc+nlgz+wP2Tvp8P/TUecl1v\n",
+       "Q565K2K7B9bPsA3kGVTSslpOh6dcnf7SaVdWtvW28q7vG7m74tebHdQFC79eKl42J2NbRqNvooQQ\n",
+       "QgghJqCXKCGEEEKICeglSgghhBBiAnqJEkIIIYSYwImI5QHE4wDpq3njxa906M3EuFdLa3F36eq0\n",
+       "TDpEi2wg4h4TREfYZ4W8lzpxnkiAQyBSHsi8VD0dUNz1VZrgRegOjq9hceiEAAplIEngiUiyeFaY\n",
+       "RM7cxQjiZSDCP/PKUXhnrjujtLWIOCy9WL4968uOzkBq8w6T3X3RbFUfT75M0oLJel1f96E2jbuV\n",
+       "A/TFlgxyYIMx8H5A0fxaIdkhLFJzl1xTuH6jeidJqeaiN6xGtOAY2fmEpOzMBFX2jKhbH1haOL+7\n",
+       "Kwq7LmRASMLnKdsWkdRnoe7XZ8mgipt3/NY20IcvDr6d6/74/hkjGdBQ/LOrhYdHoqn7RKYPKLyT\n",
+       "a0X6kEFfoCnjIz4byHgN68gDu23qisuOyOdwread338iDb28rs8BS67vyWsBS71HshtiQNLXqXzu\n",
+       "ByKUXLchkwEwuG0zM3ws0Rk1XMl1ZtkYib6JEkIIIYSYgF6ihBBCCCEmoJcoIYQQQogJnIgTVWDG\n",
+       "a5yV24jHlA6Jf7Bfe1LxrA/yijv+N9gWwu4ykXEK+X0+M/kG12MzwkPREEgwGvktPqMTRX5Ub8HL\n",
+       "6Ei7u37l1xvqsqYcujoUPBjiHrCcO6wVSbYnyfGzGYRRxuTPHYsJLXhN25G/eXd1I/rOeyH9Kb/H\n",
+       "w7P1ddhf+v4aMADUzPZm9TWeZe8HtEe+DbM59OvNuLBUn9Lnq5QRIZLUISDrOUeI9Be6P2go8zvc\n",
+       "vjp/7nLvOxV6YUzTYp4UHjILzWSpfQXC/Ybs2xTJM8Fth5zzwJ436C2S8zsjN+BeV/tHtyx8vzvV\n",
+       "+f2tDuq2r9f+Wbbpjz8+IyGIDXEug6vn9xeIY4r9MxGHh3s9sH0WMEz8HCQR75R9XnRwnWeNPwen\n",
+       "5nUfOrNLPFTmmF2pt70aiKNE7tGePLsQosJZAL8rkj5le/uuqMl1u8rSO9FlceTL5uDQkQ8V5hYW\n",
+       "9qE1En0TJYQQQggxgc/4EhVC+OkQwoUQwh9+Wtn5EMK7QwgfCiG8K4Rw9tP+270hhA+HEB4KIXzT\n",
+       "57PhQgghhBAnyXHfRP2MmX0zlN1jZu8updxtZr92bdlCCC8ys79rZi+6ts5PhkAniRNCCCGE+ILn\n",
+       "M77klFJ+08yehuJvMbOfvfbvnzWzb7v27281s39eSulLKQ+b2Z+Y2dd87poqhBBCCHHjMEUsv62U\n",
+       "cuHavy+Y2W3X/n2nmf3Wp9X7uJk9g26hAWF6VjejmS/cKkzixFDOvPLSWpz5bQUncROjmUhyY2Za\n",
+       "Z6F5TvQkX9AxLRjl1kgM2LbUbZ/3PpxuufXi3iLVoh4KgNejATmzDCx0kQiwqQC9tQwAABowSURB\n",
+       "VD4vs63vevMDL3XODmpZOG79+WVO7nZWH0/eGRMXZ5ZRMPTdx7at39YaBPR1R6ROJpEOEODYEPFy\n",
+       "Rg4QhNvA0ksZqa7HpG4mIruZ7NmfX+T2wC7LhO1ChFvMox3zlXbPwlrJmsHdx8yIZYNNIDSXnAQa\n",
+       "/OrCdcm2R1w/JibTHcL915L7cdn658RNIP2eJSmveeuP+fJRfY9eXnnBf5OI6A2wkNdAQhbbUD/3\n",
+       "Wf9pyBMVz0Ji4axxTlpWXz8c9GDGA1SRRIKCB9ZfYPPE5bdT0MxzC3+8M5ZeDDfkk37MkW3I4B32\n",
+       "nECGjgymgbDicJYMYEIZ3MwyiOzDzD9zh13/rByWEJY88+0ml8EPRPoL8Fn93FauntnPtPfpLRNC\n",
+       "CCGEuIGZ8k3UhRDC7aWUx0IId5jZ49fKP2Fmz/y0enddK3P8xq//2z//97Of8zx7/h3PmtAMIYQQ\n",
+       "QoiTY8pL1K+Y2Xea2Vuu/f8vf1r5z4cQ3mFXf8b7EjN7L9vAS//Ll9UFZK48IYQQQogbmc/4EhVC\n",
+       "+Odm9lIzuzmE8KiZvcbMHjSzXwwhfJeZPWxm325mVkr5QAjhF83sA2Y2mNn/Usb8kCqEEEII8QXI\n",
+       "Z3yJKqX8vev8p5exwlLKm83szcftFGdkjpBYHpdeTOxO7fntGCQBr3yqabv0h9jAjNdMEE1EHsSZ\n",
+       "5RkNkRUTSI55lOzq04gjESg7EFJng09xnW0vubIWZPoS/TlnYNAxfU0evJjY9ZDMfehF0/nT3uKe\n",
+       "P7VTb2dDpEeSNhv2IHn87AFpqCdm2H4i14WkEzdQL26YMerlzwbqBXLuCjMhoQ+N/Xtlu67PS0v6\n",
+       "VMvES9h+ILPPh5bcH9AslvpPfGIfjD8i8JrZ7pkIxk4rZ0I8SyzHGQRow0kRpq2THfbp+MR5GmrO\n",
+       "ZgzAZZJOvtv6b//PzOu+0BDh/um1F6+f2K/v20sbfx+XePyPHonMyNCReybCc78h96NPNSezJjjh\n",
+       "/zrJ3JBCzwZesAB/JJNrlViCPzxLrqzYc78+vhm5HzsiS58FAX0zkJlASL/ux4xbmflr1cf6eZPJ\n",
+       "gJvsJxqxgn2BdJ+BbAvHLyTyLMvkYo0ZNHY9lOMkhBBCCDEBvUQJIYQQQkxAL1FCCCGEEBOYMjrv\n",
+       "sybD79wZwjdtRvwc4kkF+P3auQdmVjYsZBEOm87ifryjxMBj+7M1j4NmF4KvEonf0cBv/23xwWVN\n",
+       "8L97Y0DdmLA/Mx+aacRHcF6RmTWber3ugIRtPr10ZTuPnam3ve/r5Dlp+811wGjo/HlhRJhtPh4S\n",
+       "/+mKX2/Z1ccTSUgnBpWame2C47VY+f21XvWzsoW+wX78J2zWdUBdZu4feSoE8Edi469xO+ZvMjaD\n",
+       "OhWJXAuO3TRzlAJpJws9deuRfo2rsZBHkofrfTUakHm89MUCVZmfU8DhaaJfb4bPXDMrcN9ePvJ1\n",
+       "PnXZd+wnV/U9uUq+Ay3I/YDEhoRfEh+wuJNMnrnJBzE2DTi05GI1pA/j/gJ7xjOXCmCuUSH9rIfN\n",
+       "D4P/7MuH9ba2yX/OnZqTwFHw+pg3Nae34/HSF7nsLsQyd/66oNts5h3hRJwl995g/rHBWl1YIDYL\n",
+       "sh2JvokSQgghhJiAXqKEEEIIISaglyghhBBCiAnoJUoIIYQQYgInI5bnWiTzIWQkzKwjgij6mg0J\n",
+       "30JLj22LieUjZtdmFCIdojyIwvjVMtJOCBwL3ICvYBPoDDNvdcYE16A5fpb1qxurz10g4mcciIwN\n",
+       "YZvNlsjnR16gbPbrsM146bRvU0uE+zmESp4b9/dCswLRu/PhgoWI5THUwuQOCXmNZEb6xbZu1/zQ\n",
+       "768j56Xb1mXNMO76oSAaIgkqJKcqQgInDagkYYkoe7NQ2RHdmgYxunU6f87zQNqEy6RNVKSFevwZ\n",
+       "wdoJ67UkAJjJ0ViHBU/SgFEMzfTkwR/zYa77UJ9IsOahH9hxBH1vRsI9lyQY0bWJBE+yYOIEqaOB\n",
+       "yNGBDJhAoT8G386UvaCNgypYkGdiAxGAhrTTSMgqHt9ABo0cZHwmMMmaBeniZwobQMHCRI8X5xMJ\n",
+       "28XrMLBzTtoZITw0kbRdOigGP1vJ5xM7ks8ia1PfRAkhhBBCTEEvUUIIIYQQE9BLlBBCCCHEBE7E\n",
+       "iYro+mAYJPmdv5DfW8uAv92S3z9JoFpIGGLpIbmdY/L+rvN7a72xhv1eTyc8hd/wSbBegj0m4vCw\n",
+       "36FzW28rjZgg1MwsQBJjYSFz5PdrDEY1MlEz640Zkt9KNzJ0DSeLHfn3QgMTrM7JbLwsdLUBVyx3\n",
+       "zF8hE19u6r7Qbfz1a9beQ2k24ESQiYsZOzv1ejMSAMhyH/EssHDWhvhVfmJkEg5JHEEXTEh8R4R5\n",
+       "U4V4aBHvP+aFMQcEyugjgrlicN0zkytHzGDLJobu3TPQ3AUckt/fmihYQ1P3vcON73f7G+/eoZO0\n",
+       "23mv6Nzi+OtXiL2VyUMhGbSBTr7t24ldKpCAyjb69dC9wefy1SYwGxX2T647BqOambVwPGz+cXTF\n",
+       "Nsyb2vp2LsAHpqeOFY6ZYJmcF8xmZo99NtE1TgDOvCkW9OyeVDTglKw2bv52ir6JEkIIIYSYgF6i\n",
+       "hBBCCCEmoJcoIYQQQogJ6CVKCCGEEGICJyOWg0yHIX2BWN2RGZsw63cggXVUIsNZuUkVNjs6K3N1\n",
+       "qHwOx8dE2uJnt8YQQDaLuwvbZFZw48MaExh+mcx2T0FBfEQIm5mXI1PrRcx+d+XKmrN1smVo/HqF\n",
+       "9OJ07rDe/3IzopVmcVOfh5aJ0GhLmpltayG1RN/OriFhdNDXWQhpXJPgTgjpjL77cFDMJ1I+C9tE\n",
+       "QZzNhD6MSKOlAz2I/IliKW0UbofJp+SewXosbBMDgc182G5gA2CobQ7PG9KmRII0XZtYoCIT2aGQ\n",
+       "SfIDEag3EIi73/v9ZXKtFvP6eHZnvjPuzI4/vkIGpCT2EeXSUv1Jz4EI8HCdW3LO++Tb3uAzlQzU\n",
+       "GfN9BOsbMZJAzAbr+G1voAkDOXdYx8ysaY6X5NkTnQaFAjxvFCR5snV2/7vMzBH3lZkf9MM+s0sg\n",
+       "AxhGDOy4HvomSgghhBBiAnqJEkIIIYSYgF6ihBBCCCEmoJcoIYQQQogJnIhYbiCSRxT3iB/KUn6d\n",
+       "NEbX83ZdADE4sjR0IlqnEeJs6ogYDCJpYXIt8boLzPA9kERmTIlORKhkHnSB9+c8MrEc5c9IjoUl\n",
+       "UDu3dU5S1E97mXeItRDenfPtpCnRu3Uacdk78pUI7eb4vthl34ZtX7e9kGjehvRFFLQjkXmb5K+p\n",
+       "QUIx8dg5c7RWiVTNhE03GMO3k8nRES88TQI/vr9kNqgCII68DexY4J6hci0TUrGAybYszRo6Eb8/\n",
+       "jj8+J9ubWaFCLKTnkyTwnmyrh9EfAxGaZ36MirWxvu7Ljp3zEWIykY7Z8wwvDRtQwNLP3cAAsl4M\n",
+       "PsWcC9OwHut8rhIr9OcFJe5CEucb3Bgd38MGTNTbJh9XNiPtpAO7cG/N8ZI6HedFymJz/OAI/Ay7\n",
+       "WoYXmQyOICn/Y8dVMfRNlBBCCCHEBPQSJYQQQggxAb1ECSGEEEJMIPhZ1j/PO2QCghBCCCHEDUop\n",
+       "PFla30QJIYQQQkxAL1FCCCGEEBPQS5QQQgghxAT0EiWEEEIIMYETCdv87/7hg9XyTlOHte3O6oBF\n",
+       "M7MzO2tXttip12NhdJnMbo3paWx29D6xELTaK3v96x5wdb77jW90ZZiDmHGabjNLJIwuQcBZIcFh\n",
+       "PhSQBQe6IptBkCebKfx/f+V9ruy+V76yXo+ktSWyrb6vr+nte2dcnY/96Udc2S0venG1PDyx7+qs\n",
+       "9w9cWXPzTrVcsg/Re+AND7qyV99XHzOb8Tuxvz3gWuFs4lcL/e0WIRyuST41E0NXr1asF3viPD74\n",
+       "wBtc2f/01vr6YfCkGZ+kHiZ/t8gC60g/6zD7jmwbg0PNzN2jkaz4wJvr++/+V77K1WGhmREOJkUf\n",
+       "95ew4aQsLX2fGpb+2VVmEJ5IrlUm3eXHv+unfKEQ4oZC30QJIYQQQkxAL1FCCCGEEBPQS5QQQggh\n",
+       "xAT0EiWEEEIIMYETEcv3j+rlAhN17869ZbkgU0vPQAglrquticiKk2Kz2cOHRGaNzsfPtD4bvGwa\n",
+       "QUhPA9lOJGZpW1+enrzzopAaSRvbQtqU6rKGma2E0tZtWCx3XJ2Pfezjruy5L3hetbz/6EVXZ9P6\n",
+       "GdvP33Z7tfzQb33Q1Tn13FtdWbO3qJb7i35gAqOA4J+z70AFp4M3P4t7JlO/N8VL4xG3lbakUb4N\n",
+       "GeTkMvJWxpnPA5ntPhPRGgV4mt1LZ1qvaZhwzzYG/bEh7UTC4M9B6NnAkrpVkVTJC3+tQqwfXCGQ\n",
+       "+5hsC8X1ktkAEWbcCyFudPRNlBBCCCHEBPQSJYQQQggxAb1ECSGEEEJM4EScqNWmdhJ2unp5MfNy\n",
+       "09zrMtZa7REk4vUU4qYUEBcyCeSjYYkj3jkb4iT58EkSAEhkij5DO0lIJzazkGRN5mBEEEECcX8Y\n",
+       "HVyIp5/wbtPe+VOu7OxOXfaeh37b1XnRt36jKzt89PFq+dKFx1yd53/LX3NlD3/wQ9XyDrnGnOPr\n",
+       "xeD7Rsj1eg3pKi3KeGbWpVW1nAcS1kh1tfr6xW7k30NwHgrxejI5vgLrkQxSM6b6QR+OJMQ2sLDN\n",
+       "hE7U8bRr0s/3vbMX1vNquXSkTWcOfRmEACeWYtsQ7w3KUiQPM36RhRA3OPomSgghhBBiAnqJEkII\n",
+       "IYSYgF6ihBBCCCEmoJcoIYQQQogJnIhYPq9zEG0HlmdE9CzEWi0BAg6JDE48Vhea2RALmOQpmrO4\n",
+       "CR0RywsGd1LJ2YvdLei0hYjJKJJHsu2WZQJCWcdOFOEIklKbxczVueXOW1zZR37v/dXy7p03uTq3\n",
+       "PuMuV/brv/hz1fLz/vMvd3X66Nu+/7FaQD//wue6OozsZGFyzVkRBjgO/no22wO/3tGlug4R/BMJ\n",
+       "NC1xWS3nOO5Wxp6O/cDMLLEwUTgtTBCfkb43gwTcbvBSddwSsRtEazy/jNmKnIOLe66oXD5XL89Z\n",
+       "QC2R63fq69ec8W3KJFDVQ8JTR6wlhLjx0DdRQgghhBAT0EuUEEIIIcQE9BIlhBBCCDEBvUQJIYQQ\n",
+       "QkzgRMTyU3u1WLm7Uy+3wcuZmJhsZtaD/DkQ9zSzFHMnbDNplbxfEmncrUWk3AbE8kjaFLNvQ9P0\n",
+       "sB0iljewbSaWJyKtD/U5jiPV1nZWjwJY7sxdnace+aRfEdr5rC9/oavyJ+99nyub79Qi8jP+6gtc\n",
+       "nff8xm+5sttvu71aLs2YzGuzgGndbCwBSfSOIIS3uXd12o0Xy9Ph5Xq59aK+7ez69aBebr18zsB+\n",
+       "FlnKOEltb6GsI+stBn9eZnArd2sikW/8vYZ+f3Cp/564JdtZL13ZcFDL5pm1accnlrfbui8Ove9T\n",
+       "qffifIJU+ECeLc3YQH0hxA2FvokSQgghhJiAXqKEEEIIISaglyghhBBCiAmciBN1Zln7IzsYdkdm\n",
+       "R++JRxRK/Q5I1B8ampnAGyJZm5aI/zRCy3BtMnM6EG1US8IZcVtD8Z5NcrsjEs+IsM1IPB9G09b1\n",
+       "ji5d8XXIemfvurNafvzRx12dbr1xZc99yZdUyx97+OOuzhzDTM3s/B114OdjT1wgrfIEuDaF/J1R\n",
+       "CuloGKCYt65GZoGxc3CZdk+5OsPeGb+3pnbTcufdNAa6TMx/Yn0I15sTAbFd+y3NwHeaE28pk5DM\n",
+       "AEG61h4fYhnIc4N0DZJ1O+4cuI1tfbvjxjttGKCaI7vXFLcpxBci+iZKCCGEEGICeokSQgghhJiA\n",
+       "XqKEEEIIISaglyghhBBCiAmciFg+B0t1DpZzIhJ5Tl5XjhBiR4M1yWzsKHoHIpE6GdzMAhVQoU3E\n",
+       "D8U2dOYl2Tb7sriB46PBj/XiEPx5GqK/zBlWDFR29RQI6UQR28xs9/w5V/bkE09Xy13n23TLbbe4\n",
+       "sk9+/BPV8s7Ora7O2Zv9/vYvXaqWW3IOGAGsY+rbk2MOKAaTcM+MErmZpcXpus7OnqszzHxgZIbr\n",
+       "nEhYKyNCsCwLZ+3IphZweDRY049ysG4DYbBrf1663kvxGdpVRhxfmvnrknaJ7T7Uoael9YMAbHnk\n",
+       "24SHN5Bg1A0ZIALXigXbllZ/zwrxhYjuXCGEEEKICeglSgghhBBiAnqJEkIIIYSYgF6ihBBCCCEm\n",
+       "cCJiuU8IB7GcyK7FWZ1mHUSIZ5JYXIgZjFtiad2ZRZ2PcHe73oulMxDgG5Jm3SWfRj5H2Zy1E8oS\n",
+       "iVXfRD+zfN/Wl34gAj4Da3Vzv+2jlU8eR3n/9FkvUD9xwaeKL8/VCd4Rk6zN7GjfS8AL2H6M48Tr\n",
+       "grH35M+MTM4VitCJtLN0C1c2REgeb7x8ngMRmCEFOyR/zhkdXIdIwteZWI4J9w1ZL/gubAEGhIQN\n",
+       "OS/E68bE8simFQDy0m+onD5wZWle31ex8w3PSy+kpxkkj5P7sRTySB3qk8efNywFXwhxo6NvooQQ\n",
+       "QgghJqCXKCGEEEKICeglSgghhBBiAifjRGFIJjgYzH9igZgYPlmISxUzCbYDvyLTGdT9tlgbkHki\n",
+       "+4M2dCzgcCBl0K7ANC0IdRzIsYTogzwDhi62I7sC+EDU5CCpoLOudqfWq5Wr08y8XxUXddnqindc\n",
+       "ljs+rBHFu3RExBtCgFDOQtwmY2UNrEf6Sgks9BTKGu8/scDPCA5NGenUBLi30HUyM4uk7RFcsdL7\n",
+       "c5ASCa2F/jKw/bHbDzI5wwhnL839ORjO+H4WUu07pcb3jey7onOi0OU0M4ssERfOS2b9Z6STKIS4\n",
+       "sdA3UUIIIYQQE9BLlBBCCCHEBPQSJYQQQggxAb1ECSGEEEJM4ETEcidto1nOZHASIulmdichgYGs\n",
+       "h+53Q4IYifs96o2ThWZiM1mYIfFYrUFRl4X0gSDekVBCd57Mn+IwjA37wzYQCZmEnvZwXpbdrquT\n",
+       "Nv7c9ataAp4vfGBlIettt7VM344U59ELzlQQJ+cYqrFQ0MS2Bcth8McSiDReIIiVucoUN4jDV6HD\n",
+       "LCBEkvWWEhtXtsE+OyMDNkgjcKBDGXF8idTpF/58RrgfGmLXJ1KWoQuxgSYl+XOAjQ9k24E884QQ\n",
+       "Nz76JkoIIYQQYgJ6iRJCCCGEmIBeooQQQgghJqCXKCGEEEKICZyMWI5uKQipJXphk+X5BpBWA1Fi\n",
+       "M5GqG7BwWzYbO5HNx6Qmz7cb34ZQH1+bfIJ4m31ZA3Z7IOZ8wXNA2sgk5whqMDteRoA2sWTuIXnt\n",
+       "uF3WSdxp8CnR/eDP3fz0qXq9nijNgz93cV6nmBdmUDNAaM5E+S/sbw/oswNGbptZIXY0iuuRXOOG\n",
+       "tD1ETMEed/1wkAEbeIEJ22ZmyQ3+YH2RtBNOH7m1rTR+W0OBa0pjzaGN7ClB1ku4v+D7VGZ+OFyb\n",
+       "RGZWYINi/N+qZGBCHDuwQwhxI6FvooQQQgghJqCXKCGEEEKICeglSgghhBBiAqGwVMnP5w7DSHlD\n",
+       "CCGEEOIGoBQuReubKCGEEEKICeglSgghhBBiAnqJEkIIIYSYgF6ihBBCCCEmoJcoIYQQQogJ/KWP\n",
+       "zhNCCCGE+GJA30QJIYQQQkxAL1FCCCGEEBM4kZeoEMI3hxAeCiF8OITwypNowxc7IYRnhhD+XQjh\n",
+       "j0II7w8hfN+18vMhhHeHED4UQnhXCOHsSbf1i40QQhNCeF8I4V9fW9Y5/zwSQjgbQvilEMIHQwgf\n",
+       "CCF8rc7555cQwr3Xni1/GEL4+RDCXOf8c0sI4adDCBdCCH/4aWXXPcfXrsmHr322ftPJtPo/Pf7S\n",
+       "X6JCCI2Z/biZfbOZvcjM/l4I4YV/2e34T4DezL6/lPJiM/trZvbd187zPWb27lLK3Wb2a9eWxeeW\n",
+       "/9XMPmBmfyYc6px/fvlRM/vVUsoLzewrzOwh0zn//9u7nxCryjiM49+HbCA1BAmsdGKG0EUQ0RCi\n",
+       "UgjhwiLGVuXCkKLWBZHQLNq2Cl25URNxMSImOq4iaFEQlGIR+GdhOOgUMxOV/VvN4NPiPTLXwQtx\n",
+       "uOceuD6f1T3vORd+PJd73t+9973nNEbSCPAOMGb7aeABYDfJvNeOUubJTvfMWNJTwOuUOXUncFBS\n",
+       "fmnqgzZC3gxcsz1tewE4AexqoY6BZnvW9g/V43+AK8B6YBw4Vh12DHi1nQoHk6QNwMvAYeDObQKS\n",
+       "eUMkrQFesP0pgO1F23+SzJv0F+VD2kpJK4CVwC8k856y/TXwx7LhbhnvAiZtL9ieBq5R5tpoWBtN\n",
+       "1HrgZsf2TDUWDak+OT4LfAussz1X7ZoD1rVU1qDaD3wA3O4YS+bNGQV+lXRU0kVJhyStIpk3xvbv\n",
+       "wCfADUrzdMv2FyTzfuiW8eOUufSOzKt90kYTlWsq9JGk1cBnwLu2/+7c53J9i7wePSLpFWDe9vcs\n",
+       "fQt1l2TecyuAMeCg7THgX5b9jJTMe0vSk8B7wAhl8l4taU/nMcm8ef8j4+TfB200UT8Dwx3bw9zd\n",
+       "QUePSHqQ0kAdt32mGp6T9Gi1/zFgvq36BtA2YFzSdWASeFHScZJ5k2aAGdvnq+1TlKZqNpk35jng\n",
+       "G9u/2V4ETgNbSeb90O1csnxe3VCNRcPaaKIuABsljUgaoiyGm2qhjoEmScAR4LLtAx27poC91eO9\n",
+       "wJnlz416bE/YHrY9Sllo+6XtN0jmjbE9C9yUtKka2gFcAs6RzJtyFdgi6aHqPLOD8keKZN68bueS\n",
+       "KWC3pCFJo8BG4LsW6rvvtHLFckkvAQco/+o4Yvvjvhcx4CQ9D3wF/MjS17ofUt5YJ4EngGngNdu3\n",
+       "2qhxkEnaDrxve1zSWpJ5YyQ9Q1nIPwT8BLxJObck84ZI2keZxG8DF4G3gYdJ5j0jaRLYDjxCWf/0\n",
+       "EXCWLhlLmgDeAhYpyzc+b6Hs+05u+xIRERFRQ64jEREREVFDmqiIiIiIGtJERURERNSQJioiIiKi\n",
+       "hjRRERERETWkiYqIiIioIU1URERERA1poiIiIiJq+A8y9O8S/Rd3RQAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7ffaf3877710>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# the parameters are a list of [weights, biases]\n",
+    "filters = net.params['conv1'][0].data\n",
+    "vis_square(filters.transpose(0, 2, 3, 1))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The first layer output, `conv1` (rectified responses of the filters above, first 36 only)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 29,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlIAAAJOCAYAAAB8y+mTAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvVusXed5HTrmuu/L2jdubpISSVEUTd0S3a0okpKYseoo\n",
+       "TRs7TWvFaRPXSYAA7UFeCuPkoUBjFMhL0gYI0IeDUyNoXMM5QgPLRhJfEje2oxhSoosp2TIlUVdS\n",
+       "JCVyk/u+122vdR6Wx7fH/Oe35lp7ywrd4B8vm1xrrjn/+/z/8X3f+JJer4eIiIiIiIiIiIido3C1\n",
+       "CxARERERERER8X8q4kYqIiIiIiIiImKXiBupiIiIiIiIiIhdIm6kIiIiIiIiIiJ2ibiRioiIiIiI\n",
+       "iIjYJeJGKiIiIiIiIiJil3hPNlJJkjycJMmpJEleTpLk/34vnhERERERERERcbWR/KB1pJIkKQJ4\n",
+       "EcBDAN4C8PcAPtbr9b73A31QRERERERERMRVxnvBSN0L4HSv13u91+u1AfwJgA+/B8+JiIiIiIiI\n",
+       "iLiqeC82UtcCOCP/P/v9zyIiIiIiIiIi/lGh9B7cc6itMEmSmJcmIiIiIiIi4v8Y9Hq9xPv8vdhI\n",
+       "vQXgkPz/EPqs1K5Rr9exvr4OAOh2u/b52NgYAKBYLAIA2u02ms3mju599OhRAMCZM2fQbrcBAEmS\n",
+       "2F99HlEoFDJlIcrlsl3jlSX8bZIk8PzUKpVK6rpOp5P7XKJYLGJra8v+z3uzTh4KhULm3nnP4HMA\n",
+       "pJ5FjI+P278bjcZI9xuGUik9VDudTuaayclJzM/PA+j3J9CvG/uVKBaL1i5armKxiG63m+oPPlef\n",
+       "V61WU7/VPuSzer2etSnbflAb8H58VrPZxN69ewFsj8+lpSVcvnwZwHabNhoNtFotAOl+YN/MzMwA\n",
+       "AB566CG77rHHHrN2mZubAwD7bnV11erhjUned2JiAvV6HQDw1ltvZa7j2G21Wm4bXHvttan7vfHG\n",
+       "G5l77N+/H9dddx0A4NSpUwCA5eXlzHUTExN2vxdffDF3nPO7iYkJrK2tAQCmpqYA9Oft4uJi6vof\n",
+       "+7Efs7H8jW98w8pP3HrrrQCAy5cv48KFCwDS7cbn/emf/ikA4Pd///fxrW99K1N+rm1EuVzOjFkP\n",
+       "Y2Nj2NzcBADcc889AICbbroJX/rSlwAgUx/Fj/zIj1j5v/3tbwPot1+IRx55xOZS+BdIry8cvxyL\n",
+       "4+PjtgZ683V6ehoAsHfvXpw+fTr1XZIkOHz4MIDtNVWv0fVHxxtxww03AACuXLkCoD92dI6wvGxn\n",
+       "jtPp6Wn7DTEzM4P9+/cD6M8RwB/32h+s2+bmZqpc+nzeGwD27NmTaQPFnj17rD/z1vRCoWCfe2uc\n",
+       "Pj/PP5ptqm3G+/xD5OctlUrumAmRJIn1HctaqVQy63He74HhdfpP/+k/4VOf+tTg8g4t6c7xFID3\n",
+       "JUlyBMA5AI8A+Nigi3XTAvQnHxc5YnV11RprcnISALC2tmaDlt/VajVb+DY2NjLPIHq9nk36V199\n",
+       "FUB/4LMxV1ZW7Drvt3mbAnYmJ7qiVqvZAOUzyuWyvZR04QsnX6FQsPJxYfEGSbhYeGB7sYyDNn15\n",
+       "0OdwQ6sbDNYvD9PT09bX3oZMX8KcVF6dWI8kSfD6668PLCevG9SHvV4vNTY6nY47mXWzpH+9+4Xf\n",
+       "exOX9eR9O52OvVy5aVpfX7dFPJwfikKhYJsDjqutrS2cP38+Uyb2FzFss6uLq/ci+dCHPgQAtql4\n",
+       "7rnnrM0PHDiQeW74wgK2N5OTk5NWT28DRezbtw8LCwu55SZmZ2cBwNYNYHseHj9+PLPxePLJJ/Eb\n",
+       "v/EbAICDBw8CAD7zmc/Y95cuXQIAvP322+7z2NZsg1/7tV/LbKT2799vbcm+rtVquS8Abq65dgHA\n",
+       "xYsXAQD/7t/9O3zve/24HtZHX/BEq9WyNTLvWW+88Yb9Nm+TCiAzR9fX121sc1PUarVsfLBf9+7d\n",
+       "ix/5kR8BAHznO98B0G87brC5KZqbm7PDhK6zHO98NwDAK6+8AgA4ceIEgH5f6juBcz08EDYaDdsE\n",
+       "sXxLS0uZw4mHmZkZayv+9vjx4zh7ts8jhM8HtufA9PQ0JiYmrN1CeJ958OZwsVjM1FPXVO86fjds\n",
+       "EzY5OYmpqSmsra1l1qU8EmLQdRw7WjbvMEv0er3Mu0PfnR75oGuwt3Z7a/Tv/M7v/MNupHq9XidJ\n",
+       "kv8LwFcAFAF8OkbsRURERERERPxjxHvBSKHX630JwJdGvDb1d21tzU4WZF6uXLliO0rueguFQsYM\n",
+       "tbGxYbtX3YGHJ8NqtWqnA54ClpeX7WTj/ZbQU5G3c+W/a7WanTCJRqNhn+npgydHMgidTsdONixL\n",
+       "sVi0k+MotP8geO0Wfg/0TwFh/byd/szMjLVHeOodhuXlZfeUy35XCpunDu8EwT73KHQtq8d6KUYx\n",
+       "a5ZKJfuez+12u0PvDfT7nIwpy7q5uWnl85gSMg0bGxuZE5/ej+zI7OysMUc8Pa+vr9upWOvKU7OW\n",
+       "neNyz549APrsEsvKcipTy2fdeeedxhw899xzqecA2yY0YHsO6zhmn5O56nQ6VicPXCMOHjw4stm4\n",
+       "VqsBABYWFsxcSCwtLbm/IdtExkRNLGQIh+E//+f/DAD4r//1vxpDQlPh9PS0sSfEDTfcgNdeew1A\n",
+       "mo1j+b1TPU/rY2NjOHSo71nx9NNPA+iPK5qKOZ4uXbpkfeitqcRrr72WYkrz4J3++e8333wTQN9c\n",
+       "ynnDMXn69Gl84hOfALDNSCnYPuPj49YGXEe3traM1eE45VgCgL/+678G0GfEWAZC60Pz8FtvvWUM\n",
+       "u4J9fttttwHor0mhKW5lZcXM5VwTz507Z2VWRopQBsjrV6JcLmfeJ6Nia2vL2pzP6HQ69pm6TYTP\n",
+       "mJyctHblX12DyUSNjY1ZXXR9DMfMsLnqre96j7CNht1vkOUhhFo/duOGEpXNIyIiIiIiIiJ2ifeE\n",
+       "kXq3CE/eR48etdMfT1S6i1YmItwBqz2XJ29lrnhKKBaLxgLwlK02VP69fPmynfTJILRarcx1S0tL\n",
+       "dsrhM9QnxLN5eydclnlra8t2zWQNNjY2Ug7PQP/UlufTMGy3zWd47I6yMfzLkxe/53f83julqs07\n",
+       "ZAt7vZ7LuLEu/K1ek+ffNT4+bs/gdXqq8RzzQwd0vW5ra8t8fHZ6QtRAALZvkiTmyMqTtLIlLKs6\n",
+       "8GvZOR+I1dVVGx+sz+rqqo1Bsp6rq6vueCNzxJP1xYsX7UTNv51Ox3yGyELNz88bg0QfGJYRgPnt\n",
+       "zM3NZcpcrVZtznHuN5tNd44cO3YMwDbbVq/XXRbDA+ff1NSU9SHXix/90R/FN7/5TQDpsUXfMpZr\n",
+       "ZWXF+ivPV03x1FNPAej76fz0T/80gG3/Jm+O3nrrrdbWTzzxhH3O8eY5Jd90000AgM9//vPuuCTD\n",
+       "yP5/9dVXbe6SgTl06JCVh2vhIP+vEOVyOePL4s3jJ5980v59++23A+izNgyCYP96dVRG58iRIwCA\n",
+       "119/PeXfBPTX1o9+9KMAgEcffRSAP3+OHTtmz6Gf2tjYmI1FtkutVrOxQ4br/vvvt+eRWVtfX8dD\n",
+       "Dz0EAPjCF74AIG1h8Xx9WPalpSXrG65TugYPW7dDX+PwN94ayXJ4/kghW6koFAo2f5RZHwWjBlIo\n",
+       "+KxKpZJhxXq9npXf8zsN/2qZFe86GOpd/fpdYljkAKFOleoIGDrLetDoKaU12fgeneq9UNWJnWYA\n",
+       "Drr9+/fboqQDlpPTu46dqYODKBQKNpn5Il9fX8+YN9V8yDIPckpUM1046XTDmEffe98Vi8VM1Ey3\n",
+       "27VNgTdAvfvkOZt3u91db156vV5mY6n0rQYFqHmEzw/7Zmtryy0D2yCPnm+1WvZbvS9fmlxIi8Wi\n",
+       "fbZv3z4AvsO1V45Wq2W/Yd0uXLhgY1bNHh44frgp2tzctOg5ls8zgz/99NO5UWLsX298jo2NpSIH\n",
+       "AX+DwTYG+i9foB9BNurLnuPgpZdeyozLhx9+2MqmmxfWmRvBdrttddnpC+Gxxx7Dv/k3/wYAcN99\n",
+       "9wEAXn755cx1TzzxhK1z3FgsLS1lTJ2VSsXWpX/yT/4JAOCLX/yi28bcnNPsB2Tb+MKFC1Yuzrev\n",
+       "fvWrbl24kSG2trbsfhwbly9fzpjVdc06efKkfcbNKdd6XVfYb7pWMnJwenraPThyA6X1D10Vrly5\n",
+       "YqY6mqM3NzftfjfeeCOA/hjgZpjQoIG77roLAPDMM89Y5CMPGK+88kpmM1IsFu37l156CUB/TWeb\n",
+       "33HHHQD60ZSs7zBncy+wxTvE5kVbD9rkeBHdo2ycCoVC5nmjzhmNJNcNaLghTJIkNyBs1AAarvmt\n",
+       "VsveA6O4a1h5R74yIiIiIiIiIiIihavKSClbNCq1pifzUUL2lX3wQiCHORZzd0paWU9F/O78+fN2\n",
+       "MuQJttvt2m9I7V64cCGzy26328Y+KKMTsg379u0zVoE75U6nY6Hf/M5zCAd8B7udhLvzL+/JHbye\n",
+       "4LRuHt3K+2lobUh3V6tVO8UolZ/HRPG55XI5Ez67ublpJz2Ol2GnorzTVrVatXZju0xNTVn58kLE\n",
+       "t7a23DbnaZOn7K2tLfuMJoVQ1mEQtM/ZfhcvXsxowHgh8cA2I6RtRFOdmnHJbLANzp49O5LkxcLC\n",
+       "gjn9skwzMzOuSTl0kK9UKlaWQWa1vFBpwusDzxEY2J7PygiwbXYqGfK3f/u3+MhHPgIA+PCH+1mz\n",
+       "vJBqdT7n/D527FiGkWq1WiaFwL/Hjx/H3//93wOAG06vshUcC9dccw2Afj9zrSJDNAg0FRLdbjdj\n",
+       "dpmcnMwwjPv27cvohjUaDevPQeH7rK+ur0DfpMz2Ck1jAPDP/tk/AwD82Z/9mY1ZMpiLi4umpUXo\n",
+       "+k52aWFhwcyQZNHUeV3nJhnG66+/3soeWji2traMiVLndDK6HF8f+tCH8PjjjwNISzCMCm3L0Cqj\n",
+       "70XVXGI/6Xs57BN1q1EmLGTFvN+q3piuleH7Qn+bN5eHOYfrs1T6Aej3Q+hIv1tERioiIiIiIiIi\n",
+       "Ype46s7m3IEOE84KWYVQwZvXh8yLCizyfu12231uKOY5OTmZy16oOGh4oq1UKnZvDa3W3TDLF7IZ\n",
+       "Kr7Jv4P8QN555x17HtA/hbKNBu3Uwzby1HBrtZqdgHiqLBQK1kZeSGqeDIEKLCo8X7WwLaenp42J\n",
+       "ZJlUfI/l8xgCb5yMCm88ec8YFDofwhMCrVardk+P0eHJdVSUSiUbEzztKis3TFSR7Kj6UpG50rpz\n",
+       "HKgzfB4jpXIZIevV7XYzAoCDGEiWi0zNxYsXXb+QneI73/mOK7dAVozlq1QqO2aiFC+88AIA4Gd+\n",
+       "5mcA9JkfT1GcYF/y7yCotAOZKG/+e5/R3+zGG29MBSMAfaZLfVSJPGFKzoepqSm7jp+tr69nBC/n\n",
+       "5+etTfX5/IxzfWVlJRUwBPTXhvAZOqc0cINjin1Zr9fx/PPPA9j2+bpy5Qruv/9+AMDXv/51AP22\n",
+       "D8ejSil4gTRkTnXtoc/VSy+9lApeCsF59Gd/9mfmn7ixsbFj1kTX9FF9hsLArPA+vMcoTNMg60j4\n",
+       "TlVfKn5Xq9UyzvCVSiVlHRlUH5Xx8Ngslk8ZPn5WLpdzxUgH4YdmI0VoI2iaFE4M3VBplBjvFTq3\n",
+       "FQoFaxguwjrRNLosfIF3Oh1TQ+YkVUdgLgT79u2zCZGn9VQqlUZSu9Z6EKoF4mkqKUU5qrnUm2iE\n",
+       "Z+ro9Xq55rG8VAiDTDGqTcR7hH3oaemoUyDLruk2VNNktykNvA08sD0GPf2nPPpdy8J2WVhYsPuF\n",
+       "Gjk7gaqYsww0ZRSLRSs/28fr3+PHj2dUwk+fPp1y9gWAD37wg1Z3mjKGOZ/qBpzl43y8ePFiKr0D\n",
+       "4I+XtbU1+57mqBBh+47a92+++aZFItLE2mw2bRPB9k2SZGTndg+f/exnAWw7mz/00EMWxciX/sTE\n",
+       "hL1Ah0Ukcs7RLHTttdeaZtSokVS8rtPp2KaFG0gv2g1IO/6H4HxcWlqyTRPb79KlS7Zp4bxeW1uz\n",
+       "fvLMRhpZzX9z/J08eRJ33nkngO3oyG63a+VjVN6RI0cy5vHZ2VnbEN18880A+mY6boIYCXnq1Knc\n",
+       "CE19N3CeeUEd3DA/+OCDZrLLQ6lUsrE47ADkwXvH6LrobYa8+RJ+NkibiX2igTdeijBNowb016zw\n",
+       "fTIokGYUdLvdjIlS33H6ng0P/6OkpfEQTXsREREREREREbvEVWekCG8n7FHo/MxTTS0UChkzju5O\n",
+       "lckJWQ/dMSszwNMJPyuVSvZvntYWFxfdUM0QehrPU0/XcnkOedzR6zMG6YfkIe+0rsygKo2PEh6r\n",
+       "yGMGarVaRuMJyA879cKAVZWY8Ji/vL7RPFOagzB8npoKtcz8TZhcWTFICoJtmsdEVavVVJABQQaB\n",
+       "rNbm5qblNWOI9VtvvWVmK20PMlZkBprNpjGwrEej0TCmhure8/PzpqfD++blwwO2Gcxz587Z6d47\n",
+       "YQ4LBKDZim0/yml5FKytrRnLRUbn7Nmz1v9kVnbDDChY5z/4gz8AAHzgAx+wfr3lllsAAO9///uN\n",
+       "4c5jpAqFgrFjf/d3fwegL4PA+ZCH/fv3p/S++CyaHMmyeFpje/bsyWiBKbSNaKa6++67AfRZo5AZ\n",
+       "0nGvDvKhk7beV7/jGCTrNTMzk0paDvSTOofPVfOcyhnQjKoJiDm+1arhZRrg3OT8mZqaypi8G42G\n",
+       "q5cVrk9JkqTm9ajQ9QtIB7nkMS67Ye7ZX8pQsr5bW1tmdmX7DXqHEBrE5Mn0hO8az6yvDv76nvUY\n",
+       "uN0yUCEiIxURERERERERsUv80DBSRK/Xyzg8euh0OrZ7VX8nj3XwwjL5W+52NzY2cp/LU1Oj0XBP\n",
+       "3zypKJviMTX8ntd74pBadlX8zhM1260fEMF2U2dDnobUKdhDHhOl34XCnc1mM1NudUpniHWn03Hb\n",
+       "nCHaykR54bteRvFQiqFaraaU6kNwbLRaLZcpZT1HdURmvRcXF3NPRXzu5OSk6wzN3+qYZbtx7Gj7\n",
+       "sF3q9bqNaZ4gV1ZWjGkg65okifnrkP147rnnLBfcqCc6tkun08llHPPGsfpwaJaA8Jph9/Hwzjvv\n",
+       "2Oma/ka1Wg3z8/NWbn42Cm699VZjPLz1hP5Qr732mrUN16SZmZmR2nVhYcFYJQqG3nPPPcYCsOze\n",
+       "uBnUPiwzxwSV3RXFYjEjYeDl6dNncFzdc889GXFLBZm4gwcPmp8W71uv1zMMca/Xy+Q+vPXWW229\n",
+       "YD3+5m/+JpPDs9PpuOOFZQhlaVj3QVhZWTHmknNrbGwsI0Px1FNP4Vd/9VcBbLfv+vp6ps+HzZVB\n",
+       "CB2y3y3CNlKJCNap0WhknlcsFjNCwPruIvtdLpdTAsBE6DfX6XQy78okSTLvJx2Hur6H2Qw8i4LK\n",
+       "OITZNPKQvNsX8G6QJEnuQ/PSgXgIX9BAvoKrF8nlUbBe4uGxsbFMmo9B4ET0zFeKYRGLQH8RGcUE\n",
+       "FCIcFMPwbqIXlFoNtZb03lQ+9pzIVXVewe/ZBrrRG7Z5CVP6dLvdkUyUGj3pbQxVk4VQ8/G7NQMB\n",
+       "2yalUqmUqxxOTExM4N577wWwPT7/9m//1r6nM/ni4uK7Wmhp1vacSfOgCWE9c0UYOTsKtG84Jnbz\n",
+       "MlGzAvGzP/uzALZNbM1m09VsC/HII4+YnpMX9ZaHWq2W0pQD/AwIR44csQ0KX8yLi4vWdtyIfPvb\n",
+       "3848Y2Jiwl7wGhHITTPnVGj+A3yduCRJRgpyueuuu2wTzo1Pp9PJRPKVSiX7TJO6sz/ZR3fffXdm\n",
+       "Y1av161uWn4eMDQNEZ+hL9dw/RkUccwE1DRzA9nN69TUlPWH3pfPpd6U10eDXB92uqZ77ztd2xRe\n",
+       "4JMHzhV+32g07NCnbakJjFkWL/lx6FZRLBYz60OpVMoklG42m+48zHv/7wZcz3u9ntvo0bQXERER\n",
+       "EREREbFL/NCZ9oDsbniYFpCay5SxANKSA3pK5YmAu96VlZWMac9jLQaFuqtiNNCnPD118jB0Wuum\n",
+       "91P5Bi2TXqf5rfSUMopqe4jQJNZqtdxEmCHdWigUrJ08ClbB7/XUSabJO/Fpe7AtlXHwmKgwx9/s\n",
+       "7GwuI+jl3yM8hpB11s9+UKceD15iUQ8czzMzM1ZfzxzK07HKJOym/JwHO2W01WFUT595iW4JNe0p\n",
+       "+6GOrqGcxk5yinkMGRk8MjV/93d/N1KdZ2Zmch2EPRZdvwvXDu+6c+fOWV62X/mVXwEA/PN//s+N\n",
+       "weF3HtbX1zOne2DbtKfzXPX3gMFj0VPADq/97ne/a8ELaqbTPI58Bp3+1XQX9q/qyREzMzOmjcXA\n",
+       "i3Pnztl96NT/4osvZuaIZlbgepEkSSpJMkGml1IWTzzxhDFRqn1FBl4tHhwblA/xxmSr1Uqts7vV\n",
+       "SNOMFMP0pEadz+HYLhQKGRO25owNAweA9PvCY5C9d5tnJg/fU91uN3dN88yMHkJ3lzxERioiIiIi\n",
+       "IiIiYpf4oWSkCJ5mPAc/PZ0qIxX65uiJSGUDwnDWsbEx2+3qjpUOcWRMNIzS2/XqKSvMb/T222+7\n",
+       "J4MwQ7pC/XU8BiRkiNT5blTHWIXu/vN24p70wjCfsVC9dmtrK5eJUlYu9D3Q8rHu4+PjdpLnSejy\n",
+       "5cuZ05eOp7wTWrlctrGgPjteGK0XKLBb6MkzFGH1yhj+hm2kbcs+IgsxPj5u/kh0eN3a2hqp/KNm\n",
+       "FQCyIqPNZtP8V7zwbK8fWOZyuZxRTy4Wi66Io34flrVUKtn48OQ0FHTUpy/LyZMnc4NgiJMnT5qy\n",
+       "vOewrcEOYZ0HKcSrvxzQZ2r4DKpma32G+ZlxvtKHsNlsZpiGTqdjZR3VJ1GDO8K53Gw2za+L9bl4\n",
+       "8aKxRfv27QPQXyvpkK/K5aGA4ne+8x0cP34cwHYWgDNnzti6TWZK2THOi1qtlpEy6fV6KSFooM+E\n",
+       "eexdmJPv2LFjJmdAqZAnnngisya1Wi37rfrRegriRMi87hSjWCe8Oexl2QCyrM4gFf3wnaAWE11r\n",
+       "Qn/iJEkyrOwgBilP5NoTINX75Al3qqL6MPxQb6QILxqv1+tlOnOYEzavUxrV6xw1VTE6SE1G6jgH\n",
+       "+AuL5xwKZDdXnU7H7u1J6nsvGI1YyDP97OSlPorjoT5bdTy8lxfB68bGxjIvrUEvX/5GowZDrRh9\n",
+       "mbGe3gvIcxQdtinROnJM6UbFi04ata3zzGkcV9VqNWPCGmQm4ljlON3Y2EilwAD6myaWmWaGer2e\n",
+       "2pSG5aNjrur0EN6iuX//fnfDwD5kO6qG2zBqnWVQDbdw3tZqtVTfePprYd2GJTpV0FGcppqpqamR\n",
+       "NlJPPPFEJiGugv05NTVlG0uNrqTGFx3BNcKV7bK4uGgveJZpz549dh86xc/MzGTSGGmUaqg7FILr\n",
+       "0iCV8xA6p7125jqgaYM0vQuQzlKg44RloHnw1VdfddPVhFGdxWLR5gPNl2NjYym9NCA9xnh4O3/+\n",
+       "vLWBt/5wfdF6c0OlQUyqjh+mhdI5zxf35uZm6p2wm4wHo0A3E6FbyKA1mm3E8acJgBXheqfvC90g\n",
+       "e3VjX3NdXl1dzZ233hrtZQFRhGRMkiSZjfQoiurRtBcRERERERERsUv80DBSnkOzMiKeUqmeaAA/\n",
+       "OfDExIT91jvhEN5nyjLo96o8C6TpT5ZvUM67kLlIksR2vHp9qJexG+fgQSyJ55w3qv5OWE/9N8uv\n",
+       "Ya88Tayvr2fMPNoupOKr1aqFLCvTF568PWmKvXv3ZhSX19fX7ZSrYbdhWypLMYwpCeu9tbWVcXL3\n",
+       "UKvVMqZCdbjXBM7sD57KKpWKm+ONbIbqoKj2FD9jW5Gt8E6AGl5Mk5EX/l6r1TIM2fT0tMtIsS11\n",
+       "bOed8NgWY2NjqfyBQP/0Pkg/igjV/3WucJ1YWVmxcoVh9yE4LpXNHhWqnA2kWWqWa2lpKXPPiYkJ\n",
+       "a3+O3dOnT7tmFCpks5+0X6iHdPjw4QwjpYzkqON9N87OeY7ALNOBAwesXfjZddddZ+s1v0uSxL6n\n",
+       "av91112XUgcPn8v+GxsbSzF+QH+shGNxY2PDWCo1GfO5NKG++OKLGWZVTalsWzrMA9trl2ZR0ByT\n",
+       "YT7Zzc1NYx8bjYZrXvxBIC8wYxDYvjreuBbR+f7ChQuppNH8HX/LNp2cnLS+ViY+NE3X6/WMXp++\n",
+       "4wblAORzw3dbpVLJ9KGadkPLUx4iIxURERERERERsUv80DBShObL82ztujtURXMg7S/BHaZmjNfT\n",
+       "Z3giKJfL9m/uqDX/kpYvZC5UOE1P4KF6tj6P8Bzk9BmeKrpeH568C4WCMRE8zYTwnPNGdTLMU49V\n",
+       "hDt9PXERBw4cMMbllVdeATDYQT5kQBqNhrUX21nZKLIZ9XrdHE4VXj/ksXHq9M0To+dboAgZyamp\n",
+       "qYxfnX7GE1qlUrF+5d96vW4nNJ5sDx8+bCdeMjVbW1s4cOAAgGzOMMBnoqgQXygUMj4jIZPB6+hr\n",
+       "xbbwoCHbesr2To7qBwWk/eJ4/SginWF4vILshAqbhrnAQrBfjx49CgB49tln7Tv1VfIQ+lL90i/9\n",
+       "Ev74j/84c13Iot5444347ne/C2C7/9WPSFlC5sajYKjnO+ixaMOc7BVsN64rXoCIQp3S1UcS8H39\n",
+       "lpaWTGaCdXvjjTcybKG2wdNPPw0AOH78uPUh+1yVsjnPb7rpppQfIdBvH/7m9ttvB9APEmC7kcFS\n",
+       "9ltZ4TBIaO/evRnWSxlHDbLhnGN9q9Wq9Z2npL2+vj6yjxoxqpVB17FBAtYsNxHeU31gv/a1rwHo\n",
+       "+7GF1+k6y3oq08S+8daIUXwTB8ETSG632y47Pmx8e7jqGykvUiEvzQs7c2xszCZlnuN4oVCwxuJf\n",
+       "L5JncnLSJjM3UAsLCynVX95PTQTAYCdATUUQ1tejC0OToULNArpQhZ/1ej1bMIaZQfIwzBlXkzSH\n",
+       "g7FarWY2Te12OzMhz58/b06jSpl7oPMtf3vp0iVrJ32WOibqX4UmtVTlXS8JdhjV0el0rL+HmVpD\n",
+       "E8HU1JSVlb8dHx+3zzh+5+bm7DccY+vr65m61Ot1e8nwBaNOujTtANuOs7y+3W7bBvT9738/gH5/\n",
+       "cCF74YWtMzCAAAAgAElEQVQXMvVhG2xsbFh0FTdAYUJYIK1pxesvXLhgdacpYGtrK6P/pg78fAl7\n",
+       "8/a6665LLdYcJ2EKE8BPVzTMXELVbI7TxcVF20CFytsKnQNst49//OPuRorgZu1jH/sYPvnJTwLY\n",
+       "Htuql8SFfmZmxkx7n/70p61MYXBAuVy2hMx6qBglCq9SqVgS5DyT7Pz8vD3XW9uGqdizvz7xiU8A\n",
+       "AP7oj/4os7nVTT3b46WXXrLDBDek7Ctg+5Dwta99zRT/meBZwY3or/7qr1ofcbzMzMzYhkfLxPcF\n",
+       "N0+DNME4H3Vd5/24SV1eXs5srsrlsvX1DTfcYIfNUTGqJpTngqIBN6OY/BqNRkYv6+zZs27S4nDc\n",
+       "eS4BGgmt6yfXQI2yHLWeO9W842E8mvYiIiIiIiIiIt5DXDVGijSt51genkQ3NzczzJXm9uFpxzMx\n",
+       "dbtdl1oluNu9cuWK7XLvv/9+AGnTnub/40lfWSBv1877qUZGaI7UUwBZg8XFxcyuvdVqZaj1ubk5\n",
+       "K4uq8A5zoM2D5/RPlMvllPI5/2p+Pi2LQh282U8zMzMpDSPAZx00+eWdd94JoH+y5v3ILiizEf5e\n",
+       "n+H1lTd2VFPE+82wkxq/Z7/VajU7rbGNdGxzLKqyNft3YWEho6V17tw5axfOlampKWNRlZH0WM6H\n",
+       "H34YAFInes+Ux/a97bbbAPRZGpaBZfJYWU0OTpZAmWSO8bNnz1q/0SwJZPtENZdUW0oZGLIOzDOn\n",
+       "faTzgePYM5dQ+VpZOeZTazabVsY8U4POAZb51KlTufnofv3Xfx0A8MADD2S+88wN11xzjbEdZJrG\n",
+       "xsYyz1haWjJdI7bB9ddfbwwOzVVeAEe73bZ65uV2u3LlSoZR4T2B9PjgZ14b/OVf/iWAvtk6dNZX\n",
+       "sD0mJiaMEdSsEiwrGe477rjDVM7JSOn6dtNNNwEAPve5z9lnOg489jlkUhYXFzNrtNaRzzt06BDO\n",
+       "nDkDIG1NYd94qvdnzpwx9tGDvh818CQsg16f52Q+bG0LE9rr2qbgeqLvCLJUaiYl68n1TtdyrgOa\n",
+       "pWRU+RIt5yjSCXrNTkyJkZGKiIiIiIiIiNglrhojtbS0lDqdcqepvgUqWhf6QwHIKJF7fi76G+6K\n",
+       "q9VqhmmoVCrmH0ImSv2SNOzfy7sUfqbMiuZ64wnYO8HryZrPVTaNpxzusi9fvpxhrnq9np1sPIVs\n",
+       "lVbw8sflnUQGhUmHubg8FAqFDFPlsR+qVM3r9+/fb34I6uybl59JRU5HsfEnSeK2pZaL140qvukF\n",
+       "TXAcsw82Nzct1J1/JycnbSzydDo3N2fty3Hi+cD1ej37rQr8sa1ZptnZWTutM3x8aWnJdcz/F//i\n",
+       "XwDYbtOnnnrK+oh+Tgqeyvft22f15Xhvt9v2DDITepLlOFxZWck4jlcqlUwAR3hqZHlYBmWhNJCB\n",
+       "5dF2JqPhKRnrSZWMn+f8nueQ/e///b/PqHArWL7QYRkA7rrrLjzzzDOpz1ZWVqzvVB6EzAvXorNn\n",
+       "z1oADXMGXnfddRkGod1uu8LH9F8i4+SxxqqKr/dgO5NNOX36tH3msYFklzY3N20+sl08hl2d5rlm\n",
+       "bm5u4hd+4RcAAJ///OcBAN/+9retfHxutVq1OUTn/gcffBCPP/44AFjo/pEjR4xl03qHDMf4+HhK\n",
+       "0ZxlUV8wPjcU9tR3ocfstVqtXIZO15hR1ju9Xv1dw3t4efqArNVB5wLn4B133GG5BNl3rVYrxYAC\n",
+       "/bmojv1AfyyGVqhBWTBUHJr18fKrsh9YN2WpdpKX1sNVdTbf3NzMTNxms5kxxemE1+/CCL1CoeBS\n",
+       "ySEGfRcmU221Whnp+vX1dZu86gzHRUtfxuGGoFgs2oDjS3NzczPjiKmbSX2Bc6Hgd56jN68F/KS7\n",
+       "HtUM7DxxrWp35Q1C1fPiosGJ1ul0rI1oXvKclpVC1xcntWTUCZN1ymsXrbcmpgzHRblcHin9zaAI\n",
+       "R0JNmmFbVavVjLbUpUuXbOPD+62srOSaajl2l5eX3Q1qiG63m3Hs5+KuOHr0qI3P559/3j7nhoFm\n",
+       "ZC+aVdM8cPO0sbFh9/MckPliW1lZsQ0N/+r91OFfN/B0Gta+ZPt79ePLct++ffYC89qPG5VarZaJ\n",
+       "slNwHu3ZsyeVgJfgxseL+Pvd3/1dAMBP/MRPZO7rbe4ajYZFr3FNWFpaMjOVziW2P++zsbGR2cgM\n",
+       "WwM02bgXrRmae/VgyDl633332SZDX76cAxwTs7OzNhboxKyuDDQV04QLbPfR2toannvuOQBpk00Y\n",
+       "tTc9PZ3Z+D7++OO4++67AWxHBhYKhZTyOZDuUzqMX7lyxYJmWMewnkB/DNBMzj7Sdxw3UKqKrvXb\n",
+       "KXSz4Y0jdR/xoAE5gJ92TbWx+H5kUucQXFP5br3uuutcdwTP7Sdcw/PSqoXwDj6hudqLLveiKENE\n",
+       "015ERERERERExC6R7DQk8Afy0CTpkR7O07rI0x5RM54654XyAoPMMGEeH1XA5o46PBEAfTaFJ2BV\n",
+       "3A0dBAdpcoQhooPMkSGq1arVzWPUBuXFy5NUGBa+zxNDeNII4Z1UwvIkSWKnP03i6d2TJ1o+d3Fx\n",
+       "0U4ZdEocxNCwD1kWr261Wi3zfalUyrBPyrJovdQpHEi3Cz9TRpKsjTrNaz4vPlcdLfV5QJ8BChlJ\n",
+       "YHv8jpo4mvIAx44dM4pdnarZ53S0PX/+vBueTPMR+yo0O7G+O2U6OecLhcKOtI6A/lgjq6SOrOwH\n",
+       "SjB4prNKpWJ14m+V/STrMDs7a/n38nDbbbeZWcNbvz760Y8CAB599NHMd7Ozs+Y0TzcDz/H18OHD\n",
+       "ls+Pc+GJJ56wenIuvP766+Ygz/Fcr9dtftHxeZBOVzi/C4WCMchkT0qlUkqTj9eH6/rk5KS1c57E\n",
+       "xqFDhzISFvV6PbP2as5AQiVA1KRIpo7WgG9961v2mSpwh1aNYrFoc1glRYgPfehDAICvfvWr9lme\n",
+       "CRdAhm2tVqvWvuzLPXv2WJ9oHb3sEu/GNMX7aBny8sRqrtq8546NjZnlQusxSu66YdD2Y5mVCR9V\n",
+       "rT3PMV/ZTLJhvV7PjbiIjFRERERERERExC5x1XykyEaF+Zu4+wO2d+HeyVbZD0+4U9VTPSdI7/QV\n",
+       "MkMrKyvmy8Dr9ZSs/lChXVVPC1p2MlFqV/eYofCzQSxEuLvXtvJ24zvZrXvMlzoD8h5hu1UqFWNI\n",
+       "lPkJT9XqUEi2anV1NSMoqGD7673oc+X1Q6lUyjgtNxqNjHK4d0rqdrvWx+o47gnAeiHLYRZxdeZV\n",
+       "5opl9vqDZV5dXXVZRc/Bn3ViuywtLRkTypO1KocThULB/ENYvjBXFtAf92zTPBXgnbJRgM/eeBhV\n",
+       "tVmvpT+Sl3er0+kY0zQoXBzot6XX1971HNNend73vvcB8EV/9+3bh5/+6Z8G4GdXIOr1upWZ/l/3\n",
+       "3HOPfcb5Ozs7a0woy/L222+b4zkZmkGMVNiPXvtsbW1lcp6qnIKud+H99u/fb8yWCqqSWVOGkN/z\n",
+       "s83NzYyf0/T0tK0T9EE6evSojeV/+S//JYC+1EY4vo8ePZpR9d/Y2MgNpCETdeLECZPJUL+iUDJG\n",
+       "Hcv5mfpPqv+c+tKG2AkLFUrFqL8h+0P735tfYZ47YLtf6/W6vRvoj6drA8tfqVSMNea9lT0OHfO1\n",
+       "zNpGoci21nFQu6hMDv+GY1F9yHbCnF01017e9+FCVSqVMo7bXrk9M1mxWMy81BV82TQaDWvU0Pw2\n",
+       "CKplw7IqHeyldPEc6DgAvUSQGlERpqEB0gtU+BvdlOZpwIS/YfnZDzoJWM9wUOq/BynHey9dLhSk\n",
+       "zMfHx+0ZXFBarda7UmkPodQ04fWNmkbDa3kfwJ+QmvxSTYChftFu0hF40IUyfMbly5czY3tra8s1\n",
+       "ndEJluaFM2fOZBamqakpe6HRLDRsrnjQdvQOBGw3NbVwLHppLTRhM68bHx/PbOAPHjxoi71uOlkH\n",
+       "r89VCyxvo0/Mz8/bxsJLqsvULl/5ylcy31UqFXzqU58CAPzv//2/AWzrKynuuOMOM4/RhHXLLbdY\n",
+       "+b75zW/ad+xrNWvS3Mc5+MILL7jrqvdyo2mPm8AkSUzpm5/V6/WMBtXU1JSNLa5dzWYzpXIPpE1o\n",
+       "HLOrq6tWPr6MNcOBzmma1vi+WFtbs+d6kaaem8A999wDYFvdXn/rzR11kCf27dtn7wl1/g/Xhs3N\n",
+       "zYxpdGZmxgIbvAjyQqGQ2Txo4MuoB5lRDgZarnK5nNl0qzmV0CToYRosxezsbCqJO9DvN85DTcnF\n",
+       "z9R5Pjzwa2LkPG3DUV0P6IAeTXsREREREREREe8Brqr8gTqK6ak3ZD1arVbK0ZF/Q5ZKw+m9Xbln\n",
+       "QuMu2tOYKZVKdsqiw22hUMiE6nqaJ7oDVrZHtayA/ikq3FF7bJCeFryQXi9RqLJQvJ+e2tSsGv5m\n",
+       "a2srZVrlb0Oa2nPI3trasjZiPwwyG/AZPLlMTEzYtUr5atsA/VMqn8vTpCat9mhqNePxPuwvPU2p\n",
+       "eThk8jS3k1LELL93oteysMy7YaK84AudD+F3Ot7DBM/e88kQANtO5KurqxkH26NHj9q11N8JnzcK\n",
+       "8jTVNNGu6gMRg07drBf79fDhw6ky8jdhcuxqtWrMDNcarbeajUJ4J9tLly4NTMANbCule2i1WiZN\n",
+       "QeZFQa2iRqNhbUOzZaFQsBB8hu9vbGy4ofNcO0KHW0W1WrU8fcqshYzu3NycMVFci/bu3WuMFO89\n",
+       "MTFh81XZm1AWRnWzNEMA60tT4LFjx4xl0zLRyZsyCaurq5n14oMf/KCxTR7TxO+q1ap9r2tNOG6v\n",
+       "XLmCBx98EABMi+rtt99OJQUH+msXmRld10PH916v58o8EJoFhBhk1uKzdX0n9H07KF+gXqfrCf9q\n",
+       "UmVNCh2ywTpX+Nvl5WXXbEjk5YJsNBqZdUfXaM136b2n1NGe14Vmw1FMfJGRioiIiIiIiIjYJX5o\n",
+       "fKRGtdMS6ueSZ7cuFotuuGiek66q+hJkumq1mu1yeQ/1BVCEDnvertizLeuuPS83l5cXLzwpeT5S\n",
+       "XngvoX5OO81rNAxhGPWgE9Bu4bUvkBY15fM9Z8pRpARqtZpd5/W5ym7wdL0b/65wPoyNjZnPGJm6\n",
+       "drudUvUH+v3HkyH/qjOzx+TwvgcPHrTTM9vghRdeMEVl+sXcfPPN9pn6fZCtoa/RP/TaogwifaXu\n",
+       "u+8+17+IZeUcaDab9hs6YVPcUzExMZFx0h02jtk3s7OzuWKeioceegjANhOmAo+f+MQnAPT9q0KV\n",
+       "+BMnTuBHf/RHAWznlHv99ddTztl58KRMyHCFAq7Adh/ffPPNxqRork/vt3mBAuqf6v3WYz0pzEu5\n",
+       "hGKxaGs055FKY9CvD9hmwFTpPpx7KpBMX7mNjY3cdZEs3rlz52weKnujATJAv73JimhwCttyYmLC\n",
+       "+prtVi6XM9kTvEAKjzHV3/4g1nd976iwdfierVQq9jwdY/wNmVNVVFeHfO99GOYqVUZKVc9DaaQk\n",
+       "SXKDCNTJnSzcIB+pq2ba40ZIVcSBtJaFbkD4mZqZCG5E1NncozDVIc9bMHhPVSzn73mdZxLRF6qa\n",
+       "WsIye/pA6+vrmSTDW1tbmfJ5E0Tvw+sbjYarOkzoQCa0XHmO6kmSZEyA2l/D9LDCyTzqJqpUKrlS\n",
+       "/lyMdLJwMnvOyIQujGy3Uqnkmpi8iD/PuZFtrt959wv7ptVqZfpVox6JZrNp45MLc7fbzaQSmpqa\n",
+       "yqi7q5OmPosvLW4gOp2OvXBpelANKX72/PPPu5vDkCbPyy6wGwzasISRv8D2WDt37pyb7YBl1UME\n",
+       "5zFflt6cW19fzwRIDBvH4TowCriJ03FAkx7vo33AuqkiPPWa1tbWXNNQXlkVYfYBbzOoa6A6bnNz\n",
+       "o5sTtqmXmUDTBoUm5XK5bC9aDW7g7/md5yYCbDvaM53X+fPn3SCJsD/VtJSnX6Vg2VWLkMEHrAuw\n",
+       "3b9ra2v2mb5j2JbeXPLeB95nnqld35+cr95a5MEL4Go2m5m10htLnouKaiR6ZIjOgTBpebfbTW1G\n",
+       "9fkh+Ly895QexndyyI+mvYiIiIiIiIiIXeKqMVLtdjulp0F4u+Jut5tJqlsoFDIUrNKjeQ5qmrjX\n",
+       "c8j1dJiUqclzqtWduncdT3d6HU8dqlnkhdOHzsYehem1qd7b06MCtk8o6mgdnjC8BJF6UvJoV0+X\n",
+       "Q8uS15ZkBlQlXM1uPL14pgI1YYXfd7vdjPaMnlJUHT10jNT2ZX+0Wi33xOj1g5cEO6y7MrWEnsrU\n",
+       "LEUmQMO8wzZQWlvbkffhZ+1229qU5kA9ufLfg0yVoUP7IEaKdQtzZep3vV4vw5x6NHy9XncdqXmf\n",
+       "119/3dpB5w/7m6ycBgSwfrOzsxlWpFAomPSDp3JNeFIsXvDEIFDCwAvVZ/m89lhfXzezEseCJoAe\n",
+       "Bs9NgvfJy/X21ltvuSrhHANkY1qtVq70B8fdrbfemgkS2LNnj7FhygqGTs4TExMZSwKwvS7xHgsL\n",
+       "C5lsAsp6kh3TcirT7eGOO+4A0E+SzPpznpHR3bNnjwUHsL3n5uZScj8sO+fS+Ph45p2mztJsAx3b\n",
+       "RKFQyORa1bUy710JIOM+4K1r+i5ShOuYZyHa3NxMsVP8Lly3dU3QNTjMzQukrSi8X+guo/dm3fJc\n",
+       "Q/IQGamIiIiIiIiIiF3iqsofqGyACm2qkxcR+uZ0u92Mc5veR/MCeacTIk9Ekp/r30G5eMJ6eKKP\n",
+       "WgZlOHhvz3lZn8FduNrXQ/mIzc1Nl6FRnyDNAE+Ev/HYJ62PnppCqYN6vZ5y4g1/o5+FjI+WOU8i\n",
+       "YHx83Mqvob1h+3rMWrvdzvSNsnNavlCdfHNzMyN/oRh28g9PbdVqNSMNMcjPge3BZ4SK2GGZlZUj\n",
+       "m6BCgGRjdE6FbNIg2YoQ6k+Y1wYTExN2evZ8bbwxkscyN5tN15eBz7hy5UrKYZ/gnKPMA8Ppge0w\n",
+       "f/oEKcbGxnKZGUKlXYidON+TVVLWi07SXp4x4vLly8ZyHD16FEA/dJ7sirJVnrAo66aMFP2cVB7D\n",
+       "A5koOutrrjwtJ8tChk6VzYm33nrL7sPAhgsXLmRkSzxxyHa7nesHw344c+ZMSpCZ9+M4Juul6xDH\n",
+       "kgoH61qiwRcsp1cW+iVyzS8UClYPKrpfuHDBnjE1NeWy1CyrvjvC4BplY/S34b8104g6brNttJ5c\n",
+       "A3n9IOuC907lfFb/aPW1ykPYBroec+x2Op2MhUWFij2mPC/4LM8hnbiqGykgW0jVceBi2Ol07DNt\n",
+       "yHATUSqVjELmIjGsY3RD4Dmgh2Y+jdAbFO0W1su7r6d3xIVjcXHRfs/yq9Oid+9hUX1Er9ezDQB/\n",
+       "G5ZN66Mol8u55hiW1VMJHqQSruXKQxiNtb6+njHFDqJgw+8HmdNC6Fj0IiSJWq2WMdl5UH0wtuPE\n",
+       "xEQmRUypVHKTbg9T9Qf6faTmVKA/NvjC0EWHCxrHoLbBqNFlhC5enLdeFOWBAwesLPoMlsuL3tV+\n",
+       "Dc3HgxxCOf81cEM3vnxJUt1bI/TytL7GxsZG0gAbdQOqYD3f9773uWXg/P/e974HoD/uwudcvHjR\n",
+       "zEZ0Nt/Y2LB2+83f/E0AwF/8xV+4G6k8c6Vq1nnrKhOKcwM1MzPjmpz5DDUFMvkyzXlLS0u2yeV4\n",
+       "v/HGGy2CjxFs+/bty5hMNXm96ldxc+jpCGqGCI4XbuDm5uaszOpIPcqa22g0bIxx/K2vr9vLXDME\n",
+       "cD4ywEPre+nSJXOS956na0KejpsSAmE6Nf2trgWcz4QeRPUdkvdcjYjnnOVaPsypW5/Bcaw6i6G5\n",
+       "z8OwZ2ikodcGwxBNexERERERERERu8RVZ6Q0Zw6QPk3wtJUkScbRulwuZyj/TqdjpyylTkM6eGNj\n",
+       "ww3RzGNMVDtIw0VZvlB5HcgyUUmSZOjPWq1m/+ZJTrWq+NyVlRXXxMF78/TmJQUFfPOI1pflIqPX\n",
+       "brftBOyxbEShUBhJNmBraytXKyyPtSuXy9Z3Htul4e15uluayypkqWZmZuzeynR69G44TrT+enoK\n",
+       "ofnceJ0X7qsBDXriDPMbang+y9ntdu3ebIv19fWM4nexWLT6khlaW1vbsaaMtrc6+/K78H5nzpxJ\n",
+       "hYGz7Kwb67GwsGDl433Hx8etbsNy+3k6Wcpc0CzKsqhzM5/hsU+XL1+2vHY/aHAOz8/P4/nnn898\n",
+       "HyZzD9uR37E/qbINbM8HmiuPHDlia6W2C/uBZl81H5O1abfbGUbq+PHjGVPz4cOH8dxzzwHoq4gD\n",
+       "wNe+9jX7nrIFBw4csLbneD906FBmbgzKe0nGRIOO2FYq2RDmUJ2ZmUklJiZ4HVkoj+lQU7aCrN3v\n",
+       "/d7vAQA++clPprJisHz8t2paMYsGP3vxxRdTjFVoNvSYaTU7sy0bjYb1vxdAxd80Go2U5BCvz2NX\n",
+       "2SflcjnjVuHJh6gcjb5ftAxh3fTd6jHRvF9e5odyueyyqJphRP/uFJGRioiIiIiIiIjYJa4aI8Ud\n",
+       "fSgeCGR32uqrEjrpAWnZAO5e+b06JWu4vMdchP5GGjbKnWqSJBmHdnXS1RBW3a3zHh6bEQpyNhoN\n",
+       "OxWpQz2fq/5C/EzDpT2Ww3OgVgFF/ob+FXqdij6Gqt8eg+E52qtjrMc+qWhhKOnQ7XatbdQJkicR\n",
+       "T4KB12lePXVKD8ug9VaE9fBU7JMksbbK88lTFkXrTsaE49TLKadgu9RqtZQUBsunDqC8b8j4tFot\n",
+       "q9swdodQPyxPIiQcVx6azWbmhK+OthqGHjKdg8a2+oSFYpmAf8pkH7788ssAgF/8xV80VoTXeyfx\n",
+       "brdrfReG3b9bsN8uXrw40j09aQRgey3gunjgwAFjRX77t38bAPDII4/gx3/8xwEAjz32mP2Wa5HX\n",
+       "h9dffz2AvvN6yNSdP3/exuU999wDoJ+rjn49ZKLuuOMOkwZgOy8sLFj5OI7feecd6//bb78dAHDy\n",
+       "5EnzpWIuvQsXLmTELRcWFowdI+MEbI9zSp80Go2M3EO1WrWxQ/Z2c3Mzs160Wi13rBGf+tSnAPR9\n",
+       "0f7pP/2nALbn7dTUlOXQU988MlE///M/DwD44he/aJ8VCoVMkEOpVMq8xzTXqsJzBOd1g1jxnaDd\n",
+       "bmf8w9QPa1iZuCaooLX3HsiDxxwqg8V1Ii/Prb7zR80ZClzljZRqQXFi6gYk3GAA2wuganHoyyt0\n",
+       "vtvY2HA3X2zgvE2O13H6YlMtqlBhut1up/R5+F34ktP6cdF5/fXXMy+3crls9+Zv9Rp1lgzbIrxP\n",
+       "aFrzBuDBgwetTzRZpdcm6qin9QWQcq7mb2mSuHz5coZu9xTVBwUChFAq2aN+veg+BV+MbOfl5eXM\n",
+       "C0XHkG40vQ1UuHhNTk5mXkCTk5O22OvEDU27pVIpk7xXx6Iquocbfb1WI43yFPAVGmHKsvB52s6c\n",
+       "r2zHQQEG4djWtvMihFS139tgeOZgTfaatynhs5955plUWg/Af8EA25uvYYEsedDsCexj9v+rr76a\n",
+       "a2IIFf1DsL1oOvuJn/gJPProo6nfPPbYY/jX//pfA0g7fdMsqFHSHDtcp7yoTJ0XnssAx9C3v/1t\n",
+       "U2jnJuLkyZN2vZqyaJZTp3hudulIT4dwYHvM1Go1G9v60gz1q6rVamZ+N5vNzHvHU9RfX1/PdW7m\n",
+       "Mz7zmc/gtttuAwAzcy4tLdn9mLZmeXnZ7vfFL34RAHDvvfdamp9ut5tJgq3EgSIkBOr1eqbvNGBA\n",
+       "xxHflepwzX+z/ZrNZoYkaLVamfedPs8z93njd9hmKaybukHoehaawXu9npuEmG3qmQy97AeDEE17\n",
+       "ERERERERERG7xFVjpBqNBiqVip361GGYO0tlongK445UTTFkNVRhmqfnZrOZMvMBaSreC+P3Eh0S\n",
+       "Xu42TbDI+3khwt1uNxMi3uv17DPS20eOHMk4FnrUqUId870To8cWab48thvpas3N5Tmq64k61FMp\n",
+       "lUpGnyu1zueRCvecr5V90n7ICzXWMF5PLyU8AanaOVmlsbGxVFkBX/kd8HM2qcmR0DGoz1Ksr6/n\n",
+       "SiaoZtAoEgt6SlaF9jCP5MTERO54UnDu8fpms5nLwI2aoyrPNOY57a+urrrh3jo2yF4w4a0+xwuK\n",
+       "YD1OnjzpOpHTAZzPvXjxohvwEGJQAm2C43R+ft5YgDzFckWeMrgy1yov8fDDDwMAvvzlLwPoj5Nn\n",
+       "nnkGwDYTfurUqUyZC4VCRn9HFbc96DzydN2YVJmM1F133WVlUedrMkP8LEkSM+3xtydOnMBf//Vf\n",
+       "A9hmut98802rE/PrTU9PZ/pN2Vs1D/I6b3yy3qVSKXfcst6f+9znTHKCZVlcXLTfcs5sbm7ivvvu\n",
+       "A7CdoPrZZ5+1+05OTmbcDwatAyFbtLy8bGsRn7e2tmZ9olYUT8uQUB27MMhFXQUU3ngK57Cu0eoc\n",
+       "HsLLytFsNm0u7cbEnpcP1HNfGITISEVERERERERE7BLJTtR2f2APTZKBD52bm7OTGXf/Bw8eTGUI\n",
+       "B/yccvV63T5TtoBCcTwReNATpDrwhUyD5jfK26nqLltPNqP4+pRKJQuFpb+GB8/nJmyX0N9obGws\n",
+       "4zDe6XTckz7t9zwJ6XWeD0UelCVTZ+m8E4H3G4VX5lGR91vPN0/LFJbFy6umuRG9OqofUei/pIxO\n",
+       "nqq3CgWqnxrLwhO6+s2RKVxYWLDTZ5hvTDE9PZ3yNwQGj136GHF+nDt3zr0ny0ofmEajkWnrYrGY\n",
+       "qfvW1lbGsVgDVpRpZN0nJiaMmfGYMt6vXC5bP2n/sp+oEv6d73wn49f3bnDs2DFjvehDM+xkzbVh\n",
+       "fX0948dVLpfxyCOPAADuvvtuAMCf//mfW1vT/2Z9fd38KikETCfwQaDidr1eN+aPY+vWW2/FCy+8\n",
+       "MPC37A8NNvjYxz4GoM/a0E+L91NJBgZjKCND5nxjY8PmGX87Pj6eWadarVZmLKrVQNljj70P/Wx1\n",
+       "7nnw3hH33nsvgD7TxHtr3VjmG264AcC2wj7QZy7Z5uGa/m7h5RFVRfBRmJlBmTx2Wgbtt1CkV32z\n",
+       "lArWkDUAACAASURBVLkKLQ6VSsXWOc6P9fV1W09UFkLLz9+GKutcU7//rnMb/aqZ9thp4YDTwcmK\n",
+       "vPLKKxnnws3NTVsI2FhKS7KhZ2ZmMhsopSa147xIvrDjVJY/T0K+1+u5CUA1Ok2v1ed2Oh3bQKmS\n",
+       "N8Eyr62tZTStGo2G3dujRwc5SHobijBtg24YvIgVzzzLttH2VedhQl+aYWJQb6Onfaj30OeF0P4K\n",
+       "qeSxsTH7rTpmhqmJPE0rz9k5rB8ROmRq5JC+6FUJmM8P69toNOx+el/dWITgAuOlFxkElnXY9WwX\n",
+       "Xjdo4WWb5iXx7fV6Kd0ioP+y4aGI9zhz5kyqXfiyZ9tfvnw5s4HSccwX/LXXXutuJLipUVN32K+D\n",
+       "0kuNCvb/qC8i1tdzhq9Wq3a4Usd9qqHTCf/FF1+0NqBLgYJryE033WQO3uyv+++/P+UgDgAvvPAC\n",
+       "jh8/DmA7ou6WW26xzRXbR9efz33ucwD6pkVNdAwA9913n5m4dF33DsV0Xn/qqacApFN2adYFL8It\n",
+       "TOatStlqDtMNlNZnEE6cOAEA+Ku/+ivbLNFxXNdlnVPsV667GuEYuh0A/c1VONd0rWSdarVaZmyt\n",
+       "ra1l0qjoBl6jnj3oWg+kXU90PoZtrq4n2oahyrq6MnjuN4TXh51Ox9pLs57oxiiEpyPl6VINQjTt\n",
+       "RURERERERETsEleNkdrc3Ewlq+RJeW1tzahz3W2TieIusdVqpUwW/I47Wj2RqrotkGYrPKfVYQ6t\n",
+       "XphnaLJTzRulLUOFWaWXeQ/VJeJJqFwuuznDeD+ewJQN0hOGsmNsQ5Zfdbp0Fx4ybnp6IhOlp3s9\n",
+       "qYeaV0obe2ZBNV2FpxOPGVKGS52g86AO1zTf6UmZbaAUMH/D65rNpssc5JnxFDzJsewrKyvuiUed\n",
+       "OFmW8CSlkh2esj5NT2NjY5kcVY1Gw56bd7puNBojMSVJkrgBBjsFWSgNwuC87fV6Nge4boQsHRlc\n",
+       "moq+8Y1vZJ4xNTVl45f3rlarbmg1oXUK835ubW1lTuijYnV11RikvAwCwPYa6THNhErKkD255ZZb\n",
+       "8KUvfcmeF4JMk5pndH3yysE12isf54Ka+vjcG2+80e5N05W6J9BJ/IknnsiEtc/OzmasC9dee20q\n",
+       "OTKwHcjEfwP9vvRkcPiZrpUhE1Eqlax/WfZh7gRad5ok+R7SscT7/czP/Ay+8pWvANhuD3Xr6HQ6\n",
+       "GR0pL1eih2EmaJV9CVn2SqWSSdLe7XatXTlnVPeN40DXSn0/hutNoVDIMGAeE6bWILUksKyjMrq8\n",
+       "x6233mpmcuY0VFmLURl7IDJSERERERERERG7xlV1NvcEKj2ntUHZxnl65QlDd5D8rtPp2PcaDj7K\n",
+       "rnhQCHNo9/Uc34Ft9oFlH9TWni8V4Tm20jm12+1mHB6HOZuPCq/unjCdh0H95fmghaxXtVrNdQb0\n",
+       "ckYRpVIp5ROxEwxymqcPDR1yn3vuuUybTkxM2IlWJRvYdxx/+/bts3qwbiGrGtaNYyhJkswYG7U/\n",
+       "rrnmmlSOSj5/FGfpUZ1NFxYWzCGbqtJvvPHGSL5Dc3NzmfYbVUJBoXIl/+E//AcAwH/5L/8lc522\n",
+       "B9s/SZIMCwj4TsP0eeFnS0tLxmzs9DQ7NTVlTAWZi0GMHtc0Mghe8MzU1BQ+8YlPANh2bn7hhRfw\n",
+       "v/7X/wKwffIGsnPv4MGDKV+wQTh69KjNZYqT6vqiwsIq9gmknaZDYU7Fddddl2GagO2AhgMHDgAA\n",
+       "nn76absP20PnFNtMndzVRydk3FSixnNy528PHDjgtn84Xm666aaM/xew7VDOIKpyuWzK8ZwDp0+f\n",
+       "xm/91m8BAP7wD//Qfpu3pqsCujKcnvjmTsG2mpiYsLbkXy0Ly+f5DhaLxZTQcghl7D3hYUKtEewn\n",
+       "tcSwvrxHtVrNCKOOKpdQr9exsrLyw+lsDqSpOG/DoHQ5X2hcZIrFok0YLggaxcbvSqWSbTw4gZMk\n",
+       "yZjvut1uKr0LkO5odYDm99pZHrUf0vz6suYET5LENkNsg0KhYPVUnSUOHqV0dxq5pok6WY9qtZrR\n",
+       "NdG6e8mGVeWW91Hzl+ofAX5CXI3aG2YaCTfX3sa30+nkvsD4jJmZGWsvtoVuvFi38fFxa2teNzc3\n",
+       "Z2OM9VFzqm52wo2Pardw06z9pqmOiDxT0aibjWKxaPXjc/UloqZRVT4G+u3s6RWF0NQpoXloEDin\n",
+       "19bWdrVxykOe1tO5c+ds08d1olqtWnk0yIJzUl/OfLFyk9BoNHL1d/KgLgqqneXN61FMGL1eD3fc\n",
+       "cQeA7ei+//7f/7sb/cv7MDpXN1HeJoJ49dVXM5/ppp4aeMViMfOy0ugp3UDdcsstALZNYm+88Ybp\n",
+       "erF8jz/+uNWDmye9Dw87Os/YtrVaLeMyAGzPe477ubk5m/Nadz3QAH0Hfa+PfvInfxIATNvq1KlT\n",
+       "tp6wjk899VSmD9vtth1A+N309LRtoK655ppMH9ZqNWtzlq/dbg9U5A+hkcNAv93C90ipVMoEB3mH\n",
+       "VO/9oyZvQt8BerD2Ds+ce/xudXU14+KxtbWVSZbtYWNjw4IXuNEcNdJwlLkdTXsREREREREREbvE\n",
+       "VWWk1BRD5kVZG80fRkZAmYtQ76fdbttJirtmDfMkdPfsUfceE0VsbW3ZtXoiCZmDyclJ2z3zVNbr\n",
+       "9ex5nklH2biQBUqSJHNqV3OEOk177A53/0tLSxl9Hi95sJ7glEZl+fWkGVK16hSorEzY1l7SUE+T\n",
+       "KawzrwvbY8+ePXYfrTvbkuNgaWkp0180rwBpRejwHmpK1QAJlllNBWE9hp0URzVHagJi9o2emtim\n",
+       "vN+VK1cyzECxWMywhr1eL6Pd4o1Tzww6NjZmbToqZZ7nlD5MGXwYSN/n3V/R6XTshK4ImS0dn2Qu\n",
+       "HnjgAUvKu1M0Go2MuvYNN9xgpkKVZOBJOo/p63Q6phzO+pw8edLGB+utwTAcx0BWwmJUnbg777wT\n",
+       "Tz75JIBtBunKlSsWJKQWh/vvvx8A8K1vfQtAnzkjE8V5WCwWzRmdfz/84Q/jC1/4AgDgq1/9KoC0\n",
+       "CZBrTbvdNhkM9p8GauTleCuXy651hGOb961WqyYboe8XMlE0QbbbbVtP1MQXjivNHUvcdtttqfx8\n",
+       "2k8sE8eMZrYImWaVhdHsCPp+BfpWEo4tdTr31OlDqJ6Trnt5jLrnaqOf5bkeaHJz7gP4t91uG3uX\n",
+       "p7au+QuHud8MQ2SkIiIiIiIiIiJ2iavGSNHpUBkQIO2rRNRqNTttqDMad80qaMhdPU904+Pjdp36\n",
+       "8qhcwCAME9rL26GrXwmfq74qeg+eKjQnWFiudrttLAF9Od555x1rK363tbWVCU0F0qqvLAPvs76+\n",
+       "bqcwzfTO8niZ3dW+7al6e+3G9lJmKmRpBqmsE16eO17v+XMUi0U3XD2EslC838bGRkYFXqF97CmQ\n",
+       "5wm2euCJ88iRI8YEcezqs+hfB/jjN8zgrj5r6gcY+sO12+2Ms77XB/oZ6zg9PW2//UEoLl9//fXm\n",
+       "+6DjzxsbXog+/X3UuVmvD9mwTqdjY4v53N54442Mf1in0zFRSPbNqD4pgxAq2x8+fNhYHWWk+Lw8\n",
+       "5nJubs58Qv/mb/4GgJ8hQPMWKsvKUz3HULfbHclHRP2m6GN2/PhxE+ekc/jKyooxUcQ777yDQ4cO\n",
+       "Adhm+TY3N/HhD38YAIyF+sIXvpBxXn/jjTdMZJTO3+pwzXmkcineGkJsbGzYOPaYfUpFFAoFk5fw\n",
+       "ZAjIiOzdu9fyPtLRX/1U+e5aXFzM+PK++eabKauLx5h61pZwfIwqyaHjQOdUqP4OpP1Dw3Ko/6yX\n",
+       "+5TQd0nIEiZJYu2vATKhGHa1WrXxyb4elgtS22dQ0JKWbxRctY0UB0uo96Hw1LN1AeSA9xYyNq6+\n",
+       "MFQDJHxBFIvFVMSVXh8iTD9QLpdzo8lUK4mdRBPk22+/bQvoMOdcXscXzPz8vLWN96L3kuq2Wi0b\n",
+       "hNqW3iaEbcgJdPfdd9vCrubN0BSr6VG0bt7ADDcbaj5UCpbjRDW5+JlSwOEk0JQ+XLQ0CTLrMWjT\n",
+       "7OkUhe2i5hlF3gbK29Rx4VhcXMxVmyZWV1eN0teNEtvDc9xUnRZPBX6niT95j0uXLo20cRxVBVwV\n",
+       "phVhxJLqPym8DALEgQMHXAd69iGdlgellAo1w5aWluzlzA3IKImNB2HPnj0ZM46WLw8//uM/buVn\n",
+       "9N6lS5dsI8PyvfTSS9aWur6yXW6//XYA/fEwSlLYixcvZlwttP24iSgWi/jFX/xFAMCjjz5q3zMx\n",
+       "Mftybm4uozRfr9ctSpC48cYb7d5cU1dWVszhndFxa2tr1n5c97wUW8vLy9b23DCfOXMmFTQD9Dec\n",
+       "4Qbq0KFDqaTLbBeOVba9biboUH/y5ElrN24Mz549a4cmL0lyqVRKmcIIfsb1ZG1tzVTnH3zwQQD9\n",
+       "jehf/dVfYRB0rHG8e/2vmoScA7yuVqtZu3kRePy3rheqqRjqNVYqFfu3uqV4EfhcO9jmvV7P2kVN\n",
+       "gByjXjYTwltfQkTTXkRERERERETELnHVdKSo/8Bdvac6SxQKBQvpfeaZZwCkT7Ya+snPvDw5pBw7\n",
+       "nU5GxVqpfT0x7CTEHNhmBkqlkp1Ylb0JHbM9LaBCoZBxGNZcRsouqaMw0N9ZK7Ommh78nr9R3Y1Q\n",
+       "VXl8fNxYDI8d8cD7djqdDJugST6VPQnNQOVyOaNbVC6Xje3QBLaER0N7oLnkwoULGSdD1UvymDz9\n",
+       "LM9EyH7TEGyFmmAH1cOrgzqHq+mRyJOPSJLEzLieA+eoeeI4f6rVakYq5ODBg1Zumj92IgkQJi3V\n",
+       "gAVibGws035JkqQkHfh79vU777yTGYsHDhyw8ntMLB2eL126lOt8yrofO3bM+oJ/R1Wd9vCRj3zE\n",
+       "2pKK5KPiT/7kT/CRj3wEwLapuNPp2L/p6E1n7UGg5k6pVLJTO9dSHUPKEHpzhXIA3/zmNwGkQ841\n",
+       "aXG4/qv5K09vCkAmx5/+lkxcs9k0CwdZo0Kh4M6lPO0wlvONN95wdcf4Gc10unby+UtLSym2i/cP\n",
+       "x6LO283NzYwW1Kgm9EEBPKFT/fj4eEYOQtkiSoYsLy+782ZUhHqCGjSjzvCj3ANIz3VgZyY59qea\n",
+       "ez0LFzXqBulIRUYqIiIiIiIiImKXuGo+UtVqNXVq40lkfHzcTgea2Z5MFDE1NWXOZXoKD/PqaQZy\n",
+       "DREl1E/Ey0AdolQqZfItaQ49ZcC8nTefp4yZx56Fp/mxsTF7rseS8cS0vr5uz9B2UaYkzGdUrVat\n",
+       "LjxRra+v286cTEmlUrFyqWxBeJqcnJzMyAWoH4aWP2TMPDar1+tlBEMVYQZ0IM3QsKzqtxJmAm82\n",
+       "m/ZvPZGG+b4qlYorCUC2ZhiDybJ6/j/KSKnDJsviqedz7OQFT1QqlYyfmObu0px7eYyethXbl6fB\n",
+       "er1uddupOGW1Wk3l2AP6jqOhOOj+/fszzJoqUQPpgA1+T5CVabVamYwACq4dR44cMV8bD6zvtdde\n",
+       "i+9973sA0sK9YR8Pk/ag78bS0lJKgXwn+Lmf+zlzztY5Rd8ezmmVP/DAsPtKpWI+O2SpTp06lWHc\n",
+       "tK3IXLz66qt49tlnU9cVi0Xrr8cffxxA2mH4gx/8IACk5CTYb8raPPzwwwCAL3/5y8ZEEfv27TNG\n",
+       "ioxQpVKxMUNhTJXI8HKRcs2p1+v2XN5jYmLC9bPjGsTxpWs+14Zut2tMFNv2hhtuyOSF7PV6KWYt\n",
+       "z++ODFeSJLY+sX91zKlkC+sSSugAvp+jBhTwPpqRILTo6P34LlemyWO99N3Mec/+WFlZyQQMtNvt\n",
+       "VK5IlsXLmxnmqlRZJbbRzMyMfU8GcRSr3VXbSHGh4+LBBXxjYyNjpuh0OpkXwZUrV0yrgw1ZKpVs\n",
+       "EaQTnyaPVLMA76cvPi8xbviy0UGpAyeMJtDBoWaz8H5zc3NWT9LBGsnH+7TbbRsIfJlo+ht17OT3\n",
+       "vB+Qfrmx3KEyOJAeNCH122w2TUeFbZ4kSebFuba2ZtdpfUMqulgsupsg9hP7Q81anLhK7epv2da6\n",
+       "oeBGj9FEN954Y8Zp1UvzUygUbFyGyuBAevEd1Uk7L+WQmky8AATtd6Dfptw4KNXONtDFxNuEhQvV\n",
+       "IOdqItQVA7bbeX19PTc6iJFL8/PzqXEJ9PuH9eAzVldXrVxcD1QPiy+OhYUFW0O0Lp7pgS++drtt\n",
+       "92S7eMrR3kZKgw30vvwtx8TevXttjhDDTA58UZ4+fXpkczpx9913A+jPHW5QqGyuDtBcH6+//npL\n",
+       "T8KxfdNNN9mGUCMJ2W4s/6FDhzKJk/VwwRfufffdZ2rSRKvVykTe/dZv/ZYpeHMDdfjwYdOg4j2m\n",
+       "p6fx8Y9/HADwP/7H/wCQdhhnn3obxHa7bXVSh/qTJ08CSJv2OBZ53ezsrNWd9w4VuwkeCDj+xsbG\n",
+       "bA33Nl6MLj179qy9s9gv6prhKdPv378/E4A0KNCD/cUyXHPNNVYnbvq8Q9TU1JS9B7h5bjQambro\n",
+       "hlEDL8L0bcVi0dYndUchlMTg55wXhULB2p1rm2r46VrpHWhHSYmlAVicv15UcIho2ouIiIiIiIiI\n",
+       "2CWumrP5wsIClpaWck0hZCHGxsYytObdd9+Np59+OvOb0FHwyJEjxkTkae5sbGxk5A8An14MHbOD\n",
+       "ugHoM008leRp30xMTOSaGd4t+Ew1k3ph74Q6ovP0NUxhmidfnooGhbjn6UN5jqp66g3bWvP0qYNy\n",
+       "WNZBCaVHhZdomVCanGOCiUdffvnld6WnxFM7x/358+fNcZYnw3a7badnPVHTzKvhw5pfCkgr3LMe\n",
+       "PAkryuWyfc8+nZyctPryWRsbG8Z2KDvxwAMPANhOZKume/7d2tqy8rG9VbWZa8SFCxfsN6o/x+ue\n",
+       "eOIJ0yMalnyXLBbHhppviA984AP4+te/nvpMy882GB8fz7BUx44dM0Ylb407ceKE9cOXv/zl3DLn\n",
+       "4Zd+6ZcA9E1j/+2//TcA2yfqpaWlTHLZW265xU7fXH9mZ2ftOh1PnHusr7oZkJkYGxsz5kJzn5E5\n",
+       "VLaTY5bXX7hwIeNQfscdd5j8QeiuAWxbHFZWVjK51iYnJ+0Z/M5bB+69915j4Nh/yqLwGadPnzZJ\n",
+       "CbapBjZ5yEtE/26Rl7R4fn7exhPXrJWVlZHKMTs7a+3GMbG1tWXriAZKhea7QUxYHvtEqHxQXkJj\n",
+       "BdeJ+fn5lLYkn8k1gd9NTEzYGCTbWywWrW68/vLlyxl5I5Y/OptHRERERERERLwHuGqMlPwbwPbO\n",
+       "cWtry3bFaq/kDl/z1hF0CiuVSmYH5YlT/RT4mZffystlF5Q581xFmM9N8+rpLjsU99KdN31HNjc3\n",
+       "3ROE55TuQa/zTi88CdLn5s0333SZjTAfWbFYNPaJ7To5OZkSuAP6tvZR1bzD5+opwWPO8iQCtO78\n",
+       "reZQ4zhZWVnJOIBOTU1ZW3k54HTsECzD3NycsTAasBCeGIflj+MY37dvH26++WYAwPPPPw+gf2qn\n",
+       "BAhPi1euXLHTOpmB8fFxG9OcC61WK5U5AOj3H5/HNiVzC2z7eoyPj2d8BWZmZqyvNacZnX51fJKR\n",
+       "UHYp9DtTBXmWqdvtuj4leej1epZnjozU0tKSmzstxNTUlH1/+PBhAH0/ktDHR520OT9qtVrGR256\n",
+       "etrqpNeH4/bEiRP4qZ/6KQDA7/zO79jnw8Z5iH/7b/8tgD6j89RTTw0s8zCwrTg/Bq01rLvKUChD\n",
+       "C6T9l+666y4AwLPPPptZk375l38Zn/3sZwHAGMUzZ87Y2OKc27t3r+XdU+hvQtDi0Gw2bW1Q6wZV\n",
+       "7L/73e9avUK2ZWFhIcN6AdssK/3oVOLFu45jUjNcsB0XFhasjzSgh58lSZLxv/LafBB4nWbZIDju\n",
+       "R12zPezfv9/GCts3SZLMONH1XaVnlIkG+u3iWX7yRH/z3o/VajW1/gP9ecl1Ud/VXlsOY6SuatLi\n",
+       "QqFgLzmNHGKD6CDhgOKg1Mpy8KpGkm6g2HFe1IOqq+ZtlvLSlWiaFL0u1LTSBJAKdqJGwnCTw9+e\n",
+       "P38+5aBO8MXI9hjkWKwO/BxI/Ds7O2u/V0o6fOl3u92MQ7JuOngPTSTJZ0xOTqY2WkC/3/gC4gKu\n",
+       "CsRehJ73gtF+COvumZI8rKysWH95m0rWo9frpVISsR7sLx2XnPSsx8zMjLUp+3BycjITAVcsFq09\n",
+       "uEE7evSomfZoTrty5Uqq34G+KYP1UAV2Ljy64eNcCTfMWg915lRdonBBbjab7tjOc2BnX5bL5czm\n",
+       "atAGIoys7Ha7qQ0r5xAPJUtLS/YC0ojJ0Myvmyz2jTqa837z8/P2IuN64plV1UxIExGAzEbgrbfe\n",
+       "cteWUTdQVO5mJBodzRWDNlGe2TrcSDWbTXfzzzmgCMfE2tqatT2jru+77z6LlmO7cRMF+M6+xPLy\n",
+       "sns4zdM0Yp9rUl2Fl8IkfPkvLy/b/NbycWx75kt9d4UBC4VCwfqN5nQ9xLAvX375ZTu4bmxsuJs0\n",
+       "VVrnvVlGrS+vY910k8t+27t3r208VQORz83baGn5VSeQbahtqnp0QH/usQw6JjnuuD5WKpVUgmV+\n",
+       "p+9/fhY6uTebTTPp8fCpavG6gQv7mm2Sh2jai4iIiIiIiIjYJd6VaS9JktcBrADYAtDu9Xr3Jkky\n",
+       "B+D/A3AdgNcBfLTX6y0Fv+sBaVVv7jTHx8ftBMVdZ6vVsut0VxwmYvWgitV6+hhmqgvBnXKv18s4\n",
+       "wWq+Ob0+PMkdPHjQduE8vbRaraHO3ESeojYRatWENHqtVrMyDMsHRo0T1v3MmTOpxJVAWseHp1Q9\n",
+       "3Su7E9KylUrFThv6G++k7OVdzHME1wSg4Qljamoq49y4ublpz+DppF6v27/JEIxqbur1elY3juOb\n",
+       "b77Zxiw/q1arKRMX0Jfs8PI98dlkXQY5ZIZje//+/TZ29NRM9oTtqM68CpaZp+xCoWDMMPHcc8+N\n",
+       "nAVgp+Bzp6amUqZ4oD+W2A4XLlxI5eAD+v3GEyXXlUqlYvfMUyBXcx9x7bXX2umfv1Wzj6rnEydO\n",
+       "nADQH7sMt9frqfD9p3/6pwBgTuo7wac//WkAwNe//nV85jOf2fHvQ6hej7Z1CI4xNW8PA5XIuY5d\n",
+       "unQpsxYN0twK10BlVkJTm2J6ejozl4FttpAMxttvv211JzunSeRpXtc8gPzs1KlT5jRP1k010tRc\n",
+       "yrVI8+p58NZ8tvnCwoI7Z8m4kM0Ctk2Xu0GokdfpdGztUL2scP57795Rc20CyLgjjLq+qFI622p8\n",
+       "fNx12eAz+F5bWloy5p/rXrFYxOnTp99TZ/MegA/0er07e73evd//7LcB/GWv1zsO4Gvf/39ERERE\n",
+       "RERExD86vFtG6jUA9/R6vUX57BSAn+r1em8nSbIfwNd7vd5Nwe96Gr4eIo+R4Hd6kqR9dWtrK6OU\n",
+       "XSwW3Z1s6JiWJIntvHkC89iHnSgWj+Iw6v12YmLCTt484Z46dSrDynjigEDaH4rtEfosAWlGQsNd\n",
+       "88rKk7xmQ2efqJDdqAKVeWHCKiypAoFA2u8nLw+e+lmNehLKg5cxfnx83MYEx6IKSvLUqQ6ZmuOL\n",
+       "p0r2ZaPRGIltVbAdkyRJ5WcE0n49bMc9e/ZknHQ9X5Px8XEbgyz79PS0/Zbii6+88oqb7y/MTq9O\n",
+       "/ZoRnvXkuFefFhVZzQuL9hz8B4GMpSo0hw7FhULBys+yFgoFYyoIZczZ5zovKQHx5JNPWvnV1+OX\n",
+       "f/mXAWwzpv/zf/7PkeowOztrLNAnP/lJAH1H+f/4H/8jAJ9xVh+jML/ZbsA+mpiYGHnOh3jggQeM\n",
+       "KVU2yZM9CKHrcV6+SWBbUoR+ajqWVH6Bc4/3q9VqNjYefPBBAMC3vvUte66WMy/cX1nyUJS0VCrZ\n",
+       "tWTQL1++bOu2ClVzDU+SxMYi1zZvHZ2cnMwEoCRJYmNV5+OoayTnDdne5eXllHh0CBWvDp3NAWTK\n",
+       "on2oczoUk56ZmUn5wQH9NYZrGa9bXFzMZVaHYZiz+bvdSL0KYBl9097/0+v1/t8kSa70er3Z73+f\n",
+       "ALjM/8vvekB/UudpN6k+UOhEPD09nTGxjaJcyvuNUu9Dhw7ZQqUvT40EAdJO8yzLIBMQBwLTKKjz\n",
+       "qW688qITFKH+0sTEhLVVqVSyMmpbhlFx4fdAegEYNVpQEepvTU5OGn2tuh/e4ptnwvQcwT3odWFa\n",
+       "AU1QnaelpdFunNiTk5PWRrxueXnZruNz3377bTM1UNX70qVLmQ3r2tqavfDU3MhNrs6PcJHWF/io\n",
+       "zsnEgw8+aGPmtddeA9B/+YftevfddxvVTUxPT9tvmVRX9dW81BCsx/z8vM2NUV+8o0bMNpvNkTdS\n",
+       "bF++BC9fvuxGZrFfOR8bjUaqLnw++1VfZJ45muPjnnvuAdA3AX70ox8FsO14/LnPfW6kOvzkT/6k\n",
+       "JQPmi/nnfu7nTI+KJkLd1GlbetptefCuZ58cOnTI5rem6vDWd69dvIMD56hmpAg1rWq1WirtDcvC\n",
+       "DYqHUSPdiGPHjpmyOMfs9PR0ZqM6Pj6eSjUCpNuea4i2hZZF3VuA7KYoXJv1/RleEz6HGPZeUR1E\n",
+       "lo9ruKqoh22n7xXeQ98/uiZw88eyDEuAPGoEK+9XKBQyUYC9Xs/mq268RiEQ+Pv3MmrvgV6vdz5J\n",
+       "kr0A/vL7bJQ+vKdSBxERERERERER/5jwrjZSvV7v/Pf/XkyS5PMA7gXwdpIk+3u93oUkSQ4AyB7z\n",
+       "sK1MnSRJiiEapkTN3WmhUHAdRcOQeQ2Z5K5TdV+4O9ZdMU+h58+ft92rJub1zGkhnTxIe4QnCDJR\n",
+       "8/PzVl+eEg4dOpTRRPEU2r220lOMR9n3er1cp1HtB4LP0HxKRL1et880hFXz8wF9swVPjGxzzSem\n",
+       "rBf7mH2kQQn8bHx8PGMiUiditsPhw4fNZMIyeWblqampVMJZoM9WhCHHg06xKhdAUAfHC4nX3F3h\n",
+       "qXLv3r0Z81Gn07HxtFsTCrCtkTQ2NmaZAcLk1IpqtZphIi5fvmzmFmWBvVMdf0MThZcIHEgHn4Nq\n",
+       "/gAAIABJREFUlgD9PuJcVtOOx07t1DQ1Pj5uY5HjwJurLAd/A/TrGJqS9DRO1Ot1M4nqGApV0aem\n",
+       "pswZ3ZMu8EDn9MOHDxsjpSYinszDZOLAtvL+q6++mumbYcEneUExFy9edJPQEsoqsD10XLEtvdyn\n",
+       "nqlOE8eHMiOnTp0ybS7KLqyurhozrAmhmYiZzuH1ej3D8qrVgLIFs7OzVg+at3Ve6njimnTvvX03\n",
+       "4ieffNLWBq4X1WrV6sv5ODk5mTLTh2NMEwVzPLfb7Ux76e+8vuGcVIVxWmI8tigsB5B+r5DNunLl\n",
+       "isuKhZYjTUZNJEmScTYfhmHWG2+/QEZaMz5wrpfLZWxubqLdbqc03jzs2tk8SZLxJEnq3//3BIAP\n",
+       "AXgewBcBfPz7l30cwGPe7/niLBaLro5NRERERERERMTVwNjYGObm5lCv14dupN4NI7UPwOe/f0Is\n",
+       "Afhsr9f7apIkTwF4NEmSX8f35Q+8H3PHy1MRd8WD2CgV/uPf0K5eKpUyDmXe/ZrNpm3e1CnNc3Qj\n",
+       "PCc+9bMJ/UPeeecdKx/DUM+ePZuxz1+6dCnjr3PmzBnbjbO+XkivxwBce+21rggiN67VatUNA1VR\n",
+       "NgBu9vlisZgRcdzc3Mx1tGff1Ov1lKJsWC7tJ57seMryQms3NjYyJyMVjyM0lFxD0/lb2s01XJ3Q\n",
+       "cG72mzpuq/M1fSK0bmxnsouvvPKK2/YE61soFKw8O1X3BtI+JfyrGdmBfjZ5jq08P6tqtWrlV2Yj\n",
+       "FEsEtv102OZevrbNzc0MY3L06FHrD/pr9Xq9jFp8+O/dotfrWV28kH1lT/g8jt1Op2Pfa54xZTGB\n",
+       "4ZkSyIrcfvvt5i81qnwA876FvmtAv1+4BupY01ybIdQHRsc0kGap2G/eOBkfH8+Uv1qtpoQ9eQ9+\n",
+       "xvabn583tkDnaCjg2+127TfKtvHfynR4QpXKRBEco2SNtc/V75VrHP3Yjhw5YuOc87ZWq9naoEE0\n",
+       "LMuTTz4JoP8+8OYPn8f+PX36NK655hoA/TntvQP4ztC5SYFQ9r8qeHNsTE9PZ/IleoLGlUrFzYPH\n",
+       "9x3Zp9nZWVuzdA3musj28xza1RFcLSOcU+xDDepR3+W8vQPX97GxMesTluXSpUvGduuc55jIk0YJ\n",
+       "cdVSxOzfv999eXlRcaq140VhDYtwCiPDjh49ags2v5udnXVTDOQ5JedhbGzMfqMLT6jgWi6XXafC\n",
+       "0MlZlajZ6aurq6loCCC7ufJSxPD3XIgvXryYKYPqfXAy0+EyhOd8PyhyJkToHFwul62edNZmItNh\n",
+       "KJfL1r6cVHnRoYPABa1UKllZQvXcYej1eraxYDt66S2A7TE97DARQueKLpChc2ipVEol9OQz8pw8\n",
+       "+dL8sR/7MdPMYTsOck6mng7HyfLyso01tmmv18skOi0Wi+6miWDdisXiDyxqL8T4+HjGGVjnHNFq\n",
+       "tczsyrotLi5ae/Eltrq6an2T186/8Ru/gZ//+Z8HAPyrf/WvAAx3qmXk2NGjR/HHf/zHqe/2799v\n",
+       "LwLPyZzrim5yWY9er2dl5mfr6+tWN03cTrCt3v/+99t6rol9PfNi6PCsSWZ1roYm5UHR0YSu8xzn\n",
+       "v/IrvwKgn2qJpmzP4foXfuEXAAB//ud/bv2rqU7Cd4u+kxRhmWdmZjKHJ89R3QuAOnLkiG2eSqWS\n",
+       "bVrYvnv37rX2CjeswLZrwcbGhrW/FzDAsaHuDXkHPn03eO9HzvXDhw/bRobXPfvss1YW9lGhUMi4\n",
+       "h8zMzNh1Ow2k8cbJIP0q1lcPQCwr5/LKygpefPHFmLQ4IiIiIiIiIuK9wFVNWqzObYTqPunuOWSd\n",
+       "1DmPUIqd3ynzpWY176TE79///vcD6O/8X375ZSsD0D/10GmRJ4PFxUVjEbxQTT0BjSJr4DmRjyrZ\n",
+       "oLnlgLQKLtDfXYc7/EqlYqc5MgOTk5MZBqJarVr5RzU5sd/q9XrmZK6MCtut3W67rI+qWwP9ccIT\n",
+       "Mh2VSZ2H4JggzbyxsWF1U6Vx/pt0r56O1TRK8NQ2NzdnbaQ5+W6//fZU2Z9//nk3UGEUqFmVp6jJ\n",
+       "ycmMvICe7tj3tVotE0RQrVbNIVaduXlK/OAHPwig3x9kb8laemOoVquZSYJh8Mpy8r7lcnnHDGEe\n",
+       "NJiEwSu7geqDeQ7t+hlZHY4n1Q9SFfjQDKFznuP4xIkTNo6+8Y1v7KjMGpTCspRKJbufnspDNntt\n",
+       "bS2j0+StL5pTj99760uSJMZw0y1AzXgsn8pkaEYEjhVdl/PYE2XJhjnJA/05yjbguNd1huajAwcO\n",
+       "pPI0Av22oplHncM5B7w8fBpgErovKCMVthmQtrB4OnuelUGTfo+yNg9j94jJyclUYmrWIxwrKomS\n",
+       "lyB7ZmbG2m1U5p0M1+TkpL3/uY4qy0TLycbGRoZRq1QqFmjDvlSLDv9Wq9WMM/zevXvxzjvvREYq\n",
+       "IiIiIiIiIuK9wFVjpK7GcyMiIiIiIiIidor3UpBz1wiVtIdtrEZx+p6amjJacTfpQHaayDgPk5OT\n",
+       "FoFCh8FRFYSBbbOmOp3v5PdESAOrM52nmq1OhKG2lJbHU0Vn+QY57oZ1m5qaMiduTV2hGjFEmIRS\n",
+       "ozrUjEvql/cdlFA6dM73dLM87N+/P2Ma8BwZPcdnLYvS997YpukidJ5X7Nmzx0wEOjZCk8ggZ95B\n",
+       "17OsYT1YTq0rzapJklhZRp17w9qcz2WKHS8YRO+zsbGxa9PeTjDqOvGDXE9GxaguAD+I5wBImeZD\n",
+       "M8kwlW29l85/3jccR6rX5T1DP+NYVidxLzIsfA8VCv8/e+/SY9mRXY3t+37kuzLrwSoWu5rsbrTU\n",
+       "bGnQgiYaGEJPDPiDPbM9+wB75oHH9sTQ0L/AQxseffA3MjwSLDVktCFIsNSAukWqKYqkim9WsYqs\n",
+       "ysrXfXuQXJHr7rMiTtybWUy2EGtSWeeeR7xOnIi11967WYlFyL/zeT5t1WQyqcy3rVarEvF7MplU\n",
+       "pB3tdrviUcnif563YbZSY/3b6v9Vseq70Gq1Kml2VN1i9c15nro2JkqvXZ8kfy0oKCgoKCgoKIji\n",
+       "2hgps/MVIXbmLNKEGI3jUeS4nR8eHlbcvFXckhhyVsutViuwMgCL6/D8n//850EkB/HtKowS6otr\n",
+       "1LXdbncpzxiQErKrmDzKlTe20seuCcwGhxfgXZt3y2dAiKliqSjHgpgw0o+Jfr9fCanR7XbDteh/\n",
+       "jvvF56WYKN4p++fmMjDcnnwPH3OLc4pBEDydTiv9+vTpU+k0ocqOiMxvv/125XcVEJd3dCg3JwcH\n",
+       "sDseDAZBsIvfWQSsWDf0Qey9UGymgmIQcpESlvM7oAS+KfCYXTWn3TrwjImZZnRzw3fU7ehz5ko1\n",
+       "1yCThS8z52fD+bGQLmaakVJzDsbGfD7P6jtmfji3pS+DYpoUixaLgaYYrlh5fLlSuAwbpUIP5Vxj\n",
+       "dl535Wi1Litbxwqp8czH/PuqLBg8xnLnmhiubSGFQeiDAY7H46UM8GarZWvGRxiT+unp6ZV4CbFp\n",
+       "CWXmlxkdgEXCw4cPQ8C2dZ6PeqSuvXXrVigDFp11HhtsOgPUyxOjOP257IWFMvPCUi304Kny6NEj\n",
+       "uWjyg5kDSvLz2VPFTJt+Xn311RBEj+GfMZ/Pk+MMk0RdX7K5qs77C8ACTnn/oF9ff/11GVBQfax9\n",
+       "v02n07DQQmwuDu6Xm1kAfcRjgz258M7hA8QLKbQLjw0fiyYGTsKtFiOYJ9ZBqm/UpJo70e7s7IR3\n",
+       "cdU4OLlQC71Y+Vb9QK7zQVGLTr94SZlh/LUcZyh1DNfg/eX3mNPH+GerRY76MPNCSn3M8e7xOOTy\n",
+       "+TZQ5kN13iq/p5Br7ostSlJAnVVAVrP6/o6do87jJM3K3KfKGtu8enB911n8FdNeQUFBQUFBQcGa\n",
+       "uDZGColo/apvNBpVdnC3bt2qiJwXi0XY3WIFeXh4GBiDq4pVo2j5GINjdhGThWPzrIOc8g+Hw9BW\n",
+       "OUyUhxIPM03tBeiNRiO0P2IjsUlJUbtKQI1I2WZVtnFra6uys1ksFoF14lg/PknqbDarMFwoZ6zu\n",
+       "bKpUOxDUSUWpV0AMmtgzWYSKMgOqz3Gs0+lU0guZaZG2Ys/A1v3sZz8zM51yiAEROcf+Qvs0m83K\n",
+       "Lvzo6Ci8j2DWGPjtxo0bgWXj2Gsp9pn7XJ23aq7O2A7dswTMhPI7kNqpYp56+vRpiFuDschm58sI\n",
+       "0VNMZ13096tCLtOUMjkyk5CqEwvM1TzmGQlO84Lzu91uOMZ9qeqB35n9UkJwtDM7nXA8N/6Xy6nG\n",
+       "a8y8nmK2cpHL+CjzVx1UeimVhogdklYd8zifx7V3Poo9r+5Zisn1/ZTT7oWRKigoKCgoKChYE9cq\n",
+       "Ns+Nrgp3c7Nl8R1yD4EFmEwmyaSL6+zUcoXEOee9+eabYZULhmA8HicjwaZwenq61DY54JU3CzH9\n",
+       "74vFIjAbHMnY69fMLiLP8nkAMwdgOVBmJW5MCdwZrVYrjAXFEmFMMKPCuxSvz1FMSKvVkm7KHsxS\n",
+       "8DhQmgy/k+J3IKXD+uqrr7J3pJ5ZYWeCv//7vzczsx/96Ef27rvvmpl+LxQjBXCbIJr5u+++GyIC\n",
+       "4x1UOrutra1KmAR2UeddIH7n/kW5mLWMhW2IIfabaodUWAt1Hx7PnDDbbHm8q/eQWcrUfKKer3bt\n",
+       "LxNKY+jHNjMqimnieykBeux8fz92IuH/m120R7fblWJjzz60Wq0kY8R1833J9eDrUqL5WJ3wDKUF\n",
+       "uyqkxm9dOBo/Bnlso83Z0SuFWN1S9U0xZnVtVTdHsFUmF9e2kMKLB3MFBsx0OpUfZIAHLUK9w5Rw\n",
+       "79690HFojLOzs2B+Sk0yMXG1/7jlLv4UptNpqO/9+/fN7Pwj4s2Rjx8/zurEVDJUs3OBMrCqqHYw\n",
+       "GMiFAD5knBmb+y4FnwBWUfoqWebm5mbluPK849Q6eBanXsAiiF9u5fGFReJsNgv9gPtxHSGgPjs7\n",
+       "C/dOLYp5IaUmJzgqqJQXk8mkYr6FedzDO0OosYQUJb5OABa7t27dSi7W+d4+FYZZVUB/eHhYGUNK\n",
+       "RLqxsREWUFw+/M3PSi1e1wH6OragUR88HOO2R10wng4ODpbGI4Bn5G7aUqJg9RGp88bKNYNcxpMv\n",
+       "ZdrLfV7M/KIWUAB7PaMflKccH1OCdtwHc8d8Pq/Eeot5TAJqUaQWdfyvX9hcFqoP2VPSz0+xMYnr\n",
+       "8a7wnKrm2RRy31k1tvlYrmC8zvlnnTmkmPYKCgoKCgoKCtbEtTFSSDCMFS2v7j2lPxgMwu+cUBAR\n",
+       "j+F2rZKkxlbFnh1RO/tOpxOei9XuzZs3w+46J1km4+HDh8EMxiJdPBvHcilFZu7AcG1vb4dVdkxo\n",
+       "7aFW4Jubm5WYTGYXK3g2t6SE8RyLRcU6qoslpMplpsX1e3t7gQ0BWxeLD+XFqAymdr0onccJxtDp\n",
+       "6Wk4T40jRir0xKp0dWznhDEP0zebwVDmX//618H8yewigPcM49Xs4j3jcY9YYP1+X5o9fb++ePFC\n",
+       "jkswZHi3jo+Pg3Cb74ExhHJtbGyslcXAQ7FKuVCmKe5L9Mfnn38e3lMO1eGZktzns3kJ7Xd4eBju\n",
+       "g/vW3S/lop67U1dsDDMDdeY5P/ZjcZP8e7tYLEL7pkyz0+m0whzF2Arch8uMY5hLZrNZYEVxXxUW\n",
+       "RNWdheNKElDnxr8quC35OcrUueq7hHbh8qVE4TGmLuebp8zqsTHkz1vHfLgKCiNVUFBQUFBQULAm\n",
+       "ro2Rgv4AK0LsnjgSNVaz+/v74TzsPnZ2dkIuOzAiL168CLsDsDXPnj0Lq1PsBieTydI1MaicSB9+\n",
+       "+GGINv2nf/qnZnauo/rzP//z2jqfnZ1VooDziniVwKMe2OE2m82gS0m54jNWWakrDZA61+fO4tAE\n",
+       "jFT0Z5zPfcish9/F8P1xn42NjbCL5HJ6FsBMh2/AMaUBYHZURf1W8L+z5g5jUWmfRqNRZcyA1TXT\n",
+       "u1iUvdvthjrx+/H7v//7Zmb2y1/+MlpeDuMBJop1UxgPzWZz6R02O2dCFBviGUwOIsqR2lPvA/p0\n",
+       "c3PzUu8NwGODI83nilb9DllpKW/evBneUw7jkFP+WD5H9D+PxXWZNX/vy17LGh8OB+BZKm4/Pl8J\n",
+       "89W1Xl+nMJlMpKZJMRy+/RaLRXh/eG5QuQC9A08sbEGKbeP/c31XDarKUGX1keDrxNdKl+aZZEau\n",
+       "Ni/mRKO+CbnjUjkx5QYCXQfXmrSYqURMruPxuNLpSLFithz76NNPPzWzi06cTCZhElQf/JQQuNls\n",
+       "Jj2VGPi4wAMqtRiLIbcTsRh69uxZZWJmYSSLIPGy15keYyY9PA9gbyKesM2WTbGpcpldmIbQfltb\n",
+       "W6Ht1AKEI2njA+ufb2b24x//2MzM3nnnnUqcITYBcvthsckLBR+FvdlshnGpxJ4suMRLuupHfT6f\n",
+       "BzMVTHDD4bDyETw9PZVmUD9h8OSlTJpcPowPtYAEZrNZZUJT9+UyYQx99dVXlXpwGg2+L8bBvXv3\n",
+       "zOy8bVOx0dhL7ao91XIjkdctsnyssC+//NJ+8pOfmJlO1eNjoDFUrDezqpeVOq/uA8n1yTWJ1N0L\n",
+       "91AmGG/uUSYZPs6LKxWfKQez2azihMEeelwW/9zRaCT7xNeNy8dIeSazyN1fe9kPfa7gOtcbj8/H\n",
+       "tzL17sUWUX6c85jFOB4Oh9nfVe/1OplMpIl91fbMdcIwK6a9goKCgoKCgoK1ca2MFP/LYQuwQ+Zd\n",
+       "ghcZxiKHr7o7xcp6c3Nz5WjkdStmv8OIrdDBokC8uLm5GcrF7uMwp6Ctms3mUmJnADt9ro9aXadE\n",
+       "jaPRSO6Qvasp7zoBzmvFv3mmj5kNvq9ns3q9nqSOmYkyW44Zlqoj3xvY3NysMJbz+TxpjgS4vqvS\n",
+       "7/P5PNDjqaTJ4/G4shtXO3klcmaGBc96+vSpvfXWW2Z2bqozWw4VgUjyx8fHUjAOcFnA3nFoBVVf\n",
+       "lBFicjbxg3G8c+eOvffee9H78Phbx+SxagTnXKgIztyWnoniOYLfs1w2QYmI1XkKOSb9y8Yv8vOA\n",
+       "am82f6l5hb8VPjRBbjJiNs9x5HLOxQegjOiPOhOv6o/Uecqsxoixe7lx5NTz2OnH7LwNchwKYt8I\n",
+       "zJVoIxa0K5Msvkntdls6J/lrV7HyqFANypkoJnTn5/I1q4z7wkgVFBQUFBQUFKyJa41srlaLzWYz\n",
+       "uG3j31arFfQcn3322UrPqBO8sfaGg26amd2+fTvoVlJBQmPAThSr3n6/H3QfnCcQO3Oczzn04Jq+\n",
+       "ublpb775ppldrNbff//9yjPb7XbYJcTqrVbrPqxBu92Weh/s4HyeKUaj0ahc+4Mf/KBSXmY60A+s\n",
+       "R0hF+jar6qXU7vTmzZuSaUQbctgCVRf0jdJmYZyMRqOVNRsMsIrof1UOFi+ntDQM9FW/3w9tyG2O\n",
+       "HRyez8/gZ6Wew+MB5cbYrruW74Hz0C+3bt1KhroA1mWUVr0ul8FSu1jOAqDu6xk/7i8+T4mDUwE5\n",
+       "+Ty1y/bXqrky9n7nMDTqucq6EBNk8/P439TzUvAOS5PJZIlRMTtvAx+SIHZffx4z00BsXvA6rHV1\n",
+       "aLlQlppVn8mOYf5+rHcGut1uJXyQYpqGw+FSAG2z5TyXl2GNVR2Vo0LdNXW4toVUjKpk4SnEyZub\n",
+       "m6GT8LFRph6YxsyWB4BaBN2+fdvMLibps7OzIOzGfbrdbjCxYUAsFhepU7C4m8/ncjLEMSwI/+RP\n",
+       "/iS8xH/3d39nZucLQ9QXH6Ber1dJn7G9vW0//OEPl57BE6ny0PLn5kCZJtR9UiaFyWRSufbRo0dB\n",
+       "SMzOA1gUoA34o8OpBnydNjY2wqKazQG+H77//e/LhRTKDaH36emp/NioMeqFveualwD0MUcY9s84\n",
+       "ODgIcb1YhJ+a7DGeub3xLjDdz4tnf4z7I/UxNrt4X3lhiHHJC1e0Fd4FJeRHfKo6rBNDKpbFIIV1\n",
+       "UlKgPzFX8Xks8Ee7YtGuTLxKNKsWIDEzjSqfWjTlmDVy5xJl9ufrvTlPlclflxJur/MB9PdbZVx4\n",
+       "cyV//FNzSKxdPPyieF0Ta2wxkWPGVddOJpNKLDi12VFeu8PhMLz3qW95nZeicpBJyU0YlxXxKxTT\n",
+       "XkFBQUFBQUHBmrhW0x4zKrwTAqsAsxqbDTgSut/RcowhrJBTAjOzCxZoOBwGJorDKWCXyKJjPA8r\n",
+       "6pOTk4rZ5eDgINwbzADXjV3AvSlO7UgPDw8rZs2Tk5PKNTHBcp2w0OfLi5kD0MYpYTT3KzCfzyti\n",
+       "84ODg0pUbe5XFhF6M0+KsmWwgFzFZEnFqqljmlIuwnVQlDnYol6vV2H8WBjL4z61g8bY5bpx//r8\n",
+       "gdPpNIxt/ywG9xHat9VqBeYVjNTx8bHdvXvXzC4YKc5RhnuoiOhHR0fhuGIsUmLdOtSxDnUCVQ8+\n",
+       "hyOMe3Z0f38/zGm8k1bPUHkL1XNxHjvr+LFTx2aoeE5156WgHFqUeY5NNykmR92Px8TLYBhWQWp8\n",
+       "ctwsZd6MhX7wx9YRm6eQ42gQK1ej0ahIBWJzkWfW2TrETJQPsRIrC5KkY95hWUXMDB2rh3JoUN+9\n",
+       "nDyHhZEqKCgoKCgoKFgT16qRMlveNZkti/2gQTo7OwurQg7i5XNx5bpMDgaDigZjNBot5SQDsPJG\n",
+       "GIJOpxOuxY6+3W6HMrNOywvL//qv/3qJZTOL6xK89ok1V971NAepnR7vWBXLwrsK1Fnpznhn4K9t\n",
+       "NpuV8AJ8D/RlDGhLlJ2vBcPBu3z019tvvx0i4IMRY6ZAue9yW3jbPwuoeRe1qssst7O/ZjQaVQTK\n",
+       "z58/D3o9hMFQ/c+Ccbw/29vbQSfGz8K7x+J1ZsDwr9dh8Q4NTAhrBxlgp1jTgL5m0a/aVfoMB+wA\n",
+       "kSswXgd1rIiCj3avWKCnT59WjrGwHOxTp9MJf6tAmww8jxksFQE9hVXHrJlmKVL6Gr/z52PM6KTa\n",
+       "WwXrrMOq7OIq8BqvmNBfaalS7Ili766LdVN14vkd4HAKXG41blPvugLmp1arFbKKfPjhh+H3XD1U\n",
+       "Thsq60KObu5aTXsMXlh5c8vZ2VnoHI6HgckLH5iTk5MlIa7Z+QSOa/FhSUU4j0EJ4vDh7na7lWTE\n",
+       "p6enYdLHZHd6epqk6oHhcBgWAij7ixcv5EJvHeSIDPll4MmcqVwcA5T3D5cf8Gl8+LzT01PplQYh\n",
+       "M65hkxP+ZpMTx99KxQdTC/hUNNwYFa8+CjmILaTZVGN2PtZyXmiV4DdGTWP84hm5XnbsmAHTrPI+\n",
+       "w7lmFw4X7JWpYgGlYpeNx2NpcrxKk4dHzrtiVv0QxD6qHmdnZxWzNUdmRp/zO6VMScrBRJnO/HUx\n",
+       "1HlM5ZrW1XW+PdiEzs/17acWUny/Oi/FqwRHRa9b+PhFf0xu4hdmvAi7TD2UeSvWr5ib67wJU+lb\n",
+       "uN/UOMLf+G5PJpPwTebnKUE5solwfbwpm+fyVTcJ647rYtorKCgoKCgoKFgT3xlGipESwvEKGDs4\n",
+       "/Ntutytiv1gMGrWiRqTsN954w8zOI2arWE0ezLao2DfYjff7fXv33XfNTJvGeEeCe+K8OjYCDMz+\n",
+       "/n4ogxLxxtgnlbMNz+Q2AsMA84xi93j3i2u3t7cDM8f388yG2UVMKZjims2mdA0HO+Hd6c3MXn31\n",
+       "VTMze/jwYaXter3eUj49My0sZ4aLQxNgp6SElrkmIG4DlUMPwJjY2dmpCPPVeWonXMfAQiDd6/WC\n",
+       "2ZB3aGpXqZi63FhrzOSaLUeOZnOeNw9Pp9OsZK8vE8r8rnb3fIzLz0mZzc7bHHMFx+RKMcR8b7wX\n",
+       "KtaaYgH4finGbB3niRSD5M/1z/NotVqyzACzx4o9yR0T6zJXahzG4mHlsB18LZ+vWPI6qPv4svB5\n",
+       "/FvKxMbXKgbWs108d7CFCN8nzDWxsntWPtZHXh7E7QYoluoyISA8CiNVUFBQUFBQULAmvpOM1LpY\n",
+       "Jc8eGAtonzhyOFbCsSjquBbggIeKAcNq/MmTJ8ldOzMIOavgRqMRRKb7+/tmdi46BXOhcncxoOvi\n",
+       "MoExYXaH9U5oY1zry2N2znBAZMzu9EpXpQTK2LGAkdrd3a2ETjBLu8ryfTnXGdeHr1W7GMUqqABw\n",
+       "3FZ1/eZDDphVo8UzE8Zs2yuvvGJmyznxfFlUvikuH4T3CO7p6wlwsDy0Jcp5dHRUYUBarVYyArna\n",
+       "wSuWlfPw4RkYa4eHh5VxPBwOo5HvPVLi3Tphb4q1YfYEv3Mfol8Hg0FoS3bfxr1VqAmA24pZVNR9\n",
+       "XWcHRiyMh7+3cpDg85R4nP+fcnZhYC5i1kaxBVcd+ToXKe1Mnc5JsUFKA5f7vFXO4WfGwPov/55y\n",
+       "P7BmEXMk5pjPPvtsSbcEeL2xsiT5v1N1UG3JZfX3Sr0r646H3+mFFD64MPesEpUWZiZ06rNnz+wv\n",
+       "//Ivl44p3Lx50/7dv/t3ZnYRfXk2m8mPG/Dpp5/W3pexymSIRVCdCRIDnhdXMAvwQiplZtra2qp4\n",
+       "CXHqFP+R4PPYIwzY3d0NbQO88cYbFXH0YDCoJGdutVrhhVBmMr6vvx+3Lybrs7Oz0EYo53w+ryyg\n",
+       "1IeAoZIcrwrV9s+fP7cHDx6Y2cVCSsXcYqj3Qi2kMCa47Dzxoe737983M7P33nuvYi7c3t6uLKSU\n",
+       "CTUXvGDlyOt+cj05OQmi1TqoflcmWTa/qvhGHiyMBnhRxLHXkCA6ZaY1W45HpZ7H90VZzfJjmilP\n",
+       "3ZhI25vsFDg9Ct6ZWFwktZBSH1Kc5xdUfJ5KC6WwTqy32H1QFv+R5jmJ66iE9EDKu48F7bkLxnUW\n",
+       "AiomV51J0SeWN7sYgyAgdnZ27A//8A/N7OL79PTp0zDfIMacmVUcgtRzX3311eAwBtN4nXlOLdbr\n",
+       "FrQeOXNXMe0VFBQUFBQUFKyJ32lGKmVKSKHRaFQYDrM8xujNN98M5/3VX/1VuA7MC1a4JycnYdWe\n",
+       "y0S9TKhVtXKZVuYsH1vI7ML0Y3bBdikTC8fG8VCxo95///0gyAVu375dYa7YdMcMDu4qJhMBAAAg\n",
+       "AElEQVT5zjvvhOf6+GK8m+QdFe7J4wpmJdxjY2OjIg42s0qEaYVWqyXZJs73Z3a+K1Ju/l6c2Wq1\n",
+       "ss1BAMq+tbUV6oRx0O/3KywAQ/UXzt/b26uYX3m3BwZrf38/nKfKp5w1ONaXGkd1IUVWjVuzikTA\n",
+       "bHkXWxf3CfFvWCSO67nuaC/lnMDPVbHAVi0zQzHX/pqYWc+b9pilYnmAn4tiIm0F9QzFAnms4xKf\n",
+       "WxYgxuj58qnwMLHzVmWk6kzUfB6banEsNZ/wfXPL8stf/jL6u5LO+PALZhffmufPn1eSzceSDqcE\n",
+       "91y+VBul3j2PwkgVFBQUFBQUFKyJ32lGSgG7e46a7MGrUOwCe72eFDSDpbh3756Zmf3DP/yDDIyJ\n",
+       "XTHvttbJTP+ywGwR2oj1K2plj90p/uV6Y8cc28VgR4D2Yx0W9B+s08GuYzQahV0HWJ633367cv9W\n",
+       "qyUj2SNwJ8rHZePoz16D0m63Kwwdh4AA+P98bx/cEteb6fxgDLAJvItKsUn8fL/zqgvF8N5775mZ\n",
+       "2SuvvFJpP6WlYhYs5UgRqxfGHfrjzp07lfeMtVRgQobDYRgzzJIopozHdkp/w/2BcqvAjrxTX1Vv\n",
+       "pPqN83DiPBaJ4xjad3d3N7DYeFd5V8zsE8q8SpYDLrfZRX0572edeN33Q6PRWMpiYLbMSKVEv8wW\n",
+       "1YXayGEaFOpYBRWdvC7PphLSKzG8Oo9/V2XA//G3CgcRK5dHnUs//uY2quv/VH/hGFtimKlV1/g8\n",
+       "nSofrr+PqifKlnJoqGOalPa2Dv/mFlJKtAjwAEXH4QPe6/Uq8Yhms1n4HdfGooun4rnUwaeDiZkq\n",
+       "VqXvY/CRxXnQcvoOTzVzGSG039zcTH5gldlDCWhRpuPj48qCYzQaVQb/97//ffvNb35TuY/yJgS4\n",
+       "3fwz1EszGAwqomq+B5vfFFXvP9aTyUR6KUI8jvQHX375pTR1YGypyYSP+Y+cqhsnFAbFzu8MFjvs\n",
+       "PYdFFYvcMV6+/vrryuR1eHgYhOB4b5QnqUrZwyYbhh9rPllq6oPB9VMTceqjn5rAUx5s/Hu73Q7v\n",
+       "nPpQQZj761//OhzjDYiK9Kw+GD7mmkooqz6uqXb08GOqLtJ3ykTF5r6UqUu16VWZ6fj9Td2TF1ne\n",
+       "I40Xk0pEDijzZsxElbqGn51rwlLjjuMwKe9fIDebhWpLfoafKxeLRUVqw04EyoOQr/UyCH6GQt0C\n",
+       "aS3B/spXFBQUFBQUFBQUmNnvCCO1vb0ddmOgC2OiOM8IdTqdEPUbu93T09PgRpnKw2Z2wVip8AJY\n",
+       "oe/v79dGaY1d2+12Q3RvPOvRo0eS+UL5YZYyu6gvu/EDygzC9wFisTtUHq/ULhYYDofS9d7vdnin\n",
+       "DBPr8+fPl9zFAew6wMopNgrXeygXdsXqecr5+Pi4wgLOZjPJtimRtBqjKXMvmLputysZBNyPTZSA\n",
+       "ElJzvkGP0WgUGDDg5OQkuOdD3H9wcBDGFP7tdruVsAuj0UiOc5QV4/nRo0ehnTnZtN/tct3YNOb7\n",
+       "rd1ur2VCV2J+gHfvPhxInbs1H1PZAvh3/7zf/va3lbLgmmazWTE9q919r9erMNo8ZlNmXz6mGHYV\n",
+       "G4l/U6Jvz4Qwc8HsDtfTg++bem6KTaljmoDYOX4un81mYeyo8AE8R6SE9Fw3P17UmPL/XzXW0qpm\n",
+       "Uu6v1HvG5cCcuVjkRUpXuSNVrLIYY5qKgK7GTkqQX8fexVAYqYKCgoKCgoKCNfE7wUj1+/3AwoC1\n",
+       "YRdhDqDoV+gsZFXanBSazWYycB6eG2O1lCsxmDWIeReLRdj9gYkZDAaSkfJB0HhlrVgefi7vSNUu\n",
+       "AbsCtKliRcyq9uXxeCx3u2rn6Hf/s9ks1B06ncFgUNlRDwaDEBKBmT//3N3dXdkXGBOsMVG7DJ/b\n",
+       "bTabJYPpMXLchpvN5lLgRNzPB/3s9/uVIJnj8VgyUYASSONfxeSMRqPwPjBz5cfW7u5ueAdwP6UD\n",
+       "jAm+0a8qij73S6r9WAyrBOHrINfNW/UXkBK3qnHCbKXSXymWVAU+VGJZL2L35atzhcd5eF5d7r7U\n",
+       "ferOVwyxYkWU/suzT3zey9RSqesx77BWyj9Pab5i8GOM5walkVJCdTU+VXlic7TSCa7CzJjFvztm\n",
+       "y98VLp//ruQ6G8Tg20ONNYa67ypj5ju9kELljo+PKyagg4ODIBiHMPbs7KzSIavEmoL49oc//KGZ\n",
+       "nccOeuutt8zsYnCw0BaINTgWf1gUzWaz8LGEuK7RuIiNwyLwlChQfUhxD6Z72bTHiyI/EXe73crH\n",
+       "dz6fL5kzUBZ/b7UoOz09rZhOYl4n/kPFdXvttdfMzOyjjz4K3ngACw+Bg4ODihkr5lGlYmh5cTiX\n",
+       "zydI9lBtnlr4YEPw4sWLyj35I8a/YSHIC2XfD3w+C8L9R7rVaoUxyOmSMFZx31ikdi/qn06noY3Q\n",
+       "l5yMmJ+L8nP50FapJNgcQRxjcjabSYE/93WK8gdik7Xqw9Sky/3hP+zcNw8ePDAznVRbtdE6UMme\n",
+       "UQaex7hNvYA6Nxq4ijCe+7FT1/DiVC1A6kTpVyVCT8HPmbw44Q2YlwKocaxiS/nFTu4CYNW+8/XJ\n",
+       "QU6MpU6nE37H/D0ajZLOUrzI9s4VdRuCmGNJDi47Xoppr6CgoKCgoKBgTVw7IwXWhnOyYQWK1ex4\n",
+       "PA47VbBPg8FgiYUxW46/oxgY7K7n87mkrvEMsBofffRRZfWcu0Pknbxa7eJZ29vbYcfCkaaxgsd9\n",
+       "vv76a/lsmMbAcLRarWRsn06nU2FjZrNZpYwqz5iKq6Ncq82qkb4nk4kM38AJaT3YjIf2Urst3EOZ\n",
+       "9TiaOO+e1H3QlooVUXVjtlNFf0+ZhXAetx/aZzKZLJmzAIxLDpOgWEOAd8qeqeEwCQhlofD111/L\n",
+       "fsNz1ZjEs9rtdvgdJr2NjY1K+3pWyUyLXPf29gJjxjv6uthtyoRRFyMI9/bxwWKOI3gu95dPAM5l\n",
+       "ffjwoZktm7KZZcN5uAe3fS7Tw2Xx4yQmslexz1S4gjqxeuy3XHZJsTbqeo7+zc/KNQGlGB2+X6q+\n",
+       "aFsVsiOWx06ZAFNmwboyMK4ipyDDf0+m02lybgN4jLETUIq55Pv4hNyrIMUGAzEWUJ1Xh8JIFRQU\n",
+       "FBQUFBSsiWtlpFh4Bkai1+tVtBbs+g2cnp7W5tgCFCOhtBTYKf/jP/6jmV1uZb+1tRV24eo+OHZ6\n",
+       "ehqYEDArw+EwsAA41mg0QvlQj16vF9qIhcq4Rj13a2tL5hlk7YTZcpBJFjymGCnenfi+2draqujV\n",
+       "er1eUsOGHcne3l6yLRH00efji0HtslkMr8S3vFNRZVYBOXG/27dvm9ky8wN2iRkJ1uEpRgp9o8IZ\n",
+       "4Hx2rsC/XFfWNngWha/Fb0dHR0lWJCUYZkYKUGzffD6vnMfXpoTlHKzVLB3WgJ+XgpobVD0Vg8Tv\n",
+       "jNITesZsPB5XtCDcD1wmxban6sd5y9TOOxVgMSc8hD/mHTNiYmc/TupYI3UtwAJ0Pj/X1V397c+r\n",
+       "Kx9rpVKi7pQYPgdXoftKtWUMSgdVl1PSQ+WWBGJhLYBVv8NqjKl71913Fa3ZtS2kms2m9Xq9SpqK\n",
+       "09PTZITcq4L35FosFtkRw1NCO5hdVN0YTN37Dx5PSmwawzXwYHv+/HnSG0t5TyivDrUYYih6VJkP\n",
+       "UgLqFy9eVIS7SvCtoJwIGMosiDJPp1Mp9lTicbQXv/R8H9zXv5ztdjv8riaW1Edd1WsymQTTLlPi\n",
+       "aD/Ul6OOA3UUO+4xnU4r9RiNRpV2iaVlUR/adSf62WwW3kPe9HjTWGzS5jHrY/twjKK6CdG/17km\n",
+       "QOWAosra7XYrTgF1805dvDNfN+VcoUwnLAQG6kTuPIekzEwpMx6b3bgNUuYetZnha1GPXJNn7gIu\n",
+       "FzyvKC8xBRXDSS3guF9zvoOxBVJOzCi1wIuZdlMLKN4Y4J7Km1TF/UqZ4vga9S0C1LXcLura1P1y\n",
+       "UEx7BQUFBQUFBQVr4toYKezEFLvjV5TdbndJjG6WTykOh8OKmYR3W0okWnc/mM4QioHj9OA+z549\n",
+       "S+40wXpsbm4GAT2XAfXl+E5gAbDi7/V64T54VizXn0okiXZRyXd5J6LYJ6azwZ4wM+TDVRwdHVV2\n",
+       "QcqE0el0lkwmqToBqaTJyq2dr+EdiYoOj2tYcI3o3xDDK8cGs4udNIe68G0wmUySZhRmx9C+aNNG\n",
+       "o1Fpm8lkIqOsA3XjHGXg91IxFygXx4XyDEi32802v/txsFgsKu0xGo1kOALeQXqTA7OFDMXkpGLZ\n",
+       "KCjhtsqHB3D+SoCZCG+mN7sw7Y/H41A+hH158uRJpYwxd3lfXy4zv2/KZJPj6h4z8cZYqdgxFWqF\n",
+       "6+AZCzaXpvor14y3DurM3P5bFRO2qzhS/FvKrKmYsFTMKDUXqWtjcdMAHhucqQDwmQH4+pRjSMyJ\n",
+       "KSWTARSLphwQlFRFHctBYaQKCgoKCgoKCtbEtTFS/X5/yR045ZbLub2wE55Op3LXh92aj9RtpsWj\n",
+       "uUwUcHJyEq5Fjjyzqpty3U4cO3ne0TO8PqzdbodngMHqdruBfUhl6479zs9CnTzDhefgmNphQBsF\n",
+       "PQ/vCNAOzWazwp6wfkllB2emxusbOOBhXRRmXJtifFS0cw4lwc/wfdbpdEJ7gZEwu2gXMEkxga+K\n",
+       "Xo1nKH2SdwKIQeVGU+NAaRrYpdtfM5vNkuOJGRaEN+E8gl5ErrCxsVFh1E5PT5fCfKAsPBa91k+9\n",
+       "3zGBstpxe7aw1+uFvlbl5zkJ17KGB/2Z0qpwm2Pczefz8B5ytoXcgIw5u2ylfVFMSAyqTn4e47EY\n",
+       "Y8/4fF92dnzx166qqX0ZzFSK+eF//XzGY5LZoJTmKlek78uz7rVKqM56Qu+sw3kw8S6YVa0Bqt04\n",
+       "JMqqfVMX6kC9HynWtW6eNbvGhdTOzo4dHx8nBdkAT6iYxHZ2dsLEgt9brdZSdHCz+gXNqmg2myHi\n",
+       "Np71+PHjyuS6TiJVhlpg+U4+OzurJC3m+FqKlmUoQbMyb6Fu6qPa7XalF5k3zymwaFWl02HvTbXQ\n",
+       "ZrG82TK9rBZN6gXCh284HFbG4mw2W5oAzM5TpqC+7BUFKFMXMBwOK/2q4uCYVYXlDNzj9u3bocy8\n",
+       "2MX7gEXiZDJJmj+U2aPOI009F0CfjsfjsLFhB4gf//jHZmb2zjvvVMqCZ7HQnydwbIxQt1jKoxSU\n",
+       "J5UysSlvQk6wzB85ZSpUqZxSZmg/0fMxNoOzeVN9ZFJxn9RCnkW/qs1z5rJms1k5T4212AdaLYL8\n",
+       "MbXYUAuumHlL4aoE57iXX2Dw5smXiZ9fJ/5OPZPBpmxV91wRvFrkpNqVI9uruVxFelfjiufUnG9I\n",
+       "rA6+nnw/JeBXzkSrPL+Y9goKCgoKCgoK1sS1MVKcEJbRaDSCe7/ajWO1qFiQ6XS6FA37KgCzB0Sf\n",
+       "N27cCGVAQlY2H8ZMYh5Yeff7/XC/uojaKbDbuDK/KVdTNkNgh8/snxdVb21tBSaCV/dgSFgYraDo\n",
+       "U880qThIKhq7Ss7L9wNUPB/uD5iKuO3Z/IK6oP2YUWLzlxIZ+/68devWUkRrs+W8hNx+nmlUrMve\n",
+       "3l6ITYU6qVAMzDTwOPDJkhkY70qwzjS+2uEq93zG97//fTPTjBSzCn5ny+xgnQOCmltUH3H5fWLl\n",
+       "mCu536GyCaPOycHfj0MnpHbqXGY2l6fMFUCdOTU3hhIzwH48sgmQy6DMborNULGnUuwTn5NryrtK\n",
+       "9kmBGXZ25MHfbKb37JkyAbL0Bf/PKUMqk4LqoxSU2a2O3eP5lcOtmMVDwfhvZV3oEYXUu6fqYVZl\n",
+       "gWMhRepQGKmCgoKCgoKCgjVxbYzU4eGhbWxsBME2VutHR0dZzEyuW3MM2J0qd2Cg0WiEXTCYi0aj\n",
+       "EdzZwc7EVtmplSxE4sfHx0GQe//+fTPTO3VGXR0Vi8DaIaWD8qv1wWBQYfdOTk4qq/7pdBraklkU\n",
+       "9A3vNNQx1B19PhqNKvWLBWxTeg7fFyxeV6JBfkaKQUQ5nz17VtGWLRaLpbARMbAGi8cu7nPnzh0z\n",
+       "M/viiy8q4nU11p88eVJhMXhMqGCZnCcQ9WUdoWpzP87q3i1mZxRz9Itf/MLMzP7gD/7AzMx+85vf\n",
+       "VM7pdruV92c8Hof5ggXXCnU6FLXr9P0ec/P2z4gJ2nN38oByQEg5SjBSfcJZCrhdlMbP/6bc1lV9\n",
+       "WSfGZVIsi2epVD66Otd+f34ML5uF8s9SWjTfd61Wa2UtbavVygoUyfMiwzth1LEsueXi++DeqPt4\n",
+       "PM7SKLdarcq7t04IAi5Tzjsf0455Z6ccDea1LaRGo5GNRqMw0XKcKCwEMGlubGyEidOnSWFsbm6G\n",
+       "a9Axjx8/Dg2S+jhA/M735g6uixgdq2MM/FLAC++VV14xM7M//uM/tn/5l38xMwtJWs0uPubodP4w\n",
+       "wyzRbDZD+ylPCdTLbLkN2Yxmdi6qxkvAJg+fTFXFxmGBMoNfMLPzfoX5lhPxoq1TCxtVDyV47ff7\n",
+       "S6YaD743Fi3qeSrGF8quKHQzq3isPX36tFInfumxoGJRemrCe/78eSWBsvJE4aTUPA7Q13gHeTJh\n",
+       "8xGQKx5mU6p6B1B3jPsHDx4EkyfAnppsfsUmJmby9AtLjv6uki6zuU8lYlVek2yWNztve7W5UaZE\n",
+       "fz/lPGFW7RuFuo+NMhWizGbaEcd7O6qo7b6seIZf/K0iqlbmOyUYXnVR/21Cvas8J3E7+jZaLBZy\n",
+       "sasi1tdBLVRznE3UoqNuUcJlTS04riKCeCy2FFDnpejrzrIPhbrvO6OY9goKCgoKCgoK1sS1Ji02\n",
+       "q8ZT6nQ6tr+/b2YXMaFOT0+DAL0u2jV2f9j57+/vV1zij4+Pw3m8a/c7ina7XTEHnZycJBPtArGd\n",
+       "HKASB7/11ltmdu7WztGrcb4X3/Num0XQinFSMZkYaAc87/PPP6+Ib/l31K3T6YQy8A4Y7Z9KxMrl\n",
+       "Y1G3Mi8BLNz0OyC1I5xMJrI9AG4LlWsP4LJD4K12fCqvHfebL+NisQjjHQzN/fv3Q5tz+Xx/LBaL\n",
+       "SjgFbmcwYjzW2ATJzCvup6Iwe3f/mPkAQN9PJpPwN7C5uRneHySZvn37dqUN2CwEppiZutjuXJkD\n",
+       "FXwU5tjOW5lg0G48DyjRqh/v3DfsYOCTqquYazEo1snPd41GI5wHlkw56yioOUzNbdxfHOvJm+fM\n",
+       "quzUfD6viKE55InK8Zfjkv9tQ4UKaLfbFTd65fwRw2XqlGs+VOJ2FR6D+yF1T8UGs7xCsWNKpuHH\n",
+       "Tiy2lAqD47/bsXAfntnm8q2SmLkwUgUFBQUFBQUFa+JaGSkVeXsymYRdKf41u9BJYAXZ6/Uq9vyj\n",
+       "o6Ok0JqDjGF3j5WtCrVwdnYWrsV9T05OJBOCnTfnw1s3KOijR48CI3X79u3wDDALSujNu1+1ggaj\n",
+       "x5HZgXa7vZS7CoBu6cMPPzSzc4YD7cTsCHYEKXav3+9XdugqOzjvUtQuQoU8AGJRaTmgqIcS+MbE\n",
+       "tLFjHHWaGRCM31SQUFWXjz/+OPzNjBMYLhybTqcVwTDrknhnpVz2lfhf1TGHpWLwe6aCyHo8evQo\n",
+       "sGcMH/7g5OQklNUH4wWUhsbvYnu9XqUvOJI6n885+8x0SBF+bkpzocIunJ2dVcZl7B7+Heh0Oknt\n",
+       "Jr+XOKbaX2ly1HOVzk6Vm9kWFcLAa3jUM9V7ycFBmQFkFit2v28LfhywO30qMKYPdQAoIX4KSkvF\n",
+       "LD+PbdVeqfx2zCqpnJb4FuF7l8uqxnLjpebcVLBRHicAtws/y89PrPVbhQW8toXUcDi0/f39MLlg\n",
+       "kRATrHlPvmazGSZTjuCs4BOxDofD0NCxFC0AOow/Xv63drsdyoKPXb/fz0pa3O12bW9vb+k3TsSJ\n",
+       "wcjJbTlBcm5U5y+//NLMlhdDQMxUg2sAfjF4MGJRxX3kJwVeNPEAxQKZP4hq4eHp21gUZjVBqUi7\n",
+       "AMevQh+jDxloZ6aruc04uWzsGYPBIEwyWNi+ePEi1F1FmmeoTQLGHcrHi3bc58aNGxVTspqAYp6w\n",
+       "7FgAeC8gs6pZrdlsVjYR0+nUdnd3zezCvNTpdOR7iL7m89G+d+/eNbNqm6A9UpM4L2i4H3w9uK+5\n",
+       "vVKUP8aO8oiNxT5jj1Cc5yd9vpYzF6Q+GGrhE1ssx8BerznJi7nMfC0/yy986tLQ1Amk+T7499tY\n",
+       "TKXM22yOUsJyJa73c6YXSKv3S22C/AJVxelScRxzFw5q3Me+RXgum9KV2W1VE6aKD8Xewqm+UWPH\n",
+       "l2dVFNNeQUFBQUFBQcGauDZGyu9A1WoWq9idnZ1K3Jj5fB52fWr3yeJKL1DlyLxq54Jd5WAwCPcB\n",
+       "28K7CaYAPZ0+n8+TeQRx3mAwCLtirI4///zzsDtNCdY50rg/bqbFsF78CyiRIdoNrBGzC7xz8Kaz\n",
+       "2WxWEfNzH7E7O8rFkb5Tu4QU1a12M7FQDB6xaNeePbl165Z99NFHS+ft7OwssQkATKOff/65meld\n",
+       "o9myE4TZsiCb2TmVQ8u/R4PBQLK3Htwu7Dig4K9nsbESfaLPOSZYKn8dx/9iUzXGG9qRBdKxnasK\n",
+       "t+DP5fKr90eJYfm9SLFAKTP+cDgM/Yrzu91uCHHCbIHfUTNr7MXLsboqti3lIh5jkFLxsvgeypFC\n",
+       "nacE43Vxjfy1gJoPlCj920BdGAf/LjC4zAA7hCwWi8r7qQT+ufKBVqtVYa44Qr8yg9WFQvHncYJv\n",
+       "duqoG7cp5AjAlfQgxj75erCJ1fdbCoWRKigoKCgoKChYE9cqNq/TJ2F1XRfFGGBRLQttoV/h4IVq\n",
+       "dwXdEv5tNpthRa3E0Cyq9KtYFd6AAXfvO3fuhHpCk/TkyZPKCr3VagV2BEFH79+/H9oGuqc7d+6E\n",
+       "Z7/33nvJMgDdbreywue2VHXnnT/KD7aLd9Qqhx8YE9bGcKBAr+Ng7Q63S45mgwXPKq8ew0doVznF\n",
+       "eKeJ83kXyG2EXIVgpDgsBPprY2MjlIcF1D7MBK5nKE3T8fHxUnBT3M9rKXg3y/2m2tSzrRwdn8/z\n",
+       "u7tY/jXf/tymKhwBjx28w2AF9/b2loLW+h06u9b7gJtmFyxcTDOSE0hQ7aY7nU5lzKqo/ePxONvN\n",
+       "2ge0rQMzUeoZXpcSYwWg4cS8wnVj+B08a3yUAJ2vyxVSqzJ7/U9u8ErFgq2q0fHP9VGzFTOlHCBU\n",
+       "GXw4BT8+9/b2Qt3V9yb1jJiTjWKacqPse8aM2Vnl+KCekRqfijGNISWaBxSbqcZijmbq2hZSfgJE\n",
+       "o3W73YpZiL2TMOkvFouKEHQwGIQPMhYYL168CB8WfEgPDw9D4+CY2cVAwMd9PB5niSCVFxBH1FbA\n",
+       "Qur27dv29ttvm9lFEmSzi4GFj/Ht27dDe+GlQZodswux89nZWZjklGj6yZMnFU+/s7Oz0L5s4lML\n",
+       "j1RiYpWskidzb2rggY37KTH3dDoNf/Nix0+EPIGyByH6GmMntpDy97tx40ZYjOBa1UcqdY7/218D\n",
+       "8EJKxdoBOIYSwMJt9lz1/X52dpZMDYG2Ojk5SabRqaO4vbdLt9utePKw4B4YjUaVKPDsRccmPbyj\n",
+       "uEesL/mD5idQZcaNTdAs7DZbTreizsM8oCLIpxK1+//7cVznWJJajClzSizhsRrH/iPNkfKBWNR7\n",
+       "tUhLmb9yBNyx4+q+CjlmxDrkmiOV156aF2ILTL7WX6ekFmpTr8yGjLrFi/8tBv8N3Nvbq2QL4bh+\n",
+       "LNfIMffVLXI57ZtPBM5IediWOFIFBQUFBQUFBd8yro2R2tzctF6vF1y0seNvt9uBreF8Uz42xWKx\n",
+       "CAxHKo5Up9MJrAK784PB4fg1HGoAz0gB13Y6ncAWYQcZE50iWjvq9umnn4Yyg+3hcApYFX/44Yfh\n",
+       "PI4nhGOIO6UoWzPtQs4rbuXiXhfXBPdQK3awTsx+eepXmZJiiZRTJifFnKFMm5uboSxKaM85Hjm6\n",
+       "utnyeEHfMAvIz1NhHBC5m9kK31Y8TvD8/f39pRhqXB+z5RheYGsePHgQng9TIsOLUVlwnTIBdLvd\n",
+       "Sr5JxY7x9Th/a2ursiPk+mLMHh0dZeegY8bKrMpIpUyO3A9KQO1Zz+l0WhGvc7vx/VJOIXUxppRJ\n",
+       "kZ0vzJbzgqlwH/wuY15S+RfVM1WIhdRuPMaq1ZmpAM+OqvAHKkRATJDtn6VMgKpMV8FM8f2azWZo\n",
+       "N2ZnPfPH5auLE5UKj6CsAvN5NR9hDL6NVMT6xWIR3geO25gy46LubHFi+DE1n89lPMTckAiYDzl+\n",
+       "nnLIUPdT8QmV41UdCiNVUFBQUFBQULAmro2Revr06dIqlv/Fbgw7PkTW9vCRmRViO0XsmrHD7fV6\n",
+       "Szot3D8VQA/MWbPZDLtrMGwx7QY0T9/73vfM7DyKNXadzJJhNwl9AjMAqk64B2tulIs9ymum2Sve\n",
+       "5XjXcK9rA7w+hHc2zEigLihXp9OpsGMciZwBBoL1Muxm748BJycnS3nNPFRwTdxjPB6HfuUgpriG\n",
+       "6+1zqJldjAVm+1A3pSEDsDtjjEajJS2T2XnYBTyD2yVHw8N6GMU8cvgQ3x+z2SxLK9dutyU7BvC1\n",
+       "nuFiBpOZM896eSgdkRLu+x01sx1cLp/fkN8Z3tmirEqXwno9FWzW91e32w3vs9qp4x5cV37PvK5L\n",
+       "uXQrp5m6CNd18EwTu9grMa/SSPH//Xnq/BhbkcNmXDY0gi+f0oHxcWamUkJ3r/nCNSlrQF19U1Hf\n",
+       "/XjxSGl9+fmexYxZK/x5nIOSx3uKoePfUuz4qsdWdWYK59ae8ZKAj5SPOn10dBQalTsW52HxcnR0\n",
+       "FBYMKnZMCkwlY8Jqt9vhYw3zW7vdDpMqR2F+5ZVXlsp8eHhYmZhjiw6kfGH6HfVQpiIMnNPT0+SL\n",
+       "j84+Pj6WHc8xiNTg9gsQM7PXX3/dzMw++OADM9NU7Ww2CyZWtBGL79l7z5vO+GOOMqlYRhwLzD/b\n",
+       "TMcHY9MIyscfdTYrmZ2bobBY4jGJ+/EH1XuGsicKR6n3wmieWFRsLuDs7Kwivmj2V+EAACAASURB\n",
+       "VOZ6AgcHB2EhVZciKOWJwgtg/3FQmxRlpsX1ZpZ0duDzeDGBscHehXhXPvnkk0odYqY/H+NNmTrU\n",
+       "woIXjKlJmql/ZWZWYE9EZRb0Hxa+Fy/QeRGMcnoxv5lVzMKLRTXNj1n146rMSBxvjM/z40nFh1Im\n",
+       "ttjmLmexxP2mTErqud82lAehL19sMQlwmynPcCC2qOeUOmZxc5+Pws/nqIjpKacJ/q7w2PHv+mAw\n",
+       "CPfkxPa4ht8Vv8DnMcZm+NxUNFxW/BtzkjC7eL/Vt9GjmPYKCgoKCgoKCtZE7VKr0Wj8r2b2n5nZ\n",
+       "48Vi8dNvjt0ws//DzL5nZg/N7L9cLBbPvvntfzSz/8bMZmb23y8Wi/9b3ffg4MDa7bZ98cUXZnax\n",
+       "+uv1ekthCszO82q99tpr5wX+ZnX429/+VuYeAyOQSpyoVqHPnz8Pu0/shPf398P1YDV2dnYCW/Dw\n",
+       "4UMzW3aJrwPYLlz75ZdfBraD3Tdh8sEKfWtrK5hTcEzlZONdMpuIWECvqFV1L4SQYLoVfYMyTKfT\n",
+       "ym6czWkq/x6HI+B+N9Nmlel0WjHlMLujKFj00enpqd27d8/MLISZ4PYA+LlsluSwDDH0er3wbG7H\n",
+       "lIs4t61nVL/66qtK/kXFJH755ZfZiVpTCYq5jjhPRfeugw8zETO/4V1SsW/4GvRDyozo4XeTZlWW\n",
+       "gENiABxiRZnBWWiPuSUVY4qZK95Rp+KDpVgUdX4saXFdKAQzHRaCx2uM7VD/9+crUbWKh5QSoMcE\n",
+       "46k4Umwa8wzXZc14uVAMXI7wncFmWuVYAHS73fCu4dul2CduI2a4cA3/5p1SWq3WUp48fz+WvigT\n",
+       "Me7jM0T4uvvk6yxoT7Ufh8HhdlOJ1v3fuWMix7SXw0j9b2b2n7pj/4OZ/cVisfiRmf3im/9bo9H4\n",
+       "fTP7r8zs97+55n9pNBqF9SooKCgoKCj4N4laRmqxWPy/jUbjgTv8n5vZf/LN3/+7mf0/dr6Y+i/M\n",
+       "7D8sFouJmT1sNBrvmdkfm9nf+vuOx+OlwJhY4Q6Hw7CKxe70s88+C7tAFueCscCOlfO08e41d1eC\n",
+       "50KT8dlnnwVmAEE9Nzc3w71ZP5HzjFu3boXyoT4cxgHsjWLaTk9Pl4TxOWCmQwlJ0W4xfQfvQAC/\n",
+       "44+5Pe/s7JjZclR6r+dg9kHtvNCm4/G4sgNi3ZTaMWCctNvtCpv0gx/8IER9T0WLbrfbcvcMsA4r\n",
+       "pfvh37w2i+/Luijo0aDH+/zzzyuZ1Fk/9eqrr5qZ2fvvvy8ZIZQL/aLa/vT0NDCOHFLEg7U+PO5x\n",
+       "DXSMHD6CoXIGAsymPHr0yMwuWObxeBzKF8sfh3syu5TSSLHeTeXQ89fyO6+ip/M9/LX8f8xdPGcp\n",
+       "V23WpfjycRgHRioXYIrdiYmDc+EZiZhYOCVAV+VjxkkFFvXn898pQfs6SM3zrL1VjFRKy6fuydaD\n",
+       "drtd+X08Hof3mJ/h9bp8DH3D+iVmnHzfcfBVfr7X1XEfcn95J6FYtoNUtH6+rwproFhoAGXh8CEq\n",
+       "3AefvwoLD6wrNr+9WCweffP3IzO7/c3fd2150fSJmd2TD263l8xkKPyLFy8qAtzd3d3g7cYxYTDI\n",
+       "MOGaVcW7N2/eDJM+04o5mM/nYXHzxhtvmNn5x/Nf//VfzWzZNJHzkh4cHNjf/u158yDG0DpYVVxv\n",
+       "tvziqphN/jwW36LNX3vttZCaA1D0/GAwkGl90Df8oefFEqBMLH5ws9iZP2JYNOGDd//+ffvVr361\n",
+       "dO17770XrucX0y9AWq1WxTNwY2OjshBkWluB2wgLE9yXr8PCiGM3ceR9tAFoch73/DeuVR5fapxy\n",
+       "xHmMd3aG8ItYHkO4H8eWwgIzltrJxz7jRZH6kLOTg/9wxMxOKfF3THzNHy0z/Z61Wq3wDB5rqcWL\n",
+       "ArefMon6j4NyvJlMJhVnHV+GWB1VdHKOv6M2T3VQEcZ9GVLiapSBy+nPU+Yjf5/YgtA/e52FVWrR\n",
+       "3u12K9H9+TxuW286VwmBWWzOpi6+r1rksJeoP0+Vgd8VL0DnMqjYWMoRgNveb8SUGZwXL6lFDpvL\n",
+       "MbfxN8SbIPl+dbHU1LFVzMKXNrstzp+SetK3Y5wuKCgoKCgoKPiWsS4j9ajRaNxZLBZfNBqNV8zs\n",
+       "8TfHPzWz+3Teq98cq+Do6CgI41qtVtid3rhxI6yQeXcCQTfvXr076HA4DKtm7KybzebKTBQDK3O4\n",
+       "zk+n02RsnBQ++eSTJXPMtwlFf7KI1+/WFFu1u7sbGCkOz4BdB5gS3h3zM9BfHGfIiyXNqmEDGNiJ\n",
+       "PH/+XO4YwKSg/DChmF2YnJ4+fVoxxTWbzYq5qtlsVsxQMdOH2lkCuAe7sHMCXW8W4l2REmTz7z4p\n",
+       "tIq8zQyHGn+oG8cgw66u2+1WxkIqvAZfW9dWKTNSjGLHvVfJg8X3NFs2Q/BvGFs8Fj1LqXbb3OfK\n",
+       "8YHHKccoA5R52TMvnFicnSxwDTO0PjMDm1PqIj3nsjQpdkeZ7Pg6z9DMZrMlM2nsWSr8gQovECtf\n",
+       "LsOQy1z5Np1MJvLdSNWpToCeastYuXwZYv2ryuXn3FgePCW18KwoM+sKii1KMY58nOcndU3KEYTb\n",
+       "1PeDMs+amf3Zn/1ZtB5m6zNS/5eZ/ftv/v73ZvZ/0vH/utFodBuNxvfN7Idm9v+pG2xubtpwOLRe\n",
+       "r5cVp6GgoKCgoKCg4NtG3UIqJ/zBf7BzYflBo9H42Mz+JzP7n83sPzYajf/Wvgl/YGa2WCz+qdFo\n",
+       "/Ecz+yczm5rZf7eILJsfPz4nsfwOo9VqhRAB0Eaw/oOREoICsWtzgXIhTENKe1GHlI5mFWD3dvfu\n",
+       "3YoOhtklDqbIK2+1WldaC3/vBw8e2G9+85ulYzdu3Ag7cxZ443lgM5Ruod1uV/qMA3cyMCaU+Jnr\n",
+       "4YcbhyNAoEJ2/QVYjMguwF7LxGUDG5QSGNdhPB6HfoIQnFkoNWa47CiPF7Hz3yqHFgP3uH37doWR\n",
+       "2traSoYd4KjIGC+KsWXdBvqIA7Sy9tHsvN6pkBc8xlO7XqU94eO8K/ZjgstQl3uOtSy41mv9Go1G\n",
+       "hS1oUFgLJYzm99LvnpWbuUJsTK4rvlb6L1X2GIuCtkf7sB5KMbp8P8/aKr2O/93fpw4xkXzsHDxL\n",
+       "MYopcf9lwf3A49MzfrFxj/kLv08mk2zWy48ptjgo3Ss/14c6aLVaMrMG1zNWlti7rxxMUnMzBxNe\n",
+       "p38aV9WpKz200fhO6qbUAsN/oNrtdvioo9NzzXW7u7sVT0Om5/GhZ/MlC2BxDUcxr4MXGTLYvKVM\n",
+       "EjjGL5wf6CwyZvOWf8HZ0wtQ5pu9vb3wDKaZ2SzH1/NzzS7Ekii7asuYOcinAzGrmnZUmfljjmdM\n",
+       "p9Ng6sRiTsWM4kUOFiKz2Sz0w8HBgZmdC7e9F1OsHj7hLR9jESmAttrf369sPLa3tyvjmydwvB+T\n",
+       "ySSYVZW3Hp7BaYHQjhwZHtja2lpamJvp2FD8/jBVrz7saryo8+oSqObG7vKIeQnlfrwAHsd+TLfb\n",
+       "7byUFu12xWN6Pq8mBVZQIufhcJj84HE7e5MOtylDtYt/Li+kVPmuCle9CEo9I7bhVot1IDUmuT14\n",
+       "Y+jNzLGFr89iUCd8BzqdTph3eA7xbcmbZ37X/YJrPp8HSQnPY6rOfrHOi0T+JqVSSvH50+kUdZSd\n",
+       "U2I8FRQUFBQUFBSsiX9z4iTv1t5oNLLDBaiVrRfu3rt3L6xylZmJ4dmMZ8+eBeE7wjlgR8/nv/ba\n",
+       "axW2gXcF64jdsZJnFkslccVqfTAYBEaD64nfseI/Ojqq7KCUyZDZEdRZMWrPnz+vxH3q9XqV/GHK\n",
+       "BNjpdAKboBwMVDJN3p2AieJdkaeO1Y40xuj5a3mXpXaVapwqsyXvLtWOSjFVKQG/FzEzzs7OKgwN\n",
+       "twGL3THGMF5UfC0exzzmPCM1Ho8rOcMYbE5hNgNscZ1I35eBwfm+lPBUxdABeN5JxZHiccCyBrPz\n",
+       "d8uPBdXXnBQYmE6nMqaZZ67YxFoXAd2zHarNYm2rGHH/3HXi9ii8TNbo27Tc1AnQGcp8lzJD8hhK\n",
+       "idL5HjlyFlW+2WwmrTX+3LOzM5m9QM1t/pvB7z7PFxhbyvSXCn2iGK6c8VkYqYKCgoKCgoKCNXFt\n",
+       "Gql1I4hCkLuzs1MJPDabzYKuQuWOS91vsVjI1TNWqhw4NPfeYJ1W1VIx6nZZPhhZv98P2pzhcGi/\n",
+       "/e1vzWxZ7Kv0Rp51UIxPt9utuFarMvJOXtnuX3/9dTMz++CDD8Ixtn2noo2rnE3AK6+8UmHr2KVf\n",
+       "QdnLmUn0TF5usD8WIEMnwJGD1bWqj7ld/HN7vd6SUDQF6L/QprD583MHg4HcyXH5UTcAOTA/+ugj\n",
+       "u3Pnjpld9NE777xTKQdHi0c7K4Epzw9czlQeP9ZIASqIo2JHYmPbg13wgdwQDMq1OsYqKlbJg/WJ\n",
+       "V4GYriw1T3P7ASnmah3x91Ug5nTgocIzKG3WdYIdEXL0cGZV/RWzNkCsjZS2FPdhZjVHN2VW/dbw\n",
+       "O5DSsfLcq8TrdW2hcm7mnA+WKqWRujbT3rp0LhYxuYsZswuhMjpwOp2GRQ4ilv/N3/yNXOigsfHb\n",
+       "KnFrEDEazzo6OgrJihXQwXt7e2FAYcEwGAwqguHDw8MwiPDvyclJsh7dbrcy+Xa73SVzBj+Dj5lp\n",
+       "8bp/WXgRpvpZiZExER8fH4eyYpHLfc0vixeH14XRYPrYU8mcsJejZ/u2ms1m2RHDPWIfJRXLBOdh\n",
+       "AbexsRHKgvNGo1GWJyh7KaLNNjc3K2lgZrOZXKB4syCXj9sAz8BCSmE6nYaxzddydHUA/cnxx3x9\n",
+       "fZumPJYA7isf24yfu1gsKoLX6XRaSZbN4L70Y4wX1+yUoDYbWEDhvWCxLEdUx9/syZcTp8uX1beL\n",
+       "aj/+yPnFpIrnVLeQUmNXbcauCrlCelWWnO9Vqj4vAywVQF/XPc97JqvfGOzNrBaWvNBPSQCUXAJj\n",
+       "dzQa1cZfQx19iqvT09NkahhvNufy+XriGamNVwzFtFdQUFBQUFBQsCb+TYnNm81m2MFh591ut8MK\n",
+       "lBPFYpWJfGCIaxUD7tvpdLJMdLu7u0sxdnCPFHXJ4luUi1f+/rmDwSCsxvEvRwtXK/WNjQ2Z/8jv\n",
+       "rvlappKxE4CpUzGDStDcarVCezDrcPfuXTPTpjrsWPgZyo1audjiWWzWw7Gzs7MlJspsuT+4/fyu\n",
+       "hKOd51LKKneb2t2zm68fJzHzpDJl+SjrfA6zdzDFIYHz7u5u5T3Y2tqq7OA42jmLxzGuEP+t0+lI\n",
+       "QbFiLLCbxdhgZoXrpsybPN7UvT2LulgsKu3LzBb3Z4rVSbGjZlWTZaPRqORi4x2/P9dMmzD5vj6G\n",
+       "0fb2djKyvBKR413mnIaKHU2xSWwqVkiFRFhFXL0ucnOoKYF8blnq5Bfq/5c1Gabc9/kZPqZZzHTm\n",
+       "y6PmJ3ZyUM41KeE4vwP4Xb3XXAbuB3wLVLJqFYPOfx89VCgZ73iVY4UqjFRBQUFBQUFBwZr4zjBS\n",
+       "WMX2+/2wAmTmQgHaIxagYpWLXdnz58/lilJFPIdOAyvSo6Mju3nzppmZ/ehHPzIzLaBVaDab8lxl\n",
+       "m/ZQGiIGi5ix0saOfm9vL5wXE7F6IasKdGhWDZlwdnYWjq2iUUOZVbgCCN65jxCEUvU/9DcvXryo\n",
+       "1F25l/f7/XAfpb9RQmfY4b/88kupaQEw/urYTJTv7OwsyXDxzs9Hu+bAeCpsBe+s/A6S87QBKopx\n",
+       "TE+khKooC+rG12O8xKIE+xAa0+k0SzTNO2u02Wg0WnqnUixhSuA9n88roVPG43FgmlC3s7Mz2Q8o\n",
+       "v2K9ePeeKw7mMWO2zAypIK1AjC33QVq5DZgNRpmZvQV4fCihcm7d1P3WBYvDc4Xsub/lskS52iil\n",
+       "MboMhsNh6Ae0fUzjo5wW1Hj3UFkH1L2YWVVzKr5LX3/9dSgrviWj0WgpHynKjvthbdBoNML45bqh\n",
+       "LVMZDliYr0KZ4H1jdmyVLCbX5rWHqKc+ZpDZctwlAIJxxCBqNBphwsYklhPpOwV07E9/+lMzM/ve\n",
+       "974XzB9/8Rd/YWZm//RP/xTO/8lPfhKuw0SFznry5In98z//86XKY3axuNva2gr1xGQ5n8/Dxwjt\n",
+       "wwNwOp2Gc/ll9yLuWFRan8Q3Nla88FTFkZpMJpIqRbthHLx48SIslmDe5DKz6Qf9xSJotD/MFbx4\n",
+       "UuYKpoVT1DQjRfnyJIa/eWHIUYRxrxR1zMlcPQ0du3ZVkyM2IoPBYGnRDKAP2bsPGxV4YPKmIfX8\n",
+       "Oi8w9pRRi1fltcftoj5que0BqJhrdUglSVWZEnDs5OSkkrCXI72zg4xavKSiOuO3nZ2dpcWtR64J\n",
+       "i82SLG7Hb1cVDypWvsucFyubb/t1Fk/f9jdUOfwAKmVSq9WqyAvWKT+Pk1QMNT7m50oeJ8q0j7F7\n",
+       "enqa/K4wfBovfi/qFkOp8vFcjjrHvPaKaa+goKCgoKCgYE18Z3Lt8eoTu06sNIfDYVglgmFJ0XhX\n",
+       "BU7imopi3m63A3UJU8Cnn366MtWtgJU6R8/GscViURF2+3xZfvdy586dkICZd65+Zc6RzRVS+c94\n",
+       "58Bu7SoHHHbouM98Pq/0La7z13qXed6NsUku5R7LZVVsB+6D35TrrGIG2AzFdHVOrCI2uzCjp6KE\n",
+       "4z6qjqo+qajX/X4/i4Xh8t2/f9/MzD7++GN5rhK+poTeDO/CrKJ2M3PKO2UVOZxNBV4sX5dvEphO\n",
+       "p0s5EWPnqzhddUjlS+M2YtZOxWvzDCKXkcM54LyYE4Q/ppgENpeuyuoo1LFPueyUP1/FglIxwVTd\n",
+       "Yma6Vet52fAI3mTX6/XCu51iXbms/H541nM0GslYSxgn7DjiE06PRqPw7cO71ev1ZG5PD3YS4zyc\n",
+       "KVOikjwoxxa2FHkLS25YCzynMFIFBQUFBQUFBS8B18ZIXcdzCwoKCgoKCgpWxXcysrn3BOLjOXRb\n",
+       "u92uiG/ZYwk043Q6jaag4OezODgV18VM05SgOFPpTfg+ucJOFZZ/lUVoSpioyuWvi523alwYhkrL\n",
+       "chVQdHsuBV8n+l4VbGZSqEsB4s3byquNY4alvN5iaUhygf5S8bAAdrhAWVSZYn2khOUAm7LVe6Xe\n",
+       "4VQ9FotqYtLLIjX2Yb6uS3K+Kl555ZVgikWfjMfjrPG+sbERxg6cTlSbbG9vVxxWUtKBdZD73tY5\n",
+       "KrB5a9WI3ylcZm5YZ06KObP47x2bJevqlxKHw7mC780SCtWWLOzmc8yW57RV3zOW86TiNabM4Hyf\n",
+       "q/q+1LVvMe0VFBQUFBQUFKyJ74zYPHJe5W8WwXko1091P3ZxxK6MXadZsJzDUtWxN7mRbNUqeh1m\n",
+       "ip+3Sr4g/zxfnthOypcnlwV68OBB2HV89dVXWeV7WVhFPJrj+lvHSAHNZrPCTvFYgxPDdDqVcbV8\n",
+       "rjXOv4dj3W43xLz65JNPsuqowPGfFJMGRkrl4WP43ax3kIidz+93LCq1anMcA7M1Ho8vxUhxHDez\n",
+       "NBtothz3Sd3L16WOAUEokPl8HsYExlC73U46iQCbm5vh/VZZBXC/7e3t8AyUU8Uxi7W7ik6dOk+h\n",
+       "jn3wZZ5MJtnXpMqSusdlWV5+nll9GAEuF7dp7tybig8G9Pv9cN46oYQwzj1LyojlxfRt3G63K/EB\n",
+       "J5NJdiR3D2bv+O9UH7KTRV3S4sJIFRQUFBQUFBSsie9MZHNenXoWJrZj9atxxUb1+/1KPipehaqV\n",
+       "ea6WIrULUJnh+X4+ujODtQAcKAyos9f7HdUq4Ijadbm6/PNi/4/hk08+CYxL3Q4yd4fpy6d0FasE\n",
+       "G1X3VzvWXM2YPzafz8PODeEcxuNxOIb8Z/fu3Qvjl3d6HPk6Vt/xeByYqJ///OdmZvaLX/wiWU5V\n",
+       "3zpWlBkwX052sfb3Y72jCg/BbXUZnSDnMvT1YHdwBurCuQBT840C7qGYJg4E68vrwXkNURaPfr+f\n",
+       "xUipMCMMlYNsVQYmNUfEwCyA7zcul5oH0H/dbjf0K2cNwO+5OdSY6fDu+dwWOYEoY4gxqx4cSkCd\n",
+       "y9YWjBO01fHx8ZKO2Ewzanx/sK3NZjO8x2zdUPUC46qihDNSrDH3ude3cXlZN4nzoItuNBoVFlW9\n",
+       "861WK9lPq+jirt20pz72l6HdU1GJVRJXfuFyPtadTqfyknKMEoZKEZGDZrMZ7scLKPUhiF0P+FQj\n",
+       "uagTdgK5JrF14q4wcsWDHGOH/02VK3VebiyTHNMeJwXlcqqPK37HJNZut8PYRllOT0+lIFfFB+Lf\n",
+       "zcx+8IMf2LvvvlupDy8szOKLBR9NmNsHC8JutxtSAAE3b96UKZDwoUrF2VLvgv9Y55pT/Yam2+3K\n",
+       "BQhSRKEdVk2NZHbRVgcHByGGG5ta/AdyHbMRMhuYXSy+LzOPssk4ZqLF72ZXLzY3WzbF4ll+jq5z\n",
+       "AgJyBeOXMTPyM3JjguVuDNaRa2DcbW1thbGtMhakon/HxiLqx2lmVAqwnM0Jz2P8XmMxh/OeP3+e\n",
+       "/Q3B+OUkzX7OMlv+hsfqWCKbFxQUFBQUFBS8RFy7aS/HnbXb7VZWjrPZbMkEB6REcnU5vlTkYM9w\n",
+       "xJgJ7wbKO5tVo7BzOet2plhdc85ClYx4VVyWyUldo55RtzvykZljdfNMg0rim1vOxWKR3YY57cBj\n",
+       "lqMTo3wQLas2mE6nFfd5NguxUwSuVzkDU6wmM6ugyTudjnynsPOGiP3p06ehrTjS/I0bN8zswpng\n",
+       "5OREhgPAtTyOVdR+lCsnyXEMKvlunUmZy+LnnbpxperGEcY9eL7LBUzkjx8/Dm3O4va6/JFm53X1\n",
+       "dYuxUZysGsiZExQztFgsJCOpWGWVQQBjEG2mHFcUG8UR7lMmHmZlkMPz2bNnFXMfX5saEzHHpZTI\n",
+       "fR2RPsr89ddfh3futddeM7NzdsfPE6enp5VxFzNheocSxY5NJhM556O9fG5TXGN23pZ45/Dv9vZ2\n",
+       "mIvq2EU1zn3dut1u5b1YLBYVWU7Od6MwUgUFBQUFBQUFa+LaGakUsIKM2WhTu0m1qsfquNPpLOV2\n",
+       "i53Hgjd1b6xceffk7+uRI0Ks22nwzgDn5QhMV4HSI8TKkHs/zybxTq8uxAKQ2qmrXd1sNqswjXU7\n",
+       "DCWSrRPZ57aH7/fT09MwJrDbXSwWMhidv1aJv80u6pdiHz7++OOgq4GOiVlZuMTfvXs33Bu7QR53\n",
+       "0Dvt7e2F3/Hc8XhcEUQfHx8HrRe/C3guxnG3263oHReLi9ySzGpdhc5zOBxK9sWHP2Ehcx2b5fH0\n",
+       "6dPAFmEcq/c2l8FuNBoVYfHJyclSbkpfjxTm83lo87oQFhirl0FKY2im28FraufzeWA2mZ1VY8Kz\n",
+       "3iqMgwIzhHgW61i5bCltZt0cktLwxsqn2kMB5ebyg1XE+1U3RtiZxH/vYt8L//4oRvLs7CwwQ5w/\n",
+       "1Z93eHgYzsM4PTo6qrQb9yu+Of1+vxJsdDweL33DgXUsOd/phVQKdfQo/+4XL3UTVZ23mvcCVOh0\n",
+       "OpVYVaPRqGJ6Ui8SJ2xEp3LsK5VQNNc7YhXkeCzGoCYAb567zP2UYJhjgbGHTuqDVycyzF0grfox\n",
+       "5/uifBAyb2xsLHm5qXJ5xExEZsvjlJOMIu4L38OPp88++yyMRSy8xuNxWOjxwgvX8GSozJR+4m42\n",
+       "m+Ea/ihyYmKz8z7F87BAu2w8H7TRxsZGEGkDg8Gg8mFkQX7quapcL168sDfeeMPMLsxPqt/qEs8C\n",
+       "7HWE+0ynU7n4SQnGFVIbvXa7vdY7bLZcX9xjMplUyle3oeL+gJm3znuOPfj8eap//fxilk5ozcl8\n",
+       "VR/G5nr/O4vJuU4pL2xeTOSI6ufzuVzEs/e8f27ddy9nflVzjNnF5gv/DofDpWTl+Nefx+8A/4vf\n",
+       "eQGMOmERdnJyEsqM2GyDwSC8m6hnjvd7Me0VFBQUFBQUFKyJ3xlGKpfCBHhlHRMQmqV3lZ1OR8aw\n",
+       "SAmjFQuQEqoyrc3P8Kv+GAOnXGH9jmod1LnoXoahUbs6ZdpLsUFcvpQpjlkqVTb1DDYFruqmnAvF\n",
+       "rAHMxjBy4q/4+5gtuyHzu6CuUe8KroV4+datW6G/wAawkL4uxo5/Ppsymc3yu+N2u52MLXMZKMZG\n",
+       "RQlX4SoUYmwwdsp14wkmQJhYY8yVj/5spuel3FAmOWwbIj2vg9w5hKUbdfcBeO5lpyS+Jx9TuerU\n",
+       "+/jixYvKvL6xsRHMm59//nk4F2wHh8lQDIx/RrPZrJiM/XlKaK9YSn8+f8d4fvQMGLNFfA+cxyE7\n",
+       "vMC+LlwOnERizJp3aKlzJmGWDOOdTbscV8/sfDyjHvwsHEN/PX/+PPShZ8SS5ak9o6CgoKCgoKCg\n",
+       "QOI7yUhx/jtAsQ/ehViFK+BjWFFPJpNKoEq1w5lMJuEaiNxUROB2ux1+TwUCrWM9OMhZStOQy9Rc\n",
+       "BquwT6uCdxPYqbC4Oof9Se3U/LUpRipXMH7VbQDM5/OKC26r1ZKswrrlio2Hzz77zMzM3nzzTTMz\n",
+       "e+utt8JvzBT6HfXjx48rO1cGa0eUSzdHFsYzPIPAkZzrRMHrRPAH8C6fnp4Gdgc70OPj4worNpvN\n",
+       "shgp5QjQaDTs008/NbPlfHkKdU4r/jmYL3q93tqBJznXGubgZrNZme/q5qfY88yW5+PUTj9Wh1R7\n",
+       "MDuigtzCkgC2Q92r1+sFFpLHrG+D8XgstUJ+/s8NBMrlXwdcFyW+VlYU9DHGgXpvFbM6m82CQwOz\n",
+       "Qax5xHn+ve52u+Fv1gEjfAfnf0wFCuVy+nGsvrOrYJ08g9/JhVSu14qKRwKwectTmLGIqwA+bOPx\n",
+       "eGliNzv/2OFvDMThcGiPHj2K3k/FrVG/1XnKKIGi/6DFPLnqoCJzr4pc8xcLZFNlrPPqyYVfqMbM\n",
+       "h0oY6T+kV7FI9fDRxEejUdJ8XOd96Bc+PCawcOVowh9//LGZmd2+fXtpYYFnqfcxlVIB9en1epX0\n",
+       "EmZVUS0LszGJ7e/vVzw6ladUqh1ywOMvFREev+V+7IbDYRDOq5Q5dR8JIbMcuQAAIABJREFU5bXp\n",
+       "0W63K+LbjY2NpFkEY2w4HMpn4AOJPlflbLVaK39sLvNxW9XbVjnjcEwj5cgBnJ6eho86nA/UfGF2\n",
+       "4fCQijsXS6uUmm9j9U1tXgD+3tV5QLIjk9lyX9dt1v3ffJ56R9DWynlmc3OzMo7r5nnlGZi6Jtcp\n",
+       "hdtYOa7FUEx7BQUFBQUFBQVr4jvJSAGpuBRM3zJrkGJUeOXtd5jz+TzsxpRbKO/y/OpaJQ+N1Scn\n",
+       "zkij0VhiwHC+Eigqk+c67M1VmK5yQxhwPXzushj7lFO+OrOmMinkiuZflmnP7GK8sfnYxyozuyg3\n",
+       "xm6uwwWLQzGOeYeGPjg8PKyINA8ODgKzgnelTgiK37e3t8MuNxWyQf329OlT293dXSpfzOHiMmE+\n",
+       "0L4cy0pBiXRT2N7eDu2mmI/UPXLjyKldNsc8UnHEWEzumUs2u6VyHk6n0yyzy1Uh992rSzzrWZle\n",
+       "r7cUggHngIlKhSjY3NysJDDmb5JyZmLRdio232VZb9wH37PFYiG/af79Y0tNnazCZ1To9/sVh5Zu\n",
+       "t5u0LuG5MfY1Jclg+Dmr1WpVhPucqzSVL3Nd82phpAoKCgoKCgoK1kTjZe60ow9tNFZ+6FW7nwPY\n",
+       "lTUajdqo2f75dWXKLXNKh8O6Hn8/tofHkBINfttQup9UQFG1I2SbvKqbbw8l5o8FhfPIDcXgr8Ez\n",
+       "1m1z5Qq9ublZybXHucJi90FZVNurY2on7TVcqv/U+9Pv98MuENHTFdsS6z9m1Mzi2kkWtyrNRgq8\n",
+       "m/Uam2azGTRlaOeYZszj9ddftw8++GDpmNJjjcfjyjjnflVtw27o6JO7d++a2UWkebOL0AkxnYtn\n",
+       "W1utVgg/gV17jDlX7vS5uEzuy3X1MLlMomKAVVneeOON8Ds0hrlsRqvVqrSf6o9YCIjcNkddODj0\n",
+       "qk4CKK/ZcqDQFKOmHML43eJ8tGb1ekGg0+kkgysraxXQbDYDQwdrz+HhoRwngLeIfDOGZKN/p017\n",
+       "qQ9kLngQKTOYijOCjuYJM+UxxS8BTxLqGnQi6jYejyvl4kWTii3C5fXPZbwMYfRloDwlmQbGOT5W\n",
+       "kJlecKkXlz/sZsuTIU8EOV6bdabFq1jcqwleLcJUst/RaJQV2T7mXKGO4T48eaJ8Klo895+PtXN2\n",
+       "dlah3WOLupQ5ty71x6rjnE0rKfMim/u9GacOdZ6k/L56L8bYWFTex4hOjwXQu+++G9rXywPqytdo\n",
+       "NIKZJTWmL7uZVc/2i6tchxAGR7b28yenA+E+ZO9Es/O28qJuXvigbd97772wyE6NPzVmcz0SVzEz\n",
+       "qfeHY/Oh7mzKVht3jBllkucFkN9MDIfDyrvB/aXKxWX3GyD+bnNKH8yBOE/FhOKYdrzwguQA1zSb\n",
+       "zcp3U8UWy+mHYtorKCgoKCgoKFgT32lGKraDM9O79sViUdnV1dGGvBPC3yo3mXKFVGyAWr3iPI5R\n",
+       "okwY/P8UdakiSCvzV4677FWijqHJEdorRoIZJHU/7oec3cOqIvac+6x7v1xzhYq/pOqr2KKtra2k\n",
+       "qaYuIrQqi2eVxuPx0jg3O2fMWHhupoWlzLakzD48B8TKl9v+uWyizw+ZG5qlLnxBig2MlQkMCMwz\n",
+       "zCqwmBj39vHJPNCHgHJo+bagzFWeMVHsDpvuVbgPBszM6MPxeBz69/bt22a2HKVcWRfYNKacLpjx\n",
+       "WQWXiSHFUFYcZab3fwOoH8do9O3KTA7a/uTkJLz3ADtwYKxxdgJOeOzrz+2skpvjd57veE7K+V6o\n",
+       "8T6fzyumR5VX0qMwUgUFBQUFBQUFa+I7zUgBzNDU6aZyVvacvZx3a1gh847aa2lYCMp6Er+jajab\n",
+       "FXdQ3sHwDtzvROsEnrxL9ozZugE5rwIxUauZ1oyxKJRt5Co6vQe3R12metYKxO63Cq7a8aFOf2d2\n",
+       "3n4Is8H56Dxms1lgLjDejo6OknoZ7NCVeF2FlOA+8s82u3C7ju3eWWuF67w2S2E2m1VYFLPVx3mO\n",
+       "kwaAsQPWri70A+AdAzx4F+3Zqfl8Hp6nhP0M9IMKkKn0XKwDzdGJfRvIdepQjJSyWsTqwWJ+/Ivv\n",
+       "ADNRfj5pNBqhP3BtjHHMmWu47VMa2FWgLBz8XniHG/7uKMsKjx3frszacFvjfVd1xxwzn88r47LZ\n",
+       "bFZCurDjCJfLv388D+Fd4TlLsU88t/pnLBbVoKo5uLaFlH8pUt4pSrSmJk8eHDxhoIGZKvYfDC5P\n",
+       "Ku7L2dlZZaCoTp/NZuHj5alMlIH/VXXhNphOpxUPiOl0emVJW68anpZXMUX4gwHTRF3UeUB9zNk8\n",
+       "i+euEjPKl53NbnwPtcjJSYJd552WElIPh8MwiagFFN8D56mEnQoweaj2WywWsm4+jsxisQh9iUWE\n",
+       "inPG75ky2XDcF9WWVxG/aJUFcMpLKAXlZctxtVRcH75WbSKU4B1RuOE5xkglvs41g5tVHWTqFqKr\n",
+       "Rt7n+irZAt/X33NnZycsIus8UvHeqIWDSjYPLBaLcBwmwOFwaF988YWsH66JYTqdJtuIx02Od7G/\n",
+       "Vi1AvPmz3W5X5s/t7e1KyqFmsxnefywiT05Okt7CdXX3YC9qniM5ZRqXyV8LpMzC7An9MjYOxbRX\n",
+       "UFBQUFBQULAmro2R8rt63oGpFTXALJWKq+R3T+zOmlrVLxYLe+2118zsYrf45ZdfVmJoLBaLIJzl\n",
+       "iM9eVDefzysr6E6nE+7HcWkUXe1XzSpOj0Kn0wn3Y/HfVZujcoXlgNrpzefzigA0xjh4VmQ4HIY2\n",
+       "5J1Fzi6jLqZMSoyuGBUuVwqLxULulP14V+UbjUZ27949M7OQ+JbZO443phLe4t5KgIwdvXIbXywW\n",
+       "oY9wj6Ojo4o7uBJ4qojQypStwL+l2L5VksKuAzw7J/edvw7vP+aLmzdv2vvvv790ngoBcePGjRAD\n",
+       "iqFYLJhxEY2bocbTqm3VarWkY0HKnOrnTLNluYQypwCpMaHKzmXhcqKsYOzY8SF2H7Nzdhbvg4r7\n",
+       "hbrt7OyEmF3rjD8/T62TbD5m6kwJrf31ZhflPzw8rEhFGo1GaDf8u7m5KR0f0K+QHmxvbwcWUI1n\n",
+       "BvoL71sscwnA1h4VjV1ZiFIAI2m2HKfN7Px9rENhpAoKCgoKCgoK1sS1aqTMlhkXM51ZWu2sY1B2\n",
+       "1FTUbAhjT05O7KOPPjIzC8xU7Fq4knOW8JworbxTAtitnVfUKg+WB7uSAszUqWBl6+x8FNbRG6n+\n",
+       "VDtp3x5KN7W9vS2Fv/7aGPvky5KbHZy1QCpMRiowJjONzLJ4XR8zq7zjAyOBHd/Z2Zlka1IMTsqV\n",
+       "lx0kGBjvyH3Hu3ZmAbzebTAYVFiU3N27Er7yc/k8BKV8GVj3XeF2RPnr6g7GcWdnJ0SCT2E4HIbx\n",
+       "wVoQpUtbF6wPqgtQDKgxpOY2ZiG8Y06sLB7sEMT3xzE1v6Sg5kyzi7o/fPjQzM61Uv69Zf0kQ83v\n",
+       "fg6J9ZHPCcvodrtJgXfKasCaW+VcBajxf3R0JL/X3oryxRdfVPTJXBYw3e12O8wxubpIjvzvtVTM\n",
+       "yvG8jL9ZcM8sK8oCYO7KYaOvNUVM7ke9zlOPB5tKB5ILb/a4c+dOUlB48+ZNMztPf6Geg47Dffkl\n",
+       "5Q7zH9dGo1FZEHJ0Wr7Ov8x8P+XtWGfW8s80y4/7VAffJyoGSG4f3rp1K0ySqQjZsdgzXsyrvPti\n",
+       "9cupuxKqq8Vav98PbaAmDNUG7L2VEqgr4Nrf+73fs7feemvpt9jYUOZI/65sbGxUBKhq8r958+ZS\n",
+       "GhOPlEei2cXGB2XxzgT4PSWGrUOd48CqaY0AtbBk/NEf/ZGZmT158iR8sAG1mdjb27P9/X0zO4+0\n",
+       "vQp6vd5a6UI8eIOWSuXDH2t8QJWnIdDpdJbiDAGpxPLsSebjkqkybW9vJz+SdQskdb7v99x3iucG\n",
+       "Hn+pxMlKQB1DasyiP1qtVtaiQc1jqu77+/uhvyBH4HmRY0txZHmz/AwCCuxFr9qFF1x+06EW/K1W\n",
+       "y0ajEfpSrniLaa+goKCgoKCgYE18p5MWq7w3HN0Zv6+6s+r3+xVBLt8PuxfVNopBePXVV+3Ro0dL\n",
+       "x1SyXGaUciMk1+2Oc0Xf69D7uaxTigVS91DR5DlMghIP+p1oLEyC2sH5Mqgdi2KhcneYsVhLYAu+\n",
+       "+uqraFnMLnaEgGIuY/3vTQQcGyW1q9vZ2QnszyeffFL5/dVXXzWzc3rex/qKRR2HqBrCUtVHm5ub\n",
+       "oe6op2JqdnZ2At2vxgvAbMFisQjmR1zLyAlREcNVO2sovPHGG2Zm9sEHH1Ses7W1VWHptre3w9jh\n",
+       "OEgKngGpY2NykctIcR9yXk3+l7GxsRHGBI83zAO4Rj2LGROc3+/3A4MNofRsNrOf/vSnZnbBgH78\n",
+       "8cdBMsAsfuqd4nG17jjJYcE9a6JCBXAoAYDbl3Pp1YVeSMGH52k0qvn36oC8eQcHB/bZZ5+Z2UVb\n",
+       "tlqtpHMAcFXMah3ApBVGqqCgoKCgoKDginFtjBTsu3Xi3FWgmIZ13KPZBRMrbuyOer1etng0R6cV\n",
+       "04kpm3Yq32BMZHgVgtPUbilW/jomClg14ByzeqnIzapMnAdLhTUA6gSbOQwcB+lkEbZ35eVygk05\n",
+       "Pj6uBILkZ/jI5Vy3mK5C1SOHoXnzzTft7bffXrp2c3NTRu5GuThw5N27d83Mwo7T7GJXjPuNx2Mp\n",
+       "4Ff1BHD+dDpd0jmoiOBA3fuYCsqY+s2sGnW81+uF57BeS2nAUv0AlmcwGFQYpJ2dndCv7F6uNEh4\n",
+       "Lpc/xbzngsf9qvM2a5CguWNmUjmEeE0LC9X5Oh+OZjqdhmdwNHj8jmM7OzuB4WBWE8wW+mgymWRl\n",
+       "YKiDCr7J3zA1P+bO6ZdhUVlHtCqDy9oi1Ye52QFydc6pPJ4Mr59cpV3qGKlrW0jFRHh1Ymj2FsDf\n",
+       "q35QY8CkDzFsXRRlPJ9pd0y4V0U3cn1TAjrALxyvYiHFZWEqv64sHv5jtLu7W4kvwiadWBnMzql6\n",
+       "NvOZ6dhMXMa6aMJqcZXy+PP3N1uOzKxiMgGpj2ev16vEVeEPIItqU/FovOefB6h1XhSxJ2pOWQHe\n",
+       "xGDCOj4+Du2BD/mzZ88qcYbYLJQSiSsan8e7EvjnYjgcysjhQN1CSt0v9cFQ7Qywqdd7FTFu3LgR\n",
+       "noG2HI/H4XqOcI9FqYq9ltrEqI0SLxrZQWNVzzF+hlqMqHHnFy+9Xi/LsUDVg02APIfkbu44dlvs\n",
+       "/Lo5hOuduzBbdU7n/lqVuKhzCMO7PhgMKlHpOZEx5urJZBLaGs/lse1TxcTKrJD7TVKbsDoU015B\n",
+       "QUFBQUFBwUvCtYrNWcSXcuOvQ51Zw6PODRlg0x5277lxSWJxiXz8DY5Ezc/1IujYbsHTwXwvFf5g\n",
+       "HdTtNFOmrrrdnY8YXLe7VGESUsyLMu3WsZ65O1LFhAGLxWLJrOCfq8ShsVgxZue7Nm/q6nQ6lRxa\n",
+       "eA7KYLbM5Kj348033zQzWwqHgDJ1u135rqTc0PGMbrcb+lP1Ebuo5wjaY/cBLsNIbWxsVHIUzmaz\n",
+       "yvuq3NAVVAJoBoTlPtK52bnbOO7NjgoenGeOQ6dgh4+xw+3C/ZUSJafA7BPH6/GC7Do3dIZqX3XM\n",
+       "s1Scw7PuveVxaRaPw8b1NDtnzlPzPs/lqZhWCqrMsfk2JTbnsqhvB8sa/L3ryuNZIu/gkQOYVc2q\n",
+       "czxn48i1LvH85OvE3wYgZnHAfILnqjUEvqOFkSooKCgoKCgoeAn4Toc/YHjmRQVJi7mrY9XJK2Gs\n",
+       "kLGzXVAwvxRb1el0wuoezz87O1tbH4B7mqU1WezWrrJX41m8Qp9OpxUGJFeXtkrgTl8Gfg7rhHJY\n",
+       "Ni4rzq9zj1e7qDqX5JQguy7IXZ3WCr95RorHp8p5V6dpwr15PHO/+2s58J3Kv+ehWJSNjY3AhKUC\n",
+       "aeI5/hl+N650Tu12O5Rf6YoUa8BMnWKQ+Fq/y1WBYJlNwHhi7Qa77OcwN71eT4q50ZbQSCFQIWMw\n",
+       "GITxgfbgNkP/dzqdoG/D+Z1OJzwX12xuboa/eY5BndBfMTZAsQ9+vK3DBLJWDvdmZwE1P+Swxe12\n",
+       "O7RRKuhnLlSYATXXLBbVPHfM3tWxcql5kdlnFTg4tw7MDPF3wpevbk5V5VP59+p0xv7erKlaN6vA\n",
+       "xsaGzN3KInj8hrbmeUDpEb+zYnOz/MjmsfNSgr1VY2SoZ9y4cSMMCk7squhqH0nXm9jwXG8qUBMC\n",
+       "e6esIixXdfKJH3NR1zephSMfV+fxS+rbTU1GCkrMG4stpcq86uIqtgAF/O/8DLVAwm+DwaBSj7p+\n",
+       "rfNm8xNBo9GQXlEAxzlLxQDiBVxq4c2pGDxl3+12wzHVVyjneDyujD/lLchpktiLlRN2+489LzZY\n",
+       "uO/Lz++wMilzGVKLNUbOZq3ZbIaFlkqdgYwKz549C22IBUi/3w/1xLXb29uh3bjsufGc/EeJExmj\n",
+       "HipdFSO1AFKib94EKvC7ivqiL5XTiZrL1LtSt9lJIWbOzZ1XUnNNq9WqfBN4/kmNT4Vmsxnai9vc\n",
+       "b3JizgapvsYY57LiWK/XC2MRz+KxzdlAfKwwRZ5wVHTuy9xNsYLqhyI2LygoKCgoKCh4SfidNe1x\n",
+       "lGO16lTxTZgtWjV+EZvffKTyRqOaGy8XKVZjFcRcXFPCxFXjjFymrMwWcV+m3PdTUCaiWHyjup2e\n",
+       "2bJZaB3zrAczUryz9ffjnSabYT1jqdhW1QZ1JtmU+ZBNSiyu9XGJ6pwJ+H1TZjJ1Ta7oNhXqgvsQ\n",
+       "sWVOT08rzFer1ZKhU1LMBp+z6lhQovo6KOYKdb9165aZLZsF0X6DwSCUH4xlu90O9+H5wJtqWJSM\n",
+       "Y71eL1zD0fNxHsYRZ4uIsU74zbcfMxyrRp+PzUk+PhSXNfeeyvSY6vtut1sxu6mo4qocitWMOSzx\n",
+       "POHL0+l0sh13AK4v2p+v9e3B56no80CMuWIWOHbtKvBR1ieTSbbFwZdpsbiI54V7jEajwkgVFBQU\n",
+       "FBQUFLwsfGcYqZSNV+3aE/cO15jV5z+qYyHWCTxpppmVmHBTIbUz44BiQEw07cNK1Om01mVgcuDb\n",
+       "koOqqp1LXW43IDeKfV3dVh0nqd+VK/4qeaFS4QK4/dQYA9RYY9YlpSdjhog1TziWM46ZJVXM1KpO\n",
+       "B3VsUIwFVFDvF+f5MrtcUF3F2ikGNnatch64fft2uI+Z2ePHjyvXsnMA6j6dTqUmS7GAKvCocswB\n",
+       "mB3xTF5dEEzliJI733LZUWZcW8fEpDSGMfD3Kad8q+otzfKDvuL6wWBQqSu3uXeK4r9z25fZOIW6\n",
+       "Onlm8NvIi9fr9SraYBWyIfZd4e+J2Xm/nJycJBmptjr4bYHNPalJPfaR9QNlsbgIZ88TZN3kC6iJ\n",
+       "JUdwzROBum+u4I29nhTN6yn2WJn8QsmXJzWQLrOAqnupvEiShaIKbIbA36kFJn/0+UXynhmx9ssd\n",
+       "J4B6SRk+cjiL67ntlbkKEw6beHCNSgECzOfzyiKM2wD/1pm0+XclRq7zMDQ7f5/QT2hzjtek2ozv\n",
+       "q96plMkLz+Tncd2VswEfYxNXDGyu5P5SJtvcVFIebHbjxQHaI9XmjUaj0kYxbzzlYabmhJzF32Kx\n",
+       "qMzhdfNLaiPHf3ObqjhNbHL09+DneS9FRt3chbrlOu2o8+okA778/tvk667iYHHdcs2j/P6gnrwB\n",
+       "z5kXY/EfeVGNZ/nvYa6DkUKjUY1FORqNKnGz1HyrTO1KHpQVc3Kt0hcUFBQUFBQUFFy/aY9dks3y\n",
+       "c1mtAuxEAPUMtXM1q4rcG41qJHKz1c0QoMsnk0ltCAO+ry8Tjsfchn2dYuXKjSzsd0sxcXjKZKoY\n",
+       "KcU+MZSzgUJqZ8lsh2czVxGb55g/WbSIvhmNRiu75bKpyu+MOKZZynQTo+eVGTwXPiZLLGwCcuyl\n",
+       "8icqbGxsJGMAKUZssVgs7a7N4nVKzQksXvUsy+7ubnhmaqe6jigd6PV6lQSrZ2dnoS3xfNXmvV7P\n",
+       "XnnlFTMze/ToUaWcbKb1dVPhQ+ocC9ScqUIT8FhU7vs5jHNdmzKzmxO6gEPZpCQGLwN+blXOOLGy\n",
+       "qDZX+Te57b3pdBUWCO3Kpkc1TmLl5DpxGZgBTslN6rKd+Ij1zI6mvsfNZrOSQD02XorYvKCgoKCg\n",
+       "oKDgJeHaGSkPDrCFsrFgXK1IlVCdd3S+jqxBYBturhBc1Cfrmtiq2K+o1U6XRZq55WPNQ274g5Tu\n",
+       "C+U1q9eOqTZXWhWF3NxzKbaO3am9uJWfy7vYHMZM3Yd1aWz3922uIrTH8j4qnVPqHVAMDcY4t4fS\n",
+       "m9Q5V6R0Z6k8Y2ZVPVe327UbN26YmdkXX3xROZ+REt9y3fi94MjYKWB+ULqjlCB/f3/fnj59mrz3\n",
+       "ugCLMhwOQ1uCyVOZDVSbt1otu3//vpmZHR4emtl5vj5cy0zNqg40ap5V7AjAbDWzCoByEqkLqLwO\n",
+       "e5oDtox4zRWH2qlrMz/+cufqWKgDxe5xm/r7x1hF3/7z+bzitDSbzSoRxheLxUqi/Muijn3Kxc7O\n",
+       "jpnZUtiPVEYSZsRU7ltYLGKM1LUtpCDa9OLXuo8rT/o51KSipvk561C6LJwzq/dEUC9DrmmHkTKD\n",
+       "xQZgaqJLLWhW8ZQEVD1TZY0t1NSC1qdbWSwWWYJnPJvvx5PWqnFrlFeHGjtK0MxRzHmRgJcev/FE\n",
+       "iHhI+Cgytre3K8dzk2XzpKmSKq8aKZnBGyFOvWB23lf37t0zs4vF1bNnzyr3UPUYDofhIxeLr+M3\n",
+       "YWb1sa7MdGRzZRLtdrsrfyRzgQXmfD4PDgqqbwAVSbvZbNrdu3fN7GJsP3z4MPyOiOmxJLyp90HF\n",
+       "quN3atX5IsdEzqiLMcflg6MH6jGZTJKykVScq5xymS2nhVKpSXixmCIE1CaAr0H5eCzmAnPmeDyW\n",
+       "30U1l/H7gOcCV+GFx45U65hWfVzHXM/gmPMCvDoxP0FCU0x7BQUFBQUFBQUvAdcW/oBFcR5Y8fIu\n",
+       "Qa18VX47z3bwLjUVZbUuh54qd67ZRTFECiyWU3mVlDs47zD8/RWdrlzhzUy6i6YEyikROf+eCimh\n",
+       "dmaxkAjK/TjFIrEJw9eD2Y7UDjdWPoCT3KZ2Unj+yclJYB2++uorMzs3fcF8c3BwYGZmT548CdeC\n",
+       "cVImwMPDwxBbCMLi6XQqWQXldODbj9vJC1/N8mP88JjFM8CEjEajEJH7Zz/7mZmZ/epXv5L3wQ6d\n",
+       "2w9MA9pC7SjNlusOkTYzX3589vv9pXya6t5m5+9ZiiUEciP0czgFzi2W6hs/73l4xpSBOYujU6tY\n",
+       "ZQpqxw+o0CMM9GWr1QosWp3Z3I+3VqslQxx40/10Ok06N6hsBnXmbX8t15PPS8kh1Pn8rnrHIWab\n",
+       "Yt8VJar3An+23qCvO51OJaxJs9mU9/PjJBXbzsM7pajQPrmOBWzeVjEU8S1st9uVECv9fr8SB43H\n",
+       "LFioFy9e2IsXL5bKnMNQFkaqoKCgoKCgoGBNXJtGChocZmHMzlfhuUzIusjdLeaiTmye2qnVsVl8\n",
+       "HsDskb/Ga5uURirVlqmdmWq3OjYGWGWH7sscy1Gm6pESKPP5/lpVj5hQ3fenunaxWNju7q6Zre76\n",
+       "v7e3F3ZUubs/sC7YTTFifeTbKqYZ8EztOq79GLNbW1sVfc6DBw+CgFuVnyNXY6estB6LxaISNsBM\n",
+       "BzD1u/Z+v5+dp9EHWlWoG++ss/T6z7Ozs+Q7xYEllTAWGjQwGs+ePatoqfr9fmhDHp8pZw0VEZzf\n",
+       "1ZQWiJmuy4QayNEbdbvdMKZRx+l0miVoV8gRgvOzzZbnH8/2pxxXzM4dGszOdWypoKaNxkXwVWZ8\n",
+       "0Ne5oVXQRrm5+RTqslTkXG+W/tYwUtkCNjc3JWOdE2Vd9TWsQd9JsTnEib4R1CS9s7MTJlicHxNa\n",
+       "KupPmQA9fXrZ9CLrIraQwuSPOsbKlorfEfPayxGCx7ywVkUqPhW3pVrwKbE50+OpCYqxqqBcTYY5\n",
+       "3ntmy6Yln7piNBrJ9vDlOzg4COY91JsTz6YwHA7Dh44jXKc873ITeHPZ6zYHZuft4hcdyqvI7EJU\n",
+       "j/PUJLqzsyMXpdzmfkzgmWbLphIf/TvmPanGwv/P3pvF2Jad52H/PnWGmm/Vne/tZrPZElsSSZG0\n",
+       "RFmCJYAx4AcDAWLkQbHzIBhwAgRIEAd5i/wSIgH0liDIgwMEsYMkD0JsBDCch1gOCMWKRFoGJTAi\n",
+       "uyVSIkX27eHe233nmusMeaj+Vn3n398a9qmqria5PqBxq88e1rz2+qfv90EOsc23RLgaDoehfljr\n",
+       "Mado9CtHhql+un379tz79vf3Q1343TjoY74cHR215jSnNeJy/UdzMBi0HIZjfYDgCphGmQW+K/jQ\n",
+       "BMTmWAnOS8hW+1jp3qrSM/HfXrjzf/v9VQmiqo/6/X4w7fH9KFclLefy/beIy43V1az8wBdbT547\n",
+       "bm1tLXw/cW5gMzwLLv7bEYuOBXNAdTavqKioqKioqDhnfGx4pFiS9KrumCTpT95sdlHajlxINNWv\n",
+       "9ay6nuq7RRi/WTOhTvyqvJS2hXlwfAirr0suTNQs3lepPsqZLdXpHyg1PQI5vhfWJKi5A5RqaLhO\n",
+       "vq7cp6zVwG+Q/BTr9GQyCSHs7777brgOR3VIWawZ8LxoDCW1M7q2VyFXBmumbty4YWZm77//fus+\n",
+       "Xt+K3gLvwW/cXl7/LP2ntBOKj8q3i6+xJjSlRRkMBkneN6bEwPtSzutmp+OEfmFNE98Dx36M55Mn\n",
+       "T6RZDpohaK6UqYPntlp7KIMZ37H2eH4qDSzGemdnJ7nvqCAghrI4+P0sZnryDvmxDAeLIqat9P3C\n",
+       "FpFSZvN+v59cs1grBwcHYayBR48eJekWWPvI2kmzk7ntTWfKqsGuJQC7nvj2lKCrWTbFJ4n6mLUd\n",
+       "6s3mNdnYz6tGqqKioqKioqLinHFp9AcefGpPSXo4CbPGh53mUgRhfJotcTyM+cOUnKBjrNgqpJMl\n",
+       "OA/W0KRyXTFlBKROFSKq2qekTrC58rMxzYuvq5JYlaZJUTr46/4paxVhAAAgAElEQVR97IyI/lBh\n",
+       "wmpcU+PAyDk5KsdyBdQfmpqNjY2gTVJzHA7jx8fHQRPF4cigTICUdXx8HMad533K4V6B/VxiZIBm\n",
+       "6XBwlauO1wr3mWdKZn9H1tChTVw+fKggbfv6et9C5TuSojfxUOOkHLx93+SIHVl6LwqvJt8N5WOo\n",
+       "6gIt1P7+fugvjNN0Ok2Sm/p68t+j0ajV3mfPnrV8X1gry3sXymP6GFWu3zvYoV3tbSmSS7P2XIk5\n",
+       "kZdqSEr8Jkt9W1dWVsLcVv2h6prTIHMwhGfj51yW2E8Gg0HQivJ69pQIMc2l92NV/Zjrj9icxrWu\n",
+       "/mu8n6B+KgCCNdNK251D1rTXNM0/NrN/28wezmazn//wt6+Y2X9oZtDN/4PZbPZ/fXjtN83s75nZ\n",
+       "xMz+/mw2+5finbPl5WUZocfqSl5A/uAzHo/lwaLETMGDft5Jkrl8tXGXRMyNRqMwiNwOqOyxoFSE\n",
+       "kwfK8czgvtyUaSiFXFRXSs2roCJ9YiZSLHBvUkC9zOZV9V3bqA5/sc3XH15ms1mrfr5eMcQS9qai\n",
+       "xXCAGI/HcqMrYYFfWloKbU59XEsjNXEvQyUj5XHzTuBmp/xaL168mOPBMZt3wp/NZuEwinKUA7li\n",
+       "p+/1TpP4LmLaWYQh3+xk3FBOKgqQU2ehLJW6o9/vBxMwm87QD9xXKagoUJS7vb0dIi+5z1S/4Rle\n",
+       "e11NyMoEpUyVzDuGcegqVCwCXtOLlqfWlD+ge+G03++HcUJ5h4eHrWTpw+FQCiqqX1EGC2h+rqyt\n",
+       "rYVnzqNflYDe7/dbplh23WCXDNzno/L4t4ODg1DnkoTWHudh2vufzexv+vea2X87m83+yof/4RD1\n",
+       "GTP722b2mQ+f+YdN01TzYUVFRUVFRcWPJbKmvdls9v82TfOquKROZn/LzH57Npsdm9kPmqb5CzP7\n",
+       "q2b2r/2NBwcHcyp9nCJzJ1zFM+FVinOVbJqW+WM2m4XTacrhLgelXVAM3ip3F0tq6ANc45MyaxIg\n",
+       "BbJkCAmD26ZO3KmcXSzB5dqpVNfMWmymTW0p+gV+lqE0USz5o+0cnKBU/968cHx8LOkUPJTpMdZP\n",
+       "at5Cakb5yuGRzX2o0+7ubmsMl5aWgsYC7WBzAGuwkLT23r174beS+c3jq8ASYsm6YU0y1uDa2low\n",
+       "H2Asm6YJ8xdjurm5GfqFmd4Rsg+W8q2trTkpG/3lGdAZo9Eo/I7xVbkHY1BaNox1zmEcwDyI8eYB\n",
+       "aq3k9ki0HVheXpaJlv27WRPKJjFv/mTzHKMk6CSmuUpB7eusifKaZh4fTkDs1zLnB1TO8GyiSmnH\n",
+       "+TfMA9B58NwFVMLw6XQaNIiqXGY+ByaTyVxSa/zGDPlmJrXbZqf9hfYylxrW0XA4DG3Cmtvd3W3l\n",
+       "0OSxVHk61dri76J/j5pfo9FIZvDA32pdKK0TnznA2YW28behy5ngLNqi/7Rpmv+vaZp/1DTN1oe/\n",
+       "3TWzt+met83spTOUUVFRUVFRUVHxscWizub/g5n9Vx/+/V+b2X9jZv9B5N6o2MGnRT5t+xMhh1ar\n",
+       "cNqcgyznl8L7UbayD+OEmyOKU6GfzNSu/JwAvHc8HgetE4d2e2brlZWVUD9IGE+ePJF0DyxNAMpx\n",
+       "W4UDK+R8vHz/K3JDDk1XfjIq/FRBadZ4HFIMxCxZl0gbsYABICdZQ4MIPwZ28AW4vawl9ZLUaDRq\n",
+       "5cva2dmZ8x/CNWii2I/ES7Mxeg6/BpSzPtNqqGdZC+l9EQeDgfRv8b4lMc0OJEesmffffz/8bXY6\n",
+       "P/g3D5UXbjgcSj8NvE8FqkCDcHh4mJ23HihL+aBweYqeIwX2kUJwQoxZH351GEtug9LkYb7HNBwK\n",
+       "eDfPF8U0z9pJs3lthpqn7H/onYgnk0nLx0u9I7a34/dFaEE4GArt8WPH/rPcXqVZQb/w+mcop3ZF\n",
+       "Ns3ZLnAPrqO9Ozs7YWxQh6Ojo7lgGbwX+xP73rKWFUj5XPJ+rDIq+N9iawy+wzzPsQYwx5guiR3L\n",
+       "oanFb0xbUuJXCix0kJrNZg/xd9M0/5OZ/Z8f/u87ZvYJuvXlD39rwU8wrrTvQJ5YypExFSHBm6FP\n",
+       "0uihzDilamgfoWU2n8oB71NQySDVPepj6J3wubzcYYGfVZFyXh2rnHQnk0lrwikTETsFcv3xvkU/\n",
+       "RHiPWZ5bJJVuJcaHVVqHFFRkG//mDxEIxOB68qED6uhHjx4F0wFMXpwKQzkjA8qJlQ8JbA7164UP\n",
+       "mABH1KggETZbAtgAnzx5kjRX5ZK4qrmTM89xklKU4Q8IXEaKE24wGHSev4CaQ6urq61DCwtAub7y\n",
+       "3FfsxM4HEM9zxQE8XIYf19zBQu2ZLGDiI8wcP6gL6scRqSrBOLsy4PDC95eOhxdE19bWwpxIRfmy\n",
+       "iZLv8eXGojJVXyozFAvc3sTKexavEb/vq/FiExb/hvmGA9XGxkY4kGM/4TmmonVL99TU94lN7Vir\n",
+       "4/FY7mk+5ZRZ26TKCa/VfOI9y6eQaZrGvvKVr0Traragaa9pmjv0v/+umX3rw7//uZn9naZphk3T\n",
+       "fMrMPm1m/0YWTARpFRUVFRUVFRUfF+CwvLS0lD1IZTVSTdP8tpl92cyuN01zz8z+SzP7t5qm+aKd\n",
+       "mO3+0sz+IzOz2Wz2ZtM0/8TM3jSzsZn9x7PIkTSltks5qEGKHY1G9uDBAzObD4X26tHpdJp0HlM8\n",
+       "MjHuoRhYWmSzYEloKPPDoL1sAlL5zQB2rlZO0zF+Ey9hxjQ5nPATbWL1ObeBcXR0JHO7eS1BzJk8\n",
+       "pdJX0m5KAmI1booviTVmShJV13hsUppLmKPYRK1M05DyXrx4IWkAMB5QR9+8edMePnw4V8bq6mro\n",
+       "59LQ5JhkFmvPdDptaSG571lDjP6H2ZqlR/zN46HajWvME8dmULWuIVmzRpqpDvz+w9ooNlf4fuB2\n",
+       "crLnRaHm7MbGRqgz5zcr4dBZX1+XWkwvjbNmSEElBS41cal9R3ELKfMhOwL7pN8q8TDvZzzf/dxf\n",
+       "WVlp7ZHswI16Md1DjnXcr5VY/le/3+aoYNR3j4NSgKWlpTBX0ZeDwaBFA3BwcDBnuvL14nHAHoR5\n",
+       "x47l7KrC31z86/u3q4bfA+1nM38uk4fZyRpAHZhT0VtToHUzOx131lylApFaZedumM1m/774+R8n\n",
+       "7v8tM/utbMkVFRUVFRUVFT/iuLRcez43F5MmKrZi5eCYcgZM2XD7/X5SWu+az4exCNlXimixKzwt\n",
+       "hNdsxca7pC9LQ7VVOUry7kLsmEKq7sp3jOt6lvmfegc717OG1TsyTqfT1nzf3NwM88eHHjOGw2GL\n",
+       "xoPntSJVTNWZNT7KRyrVVyqkm+cfsLq6Kh2QFWnqrVu3zMyC5hnlmGkS3vF4HJ5n7ZNfiysrKy3K\n",
+       "EbPTkHXURTlp8xiyRiKlRU9pxFWgyt27d8OYMW1BifPzF77whbCffP3rXzezeQ0iaynUeDKpJf5N\n",
+       "aRjw2+bmZtijU0EM0+l0zmkZdVFA/3LWBv8diOV4TPV56hoHyqj9LDUGm5ubc0EEHuzjhj5KEXjG\n",
+       "2ua15DmU7rPKKd1fN8uT13YlpzWbH2Mz7SutsLGxEb6bi/jNAqxhV75vsFjMIoScl3aQ8oPrk42a\n",
+       "zUdhvPTSS3jWzOaTufL9fmA3NzfDgKooIcXMy6pYpZYFUoM1GAxaPFccocNRG/4DNJvNWtxMR0dH\n",
+       "weyB33Z2dlpl+371iy7GzO3bwiYTFfGn1N4MFfHgN7BYXVJI8ZaoD3PMvNkVXcefnfW5f/x7VldX\n",
+       "w7zkaDb/br6PgWSk+OizozpHpHkzVOkBttfrhXYowSfXlypCj1PcmMXV9Pido1lTDqqz2SzMO7RT\n",
+       "mY82NzdlVCCeRRm5TRh9wOZ5ZQ4oZbvGAWNrays4+HaJkDMz+/KXv2z37983M7PvfOc74XcIeKhL\n",
+       "ju8qV2e1VwKpOcb7hVq3KgosB2UORn1w7ejoSCYPVsKdilj172VH7dy4+qCo0WjUWsvcLxxl7E13\n",
+       "/B6OiuX6qCTYuI7v6O7ubjiAqH7L9T3WCgvqXojhOcFpgUoOORzdy4d/vCeVTk0FsSnTo+LmYlcQ\n",
+       "djHJHaQq63hFRUVFRUVFxYK4NI2UWdxRlO4zM23u62IWglMZTsql7MOljs9mi5uIclqZlITQ6/WC\n",
+       "8z2kCq/N8BqpGH9QadmlKlOYSUodBXOSUFcNE0u7vtxYWG6JtqaUIZmd17nPSs24eAYS9e7ubitR\n",
+       "L2v77tw5CaR97733gvZBOeGyJFnaB6l+UZxAikMnprVFe1LzCnP86OgojKUyu7E5VbHss8TsTS+s\n",
+       "tVOJblNrYWVlpaVh4Pty8wrlYXx7vV4w5ZbucdBM/tIv/ZJ985vfNLNTsyDXRWkIzwJlZupqVslx\n",
+       "mqlrKU1nLr8ia4pVMInneGLzJjAYDMK4Yr1xrj0EUsTapfaQlAM10x9gTqyvr7cY+n05qD/aqfbg\n",
+       "VGAL7xPgrFNs7THtng/EWESrnYN/j3Iz4L2Bk3mrPsda4uCvx48fV41URUVFRUVFRcVF4FI1Uu43\n",
+       "M9POnDknYfav8mRaCsxhVSp553CWZyERoO7Ly8stgk8O6YQE9OLFi6DxAUnj8fFxqMv169ftjTfe\n",
+       "MLN5ibGELFP5Q3Fd2Vk35bzJ/VIiqTIhWte+ZDoIljRKnR9L7lOSrcrrx/56TBTox1A52l69ejX4\n",
+       "yHCILofCA379fv7zn7c/+ZM/MbP0nOR1pvxhuM5+rSjpUznGjkajlr8RS4upPmDHd4WXX37ZzE40\n",
+       "Uxzi7EkGp9NpkWZke3s7aBHYMVtpNwDM083NTRkMUILhcNjyLTk8PCymrgBeffVVMzvZJ7797W8v\n",
+       "VJdFwH4/GOvUN0VpY9j/T83FRQJ41HfCa15iztzKvzO1N3BQhNpH1TosddJm7Rie8VpS/nttbW2O\n",
+       "rBTv8O1UfoKbm5tyviu/P/YlQlmp/Rr72HQ6bZXBvmCchcS/j7/bvI9Cw8T5/9BvvB/4/WRjYyO0\n",
+       "AxaC9957T37HPrbO5p61FAMdWyyeJZwZaAEeaEze4XDYou2P8Rd5M0TMpIiFllKPLy0ttdKeDIfD\n",
+       "QF2P9z59+vRcItdiSEV4LMKNdBYn7tJEt6UH6NLNKFU/5eDPrOIlJgd1sFCHU1WX4XAYPhS8sd2+\n",
+       "fdvMLDgOMzgpKD4OzAmDuc9Orn4j5TqjTtvb2+EAB8Scg1MmInbwxAaKtvH6hilreXk5vIcd7/2h\n",
+       "mBOy4t/RaBTqv7+/L51gAY5EQ5+zEz7GHdfG47FMEcN9w3VZBNeuXZtL+bMo4ET8+PHj5OHvvMFz\n",
+       "SCVzTu25uP/u3buhDzBPlpeXW4fTWMBFCqVjlEsHwumAzBaL6AZ47S0SIY5nNzY2QvsgZMWEQJ8y\n",
+       "6fDwMPB0oQ5+7XsgGfrbb7/d2u9WVlZaAR6xyEbvMM7pgHh/xB6Da10O0ueN6mxeUVFRUVFRUXFB\n",
+       "+NiY9jiM00uTOf4iem8r9HJ9fT2cfHFSZu0T0FWVzvU7i3QSA7QJkIS89q0UXvLJaXRYgkupmlNt\n",
+       "jnE3qfuUJKISti6qCVPcSDENnG8vO0vmnCT9b4pHKgfloM8JaJVjty+fuaWYqwZtS0n0w+GwJVVy\n",
+       "2LBy5kyZPLjvsb6vXLkinVVZg8zlM3Jm1dls1jI98ztZ+5TSUKhxOA+ocm/cuBF+Y41AqZYX9yHY\n",
+       "QNHCxJ7z7+b9k7WPPpBiOByGcpmJ3L9PBRMpTejNmzdDeW+//baZnYwBJ6b25WIsc/2D9rC5XHEl\n",
+       "pSheuM6suSoJHGmaZo4brQs8ozrqkMqROhqNwnxHn7548SL0F5vBfMDG5uZm2GOYCR/rgC07ninf\n",
+       "t9ksPjalLh4l7gMxYA2z2b8rmHcS/FJVI1VRUVFRUVFRcc64VI3UyspKOJUq2zROtrdu3Qon6Xfe\n",
+       "ecfwLK4z4WZXfwUOiVS5jLzfVE7K4meVNiNni4/h1q1bQZLnOrHmzaytufIaKeX35duC57x/gfIZ\n",
+       "8kzqZt0kLyWdKIdN9ZyqC4BrzFQMKMlGjWu/3285eKo68/vYvwth++zrUUK0ef369SAtqnLRz4oV\n",
+       "neugNI05skFIs16L59/nMRwOW/5LXFc8yw7hHJLt/R/W1tbmSPxS4Dp7ot2PGkpTxuHxvl5XrlwJ\n",
+       "z4CuIEcLw8C74VP3wx/+UO5FXgujtGOj0aiVVy+nTU35YHaBn7OsbUnRdHBof9dvWYwGJQWVa4+D\n",
+       "Y3yeO567uL9p2nkTVQBUjBJBEXL6d5mdfmtWVlbCHsT1OQ8fP0BpGrlNi2iEYuWYdR/r5eVlSQcD\n",
+       "DRzOF+PxuJXPczqdhqCbj52zOf7GxgeOir29vXAYKE35kTJB8ULLoSTVyHQ6lapfQJWF+rEjNVBq\n",
+       "tuRy8O/q6mpQK6PuGxsbYdEsLS21IiBj7/UO3moz56Sc3Fcc4cXv8GUoB8uUGr10oeeCCFAvddBL\n",
+       "HeRKOa1UGUdHR+E+Ns8xSzfaBkZrVrWj7Zy41Sff5cgZddjg9pZyZPnoGfxulv+ooh3YlDi1Sw7o\n",
+       "K5jm9vb2Wu0cDofh0MnzhTfrs37QY+CIWi/85TijwEuzs7PT6v+1tbUwJ2DCWsRV4LXXXjOzk4MU\n",
+       "kNrHeO7gw7JIpgEOmvCHL3VQUYl9Y1DZLlKCAz6KzICdWsvXr18PeyVHlaYOsakIQrW/q+9P7ABX\n",
+       "ut+d1+G1K/jwiv5axL3FR/yp5OA5QHDgdG8Yk+Pj41ZyeGXGVQfaGKqzeUVFRUVFRUXFBeFSNVKx\n",
+       "ZMQ45Sp24q4n142NjXAvTDwq1xFDmQdi5iWzPOt56h1mbY1av98PUr3XAHE7FAOzr4PXminNELeB\n",
+       "1bJeMlL5oJRWic1uXG/FC8ImLrN5TYSSJhSURlLx0bATsTeTcbJKlKfmp2LN5X7mkH0/B5TmivuE\n",
+       "zbNKkvdSlll6DqZ4wI6Pj6X2QUncqTWX0sA8e/ZszgyJ+zA2nEPLawtj5q2Uc3uMciIFNT/Z4Rr1\n",
+       "Rl1Yy8tQfcTcOWbagX44HAYOOGhHSukLeC5CI/Xw4cMwZ0sdsXMaEJ8rjpn8OTAIaxh9pRyDlQko\n",
+       "Z7rHnOz1enOa3BiuXLkSHKS5LKX99vvA/v5+a+/g/JVYo8+ePZN7SAm4bYpuRtG/MFhj31V7ifK2\n",
+       "trbCPOF6Yww5wMQ7lue0lphXzH3ILide08x7Jc8J1naala8LBsZmNBoFUyf44rj/ct+aqpGqqKio\n",
+       "qKioqLggXJpGCtKUckYFcGLt9Xot/5vV1dVwaobkt7e3J6V2Ub5kwFbAaZylaK/Nun37djjdsw+H\n",
+       "so37DPPD4TBI/5wDyte/S25B4Nq1a8FBXUnqKf8g1rJw2H1XwktAOSPH2gRfG0gxMZ8C5fOEuvD4\n",
+       "KqJN71StnOs3NjZazvvr6+sy3Nn3C2tHcn4E/nrMyT2FUl8vtFtlOWctFBMQeuk553eI8RuNRpLq\n",
+       "wEPRJKgciTl00Uix3yLK6wqsZdZSMVs8NKAPHz5M1uPGjRtmdsKqXAL4zR0fH4d9Bz4j+/v7Yc6q\n",
+       "+cb0EH4NxAhoU+ubtQa+DweDQStk/7y+N0xzo96tNNKl7/XBM4qRnOcka11RLq8lr1nhOqk5ntMU\n",
+       "4n2rq6utdTMej6XmH+Vw2XgGWqimacL7umhFU3UFoMnb399v3QuNI+pqdrK2oFXMMabjGezLi8wx\n",
+       "vKPX64U2Ya1MJhN7/vz5x9PZfDAY2Gg0KmbzVZFS+Du10a6vrxexv167di1EzeTQdYNX4M2JD4xm\n",
+       "8cUPp2VM/BcvXsjkrQzvuB37mKc2S440xCaOSV6alFOZEWOm3VKoReyZ8sfjsWThTh0I+f24zoe7\n",
+       "1AGFWbgVd5cviz/+fPhTmy6nQPB1VylYUmDWccyhra2tFpu0StXS7/fnTCExrK6uhvvQ97EDiz88\n",
+       "x+rs1xxHPZYepGIfFgWMAzCZTFqJkTkaD2PNaZ64j3yaktXVVXv1w/Qub775ZrbuZmY/9VM/ZWYn\n",
+       "44aDKqcSSiWyLU1arExOAJt7OMIsZR5JRfTyh3QR9mplPgYwzhyRWHpIUOscY/WDH/ygcz1LEeOn\n",
+       "8nxUZ3U2L0m9w2uFDxveFMfmT/4mLRLM4++Dq8Du7q4cYwDzYGlpKbSNv/2Y+/j32bNnxfyM1bRX\n",
+       "UVFRUVFRUXFBuHT6A5Ww0yNmTqH3mdnJqXFRlS6/B/9yaGWsvNg1Nj3iVF6aJ2ptba1Yeso5vHuK\n",
+       "iJy2gqUUz6SttB3qN8WrxJo3Dk1WJlbFz+L7vJTPSznXc55GaAiOjo5k36j5xM7U3A/87+HhYfib\n",
+       "zbleo8JaFqUlYw1Caaixl/5zpkJ2wleaoVS5LOWlzHjcNvyNf6fT6Zxjr9nJ/FPrTGlU2ESlpHSv\n",
+       "8WuaJmnK42ATn2ONoXjiuMzU3opn79y5Y6+88oqZmX3ta1+L3s+ARur+/futRNYxZ/gUuJ9VoA9z\n",
+       "7OBfzjqBd/isEpPJJLlH8tpX2uVFM0ewRhxrdW9vrzXmisNtNBq1XBBU4A23g53wvZlxNBq11reZ\n",
+       "SZOc2rsAvnZR9AdLS0tzloNYvXIoHbeuPFaDwUA69p+FB8vv28fHx2HOoB37+/u2t7dXNVIVFRUV\n",
+       "FRUVFReBS9dIeWxvb4cTJvxwWGrHKZx9F1Jhp+zPwZK6l9Cn02nRyXY4HLYc30upDpaXl+ecUVF3\n",
+       "JdmqUz2egQ19NpsFrUguVLZUelFSgpKMUlrAGOmeChf2/gCxfEolzNwxpOg0GF4jpHy9mLGeCTL9\n",
+       "O2P+Ol6KjUmd/r4cqzy3wT87Ho+TjPq4NplMgvYBmg72CfL0AGbz4eBcL9zng0h8/jB+r68TO0Gb\n",
+       "zY85NFeHh4dzzNa+z72Ejfv8+lxZWWkRnjKp6nmwPzNAQHz9+vWwVlJ+N5zfDHQJ77///pnyfHoW\n",
+       "bs6rx4BWDv2jaGG435WGK0X0G4PXXE2nUxn4oHLZ4Tr8Sp88edIaw+3t7RAKn6tHyneU/SIV1NpT\n",
+       "WrmcJse3c3V1dU5rgvcqH8rU+1KBQzxeyrrABMToa9wXC7Lw9AelmQtms5mcO13pcjiADGPG5wvV\n",
+       "/zkfqUs7SDVNY2tra6ETeCNWETXKWZGdFM3mVck5x201iTDhcW1zczNMDjjhIkUN3zcajcKHB87Y\n",
+       "w+EwmDoW2ey8o513Ak4BG+2LFy9kqg9fhopYM9MOoiWO8U3ThLHhRJeeF+bg4CC5yaTMeHxYK40m\n",
+       "w4Jk7ivUnz/c+MjFTFXMVA74TXA2m81Fk3ZBjGdGpexRUUK+vRyNAzMez08Gxhwfz9LEvbmPEo+p\n",
+       "d7iOgQ9sZvFE2kCXqD023wPqg3FRwLwfDAbFATcpsOle8RCpj7RK0u2f5T6NfWDwLs+vFuu/EtNP\n",
+       "LILwPDEcDlvfC7UPMZ9YzgWhBCr1GH/UmbdJHRK5zlhLHHmrgiHUs6nE4115ExU++clPhu8X9oYH\n",
+       "Dx6EPffWrVtmdmLeRtnYE/b398OZYJGAJH/Qm0wmRe4+S0tLYd/kA9ru7m417VVUVFRUVFRUXAQu\n",
+       "TSN1GeVWVFRUVFRUVHRF1UhVVFRUVFRUVFwA+vlbLgbnEbpZQoz5UdjafXlmcaJK77yqHBmZXbdU\n",
+       "cxfri5SzeVc27PF4LNvlc3HFHNU5lxzgSeEUXUFu/Dgk3jN352gDFL0BO7YrB3XlO+ZDvzmg4eMA\n",
+       "lWfsPLXCZ11nJWOdq/MiufY+CpyHz9VHvY8BOYLXXDCLojwpoYU4q3N/1z6PBX0A8GnDPlFKJsl7\n",
+       "iHICX2Ru5Bz8gUXpI3I4771DgQPMVDtK/WJT45pbU1xurr2XdpA6D6gDlF+IpZw7w+Gws1Obmryp\n",
+       "D1Upu2tuE1Es4YuwrHN9/EbBUM6mfADxiR/N2gck5gABFD+Y6ktVF7zTbN6ZGzxIPJY+ujIW6QOw\n",
+       "E6aK+vIfjaZpiiNQzgO5Td9DRUJ2+RCUbPZn2VhjkTKKUfs88FF8CLiM0nRUKcT4mFQUbaw+qeux\n",
+       "+2N7UemBNbWXKfb0HF9TaYoqjiYrAQtvar9T7P5KcPTva5om2yaUm0rFxNF4qfrH6uDbxFGgXecl\n",
+       "l3WWg28qGlxxBzJSSdq537h/U99jfof/npXM9Wraq6ioqKioqKhYEB9rjVTqRMoJG5kHpeT0b9Y2\n",
+       "z0wmE/trf+2vmVk5w3CK44cleaWCXUTNmwpDVuDQboaSInyuMIbK7cSSng/BH41GrXD8Xq8X6AIQ\n",
+       "sruzsyM5nrwqN9a/Xtrc2toKYbYoA/Xh9rKJUrWX6TeUhkn1f4qn6bygpNgStXZMkxh7b+rvGFIc\n",
+       "WLn6ndd8L0UXbZTi/eraHzFtUkldSjWDF9H/KZQ8o7Ryk8kka07Hfb6sUvPmZz/7WXvjjTeidfG/\n",
+       "cxmlc405rVIYDoetPUGZS/ldShOSM2EBOV46XlO+/sxzyNe8dof7Eu1QWu/ceOXmMb8n1h61zpqm\n",
+       "aSXkZq1nqh9L7/O4dELOj4Kz5TyhBimGErUn868wuqrgYxu9V5k2TdOZ9ySVcPT27dt2//79ud9U\n",
+       "cllOiOtTz5jNmwI9Nwr3s7KXM8Ebxob9sOBDhcOd2pRY/Z1LaOuTpPIzqOtF+OuUrJUuZqtF/Spy\n",
+       "HyUgtvGpcs9jHzjvPudM8CxMlH7QzsOkV3qQWqRPz+N9OR8pBc8jxs+W7k2p+qnDS+6jXpqIHvVj\n",
+       "k91Z0tsoN43cs137XB3ccvOz67zb2NgI+yITaXuuOLV+lOlRQX1XYu9L+UPl5rbax/D7rEbtVVRU\n",
+       "VFRUVFScLy5dI1WCWOQIHIs5mWZp+gGFL33pS2Zm9qlPfcrMzP7pP/2nyftLIwdwUlen+6ZpJ1CN\n",
+       "tXcRh1F/Co9JZrgOExWb5lKSHif+VFId+mg0Gs2xnJvFpQ+UB8l1b29PslwDYJN/+vRpK+nu2tpa\n",
+       "aAtLkCr5cmmkpC9DSUpn0Y6ctzM0txd1Wl5ezjKLnwdKpFl1nfugVLov7fNY/6bSMqWcltWaGgwG\n",
+       "YU6UjmVpFG1X013s3vPSAvqyUmWotafWT66e3FcpB3RVVwdhQ/IAACAASURBVH6HH7dYn6WsC7w3\n",
+       "eXbymIk3Na9i7VEmtq5Q5apE4Apsfk2txRs3bpjZSQojNXal2r9SDbwPlCo1z5bu+fiOVo1URUVF\n",
+       "RUVFRcUF4NI1Ul25LnKcRimocFu0PycVAVtbWy0NR85BOucrBY0KqABy9eAcQLnxK5VelBYIUFql\n",
+       "1Dg0zSkdQI6GwksnPA4sPSFnE+dkBFhD5MeVc+hxmb6P+/1+K+xVSSycFDil/fyofaTUNSV9pvzd\n",
+       "cmXk7vcJqBepc9f9wEvqZ9FILerrwrkg+f7Us2dpOz+LZ1Kh7DkuqK5YRDuiND08F33fcxns31na\n",
+       "l0rz78s3O10jKFeN/Ww2a2nEVT/nKAy4bqlcpLxu1T7bVSPVxS/J79tcnn8+dg14/fXX7c///M9b\n",
+       "9/k+579TPrDT6TT41+byU5ZqclMciIycj9SlH6TOAs/x08XjnpMZ4lm8786dO2Z2MpHff//9TnXi\n",
+       "SeIH5datWyGqLBUlZ5YmIVMTn5Pw8iLuwoVhNv8xwYcRTuFcV2/eYsQyvJeQQg6Hw3Afmxd9VBw/\n",
+       "e/PmTTM7yTauPob+N054zO8via5ZWVlpmcTO27R3EUiRiCoC1dQHPmbK8mPexTH7PByoL+IgVQJu\n",
+       "OwdNqKTAKQdlVb9Sc98nPvEJMzO7d++ebNeiB6mcM+8iZqac2aukzihvaWlJHmr8fqGcjblcHqsS\n",
+       "c66ZJfdHRsn6Ufcxj91ZDlLq3fx8jnOr1MTmeQJjnIQlJtbScvk+Pg8o53o2L+NfX0c175DguZr2\n",
+       "KioqKioqKiouAB9rHimAT5MsOahTbKmGDRIkpA+z0xM0wjdjJ3SYvyAJjUajoGpMSTHPnz+Xp3+l\n",
+       "WfMqYlZrMz+UksYW4d1RPEhe6mQojVPKbMRO6SkTq5kVaYsGg4Ftbm6a2Ykmymw+JJnv8/VZXV2d\n",
+       "S0mDugDKGVL99lFwRyl0DWvngAaus5L4ffh5jvMGGI/HrXIX0SR1DbdfRKMeeyb17lz9gVh6JLP5\n",
+       "fitJu2J2Ol7Hx8dJ5/Xt7W0zO9FIee3DeZr1YuXnwP3n509OU5cKxR+Pxy0NF5uZmUdI1V+tf3wT\n",
+       "eN9QbU5polR7lVaI90z/nrPQZnikvpX4FilzqjJNmp3uh6j//v5+69s3Go3sc5/7nJmZ/fEf//Fc\n",
+       "mWZ5U7oqN/WdSq09NoOzGddrsyaTSWibytgRQ9VIVVRUVFRUVFQsiI+lj5SXOhRrqrovdq1UeoUj\n",
+       "m3cmz2E4HIaTMqSUXB6fnGPpIk63Zu02ltjTm6ZNMhrr8xTFAcqISRgpTQ8wHA6D5ko5L2OMptNp\n",
+       "eE+KLFH5Q7F2TJGDlmp8vMMot+cykxb7+ndJzlmKkmdKSfAYZ9F2sIbzLFjE4T6laV6E3NTXJUdX\n",
+       "8TM/8zNmZvad73zn3BL/lqDUXydXp/OgYuD+9rk+Y3VSNChdqWe6+vLMZrOWU3fsO6X6bREfqRQW\n",
+       "CXJgB3BfL6BL/6YoEUr7N9WO3F6k4LVyKR+pj6VpzzcwZi4rndSl5gCftHh7ezuY8RAtdnh4OBdh\n",
+       "hvqVJMccDodzKWkANQEUfb9XxaacNbugadqpUNQHod/vy3amIoZefvllMzN76623wmGTs3rjsATW\n",
+       "8/F4PJdCxpeBjfHq1ashEEBF9CmHdYwXRyTymPvFrBzLl5aWQjsU7wo4WS4Tfi7wIZZV2Jyc2UyP\n",
+       "uTKXLsISnDKJddlwPUod2rug6/vYbABwFKhC6kOvnLlLOXf83yVY5BCzaBmqvJhJqRQ+cpGji9V9\n",
+       "gJpfuWS5XaEc2zlijqHKU1GMpeBUV135FVW9uf6q7zxj/ZUrV0JaMCDWv6lAgNR3nq/zO3wZ6sC3\n",
+       "tLQkry80B4vvrKioqKioqKiomMPH0rQH4IR7cHCQzMGU48YpKYMlGJh7xuPxuToSdwnzLkGJ1H4W\n",
+       "NbCX9FSiy1woOTROrC1Cn7MDLT/nNSXKiXA6ndq1a9fMzOzRo0et8pmtF/ODnXX9nMlpWWD6nM1m\n",
+       "SWoHtPvo6OhSzEzqWf4bbWSTJ/ePCurwdYjl7rrovSQXOs0asK44C0VAjCE5RffBIfYpDQ2gAi4Y\n",
+       "523aK31H1/2ladq5Ps+yL6py+/1+a40qV4XRaBTGAfs/OzmnTGylCZRzdU6NeayM8zDtnWW9dskC\n",
+       "wXu92bxmlZnSlVUjZeosLZ/f5fd8tV+kAlEq/UFFRUVFRUVFxQXgY62ROgtyEpXSRJXCP8vSjjpF\n",
+       "l+YWyqHEgdJjUemllKU35t/CmhkPhGqDnNRMh7Uq3zHOq6eI81KEnMoOXkoEp7SeyvlSOY9+lMj5\n",
+       "m/Bvag0oUkBuUww5Cb0rZUPsntT1j1ojxaHT5+HYzXPIz7eYFhD3QYv+5MmThZxpU3VJYRGNVIn2\n",
+       "SWltcvdhD5lMJi2mefbrSbW71+vJoJlUO3LvO0/NFf+d63NcZ58h1uR4tnZFOVDqy7W8vNwipR2N\n",
+       "RsEHlff1Rb5jqWdLHfcBlRlE3e+JrVMaqR+Jg9RZJiOeNzudRKwmRwcOBgN5GCrlCkptPLmoCH/Q\n",
+       "6tLe3EQ/zwgPZvBOHSJi0YnY7NHPx8fHyUMpoMwaMVOHP1x98YtftG9+85vZtuU+0mwqLFmkbBI7\n",
+       "L3Q186X4utiRnscvVYY6uOYOa+fRjhy6Rkp2MWssGq3FayU1DrEDnP9gxD7c169fNzOzDz74oFUu\n",
+       "o5QhveQa4zz3F19+yUEl1t7SdD+l7UylEjnLfM6lEVL9m3KqLkVsDZRknzjr4dCbt1UWENV2Ntme\n",
+       "xx7SZR+opr2KioqKioqKigvCx5L+wCN2WlfmmVRoJcAOnvwOlRNJaT2UqU6dqFlSBpRU798XO+2r\n",
+       "Ol+kRtH3ZYxfxdc3lrAV7YS6V4UpK0nv8PCwRXGg+shzUpmVJ3GN9SNMJ0x1wFonX/Z5shF7lIx1\n",
+       "LKeYx/7+vjRhqLYBKeqLmDYrxYOTQ0rbxf/ftc9zqn+g1Mzjn/H1jJloAG/GW1tba1GxxOqpkm8r\n",
+       "U2yJBoEl9PMMijkrUg7ZMY0Tfs9ZFFgz6N/Ha1rNsdKEtx68RlNrledpLKfgomMRe87PE3Uf7++K\n",
+       "mZ2Z4Tm4xWzeJJeiDFJWlePjY3vppZfMzOydd94xs5O+xHVPb2FWnkFAaY3VfTFUjVRFRUVFRUVF\n",
+       "xYL4kfCRYqiTI07A7JeSklJjttGUozI0IkwwlrLDo46qbDObO72nHNW7SoHcNnaC7eIM6stL2fG5\n",
+       "/iktVKlmaGtrK5BzQtphzSBr5bw0NxgMWqSb169fD/4j3LZUm1h6KmHwVbhIZ/OzzAkg50Sey4eY\n",
+       "kvTPS3PR9T1d+rzUuT0Vqp9arzntDmNjY8PMzF68eGFm2gcl5pfCvntm3Zzmff1zFCBqzZyHj5Sa\n",
+       "nzlnc3VN1S/n13cW6pxFc212IbRVWKTPF9kzUvej3zDX1NxcW1ubo5XwQP8dHR0VfY85b6qq33lY\n",
+       "BWJng5yP1I+EaW9paUkekNDgrhOZHaQ5igEDxqpJDI5naDXTasPYhDI7+TD7gw0v8NgAeuSc15XJ\n",
+       "k6E2j5TDOPeVv+/4+Lil5uXxQGJh7j9VPvqI+xQHmhcvXtjVq1fn3sMHVvTRaDRqRYLEnELVAQm/\n",
+       "KQff3Af3oxRIcmYXv6EoU9Z4PG7dp9ob+zD7g+1sNpMfra7g912kKUkdLP0GzHtCyTvMbC5VTMmH\n",
+       "rtfrtczbOVcGLhdRrDhIcX1TXFWx+qdwloAfhq9LbN/zZlJ2ZWCOO/zG9UMQC+6L7bOl+6vaH/0a\n",
+       "4Pel9ujYQVT1R860V2L+ns3a0dZ8D+/fal0rlBw8vXkaZZR+t7mvfZncz7yn8f0MJRAq9yD1bVDv\n",
+       "86imvYqKioqKioqKBfGx00jFnGWVdMds02YnWg2YhXJaGyVpejX5tWvX5KnaM36Px+NwimWmXM/W\n",
+       "urKyEvLHlZrOFFL3lUiZqCtO+MpJm9lrGZ6zYzQahfewhAFzRU4TBckRGqd79+6FayyB4T3MI+VN\n",
+       "u3t7e3NqYP+Orv2sJDmF0WgUJN5FpPZSM1OJAzBLrixFp7SdqWTSrJXl62osu2qkclqSlPlDMa93\n",
+       "KS/VbymwxKq0t0pjDigncs4WkKKhiM3FVG7Hrg7+MXPaebJ6x+pSkm+UobRFwGAwkPyAfn8302ag\n",
+       "lEuGgsq6kVurKnBIwWtbYtdzv/k+4rri78lk0jmBsao/+nlzc9Pu378/91zKChID3s1uJKlnc33J\n",
+       "/88aP7zPW41KTOVVI1VRUVFRUVFRsSAuTSPFjmNm82HD6vTsT4X9fr8l+UIbZaYdKLls9h8xOzmR\n",
+       "Qsq9ffu2mVnrNI334aSKPHJN0wRNk5KE4OvD+eZYOimV7rwWgPuwVBswGAxafkSKRmFpaUnawb0f\n",
+       "VL/fb71vOBwGx1kG3nfz5k0zM3v48GG4hr5hJ2jWcN26dcvMtFZC2dp//ud/3szM/viP/zj8xlKl\n",
+       "l/AUESj3Lzugq4CBRVh6gZwmKncPQ0maMdy4ccPMTuc5+wmh3aurq3MSPKDmRipvYk77lGqn+i2l\n",
+       "+YlBzZ1U3sScDyL/f4kkv7m5GfpDzRdoZ7GX5NDr9eytt95q/b6o022sH73GJUceqZBzqvbvVPnc\n",
+       "co7+gKrf1atXg1Y7pwnBunj//ffDb8qfMAXsx4eHh1Fqg1j9Y+9b1KE/FnTk84PGmM1T2jWlLcJ+\n",
+       "sbe3Z1/60pfMzOwb3/hGKBPPpMirzdoa86OjoyQrei5ARt2n5p0PwlEWG49LO0j5DUwlLfQHKrO0\n",
+       "KlGxTisVtWJA7vV69vz5czM7HSTlgMoHDD64wZTF5jLUkQ9QmAi8KaU+eDzonC7C7KSv/AFqaWlp\n",
+       "jtreQ0U+TKfT5EbBY+X7gzd9xaHDgEM5DlD9fj+Y6t57771wHw6o/G6MCZtGfDnM1o1IPZTD7eDF\n",
+       "rA6g+KDxbzxG7FBsZvKgUYrcByj3ESn9eClTHCJRcZBiMxn+5bapwyTX0ydBVs6hvl7qeleoj0NO\n",
+       "OPHlxfiIupqcUnj69OlcInYPzPfBYGB37twxMwsHJcW5NpvNpOCU4gIDYtkH1Nj4fUftV2wm4d+A\n",
+       "XBCB/43NTKUO2cBkMmmZ8R4/fjwXROSBtFVPnz4N3wHeG1JBLGruxvooBiV08DMqeIH3O79u+dlY\n",
+       "n3suKwUVuaqeOTo6ClF4SCb/+PHjcIBS4H0iFT3JwmxKYEWdvJIG7WCliWqDmR43zIMUqmmvoqKi\n",
+       "oqKiomJBXCqPVI5ziVHK2aFCHNWJugSs0mN13+c//3kzO5V23nvvvaApSZ2sR6NReE+KKVepHBcF\n",
+       "+gHaBDPdhz7nnTrVs3pcqVjR96VJXNFXZvPmSm9CePXVV+3Ro0dz9x0cHARqBeU8ivYcHx+3+pol\n",
+       "OQbqj7mmzHh8X06T5KXEru/wdSu912sXYxQGygTktQ6bm5tBQmdH/xRKnUi7osTM4IMM1DOxgBYv\n",
+       "sSqnb+5L1uR0bTOvN6892dzcDDn0vv/975vZiTYXay1XRomZX9GbqPbmHMu5X0r6VJnxzmoWTAFa\n",
+       "8N3d3WLzK/aY1B6htDwciu+1x76uin4nZZLlUP0UN2BpIEBuLak6+3K4/gzu89Sz2Gdv3boV2o75\n",
+       "zmeDVC7F5eVlWU5JcABr6oEUA/6HfSZfXDVSFRUVFRUVFRUL4tKZzVMZuflU6U++7L/U1bmSn2Wp\n",
+       "Aidkzgl3Funah1aWsnubpR3LFS2EpzQA2CFaXcc7Svwq2PHYa7DMtNZQSY7sAwWtErQebAfH3Njc\n",
+       "3JRaEPSRcl5kf61FmZ7NtJOpXzPKdq98RkrLVWWUaqRimgZ1X1eH165l5JDSKvAa7eL3pMj0Uizh\n",
+       "XZn3uV7cdu/PE3NyVc6t0O5CslbldtGsd2XrVu3g8S0JGIjRpeTK84hpcGL38b4NxLQKJet7NBqF\n",
+       "50vnHb8j1X+p9yl/3Jh2CfVncku0TdUh5hPalQ6mVDOY8jHkoK5UVoSYVSNF/szvKN3T1FyMUaOk\n",
+       "NFKXdpDqatoAVPqOmKOoKDdaZq7zMQGbpgmDh9/YFMcOb6noBK67N5moRcX167IAlIrZQ5XHSEVK\n",
+       "5OrFmzqbLs3i0W54Bv3C0TMAmxl5c1BcMSnwOKRSEqj2ArxRpZitS6OO1PVcWglfHzOdKgMO5qpP\n",
+       "zfTGAudRmFdjkXxdscge4NvmHZC9ENY07aTayvF0Mpm0Pqpq/HOmk64fp+FwOMfjgzqVfHxj8O3I\n",
+       "1Tl18Io5pQMl+0sM6qO5SHtzH3MA+xgcyznAxb/Lv0+V5ctV0eBmZcKGOiizOW+RPuf6+UCg3GE8\n",
+       "F3ChoNrJaWC47iU4DxcB1Qc8n0uFyWraq6ioqKioqKi4IFy6aY9+C3/nJIEYtre37cmTJ2amHdnV\n",
+       "O1QeOUgV6+vrwfT07rvvhmcWkZrwnC+DzREsiXgJIqYxUqzD3G4vvbA0riTRnCSSUoXzO/AMm/tS\n",
+       "Kme+htBvSIysMeOce3gGpsLnz58ngxJSEhPzvfD8UH2UmkcclLCIlN4Vqi7e2Xh7ezvQczDvEH6D\n",
+       "SSlmBi2Z72oexMxCOc1w6lrO2bxrn3N5JWZcVcdSU0JME5ZCV7cFtY/GtB2pfQDIJUFWjtaldSy9\n",
+       "P+X+wfsnuzlwhgmzeWoPVS67KuA+1sTnqAvM5rXavK/lLBKlUGPYlYkc7ciZqHNaGw4ewL8pTf2r\n",
+       "r75qZidabXAMYu9VvIODwSDUORU0kUtkXNIX3J5YwAV+rxqpioqKioqKiopzxqVppDyTtDpZ5/yd\n",
+       "lERV4ry+CDMvsLKyEp5X2b/Z7l8iReb8k1JgaTEW8uz9l3J+BIyczxPgtUCKKC4mBfoQ4+Xl5VAe\n",
+       "2sKavFyuphIfKeXTNB6PpYM3oLQPKUn+LBqp2FxM+Qcp5MLgff/xeLDEmZJI2a8nxUSda1sKpfsA\n",
+       "+0F01RZ1QYnTcuw+gMemxPla+XjkwPenNFul/aueUf48OSfxknVWGsZv1s7/yX0FrSuTIuM3zs2J\n",
+       "31RGBq5zKqCGofbZlD+O0vz5b6T/fnGWDS5X+VCVrAHlE8hIje/S0lJrX5xMJsn5pvy1UhYALnsR\n",
+       "zXXOApO6P+cjdemmPTj+wSQXQ4kq8cN3z93H4AnNBx78pjYyqHzxvhiz6qLO8wyOQlNpGdi53Sy/\n",
+       "abOjLTv9lUQqmM2bCVFuaWSebxMfclXEBeq0trYWIvjU5oayBoNBMOlxYmQVTQiog0WO9yvFW5bq\n",
+       "x1Iz0yJmodRvakPm39A/4/FYpr8pmRvKRKXSS5ylHf46yldCEa8V3+e5jwOQE2hypskSTi6+j4NT\n",
+       "StdjV5NoblyVS0HKPKPMoBxVHOM/Yqj2qDbFTIrqw1d6EPVgjjQgJgSm2qHK43qqtuE39QFXB2BO\n",
+       "PM4HFvxdaiIsNSmm5iR/V3LfvZJ5HNtnSwXHkrndZT6pa9XZvKKioqKioqLignDpGimvEYhJRamw\n",
+       "TVzr9/st6bNUIkU5ZmnVOXMG5bQZ0IBAoxJzNk3RC/D7/Kk9p3bH+7k8Zn/OmcmUlK00UudBOaHq\n",
+       "lCpfqY1zzPHKQTXmeG42r1pPhckzUtqRUpy3+UutAdbA5JyNvckuxsPG7wZKuKC4vYtodlk75fnS\n",
+       "eN3ktNUljNs55+uudS412am68PxcxNyorqcCQpRTtcrNWRpYwGWeRyYHrntKE+JzZTJ4bivKm1z5\n",
+       "uTH0dUo9G9sHlOkuFyTk9yLWKjHQZk/JwWXErEGA2k+UNaCrw/1ZtPdqHs9ms7l5DnjNH/qqaqQq\n",
+       "KioqKioqKi4Al6aRAqMvQq+VMzT+VszRMUdr+M3gBK6cB/l6zgehq00Z7SjVguXoA7pK6N5B0Usv\n",
+       "Ob+fnHRcQlpo1ibxVDn0UB8z7bhf4nDNUM7rMZ8c74OWY8AGmIwypaVS/jylErqva5dnS1m7S8vg\n",
+       "/svRYSifhlI27hLkJP7ZbNbSSOXILbvmmYuxjiuNmp/bsVDtVJ/7cvi+Uu1YjGZE+UgBqg9SGqle\n",
+       "r5ekWFBQGmL2HSrxX1HXu2g4VblAaX7Xi1wDpSSoOT+x1BrOaSlLv4GeTiU2bl7j0+v1wrrFb+Px\n",
+       "OJSboj9g5Bj9U5YYnh+q3jkfqb768aPAZDKZSzaY2iC5I9m0A/BixodUOckBikmXWbbZVOhV171e\n",
+       "L0w2PHt4eNgadOW0aNaOolMO11x35fzNm5jfKHKTTqn5+fnURgKqfAabUzlizo9nbPLyR8bfp6BM\n",
+       "DoBqe+ygjMSacDYt3UBVVGlug+FyuzpaK8Ei9Wzq/pIySq+hTTwf/HjkTAD8rpL7SiJ6/fxM7QP8\n",
+       "HnbmTX3Ac/VMMdvzx58Fx5QzN7fHl60Ofwox4UTNHWUCAtTHnP8/lUomdSBAWzzUIdabmWOO4CXR\n",
+       "22zeUv1XevBRa4D38q6HXXbDSEXomZUFOcQOCf59LGRjf5xOp+E7xnNW7X1eaaH2aLWmptNpS1Hi\n",
+       "FQK+vrxWUZcU/1tM4D8PAc+smvYqKioqKioqKhbGpTqbQ51ndqpN4JPjogziDHWy/bAOc//GwpAX\n",
+       "Kc/spD0pLUvOhOXvW3SclBrYm92aRud0SvEQqbYpp3nuU6XhWlSlvr6+bjs7O3NtY01jqt9iZkbP\n",
+       "SszPsrYN0hprVAHux67O5qVmvJx6Xr23xNSR0wylnHnZzMTzwI/lWR2LFSUHwJJyyjzH6GrOz+XB\n",
+       "TOVu5Ptyc7wkIGSRvuzKHRczCwJqf+F3LcJ55d+XS6h+1j3SQ+1TKfNwqk8XCbtnqLaVmvZSZut+\n",
+       "vx/+LqXdYEtNKsiBn/PP8t8qiAj0LC9evJDfBgWlRfVaKoUcDQXqdHx8XJnNKyoqKioqKiouCpfm\n",
+       "IzUajezo6CicCOEkvrOzU+Qky06/THKHkygy1j948CA8w1KekqBSmih+ltmwcU35KEGaTJGqKWxt\n",
+       "bQVtSyo0maVjvsbEnh5N0wSNkbLFA2wvZ82UIrzEdSVdo0/7/X7r+mQyke9L+UmlGIiVdknNocFg\n",
+       "EK6z9iwlXfF9vh2s9WQtq0KJNinmIJ+aOymNSi5MXr1DIeVfw+3h/vFjGZMuS7UKuG8RrXFpgEnK\n",
+       "t6dLHymfMQB7wuHhoQy4UEz+ysn9LG0DuI3nQUOQ0oTkylX38x6nxn9RPzz+LZURIednF6McMYtb\n",
+       "I1KBLazFUePJ/4+5E7MCmJ34O3nteezbmtoX1Dcu519Z6hOKuR/z4fXPqkAFzKejoyMZgOCtLjFN\n",
+       "nLeseC24QtK01zTNJ8zsfzWzm2Y2M7P/cTab/fdN01w1s//dzD5pZj8ws39vNps9/fCZ3zSzv2dm\n",
+       "EzP7+7PZ7F+K987QKUgK7BlmGcPhcM58Y6aZq83mk9oCiyYIVVFgMXja+16vNxfBFbtvMpmEjy8O\n",
+       "CY8ePWq9P+Z8l5uofuJxBKQ6sPIhrMRMxshxLfmPkmIWZvD9KXNaKS8WsLKyEhYu6sIH89S71abE\n",
+       "UIEAHyW6REqVIJZ4GOhq6lBRO4u0Q0E5HpfWv9QxPlfX1LzjjyUfnpQw4efdWdJ35Ey7OWb1ksM3\n",
+       "m1NSQkIuuTUjZcZNoVRwiKGUUbsE7IKgkgN3EWj8QUDtL6urq61vlnof72P8TfLjoBQWi6Y0A/x7\n",
+       "cjyRi2Bra8vMTr8XpVH0MWAdLGraOzaz/3w2m33WzH7FzP6Tpml+zsz+CzP7v2ez2etm9tUP/9+a\n",
+       "pvmMmf1tM/uMmf1NM/uHTdNU82FFRUVFRUXFjyWSpr3ZbHbfzO5/+PdO0zR/amYvmdm/Y2Zf/vC2\n",
+       "/8XM/h87OUz9LTP77dlsdmxmP2ia5i/M7K+a2b/278Yp32sklLnq6OhIql69JNXv9+c0UWZxniYv\n",
+       "qUyn01AuUyywyVG1Af96aSl2slYmNNSBNVGvvfaamZndu3cv9AH6I+VIl5OcYqpmSAnQju3u7trt\n",
+       "27fNzOz+/ftmNp9PC+Uo9m/WZjG8FPb8+fPWuCp+mH6/39IWsYSuNFEpbZAaL2Um6fV6rf5i7jNG\n",
+       "yjRw3s6wKcSc60vzyPnfYuHqKak9pZE6D9ORR0r1HjPLlDiyljqql4JdD3hPUlrllElUteO85lbX\n",
+       "QAZVx9gaNovTvSitWGnf+3oplurYO2IaPAYHMeTmrw9YYe0Qa0ZTXG+KkifGX4a6Yv9R3ym2rKA8\n",
+       "3sNUH3BuVliNWLOm5p0aB/WtKsmosLy8HPpS7bdqPPDtWltbC0mqcT2W3UE53Ke+STEUa4uapnnV\n",
+       "zP6Kmf2hmd2azWZwPnpgZrc+/Puumb1Nj71tJwevioqKioqKioofOxQ5mzdNs25m/4eZ/Wez2eyF\n",
+       "kwZmDeXOE5DXptOpDCVm3xI+YSq/Ie/7oLQgLAEpXwQFfgbaFpxSZ7NZ62R7FpsxlwUp4OrVq/b9\n",
+       "739/7r7hcBjKTRGP8XuU8/dwOEza0HH6v379etBExe41m3c8VBQW3OeoN/s7QSoBWGoHxuPxnMO+\n",
+       "WdzmrSRfD1+mv19JKZiLjx49atFCNE0jNVH+fbiX/1VUAoycs24soICfifXVWYgnVUh8ah3mtBr+\n",
+       "eqmGRWkXzdJ55mL+XF4rUqo9izmYK39IBR/8oe5L+T12qV/OJycVdFKKnCYSzrzon729vdZezt+G\n",
+       "lAO9+oYcHBwUadFi15TmB3sG+kKVi9/NTveGXG5ONT+xvxwcHCQpaJjuRWmigP39/RblwNLSUus3\n",
+       "7g8ec+XH6jVvDOVny9dS+xLee3h4ODcXuJ5cbr/fn3vG7KSvMMeU1QJ9Oh6P5zRWeJ/fy0v2ouxB\n",
+       "qmmagZ0cov632Wz2zz78+UHTNLdns9n9pmnumNnDD39/x8w+QY+//OFvLcTMDRsbGyEiS5m84Ez+\n",
+       "5MmT8BubFHCIwLs5io0PEJ7OXjnaoZ5m2mTD8GznzGirNjyus1f3v/fee6FcvEMxJfMAoz37+/vJ\n",
+       "jY/7gN+Xcsa7fv26mZl98MEHychBVS/ue/yNPl9ZWQkqWF4sfnHGuMAUSj5+e3t7rXYo1nY+EPJ7\n",
+       "/WbJwKKOwR8Ucmk+Uoer2IGAPzK45qMKVWQLYxEOXgI1AQAAIABJREFUt5KPVuzw3/UQmSuTzeCp\n",
+       "iDsV6Vnq8Jprk4q88/2pXBnYGRn3s4DB7Smtc6ovFzG3+nkec+pPZUdQQS7qw8dgs5fZ/H6mDsWx\n",
+       "hM0p+H5eXV0Ne5Za39xGfyjh9qpUMspczhF4qX0vFxWJ8jhgAHUpneODwSDUC2Oyvb0tFRsl2Sly\n",
+       "Tt88h1JuGmwy9KbC2WzWYkofjUatuTWbnWYQ4NRNfEhEEMpXvvKVZL2Tpr3mpAX/yMzenM1m/x1d\n",
+       "+udm9nc//Pvvmtk/o9//TtM0w6ZpPmVmnzazf6PePRqNsh+dioqKioqKioqPGhyJmjtI5egPfs3M\n",
+       "fs/M/sROTXS/aSeHo39iZq9Ym/7gH9gJ/cHYTkyBvyPeGwpFRaGK29vbk6fsmzdvmpnZw4cnyi+W\n",
+       "gFKmrBg/R0olyepPr6VYXV0Nz0JaUIzaSoJksHTiVZdd1Om5ZJWscUFdWFLBs6quiqncq5pzDONq\n",
+       "bFDWaDRq5SNU87GLRqpEKmJNkwomYF4yxW9TkrhZheLHUKJVUGa8XFg70KX/+N2x96nryvl3kbDx\n",
+       "ruUyWEMDMA2J/93sbBqpHLpmNlBmHG6vql+p5nDRLBEx87HX+LAkz9qY1DqEu8Th4aFMQK/qojS6\n",
+       "3sx8XuPH5aacwxnMho16+rnKFDSpOcfrlt0dcO/GxkbQlLEWmrWYsfodHh7K+XnlyhUzs2ApYPAc\n",
+       "wr6Jco+OjlpjrbSUw+FQftsWXXtN07TmTGytpBJUK45JbxJP0R/kovZ+3+Jaq78Reea3zOy3Uu+t\n",
+       "qKioqKioqPhxwKXm2mNJXp0g4fdjdspknZMIcqR7+Bf3QetydHQUyi7RajBiUlsJgVksrLU0LFcB\n",
+       "Dt7Hx8ctp++Dg4Nk6ChLginqB/WOXHv9GCvNG7c99b5Yn5cEFMS0lN4pmOcn+4mhX3BNSXc8x1Ja\n",
+       "lJiPlNL4lPoRqfmu4J/N+WsxFtVwdGkH5gv+5bmp5sZsNpub56nyUuXm6s/lxRDrS3Z0NcvTGjDO\n",
+       "kyjSlxcrM6f15GspjRlri7xfipne//1Ycrm5fJe+LV2IZUsc2ku1vFxuTuui2lSa3zC3HlN7OaDG\n",
+       "mn/DvzmLCX9LFt0nMEfM0r50ZqdtZ79ir5U7y1qBhSWlkbq0g9Tm5qbt7Oy0HDJv3LhhH3zwgZnN\n",
+       "T3QVKeWdbnlyQ4XJ78599FMJO4HYwvARBuoQ1u/3sxE5McQmgu+Xu3fv2rvvvtt6Nx8csQjUR5r7\n",
+       "wB9KSutgZi3Geo46zCUhBRZh0s2ZR8y0KY6ZfplTC0EG/F6w5j59+jQ8D5U42ssbctePNYOfLXlP\n",
+       "7IAJgYHH/qJNRTxfcuzZvp7eWRRIvYf7vBSK8yjFpcTXU4fO2AFEsTqn9hj/Trwn9iz/xgfqXIBC\n",
+       "CZSzNO+FyvyVWsOpJOccNMHtx57EHD+pcevSJpThy+X9otSEXspwD1y9ejXsHTEHeeUY7aOZYwFI\n",
+       "3iG71+t1jsZU/Qvz7MHBQTSTAT+ztLTUcl5XGA6HUtgAuN1qbgOle6+ap9vb22Z2osSByS92kKqs\n",
+       "4xUVFRUVFRUVC+JSTXubm5stqgMGtBqHh4ctrZKSHNbW1sIJlFWY/lQay6sG5BzPSsDq4NIknqmE\n",
+       "vAq5vG8ctotyt7e356gjuL5mpydyla+KnddTTqFKFX7lypXgwMiSqNfasDO8krIAJSWytMP1KpGK\n",
+       "YnnQMCbsUO/ZepWaP2baS5nTUvflWLZzSAVk8Nh7fpjZTLM6q3mcmttdtQVdVPGsHfEaqZy5MqUB\n",
+       "8fXxSIWcx7SoJfQhMY6iUnOvrws73J9lDinHd9bk+HKbps3Q3mVcU1odtVem5p/SEJq1tYtq32Nt\n",
+       "G+8hft1ykA3AZjCeG9DkoKyUZQTP4v8VRQBr23E9p8nnDB54n9LU+4CgnPlfjRvazgEIfN+i87Jp\n",
+       "mmC2RF+qwCazdo5CFZwyI8oOdg/JOZtXjVRFRUVFRUVFxYK4NI2Ul+IX0fzAOU9poaBVappmjnHb\n",
+       "TNtmSyX+nESV0tSYzUtrZvN5i1TIrJKElU2YNQ78t3/PJz7xiZC/D+BwXH6fl6Rms5nUnigp3PtX\n",
+       "Kb8kLpedIZUGSUlBXsvC2qKU1LS9vR3qmtL+jUajMGcwt1ijx2MDyYbJVUv8dXK0Bv53Rkzz4ykv\n",
+       "VC6znLNsSoNUyqhuVuaDwijV3iqwJg9gCb0rC7NCr5fOVJ9a/7wGuF9yzs/AotrxLkEEi/Q5nvMa\n",
+       "jvF4XOSzyPNJ3c+0NCl/mFTIe6kfYKljeUxrCKRyb8bWT6nWlp3gVSg/3oM6sMZH+QsD6+vr4Vlm\n",
+       "M8fejH1WaZAWcebmtarGFZYLXONAKR7PnF9lSfnKQuBJjj+Wzub4G52gmMhZRQcHX+DZs2etgRsO\n",
+       "h61DiVmbq4qv8cbmFwZvmoodWZli1IQ4i7Oxr49/NzYePoTx/f7go1IDxKAOSF4VGtugPe9XrP4l\n",
+       "SaG5XakNKma2LL3PH8zYHMlt8JuI+kAeHx8n0ygwSsxkucNVVzPYWcx0MZRwwcScsM8D6iClVPtm\n",
+       "baGKgyFyzvzetGuWjjBUpnHPWefLVRFcpQeukg/LeQmObFLy+0nOcZvr6Q9QsYO+X6MsjKXM1zmo\n",
+       "vQZQB1FOBMxAm9C3y8vL4b5YABLuL90nvIlqEeTGH6bTo6OjKD+jr0tJv50VJXuGCp7iZxZxl6im\n",
+       "vYqKioqKioqKC0JR0uKLAMwwSmryuYkGg8FcqLmZ5rJgCQYakcePH4f3KS2GkmJybOH+ZMtSkQ/3\n",
+       "jr0HkgtrwlRfpHKj5ZzNY+rvVIguTHKTyURKhF6VvLm5GdTALEWrenknvqWlpaCJYlNgSupQWjSU\n",
+       "++TJk5akpCRgrlsqx5/CdDptSfzHx8d248YNMzN7//33w70lmhclyalgiFL6g5xTNGvJ1Pv8nOji\n",
+       "DJ8K2Vd1yWnbFqG/AHJmEk+7knM25/cxxw2QoiFImeI5oIWBNaJyKOZQwuSee1dXbaHSzinzC+8p\n",
+       "/Izfa/j/eV9hTZTZyRpUFoyU+TDFzaSg9kfWRrGW3O/hMfN511yAOY204l3ieefv4zWstKjs8oCg\n",
+       "L/5NzSelpfL38/VU24bDYStHbtPofLgppFwQ1Hocj8cLadGqRqqioqKioqKiYkFcmo8UyClLpPEP\n",
+       "nzEz7cgKTCYTu3btmpmdap/4XcyyC00PX/c+LaPRqCVRTKfpHHqR9rbqngJnqlZlqBxVgPf18rbi\n",
+       "mGajJCzbk336Z1lTV+KvoPwSOKRXSdZMhqnGwZfL7XjttdfM7ERrpJzMPQuzcpZlDQ1Lp0pCK/Fh\n",
+       "UNJY07TJZkufZV+VrlqHLv5LqTWgwosXYUz3z5b463gpWzna83UOXY9ReTCYIiJFLprzv0j5+imo\n",
+       "sPwuwQYeuTnW1fGZ38fO0IoC5u7du2Z26j/JYfeshfR+YipgxT+DsmJEmNymUgfpXIABA1oUvJfJ\n",
+       "S1V/c5CKn3+x9qYCkfh33rtK/JfUnNjc3Ez6eGGMDg4OpJO2xyKM/+pZjPn29nboN+zpqn9jGt1c\n",
+       "cBjwsXU2x4JPJUkERqNR6GBO36HU6QAPukrSC0BFzJEIfF11tNrgmTEW96cYizl6z0cVKWe5lKMs\n",
+       "t8Orlv2iW1tbC23h55leH+WpsUlt4pyaxh+CNjY2WoeXXq+dhNQs/ZFRdeJxT214X/rSl8zM7Bvf\n",
+       "+EaLh4YPILnIP7/Bq0MnHyJSG7f6LZaCoeTjpvr0vBzVU8/mHEsXTRURKxfwpkL1cclFIJrNJ9Dm\n",
+       "a4v2Zcx52e8dsegvv55LHXdjfGiqnucxJqq9ngeOr0+n7YwJ7LjNdVb1S9VZmd1SczyXNqYUymy1\n",
+       "SBLekghH/nswGMhsHD5ootfrzbnJmOnUOoxUCp5+vx/qlWInX1paavXDZDKRY6j6i81t/loKMbca\n",
+       "f+BeW1sL6yuXcaE6m1dUVFRUVFRUXBAunf5AoTRpcMp8hNP9aDQKJ2BoMXq9XjhxKxMPnmXTY1cT\n",
+       "i1mZVBLjUCmVkHKaAZZezHT+K66DkuquXr1qZieO+9w3+JdDePGv0iqlEgrnTIEYQ5TPrOhsWkQd\n",
+       "uM6oC+rJHCkAawbU/FN5qwA2u4DO4cWLF0nTXm58S3PTpTQ0/C6lxlfPlMw75bhr1l4jpQzduWs8\n",
+       "HsoBXTnVqncCMZOY1z6xZpDnnwoKKaWr8Lh69ao9fvy49fuiGincy4g9dx70E9xeXiOoR8q0r6hP\n",
+       "UnQo6+vr4XfsF9PpVH4nPOP3WehoYrQ6vqyYtsWXyzQYwMbGRmib0mAyu7rP6OCRqgP2xX6/H+YW\n",
+       "5y3kfb0L1tfXw7h3fbZ0bmMfwDP4F+1Qju/K4qH6R32buvBIVY1URUVFRUVFRcWCuHSNVMoHiB32\n",
+       "AGgXjo+Pw8kcVAfsWMpEkMDt27fNzOz+/futazF/HXWf8s1iaQN1UY67ngE7p3VTUA50rM1iB1nv\n",
+       "xBmzGaeIGtG/Dx48sOvXr5uZ2QcffBDuU3mv1H0eilFdtXMymSSdyAHWDLGE8df/+l83M7Pf/d3f\n",
+       "NTOzW7du2YMHD6LP5uqirvnxZH+dHG1AqY/KRbFdL6r9xHvM8g7NLD3j71x+sRLNtKcjKXHwZ3C5\n",
+       "vn3Kr0+RtHKbUuOqtFSx95Wy+yuUajMVus4nbm/JM7zmeaxgIWBNFPqA6Q1effVVMzP7wQ9+EK37\n",
+       "cDgM+0Qq71spq3zMMf8s9Bwl+OQnP2k//OEPW7+z9SFFDpwK3OgC1u4D29vbZnaq3VeZCHiPZvqF\n",
+       "3HpG23xdY9p2T+mhnu1CQKvG+mPrbK7MSx9ea3XC5uZmWBilrLX8fr8h9/v9cJ0XdcrBWzlzpxw3\n",
+       "Y4PpWZGbpmkdcphlmZmySw56PtLDH+ZUJIhybubDGBy8Hz16FA406I9nz57Jheb7nPtXqe/VRxOL\n",
+       "f3V1tfVBU1T+/X5fHoY8D0qs/9TGWPIxj3EGefUyXy9ddyqh8CIRcPi7NFFo6vDXBd5pVUVAKedQ\n",
+       "NU95zHk9cpTneR6k1Lpg3rSUqTVmtvSmdmbATznG+rqqd8fKVddzKU5KkYogi+1jgGL1VuablLDF\n",
+       "KYBy+2PJPB4Oh6EuJXxcMUCQPDg4CG1mdwdlBlcHb9X23OE1dcBLmSFXVlZaEb8sZEOgfvr0qUwp\n",
+       "5s2fsbqpoC8oAjBG6jufizQtBR/WPGcl77MsHFfTXkVFRUVFRUXFBeHSTXsXjdXV1eBApqSirsiZ\n",
+       "gBSUJJRyvFbPstaD35PTlKUkxpwGxjuMM5M6Tu2j0ShcV9obrosK70U/QAKJ8WahDE6Minrze722\n",
+       "a2trK7DipzQ6SgOiVNOz2Wwu0W3sfTEzU6lpLzU2rIlTUqfXcHKd0cYYR403xZXypsVMhamkxSmN\n",
+       "zmAwSOZaw7XZbDbn4K84e3K8NmYnfZWi3VB5E5V0XKI18s+WBtecF4UE3lVq6khRdrD2TmkT/Bjz\n",
+       "vON2+zyopdQjjK5aPAYzzafmCTuvq/3s5ZdfNrPT/fiDDz6QgTKq7mqvYWCvxPxsmibZb9x2zG3g\n",
+       "8PAwtAnrhy0K/N1RgSp4n6LSWcSM6F03RqPRXPAS/lXUQ8x9aDZPjcR7oLdgsCUpRUeCMqpGqqKi\n",
+       "oqKioqLiAnBpGqmmaez69etzucnM4jmRvLSxvb0dNA04zc5ms3CiLXEiPCv4ZOtJP2OOjF6KQd3N\n",
+       "5jOao50pPwZ1eo4xmyvtCNuCcSLnfHi+v1555RV76623ovVheI0b+0ukHDfZXwK+Tc+fP5/znTCb\n",
+       "l4BS7LSskWKkwtWVlk+FWytNAu5jegYFttN7cjuVWZ596ZTmQtVZ+RGxlJjShChfipQPSozGQ5Xh\n",
+       "51XOH4rfoaR7XD86OpJ+aSmfEa4r/lY+HspBWRG35vxwVF28tusse3LMD+sse19Ki1qyv5hpVvlF\n",
+       "HJ8ViznWHGsSFcGwQmlAgx8jpR1RvrecM461aNgf0Q5VvtfEYwyZrkDtCWfp31L4793S0pJcNyXf\n",
+       "YeVj+lGfS3KBAx9bZ3P8zfxGZvORbZyKwyfx5I+/2tDwDnZAzh2qPP8Kv7PrAMd4ZJRjNj+TKiOV\n",
+       "MoM/AvybP1jywaI06iR1UIkdfH1blpaWwsHoyZMn4XlEf/BvAKt7+VDlwYc2pepWHyp2UEfbPGv6\n",
+       "cDhs9U3OjMOHRP/B5fu4T0sPOan5ocpIcS6VzuNSE1DMednPHcWKzJFrbF5TdVWcZarPgaWlJckl\n",
+       "5N/Hqv/c5s8RSGhHirdIRdmq5NGcSDllEgNU1CuPQy4ReGkUaOoQpg5SpXMRyJnVOYrbP7+yshLm\n",
+       "ljrQ5pyTYxkhVB25/qPRKOlwjfKuXbsW5jbuH41GYaxVCjM8G0uXljq8MtdWLnrTP6++vWeBMuNz\n",
+       "/RW67nvspM8BNf6b3zRN65zQJRK2OptXVFRUVFRUVFwQLtW0t7GxITUMCiUhnRwKy85mylHUS4ax\n",
+       "k6mXEpQpjuvnHeQY7KjO74W2BVo3JXlx2HWpFMXtRDJnpQmLoZSJHFoHH/4aAzsUp8JdgY2NjfBu\n",
+       "SFvs+M5SDEJ0mUfMO6gyP5AKhVW/ATGWcL+OYs7mCqX515QTeewdMbDpy2szWePIfVqixVokF1wO\n",
+       "ql9827lc1eel5bFWpJTCIKUxi5n2uV5m887w3O6SxNMxVnQ/nrH8e+dBu8B9oShtWONrFk+q7Z2X\n",
+       "OQMDa3n8vt3v94socWJrKhVgwO1JfX/YyRl7MgekqGc8p6Gi2lhZWQnvY/cRnqfY2/Cbshrk5ux5\n",
+       "ocQFgAH+QaaIyAF9gG/gzs6O3L8wz3H/7u6u1Hap7zbexzQYVSNVUVFRUVFRUXFB6OdvuRjMZrNi\n",
+       "bZTSFvBpN5cfyP/O0qeSxtjm6qUxljA471zM9m9mduPGDTOzOTZtSFQvvfSSvfPOO3NlsB8J2vn8\n",
+       "+fM5R2bc5wnFWCrnOilJJafl45xZaKci00y9mzUE3hnx7t27SYZilmxQniqfJQzPEq38nJjck4MD\n",
+       "IOWwo7+XWJUvCMM7xav28LMsZQNKE8Zhvspvhslc/XgoiTTmrwXkfBRSWgol+aU0XDHtYoxWguvE\n",
+       "WmiFmFTs66jCxtV7mZlZvZufwdxRofpq/NmZu0RjENMqeXB+QB4vNXYp2oacw7rS1Pq5qOZVv9+X\n",
+       "GiHMS1xTzutra2tJjRS06isrK0F7x2s09Sx/c1LaZd4vUv6kDBVcASh/Iv4WMdTcUvunf9/S0lIo\n",
+       "M1dX/74YBVCJ5pe1o7wP+3nM9+Fbsra2FvZ39f3hvRrv4UwY6HP+hvhzBb/Ptz+FS3c2B5hJmyMy\n",
+       "zOKbhP/IKVWycjJjYFFtbGx0Mnt5eOfGfr8fVJc4MOaiZ5SjLQad1Z+8oPAMTyzFIwWTl4+SxP1+\n",
+       "HiinRd70U+kHYiZMQKWPQZ1XV1dbm8Pa2lp4ng93ajw/85nPmJnZm2++GX7zJkreGPnQDBMrnPFj\n",
+       "KWxSAQNAzLQXY/Tnupjpw0iKc4vHXEWflaZg8Zt10zRzpjXUXZnd/IdPsZibpbmAeKx8X/GcVGbn\n",
+       "nDmV67wo55D6iMTSweBgznVEuRCKYnxI3iE/hlIOKgUVfVh6aPZ7Kq+p1DeFI+CUAIc+43dwupcS\n",
+       "s3q/329F7fGYwc3h0aNH4TeOYEZfKq485inzaaG2t7dl0Iwvg02AmAe9Xq/lQuHNfVhf+G1rayvM\n",
+       "n67JwblsjhYHOEipa6Qnz0kv+KpDaY4Pj6/53/r9ftgLcDBbWVkJY5IKpOK5o8afUU17FRUVFRUV\n",
+       "FRUXhEvNtdc0TcvxUEkYMW6URcEn/dz7OL+Q2fzpmZ1cFeNyCpAuVldXJc+RQokzHzvkms0n3jSL\n",
+       "M1qX5DricFw2a8ZMIGY69F6FjauQVGj0+v1+kPRSzqFmJwmJzU7NqKxBUFQL0I7s7+93Vk2z83eJ\n",
+       "s7nSXCgnU9TbbF6SVhpar5nh8WctqdIqlXILpfJIppyX+T6+5jVSa2tr4T521vV7A/cVawVVrj3l\n",
+       "VJ8LB/cSugK3KTVG7ETOfQCpH9d8DkkgRffB8FoH1qJ2DUBQ9BGxfcw7Pl+7dk1qaFPUIzzv/DxW\n",
+       "2qeNjY1wnd0rvIbw2rVrc9omXxeV440tGOr7s7W1ZWZ678A8VhpsHnPWPvJ+jPJRP9RpPB7PUYRg\n",
+       "z+O6AqwpS9F9cHt9/s3cNxXtODw8bPVRjgtOIUVBxEFi6h2sdcdcxPeC5yE7jpfkllxZWQljwmuv\n",
+       "aqQqKioqKioqKi4Il+ojNRgMWidblipZW+QlIHXaXV5ebmmY2GmaNTmeQmA0GrVyqPX7/WKH+LOA\n",
+       "JRCU7x3f2WGU/VhYSjBr0z14jZRZW0pkRzxF4se2eUUKqnwevHTPkqMihVPz8Itf/KKZmX3zm99s\n",
+       "XWMJiLUsXpJnx90UXQCTvrLmjJ3R8ZvKGK4cPL10z7QL3D9KC5DqF9bG+PtibNIeSvJm8DzwY6lC\n",
+       "yVlbpLSGeN/y8nKQ6lUblZaH3/faa6+Zmdn3v//91vXd3d3wTpR3cHDQameMEqHUURjIabP9dWan\n",
+       "Vn5uLKGn/HgA5euV80+MOTLjPv9MbJ54R3rlm1ZKiRErw7dta2srrOtc/QCslSdPnkgtCupdOua5\n",
+       "wBsg5Qe4tLTU8q8ys5aGaDKZhLU3Ho9bOfF6vV7S3yxWb9QPc9D7JPN9MX+j0qwIvm1q3cUISjE2\n",
+       "CNba2dmRQRMliM2TUjqInEbqUg9SMW6PFHizK1HFm2nGWG82iE1o/yFlpBh3t7e3Q4QBf+TwN5tG\n",
+       "fP2xCM30QkyBnfnYdArV9LNnz2S/5SKtAH8Iyzkjpj7svOnz4kuZNXji+0OkYhuO1SkVyZlj1/UH\n",
+       "BWVO4cNVKbgPUh/1XOoXj7W1tVBXzD8+1AHD4TB8vFj48POF+0qNlTJ5Mv9ayvwN9fyLFy9a43D1\n",
+       "6tXwN+rOTP18AOX+85tl6Ydb3Rc7hKk54z+0KkODOrz4+gMYa47KTX0IUmZwbpvqKyD2fUAWAPQ9\n",
+       "m2IVUvM0Zt72/cL7BY8Vzxm8T32YS+qyuro6Z24zO1kLGEv01ePHj6X7AvqF2db9wXZtbS3UVXEC\n",
+       "Ytym0+lchDbqAEGED2Qq+qz0kHVe8GtgfX29ldCe65M7ZPu5nUslU+qqgPmyv7+fTLHGAVA7OzvV\n",
+       "tFdRUVFRUVFRcRG4NI3UZZRbUVFRUVFRUdEVVSNVUVFRUVFRUXEBuDRm867+IxeNLjnAPk7IOct5\n",
+       "h1LV78yayz4Zi+ZlYkI8zhW1KHq9XvCxYedQb99+9dVXg88Bcljt7u62yC85Y3gq5JedOeEbYTZP\n",
+       "4om24T1MgumfnU6nxU6tqi7oU/i7PX78uIgGpN/vB6dbzkEI/yaMjWIaVszrg8HAfuZnfsbMzF5+\n",
+       "+WUzM/sX/+JfJOuAcTk6Ogp+JKAW+c53vtO6fzQa2S/+4i+a2Slb/RtvvNG6bzAYhHc/f/486vRs\n",
+       "duqTwT5euRBsdb3UN/M8ocLL2Z9DBS/wuKXyjCmfsJS/Cd/Hflap8Hf21/I59NhXBu1YW1sLvmVY\n",
+       "M1wG/IVevHgR/kY7Dg4OQrABKE8ePnzY2i9iASvA3bt3Q7k+A8P169eD0zfue//998OcQP/dvHkz\n",
+       "7EXAyspKCCZA/37wwQctmhYOlOr1emF9sq+fJ4LO5Wnkvo/5iJmd9i/vF4ugxG9Jze2NjQ17/fXX\n",
+       "zez0G/Ld7343u1/nyjKbz8OX8kHmHIoxup1wb/LqjwjOY2PjxLgXGamXSsuyCEoPKKkPjEpJwu9m\n",
+       "B14sPr4fzvH4ME+n0xBtdJYDFKeZwdjyBo7642Dx6NEjycvjo2F4nih+LeZh8c6NzNOEPogtbp/A\n",
+       "OscczW3Dhw/JTe/duxfqjQ18OByGQ0SMj8jsJA0R7uONkdMxeKhsAMw3g+t/8Rd/ES2XwRGpqEtK\n",
+       "mFpZWQnzKsUgf3x8bJ/4xCeSZXterZiDt3JCVpsysz6bxaMs8YF99913zaxcWFNO7or7it+VYv82\n",
+       "m49iRl1SH6USZ/yS+1EXjPne3t4cU73ZCe/TD3/4w7l2XLlyZU5gMNNJafkghWcPDg7spZdemiuD\n",
+       "5z2vZd8HHBAAp27Ug5/lrAxo2+rqaqgD9kIV/MJ8XT4LgdlpoMfy8nLov1u3bsk1rpi5Iahgju/u\n",
+       "7rYi9FZXV8N1jjrEgQH99sorr9hbb73VKrcUar6rrAg4+OLb++LFC3vvvffMzEKk7t27d8M8UUBf\n",
+       "sJN7av2urq6GsUWaNkaXb3Q17VVUVFRUVFRULIgfC40UTp0x1u4SsJR9kap7qE4/Cn4qBmuXvKaE\n",
+       "+wwSEuddApqmCRIWtAW9Xi9IbjmqBqizUcaDBw/mJFW8z0u+MSkeUh0ktZjZDM+r62xeSGnPUKfD\n",
+       "w8MibhyuXy6pNqRXjMPu7m4oD1qNvb29oOVjMyL6lOFZ9j/3uc/Zn//5n7fuw3jB7MdQ0hjGvGma\n",
+       "IKUyKz9Mcffu3TOzeS0Am1XRLyl1+dLSUhjXlLZteXk58MyUQrGrKyqOpaWl1n6SM6EDg8EgaKK+\n",
+       "/OUvm5nZv/pX/6qofqqMXIJh5v2CNtNzVvEzMXb3VB2Yb8jPZTVfuFzUj59F3966daulaXj27FkY\n",
+       "109+8pNmZva9731Pvhta229961vhN2hKf/3Xf93MTjQOyDPK+e18/Zk9nf/FngVz07e//e3Wsy+9\n",
+       "9FLYC1E/hNozOBsEaxLxHm4bxgHfDQabdvlf/9165ZVXwr6NNbm3t2ef+9znzMzs7bffNrP5tYz7\n",
+       "rl69muTLgvbr+vXrc+OTgp87+/v7Yc2xCRgmUT9fYkBf7ezstDSCau9/8uRJmAueQoNRopmqGqmK\n",
+       "ioqKioqKigXxY6GRwkl1EQd2dr7zp/+L0EidJUfgeSDno4GTO5/gIY0xqRqgsp33+/2WVmk8Hsv8\n",
+       "V94mf3x8XJSvsNfrSX8troPZibMnysB9rLnEnFHSltm8ozjfHwM7pXtfANa2cf9yvQDOnWdm9tM/\n",
+       "/dOy//wcXVlZaflf5EhTVf9BooOjN96DerLfr1JHAAAgAElEQVSPiNmJZugXfuEXzMykTwXaMRqN\n",
+       "wrPKNws4ODiwv/zLvzSz+NiYnbAeq3nJdVaO0V57ynVhDaInoGUoRnal5S3VRClyRobKKqAIg5U/\n",
+       "FOdJxP0pdmrlD6X85lL1Y0DSv3HjRtA0YO4+evTIPvOZz5iZ2ZtvvhmueXJYs9N9QvlLqXKhpXr9\n",
+       "9deDRgraMXY2hoZyaWlJEpmiPARZsEYK77h9+3aYB6gz+1cB4/F4jkQWdUIfoZ8PDg5CvdScGAwG\n",
+       "LdJN1QdvvfVWS7v35MmT4Hv2K7/yK2Zm9tWvfrU1F9g/UWkusT/s7u6Gd6f8mGJA+9gSgjmLvKlX\n",
+       "r17N5loFSi0+PhNCSvudwo/MQarEI38Rx2aVZLT0sJOrk4raKmHePitSqnr+mONfs3lzkYc6XLHD\n",
+       "LcxLWMx7e3tzUSRmJ32Av9EH4/G4xSzPBwNskGx2Uak1GOhrTqeAe7FYRqORTC+EOmDzWllZCQsW\n",
+       "H/NYuVAN80EKYEZj9AH3Jd6tTGzYqF599dXWtaZpWocMju5D/z1+/DhsRip1hWIlR/JnPkixwz/a\n",
+       "iQ36xYsXoS74YDHw7J07d+Th22N3d1eaitFHKGtjY6MVFcVQewL3/fXr181s3nmY08t4R3X+mz9i\n",
+       "3nTAcxbjsb6+3mo7H4awLlQiXtUWnp8+bRH/xnXmCD1/gGa2e14fpR8vzDefksvsdI7hgG52ahba\n",
+       "398PkWoMfFQ5Yg7rD+vt8PAwOZ/+7M/+zMzMfuM3fsP+4A/+wMxO++iVV14JcxXzc21tLcxttPf2\n",
+       "7dthjqn9G+389re/bb/6q79qZma/+7u/a2Zm7733XlgjvC58X66trYV5gn754IMPwjjwOgQ4RRDm\n",
+       "7OHhoYyUVmsScwzRsFevXg3jr8Ya9bt69WrYT/ggjzWUivYejUahf/nQgrmIOq2urrbq8Pjx4xDp\n",
+       "+8orr5jZSSQfykNKsT/6oz8Kz2Dv7fV6UmBEO7APLIpq2quoqKioqKioWBA/chop4Lw4n/jU7N/J\n",
+       "ebwUmE/IOxb7v81OTuqcAPiikNLMTafTlvMzh29D+uTcaJD+j4+PWxqX2WwWJELWeimJGhKN4q1B\n",
+       "f6yvr7fMbrPZLNlfqOvW1laLDoAlSNRJSXcMXI/dh/IgxaytrYU2sSoc/QvNRb/fn/vb7KQv0Ocq\n",
+       "3D/lLKkwmUyChhBmhYcPH85pcFJAHZRzK+pycHAQHHw///nPm9mJBP7Vr341WS+zE2nbm40gTZvN\n",
+       "m3E8lpeX7VOf+pSZnYYr7+zsnCk8G+ZSpvZAHTgHnNLu4n6lQeI8eJjPMc2JD245OjoK/c9JWr15\n",
+       "eW9vr0WxwFJ3yjynJPSdnZ05p3WglLLBB+so7Q1rzLAfbGxsSG2B/421xvweTmBtdrLesA9Aq/m9\n",
+       "730vlOdzFnJZh4eHYd1De/aFL3whaKS+/vWvz5Vjdqq9feONN2TgA89vwDs37+7uBq0z1u0HH3yQ\n",
+       "XP8qifD6+nrLxDqdTuX+iXejbb1eL+zv0PzyPMb9rAVnHitokPBszBUBdCUYI9aWcWL7n/7pnzaz\n",
+       "eYoVOMHj3zt37oT6oy6vvfZamBM+sAp9hHb44Itf/uVfDvsJ6Bc4920MVSNVUVFRUVFRUbEgfmQ0\n",
+       "UheVwZodUT1xYyliEhskM5aAFnU2b5qm5fjKDvJdoPw+PDj0mzU+KFtJGwD3B6S6wWAQpENPfMk4\n",
+       "PDxssU6z3w80ISsrK0HCh0QDe7dHKUN76X2Q7hSJG8NrLsbjcWgTS8d37twxM005ASBEWb2fMRwO\n",
+       "gwQMqff+/fstR+EYOSR+g9R57do1SS/BxH54roQFudfrBX8YjNva2lrLH0LVj4lZoZHIUW4osPaE\n",
+       "xwbl5bTGfp5wv3AGAbwHkjyHqwOsbWUNEtYA6rS5uRn8jHK+L94fip/xJLtm8xK61wiur69LPzw1\n",
+       "d1Bn5fzP9BcAk0T6ftnc3GyVO51OW/5LN27caPVBr9drzYuvfe1r9su//MtmdkrPce/evZaDv3Iw\n",
+       "Z+280jhDK/jGG29IZ2XlAJ76xmAMmCoitlZ9YM5gMGhpBIfDYdCaKc0lfLIODw9b84OtFdDMrK2t\n",
+       "Ba0TtEHLy8thvJQVhwNV0Ief/vSnzexkTqDfoQ08PDwMmiiMudLK3b9/P9T5T//0T83s5NvgtY7c\n",
+       "XhW8gu/Z0dFRCHxAP6asUsCPzEHqosCHEoAd6HAQ6Mr7xNFTvLl2PaRxnXyalxz3UQwpZ3RMaOVs\n",
+       "OB6Pi53lvTo41n/elMAHTYwN1wULd2lpqVWXK1euhI2MNx7VR6oPMDbMw4PfeGPG4QF1zh3y2aHe\n",
+       "zwlOcaIOp1jEqcg1bs/y8nLrAMCmDvzL0ZEMtBN1uXnzpjQveEbgwWBQbH70bMxsRkTd1cdnf38/\n",
+       "bK5grl5EwOL0GGoe5NIk+fkU++D5tb6xsRHmN4+nj7xS/FXPnz+XUaCpVDfcBs+GPplMWuYl3p9Q\n",
+       "l52dnZbpMRbBiN95THydWdhBuVtbW60DyGw2a5nWJ5PJXDogs/loPG4vADPSvXv3wt8w3RwcHLQO\n",
+       "UsfHx2FuY+394R/+Yau9DHZ3UPMRa4r7Ee/mccN+goPGnTt3pAClgHnATO9o2+PHj8Oexn2j3DjQ\n",
+       "58zN6PfZ6XRqP/uzP2tm8+Y+z1/HDuMsdKB9WMvLy8v2cz/3c2Z2etD+/d///VBeal/htYh+3tvb\n",
+       "a63Rl156KSv4mp30H8bb7/MpVNNeRUVFRUVFRcWC+InXSHGeM2+KmU6nQYXJLME4rSvVP6QTzgXH\n",
+       "UoD/rYtGyauaSxm2GYtwbfkExCVQZg+PmFN6CoqmgeuXah9r9CCtsRqd2ZdRBqQcplDwbMLsRJ5y\n",
+       "ij8+Pm5pQra2tmT+LkD9psB8M5DWIeVtbm7OJX41y7PAY/zefffdlpnk137t18LfCE0vDf5gqgBI\n",
+       "/s+ePQtz2psgPPwcXJRORGUv8NoTxeAdy4OnOJl8H7OzOdMu+JyHXCZrTHwf8//naGGUycaP62w2\n",
+       "mwsEAXwiXqaK8c+bzbcb8xK/xfLF+XHk+7AuJ5NJSzug6EZiffE7v/M7c/9/584d++53v2tm8/MB\n",
+       "ARS/93u/Z2ZxlwFf16ZpZPugfUI77ty5ExyZWfPjc3cuYm3g7xM0jrPZTO4j0Bax9cCziCsz7uHh\n",
+       "Yeg3ftabxm/evNlK9sxgDTy0RdCmnQXD4bC1D7MWDZQYCr1er0VHUoKqkaqoqKioqKioWBA/sRop\n",
+       "76vCJ3nYaZeWllqEd6PRKDzLLLsqZB9gKd87XS7ifM4houeBpaWlOenF7KS9kE66Sv0xUkBI/JBS\n",
+       "ORM8sLGxEfzSmNlYOTLDSRv1e/bsmXTI92Hgw+FwjiTT7GRcMdYccu6Zb/f391uSYq/XK6Kz4BB2\n",
+       "3xd4TwwxB19PrspkqKBB+Pa3vy3Z4hFuraRG9AVrF+Az8PLLL9s3vvGNueulc+Tp06dBYkUfPHr0\n",
+       "KGgEQG+wu7sr6SdwH9ZNiSMowBonte48XQUzmyuqEPb1Sq1jaKsGg0HoV2gIVldXw7grx3n0a8z5\n",
+       "XmmaPBS7O7P7MyWHp3tgaoecBsyXwUCQiJpru7u7Sd9RzLvd3d2ieXblypWg7eC6QFsE52pebxij\n",
+       "w8PDzvsqNFavvPJKaz5ygAG0aZ/97GeDRgpg6gGMgW+rJ+xV2lHOYoA2feELXwikmykfLoWYdo+d\n",
+       "7s1O9hqME9r25MmTsEf79saQ8gVdXl5Ojj++YZubm618npPJJOwtgNJM5XxrY/iJPUjhw8OLxncc\n",
+       "R/Iop3RWxeN9rKb3m9t4PD7TAcrXcxHVr3pmMpkEcxAfMFPmAqXm55QkPuJveXk5TGosXDZhcOQI\n",
+       "PqBsasNHH6aO0WgUDldsMvF15ggO7nOvgo+lBsCzzNbsD1elZi1+Fs/s7u6G/sNGpMxuo9Eo6fTI\n",
+       "0WfgX0FZb7/9dhh3mJQ2NzdbUao8j1UdwCr8ta99LXww2JRdAk4Uyn3uI9fu3LkjD1K4nuMCU0Af\n",
+       "qRRBZu0oS2Xa4/uA6XQqOZsAZS4DlMmO+esUozofnlKO7yryivcib0qcTCaS00rBt0XVxazMUXdv\n",
+       "b6/FW7a2ttYyq7Pjs+ekY3z6058OBynsEXygwXvZfIwP+JUrV1ofWHaaZlMi+gYBMNvb2615per3\n",
+       "+PFje+2118zslAPr8PAwtAnt9X3nBS0eVwbGk7mWfIJizviQg+fB6vf7rf3y0aNHwVSPdjx79izs\n",
+       "N2Aif/78eVgr6rCbQr/fD4dJf5AzO93fnz171jK1P378OKT1AQP67u5uiOD05XC9SlxoqmmvoqKi\n",
+       "oqKiomJB/MRqpHDaVKo8r/I2O5UsYpom3Ku0RezkvIiD+EWB2+GlAtbecGg9s7mbzZurlJMeO2F7\n",
+       "9vfRaBRMW54d3UyreVWoMYf2erPn8vKyzBVYihQHDN63SAg+83GxucVsXrrnhNFKumdGZrMT6Rnq\n",
+       "dGXOgzP3w4cPQ9g42tjv94P06XMbmp2ao3Z2duY0iIDK4+dxeHg4Z073gKSpmNWbpmklc14EPLe5\n",
+       "b/y6Vtf87/h/Nv2bnbRN0RV4TKfTlvT89OnTlmaItQ+K6oClZx9QwNplfhbP8Lgpx3IPpX1itwV+\n",
+       "Bm3CXNve3m4xvB8dHbVy7fH44/6tra1WZgBlEsT85z5gYKwUJctgMAimOmi/edz4GwFtC/rsBz/4\n",
+       "gX32s5+du0/hwYMHgb6D4bMxMC2Bmd4POZuE2bxpD9e++93v2uuvv25mpwEes9ksrCXWtnsKGLWX\n",
+       "9/v9sI+gr5qmCeseLgW7u7tBK8b7ijKxog85hyuewTjt7OwE892Xv/xlMzvJq4d+YVcUjB2PP7RP\n",
+       "+G1jY6NFv8P8b948mELVSFVUVFRUVFRULIifWI2UlyAZSpvE17yEyVKAymXFObJKfAY+KihfCyVJ\n",
+       "saapq18Wh0T7Z4+OjoqctAeDQehzJWGy06+X2lV+M6Y/UI60AGvgFKt3VygNx8HBQfCxgIR25cqV\n",
+       "IN0hoGE4HCZJYZl9GH2Vyq/17NmzVt49pnGIsc7jPpTB0lqJky73Ha8Z/A3NhQprVxQlTdMspJ0q\n",
+       "IdpMXUO9uR1m833knzk4OGhJwMrpezKZSLZz71/HSAVDKN88BuYah37n6BTUWPvfeFyZlFI58Pt9\n",
+       "+P333w/aHThKHx8fzzmFm51qWBg8JxXdhyciZbA2lZnBAe6XX/3VXzUzs29961vhfXgGGrW9vb1W\n",
+       "Hfb29lrku0tLS629yo9Byq+Wg1g8qere3l4gIUUgj9np/EV+wIcPH4Yxwb5zcHDQ+mY1TdNy3J7N\n",
+       "ZqHfUX6v1wt9jPG6detWeB+usRaVrTyoA7R377zzTngGLOa3b98OxJ68d2Ec1DxFPx8dHbX6mNvR\n",
+       "BT+xBykAHT0cDueYhc3mzXj48E4mkzm6e7OTCe7VqXy4wmaYcxi+LJQejlj1q0xePhLObN5Z23/Y\n",
+       "S8tVG8hwOGw5dPJHOmVCnc1mreikfr8fNnO0I8b+fRZ4VbxZm8F9fX099CGb9hR4/qIdmGPqEMlM\n",
+       "5Di4cXoE5RjN/WF2Yp7BoY4PQ6qvvHMoOzmzqYCZpflf9IfZ/CGBHcIXOUiVzL3cPfzx8vON5yJz\n",
+       "QbG5zeykncpRXTkre2HNzIJJDGPIEYT8XmVm9BGksTWjDowxM2UOu7u7YW6zYKA+Xn5ucxRlisuP\n",
+       "2cDVQSq2lsxO5hinzMGziDrEmppMJuEQx2bI733ve2ZmIc3Im2++2QrgmM1moe2cFsr3/+Hh4dwY\n",
+       "lriF8PeJx807Z7MbBPqIk3QjmIMPZhwZjHdz5o+UoIeyHjx40Jpv4/E49AP69PHjxyHgQo0XTIbs\n",
+       "MoI95ObNm+E62tbr9cK+6BMVnweqaa+ioqKioqKiYkH8xGukgKZp5hLJmp2cXCHdQXWuzH7Mv8Iq\n",
+       "dB+GnJKEflTg28SaHPTV/v5+kF68psFMS8CQrI+Pj4N0mpJwu2iKMHaoC2vWGJxEtQSsLfIhszko\n",
+       "aQjlM2UD5kwXbQHU33iWuYDQB3fv3pXsv3gPpGzmPuIyfX1u3bolub6UNtFLuDFuGDZr4jm0g53O\n",
+       "U/xbMZRmFuDMB/jXP6toA7jeHLKvHO39fFPJjfG82TzrNDRRHMihtFl+3fJ8UXOL28iuCdxuD+XQ\n",
+       "7nF4eNjSsh4fH0vmbWiWoDl99uxZiypE5U97++23w37CWlnOPWc2r7lg4N3s3A3nanaKxlyEJobH\n",
+       "jU1ofv30+/3WdyVmqUCdd3d3O1kO+F8G2r61tdWiIbly5UqLnX4wGIT1ij44Pj4O7gi5tefnQmzf\n",
+       "Rl8zRyPqlTJpMncguxtgjmE8NjY2Ws7jXfPnplA1UhUVFRUVFRUVC+LHSiMVI9BLASdWPr0ruztL\n",
+       "ZV77MBgMWk6as9lszj5rdr422UURI85TSEmWHAasJAYvUQ8GgyAB4dqNGzdCCDEkx3ff/f/Ze9cY\n",
+       "ya7rXGydendXV08/prvnRU7zIXI0GvMhkZZsE5ZoUZJl2ZEDG7IDGP4TQAECJEZ+xTe/5AS+QQLF\n",
+       "iG0YAeToRwD7XkO+sfxQIJmSQFG2ZcqgKJImqSE1fI1myHn3o7q7urpe+VH8Vn1n7XVOVTcpkfTd\n",
+       "H0BMs6rOOft99vrW2t96zVUOn1Q8zpa9Vqul8umJDPuB5Syy6shAX1YqFa0HLByOIxhnoXkxG7YM\n",
+       "HHPniYky7Jja3t5WC55jOPA8xKdsbW2pRQYLvFQq6WeICXn55ZeDZ3pH+5eWllz1Yi/HHxgaj3Xh\n",
+       "9vNYOcsk8uGP/SBvDnBsjrcW8OEGlMsy0h5r1+l0VAzwySefDJ5nVcrHlbnf7+fGN40LqLfjjucZ\n",
+       "xw56a2SeEjkHhGOtRP/v7Ozo92AcspghjhlEffAZBHq9/uFYP2BhYSEILp+fn3efC7AshMfyIGgd\n",
+       "c6bRaOg65rEdLNlgJRgajYbOcY7Rwt/b29vuAYyDYn19XdcJPNdjBZkthnjoyZMnU+USGbaV7a+7\n",
+       "7rpL6/y9731P65QH9BEfhPFiPVlQ286Lra2tQNx40gwIc3Nzem+8mybJ//dvaiPFp4kmBU84Dmq0\n",
+       "wKKUJIkuQJxGAx3HC4xdbN4JG6lJXRme5lWxWAzcC16dOGiRTzFal92lS5eUIh4HLJzor7W1tSB4\n",
+       "UCSkyplW3+9pu0qlovXF5Nre3h67GOTdz+o0iYzoe5SVkxvzhtADFnv0x/b2ttx5550iMmoDXtSx\n",
+       "KHAiVqgec8BonmL5OH0vIEkSHQdwdezt7QVpl0Qk2IjWarUggWqz2Qw2vN1u1z15a8th6zKJkdDr\n",
+       "9YLAYzbW8C+7Tj01ZPRRkiS6geKTY3Zj1ul0gtQa3saFT/x5rsVx9fWSINt+73Q6gXuzXC67Lhr8\n",
+       "DmO83W5ruTgtlB3LfHCEy4yTYTiVNT09HZxm9AyXubk5bTcYE3fccYc89thjqd9xH0Fn6cUXX9TN\n",
+       "laeHxC9ktBU+W11d1Zc+xou3EW2321ouXNtoNHQTwXXCvLl06dJbfvAFcx2q4zjZNw6vvvqqbsLY\n",
+       "7W5P47366qtaT/S5t3bcdddd8vTTT6c+y8o0AfDaYQ10kXztJ8y97e1tHavYELbbbf0b/05yQCy6\n",
+       "9iIiIiIiIiIiDoh/U4zUJJpEWfA0iLJUffE9Wwh4tk3Iyn/nKT6Pe+6PA3nBo5ywF6jVakFOQXZ1\n",
+       "gb3Z3NwMAko9zY5Jy7e4uKhtzfmxcG9YO16ZRfKPd2f1Mcq8XyswizkSSefiAli7h5+PMnAgqJUS\n",
+       "qFarej+Us1gsqpWL75hpsHmzREbW+CuvvKKaMp5rAjn8zp07pwyDzetl64G6oV02NzcDZpDZT3aR\n",
+       "Y05hfHl9NTs7m/qcmWNcY5Xj2XXq5YwEut2uWtCee57HiXXZerndlpaW9H6exc31sEytV/dOp5OS\n",
+       "VkCZbJvv7u4G+ku27gCPI5E02+ate8DMzIy2h5cVgce4ZS9Z3Z9lA8Ac8DF5APX12vHo0aPKSIHJ\n",
+       "9lhLHuM33XSTiAxd2t/+9rdT9SgWiykGl8vNWFhY0KTMeP709LQ+hwP+7ZznccXl4vCQt/q9YDN5\n",
+       "fOhDHwpYuyxAHoFVwG35Njc33ZyWAMbk4uKifOITnxARkYcffjhVtkmw3/c+jxn7TmD2aT8epMhI\n",
+       "RUREREREREQcEP+mGKlJkZUrCrtnFljDbhff1Wo1d7dsjxWztcjsjFU7Z+D33W5Xn5f3rP3s2jlb\n",
+       "OqxNjkvB3/Atexakx6h1Op3gWrZYbfCqyCheYnd31931s0CcyND68axgZlzwDK9d7TO8/ufcTnk5\n",
+       "9DgOj+/hWfwWMzMzqtzN8OI8wPzh96VSSZaXl0VkxEgVi8UgFqBer+v3bHkh5oZjFDDO8btut6tx\n",
+       "IWfPng3Kcv/994vIkJHCZ7iHFxDqxa7VarVA7oDbkYVv87IP8H3ZiswL2GahSvShF2iNaznWzxPf\n",
+       "ZHFVOz4Hg0HAFl2/fl1OnjwpIsP4EZQF9+H+8ALa7SEXVnD2FNBR9nK5HATDe4HjPE84Dsy25aFD\n",
+       "h5SRAKampnR8Qomcn+fForJYL8BBxlZEst1ua8wavuOxhDHGcwIB0rfffnsQR8Ys6m233SYi6Rgo\n",
+       "/J6FYPHvHXfcEcQUcfthrB0/fjxgd2+99VYV7gSy1nLMzVqttm/F7UnfE2DPWJpiv0x8oVDQ2Mzn\n",
+       "n39eRNIxl1h/eP6gjV566SUdA5gDCwsLWl/L/L2VsAro3FaeAG4W3jUbKaunw+ki9gvvRI2X5iFL\n",
+       "sRgNi0A6TpKIa3kB4tNHWFA4USw6k6nfvJN+eZurrN+xy8NTAs8DTyrPZWf1l7xTbKxAy2lZ8OIG\n",
+       "td7tdnWRwf2yJrVNgjwOeNlMTU0FivWDwUAnLMbC7OxsKumyiJ+Co1qtTlwW7/QdFg9sVDudTvAM\n",
+       "DuYEvIW1VCrpZolfMnjx8ThBuz7++OMi4rseRUYpJBjomxMnToiIf7qPgXY5duyYvtw8wH155coV\n",
+       "HTv8crPuqOvXr+eqbPOmmTcbfKiCf8+fWWVp1MOqa7PrlIPO0f4Y49evX9eTUZ6aOTYRd955p/YJ\n",
+       "nsGbOi6nvQ+74myoAv/N65MXNM+wmwjWcwKuX7+uL1IG7ocxtLa2pi9ubHy2tra033lseGrceK4X\n",
+       "tIz6sLYUXuRPPPGEfPCDHxQRke9+97vBtRhjPO9Qvl6vF2wc9/b2gjJcu3ZNXYlw8bVarWBDfeLE\n",
+       "iWAjxesGNqRXrlzR57ZardyTkh4408Qkmknc3liTOBmxBx4veMYHPvABERmd1BMZvS+Wl5dTmRRE\n",
+       "RkYFo9VqqRaYd5CCgX7H+9gz6kRGY9Cu8yL5m81JXKrRtRcRERERERERcUC8axgpm9gza5c4CZ3J\n",
+       "1pj3Oz5SbF0AfA0rNNvcbcViMdCW6vf7akEybWiDOPnaSeExWGw1suVgj4vyNbCeSqWSWiWs5ot7\n",
+       "5lkpnIQS9eSgdNZwgnXA9C27IQFYh/fee6+IDK2fZ555RkTy+3xqakqfy25LrhPKbF07Ozs7Wgar\n",
+       "hyMyakfWrcnTemm1Wq6+FiwvWJ9ZQe6ezgsANsNjrkRGFje0Ud73vvepawIWfK1WywwaFxklZxVJ\n",
+       "590TkcDCFkmzKAjWhQslC7Da2fpmlszmLev3+25b8ZjAmGXG1GMOPZYoLzCeZQEsI9zv93Ucw9Ln\n",
+       "I+4Yz3x/HKTY2dkJysLWODPdtnx7e3vB0Xr+Da8Tdr54khztdjvID8n5/Li+npSJ5x5BG7A+Dxgp\n",
+       "SB1w+THPOHG3FyjPeSI50S0ApsfDX/3VX4mIyMc+9jH9DP1x5syZlO6XyHA824MlFy9elF/7tV8T\n",
+       "kREjtba2FryrLl68mNKoEkmvf9wPmMuDwSDQM8pSwAfYPcusaBaYkcRayesna9HZBMULCws6TlDm\n",
+       "er0eSAhcv35dbr31VhFJ9zXAYxfXjAsmx3MhydBsNt12QZgEWPRr165lZlXYLyIjFRERERERERFx\n",
+       "QLxrGClgXAzKfvMRWYFJkZE1ViwW1QqA37Xb7aasf5HhbttaXsViMffItBX1ZOxXONI+A/BYu2q1\n",
+       "6qr0WqbMi30SkSBQ3YtVq1arbnb2SY+pegHq6J8XXnhBRNLsDB/3thYGH00HC8BsB1t8HqvA/WnL\n",
+       "x3EHHDOWhWazGVjySZJoGWBReQHpWZakjcPj+ntMHb5vNBrKJoEhePHFF9VyhdTBa6+9ppY5S09Y\n",
+       "lsKz7HjcoN7jAkfRr4cOHQqCUjljva1jFvhYNscEeQHWzFjZ7/gzG1dVLBZTcZX4HeYUC4oiToP7\n",
+       "GG2OflhfX9ex6kkOjFNAB5MDRnlnZ2esQj6ehTqhP5mlYGbdHgAoFouusj3AOQFRfo4xssrTzGZw\n",
+       "8D/ayJvz6K96ve6+B1A+Zmfs+v+jH/0oYDA99Ho9je1hWRDLGvf7/YDVPnfunAZfM0uO77ktuI9t\n",
+       "jNSk74nt7W19Z2H8NZtNd63kMSMyXB/BnvFaaKUuWB7CYybx+9XVVZeJsl4D/jsvAP7UqVN6MAbM\n",
+       "78rKio4dT9EcORwbjYauuXhWs9lUEVRPODgL77qN1FuFvAWYI/m9gE0+/SOSPlGBzq5Wq64bD5g0\n",
+       "0SqX86DB9YxxAXveyR0+RedtkGww7zi6lE9mYNMKt9ba2lowcJeWlvR7XqjsKaasE3s2UHQcOMk0\n",
+       "2sBrN07cnPeCArLSWeClmqUOLjJsUz5NCtiXUpIkeh/WbgKwoM7OzroL3nvf+159noifFHZ6eloN\n",
+       "jLyTRN7m3tskemg2m0FyY3ZlWdexBbvYvVOWtmxeehSRUCm/UCgEc4Vd2Z5bht102AicOnVKRIan\n",
+       "I7HJQN/s7e3ptd6L1G4wUS60hz1Fy9dgTLB6vhdwD+zt7QXP5VAGvj/qCbBrl9W9bZsuLCzovGat\n",
+       "NHs45caNG4FLrFwuBxuphYUFdz4igP+zn/2siIh84QtfCMZBtVrVzQY2B567u9vtBkmLa7WaPPro\n",
+       "o1oukeE49Q73eGPDmy+8eTpoRgWR0VTQJMwAACAASURBVObm2LFjIjJ0q6KtWecKY4YDvNHW2HR4\n",
+       "B25ERuMX9VxYWND1A22Y9Q7DOEFqqo2NDW2jvI3M2bNnA+OEEzJ7RAnmSrPZ1DbFmKxWq0F2h0kQ\n",
+       "XXsREREREREREQfE285IHUQT6a2Ad7zcWgR7e3uB1cZHovFdsVh02Sc8g4MgrSzAuETLk7aLFyjL\n",
+       "YCqe89+hzCi3F1wPy2tvb89Vjs5TNMZOfzAYBPnv2GLmQEpYArCANjc3A1rXO6pdqVRy1eHzFM5F\n",
+       "wnaZ1BXZ6XTU8vGYRmtZi6QTosKiHsfk5el5od+np6f1edx+AGvoePWzBwK88bSzs6Nq6HkB8OMA\n",
+       "hq1SqQQUPI8r/Msq0ZgXbPGLhH3c6/WCXHGeS5v1lzB2yuVyELjP44qv9RJtexpKuB7uiNnZWa0T\n",
+       "9yv6DmOi3W6nGCHc12PCbDB8oVDQMvAYs8fyGWzJ2/uxfIR3DbC4uKjuYG63vNxlCDa+ceOGtinr\n",
+       "Q3mB7xatViuXTeB1yq4Tly9f1mewyw7wXLNgWzhnHJ6xvLys44AZJTyX3xt2XarX66m2ygsbYHbe\n",
+       "jjtmW9EfpVJJxxhc/PV6XTWgsDawThuY/UajoX3NYxZsK6Rspqam5L777hOR9HjnoHAR3y04Nzen\n",
+       "z8WzkiRxx6plH8+fP69sG+rGrKJ3cAzvolKppMzafhjAXEYqSZKbkiR5JEmSZ5MkeSZJkv/+jc8/\n",
+       "lyTJhSRJvv/Gf5+ka/5dkiQ/TJLkbJIkH5+4JBERERERERER7zKMY6Q6IvI/DAaDJ5MkmRGR7yVJ\n",
+       "8nURGYjIHwwGgz/gHydJclpEfkNETovIcRH5RpIkdwwGg0xFKxuQ+ZMGM06WaWJrluM0bCxAoVDQ\n",
+       "z7B7LpVKqeBMXGuDg1ll+c1gXEAcsw8ee2YtM0/9W8S36u3vSqWSG0xv2aJyuazt4O3+8+JpPOts\n",
+       "XD4/tqxtPVjsD/+yhc1xU7iG+83mrRKRgH30jvGPg6cIDyuQGUIOJma2Bv+ivWDRXbt2LQgOrtfr\n",
+       "GoiJYNgjR47Ij370o6BcsCr5WHgew+EB7JenXM6Mh6eAzTnIGDauzxsTPLa9PvQYVmZobDByuVzW\n",
+       "3+YxoqyAbgVBGfV6Xa1/yFV44qCon20Hu+54OSgrlUogNcEHR7g+NibMK3OxWFR2EmVheQCM2YWF\n",
+       "BQ0K9qRUPKYLDMHOzk4Q+D43NxewmZcuXdJ28/DXf/3XIjJkK6x444ULF2RlZUVERvONAYbDW5s2\n",
+       "NzcDxqrb7Srbzmsc5yUVGa4H9ncrKysqo+CB686MNGQeMJe73W4Ql9btdgPpmfn5eZ33YK48QdHt\n",
+       "7W2NZcLcZTFPrBdLS0vKBKFOW1tb2p953hbuU4w7y3ja36JMly5d0vKgvxYXF1PlB2wWjWazqW2F\n",
+       "oPNJYmxzV/PBYHBJRC698fdWkiQ/kOEGSUTEi9b+tIj8x8Fg0BGRV5IkOSciPy0imZkQ34pNxJuB\n",
+       "51r0FkMOXsXCw64HdDKfEPI2N3kaVD8JsLq6F6TNm4m8gNw8ZG3qbNA9qz8DpVIp2NCyi4X7CZME\n",
+       "LqKrV69ONJ48Fet6va5l4U0a+pUp4EndrXBT8ER8M65s3swDdjy12+3AvVWtVlPuWZHRIisyaj+m\n",
+       "7HFfb2wuLS25CzwWyEk3Unz60ZZ5enpaF1+81LlvMR6sCwf34T60GxB2f7CryJ7u89K8cLk9t1pe\n",
+       "EmwuH+qysbERJKPmhZ7/Rvn51JmdK9648uYtZxrwguzzQgW8E64rKyv68kVAs/cC8gwwHmNWK0kk\n",
+       "7Zq2z/XchP1+X1/gfArNtjPWDwv0ER/GsEaRh7W1NS0/b5Q8o8l+1u129WWOcb+9vR3opjFarVZw\n",
+       "ym5nZ0cNJKw/165dSynki/jzem1tzVUFt/O53+9rX6O+09PTwcGTGzdu6PzBpmRzc9NNNJ0HdlHm\n",
+       "rZ/YvM7Pz6cOI4gM1xBruG1vb2u7cjJ3jB12eY7DxMHmSZKsisi9MtoU/XdJkjyVJMkXkySZe+Oz\n",
+       "YyJygS67IKONV0RERERERETEvylM5F94w633n0Tkd95gpv4vEfmf3/j6fxGR/0NE/uuMy3NN70kY\n",
+       "jrca3lFnPjbMv7MUPCdTZVeAdQswTc7/2gDacTo4bxXytDi4/B5bA7DWSV6AOV/HweaWlvesXQ7c\n",
+       "hfuoWCwGTEmtVlMrHBbYuLHELKBNeJwVWJjncmaV/bz8h1xPG9zIbiGAXSJsDdqE1961lUpF2woW\n",
+       "GksieG0Eq3JmZkafjQBQz+LvdDpufizMlUkTn6LNOXEvwHMRbcDJclnviCl/Gxjt6fjwGPN0n7i/\n",
+       "rDXOh0O8+vGYwN9esDnaqNvtKkPCa0eWG0MknUvMm0OeHpr3G6/NvQMynuvePpefgTZbX1933UIA\n",
+       "3G/sJuOxb9mWmZmZoM0vX74csCw8L7i9bRLsrJxs+B0HKGMtyguh2N3d1XEH99rly5e17lxOL0je\n",
+       "urjX19dT+fLsIZN2u63PYTbIZm3gvzlDg21Lz006DpjDtVotcB/2ej0tM7cL2oM9E5O4z8YxUpiX\n",
+       "a2trOr/Qx7VaTcuCccWZS7z1P2t8uGUb94MkScoi8v+KyJ8NBoO/fqMSVwZvQET+bxm670RELorI\n",
+       "TXT5iTc+i4iIiIiIiIh41+Fzn/tc7ve5jFQy3P59UUSeGwwG/yd9fnQwGCBS9b8UESTh+lsR+Q9J\n",
+       "kvyBDF167xGRfzlQyd8kPCEuwDtazVacF+jqCXMyi8N+XAvvmrzjuz8OeHE/XhA0yscK6MyEsKUK\n",
+       "wJLitoLlA0vpIIcJbPZ3hneUmOFZLnyIwIM3Zjy5B1g0XtwWxz7k5SP02tErCzMirEqO59pM5lNT\n",
+       "U65yNMrFrAvqweMAz8C1XttmsXdcBpHxjBTLhzDLxt+JjFiKWq3mWq4cV2PZCWZPPDaBA6itsjl/\n",
+       "jzpxfzDr5bFZtm9ZUNRrG2a1MQbxLzMIbMlby9xjKb3DFXydJwvixYl6DCvA8USw5DmfHzNTHJco\n",
+       "kmakmBGxMTeHDx/W2D4OzLZziduAWSrEWiH2JSs21fvcO/DgXXfLLbeIyIjRZYkKvq9V+GfxX74f\n",
+       "rp2bm3PXE7BmXkA7wB4Y9OuhQ4eCgPnNzU33PjbOaW9vL2CuuKze2ovnX79+XdcvLhOC9FlMlmPj\n",
+       "RNKssTd/+DCO/Z5jm5lFw1jAd+vr626M5+c+9zn5vd/7veBzYJxr7+dE5LdE5OkkSb7/xmf/k4j8\n",
+       "V0mS3CNDt93LIvLfiIgMBoPnkiT5kog8JyJdEflvBz9pgag3MC7o2NsYWTdeqVQKNlJ88o41l+xA\n",
+       "ZV0aXqi8jZRdyDza9c3irVDhrlargc4QuyGwyLzZE5h5CWUnVU8/yLDz3GTe5ssu8MViURebrKTR\n",
+       "WfD6o91u64LGgZlYSJH086WXXgrKXi6XA2XzVqsVnD6tVCr6DIwNT2/KG4fVatV9meIa7xQew7r+\n",
+       "svS1AGz47MIqEh6KQBm8F4K32ckyjOz3/AK1Lie+J29AbKL1cSdreR2w5ffGlXe/TqcTuHG93/HL\n",
+       "lQ8l5Cn485zyXI/4HcYd9yFvKq0CNgNBzEtLS+7JOOue29raCtYLr/94jI0zXu17YDAYBJkrvOBq\n",
+       "kVFSbk7ijfbAtYuLi8FG6vDhw26KHVzj6WIdPXpUr8E48frQM4a2t7fVLYjNWL/f13HHp9iszh0O\n",
+       "OzAGg8FEh6Z6vZ5uwnhNwrxH/7K+GtDpdIJE0bZOFvyuxkaQ9QxRJ94s2ncL3Lp5GHdq7x/Fd/99\n",
+       "Neeafy8i/37skyMiIiIiIiIi3uV425XNJ8E49e+DwFMituxTqVTKTWTrWXxMnVtGwjs2zPpFbBl6\n",
+       "ukRvBuOSllrtJM+6aLfb7uce3W1ZCT5uza4J69LhwHIvv2Febjdcj3uLZCdu5nLhOk/Z3ru//R3n\n",
+       "qGJ4rh2rxeO5YgaDQcBm9Hq9gB3jduc25bZEPezx7Wq1Grgoe72e1jnPJZvljsQ1sPyyAJ0eT5/K\n",
+       "sr0iI1aOE4bjs36/n2KdPF0yywgdOXJEXVE8PjzXrm23brcb9LUnL8B9zXnuYO0yO5anPWXbRWQ0\n",
+       "Lr2gb/6tp3Pl3Y+lM6wKPP/NCcvzXCvsOkF9PQ0wTxYAbesFFvO44nyodi33+m8cU8wslCcHYRMo\n",
+       "Hz582J3zYGvATJ09e1b1jTj5toWXqJjZ1hs3bgTSEIPBIGAux7FC7B4GE8VMDauM4xmWJd7Z2UnJ\n",
+       "GRwU6P9Dhw4FOne7u7tB8LpIvqfJY6H5PQoW0GMkOYMADkFAF20Spi3m2ouIiIiIiIiIOCDeFYxU\n",
+       "r9fLDR73wOJ6eUfTPWkCPsJsY5/YmrDxThb2uczAeAKgQLPZVDE17PgnlYnIEs1ky9UKkyWUA9AL\n",
+       "8GaL2bJKHKTL9fB28VZQstPpBLFPnpBpu92euP5WhsLr/3a7rawILByOWeBxYMuSpUTvjTHrs2fR\n",
+       "QrR91tiBheTFpeC+zEiwMCMzm/jXsq3T09OBIGKn01Er24s38GJ9ON7JlsWL9UuSRANLPUYK1ifL\n",
+       "W3DAcJ7aPurPZS2Xy9qWsHAvXboUWK8cq8bz3/Y/x6DkiXXyOsEHB2ycFgcZ83pin+uNE643j3fL\n",
+       "eHQ6nYBB6PV6EwmKcqwKnre3txfESCVJoswLctOdOHFC/+a4Mlj6nvo4s164BmORGSl8NjMzEwRY\n",
+       "8/xEObmOHoPO662d39znaIN6ve7GcOGzhx56SESGjBQziCLpsYG/W62WnDhxQkRGYrkcXM2Covws\n",
+       "zCWM7atXryprizIvLCykDgOIpMcTs0p4Bsp1/fr1oD1arVZwCMMTV+b78FqEz8DCbW1taZ+A/eJD\n",
+       "CUCpVNI5ZyWIREb9/pnPfEa+9KUvBWWxv2Mw043xOamMi8i7ZCMl8tamkGH6ll0J1sVWLpddteNJ\n",
+       "FMvtvfH/4xZEAIObF+tJNhPjfsPuyjxadm5uTsvFv8NnPBgtfe7pSPX7fZ3Y3ovY2zh4yNMRmVR5\n",
+       "XUQCCltkRA2z3hUmttV/Ehn1TaFQcGl+u6nDi0EkvXmxG0x2sXB98DeuXVpa0mBTlLnRaOjzUCYe\n",
+       "7wic3Nvbc0/35QVEW6OCy8z1RH2yxnpeehyMg8OHDwcJkWdmZtwNnndghN1qWMRZvwZ1xgvUS0nh\n",
+       "Be622+2UsrhIerPhBbRzsmFrdPC6xn1utXZ4fWJ3uN2E8QuNNy8oH16UvV4vOAHJp22Bzc3NwJ3K\n",
+       "ZQCmpqYClwnPMU8PiV12vMHD/e0cf+qpp/SUFU4GeqfUOPWLd3gh6yQ3ymTngOcGzVqnnn32WRER\n",
+       "OXnypH5mswAcOnRIE/uyMYHnsmsJfdhut4NycR/itGC5XNY2we9v3LgREBG8MUPbd7tdnQe8acN4\n",
+       "8k7FoT3q9bp+zy40q7lWKBR03eTxiTYed9IZ97FzWmS0QT5//rx85CMfERGRb33rW8F9vMTTGNsr\n",
+       "Kys6bvGsSbQeo2svIiIiIiIiIuKAeNcwUoAXPJiHLFeAx6J4bhKm+XGt1WJhCYM8dx8zUnlUPD8P\n",
+       "u2xOguxhkoBVkeGu3mMdYLHgSOza2lpKfVvED+KcmZnRz3FfVtf2JBE40NqWxQsE9BLL8n3yAsvH\n",
+       "Ae6CYrEYqCEXCgUtP39mXTqsYu0dU0b5PJdCvV5XVuT8+fP6OX7LYwNtBYvTO5bbaDTUIoTV1mq1\n",
+       "1Frko9h5+dQ8cKJgT4aCGbc8XLw41Oj1jooD3rztdDruvPcS9gIeqzQYDJQpZSYKbYS2z2IG0b5g\n",
+       "MLNYG1smT2+qUqm4h1bsc5nJwZhgl4PnhsD8LRaLaoWzi8eyNXzIgZkpWycvyH1nZydgeprNprIr\n",
+       "CBngdnz++edFZHiMH6wI6ra2tqbMH+rRbDbV/QXGySvLoUOH9HtvHPGYRT3RBhxGAnC90M9ZbD76\n",
+       "7atfHR5sf/DBB+WRRx5J/ebq1avynve8R0TSjBT6hl2eeM7U1JT7TsFYfe6550QkrTeF+2xubqrb\n",
+       "FeN9fX3dDePAmoHxdNttt2k4At8XaxDG29bWloZL4B3S7/d1rqBfd3d3g2e0Wq2J5WomCfx+7LHH\n",
+       "5LOf/azWXUTkiSee0O9R9sXFxZQUgsiQBbRu/0nKFhmpiIiIiIiIiIgD4l3HSL1VMghePI+n+usp\n",
+       "W3sBpTYexrtHFmPiiVzaoPRxMWKTsjHValUtAVgYXCewBSIjJorVpm1Mk6c0PS44D+3hWReT9q/H\n",
+       "XHFcBbOKeRaFZ1myNW7bNUkSNx7KkySwwo3sk+cgTBsIymJ/rBJu47RYHJZZL5QFjNXm5qZ+j/46\n",
+       "dOhQwJ5mKcTbODFuU25b1NcGsVtgjCFO5KabbgoCz7e2tnSs4b5ZecC8OCMeY5Z9LpfLATvIkgi4\n",
+       "B8fc8Ry1EguNRkNZG2acvOPblj3Z29sLDnDwfAT4/8HUXLt2zZ0HHjtm61YsFnX+eyy/JxXBhw08\n",
+       "dgTtwYHtYEB5rfzwhz8sIiKPPvqoiAzjUhDrhz7vdDrKovFBEDuXT506Jc8880zqMx4n4/Kloa34\n",
+       "GrvWcruwEKgHxBaBAVlcXAyCyNvttitvgr8RG8jjr9/vB2OWc08CXA/OM4g+RFnW19e1rMxM4RnM\n",
+       "2GL9B3POfYN5WywWdbx5c8XLX8ksPtYK750KMFM47n34hS98QUREfv7nf15E0jGXqGOr1dL5ynFp\n",
+       "WX2bh3fdRuqthg1OFfGDy9h1571w7IvFc8mwNoqnl8PPejPJnK07Es9GWWzy26xy2DQVrGjNgdn7\n",
+       "VWEflxg5D7xx8IL5gTx3VaFQ0EmJsjQaDa0nv4A4uSx+j8nnBd4zcK1VRRYZ9dH169f1fliAeKOG\n",
+       "Ba3dbo89sYb/tydlKpWKLg68KGJh8cYL0Gg09BpPn4WBsk6a9ggvFii1M1ivLSsg24P9nE8OYmPJ\n",
+       "Lx8soN5JPpH8YHTUt9lsBqlpRMKXEr8I89JocNA36s7uKPTb1NRUSgMMn1lXnKd9l9V+PN7QLry5\n",
+       "sWVlQJcIAeEio5c02nZjY0NP8tnrRHxtKYbVp/N+v729LUePHhURcdXCgXq9HqzHWes7ME6Pzxpm\n",
+       "L7zwgrrVGBhPPOftuOLx4qVH2tjYCIw1Hu/2sIbIyI1+7NgxddmhrURG7eW5/bhux44dS5WVdRNR\n",
+       "j6w2wtjicAl7QIUPyPBmc7/vxW9/+9siMtxAYtMMA6JcLmtZ0Nbe5nQSRNdeRERERERERMQB8Z89\n",
+       "I4Xds5fYUUSCXTHrEnnsiL0v38PL65UkSaCAvB/3pXe819NOYUaNtVBE0ok1QcW32+0gcS1b22y1\n",
+       "WwvKOx5fqVSCY8/2b1zrudNsMP+kWk4eOKcU96EXKOyxbR7bxTIPgOeKYp0ckaHVhrJ4ViBbT7g3\n",
+       "yuQF8LM1xUyizdnFGll5OQH5CDMnh/UsQ095OQ/43h4Px3OtLITIaMyiHjZnmUW/31erH21TrVa1\n",
+       "zrDaPT2iRqMR9GGSJKkgfsC6MVlN3DtkkDcHeI567DjmtzcOmbnydK4YNkC9XC6npD/wGxtmwJkX\n",
+       "GPZYOcsQ3H777SIy7AO4dsGEMCPF9wVjwvWwffT6668Hh2HGsVpAtVoNdMdYK8+r4zhtIbi6zpw5\n",
+       "IyIizzzzjNtPYJA4FAG/48THGN+Li4suU4LrmYGzzOqRI0d0bcN68tprr2k9wUKVy+VAXoLfd6x6\n",
+       "D3YKbtzLly+nZD7we8voi4TvQx53AOtN4fmslJ/3jrzjjjuUeQOjfOnSpYCN5XUvT3ZhEkRGKiIi\n",
+       "IiIiIiLigHhbGaks1XFg0iP9b9VzbVZ1T0CTfcGeKjowLj9gnqW5H/FRbydt83ll3ZOvhUXtsTJ5\n",
+       "yua9Xi8l0SAytBwsm+U9n2OVWFjQswjHxSPtF+OYDABlQZwD+9VRt1qtpvcbdzwXfnpcy1Y29z/a\n",
+       "l9kEm/eN25StRpQZMUhHjx5NsVMi6aD0vJgmLy4mS6AO1ul+gzVrtVowjkulklqz+HdjY0OPWGOO\n",
+       "ZvUjB32j3F68E9iOra2tYP5xhgFc4yk4s6Akx0rZ9YulTnhNs8HeHOeCMk1NTekzuE/QNp6EAuej\n",
+       "s7/f29sL2Ha+LwKR19fX9Xc41s4MEgNyFmCkeC0Bm8LrIpiQpaUlvYbXLNumImGuzcuXLyvbhdir\n",
+       "rNyXFp5CO5eBBRkti58FFssF7NpXKBSCdfa+++6Txx9/XERG82h7e1vHN8uVMLD2Mhts59+lS5d0\n",
+       "HDPj5HkDbHB+tVoNGF2R0ZgCE18oFII1htvNi09kxtnWbXt7W8clZ7+YxOvAh2tYfsFeWywW5dSp\n",
+       "UyIyVKDH726++WYREXn11VfHPkvrMvEvfwwY1yhv9QYq77kckMkLoN1I8eaKKW9LF3rB654K9GAw\n",
+       "CGjetwq8qHqnIVgLitNwiKQXQaa97YamUqmk0hiIDCcyrmFXoaW4a7WaLlZ5weGDwcDdQOVR8B5s\n",
+       "ULdI2k1mXUkrKyvaLl7QKgI9NzY2XBVcm/yyUCjoNfwytm43kXDsM63Ni79tF35R4R6Li4sBJb65\n",
+       "ubnvl413AgfgBXe/Gyl2+3LiW7zMefHF/LInHQG8KPgatAM2Q9yWrCCOerErCdfghVGv14Mg3s3N\n",
+       "TXeT4QXLovz8DDt+vSTo3ulSdkfyy4Y1qoA8lWY+rMEB9KgD7oO6sauY4Z2AArBR4hOaGFd8+hDt\n",
+       "zfXgjQg2DuxGxOlPbKQ4XVEeeIzxPLJ1GwwGgcGXBbyEUceFhYWg77zDRDwX0Rb8Ir9w4UKQDJz7\n",
+       "H+3L9+ZxZV3U41T7MR93d3d1jqBuzWZTN1xYzzhIG/3VaDSCZMQMz0jjUBBcy6EFntFnDaBLly7p\n",
+       "+ECZ9vb2dBxjjVlbW9Nxgg3V7u6ujrG7775bRIaK+uMQXXsREREREREREQfEuy7Y/Mfl7uN72iOx\n",
+       "DA58Zgvdo0lt4Ka3mx7n3nyrwKrNsIZRLrbePVcSB5mi/VE3lj9g5g3XsMvGPvcg+RNZtsBah5VK\n",
+       "RZ/BOZ4sQ5MVWGj7h5XGvXEHiy5LeR0sBSxzZi5ZuZyTwaIe1rXDFjYsfy+IudPpuAcf7BF2du2g\n",
+       "zbLyBU7CmE5PTweM7jjwcWSrMM1/o51LpZIG5GeNnTzXRF4CcP4Mc2Bubk4tftaegVzDSy+9JCJD\n",
+       "FgxsjefC8FgMnme23biv8TdLHWCcevpUxWJR87wxo2EDkJmBQ/tw0mqA2Ts+VOK5mdD2zJjCFYv8\n",
+       "ZdzO6MNyuayuJzArrE4O1ujatWvalvfee6+IiDz88MPuuLV9zMH/DM5OIOIn2hZJayjl4f777xcR\n",
+       "ka985SsiMhwbVgJia2srkGeAMrlI2l3tjSfgyJEjgZQEy7PgeWtra4FMQrvd1ncC/r1y5Yo+G/IG\n",
+       "jUZDFeixHt5zzz3KcKFuc3NzymZhXLXbbW1LL+SB1z2wbWBCOfsI2n4wGGSueSIjxuz8+fNBQml2\n",
+       "HzNbhTphzB4/flzXCbQFxnAeIiMVEREREREREXFAvOsYqUmZqP3mEWOwAnJeUDr/3mOkOKccPrPl\n",
+       "/0mwUSLpuBVYB3ysFBYLB32zdSAyvi0n/R0siKxj9B64Dfke/N1+xEHRN4jn6Ha72kZ52eEZ4+Il\n",
+       "vBxvNl5iampKrUQb5Criq04zm2XH+WAwUEsU19y4cSNg5fBskVH7eeyo1xae1T49PR0wa1kHLmye\n",
+       "Po5PA5gJAbrdrrJULGQIRs2WB9/b+D8en56QJrC1taW/veOOO0RkKLDITJRImkXjnHE2XpLbwwuG\n",
+       "98YY7sGWOMaEN9Z7vV5gjaMuIqN24fp6defDDrYMLJ3BgIwF51y0/b+xsREofc/PzwfB11wmXkd5\n",
+       "7QDAmLAUjJV+4FyazC7bNjxx4oSWy5uHHiPCAAttc43ydxybxbGXGBN4vsiIiZqeng7KurW15UpO\n",
+       "oM5og5mZGW0vZsIwbtGX9XpdnwHpCS43WNcnn3wymHM8trntJz0cZNXT19bW9FrObQkWG2sNrxFg\n",
+       "ME+ePKl9jXpwP+B+fHAMbXH+/HntG1xrY9M8vOs2Um9GUXu/4NNTXgA6L4AYjLwJsy5CXlwxELxT\n",
+       "cpMii672wGW2i2Cv18vUmmHMzs7qAoe2HwwG+jLy3DPsNrDPYPqW28W2ES+aHOTunWxjbSyR9Okp\n",
+       "dp3g3t5LZ1KwCq+3YHgBwugvm6aHwWk5uG52MykyeuHwS8S+kK9du6aLA55fr9eD9B3e5j8r0bKd\n",
+       "e5yAmk+Neu3Cm3XAbuC8jRTfm09PYUMoErqw+D48VzAuUc92u633we+73a4u3C+88II+F33GJ/ns\n",
+       "y2Zubi4I8C0UCu7pUy8o3X7HmjzcLp6uEcaHp5CNTTv3tafTg+d6a0zWy9GmnJmeng4C83u9XnDy\n",
+       "7cqVK4FbaGdnR91LeKHdfvvt6sqCq1BktIZijG9vb2u/cV8CfHjGroXLy8u6JnCbTrrOwlWEecmn\n",
+       "6dg95502xXxEfy0sLKSSPdv5wGr8QLPZ1DGBTdqRI0d0LfIOzaCs7XbbTbrundK0J0I3Nzd1XMAV\n",
+       "xocrWLPOm9eYh9iEFwoFbS9O1m43/TMzM9peGAfNZlM3P3CNMpHAz7dl4QMm41JTMaJrLyIiIiIi\n",
+       "IiLigHjXMVKTHq1mS8PTfZoU2AHjfqVSKaDsWfuIv7M6PZ6r8M2wavsJ1mYL0ysru4ZEhkfmcawY\n",
+       "ePHFF10F57zEyvh9pVLR53q5/oBarRa4Hj11bQ/T09NaBraavSBegHNF2TYol8tq0fBx4Lzyezo4\n",
+       "DD4azPcVGWlVMSPFsMmIC4WCWot8nzz1X86fiDLwcXlYpHmU/GAwSB23x79gO2AhZmlqefPPSoSU\n",
+       "y2VXV42TM6PMfD+7PrDF6bnx8Pt77rlHnnzySRFJuz/smPaCrIvFovYDM1M2gTIfBMhjgbgd2CVr\n",
+       "rWc+wo5nVatVd43My7WHMu3u7gYHQvb29lymzDtQAHYE4/7YsWOB27pcLrttiPtxjkEofIOR4n7m\n",
+       "gyC8tgFoK06gDOStm7Vazf1+90LShgAAIABJREFUXP44EZGPfvSj6t7E79bW1rQMvP5gfnMwvm17\n",
+       "ZhK9+ciadmj7drut98GawDpSP/3TPy0iQzesZb1ZOsG6Xy280BmUEc89fPiwzjl8tra2FjC/rVZL\n",
+       "mTeMl0ajoeOSc5biGWCfOp2OzlcwR61WS/sbrs+lpaXAC1Qul90gfpswPks3jxEZqYiIiIiIiIiI\n",
+       "A+Jdx0gdBJ6acN7vPIuZg0RtPBQrfvO/HMSLfyfNwj4pbJmz5BTs8dcswPq8fv16rvo3KyrjGraY\n",
+       "YYWhfllMos0v6Fl8kwpyZtXNsyi8HIVe2WwsQBabaSUxRPy+tUrUDFhvXtZ2kZBZY1aAy+e1lR2z\n",
+       "m5ub2gZgRRuNhlqzEBT06ujVo1arucyAdw+vfFbFfDAYpARPLXAPKw4Jls07ds1B55YJfvLJJ4Pc\n",
+       "Y3wfIEvdGWOP40nQnzwv7Pjh8vF3VoqFRWRxP76WWQyPcbb344BxPHd6ejpgx6anp4N5NS5rA9gK\n",
+       "DljmgGZvbLG8hMhwHPzwhz9M/YalLXg9wThhBgesA1h1jx32sLu7q2sQM3Fot7zMBffff7/80R/9\n",
+       "kYiMxiHHLHHbe4c/MO8xL3d2djTuK0uQFZ+zsDDWbRyk2djYULbmiSeeEJHRQQkRX0Qa9+OYQAZn\n",
+       "/8DvbbaBTqcTxCoxG4kx3e12U3I6WfX14vWSJNH5inHAsWWIdyuXy6nYPZFh+3lrC4Dx5B1msXhX\n",
+       "b6Qm1ZTiRVdkvMvBS0bMJ5K8k1J2s+bpTXGZx53+mBQoFyZroVBwFah5McTAAHXZ6XR0YWKVY9DB\n",
+       "rD3ipQGxLodyuaz141M2aDe0Fbs/GTbg2dtcsUsR9SyXy7oI5QUUlkqlfSepHOcO9hJOZ514E/E3\n",
+       "B1jseIxhweOgeHZ9oi/xOy+YVESCl2ur1dIXE+5Rq9Vy3d9YgDhYHy8MDiz2AlaBXq/n9qvd7Lbb\n",
+       "7YDa39vbC+a8fcnj/7kf8lLrsPYM2pCTR9tysSGFlzTrNWGNKRQKQRJvPlzBYQG2Pfh0r2ek8Pyw\n",
+       "Gm6FQsF9Sdt1zDu5yG2JFx8HDON+WZsorAnsrrIB79VqVccoXDI7Ozs6H5Du5eWXX9ZNKe7BL2G0\n",
+       "/YsvvphyTYqkN3/4lzcveUbUtWvXgnGSJElwws3D+vq6Pg9t0Gw2dVNgT5yJjOY164TlaS4xeJOG\n",
+       "uh09elTbF2XudDqBltqJEyeCEIrbb79dXXloe2iS8f0ajYZ7otuelNzc3NTn4feLi4vB5o8TFKPt\n",
+       "K5VKsJnKMrKtgen9rtPp6OdsHK+uroqInzgdmOQgVnTtRUREREREREQcEMlPSsco9dAk+ck/dB9g\n",
+       "l4m1ZrM0VywjxYwJLPkkSYLguyRJApq0XC4HdDDnufMYs6zPcEwY1kKxWFTLJy/od3Z2Npf29IA2\n",
+       "qtfrAePm5f3j578Z3S/PXcHfvR1j3EtkLeIfSQdYxRhWM6z2c+fO6WcILL18+bJaVGCXsvoM1jD6\n",
+       "qNls6ljkoHOwYnmux3K5nGKiUC9YsWyRWhSLxSAY1t5bZMhu4HcIwt/a2kodBwd4jthkqpVKRdsE\n",
+       "92NVd2B5eVnLj7E4OzubYqeA48ePi4jIxYsXRUTkAx/4gDz77LOpujN4vNuganYd5rmPOSjdy92H\n",
+       "+jBbwGyFp/FjwcljWVGdpSREhtY9+h19vLKyou3Hbi0Lbmfgk5/8pHz1q18VEZEPfehDIjJkpLB2\n",
+       "YYxfu3YtYL2uXLmSyrwgMmQ92K0oks4Fx0HdaH9m5zmRNepjXcUsb4HA7FqtpgHcYKZ4nIHN39nZ\n",
+       "CdaprAMmnF3CModTU1O6FoANrlar+hmHZuAzTm59+vRpERE9ZMG5T7ncYAnB6GQl87WaVh5OnDih\n",
+       "7kqwX5ubm26eS8hfYG17q7w4BwHW88Fg4EaeR0YqIiIiIiIiIuKAeFfHSE0aQG3Bwnjsd7dCgWyh\n",
+       "sQ8Xu3W+1rsfxy0BLLCH6+zvut1uYH2OC1S39+Uyi/hMGizHYrEY+JnHsVEoM8fVWGG0LLDFzfFS\n",
+       "tvx5Ss/8nRf34TFcsOgOHTqkFhrHGNl4lHa7rfdG/Eyv11MrnOMv0JbjAhNh1aF8xWJR+4bbHM/j\n",
+       "eCOMdw48tkG69hqRIeuBPoEl7+VU9IT3qtWqtgEftWbGQiTNwHlMFPoNcXdcjxs3bgRxcTz3vPxg\n",
+       "zKrynLN5CFutVsACFotFbSPU48qVK0EMpcdGlUoltbjBRHzve9/T7/EsFjfktmZZAZEhS4H24rFt\n",
+       "2efd3V1XfNPOkWazqSwmWIper+fGWll2qlAoaN0x1ra2tnQsci49T7YFz8BYm5mZCQQxvTHGzCTK\n",
+       "fPLkyYBN57hMfMfxUCi7xwTzGEGZ7rvvPnn88cdFxM9biPtsbW0FivBcFoyDJ554IvAQMEPIc9+u\n",
+       "4Ts7O8GaZYP67bze3d3VcQK2q9Vq6TgGk3Tx4kUtL7NiYKL4fogZRF+XSqUgn18WPCbKxgReuHBB\n",
+       "64xxz2s0j0+wipPID7zdeNdtpLzNCxqaX2I8UK3bjTdDXnA40595GynWh+END/61NH6SJDqZeLLa\n",
+       "AF/+jJ8BYKL0+30tCwZguVwO9GsYXP43o6puT1lYWK0YViLn7zDBuY08CteqevPGkIP6UR5ejHAt\n",
+       "XsJJkijtjft4SWQZPDZQVg4wt1plHjigFODFkVOYwJ3FyUzxPVP2nIQYwCbRa0fv4ACn9LD1XV1d\n",
+       "1RQcrBCPsYU2vX79ult367Lb3NwMDmGw2jG7wzE++aWFazgJM48F6zJhV7an8eapxPOGht2BuD82\n",
+       "zXBNNBoN7Qduc08vCeBNp3cIxn42MzPjnny1m6GFhYXUqSn8Bu3Pcx7rgFWDFxmNk+np6eBQwmAw\n",
+       "CAyz69evq8sGL1Q+MOC5LdHOzzzzjH6G/n3xxRf1M09Z2rpXGXzq1csGAFQqlSABdbVaDU4zTk1N\n",
+       "BYHpPM7wPB5rHvDd1NRUsAHudrtBEL/NDOCttV6mCWxAMEduvvlm3QyNO73I+lxvFtVqNThI0W63\n",
+       "tXx8eMX+jjeR3pjx0hB5bc/6amhzHrtQ1OfTvlxWLlMeomsvIiIiIiIiIuKAeEcyUthFMsvDx/Lx\n",
+       "HXbcLC8AsASAVaL2ZA14x+qxRR6849usN2UZIWa9ePeM38GCLRQKgQuL78VUPK7hYEJ7P8Z+dKtg\n",
+       "gSIImvNb4bNarRbksmu324HW0d7enpv3CpZI3hHTer2u7cYWlbVi+Gg9uzKs25V1dfC7Q4cOabt5\n",
+       "SYE5eDQvf1wespLMosw40n3jxg1XG8tTd0d7oK/a7bY+h+uB75mRwP28cnl5E9HnrPEES+7y5csu\n",
+       "c2D1oTjnIurNFj9+l2XZ2/lo5xiYPATusgaQN/bx2fz8fOAi4gBllLVSqWgb8jF5y9BMT0/nZmHg\n",
+       "ethrWbuH549dsyqVSqBztrOzkzpWjvLZ+cXBzZ4OF9qfxx/GnXcgpNfr6bVgpDqdTkoXTGTIjoD1\n",
+       "QNvzuPFYV3zPblBee8Gscs44ACyZF/5x/fr1ICFtqVQKGIi5uTl3PCIYOg/b29uBy471kJg554wK\n",
+       "/B2Qx3Z57yn066SuuUnBivqecjzAc89bH9E3pVJJy4q24hypeSEsCwsLwd5ga2srODTDLn6EF6yv\n",
+       "r+s4y9NMnASRkYqIiIiIiIiIOCDeMYwUH/f3YnysSJqX98mLfRgMBrpb5/taS5YD0Md95sFjjmwA\n",
+       "d6VS0R03xxvYXTYzbJyvz1qaWdd6wnl5x+7r9bp+j3tzQDGsj1tvvVWZCPTD9vZ2wIA0m82JGBov\n",
+       "ZogFDxnW4vGkDmZmZrR92codp66ehVKpFFjyxWIxpciL+1vmhf9G+8zPz2sAM9qbA3cxJiqVilrt\n",
+       "3N6eKKDHEliGqVqtqvXHMSWwfL2+Qp+//vrrGmyMth0MBsqAgqXa3Nx0mS3O7Yd72ByUzHp5qu0A\n",
+       "M4RApVJJlR8sC1udHOsiMhyn9qj25uZmKv+dSHq8sPK+ZXw9scwsmRQ7X3nsoP28++3t7QW/4/aG\n",
+       "ijUfVLExXwzvMMn8/HxKPVwkHZuFNvAUpkX8mEvMFVbett8xQ4z+4xgZPnhjWWORUT+BjWTpA4xT\n",
+       "Wy+RYd8juBr35bHoxZhybBae84u/+IsiMmR+rPQDx7EybJ94BxKyxGsBVvCeFBhDt956q9aVleYx\n",
+       "jlAeb83ksZ0Xc8Uxxh7Q/1NTU8FYZbkPzOWZmZmUZAL+ZUVzPBdzGLkveR+Aeq+srOh7Ik+xfhK8\n",
+       "YzZS3qkzgDcMTG/bwcUuMS8wll86XvJgPIM1aDCZ806EsMuOT2PZEwutVks7HROET4ZxG3jB67jG\n",
+       "bgz5GQz+jBcenNziBKH4LSbOxsbGvjceHhCMLDKaOKiTR6d6myhPgdirL5eXDwfYlCVc3zx4pwB5\n",
+       "c41FM6vcABb4ubm54BBBr9dTtwMHluNFD+r80qVLEyepthuaQ4cOuZscLKp5tHaxWFQXBjZhjUZD\n",
+       "Fze8MG7cuOEGANuN6OHDh93AZwDjdGZmJphTu7u7gZHALhGR9AZKJL0g872xgUIfbm1t6X24zKzt\n",
+       "ZTEu4TiPGS6zSNrwsmOR/99LG8NAW2Kj0O12A3cLH5DAOG21WnLPPfeISFpHyJad55RXD6Ber7un\n",
+       "tmwfs/I+yj43N6dhAxy4bcHrAM8FtAt/xnpkIsO+soH5N27cUHc6gs45yN1zLSHp75e//GX9jNOt\n",
+       "YLx4Y5u1t/COAbL6nLW77DrW7XaDE5XjgDF79uxZPdXHOl1eAu1J0Gg0gsB3kbTaPMoMvPzyyyIy\n",
+       "HDt2I8MnG1mvC+OD56h1yXNbeorl3K/4GxvuQqGg4wiB6JOcGoyuvYiIiIiIiIiIA+JtZ6SwI+Td\n",
+       "tj1K7lHiTBmy5YrdJFuuVuqAdVoA3kWz28Wqk3tWaLVaVese9WAXBrNBNvC5VqsFbBcHNLPiq2Uk\n",
+       "SqVSYPXy31nJYzlo/K0ErKPp6Wm1HmHJczJdr1ysgzIp8wKgDUqlkt4bltza2tpEuZIWFxf1WpSd\n",
+       "DyCwxQ9MqrT7vve9T0SGFLpNiJolQQE2gVm8PCX6vEMR3W43YJ1OnTql1l0erb28vKxl8OQSXnjh\n",
+       "Bf0bbJB3JB1gBilP78dT7eYsALiHZUfQnnk56jgAmH+Pe7EcgMdE2bbma9EG4xiCU6dOiciQGbDh\n",
+       "CF4QuReAXi6Xte+YwcHYYVcg2o3V7MFEefnGPEYC7eK1yW233SZPP/10bp0BlB9MgueF8PqfgXnE\n",
+       "WRTAiLErmxkwlJ/HBNZ67i/rSeA1H23F4OTKVi6HxwrYz52dnSCon+vIqvc8N207TZqIOQuTBqHz\n",
+       "AS+R4TpgXbaLi4va5t///vdFJL2ueMH+eTI8vM7y2Eb7nz17NvN+s7OzKYZJZNjOGLfQ/Wo2m9r+\n",
+       "mFscMgI34iSIjFRERERERERExAHxtjNSVjAtS/XaZoLnvFUsSmiPQvLfvLO2ViVbtrhfpVJRZoOD\n",
+       "TS2yRDVtvBZbWZyhHRYIH8lndXWUhYU48SwryFir1dxcemATuJ7MenFsl4gfl1SpVNTqQLvs7Oxo\n",
+       "HAKsuixrHJagZxF6wagcX2WP2zYajSDQem1tTa2bvBiFTqcTBDV7cULePaampgIGYWZmJlBKFxkF\n",
+       "P7Iat2VSvNgqjvVDW83OzqpVx22Ux0R5sWiIY7r77rvlm9/8Zua1wMrKirIsPO7AajKLgbqgvt4Y\n",
+       "2t3dDQ5hiEgQ69Hv97WvObjaikdy5ngRP37HCoAyQ8OwAeisms1BsNa6ZkaK89J5TCiCm9mituKb\n",
+       "3Ea47/b2diDj0G63g2BklmxAu7HUgScRwAKWAPqQY8fygs1tzI9tF2Z5WDVdZMis5OUA9BTd+b6W\n",
+       "sTp06JDGgTIjhXGM+lYqFWUGH3vsMf2dnVO8nnnitcizKCIBY8pgqRAW5xTxMxPweGf1/x8XmPlh\n",
+       "Rs2uVV6A+/Xr14P8oK+//nou24216LXXXstdx5gZxJrAzwBwj83NzYCtK5fLWn4vborHH8YJ5tYk\n",
+       "OWff9o2UdWvxpskmDMb3IsNO9XSaLLyNGU8+T+0ck5lPYHnKxbZMXGZbJxHfHcmbMK6n3VyxO9JL\n",
+       "2MoLKgdaA9gUzM/Pp2hxkWEfeIMFEx+Dt9Pp6KKSN0EqlYo+I2+CLC4u6n3QrrVaTV9ovEFDnVBO\n",
+       "/mzSwEh+AeadhsFLvdFo6MKJ5/LEBXZ3d92XEU7AoK87nc5E7sB6vZ7SPxFJB0FOCq/tEUC5vb3t\n",
+       "vvwsbr/99iCVQ61Wc90KGLO4b9ZGyo6dWq2mYwzjq1Ao6LxBv5VKJbdO3ssXZdjd3U0p0Iv4c67X\n",
+       "6wU6WDxe7NrA4E0Z+ojT+OBluLe3p6cx84w6fh4r6nsniO2ptE6nk1LwFxkeVEC/Y1PNJ2Y5oBzj\n",
+       "nY0IL+Densby1g8Ob+C254TtIr5SOuon4hsbrJTvbUS9VE328MzCwoIGmzPy1hPPRYV2LJfLQbLk\n",
+       "LGCceGEOvG7zOLJj75577tHy4Lm7u7tBGEpWgmqbBLler+s13F+THMwRGc33kydPiojILbfc4rrg\n",
+       "bN0WFhYCo7jX6wXl5jCC/SJr7UQZOH0Y0hjhPcRprbIQXXsREREREREREQfE285IAewusUloWcLA\n",
+       "S1bLSYStG4+PUbK7zDI5rEGF75rNphsIaYMgmXJmNwRrMuH5lrJnTStm1LygdhvA6OlNeZaryIhV\n",
+       "Wl9fzw3yAwvH+aDygoc9jDsWDrDLiV0neB73NdoQ/9br9eBI97jgSe5ryxzMz8/r37Bcs/JOWTaL\n",
+       "8wh6rkl2++bJJOQdZZ4kYB6wwbIMVpWG5ZoXsL63t6dUPs+FPJkKz2pHmdrtdtBvtVotODrf7XaD\n",
+       "8VkulwOWK0mSlNVudYY8LTi+hseBJ3GC36H9WYWdNYOs+4kD2tk1YXXYPEuZA8uBwWCgbcg57exY\n",
+       "YXc//uXcklYewraLRaVScdkm2/9ePTyWaWpqSscHP8+uab1ez9XpAngOWvfh5uamrgXMxMG9yeA8\n",
+       "f4BluNhbwYHlFlmsMdgx5PPjOvEctQm3+XdcBuDJJ5+UO++8U0QkJUfC65JI+vCSp9eIMbm9vT0x\n",
+       "+5QHyLg88MAD8slPflJERlIHV65c0fUEn33kIx9Jvb9EhmsIxuyrr776pstUKBRSCZtF0sH8+Hdn\n",
+       "Z0fZbDBTYGRz7/+mSxgRERERERER8Z8pkrwYlh/bQ5NEHwrLguN6PHVyazV76uQeu8MxCHyM2lpA\n",
+       "HFvETI9lkDhgnJ9r78dxWBwgbwM3PVkALwZKJAxkbLfbau1wLAXHfSGWJUtIFM+FJe1ZifsF5wBk\n",
+       "pWe0AytHTxLfVKlUJma5AI5LsYrxnU4nldNJJDtXHq4BW9RutzMlCxiDwUDuvfdeERnGCoiIPPLI\n",
+       "I4FKNLOoNp7EwjKhHIPCshpZ0gB8D/b7e8GjYNNWVla0jTimCjFhzGyAKQFL4o0fZn4AT9qBP/OC\n",
+       "jYFyuZwKzOecfoAVt+12u0GQLweHs4K7F5cIcOyQvR8HqgOHDx8OVMmZbQEzkCVuaVWzGXlSASKj\n",
+       "vkFfc8A4ArNZFgVl4Tp4jCnaZ2Zmxo2Js4eIvDHBZeHnemUAvDHBjDPaIe8eN998s9YF/dbtdgPG\n",
+       "mccGYqpYDdx7Bt8jb6yNA+IsOQYTbXjs2DGN2WSW144FjrnEHGBx23cSwBrNzMxou+Z5GiqVinvQ\n",
+       "y2MGsQ6gb2ZnZ7UfxklJYA8yGAzciP+3bSP1djw3IiIiIiIiImK/yNtIRddeRERERERERMQB8bYF\n",
+       "m1tXEx/3z6McQcsdPnzYPYoOtxH0QaDea5+NZIYIpNzd3XVpW+vuy6INAZu/SmSUa+3SpUu5gcCe\n",
+       "/g6eX6vVUi4HkfFJeOfm5pS69lx7Xs6hLLciP5ev5WtYi8NS7+NcTuOQ57rwjmcznW6Dm9ltZANL\n",
+       "+VmFQkGvwXhBAOI48MECbjcvubHXj3BNYqxlBe5adw+7ivcrlyASqicXCgVtIz6qbYOVFxYWtCwY\n",
+       "v3ywgWn3vJxx+F25XNY24jLZXGAcXN3v93V+cY4y3MdzPfHz4VbwNMWQl6zVak10+OLmm292Dyt4\n",
+       "SWHtvO73+9oe7Lr67d/+bREZrU+PPPKI+2ybo7Df78sdd9whIqM28LR0Tp8+LQ888ICIiPzZn/2Z\n",
+       "iPgusaWlJV2rEDDsSVN4h0VarVYQpsHrAVyQCwsL2g/sSkQf4XcXLlxwDz584AMfEBGR733ve/pZ\n",
+       "nosY33W7XS03B6qjP6ADxjn5+F1i13eet3laWSKjfuMj+Xhuq9VKzXGRyXLA2Xvb+cg4SFgH60Ci\n",
+       "XT3JoTfjgeJ1dJL7cGgJfn+QtZAx7rmRkYqIiIiIiIiIOCDeMfIHXm4dL7fTuGzX2BUzIwTrCbvS\n",
+       "qakpZYk4wNIyXBwYa4N6RdK5sbAz5+eeOXNGRNLZ671jx7a+bDlzgDQslXFKq7hPVoD5OGsE8IRC\n",
+       "mTEQSVtcnvIw/z+YI68NPBzEmgET5Ymf5qkO8zVW9VwkbUV67Fge08iwwcZTU1M6ptEug8FAA1OP\n",
+       "HTsmIsNgSNvvbN1zwDXKgHF/5513KrPAAnle+1q2qN/vBxa/J3Z448YNN2jdYjAYaB9x+1q2qNvt\n",
+       "BuKlWWPXlldkNNc9tqJUKinbgPnf6XRUSPCuu+4SkTTjg4DXU6dOBeKWXBcwJZ/+9Kflj//4j4Nn\n",
+       "eyyMFQdGGUXS+dm+8Y1vpMo8Nzenz+Oj9fbQQq1W0zGWt3b0+3036NuiXq8HIoXMavPhDjA9Hutq\n",
+       "A+D576WlJS0zsLa2pmsIWLms+YY+gpfh2rVrOp44Q4QnXgygf1dXV+VjH/uYiIj86Z/+afA7rIWL\n",
+       "i4sqscDrKecCBbyDIygX5gIr108anM5ZEXjsW4mILJkc79CHFcZm1s5jenjs5M1Xr81ZiNYeviqV\n",
+       "SloWjEvODIL68jj23td8mMw+lwVIvcNnWXjbNlJWF4MHFiaLN4nhWuHTTnkvgqWlJR2gSELIKS6A\n",
+       "lZUV/czbJHinBfm0gH0RTE9Pa8diweANBJ4xPT2tHeclE8YzlpeXdZHh+uJlyYqwoL/HqexyW3mp\n",
+       "HOzvZmZmUikwRHxdIHtvlNnbrNm0HCL+RiXvVBefuLAbENb48dxjfF/vGQBvrixNLjJyDfDLf5KN\n",
+       "YKvV0kUSaQ+KxaK6j/DvTTfdpBtkzIFOp+POEQBj8oknntDP7rvvPhER+dd//Vc34e0kbldvUS+V\n",
+       "SjrOvZMwvLh66u6cCgnl4IVRxHc9cOoHlEMk7ca1i32329UXLMq6sbGhL+df+qVfEpFhu9mNx/PP\n",
+       "P6/1BPhUKV7gpVJJ2/rxxx9PPZufy/VjoG8wP5IkCebzqVOn5KMf/aiIiPz+7/9+cA/eqGCjlTcW\n",
+       "19bW5Ac/+EGqnB5WVlZcnStgUjcKK2FbraDnn39ePvGJT4jIaA6sra2lwi7yyvfwww+LiMhv/dZv\n",
+       "iYjI1772NR133C52TSiXy+7a9ZWvfCXzeeij119/Pbj22LFjOkd5/be/W11d1fmCdmFtw3Fg9xeP\n",
+       "fXwG8P289xgAY6dcLqd0l0SGcxXzDBvqubk5bcu8PmL9qjw3Y7/fD9aZrJPbdryx7uQ4gyUPnjZg\n",
+       "5m8numNERERERERERESAt42Rwq4Q7AmshX6/n+siyMuRVi6X5fTp0yIi8tRTT4nI0OqwO9Dl5WW1\n",
+       "cmDlMxuUpTaM8loXoMd03HTTTep+ZMscz+NgU8/qgGWO4NRms+k+B9YH2qVYLKqV4DFSY4PmSDfL\n",
+       "slNZytZ51isD7YHgzE6n4+rlgEFk5sK657xE0Qy2ZsCeIViT2VAeG3aceAmemUpm4BlcZqakRdIB\n",
+       "5mjLdrut4437ywamvvrqq3o9GFtWaEf5OEAV4441vDAv6vV64LZOkiQ34XAeut1ukKPqzJkzygKj\n",
+       "vseOHdP5zfMc/ZuXi9CzJO1cRVujDTinHN/7+9//vn4P4HuM00984hPypS99KXX/wWAQBKMvLy9r\n",
+       "3WEJP/fcc27+Rc+15iVgtQcG2NWBz9bW1gJ2jAH9shs3bkyUA+7y5cva73lW+OzsrKs27a1jNq/n\n",
+       "LbfcooHaaO9XX31VTpw4ISIjr4GIaFJtzPnjx4+n2FgLjDt2PSNo/pd/+Zflu9/9roiM+oDXUzCU\n",
+       "6+vr2odgQLiu3AdwBSOcY2FhIUiqvbS0pGX1QhoQvD49PT025CFPYdubG1gXa7WavhNQrq2trZTK\n",
+       "uUh6PfHmDMD9jDHujXURX0fMjhNey1kHEnOTPQqT5Codxzixi8+WZWpqKuXdmRRva4xUqVTSF4U3\n",
+       "MTAoRUYDgF9GNpXM3XffnaLR7e+sCCPfl0Xh+CQXBiAmJy92XhzB6uqqlgkLLm804HIal4ARExY+\n",
+       "96zBgVgv1K3ZbKpEfxbyXE4ck2V/z5+hzjwY+VSMfQbHm+EFz/2LU0UvvPBCbhJX/n+vHpwtHbBj\n",
+       "q16v66bK25jZeoukx0yej90D9z8mMcfA4GXIp5Q89xmnH7Hl8BY+jM9KpRK4ybz7c/3xktjc3HTj\n",
+       "EfHyh5jj+vq6ngTD5vjq1avBhnB7ezs4VVYsFrWs2GT3+/3g5V+pVLStMOY2NjZclzIW4YWFBR1v\n",
+       "3DaYXxy7g7/xss5zmzL6/b6e6sPL/OLFi/py4bHtAW0NtyCnLUEbLC8vy0MPPSQiozQlzz//vHz+\n",
+       "85/PLBc2DA8++GBwwq9er6dS16AeNpbpzJkzOn8QJ/ajH/0oSLfivZQYGLNcN6Q3OX/+vBvWYA20\n",
+       "YrGoZYEBzkYqh2TYteHs2bM697x+xfhsNBpBCAK7lLC+/Pqv/7rOC2y0bty4Ie973/tEROTZZ58V\n",
+       "kaHhcvz48eB5WCs5STfu7W1K5ubm9N77xe7urttfk24YvJPrFt77m9Mz5RnwpVIpeDf3+33tJ37X\n",
+       "oN85ybEVzeUwEs99yEa0dTPu14DUuh7oqoiIiIiIiIiIiLeXkeKAUgasSezQeTeN3eny8nJgxfCJ\n",
+       "JHZRwCq2aUv4Mz4Vx+z+hs6oAAAgAElEQVQD6FamVWE5ehYOLDrPqigUCgF9OzMzo8/jEzNWL0fE\n",
+       "t8LQfvtJ7OglTrbpODiFALNKsOrZlchuWcBaINPT01pP1I1ZEXZRjGNL7GdsfdrElP1+fyIro1Kp\n",
+       "uK5TD54FNwl7wWMdVu7Ozo5+jjF7++23q6XKOmB4bl6wdrVa1fbzUlYwI4XxjudXq1VtK/x7/Phx\n",
+       "tagxxprNpjKzmIPz8/PKymC+bm1tBX2ZpauDz5n9sqzC3t5ewBqXy+VUf3BALK7Fc8B69ft9+Zmf\n",
+       "+RkREfmHf/iHVBugfiLjk2ADhUJBmTk8d2NjQ8s9zq2GteI973mPiPiJdK9cuaJj+xd+4Re0fJ6W\n",
+       "HoDn3n///UFbegmgvVRMMzMz2q9oZ09HyzvdNS5IF/c7efKktjUYvUajoQHyWKNZl8tL6YE6Xrp0\n",
+       "KVjLz507Jz//8z8vIpJKwo13DYcYoJ3BJP7whz8Mxucrr7yiLkrWcoOuFsO2V7lc1ueCNWw2m3Lr\n",
+       "rbeGjfQGDh8+nHvi0juI4Wn9AeNSYvG9cO24EA7L7mSx9PZASKfTCcZKuVwO0r212+2g3/kkHycJ\n",
+       "tynPZmdnU9pyIsM2txqIe3t7+pldH3PrPvYXERERERERERERLt5WRspjGVgh1bNoEHzNbBRb42CE\n",
+       "YG3NzMwoy4IdPccBwFLvdrvuzhNMCVv3sEA8deA89Pt9rRtrrmDny9Y2PoMFVK/Xg914tVoNAvO9\n",
+       "ZKnjwIwUB1Lb/kGZRPzgapYIsNZI1tHV2267TUREHnvssdwy5kkYgLlsNpvahui3a9euBePIY7yK\n",
+       "xWIQGJllUXl12a9yL7Oj6Ff8W61WlZ3CWBuXVBMW5s7Ojl6DsW3jiADbljs7O9oGsOQuXryocwmM\n",
+       "yc7OjrK/aKPr168HQdilUkmf60lkoB8OHz6ssX5o2xdffNG1mj3VcQ+oW7PZVOsfbcosFdgHnkdg\n",
+       "1MYdtQfDVa1WdbyhPZrNpsZX4t4sscLAHEds1kc+8hENhkcbtdttZW0QZzkumTfWgWazqfXEXFhd\n",
+       "XVXmC33i3euxxx7T+yAg2+uDXq+nY4ZVpb1xh7KA0RkMBtp+CL5mVm7SoF+er946YfUERUZjmj0F\n",
+       "mGtgl2ZmZrTMYK6+/vWv6+8RL9hsNidaezudjr6DWOke7xN4PG677Tb5l3/5FxEZMmqcWcBiXFyp\n",
+       "ZXe8tY1Zb/u5SOjJ4O84kwPm2+7ubsAC8Xs2L1a30+nkZrPgoP88xoglcvLWUIyXarWq6+e4hPaM\n",
+       "d4wgJ9Dv94NAPJERzeotbgjWZOoXi1y5XA42G6yHBHhZ6WdnZwN31NzcXFCGSqWiL3OU89q1a+4J\n",
+       "Q5QLE3dmZiag/ovFYuAa9Cbo4cOHA9rYO1Fm4emkWKEzD953WUGG9hnz8/PahjwwbRDkPffco6dg\n",
+       "+OVrKWcObuXNHMZM3gkXPlgAZGWMz5vMvDhliZ/yv3yPvMnfbrcndjN6gGtqHLzDEl49MVahR1Wt\n",
+       "VtUFg7Zvt9tBnfj/8zaa165d0/GOhZc3O1iYq9Wqlhmbjqw0SbyhQVtyADrclN7mAS+0cX3ABho2\n",
+       "BR//+MdFROQ73/mOPg9lzgIHHIsMN1ww+rCe3Hbbbfo7BJuPSxFi1xqRkdG5vr6uaVRwmi0LeB7m\n",
+       "4zhDAvNyenpa+4fHFTZN/GJDuTAOuO3RjjxO8ftbbrlFN5iem5NTRX31q18VkfQJbASFw0g8evSo\n",
+       "loGN8gcffFBERiKt7FLCGsYnRIHZ2VmtJ4tJo41QPl5/8N2NGzdS75BJhCF5LWKCAXMR7ZokSSq9\n",
+       "i4i/8eW0LHwK2Yp+FgqFIM1Tr9fT9kCZyuVyMI6SJNH1Gvfb3t7ONRKy3ju4H+rOp71xjZeODmXe\n",
+       "3d2dWPw09ex9XxERERERERERESEi70BGqlAoKFUKq7FYLCoLxDt+6/bg3bSV22csLy+rtQEmiYMN\n",
+       "8Vm9XlcrB64HtiaAhYUF+Y3f+A0REfnDP/xDERnueu2uudFoBElvWfMEFiRb2Z5LBGA2ytOiyYJl\n",
+       "BwaDgT4bbAZbV7A02CL02Kk82nh1dVWtIW5rVjcWSQfps8WNdsJzp6enAwkD/juPjmX3J1gF7qu8\n",
+       "IE1m2zi40esf7+ivZanq9bo+zwsih1U2zo3DyEvO6t0bbT/pgYV2u619yBYznuslCsV8TJJE//ZY\n",
+       "DHYfeIcSPORZ6uxKQFlvueUWefrpp0XEZ7TA4HjMpUhae0pkOGdwDfp1ampK68kZCby2xnhi2QgA\n",
+       "9zt16pT80z/9k4iMGKxxQbBYxzY3N9WFhHu/8sor6iL80Ic+JCLZ7nWsDfYgQhbQl7u7uy6ja9cO\n",
+       "lEPE12zKc+1YRhvAu8ELQQCbura2pmMQKWAeffTRYEzU63V1NXIQO9of42FxcVF1sNDevJ7hQML0\n",
+       "9LSu3Xivzc/Pa1nRZnzY4ciRI5lp0Rgeg5QV9D/pemLBHgJe49DGzCTbNa1YLOo1aDfODILfs4TB\n",
+       "pGETXqaOrPIDaGsea+NSiXmIjFRERERERERExAHxjmOkisViYBmzrxVoNBq6c+SYEGuN864bx0v5\n",
+       "iCpbf1bc8uLFi65KNIDgy/vuu0+ZKCCLobDBt2xReWKJ+P38/LxaSmxhgIny2Ax8NwlsAslyuRzI\n",
+       "GjBDlBeU6O3kNzc33WBJWI6w9Fi0kOtp63f8+HENePZiffLiW7zcbPwstLMX6+UpoRcKhVx2wDua\n",
+       "jH+bzabGjLACu23DLOsRbcpsYF7sDOYH54LE2P7Upz6lZYGl/PLLLweq3YPBIDjAgWfz7/gQA/fN\n",
+       "JHIUi4uL2of4PTNEWfnDLDwmpN/va+wRmIFJVNMBy6j1+/2AaXrllVdUNgBjvNvt6ho0KfuHteCb\n",
+       "3/ymtgPHGnqSLQDmzNbWVsB2iIwSHf/mb/6miAwDvD2GDvUcx4jYsd3pdHQsIAZmY2NDxxMYiddf\n",
+       "fz0z4DgLqAeX95577tFyslSHBR8SQh8hqJt/j7ioSqUif//3fy8iozgnbm9mUW3ANV+DentJv7vd\n",
+       "rq5jHNML9mw/cZM2RiprbbJyQL1ebyIBTf4e448V3HGgwrvH3t7exEyYlbdhcAC8nf/FYjEIkOd3\n",
+       "D48ZzHtuW9t+kzBTyX5PG70VSJJkood6L2YMylKp5KYXyUshk0dhHzlyRAc4TzSrzDzuGd7vQele\n",
+       "vnxZJxhraHjAyQ1+6VgXx4kTJ3Tiexojx44d0xfFuMBUu5HyNgf1ej31UhNJB0ly0CInW0bZ8cLm\n",
+       "jQ/agwc02gsLiud2u+OOO3Qj5bk/vczygHeazKPEJz0ByfcDspI0T4JKpaLtiw0LB1+yC41TiOA7\n",
+       "tCmrBWN84DM+HZuHm2++WdsSbuP19fXcRR3BvEtLS0Hg7n6AeqLP9/b2UmroIum1odVqBW3OrgT8\n",
+       "e/z4cTlz5oyIiGYB4IMqHKDswaauqFar8pnPfEZERhuzL3/5y/r7n/u5nxOR4Sk1lA+bEh53WDu8\n",
+       "00XFYlHnEta9breb0jDKwvLysrYbXJA8rn/1V39VRIYv+u985zvB9XgGwM9izaK8TAkw6ra3tydy\n",
+       "Ua2urganohuNho53XgtxOhKbtRMnTqgbFDhz5owe6slLQVYqlfTkIDZSX/ziF7WvMba9OhSLRU11\n",
+       "g2fs7OxoG3BwNX6XdzJ0ZmYm6DeR/A1GFrxk9AeFlwUiC3Avox71ej11EERkOCY4TEJkWEdreO8n\n",
+       "ibMH65Ifd+KPgfV8MBi4jR5dexERERERERERB8Q7zrXnJaNNkkR3ttjVZwWg5mlPgb5lsBoz3Gy8\n",
+       "ewcDBoug1WrpMzhBJaxFG0wukqYNoRIN+rPX67lB5rBosCtn9gD16Xa7LhMF62lSzR2REQvDrgIb\n",
+       "KOpJK5TL5dSRWpE0S4C6cVlgQe7s7Ghbcg5FWOawMDlgE5bt2bNn1TqEdc1jB25Xz/qcVIZgXPJL\n",
+       "7x6TyE+MA9PfuDcHm+Nf1vViKQiPSfGCX22C4kKhoH0DVuT8+fMptkZkOD9sH7VaLb0G9+P281hD\n",
+       "ZjBtEL6nTjwYDA4UIGst9wsXLuhnmN/b29s6RuGm40wJDBtUOz09rawdpAIYmB98AAXr1+rqqj6X\n",
+       "rW3MG/yOFaZhWTebzYmkLryMCgzIH2SNXcwvdg96Af6W9WBdItTx5ptvnoiR6vf7KSYf9/eYUNQN\n",
+       "LD6zUQhkf/bZZ936Wddot9vV0A+4Plk9n8sOl+hf/MVfaL0xvzAHqtVqwKIMBgN9nnf4gI/uTypl\n",
+       "kgeut9XKs39bsJQAw7LenlwB6zoCWeMQY5tzvXpz3TJXnDOQy+xpXk2S8JjL4CnEZyEyUhERERER\n",
+       "ERERB8Q7jpHK8rPDKrIxEvZ32NmCyTl06JCyNt5RXNxva2tLrWvsti9dupSKR7D3gxW6tbXlWkq4\n",
+       "D+/CEWzKGentTpnF3jzGhEXL7LM4R944eEHQk/qMmSmx9+A+RPv1ej2NtWDW0IrQifhyFgAzkWg3\n",
+       "r63wLBZf5YBsL2bAMmoH8cfnqQ97GBdvgHHljS/Ocu+1AWdK93I/2qDvfr8fMKFTU1OBGv9gMAik\n",
+       "Cbx4DR7X+L5Sqeh9uC+ZiRIZWrPjjm9Pgl6vF4jz9ft9jU1hZgr1BPPiKZHzAQTua7Sbx3ogtqxQ\n",
+       "KOi4xH23t7cD9omBeZ0kiV6D53oxfNVqVeck2IxqtRrMTRYgRvmYaQR2d3e1T5ghtL8bDAaBhAWP\n",
+       "a1x75cqVieK6zp8/r7FKgMdmLC4u6ueQHhARjUFCXBTHE3KMJtqA2Vl7AKVarepcYvFkj1njjBoi\n",
+       "w3eDtyZYrwYD7bixsTERGzIOvCaMi6vyZA3sOlwoFCZiytrttpsHz1vXUee8Nddjn7IOdeXdJ+9Q\n",
+       "FLOotux5eMdspLzK4bNut6sULCYkBr5Imp61SWu9gHSRUQA4Tz5MMNZispuSjY0NTWsCNeNCoeC6\n",
+       "2OBOwaL9/ve/Xx5++OHgd7wZwf3ygnnx+/X19cA9g+sPirxNmJeUmMvJ3/NCAmBA8iT0NlJ2geLT\n",
+       "WnARcEoX70WLPvz0pz8tf/7nf556fqfTCXSzvA1N1ibLngz13NF8DZCXeNnC2/TjBYTPWq2W/g4v\n",
+       "tkqlom0+Lq0MgJd1u90ONi/ctnmL06SnfJiu58B3uxH0xj+r2e8HVr2YgfXh5ptvVrcmymiDU3Ev\n",
+       "fM7GGhSy4V5iYDwvLy8Hm6WrV69qO3BQN9YvPIu179And955Z5DguN1u6+ljjO2rV6/q6S/eNFn0\n",
+       "ej01DjkAHhsVPLdQKOSevGSDxY7jtbU1XRvyQgF2dnZ048gbL3u/QqEgq6urIjJaj7l+eevZ8vJy\n",
+       "4IoTGbW5NwcYOMnH18F4wLq8tbUVbOA4VVjW+wnAGHszmQ4Y4+bpJPOLTwbzOGKVc5HhWuRls/AO\n",
+       "0vA1IsNxx3pkIul+8PT/eL3N63dvjWZj6yDvz+jai4iIiIiIiIg4IN42RmqSI5QcaG2tSnYbYLde\n",
+       "qVRcK9wyCEeOHFGLD1bewsKCS9Xane2ZM2cCK5Atas5BZHf3HkVYq9UCK9XbvZfLZd09w9Kcnp52\n",
+       "LctxAbnWEuSdPH9n6z7O7ceyEGAJmZHiQEILWHD1ej3IPZilI2I/L5VKyszAOoW2jC2/dS/wWPTY\n",
+       "oDymicF9Z8e3p0uVBe93KCvaqlQqaV9zO8PyRX37/b66rdH27PLifuOcWPid119gFdhqRH3ZqvSU\n",
+       "jTHeJ3UjA4VCQYPDwR5NQrvnaeOgfGtra8pm456eO1QkncdPZKiUjmu5rFYWpNVqKcvCTDKHDaAs\n",
+       "tnzePL/llluCtYivAThnJLMsHiPEechEhmw/2BpcOzU1ldt3PAesC50VsDFO+v1+cOiE3W5YS9gL\n",
+       "gTF09epVl9WxOl0nT57UduXcd3aera6uButPu90O3g0sBYMxyfME5WP3K8ZIlhq7B3aXvhUHWZht\n",
+       "sVpL/X7f1WSy7j7+nTcOPD1Ehsf02Wv4WtS7XC7r78ble7RMU6lUCtYBdveiHvtdk4DISEVERERE\n",
+       "REREHBBve4wUx0mIpEX3OC7KWsWeomm5XE4pgYsMd7awHCDCx1YcYho2NjaC3ejdd98tTz31VOp+\n",
+       "ngVYr9eDGKnFxcUgQHFjY0OZEFgpu7u7uUfD8dyrV69qWbMs5Ulhgyn5WLbHhHAguydW6AmdeYAV\n",
+       "4QV6428OzOTgdDAQiNfwghbn5+e1vaCQ/s///M/6PVvgeaySF1/BZbbMRrlc1rrlBZtnHaTwjtmi\n",
+       "bt6BgHHK4BhbKHOj0dA+53GK8cQHDcAIwCLMsoLzBDZxv83NzYDd88ZGpVIJ2CyOh2JmDWM/Kyej\n",
+       "RaPRCAJjPQHVtbU1vQ9ijGq1mltPMCMIMD979qzm2uSAcDwDbbm1taWxShAH/ru/+ztto7yYNo91\n",
+       "yWL0PfaKYwFxLX4HBpOZf7BBu7u7QZ8Vi0WdjwyP4fKYaawXmDONRkPbEvO7Xq9ru3ntYuNURUbS\n",
+       "MpyD9Fd+5VdEZNh+lqXi36HPX3nlFWVbOY7Sy6wAeQSwVTwObS5XkXwmqlKp6PWQ0mk2m7oGbmxs\n",
+       "TMRmj/P0ePfg4Gr0F8csop+sEroFxjGkZzqdjvYrr2fWM9Hv911BTnsAhT9jNtPuA3hvMI5p8uqC\n",
+       "NRzzfJL4tLdtI4XO5o0CgM70Tgbwi8pqgGxvb+tpDbxMtre3Nejb2wQheJlfGKdOnRKREdXOf3vu\n",
+       "P+5IvJymp6cDHZoPf/jD8sUvflFEfHcaA53IulSTbKCKxWJK/d2DDdTzkrPyhORTOJgk2KhMTU3p\n",
+       "5OSgedY4ArAwMi1rky1PTU1p3dHmFy5cUL0VLLSe9tDGxoaWD+CDA/xSz1ts8DtuA3Z12Wur1arW\n",
+       "Ke+EGW8YvOTK3iKH3588eTJIsM0nPvFymp+f1xcE7u3No1tvvVVfBAxcgz6YmZnRlz4OTTz11FPa\n",
+       "1/ZEl20DW6dSqaSfeTS9F1DKKUc85PXlysrKRBupJEn0hY0x9tBDD6lGEMMaTTs7Oxpsjrq0220N\n",
+       "gmaFbtTp9OnTIjJsS8xT7g+rL9RsNoMDHP/4j//o1tmeqN3b23P11NB36Id6vR5c6+nicdJdIMtt\n",
+       "7a1tVul7bW0tODSzvb3tbpawyUC7zM3N6ZrPqb8AzJWshMwoC05TX7lyRdvXrjkioicJT58+nUpn\n",
+       "JTLcEGIM8Sk0L9uF7cvZ2Vld3/GOKRaLWn7PTeZtmt5MppJxyuHeZsQ7KOGlP+K115vH3meoCxMN\n",
+       "3qEVrx6AZzzz+8zqV/FBgP1o1kXXXkRERERERETEAfG2uva8Y4bT09Pu7tsGxu7s7ASMy+rqqu6G\n",
+       "J01q+uEPf1hERB599FH97KMf/aiIiPzJn/yJfublXANztbW1FSj4eqriTz/9dGDxszsFu/K5ublc\n",
+       "1wnqPTU1FVjbS0tLustmKjkviNxzBdTrdTdJMixCWGOzs7NBAK29xpabd/q4H5ijBx54QK1Hvgcs\n",
+       "OHbxgWkEQ7C3txe4AXgcsEVlxxhbd5yjDshTQ/dclF69WS2etYNs3j+RdNCtSNrKgxtCZMT8gZlq\n",
+       "NpsBW8RHxDEmmP2ARX/hwgUtHzMDf/M3f5OqT71eD47nc5+iTEmS6Odov/0Ec1qLMCv34Z133hl8\n",
+       "xgH5Njemt77U63VtQzDXp0+fVnaXmWjbx1NTU9quzPyAUWFGygbLHzt2zJ0rP/VTPyUiIu9973tF\n",
+       "RORrX/uajhm0edbRecwBsOOcFNhzUfM8B3ti8+sxVlZWgnl27NixlJSMyHCttusTtzMwGAx0fKIt\n",
+       "XnjhhRQTJTLMr4n7YRwxG+wpV3sSFox7771XREYZLjjXYB6z8vjjj+tn6EuPkavX64EW2ezsbCoY\n",
+       "XSTtbUB/eAeRfhxg7wHa0gvcZqbezmeeH5h7rVYrYJ+zgGv48IqVSWD3J2ezwGcYu8ViMQho58M1\n",
+       "vO/IW5fw/ElcqrmMVJIktSRJvpskyZNJkjyXJMn/+sbnC0mSfD1JkheSJHk4SZI5uubfJUnywyRJ\n",
+       "ziZJ8vGxJYiIiIiIiIiIeJcil5EaDAa7SZI8OBgMdpIkKYnIPyZJ8oCI/Bci8vXBYPC/J0nyP4rI\n",
+       "74rI7yZJclpEfkNETovIcRH5RpIkdwwGA3dLV6/XA6uq1WoFfl4WOmOLFFbMuXPnRGS4m/biV7xg\n",
+       "RVgRzER9+tOfFpERE8Xqv2xZIb4BljBnegfm5+d1l37XXXeJyNBPj90wLL7d3d3giHOz2Qys3hMn\n",
+       "TqjFByvGq1ev13MtVQ4etJabx6JwoG2er5jjB5jZ8srgxUtYq3Nubs4NfAdLwCwRYhjYOsRzf/Zn\n",
+       "f1ZExM1mX61Wg5gRL0A2S+QUFhLn57Lf8X04Nx7+xjg+fPhwEJBp/7bgnJA2houfB/T7fWWgMHbe\n",
+       "//73yxNPPCEiI1HaJEnc5yJWDcwfC+jh39nZ2SBout/va7kw3+bm5lLsmciwHcHGgXlcX18P2Lad\n",
+       "nZ0gm/zW1pY8//zzQZlRFrZOgaNHj6Zi5/B7a/2/9NJL2l4cUGzXmH6/r+wF7jE1NaXtyrE+GMdo\n",
+       "+0ajEeQyFBkxJD/4wQ/0MzCwXlwpA+MS6xTX1YsfQZzia6+9pu2KGCgvfvKHP/xhICJcq9VUzBPj\n",
+       "eXZ2NmDmt7e3dUxw3AzKjLWc2xl98MorrwRjO0kSN7cjgvofeeQRERmOO3gQEEt1+fJlueWWW0RE\n",
+       "VLSXgcNCvM4yuwhgLmflhgMrhnbudDp6TxaCxDsBbcHr9E033RR4KbLiobygfythIBLKvHAMkvcc\n",
+       "zqHIrDOA78cdhkF/ssq/d0gM643H2LLoKzDuufuVNthPRoWxrr3BYICdS0VEiiKyJsON1Iff+Pz/\n",
+       "EZFvyXAz9WkR+Y+DwaAjIq8kSXJORH5aRNxIPzsZ33ieLrpoyHK5HLz8Dx06FAws1v9gxWcbHMoB\n",
+       "r8CxY8d0EgPtdjug9mu1mnzwgx8UkXRyTMCqHouMqNrp6WkdRPwyzKMOvaSWPGBAieIeWYurN4g4\n",
+       "6a89OZg16FAOViAGxgU6om1Qfs9V881vflNpbh4f3skJm16hWCxqX3suIGBpaUlfcvbAwkHAiwlv\n",
+       "uPg0lH0G2urKlSu6UeHTbgDmAo8TjMVerxe48crlsr5wOQifKW6R4eKFIHKMGd7U4kV+7tw5bVO0\n",
+       "d7Va1Rekp1wPir3f7+vGAmXmccVzGn+jrVZWVnQDwOPKzvlCoeCuI9ikeYvh9PS0vlRxIMRzxa+v\n",
+       "rwdrB58mxPxpNBrBIZRWq6UbPLRzo9GQJ598UkSGrjqR4clgz42PjS8HdWP8YE0aB5RveXnZDRq3\n",
+       "Cb5fe+214GVUKBQC1+jOzk6w6Tx37lwqibPIsP0wZlg7DPeBi3p3dzdYe0+fPq0bTIwrb05vbm4G\n",
+       "oQmrq6vBRnlra0seeOABERF57rnnRGTo+vzLv/zL4J6YN3B5v/766+4hI6tPuLKyomscNlzXrl3T\n",
+       "zRrKxP1tE2DzZ4xms5kbuA94AdRZQeR57x12a3lJlyd1OaJeKPve3p72I6d9w9ieVGPL0z7jtSUv\n",
+       "BRhrTLHu47jf52FssHmSJIUkSZ4Ukcsi8shgMHhWRFYGgwEcv5dFZOWNv4+JCDvKL8iQmYqIiIiI\n",
+       "iIiI+DeHSRipvojckyTJIRH5+yRJHjTfD5IkyaMi3O9OnDjh7rxFRrth7Do9S6RSqejulbWNsMNk\n",
+       "VXSLQqEQuMVee+01ZbRw7cmTJwNL6aGHHtIcS8yAQaMKx6BhlYmMGJgkSQIJA89SYBVbfgaCR2Ft\n",
+       "93o9173k7dABtqhZn8MG/mfRmjgC7x1NZgvHWmteeXZ2dgK3EbcHrl1YWHCPb9u+qVarOlb4aLI9\n",
+       "asxB3Wx1YLx5cgGsE2XZK7aouA1gxfLBAnzP1DTqjvyPhw8fVpbIS9TK/cv5/vBZ1rxiXLx4UdkO\n",
+       "3G92dlbnBRiRBx98UO8HF+ru7m4gTdDtdoMg/aWlJZ0HuN/m5qaygB4biP5/7bXX1LWC+b25uan1\n",
+       "tUxcFjyGlvNlos1tQLDIkC20bFe32w3kRdg16VnF7OIHI4UxtrW1pesYjuC/9NJL8rd/+7epZ+A5\n",
+       "IvlaZRwCgHvceeedLiMFgJVbXV3VscgB6FYlutfruWwGWApmcjB20E9zc3M6pjHWjh49qp/BJffq\n",
+       "q69qGbz8hVg3dnd3dc4fO3ZM72vXs263K5///OdTn6GcjI9//OPyqU99SkREfud3fkdE0sHYfNjF\n",
+       "5mS8fPmyPPTQQ/o9/rUyOCIi9913n4ik55RdVzhp9vr6ustE5o3/cXPDBnP3+33trzx3WrVadTN5\n",
+       "eEr+HnPluRTBXLGepPf+8tyWNsefx6J512a94zw1/nGYWP5gMBhsiMj/JyIfEJHLSZIceaNwR0UE\n",
+       "s/SiiLCQz4k3PguwsbGR636JiIiIiIiIiHi78bnPfS73+yRvt5UkyWER6Q4Gg/UkSaZE5O9F5PdE\n",
+       "5BMicn0wGPxvSZL8rojMDQYDBJv/BxnGRR0XkW+IyO0D85AkSQZZRztrtVoQz8Hw4jS83Gg2lxqu\n",
+       "EUlbwAhYX1tb06Ow3/rWt4LnInDzxo0bgR/31KlTwWfMJCBmZVJF8qmpqaANWD07DzaQ1ubTy2p3\n",
+       "K8Fw6NAhlw0BIFvAli6zSx4jBQuI/eUIUIUVwAwC+nVpaUktMy8AlH/vWaLWwshiuCz4kAO3n2V8\n",
+       "mMnBGNvd3c31rXOwtmd5of1wv0KhoGOZA1TRl7hfqVRKBYWiDTzkMZceYI3//+19a4xkx3Xeqe6e\n",
+       "fs70PHZmX7O7XJJLLbWk6CUpiYQlRqEt0RIMWwoMWBEQQBCMOICTKH8CJArsKL+cwIAC/csfOYDk\n",
+       "JJSlBJYtBNaDBiWFMrQUJVJ8L2UuH7s7uzszO+/pme7p7psfvd/p71advtM7ojmmXR9AzLL79r1V\n",
+       "p07VrfPVeWxubmY6Ylr3Rdbpubm5wMLjGonom++bNAyszMdsKQPValXbCD322U2Rnr5Crrxm+FUH\n",
+       "dnZ2UoEieC4Af3LSvGAAACAASURBVKx7771XHn300dQz7rzzTl0fINNz584FzyoUCtpWMCmDkkz6\n",
+       "YP3MQrlc1nGA/JaWlrQv1jrG6wsHHoj0GGd/zTp58qQydNYYg31aXl4OMmAXCgVld62UB7v5O2Ic\n",
+       "8Pcb3/hGcM2HPvQhrYbAbcd7B395vb/rrrtEpH8aIRIyTj5wH2ZWfXmMjo6m/KnQd3zGMud3oMUW\n",
+       "+gzSzThS++DnAoPYLzCMYIDZHw5Eym7pBbKc5znJMVdj8DOlD2LYIH+LzfKTJd84tTAX9d2O9o6I\n",
+       "yJedcznpsVd/kiTJXznnnhaRrznnfkdEXheR377R0Redc18TkRdFpC0iv+dvogDkifA7WCgUMh3Z\n",
+       "0GF2DmXq2crmDGHxBMcEhFNgtVo1N1CYEHjxWs5w8/PzuunAxoBhHRWhzZwNF+3joz381tpE1Wo1\n",
+       "VSjcgzdRFhVcKpVUvuxEinGwojH836PPPjAhuJguw1JmLB5wyFxYWEg5OvN9RdIbKX8D3e129Xte\n",
+       "lPxJvrS0FCy6rDtAsVhUWfmT1eq3iD1OVsQMt9l35uUSB5y53HfW73Q6gQxarZa58OAzjorBb3/l\n",
+       "V35Fn4+SSNC/S5cu6b15Y+2XT7AiDvm41M+EzfdoNBqBw7WVAV0kjCAcBC6w6m+kG42GyoM3GBgH\n",
+       "Puq0jAm0B2vQwYMH9bc4yuT+YgO0tbUV6PbLL78sn/nMZ0TEPobEs06cOKEbmZ///OeZffextrYW\n",
+       "ROhdvXo1eIHxMR5nO+doTZHBmeEhZ8iMj8SwLs7Pz6cilkXSGx/oQbfbTZVHwWfYgFj6hPuMj4/r\n",
+       "b7Bpunz5sh6x8RzGnMLfy5cvm3M4a5PG0ZlwddjNadrXbUufNzY2UoSAFZU2bJ6m3Y6zBsE5F5SG\n",
+       "abVaqbIyIj35YQ5AVhxBbuk2roNOikiqQDZXscCzfOOQN9IwAtrtdrBOc1AKl6vKCjLyC3hnYbf0\n",
+       "B8+JyH3G50si8uEBv/lDEfnDXZ8cEREREREREfEOx75mNrcyV7NlytY2O5yKpOk77D6LxWIqmzPu\n",
+       "i10n74r90Mbt7W3T6gANDWfZ6elppbbZORSA1bi0tBSEg1vh73z0hN04HykxYI2DpWo0GsExCecl\n",
+       "4pBdqw2Q+fLyst4H1w3jsCzSsyr8+nHVanWoowSRPkvADCNkyPnB/OtFQouh3W6bDoJZjsBZ4GKa\n",
+       "0FUrj5B1fCgS6vTIyEiQrmJtbS11dOG3E7DCn7vdbnDMxOHgbEmCRreONOGUbGFqakpZFKS+uHbt\n",
+       "mhkmj/kD+bCugSnM5/OqlxyEgSMA6DZblOzgC8sdljoX2mXw2mIFNFisnX+cXigUNLM4nMRFJLBs\n",
+       "FxcXdS3A/GHGBGH87XY7YCo4y3VWNvHt7W2dF3ykZ+kdwGwG+glZOucCVoedyDFenK0ccrR0KEkS\n",
+       "fQaO53hOMysLNhtrej6fN6sJYLz4SJHzW4n0xshfq3Z2dtTxHAzRgQMHVLdwCnHPPffIs88+KyL9\n",
+       "9W5paUnuu+8+fZ5Ib/wgS8s9g+vlAZDbbqknLMYT+nXw4MGU/K36pZYeW9dYcwQytLLd8zqa5XiO\n",
+       "65rNpt4H485sMFgndtLHWFu5uQb11WJCrdMC6BOe32w2bzpL/M3knYq19iIiIiIiIiIi9oh9Y6R8\n",
+       "Rzpr14tdNCfkZH8S3y+h0WikUiGI9CxW7HxxHYeNw9LodDpBFOEv//IvaxZcKykcmKiRkRG1YthR\n",
+       "FL4RVjI37Oh3dnZ0Vw+rfHNz07QwsaOGXNhxHLIY5NAOa4mtI8sPirP0Wgydz2w1m80gozGHZ1ss\n",
+       "C1ffhjUBGY2OjgZV3Nl5nS16q05SluXF13GtJpG0jxS3z9dL9pviKvZ+Fm6RMHkkn/sDtVotlawQ\n",
+       "7c2qhs5hyH7CS5G+lchMHsYfn7F1hvFbWVkJ5Le0tCRPPvlkIAMre7rPDLAvjeWLxPUEwRZDjrVa\n",
+       "TX8LmY2OjgZZ0X34PkgifXmApbh48WKm3wMnPsWc9NcVtFGkp59oD9IpjI2NBQzIlStXggzOjUZD\n",
+       "1xEkxrScw51zmg2dgftY896SUVYWbq7kYNVBhb6PjY2ZCUzRN4uRwvhWKhUzrQ3G3ZIz+sZzFNfl\n",
+       "crlA7zhhKGqpWhUOeC4ys+PLeXR0VE6cOCEiNnvCrDqSjGKdmp+fV7mhj+yjC9RqtcAxutlsqo5t\n",
+       "bm4G/jyDfIx9Vt5y0mY/TGbErd/47BAcr/m33W7XjMLHM8CsFYtFHTt8V61WdX1Cup9ms5kKGOG/\n",
+       "DE7dg3cNr9vDpgXiIBXfp3WY05l920ihsX72Z5FwMu3s7KSiOfx7ZG3I2PkWi87ly5dTJVP4WSJ9\n",
+       "Kvmpp57SdmEicTkH3O/QoUOpDRTAznk+OM+Vv2FhhUQ7x8fHA0p9e3s7taCI9AafnX0BnhiQOR+t\n",
+       "WHm30D/evPgL7Pj4eKBofA2X6sDE4cy7/oax1WrpePLRGG9asmA5GQ5D0VqFLK3jP74/t8kvoC3S\n",
+       "33TyYuNT8IPKGnABaDwL94G8rbYMip7JOmqFXk1OTuoRFeexwiIHfVpaWgqihfhYCG0YJHe/MDKD\n",
+       "N+g+sgp5A77cRPpHRHgubxgA3vgA+XxegzesgALOMYVNA0db4nl4Ph9/wMiamZlRncDmz5Jbo9Ew\n",
+       "Ny+cEXsYwJBiOXGhYoy7dXyHY7V6vW7qO8DrhV8ZYmtrS59n6S/rhB8lyCWRWJ/9zQsDmyJLn958\n",
+       "803deKN9lowfeuihVKmeQWg0GvqewgZ8YWFB1zH00dpwbG9vB1ngFxYWdO7VarVgvgwytvwcb9bc\n",
+       "tNYJS+9yuZwpO3+zwZF8WCcKhUKQObzVagVr+Pr6ujn3shzpef3hQBsAbeAM7X5xeG6fVX3iLc1s\n",
+       "HhERERERERERYWPfGClYabDM2KnO37EyG2OFkDM4zFakt+vEjpKdK/3wcpH+7hW7Yw7BtQqLwkKw\n",
+       "MtdOT0+rZYFn8ZEI/nJWbO6bb41xO2G58O6ZrRX0Y1D9JlgssPAHOSpa1q5vFVUqlSB0l9uFfnJG\n",
+       "Y4bvLMljD/k550x61WcVrb7kcrngMz4Stahsv23+My2HdusoxGfHisVi6lhWpGedWmHo6NNuWXV9\n",
+       "ZqBarQaZqDc3NwMLMpfLBSzf8vKy/OQnPwn6y+Mgsre6hJiXa2trQ/8erAkfzVppEnj84Txsgetl\n",
+       "+qjX64FV3Gq11EkWz+W2Q/aHDx9OHeWgregzB81g7uFZMzMzqaAPkZ6e+KyFxcZxmpRhc4JZKSAs\n",
+       "BhvrnnVkY7EZR48e1bWB1wOr7pvl9OuH04uEx5XlctlkVjGeYL9WV1cz0wqwrHA/iyXCUfDPfvaz\n",
+       "VIWJQahWq/qe4uMmvw3sIM2Mrn8dZxC3+s0nCRwosVtGc8A/cuQ8bFgTBumTVX9vWOdsPzM/r3+c\n",
+       "OiFrnWBd9ddPfufz+8Rnx3ZjzIdJe6C/GfrKiIiIiIiIiIiIFPY1/cHU1FSm8x4sjdXV1ZQPkEiP\n",
+       "CfEZg1tuuUVDtAHLp4adOdmZD+G9sP7a7bbpsGllh/X9nJIkCVgUqy3tdlvP+DkZnW+NVatVvR+u\n",
+       "4wRwXGUbVi9bOPxsy6HUZz6cc+YZfJaPEjud+35ufs0ykbRVY2Wi383CgWWTlVTTQrVaDaxiZgbR\n",
+       "VsuHp1qtahvZJ8CyXnz/kFarFbCB+D0jSRK1mtlvAuMBOa+vrwc6xvfi8RvWSvVhWbgjIyM6vlxj\n",
+       "EM+zKsezzw365NdhE0n7R1ry91NKMPMjMlxdLMu/p9PpBHX3ms2mjjESOzJjBx06ePCgMhqcLBXy\n",
+       "gE9is9nUNQbXXbx4UfXEZ6sZ4+PjQRg96xzSGuxWPYF90NAuyJzvj/nYbrcDtoYTAQNzc3Op+qIi\n",
+       "PT1AP5mRQn/ZJ9R/BqcNwG+np6cDXdzc3NR3iBUybwHj3G639RmsN9DfLBbKcvTO5/MqN/6tny2e\n",
+       "ZYr3T71e1xMTngNZrEySJIGuWHUY2UeKx4HZySz4Y81BQr5flEg6SSjGBH3nNRryYzn6AVWDwKc8\n",
+       "kDnuMyjNgX9PdixnOd8MEwXs60aKX7gYiEajYeaUgtMdjofa7XagyHw8yA6NcACE0q2tranC8aYN\n",
+       "ixAXxPQ3SOzcxm3zF4xut5s5CdBvLvZq0fdYdKw8S1wwGIvDG2+8YSoSTzjrGM2XZalUCl7SfqQl\n",
+       "rgOy+rubkzhnevczJFvgMjrcFiwKTM/6L9esCFERO0IT+mKNUZIkpsz9o6Tt7W2VA+uV9fJHG3GP\n",
+       "SqWiG0t+4fkRkzeTuRhy5ogVtIVLLPgLLWfC9x3q/X/je7zor169muk0Dp2bnJzUucnHGpyTC8/i\n",
+       "TZqV+XzYYqXQXx4vzDvMOQbGfGVlRZ2L0WYuo8PRbljbsF40Gg39DcbaerENmj9oA9YTC7tFgeJF\n",
+       "xGsMnOyTJAnKQS0uLpqFhPEMqzwUwMYb+rmbAYQ18MKFC5n3HvbIGMewnU4nCKSo1+tamD0L1vqz\n",
+       "vr5ujgN00gomglHGBiR0id1JpqamhnJ65jZxFJuf94kd94F8Pq/rEjtwWwatX3ImSRJzDLOCRyxk\n",
+       "HatZAUHsvI4+Tk9Pa5vZfcU/yt7tneSvj5nt3vWKiIiIiIiIiIgIE/vKSOXzebXQEHZ76NAh00HQ\n",
+       "3xXv7OwENfSY1sZx2fz8fBDSWi6XdUdr5dNhC9ffSfNOGVbx1NSUmffFLx6bz+eDdAvlcjlVEBdt\n",
+       "gmUAWbCjOu/MYRnykSYsDes4jfvEx2m+pddut4cq7DsoxwY+5wzUgGVZwHIolUqmVec7VSZJEowN\n",
+       "U9B+wVOGZR1xNnm2rPy0ApaDeS6Xy2SCLLYKbTh27Fhm1nGAw8aZwcRv2HrC95AjF7yGnIvFYpCb\n",
+       "rdPppJzgRXZnuLhvFluA78ECfPCDH9Q2wDHcsj6t3GsMZj8s3bKyekMe1Wo10IHl5WX9nvsE2Vhj\n",
+       "yEe3WIPgnH7p0qWUIzGuR1+5SPhuR8kig+eZzwxa+K3f+i356le/GnyOPmF95DlguTTwHPTHzAom\n",
+       "sbJ6VyqVwHUjSZJUjiWRdE0+Xtcx1jgSK5fLZsoCf70Q6csc86NUKulneF+g1iSDTyF2c0S2WJn3\n",
+       "v//9IiLy/e9/P3VP/ttut9W5HUzU5OSkzoOlpSUz8z36wvpk5VDywTLnzODWfMe9rVp7e3UZ4Lbz\n",
+       "6UcW+9XpdPQ3vuM492O3422Ag3+4xqwViLYbIiMVEREREREREbFH7BsjlcvlZHp6OrBi4OjJuP/+\n",
+       "+9XJk5OvYXfKO1BYcJYTe1Y9vyRJhmJgRPpn3vBLeuWVV/Q7WLXMqsGSsEJY2W/GqkoOy7nVaulz\n",
+       "2crDPZmNYUc8C9i5z87OikgvfYPFSAFs7fifcVvAjm1ubgbh2Nw3zljvJ13b2toamHaA28CV6mFN\n",
+       "MDPB12elWADY14urjaNd+KxararVDIuFgxestmP8p6am1MKEPNivjwH5giVdWFgIWJHJyUnVZYw5\n",
+       "+4ll1ZZqt9tBMrp8Ph+wweVyWb8ftsI82Jlut6u/QZueeOIJvZ6d0v1+O+cC1ujUqVPKXKOd/lhC\n",
+       "vnfffbeI9OY61gL2c+P6fSK9ecb1wAA/LJ/ZCQ4fx/hj3lYqFWVccA/2VYI8ODOzBTyrUCiYaTL8\n",
+       "6g4Wrl27Jh//+MdFROTP//zP9XO/vman0wmscA5oYfhjMyhBJ+6NdZvX5TNnzoiIyPnz5/V+YJqc\n",
+       "c4H+VioVlS/W6s3NTZ1zzAD6zEq5XA7W4W63q2PN7x32c+W/3J9Bfn6QHypd/PSnP5VvfvObItJ/\n",
+       "/0xPT+t1YCELhUKwXi8vL+safeXKlYAl5NpzFpOD/nKgEvfdH+t8Pm8mPt7NlwhghhHP8NPL8GdW\n",
+       "2zn9gcV6ZbFs0A1OG8HvVp9VbLfbqjuDEiOLDOcjtW8bqW63a26aJiYmtFNI7//YY4/p95y9lhco\n",
+       "kd5gWY6afjSOSHhkc+zYsYEvNR9YNLCB4qMoP4OsiJ1jCKhUKsFm8s477wxyUxUKhWAiHT9+XCM9\n",
+       "rMlubXxarVaqwK1IT37WcRdvjPBbwCoKy/3cbcER6cnIP07lzQHDipDxj075mmGUX6S/UWG9wmTe\n",
+       "2NgIHDKPHDkSvFh4E2s913IoBYrFor7U8Xd5eVk3DGjXxMSEvli4LAxHnYqk8wNx2/EZxso5F2ya\n",
+       "ssoScT/5+JC/t6JwAKvc0Hve8x5tu1+gulqtqtMt2sllX/hYzZI5XthnzpzRf2Oz0el0NIM7lwPB\n",
+       "dfwihV5CzsePH9djdBhwp0+fVuds9G99fV3XHSBJEp1z/BLL2vByDi3ML2uucqFl/2Xz+OOPy+c+\n",
+       "9zkREXn66adFJO3IjLbMzs4GxuTS0lKw6cRzGCsrK0H08fz8vF5nGbZAvV7XPvELDTLnjP7QCW4n\n",
+       "NhuIlOM8cdjUr62tBQXIOfoM4NIpjGEjAn0j5uzZs1rwmqOtYYCiv2NjY4GeNhqNVCF7/2VfLBZT\n",
+       "2fpF0sEhWetOpVLRdwZkPygHlZ/hO5fLBYW7t7e3b7ooMMAG625O6X6hen7vcdReVl5A3zhmsKE0\n",
+       "TEFo/d2uV0RERERERERERJhww+y23vKHOpeIpMPpwQysrq4qJckW0Ic//GER6bNT4+PjugO2drG4\n",
+       "R7PZDKhJdoyENXP58mUtGorwV6s4K34v0rc6RkZGBu76RfpWkcXAHTp0KAhd5yMAn3EQ6TM/J06c\n",
+       "CCy9SqWSyiPlF7oVkSCcmS04foZPcXc6nSDfk3MulXMGf7OOz3Bdp9PR/Dxg4Lh2H2RgWYkWI8W6\n",
+       "nBUmzXrH1h/3Hf32mcu77rpLXnjhhYF94xwp1pGj76xq1X0T6R+FYNxZv/xxERF597vfLSI9OfrO\n",
+       "1UxrM6z8VT5yuVxQh9G6rlwu67hauVkAyyl5YmJC28d5mADO1O/PMz5q5RBn4MiRI8oW4eiuXq/L\n",
+       "+fPnRcTOX2YBDFi9XldGCkc1uVxOv4deXb9+XdcTzhWEOZfF0IyMjATH1t1u1zx+uP3220WkL+tB\n",
+       "rDru87u/+7siIvKlL30pONqZnZ1VBhRrxJUrV8w5AvD6ghQRfjoHxuHDh+Vd73qXiIj84Ac/EJEe\n",
+       "y+enHMjn86l8TyJ22o9CoaCfszsE2myx25DF6Oio6qLFxHAKENybZeAfZY2MjATP+/SnPy1f+cpX\n",
+       "UteJiMoAaz+7p0BvRkZGtKA19xnO9cOy7uVyWfUT/XjjjTeCIu2sd1ynz2c4rTWLs7VDLq1WKzON\n",
+       "QVZVCX7/WAw37lGpVFKMGu7nF0YfdHTnz7NB6WiwtiRJYgo9MlIREREREREREXvEvqY/4N0qdubl\n",
+       "cjmwWD/60Y/Kt771LRHp+3gMSnjpJ+RjBz5YpFeuXNHruP6eb3G1Wi2TQfDrlq2vr2f6pfghwD78\n",
+       "Hf/U1JRaDmy5cmZcfOeHg25tbWWGbVYqlcwMycC73vUuZYnYSd93iBwbGwuc6EdHR5W9QEjv3Nyc\n",
+       "WiqcZsD301ldXQ3SWvD9fYZIJG0RMos1CJz9F2NeKpVSMuS+oF0ig5OEwqJhywdtBbt0/fr1wLpi\n",
+       "HQY7OjMzo4wJxpqtdst6fvHFF/XfYEA46zjAejoMG83zjC1J3zeDZca12zCGYHws+a2srKi++ykD\n",
+       "RPoW6ejoaDDP/Dnr+2KBjRKx5yHax8w11onV1VW9P9gV6AO3td1u6+fox/Xr15U5wHhUq9WBTtmM\n",
+       "yclJ7R/kOoh98NngQYA8Hn/8cRERefDBB/XfwMbGRsDyWnX/BoF9xkRsNnhtbU2ZKMBKIpnP55VV\n",
+       "Yt84yNwKDOL6mX5gUa1W07kJ2TabTXMu+ckmB8kWz8vyXfvud7+rbDHPUcxlnpsYS2bneL0bJhiq\n",
+       "UCjoOstJK3ke+MB1PA7sJ4R3JeRmBU2xb95uvlJZQSvsj8VBSSI9XYSMOYG3VUsV7xCrrRgv9m3l\n",
+       "Nt+MbxSwrxspnqDHjh0TkTQ1jRcQNlEifdrz+eefT22MRGyHvKmpKaWr+Tv/BT4xMRHkI7GymE9P\n",
+       "T+vz+DurxAoX7BXpLcb+xmxxcTFYDK0jQI5wZCXHv/FbzgWDl4RIWE6FwXmGrMWD/+1PEquQKCso\n",
+       "57fBosXPgiw57wtT6iJp2VrKbWWgxT2YcgZWV1dT0Z8iPbmA/uZNgf9b3njz860gA9yH78dRXSLp\n",
+       "jQXubT3j4sWLehwA+fEmG99Z0T2DMAzFzrA2ZJyJHL9Hn5Ik0XmGZx09elSPjdCPF198MdgwlEql\n",
+       "IMpqeXk588hWxNZv/3h8dXXVNKTw8sUL69ChQ0GxcitnUaPRUD0+deqUfo55g+cXCoXgqH2QPvsF\n",
+       "zy3kcjm9d1YVAMbzzz8vImLmJBoZGdE1A47bpVIpZQwNAgfrQBa83vJ8Q98RTPS9731PbrnlFhHp\n",
+       "58Nrt9u6geIqEH7ZHc6oDZ0YGxtLBY+I2EERlgO5cy4w1nK5XOZmMmtjMDc3p3r10Y9+VER6myvL\n",
+       "2EW7EORz/fp1bQMfYQO1Wi0wGBuNRmaAD2SZz+e1DVbJM4DnMMOPSO12u4ELwF7gZ2Dnf1vtsyIN\n",
+       "2fEd39VqtSCjuXW/SqUSrIfDFGOOR3sREREREREREXvEvjJSIn3WhKlHWJ2+VSHSt6j834j02CdY\n",
+       "mLDU+B5WQUeE0+5Wfw3Wy+Li4lBFDZMkCZzS/XuK9Kxga8fvF7zd3t7WnbmVDwuOmYVCQWXATBHu\n",
+       "x8cbYG3YcrRYB8uaG9QfkfQOHhY8Z+kFxsbGtO98JIfncv/8TMVWfa5B2dN9dLvd4KiEnRsZsAJZ\n",
+       "5v7YsBO+laMM1tv09LQef4D1yOVyeu9bb71VRNLpAJiB4XxlIj3rCb/l7+CUytmCOeSb5cDI5/NB\n",
+       "PSp8jn4CHG4v0mN5/PsxG4j7zs3NBczG9PS0Xgd9aTabJruEduG51Wo10+k2l8upFc45nMAc8dzz\n",
+       "mQ3rKJBlwPMLLCBnrAdDw06znN5BZLATrN8Wi5mq1+uqixaLnQVrfuzs7GhKih/+8IfanzvuuENE\n",
+       "shmpJElSgTsi6TnKlQsgI+QGnJqaChz42+22rjsY662trVTuLpHeHDx58qSI9BlaZmStfkLXeK5i\n",
+       "nnU6nYAlLBQK5vwBOEUFvucgGugYn6xYQH9xOsPs52233aYpUQBOdWJV3ECf+AiTmTCficrlcgHT\n",
+       "xHOQs+xbwUnoOwdIWEw94OcaFOnL16p+4ZwzA198BozTs6Bv1hEfp55BP6138TCIjFRERERERERE\n",
+       "xB6xb4wU6jDBwsDuOZ/Pq6XAPhw4N+YwUd8Zme/Hlh52sbzD91MdWNXM2aLKYqGsbMe8A89KMsY7\n",
+       "YIQPVyqVwMI8ceKEOiuyw6pfC2zQjho7c2ZFgLGxscCvZnl5WZkNDhH2rf+skGhuD/trAdvb24HP\n",
+       "C59lMyz2xA/f5VpMkDnXNwSmpqYCNnNmZiYzczD0Y2NjQ//NDBsYQb6vn9Xb97fBNdBfrvNlsY7Q\n",
+       "WXbchoUJvyPnnMlO+EEJhUIh8HMSCf0BrNQT7NRvZSWG7FkPsnx9WB/ZSsVceu973ysiPYv4xz/+\n",
+       "sYj0fT1arVamFTk7O6u6xekoICN+nu+34pzT5+A7nsuw3tmHBkzjyZMndezwt9vtZjIbALO+lvM9\n",
+       "UK/XU87vNwOet8ywok/MAEO+Vi1S4PLly8rCANxmyODo0aPKbPF8y0pDwTUhITfMvWKxmClLzNX1\n",
+       "9fVAhhw0wffwWXfLz5LB10OWrJNZbCF0cmZmRucBO5uDOX3++eeDQB8rYITZs93qZPp15trtdrCe\n",
+       "M0uN79gvidks32ndYopZlrx2+OzUoBQEPvs0MjISsI5bW1tBG6rVasD+7uzs6HvTYqyAQRVCGPu2\n",
+       "kfLzxVgRRvxvfxJUq1XtPNOy/nUHDhwI8hKNj4+rssLp1HIi7XQ6OkhYsJaXl1NUrsjgFwYWpawC\n",
+       "rKyA2LDkcrkggogjPjjT8L333isiIufOndPvT5w4ISKSooL5pQSKGc+1NhHXr1/XY08eB39TWC6X\n",
+       "9d64R7lc1hc76HZr89dsNnWDxUcZ1hGsv2niYzjLMR5/i8ViMEmnp6eDl4FF6fMmgieh/8I9fPiw\n",
+       "eewBB1vc99q1a9o39Js3k7jv9va2LlrYXC8tLanOoj+nT5+Wn//85yKSPoZC1BSubzabgWNtVkkE\n",
+       "hmVADMpcnmUw3H///SLSe6EiIhE4efKkjg2Ow8rlssqNdRvAizmfz+sL3sLa2poeSXAEYZZDvn90\n",
+       "K9LfRLRaLX02FmGuRAA9OHPmTJALbGNjw9RfHyzHLAOuUqmo3LKOshlok0jf7YE369ioYoxPnjyp\n",
+       "G4HdnG45L51ITz6+LDkrNtY451wwHrOzs3pEiOeOj48H85sL6DL4nZAF/yh72Fx0FiYnJ3UcYFBZ\n",
+       "hZtFRDPrQ1YXLlxQp37OHQUXlWHbYblm8IbG0qesecvvQJaHL1dLN3jN57Jbvszb7Xbw+3K5rM+1\n",
+       "jPWsXHWlUknXO+wRNjc3VZZ4r73xxhuZGyhgGLnHo72IiIiIiIiIiD1iXxmp8fFxk5b3LYGJiYkU\n",
+       "lSuS3mVbdLmfBVakT2EePnxYn2s9n8NPsfPFddVqNZWvRCTtaIffrqysKCOUZRV1u10tcIn+vvTS\n",
+       "S/o9HxV97GMfExGRv/zLv9TPfAfkWq2mz2UWApYIMiEzWJbo78TERODcyLlCcO+dnZ2gf/V6XVkz\n",
+       "bpefwV2kbzGALdjc3Mysa5XlPGqh1WoFR8AWVbu8vBwUfi0Wi8oSscXspycoFApmRmk/WIJz2bBe\n",
+       "4jP0m1lKPvZivRTpFXsFw4BxWVxcVMaHHUetOl34HnJhtsA6IgeszPvVajVwLN7Z2dGxgWOxiARp\n",
+       "SziNA8ag1WoNFXbs5yLzHZ6TJEk5OoukjxestCu4jvUdunv48GFlCdHul19+WfuEZ/G4gYXc2trS\n",
+       "/mXl2hn2yGtmZkbHcNgs19BJ1nWMJRffBhYXF81KCFb6BMgI8jt06FAQin/58uVgvs7Ozgbz58qV\n",
+       "K6rvGAMegPCbHgAAIABJREFU50ceeURERL7zne/obzEGCwsLwbgNgl+v9ciRI3oEz0yEz2rzmo+/\n",
+       "o6OjwXtnfn5e74219+WXX1b5c/CUz65MT0+njvt9FItFnX9oa6vV0rmLtg5iNdEGdofAv9FmPhLN\n",
+       "Oiq0UgXx9XwU6F9nyXK3XFRZ71QrUCWXy+lahr/OOZUBH4da7jm7ITJSERERERERERF7xL6mP2B2\n",
+       "BztIK/EY/z92kCsrK6kaXCK93bPlEOnXJkOoLX/X6XTMhGJc/06kt8v3d9Tlcll38rBEqtWqMiFg\n",
+       "06zz3DvvvFMtILZi/fpWk5OTWmcQuOWWW4KwYeec7rhhObM8OEsw+s7sCHbhFhvAliSzRpaDInwj\n",
+       "/GcxuMYf10vKOpNm6z7rfgyfxWIrmcfft0BqtVrgX8f1F/36eoPAzukYV+hpo9HQvu/mt4RxggPq\n",
+       "pUuXAtaL2aKsbL2tVitIwsqOqtZv/cz63OZhfa5E+kwU7lcqlZRxsfwh2OfGh3POrMsHrK2tBb5K\n",
+       "o6Oj+jwr0zjmYbvd1vtxOhX2MwKgU7wOoH+Q+eTkZMAWLC0tZVrAYH5GRkYCdmVjY0MZpN0ypqPN\n",
+       "YJUsOVv32NjYUD2Hzi4sLKTWFpF0EAtkxU7W+O3a2pr2CeuixQY2m83Ab5Wz+3/ve98TkbS+Azxu\n",
+       "w6SqEZHAj45hnZxwAkoOvPAZpEqlot/Df845l2Ki8AywqFg7eR20fBDb7Xam/y0DY4K57pzTtQ26\n",
+       "4JwzmRx/7eYky1gbms1msM5ystSsDPL5fN50IvfHlZ/LTu4ICIHv09jYmPYNurW5uanP48zvWFOg\n",
+       "J1ZwzTDY142UNXA8wUHVbm5uyj333CMi/YVgZWVFhYkFYJBjH7LmwonPohx5I2XR6Fz2AuAs5VjQ\n",
+       "8P2JEyfUATULBw8e1M0QHN8tCrVWq+mkwXVvvPGGWZwTi5FfCFREUm1CW3lTakV8YHFxzgV5n6DE\n",
+       "jNXV1WBcBx3hQpF5IvkvTI7Q4ygqv83WBmxkZCTYdPFREo9/FnWNiVYoFHTc0c6trS2zuDTkyy8E\n",
+       "6Lf1IuPNDv6N9o2OjupYY/NULpd1k8aOuf5xlRV1xKUkeAwgU14UuaSGSDoDNmSbJEnKyMFneIHj\n",
+       "78LCguoxZGDl/yqXy/o92lev17UNVmSgiB0ZiWs5rxaATWmr1dIXHX8PnYAD/7lz58z8dtAPdqqG\n",
+       "PPD8mZkZXavQp2KxqPMZGyVeyHHfiYmJIAKWC/ZmlYUS6esMR0n786VarZpHYdBfPNfKfTc6Ohrk\n",
+       "SOPNFRui1ssfwQhYs6x1fHJyUnUCbRobG1M3Ap6/WUedWUXOl5aWguoT3FcOFsH3WHsPHDgQRAPz\n",
+       "sVWWY/Pq6moQSc7GuPVbK3cU/5tLRQ3jVG25SFgbUV7j+MjzZkurZG2yeCyx5nO5Ks4TlZUrioF2\n",
+       "WWs0wP31S8FlIR7tRURERERERETsEfvGSOVyOZmbm9OdJf/FDh9WydmzZ+Wpp54K7uEXKGZLkr+D\n",
+       "tcbsg/9bkb6FbB0lYHfPzA92yp1ORy0a3Pfll18OcgFZzEC9XtcdObfFzw/Du3arzQxY/3yECXCf\n",
+       "uDYVZ24XSR/FQX4zMzOBJcryQL23CxcuBNQ11wKzHG5hBeZyOTOPC5B1hNHtdgPn3Fwup1YMO2vj\n",
+       "nmyV+9bX6upqwHrNzMykss2jb8yeAhhr6ES5XNa24GiEWQVY8hzmjfutrKwMLPjLSJJEmSgcpzSb\n",
+       "Tb0P11L087UxJQ5ZDEp/ABlAjpOTkypzDrsGE7JbhQGMK889yBSy9/Of4RmsE1Z78Rmcpq1UFe95\n",
+       "z3vMzzGvIT/LIZu/BzY2NnTcwTAcPnxY2RP8PXz4sMrLYoNw32KxqGsCh3GDmclKf8D1N7l2G9qA\n",
+       "8Thw4IAZNOG3xUqX8vrrr+t6yPMC4GAhP7VLrVZTlwNeX/wjz2effTYIFHHOBTVIR0ZGVKZc4y+L\n",
+       "NUab/AAbwM+RxiwzAhysnF6NRkNPU5577rlUO0XSuRAhUxxRrayspK7loy2RtCx5zeQM/gDWAmSB\n",
+       "r1arOv9fe+01EUnn1cIYbm9vp+rz4TM/DQUzbwCnOrDqpgJ8fMjvcD42FOnJzaoFi/nPa5af3b3b\n",
+       "7Zq1L/15wXkHs9oc9GHXKyIiIiIiIiIiIkzsGyPF9XNE+hba1atXU+yESC90GtYLdvC1Wk1++tOf\n",
+       "pu7Ju3JYNidPnkz5xIj0dvb4nn1u/CzGpVJJrUrrzB5n2m+++abu4GHRTE1NaZ+w85+amlJLBqkM\n",
+       "nnnmGdO3h2uJifQcdP3klWxZwYpZWFgwmSg/1JVx4MCBgGniMFDIldkndloGYPW88sorQVoDDsGH\n",
+       "dceMFNdug8XAPi3QE26/5dPEgQciaUsdNcOuX79u1jrzE2Pu7OwEqRiazabJAlqWLPQDfhMsY4td\n",
+       "go5zxnI8/8CBAyprK40D5HLixAl9HrfJZ9YYmIuDklTiOXhGqVRKJQ8VGRxmDoYQcubs3hx67OvY\n",
+       "1NRUJhM1LIrFosrdcogH033o0CGTzYReIvHpgQMHVBezMqqzVQxcuHBBM+Dju2PHjul8tXxFmOWB\n",
+       "pQwZdTod1UVLnwAr0GNzczNgsdbX1zVE/9VXXw1+w/Xa/LZa1RHa7Xbg9yPSXxMgbz+Fi0hPx3yW\n",
+       "/9KlS0GAxOTkZOCrxL63nNjWT8Ug0l83rfUdsIJYONwfMvvBD36g38On7vz58+qXarGlzG5iDLk2\n",
+       "JCfk9NfuQe8NK1AI83O3dBAWLP8735nf0l3rM85OzwySJZthfbb8YJhut2sydYD1GXSs2WwOlXbF\n",
+       "x746mx86dEgVlKl/CAZUaavV0pcD/oJOFbGP4vxCliJ9p3N+wbCSWCUi/MgRq1iuRaG22+1UhlqR\n",
+       "NB380EMPiUgvJ5S/2PCLip22/SOF48eP63EF5+mxgIVna2sroKk3NzcDB9CdnZ1UlA6ej+M7Xvyw\n",
+       "MD7zzDP6GRYDtHlqairIZt/tdgP6vtls6rGXdUy2WxQOdAC6w5ORjy38l+qgI0PICH28evVqcO2g\n",
+       "rN6Wsz82S+jHwsJCkA1ZJL0xFkkfR1nHkpgPzz77rH6GxUGk31+03crkLtKnvbGYNJtNfQ6XvEGb\n",
+       "sTFYXV018834L1de6K0NKcALPmT/0EMP6TEEMv0nSZIZJMCO8VZuJ6wd58+fl9/4jd8QEZFvfvOb\n",
+       "2l+8JDGX5+fn5cEHHxQRkb/+67/W63xHW86hhbmwsbGh/+YSRrtF3AFY09D2CxcuaPuzNpvb29vB\n",
+       "GsnHJHiBz8/Pa5mXM2fOiEgviMHfMFobSNYlPmrhIuMiPb3jNUEkHRX3kY98REREvvvd76qMrJxV\n",
+       "Z8+eFRFJGdNWeRY+qkbfcV273TblBrn45bdE+kfyExMTehzJ5cUwDzl7v7U24Bk4eux0OhrlyO8h\n",
+       "zqhvHbtCNrzhhsytDRAbRfge9+BcSxivVqs1dHWDYYzd3crWcDutahZ+GRqe//xe9seQA3jQ33q9\n",
+       "ruPPOoP347vf/W4R6bsEZCEe7UVERERERERE7BFu2FDFt/ShziV+ngjQ/Pl8XneRsFyYavvgBz8o\n",
+       "IiJPPPGE1itC6PeRI0eC2m7tdlt3mNhls0WNHS6H8eN5g3bPsEqwu5+bm8ss6MnsAhgdWPn5fF6t\n",
+       "OS54iqMEMATOOf3+fe97n4iIPP3006mCniK93TMzesiSzuHHkAMXRPWtDi5qy8dlsFg5jcJdd90l\n",
+       "IiIvvPCCPgssAFi4Wq0WWKeDQrazQn6tTM9W2C0fUfjHWkzV82/9o12GVX8N/SgUCoHVmSSJsmKw\n",
+       "OtfW1gY6szIOHDgQMKF33nmnsiJ8vOGP2y233KLtt44r0N9SqZSZ7oOvH2aNYIfmXwSQKdffs46q\n",
+       "H374YRHp1Y6Evvz+7/9+oLPb29vKzPkO/CLpoJRf/dVfFZG+UzCzT08//bSI9NgF/IZzRllHp1gn\n",
+       "sDa8+uqryl5w/i3r+BifQfZHjhzRccf8f+WVV3T8reMxgOsWZtVVY/is9SDgvuVyOZVrTSRdzBlw\n",
+       "zuncxLrdbDYzmWbMo3a7nSriK5JmODHmU1NTQeAAX4c1uFqtplh0wK8gYJ1CiIi5Fv4i8N9Jg8B5\n",
+       "ybKOdAF2Use6Nz09reMEJnwQQ431C/rObBv0vlQqqXytHHR4v3e73dSJhEhvXP0alOy8zgWIsUZj\n",
+       "PS6Xy/qeyDqe/UUBRitJEjOvRmSkIiIiIiIiIiL2iH1jpPL5vBSLRXXI9hOZederlYC/58+flxMn\n",
+       "TohI36JeXl42a2f90i/9koiIZp1lCwMMB9fkwmetViuwyGZnZ9UihZ+GldzutttuUwaBWZQ/+IM/\n",
+       "EBGRL37xiyKSdhj92wDGF8zWoGf5vkqcYf69732viPQccy3mzaoB6FvoxWIxqGvUaDTUYmErjLPv\n",
+       "cpt8+BYch6ajkvrc3JwylxiP0dFR0/E0i5HyM81z+0RCay5JkiCjPt8H1tvExIR+xv588AFBmy12\n",
+       "rlarKduJe0AnhwEsTPS7Wq2mslH74DH1AymY6YTsT58+rewJ5vfXv/51vQ4+CJy1m5OIQmfx3Gaz\n",
+       "qXK22sl+EPiNc07nJmRVKpVMf5NPfvKTIiLyp3/6p/oZnIbhw7m9va1+YZz8E/Lwk3+K9NestbU1\n",
+       "lQ1kz07d8Euam5sLWMIjR47ob2699VZ9Pp5jpW5goO+QD1vvrKe+HjMzAGaD+4bvmFnH33q9Hvhh\n",
+       "jYyMKMPE7C7Wcsi0UCgEgQqtVkvlnMXEsM+iNW/x/Pvuuy/lII7rwahA3ry2cx1Y6IGli0gwyjVX\n",
+       "Mc9GR0f13/CzGh8fV4YL92Pf4CRJ1FcHDO2gpKN+SiGeN1kYGxsL0hBY6ySn7MH4Wu/KmwEn3USb\n",
+       "bxaY86dPn9bxRvuYgcf4ct84wSd0C33c3t7elZHaN2fzTqcjW1tbQY6nkZERHURQ9t1uV53C4DAu\n",
+       "0i84+uSTT+pvoej47cjIiFKvfuFJkfSxBn6bddQxMjISvKysIq7saM4Lzxe+8AURyXa0FUmXVBAZ\n",
+       "HNngO5EOOmLhzyza3t+sjI6O6vOYGrYmFiY239cqTAml5s0V7g15WEUteYPEmyd/ceAoNnYe9J3+\n",
+       "razoxWIxmLy8QeZFC7qFNrfbbTNHjTVe/rgPoqOtIwcfm5ubgdP8qVOntC1YhCuVio4NNgRra2s6\n",
+       "D6Dv6+vr5vGCn+28Vqvpb1hv8G842V+8eDEoa1QsFvW5586d08+xkMJIuXr1amZU3G7HIP4xskjf\n",
+       "cd85p07BaMu1a9fMscCLnecPFz0V6b3keJ7iuZAHH9NyWQwfHFmL3+Ll2el09MWNsVxdXTWdpSFD\n",
+       "jNvi4qL2nYsVY77gCNLajNXr9dRmSaT/YmF0u11dsyAfLh7LxqSV2dwvGM5rNM9BfwM1MjKixzwY\n",
+       "v2azqcYfjJNGoxG4BZw7dy4wICcmJlQOVlQcNnKLi4v6b2x8eMwxfw8fPqwbN9yXN154J42OjsqP\n",
+       "f/xjEUkHBADOuUA/ue9ccsjK8QSdwDHdxMSE3h/9nJubUz3OcgrfLYM4b8w5pxz+8nGgSG+8uMwO\n",
+       "gDWBoxj9dzgbNrjfG2+8ofqJz3gOQg+sEnV8b/y1gh18xKO9iIiIiIiIiIg9Yt8YqdHRUel0OrpT\n",
+       "5eK8sBiw2x4bG9NdMML8b7vtNqVFeRfrsw+D4IdHcrFcy7HYYnFgyW1tbekOmB0f/XBwDkdn+Lvs\n",
+       "Q4cOBXQxOyAzCwVLDlbP4uJiynEfYAvSt+LZksJ4MNPBcrBybGAc2ILjMHuRnpz94IFSqaTfo09Z\n",
+       "eY5E+ozf+vq69omLTWNcrZxBWRmw2XEbMrCy7PJ9OA+PRbP7ubt2s+SgH5/61KfUCkPQAef9wbiu\n",
+       "ra2pgz/mDBel/kXABYX9GlWc1wvXHThwQFkPyG18fFyZH8hia2sr0OPp6WllynDkNjs7q0dikAGz\n",
+       "QsMWoy2VSgELmCSJPg9pSAqFQip1hC8HfrYfCGAVWD106JDKg7/LyhgP9qtUKim7g3WHAzjYoraA\n",
+       "tQA6VCgUVPf4qMtPa2EFL6ysrATpaLi4NYOZKB9ZLP8999wT1EhkRhw6NDs7q2OJNBg7OztBtnOR\n",
+       "fn4wnFpYbWg2m6qDOC61iroz6wpW6NixY2YlDP8oEQ71In3Zj4yMKNuKPlp5uxhJkpg1SH2WivOh\n",
+       "YYxWV1eVyRvGOZ1RrVaDVDZcGQSy4bUNn5VKpUDH+Hv8HVSkHnMOf51z+lweS+uYHjphnaAwA++j\n",
+       "XC6nsr/7bR+EyEhFREREREREROwR++ZsLtKzmOADAFZhbm7uLXe+9v1hJiYmdNfOSRNxfoxdO1sA\n",
+       "7FiIpIpgGi5evKhWCTND/m+Xl5d1980MF9rHTpiWDwhndRfp7eQ5RBfXgEGq1+tq+WRVQ2fLwdKH\n",
+       "LAfPQZl0/TqDhUJBZcRWgp9dmf1vhgVXOce/2b8G8udK9L51J9L3R7F8ZdgR3fdvse6XJEmgE6VS\n",
+       "SbOrMxOK/g5TT1IkzTSyLoiI2S8LrA/svzDIsZ+f65zT3zAzgXkB3X7zzTeHZo6yANnffvvtQfLK\n",
+       "iYkJbddjjz2mLBYzZhhPrubuMyqlUkkd4xHezakqrKS/AMsD/T1z5ozJ1IJdQT+s1A5333233gfj\n",
+       "Pjk5Kffdd5+I9JOuXrt2TecPMx8A1ripqSnVfbBG7EuJNq2srATsOGd6ZobLr66Qtb6I9HWiXq/L\n",
+       "r/3ar6Xa/P3vf1/7izX46NGj6t+EQIRqtarrE2f+txgwBAkgLUSSJOZceuCBB0SkL1MrYbFIX3eQ\n",
+       "DuOHP/yhfscpefx1T6TPjjATY7XFR7FY1P5yzVB2/vfrwg0C1nD0w2dVhwHWrFqtFgQeWP62zFxb\n",
+       "64DF+DGgi3juoPQq6BvGbXp6Wk+VMM83NzdV1vi72wkB4++ss3mpVJL7779fnnjiiaGu52iJQajV\n",
+       "ajoo2HTkcrnAgW5tbc2kprnApQ8+asMgsdO5Hykh0h9gPIvbzorHDozoB2hFLOZc6sZSAPSbj+F2\n",
+       "UxQuAJlVNNLKGMsFQq2NlL8ZbrfbehRq0a3s+GjlhfIXNz6GgMw3NzfNPuNFgTEcdF9/g8TPyHLM\n",
+       "t44WRPryw4sgSZLM6CpELnW7XaWrrYWWn+/3t1Kp6GaC84RBB9HfhYUF3RQMa0zttlhbkbfY6LGR\n",
+       "go0tZ22GrBBZy8dlWED9QtgW8ByM+WuvvabPsaJ1gGazqUEpnLsNssnqe5IkgQzX19eDtapWq5kb\n",
+       "Hh+FQiE4tu52u6pnOAZaX1+XRx55RETsjRQfoeMFj42PVdHBORccPXJ+IFxXKBSCucIGFb6bnJxU\n",
+       "/eWNw7e//W0R6a+Hhw8f1jGxilwDloHNhcCBu+++29QVRC5yYIsVTGCtZ36VBwbcTbhcDcPXg0ql\n",
+       "EsxrLr7Nzs6YKxsbG0FRZO43F+L1KwckSWIGh2CusIHrF/ZdXV3VscF8HPZ4sFgsBkfjSZIE0bjs\n",
+       "ksFRgBzMw3Li9pXLZW0fnnH9+vU9lXnxn+HPwSzEo72IiIiIiIiIiD1i3xiper2eYqOs8HHAChXO\n",
+       "5XKaHwg780uXLunuOivL6ejoaGDJP/jgg2YosU8/3n777erQDgvi9OnTmkGcrV2/CKVIyHodPHhQ\n",
+       "28q0sF8bzS+87AO7Zt49s6Mly3c3ShX38QtTiqSPR/AXVgdnoh22PpPl+I77ceg05xISSY8hW3w+\n",
+       "M3D27Nng+JCPIbhN/vEEU+d4hpUzjOXI8s86ZsOzKpWK/p4dbqELSPexvr6udDz6yPWoYHFubW2Z\n",
+       "zpcAjx+YKzhXcr1G5LTZ2tpSXQSrkM/ng0zaYBvRLvQB17EzLfqB6yYnJ5UtxFiVy+WbPuL1n432\n",
+       "81ED+m4B8x86xkXGh60RhrnOLgOf+MQnRKSnJ6jjl4VqtRro7Orqqq6XLBc/KzqDC0v782xyclLZ\n",
+       "azCms7OzQQHg9fV1kynx5wozANCxxcXFVGCESPooG/NjdXU1YFsgB/xGpDdufh4uXuex9lvvi0ql\n",
+       "ojLFszqdjsmi+uviyZMndd0GI2UxphMTE6rH7ArAuQpFerKFGwHkeO3aNb0fyzsrv6KVsZ5Tz7Ce\n",
+       "+30ql8sm6wfg3VUqlfQ9YuVa4vc15M5BCdYR4l7mNcAy5L/cvkFsFNYoDmbz8+E1Go1UnkOR4eZ+\n",
+       "ZKQiIiIiIiIiIvaIfWOkOFxaxGaiGNg5co0qWLm88/Yd0iYmJtQ6sBwzcQZ9/Phx+dGPfpT6LfsC\n",
+       "sOWPHTqcSRGCbvVRpG+B1+v1wPJaXV01nbDBCKCtrVZLa1TBumPmgdMvgDFh1uD48eMi0qsLhb5k\n",
+       "7eCLxWIqySeAazk01HcoZGsaDMH29naQiG9iYiLIBM3tBzqdjt6Hq5cDPOZ++LGVPNBikHZ2dgIW\n",
+       "gOtCAc45tWzAYHB72VL/wAc+ICJpXzrICPe1/HW4XZZucdZuv2bg+Pi49sOq+8j9wW+ZiYJ8oTvs\n",
+       "A8eMra/bFpsr0vdL4WS3PpPI/oeQz8rKiv4GvmNs4YKZGB0dTaU8wXzmzMzoc1a2ZE7mC4sVfpY3\n",
+       "A8shFmsX/u6GBx54QBluME7lctmsp8d110TETHa5tLQU6Fm1Wg0c2peWloL6izMzM7pOg9Wu1Wqm\n",
+       "bgHsk8Y+jSI9Pcjy3UR/jx8/rn3BPVqtlsnMY40G8wM/O8bW1lYgv4cfflidzJk5wThhDeEAk6w0\n",
+       "LVeuXAl8faanp3VNYsY5K6CF13I+pYBuA8MGlhQKhSCU31p3JicntZ9gnJrNZibDzYDucLoUrNMY\n",
+       "G0s/x8bGVK4cNANdQGqKQqGg7cLYXL9+XX9jvccQAJPP55Xdw3pw9erVgJWzUoBwQupB2LeNVNZE\n",
+       "BLCQnTx50szx4i8inP0bC8HKyorpHAelxQvo61//enDN0aNHg/wmt956q+YwAQZFrmHjw0WJsYhw\n",
+       "Hg4oNSbf7OysKiDToFb5D79gsEh/M8EbA+uozd9QiaQLNvsRMnxPLp9gvaC4SDLgO4tacrOCCRqN\n",
+       "hvYJE3JjY8MMQLjnnntERHRTfPHixSCbNG8mcN/l5eVgM2CVVmg2m0rLA/l8PlWAE0BkD3SNgyGs\n",
+       "hQxtKRaL5oID4FmW8WHpei6X0/HiuYKXAW8+MNaQ6aDNB148uN8DDzygCxVeWFtbW5lFmtHfO+64\n",
+       "Q+WC9i8uLuqLwsqNNei+KDsD+SZJEpSL4LlgRVkBNxPVkwXM2wcffFCjxDirO4C16ODBg0GxcXZG\n",
+       "ZnDm7kHY2NgIdL/RaATHglxpAmBZQd/K5XLKuPKBl9PU1JSOsXXEg3vMzMzovWGkWGPORgr3B2sC\n",
+       "6wTWRcxHLrSLkmHXrl0L2lWtVoM5dO3atSCv3+joqLYBG1F+CXNGbX9dueWWW/S9grbkcjl9Bozn\n",
+       "XC6XWk/8Ntx5550qa2vew9jpdDrB9/l8Xo0crHGLi4umuwfeVX65MZF0FCh+axlVvJ5hY8KBPv7x\n",
+       "+87Ojq491sYYfcvn83ofzn2HMbGORkEqiPTHDrrY6XS0DbtFojLi0V5ERERERERExB6xb4yUT21i\n",
+       "lzo5Oak7UVhZFhs1PT0dZD5lqpNrGMHqhPWxtbWl12aFNjIb9eu//ut6P9+aHOTcBmsM7Fu5XA6o\n",
+       "39XVVbWo0I+LFy+mnFVFejtvUM64b6lUUiaKnZct6xTWTLVaVfkyI+UzQa1WKzgmY2YDO/lB2ZXx\n",
+       "Oeexsgr7ZlnSbHWifbCGSqVSUCsKfeG/nU7HzHVj5QXCv3dLtQFZgjHlmmLcx1OnTolI37pmax9t\n",
+       "qdfrQeZzSybFYlF/k3VEdfDgQWXM8Lw333xTdcwqsLpbf++++24R6Y9pq9VK1boSEfnWt74V/O74\n",
+       "8eNqOcIi3d7eVusYY/nqq68GjFC5XNbfcqZmWOuwIMfGxlJrCeYN5+7yx79Sqei8Z1bPP66xMpZb\n",
+       "sAIQGKgFOj8/r0eSfm47kb7unDt3Tq1sTqGC3zLzwvXlBiGXywVMQxbjybCYpJ2dncBav+uuu5Qd\n",
+       "47pvFqBHGN/5+XnVo6z8RoOcpzFveE6jfzgW5moRmBd81Idj+IsXLyqLwcflPgPKRe6Bbrerusp9\n",
+       "xxhinh85ckTzh1k1NSGXYrGobbECEJCuQ0RSNfdwipEly06nYzqbYy3gFCB+9nKLqeXPsF6Uy2U9\n",
+       "veEC6dAPzOVh06/wPLP6hjkwyJkdeue/W/32Q/6Yo8Mc7UVGKiIi4m8Fw/pwROyOYTc+EbvjrU74\n",
+       "HBGxb5nNYSFkZc3mM1RYY9iJcp0kDlfGjhWTxTrznZqaCqwlzpDLtaWQtRbtfPTRRzP7hqrjKysr\n",
+       "ej9rBwxwnTuAz9qxO2ZLw/J34uzdOPve2dnR82qwJ+wcDotqbGzMlJfvANjtdgOWamxsLOUoDvjO\n",
+       "yLVaLfDpOXHihLaPFzdOLieSTkrJ4e1oP7MBCOWHg2Sn0wksHs4YzIwA5M56B1mxLCAjrn3nMxfI\n",
+       "hMuo1+s6dpDZbkkuLWAMqtWqjit0w/IdYmYF8jl16pRau2A1JycnU07cIj02eFBGYUaxWFS5sX/S\n",
+       "2wFOl/HZz35WRESee+45ERF56aWXlLXhLNF+Og2R0IeOWVSM9djYWGY2aovpzPLDGgS0D+kvlpeX\n",
+       "lXUYZl0R6c8f51wms8a+Y/g3pzfxMTExoes1vh/kT+LXfdve3g6CNW4G7Hwv0psDvh/M9PS03hvX\n",
+       "3XbbbaaPKYBs6+fPnw+YKwsnT57U9Q760Gg0UnXmRHq6iXUvK8v/xMSEPg/XcQAM/xYyn5iYyNRF\n",
+       "gPXYSnZ8s6jVaqkaudAzzJ9h1gsfSKALHV9YWNBxwBiyTy0/E+w05lmz2QxSGHQ6HZUh2skBF1zT\n",
+       "EuOFPQLeIVmZzTUr79v5n4gkn//855OItwZRlm8doizfOkRZvnWIsnzrEGX51uEfkix72yV7TxOP\n",
+       "9iIiIiIiIiIi9oh9LVocEREREREREfFOQDLgaG9fNlIREREREREREX8fEI/2IiIiIiIiIiL2iLiR\n",
+       "ioihjjlwAAAEq0lEQVSIiIiIiIjYI972jZRz7qPOuZedcz93zv27t/v573Q45153zj3rnHvaOffk\n",
+       "jc+mnHPfdc694pz7jnNuuKJe/8DgnPvvzrlrzrnn6LOBsnPOfe6Gnr7snHtkf1r9dxMDZPmfnHOX\n",
+       "bujm0865j9F3UZYD4Jw77px73Dn3gnPueefcZ298HnXzJpEhy6ibNwnnXNk5d84594xz7kXn3H++\n",
+       "8XnUSw9vq4+Ucy4vIudF5MMicllEfiwin0qS5KW3rRHvcDjnXhOR+5MkWaLP/khEFpMk+aMbm9PJ\n",
+       "JEn+/b418u8onHMPiciGiHwlSZL33PjMlJ1z7oyI/C8ReZ+IzIrIYyLyriRJBieD+QeEAbL8vIis\n",
+       "J0nyX71roywz4Jw7LCKHkyR5xjk3KiI/EZFPiMhnJOrmTSFDlr8tUTdvGs65apIkDedcQUSeEJF/\n",
+       "KyK/KVEvU3i7Gan3i8jfJEnyepIkOyLyVRH5+Nvchr8P8CMHflNEvnzj31+W3sIR4SFJkv8nIn6K\n",
+       "6EGy+7iIPJokyU6SJK+LyN9IT38jZKAsRULdFImyzESSJFeTJHnmxr83ROQl6b2Iom7eJDJkKRJ1\n",
+       "86aRJAkyJRdFJC+9OR/10sPbvZGaFRFOx3tJ+koeMRwSEXnMOfeUc+6f3/jsUJIkKKJ2TUQO7U/T\n",
+       "3pEYJLuj0tNPIOrqcPjXzrmfOef+mCj/KMsh4Zw7KSL3isg5ibr5C4Fk+aMbH0XdvEk453LOuWek\n",
+       "p3+PJ0nygkS9DPB2b6RiroVfHB9IkuReEfmYiPzLG0csCmRg3ZeWvcMxhOyiXLPx30TkVhE5KyJX\n",
+       "ROQLGddGWXq4cRT1f0Tk3yRJkqq8GnXz5nBDlv9berLckKibe0KSJN0kSc6KyDER+UfOuYe976Ne\n",
+       "ytu/kbosIsfp/49LegcbsQuSJLly4++CiPyZ9KjTazd8A8Q5d0RE5vevhe84DJKdr6vHbnwWMQBJ\n",
+       "ksxTOYUvSZ/Wj7LcBc65Eeltov4kSZJv3Pg46uYeQLL8H5Bl1M1fDEmSrIrI/xWR+yXqZYC3eyP1\n",
+       "lIjc4Zw76ZwrisgnReQv3uY2vGPhnKs658Zu/LsmIo+IyHPSk+Gnb1z2aRH5hn2HCAODZPcXIvJP\n",
+       "nXNF59ytInKHiDy5D+17x+DGogr8E+nppkiUZSacc05E/lhEXkyS5Iv0VdTNm8QgWUbdvHk456Zx\n",
+       "BOqcq4jIR0TkaYl6GaDwdj4sSZK2c+5fici3pee49scxYu+mcEhE/qy3VkhBRP5nkiTfcc49JSJf\n",
+       "c879joi8Lr0IlQgPzrlHReRDIjLtnLsoIv9RRP6LGLJLkuRF59zXRORFEWmLyO8lsQyAwpDl50Xk\n",
+       "HzvnzkqPzn9NRP6FSJTlEPiAiPwzEXnWOff0jc8+J1E39wJLlv9BRD4VdfOmcUREvuycy0mPdPmT\n",
+       "JEn+6oZco14SYomYiIiIiIiIiIg9ImY2j4iIiIiIiIjYI+JGKiIiIiIiIiJij4gbqYiIiIiIiIiI\n",
+       "PSJupCIiIiIiIiIi9oi4kYqIiIiIiIiI2CPiRioiIiIiIiIiYo+IG6mIiIiIiIiIiD0ibqQiIiIi\n",
+       "IiIiIvaI/w/CAMCOMj3yxQAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7ffb01f952d0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "feat = net.blobs['conv1'].data[0, :36]\n",
+    "vis_square(feat, padval=1)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The second layer filters, `conv2`\n",
+    "\n",
+    "There are 256 filters, each of which has dimension 5 x 5 x 48. We show only the first 48 filters, with each channel shown separately, so that each filter is a row."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 30,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlIAAAJOCAYAAAB8y+mTAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvWmsbldxv1nvOb7zgCc8GwMGMxkIMyKgRCgk/+RDuiMl\n",
+       "6UQBOg4eZQYj7GADRg42wthGcMEoHkCOlaB0R1GCWpGSNJmDiIAEOYAx4BHPxsb25frOZ+gPl2fv\n",
+       "9T57132dg92n8+/6fTnnPWe9a9eqVWvtVbVqmCwvL0ehUCgUCoVC4b+OudUmoFAoFAqFQuG/K+og\n",
+       "VSgUCoVCobBC1EGqUCgUCoVCYYWog1ShUCgUCoXCClEHqUKhUCgUCoUVog5ShUKhUCgUCivE03KQ\n",
+       "mkwm/2MymXx3MpncOplM3v90PKNQKBQKhUJhtTF5qvNITSaT+Yj4XkT8QkTcFxFfj4jfXl5evuUp\n",
+       "fVChUCgUCoXCKuPpsEi9NiJuW15evmt5eXl/RPwfEfG/PA3PKRQKhUKhUFhVPB0HqeMj4p7m870/\n",
+       "+VuhUCgUCoXC/1Q45Gnoc+Zd4WQyqbo0hUKhUCgU/ttgeXl5Mvb3p+MgdV9EnNh8PjEOWKWmcNxx\n",
+       "x8Wzn/3siIg44YQT4sQTT4w9e/ZERMTatWsjIuKTn/xkRESceeaZERGxtLQUERGLi4tx6KGHRkTE\n",
+       "3NwBo9onPvGJiIj4tV/7tYiIePzxxyMi4phjjjlAxIknTvVx1VVXRUTE2972tql29Pvoo49GRMTC\n",
+       "wkJERGzbti0iIt7+9rdHRMTOnTvjsMMOi4iIjRs3RkTE3r17IyLi2muvjYiIc845JyIidu/eHRER\n",
+       "8/PzERGxadOmiIhYs2bNFO3vec97IiLiiSeeiIiIQw45ZKrdli1bIiLiYx/7WEREvP/974/FxcUp\n",
+       "Ojdv3jz1TPqGFvqem5uLm266Kd785jd3PI2I+PSnPx0REe9973un+MX/4fdkMun4cu655079Db87\n",
+       "xstP5vP888+f4hf/53v8hJbzzjsvIiKe8YxnTH1vx44dEXFAXuj7wgsvnOrjxz/+8dQz4OmnPvWp\n",
+       "qXH62evWrZsaN3z84Ac/GBER+/fvn6Jl586d3Tz94R/+YUT084lcb9iwYeoZi4uL8dWvfjW+/vWv\n",
+       "R0TE+973vqlnAvjKuvj4xz8eEQfmPyJi37593TPWr18/9V348s53vnPq2fQJGC/jvOSSS6ZofvDB\n",
+       "B6fGsm/fvoiI+PznPx8RB+SLv/EdZGXnzp1TbZkjaGCOkDX4yPzDF8siaNfRb/3Wb0VExJFHHnnQ\n",
+       "vm+44YaIiHjDG94QP/uzP9vRwrpBTuDL5ZdfHhER7373uyMi4u677556DutucXExPvOZz0RExLve\n",
+       "9a6pPpAV+MJP5ujss8+e4h80w1fmlvYXXXTRVDv6n0wmg32RNcT+xjj52a7niH6/QM4B36f9lVde\n",
+       "OTXWI444oqPFvGTPZT9/+OGHI+KAbL3hDW+YWksR/Ryxjhj/rl27pvhH/4yV/WXz5s0d/ewV3ovY\n",
+       "u7Zv3959J6Jfa/D26quvjoiICy64YGr89J+tiw9/+MPd++GRRx6JiOGexH7x1re+NSIijj766Ijo\n",
+       "ZZW+eSa0wxf64znQTv9XXXVVt8/xDmG8zNHXvva1eMUrXtHxnPcizzSvvdedddZZU7T4nbW0tNS1\n",
+       "/e3f/u2IiHjWs54VERGHH354RPTvbN5lyBbtGRd9Amhjv/jd3/3diOjXETS364L9n/mHx7Sdm5uL\n",
+       "++67r1vnN910UxwMT8dB6t8j4vmTyeTZEXF/RPxvEfHbbnTCCSfEG9/4xogYboyFQqFQKBQKq4Xj\n",
+       "jz++Uwz+Xz9ILS8vL0wmk3dGxN9GxHxEfH4sYm/NmjXdaR9twSdwwKmYE/bCwkKnlXAKB3wXbYf/\n",
+       "8yxOnOCFL3xhREQcddRREdFrVN///vcjoj+5m5YdO3Z0fWJh4rTbjrGliZN2FikJrWg1jIFx0x+Y\n",
+       "m5sbWAHcB0DLQeN65JFHpqwS7ptnts9qfzL2iJ4nT/ZADG3wDU0DTdTzby0RLWoM0A19nn+DcaNZ\n",
+       "0o7PnlPkg++hgd15552DZ0AvYLytZaLlM+1/9KMfTbVDAzMtyNPi4mLXD234H2BubLHK+ILlgX6t\n",
+       "BXodrVmzpqMfntvC7HFCKzRlFiePibnhOS1fjj322Ig4sAlG9JY0tF3L9cLCQuzZs6ebG8bFs8wf\n",
+       "ZB2LNXKAjLb8pS+vd75jWmyx5lmeS/fD2NgvNm/e3NEDmD8s7+xrjGOW7PJ9yw+AZp47Pz+friHA\n",
+       "emcO6Htsn2sBX5h/LH6A72/cuLF7Ribn8Jxns4/y2XsRc8qzmVu/J8Bhhx3WteG7ln/A3oKVk2cg\n",
+       "wwb7oG8LPHdtXx4XP9esWdPNYUQ/j4899lhEDN/B5ift2z1prF1Eb4HinYRs8V7werFlljll3lu6\n",
+       "I/o5NE1zc3MD2WJc8IW+bImdhafDIhXLy8t/HRF/fbA2mPUKqwc21cLq4YQTTlhtEv5/DQ5chdVD\n",
+       "zcHqo+bgp8PTcpB6MnjmM5/ZnSyB774Bp8j2/tLaHMCPhtMrp1K0e34CrGGcYqEJ7Q6aQKtFc4rN\n",
+       "NEZotL9N6yMzBtpzWoY2W2J27do1OGEzHluk0Cz4+9zcXBx//PHdydsWBvjE2BirfWsONo5MC7SG\n",
+       "Zs3bWmCruUbEgOaWB8yXtRrzHvhZs7RotH9ohp9btmwZ9MVnW8VazQsLSksb47GFxnyG1t27d3da\n",
+       "KPPk+beM0h5+WQO7//77I6KXPcZtnxqwuLg4sAJapgBWPICsQZNph0bGYN63z6ENz+Azbbxv+CCL\n",
+       "dmzfQ8AzsR7YMtHuL/4uPLY/nvHDH/5wilbkIZNJW2aYsxb4RsIX1lRmBWCeobn1AWu/D/g7z9m1\n",
+       "a9fABxIgJ/iYchuQ7SPw0b6mtn4A5GX9+vUdDfDGcp7tC/DF7aHdVlLae8978MEHB5ZD9wHgHVZC\n",
+       "5goaPE768TPH/m4/Kz4z7qOPPnpqrKwXLDT2MfR7Bz741mVsP0V2sLwzXs8v8DPpi74ti9DgvW1p\n",
+       "aWlgvcz8LRlvds4wqkRMoVAoFAqFwgqxahapiP60hybKadCanC0X7anSp3q0MbQd+ylk1i5HjnGK\n",
+       "zXyHNm7c2GmvmSZtH5FZFilHGNn/y6fp9s6XPjNfMGvB8IHP5jnP8r3+fwWmqaW7Bf93dBqANt/P\n",
+       "ewwtrHllsGbJ50w7Zo5tkVlaWhr4j9hiZh8Gy4s1eL7HXGS+I/Pz84MoI/ME2ujblgjPyZg21/40\n",
+       "lpaWOu3V85TJYuZ3Yd76/16zLe3217OlKKOFcTqy0nyxr1nWrgX/81odk6G2b8u3tWM08TZaj7FY\n",
+       "fhk3ba1xZ35p0ML3+Z73ojELaOZPAw22Ym3dunW0Pc+2pS2zprf/p022br3XMEdjlvcWtuiY1rad\n",
+       "LamZlYP/OyLQtyim1Xv42A1J5o+XjdfvrmxtAq8LaPB7JGJo3QSWIeBnel1ke7V9x+bn5wfz4/cb\n",
+       "9DLe7LbJKItUoVAoFAqFwgrxlNfae1IPrYSchUKhUCgU/hshS8hZFqlCoVAoFAqFFWLVfKQ+9KEP\n",
+       "dT5ARJrhvc89K1l2ycqK38ZkMulybXD/+ZGPfCQiIt7xjndERH+/jN+Gc7OQNZXswNwJO7sqtFx/\n",
+       "/fUR0WeT3rx5c3eP6igbZ+QmOoH7WCKFuJ/+8Ic/HBF9ZmNHmhCJyJ0vGYLf//73D6Jr8LPiWTfe\n",
+       "eGNEHOB324d9wPhM32SHhW/O3s5z/uiP/qjLVGwfJ99Hk5GX+YTn8IfvwR+ySZOp+L777ouIfk6e\n",
+       "85zndM/5wAc+EBF9Bu82n01En+eFeb3mmmsios+yDa38RNaQTbLy0r+zNm/ZsqW742f+4TnPfuCB\n",
+       "B7q2EX36CTLVI4sAueIZ5F+hPdmHN2/e3EWRsR6gz3LOeJx1HFkmszGySDZmfA6hAf6QxfvCCy8c\n",
+       "+HYwB/TNGoUWaEAOoAU5gI+nn376VH/2lUEWP/vZzw6y7FvO+ens0I6wZJzg4osvjoiI3//93596\n",
+       "piNs9+/fH5/97Gen2iKvrD32OYBskcEdvhA5zHid8R3aHdU5Pz/fPZP1TOZpeOx8aMgo+xxzxHph\n",
+       "X0QW8Wciyz4Z37nh2LVrVzeP7F+sZ9Y/42Td2H/rc5/73FTf8JwoP8YI7bwvWNM7d+7s6EZ2yI/G\n",
+       "GmJvAawb5+HyOJF/eM5zwF/+5V9GxAEZQAbJB8W42QfYi3i3ME7a2XcMWni/OIcXc8Bcf/SjH+3W\n",
+       "M/PJHvvQQw9NPfMLX/hCRPTr375DznTOnFIJAX835qSVF+hGzu07BphX7//w2n6v0MK6g4/OU9n6\n",
+       "UiHnZ5xxRkT0vIPXjJdxQHuGskgVCoVCoVAorBCrZpHasGFD3HrrrRER8R//8R8RcSC3VETE61//\n",
+       "+qm2aMVoi29+85u7U+uXv/zlqbacQtFaOHmTR8l5oaxJOHO3o3E4uT744INx1113RUR/gnaSUUc+\n",
+       "nHrqqVN98n3A6RdLBf3RjzXZH//4xx1PXLcMix3gNE7tIJ4Fz9HUADRDi62HrW+doy6xAmU5meA5\n",
+       "8wotxx133NT3getgkY0eviNHEb32S1vXUHIkJNo+tH73u9+NiF47ph4kQIuCvySye/TRR+POO++c\n",
+       "aotF6Xvf+15E9Nraa17zmogYRuHB03vuuScieg32lFNOiYhepgFzs3Xr1q4v5NyZlpEdxsP/sXoY\n",
+       "1uLIdwUfnbtlMpkM8sA4EhbAc2T2Bz/4wdSzXvCCF0y1t5WI70HDWMQcfTkSyNYs5APrQJvXK6LP\n",
+       "p+X2aK7IINbGVrt2RFhb57IdD4BfyBF9QlO2pp3z7dBDDx1Yg+HVS1/60oiIeNOb3hQR/d77ne98\n",
+       "Z6o9fGONsmaRwSyyEpnduXNnt54d4Yc8IGPOnu29iP+/+MUvjojoSov9+7//e0T0VhUAHx544IGO\n",
+       "R6wVR2GxbphnngXtjsT1XsWcspf7HbBv376uDbKDNdDVAti7ac9e5mz7wFYz9puxrNzMD2uLNcV4\n",
+       "kWfDlt0s4pCxcWuAHMCnlnbn+sOihjxYXoCty1n29Kwyytzc3CDCDx7yTFfwmBW9CcoiVSgUCoVC\n",
+       "obBCrJpF6tFHH+1OoJzisY5Y2+VEjvZ/1113xb333hsRw9M3p31Owpx+OaU6O7jr9aCBoOXYgoHm\n",
+       "8uCDD3anWVfrBs5Rg9YylgU5otc4TzrppIjoNRK0RWv2z3jGMzoN47bbbpv6rjUpZ5Hl/2h/bo+2\n",
+       "7DqCrgLe/o5mAK/H2kb0c+D7aLRGWxiYA+bkm9/8ZkQMs9ZHDP2zrNXZAomG0mrSEb1PkHnOXCNn\n",
+       "rXZtTRrZwwqCZQGaLLu0dwZk/k79R9Bm+KYN47flBWsoNPIMPtt64mziyD08t6w//vjjHe9cp89W\n",
+       "IXiLNcy5ZmwlgzZrh6540LbN8mhZzpFBxvn1r389Ivq9xtqufUXYw+w70v6OjCE7WU1J1ia0QwP7\n",
+       "ojV1+/3xnF27dnX7H2D+seZgicD6abBfYCVBnphbr2nkhbW9bt26bi6wlADTyzhd9wxwq4BF6qtf\n",
+       "/WpE9NZ1rzto37lzZ7d3MA5bJFgH/B+as/qpz33ucyOi3x/vuOOOiOitxqxtsHbt2oF/LvPo2xF4\n",
+       "Zz/XLCs/8uOM+Yy57R9LIvLKnLR1CVu4/iV9tT6hLbyv8v2x/cL532xp8jua/d2+hq6QAcyvtrqB\n",
+       "9xDX7+MdlOVAy7BqB6mlpaVugbBIfegBDIrDwvr169MkhXx22nkmLStwyOTwss4KjjLpRx55ZPdC\n",
+       "YBLGSri0z7ajtzdGFiFCycHLNILDDz+8WxD8z4n3AAKEoFgo3Tffh49sMGMlaOjbz8ySvTEeXmJO\n",
+       "iulFyfUCm7+d9dtDALzFPA6PkSkvDMbDs5/3vOdFRM9HyyLj5tqF52zatGkwTjZ4Fx22TAJo43u8\n",
+       "YPzyBtC2b9++bj6zvtmcmU/Ggcx5U7cDqw8zXnetczHP5kXvgxQ8Q+Y4/DN3zAmwjLImXQzcv7dt\n",
+       "M0dVZAv+wPOs/JOTHzrpX8sXxu0ErIzfQE7YV2hvZ2zgQ2N2WIzoeftP//RPEdEfoHwt777ZL5BN\n",
+       "aM/4grKwfv36tOQHMoQyYgUse3n94z/+49T/WReWL553/PHHD65sDNr66i9z7YBG3AnYy1wEGGza\n",
+       "tGlQlDdL5sn8unCyDwQAmfVhaCyZNIoQz0TGZpXOgib6npVMlL2LvT27Mozo+eGrfoNnZnuy4Su/\n",
+       "1tXA77ksaTb7pPeiDHW1VygUCoVCobBCrJpFam5uLi2g6GsJl9ZotR2b9ThRurhqdqr3s50OwP1D\n",
+       "y4YNGzotPiuzYLOmna99teNQdLQATu5jJSJsSXDRSGCHXVvPsoKY0IQmiobS0u4SFrMc9bB2ZM/M\n",
+       "LFiYz32V2PKF+YTn1qwyjcRzlcmii5m2VibTDc9dMDQrnQMww9tpPyuF0Y4vuzbG8oK8YInKylYg\n",
+       "J/x0WhBbMFrN01YbWwzoK7PUZg7hdl7PClRHDMvTZLKIxYLrM/aYTF4A/WI1aK2DAPqy8jSZBdNu\n",
+       "CNl6gjb41RZ191UtFglft7sEELC1COtiZpGAj22BYej1uGnr8jqZFZA9iPGyf0B7doOxdu3agSya\n",
+       "fqfoyMqxAFvkXJjeWFpa6tr6u7YcuiQUa4pxZ+VtzM+xIA/kEvcRWzuzEjLsEw4g8fpnn4B//GTM\n",
+       "Y3tXViLK+wXjZL79vUy+PPaIoYWRPpgLLFDZushQFqlCoVAoFAqFFaJKxBQKhUKhUCjMQJWIKRQK\n",
+       "hUKhUHiKsWo+UmeeeeYgxJK7YKfxJ+V/G2JsHxXSyV9wwQVTfTrqjL5J+f6ud70rIvp7a0f34edw\n",
+       "9dVXR0TEOeecExEH7mG5XyeC4yUveUlE9CVfKBHhchzcO3N//KlPfSoioitvYV8Bvg8t1157bUQc\n",
+       "KBHgKCrupvkOpTCgG18Z/HDok/EyTtLsO4WD/RQ+9rGPdSn/ocH+BtxRUwqD0gb4YcAH+zwxp8yR\n",
+       "y7e04e/wkHFC98te9rKIiPjWt74VEb0/BaVQTjvttKlnE95MWDNzRVkO2uP/RNTk1q1bO3mgVAU8\n",
+       "ZD6zsGbknDIeTjWBTxFpQmhPCZq7776786dAxvgMLZT8IVoHwA/8CuAjpTDsz+fIQ8p4XHrppZ28\n",
+       "En7OPBK1dNlll0VEP0fwFjkhBQURVC4pBP/wx0BuWNvbtm3rSlt4XfCTaLU/+7M/i4i+XIXDvx1x\n",
+       "5v0CfkF7u46QFdaz03y4VBBlXBinS3wgP9AILYyVtYif1ze+8Y3ud8pmME7WL3PlCEH6piQO4yKa\n",
+       "GR8bxkJ79jrGtLCwMJB3yhW5/Aj0k8yRfcF7F7IKP7yH0T/vi4hhYlqXtqEUDmvMSV5JWUF7ymE5\n",
+       "xYcjyqDl/PPPH/Dc/riM06W26NNRn8wp8sKeztzg5wNft23b1vXN+B3NyjOhBTkHPJt1wRxcd911\n",
+       [... long run of base64-encoded data lines omitted here: this added file embeds a binary payload (apparently an image or similar attachment stored as base64 strings within its JSON source) that is not human-readable in the diff ...]
+       "zCeySL065t/1/5gjyxd7HXMCn1asWBGf/OQnI6Ldcz3v/s1lj+b3wvmnGANyxLpDXhhbWavWtRZZ\n",
+       "o6xf5jGbowzVIlVRUVFRUVFRsUgsmUVqamoq7rzzzohoT+qvfvWrI6Ib5YMmUmYfP/zwwyMi4rvf\n",
+       "/e5IW07CWFI4xf7kJz+JiG50CloNkR5oLJz+Od0DTvkrV65sTry85xO282McdthhEfGU1lrS5HGi\n",
+       "1UHrzTffHBH9kVVoO5y6b7vttohorQAeJ5qZrVyOjIBvt99+e0REfOlLX4qIiKOOOioiIo488shO\n",
+       "W/qib0738Ac4/w193nHHHRHRWigA34dfz3ve8yKi5cfXvva1MNCwr7nmmohotVsskoA5Q1O5/vrr\n",
+       "IyLida97XUS0lj2AbN5///0REXH00UdHxFNyg+wB/sZiwJwgs44goR1ziRXoRS96UUS0cwHKjL/X\n",
+       "XnttRES84AUviIiIvfbaa6Sts+MfdNBBEdFq9/fdd99Ie2hlTl72speN0PaP//iPHVqcQ4b5LS2H\n",
+       "Ea0myXh4xm/+5m9GRDsHwBGHn/rUpyIimj3gkEMOado6utB12rB2lnRHtPJy7LHHRkQ7F6YFONrn\n",
+       "rrvuiojRdcezkVv2gRe/+MUR0ZUt5sYyyZ5kPtqKztzss88+HTmHXvqGtm9/+9sR0V2jrtaALGPh\n",
+       "2XvvvUfaw0eeu2bNmmbvZB8A7DW77LJLRLS8dyZq0+7s/OvXr4+Idr8EyNfGjRs7UWrsTaYFq8+P\n",
+       "f/zjkc/ZLwF84vfhwAMPjIiIz372syPvgxUrVjR0Q+9xxx0XEdHZL5x9nPXDGDxOaGGf4HcUGS7l\n",
+       "xRb25z//+RER8aMf/Sgiotk/AONg3TBHrHHmDsBzXuEnVrK+PFLspcxJFrXv3FfQdOihh448AzBG\n",
+       "+odvmzdvbngKmH/W2E033RQRrdx7/jMs2UFqxYoVzQJ7wxveEBHtxnLjjTeOtIWxbPaTk5OdEFqA\n",
+       "UHEw+vrXv948LyLiiCOOGGkPYx1qilCycQAEaYcddmj6YiK9iGwmZnxZ8VWH3EIbguaNdHZ2thnX\n",
+       "vffeGxHtxPMjA+iDBcW1EhuH++bZ8PyXf/mXI6I9oJaHQJt7WXS8Zj/q3jB+7ud+LiJaof7c5z4X\n",
+       "Ee3CYeOBn6961asiYlRe4Dkb+kte8pKIaH/gvvjFL47QwsJnbjggIl/wFbDwfCB9+OGHO6VtfP3D\n",
+       "nGRlaZCt8scoouU5V2bu/4knnmjGyasPo/Ca+eRHDtp8zcocwevvf//7EdHyp6/4L5sP68KbFmB+\n",
+       "uXblMLrvvvtGRMS//Mu/9H6PA9crX/nKiIh4/etfP/J+H+AZSpvXv69ob7jhhohorwKRJwAfmSPk\n",
+       "xlflEa1scOj+pV/6pYho5diHV/YPH16YS//AOIkk/T366KMNL4GTGbK/IceZ8sqPkFO3+Eed/lES\n",
+       "V69eHT/84Q8jolvCxrQgY3arAFb62JMZrw+BtL/nnnuaK0z2yazYMn1zQITnWXkbgDKAMsj4v/CF\n",
+       "L0TEU3KFnCOnrGd+tAHzD60/8zM/ExERn//850doAv7t+53f+Z2IaPfjco9mf4deFMyXvvSlEdHK\n",
+       "8Te+8Y2I6O5ByAPtPEcuJYTCQvvSyOCUG9l+COwqgrGA8dnY4cLJHOoeeeSRznyyxqCPK0pkjH1j\n",
+       "HOrVXkVFRUVFRUXFIrFkFqm5ubnm1Ip2c91110VE1+SJJsbJc/369Y22i2kVcEpHM+C7WAOyEiGc\n",
+       "qDktc4rNkoNNT0832jyakU+7NkljOeH6xVYgaGD8XBdgLbDWODk52dDDd+jTWqDLcaBZoQ1aw4QP\n",
+       "+++/f0REHHPMMRER8ZWvfKUzVjuZOxGpk6TCN6xEaFxYAQ4++OBe2vneD37wg4joWngiWm0MTRvL\n",
+       "GxoYFkvTznjRLOnH1lH4x5jQ3LZt29YxA9MnmiTmf8aZJfHDmgptWElsHS0tUs997nMjor2K4hoB\n",
+       "MBeM65vf/GZEtBq4r6Xh+QEHHBARrUVqv/32i4iWT2jew+GwWZPwzM6yANnBWoCMcYVt2bVcIYuM\n",
+       "9Tvf+U7T1skbnfwv4zn8wSrA1YWv3+zwDE1clZbXWDwT6xBtv/e97408Ezg4A/5gFfD+wr5g5+tH\n",
+       "Hnmkc81qR314zP6ZJc1lX7Elx3PEWHjOLrvs0tBlS6qvmdjXsex7X4QW9ihowrpo2aX/1atXN+Nl\n",
+       "j7YLA397ntknnByUv5GrL3/5yxER8ZrXvCYiuldBk5OTzXtYc7MCx3aepj0WGFswy9uRiHZPZ48u\n",
+       "k+eynlmLWJ6wjrHnAAcCMCf0Y9cR2nMLwV6ABay0BLs0GDx1YlHgxJ3QgrXIFqksAer09HTndy7b\n",
+       "F7Gmsq7HoVqkKioqKioqKioWiSWzSE1PTzcaGtqznSwBd958fuCBBza+K5nlhdMrDmnA97BoL75D\n",
+       "t+Ov+5+ZmWksBLYgAE7Y69atGxkH1rKsXAlasUP27UA6NzfXCQV1GDxwKRw0B0739pFx0UosOryP\n",
+       "w+yVV17ZKb+A5QieZmn50WqwMNA3tJkfWJfsrFzy3bTYvwJH9U9/+tMj44NWrD9ovXZOhn/IHa+r\n",
+       "V69u5hVAJxoVc4XcZMlwkUkXsbZ2zBhXrlzZWC+hG4sRfmb21+JvBwgA+kGDxfqHX4LXxdq1a5u1\n",
+       "gQwity4rYwdffEiQUdYLQPOGj1i86Bfn2i984QudArfIN5YX+zG6nAavWEfsKO3SEU43UvbPPKIF\n",
+       "4yuFtSsrX2Ua7YQMXN4Jfi9btqzjy8M47NCblS1hjthzoInv24LjMjgbN25s6LLFwPQj11g7swK6\n",
+       "rBvGiex6v4BPWFsiWrk3X3g28s56dpkj08yz8TFC3vx7tG3btma+aYMlNfNLxJrz13/91xHR8t58\n",
+       "ceqbW2+9NSLadVXeBMA71gP+pb79AfDL+ye/dbZ4upSMfXbLmwDGaTlHRr3n2nKHnCBXmTUdfmV+\n",
+       "XeV78BArF+cG+/dmqBapioqKioqKiopFopaIqaioqKioqKgYg1oipqKioqKioqLiWcaS+Ui9613v\n",
+       "6kRvcC/P35RCILcDWLlyZSc654ILLoiINuU796/2r+B7pJMnzT7tuJ/lbpw7VKfCX7FiRScZJvRT\n",
+       "ToBSBfjGOBeL0+xTOsP39M4XU5aI8N00fUI/qfApbWC+2f+GUjuUCKBfnsNdO/y9/PLLm7Ip0Gdf\n",
+       "McZNiQhKYeA7wrOd/4P28LEsgeHnUE6A+Tev+S7+B5RCoMwG7fzKs5Avxmr5KHO8XHHFFSN0A8Zp\n",
+       "Xwd4Tnt4zvhoD19cxieiGwnFeBknbekDup14j3VBGQ/zwb40lE44+eSTO9Fj9i+h7Tvf+c6RPi2L\n",
+       "PIPyNpR8APiWQDPr5cMf/nAj546uc6QQskXfXmPwhWfBR2j33JR/Mz8ubdKXWDiiLbPC+ncpGfpm\n",
+       "DbJfILv2tSr5zl4E3awx9w2Y/0zO+T7jplwJ8lXKR9Y344QPnhuX5aG8CbTgt4O8MP5PfOITEdHu\n",
+       "0TMzMx1Z4Tsf+9jHRmjxOGkPbR6n9xfv2WV5M3hlWYRXyBZ0Q4vLd3n/P/PMM6MPzBFj/chHPtLM\n",
+       "J7R4vwDsc6x/R9TBe/6Gj9AC3+ADtM/NzTU8IfGuy07RFt8uEu+eddZZI7TbZ5BnkXTZpbnKOeK7\n",
+       "7C3l73lE6xvm/HKs6QzVIlVRUVFRUVFRsUgsmUVqcnKyOVE6h4vzpXBqLk+kthwATsw+lbvMBODU\n",
+       "yimYZ0OboxPoZ9myZc2pHs3KtPh0zncZryNI6MeZv615gampqWY80OLTfdm2HD99enzAmrY1tHKs\n",
+       "1hDsd5flbvG4+NslImwt4Hl9tPB/lwgBWb4ca1wuqeLvu5jr1q1be0v4lLTwXVucAFpQxnvnHaLd\n",
+       "zMxM0xYZspwDl0Sgb/Pcc+m5cv8TExNNX4yP73gObJmzhm45t1XBc9wHPrOseP6JtGTvcZZ+r1Hz\n",
+       "wxG25RzZuuF5z9aJo7F43+OlX6/NmZmZDg+RTefkyXjoz12+xXNqS9VwOGzGZx6ahrKwbdmH4XVk\n",
+       "fs03DvcB6Mu3BMxvn5yX3/NtSt868jrOeI/8+7fH5YiA58B7Utnetyfmx4eztQAAIABJREFUi3no\n",
+       "0kqGx1lansrv25pe0sBrH7198HnB/DRtvJb7TJY93fNIO1vsMlSLVEVFRUVFRUXFIrGkFilgbcCa\n",
+       "uk/ek5OTadtMkyA7cFacl5xGnIqxTDh3T5kp2UVDXZeLMdIOnyCfekGWs8p35mBqaqrjP0GbPutV\n",
+       "2ZcLO86n1UV0NbGSL767Hgdo8x13dm9vq4LHVOZjYRzOm4V2l1k77FvkZwFrXHxv1apVnQzuztUD\n",
+       "D23JNJw3JuNrOVZbu7KcU7bqIZvOsWKNMst5VI41y1js+bR/iXmajddaMO3L95lP+jANtnYgF7T3\n",
+       "uLz3APtF0m9plYSu0oo9H+yPYouUrQPOylxq0Zm1wvNJO2eqNk3k6nH+NGCL3GAwSC2MnmdblrwX\n",
+       "2VoAj1k/pp05XLVqVcdXJ7uR8HftG+j2zpuF3JmWZcuWdfLpZdbyzAqa3UiAzMJf8tEWM+8t5jnj\n",
+       "Mf9s7QHwyVYj2pV1N+23DDLLm2ket3+6EgDy0pdfzfu/fYKrRaqioqKioqKi4r8YS5rZ3FFpmQbj\n",
+       "7NNzc3NpXZ6sfg+nUmtSttDYV8YofYqgi/ey+2Q/K/Ml8hgyawDYunVr6ndhiwrjZpyc5unbvlJZ\n",
+       "Nl2e56y0JbIM78AaJrQ4CgvYsmMLVml9gS7fozM+j9O0WH6seSEXtoiuXLkyzVTvyEH7MgCeZWsB\n",
+       "33MmZPg0GAzG+oJkWqtrTQLWouUrk8UtW7Y0PFmoRc3I2ntO4ZOj4CJaXmW+UVl9O685eG2LhP1Y\n",
+       "3G9pVYYunuHIscwi7TXK6zjfw1JuPE77GfIMW+JNC7Qj90RaeR3Bt9LnMLPyIfe2Emd7i/16Mt87\n",
+       "UFo8Lbe21GY+Q7ZUur2tYq4YAaanpzv7lv3wgG8N7L9o2h3lDGyNL/vKLFGeT8ui5d28J5rb1uW+\n",
+       "Pd1WPZ5FxLz3XOQFWh1Z6znC79E3HBs2bOjsc/alhjb/Xo5DtUhVVFRUVFRUVCwSS2qRstXAWjNw\n",
+       "Po1t27almrRzDHGKt9YDHLXhOj1Zjpa5ubmOP5Y1gyw6yX0BfGwYL1oPWmCfH4tzrzg6A9jiYm0o\n",
+       "q80FTZzu+yw75qExzs/EvgGm3eO2z0lpkXAOHmsYvld3fhxbOjNfE/sKTE9Pd+i0NcTPyny+zEfk\n",
+       "yLRYGy6/m0Wn2OKCDJovjnpz7qMs+qmkx1FlwHyxFcntsYZAsy2ZfVZikEWxArRgW8ft3wgsF1h0\n",
+       "PLcRrSbNuJBBWyIBn9tXyDmKAM82n2dnZzsRpPaJZF07cs7tPe5sreOXUkacwSNbUoH9kDJLHTx1\n",
+       "LdbMMuUal2WftupkUavAezpzYf/OzHdsxYoVndqZpgnY0mrfIM+pb3Ds31P2799Y1xg0LZlFN4vm\n",
+       "y6zOfZY91/u0L6GRWcEzn0rzpbx1yG5qkEX/vmV+WEa1SFVUVFRUVFRULBK11l5FRUVFRUVFxRjU\n",
+       "WnsVFRUVFRUVFc8ylsxH6pRTTkkz/AJqClGDrC9igLtN6jKV9cciuvktuC+ldpLrVWV+PtS3cv20\n",
+       "8jvQ5bam25l9XTvLfHEURlmDyr5PjlL54Ac/GBHRqUGW5dOib+rhZaD9Rz7ykaa2kfkBeCY8p46T\n",
+       "I0HsfwJfXCeuLxsxfVM7yTz339Qgy/jiZ7gelvk9MTHRzA81pTz/WZ4U2psWf898oX5aH/gONaKY\n",
+       "o8znAbim5Lj+4cupp57aods+D66dmK1/5IC++9Zc+X3aX3rppR1ZdFvodt2/+WQroru/OFqvXE9l\n",
+       "/cH5UK6hiHZd2NcFuE6ga7OV4LvUFEO2skg670WM0/43fqVOZN9YLbeMk76d68u5nuA5+6Kj3UwL\n",
+       "Yy3lJdt7WRfUoMuiU13L1bILnHcN2t/xjnekPqIgm6NMJj3OLAq8XBfZvujfor46niUsD8gi9fCy\n",
+       "fWViYiLOP//8iOjOZ8Yf12YErlLCXDBHZ599dtqe7/zP//k/IyL/jfb6Z01nqBapioqKioqKiopF\n",
+       "YsksUiWsLWZWofLEbUtD1pejjxaaX8fPmQ8+xfbRWz4r6zt7lnNzlO9nOZscdZNZ/bK+x0U79fXh\n",
+       "7NnZeMbVgRvHc2fn7kPG2ywKL/ue+WbLVak9OhJy3HfH0ZxpSW4/HA4785XJnl8Xstb8rPn6Lz9z\n",
+       "7ppxfWfIahNm/ZVtF7qnjFsf2bOy9bYYLDTDO5hvzfqzcTUHx/nKZtagxSCr45hVMLDMgnGRteVn\n",
+       "2Z6UzT80jdvT/XdfNGv2W2Vke1FGY2Z1XwjGtc36zvaNrJ5oX5Snxzlu73qmtGfR8WX+SZDxbtxe\n",
+       "bVSLVEVFRUVFRUXFIrFkFqnBYDBW0+77znyfR3RzdVhrsXac1QjKMiGX+VTmq3A9H532HQDOPmwf\n",
+       "kL7cQOOyaWe0mC+Zz0Q2htJyl2WXzzCupuI4LXAhfY/zkfIzs7//M/B4xllJrHna8pb5QJSymNGf\n",
+       "9Q2e6fzPN+eLzSuWwXy0haqvv3H0Z8i05QyZb135Xc975gNFPh2v+0xe5ls/z9Sqk83nOBoWgqw6\n",
+       "gn0/s3U+rsZcdotQzv24fW4hlpTyb1uest+X8vvj1mifv135d7an+9l96yLbazK+GOOswhlNfc93\n",
+       "/UvXLfR3yXnmvGRZ3jl+0+mXvFUzMzNj13fG03GoFqmKioqKioqKikXiv4WP1H/GSjDurnahyHxI\n",
+       "5rtTX6i2aytApmFYgx3nz9N3/57d7Wb8WCif5puTcb5g4/pcqL/GuLnp63Ocv9ZC/brcfx9tmV+a\n",
+       "6c58QUzTQp+9kDaZlSvzhRtnLXsmzxhnYR7H+3Fz1/feM7VIZjJsjdSyN59v1H/W2pnx0/31Wd8W\n",
+       "6vs2zufNr5ml5pnUVfQebTl5pnt3xpeS7sxSks3fOMtThmfi/5dZGBe6n46z/sz37IXudxmy2xRb\n",
+       "MP288v/2W86qiXgOx+1FtnSVz1mobC3ED7dEtUhVVFRUVFRUVCwSS2aRmpiYeMbWg/Lz7GTpE+S4\n",
+       "E6U1Ep/+/XmfJYZnZHlfxuXu8bPdPhtDyQN/Z1w047j2z8RSs1ALFMgiihZqDZovWsVa+jONEHPf\n",
+       "tkhkeXiGw2Gq3S40Imyc71iGZcuWdSK9Mo3Q2tq4qM2FanB9UXtZlXuPc6FzNJ/fRYaFytS4PQeY\n",
+       "X+ZTSVPmG/RMrSPZ/mKa5nsve+a4KD5bUXm17+h81uFxz8wivtzPOB8x0LeOMlrGyV5WU/LZkNHF\n",
+       "+jNm/VgGy2cv1hdoXLQjWKjvVPl/+0RlvHK9O/vBeo5cP6+0TC309gNka86oFqmKioqKioqKikWi\n",
+       "1tqrqKioqKioqBiDYa21V1FRUVFRUVHx7GLJfKTK2lyAu1Dev/zyyyOirfvDners7GwTJeD6ZtT8\n",
+       "efzxx0f6Wrt2bUREPPHEEyPtXVNs06ZNEdHmnthtt90ioq3NAy0zMzOdekzQT00pagQ5zwX3ydBO\n",
+       "nbiy7xKMgedRx+nUU09t7pehe8cdd4yINtfG+9///oiIOOOMM0b4smXLloiIWLdu3cj7ZR2/klae\n",
+       "7VxWl156aVMjDGT+OtRxoi4X+T640ya/SFb3y34ZJX9cUxB6oYEcPdBGjbDTTz89Ito58p0470M7\n",
+       "Y/W9/OTkZKe+oes4wcvnPOc5EdHy3LW27M9k3zmvi/I7jI/xXnTRRRHRyjk08oqcMwfQfuaZZ470\n",
+       "y7w7IqiscUhb1sEOO+wQEe2ao3YW9e3G+a1Qaw+eMyZocEbsD3/4w51aePAMeUfG4DnrwhE+0Mw6\n",
+       "gi+sC579yCOPRES7jqanpzty6/VuvxRkkf2CZ8Nz8ugwJq9Ry/rWrVub91z3ExrMO56BvEA78s+c\n",
+       "rlmzZoRf7KOso3K/4DvQgqxAN+NkXLvvvntERDz22GMR0a45ZNe087r99ttHRFv3rVwXfIf5Z89x\n",
+       "W/piHSC7zAHyguwiF/YN4nnUfTv55JMbXjn/EWCOWHPeD5kDXvl9QXbND74H/y+99NKmb2QPfjA3\n",
+       "lkXXffWaYy3CF/gI7Tx7jz32aJ7ndcF4XGvRfKE9NEMrfyM/n/zkJyMi4sQTT4yIrm/h448/3tB9\n",
+       "5ZVXRkR3j2Z8yAnPYO/KsGQHqbm5uY5TGAzlFXgDmp2dbQTcP65MDj9Sdo7kmYBn8/7GjRsjohV6\n",
+       "H/bKhQk9/DD6wORnjSvbwudOzMlY2TBKWmhLm8xp0I53CDrtzXNvzvyNIJabgQ9O48KYvXidmC9z\n",
+       "rmes8LVvrE4tQFsODNCf9Qkf2dQtXz6wlD/Q5iH0+UDE+DPHR/oEmcNjWdaIcfiHzn3QjoM3sOza\n",
+       "wRO+2FEUrFixonPgYzPKnOrhOZ+j7GQlZXzYYS7L9llwgPsC0FAeQspxmveMiUOAE1f2JeT0OKE7\n",
+       "O7S7ryy1ADT4B2ZycrKRd+D5skLkv3k2B4qHH3545BmWL/ZN+pmammr6YC81LaZ7nOM//EJ2s72O\n",
+       "g9js7OxYx2TPAe1RSBk3cALnzZs3j7zv/koH53GpeZA9eMl+wiE324uAD4vlXpfti7Tx3oX80J59\n",
+       "BdrMe9Yu/T366KMR0fKROYloZYTXhx56aGR8PAs4Ua1/w8wH+AUY2/Llyzvz798z9lT3MQ71aq+i\n",
+       "oqKioqKiYpFYMovU1NRUc7LcsGFDRLSnZGsknII5cZfWoCw9wa677hoRXTNgViKGkymnXGiBNsCp\n",
+       "eNOmTZ1rFFsSssKXaJ4+HXNCx5qG5pWZPkst0tdotjDYFM2VBOPMrGn0i+ZlzaUPDq33OF0gmnFk\n",
+       "iUrhhzX8+axjaDe0gX5kCWDlY+7uvffeiGi1JDQtAK3ICZrWihUrOtYuxonlgL95VqYdMQaXUrAs\n",
+       "lteR0AU940oEwdPMwgSfoMkWT1swZmZmOiHy0O91YVr4/I477uj93NeKtDefymejUZuHppv2jBf+\n",
+       "+RoWMJe0v++++yKi3Ud4jWh5inbLeob3Xks8ExqhGZ6j5QO+/+CDD0ZEaz3ZcccdO3sFz2TNsR6A\n",
+       "5cWyOy7hL1YE+n300UcbXtqSbqsocp3tc7Rj/La8WL522WWXhlZ4d//990dE19Jq0Bfj97qAj/Rr\n",
+       "/vRZX31dnNHAd30z4z3L7Zk7aLPFr/w/34EGXxsC7/+WTf+O0p45Ryax7Jdg3pAZ1hhz5N901jK0\n",
+       "2G3HFizG6n1j1apVHUuT9yy+k81nhmqRqqioqKioqKhYJJa0RAza7n777RcR7enPmjf39Gg2MzMz\n",
+       "HQdsgOWFU68tUtbE6If2nFhtNQGcnrds2dKxHPmUvueee0ZEV9vj9G4fKk7WnMDRrIA1mbm5uY7/\n",
+       "SEa3fWIYR5aQz4Uk7bRb9m/nWfuXjDvV2+/AFokDDjhghBZb0UoNBvrsyMgzrGEig0ceeWRERDz3\n",
+       "uc+NiFZ7xEIFsFDBn1LOzENbqGx5s7zQ3hpb1l9pBcksR2CnnXaKiK7lkvVijRTfBvsjAP89HA47\n",
+       "VgvWmuUfutFWkXf6xMpT9h3RdaZlrOWaps0DDzww8izWqjXSgw46aGS88Afrh9cofGb9eAwl35Eh\n",
+       "3sNKbhkFzAVj8LhtHUM+jjrqqJF+N27c2FlzBM3wTFtJsxJBbs+a9jpir2IvX7t2bbN2bEnjWfCD\n",
+       "vniGQTv4Yf+00goY0fJx+fLlzf+zBLP21+EV2r2O4HlWKNlYs2ZNQ0P5+xWRW8eZZzunl35GEe3c\n",
+       "0A4+sT7KsVp2vOa8Rm3Voi9+m72OXCCYdcLNRwnodTFiW3CBb5PgE7TYgsXc2DK1devWTt/jCocv\n",
+       "tEh3tUhVVFRUVFRUVCwSS2aR2rJlS8eXxqda4JP7ypUrO1FlAO2UkzCn1Uzzzsq6OGIGcHreeeed\n",
+       "G0uZ75sBWnEWteFnO5rLPkD2qVizZk1zirfvik/pjggCWeRDFq23kGKOCy306fFDs9vffvvtEdG1\n",
+       "joHyb/sZWbv3+Jkj/EvQ3GyxAfDRvnXD4bBjpcmKb2bjRGb5HhqXo/9AGXnp+cn8kDyvmcZli4Wt\n",
+       "i33zb23OkTAAixM8Z73beuB+kX/kwOHhEa3FINMsLefXX3/9CK3QkkXtATRbR572+es5kpg+LVuM\n",
+       "h+9Be7Z22RdtfV+2bFm6b8EXLFSZBduau63Otkjh34YFb+XKlR1rP8CiwP7pNZdFrdqSYSs7uPvu\n",
+       "u5v/Ww68LuC5LSnZunDEsX+rzPd77723s3/DuyzqjlfWYOZTy/fpl2fTvrRC2drHdx0RD5gjXh1R\n",
+       "Z1rgF3IFmKvSasz4WA/IB/Ppvi0P9n817TzTty6PP/54Z14dvZ75GI/DWIvUYDD408FgcP9gMPhh\n",
+       "8d4fDAaDuwaDwfee/vd/F5+dNRgMbhkMBjcOBoNfeEbUVFRUVFRUVFT8FGFsiZjBYPDqiHg8Iv58\n",
+       "OBwe9fR750TEpuFw+P+o7ZER8f9GxIsjYq+I+HJEHDocDufUrpaIqaioqKioqPipwaJLxAyHw2si\n",
+       "4tGej/o6/OWIuGo4HE4Ph8PbI2J9RLzkGdBZUVFRUVFRUfFTg/+Mj9Q7B4PBmyLiuxHxe8Ph8LGI\n",
+       "2DMiri3a3BVPWaY6ePvb3978n0go8kgQQUOK+JNOOikiWp+Rgw8+uLnzdfmJd7/73RHR9TOwz8cV\n",
+       "V1wx0jf39aS0546UfBgu+7Fq1arYa6+nhvbjH/84Itr74/POOy8i2lIoREzRHt8QchVRCoFU+Pa/\n",
+       "wO+JO2RKBJxyyinNuI4++uiIaCNk8Lcpy8mU4+T1Jz/5SUS099L0De2OmLzrrrsior2fLkvEODOx\n",
+       "/QUoEQEP+fzlL395RETceOONEdHKwWWXXTbSnjk57LDDRmh/4oknmrIplDbgrt5t8eWgb5er2Hvv\n",
+       "vUf4yP08fHnLW94SEa2MErV18803N+Ok/MBb3/rWiGjl4uCDD46IiJtuuikiWj8FaIHnzAVy4yhW\n",
+       "ZJ32y5cvb6INGSfzD19oi/wTpbh+/fqIaHn78Y9/PCJaPhIJRDQW7fHPKUtnILcHHnhgRLTrAr8Z\n",
+       "5Nzzz7rAtwUa6RvaQd/8M1ZkEd8HaHLJD9boCSecMELLPvvsExFt1Ba0fOxjHxtpT7/MEWPsK4Xh\n",
+       "HFOsf3xb2FugHT+cnXfeOSLaPcgliI4//vgRWpDFrVu3Nr5/zCflZ5AteH7nnXdGRLekEPsFa5kS\n",
+       "ONAOz5kj5hRa1q1b18ggvjCsf0p4IFv2kWGO2LtcUoRxupQO5Woo47Jt27Ymctp5jphPl6ui/T33\n",
+       "3DPyTNY/tCNPz3ve8yIi4j/+4z9GaCn3dHx6kC14CH8ohQTdLsPiihFeF/hp7b///hHR7kX089GP\n",
+       "frShG5k65JBDIqL1EWSP4XeR32f2fVcSwZcIWeR3lM+J4kQOB4NBw0Pvuewt0ODSWS5vZL9H3qe8\n",
+       "DWXinPl/+fLlzfjPPffciGjLDyFTzNF3v/vdkb5Z0xkWe5D6aESc+/T/3x8Rl0TEW5O2vdd4EBoR\n",
+       "ceihhzbMrKioqKioqKhYStx1112dVCwZFnWQGg6HD/D/wWDwiYj430//eXdE7FM03fvp9zp4+ctf\n",
+       "3mjqWIE4kdpLH+y7774R8dQJ9tZbb42IvEaYi/iicTriCw0KjcRaVJYJfccdd2y+y6ndtHBiRrNw\n",
+       "ThZHG9DeGc2zuoJzc3ONVkebLM8Hz4I/8C+LlETT5gSP1dC5OSLy+lWOmAPwEL6h/Tv3TznOiG5B\n",
+       "5r5oRs/jZz7zmYhorUL0ARxtQqQH3/fY4IPzE61cuTLlId+BD87UC1ybjHZosM7KXcqiI3ucg4px\n",
+       "8j5y4NxVAD4g/1hPHQULVqxY0bEsOMIRIJvwBRm21QAg98wdcmN+R3TXCtZNLC/Op8M4HBnJ36Yd\n",
+       "mstcRSXt5ZqGPkdIZvPvyLos6zRwxnCshI8//njnM/Yc12LMqjIgi8yVc7ZldQJ5feSRR9KoOqw5\n",
+       "rDEiuuCLaXfdP9Y76yLj4/bbb9+0dSFg4Ehp770GMohcsHfRv9fdxo0b070lq2fpSDnaO+IMfnnP\n",
+       "o59S1tnH+Q5WcX4v4C3g2bzP95krLHZuz/j5u68ObRb5zD7vdc3cMN8827cCwHwoI/K8t/i3hL7X\n",
+       "rFkThx9+eBxxxBEREXHttdfGfFhUHqnBYLCu+PNXI4KIvn+MiP8xGAymBoPBARFxSER8ezHPqKio\n",
+       "qKioqKj4746xFqnBYHBVRPxMROwyGAzujIhzIuJnB4PBMfHUtd1tEfH2iIjhcHj9YDD424i4PiJm\n",
+       "IuJ3hklY4Pbbb9/42/z93/99RET84i/+YkR0tV1Xhb/44oubU+hrX/ta09vbR5aLCY0En5hbbrkl\n",
+       "ItrTLtoyYDjXXXdd3HDDDRERcdxxx83bN/fQaAHHHHNMRHRzbnAyRyvCCsDJ3XXfVqxY0Tzzc5/7\n",
+       "XER078ABGgJ9wnvaWevlmV/84hcjIuL73/9+RET8+q//ekS01paIbm4Z193KLHVYoLjbfs1rXhMR\n",
+       "baZmwH072t+f/MmfjLR/yUvaeAZrbd/5zncior37ftnLXjbyOfLC/P/N3/xNREQce+yxEdH6HQBr\n",
+       "oiUfsyz73/jGN0ZoedOb3tRLK+OEb9dcc01EtH4bZHgHZZ4hfFvw8frZn/3ZkbZohtCNpe5XfuVX\n",
+       "IqJrecVCgd/aV77ylYiIeN3rXhcRbUZwsGzZssYn6pvf/GZERLz61a+OiO4aQj7wK8FfCVrwJQPw\n",
+       "EavK2WefHRFtFvqXvvSlI3REtDxj3qHJlQ2QY7Tdr371qxHxlLtBRFdTt9/Kt7/97ZEx4UsU0c4v\n",
+       "a+e2226LiIjXv/71I7QC5IfrBF6x2LO2gXPIYXVbuXJlx/KKls/6/9KXvhQRES984QsjomvBZpxo\n",
+       "8PAPWngFWFEY4y233NKsIdPiWwOsXOzpttTzN2vvBz/4QUS08mB5YZ+8++674+tf//pIG3zDgPN/\n",
+       "sVYPP/zwiOj+jtiic9VVV0XEqL9iiZ133rlZc+wt7Ln4tQLvLa5w4Xxszj6O7yT7Ysl3+uK36J/+\n",
+       "6Z8iovWz8zid04u+smzi/A6xp1999dUR0f7WlfLl2yH2Fp7BmgU8C5lDZtlvbE2FdvtgLVu2rFOp\n",
+       "ALq/9a1vRUTE1772tYhof+f6agX2YexBajgcvrHn7T+dp/0fRsQfLujpFRUVFRUVFRU/xVjSzOZY\n",
+       "ZH7t134tIlqtGS0fcLLk1Hzcccc1bfFRAWg7nMA5zdLOd9iOiECjd/03UN7XY92w9ua2aBScbjmR\n",
+       "+3RsvyxnabZlZ2ZmpvkOkUxZ1l++i+b4yle+cmT8aOQAzR3tCd7zar5DTzku+5sAngmPf/u3fzsi\n",
+       "Wj45Cy9aEJomkXPMMVpQ+Uw+e/Ob3zzyvrPkQys0/vzP/3xEtNqRaYGP9tfZbrvt0szjyPkb3/jG\n",
+       "kXG7Bhnyw+dEBPG35ausf4gG1VdnqxynaWGeLS/ML5YdonigxZGEc3NzjewRhYkGaR4ybiws0OLo\n",
+       "XcCcwetf/dVfHfm89DXB+uXakMyXLS+Mm7XGmkYePEfwCz5DM5atco3ybKwhWLnG1U5jnIyF8Vnz\n",
+       "/v/Ye9eYW8vq7nfM57DOLNfiqLgAAfEA1Wpt1HZr27Rvd/qlu2+/7N2DrSUK2MoqEFSgqIDCi0UM\n",
+       "Ym0ED0H7ptmJyU7eNk2a7jbdtam6baLiCVQOcmYBLl3KOj6HNfcH+N3zmr/7HmvyPpvmSZPx//Ks\n",
+       "Nec9r3tc4zrc1xj3GOPvuKWWo806pw3kZv2jp2xdYN3jgcz4M7kfHrkzzjij62cW82quPfrjucsY\n",
+       "0Y75Dr2Potdt27Z13hnaznTIeOPldJVwwLxnHV199dVTffA+eujQoW6e//Iv/3JE9LMUgZk+zC1o\n",
+       "T50zU//kT/5kqp12TNEJHnYy25g7jnmi31/96lcjYuKR5PloTw1jzfOBPcBct23b9I/YZ3ToZ4sr\n",
+       "/LsauWFmEGKlRqNRb49Gp+y1yEL/vC4yFNdeoVAoFAqFwhqxbh6p8XjcnTwz67+9NmJyEj3hhBO6\n",
+       "U7ctY07E/IbrsAp8inW2DZZpxinX1qTAGuXU6vfjnIQtkxm1AbLSDtYjVrDbH4/HnXxYsc7OA1xn\n",
+       "Dwwy2PLiOmT/xV/8xYiYeH9aa5o2sBz4Lf93tqE55PB6oEdbasjOX+YL82fI4kU+rkGWjA/PmS4Z\n",
+       "pxzX8T3zamlpqScH40acDVYOsS+ei+gR/TljKuPL2rt3b6dD2nTarrP6uI6xcHaa1yaWHNcPeVPp\n",
+       "P1Yr97JV57mG54qxsc7N14U3gLFoGebRkdca/bNHCthSba3YFow7f22Zt/MLHSELe0vmoTFrvfkB\n",
+       "vf7Rk70qo9GoN8+dbenfeo2Clls0YqIXZ5B6XbRt23vl2EB7KKxzx1w68zDjCV1ZWenF41huc7bS\n",
+       "T8bAemRvYj4hA7W+LPvS0lJ3D9cotPeKtojj4h4Zlysy48HFm4Lsrd7N2+eMaq9/5oOzwulLNl/4\n",
+       "nHXGPtLqhX6a3xJ4/zdPbsulGNGPTUYGxpI9/cCBA725gseZNti72FPsNc5QHqlCoVAoFAqFNWIm\n",
+       "195/yE2La69QKBQKhcJ/IqyZa69QKBQKhUKhMIx1i5G69NJLu/emvJ90bAQ8PvDngYWFhV5sy003\n",
+       "3RQRE34zxzL4ne6nP/3piJhwJzlTDtl4f3vttddGxISzauPGjd17YGcVXHPNNRER8e53v7snd8Tk\n",
+       "HTHvqakBBNcW3ztzkPvQ18suu6wXb+B39PBbwVcFXD/FXFuui+J2+f0tt9zSGx9A28gPdxZcS7Tp\n",
+       "/gH4jcz75ViypaWl7lo44mjLWUb8peaKed8c+0AfqHXl69sq7swxxodracs6dD/h2nKFa8D8gser\n",
+       "nS/c27GBGXcabTsWEL4y89tlQI/tejYXlsefMeJ7Zxrye4+p65UBPr/ppps6zi/AeHpvMY+X20I2\n",
+       "dA4fGuuItevYqvn5+Y5TjPF0zKP15DHiOv4S30IfsnXBXrW8vNwbzyuvvHKqf8jkfeC6666b6qf1\n",
+       "wV8+v+GGG6aub/vq+EvWBfMWeD/kd+Zmc90sxwzRfquXobjSiJyfCAkuAAAgAElEQVT3088iryP2\n",
+       "f68zZ9xx/dvf/vZ0D+Va+DA9d73fsWYZf/SIXrK4pRtvvLHrJ/ds4+kiJrqE9/OKK66YasPVx4nD\n",
+       "Yn55/Lm+nS88Q8m6NtMDQDbzvloWP099XvBcH41G3bVw7XnPddwq19PPDOWRKhQKhUKhUFgj1s0j\n",
+       "NRqNprIr2r8G17UnSz6zF8OeKp/qbZnye3Nu+Xet3BHPnKazCrwg80S4TgjI/s/vs/aH+pVdy735\n",
+       "nt85mwXY6rMFG9Eft6x//t7/d02v7HqAzO77sX47y4MH6JP1ktW6WVlZSbOq3K/MA+cxsVXsfrb/\n",
+       "z9aDwb2dZZWtC3sghu7N/+1pyWSxzrN7z/r/0LqwxT1rLtprZsvb84XrmB+Wre1L5oH2uLpt7z3Z\n",
+       "HM3m0YYNG3rz1tmazmLL9h7P3WxM0Vtr2Wfy+XP3M3sO8Lnr6h1rH/b4uu2hdRzR96YD6zUbm/b+\n",
+       "1qEzpoHnrvkfvRf7TYjXQzsXn+s6AFm9pQz2Pnr+tPuH90Hg/lp2P8My2S1Ttpbbazz3sqz9DOWR\n",
+       "KhQKhUKhUFgj1s0jFdF//5694/V1EXnNEWqN2OqzpQGwGh3HkNXLwBrasGFDalFm8Ik68yLxOfVD\n",
+       "HM/SwlYMcNv2tDl2KDvVZ16GVhaf4p+rRyqzbm2RDFk1bfvHsrwcK5XV7jIyj6T1CNq4PbfheIrM\n",
+       "yrEX1WOTxQYNeUeze7jNzPLm//6beSbG4/FMj2r2uXXs/9tLdCxPbmYJz9IHsDfQv7M+/LuhfnAP\n",
+       "x7wZjgHiL97yzGtsS340GqWxHvbmgczzDDIvAuDeXgPHkjdbe57n9oo6zs+6zzxVEbk3I6tJlXmN\n",
+       "XNvrWN6SdlyG2vS9Mg/trD39WM8Ar7lsXfu39tRkb0e8zo7lJXRNO9ewmuVVP9abmaHPs+8jco9k\n",
+       "9n2G8kgVCoVCoVAorBHrWtk8swafy/v6Wad7W7HOyvI9qcCK1UcmQfbuuJU9i6fh/5m1YthadL+H\n",
+       "LC9nsGQeE8fQzKomjFXoMRnqg709s/oJsliBzEOReSRa2T3uQ9ccSwZbXll8hysez83NpfM24yez\n",
+       "BYk31Ndl3tRWRnuzMkva/8/G39WCQRYz1f7fc2aWxynrr6+3zEOxRh43626Wx8Trwte7+rrXXStL\n",
+       "NnecKQUcr8j3mSfK86rltvS9vRdlPGUgiwWcNb+Q4fDhw+m+9z8bh+Jq3K6Y7jFqx2JWnKlje/wG\n",
+       "w/A8mvVWYnFxsecxckao5bbHPnvrkHHvDcVLzvIkzfLMHCsucejez2XdZZX8s0rlhpk0QBY7O+S5\n",
+       "n8XXNys2DJRHqlAoFAqFQmGNWNcYKVuPWeaRT7VthtCs98ZGdvL2yZRTreMa7LFo5X2u2QaZ58bv\n",
+       "dF135ljZbFlmmL+3jmd59rJMkFaWWdmLht/PW6YsFiqzJofunVl3s2IgZlnL1ltbv8mW06yMOMMe\n",
+       "hiweB7Qs6LMy3Wxxz8rCApn3aMgSzzxFWXaOPVb2PFh2z5NjeXrdz1n7QRYrk61p4L3pWGvSazCL\n",
+       "BXT/ZvHhuSZU2xaw5wxk8UrZ3M08WkPzIvOoZTGV2VhZ9swbZBnH43FvH5ilc3PvGW5nlud3YWHh\n",
+       "OekqIo8ZBL6X13QW/9Zek8UOHyv7tL139ny1xxMv01AmXlYfK9tzZ8UpeV0cKys6ey5mGdWzPLeg\n",
+       "PFKFQqFQKBQKa0Rx7RUKhUKhUCjMwDjh2lu3V3vvec97OvcZJQtcgsBl/FvqBF5r0AYl3KGIwW1I\n",
+       "0DiuRlyMn/nMZyKiT+Owb9++qXvxOeXqkeXIkSO9AnEEspvyw68ZkCVr2+m99JVDb0tv4VRp3JxP\n",
+       "P/30VNuU8OfeWcop9CPQMtAuAX1+7XjTTTd1bWevpPgNbdNPKHaQya8TLDuy8DvcyYcOHeroR0xV\n",
+       "YNc18+HGG2+ckgWgP9P0fPCDH4yIiIsvvnhKhqGimIw/VAV+lTOkw4gJFQrj7le79Je5jiybNm3q\n",
+       "9RO5aRuaBdpGD8wTgM6h2TDVg132yGLaj7bfXMv4m5aBVxTQM3FP0484kcCvvD784Q93/UQflA44\n",
+       "/vjjI2KSVMLeAi2H6ZgcAAsVDu1zT9oHKysrnU6giEJeqF64F3Po+uuvj4i+Dv26iT6Z3sbrZmFh\n",
+       "odOVKX/QMXIz/rPWP/rwaxiPf/v6kn6afop5y7Xbt2+fugf9pW3mC3P2pz/96dTvaCejw4qY6Bwd\n",
+       "QififdGv3ViD7C/o3M8VnmHot30G+FUkOmU90zZ6oc0XvehFU/qj35YdOByFPt94443dXHSAN/v6\n",
+       "C17wgojoU4S5cC//Z/5/9rOfnbqePjKvuG7Tpk0d/Yyfc1zjwtzei4CTu+inaZwsy+rqajdurCFf\n",
+       "axlMy5OhXu0VCoVCoVAorBHr5pE6cuRId4rHSsTCsJVnr8i9997bnZCzVElO2Fiie/bsiYi+Be4g\n",
+       "Ue7FSdTBZrR/+PDh+NGPfhQRk9PrySefPHWtA/J+8pOfdL9t2wKm7+D7zHuwcePG7t7ogdM5njXg\n",
+       "oNidO3dO9fvxxx+fup7+23KhndNOOy0Mp7hmAXvIams3S2e2F81euCFKCH6DFZdR4ThAk34zf5ib\n",
+       "gOuYu8yvE044oZsP7qeDjJFpx44dMQRkx+PifoN2/jC3+AxvB6ANLGb6x734634yj2bJ0qaa43lB\n",
+       "18w1t833eFy5R7b+0d/evXunvm9JTxk/1gVtMxaMF+Bz+o9syOwUdfQHGEPuw7xowXdek5bFxXKZ\n",
+       "J3x+4oknTl3P3GWOMjbbt2/v7RXezxgrjy+gLfTy1FNPTcnG/AEuB9EGertUCJ8fd9xxERFx+umn\n",
+       "R0TED3/4w8HrmQ8//vGPIyLiiSeemPr9CSecMCjLoUOHuv7RX68LywS87gG/dymGjN5kw4YN3WfI\n",
+       "gnze/xnnk046KSIizj333KnP6bf7yTxgDzjllFN6snANOmQ/dxA5cNC13x55jTJfmLM8b9FXq3fk\n",
+       "5jN+S/+yEkX2FiOLw5NY016TR48e7SWPmVKOvSXzOGcoj1ShUCgUCoXCGrFuHqlt27b1rHtO7D6R\n",
+       "clrGSjh8+HBnzdl7wUmSUymncizJzPLipE57nJYzL8DJJ5/c3ePBBx+c6gegf5yMuQene1tHfoeO\n",
+       "VZydjufm5rr+cJpvPWYtkBUdYmHaC2bwO6xF7ofl0crbkkpH9GOCAGOB5wIge0aFwvzAe8b9sEwj\n",
+       "+vEByIClYb0AxsReJYOxRF9Yfzt27JjSSSsf/cRLMKvAIh4WrH5k9+9avdNvrNIshRodMv4vfOEL\n",
+       "I2LiDQbIwJxGb8hgb9r27ds7XSPDLILboeKFEX2PhFOT7ckaSlF2XBFjk9GPZEVwM6+B95chfTPP\n",
+       "HdPlODPQltJo2+R6e0nwKrAvnHnmmRHxzNz32kIGPAWOBWUeAHteHOdny5750q5R5oG9V45fnVUM\n",
+       "0mn+zD17ESz76upqz+PiZ4tjoRwz5uvtdWV9eA8EmzZt6vTAtfTH+z9t0Pajjz4aEZP9zWOKbN7z\n",
+       "ua7dF13WgblE/y0L/WdenHXWWRER8dBDD0393rLQN55drIG2fZNO02/mg98y0bbjlnkm+bnI/2m3\n",
+       "3atanbRte43awzoL5ZEqFAqFQqFQWCPWzSN14MCB7mT5spe9LCImlqxPgZxEOeUuLCykcUb2/rgg\n",
+       "m0+vjstwuXlbR63lilxYXLYwOY3z154ay4LFYm8KMtoKWFlZ6Vm5WVyK44/4na094P9jUZgKoJUz\n",
+       "K6TmWLE2i6K9l9+dA8eK8Xcojs3WLZ5Lx1UBZKNN5iDWYOY1RCauf+SRR3oxMC7aZ4+TrSPm3pBV\n",
+       "297b7S8uLnbxM4438rX+3plS7ifxJ85m9NxdWlrqvqOtLM7Q8WiOU/CaRnb05piatn0XZLUHIbOk\n",
+       "AR5J9GTPG2OEhws90PfWm8a9acPeMVve9m4hG/e0p87xnci+urqa7mPO7mV9WOcusMhczQoVI0M7\n",
+       "p7NCovbMsB94TgJ7YvCe8nm2d23evLmLN7I3A9APxwxlVFP2VDkzdYjknrVpz5F16Di9733ve1My\n",
+       "e74gMzIxlnjo2jg55imyeM/1OrBMPE+RzfPLHlxkGSrgyrXI5FivbP1zb/fFb4J8feu5y4pgs88z\n",
+       "tzLPa4bySBUKhUKhUCisEevmkRqNRt1JkpM2J02fAk2guWPHjrREP9e6DP2sDBKfQJ31AtqTOPfA\n",
+       "42RZnOGAZZq9fzXxrD0cQ5lVtO134LZe/LljQjKdc5rHwzVEkTNEKt3+zWJlbA1k1AmA60zf0XrC\n",
+       "HDdDf7FeMjJSW9qmLwC0x/X83bNnTzqerqeTUZtgiWFpE8eUZT/S14WFhU4uLPCM2oR7OGMyyyAF\n",
+       "mVcVHDp0qGvD9X3skbTnra01E5HHJbQeuIjJemplNYk33+GJsteUMcq8xJ6LJsx1val2jNrxiZis\n",
+       "Ibw22Vzkd65t5Rg5e9/suWlhj7y9hda5ZcfDwOcZaW27pjNaJntDkDvTh2s2Od7JY8q+OxqNep6o\n",
+       "bG5lNcpmUSQx1+3BA4cPH+69UckyiB13BzJZAOuJdWFPb0Q+nvZUA2dtE5fq52smo+P92jHyM9eZ\n",
+       "00NvXto2/Gz3PkpfHFvcUssB+ofu8cB7Xc9CeaQKhUKhUCgU1oiiiCkUCoVCoVCYgYwipjxShUKh\n",
+       "UCgUCmvEusVIXX755b3aJM7agZvnD/7gDyJi8o55PB537y75DVxYV155ZUT0a3VwHfeEr+g973nP\n",
+       "1PXOwuD9PLxf8P6srq722uRdLDw+F154YURM3r8SZ+JK3uYgA67cba6td73rXT0ZzOP1gQ98ICIi\n",
+       "rrnmmojoxyM4ZgYOugsuuGBKFt6ZIwv3+8QnPhFXXHHF1GeAMeLvhz70oYiI+P3f//2I6Gcdua7I\n",
+       "X//1X0fEhD8R/To2YmVlpdMJfFWOWXH8DbxcjCvv1dGPY8qQHc4q9Ndm4nHtn/3Zn03p0NXnjdtu\n",
+       "uy0iJrxPzto03xV9pf35+flejBz9Zw1de+21EdHPmHJF/Ouuuy4iJuvC2Z4ef7j83vKWt3Tz2lX0\n",
+       "mfd33HHHVD+Bs3aQHe6siy66aOpzz0Xue9ttt3XjQ78c+8U96Cdzi3mSxUjBtfW+970vIvpxHPR1\n",
+       "fn6+myvmfeTezs6j7auvvjoiJtl37q/nYsbN2WbzwlfmeetYFmSE9w/eR8fnORaGfZcxYh6NRqNe\n",
+       "VtmnPvWpiIh4//vfHxHRqzpumVijV111VUT0Y0Rdd4h24X3btGlTJ4/nPXKzXzjGCyALeoQ/Ez06\n",
+       "PomxaLncHJfqfjD+PLsct+j4K66H39DxX9yP/3/0ox/tni1ZtqVl4Tnn2EqvUfgQ0UtbRTxiugaU\n",
+       "eT9de8txz+xdrIusthv/Z/zhRWUvb9vnnszz3/zN35zSmWOH+ZxnUYbySBUKhUKhUCisEevmkVpc\n",
+       "XOxVuOb051OyK1hv2bKlO6W7mrSzUVxPxnVEsP6oSWNeN/NbtfWXXCfFtTWQ17Vaspo21KOyZyer\n",
+       "x7K6utqzEFueqRboi8+591Dl2Yg+w7qzWtr27VFyzRrXKCGzzDWKqMnk6uJ4Im2Bct82w5J+0h8z\n",
+       "rztrg3HHkqLf/N7cavTNMm7ZsqV3LdyL1h33or/AnFK2NC17y/HIvZ0JA8zLxl97bA0+p3J+5lXb\n",
+       "vHlzJwNeHfppK5/P7Ymy5wUgK/PFmart9Z6DtjQzri3WKmsQD4W9B8wP9IFXhd+deuqp3bXcC3nN\n",
+       "Mej6N56DQ7WZWlgP9H1paamX4elaTWaAyNgHWLvm3nOWFxmUbbaWudEAdY7uu+++iJjsJawX2AIA\n",
+       "a83edOau9dO+AbCH0c8L+uV6YFkWnj2/zih0VuCBAwemsggj+l7A9tqIyZxyPbaWUzKin3HNHKT9\n",
+       "tq/s4+atdCV3YA5O72FZrachloG2b22//ExydrN/y/g7Kzx7dvn5urq62mub5zt7i88kMAHMQnmk\n",
+       "CoVCoVAoFNaIdfNItdxetqJtHXPS5tS4cePG7pTqa7FeXAU7s7g5DWMFcALHC2JrCrkPHTrUq2uR\n",
+       "8XiZzwlLwtYL97KlmTGLHz16tBdX4JgXwL2wBukvlqWzN+HtstXHSb3VJ207JsLeIAB/04tf/OIp\n",
+       "WbEk4C40HJ/jeiKtLFgjjmVyP201u7qwLVradaXfoezXc845Z6oNe7PM5g6sR3tZABb8ySef3OmQ\n",
+       "ftoasyfRtZxs1VPZnXa4PqvKvmvXrt4cZI3aO2oOSscnWpese7w9rq/UWt5mhHf1a1v1Xj/AngbA\n",
+       "+nnsscciYqIf+tTyyrF2XO/JMZKA/7tGFfq07PTbno02frP9LKJfN8u1vIArgmPJZ2M6FGOU1fqj\n",
+       "PprfAjiWFCAze5a9RV6jbSyavZ4Z+4TfXJivDTj+L/PYgH379vWqnjuW0P00o0X27HrRi140db0r\n",
+       "hbeeWrcJuCYbTzxW9uh7LrKncz3rzpXC27YyD5R1bk8tyLhZM6aN+fn5nrf753/+5yNi4gXle/bo\n",
+       "51rVoDxShUKhUCgUCmvEulY2d3wPJ/WM346T+Xg87k68rubLCZK/WAhYio4z4VTMO35O4lzvE2xr\n",
+       "TdqS9gnY8QSuip2x3BM746q8Q1VZs++sQ2dz2aLIqo+bWw+LvLUOsCis+4zf7uGHH46IPns9Xh4s\n",
+       "VcueYahasT1n5oEDzqR0Bqb16myUllvKbf/gBz+YugdtY2HZa8QczDJILQvtPProo52HiN+iS8B4\n",
+       "0QZzC69JxrVljj1kHIo1tGWczXOsVOYJ/XAGKbDeXBG7nS/Wlf9v69XjSL8yjzeeatpDz3hXW087\n",
+       "89teEXvNQOapQn/2Gtgz1zIJeHz4v70A/N86f/LJJyNisg/ye+aR911X7V9YWOjadn+Yc3i9mLPo\n",
+       "1jEvtLNr166ImIwJ86zVeduXxcXF3rrPeD89P7IK6KxZV+zOYjC3bt2aeua9r5kflfWUrU08dM4G\n",
+       "HIrXYt92NnMWI0dcr+eHPZmWxXuX48PaNp2Nnj0vmA9+HmbxWs7Abde25+K9994bEZP5jh5Y13hP\n",
+       "Z6E8UoVCoVAoFAprxLp5pFqL2TUqMi8QMSUrKyvpO1//hhNmls2Ehe0Yk8yCBaPRqFdrwqd0W21+\n",
+       "f2yvEdYOJ2pnJvpd8s6dO3vv7J2VAbAwaIO2bZkArHx7dOjzkF7Mr5SBfjqWCr04m8mZlLaOWu+Y\n",
+       "5cKaMa8f4HOsQcdbuD3iktBnWwPHlhTv2Znrjsfy+NN/e2Y8r0Ab3+YYFc9z+mevV2aBmxfM82uI\n",
+       "P89eQHPGGYwF3lG3CZABq9druZ1vztqdxd7u2CHzYHouO4uV+eD4lIi+l9Pck1lcFnpxtqbXKB4q\n",
+       "Z3kePXq053G2Xtx/ew3ZqxxLl3GQOV5rYWGhmw++J54oewPNjQboi/fFbN9tPXt+2+F90evEcUme\n",
+       "P5nnKYvB3Lp1a8+zzDVeF/S/rZcY0a/5BzKvqz02ERNduf+uKwbMyejnaMaf6bcz/G31yFzJYobd\n",
+       "tjNvXXcqe0a7j6urq6knjb9+thTXXqFQKBQKhcJ/MIprr1AoFAqFQmEGimuvUCgUCoVC4XnGusVI\n",
+       "vfe97+3eP/I+nnedxJTAzQTXEu8377///jjttNOm2jP/FJ42smmIvufdN1xIf/iHfxgRk3e41Krh\n",
+       "eu7J9fAbLS4udrE8bVxAxISvCK4t2ibuyPE1cErBFeSYKHMM0ddLL720y/B7xSteERGTd73oFI6o\n",
+       "888/PyIm9X+om3H//fdHxCR2Bu4k9Mi7cq5zNuNf/uVfxuWXXz4lH+/LXUfLvExk77361a+OiEmc\n",
+       "BhkmcC390R/9UURMYgfMobRhw4ZU566uzTtwX2/+PkCcAWP01re+dUoW2jt48GDXBm3DKUV8xnnn\n",
+       "nTfVb8YfTjn4rdA5c9AxIubD2rx5c69aOrE7cEpyLTom1ge9mIOQ69Ej8V6MMTIxpueff36nu1/6\n",
+       "pV+KiIgvf/nLU23Ab4ZemEvc2xlzzF3mIvd0BhKf33LLLd14OvaHeY/8cEq6bXOF8TvWnPkT0XOb\n",
+       "pWTeT/pPJW/2JO7FPIebE5lf+9rXRkTEN77xjYiYzFHaNx8i82Xjxo3dHoPc7FvojHuz7oH50Ji7\n",
+       "rFGynIhz/NznPhcRkz267St7NLpiLpo7k7WE/LTBuoAnlLnNHkesFf1nLsL9OTc311WmphYZ48Qa\n",
+       "evvb3x4Rkzn4C7/wCxER8W//9m9Tv4MPFa5F9iD0Q4Vs9u52D3Clccd2Mf7mQ3QNM8aKucsadTyv\n",
+       "Mwk//OEPd+PDnvPAAw9ERMTpp58eEZM9m+cc45/x2qEvxojraef1r399RER89atf7fp6++23R8SE\n",
+       "xxE4U9octDwv6Kdjic3Nad4/sG3btu63rCHmInWw+PuVr3wlIiZ7FGOUoTxShUKhUCgUCmvEunmk\n",
+       "2qqz/OXEnVUwbfnTsNoybh9O9a6D5OwUZ0a5nkxWfXjfvn29a51V4wquWYaQ+8nn6MH6aftoNm76\n",
+       "ndWR4vqM7RxgFfo6Vzpu23Rl+qwukDNj+P9QzZG2HbyFWKaPPPJIRExn7ZgTzJyBzmYCWJh4O/DY\n",
+       "2SLD4jIf2I9//OPe+BhZPRm3TTvmz/Pv2griWN6MM5aVr0V3szKIuCfeAmfMeR21Nc3w0uDNybgj\n",
+       "mUPOvvE6wtpHP4wV+mmvz7jSsmrHtIl3A/1guXr8nXHptd9awVlWkrkIAddxb2faZlxrrmZ/9OjR\n",
+       "3vjY+8c8B1kNPPQD0wG10TwXaRcZV1ZWemMBXOGb37K/e881e4DZC5yJS9/brGZ7yYEr4PtZ473I\n",
+       "zwv2oGxNLy8vd3tPy9Ma0a9/Rb9d4Z816D299YZHTMaUtYw3rZWXNlnPrFHr0HUY7RV3Rjr7rKvV\n",
+       "+3nTtuH54TEAbf3I9nvatiw+V7R7QFZNns/ZUzJuzgzlkSoUCoVCoVBYI9bNIzU/P9+dWr/1rW9F\n",
+       "RMQb3/jGiOhbpLYa9u7d250Y7Rmw9eJTvy01LBVO5GeccUZETKwGTu4Aa3Lfvn09T5Hfadtb5Arn\n",
+       "9tTQP36HTMSn2CIZj8e9GiWuvGtwPXEa9NOyY6k89NBDU/ceqlbsU74tAX/P52bapk6YK/y6hg0x\n",
+       "dMjezhd7Vlw91xYG1zPO6A8ZsvoqWGjwXR04cKDHEWZPEh6ITBbHHxDnxtjioQBDXpCsUjky0Aae\n",
+       "F8dMAfqXzdUhfjfu+bM/+7MREfG1r31t6l7AtdvuueeeiIh4yUteMnVv4PpBtIf+WlmYl4y79eC5\n",
+       "SBuMq9eNaxq5ho3jXdr5gtyOX3P9H8A6Zz2cffbZERHx9a9/vdf2kMx4BYY8o/SbOYVMeDHsBbB3\n",
+       "mBhMYkdcZd1r/Sc/+UlvrwV+m8Cc5f/2liEb4w/PITq3N6UdC1fazvoJXvOa10TExNOEvgDjzV6E\n",
+       "rOjcnuCtW7f2PMueBwAZrY/s7Qhzl6rczPUhFgeudQ0/16xq5Y7ovzXIvMbmiX3zm98cEZM5DXdn\n",
+       "xGSvccX+9o1TC+7pdcPnfkbTf+5jvbRw7ULm+V133TXV/1koj1ShUCgUCoXCGrGuXHt4kzjNO4YI\n",
+       "2BOzbdu27v2vLU5ndPFbTsY+lTrTLOMMAm2cg2MgDFdg9vtXe4Fc4ZcTu/nOwNGjR7s2sHIcEwS4\n",
+       "J/2zl8CWmd8zO16jtQL8zpt7ZZWq0SH6MMeePS/27BEzY1kjJhZkxlNoDjp79LConM0GGAsyjGj3\n",
+       "yJEjPe+FsxbxuDEX7Xlzf7HQGBvHArSxZXhxWBfm8fNc5LfWrdtmLbLOjsUogHzf/OY3I2Ji1Q9x\n",
+       "REZM1g3zgDllD4PjlYj34Xft3HW1c2Rij/Fewv/NuUZ/M0457umq9W37Gb9lVh2atukDmVWuiO++\n",
+       "MtfxhmzdurU3Ph5nr9mMr47+f+c734mI3ONtfs3t27d3/7YsznLmHo4hsuy0x9hYZsDYbNmypedZ\n",
+       "tReYNtEtmZV4UOw19PzBI82e5efFwYMHe17/7NmCPvwsYg3aU+MYXM/xdo+2dxvvL3uWPYyAcZ31\n",
+       "THaG+Xe/+92I6GfBRkyea9aDK9YDc1Qypshm2R2zy++2bt3ak5tr0I8ZPTIOWmPdDlIHDhzovS5C\n",
+       "MX6QMoFal3AW9GoqEE8Eu3YdsOkg0myyr66u9krX+9WOaRkYFBaUFwaDiQymcRnaYHgFZ1JST8bs\n",
+       "MJaV2bcrN3MvR/Q3VT8gsu+RnTYzlzcLB30cSy9+DeSNM0seoJ9sRr4nYD6gZ/5u27YtpV8g+NpU\n",
+       "N54vfkXjYEzLguwbNmzoHQD8SoLxo1+zCJEd+Oo+DR28fLDx2gKmYXHguzc7B8pyPfdrN1IHyZrG\n",
+       "KXvlaeqTtrRGC8bGD3W/2mj76deALvfg67nOhzuPUUYKPT8/35vnpghB7izA22PH4T+TxQ+viNyo\n",
+       "Q1fI5CQb70Xoj7nLGs1ekbKvzM3N9aitvIeavoeDog0qy8L1WRgCmJub6yU+eA0C03VZFq8LGyQm\n",
+       "Im8NDOax5zljZL2YSJj+ZhQ5loH5wuft62bTzpgIPSMtzozBLFwDvbf0QBlxNG0+/vjjU7/JnAFG\n",
+       "vdorFAqFQqFQWCOKIqZQKBQKhUJhBooiplAoFAqFQuF5xrrFSF1yySW9wEfe6fLO0yXi29L5LlYG\n",
+       "FQK0CQSB8n6Zd528E4V+4uKLL566jveuxF209BNt+0ePHu3e3fKemXezWQl/QP/oA7QMUK04uNBx\n",
+       "W8hy4YUXdvEAvJOmrAFpuZTChyKC9+4EbDuwHRqHd7zjHVP6Ih6FuA36fuutt3ZyO96AftIf5Ebn\n",
+       "gP45TfijH/1oRETs3r07IvqxZW2ROHSI3Lyrd2Ai/bQsyEywNoHhpgiCasEFC+fn57vxN82Oi9oB\n",
+       "5prpKriOoGqXxTAFTcRkPFzskGuRhbFwkT9kQefQmzgg1HGNUMq84x3v6HSM/I5VQC/o3PGMTrln\n",
+       "/V900UVTeqN9xqaVnX4ib1YwE8oHr1FTCjEn6aepM9pkA5+ThuwAACAASURBVPqMDqHC8BoDyMT1\n",
+       "H/zgByMiuuKq3NuxMFBhZPRGBw4c6FHbXH311RExWf+0yRrlL2PE+HMdsjNWplrh+jZInd/SD/YW\n",
+       "aFbYS4iRefDBB6fuwR7N/s+YcJ33MPTYzi/HwFjnrGf04qQKQD+RBThwmudIS0HleE3HwqEX5ovL\n",
+       "6iALMt52221T17M/oA/GgL3p1ltv7e1b/EU2dM7c8j7qWEL0Ce0Lz0XWA+22z2GeRaw5l1gg0Duj\n",
+       "QkIvzO1sj+Z6x8Xt27ev06mfLeiWPdcxYayLDOWRKhQKhUKhUFgj1s0jNTc3150OKcjnTBjgaP1H\n",
+       "HnmkO5XaysfKIXX4V37lVyJi4jVyOjOnYZ9yMxLb9nrkIhW+LckfMTkRu9SA0/eBiwNyQs8yZTZv\n",
+       "3tyluJLpiD6c5u9sC2TgHs7aQE9QZ6AfPm8zJTw+jKO9AgALFS/asUortDK7oCnttJY+/3Y/Z5Wz\n",
+       "ILMI3XsOAvefvr/gBS/olRxwgcFdu3ZN/T/LrLR3Cdmc/g62bt3aI+X1eKIPrFQIcSGhzahw6BP9\n",
+       "Zc26/U2bNnXjj7z2BgCXBXDWjceovUfEZL4w1kNF8zI9ZAX2GEcK0FKY17LQf6hSIH114cKIvmeW\n",
+       "Ncm8db+ZF6YnyTIlmV+seTw1GW1WxMTjxnzJ5hRzk/nOXxekNOjTvffe242T1z/jxl+yWiFG95g5\n",
+       "45I9y55bY25urtM5HljvofSLec54QiyelT9xqY6MImZlZaXniXJmcXttC3swM7oa5iT6RrY2/tm0\n",
+       "Oln2KnD2N3u1s7kBz13mtulbWtn5LYVVrR+XM+D/jA1zuC1r0AK9Mi/aAs5ZKQ57oP0cmYXySBUK\n",
+       "hUKhUCisEevmkVpYWOi8RhQ/I77HFganQ4qlHTx4sCs+Z48UlhKeKK6jjYzyhbiE73//+xExKVjm\n",
+       "0y4n1D179nSne07MFGcDrtmC5ZAV+/J7aCzUs846KyL6RRa3bNnSyf17v/d7ETEpbW/rBp1ixWB5\n",
+       "06ZrvaAX9EdxRdBavejElicWkU/1WAV8T2FWxsbWLpYHHj/K+FP0rfUaYEF5DrkYHsDjRn+wkvi9\n",
+       "ZWHs8HC2tAaZhYklhS7xHNgq9Bg5ZtCehtazyXhltXiQ7U1vetOULOjQsjMX0TmWWlY0d3Fxcape\n",
+       "Syu/de56OS7Q6La5tymFkLGt3WQ9MG9d/BMgG15jxsokrYA+4eGwJ7DdX/iOezBnXJgX2PuFd5z9\n",
+       "hf0RMF/wKtKH17zmNb29BY8Ba4d+3X333RHRX7v8nj3Idfssu+uXLS0t9doEjC97LBQnLooL+Jy5\n",
+       "es4550TEZAysNzwzR48e7TxLrFOPP/1g/Jgv7APsD8CeOWRhDKz3lZWVXjwav7XHCbldR4w2rc+2\n",
+       "+GnExGvkGmrIETHZM5jfzDHPc3sN8QKiRz8vGFM8tfaWtu3jIXLdK9eJMxhLxoaxzGTn2Ujfd+zY\n",
+       "0dsXHadpYnS/NchQHqlCoVAoFAqFNWLdPFIHDx7srDdO6qeeempE5BQBbTVeVwkGtvI51RPHZE8N\n",
+       "J2nHDrjqNsD62b9/f/cdVotjHpy1Muu0i0XBXzxYnJZtwRw4cKCzoLEsoHLwO297VrBes8rW9sg4\n",
+       "86SN+3KGg6scW4euTE1/bWn4eiw2rsOSbb2GzpAy7UT2jtxV6pkPWSwIIL7j6aef7umctunPnXfe\n",
+       "GRF51XXAnM0qfQPaaS14W/eWhcq9eF5NGAocr+Wq69bjeDzuUVxgebKWLIsplvhdRvlAe44lbPXD\n",
+       "vLQ3DEvca4j1zN6TUb+4fXui8Kq3XgPGkTmEde5sJWBaFn6HVzCLY6QPeMQ3btzY87zzW/ZBV5H2\n",
+       "vmlKLfaDLHbQ3pNt27b16FcA/8djwHhnNC7oFn2x5zGWzEnLcvjw4U5+V08HJhCn36zZjGoL/eHB\n",
+       "Yl4M0dZwbcYeAZyNip6Yk54vptixR7fVI+NCf/EwMb+tF5O5+xmczUVk5TreNrTPOsYbWVjHzDHv\n",
+       "d7SNzIwpHlrPL67nPjxHt2/f3rvWjBCuhu4xy1AeqUKhUCgUCoU1Yt08UktLS91p78wzz4yIiXWU\n",
+       "xTER1xGRW/N+/4ynxuSlgHtiYXKCzvjNODVv2bKlV9/JJ2ln/vG9a/i4T1g1/J4+DMUa0ZbjJBxP\n",
+       "Qf+xhltdtjICTv3EUGWeq/ZaX4PVYk8N32MN+R6OBcBSw/LkevrQtm9Lm3mArt1Px07xeywre0eQ\n",
+       "2V6FQ4cO9eRm/BxLx1yzLvk/831W5khbb6ytZ9X2x8AjZf6yjIPQJN4ea7C4uNi1hUeAfmRr1Zlu\n",
+       "GXecs0Ftsbdj5N+SfZdxrbm+nOsIeUzpE3sXnivaaS1vx6vZi+69hTlFv4ghov/um2MM2duw3Fuw\n",
+       "byET1/CbbI3StueyPVPIyFi88IUv7NWeA+b9cy0rj5HfWKAf5rKzpdt1cfbZZ0fEZAy85zpOz17O\n",
+       "TC9+k5Hx50X01wo6yojCaYPxNvE6sMff3qK2r8625DcZabE9ecwD4Llr4mzPj1Z2e+38NsBeYPqf\n",
+       "vS2yzs0P28ae2sPkPcVxqZW1VygUCoVCofAfjOLaKxQKhUKhUJiB4torFAqFQqFQeJ6xbjFSF1xw\n",
+       "QfduNKsnc8MNN0RExBVXXBER03WWeFfPe+A77rgjIiKuu+667pqIPnce///kJz851Tbf46Hj3TH/\n",
+       "hw/pd37ndzoZeS/OO3reC19zzTURMeFl4j2rY8CQ8bOf/ezU9X6fjwy8v4WD6Oqrr+4+cywQOuRa\n",
+       "eNxcT8cZJXBzoRdkdJVm/l5//fUpp6AB/xRtt/FFrWzmN4QnyllsbfYHnE/wj1l3jlej7fPPP39K\n",
+       "Rq7jPT79hAcNbiZne0RMdAVflfmtnI3HPZAFfiu/t2d9EBMA7xvzhTkQMYkfaMcnIuK9731vREzm\n",
+       "idcPa8t6BO09IiZ6/MxnPtP1lfHgHo4ng68Kvjpk9NiwRuHagzuL+WK+P/Rzww03dPxjHn/mt3nZ\n",
+       "aBs4fonYQvYX9OIaaS3PJHLDb2lmAmTweF544YVT96ZN4iDpL7ySb3vb26Y+Z89aWFjo+oHO2UvN\n",
+       "gUYWH/f6/Oc/PyU784QxdeZhyykXMRnTubm5XtYe/fR+4Xga5j1rzusfvdFH2mevg29xcXGxNz70\n",
+       "H65F5M5ih5CNffHaa6+d0gt7ufXDOrr88st7Vfb9bOJZBEcg/efeni+MJXs67aB7MlRZF9dee21v\n",
+       "X3R8IfdgnjNG5rM0WwXr6N3vfndETGJ0vT62bNnSXXvBBRdMyedMSGSEO4+56DqL6Md8eOyLjk0e\n",
+       "jUbdtZbbenFmMOOfoTxShUKhUCgUCmvEunmk9u/f31l71AfKajf55Do/P9+dHF1DhFMoJ0lOoFkW\n",
+       "E/fK6kU4a6et8IvFlbWBvGZMz+pCYS2ZS46+OvtjeXm5Z8XTpqsDYyHwvXnRLLstONchafWJXBnf\n",
+       "lGEONvpnLwLge48F7bT8ibagkYk2s1pf/EUfWWad9dv2xXPM3kJ7dSyLs908NuaJbCuI22J0jTJ7\n",
+       "LO1xySr+2juKfl2PZWVlpVe7K8tm5Z5Y822dl6Hr7dFztk6rd3tY7Q32HGL/cJ2kLHbUnyPDEK+g\n",
+       "a24xrngtzMpgbj2vWa9/Z22xX2zcuLE3V6yPWfujM28Zd+aN14U57bZt29bp2llVWZ055M/2EY+l\n",
+       "63SBlpOTdYwunRGObK6Xx1+Pt+c/MjBmQ3x59M/cklnWnvcqZPH19BOvEtc587SVN6sCbrk9vn5b\n",
+       "lNVjdK3EoSxfewlZ9222ZQva4C/9y2Qxhu5t2EOXcbNmKI9UoVAoFAqFwhqxbh6pvXv3didurBhb\n",
+       "wcAn19Fo1OMAAvzWnhROovZ2OU4DC8PvnUFrddtTZgvTp3FXrrU3DevJnGIZ19KOHTu6e9sDk9U/\n",
+       "Aa5o6+951232bmRrx4h7YRnYAjNsqc6qfcR15qKiffjAIibjh7x4O8ypB9rftm07Vg7QHnMWC/Wn\n",
+       "P/1pyhRvxnR79wBzmRgH1xXz+CP7oUOHUu4zYG+PLW3LjoeGOck8oKZZVq06oh8TZFCZHhkd12We\n",
+       "RMY049hrvW+ZRwpkFqatXJDpBQ8MGLK8XR/NHoms+jz9wYOZecfQB/tKqx/PW+I42SfsubenBv0h\n",
+       "I3+RxWub/aLlEaQ/3hd9T3u/vP+zHrgn1cQdSwTamoGuTeVrPTaZJx4w/q6FxVh5fj366KPdvkXl\n",
+       "edfFM8zOgMzZs8geLPrUshVkcgPPc+9N9h5mtb6Q1W8Rhp4FjnHyXAP+vo0FjOi/2UF/9rYOyWAP\n",
+       "o/udPZOM8kgVCoVCoVAorBHr5pE64YQTOp4yLFJnqwBOja3ng2vNEeb4C06anMRt7biiqfm8bA3i\n",
+       "kVhYWOhO1OYWs9ycmGd5JMwSz++RZej9O3I7K8nVY/GgZPEl7if/x5L1ab4dI1cN52/mHaNfjAmy\n",
+       "ZV5Aj6m9Dlh87W+x+viOzz238FQxrvzOmZIA/TFGrcfOcUm2yN0ve0u4jrFjXlmv7uuOHTs6Hbuy\n",
+       "O4BDzJ4F+m3Lm2wuxztlXuNNmzb15gH38rV4LaiCbA9cVtnYHsmhWBN7orynuG2us/fI2UnAXjHf\n",
+       "px1j9gfktpfI1i77IL/zGNoLSHvI0HqRWVMALyBtMB88D4CrSbsCflbFv43nY41kFblp27FO1gv7\n",
+       "quNvgPdFxmZpaambx+jSFbnpl/dPZ5wCez/QG3PSsrXzjX9nHkZkQZdmV/DvnM3pMWplRS5no7P2\n",
+       "7JF0FX6zLHhNc0/v6Y4jbj+zx8hZdoar8mcchl7L3Hs8HqfcvN5LnHU4C+WRKhQKhUKhUFgj1s0j\n",
+       "tWPHju7UyonbVjHghMqpecuWLallzGnU9YOybBwzRWfvfgHW9NatW3sZD1iEwLw99hJlnhfHc/F5\n",
+       "lrXTyu3YBsDp3RmG/M6WlPVmHqShscrisvy5M8HMSWYMZWe17Q69+3b/sMAz74jj9Bg75hOg/1jb\n",
+       "9GHjxo2p1866wir0GBHHgBVsr1imx61bt/Y8jZkXAH14jrlt1xuyN9WYm5vr5pgt71n95Ht0msVI\n",
+       "cL0ziI5l9bc1ZIZgTka8JK6BBGy5um5b66lhnJljIMsIo21kcYbokBcwYuIFwCNx5MiRnufFcWYg\n",
+       "iyVzPJd537x3OQ5qZWWlF1cD0O2sLF3grDTzImb7xtLSUm8tZdmJYGhODcHcgvzOnr3Nmzf3ajGx\n",
+       "p3hvsec5qzvWth0x0YMzDtsxQeeZN2+IO3Oov9matqwZj2z7b/dnlsfO6yZ7g2WZ2ozdbP9yLTt7\n",
+       "1WehPFKFQqFQKBQKa0Rx7RUKhUKhUCjMQHHtFQqFQqFQKDzPWLcYqcsuu6x7V8p7VWKMeI8JH9Ll\n",
+       "l18eEZP39k899VS8+MUvnmoP/iG40PyOlvgk3vXDtQMH0eOPPx4REWeeeebU73mPDY8T7W/cuLGL\n",
+       "O3BW1p//+Z9HxITzibgL4nRcw8WcYuiBWJI9e/ZExCRGBFkuvvjiLn6Gd/NkCN13330RMeHxg8fJ\n",
+       "NVsAcRbIAu8b9+R613z6+Mc/nnKKOe4GLiS4sx566KGIiDjvvPMiIuKee+6Zuh7eJ/iQiDVAH7zX\n",
+       "3rFjR9x4440REXHRRRdN6cExMsTh3HbbbRERHTcbY8H1ZCtSq4n5guzIQNbLyspKNxbmcbz//vsj\n",
+       "IuK1r31tRETcddddETGJnUEW5gvjTkygMw6RhTE97rjjepWluRZeNrgiv/SlL0VExG/8xm9ERMT3\n",
+       "v//9iJjEYSGL+RBZB86UgVfw8ssvH4wba/tpTjnmlrNfmVusf7j5XAEZvfC7j33sY91czLLL+D/6\n",
+       "oJ9ZTR5khGPt6quvnpKBNc18efrpp7v1yRpytp7lh68OPsRvfetbERFxxhlnREQ/ToPr0Qv6bWvg\n",
+       "mccR/k/W3Mte9rKImIyV5zl6ZF6hB+KX2NMYf9YRejh06FAvfsw6p23G2xmk7EXvf//7p2R0HKh5\n",
+       "QlkXJ554YjdvvSexR7P/O6bM8TWW5Qc/+EFERJxzzjkRMVk/7N2so7e97W1dv+in4xmZL/DbtewZ\n",
+       "EX0OUnSOHll3jM1ZZ501JdOHPvSh7tnCfGU8mZvmZmQ86b9jA5E929OZB+hjy5YtU8+tFsjNXorc\n",
+       "cO2xLzpWGH0gS7sXRUz2LK7bt29ft7cyz9G5q8SzpvhbXHuFQqFQKBQK/0FYN4/U/Px8ZzU9+eST\n",
+       "EdHPhAFYNFy3urqa1hxx1tVLX/rSiOiz3gMsETxcWFoPPvhgRPSrLCPLvn374tFHH42IvnUHsMh9\n",
+       "z4ybDouSUzCWDP12pszmzZu70/ib3vSmiIi49957I2J2JgweFeoo2WrmXs72Q6a27kiWGcVvZ2VM\n",
+       "ob/vfve7U/ewLFQ0ps+MDVZE2w+8HK7F5X66yrprVTnLgzFyJd/9+/f3xoe5hXcMKxDL+tRTT526\n",
+       "nvnC71rOsFZG0GYzUi+H8bTc9ly+8pWvjIiIL3/5yxExsQYB84e5h0zUfvLcXVlZ6axd5hp14py1\n",
+       "xhrjnowR+qEdkK2XoZpZrq7vLNWsRg3jigXNfmCPFnOZPr785S+PiMkcbGv3OPPVc8trlDbwKmJJ\n",
+       "I5NrGrkOz7e//e3u93gEAHIxjm94wxsiIuKf//mfB2Xhevp5yimnTP0/q2mGfp566qleljJABte8\n",
+       "y+J1mQ+MpecgbxNA66nAW2NuQcvNda33IqK/Ru0V4Q3Gv//7v0+1B7Zv397bM+lHlhGGp465yprO\n",
+       "MnFplz3g9NNPj4jpuci/2Xtcq8oV37nez7+s0jfzwlmg/G3bN6sG+wHw89I18ug313lPN8tHW/Hd\n",
+       "tf7oD3XWmAevetWrpu41C+WRKhQKhUKhUFgj1s0j1dZ0wELhZOpTIFYAFU1bq9LeC9rCysGy/PrX\n",
+       "vx4ReX2M0047LSImlqkr3AKs63vvvbc7zZu1ve1jxMRi4HtO+bZIuNfrXve6iIh44IEHpq7zaXpl\n",
+       "ZaXr56tf/eqIiPjKV74y1X9AG1i9yH7uuedGRN/CtOXG/x2/0/7WcQj833JjveBFw6rHGrTlTTu2\n",
+       "vBhrLLb2WiwN2srqgmAp8W4faxCr13qhfawcrKIXvOAFPa8f8v7Wb/3WlEyMjee5vaGuPm094nV4\n",
+       "/PHHZ3pe8Ob96q/+akT0OSVt7fI588R8f0O8X8iAXK4XBByPBhyXAuwV4ndDLPf2Xplr0xY1cwiL\n",
+       "lL0GT0RW08h9wXPXyspY0AZ/mVuMAcDzhId2KPapBWPAemDunnfeeb15iyfi7LPPjoiJ1f6d73wn\n",
+       "IiaxPoDv0QMyZ/xwjAXfLywsdHsz8gH0YM87a8nrAp2/5CUvmfqezzMv0/79+1PGBmDPCXx4tO11\n",
+       "gR7xGrJv0Efi2sDi4mInr+t+mQmDMaMt18nK+PBcC3GID5G9yF5jxtdeY3s7Abp1TBn/5zkEJ6dj\n",
+       "qiIm+zzPc+Ye+55rkgFzrGYwzyr6OO6443pyu1o6YC/O5ruxrgcpNmkedtngmUB28+bNPeoPwGTC\n",
+       "Fc0rLBYAbk/AJEVhvBrjwWFFtgXMhlyoQ2BzZ7IyIbxJIwuvDAkYd5AtOHz4cDfgX/ziFyNicvgi\n",
+       "4BCYToJNKSt22d6jBQukdb+6FL83Qv/frzj+9V//NSImurUs/J9D3K5duyJios+2fdpgbvlwnhVv\n",
+       "Q/ccAkypAvz6rU04sK5czPQLX/hCREw2a9PPmISV19IO9AftK0Bek7mApq9lw/inf/qniJhspD4g\n",
+       "cC8MDNP4GCsrK92cAujD488Y8ABgXDlI+OFFX0ziy//bNcp3pmVi3NxP08/w11Q6wNRU6Jt9hgNH\n",
+       "RL/YLQ8IF6IFftCSpJCRsxqveMUrIuIZ/XmvQAbG5G//9m8jok9mDNiLMVJYD8jufde/27lzZ+91\n",
+       "D3BRR8YvIwo2OTm/ZyxteKHX1vDIDgC0wZ5iouBsL0KWhx9+uOtvRH+/bPvDgz0rJIluXXjWFCqA\n",
+       "79nbcRp4jrfXoktk4dmSGdI2SB0w7z46LGHIOEI+G6OmcwOem+zppmcC/J/9gd9v3769dy16Yd2y\n",
+       "RrN+ZqhXe4VCoVAoFAprxLp5pI4cOdIrT8+J1K+lsGhaksbMq+OTN4GInDht7XKax8LwaylbXm2q\n",
+       "Jr9FFr+qsIXBX7wYtgLoH1YOejGRags+u/vuuyNiYkn5FM//CQBGBvprK8BWg2kI2lcYyIfOTBCb\n",
+       "BabyOoQxwtLIgqqxjtGj04Pbf9tCwiK3hcnnTsVnnO0F8qujNmjTbePdwP2PNw/PjS1HPHWmkMFi\n",
+       "ywJCt23b1htXW1KsKTwLeFCwTO0d82toe3T8um08HnfWHB4GfmOvjqlBaBOd+3pb5B7/1tWfvbpB\n",
+       "L9k8Rz/o2PMHeD7QV+Zuux852cKvOr230A/mpEtaZEkbXh/z8/O9AFwHnzPXsvF3sgmeTD637Ca3\n",
+       "3bRpUy9MAphOBJ1lnjcnlXhPcvttyAXfZd4uPmde4BXNyL/twTO5sZ8B7etbxgn5skQpvMt43P2a\n",
+       "GqAPPFEOym7XAnrgLQpzKqMIcmkiy26dc70paOij9672N97fLIuTbUxun4WhcH37TLfcXMP5wCEw\n",
+       "mefVKI9UoVAoFAqFwhpRFDGFQqFQKBQKM1AUMYVCoVAoFArPM9YtRmr37t29rATejZoi5vzzz4+I\n",
+       "yXvoNqWUNihVT/l53oU6IwgPHCXioXxwYUZnP3E9FDGbN2/uZVPxLhZaDmgWaNvxCNwL6gTK1QPi\n",
+       "HJwm/fGPfzwinimFT1wK7/qdzgt1CiX8iQUhRoL3z1x/3XXXRcSEloXvkdmxBp/4xCe6Ev5+V+2y\n",
+       "CJTw9xg5dR99QuMAXYFjItpCoOgQygenxruQHrKYfsa0LNwLehP043bbNkw/4fnhmEBoPKBOYdz9\n",
+       "Ph8gC1QI27dv7+RwDANto3PaRuceM/ToMfVY8n/W6LXXXttdS3yB40uuv/76iJjonO8dE+M1yrpw\n",
+       "bJTT3//iL/6iW3N8hh4cT4VeWM8u1eH0d+gqaN/rpo2NYS9yPxkjx/Ghww984AMREb1YM/YZ+sJ1\n",
+       "UMTQHmv76aef7u7JXuE1BNALcxedM8/RlzMwaQfqGehwaGfnzp09yi9kga4ki0ejv4wR+z9xgMiC\n",
+       "PrknFCToJWISp0hsINcynu973/siYrLeHY9H/z/4wQ9GxDOULxH9zDED2S+77LJuLFh7bemYVhb0\n",
+       "kmXpIQtjxPonlsop/C0FETrxHuu2mbvsXW6LsWIMkJ3nqJ8XzOXDhw93zyLaRm50yBh57/L6B4y/\n",
+       "KWJ41jmOc//+/d06RW6uRR/I7Ri5/18UMaPR6LTRaPT/jEaj74xGo2+PRqM/ffbz40ej0T+ORqPv\n",
+       "j0aj/3s0Gu1ofnPVaDS6ZzQafXc0Gv2vx7x7oVAoFAqFwn9izPJILUfEZePx+M7RaLQtIr46Go3+\n",
+       "MSLOj4h/HI/HN41Goysi4sqIuHI0Gp0bEf9HRJwbES+OiH8ajUYvG4/HvWIMR48e7Z0Yh2pwRPQz\n",
+       "BxYXF3tWHfCJ0taOT/mcajlJYw1kVBKtdWFr3pa162I4Hs2y872pJbLaPaurq73skSzbhM+dIZXR\n",
+       "FdjzkNHADCErzOnvLZsJMo1ZdWZaOdGZrTv/1hlVjJXroADXKWozSDKPnO9Ff114zlmOruXi+dPe\n",
+       "D3naNdKCuWQvsGsbAWeKzoqlPHDgQE/3rl0GGAN7x7I6SfSFecF9hormegw8/lkGIb/je9aHx8jr\n",
+       "wjQ+bZZfVnA0W8/uP3uT7wkYI8a2rX2V7a1ZTSe37cwyFzb1OrLsR48e7drI1hyw98+y4P1B1owU\n",
+       "HTAGzlxsv8s+Nzm3PXjMQe+3Ge3P008/3Sskia6yunDMG/SS7dHMTWdS2vPdwnuL38QYnh8mogYm\n",
+       "hXd2d3u9KaFmFR6l/6yHWfuiM3PBwsJCukeb4gnZnpc6UuPxeM94PL7z2X/vj4i745kD0v8WEZ97\n",
+       "9rLPRcR/ffbfvxUR/+d4PF4ej8cPRMS9EfH65yRJoVAoFAqFwn8yPOcYqdFo9JKIeG1EfCUiThmP\n",
+       "x088+9UTEXHKs/8+NSL+3+Znj8QzB68eNm3a1LMw+b9Px46lWVlZSYlMOd3ybpzaKtSJyqxBW4uc\n",
+       "rC1LS9dgyyerUeIaHFm9HL9/dpyHPTX79u3rWSucxm2NYQXwfh59YO1llbA5qfMX/bZ6971tWdoC\n",
+       "RZbMY2MvAO0zNhmJZfudLanMg+kYINebysYfGgfmyc6dO3s0C7zzpy1XIs5qsWANzbK8wZNPPtkj\n",
+       "trY3hLmH7vme8R+y3iOm11xE3ysAHnnkkZ5nwRaz2+R6t53FbyA78YtDVZNpuyUqbT/3+NvTypi4\n",
+       "Rhwg7sfxkb5Pe409Z1l8GjEurH/GFBky6iR70TZs2NCT23OK32QxM8xv5jvXM6b2SBCLCqPE8ccf\n",
+       "n1aepm6cx4K2/XlGODzEbOA+mbjWbVCzzyTN7HPWOX1xTCqye0238bz2RHld0JZrfrnyN2Cvoj4d\n",
+       "88Wenhauqp/RMmX/zzy7MIK4jh3zotULjA1Uk+c75J7lNfTzxXMRPXteLC8v9/YtqKHQsb28z7WO\n",
+       "1HM6SD37Wu//iohLxuPx060w4/F4PKOcweB3X/rSlzoFnHLKKR0VRaFQKBQKhcJ64oknnugF8GeY\n",
+       "eZAajUaL8cwh6r+Px+P/wT1Go9ELx+PxntFo9KKIePLZzx+NiPZEtOvZz3p4/etf31kHS0tLsbS0\n",
+       "1J0gfdr16Xk0Gg1WNW5/y/dYr1mMlGMAHKdktJlGyMO9MtJigEWRvSN3nJbfCfukvm3btp53a+i9\n",
+       "eNsfe3uw6oYqVbef26LTYToi+ta9K9dbFmCL3dfbO4ReXH25/cwegkwvrh4+y4KlPZMhLywspFXz\n",
+       "7VHM4hIYG7wezqiy16jNXGUcs8rTtoI9H7I4JmS3587z5bjjjuvmL3+zrEPgKsjZ+vfnGelxK6fl\n",
+       "4/OsCjLXO07D97DHk7+szVZ2r2frLvOkcB1ZvpnXqFq7aQAAIABJREFUyJXAW49XFtPmPSSLS2Ke\n",
+       "eL+gT0Ok1RETj/fhw4dTwle8FOZDHCKhjpisB9acuRYzz07ExOs5xMjQtslvHJ/q+WLvmO89FMdG\n",
+       "fxwDlcXrOMM8i3u0DOwBjFk7X7zn8l22F3Evrwv+7zGy5+5Yn8OqgD6QzR58y55lDmfnhSzud0g+\n",
+       "v01ZXFyM7du3x8tf/vKIiLjzzjsH+wdmZe2NIuIzEXHXeDz+aPPV30bEW5/991sj4n80n//OaDTa\n",
+       "MBqNzoyIcyLi348pQaFQKBQKhcJ/UszySP0vEfGWiPjmaDT6+rOfXRURH4qIz49Go7dFxAMR8b9H\n",
+       "RIzH47tGo9HnI+KuiFiJiD8ZJ6bR3Nxcd7o1b55PpH6POxqNerEJgJOy45FsibhtfmdrwVZjK4Pj\n",
+       "ELJsNGdMZTEvGd+V39uD1kLxKdwWg2NHzCXofmZ8Vo6xaJF5pHyt68ccqybPUL8dOzbkHXPbWVya\n",
+       "M+iwOGdlq/G7Nh7BcnJvz/NZcNzFrBipxcXFnhWaxRk5xiXLNsoyTTM+rDbm0V7MzMJ0tlmWtWvL\n",
+       "3DWf2rnrGDDXtrJVjz4yXWeeLXv+zEXZymmvnuOrsn4yt+wFA9ZX64X1+NjjMCsLN/MOZ3uYPZaH\n",
+       "Dx9OvUD23jF+5usDjrVyXSbD+8mQ/CCLmTRPouG1nu2LGzdu7I1/9jbF85nvs0w573HEBbvOXntN\n",
+       "Fr/qtnkWOf7V/KDAHi70MBRj5DpgeC75PMt2BcieeeqcWdqOsftpjyvwM2oWjnmQGo/H/xa51+q/\n",
+       "JL/5bxHx357T3QuFQqFQKBT+E6O49gqFQqFQKBRmoLj2CoVCoVAoFJ5nrBvX3iWXXNK9y6SWTVsH\n",
+       "JSLihhtuiIiICy64ICKmo/z9btOcYq7E6lggeJng/XHmA9knvMflerh5FhcXu3ofxGVQQwNenpbz\n",
+       "KaIfK0P/4Te76KKLpj7HW+iYIDiFdu/e3enMlZjpD5xCcKfRf3TuzDrLzueu8UE8w8c//vGOC8l1\n",
+       "j/x+mTGCg8zVZB1LZj40x7vBLbZhw4aOCwsuNFecdmVmdIhe6A9xBo7vgCfKHHR8v7S01I0XOkeH\n",
+       "ribsOCxkYS469sH8ZnDWcf3q6uoUp1XEZO7ceuutETHhqyL2gXFnXM21xjpyvAKycz1z96qrrurV\n",
+       "+XJMSMs/1rbNmsvmImOKvpzF0/KEca3jk9w2fIX001mqjoWCP5Hxd02btr4We4XnSlYnBx2+613v\n",
+       "mvqc69mD2F9uu+22KdkZe/R46NChrp/Mlbe85S1T9+Z7foOMt99+e0T056JjaxhrZPH+smPHjl6G\n",
+       "L/x9cMoxV70eGCvm4tvf/vYpGV0rjd996lOfiojJuhuqp0W/Wc/o3DW/HK/lfjIWroHk+fXHf/zH\n",
+       "vUxwzzG4WT1f0ItjDGnb3Hzs6eagu+WWWzouRO+L3MNyIwufm9OVezJG7OnuK/fbv39/jwuR/X/n\n",
+       "zp0RMZkP5olEdsfGOePe+67rr62srHTyIPd73/veqbZYD8jCOQA+zAzlkSoUCoVCoVBYI9bNI7Vh\n",
+       "w4YeV06W5eV6KSsrK2mFZVtctOkMIbdNe+Zoy2p3jEajmZxyzi4AWaVe18uxxW1ZWj04QyHLRnEV\n",
+       "ZJDp3F4x2m37lI3FrPg789Vlesmy+YY8X86ecr+zujDAmZO+3hlmrbWYcSdaP0M6bGU3x1bG0cb1\n",
+       "Bw8e7K2djHMukyHTuWv9ZFheXu5lgmXZhlh9eHK9D2T11dC5OReHKpvbU5TVE6Nfbjur+J/VqRrS\n",
+       "o9ex6wh5vri+1Kzq087UbDOPs2wj173KxspZj7Nq5Fnfhw4dSutf0abrjWX7CL/PZLYstNtykbJW\n",
+       "vCd5vbgmYJZxiNfHb0aGMo5Zz86sdj8zT1tWu82MGq6VOLT+vOc42xJkHv1sjDxnycQbygp0lia/\n",
+       "YV/I9OJneFYB3X1t974sCxOdz+KgzVAeqUKhUCgUCoU1Yt08UgsLC71TrGtWAJ9g21oWrufh2hKz\n",
+       "GLcdt8Tpl6rCPvW2XFzm5clYq7m366dkFXltkbhau2Vvv8ssKd5DZ7yGtl5cA8jxa+0Y2fPi/vhz\n",
+       "68lchLby7EWh8i8ytHPAuqVt19ECWHWMjeeNr3eNpLbmS1Zl3x7XzMoxa7s9UbamuH7btm3dNVld\n",
+       "FL43h2JmoRmMv2s4gbZPLQNBK6eRVW72fLFH1rFYbZ9bb0R7TVbZ3DGGjruwx9KVnj3GrScIWexB\n",
+       "c7+A49U87w3P9bZKdeYdRRbXCcIDB9xve+wz73Orv6wKuvdN7kWbsFEA68NeJnsN+f7QoUO9eEP3\n",
+       "i9/yvb3kmaeOdtGj68q1fUU3eLGyNcc8Zm3aQ5/t//yO+9CXdk1abvOdZswWjh3M+FO9LuzRPNY+\n",
+       "QxwSz6jsLYDHMHu7knnR5ubmUs+6PXPM2axWWe/3z+mqQqFQKBQKhUIP6+aRgl8vos8pl8XCcNo/\n",
+       "cuTIzArOfseZeYEcE+VsA3ukWvZ3Wxo+GbuyueNYfEp3TIE9N9n7+rYt+m0d+j09v82qSduit5ek\n",
+       "fafuuJRZ1WA9FraWsvE3L5wzs9p+2TJqKy63sJVnPr/Mm+Lqwxs2bJjJy+asJI+/vT6z9IqMmzZt\n",
+       "6s1FzxV7AcyQ7rbtJUA2zwfL0vY345Sz19RjYI+UM6Rc6brVexZXYg8CMKcaay7jLGQdWQ9DsWSO\n",
+       "/cpiJt1PV1XOLG/HxoDV1dWZcSbZXAWMJ/2zxZ5511rZM+8F/bTX2zEtbX8icg5G93+Im5JxsUcC\n",
+       "Wbw32wMPnGHIfOD3nssR/Yw3kMX30obvnXlTs9jaId5Hx9Nlnnd7ZvnecWqAmCjPXe95EX3vlfc3\n",
+       "jxHeM3v2maOeL/b8td5Fz9ssHpXPn2udzfJIFQqFQqFQKKwR6+aRak/XjmfJrm3jMjLL2J4CkLG5\n",
+       "2+IyB11mBbZxCBlHoGMXzLGUZe3ZwkDGoYwLe9wsA7BlAAM7MIeS43lsNVqOiNkxMUaWjWQrwN4U\n",
+       "e8faMXVb2fwwGCt7NLN4HXubFhYWerrnGmfTZPFaIKuBlMWatfVyHCtnWeypybzAgHYz7woYj8c9\n",
+       "3WVzxjFPLUdc+ztgazHLyGvbslcsi9fj/xlfl5FllA2tacd0gUznXIfOZ8V3As+rIdmZa7bqM1mG\n",
+       "dNt+PjT+rYyLi4vpWstqUtkLAhyf4/i0LL6z9dRmnHKW3+vC/fTatEfKz4s2LsdzxLJwnfcFx/0B\n",
+       "Z9bRHl7X1gNmT7S9o/aWuVZb9pwEXsPH2lf4zPWgzHcLPP7ePzIO2qHnUZZt6Odn5nnLUB6pQqFQ\n",
+       "KBQKhTWiuPYKhUKhUCgUZqC49gqFQqFQKBSeZ6xbjNS1117bZcD5Hb95v975zndGxPR7fkf6wylm\n",
+       "/iln+AB4v+AU8vt3vxs179toNOrFLvEbZKFtx3Y4BgCuNXh/HIfguJeWU8jZMo4XgMfLvH+ON2p5\n",
+       "mSIirrjiiqnrndUEbr755o7fLAP9ZjzRC6CGjWMB4ObietcuamNy6Kf5p9w/89txPXEFjL+zPs3l\n",
+       "6Pozq6ur3bjBV3XhhRdO9S+Lw4L3633ve19E9GOqnOVG+y0fmuMEkAUeL+Yt1/l65hjXs+acYeO1\n",
+       "ylzfvXt3ryab4ybo5+WXXz71OW0zBswXuLZaTsH2esep3XzzzT3uPMd0mA+R8besjvWAa4/2aY99\n",
+       "ps04zTgFHZ8E4HE7//zzI6Jfq82xUbR/9dVXR0Q/xqhdo+iQ8WeMvC48ntl+4SzIj3zkIxEx2S9a\n",
+       "Ljb6ST9om7nluCXLAr+Z+TA9pvwOLjdkmZubSzPbWP/wuDoz2GsUnXtdZPsuc/3iiy/u5muWxYoO\n",
+       "vS4cg0t/mS+eX64nhb5uueWWbjy9b3kNsf7Zuxy35n7ecccdXT9bGcmwb9cd3JnI7XH0+KJz82d6\n",
+       "LXtfZE1bz22Mmdt2vJX1ApdrhvJIFQqFQqFQKKwR65q1x6nYXiNnPlD5tM2C49/OtnEmiLMNMrj6\n",
+       "sE+7oD09u017vfh/lp1lSzOrpgvskVldXU3rgWRZNdZPlinhejvH4sPKMqKALQPaxgvCX7iWDGq9\n",
+       "uErxUKYUWTRtXaMhGTIZrTfrxRxSrdfA44ncZhZ3ZhBwlp69QQbt7t+/v1szruoLqBaNjGRtZhmh\n",
+       "Tz75ZET06ynZMwEOHTrUy8bxfAW0xbrOMumAM6LMg9nKnvWH32QceV4XrhINnEE3VMPJ/3Y2XVaL\n",
+       "yWvRHnrDlb3bzFv/5oc//OHUtYxR1namD2dBAjwQtHvkyJG0ev6sDErrxePujG2Pdfvmgv66GjrI\n",
+       "sludrQp+9KMfTf1/+/btETE8FyOmK8Z7f8gqvntOZnsX64c56EyzVr/InVV69zywd2sW117GrWcG\n",
+       "iLYN1yoDHhP+b17M7Pos07K9N+BZxG+YH357MAvrdpAajUY90k0Kb2UPmPaVh8sUgOxBwsB6wrAB\n",
+       "eBJnhwIWxsGDB3sEtkN9HPp/JosnTObab2V3UbLsQED5fbu6M0Jg7ulio0MP98zlmuGEE06IiMlB\n",
+       "w0UwDeaD03rRT0sRwmHF9BxZMcRsw8yuZ74hS0u544cucrUHnrY/GaWMX79lNA4nnnhiRDwzFmxk\n",
+       "2abLPEcf6D6bW/SF/tnQ8LrbuXNnJ4N1npEQuzhkJktG5uqHfNu2X9Fm1CZu24d099MPBg4LfjXY\n",
+       "fpa9ivLc2rVrV0RMHnqzUrA9XzzfWiAX4856zmhWOGgzZ138NHvA0OeTTjqp90oKsIdmpUaGSgi0\n",
+       "cBmZLKW9fcZkdDVQgVkvWTkZDk6MO9e7wC3YvHlz77VYRj/kOcqazcofoEdCZBgrz/32WlPgZOVS\n",
+       "0BPjyd7DPPBehB4x2Phr47Bt02vJr52BDTPGINO5X6W3z3Zf+6IXvSgiIh5//PGI6I+rD94Z6tVe\n",
+       "oVAoFAqFwhqxbh6pxx57rLMWbBVmRMGtlcxpMysk6dN9VpAto23xX9AGyNoNnL3as2XpEznAI+Pi\n",
+       "eWCI5JQTsz0GWWFO+u9+WxZO/XxuK7CV7ViFAIfAeGLF2LNgPfp1pYN28YRE9OkBMgJsgOXkVxaZ\n",
+       "xxOZmbvopSXhBqeeeuqUTBl5LbjvvvumZMa7lhWfpf9bt25NXxODk08+eUoG69gy4cF0Ab8sYHb7\n",
+       "[... base64-encoded PNG image data elided; the figure is the conv2 filter mosaic produced by this cell's vis_square call (source below) ...]"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7ffb01f42290>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "filters = net.params['conv2'][0].data\n",
+    "vis_square(filters[:48].reshape(48**2, 5, 5))"
+   ]
+  },
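As an aside on the reshape in that cell: for the CaffeNet reference model, net.params['conv2'][0].data holds the conv2 weights with shape (256, 48, 5, 5), so taking the first 48 output channels and flattening the (48, 48) filter/channel pairs yields 2304 separate 5x5 tiles for the notebook's vis_square helper (defined earlier in the notebook) to lay out as one grayscale mosaic. A self-contained sketch of just that reshape, using a random stand-in array in place of the loaded net:

    import numpy as np

    # stand-in for net.params['conv2'][0].data (CaffeNet conv2 weights)
    filters = np.random.rand(256, 48, 5, 5)

    # first 48 output channels, flattened into 48*48 = 2304 individual 5x5 kernels
    tiles = filters[:48].reshape(48 ** 2, 5, 5)
    print(tiles.shape)  # (2304, 5, 5) -- one tile per (output, input) channel pair
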
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The second layer output, `conv2` (rectified, only the first 36 of 256 channels)"
+   ]
+  },
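The code cell that follows in the diff is shown only through its (elided) PNG output; its source sits later in the JSON and is not visible in this excerpt. With pycaffe, pulling a layer's activations for visualization generally looks like the sketch below; the blob name 'conv2', the 36-channel slice, and the reuse of the notebook's vis_square helper mirror the caption above, while the exact upstream source may differ:

    # activations of the conv2 blob for the first image in the batch;
    # net.blobs is pycaffe's ordered dict of layer output blobs
    feat = net.blobs['conv2'].data[0, :36]

    # tile the 36 feature maps into one square grayscale image
    vis_square(feat)
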
+  {
+   "cell_type": "code",
+   "execution_count": 31,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "[... base64-encoded PNG image data elided; the figure shows the conv2 output described by the markdown cell above ...]"
+       "hKH4TUwtdTrO+b2tWLEisXz06NGWthdyzhctWpRY1pIOWu6AGQA+pKlRPx3Yrte6pq3N8qeu9Rpu\n",
+       "9frNi3RePUjnAQAAlISHKAAAgABULC+BTvhJZW1kWbBgQWJ59erVLt67d6+LO2XUWBHpD39C3fHS\n",
+       "lB3SacqoiZXrVUgXilmzZiWWR0ZGcr2vu7s7/4Gh49ESBQAAEICHKAAAgAA8RAEAAASgT1SG0Eq/\n",
+       "VZeNQPsaHh7OXMZHzZ8/P9e/37x508Uh1bL9itu6vVarfjfRwoULE8tDQ0Mtbc8vJVFmX7Qq+54e\n",
+       "O3Zs3O+ZN2+ei8+dO1fk4RRO+2m+//77idc6pW9mkWiJAgAACMBDFAAAQADSeRlCm+zzTChZBobe\n",
+       "IkvIxNg6Oa9ZWHO+bmPZsmWp62nqYOnSpYnXfvWrX7n4+PHjY77fH4avaQkdrn/x4sV7HPEH/IrW\n",
+       "Wqlay09kva/sCZyL1Gr6zqeV4Jso73Xgu3Llyrjf09vb6+ImpvOmTJniYv29+l1adL26/s41TfBD\n",
+       "VBRFR83skpndNrNbcRw/EUXRXDP7/8xsuZkdNbP/HMfxhQKOEwAAoFFaSefFZvZsHMcfj+P4ibv/\n",
+       "9i0z+3kcx2vM7D/uLgMAAHSc4AmIoyg6YmYb4zg+J/+218w+E8fxUBRFfWb2chzH67z3MXStJA89\n",
+       "9JCLd+3a5WImq6xG0RMQ+03pKm+qWZvfdXRZFh3ldvbs2dT1NH28fv36xGuHDh3KtY1WMelz9fJO\n",
+       "hhs6urnpNE1c1Wjasicg1tSrVn/XiafNzO6//34Xa4X3q1evFno8TVH2BMSxmf17FEVboyj6n+/+\n",
+       "28I4jkcT60NmtnDstwIAALS3VjqWfzqO49NRFPWa2c/vtkI5cRzHtDoBAIBOFfwQFcfx6bv/fyaK\n",
+       "oh+Z2RNmNhRFUV8cx4NRFC0yMyoHAgCAtvPCCy/cc52gPlFRFE03s/vjOL4cRVG3mb1kZt82s+fN\n",
+       "7Fwcx38dRdG3zGx2HMff8t5L61RJPvaxj7l4+/btLqavyIf0HJ08edLFfk5f+w/pb8T/vWj/Aa2K\n",
+       "rRWKzcw++clPjrkvv5/IG2+8kf0BYH19fS7WKthc59UI7Z8T0j+vCfz+fnrsR44ccfGtW7dKO4as\n",
+       "cz5nzhwX+5X5b9++XdoxFa2J10eePlGhLVELzexHd7/ISWb2/8Rx/FIURVvN7B+jKPoju1viIHD7\n",
+       "AAAAjRb0EBXH8REz2zDGv5+3D1qjAAAAOhoVyzvI4cOH6z6ExtGmbjOzJ5980sWaVjt69GhiPa3G\n",
+       "q5W0/ebyxYsXj7lff/Lan/3sZ/kOuGGeeuqpxPJrr71W05F8iErJ7akJKZq8VftXr17tYr9q/7Zt\n",
+       "21ysZRz8Cu3Xrl0LPs57WblypYv1M4VWYW+Cdqrur5g7DwAAIAAPUQAAAAFI53WQy5cvj/s9mu7S\n",
+       "ZuFTp04FHcP06dNdXGZzdl7+6Lf9+/e7WEfT6Eg9M7MLFz6c8rG/v9/F/mgXfxTeKD89mGb27Nmp\n",
+       "+y2CjuIJGYlbdvquadcLWqMVrUNHhuk1q/ekIq6PvBNv6+/SH7mrxxE640ertOtGu0547dNuE+2E\n",
+       "ligAAIAAPEQBAAAE4CEKAAAgQK19onp7exPLmq++cuVK1YdjZslceFb/FB3arnl7s+QM9lXy+9eM\n",
+       "mjFjhov986p9erQStF+lV3P/mnf3h/9q1VntV/X2228n1iu6708aLWlgZrZq1SoXa7+lhx9+OLFe\n",
+       "d3e3i1ddgqajAAAgAElEQVSsWOHi48ePJ9abP3/+mPv1Z7DX/lfah6GI86DlFHTbWdv3z4vO1v7K\n",
+       "K6+k7mvdunUu1s/oz2av/WF06Lff7yGkXIFeY6jfsmXLXKzX4qRJyT8vBw4ccLHfz0jpfUz7R2X1\n",
+       "iXrooYdcvHv37nsc8Qf8/oz629H7pN+vUu+nIf1Qi1ZmPyi9D5olf8tV3cP943jggQdcrH3wzJLX\n",
+       "1cGDB8s/MKMlCgAAIAgPUQAAAAGCJiBuaYdRFNc1LBQAAGA8siYgpiUKAAAgAA9RAAAAAWoZnacj\n",
+       "LlCcJUuWuPjEiRMuLuJ86wi1vNW41d/8zd8kll988cUx46ItXLgwsTw0NNTS9rIqjGuauuxrXCvN\n",
+       "62fcu3dvrvf39PQklkNGGRVRnbpVVZ5z/e51tGXWSLNOkTUiTUd26jXhT8Kt6+koTf83tWjRIhfr\n",
+       "aLDTp08n1tN96UjRBQsWJNZbs2aNi/W78ids18nFdb/6XZslz4Xyu6no+3TkuZ4H/5jOnj3r4o9/\n",
+       "/OMu/pd/+ZfEe/gbWo08XY9oiQIAAAjAQxQAAEAAHqIAAAAC1FqxHMV65JFHxv0erbidVaU3pB+U\n",
+       "+sY3vpFY/spXvpLrfdOnT3dxyCzufh8ora6s/RT86rt+P4i09bRvktI+FVnby8uvDK/9KPbv3z/u\n",
+       "7fmzBYT0iaqrH1RdtNr9u+++W+ORVC9rBgm9Dm7evOli/X2ZJfvxaNV5v4q93nv09zUwMJBYT/sP\n",
+       "Ke2/aZa8x2nla7+/S9r17P829HMcOXIkdXvaT0v7W/lVwPXz62dM63uFZqElCgAAIAAPUQAAAAFI\n",
+       "53WQRx99dNzvSWsSL9uPf/zjXOtpCm/jxo0u3r59e2I9nQQ6KzXlTyA8yh92vHz5chcfO3YsdXsj\n",
+       "IyNj/nur6TufP5nrjRs3WtqeP7wbH+VPdKwpWk1bTXSantK0lU/TXVqWQycPNjM7f/68izW156f9\n",
+       "0pw5cyaxrL9fnUDbX08njtf7Sda9QdOc/r1Aj13vs/49V/elv3M9D0XbsGFDYtm/nyI/WqIAAAAC\n",
+       "8BAFAAAQgHReB9Fm4Xbyx3/8xy7+27/929T1tm7dmvqapvA0DZM37eKnxzQFELK9Iugoo7ypDBTH\n",
+       "H63lj8zsdJrGuu++9P/eTqvk7o/OS6sw/sADDyTW01Fpmn6fN29eYj39Xeq+7ty5k1hv8+bNLtZ0\n",
+       "mX+/9I93lD/qTo9Jj10ro5uZvfHGG2Nuz6f71c+k5yiLfjf+Z89L9+WPzC5S0TNIFOHTn/60i0Pu\n",
+       "s7REAQAABOAhCgAAIAAPUQAAAAHoE9VB/FnTi6T9I1odXu/77ne/6+LvfOc7ide0X8qf/dmf5dqe\n",
+       "9lvyyz6888474z6+kH5QoRXLtbqy5udPnjw57mMogpaVMDPbu3evi7OqWCsdEt7T05N4befOnbm2\n",
+       "of1pqqqU7u+nzL4iTZT3d679eBYsWOBiv9yB/ga0f5lfakD7Kmk/KL/Mh1YzP336dOrxaRXwTZs2\n",
+       "ubi/vz+xnv729Nj9PlE6W0BIH6Yseq/xS2yk0X5Qs2bNSrym17D+XusqaVBlH6i8f7NeffXVlvZD\n",
+       "SxQAAEAAHqIAAAACkM7rIGlppyJSIUWn8JSmSb75zW+mrrdkyRIX501vhaTvipA3fec3v2tKIOsz\n",
+       "asojbWh2Ed56663Ect4h1EuXLnWxVp3Pqv6epQmTHYcOH59I+vr6XOxPBKzfoZYk8WcY0N+O/h78\n",
+       "iuC6rPe+RYsWJdbTdF7eibs1lbZ48eLEazpMX++Lp06dSqyn6SRNc/r3hrRZDzR1ntcXvvCFxLKW\n",
+       "P/jhD3847u3ltXbt2sSyfqf+eSmTTlj/yCOPuPjXf/3XE+v95V/+ZWH7pCUKAAAgAA9RAAAAAUjn\n",
+       "dRBtqlbajO6PcKmqErY/ciVkpFNdI9SKoOkBrcjsp0nzfsYyU3gqNIV14sSJXOvNnTvXxVEUufjc\n",
+       "uXNB+y1T2uTV+JDea/yK4JrS0u93cHAwdRtaEdy/Z+iIPL2P+SNjNcWj6S3/+HSUoKYHe3t7E+vp\n",
+       "iEG9r128eDGxnr6m17lPR1WHprtHbdmyJbGsKa0y7du3r5L93It2HXj99dddXOb9hJYoAACAADxE\n",
+       "AQAABOAhCgAAIAB9ojrI6tWr77lOEX2gNmzY4OK8lW8nWrVnrX5slszJnz9/vuXtVzXretG0f4pZ\n",
+       "sv+L36cE7UEr0mvfP/83kFa53v/etT+dVhXXPlVmZkePHnWx9oXJGlKvZVL8a9GvnD5K+0CZJT+j\n",
+       "9r/SCupmyf6n+hv1j0/7ArbK71PVah+rdqPfjfZrO3jwYGn7pCUKAAAgAA9RAAAAAUjndZCsYbRF\n",
+       "0hSeDtc3yz8pbV00jaDDnUNTSX5KYNTZs2eDtpdXO6XwlKZd0Bk0HaVpZv9eoBW4NTXnVzbXa0Qn\n",
+       "KvZLEoRM+KvVwjUdmMW/ZrUkgX52//PqxMV63/FL0aTde9LuLUgXMll8q2iJAgAACMBDFAAAQADS\n",
+       "eR0kbSJLTbllpdu0mfrSpUu59ll2+q7oiXZ1YkxN54XSyZ2LUNWoO39SVU2bkHLDeOiIX01b+akV\n",
+       "HSGl6W6/G4Leh/S6PHDgQGK94eHhcR/r0NBQrvX0d+2/R1/T0WD+qFsdkaufUd9jln4fIp3XHmiJ\n",
+       "AgAACMBDFAAAQAAeogAAAALQJ6qDpM1UnbffUt5+UFUqosJ6mjt37rjYHz6dt/+V9rFSfnXlvP03\n",
+       "tKKyVjYuup9SVlVnYDz0N6p9mHp7e1Pfo1XJ/SH/Wum7q6srdb0yaT9S/7enpRq0/5beT8ySfcIO\n",
+       "HTrkYv18Wfsq4vNqv6pO7Ouo59+snr9htEQBAAAE4CEKAAAgAOm8DuI3E7cjvwpxVZW5iyifoEKG\n",
+       "X5slh353YvM7Oo+m8zRN3N3dnVhPh/brsH6/hIBW+tY0u39vKDO9p2lEjc2S96SsmQ708+p58T+H\n",
+       "TtSs93AtFxHqueeec/Ebb7zh4tD7U9P45TFI5wEAALQJHqIAAAACtH/+B07RKak6ZKXvQiqqtxsd\n",
+       "3dQEOsGqWTLVApgl03k6EtgfrabpKU116aTAZsmK4Jp2qjK9rfv1ZyXQVKSOzvUrkff397tYz8WZ\n",
+       "M2cS6+nn199XERXLf/rTn7a8jTw0JWmWvG/4n7dIeSeRLhMtUQAAAAF4iAIAAAjAQxQAAEAA+kSh\n",
+       "bRTRD0qHF5dZPsGfmV2Xy6zCXjS/n8eNGzfGvQ397JMnT255e2gWLVGgfYn6+voS62m5Ah3K7/8O\n",
+       "/X54o/xrMW8f0Dlz5rh4ZGQk13uUX0pBj+P8+fOp79N+QkuXLnWxX+JAfwNaFsJfr8m0NMtEQ0sU\n",
+       "AABAAB6iAAAAApDO6yD+JLrjpUOQ0ybWbXfadL5x40YXb926tdD9+MO7/eUm0zTM4OBgy9vTz076\n",
+       "rvPcvn3bxfr9Hj9+PLGeprT0XqUT9ZolyyRomijvjAz+pLSaFtOJwf10oC5ryj20G4GmDlesWOFi\n",
+       "v3SBlgDwz0Wn8UshaPry2LFjVR9OIWiJAgAACMBDFAAAQADSeW3Mr6TrN5WOV6em8JRWBNYUno7g\n",
+       "MQsbxZNXlZXXQ1K0mk4B7iVttOnAwEBiWUey6XXpT/CrFbxDRn3529NlTS2njQI0S/4GQqv066hF\n",
+       "raw9b968xHp6/rQqu6Ye25mmcf2RmO2awlO0RAEAAATgIQoAACAAD1EAAAAB6BPVxnRosVl29Vxk\n",
+       "K7MPlK/sflAqpH8TfaIwHv59aJRfzkKXtQr4jBkzEutphXsta5C30r/2qfK3oXFvb29ivX379rk4\n",
+       "tB9UGu0f5pd+SLsf5P0dzp4928UXLlwIOLriaT80/Rxl32e1v9m5c+dK3dcoWqIAAAAC8BAFAAAQ\n",
+       "gHReB8k7ISeKo03pTVR0WgLwhdx3NOWWN8XjVyzX9J5OCuxXBNfXNPXob6/o+6cen6bZ8v4mtdxB\n",
+       "lqak8JSWj6iyq0RVKTxFSxQAAEAAHqIAAAACkM7rIH4F83akTe9mnT8h50TjV5PWSaC1KX7v3r2V\n",
+       "HVPT+NXzNa0zkSdw9q8dndRXz4uf0tF7iG7Dr1iu956iU3shafUmpuny8iuTdzJaogAAAALwEAUA\n",
+       "ABCAhygAAIAA9InqIJ3QX6Ld+kC1c7+FOnR3dyeWDx065OLBwcGqD6eR/KH3Tf9d5x2Kn5f2TdL7\n",
+       "gV/ZXM+TVsXOOp7Jkye7WKummyVLIzShXIxWbkd5tF9myN8fviUAAIAAPEQBAAAEIJ3Xxvwhuv4Q\n",
+       "YBRj2bJlLj579mzitaJTGc8884yLd+3a5eKyK/FqakRTbhcvXix0P0ySfW9nzpyp+xBqlZZS8Sfk\n",
+       "1arnmvryS0TopLR6nWdV0tZyMWkTLJetnbo26Dk2Sx775cuXqz6ccdm6dWtL76clCgAAIAAPUQAA\n",
+       "AAFI57Uxvwru8ePHazqSzqbn9fd+7/cSr506darQfW3atKnQ7eWlk6VOnTrVxUWn84BQ/mg6XdaR\n",
+       "dZp+NzPr7+938bRp01zsj6w9efLkmPu9dOlSYlnTe3fu3LnXYQfTdGUonSBd06H6ey+C391gyZIl\n",
+       "Lv7Upz7l4pdeeqnQ/TYBLVEAAAABeIgCAAAIwEMUAABAAPpEdRC/ou+oJgzXzWvmzJmJZb8/Qqt0\n",
+       "KK729+nr60usd/r0aRfrOfvBD36QWG/+/PmFHl8T1FWtWb/7rO99wYIFVRwO2oiWGvGH1OvvXKu/\n",
+       "L1y4MLGeli/RPj5+5fAi+iq1Sss4ZJVqqGtGBe1fltbXrFPQEgUAABCAhygAAIAApPM6iA7lVYsX\n",
+       "L3axpqnM8g911Yk7y2zOLjp950ur/B3a5OxXMM9D0wNFD5Hu6elJLF+/ft3Feb+3sr+DVver1zOq\n",
+       "0U6T4Q4MDCSW9d6ln8OvgK4lEzQ92MQuEFkpPIyPztDgl9HIo31+GQAAAA3CQxQAAEAA0nkdRJsl\n",
+       "1YkTJ8a9rVmzZiWWy6xcvWbNGhfv37+/tP00RdEpPK1KXPRoHE2FmDVjZFJdI44mMj9N3E6OHj1a\n",
+       "9yE0gt4ndJSiWX0jcpsgJIWnaIkCAAAIwEMUAABAAB6iAAAAAtAnqkKTJiVPt5YX0Grjfn46bYjt\n",
+       "hg0bEstFDv0usw+Ur8x+UH7Zh8HBQRfXNXRZZzjPW1ohq29SmX2EmtAHykefqOo1cZg/xoffTTlo\n",
+       "iQIAAAjAQxQAAECAWtJ5n/3sZ83M7Omnn078e29vr4s1nbRo0aLEelEUuViHi8dxnFhPJ97V9fxU\n",
+       "Vdq+jhw5klhPq8ROnz7dxf6kuX4l3FE6+a1/THPnznXx1KlTx3y/WTKt4084/Nxzz6W+ryw6bNYs\n",
+       "+Tn0WJctW5ZYTycJ3bp1q4unTZuWWE/LH2zbti31OPQ70NSDX7147dq1LtZ06qFDh1K3XYTnn3/e\n",
+       "xZrOO3z4cGK97du3u1greBedVlu3bl1iWUtavP7666nve+qpp1z82muvjXu//kTPeo1oevrdd99N\n",
+       "rKfD1OuqqD6RaeV71O+Tn/yki/U3tHnz5joOx774xS8mlleuXOniv/u7v0t932/91m+5WP/e6n2w\n",
+       "bPq3yf97kQctUQAAAAF4iAIAAAgQ+Smw0ncYRXHV+wQAAAgRRZHFcRyN9RotUQAAAAF4iAIAAAjA\n",
+       "QxQAAECAWkocaIkClEP7nWlpBb/y9fDw8JjvyUuHxpuZzZkzx8XLly938YkTJxLr+UP7R/llG3T7\n",
+       "Otu2X1ZCl/VzaFkKM7NTp06NuV+fllrQbfvD67WMhpZtaPo13tXVlbqsn8m/JvTza4mOUFoqRIfR\n",
+       "+/tNuzb135t+zptCz1PWbz5tPf89nPfy+edc7+nnz5+v+nAmjDx/E2mJAgAACMBDFAAAQAAmIJ4A\n",
+       "NFXjVwRvtdyE//758+e7WFM1aek7n1/tPa36+40bNxLLU6ZMcbE2b4c2desk0P6E0Kqnpydo+3Xz\n",
+       "z5+/XJVr166Vtm2tRKzfkz9jQd5JoNP4VfvzTvT6mc98xsVvvfWWizUtbBY2YXUWTenfvHnTxfob\n",
+       "8l9Ds/jdMlAfWqIAAAAC8BAFAAAQoJZ03qpVq8zM7Pjx44l/b3WSVR0ZZpacyPf06dMtbbud6YS8\n",
+       "fpN9q3QSX7PqJof1U2yattNRfGXzUy9oDr2/6HXvj+xsVd70nU7ObWb2yiuv5HpfWgrv0UcfTSy/\n",
+       "8847ubaXlqYjfdc+ND2t96Ay0+MYGy1RAAAAAXiIAgAACFBLOm/SpA92qwXDzMwGBwfHva1169a5\n",
+       "+MEHH0y8pim8iZzO0yZeP/3WKj81MjQ05OJDhw4Vui+1cePGxPLKlStdrGmNvXv3lnYMaB9aQPTc\n",
+       "uXO53qOj+8ySKcGDBw+O+xiyRoqOdnEwy/+7qTJtjWbRa1FTe6TzqkdLFAAAQAAeogAAAALwEAUA\n",
+       "ABCglj5Ro0Pu/Uq/2p8mbyVt7fPiDzUO6WPVznp7e8f8dy1xUPRklf73pH2udL9F0z5QZmZr1qxx\n",
+       "cd6h3loCQye/HQ//Gh6lfXDMyj0XuLesqvNpdHJus/R7kv9daz/NXbt25dpXSP/BvO/xS7/4k3Kj\n",
+       "/ehsEHpvue++ZLvIRO4LXBVaogAAAALwEAUAABCglnTeaFOkPzx+/fr1LtYmypdffjmxXlpzdDun\n",
+       "TLq7u13sV27PW0n4zJkzY/57FEXhB3YPmoIte1/9/f0u7uvrS7ymw9bzljUo4ljTtuGXYNi9e7eL\n",
+       "i6hyrhOQtlrpH2PLm+Jdvnx5YrnM30BeK1ascLFOYGxmtnnz5nFvT9NHqJ+mkPVe4H9P2t0gb2V9\n",
+       "jA8tUQAAAAF4iAIAAAhQSzpvtKrqnTt3Ev8+f/58Fy9atMjFGzZsSKynE3fqNvwRW6OV0c2aMUpB\n",
+       "P5NZsmL7rFmzXHzlypXEepqmO3Xq1Lj3q+kebfr1XytC3lGVeemkrZ/+9Kdd7I+I8lO+eYSM2PKl\n",
+       "pXx05F8ZSOGVQ1PG/ujetO4Chw8fLvWY0vhpxGPHjrlY0+xHjx4N2r5WxaYSdrPotagjoi9evJhY\n",
+       "z/9bguLREgUAABCAhygAAIAAPEQBAAAEqKVP1PHjx83so0P3d+7c6eIZM2a4uKurK7Ge35dq1Ouv\n",
+       "v17UIRZG+yAtXrw48ZoOOdVctt9X5+zZsy0dgw579auaa9+iffv2tbSfLNo/zSyZx8+ybNkyF2s1\n",
+       "3jfffDOxXt7K0Gn8Pkzat0v7H/nXXlq/Kr+adBFlDVA+7Zs4MDDQ8vYee+wxF2/btq3l7SntA+Ur\n",
+       "or+flkYYvWejGfT+pPdFv8+r9mvLe8/F+NASBQAAEICHKAAAgAC1pPPyVODWoZntPExTU0FFN+fn\n",
+       "pcPw/QmI/QkryxLalKwpCx2q7ZdSSCutMG3atMRyWpojdALiNCdPnix0eyjOE088kVg+ceKEi7Wy\n",
+       "fKg//MM/dHHeSdC/853vuPib3/xmy8eQl6Z7/JT22rVrXVxEahPF0Vk7tKuA322g6PsaPoqWKAAA\n",
+       "gAA8RAEAAASIiq4wfc8dRlG1O5yg9HttwoSoRfM/k47+q6uad6ef8yYKOef+JK2a4q3yfvhHf/RH\n",
+       "Ln744Ydd/I1vfCPX+3WSYbPwyuTj5Z8jrvXy+ed84cKFLh4eHq76cCaM0fMeRZHFcTzmhU5LFAAA\n",
+       "QAAeogAAAALwEAUAABCAPlEdqmn9c3p6ehLLOrT63Llz496eVlo3S37GuirzNu2cTwRNOOd+H6tr\n",
+       "166NuZ5e82b5Sr00EX2iquefc62sf+nSpaoPZ8KgTxQAAEBJeIgCAAAIUEvF8lbNmzfPxTqJ7+3b\n",
+       "t+s4HKR46qmnXLxgwYLEazqs/MUXXxz3tvmu0RR++k5nAdAJq/2UdkgaGzCrthQHstESBQAAEICH\n",
+       "KAAAgABtmc7rxGZwrT585syZxGtXr17NtY3u7u4iD6llmnLr7+9PvKZpjtWrV7v44MGDLe83LZ2C\n",
+       "iaWrq8vFN27cSF1PU81FVH9Ou+Y68b6FepDOaw5aogAAAALwEAUAABCAhygAAIAAVCxvoOXLlyeW\n",
+       "tTr32bNnXexXqp08ebKLtRpyEyoKa78nM7NPfOITY663bdu2xPKhQ4dKO6YQ2s/GLHmetS9ME875\n",
+       "RJBVsby3t9fFfj9DhKNiefX8cz5t2jQXX79+verDmTCoWA4AAFASHqIAAAAC1FLiYPHixWZmdurU\n",
+       "qTp233jHjh0Let+tW7cKPpLiHD9+PLH8yCOPuFgncJ0zZ05iPW221irndckaKo9mKbq8BaUz0BSU\n",
+       "OGgOWqIAAAAC8BAFAAAQoJZ0no5owsSgKTszs0mTPrz0hoaGXOyPxmtCCg/t6cqVK4Vur9NTeKPd\n",
+       "LEbR3aK56FbQHJktUVEUfS+KoqEoit6Vf5sbRdHPoyjaH0XRS1EUzZbX/jyKogNRFO2Nouh/KPPA\n",
+       "AQAA6nSvdN4/mNkXvH/7lpn9PI7jNWb2H3eXLYqih8zst83sobvv+a9RFJEuBAAAHSnzISeO4/9u\n",
+       "ZiPeP/+mmX3/bvx9M/vK3fjLZvbDOI5vxXF81MwOmtkTxR0qAABAc4T0iVoYx/FoJ5YhM1t4N15s\n",
+       "ZltkvZNm1j/WBm7fvh2wW7SzCxcuJJZPnDjh4pGRkTFjoBXab0Sr/vtlNHQWgE4xa9YsF1+8eDHX\n",
+       "e+gDBYxfS+m2+INiFVkFKyhmAQAAOlJIS9RQFEV9cRwPRlG0yMyG7/77gJktlfWW3P23j2DEFQAA\n",
+       "aLIXXnjhnuvccwLiKIpWmNm/xnH8yN3l/8vMzsVx/NdRFH3LzGbHcfytux3L/5t90A+q38z+3cxW\n",
+       "x94OoiiK58+fb2ad2YzeFFkTszbBmjVrXDx16lQX79mzJ7Fek6uw+5p+zjtR3nOur1HtuTVNmYBY\n",
+       "U7R6DO+//34dh1OqppzziSbPBMSZLVFRFP3QzD5jZvOjKDphZv+Hmf2fZvaPURT9kZkdNbP/fHdn\n",
+       "u6Mo+kcz221m75vZf/EfoAAAADpF5kNUHMdfT3np+ZT1/8rM/qrVgwIAAGi6WiqWV5XG0yrZ2vx5\n",
+       "9erVSvaPdJrC6+npcfGDDz6YWO/w4cMuvn79evkHdlfTJj7GvS1fvjyxrBN50yjeebSC/MyZM12c\n",
+       "dzQiUASKYQIAAATgIQoAACAAD1EAAAAB7lnioPAdRhGdE8YwadKkMWOz9L5Aq1atSiyPlo4wM9uy\n",
+       "5cPi8U0cDqt9GPr6+lys/djMkpXOr1y5Mua/m7U+rHnp0qWJZT2Offv25doGJQ6qxzlvjV7n165d\n",
+       "y/UehttXzz/n9913X+prKE6eEge0RAEAAATgIQoAACBALem80XRVXZVl/ZSRVr69fPlyZcexaNEi\n",
+       "F/f3fzhXc3d3d2K9/fv3u3hwcNDF/nenZQN0WH7Tm9vnzZvn4t7e3sRrXV1dLtZJi8+fP9/yfjX9\n",
+       "qcOlQ7dPaql6nPPq+fcd/c0yC0VxpkyZ4mKdTNuMa70qpPMAAABKwkMUAABAgFrSeU8//bSZmZ06\n",
+       "dSrxmo4O0aqzN2/eTKynaavZs2e7eGBgoNBjbWftlObQ0Yh+ijdk4lhN186ZMyfxml47IyMjLi4i\n",
+       "PdhO57xT6DnXKvNm1Va4n0j836HOMnDw4MGqD2dCYERkPUjnAQAAlISHKAAAgAA8RAEAAASYdO9V\n",
+       "ijdaoVqH+Jsl+8a8/fbbLt67d29ivatXr44Zoz1l5fe1L4D2f/Mrlk+ePNnFWVWY6TfXuZYvX55Y\n",
+       "zltpfsaMGS7WqvjIx+/bCkwktEQBAAAE4CEKAAAgQC3pvMOHD5tZMmVnxkSKE8nDDz/s4tu3b7t4\n",
+       "aGgosZ5OtKkVxvU9ZsnSCFRNnphCU/vtmsLTWQ7M6ktV5524GOhEtEQBAAAE4CEKAAAgQC0Vyyvd\n",
+       "4QSVt3q2Tr7sp8jKNGvWLBdrdfosS5cudfG5c+cSrzUhpUDF8upN5HOuowrNqktLUj27epzzelCx\n",
+       "HAAAoCQ8RAEAAATgIQoAACBALSUOUJ/RavGj5s2b52ItDeCXGiha3n5Q6vz58y5uQh8oM7Np06bV\n",
+       "fQhoGP1NPfjggy7esmVLy9vW661dSzMAnYSWKAAAgAA8RAEAAASoJZ032iT93nvvBb1fJ5u9detW\n",
+       "rvfoUH6tgu2bOXOmi/0h//o+3Z4//FRTVXmPL685c+a4WNMGYx3HKJ3oWSuFm5n19vaO+f6RkZHE\n",
+       "epreO3HihIv9UgNl0nNetqlTp7rYH0quFixYUMXhoI3obyIk5eYPX9ffZeg9M4TeX6r8nQPthJYo\n",
+       "AACAADxEAQAABKilYjkTDQMAgHZAxXIAAICC8RAFAAAQgIcoAACAALWUOBjvDNTTp09PLLdardrf\n",
+       "3q/92q+5uLu728UDAwOJ9U6dOuXikKHGTz/9dGJZSyYcP37cxQsXLkyst23bNhe///77Lp47d25i\n",
+       "vfnz57t43759LmbG72poXz/OeTWKOOc6lF9/10VXxQ+9j3V1dbl4ypQpLr58+XKu9+t7zMxu3rzp\n",
+       "4uXLl7vYr76/d+/eMbfn92nlWi+ff85nzZrl4kuXLuXahpbvyfsen15Leh11Cv83kOc3SksUAABA\n",
+       "AB6iAAAAArTFBMRFN6v729N0WV6rV6928cGDB3O9Z/PmzePej1myQrum83RCXrPmTMqLZArGzOzG\n",
+       "jRs1HcnEodX8zT5adT+NThKsv6+tW7cWc2B3hf4+tVK/dj3wf/96H9K0RFbXg+vXr7vYP39atX/S\n",
+       "pLb4UzFhaLeTvKm50BSemj17touHh4db3l7ThHTToSUKAAAgAA9RAAAAAWijDZQ3hRfi6NGjQe/T\n",
+       "pvk0fppJm/AHBwfHvU9Na5qZnT592sVXr151saYGzPIdaztbtWpVYlnTIfr9FtHEjg/4k2tryuPY\n",
+       "sWMu9kedbdmyxcU6IXfZnnzySRcfOHDAxf5kv5qa09f80cMqb1pCJxbX2Mxs48aNLq5y8m/cm38f\n",
+       "r8rnP/95F//gBz+obL8h3WdC6Ij53O8p4TgAAAA6Hg9RAAAAAXiIAgAACECfKI9WC/crh7/zzjst\n",
+       "bebKW4cAACAASURBVHvFihWJ5dC+T+Ol/ZH8Ph8nTpwY9/Y0P+0Ps9Z+UCpvHyj/HOn2m9h/SIf8\n",
+       "Kr8PmPZrWbx4sYu1fIVZcoi9VqS+c+dOS8c5Efh9ndatW+fir33tay7+yU9+klhPf9dnzpwp6eg+\n",
+       "SvsZZX2/Wq360KFDLe+3v7/fxVn9qvTewPXXLHV9H3WVutD+jmX2iQo5r7REAQAABOAhCgAAIEBH\n",
+       "pfNmzJiRWL5165aL81aMzhryqxM4pg07NkumZFRV6Tvfo48+6mKdmHg8dJJWrVRbdIot9BwtW7bM\n",
+       "xVpmwU+XaZrNT0XmoRN/mplduHBhzPX8/eq1o5N4+s3jWtVar1+q0d/b2bNnE8s6Q8CSJUtc3JTU\n",
+       "1KuvvpprPb2v6fWW9Tk0lemnlvW1N99808V+qtC//6E59N5Qpaz0b5H0GjX76N/Ysvi/lTxoiQIA\n",
+       "AAjAQxQAAECAjkrnXblypdTta+qqiSPF0mjaStNyZslzptVa/clImzbZ5Pr16xPLO3fuHHM9v9lb\n",
+       "02KacvMr1aalfzUVZ2YWRdGY6508eTKxrBWkdaSinw7U46iryb5KOkLt9u3bpe1Hq5KHNNn79Hsq\n",
+       "Oz2oI17z7kt/8/551deOHz/e4tGhDlqNP++1qPcqHfE5HocPHw5633g98sgjieW33nqrkv2GzKRB\n",
+       "SxQAAEAAHqIAAAAC8BAFAAAQoKP6RPn9eEZGRnK9T4c/+31Zmsyvlq3D/JUO+fcriq9du3bM9xQ9\n",
+       "pN7/bubOnevivFWYp0+f7uK0PlDjoX2O/L5OWnpAS1bkrWjtDwWuamhwuymzH5TKKp3x2GOPufjt\n",
+       "t992cVb/Er0Wy+6LGTK8O6sPY1XDxVEe7dOUt59caD8oVWa1cOWXK9I+pU1DSxQAAEAAHqIAAAAC\n",
+       "NCqdp014mnbK2wypKSKz/Om8VlN4Wr3crLqmR394fFr17LRJgc3CK5iPl/9d5P1uVN4Uo6Yo/f2k\n",
+       "pTlu3rw57uNBZ9B0ct7USNkpvDr4ZT70XGgpCtRv0aJFLtb0bNrfgHaX9TesbrREAQAABOAhCgAA\n",
+       "IECj0nnaRK6Vtf00Tlq6LO8or6IVkb5LGw2G8dMRdEzci3tpegqkq6vLxXknUg+RlcqsahQl8tHR\n",
+       "zk1Ptfb19bl4cHAw13v89N3FixcLPaYi0RIFAAAQgIcoAACAADxEAQAABGhUnyilwza1f5RZuSUE\n",
+       "tEyCznZeNvpBFafK702r3QNlyHtv6O/vd7EOgd+6dWvhx4R66d/EpvdXu379+rjf0/TPpGiJAgAA\n",
+       "CMBDFAAAQIDGpvNUlRNmVpkKqoqmKC9fvpx4TSfhDTFz5szE8qVLl1raXlNo9ebe3l4XDw0NJdZL\n",
+       "G16s7zHLP3GxouxFcVasWOHiEydOJF5reupAj09Tdv6k1lp1n9IenW3hwoUubnqJjpDja6cJ22mJ\n",
+       "AgAACMBDFAAAQIC2SOehNXlTlDqix0/zaXqgp6fHxX56sEyaIstKjy1YsMDFaRMO34umQP0Unjp2\n",
+       "7NiY/66TIJuZdXd3u/jo0aOp29MUXhRF9zrMtuOnP7Ua9+TJk12s58Gs9ZT+8ePHXZx3kuEm0kna\n",
+       "ffqbCEkfo33EcVz3IZSKdB4AAECH4yEKAAAgAA9RAAAAATqqT9T06dMTy9oPhT4C93b69Olc6xUx\n",
+       "fNqvQj/K7/ui32ne7zC0H5Q6e/ZsrvXS+i1p/zKzZF+grDIQ2kdIZy5vtRRFU/jlBHT2Ae0TpbPU\n",
+       "++uFXH9N7Ael/b40zqrwvH///lKPCe0h7/WsMyqcPHmyrMMpnP/3wS9L0iS0RAEAAATgIQoAACBA\n",
+       "o9J5GzdudHHIpJl+M3/eiQ+1OrWmEcqulK7DlTVdc+PGjdL2OWvWrMSypozKpGkqs2Rzrabs/Gbc\n",
+       "MtMXs2fPdvHUqVMTrw0ODubaRtpQY922WbLiuFZ5969ZXa8TSxz49PxpisL/7er10ynVuL/61a+6\n",
+       "WEtlvPnmm4n19Lx0+tB25JO3tExV9/eJjJYoAACAADxEAQAABGhUOi8khZdlypQpLs5K7WlzeZWT\n",
+       "HV+5cqWS/SxevNjFp06dqmSfZsl0pf9ZW03T+ek3TZHlHZ1X5sSdfipOr0VNX2oq2SyZytWRbFev\n",
+       "Xi36EBtHU1U6ma5ZZ4xOXLNmTWJ59+7dLt65c+e4t+dXdS9zkuqJkFpuJ363jDRVzihRpHb6vdMS\n",
+       "BQAAEICHKAAAgAA8RAEAAARoVJ+ovLQadFaV7bwlDjpdlf2gVNF9vrQvkVa3NiumSnmRsvo6aX+f\n",
+       "rFIIev3629P1yuzbVSUtXeD3eWunPhJp/N9hq78PnZHBrNzh7JRWaJaBgYG6DyGT3q9CZgvo6ekp\n",
+       "8nBKRUsUAABAAB6iAAAAArRlOi/vRLkTjT8BcxV0Yl2zj04w2+r2Vq9e7WJN477yyist7edeent7\n",
+       "XZw18bE/zHyU34StQ8T1e/K/M01TapP4ggULEutpqm/79u0uLnOYe9k0ZeRfR+38uUYVnd72J2lO\n",
+       "S+cV/RtF/ZpecqLVCb/9CdybjJYoAACAADxEAQAABGjLdF5eaSMEOrV5u46JWf00U95Uq040rFXi\n",
+       "/e9CK3UfPXrUxUWPFtKK4mbZKTyVlmby03RaUV0ruU+bNi2x3nvvvedi/T79tKGm8/S1Jqa99Lv2\n",
+       "R9mlVXL3z59+350yGjGEnq++vr7Ea/r7UHXd37KubbTmgQceqPsQSjU0NFT3IeRGSxQAAEAAHqIA\n",
+       "AAAC8BAFAAAQoKP7RKUNsyy6j4A/3LRdq/tqX7Gurq7Ea2n9c0LLTWg/qCwnT54M2v54aRXxIvgV\n",
+       "t7V/nvZryarMrdevf83qa7q9Jlbp19+H/5vUPlxZ/RbbtWK5lqwo4jPodToyMtLy9spEH6jyzJ07\n",
+       "t+5DKNWePXvqPoTcaIkCAAAIwEMUAABAgLZM5+kQ8aKrAIdo1/SdT1NGWeUSQobR+9WV01IR/sTC\n",
+       "daVxdPi4ft758+cn1ks7F/39/YllnYBY06H672bJ86IpT3/Iv6a+ml69OK1sg1l6JfdLly4l1tNS\n",
+       "F+2k6JITev01fRJalKedSgAov5RMWjeKpqeqFS1RAAAAAXiIAgAACNCW6bwmpPCawE+R+amhJsnb\n",
+       "PFtl+q67u9vFfrpocHBwzPecPXs2seyPrhs1a9as1PU0/eunrQ4fPuziy5cvu1gnRDZLpr78JvIm\n",
+       "81OPOgovbYaBdqbf9VNPPZV4Tb+3l19+Odf2dIaA3bt3t3ZwbaZTZ5oIsXnz5roPIUiV35nOEuHf\n",
+       "Z4tESxQAAEAAHqIAAAAC8BAFAAAQoC37RIXQvhh1lSTQ0gxmZqtWrXLxokWLXOwf369+9SsXa27X\n",
+       "7yfjL3eCMstZFJGfT6sQfv78+dT1tP+Vn6vXfm3aZ0bLIvjba3ofwazyBNr3qarq9HV57bXXEsvT\n",
+       "pk0b9zaaWOpB+//p8fnlSlqtYO7fPy9evNjS9trZzp076z6EIFX2idLfF32iAAAAGoaHKAAAgABR\n",
+       "1amtKIpy7TArjaOTL2rz4ERu3vXp99r0itYh/NICTZh4V8/5N77xjcRrWrrg9ddfd7HfvH3mzBkX\n",
+       "azrPn3BUK3/r76NTSgPk1enXeRGyykr09PS42E95pKVe/L8ZWioka6aDvHS2AD2mIrbdrvxzzrV+\n",
+       "b9oFInTmgNHzHkWRxXE85kmnJQoAACAAD1EAAAABGjs6L2vEkT/yCR9YuHBh5fv0qwjPmzfPxcPD\n",
+       "w7m2sXbtWhfv27cvdT0dfaiVm83Mdu3alWtfVdm7d29i+cCBAy5Oq4bu08k5L1y4kHhNR/F1ygTY\n",
+       "KId/fWhqo4iJXotOs+X9fTSdjk7U1JI/alFT8LNnz3ZxHffzpvAnevdnisij6Mm/09ASBQAAEICH\n",
+       "KAAAgAA8RAEAAARobImDdtLV1ZVY1v4qVdJZq7XcA8Nhq6G/pQcffDDx2sGDB6s+nAmhiSUO9H6g\n",
+       "xxRahkPLEFy+fDn8wArCcPt8tO+Txv7fCz1/WtpHS0z41fybcM6131JIn6UsGzduTCxv3bq10O3n\n",
+       "RYkDAACAkvAQBQAAEKCxJQ7aSV3pO1+ZkyzWRSuTN6EqeV7Tp09PLGvFcS3RoSlYs2TqRq8rvxK5\n",
+       "DpnW9IpWPPfNmTPHxX4JkVu3bqW+L41+Rh2abZZMX+i2s86LVmi/777kf9/pOcs7+WraOS+bfm9P\n",
+       "Pvmki7ds2ZLr/Z/4xCcSy365jIlES6j4v4GmlfbQSutmyXScTszsTyKtv2VN0+nvwZf2+6pSmedf\n",
+       "y7s0HS1RAAAAAXiIAgAACEA6D43ip3FCUniaAkibRPVedFJVv/k9j6VLlyaWp02b5mIdOblo0aLE\n",
+       "epruyvrs2uyv58xP6WqTu6YA/PU03ZWVntb047Jly1zspxS0WrCmK/x0nqYB9Rz5o480NZe3QnYT\n",
+       "ZjbIO5pOz9GSJUsSr7311luFHlNV/HRU2khKfz0doabXtn+NZaWu6+CnoPJef/o+/c3rPcinr/mz\n",
+       "GZRJ760PPPCAi8+dO5f6Hr1n+Oco7R6Xlcosgm6/1dQhLVEAAAABeIgCAAAIwEMUAABAgFoqljdt\n",
+       "aCoAAMBYqFgOAABQMB6iAAAAAtRS4mB02GqVaT0dZrlhw4bEa5s2bXJxb2+vi0OH0OowUN3Xtm3b\n",
+       "ch2fP/w8b+XldevWuXjPnj0u1krVK1euTH3/gQMHXBw60akO2df46NGjifXyDv999tlnXbxr1y4X\n",
+       "+9+NDlldsWJF6nojIyO59qvD7d97773U9XSIvm67rglCFyxYkFjW72DHjh2p79NJUUMq8PvD8v0J\n",
+       "U/NYu3ati/ft25e63jPPPOPiV155xcVNmJR1IvDv27NmzXKxlrDwy4to2Y+sYeV5K9Lnpb+BefPm\n",
+       "ufjIkSOJ9UJKmej2/Kr9eg8dHh4e97ZV1qTPaRMYmyX/Fuk2/HIRev/U2Qy0VEkWf+YFrd6u14R/\n",
+       "HvRvnT+LQh5+SRy/qn2r8jyj0BIFAAAQgIcoAACAALWk8+oYnffQQw+5WNNWviKq4H7xi190sVYi\n",
+       "zkrnZU0enLci7ac+9akx/z1rYldtXg1N4anTp0+PGYd6+eWXc62n6YH9+/e3vN+sFJ6qslpwHn46\n",
+       "T1ObWem8kBSepnFOnTo17vf7slJ4anBwsOV9oTh6zWn6x0+dDw0NuVir9petv7/fxTqheRETx+vn\n",
+       "8NPJVU2Yrn9PQ+/heWcBSOP//cr6e1akotN3IWiJAgAACMBDFAAAQIC2nIBYe/vnbYbUptW8qRpf\n",
+       "2qSFy5cvT6ynzbo//vGPg/al8jZZ/sM//IOLv/e977lYm7DPnj2beE+ro0ZQDR0taJZ+DfujHlev\n",
+       "Xl3aMVWZklFFpGtxb5qu1cmrfToyU6/TgYGBxHp6bYaMxMrLT2nr/U8/RxHdSjR96Y9Q078DZX5e\n",
+       "1IuWKAAAgAA8RAEAAATgIQoAACBAW/aJChmO2d3d7eK8w9K1PIFZsh9UT0+Pi//0T/80sd6f/Mmf\n",
+       "5Nr+U0895eLXXnst13ta5feJqmoY7kTj9yHxKwSPdxt+9ec0/jW7d+/ece8X1dPK0nm/67JpXyLt\n",
+       "D+rTe6Gu5/cRqqoPnV8N/dixYy7WvklFn2e/YnlaH1q/zEzeWRTQTLREAQAABOAhCgAAIECj0nlF\n",
+       "N2nrxL3+BKl5ZE2++KUvfcnFv/jFL8a9bbPiU3h+M/Eo/RxNqPDaFGWmUELSd6Hb0LSBXvNmZtu3\n",
+       "b2/5ONT69etdHDJRbOiEoZoa0glly6RD/M2SE3n7E2q3qikpPKUVxv0yLkqrU+sEuH4ZjlarYufl\n",
+       "d9eoalYBv7yIfqdaSZx7cGehJQoAACAAD1EAAAABoqonA46iqLId6uiSrFFo8+fPd7E/eq1d6ffa\n",
+       "1dXlYn/0VlVN7BOBnnN/MtIyaarLn1S11UlWn3nmmcTypk2bWtpeXppqNUueT/28586dG3OdMuj2\n",
+       "65hEvWp6/9SJrPfs2ZNY74EHHnCxpvP8dLR/7xl14sSJxHJVk9cWTf+OmCVT15rqy+omksa/3tKu\n",
+       "dT+dr6lM0ojjN3reoyiyOI7HPOm0RAEAAATgIQoAACAAD1EAAAABGlXioGhpQ/59ndIPKo1Wy/Wr\n",
+       "6uowbj0PRQzRnwiq7PuUpug+JNr/ZWBgINd7/PPQap+hadOmJZa10rQ/lLwqE6EflNK+Z371caXl\n",
+       "HrTchl8iQmeNmDdvnov9fjzt1CdKr3vtD2aWLBER0g8qhN/viX5Q5aMlCgAAIAAPUQAAAAHaIp3n\n",
+       "V8vVCSWV35z6qU99ysU6+eUbb7yRa7+a1jDLX6W4t7fXxWfOnMn1nrxWrVrl4kOHDo37/cPDw4ll\n",
+       "Te/ptpm49kNZlc3TUjz+EP3HH3/cxVu2bCnw6Iq3ePFiF+etqr9gwYLEsqYyQmj6rin0/tLE4yua\n",
+       "3iu0lIRPSxfkLfOh14dW8243Wtagr68v8ZpeI9o9oszUXlXV2TuVX6YiD1qiAAAAAvAQBQAAEKCW\n",
+       "dN5o82/eZs28ozX8Jvaf//zn4zswM/vc5z7n4rfeemvc7zcrPoWnzeIhKbws2vxLU/DYQiaH1SrO\n",
+       "Ztmjm5ombwpv8uTJLvYnBW41nddEEyGFl8ZPT6u0+7ifptP1dISlPxKzneh58UeDa/pXR0i30+jD\n",
+       "iSZkpD4tUQAAAAF4iAIAAAjAQxQAAECAzD5RURR9z8x+w8yG4zh+5O6/vWBm/5OZjXb8+d/jOP7Z\n",
+       "3df+3Mz+RzO7bWb/axzHL4213dG+FHn7RI2MjORarwg7d+7Mtd/169eP+Z4y5K2U3NXVVepxID+/\n",
+       "T9SOHTtqOpLyrFy50sWDg4M1HgnKtnTp0tTXtBK53tP9vk7af0jLIoT0OWwKPfYbN24kXksbLk+f\n",
+       "qM5yr5aofzCzL3j/FpvZ38Rx/PG7/xt9gHrIzH7bzB66+57/GkURLV0AAKAjZT7kxHH8381srOaY\n",
+       "saqofdnMfhjH8a04jo+a2UEze6LlIwQAAGig0BIHfxJF0e+b2VYz+2YcxxfMbLGZaSnmk2bWP9ab\n",
+       "33vvvcDdluPTn/60i1999dXU9XQYd1YKTyfh1KGtZfObk1Gfa9euJZY7cci/pnj2799f45GgbIcP\n",
+       "H059raenx8XaBWLq1KmJ9bQkhqb2quyuUTS9v+usGGbJdJ5/LtA5QtJtf29mD5jZBjM7bWbfyVh3\n",
+       "Yk17DgAAJoxxt0TFcewmVIqi6Ltm9q93FwfMTHsfLrn7bwAAAG3lhRdeuOc6436IiqJoURzHp+8u\n",
+       "/paZvXs3/omZ/bcoiv7GPkjjPWhm+Wb6rZg/ii0thaejTszMTp8+PeZ6fjVqRl/gwIEDdR9CKXRS\n",
+       "7unTp7vY/01dv369qkNCBbJGCOv9T9Nb/n1RlzXdrZPzmiW7Q1y9etXFd+7cGccRl0fTkhr7o831\n",
+       "XGg6z5+YOe/oa5TPrzo/+hD17W9/O/U99ypx8EMz+4yZzY+i6ISZ/YWZPRtF0Qb7IFV3xMz+FzOz\n",
+       "OI53R1H0j2a228zeN7P/EnN1AACADpX5EBXH8dfH+OfvZaz/V2b2V60eFAAAQNNRxwkAACBAaImD\n",
+       "xtC+GHmH+OddT0sfmJm99NKYBdjpA9Uw2pdN+1RUaXh4+N4rtaG+vj4X67B3v/8gfaImDq1Mrvdj\n",
+       "LWNgluzr1N//YfWbuXPnJtbTvkQnTpxw8cBAM8Yp6efSz57VZ0v7ffl9xfzSCKhPSL87WqIAAAAC\n",
+       "8BAFAAAQoO3Tef6QxFZ97GMfc/HLL79c6LZRjbpSeO3EL0mQN8W9Z88eF2sagorME5dWLNeUmz8p\n",
+       "9ZUrV8Z8z6xZsxLraTqviRNb66TDOgA9K4WtZQ38Egdob7REAQAABOAhCgAAIEDbp/OKnsx4x44d\n",
+       "hW4PaApNfedN382ePTuxfOHChTHXmzSp7W8lCKTXiKaqzp07l1hPl3UU1EMPPZRYT68lHbl37Nix\n",
+       "xHp11XLW1LWmIv2K5fqajuDWSv9m6b8ptAdaogAAAALwEAUAABCAhygAAIAAtXRkGM2bMz8xUJ1l\n",
+       "y5a5+OjRo7ne4w8/T+u/sWLFisTyzp07x3VsPq1obdacatX4qMmTJ7s4rXq5WbIEgFYinzdvXmK9\n",
+       "OXPmuFirnGdtry6XL192sV9uR/t9aUV/v0L7yMiIi4vu44vy0RIFAAAQgIcoAACAALWk80aHeFJZ\n",
+       "GqhO3omyNTWX9RudMWOGi/1h260ifdc+NAWllcj99FZaCtlPES9YsMDFmipcsmRJYr2DBw+O+1iL\n",
+       "oKUMzp8/72K/e4pOVKxVzjUFaEYKr93REgUAABCAhygAAIAAtaTzmpbG0xEgOvkl0EnypvN01NOZ\n",
+       "M2dS19MRdJrWMEuORmra7x3F0u9aR+f5Vex7e3tdrNeYprrMkteppgSbch3lTb8dOnSo5CNBE9AS\n",
+       "BQAAEICHKAAAgAA8RAEAAASYkFOv69BTM/pBYWLQCso+rUw+ODiYa3val6Wu4eao38WLF12sw/z9\n",
+       "yty6nl47fl8n7Qel/VVHZ7oAmoSWKAAAgAA8RAEAAASYMOk8nbzyxo0bNR4JJoLZs2cnltMm7q1S\n",
+       "Vjrv2rVrubahFaSvXLnS8jEpHR5PFef2odeBfm+3bt1KrJdWsXzq1KmJZf2tFF0JHygaLVEAAAAB\n",
+       "eIgCAAAIMGHSeVWm8HSkk45IKZqmKM3MvvzlL5e2L4yPn6JoOj/1kme9rFF8K1eudLGOvhoaGkp9\n",
+       "T94U3vr163Othw/o/cgs/z1JR8k98cQTqetpOi9rQt40Wr3cLJn200mHs9LRQF1oiQIAAAjAQxQA\n",
+       "AEAAHqIAAAACtH2fKJ0ZPGvG+Sr5Of7xWrFiRWJZ+xZk9WfYvXt3S/udCLSPxcmTJ0vbj85E3878\n",
+       "IeZppRD6+/sTy7/7u7/r4r//+78v9Jh27txZ6PY6kX5v/v1kz549LtbZGvyyHMuWLRsz9r3//vsu\n",
+       "1j5zfr9ALY+R1Qevp6fHxZcvX3Zx3kr6QJVoiQIAAAjAQxQAAECAtkjnPfPMM4llHQKrTdUjIyOJ\n",
+       "9bSZuUqt7jetsu+9pFXF1mb6JlTOrtKcOXMSyzr0Xodj563YnVfR26tL3s/hn2dNGdWVZp806cPb\n",
+       "W5n3guXLlyeWjx07Vtq+8tLfvKbEzJKV4XVSX7+EgKb6/G2oU6dOubiINHZWGQygaWiJAgAACMBD\n",
+       "FAAAQIAob1XZwnYYRW6Ha9euTbymzbjaRL5gwYLEeps3b3axHn+ro+JCPf3004nlLVu2uLiulKKe\n",
+       "l/vvv9/F/oiZTkk7NYGec02TtBv9vQ0PD+d6z9KlSxPLJ06cKPSY0mSd87lz57pYq6b7v8nbt2+P\n",
+       "e79azXsiVNJet26dizVVa9be13q78P9OT+RzriNPp0yZknhNf9v6u/R/4/o3Uf8G+n8fR2dRiKLI\n",
+       "4jge86TTEgUAABCAhygAAIAAPEQBAAAEqKXEwde//nUzMzty5Eji3/ft2+fipg/FX716tYu174VZ\n",
+       "ff2g0mhu2O8DpbnhkL4hGJtfJTq0bEWrdKj7o48+6uJNmzalvufKlSsu7urqSrx248aNMd+TNQS+\n",
+       "LlrComhF94PSfh5N7KdY1/UL+PT3UfRvxZ95IQ9aogAAAALwEAUAABCglnTeaCVcLQVQJx0mqVV6\n",
+       "s2gz4k9+8pPCj6kqmsJrekqhnWzYsCGxXFc6RNNOWlk6i6an/QrUaem8Jqbfy6xYrmnOtHMyHk3/\n",
+       "vdVVPqaJtOK7Don3Z8xA+zl06NC430NLFAAAQAAeogAAAALUks4bGBioY7eOX11Zm+YPHjyY+r5n\n",
+       "n33WxS+//HLqelpNVie8zdtU2NPTk1iuauRT01MK7STvpLtPPvlkYjktxe1P8Js3dZD32l64cKGL\n",
+       "tUp53vR2qDJTyDqCdu/evanraTpfK0NPnjw5sZ6mORcvXuzi7u7uxHppVd41vei/T/el1dDNzF59\n",
+       "9VUX6/fhz5Tw8Y9/3MW3bt1ysT9Kcffu3WMen39f1mts0aJFY77HrPjUZtPp9+h/V5h4uAIAAAAC\n",
+       "8BAFAAAQoLaHqIsXL9a1awBoKxNhkmOgHUX+7NCl7zCK4jiO7YUXXrAXXnih0n0D98J1iSbiukQT\n",
+       "TZTrMooii+M4Gus10nkAAAABeIgCAAAIUEs6r9IdAgAAtCAtnVf5QxQAAEAnIJ0HAP9/e3fvIlcZ\n",
+       "hmH8uomkUAQRIX4FTBHBVNkmjYipwqYx2vhRpRAR/KjVRi1ttBJtjJJCImkiacREK7uwEDQQgwZc\n",
+       "SCRsLPwDEngszrs4rjsiB2de2XP9mjnnPQfmGbjn4WHOzBlJGsEhSpIkaQSHKEmSpBG6DFFJVpNc\n",
+       "SfJzkjd61CABJFlP8kOSi0kutLV7k5xP8lOSc0nu6V2ndrYknybZSHJpZm1uDpO81frnlSRH+lSt\n",
+       "nW5OLt9Ncr31zItJjs4cm1wulz5EJdkFfAisAgeAF5I8tuw6pKaAw1W1UlWH2tqbwPmqehT4tu1L\n",
+       "i/QZQ0+ctW0OkxwAnmPon6vAR0m8qqBF2C6XBXzQeuZKVX0F081ljxd4CLhaVetVdQv4AjjWoQ5p\n",
+       "09afrj4FnGzbJ4Gnl1uOpqaqvgN+37I8L4fHgFNVdauq1oGrDH1V+k/NySX8vWfCRHPZY4h6CLg2\n",
+       "s3+9rUk9FPBNkrUkL7W1PVW10bY3gD19StPEzcvhgwx9c5M9VMv2epLvk5yYucw8yVz2GKK8MZX+\n",
+       "Tx6vqhXgKPBqkidmD9ZwIzUzq67+RQ7NqJblY2AfcBC4Abz/D+fu+Fz2GKJ+BfbO7O/lr9OrtDRV\n",
+       "daM9/gacYfj4eSPJ/QBJHgBu9qtQEzYvh1t76MNtTVq4qrpZDfAJf16ym2QuewxRa8D+JI8k2c3w\n",
+       "RbSzHerQxCW5M8ndbfsu4AhwiSGPx9tpx4Ev+1SoiZuXw7PA80l2J9kH7AcudKhPE9QG+k3PMPRM\n",
+       "mGgu71j2E1bV7SSvAV8Du4ATVfXjsuuQGL5jciYJDO+Fz6vqXJI14HSSF4F14Nl+JWoKkpwCE+Fx\n",
+       "pAAAAGlJREFUngTuS3INeBt4j21yWFWXk5wGLgO3gVfK/+/SAmyTy3eAw0kOMlyq+wV4GaabS/87\n",
+       "T5IkaYQdfw8HSZKkRXCIkiRJGsEhSpIkaQSHKEmSpBEcoiRJkkZwiJIkSRrBIUqSJGmEPwDOrQm6\n",
+       "MQ8HvQAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7ffb01f42310>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "feat = net.blobs['conv2'].data[0, :36]\n",
+    "vis_square(feat, padval=1)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The third layer output, `conv3` (rectified, all 384 channels)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 32,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlIAAAJOCAYAAAB8y+mTAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsnWuwZVV1tt+tCGLQGENEuTaXbqDtbrq5NgEEFFGQaKRi\n",
+       "vlgVKxgvMWpiiKl8ookcYzTBsqiUGiSJxKh8CSZGUVNFQBAQL9BCS9PQdNMgGlBjLhoTNcZL9veD\n",
+       "fs46+z1n9Jxr7bXPPg3j+bPP3mfvdZlzzLnWeNcYYw6Gw6GSJEmSJEmS9jxq2geQJEmSJEmyq5I3\n",
+       "UkmSJEmSJB3JG6kkSZIkSZKO5I1UkiRJkiRJR/JGKkmSJEmSpCN5I5UkSZIkSdKRidxIDQaD5wwG\n",
+       "g62DwWD7YDD4v5PYR5IkSZIkybQZ9F1HajAYPFrSNklnSPqqpC9IetFwOLy71x0lSZIkSZJMmUko\n",
+       "UsdLunc4HH55OBz+UNIVkp4/gf0kSZIkSZJMlUncSO0n6YE57x/c8VmSJEmSJMnDit0msM3is8LB\n",
+       "YJDr0iRJkiRJssswHA4HC30+iRupr0o6YM77A/SQKjXC4x//eP34xz+WJD3ucY/THnvsoRUrVkiS\n",
+       "/uu//mv2O5L04he/WJK05557SpI2bNggSfr6178uSXrwwYc2/5M/+ZOSpN12e+i01q1bJ0mamZnZ\n",
+       "6QH/9E//tCTpp37qpyRJX/nKVyRJj3rUQ4LdU57ylJHPncHgoba98MILd7q/xz3ucZKk733veyOf\n",
+       "77vvvpKkb3/725Kk7373uzs9Xs7z/PPPlyS9613vkiQ9/elPlyT9+7//+8jx/+hHP5IkfelLX5Ik\n",
+       "fe1rX5Mk7bXXXpKk73//+3rMYx6j//7v/5YkHXbYYZKadnzsYx8rSfr5n//5nZ7fYx7zGEnSD3/4\n",
+       "w50ePzzpSU+SJH3zm99c8P/sh9dnP/vZkqQHHnhgZH+0/7Zt20Z+v8cee0iS/uM//mPk8/Xr10tq\n",
+       "+n358uWSpCc+8Ykj+4s44IADRn7/r//6r5Iae2W/9OO9995bdX6w2267affdd59nJ7WsWrVqdjtS\n",
+       "Yw8vfelLF9xf32Avr3/96xfcH+326Ec/WpL0L//yLyP/P/DAAyVJ//mf/ylpfv85tPcFF1ygiy66\n",
+       "SJJmbbktkQ1zTt///vclNed04403SpKuv/56Sc05rV27duT3t912W6fjgchWJsVS3d9ee+2lxz/+\n",
+       "8bNzf1uYa1/xildIkv7yL/9SUnMtqZ27HOYgjzlmDn7Tm94kSbr44oslNbY9KabVf1dccYWk5tp7\n",
+       "1FFHSWrm7r/5m7+RJL373e+W1FxbuRfgGssc8cIXvlCSdNBBB418n++99a1vlTS/3/bb76GHYF/9\n",
+       "6lc7nc/ee++tH/zgB7P9dOqpp86O9YWYxKO9WyUtHwwGywaDwe6S/o+kj/uXnvCEJ2ivvfbSXnvt\n",
+       "NTsRJkmSJEmSTJvdd9999u/TTz99p9/tXZEaDoc/GgwGr5F0taRHS7psoYy9r3/967N38U94whNG\n",
+       "/ocHjyeKgnLIIYdIkv7pn/5JUnN3Cl/+8pclNV4ld8UlUHjw5FEo7r777pHtRqBElECBcaXh3/7t\n",
+       "3yRJP/MzPyOprEj5/lAcUGTYPooEXhj7pz3/53/+Z3Yb9IXU9AfelN/to9yhoOGt8xopTM5cQ537\n",
+       "nu07N9xww7zj3hmRKrFp06aR97T3aaedtuDx/OAHP5DUtBFeDl4UKgXHhTdEO7q3ijIZ8aMf/WhW\n",
+       "RezCPffcI6kZB/RLW1wxjLxuh/YClL7vfOc7kqSnPvWpkmJFinaPjnvlypUj2/vWt741+z/Ouasi\n",
+       "FSkSP/ETPyGp6Wtg7lizZo2kRmnANuhHxtL//u//7nT/++yzj6RGZasdS9MGxQC1lvP47Gc/K0n6\n",
+       "53/+5wV/53NAiT322ENPeMITZvvc+8PhOL7xjW9Imj+3cI2h31Hj2S5PO0pEYwN1FZ785CdLatqJ\n",
+       "uZi5tGQfwNwyaWWrLdgr/c344DwZ61yjeEricE1jTmWs8wpRe3WdPxmnpTnamcSjPQ2Hw6skXbWz\n",
+       "7zzqUY8qTsjJ4sAFLUmSZCnT9gKXJIvBRG6kahgOh7NeG94X3glKCqC08MrzU+IQ7rvvPknN3Wuk\n",
+       "aERwl4vy5O9L4K2W8PN173muZ70z8F4c7uLxClCuUAQ81oSYM/dq7rzzTkmNN3HwwQeP/B/FAW8A\n",
+       "b4zPuaunf5j88BLuuusuSfO9VI4nUgX6uuHjuLEXtzfAO+U4+T7eFMcL7l2uXr1aUuOtc161Xue4\n",
+       "cJxd9+dqyN577y2piQmLQMkCtzvsK1IjopgywM4WUgtcDeuKKwze13DrrbeGxyI1yoHHfdEn3jau\n",
+       "nLgHvlThPJjDUNcjJQoiZzqasx71qEfp+9///ryYtQhvP+JC4aSTTlrwd1u2bJHU2DxzAWo0++Vp\n",
+       "Atck5jr2g/IEbI//M6dht6jjjA2uEcxRzC3YkSudtU9HJgVjH+WNccPczzWwBKo158V5+9Mr3x77\n",
+       "Lz3Vcbgmo2DSrrWKaS4RkyRJkiRJ0pGpKVIHHHDArMeM8sHd5/777y9JOuKIIyQ1Xg3KDd4KsShE\n",
+       "8HNXz108cPcfeXfc1aLokBVWgrvYSEnC2+CulvPjPdmJUTYfEAtGO0TB+dxNoxhwXigL7jW7UsFd\n",
+       "OO2It8jn4Hf70d0/mTAcL14b5+1EShscd9xxkqT7779fUqMQ4cXXgtfG7yMvif/jVdFetAf9i/eF\n",
+       "90w/0q+8bt++XVK98tgVV2XaKlJ4vx5nUKtE1cZkdVWPUAEWOq+uWVeOKyWR4lSKoUE5YAwzd2Fz\n",
+       "UUbipG0kom3iD0oNNo8qiw0xl0ftFClSjDlvH5Q6FC9sMsqo9rnJ7YOxybWBpxtsj+PgGsWY57jY\n",
+       "P+9RMvmdzy3M4Ywtrjke08c1DGUGJQsFlN8vlrpdy5FHHimpmau5Rh9++OGS5qvVEVw7GOv0O9c4\n",
+       "nhbRf8QB83SIax5zOP3O3M01gGsO/YXixffp9xKpSCVJkiRJknRkaorUd7/73VlvgefN3M1zl42X\n",
+       "wP/xevACPv3pT0tqvDfuYj3mpfS8FK+A11pK2WN4n9z94qm7IlKqF+R30X6X7HfrZELg5UQ1V2hf\n",
+       "7uLxHvG6uDvvmvXl+62NOSuBlz8307ANeDkQZXhx/Hh/tAeKIl4o3o/3I7+jfyLlbNIZOO614u3i\n",
+       "zfGe8+JzvGfshM+xQz8fvG9XMB1+z/fb1svyuJi5cS/YatsYiUmBp0uboKZGSgxzF7btCkpt5mQJ\n",
+       "+pq+oA2jWLAIbANVllcyoFG4yLhF8YEouyqas2hHFCnmrkiRio4XUInJdHXbYm7YunWrpPnxkkA/\n",
+       "8X2UDlf4+H0ptstr0JXiBmHaChXty1MdFCjaAfuohXhalC0/P66FjHvec+1l/8z52Df277UbufZh\n",
+       "V7UxXalIJUmSJEmSdGRqitS3v/3tWW8ELwvPlLtaPP7oLvvmm2+W1DxH9ro+0NZ743ioW8VdqysZ\n",
+       "tdvBc+eumrtmjx+IYkbYDjFGrrjhBXkNm1IMCgoWx0PsEnfnXRWfSeHZnSg5HLd7t6VK616J3KGd\n",
+       "sUvahfYu1Soiw6fk5U+6nf38OS/aDW+O8+F4UCmo24T9eQV5348rUq500R7YZ1tFCjWA7c7NrPGq\n",
+       "8pMGDzbyXJmLmENq56JI1aNStNdRou1Ldac8E5XvM1baZjx7Vh71tFACSu3TFtR2j1Ns+3vAdsis\n",
+       "5ThRgLBZbDV6CoHdMWZQNHxu8bhZ7KGv2L5oLoniHsfF25/z5VrENdNj3fhe6akO7e7KLvCeccC9\n",
+       "AnMadk1/ROUzXFH0bMkSqUglSZIkSZJ0ZGqK1J577jl79z93zT2pUWhqn/dyt4k31darAleKeA5P\n",
+       "Jkpb8DY4P/fOuGsvKUe+5p17L9yNo3yV6iJxfngFxG34eeJFo/iVjq9vbwdoN481w3uI9ht5wZ5B\n",
+       "FWWSuFLStlo2MXd4u+BeKt5WKWuxL/w8eI+9Yvcoliip2F1UG4j/u/3hJfM5rx4rVVpTD1zZmuul\n",
+       "1nqQ0HZ9SIdziI4dW3U1m8+9Jhu2HNk0MSbMcbQdtoMt83viEj0OkjhT+pw+JTusLbQfcwXnx3bb\n",
+       "qvkR2NKyZcsktY+79HbF9hnrrly44hfB+Xq2JXOMw3mgTPWlSEWZ6fSLX5O8nlntqhHgc6yvckG7\n",
+       "esZ97bXC63L577iG8jlzq8dKet2vaC73dqlVPFORSpIkSZIk6chUFSlfE467P+5a8bJKz9e5e+R3\n",
+       "kRews2ORmhgQ3pM50nXdLq/H5AoK/y/VuSIDAsUsUtzYjnvHeD8oMcQWse6RP1fm7t3XmovoW4ny\n",
+       "6sO+bhnHVcqyjBRNvEavV1b7+1pob69m7PaEF4yygv0tVpyPx+1w3rySaYV6E8Xv8LlneeJtY0du\n",
+       "V22VOLxUjm+uctg23mxcJaBUd4lz9DmMMeYZmxw/bR2pl2yXV+ZQn8OwIa+ng01625eyySLYDrbC\n",
+       "eqicV18V2tkOY5bzob5SCR/rtC/tHq2N6DUAGTO8xwY5Hq5lPrZ8TNN/tZXa2+LZn658YjeMqbaK\n",
+       "lMP5oYz6OqTE0DH3lrItfa7wuRN74Dw9+472xz6juQZl2ZXC2mtAKlJJkiRJkiQdmZoitddee816\n",
+       "A3hr3E16pkTkzXA37TVQvLJ5Ce5yiRvgOWrXuj7czRJrwvbdA+c9d+fsD6UIuCtGSSFmBWgHz3YE\n",
+       "7spRPmgfvAVfT4z2ZD+1XkrtCvcOz+89awzca8J7w35qM0DAszvZr3v/eO38P6rIHkH/Y0+RHbu3\n",
+       "OGklypUv7ILzw874HseHXdAufj70j6s8nBf7YXx4bFYt7Bcvcu76Yn3FmtRCG+KBe0Yntos66XGM\n",
+       "tCk2jc248gS0JSo1cwa/pw+wIeYC2oWYokj56BqnRx+S7eZjsy98jHh2Vqn/PQbMFUOOl7mPdmZu\n",
+       "4fc+d3I8PA1BGazNVuy6jihzJcpmlJVWWvOwLxiLzB3MKeyf+mLEjZYUKcYJ12bvP86bdqcdvfYi\n",
+       "cwbjxeccnvZ4jGLt06hUpJIkSZIkSToyNUXqyU9+8ry7QJQYvAuvcO6QhebeYNe6PHiHnlVXAk8b\n",
+       "orgI7oK9HhJeVZT54zFUvqI42yk9X3dvmLt4vFRfL8prcwDrGlGDhvP3DA28AjKNeO8VraMqxuDr\n",
+       "SnG+XtW2lMHD9/Bi8JL43dq1a0e+7+st1SpGtCvtVFqjblIVzSM4Pvrba+D48aCS0B6RasHnbi94\n",
+       "5/we757+QK3xNSIjOB7sgN8vtO+uuMqJJ+zZZ+vWrZPUjC0qZDMG+D1zGmuOAf9nbPA7bMz3x1jx\n",
+       "CuGMabYTVUantllEyVYjPDOZ9mqr4pbwONC2c/5nPvMZSdIZZ5whab6ixRztcZ+MZWwZuGah4nJc\n",
+       "vHoNOWzXswG7qtAeA+T2P24F/LZgf9g114g777xT0vx46BJ33HHHyOsJJ5wgSTrrrLMkzV+dwWvj\n",
+       "YS9ca7w9uHbR/h7HWlvjLhWpJEmSJEmSjkxNkfrmN785e7fO3St31Xgz0fNe7j75Ht4Aio17DQ7b\n",
+       "9btNvNBSFprXTXIPGk8ZL9WVIhQBvkf9pqgqsStymzdvltTcnXMepbvn22+/feS4UGaimKpICeSu\n",
+       "3zMyPPaG7MKTTz5ZUtNOeNMcTy20D+2F91Va241+xRvkd3iTUY0bvFCO22vERBBfgVeD1xmtk1Zb\n",
+       "P6kv3M5471WXUaZ83alIQfMMJ/BaLoCijPfIdiM75rgOPvhgSfNrIc09h3FhbLAP5hS3FfqONds8\n",
+       "kxQ1kznDlQ+273GREVF22qRquNWC6uxZg1u2bBn5HjEr2MTc+LYauEa8733v636wc8D2iIWiX9yG\n",
+       "Tz31VEnScccdJ6kZ0zfddJOkuNagx9jwu7ZKXW38ad8KYFs2bNggaX4F+Y997GOSxq/bdsstt0hq\n",
+       "FCmuUcxh2J33o7cb49ufivn7WqUwFakkSZIkSZKOTE2R+t73vjf73BNP1JWpUhbWF77whQW/R+zL\n",
+       "2WefLanxqD37C+8VDxivom3dJPe0vbaL4woEd8HEevndNXfPKEnRXTLZb7Qn54nXy/bYPvulSjDt\n",
+       "TnvwvNjjD9zr8Wq5vPoK7V1ryriCyPF7xgprI+J90P6eleaxYB4XgtdEe6FClLwTYnU4v8997nOS\n",
+       "5mf4RL9D8aIfPTaO2jyRV0rs3KGHHiqpqVjvWaye2eL9i33TTq5YlWKYfDyi0HF+tIOfX0lR9X7k\n",
+       "+3PX2jvyyCMlNWOZtuW7nBNzRKSSLV++XFKjKHhmLZCl5jaEzaLQeMYiqrQrNF4Hp6816mrxOQtV\n",
+       "meNhjqKPaVcfo/SpZy/yvSiGa7EhZqrEVVddNfLalbbqM0ofNl/KIus7S9JhPGGnPqeV4j377m/m\n",
+       "Kq+TxpyHPfpTC+zas/rIIvTtlkhFKkmSJEmSpCODxY7ql6TBYDCcmZlZ9P0mSZIkSZK0ZWZmRsPh\n",
+       "cMH00FSkkiRJkiRJOjK1GKnFUKTYx2KpX9Pa37XXXitpfuwR8RjEqPB8nlo0PE8nXoPn38SPkIHE\n",
+       "ds877zxJ0jve8Y4F9we+JiDP99mer1AfPVd/uPYfcQ+vetWrRvbn2aAloixAh3547WtfK0n65Cc/\n",
+       "KanJsOH/xK/Qr/vss8/IdoiL8HpdxP8Qo8V6ZtTlatueXdcdm5mZmY0jpI2JVyR2gpgjxsRdd901\n",
+       "si/iz7BhYpxokze/+c2SpHe+852z+1wM2M973/teSU2WH8fHWPa+OfbYYyU1WWm0D9BO/B5bOPfc\n",
+       "cyVJl156qaSmXtaBBx4oqcnKoj4Qx4HtElPFnELMCu1PzAr9Q1bcYrfnjTfeKKmJPWKOJH6PMcmc\n",
+       "ScYocZnE+WIfxNL5ahO/9Eu/JKlpl7e97W2SmrnXV/Wg3bZu3Spp/ligPZlLiavEzl/4whdKki65\n",
+       "5BJJTcwf/ev1yrADrhmeaeurG3j23fr16yVJf/AHfyCpGUe0E7GHGzduHDkPjpt2efDBB7UzGJfn\n",
+       "n3++pMW3l4hUpJIkSZIkSToyNUVqKUG1Ybwprxo8qZW5+wKvzusycfeO94A3SqYRXsDTnvY0SY0X\n",
+       "RTZklEVFNhveFttnO7QjXjBKBd4xypSvYwXjrkDeF5PKnJqbZTYXvFC8RbxH+suJlCjUBezCa/VQ\n",
+       "iwhvMqphFNXXcvBe8Z6pqeSV4msZZ5y95z3v6fzbGmhzX/NrsXCPvbQqQFR3CrzvGZMoUmTyeq0v\n",
+       "lCj2z6tniDLWPWuPDNtp1z0ia682m4ysrkg9Lo0ZMqR9TUagfclwjeZCjpc5nVevo+YZ21S253PG\n",
+       "Luo2ilPULyhgfJ85C0XKM4o5n02bNi24PV83s8S066VFpCKVJEmSJEnSkVSk1Nw1RzU4qIeEt7bU\n",
+       "IEYFBcq9DLwaFCHqAuH5E6+A94nXiHLitU/ce+M9x4GSwvb4HC/LvWQqiOPte3XoaTGpGj5RFWRX\n",
+       "G6JK9yVQomhP6k/BpBW/xa59tJgQNzatc0RZqF2VflxQOJgriIVCZWZsY7vYHMfp63B6DBtzFrDd\n",
+       "xZpro5psxPYwVnzMRMpIqXJ3SUGEtmM0Wq0D5YxrAUqT18Sj30oKIefHHFOrKEbjhePzNQkjuipS\n",
+       "K1askNSMG2LdePpUWoOyRCpSSZIkSZIkHdmlFKlJxSrhJURrttWuAD0tUHq4q/cqzniBKD9eAZ1M\n",
+       "H89wQllypY67ehQrfofX4zFZJe+S/eCdlCrLL1Vq15EiU6dE2xpvtRkwk475qz0/p23W4kLUZjJ2\n",
+       "hWPzavBtQfEoVYJ2FkuJApQLYomoms9xoB67LdH+ZGuhSPA9fu9zCyr2YhEpJcyhbW2xNPY9hqwv\n",
+       "ovMgOxUlsWtMmmdd0r/jVirn2lW7jmlXNZ3YNcYdCt64ShSkIpUkSZIkSdKRXUqRmnTWHM/Lufvm\n",
+       "7nXSMTu+Jlxbrwzlh/pPrqyhCPnacsQnROs/8bl7jb6itntZKB4oE1FWGNvluFDU2q4IP21QgHju\n",
+       "fs899+z0+5HySdwI3pnHp5TAiyamD/w98QLUwOkb7Lgt9Dvtg9fYJl5m0is1eEZkV3aVODJUTmKX\n",
+       "UCBQBiIlwZULVyn5v8fqtFXoJsWkssMmtbYg/cHcC7wfd21DfudPIxijJVDExr2WekxdLZ7NyLWr\n",
+       "r5i8VKSSJEmSJEk6MjVFarfddive9eOhc9eI8oHy4lV8xwVvkxXkuYudtCJFO3SND3AvI8pEca+i\n",
+       "FG+B1+39hJeDgkTtEbxXauxQBTlSpPCiUCDo76VarysC7zqq9O64PbkCirJE7JhnREVgP1QW5/so\n",
+       "nhApYn1R2w6OqxtLJXtzLq7mtoUxw1zDGPQxy5iqbUuPVyyBzXEc0X74HrblthlBbBQxKK6y8uoK\n",
+       "ysOdSSuR0TWV9kYtpl/bjjGuIcwptTFf2DNzuyuRUFKuus5djFt+z37OOeccSePXn0tFKkmSJEmS\n",
+       "pCNTU6RqnkHzXH3VqlUjv+laX6cECglKFwoJr5GyMm1q79KjGial8/MMJbxxvBu8WrxLnpvXxjqh\n",
+       "xLDdXc1LbeudeXuSWXPQQQeNfI4Xh2pRW2mc/nQvENpmodKfkULpLHbm1WIyrqKAjdNHzHGsXQeR\n",
+       "QhRlXLb11FEWSr+jr13F5rhdOWOVCI6PpwdAJWyUua6xSNTz4lrA+XBcUdzntJnUtQu8PV3pZK7m\n",
+       "tavq6zFHJagIH8E1oJQNW5vdF8H5Yi9Umh83hisVqSRJkiRJko4s6aw9vC8UIlaOnnRlZryZ6Lny\n",
+       "pDIvusLdfFvFjPMqeYXuheOF4tXQTr6ie/Qc3Ln//vslNcpHrfKyVGhbW4j1qgAvGgUPhQuvvm3t\n",
+       "FxQn2t9jpGq9YvoVFYHjiSqzPxIYd+wTy8QrY6mWKH6w63GV5lLi7IiJwibIruP32BgxUZGixvew\n",
+       "+a4ZnihjKAnsz219qbHYNfJoJ+Zm2qlrrBHH75nb44L9do2vLIHdYi/sz9cc7EoqUkmSJEmSJB1Z\n",
+       "0rfvKCw8F8Uzx4vzFcnHBS+Ru1XuUvGeiGvoqxpqX3T1RrlLb/vcHi8ERcqVKLaLUlaC9qS+0VJd\n",
+       "4bsv3BvkfIkbwd5QA4gH8TXzIugflFWv4VPrRXo2Yl/e567MuBmlzF1eM2vc2B76ijGJTfH5mjVr\n",
+       "JM1fzaAEipPbAMfpc0+kKKA2M6dj06y20BbOg1fU4EkpGn2x2BnJroB5pnTXpyzY1bgV/mHSFfs5\n",
+       "Xu4ZeOrBOBm3X1KRSpIkSZIk6ciSVqTI+Nh///1HPuduuu8quH73zt0q3tdSrGsjdb+bL/2OuAiP\n",
+       "6SEGB8XJFRa+f8ghh1QdBzVn1q5dK6l9RshSoTaWyWPZsGPsjFfiPWqrB9NfePtkz3n9Kep91SqR\n",
+       "9OdStf/FxKvEt4W+JHsPhaA2k5L9E9vhoHTheXtGKWOV15LKiErK9hibePgcdxT7w/45XzKHWY0B\n",
+       "lX9cJl3Rvi8mXcON/gL6ibneFST6p1aR4vhd8dxVIL4Te8m19pIkSZIkSabMklakwFexHzfCvhbu\n",
+       "upe6J46XynNfvN5SvEDJCyE2zZWrY489VlKjJKGgoIR4JXrqI3ktEeJFqC573HHHjWxvV6OrndBf\n",
+       "XquHDBMym0oqAvExePt4W+6lto1rQG2oVU2wA6hVP3YFiOnpaqP0DX3LXFabzYUtRIoUNojHzdzA\n",
+       "+1I9H4e5lxgSr+jOeUQ16lCZvb1QYXe1VQzGpVZd7orP1ajOPvZcWayF7bAf7HmxQE3viq8V2de9\n",
+       "RCpSSZIkSZIkHdklFCnn4IMPltTUH+Kumrv9vp677yrgNeIt9J254l7junXrJDUxUl5Hyb1rFCr3\n",
+       "hqkqy+fE9Fx//fWSpPPOO6+Pw180UH7axqzde++9kpp+9AwpvMdaRYf2xFtEqYK2leO96nYJlEpY\n",
+       "TCWKmI9JZX6OG+OCcsM6lF75u0Qp285jhSLlqi3YJDZOPGCpDhUquVekZk5xFZe5gv9z/MR+oYrW\n",
+       "KikoYuwHBW1aLPb+o7HXV2zTpLPtnHHbj7kPO+prnkhFKkmSJEmSpCNTU6SWLVs2mw1EFhPPK32F\n",
+       "Zu4eUZr4nXs1eKPRWl9RjRVgu57NRDbUuOv8TAqPXSH2iPbAK+F8S8+F+R3n7WvIbdmyRZL0xS9+\n",
+       "UVLTf9Th2nvvvSU17Rh53XfeeefI62LB+aBg9hWnQbuV4g+8rhPthnfP8dFvkQrC58TNsF3UgqjK\n",
+       "82tf+1pJ0kc+8hFJ8+NfUHyXL18uqYl5YpwxDhgvmzdvHnn9uZ/7OUmNXaBURplV2Bn793XA8Hq9\n",
+       "qjZQf0yafA0y1EPPZK2F47vvvvsk7TrZZj53+Bp8qNPeN9g0tskrNvHAAw+MfJ8xSVwltnfkkUdK\n",
+       "amzv2muv3enxcq1gbGDjbSvJj4tnIBMPeumll0pqlDbsgPdHHHGEpKY9PKMXtdnPq7bWXF/4eKN/\n",
+       "uQb7qiBtr6E+p/p4QSFlznBF2mPzsEfaDbtlDsaeiaWqVe9TkUqSJEmSJOnIYBoe0WAwGM7MzCz6\n",
+       "fpMkSZIkSdoyMzOj4XC4YMpzKlJJkiRJkiQdmVqM1OWXXz6bocHq8jyn5Dk8sTg8xzz00EMlNc/h\n",
+       "yejg+SjPNffbbz9J0tOe9jRJD91JLgbs5xOf+ISk+c9fyaaiUvvhhx8uaX4mCs+TqcJKrAjtwvP/\n",
+       "U089VZL0p3/6p5LiDB1qb/jK176WYe358Uq7lzJ3usJ+3vKWt0hq4iSIpSG+wuMCyBrjeTmxWsQO\n",
+       "8RycfkCVffnLXz6y3wiey9OvxHNs27ZNUn17sJ+3ve1tkuZnOzIuOH7+z/kRi8bxE3fD+PDtef8B\n",
+       "/UhsE9sljgPIgKJdb7311gXPa/369ZKk5zznOZKkN7/5zSPHSdwEx0e8ETWPPGaO/ua8XEWnH97w\n",
+       "hjfMxj4w1jy+iqw5xhrHSiwOY5GxxPcYMx57s9hzS9f9RdmMUabpuPtrC/u5/PLLJTU2CGRoExtE\n",
+       "xu8zn/lMSU1m6YYNGyQ1MUP0O3ZBXOFrXvOakf1OGvZz5ZVXSpJuv/12SY0dMbeRlcn5M2ffdddd\n",
+       "kubH/BDSw/x5AAAgAElEQVRTxdzGWDn//PMlSVdccYWkJuuSMcecQk1A2umzn/2spOYaxDWaduMa\n",
+       "xHggluuMM84YOc9JMXesS9Jf/dVfSWrOn2sdx8c1ljmO8+PaTBwo7XLbbbdJamKjmJvOPffcnR5X\n",
+       "KlJJkiRJkiQdmZoihdoiNRWYUZy4y+au2dd1wrvgrhEvC4Wl64rifbFp0yZJcQbR1q1bJZUzTyK2\n",
+       "b98uqVGkSrVi8MKj6sNtwXvBm4oUGPeWSuAF4S2Ar2xP5lS0VhzH43XGAO8crw/vsBavQH7HHXe0\n",
+       "+r0TVbWOsh05f8+ia8uaNWskNV7vWWedNbJdxhc1j17wghdIarLzNm7cKKnx/vEWf+EXfkFS4wW7\n",
+       "guRZo6gMeLcOmUu0k6snc9sPlfKGG26Q1HimZBPRdoCtsI/Pf/7zI+fC7xjLp59+uiRp1apVkrpX\n",
+       "iC6B8lVb8bxENBctdh0gx7PaGPsoC8xtrgjy1AHb4XrCeTL38D3mIObOaeFzNWPB1V3qb5XwjGPP\n",
+       "WOdaE0HmNZm6QHtjfxyP76+2Dhr9wSsZyRwvcz/j0a8pjE9XKr2umtclwz7YHvaOvdA+KLOcH0pW\n",
+       "7fmlIpUkSZIkSdKRqSlSg8Fg1hvBo0Zx4K519erVkpq7SbwJvA4ULO6aURq6rp9D7AleEUpZ2/WE\n",
+       "eE7NcbqXQDxG13WKvA7R0UcfLanxLiadiYn3HSlCUKtEAV5QtGYdz69LeG0ah365++67F/z/pKtj\n",
+       "98W4dc1QeFFwGTf0G14Zdvqxj31MUhPDhFeJl0iMU+1ajw7j7YQTThj5nO0RB7Oz9eLoO44FUCY8\n",
+       "nixSI/FQiRHxdSxhUmONvhm3kjpzGdsZdx3LvscGcz0wdukfFARX5rgm0E6+bihzLjEuKBBt5yQH\n",
+       "9ZU6WG23R9wuYxf74Tw5XmKmUNVRhek/5khqsN1zzz2S5q9zWYJ29TmXfuZ4OE9XpGpXLeD3UXtx\n",
+       "/lHtO/q5dj1T5iS2G43TaDzzdMuVuohUpJIkSZIkSToyNUXq0Y9+9LxYJ3+OiheJN0hshnsfviJ1\n",
+       "V08dL4u7epSXtsoR3gTPX7mr5+4fLwLvhrttYpjwhv25Ld/jbhnwtkreMc+h+R5eal9xGH3hK8w7\n",
+       "tNu4awpGsV2LrUT1dT4R7vUD54n3hr2hYuB1EycQxQtgV9gpqoKv8VeC+BG8Urxw2mVuXOVcFjq/\n",
+       "SNGJ2sJh7qGNOCdeDznkEElNWzGGaINx1xckbs1tsa2tMFcyp6EIdI3p6nts+JzlsTMR/n+UBWyW\n",
+       "TFpss6Sel2BsrFy5UlIzB7dVpFDafDUK8Ax2rh1cA70iviuMfq0qPf3g6Qb7Yb/Ev0ZPVQDFdlwY\n",
+       "8+w32p8rVp45zvn40ym/pnKvEPUD9lIbb52KVJIkSZIkSUempkjttddes94DXgP1gLjLxpt68MEH\n",
+       "JcUriHP3yd1017gC7vo5jq5Zbni9HK97f6VMCs9eo52iu+fazAJX8saF45zUdoFYHbwJ+qmtgsN2\n",
+       "6N9pZyxBdB4otagdXVWOSKlknOB1elZsbTwN/Y/3hzdH5pWvtUf/sl8+x7tk/2Tk+Nqbbu+oD1Kj\n",
+       "FBBHxT6IrSjZKmPOY6MYw67eetv1Bcfrqh7KQa3tY+Oe3RQRxaiUvt9VqfJ4VupDoaQwB5eeMnj7\n",
+       "ez/Rr7XZcA5zB+3ftXYeipLDWEeJ8bGJGst58L2SPRNHjCLFfhhjKEDEFPI58YrR9rnGMbYhWleT\n",
+       "70eZvK58+bWF9nAFDPujP6I5i3GAkoyCWVrntVZxTEUqSZIkSZKkI1NTpHbffffZWBjPRPDMgLm/\n",
+       "kRpvw1cUxxvlLrsteEccD3fHbb2uceMQqODMq5+P11l6uOFeEN4MXmcpK5NsUJ5vE9dC1hfKDl6O\n",
+       "e1VLhWgctCVS3hg3eHMov3hpbVUMxp9768RgEQPlXj3jjd+jSjDO6acou5C4obmfeZ9iO6V4QGzH\n",
+       "V08A5iCYVFwb23WlZVz1l7aN+tbPLwJlg7mJ9uL4aEcUtUixQ2UG4kdRJPzpQm38KzZMzBE20lWF\n",
+       "RqHBvrz+VS3MZdg+9kg7+DUG5Wauje8Mt3ufQ/w9Yx7Fq20Wno8P1GG3L2IO6b9S3HEUB+3ZuJwv\n",
+       "/YFdRtcIrgF9k4pUkiRJkiRJR6amSH3rW9+afU7J3SZ343hj3D1zF+veJHfTHkdQe/fu+HNfvKO2\n",
+       "tVc8xsfh+T81KvB2ovpHeJF8z+tI1UJ79x3PUYu3a5RJ414T60y5N+kKJRDP4N5V37FcuwqRMopd\n",
+       "+1p2beuw4Z0ybt2rxu4YF4zXqLaRe7N8n+1gP3j1c+3F4ybxUFEmXOXi/+wTBYBzGTf7zkHBqW1j\n",
+       "339thi0VwHnF9plzUP98LijNXUA7R6opNudKgX/fFT2Ok1gmfu8KUCk+k3bjd8yZXTO6UV5Kq0iU\n",
+       "iGJuvJYisUC0j2dqR3jskSt+ESU7Z7yUvhc9DfJ4yK64HbBdtzfqdXHtqIV5AruqHQ+pSCVJkiRJ\n",
+       "knRkaorUt7/97dm7bjxN7i65++VuMIpD8GrDXiW2Lb7eT5R1V4vXuAAUM2JHSpkk7lWTvdQWtoNX\n",
+       "MW4MTtt28fiHCPe6o/50bxS7ocpvFNOGAlOqVfNwBztAvfEV32vB691///0lNf0MHjeDGlOrrvj4\n",
+       "IU4CRXOuyoCqzBjjPXME50o2GKomygyvxHq4BzxuZexxVdFaj57jJmsLFdzXFHT6HhMcbxRT5Ofj\n",
+       "FcvBx3JJGcEGUaeZq7Zt21Zz2FOD8ye7b9zVCxiTjBnPiEepKz11oT05Hto/UqA8e682szyCcep2\n",
+       "4Iob3+uqfHFt4PjJPC6RilSSJEmSJElHpqZISY136JW7uUvGmyR2g7thVyi4+yQbqG1FZSBrkLvs\n",
+       "rkoUyg93x3j6eODc7XKcUSVvsvN4Ps5r1xgpPPiuMWRObfwI549XWqrBUutNRFWRo+PyjCWPn3ik\n",
+       "QSYN3qZnCNVW0cbbRa3xTCu8Xa/63baiPt6or8M2VwHjmBm7PlfwXcYc58aY5JXfe12lcSt7j7uK\n",
+       "QG3MC8fJ+RIbxRzbdW6DqF6QwxznNf4iZY/j5pX28v2Ujp/9oLzw6vbAHMs1ZqnA8dLftB/nXZst\n",
+       "yvc8hoj2rK0bxrXXsx/96Qj7of15j93VXjP8aQ52QNbdaaedJmm+AoVdcq3knqBU2d5jJZkbWX+3\n",
+       "RCpSSZIkSZIkHZmaIjUYDObFUnAXy10k3gKxQXgp7lWgIPE8mEyVrqBQcBfd9XmrxyVwV++V2Dnu\n",
+       "TZs2SWrOk/Pibpq7c7L3ukJsVlsvoSv0K+04buZGiSg70bMi8ZbGjXvZVcEOUWKJY0AJLq0QQDvj\n",
+       "5fIe+6J9vT/aqjIeH+Hjf6794oHj0Xu8I0oUc4yvDu8xVahtHPPq1atbHXvf1MZ/ct5eef3ee++V\n",
+       "VK9oRIpN7RjGtmhv2pcx508P+D/tTj/WrsEHKDm+Bhv9CktldQOHMdX2vB2vuRbNxSWFkePxNfE8\n",
+       "Rorfu2LY9hrj8crgWZPYMbGAKGSsXlE7t6NIoZ6jfHFtRqGKSEUqSZIkSZKkI1NTpIbD4ezdMl4C\n",
+       "z1/xRnwF7KiWBneP3FWyvdJdZIQrZW0VINb58ro6KCLE6nD37lmKfD9a6y+qN1VLbcXqcfHn5LXr\n",
+       "Q7Xdvns7rkR53At4PaOHC5wXdrR27doFv4d3SX8wvvgdSjBeIeOQWCcy29gfr6gB2L9704xPVJ5S\n",
+       "3BHb837i+FeuXDn7GcfMZxwrHiq2wVzj2+SYUJNdXavN4okYd33KWgUFBemOO+6Q1LTx9ddf32p/\n",
+       "eORbtmxp9TuH46b9OH+PD2WuxRYZu9gAykgpRoq5FSVyUpXiJwVjpmu8L3AtO/LIIyU110jal3Zi\n",
+       "TDLWuQb7cVCRnPb3rMJSv9T2X2TnnvHNPQR24+fB+UYwD0RzUG1m+8PrCpIkSZIkSbKITDVrj6qj\n",
+       "eCXcTXIXyV0xz83xiKloDniRbA8vk+qmUJtpgsJVG8vhNVJK3hveAPshBmqpPq/vSimLrq/tlzjh\n",
+       "hBMkSTfffPPI511XcO8bnut7fSSorWoMxxxzjKQmfuH4449f8Hsovnh1vHpVb9QAXw8MZfP2228f\n",
+       "2S7j9eijj5Y03+t3hau0hiLKF+OX+YL2OvbYY2e/yxziMUCMUWIsfO00wAP91Kc+JalRvfDMOdZz\n",
+       "zjlnwWMt4WuOlWJfXDEjZql2rkDVxpP3Nc6i1QEgakfOA6WvBDEsJ554oqRmDvcxTEyV1zVqW0+J\n",
+       "48RmplUzbq5tdmHcOcprBqLA0D6MZRQnsjv9//weu2NuqW1X7I85pJQlGSlWrmDSv9gNr77fqF4W\n",
+       "dsnajM6GDRskSWefffZOjzcVqSRJkiRJko5MVZFyTxjILDnwwAMlNV4hK1RH4PnefffdkqTnPe95\n",
+       "kprqts961rMkNV4ad8XcdXP3z92wrw3G3TxeaZR5UoLfo1z5c1juknm+vXXrVkmNd1hb+6MrZD1y\n",
+       "vn6Xv9jUKokO8R3j1v7pCt4TXiH24s/t+T9qg9ch49WVWAclyGO/8BqJ92F7ZImWxpXXisEOI1Xk\n",
+       "4x//uKRGkeL3fJ/+qF23jPPgODkfXqnSfdppp816lowtfuvxW3jc1ImhD4g/ZC7wmm9eoZlz41g4\n",
+       "N89kRH2jbxlb7tEztjkuzzLj+Olr3nP8vkpCaY04YsmijEjmwBUrVkiaXymdufr++++X1LQbts75\n",
+       "cHzMzdgy54dShcqIrXLcXmuwBP3OeXEeHmOzfv16SY0SQ/8w1zD3oWCi3HFcbJ9rDFmdHgcIXlOv\n",
+       "6+oStbF2KHlc6+hfrrmc95o1ayQ1qxps37595Pecp69egB3CIYccImn+mKdd2c7mzZsXPH76DTvg\n",
+       "d7Sjj4fSNYHtR5XbmQPHJRWpJEmSJEmSjgwmXdNnwZ0OBsOZmZlF32+SJEmSJElbZmZmNBwOBwv9\n",
+       "LxWpJEmSJEmSjkwtRmoxFCn2sVjq11LdH5kjxH945g7xG6iTvh4Xn//+7/++JOltb3ubpPqsRl85\n",
+       "3PF1lYhLufDCCyVJl1xyiaQ4e43aQb5eEpQyk8Dbk+Pg+Dm+KLssgvbleT3xMb/2a782sr9Jw37e\n",
+       "8pa3SJqfGUO8A/Ee2AtxDvyf99H6VcSGnX/++ZKkP/mTP5HUxEd4JhjtQfwN2/V6aR6r6LF7MzMz\n",
+       "s+cYZen46u7EqBBrROwPteCiODD28973vldSOX5tXNjf29/+dklN35WyuqIq/w5jhDZ+3eteJ0m6\n",
+       "/PLLJTWxUJOC82NuIUaMWCBswW3O41M9I5QYMI/1edGLXjSy30kzrWsD7Un9LmLWWK8SzjzzTEnS\n",
+       "8uXLJUk33nijpMauiY1i7DGu6I9XvepVI/udNLXt+bM/+7OSmtivT3/605KauZiMYLJPiX3zelel\n",
+       "/aQilSRJkiRJ0pGpZu0tNtQ+ISMED3vVqlWSGm/mr//6ryU1Cgd36e614d1eeeWVEz/2ceA8o7pL\n",
+       "ZFrceuutVdtru1ZaqQaMe9VtV3r3TCqH/mp73K7QoV7g1dVmA+K9kZHSdS3I008/XVL76tRO1J5k\n",
+       "Wt12222S5vdDqfYLeCZalDEDKFSR6gDYyc7qh/laYLQ1n6NwcIyeGYnHDai39LXXnUL5mrQiBbVZ\n",
+       "a1Bbh4ix4QrPpJWo6DhqK6lH54cNkU3IHORZXw93aE8yXqMsv2uuuUZSM5eiWKHIMo4Yq6Vai4wL\n",
+       "lF/G31FHHTXyPZQfFDPmCsYn/cdchFJcC7UDsQcy9n29W89qLV1znFSkkiRJkiRJOrJLKlIeU1ML\n",
+       "tUB4RVFCaeC5OnESxE143SgUnrYVp4G7cBQGaqvgNfSdSYlXgfKEd038AN5GrSLFdvBK+gYvBrxy\n",
+       "vIP3g5pALR2e69O/tSuB0+94W6gY1Hnad999JTXek8dkUXMHO8O7wRtm+7WgruDN8Tyfdnruc58r\n",
+       "SXrXu94lafxK8hxn21iwElHcEkRKFGAHO6udg23T9oxV2pC2wSP1PgK+h02xHbehUmXkcSnZfom2\n",
+       "nvWk1p7D5ksV2X39TJ4KtK23RP+yX/q5tPbawxXatRQvytqM2J1XBqc9Syp/VEGc7THH8HTI66Ax\n",
+       "7rBfj6ushe1Si5F2YA5vWzE/IhWpJEmSJEmSjuySilRfa6R5hXDiIfBauCsndoPIf7wjvN+2lcZ9\n",
+       "5WzPqGj7HLgE1Vt9ZexS1eOIWmWnK67IebVmB++H33lF/LZxHvQv2/MK9njt7u2jgOF1sX+PsUL5\n",
+       "rAVliNgo2uOUU04Z2W6tEoU644oT3ir/x/sstX8tXdc7w2tlvO0s5oo2x+NE5eI3KEusw8lYZ4x7\n",
+       "ZXP6mFeOBSYdG9VWUQLagawkz9JatmyZpPlqqis/tYoQ6jZjj7HC+qf0vStSvpag23C0X54mRDaF\n",
+       "rXhMWdtVKPqmq8I2Lm2vmdgdShSZy7R322sA7c41j/NHaXKlkLmXCunMTV0VJJ9DiPfs61qWilSS\n",
+       "JEmSJElHlrQihQKE1zKuUkNsD4oA6/lwt8tzdJ7PclfMXbOvm0UWoK9FVgsxRnjL7LfkbbUFL9C9\n",
+       "wXvuuafT9toqWONSq7TghdOOXWN8fM06Xomdo799nTW8HFQKjgevDu+461qJ2CFKEetV3XLLLa22\n",
+       "E7UL8Q54r9STOuKIIyQ15+UqRi0odW2zJ6kXhbpCOy9kF3icZB+5ooQnjELj58i+6CNUasaOx++h\n",
+       "uEyKrjE9tAOKnNfFYbulNdv8fCOI7zzmmGMkNe2JGh7NGaWx7ccN1I5jbKEk+vY8tmbaitRiK1EQ\n",
+       "tWMJroG0NxAvWsrk9bptfJ/jQRFCBUdx4prYV8weCjSKKXPxpz71qZH9diUVqSRJkiRJko4saUWK\n",
+       "u1LuIlGAuHusralCVpqvVM3dLnfFeJ98jvfiFb69npQfR+3K3Hi5bBclhXiD6667rub0qonqH+Hp\n",
+       "9x2btdjg/QDt2bbuE/EA9KN773hjqBkoRMRi4XWxP7xQttO1lg3e9/HHHy+p6S9UB167xtUAChte\n",
+       "Pt5iKauuhGdBRuPD/8/4oP1qFMqS6oUazCvxbV59njmINnUbmrTC4NvHs6/N7GWuZM7ysYBKv23b\n",
+       "tgV/X8qyA9rpgx/8YNX3a4ls2ZW1yCZcMelL5XewWdp1GmvY7ozoWlWCMc/TGp7G0O6RIoXixJyF\n",
+       "MogyibrN3MLcvX37dkmNnXPtHjfbkkrtHFdptY22pCKVJEmSJEnSkSWtSKEUkTVEdhuxKhs3bqza\n",
+       "DoqBV1n1uAn2xysxMB4f4d6QZxHVeiPubXJXTkZN33fNHu+AMoJXsKuDV4r3Q7/VKlGAd0k/unKJ\n",
+       "V4vCSQwf9oDXhjdHP+MNda3zRLVnvDns+sgjj5TUeH9U8x0XqgD3TUmp5XyIu0EdQglsg681Rx/R\n",
+       "dvQp+6Jv8LT7yhDuC+Yub0PaKKq3g0LncyAqP+rmtGJ42oIt1MZwQdd6RJ49xhhnzH3xi1+UVB/T\n",
+       "45X2a5W/ttBOHCc16Npy++23S5LOOOMMSU1WbFRLkfhj5lBiD10RY5xFcw1269mdXeHa3VcmMox1\n",
+       "IzUYDL4s6T8l/VjSD4fD4fGDweBJkj4k6SBJX5b0i8PhcOdrRCRJkiRJkuyCjKtIDSWdNhwO5wZP\n",
+       "vF7SJ4fD4dsHg8H/3fH+9WPtZMddLcoPd/O1K5v7c3avB8R28TZQgvBa3SvleygT/ty9pIB4BgXb\n",
+       "I7MA+lKiorpBtFvXLKylBv2EvRDj0xZUCV7xeulX+hs7wssjxs6rAXM82G3XrD3UFLxqFLFxY6KW\n",
+       "GqgjtDvtyPm2yWp1xQLFhbHvFZyjDNelAsqIry/ZNruJ88OWfM3BaRFVkHdqn0YQY4Ny0lV9J46U\n",
+       "scZTA5SStvbCXIJ9Yn+0v8/VfI/XtpnMZN2hAJXWv3RQcOgXxqTHe/rcx3hrqxwC/dX197XwlAs7\n",
+       "aRvj1ode5mf4PEnv3/H3+yX9fA/7SJIkSZIkWXL0oUhdOxgMfizpz4bD4V9I2mc4HH5jx/+/IWmf\n",
+       "Mfcxe1fLK3e9teB1EUeAZ8/dOl4F79mPP7elOirKB7/rmqHB82ueJ+M99V0tOaobhPdHu7TN6Fhq\n",
+       "eHbXuODl0994gVHldV7pTxRNvED6oWscCkqp18tiPIy7xt5SAaXJY6JQ/NpkXtHWvsaY15cqUVon\n",
+       "cLGgz33tulIsl8ft8YqqijI3bUWq7dxeYtxMU2Ds0u68bxtzxdzkawrSP/Snz9mM/bbnw/GxH9Rc\n",
+       "5rC2WYxkd6Koud1Fc28phs/x2n20lyuxfcHc0pVxb6ROGg6HXx8MBj8j6ZODwWDr3H8Oh8PhYDBY\n",
+       "WnmgSZIkSZIklbA8V8RYN1LD4fDrO17/dTAYfFTS8ZK+MRgMnjIcDv95MBg8VdLCYf0twKtsWxEZ\n",
+       "uGvm+SexR14Bm8wCvFT38LlrRTFiu229VPeCOR5ibajZMWl47jyp7KzFBm/Fn8u3VQw9JsrjElCq\n",
+       "eI8C5XEE/M7jJ7oqUihbeGeeLdg1JmypgTdKXAfeeJdxgXqH2outoy7zytimT5kLYKnUBXIbrIVY\n",
+       "FuYeVo3wytPTzlLsex1Pj1ftqijSbow56h0xd3fN1AZ+jzLoREqUx1369rlmuhLUVvlj+yiWXifK\n",
+       "98ecxDUWu6W9SnbG8dHeKIHTigc9/fTTZ2tRLUTnGKnBYPC4wWDw+B1//4SkMyVtlvRxSb+y42u/\n",
+       "IunKrvtIkiRJkiRZyoyjSO0j6aM7PJndJP2/4XB4zWAwuFXS3w4Gg5dqR/mDcQ+yqxIFUSYOVVZr\n",
+       "Y0v8rn5cJYeYJOpjcZe+WLVcSusk7arQrmQ4ta0ZgtfKdvCG8G7JJOF7HjtV6r+uNVFQScju9MyZ\n",
+       "h4uy6DGIKFRd5gH6zF/xeLER+ppK586kKmI7rjaixsGhhx4qqfH0PaaJOc7XAKTt2B5KFAoBv5t2\n",
+       "jFRUJ6srqJqo1ShxbSGz2WsJto1LZMxiT8wZPtfU4pm8Eb5aB9TOjVyjeEXB9Ww6jyelPzm/WkWJ\n",
+       "82FNTOy1L7vom843UsPh8H5Jaxf4/JuSzhjnoJIkSZIkSXYFlnRl864QD1HCK1iXPF7uxomJQQno\n",
+       "mkmAd4yXiHewdevW8DdJzGGHHSapUW7w7qjCS/XqEsQjYBduT/Q7sXXEzNV6p10VKY4H74z99h1X\n",
+       "AniTZHf2VdesFmIGa73uhcAD9jYiniyquDwtvIYZqjlg09gybeOvxH6hQKBkeYVzVGn2E8XodKVt\n",
+       "nOLJJ58sqZkbsTkUkFrVFdV29erVkpr+7lIdf+7xRHCeRx11lCRp06ZNC34vitGKahKWqB0TKI4e\n",
+       "Z1kLcwDtwHiJYp04T9R53tfaAUox18Zxn0pNmlxrL0mSJEmSpCNTVaTwlrj79LXuqCbLXTTeJTU3\n",
+       "uHvnLhlP/cQTTxzZD94ZK7x7HAJKEHftXlNixYoVkpr4Ae6y8f78uXNb8C67KhWcl3vVtTVH8Da8\n",
+       "6jNxGIvtDXi13BK0P/1x8MEHS2r6q1aJgnPOOUdSY5eoAF7zhXa7//77JTXtRgwV78kWxV6J2+gK\n",
+       "3i7eaF/rUDmuCiwWvh4cqlEX5Y0+jFStcZUoFI7Fije89dZbJc2PbWEMeH0pbBaVG8XpwAMPlDQ/\n",
+       "m6+vrCjmDiqA33nnnVW/ox199Yrly5dLapQmjhPbQKniqQHfx4b5PirnCSecIKlRsblGMGaJZULJ\n",
+       "uueee3Z63CtXrpQ0/xrjClOk0DEn0F/RGnZArBdzXCnel/bsutYg10T240qpw5yLvbXNBqVfPXvW\n",
+       "4emQK6rMzYwL2onxznZpd/of++N97XhIRSpJkiRJkqQjg2nURxkMBsOZmZlF32+SJEmSJElbZmZm\n",
+       "NBwOF1z0LxWpJEmSJEmSjkwtRmoxFCn28dGPflRSE1tCtt5JJ50kSdq8ebOk5rn1M57xDEnNc31q\n",
+       "iBAPQQYCsUPEZp1//vkj+yU+g+e1PJ+OanewrlBp3R+ex//iLz5UoouV0DkO4gB4DsxzfuInjj/+\n",
+       "+JH98EpcAPEkfJ9Yqxe84AWSpGuvvVZS0z4e00I7Et9A3ADP73kO7Stt88p+X/7yl0uavK1w3L/3\n",
+       "e7+3KPsD9uP7o914nr9q1SpJ0n777SdJuvrqq0e+T9wI/eDxKMQJXHDBBZKkyy67TFJjn8TG0R9k\n",
+       "I0ZxDR4Dhr3Qz8QyvfSlL13w/CbFzMzM2Psi5gJbjMZq1HeTgv1cddVVkpq+JlaGmJy/+7u/k9TM\n",
+       "cS972cskNbFC1113naQmZoTfMfaZ6/hddH7YlNsIY6mU5QYc5xvf+EZJ0hVXXCGpiXliP/QLtsUc\n",
+       "wdxK3KLbLjbK75iDzjvvPEnSpZdeKinOvGY/vkZeKXYH6KcLL7xQUnt7Yf/EnJVik6DWPrED2rF0\n",
+       "Xm43tDPn9xd/8ReSmhgpMp6ZyzzO0a8ZxCR57bjDDz9cUhPrx3lxvGT3EdvE+4suukhSMye+8pWv\n",
+       "lNTExvE5+ye7lbmWa77XZXNSkUqSJEmSJOnIw7KOlOMZEHgpZMB4JsPnP/95SfPX9ooqVkeeO3fF\n",
+       "vs5QRKRE4dXglXlmBooRXtWGDRskNd4e50sGycc//vEF94PSxd06dYpQ8lCk8DZr163Cq8Cr+spX\n",
+       "vjJyXA51nxaLWu95UtAutIdnkuGdUaMGr5R+p79oV19p3e2TfmNctM2o8ZXp6V+8Ra92vJSgbVDP\n",
+       "OGbGytq1D9UYRsVtWxV/0pAFR2YqSgyZo0Cf4knjcVO5/cgjj5TUZEaTvVR7vpHNtB1LPgcwh3m2\n",
+       "m6+D2helzOZoVQzwseaMG4NMdiHHWatI1XL00UdLapSikiJVyuD2NfJQryO74Jrm9cGwL36PfXqN\n",
+       "RZ6+cG0lu5Jrr19z//7v/17S/PVJyfZ85jOfOfI7nhKVSEUqSZIkSZKkI0takcL74rl9V6L6M5EX\n",
+       "0Xa1ebxbh7tqXnke3NarwmukSu+2bdtG/v+5z31O0nylqu26RBzXli1bJMUV1r0iO3fvPD93ryWK\n",
+       "PxBeDWgAACAASURBVPCaKvx+GpmkbSAWDa/LvR6v5VMC+8C7dW8fr4lYOLwk4gFQiPDOSrViJlUJ\n",
+       "HftbyooUtoWiA8w11IxjDCw1vF4U7xm7fI5N3HDDDQtuhz7yGnluO8Tnedydr2HXFff4161bJ0m6\n",
+       "++67JTX95YpErYrqqrxTW8ndt0PsDzFqt912W9V2IqL2jPqvLxiz1N1CwWQ88HSAa0wJroXYH3YV\n",
+       "XfOiSvXE7JUULa+pd8QRR0hq+gnFi2t5pOihznPt8npTHEdEKlJJkiRJkiQdWdKKFNV38aDxvtpW\n",
+       "qm67VldtFV6IYp+8Sq5nlJA1xWtUPRdFjrt7zgfIuOCuv2v1WrzD7du3S2q8Fa+cjfdEzJh7DcR6\n",
+       "RcoSygteMdshW22pK1J4K3g73v9RLF0EdhLZEfEixPThNWE3/D6K9/DK+3jTperJbaE/USqXAigO\n",
+       "jA1UvLPPPltSM8fw+Sc+8QlJ89XoviqYt117ziHGieNgLN53332S5sevRRATgu0QNxcpB14hmjmo\n",
+       "pPg4ntXnMVkoFD4W+D7787nDM575HnNjNDZKqjHnjX1wLcKuSk8tUAgjiHHzOYN4SOZij5tECWOu\n",
+       "j+aOkjrOtZT2I052zZo1ktqv++rr1vpaeyVoV88Mjn7PtYS5jO/R/7UxZdgTChd2Q8xkKlJJkiRJ\n",
+       "kiQTYkkrUnhftTU7IqK18Np6UxHRit18zt1ulO1EvAa1K66//voFt4didcghh4x8TgYOXhvKRVvw\n",
+       "5vDS8SY89gov5d5775XUeJl8r+Rtc5x837MAx127cNKgMPp6Xl4Xq5ZS7R0UJOJV8CLx3rHjCI9Z\n",
+       "m1T7em2fpYArLJ5BSwwIaiiKi9PXWnrYBipvtDZaBL/DVphjapUowOaYm7Ahj1lydR41vWucndu4\n",
+       "90+kgLBfVxjBM55LKm0tKB5sn+OvtYfSuqHR8RGHedZZZ0lq+utDH/qQpEaJKtkP+0c9J44SmOOJ\n",
+       "gfI17qJMchRah37CLts+HaFfGZ/YZ6QsUfeJfuGV8VC7lia/w26YM2vnslSkkiRJkiRJOrKkFSnq\n",
+       "GI1L9Jya58zcfUfKUolISSjVIMGrIzOjNhPFMzvYTskr5Tk/x+PfR/Gitgheqj9/p14RtK0dU4oh\n",
+       "alvXaLHxrDS8xa6U2g8vybPy8M5LCphnJk0qaw/vbynHuJEB+Z73vEdSo0CtX79eUuPBE0MUZRWN\n",
+       "C0pCqQ6R46vau5oeVRx3GNOoncyRxOQsFh7vCVEs2bh1pEoKUe1x1FKKkYqgFmA0V5ZqEoIrTE6U\n",
+       "HVeqF8Vx+dzFdnil3WqVIaCfaf9ozmL7XNP8uGozp4Hvc630+OCIVKSSJEmSJEk6sqQVqa64QhDd\n",
+       "DfP815WZ6Dm8w91q5HXgbU1agYmyGD32hniQKK4CL4JYLbIF3evtK7YsIqrtMun91kIsVN9ZbxEo\n",
+       "kF1r9rjXiL3WZtK0peTNLgVcaUJlPeOMMyQ1cYiTUqSA7ChWUygpH8wVXqcHahUp4ihZp5Hq+JNW\n",
+       "EzlelCGPQeG9x5W2VaLIaGUORNEgVqgW5njmRI8xKtF1rhpX5a6FpzKsv4odfvGLX5TUtJfHKDFH\n",
+       "R3ML/Uw/RtfgaE732npRLBj7Yb8cL9cyYtmI6eKV+F6H8U67RDGTTipSSZIkSZIkHVmSilT0XJq7\n",
+       "Se5ia7P5otih6PlprfeDVxV5D20zcmrhuTZwHnzumT1AteYoJgfvlN97TQ/A68XLaBvn0ZWlUjG7\n",
+       "1uuPoL2mBf3ZtgJ7LbVe3FJm0rZGnSCy5Gizkk15rAhzD7+nb/kefeyKAHMAlayZI/uKS41AeaB9\n",
+       "PYPU6xB5dqPPta5kuO15peq2sTo+V0arN0R0zfasnUvJ7usa30sFea/RyDWQWEHqfXlMko8T3vNa\n",
+       "smdikVz55drj9bu8/zh/7B5F058qca9Qqivl9lN7DU9FKkmSJEmSpCNTU6Qe+9jHhrFD0fNXFJdS\n",
+       "rIjfRUaR974ulcPdcLQiOl5J5D30vVI5Shz1i4C7aOICqE3iXlOpThFexC233CKped5M9V2gPd1b\n",
+       "dG+E4y15Je5F+P5gqcTecL4l+yj9vi21sXuOe/2uXrRVpEpe+aSyAicJFZ1p40nFj7GGGTXBqEhe\n",
+       "q27S9ygzeOi8Z+zQB9HcxPfdpmqPozb+00GNZe5w28QWfU5BCcFmmbOYY/gdyozXC4O2Y5W5lOPw\n",
+       "+E2Op+1Tj74Yd6xxLSUmilgw+pdYIep7+Zj3p0a0N+ddirmLjp92pR+jdsTeeeUahz1zbUGRio6H\n",
+       "/mWlAOY49lvKvkxFKkmSJEmSpCNTU6Se+MQnhsoSd5VefZfPuVuNPGKUGX+Pt8LvPdbFFZuoxonf\n",
+       "LXs1YOCuFu+L4/Xn+vzf4xscfudeIO3kK2+TVYbXyl01lcu3bds2sn+e5/vzdn8uzXFyHK5QtfVu\n",
+       "Pcaqa+2VxYJ+6Br/UFubxMGeUGZrM8pcwcIO29b/8t9H1Na4WUqgLhKHydjoGxQRxlBbhQQPGxvk\n",
+       "lTmIdQ7pc7IR3aOnbtaJJ54oqclSrO27ruowyh/78TnWbdUVBL5PO9BfKEfMeVFF6q5ZiYxZlA/W\n",
+       "2Cupw34t6pu+4nDJYF+3bp2kxl7op9r+dgWpFC8ZxayxWgcxW8Qj+/c9Zs4VSWKoIFL1sSvmAWop\n",
+       "Mq5K2ZepSCVJkiRJknRkaorUsmXL5lUf9efMPK/kuS3Pw8k0iPDIfOISqB3BfvAWuOukqi9339zV\n",
+       "clfN/lkbDy+I9w6KF3fF3NV63SevdVLijjvukNSsw8Tx4t2yHVc+aGf3trmL564cr7a0sjnnV6o+\n",
+       "i3cY/R8vg/93rZe02HT1brtmN6LceRYlCiPt56qCe63EC0wqDmhXAnXPs8KoodY39D1zQaR6Rxx0\n",
+       "0EGSmr5jTLMd99ijzEyUG7L0PCYEfOx2XSMQmGuYW1wxQGGK1F7OG/UdUL+Zo2vX2KuN9WLMEJsF\n",
+       "pd+R9bar8I//+I+Smmtm2xg47NPrbdFvvj23I/qP7XBtQSm66aabRr6PwoT9Yk+uiPk13O81+D7j\n",
+       "AiXXFd+IVKSSJEmSJEk6MjVFatu2bcXn8ShL3J3iPdXWjwJXDrgLxuvk+as/B0YZ4fv831dEj2rO\n",
+       "cBfOaxT70zbewL1GvKTbbrut1Xb4HV45Xmcp9gZFrTbGpqTAeExW2/6dFnhDeO20J5lYfVdgR4mi\n",
+       "PfGSiG9hzUbHvb5JVYZnHE27TlYNxLqsWLFCUqPmlurMdIUM2GOPPVZS00Zt49RQ1RmD9CXqMGOn\n",
+       "NLeyribfR4FAlX/Ws54lab6KPG5MDtlf4MpAKe4QRWPz5s2SGmWKubxtRmvt+fC9tmp527X9lhoc\n",
+       "P8odim0pG5S5CbvEvvgdNQ29/YmNOvTQQyU1cx7j1SuSo3x5LCB2gELFtbeksHGPwT0DWYvPec5z\n",
+       "dvq7VKSSJEmSJEk6MjVFas8995y9yyRWyesO4elzV8xdptfx4f9f+tKXRr4HkdeBN8Nzf56Poizg\n",
+       "7blC43V0arOnXHkitgVFiLtrzgcly+MePBOEdiQugN/hRXB+PK/nbpt4BLw5jo/Pwb3Stl70pNfv\n",
+       "mhbevtjZpM4Xbxi7xw7x7qYN573YayF2yYzCM0WJQuFpW9GceEIUIDxgPmdskgV0wgknSGoUIb7P\n",
+       "9/CAaUOPY/vMZz7T6vhKoEjtKiowcI1gzuxas+/MM88ceY+iwfZRyLyOEnM21yiUFs8mxA6mBfXK\n",
+       "AIWIay2xalzDmNu5JqC6sx3OG3tBCQLil3nKQL+wHeYItsN7zw5kLcgHHnhAUjNO/FpLvC/H7Zn5\n",
+       "XqEdBZQsReZs5nLGIf1XWwk/FakkSZIkSZKODKahFgwGg+HMzMyi7zdJkiRJkqQtMzMzGg6HC0rW\n",
+       "qUglSZIkSZJ0ZGoxUjMzM7N1oqK14bpC7Mgb3vCG2X11wdd5KsF+PvjBD0qSnv70p0tq4i94Ps3z\n",
+       "dp7v8/yX58bEb7B/zofnvWRAnHLKKSP7nRRkHL361a9elP0B+5n0/ogXeOMb3yhJeve73y2peW5P\n",
+       "Jolna4JnpETVqokTIIPkd37ndyRJb33rWyU1dsbzfPbvGTJkxFC/jGxN4mo4H2L9iIOgHa+55hpJ\n",
+       "0u233y5p/rpYp59+uqTG7q6++uqR/a9du1ZSM159xXjsl/ZcTHu5+OKLJTWxEJ69Q+yDZ+fwf7Lp\n",
+       "iAsk9oI4LL7/spe9bHafO4N4RzIrIxuqObe5r8R0eM07bIy+95gS5iC+H8WzsZ+3v/3tkpo+jeJN\n",
+       "X/ziF0uSXvnKV0qSfvd3f1eS9PnPf15Skx1JNhbHtXHjxpH9vfOd75TUtDsxK8QFbtq0aeS4yaai\n",
+       "36677jpJzZihHRhLxLws1twCvr81a9ZIamKwovhH2p05g+97FhrxvWS4v+QlL5EkffjDHx45Dr5P\n",
+       "DBgxSOyXazLZcXyfdsTuyG5lPP36r//6yPlNCr8WYZ+lWCbGxUknnSSpmRPJsKZWJfcinhlfOq9U\n",
+       "pJIkSZIkSToyNUVKmlwF676Ura7xYyhJKAMoTWR3oURxt89dPl4T3gVex1FHHSWpyQCZ9PpNzqRq\n",
+       "6/QNNUzaZhX69/FmUTNKKkJtFWy8RVesyN7EO8JLjNYLw57wHl1V4H1UmR7vM/LiyCgjqxW7w+vH\n",
+       "K0S14XjHXYk+Wk+rDb76O5794YcfPvJ/jhn4Pp4onj9tjefdNiORc1m9erWkpg1RViBa3T4C23Bb\n",
+       "Yu28fffdV5L0t3/7tyP/R23E896wYcPI/6N1IEv1llDhn//850tq5i5slPpZzI1kTTkoiZ415xXh\n",
+       "gUrcEaXVGaYFq1M4KKVd8X7i/Et1xbCjSE3nmuS0tdtx8WtR7VzB+OY83C5ot6510lKRSpIkSZIk\n",
+       "6chUFalpE61DNS54W/fff//I9vGqvLpqtOYZv8OLxqvAS05G8ZXgS0RVh/H2264zFYFShlLq1Ztd\n",
+       "eUKhXLlypSTprrvuktTYDzVSovMkDgY7ce/fa784XlvGV3QnnojjHFeJguXLl0tqVBvOF+U6UjHm\n",
+       "4h4qCovX+3Giz2lL1Omu6yTSh8ccc4ykJr6NzyOFIiJSiamrQztw/ihz/A7V0XGPvK06SCwJyhHK\n",
+       "2GGHHSap6cNovdRIEUDdH5eulcapN4XtM4Zc0avliCOOkDS/0jtqL9emtqtI+JzlSmpX++0LxhP1\n",
+       "0lDha5/++FqHzHG164Z6ewOKMbFUV111VdX2IBWpJEmSJEmSjjwiFSm8M+76UQr4HI+4a2wQXg8Z\n",
+       "JzyXbRtzRXwB3jReBTE1beF48DL7UlyWCq5IEQvE83GPE4i87VI8AXEeKE2luBm8ySj+AS+I/iE2\n",
+       "ifd4YWQ8YV94l8TM4ZVxfChGDmoI8SjYAcoPx+vqAN9jnERKLsfdFqoSn3baaSOff+ADH+i0Pak5\n",
+       "Zm+bWpgbaJtaz9dBcSI2i7izGpWtDdg+NkUf0g7YoKuI2NS4a8MRT4itkoGK0sLqE77GHkRjKYrd\n",
+       "aUvX+DviWplToliyWlCaHLLHUL7YTxSHybWB/vQK72SL9qVItc1kd4h98zXzavEK/F3Ho0MmNOOF\n",
+       "OTeKDXNSkUqSJEmSJOnII1KRwsv0GBPu2vG4u4KSxHNg9sfzYc/YicA7JD6A36OEtMXPl+3hJUTg\n",
+       "VS51UJJoL2qG8Bw+Uprw/sDjEVAo8aJRMvFa8PZRMNuu+4WXRf+gVnhsFIob+8frR8lhO56h4uBV\n",
+       "0u+cB/vn/GkXYrpQOTgOj/XCLqNswxJ43ShQKK99eJ20DTZfS0mdrMUzbr3+E3FvkVID9BV9hO2h\n",
+       "tKCY+BzBXMT5uxJGLTRX7FzxqIXjYw7Ddtg+tsvYLMFx0B+LvSIH6is128aN2fKxA74WXmku8X5x\n",
+       "xYk4x0gBa8u0103tuqZiCX9aUBtnC6lIJUmSJEmSdOQRqUhF4EWNW3uEu3a8Ra82TCZLyUvAO+U5\n",
+       "OTEveNcoX12prYnT1ot3fEX7SdUPQ5kBlBtqCBFn4fsv1Q7By6Mf6Ve8VPqlq7fkKgRepMdzsD+U\n",
+       "G5SyKPYqimEiY4p4Heze94dXjD3zPeIb3P48u7Qr/J5XFOJx6sPRxm1rjPWFnwsKC+dUazvEn2HL\n",
+       "fj6ujgIxIChf/J52iWKHuipSZLVho8Tq8L5W0WHO5BXlDLWV84jiPTl+lLGuGc8+1j2bs237RNlm\n",
+       "zGH0SymGzseG28O4damWGszpfYP6f/TRR3f6fSpSSZIkSZIkHVkSitSk6jm1xevXdAUviQwWlBhi\n",
+       "S2qfM/OcFgWAuAe8r3EVqVpKMVQlUD66xnZFoNS5okSNFuJS8Po84wNqvUlXDfgd+0EtaPt83e2e\n",
+       "OBe8Sa/jxGspdihqH1d2ovOPMqX4PmsQeszUuPbioCpE/VeDZ1pOC9rO1zesBUUEdRR1m+1FCgax\n",
+       "PU972tMkNXMSryhI9KUfb1fw9FGUqI2GSu11fYghQ7HieOg3nhbUZjuyPd9uW/gd7cXYQunrqkjR\n",
+       "b2wP5bL2/Jh7+J1nXdZmF9LOnrHL72uzHT3Gq+21FMUxylIcN6s0grmOeYL9RLFsTipSSZIkSZIk\n",
+       "HVkSitS0lSjoa40+vBYUA+ISUJJq98N2qGHiygfgnbaN0fF6RBG1tTRK9BWf4hlAvKLkkGVIZXn6\n",
+       "oTYmzL1PYD94aXiVURYoEB+CohLFctEP9AveLjFTteC9Yy8eL4OSSU0fj4Er2QPHz3Zd5WlbqymC\n",
+       "rEGOdxxFKlpVYLHBVsZZT1Car7BhQ1GdH2yWMYiyRSwO22FsQV9ZWtu3bx/ZL7E9bjvYLN/jKQHH\n",
+       "33YuQllBJfXzqc2WRAlj7uW1rT25kkV/0A4oQ9hr6bg4fo+5g9oMdJQXYoQ4LhTFWjw+2HH1Gntl\n",
+       "PVnGOufvqrifD3Yy7rj2emu0ZypSSZIkSZIkE2ZJKFLTgrthFIi+FCn3yH1lear81nrYK1askNRU\n",
+       "pEZ5gVKlafBK7rWK1Lh1tfomUt5cGeJ7tDdej9fxIm4DIuUKb4vt8j7KQiQeBW+YOA2PW3Dvh/3z\n",
+       "eW1MFODd1sYDeSYZXllJQSSGy9UMj7NpC+3D+ld9KJkoQF4zbLFB/et6TnjO2AiKRaS4OKiJHitE\n",
+       "vJuD6orNt80yBOYszp+YragdmJuYs3zNwCgOj//TDh6r42MCm2d7pX5hLBKT1DbDm/7jvLBH2hOl\n",
+       "i+PYsmWLpLjmH9tjzPk1oK067PWr2vYzxxNlQmM/tDvra65atUpSeQWCtvGnbeF828Z5piKVJEmS\n",
+       "JEnSkVSk1J8SBXhfPL9GeeJ5btvMA74feUttsx55Po/3gxcQtcOkqsnWUltHCC/xmc98pqT5yk+U\n",
+       "wULGEtCetLuvSeegGN5+++2Smowk+rsUZ0C/sl/aGzWA/dcqUnjjKHEeY+VeHUoZihL/L8UHcJzE\n",
+       "saB2jJu1R39zvn1m2tXGyU2KUsxLCa9jRB/T5p7ZCa4ouS2hIPgYwwbGjWNFeTjuuOMkNTFTDmPB\n",
+       "xyrHRwyNj1ng+GkXxhBZcD6XYbO1CiHtMG6tQa49jHn6g1fmaLLXopi6UsxZ26w5v1a1pdSOzKH0\n",
+       "A/G/KHPM4dE6t/50pO/xTPu3VYxTkUqSJEmSJOnII0qRwjvhLta9n1qvC28CT5xXwMsg4+Cee+6R\n",
+       "1D12hOyqDRs2SGq8sle+8pWSmiy10vNjzg+lDOWipPT0rdiVcG+07f6pTUOMEv1w3XXXLfh9Vwlo\n",
+       "J7x8jx0ClB68X/oXtaA24wUvi5pAeKMen+JeMP3uihP2wXpmfn4oUNgt3iDjwtd2dIUJ5YqYPeyd\n",
+       "34+rYNLO9FufilTfNa7GBdWTeDBsj7Hs9Ym2bdsmqVFSsBkUBI+rA8YUWXEen0nNOycae/RJredO\n",
+       "DMyaNWskNXGJXkcKm0eBwQYYW7XrOKKkoXhEGaZtlTZsn/Zsq4jQTx6X63Mzqjb9f+utty64PWoM\n",
+       "ck1iFQeoHTtt4yrHhfOnfzkPxift7BnNnlXaV4Yw+8Ge6Zfa7acilSRJkiRJ0pFHhCKFB85dJ94J\n",
+       "3kTb9Xt8fSW/a+W5tnt9Xj+oFq+l4VliUbxARJS5s1Q47LDDxvo93gs1Z7oqgcRllPrtjjvukNQo\n",
+       "SVRxxssqgX1ij65weq0gvHli3FyRIpsOZcjX20JJIh4BxQq75v9eGZ0YMK87hf1hl5xPVzy7NIqX\n",
+       "6AI2X5vpWoLtoDR430c1yYC5A6WFto3qOrktRkpFtB8Umlqi424bQ8L3r7rqKknS1VdfLWm+eknt\n",
+       "t65zJaBsuO34cdM/qLKMYX6PCsz/XTEjpubMM8+UJK1fv16SdN9990mab1+orRs3bpQ0f25nzmI/\n",
+       "1KAjxotVIqLz8diz2orr+++/v6TG7nw/feMxX74/lLmDDjpIUqPu+/n0XZuQfsusvSRJkiRJkkXi\n",
+       "EaFIcXfJ3SyeLkoFd/940jxfr82S8uymqD5UbRXdtvgagXi1eMt+F4+X6SvS+zpe4O8hyqwZF46r\n",
+       "LagBtAfnfeCBB0pqlK5apagEMVj0J++JXfra174mKV43ClCM8H5RfvASiXki/gHFiP16LBPvo5Xf\n",
+       "P/axj0mKvVUUIbbnVbF5xat2aqtP00545dhllHkGPo7aKKt9VzZHwWAOwHaJRcIWmWs4VmyCtqQN\n",
+       "8NQ5t9rKyiW61gVibuA8SuogfeOxRNRDQr2MzouYMVekmJuZo0pZc2vXrpXUZFBjSz73nnDCCZKa\n",
+       "rEKPwcKW6R+Om/PDhrEDxlxUWw6idSwZAyhQtD+KmCs3Xtm7awwgSuW4czjtgb1jD7Qb7cl72hs1\n",
+       "HHuhwjqxYvR/28r2bN/nOq4VjAeOm3Zvq4KnIpUkSZIkSdKRQV9rKbXa6WAwnJmZWfT9JkmSJEmS\n",
+       "tGVmZkbD4XDBNL5UpJIkSZIkSToytRipNoqUV+CufY7LPmr3RWYLtVmWLVsmqcmKop5T1/3VVuh2\n",
+       "iKsgngIVkf1ceumlkpo4BuIBaCfP6gLPynJ4Lk3cx5ve9KaR/RK7w36J7yA2CGhX4hE2b948sn+e\n",
+       "T/Ocn1iv3/zN35Qk/fEf//HI+fNKHAr7bZ1psWP/tO8FF1wwcn6TJrIX7P3000+X1NS/8pg94jSi\n",
+       "uBfOjxix8847b2R/Xl3Z15mqrZhP+2EvvL7whS9c8PyI6+hadyqKkXrjG9+oK664QlJjg7QR2U+s\n",
+       "8QaMccD2iPEg5sUrZr/61a+W1KzbiO0yxtkO8ZecM9sh/o0sK2rEMbaplk/dqNe85jWSYtuMYkG6\n",
+       "wn7+8A//UFJ/2VEONupzy6TA5i+88MJW+xu3fdnPP/zDP0hqsi19DGPbz33ucyVJV155paT51wzs\n",
+       "gxqF0f6mPZc93PYXkYpUkiRJkiRJR3aJrL3Fqnfk9XrwDsb1xshEQXnB6ywpXLDffvtJarwSV3zI\n",
+       "TEFJw9OnJktEpEQde+yxkhpv+vrrr1/we1FmCtlleNOc780337zg90teXpRp1TW7Dzj/pVbtGjWE\n",
+       "DJ0oe7Sk5HB+UdYg/eI1XRhv1HCh+rRXNMe+vA5bqS7buJlznnm1UK0jam1x7vzmiCOOkNTYNm2M\n",
+       "qoqixP9RPcnicZunDfg/tsRYJIOX7CPamt+hXDH30CeoyFF2F9AnzFVsZ9yxAaW5r6Rql2j7Oyqj\n",
+       "c1xkA9bSNSY4WuuuLW77tB9j2Wu6PeMZz5AkXXvttZKk0047TVKjWkeKVDJK2/Vo25KKVJIkSZIk\n",
+       "SUd2CUUKbw7vy+Mfxl3bCwWAu3y2z3pXbVfQjraPV4MyRXxGqc4QNS+iGhrEWbBeFnEcXStCuzdb\n",
+       "277UUmHdMGKiPvCBD0hq4kSIraJ2Cd407TyNTNJpgupBu2MPbfsPr4t4DtSRtlWiUXgYX9gpKg0K\n",
+       "GXZBTRheu64c3wfUKaLuDuoaqidzCbW2sLl169aNfI82o74Q2/HK4KjEvkYXyhBqHrXLUBzoK2Jd\n",
+       "UCpQsGqrw9MHHO9irZUG1GbDdvusQr8QzKGRjbWt1Vd7DUGxZLusY0k/UvutBPbHXB0pctdcc40k\n",
+       "6UUvepEk6eUvf7mk5lrw2c9+tmp/49I1rrcEsWpeP6qvWoTOpJQoSEUqSZIkSZKkI7uEIoWX6d5O\n",
+       "5EV4Jk4JPO2TTz5ZUuMV4mXhhfJ5bUwNd/PcdRNfwd29e53EOhEXgbdDvES0/hHeMHf54yoCHGfb\n",
+       "KrJUhUVR+dVf/VVJjTJFrNhHPvIRSY3iB480JQqiKs/RyuN40dgX3+N9235zsEvUGbaH6oF9oXzy\n",
+       "OXbdt/faBjzPW265ZcH/o4YSn8ixoxi5Ck2cX+TRUs2eWBUUIeamKLaGWB8yKsniom1RLlhjrARz\n",
+       "wGLH+7E22bhPBWphLqRyudM2likaY46rusyRbc/bxyZPQfzaxv4+8YlPSJJOOeUUSY1iQ+YzcA3r\n",
+       "aw7Fjic1ljkfrgmo3FEcbV+gRLPfvmIJU5FKkiRJkiTpyNQUqac+9amzigsxSXjCvo5S7XN3PHWU\n",
+       "nVp47oyHTYwPcQt/9md/Jin29kpr0aHUAMqWr8nnGTp4wdHafb4fYpSidZlqIQ6AWBtfU68EtXeo\n",
+       "sXPOOedIarLALrvsMkmNEtN2+9OGjCzPnuyKe7WMC+zYY/T4PnE72G/tGoKrVq2S1HixeLfE8uH1\n",
+       "4pXivREnwv5cAUO55f/TgBgZ4s5QDjjXSK3jc84Rzx6PnLFIGwEKCH2EwhFlotKnjFXmNpQotkNW\n",
+       "X0lhIKPX14gjNqsWX7OtlsVSooC5iVfiTVF2ahU86JqR3TbuEDzDtLSWItdCYqq4NjmM1XFjjBi7\n",
+       "jBfib/t+WoCdca1CYSwpUsw5bWF8ML7ZbypSSZIkSZIkU2ZqitQee+wx6wWV6s6UQBFC0Wr7XBcv\n",
+       "AS8Or8YzOvDM8UJRUvjc4e6X7/nz+9qMCP7P812/K+f48a7whlEs8FK6eo9tvRw/TuJOvDK9r2yO\n",
+       "Nz7pDItx6at6dERJHUD1wBt3NcK34/1Hf+DlgnvZ2C9VuF0hQ00hhhGiuleTguOUmiw7PqOtC73y\n",
+       "lAAAIABJREFUOFZUXzx4r8OER86YY+xG50SMEIoWthzNaez/c5/73Mh+OV7GqGcHOsx1qOj0Ncd7\n",
+       "zDHHSJof10aMkavck8qWmhS0N+3JXEK7eGYpRE8PukLWYq0a7DFZpZg2vo9ywnlTy43fc57jKiwo\n",
+       "USh8nl3XF1Typz/Wr19f9buuMVv8DvuPVvvoSipSSZIkSZIkHZmaIvWd73xnVqHhLrrr3TR34ygZ\n",
+       "xOjUgneGd8NzabxKXvFiPRsvqrzuMSYoRsRHnHTSSSOfs3+8G86D/+O9ovgA3ixeJe3K8XEceBul\n",
+       "mCuozQDCC2S79IdXWEe5oL2ooePP9z0jZakxaUUK78njKfASqc5Nu3n2I6AkeX/X9j/9yn6wS94v\n",
+       "VFFcmm839Dffx17Hrc8Gc7NUOTdiO3zdScYiah82SNsy9rFVPGay6PyYidXhe15vKAKFjONibKIA\n",
+       "lLLPsEHOg+8Tw8XxeIYw2YK+WkFbT5/tc7xdY42Y02qh/2hfbIrzQ6mhzpIrUj53jgvn7U8rIlAs\n",
+       "24JdokCiRmM3KI/jxpuSfcr2J61U0j9f+MIXOv3e10Dk/P110tmsqUglSZIkSZJ0ZGqKVGkNqS50\n",
+       "VQrIvvLn13ipeH1RhkUUP4HX6V4RmUR4L9xV47lHmQl4B64E4B2yPbxizwRqe1ce1VhBoaNd3Evn\n",
+       "vDdu3CipOT+OB6WEGjq0B9mNc2NeHongbbsdoBrQ3l5R3KEfvH+wI1eKHM8uRd2h/1A/iNtwBRTw\n",
+       "ClED3C7HZe744ljc1jkmFAEyL1GkaBNil3zb0eoD/I62QKGojfNjbFDfyhWmSOHAFuh7xiptiu3w\n",
+       "f/qMGDCy/TxD2oniOLEdFBL22/apQm0FcsCWaG/2z3lh85FN97Vmnh9P7XZR4GoreXtskmd50j99\n",
+       "V7Tva2xGuIKHfdGvpWxG8PbBLplraN9J17ZLRSpJkiRJkqQju0Rl80nDXStKCDVoiGUqZdC4N4B3\n",
+       "iPfrMSx4S2RNUV+JbEG8FVcMIsWN5+R4zyhUkVdWC9v1OlgodXgT7AcVgFcyI/g+61PhLdAueFnE\n",
+       "hE06Bmmp4c/5weMdvNo2dod36tuJat1g56VaOOzPs1PZPvt3r9C9c+wDbxxFs6/1tRZSf1xJ8crf\n",
+       "jG3ivrrWBeJcvbo85xop716TizZBKeL4brzxxgV/z/48a4s+o688c7ZtHR7mIPfoUc7ou9oK4U7b\n",
+       "3/n5EROFDXBc2KbHbkUZrm1ZuXKlpPZKB/biMXW1CgxPP4hhYv99r3E46Tk4UvDa2qcrZ9gTr4tV\n",
+       "5ywVqSRJkiRJko48ohUpvAKP4cGb5O4Y74f/UzsE79EVGzz0Uu0NvCcUGq99Uns3jTfL+RCnMK6n\n",
+       "H3kNfE47eW0WvATOj/gPnn8TF3H33XdLkrZs2SKpe9XaXZ1Spo0rNyg87tVix14fysGuau2D7RKP\n",
+       "ggpCDSUnqlGEl4sX3VdG0Nx6W4yFKG6RsUF1dxSXUswmMVWuXKE6o976fiJQvWlb+oS5pFTviLkl\n",
+       "ysBEoWHOYsy1jWWiPaNMYI67a9Ze1/pExMKdeOKJkpp29HVKXVnpK0aK2LK2sUn0F2O47VMD7A21\n",
+       "vxTjtqsxbh0s5ibscbFWzUhFKkmSJEmSpCMPC0XKPXZf6wvvDq+RDBa8Ca9lgjfrz7/xyIn5wbuI\n",
+       "vN9SlhxxC3gXXSuQex2qvjIio+fknBdeH+3KfukP2hcvkDgAvA5iwmjnSWdWLBW87lgUn4PS4l6V\n",
+       "1wvDq62N8ynFY3B8KLH0i8dkdV3LcZI1XUoKAQoIWXjr1q2r2m60rqLbOupgqbo7cwcKlFceH7fe\n",
+       "ETZBXxOfiCJVq8xQn4l2wyZR1Jhb+47RqcUzQmnPaO7qK2aG9mirqo5bUZu50+MCfRWOXZVxj59r\n",
+       "M+OJuSzKuu2LVKSSJEmSJEk68rBQpMhgwBvhPXC3jheIEuRr9LFOFxkzKD14oygueBWluAAyB7g7\n",
+       "9tgQFJ1NmzbtdDt8j+34c2Tee7wEXlrfCoDXuIm8dbx0+oX2wEvuu6ZLX/Tl3WFXrGyOAkftolpK\n",
+       "Xm/U/hG+AjpeG/2EnaJi+FqSqBIouR4/s9jMja2rVWPJxGUsd81wRZWjDxhz2D7qN7bE/mhDqv57\n",
+       "rFHbmKMoS439cL5tt8vvfW7xWmfTUkKuuuoqSc0Y6XstvYjaLDvHM7g5Xub4Uv0mV6KIP122bJmk\n",
+       "/rISYVJr7U0Kf0rStR5WFP8bkYpUkiRJkiRJR6amSB122GGz3g7eIHeRZCKU7ia5C8fD5nk5ihJ4\n",
+       "7A1313hneOKefdcVvFJqnLAfFCXO25UlvEqyAtkO3+P83DvAWyQrjrtpFAfiF2rXWCMbkf4oxXsA\n",
+       "d+8oGLxn3TPiFvCaUDzwbjnOuVlYi4HHIJEJRMV1r4OEAoP6Qf+S2cV5+PmxH/fevV5YrZeKGkCF\n",
+       "eI7H+8tr9VADh+1iF3j1vh4Ydoia4soviifqDHY4KVD0sLNx4onGrbXm2VeR7Xq1ftq6VNk7WpvN\n",
+       "a83RF8wh9ClzB2PP1cUSrDkYMa24Rmza4wh5qoD6u9TwdVGJu6VffC5g7btISaR/aYdDDjmk1+Nt\n",
+       "q0TRL9gj8co+Tpi7UHL9KdFxxx0nqbmG0r+uvjM+iA/llTmB9mbO4Pc+p/p6r7R3rcKXilSSJEmS\n",
+       "JElHBtN49jkYDIYzMzOLvt8kSZIkSZK2zMzMaDgcLliKPxWpJEmSJEmSjhRjpAaDwV9Keq6kfxkO\n",
+       "h6t3fPYkSR+SdJCkL0v6xeFw+B87/neBpF+V9GNJvzkcDq9ZaLvvfOc752UwdMUzC4jt+a3f+i1J\n",
+       "0sUXXyypnGnB81m258+leU7Lc1diXXju+4Y3vEHSQ3euC+ExUsBaexx/aW0/nv9ecMEFkprzI1aG\n",
+       "diV2xWOjeG7N+fF9znvvvfeW1Dx/Jibmda97naQmVobjJHaI2LTbbrtNUhPr9YxnPGPkONgf7cAa\n",
+       "e17pnerT11zzkAl5ZpRXsaZ/eE5O/InXCeO8fD2m8847T1Lcf87JJ58sqcm69DiUUlYi++GVDBza\n",
+       "gfPCbsDth5o+POfHHj1Wyvc3abrub82aNZKa8Yp90F9RXNPMzIwuu+wySU12EzZCzIbH4zGWiU3h\n",
+       "d/QhbehxYa94xSskSX/0R38kqbF1jpHve30lvscYIwYJG6UODn2MDf3yL/+yJOmWW26R1Ky3yP5O\n",
+       "OeUUSc0qAYz93/iN35DUjJmrr75akrR161ZJTQwOx3P00UdLasbeYtvKFVdcIalpN2KwGFscJ3Mm\n",
+       "/ee1+JjbsCFe+Zz9/fmf/7mkZm7BDsi4JU4S2J/H2HicHu1PvOBv//ZvS5Le8Y53SGrmKvqX42Ju\n",
+       "pnJ7LbQL58m1yDPNsTPs++abb5bU2MG5554rqbFT7I1rI3M8cw77Iz60ZC/ERvF7j0smrpS6T1E2\n",
+       "KPt53/veJ2l+LB/bH7dSOnb2kpe8ZKffq1Gk3ifpOfbZ6yV9cjgcrpB03Y73GgwGKyX9H0krd/zm\n",
+       "ksFgkKpXkiRJkiQPS4qK1HA4vGkwGCyzj58n6dQdf79f0g166Gbq+ZL+Zjgc/lDSlweDwb2Sjpd0\n",
+       "s2+3LzVqxzGOvHdPvLbmR6kWCt5OaXvctXv9pGOPPVZSo+TgBZEZEGUNuuJ2wAEHjPyfzA+8KLyc\n",
+       "qP4QXpXvj+PwWjZeS4PaN/fee6+kpt3I9sPLYS09vOxPf/rTkhov86yzzpLUrNm2ceNGSU374RXj\n",
+       "7XlmC3jVWrw6r0DPcbJ92tXrbOFFlvqZ9nrzm98sqbE7vNj3v//9C24fLwc4n8i+OB/UFdQI+uXw\n",
+       "ww+X1PR7tAbetKAfeUV5o19uuukmSY1X/exnP1tSM0fQv9g37YV3i51JcbV1xho2gHpHX7stMDa8\n",
+       "77x6PP93xSOq9M2Y4dUhuwjFw+ck1EnO3dXgyy+/fOQ48Mg5vg996EOS5itxjMlSFuGkQanBNjzj\n",
+       "1FdvYE5l7Hi70j6MIc/6cuUHtdOVKIgyaKNafT6WS5nobZUoiOqnffjDH5Y0/3x8NRDAfqL/O664\n",
+       "lcDOvL14iuRrT5aIauiNq0RBKWsVuqpF+wyHQ67C35BEvYF9JT0453sPStqv4z6SJEmSJEmWNGPX\n",
+       "kRoOh8PBYLCz1L9doyRqBbWxLygOrvjgJXIXz3P/O++8c8Ht4KFzd85dtj+P91gu3kfeBN6Ge59U\n",
+       "4CYmBSXAt4P3gzfgsU2A937JJZeMHBdeJF60q5OuKKIy4K3hZXrsE+DVc1wcJ94t54cqQTuDVw8G\n",
+       "V6o4Pz7n9Y477pAUe6muoOIFez0nwN6oqeKxeUAcRN+qAioICl/bSuoeC8fxcbzYGf/n/Ol32pEa\n",
+       "QfS7153bGZGHiw0wtkq15FwN5T19SmyGKyoca2n72D7bc0WDMUYb4MGjULntXHTRRZKauQab4XfH\n",
+       "HHPMyHnMrRI/SaLVA1xJKGWVo0ZGYxbbi87L10asVWKwG7YbXRNQ6SN8TintnzFY+0TH65fB+vXr\n",
+       "JTVj2VX92jUEvY5YaTUNnp7wylMJrpmo6cSdlvqf2Kxa5WhSdFWkvjEYDJ4iSYPB4KmS0CO/Kmnu\n",
+       "c6f9d3yWJEmSJEmyy3H99dfv9P9dFamPS/oVSRfteL1yzud/PRgMLtZDj/SWS9qw0Ab23HPP0ItY\n",
+       "qqxevVpSk8kQgZfH3TqeNUoO3ikeNp+790TmBO3EdjxuALxqsWd7Ad4CcQPc1ePlsv3IKyHWBQUh\n",
+       "yg4EV8xQooj5ca/UvccoA4d1zJzPfOYzC35Oth7eI/vFy4MoK8zbjf7y9b7oz8g7i+IZXPHk+Pg+\n",
+       "7Uu/Yj8cbxSXMy7YA2pLW0UKe8c7pt08XoR+pio17YZ6gWpC+3h801xK6yXSV7UVxsE9cNqescAY\n",
+       "8u+hJq5bt05S01dkuAIqZ+TRf+lLX5LUxFlii6weEKnH/5+9N4+17KrOfb8lckmL9PR0Fcg1Drgt\n",
+       "u9yUm7JdtjG24UIIoXGkEHwheoQ4QAIJGBkSSILfgdCJToGrBEJCnhMSEEgI0xiDyw1l3JSbsss2\n",
+       "bqCuYhK4Ckn+uUprKVj7/WH/ap39nTPOnHOttc8+jsdPso7rnL3Xmmt2a45vjjGmQ3noU4wprsfc\n",
+       "U0t01l8EfR2lDFp9W0rtRvv76QXg6rfXH30LBY8+SP8qlbdUH644RnMuCk7rGHf/Vurd/UdL0YLM\n",
+       "1ZTPoxKBMe79lzmLdxn3QYGiP6NAcV33SXQ2S4k6//zztWfPnvDvNekPPq1HHMv/a9d135V0qaT3\n",
+       "Svps13UX6dH0B5I0m83u67rus5Luk/QDSa+dPVZOO0ySJEmSJGmkJmrvfwR/+u/B598t6d2l626m\n",
+       "GlWylkrWK7Aq54w/31cG9u2xdtzyZrWNlYNSgnWFFUmkBeXGyo1OeMdawEqOzsHCijrmmGMk9YoB\n",
+       "1g5tE+VMofxYN1jhkdXnYIXcdtttc+UGP9/o2GOPldTXC+WKfKQc6pd9eNoNa8eVu+i5sbKwpviJ\n",
+       "NUVUGedEoUhdc801c9dxfwruj1LnZ9cBSg39GWt20bYK/YHcQ6342Ys8X9Rf3JplXLp6wHNTH1Jf\n",
+       "l9yLMYWy4Ke6M8ZqT4mP1GCIFBIiWcEjbyFSooCIWcYqfbg2ygncTxLLPvKpKUG9lyKGwZUooH3G\n",
+       "nrfJnOYKSiu1ykjEVNHpQ9VmHzPUK9GN+BfSfiiRzGm8Q5gDmEt5B7l/aaTmM958nDGHucpOvUXR\n",
+       "q1uNzPGUJEmSJEkykNFRe48FPDID8PlASYhyuwCrY/LgYCV4tBrZhX2/GasIHyCsAz7HKhzli9U/\n",
+       "VgHWnlvFfhK8n4zu8Dn23bG6sOypJ6wLtyLYV+f33IestCXrCWWD+1IOrCMvN8/P57GCUDqwoqhf\n",
+       "lAqUN7eOaW+s8dWKhtTXf61KgVLG/c444wxJvXV10003SVqbNRu8H0RWHf3YI83ox9Qj7YOyN9Sa\n",
+       "drAK8ZGLok0dMu6jEqC01UTbrQY1hkgons/rU1rb5tQZbcp3qVOPmuJnFBE7lqEKA8/hpy+0Kvw3\n",
+       "33yzJGnHjh2S1kYjtlLyq6xlqvoF5oRWRcqjGPlJv2DMlqLomMMXDXOa+6l6lCjtw1zDXMmcx1iK\n",
+       "djPcf9PV9eid40ot45D7c2IA7c8csdWVKEhFKkmSJEmSZCCPC0WKVa/7QvHv0n68n2XGqhkrxxUi\n",
+       "v57fF6vGo7KA1Tvfc98oX6X777FOI38NPudZkrHOXflw6zmy0lzZKYE1gzWCsueKDPd3nzGiDX0/\n",
+       "P4J6IUIFRYR8RjA0lw71iiJJlCfnl5G53f0KoJRJPfIrob2wJqmvoX4hJVAGUQJLSi7qD/3DVZ9a\n",
+       "PIu4q0pSPJa9T2GBo5YxRzBmoroeqpjQ5tyHcuIT1Jr7i+vws+S75XB/VE3aYqi/XVRfrdSeQoFi\n",
+       "hP8kCgtzhO8+RESRta7GoyzRb6hv/BpR2Sk/fZ05CvxdUPLfpd/4+a7uP8lcXjumeC7KQ7uvPiWg\n",
+       "Bu8vpTnYof/t379f0lp/4alwFZtxR72Tt2ooqUglSZIkSZIM5HGhSPnqm1W9WwH4cLg1g9WBUkGE\n",
+       "C993KxgrJ8p+WxuJwOfc98nhOp69OILroFzge8LzcF+sIfefwCrj9+6n4WcDRlBvWFv4+Hi7sM/P\n",
+       "/j7l5t+1VhDWKu3hUXjgOWOoz9pcOfgO0Y/wW+A6U/mB8ByRn8zYyCeHdsVqJKdRCdqX7zM+hvqP\n",
+       "YHW3Rm+uhjan79A2tXmJWsHypswoKfRh1MoSUd8Z2qfoozz3VGeUlUA99fqsVcTYJUBp4PzJVlyJ\n",
+       "8szijClXs4m65PNRTjSfs+n7PCf14L5N+OEyRnh38XlXpGhH91GLzg3lc1yP5yi9O/zvY33ifO6l\n",
+       "vkp5rVqhHsk9iG/bVLn3UpFKkiRJkiQZyONCkWL1H+VfwqKNovtQqrAKPEIjysyN9cFPVtsoU6Vo\n",
+       "Klb7WF2uUPn9eD7Pvuvl477s6/Nv8ix51uFSbhCeD6sW3yDOnItA8cKPACvErZzIamn1y8Aq8/xf\n",
+       "bgXzXF5/+CFE0XzUG+0Q+Q61ZgaPwCpGQcPPYFEcfvjhknoVx0+Uj3D/DurV84XVgl8TCmaUz60G\n",
+       "+mzJEo+Ico45PDtjbahvUwSZ1VthrmAOZAwuGvrQUIWPOYHow6H+gD7Gfa71PGQoRfQ9/FujdiTj\n",
+       "PKBqc1/6D3MZ9cJ9UN6Y43kXoQbzLuI6rs57VCrlpN2Zg4f6JI1VpByPvJ4K5maUKZS6qXyxUpFK\n",
+       "kiRJkiQZyONCkfK8TYBygGUc+QdgdWBNuPXjq2f+zb6z52nCaiBqDGsI3yP3dfF8T9H9sA7cFyU6\n",
+       "mZxyoLRgBWHF+DlgQDn5iQ8Z1mytzwpnEpIDxaMAwZ93bLbiknqA9eb9pfQ9niPyvYLW6MYI2nlo\n",
+       "NupWUD1acw7xecYZ+bXoJ6gAtXmlyMjPCQND/GPos1yDSMuSyokiACgG9JVaXyXGOGPIcf88oA49\n",
+       "1xtE+YQiyP3GHIOaHBHNUa1M5SdYG50X4UqS5zdiboyi7HwOd3zu9bmTMeDnjjI3e7Qg+P08XxS4\n",
+       "4uJKKM83VNGMznMdCuWd2r8TnyjGL+MmqrdWUpFKkiRJkiQZyONCkYpWt1gVrEojRaqUJ8fx/WoU\n",
+       "Fqw4FA+silKUG+UiX5JbxZ4Dp9bfA6sXq9qjq/zkcC8Pygv77PjsRAqYQ73cc889kvrnG+o7U4Ln\n",
+       "5LloDyKoAKvefev4vEd6ofxh9fB7IkS4Hkqat99Q6Ee16sNYeA7P5F/C6xF/BdqDeiopUvRPxjPR\n",
+       "syh/hx122EHFwM9+o6/RRvRZ+kKksnkkr/dNlBzGMn5jJd+R0hl/0Rg+88wzJfVjjXJxf9omyuoP\n",
+       "qNvUC3WPKrxt27Z1v7eZZ6RuBq6M+TuAfuR+fh4pXQv1h28T9e3KEQop/ZfdAtqz9rQCcvT55z3v\n",
+       "1FAFqPV0ghKML3Y3/LQN5oxWON/VTwS49dZbhxd2FalIJUmSJEmSDORxoUiVWFTuFM8azOrarRjP\n",
+       "W4R1gB8H0VLke3IrFouc72PdYC1HChFKBtYJ9y1FTFx77bWSegUJsKJqrdavfOUrknrrPIqgQD3w\n",
+       "DPDu31Aqt7czfgdu5WCNH3bYYZL6esSfgfLSHvz0aLxzzz137vuUf6rIqKlyrJTATwPrttWfguhQ\n",
+       "t35R1Gqj/1ACiU6kPqn/M84446AigwVPn0JNow3JZk+fjfqO++D4uX6MrSiPj0NfRs31KKuSDxJK\n",
+       "xp49eza8Twnq/utf//rc7/fu3StJWllZGXX9rQLKG5Ge9957b9P3UQZpN8YC7VPym8QHDTyfU9Tv\n",
+       "mDN4Z3j+qVoi9Zu5cGx0nCtZKGzMFZSfcUL/jfxbiRz3dxLXHVpeFCl+Tu3blYpUkiRJkiTJQB4X\n",
+       "ilTJB8mz2UagEKH8sMrG6oFvf/vbc/9GOYh8jqLzpfg9+7hYR+4/gdWBgoJVixUTZQxHGePzPD+f\n",
+       "c98f8Ky6UOsbBVgHjkftoS64IjXWmsKqpH7PPvtsSb3vDe2IiuFqRslP4ctf/rKkPk8W/YD6/JVf\n",
+       "+ZVR5d8seM4oitPx9rv88ssljT+PDfUGn8X1fBdRoMhY7Cqvny7f2od8rJLDiuszF1BWt9i9DwMW\n",
+       "PBGJ9E2y5IP/e9H4WW9EWOLXRn2gxkZnlqHMHHfccZLW+ikCcyx+q6j1fK412g8Fieu2qrj4QqGq\n",
+       "8rOWaK6E6Hnov/xs9dMFFEaHfj/1bsz27dsl9bsVnnMxwvN3EYk71RmOztRRgalIJUmSJEmSDKSb\n",
+       "OoNo1U27bvafZQ8+SZIkSZL/3KysrGg2m62bwj4VqSRJkiRJkoEszUdqZWXl4P41qljkKwRR1l58\n",
+       "Vzxj9Bvf+EZJ0i233DL3dyJv8GN45jOfKan3d/jSl74kSdqxY4ckaefOnXPlY/+WaCOyI+Pf4Gob\n",
+       "PlpEjuCDA/hVEJkQ+VE43Oe9732vpL4eazN+4/eAT0tU/5Trd37nd+buu2i4z8c//nFJ9WfU1Z5/\n",
+       "BkTtveENb5AkffKTn5TUR6fRT/GXiPI2Rb54+IPg64U/wPOf/3xJcX2+6EUvktT7ruFzBa95zWsk\n",
+       "9b5c11133brXIefQRRddtOH9pob7tN4Pnz9+ls7Sw+fs9a9//Zp7eSbqofiZbNzn/e9//9z1fez9\n",
+       "6q/+qqT+3EnPW+M+Qc7qZ1t93yi6D58mfJKY+4CxTh/Ehwc/Pua0Sy+9dO5+i8b7Cn6d+AhNxUte\n",
+       "8hJJva/WVh8Li74f78JW/1bAh+2SSy6RJP3hH/6hpH5OpX+WfMWIouNdw7uZfk40H75NF154oaTN\n",
+       "r8+IVKSSJEmSJEkGstSoPRQfrD1WrVGEQqQEEG3FqtbzG1155ZUbluO2226T1GdpRVHAGqJcKBwe\n",
+       "fcfnUKQcruflgpISV+L444+X1K/WseBLikytYuNRgiiAU52cXYLIq9pzvmqfC6LIFdqtNtIn8jdE\n",
+       "xSDXUG1mcKLl/LooZKgPX/jCFza8zqIiX1qJ8oE5z372syX1SlykSBHptrp/ou4ypqjrSJFC9cIC\n",
+       "xjJ3BYo+72O1NAaw2KPzFVELI0UqinqKxgB1G9UxdeoKQa3aWwIlAmWB52pVmFDv/fPRGYQobX6u\n",
+       "qqvEU2Vmpz25fu1pEluNoUoU+Nzi70jPERcpxLSXR9MxLrlOdKbhsklFKkmSJEmSZCBLVaSuv/76\n",
+       "Sa6DLwurYaygCy64oOr7WBNu+aNwsbrG+sQqZd+2lkVHSGLdcp8oi69njQUybmNV4gsW+fxsFpST\n",
+       "dop8kbBK+T1qgytOWLWoF+7XgnI4tZVJ/6n1YUORImcPoMzhM1XKDD6039EfUOTG+hu5ShL5B6FI\n",
+       "R8rikUceKalXH8hOLrUrRuCWuVu+0Vl8Je644w5JazOYU5ebPZZqc+a14v6r+AMyl/gpBCUihS4a\n",
+       "kzyPz2m0G/U/Vv3nOXbt2iWpz+vlGfAfr/BuYe71uQl/4tq8WKjvKMtbVflLRSpJkiRJkmQg/yky\n",
+       "m5NpujZLK5EKWPaRtYQFHe33EomDj1JEtK/fivttAPv+PL9b4ViHWPKUhzPK8EHheyXFYaxV10pk\n",
+       "nTooLzxfZO3TDpHP3VTtFVG7z49fCf0TNYF+WHtGXe1J8Q6Kz0knnTT376lOTI/6WZQd21nPf4ho\n",
+       "tNpT6aM29rHGdaM+E3H//fdL6tU9IkpRQz2qrsRZZ50lqVdeKI9HAjPHUccoMlMrUUQ8f+QjH5Ek\n",
+       "3X777ZL6jNp/+qd/uuH3ozPPpppjaF8UjVYfG/z6UFLweWOuTCVqHj9f1v1aUZiIRi1lPud6zMl+\n",
+       "LupWIRWpJEmSJEmSgfynUKRazwvCKoQowsXzMpFrxfNeRVYJVhCWPNZQq1XI9/GB8jPqUCYinyis\n",
+       "V6wqVvcoeYAVHkXEbHVoF6zv2nxcztjcKiVozxKoCfz0iKRaULacUvQlvodEdEXXGcrQfoV/ET9X\n",
+       "l+voo4+e+xt9wf0bsZT9HEWPQmIMb9u2be46EPkbAtc7/fTTJfWWOH5nrdFyKFBu+TvMWfiqUO6o\n",
+       "nDxH5E+HgkZfZGxQ935+JPUWKVKeWy36+1goB3N+q68WkeXM2fhPRueEPt6hfiPllt/j98k7MVLN\n",
+       "3TdxrALIeOFdzhw3lsfGGzJJkiRJkmQLslRFCoWklLm4FvZPfRXr+7Ge2bwE+Wr4HqtiVtNuPbHq\n",
+       "RUEi+ojft/qsYA1SX55bpeS3QXnJl0V5icqDKDJiar+KyNcrojVT+djvRzl/pqI2r5MrmEMVqciv\n",
+       "oDaijfvX+qpFeLsPjYSjPfFbcv+g1ddGaWLsoaTgs4EFzRjjc4wBxg5j1vtsqS24D9dBBR6qaGBB\n",
+       "l9qOclJXKElReUvP4ZHL8LnPfU5SP5egUEVRWYwtyhfN/cy13K92rnYYM9y3dS7juVHs0idqY6gv\n",
+       "V0r9FA1+4jMVvRPpt8yZY3dHaP+p8qZBKlJJkiRJkiQDWaoixWq0VaGIwIpxq8MVI6yFQHXZAAAg\n",
+       "AElEQVRCfu/RQ4cffrikPtIGawS/BhQg/BDc54rrYuWhhPDv1ufF14fPR5EuJf76r/9aktaccQj4\n",
+       "jwy9fi2t7VyrJKEAkuOF7xHBRZRliVofJvobn5/aWqU9sMJa/TsAtcXBT2Bq6yzC+z3jtNUnjXFE\n",
+       "tOxqhdbP03RfKZ8buGfp3rRtazZ/nhnVrHTmWInW+9P3x541WPJDJZ9SidpyMLcOVaLAM9y3jiHm\n",
+       "XsbK1Op8LVHuvK0G/d0jqD0HHXNz6V3gGemJmhzL1O2YilSSJEmSJMlAlqZI/dAP/dBgJSqKNmIf\n",
+       "1X1QiNAB7osV6tYK1hfXIWsvn+PfRMu5fwEKAuVkNc79eF4+57k0ovrAKh5qpZXyInnm8KlYVDZl\n",
+       "hxPvUQjJk9XqW1Xrg+T9oBbvjxH0Q8o/NB+UqwD0f3wHUXRq82ZhFbae4ef+SiifHglWApWAel89\n",
+       "flAymBvoy9xjaB9E2YhOP4j6DH2xts2nZqwSBWP9FFuJ/OdQGqnv2vakX9SeKgCMDVc4p9pFqZ0b\n",
+       "PUP7VoXdDsqJksTc4pG/JV9B1HTmmkVFUo8lFakkSZIkSZKBLE2R+uEf/uHB1mFk/dVmw3UrAh8X\n",
+       "V6QAS9rPZsMa9VwoWG+cJYZViKKEJc19aq0a7j82qiyyKqPzqoZC/VAfQ/M61YJ1g0KB38ZRRx0l\n",
+       "qT8bcb0or9WguNCfvD7oJ9Rjawb0qL25L9Ycvnzbt2+XtPbsu1rcLwdVZWj5UbJaFSnvtx79WgtW\n",
+       "Kd/H6pX6NkO9w5IfqwhxL7eIS5GUlGdoRmavs2X5ykw1J4yFvspcSLuUlB3aoVVRI0rSdyWY21p9\n",
+       "1pyovChgPNdmKYFjYU7gHcnz8W/eiVDqx7Qb7V3rv7rZpCKVJEmSJEkykKUpUg8//PBgK6fVgl1t\n",
+       "sUq9lccqmdU+q2VXttynCmUF68gjCbA+eT6+x2raV9esyj06z8vL6n5o/h3gOVE2vH6mgudfdBQg\n",
+       "YC2eeuqpktbWZ+3+uvvVOLRXa0Z9iKxL+oWfE0d/oh/WKlLUx2mnnTb3e/pRq28XDP0e/Zh+wU+s\n",
+       "/Vq1BYUYJXj1PIJCQdvT9/jOUDhzrXWs8CxT+bdt9agtotvo40PPzKNNvU/Qd5nDqNfa3Q1yAtbC\n",
+       "fRmTHgW6KJireE7qoVUF3myoLxQ15l7GDbsGJ554oqR+ziv1E8YBavhWIxWpJEmSJEmSgSxNkXro\n",
+       "oYcOnlu0aErKgVuNvt/OPji+NW4lesZnrFd8XFAQOPfKz7grwf3uueeeuXK24idoY+VMlVk+YrOy\n",
+       "AbtPFnmlUA5rc/jwuakinhyPXMGKf+Yznympb2d+/8IXvlCSdN9990mS9u3bJ2ltFBztyXlzWN/k\n",
+       "RXOGKsKtkU9E8mClAv2RemYcRv4uqAEobXxvdbZjH+uMRfzkUIGpyxL0IVROr7OhfplT4TnsXAFo\n",
+       "JbL4Ud1LcykKQ6sfn8NzefSXZ6hnTqVd/Lnpc5GPTi1E7zHHtPoVDgXfPp57sxSpoeeN0v6MN+r9\n",
+       "jjvukLR2Lq7192XMbxVfPScVqSRJkiRJkoEsNbP5ZuWEcGUByx2fE6wUVtPs72MNHXnkkXOfx/8C\n",
+       "a/Wkk06auz4+JB4dNlTh4ARyyj30nK6f/umfHvS9xwpYN1/72tck9dY5ClykpHgkSKviMpYzzzxT\n",
+       "Um+F8xwoNNdee60k6Yorrpj7HuqBK2233367pF6NQcF81rOeNaqcWMdu9VMOyu1qBKqBR22iOLnC\n",
+       "Gvm7MH74/N69e+eu+6IXveigvxVjlUhN2hhlpZT/ic+jRhO9NVQNnsqnhrkIxYBn9+z0nMtJmzAn\n",
+       "lXxtPKcdlJQo1MUDBw5IGq/AkbWe0whoB/qG93nmcPom5SHilbl4qF8jbLYiQvsuSh2PGJpLkLFO\n",
+       "/Xs+N/A5gnHL+ZmcwuH+pEP9MxdNKlJJkiRJkiQDWaoi5aAIsYqttWrIaM2ql4zWEb7KxZqLzhyL\n",
+       "9vs54Zx932OOOUZSb816uXg+9osj+Pxhhx0mqd9v9pPkoTbrMNYCn0fxqI2cWBbkT6I/0D+oZ/wH\n",
+       "sBb5PdYN30M5cavb1YLTTz997vd8nvriJ/WFtUg7uUKD9Uz5jj322Ln7YcXzXPiscR1+4idA+2HF\n",
+       "uXXO52lvf176Jb5T3J/+gxXs9eV+Ifhg+ZmZrlhFmdlL45v7ez1vpCrgV+aZrDkfkzrctm3b3N+x\n",
+       "dHkWno02RI1zn46dO3dK0kF/T/cloi4i/0jakO9Rh9QZyhJwn5LCxVyGGks5hipFKGHuv0Z90SeZ\n",
+       "E6kv5hbmZH7PGPVdCfwEUfmZe3neKFIaBcvzRVHvnt8IjjvuuLnno33oD4xJrhep1dQzcwPPN9RX\n",
+       "DcWR/sq/eZ5FKzNDd4vwJaSdKCcKMO80dnNoX/w6gbGOkrjo/FEeQR/1s4hUpJIkSZIkSQbSLSMv\n",
+       "Sdd1s5WVlU2/b5IkSZIkSSsrKyuazWbrOo+lIpUkSZIkSTKQpflIvf3tbz+4nwrso+PPUAv77vhU\n",
+       "sL/7hje8QZL0J3/yJ5J6Xw/8HNj/Zp8f/wH2w0vZcvFlIjrqda97naRHVq6rYX+bvD/kJImu5yoh\n",
+       "/gf4CrF/e9FFF0mS3ve+90lafBQkz7VZaqLfj3b2DPTuA4SfDPVFxBX+J/gZeHtQn5v9fG9/+9vn\n",
+       "ykf74x/h7Ypfjp+sjv8Kz+s+c8tqv0984hOS4jxe+LJ5RneH/FvR+FlZWal+NuaAUjQUbcLcQJu8\n",
+       "9a1vPXjPzYD7vOMd75BUnpvo4z//8z8vSbrqqqskxXWMrxA+Tv58iz7jb9lzC+Dnii8PPmn33nvv\n",
+       "3OeYY/B7jKId8f152cteNnc/fIbof94PX/nKV0rqI5Ddf5e5EF8++gP+la31ecEFF0iSLr/88qrP\n",
+       "O9H9asfZ0Pv9xV/8haQ+ys/BF8vnHuqPesNHKopmLdVjKlJJkiRJkiQDWZoi9bSnPU3nn3++pF6J\n",
+       "8sgOj3hg1YgChBXAqp8opK985Stz34usMK6HIlWrRAFKUSnaDUWBn1i1HgESWXuUCyuIn379oaDM\n",
+       "jD3JHEonsQ+F+n7qU58qqW9/twaxEok+8/xFKFooVmOjFXft2jX3b/Ib1YL1S7+m3ogE4npYSx4d\n",
+       "iDWG9cd1yN6NOrEsShnlS0oURErUehB5iSWP6kUdkeeGqBxUPeoeJYo6p45POOGE6jIsAuYO5ooo\n",
+       "r9GrX/1qSX3dl+rYFReHOZL7+2kOU0HfpZ0Y24wF5jpvJ37SriiIHn1VygzOXMEYi6IcPYI4wjPP\n",
+       "QykfFX8nh6ArUvydfsnzOyhWpdM0xp5FGVGrRKHQsXsTKXxO9Dnqg+dHpWfc+NqC35fyy0WkIpUk\n",
+       "SZIkSTKQpSlS3/nOd/Tnf/7nknplqOQbxWqV1SSrRvLToGi50hOtMslNgnXF50qw2sUaqf0eUD5y\n",
+       "rHA9rK5ov/fb3/72oPuVmEqJgqmVKMBKxXqK8gmhiEVWGMoGis7Q86tQTp/3vOdJ6q2vu+66S1J9\n",
+       "DhmUMfoDVjdWElYt5bzuuusk9dY7VjT5obgemey9PdwnCcVq0WcikmXcM/4D/d99Faln/HRK+dKk\n",
+       "tYqJn7WGhU8bYbli4XIP6mSzTmEowRzIHMDc50rNpz71KUm9Je7wvFyvdFpCqW/g78o5kNCao87V\n",
+       "YxQwFCXaizmAXQjyETF3uk9X7f0ZU14e+iB9M8qt5qw+B7IFxvi5554rqffdeuCBByT1YwmfqEg5\n",
+       "KylRvHv37NlTVS73GZwK3s2tSlA0LvGN4txS8pNFz0kesW9961tN94dUpJIkSZIkSQay1MzmWBG1\n",
+       "+7NYI644YeGyOndKq1yui9Xj+/RYt1juroTgP1ALVoqflM31+TsRNM7UClKJE088cdT3sXr8/KVW\n",
+       "aEcUpejcM6zHSOHEykUBGZo1l35z2WWXSer7S6REYc2hhEHk40V9UU7PuuzZjYm8oj+ikLki5dbk\n",
+       "Zp1kHylRQLnI/M5zEHHWAs9MHbivDG2Ahc+cgoWLWs33+XdtpmMHBQn/RuaU1rFAn6YvcV1v40iJ\n",
+       "AspRUvdKCgQZyIlY/vjHPy6p9+tr9T9kDJF5/v7775fUz4WMCcrF9d1fkHcKn3d1N8Kz51OvKFzM\n",
+       "zbXRizXq6Xowh+DvS6TuU57ylLnr+juPuaIW3kGl/jKURUXtgWf4Z87wkwVQYF/+8pdLkl784hdL\n",
+       "kt797ndLkvbv37/u9WvrMxWpJEmSJEmSgSxNkfrxH//xg5YnCgNKgu+TQ+T7RBQX+/i11oL7GmF9\n",
+       "YOVgxXK9aFXdul/MffgeJ5zXKjaueLBqjnyG3BrGqsEqLvlH4HMzFPL/UI9DI12wNrBC8O9wX6FS\n",
+       "lBhWGM/FdYDfu58F555xfeo7UkId2smjLvFV4n70c+qh1j+HfoXChZXJeW7gStZmK5wRtAP12RKl\n",
+       "57hPCW3ovi20KUoPygdjBKUDlXDo2WkeZTUU96dz1Zoz5/y8yS996UuS+r7AXOM+IT72SooUc8vJ\n",
+       "J58897M1ctXLjyLlfnF+ziZzAWMq8qNjDvKzEh1UU8/NRn3T/pEa7rT6X3IfovUY01yHOQQfoNtv\n",
+       "v33u+7R3LbRrKVptqB9lSZHivuQ7u/baayXVR4V6P4jahfxYvEPYBfrmN7+54fU2OtdzNalIJUmS\n",
+       "JEmSDGRpitTDDz98cNWPdYQi5VYQViF/Z5WIvwKrZVbjJUUCuD9Wmp9I7deJrIuh0U5Y3EQMYDWX\n",
+       "TvZ2q7i0asbKYPVPPWJ9skpnNc+qnPrxHCat1OYJArcmsdJQHrES/UR2+k3JmuG52Tf3fXBXIbgu\n",
+       "1qFHjdZCP6W/e3n4O+3A/dwPIIL+irVFFF+UY2azod/RXu7XgwqE9eoqCO1dU++0EddCyUBt5tqU\n",
+       "yTNFY0nTFozxReVPqqWU3R0Lm7kRlY8+h/IW+S5F+XUiyPyN8heps0SQMpZdCQB8VSKfFWCOop1Q\n",
+       "1aPyUl8llZN6o28ytzLnMDe4UhTRqkjRLtdff/265cK/E8XV8XeRK038m+hV2i+qN96NjM3Wd11t\n",
+       "vqzdu3dLalfHfW6McjTyLmM8R5nK8a2rnXMhFakkSZIkSZKBLE2Reuihhw7uR/tqGWsRS519c6wC\n",
+       "LG8sVDI4Y63VRiBwH1av+NpgnUbKlvvODM2bhLXMqp9/l/wShioM1BM/I3g+fkYKme8nLwraA+ua\n",
+       "56fd3NouEZ1bBSiSHk03NOstYM1FGe393C3uH/kMOvjLuNW5bB8o2qvkX4QKgbX97Gc/W1Kfm6jW\n",
+       "X0Hq5wpXKxlTKAVY5LQp96Dvo1ZSp2PPmuO6WPilPD/R93k+xiD/5jl4vhtuuGHd62Chn3322ZKk\n",
+       "G2+8UdLaOQcLP4rsZCy9613vkhQrTaittVGPpdMRmHOmnnsYc9Qj9eG5zVDHUeB8TEeUziyM/k57\n",
+       "8pN3nOfpcgWMqEp84fA9Y47hHVc6VYOIYJTHqfNI1c5xjj+/t4NH1JeiKGvb0UlFKkmSJEmSZCBL\n",
+       "zSPFqph9bl8N+r9RbrCS8L1h3xslwVfLWEFEP/nf2TfF/6GkaGHpo9SMzTSONYiywnNiBbolvui8\n",
+       "P17vrsxgLW6W7w31wv417YdChRLl1kakIGFFHjhwQNJaVSCKMMFaHApWl/cX+iX9GOXGoxRrcQVx\n",
+       "qII2Fa31RnkZ39QbVnPkB7HePfkMfYVr06ex4L3vuIWO5evqZS3MGSgZEN0/gjkOlZHnoq/Ql73P\n",
+       "uHrM50t5cmrLxfWjCGLao1bJYM6ujY4bS23Gbuo9Upcd7/v0qygKznPMRSo7Y4N+CV4e5jLah88z\n",
+       "hk455RRJa6P/gPpgTqLcter/oimdl8p4r1Wahs6VqUglSZIkSZIMZKmKFNYR++Bu7aG8sNokVwhW\n",
+       "D99HscDKcl8MPleyNvAxiawwlBj3OWF1PxaUKM6P4mw9Z2guG6CeUEJaFS6e18+RWlQWW3zlsOpR\n",
+       "KKivyGrFWsEPBtx/xiN56IfUD88zVpGi3r3/UP/0O8+iTXlq253nY9ws2oetRGt/QDGMIsBqrEss\n",
+       "feoWSxPVmb9727oS5RnOh/Zt9xPjuq1jGUXKiaKpGCOuIDBmyNszFhSPc845R1Lvh0neLNoBxYW+\n",
+       "HUW1jZ3jWqH9mdt4HvqavzvoB7x7ojk0OjUhUj54blemHO7n7er383J6zkCiKfHZczWb73O6gI+9\n",
+       "ZUcEl/pJNFdE49198mrn/FSkkiRJkiRJBrI0ReoJT3jCwdUzq0qsJ1aBWNQoElgvfA/rklVnZC3W\n",
+       "7o9idbBKdyUDHx23osbuF3M/Mr3j9/Dggw+u+/mxkUMoetQr5XcrCWvDT0Rn1T40wqEVnhe/CxQd\n",
+       "slLT7iiKPEeUEZw8S2QP5jwvQPly3zCuRz9ojYYjqhQ/A6CeaXfql+eIlNbSffDFcj+KrQqKJtbz\n",
+       "GFWCtqNvlKKCIjWVPo6CNNQ/0cvDs7VGP7Wqk6W5iT7t0YCt3HvvvZL6evaM6yhQ7uMVsdkqKvVK\n",
+       "PZTyJTGH0n7R510hYqxHihT9jTHLLkwplx9zis95Xi7eXdQ/ShTRfFGEts/1rf1wUeArxviln0UR\n",
+       "vrQHP0s+gLX+z6lIJUmSJEmSDGSpmc19NYg147/HukEJwEL3fEhDc1EAq+vIKvMow6l8gVjtu69P\n",
+       "ZP3WWrFRLhaui9UXWUfcx8+nipSoRZ3wjVXm5ylhVWGFuQIT1R85Vcim68oj98OqIdcKChh+FH5O\n",
+       "WQR50E477bR1y4V1h1WJVYxCi3LI87tS5jBOuK5HirVS8gOZCvoP9e+KcwvUIZYnY4C2Q/3zscfn\n",
+       "8VHBYqcPnHjiic1lkfoxxpisPT/RoY9PrQSg+g7ND+R5jpxbbrllWMEmJvLpoT9QD1EUGDAWSnOe\n",
+       "z72l+uXv5GtCXYanPe1pknplj34QKWJEoDMn+DsGP8TWsb1sJQr8nN1SrjnWDlF9+VxX7Zda9akk\n",
+       "SZIkSZJkDUuN2vPcJq5EsQ/N51hFuo+Q5xeK9tex+LHUUS74Pfv8Hi2EdXv00UdL6pUvrBZ8aoaC\n",
+       "0nHYYYdJ6q3XSGGrzWXD6hqrm+fgPoAfRZRFuPWsvKm566675v6N1UF7uZUV5bJx8DtwKxq1YPv2\n",
+       "7ZKknTt3Sur7IXm/ajn55JMl9daQKyzUP+XwiK5SdKJz6623rvv76HypEotWovz8OFcIUZHwbaO+\n",
+       "qE/G/2r4LMoSSoTnJOOeWO5kVSeTs6vQYy3xoUoUtGR330xqfXlqKUX1odKiJDFmeEfQvmT2pk9F\n",
+       "ihTqZxRl53AqR4naTO4Oc3F0tiNjJopIBvpvNHcse24fS20eNsY1c0c0p9E/Wue8VKSSJEmSJEkG\n",
+       "sjRF6id+4icO7m/iA4VChA8IVgVKCkqN+/Twd1bxrqywGiU6DqvQM4lj+fr+9KGHHiqpV6CwEvxE\n",
+       "bvDzf7ycXBdlyfM6RVaCW2ElojPdyH7M/YaeFbhZoDTir0J+KKwvrAeep2S10w+4jvscUW/0P9qT\n",
+       "ctRarUB/cYXRlUxXUvn8WN+/VlBxeH6stCivk7OeQiT1UZL4+WAlY1V6ziPqmYgirE7GP+NutQ8f\n",
+       "dcoYI2+O+z9SRr7L5/k+90BhYeygRjKnOMwp3Ae1+5BDDpHU9zV8rnhmzzM1NDKU66E633PPPZL6\n",
+       "OYb710ZGou5yXRQf5i7aIFKOmPOISGZsUa/R/T03IPXhudWoZ8YYY4jnp57xKSrlZ2Iu5Lr85H70\n",
+       "Qdqr5CPlf+f+0WkRnlMRpY9+QP1F0WRbfS6fmkhpo996xLPnDGRcRpHrtaQilSRJkiRJMpBubE6i\n",
+       "QTftutlQf40kSZIkSZLNZGVlRbPZbN1U56lIJUmSJEmSDGRpPlIrKysHo9WIPBi6v4vvEPvW7Jui\n",
+       "epXUryOPPFJS74txxRVXzP0dPwd8cSK/hdr7RdSeQO73e+c73ymp34/H94d64bwrIK8RUW+uSuIn\n",
+       "wn4y7VL7fEcccYSk3tfL951r8xKNrc9S5E90v8suu0xS3874ReCXgJ+I++bVQvu89rWvnbtvBL6E\n",
+       "/Ny7d2/T/YD7fPCDH5S0tl6i86daod+96U1vmrvv2OtHedH4/aWXXjq4rzglH6WxfbOV0v2mipoj\n",
+       "MvllL3uZJOnOO++U1PuQnHDCCZL6eiGXH36k+I4x9okgZQ7lc/wbf098WpjLGGulaCzwvhX1FYjq\n",
+       "k7mPdxPl2LNnz7rXwZeJn/g+eUR6aexNDff7/d//fUn175Sh/cjrkzmO9uPdRP3iuxbB3I3fK3MK\n",
+       "/eSXf/mXJUkf+tCHJK31+cNHjn5a8u/k3ct9Pc9UaZynIpUkSZIkSTKQpeaRqo0CKlFrtUQQ2cOq\n",
+       "2fPalM5dGgurdJS01uzCHhlSivb65je/ueH1SnmlIrACKU8UAUHUHNF1HsHiJ24TeRFFaESZ5rEm\n",
+       "W60/rkO5aA+PMBqqrPjZhSWw3qPnQEWgnH7CuxO161T+ktF4HHt9j6hqyfnSGgXHHNAaNbcsaiN5\n",
+       "S6AYwTXXXCOpb9Mbbrhh3e/dfvvtVdeP1FQsfsayzwElXJFq/T7RdMzFlKN01ppn46ePehQmLFqJ\n",
+       "cpgTqA+iLKO5eao8YERJUn8oc7Xnfno9MfejSEGUgZ7dllr8zETKXRvFt9SF1FaBwcfCLgpvH3pQ\n",
+       "I99DnmSBhvzJZF3baFwnYmwILGkGGFS1kxL3LSWro9MyCFyG9xduKYkhLxEfVB7qWgvtUtpKqz0q\n",
+       "yBeCUZK9CJ7ft2iBeistoGDoMSBbBRaWhIKvXkh5m9CGvECoI09pQts897nPlbR2rNOGQ40/krse\n",
+       "OHBgTZnXgxcxiUFLUH6OEKlNGAnMKT73scVVMr5aiVJkRCluSvicx1YPC6PSAgGjif7DnFJKWEk7\n",
+       "+ruD+377299e93ulrcepwGhjAUPi2Vp+6qd+SlKfMqcV2pH+7As6T9ETpQ7y6y2a1vvk1l6SJEmS\n",
+       "JMlAtqQihcV5zjnnSOqtSBJ3RkfAgFs77izO35EfHawZv8/QZHl8j1UuVsLP/MzPSOqtZxwzS8pC\n",
+       "7bEDrc7r4FZL7ZE0JUjq12rVl6z3sQqLHxvB1mZpK6r2kGa3brh+LZESBX6odAnac6x1F1nVUX9B\n",
+       "1fEjcFpBPVhPKfW+wBaB92m2QxmLzAUESlx11VWS+rbCgvY5gb7DdSK18cEHH5RUf/QExwrVuhWw\n",
+       "Tc7c1Qpbd1deeaUk6YwzzpC0uIPIh26ZeqLT6ADv0nFhEDmp0660WwTq/fe//31Ja+fmqP42K3Em\n",
+       "5UNxbFWkhipR0e4NSjBBC7QfW8alw6K3ymHJTipSSZIkSZIkA1mqIoWFijXk1iT72uxXR6t4D0F1\n",
+       "RQqrgNUsfhORIlWylvh76z43Vg8KAv4S/D6yZvk89ykpCThOYkUPtSpgqOJD/VAed34fegyGM/b7\n",
+       "/nyt/iUR9NupDv1FbSBNB34rHGlUy1QH30b9PnJ8rj10me+jGu3fv3/dz62nGPrvonvedNNNknqf\n",
+       "IhSO3bt3S1qrCvvhsEDfKfm9tSo7tc7bfv3IF8iVCdT9Eq3+fK20OsmjIDGnoAQyx6FAMuZKxyt5\n",
+       "f+F6hOczt7hytWvXrrnPf/nLX567/9g5dyp4B9A/okObS1DfvHsiv1V8nLxecRLnOnyOfkk77du3\n",
+       "b8P7b9UjcFKRSpIkSZIkGchSFanIWnQPfw+rd1Ca/PPg4djsZ49l6OoYK6+2HOy7oySUFBisMbc+\n",
+       "sJ5aD8Et+RlEUD9+P/xQIkVwKLWJPjeLUrRhK/5cUeRLiUVHDLWWx6HeIiVqCpgzIjUPixnVdKzC\n",
+       "QDTXovumH5pM9NhLX/pSSX0UWaRIeUQwKv9UKqbTegA4CmCtotaaBJa52d8Z1Cv9gWi2yIeKdxWp\n",
+       "XpYF/ZgxOTTyHF8m0jygJLn/JrsnPgeg1KH00j9J6IpvFFGi7kdL+03lIzXVbgikIpUkSZIkSTKQ\n",
+       "pSpSUR4e/BXYF8VCrY1C86ghV6qwgsYeWcGqtlV5aL2fK3euEKE0sapn9e8RPJudDC4C/4OhSlcE\n",
+       "PnfUR6vv0GOFsTl9Fu1nMNQPYyuAIkN0EXmE8OUZGum46MSeqJP4fOHTxX1Rf/lchCsxrT5MrZHC\n",
+       "PkdNrZa2zrXUF8oH/eG0006T1D8fc1ikSDHX1kZYLwreCeTU83ct78Df+I3fkNTPLdddd92G10M5\n",
+       "ckWKdyvtyHhBoUIRjaItUbpckaLcrRHKEVMlH4ZUpJIkSZIkSQayNEXqSU960pq8QqyOsWawDviJ\n",
+       "dRD5VrH6daWDdO+eUXvsqpTVc6uy0rpP7dYZygvwHPwe68NX/WN9V4AIjKF+E1MrUYAVuCh/jqHg\n",
+       "p7LoCKitgo/Pqf0RpgT1mzFJ3+YZFn081FRQx54ZHAv+rrvuktSuMLUqcK0Rvt4nlh2V5bsLKHQo\n",
+       "k5SPf5fm8KF5vaYCZTI65QHliEN+UX4iRYr6iA4d5l3LO5Z/+7FbDgqwHwEDzO1THYU0tf9qKlJJ\n",
+       "kiRJkiQDWZoi9WM/9mMHrUFWmayOWfWSVdj3XSNY/XpkDN+bOisq92mNPGk9UBPlC2sPJQ+wejbL\n",
+       "N4UDXbHOavMDLZqpFLda6G9Y/dFhvVNZpWN9+mBoxvuhML4XpUiV5oWNQOXmJ326VRlhDnCLfCgo\n",
+       "TLVRflj6bvHTV4iWOuSQQza8jh+oPbUviePKQOQ3O7Tvcz3aZ6gSEZ2ZF0GfnzoyuRV8tCJFijmA\n",
+       "Q37pd5Q/qq8o4pwoQRQklF7u43M0ShS+fVF98Ryt787NojgDdV33Z13X/X3Xdfes+t1K13Xf67ru\n",
+       "zkf/+9lVf3tr13UHuq57oOu65y6q4EmSJEmSJMumRpH6/yT9T0l/sep3M0kfms1mH1r9wa7rtkt6\n",
+       "qaTtkg6RdHXXdUfPZrM15t2//du/HbQ6sIKwWP0srtIJ3igkWKbui4JVxyqb6xPBMtRSxsppVUKi\n",
+       "Vb5HrPj+MvmY3CrDJ2izFCmUF9qFco71AcLqmDq6cGj+rBL4j5BTJlKkpvLZms3lJe0AACAASURB\n",
+       "VEodYBwsKorTFaLIL2IqxvjV0DcYY7QpPiM+59DWURnGnk3nFv0YtU3q5zbmupKysmPHjrl/L9rH\n",
+       "xyOsPQ8WczWKIQpT7Zg6/PDDJfXtfMstt2z4eXx0UD687x555JGS+ndKdAqC764sC3Z9ShB997zn\n",
+       "PU9Sny8qytfFHOLPh/Lkp3HwjqR9+Tt+vShh99xzj9aD64wdD4uiWKrZbPYNSev12vU0thdL+vRs\n",
+       "NvuP2Wz2HUn/S9Lpo0qYJEmSJEmyRRnjI/WbXdf9P5Jul3TJbDb7P5L+m6S9qz7zPT2iTK1h9UoZ\n",
+       "RYPVP6tc/AxYjeIz5UT7v4BVwz35yXVrs+QCq2oUmdK5WChXpWg1t6w9AgfcCsB6mjoSIQIFEAUJ\n",
+       "62SoIoWVgRU+Va4QrFj616Io+d5N7RtU8l8oMXV2bax0rHPOAgT3xcLajRTmMdDmtUoA+XBQgkoR\n",
+       "vZ7hHIsaP0H6Lud58vmSPxoW+bOf/ey58rfmDIv6Bkqa5+dxPO/R1H6lDrkCgTkVP1CiJv2cUvpw\n",
+       "bTvzbonagTm0pHQxV9FeP/mTPylprW/PspUoiOZk3/3AR4qxW9rdiPxiqQfeye7bhjJKxnfakc9F\n",
+       "192KEb+rGaqTfVTSYZJOkvR3kj64wWcX662YJEmSJEmyJAaZ/rPZ7ODyu+u6P5X0pUf/+b8lHbrq\n",
+       "o0999Hdr+O53vxvmonBYpbKf3mqJ8z32ybEiPCtrLShMWGslHylXojyvju/L8/vouq5Q4QfAuUVT\n",
+       "45E8WIluJaD0tebecSXO/UyI6KAdazOWozZMdbZixGbnGhqrPB533HGSpO9973uSeisfxbc1dxDW\n",
+       "Pu3oUaVOSYkisoxcPS20KgEoFMxFKDo333xz1ff5PJY8ai1Ky7XXXispfhbmIqKXKAf+fKUz/piL\n",
+       "UMmjORJlgrGM+utzzI033ihJOumkkyT1bbmoHGg+h/D8jFnmRvoo7VXbzrXRdrX+h63Re5uN+5xF\n",
+       "/c7nXJSkj33sY6Pu77tDUb1u375dUj/XbFYEcS1Pe9rT9NBDDx3s91FeLRikSHVdt9rj8ucl4SH2\n",
+       "RUkXdl33xK7rDpN0lKRbh9wjSZIkSZJkGax2CTn//PM3/GxRkeq67tOSzpX0X7uu+66k/1fSeV3X\n",
+       "naRHtu0elPQaSZrNZvd1XfdZSfdJ+oGk186CJekTnvCEgwUt7X96ThaUm7PPPltSvwpnNew+T9wH\n",
+       "6+tb3/qWpOEnuqNEDVW0gOfAOsDKal2do3gdccQRknprjutH1lt0rhV+Jli7rkjRXuxzc3+UI/wc\n",
+       "sHpRIGgnj0xy69l9mvDboN0crFbKQ/mwpiMfOp7Pzx+rPe+Lz2EtU98eodLqkxTdn+tRXtq1NXM4\n",
+       "/cvPoERN8SzO1D/1i0JM/VG/njOmFZ5jbA6mFrDE8XNErat9Bvo+qhxlJ29TpAjQhvguMReV1DpU\n",
+       "VnyduD8/vQ8wFvk96vUdd9yx7vV9rOD/Rl+jb9NWjGHGLNM9v/ecZfiU0efcR4rP8XPoHP14xX3a\n",
+       "6C+0X6REDqWUadzb38/cY/eB3zMuxpaPfs54bI3YjqIxI4oLqdls9j/W+fWfbfD5d0t6d1MpkiRJ\n",
+       "kiRJHoN0i85cu+5Nu262srKy6fdNkiRJkiRpZWVlRbPZbN3U6lszu1WSJEmSJMljgKWdtbeysnLQ\n",
+       "JyPyYSn5fnjmb4/2QvX60IceScDuvhf41rB/zH4vKl2trwb7wG9729skSQ8++KCk3j+CyAT8H/Ct\n",
+       "OvrooyX1vl5Ef1Ef+MRQLnxVvvrVr0qSLr744rnnXDTcp/V+0flZzvHHHy+pjx57zWtes+H92Acf\n",
+       "66sG3OeP/uiPJPX+M54raCzUx+/93u/N3beW0rlj+LW4ryD3ufLKKyXFWZ7PO+88Sb1fAbmMGI/k\n",
+       "SKI/e4Z0ItAuuuiiufu2go9blD8OGB+/9Vu/pcsvv1ySdPfdd0vq6+icc86RJL3nPe+RJL3+9a+X\n",
+       "JO3bt0+SdOGFF0rqMztTZnyR8P/Cp+dVr3qVJOmDH3wk80spS/wv/MIvSJKuv/56SWvzDhG9hz+d\n",
+       "5zPysUfUk/ua1BKdIrC6Llffzxnq/+f43EmUI9fHR4qf1AsZ2M866yxJ0l133SWpb0/eLfht8g7B\n",
+       "d+2SSy6R1D9fNKaiCFLai/5Ae3o7MHf4WKA/1ebpOuaYYyRJBw4ckBT78PFOe8Mb3jB3v4idO3dK\n",
+       "6n2C/F1c67PU+m6gn9F+N9xwg6T+Xcm7AP9Zn+P9fvjx0h9L+eBoN/7O8/nZjPz9N3/zNzd8nlSk\n",
+       "kiRJkiRJBrI0RepJT3rSwdUwESTkbCDSgPOuohwOP/dzPyeptwLIzorVAZGy5FbfUMXBrQOUJqKe\n",
+       "sNw9nxS/p3ysxnmepzzlKZJ664VVfGtU07nnniupX2Xv3r177u9Rdt6pqLWWUT48u7JDeRd1hptH\n",
+       "otAvhp5A70T1Ucr4jQKHFRtFNHn/d0oRKfv375e09pwurPooehJq88NF0N+x5kuK1GqrnrI7KEG/\n",
+       "/uu/LqlXrIB7XHPNNZJ6JcozpbsyUZvPiJxdHpFKJnMs8I9+9KNV1zvjjDMktZ/KAMw13qdrc4gx\n",
+       "lzAWaTPKg9LEHBZFL/rcydzGXBnlxkOB4qdDffOzRDSmUUW9/JST/sFc5FFq0Vxdq0ShnDAnlaJJ\n",
+       "S3OT5xsjahQV+UtfeiQl5KmnniqpV/7YBfnbv/3buevRvsB1mTuiOQqlifaJlEAUMZSm6B1FJDhz\n",
+       "NYokiqvXN6o6awDKQ/1w/9rowVSkkiRJkiRJBrI0Reqf//mfD/pqOFhtpZOrsQJYNQ61zkpE/gQR\n",
+       "vj+LleKrbqzZz33uc3O/57nJAcPqGSWj9rR79stR+li1A9YDChrW6Gbm8VmPUj1jdZROAi/52GBN\n",
+       "eeZzrou1Rj3RXrXKHdfHmiopK1j37Pe7akK7kEU7svbob5HyVVLysN48v5Zf3zP8l6A/RueZkcOJ\n",
+       "v9Mf3R8HXy1UHPxGavA6BVRaV0NLilOtOrl37yNHkP7sz/6sJOm5z32uJOllL3uZpF7lc9Uaxchx\n",
+       "paJ0jqeD2sdcizJQO7e43ypjhlxjxx57rKR+7qnNUs916OP4rNxzzyM5nzc7yjxS6Kg/z09Um38M\n",
+       "9ZkxHD0XiSBr322lXHIoaPQX+jcK1Atf+EJJvUJDv3UlijHo56IyRumXpXNBo1Mq6DfsSjEHUO+O\n",
+       "+1DxDo3ODIzmAdYSkdIZkYpUkiRJkiTJQJamSG0EVkDJgofIcgbPpN1q1fA9jz5j1euRK6y+US54\n",
+       "Hj8HKYLrUk6s1ZJC4NYQlj0/UZ5Y3ROh5M+D8jGVT9DU1O5bl/pPZG3WRgGWIm9az/jDHyTqJ1i/\n",
+       "pSy9JasYpS3ylaJ+sQpRLagv/HqIGMOfqJQRPlKiwP1sUF1QiVDSsLqjqMMxRGoodVbqe6U6QIVH\n",
+       "mUKRuOKKK9b9fJQ5mr499JxHFIVI8aqFvu9+cfSZKCIbXNFgzLQqWRHM/a0qO+pppOrWnj2IyuyU\n",
+       "lChgtyJSVlCFUZhKUZSMQcYU/RVlCB/Dkm8ZqrnPodQX9cN4KY0Lf7dyf547Ot8VPLM+uwFcz/tn\n",
+       "7TuktLaAVKSSJEmSJEkGslRFyi1d4N+l6CJW36x2o7xTU/n8uHUSWYNY1jwHCkLJOgNW5zwX5ef5\n",
+       "nNp9eT+5nHxXkbUQWUtRuzlT5ZqJGGptQtR+tfmLaGes19I5aSWFj/41Nl8VvnUoot6ukXUbEY1P\n",
+       "r/da/5oI+glWPPUa+TOsR23fbKX2zLfaOkCZQgWOlKfoOVABh0IfLKmEJfC58b7gc43jczb4eZUO\n",
+       "eaFQVIj0dZh78C+9//77Ja0de9FYp15QehhTtUoURHNMq8ofzfGtvnGMDxRW6qf1bDveaZEfc200\n",
+       "K/i7leu72h8phPRD5k4UKN4RqNv4WFFv0e4DilatYpuKVJIkSZIkyUCWpkg9/elPP7jqIxstFimr\n",
+       "yNJqm8+z2iQXRkkZcIaeOB0pLvhIUQ5+RpELDlaMn4ReqyTUZuJmP5ysuZ4zJbKaaq39RSlRsKjo\n",
+       "Qqxerh9F6HjumAjqF2s2Kndk3beClY+i49cb6xdD3rdWqxMi1YhxgnIb5aNCzUC5XT1PRJF+EPkT\n",
+       "DsV9VFohGo3M1/jJobRE5YzU6Vpq57qovlDQSj4k5D6jbWkfFCXPq0Wf5XPMQe67xpjiHfKNb3xD\n",
+       "Uv9cRA2SDwglwxW40lxGOY844ghJ/dxcW39Rvxj6zhkK7chP9+NlLOH7xLvAxySw64OyBcwt9M+h\n",
+       "cwTjivaln0R+pz43M+55DvprKR8VeD2VSEUqSZIkSZJkIEtTpI466qiDq2BWtaxysUqwKrHk2S9l\n",
+       "lcr+rispredOYRV4RnLP3eE+LpTPc8+gBFEOVtO+uuX7KCDs16J08G+u5xEu4FYj9Vnra8PzbNUo\n",
+       "vRJTlxt/CDKEl/JVldQNz64bfR4rbCy0O0qaK1Ilfx/6GfUQZfM+5ZRTJPURNrX5tSIVgHGCisF1\n",
+       "3apnfG6krEVq6FRKFIxV95jb8DVhTot8fyCKBquFPlGKJI7qy+fACPIloRjQ9lzXFSl2Fyif52DD\n",
+       "b5bM1Sgf7EbQd4866ihJ/ZhiDPscUcq75Bnfp1KQNluJor55BzGGmAt411EuFLjSnORqPMoP7Uj7\n",
+       "tT4vY552Z1zQ7k6Ur4p3MIqa96sI+kX6SCVJkiRJkiyYpSlSs9lsTXQaq0lWjx7RgvKELwnWEN9n\n",
+       "1R1ZvKwuo8zPRJpgiXtUmFszlNdXwSgCrPpR2Pg3++ysiska6xE/vmqOctxQjzx/bR4kwFqY2lrf\n",
+       "LGinqaxG9ycoKVL+Pa/H2lw4WO/0c/pjq9KGYlTKcxbBuMAqJIO4qyRYnZxxORbGPdmxUZ49qzJs\n",
+       "llW/EWMVKcY8/mC1kb1j/ehQR8f6WkXnYqKYMTd5eemj3rdRIFDloxxtRBx7BmveDbQLfRnlpTUi\n",
+       "FsVwbHTjokGpcYWNdyRzEvVNu/s7EJ8y6jXKPA4+BnlX0y+4Pu88n3toJz7v/YTvMU7w4XKYi/z6\n",
+       "rf7Sft/a8ZGKVJIkSZIkyUCWpkg9/PDDB/dfWfV5pmgsUlbHWClEPGC9oUiw2o5yufD3kg9Va64Q\n",
+       "h/J7niuUBs8tEkV2YEXwvJFi5IpJawQROWnwTXms+kpNpVDQr2gnrF38Oagft5ZL7cP3o/bBj4Rx\n",
+       "QX+h39fmKEKZqz2fK8ojRj1EfglD/R8iKDcqBvXL+EeRrVFOsayp61qlJ8Jzw3mZh0I5a303gL5U\n",
+       "ikKMxjK/Z+6k7Us+Q0Cbcx3aiPpgLinVj9+P6zEGeD7qh3Jyf+qL9qEv+99rz0mNGBqVORWe+duJ\n",
+       "+jfvVurRxzh+jb7rU+tn7H679Ev3c6Y9UQR918aVKMrB31EEo3cS5XefrbG7LLWnkaQilSRJkiRJ\n",
+       "MpClKVL/+I//eNB68IgBcN8fVqVYLawW+Ttn0i0b9mk9OgrLvjbPE9YayhwnYZNrBqZSjsjRUboe\n",
+       "1ufQc75KRCd8R4zNBO4Q9Yh15dF2btWVsmnzefprFAnj+/me04XIJHyVprKSI2sVX0GyQjuMX1SV\n",
+       "seei+X2pb6xaxr/fZ73+QtQWbTJWkaLNXIUcm0eKfEg8Y23kI32OOQXfIle0orFMneFbwpyK+lnC\n",
+       "FSzK4z4x0ZhgLHhmbBQUnsejzIA+y/Pzb8pPe+CrxXOWzqncqrRGogP9gfpwnzXaAd8ydidQlEr4\n",
+       "O8Bz69GeKJ/cBwUpmjPoT8x5vNujyHV+z/PwfdYUtYqk7waUolIhFakkSZIkSZKBLE2R+ru/+7uD\n",
+       "q9HIxwKrxK0RFAhWt1gti446I6qqFAlAObD6/FwpLG4iFbByeS7P+cFzokx5DpmpzhWrzeKKNTGV\n",
+       "IoX1gzVAlNhYhp675u3G80b+IyhHpRPTS2f3OVjPWHcoMlh3teqB4z5RkV9O5CtI3rPnPe95kqQ7\n",
+       "77xzw/u1+tzxfO5XEbXjer/HAsdCPe644yT1Ea1RNBhgiZZURD43VJFCOWMsRZmbHfcnbT1FgPuh\n",
+       "QLT2JdqUuuf5icYr+c9FOdVQqGjzKOcZf6e+qA9yv20V/07GylCi/Ej+d+rfFRT3raJ+aRfmFuZg\n",
+       "7hMpP46/A/zdxb8j5dLf4cxJKMj8nX7leccAxdF998DnbuZAV/qoR/pdKXP/wetVfSpJkiRJkiRZ\n",
+       "w9IUqRrrPFqFs8qN8ss47KOP9ZOotdpQDtiXZVXNM2MFsJqPziNiH/uGG26QJN11111z120FK8P3\n",
+       "jz1KqhY/B8kz0NeCden5oCA6eR1rj3r1eiwpUfj2eNTjLbfcMnc9lEQURLe2FuUrBm61D1Wi4OlP\n",
+       "f7qk9nxj8IIXvEBSX2+l529VB77whS9IWmuNRqxWPc466yxJfZ4h6g7Ls7ZvoqjUZkBuBYUMFXPP\n",
+       "nj1N3/dIUtoC5Yyf9BXKiaruaiv14menRdCmPqf6GXwoXhGu5HH+Z62KPNYv78QTT5TU18vtt98u\n",
+       "qexLxRjCb/XGG2+U1D9Pa/RbRGmXwCPR/X7us4TazpjyqEio9dXzsY2iRD9gbuadRb/w00l4Tnyh\n",
+       "+DvPg9LGdR1y7nm5Udo8qtPfeVF/q41ITkUqSZIkSZJkIEtTpKQ+1wirYVaJrD7Z5/RcF6xWXcFg\n",
+       "v9z9DDzrKZYuq+mhZ4Thq+Tfx0qLcmSwCuZ5PIN6xNhcKKzu3cJv9a8o+Yix6sd3iPvye6wDsjl7\n",
+       "xvebb75ZkvSc5zxHUm/1UU4yX0OtMglHHnnkXPm8v3g2X1cdsKpot0VnPUZ18DMlh+JnO9ben/H5\n",
+       "yU9+UtL47NoO8wH1Sr9BXaFdNrLSGYsoCq19O4K6b/VziyAb/NCs8B/+8IcHfa80dj33WClPlUPb\n",
+       "eGRxLYw1ftLnUINRzLydI5j7UWbc1+0lL3mJpF4Fja5Hn2QOPvnkkyX1Coora/STkq9RVL9nn322\n",
+       "pF6tp159zLqvnONRbIwpnnOsYublph4iJRKFhzMT+elEc3o09qPdJuZm2sGjBvk3PlFeH3nWXpIk\n",
+       "SZIkyYLplhHd0HXdbGVlZdPvmyRJkiRJ0srKyopms9m6ElUqUkmSJEmSJANZmo/UysrKmqy6kf8B\n",
+       "kQb8ZP+cfXP+7fukqF6bpX5t1fvt3LlTUh+R4pQyU5N75OKLL5Ykvfe975UknX/++ZL6feXdu3fP\n",
+       "fY99fnyQiLqjvYlcYl8dHzT271/72tdKkm666SZJ0lVXXTV3ffwW8AOIovrwHyj5wlGP73znO+ee\n",
+       "K8LzTbX6SnG/97znPZL6iLMDBw5I6iO6tm/fLql/nquvvlpS7/9DOYhuxB/E/T2i/kI9Ep0Y5e5p\n",
+       "hft87GMfkyQdffTRknq/D/xL3PcQHzb8GuiXlA//BvwtqJ+zzz5bn/rUpyTFvhqlTOTucxKdQ1ga\n",
+       "e3yPspGN3tmxY4ekvi3xDaFO6INbdW4ZivuXcp9PfOITknr/SaDd3ve+90mSvv71r0uSLr/88rnP\n",
+       "nXfeeZKkO+64Q1Lsx8f9PvCBD0gqR4aOxeuTaEF85OjT7gd70UUXSerHDP6jvPuYm9lZwufo137t\n",
+       "1+buFzE0157jz4cvm0fv4QdL1CMwTvFZOuWUUyT1cyDvEPxjL7zwQknSRz/6UUn93M8cgU9j5GuF\n",
+       "f6yfY0o/8/NFX/WqV63/4I+SilSSJEmSJMlAlqZIHXrooeEqHFgtY61hnfE9VuGsQlkF33333ete\n",
+       "j1Uwq9Cpo462Ki9+8Ysl9Vl/UU62bdsmqY+CjBSp6MT7K6+8UlKvhDhYh9ddd93c/WHv3r2S+hwu\n",
+       "WAlu/Ud5imi/qB2jiJAStZEsU53xh0JHRJVnSCcCiuf0SDQ/UT3Kwh1Rqsex0K6oDzynn9QOKE0o\n",
+       "aswP/ETJ5DlXX4e6i6J7ShnIPfrJ+2IJ1D3mrlKkLeof+W6e8YxnSOpzmZVUTup2UWfItUbt1RKV\n",
+       "15UooN3e8Y53SFqbSw2FB8UCtdMzv/vYiPIE8dz+joHWs9scFBrqNboeY5qxyRxN5DC7CTxHa14z\n",
+       "n9tRUBk/HtFcq1z5mPV6jqL6UHJpX49a9Ihq3mEoUh71GUG7+Pimn1HP0Ry1ptxVn0qSJEmSJEnW\n",
+       "sDRF6h/+4R8O+iNgVTn8Hl8qfCbYV/ZVLvurEaUsu1PtF5dgNYwSUZs9dSj4Z7h1i/WO9RYRWfFH\n",
+       "HHGEpN4qov5Qgr72ta9Jin2zAKUC68/rH9+aW2+9dcPrtIL16vvoY61wsvVi7ZRy9/A5rGfPfQOl\n",
+       "s/xg6LlvwPOTv8tzC0XjJPInAj6PdY1640SqBGClUh+rrX5XE0t5dsbieYLczwwV/NJLL5XUn06w\n",
+       "b98+Sf0zMAb89AM/64t/48PBGKSNIlU56lP8HiWCUxTAxwI5/lB4aHPmgFLb0Xd4Dld1+X00J0ZZ\n",
+       "/f3c0sjPj+cF3hn0Gz8rjuclxxlzEedL+thwojxE3k7RnHPttddKWtsPqH98zfCrHKscoowyhoe+\n",
+       "C+n3fJ92451OJnKHuQF/Z/o5/cF92YaeKlGakyE6289JRSpJkiRJkmQgS1OkfvCDHxy05ny1jXXD\n",
+       "yeh+ojMWqFstYyMvFq1EAc9dOkdpLFhpkZVae//IqqL+sYqofxSpkrUGnvmcKDQo7ae3+rFAFNEx\n",
+       "1qo74YQTJNVnd0ZBojy0WwnaZepccDx/pC5g1btVF7UDKgG+eED/wwqtVQL5O/MAypm0Vo1zJcqz\n",
+       "po8dg1jMnvkasGix9N/0pjdJ6v0D3/zmN899Ht+hk046SdLaOY0xh5Llka4RKCsO10dhcbw+3T8P\n",
+       "/8Zjjz1WUt8no7FFvUe+J/SRWv9Gxsr9998vqY9qc/zUDEBBpP78HcDzoiS2+s5E9Y5PF5nKKZ8r\n",
+       "LCg5KKv+faLTUOCi/oxfMfUfqdb4sbpvVCsorPRXykl7ofC5bxPwe8Z49G7nPj53+NxY+65gnEbt\n",
+       "FpGKVJIkSZIkyUCWpkg98YlPPLg/zWqSVaOvTskrQ26QoQrEkDJK431OHFbnWMuLyi6PFcLq2v0b\n",
+       "yEtU8h2jXRysYiIp2KcHrKvoTELwk8ldkbjtttvW/R5K5qL8X4aCX0PpnK2I2oggrj/VeXJO5KdS\n",
+       "618APA8+eliXPq7wn3BfO4d+VKvcrYa+uGvXLklrfYtKPj4OljNl8r7L2HjjG9849/vILxT2799f\n",
+       "df/aSEssbc+1RhsMnePw6ULJQa1kDvfcgNRPFFXl5StRO1ZQYrx98dEq7UZQfygdlBOfqcjnx5UU\n",
+       "rkPkKX9nro6i+HyO453himc0ZlFM6f9EWuN3Sj20KlHRu4HxQH1R/yhwvFvxU412L0r+w1wHxY76\n",
+       "dCXKo1spl49X5ijq05XAiFSkkiRJkiRJBrI0Rerf//3fQyUEJYNoM1bPWDtYFYtSjIDVO6vaaN+/\n",
+       "lVardyhYg1jb7uuEVViqR1c8qBciK6ifKOKm5E+AVUD0INYZoLz4dbEWUChL1gvXxY+ArMCeI2Ys\n",
+       "lANfPz+xvUQpcglol6lz/ZCPDV8vlELqG7WE8euqiFup+HmU8nPx3KXoW6xNnr8lnxd9HsuUZxra\n",
+       "B1AEsPi5fkkpKfk0eV+NiMYsigdtV5sbrRWUEpQ35pho1wAVMVLSUIaG+v+h9kenZDi19cKcR3/x\n",
+       "XZRaGNO8A5gb6Ee1Edz4hBFtSf/z0z0ApYmfRBZPlQsvgnc244v6pl35yZh3BY/6pt/4uKK/RD5U\n",
+       "9AfGE+UpjT/m0trdjlSkkiRJkiRJBrI0RaoGVtdf/epXJfXKyaJX0YAPUKsHP6te9lsX5QNVgtU/\n",
+       "kRiRYkHkURTd56BYsA/Oav+YY46RtDbfU5TTxXPKUM+unGFVuF8FnyspOChDWDfsx2PFlPxVWkEF\n",
+       "wLcP67PWl4nvl84GhKhdW3MoUY+oIChJfm4WvnUedQeuRtT6NGIllrISR76UNWCRU6ZWnxwHS5o2\n",
+       "myrDOH1y6FyHglLK21MCpSNSkFAaqIcoCgv4XMm3a+ic6Rm/S3Na7W4G7wLPV1Vqb49Ij8ZMa8Q4\n",
+       "5cZfkTkyyrPl1Oaki2AMRkouczvKIHMREbYPPvigpH5ujHYtWAOwe+CKVDRH0k6MS75f61PInFrb\n",
+       "P1KRSpIkSZIkGciWVqRgaPbSqUAJq1UUsKZQTLDCaiNMpiZSLLCC3CepBFYWq3wgp0wtKBvkoMHK\n",
+       "cx8y7oOPGlZylE/MOfTQQyX1VhKKFIrd1PnDUBFqrRn3UatVoiKwTlFdav1A6N/UJxnpqR/P61ab\n",
+       "9bdWkcJaL7WHP0/LGYGMzbFKFHi0nvu6lOYM/P+iZxqa54rvM7Y9ItLrOFJ1S2omajblL6mEU/mz\n",
+       "RuVFcaw9b7LWrxC1HOWEdimNrWhumiryHF8p5tDNimjnPpGPmPcv3n20G/2gpDwyPqLcdRHeLtRT\n",
+       "rYrdqnanIpUkSZIkSTKQx4QitejovAiUJHx/SmfGAVYoq/ZWJYp93UX7gmENtNYr5cLK5TpYg7VR\n",
+       "Z0Tp4avEPrrnX8LfgSgx2oX7lqxProf17CwqDxN+KiX/Ga9/P4uRf9f2o9qIJcetWe5LfWOFY5Vj\n",
+       "JU6dYb3Vql7vzD7KiDpH3aGwoNSUcrmh4NCHougg/CG5Lvf1vDyoq/yMIpepA++b7hcYqXeorq5E\n",
+       "RYoUfdXVYL+/z8X0iVo/vKl8yEpKz9Qqs/tZcv/W56H+8DtkLvUca7XQbrTTWDW7lVpFz5VVlKzS\n",
+       "9+lXtadkAOOZcTvEn7KFVKSSJEmSJEkG8phQpPC9KWXgnhryV7XmmKk9hylis6ISsVaj3CMRRIZg\n",
+       "HbPq91wdUbSeQ2QNUW6uSHnUJPfH6sAqiyJWsPaI5KA9sVJqI11qISvuEUccIUnat2+fpHrrFWsf\n",
+       "1cFVlbFEOVuicniEG5E31N+yolJhPfWBOqMO3VJHGcAijhQO/Pii0+eBOuJ69Cn3IaEPluayyO+L\n",
+       "vlXy72MOQQ1FjWUs3HTTTXOf53OR7xjl9vtRr34+5n82mINq1fYI6pH+Co4G6gAAIABJREFUwruC\n",
+       "MwYZ47U+fC996Usl9XPaFVdc0VQe5nD8H1HcOFuwBOOsBPXmUbNA//R3EUpb664B7cN9avOyOYz/\n",
+       "EqlIJUmSJEmSDOQxoUhhRW0WRHmhjLTuzy4qi/BQsHpQIrBWn/70p0sqW8coPoD1yfVY9XOiN89f\n",
+       "q0gBnyfrLmANooD4vnrJWsEX6957713371P7SKHIsb8/NEIMq4r2oX4ZD0PHRWv/JAcRVh0Zz++8\n",
+       "886q72ONlrIvo96gQBOhU/KZWt1/XTnwKB/PXVaKCMZS5nOMmQhUPPqoq9OlUw1K/mbUBWOB5+Cn\n",
+       "K1lEupZyxXGKRERJZd8sFb0Ec/fUp0egxPFOGKoOo/xwRp9HPqOY7NmzZ+7fDmo37Up/az3fEyWM\n",
+       "69TOVfQ36rsE4xIFCwWN/l7r21U6+87zW1F/7ucZwfe4D3NuiVSkkiRJkiRJBvKYUKQ229oZa80s\n",
+       "OkIgilLzDOH4c2D1HH/88ZKk66+/XlK9b5Bbx5FCd/fdd1ddL4J6Q0Hy30cRHiUrivraLKWQTPKf\n",
+       "+9znJK31qxnqQ9d6Zl9E5FfjvlMoUKg4KG0oeLU+i6eddpqkvr9FipTnpuFk+NJ9VtevZ3Onz6Cq\n",
+       "ovKV+owrPMwJ60UIrsb7KGMHy7akbntf9eu5glby8eI61OHf/M3fbHj/iKly+fF81O9Ufn+MKT8V\n",
+       "Yez1qF/qbewcQl9lDKLCo5zWjin64+c//3lJfX8mX9KZZ5657veof/dtuvHGG6vuC/S3aCyjmDEX\n",
+       "op7TPnyf8tT6j3p06HOe8xxJvWrOeOWdEWVwj5RL5kCU5euuu06SdO65525YrlSkkiRJkiRJBvKY\n",
+       "UKQ8dweKy9DIic0Ci57yYmm3+rawj80qO7KKqQ+vFyJ98D3ZsWOHpHrramgeLPabURRRHPg99YE1\n",
+       "EvnClCJISlFjQ7NDA89DBAfWJFYk/6Y+I2ub547yWW021D9WIj8pJ/nTeB78L+hP1AfWcKR03XDD\n",
+       "DU3lwq+n9ozL1YoUZXBfCeYQ9xdkrKC40Ib0RVdZW9VxrlPrZznVnMbcc99990mqP0fTIbKXucfz\n",
+       "bwFzFOWnnlEI+DvqJmN2Kv9XlCP6Ti308QceeGDd64E/L32z1b8ShYZ6QNGhf6KU4DNEPRHtR444\n",
+       "fPUoB/2ZsenwfeakVv/ViKi+eU4HVTraDWnNGUkEMXNMlM+M+WDXrl2S+n5M+/mZiK3tmopUkiRJ\n",
+       "kiTJQLpl5IDpum62srKy6fdNkiRJkiRpZWVlRbPZbF0nvFSkkiRJkiRJBrI0H6mNFCnyCEUe9w4R\n",
+       "CH7GGPfwe+HbMTSCxWF/+7d/+7clSZ/61Kck9ZEEY8HPAz8D/Dhe+cpXSpI+8pGPSKqPwmOfGN+j\n",
+       "yN+B/Xv8BahHIkvIPcN+9j333CNJesUrXiFJOueccyRJr3nNa+bKja/WGWecIUk6cOCApD4qDZWU\n",
+       "+vT227Ztm6S+3vfv3z/3d3zA8EO45ZZb1q0Hz6R+4YUXSpL++I//WFLZj+DlL3+5pL4eqE+iJHfu\n",
+       "3Cmp96XCD4d/R/1zUXCfd77znZL69sAHjPr08+Ecj2jCrwM/BPyILr74YknS+9///rnP+7l1jEf6\n",
+       "r/uWEfHG5z3nDOV529vepg984ANzn4Wpz+v0tqMM3Icxdeqpp879G58lYKx5TjZ+0ofpm0RlMUb4\n",
+       "O/5t1BVj9LbbbpPU++XR1vzk/vgyvepVr5IkffGLX5x7vkXBGXbc98/+7M8k9T4s9BWe0yNL6Qv4\n",
+       "6tCXGNv4YDGW8cf83d/9XUmLfz58ky655BJJ0oc//GFJvQ+O99MTTzxRUv9OI2oM6F9833eU/F00\n",
+       "9vlKec04M5A5/rLLLpMknXTSSZJ6ny3akdxzjPFnPetZkvp3HP0W3y8fD7wrLrjgAkmbP3dGpCKV\n",
+       "JEmSJEkykC0ZtYf1EJ1r5GeltZ52jwU9lSLluUtay1OC5/SID2g9K27v3r0b/h0rKcrbdPXVV0uK\n",
+       "FUOsEqxBjw5EkfFyEEGD1RHB9Yhc4XpYp1j/UT4v2gur1SM0SooM/NVf/ZUk6ZRTTpn7PcoT16F9\n",
+       "3KpeFt4eqCmlCKpf/MVflCR99rOfnfu952LxyCGsUaxpvw9RlagmrkidddZZc/dxRWp1P40yO0+l\n",
+       "REVQBld3USSiCFksfc/3BB6h+5nPfGZ0WTeCXGG1EZNj8bkSZYk5HoWJevF8Q55TjzkdBQPljT7e\n",
+       "6hNM/UdnH5bwzOWlMcZ9yMPkbN++XdJaFR6iqLWhlOrL84vRbuwqHXvssZL69uCMR6JI/cxIxjjt\n",
+       "TlQe/XGqvGNTk4pUkiRJkiTJQLakIoV1x/6556rAyhuaQZxVNMoUigarZxQDFJOSz5bnKfIT36cm\n",
+       "ytczFurDM6M7WHcoD5GV9dWvfnXD+6E8cj/PShuBNUPma8/tU/JNw8rifp6/qtVqjZTN0vllreD/\n",
+       "goKG9YY/Cf4RJUWU/okfBtYv1n6U8yg66R2rnft7fTKeGEfus4aqEGXsp52i5xqinpR8P6ai5GfH\n",
+       "2IkyrUeq8KJgLJE/atFEcxlzC0pilPkaxYnroFjwe/7N7gZjpZbWs+ucViUUhYa5F39QzrfkOSNF\n",
+       "qhXmgiiHH6o9Y4/+ypjjHQ2o7lwXNZ7nQqXn+fDFY873dw7thdJVm2GfdmP8LHqcpyKVJEmSJEky\n",
+       "kC2pSGGNsBpHCfJIlqGgQGF18ZP97JNPPllSv4pl9e2KFP4E7qM0NpP2smBfmsigCFdgPIoLarM0\n",
+       "t1obnn16aDZo2g2lZChYa09+8pMl1Z+g3gpRjpwbh9pB9m36dQnqjahMrMDjjjtuw+999KMfXff3\n",
+       "KEkoUu7ngxWKKuBqEL5ukRJ58803S4rPKFzt41arUrdaqJGChcWO/6L/nbpFIXG/T76PCugKxqLP\n",
+       "7XRK6p4rGCgGPAdtX+tTFN2PuSBSxf1zzAm8G5iD+T7+d62+Tq62o3SgwhKZe9VVV637/da5gHeM\n",
+       "v2uYW0qnPLTCmI36Gf3VffwYc65e8zn3U6UefY6P6ofrM0cxx5R2Y6KM80MV6NpTKFKRSpIkSZIk\n",
+       "GciWVKSw3lCKOMma/fvWVT5WE1aK7wdzPX6SzwirwyMvIFKeWD37Kj+KQtwqtJ6JBihRHk1ZAmUF\n",
+       "K5F2Ovroozf8Hj5yWMelff4S9LexLLpdiXjBX+K8886T1OeaiaztkoLLOCMPWCs8d+THQv9gXDko\n",
+       "a5H/ClapR8euR+sZWbVEliw+IqiSfq4k/ny0jfcRfHhqfWmmzodVwvNP8ZwoD4xh5sgXvvCFknp1\n",
+       "FH/F2rP+UMVRSlCUqD/v43zOz/SjnihXNIeX8HbnuVGzN8uHjbEf+YUOxedqzyvGeKqNlqM+GKv0\n",
+       "d65XW1+UCwXL3+EOf+cdy/jgOrVKFP2P/lJb3lSkkiRJkiRJBrIlFSn2QdkXJqqILL2t0VBY3ET/\n",
+       "oWBE+61YU/wke6uDdYKvlJff4b74E3iWXs+LhVXLvu/YHCEoRqyyh0b/YQVSHqwPrEBW/zwvPjDs\n",
+       "p3Nfnt/zApWixrCSsYapz9ZM8vg7jI3MAawnfHnov9QH1l2k3JTAN42fu3fvllRWwtyqcj8Cyhtd\n",
+       "B+uMdvdItKG5Xeg3WJlHHXWUpN5n7Rvf+IaktePM70+/HnLvsdE8RE+VFIKojqh7FJ/SdWi7RSlS\n",
+       "jE3Kg5KAKujwXPiIOaWcaa4gMma4P741KA20F2OWvEr0XeZM/PKo35Kv1VAefPDBhVzXYQyQ+Zx+\n",
+       "MnXOQnZlUGZa51Tam90c2pd+Vbt78K1vfWvu38z1UWQv73jmYO7vKnfUj4F3C8pc7ekqqUglSZIk\n",
+       "SZIMZEsqUlgTf/AHfzDJ9TyvE1YN1k5plYwl7nBmnCtSkXLE77EisPSxwvg9q3cUqqmy1WLpo1BE\n",
+       "yk8JrA7K5fWJQoAVSCSPK2BRlJ5bqbQPyiA+P1gdWBH4YdT6aEVKFH4vtX4dDvWLoul+H0MVKac1\n",
+       "KhIoB/XsUXYOVmBrBn2IolspF+2AjxZRu9yXccm//WzN1f3FfSlc9YWp8sqgDHhfijKZO/QVsvpH\n",
+       "50LC2Cg+5pxo7PlzjPX7a1WCUET83EU/cw8/SeYAlCfGFsoD/WGo/2RElDNtUbjPl78zpoKxNXSO\n",
+       "8qg9lFPmcNqxVlGlnlGc3D+auYKIY8aj+896pHdUb+yOUM+1GQJSkUqSJEmSJBnIllSkpob9dWB1\n",
+       "ir8BvhlYP56PJ1Iu8LOI8ttEoNyw/8pq21fNU0eE+L5zRMl/BCUh2m92qzmySrE6XQHy6Cz8JbCW\n",
+       "sJIpH6pDrQoA0QnqNdFhQ+5D5nj38RkbdVjCFanWzPtRxFkt0fjhevykP0X5sKhHlMkaRYqxuag6\n",
+       "jvLW1ILljsKyaEq52qaOPKXta8emn0vJXMmY5Druj+kZ0GkPfj/1mOa6i1akKDe7HygyQ8/+K8E4\n",
+       "GepTRrvxk/aiPVrnaPqPq9fAeOd+nquOXRDmCt6pxx9/vKS+v7ALxhoAld7XDhGpSCVJkiRJkgzk\n",
+       "caFIuY8Tq07211ml4svBPqlbP67UED011IfJo9XYl62N4FkUWPyRMjDUVybCfZE8MsMjMLBSsU6I\n",
+       "sGi1prFy+D64NeZRiq1Qj6gjbuWUsgvX3p/rch9UGVegajPIQ210J6qKR6j5uAKs1Fo/BPwhUCAZ\n",
+       "L6vbzxUClINSHTv4LPHs9FFXnnjmoWOC8tRGPo7tiyXGnhpB+bgOYzbyN/SxHuVtchij1Btzpfv7\n",
+       "UU/ReaBD4Z1B+ccqkxEeYU7keW00WSutc4PvxvBv2t/PM+XvtUoe/YfdCD/vk3r3iHTmCBQn3g38\n",
+       "nnr1/ua7FLVzXypSSZIkSZIkA3lcKFJRBAKrZZQNfFiIboui/Ry3tGvxnDDf/e53JbUrK1P7f5A9\n",
+       "l9W9R0oM3Z+vtdr8+igtWDn4C5ABHau+1R+D/GReb26VoRBy/aF5k2hfp1QfWGWRCuGZ2V3JRNFb\n",
+       "NNSL524p5eli/ET9A9WHXDIowbTTaoWPZ0Uh4m+tSgFlLn2/1efDYezWntbQ6o8JtacquHofnaMZ\n",
+       "4f6npfu5/x7lLJ1XSh+nL6AceHvQFxd1ZuGxxx4rSdq1a5ck6S//8i8nuS5znT8XuwX33ntv1XVQ\n",
+       "a/EBKuHRdIcffrikfq684447JPU+R34+J/3Z84cxxzJ3+3mtpfLzbnbFEWWSucB9sag/FEn6M/6Y\n",
+       "Pu7ot/S/zGyeJEmSJEmyYB6TihQe97UZzkv7yVivKDFYSUTTtWZ3rcVXzygOrcoSviNjc4qgxLHK\n",
+       "bz3TsMRQ/wGsHJQq/BJQQLiunwdWYt++fZJ6a+ecc86RtDb3EPfBWh+qSEXQfljj3Iffo4T6c2Ft\n",
+       "kzn+gQceWPf6991336TljcCKfv7znz/3e1ds/bw4rGys3muuuUZSP/5QlDlT0FmtYOKPheXa6vMB\n",
+       "zC0oXFEka3T2W8mHCcULxaaUywtafaMYO7UqN8/Dc/D9Wsuc+mcsRiosoABA6xzG5ykvCgbKw9Bc\n",
+       "cLXQjvfff/+k1+W0Bq6PsuL1VSJS9kq7GNx327ZtkqRzzz1XUn9eJvXuuz2MWdqfdmActp5ripLG\n",
+       "XE0GfeZsxgPvaMrDO5z6wresxFBfulSkkiRJkiRJBvKYVKQWdW4Sq2Z8MVh9e96fElgTpczhWAOR\n",
+       "EsVq+vTTT5fUW8NjrbgSJaXP9++Hgl8BVkdklfPcKGRYWexzo3Acdthhknpl7a677trw/pG16jln\n",
+       "8K8Y62fhkSWAosbzEI1GvURWJSoDVlSkOtRm8cZ/wcsDUVQe0B9oR5Rj94FyPwysXqxVj7gpsXo8\n",
+       "uH8Y10RZIoIyUlhoe1QyFBbapKRGRorRSSedJKlXTK6//npJvao4VbZ7p1XdRlGA2pxj1C+qrmeW\n",
+       "jtTSof6lQH3zk/r0SNxF8fnPf34h1+W0BsY0ZzpG9Rjhiin9mXdolGGcXRKUtltvvVXS2neNR3Yf\n",
+       "OHBAUj8XcB/mktpchnDDDTdIku6++25J/Tv6vPPOk9TPyb5rNDaynHdc7TmeqUglSZIkSZIMZEsr\n",
+       "UljI7M9+5jOfkVTvT9AKq232dUv7+xGcSI4yQu4LrssqHt8t9qNZXaNAkHGdyBCs6NaM59F+OOXB\n",
+       "mqw9/2jqbL6upHhumciKQTEcSmTNTO0D5fdz3Hri37SbRy3ye9QXrouSQ//xaEOIInmivGGcFYha\n",
+       "c8opp0iS9uzZM/c57kdE0Qte8IK5v0c+cl/72tckxYpdidVWN0oSz4z/GHWH2hz5duCnRd3S1xkb\n",
+       "Q3O7cY4g5aDPR+d4RrzrXe+S1CsFWPyohcwVJ5xwgqRereT+jC1XNpgLUJRuuummuXI6nr+HuZrr\n",
+       "UM/UP23kfczzVvF5oE+hRrtqixJFOWg/YCxTrtrotWUT+QO2wrsHVdozvjsoVihKtE/tOxfljJ9j\n",
+       "YfyVfB3pj5SXfsM4r909IRKceqiNUE9FKkmSJEmSZCDdVKegN92062YrKyubft8kSZIkSZJWVlZW\n",
+       "NJvN1j20MRWpJEmSJEmSgSzNR+pd73pXMa9Qa1ZdB9WrpH7hX4BPx9e//nVJa6P18L+IovhK9xsb\n",
+       "7eYn29c+31Rs1ft5vSz6fmOhH7zlLW+RJF1++eWSen8OfL/wecJHkFwo+APwEz8b6gEfJfoZ133z\n",
+       "m98safPbj2zP+C3RTtQDvolR9KnnnXLwz3n1q1+95tlKfQNfnih6jL975GbUV0499VRJfdvs3bt3\n",
+       "7u/4IxJhij8c/pj4r/GTyNLXve51c/fDl8h9Vzw7PHV+xhlnzP0en5MHH3xw7vfMhZdccokk6T3v\n",
+       "eY+k3mcEnyvu8+QnP3nu/lGksvdZ2gNfp4svvliSdNVVV0nq/SLpE0SAkj+IOZgoMdrl/PPPn7su\n",
+       "fnzud3jppZdK2npz2WPtfkSo/9Iv/VLT/UrjrsSy6jMiFakkSZIkSZKBLE2ROuSQQ4qRAETSlDJs\n",
+       "t56x5mCxkwGajNKuPHleHaw0jzSJiM7JOuSQQyT10VlRNNrYE9+xNnm+2myvnldpqzFWiYpAgaRd\n",
+       "sGo9kgPFiM9hVQPtjj+iR9FRfrL2Av2SCJ5SHivUgakz0pOzBaXWYZzynG5lenZjImmIBiTHEhnN\n",
+       "nUiJor7Xy3WEEoWCEUV+lixij6R1UFhQmlDPfe5gjrjooovmvnfZZZfNfQ6VjrnMo9ogmjtd5ec6\n",
+       "N95447qfdzxilag9fx5XO0sZtykX+ZGozygnHmOPuYdoP85oiyJrPXO272ZMHXEMY3dPABWan1GU\n",
+       "oZ++sCx8bEbvYqIqUWzJo7ZV4MxA+hu5FP3MwIhUpJIkSZIkSQayNEWq5hTzkmWNb4QrRUOJMmHv\n",
+       "2LFDUm+d4gfAarw210RkDe3cuVNSb/2hSJEDBCstirAs+W4BFvzZZ58tqVe4SvW3jMjOFlAkUAg9\n",
+       "RxD5vEpnLjpenyiH3t74sbjSBG6l1lqRrX4EYxVLB8WopFzyOdQFVzCpL7dW8e1C8eKkebJFl7J9\n",
+       "Mz7WUyewgFG7UKQ4T7NWjeU6kRqI0uJ+XihigFLi51l6PieUJlTj1jxTY/F5mTZjDogUl9pzNL3v\n",
+       "ew41+hBzUm3Gd1dw6BP8HLtrUSKqF1cUmavpJ9Qb9ev9FiWR5yBPFgpnpOJuFq5IRfVLu5YUu+hM\n",
+       "y0Vz4oknSuqVVhQpz/QfsaEi1XXdoV3XXdd13b1d132z67rXP/r7/7vrut1d132767qruq77v1Z9\n",
+       "561d1x3ouu6BruueO+ShkiRJkiRJHguUFKn/kPTG2Wy2v+u6n5C0r+u63ZJeKWn3bDZ7X9d1vy3p\n",
+       "LZLe0nXddkkvlbRd0iGSru667ujZbLZGill9ynLtKhQrje96llyPWBmLR6a4IoV15b4tEZFigPKB\n",
+       "tQy1Z+hRPqze6DwmrLtPf/rTG14Pf49apa0ESljtCfQeCVQCpQ/FiP1uIqPI6tyqSDlRfdAfIn+W\n",
+       "Eu5ngtXWGtESKT8lGH+Un35Ke5WyQbtyRsQUYOX5yeq0D+MYH8XaMw0ZNxvNG0QwYulz6gBlvvPO\n",
+       "OyWVLeXo/MzIV4X7kDUftfuVr3zlhvcBrjf0NPqhuJo4tYLjY8gVMHyoPIN8ySeI399+++2S1pab\n",
+       "OQLVGvg3ylj0LjrmmGMk9dGVtX3UfcCYC9lNcV835uirr7563eegH6IElc6/XDS1Eej4R0b1Rj3h\n",
+       "Q8U7dardphIofIxvomYnOWtvNpt9fzab7X/0//9F0v16ZIH0Ikl//ujH/lzSBY/+/4slfXo2m/3H\n",
+       "bDb7jqT/Jen0ukdJkiRJkiR5bFHtI9V13dMlnSzpFklPns1mODD9vSQkhP8maXXilO/pkYXXGlZb\n",
+       "gKyqsWQj/4WSdTY2YsJB2brtttsk9fvT+D9gLaDgODwXq/aTTz5ZUv98WPpY1qx++cn3/Lmw2oDV\n",
+       "PlYU5eN7+FmUzisCtxr9foCVSJ6f0onbtXm0ak+cB6xIfM3oR7SXn6XXiitNlO+ss86StPYsOvIm\n",
+       "OShF3l9Kvm1OyTqvVRF4Dqwx2oV+VIrEop/h00e/Rjn2z/n45XOoBfhE1vp6Uc71+gt9EhWSOuc7\n",
+       "WJyUIfLFQWXj+94XUD9pQz+DDpWYnyga9AX6Kj9RoTdbiYLWsTcW97FBUXKFpdavsNT3/br++Ujd\n",
+       "pN0Z65Sn5Gvn9Unfpp/Q7p5bLVK/6a+8O5jbeS5Xg/n71P6TgMIGz3jGMyT1uyKMK/q/z/3u34sv\n",
+       "o9cb4yXyGRybS5D643lQKmvPZqxaSD26rfc5SW+YzWb/vFr+nc1ms67rNtqTW/dvqyv0Bz/4Qeio\n",
+       "myRJkiRJspn867/+68GFWekQ6eLqpeu6/6JHFlGfnM1mlz/667/vuu4ps9ns+13X/ZQknDn+t6RD\n",
+       "V339qY/+bg1PecpTDuYEYfUX7fPW5uiYOkcIViVWAIs9FpJYU1GeJX8eyu++T1iprIbJfRP5Ovlz\n",
+       "Ug6scMrJ74lu4vqteYai56u1Emt9o6BVocGK5LmI/qL93GpqxZUa1Al+71ZhCdppKFPljmHc0R+5\n",
+       "LtZyyUePSQarEyvS64H6d/8T/IaImPF8bLRnqV7XG/dY4NQ1bXX//fdL6lW92qgw5iDPU4SPi48p\n",
+       "FCX6PnWDbxbPSnmijOBj+0oJb5NIkeK5eZ6hpzOU8LaO/P3wWaL+o/Kg0LA7wFwLtX6g+/fvr/qc\n",
+       "475ezO3UN+Uj8pddA3yh6C/UP59ntwQlBiXr6KOPnrsff6f/4qPk7V377nQ/ZPdxYy70dwZzi+8O\n",
+       "+FzvSizPS/midh6rSLma/vDDD+tHfuRHDr6zzz///INZ8tejFLXXSfqEpPtms9kfrPrTFyW94tH/\n",
+       "f4Wky1f9/sKu657Ydd1hko6SdGvbIyVJkiRJkjw2KClSZ0v6JUl3d11356O/e6uk90r6bNd1F0n6\n",
+       "jqRflKTZbHZf13WflXSfpB9Ieu0s2HT+0R/90YNKCdYZChWrSlbjWLSR1eZMtc/P/i0KB9aFKyy1\n",
+       "UYL79u3b8O9EL1EvEb5qxxrGlwkrFquc8g2tl2h/feooybGQUwUr6eabb5bU+12Mxa04zinjvK9S\n",
+       "bjSs6kVlV4ZaXzSsU8pT8heKoP39J9BffSogMof7UR6scaL6du/e3VQeqe+zPBOWPmWhDRnjfA5/\n",
+       "L+qOPk7buU9PpO5yP29rroc/YUlti7K6TwX1gOXt5UVh8M9NNfbdpcMjfBlTrkihQJx55pmS+rw/\n",
+       "HuVFO4xVpaeC+uUnyqir3oxhlCh89fDtQxWn//F9V6tRhmgv96OlXmgHrz9Xery9fZeId5grcZSX\n",
+       "8UaEbglXmKLxQH622rnL++9NN90kqZ97qO/afFYbLqRms9kNilWr/x58592S3l119yRJkiRJkscw\n",
+       "S/Pw/qd/+qc11p6v/lgtsqqu3c+dyq+A1TX78Kyqyfc0tbUYnT/l+PNjFVA/ni0XK3xoTpjIx2mr\n",
+       "KFGAdXXHHXdIWusP0UrkD0B90P78vbbfLfp8rFr/FVQYrNBSv4vAz4Tv449SKg9qB/0UBRo/kjGR\n",
+       "ayhSfs4hMIZQhrgnkZ7kn6Jta3PFQWTJ0vZT5WgbiysKPkegctM21AP12qpekj+J+ve5hfu4j5nj\n",
+       "pz+Uxt7U508OhXbHN4j6RRFiDkOJov95PfBO8t0Cb4/Ib5F2dqWKfzM2GcuR75GXC6XJ+z/tyfPu\n",
+       "2rVLUu8bFs3Vte/Y1jxa+M/6+Z8ohFyvOgK66e5JkiRJkiTJQZamSHVdd3BViJXmfgtYi62+JVMp\n",
+       "Jb4Kx2pFIbv77rslrY02KoH15KttrIHWKDfAqmHVzzlBWAO1SgXPg9WzaD+NqcC64KfnamnNkuv9\n",
+       "yPuf/73m/Ehpbb9qjZyZCs9nVcoDFoHVSo4d72f0d6xUxg/jmkz2Rx111Fy5vvGNbwwqz2o8Sslh\n",
+       "rGGRTnW2XXS/0liibijH0CikoXgmZ6KZGFMoKkP9LbkObe3qJWOAdqM8jGH6DHMdCpmXm/qjL/H3\n",
+       "1tMCpoaxQDtTHo+8BhRDFBKUmyii1ue4SGX2iHN8sqgn2qWkUntuQhS16F1D1N4FF1ww97mxuwet\n",
+       "Kr+Xm3rA5w5lLTp/10lFKkmSJEmSZCBLU6T+5V/+5aBnP74a7AtjbaCk1O7Dc9Ya+65Ts2PHDkl9\n",
+       "9CDWk5/5VwKrA2uDn1HUUy1Yj6yuqVfPIF3KcF57jtRWB38XrCSee2j9lpTCkqKEYuUnivv3plKo\n",
+       "SlmNGW9k7x2aG4j7oGhF5XbFDqsYaxjrnCjLVv+b9YiUISxt8u5giW9WZu8oNx7lLSlRU2Ws9ghP\n",
+       "9zWhPuiz3latMBajSE4/8w5/VFcOPbqN3QKizOiDHrnsysVmRx7zziudSsBcTj8hop13TeS74/VJ\n",
+       "/4rmLvf35N1L+Vp9j6j/0lzCiQNTnaU31geOtcO2bdsk9eez1pKKVJIkSZIkyUCWpkg99NBDB1fD\n",
+       "KDRYK6zC3Z/AIwjcmosycE8F1otH3LjCUIL9frLzEsGBFeAZ1LEE/rmGAAAgAElEQVTCPHoR/PfU\n",
+       "C9Yd1lat9cr3aI/orL2tCpEXnoul1uqMToCPfNsg+j0+QKgIfh4WaojndRpL1N48H9YtSh0RU5Sj\n",
+       "pEz+/+2da6ymV3Xf/4/SREprJC6FAMaOx8Y2M77fnQzOACIEx1HBgAA3KKg1VaQUg4IDrklSHyBI\n",
+       "yFEQxpEiSNwowU2aKMjGkJAaw8Dg+2U8HmMPvqAxCsbBbWmkonwh8PTDnJ+f86456+z97Pd2Zvz/\n",
+       "fXnPeS/PZd+evf57rbXZV4vcNmTij2pFzClDe+I++Z9+T8TcPIkKBfljZr1fZwZ9q6Q8YSlHZrV3\n",
+       "Wkk5iD5L0+4ByO95zdRx+jBtJ64yUH58L9sFosSiI49rV0vom4zp9Nmx11u7ukB9xLFoLLV+lrPK\n",
+       "7TcriCblWVHaQzFiRcoYY4wxppGlKVJr12yZxTIrZg+waHWV1mtLe4NNC7lLYob1GDGSgcUdFSUU\n",
+       "p2wdGyWDaEauAygvFJO4f1HtOjfXhfKHooPisCim3cB627ZtkoZ2xX3UgoIUlUAUyUx5yvxxaMcc\n",
+       "L7brsfm9yJ3T2t7x9UMNQCFDlSHLbwY+d6961askDdeP3wNZpiO0f+oXtYPy5rpQtO6+e/67S+F7\n",
+       "QpvPrr0E94ZlG9V12iI+IZkShUWMElObWXnezCsPU1T3KT/GPNRJ6oe2T0Tx2AhnxoZF0ZpRPfrP\n",
+       "tjLWx2msEjV2bC0x7Z55tbs6RN72trdJGsZCnnm1zyIrUsYYY4wxjSxNkfrpn/7pg3Zox9IlxwjK\n",
+       "SimrMFYfylDmVwBxP6cSWEFYzvhEYYHHdX6sqa1bt0o62ArFOq1dh+U6M98ZygulikgXZveUB3vC\n",
+       "YQXG686i+qLvGcoVs3XKn/NgBUUrNkajZZFHlPNYOD73h3JYu0cjoAJwXyh+kEVbRrBuyMtFuUQr\n",
+       "s9QOo8/WtMor/Yz2wXGjLxPErN/0V6xX+mdUSgElNe6bFqP08FfKjjNPWpWomHOtlA+HtpT5ksQ6\n",
+       "GBsRPC1Z1GLmN9h6/MzSpy2hzMXoPMp5LPRFfKpK0MaJ6oz+kXEvxayeuI8M+iBjAGMDfYXfo7DQ\n",
+       "fjhffCbEsZo8WpQjfRblj/OWoudQBBmbab8xF10rPJPx08SXrNYn7Jd+6ZckDWMj5VPqP5Qnz3LG\n",
+       "Itpp7W4VVqSMMcYYYxrplrEG33Vdv7KysvDzGmOMMcaMZWVlRX3fr5sawIqUMcYYY0wjS/ORmoUi\n",
+       "xXo3vkdxvZhzXHfddZKG9eOxOSKA9WDW91lHJRqL811//fWShozR7EGGDxHvsz6OjxDvk1eHyAH8\n",
+       "A/j8kUcekSRdfvnlE+dlPZfoMo4bywWfL6Ik+Zz7iXv+cd/ve9/7Js530kknSRrW9fEfYL0ZtXP7\n",
+       "9u0T58FniXV/osU4H/sc/fZv/7Yk6eqrr5Y0rOvjO8fvuQ7Oy/o95Uv5xWzKr371qyUNvmPvete7\n",
+       "JEkf/ehHJQ1+Cpwn5nSpjajh/BwPPwXKcVHqLOf51Kc+JUk69thjJQ2+bNzf/v37JQ3+CfgE4g9E\n",
+       "/+H+6YeUC/+/5S1vmTjvvFlZWdGNN94oSdqzZ8/EZ/QlfF6oc9oS/nr4gcU2ut651r5GduzYIWno\n",
+       "gzECkbaMP+Odd9657nHwHbnyyis3PN+sifeHXx1jDH2pdR9OfG7oi7/1W78laRir8TXLorAYk2LU\n",
+       "H2MbYwB+s4wF+CleccUVE/dH/fM9fp+t2HBdnI++wvVwvfgXXnrppZKGsSX6/uAbxNif5VIjd9ut\n",
+       "t9667uf0wauuumri/ubNssayu+66S5J0xx13SBraZ5Z5H58z/DZj/jHq9eyzz5Yk7d69W5L0gQ98\n",
+       "YMPrsSJljDHGGNPI0hQpKc8UjUKB8hNnjURgYBFHxSV62mMlTLuHHNFG0QqKMBsmCgnrAuWqlJOF\n",
+       "z1EGOG8W3RWVEr6PMhCvN4v6ohxjXqO4QztEhYd6xFrkfu+77z5JB2ekx9omsoJ6yyI1KFfqMWbA\n",
+       "j9YkakDMScJ5okIE2Z6HlM/YPFete9jNC1SYmG8sg3rM9r6L7QkFdhlEJQpQc2krMWqOvtUavRch\n",
+       "qmvv3r3rfn7JJZdIypUoqI0amjdcB9FdY7Pvx4zZ9InYx2JfzfpONgbHtshYVIreYqxg7EPBjH0E\n",
+       "RYPVBdRz7g/ljuuIUZzZ2MaYko3xqMelnH61Ps+sOtBXUWJZ7XjZy14mabgv9p5rjSJ961vfKkna\n",
+       "tWuXpKE/Qm1uPMZ4+MpXviKpPgKfqNgsQpn2Rr+tzWdlRcoYY4wxppGlKlLZ+joKS5ajItufKTtu\n",
+       "NvssEfPtYLXEnB8R/B+wmshVgTXDcUrKVlRKMn8B1vfj98dm/YV4X5kVFXNtxPLCqsGK4TgcH6uO\n",
+       "42RKXWYVYBWTcyRm8c2sM9oH1le2E3sG9zl25/Jsp3msMa6/1u8EK7aUGT3LDTQv5r3DwDSgZkdF\n",
+       "alb7GwI+VrENnnzyyZKkBx54QJJ02223bXicuC9jzMU2NideCdp2ZNrM5pnqGym1nSz3HMRdCGLe\n",
+       "pRJcV1SiGMvww+SZElXt1lUPro/7i6D+4xOUkf0+wn3STt/85jdLkt7whjdIGsaWz3/+85KmX82J\n",
+       "YzPlxrOgVnmNu4jMqt1HSs/miBUpY4wxxphGlqZIveAFL3hGaYjWBQpD5ptT2k9n2r3aIEa1RWUL\n",
+       "65CoJogKStxLsLSOjVITdwrP7pfjZYrHtMT7A+6f9W58srAuiHygPLDqmO1jlfCKAlkbDRez9Y6F\n",
+       "8471Q8mUqFe84hWSBl+uqHq0WuEZtXv0RZWB7MQlf5cYGVW7D1bJjyPL7s37qAot5RIVmwh+h7OC\n",
+       "vhH9xyjzuD8mY90tt9xSdfzoYxMzV8/aIkcdXRaZwpa1PaLcaMuM/ai6tW0Iv1xAqWFMJUoOHzzG\n",
+       "4re//e0bHvfiiy+e+J9nWrYHXqa08ewoKSXZ77N+QXu6//77JQ3PVlZ9UKSmhTGaZ0T0kSqN+aWM\n",
+       "+MvGipQxxhhjTCNLm95tZCkweyXaK1KKgoqWeowYyfaNYn2ZvERbtmyRNETWREUKRSMqDVk0VG1E\n",
+       "RVSiSsSdqmMOlLGWPcdBkciisPgcRYfrxheM3CiUE/9j/bCvFlYm1lpUiKLVH9fXW/1bsHJnFRkV\n",
+       "/Sg2C7H+a/1dom9VbQRLjKyJqkEsb9rRtm3bJA31G6Ndayi1BVQvxgj6butu86Vrw/+O6yrtG1qC\n",
+       "vsWYRtlRp2PKShrqguuLYxTKGgrPtL4yERQoIO9S3B8zqx98zuIecDfffLOk+sjUqJID7eMjH/mI\n",
+       "pOEZcdFFF214PJSs6O8Y/SFLxLGuVq3PjhNz4FHflBevEfoNz4ZM+QLGclZXiDqMuQSBsSbeL/Ds\n",
+       "WFRE8FjfQytSxhhjjDGNbM4FxxmDVZHlrQJmoTGzeLSGsNKYrUalalY+SnFdO/OBwg+Dz7FSsQp2\n",
+       "7tw56ryUE8eJCgOgvPB9rBTOS7mdd955614P38fawcqOik603lqtskjcwb0WlBNUAKxUFCmsZJS3\n",
+       "aNUs2w+llljvlFPJTyP6PkXVIfpHcDyyf7dmy64h88ucNbTRWfswMSagJHCeVt8RjpMpAShqs/a7\n",
+       "hFg++OjUwqrFOeecI0l69NFHm64jy4QNZFyHP/zDP9zw+5TX3/7t305c3xNPPDHquqgPMuCjCI5d\n",
+       "ZeB6SlGPEcZmokfpq5mihuLFs4PVBNpRFj0Zd/uIz1yOO8+xYS1j+60VKWOMMcaYRg5JRSr6XJTA\n",
+       "WkMRyXxEUCjIbrxv3z5Jw+w/5iiBOHseGz0XZ+EoGygAzNZRnuJegcz+8Z9AIcO6G3s9WKcoCVl0\n",
+       "GOUVI4koZ5QXfNqIQOJ+eeW6UDJiXqdZKVARrKNaax7ljfKNViHZf1nPP/300yUdnPsly9Wz2aDc\n",
+       "S3nbIrE8oyKV5b9ahLUZfWDmBZZ7lguvFRQF2iIK1VglB6JaH+tuXkpUxticfyhq+LFmPlylaLnN\n",
+       "TlwdaY30zZQoxiRWBVCg4j6vtT54KDqM5Yz9Jf/m+GwFfN3iWDIvaiOUwYqUMcYYY0wjh6QihdVR\n",
+       "m9k5KhrMNrGA4++ZtcfoIqyf0no6uTLI/ZGBYnPKKadIGqxYMqOTswRlKipRgNWLVcH3sSaJPqz1\n",
+       "H+D3WHElazdG+mD1YUWwVyL1wCy/VI6AikA9Reu5NeIqRiwBVg/tAwWU72dWEVbiMcccI2nwL0Cp\n",
+       "4jpr8z8tm1L7zYjKUvSZmpfCWMPYLPaRqL5mMIbMWmVDhWcMo89P26ayPQgXTa2CFzOKzzoz/bKg\n",
+       "faE0MvZkKvisoK8TFccYRv6z2kjkqJDyyrOE+8l8pWjPmTK5qDxSY58pVqSMMcYYYxo5JBUp1m+j\n",
+       "D1EG67UoPqwDoxxFZST67GRgFcUM7LV7m5FNl4zY3BfXh29KyRpg3RmrAusSBYXIiahIZfm0sPJi\n",
+       "pEctzObZGR3rIipmkVL2Xcqb++M1u48SZ555pqSDrSN8nLD6sYIyfxLUAP4nDxnfp14pl9Yd1BdN\n",
+       "q5oSrebYH8bWUwZ+IxtZ6aVM5/OC89FWaCNj7z0qYNFnCZ+SaZW27PibFcbcWv+9WavAY/10a6F9\n",
+       "oHrTjtgXdN489thjkoZnytgoSMZAnmG0S+6n5KOYRctNu/owb6xIGWOMMcY0ckgpUnGWXlKiIvgt\n",
+       "8IqPElYj1h/Rb1HxiEoP6/kxEqE2MgTrj1k6Vg4+Ullm9wgKEtYZvjkoQJnFnlnHWBNYB2OtAHxg\n",
+       "KBfOUzpOdj2UD1YJ/9MOaBdZREmWPyzzscOKwtrnPnjFGo6/4/wocCiJUYFaVC6UWRGzIpes+2hV\n",
+       "Ul6z9u+g/2+kmEZFh77dmtE8U5uzTMjUdUmJirsvAJm+AXWZyOPYNku58jL4HZmol0VtJvJaJYr6\n",
+       "RxWeNn8Y9cSzg/aTRQtmUWglOO6iFRjaAX11rJJL/6LcGQMZM0o59LL6pxx5ts2bsUq2FSljjDHG\n",
+       "mEYOKUWKWenYyBJmx8y2sRqx2jguVgWKRIwuQjHC1wqLOFq9tdYgs36i8bB28HW68cYbq47D97/9\n",
+       "7W9LGiIvsLrH5mahHIi247rI6F0iRiyRFwvI+RLJrPZo7VFfvJas06w+yDKcZabHdy76n2R+EZzn\n",
+       "a1/7mqQh6rLE2H2dSlD/UckbC/2Fdl+7B2SWi+iEE06YeL81C3U8z3pRgHFPMSCCld/ee++9o86Z\n",
+       "RRzib8m9YsnyPteT9cVMKYlqW8yFh2KBBc33a8cgFC9+h9/mooj+pXFsZ+wp9fUs7w9jSqZ0xPKl\n",
+       "3rK+SP3SdkuKxax9zs4++2xJg8/UrPNildopyl5WD6yCEK3HcdgLEcUzy6SfgdK1devWifdRqGYd\n",
+       "bRqjFktYkTLGGGOMaeSQUKSwRpiVjvW1wCrBOsTayGbz2V5hXAezbpQSrKaxoCBh1WAN/P3f//2o\n",
+       "40RfKnx1WqPZAEWi9f5Q2jjO2J3pM7DysGawdrDKUeiwnvAfifVKdF60PksRUJkVijVcq0QB9UQ5\n",
+       "x2hQ1IfayCOsQKzGqAjWZnlG1SgpUUSdYvVHVYPfx/23gHo799xzJUm7d++eOP8rX/lKSdKtt946\n",
+       "8bv1yqOUW44My7P218L3gzZJXaL4UBfUTfTByNpUjCjmd5Qh56Wtc1+1KjR9J+b5aYXrqS3fE088\n",
+       "ceL/uAtD5nsU4dkw1qcoKoGlqLKo9KFcjN1Dbyw8gzjPWCWKdvNzP/dzkob2wtjH6kMWIU4k8hln\n",
+       "nCHp4HYMXBftE3X8Na95jaShX6AklaIeL7roIklDuzr++OMlDf3s2GOPlTQ7RYr+umPHDklWpIwx\n",
+       "xhhj5s7SFKnnP//51bNIrJNaH42MViuUWXuWWXzaHC5YybyOhXw6MVN39A8oreezw/hxxx0nabDO\n",
+       "8D9o5fHHH5/q9xncD1Zo/D+zTlm/x6qJGewzou9PSSFib0TKD2stWrUx6hO/DcofhQfrHNUg+o2Q\n",
+       "Ayb6L0Qrc1Z+FZQbVj3Xl9U3voW8wqmnnipJuvDCCyVJr3rVqyQNVi/1GBWp9SjVyayVKCxvxgjK\n",
+       "nlf8KKMvSG00UMwuz5jJ8WkjWPZjLXP2E6UNMlagJPCKYoVvEAoBaiq+Z/yPH2SpPqKSc9ZZZ038\n",
+       "vgSrDKjL3H9UmWvV+cxnhz4Y23bmA0XfrB1bSnA/Y/0o8UkiZx5jH+VBfWf7zwLt8I477pA03F9U\n",
+       "l3k/jlGMgbTbuFsGYyuvJ5988sT3uD4ULZTU+ExG6aI9oLTVwhhGudRm2rciZYwxxhjTSDerLMOj\n",
+       "Ttp1/crKysLPa4wxxhgzlpWVFfV9v+5mm1akjDHGGGMaWZqP1Kc+9amDcnuwHhl9KCKsm0bfkgiq\n",
+       "13XXXTdx3JgLI4twwS8gRgKxTs26Od/Dx+PTn/60pCHCgPuKWV137dolaVhnJqN5jGQg/w7Xz3Vf\n",
+       "cMEFkqRrr7124nPWkYmmgoceekjS4CNDNBe+UURgsJ4dd4R/05veNHG+af1NSrlEqL9rrrlG0uBv\n",
+       "UFrPrz1+jFT6tV/7tYnzAj5LWa4f/ACIQKKdUJ74VUTfKM7zoQ99SNLQLolAYr0+tgfaI/4hWU4X\n",
+       "Pue6f/d3f3fd+5uWWD60v/e///1TnY9yyPpnjJhaWVl55lytGb5LcK9XXHHFM+dcBJxn7PlKUYy1\n",
+       "54vHoW7imIY/W9x1Ap8cvk8EL33mkksukSR9+MMfljT4wOCbk4315FWivu+55x5JQ5+Pvmjcx+/8\n",
+       "zu9M3N+84Lo++MEPLuR8lPNVV10lSfr85z8vqbxLBn2VcmesoE/jh4vPHM9gcuaNbZ/4ReKjF33N\n",
+       "6NtErUZ/19b+0ErpPFakjDHGGGMaWZoitV6m2ZISBSUlKoIVyayW35dyrZSim7CEo1XG3mq8MrvG\n",
+       "GoqRH6VcFVhllE+MoovKEApFKfKFaC5eY1bkqOSgSE2rRMU9CkvZbWPkUi2l41K/pXqOmeuj2oE1\n",
+       "hRVOzhcUJSJIUJZi5A1+itxn6X5RBUoZ3WOW7XkRlbrWTOqRUv+MkT9rIWKSXG2toNpS1zGvUy2Z\n",
+       "OooCUKuyjmWsElV7HOoGNZ37y/Y/pY3HXHKxfmir9J3SWM/n3/jGNyQNbR6Vmc/p47WRtrV7/kXi\n",
+       "LgXkXVoU0ee5duzcuXOnJOnjH/+4pIP7NM9OIudRu1vZu3fvhp/POy/XrLEiZYwxxhjTyNIUqRe9\n",
+       "6EXV1h3ZRvEhuuuuu0adCytw1jlksnxF+IjE/bZa8/fEPEm1O2BnVjB+CViPWJfT7owOpezG7NmX\n",
+       "KWal7MKLAsWD7LnkC0NRi1YV1jYKFIoRihbZfb/4xS/O87KfoXXvvugPM3Yn9GmzY9eyUcRxpkRR\n",
+       "N7VjT5bpeSyxDzKm4Ws0L0Vq3qAA4Wc5dv/EmBkbalcnUKIiqL/44pQUECi1i9LebvhL0vfYBWNZ\n",
+       "7N+/v+p77Dl55ZVXrvs5PlI8y0pK77S7ahxqWJEyxhhjjGlkaYrU008/Xb0DNEoOVtxYRQqrD6UI\n",
+       "hSfOloniQpmp9S+IVhV+D1gvnK91PyD8NDhO3LEc5YSoL15ZH8faZb+k7du3S5IefvhhSXnGaNb7\n",
+       "KT+gHLN9sPAJyxSpUjnU7giegVVIe6ndLymCfwWqBP4gpeNFq5Z6yPaVwtqjvONeefOG+qUdoayh\n",
+       "LGJdlzL40+64n3nT4v/DmEMb4Zo5FveYZayeFdPuZlAiy8Q9a8gs3aqoZbsPMIbQB6PvVdyTLyMq\n",
+       "UdR3RmnsKY1dsY+w28D555+/4e8yaiPUM+IzrqQu33bbbeu+j5JL+T344IOjzptBBnO+T2T5oYYV\n",
+       "KWOMMcaYRpamSEn1ygOWeauPEb9HScGKiTlmOD7WTi0xao+ID3xFsPBbd+zGCkLhif4D+I4xu+fz\n",
+       "aCUy2y/N+lHUsFqiFVayjkp+BvOywoH6rS3vGJUXof2gwLWqFfghRPC5IhqTfkE9tkbd1eZS4n5i\n",
+       "lNxYn0LaK34rr33ta0f9fiy1/h9ridFYUSVElZ63IlVLq7/gOeecI2kYE2688caZXdNaok/QtKDK\n",
+       "oubv3r173e9t3bpV0nCf9NHPfvazGx6/du+7ki9URlR84lg9VmHie+x/+q1vfWvD71N+EJW71rEE\n",
+       "dR91HWUqU9mBZyOKUyzPzMdtLPh14hOHUhqjROeFFSljjDHGmEaWqkjVglXWupM2s3As7loLPYs8\n",
+       "wOrAZybm1EBRiDtfx0zTJUWO62YHa6yNaKXis4O1gHLSmtUZ6y6zMktWTUlxmlV0YImStQS16/lR\n",
+       "pSCrMhntKTcyv0eyDOlYrfhXxB3SMz+SjLgzO/nMMsYePwMFcFFqTklJrAGLuTanWGTaDOqnnXaa\n",
+       "pMGXJtZF9IeshTaGknDWWWdJOjjDdasPDirqli1bJA3+ltQ9bRClgDGD81Fu8f4Yc0rRf/g+1Ubj\n",
+       "QW20ZqxP/Ai5j6ydxD4bx8pWX6eSEgUxhxvPnlJOthIoUIyV2bOBPkk5cP7WXIDAsy8qbrSj+KzN\n",
+       "lCh2++B3Y9tPhhUpY4wxxphGDglFCuXm9ttvb/o9s1Re8ZUqrX9nSgVWBrPjaAVgvcTzYcWUop8i\n",
+       "WM28ZpmjS7N+ImFQtLj/+LuSv8O0uUHG5vKZN1jXYznzzDMlDfto3X///ZKGLMFx/Z+oyegrRXug\n",
+       "naMmtGYPRhVoVXAzSvnBOO9GGcfHgL9SZv2To2et9VnaFxGIhMVCRVEpqXeRaffy49ozpWLsWAG0\n",
+       "aXx9sjbOGJad/8gjj5Q0ZLQG6pryjiok/1NHKDSor6ilWU68ZfuoUe6MtYzlpbExKoqLiryFGElN\n",
+       "pvZpFSnqn7326JvRByzuIkJ/mjYSG6UrtmP6H6/4RmUwNs06p6QVKWOMMcaYRjaFIsWsH+uFWW6c\n",
+       "/bfOIpnFYh1gqaMstVoNzPKzdXCud1qfJWbzKF2tETJcF9ZCyUpojVwpwXGxxue9F1yJVqsffxN2\n",
+       "MEcFIZIoKlIlaymqKFxXjASi/Hg/8wco5Vka699D/yztg4biOC2l9rne57X+d8cff7yk4d6ntdhb\n",
+       "iUrPrEDtjT4lkdJ9o0LyPdokdY86jQ9NHEsZy/Fh4fv4L8Z9Q2FWUYCM/bWZ0iP0IcbAsdeV3d+i\n",
+       "iBHlrZCPjHpGkYrEnIrU+7SKFO0ty4QPJf/cPXv2THUdGVakjDHGGGMa2RSKFLN+Zs/4FM1qB2gU\n",
+       "LyxwfIQ4D1bsWGsjs3K4H6wv1vtbo6OwBnltta5idmCsikyZyJSokq9MCfJbbZZ9mFoVMRSpz33u\n",
+       "c5KkU045RdLgixbJfMJKihBWLWpArUIY6zsyNkcR7bf0u2mjALlf2mV2vPV8Amv3BcRSpu/zf9wn\n",
+       "kbqp3YVhs4BKiSIz1vcLGHPoq9EHjXLC5ywqUiUlobRn27SMHSuz9lP7bIj7VC7b1yvbfWIsjPVE\n",
+       "iGcqOPcf9+bj99NG75XGymU9U6xIGWOMMcY0sikUKWAWj1VIFuJp18uxirCu4vr+tD46KFuA1cpx\n",
+       "8QdotWb5Hdbe2Fk3VhYZy0888URJg6U/dt04ixqsZVqrpAR+Ia3tJuYnovxQSmIUGb5P5JU69thj\n",
+       "JeV+I9n5ojULqADRl6u0wzoRKplVPDafF+WJlZlZu9NmE6ZdtihbqKwl3x/Kjqg9LF3u7etf//rE\n",
+       "9zeLEpW1kQh1j18blNpMhLEyq2vKmSi8jFnlKmuFei5dx7TPglgvs4pgbaU1M36Edkd7yJQhxjrG\n",
+       "LMbAo48+WtL0Y39JZZ81cZ/ZDCtSxhhjjDGNLFWRwgeCWTzKDrNXrINsj7KMmH+HWXS2Xo8Vy2x6\n",
+       "7Lp2VGiybMGtWZDZL4vyqPVNQkHBOti2bZukQfHDPyG7nizip3Wn90UxVoki1wrEDPQl65+8URdd\n",
+       "dJGkwbevNhqU89EuUTAp/9Z1/9ZoxAza86ys3FbIbURW7VtvvfWZz2qj7x555BFJB7f9VtV3XkR1\n",
+       "tNQWYW2ZSIOiNPa+Sn2d8ma/z2mj5AA/THxtyPPVWi/0ydbM4rXwDKO8UamXxbSKGM8QlBnGJJ7R\n",
+       "2V55jJ3syrFopm2HY3dTsSJljDHGGNPIUhWpGBHDOjuWea31BeyQfcIJJ0y8j/LCLBkFCaWG/7Em\n",
+       "xuaVIuIHyPODssB9tvoWsct95qeAwsT94cNDOeLfgZKFFVnykZlVLpdIbWTVvMDKwl8iKpRZJu3s\n",
+       "fazdG264QdJQX7VkeZko/6weZqWaxPLIwBpFJSGzO4raAw88IKleFWpVaDn/Rr+LfZu6e+UrXylJ\n",
+       "euc73ylp6KMxWu8Tn/iEJOk73/nOqGubNdPWcVSmZg1jyO7du2d6XMaq6M/aSq0S1epfSTuLOeTi\n",
+       "6giZwWcdrZiNqdFHbiyMCYxR9L3MD7K0G0Er+PeW9k9985vfLGlYVbrppptGnYexED/X2rHJipQx\n",
+       "xhhjTCObImov5nnCyhmrWGA9Risgzo6ZvRNRgBWCclObM4Z8QS984Qsn3idfULTwxypsXFdUvCJY\n",
+       "CdwX5ci6NjlkiM6jvFnnjnA/rT5jJWalRKE6sMM99Uy0Z4T7jtbmtP4ccPPNN0s6uJ5L0Xvz9tvI\n",
+       "YO8/ygUrOV4n5Yt19vKXv1zSYH3SzrEaS/2G/lWy9mj3sf1xPnLarAWfEOo4qq4oNPwW5YNM57TN\n",
+       "ZStRsyL6s22W6EPGqNJ+kpkKf/7550uS7rzzzqmugzHkta997cT1MCbgS0e7IFcc/+NDxrMgRvbG\n",
+       "9sfx8Z2KEeUob7Rx3i/VG+039l0ykk8L/QH/xMx3btZKFGTnIxoTf1Aym4/1q2asodwz368MK1LG\n",
+       "GGOMMY10y4hO6bquX1lZWfh5jTHGGGPGsrKyor7vu/U+syJljDHGGNPI0nyk1ipS5HzAJ6jkmZ9B\n",
+       "hALZUzlHq/oV95UqMe35xhLPx/o7PiX4icTcJhEilvgd5cd6P+vzV1555cT55k1Wnvgc4ZODH0Mk\n",
+       "u29y03BftDfOg1/JX/3VX0k6OM8Tx6W94Zfw6KOPTnwfH1tnzHgAACAASURBVDX8BvDfedOb3iRp\n",
+       "8I/gvBdeeOHEdX7xi1+UNOQR++Vf/mVJ0tVXXz3xPfrNeeedJ0natWvXesWxsPZJDqDLLrtMkvTR\n",
+       "j35U0lCuRC7h25flmsHvhPujXeKDRn1Qv1dccYV+//d/X9Lgg0OZ448Vc7D96q/+qqQh0vL222+X\n",
+       "NPgP8jvqHJ+Vd7/73ZKGsqzNON7KsseWw/V81157rSTpjDPOkDREntKmiPIkoza7GNAn8bVjLKHd\n",
+       "0edp8/SF7P6IWM+i4cjTVdozEb/Fyy+/XJJ0zTXXSBr6Dj5F7I24d+9eSYNfL32Nz7/1rW9NHD/m\n",
+       "93rooYcm7mts/WV+kCXi+bLIY/o394FPHde/Y8cOScMYRP1n58uwImWMMcYY08imiNrDiotZfMeS\n",
+       "7TDeyti9yDIWtXN8lsm6tAM4VlCMplv2/lgR8nOh+JTKM7tvFCSU0Aj1NTZyoxasxgsuuEDSYMWi\n",
+       "ikQrkPrJIpRQxsj+TGQNis5tt902q0uvIqo+WJu8Pvnkk5IGKzKDKEIUU6zmyHr1WLunFxZorGuU\n",
+       "Byj14WzsabW4Ydp9LQ9ViAxlDKYvZ5GvjA3f//731/2cMSOOkaj2MV9S3NUBJZLzZ/m52CUBhakU\n",
+       "NRf3pIuKFApSKdKZPkBuN8jGcBSZO+64Y+J9yiPbvYC+Xeq7tbkCZxURnt0nY0iM4kNh5PNMiarF\n",
+       "ipQxxhhjTCObwtypzSZLfqPMh6p2D7pFwawfsp2rs735ImN3boeSj9S8M4zPSpHD2sRvpXYfJMBn\n",
+       "CesyUxHmvcN4tMJQXLiv+DnXk11XfB9/ilKOnmWBKlDaCxClLWufKHtr87iV1Nd47Ex1RAnCF4Yc\n",
+       "bBlZTizqgjot+bhEUCyebTDGUceZckEGavpypkhlbY33UXsZU2hzDz74oKQ8N12k9L3oS8f5UHj4\n",
+       "nzGbdsXuCRnkvYrtKz5TUbxQbLP+UtqdoLRas+hdK2gn1CfPVFR6nkHRh3HsLiYZVqSMMcYYYxrZ\n",
+       "FIpUyZ+BCIKzzjpLkvS5z31uLtfx/ve/X9KwboyVQMbqWlCOsD5Kvlu1ma1ZR8+sK9bJsWawjrBq\n",
+       "UYQWnUl71r5h3H/0YygRlbyS/8C8INoQyBpMvdWCD1S0VrFKx6ofi6akVGbqAtYm/ii1e/tJgxKF\n",
+       "WpeNPfSlWos1U9UZA7jmCG0hy9w8bZ4/7jf6Am12qJeSIlVSVhgzSz5L1M+2bdskDX6KPANOPfVU\n",
+       "SUOUWwRfOPwTUWziXndREaHtopzwOUoU98/xS6s20bcvEhWpLNo0a49Q8pGiHPCHbKXkY0j94lPH\n",
+       "dZPhnHpgNYJ+yn1nEcPAakEJK1LGGGOMMY1sCkWqBBb7vLOwo0SxbxFW61hFiuvESsqs0bGUlDvW\n",
+       "yePsHStk+/btkqStW7dKkr761a9OHHez+ZiVGLtDe+2eelnEyryp9e+Bkt/EZiHz7aN/jVGUpMH/\n",
+       "Ait6rb9GKVqItl7qS/g21frGYPFyXPog6maMCsLSzvIGwVg/QMBCP+eccyQNY9Cf/umfNh1v0dAX\n",
+       "uP9s7C/VI20kU+JQ688991xJgwpK/fA+oOaTMw6ob5QOjlv7zMpWLXif3GuMYdEvEuWqtNcdSiv3\n",
+       "R1Tg2AjlUrTdrJ7VjB1ZVCaf79u3T1K7z1eE+ssihSNWpIwxxhhjGjkkFCmYdr21BLlBshwhY4nr\n",
+       "yK1Rd5FSvq0sooJ8QpTjshSNWZUDYD1gLS5j/8gxPPbYY5Kkk046aclXshi2bNkiacgCjV9QFulW\n",
+       "gt9xvLXQFjLLNO5+kEFbKkXNoV5iKUdfE5Sg0047TdLQ52rHstboJ3yB8PVBmTpUwJeFsWysogDU\n",
+       "I/Ue83JxfF6JCGfspp5QDqMSFeE6GeNihDnnR2lDGeL64pjM//QZvodSQvvguCXfJZQrvs/qC/dH\n",
+       "O+bzTMnLIufx85w2JyTQ1/Fxi4pUbb64sYzNIWlFyhhjjDGmkUNKkTpUiNYGVsOslJJpM7jPW4kq\n",
+       "7Z046/VzrDWiv6JKUOsbBa1KSS2t+7ERQYK/AFZkyc9m2cT7jf2C/8kJRIQU1v1XvvKVdY8Da63w\n",
+       "kp8ZbeWEE06QNPiUEDUFKDpY8BlY4NxD9DMkimhtrqu1lHLIcdxW6Gt33333VMdZNPg/Tqts0GYo\n",
+       "36wNoW5SHygyROmVojfjvpCMPTGXG+0lKm60wyz6k3aUPUtQjkr+nfg2UQ58n1c+b/XrPeWUUyQN\n",
+       "e/JB62oSzzoidKMiyLOW617WaoQVKWOMMcaYRqxIzQFmxcz6sW5mtUP8tFbqvCHPU2vEUS2UJ+WN\n",
+       "8pT5U5BzpGRdzjvXTsmPIQPlCet1VntBzhusaKx2rFXUG/oD1jqqEdZ5lmeKSKYx0Y6xTx599NGS\n",
+       "hjIl/wyUoqCog8yCR43NoqKI6qPNRp+Peaujm5WY+XtaUIiiwkVfpE3S9+PeehGUJhQTojz5HeeL\n",
+       "PnaxvaAu08bxgWIMjdF59Al8hjj/UUcdJamsTnN97DHH96NiVMr9lz3DogI4bYbz0moO9ZDlnKsl\n",
+       "i/YtKdLP/H6qsxtjjDHGPIuxIrUOWAsxQqI2Q3ec3bL+jMXdGoECm12R4r7H5kUaC+WAlZlZr2Qr\n",
+       "Zp39y1/+8lyvq0SrIkm5jlWisHqhdmf2WYESSL+KOWi4H3ItocJgbVLPvI96QL8ak6kf3yeUCMqG\n",
+       "Y0ZFqgT3goJA2cY920q/x4cqKlKlfD2HK4y1KDqxDYzdnYHjxF0hqB/aJn2zpHAwlqNsohxxXVx/\n",
+       "PB/XTz1T7ygufD+2Tz5nrEMpZdcPFKmSjxS+VzyDsvbVuvsF94fSRX+oXQ0oHTcyq6i9OCaSF652\n",
+       "jLQiZYwxxhjTyGGlSGHBlnaVL3HmmWdKGnxusIZK+xhFmP0za86UhNq8SszqZ70jPOvtWCGUX6sV\n",
+       "wex+1hnCYznhz8B1Z1YL69yoDmR9JnoxKkTTRkWWIKqx9DnXEcsfa4n2XoqIidb1ohUpzpPtKRit\n",
+       "X/yJaOf4n8Ss5NR/i68Yx0aJaPWLY2zgOFEtLvWdmNcnQl0fLpT2V4zQNqJfIW2q1ocsK18UH3zl\n",
+       "iN6M6jb1i08bfYrz06a5v2wPRcYwjsfvOA4KEWo+7ZLyoq1TLjGX2nHHHbfufUboQ5yf++U6YgR0\n",
+       "bX2hdPE7yr31WUL9ZPXHs4Axgv60f/9+SQe3Dz7nelCgsz0Qa7EiZYwxxhjTyIaKVNd1R0n6c0kv\n",
+       "ktRL+nTf95/sum5F0rskkSDog33ff3H1N1dK+o+SfiTpPX3fj9uobgqmVaIAJQkFamweImbPrH8z\n",
+       "O2fdOFoptbkvTjzxxInjR1AymMXX5otidh4jPmqtB2b5WFvUQ62PVLY+HcEPICqDMbIlwnU88sgj\n",
+       "kgarDvUgWiNZ1t5ZUcqNg28ekTWRabM9L0qJghiBhBXMDvEZMRt1vN/1lKhSXibgWLT57373uxt+\n",
+       "PyMqCPTBUhtCFSU32K5du9b9HlFahwslZQOVlVfaTFQMGTNQqkqKIgpkHNMY61GYUHdj26I9MfbQ\n",
+       "hzguYyAKSHY9tH2eKZwHRQwlK0a/cZ+xD3Ad+P6Nzf+EShyh/HmW1K7GRF/B6Hs2lpgTMIPrLK1O\n",
+       "MOZn991KaWnvh5J+s+/7PV3XHSHpvq7rvqQDk6qP933/8bVf7rpum6S3Sdom6UhJt3Rdd0Lf94sd\n",
+       "uY0xxhhjFsCGE6m+7/9R0j+u/v2Druv26cAESZLWM6vfIOkv+77/oaQnuq57XNK5ku4cc1FY7ERZ\n",
+       "Ye0x+85yu2AVMKtv5b777pvq9zFyh3VrfHSiIlUCJYZyyaxtrJyYFRdi1lyukx3Av/CFL4y6Lq4H\n",
+       "qwzrvFbxYF2b68BKyHzAyDs0dq8+fodfAFZSZrXQ7uYFfg07duxY9/NMiYJpcwstWpGK1iz9uWTl\n",
+       "Yq3TX1B7NlKIa6ONUKBQu6ZVs+lTUYlCgWCvPdoi2dtLdXG455GK/ojUA0odygjlFPMTlZQolJzz\n",
+       "zz9f0sH+hCg8t99++8RxM+LnHJ96Z8zP1PzYzhgz2RORZxvHQZVFpWfsy+4b/8K3vOUtG95HCa5z\n",
+       "bL+gfUeftlaVn2dfSWnL8rRFps03lVHtI9V13TGSztAwKbqs67oHuq67ruu6566+91JJazWz72iY\n",
+       "eBljjDHGHFZURe2tLuv9jaT3ripTfyTpw6sff0TSH0i6NPl59eY3KBFYHWP9FpjNt+bAGBuhkIHy\n",
+       "w2yeyAAUEc4Ts9vyO6wPrFlm0czqS7kzMkUqvo91NdYHDFCEyLM1lqx+435lgEJRq0ShTMb1c+47\n",
+       "81X6+Z//eUlD5mzqJe6HRaZu1t2xrrk+6vmxxx6buP5f+IVfqLr+eYMPHwoR14c1yX3UZimmPKIP\n",
+       "X1RUURmIrMmgXFEIuS7UimnysVGX+M1FUGmx/MdG7AJlGzNg4zNSyls1dgykjDlf7VhGnWV+a9RF\n",
+       "a14r+kxU1WMEK9eBEkeUHJHFjH1ZvUXYUxGFJypg1AP7PKIW4zdKnibaC2oxY3Mcq6jvUm67CGMF\n",
+       "14dyxn2jcDGWxTGN16jm8/ss4zl9iXZCedDXnnrqqarrj1B+rbsv0E8oP+4vRvzOC9phbYR8cSLV\n",
+       "dd1PSvqspOv7vr9Rkvq+f3rN538i6fOr/z4p6ag1P3/Z6nvGGGOMMYcEP/7xj58RH3bu3Lnhd7uN\n",
+       "rPvuwLT6zyT9n77vf3PN+y/p+/6p1b9/U9I5fd//+1Vn87/QAb+oIyXdIunlfThJ13X9ysrK+Dsz\n",
+       "xhhjjFkwKysr6vt+3WWMkiK1XdI7JO3tuu7+1fc+KOmSrutO14Flu/2Sfl2S+r5/uOu6v5b0sKR/\n",
+       "kfQbcRJljDHGGHO4sKEiNbeTdt26Jx0bjRXBFwPfmF/5lV+RdGAmuRZ8Q8ZGEsQ9vqJvEOf55Cc/\n",
+       "KWnwJ8C/AF8ncsjgK7Jnz551z8f6POu1rJ+TW+UDH/iAJOnaa6+VNKzXxyy1kVLulejrwyv3F8uT\n",
+       "dWRk0LFRYax740dBOXGeP/7jP5Y0+JTxPcqPiA3uB/8B7iPu+4QfCH4R3N9rXvMaSdLHPvYxSbmv\n",
+       "HefFryDuC1bru5OVZwb9Y8uWLZIOzmo8q/OdfPLJkoZ2tHv37onP6V+UAxFH2fk+9KEPSRr8Vfg9\n",
+       "vnmx/RPNSL/56le/Kmnw3+A41CPt5+yzz64uy2nJypI2tnXrVkmDTw3+kvTl008/XdIQIZyNeUQi\n",
+       "X3755eueb17E++O+8LnJ/CJ/8Rd/UdLQJrI2StuiLVx66aUT55sVcZcFxuRSXzjppJMkHVx/jEH4\n",
+       "AJVy2EE8H/VPVN7Xv/51SQfnW8JPMz5D+P/444+XNDz78MeMz77ox4kfbqt/a4TzXH/99ZIG3zbG\n",
+       "XsZEfK6oj7PPPnviOIzljHU8cxlbeWZfdtllE+fl/hgbuS98yyhn4P4Zg3h2UL/47uEb9973vndD\n",
+       "RcqZzY0xxhhjGlnaXntHHHHEMxEOWGNE58RIFSxfFJZMaSECprTHW1SisPqy/FSA4hP304qUclVg\n",
+       "3ZT2XGvNvlqKrMmUKCI4SlYKVjXlzOweBWfsfkrUZxZtiOKDEsX18zv2RnzwwQcl5eUfsxFzPqLH\n",
+       "UKRKUZ8x1w3lNe+cP/STaaNKS1C/WT6rsfnQuG7aBf2Ifh1BAcv646OPPjrxyvVE63YZ0MYyhQnF\n",
+       "AaWmpL6XxprIrPYbjXBf2diAek7dlqIR6SulCGTUcxSOsSos111SyRlbUHZQOx966CFJwxjHakJU\n",
+       "n2N0GvWWtXGiDrPdFaCUU44+hUIWxz6UP1T6qNpz36X6yohRkGRYj7n7Ioxhd91114bHr23HPPMY\n",
+       "E2LkMGM69cT3Y3Qr79MO7rjjjqrzW5EyxhhjjGlkaYrUejNNFBpev/nNb6bfXQ+Updo95gCLFoWi\n",
+       "pCwwa2VWPzZbKvl24mweLr74YknSPffcI6msTMU8UPiM1JYb58PaLClSWE+81voJZJQUNI7PK1Yc\n",
+       "r1hBtWC9oYDeeeeoxPvPgLU5Kz+DDNoJVn/JSp0WlCD8V2KuHNp9KQ9UhPLGSuY19teSMhz30xur\n",
+       "gK5HTdb0GlApY/6e2Bdrx4ySah3hPG984xslDWUb+1AttX6rKBuolFmfRimI+3JmkM8rU6u5T9rM\n",
+       "l7/85Q2Pl8H1o3xFtZXrZazh+1mepFK+I37XmmcJKBeelVGx41kW6z0qM61kSl9JaZwXrJJQ/owR\n",
+       "5KWivKK/LatUjOUoZaXVLbAiZYwxxhjTyNIUqfXAox9LH+Wg1uIsWbIZWG1YkxdddJGkwRJn3ZVZ\n",
+       "P7Tu21PKFst91+78Hq0KFLZaRQrFg9n72HKPcN0xW27tdZT8GbjfVmvq7rvvljR+p3TIdmKfF1hT\n",
+       "RKtlWbZjxnXUkCxTfAbWNwrv6173OkmDWoO1mVnT0a+H49C+sPpQW/BHqPXTiDsgZMruGDK/sxg9\n",
+       "lGWIBu6VNn/WWWdJGnxTvva1r426rujrUcvf/d3fSZLOPfdcSUP011hFqjaCmkzcpb5bOxYAyhCK\n",
+       "AWMTkaU33HDDxHGJYhu7KhGj4mgP9CXer1Uss7GTsYO9FmkXe/fuHXW9kbFK6rRKFGS7QywLrof6\n",
+       "ZE5AFCb3jXK3b98+SUO7grHt1IqUMcYYY0wjS1Okuq47yNrBwseqY8duIiWYbZI3KePEE09suiYs\n",
+       "ZSxuosFq93WqBeuNWS9KAuXB+TK/gAgKHrky4u9Kfg5YyXEW38pYJQpQXjKlI+bHGpt3jPVuyomo\n",
+       "0bF+KDH/VKb4UJ58b2x5ABFDKGjHHXecpIPzN1Hvsb3WKptAOXNfKJtY2agOHJfzxtwrgK8VVh+q\n",
+       "DvXIcehv3Oftt9++7vVRHqhEs4C2EKHuSkoUKhl1jNJwwQUXSBrKiDKt3a1+bE42oGxvvfVWScOY\n",
+       "Oi9q9zelnOjjpahExkr6Or4v3Nc111wjafAbbN0bjnpDeaLNogiOVbgyWPVgDCqtGtT64dL3Mn9N\n",
+       "/BN5trXuRwv00aOPPnqq40wL9ROfpbSXWN6MQShTjJWUB3MNxoNSvwcrUsYYY4wxjSxNkdpIRSBK\n",
+       "DasKxSWzrFEamCVn1mUJrDZmoazPM+uOlj7W0djz4dNBhAj/o8hhhWY+PHFHcf5nlh3Xyyk/LHgi\n",
+       "iu69915JQ/nxeVQeapUxoJ7GKjCUe1y/x7qgPGgX/F8b+YJShpVHrpjMisusQa6vFK03rR8Cygvt\n",
+       "i5wmJZ+n2C7HqhpYr7QL/DfwryF6kFf8DTI/Ddo1fZ7/saJj1CX+L1leM9ov1nyr0jdLaIO0VRQN\n",
+       "lAzqYGyUVmsbwrKmrLHIua6xka6zItZVbduk7aBQwfve9z5JB0cDjoXfM6ag3GT+iJHajOe0h6jK\n",
+       "ZtT64ZbqE98sXhkL8T2rfYZRvoxN8Vk0b2IUXXzWUA/8T/9hTrFt2zZJgxJ6yy23SBoUZMayOKco\n",
+       "YUXKGGOMMaaRTRW1B8x2mTWinDCL5nOsLd5nVt2a+ZnZZ1y3J3IjkkULlda1YyZmrCxm9/h4cT3R\n",
+       "2ojKXFSQIvisoMDgY8PvmKUzC0d5a7WGuW6UjVrrN1u3xxrFmkdxweqI6+QliMKkXjM/haz+an3m\n",
+       "4v5OpdwqMZcR7Qsrsjb6jvZFeY31kaIfYcVF65rr4f6w2rP2Qrvk+1jB9LPo61byH6L+KZ955/Ea\n",
+       "A3VH36JtUZZjc4C15rWiTzAWUmcoVSgjs1Lzxkbcxt0RWuF89B3GntpdIWiDlFOW96sEfa707MFH\n",
+       "DGWQNj82Y3stcV9S4D657qhgUk/02Rg9WburwayJ+bli+415orjP2267TdIw9md+wMwtGKtrn4FW\n",
+       "pIwxxhhjGtlUihSzQXyHmP0y60RR4XvRh4f15taIBM7zwhe+UNLgG8UsOGZUzma1pdwvWC1YA1hz\n",
+       "/E9enSxiIPMhKkEerNNOO03SkOOGHbPvv//+dY8/lrE+VbWU8kdh3WJ94/MW926EWUXiZNB+uN5M\n",
+       "kaL+ecWKpL3F90tRlShI8bi1EPmUWfX0M5SrUnvhvunX3AfWOTmOOG+p/TAuUM+tkW3SoGTgIzGt\n",
+       "QoOChHrIvbbumzltBC1+ZqeffrqkYayYlRIVfWaou2w/T4i7I7RCBC1jJm29trxpu4wVjCFcV+2z\n",
+       "hDbIs4I2HRVIFCjayazGyhhBC/yPgsT56bv0IT7nfqlP7idT2qIyVdq3tla5zIgKLfXFK0oi94ni\n",
+       "yH1lueoYi8hDhspeu7plRcoYY4wxppFNpUixPouvELNmZsfMKpnNss6M9ZMpD7VgOXMc8lhxPbXW\n",
+       "ScknhevFBwerIfqKZBEDcZaM1VDrT4FisXXr1onjTWPZrwWrYFGZv4HyG2v1TOunEfNbwWOPPSap\n",
+       "nCsn5mXCOorUZrmmX9B/xt4fik8EnzL8LWp9xaJyTPunHXK8M844Q9JQjvTD6KNF/iz6R2lfs42g\n",
+       "raIGZ/degrrhXuI9l9oidURfxL+wVbFAEaAtZRGxs4K6QOGb936QwPlYRRibxZ9Vju3bt0/8nvpk\n",
+       "TC354fGMinssRhjbqYfaHHglSu2EPsd5GbNob/H+uB9U4troQX7HWEY51K6aRIWpFNVIv2Ws4zrH\n",
+       "RtLj17x7925JQz0SQVzCipQxxhhjTCObSpEClAxmo8yamVXzeeZzgpXRCrNo1knHKislRQormOgk\n",
+       "1pWZFXOf2X5N0aocm8sDpYScJ1gLMbJjWjLrHmt71lYy1il+EyWFMuYUidRGA2ZWE/VYUoRQDaIS\n",
+       "RdQd58fHqLRfGooUKshYdYDrRunh/qJPUm290b6w9rEWsca53qj8ZjmXsL6pt7GZ6dcy9l4yaCv0\n",
+       "fSx8LHRU0qgm8z6KCn2COmhViYm24rrwGZo2o3UkZu3HV4n7bPUNK0G5MYbxLNizZ8+o41DutFHa\n",
+       "eowMjv6xEeqdvp6ppJQLY/usx9yMuFpRymfGGNOqiDJWAO2kpMAxFtQqi/SPV7ziFZKGCPhszKNf\n",
+       "xLkDvl7UN/dPfe7YsWPD67AiZYwxxhjTyKZUpKKFH/f8Ks2Sp/XNwfcDS5dotlpqz0/uEDJE44PC\n",
+       "/e7atavqOK3ZkmO22yyiYVZgHTPbr/UPKPkdAOVQm5H94Ycfnvj/4osvnvh/Wv8FrCSsnUxBRUGL\n",
+       "mfW5H1QNIoGwku677751j0d7ReEZ2z6A31OetOvW3Eb0J8qB46IsocLgl1FS8hgPWvdXW0um/sac\n",
+       "dRH6LHl4HnjgAUlD28NXCSUi+u/xGlVGzofCMxbUbq4fFZjI3VmD+owSU7tHWSsoQPjIlPpYiYce\n",
+       "ekjSwbtKoBjFthh3g+D7jK1ZtFdUt1HUGBuzdrhoWpUoFLtWhZf7rz0/ChTtG79f+h3lzZgV20cW\n",
+       "XclYVfLRAitSxhhjjDGNLE2R6rputMVfO0sdm5U2wnXdfffdTb+vXd/F2sH3hd8xq8fqKc3ux0Yo\n",
+       "YD2iXDz44IOS6nekb6U1Z0ytdUt5oVC0KifQmiH/zDPPlCSdeuqpkqQvfOELG34faxjwlUJJoj3S\n",
+       "PvAjQMmJ7YPrblWiIPMjGAvXH/sv7ZydC1DkUKZQaksRQ7OMDsVnhj5SOja+UCeccIKkwdeGOqRu\n",
+       "aJsxz06pjY3NARbh+rkvOOmkkyQNYxBjQa0FnjHv3GxAedI2Wn2xGFsYKxhLY74tlDZ8bOJ+mzHq\n",
+       "Las32kXMUzVtvrBFQbuhHcf7nFaJLOUfi1BPsf5px+ecc46koX5R8RlbXv/610uSPvOZz0ycnzlE\n",
+       "7bPVipQxxhhjTCNLU6Rq1CisPCxhZp/kdmAWOtYK4jhYGSUFC+uHLMH8jii7mE+nNtIGCx1rEOuU\n",
+       "dXNm/TFSpHWH88i8/CUy4o7aWAXRKo9789VGzxGtiVVB+aHc1CoqZIHGhw1rMbbZ6O9CvXB+FL6x\n",
+       "ymtUkqK1GrNmL4pps1CXog3jfbbkIqKNsY8ldYLvRdbm8QeLkabUMX56UXmgronwxSLneCg+KBko\n",
+       "Ehxv//79kg7uA7TZWCb4btB3UCfxx8Pvj1xb3Dev+HQxtqI68v7YCOB5Q/2x/yhjMPdDffE+9UCf\n",
+       "pPwZc6IqWqtkoUzUKhTZMyD27Xn7kgHtg3bD2MizK/qLRig/xthZZ8hvhf7E2BLHmJ07d0oa2jf3\n",
+       "zzP1pptukjT4Ccd+WLsKZkXKGGOMMaaRblaZVUedtOv6lZWVhZ/XGGOMMWYsKysr6vt+3eUgK1LG\n",
+       "GGOMMY0szUdqrSKFz9HY6LPacyxK/ao9X8nnB78HcsA8+uijG57v937v9yQN67z4VWQ+YDG3DH4C\n",
+       "cd+x6Pvznve8Z8P7MsYYY55tWJEyxhhjjGlkqZnNiYghGyn5ZMZCpAlZgIlY2ayUIh1QgMj9kilS\n",
+       "8Xgxb1IWjUhumSxHztgd1I0xxphnK1akjDHGGGMaWaoihQ9OloGZnCn49PBK/hyylJLFdFZKynnn\n",
+       "nTdx/i996UtTHQ/fpZipOoOcF+SpaiXmOTLGGGPMbLEiZYwxxhjTyFIVKYiZrIEsxSg4+AJ997vf\n",
+       "Xff7s8r4TXZUdnJvhQzlZIXlfoiqIzoPXyh8u9jHKWZZHouVKGOMMWa+WJEyxhhjjGlkUyhSRN3F\n",
+       "fW3YByruQp+B8tMKyhDKFopSaY+wCL9jnyf21WLvNYg7geNDtXv3bknSz/7sz0oa9ulC0VrU/kzG\n",
+       "GGOM2RgrUsYYY4wxjSxNkTriiCOeUZB4Jfou23EZpQelZt++fRO/I5N3K+wIzfnZgX0s/I48Tlkm\n",
+       "czKKx+vGJ+zb3/62pCFakfucNy996UslDUog5WKMMcaYSaxIGWOMMcY0sjRF6nnPe55+9KMfSRry\n",
+       "HfEawWcJpQTfIpQelJpp9+ojmm5ayGdF1BzXGUGhyjKMo0xRLplSN2vIkE7+K3yzjDHGGDOJFSlj\n",
+       "jDHGmEaWpkit9RdC+cBXKGY6J/qN6D7yLPEKi1JsSsT8TdNG2X3/+9+f+P85z3nOVMeLxAzo+HZl\n",
+       "e/UZY4wx5gBWpIwxxhhjGlmaIsV+edIQFbb2vfX4e1kaUwAABjpJREFUp3/6J0mDz1FUpNgbLyo4\n",
+       "reCbVdobD58tyKL0poXjEsUXQdlDWYrRdnzOdZ166qkT/+/Zs2fd484qY7wxxhhzuGFFyhhjjDGm\n",
+       "kaUpUs997nP1z//8z5IGpQSefPLJif+jkpJF1z3xxBMbnpMM47U+SyUlCl70ohdN/B99pFCSiH5r\n",
+       "9eXi/p9++umJ98nDFaMZeR8Fi+viOjgeUXr4qKFAEVWJb5oxxhhjJrEiZYwxxhjTyNIUqf379z+T\n",
+       "PwnFqaQAoaRkeZlKoBzhO8Qrx2vN4H3UUUdN/B+VHxQhlB+i5F7ykpdIGhSyWqUqZjg/5phjJs6L\n",
+       "oke0YzwPn/N/KTovRlEaY4wx5gBWpIwxxhhjGlmaIrU2mze+UhnkTUI5yTKBZ6B4odC8+MUvljT4\n",
+       "EKHM8PqCF7xA0qBQxfPxO5QgXiP4GJHpPML9sDdfq+8Ue/ZFZYlM79/73veajmuMMcaYjbEiZYwx\n",
+       "xhjTyNIUqbWgAOH7E6PyHn/88arjoOzE/FLs0XfcccdJkrZs2SJpiEZ76qmnJA1Rb/yeKEAUMaL+\n",
+       "TjvtNEmDghTPV8tYZS1j1hnI8ekieo/8XWYx7N+//5k2apaP62Pz4LrYXLg+DmBFyphNRimNh1ks\n",
+       "ro/Ng+tic+H6OMDSFKkdO3bo1a9+9ULO9Y53vGPDz1GqIueff37T+VZWVpp+18q059u5c+fC6sIY\n",
+       "Y4w5nLAiZYwxxhjTSFebvXumJ+26xZ/UGGOMMaaRvu/X3Xh2KRMpY4wxxpjDAS/tGWOMMcY04omU\n",
+       "McYYY0wjC59IdV33+q7rvtl13WNd112x6PMbqeu6J7qu29t13f1d1929+t7zu677Utd1j3Zdd3PX\n",
+       "dc9d9nUejnRd99+6rvte13UPrnkvLfuu665c7Svf7Lrudcu56sOXpD5Wuq77zmr/uL/rugvXfOb6\n",
+       "mCNd1x3Vdd3Oruse6rruG13XvWf1ffeRBbNBXbh/BBbqI9V13U9IekTSayU9KekeSZf0fb9vYRdh\n",
+       "1HXdfkln9X3//TXvXS3pf/d9f/XqBPd5fd//l6Vd5GFK13UXSPqBpD/v+/6U1ffWLfuu67ZJ+gtJ\n",
+       "50g6UtItkk7o+/7HS7r8w46kPq6S9P/6vv94+K7rY850XfdiSS/u+35P13VHSLpP0hsl/Qe5jyyU\n",
+       "DerirXL/mGDRitS5kh7v+/6Jvu9/KOl/SHrDgq/BHCBGH/w7SX+2+vef6UCHMTOm7/uvS/q/4e2s\n",
+       "7N8g6S/7vv9h3/dPSHpcB/qQmRFJfUgH9w/J9TF3+r7/x77v96z+/QNJ+3Tgoew+smA2qAvJ/WOC\n",
+       "RU+kjpT0D2v+/46GijGLo5d0S9d193Zd959W3/uZvu/Z3fh7kn5mOZf2rCQr+5fqQB8B95fFcVnX\n",
+       "dQ90XXfdmmUk18cC6bruGElnSLpL7iNLZU1d3Ln6lvvHGhY9kXKuhc3B9r7vz5B0oaT/vLq88Qz9\n",
+       "gfVe19USqCh718v8+SNJWySdLukpSX+wwXddH3NgdSnps5Le2/f9xOar7iOLZbUu/kYH6uIHcv84\n",
+       "iEVPpJ6UdNSa/4/S5AzWLIC+759aff1fkm7QAfn1e6tr4uq67iWSnl7eFT7ryMo+9peXrb5n5kjf\n",
+       "90/3q0j6Ew3LE66PBdB13U/qwCTqM33f37j6tvvIElhTF9dTF+4fB7PoidS9ko7vuu6Yrut+StLb\n",
+       "JN204Gt4VtN13b/uuu45q3//G0mvk/SgDtTDO1e/9k5JN65/BDMHsrK/SdLbu677qa7rtkg6XtLd\n",
+       "S7i+ZxWrD2q4WAf6h+T6mDtd13WSrpP0cN/3n1jzkfvIgsnqwv3jYBa6aXHf9//Sdd27Jf1PST8h\n",
+       "6TpH7C2cn5F0w4E+on8l6b/3fX9z13X3SvrrrusulfSEDkRmmBnTdd1fStoh6d92XfcPkv6rpI9p\n",
+       "nbLv+/7hruv+WtLDkv5F0m/03opgpqxTH1dJelXXdafrwLLEfkm/Lrk+FsR2Se+QtLfruvtX37tS\n",
+       "7iPLYL26+KCkS9w/JvEWMcYYY4wxjTizuTHGGGNMI55IGWOMMcY04omUMcYYY0wjnkgZY4wxxjTi\n",
+       "iZQxxhhjTCOeSBljjDHGNOKJlDHGGGNMI55IGWOMMcY08v8BGKNoaFbqcjsAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7ffb01dfb3d0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "feat = net.blobs['conv3'].data[0]\n",
+    "vis_square(feat, padval=0.5)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The fourth layer output, `conv4` (rectified, all 384 channels)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 33,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlIAAAJOCAYAAAB8y+mTAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvXvQZVV19vtsTbxHEy+A3OluupsGBJHCexEqVuqzEv0S\n",
+       "K/V9aGlSGsuYxBK1SOSYiK8GRVNENIZEy8o5ieU5+BkrWqlUWQaUEFEJcpVuaGiguSNijOaeiNnn\n",
+       "D/j16v287+x5WWvtvd/u8ftnd+9377XmmnPMudd41hhjTqbTqYIgCIIgCIJ6HrPoBgRBEARBEKxX\n",
+       "4kYqCIIgCIKgkbiRCoIgCIIgaCRupIIgCIIgCBqJG6kgCIIgCIJG4kYqCIIgCIKgkVFupCaTyf+Y\n",
+       "TCY7J5PJrslk8s4xzhEEQRAEQbBoJkPXkZpMJo+VdIukl0m6T9I3Jb16Op3ePOiJgiAIgiAIFswY\n",
+       "itRpkm6bTqd3TqfTH0r6jKT/OcJ5giAIgiAIFsoYN1KHSbpnr//f++h7QRAEQRAE+xU/NsIxs88K\n",
+       "J5NJ7EsTBEEQBMG6YTqdTtZ6f4wbqfskHbHX/4/QI6pUlp/4iZ+QJP37v/+7JOnhhx+WJD31qU+V\n",
+       "JD3xiU+UJH3ve9+b+fxjH/vYmfd/9KMfSZJWVlZmXkvP/8///M9Fn3f8fBs2bJAk3XHHHU3HS/HM\n",
+       "Zz5TkvSWt7xl5nxjU9qfxx9/vCRp27ZtkqS/+Iu/mPn7kUceKUl64IEHJEk//OEPe51vKDjP+9//\n",
+       "fkmd3cE//MM/jHK+ZRu/yeSRtYL58E//9E+DnO8nf/InJUnf//73Zz6HvTBfvv71r0uSfvzHf1yS\n",
+       "tGXLFknSjh07JEnf/e53JUlPecpTJHXz/+1vf7s+9rGPSZKe/OQnS5L+4z/+Q5L0ne98Z+acBx98\n",
+       "sCTpMY95RJTHFp/2tKdJkv7rv/5LkvTf//3fevjhh/fMOfrkta99rSTpwgsvlCT94Ac/mDn+qaee\n",
+       "Kkl6/OMfL0n62te+NvP3zZs3S5L+9V//daZ9zIWf+qmfkiQddNBBkqRXv/rVkpbHVg499FBJ0v33\n",
+       "31913KOPPlqS9I//+I+Sun5b1Fz44Ac/KKmzE+yGcfmxH3vkJ5I1q3QtZ9yf8YxnSJLe9KY3zZw3\n",
+       "BbbMb92//Mu/FJ3PGas/n/SkJ0nqro9xXNa1DGjvf/7nf868zxqS6+fTTz9dl19+efLvYzzau1rS\n",
+       "sZPJ5OjJZPI4Sf9b0l+NcJ4gCIIgCIJROeOMM/b598EVqel0+vBkMnmLpC9JeqykP01l7HE3iEfK\n",
+       "3S53uShSeI14ibziFfH67W9/W5J09913N7Wdu1ZXpGgnlHoJQytRgEdeC94VXhLXgfc1FHh1P/Mz\n",
+       "PyNJ2r17t6ROiTjkkEMktY9TK3h7KJY5sDOUkVawK+wYxXVZoZ0pO0epq1WqXImC++67T5L0rGc9\n",
+       "S1KnQNFvqBZu97SPdUN6REGSurFL9fWDDz645vvMfY4D9MkTnvCENd93+L7PLdYS2sc1uSpLO579\n",
+       "7GevefwU2Dh90qqu56hVooA1yBW8ZcHHi98g1rBSUD5q+4m1CQXU5yBK1aLWELL8U08Rlg3mJwov\n",
+       "9whw+OGHS5J27tzZ6zxjPNrTdDr9oqQvjnHsIAiCIAiCZWGUG6lSuPvmFSWKWCfuyv/t3/5NUncX\n",
+       "zudQPoh3cC+yFvd4USJQvPDuWp9bLwo8e7yZlDc+FMQSffOb35TU9RvjefPNdSXFHve4x0nqvPhW\n",
+       "UFKwnxR4W0N5Xf5cftnJKXatMVMpiJ0iHggvkvn30EMPrfk99zalzmNmrUBBwnawQVQw5+lPf7qk\n",
+       "1WsBSgVrjp8PiKHCZom9AdYoYrJStuEqZimMHe3g/Lzfd43sC4rZskA8YA7GHVvjN4ffpptuumnQ\n",
+       "dt177yNhxaxZzI1aZWxomFfLNo4psHdXoqCvEgWxRUwQBEEQBEEjC1OknvrUp+7xunhuzvNoFCky\n",
+       "WdyrA+42ed7uGQ99QZG49dZbBzneosDrnZcygpf2hS98QVL/bLe+SlTf4+C1Dr0LwKLwzDReUS3w\n",
+       "glFn+Dv2UzuexOtgFw7qzJ133impU7w88w51hnnv81/qYpBcUeK7rDkoVb62cE7/Pm1yG3B1mr7y\n",
+       "46JQpfrAQQFrVUUZO7LNWEtzcZs5BYy/YyOpuLcUjPGyUBprxDjzes89j5RKHEuZ4bcQuxtaBT5Q\n",
+       "GOppRo5QpIIgCIIgCBpZmCL1tKc9bY8HjCKFF1cag0RGBLEUxFrUZlnN6651WcllgtQqfHhpjIcr\n",
+       "GNSSmbd3irKEfeS8fZQbvPBcbFXp+RetbOUyppgHeMWoPLXxOpBSlAGl69prr505L3B+7AnFGvZe\n",
+       "L1CaUKs8Zgr1C2WKTEEUhrvuumufbfUsOI854lpoBxmyrHGo26UKQ2vGKH3I3CXOM6dIpWKomNPY\n",
+       "AGtmLestXhBSGb+lGcCLorROUi1jrWHEgnndt1YYN35zXM1mLSlVilOEIhUEQRAEQdDIwhSphx9+\n",
+       "eM9drddUqYW7Sry+2rgCvDZiOWhXaa0Tz+RZb9Bf3L2jnODV4s2WwnG85g7/X1R/cZ2lmTqeOeUx\n",
+       "OrV4LOBQDK10oRowH3OKEtfF+d1LL+0v1Be+z/Hwpkvshjnr8V+AMkX21d41qFrg2v082A5ZgsQ8\n",
+       "1Y6917ArBVvH06YvW2uAeYb1stdCGxrfRWPoXQ5ScD5q7zGOrXW8hmLo7E/WMObTUGCnvi4w//nt\n",
+       "D0UqCIIgCIJgQSxMkdr7jpZ/lyoFDnELeA21z+G5GyXTBeWEOADuZvEm/W586MrgrXh2VKmCkvOS\n",
+       "a/vTFQmPNUrV8Bmb2usgIwm77OuFedxOLbTDlb7DDjtMknTbbbf1aF0H7cspUd6u1HWVKma58Snp\n",
+       "t1TNKQebv+WWW4o+D9gyoJLh6fJ/roX3iU0q9Xx9DaqFdriCNFSWWalt7C+MVSE+BQoUiiYKZ23s\n",
+       "0LLXPOQ3wffNHBoUWN9dYqiYvVCkgiAIgiAIGlmYIvXEJz5xj1fIXSiKUqlXCTxX5W6z9nmn78/l\n",
+       "MUM8R8ULW9bME2JM8F6GqsxNlV3IKQzErJGBQcYECk8uM2pZGDqLs2+GD/09VHwP9I3dytkZ7cwp\n",
+       "tzllOjWvW5TsnGefqjPlMRyuUqIEoWBwzbUZj4xF65gwB4lvZA1j7z7WsNbYkPWy11qO1Dg7ubk7\n",
+       "dHYc9kI/kzG8KDV/bIjda1Vgc7AGcR7mQ+29RopQpIIgCIIgCBpZaIyU7wPFXX1tFhL7HuENuoKS\n",
+       "Ay8TBcoVJ7yDZa8Zgnc8tGKGogS5ccHL9XEl06W0jlMwC3ZI7aPWHeYd98qHzirk+DlFiutDySQ2\n",
+       "MZch1hJzRjyk16RK7ZEHbrOeoco1sLcXc+e4446T1GVC5pQLxqB1LrMGHn/88ZK6NY692vpmKe0v\n",
+       "DBXrNXScLPXO+C0cqq7SsuG/WWNVcOc8zFfm11BPHUKRCoIgCIIgaGRhitSPfvSjPZ4msTN4lsT4\n",
+       "lHoLmzdvnvn+3XffPfN3vMxU1t0xxxwjqfMq8fC5O172DBWe9+L14mXzXBgFaNeuXZK668JLJeaG\n",
+       "jKQtW7ZI6u7et27d2tQuvDQyT+hfKlQP9Xz6QMG9N9SNvl4cKgoxiihBQ9WKKVWMUC5RllFNmLct\n",
+       "CqZnsrIWEDvE3/H8yYCkAjlzadOmTZJWq7O0iePQl1wzfeg11EpjaRiTVpiDVFhvrdW3rIxVm62U\n",
+       "VMXzvngNtf1dvR9bkfK9K7GboeqhhSIVBEEQBEHQyMIUqcc//vF7Yj3wDh988EFJ5QoQ3tpRRx0l\n",
+       "qfM6N27cOPM5IvVz3hiZNuvtefSLXvQiSV3tEbLiPG6Du3IULBQAvB+8VvqBceB7L33pSyWt9vLx\n",
+       "sj1OgLt9+p0Yqb571rXC9aaqXq8XiJ9ozRBC8c15gXyOfmN8GX+PL0qR86Y5/uGHHy6pU1C9HprX\n",
+       "8sG+mf97g4rKsTkWawavXDtqLrFTN9xww8zxvC3edo8H5Dh4vrfffrukTn2nD/mex0Lx977xb5yP\n",
+       "623du29ZYTywVfpzXgrV2HGzpbtrwHod37HH64gjjpC0evcSfotST0d4mpIjFKkgCIIgCIJGFqZI\n",
+       "HXXUUXuUDOIP8LBRRvDiUDT87v+MM86QJJ188smS0tVnPQaEOAnuTvFs8Zz5HMfzGhSeAeD1fHif\n",
+       "bMKUV+k7qrc+B6c9O3bskNRVa/a9y1CIyIri7yiB9DPtQLHh+yhSfJ/+IbaKzxNvwveouN16fcTA\n",
+       "EbdSC94qit0999wz8/exqumORa2X2orHFjK+td4jmWOuoKHW8OpVw71+G9/nFa9yLS98586da7aF\n",
+       "c51wwgmSOvUadZysNo+dYA6jVqMCP//5z5ckbd++febzvrs8/yf+0FXdb33rW2tee25PN/oMtZC5\n",
+       "t2HDhpnrYo058cQTZ47vKv16UTQYR35DeMUmSustedwmYPuuLKIGk5XJ2kGcqSsYrbt1pKBdKJ/Y\n",
+       "F9dRuy9qLZwnVfORGEMUVtZ8vsf4+Pyin7Bn1mz6l/7n/VJ8f07/baJdqUoBpdmYoUgFQRAEQRA0\n",
+       "Mhlqx/iqk04m05WVlbmfNwiCIAiCoJaVlRVNp9M1JcZQpIIgCIIgCBpZWIzUPBQpzvGJT3xCUvd8\n",
+       "e+zzffzjH5fUPUd3+tY+4bnuOeecI0l63/veJ6mLATryyCMldc/9/+7v/k5SF4/Bc3xinq644gpJ\n",
+       "3fNunvezLxfxFb/0S78kSbr00kslSVddddXM5zi+x7oQ08L18nza3yd+hBim3/qt35KUthVq+hC7\n",
+       "47Fo/J2YolS2IHEQb33rWyVJH/3oRyV1sTmMV+sO8B5bx/ne8IY3SJIuuugiSeV1tYhvIc6AOAL6\n",
+       "PZVNRz/+3u/9nqQu3mKoWiqp833oQx+aaR/zglg78GxZz7BJtRO7f/Ob3zyXdUXqru3888+X1MWE\n",
+       "EJPEXPAYi+c85zmSujH8+7//+5m/00fElnAczpe7PuZqaw0w2vXOd75TknThhRdKWp2VyJwg69Ez\n",
+       "P4lNYcy8PVwn/fXa175WknTeeedJ6jI4mYOpKvesZbTb4x+Zex4HedZZZ0mSLr74YkldXOlYMG6X\n",
+       "XXaZpK4/6Rdsn3hS+pkYPNZKYtpYoz0Dl+t8+ctfLkn68Ic/LGl1HC52uW3btpnzsaYDazL9S0Y4\n",
+       "40H86plnnjlznaUw/rSLLNMcpfNhKLLzbi6tCIIgCIIg2A9ZmCI1T0rvcocCL8q9BTJi8MTxKsj4\n",
+       "QWniNdVur7OFV4MHj0KEJ+/1kvBqPv/5z695fNpF5o8redddd93McfFSoLVieSrTKgXeaaruF+3O\n",
+       "7VfmmUpD17lyb90zeehv/3vqulIZL2TS4L3yvl+/Z3GODefx8+XqtZXuB5dSfhcBfZvK9iE7DwXA\n",
+       "Ye6jFtbuicf3U9/LKVY+Rp4hWlq7LJftxBrmY+d1obDdlK3magOidqOoeEzwHXfcsc/vDw1rC+NP\n",
+       "+31/WPr5y1/+8prHwT7YlYPsTq4XRSpXKdyz8ADV/JRTTpHUZYT72tiq0sP+srtFKFJBEARBEASN\n",
+       "HBCKVF/PO1VjIodXQeY5MJ441VYdf56Pt8Dzfq+R4u3Dy8BbQPEovfun7g/xB+4t4d3hdQ613xXP\n",
+       "y0trd+CtpsYlp0SBK3zEb3gtFNQG+hslq7ZCOv0F2CfHqbVX+h1lEG8XhbK1Uj9e71gV4LFnvN87\n",
+       "77yz6Tjen/PEbazU5lzZYayY6yhGtR57SonCJrDZRe0uAPST95fbmlfXr63mz9qQquI/7z3siF/k\n",
+       "t4FXxj+3pgFrEGs036u1FxRH1jziSn1/1FQtRGoQHuiEIhUEQRAEQdDIAaFI5SBmyTMkeO7sVYcd\n",
+       "r2yei8nCg8b7QjGiKi3PowFlKfU8Gu+FSvAoRnineCm0E+8jlcXI5/m7x0B5jA/XQzta959yZSYH\n",
+       "19E3Bs6VQ9+3y71WvHrGy+3FvUk/vseneKZRX7wCeCtkfaKS0M6hFCrUAewVhap2B/gWJRT1k6wk\n",
+       "bJ051EpOTcVmsS3mKDbHXCrdx7CUUltAgVg0jAdj2/pUge+hOrfWTeQpwQte8IKZ97/2ta9VHQdl\n",
+       "h9iwVhWftZynE6i5tbs0+K4FrGlkXe7atUtSejeFee1puOyEIhUEQRAEQdDIulSkWp+Xp8ATfvGL\n",
+       "XyypixG6++67JUm/+Zu/uc/v1+6n5HuI5eoc1YJy5cfD+80pRng3KBKuqLgXwvN5YmroT49PyHmV\n",
+       "peOJt4SqgBLUag+MB9BPqeNhf1wn8QTUM7r55pslra4VAx6TVYvvBVlLaa0hYqu4Dq43lylVyyLi\n",
+       "LFBdU7EzraTiBelzbBdVk88xV1GEcjaCIoGSgKJVGl+YAhteNPRTrvZfqYrZdwcP5gD2wh6NtVAf\n",
+       "C3tojQtE2WRfWhQur+GXm+usdb6G83/iY3l/rJiyVjW6Fp7+0E/Mu777l4YiFQRBEARB0Mi6UKRQ\n",
+       "APDSWmNwUlBN9pJLLpHUKRyld9+1XiDxD0PHQbhSk/K2c5lFeDcHH3ywpM7rAVe68PZ4xcum//oq\n",
+       "MA7eFd477WtVpDzGLffc33eGpz1cJ3EKKTv1/q/NjuPzrdRWvd60aZOkbnz5/tD2uwiG9oA9PtFV\n",
+       "yVT2k7cH20q1DxtADcfTRhFzDxsFLLemeQZkzpbHotTGx1YwnKuvvnrmtRbicfvWX+K62cWCNd+V\n",
+       "t9xcR13GXvmt5beEtQrFqK/imWJe40j/8Bs21FOtUKSCIAiCIAgaWReKFDEo3DW31sXJwb5XPBcm\n",
+       "g2Fez2+B5++1GUQoK8Q20V7iQWq9SrzXnHeI14r3g6JTq0SV1qPifBzfK5MD/YD90A/urXsMWE4Z\n",
+       "IksQb4Z+wrvzfvY4BW8v3l+pIoViOjYvetGLJHWZbV/5ylckjadE+T5upbF1ywRj36rg4PFjsylQ\n",
+       "NLAt+iplQ6XqeqoKfg7Ua2yGuLevf/3r+/xebXzpesczfFOgbPq+mowHc5A1hzXR50qpEondcRx+\n",
+       "a5mLHGdRCuVQjLXLSShSQRAEQRAEjawLRWpsJQpQQjgfGSO5jI++MStOq+LgShBeAxk9tft24YXk\n",
+       "YqpckWpVEEprkjAeHpPkoHDwmlIU/bxe5yv1ef7O/2kH3+eV/uf8vs9arf1wXM6XGp++2X14b9/8\n",
+       "5jclra4nNjT0A/3Fa98aQGuBesucQBkhhqU1k3Ao9Szn8fN3nwt94xFLK7M72Nju3bsllXv+qTHd\n",
+       "X5WqXEYw6vRJJ50kqVPP2ROQOcHaQ6wP4+ZrGYpWSpGi/1lTaB/HoT3MTY43VGzR/kIoUkEQBEEQ\n",
+       "BI2sC0VqbCUKiDfAG+KuO+cdDl0NuPU5rtf6wGtprf2BIpW7flcOxoLrw4tifIgnwXvieT/qAOpC\n",
+       "yo7cC+c4nIfzclyvSo0yRH973SX6EUXKvfBUjFcK7DOXkVOrQDo33XSTpOEV1xT0uyttKHh9M532\n",
+       "Bpsl4/Owww6T1HnirYoUa0hrDImruzn4HLbZ9/ytVeuxtZ07d0pavRaU1seCVmUsxdj7RpbC+KT6\n",
+       "gXGkvpTvvUfcp8fEpdTu2srj/nlXBkOJWptQpIIgCIIgCBpZF4pUaSXmvuAl+M7cufN61te8wbv2\n",
+       "eAnaxfPvWmWqVNGif/pWvc31N+/j/RIfcMwxx0jqnt/j1ZXGmrmiSP9xPq+h4nWz+D7KEt4vqoB7\n",
+       "wV6Lxe0nlxlT2s9DZdb4XopjVTdGcUJhxK49q3EItYLYE86BeknGbilUSOZ4biu1CgiVr1HKSqvI\n",
+       "Y6t9Y4v6qo/MBc/gre0HxrwvrAlkQteO79CUPr2g3ti2bdskdYoTawP9g/1SR4y4SCjt95Ry5ZX3\n",
+       "g7UJRSoIgiAIgqCRdaFIja1EAYoC3iAeODtsl4JXWJplRHVavJDa59ooBMQA8Rwbz71VmfjWt75V\n",
+       "9Dm8vr5Ze16d2fH9yfi/V1SvBa8O6E/6LbXfF+P87Gc/e+b/xNekYpRc0fHxwWtNVRGed12zvrFW\n",
+       "pRAb6NWaURaHjJvBVqnizzXW9ik2iyLVd88u1ppcXagUXlm9Ft93shbGkLlQ2x5i1Y466qiZ76G0\n",
+       "UFuvdK670rjommSl52dNIosUpXDXrl2SurXBlSJX4b2fUmsHShfZrNgf/08pUot+GrMsRC8EQRAE\n",
+       "QRA0si4Uqb5wd5/zaPGCUARQCnK1OPzuvrbeDRW3W3HFbmwFwesTkSlCv9HPuevyqrup7DXiATgP\n",
+       "44JyRe2aVMXynKLpXn+pt0ssFf2AHdQqgK545vazmvf+YvMG9QB7GLt+ldTep65W9lXPUQZq49Bq\n",
+       "VfAUHmPTCkrKEUccIUm64YYbJOWvi7nAdaDSe9xhLWNVtC7djQFKM9D5zcK+uO7c2pCzP+zLsxj5\n",
+       "nmfGomCldjMYyl6c1NpN1uKyEYpUEARBEARBI+tSkfL9hxzPXMl5C9xVc/eL94K3wXPilNfqd814\n",
+       "0p5JgxdAe8hO4nPs+I6yc+211655Pry9FHiBeBsoNbkaLig/fJ92UE8I3KtEeUp5S3hXjBvepitI\n",
+       "xKs4KFz0E9fF9aDA+fP6Uu/VvS3GBQWMTCrOz3HpH+yEeJlNmzZJ6saTz5dWIOd79Cf2gh1yPo6L\n",
+       "vdEeMr2IMfJMJfZFo98YH1dD3F6Yb4wfxydzjb/33YuP73u9sGWEMcd2iZdjTOlj/p5ai7Zu3SpJ\n",
+       "OvHEEyV1awYVrQEbZwx9/0aP78vFWHEcjouN9YX2Y9O1ChtKFCy6/lMK1rTSGmf8BuRipfitod8Y\n",
+       "J9Yi/p5TUvk8c8nVfcaJfWVdVceeDznkkJl2s5Yde+yxM+cjvpjx8qcWnnntYH9kK3q8bC6Gj7+z\n",
+       "NpXGLvKbx/igjD7vec/b5/cgFKkgCIIgCIJGJkPuX1V80slkurKyMvfzBkEQBEEQ1LKysqLpdLpm\n",
+       "obZQpIIgCIIgCBpZWIzUysrKnliQvjEVxHagrhGzcdZZZ0mSPvnJT0rqns/yPJTz3nbbbTPHO/74\n",
+       "42eO5zFCPC/muTXPgX/5l39ZkvSpT31KknTPPfdI6mqj8LyX8/E896d/+qcldc+fL7/88pnzAzEw\n",
+       "xPC88Y1vlCR94QtfmLk+njNzfbfeeuvMcX7u535OUlcb5Otf/7qk7jk2mRFcJzEqZ555piTpox/9\n",
+       "6Mz3UxDLxfPpXNYkz++JC3jXu94lSfrc5z4nqYsv8crzZO1x/C1btkjq+pesL6+VwnN6rvMXfuEX\n",
+       "JD1imzVwncQVlFZU5zy58/G8P1VlmPPnMoI4D/1JLJVnWxLfQDVo4hW4Lvob+yL+h7gg+uHss8+W\n",
+       "1NkLxycuw/e0zMH4EdPFK/Pvd3/3d/dcG5/FBoj94Bqwda+8zdrB32mbz8VXv/rVkqQ//MM/nOkL\n",
+       "YK5jW8ReUGOMdjGXicnwWBtqar3hDW+QVG6bHjNVC+f52Mc+JqmL1yNuDttgDrF2nXHGGZK6fr3+\n",
+       "+usldXGnHhvz0pe+VFK3Buau7+ijj5bUjZfHUtGvnD91/ZznvPPOk7R6fGuzA1m7fL9TjvPOd75z\n",
+       "5rxj49eXis3LZXv6fquprMDStWwoas/HvCZ+lbhe5m1uDcqdJxSpIAiCIAiCRhaatddXiQL3HjyT\n",
+       "wbOQ8IpStUV27Nixz/PhfeGluZeFEsV5yZryrDKO88UvfnGf5wPa71lYKEh4Q7SLjCIHRcGvH+8Y\n",
+       "L5rve/aW/z9VSyWnkHA+2kG7vTYJ3gI7opNJgpfh2YK58cuBAoTXgqKIt+sKJioC7UlRW3MGuP5U\n",
+       "xffa45Ghg3eJXaKWYJccl/GgqvLdd98tqbMPvHHswpVHlGf6CTtFCaNadS5ek7/zfcZp7/Nt3759\n",
+       "n8cArtVJVbFPwbVhw/QpfcKYsSaRRcVrTq2trW+FwnD66adLki699NKZv9fuW5pSVxkz5ytf+Yqk\n",
+       "bg1hLUzxt3/7t5I6RSoHay39iirJ0wHWQmw1p8i17obgsAbl6jzNm9z1oc6ndqXAnua1u0gtXg8r\n",
+       "BfPMsxGHygwORSoIgiAIgqCRdVlHKocrVNxtcteKUuJ367m4Au5+8Sa5W0/VP4K+Xo/H9ngcBbVm\n",
+       "uD6vO+R4zBdQAwQFguN5hXL3WmqvD2+S6/Dve/+j9PA92ofXu3Pnzqrze2ybK0mujOCNpeyC/sjV\n",
+       "hmm1g9xxa5VdVA68e8bZz4MX9+Uvf3nN46CioMpwXFdXOD6qDSqC16WC1E4EXueN+bi3IpyrMeek\n",
+       "5jxzgT5g7FwVZC1xj51roDI07UGF5bx+Le5Zp1TIFPQlaxKqndcPKlXuaum75yAqZyr+0mvtHXPM\n",
+       "MZK68UMtLR1/joetuaLEWp/a2461hDVyWZWbFOzmgZ3wW+LzIqcsLora/iZmD4baKzAUqSAIgiAI\n",
+       "gkb2S0XKIXaDu9dU7E7uebrHIHnGSulxakH5Sj1/53y0A28il1Xnx8fLw7tKVQwvxTOfAG89dT2u\n",
+       "KOI1EttDe1r3KMTLTMXmuBKC4pPaC3C9QYaYZxbVgtqCaoJy6jBe9DN26vOE+UWcSyqOCdZS+GgT\n",
+       "yg7nTHmu3gbmAmuGn8PVy1xMDHOAa+OVjEnahbrq8Xetc88VJ2zXK6UvG6jorF2sSfQj40mcKP3P\n",
+       "+/RXKh7RK2P73nMcB7sgSxDIACZbkOOn7Cs1J5YF+ps1lrnJ2r/s9tL3aY/HibYqiqFIBUEQBEEQ\n",
+       "NHJAKFK1d5l4b9yt87wfZQUvib/zfDlH7V0v3gxxA/zfY2LwkvG2UGpK93/CC8ML59X3PKsltbdf\n",
+       "zot3lQCvCIWidL+qFB6/4ePhChoKVa4OVoq+NX3AY4daswCxC15bFSlUFD+u42oC7fV2oyLllChY\n",
+       "67qZI7y27tGGR048Hm1n7gOxVK5yY+Oofygb3me0D5vO1fXJkfq+11saitqYtByeJcgaQowXcxNF\n",
+       "z6FOEMdxVd7bic26gskrdoRig10QM5SrGdc6t+YFa5//pmEvqT3x9jeYf7n9aFOEIhUEQRAEQdDI\n",
+       "AaFI4aV59VnP5vNYjtIdo/1zKFTc7aMYEfuBl5rzvPFm+D4ZInj4wPVAqxeOF0LdJK6r9XhD1WjB\n",
+       "q3RFiHH+xp14AAAgAElEQVStBS+S/vXMqKH3n3RvtlQpdPz6W/sXe2ptB5ClR+waipnjStRQrKVs\n",
+       "+i4Htfj3+D/n8mtIxcB4XSEUFOrXAAoJcXv0JWtEbfYbawEqqKurKDYei9XKUEoUpBQB1iBXBJ3a\n",
+       "uM6cAnHVVVdJ6tbEeTGUspoDe2Yt5P/Yfavqvd5oVaIgFKkgCIIgCIJGFqpI9Y2UL8U9Yrw1zotS\n",
+       "xJ5lvJ/y2vAuiXvwu1m8CN+fCAWFu3zPlkNx8P7Aq8XbJDMJ3Gv1/ZHcm0CZc4+eOA9qi/jed4vC\n",
+       "FSNX4Oiv0ngEavfQ7368oeE8QylBfaGfXJGr9TqJXeN7qZ0C+saJpGLM1jquZ131xRUdn+ullceZ\n",
+       "a6wprqYSi+VV/Wvj8vh8SpFDHaWeVSrWaNmg33N1tRgPVPtUTTLIxaShuLEbxLxinnyt90zXoZRA\n",
+       "lE/OR//Rz8QA8lRgLGVsvROKVBAEQRAEQSMLVaS8wvFQpCp649V4LAfvczde6nUQU+TKDs/p8dT9\n",
+       "eb1nw3E+PG/3nvD08Qq8v4j58XiOFKnYIo8vQUnx2itj44ob4+l78Xkl91I78npGOUWxLyh9eHeL\n",
+       "xq+3b/wD9pfy+hkv79ecGkC2Kv3mCvFaGWOpulBDx71B695q3h72kRxKHU3t2sBalIs1WlZyCh0K\n",
+       "IjaW+3zOLlh7fM0Ym9LdEvrC3EWBwk68Nhz9EIrU2oQiFQRBEARB0MhCFamx7ra99gVeqSsuvI/3\n",
+       "ltszz6H9HqPE+65IuVcIubt8YoCIrfG4DLzw0kwV2ufeOtmGeHPEjs0b9xLxjvDWUSL4P9ddak/Y\n",
+       "Acd1BTNVp4rP1dZW4TxD7evUl9Z6WClQjrhO1BVAAeb6XYFN2T/KZKq92Cv7q0mr50ZfJYq4Sc/s\n",
+       "HRqO3zd7CPy66StioqgMvuy4eo4tsSam+muo35YDpY4SCiXZidg5sWGp+MfgEZZjZQ+CIAiCIFiH\n",
+       "HBB1pFASPFsJjxiFojSbyrOwPJbGs4zwmjyTpDS+guw5lBhvJ++jCODduuLmx6PdfN5joYhtGcpL\n",
+       "LiW3pyCkFKUcrqB4XS7+jldLv7DTPP2/txKyL5773OdK6vrdK9PPG+zX7QgFkvmA3aRqGdFP1Dej\n",
+       "n8gEAs8exdtlHvi8hFzdrbWyfVtrizmMFXGJ2N5Qx3foO64ZRSlVmytHqrI5WXvrBc/CxBbGzvQ+\n",
+       "0GANZC6zBq/XWLp5E4pUEARBEARBIweEIkXMi++71Johg0JDnEFqh3i8y1QF7VJQMPBOPaaE/btQ\n",
+       "Djgfn8/tYE9WGechjoL9pOZd1da9IK6PSvBcJ16pK0o5vHYOx33Zy14mSTrppJMkdfaBN0ycCf1X\n",
+       "qkihAnAc1I5FQUV+VyRdkeJ9FL8HHnhA0uqYNY7jtW6AcfJ9zDx7Dzgfiih7RzprxfnUxsZ43Sbm\n",
+       "bEoVHSv7DxXb48ZOPvnkQc+zLKoovPjFL676fOlaRC08YuboT/q3FuYMleF37dolqVP61rtCRsV9\n",
+       "FClio1gLcvW49hf4LaHOWum4hiIVBEEQBEHQyEIVKerD5HbQrgXvAVyJAryUofeSc28PL7ZvpgyK\n",
+       "gMNxvQpzynugfdx9076hvFS8/NYK3q6Y3XXXXZLStVX6tttVBpQmzz7jemqrZl966aWSVo/Porj9\n",
+       "9tslrY59Km0fdo+Sl9szEsXTSWVE4QWnvrcvahWjvlXmh6pTRTtQ6VD7atVWBwXG91RzUAFRXcH3\n",
+       "mPP4t1TVeYc1wWsH+lMBdotgDm7YsEFSlzXmCpPjew1yPaiobnPYGp8nUxTbo19OPfXUmetgLeb7\n",
+       "i1KkaA/ji6IEXA/jzvVgt/yf6+e3kjWX/kbJ5HyM96Iyu8eCNbB2rQ5FKgiCIAiCoJGFKVKbNm3a\n",
+       "8xzSlRG8Ct8jD68C743sH+6WeX+sCtJrVVKWVu8Af8IJJ0iStm/fLqlThvCOuPsn1or/c7fPdXNd\n",
+       "XD9emHupubtn+pP+pr2eDUhMVN+K3ng7HBfvhXbnapK4d5+KkRmLW2+9dc33S/dVc5ZFiYJ5x7zh\n",
+       "tXp2H9A/zBPsx9Uir77McYZQUpkjter0aaedJqlTL135Qamgral9IcmA9DUwF1fJGsjxfb9OV4q8\n",
+       "r+hL1gKPIcopWW5LrC2swShA3q++n+npp58uafW+oqimOSUK+Htqn1S/Ps8Y9ePT/zfeeKOkrn+p\n",
+       "OTjvfUjpX8bba7O5vdBvXsuN3yLexy5QAokj9fHn+D5nD3RCkQqCIAiCIGhkMlYWyj5POplMV1ZW\n",
+       "5n7eIAiCIAiCWlZWVjSdTtcsJBeKVBAEQRAEQSMLi5H67Gc/u+q5K7ERPH8le4vYHp5H89zdY6d4\n",
+       "rs5zX56759QvzuvPm2vhPLnz0d5UpfIctPc973mPJOnjH/+4pC52i3gH4hBSewhyvR6XQSwT/cA4\n",
+       "vP71r5ckvfe975353ljQj6XnIxandV+o0vFLQdwB45mrCN/3fLWsl/MRY4g9k2UI2CfrBHZ87rnn\n",
+       "6qKLLpK0OjsL2yZmxGtYbdu2bebvxAsC2U9Ut//FX/xFSd3cY8yJwWGOe40ttwmuhe9h4/z/0EMP\n",
+       "nTnfosaO9qRivEphjnIcYqLe/e53z5xvbDjPFVdcIamLCWLt2Lx5s6Que43Mct5nD0Yypvke/6e/\n",
+       "+NzrXve6mfOODef5yEc+Ikk6/PDDJXVZmNjrddddJ2l13C91pchOZJ5s2bJFUhfvSM2/F77whZKk\n",
+       "P/uzP5O0OuPZYxCZs8cdd5yk7jcqlcHPbzrtOOecc2auc2xy5wlFKgiCIAiCoJGFKVLf/va392QK\n",
+       "cHeaqsXBXTDeGxkZeKR4OSg8pVl7XgMGL2vjxo0zx+PunXo57lV67Q7u/rk+9445Lt9zRYpaKnzf\n",
+       "s8W88jdZbe795vYF8/4mA8MzMfz8846rKz3fUNloVC/Gi/IK5p6FR7YlagXjhr1gt1Q4H3on9daM\n",
+       "sxQomtjBvLP8yHZNZax51uneVchTGZ65SuU33XTTzP8d5oBng2ELqeN7/STWKtYostKwCfqa/7fu\n",
+       "jYeK71X8WxkqO62v7aMMtmbQOihI1MdiLWZcvAZhaX/yG9G3Tllf6G/f75LrSu0E4DULmRc7d+6U\n",
+       "1NmD7wyQmj+prE0UqFz237JnB4YiFQRBEARB0MjCFKknPelJq/aGS3HLLbdI6pQYV4S8pkWq2ipe\n",
+       "IYoQSgGKEV4OihKfwwv1+k08B/dK3HjFqbtovJSUt4KXkPK63IvwPeHw2Gv3HUvhXgbH71tvamhS\n",
+       "9Zo8piYHXirXnfOGGMerrrpqn3/H3nOxUznwyhlv1AeP62kFdaV1b0iH9jLPiNNABUgpXv4+85f4\n",
+       "jVSl+33BNfmxS1VPV3FT32PtYG6iDmOL1I3yueQxIrVzjPOmFCnUbo9hWS8wfuxBeP311/c6HoqN\n",
+       "21Cr4oXSyLj57gHzJrVHnj/VKIX+5zfPK9PX7mWYit91mD+ugJUytJLphCIVBEEQBEHQyMIUqYMP\n",
+       "PniPN4fXlPP83ZPnrhoFiuewxE44HvPhd+t4EVdffbWk7u4aD532epahP//FK+T92pii3N5ljlcz\n",
+       "Tnn4xx9/vCTp2GOPldQpbcSkpPD2L5sSlaO2snhfxShFam+5WvCeUaSGUqIcvL/UXpWlMA+82nZt\n",
+       "7BXzCu/y/vvvnznuvmCu+t5ztbhy4RnFtIXYJl8jeGXNYw1jLXJq1w7aRyyLs16VKCD+cOvWrZK6\n",
+       "eMbdu3dLql+bUopI6xo31L6x7IGHPaHioqCVngeFDLvgt8t34ygFJZVxcGVrKBXbKd3TMQVrRyhS\n",
+       "QRAEQRAES8bCFKmHH3541V55PG8tzXTwWi3crabuirl75pW71JR3yl0w7aR93M1zPrwHQCHjPO71\n",
+       "0L7nP//5M/9HIcLTLvVGc9l5gLKGMjKW8rLe8PFbVmjnUMpWDuZVX2/Q7SyV4ZaDeep7Oe7tZaZi\n",
+       "oLxWXC30gdsKqh1KgdfJQVHAkydTFwWKsUT14/gcZywPuhbPcF40KDL0F7XHbrjhhoW1aUh8jnst\n",
+       "tVLoH//tap0H/KZ57Uag7lYpqf1rnb4ZyTyVQEFmDRoqjjgUqSAIgiAIgkYWpkh973vf2+NV+A7m\n",
+       "pYoUd5m84jX5823uwt2r5NXPjzfL3arXV/LMBM/m43Pf+c531mw3x+dzRx55pKTubh8v1mt5pCh9\n",
+       "nk98BArZelGk+nrDOSVnXgpPX1Lt9KzNoVSDsdSQ1jgK5jnqDfNw77pxudin1nPzPffkmUOsFcxd\n",
+       "YqBoI2sQyhRznawuj9OE1qyvI444QlIXQ0SdrNr4S1gWJYo1mvHgeuj3oWuqrXeYM/SH23Gt2kw8\n",
+       "JjFbXrOxVuGZV4067ILfAtrNPUjfemuhSAVBEARBEDSyMEVK6ryIoSooc5ed2s+KbB/iE7hLxdOn\n",
+       "flDpebi7xcsEMhpy2UQ333yzpK5yNgoWXkJt/aNS1osSBX29YcZnvShPtXi2amsM0ljQ/9SeIYYQ\n",
+       "xas29grVhuOksnT3hrnoVf9LbYvvuSLFNTAGroqzpnHtfB4lhVpa2GaqRlwtKAe0Y+hq+g4KG2si\n",
+       "a/rQtsjxUaa8Nltr7M+y0XffUODpDnZF/zFevj8oMX/8hqV+K/hN6vub7fHGY0F7mWfUoqOf+ypS\n",
+       "vW6kJpPJnZL+SdKPJP1wOp2eNplMni7p/0g6StKdkv7XdDoddxYHQRAEQRAsgL6K1FTST0+n0+/t\n",
+       "9d45ki6ZTqe/P5lM3vno/8/xL04mk1U7iveFu1u/S+a5MN4nd7/+/LgUvC28H8+aKz1eqgosXihe\n",
+       "19CKFFDlmP7Bi132fY1qSe2/tr8xtDee2wOzFLxcMmZQZ1qzAfGSU/N9X/SNnXEP3f+f2gWAa8UW\n",
+       "uWbmGnMQpYA577sm1EIG8NjQD4z1UUcdJamLzRoKbNFr+gH9t97V59oK4SlSNdwAxdKfDuXqTPlu\n",
+       "Da2MrUQBaxlrBfZRmvGePf4Ax/CWvFLSnz/67z+X9AsDnCMIgiAIgmDpGEKRunQymfxI0iem0+kn\n",
+       "JR08nU5JDXlQ0sFrfnE63XM3SMxS31gY7r7dS+QulFfuTrn7JhYpVdMi5TnzOX+OzfFz2XTc1ZMB\n",
+       "QawH3x9LSeF8VDrnPL6z/f7C/prBg9fo3uSygWrA/MG+W9UDrpf5My+vVlq911cuC9AzcLlmlCav\n",
+       "hedjONZebUOpjQ4xOKiPQ+9xRsYm/cR48Nuxv8RIDRVbxm+azxGvtM/5XO1Ngd30XVsZP377ON7Q\n",
+       "v30+r1g7hooX7nsj9eLpdPrAZDJ5lqRLJpPJzL4E0+l0OplM1rw7+sEPfjAzaGOVlg+CIAiCIGjl\n",
+       "sssu2+ffe929TKfTBx59fWgymXxe0mmSHpxMJodMp9NvTyaTZ0tas5jSE57whD2e6HQ6nal03hc/\n",
+       "jlc894wdqg2XVlkFrx8Fpc9dUzuEj31TibeBklZarypYLvD+c/WjWuMAhlIrUHQ9/sL37SoFRZU4\n",
+       "ojvuuKP4u9RsI1M2B9k9vLoHnovP8s/TF4wV8ZDEJ/oYjhUfObQSBazpXPdQlaMd+gUlg6capTUI\n",
+       "50XrnnapfktV7k/hvyWeIe+/lfQj7+cUp6HUYNaoeWWU19rlGWecocsvvzz59+Y7l8lk8qTJZPIT\n",
+       "j/77yZJ+VtKNkv5K0q88+rFfkfSF1nMEQRAEQRAsM32kj4Mlff7RO8kfk/T/TqfTv5lMJldL+uxk\n",
+       "MvlVPVr+YK0vP+5xj1u1l5e/lnpjKEl4d17XCc+duARqZ/A9dmpP7Yzud90HHXSQJGnz5s2Suuw3\n",
+       "P19tXADt4fvEAwwNcQt4Ja3VjoNhcCUUO2Z+oFK4iuCxUdgxXjDqACrMonDlldotrd4sSvBacRrs\n",
+       "ucZc5jPM2S1btkjqPG4+554+Y8JaRAyJKwy1c5wxxvNnjUupicRUrRcYC65rrPg1bPvQQw+V1I3n\n",
+       "WDFlrZTuOuGk7KG2bpN/PleXKhevyNrk++S2wnxgvg2VRTdvmm+kptPpbkknr/H+9yS9rE+jgiAI\n",
+       "giAI1gMLi/DeO8AcD7M0281BaeJu2WOXNm7cKKnzhN3L47l6qnYHx+Pumbt8drrmLp59rWqzkHxX\n",
+       "e+Ilhob+Zp8hlIxlz2qjJg39zXjQb4zr4YcfLqnz5tgJnnHFvlAX6G/PxJo3xPyQNYlawbgwTrQ3\n",
+       "FVvEvlFObQzS0KB4Ms/6Zszs2LFD0tqKLTbuajbKEx45NoIt+ZxlrpfGUqXwjF/WNo/RwCPHlrHR\n",
+       "5zznOU3nZU3kuOyvWRtjUwpziz3+xs6e8+MzV5ZlT8ChQElFCaUCd6nShxrN2sDa4hXhsUevZ4ad\n",
+       "oPwdcsghM99zBal0Nw6+x5rN/8dWFD07kH5kTfV+LmX/yBUNgiAIgiBYAAtTpL7//e/vuRskzqA0\n",
+       "U8DBe+Mu+Pbbb5ckvfSlL5XUecDcleO1cFfN3afXmvA4BsCr5Lju3ZVmxHD3y10+xxvLq+Kum+te\n",
+       "L7FRKFHg44G37ePhrw797HEuZGihXgydeYTqANgLXhneIN4TXiLfQ1HcufORaiOMo8fYcVxip1K4\n",
+       "SoHdp+pSEWNHP7FHJAoRChvkqn/Xwnxfa1x37969z+8ypq5GMjdSqh54jJR7sq6m+155jAlKFZ/3\n",
+       "eDiuLaWIMcaslXyedqBIMVc4H/GhjFlOmeJ41IXymC6uH9Uf26SfUZNRGlAssK2cAsH4eN0r5jyv\n",
+       "zrLuO1mL27rXYmPupn4zfQ9CPs9awTj69xln7Jd+R7nlNVfp38FusSvf+7K1Ij1rlu+aQHtY41P2\n",
+       "gJLK2sY9QWmccihSQRAEQRAEjUwW8Ux5MplMV1ZW5n7eIAiCIAiCWlZWVjSdTtdMKwxFKgiCIAiC\n",
+       "oJGFxUidd955e56vE8NBfEJtxDyR/8SW8Fz0jW98o6RH7iTnAef59Kc/LUm69957Z9ozFDzvPvfc\n",
+       "c2fOC8QpEF9AxhLfe/nLXy6pi0+45ZZbJK2uguvPlTnPH/3RH0nqnh8T30H8BfEJjCvPvXkejgrK\n",
+       "cX2fMZ5vv+Md71jz+saC83zhC4/UkGXcvF/oT/qXzBjiCog/IQ6A6+T5O3EOv/EbvzFz3rHhPO9/\n",
+       "//tn2gHEKRDjRHyEVw6nVhP24XE8HOc973nPzHnHZmVlRe973/skdTbGGPi+hEDsiSvzXHuqajzX\n",
+       "dMEFF0gqr3mHLXH80pgQzpeb67SXvcq4PuYkNugxR7Sf988666w1zzcWnOeDH/ygpPI1k2wyYnxK\n",
+       "fzs432c+85mZ97l+YrvY9YFXfrOOOeaYmXYSm8dcYJxZA9/85jfPnHdsUvbieDwl1+O/zWSSeywV\n",
+       "MW9nn322JOlDH/qQpM7eOF7fCv3EdBEDVdufzLfWDPXceUKRCoIgCIIgaGRhitTDDz+8x2tCiWjd\n",
+       "IZysJc94qQWvru9+TShrY+0blMsKTO2gzfcuvfRSSemMDZSlVF2tlNfH+Xj1bDK8FBSbVFbYWPtz\n",
+       "lYKySeYR40h/4V35q9flwnvieyiArZkpQ5GqQUM7UVJTbN++fZ9/X2Qtn5TiVLsmlHqutZ427WCt\n",
+       "6WsLrDW5bEOv4bWs1K6Z999/f6/zsda4ik79phtvvFFSt5bxSsasg8LD95cd7M9rL9Iv2HdqPrj9\n",
+       "872h13CvgF7L2LUSQ5EKgiAIgiBoZGGK1N7gnXE3T20QYi1yHi53062KFlBL4qabbpp5/+CDD5ZU\n",
+       "Xndp3l4f/Yc3tW3btpl2XHHFFTOfTylB9CPVlInt8TpOeFsoSw41Pah7hLdHLBv/57iLVqAcvM3c\n",
+       "vlSQGm/sGS97rL0T9xe8Cvj+zFi2wP6f1HVijhLDQzwiNplaC2pJjR3toLbfspFa21AOc+qsQywO\n",
+       "v1nLXmmdpw+nn366pC62i+uuHbex63e1Vj4nlpDfctZsYidT++yWEopUEARBEARBI0uhSPGc2+/e\n",
+       "S+/miYto3Wkb7rvvvpn/c/dauwP70PtY5eDuH2/Ivc5SUJBQUvBKfK+21Lj4nnZkugB7yY1NrjJ3\n",
+       "jr5eOtePtzev6543qWrefY831Hw+EMH2yCQ95ZRTJHVxbV/96lclDadEMdfI8uIpAllvy6Y250DB\n",
+       "a1WS+B5PL2p/O+YNaz2K1GmnnSZJuvLKKyVpTxZsKWOrya1rAr9NL3nJSySt/o0KRSoIgiAIgmBB\n",
+       "LFSRwnvBq6lVULjL5K6a556le905/vyVvcS4e/Xn6YvCs+nweoiV4v+1XhWKFK9kSLhX5dmAgDdS\n",
+       "qsj1VY5S8Jwe76VWIWRfMd+PDMUv52Vjf3ye5/McpzWDhJo5xLLdeuutTcdhb8fUPmU5mG/YSd/M\n",
+       "KUCRql0HPOPoQIQ1ijFFUcCWx8ok3rp1q6ROAcP2eR07W2osWpU0ftNye+AtC/ymfeUrX5HUrcX8\n",
+       "lnoNvRzLer1cD1mKxEO33is4oUgFQRAEQRA0sjBF6jGPecwej913hC6N+EcBoV4P9M3eox20D4Vj\n",
+       "aLhurqP0+bI/J6Z99APtrq254Tu0c9deWyunNMtyaCUKPKYrRSrGh370SuvEgRDzlPPu6bdNmzZJ\n",
+       "6q9s4iVv2LBBUuf91T7fx2t2UM7w3jg+iiT9xPv081D11zzLsTQeotZr3p/AJplz2Ag2Wqta1sb0\n",
+       "oICxVqDiY2MoYtdff72k4RSA0ozuVrDp2rWP3y7W5EXXjCvl85///MzrvPAK+2PBuHzjG9+Q1FVq\n",
+       "57eobztCkQqCIAiCIGhkYa7ck5/85D137XiUtYoUcNePl5KqyF0K3ghZa/x/aFCMOH5pnSq/a/bY\n",
+       "JLzS2ngI7tLxwrhbH1qRI0aH/l1U3aCU4uGxTMRy0R/UmqG/Ul4xGSHsy9U3loe6VvRba6ZJqoYR\n",
+       "Sm5qDz5/n34aKu4GhYv1oFSRKq33tQz03fPLcVUVFdXVXpQh4tpSmaS17WLNolbdli1bJHUKFZmr\n",
+       "/L9vnClrO3NprHpc2Lzv35qDObKebHJISpUdrwDP05Cxa+1RF4tXnhb0tc9QpIIgCIIgCBpZmCL1\n",
+       "lKc8ZY/SkcqQ4K4VxQnvwPdCw5NFGUjFgOTA2+H7Q2UjpSCThh3ZW8Eb5bXv82YUD7yLoSu148WO\n",
+       "tRdhX2iXK1P0A15nLj6DzzPOfav9cr6+NU9S/Z5SBudVC4j5N1Y25yIZWokCPPlcZioqH+1I0bp2\n",
+       "YOvYODbPWjrUXGft77vG50AZmXdNwPVO6S4j/KbzG8tv/bxhfFv38INQpIIgCIIgCBpZmCL1pCc9\n",
+       "aY934TFOniXE3SLP991DxpPu68FynHnviebZYbUxQ3jw3F3jpaJg1D7nx3vleChTfSGeY9l3oMe+\n",
+       "6D/Gh9gxxilX2Zt+QzGNSt37Brsfyt6WibHq65SuFShNtXvHlcLTBdZoxpC1qHWPNGCusZaVHg+F\n",
+       "hLlbqnzQr+xu4btepOC3igzYsbPRxoLrqK0NyDiVwryY99rIbxz29N3vfrfX8fa/FSsIgiAIgmBO\n",
+       "LEyReuxjH7tHieJul7t3PH6UAd9R2p+/rvdYir7tx2vyGBPqIeElXHXVVfs8DjVpTjrpJEnDxVzB\n",
+       "elFkuF739mk/Xgz9nbou+h8vmMrky1r9F57+9KdLStfjwp74HJXuWxVV8MyzYDio0j8WrOWo3q5o\n",
+       "lKrhzBkUA/9N4Lip3RUcn5uuZHkcLk9FaAcZt2Tg5tZCaryRib3scz0FSl5t9mFrHGhuLR0a7Ag7\n",
+       "C0UqCIIgCIJgQSxMkXr44Yf3PHf2GCEyS1CqiFkaq4ptiqF3tx8L7q5dSaE/S2NO6F9ea+v51MI4\n",
+       "U2sGezj66KNHOV8peJV33HGHpE7hY0d77AIvJuVt49UyLowD1Z8XBcqjxwKimFFTharY/jnGyxmq\n",
+       "Htiyz7dgNazVPC3w3QFK8YrgKBwoR7yWxn3mMk5Za7yOEWsg18V5U5XOWYOp9E6dIub8K1/5yn22\n",
+       "Y9Gw1vObQb/WzulWBW7eGdzYD8pb36zaUKSCIAiCIAgaWZgihbe/N6k95+atRMGyesa+tyDPsVFK\n",
+       "8Jpof2mGC8ehSjHxAp7pgzKD93jzzTdLWj1O1MfyWDjg8/58elHjDXilHoeBt4J3mosfIC7Fvdh5\n",
+       "10zxyvyprFRquuTqp2FXpXEqYzPP/iQrbX+ltfo+ys7GjRsldZXU77777qrjpGxvqOw/55ZbbpG0\n",
+       "eq1nzbryyiuLjoPyNa+aayly9bVST1lYm9l1gv/XZlj7XMSeULxc8aE9vM9v/5FHHjnzfdZS7KoV\n",
+       "lMMTTjhBUjefifdEUawlFKkgCIIgCIJGFqZIHXTQQXtiNQ4//HBJqxUV7g6pkstzVGp0sK8T+/UQ\n",
+       "Y+OKxubNmyV1njjf5zwoC763GooKMSMcFy+L98nogFQMCs9hOT7n9/byORQfPpeqqJ3aN6uVa6+9\n",
+       "duZ8Dt5BzvvKVbdNHb/vflx9SXnRpZlPuf2mfEd4vECe22M32Dt2QGwScQx4edgT8wQvEqUPb6u1\n",
+       "vcwD5hleHN5vab9s27ZNknTYYYdJ6q4TxRO7Zh4yD1DUOC/9xbyYZ/VpVyG3bt0qqWsrVftRjRkr\n",
+       "qtET80Pfcc18jrHm/z529AXXTl+6SootpFR1xhLPv29mrs8Z2tO3Cv/YLOqpAzbM3GMu88oa4QoJ\n",
+       "f2fciLfEnjz+EgWHtYA1JpVdx9wrnVOsPYCChJ1i38wP7NmzJfmtYK3BHlEosee+6jO/Wdu3b585\n",
+       "D8dvJRSpIAiCIAiCRiaLiEeZTCbTlZWVuZ83CIIgCIKglpWVFU2n0zWD0EKRCoIgCIIgaGRhMVLz\n",
+       "UKQ4x8UXXyypi19IZVvxPNkrNBO7xfNUvu8xPpzvggsukJSuOTIUnG9e6t56Ox/jyXj53oEeF7Ko\n",
+       "68NeqFBPXADxMrST9hMrRXwHnyc+wfeqhF//9V+fOe/YcJ7zzz9f0upK6FwPcR/g8Rn0A/MQFd13\n",
+       "Pnj729+evLZctXayeXJxf/TtOeecI0n6wAc+IKmL3yIWhDEj9oO1gO8TQ0UcHJ/jGhljanu94hWv\n",
+       "kLy2rVYAACAASURBVDT/sbvwwgslDZ8tB8zFc889V5L0kY98RFJ9Re1aFjXXL7roIkn5jFevAcj/\n",
+       "eU1l3hJj9I53vEOS9N73vldSN2d870GH2DnmoM8X5ipxwKwxb33rW2euc2iIxaIuGef53Oc+J6lb\n",
+       "A4g9Y17deOONkrp+fOELXyipu/5du3ZJ6uadx6Dt3r1bknTmmWfus32hSAVBEARBEDSyMEVKWh25\n",
+       "3woZC7y60oCXmfNyvJ4Rd6fcneNt5qq9krFAe7hr533ullu9PM+UCNYGL98VjmXbkZ3MK1QI7CNV\n",
+       "x4vsN/eiUnYxVMXxVvCi8W59PPBuyTiito97mcxPzxLcex7RF64is9akFKmcEuV7sAEKE33Mcfg8\n",
+       "NddYe1AiuGaypLgG381h0bXsSm3HsxBL8bm4aFsdG2waO8B+UESxU/qF8UdRpX9SipTD3MHO2DuQ\n",
+       "NYanNKyVrCG0i3Ywr1Be+fu8sjJTFfJZM0855RRJXSa/31Mwv3y3CfqHNRVFj/4urbgeilQQBEEQ\n",
+       "BEEjC1Wk+ipR4HvEOa07Urvn7PVtUvvyeG0YXqlV0Vr3JrW/FHfZY8UxrDeIjcJ783HK1U2aN9gH\n",
+       "YB+oFk6q2vKmTZskdfaJPaDO1HLyySfPtIf5SnxBaf2oXKygK7bEcWDnjFPJPli01eeYq3ql4JnS\n",
+       "BreZXK00roHaXlzDzp071/y8z/HSfTKdU089deY49CmKwpe+9CVJeQXJbXNs6O/WNXto6H9i1Rjv\n",
+       "3Lin8LUbpYXzUOOQNcxrFOaeqvhc5/MoTdgByhcKGe3YsWOHpNUKIzUTsR/U4UWPE9dFe5lnrFX0\n",
+       "Hyo3MVFeNwrFDcWO/indey8UqSAIgiAIgkYWqkjNi9YdnR2eY+d2NG/drydH6nlt7X5I+yuMD0oK\n",
+       "SgiV2rGDZVOkiNshaw8lyuMRcs/rUWG4rr47quONUr0b7xU1pVSRysH1szMBiprv05Wbd2NALApt\n",
+       "QjEAPFdiK3zu4/nmFAyPgek7dqjUxIwce+yxkqQbbrhBUnksU6nKzfGwXZQRjzvNcc899xR9ziuy\n",
+       "M05DKyRcD7Fu2ODVV1/ddDyPeaPfUFCY86wFnnFcW4GbmD2P4fO9C5nbqRg11kx/HXufS8+gd3yt\n",
+       "ox+xA/7OuKXWEK/8zmtp5n0oUkEQBEEQBI0sTJF6ylOesue586233trrWLnaF0PFYi16Z+8Uy6Ks\n",
+       "LBrUArxvFJM77rhj5v+ejblofG88XvH68E5zexCiuOGdY69ep6kUjoMChcox1l6I7NeGCoAKxHxe\n",
+       "xPxDMWAsPG6NrB/6GBvLxXURD0bWEcoAsR4oBK02et1110nqPPprrrlGUqf6jQX9gEJXq0iVQoYn\n",
+       "qiX1goZWpFCAiB3qW98qZ8PE9PA5z/L0GDJXWPz/zFns12MIUXKYayn1m/Ni79jtWL89tMNr47lC\n",
+       "6vXasHfGqdTe6WcUNjKpS+dfKFJBEARBEASNLEyRevzjH1/8/J27aTIZPDYjtzP0omuxBPMB7584\n",
+       "FSp+u5fqFbVrM5PwyvDiUFJawatyZcorsaNSoLB6nAvfJ1asNfOL72/cuFFS5xVSc2asmDzvhz47\n",
+       "snvWETE7tWsBaw6ePWMAHI8xS3noKAoch2wjlAeulXYzBq3xnah42CgeO+0fawzph7HrQfkcGSoO\n",
+       "NsVQldZzCg4KiispKJgoQ1yvK1CuePl4+6vHADFP3P54JYaLz6HUAUok9sbna/sPu2WNZh76OHsc\n",
+       "qce/lto5axvzk+svzXgORSoIgiAIgqCRhSlSJd4md6HcZXK3CyhTeIWRvXZggz184xvfkNR5a+6l\n",
+       "+XPvWpUCRciVnlyF/RR473wPUJRKM7iwfzK1UD1qYwRpD6/UXknVPhoK778+uFrXqkqjfKRiW1A9\n",
+       "UwoM7SCOE/UT1dRjiDyLqy/erg0bNkjqbGOomCmOh5JAf7UqgTmIaaH9fbMc50VpzA1KEmo6axuK\n",
+       "SSqbzO3e658xtzwbLlcpnbmPGs9vsbcDZYjx53O1ihT2kssuRTliLfbv1cZw0Q8oYdhzjlCkgiAI\n",
+       "giAIGlnqOlLc1fpduddyCfZvavfx4nk3doIXhXrg1O4PhhdG3ILvTF57PD7v8Td4ja3eNt5rbWVz\n",
+       "vOabbrpJ0uosWLxOjk9/94V+8LgP2t+3P/rAGnT77bfPvJ+LBfK6PcRxFu/h1VjZHOhTMqQ3b94s\n",
+       "qYs1GUqRIrMUBYDrHav2V2tl8UVTGstFViLX6Xs6lsLaxPd8rSoFe7355psldePtSi2f43ytT4no\n",
+       "J98D0Mcde6MdnB9lCkWpdL75WhOKVBAEQRAEwcgstSKVqgmyXp6HLzvLVk8pBe0s9cZcESKzwxUp\n",
+       "vP1SrwPwgvCSyMhqrWiP4obCRbtqlS3A28SLbY05ciUKLw2Fr7Tqb1/wLpcx+7Y0Bgg1HdUx9XkU\n",
+       "K8aubzYaY0Zlbv4/VAwWkLnKXFtEFfoxod+Ym2NfH3OfGCDWmlqFkjWFNYvr8OOVxhLxPdZk/g/e\n",
+       "L7WKFPaD8kT7uBfw337mn2dvMn+Isy7da5PPkx3pTwlShCIVBEEQBEHQyFIrUrBt2zZJXczGonec\n",
+       "3l9YdiUKXFHh/+6NpHDlBC+H77cqNmRc9fXuvR14h61xDWSI4U25slQL3ifVfvH2Sr28/QmUHXDb\n",
+       "SSlUXtcmZXO+z2DfTGRXNcmeuvPOO3sdNwVzEeVj7PpO84I5wLiQNVgLcYW5GoooSajdxABhD7SD\n",
+       "8eU3EQULWAM8O5Tvo4CWqssnnXSSpE6x8b0RfY2q/Y3h+8wX5gP94Nn+zDevmYeyRC280rWKfmHN\n",
+       "jTpSQRAEQRAEI7MuFCmUqFa4yxwqpoO75JS3RQyJV9+tPU7wCHiD9BNeWK5qNePuXozXVmmtwjxU\n",
+       "5tBzn/tcSdKJJ54oqfM6PUuNPSlT9uL11vDKWvfag5T9lsZr1MZhLDNeb8evCUXA15pUTIp78ChG\n",
+       "Q1XSZuyZK9jsWGsOttIa37essMaQrYYKXft0hDmZUqTYw+7II4+U1P2WMG5XXHGFpPRvmdsT7UWB\n",
+       "5DpQxmhP6W8jn3MlbCh8reE8rOVeS9LjST3brjamjH4mm7X0KUAoUkEQBEEQBI2sC0UK8Oq4+/QI\n",
+       "fZQF94ZSXmIrOW+uNHsrlKgyUjFIuUwpvIuxsstK61sR70ANH9+fCu909+7dkjovD+8LL5J4h5Td\n",
+       "oHJceumlkjo1gviCF7/4xZK6+ZKre0VMFDVteEWVwXukfXi9nulCJg7tZjxTCpX3a00dsdqaY7W4\n",
+       "Cklfck0pD5YxoH0cxz8/tEqNDVD/auw1Z3/NqGYNIfuyNS4yty8nsVfXXHONJOnoo4+W1NlJLtbH\n",
+       "59Rtt90mqav1xtzlN7F0v1v44he/uM+/980E96cDzAf634/LWsr73CPwNIj+LI0z5ekX44syfPrp\n",
+       "p+/ze6FIBUEQBEEQNLIwRWrjxo17vDPuJrmbxSP3Pb/wdPk/3hVeEHfj7o0OXTMlmC+1KgPeFmoB\n",
+       "SgrPy71KrisoJ5xwgqROkSHOgOw3vDqeoxPThHdHZtdxxx03832y6Ry8KleqgOunUjvHoR2pGC/m\n",
+       "lWcYER/hihX95RkwKE5407xynMMPP3ymne4Vk3FEOzmex2ugwHFe+hsv0sdvrdhHYkwYW85B3+Fh\n",
+       "8nfUQq6dviLWgrEDtxU8Zq/dha0yBjmVmj5kTGhHawwKY8H3c6osfetZX/Qx/eCxXqzBHN9VYD5H\n",
+       "v5HtxXHpL2Du3HXXXWu208/PdaZiz1Ct+RxZaq0wLr63WynMNZQt8L3vWFv6Vp73bD2vPF8K10s7\n",
+       "U8oOMVfYHfOReZfKIMbuma+ME9+nn12RuvrqqyV118Vx+H+rMlab1RqKVBAEQRAEQSOTRWRXTCaT\n",
+       "6crKytzPGwRBEARBUMvKyoqm0+maBeBCkQqCIAiCIGhkYTFSKysro2fYoHq1ql+19W84z/ve9z5J\n",
+       "0nOe8xxJ3XNhnvsS28JxPUaF5+d8z6vYHnHEEZKkV73qVZKkT33qU5K6uALiPogLIW6B58e8TxwD\n",
+       "8Rs8h+Y5vcfMcH0XX3yxpK7GyYYNGyR1z8cvueQSSV0GxOte9zpJ0sknnyxJ+pu/+RtJXbwBz8G5\n",
+       "Ts5HFd1Pf/rTkroquqnn88TYlNZ3op94Lv+2t71NkvTe975X0vi1cOjPCy64QFI6jgU7JE7lgQce\n",
+       "6HU+nw+54zM+/P2OO+6Q1MUeeqbO1q1bJUlnnnnmmucbi5WVFX3sYx+T1MVoeKYv8Wy1Y0tsD2P0\n",
+       "rne9a8855wHn+fCHPyypi5Mji4s5wxgxJh5jQxZYLgar79qZwtdU1r6zzjpLknT++edL6tYSYrZY\n",
+       "E31uY5vEQRKLQ+wWawLn4+9vectbJEkf/OAHJXVrM7FcrImtNeZoN/GBY/VnitK1hZgnfhNa9wvl\n",
+       "fBdeeKGkrv+5/qFZVH+mCEUqCIIgCIKgkYXWkcp5hXgrXk14XrRWYuYun+wvMgDwnvCueB/lBy+S\n",
+       "jAru6lFOOC7eMaAI4ZUeeuihkjrlyvdDooI23tk3v/lNSatr4WzZskWSdMstt8x8HwWHduHN4SX7\n",
+       "TtxUFqemyZVXXjnzPdrJdXklcuwkVwOkttI46oRX/p533GAuowo7bFWicuSOj3qR2l/MM2N27tw5\n",
+       "YOvqYOw8uwu1FZtN2UqqDo5nDC8K2u1V74F6QV7RGbWX90uzAZmjqcxnsuxQMpijrEFkI/J3X4t8\n",
+       "LzOuC0WHv6fmPmuFZ+kx/hwnVTeJ/uO1776UMJYSU0tubcGeW5UohzV/vezjOhShSAVBEARBEDSy\n",
+       "1JXNh1Ki8DLxVvoqDsQhoOi4t8T/qXGBAsXzerw73s/VCvGaKn63T0Xs+++/f+Z1165dklZ7Ydu3\n",
+       "b5fU1VZJeU8oSO69ct2u6OA9usLzpS99SVKnBqBs4K1Si4frci+K/ubvqRozraz3PeD2l73sXvKS\n",
+       "l0jq9hMDFFdis/YFtozCgKKCrec89JQnvWyedm4NwxaYY8zZXB+i5EBOkUJlR0lireFpAv9PVfT2\n",
+       "Okecn7WS8/M51havQ+WxVKxFxE7xvaGUF8j1z4HGsihx8yYUqSAIgiAIgkaWWpEaiqG9SO66czE7\n",
+       "KC1+l15bDdfjNlzx8WrLkIoDKd0Pi+P6cWg/SpVXccabJKsPL9D7AW+O9nAeV8By+0v1zf70OI31\n",
+       "Bu1fFkWqdsd1wA6ImSPOhrifElyBIhvJK3en4r1S+H6HQ9G6t14ulgeFyCuP5/A1LaW00L+p+EKe\n",
+       "JtSMndTFPDFeVL33uElfIxgX3uf6uW7+37pGoIDRn+wfiSo/NBs3bpTUtXuRcYdBnlCkgiAIgiAI\n",
+       "GjkgFKmhKc0Ow5tDieG1VgFxRc0VpVS2Vd9YsJTX6/sheXYe7cspdihbnIf+wUv3dpR61SlS8QyL\n",
+       "zsTqy7K1n1pFrZAJRrwMMX0loEARK+NZW6ikxOuVxF1JnY2jxAxFrRJVCmsGyk4qGzFHSpFibSEe\n",
+       "MxUDVQuZv15bD1tIrSmuULFGsRa5claLrz3EbI2lAt9+++2jHDcFChjzhDjUsRQ3xuf444+X1PVj\n",
+       "zVxfJkKRCoIgCIIgaCQUqRFJeX8oI613+35cjx8YipT354oY8RBk6KAg5bIuiX0iDsBr3eTaAXg3\n",
+       "OW8bpWu9Z9hwHR4X0qpu0H9kR+J919blYtyOPPLIpnYQE7V582ZJnUKJHV177bXZYzAXyFLDVolp\n",
+       "oVYZNpdTOgAFwrPaWqE9KGRDx8CgGA2dpebQj+xCQL+zBqDgldoS/UJlc2yczObcOAEKB3MdpbJ2\n",
+       "/Lg+sv+wH+bMNddcU3W8ZcV3tRhLiQLGh3GhX8k0Lx3nZSEUqSAIgiAIgkZCkepBrn4Pz/eB5894\n",
+       "WXhLtVl8rtjMO1sL5YPXVPZersYKXgfVjqmk7v2BApOKBSqN+6C9vofgegPlBjWDeIbSyueMC68e\n",
+       "h9Javw27xIuvxWsKEedUczz3ZBljPGzUNj5XG1/W6ikfdthhkrr9Nxk75gxZhGOpy2NBrBmZlqwF\n",
+       "qIm1MVmeEc3cz1ViZ20lForxxoaIuaqFuUHcH9eL0kdWodfq6ws2zxwd+vgOu2zMC9Yaxpvx4npD\n",
+       "kQqCIAiCIDhACEWqB7msOJ7/AsoR3metEgV9M1D6gvLA9ePV433iXZTGItEPeCH+PWKnUl5paZwL\n",
+       "xx0qziXF2NWO6Wf2QKxVkOhP4lGo++VKYm2VYh//WsjYQV3ALsjiK8HVK/5PTEtt3NdQ+D6ZjAGK\n",
+       "mavX84J+aV2LUEEZe9a21lpivpdgafv4HP3oaxCKTu2cZC5wfGJ4UKT4O2sKClhfUNCYm1x/38zl\n",
+       "ZYFxYrzpz7GeEvBbPJbiG4pUEARBEARBI6FI7YNcLE1OkcLjJyYKL63v81+vs5Si7x5sqarLeGF4\n",
+       "jSgX/J/rLr37J14EL8xVg5wXWVsva6z90hjnvvW7cuQqvefAu2a8sHPGmXGYd7wO404tIeJPauKY\n",
+       "XAnhu631n+gr9qzLxeqkIAaF43js1tgxMClalSigcjmxQ9hOq7JAe/g+/c/xUzbJuPvcZi1CASSr\n",
+       "sBS/Pr7PmsfahX0NpUgxF/2pBvuq9h23RUM/8huD0sbTFtamoWrkjb2WhSIVBEEQBEHQSChSa4BH\n",
+       "jFfQ6l3hJeHx83/usmuPW7unmlczrlWmctmIvOJNENuC143CkNvX7LjjjpPUVdclCw04bioTiH7J\n",
+       "ZdDgNeI1e5ZYX1rVilZa41tQRPEKsRPGE++9ldb4GMCeUBNqvHw8XF5RB2lT7d529C193beyOTaH\n",
+       "DfP/9R77gs2gVlNHqlV9p99RJp71rGdJWq0sEJvk2XqATfP31vG77bbbJK3O/sOOhs56Y26ylrH2\n",
+       "ja1EDa0ElZ7P911dtt0acoQiFQRBEARB0EgoUmuAF9VaKZpYGbw0r69Uu9ce4I2wv1WOvvt4pWKJ\n",
+       "iAvg+vAC8SbwonJKFOCNEIeAsuTn43PELTBOXnsmBd5pag/BVlytGCsGC+gP97ZL4ydQzugHFB/6\n",
+       "v2/mDNWef/Znf7bp+9gP11kT14KKyrXQR8TZtV5b37g0YG8/5k5rzS5o3UMvBWpyLfQ7a06pKulZ\n",
+       "lv4UACUolQnKOLsSBih9ZIX1VY19vErX4lboh3nVCpy3EsQcZ5zIRF5vhCIVBEEQBEHQyLpQpPC6\n",
+       "uCtH4RkrO6pvBg1eDzuiD62ALBriFdhTjVgW3xGeeAIUEs/Go0YK+1il6kHhbbrChvLH9+jnVLYi\n",
+       "3t3QdjPvDBrfP42q0qXt8P7FCx3KTvtmpaIwllZq3xvGHlvDdoizwyZTCgrfc1tFQepbg4zzoSAx\n",
+       "B2rXnNz+kh5XyHm4jpSS4sqbK16eRYaCxRrgMWCAWovajMrIWgKMC59HAUpl7mKzKdWS8eZ4rVX3\n",
+       "U4ytPs+74vi8IeZt7L39xiYUqSAIgiAIgkaWUpHC6yO+AY8Z7xKvBeUHLwOvsm/WUV9QSvAafedx\n",
+       "lJGcAsD3jznmmJn33SucN9ddd52kzqvFK0MRwbvg/ZQXyOeJ7SFWh/8ff/zxklarBHzP92nC60wp\n",
+       "TqVKlFdSp/21MWeoAvTDUPEHXiUbr96rTENp1exc/6VAjUAVwP5T0B7Ok1KwiIVDxaA/+T7jv3d7\n",
+       "XcViH0fmEjEZqHiePcccxQaI3eDaaEMrrFHYku93WBrDlVMfN2zYIKnb44+xJW6xNLbnBS94gaSu\n",
+       "r7E5roN20D/0H3sK0l/YLApLqn4Wx/dMZ89iA34LXH1mfJkbnlkMvm8ltkY/oYRxfNYojkPVfa+h\n",
+       "hwK4Y8cO7YtnPvOZkuoziFHx6R/sfqhYqr4V71PQXpRCryfFOLjqPjRc38EHHyypf6xbKFJBEARB\n",
+       "EASNTMauwrzmSSeT6crKytzPGwRBEARBUMvKyoqm0+mahchCkQqCIAiCIGgkGyM1mUz+b0k/J+k7\n",
+       "0+n0xEffe7qk/yPpKEl3Svpf0+n0+4/+7f+S9AZJP5L01ul0+jdrHfeiiy7a8zyazBCeL19//fWS\n",
+       "uniBTZs2SepiKYh74HtHH320pO75Ns+bX//610uSPve5z0nqns8SY8Rzf2q78Nx069atkrrn+Pyd\n",
+       "7/tzfNrxyle+UtIjd64lpLLUSuE8vNIuMl1oH3ESXA+xJx43wt9RKXnOT8zLa17zGknSH//xH0ta\n",
+       "nU3H/z0ziP4hboFxvf322yV1sUg8Nyeu4dWvfrUk6f3vf//MddNvxBzxynlof21MEv34l3/5lzPX\n",
+       "Q7vY+Z3aNrSTcSd+46tf/aqkzr6IwyDW7b777pMkvepVr5IkfeADH5AknXDCCZJWV+HmuMQNMK4e\n",
+       "gwVul5z3ec97niTp8ssvn7kO+on5g73z/y1btsych0wb2kP8CRlc/P9tb3ubJOlP/uRPZr7H+NA+\n",
+       "xhN7w56Yx3zeY6OclZWV4rnXF597++v5PvnJT0rqbBZYE0466SRJnW1gU15xnGxAxpK1njpQv/qr\n",
+       "vzpzXvBYtVo4PjZFrNg555yz5vlKycW0sRYTB8t5+C3i+6y5ZECzxhBLRj96bB1rg2elcl7Wltz1\n",
+       "cV7m3vbt2/f5eYfxOfvss/d5vlT8Jb89XA9rCtfpsVr0W9/xqyV3nhJF6v+R9D/svXMkXTKdTjdL\n",
+       "+vKj/9dkMtkm6X9L2vbod/54MpmE6hUEQRAEwX5JVpGaTqdfnUwmR9vbr5R0+qP//nNJf6tHbqb+\n",
+       "p6SLp9PpDyXdOZlMbpN0mqQr/bgPPfTQqsrX3FX7XT5ejuMZN+4hA8pHypN1cvVrPMMC76qWvvV2\n",
+       "HM8C5HpT/cddvnubkKrtwXlQSvw6Ut5aLjOCz3stG7wyXsmQ8Uwc/o7iliO1Nx8KGcol7fJ+wn7/\n",
+       "9E//dJ/nQQkCr4mD93XttdcWtbsU+gmvjnagQPn+Xbzyd+YXGUJkazKfPCOKeeHjTv+6csb5sB+O\n",
+       "k8o8Kp2/JcwrO2i9k5pLzJEbb7xR0uoMStZEMm1diSBbK1e1vu8ehLn6RMwN1g5sNJehm8uuxL58\n",
+       "TaY/mBP81mCHvhalfiPob58TtfXIGJdTTjlF0mpFiqxE3/8USscnFYvN+JC96P3uWYN9d10Yi1a1\n",
+       "6ODpdMoMe1DSwY/++1BJe/8K3ivpsMZzBEEQBEEQLDW960hNp9PpZDLZV+pfcVpgq8eZ82p4nu/H\n",
+       "r93pmue4eMzcLffdER7Gqt0xNHhtqaq+rV5DqnoxXk/K+yF+gFikHMRtpEBxoWaM162qhZg1XlPe\n",
+       "3VBgp9g9cRNwxRVXrPk9t2PmFXEdjs+b1Lgzz3J2kauBUxJTSExMTolAMeGV2JSh5x4xJNgmSo7v\n",
+       "ArCsoPZ6f2IrjAnX5TX+UsoOsT1um0PBHPcaaw7jjW0yd/qSsiNsl7k1dGX02lp3rKmsvYyrx2GO\n",
+       "zdBPZ+ZNqyL14GQyOUSSJpPJsyV959H375N0xF6fO/zR94IgCIIgCNYdl1122T7/3qpI/ZWkX5H0\n",
+       "oUdfv7DX+//fZDL5sB55pHespKtKD1qrEHlFZXDP2is74yV6TEgO7vZ9v62hqsm2esN4UR7r4tT2\n",
+       "L+QqVS8a+t9jq1LgBebiCfru/4QdojqgcKUgOxDlyyvGM85U40WJw5v2eB+8PK/0n5o3pTXlUH2I\n",
+       "A8kpwkPFNZR4raVjhkLBtYylAnsVejIo57XLPZmg2GDtforYjvcrawJrK7bJWOfG3DNuh4Y1oVRF\n",
+       "TlVIbyWlrmMP2B1zGOWn7z6vtU9H6P9LLrlEUrcGMB9yFddz6r6DnfBbinLJ61h4hQCeDpSO9xln\n",
+       "nLEn23ktSsofXKxHAsufOZlM7pF0rqQPSvrsZDL5VT1a/kCSptPpTZPJ5LOSbpL0sKTfmC6i4mcQ\n",
+       "BEEQBMEcKMnae3XiTy9LfP4Dkj5QcnKek3P3XquYpDxh7vb3apOk7u7f94CjvhH1qXLgGXOXS2xT\n",
+       "K2QsUO+JeI2UV+NQA4Trc+WD91u9P8ZpfyOVXUiWGl5ZTklKgd3RfyhBKWWFeA7fq5DPY2fUa0It\n",
+       "oH1egwXvy73GlJKEl5YDOyrNjhzKyx8SPP++CkAO+pTaXIzl2OcFFABsr1aRStmE70NZqzpiQ7m5\n",
+       "hc2jgHEejwXy+DlsrrZdQ9lqan9R2sl48JszVKyUZ5Cz9qeui/f9/F53K0WtAsZaxPnmtTZgR9Sy\n",
+       "G5qo8RQEQRAEQdBI76y9PuDxc5eOklSqxDi+IzlwF8zds98N84rX40qA4xWv+9a34Tl07Q7g/v0U\n",
+       "fe/6qX2yv4GS5+NMvAL2gkLUOs54xdhXzq6wP9QE7Jfv0+7du3dLku6+++41j4ci5tmOfWOWSuNg\n",
+       "gP7sWxNoPZNSXlrjFku54YYbJHVrYy2pMca2SlXMFB75QTvpF9RTFAXWImwJBeaggw6S1P12kBnL\n",
+       "bwpPD1xFdcVmaGXI43O9dhvKJHPc25OqdZfClbrc2k/7vO4Vvym5OVtrt62/7X0ZOyswFKkgCIIg\n",
+       "CIJGFqpIcffdtw4T3gsxI+7l4Mnzd85LjEiqsjdeEO3zfY043lC1R5aVvnfzuef0i8L3FgRvJ/bV\n",
+       "qkjh1aFgprxe3if+hPNxftrJ32+99dY12w+8715g35i+Wmoz4nL7mO0Lj8FYdlhD+io7KXKZvIvG\n",
+       "6xSxlmLzjCdzAQUE2+Z95qxnpPK51J59rE2uCA2VaerXx/k9btHttXSOur3XPj2g3/ieZ/6ul3mU\n",
+       "Y+yswFCkgiAIgiAIGlmoIoXXgcfcGidAlhXKkXsdeLheD4nzphQX9+JSsSv7y1370JCNSD+P9Xzc\n",
+       "M3pKve+UvfE+3lpttWCHOAdeU+oD/YN3yHXgJXtWXg7s1O3b60otG33UgNq5yJpx5JFHSurU6bHn\n",
+       "NJXOUblzlcD3VzwjmDnnKqvXymPcfI4Sb4sNcRx+azxWzPejHEo1T8UR0m5XnHxXC4+hSuG/QbV2\n",
+       "i3K33iuLL5pQpIIgCIIgCBpZqCKFF8FddGvtTrwM38kbPIbKs/tQAHIxMK5scb5l3ZF6KDzz6XYl\n",
+       "uQAAIABJREFUpJTWLMRaWp9/4w17XAF2gZfWd3yJk3jGM54habUyh30yD9ybboXj1FYfLoV5RNVu\n",
+       "Ys7cu23NGEudb8hYOzIgvXbXWFArbvPmzZK6OkIHmhIFbpu+ljM3WIMYe7LLUI9ReuhHbJC12n9r\n",
+       "wL/Xdxy4nlTFfJQ1V6RaY9iG+u0ZK2v0QCEUqSAIgiAIgkYWqkjhqXNXjXfQWluF73kWoO9j5HV5\n",
+       "crE7HBcFwxWvvnWkxob+aFU4WrO8hoohq62lUkoqWxSVArug/a1779F/HmMH2HkqixD7rbUzvGs/\n",
+       "nlf+d0qz5tjHzVUdahcBmVSlyqHvxwVjKFLzqjAO9CmKSipjeL3Amsga2bc/GXvGmLWWOYDS5HOX\n",
+       "fvVdMjwGLaUQ+fteKb0UflOOPvpoSV1mLYyteHqWYClcL2tVxEzVEYpUEARBEARBI0tRRwrwQLkr\n",
+       "LlWkiMXh+56VRAwM8Qj8vTRjgXZ4Zg3ZgUNno+EdoCT0fW7fN9amNQbJM2FalQRXAIci5T0zntiH\n",
+       "V1uuVUpRHW655RZJ6etJZfPllCjP+HF27twpSXr5y18uqVOSbr/9dkmdfaAsEfN05ZVXrnk8qkhv\n",
+       "3LhRknTzzTdLSu+9V+uFp+y95Di1NajILJ1XPB97fTFWy65m52BOHHzwwZLqFSmfSx6viE2hSjM3\n",
+       "UaR4RYnxCvK5tRnFijnEXGeOsmZ5O1PqqGfe+vm9TpbTV8VvjbXi+vz7y1IDkH4jQ3/ZCEUqCIIg\n",
+       "CIKgkYUqUr4Tc+teXP493+GcfZdcWXElCu8GxcpjYvB28SbwfvoqJp4NSEwJCkVux3a8qpT3xfE3\n",
+       "bdokqVNa8B5zsT+tledbn7P7c/7cDvFDc++99w56PLfP0n7BG8QbS9WkQUnCq/Tqz67Q3nTTTZKk\n",
+       "Bx54YOZ96lTlYuKYR5dddpmkzp7GIhUztRZca06RYk6kFCn+jjJAVfrWCuTMaa6FOetq97KRU2FR\n",
+       "iqjDhc3edtttkvLKSm5twya94rbTaoNeId3jdbED5gRqLeN45513zhyPftqxY4ek9D6eqUzavjFU\n",
+       "KUWKceE3hjWf/5944omSun5mzX3JS14iSbr//vslSdu3b9/n+TlealeFWjge4xGKVBAEQRAEwX7G\n",
+       "QhWpeeHKF56tZ7PxmlJo3IOHlMJQ+nzZ24GCkfPWIHfXT/voh9osNI+xwksj+8u9KNrj8QV4YcRT\n",
+       "4J3xfbwgzyrzfbf4vMcG+d6NtBPvn//zd/rXlY7WWKihwev1mDnG0+015c0+9NBDM///2te+ts/z\n",
+       "5uIsatUTVAtUg1Q2oXvN2FGNIuVzDcXJ91BDWQBXCrzPuYbWfTXHVu3GIjcH7r777pnXoaCfXelC\n",
+       "RWetcKWIuY4N0X7WPrchbJ1xZm1g/J/1rGfNnJ81hONyXtrF31N73hGnS005B+XT61DRPtaslOKa\n",
+       "mrvMC1/zmcvXXHPNzHUB8Y+lMYTHHHOMpNX9wHV5PDPjx+fpH/qfHQD4TUTZhaOOOkpSZw+MB2s4\n",
+       "32P8U/HCfI92ofKXzvdQpIIgCIIgCBqZ9M3oajrpZDJdWVmZ+3mDIAiCIAhqWVlZ0XQ6XTNgOBSp\n",
+       "IAiCIAiCRhYWIzWmIsVz1Te96U2jn2tvOA8ZDkBGCM9bP//5z0vqnnefeuqpkrrn8bt27ZLUxXds\n",
+       "3bpVUpexwPF5Hv3Xf/3Xkrrn7jz35nk0z515n/gBjkdsDc/RvbYKz69f8YpXzFzn2HCe3//935fU\n",
+       "PfdOPe8vhTgYntvTL7/92789c176h37kvPRLKmbHaxnR35yHzLKzzz5bkvQHf/AHM98nHofx5/+o\n",
+       "x2ShYlfEkXiFdNrNdb7+9a+fOZ/3H/3C8bAv6k2RzUj2JzFMxA56fAX9OE97ueCCCySlM4CJfWAM\n",
+       "PQaC/xPbQeyL1/35nd/5nT3nnAec5xOf+ISkrs+poM1Yc93EfGAznm2ITRFz8vM///OSpIsuumjm\n",
+       "fJ/5zGckddlcjDFrhseH0k/er8wVtxGfC/Puz09+8pOS8hXmfU57HSvWBl9TsLe3vOUtkqTzzz9f\n",
+       "Utc/9D/26FmA/JYR+0S8I+ejvzkO53/Na14jSTrvvPMkdePF54hTpbYdvO51r5PU2dFXv/pVSd08\n",
+       "8Exi2v22t71N0vjjx/ne/e53S5I+/vGPS+rskzUMu6KfsNPaTHKu99xzz93356qOGgRBEARBEOxh\n",
+       "v8zay9UcGYpU/aYvfvGLkrq7Yf6OF+jZeFdfffWaxycTBm+CfZt2794tSfq1X/u1mf/jpXI+Xv0u\n",
+       "HG+JWi+Q2pMPr2pRDF05nv70bDbAC0GR8rpKOfuiH/EOySzhe3iZfj73Fskk43h8j+My7l7zyLP9\n",
+       "+Dx4LRng+yhenP+0006TJB1++OGSugw0VI5cZheK67e+9a2Z87tKgp1t3rx5pn1k3PA9lDi8zjvu\n",
+       "uGPPMXJt8Wyq0ozARWdwgmcOu4IBuYxfFAZePbMZqIpfitcyy9FaO3AoSvc69Cw5z/QGt2nPdiMb\n",
+       "z9cU7wfmvD/dcFg7WOO95p5nOqf2FgTmgT/VANYarnve88Lb7dfLdQ6VJVta0T0UqSAIgiAIgkb2\n",
+       "S0WqdV8g7t5Ld8BO/d3jCaD27p3n3157w+/Kvc4SypcrF7kMTWK06D/Ok9oXatnYsGGDpM5rb93H\n",
+       "jOtHEcQbpT9y1Yc5L5/D20ztOejqAEoZdogil/Iic/vLeRVllC3OR3tRxPBG8daJlfJaPShGrmz5\n",
+       "HprYH/abiy0j1o/+47rxPmnvWhXva2MgvC0pVXYoUCdRxlorpQ9Nao884uXcRvcX+taMq6lxJnVK\n",
+       "Cbad2luPtTunLDInmbOuxPiawJxJ7RaBGp1S60v3sJwX9F/f/Wj7EopUEARBEARBI/ulIuUecSlk\n",
+       "SaEY5PZ4I0bDvTRiXFCmUhkuOXiejsLC92kn4LHjLXA+Xku9a47vlaUXfbdfCgoKipxnbNRCv6KA\n",
+       "1O6sjpeLfaA+eOV2vEpXslCkcgprrXLq1Zs5L//3OINLLrlk5vsvetGLJHV24nh7Syuhc35eDzro\n",
+       "oJl2uLftle33RanSNHZdPTz+VmoVkFJQAx1seL2sAbUQB1irSLX+xrjylFK3S3e1SO22UQvKjsfl\n",
+       "LjvLYpehSAVBEARBEDSyXypS7hHj4eY8d2KRSu/GU5/jfO615LyeQw45RFLnYZfWvuDvPO8nZiV1\n",
+       "t+573Llywt/pR5S3WlAUjj/+eEldBorXLhkKz1xB+alVpIjROe644yR1XlptnAgxZ4cddpikLrYI\n",
+       "xQzoZ+yGmKpSRdHHx2OW3A7YV8t58MEH13yf/qN9kNrB3vEMpRQodjfccIOk1fErZPXR/7SnJKtz\n",
+       "ETs4jAFxbNg4tprKuivFxxZQE1vjDXO0KjvQN8apVmUG5uyyKCLg48jaw9MRFE36jVg91ppUliBz\n",
+       "ne97PbEDnVCkgiAIgiAIGtkvFSmn1ButvbtOxX64opAC74FMHjxuvp+q4eLeF7FRpV4SihP483j6\n",
+       "C2/RP18KXjPtSu2IPhatlc9d4WvNWqRffad3V0w5Pv3N+PJ+Tg3wmCu/7qGyLukXKuWXKpXE3+Ri\n",
+       "pbD7VG0hYg+xb4/lOxDwiuFDZVGlxobYsrHw66mlbx2j0vi9oWAtZc4yV/sqfjzN8LhF1nKP9yz9\n",
+       "TUQ9p59pP+/n1ljWHtT42qxP382iVUEcm1CkgiAIgiAIGjkgFKl5V1/Fy+EumufLZNtxF08MDRW0\n",
+       "8c5yd/nuTbi3kSOXEeJZhn37z6vjLjsoIsTsoKTVekN8nmwz7CIVa8TnUabwLnNVelP1qaBvHIpD\n",
+       "PM4RRxwhKV0LB4aKT8JuOV9pjNYyQX0gXrG1XIZwilZFBZvAZlJKQali5Huvla5F66VG3VC4wjeU\n",
+       "DaPK+vFSsXM5JZP9NLETdtXgPB7Pm4K/91X+WAuHfqpRk/m7L0KRCoIgCIIgaGRduHR97xr7esQ5\n",
+       "j9shdsRjpfACuKsnSwrvDe/M9+bj+3gH7nXg5QyVQUG7UaRaqy/3zahZNNgN/dvqPWO/Ke/es9oY\n",
+       "Z1emvMI6eB2xsWE8XXEdW5HyfdxSmWbLCPsUUn2fsU3tlZciVe+pFlcvU3GVxLbkYIxrY31a4xiH\n",
+       "IrVf6ljQT8x5n9u5tTL1W4hS4+NaG+OGEvW85z1PUlenCkUKULpy7c2p5TnGzg4cKuYqFKkgCIIg\n",
+       "CIJG1oUitehI/aHuhj0OwbO1iIXBO/S6Q3xu7P5whaT1Of56VaK4Xrw5/t+qrDBuKFKeBekZKT7O\n",
+       "rkx5nAD2kvKqW73tlKJIe4h9yylhHqM1lFKZyu5bRlB3GdNFZbKmSM3xUiVhvc71eWd+sqZzXtYU\n",
+       "+s9jmsjuY01mTfKnBGR8+9rCrgleH8rXNOqToZiyxyLt4fOowN7usVn2OnChSAVBEARBEDSyLhSp\n",
+       "ReGZLaWkMhT8rhov9dBDD535O1VoURo8a8s9/KGfH+MFoaBQv+dAAa+P6/esulo8/sHjFogT4dX/\n",
+       "zv+Jj+Fzbk+peIjWGjVklnmtG69Vk9vHztWOobxYYhFzWY3LwEMPPSSpUxbw7Ev3VIOxFACvRQa1\n",
+       "cXe5mCNshzXSa6zNm751rHK4uk+lcdZw5hS27Pbg+7mm+pXfEP+tIjPcFSneR8HilbWetYb2MYc9\n",
+       "U3f79u1rtudAIxSpIAiCIAiCRkKRWgOeK/PcuTazpnT/JRQp9nIrfQ7sXgftJfaE5+e0w5WFlIKF\n",
+       "l4T3gze6Hjz+IcHrx/vzasC1WaSoEeA7trtixHkYP7xDvEa8eL6Hd4mXmNt3jXHmOlIqB3biihL2\n",
+       "xl6KfH/Xrl2SViuYtM/38sv1Y26PzGWPm1gL1OZWhoqPRInBxlLKDH3M2sLYMyZuu/53b6+vXWMr\n",
+       "Qjmo7XffffeNcvxUhi124HvduU0zt3K/KaldMFw9BtZ24m9ZW5jrxD/eddddM59n7Wodt9anPMtO\n",
+       "KFJBEARBEASNhCK1Btw1u3LgUJHcP1d7t903YwRvIRVvUVoHinbPqx7RoiDuAKUnVZtnrOf/rsyk\n",
+       "FELiSHhNKU233XZb1fmPOeYYSZ2agP26N4wKgbdKLBSKJd40e++Bqy733nvvmu3IqSspJQpq9+1a\n",
+       "D2CLKAU+p0888URJXZ+SbcX3UPnoG95nrMjG8orV1A9yqGCNIuJ7xZE5iWrNWsYc4/98Hhvi/8ce\n",
+       "e2yyL+ZB7dOGWjyedceOHVXf77srROr6+E3w3wbPIPa523fOtSpRKGuLzuBPEYpUEARBEARBIwtT\n",
+       "pJ7xjGfs8XjxYH2/JjxS7mLJCEEx4X28rv+fvXcNtqys7n7/y5jXSplKjtGEcIcGmqa5XwSiKIdo\n",
+       "QI0xJ5oYY2leU4oxQcErclHYwdCiIkYUbyXRJCZaWpbXilGCSAKxBWmu3dxBFMuYT+fDqfKDb9U6\n",
+       "H+DH3Ovfe+zneeaca6/dMn5V1KL3mmvO5z7n+M8xxsPfee8NJ554oqTu6Zosw77bPNYT5+H6WFdH\n",
+       "HXWUpM5yx0dkx44dM9db65wqa5UDpQTv16NstnxPP2Ld0u/Uo1Qfxgfn9wzk4AoLVr775vTN5cO4\n",
+       "YLxhfRIhEyl7i/YLuf/++6uOc+UMWtuLfka1oP6oKb4/G+MEnz5XpvBdRC1ZZD4plBzqQtkZw3ff\n",
+       "fbekWF3jePzINm7cKKlTA13tww8NaFPOg38aaxNzAlUThcst+6uuumrF8uG/6bsq4L/G313VRcHy\n",
+       "nGGu0OCDsyhcDXafIY+ULfkforRxPP2wKKIIYNZgjzDftm3bquejXejf1n08GT++Bpciz/1ev95I\n",
+       "RSpJkiRJkqQnk0VEvkwmk+nS0tKaXzdJkiRJkqSVpaUlTafTFZP1pSKVJEmSJEnSk4X5SK2FIsU1\n",
+       "LrroIklxxADRd5s2bZIkXXPNNYOud/nll0vqIhzclwc/hsgHBd8b9/3g/TJ+GS972ctmrjtvuM5a\n",
+       "X+8973mPpPYM3bxPxz+jFPHB9bZs2SKp6zd/f4+PTl/fKsp1zjnnSJLe//73S+r8BvC54t/4weB3\n",
+       "4eODdsE/w/Og4RfxR3/0RzP1HAvPbI7fxAUXXDCX60UsLS2t+djEp4RP5ih9jP8nvjJEqeGbQhQX\n",
+       "Yw2fItYIfFKe97znzVyXPdHwMRoazYTfKL5cZ5xxhiTpn/7pn2b+zpjE14oxGUF9yOhNvib3I43W\n",
+       "Ft9loBTJGeG7A7z97W+XVL43kIOPNcDry5zDJ877wef6pZdeKqmbu54Jnhxx7l/p+3/iH0iOOn5P\n",
+       "/7z1rW+VVJ57HB/t41nLWPeG0i4J+BIyPt/73vdK6tqNiGT8lj3qER84jn/ggQckdeOLtRdfReYv\n",
+       "99qIVKSSJEmSJEl68rjII1XKXUEenX333XfF77H+eHotUdo/i6d/j/CAyLrEKiL3Sy1YHb5vk5cn\n",
+       "2iPQWevstIcccoikzorAasK6xZrA+iVSivbC2qaevoee5wGLsgjz+82bN0vqlCKyCqMEcf4oi7Xv\n",
+       "a+b975nkKSf1iaIaaRe3Kl2RGhu3HheZtTiaU/Pi6quvllSeO/Rp7dz1sYMiBUSPMeb7KlIeIenn\n",
+       "QRHz/FC16jDKU6REOZ5xnbHVup+ojwPO4+WmflH/lXLwlcaZzw3fJcDnMvUsjSdfo/rup0l/s1ef\n",
+       "rx0oNLTDvPM4lXy2vXwoekSRcq/zPQ6Be5/fA2lPIq73228/SfU5FVORSpIkSZIk6cm6UKTc4h6K\n",
+       "586o5X/+538kdVYX78dbc2VEYP0ccMABkjofFt6Xo3iVrAvPk8XTOOfj95wXKyLKXxQpMNFeZ2ut\n",
+       "OKDwUX63Kqgn1jkwnvAj4f047US7+M7oEbQHoGCiRHH+m266adXzeDlLYC0yfhgnlL+0Z55beZyn\n",
+       "rxU7L5hnnq0bdaZm3LnFP2/23ntvSfFeZ/NirKzujF2UFR8TKGl8osoy1kt5lTh/SaWHaC0q7cuI\n",
+       "6ox6jcrsuxP4GsjcwPfG98Ucit87qF+0ttb269i5A1F2UNtRfnzvvRL4G1MP7sWMq6H3DsYT0J6+\n",
+       "9+PQXH30W21OyFSkkiRJkiRJerIuFKmxc1m1WvzgPlBjZ0zmaRnlC+uHp//ap3V/SuZpvGRNRfXB\n",
+       "n8Ctxr4RMmNDRnqsIqwN90/A+vHM5/wbhYr2j96jR9A++LlQDqwXrCXOH9E6rlBIsfaYL4ybyBcr\n",
+       "ggiyRWeVdugvz5rcYsXW7isZRca2gjqM/x5jMlJWWmkdo/DCF75QknTbbbdJ6jKUOz5W/XqotfQB\n",
+       "Y622fkOVs5JSRAQz/n/XX3+9pPp9Mmt3U+iLK3yudPDWw8etZ4QvgULYN+qO8cF1Kbf7a5ZgHrgS\n",
+       "Odb+rf6s4PdOxq/Pa49OZO2OlDbmRbXfcNVRSZIkSZIkyU6sC0WK98U8BbcqVL7XHtbmeoUIlr6R\n",
+       "EH2tPNqX9j744IMldU/1tX4Maw3WQ0k9IIcIVp9H743lgxe1U62vVa1i6uMafxSsx75WHuONdqJ9\n",
+       "x7Iah+J7YM4D6hz5qtRCX5R8hfoSrQ0e3eb8+Z//uSTppJNOkiSdcMIJknbeuw88khhcDaQ8tYpU\n",
+       "3ygvxiblihQpFBMUBOZ8xFrvg+r4mI4U1NZy9r0noNQw9z3KsRUUHu7BHlE9NOqvtEZxj/N25br8\n",
+       "vuRHjW8Y/polUpFKkiRJkiTpycIUqSc84QmPWYO838X64H01T+Ul68ffexKVtF7BCugbXTg08gHr\n",
+       "m3ZdZN6fGrAKSn4M+FLhAzR2zhOsZM7vO6jTr6gTrnIwPmsVU7cSUdSGKkeUl35fL0pUBNGRY/p0\n",
+       "jaW+Rr5H86a0Jl555ZWSOpW05DvGWHXVNspaH0FErOencoWFuROpxIzNH/zgB6teDwXki1/84szv\n",
+       "Ihaxt+x6hjVyLPXX85GhlI3lMxhF5daq2JSjVB6+r40CTkUqSZIkSZKkJwtTpJ70pCft9N7Uc0LU\n",
+       "+rSU8iStN1rrNzb4QaDktUZmrDW1yt1a+NZInTWNMsW4RTFD4cE6xqpxf4FaOG9kRbkvVYnaSJT1\n",
+       "gu8D1xql+Hjkm9/85sxniciPkLcFrBG+K4D75nj+IaLSfF/IsZUhzs91fQ+7ZG1AefS3HmNRUoha\n",
+       "18II1hgU1hKpSCVJkiRJkvRkYYrUz372s50ycPf11XFFBStqvTJUARpaP3zIaPd5W/goN/hptEak\n",
+       "uE/ZUGvWM2jX+sswPokg4t9uhXnGcawjPj0yqhSBhXXt0XX4GKLYsOO5E2VXXu9QL6//GOB3xdhf\n",
+       "LznT1hu//du/Lalbsxg7pahHV7hcVWbtH2vXiOi6iwZF7vEC7T6vXRNK/YraP1SR4t5Ye69JRSpJ\n",
+       "kiRJkqQn6yKP1NhRY2PtQ7VeGRqViFWJEsN7Z7IIs0/VWAzN3eKZrodG41FPdvh+8MEHm35PZApW\n",
+       "NooJ5WI8Y6W7VePZpPFVi5Qi+od93XbbbTdJ3ThAHbjvvvtWPM96j8p0iGpEMWT8jJkDiHPhAzH2\n",
+       "Hmu7GviWeBvzd2j1QWLsRX6Ou9rYbGW9+5+ODZnNGSesRWPlWSuNt7HUdu41jPPi8aNcNUmSJEmS\n",
+       "5HHIulCkoHV/occrQ60cfJZQNLAi8OkZW5FCQcG3C1+pWuUQq5hyAtFnrVFoKHGcr68/BdY0VgvK\n",
+       "E+/no/frKFBQikTBmmOvPa6HckOk1K7i+1SC/vQozDHHJW3KXKBPdrWIxlpKewsypnxORpnNUZhK\n",
+       "EbVr/XYAhRFFYWxFqDUT/q6etwp/Tt/XNIJ2517O2spa26pMuf+or9WUD6WK8ep7/bWycePGmeuX\n",
+       "SEUqSZIkSZKkJwtTpJ74xCc+9vQ61JL2p9a+GcNrQbnBOsNKqT1+KEMjEnha5zy0W+1eca2gKKG8\n",
+       "tO5QjjVEOd3qbAXFM4pyK0F/Y/088MADVb/D6t9nn31m/l5SQaj3PffcI6nb8Z52bbX66YexIpvw\n",
+       "ZcJKHxpVSvk4n/uUjQHnZm7Sp62KFHUfqnywZrGWUS6P8OxLKbdaZLm7gsB5aD+Uh1blYl4wt+64\n",
+       "445Rz/u0pz1NUhfFSEZ75tDY+apQEGln1qxNmzZJku66665Rr8e4Y01jDhPdum3btqrz8DvPIM6a\n",
+       "3apI+bOB/5s1lXYiSpK3LX0VKd+z8cUvfvGqx6cilSRJkiRJ0pOFKVK/8iu/slNGc6ywVh8pfHCw\n",
+       "htyXhmgn9mLDinRFpqQY8TtyZHA81wdXoMbe820oRHfhq+QKwNigdKGEtWb2ditm3lYv1oxHLDEu\n",
+       "sboYP26N8j1WOudB9fDcMlhVvr+TK49Yh4ceeuhMOTmu1sfwyCOPlCT96Ec/ktSNZ1SRWsUQK/ak\n",
+       "k06StLOPWF8o/9i+eiuBmlfaiy4CpWKoIkXbRfmYWiEylb3o+vrqRErWPCIppXYfJIf6RrA2t0L7\n",
+       "eR6tUv+U3lZEMK4OOuggSdJPf/pTSfOLcnzBC14wc73rr79eUrsaTPu7EjQvXznmLf1CO5XYc889\n",
+       "Z37nUbut8yUVqSRJkiRJkp4sTJH6tV/7tZ3y4PCJdVfa+RulCQsbXEngKZ6nTCzwWsu5FAHiO8Bj\n",
+       "DZI1GYUC5cKz/bqVgbJwwAEHSJJ+/OMfS+qUGJSxVlCE+D3txL/H9isArBraca33vypFLHlkxtFH\n",
+       "Hy2pKy/9xXhh/NRav67UeNZf9/VyBZOIMuaL+x9wfqL6aG+ULa83/gooY4xXxgPWGf5CXJfx7zsR\n",
+       "oPTyd+r5rGc9S1I3zvEvob1R7Dgv44L6007uF8G84Ljl88Gjg9x/krbG4qcNKGPkU4Gy4GsLdZ8X\n",
+       "ff1HWRtZS/r6P85LSaA9fS+zkhLFGGJOMgZQJkq7NHheIMZDSelxxZBxMC8FjHufZwofKx+T8+//\n",
+       "/u+Supx6999/v6T2fUuZbwcffLCkbg3x/mKu0z7eD4cccoikndcGXyv97QbfM67oL87raym/p105\n",
+       "jnWhVp1PRSpJkiRJkqQnk0XkuZhMJtOlpaU1v26SJEmSJEkrS0tLmk6nK6YESEUqSZIkSZKkJwvz\n",
+       "kapRpI477jhJXXRRrUe+X6N0Ld7neu6IWngf++Y3v7nqen2zvDpeP97r4tdQeu9PVufayIza9uwL\n",
+       "763xB+A6W7Zsmfne/TboP6IR3ceI9+b4NbiPFj5Ub33rW2eu6/mW+D0+QJz3+OOPl9T5NeCnwft3\n",
+       "+gEfJvwO3vjGN0qSLr74Ykk77/9Wej/P+aLoNvxJ8BM466yzJEnve9/7ZuqFfxA+dO5Ps8cee8xc\n",
+       "76abblrxeu5z97rXvU6SdPnll0vq/EzwU/F8XCWfSGCPRN/H69xzz9Wll14qqRvb+DB5PhuuGfmA\n",
+       "lPIi+VxgbOJvRl1Zu/y8tBHlwheE3/H9XnvtJUl60YteJKkbK0QK04cl36AIfFroO8Yc9briiisk\n",
+       "Dd+LsBSN5+0ZRaCyxvWNsoyuV4J+oL9ao9lK1/O1z3FfP4f25d7y2te+VpL0oQ99SFJ8r+EeyzyI\n",
+       "8lORuw5/XS8n9frYxz42U073OfQ57/3L/OD7KK8b1/vABz4wU/7aCPlSe0fXi0hFKkmSJEmSpCcL\n",
+       "3WsPD3oUAVecsNJalahWnv70p0vqlIaHHnpo5vtSNtnWTOpYrb/3e78nSfrc5z438z3ZebF6PLrQ\n",
+       "82QBCkltefpmjEapwXqOrIDaDNooHVirbiXw+yiCiHpgtfnvff8lV6RclYii/Nwa5Lxbt25dsVxu\n",
+       "baGOEDEGvs9ZbaRIqf8ilcIjkDwflYNCVYpy5Xs/zq1S2hHFrFaJAo5HlVmeq6fUZyhMJd/Q1lxl\n",
+       "tKVH8EbnRXFhrrpSw1iIcpmhEA3NfO4RoL52jLV/Y20+rCiXIO114oknSpKuvvrqUcpHfaPxwBpG\n",
+       "eU4++WRJXZTbUGUMSspIqZ60r9ejNI5RmEo527gnlvKGUY5IeSxFZVJeH/fgUZes+aztkXLqil6t\n",
+       "ElXLQh+kSpvO8qomgsamcfomK7vtttsk7SzDs9ATbh09SLV2Cud59atfPXOdT33qU5IPTw4qAAAg\n",
+       "AElEQVTKi3G0keK8QmMdFu/SIlLbHxwXna90ntrFrHa7gOhBikW+tl7Rg6Q/yPXdKHesRK+ekBN4\n",
+       "4PcQ4RKlB3nOF5WfB6RnPOMZkqQbb7xR0s6pBvi9bwItdQ+H0WamY8+V1rWHG0Z0Y8IoitYWbkiM\n",
+       "VefYY4+V1D0E1yY39Rvx0OSqULtNVvQ97cQaHT1YeLJl+iU6vvRAzdzFmP/yl78sqT2p8Lyh3byf\n",
+       "+Xv0qnrspLcYAEMTtfIKlzU7egBjnpRePXv/8+zAg1nfrWQeK8egXydJkiRJkjyOWZgitdtuuz32\n",
+       "lExSs9ZtFtxa4ilzJQt1NZA33ark1UkpKV1fefkzn/mMpE65qE1+N7Ys2UptyoxaKx2FLTovCofL\n",
+       "7A5O0VHywdpEoJF1gpVU+0oUtYBxjfXk43OszYNrKV0Pqx4llv6h/FESSlQH3zLJYVxEViTbVDzv\n",
+       "ec9b9bpcbyXVJDo3FvpYTst9KVnszPHSXHNljTah7ejDv//7v5ckXXfddU3lHLptVKtTbwnmEnPI\n",
+       "1Vxv17FeTfr5x94aJ4LEqv62pBbG+9A0R7W/H6td6GfWHsahl4N53qoIR24IfUlFKkmSJEmSpCcL\n",
+       "U6R+/vOf77SFxlD6PmX6dgo46fKUywaOteCHgTUQKU28n64NLS45Rtb6IdRublsC5cKtBxSEaCNW\n",
+       "/z1b6HAeLxfn8e0BHJQowuPZOugrX/nKqr9D6YLIiuX6+LShTEXWkCusHNfXyo+s8AjGg2+SHI0f\n",
+       "FF1+h9N7tNWM45uQR3CeaPsMVBN8F0vrA6pHDawN1BX1jL5di42SW2hdy6gXbYwidcwxx0iKFamh\n",
+       "mwVHjK2e49NCOX0u8G/fLLi0TdRY5eq7/RVzjP477bTTJEl/+7d/K0natm2bpC51yS233LLieXyu\n",
+       "w9B6j62acw96/vOfL6m7x6KwRuMmCiRaNKlIJUmSJEmS9GRhitTPfvazx0Iqsbp4SuXpfK18gXha\n",
+       "R2nYc889JXWJB1sVMxSV0u9QNDzRY4nIkq+N4hqqRGHtoQTQfkR5YQVTnxtuuGHm99SXJHdYc1iN\n",
+       "kcJQay0THl8bVl9rbaEIUn98/Kg/45ZyRkn0+lqtrdF9vkEoROMSfyH6h3bhurXjpuTr6Jtmc36U\n",
+       "W/q/lAaCdiY9SQ2MSeY6iQZbUzCsV2jLr33ta5K6+pZSyDC2S6rvvCkpR9QjUvmZc3wyB+bth0i5\n",
+       "+/pQUT7q/cUvflFS1x+85Shd39X12k2ZI1DBGUdjRQrTPmyOvKuTilSSJEmSJElPFqpIARYoVgQW\n",
+       "P8oG+aTG8rB3HnzwQUnd07bnwcHnBt+pUjlak/nV1gslYa0iRiK4PooM/edRaSQWxRpCYUChob/5\n",
+       "fl79Oxb4srHNiFvFbq0xjlxxXOsoPVekojxPjC/K2VcRrrVaUZQoH9Y0yjTXj1QKj4Zsgd+iRBG5\n",
+       "+4vCAw88IEm68sorJZX9yFC5oxx1a0VJdeb7yCcJtZOxxFo87znnSYHH4gtf+ELVcbSLzxH6vaRm\n",
+       "057c66gPazj34NpkwbVs37591PMtilSkkiRJkiRJerLQzOaAJewWPlYF/gz4VI0V5QduQfv5+Z7o\n",
+       "skUrJ4v2Y3AfMPqJ/EtYLSiKbAFDvjAUKf/donL61IJVW6sIctx6iSyBknU+dH6VrFb3HaOdGNf+\n",
+       "e7bUKfnN1ICaiH8eytSic7O1Urv9Et+j8nmkLaBoRP6XQ6mNFC59j8LCRuXsEkF9UMl5qxHllKsl\n",
+       "aq+IsfNW1cKc8i2Dau8VjCfK7xHX8xoXJcaKMJ83qUglSZIkSZL0ZF0oUpHCw9PxUKuiL7w35j37\n",
+       "ov0HYNHlcL8DrAWP8sKnjKy8HhlFvy/KiutL7T5b60WJcmvS96GLjnellnGH9Rr9vqRoeWQV48X9\n",
+       "Y1CiUZEiPFKp5tr4Xa3V/pR9QTlz6MNaf0xUvWgfT+buvHyJxjovqjWf7itVm8usFsbL0DxR84Y5\n",
+       "FPlplmAtZjzxyVuitfbrBL8Ht8z1taSoSE0mk7+fTCY/nUwmty/729JkMnl4Mpnc/Oh/z1/23bmT\n",
+       "yeTeyWRy12QyOXVeBU+SJEmSJFk0NY93n5L0IUn/uOxvU0mXTafTy5YfOJlMNkv6U0mbJe0p6d8n\n",
+       "k8nG6XTayzTHp2ZRkCUW6ybaY42n5rVirfw5UAIiKyAqh+8LFSmOQ3fcHouWzNhSu4+av+fH126t\n",
+       "QI0Aoioj6zqKumMeDLX2yR5OO0TtiUJ1++23r/g9tIwj97NatLobga9L1DatOcXuueeewWUaAjnm\n",
+       "avepLHH33Xev+HfaZSzlgjlSUvRqac0ZOJTaKDvmdIu/4VrAWoS6v17fXhTvCNPp9D8lreQFvJJm\n",
+       "+IeSPjudTn8+nU5/IOk+SccPKmGSJEmSJMk6Zchj+xsmk8mfS/q+pLdMp9P/V9IekrYuO+ZhPaJM\n",
+       "rQkbNmyQ1OVQaQXlgPfBT33qUyV1T8XRnniliIKx36+X9k2KMmq30jdnCH4nRLqUfFxaIfP8WLl/\n",
+       "XFnD+hyqiLp/AlbyWiuYHg1Jf6DkjJWtGDxyyGG+lNq3Noqzdq9KaeccWfggoZL1nTNjz3F8QnzP\n",
+       "OFgv/ncl2MuQMTGWIhWBT89YygWKYKty+Vu/9VuSdq7v2P3GXGatHqqwuv/i0D36ht6L5rU3Yi2l\n",
+       "tQz6Ru19VNL+ko6S9BNJ71/l2HFzFSRJkiRJkqwTeilS0+n0scfsyWTySUlfe/SfP5a097JD93r0\n",
+       "b6vyhCc8QU94whMeU4KwwvDlQCHgqZtoMOfhhx9uqMXOkPeIcvAUXfKJGfp9idb36uvlPTJKR0lZ\n",
+       "KOVq8fq3KBB9oNwokh7ZFSkpboWiSLliudY5i3w88O+xlCiP8qPd1jOo1uQ4o8woU7V4JGMtrHGl\n",
+       "TN6tuySMjSserb/DD27ecxbGXvtYc1rVb8ox7/xLroAOzTxOf/VtR+7Z+Kr1PQ97C47tR4s/LP1S\n",
+       "Urx4FrjmmmtWPa7XHX4ymey+7J9/JAlv0K9KetlkMvlfk8lkf0kHSbrBf79TIRacYDJJkiRJkmQ5\n",
+       "T37yk/XkJz9Zp5xyyqrHFc2oyWTyWUknS3raZDL5kaQLJf3fk8nkKD3y2u5BSX8pSdPpdMdkMvm8\n",
+       "pB2S/o+kv55WpEn2HBX4wPh72kiJgtLTLz5QfPI0j5/EcccdJ6l7msZqLUXIuFWJdbv33o+Ic313\n",
+       "uMaqw3odmuulr3U5b3gPjfXnvkWuxM1bcSspaOw/RSQS/U9uIoZ8ZIW78ub+NVhjnBel1P0XOJ7r\n",
+       "osiSr8t9/oDxtO+++0rqxgPzDgXQ/Tu4LuUnzxNWKPVFmSuB/wzRsffee6+keqWMcUN5a3D/tKHR\n",
+       "bIzFyJcJpYqyYgljPJZ8hlrqNg/o29o1g/L6WI3WrHn5C9ZmJGetYa64Usj3tW8D+H10XVeqWCv6\n",
+       "+k6Nnd+JOUy/tb4NqZ27zH3q7WtlrRIV7cNJ/3v/cb1a8abWT7b4IDWdTv9shT///SrHb5G0perq\n",
+       "SZIkSZIkuzCTsfetq7roZDJdWlpa8+smSZIkSZK0srS0pOl0umKq+HROSpIkSZIk6cnCNq6pUaRK\n",
+       "O5yXfH64xrzVL8r5jne8Y+Z6+NCMHbHC++vzzjtv5nqt4LdRigzCf+D888+XJF1yySWSuvfO1J+I\n",
+       "D96T1+6Yznt43mvj+/PmN79ZkvSJT3xiphwPPfTQzO/x1cE3bfv27TPf8z5+//33lyRt3bpVK0E7\n",
+       "cj3epxNlx3t1fKRuvfXWmd97lKn/nnbG5+mlL32pJOnTn/60pC6rMO0R+WrhF8D4970o3feI9nr7\n",
+       "298+U0/8OfARxF+nNVKM8Uj/MV/PPPPMmeuVKM33EktLS7r44osldW3kbYjPBEq8Z5knDxFwHsrG\n",
+       "GDzjjDMkSZdd9sjmDkQD0Racx9cm91vjvPiVUS7+jo/G2WefLUmP1S/KXcfv3I+uFfrsfe97n6TY\n",
+       "T5S5EGX8pl0Ya4wt1ghfWz70oQ9JivdAPPbYYyV1uydEPmaUC79afJN87aR+fF/KCdgX2pPoL+Yk\n",
+       "/YuPD7kQiRKknRgv3FM4jjWDT9ag17/+9TPXjcAvmLX8jjvumPn+sMMOk9T5C0fjieuwltGe1I9+\n",
+       "9nbGV8x3EWEtYe1kLWX+ev0Y94yz1sz/4Hm4aO/TTz991d+lIpUkSZIkSdKThSlSk8nksac/z7fj\n",
+       "T+ERfZ86xyYqJ1F7RGGxP9Qee+whSXrmM58pSfrCF76w6vk9T0/k1xZlV8aqcGujVnnwSAysTKwY\n",
+       "6k//tUYFch6sFv89ShDWlrPffvvNfLoi9Tu/8zuSOqXnpptumimvg4oR5XuKco8wHkvjcseOHTP/\n",
+       "phxcr5STiOtH5UANoZ+ienikUN+syK7ytObJwromirCUs2U1otxlUXSPK1AO0VvMKc/Sz5hy9bWU\n",
+       "H4q2Yu6gQGBRo5R5n/i+jc6mTZtmvvc96Vqz9pfGcmnvOcZCNCa8HtQvgjUgys+EOk0/efk84pd+\n",
+       "oBw+lseGdmfN8/HIv6Pxg1IXveWI2jnKME6kL/cIznvSSSdJ6vrf7x3g/cD5/J7o1/W3ST4euTdR\n",
+       "btojWqOGRrRH5axVKFORSpIkSZIk6cnCFKnpdPrY0x4WeGum5bH3COuLW1FYqdu2bVvx+D/8wz+U\n",
+       "JH3kIx+RJL3hDW+QJH34wx9e8XjqiTUZWYluHaBIYMX13Q/MlYvIdyd6endFLbKq8XtwKG+U+wNr\n",
+       "HqXJwSrlPCUro5Snin7A1+muu+6SVG/lez/5OB6qtJZyvnB9lFL8AHxctPosYS16TqWSlY//RUnF\n",
+       "qcHHKtemz2r37kI1JtM5ZcefDJhjKFLR2KEcnv3exyJt4BmYoTR3yb9D+Z2S36Krocy5KM9RlP0f\n",
+       "UB5YgzxzPCoklFTyG254JL9zNMZZe0s5B8HbMxqj+BJ9//vfrzpvBOMvyg9Ge1EuH699801Fax7t\n",
+       "xPhkrl977bWSymual4fxw3zDd8vbFZ+o6O2FK4VQmyuy1dctUu9r/XxTkUqSJEmSJOnJwhQpqXua\n",
+       "nXem6r5PqbW4olDKBnvfffdJkj7/+c9L6hSNEiXrwJUMyoUfSN8Intp9xLBCXFnCujjrrLMkdVb9\n",
+       "FVdcIanLxB2BtRT5BURKFkTKYARWdhQRRD+QCfy5z32uJOlzn/tc1fl9vLi1VrI6S9GqKI+R1cv1\n",
+       "mXee9ZdoTvah43sUJ8YvoKIwv7w+tf4mrfuZrQZRP8x9z7hNG0SqNsfxe8Y25wXazM/D74kUxVep\n",
+       "VEd8uShv65wlgtMjOaGkLvr3pb5jDkSKFL5ekWXvfy/dC0pqK2vB2G8rWBPwHWJN/M53vtN0Htop\n",
+       "uhexhtcqp7WU+rF1j8novPQPSpf709JuKFUlmF+16jjzztuX+YT6jtLLWum+hFB770tFKkmSJEmS\n",
+       "pCcLVaTWinkpUeBP5aWn2KuuukpS9x56LEUOaxalwhWHWnhKp92wKkpwPc9PhVVx9dVXS5Je/OIX\n",
+       "zxzv+F57tGfJqqrdX6tESRGiXfCXGOo34VZ7aX8v72eHdiCC6cEHH5z53vcVA6w9+h9/Ho6PVA6s\n",
+       "QKLvhrb/GHgZPCIShYkx6r45RB/RFnyPeowKydzwscnvWuce5WIMRErPWlGacyV/PnxhItXYoyZR\n",
+       "DqI1m7HPmEOdRcEpqfalqMCIO++8U1I3xj3yNsIjjZlzkeLUN3IWWHvXC6jVjCOUPdqldh9axkXp\n",
+       "3hqNR59PnK/0NiSj9pIkSZIkSebM40KRmjd9c47wdD1UkUI5cCUFa9lzx5Tev3sW2taoLT7dyuc9\n",
+       "/M033ywpti48Ugkro2TdowJg5fbtl9bM3rVE7/mJnkNFwb/D60u7RL5PQL/j4+THY52535BHaKGU\n",
+       "uV8Q7YNCxTjhd4y39QSKFG1b8j3iOOrEnPGx4ccB52dM0hclXxT3a/Tzomj0jXAk632tj0oJ32UA\n",
+       "UBzwobrtttuqzlfqF8Yq0Yl81tI3zxCKGipv5D8JzDH6H5hTkdJI/0Z5zxyPiO4b1Tc27oPEvHHf\n",
+       "Kda+Uj/SDrVvRxyPxqv1Paz1tUtFKkmSJEmSpCe7hCJV+3S+XqjNJ+R5d/h3q7XI0z/+Alwfa4Dv\n",
+       "a9+/Y7V59uUSKCjRUzxWOe/JycPk2YfdiqvtdxQd2rPVPwVqrX3KiR9IKSIrakf6Bys2UupoV/Iu\n",
+       "RWA9Ex3p7YfyhNKElcwn/c688wg4V6QAKx3VYxG4hQ4ly5IxQ9vTp7RFNAYjyxaVztuIOchcwBeL\n",
+       "8kV5d2Cov2e0O0BffEyjRr7gBS+Q1I1p2jPKQQelfhqq3nu7liKsgbW19vpcx89LO0SgnNSued5e\n",
+       "Ua69tcYjg51SxvuI1uOHwtueEuuj1ZMkSZIkSXZB1rUitXnzZkmdlcP7eCxjFIC1fkodm5KVVoL3\n",
+       "zig8vEfGOkE5IdtxrV8Bv6+NdMGvI1JUiNBgj0HPagxuxdRmZO+bJ6svtOdQnyoUMKLwSqpDKS8a\n",
+       "7Ydy5VYrfglY2ShR/N33l6N+qBmRDxTlj/ZBg3nuZ9bXR4Q6o8LVZkJvHXP0DXPW+4bvozk01Aem\n",
+       "NuN3K/T5CSecIKlbO2hXVOiha93Yaz1rZWmvNtY2FMRafHyU1lKi7mojkN1nrjT35o3P7b5znHZi\n",
+       "3Mxr3ILvOAC1Cm4qUkmSJEmSJD1Z14oUT/NYukSAeDRStDP1egert2R1lHzEeOonEsQjlIh6I6tr\n",
+       "tK8QeC6O2uyuWNOenwi4HtZWVB/3K1gv7/0drJeovq3Qj1hjkXVcyrUDlMvbD4WP/vIcS8w78k8R\n",
+       "aUaW7sjKRC3x/vPxOw8lCrCIfdf42t0NOJ61huPJN+N9MlQBi8ASr/W3rGVeUV30Kf55qM977733\n",
+       "zKdnxW9l7IzlPudQdKL+4bhaldzLy9yIVFkUu1plycu51qq84/Xp69PGvKOdaqNVWesiRcwjk1lL\n",
+       "icbkWYN+qvUPXp93qCRJkiRJkl2Ada1I8dTIe0reY+6zzz6S4r3XdhVqrYfaCA4/DmvAc5a0+hm0\n",
+       "Rr+VnuJdOSvRN1oThQV1YmxrDWtnqJVM+/JZ8qOo9cnCinOrDKsO9YB+4LqUgwgy1BF8qKIcOlzH\n",
+       "/WCw+uYVdbvcekftxP8LJclzjEXst99+kjpfHzJa05ZjR71FMIc8y/9Q+mZcL8Faffvtt0vqLP9D\n",
+       "DjlEUqfwXXPNNaNeN6JWgWTssFaW5jJqfa0i5XDPon38eoxPFJLWzPbz3rd2rUENx3cs6p9ohwEf\n",
+       "5zw70M746dIffnwqUkmSJEmSJHNmXStS+BAde+yxkjqrZ9OmTZLqs+UOzQZcoq8Pz9gWemR9oTj0\n",
+       "9bdo9WmJlCaUDYiyIpcgWi7a+41xg/VBVNrY1vBYGdA9v9dYe0PS395//J1+QmHC6vOoTqwyFKnI\n",
+       "zyVSe1rHOdGnWOPuQ+h+DsutVFRIV45qLUuUKCKGWWMii7V2TUEhi/ZHdCLFYiitmcBbYayRLwmL\n",
+       "f+gecq3UziHat7ada/svgvHCmsT5GJ+MX/yAUVRrfduiexF+itwLmVvME/ptqGpfqwS2wvkitd7v\n",
+       "OT7eUMW5B3GPOPHEEyXF+6XWzu9UpJIkSZIkSXqyrhUprD9/r88O7FdffXXVebCKeLrn6dz3taqF\n",
+       "p1sseM/E7bgFDWuV/4r6rfX7c6yugw46SJJ0+OGHS+qiByMroESkRAHWFblHShnHF41ndh+LSAny\n",
+       "cYe1F6kVHE8OnVaFqdYfB6URK5F+xldrx44dkla3zpmL+OR4pvBod/iTTjpJknTkkUdK6nxZUN9Q\n",
+       "DNxC9f0TKRtrzMaNGyV1FjAKF8ejDqL2Uf71GqlaCyrid77zHUldBGiJeb89cEoR02ODAhWtxXyP\n",
+       "/+JYUZbMWZ+70Xzoy9hKFPda7tW15/f29ZyI/gyAb59T+9Zh156tSZIkSZIkC2RdK1JYoHwOBauQ\n",
+       "9/dY4K2K1MEHHywpVrR8/6bIquDpGsXMd8bmaXionwRWR22G8hK0Y+RzRX08Ooz375TDo7+wyrEW\n",
+       "+tIa6eKgiKBgok7U9gOqAnvd1UYEoeB5ni3GSeSfgRVPpA/qC34CtWpAidKO94z7vv4wzEeU5tr8\n",
+       "Zcuh7jfccIOknRUHouDwlaDNaUPyH23fvl2SdOutt86c12GuUmeuR5/R9vgDMgfYjxBlijnBWsLc\n",
+       "97akvK1r1lpDvZj7UXm9j0tK1FrVn3HCZ19lyPNB4ZNUejvgmb25PhGx+H8yTvh76e3IvBl71wLq\n",
+       "HSlRtddjPDIfeQZgvkbKnPv1RqQilSRJkiRJ0pPJPLMMhxedTKZLS0trft0kSZIkSZJWlpaWNJ1O\n",
+       "V0zslopUkiRJkiRJTxbmI7Vly5a5R5GherWqX30jR7jOxz/+cUnSEUccIanzh2BPQCJ3UAM9TxZR\n",
+       "iUQaHHXUUZK69+J8/8IXvnDmuvOG62zZskVS1068X478CPDzKPU3uXbwkXrta187c9154+MF/w38\n",
+       "DtxHiXxmHtmFrxftQb3dZ47rXH755TPn5/29Zzl2fwgH/xHKjf8OfgSvfvWrZ647b7w98Tcgoo6o\n",
+       "PNoNfxLqQT3xLWMc4aPH+MN/4qUvfak++MEPSurakDmEfxdtzLWYgz52KSu+Kvh/Uebzzjtvpm6U\n",
+       "2ffPxO/LlX8ifvGZoo6MfepKOc844wxJ0kc+8hFJ3ZjAn44x6BGtlJ928DXtwAMPlNTNPcrzspe9\n",
+       "bKZ+DrtMuN8c5yGjOf6FDzzwwIrnYW6dffbZkqRPfvKTkroM89T/2muvXfH3jAHagesx5slJxnX4\n",
+       "/vTTT5fUrdX0F+ehv/j3vffeK6nrJ9qT69Bv7h+Ln+Sf/MmfSJLe/e53S+rGvPtNEj3KXMdHDxjX\n",
+       "RNJGEbH027ve9a4VrwMedVqC431OnnvuuTPXnTdc56Mf/aikbp65LxXjm/nt45A9IPkdfsrR9SJS\n",
+       "kUqSJEmSJOnJwhSp1faQQqG56aabVvz+tNNOk9Ttw+MZzrFS+oL14tYbVhLRXNHTPNYsShJWDVYG\n",
+       "VjHWH1FmWEEoGChZlAcrb9F5kSgnESi0Q6SU1CqPtMvY2Zz7Qr9G0XIog05r9miUI37HuKO/jznm\n",
+       "GEnSd7/7XUk7t/Pzn/98SV0uFPI9cV7Ug1ZardUIVAUUNuanR30yv1B1OJ72oB/4HeddPt+ZK1ig\n",
+       "rDMeHYYl7Tm1UBM9whTFIVJdUShQplA+XImirFF+GspLn3kUluezQoGKLOlS3h2iFPfff/+Z65eI\n",
+       "IjgZqygn3/ve91Y9DwoOEPH7rGc9a+bvnCfKLE++MId2RD3mrQBwDwEfD56HDHwcMFY9mpB/o0iV\n",
+       "3nIwd6P+ZO4zzko52kpraevcHroW9IW57vVhLYjqGSlRwL166L69qUglSZIkSZL0ZGGK1GpP5pES\n",
+       "BTzlRxmWh2ZX9ZwcWMhu1UZP51g199xzj6ROWUJx4Om4lJuEcmCdYAUuWpHyTNj4RcDQ7MRjZ9td\n",
+       "73h/oryQ84R+Z7wzLk8++WRJ0qGHHipJ+sY3vjFzHsZha14tfPFQZ+6+++6Z87WCMobVGJ0HpRdQ\n",
+       "csH3AVvpfCgHWOoc4yqhl8H9yfieTxSAKMqZMUtbu4p52GGHSeqyt7MGoHigUHEe5pjXmTWIseD+\n",
+       "Za35jrDIGVNDM32TybwWV1epj2firs2O7zB3PHdgLb6LhStxGzZskNQpkr4PJeOmFp8DDsoaKv+i\n",
+       "84mtVQb6kuIUESlRwHweqrSlIpUkSZIkSdKTdZ3ZPGLr1q2S4gzYY+1PRGQM1kytUsLTsysBWA+t\n",
+       "5fOIm/XiQ4RV5L4uRNthfROJgz9GMotH+rgqEvlo4R9Tux9ULSiMRNc997nPldTtjdiqOqDoUk/q\n",
+       "hfrhKg9WOSoR44x/M3/4XK6IMhZbI4JdgXLI0B3B9chg7qBUMCfYfxLliT6kTlzPfYg4Hp8Ovu+r\n",
+       "wrOWoAz1VX764n1PeehH2iXykSnB8Sh7rYpbaVzsvvvuM9dh30zGw9h7z6EcLlqJWi+wb2tp1wUg\n",
+       "ipN5ytueyMeONadEKlJJkiRJkiQ92SUVKYjeaw7dqw14P4+ViDJU8gHiOCJGeKpt3TuM6xNh5EpB\n",
+       "K/hqsVcgUXfuW1OCemAl0g7Um7+T54pcHkSuRJBzpa8VR/2IHPrmN78paTyFcl64VYVfBRFjkU+c\n",
+       "55hx8H9BUa2FvS3pB3KtYL1t3LhRUucDWAJfK+qFykB9I2ufv6PQuT8Q83x5/fgOn5ax1bq+oPTg\n",
+       "s4H/V+S/hqLhUXSuzrnK17dcrGm026JgbWEtoXx9FSnGMGOQyNCxuPPOOyV1SpEroX32i5S6uUvk\n",
+       "LkrXtm3bms7D+Bk6Tlrxe7D7Mx933HGSurWatz/k9XKFiLXQ7w219wrueeQPI9KfPTUjRap2f9pU\n",
+       "pJIkSZIkSXqyMEXql3/5l8OnSaKRnvOc50jqLPYrr7xSUuer4dYUyshYkQRYF1iPWMRYC55FGFy5\n",
+       "8gzZrfBeH2UFJakVnrpRGPpGKvA7Pn2H9H/913+V1L3Hp/+IcIkiKbCW+lrF9NOzn/1sSZ018dWv\n",
+       "frXX+dYK2o1xS3vW+qtE1rorOa3lIVqPSCf6Z/PmzU3nwz8FXy/Gc1Q/+p9xg2rDeGLdoN7LrX7K\n",
+       "jkXMHB66JpR2ma9VTKg7dYoij/28QN1pI9aYvpGy9ClrWmuU2dhQX9ZKV6igtr74fuGnNzb4zfLW\n",
+       "wOm7ly3j47rrrutXMLv+WilR4LnjHObnH/zBH0jq1G4f7xA9K0S5Cx1Ua54dUPYi/1MofQ+pSCVJ\n",
+       "kiRJkvRkYYrUau82eYrFGkFpwDeI99JYtG6VjBUpwVMzvklkwS1ZGZ57hKfsvnsL4iNTyuJaC0/l\n",
+       "Y+HWOhFKfNJ/pRwuWGG12ZUdFCjaGx81fK/IGrwoIlXD/XiwsmrHC2qCR65gLbs13wrlwS+n1m8A\n",
+       "UF1QpqgXVjzz9/jjj5fU+TOQR8tz5rjiu9wHjnN5xuqhUHfP6wS1c9IjXEv49QpFo0oAACAASURB\n",
+       "VFCM6HPf8w0LutU3jDUrUgTWCupBOVDfWUNQLmqVN9qBe0LrGlzaJ5TyscZ4RvLWufKLQimqkLcS\n",
+       "r3nNayR1a/+8FFEUOfw/a/07a0lFKkmSJEmSpCfrMmqPPcz+8z//U1K3hxj5o1CifCfqsfBd6D1q\n",
+       "r6R4YaW4soIS0bqHGVYV1uxa53op4VFxWHGUszWbcF+/AiJzsF7JAoz1sWhFCgXFrdQoYzfjrRR1\n",
+       "GKkcWGG1uVAccuSg7jCuW339sNKj8U75uB7zBiXL1Qfag/pFKtGYUOe1uNZK1wX6gLHEWGFM8T0K\n",
+       "E8oAigprmqt7pb0E1wrK4TnzUOCiPehKcB6/V5TWYto/+r3/28dJyQfu8QpRiA7jM5r7fcHfszWC\n",
+       "uZZUpJIkSZIkSXqyLhUplIwovxFWRCl6ri9YGb7vFdZe6T27Kw5YfSglKF6t0VSct68P0VpB/5DF\n",
+       "ubV/+iqMWIFY21jljJNFQ708x4qPg9b6l1SEvtGZKHyMO3zOWqNGS+VDWUIx9BxCkQpUyjo9Jq2q\n",
+       "6ligPIHvP+hRiaiTqMKRYoLqyxyhTyPfFr6f9z6YzA3fK9HrXQv1I0+Rv00oraWlTOgoJ/STvy3o\n",
+       "6xf7eGVsJQrol7F9JyEVqSRJkiRJkp6sS0WqBBZ2bY6HElEOGN/zi316fP8rByuE/bCwsjwbce1T\n",
+       "N9YO54neL68XsCqwAql3rXLQ1w+F6DLaByVqaOQM0ZeMu74KCNbwWu+T1VfhQ30garWvdV3rd0M/\n",
+       "oWQyv0uK5lrsPbko3yGPvkNJos70CRa353hz3CKnbzmfKypRtGKrn6fD75fvkyjtnHuPOexZ7ftm\n",
+       "OPf6DY3wRvlbLz5muzq1e+YNhbxirVG0EalIJUmSJEmS9GSXVKR4X79WEREoEJ7JOwKrhKddIk14\n",
+       "396qSHhuHHJhkBW2FZQiz9Uydm4N6t2q4PRVPlCkyF+FkjHU6kAVGCsnTKsVjLXbd7z39Wvhd0TS\n",
+       "ME7uvffepvPURh+iFqDY7rPPPpI6ZThSoNfLfno1uLpXisB1xYexQ1uwNvTtY/zS+PQ8UihS9CFz\n",
+       "mZx+tH2rDxnX8RxnKF98z1pLLr1WJYr2/fKXvyypa8/DDjus6TwRqNX333//KOd7vLHvvvvO/Ju3\n",
+       "CX0jt0sQtTfW2yxIRSpJkiRJkqQnu6QiNXbkSG0UHNZMaX8f9rRDQcLC7uvLQfl4r9t3rz3A74Cd\n",
+       "r+cFUV/uvzEvsIq/973vzVx3rCjHsTLmt4IfRq2y4/TNVo0agerBOG4dNyh5nCeqxy233CKpU+AY\n",
+       "7yWFci18pMbCfZtaQZmZl5+dt6XnhKPPHnrooUHXYSy4ynvTTTdJ6hQDrjs0dx4q9djQj0Pbw+nr\n",
+       "C7arMbTdSntgOrV787WSilSSJEmSJElP1oUiNfTpG0t5w4YNksq5P5xapaTWqsF6wscDCxvrjlwu\n",
+       "vFePovdQjp797GdLko466ihJsc9PqR3JA0RUFPsd8VTfN1rOFQaUDOq52267Seqs6XmzqJw/fUG5\n",
+       "83LTX+TQwepCjSgpU8yLzZs39yqXR4eWolUj8LHCGiwpez/60Y8kdeOcf/8iMFRNx8eDuc4nawt7\n",
+       "ltUqOPioRDny+mYSr8Wz7jMHXA3tC2PPox3HgnuN5/saCv2BT9paqeGe+X690zcn49ikIpUkSZIk\n",
+       "SdKTyby841e96GQyXVpaWvPrJkmSJEmStLK0tKTpdLqiw20qUkmSJEmSJD1ZmI/Uu971rvB9Nbk5\n",
+       "8BEij4xDxEf0/hjV633ve9/M8ZEPFb5Ihx9+uCTpu9/9rqQumoi/U258OPBLOP/882euO2+4Tuv1\n",
+       "dt99d0lduUsRRLyHfuc73ylJes973iOp81/gfT7HRe2Lbxb5kPDJ8igtcu286EUvkiRdfPHFksp+\n",
+       "AuTHcr8OyhXlxOHvb37zmyV17el+I5STf7u/C+XHz8N9mPCjwHfsrLPOkiRddNFFknaOkIrAVyk6\n",
+       "jnbAzwEfuLPPPnumfvOCecZ8IO8U84j5Qzswr9ihfdu2bZI6fyDWA9oHXyv+/Wd/9meSHmkXr9t+\n",
+       "++0nqfOhYMx7bjOuxZhmbDI26Dt8SP7iL/5C0s5tWcr4fcopp8xcf+vWrSse5/mmzjnnHEnSlVde\n",
+       "KanzG/NyeWZoxix+iswh2pC+oty0A2266LWMOcVY9jUAP1T+7pnbfVcI5uaFF14oSXr/+98vqcuH\n",
+       "xdzyuRrt0cbY9FxmtDffv+IVr1ixfvg/MpZrM3tH/rCsbRdccMHM9aJI2SOOOEJSt5Z5PizGFWuI\n",
+       "7z8LUf8xriIfNcbboYceKkm67rrrZr5nHvzmb/6mpG7NO/300yU98hyx/Ly0Z6ufNNDvvlcla2dE\n",
+       "KlJJkiRJkiQ9WZgi9au/+qthZABP91iDUVRTbQSGZ8mN+I//+A9JXdZerBOe4rFmsZK4PtbHrkKk\n",
+       "mER4rhe35rG+S1F/5NfyfsN6wJrwfqqNWKmNMMKqwUrDqnVccSrlICllUEft8P3F6IfaSKvScd4O\n",
+       "pYii1qjZ0vHeX3fccYekOP8UCpTDeOEz4oorrpAkveUtb9npOzJ2l6J6or5FEWLMo2wAbUvfRtfZ\n",
+       "a6+9JHVKz+c///lVyxNFa/mYZOxESgZ9RAQm54sUiqH5mobie/+hZHg70A8oKl//+tdXPB9rF2uT\n",
+       "zwVXkjxfFmu7K1KcN8rRxpgt3XNQ51F+SqCSczy7NxDhG+0iEa31vO2J+p23F3fddVdV+QAlDkWK\n",
+       "cvmawbyLdm2gvtwb2L8W6E/6l3t2390guA7zozbvVCpSSZIkSZIkPVmYIlVj/WJlkT8JJcgVotJT\n",
+       "I1ZNbYbrz372s5Kkk046aebvPFVjNZFXaqzM2WtFa26WkpVKu2ClRX1b6nMUnbFypmANQWSdlPJO\n",
+       "uepQC+MC6wwrh/fva0Wp3K25dVqPn3der9WszqH5ZbyurjpiMZfaGEv6G9/4hiTp29/+9qrHR3MA\n",
+       "nx3aFLUeVRe/UsYc5UJ5QOV3v0jqgc/KWPiaUFLBPft91LfM7VJuP87XuqZQTlePgfOVfHFK9yb3\n",
+       "3astl+9RWOtb5fiec54pvFWJYm3ztzrkLIzuJfhPOiiBnM/vtZ51gHHdd94zTiln7ZqfilSSJEmS\n",
+       "JElPFqZItby75GkV64Dor1rPfKyi1n12iCDAWuMpF2uQOrS+hwWUtjvvvFPS8Cy+a0Wk0LiV1Art\n",
+       "2LqXHNBP+KOgAqCYuZWCf4P7STgcV5v1F2sZ9YD39YvKvtuqmDLPPv7xj0vq5sE999wjSbrqqqtW\n",
+       "/T31hnnvF4a1uha4RV1bN+bKl770pVWPYwxHGdBdIWEN5PwoDCgUPuYov1vyXK8283rtvo8oN8yJ\n",
+       "WkUKmDv+d+pdugf0zZOIksKc77vPZWk3gFIErsNaVvLH7Av91PetAO0z9n649LP7S3o5uTf3zTTP\n",
+       "Pah1D79UpJIkSZIkSXqyLvbaqwVri6dE3seWlBzem/a1Tni6Zs8yrCT8EPru5I7SRmRIKTppvYDV\n",
+       "QrtHigv14jisqVJ/eWRULeQieclLXiKpi1S68cYbJUk33HDDzPGoCZEi5f4vpahErBgiaDivW7Nr\n",
+       "HRnVOu6ZZzfffLMk6cADD5Qk/emf/qkk6frrr5cUtwe5m8D3UxubIbszUNaNGzdK6nxCfJ/BiJKa\n",
+       "CSgb+CB5bjz+7lFXPhfcJ8jnUkkdH2snC8YyPlklH5++anVftR/6qsDMZRQP+qfWlwncT9MjXksK\n",
+       "F/e46B7TVymLIAdd3/0t3eeLe1pUftqHdonuDYwDXzuZV1yPt09RXrUSlId7O/eAEqlIJUmSJEmS\n",
+       "9GSXUqQAK+Hoo4+WtHN2YM971FcxcrAijznmGEk7Wxt92VWUKMB6wF8DK8StBXyVNm3aJKlTOr72\n",
+       "ta+tev6+7bpjxw5J0kEHHSSps9ai/EUlsEI9q7LXE9XgyCOPlNRZm5SH9mJcjmU9zpv3vve9M/+m\n",
+       "H0uKIvWGvv4KtUS5c2pgrJx66qmSOt+TWkXK+zTyLWEMRf51tf6b0VqG6oeSUlJPh8J1yDgdKTX4\n",
+       "gRJ9tausdbyFoF6tShS4okaeotpdJVBKWcvwpx1b1WZuH3vssZI6RQpFpzV6j/kQRYcSgY9CGmWO\n",
+       "B+41Pq4Zf6wxnI952errxO9QWqM8YU4qUkmSJEmSJD3ZJRWp0ntvtwo9b09fhYqnYZ7SS3v9/aKC\n",
+       "tVaKWELBw4+ktt37WtNYf//8z//c6/cOETVebrcG8SE67LDDZspBpAnHM14if5GSP8SiabVKwbMR\n",
+       "j02fPG777ruvJOmQQw6R1FngvtdYCSxvFKFoLajtW8/IXOsjxBjDQi/l04n2iOP3JbDwqT+KBpnk\n",
+       "8RPEsmdOD1WkIp+gvrneIjh/a6R3dB5ojWZD1SfDuKu9Y0G7PvTQQ5K68YRy1EoUjYjv0oYNGyR1\n",
+       "PlnMO/djBdZO8qb5dVC6uCex9vJ9pFpTHt6W0N+MWxTEEqlIJUmSJEmS9GRhitSTnvSk3tYDVlZt\n",
+       "Lg2sQayooRY/VhfnXetM1YumNndOX/+Ceft3lPD36nxG78uxdvDtYu8+rDvyLzFu3WeI8YMa8OMf\n",
+       "/3iEWnQMze81lHn3Zx//FSJK2auM/QBLOcIcFKiSKo1ljEUd7QvZmnPLo8CwrEuqfaR0uX9pCaII\n",
+       "UQRcSUORoP6tPitO5F8YtX9rniaHewWKY6s/nueRai0H95qrr75a0vi56Ohv/EG5Hv2HctsK9WRN\n",
+       "pNwokiii5IBj7YuUUp4V3H+W8/p44jjqxbxjfqMMUr97771XUte/qOieEy8iFakkSZIkSZKeLEyR\n",
+       "qvWGX43a99dY/L7zObRGUfF7nroXlbH6F5VF+whhpUU5YJy7775bknTwwQdLkvbee29J3fhgvETj\n",
+       "BIVqXuNo0VGC8+7PPoobGZApW6tvVCsoGvgSRYpUSfFA4fD8RqyFtYoHvje0AxZ6676IjK0onxZv\n",
+       "DfA1GSuPVVSO2r/XgprK3Gz1i+17fZQ7+tWvx+4eRLvRrqVM6g7nxUeQT67fV5HCR472QjVmnDEf\n",
+       "UKBYW/GduvXWW2fOhzLkb7FYo+kn5o/vr4tiyvG+xvu8Q5Gr9e9MRSpJkiRJkqQnC1OkxvCbqPWN\n",
+       "4OnTrSa35mrhaZsIG3xhknEgQmVs8F3iPX3ko+eZ27GqPWLEIUoR64dPsmdzHs+2y7iszZK9qzHv\n",
+       "qFbff64GFKhStJ3TN0s7Y65v1n6/Pha3KyUlHyvPk+MZrMfeF5Hyosy4z0xfaqP0uF5fVRSFD+WE\n",
+       "dq8dL333nOM6kc8eqjc+ftDq4waeNw2Vt3U8oDyyVtLurIWMA/7NdTietR+/UpQi7tFeP/rhwQcf\n",
+       "nPm3q/zUg3s+a63vFAAcX8pvBalIJUmSJEmS9GRhitRkMim+Lx/LeuGpl6dlnkb9vXKtf4H7Rs17\n",
+       "d/vHG+T2aAVfOPqTCKaTTjpJUmdVlrJWo5b63o4lxQhfKSJReL/Oe3rGS20kSFIHOWNWAwsYhQDL\n",
+       "ttWCd18K+tJ9MyIiXyzGPApO5EOFAsOn+4fh++S5zABflKjNWtXDUn4sxjxzjzU4iriu9UGqjfhm\n",
+       "Lej7BsT3xxzDt3c5+FWydjgeXUn7EF3nDFW1UYSive1KcC9k7cPnzucN+bBYG8nnxvzx41FOGUfA\n",
+       "WykfL9H48LdPQ/dyhFSkkiRJkiRJerLQPFKlp12syNbcLm41RD5MNZbsSvh76ccbUa6Psajdcdvx\n",
+       "nC/kJqGcN998c9V5qB/7XN1+++2SOmsNq8Z99J7+9KdL6jKcYyVhRfK7IXvD1eC5hfCHWRTHHXec\n",
+       "JOn73/9+r9+jYkTztcZHyn1VGCt91U+/dl/Fg7F+wgknSIqVCWAMcj0sapQTPqO2Ys3Fn89p3Rex\n",
+       "5HtEu6NclXyGeAvRqowxxzyvUGsUYsRYGdMdf5vB2kW0nO8mcN5550mSvvnNb0pqzzlHJvFI8WQ8\n",
+       "0l+tPn2t/p7ud1oaT37eqB61jBXRnIpUkiRJkiRJTxamSNVYHFh7WBf4rJSYt88SUVgoEn0zR0eW\n",
+       "Nk/pY1lTEbyfxgrCmitZBUQ+0M70jys2nB8/EvcnQdkjy7Tn+IBof60IrCisXyIzaiNoPKKD8mL9\n",
+       "RO/V8ftgvyjGB+2K9TivXDqAEsd88XxYEd7OnGfoXnmHH364JOm2226TtPP4IifOaaedJqlT0PAn\n",
+       "Itpy+/btkqQbb7xRUqeeoBxKO+9n6EoFigxj3lVvj8pjjEf+mvjDeV4qh/PyPaohaxxRfSWfDc5D\n",
+       "3zJWmZP0Xeu+jShznGdsaN/SHK5Va4844ghJ3RrKW4fWCGrWctqTccPYwufM1y6UHfwtb7nlFkk7\n",
+       "16+064VHjTEuiAxHmeK8jLe+mdpLCg5KJ+Om73VKsOZTX8a9/31X8T9ORSpJkiRJkqQnk3lbxyte\n",
+       "dDKZLi0trfl1kyRJkiRJWllaWtJ0Op2s9F0qUkmSJEmSJD1ZmI/Uli1bHnsP2+oD4+C/gE8O/leo\n",
+       "Xp/73Ock7bwvEf4F+CngWxNFQuBv4bks8Am68MILZ64LnoW3FDkB3i68N+aTCI6LLrpIUucrQuQP\n",
+       "/gMcjw/PgQceKKnzD8CnB/8PfFLwB+B3b3vb2yRJl112maTOXwP/DjjmmGMkSXfccYeknf00at9/\n",
+       "0458jpVXrHS9T3ziE5I6XzXan3GBv4fvYE5/Uj/8TfD5wUcLf4BXvepVkqT3v//9M9/TrpFPF9eL\n",
+       "xinX8yy/Z5xxxkw95w3X+ehHPyqpGwfRTgLMQ3ztiCxj3jA/n/a0p818jx/Keeedp0svvVRSN1dp\n",
+       "Qx+DtDHnYozTp/jEkIUe3xHm5AUXXCBJj12P83M+j+wlKo+xQ9nxQWFs4y/m2fdpy3/5l3+ZOb/7\n",
+       "fjFm8bHBd8uz6eP7g28Qn/hKvelNb5IkXXLJJTPnb4V60W74pDE2mQvUb8uWLTP14Hs+yejNHKPd\n",
+       "aS9fUziPRyP+5V/+5cx1542vZev9ep53LYI8Xeecc44k6Yorrpj5nUezMh6Y661+wMzr888/X1J3\n",
+       "72Nt4J5HuciQ7nC8PzMw/5k/+GWW2jEVqSRJkiRJkp4sTJFabiEO3Q8JqwxFxhUSoo6wBrFmfD+j\n",
+       "Ur4qnlpbn6JdkSplxyXvDlFLWPBYWx4JgkLxzGc+U1JnPWJVYM1hJfA90YJYo7QfmcCJOiOPErgV\n",
+       "62zbtm3V+kVKFEpYlHl8XkqUg1LpEVY+PjjOc70wnhknKFVYU57ZHKsN5atkBUZZocHzWzE+Wjng\n",
+       "gAMkxdm4a/H5GFEaV4xjb7/l6wYqKsdGUVMcR5/yb8YYyhDn8T7yMkGUY44+pS1c4eG6paz7RKVF\n",
+       "eY0Yc6U1KsqM7WN8aNSWz9nSGs/3kVrPWkg0G/3mewhSD45fhC/weqA1ehNqI5w9hxvXidZ4FCDu\n",
+       "NVu3bp35nrkd5WXzaH/mI2sG46A0j6LoWMZ7a6RyKlJJkiRJkiQ9WZgitZy+SpTjViagzGDF8RTr\n",
+       "T728F3V/gqG4NYRvC+/78aXZtGnTzPUjXxJ/2ucpnf2IsDaxRryefO9WKdfl6Z4dtYdmj631gWvN\n",
+       "0jsv6A+sGxQmFKhSFmu3wrHCqL+Pd85fmzOlNcsy12+FcYtC6vPIrcl5g++U58Vi3ko7t6H3Bb5R\n",
+       "rBWufjGXPK/QUPAXHEppDo3tR0h70a7+OZTWfSdREvBdYWwzVvHxGjsTOWOfz7HvESWoV2tOxbEy\n",
+       "d0d4O/j+pA7zy99y4DfMPfA73/nOmMXcidJuCa2sukpMJpO9J5PJNZPJZPtkMrljMpmc+ejff2My\n",
+       "mVw1mUzumUwm35pMJv/Xst+cO5lM7p1MJndNJpNTRyllkiRJkiTJOqSkSP1c0pum0+ktk8nkVyXd\n",
+       "NJlMrpL0F5Kumk6n751MJm+XdI6kcyaTyWZJfypps6Q9Jf37ZDLZOJ1OBz0WY2X9/u//viTpK1/5\n",
+       "yorHocg4/r6U9+i+0/S8rIxo3yHKi4KGUuE+N44rGjzlo6SQkRpFqjbbL1Ym5yeywfdDaqVWkeL6\n",
+       "+D8sChQO+g0fKaxz6oFiVdpXiu+xKr0d5rWPF/Tdsf6BBx6Y+TzyyCMlxZEwEbVRqhFEwpEhHTUC\n",
+       "lvtBeUQocwK/MY+Oow99juAjwXlc+VgUzG3a0scSCg/lPfjggyV1CkDrPo+0F36ezAEioPvCmBiq\n",
+       "+Hl/1PrjtfLyl79ckvTKV75SkvSP//iPkqRPfvKTc7me07rfLKyVXynUzhP3wcLvs1Xl5nqltZhx\n",
+       "xj2tpESxbtQqf6uO4ul0+t/T6fSWR////5N0px55QHqRpH949LB/kPT/PPr/fyjps9Pp9OfT6fQH\n",
+       "ku6TdHxVSZIkSZIkSXYxqn2kJpPJfpKOlvQ9SbtNp1Me/X8qabdH/38PScsfKR/WIw9eg/irv/or\n",
+       "SdJLXvISSdK3vvUtSe3WFfA0WvId4SmX41wJqo2IKOVNKkUrOW7FYW2zbxPKGu+Ba0EZ4ame+nu5\n",
+       "qTdP7SUrsNUqipSLeeeRAiI3+KT/PWdPa0QTOUpq974bC9+7sC+33nrrqt8zzvEjgVY/GAf1gvHt\n",
+       "8201pcujfBjbKCzs2cbY51yRZVvaO23ecP1I3fU1hBxxfWGMk5eHdnMlgD5njpRUWo+gdrge/TKW\n",
+       "Eogq3ArRaccee6ykbiz+wz88oifU7B27nmGO+luaVhifrWsj44Z+r4Xysia4Lxn94tF9JSL/5Igq\n",
+       "XfXR13pflHTWdDqdmSHTR0q82ih/fMadJkmSJEmyy3PNNdes+n3RNJ5MJr+sRx6i/mk6nX750T//\n",
+       "dDKZ/PZ0Ov3vyWSyuySci34sae9lP9/r0b+tiluw/v4UnyaUF3xY3K8BK8mtHFeOaiNOeJrl92QD\n",
+       "JkcL71s9b4/TN2qq9Xzu11Eql0P7YC1ifUZKnGcr9n7Diqu1ArwcTq0SVeuTRRSY59yh/lwP62ao\n",
+       "LxNKlI93xivlHdsPp6+PVAnvd8ofKZh9wSeKdsFPB5aPC792NPbwybjxxhsl1c+VsSKM+8IaRGb0\n",
+       "ecMa4EqFKwGsla1jN/KRoo+H5rEai8985jOSpNtuu02S9N3vflfSeEoUyl7pfLUZx1s5+eSTJXW5\n",
+       "4/7t3/5NUqe8lcoDjJe+/XbYYYdJ6uZla3Qoa3SpHfH7rc0zdsopp+jaa68Nvy9F7U0kXSlpx3Q6\n",
+       "/btlX31V0v9+9P//t6QvL/v7yyaTyf+aTCb7SzpI0jBtOUmSJEmSZJ1SUqSeKekVkm6bTCY3P/q3\n",
+       "cyVdIunzk8nk1ZJ+IOmlkjSdTndMJpPPS9oh6f9I+utphYnC03XkS8FeeVim+Oa4IoUV48oFT599\n",
+       "o4Y4nysxKEO+j1P0+1pQLrDwXVmJntJRgIbuXYhCw/lc0fD9ryJoHyKkoqzPTl9FBl8az9weEWWr\n",
+       "pr70K/UsWVmewd6h39xHir+Xft+XoRFWDn4IPg7pN7eWh6o4Ph76+m+sREmJWiu/vFp8n8954z4r\n",
+       "qPDuF9k6Z5lL0Vhv9VGppe/cor5XXXXVmMV5jFpli6jNsRQplDDWPHIHltZq7g1+z+4bXQisVX3z\n",
+       "lJXmKc8OlLt1l5KIVR+kptPpdYpVq+cGv9kiacvAciVJkiRJkqx71kVmc4gyRpNXabfdHgkOjHwu\n",
+       "ot/7e1wUG6yokjWFYoGixXWwjMfK8gs8Vdf6tqBk4EuGFVnKwB3hiklfHy/fw3BeigvQDyhThx56\n",
+       "qKROdfD8XJH15BFHKJ0lpa9UL6zwyPqcV7uUxkHrflxEg9bmJ2v11QPGcYu/RW1das+NBcvYGsuC\n",
+       "7Uut8kPurdK+jCWYC6wB7Ic5lNq1d2wW7eM2lLHVZfqVcU3Ed+mehu/h2App7ZoS4YoUazYKFOvD\n",
+       "mKq2lHvtJUmSJEmS9GZdKVIlUAawEiM8EiTaMRpKO06Dfz+2EuXUnh8fMD5RsminVn8DrFkihMbK\n",
+       "FlxSXLCOUB6dKLKF/mZ8sIchESDsKVjKGA++tx4+QZ7ttjViB2tpXv4fESVlE1WgVs2p9XWDvtmr\n",
+       "I7UIP5GVdmivVXFr1T/m4Fh77g2lttz49XmmaVRrFIiSIsT18I1ibm7YsEFSl/W+FfcDTeroq1rT\n",
+       "f+7XylxnTePtDdF7KFSRstkakb3WML5Yq1kfhvpyOetjdUiSJEmSJNkF2aUUKZ6KS+9lW/cyQ+mo\n",
+       "zeWx3sBaxu8D6wNrlPqUouyA/EpYndddd914hV0FrOPIpwbFBJWA+mJF8T31xafN91oswfmw2nmf\n",
+       "jkKCNc04rPWb8X3Q1goibCJor9oM6K3zYyy/FKI/jzvuOEldLp/lypT7PpT2/ipF5dG3a91nEbX+\n",
+       "Zp4pmn97HqhaWDvw0Rm6RtKXtWvSWJQirH/RQE1n7Ynam3xRGzdunPl7pPCW3h6sFbUqOvcM3jKM\n",
+       "nZ8sFakkSZIkSZKe7FKKFHmkWq2KUs6Nsd+XOvNWujg//g9ch/fXrYoA5yOnSN/8W32J2gkfJY9q\n",
+       "pP9QJkrZeEugUuCLQ5Qa6gTjD+WPdi7lMMEaOvDAAweVr5Vav6F5jU9XhPvmOXv6058uqRufNeoM\n",
+       "qur9998/83f6uFaZqT2uT6RhC6yBJfAXdJWxNeoQ5Yg5xhimD1sjcVFH2Z1i6D6MrezqUXuteNQp\n",
+       "48LVe/x/ydzuUW4RQ6NCh1Lbn7V7QDq1vpGpSCVJkiRJkvRkl1KkeL/fmgNird/DO1iFWLVERNx9\n",
+       "9929zocSAjxl//jHj2xrONRKYB8vzuPKAU/pfLoS4zln8KugHSg/0WscX7JqsZ44bmjepWgvQKw3\n",
+       "rGZ8plCmsOrcF62UA4XrDN17rpVa5SfKDI+iFUWR4lOHguftwDghmvJHP/pRddmlzheNdrv99tsl\n",
+       "1dUr2pOOOg1VjtwHa957w9VG8qIw0HZ9fbxQefGFoY8ZG61zkOOJpGVOrRXzjrTuS8mXLwIfKNrR\n",
+       "I2rpNyKWS/dQ5gVr40qRscvLWdo9IgJfLM4/ViSz+82W/G0jRQtFjkj4jJb9hAAAIABJREFUEqlI\n",
+       "JUmSJEmS9GSXUqQct7J4Km99uh26N10JrEMUDKxEFA9yddTiebTwe4jyPfn7bsqDnwj/xgrBiudp\n",
+       "3n3SsEawTrHyyD9FOYiW853i+eR3tDuKSOSrw3k2bdo0U65aJY76oZRRfhQOIDdOZB3Sbx6953m3\n",
+       "onq4InPwwQfP/I5+wreP9sL6ZNxzfRQ/rKcjjjhCUufbVvIj4Dy0p5e7ZMVzfvrBowSxPvfff/+Z\n",
+       "40v5ychM73tromashPsjRmWnD7FgKTuWeu1awNwq+WG6ZTvvDOnUO1IMave/pB1dwUBVp0+3bdsm\n",
+       "qVvLornDGs2n7zpRS18FZ9F4pm1X+pizkZIKrDXMXeacvx1gXDOeUZxoPz/e1x5+x9rDWsW48ntt\n",
+       "lK/Kueeee1b9vhbKy5rO9bl3uSLF2wTWbBRS+oP6subUvs1KRSpJkiRJkqQnk0U80U8mk+nS0tKa\n",
+       "XzdJkiRJkqSVpaUlTafTFZ0NU5FKkiRJkiTpycJ8pC677LJwnx7e/+K/0DcvEKoXn7w/xX+A9/77\n",
+       "7befpM5fIPIp4f0274vxKcEv4IILLpi53rzx+kXU7iU41vXGgutcfPHFkjq/DN6Lk/PEfXuOOeYY\n",
+       "SZ3PFOOMdsCXC/8BfvfGN75x5nqotXvuuaekLjoS/wHer3t2X/f94fe8d2d8vfzlL5ck/d3f/Z2k\n",
+       "nX2siGgp+YPwPXnEqC/+AJSb8fnBD35Q0s7+DfiocR38eNxnyv0g8LvAX4N/n3322ZKkj3zkI5Ji\n",
+       "X0Daj/L0zXt2/vnn6/LLL5fU1Rmfh5KfVykyEZj7b3rTmyTtPBeGzjXWGPqCerzlLW+ZuR59Tdsx\n",
+       "VqIoK3xCPI8OYw6fJXx0Xv/6189cD5hr9FHtGw3GIlGN1JOx+453vEOS9OEPf3imXu7D5jnt9t13\n",
+       "X0lde+CrRX8++9nPltStGZznj//4j1esHz5bzFnuCUMjv7nOli1bJHXt4fciIltpZ/wpaS98gdxv\n",
+       "0H3i3v72t0vq5h7lZ1y6fy7XpT/dD5Hr0S533HGHpM7378wzz5ypZyu0e8nXkHZgLbvoooskdT5m\n",
+       "tGt0L8e/Ft8pX2O5N7jPGvULy7Xqt0mSJEmSJEnIwhSp1Sw2vosiZ7BCSnl7HJ5OPdtvbXSTW7dE\n",
+       "Sqx15u9WvK3ZqwxrpxQ91ZfWrMcRRJZgjXgeLc6PIoKC6RFLKCwoH1iBnr3WowlpvyjSqtR+WMGR\n",
+       "wkM5sb5dVeDvUe4XrEjqgfVNu/k88nZhHDMvSnmesG6x4mgvPr1/SlGpZM3um3V6eXtSttYs7bX5\n",
+       "hUoW89DM2fSF7w3mMBbo4yhSGQUt2r0B5YG5WsrR13cu+3mj9maser6jqPzcA/xewPlRHlgbSlFm\n",
+       "HIdiQ33HykXIHEWZ8zlUitbbsWOHpE4hKkWoR3PP/86cp/4O1/HvfTzUKktO7fG+lqGk0p4eIe5Q\n",
+       "T78ev2Pt5Tq15UpFKkmSJEmSpCcLU6RW25cMy533+g7+A30VKX9abc1GjA/KUJ+jI488UlJnZcxr\n",
+       "rzOnNo8PuMJQC/4ibrXU7tgNWHEoKZFSibVU2k8JK5Bx5L56fv7WTPoR+H2Q98mJlE3Ke+yxx0ra\n",
+       "eQdz2pnsxb4PmuO5j5iLrXtOun8M1iEKGuC3ELXj0GzgyxXFee+lFlnsUNpvsURtpmjmFGuQX5e1\n",
+       "84QTTpDU5UaL/E2ZA5EF7tn4h7ZzNCYYO9SPsVGrmDnuE+RjEx8fFCfmForNajnL+kB9KAeKIfWt\n",
+       "9TmrzZWIQoQPGep4lEcqGr+o1ZwP5cbbkzWO8TZvGLeUw1V5H6e1ChPjpvaenIpUkiRJkiRJT9Z1\n",
+       "ZnPfYwurpJQVtbTbfWtW3CgDOR7+fX2kyAp89NFHS5I+/elP9zpPK0S71dJ3bzi3mk455RRJXf9h\n",
+       "HZWgP4nCcwUDKwTfL46/9tprVzwfv/eImYih/hEoeqgZkZWDVeX188z4Rx11lKQuMuUb3/jGzPHu\n",
+       "Z4L1D8wrrLOhyirQnj4fKEdUPyeKMItgHvahdVeDvnvDsYZ49FSJSFWkrSMfGPY/ZC6UfJtQIqK1\n",
+       "E8s+2rOPaC7U1lJ7Rn5/vI2IxgjtQYZt2mH79u0rHo8C58oFcB3qzRxjXHB8q89PBFFjrKl8MkdR\n",
+       "eqLrRfcudg/weyP1o19or9Y1nePdt83XxigSH2oz6tfCuKUcQ9fqkj9qRCpSSZIkSZIkPVmXihRW\n",
+       "wYYNGyR11gI+UaWnWd/ry2nN5h5ZfUMteRSbAw88cNB55s1Y+4KRm+POO+9s+h39iZXgfiRYv+TE\n",
+       "YY82rFFXvrBisG5L+31xfay7Vv8QL3dkrZf8a1DYbrjhhpnzRqCAufXpv0PRKe1VWKKkINHOJUWq\n",
+       "dV617lW5nNb9NUs+UhF9y1i7z6HD2CcKrDRm6ZPSbvfR2lka207k4+P7grrCgbJWq6rj24Mi6PcG\n",
+       "7xcUR9qBcvZVpDxCnPOg2rqiEkG5Ih+66K1I5FuGslUbWR3lR/PfRedh7p966qmSpFtuuWXmsxZX\n",
+       "FMfG16ZIgXVSkUqSJEmSJOnJwhSpX//1Xw+jhLBqeFrnKRzrquRr4dZOa5RYLaX3wSVQGFqjD3c1\n",
+       "sGZ++MMfSmq3KrCCSkrF17/+dUmdP0DJByuyMrEi8WXyHdNbQVUo/b424qv2/T3nc6XIrd9aX6Ra\n",
+       "H6eIWiW4NqfTIhgalTfv67nPV+2ax5giiqyVsXyIUDR4KzF0jfU57AqR49Ff5Czsm2vPc9SVcrRF\n",
+       "lKI5o3Zi/Pjcc+WItQn/RD8fvlslH8FobeAeTHuiULUqUn3X4FZQniMfRScVqSRJkiRJkp4sTJFa\n",
+       "zTrFiuLp1q0BnhJrLeO1ys/Ul1I22xKtUYhjUbou1g25RbBy8D+g/0pRj7U+Mxy3devWquMjXJFC\n",
+       "IaE+fbM70061Vs5YuN8KVjL1qq3P0HxPtTmSYFHjejXWs1omtft8OX19wMYCZQvFwucic5C5Tn+U\n",
+       "fH0efPBBSV2kdAS/x18Q/8G+amzftYIoSJQ5yh8pjCg+rrjVKprMsZICSHQhc9kVt+hey1sifPfG\n",
+       "ypA/L1pzJ6YilSRJkiRJ0pOFmR+rPZHyFM37ZJ5+eX/faqHytN73KbiUmXnekQQl5m2xu9XRel2O\n",
+       "Q4kio3irQlECa3zo3n7+Ht5zzfDZqk5gNS96vMxboY1yEbX6N0T+GoukVh2dl19mCd8rrjVfz6LH\n",
+       "JtDOjCVXh93vsJQ7kDHk+1g6vjdflLuuL7VrB3nA8CliDY1yKDLOfDeQoQql42+HWpWbe++9V1Ln\n",
+       "azVv1bnvPOT40riCVKSSJEmSJEl6si4VKcA6AJ62eVrkHLU5MErHkWPELeAoFwfn7QtP4+RXGpIP\n",
+       "Zx7wNN7Xp4dosM2bN0uSDjjgAEld/w3NWzQvXIH0DOh9rTz8F9bKz4bxPFShayVSMFvrvZ6UKIjq\n",
+       "5gxVolz9rAU1HwWllH/Iqd1tYN54PcAVwdrM7VBSY5nb0b1gKLVrgO+hiEJFxG2kNPo9E8ZSSH3t\n",
+       "6+szhvJJ7r++0Ywl+taX9i/lGIRUpJIkSZIkSXqyLjObOzy9+o7ZtbvV1+6UHVkfvKd2q46n8b4K\n",
+       "Be+FsX5QpFr3/5oXWE1DM7hjRVEf2nm9KA6uMnjWX6waPlvf5z/lKU+R1PVz1J5cD6s1ynaM/0Yp\n",
+       "IgdF0Mvr1mmUtbgv+Of8IuI5t8ZuO2DuRap3ybeE6LfSLg/OWkVFYel7/inGvud9oj18TWQu1Coj\n",
+       "pbmLgofvD9evvYeUqC0n48wzzh900EGS6n3fGD+o7JFiVYLxxl55KJetig/1QhmsVXhL+HgZ6tPG\n",
+       "GpmZzZMkSZIkSebMLqFI8XTJnm+t78VrYWdurBEUsBNPPFGSdOutt84cP5ZihDVLNBtKQ60PkStY\n",
+       "KCBDrSiexodG8jzwwAMz5xvLChkL70esN6wbrBMUw1r14alPfaqkTmmi/lGWZMZzaVy15oZx/PzU\n",
+       "x3PQRP4kqBye7w3mrTTWlnMtGFuJcqKs8yVlBb9L1hT6ZOia6XmdgDWCtbmU6TzqMxQUFKgoMze0\n",
+       "Kg++pjI3+aTdSnvf9cWj9lgjXClC+TnkkEMkdf3G8bX7bXIvoB3pJ+5x1DNqR9Ys8m+hNqNItSqe\n",
+       "Dz/8sKSun4fu6sF4ZE1DUSQ6k3amX+ln2iPKYehvIUqsrztakiRJkiTJLsQuoUhB7Q7jZKNt5cAD\n",
+       "D5TUWXFkl523zwfvu/vmJ/L3uGO9z8cKHOo3we/vuOOOwWVaC1CkPLsxPnklFQK/Daw+fsdnaW+7\n",
+       "eSsskaKFdRfVD+sTH8Uo0sateSKOaE/qx7zyaNVSFmky5WOll3IDjQERwygFrfA75jptTSTrXXfd\n",
+       "NbSIM9CmrGVY5CgCfYkUqQ0bNkjq+o63B9GuDZGlX+v3OhbMBcaS7/1WG7VVC+o2a2Lks4SKv3Hj\n",
+       "RkndnCTDee2aHPnS0X8oVnyPQsO9iPoz/l3BaY3opt8ZH61QXvBxGClMPDuwdoytJKcilSRJkiRJ\n",
+       "0pOFKVJPecpTHrPYfUdo37Wep0x8a/w9L0/Tvj9TK/hAYUVx/rGtkoi++YV4f+3ZeyO/ApQFlBcU\n",
+       "LLcS+b7kh0A7odxw/ZKfhGf6xmr233mUZmQVl8DKQgnhOu7TgxXnPl20B+eh3rzvpx4oUrQn1k9r\n",
+       "hEspo36JaCeAKKdMyUqr9bPB6oOf/OQnKx4XWeO+87pfr0VVoe/22WcfSV2da6Oe6Gv6uK/azRxl\n",
+       "7qHa4dNRS20+INYS6jmWyuljhD464ogjZr4/+OCDJXXtj8JSC/XkE1p9l7i3MBeiNY4x536K5Dni\n",
+       "uLvvvltS/zlZ+zvW0P/6r/+a+V2rkoMKjKLEWs4na8Pee+8tqVvTuB5KDvdk34vR12oUUNZCvqc9\n",
+       "KUdt5DPji9+3ZlIHxn9p7aB8PHtk1F6SJEmSJMmcmSxiZ/XJZDJdWlpa8+smSZIkSZK0srS0pOl0\n",
+       "uqJElYpUkiRJkiRJTxbmIzVPRYr3queee64k6ZJLLpHU+Tr1ze6KnwTvl/HV4b06deITXxHet/J+\n",
+       "mffPJb8F94fAXwO/ije84Q0z1wOipCLflFqIYuT9+Kte9SpJ0rvf/W5JO++b5Du0Q5SpnffP9Av+\n",
+       "D7wHf+tb3yqpfax4HqiovWlfjnvb2942c73aKMra/czIToxP2oUXXiipG5/ui8Y4xq8GfwNUZOpD\n",
+       "/1Bejwyi/S+44AJJ0qWXXiqp6zf8KPgd9fV6Ux6uX/LToR0/+MEPSupy4NDPtEPkcxW1P/3LJ/nf\n",
+       "Tj31VF188cWSumg4Im/Ju+MRk5TJ8xZ5NBJtSJu/8Y1vlCRt2bJFUjeWvS6er8ep9XnyteWoo46a\n",
+       "KSfRZn59/AEpB9F81Nd3i6B+p59+uiTpC1/4gqTOT5UxWvJ/BM7P2omfI9fHp+Z1r3udJOmjH/2o\n",
+       "pJ1zreHDQ395dB1jAR821njaFd8X+unMM8+UtPPaQj9zXGnuR358/j33oosuukhSfQ7CUgZ4v6fQ\n",
+       "33/9138tqVuriXZzv1TWLG9v/FWjiFj6AZ8t1rIrrrhi5u8cR30ZP76PqeNzn/6lX84++2xJ0t/8\n",
+       "zd/M1DvK0eflbn0GKN2DUpFKkiRJkiTpyS6VR6oWtw54im19Cj3uuOMkdZEvWO48XfO0HO3QjnUT\n",
+       "RUOh+Nx3330rfu9RcFiLHrnh0WRYX0MVKaxzV1q8fUtRfZH1hbLhvx+aVRhrp5QpPMpei/VeG63Y\n",
+       "Wl73S4yu43mSsBJRVTwfFOPRI4M8+zDl9eNKqkjfrNhE+7XmN4usVcrNJ7l1pE4FRukgysqVKOYe\n",
+       "UDbfs4trEHX3wx/+cOZ39CVzztuINYByucXcGsmJcnT44YdL6sbA7bffvuLxKEGe88tzeaHgefnI\n",
+       "YF1SWyNoj2iM+1yIcpyhpEVjiH6KcpvVjr1apYi8WZQrmhvMWWBNRxmKyguR8kf/ucruOdlcgeJe\n",
+       "RDtHUWml6EDGtUfxuZJEu7dGjfrcj6IdGT+l3HzQ+gwQ7XXppCKVJEmSJEnSk4UpUk984hOr9wxz\n",
+       "Iusueq8bPc2W9qS7+eabJcWWcan8UZZViJQowCrwp3m34lx5qc2RU4KcG+4vwlM61nSpHUp+IGR9\n",
+       "pr1arfSIvnsh1ipMrUpUZOVhpUZ7K+Lzdthhh0nqrMGtW7dKirNBY21iPTuMo74Z9SPcSsXPo7QH\n",
+       "H+VFbUFpaskEjwV/3XXXSYrHJmOba7r/F5YrChBj3vu8lLsOZYw+5PyuHNTiPjwQ1TPKLA7sdcZc\n",
+       "8T6qtfQjSlHhnheIdqb9UVJoX46nHVr3dYzeHtRCeVB5S/mxfE7RT6Vch5FfqUN7RW89uL6/zeD8\n",
+       "0VpbWgv4nd+bGO+Um3LNe7eGee15WXveVKSSJEmSJEl6sjBF6pd+6Zd6K1KRBd6651bJB2aohd43\n",
+       "RxfWZu3O5vhneHbeodA/WOWA9V7K0ks0G9ZPpMhgnWEttu7fBCWfs4ihViqgcGLF1PZDSZHC163k\n",
+       "80ZUIO2H71xktdOvjLfI927oXotYpyXli/mCvw7WNspbxPLs4CgGjF36gjJEfnlY2L6vImMXBcst\n",
+       "+JLvEG2KKjvU/w+oB+fvq74CfexrlmeoHhsfW96enokbX7VjjjlGkrR9+3ZJZeWNMe4+L6yZtJ+3\n",
+       "oytDlMOjBh0/L9C++J5F1PZnaW4y1yiP+/71heg3vxfzloh+5N7EfKy9p40N0bsonYyDvnv+OalI\n",
+       "JUmSJEmS9GRhilSLaoIli8WPdTfUF2joU/nYnHbaaZI636xaPwrag/bBShhq/eL/4YoN77s9WtD9\n",
+       "KShHSZnjfIyJ2kgJByvP/VZKqoErJFjhtGtJ0YHayCDOD/fcc0/V7yLod/qbcqNwRf4JUXmJ5Dr5\n",
+       "5JMlSTfeeKOkstUPWPFAf0Z5xpwvfelLVdeB5co2ljJtggVayi9Dmfw4xjQqn/u2lCI3PdJ3qC8H\n",
+       "ihjnRaXsq+5DpNR5DrOIWp8ex9fwkk8WUZMoCe6PF0H9/Hj6Naqf1wf1E7/OHTt2rHo9V5lZM1pV\n",
+       "a8fzc0XKCv1JPcbyVYrWDq7HOKW+HkU7dLz25fjjj5fUzddUpJIkSZIkSRbMLpFHCmuRp3u3eMcG\n",
+       "i94zSM8bnuKxAiNFyhUirDism7EiGLAuPEKI9i9FztT6iHlm7r7+GGMrjKV+R3HzXDEl3Orv69+C\n",
+       "P8/mzZslSffee6+krh1oR5TDVrDWUPhqcT8i6juW756zXA1izKHU1KqEkaLC2Iyy46NMoJTU5kty\n",
+       "6COuF409Lw/l7asIAeVm7YNaP9Fav8mhoC73zWvlimLrmGQu0N74ALmSRrv59VCSht7D6K/99ttP\n",
+       "0s651RzGH2v50Ejd6B7jUbDUn/HMdfsqUrUKpMPbipJvWl9SkUqSJEmSJOnJLqFIOUMjVEqMlceo\n",
+       "FXK6lHyEPFM1VgDWJO0z1EcKq8mj6LBuxuoHt+L7RjuORWtkydCoP/ezweoqWW2MA46j/z26r1Ul\n",
+       "QPnFd2uoH0Gr1ct4rh0Hy4+rzcRNLjqUIOZc5PMStSF5hEp9xZiKIiE9uipSpPgeFZQ5iDISRTSX\n",
+       "QL0k3xXUriHrzd/Uod1q13aUIxQrj7ZD6Ywyg4P3I2spanIps3kJV52jfsBXiXFKO5Tmtvs3uj+r\n",
+       "K2ucn+85P+NoqCo977dRfVmfpUqSJEmSJNkF2CUVqbEgAqPv+/axQUnYuHHjqse5lePvoceKiMCK\n",
+       "c8UF62Is5Yhyr1drIwKrcq+99pLURYi1+tShSHm+pVIEE+MFqzRSI1oVIcpRspYj/xCn1QptHVfL\n",
+       "/aBq5zJ+fah41AElwlXJSHWsnWvMpaitsPyJbrrrrrtWPI41i5xh+Krwu76KVFS/eWWMHkqtagvR\n",
+       "vqGRqo7ay/GuZKFERf6cKEQ+juifsdZo5ihzkWhVQImiPKw1XJ/xHkXA87toTfP243zML8b7WPmj\n",
+       "5v02qi+71p0rSZIkSZJkHfG4VKR4aubpvK8ihdXSut9TBFaCZ6h2i979FrBSyYEzlo8X53UfKf49\n",
+       "lpWBdYkiNa/orrFxvxbGVasixe+x/vh3SfkrReqAR2KNBeOspA6U/EiGUtq3bCVoM889FikJQ3c5\n",
+       "cL9HV6ZYg0pqHIoDc5C2bY2sdFBX5zVW+hJFI9Ln5DyjHdjdonYN9HxjwPmi85Qii2lH7xf+7hHR\n",
+       "Q9X9kirM9Rhn3PtKc4fy1a5pHlXHWj7Wfp5j7UIxNqlIJUmSJEmS9ORxqUjxdN7XnwBQosby7eGp\n",
+       "H5+P2vfoXJ+nfX8P3jeihsggt8rcp2noe+vaXD/rDcqNNdjXWqJ/fB+osXzQPJP6WNQqh/P2a1ie\n",
+       "x6t1THKcq4vRcX2p9SssrUn0JWOOOY6ShgLSmouNepfUQ/fHHIrPGT8/a5mr/rQD/omUm3oQAe14\n",
+       "/aK3CaVM+CXoR4+wpj74JEXjYax2phy0C2sM48XL57T6yHE+rx/1pj59fe/GutdyHsbDUKUsFakk\n",
+       "SZIkSZKePC4VKfwLsN4OOuggSV3+HLIU14J/wViUdhZ3UEawmtnpmvr0VaTIrO5WG+/513vumLUC\n",
+       "VaCvauHti7VGO7f6vLkqQ0SPU8o9Mxb42rkfylgsVxm8D2ojC2nzyDIdGmXVN1u/Q18x51G6aNu+\n",
+       "18FXDP/MiLF3edh///1n/s1eif/93/8taee5wdjed999Z/798MMPSyrX3xWgee9a4bsK1O6xR7lQ\n",
+       "pyMFp3aNiCKvuQdGEezRmnbEEUdIkm677baZv0eKEQoin30Vqb5rLNf1fGIoaL4nYiupSCVJkiRJ\n",
+       "kvTkcaVIeQZprBesnr4ZnBe1kzXg8+X7GA2NqsMqdMgKjXI39D0zuU54n9/3PThWB1YLVgc5dqL6\n",
+       "9AW1w8cTimDJDwLcKqYfOX+tknPSSSdJ6hTX++67T1IXyeRgjbUqUj6PovNC5HcURfKU/JywornO\n",
+       "YYcd9th39DVt3lo39/8by4eillIU1/bt2yV1cxxfoLEiaFvXsqG+PO7LVFKUGBPsK8n110suQMfL\n",
+       "Rblrx1OpX0vnYY7svffekrr2ZXxzz4uu43+nvVEOPaLb/V2Z41wfZbivDxpKZCsog5SXe8FQJQpS\n",
+       "kUqSJEmSJOnJulakUCp4CiXbL5EaKCK1VpRb0Dwdc53IGuJpOsr03Ncaas3OC67Y+Pt2lAjHc9B4\n",
+       "bpFasDIoB0/77ofC+3vf5wnlCQXJ+6Xve3D8CVBy8PtozfNV8kugvviT4J+BdYhvWW3U3aZNmyR1\n",
+       "1qP7PVAO1BUUGcbPMcccI0k64YQTJEl33nmnpK6/o6jIyCpEcUQdac3r5e1NO3Be/DH22GMPSZ3i\n",
+       "dvfdd1ddj9+vtN8XawNjjb3wSjBmaFvmBv+mb13VJI8Rljp97wqNq2yR6uaZpH2ucx0iFV1xc/Wx\n",
+       "lFEdDjzwQEk7r0XPec5zJEkPPvigpG6sMzYjJcr3gaSvXEFx/z3WepQC2p9P2sMjUX0t43vOx/W9\n",
+       "flzfM8XTjlyP3Sb4PXPH7yEedeljmbnNGsM9J1KEmDuRUskcj5QV6sN1uC7nY/zUKpq+1vv4ZPwD\n",
+       "44B23m+//SR148DzfkUKMuO+79sfyluKEO+b1ysVqSRJkiRJkp5MxspV03TRyWS6tLS05tdNkiRJ\n",
+       "kiRpZWlpSdPpdMVEa6lIJUmSJEmS9GRhPlKrKVL48PA+199v4x9A/iaiozzq7p3vfKck6QMf+MDM\n",
+       "790XivNxncinBn8Ifw/sdeKT9+H4gvzkJz+ZuQ7w3tl9wRy+p55vectbJEkXX3yxpO69c18fo1Je\n",
+       "Ier1nve8Z9Xj8PmJ6lGLt2fEKaecMlOerVu3znzPe3rPKtx6PfwoeM/uai6+Y7R/pPYyrs8+++xV\n",
+       "r4dP0dAsy0Syve51r5u5nu8rV1Kna3368DO48MILZ65XSykSzP0YmGfnnXfeY3N99913l9TNOeb0\n",
+       "WAo8dbrkkksklX1M8BHZsGGDpM6P0dcifF722WcfSdLtt98uSbrgggskSe9617skdWsKa8L9998v\n",
+       "qasveHQV39MO+PacfPLJkjofo9/93d+VtPPaSc69o48+WpL07W9/W1IXfcfcP/zww2f+fsMNN8xc\n",
+       "j3oypl7xildIisfKAQccIKnzBXJfJCI3o2gsroev1Wte8xpJ0sc+9jFJnZ+m+23Sry960YskSddf\n",
+       "f72kne8R9IPvZcdxZ5555qr1G4rvmXf++efP9XoO17nssssk1fulshbiIxWtLYx31oQzzjhDkvSp\n",
+       "T31KUjeu+d73oozuVZFPFO3JPHzlK1+5aj1SkUqSJEmSJOnJuozaQ7GJoneIxvI96Uo5NaLsr1hX\n",
+       "3/rWt1b9PVZGpEgBT8NYynxG9eFpOVJw+D3Rgx7l5rk8sPKwjohyQoFwqxV46o+sPi9vxFAlKso7\n",
+       "FIFis2PHjhW/p30iJaqWUp6x2twwtZnKh+4FCVHEVmuOpdqImZLqU8rGXMpJhLqDmrP8erQZcwVl\n",
+       "aqy2dGqjneiDW2+9ddXjXO10dZkxxphnbEeRt/yd83nfUC6OcyXH282P8zxQrC2sOShRfj7KgVJU\n",
+       "gnozdhz6mzxNDmPKM3zTf6ypfKJi8/1Xv/rVVcvnkdIoMr42R3g0YhQxTD1pD9Y06ueZ6Wkv2pvy\n",
+       "+D0EJZHxgLLnOeN8jfPM7Sg4d9xxx4r1Yzz79binR3B9Xxv4XbQ2lda4aK2inrVrdSpSSZIkSZIk\n",
+       "PVmYIvUbv/Ebj1kHPL235qup3a+nZI3y3r60czrvU0u5JlxR8/fXrfBUjDXoma4pL9aD5/bgd9Ge\n",
+       "a+Te8f2XIp+yEvj2cD36CWsiyrQNrZnNsRojH57S9daaWmWnpHYwf/BbiZTAsfZ5KxFlPMfKxhoe\n",
+       "qg65H8tyK5Ux4BYx4AtEfiR8j9Yb+Da50kSfozRgkUeWd0klRSH4wQ9+IKk8NunjaO1lzOLbBa7M\n",
+       "4CtWCwpYlE0fv8joHsLvvNzRrgIlSvcK1uxaRYN+iNr1rLPOkiTm3A+kAAAeOklEQVQ94xnPkCS9\n",
+       "7W1vW/E4b2fKidLp44G5xJ6HKESMg/+/vXOL1bQq7/h/hWq0SEAOHcYZyIzhEMdUFDKjiSFgAqYa\n",
+       "g/SCWhsSLZaYCEqMMVQTy45cWE1UqBfERhq1UqrB4DGkggEsFzCAjAOMQCfM6ECYmQJqJNEEm9WL\n",
+       "/f3m2/vZe+31vus77b3n/7vZ+zu9h3V61/Nfz/Ms2l/8HUpifLaVfKO4rtJekTVKx6UfxHbbdVWj\n",
+       "NGZRH139ja1IGWOMMcY0MjNF6rWvfe0Sz3h2ksZK2LFjh6ShsvHggw9KKs9OW7OSEtWEr9R99923\n",
+       "7PeYTXc9fsyay+/7+qZgJZVm71xPLXtxzPqL9cism8/junqkZo3hQ9YabdbVioOu2atLYJW0cvXV\n",
+       "V0saZhbndUl5Kfl59OXSSy+VNFQvRvVNG5VS+8SPgr+jKlJE+sQoSalsocLZZ58tSbrsssskSQ88\n",
+       "8IAk6cc//nHTtZT2C2yFMRHfLqLxgPPUxoS+0Jdj3UR/NtTwksVf8m/ldcyM3XX/y9p9onDgm1Xy\n",
+       "GYsKQ1SkKN/SM4YxnSi9kvLZ+iwqgQJUi+SN/q+xXXL/tDOeAXfccUen8wPPgFhO7DYSidfR+myI\n",
+       "7a7UDrs+Q2rPYsbWGlakjDHGGGMamZkidfjw4SPWALP3aB0Q8XHhhRdKGu4pds899yx7zNbZP7Pj\n",
+       "p556asXv9d0pmlk8ESC1WXhp/y2sh7iXXFewHkqKVYxmq/mq1Wb7s8iWPwqjWvVYW29729sk1RWX\n",
+       "vopbCfxCukY+zQrKB+uX9ltTUGvHa1H2UKB+9KMfSRpdxaNPtt5LhL5e8/Ho6h/aFRSuGIUF3Cf+\n",
+       "pF2j0SL8blz+o8B1cZ1d9w+lnOmTtYhSIptrYwZjYN8I5BKf+9znJEn33nuvpO7+xH33GY1Qrigz\n",
+       "KIjsudiVWM41Sr5Lsc/Hz+MztDbW1JTkrsqZFSljjDHGmEZmpkj1seBKChTUfHZQhEqzc2bZtWgx\n",
+       "ZtNdMzwTLYb1ULvnUoQA5+XzGFmCXwBEa6wWtUX58D1m6aV1/q55fkr3y07qNQVwtUM9/PSnP5Uk\n",
+       "3XTTTVM9PxFb41K4JgXth37QNfdSiaigLlRHaHvR/w/I44RfXa1PMrZEVQ1qfaEvKE21XHW0PSz3\n",
+       "2q72NRgz2L0BaFsoPaMqHCgItIm+6noERWv79u2ShmN5a844fJDIGRfHzlq9RFB0RoVnTsl/t0RU\n",
+       "AHnGdW23qOuUQy06rrT7B0pSVyWVdlFSnKC0esMzEbW+VXl2HiljjDHGmAmzKjOb96WmuNRmlczW\n",
+       "u/pcdM0DREQAyk4rzLqxnuOsHGsR64DvdY0kImoRKw4rYNOmTZL652GK5Yj1fNFFF0ka+hlQ7jE7\n",
+       "8lqB+5y1stY371br8Vv3cKT91yLc8MPAT6fkhxF/vzDypmv+l66KeG1sGVVdK1Hz8Yn+ZqMSc9VF\n",
+       "KGPqsFUBY+xEkRo1YpbrQUmCGE1XWkWIygxRiSXlpO99jyuaE2WLvtjVRwmFhrG2tQ93feZFRYo+\n",
+       "TT139V1DSeI4lGPN/zbulxtzLval69hqRcoYY4wxppGZKVKvetWrJmbNxXXb2nnwzC/lwIh0zR3D\n",
+       "9/pmbC/BLDtaS8zSWc8uXRc7pGO9kaMGxaCU3bVv5AlWG4oNPlHkXhmXUjdr+uYDmxS19j1q5BAK\n",
+       "EVZi34gd2lXNGo6RV/ip1PwqFlqp0YIcNZ8PljRl3NUyH5WulvS4FA8oRaPRl2lLo47dHGfUCF+u\n",
+       "Y9euXZKGCh0qOz5TvB8jauP5ua5x5YHqmim9Bu2OXShQamqrLSUfolbfPvpoaQznfLxP++S85FSs\n",
+       "7VvKfZ188smShvfbddWo79hcqu+uPl1WpIwxxhhjGpmZIpVSWpJhe1z0tRr75oDhukv+E1ixREy0\n",
+       "rtPGWTLnjdfL/dYiavAb4HhYF6X7b80+i1XC8bHy2M+rlEXZtFErx1Fz9aDytOYuIiq05h9BJN2W\n",
+       "LVskdVfSFkbtRfWXNogF3nffQa4FSpmsx01XFbtm2Y8LMq2jMKA6d41gjowrvxKgrlNfMc9VySeL\n",
+       "92P+LhSsUaMUY0R1Kyg5XB/PgpoiVVq96FtvlA/9iXYQ2ynPcr5P+XG+0047bdHvSv1x7969y74/\n",
+       "LoUvQvnyrOY8vF/DipQxxhhjTCMzU6T+8Ic/HPG5YN0zzv5RZPD853utOUJKMAtltnzgwIEVv1+z\n",
+       "zPHxGDVjNlYR1hVRdKX1X6yN2vo+uXGwGkqRKDHioy+xvoj+wwrpGsFhRmNUXy4Uy1Zlq28WbNpb\n",
+       "V/+UhX5R0feDe29tw9NSoCIoQHHvtEkTFZQYEYziQN9GGem76wNtigjeUYltJkYzMtYwxgPKSvQ1\n",
+       "w8dqVEWqb8RzCcqZtt7VVyhG3Lb6+NGvyF1HeUVlMe51yXmpF575rT5ak/KrjmMU99d1rmFFyhhj\n",
+       "jDGmkRUVqZTSaZK+KekvJGVJ/5pz/peU0pykf5DEdPvTOec7Br/5lKQrJP2fpI/lnH+y3LGPO+64\n",
+       "I9YO69bMAlEsTjnlFElDq4j1YGa1UUlh/bbV9wYrsKZI1SxlFCGspNYIEKyhmIMjKl1xll46D7Nu\n",
+       "ypMoOq4zRi2OOvvHisKKMZOhtuP9uGi1Ikt+PCU/Ddpd1/a3UE2IPhcln6nV6p+Hjw/XOW1FKrYh\n",
+       "VMi4Fx2KT6vCgVKI3+SoMOaVMtBDzaeoa6RoV0ZVtIDy4pnYVR2OOQJjxHltb8EIfZZIXo4PjPkx\n",
+       "WpB+SM691t0YRl3lKVGKfu36zK4t7b0s6eM5510ppddIejildKfmJ1Vfyjl/aeGXU0rbJL1P0jZJ\n",
+       "myTdlVI6K+fclgXMGGOMMWYVs+JEKud8UNLBwf8vpZR+qfkJkiQtlwTovZJuzTm/LGl/SmmvpB2S\n",
+       "7o9fPP744zvno8Fzntl9ydO/1cpEqcEXqwTWQG3dm1k+s9yueadKMPtHecDHCbpGHWIVkFsFq6Cm\n",
+       "wEW65iLZt29fr+NOmq7115eYs2ba4GcyK5+zWvsutZOSmtE3e3QX6xZfEfLSoBKPWmbkSONesJi5\n",
+       "h1of4TrOOeccSUP1GUWKvQFnBRGXRPAy9rZmyI6MqnpTr2TUJs/S7t27l/1+10jk2nWNK89UV558\n",
+       "8klJw7Gra7Qmqw8oiKy6RJ8rfAFLOwOcfvrpkoZjDT5x8VlUUlC7Knwcj+urRbxzf6MqiKXy5Nlb\n",
+       "o7OPVEppi6S3aDgp+mhK6RcppZtTSngovk7SwtnRMxpOvIwxxhhj1hWdovYGy3q3SbpmoEzdJOmz\n",
+       "g4+vl/RFSR8q/HzZKXsXNQqFCUWK2X/JAi35TtXAmnzkkUcWvY+l3Rp1hDXHcVjn7muFxfXwuC7d\n",
+       "l9adsAHfqlKuDxiX1dpKVKDGrUShbpApnnY3rkz2Xakpf6PuQE99syci1iJqS01pRWVBrcHKwxru\n",
+       "m9updHxp2Fdi5CzXSL6hcUGdcx7Gpq7qM2Na7Et999DDd4UyjWNkbb9EFJ04NsXdDXjNmBajsmgb\n",
+       "1Ekpmo96og+1goJB+Y+aZyhmNo+KYvTrpbxow9Q7Y3TMQ9R3dSJmAu/rc8UzJ+bPYixkbEDJozw5\n",
+       "D88+fs/qB/Uan+OlsS8qXYzNKE4xQp9yrilSnI8xJWZcp13TT0tzh5JyzJyiRnUilVJ6haTvSvpW\n",
+       "zvl7kpRzPrzg869J+uHg5bOSFsaXbh68Z4wxxhizZmBCeffdd6/4vbTSGm+an9Z9Q9ILOeePL3h/\n",
+       "Y875ucH/H5e0Pef8dwNn8//QvF/UJkl3STojh5OklPLc3Fz/uzLGGGOMmTJzc3PKOS+7QWxNkXq7\n",
+       "pMsl7U4pse71aUnvTym9WfPLdvskfViScs57UkrfkbRH0p8kfSROoowxxhhj1gsrKlITO2lK+YQT\n",
+       "TjgSMUM0Wswfw3op66SsX8f9huJeYKz3Xn755ZLmZ5ILfxejhVinL63nx/XXEpzn85///IrfjxEf\n",
+       "XX27+B33fe211y4676ThPPztu5M46+1dfYji+cZ9/Nr5aEcxfxdRmdx/yceHeopZl/GT+MxnPrPo\n",
+       "fJOCdnPddddN5XzAeb7yla9IGvZj/EfwgYz9BH8M/Bvo1zHDOuMD8vtVV12l66+/XlJ7hGxX+rZN\n",
+       "iP5ifc/32c/Ou6biQ4IPzeHD894WNf9L2m4t0zXnm3Z53nDDDZKWRlOW+hJjOr49+MDwPm2F6+dZ\n",
+       "cc011yw6L2M85RMjwN/znvdIGkalPfzww4s+ZwzHt43rp21ynhtvvFHScKzn+uL+rdQPz0aeUeSD\n",
+       "ok/Qt7kv7pPzffWrX5U09NelnVBOnI92+cY3vlHSsJx37dolqTy2Uu6f/OQnF52371jc9ft9xzLq\n",
+       "hftszV81Nze3oiLlzObGGGOMMY3MbK+9hUoYlma0enh/27ZtkoaRC1i2pX20StFkJeurtk8UljFR\n",
+       "S1gtKDFR1ePz2p540DXKkN+VjosVhvUSI3SwRsZF30zXk45mG/fxS5GlXSOzSurApK37yKxX10vt\n",
+       "vVRfqDVY2aVIJZSthblepl22fRk1MzN9muP0PR5jRNe8O9Muz9LYVupLjOmjZqrn+KXz80wpRTxz\n",
+       "/tpYXrqPUi5AlK1SvrOY0b30+1qGfNrRAw88sOznpb5aUuPj6k+tHXUdu+NYUouC7PpsHTUflRUp\n",
+       "Y4wxxphGZqZILZyZx9kkO3ST4+Liiy+WNFRUmDWXcsKMmkskghIFMXdKJO5H1TXzeImuWXSZ/XM+\n",
+       "rAWyL7/73e+WJN13332ShhY9e+zFvfaOdmLunKjwTctaX+37w9XA1wxrH7+QkhWKv0QtZw7WI8rV\n",
+       "cuB7EpWAHTt2SJJ27ty54jnWG2SmruWAg1L5Ab43tNGYx6qmhMQcedPOwYYSgbpeUtlrufdi3jLu\n",
+       "K+YgxKeIvsDrUTPso8rGeqqtjkyKUXPDdWXUMZi5As9IfML6YkXKGGOMMaaRmSlSp5xyStEHhdk8\n",
+       "s3t8opjlnn322ZLKVlXfWSr7D0HNiqrteM7nk7Kuajt/R2sAy/3pp5+WJJ1//vmShlYQCtV6pXX9\n",
+       "m3aEtTfqfk6lqNFaxAoKLaw1ZYryI5sxVnJJcerabyjHlTLol5SU7du3SxoqKqjco6rHNWoZxvtS\n",
+       "ajsxQzfn7etTVVPB6SNER7EnG5mz+bzknznt3Q8oD0A5Y4yAmv9njPSO5crvY+b2qFDVlCiUPtpp\n",
+       "bV/UWF+cr6Ys1uA4XX2eJkWspxq1iHiiFLtmMC9hRcoYY4wxppGZKVIr+TGRK4PZ8/79+yUNZ6Pn\n",
+       "nnvuWK+F2Sg5WWqKVA2sjHFFS8Xj9I2WQ8H42c9+Jkl69NFHJfXfk3CtghXaN+9VzPs0KWoKDPW0\n",
+       "1pQoQFHmL/2s775jEcqtJTfMvffeK2noozKq2lgCZegTn/iEJOmSSy6RNMx/c+edd/Y6Hj5OtImS\n",
+       "7wsqO0oJY9LBgwd7na/WNlG/d+/evegv1PYpjXU/Lr/SEnEsjVFvK/nbLaQW6Q2tkdMoWai59JmS\n",
+       "IlVqB+OK1J6271oJlLWu1BRP8meN6kNmRcoYY4wxppGZKVIrWaGldVysxtpu930hP9W41utnnbcn\n",
+       "Eq27qERt2rRJkvTss+tzf+nWCBJ8esigXYvcqVHzrSux2tpTX+KOBZQDak2rIkU7JvtzHx577LGm\n",
+       "c/aFe/3hD+f3dUcJu//++5uOh1pfo+ZLMy2iT1KNSfuoRVCgJhVlFtXvrmp4VLxWS33Omr7RjTWf\n",
+       "sEOHDklqG0MWYkXKGGOMMaaRVZHZvC9EhIwrAgbfjVGzDq9V3vSmN0kaWviT8heZNV3bC34dtAd8\n",
+       "6PCnmVaOFHj9618vSTrzzDMlDVWNtQJWOAoUVmL0u2jtz30jeWbBnj17pnq+1ZJ7rJTfaLVA25lU\n",
+       "OdHmjzaIzG3d267EuKM8UXjxzeuaszFiRcoYY4wxppGZTZdb/UWkoU/FuGanrdlf2YNv3HvYTQt8\n",
+       "o7AaukasrFWwjmuKEhEy+M5htYxqXZILppaxO4JCRj6ptaZIRT+Zkt9M3/7cN4LnaGLaChB1yl/q\n",
+       "BmUM36c47tdy4k2aSY95jCHTYrWM4eNWolqp5ejjmc9fvt+3/6yOUjfGGGOMWYPMTJE6/vjjj3jM\n",
+       "l8BawVJFiSL3w7jWtVt9XlAY1poiheLCLBz/jfXuI9bVSsJq5vvjyqGCj1XfciavGT5SKGvT3j+r\n",
+       "FcoRtQIlN1qDXaEcY9ZuM2RcbbZrri+UJ8Zm+hB1U1qB4PvrlVFWXlpgdcXM07X8mWu05gz0CGSM\n",
+       "McYY08jMFKkuM+doaWIVTSqqrO9+Ql13UF9tTHvdflrUIkXIFULUZwmUzhhFNmo+JxSUvr5Av/rV\n",
+       "ryQN74/9oR566KGRrmdaELUXy7HVNzEqyGsham+tQjQT+Xt4TZ0xRuI/SJ9hjK7V7azVxNWSsbtE\n",
+       "3z3yVmt05KSoRdnRDtkRgNUjnvEoouQMfPzxx5uuw4qUMcYYY0wjM1Okav5R0lBZwBcJa4jZJHmP\n",
+       "uloVpfV+LFoyWD/zzDOdjgd9s/f2BWtv2uvtaw18Z6hnrOGzzjpLkvSGN7xBkvT9739/xeNMKuKk\n",
+       "VUml3vFt27Ztm6S1o0jF/dN+/etfS1qqZvRt39TzpPdCHCe13egnTd+97GIWeiz7qDTFMZXXNUWq\n",
+       "ps6iIvfNaF0iroTgbzit3HmbN2+W1P0ZwzMJJaXvPqvrna6rBDzj47Oadk1/aF11sCJljDHGGNPI\n",
+       "zBSpLioSs0N8l7BesB76KkHRamKHbaKI8J3pOyvFSpsU41Kiajk11gv4XWB942fQur/ZqKCYtEbU\n",
+       "YE1xP+ecc44k6dxzz5Uk/fznPx/1EifKli1bJC1tx/hz1PJqcf/0S1SOt771rZLWhs8f9zBqLjLG\n",
+       "rLgXW1f67mUXI0yffPLJRa9RWKLSRV3XFKeozBE9hfIybrU/qs3T3sWhr3/ket3/tJVWn7qSPzNK\n",
+       "56hjqBUpY4wxxphGZqZIbdy4sWqJYp3gI4X1gK8IFu3BgwebrgFLlr32WtdHo9XQNffKpIkK1FpV\n",
+       "orr6dWBt4ldB9N1TTz0lqX9EC+0MK6hVBaAdtLYH+sm3v/1tScP7G/e+U5MCJY5+jDrD9dNO8XGj\n",
+       "Hrlvfo86wu9QdxaOI0Th8N0NGzZIGiokKB20Ker2wIEDne6Fa+0Kx0f1ZqxphTbI2Ng1ErWVkood\n",
+       "fUvoa7FuKG/GxPh669ati44bVcuoWJ166qmShvdPW+LZwPtPP/10p/ur+XDh08b991WwYub2vjkL\n",
+       "x7Wf7LSIfZsxj7GUdtr3fshTxhgybkYtZytSxhhjjDGNpFFz4zSdNKU8Nzc39fMaY4wxxvRlbm5O\n",
+       "OedlnfasSBljjDHGNDIzH6nbbrttiQ8EuTJ4n2gfwI8BFY31Uvwi8EPAj+CKK66QJF1//fWShhEv\n",
+       "ZDFlXXT37t2LznPGGWdIGvrUxJwf+CWwzo+/xQc/+EFJ0s0337zoeiM33nijJOmee+6RJN1+++2L\n",
+       "Pr/44oslDf0PYrTZpk2bJElXXnmlJOnLX/6ypKGfAlFq+DfgV8A6f/QJ4/pZz6YeWJfGT4D7M8YY\n",
+       "Y8w8VqSMMcYYYxqZmSL1/PPPV6Pt9u/fv+Ln5IAg1wjKEcoMoLSQTZ3vlzJY1/bQi1l2Y/RhLQLo\n",
+       "lltukSTt3Llz2c/PO+88SdKtt9667Ocxt0vMdouSxl8iTUpZcYmoiPVBOfbdC80YY4w5WrAiZYwx\n",
+       "xhjTyMwUqT/+8Y9HfIzIOdKa56i2bxW5LVBk8Knqm9OjRMwtg89XSfEqKVH4fHFd+CpF4nXX9oar\n",
+       "RWbGfD4x39Kk9p4zxhhj1jpWpIwxxhhjGpmZIvXSSy+NnPG5KzEbLQrWuBSpmL22dW88fK2+/vWv\n",
+       "Syr7JsXd7mvn4/ul7/E+UYz4kM0ix5gxxhizlrAiZYwxxhjTyMwUqeXUDvbaGlUpwkep9j6vUWD6\n",
+       "Rqfxe3ywYNR9kcgfVdqzr6svGddXU6yoi7hvlTHGGGNWxoqUMcYYY0wjM1OkjjvuuCX5mFqj9iIx\n",
+       "ygwfJpQnIHM3ChjRal19g4j+iz5Lkb47S5NJHIWs1YeM+6j9nvxaJ510UtN5jDHGmKMVK1LGGGOM\n",
+       "MY3MTJE66aSTlihS0ZcHJYc99EqZ0FGGUJxiHiTyR6EcsRddVJJQZPAR4i970eG7BGQY5/wl8FWK\n",
+       "11WC45YykXcFJYo8XTVfKftGGWOMMf2wImWMMcYY08jMFKljjz32iDJUUmrw8akpJSeffLKkoQLz\n",
+       "4osvLvs9Pn/1q18taWlG8qhAnX766Ys+f+KJJxa9xhcrXl/Mw9RViYKaElWKSozgk4XP1QsvvLDi\n",
+       "972nnjHGGNMPK1LGGGOMMY3MTJF67rnnjkTT1RQpMn6X4HP2qqtF//3ud7+TJJ144omL3sdnCl8p\n",
+       "8iqVFC7Alyte96SImdS7QvmgPI3qg2WMMcYc7ViRMsYYY4xpZGaK1G9/+9slikjffEuAYoQSVYtO\n",
+       "I28UmdTxlYpK1v79+xcdv0QtT1NUykqKGb5M+FyVvtc38ztKG8ffu3dvr98bY4wxZnmsSBljjDHG\n",
+       "NDIzRWo5/5xR96jrqtSg+Dz//POShlF6v/nNbxZ9ji9VjdJ1kzmd/FN8Dx8n8jtxfpQxrmNcHD58\n",
+       "WNLSjO/GGGOMGQ0rUsYYY4wxjcxMkRqF6PNDlByKT1dFhwziKFD8vm/eJ5Sn+Doel/xVGzZskDRU\n",
+       "vIgOjJnTxwU+Wpyf+4v3ecIJJ0jSkozzxhhjjFkeK1LGGGOMMY2sKkUKRQSfoUOHDkla6ttDlBwZ\n",
+       "zfE1wucpKkTx+OyNx+/x13r88cc7XefGjRslDaP+Nm/evOjzkm8VyhnRgNBVQeuaJytChnYyyeOj\n",
+       "hVJ26NAhHXPMMUfux4rUbNm3b5+2bt0668swA1wfqwfXxerC9TGPFSkzspO/GS9xom1mi+tj9eC6\n",
+       "WF24PuaZmSJ1wQUX6B3veMdUzjU3N9fpe5dddtlUzzcuRj3f3XffPbW6MMYYY9YTVqSMMcYYYxpJ\n",
+       "k94XbtmTpjT9kxpjjDHGNJJzTsu9P5OJlDHGGGPMesBLe8YYY4wxjXgiZYwxxhjTyNQnUimlv0op\n",
+       "PZFS+p+U0rXTPr+RUkr7U0q7U0qPpJR2Dt47MaV0Z0rpqZTST1JKJ8z6OtcjKaV/SykdSik9uuC9\n",
+       "YtmnlD416CtPpJTeOZurXr8U6mMupfTMoH88klJ614LPXB8TJKV0Wkrp7pTS4ymlx1JKHxu87z4y\n",
+       "ZVaoC/ePwFR9pFJKx0h6UtJFkp6V9KCk9+ecfzm1izBKKe2TdF7O+cUF731B0vM55y8MJrivzTn/\n",
+       "48wucp2SUjpf0kuSvplz/svBe8uWfUppm6T/kLRd0iZJd0k6K+fsxF9jolAf10n6fc75S+G7ro8J\n",
+       "k1I6VdKpOeddKaXXSHpY0qWS/l7uI1Nlhbr4G7l/LGLaitQOSXtzzvtzzi9L+k9J753yNZh5YvTB\n",
+       "JZK+Mfj/G5rvMGbM5Jz/W1JMZV8q+/dKujXn/HLOeb+kvZrvQ2ZMFOpDWto/JNfHxMk5H8w57xr8\n",
+       "/5KkX2r+oew+MmVWqAvJ/WMR055IbZJ0YMHrZzSsGDM9sqS7UkoPpZSuHLy3Ied8aPD/IUkbZnNp\n",
+       "RyWlsn+d5vsIuL9Mj4+mlH6RUrp5wTKS62OKpJS2SHqLpAfkPjJTFtTF/YO33D8WMO2JlHMtrA7e\n",
+       "nnN+i6R3SbpqsLxxhDy/3uu6mgEdyt71MnlukrRV0pslPSfpiyt81/UxAQZLSd+VdE3O+fcLP3Mf\n",
+       "mS6DurhN83Xxktw/ljDtidSzkk5b8Po0LZ7BmimQc35u8Pd/Jd2uefn10GBNXCmljZIOz+4KjzpK\n",
+       "ZR/7y+bBe2aC5JwP5wGSvqbh8oTrYwqklF6h+UnUv+ecvzd4231kBiyoi29RF+4fS5n2ROohSWem\n",
+       "lLaklF4p6X2SfjDlaziqSSn9eUrpuMH/x0p6p6RHNV8PHxh87QOSvrf8EcwEKJX9DyT9bUrplSml\n",
+       "rZLOlLRzBtd3VDF4UMNfa75/SK6PiZNSSpJulrQn53zDgo/cR6ZMqS7cP5Yy1U2Lc85/SildLem/\n",
+       "JB0j6WZH7E2dDZJun+8j+jNJt+Scf5JSekjSd1JKH5K0X/ORGWbMpJRulXSBpJNTSgck/ZOkf9Yy\n",
+       "ZZ9z3pNS+o6kPZL+JOkj2VsRjJVl6uM6SRemlN6s+WWJfZI+LLk+psTbJV0uaXdK6ZHBe5+S+8gs\n",
+       "WK4uPi3p/e4fi/EWMcYYY4wxjTizuTHGGGNMI55IGWOMMcY04omUMcYYY0wjnkgZY4wxxjTiiZQx\n",
+       "xhhjTCOeSBljjDHGNOKJlDHGGGNMI55IGWOMMcY08v87vVmfz9SwBgAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7ffb01cf9110>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "feat = net.blobs['conv4'].data[0]\n",
+    "vis_square(feat, padval=0.5)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The fifth layer output, `conv5` (rectified, all 256 channels)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 34,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlIAAAJOCAYAAAB8y+mTAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzs3XmMXdd17/nfEUWRLM7FKhZZHDUPLcu2HDg2XgBbL0Hw\n",
+       "AmR4iYOkAwSdP/JHA+5uJ51AkB14uIITDw3HaSCIjaT7OfFrdAYjhuP8Y9hOolY8QLFlJ9ZEDTQH\n",
+       "cagqVpHFSRQlUTr9h7j2XZd1eId9z3jv9wMYOt5VrLvrDqfOWWvttZM0TQUAAIDB3VD1BAAAAJqK\n",
+       "CykAAIBIXEgBAABE4kIKAAAgEhdSAAAAkbiQAgAAiFTIhVSSJP8lSZJnkyR5IUmSh4p4DAAAgKol\n",
+       "efeRSpJklaTnJP2MpBOSvi/pN9I0PZDrAwEAAFSsiIjUOyUdTNP0SJqmr0n6W0m/VMDjAAAAVKqI\n",
+       "C6ldko65/3/86hgAAMBIubGAn9kzV5gkCfvSAACAxkjTNMkaL+JC6oSkPe7/79GbUam+TExMhONL\n",
+       "ly5d9/uSpP37DFrn1Wq1Mo+rwFyyMZdszCWbPX5V87jxxvap9MMf/nCuc7GffeXKlYH/bdXPi8dc\n",
+       "sg06l5tuuikcv/HGG5I6/26uW7dOknTmzJkw9tprr133561fvz4cP/jggwPNZfXq1WE+ly9fliS9\n",
+       "/vrrff3bXur0Gr3nPe/Ro48+et2vF3Eh9bik25Mk2S/ppKRfl/Qb/f7jbhdPHpstA6iLmIucOvxs\n",
+       "NM+rr766YsxfKG3YsGHFWDd2MRbjtdde6/txmuyBBx4o90IqTdMrSZL8r5K+LmmVpP/Gij0AADCK\n",
+       "iohIKU3Tr0n6WhE/G20WwvXH/u7CQq32X5TvrW99azi2FM3TTz8dxnht0CQ33PDm+qRhohgx9u3b\n",
+       "J0nas6ddNWKfncOHD4ex06dPlzqvunj55Zczjwf9t4hDZ3MAAIBIhUSkUI61a9eGY7tTtPy49GYh\n",
+       "oETUo0qrVq0Kx1bX54tFeW1QV3ZOGWZhT15uu+02SdI999wTxpaWliR1FlWPa0QK1SIiBQAAEIkL\n",
+       "KQAAgEhjldqzVFfTWajdLztdXl6uajro4sknnwzH47BMuF++aNh64Jw7dy6Mzc/Plz4ndNq4caMk\n",
+       "afPmzWHMzj2Li4thrN+WNcP4wQ9+IElaWFgIY5YWn5ubK/zxgW6ISAEAAEQaq4iU7z7cZGvWrJHU\n",
+       "2ZHWlrDm1VV2UL4Vw9TUlCTppZdeCmO+IHScEIXqZO/d7du3h7EtW7ZIkl555ZVK5oRs58+fl9S5\n",
+       "YMLOob4AvQxnz57t+C/qwZ/3bfGTb41x4cKFFWOjiIgUAABAJC6kAAAAIo1GrqtPo9LB1YrmfR8p\n",
+       "S49U1UfFes5I7V4zdUilVtWJuZu77rorHNv8nnnmmaqmUyp7b/gUtKWAfQEzqmev1bim5XvxpRV2\n",
+       "rvMLJsaBT/vefPPNkqRt27aFsRdeeEGSdPTo0TA2ivvkEpECAACIVH3IAAOz5ca+Q/bFixermo6k\n",
+       "zkJh6zhch0Jr6/RuRY9SdXdE+/fvlyTde++9Ycxet5MnT4axUS6otZ3rDx06FMbstan6TtW3R8la\n",
+       "tFGnqCaq56Mx1sLDZz3svT7KfCbCfl9/LrPjqj/bRSMiBQAAEIkLKQAAgEhjldqzHjZNZ2kH3828\n",
+       "6rTDlStXMo+rZr1wquIXBMzOzkrqfB9agfW49VCq+nXxrIO3LdiQ2gXwPj3t08MYDT41Neg51PfJ\n",
+       "sy7r45DO8/zn49SpU5I6P9vj8nwQkQIAAIg0VhEpX5w9CqqOQqE3/xpZ4bvvCm13taPSmqOJLNK0\n",
+       "adOmFV/jMzbahnl9/YKEqnaUqJqPONl5Lauw3Bfmj+JzRUQKAAAgEhdSAAAAkcYqtUexKMrmQ99W\n",
+       "mOkXCdSp6HrcnThxouop5MIXUANF8mk6O9f5dKn11/ILbPz5b1TwiQMAAIg0VhGpJvNX9Lak3i+t\n",
+       "t2ibdT2X2gV+fk8oi4qsW7cujE1OThYwY1zL9pvyxeaHDx+uajq1YXetfm9GInXx6rDHJcaDj37a\n",
+       "+87/XbJ994hIAQAAIBMXUgAAAJGIAdfI9PS0pHY4VGr3F9q6dWsYs5SdL/Sz7sx+w1vrUeRDrdbL\n",
+       "yIdkfUdnFMdv1DuuLMS/ffv2MLZv3z5Jnf1nLFXtU3xHjhyR1Pl+HeUNnmONSzfpPPg0e7eNdX0f\n",
+       "pM2bN6/4fnufjmKPpG7882J92Hbt2hXG7Lka9T55RKQAAAAiJd2uwgt70CRJW61W6Y8LAAAwqFar\n",
+       "pTRNk6yvEZECAACIxIUUAABApMqKzatM7fnHrjrFyFyy1XUuf/InfyJJOnfuXK6P4Yv/szZStQ23\n",
+       "/+AP/iBzXlXwj//xj39cUnXFtjaXqp8TPwfm0snm8Id/+Idh7MqVK5XOpU7PC3PpZHP49Kc/HcaK\n",
+       "Klr351/fY9E8+OCD3f997jMCAAAYE7Q/QFe+I63xS4attYIfs6v7y5cvhzHrXr169eowtrCwIKkz\n",
+       "iuG7sNdRUUvLs6JQZTxuXsZt2XdRbAn5KHd2ryoKhWbyf0eyZLUDGpRF/KX27h/+b1ovRKQAAAAi\n",
+       "cSEFAAAQqRGpvV6FuCjOK6+80vXrlorwnaqtm+2JEyfCmIVnz5w5E8ayQrHWjb2umtyh13fMt+7g\n",
+       "p0+fDmN16hJuG6COWxqo1+cNGDe2a4eUnfK2cpFhUnu+n6aVUfgdQXohIgUAABCpcREpu/rkzq0e\n",
+       "rIjc9gmU2vv5LS0thbGLFy9KahfyXY9/resoj8LGqvg5W6THXj+pHW2r6rPl9+2y4k8iUsB4s+i0\n",
+       "1M6A+LGsxU39slYH/txjn8GshVbXU++/WgAAADXGhRQAAECkRqT2fHjfwm2+74OF4urea2cUzc/P\n",
+       "S2qn7qR2/43FxcUV3++7xloqyYdp/etaR01M6RlfTF51YbnvJ2bvCf/c1j3FC6Ac/lyVtdjM//0Y\n",
+       "lF1b+GJzM0ianbMVAABApEZEpHwhmN2p+qvFrKvJMtiVsBXASe35+avoJhfMZnV39c+3Ffj1W+iX\n",
+       "FZHydxnD3F2gOfyig6wFCLwPVvJRvF6LNjBe/N/IJkfNs/RqeTTM39dunyMiUgAAACXgQgoAACBS\n",
+       "rePnVng8OzsbxizcduHChUrm5Dt4T05OSuoMLdpGvE1O53l5p02zUoU+dBvTC6Qb65N06dKlXH/u\n",
+       "KLKFHHXoZTQqn588lZ3asx0Kpqamwpid3/ziElTPp8LtHMtnqH/++bO/R4MsfCIiBQAAEKnWEams\n",
+       "K+qqIws+omJz8R28q55f3S0vL3f9et4tLOr0elihfR3267O7ra1bt4YxuyvzeySOA+vEX/coS9kF\n",
+       "5haZtBYnUv33wiyTL/C2yH1Ve8H6v0ujFonyv5tFZf3fCTuXDfO3w7dbsfOBf317/vvoRwYAABhz\n",
+       "XEgBAABEqnVqz8KkvidT3sXIg7JiS6nZG9hWparQdx3UIaVndu7cKUlav359GKtjassXfBa1c0HV\n",
+       "55R+lZ3aa8rzUgdZi2jKNMp9xXzvwaJSez6Nt3btWkmD/a0iIgUAABCp1hEp468WreXA3NxcVdMJ\n",
+       "7OrYL52sw9Jx1IcvYrQO+L51R5nRzKy7Ll+YWlVLkSz2vJXx/NSxONffhdtr5SNEZUY3fbSlql0k\n",
+       "6sheF6n9vFS1uGWUsyJ+QYw5f/58OM4jGuh/ni2o8K9vL0SkAAAAInEhBQAAEKkRqT0f0q5TsbLN\n",
+       "xXccJrUHz4edN27cKKnz/XL69GlJ5byvffjfUkM+FeHD21Wr0+e8Cj51V/UiBdJ52eipVQ6/2Czr\n",
+       "Oc+7JMHOk4MUrxORAgAAiNSIiJS/Cq3TXYBdsRa1LBv1Y/uP+de8W8Rgy5Yt4diiU77ou9/Ii0Wz\n",
+       "8nLy5ElJcYXWgxRhXsueg+np6TDmI3TGPuf+bhTA+KmqDccgLSWISAEAAETiQgoAACBSZam9G2+8\n",
+       "sSNFYEXavtOydSz14f2sNNrExISkzpTDmTNnrvt9ZfC9pSyV4wvR/e9ZR1V1bd+7d2+pjzcoK87u\n",
+       "N+zr035WtBvThTiroDKrF5Qd+3RZ1uP1m9KzVJwvOM5KM1rfp16pStsQ1H8WszoT9/u+yzvlCaDz\n",
+       "b2nVHe6b0COLiBQAAECkpIqlrUmSpK1Wq/THBQAAGFSr1VKapplt1IlIAQAAROJCCgAAIFJlxeZf\n",
+       "+cpXOgpYrYjXF7YtLCxI6iwotWJk/31LS0uSOgtiszYetMLZ3/md3wljVacY/ePXaS6f/OQnJXU+\n",
+       "p1Yg7F8PKy72xdBZCwKyiowvXrwoKbsw+gMf+EDmvKrgH/8v/uIvJHV2BLffN68NS9esWSNJ2rFj\n",
+       "Rxg7fvy4JOkjH/lI5ryqUMf3btXz8HNgLp2YSzbmki2PufhFX/0usLENw/1CoV5zICIFAAAQqbKI\n",
+       "1JUrVzoiERZhsjtvz7cyyGprYGwZtdSOPvml33Xqil53WXsGWuQlptt01Uto82JR0iKX5Npzf/To\n",
+       "0cIeAwBGXczODTF7WxKRAgAAiMSFFAAAQKTKUnvHjx/vSPdkpZIGlVXknMfPxfVZQbRPm2Z14R4V\n",
+       "Teiyez3btm0Lx6dPn65wJsBgtm/fLkmanZ0NY7ZY5eDBg5XMCTBEpAAAACJVFpE6d+5cVQ+NHFl7\n",
+       "iZmZmTBm7Sh8of+pU6fKnRgCa1fxlre8JYzZsuBDhw6FMX8M1Int/ekXFFmE1RaASKMdDUf/7Jy3\n",
+       "efPmMGbvk157nWbt/dkLESkAAIBIXEgBAABEqiy1h2L40PcgoclYJ0+elNQZUrdu6PTtGs4NN7Tv\n",
+       "c6yDfAx7bZ5++ukwZn3WSLmiCax/oN9pwY5jegVhtNkiM98TytLDvVJ7k5OTkqT5+fm+H4+IFAAA\n",
+       "QKTKIlKrV6/ueWWI7vw+QtPT05I6o0BlRKQssuHbAti8aD0xHH/3nQffkZ7PHprEziWLi4thrMmt\n",
+       "SDAc2w9Pau/f6lu62N++mDYv69evH/jfEJECAACIxIUUAABApMpSe3mnLcaR3/R5eXlZUvmbA2/Y\n",
+       "sEFSZ9dsK/48f/58GCsjzThq8v6MTE1NhWNL88Vs0Ini2Gfa+rNJ9EbySOd1uu222yS1O79L0oED\n",
+       "ByS1/yaMIr+oype45CHmbygRKQAAgEiVRaRYsjq8OkQTbIm+v0OwwlAfMSszIrVp06ZwbHewvvB9\n",
+       "XN97fiGCvTY+6lV2NBMr2SIA/9mxz9gwLTAwmixyafsOSqMdiTL+74kvPM+Dz6T0i4gUAABAJC6k\n",
+       "AAAAItHZHEOxMKhPRVjKsapUxJo1a8KxzS+vdJ51x21i0atP3VnfMf8azc3NlT4nZLt06VLVU6gl\n",
+       "+/xJ7XOOT/OMW/rzqaeeqnoKlfDnMutEnpeYMhQiUgAAAJGISGEoVsTtl2hb8V9Vxcs++mRz8Xeq\n",
+       "w3T1bnLbDr9339atWyVl70XVxGgbxoN/D9v7ddyiUOg8D8cUh3cTsyMHESkAAIBIXEgBAABEIrWH\n",
+       "XPhQq6XTqupz5fslzczMSMqveLfJaQSfFrG+M76wkpQe6s4vatm4caOkzvcwm3GPnzp0/iciBQAA\n",
+       "EImIFHIxMTERji2yUVVEyt+hHjt2rJI5DMrvrWZF+r6NQ0wB5LV8VO748eOSml08j2LZPpq+a3bV\n",
+       "/EIS+0wQhULViEgBAABE4kIKAAAgEqm9Evli36L4zYOtMLrITXrXr1/f8ViSND8/L6kzbWQ9X/xY\n",
+       "Ewu37TXM2lT2xhvbH6ckSST17nFiBbM+NWo/b/v27WEsayNSe+59cf21jy9lp+/GdePmLPTPylZU\n",
+       "Ss93J+/3Obc0o0+BD/p6+VQ5mqNXiUO382Av1hXd9zy08+8gfRCJSAEAAERKqig2TZIkbbVapT8u\n",
+       "AADAoFqtltI0TbK+RkQKAAAgEhdSAAAAkSorNq8ytecfu6p5WMHlRz7ykei55N1nqA7PS9bjf+5z\n",
+       "n5PUWXBdVO8YXzBuBeUPPfRQGPujP/ojSZ0Fs2VuzlzX16guc6l6Hn4OdZrLxz/+8RVf8wtTbDcC\n",
+       "3yXaPmNWkCu13+u9dgqwz5E/Rz344IMdc6pSHV+jOs3lz/7sz8KYFfWfOXMm18fqtSBm0OfF/7wt\n",
+       "W7ZIyl6kE6PXHIhIAQAARKL9wYB8xGKYJeR5LLXOIwrVBEtLS5LKaZfgX9Os19fu0ummPJy8o6no\n",
+       "Lut843ce6LYLQUwkwj47tNloJt/6oqgdKnpFpAblf4a1ldm1a1cYs1Y0/v2cV4sPIlIAAACRuJAC\n",
+       "AACIRGpvQFbEJklnz56VRPi6aNZptomd0NHJPj/33XdfGDt8+LCkzpC7pRN4zYHyFbnhvC1y8JvL\n",
+       "5+3kyZMdjyW1y3KKKMsgIgUAABCp1hEpW2JuS3Ol4vZ/6pcVPg/LF9qhO6ISo8M+035fLNsryzt2\n",
+       "7FhpcwJ68efrW2+9VVLnvn9zc3OSpNOnT0c/ht+/s+rFLH5f2LzPv/a8FRmRsizRwYMHC3sMj4gU\n",
+       "AABAJC6kAAAAItU6tWeFqdPT02HMQnbz8/NhrOp0X4wqNotuKlJ7+bPCy7IXSljqY5gUCFA2n3az\n",
+       "1JRP7W3dulVSZ8p60B0Pss5zfgcF+5tRxvnQF2nnvXNDE/9e90JECgAAIFKtI1Ld7l793QCA3nxR\n",
+       "98zMjCTp3LlzYYwoEZDNF0bbsd/lwgwTycnqPp/HDhgxiox6jWKGgYgUAABAJC6kAAAAItU6tddN\n",
+       "3gVw/cpr02KgbD7tULUdO3aEY9shwKdPRjH8P47q9J7Ly6lTpyRJk5OTK742MTERjm2T3CYqo4+V\n",
+       "LzXwRfpNREQKAAAg0lARqSRJjkg6L+l1Sa+lafrOJEkmJf2dpH2Sjkj6tTRNzw45z9ogCoWmeuWV\n",
+       "V8Kx3VVXtRR527ZtK479Xandzfv999AcFrkfxYiULdDYuHFjGLPoii8Ot9YFVRWMD6PI9jz23vA7\n",
+       "lox7RCqV9N40Td+epuk7r459UNI30zS9Q9I/X/3/AAAAIyeP1N61m8b9oqQvXj3+oqT/msNjAAAA\n",
+       "1M6wxeappH9KkuR1SX+epun/JWkmTdOFq19fkDQz5GMAyIFPS1edbrhw4UI4np2dldTeycAjtddM\n",
+       "9l4bxS7WlvZaXl4OY5cuXZLUmT6v+jNWV9Yl3m+MbJtCN3XHj2EvpP5TmqZzSZJMS/pmkiTP+i+m\n",
+       "aZomSdLMZwYAAKCHoS6k0jSdu/rfxSRJviLpnZIWkiTZkabpfJIkOyWdymGegV3FVrU82hfIrVmz\n",
+       "RlJ7+TY6+TsOw7L26vi7vc2bN0vqjAzZ18tYUOHv3Dds2LBiftzNo+78e9hYZArXZ9EnH22ueyTq\n",
+       "kUce6fr16BqpJEkmkiTZePV4vaSflfSkpH+U9FtXv+23JP1D7GMAAABU6YEHHuj69WEiUjOSvnL1\n",
+       "6vJGSf9vmqbfSJLkcUlfSpLkt3W1/cEQjwEAAFBb0RdSaZoelvS2jPEzkn5mmEldy/frsOOFhYUw\n",
+       "VmYawD+WpUCseE4qpyNsU/hwrW0y/fLLL1c1nbHn35v2Pq6qU7//HNkuBb4weX5+vrS5ADH858U+\n",
+       "R00qXbDSC9+NvQyjmP6kszkAAECkRuy155dFWxdkXyTrj4vm76St2LCJUagyomg+ImWPR0SqHuy1\n",
+       "sUih1L6bts7NRbKWB1L7jtgv2mjSnT3Gk98rziJSw5zfbNGFVE7bCOu8bv9FPCJSAAAAkbiQAgAA\n",
+       "iNSI1N6xY8cyj/Mw6MaS/vvs2PpiSPXvh2HKTkc2fVPKUZO1qWyZizaOHDkSjk+cOCGpXXRe9lyA\n",
+       "GHmn38o+J9vjlZHKH3VEpAAAACIlVURQkiRJW61W6Y8LAAAwqFarpTRNk6yvEZECAACIxIUUAABA\n",
+       "pMqKzatM7fnH/uM//mNJ0quvvhrGrIfNpk2bwph1sfXf54+N789kfBGtsQ2PP/ShD2XOqwr+8es4\n",
+       "F9+3pczi9bo+L5/+9KclVdeby8/lM5/5jKT8C3D77Xdmc6n69fFzYC6dmEs25pKtjnO5HiJSAAAA\n",
+       "kRrR/qBI3bqinz59euCf1++y7axoFrqzvaHwpjp1iS+qE3MTdw0YxuTkpCTpzJkzXb/PPgtFdoAf\n",
+       "tDUMUDdl7SXKXyYAAIBIXEgBAABEGvvUXlXh66Z0QK+TMjenboKNGzdK4nkZJbZLgt+o3c5NPs2Z\n",
+       "tYAlb/fee68kaWZmJoxZF/oDBw6EMTaYRt1Y6nvnzp1hzBYrLS0thTF/PNTj5fJTAAAAxtDYR6Ss\n",
+       "GK3siJTfn28U+KK+7du3S5IWFhbCGAWrQG9nz56V1BnlqTp67VtQrF27VlLnwo9xiEj58/Wdd94p\n",
+       "Sbr55pvDmEUIn3jiiTAWs1gJ+bD3bFYU1+8vmhciUgAAAJG4kAIAAIg09qm9V155pZLHrTpcn7c7\n",
+       "7rgjHL/jHe+QJD355JNh7JlnnpFU//5Z1nFeai9EqCuKzEdPnVLgL7zwgiTp6NGjYcx68RTZkycP\n",
+       "PvU4MTEhabi+a/58bT2+fCGzpTeLSBuhP/4137ZtmyRpeno6jFna/Ny5c/k/du4/EQAAYEw0LiJl\n",
+       "V5r+yn9ubq6q6eCqw4cPh2N7bXz0qe7RHeMjlFn7JgLj4tKlSx3/LZIv5rY9ToeJHPh9OTdv3iyp\n",
+       "c0HMME6dOiWpMyJsj1HmHqDolLVAw5/DswrQ80JECgAAIBIXUgAAAJEal9qzXhB5pYos3Od/nj1G\n",
+       "kRseNiXV1S9fyPkf//EfFc4kP+O2Ye6427BhQzi2fkk+vcT7oTi+mDuPzbh92s1+dt6vn59nVRuI\n",
+       "Wwd8K6TGm+bn5yV1fn6LTFETkQIAAIjUuIiUFfPlVdRny93tDlRqL0H2Y3kvNfdLNWMVGTEDxo3/\n",
+       "DFn0gihU+fJukXLx4sVcf16dFLGUfxRYFLKMhRISESkAAIBoXEgBAABEalxqL2+WxvPFjpZ28wXh\n",
+       "lkbLK4WWR8qAdB6QH1tkIo3ezgMYTbxP64GIFAAAQKSxj0jZslXfFdW6W/sxfwxgtBHtBdAvIlIA\n",
+       "AACRuJACAACINPapPeM3qzX0kAHGky04scUoAHA9RKQAAAAiEZECAHXuZGA7HtA5enRMTEyU+nhZ\n",
+       "+6laG52tW7eWOhcUi4gUAABAJC6kAAAAIpHaAzDWNmzYIEmanZ0NY1lF5mWm+fym5qtXr5bU2cV6\n",
+       "0I19t2zZEo7Pnj0rqXPT802bNnV8TRq8d57NU2o/pzfddNOK70uSJBwX1Zl73bp14Xjjxo2SOp+D\n",
+       "MmS9h2xsfn5+xdd86tHm+tJLL4Ux+538e8M2ZD5//nwOM0YsIlIAAACRkir26kmSJG21WqU/LgAA\n",
+       "wKBarZbSNE2yvkZECgAAIBIXUgAAAJEqKzavMrXnH7vqFGPMXKw/SVYxoy9EtLRtr/St/ZuPfvSj\n",
+       "A8+lKE1/jYrCXLLZ41c9Dz+HOs3lz//8z8OY7eLgC5TL2KS5js/L3/7t34axqakpSdLly5fD2PLy\n",
+       "siTp5MmTYcw2ul+/fn0Ys0J7v0OGjVnhvSRt375dUrtIXJJ+8zd/s2NOVbI5fPKTnwxjWbt+FMUv\n",
+       "EnjooYc65lSlXnMgIgUAABCJ9gcN1G3/r0GXLMf+G9MtOoZi2XJpfwdtr6W/s7M76GH4JetZS9qb\n",
+       "zJb+j/ISct9l216/IqNQTTkvzM3NheNDhw5J6r+1hG9NkMU+d/59tbi4KKn++7hWsQhNGrytR10Q\n",
+       "kQIAAIjEhRQAAEAkUnsYSt1D96Ps0qVL1/1aHuk8z4f6yyw+LcPk5KQkaefOnWHsueeeq2o6hfAp\n",
+       "rDI+s005LxTZrT4rvVn3lJ6pKsXmSwiahIgUAABAJCJS6GiZAIybM2fOSGrvyTaKmhIhuh5bPOFb\n",
+       "DlgLAb/Yomp+vzwr6vf7FzbFmjVrwnG3CPS2bdvCsUXBu0XKe/H7NTYJf0EBAAAicSEFAAAQidQe\n",
+       "tHbt2qqnUBgrXvTF0pbCuXDhQiVzqopP4e7du1dSZ1H6wsJC6XOqA+vzM8p9pJpu69atkqTZ2dkw\n",
+       "Zqk9vzBg0P5H/aaw+uV/RlbJhI0N07uvDL4PXbfnxRelD5PSM1X1rxoWESkAAIBIRKSGcOONbz59\n",
+       "ZexThThZdzhWEDpuESl7v0rtKGRTlmOjfJs3b5bUGbWxz0ze7TV6sb3ufDdxi4YME8WIiQxZEXlW\n",
+       "iwBf1N+r83md9Rud63UOtQicP/d0a61Qp4UDgyAiBQAAEIkLKQAAgEik9oYwKim9Ufk9+lVkN+M6\n",
+       "8ykaK9Qd1wLzuqpTMbIVePui6bJTetfK+7Pbb2rbPwf99t3LSjnW4XXtR8zrPDU1Janz+bHfd2lp\n",
+       "KZ+J1RQRKQAAgEhEpDB2nc3LLGj0rSWqLqS0KJRUfaH9hg0bwrGf17izO/isu/qyWfTHR6yrft9U\n",
+       "xb8Go7bXZF7sM+0Ly23XgFE3Xn9BAQAAcsSFFAAAQCRSewh9UZC/qtN5XlO7Bo+jOhQlLy8vVz2F\n",
+       "Wur3c2RpfZ/qsn9bp/NClpjUshXu+993XBCRAgAAiDR+l45YwXfjBcpAR/Xm8G0z7Fwxbi1TYqxa\n",
+       "tUpSMxdTxESkrNh8x44dYczeJ6NedE5ECgAAIBIXUgAAAJFI7aHRm2uimeqY2vO9rWzDXp+SqLqr\n",
+       "d1V83yQWpvRv3M6rlsr0pSLjkgImIgUAABCJiBSA3PRbpFqH5f3X2r9/fzjetGmTpM476nGNSHmv\n",
+       "vvpq1VNACWJ2uzh69Kgk6cSJE2FsXPY1JSIFAAAQiQspAACASKT2gAJYUe66devCmKWJYopQfSF0\n",
+       "nW3dujUcnz59Ovrn2PNXZippbm4uHJ88eVJSvfrf+ELvJqfYrChZaqeQ/OID64zti5bz6Mq/evXq\n",
+       "oX/GuPCbrdvr4NPx9lwmSRLGbFHCuBXZS0SkAAAAoiVV7L+VJEnaarVKf1wAAIBBtVotpWmaZH2N\n",
+       "iBQAAEAkLqQAAAAiVVZsXmVqzz92t3n4Qro8UqC+WNQKLh966KG+5tIvv8Go70jcD//4X/jCFyR1\n",
+       "Fmhu3LhRkrRz584w9v3vf1+StLS0FMZ+6qd+SpK0b9++MPboo49K6izeXb9+vSTp1ltvDWNW5Pje\n",
+       "9743jH3XPGmmAAAgAElEQVT84x+XJG3bti2MnTp1SlJn4arN9fLly91+zb5t375dkvT+978/jFWd\n",
+       "ku73vVuGOs6l6nn4OQwzl6xi35hu8KP2vOSljnN5+OGHw1gVJTd+LnV6Xuo0l+shIgUAABCpsojU\n",
+       "mjVrOiI+eUURrmUdimMeI++7gjKWLA8ahboeu/udmJgIY/Z8+DtjH4kyjz32mCTpwIEDYSxrKfyW\n",
+       "LVskdd59Z/08uyO3KFTW1649zkPW4w3Kom7SeC4LLtvMzEw4ts+C/xzbZzCvLuX23s37/JX1uVte\n",
+       "Xs71MRDH2jPkvY9cVVEoDI+IFAAAQCQupAAAACJVltrLKwXVy/nz50t5nFFjXZ59t2czNTXV9d9a\n",
+       "yLtXZ+tLly5Jko4dOxbG/PEoIJ1XLr/4wEoHfEfmvNO/RaX2/CKPqjZ4zurOX9UmtJs3b6708b28\n",
+       "U3rjwNKh0mg+f0SkAAAAIrHXHjJ1K6jM667Qitb9z6vq7hujwS9csOiUj1KdPXtWUn4LP2wPxLwX\n",
+       "kiwsLIRj24+ubPY7Zf1u/jnNO8pnbrnllnB85513Suo8P9hr+cILL4SxqvdG9K8V57I2iyhKw+3B\n",
+       "WVdEpAAAACJxIQUAABCJ1F6fLJTt+wJZGsEXtBfVD6tsVmjqw9N2HNNhOYsVm3s+ZQAMyi9isV5R\n",
+       "/j2V9yIXew8XWUBbxxSR7wFoz29RhfySND09LUnaunVrGLOFMCdPngxjVaf26vha1cGo/F28HiJS\n",
+       "AAAAkYhI9cmWI9t+c1J7XztfjJnVTbmJ7M7K791nv2ded572HPk9CP2dLjAo3zYg67OYd9TEIlGj\n",
+       "uKS7m8nJyXBsEeq8O6/7SNPRo0clSRcuXFjxfX4PzjLbp/jzli068KqOjtXJqLeBISIFAAAQiQsp\n",
+       "AACASKT2+mRpAl80Z+H8vIqv68R+N5/as+JP3yslK9Q+KF+gWcZzWVQ3alTPd1C219m/p+zrefV9\n",
+       "slThKBcZ79mzJxxnFZYX9btbnyipvQH67OxsGNuxY4ekzgJ0K73I47zUi/+97X3lU8vWET6vDbJR\n",
+       "X0SkAAAAIhGR6pMVrPqCShtremF5lqxuyhal8i0grDjcPwf2b/u9U/VFm2UUJRbViRlx7D3ko5/2\n",
+       "nhhmr8ysz6ePWOWhqM++//zZcdkF7RZ92rRpUxiziIs/D5YRjbPdD/y+fxMTE5I6X1MbKyMi5V97\n",
+       "ezx/LrOIKBGp0UdECgAAIBIXUgAAAJFI7Q0oK4U1iqkiC5f7vk5WoOs7Dlso23eM7jfUb2mCmZmZ\n",
+       "4SY7oFFcHNBklkLyhbqxxeD+tb148aKkzs9s3qm4Mj77VfWost/Nbypu/aP8nMr4PNl5xheWG1+U\n",
+       "bq95GXwq2hau+DSepRkx+ohIAQAARCIiNSAfobFutn4Zfd57eRXFF4xnzdnuOH10ye7m/d2oRRH8\n",
+       "z+h37y37t34umzdv7u8XwMiw91PerTRsHzxfjOyLlWP5QnBbbp/3575O7RT8nphbtmxZ8fUyIlL2\n",
+       "nGctSPDzK7ODdlZ2wsvaSxSjiYgUAABAJC6kAAAAIpHaG5APufsizKaw1Nn09HQYyyrQ9IW/xlIw\n",
+       "vVJ2ltqz/0rZxcM2duTIkRWPMW6yNmv26YJRXNBQBP+c2fsvq8/QMBvK+nNAnVJwRfG9vKynVNnl\n",
+       "DJY+9I9lr2VVrwH9oWCISAEAAEQiIlWRrIiPjRVZvGlRNN+BNyvaYXd5WRGiXnvU9bt03X72wsJC\n",
+       "X99fJ77bsy06sKXhUrtw+sUXXwxj3Zbe33fffeHYlnj714jC1f749569T4uMnnSLbPmIbFERRd86\n",
+       "xKJx/n2TB78YxN7D/ncrY2cHK3L3LQWs8Nw+f0BViEgBAABE4kIKAAAgUuNSe4NuiFtXWek7C1X7\n",
+       "dFreYXMLjfsUSNZzSSFltltvvVVSZ2rWUkdZr+nU1FQ4th43/rm3vkZ+E1hLv/r0Sd6b7Y6qpaWl\n",
+       "rl+P7Zgew6d67TX1n2d77YfZmNmn3bJ2I8iDX1RT1QIbe978uerUqVOSpKNHj1YyJ8AQkQIAAIiU\n",
+       "lFEouOJBkyRttVqlPy4AAMCgWq2W0jTNDPcSkQIAAIjEhRQAAECkyipYq0zt+ceuOsXY71x8x+Zu\n",
+       "hfa+0HTQtG0Tn5cy9DsXX/hrr1FeRftWSPzhD384jH3qU59a8X3WJ8l6UUntYma/iMHeJ36T6LNn\n",
+       "z/Y1Fyui/sAHPhDG6vIa/emf/mkYs+fA9ymzBR2+/5O9Vr64347952nt2rWSsguufV+x3/u93+uY\n",
+       "U5VsDv3Opd/zzDBz+au/+qsVX/OLMuy1OXToUBizzaHf/va3r/g+36vNekr5XlrWa+uFF14IY/be\n",
+       "beJrVCTmkq3XHHpGpJIk+UKSJAtJkjzpxiaTJPlmkiTPJ0nyjSRJtrivfShJkheSJHk2SZKfHWby\n",
+       "AAAAddZPROovJf2ppP/uxj4o6Ztpmv4fSZI8dPX/fzBJknsk/bqkeyTtkvRPSZLckabpULc2vntv\n",
+       "Vhdsu1sf5X3a+r07rGLxgFTsnWxTWHuDImS9t20pf1YUMiu6ktVWo98olDfMPnVF860E7PmxLvPX\n",
+       "Hl/Ldx/P6kTerSv6MC0M6sR/di0K1O05i+EjSPZ4fr/NLDaHf/3Xf+36fbZvqP897HWr8/sWzdYz\n",
+       "IpWm6bckLV8z/IuSvnj1+IuS/uvV41+S9Ddpmr6WpukRSQclvTOfqQIAANRLbLH5TJqmFhpakGQh\n",
+       "o1lJx933HdebkSkAAICRM3SxeZqmaZIk3fJJQ+eabMNKqV0c6zsUW1id0G11br/99nC8Z88eSZ3p\n",
+       "kaefflpSuxsxhpfV7dlYikPKf6PevFmXeF8gHJsm9d3li9z8exzkndIzeXde7/Wz/WcBxbHzvi8h\n",
+       "sOfe/72u+/koRmxEaiFJkh2SlCTJTkn21/GEpD3u+3ZfHQMAAGicRx55pOvXYyNS/yjptyR9+up/\n",
+       "/8GN/3WSJJ/Vmym92yV9L/IxAn9HYQWQthRZki5dujTsQ2BIc3Nz4diWIPvCdyJR5ar7Xol+Acn0\n",
+       "9LSkdmsCSTp27Jikwd839t6TiER4fm/IqiN19npL7SxCXnsg2qIXHw23LEaRi0HQPt/719Jeh6oW\n",
+       "QQ3D9sKUpAceeECPPvrodb+354VUkiR/I+k9kqaSJDkm6aOSPiXpS0mS/LakI5J+TZLSNH0mSZIv\n",
+       "SXpG0hVJ70+b+AwCAAD0oeeFVJqmv3GdL/3Mdb7/E5I+McykAAAAmqCyzuaD8L2jrO+NTwMUWbx4\n",
+       "Ld8vyTpZ++K5vELUTeP76PzgBz+ocCbjo8nB3sXFxXBsKTgfSl9evrbjyuDsszqufc08X8hfdWrP\n",
+       "d9O318YXKF++fFlSXMmGvea+9MO6ppPqLdb8/Lyk0enn6P/W9/zeAucBAAAw0hoRkZqYmFhx7O+q\n",
+       "sroQF8XfTdleXqdPnw5j4xqRwmiySIaP+ubxHvdRIisCzqMY2M+NSFRbnQqt/fnaok/+/WXvuZiI\n",
+       "lGUH/M+zhRcsSiqWRZTtNZXaf6d9hNB/PQ+2wCTviOMgkTUiUgAAAJG4kAIAAIjUiNRe1gaUPlRd\n",
+       "ZjrNpxTPnTvX8V9cn3Wnj9kkF9UhVY28+cU5dh736b5Binyvxy/EWFpakjSa7+Vt27ZJ6lxM4Hv6\n",
+       "lckWgPnyF0u35VWAbhuw+/dLUYsnBunATkQKAAAgUiMiUv7K0K4+/V2LFbLlXcSWxRe0sZy2O3+X\n",
+       "ZB3py45IZbWoGJXluVhpFAvMrXDan/PKXGCTNx8tyvo98n4NLYoxiqyQ3i94qoq1wMl6vov8W1mH\n",
+       "vfuISAEAAETiQgoAACBSI1J7vpjM+kj5LsgWHi4jtVcV38n9xhvffNnq1Bsmiy/uPH78eGmP60PL\n",
+       "9j7x6bwyUnuWjimj+7i9HyTSlnkUKg/CUsd5FzJbKtwfW9G01OzUnn+/ZvHnOjNo+sZ6/EnS3r17\n",
+       "JeWf/vK9qqraZaBOvbHsM1BkKrWuuzkQkQIAAIjUiIiUZ1fgq1evDmN1vUrNk7/Ttjs2fzdS9+eg\n",
+       "zPn5u3W7ky27ILGM39eWPvvf14r5/R253SH6iMag/Puvzu+1HTt2hGOL2PpIhC2A2LRpUxizz1Gv\n",
+       "u3tr4eG/zx4j70iYX0Jui2nq/LwPwne5zmLF5sM8p75g3Yqg847i+R037Pwy7hFhqXORUd7n3bou\n",
+       "JiEiBQAAEIkLKQAAgEiNS+0Z303cF/2NKusXcu0xuhvlBQiWsssqol1eXg7HPo0Vq64h9WstLi6G\n",
+       "46xUjhXE+jRnr+JnY+ecrBRb3s/PmTNnwrGlworq4Fy2Xr3k8vg9/d+Honae8Cks+xvk075N+czk\n",
+       "7ejRo+F4VNLRvRCRAgAAiNTYiJRnxddVRR82bNgQjm0OFB3G8YsI8jAqd/FZ+r3TtmLbcRATre33\n",
+       "s1rm3bX/PZq8R1zW53l+fr6CmeTPvx+sMN4XyI9rRKrpUSiLUPsFH70QkQIAAIjEhRQAAECkylJ7\n",
+       "a9as6QipW2FoVpdm3+nWCmx9CNUK/XyKzfq7+IJT//U8WK8eH8och5Sefz3y6BPi+8pMTk4O/fPG\n",
+       "RR026+yHf03ts+rTHvZZzUph+c/5zp07JXWG3PNOBdfFqJxT8kqtW88mX+Bt6c+qPgdlb8BeJt9Z\n",
+       "3/6G+kJ6+zvt/16XUVqzZ8+eFWO2e4VfoNFvzzz7W3b77beHMTvPDPK5IyIFAAAQKamiMCxJkrTV\n",
+       "apX+uAAAAINqtVpK0zSz1xIRKQAAgEhcSAEAAESqrNi8ytSef+yqU4xNn4sV++bdr2mY5+Wuu+4K\n",
+       "x4cPH5aUXZBqRYWSNDc3V8hc8sZcstnjVz0PP4dBP0NScZ+jmOfFConzKnZv8mtUZH+oOj4vDz/8\n",
+       "cBirqi9UHZ+X6yEiBQAAEGkkOpuPq127doXjEydOrPi63ekWuZQ6qwWE3+/MzM7OSpJOnjyZ6+Nn\n",
+       "efHFF8Nx1t2ULcFv8rJyjA7f/qPqTvy+vUC3yIvf39SiNVl7G+bFHq+q6Ig/1168eFFS536WZmpq\n",
+       "Khz3uwS/jprYndz+xkjtlipZuz/4925evycRKQAAgEhEpBosKwrl2d2tz+/nzfZx849hzfMsWuWP\n",
+       "y4hI+Tt8a/Lomz3a/OyuBdXavXu3pM4Gh3bXPw4uXLhQ9RSCfvf183fyZUQvqo6QHDt2rOvX169f\n",
+       "L6l9bpHa0b0m75XYJH5/yv3790vqPI9YhsZ/3nr9De0XESkAAIBIXEgBAABEIrXXJ9uT51d+5VfC\n",
+       "2PT0tKTO0O1XvvIVSdLCwkKJs+su7+W6Xrf9lXwxd14h1H74FFHW725pgiJTnujO7+V1yy23SOrc\n",
+       "y+vxxx8vfU6IU+T5pSmsTMCXC9i+sCiHT6vawgfbh09q/13IWgw1LP6SAAAARGpERMrv9m5X+Vbk\n",
+       "LJWzjN2Wv959991h7Jd/+ZcldV7hHj16VJL0ta99rfA5ZfF3QUUuR+5HVUWWve6QfeQD5bKFAHv3\n",
+       "7g1j9tk6dOhQJXOywtTJyckwZkvXfSuNMtjn1y+jtyjHOBXgj4K8z79Vt4CoO2sgK7XP8T4iZREr\n",
+       "yy5J+f2NIiIFAAAQiQspAACASI1I7flQpvXr8GE8C3kXmbKxlN3Xv/71FWO+L0W3PduKZGkJ30Op\n",
+       "jJ5NwCBscYJPh8/Pz0vK7kJcBjuXWD8rqd1zpuzUnvFFy1V3O68DW5zg02XjlqK3vlRZ+4ai8zNj\n",
+       "7w2furMSIZ/Cz6uHGxEpAACASI2ISPmrRiss90XV3Zbg58XuhL7zne+EMX9chZmZmXBsd9VZ+z8B\n",
+       "dXPq1KnM4yocPHhQknT69OkwVlXXezvPUFjeyaJyZZzr64oO6d35iHZWFNc+30V8tohIAQAAROJC\n",
+       "CgAAIFIjUnu+2HzcCgy78eFLC2uOc+gbGAZp8frivEb/qF76XZRRRLE+ESkAAIBIjYhIIdvLL79c\n",
+       "9RQAAA3h2waVsSPIuCAiBQAAEIkLKQAAgEik9hrMp/aso7l1ZJbaYVxSgGgqv8EoHZ1RB3aO9akx\n",
+       "2xC3qv5jvdjnyHYGkaQzZ85UNZ2RQ0QKAAAgEhGpEWGd3qempsLYDTe8eZ3s99yjwBB1sG/fvnBs\n",
+       "3bxtLyyp/T4+fvx4GPvxj39c0uxQN/be8DtaWPTHL3t/4403Cp+LRfq3bdsWxizSY/uvSsWda+28\n",
+       "LvX/+1rGIqurt89ijDL7PYvYu5KIFAAAQCQupAAAACKR2hsRFm62EK4fA+rmlltuCcfT09OSpNnZ\n",
+       "2TBmheULCwvlTgy1ZKm9ycnJMHb+/HlJ0vz8fBgrY+eLjRs3SuosozBFlk5YWtM+L1I7VZeVskuS\n",
+       "ZMW/zdr42BegN5kV/Evt94lPBVtn+CJKXYhIAQAARBqJkIVdfY7zck5/5W3s7mOcC8ytMNMXZVrU\n",
+       "bvv27WHsxRdfLHdiY+6FF14Ix7ZPpC8st8/ys88+W+7EUEtWWO4jTtbWpez9VxcXFyV1RvyzIj2D\n",
+       "8udwW4Dh2ZiPwNk5zBeg21x8dqJbW4azZ89GzrhefBF51nNg75Mi/h4SkQIAAIjEhRQAAECkxAqw\n",
+       "Sn3QJElbrVbpjwsAADCoVqulNE2TrK8RkQIAAIhUWbF5PxGpn//5n5cknTp1Kox973vfW/F9+/fv\n",
+       "l9RZhHf58uW+HrvqyFhec7GlrsNEGEfxeckDc8kWMxcr0M274NMev+rnxM+BuXQa5bncdNNNkvov\n",
+       "OrcWCpL0+7//+7nOZRhNeY3WrVsXjovaS9a3hXjwwQe7fi8RKQAAgEhcSAEAAESqdR+p3bt3S5Le\n",
+       "9773hbEdO3ZIkp5//vkw9u53v1uSdOLEiTD2jW98I9e5vOtd75LU2XOjTj1uLNTpUyZ59DYB8jTO\n",
+       "Pc0wugY91164cKGgmYyHrHTenXfeGY7f/va3S2r3qJPa/b983zHbCN12UvC69d66FhEpAACASLWO\n",
+       "SD3yyCOSpOeeey6MHT16VFJnsdmWLVskSU8//XRhc/mJn/gJSdL999+/Yi6PPvpoGDty5Iikzj3C\n",
+       "iiqG88ru7gvEWLNmjaTsO0DPinf9fmZ+jywUzxbx+O7QdYrCV2316tXh2HfVRrk2bdokSfqFX/iF\n",
+       "MPae97xHUueOFrYQ67HHHgtjX/ziFyVJc3NzQ82BiBQAAEAkLqQAAAAi1Tq1Zyk9n9rLcuDAAUnt\n",
+       "XkpFsEI1XyRoaQeffrCNV30PijJSe2WytIvU/t2XlpbCGEXu9WKhb0vVSO3PihVbStLFixcLn4vf\n",
+       "XLUbNtyuxvT0dDi23jk//dM/HcasfOJzn/tcGPvnf/7nQubi3yt2XKf3w8TERDj2Rc0ol523Tp8+\n",
+       "HcbsfepTrva3ypfB5LVhMxEpAACASLWOSPXLF5QV5Stf+Yok6e/+7u/6+n5fiDhqXn/99XB8/vx5\n",
+       "SUSh6szunO+6664wZgXEPgpVRkRq0IjCqEVz684Xlu/Zs0dS57LyzZs3S5K++tWvFj4Xf14v4xw/\n",
+       "KKJQ9WDvjb/8y78MY7aoxXZSkKRVq1ZJ6swq5bXXMBEpAACASFxIAQAARBqJ1F4ZBk1djXJfER8O\n",
+       "7dUPCNWzzbwff/zxMGYF6GWnoAf9XAzSXXhUWbqt20bsebHFMpL05S9/WZL0L//yL2HM5vDNb36z\n",
+       "8LkAsezvUll/n4hIAQAARCIi1Se7g7fi6nFW9yJQZDt06FDVUxgY769y+WjzX//1X0sa7eg6kAci\n",
+       "UgAAAJG4kAIAAIhEaq9P/XZkHjdWrEz4H0WwDcmlduHouPWW6rfI3LrV59Ubh8800B+uDgAAACIR\n",
+       "keqT7cnjI1MUwra7xXL3iiJMTk6GY2uFMG4RqX7lFYkCMBgiUgAAAJG4kAIAAIhEam9ApPM68Xyg\n",
+       "SNu3bw/Hc3NzFc4E17Nu3TpJpFwxvohIAQAARCIihaG8/vrrVU8BI2jbtm2SOguoFxcXq5oOrjE7\n",
+       "OxuObaFJGRGpG29s/8m6cuWKJGlmZiaMLS0tSeK8hHIRkQIAAIjEhRQAAECkRqT2rFeR1C5stJ4y\n",
+       "knTTTTdJanc+Rnk2bNggqTMFw8bOg7NNsaX28zfKPcuyUjSedek+d+5cGLt06VLxE0Owdu1aSdK+\n",
+       "ffvC2NTUlKT2eViSvv3tbxc+l40bN0qS7rzzzhXz85+ThYWF6MdYs2aNpN5/R+zx/HNg579+36N2\n",
+       "3sRoICIFAAAQqRERKV84uHXrVkmdV/R2RxsTkbK7mn73s+rXxMREOB7lO+lXX31VUv7PX11ZdLRX\n",
+       "Mavd3U5PT4cxW8rvo0t2d3v69OkwZhGpUYtCeVlRKM+Khi9evFjGdEaCvZfyet/YZ/q5554LY0eO\n",
+       "HJHUjhj67yvShQsXJEnHjx8PY7YPo+06MSzbN9T/HbG/M37nBhvz53iLSNnP8HwRvp0v7e9OXfnf\n",
+       "Y8eOHZI6s0BnzpwZ6Of5v9e7d++WJD3//PNhzN5PTV0kQEQKAAAgEhdSAAAAkZIqNrpMkiRttVql\n",
+       "Py4AAMCgWq2W0jRNsr5GRAoAACBSZcXmVUak/GP3Ow9rsWDFglXOpSjMJVvT5zJO792q5+Hn0O9c\n",
+       "9uzZE46t0PnUqVMrvi+mHUaTn5csk5OT4diKn2MWGfU7F3tt3vve94axF198UZL06KOPhjFrz7B3\n",
+       "794w9vTTT+c6lzI0eS6++H/nzp2SpB//+Me5zuV6iEgBAABE4kIKAAAgUiP6SPXL+opI+fUWMdYX\n",
+       "KO/0SAzr8eG7Q3fbMLTI5wX153vgoD6sS/gtt9wSxqxfku/NZCkL39vn2LFjZUyxdgbtXzQs66zv\n",
+       "03QnT55c8X2WZrT+Zyif7zRvqdjNmzeHsR/+8IeFPTYRKQAAgEiNi0hZh1TfAdW6JBcZbbE7RS/v\n",
+       "TsL9sghDv5EGolDjrYoWJ+jNohc+2pEV0bBO93Xvhj2K7Ln30Qzf1d3Y34Bh9vqLYX8PfTRmcXGx\n",
+       "1DnUhd+X89ChQ5LK29OQiBQAAEAkLqQAAAAi1Tq1Zxu+zszMhDEr9vb9RF544QVJnRu/5s0KPv0G\n",
+       "xKO8qWwTWVH9z/3cz4Ux2yj43/7t38KYHZPyQpWseLzfjZlHZWNw2/hbauYmtVWfN2677bZwfP/9\n",
+       "90vqXGxkm0z7TYHHgd8I/bvf/a6kuB5jMYhIAQAARKp1RMqiCXfffXcYyyosLzISZXwkCvVky8l3\n",
+       "7NgRxu69996Or0nlRDCNRVWldgTClkpLnQWSRdu/f384tufj+PHjYWx+fr60uaAdjfEdy7vxnZub\n",
+       "fD7yBcC2iIfofv98ZNKeP78YynYyGGdlRaIMESkAAIBIXEgBAABEqnVqz1IwPkxn4e28NiNsItsg\n",
+       "07qtS+0UUb/dzvO2e/fucGz9bnzaqIxC2YMHD0pqFxpK7XSVL7wsI6VnfH8Xe+9ab5qy+Q73liJf\n",
+       "v359GLPXbVSKmuvO0ln9piGanM7z/O4QpPQG58+rVuLiz/U+BYxyEJECAACIVOuIlN25WKQBb7Ln\n",
+       "xUek7M6uzCiU5++C7rjjDkmde4h961vfklTO/LJaHVTFR79sn7B+l7vnzUfl7D3klwwDZajqHFWk\n",
+       "bdu2SepcXLK8vCyp2G7ntmDB78NIRLl8RKQAAAAicSEFAAAQqdapPWSz4tSye2V0c/To0XBsaT5f\n",
+       "SFqnuZbJ94yq2qgUKwN14DeR3rVrV8d/pfbCnyJTe3v27JHUuZDEPudzc3NhrMwFNuOIiBQAAEAk\n",
+       "IlLIhY84HT58WFLnnlq2P5UvkB/XKBWA5vN77ln03S/eKKq1Q5Ik4XjTpk2SpFtvvXXFvHzEjIhU\n",
+       "sYhIAQAAROp5IZUkyReSJFlIkuRJN9ZKkuR4kiT/fvV/P+e+9qEkSV5IkuTZJEl+tqiJAwAAVK2f\n",
+       "1N5fSvpTSf/djaWSPpum6Wf9NyZJco+kX5d0j6Rdkv4pSZI70jSlfe0YsZSeDy1v3bpVUucGraT2\n",
+       "gPHjU1M+PdY0/vy1tLQkqTO1V9TiDv+cWc+o1157LYzZzhd+DMXqGZFK0/RbkpYzvpRkjP2SpL9J\n",
+       "0/S1NE2PSDoo6Z1DzRAAAKCmhik2/9+SJPmfJD0u6ffTND0raVbSY+57juvNyFRu7G7GRzusu6vf\n",
+       "wwnVsc7Fk5OTYcwiUVXtMwc0gZ3XfNTBltH7sSa3sti8eXM4tr3ims7Oa/7vUhnRNtslYXFxMYxZ\n",
+       "YfmBAwcKf3y8KbbY/POSbpb0Nklzkv64y/c2N3YLAADQRdSFVJqmp9KrJP3faqfvTkja475199Ux\n",
+       "AACAxnnkkUe6fj0qtZckyc40Ta1t6i9LshV9/yjpr5Mk+azeTOndLul7MY/h+d5D1i/Db9JoqSS/\n",
+       "KSvK5cP1loqwjTwlaffu3ZKkEyfa19WPPeazwBgn/jNt6Xr/mbaURZOLkftlCzEk6a1vfaukzoJs\n",
+       "c/LkyXD83HPPFT8x9M06i/veUbZJeZGOHDnS8d+y2Dl+XDY9f+CBB/Too49e9+s9L6SSJPkbSe+R\n",
+       "NJUkyTFJH5P03iRJ3qY303aHJf3PkpSm6TNJknxJ0jOSrkh6fzoOZ0IAADCWel5IpWn6GxnDX+jy\n",
+       "/Z+Q9IlhJnWtDRs2hGPbt8gXLQ+zzNOiJv5OYnl55SJFW9Jvhe3oNDMzE46tyPzuu+8OY7fccouk\n",
+       "8Wt54KMNWe+rceU/RxbNvHDhQhgbp/sv/76wSJPfFcDOb6OyUKPfRUG+VYqdn30xt0VD+o2K+H+b\n",
+       "N2tD4KOqddpnM29ZEdNxRmdzAACASFxIAQAAREqqCKEnSZK2Wq3SHxcAAGBQrVZLaZpm5jSJSAEA\n",
+       "AEQaprP5UKqMSPnHrjoyVtVc/PJzK2b96Ec/WslcsvAaZWMu2ezxq56Hn0MT5+ILsq2AOos/f/S7\n",
+       "gKTJz0svtmjp3LlzYazfIvgmPi9WbL5v374w1q0Fw/79+8Pxiy++KKlzgdcwc7EC/yL3Fuw1ByJS\n",
+       "AAAAkbiQAgAAiFRZag/10SvEitEzMTEhqTMcXmRoHM3QLZ0ntTta23+lcnrDWcoxa372Xpba/cny\n",
+       "nvILYpUAAB2fSURBVJPvabVx40ZJnWk823x5XPoMWmrX+jpez/bt2yVJe/fuDWOnTp2S1P/G275n\n",
+       "VdbiuDqct4hIAQAARGpcRMo6/t50001hzPbaK9ttt90mqb3PkiQdOnRIUjn7LA1j3DqMo7OQ+Pbb\n",
+       "b5fUeYdvRaBVfZ4grVu3TlLnXXad9jMbtJt4Xu644w5J7WiQd/r06XBc1J5zPiqS1WF+XCJRplfk\n",
+       "0likzu8TOWhX9LxaNNnfaZtTnohIAQAAROJCCgAAIFLjUnsWQvXphzL6SBhfZHnrrbdKaqf4pHbI\n",
+       "s+6pPYwfH47/0Y9+VOFM4PkyBSvO9Ru1WyrixIkT5U6sT5aqyXuXDHsuJGnXrl2SpN27d4cxe7xv\n",
+       "fetbYazflNOgfN+sfouk0d6g2grMpeJeo15mZmYkSZs2bQpjVs4wLCJSAAAAkRoXkcpS1fJHX7xr\n",
+       "6lQYCsSwCK9Uj6XFo87u2qV29MnftS8uLhY+B4u0+6hSvwXURe3XaguLpPb78PDhw2HMonZWoF8k\n",
+       "P5c6sWigj5hVtSCgm6wC/bxYV3n/9zgrevvcc89J6owA54WIFAAAQCQupAAAACKNRGqvTD5c+u1v\n",
+       "f1tSu4hNkp599tnS54Q4vifNhQsXKpxJvfjnhUUT5bJu2b5rdhmsc3eddjmYm5sLx5b+9D2jpqam\n",
+       "JHUW5luX87wLwut6frC0alUF3LOzs+HYirj938hjx45JKrZvob02/rx1yy23SGr3dfR8Kr0bv7Cs\n",
+       "FyJSAAAAkYhIDcHujvxdUlP47rK+uHic1PUusyrWzsMXGRORGg/93qVXJescu7S0JKmz2NyiIrQo\n",
+       "KIfvWL68vCyp/J0R7L2bd1f7QYr1iUgBAABE4kIKAAAgEqm9MRXTLwajx4pzJWn9+vWSpIWFhaqm\n",
+       "A/TNyhM2b94cxiwdMz8/X8mcxtk4b3ZORAoAACASESkQkRpjvijXCketaBSoM79nGsaXtb/wC6jK\n",
+       "XkhERAoAACASF1IAAACRSO2hFBZ2nZ6eDmOnTp2qajrIYH15gLqxLtN+c17rZO07m5OWHj8XL16U\n",
+       "VG2ql4gUAABAJCJSOVi1alU4vummmySN91LQLPv27ZNUThHgli1bwvHZs2cLfzwAxbLok9/X1Pba\n",
+       "886fP1/anFAvVb72RKQAAAAicSEFAAAQidReDnwfJlJ62S5fviyp2A2ed+/eLamz0zGpvXzccEP7\n",
+       "nmvt2rW5/mwrJB5kk9Drse7skvTSSy8N/fPyYhuDv/HGG2HM5upLA2wDXr9ZtH12PHs9/M+zBR1+\n",
+       "14JB1bVMwQqJb7755jBmz8FTTz0Vxubm5sqdGGrD95Ea5jMQg4gUAABAJCJSKEUZe18dP35cUufd\n",
+       "SNad+6iwonofLbLl4Vu3bg1j1r38yJEjff3c7du3h2Pbi88vMc/7bi+PSJSpUxTKs0iTL4h99dVX\n",
+       "JXVGUC3yYsXVUjuK6yPfFsVbXFwMY3m8Lv4xXnvttaF/Xl6OHj3a8V+pHeWr0zxRnbxb6/gIVy9E\n",
+       "pAAAACJxIQUAABApKbsoS5KSJElbrVbpjwsAADCoVqulNE0z831EpAAAACJVVmw+bETKd6+2AlO/\n",
+       "dNcKObOKjP1jVx0ZYy7Z6jqXhx9+WFL+Bde21Fxqv3d7zaXf58UKxW1PqmFZEebHPvaxgedSFHv8\n",
+       "qufh51CnuXz2s58NY1kdoIcp3Lb3l/9M2KIHv/edvV/q9Lwwl05NnIu912xBh9R+j2f9/e/3XJs1\n",
+       "l+shIgUAABCJCykAAIBIje0jldWxmn4iKFpRizP6DTHH6PdzYT2Mem3+mcdz4PtSWc+kpaWlMDZO\n",
+       "n2VLq0nF/d69XtNhHjcrZVzXfl4YPfbe9X2k9u/fL6mz0/3CwoKkznNtXn0GiUgBAABEamxECriW\n",
+       "LTZ429veFsYuXLggSXr++ecrmVMdvPLKK319X6+oRZ7uvPPOcGx7JPp9GA8cOLBibFSNU/QN+bJo\n",
+       "5q5du8KYFfifO3eukjmVzaJJPvpk541t27aFsR07dkjK3l2AiBQAAEBFuJACAACIVOvU3ihvODsq\n",
+       "rKeQ9fLwx2WHlq0/iN909+abb5bUTvFJnSHgceI3N7Zi7zLTeV5WOsu/h9auXVvmdPpSxvnIeuHY\n",
+       "psRS53sX1bNz3q233hrG7FznN5Eug70XZ2dnw5ilsB577LFS51I1v+H2yy+/LKm9kf31+IUewyAi\n",
+       "BQAAEKnWEak6RaLsDvnKlSthzI793aP/+jiwpfCXL18OY/64THYXYoXKknTvvfdKkvbt2xfGxi0i\n",
+       "tXfvXknS+973vhVjn/nMZ8LYiRMnSpvTE088seLYR6Gqeg91U8a+pPY+9eeUp556qvDHRf/sfXDw\n",
+       "4MGKZ9KOwnz3u9+teCbNlNdCDyJSAAAAkbiQAgAAiFTr1F6dWKGp3xjRikDHLZ1Xd0eOHAnHVky9\n",
+       "c+fOimZTvcnJSUnS/fffH8as8+9dd90VxspM7WWpYzrPKyO19+yzzxb+GADyRUQKAAAgEhGpPl26\n",
+       "dKnqKSDCmTNnOv47jqyY+/Of/3wYsyjq9773vUrmBABFs1YVUrERZSJSAAAAkbiQAgAAiERqDxhx\n",
+       "1o+NXjMYhC2ssf5sQNOUsUBEIiIFAAAQjYgUAGCFvLo+A2Xw++aV/d4lIgUAABCJCykAAIBIpPYA\n",
+       "ACusWbNGUuduDpYy8akT+/rFixcLn8srr7xS2GOg2TZu3BiOy+4bSEQKAAAg0khFpNauXRuO675v\n",
+       "F4ozMTERjl9//XVJ3MkCg7I7/FtuuWXF2PHjx8PYwYMHC5+LfY6B66ly9woiUgAAAJG4kAIAAIjU\n",
+       "iNTeli1bwvG2bdskdabxzOLiYjgmtTe+Xn311XBsqYgbb2y/1W0D6rK63qIa1lcmr54yds7xxdeW\n",
+       "Mq77puYxPXY2bdokSZqZmQlju3btktTe9FqSnn766Tym2JV/PKBuiEgBAABEqiwidcMNN4Q9wHrZ\n",
+       "vHlzOJ6ampLUWXy4vLwsSTp16lSOM0RT+bvXrCLzcY1EVdn5tyw33XRTYT87SRJJ0oULF8JYUyIl\n",
+       "Ma/3wsLCirGTJ09e92solkUIvfPnz/f1by2T4//mWuTe3teIR0QKAAAgEhdSAAAAkZIq0hxJkqSt\n",
+       "Vqv0xwUAABhUq9VSmqaZeVAiUgAAAJEqKzbPMyJlxXK9omuTk5OSpA984AOFzCOGf/xh5jI9PS3p\n",
+       "zSJ+M2hBaF5zyUPec/EFlYNGYUf5eRlGHefS7zxWrVq1Yiyv7tmDzqVIzCXbqM3Ft6iw81vM4qtR\n",
+       "e156sVYmL7/8cl9zuR4iUgAAAJG4kAIAAIhU687m99xzj6TOXkBLS0uSOsPw69evl9Q7leX7v4yC\n",
+       "DRs2hOP7779fUucGovR6aRvX3lFNZR3p/Xt8bm4ut5/vO933u6G1pc377X+XlzIft669xnbv3i2p\n",
+       "c8cK+1vge4f5XQ3GiU9V23t7mHKGGPY61P018OcUO8/0Su31QkQKAAAgUq0jUqdPn5bUeZdkV9b+\n",
+       "arvfffXqdIeVB38n/f3vf1+SdObMmaqmU0tVRREwuF/91V8Nx3v27JEkffWrXy3ksfqNQvkiXjsP\n",
+       "HT9+vJA5Se336zvf+c4wZvv4PfHEE4U9rvH7CNbpfGnRjv3794cx6+rtI+/2N6MpHefzYh3npXaU\n",
+       "xe8Icvbs2UIe10cDLSuSFTmen58Px/1+9opy8eLFcJzX3wUiUgAAAJG4kAIAAIhU69Te4uKiJOm2\n",
+       "224LY3v37pUkHTp0KIz5sOE48aF3UnrZSOkNzqd3hi3CHIT1QpPa4Xf/OS+TpZDe8Y53hLETJ05I\n",
+       "Kja1Z+/Xxx57rLDH6MbSiHWT9T6w9wib1Xcqc1HVfffdF45/8id/UpI0Ozsbxo4cOSJJ+vKXvxzG\n",
+       "inq9JiYmwnG39/HatWvDcV5pRiJSAAAAkWodkbK7M3+leeedd0riLqRurNBQkvbt2yepXQAvFXsX\n",
+       "P67yLqTftWuXpOIKU3v5/Oc/X8njZrFInL+zteLmUZZXd/du1qxZs+K43+e2qgglsvno03vf+15J\n",
+       "nRmkJ598UpL04x//OIx94xvfKGQut99+ezi2RSK+7YNF6p555pkw1u9CtV6ISAEAAETiQgoAACBS\n",
+       "rVN7xhfPWWjXhwpRPd+bw/rtbNmyJYyR2stfHik9nza3nzduPXiyWG+ir33ta2HMd0MfVdaDSGp3\n",
+       "qM4r/WH8+7buXbCzWLFy03+PPPgSGysJ8M/F1NSUpM4C76L4RTJ33323pM7P7FNPPSWpXRKRJyJS\n",
+       "AAAAkRpxi+WjT0Si6un555/PPEa9+WLqui59L4K/U+03AldmpK7sfdJMGcXmvm1Lnbqn98uiLD7i\n",
+       "btGOcWPF5JL093//95KkAwcOhDF7H/vO60XtyXf48OFwbJ8ZH021v0tFtHQhIgUAABCJCykAAIBI\n",
+       "jUjtAUCeKKjP9tJLL1U9hUxWBO87UVdV4L20tCSpc4HNuPLvl69//euSpB/+8IdhbOfOnZI6Xyvr\n",
+       "8XTs2LFc5+I3r/bHZSAiBQAAEImIFADUTJkF5k1giwPqUJxuBcx5t4VoOovyzs3NhTGL2llkSurs\n",
+       "bD8qiEgBAABE4kIKAAAgEqk9AEA0KwT3O1DkzbpWb9q0KYwtLy9LGo/NpJvKUrF+cYe9bqOEiBQA\n",
+       "AEAkIlIA0IetW7dKGs076mFs2LBBUrERKeuGvW3btjBmkTC/20URXasxGHutpHYXeGsZIY1mBJGI\n",
+       "FAAAQCQupAAAACKR2quRVatWSSpn41DgeiYmJsLxOGxk3O8mquvXr5dEak9qp9Wkzm7jRTly5Iik\n",
+       "zhSR9SNq4vnSzvWjyPf6svPHKKbzPCJSAAAAkSqLSK1evbpnl1q7A3zjjTfC2CgXEzbxzmrc9Bu9\n",
+       "KMMNN7x5H+Q/H8OwO/zVq1cP/G+TJJGU3ZHb333b85fX59gedxj2PPZy/PjxoR9rVFiBudTZybpo\n",
+       "fn+7Ju51Z3/TRrG7t/HngDNnzgz98/xnfJiO/3mfLzt+du4/EQAAYExwIQUAABApqWJzzCRJ0lar\n",
+       "VfrjAgAADKrVailN08xaAiJSAAAAkSorNn/44YeHKhwbho+GVR0ZYy7ZYuZihZwvvfRS5XPply0j\n",
+       "94Wz3T4XTX+NimKPX/U8/ByYSyfmkm3QufjFEdY53C/eGKbDfJOflyL1mgMRKQAAgEhcSAEAAESq\n",
+       "LLW3Zs0aXb58uaqHxwi6++67JUnz8/NhLKv3z+bNmyVJ586dK2diXRS50Ws/bryxfQq4cuVKaY/n\n",
+       "u6fXpeuxpYal/NPDTeZTSUX04MFg/GtgvQftnCaVe07x7w3rETeOf9eJSAEAAESqLCKVR0diwDt4\n",
+       "8GBf31eHSFRdlBGFynq8Ue7sPIx7771XkvTud787jD355JOSpMcee6ySOfmoZR06+lfNFoj4RSFV\n",
+       "dVk/ffp0JY9r/HtjdnZWUmen+1HeicTrGpFKkmRPkiSPJEnydJIkTyVJ8oGr45NJknwzSZLnkyT5\n",
+       "RpIkW9y/+VCSJC8kSfJskiQ/W/QvAAAAUJVeqb3XJP3vaZr+D5LeJel/SZLkbkkflPTNNE3vkPTP\n",
+       "V/+/kiS5R9KvS7pH0n+R9LkkSUgfAgCAkdQ1tZem6byk+avHF5MkOSBpl6RflPSeq9/2RUn/n968\n",
+       "mPolSX+Tpulrko4kSXJQ0jslrYhJj0vID+U5e/Zs1VNAnxYXF3P5OT61MKw6FJhfunRJkrR27dow\n",
+       "5o+rQDqvU9ULROrE+lhJ0smTJyVRbN5VkiT7Jb1d0r9JmknTdOHqlxYkzVw9npXkl0kd15sXXgAA\n",
+       "ACOnr9u5JEk2SPqypN9J0/SCLxRP0zRNkqRbi/Lo9uV2t1l2QSyAZqhqd4SiWLTDL5wgAoK6OnPm\n",
+       "TDgex0iU6RmRSpJktd68iPp/0jT9h6vDC0mS7Lj69Z2STl0dPyFpj/vnu6+OAQAANM4jjzzS9eu9\n",
+       "Vu0lkv6bpGfSNP0/3Zf+UdJvXT3+LUn/4Mb/xyRJbkqS5GZJt0v6XsS8AQAAKvfAAw90/Xqv1N5/\n",
+       "kvSbkp5IkuTfr459SNKnJH0pSZLflnRE0q9JUpqmzyRJ8iVJz0i6Iun96RCxd1J69Tcz82Z5nO+j\n",
+       "Uoei3W6sG28ZXZq3b98ejnfv3i2ps9v6qVOnVvwb9M86O48Ke2+eONEO5C8vL0uSVq1aFcZG7fdG\n",
+       "sVavXi1Jeu2113L9ueOczvN6rdr7tq4ftfqZ6/ybT0j6xJDzAgAAqL3KOptjNNjdcpOWSJcRibJ9\n",
+       "p+64444wdvPNN0vq3BfLijWJvkKSFhYWOv4L5CHvSBQ60SwTAAAgEhdSAAAAkUjtYShNSumVKavj\n",
+       "thVm+q9NT09L6kzllJF6BIBhrV+/Phxv3bpVUnvBhNQ+l83Pz4exUSxjICIFAAAQaWQjUkUt98yy\n",
+       "bt26cGxX236pclFLRK/pMF/IY2A4/nWx94Ef27hxo6T2+1WSjh07tuL7AKBu/N+gXbve3A3OR5zO\n",
+       "nTsnqTNyZWOjhIgUAABAJC6kAAAAIo1saq+olN6aNWvCsRXXWUhTaoctX3nllTBmqZq8bdu2LRzb\n",
+       "xqa+0M/md/LkyUIeH9d36dIlSZ0dqO29s2XLljA2MTEhqXPzzxdffLGMKQLAUPyOFnau86UsVuLi\n",
+       "z3mk9gAAABCMbESqKL5obtOmTZI6C4XtaryMq24fCbMrfx+lstYERKSq41sZWKuDffv2hTHbd+/l\n",
+       "l18ud2IAkCPbzcHv3GB/G/3+oqOIiBQAAEAkLqQAAAAiVZbaW79+vV566aUV477/ki/UrQs/PysU\n",
+       "9t29T58+LamcVI31IJLavax8Ud+PfvSjwueA7nzHcuvuu2HDhjC2uLgoSTpx4kS5ExsDlob3n0Xr\n",
+       "Kk9H/vqy3kR59VGzRR5+ARDyMTMzE453794tSdq+fXsYm5qakiQ9/fTTYcyOR6lPHhEpAACASJVF\n",
+       "pK53d1DHKJRnEQR/XFWH8e985zvh2KJjPmJ2/vz5oR+D7unDOXr0aDi2pcLf//73w5i9Rnm8Vuhc\n",
+       "+GEtKPz7lkjU4PxzagXFfol73udsiyT6n2vtbGL2abP5E5HKn19U9fjjj0tqL6qR2lEq/3dzFP+O\n",
+       "EJECAACIxIUUAABApKSKMFuSJGmr1Sr9cQEAAAbVarWUpmmS9TUiUgAAAJEqKzavMiLlH7vqyFiR\n",
+       "c7Eiy373HRyV5yXvFhqj8rzkrY5z6Xce9913Xzi2AuZnnnlmxff5vTWtS32vz9OgcylS1lxsP07f\n",
+       "dT9vtkjFP38f/OAHV8ylGytsl7ovEvC7TWTt95al7q9RVWwOn/rUp8JYr+ey6LnU6Xm5HiJSAAAA\n",
+       "kbiQAgAAiMSmxX2yjRjL2Ix4GH7DSFtI0G9qr6767XRsKT3f+6qbflMH6K2MdFGe/GtvG337z/by\n",
+       "8rIk6Y477ghj9h7JSgE2SRmvkX1Wh0kL+X5E3Tr/33777eHY+rEdOnQo+nHrznoGSu1eaXnr93Xz\n",
+       "51r7TI1jvy4iUgAAAJGISPWpqoK7Qfm76l27dkmSLly4EMaa2FW23zn3W1hu+60hP/1GOSz6Y3tS\n",
+       "VsW6MF9PVgfvY8eOFTonSVq7dq2kzgLqs2fPSqr/rg95s6jg9dhz5SM09lyNsqKiUDEsEi1JW7du\n",
+       "ldQ5v3HZsYGIFAAAQCQupAAAACKR4+hTEwvoFhYWJBWbzrNiQ1/waeH1U6dOFfa4w4jZ+DSWhbul\n",
+       "3qmKPPki0KmpKUmdaaqXX35ZUrnPhST95//8nyV1FgP/4Ac/kNSZznrLW94iSfr3f//3MFbmZ9AK\n",
+       "y5999tnSHlNqv0bDlBL4TYazWCqsTikiz967vd6b9hwtLS2Fsaak9nyvuybz6eb5+XlJ7fS91LxF\n",
+       "KLGISAEAAEQiIjXCyog2WOTgd3/3d8OY3SE+9NBDYayJRe55KDMK5fmC+k2bNknqbPFgr4dvl2F3\n",
+       "j0XO2d6Ts7OzYcwiUn5+o7YgwH6fMj6TvT5rFqF88cUXC5vDhg0bJEkXL14c+N8Oeq54/vnnB36M\n",
+       "qo3ywoGqF5JI0rve9S5J0rp168LY3NycpGKizESkAAAAInEhBQAAEGm04ucF6re79rg5c+aMJGlx\n",
+       "cTGMWQ+ePXv2hLEi0wjX8oWcN998c8ecJOnAgQOSRvu19MXm9tpk9XTxfcfKKIC1XkxZKS7fgf+H\n",
+       "P/yhpNFJgZRZ1N/rscp438ek9K61cePGcGyLNmyRhNS92znGmy2k8Oe0Ihc/EZECAACIRESqT6Mc\n",
+       "vRjG8ePHJUkf+9jHwphFEara489HMayru49IjcNr6SNS/XYXLiP6YwsRerUysMiDFS1LnR36ES+P\n",
+       "aFEZ/Dzt/dzENjQo3xNPPFHq4xGRAgAAiMSFFAAAQCRSe8hFXTd1tu7u48an9qxj+EsvvVTVdALr\n",
+       "MdMrRWOp2C1btoQxm/+od0kuWlNSe/49QkoPdUZECgAAIBIRKWAE+YJ6K9iuQ0Sq34Jx60jsO69b\n",
+       "q4Z+i+eRrapFIOPAL7cfldYd6I2IFAAAQCQupAAAACKR2gNGkC/O9d2g68I6VUvtTZJXr14dxmxT\n",
+       "Y9tgV5Lm5+dLmh0Qx7+HLc3nN+PGaCIiBQAAEImIFDDi6lic7e/Sb7jhzfu5iYmJFWM+CmVd0YG6\n",
+       "qmsbGBSLiBQAAEAkLqQAAAAikdoDULqsnlY+3Wf9pv7/9u4nNI4yjOP490djCf4BMSZRa6A9VLBe\n",
+       "mksRiqS5lPRi9eIfEHoQEfyLp9qL5uhF8SB6MUoRqRSkpRexVXLwZAlEW02LFgy0UtNAzEFyycLj\n",
+       "YWZ0stmVOtmd6ez+PpedfWeZeZMnz87DO5P3zWZCv1n5h31vxfmS8vMMZbcyt7IYc34G+2w2eM8C\n",
+       "blYuj0iZmZmZFXRLj0hlD5xmrwCNRqOq7phZGwMDyVfJVvIzP03D8vIyAGtra//rGJ0ahcpGtrZy\n",
+       "vPzo0/DwMLDx95PtLzIilU0LkZ89O3vQOT9K1crg4OCGz3dDtkbi6upqR47XiXi0MjIy8s/2ysoK\n",
+       "UM41ZmhoqCPHyVYtaDXFgqddKI9HpMzMzMwKciFlZmZmVpDyi5uWdlIpJiYmmJycLP3cVp3Z2VnH\n",
+       "vI843v3HMe8v/RTv6elpIqLlfXOPSJmZmZkVVNmIVOknNTMzMyuo3YhUJYWUmZmZWS/wrT0zMzOz\n",
+       "glxImZmZmRVUSSElaUrSZUm/SjpaRR+suyQtSrogaV7S+bTtHknnJP0i6ayku6vupxUn6RNJS5Iu\n",
+       "5traxljSsTTnL0s6WE2vrag28Z6WdC3N83lJh3L7HO8akzQmaVbSz5J+kvRa2u4cb1J6ISVpG/AB\n",
+       "MAXsAZ6V9HDZ/bCuC+BARIxHxL607U3gXEQ8BHybvrf6+pQkj/NaxljSHuBpkpyfAj6U5BHxemkV\n",
+       "7wDeS/N8PCK+Ase7R6wDb0TEI8CjwMvptdo53qSKH3IfcCUiFiNiHfgCOFxBP6z7mv/D4XHgeLp9\n",
+       "HHii3O5YJ0XEd8CfTc3tYnwYOBER6xGxCFwh+S6wmmgTb9ic5+B4115E/BERP6TbfwGXgB04xzep\n",
+       "opDaAVzNvb+WtllvCeAbSXOSXkjbRiNiKd1eAkar6Zp1UbsYP0CS6xnnfe94VdKPkmZyt3kc7x4i\n",
+       "aScwDnyPc3yTKgopz7fQH/ZHxDhwiGRI+LH8zkjm3fDfQg+7iRg7/vX3EbAL2AtcB979j8863jUk\n",
+       "6U7gS+D1iNiwwrZzPFFFIfU7MJZ7P8bGKtZ6QERcT1+XgVMkQ7xLku4DkHQ/cKO6HlqXtItxc94/\n",
+       "mLZZjUXEjUgBH/PvrRzHuwdIuo2kiPosIk6nzc7xJlUUUnPAbkk7JW0neTjtTAX9sC6RdLuku9Lt\n",
+       "O4CDwEWSOB9JP3YEON36CFZj7WJ8BnhG0nZJu4DdwPkK+mcdlF5IM0+S5Dk43rUnScAMsBAR7+d2\n",
+       "OcebDJR9wohoSHoF+BrYBsxExKWy+2FdNQqcSvKQAeDziDgraQ44Kel5YBF4qrou2lZJOgFMAPdK\n",
+       "ugq8BbxDixhHxIKkk8AC0ABeCi+rUCst4v02cEDSXpJbOL8BL4Lj3SP2A88BFyTNp23HcI5v4iVi\n",
+       "zMzMzArqizkezMzMzLrBhZSZmZlZQS6kzMzMzApyIWVmZmZWkAspMzMzs4JcSJmZmZkV5ELKzMzM\n",
+       "rCAXUmZmZmYF/Q0kXnLFpKb3UQAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7ffb01c2f0d0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "feat = net.blobs['conv5'].data[0]\n",
+    "vis_square(feat, padval=0.5)"
+   ]
+  },
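+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal sketch of a `vis_square`-style helper is shown below for reference, since the cells in this section rely on it: it normalizes a blob to [0, 1], pads it out to a square number of tiles, and arranges the tiles into a single grid image. This is an illustrative sketch assuming `numpy` is available as `np` and `matplotlib.pyplot` as `plt`; the notebook's own `vis_square`, defined earlier, may differ in detail."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Illustrative sketch only; assumes np (numpy) and plt (matplotlib.pyplot) are imported.\n",
+    "def vis_square_sketch(data, padsize=1, padval=0):\n",
+    "    # normalize to [0, 1] so every tile is rendered on a comparable intensity scale\n",
+    "    data = (data - data.min()) / (data.max() - data.min() + 1e-8)\n",
+    "    # pad up to a square number of tiles, and pad each tile's right/bottom border\n",
+    "    n = int(np.ceil(np.sqrt(data.shape[0])))\n",
+    "    padding = ((0, n ** 2 - data.shape[0]), (0, padsize), (0, padsize)) + ((0, 0),) * (data.ndim - 3)\n",
+    "    data = np.pad(data, padding, mode='constant', constant_values=padval)\n",
+    "    # tile the filters into an n x n grid and display the result\n",
+    "    data = data.reshape((n, n) + data.shape[1:]).transpose((0, 2, 1, 3) + tuple(range(4, data.ndim + 1)))\n",
+    "    data = data.reshape((n * data.shape[1], n * data.shape[3]) + data.shape[4:])\n",
+    "    plt.imshow(data)"
+   ]
+  },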
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The fifth layer after pooling, `pool5`"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 35,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "[base64-encoded PNG image data omitted: rendered figure for the pool5 feature-map visualization]\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7ffb01bbe590>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "feat = net.blobs['pool5'].data[0]\n",
+    "vis_square(feat, padval=1)"
+   ]
+  },
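+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The blob shapes being visualized can be inspected directly; the short illustrative cell below prints them (the exact dimensions depend on the deployed network definition and batch size)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Illustrative: print (batch, channels, height, width) for the blobs shown above.\n",
+    "for name in ['conv5', 'pool5']:\n",
+    "    print(name + ': ' + str(net.blobs[name].data.shape))"
+   ]
+  },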
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The first fully connected layer, `fc6` (rectified)\n",
+    "\n",
+    "We show the output values and the histogram of the positive values"
+   ]
+  },
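+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "An illustrative sketch of how such a figure can be produced is given below, assuming `plt` is `matplotlib.pyplot`: plot the raw `fc6` activations, then histogram only the values left positive by the rectification. The notebook's own cell follows."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Illustrative sketch; the notebook's own fc6 cell follows below.\n",
+    "feat_fc6 = net.blobs['fc6'].data[0].flatten()\n",
+    "plt.subplot(2, 1, 1)\n",
+    "plt.plot(feat_fc6)\n",
+    "plt.subplot(2, 1, 2)\n",
+    "_ = plt.hist(feat_fc6[feat_fc6 > 0], bins=100)"
+   ]
+  },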
+  {
+   "cell_type": "code",
+   "execution_count": 36,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "[base64-encoded PNG image data abridged: rendered figure of the fc6 activation plot and histogram of positive values]\n",
+       "8yXdOoRwUfriAgAA5G9rF2EI4WvFy8MkHSzpS1oEWJcU718i6fwkpQMAABihrQGW7YNsXy1pn6T3\n",
+       "hhCulbQrhLCvWGSfpF0JywgAADAqh2xbIIRwo6TTbR8j6d22H7b2ebBd2s9Y9T4AAECOQgiOkc7W\n",
+       "AGslwy/bfoek+0raZ/uEEML1tk+UdMOG70UpKOqxvSeEsGfocswJdd4/6rx/1Hn/qPP+xWwY2nYX\n",
+       "4fHLOwRt31LSOZKuknSZpAuLxS6UdGmsAgEAAIzdthasEyVdYvsgLYKx14cQrrB9laQ/tv10SZ+R\n",
+       "9MS0xQQAABiPjQFWCOEaSfcpef+Lkh6RqlDoZGfoAszQztAFmKGdoQswQztDF2CGdoYuANrbOg9W\n",
+       "p8TtwBgsAAAwBjHjFh6VAwAAEBkBFgAAQGQEWAAAAJERYAEAAERGgAUAABAZARYAAEBkBFgAAACR\n",
+       "EWABAABERoAFAAAQGQEWAABAZARYAAAAkW182HMKtksffsgzCwEAwFT0HmAtrMdYxFYAAGA66CIE\n",
+       "AACIjAALAAAgMgIsAACAyAiwAAAAIiPAAgAAiIwACwAAIDICLAAAgMgGmgcrvaoJTSUmNQUAAGlN\n",
+       "NsBaKIuxiK0AAEBadBECAABERoAFAAAQGQEWAABAZARYAAAAkRFgAQAAREaABQAAEBkBFgAAQGQE\n",
+       "WAAAAJERYAEAAERGgAUAABDZ1gDL9sm232v7Wtsftf3s4v09tj9n+6ri37npiwsAAJA/h1D5TOTF\n",
+       "AvYJkk4IIVxt+yhJ/1PS+ZKeKOkrIYRf3/DdsP5g5cVDmNfzdPQHMJfnkyYvAAAwfmVxS1tbH/Yc\n",
+       "Qrhe0vXF66/a/rik2y/LEqMQAAAAU9JoDJbtUySdIemvi7eeZfvDtl9r+9jIZQMAABilrS1YS0X3\n",
+       "4JslPadoyfpdSb9YfPxLkl4p6ekl39uz8udO65ICAABEZHu3pN1J0t42BqsowKGS3i7pXSGE3yz5\n",
+       "/BRJbwsh3HPtfcZgAQCAUYg5BqvOXYSW9FpJH1sNrmyfuLLY4yVdE6NAAAAAY1fnLsIHSfoLSR/R\n",
+       "/iahF0q6QNLpxXuflvQjIYR9a9+lBQsAAIxCzBasWl2ErRMnwAIAACPRaxchAAAAmiHAAgAAiIwA\n",
+       "CwAAIDICLAAAgMgIsAAAACIjwAIAAIiMAAsAACAyAiwAAIDICLAAAAAiI8ACAACIjAALAAAgMgIs\n",
+       "AACAyAiwAAAAIiPAAgAAiIwACwAAIDICLAAAgMgIsAAAACIjwAIAAIiMAAsAACAyAiwAAIDICLAA\n",
+       "AAAiI8ACAACIjAALAAAgMgIsAACAyAiwAAAAIiPAAgAAiIwACwAAIDICLAAAgMgOSZ2B7YemzgMA\n",
+       "ACAnDiGkS9wO0hlf3v/OFw+V/u4IaT1PK4Tg+HmXrVv8vLpYlPNAOZURAIA5sB1iXX97CLBW03+T\n",
+       "pAtEgLVfeTnzKiMAAHMQM8BiDBYAAEBkBFgAAACRbQ2wbJ9s+722r7X9UdvPLt4/zvZe29fZvtz2\n",
+       "semLCwAAkL86LVjfkPTcEMI9JJ0l6Zm27ybpIkl7QwinSbqi+BsAAGD2tgZYIYTrQwhXF6+/Kunj\n",
+       "km4v6TxJlxSLXSLp/FSFBAAAGJNGY7BsnyLpDElXStoVQthXfLRP0q6oJQMAABip2gGW7aMkvUXS\n",
+       "c0IIX1n9LCzmekg33wMAAMCI1JrJ3fahWgRXrw8hXFq8vc/2CSGE622fKOmG8m/vWXl9Y/uSTkDV\n",
+       "pKIAAKB/tndL2p0k7W0Tjdq2FmOs/imE8NyV919RvPdy2xdJOjaEcNHad5lodDXniklFmWgUAIDh\n",
+       "xZxotE4L1gMlPUXSR2xfVbz3Akkvk/THtp8u6TOSnhijQAAAAGO3NcAKIbxf1WO1HhG3OAAAAOPH\n",
+       "TO4AAACREWABAABERoAFAAAQGQEWAABAZLXmwRpK1bxRw06zUC63MjHNAwAAw8k6wFoomzdqSOVz\n",
+       "aw0rtzoCAGDe6CIEAACIjAALAAAgMgIsAACAyAiwAAAAIiPAAgAAiIwACwAAIDICLAAAgMhGMA/W\n",
+       "dpsmAAUAAOjbJAKsBSbbBAAAeaCLEAAAIDICLAAAgMgIsAAAACIjwAIAAIiMAAsAACAyAiwAAIDI\n",
+       "JjRNQ31l82aFEKLO6zCWubmqyhm7PgAAmJNZBlj9zZk1lrm5xlJOAADGgS5CAACAyAiwAAAAIiPA\n",
+       "AgAAiIwACwAAIDICLAAAgMgIsAAAACIjwAIAAIhslPNgjWUSzy6YABQAgPEaZYA1j4kxy+KrKa4n\n",
+       "AADTQxchAABAZFsDLNsX295n+5qV9/bY/pztq4p/56YtJgAAwHjUacF6naT1ACpI+vUQwhnFvz+N\n",
+       "XzQAAIBx2hpghRDeJ+lLJR8xIAgAAKBElzFYz7L9YduvtX1stBIBAACMXNsA63clnSrpdEmfl/TK\n",
+       "aCUCAAAYuVbTNIQQbli+tv0aSW+rXnrPyusbK5fKbW6r3MqzlGu52mCuLwDAkGzvlrQ7RdqtAizb\n",
+       "J4YQPl/8+XhJ11QvvWfl9Zs2pJrjvE85zreVY5m6mNr6AADGIoSwI2ln+bftF8VKe2uAZfuNkh4q\n",
+       "6Xjbn5X0Ikm7bZ+uxdXx05J+JFaBAAAAxm5rgBVCuKDk7YsTlAUAAGASmMkdAAAgMgIsAACAyAiw\n",
+       "AAAAIiPAAgAAiKzVNA1TNKX5pYaW2/xWm7Ytc24BAFIgwLoJ8zHFlVt95jjPGgBgqugiBAAAiIwA\n",
+       "CwAAIDICLAAAgMgIsAAAACIjwAIAAIiMAAsAACAyAiwAAIDImAcLvclxMteyMq1PPprbxKkAgPwR\n",
+       "YKFHuU0+KtUvU45lBwDkii5CAACAyAiwAAAAIiPAAgAAiIwACwAAIDICLAAAgMgIsAAAACJjmgaU\n",
+       "6jI/1JByLBMAYH4IsFCh7fxQm5btC3NWAQCGRRchAABAZARYAAAAkRFgAQAAREaABQAAEBkBFgAA\n",
+       "QGQEWAAAAJERYAEAAETGPFgYLSYVBQDkigALI8ekogCA/NBFCAAAEBkBFgAAQGRbAyzbF9veZ/ua\n",
+       "lfeOs73X9nW2L7d9bNpiAgAAjEedFqzXSTp37b2LJO0NIZwm6YribwAAAKhGgBVCeJ+kL629fZ6k\n",
+       "S4rXl0g6P3K5AAAARqvtGKxdIYR9xet9knZFKg8AAMDodZ6mIYQQNs9HtGfl9Y1ds8tWbnMy5VYe\n",
+       "xFW1fUMIzFMBADXZ3i1pd4q02wZY+2yfEEK43vaJkm6oXnTPyus3tcxuDHKbj6msPGXX5KHLifZy\n",
+       "2+cAYFxCCDuSdpZ/235RrLTbdhFeJunC4vWFki6NUxwAAIDxqzNNwxsl/XdJd7X9WdtPk/QySefY\n",
+       "vk7S2cXfAAAAUI0uwhDCBRUfPSJyWQAAACaBmdwBAAAiI8ACAACIjAALAAAgMgIsAACAyDpPNIr5\n",
+       "YPJSAADqIcBCA0xsCQBAHXQRAgAAREaABQAAEBkBFgAAQGQEWAAAAJERYAEAAERGgAUAABAZ0zRg\n",
+       "FJiDCwAwJgRYGBHm4QIAjANdhAAAAJERYAEAAERGgAUAABAZARYAAEBkBFgAAACREWABAABERoAF\n",
+       "AAAQGfNgAWvqTmpatlwIodbkXFV51P1+XygnALRDgAUcoO6Epl0nPh3LxKmUEwCaoosQAAAgMgIs\n",
+       "AACAyAiwAAAAIiPAAgAAiIwACwAAIDICLAAAgMiYpgHZqTsP1RR1mVurT7HLyTxWAKaGAAsZKrvW\n",
+       "zuU6O5a5nFKUcyzrDgDb0UUIAAAQGQEWAABAZJ26CG1/RtK/SPo3Sd8IIZwZo1AAAABj1nUMVpC0\n",
+       "O4TwxRiFAQAAmIIYXYSMRAUAAFjRNcAKkt5j+4O2nxGjQAAAAGPXtYvwgSGEz9u+jaS9tj8RQnhf\n",
+       "jIIBAACMVacAK4Tw+eL/L9h+q6QzJa0FWHtWXt/YJTsgeykmSW2S5lgmKu1Ll/qoO/lp10lSu+ZT\n",
+       "Ny8mcwUOZHu3pN1J0g6h3fXA9hGSDg4hfMX2kZIul/TiEMLlK8uEm08e+CZJF6h8QsGqySXrLFv3\n",
+       "vbGkOeayzznNPMvefYb14fJpFqTELWfdMqUqe7186ufVtZzAHNgOsY6JLi1YuyS91fYynTesBlcA\n",
+       "AABz1TrACiF8WtLpEcsCAAAwCczkDgAAEBkBFgAAQGQEWAAAAJERYAEAAETWdaJRABi1FHOXpUy3\n",
+       "bl5MvwAMiwALACrnGkuRZtl8XV2lSBNAF3QRAgAAREaABQAAEBkBFgAAQGQEWAAAAJERYAEAAERG\n",
+       "gAUAABAZ0zQAM7NpfqbYcyf1ORdUXTmWKYWh1rPP/asvVes01vVBPwiwgFlKMe9T3bxSzQVV11zm\n",
+       "jMqpjvvOP4W57DeIhS5CAACAyAiwAAAAIiPAAgAAiIwACwAAIDICLAAAgMgIsAAAACIjwAIAAIiM\n",
+       "ebAARNHXxJZDTxQ6dP5jVlZ3bSfrnOKEpnMwp+1GgAUgkqEnL+0LE062F7vupjih6RzMY7vRRQgA\n",
+       "ABAZARYAAEBkBFgAAACREWABAABERoAFAAAQGQEWAABAZEzTAExck3mbcpvjKbfy9GnoecXW5ySq\n",
+       "u1wOYs631Wc+TbZ53XzGtN2mhgALmLwmcw/lNsfTPObLKdfXtmhSx7ntH1WGqrsY+ZSl2TWfsWy3\n",
+       "aaGLEAAAIDICLAAAgMg6BVi2z7X9Cdt/Y/v5sQoFAAAwZq0DLNsHS3q1pHMl3V3SBbbvFqtgaGtn\n",
+       "6ALM0M7QBZihnaELAPRgZ+gCoIMuLVhnSvpUCOEzIYRvSHqTpMfFKRba2xm6ADO0M3QBZmhn6AIA\n",
+       "PdgZugDooEuAdXtJn135+3PFewAAALPWZZqGmvN1nP3l/a+vP0zSLTvkCQAAkD2H0G4uO9tnSdoT\n",
+       "Qji3+PsFkm4MIbx8ZZnZThIIAADGJ9YkrF0CrEMkfVLSwyX9o6QPSLoghPDxGAUDAAAYq9ZdhCGE\n",
+       "b9r+cUnvlnSwpNcSXAEAAHRowQIAAEC5JDO5MwFperYvtr3P9jUr7x1ne6/t62xfbvvYIcs4NbZP\n",
+       "tv1e29fa/qjtZxfvU++J2D7c9pW2r7b9MdsvLd6nzhOzfbDtq2y/rfibOk/I9mdsf6So8w8U71Hn\n",
+       "Cdk+1vabbX+8OL/cP2adRw+wmIC0N6/Too5XXSRpbwjhNElXFH8jnm9Iem4I4R6SzpL0zGLfpt4T\n",
+       "CSF8XdLDQginS7qXpIfZfpCo8z48R9LHtP+Oceo8rSBpdwjhjBDCmcV71Hlar5L0zhDC3bQ4v3xC\n",
+       "Ees8RQsWE5D2IITwPklfWnv7PEmXFK8vkXR+r4WauBDC9SGEq4vXX5X0cS3mfqPeEwohfK14eZgW\n",
+       "4z2/JOo8KdsnSXqMpNdIWt5RRZ2nt373GnWeiO1jJD04hHCxtBhXHkL4siLWeYoAiwlIh7MrhLCv\n",
+       "eL1P0q4hCzNltk+RdIakK0W9J2X7INtXa1G37w0hXCvqPLXfkPQ8STeuvEedpxUkvcf2B20/o3iP\n",
+       "Ok/nVElfsP062x+y/fu2j1TEOk8RYDFqPgNhcfcC2yIB20dJeouk54QQvrL6GfUeXwjhxqKL8CRJ\n",
+       "D7H9sLXPqfOIbH+3pBtCCFfpwBYVSdR5Ig8MIZwh6dFaDD948OqH1Hl0h0i6j6T/GEK4j6R/1Vp3\n",
+       "YNc6TxFg/YOkk1f+PlmLViykt8/2CZJk+0RJNwxcnsmxfagWwdXrQwiXFm9T7z0omu//X3t3r5pF\n",
+       "EEdh/DkWASNpJG3EFNoFCzubgETBJqWxkeA1WGlhm8ImN2AVJCBCNGJrYasgGLQThQQMpPEO/haz\n",
+       "EiEgCDMI5vnBsl8v7HKqw+7OvK+Bq5j5SNeA1SRfgW3gepItzHyoqvo+rY+AHdrnNmY+zgFwUFXv\n",
+       "pv3ntMJ12CvzEQXrPXApycUkM8AasDvgOjppF1iftteBF3/4rf5SkgBPgM9VtfnbKXMfJMn8r1E8\n",
+       "Sc4CN4APmPkwVfWwqhaqahG4A7ypqruY+TBJZpPMTdvngJvAHmY+TFUdAvtJLk+HVoBPwCs6ZT5k\n",
+       "Hqwkt4BNjicg3eh+kVMuyTawDMzT3hM/Al4Cz4ALwDfgdlX9+Ff3+L+ZRq+9BT5y/Nj4Ae1fDMx9\n",
+       "gCRLtA9Nz0zLVlU9TnIeMx8uyTJwv6pWzXycJIu0p1bQXl09raoNMx8ryRXaQI4Z4Atwj9ZbumTu\n",
+       "RKOSJEmdDZloVJIk6TSzYEmSJHVmwZIkSerMgiVJktSZBUuSJKkzC5YkSVJnFixJkqTOLFiSJEmd\n",
+       "/QRSxC44KICduwAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7ffb01ae5a90>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "feat = net.blobs['fc6'].data[0]\n",
+    "plt.subplot(2, 1, 1)\n",
+    "plt.plot(feat.flat)\n",
+    "plt.subplot(2, 1, 2)\n",
+    "_ = plt.hist(feat.flat[feat.flat > 0], bins=100)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The second fully connected layer, `fc7` (rectified)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 37,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlcAAAJPCAYAAABRvvFyAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xv0LGdd5/vPJ3dygRADOzGJJsgdkWRxNyAb5BJw5OaI\n",
+       "okBE5CAiIJ6jgs6Y7XgDR5DjcMRZQ4KRYXB00JyIoyYoP424JKIJCTcjZ5JlgskOs4gKXkGe80dX\n",
+       "79TuXdVdl29VPdX9fq211+5fd9VT37p/+3mefsopJQEAACDGUVMHAAAAsE1IrgAAAAKRXAEAAAQi\n",
+       "uQIAAAhEcgUAABCI5AoAACDQ2uTK9jm2P2D7Y7Y/avs1xfun2b7a9k22r7J96jjhAgAA5M3rxrmy\n",
+       "fYakM1JK19s+WdKfSXqupJdK+t8ppZ+x/UOS7p1Sev0oEQMAAGRsbc1VSumOlNL1xevPS/qEpLMk\n",
+       "PVvS5cVkl2uRcAEAAOy8xn2ubJ8r6QJJH5K0L6V0sPjooKR94ZEBAADMUKPkqmgSfK+k16aUPlf+\n",
+       "LC3aFXmGDgAAgKRjNk1g+1gtEqt3pZSuKN4+aPuMlNIdts+UdGfFfCRcAABgNlJKjihnbXJl25Iu\n",
+       "lfTxlNJbSx9dKeliSW8q/r+iYvawIOfE9oGU0oGp4xgb671bWO/dwnrvlh1e77BKoU3NghdKepGk\n",
+       "J9u+rvh3kaQ3Snqa7ZskPaX4GwCAWbGVbH3z1HFgu6ytuUop/ZHqE7CnxocDAMDovlrSr00dBLYH\n",
+       "I7TH25s6gInsTR3ARPamDmAie1MHMJG9qQOYyN7UAUxkb+oAJrI3dQBzt3YQ0V4F22kX+1wBAObD\n",
+       "VpL0H1LSJVPHgmlF5i3UXAEAAAQiuQIAAAhEcgUAABCI5AoAACAQyRUAAEAgkisAAIBAJFcAAACB\n",
+       "SK4AAAACkVwBAAAEIrkCAAAIRHIFAAAQiOQKAAAgEMkVAABAIJIrAACAQCRXAAAAgUiuAAAAApFc\n",
+       "AQAABCK5AgAACERyBQAAEIjkCgAAIBDJFQAAQCCSKwAAgEAkVwAAAIFIrgAAAAKRXAEAAAQiuQIA\n",
+       "AAhEcgUAABCI5AoAACAQyRWwZWwdZ+sRU8cBALuK5KqCrSfY+oWp4wA6+m5J108dBADsKpKrai+T\n",
+       "9MqpgwA6usfUAQDALiO5AgAACERyBQAAEIjkCgAAIBDJVbU0dQAAAGCeSK4AAJBk61hbJ04dB+aP\n",
+       "5AoAgIVLJX126iAwfyRXAAAsPEzS8VMHgfkjuQIAAAhEcgXMGP1DACA/JFfATNl6qqS/nzoOAMDh\n",
+       "SK6qMRQD5uCsqQMAAByJ5AoAACAQyRUAAEAgkisAAIBAJFcAAACBSK4AAAACkVxV49eCAACgE5Ir\n",
+       "zJKt420dN3UcAACsIrnCXN0g6XemDgIAgFXHTB0A0NEDJd136iAAAFhFzRUAAEAgkisAAIBAG5Mr\n",
+       "25fZPmj7xtJ7B2zfZvu64t9Fw4YJAAAwD01qrt4paTV5SpLeklK6oPi3bR2LGYoBAAB0sjG5Sild\n",
+       "I+muio8cHw4AAMC89elz9WrbH7F9qe1TwyLKA4kjAADopOtQDG+X9B+K1z8u6c2SXrY6ke0DpT/3\n",
+       "Ukp7HZcHAAAQxvZ+SfuHKLtTcpVSunP52vY7JP1mzXQHuoU1OfpcAQCwxYoKn73l37YviSq7U7Og\n",
+       "7TNLfz5P0o110wIAAOySjTVXtt8j6UmSTrd9q6RLJO23fb4WNTw3S3rFoFECAADMxMbkKqX0woq3\n",
+       "LxsglpzQLDgP/PAAAJAdRmgHAAAIRHJVjRqReaCGEUAWbP2yrQdPHQfyQHIFAEB/L5b0nKmDQB5I\n",
+       "rqpRIwIAADohuQIAAAhEctWCrStt/cHUceAQ+sYBALLT9fE3266uWfBpkk4YMxAAADAv1Fxhzugb\n",
+       "BwDIDslVNZqbAABAJyRXAAAAgUiuqtHcBAAAOiG5wmhsvdLW+6aOAwCAIfFrQYzp2yQ9YeogAAAY\n",
+       "EjVX1WgWBAAAnZBcAQAABCK5qsZQDMNguwIAth7JVTskBwAAYC2Sq2r0uQIAAJ2QXLVD0tVP9Paj\n",
+       "JhEAkB2SK4yJZAgAsPVIrqolSbJ1f1tXTB0MalGTCADIDsnVek+T9JypgwAAAPNBcgUAABCI5AoA\n",
+       "gEzYer6tN0wdB/ohuVqPDth52/X9Q58zYPv8uKSfmjoI9ENyBczXrieXAJAlkqv1qBnIG/sHQCS+\n",
+       "sFSw9SybbdMGyVW1ups2BxeQKVuPt/X/TR0HsIV+S9LpUwcxJyRX1bzyf1ZsPdTWg6eOA8jM10m6\n",
+       "39RBYNaoDUeIY6YOYGZyOfE+JumLko6dOhAAAHA4aq6q5ZJEYb0saxYBYAtxX2yB5ArAtuDiDyAL\n",
+       "JFcYU3RNEzdTAEB2SK7ayakZKqdYAABAgeSqGjUiw2C7AgC2HslVtbpaoZySgyNisXWNrQdNEQwA\n",
+       "YKvldP/LHsnVdnmCFmP95Cq6KZOmUZRx8QeQBZKralykZ8TWF2y9duo4JsBxCmw5W/9i6xFTx4F2\n",
+       "SK5mwNa9qt4ePZB8HSPpUUMvxNZRdlYD73IMYDC2vszW06aOAzpW0tdMHQTaIblab/XmNfrNzNZX\n",
+       "SfqbsZc7kOialrFrbt4m6TMjLxOYyo9KumrqIIA5IrnK3z1r3p9jk9Dca1oeKenUqYMAgAnM8Z4z\n",
+       "GZKranUHEQcXkC/OT2wrju2ZIbmqtqxh4YDO29xrwgCgCa51M0NytV7OB3TOsWEcJP8AkCGSq2p1\n",
+       "Ny1Lkq1jMvvVGABge/FFamZmm1zZ+hZbx060+D1J10y07CVONrYBtZcAxrLr19tWZptcSfoVLUYk\n",
+       "H9Py4LpQ0uNGWiY3UKAZLv4AsjDn5AoAACA7JFfV5vANeI41WnOMGQCAVkiuquWUBMwh0ZtKTvsJ\n",
+       "AABJJFdtTXEzr1vmHJOuOcacM7YnsBtyONdziGE2SK6qcRAhO7aeYuvC8luTBZMnzlsAWWCspna4\n",
+       "ePcTnQzs2v74PUmfl3TK1IFgJ2xl8m7rbEmfTmnnrh8YETVX+Vs7oCkAoJVbJT1l6iCw3TYmV7Yv\n",
+       "s33Q9o2l906zfbXtm2xfZfvUYcMcXU7faEiiAEwhp+tgtHtOHcBc2NyDumhSc/VOSRetvPd6SVen\n",
+       "lB6oRVPF66MDm5hX/l99PwdzvPBFx+ya1wCwTWqvnbYebeuEMYPBZhuTq5TSNZLuWnn72ZIuL15f\n",
+       "Lum5wXEB2GyOCfaQ2B7YVuu+PF4r6TUTLRs1uva52pdSOli8PihpX1A8ueFiHWvIk5R9BWBXjfGc\n",
+       "XZKsFnp3aE8pJW3fja1ufXJaTw70vPbHFDgGgN2w6VrHtSAzXYdiOGj7jJTSHbbPlHRn1US2D5T+\n",
+       "3Esp7XVc3lQ4YIGJ2PoxSb+Ukm6eOhZgh23tfdD2fkn7hyi7a3J1paSLJb2p+P+KqolSSgc6lo/N\n",
+       "dr3WZtXWXgB22I9K+qKkH586EADbp6jw2Vv+bfuSqLKbDMXwHkl/LOlBtm+1/VJJb5T0NNs3aTFe\n",
+       "yBujAspETmNLkTTUY9ugjC8csTi/IHEcdLKx5iql9MKaj54aHEtOOJjmhxsrgF01xj2L+2ILjNCe\n",
+       "v5xq0ZAXEkoMieML6Ijkqh0uNnlhfwBbwtYf2nr11HFkasprHV/kOyC5qpbTTbvuwM4pxhwcPXUA\n",
+       "E+Cih23yRI03IDXnDgZFcoVtcahvoK3jpwwE2BIkIPMx5L7iOOhg7snV2Dt9ioOMGqp6dfvjn2x9\n",
+       "7aiRAD3ZOtPeyRrYKXBdbY8kq4W5J1dDyenEqzugOdDXO3PqAAbCfq+X03nbxV9LWfU5mvv2RAyu\n",
+       "OR3MPbka6uTnYBoG2zUWN7/tc5+pA9gRva9Ftn7O1n+OCAbbZ+7J1dBWT8CcbmY5xYLxsN8xFr4M\n",
+       "rfc9kv6PqYMosK8yQ3KFOdv1RCPbC6qtk2w9ceo4gBq7fu1oI9vrTM5IrqrNYeDOnGJpigtaf3PZ\n",
+       "76+T9IdTB4FeOF/zkcO+mMu1JwskV+vlcEBvk0Mnp6372fpMVHnbytYrbV04dRwddH0ofB+cr2hq\n",
+       "668dgdhWHZBcrcdBNZyHSzpdkmy9xNZPTBxPrn5B0k+V/iaBCGDrLFtvmzoOIAj3qsyQXFWru4Hl\n",
+       "dGPLKZa+/r2kH5k6COyUZ0l61dRBDMXWo2zt61tMSDB52qbrJzI09+RqqJN/my8qO8fWk+3ZH+tL\n",
+       "5WOTGwTq/KmkS6cOYsvtyn1iV9Yz1LbccHYRB3zz5OL3JT1pyEBwBI7P6fUd7X2bk3eOz/bYZi3M\n",
+       "Pbka++Sfw8G1zRfEPlof67YusPWYIYLpobx/53A8joljvyFbt9l6QYf5kq1HDxHTDI15vG1aFs8W\n",
+       "zMzck6uh5HSRbntg79KJMPS6/rGkDw28DGAKZ6l7be6DIwOZSE7XeGyhuSdXu5RIrOLiAGBIu3x9\n",
+       "BXqZRXJl61xbD5k6Dk2T0GxTEjXmxTpiu3Fz6Y5tBxSK5tR7TR1HR5zLHcwiuZL0QUkfr3h/qMRj\n",
+       "DgkNB/x6u7B95nCcYr62+fia4vpw8oBl78L1blbmklwdN/Ly6g7UKQ7gbTppoi/W23zxR3scD0A8\n",
+       "r/yPBuaSXAFtcaOdFhfi+dvmfTi368Pc4t15oyRXtu60dY8hih6gTOnuAzmHi8scRotvasztOcSy\n",
+       "ctjmrnkNYHtxrs/MWDVX95F06kjLwu5oc8GhgzuApdVz+RxbfzPIgsb5MRbjXGVm7s2CY9ck9F6e\n",
+       "rafa+t9tZmn5/lzMPf6p5FB7ht2wS8fag6XWv+Zreg2r+jFWW7u0L7bC3JOroQ1xQF8o6cvazmTr\n",
+       "+AFiGVtOIxo3QQLY3RTbjhsQmuJYaY4O7R3MPbkaemevlj/lwfVPtk6YcPnRhr64beuFgAc3S7J1\n",
+       "tK3vzSCO59l66dRxrGPr2219tsus4cHkq8u5tLPnHzabe3K1FWz9s61/12DS8oNY53hiR1+sh94G\n",
+       "q+XPcZtvq3Ml/aepg5D0DkmXTR3EBk+QdO+pg8jMtiWO27Y+szeX5KruwNmWm91xkh45dRBbZohm\n",
+       "wRwuYHN5cHPOsXVm61hbPzd1HEE27aNtub6in608l4c2l+RqbHO4qBxT0w9rDrFHaXXS2zppqECw\n",
+       "M75c0vdNHURDF00dQMaolcag5pJc1R34Y2fUuZ2Ar6p4z7Y+YuvfRi/M1rk9i5hy+z1C0ucnXH4U\n",
+       "vkXWy+38zN2m7cWxlo8pj206tHcwl+SKnVqt7heEXyPpGQMs72b7sH5fbU05AObpAWVs7c3b1olT\n",
+       "xwCMKOL6k9N9KadYoPkkV7ngAN5tOez/VPO6M1tnSfr7iLLKxQaXB8yGrbfZ+uGp4xiDrSfbeu/U\n",
+       "ceRmLsnVNj0CZpZs3T+4yIh916aMrTlWbN3PDhmYcOmegWVhe2zNOVNh6OvPqyS9NmAZOdj0Rek4\n",
+       "SaeMEciczCW5QnPhF0Rb50v6y+hig8vLbXlDsRa/LI18pMa2bBugq21OJIdmsf2OMHlyZesuW2d0\n",
+       "nb3Hcn/K1q1d58/YEDfKIR66vZGtv9vQMX/sQWRzk3N82Y7QbutUKayPWc77YJNNsc953TaZW5+r\n",
+       "3Du0k1ytmDy50uKBzl81wXKfKOnsms9ohjzcEBeR8rasa3I8RdLjB1jelGXkaFvXq85tkt46dRBA\n",
+       "CzknwtuchHeWQ3LVx9g3BQ6i+W6DLnGvHl85rPuuJUJDqBzvzNaP2HpAy7LmvD/mHHs0Hn/THc2C\n",
+       "FeaeXA0th2rfbb2h57Bem+QYY44xbYufkPRdLeeZ8/4IHaHd1vG2Ht4jnjHNLRnIoVlwnbltz8HN\n",
+       "Pbma84VtVdS65HyQb9P+ygH7GkNquw+/X9INQwQygLn1ucoZ26FCLslV75uErV+39eCIYEpyvnnV\n",
+       "ya6Tt60ftPXJlbebbtuoTpRz3JdoZ4p9vHaZtt5p681jBVPju2vej95ekw1Ea+sBdq+HUx+xLYpm\n",
+       "4mzGb7J10pp4hrzub+rQTrNghVySq02aHDjPk/TMgeMY8gDauk70tk4pfpn1NEkPUvxDh9uUEbG8\n",
+       "HPZFUsd1sfU7tl4cHM+u27QvvkPtmxrR3k2SfrnH/FXn9kskPb/lPEO6v9bHM6Ucro1ZmUtyVafT\n",
+       "DrX1g2r2LSuH6s4cYihrE88fSPoL9TvxcuprkMO+6PMIoWdIesGGMrE7QvtcZeDkFtNG9HHdlfMm\n",
+       "518qZuuYqQNoKLqz95tGXt6YcrogfpUOH/27vP3GjjOn7dLXoOti6z6SjkpJB/sUExVP5rbpuMrB\n",
+       "WNtzjONzV84BmgUrzCW52pWDdJMm24EnmMfJ9YIRvW9X1/NaSffWYgw6rDfn82zT8T3Fuk21PdPK\n",
+       "/23mGUOO+6q87FyvlZOZe7PgLsjtoK08iW0dZ+uymnmq1mEOj79ZnSeHfRH+4GYduZ5nSLpXUNl9\n",
+       "3d9uvJ457J85mXNimA1br6z5KPJ4nDp5qozB1mlatFBw7q3IJbnatGO2rrN3C11PqrG3zZmSXjry\n",
+       "Msf+lpnLzWgOx33Utpri6Q1tTL4vbH3RruxLh3qR14Nv2/D5Nvtv0uS/hs1SLsnVJrkcpBtPSFsP\n",
+       "tfWVAy1/qs6Wfcuq2m7blOyMaehHEeXmX6cOYINJjsFimIBlQnW0pEd3KKbrl9oh9VqmrafYekKT\n",
+       "SQNjGewYsHV6y1gGCWPNZ8s+tTlfQyaRdZ8rW+dp/S9Acry5fkzSLZLOm2j5Qx/kUdt8Ds2CuZvr\n",
+       "vmgj9+RqKj8h6ZOSfrVHGTn+CqzPMvcX//5BNY84WmOIx99EXIvvuXkSSfPqq7YTcq+5+oAWI/52\n",
+       "3nG2bGtf19k3/F3nuI7Li7CMMdm6Pai5YBtOnG1Yh9mz9e22HtRiltyTq6yPKzvrxLk3W6+09QdT\n",
+       "Lb74P+QYqDkvsj6+UC/35OrogDK+WdIdLefpe0DncEGzFh2Tn9hoYus0W+esKauP7C8Qts5r8Fy0\n",
+       "HNYjhxiaqDtm/qsWtS5NtUmupmhqbvML3tzM5Vha5/mSvm7lvRNt/Y8N82W17rbuIR3xFAtJ+tLY\n",
+       "sVRY9wv0ZXxZbc8c5J5cLdVdnJrs0Pv0WO6h8m29T9LxDecb6mI65EX6f0r6qxFjGLo/Q5Iku/GI\n",
+       "xsta0iFiiTTlWGFT6FxzZet3ez4SBYFsPdjWKzZMFnVMf9OmcAZabld19+JJ4rL1WltvbzApzYI1\n",
+       "5pJc1RnzZvcNIy4rQtuD/csCy2pi6MfRLMt/SINppepa0hySqSpDj3OVkz7Ngk+X9NCW87TdFk2m\n",
+       "z7Uz8tgjtP+IpF/cME2u51yViGfifsDW4xosY+xxrl6ju59JOad9ko1ckqscf7UidT+opjwYl9tq\n",
+       "0F8L2vrUhotCWQ6Pv+ECUW+IbdOoTFuPtvV3aybp2+fKxXL2t5m+bflbKut1sye/f0Vsn/2SnrXm\n",
+       "85y/+EjUXNXqdXDavsX2Dbavs31tVFBVixqw7CEMddLn0L/jLFu3aDH+0Kb+XOtiiWgWjCh/3fSr\n",
+       "7+VwAUnKI44+yvE/StIpa6bt3aG9eHj4B/qWs4VmexzZ+kbF/tihz7aouw5ty8Pi1yG5qtE3CUiS\n",
+       "9qeULkgpPSYioBVzfbBoDslg5LYpl/UwqfU4XlW1aUNvo4g+FXWj0d9h6wc6lBfBquhgausoWydM\n",
+       "E9Kg+g4Um9TuWBuiWbD1sW7reLvxz/DXLaNPs2DOHthz/ojr41C16vcbYBl9rOvQTnJVI6KGZYwT\n",
+       "dKoHKXc9YLrEFX1w5nDhzOkCFrGc5TbdJzUaqHBMl0j6xwbTNR7x3Nattn6ye0hZaNp03dVQ59ll\n",
+       "kv52oLIx7X1rk/KXpMkHMkU3ETVX77f9Ydsvjwioonxp/ANniuU2+eYZOarwokDr1GIsnKbr2meb\n",
+       "5PSg08aKWqGcvplVxfLghvO2eRjz2eqeROZysf/ZqQNQt2Px/iMsI9e+rk2M8aOO8q/Fbetetk63\n",
+       "9aiGMWxLs2CT7hc5xJmVvsnVhSmlCyQ9U9KrbDcaU2lkb5tgmTncWNYe7Pah0fnv0mIssLWT17zu\n",
+       "E8vYJ2OfJuYc9udSuZmLC1q8tvs6933Qp1kwp+N+at8o6W8k/YKkP10zXZdtNuftTHJVo9fjb1JK\n",
+       "txf/f8b2b0h6jKRrlp/bPrB4dYmkqx4v/fGvd11UnzgnENks2Hbd17WPLz6wHiHp+tI0Z3ZYTptY\n",
+       "2n7WdLo2CdGcmijXyf5CbOtiLY6pOpsS/3tJh56plrvs98cAcjgP+mq7Dsvj+dgWZURup6l/GVln\n",
+       "1seC7f1S418St9I5ubJ9oqSjU0qfs32SFmPK/Fh5mpTSgcW0ukQ68Mc94uwziOjgioEK/z4l/cvy\n",
+       "rcjia153mX/pjJZl9N3Ooc2spUf6DNlRuWxZIzv187vq/q57byq/1HP+y6TGg7+W5ZpAj3Hc5NCF\n",
+       "IeKJGmOJ2F5VNcip4vM+sSw/25RcDbn/t7ZDe0ppT9Le8m/bl0SV3Scb3ifpGtvXS/qQpPellK5a\n",
+       "M33kz+ablDmmz0p6c+nvXL9ldNU1wVu3X5vu86rpHtthvj7HyvJn/FMdb6vLzeW4H0qbfmHRdqlZ\n",
+       "MLrmZduue02NcT7mvm1zPw9G17nmKqV0s6TzowIpHlr5zSkd9tyxOd1EvmLqAApDj8009qCerY6B\n",
+       "HsMRVK1Xjsdfl2/G2bF1tqRP13w8pwt1ox+Z2Po/Je2lpD8bKI6woUZafB69PGm6fT/EciPH8hvt\n",
+       "XC+ec1j+grNu2Tk8+zBLg2fDLZ7K/gpJP17z2ZwutlLsiZDDzbRvDH2qjl9nt2pu2IYb9qrV7d94\n",
+       "XWydFR9OiFvVrelvaEM1C/6spNcPUPYm755gmX2MdY0bowm5y/hmdfP0HPC7/hmbth628mvo90o6\n",
+       "rWHRs24WHNKYVY2bqqfXjbY7aJ8rW19h65GB5Q7V52rsZfctM6o56+QW09ZdFLr8WjCXC0bddmwS\n",
+       "320ZPCakTt2+ymW71yol/LnHuu6XwOfYjYfwiJDTtorscxVRzqahePrWMn7W1tfXfLba6tJ4LLyS\n",
+       "nPZtFnK66HapXow6uK+Q9OGgsqQZN9eMqM3JeGzN+0Of0JP1dbL1Hlu/VfHRSS3KGHzIBltn2npj\n",
+       "y9mGGkduzAv8F4v/c27qOoKtf7IPdef4N5I+YevEKWOSlMNzAof4ctWl3+cgNVeF+3acb2s7tA9p\n",
+       "jAO66QW0quZqrB0XfZHvsl3bjBAeVV7Tstc2C9p6rKrHE1u3/9qsS11yFbk9ord/YzU3ludq/QNd\n",
+       "V63bxkMmhc+W9EMd521SI53beGhzd7yO7Cvb9stL3fuWWnUFKVsOVrtNI5K36Ve6qebqs/3DGaSf\n",
+       "K0lVjVyaBaX1NVdzOJH6ilrH1VoB23q6rTf1KHNTbN+pxbfgoRwXUMbkx5CtB9mVVe7/aq8d16nv\n",
+       "r2nD170Ytf4lfYtpOk3HG/YYJr25lJp6xuwOMMSyls2sOfW5yumY+4eAMqK6d5RRc1Vj6qrYshx/\n",
+       "dTDEwTi0qmW/TtIPjh1ISd8Tr+6b9dx8UtKf13y2rrmvbkDUTcdZ1fkddWyeI+nynmXkOH5dZI1v\n",
+       "m2m6el2Hedr+AvfhLcvNKSmpU3dOlUX3R+vyq8DI2iYXj/G5YMO8bc4BkqsavUZob6jpwVGVXA3e\n",
+       "Z2QgfX5RF1XuUNusTQxRF9mI4zTip+oR6xOxLm0vuK+2Dz3UOadzqe6Xj1XN0K1+JdnDHBKDKkNu\n",
+       "mxtsPTAl/eUIy9rI1nOkw36ANHdjDcVgSY/Q4gveXI/z2RgjuVratDO7/FowV11qBIccU6bJN8/K\n",
+       "C6atkySdEhJR3nJuJujbLPhz0qGnB+SkSXLVxhQ3/SbLHDKuLsdkl3nK94q+v2xbp8m2uqJH+ZHm\n",
+       "Npq/1a0VgGbBDsZMrjbJqVmw0QFj6/iU9M8jxNNG1SCifX4N9Ac6/Fti1Ym2qXlnqBNv6BM66fB1\n",
+       "++qBl1enrto+fJydkbVpUsupL05ZdLNg41+D9jDVcTF1E+o6QwzC2nT6Jh3aI2wsy9Y9JT2wxfzL\n",
+       "7ZZTF6Ms5PJrwU3jXE1lU+x/GLisXJsFH9Rj3tVm3XL8Xdr168qP0OQXjaF9v2z9la2Xd5m14r25\n",
+       "jDAvbe4zNuW34Km32WtbDpq7NHXcUlAibOueGQ9+W8kO+dGNNGw/3ybn2z1alr2c98UNlr9Tcv+1\n",
+       "YPSzr6I9auLlV+l6gevTcXKMb31R8+bkHElPHrD8NrWMwwRgPcDWU1be/tYOsfQ+Xup+cWjrDLtz\n",
+       "Lf7U16CxmgXHLP+/S7otIpARVSXEtceGrX22/n75p/rXXH1fg2mG2O9TH//Zyqkqb+wbdISpl99E\n",
+       "TjH27Ts0pLEuEl06yVc19XZZVpt5o/yypN+r+azJN+kx9v3t6v5r2qnPr6mXX3ZYLLZObzGERrmW\n",
+       "u+tgl2Pr0yx/P1V31xiyeZDkakS5NAvOVaMTwdbX2rozYBldmwXnvg/mHn9fdQnZpu2S+3aLrrna\n",
+       "1Hy8rox9LWJpa4h+XF2mbTtP3y9Dn5H0LR3nnYO59bnqer9v0iyIFaPXXNn68qLTXKPJG5aZUw2c\n",
+       "JB1j68tKfz9e0n1GWnbf/k2bdEnwhnrcSaSxLhJdHonRttzvaFn2VPul7rzt9cXA1hUr518bbWsj\n",
+       "cvmVaa/j11aq6OvVtk9c1fRNk9apb9Ktlm/rfl3KtPWfbH1nm2VlJOfrd3YGTUpsXV/+s/j/05Le\n",
+       "E7yoHDvD/0xQOREXnakuXFEn4+xPalvX1nz03LpZSq/bPoZkOfhh11rPHLWJ/zk68hEvTcsYqq/i\n",
+       "0GVGxNG031nXGtOph62IXO6rG0xTtR2+V9JrK6arq10d9deCHeabOinO1tA1Po9Q9Y5ZbVPPbQdF\n",
+       "1LQMsW3b3BzWffuPviHM4cY96jFm66KVTtKPrpn0+CbFdQyj6QjtuSXfTWqu2sSc9fFp6yW2zi6/\n",
+       "1Wb2LovsME/ncm3dt+h7lfN+CPmlY8NlTIU+VyMaozntjIAymu7A6B099ckgDfNto48u5VaNyxR+\n",
+       "c7R1c98yAuZZ+m1Jzwgus62q2q+cLoZj/2J0lE7Btl5g6/9pUebl6vYYm0bxBM1T1rbp9KCk5zcs\n",
+       "e+prbuSnufsoAAAgAElEQVTyI58KMeSx26fvZk7Xk6yMkVz9dPF/eQc9SpKi+161mC4bdueLalN9\n",
+       "D/6hqqWH+HZ+bosyV839InFexXu5nw9taqXqakWaNJ+OvR1eLel7Sn/n3qE9smN2nXUPJt8GVUlQ\n",
+       "5H68SLr7mYC2nm+37sd7VMuYmpj7dXMwU/9a8G9Lr3d1J72lxbRdfjEV/cugPh3a52bTCP1/b+vU\n",
+       "yDJrpmsyT9XI3rknV02uPzn0ORl7uWPHmUM3garuDP2CaD4MRFUcXedbN3+TpLQu8f1+Se8svf/e\n",
+       "4r02+u7n3K8nWZnsV3YND/y2J9qFXWJZI4eDaarxl9osv2mNQW5NnGVdRjg/UdKZA8TSNI5NtTZV\n",
+       "F+2cEt0+NVdD1+6MUVZEuZE1V9t2821zfxujz1XV8pavx1iuFT+eZE7Xk6yMWXO17uIYVZ3/TS2n\n",
+       "3ySHi8XU33pblVeTNC9rVXJOruauahs1fW9I6y6+TWLp2h8kh3Xvaqqaq6ha7j79MueQCFeVPVSC\n",
+       "mvv2ILmqkdv4ULssl/FyNukSwzPDo9gs15O+S//BJkMxbEomGiU5tn5R0gkbo2uuSzNJ9HMSy+tX\n",
+       "99y+qWv25vQ8xa4do8c2ZrNg2/lXz90xtukQX25zvc5ObsrkKrcTsWyKQS+jljXFwT70dhrjIjmn\n",
+       "i8Rhsdp6jqSHV0xXuw1sueZhs6/oGVsbIQlTyaZ+apb0rg7ltvk8wlQd2qPWrU85kedhl2bBppom\n",
+       "R3XLmOL+N0Tz45yum6Oa8sHNOSdXETqvn62jbR0bGcyI1tWWNNomtv7c1lvjQqqMZU6etPL36nZ8\n",
+       "waEP3PgC/jpJ/1y8HnLbdGkWrKqR6/PNvjxN3XhjrbdBcY7eu+18dcXVvG4zX1fL+0DTsr6n5pfe\n",
+       "EbFMXVPTpWa5/HffZebcLLjpCw1Kpvy14BySqzFjLB+kvybpLwYoN2L+Phf+pvNeIOlpHZe5xEm/\n",
+       "/qL9kDWfDRVDm8/G1iWWi8OjaK9x3HblsDityig8TtI3RMQ0sLk0C0YstwlXLLfpfHW4ztYguRpO\n",
+       "n4Pu0eo+blH0N5+o/hhdyuozX641pUM/57EuoaqsCbV1pqQ3N1qY9YrgGtUj4rf1H3X4I4HqagSm\n",
+       "uHGultFkdP2mxnjQ+utryh+qM3b1RA7vU1fnH1pM27cbQZ9mwTFrrnK5Dm69MZKrOTy0t05onxBb\n",
+       "z5N0oEeZUU4JLi/nby9HjLVl6922HlQzXaRNyc8QyuU3Ob+/UdUjyd9doHWhrZdJ+kVJX90ynrbN\n",
+       "gv9Xy/K7fOGoMvUxPGZ/nF3pc9VF11ryJtvwh4OXG/XFt3ab23qppPuvKXPq/ZWtpg/q7KNpzVWO\n",
+       "Oyn6IvfQEWJo8q1wU6fedeVV9aka+2bQxLrYvk3Snyuu6XUqET+JbzLNWyQ9psF0bTVppmhz42/z\n",
+       "68k2n+Usss9VX7n1uRpy+V1aZL6z9HrosQGblF31/uo0l0n64Joyc7xvZyGXZsHcdlBEPGPVhDSx\n",
+       "Gkub5oy2N6Rcb2Cblj3E/qoq8x4dR45uqmkNSFXN6tj7p08zRZ++f30MtY1qE0Nbjy5qvaPkXHM1\n",
+       "lTG7T3yNFgMQ911+22tW3/t9VYy53buzkctQDJOdjDXt/9E4ABc630htfaPdqqa1smbUzmJst28d\n",
+       "sOy251XOCW+Tads0dcy1Y+4vSfr1ms8ia4vG7p9ZlsujspquQ10y3HT+8jAoXftcvX7NZ1W6Xg/m\n",
+       "et5MKpc+VydI+tkRYmkrqs/VmDewUZvobJ2nw59rt3H5xfhKbceguVLSE1vMU6euirvtRaLPoK9n\n",
+       "t1xWG02Pu6rzsktTR5+L65DJ35ySzNXlj10rF30fmHMNR9dmwYjuG0MLqSW29U22/ktAPFstl2bB\n",
+       "qsEMsZBFDd8a/0uLPkxt/LOkNwYs+15rPltevFYfrPy4gOX29TPB5XVJipaGaCoYYt6Ivii9zh9b\n",
+       "D5b0lT3LONnWXaW/q5rox+7DOEbNVd++TEOLWvcu67mueXyQJm1bjyw6qzdZ3up73y3pu4rXXwqK\n",
+       "bevk1KE9RxHV3Kuv22p784v+RtS2vKYX2AcElblJ21+fRWqz7bqs4+kbyomqsVnXxDZ0crVpmrq+\n",
+       "m22HNmiyrz7RYJpN7qPDE/5/arH8Kjk1C66WN/Q8Uvz1rkuzYJf56+YZqz/mz0h6iqR3rpmmibnU\n",
+       "SI6OEdqHE3XCTL2dIvfbqOvSso9W6+KL/1+yaRq78tE0EV5Q8V6f5GrsY23Ix5O07TsS2aG47w1n\n",
+       "qpqrqHKjvpS2WebYorfhqDVXE82/U3Lp0N7VycUgiEPJ4WCaW9+RdULa/Fv4rs2THNL1htikU2lV\n",
+       "ErQq8ubepMwTGkzTpOypku3VfiDPUs1AqT2X0ziGBu83HfNr6D5Xq/O0ffyNNEwfxSn1PZ77zD9F\n",
+       "zVWbadbNl/t+ncwYzYJNdD2gLteRfWq6WD1AoodiGKrT7xSJVJtmmrGWWafcJ2ubLwJdfrn0ogbT\n",
+       "DK3NxX5TIvNbkt7bY3ldjo8cfnnaxdB9rtreoLtMN5QpmgXXzZdTzVWVbb6u9jJmh/Z11ahdd3pE\n",
+       "YrVu+VHfrAf7dm/rNPuwTrY5H+xHrIt9qAalid/vsexN22Wsca7qDNHfJaIpbdO0U98Mm7i+wTRT\n",
+       "r0fY42+KX+O2mqXLYjrMM7oBx24bqrvEkN1nqLka0RjJ1YNr3p+yr0dTOcS1bjslLb6t3zJaNJu1\n",
+       "3Wb3HqDMydj6tH3ocRFDNe+8aM1n5XKanN9Db9u2j79pOk3V+3XLOq3F8tro0vzXtqzDJ7K8kjBU\n",
+       "zdf2uj5GzVVTU92so7fBJnX76JU9l79pmY8vXkfXvGHFGMnVw2re/7dVcRQPh33bsCGNbsixgKJq\n",
+       "75oqx1O1Xv/YcN5uC+/+TXTTEAxRTeRfLun8oLLqfOPK331qXofuY3LBms+OUvNzY+hmk7VxtDzu\n",
+       "wm7Qts6WtK/02bskfaznsrr0uTq5w3LKpk6ahpp+db6+8/9kzft3v9H9GvhwSW+tK7eBNl9odt6Y\n",
+       "fa5Wd8wP1nz2fZIebOsKxT51fkpNDuSuI/52eTr73LStjaly0YbPz+1YbpQu+60qYWxbI9y1Brnp\n",
+       "tPfoWcZYzSKblnPfDvN0nb483S2Sji79/URJX7GhzKiaq/K+u7ZmWU3KqXtvXRlNRTxfM8JYSVwf\n",
+       "TZqL1+47W0+X9NTSZyRXNabs0L7pon71WIGsMeYJ0OSb+dy/OUR09hxqn0zd5ypKn+Sqiz61zH2S\n",
+       "uT438BzVHStH17y/TtMkaNP0q8+/G0vkedM2qZuiWdBr5svtmH7Nyt9zuv+MaspfuxxV8zonbQ7s\n",
+       "yIOsXNambbM6Qm7bZ6qNKSKOqGPlOUHlyK5tmq1dX1uXNJ22hyYJSNua0abzNXFYGbYe2bOMXGox\n",
+       "+va56luDuOkc6dIs2CaWPl8CU0AcZWvLsvVlbaZvsJwxjrGIL6ltzuvye19Y+SyXZ0JmZ8pBRFcz\n",
+       "9ykNMRTDiba+qkcMS11vfm2naWqKfTVEzdUTJcnWo+za8ZGauk+HeQ70XGadtjWdY9d0rSvrKWum\n",
+       "adu0XnfMr2umXCzI+oYGy1pbRObz1yXYnZdr668kPb3io6Fvum3XddX3dFxu3+4Yq+ddSM2VrePs\n",
+       "w571WhbxA5fV5Ao1qLkazjdL+lRAOZtufo8OWEZXUyRa0cfKn2r9r+/G0mdbdu2v12e5UTVXTcs5\n",
+       "q8HyN637qRXlVHlfw9H9u2yDpvOM+cisLuuxGt85xb/V8rokbnNoFuybDPW5jq1bxrskfabDfE2X\n",
+       "+y8r7y1bTqi5WjFlUlMem2nqmqs6OcQ1akfJlTGzoo3Z56rNA0WPU+zF4dc6zBN1rPWp5WtbO9RX\n",
+       "m+TvIz3KWGrSh6hvk1yEoZsF68rI4XonjdgsGLjsPjGvm7fqs3esmf6hqq+dLZf1tS2WV37viyuf\n",
+       "0SxYI5cao1xO6qEMORRDtFs2PFJoqItI0/maHrNzf1p7m1/KtvkW3bbPVdtljl1GtKNW/m/FVpLW\n",
+       "fkEZ40HDozcLril/6ptv3TodZR82xMXQy1s33boa3Krj8DvaBFSzzCYJWBWSqIam7HNVlkuSt2qI\n",
+       "i02feceqWWg7wnNTXW4EXaffmFytjBcz9UVjqpqrLJIrW3dJekDP5bfu0G4fkcTWJVdtjt1zat4/\n",
+       "NI+tMzZN01BkzVXfctqUvU6bc7HrjxheLOmOiun6ng/RzYKR98aIc72uf/LU18/s5JLUTP0NNqQz\n",
+       "YU91yUDbKuOhRS+zbSfLyD4rU9TCDW3TD0X6dmiPVF7eqZK+esM0m8ro4vSVvyOSqzrlX8I9c810\n",
+       "EX1j2nxe92vBrteevttqjGbB+274PGo5TaZvW3PVVdWXi6Ml7V83zQqSq4ZySa5yiWMoTQ68LsnV\n",
+       "d3aIJTdtL0pNj5W2yVWSJFv3tvU1LeeNUK7FiUr6qrbVLjcLNlneoeTK1pNtXbtm3i7xj9G/adM5\n",
+       "sprE1sV0WDkdj8vK83DAZ/4dsajg6Z8btLzoGq6uy3ySpB/oMT9JVQ2aBauN0S9iVZPkarWcNkM9\n",
+       "DKHtdurahDFUzdVq50xJ+nnVd54um/LmuM5YzYIRrMP3U9WQGJG1yq2SKy1G9X/0yvt9Re+TLknf\n",
+       "tzecfnWdm26DqmOwzdAFY/5oYrmssZsFm37JGTq5Wh2kdtPxRM1VQ7kkNbk2sSz7R4zxGJ665GqK\n",
+       "fbRuMMeN+2rNt9Ku3/431cZEWC5j4zhIAy9f6jYq91J5+xyoWs6aB/+O3XSzWsajAspYp8mxsxzj\n",
+       "qmtisUmTm3DfrgBtY23aLDhGc120XJeTS81Vl2mqkFytyCW5GjyODQN6bhrA86E9F9/kwPvXDTH0\n",
+       "8f6W07+39IvBugHp1hnyJh1d7S4t9s9yXKMhym9bXp/zoTzvsxosK/dmwcjtXLVd77Xy9/KXfkP0\n",
+       "uSrPE9VZvGtt8Go55f/ryin/ve6atq6mo2qaPtr2L+tbXtT8WfS5WrPcptNQc1Ujl2bBMb5dnD/C\n",
+       "MupUdST8fVvfXXqrS7NgU3/WYZ6oG3xZRLPgUMfsMrnq0/RRp+7xOHWGGmBw+XlUn66+xo6jqkbw\n",
+       "Yw3LjzrumjxqZuhmwVW/1LDsLtsgslm3i7bJzlTNgnVyq7miWbChXGquRmsWtHXuwItoepA9WYtR\n",
+       "3Je6dGhvatm8eVhZtk5YN09N894YfUbWTTPUsdKn5mrTefS9Lcvr0yzYttmo7bbtexOqi6OrNnH8\n",
+       "botph665Wjfvv29YVt21pu12XQ6BsSmZikwcos7j6OdJrhvjr8kyvs3WvVvOsy7GHIZiKM8/xKPi\n",
+       "tlIuydUocdh6uKSb28wStOgmVeNDJld1/nHD511v8tE1V5vKiNCnuSb6+B2i1nBpteZq7GaTtmVF\n",
+       "JuqntShryj5XL+9Q3qb3upQVsQ2GbhbcZIzlHF1azn0lvazBPE37s0VeW6rKavJDA5oFO+i842xf\n",
+       "ZPuTtv/S9g81maXjZ5HWPv5izQNbW8T3ga7zD9ks2KW24Sip0fPVCnur80rSKQ1mbHuD7dJs10bL\n",
+       "b+d7fZZVt9whmwVXp+mYXL0w4pmWEc2CQ107ampt9qpikK1T7VZN3ENe8wb4ZePeyt9N55tds+CK\n",
+       "veWLddfCY1ssZ2nTeHSy9UJJz2hZbtNl1i276jgvO5RE2fp6kVzV6viIBx8t6W1a/FT5oZJeaPsh\n",
+       "PeIYo9mnSV+T9xX/r/48t2n5kv6g6zftKWqu1jlG1ReUTRchqV2T4pQ1J1VaJm97beZpashmQSuk\n",
+       "WfDPx0quhk6S69TU2uzVTX+X2jX/Rh3HVeVUDWnRpazSNthb+bu33JKrGnvLF5uSq7ImiUaTJtf/\n",
+       "Jun7G5RVy9b/sg89mL7FdWXvsGJqJvpKkVTValEzcZjHSPpUSukWSbL9K5KeI+kTHcv70Y7zDW2s\n",
+       "fgHScL8W/EpJr+pQ1jHqfpE+qmbers2CUbU6TXSpGYvuOzRks+DqsromtxHnxrdIenxAOUPo0ufq\n",
+       "KxqUO0bNVZMHVFfZ1FTVpRboe4r//3PHsvqqW07bpCC65mqMCgVJOk/SLxavq64NqzVjm2IpbzfX\n",
+       "vA91T67OknRr6e/bJD12wzz3tGt/NXW/0uuq59odLy2q3jcFtmaaUySdXPPZoY7dxfzLsY6W/9cu\n",
+       "t7S8e1Qtu3hvebFbveiVx1Q6oTR/eX3LMd+rbv1K7y+nXy7r7NJk97YrB848SUc+KPg0Vdemldeh\n",
+       "bpiGe6v6RK7at/dcvlhZt3Jn+3LzYnl7rtvfJ6v5Taa8jU/cUO5Sed/V7ZdTmhyzhfL5cUSH2A3l\n",
+       "lGPZtLwTV6Ypb6Oqee+hw/dFEeeJhx0vm9az5vN7SHrQ+nDvPi5XylgeE+XmuOMrPm9q9Vi5tw4/\n",
+       "FlaHbKia57DzYfV81uIHLD+/jG9lfVbPpSN+bFJMf1zNNCeVyrtPRaxNLI/jh0r6oKQ/Wvm8fFye\n",
+       "vGaf38vW54rXD6yZ5lT7sHVZrv89VaxXw3Nn3Y9ylsv5kha1Nv9Sen+5T05c+Vuryy5erx5PJ+nu\n",
+       "L5Anq3Qd093XkxM2XJ+WTlyZv7GKOKu23XLbVo3XeKhmrJinKo7lfIf2TancoZ5DO3tOqX3Cafub\n",
+       "JF2UUnp58feLJD02pfTq0jRksgAAYDZSSiE1iV1rrj6tw5/8fo4WtVeHRAUIAAAwJ137dnxY0gNs\n",
+       "n2v7OC36TlwZFxYAAMA8daq5Sil90fb3ajEo39GSLk0pde3MDgAAsDU69bkCAABAtUF+1t5hgNFZ\n",
+       "sX2L7RtsX2f72uK902xfbfsm21fZLv2Kw28otsUnbT99usjbsX2Z7YO2byy913o9bT/S9o3FZ//3\n",
+       "2OvRVs16H7B9W7HPr7P9zNJns19v2+fY/oDtj9n+qO3XFO9v9f5es97bvr9PsP0h29fb/rjtny7e\n",
+       "3/b9XbfeW72/l2wfXazfbxZ/b/X+XqpY7+H3d0op9J8WzYSfknSuFj9VvV7SQ6KXM+U/LR6hc9rK\n",
+       "ez8j6QeL1z8k6Y3F64cW2+DYYpt8StJRU69Dw/V8oqQLJN3YcT2XNaPXSnpM8fp/avFL08nXr+V6\n",
+       "XyLp+yum3Yr1lnSGpPOL1ydL+gtJD9n2/b1mvbd6fxcxnlj8f4ykP5H0hG3f32vWe+v3dxHn90t6\n",
+       "t6Qri7+3fn/XrPfg+3uImqtDA4ymlL4gaTnA6LZZ/TXksyVdXry+XNJzi9fPkfSelNIX0mLQ1U9p\n",
+       "sY2yl1K6RouRp8varOdjbZ8p6ZSU0rXFdL9cmidLNestVQ+wtxXrnVK6I6V0ffH681oMCHyWtnx/\n",
+       "r1lvaYv3tySllP6heHmcFl+K79KW72+pdr2lLd/fts+W9CxJ79Dd67r1+7tmveue2BK23kMkV1UD\n",
+       "jJ5VM+1cJUnvt/1h28uHrO5LKR0sXh+UtK94/eU6fJiKuW+Ptuu5+v6nNd/1f7Xtj9i+tFR9vnXr\n",
+       "bftcLWruPqQd2t+l9f6T4q2t3t+2j7J9vRb79QMppY9pB/Z3zXpLW76/Jf2cpB/Q4YNDb/3+VvV6\n",
+       "Jw28v4dIrnahh/yFKaULJD1T0qtsP7H8YVrUG67bDluxjRqs5zZ5uxaPkjhf0u2S3jxtOMOwfbKk\n",
+       "90p6bUrpc+XPtnl/F+v9P7RY789rB/Z3SulLKaXztXiKw9fZfvLK51u5vyvWe7+2fH/b/jeS7kwp\n",
+       "XaeaR9xs4/5es96D7+8hkquNA4zOXUrp9uL/z0j6DS2a+Q7aPkOSiirEO4vJV7fH2cV7c9VmPW8r\n",
+       "3j975f3ZrX9K6c5U0KJ6edm0uzXrbftYLRKrd6WUrije3vr9XVrv/7pc713Y30sppb+V9FuSHqkd\n",
+       "2N9LpfV+1A7s76+V9GzbN0t6j6Sn2H6Xtn9/V633L4+xv4dIrrZ6gFHbJ9o+pXh9kqSnS7pRi3W8\n",
+       "uJjsYknLm9OVkr7V9nG2z5P0AC06xs1Vq/VMKd0h6e9sP9a2Jb24NM9sFBeepedpsc+lLVnvIsZL\n",
+       "JX08pfTW0kdbvb/r1nsH9vfpy6YQ2/eQ9DRJ12n793flei8TjMLW7e+U0g+nlM5JKZ0n6Vsl/X5K\n",
+       "6cXa8v1ds94vGeX8Xtfbves/LZrL/kKLzmBvGGIZU/3Toirx+uLfR5frp8WDjt8v6SZJV0k6tTTP\n",
+       "Dxfb4pOSnjH1OrRY1/dI+mstHnh6q6SXdllPLb4R31h89vNTr1eH9f5OLTow3iDpI8VJtW+b1luL\n",
+       "X0x9qTiuryv+XbTt+7tmvZ+5A/v74ZL+vFjvGyT9QPH+tu/vuvXe6v29sg2epLt/NbfV+3tlvfeX\n",
+       "1vtdQ+9vBhEFAAAINMggogAAALuK5AoAACAQyRUAAEAgkisAAIBAJFcAAACBSK4AAAACkVwBAAAE\n",
+       "IrkCAAAIRHIFAAAQiOQKAAAgEMkVAABAIJIrAACAQCRXAAAAgUiuAAAAApFcAQAABCK5AgAACERy\n",
+       "BQAAEIjkCgAAIBDJFQAAQCCSKwAAgEAkVwAAAIFIrgAAAAKRXAEAAAQiuQIAAAhEcgUAABCI5AoA\n",
+       "ACAQyRUAAEAgkisAAIBAJFcAAACBSK4AAAACkVwBAAAEIrkCAAAIRHIFAAAQaG1yZfsc2x+w/THb\n",
+       "H7X9muL9A7Zvs31d8e+iccIFAADIm1NK9R/aZ0g6I6V0ve2TJf2ZpOdKeoGkz6WU3jJOmAAAAPNw\n",
+       "zLoPU0p3SLqjeP1525+QdFbxsQeODQAAYHYa97myfa6kCyT9SfHWq21/xPaltk8dIDYAAIDZWdss\n",
+       "eGiiRZPgnqSfSCldYfu+kj5TfPzjks5MKb1sZZ7NBQMAAGQipRTSKrcxubJ9rKT3SfrtlNJbKz4/\n",
+       "V9JvppQevvJ+klbL/klJB96Y0hfe0CdobB/bB1JKB6aOA/njWEEbHC9oynaKSq42/VrQki6V9PFy\n",
+       "YmX7zNJkz5N0Y0QwAAAAc7e2Q7ukCyW9SNINtq8r3vthSS+0fb4WVVM3S3rFcCECAADMx6ZfC/6R\n",
+       "qmu3fnuYcLDD9qYOALOxN3UAmJW9qQPA7mGEdmQhpbQ3dQyYB44VtMHxgimQXAEAAAQiuQIAAAhE\n",
+       "cgUAABCI5AoAACAQyRUAAEAgkisAAIBAJFcAAACBSK4AAAACkVwBAAAEIrkCAAAIRHIFAAAQiOQK\n",
+       "AAAgEMkVAABAIJIrAACAQCRXAAAAgUiuAAAAApFcAQAABCK5AgAACERyBQAAEIjkCgAAIBDJFQAA\n",
+       "QCCSKwAAgEAkVwAAAIFIrgAAAAKRXAEAAAQiuQIAAAhEcgUAABCI5AoAACAQyRUAAEAgkisAAIBA\n",
+       "JFcAAACBSK4AAAACkVwBAAAEIrkCAAAIRHIFAAAQiOQKAAAgEMkVAABAIJIrAACAQCRXAAAAgUiu\n",
+       "AAAAAq1NrmyfY/sDtj9m+6O2X1O8f5rtq23fZPsq26eOEy4AAEDeNtVcfUHS61JKD5P0OEmvsv0Q\n",
+       "Sa+XdHVK6YGSfq/4GwAAYOetTa5SSneklK4vXn9e0icknSXp2ZIuLya7XNJzhwwSAABgLhr3ubJ9\n",
+       "rqQLJH1I0r6U0sHio4OS9oVHBgAAMEONkivbJ0t6r6TXppQ+V/4spZQkpQFiAwAAmJ1jNk1g+1gt\n",
+       "Eqt3pZSuKN4+aPuMlNIdts+UdGf13AdKr/f3ChQAACCK7f0aKDlZm1zZtqRLJX08pfTW0kdXSrpY\n",
+       "0puK/6+omF2HJ1eS9MGOYQIAAMRJKe1J2lv+bfuSqLI31VxdKOlFkm6wfV3x3hskvVHSr9p+maRb\n",
+       "JL0gKiAAAIA5W5tcpZT+SPX9sp4aHw4AAMC8MUI7AABAIJIrAACAQCRXAAAAgUiuAAAAApFcAQAA\n",
+       "BCK5AgAACLRxhPZdYrvyMT4pJY8dCwAAmCeSqyOs5lfkVQAAoDmaBQEAAAKRXAEAAAQiuQIAAAhE\n",
+       "cgUAABCI5AoAACAQyRUAAEAgkisAAIBAJFcAAACBtmYQ0brR1SVGWAcAAOPZmuRqoSq/Iq8CAADj\n",
+       "oVkQAAAgEMkVAABAIJIrAACAQCRXAAAAgUiuAAAAApFcAQAABCK5AgAACERyBQAAECjrQUSjRl2v\n",
+       "KodR2wEAwBCyTq4WIkZdXy2DvAoAAAyDZkEAAIBAJFcAAACBSK4AAAACkVwBAAAEIrkCAAAIRHIF\n",
+       "AAAQiOQKAAAgEMkVAABAoGwGEV03GnuO6uJl5HcAAHZbNsnVwtxGUp9bvAAAYGg0CwIAAAQiuQIA\n",
+       "AAhEcgUAABCI5AoAACDQxuTK9mW2D9q+sfTeAdu32b6u+HfRsGECAADMQ5Oaq3dKWk2ekqS3pJQu\n",
+       "KP79TnxoAAAA87MxuUopXSPproqPGHcAAABgRZ8+V6+2/RHbl9o+NSwiAACAGeuaXL1d0nmSzpd0\n",
+       "u6Q3h0UEAAAwY51GaE8p3bl8bfsdkn6zesoDpdf7uywKAAAgnO39Gig56ZRc2T4zpXR78efzJN1Y\n",
+       "PeWBlb8/2GVxAAAAoVJKe5L2ln/bviSq7I3Jle33SHqSpNNt3yrpEkn7bZ+vxa8Gb5b0iqiAAAAA\n",
+       "5mxjcpVSemHF25cNEAsAAMDsMUI7AABAIJIrAACAQCRXAAAAgUiuAAAAApFcAQAABCK5AgAACNRp\n",
+       "EFHUs52q3k8pucl0VdMCAID5ILkKV5Uz1eVKbaYFAABzQLMgAABAIJIrAACAQCRXAAAAgUiuAAAA\n",
+       "ApFcAQAABCK5AgAACERyBQAAEIjkCgAAINAEydUXX287rf4bP475q9qObf9NvQ4AAGybiUZoZ2Ty\n",
+       "OKvb0hXv1b3PNgcAIBrNggAAAIFIrgAAAAKRXAEAAAQiuQIAAAhEcgUAABCI5AoAACAQyRUAAEAg\n",
+       "kisAAIBAEw0iOj1GJ1+o2w4pJUYYBQCgg51NrhglfontAABAJJoFAQAAApFcAQAABCK5AgAACERy\n",
+       "BQAAEIjkCgAAIBDJFQAAQCCSKwAAgEAkVwAAAIF2eBBRRGg70v0QI7+vi4GR5gEAYyO5QoDV3MYV\n",
+       "74zGqzoAAAiBSURBVC3fHyuGoZcHAEA1mgUBAAACkVwBAAAEIrkCAAAIRHIFAAAQaGNyZfsy2wdt\n",
+       "31h67zTbV9u+yfZVtk8dNkwAAIB5aFJz9U5JF62893pJV6eUHijp94q/AQAAdt7G5CqldI2ku1be\n",
+       "fraky4vXl0t6bnBcAAAAs9S1z9W+lNLB4vVBSfuC4gEAAJi13oOIppRS/QjZB0qv9/dd1GHajgw+\n",
+       "l2XVLW/qkcbH3gYAAAzJ9n5FJyeFrsnVQdtnpJTusH2mpDurJzuw8vcHOy6uStWo4EOZerTxXEYa\n",
+       "ZxR0AMB2SCntSdpb/m37kqiyuzYLXinp4uL1xZKuiAkHAABg3poMxfAeSX8s6UG2b7X9UklvlPQ0\n",
+       "2zdJekrxNwAAwM7b2CyYUnphzUdPDY4FAABg9hihHQAAIBDJFQAAQCCSKwAAgEAkVwAAAIFIrgAA\n",
+       "AAL1HqEdGMK6EeGnHq0eAIB1SK6QMUaEBwDMD82CAAAAgUiuAAAAApFcAQAABCK5AgAACERyBQAA\n",
+       "EIjkCgAAIBDJFQAAQCCSKwAAgEAkVwAAAIEYoX0k6x7nkqO5xVunaj2aPj6HR/AAALoguRrN6n06\n",
+       "93vz3OKt03c9eAQPAKAdmgUBAAACkVwBAAAEIrkCAAAIRHIFAAAQiOQKAAAgEMkVAABAIJIrAACA\n",
+       "QCRXAAAAgUiuAAAAApFcAQAABCK5AgAACERyBQAAEIjkCgAAIBDJFQAAQCCSKwAAgEAkVwAAAIFI\n",
+       "rgAAAAKRXAEAAAQ6ZuoA0IztNHUMEbZlPZqqW9+UkseOBQAwDpKr2ai6R8/x/ly3Hqvvz3Hd6mzz\n",
+       "ugEAVtEsCAAAEIjkCgAAIBDJFQAAQKBefa5s3yLp7yT9q6QvpJQeExEUAADAXPXt0J4k7U8pfTYi\n",
+       "GAAAgLmLaBbkp08AAACFvslVkvR+2x+2/fKIgAAAAOasb7PghSml223fR9LVtj+ZUromIjAAAIA5\n",
+       "6pVcpZRuL/7/jO3fkPQYSaXk6kBp6v19FgUc0neU9yFGTW8bU9MY1pXLKO95Y3R+IG+292ug5MQp\n",
+       "dbtP2T5R0tEppc/ZPknSVZJ+LKV0VfF5OnJk6p+U9O/UbpRupt3uafOJrTqx6T/tWDEgL9X7jv0G\n",
+       "5Mp2ijo/+9Rc7ZP0G7aX5bx7mVgBAADsqs7JVUrpZknnB8YCAAAwe4zQDgAAEIjkCgAAIBDJFQAA\n",
+       "QCCSKwAAgEAkVwAAAIFIrgAAAAL1ffwNgC3HKPEA0A7JFYAG6kafBwCsolkQAAAgEMkVAABAIJIr\n",
+       "AACAQCRXAAAAgUiuAAAAApFcAQAABCK5AgAACERyBQAAEIhBRIHCupHIc1QVb9MR0xl1HQCGQ3IF\n",
+       "HLKab+SeY/SNl1HXAWAINAsCAAAEIrkCAAAIRHIFAAAQiOQKAAAgEMkVAABAIJIrAACAQCRXAAAA\n",
+       "gUiuAAAAApFcAQAABGKEdqCDoR6VM0S5bcvsE0OXeXN83E7Ojweqi23quDCNnI/VXUZyBXQy1KNy\n",
+       "cih3qMfqzO1xOznHO7dHNWFYOR+ru4lmQQAAgEAkVwAAAIFIrgAAAAKRXAEAAAQiuQIAAAhEcgUA\n",
+       "ABCI5AoAACAQyRUAAEAgBhEF0FnEiPJVZVSNLD3UqPh1yxvKUCOsR2yfPjG0GSm87ajiY45KP9SI\n",
+       "50NuH9SbaluSXAHoIWKk8L6jxzcdET6XUeKHiiGH7dCm3LYxjDkq/Ry3D+qNvy1pFgQAAAhEcgUA\n",
+       "ABCI5AoAACBQ5+TK9kW2P2n7L23/UGRQAAAAc9UpubJ9tKS3SbpI0kMlvdD2QyIDw67ZmzoAzMbe\n",
+       "1AFgRmzvnzoG7J6uNVePkfSplNItKaUvSPoVSc+JCwu7Z2/qADAbe1MHgHnZP3UA2D1dk6uzJN1a\n",
+       "+vu24j0AAICd1nWcq4aD1T3lbw//++YTJB3fcZkAAADZc0rtB/W1/ThJB1JKFxV/v0HSl1JKbypN\n",
+       "M9hoygAAANGiRm3vmlwdI+kvJH29pL+WdK2kF6aUPhERFAAAwFx1ahZMKX3R9vdK+l1JR0u6lMQK\n",
+       "AACgY80VAAAAqg0yQjsDjKIN27fYvsH2dbavnToe5MP2ZbYP2r6x9N5ptq+2fZPtq2yfOmWMyEfN\n",
+       "8XLA9m3F9eU62xdNGSPyYPsc2x+w/THbH7X9muL9kOtLeHLFAKPoIEnan1K6IKX0mKmDQVbeqcW1\n",
+       "pOz1kq5OKT1Q0u8VfwNS9fGSJL2luL5ckFL6nQniQn6+IOl1KaWHSXqcpFcVuUrI9WWImisGGEUX\n",
+       "Ib/QwHZJKV0j6a6Vt58t6fLi9eWSnjtqUMhWzfEicX3BipTSHSml64vXn5f0CS3G6wy5vgyRXDHA\n",
+       "KNpKkt5v+8O2Xz51MMjevpTSweL1QUn7pgwGs/Bq2x+xfSnNyFhl+1xJF0j6kIKuL0MkV/SQR1sX\n",
+       "ppQukPRMLapmnzh1QJiHtPhFDtccrPN2SedJOl/S7ZLePG04yIntkyW9V9JrU0qfK3/W5/oyRHL1\n",
+       "aUnnlP4+R4vaK6BSSun24v/PSPoNLZqWgToHbZ8hSbbPlHTnxPEgYymlO1NB0jvE9QUF28dqkVi9\n",
+       "K6V0RfF2yPVliOTqw5IeYPtc28dJ+hZJVw6wHGwB2yfaPqV4fZKkp0u6cf1c2HFXSrq4eH2xpCvW\n",
+       "TIsdV9wgl54nri+QZNuSLpX08ZTSW0sfhVxfBhnnyvYzJb1Vdw8w+tPhC8FWsH2eFrVV0mJQ23dz\n",
+       "vGDJ9nskPUnS6Vr0f/hRSf+vpF+V9BWSbpH0gpTS30wVI/JRcbxcImm/Fk2CSdLNkl5R6lODHWX7\n",
+       "CZL+UNINurvp7w1aPHGm9/WFQUQBAAACDTKIKAAAwK4iuQIAAAhEcgUAABCI5AoAACAQyRUAAEAg\n",
+       "kisAAIBAJFcAAACBSK4AAAAC/f9A40wIgLpJlQAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7ffb01953f90>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "feat = net.blobs['fc7'].data[0]\n",
+    "plt.subplot(2, 1, 1)\n",
+    "plt.plot(feat.flat)\n",
+    "plt.subplot(2, 1, 2)\n",
+    "_ = plt.hist(feat.flat[feat.flat > 0], bins=100)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The final probability output, `prob`"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 38,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[<matplotlib.lines.Line2D at 0x7ffb01f12a50>]"
+      ]
+     },
+     "execution_count": 38,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAmEAAAJPCAYAAAA0UwMNAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzt3X2Q7md93/fPV0fGMuLZphYWcnBAtoEB29iVabCdg00Y\n",
+       "hXEsppkxCD+kDkNoU9m0zXQI6YyR23/atJ0kDgmRXcVJXGJNkgKRW4jASc+YOg4gm4BjJCoFa6oH\n",
+       "TDDgBzyxfRR9+8d9L9xa7e6955zdvX6r6/WaObN7P+7vnN+59/fe6/rd11Z3BwCAk3XZ6A0AAJiR\n",
+       "CAMAGECEAQAMIMIAAAYQYQAAA4gwAIABtkZYVV1fVXdX1T1V9eY9br+hqj5aVR+pql+pqu/euO2+\n",
+       "qvrY+rYPHfXGAwCcVnXQOmFVdSbJJ5K8IsmDST6c5MbuvmvjPld29++vP39Rknd19/PWl38jybd2\n",
+       "9+eO768AAHD6bBsJuy7Jvd19X3efT3Jbkhs277ATYGtPSvJbu56jLnkrAQAeZ7ZF2NVJ7t+4/MD6\n",
+       "ukepqldX1V1J3pvkxzZu6iS/UFV3VtUbLnVjAQAeLy7fcvuhfqdRd787ybur6juT/GySb1jf9LLu\n",
+       "/lRVPTPJ+6vq7u7+wMVvLgDA48O2CHswyTUbl6/JajRsT939gaq6vKq+srs/292fWl//map6V1bT\n",
+       "m4+KsKryyysBgFOju4/kVKttEXZnkmur6jlJHkrymiQ3bt6hqp6b5JPd3VX1kvXGfbaqnpjkTHf/\n",
+       "XlVdmeSVSX5iry9yVH8ZTl5V3dzdN4/eDi6cfXe62X+nm/13eh3l4NGBEdbdD1fVTUnuSHImya3d\n",
+       "fVdVvXF9+y1J/mySH66q80m+kOS164dfleSdVbXzdd7R3e87qg0HADjNto2Epbvfm9UJ95vX3bLx\n",
+       "+V9N8lf3eNwnk3zzEWwjAMDjjhXzuVTnRm8AF+3c6A3gkpwbvQFcknOjN4DxDlys9UQ2oKqdEwYA\n",
+       "nAZH2S1GwgAABhBhAAADiDAAgAFEGADAACIMAGAAEQYAMIAIAwAYQIQBAAwgwgAABhBhAAADiDAA\n",
+       "gAFEGADAACIMAGAAEQYAMIAIAwAYQIQBAAwgwgAABhBhAAADiDAAgAFEGADAACIMAGAAEQYAMIAI\n",
+       "AwAYQIQBAAwgwgAABhBhAAADiDAAgAFEGADAACIMAGAAEQYAMIAIAwAYQIQBAAwgwgAABhBhAAAD\n",
+       "iDAAgAFEGADAACIMAGAAEQYAMIAIAwAYQIQBAAwgwgAABhBhAAADiDAAgAFEGADAACIMAGAAEQYA\n",
+       "MIAIAwAYQIQBAAwgwgAABhBhAAADiDAAgAFEGADAACIMAGAAEcZUqvLfVeUto7cDAKq7x25AVXd3\n",
+       "Dd0IplGVTtLdfgAB4MIdZbc4EDEj0Q/AcCIMAGAAEQYAMIAIAwAYQIQxm7HvRAGANRHGbEQYAIsg\n",
+       "wpiNCANgEUQYsxFhACyCCGM2IgyARRBhAAADiDBmYyQMgEUQYczmkdEbAACJCGM+RsIAWAQRBgAw\n",
+       "wNYIq6rrq+ruqrqnqt68x+03VNVHq+ojVfUrVfXdh30sDGAkDIBFqO79j0lVdSbJJ5K8IsmDST6c\n",
+       "5MbuvmvjPld29++vP39Rknd19/MO89j1Y7q762j/WrC3qnwhyZXd8X8OgAt2lN2ybSTsuiT3dvd9\n",
+       "3X0+yW1Jbti8w06ArT0pyW8d9rEwgJEwABZhW4RdneT+jcsPrK97lKp6dVXdleS9SX7sQh4LJ0yE\n",
+       "AbAI2yLsUAes7n53dz8/yZ9J8rNVZaqHpRJhACzC5VtufzDJNRuXr8lqRGtP3f2Bqro8yTPW9zvU\n",
+       "Y6vq5o2L57r73JbtgoslwgA4tKo6m+TssTz3lhPzL8/q5PrvSfJQkg/lsSfmPzfJJ7u7q+olSf5x\n",
+       "dz/3MI9dP96J+ZyYqvx2kqc6MR+Ai3GU3XLgSFh3P1xVNyW5I8mZJLd2911V9cb17bck+bNJfriq\n",
+       "zif5QpLXHvTYo9houARWzAdgEQ4cCTuRDTASxgmqymeTPMNIGAAX4ySXqAAA4BiIMGbjxHwAFkGE\n",
+       "MRsRBsAiiDBmI8IAWAQRxmxEGACLIMKYjQgDYBFEGLMRYQAsgggDABhAhDEbK+YDsAgijNmYjgRg\n",
+       "EUQYsxFhACyCCAMAGECEMRsjYQAsgghjNiIMgEUQYcxGhAGwCCKM2YgwABZBhDEbEQbAIogwAIAB\n",
+       "RBizsWI+AIsgwpiN6UgAFkGEMRsRBsAiiDAAgAFEGLMxEgbAIogwZiPCAFgEEcZsRBgAiyDCmI0I\n",
+       "A2ARRBizEWEALIIIYzYiDIBFEGHMRoQBsAgijNmIMAAWQYQxGxEGwCKIMACAAUQYs3lk9AYAQCLC\n",
+       "mI/pSAAWQYQxGxEGwCKIMACAAUQYszESBsAiiDBmI8IAWAQRxmxEGACLIMKYjQgDYBFEGLMRYQAs\n",
+       "gggDABhAhDEbK+YDsAgijNmYjgRgEUQYsxFhACyCCAMAGECEMRsjYQAsgghjNiIMgEUQYcxGhAGw\n",
+       "CCKM2YgwABZBhDEbEQbAIogwZiPCAFgEEcZsRBgAiyDCmI0IA2ARRBizEWEALIIIAwAYQIQxGyNh\n",
+       "ACyCCGM2j4zeAABIRBjzMRIGwCKIMACAAUQYszESBsAiiDBmI8IAWAQRxmxEGACLIMKYjQgDYBFE\n",
+       "GLMRYQAsgghjNiIMgEUQYQAAA4gwZmPFfAAWQYQxG9ORACyCCAMAGECEMRsjYQAswtYIq6rrq+ru\n",
+       "qrqnqt68x+0/UFUfraqPVdUvVdWLN267b339R6rqQ0e98XARRBgAi3D5QTdW1Zkkb0vyiiQPJvlw\n",
+       "Vd3e3Xdt3O2TSb6ru3+nqq5P8lNJXrq+rZOc7e7PHf2mw0URYQAswraRsOuS3Nvd93X3+SS3Jblh\n",
+       "8w7d/cvd/Tvrix9M8uxdz1FHsqVwNEQYAIuwLcKuTnL/xuUH1tft5/VJ3rNxuZP8QlXdWVVvuLhN\n",
+       "hCMlwgBYhAOnI3MBB6yqenmSP5/kZRtXv6y7P1VVz0zy/qq6u7s/sMdjb964eK67zx3268IFEmEA\n",
+       "HFpVnU1y9jiee1uEPZjkmo3L12Q1GvYo65PxfzrJ9d39+Z3ru/tT64+fqap3ZTW9+ZgI6+6bL3jL\n",
+       "4eKIMAAObT0wdG7nclW99aiee9t05J1Jrq2q51TVE5K8Jsntm3eoqq9N8s4kP9jd925c/8SqevL6\n",
+       "8yuTvDLJrx3VhsNFEmEALMKBI2Hd/XBV3ZTkjiRnktza3XdV1RvXt9+S5MeTPD3J26sqSc5393VJ\n",
+       "rkryzvV1lyd5R3e/79j+JnA4IgyARajuscekquru9g5KTkRV3pHkdd3etQvAhTvKbrFiPrMxEgbA\n",
+       "IogwZvPI6A0AgESEMR8jYQAsgghjSlXOCQNgLBHGrEQYAEOJMGZTuz4CwBAijFmJMACGEmHMyv99\n",
+       "AIZyIGJWRsIAGEqEMRvnhAGwCCKMWYkwAIYSYcxKhAEwlAhjNjvx5f8+AEM5EDErI2EADCXCmJUI\n",
+       "A2AoEcZsxBcAiyDCmJUYA2AoEQYAMIAIAwAYQIQxGyvmA7AIIgwAYAARxqyMhAEwlAhjNuILgEUQ\n",
+       "YQAAA4gwZmVEDIChRBgAwAAijNlYogKARRBhAAADiDAAgAFEGLMxHQnAIogwAIABRBizMhIGwFAi\n",
+       "DABgABHGbIyAAbAIIoxZiTEAhhJhAAADiDBmY4kKABZBhAEADCDCAAAGEGHMynQkAEOJMGYjvgBY\n",
+       "BBHGrMQYAEOJMACAAUQYszECBsAiiDBmJcYAGEqEAQAMIMKYjRXzAVgEEQYAMIAIAwAYQIQxK9OR\n",
+       "AAwlwpiN+AJgEUQYsxJjAAwlwgAABhBhzMYIGACLIMKYlRgDYCgRBgAwgAhjVkbCABhKhDEb8QXA\n",
+       "IogwAIABRBizMiIGwFAijNmILwAWQYQxKzEGwFAiDABgABEGADCACGM2tesjAAwhwgAABhBhzMpI\n",
+       "GABDiTBmI74AWAQRBgAwgAhjVkbEABhqa4RV1fVVdXdV3VNVb97j9h+oqo9W1ceq6peq6sWHfSwM\n",
+       "IL4AWIQDI6yqziR5W5Lrk7wgyY1V9fxdd/tkku/q7hcn+R+S/NQFPBZGEWMADLVtJOy6JPd2933d\n",
+       "fT7JbUlu2LxDd/9yd//O+uIHkzz7sI8FAJjVtgi7Osn9G5cfWF+3n9cnec9FPhYAYBqXb7m9D/tE\n",
+       "VfXyJH8+ycsu9LFwgqyYD8AibIuwB5Ncs3H5mqxGtB5lfTL+Tye5vrs/fyGPXT/+5o2L57r73Jbt\n",
+       "AgA4dlV1NsnZY3nu7v0HrKrq8iSfSPI9SR5K8qEkN3b3XRv3+dok/yLJD3b3v7qQx67v191tVIIT\n",
+       "UZX3JPnTSb6xO58YvT0AnC5H2S0HjoR198NVdVOSO5KcSXJrd99VVW9c335Lkh9P8vQkb6+qJDnf\n",
+       "3dft99ij2Gi4BIIfgEU4cCTsRDbASBgnqCrvzWrZFCNhAFywo+wWK+YzK+EPwFAiDABgABHGbCxR\n",
+       "AcAiiDAAgAFEGADAACKM2ZiOBGARRBgAwAAijFkZCQNgKBEGADCACGM2RsAAWAQRxqzEGABDiTAA\n",
+       "gAFEGLOxRAUAiyDCAAAGEGEAAAOIMGZlOhKAoUQYsxFfACyCCGNWYgyAoUQYAMAAIozZGAEDYBFE\n",
+       "GLMSYwAMJcIAAAYQYczGivkALIIIAwAYQIQBAAwgwpiV6UgAhhJhzEZ8AbAIIoxZiTEAhhJhAAAD\n",
+       "iDBmYwQMgEUQYcxKjAEwlAgDABhAhDErI2EADCXCmI34AmARRBgAwAAijFkZEQNgKBHGbMQXAIsg\n",
+       "wpiVGANgKBEGADCACAMAGECEMZva9REAhhBhAAADiDBmZSQMgKFEGLMRXwAsgggDABhAhDErI2IA\n",
+       "DCXCmI34AmARRBizEmMADCXCAAAGEGEAAAOIMGZjxXwAFkGEMaMevQEAIMKYlZEwAIYSYcymYiQM\n",
+       "gAUQYcyoYyQMgMFEGADAACKMGZmOBGA4EcZsds4JMx0JwFAijBkZCQNgOBHGrIyEATCUCGM2lqgA\n",
+       "YBFEGDMSYQAMJ8KYlelIAIYSYczISBgAw4kwZmOJCgAWQYQxIyNhAAwnwgAABhBhzMZ0JACLIMKY\n",
+       "kelIAIYTYczKSBgAQ4kwZmQkDIDhtkZYVV1fVXdX1T1V9eY9bv/GqvrlqvqDqvpLu267r6o+VlUf\n",
+       "qaoPHeWGw0UyAgbAIlx+0I1VdSbJ25K8IsmDST5cVbd3910bd/tskh9N8uo9nqKTnO3uzx3R9sJR\n",
+       "cGI+AMNtGwm7Lsm93X1fd59PcluSGzbv0N2f6e47k5zf5zkc7Fga05EADLctwq5Ocv/G5QfW1x1W\n",
+       "J/mFqrqzqt5woRsHx8ASFQAswoHTkbn0EYOXdfenquqZSd5fVXd39wcu8TnhUhkJA2C4bRH2YJJr\n",
+       "Ni5fk9Vo2KF096fWHz9TVe/KanrzMRFWVTdvXDzX3ecO+zUAAI5LVZ1NcvY4nntbhN2Z5Nqqek6S\n",
+       "h5K8JsmN+9z3UdM7VfXEJGe6+/eq6sokr0zyE3s9sLtvPvwmwyUxHQnAoa0Hhs7tXK6qtx7Vcx8Y\n",
+       "Yd39cFXdlOSOJGeS3Nrdd1XVG9e331JVVyX5cJKnJHmkqt6U5AVJ/qMk76yqna/zju5+31FtOFwC\n",
+       "05EADFfdY49HVdXdbVSCE1GVX81qWv2HuvPPRm8PAKfLUXaLFfOZkZEwAIYTYczGqCsAiyDCmJET\n",
+       "8wEYToQxI9ORAAwnwpiNJSoAWAQRxoyMhAEwnAgDABhAhDEr05EADCXCmM3OOWEAMJQIY0ZOzAdg\n",
+       "OBHGjIyEATCcCGM2RsAAWAQRxoxMRwIwnAhjRqYjARhOhDErI2EADCXCmI0lKgBYBBHGjEQYAMOJ\n",
+       "MGZlOhKAoUQYszEdCcAiiDBmZIkKAIYTYczISBgAw4kwZmMEDIBFEGHMyHQkAMOJMGZkOhKA4UQY\n",
+       "szISBsBQIozZWKICgEUQYcxIhAEwnAhjVqYjARhKhDEb05EALIIIY0aWqABgOBHGjIyEATCcCAMA\n",
+       "GECEMZudc8JMRwIwlAhjRqYjARhOhDErI2EADCXCmI0lKgBYBBHGjDrJZVV52ugNAWBeIoxZfWeS\n",
+       "/330RgAwLxHGjDrJVyS5YvSGADAvEcZsLFEBwCKIMGa0E2FCDIBhRBizEmEADCXCmM3mdKQIA2AY\n",
+       "EcaMnBMGwHAijFkZCQNgKBHGbHamIy+LCANgIBHGrIyEATCUCGNGzgkDYDgRxoy8OxKA4UQYs7FE\n",
+       "BQCLIMKY0c6J+QAwjAMRMzMSBsAwIozZmI4EYBFEGDOyThgAw4kwZiXAABhKhDEj05EADCfCmE1t\n",
+       "fBRhAAwjwpiRFfMBGE6EMSMn5gMwnAhjNpaoAGARRBgzEmEADCfCmJUAA2AoEcasjIQBMJQIYzbO\n",
+       "CQNgEUQYMxJhAAwnwpjRzhIVADCMAxGzqX0+B4ATJcKYkelIAIYTYczIdCQAwzkQMSsjYQAMJcKY\n",
+       "jSUqAFiErRFWVddX1d1VdU9VvXmP27+xqn65qv6gqv7ShTwWBhFhAAx3YIRV1Zkkb0tyfZIXJLmx\n",
+       "qp6/626fTfKjSf6Xi3gsjCLAABhq20jYdUnu7e77uvt8ktuS3LB5h+7+THffmeT8hT4WTkpVvqEq\n",
+       "X54vTUdeFiEGwEDbIuzqJPdvXH5gfd1hXMpj4ai9Lcl3rD83HQnAcJdvub0v4bkP/diqunnj4rnu\n",
+       "PncJXxf2cmb9JxFhABxSVZ1NcvY4nntbhD2Y5JqNy9dkNaJ1GId+bHfffMjnhItVefQUpAADYKv1\n",
+       "wNC5nctV9dajeu5t05F3Jrm2qp5TVU9I8pokt+9z390HtQt5LBy3nQhLjIQBsAAHjoR198NVdVOS\n",
+       "O7Kayrm1u++qqjeub7+lqq5K8uEkT0nySFW9KckLuvsLez32OP8ycIDN6BJhAAxX3Zdy2tcRbEBV\n",
+       "d7eDIceqKr+Y5H9O8neSfCTJVUme3J1vGLphAJwqR9ktVsxnFpvnhFmiAoDhRBgz2TwnLBFhAAwk\n",
+       "wpjF7nPAnBMGwFAijFnsNR0JAMM4EDELS1QAsCgijFmIMAAWRYQxi80I27kMAMOIMGZRG3+MhAEw\n",
+       "nAhjFrunI60TBsBQIoxZ7DUdKcIAGEaEMZPNJSoEGABDiTBm4Rd4A7AoIoxZmI4EYFFEGLOwThgA\n",
+       "iyLCmMXuX1skwAAYSoQxC0tUALAoIoxZ7J5+NB0JwFAijFmYjgRgUUQYs3BiPgCLIsKYhSUqAFgU\n",
+       "EcZMNqcjnZgPwFAijFnsXjEfAIYSYczCdCQAiyLCmIV1wgBYFBHGLPZaokKEATCMCGMWu88JE2AA\n",
+       "DCXCmIVzwgBYFBHGLExHArAoIoxZ7LViPgAMI8KYielIABbj8tEbACdk94n5fgABYCgRxiw2zwlL\n",
+       "jIIBMJjRAGbhnDAAFsVIGLMQYQAsighjFrXxR4QBMJzpSGbhd0cCsCgijFnsXjEfAIZyUGIWfm0R\n",
+       "AIsiwpjJ5q8tMh0JwFAijFnsXqxVgAEwlAhjFnstUSHEABhGhDGLvVbMv6Iqf2vcJgEwMxHGLPZb\n",
+       "rPVPjdkcAGYnwpjFfueEmZIEYAgRxiz2WqJi8yMAnCgRxiw2zwnrjeu9BgAYwgGIWew+J2zzegA4\n",
+       "cSKMmYgwABZDhDGLyt5rg4kwAIYQYcxiv+lIrwEAhnAAYhbOCQNgUUQYs9jv3ZEiDIAhRBiz2L1Y\n",
+       "6w6vAQCGcABiFrsXa928HgBOnAhjFvtFmNcAAEM4ADELK+YDsCgOQMzEEhUALIYDELNwYj4Ai+IA\n",
+       "xCw2pyN3Xw8AJ06EMYv9Fms9M2BbAECEMQ2/tgiARXEAYhaWqABgURyAmEVt/PFriwAYToQxC+eE\n",
+       "AbAoIoxZmI4EYFEcgJjJXivmm44EYAgRxiz2W6wVAIYQYcxiv3PCAGAIEcYs9jsnDACGcFBiJnud\n",
+       "EwYAQ2yNsKq6vqrurqp7qurN+9znJ9e3f7SqvmXj+vuq6mNV9ZGq+tBRbjgcVtUXzwUzHQnAYlx+\n",
+       "0I1VdSbJ25K8IsmDST5cVbd3910b93lVkud197VV9e1J3p7kpeubO8nZ7v7csWw9HE7t+ggAw20b\n",
+       "Cbsuyb3dfV93n09yW5Ibdt3n+5L8/STp7g8meVpVffXG7Q58jLY5EmY6EoBF2BZhVye5f+PyA+vr\n",
+       "DnufTvILVXVnVb3hUjYULsGB05Eb05UAcGIOnI7M4UcM9juIfUd3P1RVz0zy/qq6u7s/cPjNgyO1\n",
+       "3w8dRscAOHHbIuzBJNdsXL4mq5Gug+7z7PV16e6H1h8/U1Xvymp68zERVlU3b1w8193nDrHtcFib\n",
+       "54TtFVyXJXnkRLcIgFOhqs4mOXscz70twu5Mcm1VPSfJQ0lek+TGXfe5PclNSW6rqpcm+e3u/nRV\n",
+       "PTHJme7+vaq6Mskrk/zEXl+ku2++6L8BbLft3ZGmIwHY03pg6NzO5ap661E994ER1t0PV9VNSe5I\n",
+       "cibJrd19V1W9cX37Ld39nqp6VVXdm+T3k/zI+uFXJXlnVe18nXd09/uOasPhAogwABanuseeClNV\n",
+       "3d0OghybqlyR5N8nuSfJH89qRPa/37jLFd35wxHbBsDpcpTdYsV8ZrBtiQqvAwBOnIMPM9i9WKvp\n",
+       "SACGE2HMYPc5YfvdDgAnRoQxg23TkSIMgBMnwpjJfu+O9DoA4MQ5+DCDbb/A20gYACdOhDED64QB\n",
+       "sDgijBk4JwyAxRFhzGDbSJjXAQAnzsGHGViiAoDFEWHMwGKtACyOCGMGTswHYHFEGDOoJI9k///v\n",
+       "XgcAnDgHH2bxH2IkDIAFEWHMoPLoCNvrdgA4USKMGWybjhRhAJw4EcYMdo+EWScMgOEcfJjBToSd\n",
+       "OeB2ADhRIowZbI6EdZyYD8ACiDBmsHNO2H6x5XUAwIlz8GEGOxF20O0AcKJEGDPYjLCK6UgAFkCE\n",
+       "MYO9wmv37QBwokQYs+h9Pk+8DgAYwMGHGeyMhO03GmYkDIATJ8KYwU6E7XdyvggD4MSJMGawO8Kc\n",
+       "mA/AcCKMGWybjvQ6AODEOfgwg90RZiQMgOFEGDNwThgAiyPCmMFOZIkwABZDhDGDbdORXgcAnDgH\n",
+       "H2ZhnTAAFkWEMQNLVACwOCKMGVgxH4DFEWHMYNu7I70OADhxDj7MwHQkAIsjwpiB6UgAFkeEMQMr\n",
+       "5gOwOCKMGTgnDIDFcfBhBlbMB2BxRBizMB0JwKKIMGaw7cR8rwMATpyDDzOwRAUAiyPCmMG2E/NF\n",
+       "GAAnToQxA+uEAbA4IowZbFsnzOsAgBPn4MMMTEcCsDgijBlYMR+AxRFhzMBirQAsjghjFtYJA2BR\n",
+       "HHyYgelIABZHhDEDJ+YDsDgijBnsjrA/2uN2ADhRIowZ7I6wP9x1u9cBACfOwYcZ7D4n7A/2uB0A\n",
+       "TpQIYwa7I2z3SJgIA+DEiTBmsHs60kgYwBZVeVVVvnf0djyeiTBmsG0kzOsA4LFeluQ7Rm/E49nl\n",
+       "ozcATpDpSIDDuyLJmdEb8XgmwpiB6UiAC/flEWHHSoQxg21LVIgwgMcyEnbMRBgzcE4YwIUTYcdM\n",
+       "hDED05EAF06EHTMRxgysEwZw4UTYMTMNw6lWlddU5Wnb7paDV8z3OgB4rCvWfzgmDj6cdm9J8i1b\n",
+       "7rMTYV+xvvzwHrcD8Ggi7JiJME67p67/HGQnwp6UJN1fHBHbvJ2BqvJVo7cBeIxLirCq/M2qrd+f\n",
+       "pybCOO2esv6zp6r8iSSvyyrCnrzf3Y5hu7gw9/hmDYtzqSNhr0vynKPZlMcnEcapVZXK9pGw70ry\n",
+       "Z9af7xdhx/I6qMpzq/J7x/HcjydVeWKSpyV55uhtgR1VubIqf2r0dgx20RFWlcuSPD3JM450ix5n\n",
+       "RBin2Vdk9c6dfUfCsjqwf2U2piP3cFwjYV+X5ElV+ddVecsxfY3Hg6/a9RGW4GVJ/trojThOVfmZ\n",
+       "LacCXMpI2FOz+t4qwg6wNcKq6vqquruq7qmqN+9zn59c3/7RqvqWC3ksXIKn7vq4l51vMJ1VsP3R\n",
+       "Hvc5rgjb+ebzTUledExf4/HgK3d9hCW4KsnXjN6I47KeSfj+JN9wwN2+fP3nYjx910f2cGCEVdWZ\n",
+       "JG9Lcn2SFyS5saqev+s+r0ryvO6+NslfSPL2wz6Wo1d10S+Yi/x6dfYkv94uOyNgT6nK66ryp/e4\n",
+       "z2aEJfni9OCbNu5zXBH21euPfy/Jq6ry+sM+sCo/UZWXH8tWffFrDN13m7ZGWFVeXiVkNy1o/z1e\n",
+       "PSvJ06uO592BC9h/T0vyxCTPPuA+lzIS9oxdH4eqyrOq8vdHb8du20bCrktyb3ff193nk9yW5IZd\n",
+       "9/m+ZPUX6+4PJnlaVV11yMdyhKryzCQPHlWIVeXphzhZ+uxRfK2LtDkS9tok/+ke99k5z2gnwr6Q\n",
+       "JN35yY37HNe0/FcneWtWUxpPTQ4fYUleneSVx7FRG84e8/Mf1mFGwv6bJD98AttySaryZUf8fAe9\n",
+       "/s4e5dd6vKjKk9fTbJf6w9VV64/PutRt2sfZY3rew3r2ro97uSLJFRf5b7moCEvy0iQ/cFxRfbG2\n",
+       "HXyuTnL/xuUH1tcd5j5fc4jHcrReltWB7FuP6Pn+VpK/uXOhKk87ypG2qlxWlR+pyhMu8imesvHx\n",
+       "m5J8867nvzrJC9cXr1x/3OtE+eMcCft0kgfXl1+8Pln1QOsD+fOTvOSYtuvIVeWvVOV1F/nwx0RY\n",
+       "1WNW6X5JjunfoyovqjrwHbZPrsrXH+J5bkzyi0e4Xd+b5ONVp/M3m1SljjpKD+nlSf6zZO99VpWv\n",
+       "rMr3HOJ5duLr8TolefWuj4+y/n93WZL/kGzfj+s3Iv3jquzMeO1MQ15yhK2f+1JHwl+c1SkpL9x2\n",
+       "x5O07cW9ez2l/VzSQawqP38pjz8Fnp7kjyX56BE/7+7987ysVoP/qar8VlZDzU9P8old99trf+11\n",
+       "3XckeaQq/9f6a31bks9X5d717WeSN72o6qKj7ylZheOPVeVTWb3YL8RVWUXOS7P6ie2ZVblj/Txn\n",
+       "sgqzJ67vuxNh9+zxPDdVffEdlEfp25L8n0k+t778hCTvrdrzvLRNVyT5fJL/eOPf/hj8F19flW9b\n",
+       "X6isvuFBEI0uAAAF1UlEQVTufDzM55vXPS/JH61D5DCuyGr/fybJc5N8NqufUr85q3NQ/mRVfmn9\n",
+       "+eez+n/80n2+Vxz0/eeyrP6ffGa9jf82j/21VS9P8sDG/+sdO//u1yZ5VlV+MasD1sNJ/t0eX+ub\n",
+       "s/o/+M/XX+ORXNq+e2FWI7nvr1qN4D7af/71u157+/07PDGrkdg/zJf+L56E5yW5qir/T1bHmidl\n",
+       "9WaaTx3isU/PatT6/CG/1h/L6jV+PquRnT9I8g+r8tA+2/XHq/L+HLx/XprVQMKtG/83dv6NL8vq\n",
+       "h91fyZcWf64kX7t+zCN5tCett2vje/GjXn87XpTkN7L3D4tH7dlJfjfJ91fl2j1uP5Pk32f19/un\n",
+       "VXk4q9fs+fX1u70wq3+Xf1qVT2T1b/FbSb53/X1sLwf9+2/e9pIkX1aVDx1w/22+KavX7U+vjzd1\n",
+       "kX+OVHXv/29QVS9NcnN3X7++/JYkj3T3/7Rxn7+T5Fx337a+fHeSP5nVO8MOfOz6+mM6wAAAHL3u\n",
+       "PpIg2zYSdmeSa6vqOUkeSvKa5DE/6d6e5KYkt62j7be7+9NV9dlDPPbI/iIAAKfJgRHW3Q9X1U1J\n",
+       "7shqaPLW7r6rqt64vv2W7n5PVb2qqu5N8vtJfuSgxx7nXwYA4LQ4cDoSAIDjMXTFfIu5LltVXVNV\n",
+       "/3dV/XpV/Zuq+rH19c+oqvdX1f9bVe+rqqdtPOYt6/15d1Ud9xILbFFVZ6rqI1X18+vL9t0pUVVP\n",
+       "q6p/UlV3VdXHq+rb7b/TY70/fr2qfq2q/mFVfbn9t0xV9Xer6tNV9Wsb113wvqqqb13v73uq6m8c\n",
+       "5msPizCLuZ4K55P81939wqzeKfRfrvfRX07y/u7++iT/fH05VfWCrM79e0FW+/VvV5VfjTXWm5J8\n",
+       "PF96p5F9d3r8jSTv6e7nZ/X2+rtj/50K63Oh35DkJd39oqxOyXlt7L+l+pms/t03Xci+2jm3/e1J\n",
+       "Xr9evP7aqtr9nI8xcidbzHXhuvs3u/tfrz//QpK7snqL/hcX6F1/fPX68xuS/Fx3n+/u+5Lcm9V+\n",
+       "ZoCqenaSVyX53/Klt1bbd6dAVT01yXd2999NVufYdvfvxP47LX43qx9in1hVl2e1TMhDsf8Wqbs/\n",
+       "kNVSOJsuZF99e1U9K8mTu3tnGY1/sPGYfY2MsMMsBMtCrH+y+5YkH0zy1d396fVNn86Xfj3P12S1\n",
+       "H3fYp2P9tST/bR69ZpF9dzp8XZLPVNXPVNWvVtVPV9WVsf9Ohe7+XJL/Ncn/l1V8/XZ3vz/232ly\n",
+       "oftq9/UP5hD7cGSEeUfAKVFVT0ryfyR5U3c/ahHBXr2z47AL7nFCqup7k/y77v5I9llg0L5btMuz\n",
+       "WqDyb3f3S7J65/lf3ryD/bdcVfXcJP9VkudkdXB+UlX94OZ97L/T4xD76qKNjLAHk1yzcfmaPLoi\n",
+       "WYCq+rKsAuxnu/vd66s/vf79oFkPwe6sHr57nz47X/qVPZysP5Hk+6rqN5L8XJLvrqqfjX13WjyQ\n",
+       "5IHu/vD68j/JKsp+0/47Fb4tyb/s7s9298NJ3pnkP4n9d5pcyPfKB9bXP3vX9Vv34cgI++JCsFX1\n",
+       "hKxOdLt94Pawy/pkw1uTfLy7//rGTbcn+XPrz/9ckndvXP/aqnpCVX1dVr/u5VJ+zQQXqbv/Sndf\n",
+       "091fl9UJwf+iu38o9t2p0N2/meT+qtr5/YevSPLrSX4+9t9pcHeSl1bVV6y/j74iqzfI2H+nxwV9\n",
+       "r1y/Zn93/S7mSvJDG4/Z17BfDGsx11PhZUl+MMnHquoj6+vekuR/TPKPqur1Se5L8v1J0t0fr6p/\n",
+       "lNU3m4eT/MW2EN1S7OwH++70+NEk71j/kPpvs1oI+0zsv8Xr7o9W1T/IarDhkSS/muSnkjw59t/i\n",
+       "VNXPZfXrFr+qqu5P8uO5uO+VfzHJ38vqd6S+p7v/2davbT8DAJw865AAAAwgwgAABhBhAAADiDAA\n",
+       "gAFEGADAACIMAGAAEQYAMIAIAwAY4P8Hf+iH2xY5ngUAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7ffb019b9dd0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "feat = net.blobs['prob'].data[0]\n",
+    "plt.plot(feat.flat)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's see the top 5 predicted labels."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 39,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "['n02123045 tabby, tabby cat' 'n02123159 tiger cat'\n",
+      " 'n02124075 Egyptian cat' 'n02119022 red fox, Vulpes vulpes'\n",
+      " 'n02127052 lynx, catamount']\n"
+     ]
+    }
+   ],
+   "source": [
+    "# load labels\n",
+    "imagenet_labels_filename = caffe_root + 'data/ilsvrc12/synset_words.txt'\n",
+    "try:\n",
+    "    labels = np.loadtxt(imagenet_labels_filename, str, delimiter='\\t')\n",
+    "except:\n",
+    "    !../data/ilsvrc12/get_ilsvrc_aux.sh\n",
+    "    labels = np.loadtxt(imagenet_labels_filename, str, delimiter='\\t')\n",
+    "\n",
+    "# sort top k predictions from softmax output\n",
+    "top_k = net.blobs['prob'].data[0].flatten().argsort()[-1:-6:-1]\n",
+    "print labels[top_k]"
+   ]
+  }
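The slice in the cell above keeps only the label strings. As a minimal follow-up sketch, assuming the `net`, `np` and `labels` objects already built earlier in this notebook, each top-5 label can be printed next to its softmax probability:

    # pair each of the top-5 labels with its softmax probability
    # (net, np and labels are the objects defined earlier in this notebook)
    feat = net.blobs['prob'].data[0].flatten()
    top_k = feat.argsort()[-1:-6:-1]              # indices of the 5 largest probabilities
    for prob, label in zip(feat[top_k], labels[top_k]):
        print('%.4f  %s' % (prob, label))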
+ ],
+ "metadata": {
+  "description": "Instant recognition with a pre-trained model and a tour of the net interface for visualizing features and parameters layer-by-layer.",
+  "example_name": "Image Classification and Filter Visualization",
+  "include_in_docs": true,
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "python2"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.9"
+  },
+  "priority": 1
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/examples/01-learning-lenet.ipynb b/examples/01-learning-lenet.ipynb
new file mode 100644
index 0000000..3562c7a
--- /dev/null
+++ b/examples/01-learning-lenet.ipynb
@@ -0,0 +1,5196 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Python solving with LeNet\n",
+    "\n",
+    "In this example, we'll explore learning with Caffe in Python, using the fully-exposed `Solver` interface."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "os.chdir('..')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "import sys\n",
+    "sys.path.insert(0, './python')\n",
+    "import caffe\n",
+    "\n",
+    "from pylab import *\n",
+    "%matplotlib inline"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "We'll be running the provided LeNet example (make sure you've downloaded the data and created the databases, as below)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Downloading...\n",
+      "--2015-06-30 14:41:56--  http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz\n",
+      "Resolving yann.lecun.com... 128.122.47.89\n",
+      "Connecting to yann.lecun.com|128.122.47.89|:80... connected.\n",
+      "HTTP request sent, awaiting response... 200 OK\n",
+      "Length: 9912422 (9.5M) [application/x-gzip]\n",
+      "Saving to: 'train-images-idx3-ubyte.gz'\n",
+      "\n",
+      "train-images-idx3-u 100%[=====================>]   9.45M   146KB/s   in 57s    \n",
+      "\n",
+      "2015-06-30 14:42:53 (171 KB/s) - 'train-images-idx3-ubyte.gz' saved [9912422/9912422]\n",
+      "\n",
+      "--2015-06-30 14:42:53--  http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz\n",
+      "Resolving yann.lecun.com... 128.122.47.89\n",
+      "Connecting to yann.lecun.com|128.122.47.89|:80... connected.\n",
+      "HTTP request sent, awaiting response... 200 OK\n",
+      "Length: 28881 (28K) [application/x-gzip]\n",
+      "Saving to: 'train-labels-idx1-ubyte.gz'\n",
+      "\n",
+      "train-labels-idx1-u 100%[=====================>]  28.20K   107KB/s   in 0.3s   \n",
+      "\n",
+      "2015-06-30 14:42:53 (107 KB/s) - 'train-labels-idx1-ubyte.gz' saved [28881/28881]\n",
+      "\n",
+      "--2015-06-30 14:42:53--  http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz\n",
+      "Resolving yann.lecun.com... 128.122.47.89\n",
+      "Connecting to yann.lecun.com|128.122.47.89|:80... connected.\n",
+      "HTTP request sent, awaiting response... 200 OK\n",
+      "Length: 1648877 (1.6M) [application/x-gzip]\n",
+      "Saving to: 't10k-images-idx3-ubyte.gz'\n",
+      "\n",
+      "t10k-images-idx3-ub 100%[=====================>]   1.57M   205KB/s   in 8.2s   \n",
+      "\n",
+      "2015-06-30 14:43:02 (197 KB/s) - 't10k-images-idx3-ubyte.gz' saved [1648877/1648877]\n",
+      "\n",
+      "--2015-06-30 14:43:02--  http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz\n",
+      "Resolving yann.lecun.com... 128.122.47.89\n",
+      "Connecting to yann.lecun.com|128.122.47.89|:80... connected.\n",
+      "HTTP request sent, awaiting response... 200 OK\n",
+      "Length: 4542 (4.4K) [application/x-gzip]\n",
+      "Saving to: 't10k-labels-idx1-ubyte.gz'\n",
+      "\n",
+      "t10k-labels-idx1-ub 100%[=====================>]   4.44K  26.9KB/s   in 0.2s   \n",
+      "\n",
+      "2015-06-30 14:43:02 (26.9 KB/s) - 't10k-labels-idx1-ubyte.gz' saved [4542/4542]\n",
+      "\n",
+      "Unzipping...\n",
+      "Done.\n",
+      "Creating lmdb...\n",
+      "Done.\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Download and prepare data\n",
+    "!data/mnist/get_mnist.sh\n",
+    "!examples/mnist/create_mnist.sh"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "We need two external files to help out:\n",
+    "* the net prototxt, defining the architecture and pointing to the train/test data\n",
+    "* the solver prototxt, defining the learning parameters\n",
+    "\n",
+    "We start with the net. We'll write the net in a succinct and natural way as Python code that serializes to Caffe's protobuf model format.\n",
+    "\n",
+    "This network expects to read from pregenerated LMDBs, but reading directly from `ndarray`s is also possible using `MemoryDataLayer`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "from caffe import layers as L\n",
+    "from caffe import params as P\n",
+    "\n",
+    "def lenet(lmdb, batch_size):\n",
+    "    # our version of LeNet: a series of linear and simple nonlinear transformations\n",
+    "    n = caffe.NetSpec()\n",
+    "    n.data, n.label = L.Data(batch_size=batch_size, backend=P.Data.LMDB, source=lmdb,\n",
+    "                             transform_param=dict(scale=1./255), ntop=2)\n",
+    "    n.conv1 = L.Convolution(n.data, kernel_size=5, num_output=20, weight_filler=dict(type='xavier'))\n",
+    "    n.pool1 = L.Pooling(n.conv1, kernel_size=2, stride=2, pool=P.Pooling.MAX)\n",
+    "    n.conv2 = L.Convolution(n.pool1, kernel_size=5, num_output=50, weight_filler=dict(type='xavier'))\n",
+    "    n.pool2 = L.Pooling(n.conv2, kernel_size=2, stride=2, pool=P.Pooling.MAX)\n",
+    "    n.ip1 = L.InnerProduct(n.pool2, num_output=500, weight_filler=dict(type='xavier'))\n",
+    "    n.relu1 = L.ReLU(n.ip1, in_place=True)\n",
+    "    n.ip2 = L.InnerProduct(n.relu1, num_output=10, weight_filler=dict(type='xavier'))\n",
+    "    n.loss = L.SoftmaxWithLoss(n.ip2, n.label)\n",
+    "    return n.to_proto()\n",
+    "    \n",
+    "with open('examples/mnist/lenet_auto_train.prototxt', 'w') as f:\n",
+    "    f.write(str(lenet('examples/mnist/mnist_train_lmdb', 64)))\n",
+    "    \n",
+    "with open('examples/mnist/lenet_auto_test.prototxt', 'w') as f:\n",
+    "    f.write(str(lenet('examples/mnist/mnist_test_lmdb', 100)))"
+   ]
+  },
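+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As an aside, here is a minimal sketch of the `MemoryData` alternative mentioned above, which reads batches straight from `ndarray`s instead of an LMDB. The 1x28x28 input shape and the `set_input_arrays` call are assumptions to check against your Caffe build; the LMDB-backed net above is what the rest of this example uses."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# a minimal sketch (not used below): the same style of net, fed from memory\n",
+    "def lenet_memory(batch_size):\n",
+    "    n = caffe.NetSpec()\n",
+    "    # MemoryData needs the full input shape up front; 1x28x28 assumes MNIST\n",
+    "    n.data, n.label = L.MemoryData(batch_size=batch_size, channels=1,\n",
+    "                                   height=28, width=28, ntop=2)\n",
+    "    n.conv1 = L.Convolution(n.data, kernel_size=5, num_output=20, weight_filler=dict(type='xavier'))\n",
+    "    n.ip1 = L.InnerProduct(n.conv1, num_output=10, weight_filler=dict(type='xavier'))\n",
+    "    n.loss = L.SoftmaxWithLoss(n.ip1, n.label)\n",
+    "    return n.to_proto()\n",
+    "\n",
+    "# data would then be supplied with net.set_input_arrays(images, labels),\n",
+    "# where both arrays are float32 and images are shaped (N, 1, 28, 28)"
+   ]
+  },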
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The net has been written to disk in more verbose but human-readable serialization format using Google's protobuf library. You can read, write, and modify this description directly. Let's take a look at the train net."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "layer {\r\n",
+      "  name: \"data\"\r\n",
+      "  type: \"Data\"\r\n",
+      "  top: \"data\"\r\n",
+      "  top: \"label\"\r\n",
+      "  transform_param {\r\n",
+      "    scale: 0.00392156862745\r\n",
+      "  }\r\n",
+      "  data_param {\r\n",
+      "    source: \"examples/mnist/mnist_train_lmdb\"\r\n",
+      "    batch_size: 64\r\n",
+      "    backend: LMDB\r\n",
+      "  }\r\n",
+      "}\r\n",
+      "layer {\r\n",
+      "  name: \"conv1\"\r\n",
+      "  type: \"Convolution\"\r\n",
+      "  bottom: \"data\"\r\n",
+      "  top: \"conv1\"\r\n",
+      "  convolution_param {\r\n",
+      "    num_output: 20\r\n",
+      "    kernel_size: 5\r\n",
+      "    weight_filler {\r\n",
+      "      type: \"xavier\"\r\n",
+      "    }\r\n",
+      "  }\r\n",
+      "}\r\n",
+      "layer {\r\n",
+      "  name: \"pool1\"\r\n",
+      "  type: \"Pooling\"\r\n",
+      "  bottom: \"conv1\"\r\n",
+      "  top: \"pool1\"\r\n",
+      "  pooling_param {\r\n",
+      "    pool: MAX\r\n",
+      "    kernel_size: 2\r\n",
+      "    stride: 2\r\n",
+      "  }\r\n",
+      "}\r\n",
+      "layer {\r\n",
+      "  name: \"conv2\"\r\n",
+      "  type: \"Convolution\"\r\n",
+      "  bottom: \"pool1\"\r\n",
+      "  top: \"conv2\"\r\n",
+      "  convolution_param {\r\n",
+      "    num_output: 50\r\n",
+      "    kernel_size: 5\r\n",
+      "    weight_filler {\r\n",
+      "      type: \"xavier\"\r\n",
+      "    }\r\n",
+      "  }\r\n",
+      "}\r\n",
+      "layer {\r\n",
+      "  name: \"pool2\"\r\n",
+      "  type: \"Pooling\"\r\n",
+      "  bottom: \"conv2\"\r\n",
+      "  top: \"pool2\"\r\n",
+      "  pooling_param {\r\n",
+      "    pool: MAX\r\n",
+      "    kernel_size: 2\r\n",
+      "    stride: 2\r\n",
+      "  }\r\n",
+      "}\r\n",
+      "layer {\r\n",
+      "  name: \"ip1\"\r\n",
+      "  type: \"InnerProduct\"\r\n",
+      "  bottom: \"pool2\"\r\n",
+      "  top: \"ip1\"\r\n",
+      "  inner_product_param {\r\n",
+      "    num_output: 500\r\n",
+      "    weight_filler {\r\n",
+      "      type: \"xavier\"\r\n",
+      "    }\r\n",
+      "  }\r\n",
+      "}\r\n",
+      "layer {\r\n",
+      "  name: \"relu1\"\r\n",
+      "  type: \"ReLU\"\r\n",
+      "  bottom: \"ip1\"\r\n",
+      "  top: \"ip1\"\r\n",
+      "}\r\n",
+      "layer {\r\n",
+      "  name: \"ip2\"\r\n",
+      "  type: \"InnerProduct\"\r\n",
+      "  bottom: \"ip1\"\r\n",
+      "  top: \"ip2\"\r\n",
+      "  inner_product_param {\r\n",
+      "    num_output: 10\r\n",
+      "    weight_filler {\r\n",
+      "      type: \"xavier\"\r\n",
+      "    }\r\n",
+      "  }\r\n",
+      "}\r\n",
+      "layer {\r\n",
+      "  name: \"loss\"\r\n",
+      "  type: \"SoftmaxWithLoss\"\r\n",
+      "  bottom: \"ip2\"\r\n",
+      "  bottom: \"label\"\r\n",
+      "  top: \"loss\"\r\n",
+      "}\r\n"
+     ]
+    }
+   ],
+   "source": [
+    "!cat examples/mnist/lenet_auto_train.prototxt"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now let's see the learning parameters, which are also written as a `prototxt` file. We're using SGD with momentum, weight decay, and a specific learning rate schedule."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "# The train/test net protocol buffer definition\r\n",
+      "train_net: \"examples/mnist/lenet_auto_train.prototxt\"\r\n",
+      "test_net: \"examples/mnist/lenet_auto_test.prototxt\"\r\n",
+      "# test_iter specifies how many forward passes the test should carry out.\r\n",
+      "# In the case of MNIST, we have test batch size 100 and 100 test iterations,\r\n",
+      "# covering the full 10,000 testing images.\r\n",
+      "test_iter: 100\r\n",
+      "# Carry out testing every 500 training iterations.\r\n",
+      "test_interval: 500\r\n",
+      "# The base learning rate, momentum and the weight decay of the network.\r\n",
+      "base_lr: 0.01\r\n",
+      "momentum: 0.9\r\n",
+      "weight_decay: 0.0005\r\n",
+      "# The learning rate policy\r\n",
+      "lr_policy: \"inv\"\r\n",
+      "gamma: 0.0001\r\n",
+      "power: 0.75\r\n",
+      "# Display every 100 iterations\r\n",
+      "display: 100\r\n",
+      "# The maximum number of iterations\r\n",
+      "max_iter: 10000\r\n",
+      "# snapshot intermediate results\r\n",
+      "snapshot: 5000\r\n",
+      "snapshot_prefix: \"examples/mnist/lenet\"\r\n"
+     ]
+    }
+   ],
+   "source": [
+    "!cat examples/mnist/lenet_auto_solver.prototxt"
+   ]
+  },
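+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a quick worked example (a sketch, using the values printed above): the `inv` policy scales the step size as `base_lr * (1 + gamma * iter) ^ (-power)`, so the learning rate decays slowly from 0.01 over the 10,000 iterations."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# effective learning rate implied by the 'inv' policy and the solver values above\n",
+    "base_lr, gamma, power = 0.01, 1e-4, 0.75\n",
+    "for it in [0, 100, 1000, 5000, 10000]:\n",
+    "    print it, base_lr * (1 + gamma * it) ** -power"
+   ]
+  },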
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's pick a device and load the solver. We'll use SGD (with momentum), but Adagrad and Nesterov's accelerated gradient are also available."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "caffe.set_device(0)\n",
+    "caffe.set_mode_gpu()\n",
+    "solver = caffe.SGDSolver('examples/mnist/lenet_auto_solver.prototxt')"
+   ]
+  },
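+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To use one of the other solvers mentioned above, the usual route is to change the solver configuration; depending on the pycaffe build, the corresponding classes may also be exposed directly. The class names below are an assumption to verify against your installation."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# illustrative only -- check that these classes exist in your pycaffe build\n",
+    "# solver = caffe.NesterovSolver('examples/mnist/lenet_auto_solver.prototxt')\n",
+    "# solver = caffe.AdaGradSolver('examples/mnist/lenet_auto_solver.prototxt')"
+   ]
+  },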
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To get an idea of the architecture of our net, we can check the dimensions of the intermediate features (blobs) and parameters (these will also be useful to refer to when manipulating data later)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {
+    "collapsed": false,
+    "scrolled": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[('data', (64, 1, 28, 28)),\n",
+       " ('label', (64,)),\n",
+       " ('conv1', (64, 20, 24, 24)),\n",
+       " ('pool1', (64, 20, 12, 12)),\n",
+       " ('conv2', (64, 50, 8, 8)),\n",
+       " ('pool2', (64, 50, 4, 4)),\n",
+       " ('ip1', (64, 500)),\n",
+       " ('ip2', (64, 10)),\n",
+       " ('loss', ())]"
+      ]
+     },
+     "execution_count": 8,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# each output is (batch size, feature dim, spatial dim)\n",
+    "[(k, v.data.shape) for k, v in solver.net.blobs.items()]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[('conv1', (20, 1, 5, 5)),\n",
+       " ('conv2', (50, 20, 5, 5)),\n",
+       " ('ip1', (500, 800)),\n",
+       " ('ip2', (10, 500))]"
+      ]
+     },
+     "execution_count": 9,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# just print the weight sizes (not biases)\n",
+    "[(k, v[0].data.shape) for k, v in solver.net.params.items()]"
+   ]
+  },
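+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Each entry of `params` is a vector of blobs: the weights shown above sit at index 0 and the biases at index 1, so the bias shapes can be listed the same way."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# the bias sizes, stored at index 1 of each parameter blob vector\n",
+    "[(k, v[1].data.shape) for k, v in solver.net.params.items()]"
+   ]
+  },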
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Before taking off, let's check that everything is loaded as we expect. We'll run a forward pass on the train and test nets and check that they contain our data."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "{'loss': array(2.301163673400879, dtype=float32)}"
+      ]
+     },
+     "execution_count": 10,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "solver.net.forward()  # train net\n",
+    "solver.test_nets[0].forward()  # test net (there can be more than one)"
+   ]
+  },
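+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As an extra sanity check (a sketch, assuming the blob names listed earlier): before any training, accuracy on the already-forwarded first test batch should sit near chance, around 10%."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# untrained accuracy on the first test batch -- roughly chance for 10 classes\n",
+    "preds = solver.test_nets[0].blobs['ip2'].data.argmax(1)\n",
+    "print (preds == solver.test_nets[0].blobs['label'].data).mean()"
+   ]
+  },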
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[ 5.  0.  4.  1.  9.  2.  1.  3.]\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAWwAAABKCAYAAACfHW4mAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJztvXlQW1me5/s5EhJaECAJhEBgdrMbDNjgtNNOp7d02pk1\n",
+       "mVlZW1dWd0XH9ERMzxIzEzE1M3/M1HvzIt68iZjpF9HRPdFvpqeqZ6ajJyozy5VbpZ1e0k4n6R0w\n",
+       "JBizrwIJxCYJgQTc9wfcW+D0KiOwK+8ngkBcJN2jo3N/95zf+f5+PyFJEioqKioqzz6arW6AioqK\n",
+       "isrjoRpsFRUVlecE1WCrqKioPCeoBltFRUXlOUE12CoqKirPCarBVlFRUXlOiNpgCyFeEUJ0CCG6\n",
+       "hBA/28hGqaioqKh8ExGNDlsIoQXuAoeBEeAG8ENJku5sbPNUVFRUVGSinWHvBrolSeqXJCkC/G/g\n",
+       "OxvXLBUVFRWVe4nWYLuAoTV/D68eU1FRUVGJEXFRvu6RfhQhhBrzrqKiohIFkiSJ+x2PdoY9AmSt\n",
+       "+TuLlVm2ioqKikqMiNZg3wQKhRA5Qgg98H3gw41rloqKiorKvUTlEpEkaVEI8Y+AM4AW+GtVIaKi\n",
+       "oqISW6KS9SkvFqIfmAWWgIgkSbvX/O9b7cPWaDRotVr0ej06nQ6tVkskEiEcDhOJRFheXkZNbaui\n",
+       "8js0Gg3x8fHEx8ej1WpZXl5mfn6ehYUFlpeXt7p5m8qDfNjRbjoq7wu8JEnS5FO+z+8dVquVnJwc\n",
+       "amtrqaqqwuFw0NLSwq1bt2hpaWFsbIxwOLzVzVRReWZITk7m2LFj7Nmzh/z8fLxeL2fOnOHs2bPM\n",
+       "zMywuLi41U3ccp7WYAPc906wUQgh0Gq1aLVa5Vh8fDwulwuj0UhcXBwmkwm3243P5yM/P5+MjAyS\n",
+       "k5NZXl7G4/EwMDBAf38/8/PzsWwqAFqtloSEBGpqanjppZeoqqqitLQUu91OdnY227ZtIz09nVOn\n",
+       "TjExMRHz9jwNcXFxWK1WCgoKsNvtNDU1MTExwcLCwlY3DYPBQHZ2Nrm5uTidToQQdHV10dHRwfT0\n",
+       "tHpxPwKNRkNcXBxarZakpCSSkpLQ6/WMj4/j9Xo3fUar1+txOBwcOHCAgwcPkpubi8/nw+Px0NjY\n",
+       "SDAYfCa+0/j4eAwGA0IIEhISSE1NZfv27RiNxge+JhKJMDExQVdXF/39/U/Vtxsxwz4nhFgC/kqS\n",
+       "pP8a7RsJ8Tu7r9FoEEIoxtpoNGIwGJT/22w2jh49isPhwGw2k56ezrlz52hqauLHP/4xL7/8MiUl\n",
+       "JYTDYb744gtOnTrF+++/H3ODLYQgPj6ebdu28dprr/GTn/wEo9GIRrOyt1tZWUlxcTElJSVcvnwZ\n",
+       "n8/31G4R+b2BDb/IDAYDBQUFvPPOO1RVVfHzn/+cW7dubbnB1mg0JCcnc/jwYb773e+yf/9+NBoN\n",
+       "v/jFL/jLv/xL2tvbn4mL+1lDvqY0Gg0GgwGj0YjRaKSoqIiioiISExO5fv06DQ0Nm+6GMJlMZGRk\n",
+       "UF1djcvlQqvVkpqaSnp6Oqmpqbjd7k1ry72s7Ter1UpqaiparZbs7Gzq6ur46U9/Slpa2gOvZb/f\n",
+       "z61bt/jlL3/J6OjoU/Xt0xrsvZIkjQohUoGzQogOSZIuP+mbaLVaEhMT0ev1GAwGXC4XLpcLh8OB\n",
+       "wWAgNzeX7Oxs5fk6nY60tDTi4+MRQhCJRFhcXKS4uJgDBw7gdDrx+/14PB46Ozvp7e3dFCNjs9ko\n",
+       "Li7mhz/8Ifv371fuxIuLi4TDYZaXl5XPKs8gJicno/Znx8fHk5mZSWJiIktLS3R1dREKhTbs8+h0\n",
+       "OrKyVtSbQ0NDRCKRDXvvpyE7O5v6+npOnDjB9u3bkSRJ3RN4DEwmE+np6ezevZv8/HxcLhdpaWlY\n",
+       "rVYsFgtxcXHk5eVhtVo5d+4cU1NTm9a2hYUFfD4f3d3dOBwOZdw9CyQmJpKenk55eTk7duygoKAA\n",
+       "jUaD3W7H5XKRlJSEJEkPHH8Gg4HS0lJOnDgBwPnz5xkfH4/qenoqgy1J0ujq73EhxClWQtafyGAb\n",
+       "DAYcDgd1dXXY7XZlxiwb7Pj4eLKyssjMzLzv6/1+P7dv3yYcDmMwGPB6vYyPjzM7O8vo6CjXr1+P\n",
+       "qcGWl5Zms5nKykoOHjzIoUOHyMrKIi4uDkmSCIVCTExMMDw8jMvlUlYIRqOR1tZWBgcHo2pffHy8\n",
+       "4gIKh8MMDw9vqMHW6/U4nU5MJhORSARJktathLaK1NRUSkpKKCkpwW63b1m7ZPeX3W7HZrORmJiI\n",
+       "2WwmPj4eAI/Hw+joKOFwGKPRiE6nU8bmRn5PDyM+Ph6LxYLT6SQnJ4fi4mLq6urIzc3F4XBgsViU\n",
+       "VZrsYgyFQrS2tjI3N7dpq6lIJILf72dkZISZmZlnymBnZWWxd+9e9u7dS1lZmdI2eYUihHjoZCEu\n",
+       "Lo7U1FRqa2sJh8N0dnYSCAQ212ALIUyAVpIkvxDCDBwF/o8nfZ/k5GR27tzJz372M7Zv3664PuQL\n",
+       "8GEXoiRJzMzMcPr0afr7+wmHw5w7d45AIMDMzAyBQACPx8PExETMZl/yRZubm8uJEyd4++23SU1N\n",
+       "RafTKc8JBAJ0d3fz4YcfcvDgQd58803+9E//lJKSEt59910++OCDqA12Xl4excXFhEIhLl26tGGf\n",
+       "SwhBXFwcNpuN1NRUYGXgPQuYTCasVquiJtgq9Ho927Zto66ujrq6OkpLS8nOzlb66+zZs3z66adM\n",
+       "TU2Rnp5OYmIiX375Je3t7YyMjGxKGy0WC9u3b+fVV1/lwIEDVFZWEh8fj0ajYWlpCZ/PRzAYBMDl\n",
+       "cpGZmUltbS3Z2dmbul+xuLjI3Nwck5OTzM3Nbco5H5eKigrefvttqqursVgsUU8OsrOzWV5e5vTp\n",
+       "07jdbmZnZ5/4PR55BQoh/jtwAvBKklSxeswGfADsEkKEWYly/J+SJH32pA0IhUJMTU0xNzenuAzu\n",
+       "RzgcxuPxEAwG0Wg0uFwudDod09PT3Lhxg+7ubpaWlgAUF8Ti4iLz8/MxXSrv3LmTl19+merqakpL\n",
+       "S7HZbGi12nVfanJyMi6XC5PJxOTkJH19fbhcLqxWKxkZGeuM+5NgMpmoqanBbrfT2dm5UR8JWFn5\n",
+       "OJ1O9uzZg8VioaWlBbfbjd/v39DzPAnyaqu+vp69e/eSkJCA3++nv7+fjz/+mMuXL2+a6yYzM5Md\n",
+       "O3Zw4sQJSktLcblcJCcnEwqF6OnpwWg0kpubyzvvvKPMsJeWlkhISCAcDsfUYOt0Oux2Oy+88ALV\n",
+       "1dWUlZUpm7OysZ6bm6Ovr49f/epXeL1e0tPT+ZM/+RNSU1OxWCxYLBb0en3M2ni/NicmJpKRkUFS\n",
+       "UtKmnfdxGB8fp7e3l4qKCoQQLC8vMzc3R29vLwMDA8q1LtsZu91Oeno6GRkZymoLUPzgT7MafJwp\n",
+       "0y+APwf+x5pj/wr4SJKkF1dzYVslSfq/o2lAKBRifHyc9vZ2jEYjKSkphEKhdTO7hYUFPB4Ply5d\n",
+       "wuPxoNVqKSoqwul04vF46Ovro7+/P5rTR41Go8FkMrFjxw5OnjxJRUUFZrOZpaUl/H4/fr+fxcVF\n",
+       "MjIyFN98OBymv7+f1tZW7HY7RqORxMTEdRuHT4JerycrK2vdhuxG4XK5qKmpoaioCK/Xi9vtZmpq\n",
+       "ass2HI1GI06nk/3797Nnzx4KCgqIj49neHiY69ev8+tf/5re3l5CoVDMNhzlTXD5ez9x4gTHjh0j\n",
+       "OTmZ+fl5hoaG6OrqYmhoiISEBEpLSyktLSUxMZG4uDimpqYYHBzEYrHEpH0yiYmJbN++nRMnTlBX\n",
+       "V0dOTo6y37O8vMzCwgJ3797l0qVLfPTRR0xPT1NWVsaPf/xj4uLiMBgMGAyGTV1RyW7FtLQ0EhIS\n",
+       "lOPx8fEkJiZiMpmYn5/fks3k4eFhrly5QnJyMjabjeXlZQKBAO3t7XR2dn7DJZKWlkZxcTFHjx4l\n",
+       "NTVV6cdgMIjX62V6ejrq6+iR34gkSZeFEDn3HH4dOLD6+G+Ai6wY8ScmEokwOjrKhx9+iNvtJjMz\n",
+       "k8HBQV588UUOHz4MwPT0NC0tLfzFX/wF3d3daDQasrKy2LdvH+np6ZvmD1yLXq8nMzNT8aXKvqxw\n",
+       "OExfXx8tLS3Mzs7y/e9/H5PJhM/nY3BwkGAwSDgcZteuXcru89PccWPlu62pqeHtt9/GarXS29vL\n",
+       "xMTEliov7HY71dXVvPPOO5SVlSkbup2dnZw/f57BwUH8fn9MV1NCCEVOeOTIEb73ve9hsVhwu900\n",
+       "Nzfz4Ycf0tjYyPDwMDqdjrfeeos/+IM/oLy8nISEBCKRCENDQzHfzHO5XNTV1XHgwAEyMzPXrfgW\n",
+       "Fxfx+Xx8+umn/M3f/A1utxuz2RzT9jwOcXFxGI1GrFYrJpNJOW6z2cjJyWFoaIj5+fmo3AhPS09P\n",
+       "D2NjYzQ0NKDT6ZAkicXFRaanp+/bHqPRSGlpKYWFhVgsFuUGNDIyQlNTE11dXVGPgWhvoWmSJHlW\n",
+       "H3uAtCjfB0mSmJub4/bt24yPj5OUlITP52NxcRG73U5RURHd3d1cunRJ2ZCQlRcLCwskJCQwPT0d\n",
+       "7emjIjExkcLCQn7wgx9w4MABTCYTQgi8Xi9tbW28//77TE1NYbVa+eyzzxgfH6epqYmWlhYWFhbQ\n",
+       "aDSEQiEcDofi9/T7/czMzDxRG9LT0xW9+UZjMplISkpCq9Xi9Xrp7u7eUjnftm3b2L17N9nZ2Vgs\n",
+       "FhYXFxkdHaWxsZEbN24QCARirhJxuVxUVVVx8uRJ9uzZQ3x8PAMDA5w9e5bTp0/T0dGhaJhzcnIo\n",
+       "KCggOzsbvV5PKBRibGyMmzdvMjQ09OiTPQWZmZlUVlaSlJREXFycsurz+XwMDAxw8eJFLl26pARv\n",
+       "ORwOUlJStnSPYn5+Hq/XS3NzMw6HA6vVCkBxcTGvvvoq8/PzhMPhLTHYi4uL+P1+wuGwshpeXl4m\n",
+       "Eonc1/2WkpKCy+UiISFhXZ8Gg0F8Ph9zc3NRT36e+huSJEl62jB02afn9XqJi4tjYWFBuThycnII\n",
+       "BoPKBohsnGZmZp7IwG0EGo1GcUPs2rWLkydPsm3bNkUJ0tfXx5UrVzh9+jSwYmR8Ph/9/f3cuXOH\n",
+       "ubk5lpaWsFqtLCwskJycTH5+Pjt27GBiYuKJPo/NZiM3N5fk5GQCgcCGfUZ5s9FkMpGQkIBGo2Fq\n",
+       "aoqhoaEticyU3RB5eXns3r0bm82muBdu377N7du3Y+4O0+l0OBwOdu/ezbFjx/jOd76DXq9ncHCQ\n",
+       "q1evcvr0ac6fP6+Mz5SUFGpra6moqMDpdALgdru5ffs2ra2teL3emLZXdnu43W68Xi+zs7N4vV6G\n",
+       "h4fp6Ojg7NmzDA0NKZt7stJlKzdw5eCSL7/8koKCAsrLywHIyMhAo9Fw+/Zt2tratqRtkiSxtLT0\n",
+       "yM3QuLg4xR1VWVmJ1Wpdtz81Pj5Od3c3c3NzUU8uojXYHiGEU5KkMSFEOrAhI3BxcZHFxUUkSSIS\n",
+       "iSgbhrm5udTV1XHp0iUmJye3TG8bFxdHSkoK1dXVHDx4kJSUFMWfNTMzQ2trKzdv3mR6eppAIMDY\n",
+       "2BharXadDht+tzmh1WqxWq289NJLDA0Ncffu3cdui9PppKSkBIvFsqEuIY1GQ0JCAjabTTGOoVCI\n",
+       "2dlZZVN3M9FoNJjNZoqKiti1axd6vZ6FhQXGxsY4e/YsHR0dMW9DUlKSYqj379+PyWSitbWV8+fP\n",
+       "895779Hd3b1uczsxMZGXX36Z4uJi5T2am5s5deoUbrc75iuVxsZGAoEA/f39zMzM0NXVxdjYGJOT\n",
+       "k8zMzHxjhhcfH09CQsKWGmyA2dlZzp07R11dHa+//vqWtiUajEYjVVVVyj5LcnLyuj5ta2vjzJkz\n",
+       "T7VxH63B/hD4Q+D/Wf39m6hbsIa1hri7u5vPP/+cvLw8MjMzKS8vZ/v27QQCAXw+30ac7olYG113\n",
+       "+PBhdu7ciU6no7m5mStXruB2u+nr66O3t5f5+fkHLpdkZJ+irHVOTEx8ovYkJSXhdDrR6/UEAoEN\n",
+       "8zHLxqayshKTyaSsZEKh0KbfKI1GIxkZGRw5coT6+nqMRiPLy8sMDAzQ0NDArVu3GBsbi2kb8vLy\n",
+       "qK+v5/XXX6eyspK4uDj6+/u5dOkSH3/8Md3d3coyPS4uDrvdTnFxMTk5OSQlJREMBmltbeXSpUs0\n",
+       "NTVtiutmenqajo4OQqEQoVCIyclJgsGgMi7vxWq1kpmZGbVaaaNYXl5W9nieBb3/4yAH9pWUlFBW\n",
+       "VkZxcTF5eXnrxACLi4tMTk4qGvynmfg8jqzv71jZYEwRQgwB/xb4D8CvhBB/DPQD34u6BQ9gYGAA\n",
+       "SZIoLi7m2LFj5OTkUF9fz9LSkuJPDQQCioY01siG9fDhw7zwwgs4HA7FH3jq1CmGh4eVL+JRsjJ5\n",
+       "V1mW+awNtnhcjEYjSUlJaDQaZmZmGB0djUrOJrtAjEajoic/fvw4FRUVaDQa+vr6njqcNloSExMp\n",
+       "KirizTffpLKyUlmWtre3c/HiRbq6umLmFpO/m5KSEk6cOMHevXsxGAyMjo7y5Zdfcv78ea5du0Yk\n",
+       "EkGr1WKxWEhPT6e4uJj6+nqcTieSJOF2uzl79ixXrlxhcHAwJm29l3A4zMTExDdy1ZjNZqxW6zc2\n",
+       "urOyssjKykKv1xMOhwkEAgQCgWcmOdlWG28562ZCQgImk2ndtZqYmMiePXvYv38/u3btwm63K9LJ\n",
+       "5eVlpqenlT2gwcFBlpaWnuqG/Tgz7BArOa/vrtFh/xwoBsaBVKAeOB11K+7D0tISHo+Hd999F6vV\n",
+       "yne/+13eeustysrK6OjoYHBwkJs3b9La2ropMz/Z3yz7JcfHx/nlL3/JhQsX6OjoWDe4H3UHldu7\n",
+       "Ue2emppieHg4qgtMp9ORnJxMSUkJO3fupLa2lj179pCWlqYsUVtaWgiFQptusJ1OJ+Xl5eTm5pKY\n",
+       "mMjCwgLd3d189dVXfPnllxvqu78XWRFSXFzMnj17MJvNjIyM8NVXX/FXf/VXdHR0KNGfssvmJz/5\n",
+       "CdXV1WRnZ2Oz2ZSVwKeffrrhOvloKCgoUAJn1hrByspK0tPT0el0+Hw+urq66OrqYnJy65Jwrg31\n",
+       "3uqUA2azmYyMDHbt2kVJScm6KEyz2UxZWRkpKSmYTCa0Wq1irOfn57l+/Trnzp3j9u3b3L17Vxkz\n",
+       "0RKtDlsC/rMkSf856jM/AkmSmJ+fp6enhy+++AKHw0FlZSU1NTXk5eUxOTlJamoqer2e/v5+Zmdn\n",
+       "YzojKC4u5tChQ6SlpREKheju7ub69ev09PQ8cWTWWjlfLGcPCQkJmM1mhBBYrVZF+y3PqLdt20ZC\n",
+       "QgJGo5HU1FSSkpKUwAmdTsf8/DydnZ14PJ5NNdbx8fFkZGSwd+9eDh06hN1uJxgM0tvbywcffMBX\n",
+       "X33F+Ph4TGWGQgjlZuZwOIiLi6O9vZ3Tp08zPDyshBrLaRTy8/PZu3cvLpdL0VkPDQ0pqpBY3lwe\n",
+       "1H6tVovZbCY1NZXS0lL27NmjuHXWjruUlBQcDgeRSITGxkY++eQTPB7PluWO2WoDLSNvItbX13Pg\n",
+       "wAFKSkqUgDcZnU63TmstE4lEGBkZ4caNG5w9e5bR0dENkZ1Gq8OGGKdVhZWZ6szMDNeuXVOSJhUV\n",
+       "FVFWVqYsUeLi4rh48SI9PT2Mj49veBIgOal6RUUFBw8exGKxMDg4yJ07d+jt7Y1aUigba3mD9UmN\n",
+       "z9oZSHJyMjk5OUxMTKwzDGlpaYpKISsri9zcXEWqZzab2bFjBwaDgUgkQiAQYHh4GK/Xy/z8PAkJ\n",
+       "CYrSYDNlkxqNhqSkJOrr6zl8+DB79+5Fr9czMDBAW1sbH330EV1dXTFfrssuEZ1Oh06nQwjBxMQE\n",
+       "breb7OxsSkpKqK2tpby8nMzMTOx2O3q9XskfI/vam5qaNnXDdm1wT1paGunp6Wzfvp1jx45RVVVF\n",
+       "VlYW4XAYnU63LpJxcXGRqakpOjo6uHLlyqb42h/2GZ4FtFotdrudffv28aMf/YiUlJT7+vnvjXQE\n",
+       "FJ324OCg4sLdiEnP08j6/rEQ4ies1Hf8F5IkxeyqHhkZ4fPPP2diYoIjR45w6NAh8vPzKSsrw2Kx\n",
+       "YLPZOHfuHNeuXWN2dnZDZ14Gg4GSkhKKi4uVMPLR0VHa29ufKl2rbHBlSeOT+mJlHagkSezcuRO7\n",
+       "3c7IyMg6BUJaWhppaWkIIdYZk6mpKSYmJmhubmZ0dJShoSF6enrweDzYbDby8/OxWq1EIhG8Xu+m\n",
+       "hqObzWYKCgr46U9/SlVVlbJ8n5ubY3x8nOnp6U3Jay4HR4RCIYLBIGazmePHj1NdXY0kSRiNRiwW\n",
+       "CyaTiaWlJYLBIKFQSPFxBoNB+vv7n3qcPAnyTSYlJYWysjLeeecdCgsLsdlsyuRGzg/vcDjYtm2b\n",
+       "cjOClZtlZmYmFRUV+Hy+dcqmzeRZmWGv5WEr4vsdMxqNlJSUUFVVxa1bt+js7NyQHCnRGuz/Avyf\n",
+       "q4//PfCfgD9+6tY8gPn5ecbGxhTx/MTEBJWVlVRVVZGXl8fBgwfR6/UYjUYuXLiwoTNCrVZLcnIy\n",
+       "ycnJGI1GFhYW6Ovro7W19YnkdPLFlJWVRXV1NUajkZmZGSUoqKen54na1dPTw7lz59Dr9WRkZCi5\n",
+       "P9ZeYLKOemxsjOnpaWZmZhSjJxtt+cfj8Si+OoPBoAQLxDLU+37IyZSKioqw2WzK8YGBAW7evMns\n",
+       "7OymGBFZy9zS0sKZM2fYv38/KSkppKSkEAwGlfwSQ0ND+Hw+NBoNNTU15ObmAigh6pt1s5N97mlp\n",
+       "aezdu5fDhw9z4MABFhYWGBkZwePx4PF48Hq9zMzMsHfvXpKSkrBarYqLxGAwUF5eztzcHD6fj/b2\n",
+       "diYmJjY9de1awyhJkpJOOdYh/feytLTE1NQUN27cICEhgaKiIsLhMFNTU+v6RG6rnOsmJyeH9PR0\n",
+       "LBYLSUlJ6zIiPi3RFuFVdNdCiP8GfLQhrXkIi4uLirD+zp077Nixgz/6oz8iPz9fEaknJSXR2tqK\n",
+       "3+/fsCWorKKQfVRzc3NRzZzkrH61tbUcO3aMhIQE3G43N27c4MyZM/T19T1Ru9rb2xkbGyMQCFBe\n",
+       "Xk5GRsZ9nxcIBGhubmZwcJCRkRFFdng/8vLyqK2txW63Ew6HY+4nXou8lC8qKuLFF1/EbDavW4V0\n",
+       "dHTw1VdfbZoqSJIkFhYWuHr1KouLi9hsNvLy8pTIzzt37tDU1MRXX32Fz+fD6XTicDjIyMhACEFb\n",
+       "W9umJt03Go04HA5qamp46623eO2114hEIpw/f55PPvmEO3fu0N3djc/nIy0tDbvdzu7du5Vc6rLa\n",
+       "Rb5RjoyMIEkS7e3tBAIBJT4CVgxZrG+aa5VU8fHxFBQU4HQ6lcjNzbiByDbn/Pnz3L17l7q6OiUQ\n",
+       "7n6bh0lJSezbt4+TJ08qrsiNJiqDLYRIl3NhA28ArRvXpIcTiUTw+XzcvHmTgwcPsry8rCwD5Wou\n",
+       "fr8/JvpceZk8Nzf3RH5JOUDmwIEDvP766+zbt4+lpSVu377NxYsXlVD8J8Xv93Px4kVu3rz5QFmg\n",
+       "HJYcCoVYWFh4qO/X4XAoSZXkJFWbladFp9PhdDrZvn07hYWFyucJhUJ0dHTQ2dm5qTcQmcnJSa5e\n",
+       "vYrb7VZSEMjSt9nZWaanp7HZbLhcLoqKikhNTWV6elrRPscaeeVWXl7OoUOHOHr0KEVFRczPz9PV\n",
+       "1UVDQwMXLlxQxlh2djZ/+Id/yMsvv4zT6USj0dDW1sbdu3eV79/hcPCjH/2IyspKbt26xZdffonX\n",
+       "61XGzuTkZMwVJGtnsEajkfLycoqLi3E4HFEn/4+Wubk5BgcHmZmZIRKJPFAx5fF4mJ2dJT8/nxdf\n",
+       "fDEmbXmowRZCZLHio7YBWiHEFPAvgKNCiBOAHpgB9sakdWuQZ6ipqamkpKQoqRjXbt7Jy9hYfZny\n",
+       "zu/ExMRjnUPW52ZlZVFeXs7x48fZvn07k5OTXLlyhcuXL9Pc3Bz1Bo+cOGujMBgMJCYmotVqcbvd\n",
+       "fP3115tmsJOSkjh+/DgvvPCCIjGT/cC//e1vuX379qb5gteysLDA+Pg44+PjD3yO3W7HYrGQmJhI\n",
+       "fHw8CwsLdHV1PfQ1G4EQApPJRGFhIS+//DKvvPIK5eXlTE1NKTlWGhoaGBkZUaqevPDCC0qBjenp\n",
+       "adra2mhoaKCtrQ2Hw6EkM3M6nRQWFpKamorL5VIyzM3Pz3P58mUaGhpi9rm8Xi89PT24XC4l57lc\n",
+       "W7SsrEzRv28Wi4uLijb9Uc97Wtneo3jUDDsCHJMkqVkIkQDcAq6wosH+95Ik/cfV9Kp/TJTZ+h6F\n",
+       "RqNRVA05OTns3LlT8V2XlJQoviG/38/Q0BDd3d0xu/vLOuDHuRDlG4zsYz98+DA1NTUMDQ3x2Wef\n",
+       "8ed//ueblsQ+GkZHR+no6Ng0I2mz2XjnnXeorKxUIsQmJiZoamriV7/61RP7+DeTezej5ubm+Prr\n",
+       "r2P+/Wq1WlJSUnjllVd47bXXqKmpIRAIcP36dT744AOuX7/O5OSkUpno5MmTvPbaa2RmZuL3+2lr\n",
+       "a+MXv/gF165dY3h4WJF6yhLA3bt3K/p8rVar5KSfnZ2NqcHu7+/nxo0bSpEKmezsbGpqamhtbY15\n",
+       "Eig5b/Xj+u/lyVl5eTnp6ekxa9dDDbYkSWPA2OrjgBDiDuBiA9OrPgydTofNZqOsrIzq6molGZRc\n",
+       "tkrehFheXiYUCim+642+w8kXpLw0e5C/eC2FhYXs2bOHI0eOUFRURHJyMnfv3uXjjz/mo48+2pLw\n",
+       "+mcZ+aa8VmrW09NDQ0MDk5OTz3RR3bGxMTo7O2NeLONe8vLyePHFFzlx4gT5+fl4vV4++eQTLl26\n",
+       "xJ07d0hNTaW+vp7S0lIqKiooKCggISGBS5cucfPmTRobG2lra2N8fFyJexgeHmZmZoaOjg6amprY\n",
+       "vXs35eXlmM1mJicn+fTTT7l8+YnLtj4Rfr8fr9e7JTpw2cWUnp6O1WplYGCAYDD4SPdnTk4OdXV1\n",
+       "vP322+zcuTNm7XtsH/aqFnsncI0NTK96n/Og1+uxWq24XC5KS0upra1l586d5Ofnk5ycrMzAQqEQ\n",
+       "brdb0egd8pInAAAMP0lEQVReu3YtprvyOp1OadOOHTvweDyKy8BisZCSkqLUpayoqGDXrl3s2LGD\n",
+       "+Ph4vF4v165d48qVK7S3t8esjRtFfHw8ZrN5w3a3H0ZmZiZVVVVKon/Z6Lndbtra2ggEAlsiL3tc\n",
+       "tFrtN6oMbQaFhYUcPnyY4uJiEhIS8Hg8LC0tkZqaSnV1Nbm5uUpF9JycHPx+Px0dHZw+fZobN27Q\n",
+       "29u7boN+bfENuYTV2NgYd+/eVVRNX3zxRcw3U2V33JEjR0hOTlZu4ikpKYrkdHJycsO1+GazWcm/\n",
+       "LReeuDftxFq0Wi0GgwGr1Up9fT3Hjx9n9+7dpKSkKC7apaWlDdXgP5bBXnWHvA/809Uajsr/NiK9\n",
+       "6roGrUYX7dixg/379/PKK68oaUTXnBMAn89HS0sLv/71r2loaIhZ+K+sVpCXPfX19QQCAT7//HPF\n",
+       "h1xYWEh9fT27du0iJyeH1NRUDAYDExMT9Pb2cvv2bS5cuEBvb29M2riRCCGUG9Bm5EjetWsX3/ve\n",
+       "97DZbOvkXJOTkwwMDDwzOS0eRFpaGkVFRd8I+Y41JSUlHDp0SEkclpiYyEsvvcSBAwcwm804nU4l\n",
+       "0EOSJG7cuMH777/Pb37zG7xe7yNXA0NDQwwNDXH+/PmYf5a19PT0oNFoeOutt0hLS1PcIgUFBQC8\n",
+       "9957Si7vjSQlJYWamhreeOMN6urqWFpa4tatW4yPj983w2J8fDypqans3LmTkydP8uqrr2I0GpXQ\n",
+       "9HA4zMLCwjqFzdPyOMmfdKwY6/8pSZKclW9D06vKkVlyxN7Ro0fZsWMH+fn5OJ3OdRUx5Iv45s2b\n",
+       "NDc3097ezvDwcMw3eGSEEOTn52MymaioqGBmZgZJksjKylLkUiaTiWAwSF9fH93d3comzejo6KYX\n",
+       "W4iGtfrSWBogOay3sLBQUacsLS0xOzur5I72+/3P9OwafjczW7s62Azm5+fx+/0kJCSg0+mU7Iaw\n",
+       "sgHW1dWF2+1mcHCQnp4e7ty5Q0dHx6bnkY+GSCSi+MvXhoLHsn+rqqr4wQ9+QGVlJQ6Hg0AgwMGD\n",
+       "B8nJyblv0EtGRgYFBQUUFRWRl5en3LDHx8fp6+ujqamJzz77bEOLfzxKJSKAvwbaJUn6f9f866nT\n",
+       "q8r6ZqfTSVZWFikpKaSnp1NYWMiRI0fYtm2bMnMIhUL4fD7Gxsbo7u6mra2Nq1evKtWnY/klyuHx\n",
+       "U1NTzM7OYjabsdvtJCcnk52drWzKyVnQFhYWlJl/Y2PjuiRVzxNGo/Eb+Xw3Grkqe0ZGBhkZGcTF\n",
+       "xSnRlZ999hmtra3Mz88/8wZbzncu64Y3i97eXi5evEhhYSFWqxW9Xq+EmMt+9f7+fgYGBujq6lJS\n",
+       "Fzzr/QkrN6O7d+9SVFREdna2cjyW13pGRgYVFRVkZmYqs/p9+/ZRXl5+X3+60+lk27ZtpKWlodFo\n",
+       "mJ+fZ3p6Wkm53NjYSHt7+4aKIB41w94L/BhoEUI0rR7712xAelWNRoPRaGT//v288cYbSnSQXHV8\n",
+       "re9UNoC//e1vuX79Ol1dXYRCoU0R0IfDYQYGBuju7mZ4eFgJnpDdI3K9NiEEoVAIr9fLlStXePfd\n",
+       "dzlz5gzLy8vPxQVyL3a7nW3btsW0crY8BuQq3RqNBr/fz+DgIB988AGdnZ1bUjThSZmYmKCvr49I\n",
+       "JBLzVclaLl68SEdHB7W1tYoEb3Z2llu3btHY2Ijf71dkrrLa4VkM+74fwWCQq1evUlFRwe7du7ek\n",
+       "DWazmRdeeOGB/abRaJSfYDCIx+OhqamJ9957j9OnTzM/P7/h4/dRBnsAuAQ4WMnQ9/9JknQ62vSq\n",
+       "er1eqWNYWlqKy+WioqKC4uJiLBYLRqNRMRDy0ri1tZXLly9z9epVenp68Hq9BIPBTQuXXVpaYnp6\n",
+       "mosXLzI3N0d9fT21tbVs375duTCDwSAtLS3KMr6zs5Ourq4trYH4NMhGZzM2HO81cPJmTSQSeS6M\n",
+       "NaxUShkeHmZsbIzU1FS0Wi02mw2z2bwh+SMehFx559q1a3R0dGAwGNblwl5rqJ835BiDkZERxsfH\n",
+       "v1FuKxY0Njbyt3/7t9TX11NWVkZWVtZ9V5iLi4vMzs6ysLBAKBRScgutvfbn5uZi0vePo8P+Z2t1\n",
+       "2EKIs0SZXlUOMT148CAHDx7E5XIpqo+FhQVmZ2eVemc+n0+pkdjQ0EBLS4uy7NxMZMmgLH8aGxtj\n",
+       "fHyc4eFhJWBndnaW69evc+PGDdra2p55GdqDCIVCzMzMbJqhXCvHDAQCz0T17miQV1Z37tzB4XAo\n",
+       "ebTdbjeBQCBmxR/kgg4DAwMb/t5bjRwW3tzcTFpampIpz+12Rx0Z/CjkLJATExNKErm0tDQMBgPL\n",
+       "y8uKgmZqaoqBgQFmZ2eZmZlhcHCQ1tZW2traYi5HjFaHDVGkV5V1zLt27aKyslJxfcgbDHfu3KGn\n",
+       "p4elpSWuX7/OlStXmJ6eJhQKxTyC6FHMz88rd/sLFy6scxXIO8LhcFiZ1TyPjI2N0dHREbOw2ntZ\n",
+       "XFzE6/XS399Pf38/hYWFm3LeWOD3+zl//jxOp5N9+/Zx/PhxJYmS2+1+5pUuzxqyL/43v/kNZ86c\n",
+       "UQJZZOlhLIzi9PS0El0rV4h54403cLlcLC4u8vXXXys/jY2NSi4fOcJxM679aHTYV1nxbT9xetVA\n",
+       "IEBDQwMej4cPP/xQOS5/CT6fj6mpKSRJYnR0VCl79Sws6eQ8InIukd9HBgYG+OSTTxgaGlJULrGM\n",
+       "KJPdXhcuXGB0dBSr1apUGrq3vNWzztzcHM3NzZSVlZGfn09ubi779+8nGAxy+vRpxsfHnxsXz7PC\n",
+       "8vIyc3Nzm3a9yRMv2TjPzs7S1taGxWJZNy7Hx8fxeDwEg8FND+4Rj2MMV90hF4H/S5Kk3wghHKz4\n",
+       "r2ElvWq6JEl/fM9rtt7KqqhsEnKhi6NHj/Lmm29SV1fH/Pw8TU1N/Nmf/ZmyUa6i8jhIknRfD8aT\n",
+       "6LD/l6zD3or0qioqzzJy4jE5s93ExAT79u2jqqqK9PR0RkZGVIOt8tREpcPeyvSqKirPKnKV7I6O\n",
+       "Dk6dOkVraysGg4He3l7VWKtsCA91iQgh9gFfAC2sKEMA/g3wQ6Bq9Vgf8A/W5BaRX6u6RFRUVFSi\n",
+       "4EEukcfyYUeDarBVVFRUomPTDbaKioqKysYS+1A2FRUVFZUNQTXYKioqKs8JMTPYQohXhBAdQoiu\n",
+       "1TJiKqsIIfqFEC1CiCYhxPXVYzYhxFkhRKcQ4jMhRPKj3uf3ESHEfxdCeIQQrWuOPbBvhBD/enWM\n",
+       "dQghjm5Nq7eGB/TVz4UQw6tjq0kIcXzN/76VfSWEyBJCfC6EaBNCfC2E+Cerx5+/cSVnotrIH0AL\n",
+       "dAM5gA5oBkpica7n8YcVZY3tnmP/EfiXq49/BvyHrW7nFvXNi6xE1LY+qm+A0tWxpVsda92AZqs/\n",
+       "wxb31b8D/vl9nvut7SvACVStPk4A7gIlz+O4itUMezfQLUlSvyRJEeB/A9+J0bmeV+7dBX6dlfqY\n",
+       "rP7+e5vbnGcDSZIuA1P3HH5Q33wH+DtJkiKSJPWzcmFtTS7OLeABfQX3z/Pzre0rSZLGJElqXn0c\n",
+       "ANbWpn2uxlWsDLYLGFrz9zC/SxqlsqJfPyeEuCmE+Purx2JWJ/P3gAf1TQYrY0tGHWcr/GMhxG0h\n",
+       "xF+vWearfcVj16Z9ZvsqVgZb1Qo+nL2SJO0EjgN/KoRYlx5PWlmXqX14Hx6jb77t/fZfgFxWAttG\n",
+       "gf/0kOd+q/rq3tq0a//3vIyrWBnsESBrzd9ZrL9jfauRVsP6JUkaB06xstzyCCGcsBL6z1PWyfw9\n",
+       "40F9c+84y1w99q1FkiSvtArw3/jdUv5b3VcPq027+v/nYlzFymDfBAqFEDlCCD3wfVbqQH7rEUKY\n",
+       "hBCW1cdm4CgruVjkOpkQZZ3M32Me1DcfAj8QQuiFELlAIXB9C9r3zLBqeGTW5vn51vbVY9Smhedk\n",
+       "XD12PuwnQZKkRSHEPwLOsKIY+WtJku7E4lzPIWnAqdWyWHHA30qS9JkQ4iZPWSfz9wEhxN8BB4AU\n",
+       "IcQQ8G95QA1RSZLahRC/AtqBReAfrs4svxXcp6/+HfCSEGJdnh/41vfVE9WmfZb7Sg1NV1FRUXlO\n",
+       "UCMdVVRUVJ4TVIOtoqKi8pygGmwVFRWV5wTVYKuoqKg8J6gGW0VFReU5QTXYKioqKs8JqsFWUVFR\n",
+       "eU5QDbaKiorKc8L/DzAr6bE92WeRAAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f7939901710>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# we use a little trick to tile the first eight images\n",
+    "imshow(solver.net.blobs['data'].data[:8, 0].transpose(1, 0, 2).reshape(28, 8*28), cmap='gray')\n",
+    "print solver.net.blobs['label'].data[:8]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[ 7.  2.  1.  0.  4.  1.  4.  9.]\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAWwAAABKCAYAAACfHW4mAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJztnWlwXNd153+3V3RjaaDR2Bs7sRMgQIKgKAokuIgUpcg2\n",
+       "q+QljstO4kpS9iQzlUlqMpkPSWZSlclM1SSTmg+umrI9ZWdGViS5ZMuyJVIUSJEUwA0QSew7QKCx\n",
+       "Aw2ggd4bbz4A7wncRABEo4Ho/apYbLzeTt9+fd695/7POUKSJFRUVFRUdj6aSBugoqKiorI+VIet\n",
+       "oqKisktQHbaKiorKLkF12CoqKiq7BNVhq6ioqOwSVIetoqKiskvYtMMWQrwkhOgUQvQIIf5iK41S\n",
+       "UVFRUXkUsRkdthBCC3QBpwAHcAv4bUmSOrbWPBUVFRUVmc3OsGuAXkmSBiVJCgBvAF/eOrNUVFRU\n",
+       "VB5msw47Axhe8/fI6jEVFRUVlTCh2+TznhpHEUKoOe8qKioqm0CSJPG445udYTuAzDV/Z7Iyy1ZR\n",
+       "UVFRCRObddi3gQIhRI4QwgB8HXh368xSUVFRUXmYTYVEJEkKCiH+GDgPaIEffdEUIlFRUeTm5mI2\n",
+       "mzEYDMTGxpKUlITVagVgYmKC0dFR5ubmGB8fZ3p6OsIWq6io7HY2JetTnizEILAAhICAJEk1a+77\n",
+       "VxvDNhqNpKWl8Vu/9VukpKQQHR2N3W6noqKCoqIiAD799FMaGxvp6enh2rVrNDc3s7y8HGHLn4xG\n",
+       "o8FkMmE0GvF6vfj9foLBYMTsEUKg1+sxGo3o9XoAgsEgHo+HQCAQMbtUVLaDJ8WwN7vpqLwuUCdJ\n",
+       "0uwzvs6u4tChQ7z66qscPXqU+Ph4dDodUVFRxMbGIl8Ac3NziYmJobS0FLfbTX9/PwsLCxF1gk9C\n",
+       "r9djtVp57bXXqK2t5fz58zQ0NNDV1RURe4QQGAwGqqurOXbsGPv378fv99PV1cXbb7/N4OAgS0tL\n",
+       "EbFNRSWSPKvDBnjsleBfI9HR0ZSVlXHmzBnOnDlDXl4eJpOJUCiE2+1mdnaW4eFhTCYTCQkJZGdn\n",
+       "k5CQQHl5OXfu3KGtrW3HOuzU1FSqqqo4cuQIra2tmM3miNmj0Wgwm81UVFTw6quvUlZWRiAQwG63\n",
+       "09HRgcvlipjD1mq1xMbGsmfPHhITE2lsbGRhYeGpz4uNjSU6Ohq3271jVgl6vZ7ExESKiopITU2l\n",
+       "p6eH4eFhpqen2Y7GJjqdDovFQllZGQCzs7OMjo6yuLiI3+9f9+totVqMRiMWiwW3243L5drRq9ln\n",
+       "4VlriUjARSHEbSHEH2yFQTsVIQSJiYl885vf5Ctf+QplZWVERUURDAZZWlrC4XBw+/ZtLly4QGNj\n",
+       "IyMjI2i1WqxWK8XFxezbtw+TyRTpj/FY9Ho9GRkZWCwWgsEgTqcTt9sdMXt0Oh0JCQmUlpZSXV2N\n",
+       "yWTCYrGQl5dHeXk5KSkpEbPNYDBgt9v5+te/zp//+Z+v25aUlBRKS0tJT08nJiYmzFauD5PJxN69\n",
+       "e/n+97/PP/7jP/LVr36VPXv2oNFoECL887Do6GgKCgr43ve+xx/+4R/y8ssvk5WVteHfSVRUFMnJ\n",
+       "yezbt4/MzEy0Wm1Y7RdCoNPp0Ov16PX6bRkrmWedYR+RJGlMCJEEfCiE6JQk6epWGLbTSEhIoKCg\n",
+       "gKqqKtLT0wkEAkxPT1NfX8/ly5eZnZ1lenoal8uF2Wzmtddew2KxkJycjMFgwGw2o9HszFpber1e\n",
+       "cSTz8/O0tLQwOjoaEVuMRiOZmZl84xvfoLq6OiI2fB5RUVEUFhaSlpa2IceQnp7O4cOHEUJw5coV\n",
+       "rly5EmZLPx+j0UhGRgavvfYalZWVxMfH85WvfIWRkRGamprCOkPVaDRERUXxyiuv8I1vfIOysjIc\n",
+       "DgdDQ0MsLCyse7Kg0WgwGo289NJLfOlLXyIlJYVf/epXTE5OMjs7G5bVrF6vx2azcfToUbKysvD5\n",
+       "fLz11ltMTExsy6z+mRy2JEljq/9PCSHeYSVlfV0OW46bJicnY7FY8Pv9hEIh/H4/09PTeDwe/H4/\n",
+       "Xq93RyxvYmJiSEtLIz09Hb1ez/j4OFevXuXdd9/l0qVLir1arZasrCy8Xq+yrPR4PMzPzxMKhSL8\n",
+       "KR7FaDSSlJREeXk5UVFRDAwM4HA4cLlc22qHwWBQxresrIxTp06Rl5f3wGPkmXdmZiYzMzMsLS3h\n",
+       "crm2bTUghMBkMlFcXExaWtqGLsA6nY74+Hjy8vIYHBxEq9WyvLy8LaGHxxEbG0tOTg7V1dXY7Xa0\n",
+       "Wi1xcXFERUWF1SatVovFYqG8vJxTp05x5MgRnE4n9+/fp6uri/n5+XWHi4xGI4WFhdTW1lJXV8fY\n",
+       "2BhCiLD4C9lR5+TkUFZWxsmTJ8nOzmZmZoaPPvqI6enpne2whRBmQCtJkksIEQ2cBv7zep9vMpmo\n",
+       "qKigrq6O8vJynE4nXq+XmZkZrl27hsPhYHZ2lomJiQ3Fs8KFyWQiLi4OrVaLy+WitbWVH/zgB7S2\n",
+       "tj4Qw4yPj+fYsWMcOHCAlJQUdDodY2NjdHR04PV6I/gJHo/FYmHPnj3U1dXhcrn49NNPIxIOiYmJ\n",
+       "4YUXXuDw4cPs37+f0tJSoqOjH3iM0WgkLy+PpaUlrFYrQ0NDdHV1cf/+/W2xUQiB2WympKSE1NRU\n",
+       "pqam1v1cp9PJxMQE1dXVJCQkYDAY8Pl8EXPYNpuNPXv2kJSUhMlkYn5+njt37jA4OEgwGAybXUaj\n",
+       "kaysLH73d3+X2tpalpeXuXnzJj/72c94//33N+T04uLiOHXqFAcOHECj0fD+++/T2NgYFgmt2Wxm\n",
+       "//79nDt3jtOnT2Oz2TAYDHR1dREXF4dOp9uWfYmnOmwhxI+BV4BJSZLKV49ZgV8CB4UQflayHP9Z\n",
+       "kqQL631ji8XCK6+8wuHDh0lPT1dm2D6fj0OHDrGwsMD8/DwOhwOfz7eu1wyFQiwuLtLR0UF7ezsT\n",
+       "ExNbNohjY2NcvnwZj8eDRqNhfHyc3t5ePB7PA48zm81UVlaSlZWlLJklSYrYD/NpxMbGKtLEnp4e\n",
+       "7t69+8hn2g7i4+N59dVX2bdvH1arFaPR+Mhj5Jir3W7nueeeY3x8nA8++IALFy4wOTkZ9h9MUlIS\n",
+       "JSUlFBYW4nK5aGpqWvfFTa/XExsbS2pqKgkJCej1+ohMRIQQaLVaSktLOXHiBLGxsQC43W6am5u5\n",
+       "f/9+WM9VeZVRWlpKUlISMzMzfPjhh7S3t2/YWefn53Py5EliYmJoamqisbGR4eHhpz95g5hMJrKz\n",
+       "szl37hyHDx8mMTFRiV3Hx8fz5S9/Gb1ez+3bt8MeEVjPDPv/AP8L+OmaY/8R+JUkSbWrtbATJEn6\n",
+       "rxt5Y1lnGwgEmJ+fx+v1KvK4goIC5Qcr3xcKhTAYDA/EDGVHGAwGlY2ShYUFrly5gt/v39Dy6mks\n",
+       "LCzQ3d2txKr8fj8ej+eBLychIYH8/HxKSkpISUlBkiRcLhfz8/O4XK4dGRJJTU2lqKgInU7H6Ogo\n",
+       "XV1d274SSE9Pp7q6moMHD5KTk/NYhyGHD6xWK0lJSQghmJ+fB8Dn8/HBBx8wOxsedakccy0tLaW2\n",
+       "thabzUZTUxMNDQ0sLi6u6zXi4uJIT0/HarViNpvRarVhsfVpaDQaYmNjlf0Ys9mMJEm43W7u3buH\n",
+       "w+EIy/vKG3WpqakUFhaSnp7O0tIS9+7do7m5ed17JvJ3UVJSwvHjx6moqGB0dJTm5ma6u7txOp1b\n",
+       "bntycjLl5eUcOnSIvLw8jEajco7GxcVx/PhxJEkiOjqawcFBxsfHmZubC8uF76kOW5Kkq0KInIcO\n",
+       "fwk4tnr7J8BlVpz4unE6nbz11ls0NzcTHx/P2NgYsbGxZGRkkJ2dTWFhIbm5uWRmZipSKKvVqiRR\n",
+       "AIrjdLlcmEwmoqOjkSQJv9/PxMQE7e3tWxqLlRUUwGNnzYWFhZw8eVLRYPv9fu7fv09vby/379/f\n",
+       "EaGdhykqKqK2tpZgMIjD4WBkZGTb7XzhhRf47ne/i81me+Imnt/vZ2lpCZ/PR0xMDBaLBavVyquv\n",
+       "vkp2djZNTU1hc9iy7PHFF1/ka1/7GsFgkKamJj766KN1n19ybD4qKiosNq4X+bPk5OSQkZGBRqNR\n",
+       "HHZnZyfj4+NheV+NRkNMTAz5+fmUl5djMplobGzkjTfeYHh4eN3nnGz/K6+8wne+8x1sNhu3bt2i\n",
+       "paVlSydoayksLOT48eOKgGAtJpOJffv2kZ+fz+nTp3n33Xc5f/48TU1NYQktbTaGnSJJ0sTq7Qlg\n",
+       "wzorr9dLR0cHDocDo9HI4uIiBoOBmJgYYmNjSU5OVjah5BlqZmbmA0vlYDCI1+tlamqKuro6Tp8+\n",
+       "jdFoJBAIhE2L+bjXjIqKIjMzk+PHj3PmzBkSExMJBoPKkv3evXsPbELuBGQ9cWZmJllZWXg8Hrxe\n",
+       "77baKW807tmzB7vdjl6vf+RC6Ha76ejooK+vD4fDgcfjITU1VZkhxsTEYLPZqKmpwev1MjQ0tOV2\n",
+       "xsbGUldXR0VFBcvLy/ziF7+gsbGRxcXFp55jspIhIyOD/Pz8iDtsk8lEWVkZGRkZyix/cnKS3t5e\n",
+       "lpaWwract1gsnD59mpdeeon9+/ezsLBAS0sLzc3NuN3udZ9zBoOBnJwccnJyiImJoa2tjYaGBpqb\n",
+       "m7dcm6/VaomJiaGkpISamhpiYmKUFbPb7cZqtWKz2dDr9cTFxZGdnc3JkycZGRmhvb2dpaWlLV9V\n",
+       "P3PijCRJ0mbS0AOBABMTE0xMTDz2frk+R2pqKi6XC5fLRUZGxgMOOxQKEQgE8Pv92O12Tpw4wfLy\n",
+       "MhMTExu6aj8rRqNRcSAVFRVotVqmpqbo6enh8uXL9PT07LhwiDxTkWOqcrLCdiX26HQ6rFYrhw8f\n",
+       "Zu/evSQmJqLVapEkieXlZQKBAIuLi4yMjPDhhx9y79497t+/j8/nIyMjg+rqanJycjCZTMTExFBX\n",
+       "V8fS0hJerxen00kgENiSC4/ZbMZut1NbW0tOTg5Op5OLFy/S3t6+rrEyGAxkZ2eTnZ1NYmIii4uL\n",
+       "uN3uiJ0PUVFRFBcXk5qaCqysFIeGhhTHGS5MJhOVlZVUVVWRlZVFd3c3Q0ND6155ajQaLBYLOTk5\n",
+       "HDx4kNzcXPx+Px9//DENDQ1huVDrdDqSkpLIy8tT3q+jo4O2tjbcbjd5eXmUlJRgs9mUXIGKigqq\n",
+       "q6tpa2tTkry28iK4WYc9IYRIlSRpXAiRBkxumUWrBAIB5ubmlA8sSRJLS0uPLJmjo6PZt28faWlp\n",
+       "iiKjra1tWzfP5GQK2ekIIRgZGeHmzZu0tbVtSE2wXciKi6SkJEKhkBJu2C7MZjP5+fl8+9vfprKy\n",
+       "koSEBGXGJ4ee2tvbaWxs5O2332ZkZASPx4MkSYyNjbG8vMzZs2dJTExUNrCNRiMajUbRxW+Fw7bb\n",
+       "7dTU1Ciz+cHBQQYHB9cdK42NjeXkyZMUFRXh9Xrp6+tjbGwsYnJVo9FIbm4uiYmJSJJEKBSipaWF\n",
+       "8+fPrytj81mR95xkvXUoFHrq9yTvd1VUVChZxrm5uYyNjfHmm29y9+7dsNgq5yfIobqhoSHeeOMN\n",
+       "3nzzTbRaLXv37qW2tpazZ8+Sm5tLfHw8sbGxnDhxAq1Wyz/90z/R19e3pXtCm3XY7wLfAf7b6v+/\n",
+       "2DKLVpFPprUzkYdnJQaDAZvNxssvv0xFRQVut5urV69y79495ubmtmW2WFlZSV1dHXV1deTn5+P1\n",
+       "eunv76e+vp4PP/yQmZmZHTe7hs9ORovFwtLSEtevX2dgYGDb3l+r1WI2m0lPTyc+Pl7ZNJ6dnaW3\n",
+       "t5cLFy7Q3t5Of38/Q0NDD4QfnE4nfX19XL16FaPRSFlZGQkJCdTU1BAMBunr68Pj8WzJEtlut1NZ\n",
+       "WYnNZmNkZITbt2+vW1NvMplIS0vj0KFDZGVlKRviXV1dYZXOPYn4+HhycnIoLCzEZrMp+z3j4+P0\n",
+       "9/eHfUUqf1551fHiiy8SHR2Nz+djaGiIkZFHS+onJCSQmppKeno6VVVVHDhwgNzcXBYXF+ns7GRy\n",
+       "cjJsEzM5nKXX65XfSEtLCxMTE2g0GuViIyte4uPjEUIQFRVFdHS0ohDbStYj6/sZKxuMNiHEMPBX\n",
+       "wN8DbwohvgsMAl/bUqvWSXx8PIWFhZw4cYK8vDwmJye5ceMGPT09YZ8tyvKoqqoqvvrVr1JSUoLZ\n",
+       "bGZ2dpbr169z6dIlbt++vSM3Gg0GA/Hx8eTn52OxWJiZmaGhoWFbHbY8flFRUUrs2uPx0NfXR319\n",
+       "Pa+//jpDQ0OPnZ34fD4mJia4cuUKdrtdiQ3n5eURCoVISkrakgJRGo2G9PR0SktLldl1Q0PDul9X\n",
+       "zo7du3cvVquV4eFhrl27Rm9vb0Rm11arlT179pCZmUlsbCx+v5/JyUnGx8fDtmErEwqFmJ2dxeVy\n",
+       "odFolBCmvPJoaWmhvb39keelpaWRl5dHfn4+2dnZJCUlEQgEaG1t5ebNm2GtKaPVaklISMBsNisX\n",
+       "FXnlFgqFGB8fJyoqSlGASZKEEAKv18vCwgJ+v3/Lv+f1zLA9rNS87lqjw/4boBiYApKA54APttSy\n",
+       "dZCfn88LL7xASkoKGo0Gl8vFxMTEuqVWz4KcGZaRkaHEUuWNxgsXLtDa2hqWL2wrsFqtlJSU8Pzz\n",
+       "z2OxWOjp6VF065HC7/cr8eq33noLh8PxuRc7j8dDe3s77e3tiu794R38Z0EuN5uSkqK8dl9fH9ev\n",
+       "X1+3k8jNzeXw4cMkJSUpn6+rqytiITKr1aps7gIsLi7S2NhIf39/2N/b5XJx7do1cnNzKS4uJi4u\n",
+       "DpvNhsViQZIkiouLHztTNhgMGI1GDAYDBoOBYDDI7OwsN27c4IMPPghrGMdsNlNVVYXdblcyXJub\n",
+       "mx8Iwcjh0ISEBOXYzMwMQ0NDuFyuLV/lb1aHLQH/IEnSP2ypNetErjJ28OBB6urqsFgstLa28v77\n",
+       "79Pd3b0tsTiLxcLLL79MTU2NUmLV4XBw9+5durq6mJmZ2ZHOGlYSQIqKisjIyCAQCDA2NobT6dzW\n",
+       "GLaMEAIhBE6nk9dff52LFy8yMDCA2+3+3PFbXl5W5J5+v1+Z3URHR1NTU8PU1NQzaXJl3XBcXBzR\n",
+       "0dEMDAxw//79DZ1bCQkJyo99bm6OsbExXC5XxCr1yfasdXzbtbLy+Xz09vby61//mqmpKdLS0khN\n",
+       "TSUjI4OioiIsFgsajYbBwcEHVlVy8tuLL76I2WxmaWmJkZERent7GRwcDOsKNhAIMDo6yvz8PHl5\n",
+       "eeTk5JCZmUlqaioWi4WUlBT27dtHRkYGOp0Ov9+PwWAgKSmJwsJCUlJSFEXJVrFZHTZEsKyq0Wgk\n",
+       "Pz+fAwcOUFVVRTAY5ObNm7z99tv09/eHfbNRlqOdO3eOAwcOYDAY8Pv9dHd309DQwNjYWESyBddL\n",
+       "UlISe/bswWKxMDg4qGzobecFRq4fLm/Syrr8jo71NS6SS7CazWZls1GOHxYWFnLr1q1ntlGj0aDT\n",
+       "6RQlhdPpRKPRPHWchBBoNBoSEhJIS0tDr9czPz/P2NhYRJy1XF88NTWV3NxcDAYDgUCA2dlZ7ty5\n",
+       "89jY8VYTDAaZmpri4sWL3Lx5E7vdTkFBAfv27SMQCGA0GpmYmODmzZsPaNu7u7uRJInq6mri4uJY\n",
+       "WFigra2NgYEBJXEqXHg8Hu7du8ehQ4c4ePAgdrudsrIyJicnSU9Pp6SkhL1792KxWJiensbtdmO3\n",
+       "20lNTWX//v0UFxczMzOzvQ77c/gTIcS3Wenv+GeSJM1tkU1PRZYmpaWlEQgE6Ovro7Ozk6GhoW2J\n",
+       "GaelpVFZWUl+fj7x8fH4fD76+/v5+OOP+fDDD7dlhv8sWCwWRVXjcDhoaWnZ9gtMXl4ezz33HGaz\n",
+       "eVNFkEwmE6WlpZSUlJCeno5Op2N5eZn5+XkuXLiwbsf/JCRJwufzMT09zdTUFDabDZvNhtlsfurs\n",
+       "f22SSEVFBSaTienpaQYGBiKyp2E0GsnJyaGqqorKykplprq0tITT6dzW715Wf3k8HkVJ9dZbbyGE\n",
+       "UPIn1m7o6nQ6SkpKkCSJ+fl5Ojs7eeedd2hpaQm7rV6vl97eXhwOB6FQiOTkZF577TVefPFFDAYD\n",
+       "JpMJjUaDw+Hgo48+oq+vj9///d+noKCApKQkamtrGR8f39J0+c067B8A/2X19t8C/wP47pZY9BQ0\n",
+       "Gg3R0dEUFxeTkpKixMba2trWlcjwLMgzleLiYmpra0lOTsZoNOJyuRgZGVEq3e3EJgXw2a53amoq\n",
+       "drsdj8dDb29vRBy23FJtI4kkGo0Gg8FAdHQ0OTk51NXVUVxcTFRUFBqNRsl6HRsbe+aLpqygmJub\n",
+       "Y25ujoyMDA4fPozT6aSjo+ORWZNOp1OaFMTExJCQkEBVVRVxcXHAZ5makVAMyVUOExMTFSWD0+lk\n",
+       "eHh42zXhy8vLD+jsnxbPr6ioUDIje3t7qa+vp7W1lZmZmbDbGgqFmJmZoampifr6eg4fPqyEcQCl\n",
+       "qFt9fT3Xrl1jfn6eAwcOKCuriooKmpublZozWzHOm23Cq+iuhRA/BH71zJasE5PJRHJyMsXFxSQk\n",
+       "JDA1NUV9fT1dXV1hX9LrdDoSExOprKzkhRdeIC4ujlAoxMLCAn19fVtabCocyPZnZWWRlpaG0+mk\n",
+       "t7eXnp6ebbdFDss8rsjTk9DpdNhsNrKysqiqqlLUQbDy45IbHm/Fj0PWC8/OzjI+Pk5eXh61tbWk\n",
+       "pqZy8eLFR+LjcpgsOTkZm81GUlIS2dnZyspBrj0TiWxXrVZLfHw8ZrNZkZqNjY3R2dm5IytIwmeT\n",
+       "o71793L8+HGio6Pp7Oykvr6eycnJbVmpLC8v4/V6uXHjBhqNBpvNRmFhIdHR0SwvL9Pe3s57773H\n",
+       "z3/+c8bHx4mPj+f27dtkZ2eTnp5OXl4eBQUFZGRkMDg4GDmHLYRIk2thA+eA8K9PViksLOTUqVMU\n",
+       "FRXh9/tpb2+nu7t7W664sbGxHD16lJqaGjIzMzEYDIyMjHDjxg3efffdZ16Ghxuz2Ux1dTWFhYWK\n",
+       "7eGWc20lchJKXV2dUsfZaDQqiRj19fW8++67W7qP0dbWxi9/+UsASkpKlLTux62iQqGQIlW0WCwP\n",
+       "dJYZGhri9u3bEWltJpcyzsjIUJxQV1fXhhQv243ZbKa0tJTnn3+e/fv3o9PpcLvdTw1HhQNZLhwV\n",
+       "FaXowL1eL5cuXXog18LlcvHRRx8pq5m8vDwOHDjAyMgI//Iv/7IlF8fPddhCiExWYtRWQCuEcAJ/\n",
+       "BpwWQrwCGIB54MgzW/IU5ALihw4d4uTJkyQmJnLnzh0++eQTJiYmtkXhEBMTw9GjR9m7d69Sq7mz\n",
+       "s5MPPviA1tbWHe/85IQFudrd3Nzcjv3BPkxBQQEHDx7k2LFj1NTUUFhYqNzn9/uZmpri3r17NDY2\n",
+       "4nQ6tywsNT4+zo0bNwiFQpSXl5Ofn09MTMwj1fZkna7X6yU2NpYDBw6Qk5OjrCAWFhaYmJjY9pCI\n",
+       "wWAgMTGRqqoqMjMzFccyNDREd3f3jp1hR0VFUVZWRkFBAXFxcQwPD3P//n2mpqa2PeTo8XgYHR3l\n",
+       "ypUrjIyMkJaWhs/no7Ozk8HBQaUMglzsraWlhZKSErKzs8nNzaWqqorz589vyXn5tBl2ADgjSdId\n",
+       "IUQM0AQ0sqLB/ltJkv77annV77LBan0bQS4cX15eTl1dHbW1tSwtLdHS0sKlS5e2rTtKTEwMhw8f\n",
+       "fqATSmtrK7/5zW9wOp2EQqFHUucfXgKvvV+WtOl0usc+LxAIbG0dgtWQiLykW1xcjIiUby3y55bH\n",
+       "QR4Lg8HwQG/Buro6fud3foe0tDRsNtsDr+H3+xkeHqavr2/LmxksLi7S09NDT08P9fX12O12UlJS\n",
+       "HqgaCSs641u3bik1b77//e9z9uxZrFYr8FnsdrtDImazWdkkT09Px+fz4XQ6GR0djVgbuPUQFRVF\n",
+       "UVERKSkpeDweWltb6ezsZGxs7OlPDgOysODzNOtyGG1oaIg7d+5w9OhRJVnJZrMxNjb2zDkin+uw\n",
+       "JUkaB8ZXby8KITqADLagvOpGMBgMpKenc+7cOSorK5WrW1tbW8TLlsoVBoPB4CNXz+XlZXw+n5IF\n",
+       "JW+ayT92efZTU1OjFJKHlS/e6XRy6dIlZmZmtmxGIdcRTktLUxJPIvUDkC9W8j85McFoNGK1Wjl7\n",
+       "9iwZGRlKQ9aMjAylWuPDddFnZ2f58Y9/TENDQ1htnpubw+fzMTIy8kh7sEAgoIRhgsGg0sJMxmKx\n",
+       "KF1qtnOfIzExUZnpCyHweDy0tbXtaGcNnynBbDYbMzMzShG13cDIyAjXr1+npqaGgwcPkpGRwdGj\n",
+       "R3G73c9c92TdMexVLXYVcIMtKK+6gfdlz549nDhxQskam5qa4pNPPqG9vT3iErri4mLOnTv32Nia\n",
+       "z+fD4XCwuLhIMBjEaDSSkpKizBD1ej0JCQns37//gXinJElKEfS7d+9uiVONjo4mJSWFzMxMLBYL\n",
+       "ExMTjziV7UTOBpO7r8TFxXHmzBlmZmaIj4/nxIkTpKWlKSoS2bE/XH51dnaWzs7ODRXB3yw+n29d\n",
+       "KxI5TrzWMcsz7O1GLj+r1+sJBoPMzc1x8+bNsFS32yoSExPZs2cP2dnZREdHMz4+Tmtr646/yMi4\n",
+       "XC4GBga4evUqKSkpVFVVcfz4cRwOh9KlarPnwroc9mo45OfAv1vt4ajct9nyqut8XzQaDbW1tfzR\n",
+       "H/0ReXl5uN1u+vv7+c1vfvPY2gPbwVqHcerUKU6dOvXYxy0sLHD9+nUcDgderxeLxcK+ffsoKyt7\n",
+       "6uvKnZ+Xlpa2xGHLPfxsNtuGlBnhor+/n8bGRvLz8zGbzVitVn7v937vEYf8cAhBPtHl4z09PRuq\n",
+       "77EdPLx6gJUfcbiaA3wesgxSlj3KNVj6+vq23Zb1kpeXx5EjR0hNTUWn0ynZjXNz25bq8UxIksTC\n",
+       "wgKXL1+mrKyM559/nlOnTtHV1cUnn3zy1LILn8d6ij/pWXHW/yxJklyVL+zlVWFFjiTXP5BrINy9\n",
+       "e5d33nmH/v7+bf+RejweOjo6SExMVLSYn4e8O19QUEAoFEKv1xMfHw981vJKdjyjo6MMDQ0pTYcX\n",
+       "FhZoamovsU04AAAJfUlEQVRicnJrhjY9PZ29e/diNpuZn5+nv7+frq6usDQsXQ9yGv9LL71EfHz8\n",
+       "ui4ioVAIt9utdNm+ePEibW1t9PX1hT3rbSPI3+va7zdSzStSUlKUlntut1uZBOxk+enapKq1Dax1\n",
+       "Oh0Gg4GoqCi8Xm9EmkWvFzmt/ZNPPsFut3Ps2DHKy8s5e/Ysv/jFLza9Af00lYgAfgS0S5L0P9fc\n",
+       "FfbyqnK9kMOHD1NWVobRaGRgYIDGxkauXr26bVrMtbhcLi5dugSsCPof3ig0Go1ER0djtVrR6XRK\n",
+       "k4D5+XmlVsfY2Bj3799XwhHyEtvhcDAwMMD4+Dh+v1+JlT6rQ5VXKRkZGZSWlmIymZiYmKCrq4vR\n",
+       "0dGIhkQ6Ojq4fv06brdbqS3x8GbeWnw+H93d3XR0dHDnzh3Onz+vdKHZSQ5IHnP5/IjEZqNsh6y7\n",
+       "NxgMzM3Nsbi4qPRI3alYrVbF5oWFBZxOJ4mJidhsNpKTk5Ekid7e3h29SpDzM1paWkhNTaW0tJSs\n",
+       "rCyOHj1KY2Mjc3Nzm7rgPG2GfQT4FnBPCPHp6rG/ZBvKq5rNZgoKCvje977H/v37mZ+f57333lPq\n",
+       "JEciHjgzM8NPfvIT+vr6eP7555U6GDJyd4rnnntOyXADGBwc5Pbt28qGk8vl4ubNmwwODir68YdV\n",
+       "BPLs7Fk/p1z83W63K1mBDoeDe/fusbS0FLGZn9vtpq+vj5/+9Kfs37+f5557jjNnzjxQ9exhFhYW\n",
+       "+Oijj3jvvfe4deuWUlN6J7Veg8/GXO6gEwgEtt1ByuVrY2NjSUxMRKfTKR2agsHgji1M9jCSJBEV\n",
+       "FUVVVRV79+6lsrISh8PBm2++uaMdtszQ0BA3btzglVdeobKyksrKSjIzM3E4HGFx2EPAx0AyKxX6\n",
+       "/rckSR9sR3nVnJwcnn/+eaV32/j4OL29vYyPj0dsdiBrLdva2pienn4gRgkrO9txcXGPNOucmZlh\n",
+       "ampK6TIi64ZdLlfYdbBrW24tLCzQ0dHBtWvX+OSTTyIa95XrX3d3dzM7O0t3dze9vb2Ul5dTVFRE\n",
+       "dnY2c3Nz9Pf3097ejtvtxuVycePGjW1t/7YZzGYz+/fvJy0tjcnJSW7evBmRpCpJkpibm2N0dJS0\n",
+       "tDSWlpa2VHW0HWRnZysJK4uLiwwODvLxxx/vGsWI1+tlYGCAH/7wh3zrW9+iurqaY8eOMTMzs6lw\n",
+       "53p02H+6VocthPiQMJZXlTuRyFlOSUlJSjnIqampbal1/TQ+rxflTiQUCjE4OMiVK1eUrjzd3d0R\n",
+       "DyMEg0Gmp6eZnp5maGiI0dFRenp62LdvH4WFhUxPT9PR0UFzczOLi4sEAgGmpqZ2/OaTrHdfWFjg\n",
+       "3r17vP/++3R1dW2rDfLKY3h4mObmZmw2G+Pj40xOTu54hz09PU1fXx9msxmtVqusVuSs4oaGhl2j\n",
+       "GJHP8YsXL1JWVkZVVRUHDx6kra2N5uZmfD7fhlY7m9VhQ5jKqxoMBnJzczl06BBHjhxRMgpVNofc\n",
+       "HaO+vp6GhgZFG75VTWq3CjldemBggF//+tdK9b1AIPBAAlEoFNrxy3k5kaepqYmPP/6Yu3fvRuQi\n",
+       "s7y8TGtrK8FgEK1Wq9i1k1cnAHfu3OGNN97A7/fj9XppbW3l8uXLjIyMMD8/z9LS0o6/6KxF7lE6\n",
+       "ODjI8PAw+fn5SlLQ6Ojohr6Pzeiwr7MS2w5LeVW5SajdbicuLg6tVqvsbns8nl31Re0kPB7Pjq7R\n",
+       "Lcd6Iz3r3wpmZ2d54403WFxc5P79+8zNzUXsc7lcLrq7u3nrrbdYXl5mdnZ2R6srYKUcQENDA9PT\n",
+       "08rKemhoSHHUO/2C/TiWl5e5desWFouF73znO5hMJmw224bFExvRYb/Nig57UQgRtvKqOp2O5ORk\n",
+       "pTGr3Gm6qamJqampHT87UFFZqyaKNPJ+SaTakm0Gl8uFy+XaFZuKG6Grqwufz0deXh7Dw8ObWuFu\n",
+       "RIf9f2Ud9naVV5XbV73zzju8/vrrjI6O7uhZooqKisqTCAQCDA0N8Xd/93cEAoEHZL3rRXyel1/V\n",
+       "Yf8EmJEk6U/XHFfKqwoh/hQ4KEnSNx967qYCpGazmaKiIkpKSrDb7czPz/Ppp59y9+5dpXefioqK\n",
+       "yr9mJEl67B7h0xz2C8AV4B4ryhCA/wT8NlC5emwA+KM1tUXk56qeVUVFRWUTbMphPwuqw1ZRUVHZ\n",
+       "HNvusFVUVFRUthbN0x+ioqKiorITUB22ioqKyi4hbA5bCPGSEKJTCNGz2kZMZRUhxKAQ4p4Q4lMh\n",
+       "xM3VY1YhxIdCiG4hxAUhRHyk7YwEQogfCyEmhBAta449cWyEEH+5eo51CiFOR8bqyPCEsfobIcTI\n",
+       "6rn1qRDi7Jr7vpBjJYTIFEJcEkK0CSFahRD/dvX47juvHq7duxX/AC3QC+QAeuAOUBKO99qN/1hR\n",
+       "1lgfOvbfgf+wevsvgL+PtJ0RGptaVjJqW542NkDp6rmlXz3XegFNpD9DhMfqr4F//5jHfmHHCkgF\n",
+       "KldvxwBdQMluPK/CNcOuAXolSRqUJCkAvAF8OUzvtVt5eBf4S6xo3ln9/yvba87OQJKkq4DzocNP\n",
+       "GpsvAz+TJCkgSdIgKz+smu2wcyfwhLGCx9f5+cKOlSRJ45Ik3Vm9vQis7U27q86rcDnsDGB4zd8j\n",
+       "fFY0SmVFv35RCHFbCPEHq8e2rU/mLuRJY5POyrklo55nK/yJEOKuEOJHa5b56lix7t60O3aswuWw\n",
+       "Va3g53NEkqQq4Czwb4QQtWvvlFbWZeoYPoZ1jM0Xfdx+AOSyktg2xkqdnyfxhRqrh3vTrr1vt5xX\n",
+       "4XLYDiBzzd+ZPHjF+kIjrab1S5I0BbzDynJrQgiRCiup/4SpT+Yu5Ulj8/B5Zl899oVFkqRJaRXg\n",
+       "h3y2lP9Cj9Xn9aZdvX9XnFfhcti3gQIhRI4QwgB8nZU+kF94hBBmIUTs6u1o4DTQwmd9MiFMfTJ3\n",
+       "MU8am3eBbwghDEKIXKAAuBkB+3YMq45H5hwr5xZ8gcdqHb1pYZecV+uuh70RJEkKCiH+GDjPimLk\n",
+       "R5IkbX+PpJ1JCvDOamsxHfD/JEm6IIS4TZj7ZO4GhBA/A44BNiHEMPBXPKGHqCRJ7UKIN4F2IAh8\n",
+       "f3Vm+YXgMWP110CdEOKBOj/whR+rDfWm3cljpaamq6ioqOwS1ExHFRUVlV2C6rBVVFRUdgmqw1ZR\n",
+       "UVHZJagOW0VFRWWXoDpsFRUVlV2C6rBVVFRUdgmqw1ZRUVHZJagOW0VFRWWX8P8BCxPUWfGXxrcA\n",
+       "AAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f7939901490>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "imshow(solver.test_nets[0].blobs['data'].data[:8, 0].transpose(1, 0, 2).reshape(28, 8*28), cmap='gray')\n",
+    "print solver.test_nets[0].blobs['label'].data[:8]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Both train and test nets seem to be loading data, and to have correct labels.\n",
+    "\n",
+    "Let's take one step of (minibatch) SGD and see what happens."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "solver.step(1)"
+   ]
+  },
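+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Before looking at the filters visually, a quick numeric check (a sketch using the `.diff` fields): after a step, each parameter blob holds the update that was applied in its `diff`, so the maxima below should all be non-zero."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# the update applied to each parameter blob is left in its .diff\n",
+    "for k, v in solver.net.params.items():\n",
+    "    print k, abs(v[0].diff).max()"
+   ]
+  },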
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Do we have gradients propagating through our filters? Let's see the updates to the first layer, shown here as a $4 \\times 5$ grid of $5 \\times 5$ filters."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "<matplotlib.image.AxesImage at 0x7f79383819d0>"
+      ]
+     },
+     "execution_count": 14,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAATQAAAD7CAYAAADkSGhKAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJztvV+obt113jfWOfvYcmSLEtvfJ/FZqnSR4siWsS8sG9Ii\n",
+       "XZSgEEiam8QCU18kJZg2LaUXcS6cpO1Fm4KMIYFQ6j84dew0UOy6hqRxikuNLxwLkkpuJepgCUup\n",
+       "8snQmqb6952zz+rFd8b5nv3s5xljzPfde7/7HL8DFnOuudaaa84xx/yNMdda797bvu9xlrOc5Swv\n",
+       "gzw4dQPOcpaznOWm5Ay0s5zlLC+NnIF2lrOc5aWRM9DOcpazvDRyBtpZznKWl0bOQDvLWc7y0sjF\n",
+       "bVW8bdv5e5CznOUstyL7vm+q/GCgbdv2kYj48Yh4GBE/se/73+BzfviHf/jadR//+Mfj+7//++Ph\n",
+       "w4fx4MGD5xvvP3jwIPZ9j6dPnz7fLi8vy/19369trnzbtufbs/7IsocPH8Y73vGO+KZv+qZ4xzve\n",
+       "cS2P28c+9rH40R/90Xjy5Ek8efIkLi8vn+fV9vjx43jjjTfijTfesPncf/z4saxX3ePp06dyzNw3\n",
+       "h5eXl891qdLMf+pTn4pv//Zvj4cPHz7fcuxUnnWJqSqrUszv+y77rvTy9OnTeNvb3hbf8A3fcCXN\n",
+       "Dfd/+Zd/OX7gB37gSv9U37iP2Ncun7ZdlUVEaTPYv8ePH8dXv/rV0fbrv/7r8b3f+73ynpzPfaV/\n",
+       "tY/zLeccpi4/SVl+9md/VtpxxIFLzm3bHkbE34qIj0TE+yPio9u2/dFD6jrLWc5ylpuSQ5+hfTAi\n",
+       "/vm+75/d9/1xRPy9iPjTN9es0wtGEqe4/ixvyn3R4221477072WRQ4H2WkR8DvY//6ysv/C10Wkn\n",
+       "l0N+EvbhD3/4qOvvu3zzN3/znd/zLvX4/ve//87bcZf9e9e73nVn9zqVHPoMbTQKv/mbv/k8/9pr\n",
+       "r8Vrr70W3/Zt32bX46rMHVPb88aBkeT6Xh3LfOclndHxc7kPfehDcXl5+fx89exudcN6qj5gf524\n",
+       "Y/m8EuvjNCLiW7/1W+WzTrdVz9CqZ2qT49wv3B48eBBPnz59/jxKPetS+9/5nd8p78E65vF5+vSp\n",
+       "fa7EeXyGi2m2N4/nczLe8PkxP0PG58VKRxlMTOwd5wbPE3WMr7sJ28/t9ddfj9dff71sc8qhQPsX\n",
+       "EfFu2H93vBmlXZHv+77vu3ahMyYFNr5m8iAfjYSFJ2llvG7QHWxue8N7YTtZt1Xbqz7nZEoIOLBV\n",
+       "LwEQdg8fPjzohUDXVjxWbQm16kE8nqvsUekJ9YEgUxDDLYGVKeY5jYhrLzgc3BBq2Sa2G6VTp3c1\n",
+       "f6p9zN+W7b/yyivxyiuvPL//Jz/5SWsXhwLt4xHxR7Zte29E/F8R8eci4qN8Ug4OSgWzLmKbXMOe\n",
+       "byI8WJPzOqix53RvWw8F20S4L25f1esmA4KLAcZl0+isixi6PlYgw3Z3byUrO0S9KFgwyLBtESHB\n",
+       "5aAWEVdApsCm3vJPYaYk26/yeW0HtdsC24ocBLR9359s2/YfRMT/FG9+tvGT+75/is9TCpzArIvC\n",
+       "VDmf76CmlDSBmIuSjtk6yLn7c9u4LyoSqvIclSnJ8xlcKkKbfLahUjVWbr+yE1y6Zf8QampZrJbJ\n",
+       "fG9nA5O+uWgMoYqfKyXIXHTmlqLOjjpQc18UyKpjkzlxqFNfkYO/Q9v3/R9ExD+ozlGKW424Vq5n\n",
+       "L8mTf1U5lRwCtUnE1tXJ969kNTJygESZRmjVktOl3D+eMOq8vN5tCl6TY84OcSzZzpzDyP1uqYlL\n",
+       "TvzWDCM1BTG35HRtqco7qPE+j9HUllfgtiK39kuBiHWguTpWtk4BEyVVHozTQ8B2qJdaGWQXHVVQ\n",
+       "q+plCLhnZ8e8FMA+ooNSoKsAxrrrXlrgtZU9Th2Ls3sXlXEacX3JqZabCmwVDCbzDfuEYJtAberU\n",
+       "V8G2IrcKNPcMzcFsUrayHSqsRGXANwWqm/JUCh6qvAKdGi8Wt7xU0Zq6N98XU+6vgpnqM8MM9zGS\n",
+       "cstMBTjWI7aBHdtUJtFZPkNzMOuWnO7r+tU5oeBVBQydAz7WmU/lXkVoCkbHgEwNovNcnVSR0k0N\n",
+       "nKq3q1/pFyd15xyUdBHQBGZuyelg5vrpxqKzkYTb5HkZX+fsBvWrjnGer5vALO/LwJq8FJjYSGXr\n",
+       "DDHVb+w/5vleq0582v5O7hxo7vW4846ovCnM3KBVE6S6rqrjNmBWwW3aJ5aJA6jqZBhUMJsuOTHv\n",
+       "DF+1qwI0w4yhVi01uyWn0o1yPiqN8BEatyEiSpgpqCEUqiiNdY/9YOeCY4N5B7W87i7s3slJIrTq\n",
+       "IWyew/kVsLl7R1z3JDyIU6jd9DZ94znxWl2ExbrCpWZGElxXljmAVUvODmYpqANeguHY8Pg5mOW5\n",
+       "k6WmisxclMZ2MMlzhMYQc0vOlaVm9emG0nflzCpoueM3bfMTW2e5N8/QnPfgc9I4pjBMqSKcCdQq\n",
+       "uEwGTj0MPeQ1dtefSkfOkbh6EWRYxxRqHcgwj5O+Woopx8b7+CyN295Fa6rNbAecThxR1uUghsci\n",
+       "Qj7878DWwaBz9txPBS0HukOAtmrzEznZklN5R+XV1eSsOjmBDr4ez/O4DlVfVSdCyr1Od57Vedru\n",
+       "tXw1eRhMrgzLGYY5XnjO6k+f8F6V00mY8RhPdK9sQPVT1eOuw7JqYq9MSuwj2zpHiNUbTXeMwTbR\n",
+       "heo39o9tAh2F0+kK5LFtqKeUyYsqlDuP0BzEVJqD7wxtAhdXhlCrAOfqruplWKnf4HXe9ljIsa5Y\n",
+       "GB58HRtVGnIHsO6NoUsnTmqiD35+NHFubCsuCmG9ZL3TTw9cRKkcNtpAl/IvCRhqeD+lG7fPbXKB\n",
+       "htNvJS7YWa2H5eRA66DGiovQwHFG5cCGIb+qM+L65wTd2xmGmvsx8QrUHORcm7gvEdeBjcITHoWN\n",
+       "rgOaghrXUy3pFCy68T1mYz3i8rrSgWpf5/RU/ytdKPup7Mp9aOvu38EM552DMJ7H+lD7TpSO7yXQ\n",
+       "Hj58eK1MhdkKbmqQWSbwcp5YPadhuE0nRBWhsUedAmy6LO08Wu5jRFYt4RHmaNRZPl1qpi4dHCpo\n",
+       "VGMw1cGhY9dFZBXQlON0k7KqNyLseHd21cHM2YdKlZ0o+2F9cB7r7XSQ17mVWScnfSlQPUeoPFlE\n",
+       "/axsYux5XZdOJoQD5hRqFbC6pVXVv07QuypR0Fl5fuackjNkF4lXEFIgUfZRjSfqlPvMunRAq9qT\n",
+       "kZ/Sv5NVW3BQw3ZX0ZnSUQUv1ZdqjrHgmKtjnX6cnPwtp3rTlGUcVSiZeGIFAhWduQkwuY+D2+pS\n",
+       "c/J6nvu2CjQ0VjaqLl8tMd1LAR5/JdPxZafhIOLGbnJ91SYFNFePahNeiymXdY6ycppVhOb0qjYX\n",
+       "0TvQdVBzzgFtsILmRE4OtApsx4CsA0+2D2GGiud1/ARgDqCHwqyKzNzErICmQOaiIxcld8/NcMO6\n",
+       "DpFO5xXIOkfk9IhtdnrEiagA1rWN24X7XK9bCUzsz7Vf9U/pppqr3XipPlVtwfQYuTdAm3qBiPot\n",
+       "5MpSjWHm4OYmg8pPlwfuj/dV0d10srCOWNAjYh7HyI1Xt8ScLlGqNrkxVhP4EIBUY5d9rvSH51Tg\n",
+       "6YCb96vOm0R/XR7H0elW6dLBDI/x1wErtjhZGazKvQYaDgamKQpkzniUIVUwqwbdTQYFtekLgOrF\n",
+       "AL6Kny5vWBIYnOI4OZCpt8/T52h4/0rUGLuJNoFFBzPnhCYw4/ZNoqlDgFSVTTfWsbMN1e60DQUz\n",
+       "9cnTpA1uvLt0KicFGu9XXr6C2XRz0FMww4lfgayadB3YJsvNyacaaqsmpoIbluNYqU8xpkDjccf7\n",
+       "O7tQbVXwmU74ia0w0KYyhZkrOxTSDhjYX6VnFSxUulBjo8CGkeYEZlgvt0vtr8jJgIb5CeQq6YxU\n",
+       "AYHbeQgg3X0YRu67IRedqfwq0JyeeKmZ56IOeBxWnpspoKmJ1Rm7G9cVHUwmrapnKiuwUvesbEe1\n",
+       "Hfc5X0nlLFjXDmjdJ06uPZ1upwHNVE4CtEw7Mk865wxXyarBqms7kDlgVf9BvXqexs/QViYww8vl\n",
+       "nV4qXXUGx8BU9anJUAGrglA3caZOaSrcTgeqzlbcMbwH6sqlHFljXp3jYI/92rbrP6jHeniedrDF\n",
+       "VYHa3LPYFbkXEZrLV52vrnMKcddNRU0AhNXjx4+v5R8/fnwl78o4z5ubxNguZ0ScsoGzEU/Amefn\n",
+       "OCMM9n23ERpPxMyzM6gi7Omm7tk5JdVmtgFu9yEwmwJtokMe08qBrW5qecm2xPNctZXnmoOXKl+R\n",
+       "kwOtK5sAjPPunghKd6zzEGikKgJjWHEetw5iHLXl/SdA67wp9mcFYggDBJmLEKYRxuqnK67fPFaq\n",
+       "Hdl+TlPHXR0pFbg6mE2A5trBZQyYDnArDgGXlwkzF3hMZAIw9YH9VO78p08Ra284qk8C+PzMT6O6\n",
+       "CcBSlFdXMFsBWwcyhhpPSsyryKcyPjZ+7J8DXNbPz1IwTY/NIFAg4zpX/yprpwu8Px9nmFURWgVj\n",
+       "F0l2MOugtiKd02JdKJgrvaQgyDhSU/PGzUtMeV7z29R7CTT3Ad6kw5nvILYCJXWdO0fVxYPuoKbA\n",
+       "5iK0CmZYNxpZN6G5D2ofverKUjPFPVvh8ZnCRy05Fcxw8qk6caxQXH+qpV6XHgqxDmoqGkPbVGXs\n",
+       "oFz0VI2tchhKXKSm5lQXSDiQ8c/npnKylwKcV8eUF1gB1+Saab0TmOFWgUwBjSMyPqYmMrYLy1CX\n",
+       "Ez1MQKaMXD1byUlUfaCs7slAq56jcSRVgafrZ7WUneQPhdgUaG6+KMfPeew/wm51rBGQHaCcjU1A\n",
+       "9sIDrdvvvkCfKFRtaoAq0KUwzBhqHbTcMYzIHNxWo4eq/+ohLkZpLu+8tnI8DlwOaqjLDgBVRKba\n",
+       "WIFI7XPbVD+y35Nt3/eyXw5oaOddqkDmwLYCNAVFBU+EEo8Jn18BTB1bkZM8Q2NxjZ7Qf7XDXG91\n",
+       "P5YqonDLxy5CU8tLlTqgubIKYgig1W/w8j5Tx3EI0FSk5q5nHTiduHYo+FQg665fjcbUMYSIA0jn\n",
+       "oFEPvL+yMcxc2/b9rbedCSN1bgcwBbkVOckztIlsm/5zxXnM7U+2vEZdy23AAcu0W3JOlpsINIYZ\n",
+       "56dAw7x6sKrgtmrgPEGyjmpCMQAqMEzecCJwuB2VMKRwCauAdtMwWwXa1M67OcA6ONSB5b6bQykM\n",
+       "MxT+MHuSrsi9BVrEfACrc13koOpwZSnKkNWS00Vq6sVB9fMnfsPZQYzbqhyB+65oxcDxHipKyHtV\n",
+       "IFBl6mNihkCe2wmO8QqUOvDy8ckS+SaAxtFNRBy0LFsZE3RgrM/Mp+DvgdGmUiYR2ksHNJ4wHJ11\n",
+       "UJvKFGQcoVUwu7i4KCM0VV7BTC27Kl3xfi4l0Sgy5Z+tYN86AOD92BM7oPFkUeCYTHo8/1CpIiv1\n",
+       "a4yu7SvAWgFa9X0W6r77rAkByGNd9Q/tTcEsBecH2oAaoynMXjigTQ3SAcdBbBqhYd0roFQRxZMn\n",
+       "T+Lhw4fx+PHjePDgwfiTDQc0XnbhBKiEveHTp/pbsWw7H0sjzHxXxjpU6aFAm0RNE5vhMZtEam5y\n",
+       "V0A7BmLqOEZGDDJ+xpRjUn0ixY4IHRnbztTGuO4KPAgqF4m9EEDrFKWOV8pXkcKKsPeYRGp4DEHG\n",
+       "S8MciDfeeOP5luDCMizPt5fdUgs/ql0d4Gw3wom/H6vq5TFQ11T1qKVMBQ4Hs2MnXQWxClxV2TEQ\n",
+       "c33k/nIUrKRy7rzPkVtCMu+hytR9+J4PHrz5v1gfPnwYFxcXz/NuYzC/EC8FlLDBVfsMs+o61/EJ\n",
+       "tKoyvB9HZwyzbduuRGAMMC5Ho1cTgJ8dOY/YQcbpNSLKyI/1Xt1blVUgcWVVVHaMQ+tA5e5bpTe1\n",
+       "VUBjZ1JBJveVXWPewS3vg/kuCs88Ao03BTiGVpVfkZNEaKtQc2DrDLsbbCyrBh7vx8BJoOV5CC21\n",
+       "zMTy7lkRl7n2d0bOusrobMVgOGpQ9+K8m6hTsB0LMWz3JEpzQLsNqKk2OKChg5hGasqe+VgHNleH\n",
+       "KptGaBcXF+XPnl5IoKnjCmrKU03qjfCDy2VdpKaijYSZUvp0uclA6yYRGpjqC/eZdYX9wGPumipq\n",
+       "cPdXxzqgOMCtRmbumAKr07OK2iqorQLM7bs+c9tXYMb5FYBheefo8fxJZMYR2mRbkXu15FSRmYOa\n",
+       "qy+iHtyp1+Jr814YnfH5+76Pl5u55JxM6FwWKmNkQ3dGoPSeE8bJFGiqTIHE1TnZ8FrVn6q/Dp4d\n",
+       "2Lr8KtwmTivHuoKYGnPOVxFVBTY1rhVo8Pru2Vker76RPAZmEfdgyenyuK8MuTPoiNmzpUmkhpOT\n",
+       "l5w8cVcjtJUJrX6yhFByBlDBbNu255MIj+/71WcpHCFNoMbXKqipcyqQqf64sq7uCkYduDqQqeMM\n",
+       "NJVOdOLG2QFMlU3h5o6rsml0lr8gcgHGCwc0PlblV71zNdG6iEyV4zGcBJeXl7J8JUJzRox57isa\n",
+       "3ARk3H7Oqz4yyPiH5pWusbzrm+rnJM997Mo6eHUAmoBsAkcHMF5qKr04UcDidBINuZ/GOYjxfvdW\n",
+       "EwGnvo3s0qmcdMmp8gpoE/ixrCqqC3XR+Bzknj59KkHmIrQ05E4XCCb+pgwjLewHtq0SvJbhhXmM\n",
+       "BJ2uXf3cFuWcDk3VvVzfqm0FdIfATcGLQcb2sAo3FmXzqROGU9q1is6qH5FnfvWzDdfGKj+Re7Pk\n",
+       "rKDVgc3JKsRcpMZtwSVa7j948CAuLy9biOExXupVesJ28A/KJ4OuJnulJwZZBTQUFREeAqmqvavH\n",
+       "ViG2GslNojkFsC5Cw/Z3usZ9BQQXpamfwvELgcnPk6bRWUZoVfu7skpOFqF1EQmfg+d2E8sNKKcq\n",
+       "GnOQY5ghyHBg3ScbCnAc7bk+cLsd5J10+uW6+ct0TA+RQwBW1ePKujxDBH85cejG17t9By+1cVud\n",
+       "zlIqgKk8RmlZpn5VwhEY5tX+ymcbtyV3HqFNjFoN6qEgw3w1yG5TbU+obdv1P0LplpvqmzQE2tS7\n",
+       "5jXK2PE6F8WoiYJeWkEN00OkGmPX/kPrVfWpiEj9DOymwDaNzCqgOV05qWy/cuoRIWGG0OOfLSmo\n",
+       "HRKhVeN5qNwq0L70pS+151SdcH99Ao0l4uqEZ2VxaJ3XKQ/jvJBqM0MlRf23Jn6OoiKjLnXLYqXH\n",
+       "DhAMPAdz9Tarin65zEG1a+9UpqDMCTVJO3Dx5zpoV1V+FWg58RMWmHK+WgqqsePnrxiBo506mLly\n",
+       "hhbm1acaK9H1VG4VaF/+8pfbc6olVxoQp+rP6TDM2ODQa0bEtYGp8lXEyNGO+rNADLVKD86jdtFj\n",
+       "3r/Ts4q2qgi1i16rMteubt9JFXVWZROYVUBjiPFxBTIVpWW7uk0BQwEsyxlgFdRwzNl2E2y5r5x7\n",
+       "BTcGL8MM24NAdWN3iKO78whtJSrpHsRineh9WGHoIRXQOOUyZ5S8pHj69Kn9F3RuWaHAovIuWmRx\n",
+       "dasxYH1PwaUmiosIJvByhtudW00ABloFMQc0BJkr5+Ua2iqXdY4R210t8xRUHNQqZ+PAgSuIauXS\n",
+       "tYshzHbsxpf1tCJ3HqFNPDt7dw6F1dJNhfWoOL5GeQ0HOfx7ZPn9GUM2z1H/3ISXnE4nuK90445N\n",
+       "REEM61P6V/eqwM9lLFNwTfenaQUwzjO0JoBjqKmILe0z2+VgxkBz4GB9Tx2Nswm3X61epu1T8HXj\n",
+       "6XQzlZNEaM6bV5O1C0dxEk6WIhPPxlGRgmRCzP3pbAc11f5phOTghP2roKXKXF6VrUQOE4N0kVWX\n",
+       "n6buzVsFNAcz3HfgctGZm7hcFuH/VLVzuh3I3LiijXBZBbMu78pyq5zWvQWaitCqyIiVkVJ5EjVQ\n",
+       "Ef36WylZ7WddvGTNPINM/eOTLkLr2lkZJQoC1+lnauQuVUsK97BatTHb2ZWvQqtKO5jxkhPhxVEZ\n",
+       "76edVFDj1UO2qwOaA1cHs25jW6jsZAKqSXCA+2hPVfSN820qJwXaJJydDE6E9jDVfmcQqHylXFxy\n",
+       "JsCq/xHgorNDDHIVangf1WenP3Us6+A3WWp/VVYgtXJs+gV7Ao3hxSBzz9AwGlPR2QRoPF4VGKbA\n",
+       "Wt0qeFYwW7Hdauxwvt1ZhLZt22cj4v+NiMuIeLzv+wf5HPcMbbpkcWtyzEf4/3CUaZXvjCTirf+/\n",
+       "iGWpdP4nKfw2Vr3pQl0o/UyN1QlDrTJUrovr5WMcoVVRT9VG1+5MO1CtQK77GQ63OSFWgY3HNdMu\n",
+       "Opv251AY4bVcj7KFLgJ056nU3Zvz2E+1r1ZEUzkmQtsj4sP7vv/f7gT1DK1aorhjqmzb/D8Mvskt\n",
+       "4q3v4ThqS6PG/yGALwh4q54LVLB1Hu7KYJB3R91wnWyo3A4lWA8DzX0Kwc6lk27CHwq3FaDheE3A\n",
+       "tu+7hZh7wznpQwWqCcB4zNT4sYOqgodJymOt8mre8vgjzO56yVlaqltyqg/vXBl7/uc3JmV0zxg4\n",
+       "j3VUhhMRzyMwhFkqnCM09akJ568psfGsh8CNjYlhhgbr2qOE68Ex4m217mpZdgjcViK07Au/BOCU\n",
+       "wYaRmYrWpkDjdAosBa8uzU1921Z9alHBrHOE03Jebt7ZkjPejND+8bZtlxHxX+/7/t/wCSpCmxgV\n",
+       "T5KnT5/GxcXFFe+VxpL7rOTJUrWCWObRw3OEhlBLoPFgqH2UyuAmEKsAgefwhjp6PqAD41HLTQbZ\n",
+       "o0ePngNtxaAZYBXcppFcRFj7qoCWwOIUAacisgps2KYKaEpHU2BNrlMQU4FFBzAX5au+VI4cz1FR\n",
+       "2oocA7Q/tu/7F7Zt+9aI+JVt2z697/uv4Qmvv/768/zb3/72+MZv/MayQjdxXcd5Hw0jFZFGh/Vw\n",
+       "yOsgURmaiz44Oso2cPv4+d/EG1bQ43LlbZ0XRj06yWNVvS76m0ZqzohduzrgscOrdJfH8JGBeh7K\n",
+       "/4kLnZXLO9BinlNul7KrCmaurAJhJXl/1fYEPp+/ImosU4e/8zu/E5/5zGdG9RwMtH3fv/As/b1t\n",
+       "234hIj4YEVeA9s53vvPadZMIhCe0A4iieQIMoZRAQcNw9+BjeC8lCj7VRMMy1YYuP9VhB7Ep0LhM\n",
+       "1ePg6yaN2uflVtUGB68KalX/8FGGAlj18zsFLhWh8/0rqKVOKsdZ5StoVePCbWGA4YsxnGcTOFbz\n",
+       "V5Xh9r73vS/e9773PT/nV3/1V+19DgLatm1/KCIe7vv+r7Zte3tE/PGI+E8nnViZiJU3VQqICKlk\n",
+       "hAeDJPfVM7ZqSVD1QXljhpmqg/vsdFBB2AGt2lCXSr8oVTQ2GbOVfWfsPO7TJf6kf+qfPTvIVdGY\n",
+       "+1xj2g43rk46mDmo8TFsSxVlOZi5+6nIE+/ntrtacr4aEb/wrHEXEfF3933/R3zSBGhqYkwGkQ0E\n",
+       "FazglRt+DIkwy7bxc5FKcPC4zs4bp6xCq4soV4HmjFm1NaMZfOayMn4VvHBs1L2xDQoc1fdfXX9Q\n",
+       "HMjUfgWvalJWdsU26xyO0muVKtBU4qDL46r2+Z6qzaruLsKeyEFA2/f9MxHx3d15FdDcBJhMEmiH\n",
+       "7Xin/LwGl4j4XAshWSnWTeDOaN21K9Cq9qsoircJfDNfLWPduCp7UB5cOSIVLVRQU9+Huf4wzPIa\n",
+       "92E0l02iMo4qJ4Jjmc6V7SZ1xPqsIOZsinVQzSccG3UfngPcBlVvNbZ3ArSpVEBzIKsmtRI2FqVE\n",
+       "pXRUGHpBFFde9QEn/kQ/q5vSkSurQIZlHcSwzEVnDq7Y10meQYZSAUR999cBjet++PChhZfaqqhi\n",
+       "MjGdTaO94vgwxJwuXZm7H+pAAY6PV3Osm7MINgUyzL8wQFNLI7XfQU15QJwMbuJgVMYKY9jhvuqL\n",
+       "Ao3qu9s/FGiTdAI39pwV0HLSrzxHU/1VZajviRPrQIYb90flM1qbgOwQoCmpJjw7RLbZzkEoJ455\n",
+       "NZewTIEM83gc26ycmesr30s5rLv+sLYUN2Au2qgiNJap51XXPn36NB4+fFh6TQZbdV4VkUy8pjM4\n",
+       "9tQVsBzEurwDmgObWm5W41f1nY/lpHWT1XlxBhj/UkP1A5eb2FcFLvcb3SnIOghVOppGK3y9A1jV\n",
+       "Bm5rBTV3/xx7XCpnXdU8XnUGTu4caO65mQJb95BZGc5Ku9jLbNv1v9nfwbKC0mq+259EXVPYTYHG\n",
+       "abXkVOOJ+u4mlZt4KMqLI9QQZvh37NT4ZVnCLeupIIbHOoBxeQV2LktbzP2JjfP8cDBzumW9TMpc\n",
+       "fxBq2PYKopi/l0BTz5GU0TuwqcnB4jpe7btBzy0N24HN9akD3LHw656LTeClUuUUVIoRWrV1E0ml\n",
+       "GBmoiceOpYIaf2bBdTDE8Pg0Qnvy5EkLMixj26tS1kFli2pucB2uXqXjvA87OhQ1t/I6nPNoWx3I\n",
+       "lP7uHdBUJxzEXORReW3ltSYT08FRgW1yHrY7wn+K0YEN61ZlHUQcWDqoqcnnyrpnaJP+qVTplaVa\n",
+       "ljiYXV5e/Q/3lb3kSwEFMoZaBzSlu+l483IdYebE6a0aEyUIp6nesO2ZX4FTN64rcnKgTYxfiVIu\n",
+       "5qdlqq2ubTyB+a804J+fUdA+dLKr+08Ax/d2efTKnXOofhfJZazTKsWoeDoBV7y3gg7+RjPb4p7D\n",
+       "uTeonQPobM5FQCoqUzpS109tudvPtjj7wD5MYKl0wHXxvOjqYzn5W05u9BRiKPxbsgpWqg0TQKT3\n",
+       "Tnjhj+bZuA+BGebVsSoKc2VVO7Cdla5Ylwytat85JtVnfN7Ck1a11e1jf3MJpPrpAMew4n0VcSiH\n",
+       "qfpd6ZihUYGMv9LHMcf6WXds652jc/rivma7J6ssrk+NXdaBLxamcufP0LrlWMqEzKvrazYANcAV\n",
+       "3PjvZeVfAEGD7/rW9dfpQBmKg5oyjkrX2K5OJiDjv0yyMrH597grYGOYdTamljYuCnNLJxchoXNT\n",
+       "yzIWFyFzJMlQS0EAZF14z87WVar66MDW2ZsT187pr3VYTr7kVGVOASudq+BYwWwCMt4uLi5k/ROY\n",
+       "qba5fAU6yo3XAAAgAElEQVQy1Z/pxvfr2uBAxsdU/ypBmGXq2sXlasOIRbUDgZH3q57nrICtA5lr\n",
+       "j2obAoxBhpIAUG1BnTi75zLXLpXm/Sdz2UWy3E7Vl05O9pZzQm8U9DrK+7FXwn28d6bTQa3g5pYe\n",
+       "NwEz1eZJe9VE7nTO91D5TKfRGT9Dmximi8y6Nqu+uiiNJduFUKuelzmgKVmBObfJLTu5XrZ57hfq\n",
+       "poroVZ7rcstNvk/laJzOOqc0lZN8hzZddqYwzDrFqH2nrGqgFch4iclAq6DgYNYZfh6fLhU6oCp9\n",
+       "OB0pnU2WndWyRe1n/3Lj50RKV127qwhN3b+ClxvzmxAX3SmQ4T5fg9dxOT9mcUDDjdvWAS3vh/fF\n",
+       "Midu7A7R8b36Dg3PUeI6WEVjeE83uJ0HY6jlrwzUAOP9sB0OdFPp2q10Wt1X6UuNCZdNQIZLThwj\n",
+       "1hXnJ0BVepleV4ENHeUkMnMT2e2re1bHqgjN6cBBwIGs2hCQqm1dX6b9V/M0+3EI1E7+DM1NMjY+\n",
+       "teRcuS/f23msbuD5JzPKI7p06rFW267yXVsmdbol+OT5WV7jlisq7aJ3pQsHLewLjw2OH++rt5wq\n",
+       "n+ejTTp753sovbAupjDLe2RUyxBgvaxCbeKInD4rcUGAgtqK3IufPuG5E7JXYMtBmEwA5ZEcxNTy\n",
+       "UoX2ri/Tfq48Y6giKndf1xbuvwP9aoSm9KWi2zRe169KZ9gvngAOBGoibtsmHyt0z9C4XW6sGWaq\n",
+       "PayjbH9OcP7lQ9ZZvRXsnDjaOS85K311YKtgrdpXOaOp3IuXAquG24XsDmrV/SceC3/QXj074DJ3\n",
+       "fLV/brIrI3CTTOVXvXcHMoxkVdTh9rF/XbTJbedz2dNX+sU2YIQ2WXIi1FSUxu3l8127+B4INn45\n",
+       "4NpZ2TmPN8Is81XbuvbiPKxAznIIxFBemA9rp+EsKjDr4/zKMwSGGT8bwnqrflTHKo/t7jNxDOpe\n",
+       "VZsc3JSxTz/bqICAG0+0ztExHNw48xf3SscqKmKouZcCHH2xPvkcbjuPdQUO9xmL+uUCjzM7CzW+\n",
+       "PM6VrlTeQZ/PVW1z47cqtwo0NfmrEJcVjuIIj2Vd5NJBy0Uh6eX5+ZmDm9t3E1L1ZQq0qtwJH3eG\n",
+       "vgIzdSz7UW24hHIwVuUYgfA5CDO3DOOooZuwnTidd2OB93IQSCgzoLNvWa6gphxzlWLegdeVqTHl\n",
+       "89U+trWLeDu5VaA9evToWtl0WVOFvLyPyxWejFmu4MnQqpZYeC8FMyxzEYXKT/qG11WTuLuvK1sZ\n",
+       "k84h8fgpI5+AogMEGzxDDGGmlmJq4iMksr4q3+lanYM6URBT16eo33FmXycgY8fTQY7boVLlmLG/\n",
+       "7hzVR+xDpxsnJwOaAhCXoXRhuYswquir2vB8VK6KyricwVKB5pAobSXl61Q9K/CaOIA8D2GWE4/7\n",
+       "rqIk1+6JPhlm7kfeeY0qQ3hxlK8iw0rv2K8KYtUEVm1BqOE3khOoOaDxOGO7uB94LNtRja+L9vge\n",
+       "7p5TuXOgoYK7fMok7HURWgW0KfSUVJCbwKYbYL4XXs/3OSavItgptDp9M8w4ulqRlagXnVDqT0HN\n",
+       "AbSCmCpX4iax2lflXA/mVVQ6gdnFxUUJMd7HtqjoiSNdZedVhNZB/YWI0FLJkzSl8hAo0+XjJFJz\n",
+       "EVoVqWEfM3UwQ5mCrbufK6tgUEVoq/rkLduPSyU0/i46c+1X/cVxwuUtw43rVWPLwHL5Sv/ZJxWJ\n",
+       "VyBz+uBoB9s/eX7Gdu0ghvuurawr5bgQvjjmDmJKdy8M0BBaVT6lC3mz3tUIrYrUugitEgZZBbgJ\n",
+       "zCaDyudMosRMVyE2BZwz1GMjNAeQFOeEJhGag5faJrICsgnMUBeZn8BsGoVjuWub6gfCrHNeTk+T\n",
+       "8enkZEDj8L0yGDXYXKaWTl1kVkVqXYRWiYKY2u8AVkGt21ftcOkqxDrvzkBTUHBt50nJ/an2sXwC\n",
+       "M/ebSAUyBTolDuKZViCrdFGN701EaGqMJzBDXTrwHwKmqt+V3DnQIjTUHNAcwLgs4vqSc+U52SER\n",
+       "mhrYCmAuyqgAVpVVx6YRoopsb2pLnbhIZyIKYmgb6jjqoAIYthEfaDuQTaDsHNUEZC5C47q4vwg0\n",
+       "1lM3H6o5wm11TkDpbTrOE8ezIrcKNP47YRG646o8xQHNRWiTgVMQW4nQKpn0zQFb9RnLKqAjULkt\n",
+       "XX41IpuejzpDMGD7UJxu1cRQEyVhgiDLCR9xfSnqnrVNojS2URWJuPFy46r2WS98LPun6uY5sfKG\n",
+       "Wtl9FamxLrpxdnPpXgOtWnJO8pUSuQxD625phKmDGD9LYOkm3wTcqq4JzDoDy/uo9qgyB/1DIIb7\n",
+       "PNEmEU4leC1CxJW5qKj6feQEZFnG39Z1UOucsnNalV10EVrn6Kst4vrPqpwuKzvLfCUO5C8E0Kbp\n",
+       "xDNgh7tvaxzMnBfDJWflUbh/043r6Ix3ZZtGwpMI7dCoLdvt/pz2MVLVwVDDaIO/UYu4/vvIDmQq\n",
+       "ClHjpcau2udr3Tl8/k1HaLkhMNX9+ZcYzmlNYOZ0tSoneYYWUT/g5cnuIMYTeAIyBTO37yK0yps4\n",
+       "eD14oH+f6AxZHUsjqkDG8Eh9VoA7BGgrHh4Borx3JVXbU9/uOoSagheDjO+n7s/nVhOvs9mb2E8d\n",
+       "K5s8dowRWt1H0a7/2GZV1tly9WeTlNz5X9tgUQbpDBajtjyOIFFLSpdeXFxcSauN76UiIde3buKi\n",
+       "UeAkdILRhmsPt2ua5zYjiNnYsayCdk6GrqwCV6W7HBvczzJlR5OJiDKZeIdurn7skwOB6pc6numx\n",
+       "/y8B61B/nhz/Dyqn7pjL878QXJGTA60SBTPMc1kFMBWNKagpwEVcf27AE86BYqWfHcxYF50hKli5\n",
+       "tIoqO5hxvqqPf2iN6bFgS1GOEMtVitdiqsqm0cUqyBxMurw7Pr2HE9cv3ioodekkvyL3AmiVcivj\n",
+       "5HMUvBzQusgMz933t57D8BfRCmwT4Wiiu5Yh5soV0Lo8g42jrg5sDoTTiKyCmmsn6q3SLerIQcyJ\n",
+       "g1oFpUMioQlsujmC+Wn9U7g5oFVwmkReDowvZISmJl6n2CqSyQitey7GW7f0zGtzQBNsOEmxbV37\n",
+       "VcSxEs3l+ZVhTgHp2sYgUzCbwm0SpVVRWeUwVpyHgpkD2yGAqGC2ArmVCcxOsYMu/izJwc3pAvvR\n",
+       "LSkVlKbLzBc2QuPJ544ro1VlKiKroq5umYnXJMByW31rNzlH9Y8NFo0RJ6OCmRJ1bAKgDmAOag5e\n",
+       "k2dorLdDoI96Q6jl8U5XfHwVZiuA47IK3JVjX4WxusbpQkVouFVL0ZUIjo+tyEmAhhNyxcicUeME\n",
+       "6h7wuwhNgQ2js1wS8aSuoNaBbAJCzDuoMdxQppHwFGgdwBTMphFbBTanr4kOGWqdnbH+qohmCqjV\n",
+       "fexztT9t8xRuXV3HAq0DnILaitz5X6xVRsEGpgasW3ZMYeaisSpKe/r06fO0e86j8iwKdCuT1QFs\n",
+       "Wubuo2AzjdQmYKvyTq8OZofqq5JjIOfg5MDlzsEJzACuIrXpppadrv88T6sl5wRe08iM0xW58wiN\n",
+       "vZDzDuyVUpyhT56TrR7DKA1hpqITbN9KVDZZWvA+6sYZJINM6RkjYwezDmLqBcAxEVkVqTn9Tvan\n",
+       "9R8a3XQwU3mX5vh2jtPNG9cXhhmfM61jGqEdE5ndywitAxpOKAc29tYun5OrA9bkuzMGmlpuTpac\n",
+       "rh9YxvlJlKfAhvrllPVc1emg5iDW6aSC2PRZmtNfpS91bTdGTm4KbNNUAU3NJR5PBSkXKToH5+yD\n",
+       "YYZgevLkSVxeXtpzcqvqeCkiNKdYFAUxNtAc8JU3mtMl6oMHD+Ly8vJGQcbHOe9S1KEqZx1jvvLC\n",
+       "LjpzEVoHsA5q3QsC1a5Kdx3UJmDs9MhlU2hUS04FtJzADLJ0qqqf2S6XTrdKBwxotexU0KogN31p\n",
+       "kIBXDpzl5EBzBHZhdzVhKqipzzO6N6JYZ0LNAXVVXLSm6q0iOydTPat7O/2ujEcXZXWO6hidTkCm\n",
+       "9itRDkEBYVLmYIjQyLnD0XVlN9g2dd+E4hRuCsIMsYzOGEBdfuUXBStykpcCHForL1AZPteXadax\n",
+       "bdvzFF/7svFkdHB5eXkNcJnfti0eP37cbk+ePIknT55cAwBGIjhZ0VDVRJyWqRT7inlnuOgIIuKK\n",
+       "7lJfT548eX5OFbnhMfWcxW2pP9Qn6hUnzuXl5VgXaF+sc4yAKpCnTeD9Ly4u7DJuJVJz57vntSp9\n",
+       "29veFl//9V//PH306NFz242I5/p94403rqw20M65LMcPx0DlM01bqfrWRWxqieoiRycnidAqiHFE\n",
+       "4bytqhevVSBTQEuYqYHOjSeZm3AJNF5SJdgQag5okyiG9aI2521ZH1lHti2PY6ifAEFYMdTUsX3f\n",
+       "rz00Vg+SEWhq8iioTeHPfUcbQpgxRDAydxHFJNLpop/KySiAcdmDBw/i0aNH8XVf93VX0gRajuXj\n",
+       "x4/ltVU+HdlkbBzc3eaW2+rYitz7CE1JFaVhhOaO41tL9YyMJ6oaROWtcqIhvFxeRaHdPk5ALmej\n",
+       "xz5PUtan+6CxmghclvVMHvwqqLnoDAGLelB57BMCHPuDx3Gc0FbU854KUnzP1bQDGZbh4xR+pJJj\n",
+       "gDY33RBoPD64n2PC/VB947k4gd6K3PsIja+txF2b5RhKd1EGGo2aXG7iqQhNgcwBzYEty9JIFdSw\n",
+       "3UpnCmLOiFyZmlguRc/LQFD7qNsqxei9cwJO8DjqQy2L3D5OUKXfymlU5dgXBTSVV3ac/csILaGD\n",
+       "emEd4X4CjZ+XccrPurhfbr+Cf6c7J/c6QuNOVZFHRFiQZd0uoug8IYOMU8zz9QwwTiuAqQ37ywaN\n",
+       "S2XWDepEjQcaJZdhOtUZA617VqKeqbnnbeksWGduH9Mu7575OOBXenXStYXHvINaN778+IXzztGh\n",
+       "o2G48daBxzmXVdh3cu8jNGc8PLlVKJtAwSUEGoQyEi6PiHKSqYfVeR91bwe0bstrsa8RHmpTQYPH\n",
+       "tvHDegS20pmaaAg0l0egVc/Y+E3axBlgeeZZb3xe6nmyrUoHYT5W6Rj3O2ehlsmTZ17VGPA+Bh1K\n",
+       "190xzqv9iZRA27btpyLiT0bEF/d9/8Czsj8cEf9dRPzrEfHZiPiz+77/vrr+JiI0Z0Quj8aO0OBJ\n",
+       "4LZnfbyydW/ocIDT0DAiVIBbBVq2C2GlYIZAcwaB5ahrfNidHpnf5naOgCM0Fem4yIcjQhclVrpT\n",
+       "44d6q9rLTkNFMM6pVDp2bar0OAFabu5RSETI5XoHPd668cg6pmPB5eo8LFuRLkL76Yj4mxHxd6Ds\n",
+       "RyLiV/Z9/6+2bfvLz/Z/RF18bISGIFPLSbePyo3oHxp3ZZMH2rgUSsAwwLiPU8NWE04dQ6iloEG4\n",
+       "fHrjLGOgvfHGG883vnfVZuWQXGTQLUtxUh0a3XL/1bMnZa/VPtuNKlP6Wnn0Uek7Iq6MUTqdiKvP\n",
+       "A/OzDWe3Lp2MR87Nzi66MgXwVSmBtu/7r23b9l4q/lMR8aFn+Z+JiP8lDNCOjdBwQDLSyTryutzP\n",
+       "lEPfY/MRUQ4klzPIEGYKbjiQPFF5gFE/2FaGGX6D1KURcSW6TD1mWU6Wr371q/G1r33t+bWTDcdk\n",
+       "slVLU86nsXdOAPur9OuW6gpuqpzvUeUZoG7rnAWWR8TzscE3m+lcc3xzHNVjEvcopVq2sv3nWEyA\n",
+       "zXm3X42Dk0Oeob267/vrz/KvR8Sr7sRjI7ScxBFv/fQjyxXMUDiim4hT3kqYjoPCIOPnMxw9sLHm\n",
+       "xMV9hhlPFgTaZLJFxPNIDF8mcIT2ta99Lb7yla9cq8PBg4HGY8V5F8WpfF6nIOUmv4qs8jg6ArQD\n",
+       "7pM61ukDN/cBq9qvgMb35u/O8HknR2jVpzHu27LuUUHq1sHZgVvpIOtFh7UiR70U2Pd937bNPh29\n",
+       "iQgNBb0O18mTXYHO5bv9lYFlqCmQcd9xciHoGGqqnwpmFxcX48m273s8fvz4SlSX/ciJkUD76le/\n",
+       "+rw+rJfvged0YzGFHPe9ilpUW9RynScTO4JDN7e0Uj+5Y6AeAjT3q4Dsc5Z97Wtfkx+Hu7x6POAe\n",
+       "GSh9un385QV/M5dQy3HiednJIUB7fdu2d+77/i+3bXtXRHzRnfhzP/dzz/Mf+MAH4ru+67si4up/\n",
+       "LkJo8cRP6bxwSqWALqKrwKcgkm3CtjuYdcsLtczgssq7O31kWzudVdd3gmCe6L0qq6A2bYe7f4oC\n",
+       "Der1JoDmIFctuyZwVlvVbwYRrzBY36kf1BPrsLOPiQ2pcXbjFRHx+uuvxxe/aDFzRQ4B2i9FxA9F\n",
+       "xN94lv6iO/EHf/AH28pc9IIdxqWQ88B5vJIuauuOde3mCdKF2RXAuIy/AFdAW5Hq/M55uHQiFcz4\n",
+       "HGyPG4sOxArefN1qVNTpZqK7SicrUUkXzaqoyumPIZv1p/6rep3tOyer8k4/r776arz66ltPtn7r\n",
+       "t37L6qP7bOPn480XAN+ybdvnIuKvRsR/GRF/f9u2Px/PPtsorq+qbyWV5CI0dY/K+FmqqEzlVX9w\n",
+       "sCP0cwQFoVWoqb8Uoq5z7ZxIF13w84zKMFF3q2nVPm7rMf1y/bwpkKnyia4OBZr6MqAKFlByhYHP\n",
+       "sRTI3OOirGMSbbIOjuUESveW86Pm0L89qbxrqCO/itAwVcrhpY8DWxeldRGaGgg8bxVoDmoKaO5P\n",
+       "HClDmRgJ64tTNeH5/EPlWLgdIwrU1RJ+BWiYr6DGesj8IUCbgIzfkOM48vji8pvrUGUKaMo5q+Dj\n",
+       "puVWfyngGsxkr45lfhKpOUOoDEQBbAVqvK8AVu1PYJbXdEvOQw2mgpmLAlel0u0EYui0uI3TdrnI\n",
+       "4RigYb0qj2mlmy6Cml5bwU199oNtzjmGb3vxOpybXJbnO5Ap/XAbVH5V7hxoCjxMfSxDRVeejyOH\n",
+       "CcRWYFbBogJaB7cKYpyfLjkPkQoUeJ8q6lUy1XHnRLJtHfS6CVP17RigqXtU963GagI0hPsKyNim\n",
+       "GWaZIqx4fmKkhhGagpjr8xT2q3KSCA2PK4hxVFZFZnyvQ5Yq3URzA6GMtgKaylcQ44m2suTk9k6k\n",
+       "MkS8D+tJ6b7Lq+VPFbFX+l/pn+qncy6HAk2VTSA2jc7YJlWE122sl+x31pcpLzMZZjg+KzqrxucY\n",
+       "ORnQWBHsFdjwUYFq6wyBB7KaTG6CVd6WJwf/NdDus4sqv22bXW5WS87pOLgy1TdeZqDOJuOgdN1d\n",
+       "U9Xr+q3gwn1i/a0ALSc/1+/u7RxO6oDt3kkFs2OWm6pP3C4HM/XTJ95f1cmhctIILc9Ry0wVmT14\n",
+       "4P9L0FQ6o1GgqyYEt8OBrAJaBTV+jlY9gzvU+yldqomPETPrjMfO6dWNA+u5a3+33/XR9W8CtDw+\n",
+       "uT/rs2vr6gpDRV/VLy34Hjy3EmrbtpUAy+M89lV/pzo4Rk7654NQeEIoz6Cghtere1XiorQKas6b\n",
+       "YyRVAY1TN6HcvoLiFOpTI6qgjecoIHXOogKbi4hdPyY2UPXtEKAd4zyq6xgyU7BVAKuWm3y/iKt/\n",
+       "PHQCMyzjutS4qL4r6B0j9+IZmkvRa6glZ3ePyjCOgRlPBJ4U/LMWTrsIbQVuatIdOibV5M19nhhK\n",
+       "p7jvzlMTbAXKDmQTmKg+3iTQVgRXJ1ymzuX7Omh1kEO9sJ7UKqmCGV7Douq/Kd0puVWgTbxMp4xq\n",
+       "4zCZvUXEW16n+rG6iy7cBM228n7VxwksFKzdZGUY4HIClw2uTqxL/QFFnAhd31x5BT8+XtXjylYn\n",
+       "hnNI6GyqMZk4jkpf1TW86nD347K8Vv2kqYrQXNvUWLC+c95hvnOU03KO+lflVoGmpINGSjWwCKnM\n",
+       "R8SVffZWqlwtmVwb2Wtx5MhtR4OrJrW7P16bZWg47EnRc1Zg5LJ936/9FQaGm3IGbgIoz66isckk\n",
+       "q0T1RwnXzTBTUXXnSBloKsrsJiZfo1YCmHdlaRc8XlOHtCJqzDkYWHUyKU5fq+0/WYQ29RQMMn6e\n",
+       "psA0nUQMI2d4Ci6ZImSyXMFzRWdoIAhrNGLUS/WiRE14hqL6/6IcrXVAxvq4/dU4YJnSxaGiQFuB\n",
+       "jP9KSQcyBzS8t4s+O6feRWU8ftVfonXOW+nLCY4n7x8KMCVuHq/IyZecTpwxTSItBhZfw3/hgyMQ\n",
+       "ZXA4iFlngqTKd5NWRWloLBVklcFjnQpivI9AU/8yTum5MmiGewU01guPYxftdNGZOk+Bjf8wZgcy\n",
+       "3J+AmduNZZVjn0At4vrf6zvUmVaioHZsfUqUfazIyYC2opDOYyY4FLiqMvx+BturQFLtY4TEMHMD\n",
+       "xEbJ9bo2OGhVk9vpmiM0hlkVoSmdueMVwFy+6/+qOLthqE1Axk4IIzHnZDlqxWMqnUI17VgtNQ8F\n",
+       "baVDd62KWKf1ODlkvO8caNOJkHlnUAyMCiCqjKOsvIeK9pS4ZR4DE6O0Y3XH5Svwqu6Tf4ue/1pp\n",
+       "9QxN3c8BehVmnHcw6Prq7G+y5MxzqzzbVjXOCs5VlNpFZphXz88qZ+p0syoOYpWjdqLafEiEebKX\n",
+       "Agi2KoKYRmRZVwU09RLBGaprd9WvBFpuhzxLuwlPulJPRmj8H+BVhMZjNcl3E7cbPzceSirAKRvC\n",
+       "t5v5O9kp0LB/CiLOCVQTl/tdQQ3boF4KqCgN00NkxVnyfTiQwfMqBtwroLnGpJG66IxhV8FMPadi\n",
+       "eLlJhe3sAKAGhNuX9508x3BerAvdnZc9NL/v+xWYqWUn609NbpdOIIZlEw891ac6p4vQ8Lyuf8qu\n",
+       "+HMGp3fus3Ic1VITbW4aoR0qDkZ8HPu1Er2xOAc6kZMsObOhXYO75Sameb/O46fxOK839RTch7xu\n",
+       "5TkGD3pnOJXH7Y5V16h/kIEwc6//1fjg/kr71KainEOMHNvLdqSeoXHfXIptRCfKz2adM8R+sq4r\n",
+       "2+T96hnasUBTEXIFMGXH0yibIXZI2+9FhIZ5lTLIMs/3qICmnmdVnnDSDwXmSYRWTUiu0wHBwdqV\n",
+       "YT+4TH2DNvlsI/tSTb4VuPLX7fzyhnXSCdtVBzNecuZ1mGKebSni+hvwPL+DmdJ1p9tMq2/QWM/V\n",
+       "GHSiQKX0rUDWjZsC2cpYp9zLt5wYTrvoDPfxfm6C437mua6qD065Cjwu/O/67wbRAWvly/CqTP1f\n",
+       "Rl5yYvt4nNQYHQI01F8V6bBelLjJWsEsXwrw+S6P7cNoUn0zqNrnwMY2WYFNQdE51A4OSlcOTnm+\n",
+       "ymN9K1Dja1ZhFnGClwJKHEyqgXSgw2cYbh+frSlIYpucMTqZQmYyyHyOgli1n3V0gHMgU5MEdYLt\n",
+       "Rj25Scxj6wx+avhd2aQd2B435m6fIwq3YdSkNv5v5djuzu5x/DhCUw51Agl1rrMddUxdq8p41aRS\n",
+       "toluPE/yF2s5dObySb2qHtVh3ne/NqiiQCedkVQGvvJJhwNaBzdsg8tXE60DMU8s/NnQxKA55ck/\n",
+       "cQhcH+dd39QSjSNC1L8aEwWlyX6XZ3t09p5p9YZT6crpX8kE1NPx5XrRrvH3xxHXA5IpkE8KNLU/\n",
+       "ua4bYBT1S4AVmKloBOtRA9WBA/en0gGtW2ZUaQde3HIiu2jBOaYJ4Fb6V9WDeQSP6hfv57iyuEid\n",
+       "oyuXTqCXZROgoY5dZF05s05/FbxcvhsLHBOEGIpaWa3IyYDGeT6fz2FwuMmkhKG2AjNnyEomkMAJ\n",
+       "VE16VffEqHBpyHU6o5tEMdjmlcnWtQXzk6hsCjQHywpuFdBUOUIIAebyHcwc0KofzEeEhJmK1Jx+\n",
+       "JvbBqYukJ2ONkZm6NuLq76FX5ORAc2XuOgZcBR4U/pMnE5ipJSEOCJdzqjwcpthmVx/uHwK0ru6b\n",
+       "BFo1HhPIrmxO55xihOaismOApp49umeSDjqqfajHSu8RIe9VwUKNS+UIFMxUm6djHRFX/v+nEzXP\n",
+       "OjkJ0DJVYFPXccfcMqcTDGenMGPo4D2dst0kVF4Nr+G885SuLlWvatu0vVivAxqPRxWhuX5yGyZt\n",
+       "Unpyx1aAnf2pxIGk26ZtYFusIjVsRxehdQ6gs4kqwlR258a6KkObYqc/kZMCrUsZHApiXWTn2oXQ\n",
+       "qmCmIg0Hs8mk5AhtJdqoIOZC/04PkwnPfeiisi5Cc/tOb5Vuu7xa3nRAyz50usPJjRt+BoP5KVxR\n",
+       "xwgx5YgjwkaAyslVMHNlrDd3v8lYZ9+4nG0n+4r1TuTkQHNluY8ww4FW+ZV2uU1959ZNRiUKYmwY\n",
+       "DLQu4pgAzRlApduVKIb1h/vq2NQzHwKrKn9I37B/kzHmyY0A49R9VtHpGCHGEVoecy8EJpETl01s\n",
+       "T/VbwdDt89xX++iIVuRkQJvmcx+9rYLaikwjs6pu58XcNlkGTCZmlWK+cxiYV21yk44dSAU2p69K\n",
+       "j9PU6YrLVBRa9U+13Yl7Zoa/unB/vcSBzdklAgzBlu2ofuHBuun0zec4ffE93bhymdOvCy5W5KRA\n",
+       "4313PsOMQbYKtLxmAjMXabjyFbjh9e5cPNYBjScEttOlEf5bLWXIqp6qbieTKKEqq4DmHIEC2zFA\n",
+       "40nNPyGrfhvr2oDj50CG+xH6OzQGmtNnNw4TmDHQHDhTFAN4HuY9V+f2Sf7rUwU13ncw43TSFlYU\n",
+       "K9ApVrVJyRRuHO1MIThNsc9uKciwdhELw2AKSjUmnbdVE88dmwAtj3cAYaC59rOoic1wy79i4u6p\n",
+       "8vv+1n88Q3t0YHPRWQc1pV+Vsh2qZTZHaO4+rNtsf/ZTzc0VOel/Tl8tr2DWgQ0V45aT3TLT1emi\n",
+       "KfY2CDKM0FaAVsEM8y7KVP2b3JujhyqtxpHHojvm9qd6S11PtxWpPtOo/hSTStkhZdsTXgk51HVG\n",
+       "aApiOF6drjuYKfCrb+0qcfMv5wmnCsQTOfl/Tj/m/JRVI+8mrvKm0xQ9Dnoe7h9HR6vtdAOuIjIX\n",
+       "nVXnOj3fVLSsxo/Luok2GfdslxpbjmouLy/btmM7+KF/9YmGajOmSo9KT2qloGyhGn+sa+owJvpD\n",
+       "qZNw3ZoAAB8OSURBVFZdzib5mKqnk5P/OP2QaEil6vwVKPDAqf1JyjDL1PVVtaFrPx9XOu02PA/D\n",
+       "fG6r0vchEKuEJ9ZEF5PzssxNRjUxsQ0sXDb5xqwbQ6WHLro9RN8OkHhfLK/mjdMl2g62s4JZBd1D\n",
+       "+nmvIrRjRBmMO65CaAeyVaApmKXnX4nQXJ/UZGDPnOkq2DqvWMHsWLBlfasb6knlI+ofqPPSifur\n",
+       "8rnfRWbKRiobTT1kWo3DVKfTcXXlzrGzDhlg1T2r6Kzreycnj9AOkWpy5/5q1FN5IDWgFdAUzCZ9\n",
+       "mUQhlfByZbrxA2jVRmxPBbNjwOaiBHY0DvCqLCNQFY2pVLXHgU3BbOV7MCUcLU0mt6rXRUUTeKk5\n",
+       "4YIB7G/akutX5UiraG5FTh6hTb2NO7ea+CueXi0RHMSqMhwgB7Osn6MS1WbXb9btamSGxleF/U7/\n",
+       "xwDMQRPzbgJlHq+pUgZaFaGt1Buhf/rULTuxf0onaEOVzpQTd7p2Y7sKt0p3aAdsW649eFyB7hA5\n",
+       "eYQ2UbCS6twKEB3IquXnBGir4iIMBzT0tGgAPBE6kHUG5KCDE4/P6+A2dXDdRFK6cdFUAo3r4Wdo\n",
+       "GKGpOlUZRnzd0rNqs9NR5VhwHDqYYZ7B6cDYjYHSY0KM/2ijg5eL2FR7p3KyCM0NpiqvPAmnHSAm\n",
+       "YHODV0UMCLRJpIMGNWk/16tgxnqvQNZtWI/StYoYDoWZG3c3gXgSV/kqQlMvBrivlY2pTxecQ6za\n",
+       "rPSgxoClAhnvV3ZTQa1aufCYsKjvyKYgO1ROFqGxp59KpfxMHci4TD3jqKI1Bz3cn7T/4cOH4/Zj\n",
+       "ykDisH4KK76e63U658gA5RAjdNGIczbqE4EKEh3QVITF/VV2k/tqyTr5ZOMQnXCf2CFy/dW4IzQ7\n",
+       "x1BFaKg3vrerdxKd8XkrcpIIreq889buelXegYzBpQDnorMp0Fx0kzBzxqP6gXpJ+OBHiJ1HVtGZ\n",
+       "i9bwetYtj9NU1LmdbVTRgHIclU1wPd1nFnhd5xC7evFcp4NOl5N5VF3bgU3pS21VXxW0Kpup7O7e\n",
+       "RmjsSTCNuLp8WYXYxLNk2g1SlarB7IA2maxdlMYpR1TcP2cUE7gp47pNUQDmMawmU+p5OqEnkRlH\n",
+       "aG7D45OlLNqIchJKH05P6jqGhrt+dZynzp4jtLQv165JJHaMPd76knPihSo4TferaGy6Kag5b+vg\n",
+       "5vqq2ocTmVMuU3UiFFHf1YYR3irUKoBMJ2W3n/fpIoOpOKA5EKn7T+ylivhw6+bDygRmm8LrK4c2\n",
+       "jcRX+o1ON1cOPF8727wJh3qrQPvyl798rWw6gTPtIihlNJMtBQc3BwFBwRMiB8wBDf/nI2/VYGE7\n",
+       "lOfO9uX/ksSU8+q+Kq8+21BjkX28uLi4dn43YVQfXb8RujhWDG4GWgdapf/sE/72Ev8s9CFAq5yd\n",
+       "6m+VdvrCfR7T6s9bZ58fPHhw5Qv/bGfm89jFxUU5f7INOU4XFxdX/tcp/s9TVd6dd6/+ScqXvvQl\n",
+       "e8yBbeodKlB1kFRQq34QyxCr9jkCqp5V5b1ZXBmDTAGtApuDXRUluSW18vQKZhOIuzFwk3KyXGK9\n",
+       "Yds48kuY4Y+9pzCbOl1sj4uiuAwjOlfG+q+glrDCKArhxh+DT8YC2zUB1iHbitx5hBYxg1mmDlgT\n",
+       "RUf4JQ0e58gAr8vBmj5Dw7p4ok+jNLWPhurAhsdU5KaOVcsOnLBoXG7ZUi1luv66CE1Jgqca9zxe\n",
+       "RWj7vl+J0BioDmxcruDGedXPaj/bnqkrw3p5hYG2nBuCLK/BP6LAf1Chi8zwXGWfbh/LVPSG56/I\n",
+       "vQFaB7WuLMUZCOfdNTjJMRSvojIFNK63muQuquF61GB3+Qp42VelW+wTfpKgIq+u/RNhmFU/oZku\n",
+       "ORFo7KC4f0+ePLlybAKvDnx4HPvo7KKzlS5CYyBfXFxcq5MfiyDAEGqpv+zbdMwq21O22G33aslZ\n",
+       "AW0FapO8i4ZUXl1XlXcQw2N57SRF6SDsjMEZR+URcauiM5zwl5eXz5+hYZu7/FR47NR9eAKxVBEd\n",
+       "R2jYN7SPKsqawMtt3MeVTemC9aYcAes0bXTlvtO5lvd2KwH1aKSC2L1ccn7lK1+5VsbgWom+KsFB\n",
+       "wzwrOweUr424+v868xr1jKyK2ro2qjKGgdp4yTmBGm8utGeAMcj4s4ZpvyJmY4hLqEqHCB1XtypT\n",
+       "S2sGNraX4VUBjqHlyrD97rlqpwPnKCpHwJEZLjU7eHbLTLRLnGPdNrXbPL4iJdC2bfupiPiTEfHF\n",
+       "fd8/8Kzsr0fEX4iI33t22l/Z9/0fquvdW041+OoYtaXcVx4Ay7I+5/0zz23IwapeAmDUxu3u9vHe\n",
+       "laFNQ3QHMJfnZQr3ld8CrgpDpBJ0OFWkUS051T04QsN+Kpi5h/uqDO/Z2TFGM268Kx0gGFnHeD7X\n",
+       "hW1GXTiYVc7IARiBxgGFKl9ZVaxIF6H9dET8zYj4O1C2R8SP7fv+Y13lFdA6qOWgR8yWcKwMfpCd\n",
+       "57GXx3K8Z7YlIsrlJue7qBPLUZSRsBesjEGBDFOXR2ArmF1cXDxPnzx5IsezE56I1TVq6Y/LJFxW\n",
+       "OZ1y/WrJiYB68uRJCTIGGjuvrh0IMIZZ9gehgjBS+sA6eC6oVQb2oYoOsR6WatWQH9YytNCOuWzl\n",
+       "8ciKlEDb9/3Xtm17r+rfpHL32cYhYToqXA2EmswMEjQiLOvyHI1NgOY21AHeyxnM1AAqkKkNz+OI\n",
+       "RS0381zVflfGk3AqOEY8Ofkh9STPesX+5rlcvxtnBtxUHww0hBnCIM91euh01gUKCmh4TyUVyBTQ\n",
+       "2HadPXd2feNLzkL+0rZt/25EfDwi/pN9339fndS9FOjA5iY6K2rbtucT9OnTtz4GTMlz1JvIPM4p\n",
+       "5hW8KrjhhMGJk3104iKyLlRXy8kE16NHj67BDMsw8uS/PvHkyZNrdVdRCZdhRIZQc4BD2OA1uPEk\n",
+       "n7SFJ28ez/HBKLCCGOdXhIHGMMtzIq7CKW2N9YL1sv5QWDc8b1Q9lf6UfSqgVXNV2bLbv+klp5K/\n",
+       "HRH/2bP8fx4RH4uIP69O7H4pUEUxmXakR6A9evRIvmbG6/AeVdSHA66emXVwywmSho9vQCfAZsOZ\n",
+       "PG9AmCG0Ms9pnq9eAjx58iQePXr0HGoYzfE44nhmXgFsNVJDXbHNKHtSKdfH5/CknjxWqIDmIh0V\n",
+       "lSHM3LW8oujuyaBS9VW2zvVW9sgvizgQqLYOZHcWoe37/kXo8E9ExP/ozv3EJz7xPP/KK6/EK6+8\n",
+       "UkZkXIYTwE0ON3grE6cL51fqOWY/ojaELmTnaE15O+UYOsPOtiqH4/rR9bM75iI6jrI6O+A6J/lJ\n",
+       "NLnanzzmbF1FYnzc1ckgQX0wYBKik005EKeziYNmoLGzZjtN+/z85z8fn//850f6Xwbatm3v2vf9\n",
+       "C892/0xEfNKd+x3f8R1X9t2A4nFzzzblydh5KpbJxMLzqgiz6+OknxXM0BBUBOegxobS6WkSQVd5\n",
+       "p+NJVLUCzC4aZHtRZZny5zf4iIIn7gR0auxdxDmBGJ+jXihU80OBSx3DiLKTmwBa9ZjlPe95T7zn\n",
+       "Pe95fr/f+I3fsG3pPtv4+Yj4UER8y7Ztn4uIvxYRH9627bvjzbedn4mIv+iur7yKSllJaqJ1MOuu\n",
+       "O1Qm0FoFWzfBKphVz9QqiHXRWNUnfIjurlP1HJJOpIriKv12DpIhxlsXvbmyym6yLhWxod4Ttnxt\n",
+       "2gf2A/ub46/6lvaBx/A+3bisAs3ZsbPXFenecn5UFP/UtHKnhM4jo3Te1UVl6rpVmURZK6BT/cT9\n",
+       "m4RZB7bKYFT73Xd2ri+HyDFwi9BLU5xoWd7l+boKbAy1yqYrgClbS72zLSfYsK4EUJY7qK1skzFQ\n",
+       "91H2O4Fa1Y6p3OovBfDNkZNqgqdMjbHzvBO4KcDgsZXI7JBopDKK6XKTP5ytojSnFwe1FbmNqIwF\n",
+       "J7nb72zFORLshxoXPNZFqB3AFNDwA1ucSwgtjNr4PsdAJO9RAcUFFB3IKkfNZTcaoR0rnUfncyup\n",
+       "IrGbiMx4QmA5G50qPwRqqo0uGnAGUH2XNl1+Vjpx/V/V66HpVBgqbC+ZOqBXERoCRE1i11bVlxWg\n",
+       "pXTP8dB2MaLDNlaRfgU0p+vKdhXMuEytFlzZitw60I4RNTDOI1SGOmmnAplqwyEAc8dW+9bBbPoy\n",
+       "QOmFJzNHZrjvdDp1IisQc7py5e7aylYULBTElON0bVaAQgB1QHP35jaoSLSK0DLqOgZorHOeb2hn\n",
+       "CmTKSVfXrcitLzlZVieDm3wdvKoJ2wlO2pVIrIMb51XbJiBjqKklp1qaOu/JMFP6wG+vFNR4cmFd\n",
+       "lR6mOnI6q85l25hs+OzIRWYIDtV/7Efl5CrAcT3O1hloCkR4jKO9KdA6veU5DkoV5DrwrcidR2g8\n",
+       "GXBgqsa7CYh1dMddG3lCT9vE9VTG2Rkt9yNTN/gcgVWRmfoGrTKWDtCq3RyFcN0TqFV5J5Px6QDG\n",
+       "+nAwUxN42tZJVMb53GedYprl+FZU9d1Bi99sKqBl/ZVec7+DmHOoHSynctIlZzURuo50HT9EIQpm\n",
+       "q5FYB4JJtOYmngJU9cV1tU10w33gXzsonaPeXB87qLnrDpUJyLKMYVZFaa6NlX2wTeH5eYxfpqkA\n",
+       "gPuVcHJvozuwKbipscF78s8IHbhWYcb3WZF7seR0EwSP3wbVXTTm2jKFm4JYBTalFzX5JktOBhpf\n",
+       "p3S00lcnDDOsczUS6yBYySE2xJOvghlPuq5fk0is029lK9nO/KE41+OA3W3uL32gU0OoYVsmKYOL\n",
+       "21qNZSUnfylQNdrB6VioVe1Sk7L7axrd8ax3Cjt3fhX13IRRrLQb76smnIOU6g+fMylTDkC1jdMJ\n",
+       "zCei+jg5h6M0zCuITNqBEHJ/yqqro4I9/l6Zy7CtCl4un/fF+1f5qZwMaOzB2cgm3rQDGd/DAUUZ\n",
+       "PqZoKGpbidacfhiO6U27+6q/8sHQcBMIIwwHzmmElvpCb451qP6qlPNqH8tWQKbEORLOd23Ee3OE\n",
+       "pACs7hURVyIeJ1x/Rurux/Td2DlHhW3PNNumQIfgciuESYTG+RU5eYQWMYvSKsBVyylsRw40GxkO\n",
+       "Grdn2+b/qHYCti4CYrCp+7PB4r2xjJcgCmo8Tl273Bih7jpwOZhVIHOTzpWpyTGZJFU7uzZiGxg6\n",
+       "3f266KyaS/mXLxBqbBvYp4lj4r6oiB/hlul0y+td9PzCAc01VhnfSrSG53NbXCTSGaD600AKYvxA\n",
+       "tgOCOw89X6ZssOqe1f0RYux1XVtVXagjt491qn66Y6yXKq/uN50czuGxHlz7JtLdQwlGOitw72BW\n",
+       "2X7Xbi7DPK4iOLjoXkxNYbYKtZMArYqGMN9FZi5Cq9qTA4D3qyZqhAYa1lUt9xgEbr+CWrapAqr6\n",
+       "+JXrdFDr2r3i2Ttoc5lKXZ7rdpNPTRB3PurB3W/S3k4q26x0xse5bNu2K3+XTDk9ZXNd+117VRnP\n",
+       "xeqzoYcPH8r5jPc8Jko7CdBwMDBKSFGQ6wCnIja8XzWx3L1TVpZ6VWTjIFFBLfuiIFotd3nJiRDD\n",
+       "+7tIlvcV0BTAHNQOgUQHus7Yu8gM61Rwr9p9E6L0xHbbpdu2XYvOpi+pDulPBTn1Bl6lVYTm4DaV\n",
+       "kwHNwSxlGp0pkDlPPIk03GRUD+FVGV6D9blJiWUIMN4OeQlRHWewTa9V46Sgpvp+E1DjtuP9Mt/Z\n",
+       "gpKqLW68lEwgi/1wUHM2pHSEfzq9ejHQtZ3bWM1NZU8dzKoPvKvAZUVOBjQ2TBVmTqIz932LagPC\n",
+       "aaXtDmgqXZm0HUQQcMpoq+Wnis4OAd/EETBglB5vOsX78v5kQlSgUv2t+o/3n0Kf++LawPdWbctn\n",
+       "rJPnZ1OwYZunkbD7RpK/l1QrKTe/V+XOgebors7LtFteVp9yZDvcpJwYmnuLqMq4DmVAaqI4mLk3\n",
+       "nW6ZOYVQJatAS3Fguw2ouQi/WrIwDFU7O5A5PXSRqls1qDzfp3JCEXHlP3U5G8F7VH1YaT/220GN\n",
+       "YaaAVs3dex+hVcpBmXSYv3XpqF55qCrvgKGgpvrd3edQmE2WoKr+leOHwFFB5JjUOQI34ZxT6yKl\n",
+       "qr+rzoEjnA5evL8yfvlCoHssoe416Udep4DHMOv+jFX+4YQJyF6IJWc3QapOTaI1p4Ru4lRlDmiu\n",
+       "rNODMnQFszwnIiTMKrBON4aP6/vqhFb9PQZiVdsqmHH/HFw6gKxIBbNKL6o93ZhG1B9+r/bDOQLX\n",
+       "x0wnkRn+eSu+Vwe4qdwq0JR0EZeDVPUtiwNfCg7KoZOrMii8j+szHlftnA6uCsW76KTSv2rnTYia\n",
+       "pJifOBgWjggmxo/XuPpdWyeiJn6VdzK1Nwc0d6zbn/av6281P91K6oUD2sXF9eqr71PcWxL1r61Y\n",
+       "USsK6CIBdc7EELp75xfSeL57tc0p/8Ng9ee2V/7KRuov28WOJNs30anSiXMgTsedLicwO2YiKKkc\n",
+       "b7Z7EomhqCgRj92kTB1uxPW+5kslbHN1D/dZBl/HOrtpuVWgZWiJcgjQ1PEV0kdcHcw0qA5sKs91\n",
+       "ZX3VvitXb4LUGyGEF0Nt8ue2K7hlu6qIt+qTMnY3kSqoOXFtqOyga3PVRrwWJx5O9HwsoM5V+929\n",
+       "+dhKBNWJq0vpScGMz3WiIrHuuXZ3j3sXod0E0CqYHRKdRXhYufA88yrlJU2WTfLVq20GFEdoq5GZ\n",
+       "23JMptHOBBI8oafLz65+BhpHlTgxDxE38TEyzLKc9AwMBbNJhOOO3RTgOrAdEuUqez40Qqvm0Iqc\n",
+       "DGgVwBzEuggtQlO9Mia3tFTHnTDUpin2dfJ2CKHGD1lVpMdG5oCmwKDg0MGMoxrU2wRwSq+cR91V\n",
+       "k/DQCZHXYn/UvflbRo76WabL0WPrmNyDRc0fjNK6+YTXTCJnZys3IS800FRY2xm0m2BuU9cq4cmn\n",
+       "2qOMpgIY7ncRmvt5yVRn1ZLTeWue9Or50EreAczpsoMa1nXoMysHdI7QVmHT2ZarcwWMnajo2+mx\n",
+       "c3Boa110xjBTzuNQOQnQFKymAHMG3Ck/4npYW4GMDbVStPNYPAF5vwKZSt1WLT2d51xZcnb9VTpe\n",
+       "hZrTa6XLyfFDhe2Eyw4VtyzH48rZTtuq6uqg2+lzmlaPAZwOOlnV+b2N0NwEVJ4Al04p1SBnugI0\n",
+       "lMpLZTqZeByhOYgx0FZeBjDQWH+T9uJWLYeyvhWo8bWr7VGTj8eqe07VSVXv5Gd0eC/Vnm5Zvtq2\n",
+       "VZk4iEl+4hQZ6FV0dkjf7n2EhrR3aWXQE1kBWqX8LOsintxchOaiMvXphnqpMHUGGKF1y4VKdwgy\n",
+       "1BGWcd4dc3rl8e4m1op00Qvv4/lq6ekgh7bj4Jbt4XZNYDeJ5pw9Z94FCRXsun3XDnaANyEn/Q5t\n",
+       "CjQ2ZgUMJZUyGVgKZPnBIg7QoTBT5d1LgG6ZWT1HY/BXQOu8M/ZX6VQt5fm425+ATDkxvEY5tUMm\n",
+       "yNR55TEHrgcPrr80wHZOYda1b0U654x6w/ZPHLPSucu7CM3J6jie9LONFaB1kJiImlgOaurPrkT4\n",
+       "h/+4P4Ew992BS70MmLwUUPByYJg4i07Hled3uuf9FajxeKgJdIzXZ/CoujA6Y4Ax1DgKmkC+gttU\n",
+       "uqjN9XMKMuX0qrZgfuIcV+VeA+1QZU4mD0Os+j2kkgpkFUAwVVFWF5lVIFNvOKtUGa/rX6bTybUy\n",
+       "CatnaLyvIjTVZlXvVBzEuBztRcEty/J4xNU/s81bFUE5uFYOVumE68ZzEMIYvVdzEccDZSXCxLl2\n",
+       "DMwibhlo6EmxzEVj6vmPMmzOO3ERAkKr+tNAmaolWooC2DSvIKaepXVbB3/st5o4CuBVdFXp3TkX\n",
+       "vi9PKNdu15fVSaQeM7i2V9DgutM+UIdufxrZoG6qyC1lJRhQ/UVBMOc5hwQUxzgXXimtyL35pYAq\n",
+       "Y0/cpWpCqrQDGKfZbjcoCDVlWFVU2kVhVVTmtgr21bJ7ssTpJrqLanic1LHJhFl1ZNxPd45qixJ2\n",
+       "EFOoTSDG+umcS9ZbPWpQDiIF4aU2bNdkXHicsb0TmKKuKsdTycmXnFW0FlErYmrY1RKz+5PaKqpA\n",
+       "o61g1m1VFKbA5j6gZQPGSNJNIBe5qHIWNwaVgVdREU5MFZ11Y10tZVR/XRvwPuq+zoGqicj7K3DG\n",
+       "P+Ve9SWlsgPWoQOM0gm30QHNjREHANW9FchwqT6Ve7HkdJHaTYibpApmDm4RcU3ZKTi4VZ9UH1ee\n",
+       "k1UvAFx0VoGM9aOgpvrJZV3qorFKj9WEUZPG7Xd51QZui7s/w6uL0lSdE+dcgSzrnb5Qw/uo1Yay\n",
+       "8Q5sru0KZpMAhG3jhVlyTiY8Kl9JZbBqicEgq2CGZWwYPOBV1MnwwWOTFwDT782U3iaTpHt+5qTy\n",
+       "0GqisrG6qKjasE7VRm5/BQPVHzXWDkS5z/pj0CXUHjx4EJeXl6WOqv6tRGgOaM4esM2cxz5jWyug\n",
+       "VTBz0arr26rcq2do6qXAxDhZYXysi8pcGQItnzeg8OAqsCkIZTqNzCZ/WaMzXGc8LjqrDMtFGqqM\n",
+       "AVZBbTU6475gn1QfJlBTWwUHBzBVxv3BFEUtOd2+itDYHtkZd2Dj86YptgnbyzBzcizMIk6w5Owg\n",
+       "hvspk4lWlavITIGuAhq+mkdho2eYMdCwrHoJsLL0VFsVwbBu3J9qVnVwZOEiF57wmHdl0+iM6+X+\n",
+       "cH5FHMhwfLFNFcA4cuP2T6Qbx2zT5E2nui/DC/Xn9O7KVCSGbe5gpoKS1TG8Fz99cvsR15eP00jC\n",
+       "XVPBy20JCL638uRqczBSX/5XbznVctN5Z9aF2meYKf2iVPByEKoAxh68A5mDGbeXx0pNpKpMQQ31\n",
+       "y49DJlCbgmzVYUfoJWfnGLjfKpri9rp8VZZ1VfWyPajrpnIvn6EpoPEbR5ZJhLYSmT19+ua/BcNz\n",
+       "qoneAUxFWwpm1eca7lmcm2xKLw7yrm+dh+yiKgUwvJajjWmEpmTSJxdBMMzU0k09p6yisnSE+A3a\n",
+       "5eVl2X6lF3densufOjGAJzB10dQKuNT1VdSm6nG2MpWTLjk7qLnojN98VIRX1ypIKZipj2+zrpQq\n",
+       "QqsiM04rsLk3m91bTtYF62WysVTQ6aKBCm7Tujup+oD35/uq+7uIGx+JcCSo7t+1/xBHjTKN0FSf\n",
+       "lVSRXCcTmKkxcDBfBdu9iNBcPiKuRGYJl/SGWba6BFEgc9FaFaFNJkAFMwW2KqKbgCzzLqI9BGJO\n",
+       "HHQ4gqmiM0yn0OqcF/ezu2/XN+d0+T4qSsvyKjJzfVDHVTunL4qOAdVEDoUZ13GMrH21dpaz3LIc\n",
+       "a9B/EOWss7fkzoH22c9+9q5veWfy27/926duwq3K7/7u7976PW4qWjhEPvOZz5zs3sfIVGef+tSn\n",
+       "brklp5cz0G5QXnagfe5zn7v1e5wy2rht27wtWE919ulPf/pW7n+f5LzkPMu9klNGaLcttwXrl1ln\n",
+       "q3IG2lnulZyfB63LWWdvyXaLXuOs5bOc5Sy3Ivu+y7D01oB2lrOc5Sx3Lecl51nOcpaXRs5AO8tZ\n",
+       "zvLSyJ0Bbdu2j2zb9ult235727a/fFf3vSvZtu2z27Z9Ytu2f7pt2z85dXuOlW3bfmrbtte3bfsk\n",
+       "lP3hbdt+Zdu2/3Pbtn+0bdu/dso2HiOmf39927bPPxvDf7pt20dO2cZDZdu2d2/b9qvbtv3v27b9\n",
+       "1rZt/+Gz8pdm/JzcCdC2bXsYEX8rIj4SEe+PiI9u2/ZH7+Ledyh7RHx43/fv2ff9g6duzA3IT8eb\n",
+       "44XyIxHxK/u+/xsR8T8/239RRfVvj4gfezaG37Pv+z88QbtuQh5HxH+87/t3RMT3R8S//2y+vUzj\n",
+       "J+WuIrQPRsQ/3/f9s/u+P46IvxcRf/qO7n2X8tJ8ELTv+69FxP9DxX8qIn7mWf5nIuLfudNG3aCY\n",
+       "/kW8BGO47/u/3Pf9nz3L/38R8amIeC1eovFzcldAey0i8DPzzz8re5lkj4h/vG3bx7dt+/dO3Zhb\n",
+       "klf3fX/9Wf71iHj1lI25JflL27b9b9u2/eTLsCTbtu29EfE9EfEb8Qdg/O4KaH8Qvg35Y/u+f09E\n",
+       "/Il4M8T/t07doNuU/c3vfV62cf3bEfG+iPjuiPhCRHzstM05TrZt+8aI+O8j4j/a9/1f4bGXdPzu\n",
+       "DGj/IiLeDfvvjjejtJdG9n3/wrP09yLiF+LNZfbLJq9v2/bOiIht294VEV88cXtuVPZ9/+L+TCLi\n",
+       "J+IFHsNt2x7FmzD7b/d9/8VnxS/1+EXcHdA+HhF/ZNu2927b9nUR8eci4pfu6N63Ltu2/aFt277p\n",
+       "Wf7tEfHHI+KT9VUvpPxSRPzQs/wPRcQvFue+cPJskqf8mXhBx3B788edPxkR/8e+7z8Oh17q8Yu4\n",
+       "w18KbNv2JyLixyPiYUT85L7v/8Wd3PgOZNu298WbUVnEm3808+++6P3btu3nI+JDEfEt8ebzlr8a\n",
+       "Ef9DRPz9iHhPRHw2Iv7svu+/f6o2HiOif38tIj4cby4394j4TET8RXjm9MLItm3/ZkT8rxHxiXhr\n",
+       "WflXIuKfxEsyfk7OP306y1nO8tLI+ZcCZznLWV4aOQPtLGc5y0sjZ6Cd5SxneWnkDLSznOUsL42c\n",
+       "gXaWs5zlpZEz0M5ylrO8NHIG2lnOcpaXRs5AO8tZzvLSyP8P5bdSohzrzUEAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f7939901850>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "imshow(solver.net.params['conv1'][0].diff[:, 0].reshape(4, 5, 5, 5)\n",
+    "       .transpose(0, 2, 1, 3).reshape(4*5, 5*5), cmap='gray')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Something is happening. Let's run the net for a while, keeping track of a few things as it goes.\n",
+    "Note that this process will be the same as if training through the `caffe` binary. In particular:\n",
+    "* logging will continue to happen as normal\n",
+    "* snapshots will be taken at the interval specified in the solver prototxt (here, every 5000 iterations)\n",
+    "* testing will happen at the interval specified (here, every 500 iterations)\n",
+    "\n",
+    "Since we have control of the loop in Python, we're free to compute additional things as we go, as we show below. We can do many other things as well, for example:\n",
+    "* write a custom stopping criterion\n",
+    "* change the solving process by updating the net in the loop"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Iteration 0 testing...\n",
+      "Iteration 25 testing...\n",
+      "Iteration 50 testing...\n",
+      "Iteration 75 testing...\n",
+      "Iteration 100 testing...\n",
+      "Iteration 125 testing...\n",
+      "Iteration 150 testing...\n",
+      "Iteration 175 testing...\n",
+      "CPU times: user 12.3 s, sys: 3.96 s, total: 16.2 s\n",
+      "Wall time: 15.7 s\n"
+     ]
+    }
+   ],
+   "source": [
+    "%%time\n",
+    "niter = 200\n",
+    "test_interval = 25\n",
+    "# losses will also be stored in the log\n",
+    "train_loss = zeros(niter)\n",
+    "test_acc = zeros(int(np.ceil(niter / test_interval)))\n",
+    "output = zeros((niter, 8, 10))\n",
+    "\n",
+    "# the main solver loop\n",
+    "for it in range(niter):\n",
+    "    solver.step(1)  # SGD by Caffe\n",
+    "    \n",
+    "    # store the train loss\n",
+    "    train_loss[it] = solver.net.blobs['loss'].data\n",
+    "    \n",
+    "    # store the output on the first test batch\n",
+    "    # (start the forward pass at conv1 to avoid loading new data)\n",
+    "    solver.test_nets[0].forward(start='conv1')\n",
+    "    output[it] = solver.test_nets[0].blobs['ip2'].data[:8]\n",
+    "    \n",
+    "    # run a full test every so often\n",
+    "    # (Caffe can also do this for us and write to a log, but we show here\n",
+    "    #  how to do it directly in Python, where more complicated things are easier.)\n",
+    "    if it % test_interval == 0:\n",
+    "        print 'Iteration', it, 'testing...'\n",
+    "        correct = 0\n",
+    "        for test_it in range(100):\n",
+    "            solver.test_nets[0].forward()\n",
+    "            correct += sum(solver.test_nets[0].blobs['ip2'].data.argmax(1)\n",
+    "                           == solver.test_nets[0].blobs['label'].data)\n",
+    "        test_acc[it // test_interval] = correct / 1e4"
+   ]
+  },
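+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a rough sketch of the first idea above (a custom stopping criterion), the next cell stops training once the test accuracy stops improving.\n",
+    "It is only an illustration, not part of the original example: it reuses the `solver`, `test_interval`, and accuracy check from the loop above, and the patience value and iteration cap are arbitrary choices. Running it would continue training past the 200 iterations used above."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# sketch: early stopping on test accuracy (assumes solver and test_interval from above)\n",
+    "best_acc = 0\n",
+    "evals_since_best = 0\n",
+    "patience = 4  # arbitrary: give up after 4 evaluations without improvement\n",
+    "it = 0\n",
+    "while True:\n",
+    "    solver.step(test_interval)  # run SGD for test_interval iterations\n",
+    "    it += test_interval\n",
+    "    # same accuracy computation as in the loop above\n",
+    "    correct = 0\n",
+    "    for test_it in range(100):\n",
+    "        solver.test_nets[0].forward()\n",
+    "        correct += sum(solver.test_nets[0].blobs['ip2'].data.argmax(1)\n",
+    "                       == solver.test_nets[0].blobs['label'].data)\n",
+    "    acc = correct / 1e4\n",
+    "    print 'Iteration', it, 'test accuracy', acc\n",
+    "    if acc > best_acc:\n",
+    "        best_acc, evals_since_best = acc, 0\n",
+    "    else:\n",
+    "        evals_since_best += 1\n",
+    "    if evals_since_best >= patience or it >= 1000:  # hard cap as a safety net\n",
+    "        break"
+   ]
+  },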
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's plot the train loss and test accuracy."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "<matplotlib.text.Text at 0x7f793878f490>"
+      ]
+     },
+     "execution_count": 16,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAaAAAAEPCAYAAAAEfBBiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJztnXm4HGWV/z9fwhK2JIRAgCTsYYkswsgiiwYBRVRwGxV1\n",
+       "dNRxcEGZUcdtVBhHZ3Abcf8xiruCjguigohIANmXQBISIAECYd9CSFgTOL8/zlvpun2r+1bf23V7\n",
+       "uefzPP10d9XbVe+t2/1+65z3vOfIzAiCIAiC0WadTncgCIIgGJuEAAVBEAQdIQQoCIIg6AghQEEQ\n",
+       "BEFHCAEKgiAIOkIIUBAEQdARKhMgSTMkXSjpRkkLJH2woM1sSSskzU2PT1XVnyAIgrGOpO9Lul/S\n",
+       "/CZtvi5psaQbJO1TZX/WrfDYq4F/NbPrJW0CXCvpfDNbVNfuIjM7psJ+BEEQBM4PgG8APy7aKelo\n",
+       "YGczmynpAOA7wIFVdaYyC8jM7jOz69PrVcAiYJuCpqqqD0EQBEENM7sEWN6kyTHAj1LbK4FJkqZW\n",
+       "1Z9RmQOStD2wD3Bl3S4DDkqm3jmSZo1Gf4IgCIJCpgHLcu/vAqZXdbIqXXAAJPfbr4ATkyWU5zpg\n",
+       "hpk9IenlwFnALlX3KQiCIGhIvVeqsnxtlQqQpPWAXwM/NbOz6veb2crc63MlfVvSZDN7pO44kbAu\n",
+       "CIJgGJhZK9McdwMzcu+np22VUJkASRJwOrDQzE5t0GYq8ICZmaT9AdWLT0aLFzFogqSTzezkTvej\n",
+       "H4hr2V7ieraXYdy8nw2cAJwp6UDgUTO7v/09c6q0gA4G3grMkzQ3bfsksC2AmZ0GvB54r6Q1wBPA\n",
+       "myrsTxAEwZhG0hnAi4EpkpYBJwHrgY/JZnaOpKMlLQEeB95RZX8qEyAz+xtDBDmY2beAb1XVhyAI\n",
+       "gqCGmR1Xos0Jo9EXiEwIY5U5ne5AHzGn0x3oM+Z0ugPB6KFeKEgnyWIOKAiCoDW6fewMCygIgiDo\n",
+       "CCFAQRAEQUcIAQqCIAg6QghQEARB0BFCgIIgCIKOEAIUBEEQdIQQoCAIgqAjhAAFQRCURdoAaQuk\n",
+       "8Z3uSj9QeTmGIAg6iCcF3hjP9/UU8DRmz3W2U6OMX4PxwITcY2Ld+0aP+nbrACuA1wEXjeaf0Y/0\n",
+       "TCYEsPXMWNPpvgTBqOKD50bApBYem+VeTwSeAVYDG+AD8WpcjBo9nh5i/0jbrKHMwFP720cqGhOA\n",
+       "Z3HheKzko1Hbp0v1vUvo9kwIvSRAJ5nx2U73JQhaonb33UggyjzWAI82eSxvsm8FZs/U9Wd9amLU\n",
+       "6DHU/pG0WYfGIrUeNdHYNO0fjlDkHysxe7r5P6o/CQFqA0mAVgHTzHis0/0Jxjju/98F2B3YgaEF\n",
+       "5TmaC0gzMVnRd4OntC4DhSl7vSFunWWishKz8HqMgG4XoF6aA7oB2A+4oNMdCcYI0kRcZOof04Db\n",
+       "gEXp+RHgdorFZAVmT41637sZF5U1eL2ZYAzTSwJ0BfBCQoCCduIuqakMFJhZ6XkCcBMuNIvwCr8u\n",
+       "OmarO9LfIOgjekmALqfi6nxBHyOtA2xPsUXzLC4sC9PzH9LzXWMuYizoeyQdBZwKjAO+Z2ZfqNu/\n",
+       "GfB9YEd8Xu6dZnZjJX3poTmgacA8YAszur/TQWeQ1gdmMlhkdgEepmbN1B5mD3ams0FQLfVzQJLG\n",
+       "ATcDRwB3A1cDx5nZolybLwGPmdl/StoV+JaZHVFF/3rGAjLjHokngJ2BxZ3uT9BhpE2A3RjsNtsO\n",
+       "uIOawPwJ+CpwE2YrO9PZIOga9geWmNlSAElnAsfiv5WM3YFTAMzsZknbS9rCKrhR6xkBSlyOzwOF\n",
+       "AI0VpC0odptNAW6hJjQ/w11oS/ouaiwI2sc0YFnu/V3AAXVtbgBeC/xN0v74Td10YMwLUBaI8ONO\n",
+       "dySoAHefHQgcCbwIt2rWoyYyC4Hz0+s7MHu2Qz0Ngq5E0mxgdpMmZaYvTgG+JmkuMB+Yi8+Ttp1e\n",
+       "E6BrgDd2uhNBm/AItN1xwTkSOBS3bs8HPo9/+e/rpZXnQdBJzGwOMCd7L+mkuiZ3AzNy72fgVlD+\n",
+       "GCuBd+aOcTu+3KDt9JoAzQP2kFjHjIhO6kWkLfEJ0Ex0ngX+jFu1/4jZQx3sXRD0O9cAMyVtD9yD\n",
+       "39Afl28gX//2pJk9I+ndwEVmtqqKzvSUAJmxQuIhYCdiHqg3kDYEDsHF5qV4KPQc3Mr5b2BxWDhB\n",
+       "MDqY2RpJJwDn4WHYp5vZIknHp/2n4a7vH3r0MQuAd1XVn54Jw85CCSXOBn5kxq873K2gCF9vszc1\n",
+       "C+dA3HL9My46V0V6lSAYHSIVT/u5AdgLQoC6Bmk6NcE5Ak9Bcz7wTeD1mK3oYO+CIOhSelGA5gFv\n",
+       "6XQnxjTSpnikTSY6W+Apks4HPoHZHZ3rXBAEvUIvuuB2Bc41Y8cOd2vs4NmLX0BNcPbBV1BnbrW5\n",
+       "kbImCLqPbnfB9aIAjcNTtW8dpRkqRNqJmuC8BF+8dn56XIzZEx3sXRAEJeh2Aeo5F5wZz0rcCOwB\n",
+       "XNbp/vQN0mRcaDLR2RAXm7OAEzC7t4O9C4KgD+k5AUpcARxOCNDw8awDL6QmOLsDf8NF5xvAjREe\n",
+       "HQRBlfScC87fcxBem2VWZMZuAWka8DrgZXjWgZupudUuixxqQdBfdLsLrlcFSHhqiNeaMbdzPesB\n",
+       "pM2B1+OrnfcCzgbOAS7A7OFOdi0IgmrpdgHqSRecGSbxc+DNEAI0CC9VcCwuOofiJQlOBf4U5aGD\n",
+       "IOgWetIC8m3MwsOAt428cIC0AfByXHSOwudzfg6cHXVwgmBsEhZQRZixUOIZvCjZwk73pyP4+pzD\n",
+       "cNF5Nb5I9wzgfeFeC4Kg21mnqgNLmiHpQkk3Slog6YMN2n1d0mJJN0jap8XTXAocNPLe9hCSkA5C\n",
+       "+gaeRv2/8ISBe2I2G7PTQnyCIOgFqrSAVgP/ambXy+ckrpV0fl3t8aOBnc1spqQDgO/gySvLchku\n",
+       "QN9rZ8e7Dq+bsxdu6bwJeAK3dA7FLLKCB0HQk1QmQGZ2H3Bfer1K0iJgGwbWHj8G+FFqc6WkSZKm\n",
+       "mtn9JU9zKfAvbex2dyHtjAvOccDGQFa/fV6s0QmCoNcZlTmgVPxoH+DKul1F9cmnA2UF6EZgK4kp\n",
+       "ZvRHITNpG2pForYF/g94N3B5iE4QBP1E5QKU3G+/Ak5sUFWvPkKjcJCVdHLu7Rwzm5PS8lyBu+HO\n",
+       "bkd/O4Kv1XkdLjp74+lv/h24MGrnBEHQr1QqQJLWw+v2/NTMzipoUl+ffHraNggzO7nBaS4DXkSv\n",
+       "CVDxWp2vEWt1giCoEElH4esCxwHfM7Mv1O2fAvwU2ArXiC+b2Q+r6EuVUXDC0+UsNLNTGzQ7G3hb\n",
+       "an8g8GgL8z8ZvwHeKnGBxM7D7vBoIG2A9GqkX+BC+2Y8mGA6Zm/E7KwQnyAIqkLSOLxQ5FF46e3j\n",
+       "JO1e1+wEYK6ZPR+v+/UV+ZKPtlOlBXQw8FZgnqQsW8En8XkNzOw0MztH0tGSlgCPA+9o9SRmzJfY\n",
+       "Fvgi8CHgfW3pfbvwf/hhuNjEWp0gCDrJ/sASM1sKICkLbMoHh92LR90CTAAetoqmAqqMgvsbJSws\n",
+       "Mzth5OfiGYmvAVdJnGjG6pEec0S49Xcg7l57Ax5ocQbwacwKXYxBEASjQFHg1wF1bb4L/FXSPcCm\n",
+       "+BhWCT2bCaEeM26XuAUvLXDOqHfARWdPamt1niTW6gRBMIpImo27zRpRJpL2k8D1ZjZbXpjyfEl7\n",
+       "WwUpvfpGgBI/B95CJwQIfoebrWeSudoibDoIglHEzOYAc7L3kk6qa1If+DUDt4LyHAR8Ph3vVkm3\n",
+       "A7sC17S5u72bjLS4HVsCi4EtzRi92jZSdt4pmHXW/RcEQZAYXMpG6+J1wA4H7gGuAo6ry1DzP8AK\n",
+       "M/sPSVOBa4G9zOyRdvevsii4TmDGA/gE2mhHw70EuCjEJwiCbiYFE5wAnIcncf6FmS2SdLyk41Oz\n",
+       "/wJeIOkG4C/AR6sQH+gzC8jbchbwUzN+VXG38ic9Hbges2+M2jmDIAiGoNvLMfSVBZS4CS/RMDp4\n",
+       "8MGReFnrIAiCoCT9KECLGE0Bgpn4dbx5FM8ZBEHQ8/SjAN0E1K/srRK3fnrBlxkEQdBF9KMA3Qzs\n",
+       "Ko3a33YE4X4LgiBomb4TIDMeBVbiK36rxUMaZwMXVH6uIAiCPqPvBCixiNFxw+0H3EnrCVSDIAjG\n",
+       "PP0qQKMVCXcEHicfBEEQtEgI0MiI8OsgCIJh0q8CtAivdVEdXlBuX+CSSs8TBEHQp/SrAC0A9pAG\n",
+       "lftuJy8Grsbs8QrPEQRB0Lf0qwDdl563qvAc4X4LgiAYAX0pQGYYMB+vz1MVEYAQBEEwAvpSgBLz\n",
+       "gT0qObK0DbANnqY8CIIgGAb9LEALqM4COgL4K2bPVnT8IAiCvqefBahKF1y434IgCEZI39UDqn2G\n",
+       "TfFghAlmtM9S8fILdwOHYnZr244bBEHQZqIeUIcwYyXwALBjmw89C3gauK3Nxw2CIKgcSUdJuknS\n",
+       "YkkfK9j/EUlz02O+pDWSJlXRl74VoEQV80BRfiEIgp5E0jjgm8BR+M30cZIG5M00sy+b2T5mtg/w\n",
+       "CWCOmT1aRX/6XYBuB7Zt8zFj/U8QBL3K/sASM1tqZquBM4Fjm7R/M3BGVZ3pdwF6ENiibUeT1gcO\n",
+       "Bf7atmMGQRCMHtOAZbn3d9GgdI2kjYCXAb+uqjPrVnXgLuFBPF9buzgQuAWzh9t4zCAIgrYgaTZe\n",
+       "o6wRrUwdvAr4W1XuNxgbAtQ+CyjCr4Mg6GLMbA4wJ3sv6aS6JncDM3LvZ+BWUBFvokL3G4QLrlVi\n",
+       "/icIgl7mGmCmpO3lUwpvBM6ubyRpIvAi4HdVdiYsoLJ4GOIewKVtOV4QBMEoY2ZrJJ0AnAeMA043\n",
+       "s0WSjk/7T0tNXw2cZ2ZPVtmfvl2I6p9jCnCLGZPb0IlXA+/D7KUjPlYQBMEoUPVCVEnjbAQpyfrd\n",
+       "BfcIMEFivTYcK9xvQRAEA1ks6UuShlUAtK8FyIzncBHavA2HiwCEIAiCgTwfWAx8T9KVko6XNKHs\n",
+       "h/tagBIPAVNGdARpW2Az4IZ2dCgIgqAfMLPHzOx/zewg4GPAZ4D7JP1I0s5DfX4sCFA7AhGOBC7A\n",
+       "7Lk29CcIgqAvkLSupGMlnQWcCnwFz7/5e+CcoT7f71Fw0B4BCvdbEATBYG7B1x190cwuy23/laQX\n",
+       "D/XhSi0gSd+XdL+k+Q32z5a0Ipd59VMVdGNkAiStAxxOBCAEQRDUs5eZvbNOfAAwsw8M9eGqXXA/\n",
+       "wLOuNuOiLPOqmX2ugj6M1ALaC3gUszvb1J8gCIJ+4Vv5Ug2SJkv6ftkPVypAZnYJsHyIZlUXSxqp\n",
+       "AEX4dRAEQTF753PFmdkjtJB/s9NBCAYcJOkGSecMN5Z8CB4EtpA4XBqccqIEIUBBEATFSNLk3JvJ\n",
+       "eIaFUnQ6COE6YIaZPSHp5cBZwC5tPkdmAb0NeKnEeDOeKvVJaTzwQuANbe5TEARBP/AV4HJJv8S9\n",
+       "WX8PfL7sh4cUIEmbAE+a2bOSdgV2Bc5NxYxGhJmtzL0+V9K3JU1OZlx9P07OvZ2Tsr6W4UFgOrA3\n",
+       "ngl2f+Dikp89GFhAhenIgyAIehUz+7Gka4GX4B6t15jZwrKfL2MBXQwcImkzPIHd1XgG1bcMo78D\n",
+       "kDQVeMDMTNL+eG66QeIDYGYnD/M0DwI7AxfhfX8x5QUowq+DIAiaYGY3SnoIGA+YpG2tZNBWmTkg\n",
+       "mdkTwGuBb5vZ3+NZoYf+oHQGcBmwq6Rlkt6ZUjUcn5q8Hpgv6Xp8EdObyhy3RbLicb/ERWjI2PQc\n",
+       "Mf8TBEHQAEnHSFoM3IavB1oKnFv680Nlw5Y0F3gf8FXgXUnt5pvZnsPtdKuMNKOrxN9wAX0GL0e7\n",
+       "uRnPDPGhzYHbgSmYNW8bBEHQhYxCNux5uPvtfDPbR9JhwD+Y2TvLfL6MBfQvwCeA3ybx2Qm4cNg9\n",
+       "7gBmHGLGA2Y8CiwBXlDiYy8BLgnxCYIgaMhqM3sIWCeVZriQcuMrUEKAzOwiMzvGzL4gzwrwoJl9\n",
+       "cAQd7jSXU+4ChfstCIK+Q9JRkm6StFjSxxq0mZ2y0yyQNKfJ4ZZL2hS4BPiZpK8Dq8r2ZUgBknSG\n",
+       "pAmSNgYWAIskfbTsCbqQu4BtSrSLAIQgCPoKSeOAb+IZamYBx0nava7NJOBbwKvMbA98rr4RxwJP\n",
+       "AP8K/An3ML2qbH/KuOBmmdljeInWc4HtgX8oe4Iu5B6GEiB3M44HbhyNDgVBEIwS+wNLzGxpWkpz\n",
+       "Ji4ied4M/NrM7gJILrZBSFoX+IOZPWtmq83sh2b2dTN7uKh9EWUEaF1J6+EC9PvU6e6v492Ye4Gt\n",
+       "h2hzJPAXeqFeeRAEQXmm4YFYGXelbXlmApMlXSjpGkmFBoeZrQGey+eCa5Uy64BOw0Pr5gEXS9oe\n",
+       "WDHcE3YBZQToCBhW2p4gCIJupsxN9Xp4PrfDgY3wTAdXmNnigraP40tp/oy74gCsbJzAkAJkZl8H\n",
+       "vp69l3QHHiHWqzR3wbmP9CVALwdaBEEwBpE0G5jdpMndwIzc+xm4FZRnGfCQmT0JPCnpYjyTTJEA\n",
+       "/SY98pT2HJVZBzQJOAl4Udo0B/ismY2aFdTOWHYJAU8BEwtzwkn7AT/E7HntOF8QBEGnqB8707zN\n",
+       "zbh1cw9wFXCcmS3KtdkND1R4GbABcCXwxlZS7JSljAvu+8B8PMmc8ACEH+ALO3sOM0ziPtwNd3tB\n",
+       "kwi/DoKgLzGzNZJOwNOqjQNON7NFWXYaMzvNzG6S9Cd82uU54LuNxEdS0RhqZrZjmf6UsYBuMLO9\n",
+       "h9pWJe1ezStxBfAhMwZV8UO6EPgyZn9s1/mCIAg6wShkQpiSezseD9ne3Mw+XebzZaLgnpR0aO6E\n",
+       "h1CbbOpViueBfK3TfpRPVhoEQTBmMbOHco+7zOxU4BVlP1/GBfce4MeSJqb3y4G3D6Ov3USjSLhD\n",
+       "gevIlYkIgiAIipH0d9SCDtbBs8y0ryCdmV0P7CVpQnr/2DD62W00ioSL7AdBEATl+Qo1AVqDL9kp\n",
+       "XcCzoQBJ+nDureW2C59k+p+Wutld3Estqi/PkbjFFwRBEAyBmc0eyeebzQFtCmySHpvmHtn7XuYe\n",
+       "kgtOSuaiF8fbDi9aFwRBEAyBpP/KZ0KQtJmkz5X+fC9km6kgCm5v4KfAF4APAAcaOg54A2avbtd5\n",
+       "giAIOskoRMFdb2bPr9s218z2KfP5MkEI/cg9uLXzZXzC7PnE+p8gCIJWWUfSeDN7CkDShsD6ZT88\n",
+       "VgXoYWBD4MfAw+K5f8QDEE7pZKeCIAh6jJ8BF0j6Pp6o4B34uFqKMemC82Pyb8B3gc1nceNVC9jj\n",
+       "ccF2kQE7CIJ+oWoXXDrHy/HUPuCluc8r/dkSmRDGA6/D6wBlFpOZ2Wdb7+rwqPoifkafXfImzrx9\n",
+       "li08sqpzBEEQjDajMAe0A3BfSlyaueCmmtnSMp8vkwnhd8AxwGq81OoqPAV33/BqzrLf86oHOt2P\n",
+       "IAiCHuNXwLO598+lbaUoMwc0zcxe1mqvegZpvd0ZP/3N/Px3hcXRgyAIgkaMM7Nnsjdm9nQqYFqK\n",
+       "MhbQZZL2GlbXeoP9VjDxkZvZrcy1CIIgCGo8JGltSe/0urCEdxFlLKBDgXektNtPp21mZv0iSkcu\n",
+       "YvcF+ALbIAiCoDzvAX4m6Zvp/V14yZ5SlBGglw+nVz3EkZdy8CV4kEUQBEFQEjNbAhwgaVN/a6ta\n",
+       "+XyzXHATUuLRfkg+WownWH3+d3n3qcAene5OEARBryHplcAsYLynCoWyUdLN5j3OSM/XAdcWPPqB\n",
+       "FwNX3sl2jxAuuCAIxgCSjpJ0k6TFkgbFXkmaLWmFpLnp8akmxzoNz379QXwh6hvwLDOlaGgBmdkr\n",
+       "0vP2ZQ/Wg2TlF1YSAhQEQZ8jaRzwTXzsuxu4WtLZZraorulFZnZMiUMeZGZ7SppnZv8h6SvAn8r2\n",
+       "p1QqHkmbATPxkqsAmFk/VA09Engbvq6p1zN8B0EQDMX+wJJsoaikM4FjgXoBKrt49cn0/ISkaXia\n",
+       "s63KdmZIAZL0bty8mgHMBQ4ELgdeUvYkXYk0HZiK/03bEBZQEAT9zzRgWe79XcABdW0MOEjSDbiV\n",
+       "9BEzW9jgeL9PBsqXqE3NfLdsZ8pYQCcC+wGXm9lhknYD/rvsCbqYw4G/YvYsYhUhQEEQ9D9lcl1e\n",
+       "B8wwsydSnrezgF0KD2b2n+nlryX9ERhvZo+W7UwZAXrKzJ6UREq7fZOkXcueoIvJl19YBWwiIWAC\n",
+       "8DIzftmxngVBEAwDSbOB2U2a3I17szJm4FbQWsxsZe71uZK+LWmymT3S7NypJMNTLfW3RDLSs/AU\n",
+       "2yfiVsNyYF0zO7qVE42EtifU81jBe4GDMLvNN/EUMAk4GPiqGf2y0DYIgjFK/dgpaV3gZnwsvwe4\n",
+       "CjguH4Qgrw79gJmZpP2BX1YVjDakBWS1CqEnS5qDWwiloxy6lD2AxzPxSazCAxE2BzbrSK+CIAgq\n",
+       "xMzWSDoBOA8vxnm6mS2SdHzafxrweuC9ktYATwBvqqo/TS2gpJYLzGy3qjpQhgosoA8Bu2D2ntom\n",
+       "lgKHAS8DvmLGxm07XxAEQQcYhXIMF5jZ4UNta0TTBJxmtga4WVLphUU9Qrb+J08WiLA5sJFUvqxs\n",
+       "EATBWELShpI2B7aQNDn32B6PtCtFmSCEycCNkq6iVgfIyixSSmVaX4H7E/ds0ObreL65J4B/NLO5\n",
+       "pXo+XKQNgEOAt9btyRajTk7vNwPur7QvQRAEvcnxeFzANgzMjLMSX+haijIC9CkGL0oqW7b6B8A3\n",
+       "aFAjXNLRwM5mNlPSAcB38HVGVXIgcDODIzryFhC4EIUABUEQ1GFmpwKnSvqAmX1juMcpUwPnFWY2\n",
+       "J/8ASkXAmdkleNRcI44BfpTaXglMShEYVZIPv86TBSHkLSCktYIUBEEQDOT+lAkbSZ+W9BtJ+5b9\n",
+       "cBkBOrJgW7tCsItW5U5v07Eb0UyAMgtoJbVIuGukKNUQBEFQwKfNbKWkQ/DQ7u8D/6/shxsKkKT3\n",
+       "SpoP7Cppfu6xFJg30l7nT1X3vqx7bxhn0mZ42vDLCvZmc0CbA0uAyRLr4Au1tqysT0EQBL3Ls+n5\n",
+       "lcB3zewPQOmS3M3mgH4OnAucAnyMmlCsNLOHh9HRIupX5U5P2wYh6eTc28wV2CqHAZdi9nTBvswC\n",
+       "mowL7GbAFDxWPtYFBUEQDOZuSf+Le5ZOkTSecp41oHk5hhXACipchAScDZwAnCnpQOBRMyuc+Dez\n",
+       "k9twviMZHH6dkc0BbQbcmp63TvsmN/hMEATBWOYN+NrJL5nZo5K2Bv6t7IdLlWMYLpLOwIu+TZG0\n",
+       "DDiJZJ6Z2Wlmdo6koyUtwUO831Flf/D1P438k6vwDAlP4tFvO1AToLCAgiAI6jCzxyU9iC9tWQys\n",
+       "wacwSlGpAJnZcSXanFBlH9biC6QmAPMbtFiFV/J7BI/c25daXYuwgIIgCOpIUyN/B+yKL7tZH/gJ\n",
+       "nlNzSEr76vqAI4ALMHuuwf6VuAA9jAtQ5oJ7hrCAgiAIingNXtDucQAzu5sWinuOJQFqFH6dsQoP\n",
+       "iHgYt4Im4wJ0MyFAQRAERTxtuZt6SS3l0BwbAiStg8eoNwpAABeg9am54DbDXXCLCBdcEARBEf8n\n",
+       "6TQ8icA/AxcA3yv74UrngLqI5wMPYbasSZtV6bneBfcXer38eBAEQQWY2ZckvRSfwtgFX5jazNM0\n",
+       "gLEiQEO538AvINQsoMwFdyNeHyMIgiDIIekLZvYx4M8F24ZkbLjgissv1LPWAjLjSeA5YFvcBRdz\n",
+       "QEEQBIN5acG20qna+l+ApA3xDNhzhmiZCVCWJXs5sBq4kxCgIAj6BElHSbpJ0mJJDS0VSftJWiPp\n",
+       "tQX72pKqbSy44A4B5uOZHZqR1TrK0gwtx2sUrQLWl9jAjKIUPkEQBD2BpHF4vZ4j8LRnV0s628wW\n",
+       "FbT7AvAnBufrhDalahsLAnQEQ8//YMZqiaepWUCPpO0m8QhuBd1XWS+DIAiqZ39giZktBZB0Jr6O\n",
+       "Z1Fduw8AvwL2KzpIu1K19b8LrlwAQsZKBlpA9+VedywUW2J9qfziriAIggYUlcAZUEJb0jRclL6T\n",
+       "NlVWoaC/LSBpCrATcGXJT3wAuD29Xo4rfPa6k/NAb8ZTW7y7g30IgqDLkTQbmN2kSRkxORX4uJmZ\n",
+       "JFHsgmsL/S1Avvj0YsxWl2lsxpm5t/cBD6TXmQuuU0yklhg1CIKgkFSmZk72XtJJdU3qS+DMwK2g\n",
+       "PH+HVygAL0nzckmrzezsdve33wWoFfdbPSfjodjQYRccMB6iNHgQBCPmGmCmPDnzPcAbgQFJo81s\n",
+       "x+y1pB8Av69CfKCf54BcvpvV/2mKGU/mot467YLbEL8TCYIgGDZmtgavwXYesBD4hZktknS8pONH\n",
+       "uz/9bAHtjP999dEdwyFLTtopxhMCFARBGzCzc/EQ6vy20xq0rbRGW/9aQFn2A7N2RHB0gwU0Serr\n",
+       "G4YgCMYY/SxAI5n/qafTAjQ+PUdW7iAI+ob+FCBpXeAwhjn/U0CnXXAbpudwwwVB0Df0pwB5GOFd\n",
+       "mLUrc8FyYAepYyKUWUARCRcEQd/QrwLUTvcbeOjiFcASide08bhl2RB4lrCAgiDoI/pVgMqUXyiN\n",
+       "GU+Z8U7geOA97TpuC4zHY/ZDgIIg6Bv6T4CkTXAX3EUVHP084CCJTSTGS7ywgnMUsSG+WjlccEEQ\n",
+       "9A39GNb7IuBazB4fsmWLmPGYxFV4ip+dgH/Bi9ZVzXjgNsICCoKgj+hHAWqr+62AP+Iluo8Apkis\n",
+       "Z0apXHMjICygIKhDYm9gnll12ZqDauk/F1z7AxDq+SPwVuByPGHptObN28J4XIDCAgqCGr8Fdu90\n",
+       "J4Lh018CJG0FTAeurfAstwB/Bj4L3AFsV+G5MjILKAQoCGpsTHgFepp+c8EdAVyIJ9yrhGTuvwxA\n",
+       "GjUByiyg+LEFQY2NiN9ET9NfFlD17rd6wgIKgg4gIVyAIj1VD9M/AuTlF6oOQKincgFKP7QNgHuB\n",
+       "CZGQNAgAWB8fv8IC6mH6R4B8MnI1sGQUzzkaFtD6wOoUafconU2KGgTdwkbpOQSoh+knAWpn+YWy\n",
+       "jIYAbQhcmI0AAAAgAElEQVQ8lV4/TLjhggBqAhQuuB6mnwRotOd/AO4EZkiNr6PEBInPj+Ac46kJ\n",
+       "0EOEAAUBhAU0bCQdJekmSYslfaxg/7GSbpA0V9K1kl5SVV/6Q4Ck9fAMCBeM5mnNeBxYBWzZpNm+\n",
+       "wCelYQvHhsCT6fVtxLqHYBSR2E7isE73o4CwgIaBpHHAN4GjgFnAcZLqx5S/mNneZrYP8I/A/1bV\n",
+       "n/4QIDgAuBWzhzpw7qHccLum5+H+iPMW0AV4GqC+R2JSp/sQAHA08MFOd6KAjfAbs7CAWmN/YImZ\n",
+       "LTWz1cCZwLH5BjYwjdkmuOelEvpFgDrhfstYSnMB2g1YxvCFI28BXQAcnnf5Sewpsekwj92VSMyg\n",
+       "2sXEQXm2ojvdvhvhSxPCAmqNafh4lHEXBdlcJL1a0iLgXCq8AekXARrt8Os8dwAnSnxb4nKJi+v2\n",
+       "7wacxvAFaK0FZMYy/G5k79z+rwHHAEhsILFF0UEkjpWYOcw+jDab4wNf0Hm6WYCWERbQACTNlnRy\n",
+       "9ihoUipIy8zOMrPdgVcBP2lnH/NUKkAlJrtmS1qRJrvmSvrUME4yEdgL+FsbujwcvgH8EFgEfAbY\n",
+       "X2KD3P5dgV8Bk6TmmbMlZkq8UWKv3Oa8BQQutEfk3m+Lpx8CeCPu3y3ivcDrmv8pXcOmwEbS2kqw\n",
+       "AEiM61B/xjJT6c5BfiM8KlTS2pL1Yx4zm2NmJ2ePgiZ3AzNy72fgVlCj410CrCupku9AZQJUcrIL\n",
+       "4CIz2yc9PjeMU80GrsDsyaEaVoEZS834rhnfMON83CLaGSD9MLYBbgX+CjSMJkmWy3zg48CHc7vy\n",
+       "c0CQE6DkiptB7Qu1Ez5gFDGdgZZTN5O5FNe6VyS2Am7sTHfGNFsBmzeL9OwQGwGP4yLUjQJZGRKv\n",
+       "HMH/4xpgpqTtJa2P37SePfD42km+sB9J+wKY2cMj6XMjqvxSDTnZldAIz9NJ91sRN+FuN4CZwG1m\n",
+       "rMEttAOafG4rYDHwSQaKSL0FNAc4JH0Bt8AXqmYCtF3aVsQ0GGBZdTODBIiBll4wemyFjxPdFhSy\n",
+       "EfAE8AhjTICAMxhmFn7zPJkn4MU1FwK/MLNFko6XdHxq9jpgvqS5uIv/TW3ocyFVpnUpmuyqH4AN\n",
+       "OEjSDbhp+BEzW9jieY4E3jzsXrafRdRCpXfFBQk8i3aRAGdMwed37megAA2wgMx4VGI5LjpbAs9Q\n",
+       "E6DtKfDXS2ySjrOjxHizARZVS6TUQOtWXAOpSIC2AjaWGGfGsxWeO0ik//VWeBqoKfhg3y1kAvQw\n",
+       "YygQIaXi2gTPiLJsiOaFmNm5eHBBfttpuddfBL44gm6WpkoBKjPZdR0ww8yekPRy4Cxgl6KGdRNq\n",
+       "c8xsDtIM/Idx/Ug720ZuojZHsxs1AVoMTYMAGglQvQUELma74l/Ea3PH3Y7kLjHjuVz7afgNwFO4\n",
+       "O/S6Fv6eevYDvgocPIJjDEUjAQL/m1dUeO6gxgT8BucO/Pt5S2e7M4CxagFNTM99IbpVuuCGnOwy\n",
+       "s5Vm9kR6fS6wnqTCC5ufWDOzOWnzEcAFmD1X9JkOkbeAdgNuTq/vBKbWT6zn2AIXoAfwSqvZ/6Z+\n",
+       "Doh0zF1xt9S1wKbJypmW2k6sa58J0A2MfB5oGrBfk7+jHWySnvPfha3Tc0+FnHdT5KHEKS32Zyv8\n",
+       "hqgb51mGZQGlSNFeDlrIXKEhQENQZrJram6ya39AZtaKmd/J9T+NuAnYNQnI3+F+VtI80B3Ajg0+\n",
+       "NwV4KLm2VlL7gjWygHbBBegOXOwPAB6k5i7JM52aAI10HmgysB7VBjRsilvQRRbQhArP21aSC+ta\n",
+       "ae1i5E7zUuB5LbTfCq/6uzYFlMR+FfRrOAzXAjoRj1btVTIB6oukxJUJUMnJrtfjk13XA6fSymSX\n",
+       "tA6+tqabAhAwYwXwGHA8bo1ck9u9hMZuuMwFB37XmQ24zSygGbhltQw4BF8U+xCDAxGm4yI1j5EL\n",
+       "RyYK+4/wOM3YFLiHYgHqJQtoCt7f53e6I4mJtDZYTyUnQBKTgasqtn7LMtw5oBlUEMwi8QJpVG6O\n",
+       "+soCqrS2TInJrm8B3xrm4fcEHsNs6bA7WB03AacA704VVDOazQNNAa5Mr7N5oAW4BbSyrm1mAd2P\n",
+       "i08mQHfgA169BTQNdw3eAOwlobp+tcJkPKy8agG6g4GD5Vb4gNMzFhA1a3dv4Bed7EhiEq0JUOaC\n",
+       "yyygmbntS9vaM9ZOsFvJIJNMgEQt6rQMW1JNRN8XgO/hEWpV0lcC1G2x/a3QbeHXeRbhP9xf121f\n",
+       "TFojVMAU3IUGAwMRiiygpfggMJOaBfRCahZQIxfc/bhrayRZBiYDf6J6AbqTwRbQEnrLAtoRT1bb\n",
+       "8fVXyR3YqgVU74LLAoS2rm8ocYjEV0fYzVOAd5ZsO1wLaEuaJw8eLpszcM67KsIF1yV04/xPxveB\n",
+       "dxXcybXigssEaNAcUJpPuh0fjO/DBWhj3GpoJEB3J6vnRtI8gMQbGvn0077fS3y3btfm+JqmbSQ2\n",
+       "qygPXWYBTU59ET7oLab3LKBzSAIksYU0KEBktNgIGMfwXHBZEELeAqpnJ0YutFvReCF1PcOdA5ra\n",
+       "wjlaYTKjJ0B3ExZQB5HG42HAF3a6K0WYca0ZlxTsGsoFVyRARRYQ+DzQ3UnksvUASykWoCwKDlyA\n",
+       "9kivPwR8Jw3wa5E4HPgKcDGeaSLPZNxSuw5fXPyIxA4N/qbhsgn+t2Q/sol4OPB9tMECkviixAtH\n",
+       "epwS7Ih/RydIbI6nbPrnBn3apGh7G8nunEdqAT1GgQWE/49GKq4TKH+DkRegVi2gLSrI7DAZmqfa\n",
+       "ahOT8LIsIUAd5IXAQsyWd7ojLZKFYr9A4kN1+1oVoFvS8aAmQHfg4rBWgCTWx7+sD6RNC4DnSayH\n",
+       "z6ONB16Taz8e+A7wfjyVUn3Bvcn4j/5M3KK7nBYj6yT2ltinSZMBFhC1gfAx2mMBvZDRyQqxI36N\n",
+       "5uGLkF9O48Se10vsVGFfMnEY6RzQ3ximAEm8bYiBfyLlbzAyAXp0qPPmzr8ePoA/QRvngVLux40Z\n",
+       "PQvodsIF11G62f3WkOQ6W4pHBn5acp+65MW1zHgiNW3qgkvMpbbGKBOgOxkcBbcNcF/OHZi54J6X\n",
+       "+vJvwOdyA8MHgYVmnG3Gk/gPPD/gTAYeMeM7ZrwfD5wYFNorMVHivQ0uxdvwKMFGFAnQvXgwRjtc\n",
+       "flMoHkTbzY743eoNwH/h/5tBApBEf0eoXIBW0Vpm63wU3Ba4BXQxwxCg9B3/UYPPZrRiAW2MC8kK\n",
+       "yovJFNydeC8tuuHqEgzXMxm30EfLArqdsIA6SjcHIAzF2/ConR8Cb03bskWoGWUsoDNJ7hwzHgEO\n",
+       "SwJW74LLu9+gJkD74SHif8LvprZJ+w/BB4qMpQysd5RZQPnjzSro3wuBb0scVbBvK5pHLmVh2OOT\n",
+       "BdduC6hyAcr1exkuQFPxpQZFA8f2eDRXlQPYJDx6sZQFlG5ItsQt5+X4d+QpPMCmoQDVu3NzZK7n\n",
+       "ZgP/cCygVcCGKYJuKLbEf1sP0EIgQkoUvLhJk83xG41NspvJCgkXXEfxTAm74a6fnsOMK814EPgx\n",
+       "8A/ph553v0EJC8gMy4dSmzEnvawXoB2oueow4+F0vFcD16RjLMUHQXCxuSP3+TuyfekucH38R5+x\n",
+       "kOLFjTvgg9W3C36UW9NAgNIAtkk6xyP4wLc1LkBrLSCJGcMpz5Cu92Sqt4C2w+foVgMXAacDl1Es\n",
+       "AJnlU6UATSQNXAVzfpI4sK79ZsDjZjydLPdHcbfvvTQWoPWg4RqhbDFus4G/5TmglHLqMcq54abi\n",
+       "4nP/EP2oZyY0/b5Nxn93dwHTJQ6V+GgLx2+FSfjveeOSotvV9J4AeUmDv2H2dKc7MkKux9PJH8xg\n",
+       "AXoA2DINFI0soEbUC9BeeJmHPAuAl1FbJLuUmpVTL0D5fZvh7rf8GqJFeOaH+h/njrgldTXwgbp9\n",
+       "W+FzYUV+7I2AZ9Kgl0U4FVlAv8RX9g9AYh2J4wqOmzEJ/95XLUCZ+w0zbjbjn2ic0mZH/LvQdgFK\n",
+       "wQ/gf/dD+M1H/SC/NXBxnTBtQW1ZAOmzi3EBKoqCm1j3XE8Wwl1oAeXCxFu1gGCIeSCJbdLxh2UB\n",
+       "UVvP1cjVtzn+v12G/w9fD7yrheO3wiT8d9GK67Fr6UUB6mX321rSIP4T3A03QIBStuon8AG/0RxQ\n",
+       "Ix7FXQHrpfd74y6gPFldnSyJ6x3A9ilEeF0GutiWUrOO6t1vmLESH6jqI+GyAfh/gdfW7ds6Hbco\n",
+       "Rc2m1BbeZhFOmQCtpDZ4TqfY8poG/Fxam4+vnim4oI+GAN1at61RxNZO+OR+WwVIXto8u/mYiH83\n",
+       "ikRwW9x6yd8QbEEtcAX8+3kLPoBvUXDDMZQA7ZqO0cgFNx7/7g1pAaVzr0/txmxF/XmTCzTjz/iN\n",
+       "XuZSfKCoHxJfbRAIkn23G7m9st/FnXggwqHALlIl640m4f/HVqP/upJeFKCeDEBowK9wV9hUBt5t\n",
+       "Qs0N15IFlFwS+bURezM4W/iNwIIUZAA1K2c74I46C2etCy4dsyhX343ALIndVKv6mgnQxfiPcWtY\n",
+       "O+G+Ee6OKhKgTWgsQI/hiVfXSduKRCaby3p7wT5wAVqIW5hVfv93xCeL8zyCZyuvnyfJwrVHFEVV\n",
+       "cNwdgK2T63QiPlA3EiAYaNlsycDv5DJgfnIpLmdwuqeJ+E3TxNSXcRL/mVtHtisuso0G5Qn4Iuky\n",
+       "LrgNgSdz39MB1kBKuHp1rv0MPFdi5oIbZAElUfsnKFwXN5QA5S2gPXFr70LgoNzxN5PasiA5BKhj\n",
+       "SDvi0S8LOt2VdmDGrbhL41gGuuBI27eldQsIarm7tsLvbO+u2382kC+RnolMvfsNBrrgBllAiYV4\n",
+       "8MJfgI+kgXBHvBjfajzQ4ZWp7VRcXBdRPA+Ut4AeBvbFk7reQM0FNwW/Wy4SoGzB6lsb+Oyn4AEO\n",
+       "K6k2w/PW1F33JPjPwqA5sZ3weaL6kPfSSLwAuLRucyZo21AbuB5mcCRc1i4vQPUuuLcAf0ivi+aB\n",
+       "JuLfncwS+Q3wIuCNaY3TrvjNSCMLaGI6bhkXXN79BoNdcNviNz1K554AvICaC65oDmgWfvNTZAHt\n",
+       "iN8EDmUBLQP+HrgKr4B8SK7N64DPD/WHNSPN+WzIwPnRnqa3BChzv5kNN49ZN/Jr/IdaL0B/xcuZ\n",
+       "tzoHBLV5oL2BG+rzvplxnxnn5TYtJWcB1R3rDmC7JCqNBOhG4CP4Hf8LU7vnzMjWaf0eeFV6nQUU\n",
+       "5CvH5ql3wX0cONVsrWhsmjvG7gV3/dsAF6T9hxccP3N3NppMbxf1FkTGAAsk9X8H/KZqJY0r2jYk\n",
+       "HePLwAvrFrTmBaiMBZQXhwEuODPW5L5HjQRoGR4JNx7/7h6Oh+m/FViN33Q0EqAJ+P9sgxKT6/UC\n",
+       "VD8fknkOJuN/+zO4ZZO3gKZKbC3xs3T99k/tirLV74DfAA1lAd2Ju4Yvwa29vADtwMgzMEwAHkte\n",
+       "juVN+tMUSUdJuknSYkkfK9j/Fkk3SJon6VJJla2Z6zUB6if3W8Zv0nO9AP0GnzsZjgV0Nx58UDT/\n",
+       "U8Sd+CC0PXUCZMbj+B3XljQWoMtxl8Mx+J3k8xjofjoXmJ2i4bI1PWsFKN2pbpfmoOoF6C58cIWa\n",
+       "BbQ1PrfxLIN/1Nuk4/8ELwFSz+aMngA9ULC9XgC2BlaasYra/6FVXoELxkIGFnTMBGgazQVoBv6d\n",
+       "aeaCy3Nfvm2abxyPW5YTSVZuCiQ5G/gwtfmjqekze0pr3aXk+vcYQ1tBQ1lAmXUzHf/br07n3Y2B\n",
+       "QQhvxKspvwB30Z1DnQWUC6dvJkB5CwhcgK4C9sxFgG7PyHIwQs2KhWG64CSNwxeYH4X/Vo+TVO9J\n",
+       "uA14kZntBfwnPo9bCb0jQH7hXkIfBCDUsRC/M7yvbvuNuOWzEa1bQP8DfAKfDB1SgJLIrMTvAust\n",
+       "IKgFIkzGB7D6z99kxuHJ4lkAvIEUAZb2L8f/nv2oWS9LgB0kTsQHrpuAzzJQgL4PHJ0rIf44PtBN\n",
+       "xwVkIYPdcFun411CccLU0bKA6ifxM+oHjnywQssClKLcTsVdqjcy0KqcgQ+K9S64IgvoKpq74PLU\n",
+       "X7sJuHBkQpBlUAC3fnfGF03n517+g4GLkbNj5ANNGjGUBZSdYwb+ty/DU0ftwMAw7DfhN09vwgXo\n",
+       "5wy2gLbFv0/301yAMgvoceCKtCZvPrU5pR2oRbYOl3oBGo4Lbn9giZktNbPV+HrCY/MNzOxyM8uq\n",
+       "Dl9JBeUrMnpHgGAf4H7M6uczeprk1jiYOv992v4bYE26k2zlmNfg2RZeSTkLCFx4DqBYgLI5okYW\n",
+       "UJ7L8TvL2+q2X4vP5WyFZ2Z4Che21+Hh1MfgFttaATLjHrNaGeh0TVbhd/n34MJdvwh2m7RvAbCT\n",
+       "xMZ1+/Or4YclQCnUe7EaJGJNg0yjAfxhPBBhssRJ+ER1dq2W0UIgQgou+A3wWzP+QK1OVMYMfADJ\n",
+       "W0BF2Rhm4FbCcAUoO3YWjZZlUMCMpXgqokyAsmq/e8CAtUcTaK8FtJqaBXQ3tSUHD6RzjMetnffg\n",
+       "VtDOwB9x11w+60EWTNPM4tgcX57wGDAjWbOkc2Ypp7aHtamAAJA4UuJ7Q/yteSZRK0c/3CCEadQs\n",
+       "NXAPw7Qm7d+FW4aV0EsC1Bfh10WYsTz5dev5Na273zI+if8AFpZsvxQPbS0SoGvx619GgC7DB/lG\n",
+       "ArQ1PoCB+8hnmzEfH6T2wgefVTRmJT7I3svA8ucZ2wD3mvEMbhHUF4NrhwW0PT5gbddg/0Q8Sqto\n",
+       "rVo2cLwYeC/u4liS9rVqAb0fv1aZH79IgK6gyRxQmq+ZhF//vDuzkQsRfEDP3xXnBWgStajFjI8A\n",
+       "v04BKY+lz24H7J8LupjIyCygvABNxa2P6dRuSK7GXZ1Z9NwDqU/z0uv5yWpZRi3qE9xyuZ3mA/5a\n",
+       "z0Bu3pPUh73kJcAn47+J/DV+Ea2VNclbQMvxG5mPShyWNZA0W9LJ2aPgGKXnzyUdhpfHGDRP1C56\n",
+       "SYD6cf5nKK7F3Y4tk6yH/RoMgkXcgd813luw73R8cd0ulLOAoFiA9iU3OJnxUCa8Ztyfzr8rgwvw\n",
+       "5XmMoQXonvT6Glz08rRDgLJs4o2slWbzJ5kAzMKzYWwPa+votCpA+wK/zN285OfVNsKjum7A73Ab\n",
+       "RcFl1XLvobwFdCsDXVWZ9ZIJQd4Fhxnnm61NZXM/Lr43p/7MrDtGmXRLRRZQvQvuWmouuLtxD0M+\n",
+       "Q/1ifJ4QvJDcn3N/W34eqLQFVLB9Hh6WvS0ubPU56PbF3dACkDimIEvFUbmgjHoX3Btwcf9w1t7M\n",
+       "5pjZydmjoE93M/B7O4OBqbrSebUX8F3gGKsw6XMvCdABsDbdzJggpdu5ZuiWbWEpsKzIEjPjAeAs\n",
+       "3JpoKkBmLMOtrpvqdi3Ef4gzKRY58B/swTQXoJX4AHEvbuHsmYVbpwnjidQGzmvwCeY8LQUhSKwv\n",
+       "8cG68OhMgBqJRTPrIROg3fGkr/emMu7gAtRKaYtZDLRwbwFmpr5mwnIXPshsjF+7+jmgbdN515aB\n",
+       "T5/PrlMRt+LuzWywbOiCK+ABPDpuAW6dZW64zAIajguu3gLKBChzwd1jxjIzXpFrc7iZu73N+IYZ\n",
+       "n8n9bXlxbWoBJesmn0g4zwL8f7Qz/vvKp9gCd8+Nx92SmwK/I1dTKQXlnAv8S9qUF6B5eKaRvYFD\n",
+       "G2QVKeIaYKak7SWtj7vLzx74N2lb3LX7VjNbUnCMttFLAnQ9Zs0GpmBk3JIejfhaeh7KAsKM55nV\n",
+       "8s+lbWvwH+RuNB6c5uMRdENZQOvig8pd+OCQJTzdCo++ykS0SIBanQP6Mv635y2tPfCBqpEF1CgA\n",
+       "AWoD2SzcgstzA75+ZUCKFRUUsUuiuys5oU9zEI+mfmUBCPfgVtbKdF3qBWgGLkAP4i6dcfhA93hy\n",
+       "Yw4iZb9YRe361QtQvQsuz/3UBOhK/MYSRhaEsNYCSqI4FQ86yCyge+oP0KQk/W0kCyjlxzsMF8pG\n",
+       "FlAj6ye7TtnfezsDowC3wsVnHi5yWfDIy3OH+DvcPfvxlKFhrQCZcYcZ/2zGvfiyg9dQAjNbA5yA\n",
+       "zxEvBH5hZoskHS8pCwr5DB7g8B1JcyVdVebYw6GXBGisud9Gm/MZnDJnLWZcj2cXWNaoTQmuTc/3\n",
+       "N9g/Lz0PZQFBzYo6jVo01TYMtK4W4muYsgSm2eC6PLXbusDlcUTu9XH4gPB7GFDAbg98Ynbbus9m\n",
+       "A+dQLrgpuHgMEKC0UPVScm7XNAjeI9VW1Se2Ax5Og1yebB5oBm7RPkatbAG4MGyZc+tsm9plGQ6m\n",
+       "0NyCy1hCzVWVCVBRFFw99+OWSZEADTcIIW8BZUEnN6XzZFGRZbkV2FFiN9wK+EeztRZQkZVRGBma\n",
+       "Yz4eYLOUgRbQPnhJldtwi2sW/r85OvfZ/fHFv/+Fe3/eQu3/mOcXFC85KMTMzjWzXc1sZzP777Tt\n",
+       "NDM7Lb3+JzPb3Mz2SY9W5qlaopcEqC8DELqF5O5rGvBgxo9bjcir41pgeS6sup4sb9lQFtCKXF9/\n",
+       "ARyc8p4NuNtNg+o8agsCN0ufXZMilVaQm0yXmAqcL61dDPoO3Mf+J5KrKK152RmfM9g299nxwJ1p\n",
+       "bUuzAfwRfPBZnsShnvPwRLEZWdLY30q8U+IlSTR3pzjA5CZyApS23ZP+1izP4H3UJtozFxzU3HDN\n",
+       "5n8yluDXAVp3wYEL0FxgtzRflXfBDbCAkhv0BokXp03N5oC2xK3glfjC0ieG+l7XcRtZwmP4dzP+\n",
+       "mLYvBzYrCKNuaAEl5uNCPcACwud/rkvbd8D/n6cDe+cs4P3w8Piv4gu5/wNP31XPH4ADMzecxAYj\n",
+       "DPceNXpJgCozA4NR4zIGu53yLASeY2gLKC8yjwNn4FZQ0d3ul4GvpXDs+qzjC6jN50AtZDZzt83C\n",
+       "8+hdQc0C2hmfW7mZgS64Q/FB9Pk0H8AfxoWy0XU4D3hZbgA5HL8DficeiPMjPH9g/fxPxqV4nai9\n",
+       "qQnQ3dTmDiDNFaXX21EToPvwAbKMAN3KQAF6jPIuuCeApUkMF+Nu18wCKio6+Np03B+ndU87UWAB\n",
+       "5dxvmcgtY3AaqqFYBHwB2MuMH2Qb083MUwV9G8oCyqz6pTS2gHbA/5/X4cJ3ZGqzP3B1ujm8Pt0A\n",
+       "DgoYSL+B66hF1H2RwRnou5LeESBfNBX0MGYsMuPgJvufxAfVZj/oxxgcxPAVPKT5gPp9Zvwad/V8\n",
+       "hdr8T0a9AO2bnmeleZdJ+CA2D3flTUrtF1Cr/ZL9hl6GD1B7MbQFBI3D4xfhc1wzU1qdfYG/mfFH\n",
+       "M44DTsJdoY0E6Of4xPXrKLCAErdQy5iwF7XcilmGg7IuuCILaHN8XGkUSn8vcGNunu6G1IeGFhAe\n",
+       "bv5hPBBmKf4/+L9sZ4r0fBbPGpLv+1205n7DjGfM+FxK/VRP0TzQDjQXucyqv52awIP/X+em7ZkL\n",
+       "biH+v3t1miPamMEZ1RtxNbVFrwfjgtT19I4ABWOFQ6ndNRaxksEiczvwLeAfKB5wPoAHI/ycwRZQ\n",
+       "vqTDvvgPeRZuBd1kxnPJ7Xgtfoe5B7VM4iuorbp/KV7ldk+GDkKABhZQmhw/D1+dfyheNPDxXJNf\n",
+       "AbNxt+KgY6TPfwwX5Gxx8z0MtoCyDOXrUgvDbdUFN2AOKAnBM/hC40aT/OfjCz8zsvVfeQtorQCl\n",
+       "DNLb4xFiH8UDTg40GxRlmVlfWcJRGIYADUGRAB3C4CSweRbj4fZZBoapEtNTX2/BBWh3PFrvVuBn\n",
+       "uIB8jmT9lOzb1cB+6aZldxi16NkREQIUdBVmPDrEj+4vuJDUcwq+lmlp0TFx6+iL1HLvgYdx11tA\n",
+       "P8UFqN7CuAIf2N+HzwlBWreT5n2m4z78zAIqHMBzizGbLRD+PC4gn8YjnPKffwz3+e9MExEz4//l\n",
+       "FkXezMC1HpkFtA8wN3e978Pdg6XngFQrJJdZWCto7H7DvMJqPrR3Hu4uzKLgspIbkng7LrhfT/N2\n",
+       "T5txaYPvRxYAkXfBzaO9mfMHCFCyfg/B3WaFpH6/PVl8mcAfA5xjxrP4d3ZrUuZ4s7Xre95Ga9MO\n",
+       "V+E3SPvjCYhbTd/VEXq+pGswtmi0LsqMJyT2gAHWQn7/s3gSxjwL8Yza6+CD1xb4nfbHGSxA5+NJ\n",
+       "P49MEYFQq4A5CxeKBbgwPElzF9b3GFyjKd/X2ySOxSOf/rWgyQ+Bl6TBakjMOL1uUyZAmRso4xI8\n",
+       "Hc1qPPii2TEfkXgOd7nVC1CjCLgiMgHKUjBlLriXA5/CU+X8tcRxsiwMW1KrRPuNFvpRhnoLaDc8\n",
+       "O3Wpeab0HV2NZwf/atr2tMTd5L5rZlwl8RoGr6Vrxp3AOFy8mllkXUVYQEHfYMaqFlwWmTWRVXPd\n",
+       "BxeFO/EMAgcxcFD4ixl75MSH1HY74N3A2emu8zY82q7RIk7M+HCDCLh8m6uAbcy4smD3BdTCl4fD\n",
+       "HbilcBA5AUrneh5uYV5R4jhZIEJpC6ge8wwYzwBPJVdnFoRwOPBDMy4o+T/Nu+CGmr8aLvUCdAgD\n",
+       "syuU4X58fc+fcttuY3BI/h/NSs//ZK7Xq3HLKQQoCHqEzA23L3Bd+iEvwqPemkXsgVtAH8Lza/0s\n",
+       "bZuHh1iPOGgmuQ6LtpvZ8NdjJWvwNjy/39y6ffcngSwz+F2Di++wBSgxD9YKcmYBHUZrmU+yUOz8\n",
+       "HFC7qRegQxmeAF1Yt37rXLycyUi5Gg/EuKwNxxoVwgUXjHUW4GG+++LzSOCWz94MLqldz524C+ol\n",
+       "uaiuedTCubuZW3DrbfFQDZvwUbzK6TSG74IDv2bZeqyV1AIhrm74icGswANN9qQgt1mbeAR4kcTH\n",
+       "cAt3Nh4s0ArLqBNWs7Xfu5FyJXCLWWUWYNsJCygY61yDzzd8G6+NAi5AN5dYdHsO8FIzbs5tu5bW\n",
+       "1550glvwyepnh3uAdBf/Ctxll93R/w+eOaIVsnLrpOeNgMutQSqgBjyK50z797r/Rzu5Cr9p3xJP\n",
+       "qAceZTUAAAbWSURBVDqX5umring3nuSzCv7MwEXMXY96obq1JDOznljZG/QWKYprXF5sJPYFjjDj\n",
+       "i8M83iYFKXK6ComjgVlma6vNdrIvU/Hgjp+m6/c0cJIZ/93CMfYANjVbm409oPvHzhCgIAi6ComH\n",
+       "gFealQqECJrQ7WNnuOCCIOg2/p5IvTUmCAsoCIKgT+n2sTMsoCAIgqAjVCpAko6SdJOkxZIK64pL\n",
+       "+nraf4OkXghfDYIg6FmGGpcl7SbpcklPSfpw0THaRWUCJGkcnvrkKDxVyXGSdq9rczSws5nNxFPI\n",
+       "f6eq/gQ1JM3udB/6hbiW7SWuZ7WUGZfxjPEfgOojJKu0gPYHlpjZUvNSCmcCx9a1OQavb4KZXQlM\n",
+       "kjSVoGpmd7oDfcTsTnegz5jd6Q70OUOOy2b2oJldAyPP5jEUVQrQNAaWb74rbRuqzXSCIAiCKigz\n",
+       "Lo8aVQpQ2fC6+giN7g/LC4Ig6E26anytMhfc3QwsWTyDwTma6ttMp0EaE0lddeF6HUkndboP/UJc\n",
+       "y/YS17NSyozLo0aVAnQNMFPS9nhVwjcCx9W1ORs4AThT0oHAo2Y2KJFhN8exB0EQ9BBlxuWMysfd\n",
+       "ygTIzNZIOgEvLzwOON3MFkk6Pu0/zczOkXS0pCV4IbF3VNWfIAiCsU6ZcVnSVngm8gnAc5JOBGaZ\n",
+       "2ap296cnMiEEQRAE/UdXZ0Ios5A1aI6kpZLmSZor6aq0bbKk8yXdIunPkiZ1up/diqTvS7pf0vzc\n",
+       "tobXT9In0vf1Jkkv7Uyvu5MG1/JkSXel7+dcSS/P7Ytr2QRJMyRdKOlGSQskfTBt75nvZ9cKUMkF\n",
+       "U8HQGDDbzPYxs/3Tto8D55vZLnh55493rHfdzw/w72CewusnaRbuU5+VPvNtSV37G+sARdfSgP9J\n",
+       "3899zOxciGtZktXAv5rZ84ADgfenMbJnvp/d/A8ts5A1KEf9ZOLaBcDp+dWj253ewcwuAZbXbW50\n",
+       "/Y4FzjCz1Wa2FFiCf48DGl5LKJ7sjms5BGZ2n5ldn16vwkvIT6OHvp/dLEBdtWCqhzHgL5KukfTu\n",
+       "tG1qLtrwfiCyT7RGo+u3DQNDWuM7W44PpFyQp+fcRXEtWyBFte2Dl+Xume9nNwtQREe0h4PNbB+8\n",
+       "7PT7JR2a32kehRLXepiUuH5xbZvzHWAH4PnAvcBXmrSNa1mApE2AXwMnmtmASrzd/v3sZgHqqgVT\n",
+       "vYqZ3ZueHwR+i5vc96dQSyRtDTzQuR72JI2uX+mF1YFjZg9YAvgeNZdQXMsSSFoPF5+fmNlZaXPP\n",
+       "fD+7WYDWLpiStD4+eXZ2h/vUU0jaSNKm6fXGwEuB+fh1fHtq9nbgrOIjBA1odP3OBt4kaX1JOwAz\n",
+       "icqeTUkDZMZr8O8nxLUcEkkCTgcWmtmpuV098/2sMhPCiGi0YKrD3eo1pgK/9e8p6wI/M7M/S7oG\n",
+       "+KWkdwFLgTd0rovdjaQzgBcDUyQtAz4DnELB9TOzhZJ+CSwE1gDvs1hot5aCa3kSMFvS83FX0O1A\n",
+       "tiAyruXQHAy8FZgnaW7a9gl66PsZC1GDIAiCjtDNLrggCIKgjwkBCoIgCDpCCFAQBEHQEUKAgiAI\n",
+       "go4QAhQEQRB0hBCgIAiCoCOEAAVjCkmXpuftJDWqBDncY3+y6FxBEBQT64CCMYmk2cCHzexVLXxm\n",
+       "XTNb02T/SjPbtB39C4KxQFhAwZhCUlZW+BTg0FQE7URJ60j6kqSrUmbmf07tZ0u6RNLvgAVp21kp\n",
+       "u/iCLMO4pFOADdPxfpI/l5wvSZovLw74htyx50j6P0mLJP10dK9GEHSWrk3FEwQVkZn8HwM+kllA\n",
+       "SXAeNbP9JW0A/E3Sn1PbfYDnmdkd6f07zGy5pA2BqyT9ysw+Lun9KfN4/bleC+wN7AVsAVwt6eK0\n",
+       "7/l4gbB7gUslHWxm4boLxgRhAQVjlfoiaC8F3pZyal0BTAZ2TvuuyokPwImSrgcux7MLzxziXIcA\n",
+       "P09Jnx8ALgL2wwXqKjO7J+Xkuh7YfgR/UxD0FGEBBUGNE8zs/PyGNFf0eN37w4EDzewpSRcC44c4\n",
+       "rjFY8DLr6OnctmeJ32QwhggLKBirrATyAQPnAe+TtC6ApF0kbVTwuQnA8iQ+uwEH5vatzj5fxyXA\n",
+       "G9M80xbAi/A0+EWlqINgzBB3W8FYI7M8bgCeTa60HwBfx91f16U6Kw/g9WnqK0r+CXiPpIXAzbgb\n",
+       "LuN/8dT415rZP2SfM7PfSnphOqcB/2ZmD0jancEVKSMsNRgzRBh2EARB0BHCBRcEQRB0hBCgIAiC\n",
+       "oCOEAAVBEAQdIQQoCIIg6AghQEEQBEFHCAEKgiAIOkIIUBAEQdARQoCCIAiCjvD/AXFRJnS871y9\n",
+       "AAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f79387025d0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "_, ax1 = subplots()\n",
+    "ax2 = ax1.twinx()\n",
+    "ax1.plot(arange(niter), train_loss)\n",
+    "ax2.plot(test_interval * arange(len(test_acc)), test_acc, 'r')\n",
+    "ax1.set_xlabel('iteration')\n",
+    "ax1.set_ylabel('train loss')\n",
+    "ax2.set_ylabel('test accuracy')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The loss seems to have dropped quickly and coverged (except for stochasticity), while the accuracy rose correspondingly. Hooray!\n",
+    "\n",
+    "Since we saved the results on the first test batch, we can watch how our prediction scores evolved. We'll plot time on the $x$ axis and each possible label on the $y$, with lightness indicating confidence."
+   ]
+  },
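The code cell that follows (execution count 17) generates the figures whose image outputs appear below; its source is not shown in this part of the diff. As a rough illustration only, the kind of plot described above could be produced along these lines, assuming the training loop stored the saved softmax outputs in an array `output` of shape (niter, batch_size, 10) and that the test network is reachable as `solver.test_nets[0]`; those names, the 'data' blob name, and the pylab-style plotting names are assumptions, not taken from this excerpt:

    # Sketch only (assumed names): show each of the first 8 test digits and how
    # its per-class scores evolved over the first 50 training iterations.
    for i in range(8):
        figure(figsize=(2, 2))
        imshow(solver.test_nets[0].blobs['data'].data[i, 0], cmap='gray')  # input digit (assumed blob name)
        figure(figsize=(10, 2))
        imshow(output[:50, i].T, interpolation='nearest', cmap='gray')  # rows: labels 0-9, cols: iterations
        xlabel('iteration')
        ylabel('label')

Lighter cells mean higher scores, so a digit the net learns correctly shows up as a bright horizontal band settling on its true label.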
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "metadata": {
+    "collapsed": false,
+    "scrolled": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAFZtJREFUeJztnVtsY8d5x//f4Z2H94skaiXvemUbsAsD9otbwA2ahyCw\n",
+       "USBpXxoYKFD0EvShN7QPddyHJo9pgAZF+1CgiB30hqRFCxfpQ1vbRQukD724sGOnaydZY8XVihJF\n",
+       "iXfykDwipw/kNzuHklbiRRRJzQ8Y8OgsdXYk/vXNN9988w0JIaDRjIJx1R3QLB5aNJqR0aLRjIwW\n",
+       "jWZktGg0I6NFoxmZsUVDRC8R0cdE9CMienWandLMNzROnIaIXAB+AOAzAHYB/A+AV4QQH023e5p5\n",
+       "ZFxL8wKAu0KIbSGEDeDbAD4/vW5p5hn3mN93A8CO8vUDAD+uvoGIdKh5wRFC0Gn3x7U0WhDXmHFF\n",
+       "swtgU/l6E31ro7kGjCuadwE8SUS3iMgL4AsAvjO9bmnmmbF8GiHEMRH9OoB/AeAC8LqeOV0fxppy\n",
+       "X+jB2hFeeKbtCGuuMVo0mpHRotGMjBaNZmS0aDQjo0WjGRktGs3IaNFoRkaLRjMyWjSakdGi0YzM\n",
+       "uElYAAAi2gZQBdAFYAshXphGpzTzzUSiQT8Z69NCiOI0OqNZDKYxPJ26EqpZXiYVjQDwDhG9S0Rf\n",
+       "nEaHNPPPpMPTi0KIPSJKA3ibiD4WQnx3Gh3TzC8TWRohxN7gtQDgTfS3tmiWnEl2WAaJKDy4NgF8\n",
+       "FsCH0+qYZn6ZZHhaBfAmEfFz/loI8dZUeqWZaxYyR9gwDBARiEheq/eY0342IQSEEOj1evJafR9f\n",
+       "D79eR87KEZ7UEZ45hmHA5/PB6/XC6/XC5/PB7/fD7/fL+71ez9FUAfR6PbTbbdk6nY58D7+/2+2i\n",
+       "2+3Ka42ThRSN1+tFKBSSLRKJyBYIBNDtdnF8fOz48NmidLtd1Go11Ot11Go1NBqNE++3bVs2LZqT\n",
+       "LJxoiAg+nw+hUAjxeBzxeBzpdBqpVAqpVAqRSASdTkd+6MfHxw6LY9s2isUiisUifD4f3G63QyS2\n",
+       "bYOIpMA0J1k40RiGAb/fj0gkgmQyidXVVWQyGaytrWFtbQ2xWAydTkc227YdQ5Vt2wiHwwgGg3I4\n",
+       "U9/f6XRgWZZsbvfV/IrUIVX1wYaH26vwuRZONC6XC6ZpIplMYmNjAxsbG9LKsKVhC8Ov6i/7+PgY\n",
+       "wWAQ8XgcKysrqFarDstk2zaazSYsy0Kz2USr1Zr5zzjsk7VaLViWJV/55+I2a+EspGhCoRBSqRQ2\n",
+       "Nzdx69YtxGIxxGIxRKNRmKZ5wpFVZ0m9Xg/xeBzNZhONRgOWZTkExqLhZlnWzH9G9rG41Wo1lMtl\n",
+       "2ZrNJtrttnyvFs05GIbhsDSPP/44QqEQTNNEKBSC3+8/MZ0eNucsDvUvlj8oVTQsqlnDfWMLeHh4\n",
+       "iHw+D4/Hg16vJ8MK3W4XnU5n5v1bSNH4fD6Ew2HpBPOU2+/3w+v1noi78C/5rFe2Sr1eD8fHxw4r\n",
+       "02w2Hc9Sv28aqP1jbNuW4YB2u41IJCIF0263Hf7ZNPtyURZONDxlzufzyGazMAwDgUBANq/XK4cn\n",
+       "FoPL5XI0t9sNt9strw3DgGEYcLlcMlDo9XpBRHC73Sd8DH4/t1E4zbFVA5SGYZwYLrvdLlqtFlqt\n",
+       "lhyWhBBot9taNBeBRVMoFLC9vQ3bthEMBmXj2RCb9263C6/XC4/HI199Pp+cOfG1eo+I4PV64Xa7\n",
+       "4ff7Hf5Ft9t1CO6is6vhAKMq7GFBs8VT36OK5vj4GO12G7VabWTRToOFFE29Xkc+n4dhGGg0GggG\n",
+       "gzBNE6Zpwuv1yl9wq9WCbduO4cvv9ztEFgwGZZBQCOH48PhaFWGn05HRaBbheZzmU6nN7XbD4/HI\n",
+       "V36vao3UCHar1UK9XkexWJxP0RDRGwB+GsCBEOLZwb0EgL8BcBPANoCfE0KUL7GfEhbN4eEhjo+P\n",
+       "Ua1WpWBYNOrsx7ZtBAKBU0XCjWM77GSy1fF4PPB4PNJJBh4KgIezi4pmuPH/xT4NWzW/339iDc22\n",
+       "bTQaDdTrdTQaDZTLZWlV53V4+iaAPwHwF8q9LwF4WwjxtUHh6S8N2qXDUV3LsmAYBrrdLprNJur1\n",
+       "Ovx+P9xuN9rttsOU8/oUi0H1gQKBAMLhMEKhkHzlD499JI6PcKxk+PvPY3g2xw4uW65oNCrjTMlk\n",
+       "Eh6Px7Egy8MVW5l2uy19nbkM7gkhvktEt4Zufw7ATw2u/xzAv2NGoun1euh0Omg2m/Ja9VdcLpdj\n",
+       "Ot3tdh2mn9+r+jfBYFBao+FXv9/vsFzNZhOmaTos13l/7cPRXFXU7XYbmUwGt27dgmEYCIfDICK4\n",
+       "XC4YhiG/7yzRXAXj+jSrQoj84DqPfm7NTGBLw3+xPPvhXzJbH3W9SZ3p8PtU53PYGWZRmKaJQCDg\n",
+       "GBoajYZjaDNN80KiUfujRnhbrRa2trZgGAZCoRAymYx0rvm57DgPi0ZdUpglEzvCQggxy/p6LBrb\n",
+       "tqfyPPYnuHk8HhkoZPHU63XHqng4HHYMaeehCkZdFmDhCCEQj8dx48YNdDod+Hw+EJGcjrNg2u22\n",
+       "jB91Op0rWUIAxhdNnojWhBD7RJQBcDDNTs0S1TFlc99ut6Uvoa5F8QfVbrfhcrkuvBI+PDzxB81T\n",
+       "fHUBlf0Znmp3Oh3UajVUKhUUi0UUCgWUSiXU63V0Op2FEs13APwCgD8YvP7D1Hp0BfR6PQAPBdRq\n",
+       "taRgWq2WdFjb7Ta63S7a7bacOnOw7VEMz5zUBDKObpumCb/fL0WjLm3w2tPR0REKhQKKxSLq9bqM\n",
+       "Ds+ai0y5v4W+05sioh0Avw/gqwD+loh+GYMp92V28rIZjomw49lqteByuRxBNp6xqBZnlP8HAEKh\n",
+       "kHTCI5GIw9K43W45NLGjzKJRLY1lWVK8s+Yis6dXzvinz0y5L1fGcF4Kx2TOYtJZi8fjQTQaRSAQ\n",
+       "QCwWQyQScVgaFm273Ua9Xke1WpWiOTw8RKVSkYHBubQ0mskZToAPh8NIpVLY2NjA5uYmNjc3kUql\n",
+       "YJomDMNAq9VCtVrF0dERjo6OkMvlcHR0hFqtJpdGeIZ4FWjRzAB1uu9yuRyieeKJJ5DJZJBOpxEK\n",
+       "hWAYBjqdDqrVKg4ODrC7u4tcLofDw0MpGrYwV7VTQotmBnCwjqf1nETGokkkEjKBTBVNoVDAzs6O\n",
+       "tDQ8Y+Kptk73XGJYNByRVi3N1taWdIy9Xq8UDa/k7+zsYG9vzyGaq05416KZARyL4SjyysoK4vG4\n",
+       "XGDlPB52gDkJSw0AztN2Gi2aGcD7tJLJJJLJJNLp9Kmi4ak8x4RarZZMbmcLMw87PrVoZgAH8FKp\n",
+       "FDKZDFZWVhCLxRAKhaRo1BgR5+6oa1RXuao9jC7UOAN4eEomk1hfXz8xPPECpbqafZalmQe0pbkE\n",
+       "htNBE4kE0uk01tfXsbm5idXVVcRiMZlw1Ww2UalUUK1WUalUcPfuXezs7KBQKKBer0vRXNUC5Ymf\n",
+       "76o7sIyo6RZ+vx+JRAIrKyvIZDLY3NxEMpmUEWEigmVZODw8xN7eHnK5HO7fv4+dnR0Zm+HF0nkZ\n",
+       "nrRoLgEWDaegDosmHA4jEAhIS8OiyWazuHv3LnK5HPb39x2W5iojwMOMmyP8FQC/AqAweNtrQoh/\n",
+       "vqxOLhqc72uaJqLRqBTN+vo6HnvsMRmP4ZROVTR37txBoVCQuylrtdq5a2Gz5iKO8DcBvDR0TwD4\n",
+       "uhDi+UHTglFg0fCGvmg0KlexOU+n3W7LJPFyuYxKpSJbvV6X24XnYTgaZtwcYUDXDz4Tj8eDQCAg\n",
+       "K1vwKjZbGDXtodVqnRAO58pwWuu8McmU+zeI6HtE9DoRxabWoyXA7XbLXQ6JRAKRSERuOeH0TU57\n",
+       "GBZMtVpFo9FAq9WaW0szrmj+FMDjAJ4DsAfgD6fWoyWARROJRORi5GmiUYcnFs4iiGas2ZMQQuYE\n",
+       "E9E3APzj1Hq0gAwXFPB6vQgGg9Kn4dkSR35brRYqlQry+Tzy+Tz29vZQKpXQbDYdaQ/zKBhgTEsz\n",
+       "SCZnfha6frBjcxuLhi1NJBKRG/lYNOVyWRYx2NvbQ7FYlHu55lkwwHg5wl8G8Gkieg79WdQ9AL96\n",
+       "qb2cc4bL0w6Lhi3NsGj29/cdouGikfNejnbcHOE3LqEvCw0Lx+VynTk8qaLh4SmbzeLo6EhWuLrK\n",
+       "jLyLoiPCU4CXC3hv9+rqKpLJpBSMz+eT23G73a7D+eUAHtfSm3fBAFo0U8Hn8yEajSIajSIWi8mc\n",
+       "X05/APor2JZlodvtOgJ5alxmXmdLw2jRTAHev7SysiJL1HKiVTgcdmyntSzrxBSbZ03ztlxwFlo0\n",
+       "U4BFk06nsbm56RANF0vi2Mtpywbq9lptaZYUtWCA2+1GKpXC2toaNjY2cPPmTayuriIajUpfhh3f\n",
+       "g4MD5PN57O/vy12S87R6fVG0aMbA4/FIx9fv98u0hxs3buCxxx5DPB5HJBKRh3s0m01HXGZ/fx/l\n",
+       "chmWZS2EZRlGi2YMPB6PnFKHw2Gk02mHpeFKWlx6rdlsolQqYX9/H/fv33dYGi2aawAROVaxuVx+\n",
+       "JpORogEe7g8fFk02m0WxWESlUtGiWWY4cMdRX66Yzgd5rK+vI5VKIRwOw+v1yqJLnP5wVlxmUWZL\n",
+       "w2jRXAAWy/BebD6bYWNjA4lEQtbfOz4+hmVZsuxaqVRyzJjmfRX7PLRoLoAqGN5Wm06n5bZarsoZ\n",
+       "DAZl6oNlWbJEiCoa1cospaUhok30S8GuoL84+WdCiD++yjrCVwFbGq7JFwqFpGiefPJJWZuPdxdw\n",
+       "QaRarSYPJFOFw/UC5301+yzOS42wAfy2EOLHAPwEgF8joqfxsI7wUwD+FTMqB3sVGIaBYDAoT315\n",
+       "4okncPPmTWQyGXm+lN/vl7kynJFXLBaRz+exs7ODfD4v82WGK48uIo+0NEKIfQD7g+s6EX0E4Aau\n",
+       "sI7wrFATq8LhMNbW1qTje/v2bayvryMej8ttKABknbxKpYLDw0Pkcjlsb29jb28P5XJZVvJcdC7s\n",
+       "0wySy58H8F+4wjrCs0AVDNf3XVtbw9bWFra2tuTRh4lEAn6/33FgarfblbVldnd3sb29jcPDQxSL\n",
+       "xYWdYg9zIdEQUQjA3wP4LSFEbejs65nWEZ41bGlWV1extbWFZ599Vq5o8y5J3szGvky1WnVYmlqt\n",
+       "Jhcsr4VoiMiDvmD+UgjBpV+Xpo7waagVzrkMPgfy0um0LG/P+5hs25b7sCuVCnK5HA4ODhxBPE59\n",
+       "WAYe6QhT36S8DuCOEOKPlH/iOsLAEtQRHsYwDFmylY/64SrmoVBICobLwVqWhVKphFwuh08++QQP\n",
+       "HjzAwcGBXF+66rMMps15luZFAD8P4AMiem9w7zUsWR1hFXV6zSe2qKIJh8OOk+mAvmiKxSJyuRzu\n",
+       "3buH3d1dHBwcSCvDDvIiz5hUzps9/QfOtkZLU0d4GA7ieb1eh2hYOOqyAnDS0hQKBbkf27KspREL\n",
+       "oyPCgGMzPi8TxONxxGIxJBIJrK+vI5FIwDRNuN3uE4e/7+/vy8YxmVqtJh3kZRIMoEUDwLlM4PV6\n",
+       "ZcUqzpG5ffs2VldXYZomgL5lUbfRZrNZ7O7uIp/PyyqcfKrdMnLtRTO8RBAIBJBIJHDjxg0Zl+Hc\n",
+       "X9M0IYSQwxHvkLx//76cMR0dHcnV7UXZXTAq1140AKRo2IdJJBLY2NjAU089hWeeecZxRibw0PHd\n",
+       "3d1FNpvF/fv3HZaGh6RFS+O8KNdeNLwjkkURi8Uc50mmUqkTJ+PycYjlclmeisK7CjhJfBktDKNF\n",
+       "Mzgdl8uCpNNppFIpuWeJj9PhYwwByMgvlwrhEmc8HC2zYAAtGgAn6/yyaEzTlEE8jhADD0XTaDTk\n",
+       "ZjdOqroOXHvRsKXhqlVra2uO3ZF8niTHZLiq+GmWRotmiVFPzOUttYlEAqurqzLfV82TUVETxvkc\n",
+       "g2WL+J7HtRMNn47Lzq1aspWTxJPJpCxBrznJtRQNb3ZTa8ik02kZzOOFSZ/Pd9XdnUvOW+XeJKJ/\n",
+       "I6L/I6LvE9FvDu5/hYgeENF7gzZcMnau4ZKtoVAI0WhUnoxy2nYUzUnOszScI/z+IBHrf4nobTys\n",
+       "I/z1S+/hlOHhSRVNJBKRp9aGQiGHzwM4T9NlX0Y9R/I6TLNVxs0RBha0jrA6PLFoQqEQAoGAPEZH\n",
+       "nV4DkALhbSfqARfqscvXRTgXLtSo5Aj/5+DWwtYR5pKtLBr1eGMWjTrNVs9g4qSqTqcjE6sWfXfB\n",
+       "qFxINIOh6e/QzxGuY4HrCKuWxjRNRCKRUy0Ni4aHJj4nWxXNdZtqM6PkCP8V5wgveh1h9SBSn88H\n",
+       "j8cDt9stxcIi6PV6cneB2tRzsnk1+6oOVr8KzttheWqOMBFlhBB7gy+Xqo4wO7sshE6n4yhGxBvg\n",
+       "+DymZrO5dDnA5zFOjvDvAXhlmesId7tdeSRgs9lEoVBANpvFvXv3cO/ePRwdHcmm1svTlgaPzBH+\n",
+       "p8vpznzAorEsC/V6HQcHB8hms/joo49w584deRgpny953abd1y4iDDhTG0qlkoz+ulwu2LaNZrMp\n",
+       "W61WQzabxc7ODnZ3d7G3t+eYfl+XRUqVaycarudbKpXkRrdGo4HDw0Ps7u4iHo87zmKyLAsPHjzA\n",
+       "gwcPUC6Xr+2MSYUu6wef1626XMlKTd9cW1uT602maTpWrzudDkqlEkqlEorFIsrlsiMus8x+jBDi\n",
+       "1ADutRMNT7d5w5tt244NcG632yEINbDHDXhY73eZrY0WjWZkzhLNJMcRaq4pWjSakbm04UmzvGhL\n",
+       "oxkZLRrNyFyqaIjoJSL6mIh+RESvTuF520T0wSDF9L/H+P43iChPRB8q9xJE9DYR/ZCI3holN+iM\n",
+       "542dCvuI9Nqx+nhp6bq8ZjLtBsAF4C6AWwA8AN4H8PSEz7wHIDHB938K/USyD5V7XwPwu4PrVwF8\n",
+       "dcLnfRnA74zZvzUAzw2uQwB+AODpcfv4iOeN3UchxKVamhcA3BVCbAshbADfBvD5KTx37DRTIcR3\n",
+       "AZSGbn8O/bK2GLz+zITPA8bsoxBiXwjx/uC6DkAtwTtyHx/xvLH7CFzu8HQDwI7y9QM87PC4CADv\n",
+       "ENG7RPTFCZ/FXEZ524lTYaddgnea6bqXKZrLmMu/KIR4HsDL6FdP/9Q0Hy76dnzSfk+cCjtcgnfS\n",
+       "Pk47XfcyRbMLYFP5ehN9azM2YpAtKIQoAHgT/SFwUvJEtAb0MxIxYXlbIcSBGADgG6P28VEleMfp\n",
+       "41npupP08TJF8y6AJ4noFhF5AXwB/VKyY0FEQSIKD65NAJ/FdNJMp1redvChMiOlwk67BO+j0nXH\n",
+       "7SOAy5s9DTz2l9H32O8CeG3CZz2O/gzsfQDfH+d5AL4FIAegg76/9YsAEgDeAfBDAG8BiE3wvF9C\n",
+       "/9SaDwB8b/Dhro7wvJ8E0Bv8jO8N2kvj9vGM5708SR+FEHoZQTM6OiKsGRktGs3IaNFoRkaLRjMy\n",
+       "WjSakdGi0YyMFo1mZLRoNCPz/yU19i71FpCwAAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f7938797710>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEjpJREFUeJzt3X+QXXV5x/HPJ793ITSAiWyysaEttsBoDRFYEEGq7VBG\n",
+       "0bZWpa1S27HTUQulyojMtH+1o5XpiI7TzlgoCv5qqxZ1WhFaU0TsJhASfiQBsUNawq9N24DEZZMl\n",
+       "PP3j3oRls5s9T06+e84N79dMhnvOfe73fO/5nnv34Zxzv48jQgAAADh0c5ruAAAAQK8joQIAAKiJ\n",
+       "hAoAAKAmEioAAICaSKgAAABqIqECAACoaV6TG7fNnA0AAKBnRISnWl80obJ9gaRrJM2VdG1E/OXk\n",
+       "mEsvvfSA1w0PD2toaOhF60rOl5Vpe+/evcX60QZz5lQ/aWlPeUxp3bp1OvPMMw9Xlw6rzPg9//zz\n",
+       "Rdp97rnnisRmxk6S5s6de8C6jRs3avXq1QesX7BgQbF+VLVnz57KsaOjo6m2x8bGKseOj49Xjs0c\n",
+       "F5nYqcZOkh566CGddNJJB6xfuHBh5bb7+voqxy5atKhI7Lx51f80TbcvpjPd99ZUMn8bMsfF7t27\n",
+       "D1i3YcMGrVmz5oD1mWNzuranU7fP02nD92wpa9eunfa5Ypf8bM+V9BlJF0g6RdLFtk8utT0AAICm\n",
+       "lLyH6gxJP4qIbRExLukrkt5acHsAAACNKJlQrZD0yITl7d11MxocHCzSIcyOFSsqDTNa6IQTTmi6\n",
+       "C6jhuOOOa7oLOEQDAwNNdwE1lUyoDvmmJxKq3sb49S6+1Hvb8ccf33QXcIiWL1/edBdQU8mb0h+V\n",
+       "tHLC8kp1zlK9yPDw8P7Hg4OD/DEGAACtsHPnTj311FOVYksmVHdJOsn2KkmPSXqnpIsnB03+NR8A\n",
+       "AEAbHHvssTr22GP3L2/btm3a2GIJVUQ8Z/uDkr6jzrQJ10XE1lLbAwAAaErReagi4tuSvl1yGwAA\n",
+       "AE1rdKZ0qR0TdWUmFis1wWim3Uxs5r1J5SZka0Os1I79XGqywOxnKdN2ZoLRUpOcZmTbzeyLTGxm\n",
+       "MtKM7OSpmcksMxNlZvqRaTczEWl2X5SayLnUhL3ZYzmzP0rFZiZmzcRmxzoTn/1bMuX2arcAAADw\n",
+       "EkdCBQAAUBMJFQAAQE0kVAAAADWRUAEAANREQgUAAFATCRUAAEBNJFQAAAA1kVABAADUREIFAABQ\n",
+       "EwkVAABATY3X8svU8ek1baj7V7LtUnUYMzWVsn0oue96TWbftaE+X0mZ4yLznZWpX1eqxqNUrnZc\n",
+       "ps+ZdsfGxirHZvdF5lguVb+uv7+/cuyiRYsqx0q5OoiZ2Pnz51eOzey3TJ3JTKyUO+ZaX8vP9krb\n",
+       "a21vtn2/7UtLbg8AAKAJpU8PjUu6PCI22T5a0gbbt0bE1sLbBQAAmDVFz1BFxBMRsan7eJekrZKW\n",
+       "l9wmAADAbJu1m9Jtr5K0WtK62domAADAbJiVhKp7ue+rki7rnqkCAAA4YhT/iZ3t+ZK+JukLEXHT\n",
+       "5OeHh4f3Px4cHNTg4GDpLgEAAMxoZGREIyMjlWKLJlTu/MbxOklbIuKaqWKGhoZKdgEAAOCQLFu2\n",
+       "TMuWLdu/vHnz5mljS1/ye52k35F0vu2N3X8XFN4mAADArCp6hioivi9mYwcAAEc4kh0AAICaGq/7\n",
+       "UnW69+yU80eykvuiVBmANsRm4zPlEzIlRkrFltRrJXuy5W8y7y9zXCxYsKBybF9fX+XYo446qnKs\n",
+       "lCsbkpEp+7Jnz57KsaOjo5Vjn3322cqxUq7ESOb7IlMiZsmSJZVjFy9eXDlWypXAyeyLUmOSic2W\n",
+       "GcrEH47vOM5QAQAA1ERCBQAAUBMJFQAAQE0kVAAAADWRUAEAANREQgUAAFATCRUAAEBNJFQAAAA1\n",
+       "kVABAADUREIFAABQU+OlZ6qWcchMC9+WMhml+pwpF1BSpgRHJjajZBmevXv3Vo7NlkQo0YdsuZVM\n",
+       "fOaYK1WyJzPW2c9I5vOXKaGSGb9S+03KjXXm/WVid+/eXTk2Mx6Zki9SrhxQZkwyfR4bG6scu2vX\n",
+       "rsqxUv57oKrM5y9T6ihTKmfhwoWVY6VypcamM+07sf0bkkLSVD2KiPh6lQ3YnivpLknbI+Ith9RL\n",
+       "AACAFjtYavgWdRKq6VRKqCRdJmmLpFyFRwAAgB4xbUIVEb9bt3Hbg5IulPQXkv6kbnsAAABtNONF\n",
+       "Q9sn2L7O9s3d5VNs/37F9j8p6QpJ7bjpBwAAoIAqd2F9TtItkpZ3lx+SdPlML7L9ZkkjEbFRU9+H\n",
+       "BQAAcESocnv9yyLi721fKUkRMW67ys8IzpZ0ke0LJS2SdIztGyLiPRODhoeH9z8eHBzU4OBg9d4D\n",
+       "AAAUsmPHDu3YsaNSbJWEapft4/ct2B6S9PRML4qIqyRd1X3NeZI+PDmZkqShoaFKHQUAAJhNS5cu\n",
+       "1dKlS/cvb926ddrYKgnVhyR9S9LP2P6BpKWS3n4I/WrH5FAAAACH2YwJVURssH2upJ9X516oByMi\n",
+       "NYthRNwm6bZD6yIAAEC7zZhQ2e6T9H5J56hzlul2238TEdWnegUAADiCVbnkd4OkH0v6tDpnqH5L\n",
+       "0o2SfrNgvwAAAHpGlYTq1Ig4ZcLyd21vOVwdqFoLqmQtv1JtZ2IzNYdK1q/LyNRLK1VfMVMrTcr1\n",
+       "OROb7UdVmTpX2fpumdpVmX5k6ruNjo5Wji1Vjy4rs98ydeMydcqytQozx0amNl5/f3+RPmS0pbZp\n",
+       "5pjLjHXmsyfl9kem7UxsppZf5rjItFu67alU+Wa42/ZZ+xa6v/LbUHvLAAAAR4iDFUe+b0LMHbYf\n",
+       "UeceqldIenAW+gYAANATZiqODAAAgBkcrDjytonLtpepM+M5AAAAJqhSHPki2w9JeliduaS2Sfp2\n",
+       "4X4BAAD0jCo3pf+5pLMk/TAiTpT0RknrivYKAACgh1RJqMYj4n8kzbE9NyLWSnpt4X4BAAD0jCoT\n",
+       "S+y0vVjS7ZK+aHtE0q6y3QIAAOgdVc5QvU3SqKTLJd0s6UfiF4AAAAD7VSmOvO9s1F5JnyvaGwAA\n",
+       "gB50sIk9d6kzkedUIiKOORwdyJSTqCpbmqVUWZRMP0qVRMkq1Y/MOGfGI1vypQ39yPQhE7t79+7K\n",
+       "sdm2Sx1zpY637Gd6bKx6rfdMbGZMSrUrteP7pVQZpUwpIKlc6a7MfhsfH68cmy2jlGm7VHm0UmWt\n",
+       "SpaeORylkQ42D9XRdRu3vUTStZJOVSc5+72IGK7bLgAAQJvkqi7mfUrSv0TE223Pk3RU4e0BAADM\n",
+       "umIJle2fkvT6iLhEkiLiOUlPl9oeAABAU3IXn3NOlLTD9vW277b9t7b7C24PAACgESUTqnmSTpP0\n",
+       "1xFxmqSfSLqy4PYAAAAaUfIequ2StkfEnd3lr2qKhGrDhg37Hw8MDGj58uUFuwQAAFDN+Ph45V9O\n",
+       "FkuoIuIJ24/YfmVE/FDSmyRtnhy3Zs2aUl0AAAA4ZPPnz3/RdA0Hm96k9K/8/kidcjULJP2npPcW\n",
+       "3h4AAMCsK5pQRcQ9kk4vuQ0AAICmlbwpHQAA4CWh9CW/GVWd7r3UtPeZPmTbLtVuqdiSbWf2xYIF\n",
+       "CyrHLlq0qHKsJC1cuLBIbKZ8QqlSR9mxzvQ5Myb9/dVnR1myZEnl2MWLF1eOPeqo3BzCmfjMfsso\n",
+       "+bnOKFVGKVOaJfN9kS1HUqrUSeYzUqr8jdSeck5Nt5tVtR8H+5vDGSoAAICaSKgAAABqIqECAACo\n",
+       "iYQKAACgJhIqAACAmkioAAAAaiKhAgAAqImECgAAoCYSKgAAgJpIqAAAAGpqvPRM1Sn4S5ZaKDX9\n",
+       "fqbdTCmCTGzJaf1L9SOz3zKlL0r2I9NuqZIv2XIWmfc3Pj5eOTYzJqXaLVmSqFRppMwxlD3uM+Vk\n",
+       "SrW7Z8+eyrGZ4yJ73GfaLlUup2RpnUx85rso8xnJHMuljotsfKYf0yl6hsr2R21vtn2f7S/Zrj4i\n",
+       "AAAAPaJYQmV7laT3STotIl4laa6kd5XaHgAAQFNKXvL7saRxSf2290rql/Rowe0BAAA0otgZqoj4\n",
+       "P0l/Jem/JT0m6amI+NdS2wMAAGhKyUt+PyvpjyWtkrRc0tG2f7vU9gAAAJpS8pLfayX9ICL+V5Js\n",
+       "f13S2ZK+ODHozjvv3P94+fLlWrFiRcEuAQAAVPP444/riSeeqBRbMqF6QNKf2u6TNCbpTZLWTw46\n",
+       "/fTTC3YBAADg0AwMDGhgYGD/8qZNm6aNLXkP1T2SbpB0l6R7u6s/W2p7AAAATSk6sWdEfELSJ0pu\n",
+       "AwAAoGmUngEAAKiJhAoAAKCmxmv5ZWoaHclK1dwrWcuvlExtrkwtqqzMsZnpR6bdTA3LkjW/MkrV\n",
+       "QMzst2wtv/7+/sqxmVp+Y2NjlWNHR0eLxErl9nNfX1+R2Mz4ZWuwZY7PzOcvs98ydfEyx2a2H5la\n",
+       "jKXqoJaqMynlxq/qd/j1118//fYqbw0AAABTIqECAACoiYQKAACgJhIqAACAmkioAAAAaiKhAgAA\n",
+       "qKmVCdX27dub7gJqYPx618MPP9x0F1DDtm3bmu4CDtEDDzzQdBdQUysTqkcffbTpLqAGEqreRULV\n",
+       "20ioehcJVe9rZUIFAADQS0ioAAAAanKTpUls915dFAAA8JIVEVPWR2s0oQIAADgScMkPAACgJhIq\n",
+       "AACAmlqXUNm+wPYDth+y/ZGm+4Pp2f4720/avm/CuuNs32r7h7Zvsb2kyT5ierZX2l5re7Pt+21f\n",
+       "2l3PGLac7UW219neZHuL7Y911zN2PcT2XNsbbX+ru8z49bBWJVS250r6jKQLJJ0i6WLbJzfbKxzE\n",
+       "9eqM1URXSro1Il4p6d+6y2incUmXR8SpkoYkfaD7eWMMWy4ixiSdHxGvkfRqSefbPkeMXa+5TNIW\n",
+       "SftuZmb8elirEipJZ0j6UURsi4hxSV+R9NaG+4RpRMTtknZOWn2RpM93H39e0ttmtVOoLCKeiIhN\n",
+       "3ce7JG2VtEKMYU+IiNHuwwWS5qrzWWTseoTtQUkXSrpW0r5fjTF+PaxtCdUKSY9MWN7eXYfe8fKI\n",
+       "eLL7+ElJL2+yM6jG9ipJqyWtE2PYE2zPsb1JnTFaGxGbxdj1kk9KukLS8xPWMX49rG0JFXM4HEGi\n",
+       "MycHY9pyto+W9DVJl0XEMxOfYwzbKyKe717yG5R0ru3zJz3P2LWU7TdLGomIjXrh7NSLMH69p20J\n",
+       "1aOSVk5YXqnOWSr0jidtnyBJtgckjTTcHxyE7fnqJFM3RsRN3dWMYQ+JiKcl/bOkNWLsesXZki6y\n",
+       "/bCkL0v6Jds3ivHraW1LqO6SdJLtVbYXSHqnpG823CfkfFPSJd3Hl0i66SCxaJBtS7pO0paIuGbC\n",
+       "U4xhy9l+2b5fgNnuk/TLkjaKsesJEXFVRKyMiBMlvUvSdyPi3WL8elrrZkq3/auSrlHnJsvrIuJj\n",
+       "DXcJ07D9ZUnnSXqZOtf7/0zSNyT9g6RXSNom6R0R8VRTfcT0ur8K+56ke/XCpYWPSlovxrDVbL9K\n",
+       "nZuW53T/3RgRV9s+ToxdT7F9nqQPRcRFjF9va11CBQAA0GvadskPAACg55BQAQAA1ERCBQAAUBMJ\n",
+       "FQAAQE0kVAAAADWRUAEAANREQgWgcbbv6P73p21ffJjbvmqqbQHA4cQ8VABaw/Yb1Jnk8C2J18yL\n",
+       "iOcO8vwzEbH4cPQPAKbDGSoAjbO9q/vw45Jeb3uj7ctsz7F9te31tu+x/Qfd+DfYvt32NyTd3113\n",
+       "k+27bN9v+33ddR+X1Ndt78aJ23LH1bbvs32v7XdMaPvfbf+j7a22vzC7ewNAL5rXdAcAQC+UvvmI\n",
+       "pA/vO0PVTaCeiogzbC+U9H3bt3RjV0s6NSL+q7v83ojY2a1tt972VyPiStsfiIjVU2zr1yX9oqRX\n",
+       "S1oq6U7b3+s+9xpJp0h6XNIdtl8XEVwqBDAtzlABaBNPWv4VSe+xvVHSsKTjJP1c97n1E5IpSbrM\n",
+       "9iZJ/yFppaSTZtjWOZK+FB0jkm6TdLo6Cdf6iHgsOvdEbJK0qsZ7AvASwBkqAG33wYi4deKK7r1W\n",
+       "P5m0/EZJQxExZnutpEUztBs6MIHbd/Zq94R1e8V3JYAZcIYKQJs8I2niDeTfkfR+2/MkyfYrbfdP\n",
+       "8bpjJO3sJlO/IGlownPj+14/ye2S3tm9T2uppHMlrdeBSRYAzIj/6wLQBvvODN0jaW/30t31kj6t\n",
+       "zuW2u21b0oikX+vGT/yJ8s2S/tD2FkkPqnPZb5/PSrrX9oaIePe+10XEP9k+q7vNkHRFRIzYPnlS\n",
+       "25piGQBehGkTAAAAauKSHwAAQE0kVAAAADWRUAEAANREQgUAAFATCRUAAEBNJFQAAAA1kVABAADU\n",
+       "REIFAABQ0/8Dsw8TC+BipngAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f7938323190>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAGPlJREFUeJztXUlsbNlZ/v6a53myy37Pft1B6kiRkk1YJFGyiKKOkJKw\n",
+       "IYqEFAWEWDAJFjRhQWAXIhEhWCAg6SgMSkCgoICESAeBaBYMjbrTHUgP7nbZZbuq7JrnW9NhUfWf\n",
+       "Pve6bNfk91zl+0lHVa7yuz717lf//59/JCEETJiYB5YnvQET6weTNCbmhkkaE3PDJI2JuWGSxsTc\n",
+       "MEljYm4sTBoiepaIXieit4jouVVuysTdBi3ipyEiK4A3AHwcwCmA/wbwOSHEj1a7PRN3EYtKmg8C\n",
+       "OBBCZIQQfQDfBvDp1W3LxF2GbcF/lwaQVX4+AfDj6i8QkelqXnMIIWja64tKGpMQ9xiLkuYUwK7y\n",
+       "8y7G0sbEPcCipHkJwHuIaI+IHAA+C+C7q9uWibuMhWwaIcSAiH4RwD8BsAL4unlyuj9Y6Mg904VN\n",
+       "Q3jtsWpD2MQ9hkkaE3PDJI2JubGoc2+tYbVa5bJYLCDSq24ikq/zc3UZMRwOMRqN5ONoNIIQAkII\n",
+       "+bP6/rrj3pHGYrHA7XbD7XbD4/HA7XZLcgBjwthsNt1yOp1wuVxwOp1wOp2XrtnpdHSr3+/rlqZp\n",
+       "6Ha7cq17Xva9Iw0RweVyIRQKIRwOIxgM6iSLxWKR5HA4HHC5XPD5fPD5fPD7/fB6vZeuWavVUK1W\n",
+       "UavVUKvV0Ol00O125WO9Xkej0cBoNIKmaSZp1g1EBLfbjVAohGQyiXg8riOM1WqVEsjj8cDj8SAc\n",
+       "DiMSiSAcDiMcDl+65vn5Oc7Pz3FxcYHz83M0m03dstlsEEKg2+0+gU+8emw8aVjdWK1W2Gw2uFwu\n",
+       "RKNRJJNJpNNpbG1t6ewVJo3H44HX64XX60U0GkUkEkEsFkMkErn0N1hiBQIBeL1e1Ot1KV3q9Tos\n",
+       "FgsGgwFarRasVisASGmzjlJn40ljsVjg8/kQCAQQCAQQCoWQTqexvb2N7e1tJBIJnaRh9eRyueTy\n",
+       "+/3weDxSYhjhcDjg9/sxGAxARPD7/QgEAlLSEBF6vR6azSaq1Sp6vZ7OeF433AvS+P1+pFIpuba2\n",
+       "tpBMJrG1tYVYLCYNYSaOzWaD3W6Xy+Vywe12w263T/0bDocDPp8PFosFLpcLrVZLt1jKVCoVuFwu\n",
+       "AEC/38dgMJAnrXXCxpPGarXC5/MhkUhgf38fe3t7SCaTSCQSSCQSiEajAKA7PbHEISLd8fw6ScOE\n",
+       "CQQCaLfb6HQ6usdKpYJCoQCXy4XhcCiP4+uIpUhDRBkAdQBDAH0hxAdXsallwDea7Rifz4d4PI7t\n",
+       "7W08fPgQTz31FKLRKKLRKGKxGEKh0KVrsI+FCcLSoN/vo9fr6QjGy263S/K43W7dEbtUKiGfzyMU\n",
+       "CsHn80EIASLCaDRCr9d7rP8/q8CykkYA+JgQoryKzawCrI7YholEInjqqafw4MEDqY4CgYC0UaZh\n",
+       "OBzKNRgMJFl6vR76/b40mHk5HA65nE4nLBYL7Ha7JIfX60UoFEIkEkEikYDdbketVsNwOESn07mX\n",
+       "6mlqJPRJwWq1wu/3I5lMIpVKYXt7G7u7uzrSsHNvGmlYbahEabfbuqXaO3a7XZ6y2Ihmuwh41xAP\n",
+       "BoPy1AZAEsbojV4HrELSfJ+IhgD+WAjxpyvY01Jg0qRSKTx69Aj7+/vS+E2lUojFYroj+DQMh0Pp\n",
+       "ye10OqjVaqjX66jVamg0GtL5xw5APjU5HA65B7aN7Ha7jjSJRAL9fl86/e4jaT4khMgRURzAC0T0\n",
+       "uhDixVVsbB4YbQu/3494PI7d3V08evRI2i/RaBSBQEBnr7AKUtXRNCO2Wq3KR/U47nK5MBgMpJTh\n",
+       "UxT7h9iZyMf+cDiMer1+paRbByy1ayFEbvJ4QUTfwbi05bGSRo0V2e126YsJh8PS4A0EAvImCSEu\n",
+       "2SvsjOPFBiyHARqNhlzNZlNnwzgcDjx48EAayHw0Z0nGxFGP8izppgU/1wELk4aIPACsQogGEXkB\n",
+       "fALA76xsZ7PvQ3p6nU4n/H4/gsGgjjRGG2Y4HEp7pdPpoFAoIJfLIZfLoVAoQNM0uZg86lJvvt1u\n",
+       "R7/fBxHJkIPL5YLD4dBJGzaeVcKogdJ1wjKSJgngO5MPbQPwl0KI761kV3OAVZLT6YTX651KGr7B\n",
+       "qqRhwjSbTeTzebzzzjs4ODjA4eGhJJR6YlIf1bQKfnS73QiHw0ilUlL18d9T/T2qpFlHwgBLkEYI\n",
+       "cQjg/Svcy0LgG+JwOKTtwBFpPnarGA6H6Ha7aDabaDQaqFarODs7QyaTwVtvvYU33nhDqi1+ZHXG\n",
+       "bn9VYlitVmxtbaFWq6HVaqHX60m1pTrvVN+PalOtI9bTEjNAtRccDoe0KfibrN70Xq+Hi4sLGZEu\n",
+       "FArIZDI4OztDpVKBpmnSMOZHNakKAFwulwxqejweeSoLBoPwer1SPXFwko3rRqOBcrmMer2OdruN\n",
+       "fr+/luRZe9Ko9gKTRrUbWB2xaul0Ori4uMDx8TGOjo6QzWZRKBRwfn6OarWKbrd7KdNOlQ5EBKfT\n",
+       "KR2HrJKi0SiCwSA8Hg+cTqeOuP1+H51OB/V6HaVSCbVazSTNk4YxyGi0GdiGYbV0cXGBbDaLN998\n",
+       "E2+//bYujYEz64xqRL25TqcTwWAQ8XhcBkFVSWM8HV0ladYxhABsCGmMMBq67MntdDqoVqvI5/PI\n",
+       "ZrM4PDzEwcHBJaP3OrCkYV9QOp1GIpFAJBJBIBCAy+XSBTx5P5qmodVqoV6vo9VqSTVoSponACEE\n",
+       "er2eTHCyWq3wer2wWq3QNA35fF4XPGw0Gjg8PEQul0OtVpNE4cjzLHA4HDKelEgkEA6H4fV6ZcBy\n",
+       "XU9Fs2LtScPGbbvdlnaIxWJBr9dDtVpFOByWUoTDAoVCAYVCQZKGDeV5SRMOhyVpfD4fnE6nzju9\n",
+       "qVh70rCk4aTtTqcDTdNQq9WQz+fh9XplVcBgMECv19N5eHu9ngxSzkoaDlKGQiHE43GEQiGdpGFs\n",
+       "KnE2gjRMCmBsFHO8iE8xxlQH47oJarWC1WqF2+2G3+9HKBRCLBaTUW72Aqt5OMC7dVHGta5Ye9IY\n",
+       "oaY2ENGlG6b6bGaRLFzy4na7pX+Gj9gc03I6nfLEBOASOflkxks1hNcRG0caYHzT+BtvtVqvrHic\n",
+       "VR1xGmcoFEIoFLrkl2GHIqsm9YivaZo80vMyT093DEwM9uayXWH0uczqymdJEwwGkUgkZHKXMXpu\n",
+       "s9l0pNE0De12Wx6z1cU+mo2VNET0PICfAHAuhHjf5LUIgL8C8BBABsBPCSGqt7jPubCszaDmALMN\n",
+       "Ew6HkUwm8eDBA0kav98vy3rZ5mEbi31EXHXJ0qbZbELTtLlU5F3DLAkd3wDwrOG13wDwghDixwD8\n",
+       "8+TnjQBHzT0ejy5Fc2trCw8ePMDe3h5SqRTC4bAkDAAp3fr9PprNJkqlEs7OznB4eIjT01OUSiVZ\n",
+       "zmKMZa0bbiTNJBOvYnj5UwC+OXn+TQCfWfG+niiYNIFAQKZobm9vY2dnB3t7e9ja2kIoFILb7ZYq\n",
+       "iY1sLopTSXN2diZJY+wssY5Y1KZJCiEKk+cFjHNrNgKqpOH0zEQiga2tLezu7uLhw4fyJMUhAyYB\n",
+       "n5aMpCkUCiiXyzrSrCthgBUYwkIIsWn99YzqKZFIIJVKIZ1OY3d3V1fGy3aMGklvNBoolUrI5XI4\n",
+       "OjpCpVKRke1N6FGzKGkKRJQSQuSJaAvA+So39aRhs9l0SV2qL2Zamma/39eV4RaLRZRKJZmI3mw2\n",
+       "0e121/aIbcSimc3fBfD5yfPPA/i71WznyYNPTE6nEx6PRxb/M2mmxZY49lWtVmXLkXK5jHK5rCPN\n",
+       "TRH0dcEsR+5vAfgogBgRZQH8FoAvA/hrIvpZTI7ct7nJxw3ufjWNNNNiSyxpqtUqLi4udJJGDYpu\n",
+       "iqS5kTRCiM9d8dbHV7yXJwZVehhVE/eccblcsNvtl5yFQghomoZmsymL/Jk03B1r3W0YIzbOIzwv\n",
+       "LBYLvF4vfD4fvF4vAoEAdnZ2sLOzg3Q6jZ2dHcTjcQQCAdlvj9MsOOXi7OwMJycnyGazOD4+Ri6X\n",
+       "Q7VaRafTecKf7nZw70nDTYg4RMDHa7WfTSgUQjAY1JGm1WrJOBKT5ujoCJlMRqqmdSzunwX3njRc\n",
+       "oJ9MJmX/mng8LlcsFpNRbqfTKfN3WB1VKhXkcjmcnJzg+PgYmUwGzWYTrVZrY3rsGXHvSUNEOtI8\n",
+       "88wzusaMoVDoUs4v2zDlcllWZ56eniKbzSKTyehqw01JsyEwtn/lTp5cnakavmqYABgbwK1WC+Vy\n",
+       "WSao5/P5qR7fTSQMcA9Jo9ZJqZWZbAxzr2DVL2PMw2Epk8/ncXR0hEKhgGq1qvP4btqJScW9Iw0A\n",
+       "XXEdd/JU68D5NZY0akkux5bK5bIME1SrVUka1RdjSpoNglpcZ1RPgUBAV3jHpBFCyMR0VT0dHx+j\n",
+       "3W7L7hLr2OJ1Xtw70vBpibuPx2Ix2fHT7/frsvBYNTFReLHjrtFooNPpyNqpTVZJKu4tabhjOefI\n",
+       "JBIJ+P1+KV3U05LaOLpSqch67GazKQdocHLVfcC9JA03cnz06BGefvppJJNJnaQxpj2wpKlUKjg/\n",
+       "P5eShgOR69x9fBHcGOUmoueJqEBErymv/TYRnRDRy5NlTAe9s2APMJPmve99L/b39yVpuIHAdX6Z\n",
+       "YrEoScPFeaZ60uMbAP4QwJ8prwkAXxVCfPVWdrVCcCYeG7dsz3BogPNljP1kePV6PZRKJRQKBZye\n",
+       "niKTyciS3k2Y3bQIZolyv0hEe1PeWouaUzV9k0f2qFNTfD6frgkRVxNwv712u41isYhcLic9vqye\n",
+       "NjVMcBOWaS/5S0T0AyL6OhFd7hV/R8D9fbn2OhqNSknj9/ulpOEmRMC7/WTY+C0Wi8jn8zIomc/n\n",
+       "TdIsgD8CsI9xz70cgN9b2Y5WDCaNmvMbiUQQCoWkpFHVE/tjuAESJ1YxaTKZjEmaRf6REOJcTADg\n",
+       "axj3D74zUPv28lidSCSCVCqly4/hagJj+iaXorTbbdkUiUtsuZfNulcULIOFSDNJJmf8JIDXrvrd\n",
+       "x41psSWfz4doNCrLUBKJhAwXTCMNG8CsorgJtUqY+3JSmoZFcoS/BOBjRPR+jE9RhwB+/lZ3OSfU\n",
+       "vr089S0ajcrhGolEQkoalSw8TodrsdX2a6qU2fTY0k1YNEf4+VvYy8rAHl1VPUWjUaRSKezu7sr+\n",
+       "wty5ygi1Z1+r1dL5Yu6LA+86bJxHmCe8caOhSCSCZDIph5aqbVtVw5frsDVNk4bv6ekpjo+PpV9G\n",
+       "07Qn/fHuBDaWNOpQLm7ZypPdeOQOH7FZqrA6KhQKODs7QzabxdHRkfQA39fTkhEbRxqehMJzt9Pp\n",
+       "tI40fr9f16aenXncR6ZSqeikjJrza0qaMTaONDzcgkmzs7Mj1RN34VT7z6jH61qthmKxeEnSzNOf\n",
+       "7z5g40mTTqd1dUvGGUtCCLTbbZTLZZyenuLk5ASnp6coFotoNBrQNG0jmiuuEhtLGm53tr29LSUM\n",
+       "jwtUwaQpFos4OTnBwcEB8vm8rp/MOo9Dvg1sHGm4R54qaTidc9owdq4uKJVKyGazePvtt2WyFZOG\n",
+       "f++++mWM2DjSTFNP6jAvI0aj0SVJwy3xWTWZZNFj40hjHP13VU8ZFWq7WFZH6nXmhbGLqPF1da/G\n",
+       "x2l7NL7PSe88jGzezuiDwUA3cnFeL/fGkWYRGIm27AQ4lk6q8cxE5Ef+u4C+eG+aNFTfs1gssnKC\n",
+       "Uztmmbqrfg4+KdZqNVSrVWiaduWYomkwSQPoCGO323U3fBHSGFukTbsZqlRhMnC87Kr9sYoNBAKI\n",
+       "xWJybPQ0A98IdQ+1Wg2FQkEeAtRU1aUlDRHtYpzmmcA4OPknQog/uOt9hOcB37yrJM0isFgs0qdz\n",
+       "07dXJcxVA1HV92w2GwKBgJw1lU6nZTeL66Duo1gsAgA6nQ7K5bJsiTJrXO0mSdMH8KtCiFeIyAfg\n",
+       "f4joBQBfwLiP8FeI6DmM+wivZS9hbgCQSCSwv78vh3NwWcoiDj11PLPq52EJpKoaridXl1FFqZLG\n",
+       "ZrPJ8AhXUcwraZxOp2zD3263YbfbdTOxbvrM15JGCJEHkJ88bxLRjwCkMe4j/NHJr30TwL9iTUlj\n",
+       "sVgQDAaxs7MDIQT8fr9uassiUe1WqyVDD9xwWh2mygRgEng8HtlUyefzTSWNurh8mGc1zGLTAO8S\n",
+       "x263S2L3ej3Y7XbZVZ2/LNdhZptmklz+AQD/iQ3qI8ykEULA5/Nhe3tbZ88s4tRjA5MfeQ4CSzA+\n",
+       "1bENpQ7rCIVCl+waVYXyOETumaM2wJ4FQgg4HA6dRGVJxnXqN2Em0kxU098C+BUhREPVueveR5gN\n",
+       "S6/Xi1QqdamnzCJ2TalUQrFYlIu/0dxn2DgOOhKJyAZK8Xh8KmmMezb2Mr4J6udg9aSWE3N7/llc\n",
+       "DLNk7tkxJsyfCyG49eud7SPMUWt1YLs6RXeaKL/q9WX2AEBmDvLNMUoa3lcwGJRNlILBoLxxsxLW\n",
+       "OABNtZ+mqddKpYJGo6HLSLy4uEChUJgpkn/T6YkAfB3A/wkhfl95i/sI/y7uWB9h/ta0222Z6sBi\n",
+       "nMfs3Da4zkoIAZvNdq1Nw4NauSeO2j1UfWRMIxJ/SbhzhSpFeFSjilqthlwuh/PzcznymcdGd7vd\n",
+       "G0c/3/Q/+CEAPw3gVSJ6efLaF3GH+wirtde1Wg3lchmBQECWsjwO8IxLm80Gt9t9abqdUb04HA6Z\n",
+       "GMYOQP4ss6jKwWCAdrsth5DxOGleRmnTaDSk6lSHy08j2DTcdHr6d1xdsXAn+wgzadjrWalUdLVP\n",
+       "jwNceOd2uy85zVSPMHDZyGXSGH1F15GH50vV63UpOZrNplzG0xDP+ORmTOrgsqVJs44YjUZyPkE2\n",
+       "m4XNZpP/KSy+543VGI+8qqNtmrrjagiGmsTV7/flqeyq0xkP5+DfV22Uab/PRX28+LjPk+2MpNE0\n",
+       "TQ4s47a23DLlXpJmOByi0Wggn88DGH+rUqmUPALHYjEAl4OF10FtIMB+lXlsJJZ86k1k+2aaocrk\n",
+       "ZjWjHtmntcrvdru6pktcPcHORePfYMnEf4ftISb0Tdg40oxGIzQaDRCRTgyzyK7X67q29rOQhj21\n",
+       "LpdLGoyj0UjaLDeBc5B5L3xiYUPVCN4nLyYBd0o3SgOOWvP7KsGmNVtiSaZ6vudpzLRxpBkOh1Id\n",
+       "XVxcwO12y65VXGY7D2mISEoWj8cDj8eD0WgkbZZZwJ20yuUyzs/PL0kC47ebc5VLpRLK5bKs8rzK\n",
+       "sFVTO4z20LQY2lW/M6szc+NIo9YxAeMbxmOQiQiaps1NGvXI7na7Ua1WUS6XUSwWEQ6Hb7wGO/t4\n",
+       "QguThSWD8aayq4CXao91u90nnnq6caQxQgiBTqeDarUKItIZwrOqJ0524mMx57Hw400wDnNnHwqr\n",
+       "BSNYIqqlMzz+5y5kEW48aUajkSRKr9dDrVYDMJsBzL9nTE1Q7ZtZ0hLUCDL36OM1LWeHKz15qW3z\n",
+       "7wLotph7V+JRaq7MoumbRnVmPILfBGNqxFXOO4aaBDbNTnlcEEJM/WZtPGlMLI6rSLNM+zQT9xQm\n",
+       "aUzMjWtJQ0S7RPQvRPS/RPRDIvrlyetr20fYxPK41qYhohSAlJojDOAzGEe1G+KaPsKmTbP+uMqm\n",
+       "WTRHGFiTPsImVo+ZbRolR/g/Ji+tRR9hE6vHTKSZqKa/wThHuIk16iNsYvW40U8zyRH+BwD/aEj5\n",
+       "5Pf3APy9EOJ9htdNm2bNsZCf5qocYbrDfYRN3D5uOj19GMC/AXgV47JcAPhNAJ/DWDXJPsJKHRT/\n",
+       "W1PSrDnMMIKJuWGGEUysDCZpTMwNkzQm5oZJGhNzwySNiblhksbE3DBJY2Ju3JqfxsTmwpQ0JuaG\n",
+       "SRoTc+NWSUNEzxLR60T01qQL6LLXyxDRq5MU0/9a4N8/T0QFInpNeS1CRC8Q0ZtE9L15coOuuN7C\n",
+       "qbDXpNcutMdbS9e9rq53mQXACuAAwB4AO4BXADyz5DUPAUSW+PcfwTiR7DXlta8A+PXJ8+cAfHnJ\n",
+       "630JwK8tuL8UgPdPnvsAvAHgmUX3eM31Ft6jEOJWJc0HARwIITJCiD6AbwP49Aquu3CaqRDiRQAV\n",
+       "w8ufwritLSaPn1nyesCCexRC5IUQr0yeNwGoLXjn3uM111t4j8Dtqqc0gKzy8wne3fCiEAC+T0Qv\n",
+       "EdHPLXktxm20t106FVZJr11JC95VpuveJmlu4yz/ISHEBwB8EsAvENFHVnlxMZbjy+576VRYMrTg\n",
+       "XXaPq07XvU3SnALYVX7exVjaLAwhRG7yeAHgOxirwGVRmJTqcEbiUu1thRDnYgIAX5t3j3RNC95F\n",
+       "9qhc7y/4esvu8TZJ8xKA9xDRHhE5AHwW41ayC4GIPETknzz3AvgEVpNmyu1tgRW0t10mFfaq9NpF\n",
+       "93hr6brLnGZmsN4/ibHFfgDgi0teax/jE9grAH64yPUAfAvAGYAexvbWFwBEAHwfwJsAvgcgtMT1\n",
+       "fgbjqTWvAvjB5OYm57jehwGMJp/x5cl6dtE9XnG9Ty6zRyGEGUYwMT9Mj7CJuWGSxsTcMEljYm6Y\n",
+       "pDExN0zSmJgbJmlMzA2TNCbmhkkaE3Pj/wFJ7Hv45ZreFAAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f7938233050>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEx1JREFUeJzt3X2QXXV9x/HPJ4+bZPOglUqV2BsabAVNg1UHfEzQdigj\n",
+       "aFur0Fap7djpqJUSdURm2vGPMlqdjg/jtDNWqoAibdWiTqtCYaNRkQhmCU8+pAMFFEhLMdnN4yb5\n",
+       "9o97N1mS3ezvuye/vffi+zWT4T5895zvnt+5Z7+cc+7v64gQAAAAZm5OtxMAAADodxRUAAAADVFQ\n",
+       "AQAANERBBQAA0BAFFQAAQEMUVAAAAA3N6+bKbTNnAwAA6BsR4cler1pQ2T5X0kckzZX0yYj426Nj\n",
+       "3ve+9x3zc0NDQ1q/fn3N1J4gMxfXoUOHKmZSxp50LCc1Z07uJGQmfqo8br75Zp1zzjknfLknQmas\n",
+       "Dxw4UBx78ODB4tixsbEqsfPnzy+OlaSBgYFjXpvqs7dkyZLi5S5cuLA4dt688kPQ3r17i2N3795d\n",
+       "HCtJo6OjVfLI7BeZ2AULFkz6+i233KKzzz77mNcHBweLl71ixYri2OXLlxfHLl26tDg2sw9Nth8f\n",
+       "T+ZzkjkW7du3rzh2z549x7x23XXX6cILLzzm9V27dhUvV8rt+/v3768Sm9mXa86Fmfm7M3fu3KK4\n",
+       "DRs2TL2+4rUl2Z4r6eOSzpV0uqSLbD+n1voAAAC6peY9VC+StC0i7o+IMUnXSXpNxfUBAAB0Rc2C\n",
+       "6pmSHpzw/KHOa9NqtVo18sEsWbVqVbdTwAzx2etvp5xySrdTwAw997nP7XYKaKhmQTXjC6P8Qe5v\n",
+       "jF//Yuz628qVK7udAmaIgqr/1bwp/SeSJn66V6p9luoJhoaGDj9utVoc0AEAQE/Ytm2btm3bVhRb\n",
+       "s6C6TdJptluSfirpDZIuOjpoNr/NBwAAUGr16tVavXr14ec33HDDlLHVCqqIOGD77ZK+rva0CVdG\n",
+       "xL211gcAANAtVeehioivSvpqzXUAAAB0W1dnSpfKJ9PKTlCZ0QuTdWb0yraoNUlmRs2JS2tNUFlr\n",
+       "/LIT5GXiM7GTTVp4ImS2cXaS01qTTmYme6w5+W0mPjMxZGbi2R07dhTHZiaSLf0bMi5zjMtMZpk5\n",
+       "xmViM9tYqjOZZdZUE89OJvNZrTnWmQl7p0IvPwAAgIYoqAAAABqioAIAAGiIggoAAKAhCioAAICG\n",
+       "KKgAAAAaoqACAABoiIIKAACgIQoqAACAhiioAAAAGqKgAgAAaKjrvfxK+w5lelFl+1xl+gPVyiOb\n",
+       "c7eXm1Urj2z/ukwemZ5YmT5ztXrSZfpnSfXGJPN5yvx+mR562V5+mTEZHBwsjl28eHFxbM0enQcP\n",
+       "HiyOrXXcyvRVGxkZKY7N/G41ZbZFZr9YtmxZKo/ly5cXx2Z6WC5atKg4tleO95k+iKV9Gzds2DDl\n",
+       "e1XPUNleaXvI9t2277L9jprrAwAA6IbaZ6jGJF0aEcO2ByXdbvvGiLi38noBAABmTdUzVBHxSEQM\n",
+       "dx6PSrpX0jNqrhMAAGC2zdpN6bZbks6UdOtsrRMAAGA2zEpB1bnc93lJl3TOVAEAADxpVP+Wn+35\n",
+       "kr4g6TMRcf3R7990002HH69atUqnnnpq7ZQAAACmtWnTJm3atKko1tmvIWa4/d3JqyQ9FhGXTvJ+\n",
+       "XHHFFaXLyqy3OLbmspk24Yhe+Rot0yYcwbQJRzBtwhFMmzAzTJtwRK8c72tMm7B06VJFxKS/YO1L\n",
+       "fi+R9EeS1tve0vl3buV1AgAAzKqql/wi4ltiNnYAAPAkR7EDAADQUNdbz5TKXDuteU09k0et2My9\n",
+       "CNlrzrW2c+a+moya98tl7mnJ/H61YjP7RVatZde616L0fohxBw4cKI7N7BeZe7mWLFlSHJu5T0bK\n",
+       "3V+XGevdu3cXx+7atas4dseOHcWxO3fuLI6VcmOd2RYrVqwojj355JOLY0866aTiWEkaGBgojs38\n",
+       "fqOj5V/Qz+wXmdh9+/YVx0q548CJOMZxhgoAAKAhCioAAICGKKgAAAAaoqACAABoiIIKAACgIQoq\n",
+       "AACAhiioAAAAGqKgAgAAaIiCCgAAoCEKKgAAgIa63nqmtA1Ar7RbqRWbaeNSs8VIRi+0A8q2Lsnk\n",
+       "PDY2VhybaWeRid2zZ09xbKa1R3bZmRYOtdrJZGS2sZT7TGW2WyaPTJuhefNyh+5MHpn2Hpn9Its2\n",
+       "pFSmZY+Ua82SaTOUkWnjkm2tk9k/M8e4zGek1r6c3e8z43cijltTZmf79ySFpMnWEhHxxZIV2J4r\n",
+       "6TZJD0XE+TPKEgAAoIcdr9w7X+2CaipFBZWkSyTdI2lpaVIAAAD9ZMqCKiL+uOnCbZ8i6TxJV0ja\n",
+       "0HR5AAAAvWjaC4y2T7Z9pe2vdZ6fbvtPC5f/YUnvltQbN/0AAABUUHLH1qcl3SDpGZ3nP5Z06XQ/\n",
+       "ZPvVkrZHxBZNfh8WAADAk0LJLfNPi4h/tn2ZJEXEmO2Sr4y8WNIFts+TNCBpme2rI+JNE4OGhoYO\n",
+       "P261Wlq1alV59gAAAJXs3bu3+BuqJQXVqO1fGH9i+yxJO6b7oYi4XNLlnZ95haR3HV1MSdL69euL\n",
+       "EgUAAJhNAwMDT5hqY2RkZMrYkoLqnZK+IulU29+RdJKk180gr9zkUAAAAH1i2oIqIm63/XJJv6r2\n",
+       "vVA/jIjy2cDay/iGpG/MLEUAAIDeNm1BZXuRpLdKeqnaZ5k22f6HiNhbOzkAAIB+UHLJ72pJOyV9\n",
+       "TO0zVH8g6RpJv18xLwAAgL5RUlCdERGnT3h+s+17TlQCpb12Mj15avbyqyXTRygTm+mpJOW2c60+\n",
+       "STXHOqNWT7rMmAwODhbHLly4MJXHokWLimMXLFhQHJvp21irn2C2b1xmn8tsi8w2zvSky+6bmfhs\n",
+       "H8RS2WNRqWy+mf0zs9127Jj2u1qHZfpuZvflTH++jFo99zL7Rba34vz580947MUXXzzleyXZfd/2\n",
+       "2eNPOt/yu71ozQAAAD8Hjtcc+c4JMd+2/aDa91A9S9IPZyE3AACAvjBdc2QAAABM43jNke+f+Nz2\n",
+       "L6o94zkAAAAmKGmOfIHtH0u6T+25pO6X9NXKeQEAAPSNkpvS/0bS2ZJ+FBGrJL1S0q1VswIAAOgj\n",
+       "JQXVWET8r6Q5tudGxJCkF1TOCwAAoG+UTBbxuO2lkjZJ+qzt7ZJG66YFAADQP0rOUL1W0m5Jl0r6\n",
+       "mqRt4huAAAAAh5U0Rx4/G3VQ0qerZgMAANCHjjex56jaE3lOJiJi2YlIoLRtQK2WKFJu6vtaLWJ6\n",
+       "RaaVy6FDh6ost1abmqxa7RMyv1+mRcXIyEhxbDaPWu1WBgbKZ2J5ylOeUiUHKde+JNPaIxObaV2S\n",
+       "bbdSq71HZvwysYsXLy6OzeybUu5zndkWmZY2NT/Xo6Pld+Rk2trUOoZn9otse61M65lMHlM53jxU\n",
+       "5U3EpmB7haRPSjpD7eLsTyLiu02XCwAA0EvKS/WZ+aik/4iI19meJ6m8+ycAAECfqFZQ2V4u6WUR\n",
+       "cbEkRcQBSeXntAEAAPpEvRtQpFWS/sf2p2x/3/Y/2i6/MA4AANAnahZU8yQ9X9LfR8TzJe2SdFnF\n",
+       "9QEAAHRFzXuoHpL0UER8r/P885qkoNq4cePhx61WS61Wq2JKAAAAZYaHhzU8PFwUW62giohHbD9o\n",
+       "+9kR8SNJr5J099Fx69atq5UCAADAjK1du1Zr1649/Pyqq66aMrb2t/z+Qu12NQsk/ZekN1deHwAA\n",
+       "wKyrWlBFxB2SXlhzHQAAAN1W86Z0AACAnwu1L/lNq0brkMwU+VKuZUAtmZxrtYfJxme2Wy+0qcku\n",
+       "O/P77d+/vzh2586dxbGZdiSZlhOStHv37uLYTIuKzHbLtA3JHCsyrVak3H6UaSezZ8+e4tjMNs7k\n",
+       "IOX2z0zOtWIzn9Ps35Bara0yrUtqtbWS6rVpy2y3TMuXTA6Z7TaT+KY4QwUAANAQBRUAAEBDFFQA\n",
+       "AAANUVABAAA0REEFAADQEAUVAABAQxRUAAAADVFQAQAANERBBQAA0BAFFQAAQENdbz2TbY1SItuO\n",
+       "JKPWVP2Z9gKZ5Wbb8NRqW5BRq01NTZltcdpppxXHLlmypDg20+4hK9PqJNPmJLPcAwcOFMeuWLGi\n",
+       "OFaSli1bViU2m0epTJsaKbftMp+/zPhlWh1l9qHs8T6z7IzFixcXx2ba1AwODqbyyMRncs4cizL7\n",
+       "UGa/GBkZKY7NLru0NdJ555035XtVz1DZfq/tu23fafta2wtrrg8AAKAbqhVUtluS3iLp+RHxPElz\n",
+       "JV1Ya30AAADdUvOS305JY5IW2z4oabGkn1RcHwAAQFdUO0MVEf8n6e8kPSDpp5J+FhH/WWt9AAAA\n",
+       "3VLzkt+vSPpLSS1Jz5A0aPsPa60PAACgW2pe8nuBpO9ExGOSZPuLkl4s6bMTgzZu3Hj4cavVUqvV\n",
+       "qpgSAABAma1bt2rr1q1FsTULqh9I+ivbiyTtlfQqSZuPDlq3bl3FFAAAAGZmzZo1WrNmzeHn1157\n",
+       "7ZSxNe+hukPS1ZJukzRe3n2i1voAAAC6perEnhHxQUkfrLkOAACAbqP1DAAAQEMUVAAAAA11vZff\n",
+       "vHllKWR6tmX7u2XiDx48WGW5tWIz+Ur1+uhl8sjEZnqJZZfdC2PSK30NM73j9u7dWxxb2j9Lym2L\n",
+       "bJ/JTB/ETO+4HTt2FMc+9thjVWKl3Phl9s/Mdlu4sLzzWGa5mX1IqtfXMLPPZbZFpt+eVP43Vcpt\n",
+       "i5p/d2rJbItM7FQ4QwUAANAQBRUAAEBDFFQAAAANUVABAAA0REEFAADQEAUVAABAQz1ZUN13333d\n",
+       "TgENPPDAA91OATP08MMPdzsFNJCZHgG9hbHrfxRUOOEoqPrXI4880u0U0MD+/fu7nQJmiIKq//Vk\n",
+       "QQUAANBPKKgAAAAacs12FdOu3O7eygEAAJIiYtL+U10tqAAAAJ4MuOQHAADQEAUVAABAQz1XUNk+\n",
+       "1/YPbP/Y9nu6nQ+mZvufbD9q+84Jrz3V9o22f2T7BtsrupkjpmZ7pe0h23fbvsv2OzqvM4Y9zvaA\n",
+       "7VttD9u+x/b7O68zdn3E9lzbW2x/pfOc8etjPVVQ2Z4r6eOSzpV0uqSLbD+nu1nhOD6l9lhNdJmk\n",
+       "GyPi2ZJu6jxHbxqTdGlEnCHpLElv63zeGMMeFxF7Ja2PiLWS1khab/ulYuz6zSWS7pE0fjMz49fH\n",
+       "eqqgkvQiSdsi4v6IGJN0naTXdDknTCEiNkl6/KiXL5B0VefxVZJeO6tJoVhEPBIRw53Ho5LulfRM\n",
+       "MYZ9ISJ2dx4ukDRX7c8iY9cnbJ8i6TxJn5Q0/q0xxq+P9VpB9UxJD054/lDnNfSPp0fEo53Hj0p6\n",
+       "ejeTQRnbLUlnSrpVjGFfsD3H9rDaYzQUEXeLsesnH5b0bkmHJrzG+PWxXiuomMPhSSTac3Iwpj3O\n",
+       "9qCkL0i6JCJGJr7HGPauiDjUueR3iqSX215/1PuMXY+y/WpJ2yNii46cnXoCxq//9FpB9RNJKyc8\n",
+       "X6n2WSr0j0dtnyxJtn9J0vYu54PjsD1f7WLqmoi4vvMyY9hHImKHpH+X9Bti7PrFiyVdYPs+SZ+T\n",
+       "dI7ta8T49bVeK6huk3Sa7ZbtBZLeIOnLXc4JOV+WdHHn8cWSrj9OLLrItiVdKemeiPjIhLcYwx5n\n",
+       "+2nj3wCzvUjSb0raIsauL0TE5RGxMiJWSbpQ0s0R8UYxfn2t52ZKt/3bkj6i9k2WV0bE+7ucEqZg\n",
+       "+3OSXiHpaWpf7/9rSV+S9C+SniXpfkmvj4ifdStHTK3zrbBvStqqI5cW3itpsxjDnmb7eWrftDyn\n",
+       "8++aiPiQ7aeKsesrtl8h6Z0RcQHj1996rqACAADoN712yQ8AAKDvUFABAAA0REEFAADQEAUVAABA\n",
+       "QxRUAAAADVFQAQAANERBBaDrbH+7899ftn3RCV725ZOtCwBOJOahAtAzbK9Te5LD8xM/My8iDhzn\n",
+       "/ZGIWHoi8gOAqXCGCkDX2R7tPPyApJfZ3mL7EttzbH/I9mbbd9j+s078OtubbH9J0l2d1663fZvt\n",
+       "u2y/pfPaByQt6izvmonrctuHbN9pe6vt109Y9kbb/2r7Xtufmd2tAaAfzet2AgCgI61v3iPpXeNn\n",
+       "qDoF1M8i4kW2F0r6lu0bOrFnSjojIv678/zNEfF4p7fdZtufj4jLbL8tIs6cZF2/K+nXJa2RdJKk\n",
+       "79n+Zue9tZJOl/SwpG/bfklEcKkQwJQ4QwWgl/io578l6U22t0j6rqSnSlrdeW/zhGJKki6xPSzp\n",
+       "FkkrJZ02zbpeKunaaNsu6RuSXqh2wbU5In4a7XsihiW1GvxOAH4OcIYKQK97e0TcOPGFzr1Wu456\n",
+       "/kpJZ0XEXttDkgamWW7o2AJu/OzVvgmvHRTHSgDT4AwVgF4yImniDeRfl/RW2/MkyfazbS+e5OeW\n",
+       "SXq8U0z9mqSzJrw3Nv7zR9kk6Q2d+7ROkvRySZt1bJEFANPi/7oA9ILxM0N3SDrYuXT3KUkfU/ty\n",
+       "2/dtW9J2Sb/TiZ/4FeWvSfpz2/dI+qHal/3GfULSVtu3R8Qbx38uIv7N9tmddYakd0fEdtvPOWrZ\n",
+       "muQ5ADwB0yYAAAA0xCU/AACAhiioAAAAGqKgAgAAaIiCCgAAoCEKKgAAgIYoqAAAABqioAIAAGiI\n",
+       "ggoAAKCh/wcQESvdP72F3wAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f793821a310>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEBVJREFUeJztnVuMJNdZx39f36d6+rI9O7PDrveSlQKysSX7xSA5EREK\n",
+       "0fqFwEsiS0hRgIgHboIHTHiJHyMkIsQLEoqNwkWJEMgoIAG2ERJBKIDROnYgjmPJK8/sXHd2unu6\n",
+       "p+99eOg+h+qanktX12Snqs5PKk13zXTpm93/fOec73zfd0QphcUyC4lHbYAlfFjRWGbGisYyM1Y0\n",
+       "lpmxorHMjBWNZWZ8i0ZE7ojIuyLyAxF5MUijLBcb8ROnEZEk8H3gk8B94L+AF5RS3wvWPMtFxK+n\n",
+       "eRZ4Xyl1TynVA74BfDo4sywXmZTPz10D1lzv14GfcP+AiNhQc8hRSsm0+349jRVEjPErmvvAddf7\n",
+       "64y8jSUG+BXNm8BHReSWiGSAzwLfDM4sy0XG15xGKdUXkV8D/glIAi/blVN88LXkPtOD7UQ49AQ9\n",
+       "EbbEGCsay8xY0VhmxorGMjNWNJaZsaKxzIwVjWVm/G5YWk5ARMwFkE6nzZVKpRgOhwwGA/r9PoPB\n",
+       "wLzXXy86VjTnQCKRIJlMkkwmSaVSlMtlKpWKuVqtFo1Gw1ytVmviGg6Hj/pXOBErmnMgkUgYz5LJ\n",
+       "ZFhZWeHWrVvcvHmTmzdvUqvV2N3dNdf+/j7VahWlFO12+1GbfypWNOeA9jDZbJZcLsfKygq3b9/m\n",
+       "qaee4sknn2RnZ4e1tTU+/PBDstks6XQapRSdTscMaReZuUQjIveAOjAAekqpZ4MwKuwkEgkymQy5\n",
+       "XA7HcahUKly9epXbt2/zxBNPUCqVEBE6nQ4HBwc0m03q9TqpVCr6omGUjPUJpdTDIIyJCslkknQ6\n",
+       "zcLCAouLiywsLJDJZEgmkwAMh0P6/T69Xo9Op0Ov16Pf7zMcDglDbX0QS+6L/6fxQyaZTJLJZE4U\n",
+       "zWAwoNfr0e126Xa7RjRhYF7RKOANEXlTRL4QhEFR4CyiOc7ThIF5h6fnlFKbIrIMvC4i7yqlvhWE\n",
+       "YWFGiyaXy02IJpFIoJQyXqbdbnN4eEir1aLX6zEYDKI/PCmlNsdfd4FXGZW2xB49EXZ7mnQ6TSKR\n",
+       "YDgc0ul0aDabVKtVHjx4QLVapdls0u12oy0aEXFEpDB+nQc+BbwTlGFhxj085fN5IxoRYTAY0O12\n",
+       "aTab1Go19vb2qNVqRjRhYJ7h6Qrw6niJmAL+Uin1WiBWhRzvnCaXyxnRuD1NrVbjwYMHNJtN+v0+\n",
+       "/X4/FJ7Gt2iUUh8ATwdoS2jRsRW93+QWTLlcxnEcUqmUCeC1Wi2azSYHBwfUarVQRIHd2IhwACQS\n",
+       "CVKplNlvKhaLVCoVVlZWuHr1KoVCgVQqRavVMtsGjUYjNHMYL1Y0AaCDeZlMhkwmQ6lUYmlpiStX\n",
+       "rnDt2jVEBKWU8TAPHz6k0WjQ6XQetem+sPk0AeBeYufzeUql0oSnKZVKJJNJ42m0aMLqaaxoAkB7\n",
+       "Gi2aQqHApUuXWFpaYmVlhcXFRUSEZrMZieHJiiYAdCqE3qDM5XJkMhkTmxkMBrRaLZMSsb+/H6q4\n",
+       "jBcrmgDQniabzeI4jokA68nxNNFoTxNGrGgCQHuabDbLwsICuVzO5MlME03YIsBerGgCQE+EHceh\n",
+       "UCjgOA7ZbNbkx/T7fVqtFvV6nb29PSsaC+RyOUqlEisrK9y4cYPl5WXy+TyJRMIE8w4PD01A7/Dw\n",
+       "kE6nQ7/ff9Sm+8KKJgCy2SzFYpGVlRUee+wxLl++PCEavZutE8kPDw/pdruh2dX2YoN7AZDNZimV\n",
+       "SiwvL3Pjxg2KxSKLi4skk8ljPY3OoQkjp3oaEXlFRLZF5B3XvYqIvC4i74nIayJSPl8zLx7u2qZc\n",
+       "LkexWGR5eZmrV69SqVRwHGfC02jh6JKVMHuaswxPfwrc8dz7XeB1pdSPAv88fh8b9F6TjgIvLCzg\n",
+       "OI4J7DmOM5F0pdM7dUGcvsIoGDiDaMaZePue2z8LfG38+mvAzwVs14XmONEsLi5SKBQmMvXcInFX\n",
+       "UiqlQisav3OaK0qp7fHrbUa5NbHBLRp3spX2NDphXCllcn+93iasgoEAJsJKKRW3/nq6GE4LxnEc\n",
+       "Mzzl83k6nc4RwXivMON3yb0tIqsAIvIjwE5wJl18UqmUSRovlUrk83lyuRyp1OhvUM9jdMWBu9A/\n",
+       "CvgVzTeBz41ffw7422DMufiIyIRoyuWySenUEWB3Vwi3aMI8JLk5y5L768C/Az8mImsi8nngy8DP\n",
+       "iMh7wE+P38cGr6dxiwame5qwT37dnDqnUUq9cMy3PhmwLaFBF/fn83nK5TL5fN7sNcHRYriwVVCe\n",
+       "ho0I+2BaiYq7grLT6VCv19nd3WV7e5udnR2TQB4FT2P3nnzg7gqhJ8Fe0dRqNdNSZHt7m2q1Grqq\n",
+       "g+OwovGBN71T1zVp0XS7XeNp1tbWIudp7PA0I3r1dNLw1G63jWjW19d58OCBFU3c0GmbqVSKdDpN\n",
+       "uVw2SeOrq6uUSiUymQyDwYBGo2GK4KrVKvv7+xwcHJgi/yhgRXMKIkIymSSbzZp2aLrSYHl5mdXV\n",
+       "VfL5/BHR1Ov1I6IJS9ntaVjRnAEdl9Gbkl5Pk0gkTHF/o9GgXq9PeBqdCmE9TYxwx2WKxeJETdPq\n",
+       "6qrJmdGX19P0er3Qp0O4saunM6A9TaFQoFwuUygUTEDP20Lk8PCQdrs9EdCLSiRYY0VzCiIy0XTx\n",
+       "0qVLFIvFIxUHw+GQbrdLq9U6IpqoCceK5gy4PY0WjTs+Yz2Nh2NyhF8SkXURuTu+vOmgkUJ7Gu/w\n",
+       "5N6kHAwGdDqdqaKJmnD85ggr4CtKqWfG1z8Gb9rFQC+5j+uhB9Dr9Tg8PDSdrXQxXK/Xi4xQ3PjN\n",
+       "EYYY9Q8+rcWrWzTestsoMs+c5tdF5Dsi8nLUS1i8u9reDcper2e6de7u7lKr1UxBXBTxK5o/Bj7C\n",
+       "qOfeJvAHgVl0ATlteNITYDs8nYBSakeNAb5KxPoH6/Oa3GUqOhpcKpVYWFiY6AbRaDSo1Wo8fPjQ\n",
+       "iEZXUUYRX6IZJ5Nrfp6I9Q/Wk1/dR08PS4VCgWKxSC6XM6LRVZP1ep39/X2zox3l4enUbYRxjvBP\n",
+       "AZdFZA34EvAJEXma0SrqA+BXztXKHzLTPI0Wje6fl0wmTQsR7Wm0aHSKZ1SHJ785wq+cgy0XAl2f\n",
+       "7fY0enjSonGfP9npdI4MT1HZYzoOu2E5hUwmw+LiotmgrFQqE1sHrVbLzGfa7TbNZpN2uz2xMRll\n",
+       "7DaCBxEhm82yuLhIpVLhypUrLC0tUSqVcBzHbBv0ej3T3Uo3KYpSbdNJWNF40G3qC4UCS0tLrK6u\n",
+       "srS0ZDxNOp0GmGiJ1mw2Q93ZalasaKZwkqfJZDLApGjcniYOWNFMwdvi1d2pUzOt7DbqcxmNFY0H\n",
+       "ETnSf0b3BNanrejJrj5uJ0yHlgaBFc0U3KLJZrNmn0lvG7i7W+ljBK1oYo7X06TTaVKp1FTRuIcm\n",
+       "K5qY4k7vdCdduROu+v0+7XabRqNBtVql0WjQbrft6inOpNNpHMcx5zZ5l9vu/Bl9BmWUNyi92Iiw\n",
+       "B+1pHMcx0WCdDqE9jRaNbluvl91xEc2JnkZErovIv4jI/4jId0XkN8b3I91HeJqn0Tk0gIkGa09T\n",
+       "r9cjVXZ7GqcNTz3gt5RSPw78JPCrIvI4Eesj7E2yyufz5ggefaLKtImwXnbbibALpdSWUuqt8esG\n",
+       "8D3gGhHqI6yHI90OTbeodx/2pRsA6DhN3DnznEZEbgHPAP9BxPoIu2u1S6WS6TquRaPza7SniTtn\n",
+       "Eo2ILAJ/A/ymUurA/RcX9j7C3m6duvGiWzTucxAsZ8vcSzMSzJ8rpXTr120RWVVKbYW9j/C01ZKu\n",
+       "oNSCUUqZeYtO8Ww2m+YonrAfkDErp62eBHgZ+F+l1B+6vhWpPsKZTMZ06tQ72nq15C651amd7h40\n",
+       "tVrNJGHFJbh3mqd5DvgF4G0RuTu+90VGfYP/SkR+CbgHfObcLDxndP6M4ziUy2UuX748kXAFGNHo\n",
+       "I3jcgqlWq3S7XVOGGwdOFI1S6t843htFoo+wFo0+hN0tGj08DYdDE5txexktnKiceXBWYhkRdk9o\n",
+       "dXtXLZpKpWKO35kWAa5WqxNDkq44iMNcRhNL0cBk1YHucqVFoyPAqVQKpZSpoNRlt97TbrVg4iKc\n",
+       "WIpGJ1q5RaNjNJcuXZrqaZrNpinw157GXQwXF8FAjEWjhTPN0+jewHoi3O12TYH/tFrtOAkGYioa\n",
+       "L1pAWkR6SBIRut0u1WqVvb09tre32dzcZG9vj0ajYQ4DixuxFI32Dvryns+klDKrJRFhZ2eHra0t\n",
+       "Njc3WV9fNzvbnU7nUf8qj4RYikbjFY1bOJp+v29OU9nY2OD+/fsmwBfVAv/TiK1o9LDijrG4S1L0\n",
+       "1el0jGg2Nze5f/8+vV7PXHEklqJxz0OGwyHNZpO9vT02NjZwHMcIRx/ytba2xtbWFvv7+zQajdgF\n",
+       "87zEUjQavRFZr9fZ2NggkUhwcHBghipdorK5ucnOzg4HBweR7NY5K7EVjf4PHwwG1Go1RIRms8nW\n",
+       "1taRw9f1lkGj0TClt3EVDICc9MuLyHXgz4AVRg2M/kQp9Uci8hLwy8Du+Ee/6G0LG5YcG50aoWub\n",
+       "3IeX6q/uOY7elIyDaJRSUxOIThPNKrCqlHprnIj134xSOz8DHCilvnLCZ6P/rxpxjhPNabvcW8DW\n",
+       "+HVDRHSOMMSoj7BlkjMnvbpyhL89vhWbPsKWSc4kmvHQ9NeMcoQbxKyPsGWSE+c0YHKE/x74B0/K\n",
+       "p/7+LeDvlFJPee7bOU3IOW5O4ytHOOp9hC0nc9rq6WPAvwJvM1pyA/we8AKjocn0EXbVQenPWk8T\n",
+       "cnwtuefBiib8+BqeLJZpWNFYZsaKxjIzVjSWmbGiscyMFY1lZqxoLDNzbnEaS3SxnsYyM1Y0lpk5\n",
+       "V9GIyB0ReVdEfiAiLwbwvHsi8raI3BWR//Tx+VdEZFtE3nHd893e9pjnvSQi62Mb74rInRmeF2gL\n",
+       "3hOe59tG4Gi1YVAXkATeB24BaeAt4PE5n/kBUJnj8x9nlEj2juve7wO/M379IvDlOZ/3JeC3fdq3\n",
+       "Cjw9fr0IfB943K+NJzzPt41KqXP1NM8C7yul7imlesA3gE8H8FzfaaZKqW8B+57bvtvbHvM88Gmj\n",
+       "CrgF7wnP820jnO/wdA1Yc71f5/8N9osC3hCRN0XkC3M+S3Me7W3nToUNugVvkOm65yma81jLP6eU\n",
+       "egZ4nlH39I8H+XA18uPz2j13Kqy3Be+8NgadrnueorkPXHe9v87I2/hGKbU5/roLvMpoCJyX7XGp\n",
+       "js5InKu9rVJqR40BvjqrjSe14PVjo+t5f6GfN6+N5ymaN4GPisgtEckAn2XUStYXIuKISGH8Og98\n",
+       "imDSTANtbztPKmzQLXjPLV13ntXMGWbvzzOasb/PqApznmd9hNEK7C3gu36eB3wd2AC6jOZbnwcq\n",
+       "wBvAe8BrQHmO5/0io4rUt4HvjP9zr8zwvI8Bw/HveHd83fFr4zHPe34eG5VSdhvBMjs2ImyZGSsa\n",
+       "y8xY0VhmxorGMjNWNJaZsaKxzIwVjWVmrGgsM/N/z4EQsKT2Kt0AAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f793819f790>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEn9JREFUeJzt3X+QXfVZx/HPJz9IsiSQwYRsAqmJChoyLQRbBiiFYquD\n",
+       "TKFVaykqxerUcdraiC1Tyoz+pdPajlPsdHSmgrTQXyqttB2FghYDpZYU2A2QpIQ4RIH8MpjQ/Nqw\n",
+       "SR7/uHfDZtnNnmdPvnvPDe/XTCb3nPvcc773fM8999lzzv0+jggBAABg4qZ0ugEAAADdjoQKAACg\n",
+       "JhIqAACAmkioAAAAaiKhAgAAqImECgAAoKZpnVy5bcZsAAAAXSMiPNr8ogmV7Ssk3SJpqqRbI+Iv\n",
+       "R8Zcd911r3rdmjVrdO655x41LzNeVnZsrcOHD5+wsSWNtZ3Xr1+vZcuWHTWv1Ps7ePBg5VhJGhwc\n",
+       "rBx74MCByrEvv/xykeUODAxUjj0eduzYoXnz5r1qvj3q8WNUU6ZUP/Gd6ev9+/cXic3GZ/o6s38e\n",
+       "OnSocuxY/RERoz43bVr1Q/2MGTMqx5500kmVYzNtyOxDmVgpty9nvkvq9vW+ffvU09PzqvmZY1a2\n",
+       "HZn3l/mslvy+zsj0ddXYYx2/i13ysz1V0uclXSHpHEnX2l527FcBAAB0n5L3UF0gaWNEbIqIQUlf\n",
+       "l/TOgusDAADoiJIJ1RmSnhs2/Xx73rgWLFhQpEGYHKNdMkJ3GO2SA4Dypk+f3ukmoKaSCdWEL4z2\n",
+       "9vYez3Zgks2fP7/TTcAEkVB1t8w9I2gWEqruV/Km9BckLR42vVits1RHWbNmzZHHCxYsIJkCAACN\n",
+       "cPjw4co35JdMqB6VdJbtJZI2S7pG0rUjg0b+mg8AAKAJpkyZctQvSY/1a9xiCVVEHLT9YUnfVWvY\n",
+       "hNsiYn2p9QEAAHRK0XGoIuIeSfeUXAcAAECndXSkdKn6IHJNGeht6tSpRWIzbc4MkJdpg1RuW5R6\n",
+       "f9kbOTODFmZiZ86cWSQ28/4ygyxm4zPtyMRm9qHMdsvenN2EQXgz2y3zGZFyfT1r1qzKsZnPSGa5\n",
+       "c+bMqRyb3RaZgSSbMBBwdpDazLE2+/1QYrmlvkekMgMHn3HG2IMVUMsPAACgJhIqAACAmkioAAAA\n",
+       "aiKhAgAAqImECgAAoCYSKgAAgJpIqAAAAGoioQIAAKiJhAoAAKAmEioAAICaSKgAAABq6ngtv0xN\n",
+       "o1Kydb86vdxua4OUq5+VkanVJOW2R6nYUrUYszXNMg4dOlRkuZn3l6l1l635lYnP1K/L1NDL7EPZ\n",
+       "/f7gwYOVYzOf1UxsZh/au3dv5djsd0imHaViM/typgaiJPX09BRZdmZfznyuS35HZfrkeOQiRc9Q\n",
+       "2V5s+wHba20/ZfsjJdcHAADQCaXPUA1KuiEi+m3PlvSY7fsjYn3h9QIAAEyaomeoImJrRPS3H++R\n",
+       "tF7SopLrBAAAmGyTdlO67SWSVkh6ZLLWCQAAMBkmJaFqX+67S9LK9pkqAACAE0bxX/nZni7pG5K+\n",
+       "HBF3j3y+v7//yOPe3l719vaWbhIAAMC4nn32WW3atKlSbNGEyq3fQ94maV1E3DJazHnnnVeyCQAA\n",
+       "ABOydOlSLV269Mj0qlWrxowtfcnvzZJ+R9Lltvva/64ovE4AAIBJVfQMVUR8X4zGDgAATnAkOwAA\n",
+       "ADV1vPRM1WHnm1JCJaNUuZVSy83KtCNbCqTUckvtR5myL6XaMDg4WGS5JWX2oUz5lGypnEwpl0z/\n",
+       "ZUqMzJw5s3LsnDlzKsdKubIhGZk+yWzjkqVLSpU7ypR8mTt3buXYU089tXKslHt/mWPG7t27K8fu\n",
+       "2VP9x/z79u2rHDswMFA5VsqVk8nsy2PhDBUAAEBNJFQAAAA1kVABAADUREIFAABQEwkVAABATSRU\n",
+       "AAAANZFQAQAA1ERCBQAAUBMJFQAAQE0kVAAAADV1vPRM1dIhmRIVJUuzlGpHpixDJrYpZWqaUjoo\n",
+       "sz0y5UtKleDIlE7IlmXIlJ3IbLdM6YtMbKa8T1amhFGpMjWZ8jAzZsyoHJtV6lhUqvRMtgxPpkRM\n",
+       "pvRM5hiwefPmyrEbN26sHCvljhmlvksy/ZfZ77PHgEx8ps1jrm+sJ2z/hqSQNNoRISLim1VWYHuq\n",
+       "pEclPR8RV02olQAAAA12rPTtKrUSqrFUSqgkrZS0TlLuzwgAAIAuMWZCFRG/W3fhts+UdKWkv5D0\n",
+       "J3WXBwAA0ETj3jhgu9f2bbbvbU+fY/v3Ky7/s5JulFT9Qi0AAECXqXIn5hcl3SdpUXv6GUk3jPci\n",
+       "2++QtD0i+jT6fVgAAAAnhCq3wM+LiH+wfZMkRcSg7So/Z7hY0tW2r5Q0U9Iptu+IiPcND+rr6zvy\n",
+       "uLe3VwsXLqzeegAAgEI2b96sLVu2VIqtklDtsf1TQxO2L5T00ngvioibJd3cfs1lkj42MpmSpBUr\n",
+       "VlRqKAAAwGRatGiRFi1adGR6+EmgkaokVB+V9B1JP2P7B5LmS3r3BNrVjAGRAAAAjrNxE6qIeMz2\n",
+       "pZJ+Xq17oZ6OiOojAraWsUrSqok1EQAAoNnGTahsz5L0QUmXqHWW6SHbfxsRuWGZAQAATlBVLvnd\n",
+       "Ieknkj6n1hmq35J0p6TfLNguAACArlEloVoeEecMm/6e7XXHqwFV64mVrEmXqWfUBJm6YyVr6DWh\n",
+       "Pl+27zLxme1cqs7c7Nmzi7Uh8/4ysZlaYvv27ascm6lVmFmuVK4+X6ZPMrUj9+7dWzk2K9OOjMw+\n",
+       "lDne7969O9WOzPvLtGP//v2VYw8cOFA5NlMjUMq9v1LHuMxyMzX0st85mTZnagqOpcq7ftz2RUMT\n",
+       "7V/5PVZ7zQAAACeIYxVHfnJYzMO2n1PrHqrXSXp6EtoGAADQFcYrjgwAAIBxHKs48qbh07ZPV2vE\n",
+       "cwAAAAxTpTjy1bafkfSsWmNJbZJ0T+F2AQAAdI0qN6X/uaSLJG2IiKWS3ibpkaKtAgAA6CJVEqrB\n",
+       "iNghaYrtqRHxgKQ3Fm4XAABA16gySMNO23MkPSTpK7a3S9pTtlkAAADdo8oZqndJ2ifpBkn3Stoo\n",
+       "fgEIAABwRJXiyENnow5J+mLR1gAAAHShYw3suUetgTxHExFxyvFowPTp0yvFNaXcSimlSuuULKuT\n",
+       "aXOp2EzZAqncfpSJLbUtMiVfsjLbORM7Z86cyrGnnFL9kJPdLzKlQDKxVUtrlYzNKlWSaObM6qPu\n",
+       "ZEqGlCy5lJEp+ZLZhzIllyYSX1XmM1Wq5Eu2PEzV/EKqvn8++OCDYz53rHGoqhcRG4PtuZJulbRc\n",
+       "reTs9yLih3WXCwAA0CRlKrq+4q8l/WtEvNv2NEknF14fAADApCuWUNk+VdJbIuJ6SYqIg5JeKrU+\n",
+       "AACATilzMbllqaT/tX277cdt/53tnoLrAwAA6IiSCdU0SedL+puIOF/SXkk3FVwfAABAR5S8h+p5\n",
+       "Sc9HxI/a03dplISqr6/vyOPe3l4tXLiwYJMAAACq2bJli7Zu3VoptlhCFRFbbT9n++yI2CDp7ZLW\n",
+       "joxbsWJFqSYAAABM2MKFC4860dPf3z9mbOlf+f2RWuVqTpL0X5LeX3h9AAAAk65oQhURayS9qeQ6\n",
+       "AAAAOq3kTekAAACvCaUv+Y2ragmFUmVAsvGlyhY0RRPKyWS2cbbsRKZ8QqlyMqXKnBw8eLBybDY+\n",
+       "8/4yfTJr1qzKsZnSJdkSFZkSOBmltnG2VFWmLEqmrE2m3FFmuaVKcWVljkU9PdVHBZo7d27l2KaU\n",
+       "Uit1DMhs46aUGhtzfbWXAAAA8BpHQgUAAFATCRUAAEBNJFQAAAA1kVABAADUREIFAABQEwkVAABA\n",
+       "TSRUAAAANZFQAQAA1ERCBQAAUFPHS89ULa3RlNIzpcoAZIfUL6VUGZ5M7OHDhyvHZkqzZJUqfzFj\n",
+       "xozKsZkSFdmySJntnCkbkumTTOmSPXv2VI49+eSTK8dK5UrgZMqRZMrDZLablOvrzLEoU2IkU4Yn\n",
+       "E5vZblJuX85st8y2yMRmyyiVWnYmNtMne/furRy7f//+yrFS2dJdoyl6hsr2J2yvtf2k7a/arv5N\n",
+       "AgAA0CWKJVS2l0j6gKTzI+L1kqZKem+p9QEAAHRKyUt+P5E0KKnH9iFJPZJeKLg+AACAjih2hioi\n",
+       "/k/SX0n6H0mbJe2KiH8rtT4AAIBOKXnJ72cl/bGkJZIWSZpt+7dLrQ8AAKBTSl7ye6OkH0TEi5Jk\n",
+       "+5uSLpb0leFBa9euPfJ4/vz5Ov300ws2CQAAoJodO3boxRdfrBRbMqH6saQ/tT1L0oCkt0taPTJo\n",
+       "+fLlBZsAAAAwMfPmzdO8efOOTG/YsGHM2JL3UK2RdIekRyU90Z79hVLrAwAA6JSiA3tGxKclfbrk\n",
+       "OgAAADqN0jMAAAA1kVABAADU1PFaflVrbmXqqpWqwSblat2VbEdVmVpUUq7N2RpaJZab3caZ+FK1\n",
+       "CgcGBirHZmpRZWv5NaF+ZKbNmbp4JesaZvpvx44dlWN3795dOTZT11A6PnXKRjN9+vTKsZkaiJm6\n",
+       "cZn+kPLHxKoyn6dMPc+S+3Kmjl7muFyqbmNWpk8y+/JYOEMFAABQEwkVAABATSRUAAAANZFQAQAA\n",
+       "1ERCBQAAUBMJFQAAQE2NTKi2bNnS6Saghq1bt3a6CZggPnvdbefOnZ1uAiYoM8QGmqmRCRVfyN1t\n",
+       "27ZtnW4CJojPXnfbtWtXp5uACSKh6n6NTKgAAAC6CQkVAABATe5keRTbna/NAgAAUFFEjFqDrqMJ\n",
+       "FQAAwImAS34AAAA1kVABAADU1LiEyvYVtn9s+xnbH+90ezA2239ve5vtJ4fNO832/bY32L7P9txO\n",
+       "thFjs73Y9gO219p+yvZH2vPpw4azPdP2I7b7ba+z/cn2fPqui9iearvP9nfa0/RfF2tUQmV7qqTP\n",
+       "S7pC0jmSrrW9rLOtwjHcrlZfDXeTpPsj4mxJ/96eRjMNSrohIpZLulDSh9qfN/qw4SJiQNLlEXGe\n",
+       "pDdIutz2JaLvus1KSeskDd3MTP91sUYlVJIukLQxIjZFxKCkr0t6Z4fbhDFExEOSRg7NfLWkL7Uf\n",
+       "f0nSuya1UagsIrZGRH/78R5J6yWdIfqwK0TEvvbDkyRNVeuzSN91CdtnSrpS0q2Shn41Rv91saYl\n",
+       "VGdIem7Y9PPteegeCyJiaKj0bZIWdLIxqMb2EkkrJD0i+rAr2J5iu1+tPnogItaKvusmn5V0o6TD\n",
+       "w+bRf12saQkVYzicQKI1Jgd92nC2Z0v6hqSVEbF7+HP0YXNFxOH2Jb8zJV1q+/IRz9N3DWX7HZK2\n",
+       "R0SfXjk7dRT6r/s0LaF6QdLiYdOL1TpLhe6xzXavJNleKGl7h9uDY7A9Xa1k6s6IuLs9mz7sIhHx\n",
+       "kqR/kfSLou+6xcWSrrb9rKSvSfol23eK/utqTUuoHpV0lu0ltk+SdI2kb3e4Tcj5tqTr24+vl3T3\n",
+       "MWLRQbYt6TZJ6yLilmFP0YcNZ3ve0C/AbM+S9MuS+kTfdYWIuDkiFkfEUknvlfS9iLhO9F9Xa9xI\n",
+       "6bZ/VdItat1keVtEfLLDTcIYbH9N0mWS5ql1vf/PJH1L0j9Kep2kTZLeExG7OtVGjK39q7AHJT2h\n",
+       "Vy4tfELSatGHjWb79WrdtDyl/e/OiPiM7dNE33UV25dJ+mhEXE3/dbfGJVQAAADdpmmX/AAAALoO\n",
+       "CRUAAEBNJFQAAAA1kVABAADUREIFAABQEwkVAABATSRUADrO9sPt/3/a9rXHedk3j7YuADieGIcK\n",
+       "QGPYfqtagxxelXjNtIg4eIznd0fEnOPRPgAYC2eoAHSc7T3th5+S9BbbfbZX2p5i+zO2V9teY/sP\n",
+       "2vFvtf2Q7W9Jeqo9727bj9p+yvYH2vM+JWlWe3l3Dl+XWz5j+0nbT9h+z7Bl/4ftf7K93vaXJ3dr\n",
+       "AOhG0zrdAADQK6VvPi7pY0NnqNoJ1K6IuMD2DEnft31fO3aFpOUR8d/t6fdHxM52bbvVtu+KiJts\n",
+       "fygiVoyyrl+XdK6kN0iaL+lHth9sP3eepHMkbZH0sO03RwSXCgGMiTNUAJrEI6Z/RdL7bPdJ+qGk\n",
+       "0yT9XPu51cOSKUlaabtf0n9KWizprHHWdYmkr0bLdkmrJL1JrYRrdURsjtY9Ef2SltR4TwBeAzhD\n",
+       "BaDpPhwR9w+f0b7Xau+I6bdJujAiBmw/IGnmOMsNvTqBGzp7dWDYvEPiWAlgHJyhAtAkuyUNv4H8\n",
+       "u5I+aHuaJNk+23bPKK87RdLOdjL1C5IuHPbc4NDrR3hI0jXt+7TmS7pU0mq9OskCgHHxVxeAJhg6\n",
+       "M7RG0qH2pbvbJX1Orcttj9u2pO2Sfq0dP/wnyvdK+kPb6yQ9rdZlvyFfkPSE7cci4rqh10XEP9u+\n",
+       "qL3OkHRjRGy3vWzEsjXKNAAchWETAAAAauKSHwAAQE0kVAAAADWRUAEAANREQgUAAFATCRUAAEBN\n",
+       "JFQAAAA1kVABAADUREIFAABQ0/8DDbzgL6GT1HEAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f793811c290>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAGlxJREFUeJztnVls49t93z+Hi7iJu0Rq44xm5l4bAVzAfnEfnKB5CIJr\n",
+       "FEjSlwYGChTpgj50Q/vQJH1o/JgGaFC0D0Xb2EE3JC1auEgKtLUNtIj70DYOfGNf9/pO7r3SSENR\n",
+       "JCVS3ClS5OmD+Ds+5FALN4nS/L/AwZ/L8Mwh/1/9zm8/SmuNAweTwHXfC3Dw8OCQxsHEcEjjYGI4\n",
+       "pHEwMRzSOJgYDmkcTIypSaOUek8p9SOl1J8opX55notysNxQ0/hplFJu4CPgZ4As8IfAV7TWH853\n",
+       "eQ6WEdNKmi8CH2ut97XWXeB3gZ+f37IcLDM8U35uGzi0nr8G/rT9D5RSjqv5gUNrrca9Pq2kcQjx\n",
+       "FmNa0mSBjPU8w6W0cfAWYFrSfBd4Vym1q5RaAX4R+L35LcvBMmMqnUZrfaGU+hvAfwfcwNccy+nt\n",
+       "wVQm960mdhThB495K8IO3mI4pHEwMRzSOJgYDmkcTAyHNA4mhkMaBxPDIY2DiTFtwPLRQ6kfuyjc\n",
+       "brcZHo8Hl8tlhtvtRmtNv99Ha43WmouLC3q9HhcXF1xcXJjXAR5DyZBDmjFQSpnhcrkIhUKsrq6a\n",
+       "q9/vJxAImKuQo9vtcnFxQbVaNaNSqdDr9ej1evT7fXq93n1/vZnhkGYMbMK43W5WV1dZW1szIxqN\n",
+       "Do3z83Pa7Tbn5+ecn5+Ty+XI5XIcHx/T6XTodDqGVA5pHimEMKOkyWQy7OzskEqlWF9fN6PZbNJo\n",
+       "NMw1Go3i9XrpdruUy2Uzb6/XQyn14LeomUijlNoHqkAP6GqtvziPRd03bCnj9XpZXV0lmUyyvb3N\n",
+       "8+fP2djYIJ1Os7GxQSqVotFo0Gg0qNfrNBoN+v0+zWaTcrnM6uqqIcrFxcV9f7W5YFZJo4Gf1lqX\n",
+       "5rGYZYBsTUKYlZUVQqEQsViMtbU1Njc3icfjrK6u4vV6gUtFeWVlhUAggFKKSCRCNBolHo+TSCRw\n",
+       "u90AXFxc0G63325JM8DYSOhDhi1lfD7fEGk2NjYIhUKEQiFWVlaAH5NGKYXH4yESiRCLxYjH4yST\n",
+       "SSNlWq3WPX+z+WAekubbSqke8M+11v9yDmu6d7hcLjweDx6PZ4g06+vrbG5u4vV68Xg8eL1etNa4\n",
+       "3W5DGJ/PZ0iTSCRIJpN0u13a7Ta1Wm3IlH+omJU0X9Ja55RS68C3lFI/0lp/Zx4Lu2vIzXS5XHi9\n",
+       "Xvx+P6urq0NSQ7abUYjSLPMEg0FjnofDYarVKj6fz2xTDx0zkUZrnRtci0qpb3BZ2vLgSCM3XaRF\n",
+       "NBo15nUqleLJkyckk0mCweB9L3UpMEuFZVApFR48DgE/C/xgXgu7S4i15PF4WFlZIRKJkE6n2d3d\n",
+       "5bOf/SyZTIa1tTUCgcCN87wNmEXSpIFvDH4oD/DvtNbfnMuq7hC2eS2kiUajbGxssLu7yzvvvMP6\n",
+       "+rojaSxMTRqt9R7w+Tmu5c5gb0di+cgIBoPGStrZ2WF3d5dwOGzCB9dBa/2GN1kUZjHffT6fiUfJ\n",
+       "Z0bHsuOt8wgrpfD7/UPxo2AwSDAYJBAIEA6Hef78Odvb26ytrREOhwkGg7dSZGV7sq2uaDRKu92m\n",
+       "2+3S7/dRStHpdExAs9fr0e12Tdyq2+0uPXHeOtK4XC78fv9Q7CgSiRAOhwmHw0SjUXZ2dgxpIpGI\n",
+       "kRK3tX7cbjd+v9+QxiaMx+Oh3W7T6XQ4Pz+n0+nQarVot9u0Wi263e6Cf4HZ8daRRiRNNBo1MSQx\n",
+       "p2VITEkkjWw1N5FGtqdRSWMTxufz0Wg0aLVatFotms3mg/MYv5WkCQQCxGIx0uk029vbJJNJMxKJ\n",
+       "hAkDRCKRqZRfj8eD3+8nHA5zfn4+5PsJhUJDcap6vW7CEeI1lhjVsuo4bx1pXC4XwWCQRCLBzs4O\n",
+       "z58/N9tTJBIxRJnGGSc6jdfrJRgMmq0mEAgQiUSGIuH2tVgscnx8jMfj4eLigvPz86FErmUjzltH\n",
+       "GvHYrq2tsbOzw4sXL4YSqvx+Pz6fD5/Ph8cz3c8jpAFYWVkxEkf0GNmW5BoOh3G73SY63mg0TG5O\n",
+       "r9dzSHPfkEy8ZDJpJI3EmcREttM7J4HoNEIar9dLKBQymXsiPYQ4ovyKGd5qtSiXy4YkvV6PTqez\n",
+       "iJ9hJrwVpLFzev1+P5FIhGQyycbGBpnMZceUWXJ4hSzy2O1243K5TBRcIP9GpIhk/ImEqdVqlMtl\n",
+       "4+tZ1sj4oyeN2+025nQ4HGZtbY3d3V3W19cJBoMmQcq+Tgo7sdxOMJdhk9Z2+glB4/E4Ozs79Pt9\n",
+       "/H4/2WyW168v2/3U6/WlSxF99KRxuVxEIhE2NjbY2Nhge3t7iDTAG8SZFFprkzg+bsj25/V6h+Jc\n",
+       "8jgejxvCJBIJVldX0VpTr9c5Pj6e908yMx49aUTSbG5u8uLFC54/f87Ozs5CJI3oLfJYrjKv6Ehi\n",
+       "govESSQSBAIBkskkmUwGr9drCLOM6RQ3kkYp9XXgzwIFrfWfGryWAP498BTYB/681vpsgeucCHLj\n",
+       "7Uy6dDrNs2fP+MxnPkMikTA3ahqMbj8SBpAhiq+tAPf7feDNNAyllEncEjSbTV6/fk0sFjPkWqbY\n",
+       "1G0kzW8D/xT419ZrvwJ8S2v9G4PG078yGPcOsV5kSxDfi9QtiQ9Git5GFeDb3JTRkhWxhGSIlSQj\n",
+       "EAiY+JbEuGT4/f43pJsksycSCTY3N/H5fEP/pxDwvnAjabTW31FK7Y68/HPAnxk8/lfA/2SJSOPx\n",
+       "eMwNiUajRgmWGyaksS0e+3oTzs/PqdVqVKtVarXakHe30WgMkebi4oJQKGQi5aurq8RiMWKxGAA+\n",
+       "n28saUKhkCGN2+2mWq0CLIUJPq1Ok9Za5weP81zm1iwFRNJIuqaEA0TSBAIBVlZW8Hq9M0maWq3G\n",
+       "yckJp6enVCoVU00pFZVSddnr9Yy3WdbSbrcBjPl/HWk2NjaMTtTpdJbCmppZEdZa62XpryfWiF12\n",
+       "kkgkiMViRCIRQ5rbShr7ua1TNJtNzs7OKBaL5HI5SqUS5XKZs7MzSqXSUAig1+sN5RnHYjG01kaP\n",
+       "6ff7byi7Xq+XcDhMMplka2vLzNVsNqlUKm+Y9XeNaUmTV0ptaK2PlVKbQGGei5oEdjWkOO+SySSb\n",
+       "m5vGxM5kMqRSKaLRKD6fzyiXkgw+CttcFq9st9s1JbavX78mm82aIduUbFm25WQPqUoIBoPEYjGa\n",
+       "zabZbkQ5drlc+Hw+otEo6XSaXq9nzHVZc61We8M5CHfXXGBa0vwe8BeBfzi4/ue5rWgKiN9D3PeJ\n",
+       "RILt7W2ePXvG06dPTapDJBK5NWnson47wFiv1zk8POT169fm2mw2h2JJNumk5qnT6dBut02saW1t\n",
+       "jUajQafTeSP1wufzEYvF6PV6ZiuV8IbL5aJUKpkGA7JGwV0Q5zYm9+9wqfSuKaUOgX8A/DrwH5RS\n",
+       "f5mByb3IRd6wvqFqSCHN1tYWL1684MWLF0MeYYley1/2KMTnIjda9JezszMzDg8POTg4MEMkkIxR\n",
+       "k1wkQqvVol6vk0gkODs7M5JGAqOjksbr9RKJRFhZWTEE11oby08IfdeR8NtYT1+54q2fmfNapoZd\n",
+       "SSCk2dzcZHd3l3fffdfk6MpfrO3HEdg3WdITRDKcnZ1xcnLCyckJxWLRkGV/f5+Dg4Mhb/A4JVXM\n",
+       "8kajgd/vJ5VKUalUqNfrnJ+fm3waIcbKygoej4dwOEy/3zfv2zpMr9ej0WiYP4C73KIehUfY7idj\n",
+       "u+ltolwnWUTEi95ydnZGuVw2yu3oNZ/PUyqVzFZ0k1IqZJJtpF6vUyqVyOfzHB4eEo/HjSS0Qw39\n",
+       "fh+Xy2WSxjY2Nsx36Ha71Ot1yuWykYzifV40cR4FaQRXVQLI86tCBKKgik6Sy+U4OjoyPWbEFyPX\n",
+       "s7MzKpUKzWbT3CghzzjITZXHtVqNUqnE8fGxybXp9XrGqSdrFZdAIBAgHo+brEORMqVSiUAgQKfT\n",
+       "Md/tunXMC4+GNKOSxiaN/d4o7OL8er1OpVIhl8uxt7fH3t4e+/v7Q7kvrVZryDsr29F1N8oOaF5c\n",
+       "XAyRRspihDB2/Euufr+fWCxmrp1Oh3K5TC6XIxAI0Gq1zP9xF3jwpBklwyhpxmXf2QFKiR1Jgb7c\n",
+       "jL29PT788EM++uijoe1rmmoB0XcEsj0FAgFDbOmBI+SziSPebVl7q9Uil8sRjUaNs1K2v7uo8nzw\n",
+       "pJkGtu+k0+lQLBYpFArk83mOj485ODigWCxSq9WGPLvzEvsi2arVqgl1yJZXrVYJBoNDmYTLVu77\n",
+       "1pHG9ptIzdHJyQnZbJbDw0MODw/J5XIUi0Xq9bqJWo9Ki1lgk8btdhOPxymXy1SrVer1OsDMecqL\n",
+       "xPKtaMGwTepWq0WtVqNYLJLNZvn000/59NNPjT+mVquZisd5KpjdbpdWq2WSyePxuJE0tVrNmN52\n",
+       "s6RlwltHGsCQptFoUKlUjKTZ29vj5cuXQ4ruIioeRdL0+32j1NqkEY/1ysrKlUQdp9zfFbkeJWmu\n",
+       "+yH7/T7VatW0bT06OmJ/f59sNku5XDalJrIl3ee6bVLYmYXSDmV9fZ1MJoPH4zHEsy26ReFRkgbe\n",
+       "tKoEWmuq1SpHR0e8fPmSTz75hGKxSLFYNKQR5XfRpBlH7nHrHk1JlVqqVCpFJpMxFmOn06FarS68\n",
+       "HvzRkWYcUewUCJE02WyWjz76iA8++MC0dG02myYz7i6cZLK2cWGN6ySNpFWkUikT5RbCXBWEnSem\n",
+       "zRH+KvBXgOLgn/2q1vq/LWqR02Lcnt/v96lUKoY077///ht5M/eJ20oaaWAgfqZqtUqhULiTRPTb\n",
+       "0PK3gfdGXtPAb2qtvzAY90YY+y9vfX2ddDpNPB4nFApda66K1LmPhCZJRw2HwyQSCZOcJRmG4rAT\n",
+       "AowmiYlUEasvl8tRLpdNWGPRuJE0+rJbZ3nMW0thB0pALxqNsr6+zsbGBolEglAoZFq2jhv3CSFN\n",
+       "JBIxlRE2aezk93HZhUKaQqHAwcEBR0dHQwHURWOWDfBvKqX+WCn1NaVUbG4rmhB2kyIhzW0kzX3C\n",
+       "6/UOkSYejw81V7pJ0pyfnxtJc3h4OESapZA0V+CfAc+47LmXA/7R3FY0Iez67IcmaSQPWCRNNBod\n",
+       "kjS2fnKdpMlms5yenppzGRa+/mk+pLU2OcFKqd8Cfn9uK5oQotNIInkymSQSiZhg4H2tScxgO4dZ\n",
+       "EqYSiQTr6+tsbW2RyWSMHhYIBMZaP6P6l/Tnk5jYXVp7MCVplFKbetB4Gvhz3GP/YHF2SasySWjy\n",
+       "+/33VtIqKaj2GQt2QlgymSSVSg2RRlIfrktBHW1ZctdkEUyTI/xrwE8rpT7PpRW1B/y1ha7yGowr\n",
+       "WRG94L4kjUgW+3wFaZYk1RLpdJqtrS2ePHliiH6dpLFrqUTS2Jl6d7n1Tpsj/PUFrGUqjEoaqdH2\n",
+       "+/33uj3ZSWB221nJYRZJ8+TJE6N/XRWctJPdbcKMugzuCstpXkwIW4cQPeI+I8M+n8+U4MqhGlJT\n",
+       "HolEePr0Kdvb24bgkkhuVxzYhGg0Gqauqlarsbe3Z3wzrVbLHHd4V7GyR0GaZYM4G9fW1ox1ZI9U\n",
+       "KkUqlSIWiw0RRgg/WmQnllKhUKBYLPLq1StjMUkZzLwTxa6DQ5oFQOqWUqmUOfPSHuFw2BztI6a1\n",
+       "LR1HKyQqlQr5fJ6DgwNevXrF0dERx8fHxjfT6XTmmiR2ExzSLAB2WW0mk2F7e5utrS0z7C5Y44KV\n",
+       "dsmLOPLy+Tz7+/u8fPmSQqFgEsXsLudLbXI7GIa0MBFFN5PJ8OTJEzY3N0mlUsTj8aH+xDdFovv9\n",
+       "viFMs9mkXq9TrVY5Ozvj9PSUs7Mz0zb2PlrGPkrS3LUSHAqFTFt88UrLibrJZJJoNGpaxN4GYikJ\n",
+       "aaSGXBoM1Ot1c0jHQ+oasdS46x8yFAqRSqXY3d1ld3fXHFsoZy2IlLltvq9IGrv+W6SNlPOKz8Yh\n",
+       "zQPF6uoqqVSKZ8+e8bnPfc7Ej+RUXemGPomksbcn2+QWSTNJE6Z541GSZpxyab832ilr9H3x3IoX\n",
+       "1/b9jJvz3Xff5Z133uHJkyek0+mhz/r9ftMu5LrSYBvtdptyuUw2m+Xo6IhXr15RKBRMKufS99x7\n",
+       "SBiXOjl6kyTsEAwGTWrC6Pv29hKLxYba3o+LZ0kDpc3NTRKJhCGJHW+ynXc3EafdbnNycsKrV694\n",
+       "+fIl2WyW4+Nj0zDpvvGoSDOK6yRNMBg0YQcbbrebra0ttre32d7efuMc7nGhCdvbG4lEhoKVoxHu\n",
+       "20gaKeA7ODjgww8/pFAomO1p6UmjlMpw2Qo2xWVw8l9orf+JeiB9hEcfy/PrJI3H4yGTyZgt5/nz\n",
+       "50NnXI6edwAYCSTkuGo9t7XqhDT7+/v88Ic/NH327tKBdx1ukjRd4O9ord9XSq0Cf6SU+hbwSyxJ\n",
+       "H2HpNNVoNCiXy5yenhKJRNBa4/V68fl8Q/9eKWVM5KdPn74xn9vtNrGh9fV10wBaAopXKbOjVQOy\n",
+       "tttgtHl1qVQyVpJ0qVgmXEsarfUxcDx4XFdKfQhss0R9hPv9Pu12m0qlQrFYJBaL0e12TUbf6Mlw\n",
+       "LpeLcDhMOp1Ga004HH7j/fX1ddPYUdIuRYkdR4TRagGYrMVsp9MxZTT1ep1isUi1Wh1qwrhMuLVO\n",
+       "oy4bUH8B+D8sUR/hUdKsrq6a9hx263iBUopwOIzWmlAoRDr946XLjbcj1NLv7jp9xCbMNJJG+gOX\n",
+       "SiVKpRLFYpFKpbK051neijSDrek/AX9ba12zfzyt77ePcL/fNx0YisXiUKdyaZpow+VyGT9KKpUa\n",
+       "ynyT66gCK1iUpOl2u9RqNU5PTzk+Pn74kkYp5eWSMP9Gay2tX5emj7DoNNJdSvrTSRPEi4uLIR+L\n",
+       "Umqom6bMYV9nWYsQxz7/abRFrH1I2Pn5OScnJ+TzeZP+IA2tm83mw5M06vJX/Rrw/7TW/9h6a2n6\n",
+       "CEtJqijCfr+f9fX1ofiMLTkWGZeSue1On5KmaSeDS4TabgApXc/L5TLFYvFO65gmxU2S5kvAXwC+\n",
+       "r5T63uC1X2WJ+gjLX269XjfnKJ2dnb0R1LvLjlKjOb2jkuX4+JijoyMz7IM55FqtVh8mabTW/4ur\n",
+       "a6OWoo+w1AFJzY/b7aZSqZgD06WrpuSvLHot9vYkEkZiSNI9NJ/Ps7e3x8cff8wnn3xCo9Ew/XCE\n",
+       "6DIeHGkeAuzOVoBptpjP58lms0YxtmNBtt9lnsnnIslsE3o0v7dWq5mO57lcjkKhQLvdNucvCFHs\n",
+       "U+mWDY+CNPJXrZSi1WpxenpKNpvF7/fT7XZNaqVc7bGIioXz83MqlQqlUonT01Ojq4jeIo0hT05O\n",
+       "3sjxtfsSLysePGkAE4+RDt9iRQE0Gg2i0agpsJcqTGnZMer8mwWyPQlp8vk8R0dHpmmSJIaLxKnX\n",
+       "66b+etTCWoby4avw4Eljm7WiT5RKJQBzOHoymTRVAc1mE8B0k1oEhDR2Vwdp15bL5Ux7NluyyHex\n",
+       "v9ey4sGTBob9LHa7VVGS7Vzber1uXpPsOEldsHv32qkQdinsOBMahmNP0skhl8uRz+cpFotmm5KT\n",
+       "5x4yHgVpbEiBfKvVAjBZ/XIqW6lUolarUalUKJfLpFIpkxAuB6OK4uz3+00vO/tgU/v8J5Fctjkv\n",
+       "JnUul+Pk5MSYz+M81A8Rj5Y0UnQmUqZarbKyskIgEBjK7C8UCibZSq4Sm5K2rEJC0UVEsRXnnA2l\n",
+       "FKenp0NDSLPoBop3hUdJGtlK2u320FGFkoAl5zcVCgXW1tZIp9NDTQ/FGSjnEYxKqnw+b0ahUDAK\n",
+       "sMB20tVqNeOjua/qgXnj0ZEGuNZctQ/VEgki3ctF75EjkMXiEokiQ6wgGaOQKgK52r2JHwMeJWmu\n",
+       "g2xfzWZzqG5aTpArFAqmikB0HPvMbTnexx4C27knyrb4YKRA35E0DxBiUQFmC2s2m5TLZQKBgOni\n",
+       "4PP5THqn3HyJHdlSRHJe7O1p9IhlGcvssJsE6jrmX5Mj/FVu6CN8nzk2N2FUzxmXCG6/P5rmYB+h\n",
+       "PI4IVzUaWmaH3ThorcdGeG8izQawYecIA7/AZVS7prX+zWs++3B+HQdjcRVpps0RhiXpI+zg7nHr\n",
+       "XAErR/h/D15aij7CDu4etyLNYGv6j1zmCNdZoj7CDu4e1+o0YHKE/wvwX0dSPuX9XeD39eCwDet1\n",
+       "R6d54LhKp7lW0lyVIzxIJhfcax9hB3ePm6ynnwT+APg+lyY3wN8HvsLl1mT6CFt1UPJZR9I8cExl\n",
+       "cs8ChzQPH1NtTw4cjINDGgcTwyGNg4nhkMbBxHBI42BiOKRxMDEc0jiYGAvz0zh4vHAkjYOJ4ZDG\n",
+       "wcRYKGmUUu8ppX6klPqTQRfQWefbV0p9Xyn1PaXU/53i819XSuWVUj+wXksopb6llHqplPrmJLlB\n",
+       "V8z3VaXU68Eav6eUem+C+TJKqf+hlPqhUuoDpdTfmmWN18w39RqB8fms8xiAG/gY2AW8wPvAT8w4\n",
+       "5x6QmOHzP8VlItkPrNd+A/h7g8e/DPz6jPP9GvB3p1zfBvD5weNV4CPgJ6Zd4zXzTb1GrfVCJc0X\n",
+       "gY+11vta6y7wu8DPz2HeqdNMtdbfAcojL/8cl21tGVx/Ycb5YMo1aq2PtdbvDx7XAbsF78RrvGa+\n",
+       "qdcIi92etoFD6/lrfrzgaaGBbyulvquU+qszziVYRHvbmVNh592Cd57puoskzSJs+S9prb8AfBn4\n",
+       "60qpn5rn5PpSjs+67plTYUdb8M66xnmn6y6SNFkgYz3PcCltpobWOje4FoFvcLkFzor8oFRHMhJn\n",
+       "am+rtS7oAYDfmnSN17XgnWaN1nz/VuabdY2LJM13gXeVUrtKqRXgF7lsJTsVlFJBpVR48DgE/Czz\n",
+       "STOV9rYwh/a2s6TC3qIF70RrXFi67izWzC209y9zqbF/zGUV5ixzPePSAnsf+GCa+YDfAY6ADpf6\n",
+       "1i8BCeDbwEvgm0Bshvn+EpcVqd8H/nhwc9MTzPeTQH/wHb83GO9Nu8Yr5vvyLGvUWjthBAeTw/EI\n",
+       "O5gYDmkcTAyHNA4mhkMaBxPDIY2DieGQxsHEcEjjYGI4pHEwMf4/w2zPGHuGeikAAAAASUVORK5C\n",
+       "YII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f79380a2550>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEqlJREFUeJzt3X2QXXV9x/HPJ5unTUICFKoFdjcWYitBa6gygAkP1RbK\n",
+       "aLStBWmr1HbsdNSaUmUMzLR/tSPIdKSO085YqAI+QdWiTiuBVioLYiKQ8BTkoSNNAoWkFDbPybJ8\n",
+       "+8e9Cctmb/Z89+S3997wfs0w3nPud3/nd8/v3JOv55z7+zoiBAAAgMmb1u4OAAAAdDsSKgAAgJpI\n",
+       "qAAAAGoioQIAAKiJhAoAAKAmEioAAICaprdz47aZswEAAHSNiPB464smVLbPl3SNpB5J10bEVWNj\n",
+       "Lr/88gP+bnBwUMuWLXvVumnTql9Ms8f9rIdEpu1SsZl9MX16bogz8a1iV61apfPOO2/S7Zb8fJm2\n",
+       "MzLj9/LLL1eOzcwT19vbWzlWkubMmXPAuptvvlkXXnjhAevnzZtXq91WZs6cWTl27969lWN37dpV\n",
+       "OVaSdu7cWaQfmbHOxLbaxzfddJMuuuiiA9YfccQRlds+6qijKsceeeSRlWPnz59fOTZzLM+ePbty\n",
+       "rCTNmDEjFV/Vnj17KseOd7xdeeWVWrly5QHrt2/fnurHjh07Ksfu3r27SOzw8HDl2FLnw6yq5/Bz\n",
+       "zjmn5XvFbvnZ7pH0BUnnSzpZ0sW231RqewAAAO1S8hmq0yQ9GRFPRcSwpG9Iem/B7QEAALRFyYTq\n",
+       "eEkbRy1vaq6bUH9/f5EOYWqceOKJ7e4CJmnx4sXt7gJqYPy619KlS9vdBdRUMqGa9M3OgYGBQ9kP\n",
+       "TLGTTjqp3V3AJPEPcnc75ZRT2t0FTBIJVfcr+VD605L6Ri33qXGV6lUGBwf3v+7v7yeZAgAAHWHt\n",
+       "2rVat25dpdiSCdW9khbZXijpGUkXSbp4bNDYX/MBAAB0giVLlmjJkiX7l6+//vqWscUSqoh4yfbH\n",
+       "Ja1SY9qE6yLi0VLbAwAAaJei81BFxPclfb/kNgAAANqtrTOlS9Un9RoZGancZnbyr1KT73VCbGa/\n",
+       "SdJLL73UVbElP19mP5f6fJnJAjMTTmbbLtWPUpNkltwXmQkOM+1mPl9JpSa/7enpqRybmfA1299S\n",
+       "3+tOON9L5SZGzrSbmTz1UEwm3UqpCZdboZYfAABATSRUAAAANZFQAQAA1ERCBQAAUBMJFQAAQE0k\n",
+       "VAAAADWRUAEAANREQgUAAFATCRUAAEBNJFQAAAA1kVABAADU1PZaftnaPOicml+ZfmTqK2ZiM7W2\n",
+       "OkWmJlamFlUmViq3n0sdF6VqzGVl6syVOr8NDw8Xiy9VZy5zDO3cubNIu1nZurBVzZo1q3Jsb29v\n",
+       "qu1M/OzZsyvHZvqcUWofS2VqrF511VUt3yt6hrLdZ/sO24/Yftj2J0puDwAAoB1KXx4alnRpRKyz\n",
+       "PU/SfbZvj4hHC28XAABgyhS9QhURz0bEuubr7ZIelXRcyW0CAABMtSl7KMH2QklLJK2eqm0CAABM\n",
+       "hSlJqJq3+74paUXzShUAAMBho/hP7GzPkPQtSV+JiFvGvn/nnXfufz0wMKCBgYHSXQIAAJjQhg0b\n",
+       "tGHDhkqxRRMqN37HfZ2k9RFxzXgxZ511VskuAAAATEp/f7/6+/v3L999990tY0vf8nuHpD+QdK7t\n",
+       "tc3/zi+8TQAAgClV9ApVRNwlZmMHAACHOZIdAACAmtpe96VTyqhUVaqESqk+lGw7M3bZsihVlSxd\n",
+       "lCl1kont6ekp0m72uzQyMlI5NrOfS5WeybS7d+/eyrFSrkRFpu0ZM2ZUjs2UDJk/f37lWCl3zGWO\n",
+       "i0yJmB07dlSO3b17d5FYKTd+mX0xd+7cyrGZ8cuOdaZETOY7NTQ0VDl2165dlWMz47dnz57KsVJu\n",
+       "/A7Fv6tcoQIAAKiJhAoAAKAmEioAAICaSKgAAABqIqECAACoiYQKAACgJhIqAACAmkioAAAAaiKh\n",
+       "AgAAqImECgAAoKauKT1TqpxFSaXKapT8fKVKxHRCaRYp9/lKjV+pEirDw8OVY7PxmRIOpUr2ZMYu\n",
+       "U0pGyo1JptxKph+Zz5c97jNjXarsSyY2c7xlSvZIudIsmTHJlNbZsmVL5ditW7dWjpVyY505PjPn\n",
+       "w8zxOXPmzMqxmVJOUtl/S8bTMqGy/TuSQtJ4R1RExLerbMB2j6R7JW2KiPdMqpcAAAAd7GBXqN6j\n",
+       "RkLVSqWEStIKSeslHVG1UwAAAN2kZUIVEX9Yt3HbJ0i6QNLfSPqLuu0BAAB0oglvGtp+ve3rbN/a\n",
+       "XD7Z9h9XbP9zki6TVP0BBQAAgC5T5SmsL0u6TdJxzeUnJF060R/ZfrekzRGxVuM/hwUAAHBYqPIr\n",
+       "v2Mi4ibbKyUpIoZtV/lpwJmSltu+QNJsSfNt3xARHxoddNddd+1/3d/fr/7+/uq9BwAAKGRoaKjy\n",
+       "Ly2rJFTbbf/cvgXbp0samuiPIuIKSVc0/+ZsSZ8am0xJ0tKlSyt1FAAAYCotWLBACxYs2L/89NNP\n",
+       "t4ytklB9UtL3JP2i7R9JOlbS+yfRr86YHAoAAOAQmzChioj7bJ8l6ZfUeBbqsYhIzSAYET+U9MPJ\n",
+       "dREAAKCzTZhQ2e6V9FFJS9W4yjRo+x8iovq0twAAAIexKrf8bpC0VdLn1bhC9XuSbpT0uwX7BQAA\n",
+       "0DWqJFSLI+LkUcs/sL3+UHWgav2cUjXYsvGl6uhNn169rGLJml+l2s60W6qeYFapsc7Ulzr66KMr\n",
+       "x2ZqYmXjMzW0MnXYMvXdMnXHsrX8MmMye/bsyrFz5sypHDtv3rzKsdnvSOa7WqpuY6lzXKYOYza+\n",
+       "VC2/Xbt2VY7N1ugsNX6ZGoiZ2My5JVvLL3PMVY1dvnx5y/eq7M37bZ+xb6H5K7/7Km0ZAADgNeBg\n",
+       "xZEfGhVzt+2NajxD1S/psSnoGwAAQFeYqDgyAAAAJnCw4shPjV62/fNqzHgOAACAUaoUR15u+wlJ\n",
+       "P1NjLqmnJH2/cL8AAAC6RpWH0v9a0hmSHo+IN0h6p6TVRXsFAADQRaokVMMR8b+SptnuiYg7JL2t\n",
+       "cL8AAAC6RpWJF16wfYSkQUlftb1Z0vay3QIAAOgeVa5QvU/STkmXSrpV0pPiF4AAAAD7VSmOvO9q\n",
+       "1IikLxftDQAAQBc62MSe29WYyHM8ERHzD0UHqpaeyEyR343lVjpFptxKpoRDpt1u3G+Z0iWZ0hDb\n",
+       "tm2rHJstt5LZz5kSDpmSNqXazZRxkXLlPTJlQ4aGhor0IVuOpNT5s9SYZMr7lCxHktkXmfNhJjZT\n",
+       "nknKlcDJHEeZPmfOh5ljKFteq0TpmYO20eqNiMidkcZh+0hJ10parEZy9kcR8eO67QIAAHSS+inZ\n",
+       "wf2dpH+LiPfbni5pbuHtAQAATLliCZXtBZKWRcQlkhQRL0mqfv0bAACgS+QeNsp5g6Qttr9k+37b\n",
+       "/2h7TsHtAQAAtEXJhGq6pFMl/X1EnCpph6SVBbcHAADQFiWfodokaVNE/KS5/E2Nk1Ddc889+1+f\n",
+       "cMIJ6uvrK9glAACAap544gk9+eSTlWKLJVQR8aztjbbfGBGPS3qXpEfGxp1xxhmlugAAADBpixYt\n",
+       "0qJFi/Yvr1q1qmVs6V/5/Zka5WpmSvovSR8uvD0AAIApVzShiogHJL295DYAAADareRD6QAAAK8J\n",
+       "pW/5TSg7lXwV2dIlpUqdZEvgdIJMeYGMUqVnMiUOpHKlgzL7befOnUVi9+7dWzlWypWqyXy+zD6e\n",
+       "M6f6TCqZciTZ80omPnMs79mzp3JsZjwyfZBy45c5jjKfL9Nu5rtXsvRMJjZzLGf6XPIc1wmxJcvw\n",
+       "ZI6j7HdqPN33Lz4AAECHIaECAACoiYQKAACgJhIqAACAmkioAAAAaiKhAgAAqImECgAAoCYSKgAA\n",
+       "gJpIqAAAAGoioQIAAKip7aVnSpV9yShVFqVUGZdS0/p3ikyJiuHh4VTbIyMjbY/NlDnJlFuZO3du\n",
+       "5Vgp1+fMfs6UI8mU1tm6dWvl2EwZEEnq7e0t0vb8+fMrx2bOQyXLDGX2RabdTNmQTLtZpc4vme9T\n",
+       "pvRM5hwg5cavVGyp8ky7du2qHCuVK6PUStErVLYvt/2I7Ydsf832rJLbAwAAaIdiCZXthZI+IunU\n",
+       "iHizpB5JHyi1PQAAgHYpectvq6RhSXNsj0iaI+npgtsDAABoi2JXqCLi/yT9raQNkp6R9GJE/Hup\n",
+       "7QEAALRLyVt+J0r6c0kLJR0naZ7t3y+1PQAAgHYpecvvbZJ+FBHPS5Ltb0s6U9JXRwcNDg7uf93f\n",
+       "36+BgYGCXQIAAKhm48aN2rRpU6XYkgnVTyX9pe1eSbslvUvSmrFBy5YtK9gFAACAyenr61NfX9/+\n",
+       "5dWrV7eMLfkM1QOSbpB0r6QHm6u/WGp7AAAA7VJ0Ys+I+Kykz5bcBgAAQLtRegYAAKAmEioAAICa\n",
+       "2l7Lr2rNn0xtoEzs4S5by69U7b9S41fy802fXv3rkanPl/l8mfpg2X2RqQmZqWGZaTcTm6kllhkP\n",
+       "SZo1q3pVrMy+eP755yvHDg0NVY7dvn175VipXG28TJ25TGxmPDI1AqXcd6rUeStzDGXr3WY+X+Y4\n",
+       "2rZtW+XYTvn3OnN+mTdvXv3t1W4BAADgNY6ECgAAoCYSKgAAgJpIqAAAAGoioQIAAKiJhAoAAKCm\n",
+       "jkyoNmzY0O4uoIaNGze2uwuYJMauu23evLndXcAkPfPMM+3uAmoiocIhV7UyNzoPY9fdtmzZ0u4u\n",
+       "YJJIqLpfRyZUAAAA3YSECgAAoCa3s0yLbWrEAACArhER49YDamtCBQAAcDjglh8AAEBNJFQAAAA1\n",
+       "dVxCZft82z+1/YTtT7e7P2jN9j/Zfs72Q6PWHW37dtuP277N9pHt7CNas91n+w7bj9h+2PYnmusZ\n",
+       "ww5ne7bt1bbX2V5v+zPN9YxdF7HdY3ut7e81lxm/LtZRCZXtHklfkHS+pJMlXWz7Te3tFQ7iS2qM\n",
+       "1WgrJd0eEW+U9B/NZXSmYUmXRsRiSadL+ljz+8YYdriI2C3p3Ih4q6S3SDrX9lIxdt1mhaT1kvY9\n",
+       "zMz4dbGOSqgknSbpyYh4KiKGJX1D0nvb3Ce0EBGDkl4Ys3q5pOubr6+X9L4p7RQqi4hnI2Jd8/V2\n",
+       "SY9KOl6MYVeIiJ3NlzMl9ajxXWTsuoTtEyRdIOlaSft+Ncb4dbFOS6iOlzS69sWm5jp0j9dFxHPN\n",
+       "189Jel07O4NqbC+UtETSajGGXcH2NNvr1BijOyLiETF23eRzki6T9PKodYxfF+u0hIo5HA4j0ZiT\n",
+       "gzHtcLbnSfqWpBURsW30e4xh54qIl5u3/E6QdJbtc8e8z9h1KNvvlrQ5ItbqlatTr8L4dZ9OS6ie\n",
+       "ltQ3arlPjatU6B7P2X69JNn+BUlUa+1gtmeokUzdGBG3NFczhl0kIoYk/aukXxVj1y3OlLTc9s8k\n",
+       "fV3Sr9m+UYxfV+u0hOpeSYtsL7Q9U9JFkr7b5j4h57uSLmm+vkTSLQeJRRvZtqTrJK2PiGtGvcUY\n",
+       "djjbx+z7BZjtXkm/LmmtGLuuEBFXRERfRLxB0gck/SAiPijGr6t13Ezptn9T0jVqPGR5XUR8ps1d\n",
+       "Qgu2vy7pbEnHqHG//68kfUfSzZL6JT0l6cKIeLFdfURrzV+F3SnpQb1ya+FySWvEGHY0229W46Hl\n",
+       "ac3/boyIq20fLcauq9g+W9InI2I549fdOi6hAgAA6DaddssPAACg65BQAQAA1ERCBQAAUBMJFQAA\n",
+       "QE0kVAAAADWRUAEAANREQgWg7Wzf3fzfAdsXH+K2rxhvWwBwKDEPFYCOYfscNSY5fE/ib6ZHxEsH\n",
+       "eX9bRBxxKPoHAK1whQpA29ne3nx5paRlttfaXmF7mu2rba+x/YDtP2nGn2N70PZ3JD3cXHeL7Xtt\n",
+       "P2z7I811V0rqbbZ34+htueFq2w/ZftD2haPa/k/b/2z7Udtfmdq9AaAbTW93BwBAr5S++bSkT+27\n",
+       "QtVMoF6MiNNsz5J0l+3bmrFLJC2OiP9uLn84Il5o1rZbY/ubEbHS9sciYsk42/ptSb8i6S2SjpX0\n",
+       "E9t3Nt97q6STJf2PpLttvyMiuFUIoCWuUAHoJB6z/BuSPmR7raQfSzpa0knN99aMSqYkaYXtdZLu\n",
+       "kdQnadEE21oq6WvRsFnSDyW9XY2Ea01EPBONZyLWSVpY4zMBeA3gChWATvfxiLh99Irms1Y7xiy/\n",
+       "U9LpEbHb9h2SZk/QbujABG7f1as9o9aNiHMlgAlwhQpAJ9kmafQD5KskfdT2dEmy/Ubbc8b5u/mS\n",
+       "XmgmU78s6fRR7w3v+/sxBiVd1HxO61hJZ0laowOTLACYEP+vC0An2Hdl6AFJI81bd1+S9Hk1brfd\n",
+       "b9uSNkv6rWb86J8o3yrpT22vl/SYGrf99vmipAdt3xcRH9z3dxHxL7bPaG4zJF0WEZttv2lM2xpn\n",
+       "GQBehWkTAAAAauKWHwAAQE0kVAAAADWRUAEAANREQgUAAFATCRUAAEBNJFQAAAA1kVABAADUREIF\n",
+       "AABQ0/8DEfw5JxfRlIgAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792bd10b10>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAFYRJREFUeJztnVuMZHldxz+/ut+7q+89072zs8Oa8EACL/gARB4IWWIi\n",
+       "+qIhMRpE44OiURMRHwSjD0gCMb4QlV2Dl4BGAwETFTAa8cHLml12UXbZTRimZ/tW3VXVdb//fej6\n",
+       "/ffUmeqeruq6nJo5n+Skbl2nf931rd//8rscMcbg4zMKgXkb4LN4+KLxGRlfND4j44vGZ2R80fiM\n",
+       "jC8an5EZWzQi8oyIvCIir4nIxyZplI+3kXH2aUQkCLwKvA94A/hv4EPGmO9O1jwfLzKup3kn8Lox\n",
+       "5q4xpg18Cfjg5Mzy8TKhMd93E9hzPL4P/LDzB0TE32pecIwxMuz5cT2NL4jHmHFF8waw63i8y7m3\n",
+       "8XkMGFc0zwNPi8iTIhIBfgr46uTM8vEyY81pjDEdEfll4J+AIPCsv3J6fBhryX2lE/sT4YVn0hNh\n",
+       "n8cYXzQ+I+OLxmdkfNH4jIwvGp+R8UXjMzK+aHxGxheNz8j4ovEZGV80PiPji8ZnZMZNwgJARO4C\n",
+       "JaALtI0x75yEUdNGROwRCAQIBoOEQiGCwSDBYJB2u02n07G3s7AnEAhYmwCMMWhc0H1/3lxLNJwn\n",
+       "Y73XGJOfhDGzQEQIhUL2CIfDLC0tkclkyGQyJBIJ8vm8PQqFwtRtCoVCRCIRwuEwkUiEXq9Ht9ul\n",
+       "2+3S6XTs416vR6/Xm7twrisagKGRUC8TDAaJRCLEYjHi8ThbW1tsb2+ztbXF6uoqd+/e5e7du7Tb\n",
+       "7ZmIJhgMWlsSiQTdbpd2u02r1aLVatFutxERK6B5MwlP800R6QJ/bIz50wnYNFXU00SjURKJBOl0\n",
+       "mu3tbe7cucNb3vIWbt68STwep91uk8/PxoGGQiFisRjpdJpMJkOn06Fer9NoNOywBXhCMHB90bzL\n",
+       "GHMgIuvAN0TkFWPMtyZh2DTRDymRSLC0tMT6+jo7Ozs89dRT3Lp1i0KhwBtvvEE8Hp+pPalUimw2\n",
+       "S6fTIRaLUa/Xqdfr1Go16vU6xhja7fZiD0/GmIP+bU5Evsx5aYunRSMihMNh4vE4mUyGbDZLJpMh\n",
+       "Ho8TDocfmJDOglAoZO1ZXV214tCjWCySz+cREZrN5tw9ztiiEZEEEDTGlEUkCbwf+N2JWTYlVDSJ\n",
+       "RMKKJpVKDYhm1sJxDk8rKysEg0G7Yur1euRyOUSERqNBsViciU2X2nuN924CX+7/Y0PAXxljvj4R\n",
+       "q6aIiBCJRB7wNIlEgkgkMnMvA4OeZmVlhVgsZrcCAoEA0WiUZrNJsVgkEJj/1trYojHGfB94+wRt\n",
+       "mQnDPE06nZ7r8BQIBAiHw8RiMZLJJMlkkmg0SjQaJRKJ0G63OT09JR6PW/vmOa+ZxJJ7IVARqKfR\n",
+       "SfDKygrpdJpYLEYoNJ9/R6/Xo91uU6/XqVQqBINBwuEwwWCQeDxOPB4nGo3a5wKBgB2+5iGe+fu6\n",
+       "GeAUTCAQIBKJkEwmB0QTj8fnJpput0ur1aJer1Mul2k0GvR6PTtsOUUTCoXm4g2dPBaiAQbCBupp\n",
+       "nMPTPD2NUzSVSoVGo0G32yUYDJJIJIZ6mnkK57EYnlQs7rnD0tIS2WzWzh0AG2+a5XZ9r9ej0+nQ\n",
+       "arVoNBq0220AuzWgoolEIlY0ML841CMvGp34RiIRIpEI6XSa5eVlG29Kp9MD3/RGo0GhUKBSqdgP\n",
+       "bxY2qqg1DhWNRu0GpIpGg6o6p5kXj41oNK6jgtEjnU5TLpft0HB2dkY+n6dardJqtWZmozPaHg6H\n",
+       "iUaj1uZhw1Ov1/PnNNPCKZpUKsXy8vKAp0mlUoRCIVqtFmdnZxwdHVlPM0vRiIgVzTBPo1Fw95xm\n",
+       "HjySnsa5WgoGg0SjURvXWVtbI5vNWi+j8SUdlg4ODsjn8zMdnnRY0mFUBaORb53P6Mpp3qunR040\n",
+       "KhQ94vE4a2tr3Lx5kxs3bnDjxg1u3rzJ0tISgUCAer3O2dkZJycn7O/vs7e3x9HREaVSiWazOROb\n",
+       "3bGn5eVlksmk3aH2Go+caODNfJlIJEIqlbKieeqpp3jiiSdYXV0lk8kQCASo1WpWNAcHB9y7d49i\n",
+       "sThX0SwtLQ2ENbzGIycazZfR+FI6nbapD3fu3OHOnTtWUCJiPU0ul7OeRlMRfE8znIeKRkSeA34U\n",
+       "ODbGvK3/3Arw18At4C7wk8aY+YdfeVM0zr2YtbU1bty4wa1bt7h9+7bNiGu1WpTLZQqFAicnJxwd\n",
+       "HbG/v29TK7vd7szsdQYsve5prrJ6+jPgGddzvwV8wxjzQ8A/9x97gkAgQDweJ5vNsr29ze7uLhsb\n",
+       "GywtLRGPxxERarUauVyOu3fv8sorr3Dv3j1yuRzVanUmubi6xNY8Zc0iTKVSAxP0cDi8mKLpZ+K5\n",
+       "E2V/DPhC//4XgB+fsF1jEwgESCQSZLNZtra22N3dZXNzk+XlZWKx2AOiefXVV7l37x4nJydUKhUr\n",
+       "mmkGA937Mrono6LR/J5QKORJ0Yw7p9k0xhz17x9xnlvjCS7zNE7RnJyc8IMf/IDXX3+d4+Nj62mm\n",
+       "LRgYXOEN8zSpVMq+7lxae0VA154IG2PMvPvrOf+poVCIZDLJysoKW1tb7Ozs2NVSLBYDsKK5d+8e\n",
+       "r732GuVymUqlYoenaaNeRvdldOdXE8vj8fhA6sMshDwK44rmSES2jDGHIrINHE/SqFHQIKSuiDKZ\n",
+       "DGtra6yurrK6uko2myWRSBAMBm0gslarUa1WqVQqNoTQarVmIhjAbjam02nS6fQDw6eWsGjwtFwu\n",
+       "U6vVbDBTa6LmlSs8rmi+Cvws8Af9269MzKIRERGi0ajNeFtZWWFtbc0KJ5vN2jyUbrdLo9EYEI2G\n",
+       "C1qt1sw+hEgkYld1a2trbG9vs7y8bCfqnU6HRqNBo9GgXq9TKpWoVqs0Gg06nc7Mo/BurrLk/iLw\n",
+       "I8CaiOwBvwN8CvgbEfkI/SX3NI28DM2h1bjS+vr6A55G/8FaT+QWjX5zZ+lpMpkMGxsb7OzssLm5\n",
+       "STabtcNnp9Oh2WxSrVYpl8vW0zSbTetpPC0aY8yHLnjpfRO2ZSw0fdMZW1LR6J6HfmtbrdYDgqlU\n",
+       "KjO31yma3d1dtra2hnqaSqVCsVjk7OzMJmepaOaZ7rnwO8LO4UlFs7S0NLCjqpt4xWKRk5MTTk9P\n",
+       "Zx6QdB7OVNONjQ1WVlasvQCtVotSqcTx8TGHh4ccHBxQKBSo1WpzFww8AqJxDk8qmuXl5YEdVf0Q\n",
+       "NL6kUexZpT44N/JCoZBdWq+urrKxsWFrr9TeZrPJ2dkZx8fH7O3tWZt1dTfvldTCi0Y9jVM0F3ka\n",
+       "FY16mlmLRld4bk/jDBsANJtNSqUSuVxuQDROTwN+uufYaKL4VUSTy+Xmmi+jnSqSyaSNM62vr5NM\n",
+       "Jm0+DbwpmuPjY+7fv8/R0RHFYvEB0cyLhRSNsymRe0c1nU7bb20wGKTX69FoNGwkex6eJhwOk0ql\n",
+       "yGQytuGAM5Kt6ZvaVkRXTaVSiWKxaPeSvFD8DwsoGmcpim7sOWM3GuzTD8MYQ6PRGJjT6A7wrDyN\n",
+       "esLV1VXW1tasaBKJhK3q1AZG3W7Xru6cotGVky+aMVHBuGM3usPq9DS6fHV6mmazaTf0ZoGKZmVl\n",
+       "he3t7Qc8DTDQxGiYp9FNPV80Y+IM+A3zNDrhVLevnkZFM2ucotna2npg3tVutwdKc1U0Z2dndo/G\n",
+       "SyykaODBWqFwOGyPXq9nd371n1+r1WbSdHEYaqOKW8tRtOhNBdNoNKhWqwOxMC94FjcLKZqLunNq\n",
+       "xn6z2aTRaNBsNsnn83blMas5jBv36slZwwRviqbZbNrApJdFs3B1T27BuNMMnJ5G0zjPzs6o1+tz\n",
+       "9zSaBuEsR3FGtTWYqiulWcXCRuWhohGR50TkSERedjz3SRG5LyIv9A93OujUcQpHd1rdoikWi+Ry\n",
+       "Oc94Gq1nusjTuIcnr0x83YybI2yAzxpj3tE//nHypg3HmZikyd9aOF+r1QbEod0h3AVnWqU4rUw4\n",
+       "Z7WkVndqDbmGOZLJpM0B7na7dmhyp0EspGguyBGGOfYPVtHo3oaKplqtWtE4A5mzFo27mF9Fk81m\n",
+       "B5bb0WjUikaX2qVSiUqlYocoL3KdOc1HReTbIvKsiCxPzKIrcJFo1NN0Oh0byEwmkyQSCWKx2AP1\n",
+       "0LP0NJrvo2XBzjCH29NoGsTCepoL+Bxwm/OeewfAZyZm0RVwDk9u0VSr1Qs9jWbwOSeh0+Ci4ckZ\n",
+       "hR8mmmq1+kDujBdFM9aS2xhjc4JF5PPA1yZm0dV+/4C30d1UTbbSzT3dC1lbW2NnZ4dqtWpLcfVo\n",
+       "NBoj/35n501nFwdnaENtiEQi3Lp1i42NDVtlMEyszhyZeac+PIyxRCMi29p4GvgJ4OXLfn6SGGNs\n",
+       "d0u3t2k2mzSbzYFWqvF4nPX1dWq1GsYYEokExWLRHuVyeWQbnBuJejjzZbTzg97evn2bjY0Nksnk\n",
+       "lf4+rzNOjvAngPeKyNs5X0V9H/jFqVrpQr+J6tqdQ1Sj0bB9eLWt6vr6Or1ej2g0yvLyMsfHx/YY\n",
+       "5/oH2gZED6dXcbYK0d+/vb3N5uYmqVRqaDvXhz32GuPmCD83BVuujDMJySka9TTdbteKJp1OY4yx\n",
+       "gllfX7dJT7qSGhWdXOttPB5/QEjOQ7tuJZPJC+dR7mHJy8JZyDCCE61jKhQKHB4eEo1G7Ra8czkL\n",
+       "5x4im80OTJSXl0df+KkX0Vv3cKSHvq6T8YtqszudzkDLk0KhQLVapdlselI8j4xo8vk8+/v7ADZq\n",
+       "LCL0ej27UtK5R6/Xs00bNzY2Rv6d6qH01jmncXazcovoopaz7XbbCv/o6IjT01PK5bInLp4xjIUX\n",
+       "jSYt6VVKNGajy15tCJBIJAZax2vfmnFzapzDiDMGNmz1pIK6aOXUarWoVCpWNMVi0fbH8T3NFFBP\n",
+       "o1n89XodYGAVo3MaTbtMp9PX6lnnvL6lc9dWz+eMgzmHpIt+n3qaYrHI8fEx5XLZFu/5opkCWjnZ\n",
+       "bDZtADOXy9lhqFqtks1mWVlZsVdccV7kdJyJsPYb1lu3PSpMvXUvz93icU7onc2vvXC9ymEsvGiA\n",
+       "gckuwMnJifVAp6enLC0tDbSBdX+Io2CMsZWZmmvsRvOA9dA6c80Jvui8zgucenmDb+FFo99SeLNd\n",
+       "fLfbtYLRchHtTp5OpwfmNpqjOwqFQmFgg9DNzs4OTzzxBLVajV6vRzabxRhDKBQikUgM9TTu6L1X\n",
+       "BQOPkGhUOCJiwwVa4pJKpQYO3VfRmNSo5HI5Tk5O7OHm6aeftoJRUapgLvs7VDBeXDE5WXjRwOBG\n",
+       "mN5XEWnE2zn3ce7WjuppjDEUi0UKhQKlUolarfbAz2jgVFM13IX7btzpoM45jRez9x4J0VyGUyyA\n",
+       "jSg7Y0ajUqlULs0EdMbDdOmse0fDcF8dxtnUyItD1SMvGsDmBmsVoztCPSrOi6wPw52yoambVxFN\n",
+       "IpGwO9Z6noUSjYjsAn8ObHAenPwTY8wfebmPsBvnnKfVag1MQsfdp3lY+oJbNM5mRMNw1m8lk0m7\n",
+       "E+z0kF7iYZ6mDfyaMeZFEUkB/yMi3wA+zHkf4U+LyMc47yPsmV7CbmYdBBx1JeTOQnTu1XiRS32z\n",
+       "MebQGPNi/34F+C5wEw/3EfYSV/Vk7moE55DmtaEJRpjTiMiTwDuA/8TDfYS9xrA+wG4xuYvltEbL\n",
+       "iysnuKJo+kPT3wG/aowpO/9oL/QRXgQuiz/p/MVZLAd4dlf4KsVyYc4F8xfGGG39eiQiW/3X59pH\n",
+       "eFG4bF7lruPy+q7wpaKR86/Fs8D/GWP+0PGS9hGGOfcRXgQWIRtvFB42PL0L+GngJRF5of/cx/FQ\n",
+       "H2Evc9HwsujiuVQ0xph/52Jv5Ik+wj6z57HYEZ41zs4WFyV7OYesRah1cuKLZko4W6G4xeMWipcn\n",
+       "vcNYuP40i8CwHjoX5dA4V0qLIhxfNFPiKp7GmT+zSMLxh6cpoBde1YbY6XSaWCxm+xprIFIPZ9t9\n",
+       "XzSPKe5unnrBDG1Rq4VxpVKJUqnE/v6+bWPvi+YxRa9/kM1m2dzctL2N9UJl1WqVQqFALpfj+PjY\n",
+       "XmWlXq/7onlc0foq9TTRaNRWZGrSe6FQ4ODggPv37w94Gq+mQzjxRTMDms2mjVp3Oh329/e5f/8+\n",
+       "e3t77O3tDVRV+p7mMUWzBOv1uq3Jdiab64W/9NAk9Uaj4YvmccXZQ0+vcZDP5+2Ry+XsfEavB67V\n",
+       "mgsvmktyhD8J/DyQ6//ox2fZFtbrqKdR0ZyenrK/v8/BwYGdv+TzeQqFAvl83g5d87xs8iiMmyOs\n",
+       "fYQ/O3ULF5BGo0GxWOTw8JBMJsPJyQlHR0ccHh5yeHg4sNyuVqsLIRQnD4tyHwKH/fsVEdEcYZhj\n",
+       "H2GvU6vVyOVyhMNh2u02pVKJQqFgD683l34Y4+QI/wfneTYfFZGfAZ4HfsOrJSzzoFqtcnx8TLPZ\n",
+       "pFAoDPQ4rtVqD62b8jpyFaX3h6Z/BX7fGPMVEdngzfnM7wHbxpiPuN6zeF+hCaGVm1rF6bxYvLMr\n",
+       "hNfrto0xQ0eTh4qmnyP898A/uFI+9fUnga8ZY97mev6xFc2jwkWiGStHuJ9Mrsy0j7DP/LnU04jI\n",
+       "u4F/A17ifMUE8NvAhzhvcW/7CDvqoPS9vqdZcMYensbFF83iM9bw5OMzDF80PiPji8ZnZHzR+IyM\n",
+       "LxqfkfFF4zMyvmh8RmZq+zQ+jy6+p/EZGV80PiMzVdGIyDMi8oqIvNbvAnrd890VkZdE5AUR+a8x\n",
+       "3v+ciByJyMuO51ZE5Bsi8j0R+foo1xi/4HyfFJH7fRtfEJFnRjjfroj8i4j8r4h8R0R+5To2XnK+\n",
+       "sW0Ehl/adxIHEAReB54EwsCLwFuvec7vAyvXeP97OE8ke9nx3KeB3+zf/xjwqWue7xPAr49p3xbw\n",
+       "9v79FPAq8NZxbbzkfGPbaIyZqqd5J/C6MeauMaYNfAn44ATOO3aaqTHmW0DB9fTY7W0vOB+MaaOZ\n",
+       "cAveS843to0w3eHpJrDneHyfNw0eFwN8U0SeF5FfuOa5lGm0t/2oiHxbRJ4dZbhzMukWvK503WvZ\n",
+       "OE3RTGMt/y5jzDuADwC/JCLvmeTJzbkfv67dnwNuc55vdAB8ZtQTuFvwXtfG/vn+tn++ynVtnKZo\n",
+       "3gB2HY93Ofc2Y2OMOejf5oAvcz4EXpeJtrc1xhybPsDnR7Vx0i14Hef7Sz3fdW2cpmieB54WkSdF\n",
+       "JAL8FOetZMdCRBIiku7fTwLvZzJpphNtb3udVNhJt+CdWrrudVYzV5i9f4DzGfvrnFdhXudctzlf\n",
+       "gb0IfGec8wFfBPaBFufzrQ8DK8A3ge8BXweWr3G+n+O8IvUl4Nv9D3dzhPO9G+j1/8YX+scz49p4\n",
+       "wfk+cB0bjTF+GMFndPwdYZ+R8UXjMzK+aHxGxheNz8j4ovEZGV80PiPji8ZnZHzR+IzM/wMn9Av6\n",
+       "T5UJ3wAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792bca2290>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEr9JREFUeJzt3X+wXGV9x/HPJ79vQoJaaCwYmx83psJohYqDv6IUwwRG\n",
+       "0LZWpa1S27HTUWtK1RGZKX+1amU6UsdpZ6zUH6hIqxZ1KDEpgkGthB8JEBJ+hAktYE1aCprLzQ03\n",
+       "4ds/dhOvl7u5z/eePHd3w/s1w7Bn97vPefY8Z8/95pyzz9cRIQAAAEzdjG53AAAAoN+RUAEAADRE\n",
+       "QgUAANAQCRUAAEBDJFQAAAANkVABAAA0NKubK7fNnA0AAKBvRIQner5qQmV7raQrJM2U9NmI+Jvx\n",
+       "MZdffvkz3rdhwwadc845Nbs2Zb0wb5c94VhOaMaM3EnIo9H29ddfr3PPPbdRP0plxyMTf+DAgSqx\n",
+       "o6OjVWJnz55dHNspfuPGjVqzZs0znh8YGChud86cOcWxmf1teHi4OHZoaKg4VpL27t1bHLtv377i\n",
+       "2Mz4ZfahTmN966236owzznjG83Pnzi1ue/78+cWxCxcuLI6ttQ/NmpX7MzZz5szi2MzxYmRkpDh2\n",
+       "on2o09+97L6c2T8zfc7EPvXUU8WxBw8erBJby9VXX93xtWqX/GzPlPRpSWslnSLpQtsvrrU+AACA\n",
+       "bql5D9UrJO2MiIciYlTSVyW9qeL6AAAAuqJmQnWypIfHLD/Sfm5SK1asqNIhTI/BwcFudwFTtHz5\n",
+       "8m53AQ2cdNJJ3e4Cpoi/e/2vZkI15ZuN2LH628qVK7vdBUwR373+dvLJRf9mRQ/iu9f/at6U/qik\n",
+       "JWOWl6h1luoXbNiw4fDjFStWsFMBAICesHv3bu3Zs6cotmZCdZuklbaXSvqxpLdJunB8UK/+mg8A\n",
+       "ADy7LV68WIsXLz68vG3bto6x1RKqiDhg+32SvqPWtAlXRsSOWusDAADolqrzUEXE9ZKur7kOAACA\n",
+       "buvqTOlS+SRrmQnWspM9ZiYLe/rpp4tjMxP1ZdrN9DczsaDUGxNU1oqV6m27TLuZSe8yk+nt37+/\n",
+       "OFaqNwFg5vNl+lxrG0vNJ2XsJPP5MseAmjKTrdaaZDgzEWl2Ys+MzJj0QqyUm7g0s+0y7c6bN684\n",
+       "NjPWmQlfpdw+dzQmn6aWHwAAQEMkVAAAAA2RUAEAADREQgUAANAQCRUAAEBDJFQAAAANkVABAAA0\n",
+       "REIFAADQEAkVAABAQyRUAAAADZFQAQAANNT1Wn6ltaCORp2dTjL1jDK1qzIy7WZqFdbqb02Zz1ez\n",
+       "/lmtOlCZ/S3TbqbWllRvO2draZbKfL7s8SLzPZk9e3ZxbKamWWa7ZWtY1qqZWKv255NPPlkcm63b\n",
+       "mPl8tWrIZvahTK07qXfq6JWqWdcws3+Wxq5bt67ja1XPUNleYvtG2/fY3mb7/TXXBwAA0A21z1CN\n",
+       "Sro4IrbaPk7S7bY3RsSOyusFAACYNlXPUEXETyJia/vxkKQdkk6quU4AAIDpNm03pdteKuk0SbdM\n",
+       "1zoBAACmw7QkVO3LfV+TtK59pgoAAOCYUf1XfrZnS/q6pC9FxLXjX1+/fv3hx4ODgxocHKzdJQAA\n",
+       "gEk98MAD2rlzZ1Fs1YTKrd8iXylpe0RcMVHM2rVra3YBAABgSlauXKmVK1ceXh57Emi82pf8Xi3p\n",
+       "DySdZXtL+z8yKAAAcEypeoYqIr4vZmMHAADHOJIdAACAhrpeeiYzNXwvqFXqpFaJg2wZkF4p+3Is\n",
+       "q1VGKVuOJFOCI9N25jud6UMmdmRkpDhWyn2+zH6fGetMyZAFCxYUx0q5sj2Zz5fZzvv27SuOzZSe\n",
+       "GR4eLo6V6pWemT9/fnHsc5/73OLYRYsWFcdKubI2tcY6U+ooE5s9xmWORUejZBZnqAAAABoioQIA\n",
+       "AGiIhAoAAKAhEioAAICGSKgAAAAaIqECAABoiIQKAACgIRIqAACAhkioAAAAGiKhAgAAaKjrpWdm\n",
+       "zSrrwtGYFr4T21Vie0G2zEnm85WOnZQrhzBnzpzi2Llz5xbHZvuRKdeR2c6ZPmfGI/sdybSd2W4D\n",
+       "AwPFsZlyHZnYzNhl4zOlS2qVnsls46zM53vqqaeKYzMlRjKypVky2y6z32e+f5nSOk888URxrFSv\n",
+       "jFJGrWN49nif+V6Xxl5zzTUdX+v4F9H270gKSRMddSMivlGyctszJd0m6ZGIOL/kPQAAAP3kSKcY\n",
+       "zlcroeqkKKGStE7SdkkLSzsFAADQTzomVBHxh00bt/0CSedJ+mtJf9G0PQAAgF406QV+28+3faXt\n",
+       "9e3lU2z/cWH7n5T0IUl1LtQCAAD0gJI7Jj8vaYOkk9rLD0i6eLI32X6jpD0RsUUT34cFAABwTCj5\n",
+       "mdYJEXGN7UskKSJGbR8oeN+rJF1g+zxJ8yQtsv3FiHjn2KD169cffjw4OKjBwcHy3gMAAFSyadMm\n",
+       "bdq0qSi2JKEasv1LhxZsnynpp5O9KSIulXRp+z2vk/TB8cmUJK1du7aoowAAANNp9erVWr169eHl\n",
+       "j370ox1jSxKqD0j6tqTltn8o6URJb5lCv+pNJAUAANBFkyZUEXG77dWSVql1L9R9EVE+c1irje9J\n",
+       "+t7UuggAANDbJk2obA9Ieo+k16h1lulm2/8QESO1OwcAANAPSi75fVHSzyR9Sq0zVL8n6SpJv1ux\n",
+       "XwAAAH2jJKE6NSJOGbP8Xdvbj1YHSutGZWoOZWuaZeJr9aNWjcCatQdr1pkrla1FlalTlmk7025G\n",
+       "piZWpraiVK9WYaaWWKamWS/UjZNy2zkzfplafjVrm9balzP7UObzZfY3qd7n27dvX3FsZl/OHuMy\n",
+       "2y5zDKhVc6/W9ynbdvb4OZGSPfwO2688tND+ld/tjdcMAABwjDhSceS7x8T8wPbDat1D9UJJ901D\n",
+       "3wAAAPrCZMWRAQAAMIkjFUd+aOyy7V9Wa8ZzAAAAjFFSHPkC2w9I2qXWXFIPSbq+cr8AAAD6RslN\n",
+       "6X8l6ZWS7o+IZZLOlnRL1V4BAAD0kZKEajQi/lfSDNszI+JGSS+v3C8AAIC+UTLxwuO2F0q6WdKX\n",
+       "be+RNFS3WwAAAP2j5AzVmyUNS7pY0npJO8UvAAEAAA4rKY586GzUQUmfr9obAACAPnSkiT2H1JrI\n",
+       "cyIREYuORgdKS0Rkypxky61k4jPlE3qhNEuvqFUCJ1M6QcqNX63YjMx+kS2pkWk78/kWLSo/NJxw\n",
+       "wgnFsbNnzy6OzZaoyJQCyZQ6OXDgQHFsplxOpr9SvWNcZjvPnz+/SrvZkiGZ+My2yHz/MmM9PDxc\n",
+       "HCvVK+eUKYGT2d8y45E5Bki5vw/ZtidypHmojmvauO3nSPqspFPVSs7+KCJ+1LRdAACAXtK8GuCR\n",
+       "/Z2kf4uIt9ieJWlB5fUBAABMu2oJle3jJb02Ii6SpIg4IOmntdYHAADQLXVu/GhZJul/bH/O9h22\n",
+       "/9F2+UV0AACAPlEzoZol6XRJfx8Rp0t6UtIlFdcHAADQFTXvoXpE0iMRcWt7+WuaIKG64YYbDj9e\n",
+       "tmyZli9fXrFLAAAAZe677z7df//9RbHVEqqI+Inth22/KCLul/QGSfeMjzv77LNrdQEAAGDKVq1a\n",
+       "pVWrVh1evu666zrG1v6V35+pVa5mjqQHJb2r8voAAACmXdWEKiLulHRGzXUAAAB0W82b0gEAAJ4V\n",
+       "al/ym1Tp1P41S8/UKifTC7G9IlPmpGa5gFplJzJlGUZGRqrEZvog5cpO7Nu3rzg2M36ZciTz5s0r\n",
+       "js2UfJFypU6yZW1KZfa3gYGBVNuZ719mv8jEDg0NTR7UltkW2fJTmfjM8SIzJpnj1nHH5YqWZEo/\n",
+       "1VLrb1TNEm1Ho23OUAEAADREQgUAANAQCRUAAEBDJFQAAAANkVABAAA0REIFAADQEAkVAABAQyRU\n",
+       "AAAADZFQAQAANERCBQAA0NAxWXqmV2Smss98vky7Nafqz6hVmiVbYuTgwYNV2s58vlplTrIlODL2\n",
+       "799fHDs8PFwc+9hjjxXHjo6OFscuWLCgOFbKlQ3JxGb6kfmuZvf7bFmiGu3W+j5lj3GZfTnTdqac\n",
+       "TK2SNlKuRFOm7Uy7tY73mRJY2bYzZZQ6qXqGyvZHbN9j+27bX7E9t+b6AAAAuqFaQmV7qaR3Szo9\n",
+       "Il4iaaakt9daHwAAQLfUvOT3M0mjkubbPihpvqRHK64PAACgK6qdoYqI/5P0t5L+S9KPJT0REf9e\n",
+       "a30AAADdUvOS3wpJfy5pqaSTJB1n+/drrQ8AAKBbal7ye7mkH0bEY5Jk+xuSXiXpy2ODNm7cePjx\n",
+       "8uXLtWLFiopdAgAAKLNr1y7t2rWrKLZmQnWvpL+0PSBpRNIbJG0eH7RmzZqKXQAAAJiaZcuWadmy\n",
+       "ZYeXb7rppo6xNe+hulPSFyXdJumu9tOfqbU+AACAbqk6sWdEfELSJ2quAwAAoNsoPQMAANAQCRUA\n",
+       "AEBDXa/lV6NGX6/Ur8t8tkzto1p1/7Iy/Sit2Sjl6tdlamJJuT5n2s7EZmru1eqDlKs9ltmPMjX3\n",
+       "Mvt95vNla/kdf/zxxbGZ+meZWmJ79+4tjh0aGiqOlXpjOy9cuLBKu9n6bpl6npn9PvO9zuxDixYt\n",
+       "Ko6VpLlzyyu8ZeorZrZb5hiQic30QaozfpdddlnH1zhDBQAA0BAJFQAAQEMkVAAAAA2RUAEAADRE\n",
+       "QgUAANAQCRUAAEBDPZlQPfjgg93uAhrYuXNnt7uAKdqxY0e3u4AGGL/+dccdd3S7C2iIhApHHePX\n",
+       "v/iD3N/uvffebncBU7Rly5ZudwEN9WRCBQAA0E9IqAAAABpyN8u02O6NGjEAAAAFImLCGmZdTagA\n",
+       "AACOBVzyAwAAaIiECgAAoKGeS6hsr7V9r+0HbH+42/1BZ7b/yfZu23ePee55tjfavt/2BtvP6WYf\n",
+       "0ZntJbZvtH2P7W22399+njHscbbn2b7F9lbb221/rP08Y9dHbM+0vcX2t9vLjF8f66mEyvZMSZ+W\n",
+       "tFbSKZIutP3i7vYKR/A5tcZqrEskbYyIF0m6ob2M3jQq6eKIOFXSmZLe2/6+MYY9LiJGJJ0VES+T\n",
+       "9FJJZ9l+jRi7frNO0nZJh25mZvz6WE8lVJJeIWlnRDwUEaOSvirpTV3uEzqIiJslPT7u6QskfaH9\n",
+       "+AuS3jytnUKxiPhJRGxtPx6StEPSyWIM+0JEDLcfzpE0U63vImPXJ2y/QNJ5kj4r6dCvxhi/PtZr\n",
+       "CdXJkh4es/xI+zn0j8URsbv9eLekxd3sDMrYXirpNEm3iDHsC7Zn2N6q1hjdGBH3iLHrJ5+U9CFJ\n",
+       "T495jvHrY72WUDGHwzEkWnNyMKY9zvZxkr4uaV1E7B37GmPYuyLi6fYlvxdIWm37rHGvM3Y9yvYb\n",
+       "Je2JiC36+dmpX8D49Z9eS6gelbRkzPIStc5SoX/stv18SbL9K5L2dLk/OALbs9VKpq6KiGvbTzOG\n",
+       "fSQifirpOkm/IcauX7xK0gW2d0m6WtJv2r5KjF9f67WE6jZJK20vtT1H0tskfavLfULOtyRd1H58\n",
+       "kaRrjxCLLrJtSVdK2h4RV4x5iTHscbZPOPQLMNsDktZI2iLGri9ExKURsSQilkl6u6TvRsQ7xPj1\n",
+       "tZ6bKd32uZKuUOsmyysj4mNd7hI6sH21pNdJOkGt6/2XSfqmpH+W9EJJD0l6a0Q80a0+orP2r8I2\n",
+       "SbpLP7+08BFJm8UY9jTbL1HrpuUZ7f+uiojLbT9PjF1fsf06SR+IiAsYv/7WcwkVAABAv+m1S34A\n",
+       "AAB9h4QKAACgIRIqAACAhkioAAAAGiKhAgAAaIiECgAAoCESKgBdZ/sH7f//qu0Lj3Lbl060LgA4\n",
+       "mpiHCkDPsP16tSY5PD/xnlkRceAIr++NiIVHo38A0AlnqAB0ne2h9sOPS3qt7S2219meYfty25tt\n",
+       "32n7T9rxr7d9s+1vStrWfu5a27fZ3mb73e3nPi5poN3eVWPX5ZbLbd9t+y7bbx3T9k22/8X2Dttf\n",
+       "mt6tAaAfzep2BwBAPy9982FJHzx0hqqdQD0REa+wPVfS921vaMeeJunUiPjP9vK7IuLxdm27zba/\n",
+       "FhGX2H5vRJw2wbp+W9KvS3qppBMl3Wp7U/u1l0k6RdJ/S/qB7VdHBJcKAXTEGSoAvcTjls+R9E7b\n",
+       "WyT9SNLzJA22X9s8JpmSpHW2t0r6D0lLJK2cZF2vkfSVaNkj6XuSzlAr4docET+O1j0RWyUtbfCZ\n",
+       "ADwLcIYKQK97X0RsHPtE+16rJ8ctny3pzIgYsX2jpHmTtBt6ZgJ36OzV/jHPHRTHSgCT4AwVgF6y\n",
+       "V9LYG8i/I+k9tmdJku0X2Z4/wfsWSXq8nUz9mqQzx7w2euj949ws6W3t+7ROlLRa0mY9M8kCgEnx\n",
+       "ry4AveDQmaE7JR1sX7r7nKRPqXW57Q7blrRH0m+148f+RHm9pD+1vV3SfWpd9jvkM5Lusn17RLzj\n",
+       "0Psi4l9tv7K9zpD0oYjYY/vF49rWBMsA8AuYNgEAAKAhLvkBAAA0REIFAADQEAkVAABAQyRUAAAA\n",
+       "DZFQAQAANERCBQAA0BAJFQAAQEMkVAAAAA39PxShDsSnYXpyAAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792bc15650>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAES9JREFUeJztnVmMpNdVx3+nq6prX3qbnvG4x4tmsEYRkv1ikJyICIVo\n",
+       "/ELghcgSUmQC4gECgkiY8BIjeIiQEiFeIiA2CosSIZCjBAmwjQIYIRYjb4E4jqVZPNPdM91de9fe\n",
+       "fXmoOt/c/qZ6qaU9VfXdn/Sp9qvTM3+du5zlE2MMDscgzN1vAxzThxONY2CcaBwD40TjGBgnGsfA\n",
+       "ONE4BmZo0YjIFRF5V0R+KCLPjdMox2Qjw5zTiEgI+AHwCeAW8N/AM8aY74/XPMckMqyneRJ43xhz\n",
+       "zRjTBr4JfGp8ZjkmmfCQvzsPfGC9vgn8mP0FEXFHzVOOMUb6vT+sp3GCCDDDiuYWsGa9XqPrbRwB\n",
+       "YFjRvA5cEpGHRWQe+DTw7fGZ5ZhkhlrTGGM6IvKrwD8CIeAFt3MKDkNtuU80sFsITz3jXgg7AowT\n",
+       "jWNgnGgcA+NE4xgYJxrHwDjROAbGicYxME40joFxonEMjBONY2CcaBwDM2wSFgAicg0oA3tA2xjz\n",
+       "5DiMmnbm5uYQEe8xHA4TCoUIh8OEw2H29vYOXPv7+weuSS+VHkk0dJOxPm6MyY/DmFkhHA4zPz9P\n",
+       "NBplfn6eTCZDJpMhm82SyWSo1+tUq1Xvqtfr1Ot1Go0G9XrdG2dSxTOqaAD6RkKDTCQSIZFIkEgk\n",
+       "SKVSnD17lnPnznHu3DkeeOABisUiW1tb3Llzh62tLUqlEqVSCYBms8ne3h4AIjKRwhmHp3lVRPaA\n",
+       "PzbG/OkYbJp6wuEw8XicbDZLNptlbW2NixcvcunSJS5evMjt27e5ceMG169f97wRQKvVolKpTPwU\n",
+       "NaponjLGbIjICvCKiLxrjHltHIZNGyJ3HW40GiWVSrG4uMjKygpra2s8+uijPPbYY1y+fJnFxUVi\n",
+       "sRihUMj7bbvdplqteusgmNHpyRiz0XvcEpGX6Ja2BE40/oVvKpViZWWFBx98kAsXLrC2tsby8jKJ\n",
+       "RAIRIR6Ps7CwwPnz573fNJtNisUioVCITqcDzKBoRCQBhIwxFRFJAp8Efndslk0RKphQKEQoFCKd\n",
+       "Tnse5tKlS6yurrK0tEQymUREiMViLCwsICIkEglarRaFQoH19XVCoZC3lpnFNc0q8FLPlYaBvzLG\n",
+       "vDwWq6YMEfG21KFQiFQqxfLysica3TWpp1HRJBIJlpeXqVarrK+vk0qlPNFMqmBgBNEYY64Cj4/R\n",
+       "lqlFvUwkEmF+fp50Os3i4iKrq6usra0Rj8e9LbiIEIlECIVCxONxjDEsLi6SSqWIRqPedKXXJDKO\n",
+       "LXfgCYfDxGIx4vE4iUSCdDpNKpUiHo8TjUaJRCKEw2Hm5roH8O12m2azSbPZpNVqkc/nqVarNBoN\n",
+       "77DPGDN7nsZxl1AoRCwWI5VKkU6nyWQyJJPJvqLRnVKtVqNSqVCtVsnn81QqFU80xpiJ3nY70YyB\n",
+       "cDhMNBolmUySzWZJp9Mkk0kSiQTRaJRwOOwtlqHraXZ3dymVSuTzeU80erA3yYIBJ5qxoCfAmUyG\n",
+       "paUlcrmcNz3p+sVGPU2hUODOnTsHPI0tmEkVjhPNEOgCVR/j8Ti5XM5b+K6urpLL5YjFYn0Xs+pp\n",
+       "isWiJ5pqtUqz2Zx4wYATzdDYOxxbNA899NAB0fRDReP3NM1mc+KnJnCiGRoVzNzc3AHRXLhwgaWl\n",
+       "JbLZ7JGeplarHelpJhknmiFQseilwcmVlRXOnz/vbbmj0Wjf359keppknGiGQA/y9EqlUt52O5VK\n",
+       "ebsmewFsi6HdblOv1ymXyxQKBcrlMrVajXa77UQzq4RCIebn54nFYt5W2xaOpjvoVltRQahoKpUK\n",
+       "+XyecrlMvV53opll1NPEYjEv0coWjcag9FzGPt01xhwQTaFQoFqt0ul0vOSrSefYxHIReVFEbovI\n",
+       "O9Z7iyLyioi8JyIvi0judM2cLEKhENFolHg87k1JyWTSO9CLxWL3nM8YY7yc4Far5aV8lkolL4Qw\n",
+       "LZ7mJNUIfwZc8b3328ArxpgfAf6p9zoQiAjz8/MkEglyuRzLy8vkcjmSySTz8/OH7pYajQbVapVC\n",
+       "oUClUpmq6cjPsaLpZeIVfG//NPD13vOvAz8zZrsmGj0B1h1TNpslkUgQiUT6fn9vb+/AdGSvYaaR\n",
+       "Ydc0q8aY273nt+nm1gSGSCRCMpn0PM1xoul0Op6nKZVKVCqVqdot+Rm5WM50/+rp+8tHwPY0tmg0\n",
+       "QdyPvcXe2dmhVCoFUjS3ReQsgIicA+6Mz6TJR/NnUqkU2WyWVCp1IFEc7u6Y9vf3qdfrFItFNjY2\n",
+       "uH79Ouvr6xQKBer1eqBE823gM73nnwG+NR5zpgM7FUIXwbFYjHC4O9urYPRqNBoUCgU2Nze5evUq\n",
+       "Gxsb5PN5arXabIpGRL4B/DvwmIh8ICLPAl8CfkpE3gN+svc6MBzmaVQ0it/TbG5ucu3atan3NMcu\n",
+       "hI0xzxzy0SfGbMtEo1vpubk572BPRZNMJg8kW9lTkzHmgGhu3LhBsVj0ynFnUjQOvML9cDhMJBI5\n",
+       "UJetouk3PWlBv+YE64FerVaj1Wp5qZ3ThhPNMWjXh2g0SiwWIxaLeYLRS2NQtmjsLhC65a7Vap6H\n",
+       "UdFMI040J0DXMBqY9HeB0JCBf/e0v7/P3t7eAU+zu7tLvV73QgrO08wo9m7J9jB6+UMHtqfpdDr3\n",
+       "TE+tVus+/SXjwYnmBGiAUmua+uXL2LRaLXZ3d71ra2uLcrlMs9n8kC0/HZxojkHXNBqkPIloms2m\n",
+       "F2fK5/Nsb297OcDTOB35caI5AX5Po2W2R3maSqXC9vY2m5ubbG9vUy6XaTQaMyEa16jxBOiaxu9p\n",
+       "/Id5iopmZ2eHjY0NTzRuegoQmt6pSVcanFRP4w8baHBye3ub9fV1Nz0FDW0jctj0JCL3dOes1WqU\n",
+       "SiW2t7fv8TRONAHBnp76VRvYZzKacKWiWV9fp1gsep5mFhg2R/h5EbkpIm/0Ln866Eyh09NhC2Fb\n",
+       "NJ1O5x7RzJqnGTZH2ABfMcY80bv+Yfym3T/8XSByuRyLi4ssLy9z5swZFhYWvJxgOHiYpyfA/h40\n",
+       "nU6H/f39+/yXjYeTRLlfE5GH+3w0mW2aRsQfa0okEiwsLLC0tMTKygpnzpzxqg80vdPvafQUuNVq\n",
+       "0Ww2abfbU9FC5KSMsuX+nIi8JSIvzFoJix1rymaznmiO8jRaomILxhbNLHmaYUXzVeARuj33NoAv\n",
+       "j82i+4x6mn6iWVlZ8TpCDOJpAjc99cMY4+UEi8jXgO+MzaIJwG68qJdWTGrVJNxNzLKrDez7HUxr\n",
+       "4vhxDOVpesnkys8C7xz23WnE7gtspz30Ewzg7Ziq1aq3vW40Gl4T6VnjWE/TyxH+CWBZRD4Avgh8\n",
+       "XEQep7uLugr88qla+SFj9wXWy24/70+FUE8zK8VwxzFsjvCLp2DLxHCUpzms7NZf0F+v1+l0OjM5\n",
+       "PbkTYR92rXY2m2VpaYlMJuM1XYT+JSpaCLexscHOzo7XpGgWcaLpg7/sNpfLkUgk+uYAG2Oo1Wpe\n",
+       "gFJjTbMUNvDjRONDPY2KRgv8D9tia12THTbY2dmhXC7TarXc9BQUbNH0K/A/KkC5sbHhFfk7TzPD\n",
+       "2Pdr0vWMNpLudwLsjzHV63WvbX2pVGJ3d5dmsxncLXcQsO/VZAcq9c5wmnhlexq7ykBrmnZ3d72d\n",
+       "k4rGTU8ziHoYPY+xRaOeJhqNejfGAPrWM6loKpWKF2ua1mK44wi8aOCup9GOnVqjraKxpy/gnnom\n",
+       "u3pyGm5cOipONPSva9JEK7utqz5qDz1dw2gf4Far5W3FZ5nAi8Yf1dZ7NdldIGy0pau2qZ+FzlaD\n",
+       "EnjRQPcwzy679Zeo+IWjotEDPVs0QeDIKLeIrInId0Xkf0XkeyLya733Z6aPsO1ptLhfPY1WG/hR\n",
+       "0ZRKpXs8TRA4LjWiDfyGMeYjwI8DvyIil5mxPsK6e9Ibl9r5M3B44vjW1pZ3AjzLh3l+jhSNMWbT\n",
+       "GPNm73kV+D5wnhnrI9wvqm1HtP2pnPa9mm7dujVzFZTHceI1TS+5/AngP5mxPsL98mfsqcluG9Lp\n",
+       "dA7cdufmzZuUy+WZKlE5jhOJRkRSwN8Cv26MqfjuLGJEZGr/pex7N+kUdZin0QM9WzS3bt2i0Wh4\n",
+       "+cBB4CTFchG6gvkLY4y2fp2pPsKHbblDodCB9YwKQ0+B9STYLlMJAsftngR4Afg/Y8wfWh/NTB9h\n",
+       "ESESiRCPx8lkMiwsLHgtXu2wga5nGo3GgbIUDRfM+imwzXHT01PAzwNvi8gbvfe+QLdv8F+LyGeB\n",
+       "a8DPnZqFHwLatj6dTrOwsOCV3uo5jYrGLklptVqed1HRBIUjRWOM+TcO90Yz0Ue4n6dJp9MHWrz6\n",
+       "A5R+T2Nn8QWBQDc10jveanu0eDzu9QSORCKHntOod1EPo4IJimgCGUawA5D2+YzeLU4DlXadk783\n",
+       "sC2UIAkGAuxp7Juxa1qEFv1r7oz/HpR263p/crl+LwgE0tModgLWqJ4mSATa0yj2QleL9e1u4v6k\n",
+       "K7t1SBAJrKexpxT73gWVSsVLytLDOhWNZumpcIIqmsB6GkVF02w2+97wQkMI6oXspHEnmoDiF43e\n",
+       "/ti+S4rtafSzIIUN/AR+etrf3/dqsbe3t0mlUgAHKhQ0D7hSqRzoPeNEE0B0R1Sr1SgUCqyvrwN4\n",
+       "gtDPd3Z22NnZ8XrPaJDSiSZAGGMOpD3UajXy+TzGmANFbvqdYrHoXX7RBG27DceIRkTWgD8HztBt\n",
+       "YPQnxpg/EpHngV8Etnpf/cK0tYW1p6darQbg9ZgxxnjT09zcHJVKxUu0sstVnKfpj+YIv9lLxPof\n",
+       "EXmFu32Ev3LqFp4yWpKiTYja7bZXjTA3N+clXWkFpSZg7e7uTv3NvobluCj3JrDZe14VEc0Rhhnq\n",
+       "I6y7I+ge+hWLRcLhMJ1Ox0vj1POZRqPhTVNBFY2cdE7u5Qj/C/AR4PPAs0AJeB34vDGm6Pv+1Ez2\n",
+       "dufOUCjkNZfW+yCoB9JHO2uvXq/fb/NPDWNMX8dwItH0pqZ/Bn7fGPMtETnD3fXM7wHnjDGf9f1m\n",
+       "akRjR701VUK7SITD4XuClBpC0PSIWWVo0fRyhP8O+Htfyqd+/jDwHWPMj/renxrROPpzmGiGyhGe\n",
+       "9T7CjqM50tOIyEeBfwXeprtjAvgd4Bm6Le69PsJWHZT+1nmaKWekNc0wONFMP0NNTw5HP5xoHAPj\n",
+       "ROMYGCcax8A40TgGxonGMTBONI6BObVzGsfs4jyNY2CcaBwDc6qiEZErIvKuiPxQRJ4bw3jXRORt\n",
+       "EXlDRP5riN+/KCK3ReQd672h29seMt7zInKzZ+MbInJlgPHG2oL3iPGGthG499Z647qAEPA+8DAQ\n",
+       "Ad4ELo845lVgcYTff4xus8l3rPf+APit3vPngC+NON4Xgd8c0r6zwOO95yngB8DlYW08YryhbTTG\n",
+       "nKqneRJ43xhzzRjTBr4JfGoM4w6dZmqMeQ0o+N4eur3tIePBkDaaMbfgPWK8oW2E052ezgMfWK9v\n",
+       "ctfgYTHAqyLyuoj80ohjKafR3vZzIvKWiLwwbDf3cbfgtcb7j1FtPE3RnMZe/iljzBPA03S7p39s\n",
+       "nIObrh8f1e6vAo/QzTfaAL486AD+Fryj2tgb729641VHtfE0RXMLWLNer9H1NkNjjNnoPW4BL9Gd\n",
+       "AkdlrO1tjTF3TA/ga4PaOO4WvNZ4f6njjWrjaYrmdeCSiDwsIvPAp+m2kh0KEUmISLr3PAl8kvGk\n",
+       "mY61ve0oqbDjbsF7aum6o+xmTrB6f5ruiv19ulWYo4z1CN0d2JvA94YZD/gGsA606K63ngUWgVeB\n",
+       "94CXgdwI4/0C3YrUt4G3ev+5qwOM91Fgv/c3vtG7rgxr4yHjPT2KjcYYF0ZwDI47EXYMjBONY2Cc\n",
+       "aBwD40TjGBgnGsfAONE4BsaJxjEwTjSOgfl/g7yNWl4b+UcAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792bb98fd0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEptJREFUeJzt3X2QXfVdx/HPJ89PJExMFrJJaqKCQobaRGCAUii2OsgU\n",
+       "WrWWorZYmTpOWxuxZUqZ0b90Wttxip2OzlSQFvqk0grtKBS0QCm1hJAEQkIDOETJ4xKEkMfNZvfr\n",
+       "H/du2Cy72fPNyW/vveH9mslwz7nfe85vz+/cc7+ch9/XESEAAAAcvwmtbgAAAECnI6ECAACoiYQK\n",
+       "AACgJhIqAACAmkioAAAAaiKhAgAAqGlSK1dumzEbAABAx4gIjzS/aEJl+3JJN0uaKOmWiPjr4THX\n",
+       "XXfd6z63Zs0arVix4qh5AwMDhVqZW3Z/f3+R2IxMe7PjjGXiR4vdsGGDli1bdtS8zLbI/H2HDx+u\n",
+       "HJuN7+3trRx78ODBIrGHDh2qHHsiviMvvvii5s+f/7r5EydOrLwMe8RjzYgybd6/f3/l2Mw2zi47\n",
+       "s19k9rfMd2TChJEvLgwMDIz43qRJ1Q/1U6dOrRw7ZcqUyrGTJ0+uHDva31c3NitzPMz030j7xb59\n",
+       "+zRz5sxKsdllj6bUcbnkb1QpVfejYx2Ti+2JtidK+pKkyyWdLeka22eVWh8AAECrlLyH6nxJz0XE\n",
+       "5ojok/QtSe8uuD4AAICWKJlQLZT0wpDpLc15Y1qwYEGRBmF8jHTJCJ1hxowZrW4CashcbkV7yVwS\n",
+       "RXsqmVAd94VREqrO1tXV1eom4DiNdA8HOgcJVefK3I+G9lTypvStkhYPmV6sxlmqo6xZs+bI6wUL\n",
+       "FpBMAQCAtjAwMFD5JvuSCdVqSWfYXiJpm6SrJV0zPGj403wAAADtYMKECUc9AXisp/yKJVQRcdj2\n",
+       "xyR9X41hE26NiKdLrQ8AAKBVio5DFRH3SLqn5DoAAABaraUjpUvVB5wrOdBbJj5z02dmMMRSsdlt\n",
+       "UWrZmeVmBiHMPhmTGbRw2rRpRWIzbcj8fdmbWksNypiJzewXme2WVWpw38xyS+73mb7O7MuZ5Wae\n",
+       "IJ09e3bl2Mx2k3IDSfb19VWOLTW474EDByrHSrnfqFK/q5k+ycRmjhdS7rtatU8WLhx9sAJq+QEA\n",
+       "ANREQgUAAFATCRUAAEBNJFQAAAA1kVABAADUREIFAABQEwkVAABATSRUAAAANZFQAQAA1ERCBQAA\n",
+       "UBMJFQAAQE0tr+V36NChSnHZmnSlZOoklVpuphZVqfa2i8y2yOq0uo3ZOlcZmZp0GaXqg2W3Raav\n",
+       "MzUFM7GZNmRqlEnS4cOHK8dm+rpUXbx9+/ZVjq36GzIosy0ysZk+yezLmRqI2fhMbGZfznz/Sv5G\n",
+       "Zfokux+NpGiWYnux7Qdsb7D9lO2Pl1wfAABAK5Q+Q9Un6fqIWGd7lqTHbd8fEU8XXi8AAMC4KXqG\n",
+       "KiJ2RMS65uu9kp6W1F1ynQAAAONt3G5Msr1E0nJJj47XOgEAAMbDuCRUzct9d0pa2TxTBQAAcNIo\n",
+       "/pSf7cmSvi3paxFx1/D3165de+T16aefrgULFpRuEgAAwJief/55bd68uVJs0YTKjechb5W0MSJu\n",
+       "Hilm+fLlJZsAAABwXJYuXaqlS5cemX7wwQdHjS19ye+tkn5f0mW21zb/XV54nQAAAOOq6BmqiPiR\n",
+       "GI0dAACc5Eh2AAAAamp56ZmS5TI6SaaEQ6nYrEyJilKlCEqWGClV9iVTbqVkWYZMO0r1X6ltkSkZ\n",
+       "IuW+J5llZ8qtZMqAzJ49u3KsVK4ETqZcRyb24MGDlWMPHDhQOTYrs9/PnDmzcuzcuXMrx86ZM6dy\n",
+       "rJT7TmX2z1dffbVy7N691R/mz5QZyuwXktTb21s5NrMtRsMZKgAAgJpIqAAAAGoioQIAAKiJhAoA\n",
+       "AKAmEioAAICaSKgAAABqIqECAACoiYQKAACgJhIqAACAmkioAAAAamp56ZlJk6o1IVPmpGS5lYxM\n",
+       "m0vFZpXadv39/ZVjS5Zbyfx9mTZnypFk+i9TriNTZkHKlVooVWYoE1v1WHE8Mu3I7BeZMiBTpkwp\n",
+       "EltSZluU2oeyZXgyJWIyJXsyJXC2bt1aOTZTmkXKHTNK/a5m+m/y5MmVY7PHgPEubTdq62z/tqSQ\n",
+       "NNKvW0TEd6qswPZESaslbYmIK4+rlQAAAG3sWOnelWokVKOplFBJWilpo6RTqjYKAACgk4yaUEXE\n",
+       "H9RduO1Fkq6Q9FeS/qzu8gAAANrRmBf4bZ9u+1bb9zanz7Z9XcXlf0HSDZLK3fQDAADQYlXumPyK\n",
+       "pPskdTenn5V0/Vgfsv0uST0RsVYj34cFAABwUqhyy/y8iPgn2zdKUkT02a7ySNNFkq6yfYWkaZJm\n",
+       "2749Ij44NOixxx478rq7u1sLFy6s3noAAIBCtmzZUvmpzCoJ1V7bPzM4YfsCSbvH+lBE3CTppuZn\n",
+       "LpX0yeHJlCSdd955lRoKAAAwnhYtWqRFixYdmR56Emi4KgnVJyR9T9LP2f6xpPmS3nsc7WqPwaEA\n",
+       "AABOsDETqoh43PYlkn5RjXuhNkVE9REBG8t4SNJDx9dEAACA9jZmQmV7uqSPSLpYjbNMD9v++4g4\n",
+       "WLpxAAAAnaDKJb/bJb0q6YtqnKH6XUl3SPqdgu0CAADoGFUSqmURcfaQ6R/Y3niiGnDwYLUTXSXr\n",
+       "82WWXSo2U7+uVGzJZZfqv2xdw1I1E0v19SmnVC8wcOqpp1aOlXJ15jKxmVpi+/fvLxJb9bgyKFOT\n",
+       "LqNUTbOszP6ZqUuZkdmHMt+97du3p9qR6evMdiu1f2b7o9SxKFNHr9SxJRMr5dp8IupjVmndGtsX\n",
+       "Dk40n/J7vPaaAQAAThLHKo68fkjMI7ZfUOMeqjdJ2jQObQMAAOgIYxVHBgAAwBiOVRx589Bp211q\n",
+       "jHgOAACAIaoUR77K9rOSnldjLKnNku4p3C4AAICOUeWm9L+UdKGkZyJiqaR3SHq0aKsAAAA6SJWE\n",
+       "qi8idkmaYHtiRDwg6dzC7QIAAOgYVQZpeNn2KZIelvR12z2S9pZtFgAAQOeocobqPZL2S7pe0r2S\n",
+       "nhNPAAIAABxRpTjy4NmofklfKdoaAACADnSsgT33qjGQ50giImafiAZUHe69ZLmVjFIlVNqh/E02\n",
+       "PlMeolSZmmwpgoxSZUNKbeNsuZV2KI2UKfeQic3uF5ltl4nt7e0tstxMeR+p3DEjs52nTp1aOTZT\n",
+       "MiQTK+XanNkW06dPrxxbar+QcvtG5vhSqk8y+0W2PEwmftq0+qNCHWscqll1F277VEm3SFqmRnL2\n",
+       "hxHxk7rLBQAAaCe51D7vbyX9e0S81/YkSTMLrw8AAGDcFUuobM+R9LaIuFaSIuKwpN2l1gcAANAq\n",
+       "5W5AkZZKetH2bbbX2P4H2zMKrg8AAKAlSiZUkyStkPR3EbFC0j5JNxZcHwAAQEuUvIdqi6QtEfFY\n",
+       "c/pOjZBQrV69+sjr7u5udXd3F2wSAABANbt27dKuXbsqxRZLqCJih+0XbJ8ZEc9IeqekDcPjzj2X\n",
+       "KjYAAKD9zJs3T/PmzTsyvWnTplFjSz/l9ydqlKuZIum/JX2o8PoAAADGXdGEKiKekHReyXUAAAC0\n",
+       "Wsmb0gEAAN4QSl/yG9OBAwcqxWXKWWTLTpQqVVOqLEqpMi7tIlO2IFPyRcqVkym1X2TKTmRLjGT0\n",
+       "9/cXWW6mT2bMqD6SSqY0RKacRTY+s1/09fWl2lFKpq8zbc6URTl8+HDl2IzscTYTn4nNlDnJHrdK\n",
+       "KfX7kDnOlorNxlft67vvvnv0ZVReGwAAAEZEQgUAAFATCRUAAEBNJFQAAAA1kVABAADUREIFAABQ\n",
+       "EwkVAABATSRUAAAANZFQAQAA1ERCBQAAUFPLS89ULUeQKQEwMDCQakOmlEQmNtOOUmVqSsqUiMnI\n",
+       "lEPIlL6Qyu5HVWVKqMyZM6dybHYfyvx9mRI4mdj9+/dXjt29e3fl2OnTp1eOlXIlcDLLnjlzZuXY\n",
+       "THmYbEmiTDmZUiWJMqVnMn9ftkRUZtmZY1HmeJgpiZIto5Qpa5Mpl5NpR2Z/27t3b+XYffv2VY7N\n",
+       "tuNElIkq+itu+9O2N9heb/sbtnN7BgAAQAcollDZXiLpw5JWRMQ5kiZKen+p9QEAALRKyUt+r0rq\n",
+       "kzTDdr+kGZK2FlwfAABASxQ7QxUR/yfpbyT9r6Rtkl6JiP8otT4AAIBWKXnJ7+cl/amkJZK6Jc2y\n",
+       "/Xul1gcAANAqJS/5nSvpxxHxkiTZ/o6kiyR9fWjQ+vXrj7zu6urSaaedVrBJAAAA1fT09Kinp6dS\n",
+       "bMmE6qeS/tz2dEkHJb1T0qrhQeecc07BJgAAAByfrq4udXV1HZnesGHDqLEl76F6QtLtklZLerI5\n",
+       "+8ul1gcAANAqRQf2jIjPSfpcyXUAAAC0WucNzw0AANBmSKgAAABqanktv6q1rjI1lUrVYMPRMtu5\n",
+       "VJ9kl5upU5apjZepzZWpJZapf5at5Zep+ZVZdqnlZmroZfpDytU0yxyLdu7cWTk2U6twz549lWOl\n",
+       "cvX5MvXdMjUsM8vN1vPM9F+2TmBVpertSbmagr29vZVjM3X02qF2pJQ7vsyaNav++movAQAA4A2O\n",
+       "hAoAAKAmEioAAICaSKgAAABqIqECAACoiYQKAACgprZMqLZt29bqJqCG7du3t7oJOE5btmxpdRNQ\n",
+       "w65du1rdBBwnjpudry0TKnaszkb/da6tW7e2ugmo4aWXXmp1E3CcduzY0eomoKa2TKgAAAA6CQkV\n",
+       "AABATc4Mw3/CV263buUAAABJETFiTaKWJlQAAAAnAy75AQAA1ERCBQAAUFPbJVS2L7f9U9vP2v5U\n",
+       "q9uD0dn+R9s7ba8fMm+u7fttP2P7PtuntrKNGJ3txbYfsL3B9lO2P96cTx+2OdvTbD9qe53tjbY/\n",
+       "05xP33UQ2xNtr7X9veY0/dfB2iqhsj1R0pckXS7pbEnX2D6rta3CMdymRl8NdaOk+yPiTEn/2ZxG\n",
+       "e+qTdH1ELJN0gaSPNr9v9GGbi4iDki6LiLdIerOky2xfLPqu06yUtFHS4M3M9F8Ha6uEStL5kp6L\n",
+       "iM0R0SfpW5Le3eI2YRQR8bCkl4fNvkrSV5uvvyrpPePaKFQWETsiYl3z9V5JT0taKPqwI0TE/ubL\n",
+       "KZImqvFdpO86hO1Fkq6QdIukwafG6L8O1m4J1UJJLwyZ3tKch85xWkTsbL7eKem0VjYG1dheImm5\n",
+       "pEdFH3YE2xNsr1Ojjx6IiA2i7zrJFyTdIGlgyDz6r4O1W0LFGA4nkWiMyUGftjnbsyR9W9LKiNgz\n",
+       "9D36sH1FxEDzkt8iSZfYvmzY+/Rdm7L9Lkk9EbFWr52dOgr913naLaHaKmnxkOnFapylQufYaft0\n",
+       "SbK9QFJPi9uDY7A9WY1k6o6IuKs5mz7sIBGxW9K/SfoV0Xed4iJJV9l+XtI3Jf2q7TtE/3W0dkuo\n",
+       "Vks6w/YS21MkXS3puy1uE3K+K+na5utrJd11jFi0kG1LulXSxoi4echb9GGbsz1v8Akw29Ml/Zqk\n",
+       "taLvOkJE3BQRiyNiqaT3S/pBRHxA9F9Ha7uR0m3/hqSb1bjJ8taI+EyLm4RR2P6mpEslzVPjev9f\n",
+       "SLpb0j9LepOkzZLeFxGvtKqNGF3zqbAfSnpSr11a+LSkVaIP25rtc9S4aXlC898dEfF523NF33UU\n",
+       "25dK+kREXEX/dba2S6gAAAA6Tbtd8gMAAOg4JFQAAAA1kVABAADUREIFAABQEwkVAABATSRUAAAA\n",
+       "NZFQAWg52480//uztq85wcu+aaR1AcCJxDhUANqG7berMcjhlYnPTIqIw8d4f09EnHIi2gcAo+EM\n",
+       "FYCWs723+fKzkt5me63tlbYn2P687VW2n7D9R834t9t+2Pbdkp5qzrvL9mrbT9n+cHPeZyVNby7v\n",
+       "jqHrcsPnba+3/aTt9w1Z9oO2/8X207a/Nr5bA0AnmtTqBgCAXit98ylJnxw8Q9VMoF6JiPNtT5X0\n",
+       "I9v3NWOXS1oWEf/TnP5QRLzcrG23yvadEXGj7Y9GxPIR1vVbkn5Z0pslzZf0mO0fNt97i6SzJW2X\n",
+       "9Ijtt0YElwoBjIozVADaiYdN/7qkD9peK+knkuZK+oXme6uGJFOStNL2Okn/JWmxpDPGWNfFkr4R\n",
+       "DT2SHpJ0nhoJ16qI2BaNeyLWSVpS428C8AbAGSoA7e5jEXH/0BnNe632DZt+h6QLIuKg7QckTRtj\n",
+       "uaHXJ3CDZ696h8zrF8dKAGPgDBWAdrJH0tAbyL8v6SO2J0mS7TNtzxjhc7MlvdxMpn5J0gVD3usb\n",
+       "/PwwD0u6unmf1nxJl0hapdcnWQAwJv6vC0A7GDwz9ISk/ualu9skfVGNy21rbFtSj6TfbMYPfUT5\n",
+       "Xkl/bHujpE1qXPYb9GVJT9p+PCI+MPi5iPhX2xc21xmSboiIHttnDVu2RpgGgKMwbAIAAEBNXPID\n",
+       "AACoiYQKAACgJhIqAACAmkioAAAAaiKhAgAAqImECgAAoCYSKgAAgJpIqAAAAGr6f7xE4rRkFyo0\n",
+       "AAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792bb3b910>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAGNNJREFUeJztnVmM7Fldxz+na1+6qrqW7q5ebt+ZuTMDTEzgBU2AyAMh\n",
+       "Q0xAXyQkRoNofFA0SiLig6D4gCYQow9EZYlbQKOBgInKYETxwQUzw4CyzMxdeu/au/a1jw9dv8Op\n",
+       "ukvfWrvrzv+bnPyram7/51TVt37n9/ud3+97lNYaBw5GwdJlT8DB4sEhjYOR4ZDGwchwSONgZDik\n",
+       "cTAyHNI4GBljk0Yp9axS6rtKqZeUUh+c5qQcXG2ocfI0SikX8D3gbcAB8N/Ae7TW35nu9BxcRYxr\n",
+       "ad4IvKy1vq217gCfB941vWk5uMpwj/l3m8Ce9Xwf+GH7HyilnFTzgkNrre71+riWxiHEqxjjkuYA\n",
+       "2Laeb3NubRy8CjAuab4BPKmUuq6U8gLvBr40vWk5uMoYy6fRWneVUr8E/BPgAj7tRE6vHowVcj/U\n",
+       "jR1HeOExbUfYwasYDmkcjAyHNA5GhkMaByPDIY2DkeGQxsHIcEjjYGQ4pHEwMhzSOBgZDmkcjAyH\n",
+       "NA5GxrhFWAAopW4DZaAHdLTWb5zGpKYNpdTAY7fbjcvlMtezszO01macnZ0NDHkd4LLamIffg1KK\n",
+       "paUllFID87bnOitMRBrOi7HeqrUuTGMys4J8wEtLS7jdbiKRCMvLy0QiEUKhEN1ul06nY0az2aTR\n",
+       "aNBsNmk2m5ydndHr9cz1MuZvE8XtduPxeMzo9Xq0220zzs7OzN/OgkCTkgbgnjuhVwXyQbtcLlwu\n",
+       "Fz6fj5WVFdbX10mn0yQSCUOORqNBo9GgXC5zenpKuVwGoNPpoJSi0+lc6nuQ4fV6CQQCZnQ6Her1\n",
+       "OrVajV6vN3OrOA1L81WlVA/4Y631n05hTlOHUsosRz6fj3g8zvb2Nk888QRbW1tUq1UzKpUK2WwW\n",
+       "j8cDQLvdNkvDZViZ4fcgxA+FQoTDYZaXl2m1Wiil6PV6tFqtAeLMApOS5k1a6yOlVAp4Tin1Xa31\n",
+       "16cxsUkhX/TS0hI+n8/8KiORCOl0mu3tbR5//HF2dnY4PT01lqVUKqG1pt1uU61WzRcl/sNlvAeP\n",
+       "x4PP5zMjHA4TjUaJRqNEIhHq9TpKKbrdLvV63SylskxNm0ATkUZrfdS/ZpVSX+C8teXSSSNfsFIK\n",
+       "r9dLIpEglUqRSqVYXV1lZ2eH7e1tVldXiUQixlfpdrs0m03c7vOPRZ53Oh263e6ArzBLDPswy8vL\n",
+       "xONxEokEiUSCSCRCOBw2o1gs4vP50FpTr9cH3k+32536/MYmjVIqCLi01hWlVAh4O/DbU5vZmLA/\n",
+       "bDHliUSCnZ0dHnvsMa5du0YqlSKZTJJKpYhEIvR6PTqdDu12m3q9bpambrdLq9Wi2+2aX++83oPt\n",
+       "w4TDYdLpNNeuXePatWtEo1GCwSDBYJBAIMDJyQlnZ2fU63Xy+bx5L1rrmSxVk1iaNeALfRPqBv5K\n",
+       "a/2VqcxqQgyv/0KaZ555hqeffppQKEQwGCQUCuHz+cyH3Gq18Pv9d1kaCWXnRRpgwHlfXl5mfX2d\n",
+       "J598kte97nXEYrG7lqt6vU6xWOTg4IBGo2EIM4sldWzSaK1vAa+f4lwmgu3D2D5ANBollUoZx/ep\n",
+       "p54a+BVrrc1VTLodfrfb7bm/F3HavV4vXq/XRHs7Ozs89dRTRCIRQ6qlpSWq1SqRSIRAIIDH45m5\n",
+       "DzaNkPvSIR+QECYWi5mRSqXY2dkhlUoRDodxu920221DiGazyeHhIUdHR2bs7e2Ry+Wo1+uX8n68\n",
+       "Xi/Ly8tmbGxskEwmiUajBAIBAOr1ukkV7O7ucnx8TLFYpFar0Wq1aLfbM4uiFp409vrvcrnw+/2s\n",
+       "rKyQTqdJp9Nsbm5y/fp1VldXCYfDuFwuut2uCa9LpRJ7e3tm7O/vk8/nKRQKl0qaSCRinPeNjQ1S\n",
+       "qZTxZTqdDrVazcxTSFMoFKjVajSbTeOHOaS5D4bzMLFYjI2NDR5//HEee+wx1tbWjKVxuVx0Oh2q\n",
+       "1Sq5XI5MJsPu7i63bt3i1q1b3Llzx/yCm83mpbwfmzTb29tsbm4a0tjJvGw2y97e3j0tjR1yTxsL\n",
+       "Txo7te71evH7/cRiMeMDPPHEEyanEQqFWFpaotPpUKlUyOVyHBwcmA/+9u3b3Llz59Leh/ggfr+f\n",
+       "aDTK6uoqW1tbrK+vE4/HCYVCuN1uut2uSULu7u5ycHBANpulVCrRaDRm7octPGkAY2ECgYDxAyKR\n",
+       "yABZfD4fLpcLrTWNRoNiscjR0RG7u7tkMhlOT09ptVqXMn+JkmREo1ESiQRra2tsbW2RSCRMlFcs\n",
+       "FsnlcuRyObLZLNlslkKhQKVSodVqzWVDdeFJI7vWPp+PYDBoCBOJRIjFYkSjURNJyY62kOb4+Jjd\n",
+       "3V1yudylkkYceImWIpEIiUSC9fV1tra2CIVCZu+rVCoNECaTyVAoFKhWq2ZZmjUeOdLIfoxtaexf\n",
+       "ca/Xo9FoUCqVjKWpVqvGF7gM2GkC2eqIx+Osra2xubmJy+WiVqtRr9ep1+sDpMlmsxSLRVqtlkOa\n",
+       "izCcl/H7/YRCIWNlpPwhHA4P1Ma0Wi0qlQqFQoFMJsPh4eFAXmbe81dK4fF4CAQCZlsgHo+bjPXa\n",
+       "2ppJD7TbbbM85fN5MyqVCr1eb+YblYKFI81wAZLs+MqvU/ZmAoEAbrebs7MzY0mq1SrFYpG9vT2y\n",
+       "2SyVSoVOp2O2COZVYDXsw9h7Y6lUihs3brCxsWGSeN1u15Rs5PN5isUi1WrVhNZ2sdg8sHCkgcEU\n",
+       "u9frJRgM3pc0vV6PWq1GJpMhl8txfHxsknfVapVOpzP3D932YTweD/F4nM3NTbO3tLW1RTqdJhKJ\n",
+       "mLzSMGkqlQqNRmNgX2weVXuw4KSxfRkhjZ05tS1NNpvlzp077O7uGksjpLFLPecBl8s14MMIaW7c\n",
+       "uMFrXvMaEokEsVjsoUgjlhLmV4p6IWmUUp8BfgzIaK1/qP9aHPhrYAe4Dfyk1ro0w3na8xn40CVi\n",
+       "isViJBIJkskkkUgEv99vHF8hzd7eHq+88gonJyfk83lqtdpMSgcuwrAfFo/HSafTXL9+naefftps\n",
+       "d3g8HrTWtFotarUap6enA6Sxl6e5zv8h/s1ngWeHXvsN4Dmt9VPAP/efzwUul4tQKDSwCSklD5ub\n",
+       "m6ytrRGJRPB4PCaJd3p6SrFYJJ/Pm/C6Xq9fWvmmXX0nDnsoFCIQCOD1eg1RTk9POTk54fj4eOAq\n",
+       "zu+88jLDuNDSaK2/rpS6PvTyO4Ef7T/+M+BrzIk4LpeLcDhMMpk0db6bm5tmpFIpPB4PbrfbRB3D\n",
+       "pKlUKtTr9UuxMvCDZGQoFCIajRrS+P1+Q5pms2lCbJswJycnZo9pXiH2XfMf8+/WtNYn/ccnnNfW\n",
+       "zAVLS0uEQiGSySRbW1vs7OyYzcl0Os3KyoqpWJM9pmHSNJtNWq3WlbI0wWDQkKbVatFsNs28hTCZ\n",
+       "TIaTkxMT9Q13HswLEzvCWms9T309l8tFMBgkHo+zsbHBtWvXWFtbMyMSiVCpVIw1qVarpva3UCiQ\n",
+       "z+dNOeS8CsXt8k1gwBdbWVkhGo0SDofN8iSbpaVSiePj4wErc5klG4JxSXOilFrXWh8rpdJAZpqT\n",
+       "ehjcby2XCjuxNFI7Yye/Zh0p2Yk7ON+A9Pv9BAIB/H4/6+vrbGxsmJFOp4nH4wSDQZRSNJtNCoUC\n",
+       "BwcHvPLKK2Z/rFqtXmpHhGBc0nwJ+Bng9/rXL05tRmPCJoNdgSc1vhJlzCu0tpOQgUBgoDAsnU4P\n",
+       "kCaZTLKyskIgEEApRaPRoFAosL+/z0svvcTx8TH5fJ5qtXopy9EwHibk/hznTm9SKbUH/BbwMeBv\n",
+       "lFLvox9yz3KSF8EmgVgau1hcugmGk2AzlMMdKA4LBoOsrKyYJVTIsrm5ycbGhtnuENLYluall14i\n",
+       "n8+bRr6FsDRa6/fc5z+9bcpzGQvDhLFJIxZGliX5Iu3Hw71BYx5lNPDYrt+VFIHkYiQ1YFsbr9dr\n",
+       "+soBY2kODw+5efOm6fS8KljIjPCDIG2rwWDQEGdzc9NU6IdCobsa/GWHWGprR4Xb7R4YUuIgY9iH\n",
+       "SSaTpqhKEnh2z3ipVDIh9VU8N/2RI400yEmVnpj7s7MzU3RuR09SBVcul6lUKlSr1ZH/n+Lo2g6v\n",
+       "9CQFAgHTpCdDCsUk8yu+lyxBUrZ5WSH1RXgkSePxeAx5JFnmdrsJh8Osrq4asojagpQa5HI50yg3\n",
+       "CuxuRxk2MeLxOCsrK8TjceLxuMnHyIaldEVI9rpUKlGtVk3D21XDQpPG9kPksU0WwCwBktupVCrG\n",
+       "15EOysPDQ4LBoGmSGxVSIWhfZUi/tT2G+5FkiSyXy+RyuQFL45BmCrAr705OTvD7/abJX9LxduQC\n",
+       "5z5HIBCg1+uZTUwZIiNiF3OPCrEuouQgVxnBYBCfz2dIOexTCVkODw/Z39/n4ODAbBU4y9MUIDW+\n",
+       "QhrZLZZlwC7vhB8sV36/HzhvD7G/sF6vh8/nIxKJkEwmp+LT+Hy+u557vV4zJ0k+Si5JOiMODw+5\n",
+       "ffs2h4eHJsx2LM0UYJPG7XYbVYWVlRXq9Trtdtv8om31K2lZHc4Ka62JRCImcppG9CTDlmiTq1LK\n",
+       "kEb6x21Lc/v2bVP361iaKcEmjSTr4vE4q6urA2GqXd0nX6LP57srpyKY5y9aSCMbk8OkOT09NZGU\n",
+       "Q5opQGttPnCXy8Xp6SlHR0f4/X601hQKhbuWBilokqu9hMhyZQs02vp1D7MTbveGdzodEz3Jddjx\n",
+       "FcUq2VCVUL9Wq9FoNMwO/DxLUEfBQpKm0+kMZHaPjo6MoM/x8bHJj0iuRHIn8ppENJIIlP0qCcWl\n",
+       "CF2+yItQq9XMqFarA8m8cDh8T9KIFo7kh6RFRUgjKYGriIUkjRRPiTOptaZWq5HL5Uxtiq1BIzUr\n",
+       "y8vLRKNRo6MXCATu2naQGpxisUihUKBUuriKtVgsDowbN24AGDGiYdiWRso4qtXqAGnmXew+Csat\n",
+       "Ef4I8HNAtv/PPqS1/sdZTdKGkEaI02g0qNVqZLPZgYo4O8kmSbWVlRWSyaQJsWOxmLmnkEa09gqF\n",
+       "gil8ughS6yKj2+0SDodZX1+/55cupKnVapTLZcrl8l2kucp4GEvzWeCPgD+3XtPAJ7TWn5jJrEaA\n",
+       "XT8jkPyMvWlpZ1wlxD06OiIejw+Ev51Oh0KhYEaxWLxwDpLFlUL14T4qe9ui1+tRLBbJZDIcHByw\n",
+       "v7/P4eHhlc7LDGPcGmG4QvrBNnHEzzk7OxvY06lWq8anyeVy7O/vm/pc+wsV7ZpRfBopH202m/ds\n",
+       "vLPFoaWJP5PJsLe3x82bN029TL1ev5LL0TAm8Wner5T6ac4Pdv/AvFpYhmFr4dkRkERYEqHYCt92\n",
+       "8k0cYTuCkvzJKHkb29kdLvYSAovAtSQm9/f3uXXrFoVCwXRIPMqk+STwO/3HHwU+DrxvKjMaA+KT\n",
+       "CGRrwMa98jP306QbtbZmOCNsE1lEoWVTslarGUuzv7/PzZs3zeakhNlXHWORRmttvEOl1KeAL09t\n",
+       "RlPC8Bc+y1+waOTZOn8icQKYzgLZTRcRonK5bHSK5yk5OynGIo1SKi3C08BPAN+a3pQWDxKJSTnn\n",
+       "+vo6sVjM7He1Wi3TWSAbkrlcjnK5TLvdvqfzfJUxTo3wh4G3KqVez3kUdQv4hZnO8opDpGfX1ta4\n",
+       "fv36XaSxlbdu3rzJ4eGhUa2QRN6iEAbGrxH+zAzmsrCwLc21a9cGSCOVgyKiJBuSInkm0q2LhIXL\n",
+       "CF8FSBQme1m2AJF0eUrxl123I86w7C3NS4Ro2nBIMwakElCG1P6KTyNbGCJ1Mpw8XCT/5V5wSDMG\n",
+       "pHBdNP1swqytrZmWFBFVsjPTw201iwiHNCNCKgGDwSCxWMwsTba1sZOMYmnsDVFb8mwR4ZBmDAhp\n",
+       "RH3L1iqWsxds/0UOIZPd7GazOdOzC2YNhzRjQNQ4ZQd9eXnZlJMCRu5M9q+KxaI5uU52s+XYwEWE\n",
+       "Q5oxIDXHYmnsY3MAc3ZBuVweqMuRgivZ2LwsUaVJ4ZBmDIilGSaNfbiYCCtKeYWQxhaHhMs753sS\n",
+       "OKQZA/dankQY8uzsbKBYXPaZpFh8kfaY7oeHEWp0MITh5WmYNI1Gg9PTU7OTLQd2NJvNhbQsw3As\n",
+       "zYiwZemFNKKXN3z2QjabfSRJ80BLo5TaVkr9i1Lqf5VS31ZK/XL/9bhS6jml1PeVUl9RSsXmM92r\n",
+       "AQm57eXJ5/OxtLRkSHM/S/Mo4KLlqQP8qtb6GeBHgF9USr2WS9QRvgwISaLRqJE6k7OxRVzRbrlt\n",
+       "t9sm5BYFCBGKfhQszQOXJ631MXDcf1xVSn0H2OQSdYTnDfvQDulwECn9YDA4IE0iFYRSmyylptIu\n",
+       "vKh5mWE8tE/TLy5/A/CfXKKO8GVABAKkFSaVShGLxQiFQqZDU6Iiux5YSGOLRT7ylkaglAoDfwf8\n",
+       "ita6MtQDPVcd4XlDLM3y8jKJRIJ0Ok0ymSQWixEMBvF6vYYMdhmEbWmktfdRIAw8XOWeh3PC/IXW\n",
+       "WqRfL11HeJ4Q0iSTSaOZJ2G2FI7b8mfVatUcQGpbF1uE2h5298Ii5HAuip4U8Gng/7TWf2D9J9ER\n",
+       "hiuiIzwr2JYmmUySTqdJJBIsLy8bta1OpzOQAS6Xy3cd2GGfHS7FW16v12jXiGzK/TokrhIusjRv\n",
+       "An4KeFEp9Xz/tQ9xxXSEZw3xaRKJBBsbG/ckjew13Ys0tq6wLRUrQ0omhltxriouip7+nftboyuh\n",
+       "IzxriIafbWlERmTY0sjZmOVy2RzgJfcY1suxdXNEAWMRCANORvihIF+4dGfaywkMhtoirmjndoY1\n",
+       "cWSpEjLVajWjHnFVxRltOKSZATweD6FQyEjbh0KhgfPChyv75ORe6SO/6s6wQ5opYFjDzy4HbbVa\n",
+       "rKysmKMSk8mkadMVy+Tz+YyY0qPgCDsYESJ0LZZGaz1wiMbGxoZxnBuNhjm7qVqtksvlHNK8WuB2\n",
+       "u40sbSwWw+VyDTwXSxOJRPD5fCZ7LMJGktNxHOFXESSPI1YmFouZRF+9Xjc5GXF6S6US+XzejKOj\n",
+       "IwqFwiMvNeLAgtfrNYdjBIPBu9Q+7VNe6vW6kRqRowbl2OR6vX7lnWBwSDMVSAgeDAYHBBblKod1\n",
+       "5HK5AX2ag4MD9vb2zHmbV1WhfBgOaR4Cw+KNtg8ie0f2LrfdFNfpdMjlcmSzWTOOjo7IZDLm8Ay5\n",
+       "n11wfpXhkOYCiD6xHA/o9/tN6CyJOFusWvSBbW3hUqk0MMSXkWo+u+tyEeCQ5gIIafL5vJFGs4/V\n",
+       "cblcxuGVIb1OoitsC1MPX6Wib5FEAR5IGqXUNudSsKucCxj9idb6Dy9TR3je0FrTaDTI5/MAZimR\n",
+       "SCkQCBgtYDmh7uTkxJylfXx8bEQf5Tq8fC1aD9RFlkZqhF/oF2L9j1LqOa6QjvA8ID6NUopOpzNw\n",
+       "bpMcvWOPTCYzMERexK7esx3lRcO4NcJwhXSEZw0RrxanVyRERFpfcjKyTA0fKSj+yrwOk5811MNO\n",
+       "vl8j/K/AM8AHgPcCp9xHR/hRKgGVsyZlDJ8iJ5GP5GZsEsnxO3YYDpMd6TwvaK3vaRgeijT9pelr\n",
+       "wO9qrb+olFrlB/7MR4G01vp9Q39zdT+NEWGXMSil7jr4azgvYyug20vQIhDFxtik6dcI/z3wD0Ml\n",
+       "n/LfrwNflsM2rNcX45NxcF/cjzRj1Qj3i8kFr3od4VcbHmhplFJvBv4NeJHziAngN4H3AAM6wlYf\n",
+       "lPytY2kWHBP5NOPAIc3iY6zlyYGDe8EhjYOR4ZDGwchwSONgZDikcTAyHNI4GBkOaRyMjJnlaRw8\n",
+       "unAsjYOR4ZDGwciYKWmUUs8qpb6rlHpJKfXBKdzvtlLqRaXU80qp/xrj7z+jlDpRSn3Lem1sedv7\n",
+       "3O8jSqn9/hyfV0o9O8L9pirB+4D7jT1H4O7m9WkNwAW8DFwHPMALwGsnvOctID7B37+Fc7HJb1mv\n",
+       "/T7w6/3HHwQ+NuH9Pgz82pjzWwde338cBr4HvHbcOT7gfmPPUWs9U0vzRuBlrfVtrXUH+Dzwrinc\n",
+       "d+wyU63114Hi0Mvv5FzWlv71xye8H4w5R631sdb6hf7jKmBL8I48xwfcb+w5wmyXp01gz3q+zw8m\n",
+       "PC408FWl1DeUUj8/4b0Es5C3fb9S6ptKqU+Pq+Y+bQle637/MekcZ0maWcTyb9JavwF4B+fq6W+Z\n",
+       "5s31uR2fdN6fBB7jvN7oCPj4qDcYluCddI79+/1t/37VSec4S9IcANvW823Orc3Y0Fof9a9Z4Auc\n",
+       "L4GT4kQptQ6mInEieVutdUb3AXxq1Dk+SIJ3nDla9/tLud+kc5wlab4BPKmUuq6U8gLv5lxKdiwo\n",
+       "pYJKqeX+4xDwdqZTZjpVedtJSmGnLcE7s3LdSaKZh/De38G5x/4y512Yk9zrMc4jsBeAb49zP+Bz\n",
+       "wCHQ5tzfei8QB74KfB/4ChCb4H4/y3lH6ovAN/tf7toI93szcNZ/j8/3x7PjzvE+93vHJHPUWjvb\n",
+       "CA5Gh5MRdjAyHNI4GBkOaRyMDIc0DkaGQxoHI8MhjYOR4ZDGwchwSONgZPw/UDzRgG/E2K8AAAAA\n",
+       "SUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792ba9ddd0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEvtJREFUeJzt3X+QXXV5x/HPJxuS7OYXQwNYdbNLG20loxWrDoKYpNoO\n",
+       "ZQRta1XaqrUdOx21UqqOyNj+4bSj1elIHaadsVB/4K+2alGmVaQ1iRFKIpgQSFAMYyhoZQkkkt3N\n",
+       "j014+se9G5ewN3uePfnuuTe+XzMM95z73HO+93zPOfvknHO/jyNCAAAAmL15TTcAAACg15FQAQAA\n",
+       "1ERCBQAAUBMJFQAAQE0kVAAAADWRUAEAANQ0v8mV22bMBgAA0DMiwtPNL5pQ2b5Y0jWS+iRdFxF/\n",
+       "e3zM+973vqd8buPGjVqzZs2s13v06NFUfGYsrieeeKJIbC+OB2ZPu0/p1ltv1YUXXviked2wjSXp\n",
+       "yJEjlWMPHz5cJPbgwYNFYufNy11w7uvre8q8HTt2aPXq1U+Zf9ppp9VabieZY3V0dLRy7N69eyvH\n",
+       "StK+ffsqx46Pj1eOPXToUOXYzD7UaRvv379fS5cufcr8TP8NDAxUju3v768cu2jRosqx8+dX/9OU\n",
+       "2d+kzuet6WTOF5ljdbp9aGRkRGeddVal2BM5cOBA5dhS57iJiYnKsZlzQPZv+1wrdsvPdp+kayVd\n",
+       "LOlcSZfbfk6p9QEAADSl5DNUL5a0KyJ2R8SEpM9LelXB9QEAADSiZEL1DEkPTpl+qD1vRkNDQ0Ua\n",
+       "hLkxODjYdBMwS2eeeWbTTUANCxYsaLoJmKXFixc33QTUVDKhmvVDQcPDwyexGZhrK1eubLoJmKXp\n",
+       "nuFA71i4cGHTTcAskVD1vpIPpf9Q0tRLFYNqXaV6ko0bNx57PTQ0RDIFAAB6TsmE6g5Jz7I9LOlH\n",
+       "kl4n6fLjg+r8mg8AAKAbFEuoIuKI7bdLulmtYROuj4h7S60PAACgKUXHoYqIr0r6asl1AAAANK3R\n",
+       "kdKl6r9KyQwMmRkUTsoNDpkZWKzXlivlBrLLDN6WWW4mtuT3KzUYaalB77IDAJYadLJUbEZ2uZlt\n",
+       "UWrgxMyAk9kBbTP7UeYYGRsbqxybOS9nBhfNDFoq5Y7rUuetkue4zGCry5cvrxybGTg4M4hrpq+z\n",
+       "v2LNHFNV94sNGzZ0fI9afgAAADWRUAEAANREQgUAAFATCRUAAEBNJFQAAAA1kVABAADUREIFAABQ\n",
+       "EwkVAABATSRUAAAANZFQAQAA1ERCBQAAUFPjtfyq1qTK1OTJytQoKtWOUnXjMsuVcnWjStUHy3y/\n",
+       "bM22UjUTs9u5RBuyMjUFS9X9y/RHqWMk246+vr7KsZn6dZlj5ODBg5VjpVz9wcx+UWofeuyxxyrH\n",
+       "ZrdFqZp7mf0zU38wU5tPytXGK1WfL7PcUucWqdzfnU6KXqGyPWh7ve0dtu+x/Y6S6wMAAGhC6StU\n",
+       "E5KujIhttpdIutP2LRFxb+H1AgAAzJmiV6gi4scRsa39elTSvZKeXnKdAAAAc23OHkq3PSzpPEmb\n",
+       "52qdAAAAc2FOEqr27b4vSLqifaUKAADglFH8V362T5P0RUmfjogbj39//fr1x14PDw/rnHPOKd0k\n",
+       "AACAGY2Ojmp0tNp1oKIJlVtjDFwvaWdEXDNdzLp160o2AQAAYFaWLFmiJUuWHJseGRnpGFv6lt+F\n",
+       "kv5A0jrbW9v/XVx4nQAAAHOq6BWqiPiWGI0dAACc4kh2AAAAamq89Eym5EM3KFVipFtKl2RK62TK\n",
+       "aixYsKBybKZsQSZWyn2/UiVGSn2/TJkFKbdvlCpJlGlDydIsmRIqpUqMLF68uHLs8uXLK8dK0sKF\n",
+       "CyvHZr7f+Ph4kdj9+/dXjn388ccrx0rl/uYsW7ascuzZZ59dJFbKlYjJHH+ZPhkbGysSmymhJOVK\n",
+       "1VTdFtu3b+/4HleoAAAAaiKhAgAAqImECgAAoCYSKgAAgJpIqAAAAGoioQIAAKiJhAoAAKAmEioA\n",
+       "AICaSKgAAABqIqECAACoqfHSM1VlyiFky7iUKgWSKXNSqvRMpiSKlCu3kvl+mdIzpWKlXCmQTF9n\n",
+       "tlu2zVVlywxllCqhUio2u99n4jOlSzL7xcDAQOXYzLaQcueXTImfQ4cOFYnNHHtnnHFG5VgpV7an\n",
+       "v78/teyqRkdHK8fu2bMntezMds7InO9L7cuZ5Uq581bV73fttdd2fK/jWcT270gKSdOtJSLiS1VW\n",
+       "brtP0h2SHoqIS6t8BgAAoJec6J9ll6qVUHVSKaGSdIWknZKWVm0UAABAL+mYUEXEH9ZduO1nSrpE\n",
+       "0t9I+ou6ywMAAOhGM96otv0029fb/lp7+lzbf1xx+R+R9G5J5R7uAAAAaFiVJ/8+Ienrkp7env6+\n",
+       "pCtn+pDtV0oaiYitmv45LAAAgFNClZ+2rIiIf7F9lSRFxITtKj8DuUDSZbYvkbRI0jLbn4qIN04N\n",
+       "2rhx47HXQ0NDGh4ertx4AACAUjZs2KANGzZUiq2SUI3a/rnJCdvnS/rJTB+KiKslXd3+zBpJ7zo+\n",
+       "mZKkNWvWVGooAADAXFq7dq3Wrl17bPr9739/x9gqCdU7Jd0k6Rds3ybpTEmvmUW7ygy0BAAA0LAZ\n",
+       "E6qIuNP2yyT9klrPQn0vIiYyK4mIjZI2zhgIAADQg2ZMqGz3S3qrpJeqdZVpk+1/jIiDpRsHAADQ\n",
+       "C6rc8vuUpMclfVStK1S/J+kGSb9bsF0AAAA9o0pCtToizp0y/Q3bO09WA6rWjcrUosrWNCu17Exs\n",
+       "qRqBmdisbqhVmO3rTHymplkmNmPRokWVYzN1q6RcnblMrbvDhw9Xjh0bG6scm6lRdvBg7gJ6qbqU\n",
+       "CxcurBybqWmWPa4z55fMsZppR4m6alL+2MvUYsy0I1OfL7MvZ89xme+X6ZPMuSiz32fOLdk6qJll\n",
+       "Z75fJ1WOsu/YfsnkRPtXfnfWXjMAAMAp4kTFke+eEnOr7QfVeoZqpaTvzUHbAAAAesJMxZEBAAAw\n",
+       "gxMVR949ddr2WWqNeA4AAIApqhRHvsz29yX9QK2xpHZL+mrhdgEAAPSMKg+l/7Wkl0i6LyLOkfRy\n",
+       "SZuLtgoAAKCHVEmoJiJij6R5tvsiYr2kFxZuFwAAQM+oMkjDXttLJW2S9BnbI5KqD7gBAABwiqty\n",
+       "herVksYlXSnpa5J2iV8AAgAAHFOlOPLk1aijkj5RtDUAAAA96EQDe46qNZDndCIilp2MBpQoSZIp\n",
+       "qdEtMtuhVBkXqVxpnUw5hFLLzcaXKkmUacP4+Hjl2Ew5i2w7MiU4MsdfpiRKqfJMUq4EziOPPFI5\n",
+       "NlMC58CBA0WWK5U7v2RKe/T391eOzZQYKVlyKRObOZ4y/ZfZN6XcOaNUmaFM/2ViM/uQlNs3MuVy\n",
+       "OjnROFRL6i7c9umSrpO0Wq3k7I8i4va6ywUAAOgm1f95MTt/L+k/I+I1tudLql79EwAAoEcUS6hs\n",
+       "L5d0UUS8SZIi4oikn5RaHwAAQFOqP5SQd46kR2x/3PZ3bP+T7YGC6wMAAGhEyYRqvqQXSPqHiHiB\n",
+       "pDFJVxVcHwAAQCNKPkP1kKSHIuLb7ekvaJqEatOmTcder1y5UkNDQwWbBAAAUM2ePXv06KOPVoot\n",
+       "llBFxI9tP2j72RFxn6RXSNpxfNxFF11UqgkAAACztmLFCq1YseLY9H333dcxtvSv/P5MrXI1CyTd\n",
+       "L+nNhdcHAAAw54omVBFxl6QXlVwHAABA00o+lA4AAPAzofQtvxkdOXKkUlzJshOlyl+UanPJbZGR\n",
+       "WXYmNlPOYtGiRZVjpVx5gUzZgsz3y5SIOXz4cOXYqsfSpEzZicw+l9nGy5cvrxy7dOnSyrFLluQK\n",
+       "PSxeXH3M4cy2mJiYSLWjlExppMz+mSmhki2NVFWmP7LxmdIzmXNR5hyXPYdnjuvM/pk5v3RDCTOp\n",
+       "TPmwm266qeN7XKECAACoiYQKAACgJhIqAACAmkioAAAAaiKhAgAAqImECgAAoCYSKgAAgJpIqAAA\n",
+       "AGoioQIAAKiJhAoAAKCmxkvPVB0aPjM8fcmh+ksMZS/lShyUaq9UrgROph3j4+OVY/ft21c5Vsr1\n",
+       "SbaUS1WZEhWlSuVIuTIOmRI4pcqRZMpkZMrUSLlSNQMDA5Vjly1bVjm2VMkQqdz5M9PmTF9njr1s\n",
+       "6ZlMOzLbbcGCBZVjS5bX6u/vLxKbOUZK7ReZvw3ZZZ+M0khFr1DZfq/tHbbvtv1Z29X/OgAAAPSI\n",
+       "YgmV7WFJb5H0goh4rqQ+Sa8vtT4AAICmlLzl97ikCUkDto9KGpD0w4LrAwAAaESxK1QR8Zikv5P0\n",
+       "v5J+JGlfRPxXqfUBAAA0peQtv1+U9OeShiU9XdIS279fan0AAABNKXnL74WSbouIRyXJ9pckXSDp\n",
+       "M1ODbrvttmOvBwcHNTg4WLBJAAAA1ezatUu7du2qFFsyofqupL+03S/poKRXSNpyfNAFF1xQsAkA\n",
+       "AACzs2rVKq1aterY9M0339wxtuQzVHdJ+pSkOyRtb8/+WKn1AQAANKXowJ4R8SFJHyq5DgAAgKZR\n",
+       "egYAAKAmEioAAICaGq/ll6mLVUqp+nWZ2FJtyMosO1N/MPP9StUSyy47U0cvU28rU8crUx+sZM2v\n",
+       "zH6RqX+WkdkWmbpjknT66adXjs2cszK1x/bu3Vs5NlvDMlMbL1MTMlOrMFNfMdN/+/fvrxwr5eog\n",
+       "Zs4vmeM68/0y+2a2HZn988CBA5Vjx8bGirQhU1NUKldPtxOuUAEAANREQgUAAFATCRUAAEBNJFQA\n",
+       "AAA1kVABAADUREIFAABQU1cmVLt37266Cajh/vvvb7oJmKXt27fPHISuRf/1rttvv73pJqCmrkyo\n",
+       "HnjggaabgBpIqHoXf5B7G/3XuzZv3tx0E1BTVyZUAAAAvYSECgAAoCZnS3ec1JXbza0cAAAgKSKm\n",
+       "rWHWaEIFAABwKuCWHwAAQE0kVAAAADV1XUJl+2Lb37X9fdvvabo96Mz2P9t+2PbdU+adYfsW2/fZ\n",
+       "/rrt05tsIzqzPWh7ve0dtu+x/Y72fPqwy9leZHuz7W22d9r+QHs+fddDbPfZ3mr7pvY0/dfDuiqh\n",
+       "st0n6VpJF0s6V9Lltp/TbKtwAh9Xq6+mukrSLRHxbEn/3Z5Gd5qQdGVErJZ0vqS3tY83+rDLRcRB\n",
+       "Sesi4vmSnidpne2Xir7rNVdI2ilp8mFm+q+HdVVCJenFknZFxO6ImJD0eUmvarhN6CAiNknae9zs\n",
+       "yyR9sv36k5JePaeNQmUR8eOI2NZ+PSrpXknPEH3YEyJivP1ygaQ+tY5F+q5H2H6mpEskXSdp8ldj\n",
+       "9F8P67aE6hmSHpwy/VB7HnrH2RHxcPv1w5LObrIxqMb2sKTzJG0WfdgTbM+zvU2tPlofETtE3/WS\n",
+       "j0h6t6Qnpsyj/3pYtyVUjOFwConWmBz0aZezvUTSFyVdERH7p75HH3aviHiifcvvmZJeZnvdce/T\n",
+       "d13K9isljUTEVv306tST0H+9p9sSqh9KGpwyPajWVSr0jodtP02SbP+8pJGG24MTsH2aWsnUDRFx\n",
+       "Y3s2fdhDIuInkv5D0q+KvusVF0i6zPYPJH1O0q/ZvkH0X0/rtoTqDknPsj1se4Gk10n6SsNtQs5X\n",
+       "JL2p/fpNkm48QSwaZNuSrpe0MyKumfIWfdjlbK+Y/AWY7X5Jvy5pq+i7nhARV0fEYEScI+n1kr4R\n",
+       "EW8Q/dfTum6kdNu/KekatR6yvD4iPtBwk9CB7c9JWiNphVr3+/9K0pcl/auklZJ2S3ptROxrqo3o\n",
+       "rP2rsG9K2q6f3lp4r6Qtog+7mu3nqvXQ8rz2fzdExIdtnyH6rqfYXiPpnRFxGf3X27ouoQIAAOg1\n",
+       "3XbLDwAAoOeQUAEAANREQgUAAFATCRUAAEBNJFQAAAA1kVABAADUREIFoHG2b23/f8j25Sd52VdP\n",
+       "ty4AOJkYhwpA17C9Vq1BDi9NfGZ+RBw5wfv7I2LpyWgfAHTCFSoAjbM92n75QUkX2d5q+wrb82x/\n",
+       "2PYW23fZ/pN2/Frbm2x/WdI97Xk32r7D9j2239Ke90FJ/e3l3TB1XW75sO27bW+3/dopy95g+99s\n",
+       "32v703O7NQD0ovlNNwAA9NPSN++R9K7JK1TtBGpfRLzY9kJJ37L99XbseZJWR8QD7ek3R8Tedm27\n",
+       "Lba/EBFX2X5bRJw3zbp+W9KvSHqepDMlfdv2N9vvPV/SuZL+T9Ktti+MCG4VAuiIK1QAuomPm/4N\n",
+       "SW+0vVXS7ZLOkLSq/d6WKcmUJF1he5uk/5E0KOlZM6zrpZI+Gy0jkjZKepFaCdeWiPhRtJ6J2CZp\n",
+       "uMZ3AvAzgCtUALrd2yPilqkz2s9ajR03/XJJ50fEQdvrJS2aYbmhpyZwk1evDk2Zd1ScKwHMgCtU\n",
+       "ALrJfklTHyC/WdJbbc+XJNvPtj0wzeeWSdrbTqZ+WdL5U96bmPz8cTZJel37Oa0zJb1M0hY9NckC\n",
+       "gBnxry4A3WDyytBdko62b919XNJH1brd9h3bljQi6bfa8VN/ovw1SX9qe6ek76l122/SxyRtt31n\n",
+       "RLxh8nMR8e+2X9JeZ0h6d0SM2H7OccvWNNMA8CQMmwAAAFATt/wAAABqIqECAACoiYQKAACgJhIq\n",
+       "AACAmkioAAAAaiKhAgAAqImECgAAoCYSKgAAgJr+H9OLZ8u3dMr8AAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b9b9550>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAGLBJREFUeJztnVtsY+tVx3+ft+/29i224yQzk+lp+9AHpNOX8lAq+lBV\n",
+       "p0Jq4YWqEgKVUvEABQESbXmgBV5KJSoED0ioLeKmFgQqKi/QVgKpPHA5qKcXzqVnTjOTjJ2L49jx\n",
+       "/f7xYK9vtj3JnLHjTOxk/6WtOJ5k5zv1v+tb31r/9d9Ka40LF7PAc9ULcLF6cEnjYma4pHExM1zS\n",
+       "uJgZLmlczAyXNC5mxtykUUq9oJR6VSn1ulLqk4tclIvlhpqnTqOUsoDXgPcBeeB/gI9orV9Z7PJc\n",
+       "LCPmjTTvAu5pre9rrXvAV4EPLW5ZLpYZ3jl/bwvYc3z/EPhx5w8opdxS84pDa63Oen/eSOMS4gZj\n",
+       "XtLkgduO728zijYubgDmJc2LwNuVUneVUn7gw8DXF7csF8uMuXIarXVfKfWrwL8CFvAl9+R0czDX\n",
+       "kfupbuwmwiuPRSfCLm4wXNK4mBkuaVzMDJc0LmaGSxoXM8MljYuZ4ZLGxcxwSeNiZrikcTEzXNK4\n",
+       "mBkuaVzMjHlFWAAope4DVWAA9LTW71rEolwsNy5EGkZirPdqrU8WsRgXq4FFbE9ndkJdXF9clDQa\n",
+       "+JZS6kWl1McXsSAXy4+Lbk/v1lrvK6UywDeVUq9qrb+9iIU9ayj1KGBaloXH48Hj8WBZlvl3+ZnB\n",
+       "YGCufr9/Jeu9SlyINFrr/fHXolLqa4xGW1aONEIIpRSWZREOh4lEIkQiEcLhMJZl4fV6sSwLy7I4\n",
+       "PT01V7VaRYRsN8XrZ27SKKXCgKW1rimlIsD7gd9b2MqeIZRSJrJ4vV5s22ZtbY21tTVSqRR+v99c\n",
+       "Pp+PQqFAoVBgOBxSq9WAEWGUUjeCOBeJNOvA18Yh2wv8rdb6GwtZ1TOGkEYiim3bZDIZtra22Nzc\n",
+       "JBQKmSsQCBAOhxkOh1SrVTweD8PhEHAjzZtCa70DPL/AtTwzeDwesx15PB4TQXw+H6FQiHQ6zcbG\n",
+       "Bnfu3OHOnTtmuwqHwwSDQdrtNuVymXw+j8fjmSDLTSDORRPhlYNSCr/fTyAQIBAIEAwGsW2baDSK\n",
+       "bdvEYjE2NjbY2Nhgc3OTXC5nfi4QCOD3+4lEIoRCIfx+v0mUh8Mhg8Hgiv/rng1uJGkCgcAESdLp\n",
+       "NOl0mrW1NfNVrmQyaaKQz+fDsiyi0aghjdc7+p9QcpqbgBtJGr/fTzQaJZVKsba2xq1bt9ja2mJr\n",
+       "a4uNjQ0TeeSSJFm2NSFNIBDAsiyGw+FEbnPdcWNII1HAsixCoRDxeHwid9ne3mZ7e5vbt2+brUu2\n",
+       "IyeGwyHRaNQcySORCJ1Oh06nAzCxRV3X/OZGkMZZrAsEAsTjcbLZrEl0c7kcmUyGWCxGMBjE5/Ph\n",
+       "9XrP3G6UUoRCIVKpFFtbW7ztbW8z9ZrT01OGwyHD4RCttfl63XDtSTN9nA4GgyQSCdbX17lz5w5v\n",
+       "fetbSSaTpFIpYrGY2XKEaGchHA6bba3VanFwcMDR0RHD4ZBWq2WqxcC1TI6vPWkAU7Tz+/0Eg0ET\n",
+       "aW7fvs1zzz1njtORSIRAIGCIdl5i64w0Wmt8Ph9aa1qtFicno4a/1vpaRhm4AaSR1oAQJhqNkkgk\n",
+       "yGQybG5ucvv2bbMdyVfBeR96KBQimUzS7/fx+XwMBgM6nQ71ep1yuUyn06Hb7dLr9UyVWK7rkCzf\n",
+       "CNJI4huPx8lkMmSzWRKJBOFw2ByjLct6qiOzUgqfz0ckEqHf7+PxeOh2u+Yovra2Rr1eN1etVqPd\n",
+       "bpur0+msfAX52pPG4/GYPCabzbKxsWFIEwqF8Hq9E8dpwZM+UCnwyfHd4/EQiURYW1tjc3OTk5MT\n",
+       "SqWS+epMlHu93sT9V5E4N4I0oVDIJL+3bt0im80Sj8cJh8OGNE7pw5tBIo3Ue4QwzWaTRqPBwcEB\n",
+       "hUKBg4MD03qQiFSv11eaMPAUpFFKfRn4KeBIa/1j4/dSwN8B28B94Ge11pVLXOfMEAJMk2Zra4t0\n",
+       "Ok08HjeRxonztg65n2xP0qcCiMViRlszGAxIpVKmACjbXq/Xo1armWKg3GsVifM0keYvgD8F/srx\n",
+       "3qeAb2qtPz82nv7U+LpyOGUOQhjbtkkmk2QyGdbX10kmk0SjUXw+HzApqhoMBqbWInUWIYkky+f9\n",
+       "TelDhcNhEokEnU4HrTX9fp9ms0m5XDYnrcFgsJKEgacgjdb620qpu1NvfxD4yfHrvwT+nSUhDTw6\n",
+       "YktdRk5M6XSa9fV1YrEYkUjEfID9fp9er0e326Xb7T5GoGAwaKQRTyKNvJbIJoQTwsjfFMIMh8OV\n",
+       "jDbz5jTrWuvD8etDRtqapYB8gFKXOSvSCAmckabb7dJqtWi32/R6PUOkwWCAbdtmWzrvb8KjynM4\n",
+       "HDaEiUQiVCoVDg8PDWn6/b4hzSoW/y6cCGut9bL561mWhc/nIxAIEAqFiEajxONx1tbWyGQyZiuR\n",
+       "JmO326XZbFKv12k0GqbG0u12TZ4ipAmFQhP5jTN3EkiuJEf9YrFIPp/Htm1CoZDJaVaRMDA/aQ6V\n",
+       "Ujmt9YFSagM4WuSiLgKlFF6v1yjsRPsiPSUhSr/fN1+Pjo4oFosUi0VKpZIhivxMKpUyHfFUKkUw\n",
+       "GDTRSk5GTiLKa5/Ph1KKeDxOLpfjueeeo9PpcHx8TKVSoVwuUy6XJ7arVdiq5iXN14FfAP5w/PWf\n",
+       "FraiC8JZAZYoEw6HCQQCppDX7/fpdrt0Oh2azSb7+/vs7u7y4MED8vn8RMMRMDob0dokEglzxePx\n",
+       "iQanHN8lKfZ4PMRiMXK5HO12G4/HQz6fJ5/Po7U2kW2Vos/THLm/wijpTSul9oDfBT4H/L1S6mOM\n",
+       "j9yXuchZcVakEdJIpOl0OjQaDU5PT9nf3+eNN97gtdde44033jD3ke0nk8lMXOvr67TbbWBU6JPo\n",
+       "IMdrIY5IQSXSeDweotEowWDQEKZYLJqItyri9Kc5PX3knH9634LXshBMRxqnNFMKedO9okKhwM7O\n",
+       "Dq+88govv/zyxJHd6/VSKpVMhVd6SwDBYJBYLGYIIomycyQGwLZt4FHPajAYUKvVODw8xOfz0ev1\n",
+       "VqovdS0rws4PbfrSWtNutzk9PeXo6Ij9/X1KpRK1Wo1ut2vuIQ3GwWBAu92mVqvh9XoZDodEIhFi\n",
+       "sRipVIpWqzVxWnPKPqdfO+s/sgU6r1XBtSQNPCLOdItgOBzSbrepVCocHR2Rz+cpFosTpJEPUP6f\n",
+       "L6SR343FYiSTSWq1Gs1m0xDmrEgx3eGeLh5Ok2cVcC1J4yTMWY3IVqtlIk0+n+f4+PjcSKO1Np1p\n",
+       "IU8qlSKbzVKv12m1WuZoL0nstMh8Oso4ibNqhIEbQhoncYQE1WqVYrHI/v4+Jycn1Ov1xyKNvBZZ\n",
+       "g9x7fX2dSqVCvV6n3W6buo6QYLrx+WaRxkmeVcC1I43ogG3bJpVKkclkTEdbElWfz0c4HCYej5NK\n",
+       "pcxJarp5eRamE23bts2R/jxNjiTezWaTWq1mIpS0LJz1mVUgzrWzT3OOqEjrQARXUksR0kgya9s2\n",
+       "wWDwqUgDk0d65wyU1GZg8sOXNkWz2aRardJoNAxppIC4KoU9uIakebNII6RyRppYLHamTOI8SKQJ\n",
+       "h8MTkea833+zSCMV4VUhzbXbnpwTlKlUymhnzoo08XjcFNjO62BP3xsmI41t26YG5Iw0TjgjTa1W\n",
+       "o9FomFxItqdVwrUjjSS6tVqNk5MTisUiXq/XkAQe/9BFfRcOh40fjfNyjuX6/X62t7dZX1/Htu0J\n",
+       "5Z9UdqeP+9LtTqVS9Ho96vU6p6enlEolIpEI7XbbJMnXoo2wapAWQb1e5+TkxESZeDxupgPOIo2T\n",
+       "OE4vGtmG5IpEIty9e5dsNott22bLk7+ttZ44sTlzLPn71WqVUqlkdD2A0Q6vQm5z7UijtTZa3HK5\n",
+       "TDQaNUP+EgWENDJS6yRMJBIxXWzpZMskg1zb29uGNF6vd6LWMhgMJpqVgCGNbJ3lcpnDw0NisZjx\n",
+       "upFIJeRZZsyrEf4s8EtAcfxjn9Za/8tlLXIWSKSR7SkYDJJOp2k2m+YDEUWfCK1s256wS3POaUej\n",
+       "UdPdzmQyrK2tkc1mzRiviKokNxEPPmfj0e/3G8LEYjFKpRKpVIp4PG5GYQaDwcSc1DJjXo2wBr6g\n",
+       "tf7CpazqAtBa0+v1aLVaVKtVwuEwtVpt4rQCj4gzGAzIZrO0Wi2jfXFqZSTSiAwiHo8TjUaxLItO\n",
+       "p0OlUqHRaBgBV71eNxYmcsm6JMqJhlgG9iSRlrHeZce8GmFYUv9g2Z6azSaWZREIBEyPqNPpGBWe\n",
+       "kMbj8ZDL5bAsC9u2zYco+Ywzp5FIJCKrTqdDv9/n+PjYXKVSiWw2Sy6XI5fLTRg8yu8JEcWiTeQa\n",
+       "Qtxlx0Vymk8opX6e0YPdf2tZRlickQZG1V8hjUQaIY3kNqJzyWazhmxnnZ6ETGItIuO3+/v7PHz4\n",
+       "kHw+z97eHnfv3qXb7eL1eo2pgAzVOWWg6XSaRqNhphUqlcq1Js2fAb8/fv0HwB8BH1vIii4IIQ2M\n",
+       "TiQej+ex7UmIIKSIRqNn5hHy3vQHWalU6Pf7Znva399nZ2eHe/fu8frrr9NqtYzhYy6Xm2g9OI//\n",
+       "mUyGfr9vuu7BYPD6kkZrbTTBSqkvAv+8sBUtAE6Vv3ywBwcH7OzsmGqx2KfJeO20cAoekUakofK1\n",
+       "WCxyfHxsvu7u7pLP5ymVSjQaDWq1mtEAHx8fG1WeRBnZGm3bpt1uk0gkzFpkAM/ZzFw2zEUapdSG\n",
+       "GE8DPwN8f3FLujjk6Asj0oikU2oi2WyWbDZr5rzPklAA5hgspzG59vf32d/fN6O3Qp6TkxMzmlut\n",
+       "Vg1pJNKEQqEJ8ti2Tb/fJx6Pm5qNTCv0+31DtmU7Tc2jEf4M8F6l1POMTlE7wC9f6ipngFOGIElx\n",
+       "pVKhUCiglKLT6ZiEMxKJkEgkzO86db1OiJRCEt3d3V329vbY3d3l4cOHxiFCGpFS8T05OeH4+Bif\n",
+       "z0cwGDTSC/leZKASaaT5KfUkGeRbNsyrEf7yJaxlYZgO6ZVKBY/HYyrF4vKQyWRMYuyMMtO1Eok0\n",
+       "x8fHFAoFHjx4wM7ODj/60Y/Y2dl5bKy3Xq9PRBqJKs46kWxDXq+XRCIxEWm63a4hzDLWba5dRXga\n",
+       "Em0ajYbZggqFgnG9krFb8QkOBAKPVXjz+TwPHz5kb2+PfD5PoVCYUPtNC6mcGmSZgJBBu0gkYo7Y\n",
+       "sj1KR15cLYLBIJXK6DAqIvZlwo0gjfMIrrXm4OCAQCAAQKvVeqz/5ExC+/0+e3t7Zjva29vj+PiY\n",
+       "crlMq9U6U3kneZTMWEm9SPQ3Mrgnx3jpyOdyOer1uulndbtdqtXq0jUxbwxpADNqK4SRXCWZTJor\n",
+       "Ho9PbDW9Xo+9vT0ePHjAgwcP2N3dNflLs9mc0AULhDTS0ZYoI+PB0j6Q0WFx0Go0Gmat3W6XWq12\n",
+       "rlnkVeLGkEb6Os1mE3iUp0gFN5PJmERWTi4inpIoc//+fXZ3d41BgKjuptFut+n3+9TrdSzLMltQ\n",
+       "IpEglUpNmAn4/X6zPckaJcIcHR25pLkqTOtvJSFWSpmI0m63jZzCaVDU7/cpFApmzEW0L0+qoUgS\n",
+       "K/UikXmWy2WKxeLE6clpLGDbNp1Ox0QikaBalrVUUws3gjTTcOY4MtctUoqzchoZ1G80GhPD+ufB\n",
+       "SVJxpWg0GoY0slXJKUlyHul4y0nKaYQ9PblwlbhxpHHmOFLCbzQaVCoV06CcHnBrNpu0Wi1jLC33\n",
+       "edKH5zQtkshWqVQoFotG0O4kjUg1gMdII/qcqyaL4MaSpt/vmyLfWW2E6dmnWQbbztoOncRMpVJm\n",
+       "zkoqxJKcS89KSON80suytBRuHGng2buJ93o908X2eDxG8F4qlSZ8+JRSxu7NacJkWRbNZtOc1q46\n",
+       "4txI0jxr9Ho9U1wcDAYkk8mJZ0qJvaxTvyMegZubm3i9XsrlsikcXnXEcUnzDCCRZjAY0Gq1SCQS\n",
+       "Ew8kSyaTRm8sxtYindjc3DTbo5giXTWeSBql1G1GMs8so+bkn2ut/0StgI/wMkEMH9vtNkopQxq5\n",
+       "pM0gNrXSSJXaUa/XM62JpScN0AN+Q2v9klIqCvyvUuqbwEdZUh/hZYTTNUIKjKenp+YkJbUaEYP5\n",
+       "/X5isZgRaUmJQH7nqo0DnkgarfUBcDB+XVdKvQJsseQ+wsuMadcKsXULh8PGJcvn8xmtjc/nMxXi\n",
+       "4+NjU0cSMl2FdOKpc5qxuPydwH+xxD7CywqndNRJGq21sVVrNpuGKDKIF4vFaLValEolIyTrdrtG\n",
+       "IHYVp6mnIs14a/pH4Ne11rUpSeTS+QgvM5yRBjDOWrlcziTL0mIQkVa9Xufg4IBEIkEkEqHVak3o\n",
+       "bZ41nka552NEmL/WWov169L6CK8CJCkWXY0o/A4PDzk4ODDP0pRLbPqz2Sybm5tYlmUmF8RMSfAs\n",
+       "os6bnZ4U8CXgZa31Hzv+aWl9hFcBIrmQmou0FwqFghltEZ9iEYbF43HW19fZ3t424zeDwWCiH/as\n",
+       "tqk3izTvBn4O+J5S6jvj9z7NkvsILzvEekRmt8U0Umzw5d/kFOX3+w1pxMZNCHNycjJhKXvlkUZr\n",
+       "/R+cb3y0lD7CqwAhi+hnJNLIeIvMfcfjcbTWE5EGMHKLUqlkrE7kOP8sNMVuRfgKML2VtNttYz/i\n",
+       "NFxKp9NGIyzmAVprI9ByRqYnicIWDZc0SwBpM5TLZfMshXQ6TbVapdlsmnHeaDRq+lCJRMI890Eq\n",
+       "zSKEdyPNDcB0QzORSFAul80MumxbYkAgNiUSaeRZCzKOc9lwSbMEENKIEUAymaRSqRjS+P1+YyIQ\n",
+       "CAQ4OTkxA3bhcNicoJ6VIZJLmiWAJMYSLZxu6rFYjPX1dRKJBMlk0kQbeVpeNptlMBiYKOVUF14W\n",
+       "XNIsAeS4LB92tVo1s1n9fp/T01Nu3bqFZVnGfcu2bTKZDLdu3QIetSdOT08vfb0uaZYAzsc6D4dD\n",
+       "arUaBwcH5hE/nU7HJMgbGxtm7CWdTlOv143tSbVaPdeWdpFwSbMEmJZOyFSlPBNKa21cupxd8HQ6\n",
+       "Ta/Xm2iAPgu9jUuaJYFTjC7uEiJ+Fy1xtVo1BgZyBM9kMqZuIyO/Z82XLxIuaZYQkhDLtEKr1TKe\n",
+       "N+KYpbUmGAySTCYnjuAinZBin3jcLBIuaZYQTp2MzF2JfUmlUjFqv2AwaOo2TjetdrtNp9O5NH+b\n",
+       "J26ASqnbSql/U0r9n1LqB0qpXxu//1ml1EOl1HfG1wsLX9kNhnzYkq9MR5pms2kijcyHT7tpybzU\n",
+       "ZRT75tUIL62P8HXA9LBdq9UyvoGJRIJut0s6nTZ5jbhPSOSRaU1R+C0a82qEYUl9hK8j2u02pVKJ\n",
+       "vb09AOOJLM6kcuqSSc1AIGD+/SoijYFDI/yfjHQ2S+kjfB0hpNH60cPfPR6PKfBNG2o7H1x/GaR5\n",
+       "qkP9eGv6B0Ya4TojH+G3AM8D+4x8hF1cEuTYvbe3x6uvvsq9e/coFArmGeESacTCxGkccCWRxqER\n",
+       "/hvRCOsl9xG+bpB6jViWHB0dUSgUWFtbIx6P02q1ODo6olKpPPaYw8uQScylEV52H+HrCKcnTrVa\n",
+       "pVAo4Pf7zfH66OiIYrHI4eGheU6mRKFFYx6N8O8AH1lWH+HrCCdhtNbUajUKhQKdTsc4oosxdrVa\n",
+       "NY+EluLgoqEuS+XlzkItFs7cxPmEGL/fP+Fw7nz21EWds7TWZyZELmlcnIvzSHP1FgQuVg4uaVzM\n",
+       "DJc0LmaGSxoXM8MljYuZ4ZLGxcy4tCO3i+sLN9K4mBkuaVzMjEsljVLqBaXUq0qp18cuoBe9332l\n",
+       "1PfGEtP/nuP3v6yUOlRKfd/xXkop9U2l1A+VUt9QSiWedI+nuN/cUtgnyGvnWuOlyXWdfv+LvAAL\n",
+       "uAfcBXzAS8A7LnjPHSB1gd9/DyMh2fcd730e+O3x608Cn7vg/T4D/Oac68sBz49fR4HXgHfMu8Yn\n",
+       "3G/uNWqtLzXSvAu4p7W+r7XuAV8FPrSA+86tKtJafxsoT739QUa2toy//vQF7wdzrlFrfaC1fmn8\n",
+       "ug44LXhnXuMT7jf3GuFyt6ctYM/x/UMeLXheaOBbSqkXlVIfv+C9BJdhb/sJpdR3lVJfmmW7c2LR\n",
+       "FrxTct0LrfEySXMZZ/l3a63fCXwA+BWl1HsWeXM9iuMXXfeFpbDTFrwXXeOi5bqXSZo8cNvx/W1G\n",
+       "0WZu6LFaUGtdBL7GaAu8KA6VUjkYKRK5oL2t1vpIjwF8cdY1PsmCd541nifXvcgaL5M0LwJvV0rd\n",
+       "VUr5gQ8zspKdC0qpsFLKHr+OAO9nMTJTsbeFBdjbjj9UwUxS2Kew4J1pjU+S6867RuDyTk/jjP0D\n",
+       "jDL2e8CnL3ivtzA6gb0E/GCe+wFfAQpAl1G+9VEgBXwL+CHwDSBxgfv9IqOn1nwP+O74w12f4X4/\n",
+       "AQzH/43fGV8vzLvGc+73gYusUWvtthFczA63IuxiZrikcTEzXNK4mBkuaVzMDJc0LmaGSxoXM8Ml\n",
+       "jYuZ4ZLGxcz4f041SDwzkyB1AAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b97ec50>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAE2tJREFUeJzt3XuwXWV5x/HfLwm5kavBBDSHRKi0wsSKVbkoIFU6lFG0\n",
+       "rVVpq9Z27HTUSqk6Ik77VztanY7oMO1opSp4oa1a1Gm9pDWQaDARTEJCUKCaNhFISi6cnFxPwtM/\n",
+       "9k48nJyT8z5Zec/eO34/Mwx7rf3std693rX2ebIu7+OIEAAAAE7chE43AAAAoNeRUAEAADREQgUA\n",
+       "ANAQCRUAAEBDJFQAAAANkVABAAA0NKmTK7fNmA0AAKBnRIRHml81obJ9taSbJU2U9KmI+NvhMR/4\n",
+       "wAeO+dzy5ct1+eWXP23eU089VamVuWV3Q2xNEyY0P2m5cuVKXXrppU+bV+v7HT58OBU/ODhYHLt/\n",
+       "//4qsXv37i2O3bNnT3HsxIkTi2MladKkYw//hx56SOedd94x86dOnVq83NNOO604NrNf7Nq1qzh2\n",
+       "+/btxbHZ+P7+/uLYffv2Fcdm9qHRtvGhQ4dG7Ndp06YVL3v27NnFsbNmzSqOnTFjRnFsrf1Nyv3G\n",
+       "HTp0qDg2c6wODAwcM2/r1q1asGDBMfN3795dvNxsOw4ePFglNvM7m/kNqDlupj1ijpRqQ7VLfrYn\n",
+       "SrpF0tWSzpd0ne3n1VofAABAp9S8h+olkh6JiE0RMSjpDkmvqbg+AACAjqiZUD1b0uYh01va88a0\n",
+       "aNGiKg3C+Ojr6+t0E3CC5s2b1+kmoIGTcckenXH66ad3ugloqObRd8IXO0moehsJVe8ioeptJFS9\n",
+       "K3OPGbpTzZvSfyZp6F/WPrXOUj3N8uXLj75etGgRyRQAAOgKmRvhayZU90p6ru3Fkh6V9AZJ1w0P\n",
+       "Gv40HwAAQDcY/vTf8RKsaglVRByy/U5J31Jr2IRbI+LBWusDAADolKrjUEXENyR9o+Y6AAAAOq2j\n",
+       "I6VL5YOylQ66JeUH/6o1sFit2JoDhmYGyswMeldrudnvl4nP3OCbGYhwypQpxbHTp08vjs1sN0k6\n",
+       "cOBAcWxmsMDMcjODBdYc1C+znTMy+0VmfzsZg7iOJtPmzHIzx17m9z4rs89l9uXM8Zfpv8ygrFLu\n",
+       "acHMoKiZPsn8xtUcxDXT5tLBSO+6665R3+OREAAAgIZIqAAAABoioQIAAGiIhAoAAKAhEioAAICG\n",
+       "SKgAAAAaIqECAABoiIQKAACgIRIqAACAhkioAAAAGiKhAgAAaKjjtfwytZLQkq1fV0umTlKt2lyZ\n",
+       "GoFSvfqDterMZeq7ZepnSbl6YpnYTL2tTH9ktkXNGo+16jZm9qFMPTpJ2r9/f3Fspk9K659JuTY/\n",
+       "+uijxbGZ7ybV+w3I7EOZ+nwzZswojpWkmTNnFsfOmjWrOLZWXcpaNW+l3D6XrYU6kqpnqGz32V5m\n",
+       "+wHbG2y/q+b6AAAAOqH2GapBSTdExFrbMyTdZ3tpRDxYeb0AAADjpuoZqoh4PCLWtl8PSHpQ0rNq\n",
+       "rhMAAGC8jdtN6bYXS7pQ0qrxWicAAMB4GJeEqn2570uSrm+fqQIAADhlVH/Kz/Zpkr4s6XMRcefw\n",
+       "91esWHH09dlnn61FixbVbhIAAMCYdu7cqV27dhXFVk2o3HpW/lZJGyPi5pFiLrvssppNAAAAOCFz\n",
+       "587V3Llzj05v2rRp1Njal/xeKukPJF1pe037v6srrxMAAGBcVT1DFRHfFaOxAwCAUxzJDgAAQEMd\n",
+       "Lz0zefLkorhapUukukPf11hupsRBtgRHpmRAphxJZrkZmTZIuf1o0qTywyMTW2u7ZUsnZMpq1IrN\n",
+       "7PeZ5e7bt684VsqVUMnsQ5kyPLNnzy6OnTdvXnGslCt1krFnz57i2P7+/uLYHTt2VImVcqVqMvvc\n",
+       "nDlzimMzD18tXLiwOFbKlarJfL/SG7Ml6cknnyyO3b17d3FsZn+Tcn1d+rdy2bJlo77HGSoAAICG\n",
+       "SKgAAAAaIqECAABoiIQKAACgIRIqAACAhkioAAAAGiKhAgAAaIiECgAAoCESKgAAgIZIqAAAABrq\n",
+       "eOmZ6dOnF8XVLLdSq6xNrXIymeVmv1um1EkmNlOCo1aslCsRk9l2mdhM6ZlMaYiDBw8Wx2aXnenr\n",
+       "008/vTh25syZxbGlvxVSviRRaQksqV6Zmkx5mMw2lnJ9feDAgeLYTGmPbDmgUmeeeWYqPlO2J1PG\n",
+       "JSNTxuWxxx5LLTtTniWzL2eOv3PPPbc4ttZvgCRNmTIlFV/iE5/4xKjvjfrXxfbvSApJI/0iRER8\n",
+       "pWTltidKulfSloh4dclnAAAAesnx/rn+arUSqtEUJVSSrpe0UVJ5GgoAANBDRk2oIuIPmy7c9kJJ\n",
+       "10j6G0l/0XR5AAAA3WjMGyNsn2n7VtvfbE+fb/uPC5f/UUnvlZS7qQkAAKCHlNxp+hlJ35b0rPb0\n",
+       "w5JuGOtDtl8laVtErNHI92EBAACcEkoeeTojIv7Z9o2SFBGDtg8VfO5SSdfavkbSVEmzbN8WEW8e\n",
+       "GrR06dKjr88555zU0wEAAAC1rFq1SqtXry6KLUmoBmwffc7U9sWSnhzrQxFxk6Sb2p+5QtJ7hidT\n",
+       "knTVVVcVNRQAAGA8XXTRRbrooouOTt9yyy2jxpYkVO+W9HVJ59heKemZkl53Au0qHzwJAACgh4yZ\n",
+       "UEXEfbYvl/TLat0L9eOIKB8NrLWMuyXdfWJNBAAA6G5jJlS2p0l6u6SXqXWWaYXtf4iI8iFyAQAA\n",
+       "TmEll/xuk9Qv6eNqnaH6PUm3S/rdiu0CAADoGSUJ1QURcf6Q6e/Y3niyGlBad6hWrbvayy6VqfmV\n",
+       "qUeXrWmWkWnzoUMlD4a2ZLZxpu5YVq2+zvTJrFmzimMzNbGkXF2sTK27TF/v3r27OHb79u3FsXv3\n",
+       "7i2OlXL7cqY+WKbm3uzZs4tjd+zYURwr5WoxZn4PM/typu5mpj8ef/zx4lhJ2rx5cyq+VKZPMvt9\n",
+       "pl6ilDv+MvtF5vcic4xkYjO/QzXbMZqSrflD25ccmWg/5Xdf4zUDAACcIo5XHHn9kJjv2d6s1j1U\n",
+       "Z0v68Ti0DQAAoCeMVRwZAAAAYzheceRNQ6dtz1drxHMAAAAMUVIc+VrbD0v6qVpjSW2S9I3K7QIA\n",
+       "AOgZJTel/7WkSyQ9FBHPkfQKSauqtgoAAKCHlCRUgxHxhKQJtidGxDJJL6rcLgAAgJ5RMqDRTtsz\n",
+       "Ja2Q9Hnb2yQN1G0WAABA7yg5Q/VaSXsl3SDpm5IeEU8AAgAAHFVSHPnI2ajDkj5TtTUAAAA96HgD\n",
+       "ew6oNZDnSCIiymtiHEdpmYPMEPndUm4lE1urzEnNMjy1SvbU2hbZZWf2uUxsxsBA+dX1TMkJKbd/\n",
+       "ZsqGTJs2rcpyM2UnMiVfpFwpkK1btxbHlpbWyrYhW1qn1nGd6ZMZM2YUx2b2oWw5kkzprsxxnSmD\n",
+       "1d/fXxybLTO0a9eu4tjBwcHi2My2yBzXmZI2mVgpV05m6tTmo0Idbxyq8r1/FLbnSPqUpAvUSs7+\n",
+       "KCK+33S5AAAA3aQ8VT8xH5P0HxHxOtuTJOX+2QgAANADqiVUtmdLuiwi3iJJEXFI0pO11gcAANAp\n",
+       "dW78aHmOpP+z/WnbP7T9j7ZzF0ABAAB6QM2EapKkF0r6+4h4oaQ9km6suD4AAICOqHkP1RZJWyLi\n",
+       "B+3pL2mEhOruu+8++nrRokVavHhxxSYBAACU2bx5s7Zs2VIUWy2hiojHbW+2fV5EPCTplZIeGB53\n",
+       "xRVX1GoCAADACevr61NfX9/R6VWrRi9lXPspvz9Tq1zNZEn/LemtldcHAAAw7qomVBGxTtKLa64D\n",
+       "AACg02relA4AAPALofYlv47IlE6Q6pWTqdWGbliuVK9ETKZ0ULZcQKYkQmbbHT58uDg2U2KkZjmS\n",
+       "gwcPFsdmjqnMNp4zZ05x7MyZM4tjsyUqMmVRMmVtMuVIMvtQ9tjLLHvfvn3FsZl9LrPcWttNypWe\n",
+       "ycTOmlVejW3+/PlV2iDlSsRkylVlt3M3qFUSbNT1jevaAAAATkEkVAAAAA2RUAEAADREQgUAANAQ\n",
+       "CRUAAEBDJFQAAAANkVABAAA0REIFAADQEAkVAABAQyRUAAAADXW89MzAwEBRXM0SKplSJ5mh7DOx\n",
+       "tdqQLVFR6/tlZMpOlO4/R2S2R61SC5kyJwsWLCiOnTx5cqodmXIy+/fvL46tVY5k27ZtxbGZkjZS\n",
+       "br/IlMA566yzqrQhc4xIuRIjGbVK2gwODhbHZkodZZedkSlJNGXKlOLYTEmbbPzcuXOrxGb25czv\n",
+       "RaYUl5Tb57J/S0ZS9QyV7ffbfsD2ettfsF2+FwEAAPSIagmV7cWS3ibphRGxRNJESW+stT4AAIBO\n",
+       "qXnJr1/SoKTptg9Lmi7pZxXXBwAA0BHVzlBFxA5JfyfpfyU9KmlXRPxnrfUBAAB0Ss1LfudK+nNJ\n",
+       "iyU9S9IM279fa30AAACdUvOS34skrYyI7ZJk+yuSLpX0+aFB99xzz9HXCxcuVF9fX8UmAQAAlFm/\n",
+       "fr02bNhQFFszofqRpL+0PU3SfkmvlLR6eNAll1xSsQkAAAAnZsmSJVqyZMnR6TvuuGPU2Jr3UK2T\n",
+       "dJukeyXd3579yVrrAwAA6JSqA3tGxIclfbjmOgAAADqN0jMAAAANkVABAAA01PFafvPnzy+Ky9ak\n",
+       "y8jUCeyGWne1livltkWm/mCtNmTq0Um5bZepjTdt2rTi2Mx2y9T8mjp1anGslKs9lumTTN24zHFd\n",
+       "s/7ZGWecURybqcWYqT32xBNPFMdu3769OFbK1a+bNKn8z0JmO2fqK2aW29/fXxwr1atrmPm9yHy/\n",
+       "efPmpdqRqW1Yq+5mZrmZ9mbqaErS7Nmzi2Ozv58j4QwVAABAQyRUAAAADZFQAQAANERCBQAA0BAJ\n",
+       "FQAAQEMkVAAAAA11ZUL1k5/8pNNNQAMPP/xwp5uAE7R27dpONwENrFu3rtNNwAlauXJlp5uAhkio\n",
+       "cNI98sgjnW4CThB/kHsb/de7SKh6X1cmVAAAAL2EhAoAAKAh1yzpMubK7c6tHAAAICkiRqzF1dGE\n",
+       "CgAA4FTAJT8AAICGSKgAAAAa6rqEyvbVtn9k+2Hb7+t0ezA62/9ke6vt9UPmPcP2UtsP2f627Tmd\n",
+       "bCNGZ7vP9jLbD9jeYPtd7fn0YZezPdX2KttrbW+0/cH2fPquh9ieaHuN7a+3p+m/HtZVCZXtiZJu\n",
+       "kXS1pPMlXWf7eZ1tFY7j02r11VA3SloaEedJ+q/2NLrToKQbIuICSRdLekf7eKMPu1xE7Jd0ZUS8\n",
+       "QNLzJV1p+2Wi73rN9ZI2SjpyMzP918O6KqGS9BJJj0TEpogYlHSHpNd0uE0YRUSskLRz2OxrJX22\n",
+       "/fqzkl47ro1CsYh4PCLWtl8PSHpQ0rNFH/aEiNjbfjlZ0kS1jkX6rkfYXijpGkmfknTkqTH6r4d1\n",
+       "W0L1bEmbh0xvac9D71gQEVvbr7dKWtDJxqCM7cWSLpS0SvRhT7A9wfZatfpoWUQ8IPqul3xU0nsl\n",
+       "PTVkHv3Xw7otoWIMh1NItMbkoE+7nO0Zkr4s6fqI2D30Pfqwe0XEU+1LfgslXW77ymHv03ddyvar\n",
+       "JG2LiDX6+dmpp6H/ek+3JVQ/k9Q3ZLpPrbNU6B1bbZ8pSbbPkrStw+3Bcdg+Ta1k6vaIuLM9mz7s\n",
+       "IRHxpKR/l/Rrou96xaWSrrX9U0lflPTrtm8X/dfTui2hulfSc20vtj1Z0hskfa3DbULO1yS9pf36\n",
+       "LZLuPE4sOsi2Jd0qaWNE3DzkLfqwy9k+48gTYLanSbpK0hrRdz0hIm6KiL6IeI6kN0r6TkS8SfRf\n",
+       "T+u6kdJt/6akm9W6yfLWiPhgh5uEUdj+oqQrJJ2h1vX+v5L0VUn/IulsSZskvT4idnWqjRhd+6mw\n",
+       "5ZLu188vLbxf0mrRh13N9hK1blqe0P7v9oj4iO1niL7rKbavkPTuiLiW/uttXZdQAQAA9Jpuu+QH\n",
+       "AADQc0ioAAAAGiKhAgAAaIiECgAAoCESKgAAgIZIqAAAABoioQLQcba/1/7/ItvXneRl3zTSugDg\n",
+       "ZGIcKgBdw/bL1Rrk8NWJz0yKiEPHeX93RMw8Ge0DgNFwhgpAx9keaL/8kKTLbK+xfb3tCbY/Ynu1\n",
+       "7XW2/6Qd/3LbK2x/VdKG9rw7bd9re4Ptt7XnfUjStPbybh+6Lrd8xPZ62/fbfv2QZd9l+19tP2j7\n",
+       "c+O7NQD0okmdbgAA6Oelb94n6T1HzlC1E6hdEfES21Mkfdf2t9uxF0q6ICL+pz391ojY2a5tt9r2\n",
+       "lyLiRtvviIgLR1jXb0v6VUnPl/RMST+wvbz93gsknS/pMUnfs/3SiOBSIYBRcYYKQDfxsOnfkPRm\n",
+       "22skfV/SMyT9Uvu91UOSKUm63vZaSfdI6pP03DHW9TJJX4iWbZLulvRitRKu1RHxaLTuiVgraXGD\n",
+       "7wTgFwBnqAB0u3dGxNKhM9r3Wu0ZNv0KSRdHxH7byyRNHWO5oWMTuCNnrw4MmXdY/FYCGANnqAB0\n",
+       "k92Sht5A/i1Jb7c9SZJsn2d7+gifmyVpZzuZ+hVJFw95b/DI54dZIekN7fu0ninpckmrdWySBQBj\n",
+       "4l9dALrBkTND6yQdbl+6+7Skj6t1ue2Hti1pm6TfascPfUT5m5L+1PZGST9W67LfEZ+UdL/t+yLi\n",
+       "TUc+FxH/ZvuS9jpD0nsjYpvt5w1btkaYBoCnYdgEAACAhrjkBwAA0BAJFQAAQEMkVAAAAA2RUAEA\n",
+       "ADREQgUAANAQCRUAAEBDJFQAAAANkVABAAA09P8W4xDCBDf4RgAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b91c3d0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "for i in range(8):\n",
+    "    figure(figsize=(2, 2))\n",
+    "    imshow(solver.test_nets[0].blobs['data'].data[i, 0], cmap='gray')\n",
+    "    figure(figsize=(10, 2))\n",
+    "    imshow(output[:50, i].T, interpolation='nearest', cmap='gray')\n",
+    "    xlabel('iteration')\n",
+    "    ylabel('label')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "We started with little idea about any of these digits, and ended up with correct classifications for each. If you've been following along, you'll see the last digit is the most difficult, a slanted \"9\" that's (understandably) most confused with \"4\".\n",
+    "\n",
+    "Note that these are the \"raw\" output scores rather than the softmax-computed probability vectors. The latter, shown below, make it easier to see the confidence of our net (but harder to see the scores for less likely digits)."
+   ]
+  },
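+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The probability vectors are simply the softmax of these raw scores. As a minimal sketch (illustrative only, in plain NumPy and independent of how the notebook itself computes them), the conversion looks like this, applied here to a made-up score vector rather than to the recorded `output` array:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "\n",
+    "def softmax(scores, axis=-1):\n",
+    "    # shift by the max so exp() cannot overflow; the result is unchanged\n",
+    "    shifted = scores - scores.max(axis=axis, keepdims=True)\n",
+    "    exp_scores = np.exp(shifted)\n",
+    "    return exp_scores / exp_scores.sum(axis=axis, keepdims=True)\n",
+    "\n",
+    "# made-up raw scores for a single digit: softmax turns them into\n",
+    "# probabilities that sum to 1, with the largest score dominating\n",
+    "raw = np.array([1.2, -0.3, 0.5, 4.0, 0.0, -1.1, 0.7, 2.2, 0.1, -0.4])\n",
+    "print(softmax(raw))\n",
+    "print(softmax(raw).sum())"
+   ]
+  },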
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "metadata": {
+    "collapsed": false,
+    "scrolled": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAFZtJREFUeJztnVtsY8d5x//f4Z2H94skaiXvemUbsAsD9otbwA2ahyCw\n",
+       "USBpXxoYKFD0EvShN7QPddyHJo9pgAZF+1CgiB30hqRFCxfpQ1vbRQukD724sGOnaydZY8XVihJF\n",
+       "iXfykDwipw/kNzuHklbiRRRJzQ8Y8OgsdXYk/vXNN9988w0JIaDRjIJx1R3QLB5aNJqR0aLRjIwW\n",
+       "jWZktGg0I6NFoxmZsUVDRC8R0cdE9CMienWandLMNzROnIaIXAB+AOAzAHYB/A+AV4QQH023e5p5\n",
+       "ZFxL8wKAu0KIbSGEDeDbAD4/vW5p5hn3mN93A8CO8vUDAD+uvoGIdKh5wRFC0Gn3x7U0WhDXmHFF\n",
+       "swtgU/l6E31ro7kGjCuadwE8SUS3iMgL4AsAvjO9bmnmmbF8GiHEMRH9OoB/AeAC8LqeOV0fxppy\n",
+       "X+jB2hFeeKbtCGuuMVo0mpHRotGMjBaNZmS0aDQjo0WjGRktGs3IaNFoRkaLRjMyWjSakdGi0YzM\n",
+       "uElYAAAi2gZQBdAFYAshXphGpzTzzUSiQT8Z69NCiOI0OqNZDKYxPJ26EqpZXiYVjQDwDhG9S0Rf\n",
+       "nEaHNPPPpMPTi0KIPSJKA3ibiD4WQnx3Gh3TzC8TWRohxN7gtQDgTfS3tmiWnEl2WAaJKDy4NgF8\n",
+       "FsCH0+qYZn6ZZHhaBfAmEfFz/loI8dZUeqWZaxYyR9gwDBARiEheq/eY0342IQSEEOj1evJafR9f\n",
+       "D79eR87KEZ7UEZ45hmHA5/PB6/XC6/XC5/PB7/fD7/fL+71ez9FUAfR6PbTbbdk6nY58D7+/2+2i\n",
+       "2+3Ka42ThRSN1+tFKBSSLRKJyBYIBNDtdnF8fOz48NmidLtd1Go11Ot11Go1NBqNE++3bVs2LZqT\n",
+       "LJxoiAg+nw+hUAjxeBzxeBzpdBqpVAqpVAqRSASdTkd+6MfHxw6LY9s2isUiisUifD4f3G63QyS2\n",
+       "bYOIpMA0J1k40RiGAb/fj0gkgmQyidXVVWQyGaytrWFtbQ2xWAydTkc227YdQ5Vt2wiHwwgGg3I4\n",
+       "U9/f6XRgWZZsbvfV/IrUIVX1wYaH26vwuRZONC6XC6ZpIplMYmNjAxsbG9LKsKVhC8Ov6i/7+PgY\n",
+       "wWAQ8XgcKysrqFarDstk2zaazSYsy0Kz2USr1Zr5zzjsk7VaLViWJV/55+I2a+EspGhCoRBSqRQ2\n",
+       "Nzdx69YtxGIxxGIxRKNRmKZ5wpFVZ0m9Xg/xeBzNZhONRgOWZTkExqLhZlnWzH9G9rG41Wo1lMtl\n",
+       "2ZrNJtrttnyvFs05GIbhsDSPP/44QqEQTNNEKBSC3+8/MZ0eNucsDvUvlj8oVTQsqlnDfWMLeHh4\n",
+       "iHw+D4/Hg16vJ8MK3W4XnU5n5v1bSNH4fD6Ew2HpBPOU2+/3w+v1noi78C/5rFe2Sr1eD8fHxw4r\n",
+       "02w2Hc9Sv28aqP1jbNuW4YB2u41IJCIF0263Hf7ZNPtyURZONDxlzufzyGazMAwDgUBANq/XK4cn\n",
+       "FoPL5XI0t9sNt9strw3DgGEYcLlcMlDo9XpBRHC73Sd8DH4/t1E4zbFVA5SGYZwYLrvdLlqtFlqt\n",
+       "lhyWhBBot9taNBeBRVMoFLC9vQ3bthEMBmXj2RCb9263C6/XC4/HI199Pp+cOfG1eo+I4PV64Xa7\n",
+       "4ff7Hf5Ft9t1CO6is6vhAKMq7GFBs8VT36OK5vj4GO12G7VabWTRToOFFE29Xkc+n4dhGGg0GggG\n",
+       "gzBNE6Zpwuv1yl9wq9WCbduO4cvv9ztEFgwGZZBQCOH48PhaFWGn05HRaBbheZzmU6nN7XbD4/HI\n",
+       "V36vao3UCHar1UK9XkexWJxP0RDRGwB+GsCBEOLZwb0EgL8BcBPANoCfE0KUL7GfEhbN4eEhjo+P\n",
+       "Ua1WpWBYNOrsx7ZtBAKBU0XCjWM77GSy1fF4PPB4PNJJBh4KgIezi4pmuPH/xT4NWzW/339iDc22\n",
+       "bTQaDdTrdTQaDZTLZWlV53V4+iaAPwHwF8q9LwF4WwjxtUHh6S8N2qXDUV3LsmAYBrrdLprNJur1\n",
+       "Ovx+P9xuN9rttsOU8/oUi0H1gQKBAMLhMEKhkHzlD499JI6PcKxk+PvPY3g2xw4uW65oNCrjTMlk\n",
+       "Eh6Px7Egy8MVW5l2uy19nbkM7gkhvktEt4Zufw7ATw2u/xzAv2NGoun1euh0Omg2m/Ja9VdcLpdj\n",
+       "Ot3tdh2mn9+r+jfBYFBao+FXv9/vsFzNZhOmaTos13l/7cPRXFXU7XYbmUwGt27dgmEYCIfDICK4\n",
+       "XC4YhiG/7yzRXAXj+jSrQoj84DqPfm7NTGBLw3+xPPvhXzJbH3W9SZ3p8PtU53PYGWZRmKaJQCDg\n",
+       "GBoajYZjaDNN80KiUfujRnhbrRa2trZgGAZCoRAymYx0rvm57DgPi0ZdUpglEzvCQggxy/p6LBrb\n",
+       "tqfyPPYnuHk8HhkoZPHU63XHqng4HHYMaeehCkZdFmDhCCEQj8dx48YNdDod+Hw+EJGcjrNg2u22\n",
+       "jB91Op0rWUIAxhdNnojWhBD7RJQBcDDNTs0S1TFlc99ut6Uvoa5F8QfVbrfhcrkuvBI+PDzxB81T\n",
+       "fHUBlf0Znmp3Oh3UajVUKhUUi0UUCgWUSiXU63V0Op2FEs13APwCgD8YvP7D1Hp0BfR6PQAPBdRq\n",
+       "taRgWq2WdFjb7Ta63S7a7bacOnOw7VEMz5zUBDKObpumCb/fL0WjLm3w2tPR0REKhQKKxSLq9bqM\n",
+       "Ds+ai0y5v4W+05sioh0Avw/gqwD+loh+GYMp92V28rIZjomw49lqteByuRxBNp6xqBZnlP8HAEKh\n",
+       "kHTCI5GIw9K43W45NLGjzKJRLY1lWVK8s+Yis6dXzvinz0y5L1fGcF4Kx2TOYtJZi8fjQTQaRSAQ\n",
+       "QCwWQyQScVgaFm273Ua9Xke1WpWiOTw8RKVSkYHBubQ0mskZToAPh8NIpVLY2NjA5uYmNjc3kUql\n",
+       "YJomDMNAq9VCtVrF0dERjo6OkMvlcHR0hFqtJpdGeIZ4FWjRzAB1uu9yuRyieeKJJ5DJZJBOpxEK\n",
+       "hWAYBjqdDqrVKg4ODrC7u4tcLofDw0MpGrYwV7VTQotmBnCwjqf1nETGokkkEjKBTBVNoVDAzs6O\n",
+       "tDQ8Y+Kptk73XGJYNByRVi3N1taWdIy9Xq8UDa/k7+zsYG9vzyGaq05416KZARyL4SjyysoK4vG4\n",
+       "XGDlPB52gDkJSw0AztN2Gi2aGcD7tJLJJJLJJNLp9Kmi4ak8x4RarZZMbmcLMw87PrVoZgAH8FKp\n",
+       "FDKZDFZWVhCLxRAKhaRo1BgR5+6oa1RXuao9jC7UOAN4eEomk1hfXz8xPPECpbqafZalmQe0pbkE\n",
+       "htNBE4kE0uk01tfXsbm5idXVVcRiMZlw1Ww2UalUUK1WUalUcPfuXezs7KBQKKBer0vRXNUC5Ymf\n",
+       "76o7sIyo6RZ+vx+JRAIrKyvIZDLY3NxEMpmUEWEigmVZODw8xN7eHnK5HO7fv4+dnR0Zm+HF0nkZ\n",
+       "nrRoLgEWDaegDosmHA4jEAhIS8OiyWazuHv3LnK5HPb39x2W5iojwMOMmyP8FQC/AqAweNtrQoh/\n",
+       "vqxOLhqc72uaJqLRqBTN+vo6HnvsMRmP4ZROVTR37txBoVCQuylrtdq5a2Gz5iKO8DcBvDR0TwD4\n",
+       "uhDi+UHTglFg0fCGvmg0KlexOU+n3W7LJPFyuYxKpSJbvV6X24XnYTgaZtwcYUDXDz4Tj8eDQCAg\n",
+       "K1vwKjZbGDXtodVqnRAO58pwWuu8McmU+zeI6HtE9DoRxabWoyXA7XbLXQ6JRAKRSERuOeH0TU57\n",
+       "GBZMtVpFo9FAq9WaW0szrmj+FMDjAJ4DsAfgD6fWoyWARROJRORi5GmiUYcnFs4iiGas2ZMQQuYE\n",
+       "E9E3APzj1Hq0gAwXFPB6vQgGg9Kn4dkSR35brRYqlQry+Tzy+Tz29vZQKpXQbDYdaQ/zKBhgTEsz\n",
+       "SCZnfha6frBjcxuLhi1NJBKRG/lYNOVyWRYx2NvbQ7FYlHu55lkwwHg5wl8G8Gkieg79WdQ9AL96\n",
+       "qb2cc4bL0w6Lhi3NsGj29/cdouGikfNejnbcHOE3LqEvCw0Lx+VynTk8qaLh4SmbzeLo6EhWuLrK\n",
+       "jLyLoiPCU4CXC3hv9+rqKpLJpBSMz+eT23G73a7D+eUAHtfSm3fBAFo0U8Hn8yEajSIajSIWi8mc\n",
+       "X05/APor2JZlodvtOgJ5alxmXmdLw2jRTAHev7SysiJL1HKiVTgcdmyntSzrxBSbZ03ztlxwFlo0\n",
+       "U4BFk06nsbm56RANF0vi2Mtpywbq9lptaZYUtWCA2+1GKpXC2toaNjY2cPPmTayuriIajUpfhh3f\n",
+       "g4MD5PN57O/vy12S87R6fVG0aMbA4/FIx9fv98u0hxs3buCxxx5DPB5HJBKRh3s0m01HXGZ/fx/l\n",
+       "chmWZS2EZRlGi2YMPB6PnFKHw2Gk02mHpeFKWlx6rdlsolQqYX9/H/fv33dYGi2aawAROVaxuVx+\n",
+       "JpORogEe7g8fFk02m0WxWESlUtGiWWY4cMdRX66Yzgd5rK+vI5VKIRwOw+v1yqJLnP5wVlxmUWZL\n",
+       "w2jRXAAWy/BebD6bYWNjA4lEQtbfOz4+hmVZsuxaqVRyzJjmfRX7PLRoLoAqGN5Wm06n5bZarsoZ\n",
+       "DAZl6oNlWbJEiCoa1cospaUhok30S8GuoL84+WdCiD++yjrCVwFbGq7JFwqFpGiefPJJWZuPdxdw\n",
+       "QaRarSYPJFOFw/UC5301+yzOS42wAfy2EOLHAPwEgF8joqfxsI7wUwD+FTMqB3sVGIaBYDAoT315\n",
+       "4okncPPmTWQyGXm+lN/vl7kynJFXLBaRz+exs7ODfD4v82WGK48uIo+0NEKIfQD7g+s6EX0E4Aau\n",
+       "sI7wrFATq8LhMNbW1qTje/v2bayvryMej8ttKABknbxKpYLDw0Pkcjlsb29jb28P5XJZVvJcdC7s\n",
+       "0wySy58H8F+4wjrCs0AVDNf3XVtbw9bWFra2tuTRh4lEAn6/33FgarfblbVldnd3sb29jcPDQxSL\n",
+       "xYWdYg9zIdEQUQjA3wP4LSFEbejs65nWEZ41bGlWV1extbWFZ599Vq5o8y5J3szGvky1WnVYmlqt\n",
+       "Jhcsr4VoiMiDvmD+UgjBpV+Xpo7waagVzrkMPgfy0um0LG/P+5hs25b7sCuVCnK5HA4ODhxBPE59\n",
+       "WAYe6QhT36S8DuCOEOKPlH/iOsLAEtQRHsYwDFmylY/64SrmoVBICobLwVqWhVKphFwuh08++QQP\n",
+       "HjzAwcGBXF+66rMMps15luZFAD8P4AMiem9w7zUsWR1hFXV6zSe2qKIJh8OOk+mAvmiKxSJyuRzu\n",
+       "3buH3d1dHBwcSCvDDvIiz5hUzps9/QfOtkZLU0d4GA7ieb1eh2hYOOqyAnDS0hQKBbkf27KspREL\n",
+       "oyPCgGMzPi8TxONxxGIxJBIJrK+vI5FIwDRNuN3uE4e/7+/vy8YxmVqtJh3kZRIMoEUDwLlM4PV6\n",
+       "ZcUqzpG5ffs2VldXYZomgL5lUbfRZrNZ7O7uIp/PyyqcfKrdMnLtRTO8RBAIBJBIJHDjxg0Zl+Hc\n",
+       "X9M0IYSQwxHvkLx//76cMR0dHcnV7UXZXTAq1140AKRo2IdJJBLY2NjAU089hWeeecZxRibw0PHd\n",
+       "3d1FNpvF/fv3HZaGh6RFS+O8KNdeNLwjkkURi8Uc50mmUqkTJ+PycYjlclmeisK7CjhJfBktDKNF\n",
+       "Mzgdl8uCpNNppFIpuWeJj9PhYwwByMgvlwrhEmc8HC2zYAAtGgAn6/yyaEzTlEE8jhADD0XTaDTk\n",
+       "ZjdOqroOXHvRsKXhqlVra2uO3ZF8niTHZLiq+GmWRotmiVFPzOUttYlEAqurqzLfV82TUVETxvkc\n",
+       "g2WL+J7HtRMNn47Lzq1aspWTxJPJpCxBrznJtRQNb3ZTa8ik02kZzOOFSZ/Pd9XdnUvOW+XeJKJ/\n",
+       "I6L/I6LvE9FvDu5/hYgeENF7gzZcMnau4ZKtoVAI0WhUnoxy2nYUzUnOszScI/z+IBHrf4nobTys\n",
+       "I/z1S+/hlOHhSRVNJBKRp9aGQiGHzwM4T9NlX0Y9R/I6TLNVxs0RBha0jrA6PLFoQqEQAoGAPEZH\n",
+       "nV4DkALhbSfqARfqscvXRTgXLtSo5Aj/5+DWwtYR5pKtLBr1eGMWjTrNVs9g4qSqTqcjE6sWfXfB\n",
+       "qFxINIOh6e/QzxGuY4HrCKuWxjRNRCKRUy0Ni4aHJj4nWxXNdZtqM6PkCP8V5wgveh1h9SBSn88H\n",
+       "j8cDt9stxcIi6PV6cneB2tRzsnk1+6oOVr8KzttheWqOMBFlhBB7gy+Xqo4wO7sshE6n4yhGxBvg\n",
+       "+DymZrO5dDnA5zFOjvDvAXhlmesId7tdeSRgs9lEoVBANpvFvXv3cO/ePRwdHcmm1svTlgaPzBH+\n",
+       "p8vpznzAorEsC/V6HQcHB8hms/joo49w584deRgpny953abd1y4iDDhTG0qlkoz+ulwu2LaNZrMp\n",
+       "W61WQzabxc7ODnZ3d7G3t+eYfl+XRUqVaycarudbKpXkRrdGo4HDw0Ps7u4iHo87zmKyLAsPHjzA\n",
+       "gwcPUC6Xr+2MSYUu6wef1626XMlKTd9cW1uT602maTpWrzudDkqlEkqlEorFIsrlsiMus8x+jBDi\n",
+       "1ADutRMNT7d5w5tt244NcG632yEINbDHDXhY73eZrY0WjWZkzhLNJMcRaq4pWjSakbm04UmzvGhL\n",
+       "oxkZLRrNyFyqaIjoJSL6mIh+RESvTuF520T0wSDF9L/H+P43iChPRB8q9xJE9DYR/ZCI3holN+iM\n",
+       "542dCvuI9Nqx+nhp6bq8ZjLtBsAF4C6AWwA8AN4H8PSEz7wHIDHB938K/USyD5V7XwPwu4PrVwF8\n",
+       "dcLnfRnA74zZvzUAzw2uQwB+AODpcfv4iOeN3UchxKVamhcA3BVCbAshbADfBvD5KTx37DRTIcR3\n",
+       "AZSGbn8O/bK2GLz+zITPA8bsoxBiXwjx/uC6DkAtwTtyHx/xvLH7CFzu8HQDwI7y9QM87PC4CADv\n",
+       "ENG7RPTFCZ/FXEZ524lTYaddgnea6bqXKZrLmMu/KIR4HsDL6FdP/9Q0Hy76dnzSfk+cCjtcgnfS\n",
+       "Pk47XfcyRbMLYFP5ehN9azM2YpAtKIQoAHgT/SFwUvJEtAb0MxIxYXlbIcSBGADgG6P28VEleMfp\n",
+       "41npupP08TJF8y6AJ4noFhF5AXwB/VKyY0FEQSIKD65NAJ/FdNJMp1redvChMiOlwk67BO+j0nXH\n",
+       "7SOAy5s9DTz2l9H32O8CeG3CZz2O/gzsfQDfH+d5AL4FIAegg76/9YsAEgDeAfBDAG8BiE3wvF9C\n",
+       "/9SaDwB8b/Dhro7wvJ8E0Bv8jO8N2kvj9vGM5708SR+FEHoZQTM6OiKsGRktGs3IaNFoRkaLRjMy\n",
+       "WjSakdGi0YyMFo1mZLRoNCPz/yU19i71FpCwAAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b65d950>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAD0ZJREFUeJzt3XvQXVdZx/HvL2+ubUoh0oJA2lAFJR2QIjDlTgGdykDx\n",
+       "gkBFQHRwHEAqAgN0Rv/SAYdxqAyDM0jlUm4qYIFRLlUqFJCGQtNbys2h2oI0SkNoapO8SR7/OCfN\n",
+       "2/Am2Ts76z3npN/PzDs5e5/n7PXkrJPkyVr7rJWqQpIkSUdv2aQTkCRJmnUWVJIkSQNZUEmSJA1k\n",
+       "QSVJkjSQBZUkSdJAFlSSJEkDLZ9k40lcs0GSJM2Mqspi55sWVEnOBS4C5oB3VdVfHByzfv36n3jd\n",
+       "9u3bOfnkk4+63b5ra+3du7dJbJ889u3b1zm2j2TRfj9m8Yu54447OPHEE4/69X3e4927d/e69vz8\n",
+       "fOfYPXv2dI7t09fTvPZbVQ3+DEzz70+SWmk25ZdkDng7cC6wETg/ycNatSdJkjQpLe+heizwnaq6\n",
+       "qarmgQ8Dz2nYniRJ0kS0LKgeCNy84PiW8bkjWrVqVZOEtDRWrFgx6RQkSVpSLQuqo76RYvXq1ccy\n",
+       "Dy2xlStXTjoFHaVjcQ+dJN0Ttbwp/XvAwjvO1zMapbqb7du33/V41apVFlOSJGnmtCyorgIekmQD\n",
+       "8H3g+cD5BwcN+TafJEnSNGhWUFXVniSvBD7DaNmEi6vqxlbtSZIkTUrTdaiq6lPAp1q2IUmSNGkT\n",
+       "XSkd+i2e2FXfhQX7LKrZKrbVIqB9Fwzts6jmNLxvfX9/rRaddDFLSbpncy8/SZKkgSyoJEmSBrKg\n",
+       "kiRJGsiCSpIkaSALKkmSpIEsqCRJkgayoJIkSRrIgkqSJGkgCypJkqSBLKgkSZIGsqCSJEkaaOJ7\n",
+       "+a1atWrSKfTSZ8+2VrF99tvru8dcn2v3ie2zZ2Of2Pn5+c6x0G6fwCS98mih5X6C7lUoSYfXdIQq\n",
+       "yfoklye5Icn1SV7Vsj1JkqRJaD1CNQ+8uqo2J1kLfC3JZVV1Y+N2JUmSlkzTEaqq+kFVbR4/3gHc\n",
+       "CDygZZuSJElLbcluSk+yATgLuHKp2pQkSVoKS1JQjaf7PgJcMB6pkiRJOm40/5ZfkhXAR4H3V9Wl\n",
+       "Bz+/bdu2ux6vXr2aNWvWtE5JkiTpmGpaUGX0XfKLgS1VddFiMfe5z31apiBJktRc6ym/JwC/DZyT\n",
+       "5Orxz7mN25QkSVpSTUeoquqLuBq7JEk6zlnsSJIkDTTxrWe6bjPSZ2uPltuATMMWHC3fiz7xy5Z1\n",
+       "r8dXrFjRObbPljZ9t57pE99n65k+5ubmOsf2eY/75jsN2wy1eo8laak5QiVJkjSQBZUkSdJAFlSS\n",
+       "JEkDWVBJkiQNZEElSZI0kAWVJEnSQBZUkiRJA1lQSZIkDWRBJUmSNJAFlSRJ0kAT33pm5cqVneL6\n",
+       "bH3RdzuLVtvJtNraYxa362i1HVDLrXX6vM99YvtszdLns9nycz8NWy5J0jQ7ZEGV5DeAAhb7F6iq\n",
+       "6mNdGkgyB1wF3FJVzz6qLCVJkqbY4Uaons2ooDqUTgUVcAGwBTipa1KSJEmz5JAFVVX9ztCLJ3kQ\n",
+       "8Ezgz4E/Hno9SZKkaXTEm9KT3D/JxUk+PT7emOT3Ol7/rcDrgNm76UeSJKmjLt/yew/wWeAB4+Nv\n",
+       "A68+0ouSPAvYWlVXs/h9WJIkSceFLgXVfavq74C9AFU1D3T5mtLjgfOSfBf4EPC0JO87OOi22267\n",
+       "6+fOO+/skbokSdJ06LJswo4kP7X/IMnZwPYjvaiqLgQuHL/mKcBrq+rFB8etW7eue7aSJElTqEtB\n",
+       "9Rrgk8AZSb4MnAI89yjaciEbSZJ0XEqXBfuSLAd+jtG9UN8cT/sNbzypM844o1OsC3se4MKeB/RZ\n",
+       "JBNgfr77R7dPbKs+cWFPSZouVbXoP2hHHKFKsgZ4OfBERqNMVyT566raeWxTlCRJmk1dpvzeB/wY\n",
+       "eBujEarfAi4BfrNhXpIkSTOjS0F1ZlVtXHD8uSRbjlUCt99+e6e4PtMZfacnWl271TRJq+mzvlq9\n",
+       "F32mP/tO+fW59jT09bTo85k73t8LSVpMl2UTvp7kcfsPxt/y+1q7lCRJkmbL4TZHvm5BzJeS3Mzo\n",
+       "HqrTgG8uQW6SJEkz4UibI0uSJOkIDrc58k0Lj5OcCqxunZAkSdKs6bI58nlJvg18F/g8cBPwqcZ5\n",
+       "SZIkzYwuN6X/GfA44FtV9WDg6cCVTbOSJEmaIV0Kqvmq+l9gWZK5qroceHTjvCRJkmZGl3WotiU5\n",
+       "CbgC+ECSrcCOtmlJkiTNjiPu5ZdkLXAno9GsFwL3Aj5QVT8c3HhSp5xySqdYF/Y8wIU9D3Bhz+nj\n",
+       "eyHpeHbUe/lV1f7RqL3Ae45hTpIkSceFwy3suYPRQp6Lqaq617FIYNeuXZ3i+ozKTMsITh+tRnBa\n",
+       "jta1GtlrORrpqJMkqYXDrUO1dujFk9wbeBdwJqPi7Her6itDrytJkjRNutyUPsRfAf9cVc9Nshw4\n",
+       "sXF7kiRJS65ZQZXkZOBJVfUSgKraA2xv1Z4kSdKkdFmH6mg9GPifJO9O8vUkf5PkhIbtSZIkTUTL\n",
+       "gmo58CjgHVX1KOAO4A0N25MkSZqIlgXVLcAtVfXV8fFHGBVYd7Nz5867fvquKSRJkjQNmt1DVVU/\n",
+       "SHJzkodW1beAZwA3HBy3evXqVilIkiQtidbf8vtDRtvVrAT+A3hp4/YkSZKWXNOCqqquAR7Tsg1J\n",
+       "kqRJa3kPlSRJ0j1C6ym/IyewvFsKc3Nzx/ya+61YsaJz7MqVKyce2+e+szVr1nSO7XvtE07ovgrG\n",
+       "2rXdF94/7bTTOsdu3LixcyzA6aef3jl23bp1nWP7vG+7d+/uHDs/P985dtmyfv8/6vO5P+mkk5rE\n",
+       "rlq1qnPs8b79lKTpd7i/WxyhkiRJGsiCSpIkaSALKkmSpIEsqCRJkgayoJIkSRrIgkqSJGkgCypJ\n",
+       "kqSBLKgkSZIGsqCSJEkayIJKkiRpoJnZeqbPVhJV1SuHPtt77N27t3Psrl27Osf22Takz3vRJ9+W\n",
+       "+uSxc+fOJrEAe/bs6RzbJ+c+n7k+/dd3O5k+9u3b1yS2758/SToeNB2hSvLGJDckuS7JB5N037hL\n",
+       "kiRpRjQrqJJsAF4GPKqqHg7MAS9o1Z4kSdKktJzy+zEwD5yQZC9wAvC9hu1JkiRNRLMRqqq6DfhL\n",
+       "4L+A7wM/qqp/adWeJEnSpLSc8vsZ4I+ADcADgLVJXtiqPUmSpElpeVP6o4EvV9UPq2oP8DHg8QcH\n",
+       "7dix466f3bt3N0xHkiSpjZb3UH0D+JMka4CdwDOATQcHrV27tmEKkiRJ7bW8h+oa4H3AVcC149Pv\n",
+       "bNWeJEnSpGSSi/AlqVNPPbVrbJ/r9s2jc2yfhRZbxbqw59HFggt7LuTCnpLUX1Ut+pe4W89IkiQN\n",
+       "ZEElSZI00Mzs5ddSq+nEPrF9plRa7mvYauqqz3X7THPNzc11ju2bR6u+noZp477xfb6B2ye21bSq\n",
+       "JC01R6gkSZIGsqCSJEkayIJKkiRpIAsqSZKkgSyoJEmSBrKgkiRJGmgqC6pdu3ZNOgUN0Hf1ck2P\n",
+       "+fn5SacgSTPJgkrHnP03u/pszSNJOmAqCypJkqRZYkElSZI0UCa5nUMS95KQJEkzo6oW3WtsogWV\n",
+       "JEnS8cApP0mSpIEsqCRJkgaauoIqyblJvpHk20leP+l8dGhJ/jbJrUmuW3BuXZLLknwryWeT3HuS\n",
+       "OerQkqxPcnmSG5Jcn+RV4/P24ZRLsjrJlUk2J9mS5E3j8/bdDEkyl+TqJJ8cH9t/M2yqCqokc8Db\n",
+       "gXOBjcD5SR422ax0GO9m1FcLvQG4rKoeCvzr+FjTaR54dVWdCZwNvGL8580+nHJVtRM4p6oeCTwC\n",
+       "OCfJE7HvZs0FwBZg/83M9t8Mm6qCCngs8J2quqmq5oEPA8+ZcE46hKq6Ath20OnzgPeOH78X+NUl\n",
+       "TUqdVdUPqmrz+PEO4EbggdiHM6Gq/m/8cCUwx+jPon03I5I8CHgm8C5g/7fG7L8ZNm0F1QOBmxcc\n",
+       "3zI+p9lxv6q6dfz4VuB+k0xG3STZAJwFXIl9OBOSLEuymVEfXV5VN2DfzZK3Aq8D9i04Z//NsGkr\n",
+       "qFzD4ThSozU57NMpl2Qt8FHggqq6feFz9uH0qqp94ym/BwFPTnLOQc/bd1MqybOArVV1NQdGp+7G\n",
+       "/ps901ZQfQ9Yv+B4PaNRKs2OW5PcHyDJTwNbJ5yPDiPJCkbF1CVVden4tH04Q6pqO/BPwC9i382K\n",
+       "xwPnJfku8CHgaUkuwf6badNWUF0FPCTJhiQrgecDn5hwTurnE8BLxo9fAlx6mFhNUJIAFwNbquqi\n",
+       "BU/Zh1MuyX33fwMsyRrgl4Crse9mQlVdWFXrq+rBwAuAz1XVi7D/ZtrUrZSe5FeAixjdZHlxVb1p\n",
+       "winpEJJ8CHgKcF9G8/1/Cnwc+HvgNOAm4HlV9aNJ5ahDG38r7AvAtRyYWngjsAn7cKoleTijm5aX\n",
+       "jX8uqaq3JFmHfTdTkjwFeE1VnWf/zbapK6gkSZJmzbRN+UmSJM0cCypJkqSBLKgkSZIGsqCSJEka\n",
+       "yIJKkiRpIAsqSZKkgSyoJE1cki+Nfz09yfnH+NoXLtaWJB1LrkMlaWokeSqjRQ6f3eM1y6tqz2Ge\n",
+       "v72qTjoW+UnSoThCJWnikuwYP3wz8KQkVye5IMmyJG9JsinJNUl+fxz/1CRXJPk4cP343KVJrkpy\n",
+       "fZKXjc+9GVgzvt4lC9vKyFuSXJfk2iTPW3Dtf0vyD0luTPL+pX03JM2i5ZNOQJI4sPXN64HX7h+h\n",
+       "GhdQP6qqxyZZBXwxyWfHsWcBZ1bVf46PX1pV28Z7221K8pGqekOSV1TVWYu09evALwCPAE4Bvprk\n",
+       "C+PnHglsBP4b+FKSJ1SVU4WSDskRKknTJAcd/zLw4iRXA18B1gE/O35u04JiCuCCJJuBfwfWAw85\n",
+       "QltPBD5YI1uBzwOPYVRwbaqq79fonojNwIYBvydJ9wCOUEmadq+sqssWnhjfa3XHQcdPB86uqp1J\n",
+       "LgdWH+G6xU8WcPtHr3YtOLcX/66UdASOUEmaJrcDC28g/wzw8iTLAZI8NMkJi7zuXsC2cTH188DZ\n",
+       "C56b3//6g1wBPH98n9YpwJOBTfxkkSVJR+T/uiRNg/0jQ9cAe8dTd+8G3sZouu3rSQJsBX5tHL/w\n",
+       "K8qfBv4gyRbgm4ym/fZ7J3Btkq9V1Yv2v66q/jHJ48ZtFvC6qtqa5GEHXZtFjiXpblw2QZIkaSCn\n",
+       "/CRJkgayoJIkSRrIgkqSJGkgCypJkqSBLKgkSZIGsqCSJEkayIJKkiRpIAsqSZKkgf4fuHwpG022\n",
+       "rncAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b4bf3d0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAGPlJREFUeJztXUlsbNlZ/v6a53myy37Pft1B6kiRkk1YJFGyiKKOkJKw\n",
+       "IYqEFAWEWDAJFjRhQWAXIhEhWCAg6SgMSkCgoICESAeBaBYMjbrTHUgP7nbZZbuq7JrnW9NhUfWf\n",
+       "Pve6bNfk91zl+0lHVa7yuz717lf//59/JCEETJiYB5YnvQET6weTNCbmhkkaE3PDJI2JuWGSxsTc\n",
+       "MEljYm4sTBoiepaIXieit4jouVVuysTdBi3ipyEiK4A3AHwcwCmA/wbwOSHEj1a7PRN3EYtKmg8C\n",
+       "OBBCZIQQfQDfBvDp1W3LxF2GbcF/lwaQVX4+AfDj6i8QkelqXnMIIWja64tKGpMQ9xiLkuYUwK7y\n",
+       "8y7G0sbEPcCipHkJwHuIaI+IHAA+C+C7q9uWibuMhWwaIcSAiH4RwD8BsAL4unlyuj9Y6Mg904VN\n",
+       "Q3jtsWpD2MQ9hkkaE3PDJI2JubGoc2+tYbVa5bJYLCDSq24ikq/zc3UZMRwOMRqN5ONoNIIQAkII\n",
+       "+bP6/rrj3pHGYrHA7XbD7XbD4/HA7XZLcgBjwthsNt1yOp1wuVxwOp1wOp2XrtnpdHSr3+/rlqZp\n",
+       "6Ha7cq17Xva9Iw0RweVyIRQKIRwOIxgM6iSLxWKR5HA4HHC5XPD5fPD5fPD7/fB6vZeuWavVUK1W\n",
+       "UavVUKvV0Ol00O125WO9Xkej0cBoNIKmaSZp1g1EBLfbjVAohGQyiXg8riOM1WqVEsjj8cDj8SAc\n",
+       "DiMSiSAcDiMcDl+65vn5Oc7Pz3FxcYHz83M0m03dstlsEEKg2+0+gU+8emw8aVjdWK1W2Gw2uFwu\n",
+       "RKNRJJNJpNNpbG1t6ewVJo3H44HX64XX60U0GkUkEkEsFkMkErn0N1hiBQIBeL1e1Ot1KV3q9Tos\n",
+       "FgsGgwFarRasVisASGmzjlJn40ljsVjg8/kQCAQQCAQQCoWQTqexvb2N7e1tJBIJnaRh9eRyueTy\n",
+       "+/3weDxSYhjhcDjg9/sxGAxARPD7/QgEAlLSEBF6vR6azSaq1Sp6vZ7OeF433AvS+P1+pFIpuba2\n",
+       "tpBMJrG1tYVYLCYNYSaOzWaD3W6Xy+Vywe12w263T/0bDocDPp8PFosFLpcLrVZLt1jKVCoVuFwu\n",
+       "AEC/38dgMJAnrXXCxpPGarXC5/MhkUhgf38fe3t7SCaTSCQSSCQSiEajAKA7PbHEISLd8fw6ScOE\n",
+       "CQQCaLfb6HQ6usdKpYJCoQCXy4XhcCiP4+uIpUhDRBkAdQBDAH0hxAdXsallwDea7Rifz4d4PI7t\n",
+       "7W08fPgQTz31FKLRKKLRKGKxGEKh0KVrsI+FCcLSoN/vo9fr6QjGy263S/K43W7dEbtUKiGfzyMU\n",
+       "CsHn80EIASLCaDRCr9d7rP8/q8CykkYA+JgQoryKzawCrI7YholEInjqqafw4MEDqY4CgYC0UaZh\n",
+       "OBzKNRgMJFl6vR76/b40mHk5HA65nE4nLBYL7Ha7JIfX60UoFEIkEkEikYDdbketVsNwOESn07mX\n",
+       "6mlqJPRJwWq1wu/3I5lMIpVKYXt7G7u7uzrSsHNvGmlYbahEabfbuqXaO3a7XZ6y2Ihmuwh41xAP\n",
+       "BoPy1AZAEsbojV4HrELSfJ+IhgD+WAjxpyvY01Jg0qRSKTx69Aj7+/vS+E2lUojFYroj+DQMh0Pp\n",
+       "ye10OqjVaqjX66jVamg0GtL5xw5APjU5HA65B7aN7Ha7jjSJRAL9fl86/e4jaT4khMgRURzAC0T0\n",
+       "uhDixVVsbB4YbQu/3494PI7d3V08evRI2i/RaBSBQEBnr7AKUtXRNCO2Wq3KR/U47nK5MBgMpJTh\n",
+       "UxT7h9iZyMf+cDiMer1+paRbByy1ayFEbvJ4QUTfwbi05bGSRo0V2e126YsJh8PS4A0EAvImCSEu\n",
+       "2SvsjOPFBiyHARqNhlzNZlNnwzgcDjx48EAayHw0Z0nGxFGP8izppgU/1wELk4aIPACsQogGEXkB\n",
+       "fALA76xsZ7PvQ3p6nU4n/H4/gsGgjjRGG2Y4HEp7pdPpoFAoIJfLIZfLoVAoQNM0uZg86lJvvt1u\n",
+       "R7/fBxHJkIPL5YLD4dBJGzaeVcKogdJ1wjKSJgngO5MPbQPwl0KI761kV3OAVZLT6YTX651KGr7B\n",
+       "qqRhwjSbTeTzebzzzjs4ODjA4eGhJJR6YlIf1bQKfnS73QiHw0ilUlL18d9T/T2qpFlHwgBLkEYI\n",
+       "cQjg/Svcy0LgG+JwOKTtwBFpPnarGA6H6Ha7aDabaDQaqFarODs7QyaTwVtvvYU33nhDqi1+ZHXG\n",
+       "bn9VYlitVmxtbaFWq6HVaqHX60m1pTrvVN+PalOtI9bTEjNAtRccDoe0KfibrN70Xq+Hi4sLGZEu\n",
+       "FArIZDI4OztDpVKBpmnSMOZHNakKAFwulwxqejweeSoLBoPwer1SPXFwko3rRqOBcrmMer2OdruN\n",
+       "fr+/luRZe9Ko9gKTRrUbWB2xaul0Ori4uMDx8TGOjo6QzWZRKBRwfn6OarWKbrd7KdNOlQ5EBKfT\n",
+       "KR2HrJKi0SiCwSA8Hg+cTqeOuP1+H51OB/V6HaVSCbVazSTNk4YxyGi0GdiGYbV0cXGBbDaLN998\n",
+       "E2+//bYujYEz64xqRL25TqcTwWAQ8XhcBkFVSWM8HV0ladYxhABsCGmMMBq67MntdDqoVqvI5/PI\n",
+       "ZrM4PDzEwcHBJaP3OrCkYV9QOp1GIpFAJBJBIBCAy+XSBTx5P5qmodVqoV6vo9VqSTVoSponACEE\n",
+       "er2eTHCyWq3wer2wWq3QNA35fF4XPGw0Gjg8PEQul0OtVpNE4cjzLHA4HDKelEgkEA6H4fV6ZcBy\n",
+       "XU9Fs2LtScPGbbvdlnaIxWJBr9dDtVpFOByWUoTDAoVCAYVCQZKGDeV5SRMOhyVpfD4fnE6nzju9\n",
+       "qVh70rCk4aTtTqcDTdNQq9WQz+fh9XplVcBgMECv19N5eHu9ngxSzkoaDlKGQiHE43GEQiGdpGFs\n",
+       "KnE2gjRMCmBsFHO8iE8xxlQH47oJarWC1WqF2+2G3+9HKBRCLBaTUW72Aqt5OMC7dVHGta5Ye9IY\n",
+       "oaY2ENGlG6b6bGaRLFzy4na7pX+Gj9gc03I6nfLEBOASOflkxks1hNcRG0caYHzT+BtvtVqvrHic\n",
+       "VR1xGmcoFEIoFLrkl2GHIqsm9YivaZo80vMyT093DEwM9uayXWH0uczqymdJEwwGkUgkZHKXMXpu\n",
+       "s9l0pNE0De12Wx6z1cU+mo2VNET0PICfAHAuhHjf5LUIgL8C8BBABsBPCSGqt7jPubCszaDmALMN\n",
+       "Ew6HkUwm8eDBA0kav98vy3rZ5mEbi31EXHXJ0qbZbELTtLlU5F3DLAkd3wDwrOG13wDwghDixwD8\n",
+       "8+TnjQBHzT0ejy5Fc2trCw8ePMDe3h5SqRTC4bAkDAAp3fr9PprNJkqlEs7OznB4eIjT01OUSiVZ\n",
+       "zmKMZa0bbiTNJBOvYnj5UwC+OXn+TQCfWfG+niiYNIFAQKZobm9vY2dnB3t7e9ja2kIoFILb7ZYq\n",
+       "iY1sLopTSXN2diZJY+wssY5Y1KZJCiEKk+cFjHNrNgKqpOH0zEQiga2tLezu7uLhw4fyJMUhAyYB\n",
+       "n5aMpCkUCiiXyzrSrCthgBUYwkIIsWn99YzqKZFIIJVKIZ1OY3d3V1fGy3aMGklvNBoolUrI5XI4\n",
+       "OjpCpVKRke1N6FGzKGkKRJQSQuSJaAvA+So39aRhs9l0SV2qL2Zamma/39eV4RaLRZRKJZmI3mw2\n",
+       "0e121/aIbcSimc3fBfD5yfPPA/i71WznyYNPTE6nEx6PRxb/M2mmxZY49lWtVmXLkXK5jHK5rCPN\n",
+       "TRH0dcEsR+5vAfgogBgRZQH8FoAvA/hrIvpZTI7ct7nJxw3ufjWNNNNiSyxpqtUqLi4udJJGDYpu\n",
+       "iqS5kTRCiM9d8dbHV7yXJwZVehhVE/eccblcsNvtl5yFQghomoZmsymL/Jk03B1r3W0YIzbOIzwv\n",
+       "LBYLvF4vfD4fvF4vAoEAdnZ2sLOzg3Q6jZ2dHcTjcQQCAdlvj9MsOOXi7OwMJycnyGazOD4+Ri6X\n",
+       "Q7VaRafTecKf7nZw70nDTYg4RMDHa7WfTSgUQjAY1JGm1WrJOBKT5ujoCJlMRqqmdSzunwX3njRc\n",
+       "oJ9MJmX/mng8LlcsFpNRbqfTKfN3WB1VKhXkcjmcnJzg+PgYmUwGzWYTrVZrY3rsGXHvSUNEOtI8\n",
+       "88wzusaMoVDoUs4v2zDlcllWZ56eniKbzSKTyehqw01JsyEwtn/lTp5cnakavmqYABgbwK1WC+Vy\n",
+       "WSao5/P5qR7fTSQMcA9Jo9ZJqZWZbAxzr2DVL2PMw2Epk8/ncXR0hEKhgGq1qvP4btqJScW9Iw0A\n",
+       "XXEdd/JU68D5NZY0akkux5bK5bIME1SrVUka1RdjSpoNglpcZ1RPgUBAV3jHpBFCyMR0VT0dHx+j\n",
+       "3W7L7hLr2OJ1Xtw70vBpibuPx2Ix2fHT7/frsvBYNTFReLHjrtFooNPpyNqpTVZJKu4tabhjOefI\n",
+       "JBIJ+P1+KV3U05LaOLpSqch67GazKQdocHLVfcC9JA03cnz06BGefvppJJNJnaQxpj2wpKlUKjg/\n",
+       "P5eShgOR69x9fBHcGOUmoueJqEBErymv/TYRnRDRy5NlTAe9s2APMJPmve99L/b39yVpuIHAdX6Z\n",
+       "YrEoScPFeaZ60uMbAP4QwJ8prwkAXxVCfPVWdrVCcCYeG7dsz3BogPNljP1kePV6PZRKJRQKBZye\n",
+       "niKTyciS3k2Y3bQIZolyv0hEe1PeWouaUzV9k0f2qFNTfD6frgkRVxNwv712u41isYhcLic9vqye\n",
+       "NjVMcBOWaS/5S0T0AyL6OhFd7hV/R8D9fbn2OhqNSknj9/ulpOEmRMC7/WTY+C0Wi8jn8zIomc/n\n",
+       "TdIsgD8CsI9xz70cgN9b2Y5WDCaNmvMbiUQQCoWkpFHVE/tjuAESJ1YxaTKZjEmaRf6REOJcTADg\n",
+       "axj3D74zUPv28lidSCSCVCqly4/hagJj+iaXorTbbdkUiUtsuZfNulcULIOFSDNJJmf8JIDXrvrd\n",
+       "x41psSWfz4doNCrLUBKJhAwXTCMNG8CsorgJtUqY+3JSmoZFcoS/BOBjRPR+jE9RhwB+/lZ3OSfU\n",
+       "vr089S0ajcrhGolEQkoalSw8TodrsdX2a6qU2fTY0k1YNEf4+VvYy8rAHl1VPUWjUaRSKezu7sr+\n",
+       "wty5ygi1Z1+r1dL5Yu6LA+86bJxHmCe8caOhSCSCZDIph5aqbVtVw5frsDVNk4bv6ekpjo+PpV9G\n",
+       "07Qn/fHuBDaWNOpQLm7ZypPdeOQOH7FZqrA6KhQKODs7QzabxdHRkfQA39fTkhEbRxqehMJzt9Pp\n",
+       "tI40fr9f16aenXncR6ZSqeikjJrza0qaMTaONDzcgkmzs7Mj1RN34VT7z6jH61qthmKxeEnSzNOf\n",
+       "7z5g40mTTqd1dUvGGUtCCLTbbZTLZZyenuLk5ASnp6coFotoNBrQNG0jmiuuEhtLGm53tr29LSUM\n",
+       "jwtUwaQpFos4OTnBwcEB8vm8rp/MOo9Dvg1sHGm4R54qaTidc9owdq4uKJVKyGazePvtt2WyFZOG\n",
+       "f++++mWM2DjSTFNP6jAvI0aj0SVJwy3xWTWZZNFj40hjHP13VU8ZFWq7WFZH6nXmhbGLqPF1da/G\n",
+       "x2l7NL7PSe88jGzezuiDwUA3cnFeL/fGkWYRGIm27AQ4lk6q8cxE5Ef+u4C+eG+aNFTfs1gssnKC\n",
+       "Uztmmbqrfg4+KdZqNVSrVWiaduWYomkwSQPoCGO323U3fBHSGFukTbsZqlRhMnC87Kr9sYoNBAKI\n",
+       "xWJybPQ0A98IdQ+1Wg2FQkEeAtRU1aUlDRHtYpzmmcA4OPknQog/uOt9hOcB37yrJM0isFgs0qdz\n",
+       "07dXJcxVA1HV92w2GwKBgJw1lU6nZTeL66Duo1gsAgA6nQ7K5bJsiTJrXO0mSdMH8KtCiFeIyAfg\n",
+       "f4joBQBfwLiP8FeI6DmM+wivZS9hbgCQSCSwv78vh3NwWcoiDj11PLPq52EJpKoaridXl1FFqZLG\n",
+       "ZrPJ8AhXUcwraZxOp2zD3263YbfbdTOxbvrM15JGCJEHkJ88bxLRjwCkMe4j/NHJr30TwL9iTUlj\n",
+       "sVgQDAaxs7MDIQT8fr9uassiUe1WqyVDD9xwWh2mygRgEng8HtlUyefzTSWNurh8mGc1zGLTAO8S\n",
+       "x263S2L3ej3Y7XbZVZ2/LNdhZptmklz+AQD/iQ3qI8ykEULA5/Nhe3tbZ88s4tRjA5MfeQ4CSzA+\n",
+       "1bENpQ7rCIVCl+waVYXyOETumaM2wJ4FQgg4HA6dRGVJxnXqN2Em0kxU098C+BUhREPVueveR5gN\n",
+       "S6/Xi1QqdamnzCJ2TalUQrFYlIu/0dxn2DgOOhKJyAZK8Xh8KmmMezb2Mr4J6udg9aSWE3N7/llc\n",
+       "DLNk7tkxJsyfCyG49eud7SPMUWt1YLs6RXeaKL/q9WX2AEBmDvLNMUoa3lcwGJRNlILBoLxxsxLW\n",
+       "OABNtZ+mqddKpYJGo6HLSLy4uEChUJgpkn/T6YkAfB3A/wkhfl95i/sI/y7uWB9h/ta0222Z6sBi\n",
+       "nMfs3Da4zkoIAZvNdq1Nw4NauSeO2j1UfWRMIxJ/SbhzhSpFeFSjilqthlwuh/PzcznymcdGd7vd\n",
+       "G0c/3/Q/+CEAPw3gVSJ6efLaF3GH+wirtde1Wg3lchmBQECWsjwO8IxLm80Gt9t9abqdUb04HA6Z\n",
+       "GMYOQP4ss6jKwWCAdrsth5DxOGleRmnTaDSk6lSHy08j2DTcdHr6d1xdsXAn+wgzadjrWalUdLVP\n",
+       "jwNceOd2uy85zVSPMHDZyGXSGH1F15GH50vV63UpOZrNplzG0xDP+ORmTOrgsqVJs44YjUZyPkE2\n",
+       "m4XNZpP/KSy+543VGI+8qqNtmrrjagiGmsTV7/flqeyq0xkP5+DfV22Uab/PRX28+LjPk+2MpNE0\n",
+       "TQ4s47a23DLlXpJmOByi0Wggn88DGH+rUqmUPALHYjEAl4OF10FtIMB+lXlsJJZ86k1k+2aaocrk\n",
+       "ZjWjHtmntcrvdru6pktcPcHORePfYMnEf4ftISb0Tdg40oxGIzQaDRCRTgyzyK7X67q29rOQhj21\n",
+       "LpdLGoyj0UjaLDeBc5B5L3xiYUPVCN4nLyYBd0o3SgOOWvP7KsGmNVtiSaZ6vudpzLRxpBkOh1Id\n",
+       "XVxcwO12y65VXGY7D2mISEoWj8cDj8eD0WgkbZZZwJ20yuUyzs/PL0kC47ebc5VLpRLK5bKs8rzK\n",
+       "sFVTO4z20LQY2lW/M6szc+NIo9YxAeMbxmOQiQiaps1NGvXI7na7Ua1WUS6XUSwWEQ6Hb7wGO/t4\n",
+       "QguThSWD8aayq4CXao91u90nnnq6caQxQgiBTqeDarUKItIZwrOqJ0524mMx57Hw400wDnNnHwqr\n",
+       "BSNYIqqlMzz+5y5kEW48aUajkSRKr9dDrVYDMJsBzL9nTE1Q7ZtZ0hLUCDL36OM1LWeHKz15qW3z\n",
+       "7wLotph7V+JRaq7MoumbRnVmPILfBGNqxFXOO4aaBDbNTnlcEEJM/WZtPGlMLI6rSLNM+zQT9xQm\n",
+       "aUzMjWtJQ0S7RPQvRPS/RPRDIvrlyetr20fYxPK41qYhohSAlJojDOAzGEe1G+KaPsKmTbP+uMqm\n",
+       "WTRHGFiTPsImVo+ZbRolR/g/Ji+tRR9hE6vHTKSZqKa/wThHuIk16iNsYvW40U8zyRH+BwD/aEj5\n",
+       "5Pf3APy9EOJ9htdNm2bNsZCf5qocYbrDfYRN3D5uOj19GMC/AXgV47JcAPhNAJ/DWDXJPsJKHRT/\n",
+       "W1PSrDnMMIKJuWGGEUysDCZpTMwNkzQm5oZJGhNzwySNiblhksbE3DBJY2Ju3JqfxsTmwpQ0JuaG\n",
+       "SRoTc+NWSUNEzxLR60T01qQL6LLXyxDRq5MU0/9a4N8/T0QFInpNeS1CRC8Q0ZtE9L15coOuuN7C\n",
+       "qbDXpNcutMdbS9e9rq53mQXACuAAwB4AO4BXADyz5DUPAUSW+PcfwTiR7DXlta8A+PXJ8+cAfHnJ\n",
+       "630JwK8tuL8UgPdPnvsAvAHgmUX3eM31Ft6jEOJWJc0HARwIITJCiD6AbwP49Aquu3CaqRDiRQAV\n",
+       "w8ufwritLSaPn1nyesCCexRC5IUQr0yeNwGoLXjn3uM111t4j8Dtqqc0gKzy8wne3fCiEAC+T0Qv\n",
+       "EdHPLXktxm20t106FVZJr11JC95VpuveJmlu4yz/ISHEBwB8EsAvENFHVnlxMZbjy+576VRYMrTg\n",
+       "XXaPq07XvU3SnALYVX7exVjaLAwhRG7yeAHgOxirwGVRmJTqcEbiUu1thRDnYgIAX5t3j3RNC95F\n",
+       "9qhc7y/4esvu8TZJ8xKA9xDRHhE5AHwW41ayC4GIPETknzz3AvgEVpNmyu1tgRW0t10mFfaq9NpF\n",
+       "93hr6brLnGZmsN4/ibHFfgDgi0teax/jE9grAH64yPUAfAvAGYAexvbWFwBEAHwfwJsAvgcgtMT1\n",
+       "fgbjqTWvAvjB5OYm57jehwGMJp/x5cl6dtE9XnG9Ty6zRyGEGUYwMT9Mj7CJuWGSxsTcMEljYm6Y\n",
+       "pDExN0zSmJgbJmlMzA2TNCbmhkkaE3Pj/wFJ7Hv45ZreFAAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b44f290>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEbZJREFUeJzt3X+QXeVdx/HPJ5vdTXaTGAIhIcliooIC01qwMEChKbQ6\n",
+       "2Cm0ai1FbbE6dZy2NmLLlDKjf+G0tuMUOx2dqSAt1FKVVtqO8kuLKaWUFEjCj1AgDigkkohJyO4m\n",
+       "2exuvv5xb8Jmsz+eJyfP3nvC+zWT4Z5zv/c8z73PuXe/nB/P1xEhAAAAHL1Zre4AAABA3ZFQAQAA\n",
+       "VERCBQAAUBEJFQAAQEUkVAAAABWRUAEAAFQ0u5WN22bOBgAAUBsR4YnWF02obF8m6UZJHZJuioi/\n",
+       "GB+zZMmSI143MDCgefPmHXW7uXNr5cQfOHAgtzvHvA/2hGM5oY6Ojqx+5MTPmjXxAc7du3drwYIF\n",
+       "R73dnPeXO9Y54zc0NJQcu3///iKxw8PDybE5n9tk8SMjI5o9+8ifhc7OzuTtzpkzJzl2orYmMzAw\n",
+       "kBybM3aSNDo6mhx7vM/dN9n3eiI53+vu7u7k2Pnz5yfH9vb2Jsfm9iPH4OBgcmx/f/8R6/bs2aOe\n",
+       "np4j1u/evTurHyMjI8mxx/u+PNOKnfKz3SHpS5Iuk3SmpKtsn1GqPQAAgFYpeQ3VeZI2R8QLETEs\n",
+       "6RuS3l2wPQAAgJYomVAtl/TimOWXmuum1dXVVaRDmBmlDqmjvJzTPQCOnZzT6mhPJX89j/rkLAlV\n",
+       "vZFQ1RcJFdAaJFT1V/Ki9C2S+sYs96lxlOowYy807erqIpkCAAC1UzKhekTSabZXStoq6UpJV40P\n",
+       "qnI3HwAAQDsollBFxIjtj0m6R41pE26OiKdLtQcAANAqReehioi7JN1Vsg0AAIBWcysn9rIdfX19\n",
+       "0weq7MWyOZ9BqQk420XOBIelJkPMiS05cWnOpJM5sTnvL2eywNzJLHMmAMwZ63aQMyGqlDfha05s\n",
+       "HSdOLPW7VWpC4tz+lhrrUtqhDzjcZDOlc0sPAABARSRUAAAAFZFQAQAAVERCBQAAUBEJFQAAQEUk\n",
+       "VAAAABWRUAEAAFREQgUAAFARCRUAAEBFJFQAAAAVkVABAABUVLQ4corUGn2l6kDlxufUFCxZj6pE\n",
+       "H3L70Q51ynJrzOWMX059vjlz5iTHdnZ2JsfmvL/u7u7kWCmv3t2ePXuSY3t6epJjTzzxxOTYk08+\n",
+       "OTl2//79ybGStHDhwuTYVatWZW07Vc4+1Nvbm7XtnDqP8+fPT47t6upKjt2xY0dy7D333JMcu3Xr\n",
+       "1uRYKW9fzqmjl/O5XXrppcmxF198cXKsJC1btiw5Nmc/yvks5s6dmxyb87nlfEdypf4eTrXPFz1C\n",
+       "ZbvP9v22n7L9pO2Pl2wPAACgFUofoRqWdE1EbLA9T9Kjtu+LiKcLtwsAADBjih6hioiXI2JD8/GA\n",
+       "pKclpR+PBAAAqIEZuyjd9kpJZ0t6eKbaBAAAmAkzklA1T/fdIWlN80gVAADAcaP4XX62OyV9U9LX\n",
+       "IuLO8c/v2rXr0OM5c+YUvYofAAAg1dq1a7V27dqk2KIJlRv34N8saVNE3DhRTM7tygAAADNl9erV\n",
+       "Wr169aHlG264YdLY0qf83iLpdyRdYnt9899lhdsEAACYUUWPUEXED8Rs7AAA4DhHsgMAAFCRW1k+\n",
+       "xHaceuqpqbHJ2819TzlT6ueUAsnpR05sqT5IeZ9FTmyp0jq5282JzylTkxObMyYjIyPJsbnlVkqN\n",
+       "dY6c8cjpQ87nlqsdSi7llEXKjS+1f7bD/tYuckqC5Y51qb9nx/uY5IiICX+4OEIFAABQEQkVAABA\n",
+       "RSRUAAAAFZFQAQAAVERCBQAAUBEJFQAAQEUkVAAAABWRUAEAAFREQgUAAFARCRUAAEBFRYsjpxge\n",
+       "Hk6KKzlFfqkSMaVKLeTEliyTkbPtdik9U7eyGu0y1mgvqb+bRxuPsnL+nuXEorUmTahs/4akkDTR\n",
+       "X6yIiG+lNGC7Q9Ijkl6KiMuPqpcAAABtbKojVJerkVBNJimhkrRG0iZJ81M7BQAAUCeTJlQR8btV\n",
+       "N257haR3SvpzSX9SdXsAAADtaNqL0m0vtX2z7buby2fa/v3E7X9B0rWS8i5qAgAAqJGUu/y+Iule\n",
+       "Scuay89Juma6F9l+l6TtEbFeE1+HBQAAcFxISahOioh/kDQqSRExLCnl9qcLJV1h+3lJt0u61Pat\n",
+       "44P6+/sP/RsaGsroOgAAQHtImTZhwPaJBxdsny/p1eleFBHXS7q++ZrVkj4ZER8cHzd/PteqAwCA\n",
+       "ektJqD4h6buSfsb2DyUtlvTeo2iLSXIAAMBxySmTAdqeLenn1bgW6pnmab/qjdtxyimnJMUysefR\n",
+       "xTKx5+GY2BMAUEVETPiHZ9ojVLbnSvqIpIvUOMr0gO2/iYh9x7aLAAAA9ZRyyu9WSbslfVGNI1S/\n",
+       "Jek2Sb9ZsF8AAAC1kZJQnRURZ45Z/p7tTcesA7PTygnOmpVexzn31EfuKcIS/ejo6Gh5rJQ+Hu0S\n",
+       "mzvWpU7ZlqqVtnjx4uTYhQsXZm17xYoVybFLly5Njt22bVty7MaNG5Njt2zZkhy7c+fO5Nhcpfah\n",
+       "np6e5NjOzs7kWEnq6upKjs252zrnlHvO9zqnv4ODg8mxUt77y7Fnz57k2JzLCXJr+XHq/+ik7stT\n",
+       "fb4pWcpjti8Y0+j5kh5NahkAAOB1YKriyE+MiXnQ9otqXEN1qqRnZqBvAAAAtTBdcWQAAABMY6ri\n",
+       "yC+MXbZ9sqQ5pTsEAABQNynFka+w/Zyk5yWtlfSCpLsK9wsAAKA2Ui5Kv0HSBZKejYhVkt4u6eGi\n",
+       "vQIAAKiRlIRqOCJekTTLdkdE3C/pzYX7BQAAUBspE4PstD1f0gOS/t72dkkDZbsFAABQHylHqN4j\n",
+       "aY+kayTdLWmzuAMQAADgkGmPUEXEwaNRo5K+UrQ3AAAANeTJplG3PaDGRJ4TiYhYULlxO1JLa+SU\n",
+       "UMkpcZC77ZzYkuVy2kGpPueUs8gtwZGju7s7OTZnn8spfdHf358cOzCQdyY+pyxKzvtbtGhRcuyy\n",
+       "ZcuSY3NK5Sxfvjw5VpI2b96cHLthw4bk2JwyPDnli3K/e6V+t3K+I0uWLEmOPffcc5Njc/YhKW//\n",
+       "zPl9ySkns3bt2uTYhx56KDlWknbt2pUcm1PWph1+73Pl7Mup5Y727t2riJiw01PNQzUvuSeTsL1Q\n",
+       "0k2SzlIjOfu9iPhR1e0CAAC0k7xDOfn+StK/RsR7bc+W1Fu4PQAAgBlXLKGy/VOSLo6IqyUpIkYk\n",
+       "vVqqPQAAgFZJP8GYb5Wk/7V9i+3HbP+t7Z6C7QEAALREyYRqtqRzJP11RJwjaVDSdQXbAwAAaImS\n",
+       "11C9JOmliPhxc/kOTZBQDQ4OHnrc2dmZfKU9AABASaOjo8l3RBdLqCLiZdsv2j49Ip6V9A5JT42P\n",
+       "6+3lOnUAANB+Ojo6Dpt2ZKrpMUrf5fdHapSr6ZL0n5I+VLg9AACAGVc0oYqIjZLSZ2gDAACooZIX\n",
+       "pQMAALwuTFp6ZkYat2Pp0qWpsSX7UWzbqXLKgJSKlfJKEZSKzZE7djn7e6n3l1Oioo4liQDgeDZZ\n",
+       "6RmOUAEAAFREQgUAAFARCRUAAEBFJFQAAAAVkVABAABUREIFAABQEQkVAABARSRUAAAAFZFQAQAA\n",
+       "VERCBQAAUFHR4sgpUkt2tEvpmVmz0nPQnNju7u7k2Jz+5pYu6ejoSI6dPbvM7pNTmqVUSRspv2xP\n",
+       "qhNOOKFIbGdnZ1Y/du3alRy7Y8eO5NjBwcHk2KGhoeTY/fv3J8f29PQkx0pSagksSVqyZEly7LJl\n",
+       "y5Jj+/v7k2NfeeWV5FhJGh4eTo7du3dvcuzu3buLxOb8BuTK2Y9yzJ07Nzl2wYIFRbYr5f1u5fx+\n",
+       "5vze79u3Lzk2Z7/I2TelvM8i9W/lVN+lokeobH/a9lO2n7D9ddvpWQMAAEBNFEuobK+U9GFJ50TE\n",
+       "GyR1SHp/qfYAAABapeQpv92ShiX12B6V1CNpS8H2AAAAWqLYEaqI2CHpLyX9t6StknZFxL+Vag8A\n",
+       "AKBVSp7y+1lJfyxppaRlkubZ/u1S7QEAALRKyYvS3yzphxHxfxExIulbki4cHzQ4OHjoX6m7LwAA\n",
+       "AHIdOHBAo6Ojh/5NpeQ1VD+R9Ke250raJ+kdktaND+rt7S3YBQAAgKMzfvqjqaZiKHkN1UZJt0p6\n",
+       "RNLjzdVfLtUeAABAqxSd2DMiPifpcyXbAAAAaDVKzwAAAFREQgUAAFCRc2u9HdPG7ejr60uKzak5\n",
+       "lFuDLeczyInN6UeJmkNSfq27Uv0o9VmUHOtS222HWADA0YmICQvqcoQKAACgIhIqAACAikioAAAA\n",
+       "KiKhAgAAqIiECgAAoCISKgAAgIraMqHat29fq7uACnKnagAAoO7aMqEaGhpqdRdQAQkVAOD1pi0T\n",
+       "KgAAgDohoQIAAKio5aVnWtY4AABApslKz7Q0oQIAADgecMoPAACgIhIqAACAitouobJ9me2f2H7O\n",
+       "9qda3R9Mzvbf2d5m+4kx6xbZvs/2s7bvtb2wlX3E5Gz32b7f9lO2n7T98eZ6xrDN2Z5j+2HbG2xv\n",
+       "sv2Z5nrGrkZsd9heb/u7zWXGr8baKqGy3SHpS5Iuk3SmpKtsn9HaXmEKt6gxVmNdJ+m+iDhd0r83\n",
+       "l9GehiVdExFnSTpf0keb3zfGsM1FxD5Jl0TEmyS9UdIlti8SY1c3ayRtknTwYmbGr8baKqGSdJ6k\n",
+       "zRHxQkQMS/qGpHe3uE+YREQ8IGnnuNVXSPpq8/FXJb1nRjuFZBHxckRsaD4ekPS0pOViDGshIvY0\n",
+       "H3ZJ6lDju8jY1YTtFZLeKekmSQfvGmP8aqzdEqrlkl4cs/xScx3qY0lEbGs+3iZpSSs7gzS2V0o6\n",
+       "W9LDYgxrwfYs2xvUGKP7I+IpMXZ18gVJ10o6MGYd41dj7ZZQMYfDcSQac3Iwpm3O9jxJ35S0JiL6\n",
+       "xz7HGLaviDjQPOW3QtJbbV8y7nnGrk3Zfpek7RGxXq8dnToM41c/7ZZQbZHUN2a5T42jVKiPbbaX\n",
+       "SpLtUyRtb3F/MAXbnWokU7dFxJ3N1YxhjUTEq5L+RdIvibGriwslXWH7eUm3S7rU9m1i/Gqt3RKq\n",
+       "RySdZnul7S5JV0r6Tov7hDzfkXR18/HVku6cIhYtZNuSbpa0KSJuHPMUY9jmbJ908A4w23Ml/bKk\n",
+       "9WLsaiEiro+IvohYJen9kr4XER8Q41drbTdTuu1flXSjGhdZ3hwRn2lxlzAJ27dLWi3pJDXO9/+Z\n",
+       "pG9L+kdJp0p6QdL7ImJXq/qIyTXvCvu+pMf12qmFT0taJ8awrdl+gxoXLc9q/rstIj5ve5EYu1qx\n",
+       "vVrSJyLiCsav3touoQIAAKibdjvlBwAAUDskVAAAABWRUAEAAFREQgUAAFARCRUAAEBFJFQAAAAV\n",
+       "kVABaDnbDzb/+9O2rzrG275+orYA4FhiHioAbcP229SY5PDyjNfMjoiRKZ7vj4j5x6J/ADAZjlAB\n",
+       "aDnbA82Hn5V0se31ttfYnmX787bX2d5o+w+a8W+z/YDtb0t6srnuTtuP2H7S9oeb6z4raW5ze7eN\n",
+       "bcsNn7f9hO3Hbb9vzLb/w/Y/2X7a9tdm9tMAUEezW90BANBrpW8+JemTB49QNROoXRFxnu1uST+w\n",
+       "fW8z9mxJZ0XEfzWXPxQRO5u17dbZviMirrP90Yg4e4K2fl3SL0p6o6TFkn5s+/vN594k6UxJ/yPp\n",
+       "QdtviQhOFQKYFEeoALQTj1v+FUkftL1e0o8kLZL0c83n1o1JpiRpje0Nkh6S1CfptGnaukjS16Nh\n",
+       "u6S1ks5VI+FaFxFbo3FNxAZJKyu8JwCvAxyhAtDuPhYR941d0bzWanDc8tslnR8R+2zfL2nONNsN\n",
+       "HZnAHTx6NTRm3aj4rQQwDY5QAWgn/ZLGXkB+j6SP2J4tSbZPt90zwesWSNrZTKZ+QdL5Y54bPvj6\n",
+       "cR6QdGXzOq3Fkt4qaZ2OTLIAYFr8XxeAdnDwyNBGSaPNU3e3SPqiGqfbHrNtSdsl/VozfuwtyndL\n",
+       "+kPbmyQ9o8Zpv4O+LOlx249GxAcOvi4i/tn2Bc02Q9K1EbHd9hnjtq0JlgHgMEybAAAAUBGn/AAA\n",
+       "ACoioQIAAKiIhAoAAKAiEioAAICKSKgAAAAqIqECAACoiIQKAACgIhIqAACAiv4fPgLxE2ST8JkA\n",
+       "AAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b3b3a50>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEBVJREFUeJztnVuMJNdZx39f36d6+rI9O7PDrveSlQKysSX7xSA5EREK\n",
+       "0fqFwEsiS0hRgIgHboIHTHiJHyMkIsQLEoqNwkWJEMgoIAG2ERJBKIDROnYgjmPJK8/sXHd2unu6\n",
+       "p+99eOg+h+qanktX12Snqs5PKk13zXTpm93/fOec73zfd0QphcUyC4lHbYAlfFjRWGbGisYyM1Y0\n",
+       "lpmxorHMjBWNZWZ8i0ZE7ojIuyLyAxF5MUijLBcb8ROnEZEk8H3gk8B94L+AF5RS3wvWPMtFxK+n\n",
+       "eRZ4Xyl1TynVA74BfDo4sywXmZTPz10D1lzv14GfcP+AiNhQc8hRSsm0+349jRVEjPErmvvAddf7\n",
+       "64y8jSUG+BXNm8BHReSWiGSAzwLfDM4sy0XG15xGKdUXkV8D/glIAi/blVN88LXkPtOD7UQ49AQ9\n",
+       "EbbEGCsay8xY0VhmxorGMjNWNJaZsaKxzIwVjWVm/G5YWk5ARMwFkE6nzZVKpRgOhwwGA/r9PoPB\n",
+       "wLzXXy86VjTnQCKRIJlMkkwmSaVSlMtlKpWKuVqtFo1Gw1ytVmviGg6Hj/pXOBErmnMgkUgYz5LJ\n",
+       "ZFhZWeHWrVvcvHmTmzdvUqvV2N3dNdf+/j7VahWlFO12+1GbfypWNOeA9jDZbJZcLsfKygq3b9/m\n",
+       "qaee4sknn2RnZ4e1tTU+/PBDstks6XQapRSdTscMaReZuUQjIveAOjAAekqpZ4MwKuwkEgkymQy5\n",
+       "XA7HcahUKly9epXbt2/zxBNPUCqVEBE6nQ4HBwc0m03q9TqpVCr6omGUjPUJpdTDIIyJCslkknQ6\n",
+       "zcLCAouLiywsLJDJZEgmkwAMh0P6/T69Xo9Op0Ov16Pf7zMcDglDbX0QS+6L/6fxQyaZTJLJZE4U\n",
+       "zWAwoNfr0e126Xa7RjRhYF7RKOANEXlTRL4QhEFR4CyiOc7ThIF5h6fnlFKbIrIMvC4i7yqlvhWE\n",
+       "YWFGiyaXy02IJpFIoJQyXqbdbnN4eEir1aLX6zEYDKI/PCmlNsdfd4FXGZW2xB49EXZ7mnQ6TSKR\n",
+       "YDgc0ul0aDabVKtVHjx4QLVapdls0u12oy0aEXFEpDB+nQc+BbwTlGFhxj085fN5IxoRYTAY0O12\n",
+       "aTab1Go19vb2qNVqRjRhYJ7h6Qrw6niJmAL+Uin1WiBWhRzvnCaXyxnRuD1NrVbjwYMHNJtN+v0+\n",
+       "/X4/FJ7Gt2iUUh8ATwdoS2jRsRW93+QWTLlcxnEcUqmUCeC1Wi2azSYHBwfUarVQRIHd2IhwACQS\n",
+       "CVKplNlvKhaLVCoVVlZWuHr1KoVCgVQqRavVMtsGjUYjNHMYL1Y0AaCDeZlMhkwmQ6lUYmlpiStX\n",
+       "rnDt2jVEBKWU8TAPHz6k0WjQ6XQetem+sPk0AeBeYufzeUql0oSnKZVKJJNJ42m0aMLqaaxoAkB7\n",
+       "Gi2aQqHApUuXWFpaYmVlhcXFRUSEZrMZieHJiiYAdCqE3qDM5XJkMhkTmxkMBrRaLZMSsb+/H6q4\n",
+       "jBcrmgDQniabzeI4jokA68nxNNFoTxNGrGgCQHuabDbLwsICuVzO5MlME03YIsBerGgCQE+EHceh\n",
+       "UCjgOA7ZbNbkx/T7fVqtFvV6nb29PSsaC+RyOUqlEisrK9y4cYPl5WXy+TyJRMIE8w4PD01A7/Dw\n",
+       "kE6nQ7/ff9Sm+8KKJgCy2SzFYpGVlRUee+wxLl++PCEavZutE8kPDw/pdruh2dX2YoN7AZDNZimV\n",
+       "SiwvL3Pjxg2KxSKLi4skk8ljPY3OoQkjp3oaEXlFRLZF5B3XvYqIvC4i74nIayJSPl8zLx7u2qZc\n",
+       "LkexWGR5eZmrV69SqVRwHGfC02jh6JKVMHuaswxPfwrc8dz7XeB1pdSPAv88fh8b9F6TjgIvLCzg\n",
+       "OI4J7DmOM5F0pdM7dUGcvsIoGDiDaMaZePue2z8LfG38+mvAzwVs14XmONEsLi5SKBQmMvXcInFX\n",
+       "UiqlQisav3OaK0qp7fHrbUa5NbHBLRp3spX2NDphXCllcn+93iasgoEAJsJKKRW3/nq6GE4LxnEc\n",
+       "Mzzl83k6nc4RwXivMON3yb0tIqsAIvIjwE5wJl18UqmUSRovlUrk83lyuRyp1OhvUM9jdMWBu9A/\n",
+       "CvgVzTeBz41ffw7422DMufiIyIRoyuWySenUEWB3Vwi3aMI8JLk5y5L768C/Az8mImsi8nngy8DP\n",
+       "iMh7wE+P38cGr6dxiwame5qwT37dnDqnUUq9cMy3PhmwLaFBF/fn83nK5TL5fN7sNcHRYriwVVCe\n",
+       "ho0I+2BaiYq7grLT6VCv19nd3WV7e5udnR2TQB4FT2P3nnzg7gqhJ8Fe0dRqNdNSZHt7m2q1Grqq\n",
+       "g+OwovGBN71T1zVp0XS7XeNp1tbWIudp7PA0I3r1dNLw1G63jWjW19d58OCBFU3c0GmbqVSKdDpN\n",
+       "uVw2SeOrq6uUSiUymQyDwYBGo2GK4KrVKvv7+xwcHJgi/yhgRXMKIkIymSSbzZp2aLrSYHl5mdXV\n",
+       "VfL5/BHR1Ov1I6IJS9ntaVjRnAEdl9Gbkl5Pk0gkTHF/o9GgXq9PeBqdCmE9TYxwx2WKxeJETdPq\n",
+       "6qrJmdGX19P0er3Qp0O4saunM6A9TaFQoFwuUygUTEDP20Lk8PCQdrs9EdCLSiRYY0VzCiIy0XTx\n",
+       "0qVLFIvFIxUHw+GQbrdLq9U6IpqoCceK5gy4PY0WjTs+Yz2Nh2NyhF8SkXURuTu+vOmgkUJ7Gu/w\n",
+       "5N6kHAwGdDqdqaKJmnD85ggr4CtKqWfG1z8Gb9rFQC+5j+uhB9Dr9Tg8PDSdrXQxXK/Xi4xQ3PjN\n",
+       "EYYY9Q8+rcWrWzTestsoMs+c5tdF5Dsi8nLUS1i8u9reDcper2e6de7u7lKr1UxBXBTxK5o/Bj7C\n",
+       "qOfeJvAHgVl0ATlteNITYDs8nYBSakeNAb5KxPoH6/Oa3GUqOhpcKpVYWFiY6AbRaDSo1Wo8fPjQ\n",
+       "iEZXUUYRX6IZJ5Nrfp6I9Q/Wk1/dR08PS4VCgWKxSC6XM6LRVZP1ep39/X2zox3l4enUbYRxjvBP\n",
+       "AZdFZA34EvAJEXma0SrqA+BXztXKHzLTPI0Wje6fl0wmTQsR7Wm0aHSKZ1SHJ785wq+cgy0XAl2f\n",
+       "7fY0enjSonGfP9npdI4MT1HZYzoOu2E5hUwmw+LiotmgrFQqE1sHrVbLzGfa7TbNZpN2uz2xMRll\n",
+       "7DaCBxEhm82yuLhIpVLhypUrLC0tUSqVcBzHbBv0ej3T3Uo3KYpSbdNJWNF40G3qC4UCS0tLrK6u\n",
+       "srS0ZDxNOp0GmGiJ1mw2Q93ZalasaKZwkqfJZDLApGjcniYOWNFMwdvi1d2pUzOt7DbqcxmNFY0H\n",
+       "ETnSf0b3BNanrejJrj5uJ0yHlgaBFc0U3KLJZrNmn0lvG7i7W+ljBK1oYo7X06TTaVKp1FTRuIcm\n",
+       "K5qY4k7vdCdduROu+v0+7XabRqNBtVql0WjQbrft6inOpNNpHMcx5zZ5l9vu/Bl9BmWUNyi92Iiw\n",
+       "B+1pHMcx0WCdDqE9jRaNbluvl91xEc2JnkZErovIv4jI/4jId0XkN8b3I91HeJqn0Tk0gIkGa09T\n",
+       "r9cjVXZ7GqcNTz3gt5RSPw78JPCrIvI4Eesj7E2yyufz5ggefaLKtImwXnbbibALpdSWUuqt8esG\n",
+       "8D3gGhHqI6yHI90OTbeodx/2pRsA6DhN3DnznEZEbgHPAP9BxPoIu2u1S6WS6TquRaPza7SniTtn\n",
+       "Eo2ILAJ/A/ymUurA/RcX9j7C3m6duvGiWzTucxAsZ8vcSzMSzJ8rpXTr120RWVVKbYW9j/C01ZKu\n",
+       "oNSCUUqZeYtO8Ww2m+YonrAfkDErp62eBHgZ+F+l1B+6vhWpPsKZTMZ06tQ72nq15C651amd7h40\n",
+       "tVrNJGHFJbh3mqd5DvgF4G0RuTu+90VGfYP/SkR+CbgHfObcLDxndP6M4ziUy2UuX748kXAFGNHo\n",
+       "I3jcgqlWq3S7XVOGGwdOFI1S6t843htFoo+wFo0+hN0tGj08DYdDE5txexktnKiceXBWYhkRdk9o\n",
+       "dXtXLZpKpWKO35kWAa5WqxNDkq44iMNcRhNL0cBk1YHucqVFoyPAqVQKpZSpoNRlt97TbrVg4iKc\n",
+       "WIpGJ1q5RaNjNJcuXZrqaZrNpinw157GXQwXF8FAjEWjhTPN0+jewHoi3O12TYH/tFrtOAkGYioa\n",
+       "L1pAWkR6SBIRut0u1WqVvb09tre32dzcZG9vj0ajYQ4DixuxFI32Dvryns+klDKrJRFhZ2eHra0t\n",
+       "Njc3WV9fNzvbnU7nUf8qj4RYikbjFY1bOJp+v29OU9nY2OD+/fsmwBfVAv/TiK1o9LDijrG4S1L0\n",
+       "1el0jGg2Nze5f/8+vV7PXHEklqJxz0OGwyHNZpO9vT02NjZwHMcIRx/ytba2xtbWFvv7+zQajdgF\n",
+       "87zEUjQavRFZr9fZ2NggkUhwcHBghipdorK5ucnOzg4HBweR7NY5K7EVjf4PHwwG1Go1RIRms8nW\n",
+       "1taRw9f1lkGj0TClt3EVDICc9MuLyHXgz4AVRg2M/kQp9Uci8hLwy8Du+Ee/6G0LG5YcG50aoWub\n",
+       "3IeX6q/uOY7elIyDaJRSUxOIThPNKrCqlHprnIj134xSOz8DHCilvnLCZ6P/rxpxjhPNabvcW8DW\n",
+       "+HVDRHSOMMSoj7BlkjMnvbpyhL89vhWbPsKWSc4kmvHQ9NeMcoQbxKyPsGWSE+c0YHKE/x74B0/K\n",
+       "p/7+LeDvlFJPee7bOU3IOW5O4ytHOOp9hC0nc9rq6WPAvwJvM1pyA/we8AKjocn0EXbVQenPWk8T\n",
+       "cnwtuefBiib8+BqeLJZpWNFYZsaKxjIzVjSWmbGiscyMFY1lZqxoLDNzbnEaS3SxnsYyM1Y0lpk5\n",
+       "V9GIyB0ReVdEfiAiLwbwvHsi8raI3BWR//Tx+VdEZFtE3nHd893e9pjnvSQi62Mb74rInRmeF2gL\n",
+       "3hOe59tG4Gi1YVAXkATeB24BaeAt4PE5n/kBUJnj8x9nlEj2juve7wO/M379IvDlOZ/3JeC3fdq3\n",
+       "Cjw9fr0IfB943K+NJzzPt41KqXP1NM8C7yul7imlesA3gE8H8FzfaaZKqW8B+57bvtvbHvM88Gmj\n",
+       "CrgF7wnP820jnO/wdA1Yc71f5/8N9osC3hCRN0XkC3M+S3Me7W3nToUNugVvkOm65yma81jLP6eU\n",
+       "egZ4nlH39I8H+XA18uPz2j13Kqy3Be+8NgadrnueorkPXHe9v87I2/hGKbU5/roLvMpoCJyX7XGp\n",
+       "js5InKu9rVJqR40BvjqrjSe14PVjo+t5f6GfN6+N5ymaN4GPisgtEckAn2XUStYXIuKISGH8Og98\n",
+       "imDSTANtbztPKmzQLXjPLV13ntXMGWbvzzOasb/PqApznmd9hNEK7C3gu36eB3wd2AC6jOZbnwcq\n",
+       "wBvAe8BrQHmO5/0io4rUt4HvjP9zr8zwvI8Bw/HveHd83fFr4zHPe34eG5VSdhvBMjs2ImyZGSsa\n",
+       "y8xY0VhmxorGMjNWNJaZsaKxzIwVjWVmrGgsM/N/z4EQsKT2Kt0AAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b33bd10>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAD4tJREFUeJzt3WuQZVdZh/HnPz2XnjhDpiJJEJg4qKAkBRIEKtwJoBUp\n",
+       "CKgIRAVEC8sCJEaggHzwkxZYlEWkKK1CIpcgoAIGKBWIEiGAJARmcptws4gmIBkvGchA9dA98/qh\n",
+       "z0w6Tc/02b2z+pw9eX5VXXP2PuvstfqsfU6/s/ba601VIUmSpLXbMOkGSJIkDZ0BlSRJUk8GVJIk\n",
+       "ST0ZUEmSJPVkQCVJktSTAZUkSVJPGydZeRLXbJAkSYNRVVlpf9OAKsl5wCXADPD2qvqT5WVOP/30\n",
+       "H3rdgQMH2LZt2932dVkv6/Dhw53a2aX8oUOHxi7bqs1djttynbFjHXt+fp5NmzatuR1d3ouufT0N\n",
+       "fSJJOvE0u+SXZAZ4K3AecCZwQZKHtqpPkiRpUlrOoXoM8PWquqWq5oH3A89uWJ8kSdJEtAyoHgDc\n",
+       "umT7ttG+VW3evLlJg7Q+NmzwXgdJ0r1Ly798a55UYkA1bDMzM5NugiRJ66rlpPRvAjuXbO9kcZTq\n",
+       "bg4cOHD08ebNmw2mJEnS4LQMqK4FHpxkF/At4PnABcsLLb+bT5IkaWiaBVRVtZDkFcDHWVw24dKq\n",
+       "urlVfZIkSZOSSa6fk6RWWodqJa5DtbbjTmIdqr5lXYdKkjStJrKw5zi2bt06VrmNG8dvapey0G0S\n",
+       "dZdjb9myZeyys7OzTcqO+/4e0WUOW5eyXdqxffv2scuecsopY5cFOO2008Yue+qppzYpu2PHjrHL\n",
+       "djmHpqWvu3xGunz2utw92jXA7VJ+YWFh7LJd/gPW5b1IVvw+P6ZW359d+qRLm7v+fkPT5Xzrcg51\n",
+       "PXarPmnVfy3Pi3Hft+N9Tr2/XZIkqScDKkmSpJ4MqCRJknoyoJIkSerJgEqSJKknAypJkqSeDKgk\n",
+       "SZJ6MqCSJEnqyYBKkiSpJwMqSZKkngyoJEmSepp4Lr+5ublJN6GTLrmEWuVUGmLC41ZJibvkVet6\n",
+       "7GlIhN1Fy/x1Joq+S5f8dV3y801LAvhW/XeinxdDdKLnTFxvTUeokuxMcmWSm5LcmOSVLeuTJEma\n",
+       "hNYjVPPARVW1J8k24ItJrqiqmxvXK0mStG6ajlBV1beras/o8QHgZuD+LeuUJElab+s2KT3JLuBs\n",
+       "4Or1qlOSJGk9rEtANbrc9wHgwtFIlSRJ0gmj+V1+STYBHwTeU1WXL3/+zjvvPPp48+bNbNmypXWT\n",
+       "JEmSVtXl7tSmAVUW78m8FNhbVZesVGb79u0tmyBJkrQmy5eWOF6A1fqS3+OB3wDOTbJ79HNe4zol\n",
+       "SZLWVdMRqqr6DK7GLkmSTnAGO5IkST1NPPXMuGkchpiapYsuqSFapS6BdukvWvVflzQgLY/d6r1o\n",
+       "eV4MLRXItLS3S0qiLmW7nBddU4a0SjEyLX0yNF36o+t3XBfTkGrsROIIlSRJUk8GVJIkST0ZUEmS\n",
+       "JPVkQCVJktSTAZUkSVJPBlSSJEk9GVBJkiT1ZEAlSZLUkwGVJElSTwZUkiRJPU089czGjeM1oUsK\n",
+       "h67L3rdaUr9LKpAuv1+X4w4xDU/LFAet0oa0Oj9N93DvYf/de3Tp6y7fLZqsY0YzSX4FKGClpENV\n",
+       "VR8ap4IkM8C1wG1V9aw1tVKSJGmKHW946FksBlTHMlZABVwI7AW2j9soSZKkITlmQFVVv9n34Eke\n",
+       "CDwD+GPgD/oeT5IkaRqtOik9yf2SXJrkY6PtM5P89pjHfzPwGmD8ST+SJEkDM85dfu8EPgHcf7T9\n",
+       "NeCi1V6U5JnAvqrazcrzsCRJkk4I4wRU962qvwEOAVTVPLAwxuseB5yf5BvA+4CnJnn38kL79+8/\n",
+       "+jM3N9eh6ZIkSdNhnDULDiT50SMbSc4BvrPai6rqYuDi0WueDLy6ql60vNyOHTvGb60kSdIUGieg\n",
+       "ehXwUeAnknwOOBV47hrqcpEVSZJ0Qso4C4wl2Qj8NItzob4yuuzXv/KkzjjjjLHKtlr4ElzYc61c\n",
+       "2HNtZV3YU5KGq6pWnBe+6ghVkq3Ay4AnsDjKdFWSv6gqJzxJkiQx3iW/dwPfBd7C4gjVrwGXAb/a\n",
+       "sF2SJEmDMU5AdVZVnblk+5NJ9t5TDRj3zr6Wlz6Gdglmw4bh5bRu9b4tLIxzw+naJOOv9tGlbKs2\n",
+       "tDS0z4gkrbdx/jJ/Kcljj2yM7vL7YrsmSZIkDcvxkiPfsKTMZ5PcyuIcqjOAr6xD2yRJkgZhteTI\n",
+       "kiRJWsXxkiPfsnQ7yWnAbOsGSZIkDc04yZHPT/I14BvAp4BbgH9q3C5JkqTBGGdS+h8BjwW+WlUP\n",
+       "Ap4GXN20VZIkSQMyTkA1X1X/A2xIMlNVVwKPatwuSZKkwRhnHao7kmwHrgL+Osk+4EDbZkmSJA3H\n",
+       "OCNUzwG+D1wEfAz4Ot4BKEmSdNSqI1RVdWQ06hDwzqatkSRJGqDjLex5gMWFPFdSVXWfe6IBs7Pj\n",
+       "rcQwDWlAuuqSVuPw4cMTLzst7ehSdmZmZuyyMB2pTlq9Fy1TLrUyxM+qJK3keOtQbet78CQ7gLcD\n",
+       "Z7EYnP1WVX2+73ElSZKmyTiT0vv4M+Afq+q5STYCP9K4PkmSpHXXLKBKcjLwxKp6MUBVLQDfaVWf\n",
+       "JEnSpIxzl99aPQj47yTvSPKlJH+Z5KSG9UmSJE1Ey4BqI/BI4M+r6pHA94DXNaxPkiRpIlrOoboN\n",
+       "uK2qvjDa/gArBFT79+8/+nh2dnbsu/4kSZKmRbOAqqq+neTWJA+pqq8CTwduWl5ux44drZogSZK0\n",
+       "Llrf5fd7LKar2Qz8O/CSxvVJkiStu6YBVVVdBzy6ZR2SJEmT1nJSuiRJ0r1C60t+q5qbmxurXMvU\n",
+       "M0NLa9MqpU3XY7cqu2HD+HH+xo3dTuFNmzaNXXZhYWHssvPz82OX/cEPfjB22S5apk9pdexp+DxB\n",
+       "t3OulSH2n6S7TP5bRJIkaeAMqCRJknoyoJIkSerJgEqSJKknAypJkqSeDKgkSZJ6MqCSJEnqyYBK\n",
+       "kiSpJwMqSZKkngyoJEmSepp46pmDBw+OVa5lioppSD3T6rhdU050aUeXdB1dyh46dGjssl1SvnQ9\n",
+       "dpe0PV2O2+U97pJap2tfd/n9upRtlZJoWkzD98UQ3zfpRNd0hCrJ65PclOSGJO9NsqVlfZIkSZPQ\n",
+       "LKBKsgt4KfDIqnoYMAO8oFV9kiRJk9Lykt93gXngpCSHgJOAbzasT5IkaSKajVBV1f8Bfwr8J/At\n",
+       "YH9V/XOr+iRJkial5SW/nwR+H9gF3B/YluTXW9UnSZI0KS0npT8K+FxV/W9VLQAfAh63vNDc3NzR\n",
+       "n4WFhYbNkSRJaqNlQPVl4JwkW7N47/DTgb3LC83Ozh796XKLuCRJ0rRoOYfqOuDdwLXA9aPdb2tV\n",
+       "nyRJ0qRkkgvEJamTTz553LIt29GkbKs2dDEtC3t2aUeXRTK7lO1avtXCnl106Q8X9ly7Vt8Brfqv\n",
+       "6/s2Le+zdCKoqhU/2KaekSRJ6smASpIkqaeJzwLftm3bWOVaXXJoqUs7uvx+XXQ9bqtLc60uL7W8\n",
+       "5NfqPJqWc3laPidDMw2fa0nTxxEqSZKkngyoJEmSejKgkiRJ6smASpIkqScDKkmSpJ4MqCRJknqa\n",
+       "yoDq4MGDk26Cepifn590E7RGLqUgSWtjQKV7nAHVcBlQSdLaTGVAJUmSNCQGVJIkST1lkkP8Sby+\n",
+       "IEmSBqOqstL+iQZUkiRJJwIv+UmSJPVkQCVJktTT1AVUSc5L8uUkX0vy2km3R8eW5K+S3J7khiX7\n",
+       "TklyRZKvJvlEkh2TbKOOLcnOJFcmuSnJjUleOdpvH065JLNJrk6yJ8neJG8Y7bfvBiTJTJLdST46\n",
+       "2rb/BmyqAqokM8BbgfOAM4ELkjx0sq3ScbyDxb5a6nXAFVX1EOBfRtuaTvPARVV1FnAO8PLR580+\n",
+       "nHJVNQecW1WPAB4OnJvkCdh3Q3MhsBc4MpnZ/huwqQqogMcAX6+qW6pqHng/8OwJt0nHUFVXAXcs\n",
+       "230+8K7R43cBz1nXRmlsVfXtqtozenwAuBl4APbhIFTV90cPNwMzLH4W7buBSPJA4BnA24Ejd43Z\n",
+       "fwM2bQHVA4Bbl2zfNtqn4Ti9qm4fPb4dOH2SjdF4kuwCzgauxj4chCQbkuxhsY+urKqbsO+G5M3A\n",
+       "a4DDS/bZfwM2bQGVazicQGpxTQ77dMol2QZ8ELiwqu5c+px9OL2q6vDokt8DgSclOXfZ8/bdlEry\n",
+       "TGBfVe3mrtGpu7H/hmfaAqpvAjuXbO9kcZRKw3F7kvsBJPkxYN+E26PjSLKJxWDqsqq6fLTbPhyQ\n",
+       "qvoO8A/Az2HfDcXjgPOTfAN4H/DUJJdh/w3atAVU1wIPTrIryWbg+cBHJtwmdfMR4MWjxy8GLj9O\n",
+       "WU1QkgCXAnur6pIlT9mHUy7JfY/cAZZkK/DzwG7su0GoqouramdVPQh4AfDJqnoh9t+gTd1K6Ul+\n",
+       "EbiExUmWl1bVGybcJB1DkvcBTwbuy+L1/j8EPgz8LXAGcAvwvKraP6k26thGd4V9Grieuy4tvB64\n",
+       "BvtwqiV5GIuTljeMfi6rqjclOQX7blCSPBl4VVWdb/8N29QFVJIkSUMzbZf8JEmSBseASpIkqScD\n",
+       "KkmSpJ4MqCRJknoyoJIkSerJgEqSJKknAypJE5fks6N/fzzJBffwsS9eqS5Juie5DpWkqZHkKSwu\n",
+       "cvisDq/ZWFULx3n+zqrafk+0T5KOxREqSROX5MDo4RuBJybZneTCJBuSvCnJNUmuS/I7o/JPSXJV\n",
+       "kg8DN472XZ7k2iQ3JnnpaN8bga2j4122tK4selOSG5Jcn+R5S479r0n+LsnNSd6zvu+GpCHaOOkG\n",
+       "SBJ3pb55LfDqIyNUowBqf1U9JskW4DNJPjEqezZwVlX9x2j7JVV1xyi33TVJPlBVr0vy8qo6e4W6\n",
+       "fhn4WeDhwKnAF5J8evTcI4Azgf8CPpvk8VXlpUJJx+QIlaRpkmXbvwC8KMlu4PPAKcBPjZ67Zkkw\n",
+       "BXBhkj3AvwE7gQevUtcTgPfWon3Ap4BHsxhwXVNV36rFORF7gF09fidJ9wKOUEmadq+oqiuW7hjN\n",
+       "tfresu2nAedU1VySK4HZVY5b/HAAd2T06uCSfYfwu1LSKhyhkjRN7gSWTiD/OPCyJBsBkjwkyUkr\n",
+       "vO4+wB2jYOpngHOWPDd/5PXLXAU8fzRP61TgScA1/HCQJUmr8n9dkqbBkZGh64BDo0t37wDewuLl\n",
+       "ti8lCbAP+KVR+aW3KH8M+N0ke4GvsHjZ74i3Adcn+WJVvfDI66rq75M8dlRnAa+pqn1JHrrs2Kyw\n",
+       "LUl347IJkiRJPXnJT5IkqScDKkmSpJ4MqCRJknoyoJIkSerJgEqSJKknAypJkqSeDKgkSZJ6MqCS\n",
+       "JEnq6f8BUrepXiOjd3cAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b2b79d0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAGlxJREFUeJztnVls49t93z+Hi7iJu0Rq44xm5l4bAVzAfnEfnKB5CIJr\n",
+       "FEjSlwYGChTpgj50Q/vQJH1o/JgGaFC0D0Xb2EE3JC1auEgKtLUNtIj70DYOfGNf9/pO7r3SSENR\n",
+       "JCVS3ClS5OmD+Ds+5FALN4nS/L/AwZ/L8Mwh/1/9zm8/SmuNAweTwHXfC3Dw8OCQxsHEcEjjYGI4\n",
+       "pHEwMRzSOJgYDmkcTIypSaOUek8p9SOl1J8opX55notysNxQ0/hplFJu4CPgZ4As8IfAV7TWH853\n",
+       "eQ6WEdNKmi8CH2ut97XWXeB3gZ+f37IcLDM8U35uGzi0nr8G/rT9D5RSjqv5gUNrrca9Pq2kcQjx\n",
+       "FmNa0mSBjPU8w6W0cfAWYFrSfBd4Vym1q5RaAX4R+L35LcvBMmMqnUZrfaGU+hvAfwfcwNccy+nt\n",
+       "wVQm960mdhThB495K8IO3mI4pHEwMRzSOJgYDmkcTAyHNA4mhkMaBxPDIY2DiTFtwPLRQ6kfuyjc\n",
+       "brcZHo8Hl8tlhtvtRmtNv99Ha43WmouLC3q9HhcXF1xcXJjXAR5DyZBDmjFQSpnhcrkIhUKsrq6a\n",
+       "q9/vJxAImKuQo9vtcnFxQbVaNaNSqdDr9ej1evT7fXq93n1/vZnhkGYMbMK43W5WV1dZW1szIxqN\n",
+       "Do3z83Pa7Tbn5+ecn5+Ty+XI5XIcHx/T6XTodDqGVA5pHimEMKOkyWQy7OzskEqlWF9fN6PZbNJo\n",
+       "NMw1Go3i9XrpdruUy2Uzb6/XQyn14LeomUijlNoHqkAP6GqtvziPRd03bCnj9XpZXV0lmUyyvb3N\n",
+       "8+fP2djYIJ1Os7GxQSqVotFo0Gg0qNfrNBoN+v0+zWaTcrnM6uqqIcrFxcV9f7W5YFZJo4Gf1lqX\n",
+       "5rGYZYBsTUKYlZUVQqEQsViMtbU1Njc3icfjrK6u4vV6gUtFeWVlhUAggFKKSCRCNBolHo+TSCRw\n",
+       "u90AXFxc0G63325JM8DYSOhDhi1lfD7fEGk2NjYIhUKEQiFWVlaAH5NGKYXH4yESiRCLxYjH4yST\n",
+       "SSNlWq3WPX+z+WAekubbSqke8M+11v9yDmu6d7hcLjweDx6PZ4g06+vrbG5u4vV68Xg8eL1etNa4\n",
+       "3W5DGJ/PZ0iTSCRIJpN0u13a7Ta1Wm3IlH+omJU0X9Ja55RS68C3lFI/0lp/Zx4Lu2vIzXS5XHi9\n",
+       "Xvx+P6urq0NSQ7abUYjSLPMEg0FjnofDYarVKj6fz2xTDx0zkUZrnRtci0qpb3BZ2vLgSCM3XaRF\n",
+       "NBo15nUqleLJkyckk0mCweB9L3UpMEuFZVApFR48DgE/C/xgXgu7S4i15PF4WFlZIRKJkE6n2d3d\n",
+       "5bOf/SyZTIa1tTUCgcCN87wNmEXSpIFvDH4oD/DvtNbfnMuq7hC2eS2kiUajbGxssLu7yzvvvMP6\n",
+       "+rojaSxMTRqt9R7w+Tmu5c5gb0di+cgIBoPGStrZ2WF3d5dwOGzCB9dBa/2GN1kUZjHffT6fiUfJ\n",
+       "Z0bHsuOt8wgrpfD7/UPxo2AwSDAYJBAIEA6Hef78Odvb26ytrREOhwkGg7dSZGV7sq2uaDRKu92m\n",
+       "2+3S7/dRStHpdExAs9fr0e12Tdyq2+0uPXHeOtK4XC78fv9Q7CgSiRAOhwmHw0SjUXZ2dgxpIpGI\n",
+       "kRK3tX7cbjd+v9+QxiaMx+Oh3W7T6XQ4Pz+n0+nQarVot9u0Wi263e6Cf4HZ8daRRiRNNBo1MSQx\n",
+       "p2VITEkkjWw1N5FGtqdRSWMTxufz0Wg0aLVatFotms3mg/MYv5WkCQQCxGIx0uk029vbJJNJMxKJ\n",
+       "hAkDRCKRqZRfj8eD3+8nHA5zfn4+5PsJhUJDcap6vW7CEeI1lhjVsuo4bx1pXC4XwWCQRCLBzs4O\n",
+       "z58/N9tTJBIxRJnGGSc6jdfrJRgMmq0mEAgQiUSGIuH2tVgscnx8jMfj4eLigvPz86FErmUjzltH\n",
+       "GvHYrq2tsbOzw4sXL4YSqvx+Pz6fD5/Ph8cz3c8jpAFYWVkxEkf0GNmW5BoOh3G73SY63mg0TG5O\n",
+       "r9dzSHPfkEy8ZDJpJI3EmcREttM7J4HoNEIar9dLKBQymXsiPYQ4ovyKGd5qtSiXy4YkvV6PTqez\n",
+       "iJ9hJrwVpLFzev1+P5FIhGQyycbGBpnMZceUWXJ4hSzy2O1243K5TBRcIP9GpIhk/ImEqdVqlMtl\n",
+       "4+tZ1sj4oyeN2+025nQ4HGZtbY3d3V3W19cJBoMmQcq+Tgo7sdxOMJdhk9Z2+glB4/E4Ozs79Pt9\n",
+       "/H4/2WyW168v2/3U6/WlSxF99KRxuVxEIhE2NjbY2Nhge3t7iDTAG8SZFFprkzg+bsj25/V6h+Jc\n",
+       "8jgejxvCJBIJVldX0VpTr9c5Pj6e908yMx49aUTSbG5u8uLFC54/f87Ozs5CJI3oLfJYrjKv6Ehi\n",
+       "govESSQSBAIBkskkmUwGr9drCLOM6RQ3kkYp9XXgzwIFrfWfGryWAP498BTYB/681vpsgeucCHLj\n",
+       "7Uy6dDrNs2fP+MxnPkMikTA3ahqMbj8SBpAhiq+tAPf7feDNNAyllEncEjSbTV6/fk0sFjPkWqbY\n",
+       "1G0kzW8D/xT419ZrvwJ8S2v9G4PG078yGPcOsV5kSxDfi9QtiQ9Git5GFeDb3JTRkhWxhGSIlSQj\n",
+       "EAiY+JbEuGT4/f43pJsksycSCTY3N/H5fEP/pxDwvnAjabTW31FK7Y68/HPAnxk8/lfA/2SJSOPx\n",
+       "eMwNiUajRgmWGyaksS0e+3oTzs/PqdVqVKtVarXakHe30WgMkebi4oJQKGQi5aurq8RiMWKxGAA+\n",
+       "n28saUKhkCGN2+2mWq0CLIUJPq1Ok9Za5weP81zm1iwFRNJIuqaEA0TSBAIBVlZW8Hq9M0maWq3G\n",
+       "yckJp6enVCoVU00pFZVSddnr9Yy3WdbSbrcBjPl/HWk2NjaMTtTpdJbCmppZEdZa62XpryfWiF12\n",
+       "kkgkiMViRCIRQ5rbShr7ua1TNJtNzs7OKBaL5HI5SqUS5XKZs7MzSqXSUAig1+sN5RnHYjG01kaP\n",
+       "6ff7byi7Xq+XcDhMMplka2vLzNVsNqlUKm+Y9XeNaUmTV0ptaK2PlVKbQGGei5oEdjWkOO+SySSb\n",
+       "m5vGxM5kMqRSKaLRKD6fzyiXkgw+CttcFq9st9s1JbavX78mm82aIduUbFm25WQPqUoIBoPEYjGa\n",
+       "zabZbkQ5drlc+Hw+otEo6XSaXq9nzHVZc61We8M5CHfXXGBa0vwe8BeBfzi4/ue5rWgKiN9D3PeJ\n",
+       "RILt7W2ePXvG06dPTapDJBK5NWnson47wFiv1zk8POT169fm2mw2h2JJNumk5qnT6dBut02saW1t\n",
+       "jUajQafTeSP1wufzEYvF6PV6ZiuV8IbL5aJUKpkGA7JGwV0Q5zYm9+9wqfSuKaUOgX8A/DrwH5RS\n",
+       "f5mByb3IRd6wvqFqSCHN1tYWL1684MWLF0MeYYley1/2KMTnIjda9JezszMzDg8POTg4MEMkkIxR\n",
+       "k1wkQqvVol6vk0gkODs7M5JGAqOjksbr9RKJRFhZWTEE11oby08IfdeR8NtYT1+54q2fmfNapoZd\n",
+       "SSCk2dzcZHd3l3fffdfk6MpfrO3HEdg3WdITRDKcnZ1xcnLCyckJxWLRkGV/f5+Dg4Mhb/A4JVXM\n",
+       "8kajgd/vJ5VKUalUqNfrnJ+fm3waIcbKygoej4dwOEy/3zfv2zpMr9ej0WiYP4C73KIehUfY7idj\n",
+       "u+ltolwnWUTEi95ydnZGuVw2yu3oNZ/PUyqVzFZ0k1IqZJJtpF6vUyqVyOfzHB4eEo/HjSS0Qw39\n",
+       "fh+Xy2WSxjY2Nsx36Ha71Ot1yuWykYzifV40cR4FaQRXVQLI86tCBKKgik6Sy+U4OjoyPWbEFyPX\n",
+       "s7MzKpUKzWbT3CghzzjITZXHtVqNUqnE8fGxybXp9XrGqSdrFZdAIBAgHo+brEORMqVSiUAgQKfT\n",
+       "Md/tunXMC4+GNKOSxiaN/d4o7OL8er1OpVIhl8uxt7fH3t4e+/v7Q7kvrVZryDsr29F1N8oOaF5c\n",
+       "XAyRRspihDB2/Euufr+fWCxmrp1Oh3K5TC6XIxAI0Gq1zP9xF3jwpBklwyhpxmXf2QFKiR1Jgb7c\n",
+       "jL29PT788EM++uijoe1rmmoB0XcEsj0FAgFDbOmBI+SziSPebVl7q9Uil8sRjUaNs1K2v7uo8nzw\n",
+       "pJkGtu+k0+lQLBYpFArk83mOj485ODigWCxSq9WGPLvzEvsi2arVqgl1yJZXrVYJBoNDmYTLVu77\n",
+       "1pHG9ptIzdHJyQnZbJbDw0MODw/J5XIUi0Xq9bqJWo9Ki1lgk8btdhOPxymXy1SrVer1OsDMecqL\n",
+       "xPKtaMGwTepWq0WtVqNYLJLNZvn000/59NNPjT+mVquZisd5KpjdbpdWq2WSyePxuJE0tVrNmN52\n",
+       "s6RlwltHGsCQptFoUKlUjKTZ29vj5cuXQ4ruIioeRdL0+32j1NqkEY/1ysrKlUQdp9zfFbkeJWmu\n",
+       "+yH7/T7VatW0bT06OmJ/f59sNku5XDalJrIl3ee6bVLYmYXSDmV9fZ1MJoPH4zHEsy26ReFRkgbe\n",
+       "tKoEWmuq1SpHR0e8fPmSTz75hGKxSLFYNKQR5XfRpBlH7nHrHk1JlVqqVCpFJpMxFmOn06FarS68\n",
+       "HvzRkWYcUewUCJE02WyWjz76iA8++MC0dG02myYz7i6cZLK2cWGN6ySNpFWkUikT5RbCXBWEnSem\n",
+       "zRH+KvBXgOLgn/2q1vq/LWqR02Lcnt/v96lUKoY077///ht5M/eJ20oaaWAgfqZqtUqhULiTRPTb\n",
+       "0PK3gfdGXtPAb2qtvzAY90YY+y9vfX2ddDpNPB4nFApda66K1LmPhCZJRw2HwyQSCZOcJRmG4rAT\n",
+       "AowmiYlUEasvl8tRLpdNWGPRuJE0+rJbZ3nMW0thB0pALxqNsr6+zsbGBolEglAoZFq2jhv3CSFN\n",
+       "JBIxlRE2aezk93HZhUKaQqHAwcEBR0dHQwHURWOWDfBvKqX+WCn1NaVUbG4rmhB2kyIhzW0kzX3C\n",
+       "6/UOkSYejw81V7pJ0pyfnxtJc3h4OESapZA0V+CfAc+47LmXA/7R3FY0Iez67IcmaSQPWCRNNBod\n",
+       "kjS2fnKdpMlms5yenppzGRa+/mk+pLU2OcFKqd8Cfn9uK5oQotNIInkymSQSiZhg4H2tScxgO4dZ\n",
+       "EqYSiQTr6+tsbW2RyWSMHhYIBMZaP6P6l/Tnk5jYXVp7MCVplFKbetB4Gvhz3GP/YHF2SasySWjy\n",
+       "+/33VtIqKaj2GQt2QlgymSSVSg2RRlIfrktBHW1ZctdkEUyTI/xrwE8rpT7PpRW1B/y1ha7yGowr\n",
+       "WRG94L4kjUgW+3wFaZYk1RLpdJqtrS2ePHliiH6dpLFrqUTS2Jl6d7n1Tpsj/PUFrGUqjEoaqdH2\n",
+       "+/33uj3ZSWB221nJYRZJ8+TJE6N/XRWctJPdbcKMugzuCstpXkwIW4cQPeI+I8M+n8+U4MqhGlJT\n",
+       "HolEePr0Kdvb24bgkkhuVxzYhGg0Gqauqlarsbe3Z3wzrVbLHHd4V7GyR0GaZYM4G9fW1ox1ZI9U\n",
+       "KkUqlSIWiw0RRgg/WmQnllKhUKBYLPLq1StjMUkZzLwTxa6DQ5oFQOqWUqmUOfPSHuFw2BztI6a1\n",
+       "LR1HKyQqlQr5fJ6DgwNevXrF0dERx8fHxjfT6XTmmiR2ExzSLAB2WW0mk2F7e5utrS0z7C5Y44KV\n",
+       "dsmLOPLy+Tz7+/u8fPmSQqFgEsXsLudLbXI7GIa0MBFFN5PJ8OTJEzY3N0mlUsTj8aH+xDdFovv9\n",
+       "viFMs9mkXq9TrVY5Ozvj9PSUs7Mz0zb2PlrGPkrS3LUSHAqFTFt88UrLibrJZJJoNGpaxN4GYikJ\n",
+       "aaSGXBoM1Ot1c0jHQ+oasdS46x8yFAqRSqXY3d1ld3fXHFsoZy2IlLltvq9IGrv+W6SNlPOKz8Yh\n",
+       "zQPF6uoqqVSKZ8+e8bnPfc7Ej+RUXemGPomksbcn2+QWSTNJE6Z541GSZpxyab832ilr9H3x3IoX\n",
+       "1/b9jJvz3Xff5Z133uHJkyek0+mhz/r9ftMu5LrSYBvtdptyuUw2m+Xo6IhXr15RKBRMKufS99x7\n",
+       "SBiXOjl6kyTsEAwGTWrC6Pv29hKLxYba3o+LZ0kDpc3NTRKJhCGJHW+ynXc3EafdbnNycsKrV694\n",
+       "+fIl2WyW4+Nj0zDpvvGoSDOK6yRNMBg0YQcbbrebra0ttre32d7efuMc7nGhCdvbG4lEhoKVoxHu\n",
+       "20gaKeA7ODjgww8/pFAomO1p6UmjlMpw2Qo2xWVw8l9orf+JeiB9hEcfy/PrJI3H4yGTyZgt5/nz\n",
+       "50NnXI6edwAYCSTkuGo9t7XqhDT7+/v88Ic/NH327tKBdx1ukjRd4O9ord9XSq0Cf6SU+hbwSyxJ\n",
+       "H2HpNNVoNCiXy5yenhKJRNBa4/V68fl8Q/9eKWVM5KdPn74xn9vtNrGh9fV10wBaAopXKbOjVQOy\n",
+       "tttgtHl1qVQyVpJ0qVgmXEsarfUxcDx4XFdKfQhss0R9hPv9Pu12m0qlQrFYJBaL0e12TUbf6Mlw\n",
+       "LpeLcDhMOp1Ga004HH7j/fX1ddPYUdIuRYkdR4TRagGYrMVsp9MxZTT1ep1isUi1Wh1qwrhMuLVO\n",
+       "oy4bUH8B+D8sUR/hUdKsrq6a9hx263iBUopwOIzWmlAoRDr946XLjbcj1NLv7jp9xCbMNJJG+gOX\n",
+       "SiVKpRLFYpFKpbK051neijSDrek/AX9ba12zfzyt77ePcL/fNx0YisXiUKdyaZpow+VyGT9KKpUa\n",
+       "ynyT66gCK1iUpOl2u9RqNU5PTzk+Pn74kkYp5eWSMP9Gay2tX5emj7DoNNJdSvrTSRPEi4uLIR+L\n",
+       "Umqom6bMYV9nWYsQxz7/abRFrH1I2Pn5OScnJ+TzeZP+IA2tm83mw5M06vJX/Rrw/7TW/9h6a2n6\n",
+       "CEtJqijCfr+f9fX1ofiMLTkWGZeSue1On5KmaSeDS4TabgApXc/L5TLFYvFO65gmxU2S5kvAXwC+\n",
+       "r5T63uC1X2WJ+gjLX269XjfnKJ2dnb0R1LvLjlKjOb2jkuX4+JijoyMz7IM55FqtVh8mabTW/4ur\n",
+       "a6OWoo+w1AFJzY/b7aZSqZgD06WrpuSvLHot9vYkEkZiSNI9NJ/Ps7e3x8cff8wnn3xCo9Ew/XCE\n",
+       "6DIeHGkeAuzOVoBptpjP58lms0YxtmNBtt9lnsnnIslsE3o0v7dWq5mO57lcjkKhQLvdNucvCFHs\n",
+       "U+mWDY+CNPJXrZSi1WpxenpKNpvF7/fT7XZNaqVc7bGIioXz83MqlQqlUonT01Ojq4jeIo0hT05O\n",
+       "3sjxtfsSLysePGkAE4+RDt9iRQE0Gg2i0agpsJcqTGnZMer8mwWyPQlp8vk8R0dHpmmSJIaLxKnX\n",
+       "66b+etTCWoby4avw4Eljm7WiT5RKJQBzOHoymTRVAc1mE8B0k1oEhDR2Vwdp15bL5Ux7NluyyHex\n",
+       "v9ey4sGTBob9LHa7VVGS7Vzber1uXpPsOEldsHv32qkQdinsOBMahmNP0skhl8uRz+cpFotmm5KT\n",
+       "5x4yHgVpbEiBfKvVAjBZ/XIqW6lUolarUalUKJfLpFIpkxAuB6OK4uz3+00vO/tgU/v8J5Fctjkv\n",
+       "JnUul+Pk5MSYz+M81A8Rj5Y0UnQmUqZarbKyskIgEBjK7C8UCibZSq4Sm5K2rEJC0UVEsRXnnA2l\n",
+       "FKenp0NDSLPoBop3hUdJGtlK2u320FGFkoAl5zcVCgXW1tZIp9NDTQ/FGSjnEYxKqnw+b0ahUDAK\n",
+       "sMB20tVqNeOjua/qgXnj0ZEGuNZctQ/VEgki3ctF75EjkMXiEokiQ6wgGaOQKgK52r2JHwMeJWmu\n",
+       "g2xfzWZzqG5aTpArFAqmikB0HPvMbTnexx4C27knyrb4YKRA35E0DxBiUQFmC2s2m5TLZQKBgOni\n",
+       "4PP5THqn3HyJHdlSRHJe7O1p9IhlGcvssJsE6jrmX5Mj/FVu6CN8nzk2N2FUzxmXCG6/P5rmYB+h\n",
+       "PI4IVzUaWmaH3ThorcdGeG8izQawYecIA7/AZVS7prX+zWs++3B+HQdjcRVpps0RhiXpI+zg7nHr\n",
+       "XAErR/h/D15aij7CDu4etyLNYGv6j1zmCNdZoj7CDu4e1+o0YHKE/wvwX0dSPuX9XeD39eCwDet1\n",
+       "R6d54LhKp7lW0lyVIzxIJhfcax9hB3ePm6ynnwT+APg+lyY3wN8HvsLl1mT6CFt1UPJZR9I8cExl\n",
+       "cs8ChzQPH1NtTw4cjINDGgcTwyGNg4nhkMbBxHBI42BiOKRxMDEc0jiYGAvz0zh4vHAkjYOJ4ZDG\n",
+       "wcRYKGmUUu8ppX6klPqTQRfQWefbV0p9Xyn1PaXU/53i819XSuWVUj+wXksopb6llHqplPrmJLlB\n",
+       "V8z3VaXU68Eav6eUem+C+TJKqf+hlPqhUuoDpdTfmmWN18w39RqB8fms8xiAG/gY2AW8wPvAT8w4\n",
+       "5x6QmOHzP8VlItkPrNd+A/h7g8e/DPz6jPP9GvB3p1zfBvD5weNV4CPgJ6Zd4zXzTb1GrfVCJc0X\n",
+       "gY+11vta6y7wu8DPz2HeqdNMtdbfAcojL/8cl21tGVx/Ycb5YMo1aq2PtdbvDx7XAbsF78RrvGa+\n",
+       "qdcIi92etoFD6/lrfrzgaaGBbyulvquU+qszziVYRHvbmVNh592Cd57puoskzSJs+S9prb8AfBn4\n",
+       "60qpn5rn5PpSjs+67plTYUdb8M66xnmn6y6SNFkgYz3PcCltpobWOje4FoFvcLkFzor8oFRHMhJn\n",
+       "am+rtS7oAYDfmnSN17XgnWaN1nz/VuabdY2LJM13gXeVUrtKqRXgF7lsJTsVlFJBpVR48DgE/Czz\n",
+       "STOV9rYwh/a2s6TC3qIF70RrXFi67izWzC209y9zqbF/zGUV5ixzPePSAnsf+GCa+YDfAY6ADpf6\n",
+       "1i8BCeDbwEvgm0Bshvn+EpcVqd8H/nhwc9MTzPeTQH/wHb83GO9Nu8Yr5vvyLGvUWjthBAeTw/EI\n",
+       "O5gYDmkcTAyHNA4mhkMaBxPDIY2DieGQxsHEcEjjYGI4pHEwMf4/w2zPGHuGeikAAAAASUVORK5C\n",
+       "YII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b23ec90>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEJBJREFUeJzt3X2wXVV5x/HvjxtIbkxEgoINxsZSoYbRiiiD4guoZaiD\n",
+       "KK1VaavWdux01ApUHZWZdvijHa1OR3ScdsZCfcGqbdXiS1GhlSoqgkTeXxQ70oIi0BIhCSG5CU//\n",
+       "OCfhcsnN3TubnXNO+H5m7nD2Ps/Za92zzrk82Wvv9aSqkCRJ0u7bZ9QdkCRJmnQmVJIkSR2ZUEmS\n",
+       "JHVkQiVJktSRCZUkSVJHJlSSJEkdLRpl40lcs0GSJE2MqsrO9veaUCU5ETgbmALOqaq/nhuzevXq\n",
+       "h71u3bp1HHDAAQ/Zt2hR865OTU217Wer+Kba9Hnx4sWNY5ctW9Y49uCDD24cC7By5crOsRdeeCEn\n",
+       "nHDCQ/YdcsghjY974IEHNo5dvnx541iA6enpxrF9rdE2MzPTOPb2229vHNtm7AAOOuigh+07++yz\n",
+       "Of300x+2f8WKFY2Pu3Tp0saxbb6rbcZjy5YtjWMBNm7c2Dj2rrvuahx70003NY694IILGsceeeSR\n",
+       "O93/la98hZNOOulh+4899tjGxz788MMbx+63336NY/v6O7u3OOusszjrrLNG3Y15bdu2rXFsm+/f\n",
+       "/fff3zh206ZNjWMB1q9f3zj23nvvbRR39NFHz/tcb1N+SaaAjwAnAmuAU5M8ra/2JEmSRqXPa6iO\n",
+       "Bn5cVbdU1QzwWeAVPbYnSZI0En0mVIcAt87avm24b0FLlizppUPaMw499NBRd0G76Zhjjhl1F9TB\n",
+       "YYcdNuouaDcdd9xxo+6COuozodrtC1DaXOei8WNCNblMqCabCdXkMqGafH1elP5TYNWs7VUMzlI9\n",
+       "xLp163Y8XrJkicmUJEkaC2vXrmXt2rWNYvtMqK4AnppkNfAz4DXAqXOD5t7NJ0mSNA6OOuoojjrq\n",
+       "qB3b55xzzryxvSVUVbU1yVuBrzNYNuHcqrqxr/YkSZJGpdd1qKrqq8BX+2xDkiRp1Ea6UjrAfffd\n",
+       "1yiuzaJ+bRYgaxvfJvaBBx4Y+XHbLk7Z17H7WiRTkqRxYC0/SZKkjkyoJEmSOjKhkiRJ6siESpIk\n",
+       "qSMTKkmSpI5MqCRJkjoyoZIkSerIhEqSJKkjEypJkqSOTKgkSZI6MqGSJEnqaOS1/JYtW9YoLknj\n",
+       "Y7aJ7VNfte62bt3aSyzAli1bGsfOzMz0EttXbUUYj5qCfX0+29Rh3J14DYzD35dx+BxLeqhez1Al\n",
+       "WZXk4iTXJ7kuydv6bE+SJGkU+j5DNQOcUVVXJVkGrE1yUVXd2HO7kiRJe0yvZ6iq6udVddXw8Qbg\n",
+       "RmBln21KkiTtaXvsovQkq4Ejgcv2VJuSJEl7wh5JqIbTfZ8DThueqZIkSdpr9H6XX5J9gc8Dn6qq\n",
+       "8+c+f/fdd+94PD09zfT0dN9dkiRJekT1mlBlcH/xucANVXX2zmJWrFjRZxckSZJ61/eU37HA7wPH\n",
+       "J7ly+HNiz21KkiTtUb2eoaqqb+Nq7JIkaS9nsiNJktTRyEvPNC110qbcQ9uyDG3i25Tr6Ou4bcqt\n",
+       "tC0v0ubYbcrJ9FWGp88yIPvs0/zfG236MTU11Uvspk2bGsdCf2OytxuH96Lt534c+izt7TxDJUmS\n",
+       "1JEJlSRJUkcmVJIkSR2ZUEmSJHVkQiVJktSRCZUkSVJHJlSSJEkdmVBJkiR1ZEIlSZLUkQmVJElS\n",
+       "RxNTeqbPcitt9FVOpq/YPktO9PU+tyn50lZf5Vb6GpOm3w+ArVu3No7VZLOUjDR+5k2okvw2UMDO\n",
+       "ikZVVX2hSQNJpoArgNuq6uW71UtJkqQxtqszVC9nkFDNp1FCBZwG3AAsb9opSZKkSTJvQlVVf9D1\n",
+       "4EmeBLwM+Cvgz7oeT5IkaRwteLFKkicmOTfJ14bba5L8UcPjfxB4J9DfRU2SJEkj1uTq348DFwIr\n",
+       "h9s3A2cs9KIkJwF3VtWV7Pw6LEmSpL1Ck4Tq8VX1T8A2gKqaAZrcTvQ84OQkPwE+A7w4ySfnBm3Y\n",
+       "sGHHT5s7miRJksZFk2UTNiQ5cPtGkmOAexZ6UVWdCZw5fM2LgHdU1evnxi1btqx5byVJksZQk4Tq\n",
+       "7cCXgV9J8l3gCcCrdqMtF06RJEl7pTRZIC7JIuBwBtdC/XA47de98aQOOuigRrEu7Ll7sZO4sGfS\n",
+       "3yV3fS3s2Sa2ze/X5j12YU9J6l9V7fSP+IJnqJJMA28Gns/gLNMlSf6uqu5/ZLsoSZI0mZpM+X0S\n",
+       "uBf4MIMzVL8LnAf8To/9kiRJmhhNEqojqmrNrO1vJLnhkerA9PR0o7iZmeazjG2notpMJ7aJbTO1\n",
+       "s2hR87KKbWrdtZ0+62u6ra8p27bTXG360UabKb999923cezKlSsXDhpq+7nfuHFj49h77lnwPpQd\n",
+       "Nm3a1Di2r+9TW23G5IADDmgcu2bNmoWDhk455ZTGsddee23jWIBLL720cezNN9/cOLbNndnWH5xs\n",
+       "bf6/0+b7tGTJksaxS5cubRwL8JjHPKZx7P77798obu3atfM+1+Qd+kGS527fGN7lN/8RJUmSHmV2\n",
+       "VRz52lkx30lyK4NrqJ4M/HAP9E2SJGkiLFQcWZIkSQvYVXHkW2ZvJzkIaD7ZKUmS9CjRpDjyyUlu\n",
+       "Bn4CfBO4Bfhqz/2SJEmaGE0uSv9L4LnAj6rqKcBLgMt67ZUkSdIEaZJQzVTV/wL7JJmqqouBZ/fc\n",
+       "L0mSpInRZPGjdUmWA5cA/5jkTmBDv92SJEmaHE3OUL0SuA84A/ga8GO8A1CSJGmHBc9QVdX2s1Hb\n",
+       "gI/32htJkqQJlPnKASTZwGAhz52pqnps58aTWrZsWaPYqampxsdtE9s2vs3y+21i22hTwqHPMjx9\n",
+       "lYhp04e25Sza9LlNqZM2sW0+F21+v82bNzeOhXbvxTiUDemrlNPuxDfV1+e+7fdaejTqo1xVVVFV\n",
+       "Oz3wrtahapbp7EKSxwHnAEcwSM7+sKq+1/W4kiRJ46Sff5Y96EPABVX1qiSLgOaVCiVJkiZEbwlV\n",
+       "kv2BF1TVGwCqaivQvGS9JEnShOjnIp+BpwB3JflYkh8k+fskS3tsT5IkaST6TKgWAc8C/raqngVs\n",
+       "BN7dY3uSJEkj0WdCdRtwW1V9f7j9OQYJ1kNs3rx5x0+bO2IkSZL6NLyrb8fPrvR2DVVV/TzJrUkO\n",
+       "q6ofAS8Frp8bt3jx4r66IEmStNvmLr2wq6Sq77v8/pRBuZr9gP8C3thze5IkSXtcrwlVVV0NPKfP\n",
+       "NiRJkkatz2uoJEmSHhX6nvJb0NKlzVZS6LPkS1+lZ/pY9r6tNuUsALZs2dJLbF+lS/osM9Rm/Nq8\n",
+       "z21KxPRZhmfStPn9ZmZmWh27zQ0xbT4Xbfq8t4+ftKft6e+UZ6gkSZI6MqGSJEnqyIRKkiSpIxMq\n",
+       "SZKkjkyoJEmSOjKhkiRJ6siESpIkqSMTKkmSpI5MqCRJkjoyoZIkSepo5KVnmpYC6bOMywMPPNA4\n",
+       "ts1S9m363LaESlNtSmpAu/eirxIcbfrc9vfrqxRBX5+LNqWO2v5ubcZ6bzcOpZHajIdjp3HW1/8b\n",
+       "xl2vZ6iSvCfJ9UmuTfLpJIv7bE+SJGkUekuokqwG3gQ8q6qeDkwBr+2rPUmSpFHpc8rvXmAGWJpk\n",
+       "G7AU+GmP7UmSJI1Eb2eoqupu4G+A/wF+Bvyiqv69r/YkSZJGpc8pv0OB04HVwEpgWZLf66s9SZKk\n",
+       "UenzovRnA9+tqv+rqq3AF4DnzQ1av379jp/Nmzf32B1JkqR+9HkN1U3AnyeZBu4HXgpcPjdo+fLl\n",
+       "PXZBkiSpf31eQ3U18EngCuCa4e6P9tWeJEnSqPS6sGdVvR94f59tSJIkjZqlZyRJkjoyoZIkSepo\n",
+       "5LX8tm3b1ihuXOpc9VVTsE09o77q4kHz8YB273Nfxx0XbWrutdHX50K7r83fgLbfP2lv8Gj9W+QZ\n",
+       "KkmSpI5MqCRJkjoyoZIkSerIhEqSJKkjEypJkqSOTKgkSZI6GsuEasuWLaPugjpos0SCJEl7AxMq\n",
+       "PeImcR0pSZK6GMuESpIkaZKYUEmSJHWUUS4Rn+TRuT69JEmaSFW10/pTI02oJEmS9gZO+UmSJHVk\n",
+       "QiVJktTR2CVUSU5MclOSm5O8a9T90fyS/EOSO5JcO2vfiiQXJflRkguTPG6UfdT8kqxKcnGS65Nc\n",
+       "l+Rtw/2O4ZhLsiTJZUmuSnJDkvcO9zt2EyTJVJIrk3x5uO34TbCxSqiSTAEfAU4E1gCnJnnaaHul\n",
+       "XfgYg7Ga7d3ARVV1GPAfw22NpxngjKo6AjgGeMvw++YYjrmquh84vqqeCTwDOD7J83HsJs1pwA3A\n",
+       "9ouZHb8JNlYJFXA08OOquqWqZoDPAq8YcZ80j6q6BFg3Z/fJwCeGjz8BvHKPdkqNVdXPq+qq4eMN\n",
+       "wI3AITiGE6Gq7hs+3A+YYvBddOwmRJInAS8DzgG23zXm+E2wcUuoDgFunbV923CfJsfBVXXH8PEd\n",
+       "wMGj7IyaSbIaOBK4DMdwIiTZJ8lVDMbo4qq6HsduknwQeCcwu7SE4zfBxi2hcg2HvUgN1uRwTMdc\n",
+       "kmXA54HTqmr97Occw/FVVQ8Mp/yeBLwwyfFznnfsxlSSk4A7q+pKHjw79RCO3+QZt4Tqp8CqWdur\n",
+       "GJyl0uS4I8kTAZL8EnDniPujXUiyL4Nk6ryqOn+42zGcIFV1D/BvwFE4dpPiecDJSX4CfAZ4cZLz\n",
+       "cPwm2rglVFcAT02yOsl+wGuAL424T2rnS8Abho/fAJy/i1iNUJIA5wI3VNXZs55yDMdcksdvvwMs\n",
+       "yTTwG8CVOHYToarOrKpVVfUU4LXAN6rqdTh+E23sVkpP8pvA2Qwusjy3qt474i5pHkk+A7wIeDyD\n",
+       "+f6/AL4I/DPwZOAW4NVV9YtR9VHzG94V9i3gGh6cWngPcDmO4VhL8nQGFy3vM/w5r6o+kGQFjt1E\n",
+       "SfIi4O1VdbLjN9nGLqGSJEmaNOM25SdJkjRxTKgkSZI6MqGSJEnqyIRKkiSpIxMqSZKkjkyoJEmS\n",
+       "OjKhkjRySb4z/O8vJzn1ET72mTtrS5IeSa5DJWlsJDmOwSKHL2/xmkVVtXUXz6+vquWPRP8kaT6e\n",
+       "oZI0ckk2DB++D3hBkiuTnJZknyQfSHJ5kquT/PEw/rgklyT5InDdcN/5Sa5Icl2SNw33vQ+YHh7v\n",
+       "vNltZeADSa5Nck2SV8869n8m+ZckNyb51J59NyRNokWj7oAk8WDpm3cB79h+hmqYQP2iqo5Oshj4\n",
+       "dpILh7FHAkdU1X8Pt99YVeuGte0uT/K5qnp3krdU1ZE7aeu3gF8HngE8Afh+km8Nn3smsAa4HfhO\n",
+       "kmOryqlCSfPyDJWkcZI52ycAr09yJfA9YAXwq8PnLp+VTAGcluQq4FJgFfDUBdp6PvDpGrgT+Cbw\n",
+       "HAYJ1+VV9bMaXBNxFbC6w+8k6VHAM1SSxt1bq+qi2TuG11ptnLP9EuCYqro/ycXAkgWOWzw8gdt+\n",
+       "9mrzrH3b8G+lpAV4hkrSOFkPzL6A/OvAm5MsAkhyWJKlO3ndY4F1w2Tq14BjZj03s/31c1wCvGZ4\n",
+       "ndYTgBcCl/PwJEuSFuS/uiSNg+1nhq4Gtg2n7j4GfJjBdNsPkgS4EzhlGD/7FuWvAX+S5Abghwym\n",
+       "/bb7KHBNkrVV9brtr6uqf03y3GGbBbyzqu5M8rQ5x2Yn25L0EC6bIEmS1JFTfpIkSR2ZUEmSJHVk\n",
+       "QiVJktSRCZUkSVJHJlSSJEkdmVBJkiR1ZEIlSZLUkQmVJElSR/8PcYZmdpOLkfYAAAAASUVORK5C\n",
+       "YII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b1bd290>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAFYRJREFUeJztnVuMZHldxz+/ut+7q+89072zs8Oa8EACL/gARB4IWWIi\n",
+       "+qIhMRpE44OiURMRHwSjD0gCMb4QlV2Dl4BGAwETFTAa8cHLml12UXbZTRimZ/tW3VXVdb//fej6\n",
+       "/ffUmeqeruq6nJo5n+Skbl2nf931rd//8rscMcbg4zMKgXkb4LN4+KLxGRlfND4j44vGZ2R80fiM\n",
+       "jC8an5EZWzQi8oyIvCIir4nIxyZplI+3kXH2aUQkCLwKvA94A/hv4EPGmO9O1jwfLzKup3kn8Lox\n",
+       "5q4xpg18Cfjg5Mzy8TKhMd93E9hzPL4P/LDzB0TE32pecIwxMuz5cT2NL4jHmHFF8waw63i8y7m3\n",
+       "8XkMGFc0zwNPi8iTIhIBfgr46uTM8vEyY81pjDEdEfll4J+AIPCsv3J6fBhryX2lE/sT4YVn0hNh\n",
+       "n8cYXzQ+I+OLxmdkfNH4jIwvGp+R8UXjMzK+aHxGxheNz8j4ovEZGV80PiPji8ZnZMZNwgJARO4C\n",
+       "JaALtI0x75yEUdNGROwRCAQIBoOEQiGCwSDBYJB2u02n07G3s7AnEAhYmwCMMWhc0H1/3lxLNJwn\n",
+       "Y73XGJOfhDGzQEQIhUL2CIfDLC0tkclkyGQyJBIJ8vm8PQqFwtRtCoVCRCIRwuEwkUiEXq9Ht9ul\n",
+       "2+3S6XTs416vR6/Xm7twrisagKGRUC8TDAaJRCLEYjHi8ThbW1tsb2+ztbXF6uoqd+/e5e7du7Tb\n",
+       "7ZmIJhgMWlsSiQTdbpd2u02r1aLVatFutxERK6B5MwlP800R6QJ/bIz50wnYNFXU00SjURKJBOl0\n",
+       "mu3tbe7cucNb3vIWbt68STwep91uk8/PxoGGQiFisRjpdJpMJkOn06Fer9NoNOywBXhCMHB90bzL\n",
+       "GHMgIuvAN0TkFWPMtyZh2DTRDymRSLC0tMT6+jo7Ozs89dRT3Lp1i0KhwBtvvEE8Hp+pPalUimw2\n",
+       "S6fTIRaLUa/Xqdfr1Go16vU6xhja7fZiD0/GmIP+bU5Evsx5aYunRSMihMNh4vE4mUyGbDZLJpMh\n",
+       "Ho8TDocfmJDOglAoZO1ZXV214tCjWCySz+cREZrN5tw9ztiiEZEEEDTGlEUkCbwf+N2JWTYlVDSJ\n",
+       "RMKKJpVKDYhm1sJxDk8rKysEg0G7Yur1euRyOUSERqNBsViciU2X2nuN924CX+7/Y0PAXxljvj4R\n",
+       "q6aIiBCJRB7wNIlEgkgkMnMvA4OeZmVlhVgsZrcCAoEA0WiUZrNJsVgkEJj/1trYojHGfB94+wRt\n",
+       "mQnDPE06nZ7r8BQIBAiHw8RiMZLJJMlkkmg0SjQaJRKJ0G63OT09JR6PW/vmOa+ZxJJ7IVARqKfR\n",
+       "SfDKygrpdJpYLEYoNJ9/R6/Xo91uU6/XqVQqBINBwuEwwWCQeDxOPB4nGo3a5wKBgB2+5iGe+fu6\n",
+       "GeAUTCAQIBKJkEwmB0QTj8fnJpput0ur1aJer1Mul2k0GvR6PTtsOUUTCoXm4g2dPBaiAQbCBupp\n",
+       "nMPTPD2NUzSVSoVGo0G32yUYDJJIJIZ6mnkK57EYnlQs7rnD0tIS2WzWzh0AG2+a5XZ9r9ej0+nQ\n",
+       "arVoNBq0220AuzWgoolEIlY0ML841CMvGp34RiIRIpEI6XSa5eVlG29Kp9MD3/RGo0GhUKBSqdgP\n",
+       "bxY2qqg1DhWNRu0GpIpGg6o6p5kXj41oNK6jgtEjnU5TLpft0HB2dkY+n6dardJqtWZmozPaHg6H\n",
+       "iUaj1uZhw1Ov1/PnNNPCKZpUKsXy8vKAp0mlUoRCIVqtFmdnZxwdHVlPM0vRiIgVzTBPo1Fw95xm\n",
+       "HjySnsa5WgoGg0SjURvXWVtbI5vNWi+j8SUdlg4ODsjn8zMdnnRY0mFUBaORb53P6Mpp3qunR040\n",
+       "KhQ94vE4a2tr3Lx5kxs3bnDjxg1u3rzJ0tISgUCAer3O2dkZJycn7O/vs7e3x9HREaVSiWazOROb\n",
+       "3bGn5eVlksmk3aH2Go+caODNfJlIJEIqlbKieeqpp3jiiSdYXV0lk8kQCASo1WpWNAcHB9y7d49i\n",
+       "sThX0SwtLQ2ENbzGIycazZfR+FI6nbapD3fu3OHOnTtWUCJiPU0ul7OeRlMRfE8znIeKRkSeA34U\n",
+       "ODbGvK3/3Arw18At4C7wk8aY+YdfeVM0zr2YtbU1bty4wa1bt7h9+7bNiGu1WpTLZQqFAicnJxwd\n",
+       "HbG/v29TK7vd7szsdQYsve5prrJ6+jPgGddzvwV8wxjzQ8A/9x97gkAgQDweJ5vNsr29ze7uLhsb\n",
+       "GywtLRGPxxERarUauVyOu3fv8sorr3Dv3j1yuRzVanUmubi6xNY8Zc0iTKVSAxP0cDi8mKLpZ+K5\n",
+       "E2V/DPhC//4XgB+fsF1jEwgESCQSZLNZtra22N3dZXNzk+XlZWKx2AOiefXVV7l37x4nJydUKhUr\n",
+       "mmkGA937Mrono6LR/J5QKORJ0Yw7p9k0xhz17x9xnlvjCS7zNE7RnJyc8IMf/IDXX3+d4+Nj62mm\n",
+       "LRgYXOEN8zSpVMq+7lxae0VA154IG2PMvPvrOf+poVCIZDLJysoKW1tb7Ozs2NVSLBYDsKK5d+8e\n",
+       "r732GuVymUqlYoenaaNeRvdldOdXE8vj8fhA6sMshDwK44rmSES2jDGHIrINHE/SqFHQIKSuiDKZ\n",
+       "DGtra6yurrK6uko2myWRSBAMBm0gslarUa1WqVQqNoTQarVmIhjAbjam02nS6fQDw6eWsGjwtFwu\n",
+       "U6vVbDBTa6LmlSs8rmi+Cvws8Af9269MzKIRERGi0ajNeFtZWWFtbc0KJ5vN2jyUbrdLo9EYEI2G\n",
+       "C1qt1sw+hEgkYld1a2trbG9vs7y8bCfqnU6HRqNBo9GgXq9TKpWoVqs0Gg06nc7Mo/BurrLk/iLw\n",
+       "I8CaiOwBvwN8CvgbEfkI/SX3NI28DM2h1bjS+vr6A55G/8FaT+QWjX5zZ+lpMpkMGxsb7OzssLm5\n",
+       "STabtcNnp9Oh2WxSrVYpl8vW0zSbTetpPC0aY8yHLnjpfRO2ZSw0fdMZW1LR6J6HfmtbrdYDgqlU\n",
+       "KjO31yma3d1dtra2hnqaSqVCsVjk7OzMJmepaOaZ7rnwO8LO4UlFs7S0NLCjqpt4xWKRk5MTTk9P\n",
+       "Zx6QdB7OVNONjQ1WVlasvQCtVotSqcTx8TGHh4ccHBxQKBSo1WpzFww8AqJxDk8qmuXl5YEdVf0Q\n",
+       "NL6kUexZpT44N/JCoZBdWq+urrKxsWFrr9TeZrPJ2dkZx8fH7O3tWZt1dTfvldTCi0Y9jVM0F3ka\n",
+       "FY16mlmLRld4bk/jDBsANJtNSqUSuVxuQDROTwN+uufYaKL4VUSTy+Xmmi+jnSqSyaSNM62vr5NM\n",
+       "Jm0+DbwpmuPjY+7fv8/R0RHFYvEB0cyLhRSNsymRe0c1nU7bb20wGKTX69FoNGwkex6eJhwOk0ql\n",
+       "yGQytuGAM5Kt6ZvaVkRXTaVSiWKxaPeSvFD8DwsoGmcpim7sOWM3GuzTD8MYQ6PRGJjT6A7wrDyN\n",
+       "esLV1VXW1tasaBKJhK3q1AZG3W7Xru6cotGVky+aMVHBuGM3usPq9DS6fHV6mmazaTf0ZoGKZmVl\n",
+       "he3t7Qc8DTDQxGiYp9FNPV80Y+IM+A3zNDrhVLevnkZFM2ucotna2npg3tVutwdKc1U0Z2dndo/G\n",
+       "SyykaODBWqFwOGyPXq9nd371n1+r1WbSdHEYaqOKW8tRtOhNBdNoNKhWqwOxMC94FjcLKZqLunNq\n",
+       "xn6z2aTRaNBsNsnn83blMas5jBv36slZwwRviqbZbNrApJdFs3B1T27BuNMMnJ5G0zjPzs6o1+tz\n",
+       "9zSaBuEsR3FGtTWYqiulWcXCRuWhohGR50TkSERedjz3SRG5LyIv9A93OujUcQpHd1rdoikWi+Ry\n",
+       "Oc94Gq1nusjTuIcnr0x83YybI2yAzxpj3tE//nHypg3HmZikyd9aOF+r1QbEod0h3AVnWqU4rUw4\n",
+       "Z7WkVndqDbmGOZLJpM0B7na7dmhyp0EspGguyBGGOfYPVtHo3oaKplqtWtE4A5mzFo27mF9Fk81m\n",
+       "B5bb0WjUikaX2qVSiUqlYocoL3KdOc1HReTbIvKsiCxPzKIrcJFo1NN0Oh0byEwmkyQSCWKx2AP1\n",
+       "0LP0NJrvo2XBzjCH29NoGsTCepoL+Bxwm/OeewfAZyZm0RVwDk9u0VSr1Qs9jWbwOSeh0+Ci4ckZ\n",
+       "hR8mmmq1+kDujBdFM9aS2xhjc4JF5PPA1yZm0dV+/4C30d1UTbbSzT3dC1lbW2NnZ4dqtWpLcfVo\n",
+       "NBoj/35n501nFwdnaENtiEQi3Lp1i42NDVtlMEyszhyZeac+PIyxRCMi29p4GvgJ4OXLfn6SGGNs\n",
+       "d0u3t2k2mzSbzYFWqvF4nPX1dWq1GsYYEokExWLRHuVyeWQbnBuJejjzZbTzg97evn2bjY0Nksnk\n",
+       "lf4+rzNOjvAngPeKyNs5X0V9H/jFqVrpQr+J6tqdQ1Sj0bB9eLWt6vr6Or1ej2g0yvLyMsfHx/YY\n",
+       "5/oH2gZED6dXcbYK0d+/vb3N5uYmqVRqaDvXhz32GuPmCD83BVuujDMJySka9TTdbteKJp1OY4yx\n",
+       "gllfX7dJT7qSGhWdXOttPB5/QEjOQ7tuJZPJC+dR7mHJy8JZyDCCE61jKhQKHB4eEo1G7Ra8czkL\n",
+       "5x4im80OTJSXl0df+KkX0Vv3cKSHvq6T8YtqszudzkDLk0KhQLVapdlselI8j4xo8vk8+/v7ADZq\n",
+       "LCL0ej27UtK5R6/Xs00bNzY2Rv6d6qH01jmncXazcovoopaz7XbbCv/o6IjT01PK5bInLp4xjIUX\n",
+       "jSYt6VVKNGajy15tCJBIJAZax2vfmnFzapzDiDMGNmz1pIK6aOXUarWoVCpWNMVi0fbH8T3NFFBP\n",
+       "o1n89XodYGAVo3MaTbtMp9PX6lnnvL6lc9dWz+eMgzmHpIt+n3qaYrHI8fEx5XLZFu/5opkCWjnZ\n",
+       "bDZtADOXy9lhqFqtks1mWVlZsVdccV7kdJyJsPYb1lu3PSpMvXUvz93icU7onc2vvXC9ymEsvGiA\n",
+       "gckuwMnJifVAp6enLC0tDbSBdX+Io2CMsZWZmmvsRvOA9dA6c80Jvui8zgucenmDb+FFo99SeLNd\n",
+       "fLfbtYLRchHtTp5OpwfmNpqjOwqFQmFgg9DNzs4OTzzxBLVajV6vRzabxRhDKBQikUgM9TTu6L1X\n",
+       "BQOPkGhUOCJiwwVa4pJKpQYO3VfRmNSo5HI5Tk5O7OHm6aeftoJRUapgLvs7VDBeXDE5WXjRwOBG\n",
+       "mN5XEWnE2zn3ce7WjuppjDEUi0UKhQKlUolarfbAz2jgVFM13IX7btzpoM45jRez9x4J0VyGUyyA\n",
+       "jSg7Y0ajUqlULs0EdMbDdOmse0fDcF8dxtnUyItD1SMvGsDmBmsVoztCPSrOi6wPw52yoambVxFN\n",
+       "IpGwO9Z6noUSjYjsAn8ObHAenPwTY8wfebmPsBvnnKfVag1MQsfdp3lY+oJbNM5mRMNw1m8lk0m7\n",
+       "E+z0kF7iYZ6mDfyaMeZFEUkB/yMi3wA+zHkf4U+LyMc47yPsmV7CbmYdBBx1JeTOQnTu1XiRS32z\n",
+       "MebQGPNi/34F+C5wEw/3EfYSV/Vk7moE55DmtaEJRpjTiMiTwDuA/8TDfYS9xrA+wG4xuYvltEbL\n",
+       "iysnuKJo+kPT3wG/aowpO/9oL/QRXgQuiz/p/MVZLAd4dlf4KsVyYc4F8xfGGG39eiQiW/3X59pH\n",
+       "eFG4bF7lruPy+q7wpaKR86/Fs8D/GWP+0PGS9hGGOfcRXgQWIRtvFB42PL0L+GngJRF5of/cx/FQ\n",
+       "H2Evc9HwsujiuVQ0xph/52Jv5Ik+wj6z57HYEZ41zs4WFyV7OYesRah1cuKLZko4W6G4xeMWipcn\n",
+       "vcNYuP40i8CwHjoX5dA4V0qLIhxfNFPiKp7GmT+zSMLxh6cpoBde1YbY6XSaWCxm+xprIFIPZ9t9\n",
+       "XzSPKe5unnrBDG1Rq4VxpVKJUqnE/v6+bWPvi+YxRa9/kM1m2dzctL2N9UJl1WqVQqFALpfj+PjY\n",
+       "XmWlXq/7onlc0foq9TTRaNRWZGrSe6FQ4ODggPv37w94Gq+mQzjxRTMDms2mjVp3Oh329/e5f/8+\n",
+       "e3t77O3tDVRV+p7mMUWzBOv1uq3Jdiab64W/9NAk9Uaj4YvmccXZQ0+vcZDP5+2Ry+XsfEavB67V\n",
+       "mgsvmktyhD8J/DyQ6//ox2fZFtbrqKdR0ZyenrK/v8/BwYGdv+TzeQqFAvl83g5d87xs8iiMmyOs\n",
+       "fYQ/O3ULF5BGo0GxWOTw8JBMJsPJyQlHR0ccHh5yeHg4sNyuVqsLIRQnD4tyHwKH/fsVEdEcYZhj\n",
+       "H2GvU6vVyOVyhMNh2u02pVKJQqFgD683l34Y4+QI/wfneTYfFZGfAZ4HfsOrJSzzoFqtcnx8TLPZ\n",
+       "pFAoDPQ4rtVqD62b8jpyFaX3h6Z/BX7fGPMVEdngzfnM7wHbxpiPuN6zeF+hCaGVm1rF6bxYvLMr\n",
+       "hNfrto0xQ0eTh4qmnyP898A/uFI+9fUnga8ZY97mev6xFc2jwkWiGStHuJ9Mrsy0j7DP/LnU04jI\n",
+       "u4F/A17ifMUE8NvAhzhvcW/7CDvqoPS9vqdZcMYensbFF83iM9bw5OMzDF80PiPji8ZnZHzR+IyM\n",
+       "LxqfkfFF4zMyvmh8RmZq+zQ+jy6+p/EZGV80PiMzVdGIyDMi8oqIvNbvAnrd890VkZdE5AUR+a8x\n",
+       "3v+ciByJyMuO51ZE5Bsi8j0R+foo1xi/4HyfFJH7fRtfEJFnRjjfroj8i4j8r4h8R0R+5To2XnK+\n",
+       "sW0Ehl/adxIHEAReB54EwsCLwFuvec7vAyvXeP97OE8ke9nx3KeB3+zf/xjwqWue7xPAr49p3xbw\n",
+       "9v79FPAq8NZxbbzkfGPbaIyZqqd5J/C6MeauMaYNfAn44ATOO3aaqTHmW0DB9fTY7W0vOB+MaaOZ\n",
+       "cAveS843to0w3eHpJrDneHyfNw0eFwN8U0SeF5FfuOa5lGm0t/2oiHxbRJ4dZbhzMukWvK503WvZ\n",
+       "OE3RTGMt/y5jzDuADwC/JCLvmeTJzbkfv67dnwNuc55vdAB8ZtQTuFvwXtfG/vn+tn++ynVtnKZo\n",
+       "3gB2HY93Ofc2Y2OMOejf5oAvcz4EXpeJtrc1xhybPsDnR7Vx0i14Hef7Sz3fdW2cpmieB54WkSdF\n",
+       "JAL8FOetZMdCRBIiku7fTwLvZzJpphNtb3udVNhJt+CdWrrudVYzV5i9f4DzGfvrnFdhXudctzlf\n",
+       "gb0IfGec8wFfBPaBFufzrQ8DK8A3ge8BXweWr3G+n+O8IvUl4Nv9D3dzhPO9G+j1/8YX+scz49p4\n",
+       "wfk+cB0bjTF+GMFndPwdYZ+R8UXjMzK+aHxGxheNz8j4ovEZGV80PiPji8ZnZHzR+IzM/wMn9Av6\n",
+       "T5UJ3wAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b1419d0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAELpJREFUeJzt3X+QXeVdx/HPZ3fZ7MaEkBBoA0kJKlF+tBYsDFACxVYH\n",
+       "mUKr1lLUFqtTx2lrI7ZMKTP6lw61jFPsdHSmgrRQ26q00nYUCirSlNqkQEKAUCBOUX5IorKEXfJr\n",
+       "d/P1j3s3XJb98Tx78uw9Z/t+zezknnu/9zzPnufes9+cH8/XESEAAADMXU+3OwAAANB0JFQAAAAV\n",
+       "kVABAABUREIFAABQEQkVAABARSRUAAAAFfV1s3HbzNkAAAAaIyI81fNFEyrbF0m6XlKvpBsi4k8n\n",
+       "x6xatepV7xseHtbSpUsnr6tQL6WcubgOHjxYrB+pcrZFb29v1rpz4nt6pj7AOTQ0pOXLl2e12yln\n",
+       "PA4cOJC17pz4nNixsbEisTmft9zvyFTjNzY2pr6+V+8Wcj4XOeM3Pj6eHJuz3Zhfr35K7cMZa9RF\n",
+       "sVN+tnslfVbSRZJOkXS57ZNLtQcAANAtJa+hOkvSjoh4MiJGJX1F0jsKtgcAANAVJROq4yU91bH8\n",
+       "dPu5WfX39xfpEObHwMBAt7uAOZruNC4AYGYl955zPrG9aNGiw9kPzLPBwcFudwFzREIFAHNT8qL0\n",
+       "ZySt6Vheo9ZRqlcYHh4+9Li/v59kCgAANE7JhOo+SSfZXivpWUmXSbp8ctDku/kAAACaplhCFRFj\n",
+       "tj8s6VtqTZtwY0Q8Wqo9AACAbik6D1VE3C7p9pJtAAAAdFtXZ0qX0u/oy5ngMGeywNx150wumNOP\n",
+       "UrG5E5HmxJeKLTlRH5MAAt3Bdw8LHbf0AAAAVERCBQAAUBEJFQAAQEUkVAAAABWRUAEAAFREQgUA\n",
+       "AFARCRUAAEBFJFQAAAAVkVABAABUREIFAABQEQkVAABARV2v5Wc7Ke6II45IXmdObE4fcmNz9PSU\n",
+       "yW1z+5tTb6tULb/R0dHk2P379yfHSnl1EHO2RR0+F319eV/nnLqUBw4cKBKb04fcupQ5csY6Z0xy\n",
+       "YkvWK20a6v7Njzr87csZ67p/LooeobK9xvbdth+x/bDtj5RsDwAAoBtKH6EalXRlRGy1vUTS/bbv\n",
+       "iohHC7cLAAAwb4oeoYqI5yJia/vxiKRHJR1Xsk0AAID5Nm8XpdteK+l0SZvmq00AAID5MC8JVft0\n",
+       "362SNrSPVAEAACwYxe/ys32EpK9K+mJE3Db59aGhoUOPBwYGNDg4WLpLAAAAh1XRhMqt+yxvlLQ9\n",
+       "Iq6fKmb58uUluwAAAFBc6VN+b5b0G5IutL2l/XNR4TYBAADmVdEjVBHxHTEbOwAAWOBIdgAAACrq\n",
+       "eumZ1NIhpaa9l8qVWyk1pX6pPuSuu2nbYi7xqUr9fiXLMtTlM9c0JUvgAPNpIZV9qQOOUAEAAFRE\n",
+       "QgUAAFARCRUAAEBFJFQAAAAVkVABAABUREIFAABQEQkVAABARSRUAAAAFZFQAQAAVERCBQAAUFHX\n",
+       "S88sWrTosK8zd4r83t7e5NhSJXBKTevf15c3xP39/UXWvXz58uTYY489Njn2uOOOS46VpJUrVybH\n",
+       "Dg4OJscuWbIkOXbVqlXJsTn27duXFT8+Pp4cu2LFiuTY1atXJ8ceffTRybEDAwPJsTm/mySNjY0l\n",
+       "xz7//PPJsUuXLk2OzdnGOduipNHR0eTY3bt3F4nN3Qfk/M3p6Uk/5pCzD3/ppZeSYzdt2pQcK0nb\n",
+       "tm1Ljs3527d48eLk2HXr1iXHnnDCCcmxy5YtS46V8v/+pZjpOz1ta7Z/RVJImiqDiIj4Wkrjtnsl\n",
+       "3Sfp6Yi4JOU9AAAATTJT+naJWgnVdJISKkkbJG2XlP5fNQAAgAaZNqGKiN+sunLbqyVdLOlPJP1B\n",
+       "1fUBAADU0awniG2/1vaNtu9oL59i+7cT1/9pSVdJOlihjwAAALWWcsXd5yXdKWniyr8nJF0525ts\n",
+       "v13SrojYoqmvwwIAAFgQUi6BXxkRf2v7akmKiFHbKbfEnCvpUtsXSxqQdKTtmyPifZ1BQ0NDhx4P\n",
+       "DAxk3VkFAABQysaNG7Vx48ak2JSEasT2oXubbZ8tadZ7WiPiGknXtN9zgaSPTU6mpLzb6QEAAObL\n",
+       "+vXrtX79+kPL11577bSxKQnVRyV9U9KP2/6upGMkvWsO/Soz0RIAAECXzZpQRcT9ts+X9FNqXQv1\n",
+       "WESkz+bWWsc9ku6ZWxcBAADqbdaEyvagpA9KOk+to0wbbf9lRORNywwAALBApZzyu1nSi5I+o9YR\n",
+       "ql+TdIukXy3YLwAAgMbwbPWHbG+PiFNme25OjduRWofp4MH0qaxyYnPjc+o1larPl6NU7cFcpcYv\n",
+       "pwablFfjrVSfcz4XObXEcsc6Jz6nzznbOLfmHgB0W0RMufNM2Vs/YPuciYX2XX73H66OAQAANN1M\n",
+       "xZEf6oi51/ZTal1D9TpJj81D3wAAABphtuLIAAAAmMVMxZGf7Fy2faxaM54DAACgQ0px5EttPyHp\n",
+       "h2rNJfWkpNsL9wsAAKAxUi5K/2NJ50h6PCJOlPRWSZuK9goAAKBBUhKq0Yj4X0k9tnsj4m5Jbyrc\n",
+       "LwAAgMZImdhzyPZSSRsl/Y3tXZJGynYLAACgOVKOUL1T0h5JV0q6Q9IOcQcgAADAISnFkSeORo1L\n",
+       "+nzR3gAAADTQTBN7jqg1kedUIiKOPBwd2LNnT1JcXUpwlOpHHcrUSPUorZOz3Xp7e4v0IXfdOdui\n",
+       "LqWOcuJzxqSvL+VKgpac71NJdRiTuuwDSqnLvnOhb+eF/vvV2UzzUC2punLbR0m6QdKpaiVnvxUR\n",
+       "36u6XgAAgDpJ/6/k3Py5pH+KiHfZ7pP0Y4XbAwAAmHfFEirbyyStj4grJCkixiTtLtUeAABAt5S8\n",
+       "gOFESf9j+ybbD9j+K9uLC7YHAADQFSUTqj5JZ0j6i4g4Q9JLkq4u2B4AAEBXlEyonpb0dER8v718\n",
+       "q1oJ1ivs3bv30M/o6GjB7gAAAJRR7BqqiHjO9lO210XE45LeJumRyXGDg4OlugAAADAvSt/l93tq\n",
+       "lavpl/Qfkt5fuD0AAIB5VzShiogHJZ1Zsg0AAIBuq8c0xQAAAA1W+pTf7B1ILFNRqjxMbnyp2JK/\n",
+       "X45SJVRyYkuVOZlLfKr9+/cnx+7enT4d24EDB5Jjc7axtLBLqOSWn6rLulPVYRvnyv18LmQl9/c5\n",
+       "6y71Wa7LvqXE92SmdXKECgAAoCISKgAAgIpIqAAAACoioQIAAKiIhAoAAKAiEioAAICKSKgAAAAq\n",
+       "IqECAACoiIQKAACgIhIqAACAirpeeiZ1Wv06TKcvlZsmP6fPOesdHx9Pjs1dd07s6OhocmxOuZWc\n",
+       "WClve+SUyig11nX53JfaFk0soZKj1PcazVZyH15qn5FTAqdUCbO6f0eKHqGy/Qnbj9h+yPaXbC8q\n",
+       "2R4AAEA3FEuobK+V9AFJZ0TE6yX1SnpPqfYAAAC6peQpvxcljUpabHtc0mJJzxRsDwAAoCuKHaGK\n",
+       "iOcl/Zmk/5L0rKQXIuKfS7UHAADQLSVP+f2EpN+XtFbScZKW2P71Uu0BAAB0S8mL0t8k6bsR8X8R\n",
+       "MSbpa5LOnRw0MjJy6Cf3ji0AAIA6KHkN1Q8k/aHtQUn7JL1N0ubJQUuWLCnYBQAAgPJKXkP1oKSb\n",
+       "Jd0naVv76c+Vag8AAKBbik7sGRGfkvSpkm0AAAB0G6VnAAAAKiKhAgAAqKgxtfyaWB+sVL2mnPpL\n",
+       "OXWSpLw+59R26u3tTY7t7+9Pjs2tX5ezPUr9fjnbOGe9ixblVXYaGBhIjt27d29y7O7du5Njc+7s\n",
+       "zRm7nO0m5W2Lo446Kjn2xRdfTI4dGRlJjh0bG0uOLamvL/1PSM4NSMuWLUuO3blzZ3KslFdXNOcz\n",
+       "l7MvGhwcTI4988wzk2Ml6bTTTkuOzfn+5Xw+d+zYkRz7zDPp833n9EHKG+vU/f3w8PD060huDQAA\n",
+       "AFMioQIAAKiIhAoAAKAiEioAAICKSKgAAAAqIqECAACoqJYJ1f79+7vdBVTA+DXXnj17ut0FVFCX\n",
+       "KWOQry5TYWDuaplQ5cyNgfohoWqunPmmABw+OXMRop5qmVABAAA0CQkVAABARe7mOXfbnPAHAACN\n",
+       "ERFT1hnqakIFAACwEHDKDwAAoCISKgAAgIpql1DZvsj2D2w/Yfvj3e4Ppmf7r23vtP1Qx3MrbN9l\n",
+       "+3Hbd9o+qpt9xPRsr7F9t+1HbD9s+yPt5xnDmrM9YHuT7a22t9u+tv08Y9cgtnttb7H9zfYy49dg\n",
+       "tUqobPdK+qykiySdIuly2yd3t1eYwU1qjVWnqyXdFRHrJP1Lexn1NCrpyog4VdLZkj7U/r4xhjUX\n",
+       "EfskXRgRb5T0BkkX2j5PjF3TbJC0XdLExcyMX4PVKqGSdJakHRHxZESMSvqKpHd0uU+YRkRslDQ0\n",
+       "6elLJX2h/fgLkt45r51Csoh4LiK2th+PSHpU0vFiDBshIiamte+X1KvWd5GxawjbqyVdLOkGSRN3\n",
+       "jTF+DVa3hOp4SU91LD/dfg7N8ZqI2Nl+vFPSa7rZGaSxvVbS6ZI2iTFsBNs9treqNUZ3R8QjYuya\n",
+       "5NOSrpJ0sOM5xq/B6pZQMYfDAhKtOTkY05qzvUTSVyVtiIjhztcYw/qKiIPtU36rJZ1v+8JJrzN2\n",
+       "NWX77ZJ2RcQWvXx06hUYv+apW0L1jKQ1Hctr1DpKhebYafu1kmR7laRdXe4PZmD7CLWSqVsi4rb2\n",
+       "04xhg0TEbkn/KOlnxdg1xbmSLrX9Q0lflvRztm8R49dodUuo7pN0ku21tvslXSbpG13uE/J8Q9IV\n",
+       "7cdXSLpthlh0kW1LulHS9oi4vuMlxrDmbK+cuAPM9qCkn5e0RYxdI0TENRGxJiJOlPQeSf8aEe8V\n",
+       "49dotZsp3fYvSrperYssb4yIa7vcJUzD9pclXSBppVrn+/9I0tcl/Z2k10l6UtK7I+KFbvUR02vf\n",
+       "FfZtSdv08qmFT0jaLMaw1my/Xq2LlnvaP7dExHW2V4ixaxTbF0j6aERcyvg1W+0SKgAAgKap2yk/\n",
+       "AACAxiGhAgAAqIiECgAAoCISKgAAgIpIqAAAACoioQIAAKiIhApA19m+t/3vCbYvP8zrvmaqtgDg\n",
+       "cGIeKgC1Yfstak1yeEnGe/oiYmyG14cjYunh6B8ATIcjVAC6zvZI++EnJa23vcX2Bts9tq+zvdn2\n",
+       "g7Z/px3/FtsbbX9d0sPt526zfZ/th21/oP3cJyUNttd3S2dbbrnO9kO2t9l+d8e6/83239t+1PYX\n",
+       "53drAGiivm53AAD0cumbj0v62MQRqnYC9UJEnGV7kaTv2L6zHXu6pFMj4j/by++PiKF2bbvNtm+N\n",
+       "iKttfygiTp+irV+W9DOS3iDpGEnft/3t9mtvlHSKpP+WdK/tN0cEpwoBTIsjVADqxJOWf0HS+2xv\n",
+       "kfQ9SSsk/WT7tc0dyZQkbbC9VdK/S1oj6aRZ2jpP0peiZZekeySdqVbCtTkino3WNRFbJa2t8DsB\n",
+       "+BHAESoAdffhiLir84n2tVYvTVp+q6SzI2Kf7bslDcyy3tCrE7iJo1f7O54bF/tKALPgCBWAOhmW\n",
+       "1HkB+bckfdB2nyTZXmd78RTvO1LSUDuZ+mlJZ3e8Njrx/kk2SrqsfZ3WMZLOl7RZr06yAGBW/K8L\n",
+       "QB1MHBl6UNJ4+9TdTZI+o9bptgdsW9IuSb/Uju+8RfkOSb9re7ukx9Q67Tfhc5K22b4/It478b6I\n",
+       "+Afb57TbDElXRcQu2ydPWremWAaAV2DaBAAAgIo45QcAAFARCRUAAEBFJFQAAAAVkVABAABUREIF\n",
+       "AABQEQkVAABARSRUAAAAFZFQAQAAVPT/E259UVIep5MAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b127d90>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAES9JREFUeJztnVmMpNdVx3+nq6prX3qbnvG4x4tmsEYRkv1ikJyICIVo\n",
+       "/ELghcgSUmQC4gECgkiY8BIjeIiQEiFeIiA2CosSIZCjBAmwjQIYIRYjb4E4jqVZPNPdM91de9fe\n",
+       "fXmoOt/c/qZ6qaU9VfXdn/Sp9qvTM3+du5zlE2MMDscgzN1vAxzThxONY2CcaBwD40TjGBgnGsfA\n",
+       "ONE4BmZo0YjIFRF5V0R+KCLPjdMox2Qjw5zTiEgI+AHwCeAW8N/AM8aY74/XPMckMqyneRJ43xhz\n",
+       "zRjTBr4JfGp8ZjkmmfCQvzsPfGC9vgn8mP0FEXFHzVOOMUb6vT+sp3GCCDDDiuYWsGa9XqPrbRwB\n",
+       "YFjRvA5cEpGHRWQe+DTw7fGZ5ZhkhlrTGGM6IvKrwD8CIeAFt3MKDkNtuU80sFsITz3jXgg7AowT\n",
+       "jWNgnGgcA+NE4xgYJxrHwDjROAbGicYxME40joFxonEMjBONY2CcaBwDM2wSFgAicg0oA3tA2xjz\n",
+       "5DiMmnbm5uYQEe8xHA4TCoUIh8OEw2H29vYOXPv7+weuSS+VHkk0dJOxPm6MyY/DmFkhHA4zPz9P\n",
+       "NBplfn6eTCZDJpMhm82SyWSo1+tUq1Xvqtfr1Ot1Go0G9XrdG2dSxTOqaAD6RkKDTCQSIZFIkEgk\n",
+       "SKVSnD17lnPnznHu3DkeeOABisUiW1tb3Llzh62tLUqlEqVSCYBms8ne3h4AIjKRwhmHp3lVRPaA\n",
+       "PzbG/OkYbJp6wuEw8XicbDZLNptlbW2NixcvcunSJS5evMjt27e5ceMG169f97wRQKvVolKpTPwU\n",
+       "NaponjLGbIjICvCKiLxrjHltHIZNGyJ3HW40GiWVSrG4uMjKygpra2s8+uijPPbYY1y+fJnFxUVi\n",
+       "sRihUMj7bbvdplqteusgmNHpyRiz0XvcEpGX6Ja2BE40/oVvKpViZWWFBx98kAsXLrC2tsby8jKJ\n",
+       "RAIRIR6Ps7CwwPnz573fNJtNisUioVCITqcDzKBoRCQBhIwxFRFJAp8Efndslk0RKphQKEQoFCKd\n",
+       "Tnse5tKlS6yurrK0tEQymUREiMViLCwsICIkEglarRaFQoH19XVCoZC3lpnFNc0q8FLPlYaBvzLG\n",
+       "vDwWq6YMEfG21KFQiFQqxfLysica3TWpp1HRJBIJlpeXqVarrK+vk0qlPNFMqmBgBNEYY64Cj4/R\n",
+       "lqlFvUwkEmF+fp50Os3i4iKrq6usra0Rj8e9LbiIEIlECIVCxONxjDEsLi6SSqWIRqPedKXXJDKO\n",
+       "LXfgCYfDxGIx4vE4iUSCdDpNKpUiHo8TjUaJRCKEw2Hm5roH8O12m2azSbPZpNVqkc/nqVarNBoN\n",
+       "77DPGDN7nsZxl1AoRCwWI5VKkU6nyWQyJJPJvqLRnVKtVqNSqVCtVsnn81QqFU80xpiJ3nY70YyB\n",
+       "cDhMNBolmUySzWZJp9Mkk0kSiQTRaJRwOOwtlqHraXZ3dymVSuTzeU80erA3yYIBJ5qxoCfAmUyG\n",
+       "paUlcrmcNz3p+sVGPU2hUODOnTsHPI0tmEkVjhPNEOgCVR/j8Ti5XM5b+K6urpLL5YjFYn0Xs+pp\n",
+       "isWiJ5pqtUqz2Zx4wYATzdDYOxxbNA899NAB0fRDReP3NM1mc+KnJnCiGRoVzNzc3AHRXLhwgaWl\n",
+       "JbLZ7JGeplarHelpJhknmiFQseilwcmVlRXOnz/vbbmj0Wjf359keppknGiGQA/y9EqlUt52O5VK\n",
+       "ebsmewFsi6HdblOv1ymXyxQKBcrlMrVajXa77UQzq4RCIebn54nFYt5W2xaOpjvoVltRQahoKpUK\n",
+       "+XyecrlMvV53opll1NPEYjEv0coWjcag9FzGPt01xhwQTaFQoFqt0ul0vOSrSefYxHIReVFEbovI\n",
+       "O9Z7iyLyioi8JyIvi0judM2cLEKhENFolHg87k1JyWTSO9CLxWL3nM8YY7yc4Far5aV8lkolL4Qw\n",
+       "LZ7mJNUIfwZc8b3328ArxpgfAf6p9zoQiAjz8/MkEglyuRzLy8vkcjmSySTz8/OH7pYajQbVapVC\n",
+       "oUClUpmq6cjPsaLpZeIVfG//NPD13vOvAz8zZrsmGj0B1h1TNpslkUgQiUT6fn9vb+/AdGSvYaaR\n",
+       "Ydc0q8aY273nt+nm1gSGSCRCMpn0PM1xoul0Op6nKZVKVCqVqdot+Rm5WM50/+rp+8tHwPY0tmg0\n",
+       "QdyPvcXe2dmhVCoFUjS3ReQsgIicA+6Mz6TJR/NnUqkU2WyWVCp1IFEc7u6Y9vf3qdfrFItFNjY2\n",
+       "uH79Ouvr6xQKBer1eqBE823gM73nnwG+NR5zpgM7FUIXwbFYjHC4O9urYPRqNBoUCgU2Nze5evUq\n",
+       "Gxsb5PN5arXabIpGRL4B/DvwmIh8ICLPAl8CfkpE3gN+svc6MBzmaVQ0it/TbG5ucu3atan3NMcu\n",
+       "hI0xzxzy0SfGbMtEo1vpubk572BPRZNMJg8kW9lTkzHmgGhu3LhBsVj0ynFnUjQOvML9cDhMJBI5\n",
+       "UJetouk3PWlBv+YE64FerVaj1Wp5qZ3ThhPNMWjXh2g0SiwWIxaLeYLRS2NQtmjsLhC65a7Vap6H\n",
+       "UdFMI040J0DXMBqY9HeB0JCBf/e0v7/P3t7eAU+zu7tLvV73QgrO08wo9m7J9jB6+UMHtqfpdDr3\n",
+       "TE+tVus+/SXjwYnmBGiAUmua+uXL2LRaLXZ3d71ra2uLcrlMs9n8kC0/HZxojkHXNBqkPIloms2m\n",
+       "F2fK5/Nsb297OcDTOB35caI5AX5Po2W2R3maSqXC9vY2m5ubbG9vUy6XaTQaMyEa16jxBOiaxu9p\n",
+       "/Id5iopmZ2eHjY0NTzRuegoQmt6pSVcanFRP4w8baHBye3ub9fV1Nz0FDW0jctj0JCL3dOes1WqU\n",
+       "SiW2t7fv8TRONAHBnp76VRvYZzKacKWiWV9fp1gsep5mFhg2R/h5EbkpIm/0Ln866Eyh09NhC2Fb\n",
+       "NJ1O5x7RzJqnGTZH2ABfMcY80bv+Yfym3T/8XSByuRyLi4ssLy9z5swZFhYWvJxgOHiYpyfA/h40\n",
+       "nU6H/f39+/yXjYeTRLlfE5GH+3w0mW2aRsQfa0okEiwsLLC0tMTKygpnzpzxqg80vdPvafQUuNVq\n",
+       "0Ww2abfbU9FC5KSMsuX+nIi8JSIvzFoJix1rymaznmiO8jRaomILxhbNLHmaYUXzVeARuj33NoAv\n",
+       "j82i+4x6mn6iWVlZ8TpCDOJpAjc99cMY4+UEi8jXgO+MzaIJwG68qJdWTGrVJNxNzLKrDez7HUxr\n",
+       "4vhxDOVpesnkys8C7xz23WnE7gtspz30Ewzg7Ziq1aq3vW40Gl4T6VnjWE/TyxH+CWBZRD4Avgh8\n",
+       "XEQep7uLugr88qla+SFj9wXWy24/70+FUE8zK8VwxzFsjvCLp2DLxHCUpzms7NZf0F+v1+l0OjM5\n",
+       "PbkTYR92rXY2m2VpaYlMJuM1XYT+JSpaCLexscHOzo7XpGgWcaLpg7/sNpfLkUgk+uYAG2Oo1Wpe\n",
+       "gFJjTbMUNvDjRONDPY2KRgv8D9tia12THTbY2dmhXC7TarXc9BQUbNH0K/A/KkC5sbHhFfk7TzPD\n",
+       "2Pdr0vWMNpLudwLsjzHV63WvbX2pVGJ3d5dmsxncLXcQsO/VZAcq9c5wmnhlexq7ykBrmnZ3d72d\n",
+       "k4rGTU8ziHoYPY+xRaOeJhqNejfGAPrWM6loKpWKF2ua1mK44wi8aOCup9GOnVqjraKxpy/gnnom\n",
+       "u3pyGm5cOipONPSva9JEK7utqz5qDz1dw2gf4Far5W3FZ5nAi8Yf1dZ7NdldIGy0pau2qZ+FzlaD\n",
+       "EnjRQPcwzy679Zeo+IWjotEDPVs0QeDIKLeIrInId0Xkf0XkeyLya733Z6aPsO1ptLhfPY1WG/hR\n",
+       "0ZRKpXs8TRA4LjWiDfyGMeYjwI8DvyIil5mxPsK6e9Ibl9r5M3B44vjW1pZ3AjzLh3l+jhSNMWbT\n",
+       "GPNm73kV+D5wnhnrI9wvqm1HtP2pnPa9mm7dujVzFZTHceI1TS+5/AngP5mxPsL98mfsqcluG9Lp\n",
+       "dA7cdufmzZuUy+WZKlE5jhOJRkRSwN8Cv26MqfjuLGJEZGr/pex7N+kUdZin0QM9WzS3bt2i0Wh4\n",
+       "+cBB4CTFchG6gvkLY4y2fp2pPsKHbblDodCB9YwKQ0+B9STYLlMJAsftngR4Afg/Y8wfWh/NTB9h\n",
+       "ESESiRCPx8lkMiwsLHgtXu2wga5nGo3GgbIUDRfM+imwzXHT01PAzwNvi8gbvfe+QLdv8F+LyGeB\n",
+       "a8DPnZqFHwLatj6dTrOwsOCV3uo5jYrGLklptVqed1HRBIUjRWOM+TcO90Yz0Ue4n6dJp9MHWrz6\n",
+       "A5R+T2Nn8QWBQDc10jveanu0eDzu9QSORCKHntOod1EPo4IJimgCGUawA5D2+YzeLU4DlXadk783\n",
+       "sC2UIAkGAuxp7Juxa1qEFv1r7oz/HpR263p/crl+LwgE0tModgLWqJ4mSATa0yj2QleL9e1u4v6k\n",
+       "K7t1SBAJrKexpxT73gWVSsVLytLDOhWNZumpcIIqmsB6GkVF02w2+97wQkMI6oXspHEnmoDiF43e\n",
+       "/ti+S4rtafSzIIUN/AR+etrf3/dqsbe3t0mlUgAHKhQ0D7hSqRzoPeNEE0B0R1Sr1SgUCqyvrwN4\n",
+       "gtDPd3Z22NnZ8XrPaJDSiSZAGGMOpD3UajXy+TzGmANFbvqdYrHoXX7RBG27DceIRkTWgD8HztBt\n",
+       "YPQnxpg/EpHngV8Etnpf/cK0tYW1p6darQbg9ZgxxnjT09zcHJVKxUu0sstVnKfpj+YIv9lLxPof\n",
+       "EXmFu32Ev3LqFp4yWpKiTYja7bZXjTA3N+clXWkFpSZg7e7uTv3NvobluCj3JrDZe14VEc0Rhhnq\n",
+       "I6y7I+ge+hWLRcLhMJ1Ox0vj1POZRqPhTVNBFY2cdE7u5Qj/C/AR4PPAs0AJeB34vDGm6Pv+1Ez2\n",
+       "dufOUCjkNZfW+yCoB9JHO2uvXq/fb/NPDWNMX8dwItH0pqZ/Bn7fGPMtETnD3fXM7wHnjDGf9f1m\n",
+       "akRjR701VUK7SITD4XuClBpC0PSIWWVo0fRyhP8O+Htfyqd+/jDwHWPMj/renxrROPpzmGiGyhGe\n",
+       "9T7CjqM50tOIyEeBfwXeprtjAvgd4Bm6Le69PsJWHZT+1nmaKWekNc0wONFMP0NNTw5HP5xoHAPj\n",
+       "ROMYGCcax8A40TgGxonGMTBONI6BObVzGsfs4jyNY2CcaBwDc6qiEZErIvKuiPxQRJ4bw3jXRORt\n",
+       "EXlDRP5riN+/KCK3ReQd672h29seMt7zInKzZ+MbInJlgPHG2oL3iPGGthG499Z647qAEPA+8DAQ\n",
+       "Ad4ELo845lVgcYTff4xus8l3rPf+APit3vPngC+NON4Xgd8c0r6zwOO95yngB8DlYW08YryhbTTG\n",
+       "nKqneRJ43xhzzRjTBr4JfGoM4w6dZmqMeQ0o+N4eur3tIePBkDaaMbfgPWK8oW2E052ezgMfWK9v\n",
+       "ctfgYTHAqyLyuoj80ohjKafR3vZzIvKWiLwwbDf3cbfgtcb7j1FtPE3RnMZe/iljzBPA03S7p39s\n",
+       "nIObrh8f1e6vAo/QzTfaAL486AD+Fryj2tgb729641VHtfE0RXMLWLNer9H1NkNjjNnoPW4BL9Gd\n",
+       "AkdlrO1tjTF3TA/ga4PaOO4WvNZ4f6njjWrjaYrmdeCSiDwsIvPAp+m2kh0KEUmISLr3PAl8kvGk\n",
+       "mY61ve0oqbDjbsF7aum6o+xmTrB6f5ruiv19ulWYo4z1CN0d2JvA94YZD/gGsA606K63ngUWgVeB\n",
+       "94CXgdwI4/0C3YrUt4G3ev+5qwOM91Fgv/c3vtG7rgxr4yHjPT2KjcYYF0ZwDI47EXYMjBONY2Cc\n",
+       "aBwD40TjGBgnGsfAONE4BsaJxjEwTjSOgfl/g7yNWl4b+UcAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b044750>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAD4BJREFUeJzt3XuspVdZx/Hvb87cO4OT2pYWGBysoLQBKQIpdwpoKoHi\n",
+       "BYGKgGgwBpCKQID+4V8aMMRQCcEEqVzKTQUsEOVSpUIBaSnM9DblZqi2IB2ltyl2JnN5/GPvaU8P\n",
+       "Z+a877yzzt779PtJTma/7177Xevstc85z6y13vWkqpAkSdLRWzXpBkiSJM06AypJkqSBDKgkSZIG\n",
+       "MqCSJEkayIBKkiRpIAMqSZKkgVZPsvIk7tkgSZJmRlVlsfNNA6okZwMXAHPAu6vqLxaWOeWUU37i\n",
+       "dbt372bz5s33Onfw4MHO9fYpC3DgwIEmZVu1uU/ZvvuM9Sl/uLL79+9n9erVncouZtq/P0mSFmo2\n",
+       "5ZdkDngHcDZwGnBukoe3qk+SJGlSWq6hehzw3aq6oar2AR8BntuwPkmSpIloGVA9ELhx3vFN43NL\n",
+       "Wrt2bZMGaXmsWuW9DpKk+5aWf/mOegHKunXrjmU7tMwMqCRJ9zUtF6V/H9g673gro1Gqe9m9e/fd\n",
+       "j9euXWswJUmSZk7LgOpK4KFJtgE/AF4AnLuw0MK7+SRJkmZNs4CqqvYneRXwWUbbJlxYVde3qk+S\n",
+       "JGlSmu5DVVWfBj7dsg5JkqRJm+hO6dB9AXqfO/8Wbiq5lDVr1jS59oYNGzqXPe644zqX3bhxY+ey\n",
+       "fadU+1x7/fr1nctu2rSpc9ktW7Z0LnvSSSd1Lgtw8skndy574okndi7bp819+rrP563P5xhgbm6u\n",
+       "Sdk+NyUki244PLhsX6021u2zEXDLmzn6vHfT0NcrXZ9Ng/fu3dvs2n1+v/T5XEzLz3ULR2qvt2NJ\n",
+       "kiQNZEAlSZI0kAGVJEnSQAZUkiRJAxlQSZIkDWRAJUmSNJABlSRJ0kAGVJIkSQMZUEmSJA1kQCVJ\n",
+       "kjSQAZUkSdJAE8/lt2fPnk7l+uQz6pPLqK9pyDvUJ5dYX33eu1b5z/qU3b9/f+ey0C+3Wp+yrd63\n",
+       "Plp+7vtcu2U7WmmVe6xP2Zbv8Sz2iTRrmo5QJdma5NIk1yW5NsmrW9YnSZI0Ca1HqPYBr6mqHUk2\n",
+       "AV9PcklVXd+4XkmSpGXTdISqqn5YVTvGj+8Ergce0LJOSZKk5bZsi9KTbAPOAC5frjolSZKWw7IE\n",
+       "VOPpvo8C541HqiRJklaM5nf5JVkDfAz4QFVdvPD53bt33/147dq1rFu3rnWTJEmSjqmmAVVG9wxf\n",
+       "COysqgsWK7N58+aWTZAkSWqu9ZTfE4HfAc5Ksn38dXbjOiVJkpZV0xGqqvoS7sYuSZJWOIMdSZKk\n",
+       "gSaeembVqm4xXcuUL61SPrQq2yd1ScsUFX3a0Spdx9zcXOeyLa30dDJ9tPpZnZb3ok/ZWfzct/r9\n",
+       "Yvqbe/Tp65Y3avVJr9Unzdd9ta8doZIkSRrIgEqSJGkgAypJkqSBDKgkSZIGMqCSJEkayIBKkiRp\n",
+       "IAMqSZKkgQyoJEmSBjKgkiRJGsiASpIkaaCJp57ZsGFDp3J9tr3vs51+S31SOLT6/qYl9cy0tKHP\n",
+       "e9enbMt0QJpd05B+StOnT1/v2bOnYUt0LB02oErym0ABiyUdqqr6eJcKkswBVwI3VdVzjqqVkiRJ\n",
+       "U+xII1TPYRRQHU6ngAo4D9gJbO7aKEmSpFly2ICqqn536MWTPAh4FvDnwJ8MvZ4kSdI0WnJRepKT\n",
+       "k1yY5DPj49OS/H7H678NeD3ghL8kSVqxutzl917gc8ADxsffAV6z1IuSPBvYVVXbWXwdliRJ0orQ\n",
+       "JaA6oar+DjgAUFX7gC63pD0BOCfJ94APA09P8v6FhW655Za7v+66664eTZckSZoOXbZNuDPJTx86\n",
+       "SHImcPtSL6qq84Hzx695KvC6qnrJwnLHH39899ZKkiRNoS4B1WuBTwE/m+QrwInA846iLjffkSRJ\n",
+       "K1K6bDCWZDXw84zWQn1rPO03vPKkTj311E5lW27s2WrzvT7tcGPP5WmDG3tKkoaoqkXXhS85QpVk\n",
+       "A/AK4EmMRpkuS/LXVeX2rZIkSXSb8ns/cAfwdkYjVL8NXAT8VsN2SZIkzYwuAdXpVXXavOPPJ9l5\n",
+       "rBpw++1Lrm8H2k3Ltbx2qymxZDp2oVi1qntu7VbvRd/rtvwctdCnr6ehvZJ0X9XlL+I3kjz+0MH4\n",
+       "Lr+vt2uSJEnSbDlScuRr5pX5cpIbGa2hejDwrWVomyRJ0kxYKjmyJEmSlnCk5Mg3zD9OchKwvnWD\n",
+       "JEmSZk2X5MjnJPkO8D3gC8ANwKcbt0uSJGlmdFmU/mfA44FvV9VDgGcAlzdtlSRJ0gzpElDtq6r/\n",
+       "BVYlmauqS4HHNG6XJEnSzOiyD9WtSTYDlwEfTLILuLNtsyRJkmbHkrn8kmwC7mI0mvUi4H7AB6vq\n",
+       "R4MrT+qEE07oVNaNPe/RcmPPVptDtsp11ycHYt92tMqZ2KesG3tK0nQ56lx+VXVoNOoA8N5j2CZJ\n",
+       "kqQV4Ugbe97JaCPPxVRV3e9YNOC4447rVK7P/9T7pESB6UiL0mekpdUoS99rt3rf+rS55WjdNKTW\n",
+       "cdRJkmbDkfah2jT04km2AO8GTmcUnP1eVX116HUlSZKmSZdF6UP8FfDPVfW8JKuBbsNRkiRJM6RZ\n",
+       "QJXkp4AnV9VLAapqP3B7q/okSZImpd9io34eAvxPkvck+UaSv0mysWF9kiRJE9EyoFoNPBp4Z1U9\n",
+       "Gvgx8MaG9UmSJE1EyzVUNwE3VdXXxscfZZGA6rbbbrv78fr161m/3vzLkiRptjQLqKrqh0luTPKw\n",
+       "qvo28EzguoXltmzZ0qoJkiRJy6L1XX5/xChdzVrgP4CXNa5PkiRp2TUNqKrqKuCxLeuQJEmatJaL\n",
+       "0iVJku4TWk/5LemOO+7oVK5PipG+6Uj6pBhp2Y6uZjFRdJ/3YvXq7h/LNWvWdC4L7RIv9ym7b9++\n",
+       "zmX7ME3N0WuZwqgF+1qaPo5QSZIkDWRAJUmSNJABlSRJ0kAGVJIkSQMZUEmSJA1kQCVJkjSQAZUk\n",
+       "SdJABlSSJEkDGVBJkiQNZEAlSZI00MRTz3RN2TENKV+gX5qaabjuwYMHe5Xv897Nzc11LtsnVcaB\n",
+       "Awc6l+2bxqXPtfuU7fM+T8tnuU+bV3qqk1bfX5+f65X+HksrXdMRqiRvSnJdkmuSfCjJupb1SZIk\n",
+       "TUKzgCrJNuDlwKOr6hHAHPDCVvVJkiRNSsspvzuAfcDGJAeAjcD3G9YnSZI0Ec1GqKrqFuAvgf8C\n",
+       "fgDcVlX/0qo+SZKkSWk55Xcq8MfANuABwKYkL2pVnyRJ0qS0XJT+GOArVfWjqtoPfBx4wsJCe/fu\n",
+       "vfur6x1/kiRJ06RlQPVN4MwkGzK69/uZwM6FhdatW3f31+rVE9/FQZIkqbeWa6iuAt4PXAlcPT79\n",
+       "rlb1SZIkTUomuZlcktq8eXPXsn2ue7RNWpIbex5d2VYbe/adJp6GjT2nhRt7ttdqY0/7Q5qcqlr0\n",
+       "j5+pZyRJkgYyoJIkSRpo4qvAu075zeL0RJ92tJoy6jv92Wraoc/316dsn2m5vtdu9TmaxamdVtO7\n",
+       "K90sTgVLOjqOUEmSJA1kQCVJkjSQAZUkSdJABlSSJEkDGVBJkiQNZEAlSZI00FQGVHv37p10EzTA\n",
+       "vn37Jt0EHSW3PJCko2NApWOubzoYSZJm3VQGVJIkSbPEgEqSJGmgTHLNRBIXbEiSpJlRVYvm4ppo\n",
+       "QCVJkrQSOOUnSZI0kAGVJEnSQFMXUCU5O8k3k3wnyRsm3R4dXpK/TXJzkmvmnTs+ySVJvp3kc0m2\n",
+       "TLKNOrwkW5NcmuS6JNcmefX4vH045ZKsT3J5kh1JdiZ58/i8fTdDkswl2Z7kU+Nj+2+GTVVAlWQO\n",
+       "eAdwNnAacG6Sh0+2VTqC9zDqq/neCFxSVQ8D/nV8rOm0D3hNVZ0OnAm8cvzzZh9OuaraA5xVVY8C\n",
+       "HgmcleRJ2Hez5jxgJ3BoMbP9N8OmKqACHgd8t6puqKp9wEeA5064TTqMqroMuHXB6XOA940fvw/4\n",
+       "tWVtlDqrqh9W1Y7x4zuB64EHYh/OhKr6v/HDtcAco59F+25GJHkQ8Czg3cChu8bsvxk2bQHVA4Eb\n",
+       "5x3fND6n2XH/qrp5/Phm4P6TbIy6SbINOAO4HPtwJiRZlWQHoz66tKquw76bJW8DXg8cnHfO/pth\n",
+       "0xZQuYfDClKjPTns0ymXZBPwMeC8qto9/zn7cHpV1cHxlN+DgKckOWvB8/bdlErybGBXVW3nntGp\n",
+       "e7H/Zs+0BVTfB7bOO97KaJRKs+PmJCcDJDkF2DXh9ugIkqxhFExdVFUXj0/bhzOkqm4H/gn4Jey7\n",
+       "WfEE4Jwk3wM+DDw9yUXYfzNt2gKqK4GHJtmWZC3wAuCTE26T+vkk8NLx45cCFx+hrCYoSYALgZ1V\n",
+       "dcG8p+zDKZfkhEN3gCXZAPwysB37biZU1flVtbWqHgK8EPh8Vb0Y+2+mTd1O6Ul+FbiA0SLLC6vq\n",
+       "zRNukg4jyYeBpwInMJrv/1PgE8DfAw8GbgCeX1W3TaqNOrzxXWFfBK7mnqmFNwFXYB9OtSSPYLRo\n",
+       "edX466KqemuS47HvZkqSpwKvrapz7L/ZNnUBlSRJ0qyZtik/SZKkmWNAJUmSNJABlSRJ0kAGVJIk\n",
+       "SQMZUEmSJA1kQCVJkjSQAZWkiUvy5fG/P5Pk3GN87fMXq0uSjiX3oZI0NZI8jdEmh8/p8ZrVVbX/\n",
+       "CM/vrqrNx6J9knQ4jlBJmrgkd44fvgV4cpLtSc5LsirJW5NckeSqJH8wLv+0JJcl+QRw7fjcxUmu\n",
+       "THJtkpePz70F2DC+3kXz68rIW5Nck+TqJM+fd+1/S/IPSa5P8oHlfTckzaLVk26AJHFP6ps3AK87\n",
+       "NEI1DqBuq6rHJVkHfCnJ58ZlzwBOr6r/HB+/rKpuHee2uyLJR6vqjUleWVVnLFLXbwC/CDwSOBH4\n",
+       "WpIvjp97FHAa8N/Al5M8saqcKpR0WI5QSZomWXD8K8BLkmwHvgocD/zc+Lkr5gVTAOcl2QH8O7AV\n",
+       "eOgSdT0J+FCN7AK+ADyWUcB1RVX9oEZrInYA2wZ8T5LuAxyhkjTtXlVVl8w/MV5r9eMFx88Azqyq\n",
+       "PUkuBdYvcd3iJwO4Q6NXe+edO4C/KyUtwREqSdNkNzB/AflngVckWQ2Q5GFJNi7yuvsBt46DqV8A\n",
+       "zpz33L5Dr1/gMuAF43VaJwJPAa7gJ4MsSVqS/+uSNA0OjQxdBRwYT929B3g7o+m2byQJsAv49XH5\n",
+       "+bcofwb4wyQ7gW8xmvY75F3A1Um+XlUvPvS6qvrHJI8f11nA66tqV5KHL7g2ixxL0r24bYIkSdJA\n",
+       "TvlJkiQNZEAlSZI0kAGVJEnSQAZUkiRJAxlQSZIkDWRAJUmSNJABlSRJ0kAGVJIkSQP9P+4wayRS\n",
+       "hyMkAAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b06c950>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAGNNJREFUeJztnVmM7Fldxz+na1+6qrqW7q5ebt+ZuTMDTEzgBU2AyAMh\n",
+       "Q0xAXyQkRoNofFA0SiLig6D4gCYQow9EZYlbQKOBgInKYETxwQUzw4CyzMxdeu/au/a1jw9dv8Op\n",
+       "ukvfWrvrzv+bnPyram7/51TVt37n9/ud3+97lNYaBw5GwdJlT8DB4sEhjYOR4ZDGwchwSONgZDik\n",
+       "cTAyHNI4GBljk0Yp9axS6rtKqZeUUh+c5qQcXG2ocfI0SikX8D3gbcAB8N/Ae7TW35nu9BxcRYxr\n",
+       "ad4IvKy1vq217gCfB941vWk5uMpwj/l3m8Ce9Xwf+GH7HyilnFTzgkNrre71+riWxiHEqxjjkuYA\n",
+       "2Laeb3NubRy8CjAuab4BPKmUuq6U8gLvBr40vWk5uMoYy6fRWneVUr8E/BPgAj7tRE6vHowVcj/U\n",
+       "jR1HeOExbUfYwasYDmkcjAyHNA5GhkMaByPDIY2DkeGQxsHIcEjjYGQ4pHEwMhzSOBgZDmkcjAyH\n",
+       "NA5GxrhFWAAopW4DZaAHdLTWb5zGpKYNpdTAY7fbjcvlMtezszO01macnZ0NDHkd4LLamIffg1KK\n",
+       "paUllFID87bnOitMRBrOi7HeqrUuTGMys4J8wEtLS7jdbiKRCMvLy0QiEUKhEN1ul06nY0az2aTR\n",
+       "aNBsNmk2m5ydndHr9cz1MuZvE8XtduPxeMzo9Xq0220zzs7OzN/OgkCTkgbgnjuhVwXyQbtcLlwu\n",
+       "Fz6fj5WVFdbX10mn0yQSCUOORqNBo9GgXC5zenpKuVwGoNPpoJSi0+lc6nuQ4fV6CQQCZnQ6Her1\n",
+       "OrVajV6vN3OrOA1L81WlVA/4Y631n05hTlOHUsosRz6fj3g8zvb2Nk888QRbW1tUq1UzKpUK2WwW\n",
+       "j8cDQLvdNkvDZViZ4fcgxA+FQoTDYZaXl2m1Wiil6PV6tFqtAeLMApOS5k1a6yOlVAp4Tin1Xa31\n",
+       "16cxsUkhX/TS0hI+n8/8KiORCOl0mu3tbR5//HF2dnY4PT01lqVUKqG1pt1uU61WzRcl/sNlvAeP\n",
+       "x4PP5zMjHA4TjUaJRqNEIhHq9TpKKbrdLvV63SylskxNm0ATkUZrfdS/ZpVSX+C8teXSSSNfsFIK\n",
+       "r9dLIpEglUqRSqVYXV1lZ2eH7e1tVldXiUQixlfpdrs0m03c7vOPRZ53Oh263e6ArzBLDPswy8vL\n",
+       "xONxEokEiUSCSCRCOBw2o1gs4vP50FpTr9cH3k+32536/MYmjVIqCLi01hWlVAh4O/DbU5vZmLA/\n",
+       "bDHliUSCnZ0dHnvsMa5du0YqlSKZTJJKpYhEIvR6PTqdDu12m3q9bpambrdLq9Wi2+2aX++83oPt\n",
+       "w4TDYdLpNNeuXePatWtEo1GCwSDBYJBAIMDJyQlnZ2fU63Xy+bx5L1rrmSxVk1iaNeALfRPqBv5K\n",
+       "a/2VqcxqQgyv/0KaZ555hqeffppQKEQwGCQUCuHz+cyH3Gq18Pv9d1kaCWXnRRpgwHlfXl5mfX2d\n",
+       "J598kte97nXEYrG7lqt6vU6xWOTg4IBGo2EIM4sldWzSaK1vAa+f4lwmgu3D2D5ANBollUoZx/ep\n",
+       "p54a+BVrrc1VTLodfrfb7bm/F3HavV4vXq/XRHs7Ozs89dRTRCIRQ6qlpSWq1SqRSIRAIIDH45m5\n",
+       "DzaNkPvSIR+QECYWi5mRSqXY2dkhlUoRDodxu920221DiGazyeHhIUdHR2bs7e2Ry+Wo1+uX8n68\n",
+       "Xi/Ly8tmbGxskEwmiUajBAIBAOr1ukkV7O7ucnx8TLFYpFar0Wq1aLfbM4uiFp409vrvcrnw+/2s\n",
+       "rKyQTqdJp9Nsbm5y/fp1VldXCYfDuFwuut2uCa9LpRJ7e3tm7O/vk8/nKRQKl0qaSCRinPeNjQ1S\n",
+       "qZTxZTqdDrVazcxTSFMoFKjVajSbTeOHOaS5D4bzMLFYjI2NDR5//HEee+wx1tbWjKVxuVx0Oh2q\n",
+       "1Sq5XI5MJsPu7i63bt3i1q1b3Llzx/yCm83mpbwfmzTb29tsbm4a0tjJvGw2y97e3j0tjR1yTxsL\n",
+       "Txo7te71evH7/cRiMeMDPPHEEyanEQqFWFpaotPpUKlUyOVyHBwcmA/+9u3b3Llz59Leh/ggfr+f\n",
+       "aDTK6uoqW1tbrK+vE4/HCYVCuN1uut2uSULu7u5ycHBANpulVCrRaDRm7octPGkAY2ECgYDxAyKR\n",
+       "yABZfD4fLpcLrTWNRoNiscjR0RG7u7tkMhlOT09ptVqXMn+JkmREo1ESiQRra2tsbW2RSCRMlFcs\n",
+       "FsnlcuRyObLZLNlslkKhQKVSodVqzWVDdeFJI7vWPp+PYDBoCBOJRIjFYkSjURNJyY62kOb4+Jjd\n",
+       "3V1yudylkkYceImWIpEIiUSC9fV1tra2CIVCZu+rVCoNECaTyVAoFKhWq2ZZmjUeOdLIfoxtaexf\n",
+       "ca/Xo9FoUCqVjKWpVqvGF7gM2GkC2eqIx+Osra2xubmJy+WiVqtRr9ep1+sDpMlmsxSLRVqtlkOa\n",
+       "izCcl/H7/YRCIWNlpPwhHA4P1Ma0Wi0qlQqFQoFMJsPh4eFAXmbe81dK4fF4CAQCZlsgHo+bjPXa\n",
+       "2ppJD7TbbbM85fN5MyqVCr1eb+YblYKFI81wAZLs+MqvU/ZmAoEAbrebs7MzY0mq1SrFYpG9vT2y\n",
+       "2SyVSoVOp2O2COZVYDXsw9h7Y6lUihs3brCxsWGSeN1u15Rs5PN5isUi1WrVhNZ2sdg8sHCkgcEU\n",
+       "u9frJRgM3pc0vV6PWq1GJpMhl8txfHxsknfVapVOpzP3D932YTweD/F4nM3NTbO3tLW1RTqdJhKJ\n",
+       "mLzSMGkqlQqNRmNgX2weVXuw4KSxfRkhjZ05tS1NNpvlzp077O7uGksjpLFLPecBl8s14MMIaW7c\n",
+       "uMFrXvMaEokEsVjsoUgjlhLmV4p6IWmUUp8BfgzIaK1/qP9aHPhrYAe4Dfyk1ro0w3na8xn40CVi\n",
+       "isViJBIJkskkkUgEv99vHF8hzd7eHq+88gonJyfk83lqtdpMSgcuwrAfFo/HSafTXL9+naefftps\n",
+       "d3g8HrTWtFotarUap6enA6Sxl6e5zv8h/s1ngWeHXvsN4Dmt9VPAP/efzwUul4tQKDSwCSklD5ub\n",
+       "m6ytrRGJRPB4PCaJd3p6SrFYJJ/Pm/C6Xq9fWvmmXX0nDnsoFCIQCOD1eg1RTk9POTk54fj4eOAq\n",
+       "zu+88jLDuNDSaK2/rpS6PvTyO4Ef7T/+M+BrzIk4LpeLcDhMMpk0db6bm5tmpFIpPB4PbrfbRB3D\n",
+       "pKlUKtTr9UuxMvCDZGQoFCIajRrS+P1+Q5pms2lCbJswJycnZo9pXiH2XfMf8+/WtNYn/ccnnNfW\n",
+       "zAVLS0uEQiGSySRbW1vs7OyYzcl0Os3KyoqpWJM9pmHSNJtNWq3WlbI0wWDQkKbVatFsNs28hTCZ\n",
+       "TIaTkxMT9Q13HswLEzvCWms9T309l8tFMBgkHo+zsbHBtWvXWFtbMyMSiVCpVIw1qVarpva3UCiQ\n",
+       "z+dNOeS8CsXt8k1gwBdbWVkhGo0SDofN8iSbpaVSiePj4wErc5klG4JxSXOilFrXWh8rpdJAZpqT\n",
+       "ehjcby2XCjuxNFI7Yye/Zh0p2Yk7ON+A9Pv9BAIB/H4/6+vrbGxsmJFOp4nH4wSDQZRSNJtNCoUC\n",
+       "BwcHvPLKK2Z/rFqtXmpHhGBc0nwJ+Bng9/rXL05tRmPCJoNdgSc1vhJlzCu0tpOQgUBgoDAsnU4P\n",
+       "kCaZTLKyskIgEEApRaPRoFAosL+/z0svvcTx8TH5fJ5qtXopy9EwHibk/hznTm9SKbUH/BbwMeBv\n",
+       "lFLvox9yz3KSF8EmgVgau1hcugmGk2AzlMMdKA4LBoOsrKyYJVTIsrm5ycbGhtnuENLYluall14i\n",
+       "n8+bRr6FsDRa6/fc5z+9bcpzGQvDhLFJIxZGliX5Iu3Hw71BYx5lNPDYrt+VFIHkYiQ1YFsbr9dr\n",
+       "+soBY2kODw+5efOm6fS8KljIjPCDIG2rwWDQEGdzc9NU6IdCobsa/GWHWGprR4Xb7R4YUuIgY9iH\n",
+       "SSaTpqhKEnh2z3ipVDIh9VU8N/2RI400yEmVnpj7s7MzU3RuR09SBVcul6lUKlSr1ZH/n+Lo2g6v\n",
+       "9CQFAgHTpCdDCsUk8yu+lyxBUrZ5WSH1RXgkSePxeAx5JFnmdrsJh8Osrq4asojagpQa5HI50yg3\n",
+       "CuxuRxk2MeLxOCsrK8TjceLxuMnHyIaldEVI9rpUKlGtVk3D21XDQpPG9kPksU0WwCwBktupVCrG\n",
+       "15EOysPDQ4LBoGmSGxVSIWhfZUi/tT2G+5FkiSyXy+RyuQFL45BmCrAr705OTvD7/abJX9LxduQC\n",
+       "5z5HIBCg1+uZTUwZIiNiF3OPCrEuouQgVxnBYBCfz2dIOexTCVkODw/Z39/n4ODAbBU4y9MUIDW+\n",
+       "QhrZLZZlwC7vhB8sV36/HzhvD7G/sF6vh8/nIxKJkEwmp+LT+Hy+u557vV4zJ0k+Si5JOiMODw+5\n",
+       "ffs2h4eHJsx2LM0UYJPG7XYbVYWVlRXq9Trtdtv8om31K2lZHc4Ka62JRCImcppG9CTDlmiTq1LK\n",
+       "kEb6x21Lc/v2bVP361iaKcEmjSTr4vE4q6urA2GqXd0nX6LP57srpyKY5y9aSCMbk8OkOT09NZGU\n",
+       "Q5opQGttPnCXy8Xp6SlHR0f4/X601hQKhbuWBilokqu9hMhyZQs02vp1D7MTbveGdzodEz3Jddjx\n",
+       "FcUq2VCVUL9Wq9FoNMwO/DxLUEfBQpKm0+kMZHaPjo6MoM/x8bHJj0iuRHIn8ppENJIIlP0qCcWl\n",
+       "CF2+yItQq9XMqFarA8m8cDh8T9KIFo7kh6RFRUgjKYGriIUkjRRPiTOptaZWq5HL5Uxtiq1BIzUr\n",
+       "y8vLRKNRo6MXCATu2naQGpxisUihUKBUuriKtVgsDowbN24AGDGiYdiWRso4qtXqAGnmXew+Csat\n",
+       "Ef4I8HNAtv/PPqS1/sdZTdKGkEaI02g0qNVqZLPZgYo4O8kmSbWVlRWSyaQJsWOxmLmnkEa09gqF\n",
+       "gil8ughS6yKj2+0SDodZX1+/55cupKnVapTLZcrl8l2kucp4GEvzWeCPgD+3XtPAJ7TWn5jJrEaA\n",
+       "XT8jkPyMvWlpZ1wlxD06OiIejw+Ev51Oh0KhYEaxWLxwDpLFlUL14T4qe9ui1+tRLBbJZDIcHByw\n",
+       "v7/P4eHhlc7LDGPcGmG4QvrBNnHEzzk7OxvY06lWq8anyeVy7O/vm/pc+wsV7ZpRfBopH202m/ds\n",
+       "vLPFoaWJP5PJsLe3x82bN029TL1ev5LL0TAm8Wner5T6ac4Pdv/AvFpYhmFr4dkRkERYEqHYCt92\n",
+       "8k0cYTuCkvzJKHkb29kdLvYSAovAtSQm9/f3uXXrFoVCwXRIPMqk+STwO/3HHwU+DrxvKjMaA+KT\n",
+       "CGRrwMa98jP306QbtbZmOCNsE1lEoWVTslarGUuzv7/PzZs3zeakhNlXHWORRmttvEOl1KeAL09t\n",
+       "RlPC8Bc+y1+waOTZOn8icQKYzgLZTRcRonK5bHSK5yk5OynGIo1SKi3C08BPAN+a3pQWDxKJSTnn\n",
+       "+vo6sVjM7He1Wi3TWSAbkrlcjnK5TLvdvqfzfJUxTo3wh4G3KqVez3kUdQv4hZnO8opDpGfX1ta4\n",
+       "fv36XaSxlbdu3rzJ4eGhUa2QRN6iEAbGrxH+zAzmsrCwLc21a9cGSCOVgyKiJBuSInkm0q2LhIXL\n",
+       "CF8FSBQme1m2AJF0eUrxl123I86w7C3NS4Ro2nBIMwakElCG1P6KTyNbGCJ1Mpw8XCT/5V5wSDMG\n",
+       "pHBdNP1swqytrZmWFBFVsjPTw201iwiHNCNCKgGDwSCxWMwsTba1sZOMYmnsDVFb8mwR4ZBmDAhp\n",
+       "RH3L1iqWsxds/0UOIZPd7GazOdOzC2YNhzRjQNQ4ZQd9eXnZlJMCRu5M9q+KxaI5uU52s+XYwEWE\n",
+       "Q5oxIDXHYmnsY3MAc3ZBuVweqMuRgivZ2LwsUaVJ4ZBmDIilGSaNfbiYCCtKeYWQxhaHhMs753sS\n",
+       "OKQZA/dankQY8uzsbKBYXPaZpFh8kfaY7oeHEWp0MITh5WmYNI1Gg9PTU7OTLQd2NJvNhbQsw3As\n",
+       "zYiwZemFNKKXN3z2QjabfSRJ80BLo5TaVkr9i1Lqf5VS31ZK/XL/9bhS6jml1PeVUl9RSsXmM92r\n",
+       "AQm57eXJ5/OxtLRkSHM/S/Mo4KLlqQP8qtb6GeBHgF9USr2WS9QRvgwISaLRqJE6k7OxRVzRbrlt\n",
+       "t9sm5BYFCBGKfhQszQOXJ631MXDcf1xVSn0H2OQSdYTnDfvQDulwECn9YDA4IE0iFYRSmyylptIu\n",
+       "vKh5mWE8tE/TLy5/A/CfXKKO8GVABAKkFSaVShGLxQiFQqZDU6Iiux5YSGOLRT7ylkaglAoDfwf8\n",
+       "ita6MtQDPVcd4XlDLM3y8jKJRIJ0Ok0ymSQWixEMBvF6vYYMdhmEbWmktfdRIAw8XOWeh3PC/IXW\n",
+       "WqRfL11HeJ4Q0iSTSaOZJ2G2FI7b8mfVatUcQGpbF1uE2h5298Ii5HAuip4U8Gng/7TWf2D9J9ER\n",
+       "hiuiIzwr2JYmmUySTqdJJBIsLy8bta1OpzOQAS6Xy3cd2GGfHS7FW16v12jXiGzK/TokrhIusjRv\n",
+       "An4KeFEp9Xz/tQ9xxXSEZw3xaRKJBBsbG/ckjew13Ys0tq6wLRUrQ0omhltxriouip7+nftboyuh\n",
+       "IzxriIafbWlERmTY0sjZmOVy2RzgJfcY1suxdXNEAWMRCANORvihIF+4dGfaywkMhtoirmjndoY1\n",
+       "cWSpEjLVajWjHnFVxRltOKSZATweD6FQyEjbh0KhgfPChyv75ORe6SO/6s6wQ5opYFjDzy4HbbVa\n",
+       "rKysmKMSk8mkadMVy+Tz+YyY0qPgCDsYESJ0LZZGaz1wiMbGxoZxnBuNhjm7qVqtksvlHNK8WuB2\n",
+       "u40sbSwWw+VyDTwXSxOJRPD5fCZ7LMJGktNxHOFXESSPI1YmFouZRF+9Xjc5GXF6S6US+XzejKOj\n",
+       "IwqFwiMvNeLAgtfrNYdjBIPBu9Q+7VNe6vW6kRqRowbl2OR6vX7lnWBwSDMVSAgeDAYHBBblKod1\n",
+       "5HK5AX2ag4MD9vb2zHmbV1WhfBgOaR4Cw+KNtg8ie0f2LrfdFNfpdMjlcmSzWTOOjo7IZDLm8Ay5\n",
+       "n11wfpXhkOYCiD6xHA/o9/tN6CyJOFusWvSBbW3hUqk0MMSXkWo+u+tyEeCQ5gIIafL5vJFGs4/V\n",
+       "cblcxuGVIb1OoitsC1MPX6Wib5FEAR5IGqXUNudSsKucCxj9idb6Dy9TR3je0FrTaDTI5/MAZimR\n",
+       "SCkQCBgtYDmh7uTkxJylfXx8bEQf5Tq8fC1aD9RFlkZqhF/oF2L9j1LqOa6QjvA8ID6NUopOpzNw\n",
+       "bpMcvWOPTCYzMERexK7esx3lRcO4NcJwhXSEZw0RrxanVyRERFpfcjKyTA0fKSj+yrwOk5811MNO\n",
+       "vl8j/K/AM8AHgPcCp9xHR/hRKgGVsyZlDJ8iJ5GP5GZsEsnxO3YYDpMd6TwvaK3vaRgeijT9pelr\n",
+       "wO9qrb+olFrlB/7MR4G01vp9Q39zdT+NEWGXMSil7jr4azgvYyug20vQIhDFxtik6dcI/z3wD0Ml\n",
+       "n/LfrwNflsM2rNcX45NxcF/cjzRj1Qj3i8kFr3od4VcbHmhplFJvBv4NeJHziAngN4H3AAM6wlYf\n",
+       "lPytY2kWHBP5NOPAIc3iY6zlyYGDe8EhjYOR4ZDGwchwSONgZDikcTAyHNI4GBkOaRyMjJnlaRw8\n",
+       "unAsjYOR4ZDGwciYKWmUUs8qpb6rlHpJKfXBKdzvtlLqRaXU80qp/xrj7z+jlDpRSn3Lem1sedv7\n",
+       "3O8jSqn9/hyfV0o9O8L9pirB+4D7jT1H4O7m9WkNwAW8DFwHPMALwGsnvOctID7B37+Fc7HJb1mv\n",
+       "/T7w6/3HHwQ+NuH9Pgz82pjzWwde338cBr4HvHbcOT7gfmPPUWs9U0vzRuBlrfVtrXUH+Dzwrinc\n",
+       "d+wyU63114Hi0Mvv5FzWlv71xye8H4w5R631sdb6hf7jKmBL8I48xwfcb+w5wmyXp01gz3q+zw8m\n",
+       "PC408FWl1DeUUj8/4b0Es5C3fb9S6ptKqU+Pq+Y+bQle637/MekcZ0maWcTyb9JavwF4B+fq6W+Z\n",
+       "5s31uR2fdN6fBB7jvN7oCPj4qDcYluCddI79+/1t/37VSec4S9IcANvW823Orc3Y0Fof9a9Z4Auc\n",
+       "L4GT4kQptQ6mInEieVutdUb3AXxq1Dk+SIJ3nDla9/tLud+kc5wlab4BPKmUuq6U8gLv5lxKdiwo\n",
+       "pYJKqeX+4xDwdqZTZjpVedtJSmGnLcE7s3LdSaKZh/De38G5x/4y512Yk9zrMc4jsBeAb49zP+Bz\n",
+       "wCHQ5tzfei8QB74KfB/4ChCb4H4/y3lH6ovAN/tf7toI93szcNZ/j8/3x7PjzvE+93vHJHPUWjvb\n",
+       "CA5Gh5MRdjAyHNI4GBkOaRyMDIc0DkaGQxoHI8MhjYOR4ZDGwchwSONgZPw/UDzRgG/E2K8AAAAA\n",
+       "SUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792af4a550>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEQZJREFUeJzt3X2QXmV5x/HfL7ub3SVvJE2QEiKBQFLIaIWaDCBCqLRQ\n",
+       "RtC2VqStUtux00FrSoUR+aN/MC1amY7oOHTGQlGwalu0iNOCUEowYiUCCS9ZJIEhJUAhUEiWQF52\n",
+       "w9U/nmfDZtmX+87Jvc9z4vczs5PnnL2ec9977rPPXjkv9+WIEAAAAPbflFZ3AAAAoO5IqAAAACoi\n",
+       "oQIAAKiIhAoAAKAiEioAAICKSKgAAAAq6mxl47aZswEAANRGRHi09UUTKtvnSLpGUoek6yLib0fG\n",
+       "zJ8//y3v6+/v18yZM/dZlzNfVu7cWnv27CkSW6rPObH2qON+QLY9lh07dqi3t3e/t1tqPEpuux3G\n",
+       "GgDQOsUu+dnukPRVSedIOkHShbaPL9UeAABAq5S8h2q5pCciYlNEDEj6jqQPFGwPAACgJUomVPMl\n",
+       "bR62/Exz3YS6u7uLdAiTo7OzpbfmAQAw6UomVPt98wcJVb11dXW1ugsAAEyqkqcSnpW0YNjyAjXO\n",
+       "Uu2jv79/7+vu7m6SKQAAUDslE6r7JR1ne6Gk5yRdIOnCkUEjn+YDAACom2IJVUQM2v6UpB+qMW3C\n",
+       "9RHxWKn2AAAAWqXo3cMRcZuk20q2AQAA0Gotfxxr2rRpSXFvvPFG8jZzYiVpcHCwSGypSSTrOPFl\n",
+       "qfHLnfiyHSbVZLJOADj4UMsPAACgIhIqAACAikioAAAAKiKhAgAAqIiECgAAoCISKgAAgIpIqAAA\n",
+       "ACoioQIAAKiIhAoAAKAiEioAAICKSKgAAAAqanktv9xacylsZ8V3dXUlx3Z2pu+yUvXrStXQy912\n",
+       "qRqIpWKlcnUCc2Jzj88SfdifeADA2IqeobK9wPbdttfbftT2p0u2BwAA0Aqlz1ANSLokItbZni7p\n",
+       "Adt3RsRjhdsFAACYNEXPUEXE8xGxrvl6u6THJB1Rsk0AAIDJNmk3pdteKOlESfdNVpsAAACTYVIS\n",
+       "qublvpslrWyeqQIAADhoFH/Kz3aXpO9K+mZE3DLy+y+//PLe1729vert7S3dJQAAgAOqaELlxvPh\n",
+       "10vqi4hrRouZM2dOyS4AAAAUV/qS33sk/aGkM22vbX6dU7hNAACASVX0DFVE/FjMxg4AAA5yJDsA\n",
+       "AAAVtbz0zK5du5Licsp15Jb2yCnBkVvKpYSSfZgyJT3HzinDM3Xq1OTYkiVRSu27digns3v37qxt\n",
+       "DwwMJMeWKndUKvZgL6tT6njL3Xap8kx4U+5Yl/pbWbe/k63AGSoAAICKSKgAAAAqIqECAACoiIQK\n",
+       "AACgIhIqAACAikioAAAAKiKhAgAAqIiECgAAoCISKgAAgIpIqAAAACpqeemZVDmlL3J1dHQkx3Z1\n",
+       "dRXpQ6kSDjmlZKS8EjE5254+fXpy7KxZs5JjZ8+enRwrST09PcmxOaVZckotzJ07Nzl2586dybEv\n",
+       "vfRScqwkvfjii8mx06ZNS45dvHhxkdicPjz99NPJsZL05JNPFoldtGhRcux5552XHLt8+fLkWCmv\n",
+       "xEjOcb958+bk2NWrVyfHbtq0KTn24osvTo6VpCVLliTH5hxzOfutr68vOfaqq65KjpXyxmTevHnJ\n",
+       "sQsXLkyOPfvss5Njly1blhyb018p7+916t/V8f7ujZlQ2f5dSSFptN/EiIjvpTRuu0PS/ZKeiYj0\n",
+       "TwwAAICaGO8M1XlqJFRjSUqoJK2U1CdpRmqnAAAA6mTMhCoi/qjqxm0fKelcSX8j6S+rbg8AAKAd\n",
+       "TXgTjO3DbV9v+/bm8gm2/yRx+1+SdJmk9BtMAAAAaiblruKvS7pD0hHN5Y2SLpnoTbbfL2lLRKzV\n",
+       "6PdhAQAAHBRSnvKbGxH/bPtySYqIAduDCe87VdL5ts+V1CNppu0bI+Jjw4O2bdu293V3d3fWU1gA\n",
+       "AAClrFq1SqtWrUqKTUmottv+paEF2ydL2jZOvCQpIq6QdEXzPWdIunRkMiXlPSIPAAAwWVasWKEV\n",
+       "K1bsXb7yyivHjE1JqD4j6QeSjrH9E0nzJH1oP/qVPnkSAABAjUyYUEXEA7ZPl7REjXuhHo+I9BnM\n",
+       "Gtu4R9I9+9dFAACA9jZhQmW7V9LFkk5T4yzTatt/HxHpUzgDAAAcxFIu+d0oqV/SV9Q4Q/X7km6S\n",
+       "9HsF+wUAAFAbnqh+je2+iDhhonX71bgdhx9+eFJsTi2/nFp3Ul4dtpxt52w3p9ZWqdiSSu23wcGU\n",
+       "B07flHMc5fSj1DFUx7Fuh/0GAKVExKgftinzUD1o+5ShheZTfg8cqI4BAADU3XjFkR8ZFnOv7c1q\n",
+       "3EP1dkmPT0LfAAAAamGi4sgAAACYwHjFkTcNX7Z9mBozngMAAGCYlOLI59veKOkpNeaS2iTptsL9\n",
+       "AgAAqI2Um9L/WtIpkjZExNGS3ifpvqK9AgAAqJGUhGogIl6SNMV2R0TcLendhfsFAABQGykTe75i\n",
+       "e4ak1ZL+yfYWSdvLdgsAAKA+Us5QfVDS65IukXS7pCfEE4AAAAB7pRRHHjobtUfS14v2BgAAoIbG\n",
+       "m9hzuxoTeY4mImLmgehAaimQnLIaHR0d+9udlilVgqNkGZ6c2JySL6XKw+Ruu9R+LlWGJxelXADg\n",
+       "wBlvHqrpVTdu+1BJ10laqkZy9scR8dOq2wUAAGgnKTelV/FlSf8RER+y3SlpWuH2AAAAJl2xhMr2\n",
+       "LEnvjYiLJCkiBiVtK9UeAABAq6Q85be/jpb0ou0bbD9o+x9sH1KwPQAAgJYomVB1SjpJ0rURcZKk\n",
+       "1yRdXrA9AACAliiZUD0j6ZmI+Flz+WY1Eqx9vPbaa3u/du/eXbA7AAAAZRS7hyoinre92fbiiNgg\n",
+       "6SxJ60fGTZvGfeoAAKDeSj/l9+dqlKuZKulJSR8v3B4AAMCkK5pQRcRDkpaVbAMAAKDVSt5DBQAA\n",
+       "8Auh9CW/CQ0ODibFTZmSnvvlltTIKVVTKjbn5ysV2y66u7uTY2fNmpW17Zkz0ysm9fT0JMfu3Lkz\n",
+       "OXbz5s3JsVu3bi3SB0nFHgLp7Ez/WMm5hzLnuOjt7U2OlfJ+T3L2c86+yNHf358Vn/OZmPqZLEkD\n",
+       "AwPJsbt27UqOLVUiKnfbOaWfco6hrq6u5NjcY6hUn0v1Ied4y4mVypTXGu9nq99fWwAAgDZDQgUA\n",
+       "AFARCRUAAEBFJFQAAAAVkVABAABUREIFAABQEQkVAABARSRUAAAAFZFQAQAAVERCBQAAUJFLTM2e\n",
+       "3Lgdhx56aGpsznaz+lGqlEtOP3LKC5Qsw1NqP+eUIsgpUZETK+WVLihV/iJn/HKOi9zjPqdsSM6+\n",
+       "KLXfWvlZNVzOfm6H0h65co+jVKXGr12Oi3ZR6vicOnVqcmzOmOR8DuUe9wWPuVF3ctEzVLY/Z3u9\n",
+       "7Udsf8t2ejEuAACAmiiWUNleKOkTkk6KiHdI6pD0kVLtAQAAtEqZUugN/ZIGJB1ie4+kQyQ9W7A9\n",
+       "AACAlih2hioiXpb0d5KelvScpK0R8Z+l2gMAAGiVkpf8Fkn6C0kLJR0habrtPyjVHgAAQKuUvCn9\n",
+       "3ZJ+EhH/FxGDkr4n6dSRQTt27Nj7lXO3PwAAQLsomVD9XNLJtnvdeI7zLEl9I4N6e3v3fnV1dRXs\n",
+       "DgAAQBkl76F6SNKNku6X9HBz9ddKtQcAANAqJZ/yU0R8UdIXS7YBAADQapSeAQAAqIiECgAAoKKi\n",
+       "l/xSzJgxIykupyZPyVp+ObXVStVhy6mplLsvOjo6kmN7enqytp0qp17T66+/nrXtnFp+OT/f7Nmz\n",
+       "k2N7e3uTY+fNm5cce8wxxyTHStKSJUuSY3P228aNG5Nj+/re8pzKmPr7+5NjDzvssORYKW/fLV26\n",
+       "NDk25+e79dZbk2M3bNiQHCvlfX7mHPeLFi1Kjj3rrLOSYxcvXpwce+211ybHStJTTz2VHJvz+ZLz\n",
+       "2Xnssccmx1566aXJsZJ01FFHJcc++2z6XNs5v9d33XVXcmzO78i2bduSY6W8z63UPGDHjh1jbyO5\n",
+       "NQAAAIyKhAoAAKAiEioAAICKSKgAAAAqIqECAACoiIQKAACgorZMqHbu3NnqLqCC3Edb0T7Wr1/f\n",
+       "6i6ggpzHxNFecqYFQXtqy4Rq165dre4CKiChqi8SqnojoaqvV199tdVdQEVtmVABAADUCQkVAABA\n",
+       "Rc4pSXDAG7db1zgAAECmiBi1pltLEyoAAICDAZf8AAAAKiKhAgAAqKjtEirb59j+ue2Ntj/b6v5g\n",
+       "bLb/0fYLth8Ztm6O7Tttb7B9h+1DW9lHjM32Att3215v+1Hbn26uZwzbnO0e2/fZXme7z/bnm+sZ\n",
+       "uxqx3WF7re0fNJcZvxprq4TKdoekr0o6R9IJki60fXxre4Vx3KDGWA13uaQ7I2KxpLuay2hPA5Iu\n",
+       "iYilkk6W9Mnm7xtj2OYiYqekMyPiXZLeKelM26eJsaublZL6JA3dzMz41VhbJVSSlkt6IiI2RcSA\n",
+       "pO9I+kCL+4QxRMRqSa+MWH2+pG80X39D0gcntVNIFhHPR8S65uvtkh6TNF+MYS1ExOvNl1Mldajx\n",
+       "u8jY1YTtIyWdK+k6SUNPjTF+NdZuCdV8SZuHLT/TXIf6eFtEvNB8/YKkt7WyM0hje6GkEyXdJ8aw\n",
+       "FmxPsb1OjTG6OyLWi7Grky9JukzSG8PWMX411m4JFXM4HESiMScHY9rmbE+X9F1JKyNin/oXjGH7\n",
+       "iog3mpf8jpR0uu0zR3yfsWtTtt8vaUtErNWbZ6f2wfjVT7slVM9KWjBseYEaZ6lQHy/YPlySbP+y\n",
+       "pC0t7g/GYbtLjWTqpoi4pbmaMayRiNgm6d8l/ZoYu7o4VdL5tp+S9G1Jv277JjF+tdZuCdX9ko6z\n",
+       "vdD2VEkXSLq1xX1CnlslXdR8fZGkW8aJRQvZtqTrJfVFxDXDvsUYtjnbc4eeALPdK+k3JK0VY1cL\n",
+       "EXFFRCyIiKMlfUTSf0XER8X41VrbzZRu+7ckXaPGTZbXR8TnW9wljMH2tyWdIWmuGtf7/0rS9yX9\n",
+       "i6S3S9ok6cMRsbVVfcTYmk+F/UjSw3rz0sLnJK0RY9jWbL9DjZuWpzS/boqIq23PEWNXK7bPkPSZ\n",
+       "iDif8au3tkuoAAAA6qbdLvkBAADUDgkVAABARSRUAAAAFZFQAQAAVERCBQAAUBEJFQAAQEUkVABa\n",
+       "zva9zX+Psn3hAd72FaO1BQAHEvNQAWgbtleoMcnheRnv6YyIwXG+/2pEzDgQ/QOAsXCGCkDL2d7e\n",
+       "fPkFSe+1vdb2SttTbF9te43th2z/aTN+he3Vtr8v6dHmults32/7UdufaK77gqTe5vZuGt6WG662\n",
+       "/Yjth21/eNi2V9n+V9uP2f7m5O4NAHXU2eoOAIDeLH3zWUmXDp2haiZQWyNiue1uST+2fUcz9kRJ\n",
+       "SyPif5rLH4+IV5q17dbYvjkiLrf9yYg4cZS2fkfSr0p6p6R5kn5m+0fN771L0gmS/lfSvbbfExFc\n",
+       "KgQwJs5QAWgnHrH8m5I+ZnutpJ9KmiPp2Ob31gxLpiRppe11kv5b0gJJx03Q1mmSvhUNWyTdI2mZ\n",
+       "GgnXmoh4Lhr3RKyTtLDCzwTgFwBnqAC0u09FxJ3DVzTvtXptxPL7JJ0cETtt3y2pZ4Ltht6awA2d\n",
+       "vdo1bN0e8VkJYAKcoQLQTl6VNPwG8h9Kuth2pyTZXmz7kFHeN1PSK81k6lcknTzsewND7x9htaQL\n",
+       "mvdpzZN0uqQ1emuSBQAT4n9dANrB0JmhhyTtaV66u0HSV9S43PagbUvaIum3m/HDH1G+XdKf2e6T\n",
+       "9Lgal/2GfE3Sw7YfiIiPDr0vIv7N9inNNkPSZRGxxfbxI7atUZYBYB9MmwAAAFARl/wAAAAqIqEC\n",
+       "AACoiIQKAACgIhIqAACAikioAAAAKiKhAgAAqIiECgAAoCISKgAAgIr+Hyoqh+rLDshuAAAAAElF\n",
+       "TkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792aed8c90>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAI0AAACPCAYAAADHlliuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAGLBJREFUeJztnVtsY+tVx3+ft+/29i224yQzk+lp+9AHpNOX8lAq+lBV\n",
+       "p0Jq4YWqEgKVUvEABQESbXmgBV5KJSoED0ioLeKmFgQqKi/QVgKpPHA5qKcXzqVnTjOTjJ2L49jx\n",
+       "/f7xYK9vtj3JnLHjTOxk/6WtOJ5k5zv1v+tb31r/9d9Ka40LF7PAc9ULcLF6cEnjYma4pHExM1zS\n",
+       "uJgZLmlczAyXNC5mxtykUUq9oJR6VSn1ulLqk4tclIvlhpqnTqOUsoDXgPcBeeB/gI9orV9Z7PJc\n",
+       "LCPmjTTvAu5pre9rrXvAV4EPLW5ZLpYZ3jl/bwvYc3z/EPhx5w8opdxS84pDa63Oen/eSOMS4gZj\n",
+       "XtLkgduO728zijYubgDmJc2LwNuVUneVUn7gw8DXF7csF8uMuXIarXVfKfWrwL8CFvAl9+R0czDX\n",
+       "kfupbuwmwiuPRSfCLm4wXNK4mBkuaVzMDJc0LmaGSxoXM8MljYuZ4ZLGxcxwSeNiZrikcTEzXNK4\n",
+       "mBkuaVzMjHlFWAAope4DVWAA9LTW71rEolwsNy5EGkZirPdqrU8WsRgXq4FFbE9ndkJdXF9clDQa\n",
+       "+JZS6kWl1McXsSAXy4+Lbk/v1lrvK6UywDeVUq9qrb+9iIU9ayj1KGBaloXH48Hj8WBZlvl3+ZnB\n",
+       "YGCufr9/Jeu9SlyINFrr/fHXolLqa4xGW1aONEIIpRSWZREOh4lEIkQiEcLhMJZl4fV6sSwLy7I4\n",
+       "PT01V7VaRYRsN8XrZ27SKKXCgKW1rimlIsD7gd9b2MqeIZRSJrJ4vV5s22ZtbY21tTVSqRR+v99c\n",
+       "Pp+PQqFAoVBgOBxSq9WAEWGUUjeCOBeJNOvA18Yh2wv8rdb6GwtZ1TOGkEYiim3bZDIZtra22Nzc\n",
+       "JBQKmSsQCBAOhxkOh1SrVTweD8PhEHAjzZtCa70DPL/AtTwzeDwesx15PB4TQXw+H6FQiHQ6zcbG\n",
+       "Bnfu3OHOnTtmuwqHwwSDQdrtNuVymXw+j8fjmSDLTSDORRPhlYNSCr/fTyAQIBAIEAwGsW2baDSK\n",
+       "bdvEYjE2NjbY2Nhgc3OTXC5nfi4QCOD3+4lEIoRCIfx+v0mUh8Mhg8Hgiv/rng1uJGkCgcAESdLp\n",
+       "NOl0mrW1NfNVrmQyaaKQz+fDsiyi0aghjdc7+p9QcpqbgBtJGr/fTzQaJZVKsba2xq1bt9ja2mJr\n",
+       "a4uNjQ0TeeSSJFm2NSFNIBDAsiyGw+FEbnPdcWNII1HAsixCoRDxeHwid9ne3mZ7e5vbt2+brUu2\n",
+       "IyeGwyHRaNQcySORCJ1Oh06nAzCxRV3X/OZGkMZZrAsEAsTjcbLZrEl0c7kcmUyGWCxGMBjE5/Ph\n",
+       "9XrP3G6UUoRCIVKpFFtbW7ztbW8z9ZrT01OGwyHD4RCttfl63XDtSTN9nA4GgyQSCdbX17lz5w5v\n",
+       "fetbSSaTpFIpYrGY2XKEaGchHA6bba3VanFwcMDR0RHD4ZBWq2WqxcC1TI6vPWkAU7Tz+/0Eg0ET\n",
+       "aW7fvs1zzz1njtORSIRAIGCIdl5i64w0Wmt8Ph9aa1qtFicno4a/1vpaRhm4AaSR1oAQJhqNkkgk\n",
+       "yGQybG5ucvv2bbMdyVfBeR96KBQimUzS7/fx+XwMBgM6nQ71ep1yuUyn06Hb7dLr9UyVWK7rkCzf\n",
+       "CNJI4huPx8lkMmSzWRKJBOFw2ByjLct6qiOzUgqfz0ckEqHf7+PxeOh2u+Yovra2Rr1eN1etVqPd\n",
+       "bpur0+msfAX52pPG4/GYPCabzbKxsWFIEwqF8Hq9E8dpwZM+UCnwyfHd4/EQiURYW1tjc3OTk5MT\n",
+       "SqWS+epMlHu93sT9V5E4N4I0oVDIJL+3bt0im80Sj8cJh8OGNE7pw5tBIo3Ue4QwzWaTRqPBwcEB\n",
+       "hUKBg4MD03qQiFSv11eaMPAUpFFKfRn4KeBIa/1j4/dSwN8B28B94Ge11pVLXOfMEAJMk2Zra4t0\n",
+       "Ok08HjeRxonztg65n2xP0qcCiMViRlszGAxIpVKmACjbXq/Xo1armWKg3GsVifM0keYvgD8F/srx\n",
+       "3qeAb2qtPz82nv7U+LpyOGUOQhjbtkkmk2QyGdbX10kmk0SjUXw+HzApqhoMBqbWInUWIYkky+f9\n",
+       "TelDhcNhEokEnU4HrTX9fp9ms0m5XDYnrcFgsJKEgacgjdb620qpu1NvfxD4yfHrvwT+nSUhDTw6\n",
+       "YktdRk5M6XSa9fV1YrEYkUjEfID9fp9er0e326Xb7T5GoGAwaKQRTyKNvJbIJoQTwsjfFMIMh8OV\n",
+       "jDbz5jTrWuvD8etDRtqapYB8gFKXOSvSCAmckabb7dJqtWi32/R6PUOkwWCAbdtmWzrvb8KjynM4\n",
+       "HDaEiUQiVCoVDg8PDWn6/b4hzSoW/y6cCGut9bL561mWhc/nIxAIEAqFiEajxONx1tbWyGQyZiuR\n",
+       "JmO326XZbFKv12k0GqbG0u12TZ4ipAmFQhP5jTN3EkiuJEf9YrFIPp/Htm1CoZDJaVaRMDA/aQ6V\n",
+       "Ujmt9YFSagM4WuSiLgKlFF6v1yjsRPsiPSUhSr/fN1+Pjo4oFosUi0VKpZIhivxMKpUyHfFUKkUw\n",
+       "GDTRSk5GTiLKa5/Ph1KKeDxOLpfjueeeo9PpcHx8TKVSoVwuUy6XJ7arVdiq5iXN14FfAP5w/PWf\n",
+       "FraiC8JZAZYoEw6HCQQCppDX7/fpdrt0Oh2azSb7+/vs7u7y4MED8vn8RMMRMDob0dokEglzxePx\n",
+       "iQanHN8lKfZ4PMRiMXK5HO12G4/HQz6fJ5/Po7U2kW2Vos/THLm/wijpTSul9oDfBT4H/L1S6mOM\n",
+       "j9yXuchZcVakEdJIpOl0OjQaDU5PT9nf3+eNN97gtdde44033jD3ke0nk8lMXOvr67TbbWBU6JPo\n",
+       "IMdrIY5IQSXSeDweotEowWDQEKZYLJqItyri9Kc5PX3knH9634LXshBMRxqnNFMKedO9okKhwM7O\n",
+       "Dq+88govv/zyxJHd6/VSKpVMhVd6SwDBYJBYLGYIIomycyQGwLZt4FHPajAYUKvVODw8xOfz0ev1\n",
+       "VqovdS0rws4PbfrSWtNutzk9PeXo6Ij9/X1KpRK1Wo1ut2vuIQ3GwWBAu92mVqvh9XoZDodEIhFi\n",
+       "sRipVIpWqzVxWnPKPqdfO+s/sgU6r1XBtSQNPCLOdItgOBzSbrepVCocHR2Rz+cpFosTpJEPUP6f\n",
+       "L6SR343FYiSTSWq1Gs1m0xDmrEgx3eGeLh5Ok2cVcC1J4yTMWY3IVqtlIk0+n+f4+PjcSKO1Np1p\n",
+       "IU8qlSKbzVKv12m1WuZoL0nstMh8Oso4ibNqhIEbQhoncYQE1WqVYrHI/v4+Jycn1Ov1xyKNvBZZ\n",
+       "g9x7fX2dSqVCvV6n3W6buo6QYLrx+WaRxkmeVcC1I43ogG3bJpVKkclkTEdbElWfz0c4HCYej5NK\n",
+       "pcxJarp5eRamE23bts2R/jxNjiTezWaTWq1mIpS0LJz1mVUgzrWzT3OOqEjrQARXUksR0kgya9s2\n",
+       "wWDwqUgDk0d65wyU1GZg8sOXNkWz2aRardJoNAxppIC4KoU9uIakebNII6RyRppYLHamTOI8SKQJ\n",
+       "h8MTkea833+zSCMV4VUhzbXbnpwTlKlUymhnzoo08XjcFNjO62BP3xsmI41t26YG5Iw0TjgjTa1W\n",
+       "o9FomFxItqdVwrUjjSS6tVqNk5MTisUiXq/XkAQe/9BFfRcOh40fjfNyjuX6/X62t7dZX1/Htu0J\n",
+       "5Z9UdqeP+9LtTqVS9Ho96vU6p6enlEolIpEI7XbbJMnXoo2wapAWQb1e5+TkxESZeDxupgPOIo2T\n",
+       "OE4vGtmG5IpEIty9e5dsNott22bLk7+ttZ44sTlzLPn71WqVUqlkdD2A0Q6vQm5z7UijtTZa3HK5\n",
+       "TDQaNUP+EgWENDJS6yRMJBIxXWzpZMskg1zb29uGNF6vd6LWMhgMJpqVgCGNbJ3lcpnDw0NisZjx\n",
+       "upFIJeRZZsyrEf4s8EtAcfxjn9Za/8tlLXIWSKSR7SkYDJJOp2k2m+YDEUWfCK1s256wS3POaUej\n",
+       "UdPdzmQyrK2tkc1mzRiviKokNxEPPmfj0e/3G8LEYjFKpRKpVIp4PG5GYQaDwcSc1DJjXo2wBr6g\n",
+       "tf7CpazqAtBa0+v1aLVaVKtVwuEwtVpt4rQCj4gzGAzIZrO0Wi2jfXFqZSTSiAwiHo8TjUaxLItO\n",
+       "p0OlUqHRaBgBV71eNxYmcsm6JMqJhlgG9iSRlrHeZce8GmFYUv9g2Z6azSaWZREIBEyPqNPpGBWe\n",
+       "kMbj8ZDL5bAsC9u2zYco+Ywzp5FIJCKrTqdDv9/n+PjYXKVSiWw2Sy6XI5fLTRg8yu8JEcWiTeQa\n",
+       "Qtxlx0Vymk8opX6e0YPdf2tZRlickQZG1V8hjUQaIY3kNqJzyWazhmxnnZ6ETGItIuO3+/v7PHz4\n",
+       "kHw+z97eHnfv3qXb7eL1eo2pgAzVOWWg6XSaRqNhphUqlcq1Js2fAb8/fv0HwB8BH1vIii4IIQ2M\n",
+       "TiQej+ex7UmIIKSIRqNn5hHy3vQHWalU6Pf7Znva399nZ2eHe/fu8frrr9NqtYzhYy6Xm2g9OI//\n",
+       "mUyGfr9vuu7BYPD6kkZrbTTBSqkvAv+8sBUtAE6Vv3ywBwcH7OzsmGqx2KfJeO20cAoekUakofK1\n",
+       "WCxyfHxsvu7u7pLP5ymVSjQaDWq1mtEAHx8fG1WeRBnZGm3bpt1uk0gkzFpkAM/ZzFw2zEUapdSG\n",
+       "GE8DPwN8f3FLujjk6Asj0oikU2oi2WyWbDZr5rzPklAA5hgspzG59vf32d/fN6O3Qp6TkxMzmlut\n",
+       "Vg1pJNKEQqEJ8ti2Tb/fJx6Pm5qNTCv0+31DtmU7Tc2jEf4M8F6l1POMTlE7wC9f6ipngFOGIElx\n",
+       "pVKhUCiglKLT6ZiEMxKJkEgkzO86db1OiJRCEt3d3V329vbY3d3l4cOHxiFCGpFS8T05OeH4+Bif\n",
+       "z0cwGDTSC/leZKASaaT5KfUkGeRbNsyrEf7yJaxlYZgO6ZVKBY/HYyrF4vKQyWRMYuyMMtO1Eok0\n",
+       "x8fHFAoFHjx4wM7ODj/60Y/Y2dl5bKy3Xq9PRBqJKs46kWxDXq+XRCIxEWm63a4hzDLWba5dRXga\n",
+       "Em0ajYbZggqFgnG9krFb8QkOBAKPVXjz+TwPHz5kb2+PfD5PoVCYUPtNC6mcGmSZgJBBu0gkYo7Y\n",
+       "sj1KR15cLYLBIJXK6DAqIvZlwo0gjfMIrrXm4OCAQCAAQKvVeqz/5ExC+/0+e3t7Zjva29vj+PiY\n",
+       "crlMq9U6U3kneZTMWEm9SPQ3Mrgnx3jpyOdyOer1uulndbtdqtXq0jUxbwxpADNqK4SRXCWZTJor\n",
+       "Ho9PbDW9Xo+9vT0ePHjAgwcP2N3dNflLs9mc0AULhDTS0ZYoI+PB0j6Q0WFx0Go0Gmat3W6XWq12\n",
+       "rlnkVeLGkEb6Os1mE3iUp0gFN5PJmERWTi4inpIoc//+fXZ3d41BgKjuptFut+n3+9TrdSzLMltQ\n",
+       "IpEglUpNmAn4/X6zPckaJcIcHR25pLkqTOtvJSFWSpmI0m63jZzCaVDU7/cpFApmzEW0L0+qoUgS\n",
+       "K/UikXmWy2WKxeLE6clpLGDbNp1Ox0QikaBalrVUUws3gjTTcOY4MtctUoqzchoZ1G80GhPD+ufB\n",
+       "SVJxpWg0GoY0slXJKUlyHul4y0nKaYQ9PblwlbhxpHHmOFLCbzQaVCoV06CcHnBrNpu0Wi1jLC33\n",
+       "edKH5zQtkshWqVQoFotG0O4kjUg1gMdII/qcqyaL4MaSpt/vmyLfWW2E6dmnWQbbztoOncRMpVJm\n",
+       "zkoqxJKcS89KSON80suytBRuHGng2buJ93o908X2eDxG8F4qlSZ8+JRSxu7NacJkWRbNZtOc1q46\n",
+       "4txI0jxr9Ho9U1wcDAYkk8mJZ0qJvaxTvyMegZubm3i9XsrlsikcXnXEcUnzDCCRZjAY0Gq1SCQS\n",
+       "Ew8kSyaTRm8sxtYindjc3DTbo5giXTWeSBql1G1GMs8so+bkn2ut/0StgI/wMkEMH9vtNkopQxq5\n",
+       "pM0gNrXSSJXaUa/XM62JpScN0AN+Q2v9klIqCvyvUuqbwEdZUh/hZYTTNUIKjKenp+YkJbUaEYP5\n",
+       "/X5isZgRaUmJQH7nqo0DnkgarfUBcDB+XVdKvQJsseQ+wsuMadcKsXULh8PGJcvn8xmtjc/nMxXi\n",
+       "4+NjU0cSMl2FdOKpc5qxuPydwH+xxD7CywqndNRJGq21sVVrNpuGKDKIF4vFaLValEolIyTrdrtG\n",
+       "IHYVp6mnIs14a/pH4Ne11rUpSeTS+QgvM5yRBjDOWrlcziTL0mIQkVa9Xufg4IBEIkEkEqHVak3o\n",
+       "bZ41nka552NEmL/WWov169L6CK8CJCkWXY0o/A4PDzk4ODDP0pRLbPqz2Sybm5tYlmUmF8RMSfAs\n",
+       "os6bnZ4U8CXgZa31Hzv+aWl9hFcBIrmQmou0FwqFghltEZ9iEYbF43HW19fZ3t424zeDwWCiH/as\n",
+       "tqk3izTvBn4O+J5S6jvj9z7NkvsILzvEekRmt8U0Umzw5d/kFOX3+w1pxMZNCHNycjJhKXvlkUZr\n",
+       "/R+cb3y0lD7CqwAhi+hnJNLIeIvMfcfjcbTWE5EGMHKLUqlkrE7kOP8sNMVuRfgKML2VtNttYz/i\n",
+       "NFxKp9NGIyzmAVprI9ByRqYnicIWDZc0SwBpM5TLZfMshXQ6TbVapdlsmnHeaDRq+lCJRMI890Eq\n",
+       "zSKEdyPNDcB0QzORSFAul80MumxbYkAgNiUSaeRZCzKOc9lwSbMEENKIEUAymaRSqRjS+P1+YyIQ\n",
+       "CAQ4OTkxA3bhcNicoJ6VIZJLmiWAJMYSLZxu6rFYjPX1dRKJBMlk0kQbeVpeNptlMBiYKOVUF14W\n",
+       "XNIsAeS4LB92tVo1s1n9fp/T01Nu3bqFZVnGfcu2bTKZDLdu3QIetSdOT08vfb0uaZYAzsc6D4dD\n",
+       "arUaBwcH5hE/nU7HJMgbGxtm7CWdTlOv143tSbVaPdeWdpFwSbMEmJZOyFSlPBNKa21cupxd8HQ6\n",
+       "Ta/Xm2iAPgu9jUuaJYFTjC7uEiJ+Fy1xtVo1BgZyBM9kMqZuIyO/Z82XLxIuaZYQkhDLtEKr1TKe\n",
+       "N+KYpbUmGAySTCYnjuAinZBin3jcLBIuaZYQTp2MzF2JfUmlUjFqv2AwaOo2TjetdrtNp9O5NH+b\n",
+       "J26ASqnbSql/U0r9n1LqB0qpXxu//1ml1EOl1HfG1wsLX9kNhnzYkq9MR5pms2kijcyHT7tpybzU\n",
+       "ZRT75tUIL62P8HXA9LBdq9UyvoGJRIJut0s6nTZ5jbhPSOSRaU1R+C0a82qEYUl9hK8j2u02pVKJ\n",
+       "vb09AOOJLM6kcuqSSc1AIGD+/SoijYFDI/yfjHQ2S+kjfB0hpNH60cPfPR6PKfBNG2o7H1x/GaR5\n",
+       "qkP9eGv6B0Ya4TojH+G3AM8D+4x8hF1cEuTYvbe3x6uvvsq9e/coFArmGeESacTCxGkccCWRxqER\n",
+       "/hvRCOsl9xG+bpB6jViWHB0dUSgUWFtbIx6P02q1ODo6olKpPPaYw8uQScylEV52H+HrCKcnTrVa\n",
+       "pVAo4Pf7zfH66OiIYrHI4eGheU6mRKFFYx6N8O8AH1lWH+HrCCdhtNbUajUKhQKdTsc4oosxdrVa\n",
+       "NY+EluLgoqEuS+XlzkItFs7cxPmEGL/fP+Fw7nz21EWds7TWZyZELmlcnIvzSHP1FgQuVg4uaVzM\n",
+       "DJc0LmaGSxoXM8MljYuZ4ZLGxcy4tCO3i+sLN9K4mBkuaVzMjEsljVLqBaXUq0qp18cuoBe9332l\n",
+       "1PfGEtP/nuP3v6yUOlRKfd/xXkop9U2l1A+VUt9QSiWedI+nuN/cUtgnyGvnWuOlyXWdfv+LvAAL\n",
+       "uAfcBXzAS8A7LnjPHSB1gd9/DyMh2fcd730e+O3x608Cn7vg/T4D/Oac68sBz49fR4HXgHfMu8Yn\n",
+       "3G/uNWqtLzXSvAu4p7W+r7XuAV8FPrSA+86tKtJafxsoT739QUa2toy//vQF7wdzrlFrfaC1fmn8\n",
+       "ug44LXhnXuMT7jf3GuFyt6ctYM/x/UMeLXheaOBbSqkXlVIfv+C9BJdhb/sJpdR3lVJfmmW7c2LR\n",
+       "FrxTct0LrfEySXMZZ/l3a63fCXwA+BWl1HsWeXM9iuMXXfeFpbDTFrwXXeOi5bqXSZo8cNvx/W1G\n",
+       "0WZu6LFaUGtdBL7GaAu8KA6VUjkYKRK5oL2t1vpIjwF8cdY1PsmCd541nifXvcgaL5M0LwJvV0rd\n",
+       "VUr5gQ8zspKdC0qpsFLKHr+OAO9nMTJTsbeFBdjbjj9UwUxS2Kew4J1pjU+S6867RuDyTk/jjP0D\n",
+       "jDL2e8CnL3ivtzA6gb0E/GCe+wFfAQpAl1G+9VEgBXwL+CHwDSBxgfv9IqOn1nwP+O74w12f4X4/\n",
+       "AQzH/43fGV8vzLvGc+73gYusUWvtthFczA63IuxiZrikcTEzXNK4mBkuaVzMDJc0LmaGSxoXM8Ml\n",
+       "jYuZ4ZLGxcz4f041SDwzkyB1AAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792b570150>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlQAAACbCAYAAACkuQVhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAEbVJREFUeJzt3X+QXWV9x/HPJ7vZZJONYfgRAyE0SQsUMloBYQIiCNpC\n",
+       "QcBSK9JWqWXsdNRKqTJiZtq/2tHKdERH2hlLioJVS1GiDi1CW0DEQviRAEkghI78SGiyQCHssmST\n",
+       "Dd/+ce+GzbI/nicnz557w/s1s5N7zv3e8zz3POfe+8359XVECAAAAHtvWt0dAAAAaHckVAAAABWR\n",
+       "UAEAAFREQgUAAFARCRUAAEBFJFQAAAAVddbZuG3u2QAAANpGRHis+UUTKttnS7paUoekayPib0fH\n",
+       "zJs3702v6+/vV09Pz163+/rrrxeLLxW7P90PbHBwUDNmzNhjXs77K7WOc+N37dqVHJvz/krFAgDq\n",
+       "U+yQn+0OSd+QdLakYyVdbPuYUu0BAADUpeQ5VCdJejIinoqInZK+L+mCgu0BAADUomRCtUDSsyOm\n",
+       "NzXnTaqrq6tIhzA1Ojo66u4CAABTqmRCtdcnf5BQtbfOzlqvdQAAYMqV/OXbLGnhiOmFauyl2kN/\n",
+       "f//ux11dXSRTAACg7ZRMqB6QdKTtRZKek3SRpItHB1W5mg8AAKAVFEuoImLI9mck/VSN2yasiIjH\n",
+       "SrUHAABQF9d5nxvbMdZ9qKriPlSth/tQlY8FAJRXy409U8ycOXOfLzP3R6gVfmRLJV+5CUfO+xsa\n",
+       "GirSj5KJaE68PeZnZr9BsgYA+w61/AAAACoioQIAAKiIhAoAAKAiEioAAICKSKgAAAAqIqECAACo\n",
+       "iIQKAACgIhIqAACAikioAAAAKiKhAgAAqIiECgAAoKLaa/kNDAzU3YViStVKy63Pl6Pdat2VrNtI\n",
+       "rTsAQKqie6hsL7R9h+11ttfa/mzJ9gAAAOpQeg/VTkmXR8Qa2z2SHrR9e0Q8VrhdAACAKVN0D1VE\n",
+       "bImINc3H/ZIek3RYyTYBAACm2pSdlG57kaTjJN03VW0CAABMhSlJqJqH+26SdFlzTxUAAMB+o/hV\n",
+       "franS/qBpO9ExMrRz7/66qu7H0+fPl1dXV2luwQAALBPFU2o3LiufoWk9RFx9Vgxs2fPLtkFAACA\n",
+       "4kof8nuPpD+UdIbt1c2/swu3CQAAMKWK7qGKiJ+Lu7EDAID9HMkOAABARbWXnpkxY0ZS3LRp5XK/\n",
+       "nBIjpUqXtEIfpLz1vGvXruTY6dOnZ/WjFeSsi5zYnJI9OWM9ODiYHCtJO3fuTI7NGetSctZFbnmm\n",
+       "VigzlLMNdXbmfXXnXOyT04+cbS5neytZXqvd5P725Xy/lPouKvXb1+rYQwUAAFARCRUAAEBFJFQA\n",
+       "AAAVkVABAABUREIFAABQEQkVAABARSRUAAAAFZFQAQAAVERCBQAAUBEJFQAAQEW1l5456KCDkuKG\n",
+       "hoaSl5lbJiPn9vs5cvqcc1v/nPeX+95yS1qkmj17dpHYuXPnZvUjtdSRlFcqY8eOHcmxOeu4t7c3\n",
+       "Ofb5559PjpWkvr6+rPhUBx54YHLskiVLiix369atybGStGnTpuTYF154ITk2Z/s88cQTk2PPPffc\n",
+       "5Fgpb/vcsmVLcuyqVauSY9euXZscm7NtnnXWWcmxknTBBRckxx555JHJsQMDA8mxK1asSI69+eab\n",
+       "k2Ml6bXXXkuOzfl9yPnemjVrVnLsEUcckRx7wgknJMdK0uGHH54cm/q5vuaaa8Z9btw1ZPt3JYWk\n",
+       "sdZ4RMQPUxq33SHpAUmbIuK8lNcAAAC0k4lSzvPUSKjGk5RQSbpM0npJc1I7BQAA0E7GTagi4o+q\n",
+       "Ltz24ZLOkfQ3kv6i6vIAAABa0aQnpdueb3uF7Vub08favjRx+V+VdIWk9BOEAAAA2kzKVX7fknSb\n",
+       "pMOa0xslXT7Zi2x/UFJvRKzW2OdhAQAA7BdSTts/OCL+xfaVkhQRO22nXL52iqTzbZ8jaaakt9m+\n",
+       "PiI+PjJo5BUlPT096unpSe89AABAIZs3b9bmzZuTYlMSqn7bu+9tYHuZpG2TvSgilkta3nzN6ZI+\n",
+       "PzqZkqT58+cndRQAAGAqLViwQAsWLNg9ff/9948bm5JQfU7STyQtsf0LSYdI+vBe9GuiKwYBAADa\n",
+       "1qQJVUQ8aPs0SUercS7UhohIv+NhYxl3Sbpr77oIAADQ2iZNqGx3S/qUpFPV2Mt0t+1/iIjtpTsH\n",
+       "AADQDlIO+V0v6RVJX1djD9XvS7pB0u8V7BcAAEDbcMTEpzbZXh8Rx042b68at+Owww6bPFB59esm\n",
+       "e0+j5dTRy112qmnT0utUd3R0JMdOnz49qx85y86p7ZSz3Jz6Ujn1EqW8sc5Zdk7dv5yxTq11KUkH\n",
+       "HHBAcqyUV9cwp45ef39/cuyTTz6ZHJtTqzCnD1K575eurq7k2Hnz5iXH5n6uu7u7k2Nzts+c2O3b\n",
+       "0w9qbNs26XVPu+XWK83ZNkrV82yF35xcOeu51O9ZTmxufGqfX3nlFUXEmCsjZQkP2T55eKJ5ld+D\n",
+       "SS0DAAC8BUxUHPnRETH32H5WjXOojpC0YQr6BgAA0BYmK44MAACASUxUHPmpkdO256lxx3MAAACM\n",
+       "kFIc+XzbGyX9Uo17ST0l6d8L9wsAAKBtpJyU/teSTpb0REQslvR+SfcV7RUAAEAbSUmodkbEC5Km\n",
+       "2e6IiDskvbtwvwAAANpGyo2EXrI9R9Ldkv7Zdq+kvJu8AAAA7MdS9lB9SNKApMsl3SrpSXEFIAAA\n",
+       "wG4pxZGH90btkvStor0BAABoQ+OWnrHdr8aNPMcSEfG2yo3bMX/+/KTYnFvZ55REafajSGwpOaUI\n",
+       "cssW5MTnlE9ohVgpr8RIKa1Q/ia3HznbfU5ZlNzPaqrccR4YGEiOzd3mUFbJ7+RWKfuC1jJe6ZmJ\n",
+       "7kPVU7VR2wdIulbSUjWSsz+OiHurLhcAAKCVlPnv4Ru+JunfIuLDtjslzS7cHgAAwJQrllDZnivp\n",
+       "vRFxiSRFxJCk9BLiAAAAbSL9xKR8iyU9b/s62w/Z/kfbswq2BwAAUIuSCVWnpOMl/X1EHC/pVUlX\n",
+       "FmwPAACgFiUTqk2SNkXE/c3pm9RIsPbQ19e3+29wcLBgdwAAAMoodg5VRGyx/aztoyLiCUkfkLRu\n",
+       "dNycOXNKdQEAAGBKlL7K78/UKFfTJel/JH2icHsAAABTrmhCFREPSzqxZBsAAAB1K3kOFQAAwFtC\n",
+       "6UN+tcgtO9HR0VGkHzklEXJK65TUCmU1ckqXdHd3Zy17xowZybE56yKndMmLL76YHLtjx47k2Nzt\n",
+       "vtRY55TAydnuc2Jzy5GU+vyVKmuV+/5KlasqVaoqR6uUh2mFsmTtqFVKv6VuRxNtx63xKw4AANDG\n",
+       "SKgAAAAqIqECAACoiIQKAACgIhIqAACAikioAAAAKiKhAgAAqIiECgAAoCISKgAAgIpIqAAAACpy\n",
+       "nbfttx1z5swpsdxi8aXKX3R2plcByulD7vjm9DmnZE9O2Ymccis5ZU5y+5FTyiVnPeeU1skplZNb\n",
+       "PuW1115Ljs0Zk5x1XCq2VcqAzJo1Kzl25syZybG5233O9pmzHQ0NDSXHDg4OJsfmfPZyt/uc7Shn\n",
+       "vbVKqbGc7+VDDjkkOXbZsmXJsYceemhybF9fX3Ls2rVrk2MlacuWLcmx27ZtS4rr6+tTRIw52EX3\n",
+       "UNn+ou11th+1/V3b6b8OAAAAbaJYQmV7kaRPSjo+It4hqUPSR0u1BwAAUJf040z5XpG0U9Is27sk\n",
+       "zZK0uWB7AAAAtSi2hyoi/k/S30l6RtJzkl6OiP8o1R4AAEBdSh7y+1VJfy5pkaTDJPXY/oNS7QEA\n",
+       "ANSl5Enp75b0i4h4MSKGJP1Q0imjgwYHB3f/5VwxAgAAUNLQ0NAeecpESp5D9bikv7TdLWm7pA9I\n",
+       "WjU6KOeycAAAgKnS2dm5x22NJrqFTMlzqB6WdL2kByQ90pz9zVLtAQAA1KXkHipFxFckfaVkGwAA\n",
+       "AHWj9AwAAEBFJFQAAAAV1V7L7+ijj06KLdnPnLpKOTX3cmoq5dSB6urqSo7NqVsl5a2LnH7kyOnz\n",
+       "wMBAkT5I0ty5c5Njc2pXdXd3F+nDQQcdlBwr5fU5Zz1v3749OTbnyt6c7S31e2XY0qVLk2Nz1tvT\n",
+       "Tz+dHHvjjTcmx65cuTI5VsqrxZizHZ122mnJsWeeeWZy7JIlS5Jjb7nlluRYSbrzzjuTY5955pnk\n",
+       "2Jy6tJdeemly7IUXXpgcK+XVCt26dWty7L333pscu3HjxuTY3t7e5Nic+qNSXv3IxYsXJ8UtX768\n",
+       "nlp+AAAAbwUkVAAAABWRUAEAAFREQgUAAFARCRUAAEBFJFQAAAAVtWRCVfJSeJT38ssv190F7KXH\n",
+       "H3+87i6ggr6+vrq7gL20bt26uruAikiosM9t27at7i5gL23YsKHuLqACEqr2tX79+rq7gIpaMqEC\n",
+       "AABoJyRUAAAAFdVeeqa2xgEAADKNV3qm1oQKAABgf8AhPwAAgIpIqAAAACpquYTK9tm2H7e90fYX\n",
+       "6u4Pxmf7n2xvtf3oiHkH2r7d9hO2b7N9QJ19xPhsL7R9h+11ttfa/mxzPmPY4mzPtH2f7TW219v+\n",
+       "UnM+Y9dGbHfYXm37J81pxq+NtVRCZbtD0jcknS3pWEkX2z6m3l5hAtepMVYjXSnp9og4StJ/NqfR\n",
+       "mnZKujwilkpaJunTzc8bY9jiImK7pDMi4l2S3inpDNunirFrN5dJWi9p+GRmxq+NtVRCJekkSU9G\n",
+       "xFMRsVPS9yVdUHOfMI6IuFvSS6Nmny/p283H35b0oSntFJJFxJaIWNN83C/pMUkLxBi2hYgYvgNy\n",
+       "l6QONT6LjF2bsH24pHMkXStp+Koxxq+NtVpCtUDSsyOmNzXnoX28PSK2Nh9vlfT2OjuDNLYXSTpO\n",
+       "0n1iDNuC7Wm216gxRndExDoxdu3kq5KukPT6iHmMXxtrtYSKezjsR6JxTw7GtMXZ7pH0A0mXRcQe\n",
+       "tUsYw9YVEa83D/kdLuk022eMep6xa1G2PyipNyJW6429U3tg/NpPqyVUmyUtHDG9UI29VGgfW23P\n",
+       "lyTbh0rqrbk/mIDt6WokUzdExMrmbMawjUTENkm3SDpBjF27OEXS+bZ/Kel7ks60fYMYv7bWagnV\n",
+       "A5KOtL3IdpekiyT9uOY+Ic+PJV3SfHyJpJUTxKJGti1phaT1EXH1iKcYwxZn++DhK8Bsd0v6TUmr\n",
+       "xdi1hYhYHhELI2KxpI9K+q+I+JgYv7bWcndKt/3bkq5W4yTLFRHxpZq7hHHY/p6k0yUdrMbx/r+S\n",
+       "9CNJN0o6QtJTkj4SES/X1UeMr3lV2M8kPaI3Di18UdIqMYYtzfY71DhpeVrz74aIuMr2gWLs2ort\n",
+       "0yV9LiLOZ/zaW8slVAAAAO2m1Q75AQAAtB0SKgAAgIpIqAAAACoioQIAAKiIhAoAAKAiEioAAICK\n",
+       "SKgA1M72Pc1/f8X2xft42cvHagsA9iXuQwWgZdh+nxo3OTwv4zWdETE0wfN9ETFnX/QPAMbDHioA\n",
+       "tbPd33z4ZUnvtb3a9mW2p9m+yvYq2w/b/pNm/Pts3237R5LWNuettP2A7bW2P9mc92VJ3c3l3TCy\n",
+       "LTdcZftR24/Y/siIZd9p+19tP2b7O1O7NgC0o866OwAAeqP0zRckfX54D1UzgXo5Ik6yPUPSz23f\n",
+       "1ow9TtLSiHi6Of2JiHipWdtule2bIuJK25+OiOPGaOtCSb8h6Z2SDpF0v+2fNZ97l6RjJf2vpHts\n",
+       "vyciOFQIYFzsoQLQSjxq+rckfdz2akn3SjpQ0q81n1s1IpmSpMtsr5H035IWSjpykrZOlfTdaOiV\n",
+       "dJekE9VIuFZFxHPROCdijaRFFd4TgLcA9lABaHWfiYjbR85onmv16qjp90taFhHbbd8haeYkyw29\n",
+       "OYEb3ns1OGLeLvFdCWAS7KEC0Er6JI08gfynkj5lu1OSbB9le9YYr3ubpJeaydSvS1o24rmdw68f\n",
+       "5W5JFzXP0zpE0mmSVunNSRYATIr/dQFoBcN7hh6WtKt56O46SV9X43DbQ7YtqVfS7zTjR16ifKuk\n",
+       "P7W9XtIGNQ77DfumpEdsPxgRHxt+XUTcbPvkZpsh6YqI6LV9zKhla4xpANgDt00AAACoiEN+AAAA\n",
+       "FZFQAQAAVERCBQAAUBEJFQAAQEUkVAAAABWRUAEAAFREQgUAAFARCRUAAEBF/w/CsMbhRL/ldgAA\n",
+       "AABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7d792baffb50>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "for i in range(8):\n",
+    "    figure(figsize=(2, 2))\n",
+    "    imshow(solver.test_nets[0].blobs['data'].data[i, 0], cmap='gray')\n",
+    "    figure(figsize=(10, 2))\n",
+    "    imshow(exp(output[:50, i].T) / exp(output[:50, i].T).sum(0), interpolation='nearest', cmap='gray')\n",
+    "    xlabel('iteration')\n",
+    "    ylabel('label')"
+   ]
+  }
+ ],
+ "metadata": {
+  "description": "Define, train, and test the classic LeNet with the Python interface.",
+  "example_name": "Learning LeNet",
+  "include_in_docs": true,
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "python2"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.9"
+  },
+  "priority": 2
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/examples/02-brewing-logreg.ipynb b/examples/02-brewing-logreg.ipynb
new file mode 100644
index 0000000..d36871f
--- /dev/null
+++ b/examples/02-brewing-logreg.ipynb
@@ -0,0 +1,5771 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Brewing Logistic Regression then Going Deeper\n",
+    "\n",
+    "While Caffe is made for deep networks it can likewise represent \"shallow\" models like logistic regression for classification. We'll do simple logistic regression on synthetic data that we'll generate and save to HDF5 to feed vectors to Caffe. Once that model is done, we'll add layers to improve accuracy. That's what Caffe is about: define a model, experiment, and then deploy."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline\n",
+    "\n",
+    "import os\n",
+    "os.chdir('..')\n",
+    "\n",
+    "import sys\n",
+    "sys.path.insert(0, './python')\n",
+    "import caffe\n",
+    "\n",
+    "\n",
+    "import os\n",
+    "import h5py\n",
+    "import shutil\n",
+    "import tempfile\n",
+    "\n",
+    "import sklearn\n",
+    "import sklearn.datasets\n",
+    "import sklearn.linear_model\n",
+    "\n",
+    "import pandas as pd"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Synthesize a dataset of 10,000 4-vectors for binary classification with 2 informative features and 2 noise features."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAiMAAAImCAYAAACB54oCAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvXmQHOd5p/m8mZWVdZ9dfV/oRqPRAIiDIMH7ECmJkqjL\n",
+       "lqxrZK3Xno0Zz3i0G2Fv7EZs7IR3YsYbc2x45N2Vx4csj6WRZy1bHh2ULZOUSPGUCJIAiLvRDfR9\n",
+       "Vtd9V+a3f2RBbIIACRJoNAjkE4GjMquy3swvK/OX7/ceopTCxcXFxcXFxWWz0DbbABcXFxcXF5eb\n",
+       "G1eMuLi4uLi4uGwqrhhxcXFxcXFx2VRcMeLi4uLi4uKyqbhixMXFxcXFxWVTccWIi4uLi4uLy6ay\n",
+       "oWJERH5fRH4qIv/xguU+EfmaiDwpIl/ZSBtcXFxcXFxcrm82TIyIyK1AUCl1P+AVkdvWrf4y8F+U\n",
+       "Ug8rpf7HjbLBxcXFxcXF5fpnIz0jdwD/0Pr/E8Bd69Y9AHxcRH4iIh/bQBtcXFxcXFxcrnM2UozE\n",
+       "gELr/7nW6/MMAz8AHgX+dxHRN9AOFxcXFxcXl+sYzwZuOwdEWv+PAtkL1j2tlGqIyBmgA5hf/2ER\n",
+       "cevUu7i4uLi43EAopeRiyzdSjLwA/BPg28DDwNfXrXse2CMirwKDwPLFNnApo11uTERE3QxjLiIa\n",
+       "dPwWPByCbavO0ooHnuiDl/9cKXVqcy28ttws4/52iIgHur4M7zdhOO0sLRnweC8c+ppS6szmWnh1\n",
+       "ccf92iMSvAf2fxQeOAceBTZwqAeePaVU+lsb//2XdjJs2DSNUupVoCoiPwWaSqmDIvIHrdX/Fvg3\n",
+       "wLPAnyilmhtlh4vLdUg/DLS/LkQA/E3YnYGOuzfPLJdNZgAGE68LEYBgA/bkoP2uS3/MxeXtERGB\n",
+       "+P2wf94RIuBIgL1z0D4mIrG33MAGs5GeEZRS/9MFr7/c+ncReGQjv9vF5TomAOGLPCFEK2Akrr05\n",
+       "LtcJgddnttcTqYAnfs2tcbnR0MAIQiz9psWEFBDgjeEU1xS36Nl1hgg7RfiSCMnNtsVlw1iGeQ2s\n",
+       "C1zUswkonN4ck1yuA5ZhThzX+Xrm4lAc3xSLXG4YlFIWVGdg+gIPSNkDKxawtimGtXDFyHWECF8E\n",
+       "fgz8MvCqCIOba5HLRqCUWoX5n8NTg5D2Q02Hkx1wUEHuhc22z2VzUEotwewr8PQWWGudF8c74WAD\n",
+       "8j/bbPtcbgQW/wGej8DZBDQ0WAzBT/pg5XGlVHUzLROlrs+klZstuEmEUZwYmgeU4rgIvw18CrhH\n",
+       "Ka7PQbrK3Exj7qSzm7dB272ghaB0AtaeUkpdNJj7RuZmGve3wwli9d0OyXtAC0LpWOu8WH37T7+3\n",
+       "cMd9cxCRAWh/CLz9YKVh6Smwj6lrIAbeasxdMXKdIMLfAC8oxX9ovdaAl4DfU4q/2VTjrhE325i7\n",
+       "OLjjfnPijvvNhytGrnNEuAWnWu2wUpTXLX8U+NfArTeDd+RmGnOX13HH/ebEHfebj7caczdm5Prg\n",
+       "N4GvrhciLf4OCAMHrr1JLi4uLi4u1wZXjGwyIviAzwL/+cJ1SmEDf4RTPM7lOkVEvCISFxFjs21x\n",
+       "2RhEJCwi0c22w8Xl3SAigdY16rq957vTNJuMCJ8F/rFSfOAS67uBo0CXUtSuqXHXmPfamIuIHoL7\n",
+       "k3B/GPQCNFbhqRI8ey2CwW4UrudxF5FkCj6RgCEBsjC3CN9VSs2/7Ydd3pLredxvFEQkEIcPJ2Cv\n",
+       "D8hCbgF+YCl1cpPsueSYb2jRM5fL4vPANy+1UinmRTgKfACnuaDLJiEiW9vhPi+kKnDWB/ZeuO1u\n",
+       "mAlAswTGs/CRw2DhtDxweQ8jIr4e+O/vB98ITGvAOYj/BH5DRP5AKZW7Ct8x2gH3eiBRhokMPOek\n",
+       "+Lq4XBkiEo3Bv+yF3TFYaYOJTrCehi+JyB8ppaY228b1XLcum5sBEfzAQ7y9yPg28JmNt8jlUvhF\n",
+       "9u+F3/gItH8Wyu+HsR74F/1QCEATIAiNu2EuCe9zO1G/99Fg23aIjcLy+QvlIGRuAcMPu690+wGR\n",
+       "O26FX3sUkp+DyiOwcxh+U0S6rnTbLjc3IhLphN+5HQ58GFZvBU8T9o/D8H4opODezbbxQlzPyOby\n",
+       "MPCKUqTf5n1/A/wrEbxKUb8GdrmsQ0SMfvjwwzAXxjn+vZAvQXMOtm9z6sMAEIZ6AEyc0sqFS2wv\n",
+       "AcG9EGiHwhRUX1NKFa/N3rhcLkFoS0DjwuVJKIfgsgRDa45+BNp2gW3B2lFgAjAHnXNqNtj6ju2w\n",
+       "pEMqB+8HvnEVd8XlBkdEUhDaA/42yE76IbkTkoOQC0AzAM19UHsGtgjMmpd5/l5LXDGyuXwC+N7b\n",
+       "vak1VTOOo2Z/vOFWuVxIWxt4zwsRAB/U/FBahVQNdNOZmiELZgkqQFlEAsAwiAlqXik1LyKDMPxr\n",
+       "sEsgXoGlnXD0ARH50xuxsNV7mRIsr8KbgpJXIVCA2bf7vCNEYr8Mo/thaxFsgfE74NRzUDjc4VQ1\n",
+       "e4PYGYJVP2yT1uT6BdvrAOkF1QQmXAHrAiCib4Ntvwo7LYhWYX6Xl5/u8VNdzkC0HYqGU26VdlDn\n",
+       "oLsK112lZ1eMbBIiCPBRnA7Gl8NjwKO4YmQzqJZBs3l9XtMAy3Dm+PdXW2IkB+bz0JOGv8XpzPtF\n",
+       "GPGBX8G0iERfga5BeLgA3QXn5jS4BtEOePyDwJtaeIuIF1BKqTc9obtsLDaMn4K1FHSMwrKAmoS2\n",
+       "16BahdcuYxPDjhB55OzrZ86wQPMuODRTusg0eQG8NrxBZDjdViMfhp33wBYFVQ9M1ET0byllnboa\n",
+       "++ry3qM1FWxC76fgg2loO18aIlmjb+sy6f4wtn2a0v5eGuNRSOfAPAeeVXhm0wy/BBsqRkTk94H9\n",
+       "wCvrO/iKyO8CnwQywPeUUr+/kXZcp4wBVaU4c5nvfwzHdfvbG2eSy8VQSmVSIuNHYXA3LIDTymzR\n",
+       "6Tr1138N0QCYBbAzcC4EO2q0/1Ob903AYCtIbB/wg/ugFAB92s8rt3qoRy085TLRKfA9ItJmQ/Ec\n",
+       "1F4DAtD2IdiyDZQSSRyCzONKqYtO/bhcfZRSNRH5+pPw4ZdhTIAcnF2Bx95qHESkDcK7IfEIJMPQ\n",
+       "MMBsiUmPguEmjCeXKJ07Dl07YAmgAsZzcFcViv3wr9pEjqXhSSAB2++D23KwtgOCCQho0BwWkS8r\n",
+       "pS7Z4MwRMgy3wT4NvKvwmg0nXHG78YiIH4ydENsClTUoHr4a3k+nhEDofui7G1QbxEZAPQOUYTUB\n",
+       "9d1V9uQX+ck2H9H8El4tQ3aXQX3yCOQX4StKqbf17F1rNkyMiMitQFApdb+IfFVEblNKHWytVsBv\n",
+       "K6We3Kjvfw/wMO/My/EKEBdhSCkmN8imGxIRMYHe1stZpdQ7TpFehe88A184B/1xsBdBm4dXy/Df\n",
+       "ys75fF8X/JNtcKtAXogOlDnTW2R2HMKrMLQMW9Pw8t4kk73tGCUTM1OhFlth9hMZAiXFg6cgvRMO\n",
+       "fwA0A+6vw8gMWBoc3w3P94jIf3JvJNcOpVQG+JZzY8EHdIOxRUSCwDml1Bta7IroIzDyq7DLhsUO\n",
+       "8A7ARBIGn4NAqxGZBdjNFfj2U/DFCeiPgpqCPX6ofgmOBKFxBkaeh+FpkkswYEH6HuipQzjjnBPV\n",
+       "QVj7TRH5PaWUEhG/CXsTsLMJpRU4GIKh7fDgqOOqb56Fna/BuIg8jXP9X1RK5a/hIb3hEZE4MAjJ\n",
+       "T8A+D3SXoGA607H6N5SyrrADc/wTcOs+2D8H6SpM7IDZ98GZk1DrENp6QjTCeUSfIxP1oVMmoPLU\n",
+       "tTT8llJq+mrs59VmIz0jd+CUOAd4ArgLOLhu/b8VkQzwO0qpwxtox/XKQzhZMpeFUtgi/B3wEeD/\n",
+       "2TCrbjBE9FHo/wz0eZ0ls3UR/dtKWe8oz14pVRCRP16BHiAEpJVSKwBBkQe2wG/vAyMB+Qnon2Nl\n",
+       "QCNR0on3WMg0PKdD7IhONhKhUjTxVRWgaHb6Uc06lXqR3iyMrEL2NvAkYfRHkDXh+AgUBiASg2xC\n",
+       "JDzuxMkGK1CegNrR9fEDTjCbMeS8aky4sShXhQj0/BpsC0MEONsO0yGRyDhUX4LGT4Ey9H4aHllz\n",
+       "XObhGkz2grkVJraAbwoikzDVhMpppVRORL667AjlkV0Q/DT84rzcDks5GJ6lOWBTDMGIDZGWK95j\n",
+       "QzIHqSEodYtIpgN+Yy90DECmCqnDcFcGEh+CZzyOYMYPjdNEPg/9d0IoB1NxEf8RqH4HmL9R6+OI\n",
+       "SA+kHnLGws7C6k+h/spb7a8jQD1jEOqE0jI0jiulLqySvf79OkQ+BKN3Q2QIvD1QnYboSzCUhp4A\n",
+       "fPfTIvLvlVLNd7kfbbBjL9wz5czymXl4QQO7H2LdoPngZIefeWMrqtJGpFCiYZyhYdrgBc/7RPRD\n",
+       "YI+/1b5sBhspRmLwiyf4HLBz3bo/UEr9HyKyFfgz4P4NtOO6QwQdeAD4Z+/wo48Bv4ErRi4LJ2tl\n",
+       "6xfgI2uQqDhLMz744RdE5CtKqbfLYnoDrQvXG9ybIhIbgUfGQIaduAIqUOnCE5whWvQS81UI9kEb\n",
+       "8MJIkFza4Kidpz8OAU+dfNSklI1il4ocHILODJg6+KOQN+Gle2EoALdUYDIFvZ+DVaBrBdIV8B2H\n",
+       "6QdF5GtKqRWR0P0w9kHY2rrInhGR4N8pVXquZe+bAiNd3hpnqqPzM06pn74ZOHkPbDsAfX7IjkLt\n",
+       "fTD9UVj+E+jyg6cIR7shF4RsCIZC0KnACMOJe2Hme0qpRfjFOTUjIu1beL0dhAXaKYzdZQIjJh6z\n",
+       "wmovDCyCVQLdgqbAItCXhXMJH2zZBx13wi+eehVEj8O+LBxsg7IF8jTRA152Wx4k2UT1wH0a5MZg\n",
+       "+haYf0xEvn+ht+e9joiMwcCXYXcNRiegosOrn4YjcZwH5Tf9Lpybfs+vw44ItFVh1YQTD4vIn51/\n",
+       "CHkz5m2w+1548Byc2gWjM7AQgkO3wcPPOAK1MwEzXcDMu9ydBLTbr4cbLUQg4oW2AKgEYPjR/T0E\n",
+       "lQ+P1KgG2zALFUqBEvFBgx2fbBAYgdNVEfnPSql3a8dVZyPFSA7nEQIgCmTPr2i5PlFKnXGmNC9O\n",
+       "K7bkPE8ppZ666lZuDvuABaWc+IN3wJPA10XwKUV1A+y6wfDvcKb6zwsRgHgVxhTM7QKevgpfMtQJ\n",
+       "YQW6AhFQecxoikB6num2JjEDwg2wbIiUFPHaANlKgEOncniSJfz+PEnPArFeiNchZ8FqwLnAnOmC\n",
+       "riAMZ2B6AJIeaK9CxIC2DGxtwpEOuHcVnviIiDwBux6Bj85CJgW5URgLQX23iPkDSLVBv18kdQJW\n",
+       "n3SLa102HdDZDoMzMN8FshdCNlRsyPdAxyyU90LjY5DphIP7oFODYgcMhCA5B8sGtB+GO1cgHxOR\n",
+       "5Hkx3ApETExARxIK/ZCZRBtZJLmtjNey6TgBJYHpPqho4JuHKT94T0OxAhQScP8QpBegcxH/tjpa\n",
+       "CBoSoW4uQLwNytMQr9IZ9uBr2GQG4aFxCDShVHLCnmJ3wgvjwPHNPNhXE5HgPdD7O3Bb1LkOTA9B\n",
+       "/CC8bwqWHxCRM5C8E/p3ivTVIPsCFJ+F1KNwvxdGW+JuFGhPweOfAP704t+WvA/2LTpxQShQAn0F\n",
+       "mIrDfLgVtA6tv94lBVhbF/g80wPdSUjUYDIEARWmokxiYlPVGkRZZjbuI6gC6HaGALA9D4MWPPZ5\n",
+       "Efm/lFLWFdhz1dhIMfICTk+Vb+PER3z9/AoRCbfc3m1vZYNS6nc30L7N5CHeRVaMUmREOI4z5fWT\n",
+       "q27VDYcZgeBF4iuCDfBF3rz88hEnb/f+DvhMAw7UIXAMurbDMRvRPaiaRdNsULJBhUBVwciX6Dsx\n",
+       "S237QzRe3ktz9rs0Rldp668zkoZEEmwTlBdeqYP0wO46VE0odDmpwB6BjjpkA9C3CN44RMsQ2Qrl\n",
+       "FRitw3I/NPZCXxG8RZjfA/F/Abu/DT2zcGYIXhgWka++U+/QTYreusEA+S1QD0LNB9uakGhAO2C1\n",
+       "weIdYPth3xokS5DvhJEKzA1A8EkYmHC20RWFEykgLSLDkPpdaB87jZ5KkzuQZPmkh+aOABW9TMkI\n",
+       "otk1trwE8wKrEQjkoeMVJ1vz1TQwbUNlDhmtkhrxkCh5MUo1apFFZjtzlHvjjmrxKnxkyMVteguO\n",
+       "EAEwGoAPRrMwsZ8bRIyIyADseRR8Hhibd8awzYDJ2yHyBMRM6P7ncH8ets5CzQOHH4KDQxDpd+K1\n",
+       "1jO8Ai8NnL9/vfkbtTBEW20C/GchPeJ4On1AzXAEyUIBx6X1rlBKLYjEq/DjT0LKhmwb+FPChCfC\n",
+       "kh7EtD1kBDxYKI+PYKkEAYW/UUH3W3QmIfsANBehexnme4HrohLrhokRpdSrIlIVkZ8CryqlDrZK\n",
+       "KH8Z+PcisgvH1/S/bJQN1zH3AX/xLj/7OE5RJFeMvC3ZszB3D2y/YPmsH7JXFATshX3b4Vd6oXcV\n",
+       "wl2gCtD5KoTDVBcnqGzNMCgQa4DHC1Ud0qZNW/4MybNF8r1JiI3j81cIVWy0GEgVbNuJNQg34WgQ\n",
+       "ZKcXb1ChouApNAhYUM2AtyWyBOcJTAG6AZpAZp/jRcnFoV6HgAE9y1AeAX0RRpeh1g1rB3A6Q7tw\n",
+       "PtbGP+YUWM1PAJOtKYtlWKrCmt85zmtBOFAFSwdfBQwTegNw/FZofwEWvJCNQ7YHFn1O8cvMGMxN\n",
+       "Qc8K5AWnDk0Ekn/kZ/uWKPEyWOUS4c4Gpf4dZKshPItxYhN9+MpHmd42x8jzcPpW8KdhwQerpyD9\n",
+       "XaWUrYm8chLz10ZIzekYTYA6IitELYEHTMojWbBWWA6W2TrhCPXzFELgnXbiUDTvZhz7jSG+F8Yq\n",
+       "MFuCohdiNSerKS6QboeVHtizCNtbHkJPA+6eguWtsBKAwhKE62/MwJZf/PVmqqfg7BYYWYGeMzDZ\n",
+       "5hQ7PROFs/tgpQjLf3AlnggR320wGoN6GpYTkAtBNtJNs96J6THxSI6geFmTJpqZJaw38eglNHJ0\n",
+       "LkOyBt46ZLqc6gTXTxX2DU3tXZ/O23r95da//3Qjv/d6plVf5C7g3R6DJ3Bqk/xvV82oG5dxOHkO\n",
+       "AgMwugKi4FQHnJoG+/SVbDgJ7+uBvm3Q6IbjEzCUgOUiJI9BY45GCXxhCJmAghUTvE2YuK9BLTdD\n",
+       "Z2wGyo54aOYglIGGB7xV2FqAxXaDSSPEaV8nw/kaZqVEPZilEquxosM9x5z6WLWCc3PMnoT8Cryy\n",
+       "H3YNQaDiXGfmA45nJnEcFuJOFoZuQ3cWQkNX5SjfAIiYe2H007DdAtOCqfth/LiI/H9KqYaI/rfw\n",
+       "D1+AnipUNSj6oFKDqG5SSipKugezGqSWylPI1SgNQp8FWQvQIbsTFrvhmUlYfBaYBeOLQWIjXXQt\n",
+       "amgKwEOlEcHXZ2NWdNomPPgrAINU/SvkQ3XqT8Hk14Dy+t44ChbPkpirUY62I6qG8ixh9YcZOa44\n",
+       "5dlK+eUiBBdY6bJJZRxPzqAHSiFYsaB3Ag6lIP2OH3Ja6cOj0H47eAKw9hpUD21+gKQRAF8D2sdh\n",
+       "/DbYV3e8Izow3Q5VCwbm3viZugeiAzDXAc/uAt8CDL4Gw2k4m4TMzKWzj1Z/Ai9uA6sD+tbAOAHP\n",
+       "doO/AH0ZGMzAxKdFTE2p2qvvdG+clN6+R+DhKSguwultYEV8pK0ONJ+XoLJoSgilVhERBI1KPYtm\n",
+       "rBJWdXr8oHdAtRPsohMHw3XT8NEtenbtGQFKSjH3tu+8OC8AO0SIK0XmKtp1w6GUaorIN+CZO+Do\n",
+       "bYBA5nEo/+wqpMcOJkEPQT4ExOH4AoTKUFyG0026UhCvwUIMfBYM1sHjg/xOGJ6FWBXyOswagAlr\n",
+       "JdjZiiEqe6BUj8HWA6ycWaSZgJiWZdZvojSLULTJwT2QWQLPEfiJ3sHaQCfsyzM/kqGZzOJZhs6q\n",
+       "E9dQAuomUGvVbgMyQahPXOExuCEQkTAM/xJ8dNF5EgbYAZi74PnjwKtKWSdE5KswfwCCt0OiDbps\n",
+       "nVxUkbe8ZPMhqismxUKT0PYaQwoGZuCHg1CP+iloMWp+k6ZZpG4W4DkL444gZuO8EAEQmnoEj61B\n",
+       "vclKQJHSDMyKgRKDM711cn+mlLpYrFm5TujMOe7PnWMlJpQ6+8AK4c1VsQKDMKWDqtHM/Yij4xaR\n",
+       "EKzd6cQgmQswtRPO/Awal1PM7QLCH4SxB2FHDnx1OPsROLy/FVS9iYJk9Tic2wX3T8FLAXhqG8QE\n",
+       "pmMw8zSoBcgNQue6InNnb3OmR+98HvwjoKfg1Adg4jDMLsPydy/1bc4UivwhrHwJwh+BUgr6YjA0\n",
+       "BYEAxGZhLAt/+0siMvEuUqqjEDdhOQKzd8GwBUNFD/8gTdKaQc72IVoFj3gIqQLY81BWbDXrRKpQ\n",
+       "E0h7wKjDmRSUD8H1017EFSPXnru4glK8SlET4TngfcB3rppVNyhKqSpOoOrTTgOy2K0Q/pSIOWVQ\n",
+       "19qcaFZrFV5qwmuX60KtwWLRKVwHgAlWL+RPgNeAWgUtACPL0Cw6sQOVMORM6NGdKpwdraDaI+1w\n",
+       "OgiNPmjPQkOHMyaY54LY226BpR1kVg5T2dlFfMFGb55mMTVPZh7ypyH3zR545BGnWV91jo7JIqpy\n",
+       "hp91LrF9Cvqn4fQgnBmAnsegqcNUHxzcCrlTIhI/H1B+EzMAQ/rrQuQ829bg9H7gVXBuNsB3RYJ5\n",
+       "WPs8qC0BKoNemgZko14KHiEftPAEhZ664lAcCAXxWMO010wahsaK0UMzvkzlt6aozesUlEVN1zEt\n",
+       "AI1AI0tV66aWHab03ALlkSqeRJG6Vqf+LaAkkvqs4+VbOQKcVkrZSqm8SPIYLI3B/mkPJ2NesvUi\n",
+       "c7GtZI/prdTeKNS9EK0wOAf9fwXlJJSDsKpB8fg7rcHjxP2N3QcfmAKjJXQ7iyAD8NQ+4LkrGZgr\n",
+       "wz4Bx6bBMwDD81BIw9FemPwOFL8O9MKRMegwnSmctSgUtkBjEXadAzUDy12QGIAXViD3f799Cf7A\n",
+       "MGxPQNcrMPlx2N+EYBegIJeCxkHn9z8xCBx5hztUdorznt0LB8qgMDjcL5Q8BootKGlH2SVqzKM4\n",
+       "hthBRnMNYok6/Tk4ZcJEwIk9C8yDlmnVp0lAaJ9jZ2EWyoeUUtm3teYq44qRa8/dXHl7+fNxI64Y\n",
+       "uUxEjF0w9jnYVQdfPcjTnx9hydxH7UkPNE7CZ4/CqIj81eWkvq7Bd1+DD6UgGAMrC+Gj0DlJiCId\n",
+       "eaiY8FwnDOiQbDjBhw0DUjqEgqCvOV6K7SswbcCiCd/vgWAOuo5BNlDCLs1DyoNIgKTtI1hqoIwA\n",
+       "sgwPPQGnwvBichv4klCYhM4mdihB9HQfRTIU9DqaDmYWXq3BnALtY2AFoP0obLsFjt/SKsR0uZWA\n",
+       "b0SE1s36jWg26+IDRCQKdAN4ODxlEd+q8KaCNK0eqpl+AvkF6tEKZ7x5PBVohGBbrY0l8eHRbZQu\n",
+       "9GAx3x7D9iyQLxisFWqc8uv0NwXdyjGXTFM1vAR8E1gHOikd9cDkEag1CAvs/BKMtJ6oT++DUwdF\n",
+       "5DvOObv2DPxkBI7f2SAUWGW+J0lhfg07/gxs74T0PPgqtG2Be2da5cNb2SIFL/zVgyLy83foNeyB\n",
+       "AV4XIucZTEN8F5soRpRSdRH5c3hmLxzbCyoDy98H+/jrKdXGX0L+E44gKbSDVoEHXnCm6rCgfwoi\n",
+       "i/CaoVT2LYWI07ph4GF4YBZeuAfaxOmXGbKdhKe+cZjYBeoEl4w7eUtMmKuBvQNOphOcHuxCjzQQ\n",
+       "+oEaSA7BoGkZ2JqNqVWxQxYLApk4RDTYtQbNEJyIQOWQiNwNfR+D/XVIlGB1BI7c1+qV9U6zPa8I\n",
+       "V4xce+7ikqlhl80TvPuYk5sOpwLrwC/BI0vOE9CRnmE80klPpcp01yjNo12QL8Pul+FnwLmLbCOM\n",
+       "M8XWj5O2PjkJ/28V+Z+DGIkqmiwSDtXoaNj402AvQK0PxtGYbyQ546ni16r0WopGsEmh3bnIrXmd\n",
+       "aZlQENp1KHXD5GiM5aUOmJ+E3TFQNna+Qi2QoelfIXQS+jOQD4D01pEdPye0TfAE6pSiNbyDfrRJ\n",
+       "k8piHe9rUGqD9J9CaQfsN+HeU87UEcBAAP7bZ1qFmG7Wyq5TMKlgr/HG7KvxJKSfBhAJPgBb368R\n",
+       "GwlT6NNZDTQpxjvQzE6MchKlIngaIczVBRqJJXJpiKdAs00aXhvNFAJ1DU/Jxh/woed06EiT/VEP\n",
+       "Zx5ULPlXoatOl1bltidPUp+aZ2YY1h6okfurAjwLY78CH1zX52ZLGqzb4ODJpMiBfvSPe9Da1kgH\n",
+       "sqS9eYIxYVuygbKyLLPKUn4OJiF01hEiC2EY3waVTtCroOWAqIjkgCEgCKzgVC2+lEBvwMWcKVUD\n",
+       "7E0vP9Dy9Pys9eci6xtHReQUzLQDKdj36TdO2wCshqF+9mKfd4oqtj8IRjsks2C0wXITyvtgyAMr\n",
+       "EQg1oC7AWadp5oTJBdcYJ5iZLTgiZWq9t9KJyQm9HwY/B8n94OnycXKgm5ovjJIqNj78YlFgCdu2\n",
+       "8Gg1YraHiC1sLddYMkH3OJV7vRnwlSHSDp33QGgUfDoUTzrTxP1ZiCeg8lHgT97lYX9XuGLkGiJC\n",
+       "FOdHfugKN/UaEBVhUKk33zhvNlp1GiJApTUtcyFd0G04QgSCzHUmMKpevJUcZh80j2rAADSPO2Lj\n",
+       "3Bu3b+yCwS9B5z4I+2BNIHO2QXp8irZjBmqgQWUnbPfAliJoKajPw5EuKAbaKWm9+CrnyItFXdnU\n",
+       "DMGONjAyzjTNgOk8gXWehELCx6FhE2+jg8njNZqFcdSDipUhH2Y+jydvES1BzgcZDYx9M4Qe2E6y\n",
+       "YSLNBqVimYo/Q2NXmdAqvBKC0y+COumUp77zzOtCBJybUk8CZru5TlL8NhInCJAQUFJK1QGcKQ7f\n",
+       "9+GxT8BY0wk0nvbDyVPQOCwiI3Drh2C3lWSm20fMyjNiengl2Is/pxB/lka0SsaroWeDmIt+FoIa\n",
+       "VV+DQCBHzgzTXjfQchaNkE3NX0G1l2lXDYovTFB/zEv+n9foj9qMzYLodXz2Kvd9Dxai8MOaE3Q4\n",
+       "VH5j8oORGQzxAAAgAElEQVQGDFUiHPrHe7AiGh3+ObyaQShikEw20Cyb5YCP8ILGVjsH1k6WZl8k\n",
+       "3V3jXBrO3AOjCjqKUPJCeQxWPgKxDhiOQVQ58Y1TJ1sew4vFF5yFiRqMBaG95CxqChyPwdLbdiS/\n",
+       "HmiJ8DkRmYfZ2+DVPtg975QOWgnAK0FYeZOHR8S/H/b8CuxPQ2oVJrvh4Ifg1boTa2JUYK0Gkzoo\n",
+       "A5ZG4HQB5r6/PvjYCZ4e/mXYqjkB7ZNqfaFCkF2w5Z/BwHYYVFAt+chHgqD5MOwGFZpYdhil5VFa\n",
+       "mYRtYTbzVGolyhmnyFu/D/wLkFiBQ/3Q3nTaU5QM2DUHrw3Bq1U4cBoG1iDSLyJBpVTpWo2DK0au\n",
+       "LQeAV5Tiip5AW6Xhn8Cp3/K1q2LZexTnh9z7YYj6oaJE4i9B9h8uuHC2Klaef+GtNbF0ha0JWHXw\n",
+       "6GDXQGviFJMTkRDQCZiw9bOwqx8GixBbdLb1syQE+6Hpa7BnFV5THpIVWAs28fqg/yyMjsMLQ0ns\n",
+       "vIGsdSPxSY70K/o9YHhgdQtIDSJF6DgNnoZGM2jSU62TC84RGdzF2hELtnix+vLoq+1EFsLk2sf5\n",
+       "649XyaWhYyhPuJImbwQQvwKjSqW+RLnhJVeqUPsznHbztkjfJQ/jRo3P9YLzdBm4G/ofcgrKFZoi\n",
+       "kWeg8LRSylKq+pKIzMD0LvAGYO00MK6UskRSt0NXSmdpjx9POI+Keog1LKKqTs4IECiXiRabWHYE\n",
+       "/6Rien8KZipYVolFslSUh2kjRCQWQJRC5WZI5hp0NCDzqEX5RxWGcpA6A/1rTv2PuVGYa0DXJHg6\n",
+       "wJp06opYAucSwlpE4a/CYiJCcyiPkcxQjVTo9gpdEiBRqdEMCPFqhsl6F/3H6lTjEZbKcQrNRV44\n",
+       "AHcJ9OWcGlzlIGw/BNkvwL4X4ZZ1wvTZMXj+7lY/GxOonfeUKKUqIvJN+MEXYWvSWT2FEyD6emn7\n",
+       "9wKt+Im/hGcehWO3OPVBMjlY+MaF/VxamS0fggfnIFprzfINwq1lmOuBvhUgAilgXIe6BY1OyE7D\n",
+       "eiEiSdj2Kfj4EoRa16zdHvjhR0Rkymlol/gsDLVDW6Pl0QrYKK1AjSw5rU6FMnVuQbMEpZXxFLIE\n",
+       "V7Kk1qD3R5C7BWJRWCvC7CrU4rBrHmYSLSuA7Vl4eis0W31zrF/89U4QkW6BbgUNnOvO28TYvI4r\n",
+       "Rq4tVxS8egFP4sSN3LRiRETfDrs/C/cvQiINNR1evhNe8gF/ve6t8zCXP18Fscrw3CxzwwbpjhCl\n",
+       "+gR8pAr6Ucg0YNpxyQ8/DN0Cc93QvgW8liNEwEkPHKjDfA+0Bz2cNdvJBxKcVaAkQ8WzxPyOJjtO\n",
+       "GhQDJtWmTj0IXrMTtVrgRLBOuJLBVhCugG8Fym1QbdcodFvURdGoZ7C8r8LWrRCqovzd5HvrlDos\n",
+       "lL4Fu2HimV2m3F7E1E+S1PxYmgbNBj1anXTVx0SlrNS6plzZl+DE/XD7umJOKwGYq3AZKX4tD1Q/\n",
+       "4MXpY/Ie6iDsPwC7Pwb3zkKgAfPtcOiLcLRHRP5SKdVslWh/Q0EqEfFB4g6QHUKyCiVDYYVsECj7\n",
+       "cmT0OFVvk4LewJPNkh5YRQsa7MvGMCyN6a4oTd2kpFnkfHNo5Sw75+vsn4JjAeg2Apz8lzHOBX1M\n",
+       "xstM5HL0LVe4dRYWR2BuDWqzUBiHEw8HOPlAF8VkCq1RoGnMszDWCc12iOo0fKusqjS6JcTKAuIh\n",
+       "ZjXw+UusxIsU26bw9Gk0s5BVkO+As3FHfxuTkJyFrr1gXHATumUBXv0UxG9zigVWiyK+H0PtZeVw\n",
+       "VkT+A0xvwTk35i5dMv36pJWe3A2EYe3HsPYkzv1x5RLl8WMQjUA2DGk/WBZ42mDwFJzrAJqw0nDS\n",
+       "6dssGJmFnA23/xwOfUZEryplnQDfdsc7FVr38ORvwva6U6KfWTDHIJkHCTupxzNdwgqQJYUiik0Z\n",
+       "OIqtp52c3UqR2087mVIzoxA9B7N3wMDzoOcg39VqtFgFzzJMd4O/Bg2fUw/pbBvkjl3Cy3yp46dF\n",
+       "4WO74cCgMz+njUNdF/mW9YZr0KVxxci15W7gD6/Stp4Efk/EedS6Stt8j9H+ENyefr3cu2nBndMw\n",
+       "tVdEnlzXdsASkf8Kf/8lGI1DyJ6mmVcsDd0Ciw2wVpwLei4Lv55jLOJkB/gsOGiCsQ2qHa1mvYCt\n",
+       "QSMG1ZSXfLyLpWYbutcRQyHVgQ9YGZjj5a4I9dUY5UYcO9aNXZumaKRIvjJPxczQdwKWdkM2AknD\n",
+       "QEVC+KtCIaTTMDsotRXAlwZ7FKw4LK5ipcYgPw6+WfSOXmzvPOlEGUP5ERSa1QQbkkaJlQsKWBWf\n",
+       "g5e2OU33eipO0OIJu/Xk95beOicTqesfwUDUeWKcRST4hFKlq1FS/13Tuolsg9Q+p2DXyuFWgGJj\n",
+       "3Xt06HsI7poDQ8Hpe8Bsg70Cxf8B1lIi8hegdUByJ9hNSB8G6dFJ/Hc23tsVpVgTvZanogtSt/lZ\n",
+       "W4Kyt4K3OkHN046lFSi3z2ApD9snY8RUnXP9u2mW4oSLSzRSFmg+YpTJtNU5tgr1mQ4W93Rhd+/B\n",
+       "N1WiYqyQbV+jmphgsbdMfAnmm2DMQN/2IKfv7YP+MP5yGbGgFLody7TRghECqonHGydQb7BipkmY\n",
+       "DRJlm6Y0aJgr2FsVKW2V6OAy6SzkJiFyGiKWk9nlr8H0IOSjMNMHwarjNQGY6IbuffDgGUewrPnh\n",
+       "xU/BazqtWAylVIX3aOVWJx4s9QXo7XNu+KvDIBXgJORfFpHHLzJd0QGVO0GrgSSgnIKyD2QcrDSk\n",
+       "JiEB1Ech1YTYFDTK0L/s9ChafBg4AZoJVhQmw6A1IL4I0aIzVWgEnK+qp6EUhJIfzmwxOW30UiKG\n",
+       "zQgKDaGOwgMUERYJzzoF7AJVaCbAmICXF6Dshe4gzEWcINbgMTD6oNzlbLtowbc/DvlZsLMixj3Q\n",
+       "fPlyRIkGYzvgzvfDufOZW9vB/z34goj8u9b58Za4YuQaIYKG08n4S1dje0pxToQisAsnhuQmxOiA\n",
+       "zgue6DUgaeH0Q/pFEJhSakZEfh9mR8Ab8lGdHoEZ02n2Yd8N6RDUsxgfz9F15PW4irY0zBYhmoK1\n",
+       "GPhqwvwozCY8ZD0+yipO1aehdJs2BR4RmkTxacuUfHXagqdINjrJGu2oagq9eJh89xI7noH3H4Y/\n",
+       "7IL5EIjfwO9tUkEx6YlRtrxIMIR4YygtCbk0pDzgr0PQxEMD3aOj6UHKWhc5/PQom7pWYk1N0oiU\n",
+       "8Ovrj4xSqiwifwyr2yA6CJUsVI+9XRqf45Lu+VV4BOhveVVqOjz+IRFZVEqdusKBvALCj8LYPU6/\n",
+       "DY8Fk9vh6LiIfHOdIPFBwA/RNJzZDbEEdLbK4PdaMNgOP//XMJaB4Tw0NZ2nP9dBdkc/kekVqsEZ\n",
+       "JuN1Rq0sOhrjZhTd66HfMtA8RdLWDIuNCI1jJXx2hDalqKQ8FIwE3oaNRAwCykO6aVL3pNBSkM8o\n",
+       "Ao0k1WA3dr1ANZbH5/cTL0fRfAma/jIrfdC7BPeeifD3946x1rmVat1PwcpB6AQEeiGzgDdQx1IB\n",
+       "sItUPQEsbZ41zcIKZkDpFP1+hpYC1FcKhDNNDjwHjw3D4R740DHH6ze5C9J7nHYEA71wthcWjsOt\n",
+       "4zB9O2w7BcnWDSVahduWYP79IvLyu+1Ae/3Q9sswuhPqPVC+BQZqECpAcgpy++DFDhH5k/MeEuf3\n",
+       "0PsxSC3A0gGnR1RH1kkDnhyE8jlY0p3KvH4djAKkBZKHnSmRzgJ4+hyRHNoJS7c5XX1tDRZ3QPEV\n",
+       "OGc4dVIAKn8PP/8/oSsFvkCUBmEUA0AMwUYoIpjYhAjZGmbQ5lgYjpmwoEPxKKx9BZZTEP4oRHIQ\n",
+       "TkH9YfDUoPccTIUhuKxRuyWGPxVBH6xgfjpL6ZiI/K/rY1wuRgpuH4OMvi4rTXfmF4cX4B+JyJNc\n",
+       "JDFgPa4YuXaMAWmlWL6K23wSJ27kJhUjjQWna2Xvuh+KJZDWWdeY8Tytm/GpKNWHIvDJglNTfXEY\n",
+       "joZaxX9iaF5orosS7M/AzFmY74CVLWG0sEkhbDBnmWQCGppE8YkQpEaGOiXAryCLQdzWiNsG8cVl\n",
+       "TnQbYMBSWwU9IyiBE51QPQ7mksb4dh+lfg+NkBd/yYNHa2AHB9GtKpYmqEAIrBLiz2LQJGRp+DQN\n",
+       "H100qFOlQUGgIWEsqwtLy1PZf5Fj0ACOtf5cLoMwGH5diIDjhdqVhZk7gWsmRkTEDMI9MbjThqSP\n",
+       "9oEq+5+DjtaTa38W1Ag8s53XfxdVKFWcJ/r6AKRa50tNc4rCtdVheBfc9W0IVWA+3Emzv58O04NO\n",
+       "haDZQU8zS85boIiGoSWINiLQ0NDKAdolR8XfTdlco2mkKQXzmEYYsFD+OrpWp0kXygqi2QX81Tj+\n",
+       "YJaFPXkavjqReoFQD3i1JhXLwKx6qVehewm2KJjq7CaXCtEIedANA6vUBcY8GFmI6XhUk0CxSilY\n",
+       "IBNsElUBYjUb2y5z1mjiXa2jHy3TOWPRNe3U5rt1Hv6+6Ljso93QvAu8ORg4CHYbjFXhyB540oSS\n",
+       "BoNHwUbn4JYYE6NeGt4yRW/ZaUtxpaUKNg2nxkb/HWAOQn8IUhWncNupKJy7Ax7+Biz1w8oAcD6j\n",
+       "phvauiHe53gUTmvgizqCYxUYLEFuDipjMBeCHa/B0IuQaF2TFiLOtUvbBXu7wXgNzg3AQBmiGrzy\n",
+       "EIz/BdCa3ihNgnghXjFZiASoEcKDhkKniY7jq3QCv/y24GvCljocPwvz/xWkDIk7IWfAcAo+9EOY\n",
+       "G4L8h8E24OUU7PihcPKhQUy7G1NLYi5Y2DLN0q5xar8F/Ju3Oo4amMa6GJMZZ2f2JiC8H4p1GDoD\n",
+       "r7zVU48rRq4dVzNe5DxPAl8E/uNV3u57hOUn4ee/7pR8bitDxQMHe2Dp4MWe9sWJFvvcARiOOPWe\n",
+       "OxoQOgr3euGpTihWaKxBxf/6pzTgwBH480CcXF8SvRnEW1U0rUE8bWUs/wqWN4GoMGGVo4qNr5kl\n",
+       "pDUZUAbL6PiaNUy9RNZXA49BrdHJaw/Oc0zqyAQUum3aZ/2cEYv2oE1fo0jRBDyrrCiBagbLC5pW\n",
+       "xBYPZjOHEgNThKal49FsTCIYdgMNH9VmgpqKU+rSRe6xlLrSWg8+JwHlQkI18FxRw8F3Qmv8Pr8f\n",
+       "Rm6BhUWIncFIjvPje9f48FPns6VgSx5O/MJj6EzT+Z6A538Fej1O/ZCqDkfjED0KxU7oqDm9ZgpB\n",
+       "jRO3tEFYI8AyhUEvPUYQA5Nko0rRGyEmDdB07JKGFLzgt0g2l1iK9dL8qSI9tEZnb5WAb4ayP4g0\n",
+       "TEr1TozsAvVYCV1FKWttVLVJwtgEPQY+y0PEtqhrVZaDRew8dGfAD5zuNiiONQiFSii9iS9gUq53\n",
+       "0CytQtjAVm0EawUaoSCR2gKNZoBFS8cqBfDNrRFpq7HjGcdt//rRBFbg3CqE74fbs05sSDkKixak\n",
+       "005c98EpJ15Bq3t4cesI47cMEMx58VYKLCUy8ElTpFRT6vC1Og+uMj4wtsCuAuQSzrXEb8HOIjze\n",
+       "AQUTOsCZcznbmhbsAHsvpDRoX3D6SVV0iPihUHa6NU93QfsZCC07TS4XxiD6ImT88POkU++k/R7o\n",
+       "Auyo02fm552gz0H1BGReWudx2g5bVvzkhuPU8eKhgWINjaSzA9jYlIAMtrIoNuCED46fhM67YGfQ\n",
+       "6ep77oATc5JdcLyEnnPQnYN0L5weDZDr7qA7q1EyAHQ01UdifpnsAyLylbcKRl2DI2fh0S4oFCFQ\n",
+       "gj2DkJsH9sHpJJQfh/1vdQN0xci1YyPEyI+BPxbBoxTvcVfpO0cpa1zE+CasfhgifVBpwtrTULhU\n",
+       "f43eAdh6B0yvQWHFeV3cCvoZ2LIGy4vYh2DSA+2dsGXVSaF9fkRYXtuJVRiG/BIDexbIDXrQzDi6\n",
+       "toCuFihJAp+qobFCgyVSyiKvNNY8eZo9RcpRizZVpVMzWY5XKabAZ0OqAxomlMYW8etRUrYXo6Fj\n",
+       "aAZ9jXkMvYeZ6jk01QnNVZQnT1Mv4pUIHtbI6xaoECZNllEIFkUBVe9xvEOfEpEXLhGEd7kswowG\n",
+       "B8RJdzzPdBxyz1zOBlrBr6NtsKMJ9SwcUUqde4d2DA7ByN2tFOQ1aMQxSsM0zTzH+5rc3SrcVtfB\n",
+       "uqD4Re0gHNNgfhiWukEKkHgN9k/AT1pz/7kUVG4Bf0IRCDaphOso5cNb1tErFpWQh2ogQFgpDKtE\n",
+       "TQLUu2pglWhKGbvWjs9QZAQqngzm/8/ee8fIlp7pfb/3O7FyV3V1TjenuZMzJ5AccpbLsCvSpCVt\n",
+       "kAWvBFuwDQkLGDAM2IYA+Q8DtoSVdyUIsCBrd21Ju7LWXIpLcmkOORqGyfnO3Bz6dqyurpxOnfT5\n",
+       "j68v53J2MufO7lJ8gAa6uqtOVZ3vhDc87/O0rtDzFoj1PJG+gmTX8HYnqZ7x6Lghbl7I7NtGW1XC\n",
+       "yCZNU2yrDWqEF8IQ8AVqJzvkpwuU1Q47UiLEJWv36LqXIPYZ2kOa1R6xP4AYFsdT2KMYqx0Qu23a\n",
+       "lYQzD5vsfKILXR9O3Q+LczB3CFQRRj4E28Zl1ilBM4TKcyDPQzOFH3y+ws7xA+SbNkJAozxPcPom\n",
+       "2NyBR0XklfciFvgXEG2IM5BrmerZuGwIpIkDuZHZV3UBa0Vk5nMwfxdgw2AZmiPIJzDdg2IM5z3w\n",
+       "+qAV3PI43HXVtDOfugmeexCenoXROWj+HqQDGH0RztwBVQuW+2Ybm8dhIwN4e9ojY2BGaM7ksXM2\n",
+       "FgEeXRRtEgI0VTQN4BJOWiduQ/E1GAWQL8HHkzfch9MRZHJw+mZ44FnjELB2HAoFKGUSpLxFx1+G\n",
+       "y+AAoBBypuKRx8i/viXG8OJLcFsCKwXI2ZB7AVQOXp8yBzJHofPzYOQvBu4HfufD3KDW1EW4AtzN\n",
+       "hx/o/LlARGyMXnJBG87HlXeSaNc6ek1ETsNmDgjehYhZmd3raVagHcJTl8wJ5J+HQwN4fhe+CmsW\n",
+       "9B8C73bQx2G2q3kgc5HNAx1qm1XqmQS3nFAMLRIp4iRtfOsifekzkQ7IaOjaUCBgpRCwNQF5xhRE\n",
+       "MUhsjmhNnQka9pBRYUhmlGXNdlmwMwQ6Yej2UGmAnRSYD5s0rApxcAEZXCTnd7g5dNHKoekoJmTI\n",
+       "jqzQpIgd23QkwQ4CVFwiGbbg5g58XESeei8ksrfex7ouUnoSvvsA3FyHTARXJuGFIfSffbfXi4hd\n",
+       "hl85AccPQC8C+wzclxf5Tl/r777Xz2HB1IKZQwVgEna2aFFgOsyyO92FC2b89UzeEFB/4jto4GkR\n",
+       "+Xtg/R24Y88s8Ow0XBCjIKofgDs2UiaDJhulBXYcH1UY040c3HGPoTtJtjOi5ZWYsvu4qod2YmJ3\n",
+       "SCt1yGbanLgrR7Ee0GdAU2qEF9vo+S1UfpLcsIDf9wj9Md2wjq7CyGqS0ztEiaKZ0eSigMrQIzvo\n",
+       "cWnKEFj9I3WKVpG+zjBNQKQjurJG7CQk5Bl3zjKe6FDC5oBt0Z0IqBU8KCdUGop2ADuLMPw8pDuw\n",
+       "edSIYB1W4OZhbhVqx+Dcfph8HcpdMyl2vgHD88AWvDiZRd2bkCUhZJLBuQMkZy3QWVjCJOgf6Pj6\n",
+       "KCEiOXBOQG4K+lvAGQhehnMnjE7K7hSQg3EK413YLsL5GbjnIageNXySeg7OR7BrGWXVkTY/uyNw\n",
+       "FLRH8MAel+38NNTvg+kpkHkYHYb0IXB8mK2CnwdvaKZyggQKFtTuhsXfAnkV0jmo9oWNWRAnIERI\n",
+       "iLBw8agTsUZMD802hQTuXDNGf6ddmFiAlW+88e3dGuT2QzptWpT1CPblQBKobKeEKwFjp8GGKmOK\n",
+       "wyO6xYBwAyP0eP1+VMDhKbhVmw7RKzvwL5+Am/LwuWVonoQXDhvRvGt4x2D1hgYjhjDIncALb3bw\n",
+       "3St3vQj8ttb6Z3o8VYQJzAl7I7gd13gjPxPByCz81wdhugq6BlwyYkS//04lwr2s/72MmY42IROA\n",
+       "5UMyC7Up+PYpWOnAmZbWv7eXwR8QWhua4kF45DTctgHt4pjMdJ9KIeFlO8JOXyHJOCgHvDRHKZkm\n",
+       "VG02YsNavzmGgjJeMPcgdIAkTdFqxKvuUSxyWDImR8CW32IsEZ6GSW3hxAVia5fESmhainFYQg93\n",
+       "yTgBRwLFpIa2PYPNBFOEKJpc1h1i24PIRUbzjIMOWNMgR+AXTsOte8ZlH3Act/sNeHoDLn3MTBy0\n",
+       "12H4no5nBcdPwvFPXUdg2w9WCI+IyKn3+gkSGHSu00TJw2iR7guvEj80ZiGC50xSx+pjwKW32obW\n",
+       "et1clzp3Q3Z/jsbCfqJOFrXUYryvzvpUm8MbW4zGMfFugdiJOO3VWLFSvMEsuc1L1JbqDCeKzKUW\n",
+       "aRhSdwV/oKlqHys/wsoHzPbhodfhm0fGlHsNuv0JxrZg+XW2btLoZBLllMhQwB6uM7auYqUxTlZo\n",
+       "ZFOa+y3i0GY0GrOUHbHCWZpSYIgiJwEzRPRwGMoWvYxidqTJeIoNew5LJshbOeJxn42pHsPamOU/\n",
+       "MSJn9RXjEH13BJZAtwrShdw2rC/Ba9NQGBu58LXvG50LEJFvCtx1iGGrAEPP6EjQAS8wme/78rT5\n",
+       "84CIzMDS34ITGdO22PHgtR7svgBn5ve+0prxqOlapoD62g4s+HAEyCZQ7RtdkfE8RJuw1YUnfJM7\n",
+       "6QJcqcPNZ4xWzMiCU1+C2/KQsQ1npKYhPQkkUE1hEujmoKehE8Mogf0JTOTg8oNw1yXFqbuX6MpB\n",
+       "UorkGGNzgQEdNC4T9IEu3RTuHYA1B/Rh/5bhoWwtwcoVswdmLhlfqmEFagVQPthb0B0BnZDMDwLi\n",
+       "W1uks0M67YS+vUNr3Eb/4fWJjBhfhC8eg7uPQFeA83DbGXihBf+uBZen4TeXoHn9/r8AE++0Pjcs\n",
+       "GBGRO4Cc1vphEfmnInKX1vq5657yS8AO7xIt/YzgXuD5G9RKeQz4b4H/+QZs+yPHw1A8cZ0S6Iuw\n",
+       "8B/gUeD//aDbNIFv7kFY/uTryE11Brcfo/PCx4gutCBzBnQbvisihWn4zw7C/ARYz5B9YEC8EdDt\n",
+       "wNpck92FPuNpKOVssmHEnC242Ixw2Q41TjAm6xuR3eUh7JRhMgVHaUoorgAz2sdRDqGGFCFhBksg\n",
+       "lXW2xKWoYWRrlFYkEtDSLZRuU3T72J7CGwvttMKGWyEXWQTaRqksRSKaqYbdAnEyhE4edn04fQes\n",
+       "deDWOtnfFMk89l4maN6MvYDvRRFpLsGvrhjZ2JWr8IW8yP/X1/qJt3ttFW45+KbMyoPkIOizsPI+\n",
+       "lJUunIf+PphY2SMoW+j2BoMrYy6+COcuAc9ord+RJK61bgDfyoh87F5YehDOXkDpXTLJNH3neZ6Z\n",
+       "Djhc2+Zma4fNnM9Ze57TL9TJ3LaKP69BaVQ9ZlNsnLFQ8EMyXoZursVqachU2OeWSyAWTFSEuU6E\n",
+       "nz1Ps1ugVVGIexBn7DFxNiVc6hDPjclhM21ZFAJFlgFNT+H1xmzNQyCwSUSGJovAEooeNldRFNOA\n",
+       "rOthh3mazFDTBUQNsSUiyeSxuku4g9MMspr9ocuum2EwkaMepVi6w8gb4a3AvmdhVcPGFgQuXPxW\n",
+       "jnE0LfJX2ybCO9WEH7wOD9y7V64fgf00zO/C137KNuCHBnOuqxMw/RDYEzC8AM3va61rMPMleCSF\n",
+       "/Xtti2IBGo/C7Ajowuv7wKlD+ALsnDUBOIvwwFcgmTIVQQAngul1E7itDI1fTD6A+lW4+QJcPgE/\n",
+       "DA03vjBpbrOlBBoa1hbgNgWhhkCZqpwvhvyaJqAci57rsX1QGErKN+cUYW4JrElSFAkOcByPc2i2\n",
+       "yRKS02MKEcx1QK/D0iqkAjszcOX+N4KR/BDS1+FKwbSCChqO/xBuu8qec3SbK2eHnHnkNLVWAtst\n",
+       "+FoAu1MiX04gbJkgf2USPn87nJo1luDsg0YCdzwDL2qtL2RFvvUn8NmjEDqQXIHsWXjHxONGVkbu\n",
+       "Bb699/t3MG2K64ORXwH+Df8RqD8C93HjKhdPAH8gQlZr/hztuj8cHIPa9Y9PwtYrcJuI/MnbyFG/\n",
+       "B2Tvh1s+Bw+uh/CtOmfu7HDh/jOcnxbi1zfh97XW62WRv3ESuX0SJ2oROha2V2S0HPHMoQQKcEcY\n",
+       "4rThQjlEeUBqcyIIGFpDzmVhVWDBgbyA9sBWRqXe8AYVY5WnRwEfYVcUA1Js1ScQi5D9NPSANSVU\n",
+       "tcWYMXWKhExRZQzRBWI7pmdrcvYkapxH9R1U2KdXyZJpBnjZC8S5VVxXozIlwtbtRI2vU3q4z74J\n",
+       "m8JKzIQNr39aRH5Pa/2WfhtvBxHxl+HXP2fMTBqYb2Z9Gz4rIpta67c020uNIpR6898T87/3fBPT\n",
+       "Wo9F5F9+C/76LCxFULzI5IkRizVQD0Hvs7D7sIj8Q3PzeWdMwv0noaaAMnGtQXA0Jhe5TC8G3NWD\n",
+       "0iBlYjyk1L3K8zfZzI888r6FF6RMhAlXsjkygyHjXpWtlSq262DpkMha4+VjdYZti7hoo1xNwbKZ\n",
+       "1i22cvO85M0hic+w2mVYuIiXahbdDBARekOKVsRELLwybzGrNMf2dpKP6VyeIgVcptF0VIqnXba8\n",
+       "ZZSsUIgsHBkRyiotNEfX8mxbObbnBnhTBwiyHvn1XYazFkXJYccxq9WIaN5MgtibsDo4wHjhVpgq\n",
+       "QrAJN5+CB7fgd58HrsB9e3Pz6S58O3gb35c/H+QehBOfg9saMNEznIhnbjLKqscXYf9VU63cmoNX\n",
+       "fgFusmE4gGPfgehpeOIYvPQ9CB8HjkPmi3DqAZhrQVqA4t7UViOCxgh29hkfKvUqfPZl6Duw/glY\n",
+       "/VVz/qsJ6AhkGoYzv+gZA72LtvGruTk1p0ZX4FRGkTDFscQj1QmRO6bl2JxVDh42DikxWXxyQJaY\n",
+       "LDFDBqRU26D7UK6bwEJpOHIFvrsE7hJMBiY3eb0Ljd8CdkH+GxD/WiBikPZC0v9jHf4VkFTgP70D\n",
+       "fvlMF/0AACAASURBVHE/DFZhvg63lmA8Y1qts1fh9XvgvAIOmJ7eMeDCUOsfiMjlVThpgd8yarwX\n",
+       "gP/r7VbuRgYjE7xRKu0AN137h4j8AvA45nr0HwNv5X4+PLGzn4DW9ER4GXgA4+b7lxpvvmNZkFrm\n",
+       "z9ZbPf/dYDgoi58RljyP8/ekOKOQ5dMhyy/36U3D+j/WWgcisjyD/5/bTA/PkMw1yC6OIavxhpp+\n",
+       "1pBZ51uwUYJyBBUXVhW0BTwb5lO46ICnoWbDjG8mFiIMGewyOSwgJqbJBD18Ino0ZZssGWJsInFY\n",
+       "JWSdSUIpE9MmR0TOzUK0TEKD1qhPktMkaOLMkJHv0NdCJg1wnJDSUHM4jSioBoPlx7nwn/gsdieZ\n",
+       "Xa+jBe5ah8U8fO0rIvKP9gTh9iYEuDZFlMVkv2tvyngPHoLM7HV9YB+Sm6C7BndhLjZ/Brvw4jnD\n",
+       "rm9e0yEYgHMB0G/TTnk7aK23ReQfb8ECzPxXcMurkByFgxHku3D+PnjxH4jI/7SnqPq2SKD4LN70\n",
+       "Jtl9CYmXpylD0lmHA9rIo7czEK1Bth4y82DI7DjA7ziMCppkasSMNWQjKdONllA6pm+BTZ403Yf2\n",
+       "A84uj0gSqGiL2simmVa5ksvhJIqRaxHNZInyfbJ2QsiABMhiUY6h6VoUEeb3liYmZYSRW/cAF4WX\n",
+       "htSVoi5FBnpERlrE7hRae8QyTaq32Vh0iVohYyfHenUadw2s/hUuTSQcdhTlWAgFnpuEq/8com8v\n",
+       "wJc/D7XJPQ7IfmjmYOFx+FhL66+LyOMYQmP3/ah03mgYPsiBR+GRNUNCBThWM15RjY+DaDP6f/Eu\n",
+       "iPbBdBGWRnB2Hi7fDkefgfsvwNqdEB6GpV+GpRzE89Bagdf1nrBhH64ehANdaA8BGxqfhq9PmcmY\n",
+       "43kopBD7cNkBncKlGdDaBCg9TEHhuDa7cV1ACyyJww4OKtGM7YQYh0AgZpOIHFkcAlxkr0aSkhKQ\n",
+       "pT8KiT2T+GSvO1+HAuMX4Ltfg8Is9LYhev2agJuI/DF87zfglpxx7N0pwCsR1L9fhE9k4NMTcLIC\n",
+       "zwdGRe3wL8H6edg3Z/p7rSfhpqch8SFsQjHCVP9FpApku6ZSsnPdGr3t+t3IQKCD2TtgBKiuLwv/\n",
+       "LYz416+80wZE5O9f9/BxrfXjH+Ln+0hwndjZ37yBb3ONN/KXPhjZgsLcdfyPVaj0jIvlByXHTXno\n",
+       "+2fphlnsUUxYbHJhpUn1tKY0gvUMEEDlc4osQ5xCl9xShQPjLP20wUZR8Czw8lAfQsMGP4bUEcS2\n",
+       "uZJxmZKYUZoyUNNcUFCiydNOwAwxI0DjskaWLAlbZNhimYgLuDSZYoBHEWGKiAl6QCBXsXCYIGJa\n",
+       "drG0zyhfpBUPsNbGbLs1UidH2xZsbVEadoimtnF0l0rOx0l9JB6R9UaUKpqJfoO+PWJpr5Uy24e5\n",
+       "JdicE5EyFL4CCzmf/v5pWlNzDFcT9OpV2NoTD7vWYvEyb7GDs2ZesPAOa3DuDDw1hnsOQByBnANZ\n",
+       "hT/WWrfe6QL1VtjzELFgScPgADzQe8Nt9+QmhIswfhT4/WuvMc7NTGF8VeoikrEoLwRM3w0lS1Px\n",
+       "23SLEZd8ze4uTNagdBG2y3D11ywWJj2qPU235DC2DIlQrIT+pCK0wSKLrSPyhCSe0PJyJDpmZpjw\n",
+       "4kSBdlhlkK/Qc2qM3G1COYToTfJqzAIhywywNOxamgsiVLBQQAFBoXCJiEh/HIy0GbImc2zpaXoy\n",
+       "RyoxKQ0S8YnTCjE5YuXSrIxRhWUuSp8RQ/ITLfRMiePNhMgKuVwU4s2Q5SehN4CdwTz4XUhWYSoD\n",
+       "4QGoH4Xac4b/93Wt9RDevgpr1kadgOm7jT5G62UIXvywApc98uSt4B0E2YXgnNZ606zvLG8EItew\n",
+       "vwGFBWhuwqkjMDEHpY4hq44VlGqQzsPuJBTaEN0Kxxw4qQ25N9iBnYNwqgivzBuX7SNNU1QrWOCV\n",
+       "YFZg9ZPQs2BxA6IpKI8gG8I5H9y9gzxJoalMQTCrIdUm7h9pKGuLoUoZJyldt0xNVQCNRcqQFl2q\n",
+       "QIeEBj5NFB0SMiyPFZO9lKwHL98OVg8SG2p9E4xEbWhe1Fr/BIdDa31VRH4bardDfg66z8Lw4gL8\n",
+       "9bshb8GkA/4afGUH4gOmh1UoQVCHXAHsEOaGMLUMOwMo+dAuicycMOTWdBdUVeRMA/7du13Db2Qw\n",
+       "8iTwXwL/FnOj/D+v+98R4KvAAqbN932t9bk3b0Br/fdv4Of7qHAEaGnNu5aNfwo8BvxvN3D7Hxm+\n",
+       "C6VbIV+Bfh0KL0Fag29+0O3l4dYSI7uANCyseERcTWDBon44prcOuf8gIk/CoYUBfm2X3Yds5h0Q\n",
+       "x6dAnhF9OklC3YJBYk70xgoUbEDHFFVMKBYX5BD9NIsaa2K3TESTVTZwSemTJWSClH30KBNzGmGL\n",
+       "GULKeOyyQIEcFjGKIm2WyfE6iiJTpHjSZQQMrCFjzyKtjcj451H5DEnBpZ/vMkoz2PExlMDp7JCx\n",
+       "XaAchQxUi4zVJRtrNvdBeR3Kgcn8K/8AZj4GM65Ql1nizi0cODWmXl1i+/IBqHwPvgz8i73duXkV\n",
+       "5A5DhvhxafcqTLTgqbdbg73g4WsvwQtnYX8K0RjO7XE3PihsiLNQtd8IRACs1CiE5o6IiLWnMXIX\n",
+       "rHwWZhwYKpGZNbAvJKxIj6KXZSlr4ziKxWhE0UtYzUFkwziEwX059jsRg8hnYhBjTViMMnlGWhFG\n",
+       "CTVtgZuhnEzh9yFJWuhMi8QTBrjUUp/1eBnLtRCrQqQWiTmP4hlEQpaxKDHARjMl5pL4qtLskjLA\n",
+       "Y0RMB8gh+AhdNLVUsS5lQjlAIDnQPonkCMhicYW+8kAP0NLHTifwIwerWEHGTeL8EM/xSRMHaRao\n",
+       "vLaJikyFaX0f7BQacPM2OFXMbfIJMwLyLO/BOM1U2Up/BU7eDceaxufm0hfgpdtE5F9orX8qoquI\n",
+       "LEDlf4DFm2EugXEEW+dF8v8WeB36f6YdaMbz0y5sfxWe+V/hTh8qEax6YA9hZQOGGehMw45jiL3V\n",
+       "HJT3kiJ/BIunYGseBjsmaCvsgPoYOAtwa2i0SfIedF04vQQ3KbALxt+qDVzcq8qUlKmeBgKbYqa/\n",
+       "+omR31dWQuClKMthXU3j4KPRxNgouqRcAVxstkgZU8XmcBqSZjRqFbJDUAuwWoW5i5CdhpXPmMmh\n",
+       "bl2k/By0v369Yu7eOfida4/zIo/cAYXbYP1VM2418zC0vweTUxCUIbNmyCbPPwnHJ2EiY6IrTsKT\n",
+       "E4Y5e8eX4esKE3I9B0d/BL/Iu/D+blgworV+UUQCEXkCU6p5TkT+d63139Va3w4gIn8TsN4qEPkZ\n",
+       "wv28w4X6Q8JTwDERylq/IYH+lxHn4Xd24K4MzA3g1R489+aI/v2gBDcfpfPCZS7fNGY20yF3WKGU\n",
+       "xeVsgt/VHPsinHUhcbpktM14wifGRsYxqfTxsXGDmLbWWD64FaEkmqtAS2LmRaiRZ1MK2DpGWZq+\n",
+       "bRNTxaLDiC534fEUJ+hRROMjlHBZp8wYYRqLBIsYjZAhYICDMCYliweAIp/u4EtIZ6pAcWOKud0m\n",
+       "u8E2nu1hyxTKm8fVFmOZwgZUcoEgzmKnDqsOZByX3O09LlTrzJxLaN4G97pQcWC563J6MsCer9Nr\n",
+       "TFLe3aFz6DZGj5+C/SIyqbVuaK23J0Se+S7cdwIaPkSXYfIlaAbw0jutw95o7drez1tCRDJ7z30v\n",
+       "VbBNqIdQetM1rFUAtQppCKQichBu/TJ8egMKe5yjM9Pw2H1gVRNmRz0GZYVKUspD2NczwlSbxwDb\n",
+       "olJW6NSjOxyxVbJR2QxiFUnSlJ30AEl3nYzXo6dKWJaHlWRBrxImwthaYCwJtjdmIvWZkIC6rpDK\n",
+       "UVL5IRk8JgnRCDFvtPSywJAYjUsdlxJj2pjWXBNIWKYgOXpUcIG+hMTkSCkgFFDsoPQYO1nEi1zs\n",
+       "cICXUyx2Ha4kI6KxR7EV0cu00XaPueeN6miifLhlGoq3QCO3R3bahtwP4BMt+EfvYV0WYN+d8KnL\n",
+       "prtay5ub7+ydsPsahuP2gWDaMDN/D44egYevGMPKkQuXlsD6JXj9HGyvwelpOL7XGogFXpqB+h9p\n",
+       "rbdE8v8P1D4PwdhUVbYXoeRC4MHaBFzcAbkMcutPzlYowB2BakISQXsOrCKsNCEpmsmZ2IJ8CgPP\n",
+       "FAB9TIvG0Yakugx09saAI4GzGqZT48YLsK5C2gS4UiEhj01EFyHARxEjXEWxnxLTKC6SRTHLkB4a\n",
+       "FUM/gtkBhB5sZOHkBuyvw8VpOPgyPHUPPN0C/oyflIgcrMIdRfiCZ85ne2xkZcWFdBbCDXBXTLSX\n",
+       "y8HVTZj1IYnh1BKcyRnyzdGxUQbOVIzVBnfA+lm4XUT+9J3W94byNd48zqu1/rtvevy7N/L9/4Lg\n",
+       "RpJXAdCasQg/Aj7BTzF18hcBe/3Fb7zrE9/r9iA4SLI14rz/Mo2/5pB3ynTVLBIpvIkrDD+5jVep\n",
+       "sObMMDqmcZNtav4Q144pBC65ep6IEWezsFO2KeYVRa3ppTG+MpmqTQaLCNsCx4KiNreXAuZ6PqKH\n",
+       "w1U0B4iJgRoRI1I8CkQ0sXEZEZKSYKEBjxifATExmgYJwpAF+pkpMgsBa0mDQabKIWyCJM9Q9em6\n",
+       "wsiOUGkOW9t43jpxdJJIK1I3otyfZ5izeeET2yxOe+gwJHKEvmuh8zbLgx1O7Z/C3oqwcgrIsPdh\n",
+       "9tCBf/8kXLkE9ynw2/D4wEywvNlI7H1hSuTXD8LRvd/P7cI33qlyYqzrvX8D9kk4M2ssAXo+tEaG\n",
+       "WNh60lRkZj4Gt3XeCEQAjuzAE4/A4GCRc4VJLMdB6zaXnSbTvZjyBqxuQb0oHAoq5GohmeMttnxN\n",
+       "XzloEpqyzBAXrYpY+gqJ7DDOWETeyNyYVBXiGbAttNWkbzVJUQh5EBdXazQBWkb4KKZJyGpDanSB\n",
+       "o2guEnGOiC1ismhcoIDCxccnJSAlwcbDQugyJCChjsUYS46RkYDU0SSBkNchth6inISGvcupxSEz\n",
+       "nZTCNlQ34Dt3wriSpXR/hkSfYriwSNr2IR6DHRmVrrdNGvd4RwuC+kVtkuhtmxeX57h4Yg5ICP1t\n",
+       "rN/MiAxGWj//wY4S+yhML8DBgQlEwFg1TWehWYGrR2HnD+B7vwbnl6GYwpaC7e9D9LyZSGUbun34\n",
+       "xHnTzrl4AU4fNlWS+tdh+Bhkb4XGAWjMwOLeMTi0YVtBqwHDPtQ+BZVpM6GuR9DOAyPoZ4zo2Y4N\n",
+       "pRj6tvEPrGgz4VLBVEQOa9gQw+sghi0POkpzqNvicr5Chz4pAQFLQI6EESAkxAwYU2CMJkbSGFeD\n",
+       "N4a4CEsvwvYUeC4c3TLzIb5jjPbu2IQLD+11In7MLcmJfPxW+OUqLNXh1jrkLpsPrnvgrxv2rtMH\n",
+       "6+tQycJuAcp9yAFnjsPzlnHqda0957/oOo6fbbRoBBOdvf3qfrCD4ud4H7gf+Ocfwftc4438pQ5G\n",
+       "3g0ikofSw8byAKD7HHSfeKuboYiI4Ay/SvGvjrGXNEl+ho4+Rj7UKDK4jRKxD+37DxF/XdO5RVNU\n",
+       "GUZRjQ27R9HN4rh92gls/8DlyIpHf0nYZ48oFmyqqcZWCSF9asQM8Uiw2JUGFj0ihuRQbFMBhmTY\n",
+       "ZIAP2CS4XGVEnwYRM/QpIkAfF80mMKaIjas7KK1Z14fpMoUkDtodou0sEZuspw7aOkCo84TKQbND\n",
+       "qtqAx4gctnKZ7IU0UWyOfZrhPsgEeJkSqdRoZyJyCrxUM8jEKCKCfI6w1gGvbjx7fkxY3buIvbz3\n",
+       "86HhU7Dv6F7V5Bys/AD+toj89h4/4fo1zWCu6P296ut/B4P/EeZmIN+AwRXYeBX6e8qwziSU3lRp\n",
+       "Wa1ANV9iwznATOowHwtWXKLnZLhQWCW/AZVNn3N4nK+OmViOqNgFDgaaZ61FOqpMgEVsp6Rln0jl\n",
+       "cfU62XQfYlVAlujgg6yi7DmcZBqlnqcnARNpA6XaBNJCE9ImpkJKB8iLoLRmKC45HDJUccgxZJ02\n",
+       "XebxyTJmpPqElPBoU2caB41HQsgIHxeHPOgeIxGs1MNXAYkecWlmiaHcQmbjLPWZbcaLPQqf0Zy5\n",
+       "HSSCz/6h4nv358n2xrQmt9nqVUjWM0aRy+Nt2jQiYpXgr+yDO13SqVUaxwf86MAUa85JSusWKg0Z\n",
+       "FJZg9zR8UUSufLAWnTdhqizXT38AeBFIFsTSWrdF5J/CzgKmxFQHulD8LMzeZ2hDaxPwh4/CymVI\n",
+       "NGxsQO2faD16fu/7nIUXt6G+DCu3GiHRTWD7ZeisGepT+BzsPAqXi5CbBhWC3YNhAuuYMeCBMgFM\n",
+       "RUPWgoGGIDXCaK6GldhwhIc1GMyYKp8egpUbETEg5RhCEU0IeAh5FDVgQEhAThvV6SgxxNhQwcYk\n",
+       "1DWUh+b+H7jGiTmdg5k1sH1MoHDN9K98ED5fgE8lJqpTu6YyVnahUYTWszDvA/fAiyHo12DfizAd\n",
+       "w3dvN74KKYAH4RCiDtiTeyO/AE3ItMzjdzTb+3kwcgMhQgEjOvFR+DY8xjuMTf0swJAQZ34D7qnC\n",
+       "iW2TtJ++H548URA5V4JjKQwb8FQIL0HuY5qbDzVxsj72pKKlMjRVB1QJ6cYkJRuiWWIX8GcYNQSR\n",
+       "PjqXo+6cwfH6eH4HvQbRUxade12KXkA9oylLSrjXn47oUaFGj0MM2KFAzBKaDHkcAobEtBkBNVwy\n",
+       "JBTR7KfLCE0Tj3N0KBOTY0wXTZ0BIZoe2xIxYpItNUEuHGG5PRylABcHRYtpfCCyCjhJjwQHnz4W\n",
+       "dfObilH5gFwohNUSUsqg8xZtVWJ/DHGww24mYCJosmXP0AuHDMse4YXHYH4b/vCaqq2I+BiVpsH7\n",
+       "1Sl5NxznDQPJY7DTgKUanGBPDmBPK+aTsP9hqCroKpHyGZgswb4L4O1Cw4OdPjT//RvchP5F2LwD\n",
+       "KiOoZ6FehKv7wGcOte6ytS9hnBeyCSRJhX5SZ2d2gtGXDpJccmjvdkj21Rl6PfxRhEQD+l6VhAJI\n",
+       "F1t5eIQkaoqOFJlKHSLJI+Sw01kUTex0HqFMYq0zsHcROmTI4gA1QkI0CbCBJtQwKy5dUurYZPFw\n",
+       "2U+D8+wQkUWYokWEyxrrhHRQuMRsoxgS61l8mvQEknQaiFHSYVstEKgZshtruI5mMZ0iHyeo3AB7\n",
+       "QTFIHF753JB8u80gV6FSH9ObmKVzZQzsmCGEt+S9KThxEu7+FFzuwnaL2qxHNDlDOqlgAxKV0vYW\n",
+       "iC4lkFk1FbAPYK432IBxG9YrhltzjR7S9WCjAb2xiBzCTIGtX3uVSP5huO1BONCCzUVYseDSArx+\n",
+       "CDJt6F+GyBERF7J/G1a+DJOekXw/lUDuNExdgf1j2Pg8rLxqpuriEM64cCIxx2RYgKEGdwid1Bgz\n",
+       "nggh75qpu+kUagJXUuOKQGgqMtYkVB2j2rw9DZPWkAYhHfpERGgiBLBYxOIFPEbkMdWhVy1wY6gc\n",
+       "APrw5EmwXoWugrUZ8x3yIeSPwtlboPvidedzxoUvhvDXIqjuM30e1TGzylKCyiqoKqRlaDowCMFb\n",
+       "gScEdn4I3/kRPDKEuVnoNCH/mpEMttagPAvdHcg/D+Ud+Nd7k3tvu7o/D0ZuLO4BXtImsL3ReAmY\n",
+       "FmFBazY+gvf7c4A6Biem4c6rb/zteN3j0peW2L1rDhoKxj3Ydxr2d5k8BkdDRaNoU9lSDGcUWS8h\n",
+       "SIY4ToYwP2QQ+ow7GvJl6HWIymO85AoFv08JwU8Vw4U8+m8UqZPDi7Y5k3WYFJsKMWOGdAk5QkCH\n",
+       "i/ToMYmQwSOz177pE1HFYagLaBmRUiCnQxIJGKczFBKfntPGw6VK4cfZcIcuc6mgFfhJgOfCvNK4\n",
+       "pAyIKBFxQXkM0x5K24RWQAmHMj5jYJ4RPbZpqBy27ZIqi/JEwDhToBO3uJwpsBhU6AVb7LoBV1WL\n",
+       "ZsslTi/BwTr8UQgvvSEat+8RmLJMIFB9DRp//ObKxYeFKQgKMP/GX7y74ZZH4eNXwU8MGfB7v2SU\n",
+       "Mr/wbX4slHd6Br77JX5Muu08Cc/d4XDhgRk61Um06tJe2sbyFSKK2VATBSmdjGbg2nScKdLyncio\n",
+       "BIdaJElKW9sMkhH9bEAxXMez8wytCG25KFqMdIhKFxESxjjoOMC2NJZy0cmYVA/JJl0iZ4OczqFl\n",
+       "hSp9AoRlXHK0SUnQmGz6KiFTZKkyQjNilzKKMoodUiwuk1BmFXAoIijGTKKYwWHIDuuhj8sKw+EZ\n",
+       "LHeMHjkMveNYY0HFdfwFoew6uFGRKE3IamHfEJ5fzlFQp2hPLdLFx+Zp1GCb9Oo6fAemf1VkaQ7G\n",
+       "G1B/Qmu9CjANdx/bIzFOwPhOtp/5Ef1ftvG8AXrBod+bpntqChrbsKCua/u9T1yErVMgVXhqCpbG\n",
+       "0CvAqRw0evDQp02CcjUUsf5A6+ScOXYXH4KShtWPmwms7SNwrAqDCE5ehv40PPu/wNpjcPDLcJsL\n",
+       "VtnwSJrAxWPgXYKZOrQ/AxeOw4Rt+Ju2BZsKrmpj1mkFkOmYSs0gZ2wGaokpCsSW8bKxBbZSo7y6\n",
+       "oMEpmsDqfKzAdZiVEUMCOgiGsFrCRhCGJLiMGLMCbAnkemaEOGhCqQn7ijBagfMjCGbg5hbMnTPn\n",
+       "SUdgIiMii8DONPzGMtzrG0vfcAjuGjhzkK4Zx754T6XNTaBwEUpleOmA0cKfLoB9Ef5JA+7JwkoA\n",
+       "rzXNoIrzTfi4C4sR1GrwR2+nQXQ9fh6M3Fg8CPzgo3gjrUlEeBx4hOtGGn+2UF6B+Z8ouSuuHsnh\n",
+       "LWZw8/sIsz2whhBVIN8lM4QoJxQSF783Yio/pGFlCVTMyBrQzboEF7bRYQWidTKLQik5TVzsUMmE\n",
+       "zCQOlaFHlIb4s0XO79TZXvYpKdPr7TBFTIYCp3mFhJg+VWAZizIhARFdLYSUsaWITY5YT6O1SyQ9\n",
+       "XC1mvE9NkUUokKWKpsiYXXLUGLKrFIvpgKYKsdUbVuEuIQFDCoxIyOCkTUaWIktEaihtuARMskOf\n",
+       "Bfq4lLtd2pld/PQofqfJqt+lZgmWlWOYTjC6+Ovo37aACNSfwm3Pw4bGGcFNn4dPXjV99hR44Tg8\n",
+       "qYD/+0asdAv8wU9k4tVPwN3bJhAB05efy5rBnmbGVD4Ajtbg5f0iUtJad7TWu5bID49i3zlnOg3W\n",
+       "EuPTeTJ3N8iWi0x1bbxmSuzHrE+O6FgJKtomm3QJfZfQA8edJY1iQhUxznaIaZCKxtWKFJ8cN6F0\n",
+       "i45ShHpMRQb0xMLRHQIJ0HSI7E1KpKzIPNvYTDIiwKWEh0fEmC6RhkQETRHIkENhkWDTokdMgM3k\n",
+       "3pTNZSaxEMp0KTBiCpuBdlgeDwhUkWY9T7QOUdKl+zvT8Is+8W1D9ExC1fLJBimpK6AUBBovTVET\n",
+       "HrbqcSD7Gl43w2uZhIliQCsPU4/CfbvGFK62CM/9FyLW72qdnBNwr7ePPw47Y/rf2qb/hUl4fdmY\n",
+       "BY1S4BI4w/epLXMNWutYRH4Pxquw9Rl4eQaCy5DLwq+8CuW9algzA9/4NRH5LaAPVg4aJ+Fw1uj1\n",
+       "+XNwMIZtD3aqcMdFM9rb/jsw7UDXgZEHOQsOJKCroG+FM4dgMAlHxpBzDEViWsMhDc8JHBwaLZHv\n",
+       "zIMXGw2RjmXGbI8lIMrwRhIFT3kwKYZTtqQho102XZebRUgRCnSBDBofB4XgAC0UBYRdHIzD8CiG\n",
+       "ox2461VwI3NubszB1RKkz8DVAC7nwWvAwXNG/6h+k6JbOQHztrEl1gFYTbAdw9LVWWNiRAgIbFpg\n",
+       "rcAT1T2J9y7YI2hrrevAn7zFcl15v+v782DkxuJh4B9+hO93jTfyMxqMDFtmdO4NeNRvrTCWw8SN\n",
+       "ZegCVCHbhgPQWYVBKaGQpCSOx/TuFjt2SitbReKUUX8LGV/G3qihF2cplALGOYc0ux9JQ9ppjdbE\n",
+       "iEMDiwPuJer7C8xYM/gaRCI8etToM8aiQJ8xDkXYK69buIzxxcFiRJ8hETVyUkFJkYQRCSmx0ojk\n",
+       "UGgsQDOgRwsLzQEsPBIyohHp0CLHiBSHgIQaMQNm9TZbMktMHqGIT48AIUsGTR+HAQXGjGKbbGgT\n",
+       "BPvxIps0nSIfHaE+PA0TFWNUcSUx4kb6PM7NWfzDebKHezgueB14ch7SDGRqcPAizBy/Nmnz067s\n",
+       "JZg8sKfqehkqr5lxh9fgmm7FviJUrquIpXv30XwKfe+NYESx5zjqXHvmDBy7k/i5EfHNOUO6K/Vw\n",
+       "vDrK2aLmT1K0NGGuQTdtU3QyzLgBrrVDaCe05BA9XcQFhjpPX8ZETJHIPpwUFCGxSsnIRYQcnmTx\n",
+       "aNLVQ0JrG2UliLUGaYP9aYVVlSNmTA9FHrUnhOej6KJwcYhwETK4FFAoIiw6OHtHUMaENGgcyiSU\n",
+       "ydLF5jwDVoi44iXYaUwcnYb+EP4ZyKtNso+eJdx3gHyoGSmNqJBYArzUxR8PWS0KytZYGRtXByT+\n",
+       "gCO7UBDYPQzOGWNsDXCgAdkxtD8vIud9ePkSfGH2OlfXQ7D9LFxVMMqZHlrmLJQumRHh69bx/WGv\n",
+       "EvdN4Jum6pF/BO596I1ABMyxcExg/bjWox+JVIYwswDrk0ZDMSemOFN1YGM/hOuQi8GdhEJkEvqu\n",
+       "ZYZBGgI5Bb0S5BagWYA4BxOR6Vxoy/A1ysDTM4YfksO0cKy+IZUeSKE4NHI8oQ1Vz1RwDmLGiFsC\n",
+       "dZ1nLEMG5EjJUCdGs44wS4yDzQioo3CwCAiA/sBwi5caRqL+2vG/sAWZGcMDKl8Fz1ZkSMk4hrNi\n",
+       "ewV4wILbIzNOF27CRBV0ybBxaZnIiSkYvwRT+2F1/14gsg6lMxDEcPaDruNb4efByA2CCC6mTfMB\n",
+       "eqMfGI8B/70IovXPoufP6BS8/BVwZqDQh1LNYjCRYTtaIv0xOWraUN+PlqhHmtcX+xye6VFyPfQ4\n",
+       "xWGHylaNnarGt4XZWYdJf5edlSZFOUzNqpLRwqT4JK5PqK+ylg04SELeUXhaUBSxdUAiNUpoMtKG\n",
+       "SwAAIABJREFUmqREaFZIKAGX6LNJhgXGKCCgT40Rlgg2FjC1l/MOiCXGZpOYDCAMqJMScYgURYRG\n",
+       "YYnDLD3GXCGiQA8Ln0kmKDCUDVIdk+gSwjpDYjxciliEe14WDR1wshHzsdeEs1PC07fYpL0xycw5\n",
+       "3GkojiIiHAonnma3NEl8qsPcvgz5hgMCaRX8g0bMaWoTdpbh1UXwzmPSv586GPlTaJaNmaS0YL0G\n",
+       "X71mjmh6zXObsF4yUzNg+uRJE+pzcPI68nItD40O15l0WVDows37zSxmYwD+NjIo42ZPU2nUSRON\n",
+       "thTiT3EAD5GU1J4ko7pk2WRVIPV2sbVHUSIcrhIQ0JeD+NohpkNHDXH1NqLKtHRKhS4WYyZI8SRi\n",
+       "zZ5nnQJClgSPLruEaKoIiogYi0ggROPSp0RMl5QWYyxCSmgExWWqhP8/e3caY0t63of991ad/Zze\n",
+       "l7tvs3JmuIyGQ4qiRJqmaNqOZdmxkQSJAyNCYhsxDCUfEiTwh0RBPgRBYlgO4h3whsRW4gRW7DiS\n",
+       "aS2kFlIkh8sMh5w7c/e9b+999qWq3nyoc8Wr0XCROAtl6AEafft0n6q6XdVVz/t//ouqdSNTXHcC\n",
+       "mzJXXdFVjS2N/LZ4L+PPlb+BJ/5iZnn6DZdn94wXZ7LqyNV6tBqbjo0Tk8Wm64H7aaHdyVQKfuAm\n",
+       "jdD00nM1h9OMzsDJezw+JzMf77N4hrud8Tw+PuPcOY7G1F6hfcBP32P/Cs9G8m3+OS7OZd7fc5Vq\n",
+       "qfUFWm+Q1N3KqM2N+A52ufdUSX5tRO42SknwMyMqHa69j2/UWU3Kj0qVlYTNwNcTDlPiu2gXZBW6\n",
+       "kfv10o11W0mKHeMCmqE0H28lfGm5DMBrVThsMwhlGvCxOJcKh2A9lg3Jl0PVWHBRzZIzBno6pUez\n",
+       "XKEiqmqquaRAb8KHv8IX30e/W+KlUdls3Wovu7V8XHy64eAH+mI6sraXOba/71ZasZ/X+VMt1tYY\n",
+       "fp3iaaaNchyTB/Id0ozRLo9ss7dLvMkTCaP7HNzlH36vCrrX1+83I29dPYfLMXpTiX7foV5TXpWP\n",
+       "+zYyvN+LVa6OVz5Bq8rVR1io001z18JZ3e3GQzDxDicy2j8m/7XPuPuJscmpaKEyLZcwo57hiURn\n",
+       "0vahvUQUbS8lkk5bvZqqGxqEch67GBvaYUk3HTnMghiTUoKbdCQyRUw0w1iUe1RiXWJVYVXmK3q+\n",
+       "oGnmuD3nsK9pqu/Ioq8bW5GpmbplYmzJI2oGxkaOq9k0NZsjI32FJbkVy7pOSGXGxqbqqta18sw4\n",
+       "FCrFgWpStSpRUZPHUkY4GtTIMvdPRtX+jvf/X1233ldx8/Syc7f6OpOqUf209cORyqn7tlrLli/e\n",
+       "N1jq2bhL/z08NSRbKg0Zzx9hiU+d9bpkzt9tbcX4t+fSy/DG5NjtT/G5nyjFaaePSij++iG3Mi5v\n",
+       "lOODgxYvVtn6Rw9LF4/YHpc5K7f2WdznTDRoDdQrDbXm0PplLm+sSCupJNTlsUc4RFXVgVrct2/m\n",
+       "dBKtyWUCcdsgDtwKpySidixZH43irnqSOaFcJY8CRag7oeG6dRuGdi3rW9O1I4jW9NTV9EV3JTYU\n",
+       "Cl23NdUkEtGGuuNSd7RkclOJkQtyLW2ZVBTCLYdxZGWwKD2xbzZi8c9S+xDJsZkTC9s6aaKRc5Rw\n",
+       "FKO79WWhWDLsHTMYH0rrmceGM+P6cderm6qxolJPjdItn/2RG6afi57ZKh/E4wLTeWbQ3/sVnlkr\n",
+       "mZnDvdJb6kH20UtvxjXy+pqH4l3m9gfKW97DdbtO93oI4Rwn/1LZKGxiWC3Jr/uBFxOyjO4KaYfj\n",
+       "Wdlk5JFmQhLKhIT7Rfkc3qmUlvJpUjYgmRIM+kZ5lekohSoV5CmtUN6OFyP1pOSt3KmUVIyKiiKk\n",
+       "+iFXk2kpHFi2L3HPWE2NOQ4atUwdKVwX7GtE4jaVHTYu8XKNyslUdTnRa3V8buNxo8OOvJ6otk9r\n",
+       "H92yvXxfb7Lu1sk6Ty6X/JB0kUGFeI3ZKeTlYuCwS+8Yqx3CY2R9lr7B4f2Si3XxrQhG/P1m5K2r\n",
+       "j/oeDH5+NxWjGMJvjmr+jWpGSvLqU8/xia9SvMjtZWZpdPP9EwaXyj8cOeEmx2u8Ug49VqpPye53\n",
+       "HNhTWRzLu0ca50ZOazg/gMxGLfqNauFWvW6cnJDExFEYGCZbKgrNrHBHSuypJX1ThUyVMNY1siBY\n",
+       "w0iQzYctx9Td9C4zZ4z1NPTkms5raYjaBqI9WzJ3TVRc0hdMZIKpsVyQGKpqChKFAwvGJhr2FYYO\n",
+       "1azEtiTfVdTWHNOU27etL2JkIstqGnf4SjWxt1h4/NdpDCYOT04cvz3ww99gfVTx0vHbrh5b1c5m\n",
+       "qpsT/XsXJUszjSUWGuy0SzfNpRoKsjrFXoyx963P2e+sHrKdf4Pv5ZdDSP8+e5+kfrZUEO79IyZX\n",
+       "+IXn6Zxl9CqHL7w+k6bHV67xZxY5Fji5QrZndlTTjw03V6a8t1Cv1GU6akVNlrRlcUcRtgVjwrK2\n",
+       "TQ1dibE1I9Gyjol9C3ZjZjqH2Ssh+qDgSVE1lNLNL6sZ65vquS9VcV9fzUR0YNeaXEdNZkXXlqGq\n",
+       "e85JLVhXONR1YFtb6owjh1bmNOkaciMjhanMmpEDtdFYp8b4vw/qf5rjMYqd0rr8QkzsSiwWmaHc\n",
+       "nZC4OmqZjI6064eag7rVybLLy6esHaWyIhpW2uLoCc3ukZfffeDJLb58isPfeKBamn/+8vzjLa0Q\n",
+       "wnHWP86Zp8jHfLVG7TxP3S/Hdxc3eO0KLrPyv/Jcmyf2CJ0yA+bavKG4VOdUSnNE6LNaKc1FbzbL\n",
+       "tU0pyC8FZJ3A7cjjBbWE66FsbnIlPeK0cjwzUSIUaSgTEpqhNDgbKsdDq7gW6IjIHSpB7K9ZFj2B\n",
+       "RGHZ2C0l3jKVGAq2cGRplFk85Pz/zlPXOXwPLz7a9OKppmrM9bJT9pw0bS+S1ez1tx0trpL1ZI+e\n",
+       "L31C4vGyk8xf4fSIQZP8LlmHgxNcjLz/dOncNtjgdpUs4Ykh7+vG+I234rz+fjPy1tVH8Q/egf3+\n",
+       "In7cWxTM987V+g/wxFEJbyaR8wcw9t5rL/lM5T3cm5RD2uoVmj/Crw2pJ2Zr57THLZVsSSyumeXR\n",
+       "6QmxPnO0Fg07iXFa0ay1XE/P6sRFzTgzji1FSA19WU/Ncr+hs9A1SjJFUThKyPT18YzEIhKJ37Bm\n",
+       "bMHM0KFlmQoyExNLNi0qTEQVQVOq6kBX05NGamauYqZtpq5mgEKh6simREvdnrqBk6KqsVD03asP\n",
+       "LMf7+pbVnHDMWHTXTsxUxuecujjw6urM1z41ce8W4zbdV/jEKTZGJDHz7L0rVoZbXryQyOtjq8OZ\n",
+       "RxssDLi5Vn6+u8b+k7QuU7yCK2/nFRBjfhmXH9i8P/StT32r95RoS3VxX777ouLEGnoli/HmmkHt\n",
+       "X+s/0ZZXZ7ZWGob51CBUdeJENFSVqWhaioW1cEchEdW0IewoQrQah/o2TMKmatjRVMwbwURfYqBl\n",
+       "XdW2czKJvtocR9sT9aRSi+55UtS356pUw3FdyxaEuRV4XSbqO3JMX8OCmVRXoSEX9BXqquWa3O5a\n",
+       "Rzp9t9q/M1Kv7NuNPZsxs4zdNLMQK/qxqR+DNAbV0WmT1wYqGy2t9j0XV+uOWkSJwV5N7/ai2dWh\n",
+       "bLMpdA7808fZ+SqHb3sWVhnAduHP80M5j96mX+NLS3x+Uo5URPb/P8YvYJON0xyflP580xQpp2OJ\n",
+       "jCQFa1c5d5W997PVprlYoh67ygZiG4uBVwNFUQq81rIyAO+BGr2uREaWlM1IXdn07CnV6cfMk3nn\n",
+       "2ztSIjlrcjMVv+G4Xc8p255dJcbyXqWufRk9LQOVYqoyqdkbj6weZ/8+V+OSrWzRXjpTTaZymyaz\n",
+       "J7l3yKlj9NZl01c43aT+FJN9woTqSaZNRp9ncsDkJAc14j7HU5pd+qulhrkfcI7xy6Uo45+8Fef2\n",
+       "95uRt6BCkCpTdP/jd2D3v4i/OndPetOhtHeuAsRSHpfmDzQltMfb/NyvlUyx0wN2q7xYlCvgQTCs\n",
+       "NSz2Y8nmEiUToopppbBz7Jgkr0pDrkg25fqyUDFLa9JYGIsKuVpaVW1PVaWuOGkWVsvVj662266Z\n",
+       "OS9125m5eVnFSE20KDURnBJcUjGRqwpmc+vvgUJuWdW6qaFUVcO6iZtyi3OCY0/NPSetuW5H1XEL\n",
+       "KoaqoiLkTqo4sm2pOFCkiYmpdux6XFUvO9A9PXFiMrH18qbtE+c53aDxeZc2MqdC7vSlkgB35mjg\n",
+       "Uo/JP2TwR1PXl2r2n4qypcz1kFm/VlqPnPgsL6yz+7YoxV5fr2tEflvNE0OPYYXzn+CZZM/uQc3X\n",
+       "fuA9xuPzijtD6lcoWpavVqSnhvK1ZQ19N8KRNQONUNrP9WxaKnqSpO94SGxpmmppyVTiWCOMLBjI\n",
+       "3NSIQSfUDM3sqqprSyQaMn2n5kTVLVEFuyrGFqRSDVeMJHjKum3rcmSiBYVDi2bGEtvuWlC3M0dD\n",
+       "Ts1twpva6mpumyULxrVVORZHazrDpu7Cq3qV0gujHRI3berG4xbzaJJMhfZt1k4Y3D5y9L6gmo1N\n",
+       "ZsFwSF6smXzt3bIX7ttbfpkz7P61h308vsU5eJT156l2OHiF8VffHBn40g/x/sATc+7K0oSPXWbv\n",
+       "LF//xzHG31RghRBaLN6j/zjHMmr3ywYjdjhIcIWzN0rEL01J5w1Goyibif3AoChBgXNKa/ndyG7F\n",
+       "3A23REFmypHNnnJM01GapE2VwX3t+c8FD7AO7sjtYN8Zr2nPL9iOcst3fTN58r7CvoEa8RHjYsFC\n",
+       "se/ij17x4kc6jl5raT/R0G5nivaBfn0X92hG8kkpP1vMSOvkoxIemkTCDu0luqEMxrn2QtnZXRnx\n",
+       "rpPs/0FeXJ43Iub/g/jbg9XftPr9ZuStqXfj/lscjveGFaM7IdjBs94GyPTtqHI2nE556ZMMRiQD\n",
+       "ll4pvXSuwMVxqej4/PznX/08f+GDNGtGW9sOTjQsToP6nQWjStdOyHSyJcM00az0ddPEraQqSE3t\n",
+       "qsZUkeTaJjrIw1Clkbgaz2pYE2JNPeQqFjQFO674VS0sWJKYSEyk6g4VVkwtqlsxsGWigcSRgQN9\n",
+       "XYtGgq8o1C2qiw5UrGjaVjERTKwLjmQOzQRbotn8NriSBEsSC4F2OpJqipoqIbcXonHrUH4hc/xn\n",
+       "Gzz+gyy9f65mGLjdfsWv/aGRR9vR4j2u1bj8OfKfrrn2Y22bZ9sezWrq+zOz2p7+yaEX23z5NXZ/\n",
+       "juztMPP7rqvkFS39GE9/gJOR7Q9T63HuM7kP3L5vVr3m0g+N9Ncr4tZUc2lq+OTAbKmmbl01X1XE\n",
+       "HTth26jStiBIZWbJgtXi0FFa/nZH8ySZe8oQtCVDi6p6YWpP1argvjgnHUY9GwaGCuQK0ackZlYE\n",
+       "G9iU2pHoizZkuoKJ1CFyQWosmJko7IpCaYdnUapiUaFQuG/gQPS4epEbp1uS9shROlGTOpnMbOKY\n",
+       "tpshmiZkoW0yWtAqegaLl9RrA0tp32qaWc8O3F9eMmj1xDM7XL1YJsf9y+/ciLQ+zoU/09ZqZprD\n",
+       "iQvv4doHQwh/93snPXYe49Tr8rcSpV/H19f9VmO2XQ73GF4qreI3xiXP4mqNnQlnvsArz3DsqLxO\n",
+       "zi4ilhyQqTJpd3P+8L2h5IecSHgqlA3GlvL+kysxjW8wz9UtJbZrSjfW48o2Yzw/vJlVV1SkMlNN\n",
+       "ifH83VEpA0vn/95VRpgvyYun6a4Ydo+MqgvCTldybtux56ZO15tCI1NUc5Nw143Q0K8ulEfYqJTp\n",
+       "ucWU6hGjDkWj3HZ6l409QoN7p3jhJpd2+Qdr/NRIqRFS/jaSiyzt8cvf2/n71vX7zchbUx/Fr76D\n",
+       "+3/AG/k3ohmh9SGe+GAwHNV97VSmlmaKZ+i+zM5ff720NMa4FUL4W4d8JJEtTu199LTBbEFlVjca\n",
+       "jxzOhk6kqVDpykLqMHnccBYklQ1ZGOqFbSvqVvX11LWLulZyqBU6siKRmppqObJgQUNi7MBERTIf\n",
+       "ytS0VC0Y2nPfTE8u6jl0INOUqliw54KBhqpgaM/Aoba6ZVOXZAYqplr6Kjp2TCw4L0g07OsYmblr\n",
+       "V9+eXGrdipa6VJSp2VLVSO6YNDI3rXA6pfcip0+x/8flX3zElZufcuPDRxq/Tv/TSvXXWkdotT2+\n",
+       "07TRQ5kpq7lw214rd/Ufxxjf0rwlyib0u1FehBBWcZLKU7z7eT5+lcOl0pOmEXnpA3z80zMffumG\n",
+       "o/UgO71ieVS33M3txr779bp68nU951SKqiJ5WiwqqiGoFMEk7LoblsXYNwsTfUEjNoxVnMfQcbnE\n",
+       "kvu29dy2anku+I6C2x4ztIod0V1RS8OGiolDu0Z6jiGTCHrW3XfVewWM9I1NcCjTct7MmoE7EgM7\n",
+       "TtiVK3TVlEyEQ73kyAmFs6EQjBSxdC6JRXAz6SgES7perTQlWS4epIrptvb62LvuRM0FlkbXXeos\n",
+       "e21thx9+iZMHklfHJr8UQki+FYExhHCsY+m/eFQMK7JR5rB9W37ypuPtwtEH8Onv7aqY7rJ9gX6V\n",
+       "vMrCAasHpcOp34K8xBiPQlj6lVLCm43YfhfDBle/yr2/RW9IzDn/LJtL3KmXzqXtWDYia0o+SGoe\n",
+       "XjgnpB6hmGfNVJVNxnVl9MqDrOO2b5JZryuRk1RQWJU4qy5xBld0TX4zAOqcsgnp4xVlm7WgZJo0\n",
+       "tRol2WWavWhxfWQlnTkVouU06qXrurGpYddmGJQJ4vEeSaaoIauWbN0kIz2gXicMyJ/map37B1TP\n",
+       "sfLLnNnll3+DjxxjrUG8Q+smL+Vv7CnyptTvNyNvTX0E/+Id3P8v4s/jf3oHj+FNqRBClVN/ZNHk\n",
+       "7FnDWuGwuWCyGuTVHXl7X/xGCOHLr19xxRjvhxA+u0FnKjauGZ0tyI5srrG5TfdE5lijInXc4mTg\n",
+       "KG3amr1mXG+KcWDmtu14pB5qOkmug3Uzl5NVE0HDxjziYaTQsaSqMNXVUpUj0dRQc1fbvtSBTFVf\n",
+       "sI6B42raOnJ95Dakxo5M0XRgVUVFtCu1b2jJWaUh2X01LamqmtKhsWpH5lDLqkxhYCzR1ifNzZrc\n",
+       "e7Ze5pb3qhQvERZ5+XmuvCJ74Uj/bz5o6kIIKzXVfkXejqa1oDalSEv7rfZh/rqb/pt8vkON59b5\n",
+       "g2dZPhHCrS3+dYzxtxllhTKo6Eef5GNniS9pf6inMR463Ke/GAyPJdqzXIJrmzy6veNDX+j5l48t\n",
+       "asSJ3nrXsXFFu8gspDsyB66kp43ieWkxME7bFo9Sabpo2r5nV9CvNKypSAw1QkVmRW5x7o7ZNrVg\n",
+       "z749FYnEyNMmHlE1EG2Jqkp2QWEgkdrEgX1RQzFfSQ/mg5sVhZGxXR2Ftp4chVRdxTFVK2amciuG\n",
+       "7hnblmmFluNmqnNp+HLItWNwlESjgjRrSiJpUjdsdQxaXaYVrSI1Xs88uUW9mJo1tq2MuT1h7Q5P\n",
+       "fYZf/xhfPjRHIV9fNT75mNrCOcvXH7y2KE/H9s9sWXje99yM7NzlpT/PB/u0cw4TLh5y4wt+04X3\n",
+       "4er+Ai8/x7H3lfZDswqanPtY6Vm0scb4iK83aaTldGMZ5yP3lHyRFSX6sakcxVxHJ7IUeDLyUiiR\n",
+       "j0U8pRyyvKgc3ywqFetT5Oq+ZllVooaJsYlN5Tw9KHGTknFUfj1ThrpsSMJYXjsS0tuqaccpuSxJ\n",
+       "NcOGVGpJbhpaxpY0JNLQEIuzVA5UZ7tC/l71m3uGJ67LFxZKo7PkFIcXiBNO3Cs94y+u8Mw1/lrO\n",
+       "xR0+GqmMeeGwRMXeVDnvw/WWNiMhhL+K9+PLDyf4luFW/qhSN/XfxRjftJTWd7pCEJTIyH/5Dh7G\n",
+       "p/EPQlB7m6zo39QqV7oLz7NwgcVhavbUk6at3KRz2qx9SjJNpKM7xgtd8d/6UrmA+KnXbWPjEf7c\n",
+       "DzO9wNdGXPw/tT6565n79LOaU+tNxbhmXK0pQrAeuyZp0052WVqJahY0Q1ti6tCqhih1pHBeEE1F\n",
+       "DZmuPVQ09KTu4rj2PLZs36GZqQtuu2vFSMVJdcdMXNZUV1gSJbiratGCzC23PG1q3UzfoqpNO7p6\n",
+       "ZmYyS84aaeqbCIZWbTuncGCq477LFvRdMNPW0hY0cel90aXu2LSfMTvL0U1WvkSxXw7KH5bodpns\n",
+       "JSZZotcpxE4o42S3CqMBb51cvcMfeDd/+Hnur3PzKmuf4y+3Q/jsjJdmvPLQDfFdT/GJT3K9SnFN\n",
+       "9b3LarObLv6hupBFsZ3qTKdCK3ftR0Y2/xU7a2PrXx37dz/DZz7Bs73cxVM1K7FhscgtZnVfrE6J\n",
+       "Fbt5YlpkOmlF0Qv2q+dM6i1HlaGaYDV2tELpDxP1dGVyE6clOpjo2XXTRCHXUz5Y3oeWVCLRk7th\n",
+       "aBEHqkrY/8iSuqjiokOZ0yrWRBW5A7mxpmNzgfctE6cEq/O8565UQ1tNxUw0MpaLlkPdTEeYTQ0q\n",
+       "iWlcdpRsiHFBOtoltiT1pqEdd5ejzoRhp1SXLG+zUBArPH+P6x8LIXzhjVCrJZ5YkfyWZrUqzTfF\n",
+       "dMuk/b1cG2VI4rkf5tTnee0JlkrzWLdX2H7hjdGa6rM83+aj/5TL76dzuny2/uIP0lghbNCs8miV\n",
+       "1UAvlD3NFaVt+9NKVCSJpQFayfZhObIe2Q5lo/HM/PUrSozjgtIT7GvK5mYDh4KxsURuX91YMHUk\n",
+       "ehRPKFueoXK4U0VXsCjYUgiy0BCrQ4uhoWpspm6sqSUXjNX15tGLdcJdRboqxLrmkCK9pNKoafcO\n",
+       "dWstilP0n2C3WfY+OuUqbTMpD2N0EOPPhhD+hVJun30v5+67qbesGQkhPId2jPGjIYS/EUJ4Psb4\n",
+       "wvzbfyXG+D+GENr4V97EyPjvg3oSkxjfqEt/eypG+yF4DR/yNsuLv9cqZXvn/xw/kHKsy+7JxOef\n",
+       "K0yvBoPNk2JoqkyJFpidYPcGPxpC+NsxxnsPtrPEh57Do3NDriazVDtdVl3YkxRTa92Bfn2o3wyK\n",
+       "tEa+q1GZSZzVsWRJpQyjcqTrjrtaEkNctOCkmUxiB9FMak/0tKFV11Sldsy0jG1JVaypqom66nJ1\n",
+       "NFX15MZzYlihYiiRWNTQ0HRgLDo1f/9UX0/iOTUzVTU1LV0TI4c20VXTc0pVx7Ig2MN2yDUFx5Ou\n",
+       "7tKuW0ub5Nc4gf6XaN7lJx5+sMQYt5ZD+DWu/6llT99t64ymstqWrZMzO7/ke3DQ/A7nvnmBj/0B\n",
+       "bjVLb4PWmOeeY2WBzXUuvMIfCiH8vRjj/WN88BkOqvPU0HWDrR39Ex3x9Jr2lULl1R33Hk3sxCUL\n",
+       "tS2/9CO5O19gYVq6tlabtEaFpe5UczWVJYlGMhCStkZMNZIl0/pMv3IkKzbEbCYpMoXUyMQgjCVW\n",
+       "5mdw11DTKQuW5yqpoUzLfQUOdZQw/En0TAVNqypGcjvGqvpmTgqioUwuiFYtiaq6epI5yH9HZklF\n",
+       "V/CIjk2ZibFVTY+J7ulqioKJisyujqqOwopZZWqS9d2qMsxzndFlQkdtq6G/OdBvrrl2MtEZDk1a\n",
+       "fd0hJw+5W2+5+lQhhLHZMU785RBOR46+SP+zDxrECjv0stxaPVWbu6JGM4MWR1/5Hi+R8zxS48Ov\n",
+       "MbrK/YXSdv3RQPcpbxi9sf5Bntpl0CKe5sQ+L5/g9GppB79UL13RH4nlM7goSlfVS7HkodTn/M08\n",
+       "zI3KlM/uPCWNZSjwksQ26grLSmzjtJLtcTg/6wXOqTgtt2/gNRWpnr4Vc/2NcrCzPN/DFoaqGjLH\n",
+       "hLAmDzWsyA1FUw1s21dX0dKTmGmIDgw9Zmw7zBw4ob80U89WLU8yMUsNw5osaVEMWR8xXWQ7ozjk\n",
+       "bI9fijGO+M5k8Tezvm0zEkJ4Cn9C6RJD2bD98xjjK9/Ftn/QNyV3v6B0K3qBMmNg/nrLW7jKeofq\n",
+       "4/ild/ogfJM38nuqGWHjk/xwweNzItrxfmJn667XHj0jq9clGVEuq6WKYY1Zp/zBMjmqjIU8sy75\n",
+       "RFDkPY4aTG5LzuXimaphlfwg17mcOVw6rj1s6NWCmKXGnZoQNjRVTBX6ppo6Gqp2dYgDKyFadckN\n",
+       "K6aaqmpatmSCRC4xtaiE0msqToq61gytK+zZVTgptWDfoWVDTVOpwpGZAwtaggRBQ26gpTBV/q8L\n",
+       "HSO76mbzx1VVV81tmX1tiyqCIxesCDomEmu64bY7YSg4UM9OmRSPEa9SrXO4Uv7uvvrwWTji72bu\n",
+       "ZUODP05ncyqfTQx+lv7ferMcNN+gVtdJmuWA3V3efZzaKvdusvpBbh9n9V/xJ/G3Uzot30T+njV6\n",
+       "9edcPJ862YhqITErOm5uL7u5tSDtjo3CHj/N7E/wyrPlW0dVFg9nqtVMMotCZaay0FaNTZUwEpNM\n",
+       "Vukq0uNqs32KiWS6Z6WWWjQ2dl/bnomxms7cR/fIREe0oGFgxWMO3VPe8CaomMyB+6ZErmuEVFOC\n",
+       "RQM9E1sesW5BWzGPRgwObWmK7ipUVDUEE9FYYqqlpq8himpWpEYKIxXXNAwlJE392rLRbFctP1Sb\n",
+       "prLRSC3NDTRcKthMMktabtgwrudefa6iGHQs1Gqq69tm9S2P3Zn68Ctc/AhffGJOTp1u8/Ki4R9s\n",
+       "u3QhWMwWtbtRL+zr3iB+Swn2d1npNx9ZzeyBvL903U1qb/yWpEYlL13pm7FsKrobnBlxbYNToXRE\n",
+       "beSMKmXzsRHnqpmkPFtt80tSCSAeKduHG4EjiYa6NYUlM4Voee4dMpVKVGybWMeKoX31ucC7qqci\n",
+       "WlYiZg/vaeYBEze3IGiq2JJKTUQDia4NSxqqsedG2BKkhqoqFnTkqlI1uXG4bpRumBV9exs9kk0x\n",
+       "PyQ5SW9AJ6VxhRMj9m5T2X6HwIFv2YzMRyn/Pn7GN+eDZ/BPQgj/R4zxf/gO2172zUCkIyWO9fD2\n",
+       "/wb+bfyHv4vj/n6uj+Nn3+mDUDYj/w3+23f6QH5n1X6cR289/MrU+V+buPbjPdPmkaQxXMXCAAAg\n",
+       "AElEQVS5JE4T+SBjOColu0cYhBASlv4kp374SO/5X5cf/5LZeNXhzopjw3Xp7iWj82xUefmJjpWD\n",
+       "oN440t2oGjVP2Q8jLKjEXAyFmZrMWGpBEWeSOFGEih2bGoK+k8YKQU9i4MCyhh2ZQkvqrJmb83Vx\n",
+       "amZFzcTYKzqOG2g5sGNNX13mnqmemsSCqVyqa1GmLjiSyzSN7JrJZUZaZnL0NcysmVo3tm1TX8Wm\n",
+       "TKKYYyin3DOQWddK98Rw1zQ51AxHmufGir8UQvM+4y88gLrnMsy/HkL4h3TXsPtWzovnNegS8vIj\n",
+       "zTm+zEGfamCcEB9hb5UzIYTlFl+/wY9ulvdyx+k/b+ezn3d4OujMmvLes7pffRf3B1RvlIzE8wx2\n",
+       "+ewOS0Mqp1hOOXk5SlocLI+cqF0xqa9qD/ckychO2hGTlmntjGpxi5ipGOtIFQqHhnYk6voyVVMV\n",
+       "Jcsow0TigTLhUPmgWUJd7sCRa1KFtqqg4Z7julIVg7lHzWiOmLVVnDPT1RdkppZNjaSiwlRwYKaB\n",
+       "aOKKaCw3k7umPU9KWihXxQbUdkzjXZMQnYuFhUZm3Go7CNHdg7rb4awsrCsafTFfVx1edvjESGvy\n",
+       "tLWvpO5duGb/Bs/fpn+WnSdDCAfn+egmcdVsOLXXumFv7ZAvbPN3cDyE0PsesoxucT3yXKVsRh7U\n",
+       "1dXSW+SNav8rfPY/oLHK7DHu7pceX7Nx6Xg+nZs3Tx98hLkMN5ZfXyovGW0PJLnlo21FOcK5JHhR\n",
+       "piM3ELVFGb6mpacl1TYW3HPfVSNP29NRM7JkoCHYFZ2c7+mB+HeoxFJOCu5ZEm1YVtM3NnbPkhv2\n",
+       "rTnSCcHYSV2rmqaCbdGyFVU1XS1PGdkUdOWNVUWyK5+2Obg8Nz/p0tgpQ6e+fpu/+TDC/HbWt0NG\n",
+       "/hM8HWP8Lb7/IYS/olQhfadm5EjJ3qH82/stCEiM8S+GEP5r/GslivLbKoTwUw99+ekY46e/wz7f\n",
+       "0QrlgvZj+M/e4UOhhCyfDUEnxm8GWH3/V5Ez+S03m+js9SPhYluRvKR4/nz59zoL7L1K44CXcZXk\n",
+       "KR79CM3zM+8ZDPRGFTG549Z7x7ovN8TByN09Jjn7K4VrjZHFbm44O2V/dNp48WX1atc0tDWM5OqS\n",
+       "IjMJQ2MrYnhGIqjomnlWY27bPbMuU7HjtprEMYW2iTtKTvyCI+ckanNS42uqLhoJ1uQacotSqwo7\n",
+       "em5J7Mqc0ZNbcqTtrkKu0HRgWYKKroapmcKyzIJEW3TOyMsyh6aWUZXoWjdSVXNC3aE03KLWczKJ\n",
+       "Gp2ovkDxJ/jaMn7+4bMxz4d5W66fGOPhWggvf5l3v6dkD8oJF1la5ysPDA7muHkY8sJXeC5w+jz7\n",
+       "Q2q3aUzMfuUTDm6cf4gH80XObllu8vSfLW2/D1K+8Q2+8gLHP8DRCWyX4WNrzcIkO3Ji2FJP+YpT\n",
+       "DprHTZNCDIs6aV07vqYwcVrDVMMgVoxCw8RUTTJnAk3kUkfmhMFwA8cERzoKub6luUT3gpl9Ffct\n",
+       "OjTVsoa6qUJNfZ7cmkosGWCkZmhPXWZFUMgtmKmpaKtaldqdS4HXNa2rGBsZ2HPbmp6I+9bqHZ2V\n",
+       "mTRPtSosTAeq1WNuTZ+QHubydqGorJi5oKh9TZjWnekf16vccns9c/awVC0tXzhu/6MfZ3iGn99l\n",
+       "bVAGvXR+VRqjR/50iTgcJCGsfYP9f/a7uD6OQmj/HD//Yzw9pD7j5iIv32b8LUZASZvsAmcSkpTe\n",
+       "U2zXqO6UI57bsQzEe7xKCORFOXrZSUolViVwS9ka7Ct1LReUdMdJeYV6BCtzs7u7OFB3zIozElMT\n",
+       "UdUNp01ccR+FoNCRWRXdVqIgm8oxTV/ZXT+Bm5qaTklVZeioSlRVXFZ1X8t9j2CsaaBuRUUiM8RE\n",
+       "jCsKaxiIcSAvFhW9J9nfIjnGYJ1bFfJPcWybX4184Xd6Xt6s+nbNSK4cz1x/3esnPZQD8m3qc/gL\n",
+       "+KfKccHff/CNEEJ9bh889m1MVGKMP/Vd7Of7qd6H3RjdeacPJEbDELygJNP+HuLkHH6elz7MDz6E\n",
+       "jlxeyw1/7haf2uYn7vChdkllX88ZZOXd5FRh/Tkaizxa4ZH7R3YmU7feVXEiHjl8ZlXv3mm12ZHe\n",
+       "2oHT91pup5u6g5Htx56TH1y3tLymFu87DJuWFBp2ZcmB3aJjPGsL9VWJmYqawp6ZmsK6DLkVIz3X\n",
+       "deVSBNsqDi3YEBX6ojIL56zomlNmclM1mY6KTO6cmoq+Kybz9qEp18Ixyw4VbiscUxHkhtpKPVDX\n",
+       "TNNEPrfkKt05SyCjpq9QaCjMNMOuakitSzTSgUYHZ6l+gLXnQlg9ycEvxRivv/3nnn3++ecIV3im\n",
+       "yuwyJzf56g/MlRLXWdkvx+mHZVBa+DuHfGCFd+fsbvMvC3Z/gf/oMc4u4S7haxrN3I90ef4hvsup\n",
+       "dT7V5dJPcuPjtD5Sl38k0WwVnpwmWkuJnVhzWGuYhJFZmKkXFY3YshRb+mEkN9YTVUPHvrF7ploq\n",
+       "qhKZ3JYVh8V9laSQFEOSixbUpUgMXVBxRsW+RRVta/Zdt6ZrJtewY2ZJrjQPH+haMbEg05EpXDFy\n",
+       "TM8JdX0z+xYsq1rWm2uwqp7Qt21dsKhtz0k3Xbaiomeh2JOktVIOHuuqRXS8UrjZDhyMBYHJiuJ2\n",
+       "hcVvmG2QJwWhkM4Jo/0ag3yFE2fnfKIN9jbYy4THW1Y+3PXH/gnL0/Kh/qV38dk/xuvsQr6LinHw\n",
+       "2RDCHW4+Wxqq7b9C9vUHtvQPVwhhkcc+xCf/H177Uer9Eg17bKUUjZxPy9yZGMrMmLVYsjVuoJ6z\n",
+       "ntGrl/lCMyUy8l7lY4uyhTiBvtRUoSVXinVaFqWaOFSTz23yjjRdM5HY1NVSeEK0qlzd7yofyD3l\n",
+       "3HQdd7U1VHWVbJVF5fDnlorcxFk05yESuaimak1iaqYvhkf0swlplBXHOBhTG5UypF187ibHbnDi\n",
+       "Ki+M+JkY4zsmePh2zch/jl8IIVxWtoaUY5rH8Ze+04ZjjF8JIYxDCL+iDE16IYTwv8QYfxI/HUJ4\n",
+       "l3Lw9ntefvpQfb/wRR7UA97I76FmpPtpvniC7UdK46r9wJVddv7vGOM+fqodwr93ij/yFFunuXtA\n",
+       "7bP8xMsmh8zW2JiPEzYORypXq15bWXFseFpyt6py4owTu9fsre5q3F9TZBXt7JrhJp1Yk2tZidvG\n",
+       "4UjfQDurqw+WZfWxLB7KbSvCigWJVMdAlAuCqkLL1Lod++pq9qVaFmTqc/D2UGLPkpqKVT0nROdV\n",
+       "7CrcEnTkcmNVUeaMqg0trfn2S2b92F253MSyhqdlJqK6aDCnxE1kBloyE/U4kocojccsh4qankzV\n",
+       "oqAWc2t5kFaCyXujc6/yxJPcfSyE+s/EOHlLAs6+Xc2Jcz8TQljBmRP8+Dr1a6zt0/w6ky3+2QPe\n",
+       "yhy5+WWvM2MKIfz0bR5N6eQc0fkpjp7k1z/I2i02X2U1Y/kptv9Z0/TGedMfXbJ6OBMPe7Y7267X\n",
+       "F4VYkVSqEn1F7MuSmV6s6iRBHoODwIX5g6in6oKamYk9wcApNYXFpKvq0DAZKuamd1Uja4IViW0N\n",
+       "mUKwoGmqourgN0c6ZZNZXhk9M2cEOxKvaqk7homRkQpWdWyLxkZm1pVr7aGRzB1xLh1tWNCMa/J4\n",
+       "l7CiMyvUJ1NFJTespnq1iSIdyVdHQh5JbrNWYViRZ2Oj9l0xFM5ts9sqH6WTlwt+qPDN1WVB2NV6\n",
+       "nE6X2rxxSfDcHV573++mGZmf8xveUMb722qzXDvHCq0aj7/M/jLdhdKUNPY5vsQjB2wnXF9ic1be\n",
+       "d/oJ/QqNUK4xawXfSErmwaZybENJJ52omEg0REOFZYUxqnJBrm8mmurHRAh1U2vGdvElpRj4A7im\n",
+       "7LKfVbEukZmqKuYco3LUsK8vOHRMZlM5bth54I2qbzDPf+6aqBoVU7Mkp1gupVFypi+xusW9nmrn\n",
+       "ls2TR46Ppto5N34yhPQfz2MX3vb6ls1IjPHnQwhP4oNKhCQqB2YvfLcyn4flvPOvf3L++T/9XR/x\n",
+       "93d9XJlq+P1Sv4i/8U4fxO+kYoyjEMLfZ/csX1pTopZXH1xzIYT2ozzzh/lCfY7QNZl8iOSuo5V9\n",
+       "g6QkJS7Mx4uToqFbKQxqBw7ONKyly4yOK7J9rXtft7Ld0Hv6QLJwVjKLYlq1rDDTtV4MVWcdL7UX\n",
+       "NMOKUegK6loORU0zNVVBVXRkV6Fi0TEdB4ZaDtUtW5bPRwupmZEDYzNn52vcfW2HqngUA4WmoCeq\n",
+       "6CpM5da0bci07SuzgVMbcomhzETTTEWnzEjRchRzuZ05fXHfSnxMEYOasSIEE7lQjKRZtDQN8kBn\n",
+       "xuVlfvA+78o4/LEQwjfeDknfG1WM8QAHIYQruzy9yKkB98e8/O3C+UIItYQnFuaEvIxbrP1lzvwI\n",
+       "53oMcu6e4uDDpXdGq8rGf3XMTv0RjVZm8Wiit1u11VgwTMcahmbumYZzqvm6UNkyC3fd01cLhUeQ\n",
+       "S8yMta1bmSMaqyaO3HFgJo1DUeqkqrvhpIkVbVftqethKpeoWVGZJ988pS6Y2FXGLy6bKeYi7apS\n",
+       "BlxzzlKZMm1iVaFjYldHRc+mctJ1oGXJqqA9p7kOJHOT+qHgIEanMmLSVplNdGZjr7WDpLgqhkfF\n",
+       "8QL7N1m5zMJA2rvu5pk9J77OV1e4M+bO/xZjvLEZwtVLnHiynHOYm2stdW1epvXQdZTG0qfjrasQ\n",
+       "QgWxdDOf1KlHtjeZnKKyWDYpzfrcjGyrXGcXGYs9JifK9N6kUg5Lhkpi65pyDHNXiVMMlaObqYnV\n",
+       "+XKhdAcZq2k4mNOU12SaduyFQrRk36pvcoh+XbkqH+GYVGXuv7qEit580FOfK/iObIo2zCwpsZkj\n",
+       "QaEyx2sL9/XiWD2bquSXhGRN3OqUMcuVfWoT9vbp3fToYu5Dn+HkXWpZ2Vj+v38mhPA/vw38sN9W\n",
+       "31ZNM5f1vOVOi/8mVAiqyhChP/tOH8tD9UVcCMFGjHbe6YP5bmu+6v1Wq5/VddRfNyo8zVGblX03\n",
+       "P8sXf5wf3CamNZceW9CfVdypL+ofHxo3busunLH+1brcwLnZ1GxcczG7olFhIJOYOVvUrZu62Dwm\n",
+       "KdZUYlMt///Zu7MYa9LzPuy/t6rOfnpfvu5vnflmvuHMcIYixUWUqM2yHNlaINhBYiGB7ViOEyBG\n",
+       "kps4CZIAgQPkyoETJIDtq9iOlzheEmuxSFmyRIqWSYrrLORs3770vp/9nKp6c1E9JCVrn6GGSvgH\n",
+       "+qILp7vfrjqn6nmf57/MNNJrWl4xclOhodCWOj6/EVxQuGVqQTzPmuibGcs0HRrqa0qtyowEXVOL\n",
+       "XnbH+x26LPjXoouCD2HbVKblyJF9TC0bnDd9l84zboZuOXNBMG9s38C+TCqol5lOXCYcSOJ9kqn9\n",
+       "cslUA1v2jH3bgCSNyjI6yRnmrA54daNt+p4mf+G8s/maqkO8qepV3/uDKlLOb4qfDSG8ovI7+O0K\n",
+       "kfkN/vy7WL9QhX/V/rX2swPPX+DqA1ZaPD7llSvVdO/set1+2pIvzUkXeyZJx1a9JYaOvDUnuqvQ\n",
+       "lZUXKpePpKleLkmcCfGBmATHITG2KDPS1Ve51TJV6jjUNVYvGsqkStmdhcvq9gxdMdK1YqhuXuLQ\n",
+       "kR3RM4ZmgvY5GTHT1NCUmjjR9LpCsKirrYVS09jeOfG5IZzTmjnQtaergbpEet5ZuycI4UgwZ280\n",
+       "dCupuTApxFlpr73iVClzIA9Hyk6T2bFGvmPTyLWXo/Qur+V8+qfwwpu8wn1+6hP8+R2uLlbf115V\n",
+       "HA1dv/vrr1S/XtWJby+qOADvovsjXHycRo+H13j9lPoi9XU2I2+0uHFSlXWP6swusPA55g44XGLQ\n",
+       "qIYkF2Jletav/htt1Ufgy6q9+Z6K3fEM5yqXWKlsTFx3pNQ8l/UWJhKXBK+7oRr2nKsAXcGnVV2w\n",
+       "mboD0ZnpuRHayNB9YysoXTRUN7Kq4k5lqt7XialE34k0nmjGmYXJmfL+JfHRiNXXqn92Ms+D72Xv\n",
+       "nuZzhff+Ix7b+toZXB3y5Bp3r6sMUv5A8S0H1rcPH8TtGP1+meJvO2I0C8En8Ufwj97p9bxN6J2c\n",
+       "Ky7S6o4A9mlXc9D+/8oLD9n7cebbXb1RYn/lwwaTC7I4UmSv6s/fV94YetevsP99xNgxfzTVmZu6\n",
+       "UJtZzOoWyqlhaOuVTQvFsZ00EqvZfamBI4tuG9hUk4lWznkeZ6YuuSB36nGFV93Rt2wiFXR1nZoK\n",
+       "ziwKLijwgr5vMzFWmlM9+Sf6dqVqOiaOTaVaenZ1najLNY08rqrLtk2cyl2OJ2qmYtF0GEemYcHj\n",
+       "9yem3XteWRtK4qFWMlaP3MyiVo2iIDtk8ZWmf/Z9jzm4vGwQrnDpHn/2K0wvkFxV7QXvcRZC+Ht/\n",
+       "EKz7KvRu7ce4cZ0ghI377P60qse/hvGb6oxF/sSzPB81Vl5VrE3lBd1LNaFVc3Q6dbCYWy5Zyri5\n",
+       "0HGcXBf2U83ljrOVDTG2xOFQ0ZoXkyFSsewqbcimO/LWUF4eKkPQVdcwcikm+mHqSEtDTzCTaciN\n",
+       "pHK1GBRpsBQGUqWeh3a0DTwndWLfQE2hZtnMzMSKuiOlQzVtM22ZR2qOTS3ZFB2anPfFRgozTR0N\n",
+       "Q7dM1eTn+q2GU08rZjvu1MbWlMgc2zC0J8RTrWLTcHDVg92Zo+WORv/MMJbGzcfV+os6oxPT1uti\n",
+       "Z+TxQXT1xeg7P1o9Bq8u87EPxrj9pneUGONhCOEf9Pjz3cpvaVAYf54vrtGacfmU/Q6fW2P/n+Gv\n",
+       "vA3vj2Xaz9O4wNxzrH0bTy+x0ac34daUV36A9UBrjVspoVc9wmdpNfxIVhh/H0dbnHYpZxX/tlji\n",
+       "QlnJfx8EishCrMzRFrEYqu7TnqrP8UBwLOpIva5wxdCZlurstyT6alZVXZDa+dcF1dBnS2pJTd9Y\n",
+       "du7K2lTFcS7oqyktqNQgB77mS5KJemIcyl12Ib+kUQz08ruONqb0v43bnYptKyUe0ozShOEaxY6v\n",
+       "cn+oOkh+C5n0NxbfKkbePnyz8UXexJu8kf9PFCMxxpPlEF76LM9/gIcZcUDts2wc8H+d79j/Xgjh\n",
+       "/1nkL13k+6/KVpc1lWKaSeNlZfbA6NLY8fvZXuc7XzjVf+ay0DtzvHxqy0gnLTWlpob6tahd7ihk\n",
+       "euYV5lVtw4FGeKA4v8G07YvGpiYOzbTsOfOYka6hr2g4taCvo6lpoCZTw5K+iz7poeXzx8whElHb\n",
+       "qZG+jmjJnvw87TfzuJHU2EyqKdc1sW5fNyyJedv8tKcVjz0cLQrlRGzMdPIdi0UUa1ErlBbqHB+w\n",
+       "/oCTAUebm4ZXV8zNUtNJVFt7zmxvxh97F//8qfP2+30WP8afCSH8tW9kh6Ry3Lz0k3xvnRvnvLU7\n",
+       "q4mf+28vOjzZwIiwHsLdfX52Xe3fPzG3NjW5tKDTnKpnDfVuYhYXhJOa9ta+7c2+Rit1nF2TzVo2\n",
+       "5gtntZ7TWt1wNjRr1YWwzOweWVMMmaJoCmFByLdlouW0Zq2IhqHUCzVrZlKFPWMP5VpKhVmlegl1\n",
+       "N6KvdjFyA9EFD3VFHaXxuUB3XeJYdCQx0XJipCczMjKVxa40ZE4kJgo9afV8OS9yN9QlprYV5lCY\n",
+       "GMd9J9mauqnxeVZO11gt7krHOYP7+p/7DpNXM5PvzGTNR8rLV6TDZUke5Z056XBd1jw2bU7UB5x1\n",
+       "Wejz2BHLV0IIS+cjNSGE1mX+zPcQbpwbkN1k7Re8Nt131Kd5kfyA/X8Q4+ylyhLorbw/sid44s/y\n",
+       "bCC/xPA5Dru8+0U6ObdXmHua73idV5Yo5thsc7/Nrw1Ya7MSuFty1qB+idMWP3jC1hxZrBQ1SWQ+\n",
+       "pYjBzWReazYyq01dVpUDE5Vqrq5hzcR9GRIXNZTWUBclEkEiV6lCgqrXsqgaFmWqjKFLqq7JQxWH\n",
+       "ZBHLEi3B4TlFdlX18J6oXB1Lwg0L4znj2ciotqm+nVmsvyife9Fs6XkOF5g+YOlTatcnWgVH38Fr\n",
+       "myx/kY3dKpX4bjj/03/g+FYx8vbhj+J/eqcX8ZvgX+IvvdOLeDtxzE99mvwW75urKv3igJ+dnht3\n",
+       "VZEltWdzYamU31jUPuobr5UsJLI4lBUtzSHzrzBrMqsNNQZ7Hl5dkqR17DkJQ5tlMKcvhIYsqZvl\n",
+       "S0qMDDQsy8K8mUeiaMmpXN3YVU2resbqXhNcP6e2tU08NFTXlWk6MxbV5Q4MdM1bseDEIkozO3Lr\n",
+       "akYKO6oNy5IVmcKZQ01reueqmsRYzWOinTgwiz3zpi7v5aaPzhwkcw6ud6T5TFKe2TiZ6Relr3Rz\n",
+       "s5yb22RP12yuzEmyKPTbNt841b428fDaU/SOqvvfPlzl5DGublUGDN9Aslv2NM/M866vU8DEcEX6\n",
+       "7R/mM89U4yNf4eIv8p/lOmu52fKmxbRlpT82bW8YFqdiODbaWJY+7Gj0+vrLTZKOx8ogndXktaaL\n",
+       "8cvu1xZMp10x36u2iLGBfTGbE5NjaZhZmtWs5GfibOZqs9TJ+vZDIhFNRMuCe2ZIdGIVhnZZbuBA\n",
+       "X0tpxaJ9u7JzFVaVa5PYkjhRM7FgYOTIvJmOBbuuOQ5tVRDebVwVLOjLNBwr3HbHkZrENXUtUSMu\n",
+       "eCNsWDJvpKtppOEOds2FiYt1JrOhq48+7o0L847vNOXX2rJRSy3pK5pdIS+V7aDRLMhLq/Ns/xEm\n",
+       "n2b93xj71nn2mSrf5KtKuKfYP6T5S/Y/O43xs2/XO6PihFz+d/ihE9YHfPl5ru/zaJFbF3jPI846\n",
+       "XE7Ir7BaZ25As1PxP77YJk0rh4DHJpWNTTFg0OR0vlIlL0VeTSrb9xMMQt1eOefZsnRazvSSaKxS\n",
+       "19zAktyhDG2PFLZ0rWpJpaYGcoNz4uoTqoLjEK+qmCfXVR+oXHUCT6SmglzdvK4DwZnjc/5Q1RQ+\n",
+       "whlxJrOqnUehJPSbasOmWllTm+6a3R/TmbKyo7kRPfcoWJow6DNbYut7Of40dzPu/6sY4x942jzf\n",
+       "KkbeFoSKV/ZB35xupy9jLgSPx+jOO72Yt4oQwkLKjSG7b/D3VHeJkzelfZWd/JWf5N2tvln92GcW\n",
+       "ekaNDckkNz8spWWuSEqNhKUp8XTJr9y4YmHC4tnEbHmsVx+5nieuKrTCI9vlxO1wwUlIheJYkSw4\n",
+       "mx3Ka0OtkKvyeJlY0T13Hdm1IZHIPBCM1OQKHQeG6miKqtzVAqtWBH1B32smNqU6KvLijsSquvG5\n",
+       "y8SCUt2pB04El9V1ZNbVbRpJY8OrxdhkOhPPEgdrTzquddVmC64czpTxoa90p5pxVToa26kfyhd2\n",
+       "PTmLVu5m8vma7miof7Vr7vZIdr0t305+A0enU3WJG9/Yq929wOqvk2x2vfrEVa1eVG+VpuGAlQ7d\n",
+       "Fcl772jOmnpzdXOzIIhidlmYnXhQC5bb0cmVxCwGh7WgU0SmpUkzCFlHW2rZIzvpE5z2JfU1ad7Q\n",
+       "LHNF+nnDrKk96yjzQ2fpkc1Gai0Qy2icFDINIw339Uxiy3VzhmFMLIzC+DwYPtd2rFBT8wWFFUFX\n",
+       "Q8vMl5R2taWitiUbNuTuuWzRRK6FmYFFXBatKA2NLEt1FXoKhVrMXIuZBy5px7ZZHKinlRNFDcM4\n",
+       "8G14LAT38obmha6wtOil48zwpYH8mWXtfqK5v2eyMNOtF0I5k9RYOWPS5tFH6P0KR1tvdkXO3xMb\n",
+       "q+fjgK/HCuO5imzxdmKDzQ7rb3rJJNSnzE+rUMQ3Rhws0Kgjq/xCVvr0O9Q6aLCccn1U/Xg6ozvl\n",
+       "VsndOk9GNiLtsuoWPCxr5mPd4ijXDwuGtaimcGBsqoo/PJALmHeqo+3LlSGSjompgSMXDM9DEr+W\n",
+       "yPtmSu/GeXlaU9NDIXOkpq2ukEqMXVN3Ud2JsZFoJonrZrP7Yu1EXmbCtKbeGyhbE8VJLuw+4hNd\n",
+       "nspYmLoxN7I66bv2a8wf8uoVHj3L/RYH/7uqxfOO4FvFyNuD78KLMfotyXXvFGJUhuDn8cfxN97p\n",
+       "9bwV1EN49gl+4ilCg3iX9A6fO+WnvvaqC3+SHyh5/AHsevTG625/e6YI5HEs6OuMB9Kkod4o9M7a\n",
+       "ppcOhUZUS6Kz0FWLK07CgdWi4Wo5MZc+lBr5bHFD+6AmX9+zmgVrMh25iS37nrZv2dCZbRN1+xKF\n",
+       "qZ6GeUvWjF0z85rbth0ZWRBcFi1IEAUNF/Scum1qZmSi1JCZNyc10DfWtAKmguiGzAMtryttYd5I\n",
+       "drYgvDJ0y5JHR0+Y7R+5vEwoU9P0ulm6Ly27lsq2ZLBmPsuN2oeu3d2z15mT5rSzqd5CLhnfpP3e\n",
+       "qmMMcsL9qsv8DeaM9HfZr/P0194DBt3EpDUzbb/ED87T6VJeVG5uGaV9knlljUIhBkJYczBecZI3\n",
+       "FftrJsef1X7XqUutIzutRUsh1ZmMjWo9ZVrTKjvy5UQ9f1UMXclgItR2LKVTS2Fe587I5ELThWZJ\n",
+       "vVJTTZOOB2HdINS14gN5mJick437ISjVBKW2ID1XRGSGasainqltHQMXBFdk2mr2nTmzJtFUR6qm\n",
+       "VZ5K1PWSgKFSQ6JUWJGaExxpyiQxM461qrOTFNplX5GM1CSaIWoXNaNJQ1rMmayuu3Cn7/7Sdxve\n",
+       "/jgnxwaNTQuTqFk2rA6OHHfGZnjtPazMOGzx6cDOr7N46LN7+JsUqEcVk3TnNx5/i4iUX6fIaTxg\n",
+       "eo3ThMMLXC9pt3mwwnqf9TO6Q5r3uHmF+lI1oklTGlMmdYpGRUO6k1fiE6FibcTIUpHYz+g3umJR\n",
+       "Myz6HiTRfMxcDjPXfO0jcSra0HdRdMtYoS1aNnZJMCdzqHQkihJtQVN0X9ueoY7ootQiTnTiSCMc\n",
+       "OrGEmmBJplu56Bqrx76ZvrR8QaNR10oyg3LFbO/QIOmb/EP835/iekP6I1Nr7ei5F6tzAe+/zdoR\n",
+       "W9sxxq+8zdfo94RvFSNvD/6YahzyzYqPqqz9/9AWIyGEzuP8uz/G/uL57ut5wtsZ5lMAACAASURB\n",
+       "VC/yoc/wOr4cQljkmUs8/tW2funGp0ZOrrzgeD4ViqA7HmnGhtU7ibLG0aXgxlbbchnFem5x2tNf\n",
+       "rRnnuZjmJkUmTqN2Y6aZ5JK5I82k7kJZkyU1szCSaZsXHNoUVMmnfRNLcWwpjKzEu2I4sWdF4VQw\n",
+       "UkdDy46ugVRfXV3vPG+kbsGxA2cmLphTattz6kxf3YIzDWO0fcUVx5aIU2fxzGn5wHh+T/5saWf3\n",
+       "itmnLnP/xOgDU735xLC2Kiu3TZtTx0XL+DCxat04HhrXtj21VXh9c01MOb14rHhjxL2LzLWZDmi8\n",
+       "zPIWnzj3ffkGIn+VV85YX+PGPoPazOEVDjfmKove5ZKjFvcDe++SN7fEzry8nOrmhXQ2cLayQtFQ\n",
+       "HD9n8PKIxr5e8ZL+c/cspieGoa2II5N8KMmXzeWfczbXFIp1c4OpdPhAq557PCndX5h4VyN6lIzd\n",
+       "lFsLQZG13UsuEqO6PXmYWFHoahrqKsxsmVg201PREY+talgWnRra0dXylEV1UyNjUWnF2C2FGMm1\n",
+       "xTiVm0i/mpIyUaXStGTlmCQRY65Xjs8Nc6fGxUArmZFMkcnjyKxcZDhmWDdNZsLirtn1UkhG1Ja4\n",
+       "uau4fOa4HGjWR04mQ3NHicuYv0m/WYUM3niVyQdwK4TQavPhFb7rK3wg5eLzfLHJ9A4rX2Y0qyQo\n",
+       "bycqqwzbc2z22HyDNx6jt8DaIeNm1dXYPWB7vrL835nnUUXdsRAZlRVPYjEQSva6VXbNU0c0suCl\n",
+       "+WAuYaEoHcXSnfGKabJuT8d4lmmkuy6HoIy5k1CJcRfRE23rOLSso+NIZuJMRXVNZbpaFtVFwba+\n",
+       "XGnm8Xhi19MmoSZ3IsrFEK3Eh85Cw1iOU7moNNZWmIYDWdJz6VHh2lGLhUQRt93anDn7m+T/9Dzi\n",
+       "4Y0Qws8yWPtaIfLVUznHya++zdfn94xvFSNvD34I/+k7vYjfBv8CfzMEjRj/zTbqHxJcf4La4te1\n",
+       "gVPi0xzf5gN+y5vdtYe5l25eVlvdNQ0Dodm2uD02WRjZXk5151ILRU16t6ZxPJWs55IP9vQz6gdN\n",
+       "28sNC6L9WLMyfGCnQ11NG1NDdJXW1G0rnJiZPzc423UaTtUVZjI1hzp2LCv0YpCGS265oI3M+Fya\n",
+       "d6CuYUMqt68m6jpyqi13YsmpCwptwWLk1bDp9DxabxwztbhoFqbGHrg24cLZll/74VvKlxlPZs4m\n",
+       "mUmtp6iVYujqHW+Kk23DS6m05LXFwod2tq3c3PaVy2wfUvyX2PolPjzPuwtO9/hYWY3/fs84DzJc\n",
+       "VXWp93/z2PcK554z/zu/+KN89smGkxvXnfRmit6scmbNdyRX/6X02oHs3g2jL+3KWrftL60bNzPp\n",
+       "dN/oLFM2N42PHlaihb2PiL+8507nnsceO3K9PDJtv5kGcmJ6PHGYXFY/OJXMdtQXc08ftbVHR27r\n",
+       "u5+zNKoZ1TOvjWoOOiti7EuymTlt188j4uc0XDHz4LzYvCu3rXRVdNUtIw9sS800LJ1H5JWaCnVT\n",
+       "R4iyeExcdlSuivmWIqzKk0OSAiuiHRwpw4ki9gWFvTixkBS64YHj+rqZzIl5MwO1cCYmU6NQyuNM\n",
+       "Wi+tLCTyLNUtPmX1h5ccDDLGN8R//AXF80PrF3ILj01dfsjjh+x22BvzPa+y9VwI4aNr/MT7ufY0\n",
+       "O0d84ot85FV+JOOLZ9zZrcJV31bfihhjEUL4R/yLP8fTiyxMuT9iepen3yAdcWGnCqH95Z/kU5s1\n",
+       "83mQ16ayLicliyMOA8M604SzgsMpz/bXfXlzTas4cRr6eulMy9DTh8d25i45KubNOg/UdYUYjONQ\n",
+       "P42mqrTeTXUPLXrWxC3HtrWxoXJXzRTnxPhKzrsl19Q0th8aeqYyD4kjIeZOk8xqiC656ZFLCi+a\n",
+       "WFNXlzqQlzetjcfec0TjwUz33OByLuMTn4+x9/WfrzvcusNnrvH8NrWS1y7wYp/xC2/n9fn94FvF\n",
+       "yFtECDZUvKNPv8NL+S0Ro8MQfEXlg/LN3MH57ZBkXyflfRM1iqRSyIkxnoSw8Yg7yzx+xLDBdGFf\n",
+       "a9QxmVyW7o08yHbtXDrWaXDpK4XFjTOt+dTscaZFTWsvGN3r612vedTtCMXEC0lNb9Ax96itNr4j\n",
+       "fy5Qb6jFYFYmJHuOLIlORXVVw3Z0nhHxIe3QMIwnhmFfzbZpWHfqhrpg5kCpo7AsWjb1wH19waY5\n",
+       "DdGh6AVDIwtqlnUx9UpIHceWGApJWchjYpQmogX1ZMu4W3iueey4+cCDa4/beHHk9LEzw0sHprWr\n",
+       "ws3n5TdbjIfO2m9Y3KDzauUyOVhm6zYH/8PXJXT//PnX7xshhEsb/NvrrAXsV6Zm//S3+5lz2e7f\n",
+       "CSEsXOC/+mEe7NH9Iv/h61pLifrkSG3a9O6Hd2xdbbv5pTdMP3qo92SqvTRVzK6YrD7BzUNar2lf\n",
+       "+VWNH+4b15veKCamnejxAQvZvFmja7g6cJoObGyUFvq00lQjG9haiDY/GT37af7Vj+eOPnBNXizp\n",
+       "JMt6aTB1G3XBnMLEgb6BVOLMjhVRS4zHgmAcUicmji0YS52Z01cKxiprvMzQ1K556exIMd0TFabp\n",
+       "qkQmKW8qww6hipsvwpDymrQ8cmbglsJaONUKXffiVVEq0RXKa6I7PtMau65wKWs6KRI7zZZumZpr\n",
+       "TgwmJ0aDQz6yavrKA2+sn7qal64MeHG58p179+epxXMF6NPv4tqHzz2BFplc52d+nic/zU+VMX7m\n",
+       "t766v+P7JeAa808TC3qvxhi/So6NMd4LIfwvPHyaxpO4yHcWLD7gwja1AhKLN1NfXC+ELt2MNOFy\n",
+       "we0GT25Xib15nQcn7O525O+5pDbKDdu5+bBgsyCb7SrqI0t7t714sWZQdsziyKC2oJEMZeedirHS\n",
+       "sZqU8w1GV7VbylQylQNR3czgXG9VU/eEugP75szMqcVcFk9NwiVB3x0jK46seGhXV4hHUhNFfuLi\n",
+       "3syH+oQVZm0OWlz7RdYS2iu/4bNUhBD+Pp/8CC99JyGj/wLHv3zuZPyO4lvFyFvHv4VfitE3TOL4\n",
+       "NuGj+BP+8BYjD+4Q3kva/Doy5S2WD/iVEEKD7N10Ir/wNFfrXclGU785cLB232Nnu5KQGoeeYiX3\n",
+       "nj0uZby2fGwxnzhNW/rXo8nZ2LhW03tpyRdajwuvXhLvHll7X8v8UWIhPnBvI+pv1qyUQ7N600xd\n",
+       "z5NyZyrJSWXNTSbXMDS1HOY0q0eRh1ITK6YKmZGpK6pbV9S0oPS0hp4FUdOaqaaegR2Jvr5oXRmb\n",
+       "xDWl0p6RScKC1OZ5M7+Yz7z43pln9l63nx2Yteqahz1LecNW9ym2+6RHzB+aHadOj+pevzi19DmO\n",
+       "v8T2T8cY3zbCcwihe5X/4AeZXT1XW2wx94v8udu/0w9XmJ4P9Yt1hgu6Jx3rk450ODZuFxp54rHB\n",
+       "Q/sXOfmZ/GuE5vpt/mKD6w9ceHriQrfp4LFrWrVS3hg5zLdksbAR5yV5tFW/qjaZOBwfGnXPLGRl\n",
+       "lTb0Kb7vlzlu0+4sOkgvyWs1pVSwItUX7TiybNFIqalyFkmMLMjtG4ZVLzvSEi2b03bZxIK+fT11\n",
+       "c7qqFNfcUWw7LHsahuYnp5Yfrtu9nMtrjytNJMmuMj1POA5diXWdsiXJ9s2VqXFo200WZXFoKadd\n",
+       "ZPK0ZZxu2ol3PGrPKUYd2XhZM2vJ4kPF8pHO6LuNPrZL+ByfzfX/Gtt/iXurrO3zkd3KTXWny8nR\n",
+       "PMuXfhPS6nVOX38LpNWqEJn7EZ78Lp6cVPuQN74/hLmPx9j7hfPXXGLhPcT3ceE662PiBcxx85jr\n",
+       "nyKbpW5fzayNg6fLme5SkBfRoxqNku1FRmcc1Hj0OqP/uu3g5zPza2faUutlMFcm0mHTtBjqd8eW\n",
+       "MnbHbVOZg1rdZpwzCjOkBuVYL4lqMndsGltSnYigItQ8UvmMPMC7BBdE9/VdU2jIykO1kAjhaUl4\n",
+       "qPBufQcmrmiWN43jfe8uZjaKyB6d2/Se4/GS6ZjiEgffx6O7HP/S153PBZWA5zjG3i+qol7Cm7EK\n",
+       "3wz4VjHy1vFD3uKO8Q8IH8Xfxn/xDq/j94UY41EnhH/xsSqTZtAgv8f8y9yZ8Qar/wkb765pxZlJ\n",
+       "WPXis1cVb0wVk4H3HbdtnD0ymztWHNHsVeS1hRHr9+hfHloZDc3iilMrHm5tmt59pPs9PcXzJ0a9\n",
+       "RB4TIU7laaa9m9ip9xwuzMvixDCsOI3LYkjObbeDoM+57fZMLouJWmgaWVdoqUtMkLsiWhCciE6x\n",
+       "pKajZWwqV9c2taHuvpmr9u2qyTV1tZJjZ/GKcQhaYWxd1HIkDXXvjdHDkDvsFpbG+67c4rnbPLh8\n",
+       "0Ue/LdX77OtsJsyu8eDdpuN/Yjvbtf0PVZ2mt7WtXuOZZ2he9TUn4Iv0nmXhd1OMxBhHmyE8vMvS\n",
+       "InlN83gsS6dmcz1Znkqax2I2MPfg6wPCY4zTEMLfOtL+y6lrjQVbTzylMWtLpmNhfuQ01OyFAyfl\n",
+       "nGbZ0Z61dGYrRtuHhvXP214YunbEhz5J3uQLP1qz/dy6cdaVxYZQZtJiYpY2TDQNFZbUzkW+ZyYW\n",
+       "5dblenLXtK1q2JNq6OgK2o4tuevYgkwzTg3DRafjq8rTLxlNF7iZGC9Hg6wlL5Y0JntqnjKrL+C2\n",
+       "WTaRJrTSOY14Ig0dawrj0LQfcu2SMmWxzOwXDa24qGXBaj42S46VjSFJppYHzXxg89lFBwdrZpe2\n",
+       "YtytRmXbP1mlHQ/r3F/hi3V2//a00tX+G8+RfhXIdvIbj/8e8BhPfYQfvlvZx8O7En7m+ys33vo6\n",
+       "7/5TlSB258M8mzM4Zu9eZfve3OSl59g/od9IfOeE1bSmeVpKamPTdhQTFhusjpgbUjvgle8ptI6D\n",
+       "Ri1otCuufJ6VynZuWnTMQiJMT4R8qta+6qw4FJO2thOHYaadBAvnrLAqY6qtKkKiyszwTPUB6+Ip\n",
+       "dWUVz6ApKevmtJTl2DRpq8WmJBwpzGsWQShXxOS2SYxaA5ZfZ7iMEbvNiq/SHDBKuXmZ2cvVJm3p\n",
+       "R3nqvRVP5lAIc5+g/8u/3Yj0ncC3ipG3gBAkqs7If/NOr+V3gc9jPQRXY3T/d3z1NyEGMX4ihHD/\n",
+       "Pt9Wo73Pl0teof7jK2o/ds1o2DYuHzndWFdPFxWdR9KiZalfk+RVFkS8xnRSRYCPmjy7zQuBTz+T\n",
+       "VMZXLydieeB9l4LW4cDowquOP7Jsa3fd2cIjZe3M2v3g4OJFu+k1WU633JPU9sWwRMyFUBk5l2VO\n",
+       "aMrCyCQErZhXDbSwLrNjZlmhRszEMBbPJZxBoVAIcvtWECQyqUVR09RDs9DQsyqGuwgazvT0ZGZu\n",
+       "yGShsBh4o8G77rB/Q5XyNR7Ke4+z3/W1dOlf5Wqfoyf5jy/glLBW5cL847ejhdtieamykv91WPxa\n",
+       "BOrviB1+7pP8hefJZsZhJt+9o1zY9/hWaXE319zjpSnq55u+KcQYhyFcHuYWbl+wdW1FOpwpNrpC\n",
+       "UlNLMqedseZ4wcI46nSjXiAkmVqamLvDcMAnnqfzJMfPN60npU4spWHgLK2UK0nMDMJUDTNzEn0j\n",
+       "NVuahoYyF6SONWNXQzAK1GPNQjgx1XFi1YFexaQsNpk0Kw/z5vuNJsFo+3OsP2J1lUmqrPflWRRD\n",
+       "UzQiCbJYiEVl8V8F4o0kMr2MaZmYxro8H0uLtmk41W8sydI6WSGdnOllTdOzjov7p8qN3O7G+anf\n",
+       "qnvjsw0P/mShsTyTvzgz+DsxxlshhMNX+KFrtNcq4o0jWl8hDN+Spfjis5WCP/26nXut5MaMm8+z\n",
+       "8WF+eJt7K1yZVGPZnSVajxjd5eET3Gqx+zcKm88VwvuCZgxqI3qNoBWiemAa2Vmk8Smuzdj+z09d\n",
+       "6B3bNW/+sO9ss7RUTo2bI0lZMykmjjV0mhn5RUl60Th+ySxp6aIhWlToG5liaKKyC+6oBluJKlNm\n",
+       "iGOlXBbJpMowOw/drPyQJmEk1aDIjEIqDUOxbDrOJ3ZiYbkkafDcHh9frXxCwrCKEeoMqz+3+AN8\n",
+       "6H186EF1Licpv/KDfH7gm4xa8K1i5K3h23EQ4+8qQfIdxbnE92fx4/jf3un1/H5xPjr46vgghBAW\n",
+       "pP/ee3R687pnMDGcXzWZG0vXMtPe0LRW15omxEQtlOqH3LrMkyW7XXobXC4XrN3kJD3UfGLNyutd\n",
+       "te2+etlTv3Rq0r7vXnpirRulmyfm5rv2i4F6qIYxl8a37DefMAodSdyRuKMRpuYdmOqYWDEKI2MD\n",
+       "mRG6gjMh7osuoke5SrFvVEvkcnNqxuo6DoykKoulDubPg7TejBR/aMXIvNyiKFE4Na1C2CKPn5A3\n",
+       "qhv3q7We0d/6OT7yTDVknt6n+wLJe9j4AW7XKhsEX+LaJ/lT+D/e6nXrs7VD4+nfcHy3qgp/V4gx\n",
+       "Pggh/PVDPpQYLQ3F9bHv/SRPHNGvBz/93jXbg2X+8ohiPoRP9/ilKjtldpw5ebIQ6yPFfFeYlGKj\n",
+       "MGsuSoqbJs1o2o/6s6A4HEj6O+JZ3zMf5yzlV065+u7g8UFHmvTtpxPjtNR12yyQnvex+rHwWpiq\n",
+       "mxetGZua19I2cWqgG5Jz1UzUNjTWsmzq1KJojaJk/3HSIe0ms4LmWmXrvfNpVj5v1loUi6EYnhaK\n",
+       "BfVyT9EYG9jVSGtaojxUOh6edKwrT0YOsx2Nsqd71jaYW3OvCBbMdELdSZizE4L6EE7U0iPtcQgh\n",
+       "WeEn3sfTzxq/UDMu3mD9s/xYCOFvxhhPshD+7k/xp69USXIeMXnI33/Tpv/tR7nG1YTOrHJHfVPi\n",
+       "u9jn+Aprn2F8n9dP8HIpvDAxeCbTW2BpmBsUpSRUPODLPRq32LrB1gdZbRY292/pNxcdLKfm81Ox\n",
+       "MbE+Kw3znn69rT4eWa+npum2wzRKnbliQUfHmSO3lYKJkUVVebqvIqtOcF9VjHTUbYk21OK8hkP9\n",
+       "MG9WFjplXSO5bS/kYlxDUEzvEYaS8VX93T23iiOzBbIF7rbobnHlszz+SqUIun0ZddbexwcffK2o\n",
+       "axR8cIt73x9C+Mz/b8Y0IYT/Ge/HF74+wTeE8N+rxhvw38UYvxlt1H83+CF87J1exO8B/xR/2R/i\n",
+       "YuQ3wfqystVVP33zQF3rbGCwWposbxj969c8uJHrjPpiWmrfobnCG484Cxw/k7jSrJn1Clk2opOZ\n",
+       "a48N3jcRp31pvW+pXxI47vH0NtlGU9mgeWkmNtuaeUeaHlsfftGDViYJhdWyZkNdluyZhcKpXf1I\n",
+       "M7YkycjUTG5ZEIXyFqEmK09N02P14lCSLmpKz42tRkqXjPRUBklHKkeEkSSuacVbeknTikVTJ3rl\n",
+       "yCyZ6c1YOeDBJvdz9loc/p95jC+EEL70gOebzB3y+gY/+kGGbxYi8B62XudGCGHlrT5YSl57lf0l\n",
+       "Lj7LTiC+ysaXq3CQ3zVijHv42RDCxzj8Qf7Vh/nSZXbmbzho/DHx5TWGI7LP8b1foBVC+OkmrZrm\n",
+       "tZEwV5rVBrJaYjZpOHUkr+Wy6SMPm+vauy1rr2+Z1u5Z22H9hC92WD5kLomuHUc7WbDafs3dpFCm\n",
+       "dU/GmWkciGHRWPTAsrELEj2JVEMpONDQ0IlDeXliOVnU9dCsXHISGxJHijCgeIz+gLXzJ23zlGfH\n",
+       "QnYkac0L6a4QMrHWlRQvCRoYivkt0yRRCx2nBurl0Cxc1HFWGZ8libNkZpI1ZK0oT54yGU8chZ5m\n",
+       "SKnNi7NHyrVbTg7uaJ8WBq8zfOw67/ruysEcfDsPx1w94Hn8Wl51SP7qw0pKAo/e7Er9/nHyFd74\n",
+       "bm6Erz1IZwlv1BjfJr9RHbt6yK8W9GvVKOreJrt/lHyB9j3m/iIHt6O1L8/svXtmWqMfGc5oHFWd\n",
+       "hF6gt8b1QL7Heiw1R0denCdPEjtF0+3QkMZoflRqJ/dl+YZaPjCo78uTRY1zPVTbnGAkcWrNffc8\n",
+       "pSK5vaZyXK2jLvW0zK0qRjN0DOOOvJio95ZlcSa2T6yWhX79TD+ta+ZD2XRF55V1p1tPOX3jl+0U\n",
+       "Pc2U927wHS/ROTdwe3GT4xeRMFdWtvZfj8UJtXXVtOgtXqe3D9+wYiSE8O3oxBi/N4Tw10MIH4gx\n",
+       "vhmq9HdijH/lnFTz0745M11+N/jj+B/f6UX8HvAL+Lsh2IzxG21Y9QeGpGGyNdXbbFo5gIlYe117\n",
+       "kW63pXh35tFox9nVExf2ybaqbsj1l6mFlul7VhWjttatodPLAwePz1sd1SRZphGa0mQqLSaKza76\n",
+       "u5e8cTwy6y5K03krowf26l2H9abGdIgd7btNnY01l/Oo1hzJ41RWJFbKM+O0o5UemUiNzavZEcNU\n",
+       "LSl1ponuzraTuX3zLRbTLXlkWF8wDs9IjM8lwyeqgqRFXFLGoamGqYGmM4sGYpiKZVQeVjkdX9Dy\n",
+       "2l7T+HPHDM6Ja9vOTctC5a39pxf9+pDHc4fIqIrQeEvFyJvcjY/zA1/gfaq0speO+EX81d/H78vx\n",
+       "sRDCx9G5wE98L4/eHBW0yL+L+/d5/xmLV/lTPByf6NZeJ2wosmCStuXbW8zeY/rxT/Hysf5SKf3A\n",
+       "yNUtWod8tMmDv8v8uyjGZPUzncGqWTGz0hmbqw8UoZDkhSxONWrBONz30JlSkEkcG0uMNZR27OvE\n",
+       "oevj3DTL1OOuIk/VQleSXZblW9JmbpSUitoSkWy+FOtL2qEvNbNa3BRD5iReMhlPTWXyWGp0BrrJ\n",
+       "lrmkNIvX9S1Ii7qNWU2RBdNk3qz4gklzQWFH2c61pxPprNAZJTrTI2eNY5u3K0uZ1z/XZPPy1xHG\n",
+       "qzAEtQ16S5UH+q+dX48Zb6vD811e/1XKcwLruMPtJnd/Dp/l/vdXkferQ65+lk9/B4MnWBszzire\n",
+       "yNwV6n+c/kPGN3llxsqYUYJBTb07Z1LOnD02tNgtDCaJYjF1eymnFq3KHMZFY8smZU0zLeXF0Dg+\n",
+       "shxed7UsdXTcNJGZKbSkOgqJwpHSmWpWtaDijSziSBCk2rJ4RWJPVuwpen3T3dLceNvaQqo2Cqbt\n",
+       "3Gg0FWpNyeFl5f11h69cM3ttn2ZL3OoZ/RPe+AnGT7Besp/w4C5HP4eco5JhVpGO38T2HOO9t14w\n",
+       "vr34RnZGvkPlb0F1w/lOfA5ijHfPj0/9JnLNPwwIwSLe65vTAv43RYwmIfjn+JP46+/0et4m7J3x\n",
+       "erBTH5uuHMva2xYvZy4N9rVe7mkclrYWpl79GC//YxWR/Zij5+aV/1HX0sGC1q1MbTIxuPCYwp69\n",
+       "hWg5LciDZNB0tNF1L33KfL+jGQvbyVBsbVHPvHd4pl/bN01OjRq5s2Ri42ii2cmIqVJTYlN9cCif\n",
+       "WzdWs1GOLXroLI61kprh9HGLt1e1vpSI1/atXOJoI3cjb1nJBh6mrzi0KlgRPIOomg3ex5oYn9Yq\n",
+       "Smde1otTTWOtKRcGNR+9ftX28VPyew0++JD3v8GnQwg//WaLNsYYN0K4fZ/1x6qmC9WJyvaq58/B\n",
+       "23GhYoxn+GchhJ85/76AtxKYFmMcY3w1hAsXf0PAV0pM2bzBD71HNjenMTvS7z9ULuC0oLzHyfP8\n",
+       "/Da1Ab8SY/5KCOEKR+9T3Z9+NcbYCyEMOHyNWx8Zeaa356AMYqNUTwuTaeGZT6/YfiI1XTxSLjQM\n",
+       "nKonTM2bmNO2rhtHSkNH5ci9/5e9Nw+y6zzP/H7vWe6+9b6hF+wgQYAkQFIkJUqiFkqivMkaxXbZ\n",
+       "HieWx7NUMqlJTSqVeCrl5I+ZZOIlY3ts2bFleyxbXiRZ1kJZIsV9AwmCAAgCaHQD6L373tt3387+\n",
+       "5Y9zQTRBUCRINAhy+FQ1gNs499zvft9Z3vO+z/s8tok0chTyCWLNIqlJk75YkYaXx2m5OL05ymoU\n",
+       "mwF8PUpUCihVQ/wIA16KjtkmIsJZT3DiO5FAo9OZoZPUSaswkM6ogEAM2hEfnyhOK4VLFAwDGk1i\n",
+       "iSSmM4YEOp6Wp5J2SbZhdTuUfg+YdSDZDBtBmA4dFfcJpBoQ64QdGcZmmCUqpZSIfBdeXIH5X4Z0\n",
+       "T0gT6z0QVhaX/hoe+AXY3QdBFBZqYDdChdXOHfDRJqRy4FrwwiSs2nD7X8JTe6Cd7Cc1NYHM1anH\n",
+       "KjhjOnGnjZc0GQl8LIljSZSkZJhmDBVY6NowflSjZS7gSgdXisRjPZRkHAuXChESWNiUcMiik6bE\n",
+       "VsKU0jxhSTUDjJB2NeKtaVpxG7QAt7aCOeOjuduwshb5IMDw4zh5aFlVgvy9tB7IgC2EJ+SZ0Bj0\n",
+       "O10tnj+D9THCWKcGLF04t0VSD8Pj98Pta9DXgeUMPNsL+T+/2mv2drGZwUiOkDAH4QTtvcw2vw58\n",
+       "aRPHsJn4NPCYUrTfcMvrC18D/kfeI8HIBfGjOG58G/lSnsxtAVPFGuk1jxsf9Uh1YCIBqzdCawiY\n",
+       "hsRtkPlwkur2fprrTUqZBH0dk/ZQH9FWh1K0RhPFOB5WwmRe30pQj2IEHsqIIVqGwF/GNGNkXJOG\n",
+       "1ktNHyIILCLZFRoJi90B9CqDlmdSijSp5aJoJBgMAiJKCCQgJwl2Bg6ndZsg5xFEW2h1GJyFlc/B\n",
+       "rAm+9OEHHTwZRWQYXdXxpEZ4YcuCpuMGa4iqE3UzDFsZXP00tuNxLjdERY2Q9upEP6ERWH005y3a\n",
+       "PTU4zoYn2Tw89Az8agAyDpUSJI7AYAEeUEp1rvaaXc39AbiwtgbpUS5aMlih8tXeKaLpJKmOgeEM\n",
+       "kpyPUBufxY5nYXYADpfBOB2q+M6kRO7ZCfdNhPvkPBwUkYeAKqz+QWisuLbfJqtBoQXKhrufAtc0\n",
+       "EbuNE0/TYJyOxGmxjoNPBg0Tl14xcYNhGnodL27isoWIVAkGk0xaHbxInETVxKykaE5UMDSfVS+D\n",
+       "MpqIWsVXJaJK0TI9TNXCMsroqXtAFlH+MiORKAdtF990Eb1GQxvE9H0KkqbjZHHN5TCwaMVgsUhw\n",
+       "015oCKI7+I00WjCB3zxJs6BU/bsAIjJzBpwoTLXhltugmYDaDOhJGDkBKLvQvgAAIABJREFU9xNm\n",
+       "tzcDJox+HD6xDNuOh78qx+HBn4dTvw/nfgtWfwFG74QtbbAcqN4OdzphY70Tg7gON+rQuAFeGIDk\n",
+       "ssbayggqkWRYz2K0M7iLc9T2xZkMPPxYlKYxSDLoUNL6CGQbIjU8qeDLMEnZgsE6AREaMoalejFU\n",
+       "lSVlgT6AgY9PC4cYLtuAOBoG4s3i6wkQHUcsfL0GTpSeRRdVqNOyYGzbLOmBJIGfxG97JIxltEyL\n",
+       "0jcewB/cFcr7B+fBPAuHg9BjD+jaFsFsNzjfgNaTcKQNC/dCZACsFSh8Syn/zCat2VvGZgYjF66X\n",
+       "EGapXtXmJSKfA3qUUn/9ejsQkV/f8PJRpdSjV3mMbwc/weadhJuJ7wNfFmFcqYvumu9mKKXOich/\n",
+       "WoObbWJjPrvOKIZWIObA0Uko3wx7MrD7CzA3AZkKfOhYwPea/fjSZnq4Str0acfnaGXj6AxidxKU\n",
+       "3Da+4WLrPWSwcQ2XckLQdQOlDHzlUNAy5PUdRDolTMOFuNDUl1mKWiQ8j5Rfpe07zETG6KgOSjlY\n",
+       "rKOpDoMqg4ciSplSZpXOrSVip0HqkDxkMBDozH4wTWBqGIZLRGp06OCpHmAI7DaYFjgGAW2CToyy\n",
+       "7xJRBr1NUMkcEvUZTsdIrfq4UYvKaJKVdJLGB9gQjCilFkTkD8vw0Shs86FchL/yfkRHhC6ydwju\n",
+       "NWDQgcV8SBY9ey3WXES0CNzaBx/UINWE+hMw/slQ8a5jg/4UbFdomkl/vk27LwIOCDmyS4rqyAxO\n",
+       "x4JzVXjSDR2fJ3bB/Z+ChRj4HYjm4IMx+OQQPF2GYJ71h1rUT5nIwQi23cbepyinoa/WpNmXoqb3\n",
+       "se7vISYayBSBPk2bFg4RdN/ECEz6/SjrZgstvkgm4eGkXYi00YvrJGci1O9JYmoaORWn6pVQmqB0\n",
+       "F03ZDJkmPYGFplxGqNFvHOJcYFDHZcRPkbQUHdWmHq0SkRWq9NPwxml7DrqxCJIiW69iD6UIrCaJ\n",
+       "uoOXdInlk2QXB2jlMpTcC/OslGqJyH+x4TfvhkgNEisQ5OC53bBchNtF5DGl1BVxfy6uY+xOyIxB\n",
+       "swCd45fsZzvsyMK2Dd1/vR3Yb8PabVA5BXvH4VNPh/SaB/vA3wGpCLg6aBEwXDA0GA5AVxrTe016\n",
+       "Dgi4Po2KQo9GaGlC04L+pCBGFF8c1iXCKr0YgKMG0VjFkwaWeJjKwZc460EEXTQynk/BnCSKRgyP\n",
+       "sKfGxeVlIAZBCnnFGM/EUXFiegqjtETLz+P2wmgTdsc77Fy0aEYqLCZ8qm04WICHt0xT/LV52KND\n",
+       "tAVnCZOiukj2M7Dtru4tNhDJPA6NRy+07XYzJC8AL4iIvhkPAlcLmxmMPAP8c+DvgI8Df3rhP0Rk\n",
+       "P/CvgM/+qB0opX59E8f3liGCSZgZeddpdihFR4S/Bf4p7y6+y49E1yflEZGRPaCnwkBkJQ3VW+Cu\n",
+       "WpgszZZgaCfkB8E26wwvtMhvGSBWqbK4awz8gESQQ/NrOJQpm5BWVaL+ICQMOvYk9dUq5tA6Qdwi\n",
+       "plLUmv1o6TJG1EbTdGytF1OzKKlVzokCw6cidTSngyERAqXolQjjrkJpa8ybAY7WYlwsegwXScOL\n",
+       "t8COYy771xT1XcvEhjNYbkBL66DrDh6j+E4E7Ar4W2Clg5ZrEdhVMh2XtG2R8nS8KHg6pHQfFdWJ\n",
+       "2B6JhE/a9HB2XGYOF4G/eDPzHRO5/Vb4/G1QHITlZeh5Dr6oi/y5r9T01V3d1yIDn74JPnwzFNJQ\n",
+       "nYP+p8D8G+jtA+lAsA6PxzBvMkg1q7gxhZNMoLk2gbaOrtbgzx2lvnxhnwMiB28MzVd8gAW4eRwS\n",
+       "EWgpcHsRrcDwv4Rhs5/UskZVb3K2UueZbTaZqsdKsodGsIuYiqArH6ULQhbRTDzl0rJN4lqbpvJJ\n",
+       "dpKk2yv0LoRmgH064IIz4THhJGhGdZoSoS+iUdVT2Jwgppv0+gGG5jOOju0liTsdIlGPWfFoqTpB\n",
+       "TOjDIIZGX2OVs7pFXhM0IuATEmrtEobbTzUSug6nG1ESxQDf9CnHTOxXGaYppebHRU4Nh3wcbQQq\n",
+       "kdDpnn4IzkAPV0hEvoh7PwuD7fCB4cS9IvJlpVS33CbJMMF+KbItiPVD/22wzQPfANOBsRNw7I7Q\n",
+       "OycjoAO1KCRcqBspIr0G0XiUyRbkzSRmwkHsOP1zgywES8xGA6IGtFQPhvRjKIUvDp6ApmxMFTCo\n",
+       "WgSBhy6CrVxsimhajAQx+gOFEpcAiCqFyBotNYHpTuPrY0RtH709SLtp0NGaaKZPkKmTrcPuGKRS\n",
+       "4OqKVMPn9jU4nIBYAEamSyB/lX+MSObjcPAeuHMx7JKxdHjyk/CiKyIvEq5L/UKAdz0HIrCJwYhS\n",
+       "6kURsUTkceBFpdRhEfkdpdS/Bv4jMAh8X0RqSqmf2qxxbBLuAWaVYuWdHshbxJ8CXxHh3yv17uTs\n",
+       "vD4KD8GhXwbDh6URGCNs4ojMhJLPvUF44i6Ndtg38yKr+6KoPTvIpNoEotCJoKkkeIvUzVV0T2EG\n",
+       "FYqxbTitDtGEh+5Xcf0KtuvQMNIo08TDwA8UzUBjTOsjRomEyuApDQyDut7Gtso0/CgTcUVgeKyL\n",
+       "ohEkiJd1MusdlA790zBhwuJ+2LLLIxeBqWaNsjnPtDlJYGuY0Q6+4YLtgFODhIYrHhJvoNwlsKE5\n",
+       "4OPHS3T0BI5tYEQAO0CjgGg2ibdcXhQRcxzu+ygspbts/AmoRsArw6dF5MxmtgyKSM9u+ODH4PwF\n",
+       "i4AbYS0A+QEcPQmPAR2llB2V6P5F6vdtZeBcnU6mSju3jBtfJnXEp/PAxv0akIp3v08rTH2P9EO5\n",
+       "Gqr8xk8wcLtixyBEzQZmkCJRjOJHB5mXBn4PBG6GwPRpGS2iARiBYOLRIUrEh1QQIWovoOmK3R3F\n",
+       "Ga2XelzHbbd4alebrRkYMcAxalRUP3XPJaWqOPoKHk00pdHEo0dptMTEdIU4PgmxyBk+Hc/EwyMi\n",
+       "Pp4fJ9qwMdMGKp9kdL6O1buMN7ZIKqpj2ooWo6z5DokgihER7FYHe7Zykev3CnxY8yC2sQwWAJXw\n",
+       "jv823Mrv7GZntwFDWXjgcyLye+Hxo9ZhRUIFhY1YzUK1CrnPgZWD8zYYS7DrJZj7IRz/8ZBbm7VA\n",
+       "i8O8rtGQASa0MmU/xUC7QTuZoho3MWM+yhT0aEBe6yeJhhVEiOk6MVH4dAjkPIo8WVUBcQg0kz2+\n",
+       "T6Ct86IyqGnDpDFQyiVQgrhp9HaFdKqNqz1Hr5OhrXaRWIkQ+A7tdhUjV8dIu6CHBYTt87A2Br4O\n",
+       "9QFCHRofluIQvCa4F5EYTN0NH+gGIhDK2t+2Ame+CMNL0B9AVRPpfREq31FdVeLrFZva2ruxnbf7\n",
+       "+l93//70Zn7uNcC7tURzAc8R1hg/BDzxDo/lqkIpf0ZE/zNY/wxoo7AnCuMvwtQsFIbCTn8zgMCA\n",
+       "0/scDnoRXo6A4XgYArVIg5aZI+b1IK0qIwsaxb4ijlUlmU6RxsKsOeTmhNJEBT3ej66lSQXQ0nRM\n",
+       "peMFVTp6Gl+2YEsPcT/AIIZtPc+SX6GhHKKmRpMEPflRdh730VWLSN1j7CjUbwoFnBabEG23SAQJ\n",
+       "RNUYdM7T0pK0Iz4S7EaVcmBUoaeF4hw2yzgG9CYh50C2lOdsT8BSZpiYpRH18jQTBbKnofLC25jm\n",
+       "XC9E05e0BQ6HfIJxQt/1q8ozuQRDY+ECvirgGYdyGnbVlPqHC79zcP6/M9g7i9S2pIl0msTrZTIL\n",
+       "PoVvKaVeZWtfgVOL8GNboOaCaYSGKDINqVUStxXp2ZskjkGvo5O265QnU6RjI2hrfZzrzJKMNkkl\n",
+       "+lBuhKivcDQLRxUJRBFQpRVYKCwSvmI6PUqlM0Zvw8UebxFLLrGUWA9Fy7QAnBoDHY/AtIloPgkr\n",
+       "gqV8PDeKpGPoQQfD9tHEA83BNAQdn5LvUtJ8EuUmxWSaTkWIN22izWmC/joHzuuMmgaYLYzgBaaT\n",
+       "22if8CBdhdw66FnYKyILXcIxAHl45DD84kfAyYLtgbwAW/Jw4urpiYzXoHccVnsJu7fm4fwMPLMD\n",
+       "9q9C1IPZQTgch6m9sGUBIgnYXYHiFpiLwCefhT/PQmEYBsfA0UF3UwxrEVAarZiJ+DDoNxC/RtN0\n",
+       "8TQN3zuIVUri9gcYqkFVFfCZAnsa3VxiQDR2+TWU4dIWYUUXskGDKDq27yJ6BN+Nojsm4rUJoiXQ\n",
+       "6gwWFfcVFCczJc73D1ApW6TiJn3tGDp1iMGgEZaTcwHYSUhpUB2B6Q5wFEqXk49IQlrCAGQjzkzA\n",
+       "1A645wjE3TALfPLH4Fi/iPzx9ZwdeV/07AohghAGI++2bM4rUAolwpeBX+Y9FowAKOVPi8gZ4AYY\n",
+       "/UWYOB82qvYXYcaCUk8oCe9tg2i/R0oEo2aQ8DyMHhs/WsSJlXE7TaqiUVgZIZkPuMXoEGg+vqEj\n",
+       "gY/0BAx4K6x7k6AlyEYcNL1D1a+QZhBLBkn4CsdXuE6DwIoxnoaBehytY9CpWzRTDoG2RjVnIxFY\n",
+       "v19YvTlL3R7mjNvE7y9TtJpMlKMk4m22uj6LVVjN+ej9A4ihg6yj3En8H0Dj7mWGZmCqBMu7AsZj\n",
+       "eXKqyLINvZ2A5BwsvgjtF9/GFLebgBc6bb0SELTAtMMAZbNbBjsXOjw2ohFemV/FTVOhnPn/UoD7\n",
+       "C/h7gRrkH4P205e+34ZjR+EOHcYnoboO+gtEbyoyjk1/DIaUhZ1QrKko6SBCQneImQFoNqKlmCqv\n",
+       "4iYCKvERHKXQnQKa5qJVFokbMGGaDPk+7SiUomt4fht7Vwo9nWOyNUo9ahHxbPboARXNI2+4BH6S\n",
+       "ljZEy/WxKvPkYz4DukOv38HRAywvIFbXUSaoiE+86RNfB6/pMNiu0/YszqdXaCY77KrA3Us+AT7z\n",
+       "YzBh58kGFsdu3ELQHMKa/RTeY+dgz1MwJCJ/0G3ZxVfqZEzk60X4VC9Em0AJjlRDm4mrCAVdrZtu\n",
+       "R81X4al74OW7Qg5IdRn8FNyowZ55eHEoNIkbaUFzEh5sQvOPofE0VH8Vpm6Csa0+9S0Wq4lRrHKb\n",
+       "fCJJVO+np9ogG3MotvbjOlPQdPBjffhOAJGTkFoGq8aI5bOdBlFN4Sdhp4K2Fjr+9otPf2WdZipK\n",
+       "x5zEQ0eCKkryBJ5LrKhRDRxSahndj6Bi/fRaAYa/SCdWZvs6HJiHB++B0XWI29BOhCZ+TgDOGAzf\n",
+       "J5KMQ/uJDcFEHWoONCOQ6p5vnsD6jTC2DsqAM/dAOhHKwVR+AfKRbhnsqlo9XC28H4xcOfYS3tne\n",
+       "htTxdYE/B86I8G+VensaEtcjuhey0zB7FH54C+yqhPevhSWYbUFPGvpTsN5rsb2wzkxslERgkCkF\n",
+       "rCctRApUzrUp/q86tMr0/JZBW3dIZKAfh2LCQe+FiWqD8eYZShJnqR/i8Q5tP0m22Us+oqP5AW7d\n",
+       "oe2uMJyOMOyZaH6Z3JE4btpBHzzH0mDAkAW3tMFK9SDxKCk9Qll24Fcq1DMzrHttts5FmU0EKFUk\n",
+       "3bHwhwRpa8SsJIlWjErfJOZKjeMDTRIWqGLYwerrAcUMrJ2GzkNQOweZO0TMMninlVJXVLJRSrV6\n",
+       "RY4choN3wKJGGJg8D2MVePAaPH0tLkBxFvp3dNuObdCPQV8BvnuZ8a4BX34jY7Bum+SfPAq35eDm\n",
+       "KpRcBm5PEMehHYOy2OxyNZZ1j0rMJK65tLSAprJItGP0pGL0nj/Pwtg5amYE24eBRpvWvE88A/1D\n",
+       "DpJUxMw4O1yNuu9SyNTQIj5lv594I0ot4WN6PlHRyOuj1LQe6vY4RnmU4OxRir3Pc3xXk7rhMroG\n",
+       "yVVoOR7pdVjZCv2PAInQGmZk0eXBKZf2X0DwMZCfgfkesAQaCdi5pDOdmyIen2LkZYty3xpL2/YT\n",
+       "TBdgMg/budixgRWW2o8thkSO9tW/qZ3rg9KSUqqyYU1swhbiRyD7UzB1ALK3Q8qGI9tg8nlopOH4\n",
+       "GOTrcOoB4Pvd8/+34MRuWPppB//+FMHoBJpV4GTQINMzRzpTx+pkKDZ34a/GQZ2DXaNQjoE9Dtkj\n",
+       "6HGfvqjge4qWD/ucMHMRJKAHoaed5pyt04l5ZIM5PEPDS7RRbh/eyxpqdZ3MMWjerrNNXqaxN0cQ\n",
+       "sfCjRaae8xlNgB5AfxPOCDgJaMch6sKBRfBbocLwc/eF4nvh8a2UckWSD8MTPw4fWAuJvasZaOSg\n",
+       "72FYuQWGDfAsKKUgKrB9D3gfA759ddft6uD9YOTK8RPAt97tXAulKIrwLeCLhBye9xyUUoGIfA2e\n",
+       "Pg2nPgeRneCWwf8OzMyB+/PQE4Hh5BKa1eJsNktHdwmcMrrTwvsPSqnHRUQr467ppO+Is6MRCj8l\n",
+       "GzCTC420xHbY0uOwuw0lDx5J+5S9Mp4aplN0qMUtsijGDJ+0GRBLgHGwQ2QOjIfhyEG4tQl2IsbZ\n",
+       "qQx+J8qg0aEV6UWtJLGddabH2yz1tkhbilhHkTT7aZWmSNc7qEQnlAQJojTdOFapSexpSHSl4JsG\n",
+       "nEjA6pdg6Jfgw3tgyIGKCafbIvKnl5Ys3ggV+N5zEJ2HfT0hWVTLwzPNa5Bp667rVx6Enz8F4wlg\n",
+       "BVQe/jH0Knrd973hOdu9wT4mInMaE/9NhJtbQs5TOBJQMxXTmke/qrHcbyLozGtR1EtD+NYi9kAF\n",
+       "a9JmZ0TR17LpAKcT8HIE9npw0yHFwgHoiyvaCQ1MjzUvw1CtzarYuLYQCTSa4lJOZ1ljiqDcIdKq\n",
+       "0HajIBNQmaXaWKD1LDQsaDkwuACmCccK8PSHYUoLv80TNVj5i1BxOLkF6gqa62EAY+6DbDPK+ngC\n",
+       "vQUQJVMtE98KrenB0BWw7zLz47LB7PDt49GJsBmrbMJ0C/LfvPx25q2w/za49zzMDsNQD0z6cPgg\n",
+       "3Psg6Ofh6xPACxv0c1zgBHBCRL60BP9zC/0nkyQjLXxpUglSOGcmcactGMuFXVhzs9A7ACyDVAlW\n",
+       "NSItH8MDbwxED+XW24Dl6Zimi2X246kpGlYBJ57Adj3sSJy+uEtgrJMoN9n7A4uTtwhmf4fdczbb\n",
+       "j8PIKpzbC8u3QjMDcR9u0GBFwVYLIqlQ6j3mwT3zsHKniDxxsXzWfgaOurD8MYj2Q7sG/tOhOJ/0\n",
+       "wmICvAHoVzAYgfZ2iP6kiHxvM7Rh3i7eD0auHD8J/Lt3ehBXCb8LfE2E31SK67aW+HYQ6pDkJmFv\n",
+       "DG5+HpIOnN8Dh3ZD4XHIjkLsYMDucom0W+JEHIbKcGYBeLK7j0Bk6KxH31CDTBuiVmgdcnwYykZI\n",
+       "ko3noZ2EJQXRNZ/l54u0x1dRO2Lk0lYoiJS20W2fPgVGJFSVbE+AvwqJQ2DrOkEqHRpaJVuY8RaN\n",
+       "WATlx1FKGDobwa/GaN8eoJvLtPURooFDxIpjrrtEUgFtzSYXwFBXqMwXODICpW9Dz8fh7l64eUOr\n",
+       "5Fgv/OPnReT3r4R02n1i/RsReZCwY7H6Vts73wqUUiUR+d18OPkxIK+UehtEykvR/9kIo4k42WqE\n",
+       "REeINJvEt5jMRBTresB63SKyGKNeysFSFEsdIz9gsy2mmKiHRmU1DSQNxpbQ/6Q5FVJRelsOIgb1\n",
+       "WBRNaeAqtESBmlXm5vUonajBGZWk6fnoxWGCp3yCxjTc7ENa8PMa5SDgprOQsWA5Dc8lYD9wxzdg\n",
+       "qS8kQk4aUP4k3FqAXWtwZB30LVDrh5QObcOh6fvdYETQlAolikdmiBy0SEVFMiloPLt5a/v4X0Ny\n",
+       "BDrr3Szd63CN+j8ANxXDpHTvDKx9GLa0oDcKMwNQicHKcaXU6wVKroMZrZPRmxiqQ2ZNZyBQuLFV\n",
+       "Tgy26aRrEN0K367B+DzsW4LGMOrwCZb2d/jQ87DqgB6F4v6QezbVAjvpcy7aIXBjxLVJfHsYvb5A\n",
+       "LFUjOtygbUPaBsvwWFuGyl9B9SOQK4aZ2m0vwyM7YSWAD/gwUAyN+wYUNFOQLkBxAIYLYVAxkyM0\n",
+       "/r0QXD8vIocJVeYdMG+AQ78KgznIZWGHC8s94XHSn4OlAUInietOgfv9YOQKIMIEYery0Xd4KFcF\n",
+       "SnFYhFXgx4HXeSJ5d0NE+uGGO+Cj5y96XNy4Bv4oPOjAqd+GlV+Dl26AhA3RdZibg7U/AIZFcneA\n",
+       "VYc+F9JPw8oeMCNhZ07qGMzeAg0dcgrqDdCXYGABNLdOJXICQ8ugmyZRs0JNBWyzY2TbHs2kzfpO\n",
+       "WKuBNQ1aA7bUHAq7AuzhHFreo9SXptKyUXGLXG2AYt8eEmWT5Mkmxq4qRvoUpfQNTJ7QENehaq7j\n",
+       "nKtTWoFvToSEuOUYrB4H6zgMfQZuvKQDbKoM/eOwdoE0eEXotlSX33DDTUD3Yrz0hhu+JZjbkiRL\n",
+       "AbUhRVwUXiyN4zloOsxLls6qUG+uYv/hI+Cl4LYObgvWXLBN8JPQ9GHHacgPwWocxmIhrcY2AjLN\n",
+       "FieiinjTpxWxqHo2QVExrXWoKig+WEPRj1coYOwuo39mG5qfwusU8CtDFMrH+BY2PStQnYWeD8Nt\n",
+       "Tqiy2dedk3N9ULob9p2CbBtueRJO3AbOFNR6QXc8ZH4elUhjxyM4EYXNMfo+tkiuDp8swNrdcPgW\n",
+       "EflDpVT1R07ZW4BS/gnC7MUbQItDtNsN0l8G/ylYuAmKGTg/ANYDUL+srYiIJEfhX42g37+FXN1E\n",
+       "nHWs+CyFoMyYtk5PxqDzzSUYHAlbWAor8N1iqBi+CmsaHPqJUHT21ACkapAtgjMYsGbEyfktlHme\n",
+       "FXMcx28iGTAbNRyziorDN3dAvQPrf6dUcEQkWYfSJ2FYh4YJlQCGnwRtFzgeaAGU9bAtP1cFKw1B\n",
+       "Acoal+lc6p4HFzplTopofwDrfwa3JOBcBIba0NcMH5YmIrD6BeB33sJybSreD0auDJ8H/kEp3Dfc\n",
+       "8t2D3wP+B96jwQgwCGPq1VbkAOMlSO9UqvZtEfkJKO6E7H7wa9CcgYFPwK5JGLOhZcCR7VAowu3f\n",
+       "h1YS7CjENPjBGGTL4UXEdKDWhC3TsPKz8KGKRf9hi9k7DIaSARYaS4ZDM+OjYsISimwxfLh/+hPw\n",
+       "8QdcRk6uUt7vUs5NUF0Zxc0/jbHTImJtQ2/3kGiAaircmE7f4BIrHKI0lsAWi9rZGvwRNF6Ao9sg\n",
+       "+VkYGIedu6D4b8EfA235tVOkvfLH+7iAYMnBvbkPd7nKyriHmUuBC8v6FhZP3of6ewuMb8Dty0r9\n",
+       "RshPSn8WDtRC0zYlsK8Vtpg/m4IgD6ezkFKwkADNUZjrLW44CS/mYP00xAcgmISJFuztq7BW/SEz\n",
+       "X+ghot+E0UqiqxJu3MJv3U5zvoKTniH/LaVUXWT8M9BzSfainIEhBU4kfD3chMzTcKgHSgp2WpCJ\n",
+       "5TmZbjE3mKHdUkRVg/HpJh88CgPt8D2MQe0DhIKJ7xBqx+DcPXCgG2gNFSH9BDw3DoX/+0cFSjHY\n",
+       "vx2m4hgqjm4DDBNvt2gnwG2tECs48BdtqK3CEKHOTKHLO8mC2gIL3wC+A/l/AXv3gJeA80GAajj0\n",
+       "p3TQl1nDxRCfQG8Q15vkVn2smVD3ZLAF5v0iMV0p6+lQB2RuANBg5xdhKAOxWGgGGHVgrg/GnFAB\n",
+       "Xu/AoQkoHN/Ip3k9KBVMi/T+FThfhKk49DfCY8D1YXgBem8VkZ43s69rifeDkSvDF4D/850exFXG\n",
+       "3wG/IcJepXj5nR7MJqATunJeilocvCK8IgZ0uvuDSOpjcHAC7py/uH1PBx65J7xYxKaAFCxGoH0S\n",
+       "Gi3oXQ1JdTcX4bm9kIzD1pchbgunfB1lBfQmBenAQjzBOsOojoObKmP6TYIa/O0eGCzWKMzGKGVr\n",
+       "OJWjYNt4JwL8uzJkulkNUWnMOVgP+rGsBdTDJVoPE6oyBoBA351w5wAcmA0DsUYEvnkrvLAX7tjw\n",
+       "JLqShvUyV8l/5r2D4vdb9NyVYjTXQ2TOoRKJUtcGWKnejfpBCpxUeIWfEBEdKENlBvK7wpKIIpQj\n",
+       "P98LrTmonIboXkgCNQuq8TAFv56Fsy/DthYkBuCuM5B2oNADGbVGsu1xKldDVAPd0uiZj4AUmds6\n",
+       "hFucCXkddbBnYX4LbN+Q3dK9cFnTG56mC5PQF4PIizCxFOWZjw5QiYbW9c7cMr39FwORC5gsQWYv\n",
+       "72gw0nwWDu8HbxzGq9CKwok05L/9RhmbLGzNgaPhdFyciEnEAcghaplazKSCDZVuhuGVzGFS5CM7\n",
+       "4ROThO1h58FapP4d8F1w9sL+J0CL2EzfqdOOKHq8Jba4iqG6hx/xKSRgpQ9+4R9DPkgjAo/8tIje\n",
+       "UEqdJjxfERlehriCZQ/6HOhLwDJwpA86JnRWoP4NqF5B51LlIah8AYwaNAwwyqHy9Ok0TFRgZQR4\n",
+       "3WBERHoIy5+la2Wo934w8iYhwjiwm3evw/BloRSOCF8C/g3wK+/0eDYBCzC/DrP9sKN7w+0YcLwH\n",
+       "8v9w6cYiYkLvZ+NUki5PRT12rMJQE3aU4KVpODoauoimVuGmBdgeg0O3hbpYwxVY7IG5rZBZBl+F\n",
+       "CYftizrnxz3cREAh3UdLTZGtmehGBG9lHJejRPUW/otw7O998JpwZwE+YIBRh4JFPdHBzCQwW4pA\n",
+       "HBrZLLWzTbxl8B6Fkbtg4LNhHXrFhZ5BOPjSxYRH2oGDL8CTB8Lsz1A7rLO/HMDKn22mSNm7EUr5\n",
+       "p0Riv71K+VeiRG8wace20Vo+gPXQMBQAVkMyQH6D4d/vwJP/L5R3QToNjTjMAs0HISmgHYPWLugt\n",
+       "w5YGTMdh7VGIN2BbBhwTst3AYagCq5Owu2GxFFFkZ6MYtkMn49PsqSHpElhAt6Ol+DA8+6sQDMBE\n",
+       "OQx2lmOwfAZKsfApG6A+AUUXdi4YzPbvoRpMkjvuoHp6aOV78QZP8MRtDX7q8YvHTisK/lUv0VwJ\n",
+       "umaFX4LKAcjtDk3xysUc5NIin2jCSaXUZUUo27DugJ/GKbUo9Jv06hEibh07ZrHqKirfvvT4F5Ed\n",
+       "B+DT93WtAQDykPo26vYlFl34oAMDHShFocf2seodRgwY1YSsr2iyZLMsAAAgAElEQVQ0w0AzeSYM\n",
+       "RCA8Bw+UYeWjbOhSgvx34PgXYcu5sMW3vxfEgehTsPUYzCVh5fwVipadgaXTMLMDpioQCCykITIN\n",
+       "nTYXyzqvgoikoe9zsGcXJAMouiKx7yllHb6Cz35LeD8YefP4PGEXzXVlu3yV8J+BGRH+nVJcUVfF\n",
+       "9Y6ukd5X4MGfg5PjYYfJCrD6XXWJbLmIxAbgFwdo39pHseWyziJnb1zhwPOKm1ZAcrA7D/ed0Hhh\n",
+       "spfDt6XxzTZ+qsbjcYuTDQgq0F6FgTthdgJuWVcMrQVIJsJMkOJcbDd9nomrFO5qhsSsIkKO0lgL\n",
+       "V20g4H2vSw41lFKWiDySZ+XfJSindJAIVqdGMDoPWaGvV3HfHGztprAPT8LqTdCehdQGQuCuAjzx\n",
+       "Mjz6ndBevV2E9ptK/f7XCXt5mMKJLDRsWLXCx9cmwBz0PAI3B7C0ReTflOAQcBgW/ido/T8w4EH0\n",
+       "NNxyNNS16dwOH/06NA5DdQqCGIx4MPccJO+EVAuqG26IQihj3jaa5EoLNDPjWJksTjqKZ5RwvCbs\n",
+       "iIQ1voJSallE/ggqH4H4TvAqUPxL8Nbhuz8HY+NgqvDeuvsETFSzPHnnKMmaICh8SUO5l3a5l8pI\n",
+       "g7U0jDZC7YoTvVC4ynoiV45up9MTIvJ0D/yTXfDJrdB2QT8N9yZFvttS6jX6MQ04fQL+2TAM5WhH\n",
+       "67QV6HYJ1TIJHnEuo5WSg08OwtZzsFsP+2VnhqCwC/qWWHkEDu2DZi9YKSgL7MtDwoPzacVSHTwt\n",
+       "VFHdeklZdKAJ5sAl32tJRH4P1j4Go9ug7wXoWYCt+XCL4RTkPwocu4K58kW034Ol/x6CAHQHRlfC\n",
+       "NuLjitBI+FUQEYGBn4V7xsIHLY0wm/PDz3eV0mfe7Oe/FbwfjLx5/BPg37/Tg9gMKMW6CF8l5I78\n",
+       "2js9nqsNpdR6eLLnR4Eo4dPsazQS4nD7rTDVj3usjNoZJVPJ4RkuRw8U6G1AwYR7ivDy8HZOHNhB\n",
+       "shrBaFqIX6PYP02hWlbqb0WkD/KT4Wc9PgD9ymKtrXHSG8DzdBoZncxanPgyCEKASSsClROXjNuj\n",
+       "6wGilHopKvJ/WbQ/3wN3JiErsHwAynNoN59jJrDZ2g0qthVgRYf8FkhtuIAs5SA4p5R1iPDm+T5e\n",
+       "ByLSsw3+u3uhMw6nPDh9BA48BB8x4CUbdtwAhZth0QP9FHz2KOwsEZ+HD74ABzbchJ7LQFqD4jCM\n",
+       "L8BAt5QS7YGXUuAugpaDshfyTVJuWG0LOiF50iifJaugvM1G8ywsPQJH74fnH4KfEpEZpVRTKbUM\n",
+       "/NVlvstvwcoYoIP0wMQXwBUNL2ISb/k4EZ2a0w/FOO1DM/j3wTMTMFQPZWRWHoXguinharDnRrjl\n",
+       "43D+Qu5mBxjfhPtFZPpSRdg++OhuyOcgosNoB3rO4MdW4est+M+XdgqJyOQE3D8C+ihULUjm4YMu\n",
+       "vJAI629LUP0dKHwSRqtQD0ANg6nBLcchWYfDYzDrwvgl5c+VLNgLXIJud9hzMLjvIi/mAvpboI9d\n",
+       "+Uypl2Dp6xDcA+MKziRhvg4rX3md1t5RGJuE/RvGl3bg1gqsfhh4Pxh5p9Et0dwIPPROj2UT8VvA\n",
+       "MyL8B6XCp7/3Erpp2MuQNy+iF27bDYUkvt9gddjC7dVJOH20cgW+uxXyX4fSwSz5bZPEmhGM7glt\n",
+       "GTcRnKnCPhF5sHth+WNo/xjEd8DpJDS/F9D8Sgd+tof2Tou1XTa5HtCVQ9VoUT8DPP+jxmcr9WJS\n",
+       "ZHAbJG+ExWFoTmPsnSRbarM2Nc/KufBptrcD5hxMb4Wh+VChcTkDz2Uh/7dXaUrf00jCrftAH+8a\n",
+       "wBmg7oAXOrD+GJz8KHgf3vB0eQ/MN2BXCSMFuUts3DO1UJqjJ/Hq3+dTUJsHuwBHvghbTsNzN8GY\n",
+       "Bp04rKzB3PNgrSRQv9APCwZWYxfuiZ2EjttT0HMWJuH1+V7dMlKXnyDz8PIWqH+ghbhF1sZztGrb\n",
+       "aD4bAc+Bjo39FLz0N/CSAKvXW+asH27dGfZNv4I4eDuAMzDFhq4wERm6CXZ9GI77cKIGGQX0gv59\n",
+       "KDaVyl+6/yH4zBQstGDSBN+EThTcGdh7PiyvrED9GEw3ofhBCM5CJR0uQ38W8v0wU4Tat+DEEBxc\n",
+       "Cc/BpSw8l4bCV1/nq5UhT8hLM4OLv17MgfOaAOaN0L3mfT8McqZHCEsz8z9CYyQFPZcp1/a3wOy/\n",
+       "0s+/UrwfjLw5/Dzwd+/REg0ASjErwmOEEvHXXdvXtYDqalHHwNmH9USBpeE6kf4obgPUfwFegpNb\n",
+       "DfhQgr4lCDSbZiZCqTEU9saOEGpulLvOo18SkSTgK6UsABH57nH4lZtpHI3SiDQguQBeDf70zQgR\n",
+       "5WDHPpgfCFWXSOJVV2nFs5hAIRsGIxDu9ul5WB0CMwqdPBT+RCl1fjPm7r2GZKiq9Rpl2j5w07Br\n",
+       "/DItluPgHaXpOawlYduGp/PtRXixDSoGIzoYAZwZhJeaYB9XSrVE9D+F0qdD8bKTWfDOQe17oE4q\n",
+       "pewRkf5dkJ4INcg34jLk7NeHCq3lvyUiz7XDXPxPDkLdCj14hk9BfA2+rpR6XfG46wCX5Te9Dukp\n",
+       "09eVl9ch6O3aBSTAMMPOmVdBRCJTMLYNFk6Gi9UzCi0XtAUYmIe/V0pd0Oj4YdcINgZkIH8XaOPQ\n",
+       "OA88ArTg+Xtg9oPhOdheheI3lFKvKZHABV5M9ml47COh4V3OhsUsHMpA/m+ubIpetd8KP4KsugEl\n",
+       "yEs4XRtDveUcdGbf6ue/WbwfjLwBul40/xT4Z+/0WK4BfgP4qgi/rxTXnULfZqMMz0/D/XfDfAS8\n",
+       "LbBUwVk/BFngmFLKFpE/8TF3VqntSCBWBntxCveUFqqQCq/1Rmld8npJRP6gDHclYMqGwjo8rZSa\n",
+       "ezNj9KDehLEBaC9B9lmyu6uYY03MJJx3Qz2JZhROV8D6CizagHmF5Lf/6tGAxXW4YeqSi3gBDBvm\n",
+       "WzB66XtaoPmol+DlNGRGYM8a+BqcHIbSQ7A6C7O3gmjQfAlKP7xwfIQGjzJLKF7ldoOGV1CCQ3Pw\n",
+       "uckNT/0NiJwPyZVzV/r9uoq7ayKmWif538bRB11Up4XzA2hdNyWZy2Edjp6Bmya6whsAHTDOAgou\n",
+       "DbbLxVC441W31zXION3s0iXYUiB326PklMKVdUoygGVGobkELzcvMUjtqry6hMHp1y6zv0dE5DHe\n",
+       "9DlY/wEcrsL5j4A5BNYirH39zV4f3g7CcnbPEXjyNjiwAgkHFnrh+SiUNl1ZeVODERH5beAgcERt\n",
+       "cPAVkV8mVDF9Sin1i5s5hquAg4Q8g9cQo95rUIpnRVgCfgb4y3d6PNcaFhw+Aje0YOs4WC0wTwOL\n",
+       "8FcXLiRKqZqI/GYZ94s3hiSAZgvMJ0MvjzelVKnC1PBb0nUpwLMvwS8nwX6c/ruFfUGS2GwJZxCM\n",
+       "GDz6Kah+GWrf3qBmaYtIBszdEElDawE4fw08ZN616MDx4/DhLAxsh2IAchKGz0DRgn88Cf98MpxM\n",
+       "B6AAyelQyOElWD4FD38Cnt8HKoD6EagehcgYFB+Dzhwwd2kHxyXiVa+CC0ePww0W7J4EqwPGadCW\n",
+       "4Gtv1SNGRL8BbvmEzb2HbNJOSFZ9cQye+Wmu4/M/gNMn4UUXbtkKlgv6GdDn4TtdEb5XoJQq9Yoc\n",
+       "expuOQjLcfBWIP08ZArwqnKJiAzAjl+yuPG8jz3VS2y9QyVV5yV/O5V5C069FeG3bmD5et0rGjAJ\n",
+       "iUlwO8C0Up1nReQQoF972fbqt+DZEszcA3osPFbzP3i9TqWrCdmsjj4ROQD8C6XUr4rI7wNfVkod\n",
+       "7v5fH6HZ0q+/XjDS9bS6ohTkZkCE3wXWleL/eKfHci0gwseAPwJuuNbibtfDmouIAezMwVYbGp2w\n",
+       "ZfA1yqS6yO5huD8FvRa4FXiyAY9fi4tHQuRDaeSXAvbv1hmpNzA6bbY/B5k6PL8FHn1Aqc4zG77T\n",
+       "Ntj6i7DHgKQHSxGYmYHSV6+HjMn1sO6Xg4gMDcBnMrA9AFWDE+XQhK0WFbl5FH5qnJA4tAidJfhr\n",
+       "pdS5De/Xw39F9sPUT8NuBREF50w4dxhq37w0A/IG49GB7TnY7kC7HR6bb9knRmT0X8KPJy6W9iBM\n",
+       "0X9tHE7+J6XUpmrPvJ11797Et2Zhlwd2K5yLy3YCioiZhnt74a4IGE0o5uEBX6nZV2+X/RR84i64\n",
+       "MW8yfXOa+ngCUQ0WcgEnv9+A31ZKXTU+XXit6fkC7LgJJl3o6DAdwLmvKuWffuM9bB7CzpqrHwz9\n",
+       "qDXfzMzIB4AfdP/9EHAXobzuBeZwehM/+6pAhCjws8Cd7/RYrhWU4mERzhNyR/7wnR7PtUb35DvF\n",
+       "jzBcA/CVmhaRM0AccK7lE0xbqSdFsoMw3oTJQqhbcUFhts+C5Cutg6FuyvjPwWfqoQokwE1AameY\n",
+       "juWpazXudxu6Gaw/E5E4EGwM3GyljonImTnYQlgqWeym7De+3w8VPCc+Bz+Rv2j1vhd48HZ45jRw\n",
+       "8grG4wNnuj9XAfogDFxC4NSAXkXIfbpuhfC6QdzZ7s8bbesCPxCRhwnLYJ3L6+rEh6CnBYbvsvdI\n",
+       "mfqZMu1k2IE080OlnKtM7Ddvgf374CNzF4tIO2LwDz8jIv9Rva5Pz+ajOz/XNCuzmRLQOS6SvGrd\n",
+       "1+82fB44ptQbH/DvMfxvwP8uQuqdHsj1DBWife1TqQDtc9BphW2iG6XuCzGob0ypjsOW2MVA5AJu\n",
+       "LEDvHddkqO9yKKU6l8sgdX8/o5Q6d2kgsgFb+f/Ze+/4OLLrzvd7qqpzowMaORIgmHMaTp7hZEnj\n",
+       "0UgrybKsMPKT5ZV3V2v7eT9rrf389Lwv2ZL8tLZ318/yyrYkK1hhlEZhcuYEZg7JIQmCIBIJNBro\n",
+       "RudQffePaooYDjlMAJoA6vv54AOguqvu6b7dt86995zfoUc/54iANeyuSEDD5tmx+HIpDsJw8K3H\n",
+       "TIGoxuUFPM4rlFKlyvf1ItsByQEYmzbmBVLQNAqJDBRnwTGr2worYm+9DYdz0OXAygpaVMymM5IA\n",
+       "ApW/g5wX2MdFg5/PISKfn/Zz5wzbdzn8G+C/VaHdqqIUr2OtZi006fsFROlNODIJbzRbe/2mwOFG\n",
+       "OJyA4vQARAH9AudrZRA7gH320UC7wFinla1y9NVk9Bkr3XukskqddsCLnXDmtauJjZj/ZPbC/oJV\n",
+       "BbiMlWK7txV6T3MZKzBXjuhvr5kFlc/LoqsVNZuD0U7gd7Bqn9wN/MN5j19yr1Ap9fmZN+vyEGEj\n",
+       "VuL4jy/13AXK/wocEuEbSrGn2sbYvJWKKuv/gGfuh11rQASmjkDsF+ct7w7DUAkSLghOm90fb4D4\n",
+       "s3Nt9yLkFPQBG3RwTQsYPhGCaBVrvYBS6qSVUjz5AHg7oJCHiSch/Xw17aoWleD0v4fH3wU1S8Es\n",
+       "w9RemHxydoK9J/ZA74NWuYmzpB3Qr6jowiwmZi2AFUBEvgxsBvYqpf69iPyVUuqzIvIg8B+BpVgZ\n",
+       "NR+8wLlVDWoT4SvAgFL852rZUG1E+E0sRdYb5kIIrdp9Pl8RESeAukhBKxHHWuj5MKwpgS8PQ344\n",
+       "dAZGv3q1mRgzyULvdxHfHbDsfliZA0cJ+v1w5CjEvvkO2ztzaJ8IVsZgcS4zrK7nfhcRF1ac0Kz1\n",
+       "j9VG/SdgdSd0pCDrhCNO6P2RUrnXZqvdavJOfT6rzsi1UM0PqggtwBvACqW46mj1hYAIX8UK+vqY\n",
+       "UpfeWru2tq7fwWm+IyJNULMe3CGrumzp8PWQSQOLo99FpBNCa8HhhugR4Fh1Yo2uHxZDv18KK8Bc\n",
+       "Wwm1K6CYhsQBZcn6L0hsZ+SK2+aLgKEUv3fJJy9wRPBi1TH570rNbvyMPTgtTux+X5zY/b74sJ2R\n",
+       "K2qXCFZBoA1KXVChb9EhQg9WCui/UooXZ68de3BajNj9vjix+33x8U59vugidi+DPwS+bzsi51CK\n",
+       "XuAR4F9EaKuyOTY2NjY2Cwx7ZeQtbdIO7APWK/XOFV4XIyL8J+C9wB1KkbvU86/8+vZMaTFi9/vi\n",
+       "xO73xYe9TXPZbfKPwJBS/MlctjtfqBQN/C6WhsynZjqg1R6cFid2vy9O7H5ffNjbNJeBCFuBB4C/\n",
+       "qLYt1ysV5+MRLKn/f11da2xsbGxsFgq2AiMgggF8BfgPSjFVbXuuZ5QiJcLDwEsiHJzNgFYbGxsb\n",
+       "m8WBvTJi8XtADPhGtQ2ZD0wLaP2OCM1VNsfGxsbGZp6z6GNGRFgPPAXcVLnJ2lwmIvzvWFL/dyvF\n",
+       "NSsV2nvIixO73xcndr8vPuyYkYsggg/4DvAHtiNyVfxnIA3839U2xMbGxsZm/rJoV0ZE0LEyQ+JK\n",
+       "8Vuz1c5CpyIStxvLofvBtV1rfs2UKjU9lkPdRtA0GD8I5TcXu8z3lTLf+n06ItINtZvA4YHYISgd\n",
+       "uliNIJu3Mp/7fa5561gjOsQOzMexxk7tfdu10YC/BlYBDyiFPXhcAyJsA34GPKwUL139debP4GQN\n",
+       "DoEHYeXNsGLKKgXeG4TDh2DiW3NZcGy+M5/6fToi/rtg+T2wKg2uIvQH4Y0BiP7j9VL353pmvvb7\n",
+       "XLOQxpp36vNFl00jggMrc2Y58G7bEbl2lOJ1ET4KPCrCQ0rxSrVtmgPaoOsmuL/fGhwAlkxAeQ28\n",
+       "vBw4UkXbbGYZEYnAyrvggUFwVW4GnXHQOuH59cDrVTXQZiHRCl03vn2sUavhpQUz1sxqzIiI/H8i\n",
+       "8ryIfPm84y0i8rSIvCQid8+mDW9tlx7gRaAWuFcp4nPV9kJHKX4JfBL4iQgfrQikLWA8XdBdOjc4\n",
+       "nKU7CXVrqmOTzRzSDl2cc0TOsnQC6jZUxSKbBYqnG7rNhT7WzJozIiKbAZ9S6nbAKSJbpz38R8Af\n",
+       "A/fB7KuditAuwl9iVZ/9Z+C9SpGe7XYXG0rxGHAP8DngRyKsrbJJs4hZAvMCDldJB9Neol/4mFC8\n",
+       "QP8XDChfc2aZjc05ykW4UGhI0QBzwXzWZnNlZDvweOXvJ4Gbpj22Vim1UymVBpIiUnOxi4hQI0JI\n",
+       "BOflzrZF8IqwWYTfE+FxrHozOrBGKf5qpmXMbc6hFPuBzVhVfp8Q4QURPifCu0VYLUKnCI2VnyYR\n",
+       "mqps8lVSOAZHBdKOc8eKGrzpgcmD1bPLZo7ogxNFmHSfO2QKHA1BdFf1zLJZeOSPwbELjDVHvTB5\n",
+       "oHp2zSyzGTMSAvoqfyeA6ctJ+rS/E5XnJi9ynT8FPg14AU2ENFY6aaryOw1kATdQA0QqPyewtmS+\n",
+       "grUSkr32l2RzOShFHvhzEb4M3AXcjyUs1wl4sPoKQAEFoL0adl4LSqlxEfcP4UfvhWVYfn2fQP9T\n",
+       "Sqn+KptnM8sopdIizu/ATz8MyzRwAf0a9L8C5QWxh29zfaCUiom4f2SNNT1Yt88+gVNPKaVOVtu+\n",
+       "mWLWsmlE5HeBqFLquyLyfqBVKfXXlceeUUrtqPz9I+A3lVKp8863Vy9sbGxsbGwWENXIptkJ/A6W\n",
+       "lsfdwD9Me+yAiNwIHAQC5zsiZ5kvaV8iorfCH74P8nWQOXv8KDQ8AX1Rpb5ZTdvC8IEe2NAFuSJo\n",
+       "b4LRDz/JKPVqtey6EHaq3+JkPva7iIS64P/aCp11gBPMJDiOQHkPfCmn1DPVtvF6Zz72+4XwiNx8\n",
+       "B7z7Jhg4eywP+g+g9Sh8SSk1ea1tXGwc74Mf5ZSaN5lb77TIMGvOiFJqr4jkROR5YK9SapeI/JVS\n",
+       "6rNYlXG/hrVk/6ezZcMcUt8Avjp4y4euB6IvwyoRMaolTqPBqtWw8W44eTZAaBkYP4QHReTYTHxR\n",
+       "bGwWIUuaYPVSGHVjlUIIAwVo7IWHANsZWSTUwoZumJh+zAXmEuCotQV9zWOsBqvPH8d7wPFDeEhE\n",
+       "jiul5n1m6KzqjCilfu+8/z9b+T2MtVqyUCgVeXtwbQH0shUGXa6CTQDUwaZlEJ8eqeyF0lKQY7CE\n",
+       "Gfii2FwbZwOz7cDqeUWDAcZZR+QsPsgY0FIto2zmnjLkC+A//3hFwGpGJqH1sHk5TE4fx31Q7AHt\n",
+       "uDWO75uJdqrJoq5NM4PExmC4F+qmHzwIzQl4XSlVNWcEQLjoTW7eL5HOZyoZRY9iDVj9Inys2jbZ\n",
+       "XDaDUSglwXn2QBkYAG8ehqpol80cMwavHYZac9p4Og7ePsgDMxZgepHBWl38ofnFolNgnQ2UUkpE\n",
+       "vvc0PDIAHWHgNHAS+qeqvFw7DvuOw+r2aasjWWuvEaC/WnYtdkQIA88D3wN+E1gL/LMITUrxhaoa\n",
+       "Z3M59E3A3pdgZRsoF6hRkDMQz8JPq22czdxRhsOH4ZUU3LAEVA7kOBQG4Z+VUjOSxRmFvcfhw23n\n",
+       "jeMnLGekfybaqDaLsjbNbCEiTmCpgF9BDOiv+qqIiBGGDy2Hdd2QLoDxJjhOwE+zSu2spm3nMx/7\n",
+       "/GqobMv8EOhTit+fdrwVeBn4rFL8qFr2zTXztd91kZWd8IkGCOnWTag4BIfH4GszdRNayMzXfr8Y\n",
+       "ItIi0KKsrbveio7WTF3bqIVfXw5ruiAzbRz/SVapeVN+47oslCcia4C/A0zgkFLqM+c9vqA+qNVE\n",
+       "RHSgOwwripBLwWGl1Ei17TqfxdLnIrwf+D+BjefXRhLhRuDHwBalGKyGfXPNfO53Eanzwho3BOPQ\n",
+       "V4ajSqkFo4o5m8znfq8GlXF8aRiWX8/j+DtxvTojv8owEZGvAn+tlNo77XH7g7rIWAx9LoILOAZ8\n",
+       "XCmeu8hz/hTYBjy0GIJaF0O/27wdu98XH+/U51ULYD0v1dUDdtE6m0XBI8DhizkiFf5foBv4wJxY\n",
+       "ZGNjY1NlqppNIyIPichBILeQZG1tbC6ECA6sIoJ/9k7Pq2zd/DbwZZG3pwza2NjYLDSq6owopX6s\n",
+       "lFqHVSzv3mracjmISEBEakXEXlq0uRo+BvQqxSUDh5XiZeBprArXNtcZIuISkYiIuKpti82VYY/j\n",
+       "1ydVS+0VEadS6mzw3hTT8vWnPefz0/59Vin17ByY9jZEJFQH710OPU5gAiZE5EdKqb5LnmxjA4hg\n",
+       "AP8J+K0rOO1zwD4RvqIUp2bHMpsrQUQ0P9zRCbcHQZ+CckDk+SQ8p5Qyq22fzcURkXAdvHcFLDWA\n",
+       "CYiJyA/twpbXB9UMYH0I+AMswZaTwG9NT4O9XoKbRMRogn9zOwRWwqgGnIaapyF4HP6rUmq02jYu\n",
+       "FK6XPp8NKoJmn1KKO67wvM8DK5Xiw7Ni2HXAfOp3v8jtG+Fdt8KgB0pZMF6Gtr3wRNKuR3NFzGW/\n",
+       "i4ijGf7t7eBbAWMaMAyBZ8DfC3+jlBqfCzsWO+/U51VbGVFK/RgrhfF6p3sp1K+eVgSpGZIbwB+F\n",
+       "rcBjVbTtHRGRlgjc5oHOAoyNwQtKqRPVtmuxIYIO/Anwu1dx+heA4yJsUoq9l3y2zawhIo4OuP1m\n",
+       "GPJUZL49ULoJhgbgdhF5adpq7+Ve0+OBbbWwBSjH4PUc7FZK5WflRSxeepZBZNW0cbwVptaDf8x6\n",
+       "739ZRdsuiojoTtgYgRs18EzBgSS8opSaqrZtM42twHoJBAKRCxyPQMoDTXNu0GUiIp3L4VNbIdcM\n",
+       "iRg07YFPuUS+nVdqf7XtW2R8EEsE7+krPVEp0iL8OVZByffNtGE2V4THB07fefVovFDyW/F3XuCy\n",
+       "nRERcTTAxzdDxzIYUyBvwnv2wQoR+Zq97TNzOCAYuUCNsFpIe6G5GjZdDkF473q4YQ2MuSHfB7fu\n",
+       "gnUi8v9frNr9fMV2Ri6BgskL7cNEoSYFB+fcIKASeCVYBblqgInzt4ua4IFbIdlRSZmugYkQpCet\n",
+       "ar2HgDZo2AHOViiOwuhzSqljc/5iFjgiaFirIv/hGjRD/g74jyJsVGr+F8SaL4iIF/w3Q3CLdcS7\n",
+       "K0GmmABX0Ko7AkASnEnLQbkixU0NVqyCzhunyXnfCqfSsGwcbhKRGJCohrCViOgLyRkqwsQo6Ocf\n",
+       "j4I/CbtF9FXQeCfoEcj3Q/Q5pdRliw6KiIZVGWTG4h5EpHkdbNkBfWczTTbDkAntk7AZq5zE2ec2\n",
+       "ABEgBQzNpB1zhe2MXJr+PhjaA63r4LQO5VNQuw/KSdh9uRepqOd16RAxIQGcuFKlRhHxQ3AHtN0M\n",
+       "ah04s9ByEhJFkdoDMPkDpVRRRFzd0NbBWxU8Q5CPgGsQ2QKr3gs3TEFTDKIh2PVJEdd3lMrbN7uZ\n",
+       "5dewZsu/uNoLKEVWhC8Af4y1ymIzy1ilHRo+CduaYOUolAXevPM0Ox0vEK+5FQZDkI+Daye0jMOP\n",
+       "r/T7HIaetvMcmDw4HBgrnDR0FVjdC1FNpP44jH/naiXmLaeKHgGXsspmDV/sZiXi2gB1d8GSiEhL\n",
+       "FMafUKpw+Gravc440QsjDdC6FkY0UP0Q2Q+lNC4N1n4cNscgEoPhdnjt0yLy90qpdwwcF5EIRO6B\n",
+       "jrWgSiLB12Dq2RkqB9DUAWp6ymsMPDnwuuB+ETkCxCH8MKzZAE0KJgUGh0Tkm/NtK8d2Ri6BUsoU\n",
+       "ka8/D/cdhI06aEkYGIWfKaVil3MNEfHVw8eWQnuTtS+s9VoZOf+glJq4zGs4oPETsL0B3I2WE5x1\n",
+       "wfEuuO85OLAeXokBTwKlIhQyYHinlbAuAxnQoOFOuH0cmirLfO0J8Och9i4RObiQZkTVpFKD5nPA\n",
+       "/zMDSqp/D/yJCO2LRSa+umgrYHUzbBs4d2z7oCLZuZcX95yBbi80ZCATgx9m4bUrbSEPyQw4ph87\n",
+       "gWNNmkidyfpX4MZKP7++FF6+H6ue0RUhIl1L4KPLwOUBNQBaP+wVkUfP/56LeLfD+ofhhjFoGIAz\n",
+       "fnjlYyKubyuVn9dbu5Vx/GvPwX37Yb0O2hT0j8GT0PQI3DUEvoozuTQGehni9wFfudg1RSQA7b8N\n",
+       "Nzth2RAUdTh4M7zeJiJfnYFxNDfdUz0A7eOwKQze9RCPwr8/gR4rsKkObj11TqnjjSZ49v3AP15j\n",
+       "+3OK7YxcBpWCR4+KyGOAoZTKXMn5IbjnRmjZwrn0zDaofwA2RcYAACAASURBVNKKAfgfFztPRHyg\n",
+       "LQOjBmiH9nXQOgTxRmiYsHZqMmE40QybRuDozSLytFLKrBHZuQd23Aynzn5ED0JLFE5AoBuazruh\n",
+       "hXMQroehEFZ8g821cwcQBn5wrRdSiqQIXwc+g5UibDOrhLqh9QKz27acSTA5ROILgBtLsPGqbjop\n",
+       "OHgYdiypbPsUwBjH1T2CN2nSkIXTTeBJw8ZhOLJFRH6hlMqdf52zehnnr3aIiLMdPvJuSBkwdRJp\n",
+       "aEQMoXznAegD9kx7rgHt98BtIxCsbEE1peA2E6L3ViYpVS36ea0opZLA90Xkp4CmlMqKSAvU6+cc\n",
+       "kbN0TIKnc3rZkrfj2QgbvLBqyPrfKMENgxDrhOgSEZnAiivMAQOX8zkREW3a+9x3AtLLocaA8jhs\n",
+       "vgGSZ0Bvhd0BmPgOjl8/ieM12NMFoqBtHFafgUM9IlJ7uZPd6wHbGbkCKpHyVxot7+iEzWut5dFf\n",
+       "sQyi+2CJiISVUpMXOG+J0PTHLhqWg9FUJOU1yedgdACohXwWHHmoLUBvBDynwHBg9WkhBc/vhshp\n",
+       "WN8E5gRoQ9A/CY9C/rOQNcAz7UtWEsgorC+OzczwOeDPlWKmVpr+K/CSCH+mlN1Ps0suDqm3aR9B\n",
+       "ygH5ROXGclVVWUXEjRXrNXUCvv0o/KsOMIrgOoZ4Y3RNgtoGZQVjAgyDMwq4mPb9FBF/CO5st7L6\n",
+       "pFZk7yQ8o5RKVJ7S2QnuMcTYS9NWRZsmGKrAqNvN8KdF5Hen3fgCEHSdc0TOEsmCP4IVnLsgAibP\n",
+       "y1TKQkqsdePpGyJJF5Qy8E7f3WAXNCbffry5DO5fg846aCtDWuBUXES+rpQau9CVRKStAe7tgO4W\n",
+       "ETUBO4HHB+Hrj8Fv+mBFN9QMgvLCgQaImiAGelhj9K4ya8+AAg4LBA+At4j1eZk32M7I7KPpoDvO\n",
+       "+1Br/OrNf1sfiIhuEPk/6ljeWoPTUPhyBUpqjLH6HLEM+FthPAzOBMRS4OiDgTBkBs+mFlZ+f0dE\n",
+       "njkEtUASGFFKKZHAK7D7Dri5srRXBva0QWzPTJa9XsyIsAVYDXxjpq6pFMdFOIAVh/LdmbquzYXI\n",
+       "vAGH74JOD9RWVkiiXjhShtxVxVBYcWP+HdB1KwQ1SJRLRF/oJ/XFfmgFFHjaoS0MXVHrLIW1FTth\n",
+       "YolDnr2WoxEeuQEa1lixbOoIbNwJS0Xkv1ViFgzAsY/GDR42pAxcJYACEccYrqVwdBVw6OwLhnQZ\n",
+       "8jq4po1VaQdkSyzQSYpSalKk7k3Yvxw2DVtHTYE9zRD72TsHgmbHILHE2uaezplGWNICH3gdjMr5\n",
+       "/bXwy4+KyH95+/aYtCyD394IRg53Vxp3eBTzrl7K782T/rNh+JITPtAFmU446q0ET5+C2inqncKy\n",
+       "LPRUJrQdGry4CUb2Ms9WuG1nZJZRSuUbRHqPQ1sR9LiVAVN2w0QMJrjwB2adh3B3PcGhHPk1Gp6M\n",
+       "m3IujLt+lLHOMmsHYbIB9CQMNYI7Ai9lYPRbF2h/DDjPG08+Y63yDW+E+jLENDh9BOJXHWRp8zb+\n",
+       "CPhSpc7MTPJ1LFl52xmZRZRSMRH9n+HRD0JbBJTAUBZOf10pdZVFPf23w6a74ZYBSHgcHO50Uvp3\n",
+       "efIrSxT/CTDAPQH9QXD7IZSFuBuGTVA5rH1ZBaDB8hXQtGWabsZ6GElCR8xygncDQ70QctPsOOuI\n",
+       "AGQo+Qt0n4CJrVScEaVUTiT4Krx2K9w0YN1Eixq81goTT1x8q2IhEPshvPQh6FsK4bKlbXlmJ2Qv\n",
+       "UbYhsceKEWnyQl1l6/5kLQzUw72vnHNEAJZMQEc7nG5nWvYUQARu2wik8a8WWss1eMb9qPE4sWXD\n",
+       "BH4XTn+xCOPHobUEWgsMLoVYH+7WEg0Jk0IWJgPgS0PeBU4/TBy/Us2bamM7IzOAiDQBQSB+IUXW\n",
+       "KDzxNHx5FbR0QiILrjfBOQF/f5F92IgbTVmB1OfE6gwcBYO0p8CAglgG4nnQR+CEE8b+9mwKoIjU\n",
+       "YkW4JpVSZ86/eCXq/3si8gwcCQFTSqnozLwbNiJ0AHcBn5yFy/8A+C8i1CuF3WeziFLmMRH5Cxhs\n",
+       "w3IChq80Y+YsVgB65+1w0yAMhVvYeVMnghu3Gcf44BjF4DD8AAJJcB6C3e3gdIJrAjpfgwE/RHUq\n",
+       "Whk10NoApRMQKYCzHSb8kG+CdAA6sYTTkg6RnV5Y66CQ19FKGUqeKM6kInIKNMdbrZx6EnY5YGAb\n",
+       "hJQ1V4q+AKnn3/aCFhCV1eB/EJFGwA/ElFJxEdEqmUj5C8V7KKVGRfSvQ/r90FBrbXWPjUL5wLnV\n",
+       "tOl4ADoq9YxiZ1VfPbDUCe4kEYcLzySAINTiygwTjtRw5g9Wozw6YMLmfljVB8eGoS6BbxA6X4fx\n",
+       "bjgTseJyHcOgz7tSJbYzcg2IiCcCH1wHy+ugHAUtInJkAr731n1JbXstRoOOoY9QavJR7L0D9VoZ\n",
+       "lotI0wUchv4SE+UypTKYJTB1UFIga0BwBJpOwaTDj2PUTTpcIBdOQoOIREPwaythcyOUJ0CrF+kd\n",
+       "h3+5UNBtJRtoXi3lzRM+A3xNqZnfY68Esj4G/DrwNzN9fZu3UnE+flVRXERcXrgxDDcKuFKwLw7P\n",
+       "X8ZqiRd8OrjMWnZtXoMr68WZB3DhNrrIep6Gj7jpvaGOVKGIZo7hSk9y65tWlkbm1HRHKAnuQ3Bv\n",
+       "A3oANNdRKIcxd3kpn8pM+06X4CdTTG3NsMRpgDNP/XFF85C14jH+5AVe64+tSQoBLI2TBREncjlU\n",
+       "JpKjACKujdB6D3gDkMuL+J6BzCvnTx4rDusXYKgBays+CqH3wMBWWDdNH2bc42Pf5m5w10F2FLSw\n",
+       "yJ44/LgJxicxlgrOyuqTIk+yNktsiZ/JlgaUW4OReisWMJcC9zi095P7JmTXQigOtZVgZFPgJx2Q\n",
+       "mXdK29UslLcd+EssT/91pdQfVMuWqyUED9wIy7ZVlkrLwKuwaifcAzxmRbkb7/PQ8L/58TgF75gD\n",
+       "NZIlXjPBmZYWyvoRS9zop+fNuPpMYi9McvA2N21xk3xzkaKe5aQU2D4KA752hqQHl9+LI5el4I7D\n",
+       "h9+Egc3Qdgv0K9ANMHdD10vwEPDtarxHiw0R3MD/Atwyi818B/h9bGdkThFLVOLDm2H5WjjjhGQv\n",
+       "bH7FmlT8rVIqJSJ1EL7FKoFSmoKxF6F8CEhDMg/9tSEKbi+BSYAyJUOjWKiFUgPcdwO5Z8dIboEa\n",
+       "vYF08CCP3xvHeA1GvznNjkAb3N2Ao3EJ/owb50QeU99P5uZD5IMZzC+efa5SakLE970S/vtLrM4A\n",
+       "JdjZCof6oHjgQq+zknVygcDMxYGltbL21+GWMxAZgqQTdv4aHHABb6s/VEkbjgJlKyZPXoLX14Fq\n",
+       "sXbNUm4vP7ttG1Pjd8DxEWitwb3EQN2cRi0fpfDMKUo3NpNxOvGRY6K5wGSHB3eihYy/BUI9lhrb\n",
+       "YY8l3uZ9wXIWT8BRE7R10JMEU4Njfuh7CRia6/ftWqnmykg/sEMpVRCRb4jIWqXUG1W054oQEc9S\n",
+       "2LRpWqdrwGYYPgrbROQJ8N4Krf+2TGMB0lLAWVsgFfYQOH2KzLvSEDOp80J+tYj+L0qZx8FK0ROR\n",
+       "LwrDkybRrWAUchQdSVyHYEACZIwVeLIepFxgPNBN/nUfRKPwvmbYvRfvPSV0r4GZbiTzZh2sEZHA\n",
+       "fBPBmad8ENijFMdnsY0ngK+LEFHKXtmaQ5Z0wfJbpqXor4ORLLTHYIOIHIXOz8A2DTpjMOWH/R+F\n",
+       "I48rNfW0iPdJ2P8bJqYBUKboKDAebCazbwq6/JB2QVGR0/MYdQpx1RE34+R2K6UGLZXNunuh7o4g\n",
+       "mdugJn6Gkuah4AVw4chO4ilD7C1ZQEqlnxORUzCwARxeGD8E5TfnW0zBXGBNIFvuhZtHrUwigJoC\n",
+       "3DYII3eIyM7p6dUi0gz198OSHjALIjUvA8/DwN9C4iaoWQW5YhPjI7fBnuPoq2PUr3AQTrVCdIjk\n",
+       "vROIdpzB748x/tkA5U5INgfwRiM4ckPEu5oxyi4keIby5kbMfY2QrIUGgTrF5L/Ay/vg2HpQJsT2\n",
+       "A722AusVcF5sRZFp4lzzBLcbxOCtYlYuMF2WXxKBxnugPpdn1dQ4r/Z04kyZeH1J0qsglBmlnIX7\n",
+       "D4Gm4OcfFZG/OiukVknP+0trplVwAeNQrIVjf+JAPphCL5rkBrsoPNlmRdYZBtSNUr/dSd2kC+ek\n",
+       "SdE5xPgNOmODWBuWtjMy+3wMS6Bs1qgosj4FvAf42my2ZXMOBzS2VWI2suAaQu9K4GrNYRoeCqUc\n",
+       "wUbYDqyupPHXFKAuDRM7ROR14DU4ok2g//kwse1eJBMhva+F8uCLsNENZ4YJbXXTGvPiOAOQxtkI\n",
+       "uXtEZBd0fAJuEYPolJNeVcbvLhBPabgHNYyih7I4KFAkFqay3XAWpVQ/5wVO2lwQF3gCUHfeyoKn\n",
+       "BCHBWpHIAYhIPXR/Gm4pQNeAFbaz+xHo/QQYOyH6CiR+CTSE4TNZ8MTwL3dSO6VhFDQEB84crHcX\n",
+       "iLZE8T4WJfuuJZTMCKl8EUfIgW+8SL7GgZ4XTGcMowvyx01QCqKVWJYjlZ95TdVjRkRkPVCvlHqz\n",
+       "2rZcIYk4TI2Dtw5+FY9xBvxTVuSXH9pMMHMQLAzROVpkoKGWgreE6Y+RLURZvxcME8YCUFcHg9uB\n",
+       "n01v5GyQkzUrav5b2BwuMzCmY6g8E/XDnN7STmEgBa4o7tYS7pYyBeUkl2tC6w1QGx8j2QHZq8wA\n",
+       "sLlcRGgGtgHvnYPmfgg8jO2MzBlFIArBBMR78dxQpMnnwJvKk/ZmYK2lT9X+8lvPcpnQqqC3CeiH\n",
+       "0Noky/b0caannZIzi3vpASbaRzHHI+hdwwTqIJcLk4014RmLoZnQU4CpB2GjE1YMl9BUguOmB4e3\n",
+       "iFZfJBsy0JKTlFNZjD7sSce1UIB8BibdlhDkWfJ65W2dFkMT2m6ViFkag5wTxrbDRh+4HeBug8HP\n",
+       "QN8+yPxTFMwjyNY0rqVZsnmhVDJhMktkGPJu6PowbH8cCi8qnvWVSTidhGJ+zIk4pW6NklchChz+\n",
+       "cfItY1YW1DvGhYjoayr1duqgMABjT19K3r6aVNUZqWR9/DXzsN6GUqrsFHnsWfjoFphqsJTxAq9C\n",
+       "YwpONsC/jjO8oUBtQuOpJSV64kNsOzHCia4ykwYYRyG6AR5/yEon805Bc0TEl1Yq/dzbW/R8yFIC\n",
+       "3nZqCnc2xvGWJtqiUVIrf0HsA8dxrE/TFYDWYghfoYgqnaBvo07s5CThfZB1Mq24l82s8GHgh0ox\n",
+       "E3UpLsVjwN+I4LYF0GYXEXGE4D1L4YYorDkAN5aplQCBowVK+gBCntt3was74GQ9rB+BCY9wolGn\n",
+       "4Cgx6saaTS+H7jUGrqYCroZjmK4cNShayqB7MhR6ArgKYdqTo0TbBzm+KkHNaci4wdcBkV2WRV3j\n",
+       "I+zNGWRbAoQLQqk4Rdnbx2RYMfVzzhNYtLGqmDfAXU5YYkIsCs+W4OD52xlKqbKI9xl49WG4rSIR\n",
+       "n9fhlTYYf1EplRERtwbLNeruK+NIQ0mH0Xao9UHjJBxZAqM3w/I0+FfCYMcgY3mhZnUDtWUPkWyG\n",
+       "tOcU451TBA7B+HpYNQpLJqHMBIdO1VFc40f3uHEMnUL5EpRrBFfJxPBM4ZRhCt84mwFp1VH6lbZU\n",
+       "5fV6t8Omh2HLONSPwnAjvPrbIsZXlSpdl5k21QxgNbAEof7wHVTpPj/t32eVUs/OgWlnC0s1Yg0g\n",
+       "Zy62/1ZQ6rCIfGUMbnNCSwbiLqh7EGqaoe+7DN/uJ73MA+k8w+EzOIihF8CMWrOcZJew3KPh1MuM\n",
+       "NCjWnITwgyIyrpQ6NM2WCDTeZkkUg8mq6AlyjiiHO+vIRpwQKhJwOWgu52ktxcgCRU+J5ekiRz0w\n",
+       "NcRlihaJSBhLC6UA9F9tKuMi5UPA5+eiIaWIiXAYuBl4ei7aXKwE4J4tsO0mOHUaJp/F+SEDMzLA\n",
+       "xPokomVoivmIbk9Tm4ODnZB213Foaxt6yY1pxBgrR+HmON6gm6EHOig6O3AlT2HWTtDqnSSUKBMq\n",
+       "lqnfF+fo2jTRWiHgLrC+AM2nQRlQdMHAdgi9APFIjnWxXnJ5L28s0zC9GZqiJZqOwP7T08eryjjr\n",
+       "wPr+13hhUwC6chCNwx6l1IJ3XERkyUr41HZItcNIDHy74TcOWVsuL779jOxrcNAJp++EkAOmyhB7\n",
+       "AaaeFJH6VvjkSggMk2me4FRdklRXFk8Z6rKWMF62CW46BPVJ6zbS4lLs236KVb8Yp/8+g4wvT/1E\n",
+       "jo0n4WA3iB9Cu6wdf40kd77+Jj8NuBjbXkOhS1F2nSSs3HjJopOjNZOhuFlE2xZB9XTBagUqLNIX\n",
+       "h13Aaei436q34yxDPAw1ZdhkwtgjIvJPWGP7dVWDrJorIx/EkjH+i0pphc8ppV6Z/gSl1Ofn2igR\n",
+       "3y3QfR80C6Q1GDktIt+6mMa/UuokldS/WpGHb4X6FTC2C3o2k487iRPDF9Jx9UdIGG+Qjk/Q7gaz\n",
+       "RcNbr6OZUBADV0ln8jaFc8SBryck8rUi5LrhpgbgINklGUbdirpDYJhFlo07ONko6AShGMFrKETF\n",
+       "SHiK1KXK+HJFPDHoDUF0z6WcCitwy38XrNgB7QpyAv0pEfmGUmreRWbPNSI0AKu4QLT9LPI4cB+2\n",
+       "MzJriIhrCdy4DYYMUO2QqEPfPUzThhRaYyONe1pwZouUjRHiSyYZcrrI3eUkXIySVLVMnH6Awo92\n",
+       "Iu/ejXOjm3BTCWe6j6irhE9vpS6RZ6ouRX0cAlFoP13kQAP0TEBeQX8ThI/Apl7htUdcZJcLLkee\n",
+       "sK+M71SKxhdgyyvgzVsLn8fqKnY7amBHB9zoBEfCqk8T2QK5JkgmoOMg3KiLfMNU6mh13+XZpRHu\n",
+       "uxmmOiBe+T+1A3JjcLeI7Dq/3k/FmXtBRF6FUwEgfbYKb6PI+3aA0QMDJ8mknmfkdg/u4DB5T4nG\n",
+       "BLzZYAW+RirZSEXAa0KnDoZKc9+3YHg7ePyWMPfYEoNy0UfdxgLRpVna3oDGaJaO3jr6b1uFgwK+\n",
+       "YhqPnMKhK+pNNw6ziNZgcOaP7mZqXxecOQUbxuH2fvhIFo6cwVNvEj8OiY3gNSDTAFk/NEzBUgOG\n",
+       "Yu8kT18NqhnA+i3gbYqhs4WIaFazF48yFpGVsOVBuGfwXN2WY3Xw1EdF5G8uVSjKa5UEnwSIw5Lt\n",
+       "EPORP3OEfPcozroizkYfLmMCTwrGgkKroXAoIadMyjWCKT5KTh0Ju+GPDdC3w3dXwLiXqRd3cvxD\n",
+       "OSJdirZeGPW5SATcZIwemBgl3+zHURDSRMl6CwSBcg3ERiD31GW8RSth1T1w3ylwVF7nSA387GMi\n",
+       "8iU78v6SvBt4chYUV9+JJ4AvY6m92swOXh+Ie1o5h2VkB0aJ3VFDe9bAKAIocrrGqO5keX2E+qO1\n",
+       "OLIKzBRB9z4OrXDj6/YQbDUJOsZRAaFGypQMD5NuBwUHpJstyXd3HDKtwvFGIaWVye+HjbucHLxb\n",
+       "cDh1RpVGbanEhFvQekw69kIoBaebnfSu1YmPOkQ2hmDFVli/BYY9UPol3FoDnUvg5z7IeUBClm7F\n",
+       "+0XkCwtVYVVE9E7o6JimVAvggVI96P2WOOTwhc6tjHnj064VXgUdPZVrdcHEAKeGjpC8U6gJw4kS\n",
+       "mJOwo8/KYUi7YazGS2FFnmynMPEeJ7kzCtdEFr1XY7ijlsSEh6wqs79TpzaRIN6QIPdshP23bsAc\n",
+       "DGOGBvHVufBqDRTlJKaRJ9BRJufz4TyzAsaOwuYirGiDhAecechnmFo6SbQLVvVDIQhBD5CF3hq4\n",
+       "dcQKtv3FRypJE9dFAcSqB7DONiLSCg33QUc3mFkR/4uQfvnCX77GW2DD5FsLyC0fh+Md1p4g7xj8\n",
+       "U4LJBESC1pRG08Ech/BpAj0l6pxegikPk4YO9SaNuiJWMnAaXlJuH6ZepIRJsraTjMsJ9S5IDsF9\n",
+       "JXj2Vjg6yrHnj5G8tUiHwGBTgmKwndpUHAn6SDPFiDtIS0Ewi0OkczCRgInHgEYRuUTVyMYbYE38\n",
+       "nCMC0JKEJR1w+iGR1lbQPJB6A+IvXr0k9oLlQeAnc9zmK0CPrcY6q+iTIJPgDle2OttgKshQpo9c\n",
+       "Y4KxTTpm3sd41IM/WaRW1xAMnFMKNCeRwCn873LgqMkjHiElGn7RaZYiJ4mRdUEEoehRGH7hWE+Q\n",
+       "cWcb2UyBBAlcS8fZ+bCOs9FJeKoBOarjLI0z2pmnKZgitU3nDVeEiSVOTsgm0kfG4ZFBWHID/NwJ\n",
+       "Z+80tSth4iTs8ELRY+nLS8AKvm/lEmPbfEUpZbaJJOPgCk2LmSsDyUqJMJHQu6FmPZRzENsJ+d0X\n",
+       "cc40fVr25EloH6LuRp3WgoZrCvyTVgX1Q8uheFKI1oUp5OvxZk9zmgASqcHvKmKSIN4yxdFuD1MO\n",
+       "Jy5/iqwrz0jZyWBKeMPTRCJaAw4BVxGlFO6ynxrlRml5mvQSwxGTwtTzsC4Fd/VAtgDhODjTMFnD\n",
+       "VGKKZI+J0QtT9RDIwgk3RBJWSOOGIWjrgDNtnOeoVYsF7YxYMu1LPw235GDJIKSdsOcB2FvHBcu6\n",
+       "G2EIXiD4MFAGvCLSCZHboLw+QMoQim8k4DmgT1m5yi/th0/UQyoAAydhxSDOtUm0gJN0PkveE8db\n",
+       "FMIFnUxAY9LhJ0mYWq1MSQxSGAyLm7KzC8onILgC1JuwsQdGP0jhue9zavQgQznY3GOS7zSYDJoY\n",
+       "gTCSS3DMf5rRmhJes0iuCGocNvvB/C0YmKpsuVxkj1j3g/cCs/r4StjSBNveAG8STmyF19aIyH+3\n",
+       "dUssRHAA9wK/O5ftKkVRhOeAu7FF7WYUEalvhPevhrYMND0JN62HV1bAwH5oPUVjxE93nxd/okTO\n",
+       "MMFZIu8o40zFMV15Ej1lRru9JN05iu5JajHZXNZoNgymlDBU1miWBGOGh/qch7KZZXd7mH7nUkol\n",
+       "B1GnQTBXh9edZ3TTFGFxkzyl02TGiTUXyPt1dteEyTY7oclFuX8L2afWw+lRS4Niax80rDyX4msm\n",
+       "IeCFthXwulZxUt6AlUH4/UaR1yfgUAkOL7RV0Bg8vwseuhP6jcrr3getZ6APWj8EN/hg6TjkDDj4\n",
+       "MOzvFJEXQ7DZZcmbnsjDAWBiDJI/w7E1iT80ibmuTJfTQ+tUGU8clp+EzhH46TbQmnQifhPJD9Hf\n",
+       "0s2ZyUaKiZOMLjXRNrsp5F0UG3wECdGumtBLI2S1KPmAwXiPg/L309A4Ac4YaZcwpIRwqUSIEq4C\n",
+       "5IwCxbY0PLAEAgEoB6wbnHEY2n2oIZ3TGZMX68Dwg5TAH4WuNEy4rXfGq7iOKvsuaGcEam+FbSXo\n",
+       "rghD1RTgtn4Y3Swiz59Nm4Wz2zjBkzC4FoLTbtimwIgGWhiWf9JHcWMn+VAQX0GIb0yS2twHjwJP\n",
+       "Acf3ox0/Rs0HHIhLiHd1oULL0EpOtPI4BSOB00xSKyWOqABxrYWCyjAuBk5ClKjFW05Q0EsUlQMc\n",
+       "OfAGIT0KNY2QzEAKGgpw04tJDjuPk1jWgMKg4F0HOBmNjkB6EG96iB27FNsqM57BIPzi4yLylxeO\n",
+       "H4kfhoEd0DCtau+pINAIt7wMwUr68voRKLbBxBbmNj7iemY9MKTU+QUJ54SnsJ2RGUVEXC3wyF3g\n",
+       "6IFBYHAnxF+ELc+BZ4JQZ54NL3og4sU1peMp5/C4TrF/aw79ZYXe42B4ZTfK7cZPlow+RUmVmCzl\n",
+       "aCuXCOk6hbLJgKbhMuG4qsEpBpPmespxP6KmKHkcFIsGOSJkVALRi5Q9/YxsL9PiDJBztxHQDEpq\n",
+       "KfEBIDkCXWU47YJsAEoT0IBV9Q0/9J+E96yCca0yu38dOt3g2w7rG2B8CFYfgpOVWIIFk3mXg1f3\n",
+       "Q+gEPAQ17RkMT57coRzlw7BpibVKAOAvwB0nYfSebuK3bIVxP+SHYeVBuHkEvj1GYyRJy6Za/KUU\n",
+       "400mzYVJ8hRZVhE5dDmgrQTFXhfDK1xoaY2SQ0eUUK69k2zaBCMJ5hhijOI1veilAQoO6BAvHpVn\n",
+       "3NvH4a0miUIrOOtxF0Fppxlx9pEplXGUPbhSBrpjGRQ9iBrB3TyIu1DCLIZIF6bI+MA9AsufgIGN\n",
+       "0FJnafO9FoFwAnI6DCuuo8yrBe6MeLuhdfKtxzSgqQyH64Bxywnx3gRtd4DUwqurIHcc1h2FpAv2\n",
+       "N8LwK9D0AIS3thFrr6M1qyh6BHepg8FIifwOEdkPoTtKrFuWZOWzEA818kzQIKdPYNTo+Io6Nbk2\n",
+       "yo40p91pVClAzhHGJ+0EcQIlskBJsnj118jjI0cEPAUgA87noGMUngRtO0S7oMM5gn9fmZfXb8KR\n",
+       "gpKeIZ90YUx10qjSHN06wcYBcChonYKudhjuBi4QsJbZBXs2g7RB5wTkHPDyKmgYhmDKyqMvixUo\n",
+       "1xaH4HJsZ+QsNwGXqPA5a7wA/E6V2l5wiDUgLFsBwZ5py9c3wXEvpH8Br+eovxG2D43RtyzF+AoX\n",
+       "SB60HO5exfEWneZQEw7lwDBLREWj1gxiGBlizhEypolHmThNIWvq+GhF0x249XESDg+Gu0DOdBMs\n",
+       "+fHENVQujhGCUCZNrMtgdd5JsdiKT7mIKHAVJsi1rySfP41a+3PEkSWgxSlHhMyqWszTzZBshpFn\n",
+       "QTmt2ibhKDhPQ/1dcCgK7nrI9MCYBl0vwDqsrIwFgZWu608lWZGBZQcglLASH1/6CAQOQH8PZDus\n",
+       "ysxa1I1v9TJ4aXVlVakd4h5o/QX6v8ux4oYsSydHyPlhShQNJmSnBcAmGyGQh7pjTpKeMMXgFO7g\n",
+       "OGNtnRRMF0Y6jWaWKPqD6JLHlBFSOizFQ025gNKFiIqzdLCd/ZsjmP1TFPxZ3GWISDsOcxCX7kJc\n",
+       "TsxEAow4IYeDGjJoeghjcJKkL8pEa4HBx6EjCN0HYOQ2GOmEySGoM+GJDjjz82utPSSV7JOZUHxd\n",
+       "4M5IMQYTEWtFZDpxDaisANTcDRvugm0jEByHQ1PwryWyKgAAIABJREFU2kbYLSCnrPLS+dPQ+Ckf\n",
+       "hGoJTem4iuDCxPCkGFvRSb7/CNwAtXdD0xnwZSGv1xI840RzmDhFw+UUDOUgj2LQlccrUQyC6Pgx\n",
+       "VZKsTDFFDl3G0EjQorspmpOcieQpDhXBN4lnWFFq8xLtynPiDpNtg4quWJk3xxMU9RKFgE7AreFS\n",
+       "QtB0k47AgVscuLJuUq0l0v4yjFTiR96yHaWUSorI30HiBgiugXIczjwKkdvh2A1gNlUqCMehOAil\n",
+       "EWzOciPVc8z2A60i1Cl1LtjO5vKx/A/PNgjfDu0BRVKPEW97GjrL4PTByEo4GYaMFxqypGvhWNjE\n",
+       "KCRp2Z9kqgcIguaFOC5yAQ9SFBziwFnK4fIrKAcpcJqYUjSYQgoHZtFJ1GHQUEqjOdzU5uPEXF5y\n",
+       "hhOnylFsymIWR8j4vJw+kUGkiFec9Hk1NFOjnNCoKSXxNU7iTMUhNEJrh4dwMQGSJN/0XYaXeykP\n",
+       "QPyEm8KP3ZCZBCMOoRVgeqFQAMNnqSIGwuAMwg4R2T0fJcUvhIj4oPseuPfEuXjAZqA3D0MPwMqo\n",
+       "FadcrIO410nKX0TvH8ecHMG1IoOjvoxZcqE25ug5AB0JBaMwYUBsOfhNmKoBZxEmg5Aag21DSXY9\n",
+       "6ERvNXGSAWcMbzmDcniRqImYJYoKSkyScHiJqgyFco5Q2aRUcGEGXPhLMRKZNKXeHI5OF+6yn6xD\n",
+       "Z1CvgVgr2cQwjmAXwSETPVxEBUdxtERxlmLUH4GRr8AzK6BmM5h7rYX1mhL0TcDYo+q8LKqKrIMG\n",
+       "TFyq70WkJgh3tlsZsdSK7J2Ep69l636BOyOjL8C+T0IkYy3BlYHDTTA4BAxZH9Klt8JtA5ZSIsCa\n",
+       "MxB6GX6SgdPfgNoHoPE3gA1lEqUCmugYeQ3JaRi5ElooDWEIfxw6W8HshN0KjN40ZW0Jgb4MIx0m\n",
+       "mWARRyhDwZujhho6JoQ6bYBopEBCeUhJCA0PGiVqyOGTKZq1OO566I/wP9m7s2Db8vs+6J//GvZ8\n",
+       "9pnvuefOt+/tWd2S2pIs2TKOU46dCXCcpBzKdiqGQFHhAQoeKHig8kBBQUGRFKSo4gESIAxJSBzi\n",
+       "OEPZjuNJsmRZLau71eOdxzOfffa8hj8P+7TVNrJly0PLkr8v955d65617hr2+v1/v+/guc1E/9qa\n",
+       "0bmme8WxmyndqzMnL8487o80Qq4Xg0mvaTis3U7mumXm8Du3tW4s69w7cZBe4vqX+KEQwv/86wmt\n",
+       "pwFZP4mfDCH06XwHt/44a4GLtxE4eJpXPsjuH3ZFvoxP4L94P3YcoyoEn7II5vuH78cx/MFH/3t5\n",
+       "4V+hHxidiw5evCVcuGZ4t6FVDU0u/4LyiSY3DqxfpHmp4e3nW1aS2qw3tXG3lD9i4yY+WUh6hUlM\n",
+       "NbLaUhYN0iCLU7MYjSIno+hWp7KTBgf1LSedJetlS7O+7ySedRI6yuaAxkwZr+ruthW9u6bZA69O\n",
+       "ZnpFaZgksnml6tUUd8TssdV+bXt+oJmNbYy4cL/ymfUT9w44Pznxpdu3TLc/vlCI1Hf52Fua1wrp\n",
+       "/ftm39O23JxqNibikHoUQvjbp7EUf9BxZmHV8G4hMm2ws40ug4skYxprbAxRM+wP9J79rPK5JWf3\n",
+       "aORD0/O1ySaf/TC7t9g6ZqnFg5r8DPW3Mt/j4SHn3+bmh0ornccelZysJVbDDVWyZh56ypXEdD43\n",
+       "m80MWlf0qoZpPDJI9jyIbIyXJctR1e6zHBW92oMmS1khS2ZqlfHyNoeflrVy805fNy019ofSk/vO\n",
+       "F8xXeLAa49GP48cXflXNF6iuMz/yHpfeEMLmGf7Ms1xKifvshxD+fozxKxJbQwiNLX7k46w/xyN4\n",
+       "nQ99mqunXMKvyYTxG7oYiTG+GULnRzn4Xs5kjBN2brD7/5yG0a2wHr5ciLyL8wPyK2z9m3z7Mpdu\n",
+       "8/pswvqu1/Jzlo+CpMd+lZgfvqH9MT78kO0G3ZrjM9z94GPjum86v2b+qQvGX3pb+Og9a8+Mnb2z\n",
+       "LlyvaIxED0ielhpJFc7qaDpv6qZBqF1P2P+W4OD5ymyW6rzV9PRR4dVG5uWNzEpz7Eq85zBsmcSz\n",
+       "OgmN5thRo6VRNxTVkfzq3Fv5NY++8Ed4/Yhndxdhfp/9SgqbRSFy/t/hmat0TrtLDz5Kf0494MyE\n",
+       "8i+G0Hwc4+wLvz9X8+sTp/4ia3g/4wx+Dt/hD4uR3zYW9/qT30arR3Kd5+a5w6WJs623vXX1aWff\n",
+       "mZu3B2698MBkNXr+CxfcWut4mI4M+pVeo+ne1YGyKJwvefGoUG/uuOOCbB6NmixVM/eTXbFk723e\n",
+       "zhMnW1HRLlytKg1H6rzjIOmpyhnhobIRJclVjSozWwlCtawMqcHSTZsne46y3L2LPaPQNhrfU20e\n",
+       "u9yoZLESG9FJxpvJoumRfJL+DTZf/BV3//rbmn9iovPHGlbW1p0dpJL2vmGr5+hWVM9Hnt7Ht/K5\n",
+       "HH/9azyvDWxg9m7e1vuIKcNk8ddhhzvfzkqblS2GMz73HOcPFp3f+8lEo75r+1Iia2UmF9aUjZF2\n",
+       "Y+RaYLxMZ50vPcGLR6QFXxqydosHG1Q5J9c52mKyzMkST6otV4ceJgOPQ+Z+u6XR6JqELWURjVUe\n",
+       "ZpuCllm4ZWetttI4MukMeL6ilWlLdOZjiW3bs7GYvSy7nDuublmranUx1z0eu/qwduWQnzhH9zJ+\n",
+       "KYSwxPZf5sW1hUJyeI1Xvi2E5v/N/M3z/Mi73ilwj+Wf4kdOZb+Hv/5kJjz9LGc+/J4x5gd5MODS\n",
+       "7sJr6fNfy0X6hi5GIMbxL4YQXub2Bqa/7sEYchQWJNX0PW2pvQ6jlOc3efE2R0ukJSvDBzaXCneW\n",
+       "VqUq43TPvLnvpQdcbPLFJxm+RDfyxHxmff9Nn1/ZceMTy8rZfb3LY+eGxJUT75y5JjaiGEuTGAml\n",
+       "NWyi1tHUdhAqqyHXiktajYca6b7Gs/tuZw1ls+V8UlhDI9R6HtsNR24lUd6da8635XHqdmtoo9X0\n",
+       "OL9kdv+n+M5sQR65fsLdEMLfwS9Y3AvTxZ/dj/FSf2Hc0x2Sl4vPy8DV1xaGjrcqqu8LIbzxtVbC\n",
+       "3yD4BH4xRu+nVv9n8V+9j/v/g4wN2svsvcS1GUcd8k7uuf25aX/HYKVvZVR4YjB3b61jsHVBfaaQ\n",
+       "zCfmsXZSdU1Siu19FwZcf5PXrj22Fqf20yVHoTILx5YmY/F2y+Dtq2bbDdn5Xamxpebcap2SlPrZ\n",
+       "wIN84qAuXKivOEqW5XFq2pjY3061q46TNPdaf2gWjx3nU1Uo6BRUTTGZaGTRJGeSETqsVJyveLK3\n",
+       "+H+W/93Y5oiVeuYDbx27c6bnZDlxafpA8tRUdZ8nd7l/tqH50UYIRcH/+ZVeSr8RmiF8+Bw/3JNd\n",
+       "nkmaKyH5lWPxv38f3V4f8fD+wpAsvcDWqUjm5S3O3WVljf2K7l3WLhaeOTh2a2tJJy/NGjfcSWc2\n",
+       "J7WnI7dyHm2Rp/zc8iLktB4vPrvaZgXTe+wEHp5nO6WIjJOJTkg8JdNQKJJl90NUxcSRLUdFIglL\n",
+       "ZEOT9tCgPVFXv0JoEjJlmBm1CleKjn4yctLY0W807M6PHah85GWevU+d8KVV2m8tpm7Q/yQfW+HD\n",
+       "d798Si40+fvfF9z+sVN+1K9Kuy9w/AK9x3zYVzBU7HPh7FeIFjm7IE5f8ofFyG+MU2b4VzK2aS04\n",
+       "Wz/1p7j6Ntu3UfKZs4ze5MylxWbHG5x5QLlReiI8VLUe6k7Yay++B7IVXr6W6XRyl5JUQ2mnXbhx\n",
+       "du6P/PyO1tKue9+eeGqXi+sd46VVmYk7MnNNMw1NE3OZmVxUi8hMHemYm2lmLGXk+TkxFHrWrMRC\n",
+       "HWYKE1HUEXXDFbN0JqQzMea6MXN9Otbc/GUH33ONmHCmy/K+7KVE+oMHuvdpfalt0Oga7h9LLmRu\n",
+       "jprebEysnZlJm3zHCUct6gaTQOcO5zNuncPXZdbB7xM+YeH38X7iM/hACLoxGn3Vrf8Q78UKx9/G\n",
+       "B5cXSrKdpcqjbqY1T60eT433+no3KydbCc80HH7oUOxEV+uupVkpTUpHYebGKqHNzkrDQw1lo9DK\n",
+       "H9iKtcmcc0Xbm5fX3N3INMtAkVvLL2g5dpyRh7EQE+uG0mRTljTFqi+bd3WSHUW7kJjIi9xefkmd\n",
+       "XJOVUTfMFPGWsnFiN2mZpxd0tMRwZG7fW2Hmo3NOziWO0548C/aXJtbMFWHk+Xsjr11lraCdsj9a\n",
+       "8rlL5w1WOup2Lf/3DhXXQgh/I8b4VZ/zEMK1J4X/6KL1My1rw0Q62XX0idccPRVC+JHTUfDvK067\n",
+       "4P8XP/VDbH4PT53whVWmN7h3gS5WksWicyNj2JnK86nVhCwuzs1yubAZKWpagXnGWsXFOZ2aX1nh\n",
+       "UkEPr71Ae4cnEs6EZBGxGGpPqFHYFtwOiV4MBqG1+MVJX+2YpBRcxkxILyKK7gtKTUMHeamZBkEh\n",
+       "pkEjXfMoNPyTlyZ+8rlKNi41HpM3a3v3QwhbXPgwT50q/Y57qceXctOlmaVeymyJ9L3na4+1iexq\n",
+       "kG+G0J4w/YKFBPgy6pzZMY1ff56PaY597by1b4pi5CshhPRJnv5hnqnYPeDVj/D5D3H8WQ7/0WKu\n",
+       "dnCVImGWLUxj1t7hlTXCbq6YNhQX56pmYX8zt513ZYKmxJJEHVMvp6VPvViaF6mz9YZkf+DBhSDN\n",
+       "m5aM9ZNr7poo1DqxZRY4kukbGGNFZkcphGXnjXUs2UvWbXnk2FyUmuk40ldYljhUoLasSjuqdN9x\n",
+       "7MrzgU53LEkL9SSnOdBZ2hAa61ayHeHyI/nlygcPxu7WZ+32thwWfb0vDDxu3Dbb2vXUw8XtUjU4\n",
+       "nnPhDm+se48z5TcpPo7/8v08gBhNQvBFCzLZVwhZ/MZECKFF8iTNVSY7ePu34yS6IK6e+y6e2mWt\n",
+       "z9aU9br21tnC22tt3cNS0T7w+W9ZM1/JzKuhZGNotdHTnlbKrBKbiUasLOHm+bP2+tvKVqblQOpQ\n",
+       "CBODfMsgWZPVS7JWw9H0gWanqTVrCSEK6UAZK8JIK6ZyK3rxwGFYN29lkjCUigbpurk1uUy0K6Rn\n",
+       "xLiqTudiMjS1qdSSm6Kp1DUMt91oJ3ouelj09ePUSb4nzOd2+4eyIf0h8x5VmXqwet2st6VZUlZ9\n",
+       "WX6i+OiMfyuE8J99tTyTDb57S297yfYj0hrO2rq3J145Uf0p75MEPcZ4GEL4G4QnuD5keo3mhabj\n",
+       "jcqnN0rdnLrJ9hIXjxfRXFnNalhkA9XpQvMwqjlXsVvQnjBrBKsd+lk0axBTqpQnh9wT1HJJmJtg\n",
+       "KNiQO7apXRemdSJPW6qwTUhxR+Ka6FjtWbS0pWZStQcm5mp7jpIM1+3HC7IwkyYPTdofUkVCPpeH\n",
+       "I2WRKC/t81feVmSLIPmH68t2nl+TlCeqrSCeKV148g1H86ZZ/ynFF+8LFx9Y+9CeVnviyqv0/lTq\n",
+       "U//GVYPJ9UUl5i3SX6Z/gePzp9yThyy9toiof/VrvUbvZ1DetkXy6LPo/l5b0i72t/LSQsV2dJOt\n",
+       "f4XvPmBzjHscf47Xz/GLr8c4/LkQQq/hl7b63vp4LqtPdM4NXT/iaL6uqC+LSW3QKq3N33GSNZwN\n",
+       "uTQWhMpd6x6EFZXao5Uj43LHSnkiPN80bc60s7GmrjSZm8WJXnxLP1mSaNiX2jOyrKXSdl/beaXa\n",
+       "XKGn1hCl5iZqXXuuy4zklgSbasfmZjJNSdnUivuO8sxRLKUrmXr+UNa5ap6f6DdTzbgpZg15vKOx\n",
+       "3NYOV3TGJHmpemJb95cTs2zsZ8+PPLVL94CtLzBMuTf0G1gpfzMgBBm+Bb/4fh+LRXfko75JipEQ\n",
+       "whnO/yWeXma15HHKWw9DCH/rtyFX3GJrhRf/OW/8EPkKYZlmKA2bU6Otys7WeXndsTXpG99+0/Hl\n",
+       "gbyZinlDb5IqZhOpmZVs1Stbz1iZtCzPK2neU8g8jhOc0U264izTbq84CQ1F9kUHaUs/jFS6EstS\n",
+       "j43D1IFCM9TWve5AT6EycNbcWD9sSrRNw74iDM2r/mKFkLbVlnTtmTjWUFqSmGnZS1JFvaFR1opG\n",
+       "X2N2aCfvWpuO3NyYO0mDopcYanu43JGOK1m2zKgpfTd//sOPOOurPO8JTzQszd8tRN7FkuaI3ge9\n",
+       "j344C4nv6k9z5/sb4jNXVPmalWFpNt/1YO2ek3Zlc8oo0JxTtdlNuJWQdVitWC8ZNLlb0u6z2owe\n",
+       "WMTeHISeQaeynlRuP9lxN5zR0LJST5wkj3zeyPPajnRtl7VOcstj1yzaCQOJocRlZEq5oMRbco8s\n",
+       "Geo5NJbacdmKNVU4q5VOTOKSNFTKzjpxX+U6N16XfPC6unps9zKf7TQtr26K4UgWC8u68v2+Z+88\n",
+       "dGNryckHgr3qSPfqiXbztjpUVo5ZHp/X+c6XDH/5OfU78AKNf0DnR2mdX7C+PWbwgL/5O3Hmfj87\n",
+       "Iwf4oxaGYb+nCCF9hmd+kBeKhcPqrRe5+SLxH395q+UZH7nFl66HELIVvu8ZJ822IqXVnTic33D7\n",
+       "0pFeuqkVKlO5dDw36aybq40sq8IDM32PXLSiMFKRnLGRpDTu227NtUPtrloWMuO4Zz3puSSTmlk2\n",
+       "M5W77YyW4EjLxIFopBK1FOaRKqxrx/seh7NqqVRlojSyopCI3lQktX4ZqafezFcMJqmifYduX9mb\n",
+       "aYbSclJohUoZ+rpVw27elc5TnWZTdTywvz7TW1lV3eq5G0byn+Pph7ze48059//WN2qmxW8RL+JO\n",
+       "jL4erPE/g3/9/T6IrxV5CC9tLAo7+3y+4Au/ebjj1p/lj6ZcO511v4D18/zMd+NHv9r+Qgg5LlKs\n",
+       "Lsyhrvxdbn4/zS3ak2C/21DNG/JWTzd0zKpasn5R9/HrxiuHhNxJIwjzmd6gNhltm27k9ppTszwq\n",
+       "00xiW7BrlrJUV2adTKVN1lLXmZ38vsyqtdDSUClj08OwbmzfUNd1pXW3PXLJsSHmpslELZPqmduT\n",
+       "hFRipDSV2ZUbu6hl1VyltqTwUEMRKmUjdVwt2RxsmvQfebVZyfLEJR1V3dKtaOSHbvX6ZpOGlaQ0\n",
+       "78w0NoP51GKe8ZtizK2x+bcuveezKDpUZ4TfF+l5CGGT7gs0lzh6h/qNL99LRz/F3X//rOWNTauT\n",
+       "oMoayukHnP1UavrhG96p6KSLkVWz5lGGesGbO0kWHnKDsEjruJSQptSCgbNelWnEkTxmpskVZQha\n",
+       "dTCNTWndsJvc9Fl9F2Mii5VxSGT1CckeNgWpYKxWoxTcETz2hMSyllQ00XdH5khHQyaLpTpsSb2u\n",
+       "Dmu6saS3p325MC721cvnTHqlt16f2GqlVsdT9bnEwcG69ddbOuMdl1uvGNy/afDxTLnSMppclE7P\n",
+       "+/l/7Z6qcVZ7b6R1nvGrsMT8eY7/KT+5uwiJDXj8O20ovJ9BeTPMTj1Tfs+wiNC++Gf4nsUoDIv5\n",
+       "X/wYP/8dwbk3ogv7CwVNkVBXuNDX+OG+zTLTPapMW6mDsGV02DP66BXNtJDM70nzjnHjOalfdmhq\n",
+       "U8OhVUOFSmVPtFpnhGV1cuBE4lIYODL2SOUkNJw30BLVEkPk5noGbtg0c4RjO0505QbG0nissCbR\n",
+       "N66CcTowkKh0FTHXqEtC0K0KM0MDV5T5hlgnWvXQdH0oyVYtVczk2nEqC4mQEM1VIZNOc820YbB/\n",
+       "zt3PZ0xe53O89rd5+xrFEfHlGOM3Oz/h/TQ7+/X4LP7z9/sgvlZ8O3/+6dN581t8/6/w/Gl8wVdS\n",
+       "e63z/HmuvUd6WKYL4uXLL4UQfuw3K5LTEK5f5AfO0brh3tMjP//E2Lk3mK8t1HAPsxXDeUtjUssa\n",
+       "bUVYE8spS6VkpWU0K8QwszZCOlEk7B+nspVM2V4yiW2hKsj31HEixiVl2hCrwig8Jj8iS9VW3bNk\n",
+       "P07loVCHLa1IEh57ZCrV0JY6EDSUttEwtKNhosJY4k1Vdk9wIlXINS0plIJSVJtbEdxTGSVn1fei\n",
+       "/XRJUg3Mci5bMZl29capPJ0LSxOTpHQ7nTvOVjQfjWTpvnKJ+n4IoUnrJdY+ujibB59j+kvvOrYO\n",
+       "+Uc3Df9cw/Haiv5BqU4fGK4+ZsjhT/xu3S8hhDYunv54510SfQj5B3j6B3iuWvD57n6ML9057ZhN\n",
+       "0clUy6tCHZU5RZWqJ4l03lKHRNWrHU+5n9G2MDDrNRbekzcDs0A743y6ENF1kGp7w7JgnTB1x1AU\n",
+       "tQVZMhOrYFiuqIqO485UEnnY6DiJZ5xUgfKIbLHgpBDVglzwSE9mWRAFUVdTz5LMnrFCJQkkShXS\n",
+       "WAhKSUi0WmOhWZq0cuvzvnR96k590Z3DiY1W7slxolHuenBpana21BtNdIvnzfcuSvNVdasjm/ZM\n",
+       "lzOzJwu/NgC0QcxpzmN89Lt1Tb8ZOCNbbLVZ+1UVTXCw0VX2W/bPXzdoPPJ6ct+VNwvtE44/Reu7\n",
+       "l6y02jbvFMpuLawc6m71DTfPEZ9QxFLWzM27B7KkqRH77mGoL9NzrGXHSFLXkpgqk1QaohMT76ic\n",
+       "4Jy5lsoVM5XcRGGkZaBh3zkTfYmm3FCJIVqxMAl3TBwYiJpVIYQL2prEHYfJWBUm8nouJA/MGg2q\n",
+       "dRvFimr3xGy5YZLsG4UDSZpomNkLubmBZpg4Z2ZWjIW6VjZz1duXuflZLu5xj/N/mrXlRSz23pMh\n",
+       "hB/9Jg/M+wR++v0+iFO8jdU/qKF537VYYYFznEx5+lNc9xXdgiVfVr+Nm9x/luIiwuliYstvME4I\n",
+       "Iaxc44f+OEc7pEeK45k3X2g4fGluIyb20r53kheIQ6P+HZuOzPRIjkzzoVmaKtKhL8XEajfVjrlp\n",
+       "txbbe2L3qhhWFGmqGWFXGQ4IZxzrmYWocIt8ReKahlIZZqZaqKw70Q1HmGianZYglVVB4ZJaMFJK\n",
+       "DJT2lB6LgSyuaVcdkkPCsqlEUCicyFU248xuHJnGynylZ368p56e6GSZtWJFp04kjUqoMo1ipJ3v\n",
+       "qOtMPR6rk1vqxoGlmxxnbPwAH7rG07uLBfGbf5LPPxdC+JsxxiLGeC+E8J8M7f7HHcMrpbwccLcy\n",
+       "/FHqL/5u3Csh5M9z5c9xOVs4298uQ2j8XYqbXP5zPJlwfJaTmnP3aFzmZ384hO0G299VS4tamGVa\n",
+       "46hcGymeu2H4gaEzWe1iZKNBGhKdOjpuROPICWLk6QZ3k0UR0osLZeGuFQPXdeRKU4l1EwNVvCMN\n",
+       "qXZCC9O6MjzsOnm0wdKKsDzRzh+pswtKZ7Gv8sBib3dVokxXLZMZaejqGTmwJpiqPDLVEtxTSLUc\n",
+       "ysPcWhyZtY81ilSdbFl+3NSMJyYlg82OeTkzb+7aeXLuYrdy7hGamcHFmXeqJXHe1z2ALe3DR04u\n",
+       "rijbQzo9xjVukY3f88yePlvLfb5tiQ/UjPf59JyXvxrP6F18MxQj1UKS+i5OussefnBb6+2R6VPL\n",
+       "YtLXiMHnP3pT9c+DkwOWfuBYWhXKbmn6ZF8ojo07F6Rhqirm6mluXuWs9MTkgap+UVbnRmHPOAzU\n",
+       "NqX1XDOW8jAXwo5o7gmVQ9EZmVwpCg6sWparlFIHjvREd6zqGevKbFs3NzMVQtey1Chu25wfGeV7\n",
+       "8pCqnFGEtio+0rWrq3R2cmwQnpalDXUxMLsQLZfBsoa5G/bCBfO4JjFSGRlYUaSHzo1fMc2XPS4u\n",
+       "m3zpH3L5Fo9HnrzCJw+pxouW5eAiP/MXT1n236wk1k94n8mr7yJGdQg+a8Eb+fH3+3h+p7jC+HWe\n",
+       "9JWLkX32Drm/zODDbCwt0kjv99iaEf5SCOF/+EqGXS2ee5bkdfnFG554run8aNVw78Bgee6L6ZZG\n",
+       "uGj7qOdRb418bOqOmIwcrmyp045WPK8rEcKRfJYT2i4UY+90xkL+WCcuupzT5Bg7og1l/cgspIQJ\n",
+       "liVhW1JnOJaFZambGmrRwDLWlbqiqDCOtS+FPWfxwEXHeqcvvCCLS0p39MOJvmhkbm6kJ1MrZRZC\n",
+       "iaNQqTy2Xg7drxNV+0QopuI8mjVKjbQnmVdCNpOGVDUbMD7RGNWaw339t2bmNzi+wlPX+eStL5/R\n",
+       "jdtMrrD7JF6DGONnQgg/ODJ80uJt/SD+Lq2gFyOYp/4Cf3KHlVN56WGLH/8LvPWPab1IaPH0dGFy\n",
+       "eefSQgX4xMe49jJHF2rj7K43NyaGea2VzUzDyFI61kwoC04a9JPoZkKvpJUScspkoaB5jJAsxjhV\n",
+       "4ChZMZap0NIVjI01pKGhZVm7pB0PdUJUVE1Hh48XcsRWVMXLsioR7CqSNQvDoHdlS581NZVZ1zp9\n",
+       "S1SmNtxzXx9HUnMj+xId3djWV2o7tBxHurH2RroiqQ/tra1zsiOsNx0X99zqjn3oqLT1OFJm7jzd\n",
+       "t1RFa73a/ThRNTKtwbJpeEM5yElucqHPzpusvsPnvMdnJISwdJ5/+6MsXWVvSvMV/vyvLKS+vyUq\n",
+       "xtdLMfIVZzUhhL/6nh9/Osb401/D737Mox1urXLlMNg/s4qZ/eZVg5+8Yr4/pzVku+TZp+kcmD23\n",
+       "73jzdYP8AzZ2Mtm8NG9OievsP2KrT9onPVbbV4dnlSHFM8RXCPd1QktMJnYcW7XrikJX6pZMR2Uq\n",
+       "VVn2SKYtlRo71rClq5TKjU5r300jK6Z2TKzoVzNnQuFRY0M7RD0PtJ24r6shMY5Ry0TZLBWziTqd\n",
+       "m7UbzgwzjfmQdq6bToQ4MoipflULYdlhbHB/4kHY13xrYPzTd7V3mLzK8iabH+DN51lNmAfGx5x7\n",
+       "yOOrFqvybyqcmp2t40vv97G8B5/Fx3wDFCNTsvki3v7/hwURMfwD/vl/yHMX2NjlrRXuzLn6KsUH\n",
+       "mf/XIWx/np2fpX7tXXvrJssJyU2bT7ddPp66e75heOGsJE1Nmk2DdCjrFqrkgiJ5wiBMJNalVaGT\n",
+       "pJbrFuGyRjk3yYdaIfi8rmnJqlK/vm8S5qoYhLBtNxyJcUqyJI+H6nBWqqeZ1EqpJA7EkKo9VKj0\n",
+       "jeRKM6vesaEOmZkTd0yMkJoiUVlTGjoTWi7V3dNtAAAgAElEQVQIturCOORGCneUtizGCAGPJPJy\n",
+       "qh2PbXRrzbJj0Go6U4+MkwMNQUhbkvncYRI8Gh7Y/sKxp19mbbgYX/+Lm3RepJcvFnfZe3yZLkxY\n",
+       "u+60GDm9RlP8rnRCfi06z3M557XrjM+RzNh4h6dx49tZO8NLN7/8SlkKfObpzO7jc175lhWNtVr0\n",
+       "SKPzwJKOtXhsJ689GYPN+YIDctTmYYw6CVcSBgmbkdsJN+KCxPpOTj9JPQ59r+iZq0UtU7WmVM9M\n",
+       "Q72wUqgLMa/kZdNyGhy/tClv1qpkKFrWEeSaDpxTauKAuEbYc+yORxrOaGiYKCQem2p6ZF20pDaM\n",
+       "tXk89FyVWlMJoibmMwbFfY/PfdxkcE1zMDC2r9zNjJcKk2btcLfh+NxF6eul9kpt9Xru/vGSSRpN\n",
+       "jgNHy8x3OHiV/YzHu/yTepHu/Kv3QIePfJj+h7gHy8w2GB3yLSGET/1WitH3U02T4Z/ig/hnIYT/\n",
+       "NMb4mfduE2P8q7/T/ZxqzP8OP/GXePIStqfu9bc8vPmt5m90KIc0Znzsw+yWek8uW63aurNHdtc/\n",
+       "7aCxqbN7IMQV9bRP6wKxzfSQrCTbInlsEZH5ELlM05J3jOMjuZluCB7quy1R6dl2pBDsOqdp4p5D\n",
+       "B2qXNUWpVEtPYtVYFIykdqWO0E9rtcqhhpamsQtObNiWqkLTfQPHyVA5Wbd+a6C6dIY8aDQqdSyE\n",
+       "sKuUynT0TM2TcmFTXZeqak21t2/zHv9qRusMty/xxlPk63zboy+71T7s8fMvWLj8fDPi495/s7Nf\n",
+       "j8/iL7/fB/G1YEijdzqXHpG/Tjb6TWSCMcabITR+nDPLiy/d9iGXj7j7rTzf4qkxKz1e/iFe/adO\n",
+       "VUbH3P8CT5fiubkvdK9LqmWt8cDJxjUxOVKFnt0wkMYvmsW+EFsmsc840RPUzR3tTma97huZ2qiW\n",
+       "7KcddXZiCXXat1aXgso8DD0MI5K2EOey0FZo48RUH00hTPQcyezrqC0LWla87Kq5pkXJcs7AA7Vz\n",
+       "2pZlZsZ+RRWiSzqWnOinlQ6WJN5Uu2ARM5LgAoqQeJS2PRPmTqpEM1mzkrSUBo7iY+Nmbphnjkcz\n",
+       "9S+NXX2dOmv59IuZB/0l8w+sCPXMF7cP3b1y6Ft/iSunRmiTnNnvKHDtt45si70P88Kcs0OmTd75\n",
+       "GLu79O/RmzLLaZ0SVsslssZ5g/4Lzt8cqNcKs/W2i/O7jvNj7ZPa5eXgagzGKfOUfsIxBpHjMmjI\n",
+       "LJ0ULjX5hQZ7KceRg3TbOK6qTXQUKE1VRiYajmTGWnKx0Vs0LxNmKxetqrVUakGhr3RgZhOlRFSb\n",
+       "EeZ4SmXPTZU9pZauUtvMA9eVrkodxr6RUgwjbyWlj8+CPKGqOEzYmJT2RY3Ht8z6mfrR0/yzoXh5\n",
+       "rn76WOfVM07CkubRTDbZMTt/n9Yqg116Oxyv88ZdTvb5b2OM8690VVZ45qJfS+RPiRfxykKF9fVb\n",
+       "jJwSzL7792lfj0MIf41716P0akf1J/4EX2wu+nhuLfq8rUKzG5wrl7RuDJTXVmwu7wvxnmmdCXt3\n",
+       "JRuVeOaSmMzF6g5Zj9C16FlNLfyPJ2pTQ3MtbEpdj2veCuc0TA0deaRjyUwhE60qlOaCxFBhdqou\n",
+       "j6KgY+7AWE+2mF2bm+idyoAJ1mxIdFXmZrpxSRK3mBdWjh4abL2tytcNs1LIDnDkqVnHnTD2KLuk\n",
+       "UfbMQ66YDVWt14QnOpYfjD25c+qxssHtZ1ifcXTIzspCc796TKttMTz9ZsTXE3n1XXwG/1MIQoz+\n",
+       "QIWc/QM2r5IG3Fjkpvz9GOPj3/xfFXfo3llkS8HPfIRnIo05JwcLUvrSnIc/fCaEJwsGazy5znpi\n",
+       "uNkUG6vS6tCsvUTa0p23FfmuJCwJ6b5ePXJ2NPKoPzDqJJIkkcWgXc+l6UQdUgNtBaahaWxXVc/M\n",
+       "0obczECh9gRWpKGHL6jjQB2WpQ5Rqowce2xVYtUiSHPgrKmlUxn/xNCa4Oh067m2WmWuLVhTCBK0\n",
+       "NX5VQRPkotWacZI6tCZLKkU2d1tlLSusGRnpa9RrluqZpdmhWZiZ/B9j/gM+852XhR+8Ih7NWf4A\n",
+       "nQOxe+Aov6TZec3Pf2LH+k8skry/lDB6NYRw3qIl8fD3bnRb9Dnb4YlTE69OuSCZ/ujzlNmimHj9\n",
+       "0iI7pjlnt9Pw0LbmUWGyHI1DpdlNJMmmGI8Mm8HZOiqSSpHSS8hrOgn7MdEVmBNrqhlZO7cUo8tS\n",
+       "b8Q105BjrOXAzIaGucJDwY6WtrUwNzMkTBTJhrkgrxJVmpwOXgZ2JaeGpk2ZwSlXdNninXJelBlo\n",
+       "OZHqeltPoafjtiumoS2Pc2UYG7jrF6uJ5/cX5pS7bVrjRDFLNR83jIbPqn+hy+RpY6+59wHON099\n",
+       "8kXjdMX+K0eU/5InA0WHN24yusf//hsVIlAwGC0Wpr+mKD1VOPyWHLq/XsY0v+c4ZXu/ilf7IcSf\n",
+       "4JNPMcyoXuXiCodN7bSlcwzL8rcLwyrlXGGSNlx8cOS43XLUjgKKvKPSEsxEZyw6Ix1sq90yD4U1\n",
+       "mS2FPetSTbmGSuWBfZWx3LGpNZlcYmQiyh1roFhQngw8Vom2FO6ZG1qQ4fooRWMtqUpDLZWokiXZ\n",
+       "dEeR7buz2RIPJvLWTb2s0E4zvcMg78xNG+tmcUWRNswtq8Iq8ZYsXDftveEff3LF6NKWav2RamPf\n",
+       "2+3K8XmuPV6stu6dZzC04Fp9M+Lrhi/yLmJ0PwSVxVL47lfb/usJ7/DfvLNweITbX82pcxFyaYc3\n",
+       "jji3uTAum5xdBGI+TBfExWmj5a1P9HXPf4Tdh5wbyz4y1nkwVs7Oqru1JGPc7Frdq427lMlA3d1w\n",
+       "oVpW1m9p90oXpycO9cyalVbNtJo5SGdmoavMNs3rM+owNAoPNZKHGnrqOFd5WhKWhTCSWBK8IITP\n",
+       "qU8NC4OBloFULdWxpzRQ62iZaEgURhrGGlpa2De1KhUXSjgnClFPX/tUkVeqjdUGp+4lj22ZWRWS\n",
+       "h5q2POGxOrBZl4Ijd8KmaexaHhxLu9MFKbHY5tnvFb94hwuXmOeS5Y58/VCaz52sbyiqHT+2z+wO\n",
+       "d37mHH9xezET8ZBRCOHvxRjf+d2/U1aXaN7jwTprp/fIvSt0+4tuWChZLbjfpH+T5PXE3veV4upE\n",
+       "a2kh2x2pzNJgXheGKYnaYbboXDTighS7izMSG2o7VWGnzVFMrGp5flopmx3D2LSXZO4lHc1QWPGa\n",
+       "A8cSR5pqB0ot61qihoZjS4t+SLoQZ1fa2h4KWtLTb/NohisWfuv3LSRDS7gvmqvlck1TywaW9YUF\n",
+       "AbrIhLBl0Hnsdp6IdTAa1Yat2s6dVHXzRV5rUEKbWXDv88TJvvZSqg5te8PvcvTpCxz/BLc+xeer\n",
+       "Rbf15rtqqd8Iu/ziK3xgm+P26T7usXxjUZz8lhy6v2mKkffihB//LG/c4EMJ2R7/8jrfvyQ+t7gR\n",
+       "A0I9UR8d2r4b3Pu24MxB0I9H6kFf7DWFrOlIS4y3VWFVNLHwyF2Yj809NnO04G85sSPT19TQUSkd\n",
+       "irbtWMNMT2HfY2Pb5hJjqwYOZcbOOevYocRM7a4ejhUOtdARDFXWpUZx4RJ5mO2TjayeX5YfrGjs\n",
+       "jRysDqw2otBsuJctu50uKWKpDg0xHlJFWmeEcirUm+5+8lkbg1XN48wgm6haM+vdubq/4OGdG/Kw\n",
+       "TfhgCOGV39wT4hsL7zE7+8xX2/Z9wMv4kD9gxchp8fHKV9suhLC8wZ++zjMZ9j0c7frxks9fOm3Z\n",
+       "l1z8BfrDxNtPnVUsBfPJUHLpnrU/su5cKLTah0I88aiZKKqmSTJxsLaqngyEaa2bTvTikYmRy+OG\n",
+       "1cktn+mvu1t3PcjnqrQgPkk4Y6QWkqEYN4zjXC8M5OY6EuOweN5bCmOPRT3BEm4LgpauflyTh6kj\n",
+       "57Ud2VPIzE77HtFEpTBWaAh2BF90om+x6jxwYNk5iVQtShxK7cutGNtJeoY2FMZOtOVairgiOjZR\n",
+       "WzG1Wd93s26r5sFg3uHOCb1lNs9x9wadivUTS2cSeZYrGsHq2x2DTu7GYaH4H6/y734Po+1TvsAO\n",
+       "3X/GD4cQ/vpvJ9PmN7jeKdkH2fz4IqZltk3n9cWi/dZlygbzcuGOvXaTg2NuPUOvxb0+j/enjn7m\n",
+       "yPr3NOVFIa7XymaQz3Y0WpVz8+Aoi9YRq8WkoSgYp/SS0iwg42jM3eXgWhVMqpZYVNaSEwfpum5I\n",
+       "HXoLI88rbUkFtdtyN5Q6gsyagVpTD4e/6iiSachNbTpWuOWuy7hlMSiaWlic9i1MXtaM7UsMZPoS\n",
+       "jOSq2FYntUF9RrM68nq4IB9MFC4a3b1J9XmmFRfPcXSPJ29wpa1+venuzj5/L+WZZcIX+einyO8t\n",
+       "CpG/Z/FKa4cQ5u/liPx6xBjf7obwY8d87wXCjHCH4wf8r79ZR+W9+KYsRk5P6tveQ7zshtCMRs9u\n",
+       "OdhKNJNDk+27sqR2VKE+drXouBcOXU8SD5OLYrlnlJdmhhIbgkLqLU1x4dehMBC8YduJdROrKocW\n",
+       "dXmlq3LRqqZc6eSU0Jq4Y8nbcrlUJlhy4lDi2HXnMPO2Y8e6Ej0te24bWvFA17yoVfGuVuPIclyS\n",
+       "t9fMzncN6rZ8PlbF6F7+lGq3QWdL3swUjbZqfMR0k8N75udrx+f66k5f+jAh5qZJQ2PWUpUHJmml\n",
+       "a2EffeGE0fezn4UQ/revVj1/A+EF3I3R7+iL9vcI7xYj/+j9PpDfLZzyy84jW+fPvsjW09zbYnSX\n",
+       "5Z921H3H0f9C9i849928eAAtxxeCQZ463HxoeytxpW5rOlFspLaPa2s7d728fkFW7Ws3Bor6jnmY\n",
+       "6zg0T/ZJaum0kHfnrjfGBnHdSbVknjyhoa+ot8RkIrorhnuCrhCX7Ie2k3pMGi3bM9RRydX21O4J\n",
+       "mqKmYGhqoIpz03BoILfqrFouc3iaOJVgV1SJvhUP8Kq2XWfkgsJbCys1E4ljG0aO7KIlNT3llz3S\n",
+       "djG2FGZSA4chqmMQ66AcNN0ZpgY7u4v7p5ijXLz9jx/Lr16RVTnzplA1xeW78ryw2m3YeepZ8u33\n",
+       "dEjPMHqO1YcLU8DfoSPwyvfxwkd4fpdGxad7fPHD/LH/l8vv8GCbo+9gf8hL+wuvmYcPePs8X4wc\n",
+       "/jXO/5X7+m/Ujj667P9j705jJUvv+75/nnNO7Xffuvv2Oj0znIU7xUWiqM2WYlMLDMeLEseGYTnI\n",
+       "Ar9IgCBGXgSJE8BB3iQBEsQvYidxFMOIIy+KF8qmaYuWRHEbchYOhzM9M73f27fvfmuvOsuTF1UM\n",
+       "SZmUhhxSQ0X6AQ3cRlfdevo8p875n//y/a0U0er4yP7ygYWUmuh4wIMO5wuGGad1NsuZ0++NZNZ7\n",
+       "kSWUZaZetuXpRFIfW0ju64TUkQaGNiQuSSQ66gYeEfXV7EtkSlEhuK+Y481yJ3Ycazhnoq2vqeHI\n",
+       "SA3bZlmRXfMoDyfY0tdVqrTUtFAlpaGWqUonrovF+5wVlWo4YnwqWX/O2rVPaPXquu/fcjb9cT7x\n",
+       "KMcPWPw1LjyY1eMvbc0ogGdhlqX8b84zSGaf+2COcviWBN5BjJ8KIbzwOhfM+A93v51y3e/LYOSb\n",
+       "aRDjJ0MI/fuO/2rH4tVAtWnUuuhksacWj7y+NLZkydX8RGda2akl0vRQO6xIwkBuILMt1RFMRWOF\n",
+       "ykPXLSpcVjgRdDGVaWjZlVnRsomapr7zgi2VgcxQ6cCOqaGLVh04c6Qu9zQ6ckOlJUcOHFI2TJJK\n",
+       "lg48XSxop0tqoe8kRA/CmmKSCdW+vLFsnE7F1q4yu6hMGqSLtPZmxdKi4Wy7J2tODTu5vFrU627J\n",
+       "xwMHbdI1LpywfJfmkO4Jl6/z6z9g5v77+0Hfj/0iX9Vz+FNv9SK+WwohPMKln2e7w8G7BqbvekX7\n",
+       "1m3jJ9cdPfhxw2ffTWefR3uKT8xawQ4/zIVYuL+ceNjatnJwS+tCzfrJme7GREuqkUWtowPLawOj\n",
+       "pKlejAzrE0tJagUrMbeWB2drqXBSGVapqrZktXlFFZclBcN6LpMobEgM1YyMQk+Ii6p0LLWvb91U\n",
+       "Oe8B2xE9KtpUuWUsMwjV/EGmY6SQOJPanLeuvqhlxaqWRKbrgVOLKjWpmmWpqQZmGd2+jq5ly7qi\n",
+       "VF/w0JJD14yrL+uGuk4shVBIq7ajKjHortnbaxid7Sh+2czfp1wL4bnnedcqxZeEdEWRN8hyjdHQ\n",
+       "VE8rp9ass7X0W4BY0CZvzCzI3+TeX/tZ1HnpMqt3+IkX+Kcd/v57eeQ+p+sMIj/2mdnlC8736PVI\n",
+       "J5hS1UYebd5UO2vq1aIiTGwP2W+TnczcmgcTHi7zaJyxatZwP/JqNZvYyTLKamJQ71qTioLU2Lqb\n",
+       "7lqWKG3JVDJjhVJuINFVWdKwpnBi4shInM9NjeXzc6PmyFNSU6WaxAOZA3WJQsvYxCzV+TaJBUt+\n",
+       "SNN9XYyEeUm/L00OKBfUpk0r1bHBym15deKRTuKJlyrv+kLNZ7dTd9KRW02iC/TOcXWFx3+Ej72m\n",
+       "8b6p9qV1ro4Nm9umv/xj4st3WPkkvxBC+B+/2cj8VzXPcH5Hpfs/CEa+US9tGTyzatDdklxtyIqG\n",
+       "iwd9k5Ud9y/dcGE6dKWsWym67jUuS8q+pexEFXNH4R1q2lJDY30NKxaN9d0xseHIVN95leuCPVGl\n",
+       "MnZopKbU1HBi5juRGGnJLNpy177cvqGBDRNXZqeQSqmlsiSzHEtFcmo/JOrVqk66Lg9NIUx0Ylc9\n",
+       "LBgt1yTTiXFyS2OtTRmMRq9Ia21la4DIYYf+iXjQkl9P7Dav8GKN0YH9p0cWCjYPeHw+svfMBhsv\n",
+       "sHU2Mxr8fRWMfL96wDyHv/pWL+K7oRDCMtf/PH/0jPvLrGw2bA1GTjaW+Mqxg3P/yovvfrfhjQ7b\n",
+       "3RmO+mMhhE/zymbJZ5pq/9GChdMgP1dXn/TUxgNxsS5JJnobueX81HvudhTXCrfSxCPlVJ5kqjJV\n",
+       "1CtrZenLC5lury4bVfqtUgyZNPbV4tg0LAuGApK4qxnOrBrKLDs2MNKa/W+qe8pkTU1i6jVsmLoi\n",
+       "OFObhxFdTWMnUgfqbqtLXXVeTaLQs+GGA5ftCIbI5YLckS2JhonMWE9b07LEkkzw0KmUJDqMtzRC\n",
+       "33Je13jxyGBcd7esdF/sqt2qbJzS/2AIYY/m8BOsnDO+sqrqPSekOSHTGLKx37A4qXsxG/DKLj/w\n",
+       "5HzPDmm/xNMPearD5Y0QsiP+2bdbrpllw5b+Am97nKd3Z8HW3ffymQv8wJf4hyN+9RMoufgnv/bO\n",
+       "YZPbH+Dmdba+xMpfZmeNwVWebo51puwszkxodwsuVSRD7o/Zbs14IiMzT5payXrOE13WIzs5z14e\n",
+       "mqqpqUxCMFSzOv/ssdyOwoIpGl5Ss62jrm1JadmppoZbrmNRqpTqzfPiY9FYsG7ROdwWjC3rO5Pr\n",
+       "a2FBS8RAYcminqGRMu4qjDXiWBqeMh5n0upEzY7m1lhzVDla5zc/2CBpe/TkJSdPPe1kN0FkZZXs\n",
+       "FY0PTV1aChrlkryIhsOviB88b/f4CfbfzqW9Wbbr17+T7/PvpD8IRr5R61vEBs0PqG48I7wnV2Rt\n",
+       "zeMVw3TRa4tHFh5OdWOq9uCW9JHKYlpoGJqGoURppLIss6qptI7b6qIHFkUrZtvfNDU2tKbpdQ+V\n",
+       "1i0ZSoz0VXKpTM+iiYGGgbZUKlNXGCllUh2VXalhKOwKzse6UdKSxEoMxSw/ExJjZyb60ubQ07Gm\n",
+       "JRGqh+53rtotOtLpnmkyJTvj9R/j4y/wkwe8c41am6LtYPgai6XOKbdX2EvJb/O+PY7as3rr7xv9\n",
+       "EP7bt3oR30Kv4lwIlmP0LZ9gfm+o8TRPJWwMeO5tvPe4NFoMFpMje481NOMdS1cLw7T/dQZs8xvf\n",
+       "SQjh9ZtqPzPSe6KmmpzaWRvZ6o5k45HQnj1Rdx6y2ZvY265RW7I46uo36qZFbhKiflnopdFBOZFk\n",
+       "Z6ZFS5KuGYbSzNxsKHEXd7RD4jELmiEXNazr27Fqb47xrjTlUrOutHWzC3BHYWRgVeKWiYGmPYnc\n",
+       "ZQtqzuYgs9SSaNVNqYEzq26pbBgI7jrWNsK6zLplq0ZKXTsW5cY4Jw8tt6vXtMNEKhj94kT56Ykn\n",
+       "/hRv77B8nhc+wtl1nnhmZPHgti9vdb120NQeLnjbcUNnPDVodd3cSBz/cuTGy+wucWltNgzw49ss\n",
+       "PsGdx3n+Fo//BpfmcMTRG9/75G287TxXz74GN1ud8Lnz3DnFp2OML0MI4f/kV/4c1y+Tv5dykbUv\n",
+       "8NMvUib8gw/PWkEXIxspxwmhYn3MV9q0Trm/wFptNkF4YYLarEyzNgeWBFwe8PqIu/Wgn7Ia29rh\n",
+       "iovG2g7tKn3ILKtSSixqWDGWKwxEUSFxTtA2QBDm5qalyhiFjqlCXTCQe10imvWOLKLUsKFlrHAk\n",
+       "1bHitkHIncSWKj/naBo1khuqzm0hnlrusF62tK/kqotdewvr1p4tNepT0iZlPkv9hKnFi6n6NFcs\n",
+       "N8RQClXN1uhl/cee0N1fZ7gwG9P9nugPgpFv1LBLtkLjVy0+cWR9obTSDAZlR9ZvKE9pHrE2Kl3+\n",
+       "lxMf+9OF4t2cy2tOG5FaqdRU4lihrmeqI1rV0jEV5AYK5wUDJ051FDK5ysChCuMZZVVwMh/WW5er\n",
+       "CTK1uTFeT1Q3lQraxhoGTuyFqYape4ETuYGOussmesbJoUuhbl1PiEE/27BdvW5YX3ZYtNQnpXR6\n",
+       "onPuV/nRy05fvqlIb7Bdl7V60sHA6Wf4YpPyHuePuXw0w3K/ssnxP3yrN+93Q3PY2YbvL9jZ/6cY\n",
+       "lSH4khm/59fe6vW8OTWWZgTMKqDJ+snI/QsLji+tyEcbavtHGs0x74izO8A3KMZYhpD8Tzct/yXO\n",
+       "jTl8nH6DZpsbFxn0eO8pp6uF0ShTNgpTddMi6pYTSUzlMTEWXAmsOtOdDt1rjR2nJElDaiQayGJh\n",
+       "Lcy+5WOJhkNraHnNWHQW2nI9s5nrJYnmfAC3r9JQqGmrjIwFOSp1A3V3JZatSGVy0UjLxLboVUv2\n",
+       "XFTMzCKkWgrLJk7cMHbibUYuzo/HPmr0PmJ41OczBzzyJZuP8lMP2Bhy1qBxmQ83OHuS9c/yM58a\n",
+       "+scXtnx5v2t4fl9jvWl8Epy9PuafxBjzEMIv/jr/WYefvsZGm/2S9inrT7N3PLOTeNqMgPAGtfI4\n",
+       "j+5wtsVqg/Y8INks+fVtDp/5un2+H0L477j/fq5u8dFXWJqPkyaRy4EbE6a7PNziqMYPjthd4As9\n",
+       "Fi7SbtJpUB8yHswG0potjhbYqc+4IlVrdi4u9yvTRZbTVKXvTEfdok09XaUCU5XCbL5yoDCdz0wN\n",
+       "1SW25/t/otA1y9OM5fM5qdxEqScTDC2qLEmsiqbG9qW2NDRM3ZcqJLE9Y9U/fKA5uq8+CNJqoLzU\n",
+       "slWs2tjN1Ce5vH4oz3bsvnPb5IX6DKedHc1inZVLkuWWfEhs9eTLDyVHizaOR+4uwAHt3tfaV77r\n",
+       "+oNg5OsUYzxaC+ErJxb/2AVPFps6B2fiuYmN5p6d5Z5BxvPZjIz78tPFrLcn5059KnHqQXyUsKiO\n",
+       "yrFDXblrrmvqGTvRUGljqLIuSvSkyJ041bBrzaqWpsJE047CmcpYW2pb4VjUVNdQk6GQmlhzSdM9\n",
+       "Dx14yrKmqQ2pMyP3jZSWbFirBtI5z6RM+5bz3KPjtlZ/X7lYaS8MpbVSrTqzstW3kw195FcYbJFd\n",
+       "oL3Fa5u8+iTFMUc5+2fc+adMvviWbt7vnn4Qn/s+g539Vn21ifX3eDDSvcvOj/JUJDvluBlVvQVF\n",
+       "lTI9k7em+uP38dkJP78YQujzqRjj100SxZe58w8Y/xE2jri/xt5Dzv4qtWXSf4vHj1idJqZ514Ol\n",
+       "RGvcsr1fefgYeRZsT4OttHS3UYj1wvr0jknjvFpcVgsd0ZKzsO9Ycw4qnMx7waJc5cCBtVBz30Df\n",
+       "u9WNpRLRQGEi0VDpy02lcmuWPaZnJDhvqu9UTV00NVVqa1sSZTYkzpvdzBYFd3WNkMq9zdiVeWdJ\n",
+       "UFg1KV+aXfN7B8LVSmNSE5dzzf9ndqxefpzVR9kYMzw3w6If745d//Krjn9i2bW9uta9M5NsZG/C\n",
+       "zSU8SLj+TpI2n77K5Yv0JtRe5/2rfHyL8cKsAfnbCEamgxnp9cJnuP0hOu0Z2v72Irv/OMb4Db4o\n",
+       "McZpCGGXduBLP0zSoBpTnjC9wvICD1cYRdIBzzfpdmcmeI/26QfGgcd73FnieEAR2QmsL7LRZ2+D\n",
+       "fkWzXXhEzWqoaZrqG3lV3SUrMiP3DazKpUZOtdXUNZVyIylKq7NsupYgEe0wh571pBpuqFkUvEuq\n",
+       "JjU0cd9YR+HYUE3qTOGuppo8vt3C9IZ0bdfCjcK/8zf4zafqbv58TXvv2GCb6bQpGaxaOnng5sap\n",
+       "0/7f5x2nbJ7RW+H0vsHKtnqYCpO7qjLaqjFprRk9eJXNFxmN38DE23eqPwhGfotO+Hzqwg71q7mi\n",
+       "VjEcKbOBtcZQ0uOdH+PVxzh6hI/8Crv3OPpA5WAzUjyQ1wYGyvmk/6p0PsBVKTTtmrqqbmIomnkL\n",
+       "XFc5E1Qa7prl1hOFoWCkY9lFDU1TZ1jUc0tzXq5ZNrLhvHROW7xiNMefzTyAl0xsim5KqqY0GZrG\n",
+       "hnaMYpJaGTKsdU1qQxdD3bUkStNC0ui6s5ErhhQLbF3gXUeMmixfZDiepUrbr82Gzuo1/H4p03zY\n",
+       "939vzHP40Fu9iO+CXuWVuyxeZusGL3w4k11osEv7Tt/dpYtOiw5PXRLWSrU/35H/yVoIXyx42ew4\n",
+       "NKi/h8n7OJrOvFQ2j7nxGPt/k1tfZvinWLqQq4eG/kGiU43c3IjyUGpVwTumFYuV9yTciZSNFYNk\n",
+       "WxmitoCWFRtOve4Em/NZmYlKV1M9Rgthz6mWqUNNhdKLCssSbUFP5VUT+5qanlJa1LZnat9EUyGZ\n",
+       "M0i6WhYsylWG3q6hlJuVfiuPaXveUEZCDJQAACAASURBVBBsS+TKeS41ix2VJXnjczqPTF1dzCyc\n",
+       "lIbbNb+03VY9GGlcmrrWSQ2zUlWyfcb+RQ5bpctfPvYDzxADy2czP5hf/rkQwqvn+cCTHO8Tu2QX\n",
+       "Zwc9XyI7Zv10dvff//a2vv8iL/04j/R528c5WqPX4n7G+O9+8/c0rs5Kex/cZfmEu4s8/4fZ6rBV\n",
+       "cn6XgwV+c4FXdige5z31WfmmVXEHzyzO2Hs3a0yTGexxtZiRD9Yr0jrtIlgqKgfpVE2iHQoLKoey\n",
+       "eXNx0KoqSXLsxMSylpahidzY4ZxAElRWzJxuemZAjkxlVy7T8bRq3ldS6ai7aOaAWYleNnEqmqKu\n",
+       "qO5rhCjNma4t+J//zJpR59hGZ2glS3TGmVoYmaxMHBSJwcmJ+BsbvPsHebk3s5CIXzHdeUk9bdvs\n",
+       "tq28vOf4qWOvnB8b3rzFzgEf+534P29GfxCM/OuKpcu/eeD6b5zafzwzOJ9rDwsX7/GlKY/dI38H\n",
+       "C+mqT/3QNWmeiXdv66+umbYWpEXXNFuUuSY4lbvrgUWrFiSGxl4wdWhGCGmrdGQ6UstyAyceKKyr\n",
+       "uSYx1XTiwMiaqGPseJ6o3XHZspaacm7hUSm1JXKLaImCMyPJ7CKYzN57LkRTqdTIw1bDTjmQlTVP\n",
+       "HhZqjYmiGFt7EK08ZHCd/af4wMEMdHa6xmZBHNGPPH6LzSHFVfavm/UrvCGFkD7J1h+ido58l/1/\n",
+       "+T3Zze++fhj/1Vu9iN9Bz+Hff6sX8WYVYyxCCL/Ir32YlfczfSEYJ1ONrKEsrju9uy0+0rWwdqbV\n",
+       "7jm3mMnXVzz46Ibhaa4qjywcnHq8zweP6Ke8ts1Tr7Fyjn/1I/Re5PjXeDBKhT/blF6tVEmN9Irz\n",
+       "ZSo49aXWoYtJ5UmUaWovLEsQpCrBBVGi4UTHsZF1LaWRQ9Guyko4NVTIYksalixoS5zKPZSb4tjE\n",
+       "SMdQRzBRt6rtnMKZsddVMixpaltXSB3NnUoCgqZoLKoL6kKcEGZI+iAXzfzqojPRwKX6utWVUr6x\n",
+       "qd1MdJbq7p5/xLix58XY1Rw2lIOR1x4fO7/L6eNs/wbrX9eEujVg5TK7S4E0o3yUB5/j6fVZCnWY\n",
+       "EA9Y/jInk98G7f8t9n4vhMbf4+//cR5JqCK3C+79XzHGo9/6+lnD6+WP8MQXOH7b7P97ssz1GoOU\n",
+       "uyNubhPqLKeM38aFOo8ms2CjStgIvJxSFjOy73qN1W36AyYZm9UsCOtUUZpG7cnUca3SDJVakroT\n",
+       "B0Zh5LKgTGir1PWcmtrXMvCoSpR50WgeMM5KdetmAckmUqWhoa7UJZVEUM2nNPdFqQW5IMhlugYu\n",
+       "J9FymaoPgrtrFx2Gi6qDL8tkhrWpZpZrPKhbGI/sXKrpLqxr/6fHGuUXVLevOst+kINUvvrQnd2e\n",
+       "s/Vc1u4Y3NrV/++HfObb6/f5zvSmgpEQwl+IMf7vb+L9/4MZPOqLMcb/+M2s5c0ohHCe2o+y+hhb\n",
+       "BTcv84HP5y7u5l57P1sbHGbE2PbMT+eSJ1L5yjnlwwuWXk2EB335ExMPmk1Feg2pxFQmEy0pPFAo\n",
+       "tKVacj0Nhz6gtIDXRLsKG9iSGenatqAuzivJwUNndi2qeVR0Og9AEkMjuYGgdGwYmzpheY6Ijkg1\n",
+       "HZhgEjKvx4mzEGwYGhnppZcUr3Q1Lkwl2UR9WGk/ZHWP4eLsiztaml1TmIGG2jCapUFvn6N1b2ae\n",
+       "ZcsbDEZCaLyHd/08Hzzk3C4PlvjcL/DCd31vv5sKQQPvw2ff6rX8DvoSngxBPcZ/fezy95LmF8F/\n",
+       "Mf8jDeHpC3p/7ie5e4uP9DU3h1ZaB5KFVZ16lKlrlBvuFecV9R3ZpVsO+w9Uh1ztki3y6lP8wBf5\n",
+       "7J/h+j3OrS57+Q9t29+Y2atrLsvSaJQlimrFNDbcTe6oC9ohuGpsR2mASs1Y4UDHmWVB7obcvppK\n",
+       "w8TImdKK0nLoGtpReVRTWybYMFLZcWTFBSNtY4eilPkUzYKOoYeC3MLcj2rJmUyiZqqvUqGm7kyr\n",
+       "6ukmicxtuU2LsWcS6oo4VoY7amHN6rSjap8ItW2d3ljeOFLvrGuWlxxVr/tsreOdg5rx4titx2Y0\n",
+       "04vv5MZ5Nm6wdjoroYwjJsc8/xo/9xHuPMWnXuC9GesPWTjiwT5/+zt5oo5x8mwI4VVuXjEbp7kT\n",
+       "Y/ym5olYYKnOk19m92zWP7u3SCtjv8V2Ovt5nFCLHCWz/pNBwkacMUyqOovJLG7KFnmwzShLDdul\n",
+       "pcDWcMYAG7YIZd00YZhUepEHcWIaxkY4FvWQCtZkSo8ZW7IgUero2ZR4RfS44L4ZsvTdvjYjvYGu\n",
+       "0o5SZx5Qjk3sK3VNnFezKhq4JlpMjqkFxdaqrdDTbRw5rS9ZHeTuN6OzZGThAsNh6iiJOnHTY2VL\n",
+       "u79jfOWGw7zpVvuq6uFEWfyMo48d0/rHM7z7p+emh99zvdnMyH+N7ygYCSG8D50Y44+GEP5aCOH9\n",
+       "McZnfsc3fhcVQkiX+GOX1P7igrWlocbo0PLBUFHnV36aD36SacmdFe5mWzrTDdnJPYN26mwhKloP\n",
+       "jVcTVZ7rL2WqZJ2wjZHoSHSgMYtPFYLrenoSGVo+476nFZZQm/tLjHBFIlPoS+fFlpYNqZ6pRGJs\n",
+       "oC9xx5nr8xNzKFdIPMSyqKlhYKqQe6ipI1eXh0ccV/tO4sRiVbg6va3zBe69g/EaizmthzNvpAdv\n",
+       "52SB1iafXuDiPRo5B826wVrT660103Tfl352qFlwrRXC5iMc/qPfbg59RlS89FF+7AGr85P80tns\n",
+       "d39/ByNmgfMrMX5/I/BjNAzBHTzp98BB/XZU8ZWX+een/MQClybq233tUW49XdJIg3FtRWdypN6+\n",
+       "JJ0sSrINRePA564Vlqd1h62GvcdGjuuFzcf42U81/eY7N0yaK5rNZUFmWjTlCeNaJShUMTXI9jWq\n",
+       "qeWQ2nJmEFaV2nqiI2sGxoLHtDxu7At2XdQyVjd02VRdX2Wi7bYdQwOLasYmjkx1XTK1oBCRqXso\n",
+       "11KqVKYyQ21HViW2JFbV3Ra9LrWgNi/VVCb6IZFI1Z0SC5NwJhMUYU8ou9phWVrvStNMYqBqFoq0\n",
+       "YZSkJsVUEbfEg45bu0cmG5nGucL289RqbK2z96NUv8HtNsfPxRhHIYRnn+ddBVev0tvm5edZvc0/\n",
+       "m8zGer9jn6QYY9/XuQH/NhrO+jnGGRfvz/7czTl+mnemXMwZZdQmPNtkKcwCqjO8XufcZBYKPAjs\n",
+       "By5uEJsdd5NMTenUxIN67lrBXQ3raWYDm6F0HHKFwmWJC4InEZW+MrfruGe2J4lEZSw3VBniGaXU\n",
+       "rF02Zb77sybWBZl9hUexrHKsVMOa6O0qL2hpW9AUYkOZ7Au1TYkZDv9kbapbLVmbFCZhYJilmtOJ\n",
+       "hVFH47ShVatLyi2N6b5Lg1ecLtcdFUN6L3Hhy9R2+aXfrUCENxCMhBB+OxvorTfx2R/Cx+c/f8Js\n",
+       "XPJ3NRhp85En+ekVW6Fl83YU7emtv2BxPHbW5v/+w7RbpGt1K5MNG7tTvXO5g+VcVe+5WJ3pb6UO\n",
+       "PW6UVDODryQl5DMqo5al+eWk40wu09OyNp8fX1G5693OdEQ3VXIZ6jq2JFpmrRgn81O4NDZRd2TJ\n",
+       "0LJKquZE1LQ8XTVOh2L6JVOrClHUR2FdqnJBETOd2Bfj1NaYw4TTS2Q3ePW9s+mC9nnG1zlOafVp\n",
+       "tDlYouiQ3qyrwqru4nnx1onFzQ2Xs548nHj6AQfX+fU/H0L4a3MjxG+mJVbarP6WVOvmt3ri+X7S\n",
+       "R/Abb/Ui3qCew3v9/ywYmd/Y/mUI4Qs0LixZe/Sc99yfOnisEqqmLE5Ma0EYUsmFeKBaoNZYomhZ\n",
+       "SDJJsWrnAweuj0a6y6n+uZrxxkXNdCyEoXF2RYxjk9CXpEFjuqyqWu4Z2yjbknRow22HllXWDPSl\n",
+       "mprWNLxqoq3hSKontWxkXVNhaKjAsj17duWiptJFbBmLEplKZeiOiMqymQfVqo6mjpahA/tG8w6x\n",
+       "0p6eukpX10TpgmtVYimuGYfgJDzUtaduIqmWFGFEEmRxorCozBbkZWqYvJ3pnrz5smL5mmy4J19J\n",
+       "XHnIez/Os+/m5ByNBs/9CAe/xOmvzLelPNB4+ZOWnqiJjwWD16ZG/1uM8be7d3y3z4tpCEu/zmd+\n",
+       "kg/fm2Hiy6fpVDOC6lmdNCXvzCZiQpyBRp8q2U95PZtlenbMhrN6i8ti0nBF3QUJJo7CsZu1XD/m\n",
+       "hqG0o64RWxZD8ITS/rxX6EyiPW+i2xNt66npOrRqz0WZY7lc1DXDvJ+ZBSIRGxJn8xJeT7A3B8dv\n",
+       "y5wT7YoyidYce1kXQkcaEzHO/MnSLJXEmp1YMy5ydbnVvK35YOru+qon7o1MrjfEUBPKIMlYu/+S\n",
+       "o7MDvvgSN854Lsb44Hdr/3hjmZEt/FG+Kfr6zTTyrfiagc4Z3v4mfte3rRBCcpkfOa8WC50xVKp6\n",
+       "Xf3KguHS2PqUKyPWdiieZ+9Hd7zcKpXnKxfrNY3Y1Im5Kpk6TVJnNnBoVq2sSV1Td2riIcY21Eyw\n",
+       "rWZRoiFKTARf8qotfXFusBQ1XZmdVCotmdyZA6U1F51Td0nfy7btzxvUmqbO0kovbFg0tGHqnJGu\n",
+       "JVFw37rJfA0dC6qkodeeaJxy6y6dEY2vcHyFpTVENs74uf3ZKfLsOW71OK3WDG4/Zjo40zwXPdHs\n",
+       "SB42tM4GDq5NveMZdq/y8DpufItDP2JYMUlpfF3T6+j3Qv/SD+Nvv9WLeIN61myi5v94qxfy7SoN\n",
+       "4e3n+PF01j1464BPfuOEDBhzYW+gdufA8WpNMpmYtmZ8j2l1zmQQlLW7kuXo+llN1l6zOmEUJi7H\n",
+       "qZezddnZQztPltL1XAxtQVSm98WkKWiq5nbwedqfcSNCKs/GDiwYKZzz0BQjF6xrOXXb0KFLlqyY\n",
+       "qgsWnDk28VDuHM7NKZsNLXcFNazqI9oUDKVKM/P4+6IrZuzNDxq6Z2qsUneGZQtagpE1NZnUoQW3\n",
+       "QlNNJohiElwsl2xWe0ZZQyfN7RjbLWrOZVPjUJnG6EHtMXnZErJleWwSjt1Mxi53px75OCsjfuIz\n",
+       "M/DcWYvuKkd/62sPHKt/gve8m3fezbVe5/VNPvuzIYS7GJqxKaLvqZsv9P4VX8y4+2Gal8gXWD5m\n",
+       "a4WLGQJ54KWErJqt6WbGRjkzcH8l4UJkOc2M05oVDdvznHUpqElcwTjUvUflSO5maMzNCmdD2yOl\n",
+       "ocotLMqck80RDA3bBiqVI9k8GFlTmarcNktjpiC1LDrEBcGixCWVNZUTdFRGpralXjLAlsks9Ik5\n",
+       "1dhh9oOCsTJUzhpn6rHrrOizsCbczGSjSrpzanSxVDYnascjg8Wc/3XE3/tWmawQQr3JB9b4YCA9\n",
+       "5bnBrIwz+G7s3hu5AfwTLMQYn/0mi3szFMozs1EMZm7Jp2/id30nyjIaC6rBsfJ8qWx2jd8btBeD\n",
+       "wzpLDVa6LHXpxoa1r9T0LxZOO01XIrX81Mu1juPQUsSJkBwLYUFqT2XTDAc/Ee3KFUqpQy2Pyp3I\n",
+       "tM1Qvm09S6KxmkTUEqzrGllUyQ115R5o27SgpYFU6pyJYwtyE0MtaVKThZ6euodKwbKo5cBjjpE4\n",
+       "dhiYpplSRxajk4Wg8a6JP/13aL/Kx36E8TWupHzohPp8fPXtR7Pa6v7O+/TvfJB7/0Dzx9qWmk2t\n",
+       "4VRZT+Wd2Ws3I2H5Wx30GOM4hNXP84Uf5Afvzhpjy8AzF7/Ve74fFIIUP4L/8K1eyxvUc/joW72I\n",
+       "b1edED78Pn7ufRyuc3CfS5/j3wsh/I0Y452ve2lFLEs/9A9PffkPsTw4VdRbuq0NO1aVxUNFdqY9\n",
+       "HUinDcq+3fZY2qgsjypLydTdtVSajV3a73r1/Jlh1jEMmRBfJayrjKROVOFAlTREjzidg9lnDeNb\n",
+       "0pgqQ8uxqcqRJSPrOprS+Rh9ZduZV9Sti4JFUysW1D1m6nV9hbYL+gqVIw1LxgrRttmsxSbOG6q7\n",
+       "71lL8+fkQmmqZlFNQ6KtLZHG3GHZshVyaVVzLHWWrZrKrBvbKGvupsFgVJm0T5XJqhAPhWpkWs8Y\n",
+       "N6ndVr6Xg+GSL17qyvszz5dLZyyP+HRpPj0XQtjmXe/iR27PvsvwjgeMrvLgL51XnNuejaWMd+mG\n",
+       "EP5OjPHu9+LcmQc6Hw8h/DqNP8LjHeof4mKXVspwjUZt9hz8iln712HJa+UM01GvWMxIsxnufWUe\n",
+       "LM4sCEdqMpsm7qJZZbaSUicOfCqU+pouaUjmV+SaGdQsk+LMSEOu0nHPgb6GfUuCY8nMddfQDITX\n",
+       "ldvFER4RbakELM7JJa+jPYen7djVVehaCiPT0NYrtg3SOmFAPGfSXzWpmuTPcLwkedB1f3vJIwcj\n",
+       "4n3Hm2PHgeqUcyvzB8l/zW05hJCu82fey+NPzVJJxav82DM8FUL4X74bvmS/YzASY/yF3+bf/u03\n",
+       "8dmfNuv4/yX8Yd+k9ySE8Fe+7q+fjDF+8k183jcoxji9EMIDyio6TsZcS6XtfaPYtVhPXc4z29WE\n",
+       "83TXB+ovZBbeXspj3UKsHMcVI2sW4lAMp0bWtOSm6nhF7kghVzmzreWehtSaoJQZOdUz9UDT1JKm\n",
+       "0pqBpimWndpyrKcydGBBw2ie+BuYpeIyh4I1dZmplTkkJ5G74ljHsSWZnsIaXtcy0pHqVGeotOKK\n",
+       "V8KixXDTjUtLHlzcdLK9bzWOLbSis2u5xgkLh7Nxvk5CPr7J1nV6K8aHO4aPsGCsaE8tz5tX9wPx\n",
+       "dwgsTz/O55vcfTdr82ay/c9/t/b2e6T3YzdGu2/1Qt6gnsN7QhDi7JHt+14hhOZVfuonuN+ZGW15\n",
+       "lKN0dqX8N/DXv/raGWxr9VlO3seP/yNeP58rN3KDD1R6lz+tWq3EkxNrO4W1l+p2PhotWrVxEgU9\n",
+       "D5oVnYnXN5gMD6yWz7sbHleEBVk4UzqRxpGWpmUNuZFJjA6s6oSGYZwahXWXnSA3EDVMLCstOlVY\n",
+       "lGjK9VFpyNExlDq1ApraWtYd2J8P6ieW5Ag6ZodghKaau9pKlS0P9OcF2KZNUaWva+xYX81ijCRj\n",
+       "B+W6Ml0j3TOOlywXK2rVPWexpzU90U4q16crM3xBcdNh+qh7R23qNaHz1MwFuJvbf8cDv3rlruPf\n",
+       "5P23+cI2x1/fB7I1R6p/3U7ubTH50JaFq0/q3GzoVy29gy1JEZX/eQjhv3yzJYAQQhPJN2tonfex\n",
+       "PM/4Z2ZZnX7GaJksCbpVlKMe6x6MogtyzZLJhGmNTqDKphqxMAhRMncaK1UyhSrSiLlcXYzR1Ozn\n",
+       "JXVdDTXLODHQ01TINeRWtQSlnpFjpWMrxt7hllc95siKUz0M54iGsaghuqdy1aybZWBGJ5vitsqt\n",
+       "+de68lA5c5APi4pkSRUGatqyaVCUF5UParRfYHRT9bczN/7svsNHJpp5ZTFy/XWe+jTHa3zu3w0h\n",
+       "/Zsxlq/M+vtcNsPYtB7jbR+ZGQqDD3BvzJWjWVXjTXOm3rLUeIzx2RDCOITwa3j2mzWvxhj/yvdy\n",
+       "DQf802f5hWtOdqYmj3c1a/ctdQrnpqkwydQnhaPFmnGS6L2/J9ZKZ9PSWbPtXrKiHZuyUBiFPatx\n",
+       "aqJShhWlKCi0HAkyJ644E9V07TJnE2QypaZMYuKpefhyFwfqHlezMrff2lVJ5UptQ7mohpGGGyZW\n",
+       "JVK515QuSdREj6o08QAftyC4IuroGiSlqupYnBYW0lzv4obP/LFV2XBR6/TYWSM1FeVpKW5UyhYP\n",
+       "cw4LVhdu22p80tE7Ng13KzezY9m5Y4v90uZdnt/m9V1fK799U80tpf9uCOFfmGfFYoynIYTv5Xa/\n",
+       "Wf0U/vlbvYg3qhjth5mByVcdyX8vaH2L9KuByFd1mZMG10MI6Tem+U//OZ89x/7lWXp9ENYcn3xU\n",
+       "+fce5fCE5t919tEDO08m0oWaxTSIaem0WKPXtdCIpoF6I/VIr2f56CteurChys6kajpx1VLoKsK+\n",
+       "lnXnIvfC2+2WXdLPS+KuKqTqbs5dWHtOla4p1Y2dmqUIjxUGOLZhLJjK1HVN0LNkIjqwaGpZVyJz\n",
+       "onKmEl1Tc8OGd6mpyVUqi/p6xqKRPctyXRHnBGvFlKovD2P3akOFzFKxaVkQkjUxOZUHtuPEomND\n",
+       "m5KsoVne0FtfcFy8W+3kWKwN6bQddVaFtdInt4985stj5d9i+Kmv254h3a/74g4bnHwok7cvaXYX\n",
+       "LZ92hcf7Vh/ryG6e02vtKf+LEMJ35J8UQlhh/aNcf5pECOdusf9PYowPf8tLb/Pw87TfwVGL5iQx\n",
+       "rqfaRWUpROdDqcgy6YRY0Ltbt/tES5FGRRiowsBE4q6WRTMzw7GJQWRzUhrUR8Y4SoKrGjbVpHNo\n",
+       "w0OLagb2NbTVLMpNZIKga11wqIGahiVNY0NT0dSqaFmUiO6qSfEJk7kb2deCkZl/WeVUdKqNbZn7\n",
+       "4aFpYyxxjeqc2lmqWe7qbj8Uy+lsHjtn+pfZu8K5v8hP3uXS4eyQLUxoFBz/dAjh5AJ/9jJrHapb\n",
+       "XKuIFbe/Puy8RH+Nx/1eDkbgrRznhSLGmyGEv/6QP94xfP+Zaq3v8Zi5Mi3daybK2oa80SFPFYsT\n",
+       "u7WB3vjIi0kiNpo6aTRROI0152NuP7lgWV0jBmloSKS6DrWdN9QWvOCuaENNw9SSuvuWNRwZSnQ0\n",
+       "zXAdU/05EmkoMxQtmqhbUtMSde1LDe3b0DGIwVgHXdPYNg1jRMEDHRNvk7psiIm1qrIXSnu1Jeth\n",
+       "Tb68oKidF0e39N62av1k4rVa1KgSabsybNGr2C9oXBnb/PKLFgYd958unYWJV6qok/Klp+l/gpNf\n",
+       "eaN14a96iHwPt/i7qZ/y/etH8630VRLr7bd4HW9Uwx5J5WvP2Ttc3OedkeVt/pNmCL864Zk40yCE\n",
+       "8DfYv4a1Nf74n5hx3nuwzvgH9L/4L9z4D6YeKaKpwkmWOailGmWQtLjSZeGsUt8sZGlpeFi4txW1\n",
+       "q+iR0QO9ha48VDYkFpNCjJ83SQqlbe3Qlghz3seesSUNfTtS1wwtz+kfRxoqiwbWzG4oZ6aCfe+W\n",
+       "SwSFkUVMrKJtzdh9fQ/tW3OqckNuQ18PF7TR8IptdROVhi3nZNVDspownWqEro3YdlJdsC4q0twk\n",
+       "JvrVRD0ZawxYyqc62ZlBu1KmY1spvXBffSVKq1Wx3lYfNXX7J6b7jxndep7xyTc2p9/i5hm317h2\n",
+       "zNEW7Syz1+xo7A8NLkSbSarRL/UaC5xm1pJc9m+y922dHCGEOhf+Aj+8xFP/L3v3+StLmt+H/fNU\n",
+       "de4+fXK4+U7cCTuzecmlyF0uVzQVaFm0DEqyKRtUgAEbkG1Ihl/Y/gsMCxb9QjYMUTIgCjQlSKJF\n",
+       "kDRJMS43z+5Ozjffe3Lo07m7qh6/qDPL0WzgkrPSzED6vro4t+vcuvVUV/2e3+8b7pQk1Gtb/O5f\n",
+       "CyH872+WD8cYYwjhZ7l2H/X/lEdP6mp5VM+ia0vl2CZT6NXYTRa9uHpZzGqW01wtHRmnN80ce9Xp\n",
+       "mahgbo4rkcWCyYTdJt1YMUemph4zm0kZw7EQK15WkYaZC6Yqoj1dAxWLGlJzFZkVuUzuyFXBA2oS\n",
+       "0bJCR8UtQd/Ugj8gua5g7czVdabuVTM33FVVUXE19owMzdwxXK+axIGFcEvdQHeV4f9HMmTheWpt\n",
+       "js/R+V2Wzq7d5oD25S3+6o/hCreOWFok7vCBZ7jzwdIdDgyoT3xzBlYIoa2ch51+t7Lu9wJp8N8o\n",
+       "Yow3Qwif7bnyNxsePglCNbfSKPRVPd2pWSwIw8Sd6SN6Wa5a/4LbJpLkUBImGjGT5H1HyUVF6OqI\n",
+       "WrFQhJaxDRP7Fuxp2lJYdeScEzsakjMya0swkzmVSpVs6kV7jhwbipYlzuk5dWJb/cxvYKahrtCL\n",
+       "FdGullNjNbUwVfOasXXBvq5U1cihiQJ9wVKMpFX5nIU8VzFzvHBFrZiYth6Uv37Pl+8PukVmtaCS\n",
+       "8EOnPPM4+SW2joaWO5wkPPGzJDkvrHB08G/Soe+dQgg6ygHze81e/Q1Fzb94p0/ku0GM8Xg9hJee\n",
+       "5qEPcfcuF4d8bEblA3zlfqZf5D9+tuSIf+7smByvhxDuLPHnLrwlvvxQa7WpftR0ezYRG3Wded2V\n",
+       "WW64kYk5xZCLr0T2R/LHo241UxlEJ+0NNxYWVWJLJfQcGVgULISpJ3S8DEZSp4KgJtVQkQuuOXSs\n",
+       "0FUakmUaTnT041BbXwj0fNSpBcFLomWpRew7MpdoaMXL1vK+NCxqp1W3Hbojs6pmrG5kU01V6UcR\n",
+       "1SE5R3Es1pZV47FWHNsvxrZDIQpqYaKVlJFtw1YingbFtCft1CxFWsVIPUxcSNr2tdRGqepkKOsW\n",
+       "Bjvfx6s3+VgI4dfOupveyKbhV/8yFy6Rn0dzqvu1gdm5BdlSqjMqZNVI5Uiezt1/h/E6O0IILWV3\n",
+       "dPCHPzuSh3jfKu9/E+fkwQOOL7L3JL7RsQkhBDo/XHJn9w8JKzOxGkyqhc2sYnVQeKrGzUmi8uxF\n",
+       "yfpEqO077SQqYUFWbMnDHRuCRpipRRZPuNbmizVWptwKPFSwVCSmyVhMKxZjoR2G9gV5KOM/npFI\n",
+       "NEQ1OVJNQdPA0NyemVWlJUMqGivsoSJVM1HHBalUbv/sYjXNzc8+cc5YX6Ln/phYj1O74brTkMgV\n",
+       "OtgKQx1RI0+EUEgWqH+Ew2MWm7y+zAf/MWlRCglOW5cJl7j1Gh8KZeBPMmXhNn/uPD+/wX6P+gtU\n",
+       "T8vnzBvXPe3yow/wAys4ISyF8OUevxJj/Nc6nm/Fv/PFSDkXW/2ruQ9tZ5YWql4fFCYpzeZ51WzV\n",
+       "0XGqP2sZF7mlxblQ78iTXe24p2HDcmxrn5zorRzat+JEyyiZq8XEPCwiSOKuldByoqbivEzbxIvK\n",
+       "kO+ZQkUqlTg0RnSq60RL3cyGsQ2ZDxjr69tWaKnbNxGth541iWrsm6vaFxWuW9UxMJEYm5zJB7eU\n",
+       "qZQH6BkR96wMU/1qIV+6Kp80GEjaOwAAIABJREFUxUHdwvyi+fA1MefBMbUWs4IHK3QSBqs8dsRR\n",
+       "jVc/yp/8VS70OP5UCOGL3yt29bsIP4bPx+i99v/6Gv7KO30SfxQc8M9/n598nQdqfLxBbPPSR0rZ\n",
+       "Q/wkd7b5TAjhK2+8EM8wmzI4prFcNh/AifrKxObeir0sGK1UnZtVNOLQMB06zVnusTpimOZMgnS1\n",
+       "EOJls7wpmxXa466FxQ6Va24mEwtGoroFJ+pnhmNb2meh8ntOVBxoOFbeLlWLmhblNgg9u05FlxQS\n",
+       "qeuCniUXJGpyFfmZQVoeKmJaxaGopmYgwUVNqzJ7uCWRaarFnllo6ZjKk6gwFIqpuWAeRrrzinYl\n",
+       "UyQ3zeJcd8SsE93XLYS8qpekXgtN+3Gko28eO9JAUmSydE8yS1Uz5pWk9OPaVEp8QIxxN4TwM+yc\n",
+       "w2Ue//PRZ25d98ufuuKovigfZyb1U4PRbYvHPLDPc2eE9fv/hzIZ9ySEsPwVTn7527+0mpusfYu/\n",
+       "Wx/RvfiWHz7EI5/hx27yyv/L7qdyqw8wWU40ZoWX64n+IHr891p25j3dq9Fm0tCZ1YTaxDTNvBZL\n",
+       "p5iLRV87zG00eWTEl+rsptQjLXO1pDBMaybmpqGwrdAz9f4YXAsbMm0rirPguyMDUV1baSJ/qKqn\n",
+       "cHwWATLVEiwj0bOvau6ehpdEq1Kn6sZon4XtVVQ0dY3VVK3H1IqBG2HuROG8xLKZpbxwPeGCoB5T\n",
+       "YmZUY5bROs/1h7jvFb5ykePnFri6zYUaVy9zUPr8ml/j/s/xoy0+d5fpNr8QY/xGi6vDD32IT/4g\n",
+       "t6oUGeELfN+XS1+3X/5O3/1/54sRnGe5y/Lh1OZ2YIsb56N8PjFarjkNdfLU+QWa1YKQEYmGHpod\n",
+       "6lUOjZZSQWrJxIlLxgayMNNwcJYyM1ePtx2HpsJ1QUdUs2euaapmqozUmqi668MqBgLqjp0YSmVS\n",
+       "mbnE+pkL301dU5ccKeSSEDQMdM0MsHLWCbmG9yt52jNlFHY7UrHo8o2K5ig3S+8Zb3TtDgdi7VRv\n",
+       "fU877XtkUuYxTMbca/Jgxl5JE1fUaU5ZrHF7iavHbIZS1veee2n/YfgJ/LN3+iT+GPg6/s47fRJ/\n",
+       "FJwVsv8ghHDfJZZ+gldXShYnaDNfonqrTFQ/fNNxsRHCb32Rn/gkt1tkc5KJSWVi9fVbusfLXn9i\n",
+       "7uvrY9XWyDTJrH62vI8PF+k9umRSq5sMW4r++62c9vVXt2XrUciDorJmFu9ZjhXjJPGQVE/NqtSy\n",
+       "qQMzE11LUizakxq7LrevbqQdb5uETKJj2f5ZAHyuoWbzzF85P3NebQoO9U1CYdHImsJcalW07FRb\n",
+       "LjlTZ0yk8hAMjbWKkWoYKkIwDg1HMZqlrxpG1vKJGObuG1CMqaXRsF4R0rpqTCwVbYexbim9Q9Jz\n",
+       "mg+Ma1NxVtG+2zJb/y2t/3pfI6XzcAiLv87p331DRRFjLHC3DKu7c44vfOzEk8+MfHmlafuBIPSG\n",
+       "rn5t7uMvc6/LwVlWzV/cKSX+WeALH+crOX7pW98d432Oqt/884MWg7fMfNY/wmOnVAse3eZgQu+Y\n",
+       "Xq1QhOjS3cyFE4b7hWK9abO2bPF2bv5YUz1raeT3rFeDJOYUUa1WBgfeScvpQ56UPiU38fH8VC1U\n",
+       "3U0Sp8q4u/vV3QhVhUOLpgYKDOX6Viw4lpgqNDU0jFUcm6pZlOhIJDKZXNtM1YtyM/cbOIot9ZBq\n",
+       "FUOHITHRVwtl0k2jSBBVVJ0rhg5DYTGyEetOQ6afFioRMQgFW6G8JrsLjB/ny5OSZzP77B3+2zWu\n",
+       "XC4TBMGEeJHfPqT4HL+F3yvVkaFSfjVNL/PJj3O3WlJcVIgf487rfDyE8Fvf6bv/74sREqrH7K5w\n",
+       "pRddvc6V62y3Dz3//RcNh12X8kxoF4IDucx6QUhmNosTMVtykKxIVTTirhg6Zqq4a+hA4gG0TUJL\n",
+       "RabpNXOPiR6UueGGXa2zhMc5LqlYE+Wi5yzJbWnpS52aaZu6Kziy6NiW1GVFGXse26qhZ4xtXYW5\n",
+       "e/pWFSZKJV6tKJ/fg8Dm/MC9zZr1vOS9pMkXLYa+ZjXqZFTq7GZs9ljpM7taFjNRMGzV7aWJxsHU\n",
+       "5L5c/xkcc5rgvWBe9l0jBDX8Gfz37/S5/DFwDUshWI3RN+V5vMtxC9vVtwQwTkn75YPum+6zKV9+\n",
+       "lvo2n16m0ifuGv+Lwp1HZ35kb9cT19ntcmedV1d58kW2n+RzH01laxW9sGR4r656mgixpVpbMygG\n",
+       "WrOphZR6dWwcamfbjQSptqqKqYaop2HhzN1nqmZB3cyRy8amOg5VDBwba1mMiRDINBWOBdfNBA3B\n",
+       "0KncsUSQSFw2cAuLZzndt23Ys6lp1UTF1I5cz4tJT0dbdN7EsizeVs9T48rYjUqqbWjSnOvWeaCf\n",
+       "upfUhEqulgSNrC/HUZbbdNdSNnN4+pCFF4PxhVNL67vWjvo+8SIrE774H/FUxP/y5jU442r8Ip9/\n",
+       "lY2PzeS/NOM+Hh9zbsCzW7wwYecf4Wf+wGuoEvm+21z7eAjhN7+17XvxCi+fsLXOg2d5WTeXeH7O\n",
+       "6GkIIXSwxsYWjbPOWa/LxZyrz/D0RXpp9Ng9Qs6vPpybFyP1yqrGbiFbzkwuz4UwVYs1w6SqnwyM\n",
+       "sSy4L604VtUME7NQuI2TKnVzI2Whch790LBgXhaicttqTqUmrpjoSixq6jlyV8+mmUNV588CQ44V\n",
+       "+godK47lUlls68YdWYyOk3NEWvGuk+RE3akiFhbyWMp/04FZUphjFNuup8sacaASxwYIodBKqQ75\n",
+       "4C1+OXL9dxn8vTMen24Iv9vkby2VGUPzHTqvk3+QV18qu2P3MG2F8H2X+EyHxqC8oJfrpR79G6iT\n",
+       "d8rFan2nL/y/L0a4R3+3DFX6+jqXBmRNXr04MPvsM6pXN0wfbEkcCdWBbpx/I35qkmYaRVTJ27J8\n",
+       "7Ki6apZsiTEKyVC0KMc0ZCpYtCk3VnddqmYsM5WZmGgpPIzzEusKPW0rZ3bTK1pGjm07kDhQta8m\n",
+       "F3XUhTLBN0yNLJpgatO6KHHP3L6+Qlkzly6DzcgsHXuglZumNXfklsPI5RM++RvcuMArDyEpuY9L\n",
+       "GUmflzs1J60l+7OoWWQuxKpjmfpjQ1+fs3M9xvhHTOd81+NH8GKM/q26EX4vEKMiBE8rYy/eK0GE\n",
+       "KLkgrRB+5wv82R/iVoN8TvJFLh7y2TP5ZkL1I6z/EMkia68MHfzWkC/dLD2MhjHGUQjJwxz8dF16\n",
+       "NcrizPQpxr/Nb/5tHulweVz18vS8fNbRLIYGYc98YVXeWpbMZkbTsUp9T6eaq2Cmqmns9MwfqGmo\n",
+       "sCQYaauIKlK5iqqKuqdd0QplPF6zKJwmYzHuaJsah0TPQEXPlqaa1ImhSswthyCTGImCiqCmrmvP\n",
+       "FqqWnJiqn/EKbpupOXAOqRB2hYQsPKZSDKQGlgyNqrcchomlSpQUuZV5VVIv7d06aUVa5DYDi8N9\n",
+       "h/2B595fMWrPfeD1iQ/cZPOsSPi+W9z40yGE/zPGePrWtVPmIz0LIYQ6n32UzjmGh8xfiDEOvlk9\n",
+       "Vy3Khpe2b1FsxhinIYR/yK//OZ66jyRwvM3Oz2EeQv2nOfcxLp2ws8Wz6yz9GlnC9gr3LpZdlOQW\n",
+       "v3o/0y3yLunRUKt3w+iJhpBEkqhIG7J5017CQtFUhIGNUDOKiancrkQH7wuF+9BRurc+j5GKB7XM\n",
+       "DQxVDV2Q62iZOHZVcGwkmFk0tyizo22scEtbU64vNdWxoKZwIGjGqSIZWimuaU0O7VejYVKYFYV5\n",
+       "NrV6XHdaPTGpT6WNzJ2irT6verZa1Y1zK6FOHHpOw5ViTjrX3Odzj5Vu2/kylfeFEJ6OMY77/Nrz\n",
+       "nMv4i4tkrdKH4bU2sxvli+R2nY88wZ//E9xb5GBA9bf4gc/ygR9+E4+kT+2EKU7fuqZvxr/zxUhJ\n",
+       "wEp/gfp/znzG9Q+SLTI+4LE7fS8ujHXu6+iMgpPGpmnSltqxnRyRTK32p2KRyyK7Sw+U9rxJ/Ux+\n",
+       "ex4DUydyEysOTbSUjqyZFfecnM2IM3nZ/jPzuqChoq4ql59R5BqW5Joq2nIP45qhuzo2BU25Aw2n\n",
+       "mgaCviC3qtCzEycuzEt79wFuLhI1HSYXHMWmNCnUwrHDhR1ffbLwwdfJn+fmAs+f58YCw/22k+rD\n",
+       "muGS1t2h4eKepzZ3hf2qo5WhfsHJL4QQyshgjv8wwtJ7BD+FX3inT+Jt4A0S63uqGIExn3uG2j0+\n",
+       "uUzSwwG/f1rGR6D7YzzxST60y9I9rl/lC/8lr/8fb/hYhBDCIo8+6MAVrs1pPK/x4zddGOaqG6W3\n",
+       "2k46EWanGq26RrWq6Nw0TKeGtZpkUhGP79ltn8gL5iGYhwUXZTrFqeOQlzqYUComJqIbNsxsqJor\n",
+       "WzwNBxq6WaaT1CUSo+QWpqpnzp0PKQxMnJy1gqrB2XYjWrVsZmjbwJaOKKgr5KrGFiyrKfQca5hZ\n",
+       "k1pS8aJpWMeCVj6RJ6lqsmzL0G13TVLWijInK93N7C4k1qWyIjqd1s1rTXExce5g5m41uhBZfpOx\n",
+       "VTMrDcKs+0NeMmejnK970wvqW2NY5WTuW6gz3vS7DvCzIYQuEpIrrP9N6p9hqcvmbSZDPvxMqbT5\n",
+       "7I+S7lLZ5MKM+24zaPLKubL7u3bKPGdnbeLR8cTyuGJ2p+50reegkQvTxM0w1UwTKyF3EoOpwiSU\n",
+       "XawrglTUxIrUpuj4zAmqLrFn1VzHgqnrVswtiWdP65FM1JZirKvjQFB1TkVmInFiJjiU6GpYzFvl\n",
+       "qL6WCfk5cX7O/F6F2ct213Yld6LWWmFYOW8/3bI8b8qTmr3itnuVoVicIzs1LDLtcUWy0TardA17\n",
+       "DSsfvavy8Mjkd0MI/1eM8TSE8Av7tLc4f4neEZ3P0r5XjtEG63zm+9lZLAsNHeZP8LnP8cM3uHGV\n",
+       "k31aX2TrgH9+Rnb+tqv/ri5GQqh9jNUnSx34/lcoXjybTX5PEWP+Sgjhf6P1V3hgiYdfPHMv/iDn\n",
+       "H89k3bl78RGyumrIHVYuGsWal4o9rcpQ42jH6doVA92STxLmZ4Y0bWV/qibYlhvZ0rNnx55FNRfP\n",
+       "OiTbDh2oCtZlUsGGuVUT4zOOdYolmYmhiqoLcgsKTxkZq+gI7ukaaqiamRWMtSxkC3rpxLOBzgJH\n",
+       "GfsheGh+ST9Z1EgKSaAeNtUqMzv3HdhdpHlE5XrF5KDrzp0JCw8yfcJwMlerLct7j8uefZ6FYyc3\n",
+       "TvmdDn9qlSfbxCNmrRB+fRTjuz3h9tsilCyyH8d/806fy9vA1/Dpd/ok/jg4+67/Zgjh89fLTsfg\n",
+       "DXJ0WWM8/Cf49I2yxU/pElpscPJJ/D9nv+b+h/j+P82NlPhVtU+0ddqrjj5cWG6OLWZDF3OO27cc\n",
+       "FVGaLErymo3+K8YtZsVIsjl2Ja1byKd6lan9oi+xYCl2ZOHUUyExjYUYmurFiknykLYg1Rfdr2Fo\n",
+       "otCvkCjUzozU6rhP4ggvWJFra5uq21PVl+IwVk1Cbv3spXfHzJEoiDJLUnU1hYEFifukdkTLWDjj\n",
+       "puUkc7U0UckZJy2VwI0QqYylMdVvVMxniUfGU4dpMBxfsr5NvxUUu4furnX0Onv21goXz1rw/Sq9\n",
+       "uT+qPvebsNNha1B6gXxpi/1/+RZi8re7N05DaH+KR/8CFx9lI5CMubnO+RFPf4rOtTJpuPUgH366\n",
+       "tMQISekO210qx8of3WZhzO93ea3Lqtx4IZdOGh4ZDcWlxGJsmExytVqwUsxtp6Vishqi2tmzeSqY\n",
+       "q+rIjeTqxgYa6DpSOLJoIMokogY6ClNBIjszmy8sOtY3NdaxhoadYkW/uGOcJ+oxtWjkNLnsxKKh\n",
+       "jmJljVl00hgaPXZkYbagmly0VjRlRV08XDBvdGXVa2ZFyp3zRl+4ofmDNbXsnOU767YOU0nR0lt9\n",
+       "xqVHoskP4pfLrmL4+0c8scxjGf0jvhpjvBFCaDbprLzFmuEqt7/Es79EpcPlGUcH/PyMp/+wNX1X\n",
+       "FyN88ie4/4h5yss/xfNfDCH84ttJgXwDZYvXJWW9sB9j3A/hYpP/4Lc5bfDKp1lY41yd7qDjy52K\n",
+       "WcJ+XpNMaaeXxfHMINs2mOWSwyHnR2JoKvWFFeWQoyNYUdU/ixqPqq6I7sfIxJ6JVSM1N4wkCovG\n",
+       "duXYdmRLU1VqLjg6Y9+XBjoLuCjYU3Nd9+yBNDpTpafCtHTnaydUZuWYb2/eViQNtyvLqiHKQtAS\n",
+       "NSUyK+rZgVhhtlR3/JEtvTsPE3tcWGTzRDx90vQrlTOC0mXCLeoNPvgJVj5cMrxin9pv8xNpCP08\n",
+       "xu8mdfPdiP8Mv/Ie5Fu8GV/Hf/dOn8TbQYxx7IzEGkLYWub7N/hw3+SRsTv9kjz9Bi4flWaRJZZ4\n",
+       "5EFGKXFA6yYfWzBaXrFaHVqPmeV8x0my46pM5rpGXneS0Xo5c/G5luNPd2zGXK0SrIq6cebIqX42\n",
+       "d5RG87jgKD5sEnYk8chmWDoT+I4V5qoSM1tS+2ZqZx6bUV1VzdyKwqFLMnVdQaZmasW+e4JUCKfu\n",
+       "mOiJOpYc6RqeJVjVtM5o7YfGZ7THmsqZqXwU5CZOzZPMahFkSVCPpZdJHgOR/nAqvTN1Lo3SbhTy\n",
+       "Va1ZGYwZV+ZOl1acng683qlKtqYu7HLU5EvnOfi5tyrnQggPbvLDNbam3N0rnbOvf/vV/cWc+iXm\n",
+       "Pfb/KbPvyjyrlATf/yO8ryg5LHmFjXE5Ufjq+7k6pxFJqmWj5dyXuXmFGz/GeqtU8CQ58xVm+2xl\n",
+       "LBW8kEadItUdVDRPxlrtukcGud3q3HYS1NJgsyg8n0bLkU4ot5sDQaYwk5rIHZkZqDk885CZWD7T\n",
+       "QA1FbYmOqqroptwFiR3DM/nvQNuuy3LnFIYkJ7L0pjtx7FTTKCzI4op5JVA5ZJYLratyLQMNSWgq\n",
+       "Qq7WmppUFs1dJh8zP8/+83ywbnz4iIXjhqVvfHfWNQYLxp1TjU+FEL6GPXS6PNDgwUiWMQkh7GI6\n",
+       "ZnBE880E8wnpmL0D/s4B8z9KFtG7vBj5oRt/8OdLJ4w+xtNfwZ2381tDCMub/NRlNheJ90iWQniq\n",
+       "VDLVcm5dYqmGZtnak0X1NKg1c1kIClE1tuRpXatZVV24LBtlVqav2W88aGJTMJG6JjVWsaQwkymD\n",
+       "vJo2TdEzxCXRilTfzOvu2DGyYumMXbLi1PzMPL5q7AmlyvweKgoHcseqGg6smuvZQMUsBLG6Lwk9\n",
+       "9ZzdKtXpeUv5innl0GloWopVbadOk0wSy9jrWiTmvLbYsT35qPn4CY5eoNYkHbL5Og8+yGsFYZvl\n",
+       "e7xyhbWPcPMNs6oFZh9h/y4/7LuLAH9XIQQBfx1/650+l7eJ5/FgCJox/sFD472IEMLlh/hrHyZv\n",
+       "Mn9Zf/Wu3/nULaPP89jZjv24Sfbm3VqMhJs8cMgH2+bnlqW1iWoahKJKsqEWT+yGifM5L0+ovbrs\n",
+       "VM/uY2319pYjuRj23Q0TSxKtkInZWHdetR22xHRmJVnXDiP9MFYx0YhRElJDiakoQ2FDy8DAS/qi\n",
+       "izJ9VUNDWdyRh1xLRd9lDWuIojXRjqFg6EMaatbiPf1wz8Bc1FOVSzxojiVTM32JlLgj0yxZZSHX\n",
+       "CFOZI6lK+X0Pia1KYTrg/Cm3rjBIgsVGatIsTNKandknTG++5PVLR47qvHaF6YD9/5vJ33/z+tRD\n",
+       "ePJJ/vJHOd7gcIfNL/M30hD+YR7jtwzOjPHe3w0hVJH9ETeZ66V6L61RyQgDpk1Omlxocn+PwZzp\n",
+       "iGF7wW/8pRWzjapuree4cWw5ZCqh5HqMlsr36VGLrVri8nBquDF3bSOSFU4bE9qZep66m1Y1YnR0\n",
+       "Nip7OZa2N2mIego7Go6LiknYMrUuCcffCAMIzuE1QY5l0TEqVlQMzUycKizhCVyXesFiUhrK56aW\n",
+       "Y12wbGRJLemoxLlREmheEkNfNa5Jk7vGYcXcVC3tyBpBnN6VpcfMF5h12e1RU0orv4GhabPm5MJV\n",
+       "w80V/qttphndH2HwEHfmpM/xiS9xYZ+f3edffYG/cMYZmY6ofJ5L+/yrGOPEHxHv8mLkzUgj9+W8\n",
+       "eMXbKEZCCGGDn/w0i49wq097Q3KpJvkvnrV/N/d0tSSw5meS+sqIeXdo5WTssFrTSQuDZFGRV2Wx\n",
+       "qVl0JaEjqyRqyUR38rpZ/RVpMrIoWLJl0avGZg5dlnNmbDPVtmxq2VyQq2NRz9wQ9zTkptaRaZg6\n",
+       "EQQrZwz7uYptFacSNZnCyCWlT0BP3TiMtNLCg3nFxZBbzHmpWbg9q1mLCw7Dkb1kTevsVj+VaWeH\n",
+       "AiYhdW9wyXTyYGlK4jzHL7F5oXzIVF+kOyZ5jteH/ONlfiJ5y7XeYFDl3B93rd5hfFg5GviOcrR3\n",
+       "O2I0DcEryvyIb4pceC9hqySzDi9zEnFsfKdhcXXgmQ8cefjXS6Li19fZ+0dvHHPCC8/xH36AqxtM\n",
+       "ErFfYWWskMoUinlFXqs5MRFV9ZIPa2UN1clNs/WrFkJTNRam2YrF9IZKcuJC0bdXTd2bLerP6hqV\n",
+       "hmplrB4rstBzWF0wDnVVbdVY1/Cc0zCUmCtMVTXkZm6oOTRx4tCVUHW/iqZo6rrXdN2xbFVwaMVc\n",
+       "JmipKsSwZcmuuX1THQ1Boo9cxaHUgnZsmg1uaVX2jOstdwx1wtiK1IW8IYToFYVQSVQuFe7cof4a\n",
+       "oTpw9EN11ZA4dNnodKS60jU/ajj+7MzxzyhdNf81gmkIIb3In/lhdt7YKV/lqMHsiD8TQnj12xUb\n",
+       "f0xu2agcszR3Ob3KxjYnD3O0wMUJvRqjCt2nF+x//AHF+bq1mNroLTtNWq63b7o65bhDPmG3zmY1\n",
+       "lU9rupNCZzZ3ulB4vjO3lpPmJCG3OI/WcZAWukW5ybuKuijGwjxMHYfztsMFqY4irsniEcld0VBF\n",
+       "oa2rbarMsHlYRSFVV1hRXrwbUrdtmFlVWDWUyd1NanbiRBHuoCoPBemq0sSjR7wgDXOJvmjJVKqw\n",
+       "Jg6fpUj43Qf47JDaK/ypqD/LTRup+mQmSw8dPPyQau9+2dcf4vYzPHGNJ9f45xVihexj3D7k6j73\n",
+       "TXnqWdJtPrNQWs3PDvm1wR/THPIdK0ZCCH9a6YFwEGP8oe/uqGkgf7vpgBtbXHyE2/usvm7xB1i1\n",
+       "LhQLxo+feGbE0gHdRYoutRn58dBW44Z7cdO40jXKJyrFDUkt2pwHR+nUuLGsPq/L01Q3u2ahVrEq\n",
+       "kbgnqmgppI7dNBdkOlZlFgRR48wgKeiecbT7mk71rTvV1TTXUHPsnl+XWVN1qmYmlVo1VZd62bbc\n",
+       "R/RN9N2U6ISKZlJTiXPtLPFQNjRozuzHNdOiJ0sOnYYFoRjqFIdMTnzwKZJp3cFSXX+lfkZOWmJ6\n",
+       "juvP8ciESY1ntzm9U87mj/ZxTD2cfRbusDQr+XvvRfx1/GyMvuccpXcAb9jCv2eLkRBC80HOXz4z\n",
+       "2wp42OQrr9r+SEf96pFffpCTMTu/QvH8mw69eZfeCo0twiHZivE8N6w2tOPQJC1ks0x1yr3GOdPh\n",
+       "mpXDe44ub5mniVmsC3GonnR08y3D2HOYLImx5dy0JS+mQlyRVW87qk48EArLbrllzcTIOBwrYk/q\n",
+       "PisydS2HFgQ9wYmeV1Q0PKChI0qlKiYetmvHTKGtomlorGaKucI1JypqZhJHck1LjjE0ULEUJ4bF\n",
+       "vrrE1mzuXjG1lp1qNSsuh8J2uuEgrMizxLzo2e/cEbfHtl5k5QO5fDCzl16V9hq6kxfVl8ZOtrtm\n",
+       "ee9NBldvweISrRWO3vzDLQZdLm6X4/DB9+A+WFducCbs3GDvMvUDDteo3uZgqzRmLE7Z+iz56aZK\n",
+       "v+Eki/bSKK9VTIcr5smBfhgaFtxLmYzqRpNonE4UK1E1YZDyYFryUVJczXktFG6ERDcjiD6al+/5\n",
+       "04L1EC0VuUmo62uahwVFbKiETYkXFA7ULViUKS9U4Yqbti0qL1BXudMuRQqbmu53IpPYLZOHVELb\n",
+       "wB5exXI5Qg9zLCtE0/CQNPuCPDkR1cXhAQczJsul/870Noun/No144srbjyRWEy2zbqLsrBh+vIV\n",
+       "+WvQoP0g0+tcXOcbna1z5FXWZzG+hi+GEJ46O/3RG4VlCCFJeHSNj6XUj3hmWvLXvi3eyc7I55WS\n",
+       "w3/17T8yTf9Ah35S55VI/urb/HfrrTLwJ9zU+nDF+XGqPi3EUCHl8af5nUdZrDNs0VqgOyOZ96wO\n",
+       "Bl6zpDVY1spyi8s1q1nTJD8xS+qOKk15Hs3TaKsYSJJG6aqUtBVGZ/bNXaltifxsjthUNVUDXYlD\n",
+       "U0OZJVXn5YLSSueyTXN3bcu0nVfREIxkeuhr6BqdWQRnagpdhdM404uJ9izXSDP1RsO4uCKd5Irk\n",
+       "2Dx/Tlrra4eR9cDuVY7vjnjtWPa+EWmHIqJBusxTX+S5Y34Od2OMWQhh5YDOb/LpRfoJB2u8/hK1\n",
+       "3fegEiWUkal/EU++0+fyPcLXlIqa9zKyebkTSetn3iNtxk+afvYZ032++g9x460chhhjPB/CzWXm\n",
+       "hywdky/RWnN05ch8NTod7cuTkdZRsH/rqv4R0yQxnSyJe5nTS1OdNFpVKJKyY9q3omkqqc2EvK3h\n",
+       "JceVka0QdMytxFOLDuzGqWNbYnHeeqUpMXViydRQMDMwMBDcL5OamUjPFDhjDZklbOk7sW8eE6du\n",
+       "2QvB3IbUqiWnCnsSd0XHHhKdxiWN0VDI6ppSoywxLvriNJElwX5tzVFyUS1fEOYzh5Utqkt86qad\n",
+       "cKxfTV3anblQeUleT0yrC9ZuB5U4de87FROTMTJChW90QMZUJuV6va1NZMnxW/xxHv84F5W+iq9N\n",
+       "+Z0TLtyllpU5K70vs3GPT3yVSs4r61VZvao4rEhPZ4q1xPqkZfEodbDCC4Hh14PJ1bqFJLdVKcRq\n",
+       "ZtJiOSmLkIOknGrMA9XI3aywUrBdsDljmtCu0ohRZ5jZqh94rXlR4sQ0rCkQBG1jbTtacm0tLVW5\n",
+       "nmBg5EklG3iCuTrW9E1OrZRTAAAgAElEQVRl6jZMXXDeibF1I39SYQdHZ4VIDStmSVApMkXaEezJ\n",
+       "Jym3I62C6jYPvMzxU9R7/GyPrGH8kdTk4blw/nFF40leSM/WLy1d3sJJOcv6Bk5I5m9SUMUyq+hf\n",
+       "U0B1+bOP8wOPlCGX2TV+/Ot8+Dv5PryTqb0n8J2TWv/ZBe5XNqFez7j98zHGbyv7+i6xt0O+x1Km\n",
+       "1ayrH0PPuDO2skNW55ENPvKzfPknefkSlWYwrTclB4k8nTpamBrWGnpx5HqtLswWnN+/bn+xIqu0\n",
+       "hWxkVqw5n/YNql1ZPifUDLSMpdpJYmpb18yJ+6QKHR19M4nbgpq5VVWpwkzfqVR0ZMVM36pLoqZZ\n",
+       "zNTDwIYDI1UVURZLL5FBEszl1pNMI7LTYhKjaT4Rp31FURHzTFoUpCPTYaE3KncZ9VMGv3mb3/9l\n",
+       "fup9TBKyPqfXuHnMz8UYb56tX/08P/2D5bD+82MeOuXCl1m6w//8nclr71r8J/hcjG+Pm/Quwtfw\n",
+       "l97pk3g7iDHOl0P4ytf5vu97kxX5c5zv8VSM8flvdVwomY7pdf7EKqM2+R0OG/LpoZOreyanPa1X\n",
+       "mPxSorfX5NMfMbr9NJf6/OiSeTNzsjEQkoFpODSONetzalkhjmp6rZaYjiyZWjfUialG7KJmMd5z\n",
+       "z8Dr6dyJTKqhr6lqRy4xtYG+kRpmEuMzphipYCL3okzH3P0hM/GKXRcNLes4xETLkkU1A5kiTozi\n",
+       "RL8+1y3m8nuM77V8MF81CQPPPJbZWV5Vj031mJvFNeN8VSOuGM8rXDmRt1/VqNd9YNTSOCqM04HX\n",
+       "tjJxOuLbKuNijKPlEL72FB/+GLcTJcP9q1w4Lp0636bMv/Ik7/8EP3KjHNnD+xb5lZTnfkZpJ7CP\n",
+       "wPM/TX6hLBR2W8cOXTV/aaz7SOqkzWRxKKYT1SE3n+bwb0ULf2Pm4k8GzTTRSlKHIdfjjBZcN42l\n",
+       "0LZhroFGj+ywtMDpZDQuUBsFo2bdcW2gG3eNLIlun/XCb8tDbk1XV8/coR1BW3BBsOj3HKobS2Rn\n",
+       "I/oo11ITLXEWuNizKZgquyg56kp3wxFFQ5bdJD0tN8G7n+aVwNINLh2z9Rs8us//GGO8d/b9eHlD\n",
+       "3MjFS7tcOeHe6plCZpVbX+ZDzTc5al9j9ZWyEHnt261UCOHcE3z/nzxTsFF2yAqu/MZ3WuE/1n3x\n",
+       "bw0v/x1evqy85tdjjG+7zRdjnLRC+NXP8Ze6skaQVfomrVcx9MGXePYHOXeKFo8fcf5W3Y33171c\n",
+       "Oaf7zNDg4xcdt+tGxYrmPBGS61QPpK0Dj92ODi8um+ym+iv79rs0i7FqoF9UbIdUXswNksuiFYmx\n",
+       "um20Tc3O8gnauvZN9M1MzUw0ZFoqJqJEfqas6QqhQpyYBTJzx1KdIpUXUR5yG6F0BGzMKFJeCHW7\n",
+       "R1Elu2lWVC3uzmUrc92lwhMTLr9QbkBfq3P8ibHh336B39/lR2sszHjpsHyw3HvjeiY8/D6WHyvH\n",
+       "MTs5r+Wkaakt73ybZXi346/jf32nT+J7iKfwRAjqMb69Heo7iRN+40us7fDgJvGgJKbeOeRffrtj\n",
+       "WvzA+7nvPCcXqNYZP8faTbbv8XMDk3/CpIgxzkII7Rf4/i1WHmb7d7gxZTkRxz3HrbpRsqheZE7j\n",
+       "gXnRluxvKmoNR6s17XpmNdKJDVneVismimpVHo6tx5lOOO9AqupY1aKWgW2bFENH4Z7XQsMDRaKe\n",
+       "TI3lrll0pOqcJZeQOnFqqOFEoqUvUcN5U0NVFalBDORzKxJbo8LCPvuzkcHxhu2PPmohDgzyVaO0\n",
+       "a5IV5BsqkTDLJGlVEVbULMrmR+q9qXk3VTV1vpJ5/RC/94esz698kdYtHl0l36eyw9On3xOPm/Uf\n",
+       "4P0HZVLvc+fYv79837bnWI0xPvfGJ0MIf4+9h+lcYHzYM/+tm/ztLfvVluXjvkHtQF5ti0fB8T+d\n",
+       "xPhqCOEfzdz886nqZrRZK5zkHFYT61b1NLRDMC8mTkzsVk8t7REmXHuBh5vlK2p3q2W72TGaZR6L\n",
+       "t9xM+g6SqFUMTLJTWnXzNLVnzcBFdXNDx8g9rCIzds1csGpH5kDqnMJE5kTbSEu0qCLI9YhzIaZi\n",
+       "GAjxRWk2kceRynFqnjzJpKB2zHSFX3w/18ec3y+v0WrCRx7jxz7O/iI3f4+HnuXPPshvbLJ3i86r\n",
+       "/H7K4JCLGWGPnV3+yRsxAN8KgQtXiembOmRwhZPvtML/xouREMImfv4tP96JMf7lP+zYGOORt8wg\n",
+       "vxcYxfjFEMLxgun/VDFandi6Ofbka6Xe/fPLtJ4v54CVyLjb1MwTC/nMqLVkUr2g0h/RPNVQk8/a\n",
+       "YnFNOil0bzIYjtXutmXn5+5uzITNiqS6bJbXFEkqqVwx1Ve3qmUuc91IYWpN3VRxNg2+Ysd1D2Ns\n",
+       "pqJSTM3DrmCqG3fMk0yhKY25PAZZ+P/Ze9MgO6/zvvN33v3uW9/eu4EG0ACIhQTE1ZREipREyyPF\n",
+       "sl0ex055MhNr7DjJB6fictlV8yU1S5VnqXKcZcZZnBlJySi247G1WIoWiqQpcV8AEBsBNND7dvvu\n",
+       "9777e858eJsSRIGUGBGEKOZfdT/gVt/ug3vufd/nPM9/sWlKSSXRcXzJeCYhFrCaQDmA2ABlRESa\n",
+       "Tv/iKKY0cOIr+LOSPQImVmB0V4VQ8WBtHhhVSn3HSfFGyEF9BL5z6tHTD2E8Am4+9Xx7V0EIDgMH\n",
+       "gL+61Wt5u6AUg10S6wne5HT7447dHIxPN2EKqJCe0FbeyHtICGHOwkMPwDUBy5uwL4CpHOx0oTuA\n",
+       "P7ne00IpNRRC/Nv/BD9fg8kYrm2nDMW796MO6khNQ4k6ATpdPUBUFVqQIRgKvKJiSxuhJu1UO6El\n",
+       "DJA0gKoacpdcZEvkuSKy9AnoI1BK4Mg9+EbABdVmR0vIKvDIMBBViipmlByaMIilQ6J10VREVSQ0\n",
+       "d4e3HRJMegxUDw+NQ4lAj23yrsuUC3s6Nv/xjgPkz0iMCcmw2kGfnADVIbEEoqOQ4RbKccCXWH0b\n",
+       "d1KR9Hyq2+DtHoy1xR8k09xVUPz7XV5HidT48G2SxWvZNAvr2aOgH4Tjw3SEf24WOr8qhPhfXkv9\n",
+       "3d3Ts7sPhBDVAVzxCbYbbE0EWNEMycIR4mtfh727Ng/nJcmfSYa/lmbOlAT0EpsFzdrtS0TE6GzJ\n",
+       "afrxNc4rQVOF5O+NuVJJOyQ53UIkeYpRB2VoWInN7Po6wopw+hmiXofzd+3D0YpktHRgH1JAskST\n",
+       "iGliyspCEGIIiysMyJNQpkeP/Ui6u7osHSFjNEAoiyTUMJil2nXoix5hdxk6ATQH8Godtiq7Y5Qa\n",
+       "6Fn4lT1QkHDvQYgTuFyEsw/B15+Bk1+B+zR4oQ8vd1ICf38DRklHFDs/SPWkILpRpRKksqM3xE0v\n",
+       "RpRSW/xnmi4JIf7xdf98XCn1+NuxJgCl1CUhxO9A+N9BpgCLY/DEYdjYhoMtmO6kIUgZUxAK6CYC\n",
+       "R2WIzRJRS0HYwywo6q5OJC3CCFo9m836CUp6nfolgbj0Ipt3KuT+MsVeiKcqGDWPRPfwsehgIJlg\n",
+       "CCQqIhY+FpsosmzSJqKLTQEwCXazdm1pYQmJrQb4qo2rFP3YZuDlqLPDdhwSaJKJOC0ucsug6jDj\n",
+       "QiEv6DBgb2RzbvMOVlZPUzviU2jB+HVEUzuBrA67RJY3wTDN5/6+D1kTsoPU2OTdhk8Bn1aKH7Gt\n",
+       "/GOHZ4D7eBcXI5ByQEg5fj/MCC2XByufhiqxP5U5nwO4CNObaWv/ewy2dhNI/y8hRGX3qcpR+L0a\n",
+       "emNAcVpScWxGXJNEnKVVfIV+rohvKzS3iudIFoRLXnPxNZeGshgkZTzl86Qx5Lhq86Dq8bLIsyhK\n",
+       "qV28kpjhFJ4YZ1PvY7GAoM5IopPRE2wVoYQk1AokykNXXXTRoyhrmCi2tIAhm5ixYMzVmPd1XC9i\n",
+       "UBb0C4pazyRvpo4nXneUeHWdTHwJb0yg9ITIaUARnEEBZSX4MqB8FYZLMCyD3kgPxN+bN/ID9qhB\n",
+       "OjJ5G9E/B+cegugAfLCZjmoSDUp9uFPAk3fzxh0YswaNR+BlF2V4BGYpHT0rLZ126Kk7qPWnkPsE\n",
+       "TBahkAFhFrgiEgrEWCKDCB0C38cJTbw9Q8pC8FPSYIKYLQEbokuoG2ybCUNjjLGNMuVmh6i8TvVM\n",
+       "l15YxSsV0cYVVsEgI0BoFppw6NJhVOpYUic2BLUkZl3TWBBp3Y1aRgkDXZ3Hp4SGjU5AJF9BJFPY\n",
+       "6zUiqRMEOZKtCyAsuDB3HfEUYBXmT8Da0XQ+pVkpqftEBMlhOP9RePbfwcwV+P3XKabeShzGlcsQ\n",
+       "HoJMbVdZFYB+ftfr/41wK9U0dwK/DxwTQnwN+Buvb/0opf7xzVzDbvT1H0Lr12DiPpgcgrMKL9yd\n",
+       "dnNzV2HhniFbhXH0SxFDe0AvakGpR9WxYQB9usSZDkERtrxxpF+hnAjAJbQlJd1FhmfolzL4QsMQ\n",
+       "BiMx7GhZQmmitAi0HVABSkRIZolkiNBiDjEgwxYhGg1N4KsSe0LFlhjgGkNsAwQ2rjeK2tjhWm1I\n",
+       "vZNgRwmbY1DrgV+HOS9V4q5mXbLLbUoMqRwSeO02ccsnEWly6Ug7/X5uVsDd5Ue9OSRcugjtOowe\n",
+       "gm0NuAbVV1JTnDM3c//ebuyG4v1t4IdUd72r8AzwMeAPb/VC3kEMBxAPwcxd170bgDVMi5A3DHVU\n",
+       "SrWFEKN1+I0q3DZD4q7QneljRQmOkoQO+Nk+t70IwaEqHbNHXInoOx5ZPcARFpoapSzrlJKIlt7m\n",
+       "29oKIyohpkst8ajJIjEVnCihF3v0sy0KBByQAxbMUXpqkHphqCyx9EAMkCoE1WBUDnGBIO7h6AHj\n",
+       "scC3YT3JUNuOcHKKThVyLUmQ8+jsFbTWIDFHiAkQySmkkcERs1Q6BXJLCrmzxOahNuoVmDuTHkou\n",
+       "1+CZ90GiCVH72xqtnRpkXGgP4cyuPfs7gN5T8NJH4e4CRIPUOr6RgcxZGNmAM0d442KkuQXhl7Hv\n",
+       "alKeAFsYDIJxOitD5KnX+CxKhaeFKHwawl+B6BDE2gBDKwA6fSkJZJeiobEnihBIirYgUYKMEBxE\n",
+       "kVeS02KHbauCMdhgpTpkoNrkkxhXZjg/XkCpPJ7bR2UitqwMuoAcWaRKSKKArhFRC2ICYTIiq9S1\n",
+       "IR1iHE2gyR5D1SQObDRhEBl99F6WyqV1/HpI37FIuoswuQnfNIEJKExCPwbxEuztgPNRuPws3G/A\n",
+       "TCE1BNSvwCMObIxBP0o7INFuGq98q67nSqmhLsTnvgh/ax+MmqAWgVX4OvB7b/Q68TaYmd4UCCGU\n",
+       "UurN2K1vx98YAx6A2x6Bj52G0u4p6dw4PLYHrDPgagbhiRHM8QL9oYu8u052rEotMoj8VTx9mYo+\n",
+       "4NA16PdGWTZn2WmNkEm6JNmzFN8Ht5lDaki27f0saqngtyFzSIqYcplI6PjuJlFGJ6dncOigi4g8\n",
+       "ZSbZQWAwpEVfZZFBSOwl1HI6U7GioGv0Qo3LrsbaygD3TwSZv6mY2i8YV4qMBaNB2pXeHsDJz4KT\n",
+       "wDeOwtUeTNRh9ghUM6nHjb0Cwy148SXY/mc/jBGREKI6Ap8ow7wGdGFlC774Wj7IW9iPm77nb/73\n",
+       "+QXgt5TiwVu1hpsFITgE/CelmLvVa3k9bua+54V44AT8zP2wmoNoCOa3Yfo0fHmg1PdxIIQQDqlx\n",
+       "hF6DRz4M+TW4+37wNrFPbmHXupRMF5sFHC9haO5hUxzDu7gKE2cYn0g4HGvkLYsYQYsi60yQk5Ie\n",
+       "C3S1DqNKMJcYTMQFGonNlmbgyw79TMieVkQ9p7FuTdPWPYbagBF06kriazENaaAHJiNeHlYaNKZ8\n",
+       "DhYFdV1nK5IYgJlI8suwHVt0shbnnH30kxzkauB5aW683sc2mlQ6Cfmmg5QhtttlZENyPoFiF7Qi\n",
+       "JMfh8GWYvWZz8cEi18pHWHuxguycA7EAn02U+s9SOL7VfRdCnIQ7fzu9zGh+KtYTEtZn4KUQ5Jeh\n",
+       "eUopdfX1r9VE8R/l2fcLo4w1bWx/yLDQYCHvsvR7Sqmndn+/yMHfMSj+Dx4jdcl+Q6HpNQJdx096\n",
+       "bCU16q8MMUYG2NmQPSVBoEuUDqF4zSQYXDQaUsdOEuJE0ugbGMM6ThO2Z08g3QIqXENOZyhpLpa4\n",
+       "iqkUpbiI8Fcwsj5N3UZoFo7SwQvZ0cfp2nVMN8Q1XMxuDyfp4BRq9KNZBt0GyihCXIDtPDxxGgo+\n",
+       "DGsQRSnHamkWDkyBU4aDWShkoTQC3gIUr8CFHFx4Gs5l0wL+YAJhB57pwxNvxhN5g/3KkCpQDNJx\n",
+       "auvN9vzHnMB6cyDSNLf/6ijcn8O6w2fgbPCl0Qb3PZMaNR7dhDULnvqSUmpBCKFvwkNDSv8kR927\n",
+       "TOBWGWYyhIUmNWWxv+VgOxFJEWaiBmRnWf3qEFGV7LtPZzZQOEKjaGwQK8FFLY8jIrJylZ7ewg4l\n",
+       "Sh/D0etUhMSmRp42fTbZQKOgoCeKBEKnY2dwjCI5IhbtFpkkYDZUzJg+3l4T8XdLHLIE5UGfYSlh\n",
+       "w1QMLIgtReFyQskFPYZeER7YhBPfgqfdlN9nleBKCFuvwPb/+8M6Iu5yez6TfpfRXpvdvgvx68Af\n",
+       "/8CfenfiMlASaYX6I+aJvHswhG+dAlbhwTyYA4h24MtDWBAi9yHQDBhcBpZ12DebnuZswFmHe1x4\n",
+       "ugQL30J7JIeqh5QtF93cItMx2deu0q1JOjkH77YeGTPHIU1R0yOUZlMkoqJ6+FQIsDB0B4lJX0ks\n",
+       "LcK3XSpuwnhX0neGbBo58t0Iw4yZ19c5RYGenqGFi6s8skrHIIPUKyRyh+GemDFNIx+BEwqmEli1\n",
+       "JH0JrbrFWm+crYUMfb0I+6ZACchpaWcDF+XVCc0FRq92efAsmAm8MlnAP1Gk+WIbp+Ty8AtwzzWD\n",
+       "V49NkVVZjq5u4M1+gJ0LM2D/JfyiEOL/eIdCMc/B1jl4X6jjThXZOJHBm/TYqoK20uboALp3CZF/\n",
+       "VKnBdywjhBA1uK3a54Nf9NmaMwkKIWNXYsZbEBwTQpwC5oC5abhrC3OYUB5T1DUoJC3aWLRETNZq\n",
+       "sblfUhoq0BVDUzEi4LyW0phSDRU0kRxQkosiJbnuswVxPqJVsRnRzrJQ2I/n5QllQtPbhmyEHZu4\n",
+       "pktUzuOrDLGy0MQMhaiNq+kEzihGYBJaeVRsoXlr5Fo19rYzdHN9LsRHiMiD8mHxOCzOg/0XULsI\n",
+       "f0TKGUm68LsZOHYSuhp0V8HoQiUCOYD9F0BU4I6TcPV2OB+BfhoeeAHGhRCffSsuubsxDjdUud0I\n",
+       "78liRIMjR+EDH4HFUxgHBMXBOEq8zDP3Npj4GmRjyCjSixJKqUSI/AmYjCI03aFrzDLhtvHNDJOa\n",
+       "jW0V8csdop6N09Coja2x8f4ejm+RD/t0dBgzTKzYZcJaYJ0MQgkMzWdCKHLJCKecPAVyIGNizUNS\n",
+       "psAWDQR9sZcsFlBCigBlrOFTZEoVGHCNqznF5NCgiEO3bpNxe2haGdOyKMV9PC1htJcnsnY4e29I\n",
+       "exPCAO5cSkm6H3oeli+n7oWNKmz869fHgv8weL3Hw7sJQrAPuAv4hVu9lpsBpZBC8Cwpb+Qvb/V6\n",
+       "3g7snrwOiDSEZIPU9+Z7Lpa7Lea/FkI8TaruGkDmXtj3D+BgDLqEhYfh8ukZBoc+Ab2R1C47P5aq\n",
+       "CI5HWDGMts4xnNRwjA5l5TNv52lWs6BFFEWXttUmb5QoyiFKdzFEFjvRUGJAXgQEmpWq30Qdy+vQ\n",
+       "diSaFpEtRHiWzlAaxP2AxZykYhZwjVk6usO+SENnyEBbIyJLueUzsASRcNAyHkUlkTq0hcboUDAf\n",
+       "wqKCl5Iya8FtgAczU5BEKUeAfalVaf9ZkokCsbXNwok2Hzmt88LcXk7fu5eoeQd0nkbMLfDKnm0S\n",
+       "3SLenyPX0RHSo5TfZic/Bb0JqK2kJmQ33dxwV/H0WfjibxUo3VvAVpJBdi+8Wqe2c4bVfZv89DfB\n",
+       "f0gIcfq6EVIJRiSMtCNG2t+tmiINxDHY+zuw39AZzrdYmusy3KPImIJCIjA0ixEhycQxA8tmy5pA\n",
+       "Y42glPpgbZJO+4qkRN8uUFSpB1hOgLKhZhrIxCXKmmhhyCF5lrN2Bk030SmjolECTeFrEySygxI5\n",
+       "RNBFl0OG5giRsx8R+UhNI1EhRi+DHtYJ6ybmyyZTos3C7VWilTHYvpoaYxklCPaBsQhjpPwotuFR\n",
+       "Cx7xICiCNwLrSxAMQWYhtw+8echl4eAiWAfg5Z+C5W042EwNXr4jq3+78Z4sRupwzxFoG6CKBOst\n",
+       "3PkMpfYEfr7B4gjMb+++5xtCiP0a+s/X0X9zklaxCDTpawGmYVNIdHRTEWYd4o5DkpQoLTYZ6FuI\n",
+       "TIsoPyBOJEFb8OpEjIagnOgkmsYkBlPSARFy3skTigya8ImFicLEIyZHQsQMkjw5FCsUMMmh4eHT\n",
+       "xldZHFHGVdu0Cw690CDS+8S2jhGWqbgGBc2gF+1wMQ+eyBBNhwy/ALN3fDftFGC2kz5eBdZu2ajk\n",
+       "FuLvAp95t+e3/AB8m5QP864vRoQQe/bAfzMPThbUMmiLcFoI8ec3Un3sntzbaRrE3Mfgb6ym40qA\n",
+       "2wR87pEZrvgjuym0RRispTHpIxexqgUqmkFxEDOtx5gCIhHRK9nEoY4mm6AkkUjDMC0ktvKINBsd\n",
+       "gYfAkkN8zacgLVzLwBd5SjJCigHK9HF8xWYGas8L2ocnkXoWSwNN01CyRBbFthziGhG+GTC0TBxd\n",
+       "pykirESQixQ7hoaIE/qWwOocQPPmkf018MdBLsJYBmSczl9CQRLmSMwy0mhztVbl/B178aODhKcC\n",
+       "MFzi2l66Tp3nR9pknQH19QLTa4oIK+UVoFIz3HfMpVgptVoT4sWTbGo+1mGDqTBHrmeiR1P4+ibr\n",
+       "1XQycGGW3SgLoJMal0nSULvXsDQC+l742QWoeoKLZR/jngzLjsWObrPDkESXjCgdy9Toyhhh2BTc\n",
+       "AlEY4NuKpoCmgGukXZGySuXHCBAJWLpATxRCKAzNpeuMo0caecaRYZMoK1BCYIVzwJAYg0iroYwK\n",
+       "uOcZZnWUVkIYEqXyaD2JQQ9pZgmckFgzKfQF+tCEjgGhkZ6mDSsNaapOwKf27qppVsFfg0dPwd11\n",
+       "sA1o67BVg/kubJogpqBTgHAB9rThWhU6E6Behir/pRh5e6GDY+9+maZJrvXYmg6gYpCY0CjDYgbW\n",
+       "HoPyL8L4L9cY7p+FgkGcL6EGI5hBk67RZahFxLpgUlPoeojKh8S2Q7tZIdlYIXfSYsXOs1MvoMkA\n",
+       "ZW7jiDJt4TGOia0Srog5QhGioxGSwUQSAz7ubpJFFohYx0EpE1tFxORBtHCFoBybuKbDUE3R92vE\n",
+       "5iL9zADT0an2AUwycZEg/BBbT1wFnoIXoF+H5XJagLyG1RJ0trnOXe+9ACGwgb8DvP9Wr+Um45vA\n",
+       "v7jVi/hRIYQwp+Fv/Qy442myKCeAx+Hk0+ld4fk3frV1IA0gda4rWHQlGAkVi6O7lwUEUIeXL8PH\n",
+       "h8iKgTYUeFGLMIL9gYOyEhpmSCh0OkRg1fBpsKJH7BOKNJ11wA6CTVYxDJsSNgOVwdEdTDnkmoKi\n",
+       "ZhKqiJ6KyW+CMWHTLlSJzSymlLQ0iaVCNHLEhPh2na48SNRvQTFgwwnIKIODnsQ1NLoexEOb2Koi\n",
+       "e7k01U4NwOqQNUIc3YT8NYZ2QtDxcAcZZAKPHizRMvcRfLsOjW/CA7cR9bNkxDpKmyC32mB7zw6R\n",
+       "GKW1VAd3Awpr6bXiLXHDflQYYFvo5YjMZELOGWCOCMLQQLbThPdYsevSC+kYWYjqaXj6BNy5W4Q2\n",
+       "svDcXGppUPUAYpSWgXKWyf6QayWNvqiQU10aWkRWaqwrnZbRwap6oGm4KkGEYJrQFmkDQigY7NrH\n",
+       "BwpcXVFPYlpGnj7jxBh4hkFfhlh6FosmHYbERhV0CxMTDYm0Q6ReAZkmpKskA7qGUiYoSaL3EcLG\n",
+       "lkPaBReZfJ3qkTwi0hj2SvjGBtT6cPRn4UsTu8XIDmT/CsbOw1MnIDMJg+V0HKnthRcbMNWH8WIa\n",
+       "qCf7UKlCp/VaNPHN3df3HlrwyjX42BgMsuAfwX1yiaV9ZzEPw/AqNB8FIw+H/yE45REGtoGtEjTa\n",
+       "uPkCelxEam2kXmAoh7RUTIUIO97k6rE6KxfWOHAwQ12MYl6NUNMujUyFtq7YUUUK8SZtzWdHlOiQ\n",
+       "x2CAzhYt6hQpYKiASDTo4zBQFWJhYdCloEIUNtkkoK/DBpJtzcVXE9jeYeLOAKM2RjsZ4Bo9woyJ\n",
+       "DGIa2hTiqg76dno8aMLmF+HRvw+j+3WsOMGMYKULG3/6FtMzfxLwi8BppfhRowZ+3PE8sF8Iakrx\n",
+       "Nvk/3BLMzkJ2nO/+HzTgKDQuwz28aTGCAO37Pt+KSncLRq8/O9eheQZOLWDmLEqhz9yqojkmuDIL\n",
+       "wtFwo22WjVE6zKKrkESLaNAmlB4ZTVJEoJPwYDKkjcaKVkago8hSUnmSZMiWcLFCnWLUJqlCr+Hg\n",
+       "2hqhbmMkHkibCgmJNiRQktCs0YnyZEQB3WsRGy6XZMDQAUvEJAEYyzatYgEubsBcDOHz5GcqlIME\n",
+       "zYyQahnTHuJyhP7nL+MVwfsTC35xAtbXoJSFyjwsbTGYS7CrOl6Uw4s22R6bJHr6GZi5CNE6fPqt\n",
+       "xMS/HWgi4hVKt81QWRkQzulU3ITIWmM4BQUPnjNJTUmvQ/vz8OwALt8Ljgb9Hmx+DT5+7LWfcPDL\n",
+       "DkZfYFt5ksDgij7ANgxiFaCJIpEapRBCSA/d1JhXCU1A8wyuOIJAS9gREkkq1qpFIJTGFT2LriwS\n",
+       "oREowboaI6CLrRtYcRlNDJB6HUeZCNFBoSGFRNM8EjUH8SJoNkQzqKSDby5hWtsUlzSuzQna1TyH\n",
+       "V3wy0iOuRPj2n3L1d6EfzSKaAbKqUF0BjIB7FKKvw1NPgFtKA++yHwDjMGw5aUdkpgRmDEKH6AKM\n",
+       "LqZF/+LN3Nf3ZFIpnb0AACAASURBVDHiw0un4KSC2T3Q9sFaQHodgv9dKf9RACHGfhfsukCUdcak\n",
+       "Trkn6doh23afUNeICLDlLFpnne3cGm0zw9hlA8I+udEhk1QwWwVy6xHexoDSHX2a5SxlaRLpWfQ4\n",
+       "pCMtepZGIkfQhYulttnR1hGiCSiE3Icm2whjH0oOGIoWJjqBvoXAoDhs0jWmyHWniXp5/ERhDlyU\n",
+       "XWPg92hTwm6PUz6f0LMvQGUbvqCU6ulCjM2wpE2yUtAw8i1idw15oQdWRYiPA3TgAqnz7U96cfL3\n",
+       "SEMbf6KhFJEQfBt4EPj/bvV6fgQYNzLAsdLYXvvNXxouwBUdbtvNvYoFXB2By+MrxGe/AfuOwo4F\n",
+       "8QKMXIILCdGih/o5qHhlFlWRq7FGmIAnBwyGJrqxTMaQaHaFQIX0dMFI0gUtpq6gpEDFIZcMHVtk\n",
+       "cQkZSptMrJExc+Q0jzhvo4c28d5DWHFCopeJVY6ObNPXFEo2UOIQkXsAkiZBbgUjEWTaUxS5ghEp\n",
+       "BhIKz0H+miTZa8FyFtZaOA/sMLvawM1HhFWTXKIzs6PoDp/m/P7NVPl2qi7EibOwJ5PGlisBMkF2\n",
+       "RvBO5VluSHj6KlSfhG960I3g4q0gqyeM1heprJkE2Sx+r89StYlpbJIN4OkRWP03r1/XrhHaV4QQ\n",
+       "3wRmoXoPjNwNr94ONQEzKwKkgd4I6JfHcFYzuNUqsdHCqwVgFCBUjNIgG8ccSRR6Bhw/wyljL3o7\n",
+       "ZNX2SLwm0gmYGKacEVvPsVQsk2VIw3IIggPoQ5vcIMQbjUgSH5Uro4kdQjGLJkpItpA0EZoLsQ/h\n",
+       "EKSbcpicbVSwRdgo0+hl2Cx22W/5mDMu9lUDTdQwxyUHfIcrwypJZgXjYUnviX3IKwB5iBxwOko9\n",
+       "Rhp0N7cGv3E8nVm1h/D8X8NdLuRisJuw3IAbjj/fTrwnixGllCuE+DcdOFGBozG4jTTR9LqTscxC\n",
+       "XFHc1W3xvF1Ib/6tGK9usKUFRN0Bo/FVCr6ieL6AUe1gaQlq3KdYVWRXynjFhLBvYLctig2XnJ7B\n",
+       "jHcIslUMt4VbCDHQEYlJLKp4jBDJBcqmQU0qdFyGUmcnvExk9okISEQDXcVMxYoxmWD2RmjFgq5n\n",
+       "E2/1iMoGohtTTHY4/q2Areom27OS3sYQPhPAc0IIexZ++WdhvYZcgJAY+I/wkRm47/juqeIyfOAS\n",
+       "fFsI8Vc/qQWJENwFzAJfuNVreYfwTeBh3t3FyNpKenE2sq/NVYCrMNKBJ97shUqpTSHy34QvP5xu\n",
+       "+9K9YMzBSOiT3fsUy2uX6KxbIFvwFR9eSq0Yzk9nufzwAZSexdjMoGeKBDuXoJwlmR7HkBFCrqBJ\n",
+       "j1yYo+528XKQl6mb83Q/YNPc4pIzi2uGvMqQSdNIXZeNCD8pkHOniXITZMwQw1+ibTkktIkFafJw\n",
+       "XAB/BWyXxOhgBjNkLl6kt1exdwmOPw704ZmRPsE/fwIePAZWHyPy0KMBZS1k+pJJTpm4+ZhkbB3r\n",
+       "jCT8EsAO/PmT8CujKSm4aKeE12sH4IwO6lUYDeGprlJvFjHyDsB0Bnz0W6e4NlpkdTomsobkB+nE\n",
+       "aPHT11vD3wCjMPv34UQCh6/BU2W4+GHYOeejt/tsJyVW+jWKDZPCqk+/skg3GFLrb1Ibj7AcRVlA\n",
+       "P0koBODaJoOwjxMrxgcq5fv5ClMLmVmCKJvHmZvA1Hy8MI87sNFdAy00iX0fx9hBiCmU5mHIU8Ro\n",
+       "KDxMVlGJRN9xiTcyYNgwtgGmCwslYJRE6uiJQ6ltYMarDOctjCDL+FqIqys2JLi2A9ktMsemGS5a\n",
+       "EK+mwTqLr70hSqlreSG+PIRH9gIRiE14fgW+puDS2+ei++Z4TxYj8B3Z0dO7jxugewqS/xZq0TYz\n",
+       "A5OlQgVNVwTCRZMRqmNQ6OY4cFVDi7oMbBfqkmwOVACToseG6RAfcOCKIlQ6URBjDjbZt+7Tqubp\n",
+       "ij5dc5XAOErcrKNaT1MbGzKbn8IcdpHZEoV4gNQNtpINxnSdidhDxYpuLNiIYvzeFs2LJvSbkJGw\n",
+       "NWSoNwl9uNTpoxp9hp+G6AkgsuBkCX4uC/dfgIsH4cooDJehNgHVOkSHd90WD6Qi/vtfTu3gl96p\n",
+       "vXmH8TvAHyj13ZvaTzgeAz5zqxfxo0ApNcgK8eWvwiePpOF3wQqUzsDO8IdwmFVq8KgQ4jJc+204\n",
+       "Pg7HLsHYDiQ6nJ7Z4dubSrV++/rXCCH+YAR9eoSsI5GmgVcxke1ZKOkQlkmCECvKYAWbBMUufScm\n",
+       "xOGCSsgmEXUBg6jHkacu8eyBvXRik8FUhO30yOp5jkeCtp0nF3u07Qpooxhtk7C7lt6EuvvgCzbc\n",
+       "WUMrKYxMl7i4QXtvm2QLXnoWLhrgh7D9/yglXxVCnFlF/5uS8oOS0SqM2DCYieh0I45eBM+C/S5c\n",
+       "mCFNPO4LIf51AyYc+KgDJ++C5RD0Jag9B1oLlseF+O9NGPVhZSf1n7jpSprvxeACbJxQ3LXU5a7d\n",
+       "61Kgw59NcoMANyFEFcr3g/0BGH0IDgPVBizuh2MvwPYqPH9SEfy1z45/CGlk6FouenmDuNtGfB6m\n",
+       "Dnj4j8BsklIOXR3ORxqDgYUXhMwmITlTogUSKRSxhNUWjHUGiIxO35sgWZOwuEiy3yGpXEMfUWix\n",
+       "RmKBsArYIoupTDRfgOoS6hJ/WIOVNsy1wDXhpTEoJzBbB1UguXwRP1ei5CaEo03iDOiNBM+qYTVb\n",
+       "RMUCflWg57ehtAq5V1Pl0/e8TwOlnhRCnLkMM3w3C86/6Vt5Hd6zxcgPRvQ4bK3C5YmYKW8JxSYr\n",
+       "OYHj2uTd9zFydYnNcZdy3qYohyQTEk2D3AaM9qAz1acoTXpGnuFcyLql0xZLSAfuXOzghB2q05As\n",
+       "WVzKLREZFxiMdakrB8fzkU6E0BKE8KiqLiGTHA4EdqiwehpHGorzDEl8l61WAnMCfY9PJruNMRhi\n",
+       "PQbNx8B97LWuRkmITxyH90+DcNLc6+nTMHkCntiC8WkIw9SERE9blqgDECyk396fuGJECPYDHyYN\n",
+       "xnuv4GWgLgSzSt18OebNwm6+1NYK3GlBoQ3fCuD0btfTBO0w1I5CEkDr1A3So7ehsgfuugSVXUm6\n",
+       "lsCJJXj1HiHEtFLqesv5Tpbk4j30VwSodZg+Bx8/CGyBv41rldBbimGlQmIsc9U4QCnKopuKXrDD\n",
+       "ortF8UVJsFom8j8MT32V+NeOEjtPkp8ZkrU0XAYMREIiNHwtwcw7aKJNYPkoOQXNdfSVVXI1ENEW\n",
+       "YqfNscdC8AXPLVbY3vKh6V5nxR6zbwbe/zhsfgrKOuzz4EIVnrkTeBYO9eDSGLsn5d1rxboQ4jOn\n",
+       "4NQafFBPw6oudqB5B3zyfdAehc46TL8Av2EI8X/HSi3cvN1+PTrfgueOgZyE2Sb0MvBKBTa/9vrx\n",
+       "TOoxMvubcHsJzOOwXYBxBXYHxoaweB/s/wY0T8MTn+/D6rNwvIY6GhMPd+AlUNuw8kdwuw8THtQ8\n",
+       "iE2wcorHjYBKFDJZElQU6HpCJxPR1KCZhXs/36czd5Vn7jlBcG0G1pbBfR799g4nv1igc6jK1qEm\n",
+       "XlbhOzaWlCTJGrEe4fsCVfLRH87hYKDTJH5/hqhbIsqOp1a0UybrrkvBMtFiC2V7dLOCdTVKtmVQ\n",
+       "aLzCJn0G2mUQHXjUhWde7wsjhBCkrm3nblUX/L8UI2+A1A66+L/BlV+FzjRohYDbz8Kqp9OTPlY0\n",
+       "i9xu8XK8Qf62NlkJIy14+Ao4MZwHlqdbqGaHxb0GaiHk6FlgAp48AHkH5s/B9JMuwYEMnSlJZtQl\n",
+       "W7WoJQPcao9AmehyB2VlqSkDMxmQaJJsYFKIQnJZk8vZw1DOYtZ1xuQ4mY1RYjMmvneH6QGcCoUQ\n",
+       "1wB1G9z3ECxGYC3C0QMwlGky4z4NZAcqCYZ8gczHQaky/lJE3JXXMdN/wvCPgH+pFO9Wk7a3DKVI\n",
+       "hOArwCeA//NWr+dHgVJqkdeR6tJCZORX4cg8zPVSpePFu4XIf0OpwfV24WZqxVB4XX6SAeQUMMJ1\n",
+       "+TdKqe64EKtLUJmD1hSsXoYzHsw7cC5LsrxN/4MemaKH7c3gigq4GpYn8fJ5OlHC9sg28q8OEg7G\n",
+       "wC3D2UuUPjpDpbNDKxdS1tpsGTpZ+pQ9Sd/rkPhXySSKwFnBnR2QPeSTFStE9hajZ2GkXebibTPo\n",
+       "9xyHbwzAOAcPG0J8BijAfg38PVBZA38ENgwwA1AdmDJgR4fk+/yBdm9Ip3cfCCGMGfidh2C9TJr8\n",
+       "vB+aDkQd+BkhxL94p25iSqlmmszbvQ/yhyFuQuNLIC98/09XPgD3GJDTIR+D34eqDs0JqLagImBn\n",
+       "HHoJ4O12A57nOhK0EEIHYwuKF2B9FlYroCJwXlbY5SG5n7KZ7umYkURpCZkg1WOdLcN/KID/0iaD\n",
+       "T38bJnIw1oPnXOoBPNTo01vS+FamwnZlh6Qi8EwH3zdQQgPrCFocUpc+tlFB18qo+DJJOWLb1AmU\n",
+       "jiqdoHP5AudGWxSkC3GGa84+Cucz5MM+bq6H0e9i/EFbqe8L/0yLEOskTD0Mdgn8thD2o0oFp2/e\n",
+       "Dt4YtzKb5jdI5ZQA/1Qp9blbtZbXIISwIXs/VO4DYYJ2Fi7+GZQ/CrMxdDtQenXIJ1dP8eKhHKv7\n",
+       "QdIleCrmwauw9UEYFGGgwb4W5AJ41tAYa1qMdwRRJsBfF+TbCjFTYqFjcu4+H1M7g6qWoezRyQ+Y\n",
+       "3PTJDTPkLRfXTGgJ0OIdXM0lG2tYAUBMaBnsjOhoFcjaDqYnkEkJszeOX99h8CDMHYXq07Behc2c\n",
+       "RrKcgSAPZxbhZBZUC/YlsNpBr84z28xQaIPSWnTnrrCuDYj+1S3emrcdQrAX+GXgyC1eyq3AF4Ff\n",
+       "411ejNwYxjE4Ng8fWvzuc3t18B8WQryilGoIIYrAFHSHcGkaKi7YIZQG0LWg63EDueoW/OVj8Kkt\n",
+       "mKmC3wZ9BzK3w3Id1iOS50NKUcRg5BBjZx1sXxKbClPT6QQ2/sI2nGvA/CpMSrTKBkWrgLBNNlRE\n",
+       "W63j4ROoMSJrG6JNigWYbXn0Kl9j6eMlyMTEgcCQ07jHIx49OYvWL2NszELbgXAKcn8Bv74Jp1Or\n",
+       "lMF8Sq8JczDZg6oL/hDiPFxS3GC0cQNUqpAp870qrCno5dLWvgPvnEfPruvzl3cfb4LcEZjbgY3x\n",
+       "VCAyug2Lc6kJmpsBK4GrE7D2DG8a7GkM4PCj4OVguw7LM7A9AmJGEiQekdTRLIUWS3IN0DMg20qt\n",
+       "/P5rv0EIsdBHvA8yd0F40uQrmSJeIomTJXQxigKE2KKaHWCYk3Slh25a2IlAs3QgjyUzhHJIPlpD\n",
+       "y1ZwBxHk9zNoWQwWXwInAGuDztgGdnUb6XcJvzFOOD8rxP8YwvIWPPrdTmHmHjj2c3DfZupf18jC\n",
+       "078shK0pFbz8I27TW8Kt7Ix8VSn1r3bDeJ4BbmkxksZI134F7pyH4xupNfLl2+GZAax+FsZ+CUYC\n",
+       "8Grwqu1x5JrH3jX46iWQEWx8DLojsJmHaQ8aGiyIAp1SncmrCe1pweZkkdiU9PEIMjp5MUm9NUQb\n",
+       "X8NhnaoHLRMGCuZfimkfCuhkJM2gQ1UvENoZpOWzuKdLEkVcFRXKSkczImx9iOlkCe2YpJYQGjb1\n",
+       "fEI2k7DnKcUdW5s8/rGzLCzfjlqbgaUmdBfh8DKEDYwXdOaLAr80waCkgA00scp0C65Zu+9RBXIn\n",
+       "IT8Nw3UYvPxOkZtuAv5n4J8p9cOnkf4E4avAHwtBXqmb6x3wzqN2O+zrfO9zdpIaYV2czQtxeB4e\n",
+       "GQN7iZ36kMcPmcy3YvJen1iGNHrQ/AugK0T+YajcAxjQfwl4chn+cBOOGlifdBlPbJzFAZ0HyvhV\n",
+       "UMNNZHFIKQ6gmEEf6uhJhJczkW6cHpc7y1jmE2R+uk2xljAb94itGFMPaTdmCLUaI1nJIOriGBZ7\n",
+       "4ohJlRAkGQrCZD0YI5RVEr2JX+qgG3XCRBKOAoRgtOH2KTgwA+VzvPqgz+1VyR0r6bhhrQ5PjYC3\n",
+       "CV4Rtl4mXdcPguemLl5Ch+90QFwwwjSE8Mc05Trpw8AGpwGDPbC3CRfX4MU5qOdhx4FrL0L7P7xR\n",
+       "Zyd14K6dgbMnwZqDtWMw6UBRQt6AYQjP2wmzXQgsaI+lmaGxJoSYUEptCCGmYeIfQvFDBqpisF3b\n",
+       "w4pZprjjYF1co1vcJjQUk7qgLKtEfhaNED0TIA1JTrdRSJQjQUHGWyYeNvDJIt0QmusweRucrUK8\n",
+       "RFBZIrgawnN3wYfvSiuotZWU+/PrhhB/nMAyzHwEHliDwm4uW92FD2zC9iO7LrbvmKHdLStGlFKv\n",
+       "cRAS+LEgD+6BffNw/3XciOPr4M7Adhm2DsAeHeZ74Nbg0ny6t83PARps/RY8uAmNPFysgbTyXMhV\n",
+       "kauQZHSao3PIJItvWUS2TSIW6M2tYFZnmF/dg2f22CgN2HsZ9AKcyiYYFyKE1HGOFFnKG8xJg4o0\n",
+       "CLWEFSemGkdEnR2GoyWElhAZMbFmI4M2NibjfplBK2D1zg5Hvukxu3OGxonb6a4B5KC/Advb8Gmo\n",
+       "3R1z/8tXkWKF5TGFIGZ2C9p5WKsLIYYw9ym43YD6AJr74MwHhBD/9p0nsP1oEIL3kXJF/t6tXsut\n",
+       "gFL0dq3hH+Hdraq5AWSURsu/HokANXEQ3v/TsPwcnHgYthTb/im6cy4lWcZVmwSXhkT/Mh313LkX\n",
+       "jm2mlvGv3gvPHYKNPwrRhiF3CfjI0wGuvcjZTxrIoWTJkFjrglp1nWuHbPTIwWgltDIOwebSbufA\n",
+       "44i8jD/Moo+PsumUKIQJJH0GpTJyRxJoq4h8zD5D4CiXHVMgohr1lsuw6LJeTtgjDQoyYmBfYacy\n",
+       "xVqnARMR5EtQr6fZI+0CpaaOmR2wOaUYWwfLhaIOhQjKFyA5AWemhBB/rJR6w8JUKTWoCvHKy3Ds\n",
+       "rt3xlQRehKkWPKaU+nG4ht8AjW/DmV+C96/CUge2K6kXiXYOOmtwdRE6/+QHr7/1DXjm52H/fihb\n",
+       "MO6lRmdZwM7AigZXc1BTMBaBrafPj/8DIYw/gfrPl7A+OsawbmNYDpY2QJchRrHP5t37CVoaRVcy\n",
+       "2tNJRhyw2uhCYWKjMLBUSMCAmIiSyrKp5chvXaDfieAJB973MXji0K744C7gcdh/Ef7rh+D0a6nV\n",
+       "e1PncdmCn96Ez0HJ/m4h8hqqXpqpSA7euRH2jwNn5Df5sbCnNsdg+gbciMkeFD8Mxy+kEsChk7rr\n",
+       "VcWuEngF9BOw9xXYmTYIRmLEDliXimTm6+y4y2wfmSYSOTynhhEnYOfQvHlio0unPGStnyEOcwRd\n",
+       "ybVyhsmSgy18rKjNwMuzfe0oMvYYMGBQNZAiQasEmGOKOXOVlhfScbLopk+gu/jJkNkQBkEFY90n\n",
+       "cbo0R32OPrnM6Q9/CWazoNZAbcBXgIvg7YH2HNy+EbHnupPl1SKEXRj7ODwQwdxuJ2G6C7USuJ8U\n",
+       "Qvzzd5n0938F/qf3ElfkBvhz4Jf4iStGGi/DxeMw2wZ99zPZt+ByUoWRY9CTIAKYmoWODl2TYBiy\n",
+       "fW4MVr4A9UuwH+bn4KeuO5icXEsPJjvHoHoEDrVTe7T2KOwJYiY2wJoq8nzOpp1rE9sJnbtKhC0d\n",
+       "79p5mN+EP4XcPIzq0FaTeO1Rip7EzoIQY1j2Bp3RNsVEMeEaTCWCjIR2QbClW1hJF2mETEQ1arEC\n",
+       "3fn/2Xvv8Lqu8073/U7vOOegd5AgCXZSFEmRlCiZKrZkx0WJYzlW7LhMnEwSJ9PvzXNTfOdOynVu\n",
+       "4plkPPa1E8eObcndkSyrF0qUSIliETsJkOj94PRe1/yxDkgQhNgJgDTe58Ej6pS919lr77W//ZXf\n",
+       "h7VooiEyRs7hY7TSCA1uyMZBuRFxUh3yYhgdYHBNipgBxADtIehKwJo3wJEFSzO8cgfw0kxHdJIw\n",
+       "PPUmOPpgSSWUxsAwCu8kLlFOPbcUDsGRepjYptVWTzRBwAPFA5DYCak9l2lIlcDVD9kSmDq00rqh\n",
+       "AlqB4STUVoC/BGYjGHNQNQK2JNhKEPukk/zqdnBZ8ZYMUKhFGVMUzT1kjBYMpXoKo0NYq4qI0Yop\n",
+       "mAGvgWo1SNTpJWF1YVRBDIQwFQykCyay2S5yNS6KJ9p1jXpjEewTWmglZAAWQXII1jq1bMVZmiBq\n",
+       "16G1HCRKkDGer0icNEO6wCyG3WAWjBERqQW+P+3lEaXUJ0TkDuBB4CM3ehyXJp+E2AxPVHEbWGpg\n",
+       "/S4wdkHQDyWjzsiuqYOR5SAVNuIrajBn7ZgG85TME5TqsxgMlZTiY4zkilDhwGgoUaRIXiyYYxYK\n",
+       "dgcFiRLOlnAGIdO8hapDYUbq4qTwQMRJzGilGLBQ7TNh8XopOsyYmCBVUcJuMVJjFJoGRgjUGgi5\n",
+       "s9gtRnqA0UQDdYdsmNJ5ilahaFLETDkMP9wHL6JjvKOT2ecicgCObIM6J9SUE9rOVEJnFBgGdwu0\n",
+       "TutL0BQFXzOMeIHwDZyc64YI70WvIN+Y67HMMT8G/l8R3LeYUXYSTuyGzBZYVIS8wCkFAz+xwiY7\n",
+       "5IpgKN+VFYAFlNJ5VBm9IFqboHGGsENjErztYDCDqey+Lpp1n76CwUqocSm23ga8vaNM1MWIuyKU\n",
+       "PCZIbYFjMbjtAClfgLiyE/FUUpG3Yk8WyGZyJGoVWbsRAxaq4kZy5iQBi4maFFjD4CFHwlQgbTJS\n",
+       "lzdiTJYomKxILo5lIo+/po9Rrw18wxBaAnsSKDukxExF1kxmBEojUNWs1eHdp7QhAtAegIO3cQlj\n",
+       "RCmVAr4lInWAGwhPaUY3rxARF9AA5CH2GsRicLoRij8H9lyFgJdFt3xp6gOrDzwpyNRDVQr6Mrrx\n",
+       "LS7w5iAUg9Z3ADv0+8BvdNNfZcNRUtgpkrYUyVt9WNQQKYsJiiUMVYqUo0DWXIHJUAAJYnJkaM4O\n",
+       "ccZYIqey2AsmKuJGLLkJYhYfE7GHYE9cJydG2yDbC5t98LwRSjZtUZimh9aiYM1rQyMFwd3w1ntg\n",
+       "Wz+YS7pE+q1GCD0/296uG26MKKXGgB3TXxeRRuD/Az70bk/VIvLFKf+7Uym180aMsUwXdCahrQKa\n",
+       "o/qliBWOWIF+3fPAmgNXAvo3g8ULVjcs+pyZEbOPEYuX2jEDBmUHrBStZ8gtHsRgq8OooiQNQgkz\n",
+       "hnwJUzpLydxDqTSG35qltSoAdQWMEiJVXUMmfifh16og1AO+F+CuJJlaP0WzA0MKwE02NYLFlaZg\n",
+       "d2Ev2agfyuCqMtDvt2M4s5zQ7iGG2vM0ZmKUjCWyOTjsgsBjSqkLmh0ppcZFTN+GJx6GqiatTBkY\n",
+       "gvGfAGntlC1Nk9IuUY6wzVMX7fmIYEB7Rf5Yqfka454dlGJChNeADwPfnevxXC+UUkpEnoJDB+BE\n",
+       "GxRzUOxSSkXsIq4BePAOGBAIjoOrGpIxkCrdTr0iDONQGIbYDGtj3AaZEGTOQPfD0BjTVcURA+Cy\n",
+       "M27w4Yh1E2+J4Kg3ssToIJVLEWxoIf2yG3JWlOl5Ti0RXHlFzg6OnAFjzkZemZFwGqNPsFjcOAtp\n",
+       "Bu1pzMk8rd1CtjJOj8lOOJamypkg6TBQLJlwdBYwx0cwZcLwqhWq7wPjIgilwLSXXnM/6rYk9Urn\n",
+       "NwRaoTMOa6boK+WNoHIX/t53PcajlBsKzkdE7Fuh/SFoEgh5ILIc2k5CZQwG1kJPm4j8YHp56yUI\n",
+       "QyiuAyCdBXC6dGO8rAXyJciZoSECFVGIGsFU0KFBQxHIGMjmzahYhrTPjE1SGJSVPGZUsQi5FCWv\n",
+       "l1ghRCCWotrlxKwi5At5+m1GPGEnVYF6zIk0qjpL2OYmpOzkR+p1xUTuLVDLoeAERwh81RAcAk8I\n",
+       "9hyAptt1HIkCyD5oCMIvytfKy3DABP1btFJwBAjshMSuGzA1F2UuwzR/CtQAP9Ulzjw0XWRFKfXF\n",
+       "2RqMUiojIt+Gp38DGpp1R9uRHAx/H6y1cGQHbO2HgQ1Q4wKVBnMSHjlp4uePVtA1Euek30R9zoCh\n",
+       "lGDQK5gNXazqqiTcVMLkPkTEtwgKDszBPhLeU1QZFW3FJC5nFneuEr9pjJ41PgZerIIQgBNyGTg+\n",
+       "TGqJlRgm3IYkeUsWY8rAeDqHPx/GZBGyNkUsDcWjleSCy8j1KY7TycCqENYg0ANjL1wsv0OpwhkR\n",
+       "+Tud5EZhanKqSOUROL4S1k7JOD9VC+HTcyEJfZV8Ashwy4UmrprHgE9xCxkjcLYsdaj8d5YMvPMO\n",
+       "bDZCUx107Yc7fVBthjMpcB4GRuAHUAzCqSwsckN9+dwO2+CoEeKHgDAcWgfFxdASg9EoHOrwEI8E\n",
+       "sNVEqK2344o5yVscGDMhKt276Fr7flL71kLP24SrRjCkQ8TvNpGyK4o2F4lcgXQpgSmdx6AU7iIY\n",
+       "smkGTDAqiolEkoF3qsnbEkQ29FFnBmsGzPE01qEiscMFrbDsPgifBxq8kCpgtmfwqiKOAhirwW0G\n",
+       "gxNCO6BvNzR3w/FqCMyDUPm1IyKLYf2H4H0D2nv1ykrYkoVoMyx5EdYXYNdKeGMTsPtyt6uTWI1P\n",
+       "wFufgqZOOLQKxANuC0RyWjndAoy3AAOQN0C/CZxxCEYyZJ/PE7pPex5qTGncpRgj5ggqYyAfPwrO\n",
+       "RajhZvpGjxJqO4PJbyAfc5EYG6dmMIdhKVR4TORSdgLpDH5bGoO5i6GaDl3Wc/gtuM0F9jh4u/T5\n",
+       "HIjDd/fAg92wwgelcZBx2JUui32WvR9Pi8iraG9XrOwBm3Vkvob6RUQppWa9lb2uqqEebaiNKKVy\n",
+       "uuS36hPQvAYqN4InDqNZWLkHGmJOXv9wHQcNHYS7x7BVGjAmE5RcQ6yvjrHlKSgWYKDBxPE7qxlY\n",
+       "UklqMAtDY6xZkqStWKSjC3IuN92LvWTyHvbGPkLs22Yo7oGWnfBKFfyaA9PKLPbqEq6UUIoYiOcC\n",
+       "tNalWLkfPEEdatrrMNL5RjXUKygEYH9J95C4Jk+ALoes/S1YUgc1RQgaoCsII99SSl2XEM2NnHMR\n",
+       "bMBJ4JNKHj9GhQAAIABJREFUMetW/3xEBAdajXGjUje2CdbFxzF717qIuJyw2QtrsmBMQNwDuRQM\n",
+       "JeBsdZiItEDDI9Do0bknwzkY/IlSxRPl981gWAGVKyGfgajUYfqsGW+bk3VDJqxJA/GlCohjGTVw\n",
+       "PPchhp+zQ+F7UNsPX3HB77XCBi9S4UBVDFDhTtHaF8NQC22ApxRBpeB0P5x6EkLfAyrB8SFYtAIW\n",
+       "5SFtgp40DHxfqdzk2Pwe2GyAO+LctrrImh7IbYT6iJam718CwyVwRmHibRh4G8JPo3MPjEB/uXT2\n",
+       "Rs/FNc27XpdpR/cjGtVVK9UfhwcXw5IJ6PFDYDtsDsGAH4x7oWEEQnb4sUGpoS9fxT6boWormOsh\n",
+       "egd0rIIdx2HUC8OLoOSDUgEyfZDuhehRGPwWUGqEP6pB3mPB2p7HXApgTBVR/R1ED8RgYxbiXkha\n",
+       "IdwE3c1a3a1pJ7wKDZ8x4nXYyBqL1Azn8BsaiC2u4tDwR4i9DnAKqnfC6iK8GIYjeThSVhovi7/h\n",
+       "QofW5qwr+8XmfD4ksM4ryqVMQ9Ney2qvycRWaK+A2wbgrnEdQxyvLpBrSiIteZyLK7DGLORHg5hs\n",
+       "OVxBbbgI4D9TYM2ZEZ5bE+OtE0WMjgweH6wcAXsOSCXIYOB0qwmLqxPaQlA4AsNZ2F2Cle+h0G0k\n",
+       "XkoSt1oh3w3143SWIKjAZodsH4w9V7hQbfJ6HJeYFhoaawdLJeTCwOlrNXJmkT8ADi8YIudQipQI\n",
+       "3wF+B/jjuR7PbFCuGHm5/HcBZSXKVhs05RneWWQ4jo6vD08918v/Plz+A8AmErRi+Asb5iygEihD\n",
+       "ESlW4phIY/VkwdwNVUHd+8mVgC8fg3o36j4vfEjRFqqibdhBPByhvyFKsRoMEegfhtCPyh6fCRH5\n",
+       "noNjv+7g+D1mlCpBKKBjppNjCwHPisiQdvyUKqEuChXlXLDCKeixQWEMju8CTsHifw/tJn1L6EbE\n",
+       "+YJSyXmbmKqNguZPwhKHrmjpFxHvPrBX6BQKgILx3C3OjA6lgNYWMdivZr/lEPeAHkPd58EThjcb\n",
+       "dNVz/ggYAwa6V7oYj3pQsRLIBNRklXpHRP5yHLWvmcyjjWRG2mG8XXfD5cfQ0AoD28tVEXGwnIbq\n",
+       "M9qAmIClZ4rsGDynTpeyTnDEV8DU3A/eFFiPgC0If5NV6sAM4w7ChZ26RaTOAouVLkM7PZc5QAvG\n",
+       "yGWilCqJyH4o3q9FdBwFCPq8nHrATsllwBAsYs6aybtHMFnGKJQymI+cX8JvAHKxJDwJxRIEH5yS\n",
+       "V4SiYTxKnyWFMRfVLradZes2KyK/eA0+vQFyAagcgbusULuG4liacWMaTo1Cd+IK2zyXPUEt6Ct6\n",
+       "/GInY9mld+pKtj8fEKEa+D+BO+d6LPOQrwK7RPiiUmTnejA3GhGpB3zormpDU/PVRMTkg48uhjUt\n",
+       "UEyDdEKxF75XvAyjO6PUayI1z6dgsQlrKUPTS04ibQYSfgNR49vQ2A8Ni/Qds72ceLEzDl8XcFpJ\n",
+       "fCRH0WrAUTJjjhSwdgJ9cGaPUurs3Pjh4Y2wYgPqgA2KY+B6DX5LRL4xTfZ+AHqVVhydSsAGzUd0\n",
+       "1ceRJDR/HD4YAG95H+uN8Mz7RKR/Bhn9OUd7pZoehfdnob68Xm0QeGUzvN0Fg14dXmuI6Oe4lBHi\n",
+       "AlXlXMCuaoi9ee0jMQgs74baQ+WcwqKFV1YtpWS6HXVkMfSHwP4aPGIUqQaG8vBaAJIu2F4FhhNQ\n",
+       "1wXGAfhWFtY7y1nSMVhlBJsTeivh3iBDDm1vTtZYOLIZvEfGKBp+AaM5iIbhwEz5gO+GW+T+VbBj\n",
+       "CZRKQBeIXeSptFLX4dhcOQvGyBWg80ocz8JLH4H1YQuBDgcxv5Fwph3/iRwlU5KcY5ycLUlTFxy1\n",
+       "Q60XWiO6fOpQA/R3UV4ERdwvwhsfhNVj2hU8ZIf4YB5rdwi+MuliK++7W0S+NggfbYUH2nV92X4X\n",
+       "JIbAdUyX9O04qC3ry1pAtOuu7jd1zyWPgiGDiHcvRJ+6imzz+cyfA48pdfMZUjcapegU4R3gUeCb\n",
+       "cz2eG4V26Vf+OqxdDrUlmDDAQK+IPK6USgKYYf1aWHsP9E4u+UvA8SR8XES+NNUgeHcC3y3Q/bkC\n",
+       "G9NQH01imEiyr81CZE8CGu+D2OqyumsWjC/DA/thIgZ/L0z4Y4yvM1AdTVN3CIoJ2G2E2FRp8upV\n",
+       "sHoL9E2OsRYSG8ESgLuZcu0rpSIirhfA9Cgk/brJ26gDxsKwcQheqtOJ6svknCECWiRuRQr613GZ\n",
+       "a8ks0wItTl2qO4lRwaoJ6PTAwQgYm3SVkOMUPL8Nqvt0AeHJFjgSgtgb1z6MiYNw+iPa8LEWIW6p\n",
+       "pH9JI9loXdnjUQJJ4+kwU3t7lta3YNgQZ3j/YSJf64JFCooZ7ZEYE5E9AXhkGdzZoSWzT9fB6Emo\n",
+       "eZa+iiivt+p5cxRgyAPvWBXRr4+e0+y6bESkdT3c9wHoM5e9aivB9CT8ioh0K6XGr/34XBkLxsgV\n",
+       "olTqLRGJwNB2I/kaL9lwDbWxClyTST+xLDHfCM4sHHkKnmkCZwsUChB+E2IvnXsaS3wVDnsg3KH7\n",
+       "Jhh6IRSB0Z9MNUTO7VsN14kkl8CRJmhzodUzGyExCH4nmHywlstYQLQ7uubjcJ8TlpaTWosCO7fA\n",
+       "3lEuo/vpzYAIK4BH0M3+FpiZv0Qrsv7Lrdu9uOI+2NQBW6YkcB9ohl0fAH4IUAWbV2gr5SxVWlyk\n",
+       "ph9WlBUpL5pkp5TqEZGvQWg7WFsgNwDj38tBeiX8/mrONSe0QvE2CPTD9mGlDovI/5Ol8wEY2wAm\n",
+       "IyTGYOyZaSrH3iqd+HEedRCzQNOF40m8psM1I78H1cugugfqArCzHnpeAVMSrDP8JmsBzI6L/dY5\n",
+       "xKzTRKZjy4PZBEPfgFe2wv41UOqF8Zd1oulhj07DyB6eNECvjfwhOLQW8ouhJQGjfjtBdwOJNxyQ\n",
+       "KQGv4d5YYp1YcOaybBrSjdBf3QRvTqTJ7gdSk+eUUipaL2K4A55t1J47AFbAeCd5x0F2H4HTy8Bi\n",
+       "huQ4jH9TXYUhAuCF1R2QNk8J79mhsEx7AjsoG1OzyYIxchUopU4Bp2pE0otga4H84qnvx1AGmMgB\n",
+       "+5Ua+YWI2NHVKXkAEbFXwP1tsNFEnDDHO4N4+kGdhvjhcjn0jJjBa9fiCectIE5QQb2CmS/zZ9RB\n",
+       "Q/05QwT008X6UThzJ7eIMQL8DfBXSl0YL11AoxSvijAM/Abwnbkez/VGu/XbNsNt03qPrBuG42tE\n",
+       "5BdKqaSAyTRlcU6D6SB0RGDdIvBl4aRR5OmiUl0X25/S3X7Pa28hIu3uadcsQIXWNvGXv5cCnhCR\n",
+       "pwHj9OrCMtEJMEx12AOMaUnVC/rplLd7RkT+C4SXQHAlHMtD5Ci6E3cj9BhhjZwTiQPodUPg2MV+\n",
+       "5xwyrNM2pot1dVdB9LVyguZz5b8bxrlcwtc7wN8B6WKJpLGpHCofBXeCKq8Nb7KIymsl35zJiM9v\n",
+       "w/6njWTfjMG4iDw1GQ4zgc87xRCZpAIKkDkA/T9Gr/GZaxGaNOh7xQXeb5N+73LvIdeVBWPkGgjA\n",
+       "692wtpnxeAYqjTiT4yR9oxQyEH+hXI/PVC+HiEgVPLIZ2tfBsBWKg1DxGrGGTq1Ea7KL7CiCPa/b\n",
+       "OZ9n+SbgdBLqzWCsBMbA2YfV14VUQCY0cfk5HTZwzNB3wJUDY81VH5R5hAjvQ3tEfm2ux3IT8EXg\n",
+       "H0X4kVLMdBO8qRARpwk6rFAW5DOZtSt9KkYFdtCP2ckwHDwN76sqey/2wnofNK+AZDscj4DlNfi0\n",
+       "iHz9Sp5Iy1UfpSEw5bViWikAjpOYm4ewNE+QPS4iFUqpKJxNjp0xR0UpNV4lcuJtWL4BhsxQCoJ9\n",
+       "P3jHtJDdjJTDrqeYtj5or0nP2/DiHdruMheh2w/Humfugjv36GR6x3Pw/AdgVQwcORjwwjthSMzq\n",
+       "Q1R5ro6W//CJFHbD1s0wmANzCYsxSM6TpultAAsnN9aRqjFiyn0CBobA8xp8VkS+CSS80DcA7cs5\n",
+       "1zOrANIDXvDcDrbbIXQSCsfhwhwvEXGYoMMOvrg2TmcsMgjByTOwZYluogZAEeSM1qu4qLF9o1gw\n",
+       "Rq4BpVSfUeSbUbIP2xlYksJSE8F4PE36n4EBv8hHnLCsALFx2FWC40BDEyzZPMVd2wTRDWAdhz/w\n",
+       "QZ0H2hxgmgCDR+SpOPx3tLDYYnCaD5DyrkDF+zAsG6fRn6bKGMOUjJPzwOBmETml1CVFjMZgVOnS\n",
+       "QPsU13xPFaSO34DDNauIYEd3pf3CL0Ni5rWiFC+Xc0f+C/Bf53o8l4OIVPvgzvI1FhmHNyavsRb4\n",
+       "9HKweyA/DJb9TLRn6U3oRmmTTDi0WiZRgDTsOwBrc9Dq0QmsHfW6/8NhO2TtkN10Ljfjkh4kEREn\n",
+       "3NUK93rBGIKmH8HiFXDqKHW3ZWl2RLCV0tg8cOoPROQfZ/KK6hb2tENFG2QTwEt7IHkKNtiBiI7n\n",
+       "fE8pdeZKj2FZ+OpJeKsLujeCmCD4KuQPX8YaMmcolXpdREZgYBOYPBDZC+kD1yf8cvVE4Ll9UOiG\n",
+       "O+xgHSVkznL7AUXzIIQqPCTrbGRyFaQnAOohvghWhuF/VWAIJijxinah0A4TcbA+g2H7ME2NNpo7\n",
+       "itjCedLb4PRREfn2VO+ZiNQ3w2eW6wTpwihYOmG4/LnpfYdOd8I7Bli/RLdIMHSBq1trrwzO4iE7\n",
+       "y4LOyHWgXJFSAWSVUikRqW6D390E0qJXO9shqDoGzydh/H3wsa3TJvwMNL0G9y+D6ApIFbXcq/kY\n",
+       "+I7Bt6L447B0BbRmYNxj5eAycLQa6UjmcYzlaTwEdSPweivs+rFS+QvKuy4ct3MbrPggrA1DRRoG\n",
+       "/bBPQe/X5iKB6XrOuQh/CbQrxSPXY3u/DIjQChwAtig1e09HVzPvIlLTBr+7GVQLhKP6Gqs8As/5\n",
+       "YMP7dQZjdPLzz8OaN2mpLHHXMaiNwYQL9vmg8wko7p0SQrWaYJUD7umATVvgUOWUVgcpMD0GFQNK\n",
+       "/dWlxugQuWM9fGQ7DDigkAHjK7DxEO7lwoZEkaqBHE2nwBeF01Xw/KhSY+clEYuIBSo/AcuWQnMW\n",
+       "UiY4qaD7MSj1o7MyY5MJ5yJSg84dyQNn5krA6nK4mdb4y6FcFu5Hhz/SgAOsK2DlB2FdBAqeWrru\n",
+       "dNMb3cHYrkaIdUNHGHmoD4c00tBZIGsaYtw/QGZfBUykwZum9j0NrO+zY01lKdgmKNpCpHph73eU\n",
+       "yuwp79tVAX+yHuyroLcGkgD79X3l7YhSP59hvEZgWRWsVjrMfwjtSblhRsGCzsgNpqxNcnbB8sH2\n",
+       "zSAry5LJbshVQTII7zkDjwXOD/cCEIB2M1iqwR6GZgt4ChislSijA/kPUdpeh4fKioErR7NYi5Cs\n",
+       "gK0/1K0GJlkchhPr0DeVS4w7uVtEgjC4Dcx+3SY9umcua82vByLcBXwWuG2ux3IzoRR9Ivw58H0R\n",
+       "ts1nj5If7tmiJbDHAFyQq4TkMHyoEvJNcJ6XYDucOE3/onGeOw22VkhXgt0Mq98H4XtFHM9Dem+5\n",
+       "YuaAiAxnoLJyWs+lcd3i9JJS6CJiaIIdd8Cwo9wuwaZb5B4/SsWSJNuenHbdToB7kYjYz09et22E\n",
+       "25bCXVPCQovt8MTHoO9Lk4KDolUJH1oFd7aBygKnIW8S+W7hKjwmC1wZItIK9Q/rU6YIBPth/GdK\n",
+       "ZXaLyAQM3QkGp5Hw+P1k36yDRBEkBFvMOI0W3OMWnDELTlqxpyfoWz5I5sN++K9V1I+7sMUBHJiT\n",
+       "tRgKSaz1WfzrgT0WkZWt8JkW2FIPseOwpAdO3gGnVsPIO7CxnBd1Xli+bMCeKP/NOQvGyA3ACcvb\n",
+       "4LwbuhWKjcAZKAxA30FoWgvDRlDj4Dytn3CUBaq8iFNhEyPGpJ2C5TSlyhGcHRA8AZXlxdGR1a3S\n",
+       "Q5VQP2VxLBq1fs3lMZmMe+2/en4gQg1a5vxzSs2c0LfARfkKcD/wP0X4vFIXJl3OBxzQ0TpNxMkG\n",
+       "xWqQgm59fh4GfW2lIPhD8D4Mmz2w6bRuDpawwKsfgYMZ9NMhSqnRKpHOt2HpbTBo0k+O9v3gHb9I\n",
+       "bsYUrDZwesttHc6+CAUjJYG09XxjpCRQUkxJoNX4N8LywLTX0tBSBX0tnIvvr1gB2x+AXlM5UXY5\n",
+       "OH4Oj4rI38xUnbfA9UFLJLR/Bu5NQGO53LirBnZ+WkT+QSnVCXSKiOThc8PQXAXJApjTGKsDmHJu\n",
+       "PGc90RbsaRv2Gsisd4HDxPkN66wYsxZyvixFk4hUtMPHd0AkC9F2naUse2HFaQi2aals4XzBq3nJ\n",
+       "DF1qF7hWihCLa+PiPBL6eGcC8NguOP44NP0Ymp8A4xn4x5x+36wwm40Y8wrIAQopgkVBqP3c1loD\n",
+       "OucjZZmyZ4GTFTC+/0b/xvmICBXAs8A3leIXcz2em5Gy8fFJYD3w30Tm5yJWgnh8hvrOEqQCEAto\n",
+       "Eb+znNKtqA8AHqi5De4Y0IYI6KTtTeNQc+/U7wThR2/Awceh8cfQ9DOwnbj83IxMSle+nDcOO+QV\n",
+       "0RAMu87/+PF6iBy9UMtEjCAzGIQGmHKDqYVNKyBimlKxUwWpdt0wZfGF31/g+uFcD6tFN06cZGkA\n",
+       "lnnBsHTyFaWUCsDju+DY96Hp51B7BClBxYAH+9lwWp6iMYUUgYhARBgp5cmcrXApogwFAnYI7DFA\n",
+       "+zKQBpjIQjoFVhOoVsiNQvMpqE1o4cx5rxs1Z54REfkU8Dn0gvJ1pdQtI7gUgDcOwcfuheTk4tAF\n",
+       "1UPapTyp+vgD3V0UK7pVokTh46fg/g4wuSmWEihDP8ZCCdO4kDIoip5zezEpCB6DN4wQbNbGSo8R\n",
+       "et/kFvJ0XC4iLAKeQMt8/99zPJybGqWIi/B+dEt5rwh/NN/0Rybg9UPw0R1TPAGdUDUCwwF47nn4\n",
+       "5GrweiAzAs6jEAnDTsANPnV+GStoW8XULCKGSXd22ZvwUxF5Fv1wEb3cRV0ppawiL+yGj98Jo5WQ\n",
+       "joL1LWgokPwmvL5MN1WrLMK4ETrHIfzshVsK7YPO9+kmnZNErdBXoCxLDmAAm22GCpzyk8qCB/yG\n",
+       "4qoF/wyJs/4iWP1TXykn2P5QRH4BWA0YAh4yH/OSj9ow57MUTL1EqyOoU0BnFN5exrg3yKHmNM0G\n",
+       "wazijHgLjL0O6qARNltBGUBVw4E+2Fap+4uYxqDuBHQH4cVZOQzXyFyepI8ppf6lnPy5l1tI/bEA\n",
+       "h45C3QRsawQVB8MAjI/p33x2ESyfmGdPYqPIV0ahPoRa5URKRSwJK57BHIaSIgp0GnXnzbQZTpgg\n",
+       "+HXI9UDfUjBbdHOm8yWub3VEqEf3VflNtBHy9/M1tHAzoRQBEbYDPwKeF+FTSs1Nlv1M5ODgEaid\n",
+       "gG0N+hqTARgbh8eVUiER+YcRWOeAqjB0l8vkUyJS1BHUvOGcZwRg2A350elxdTir/3HFiaBZpQ5Z\n",
+       "RWQM7ndAVRrSE/BkBt6CYTOMLgFrJaQDvGufp/TbWrxrfJ02mGxpOGmE4R9ODb2E4UgPvL+uLIQI\n",
+       "kAdDOdFk3szbrUlsAALLtdL2VMZM5bm9gMm1X0S+eYZ04wTBjXZMkoVShFxvicjfKqUKIvLsGXDX\n",
+       "MJoURr1RDJYUpefz+gG+KCL93WBcA1INwRS82QuLhrUh8pMC/GyuK4wulzmvpikLgj2rlLpn2us3\n",
+       "faa1iHiBWnRm9eBMC925zxo7oOERM+7NHrJLraQN9Zi6UxhL3XjHssQHoPsF8Pt1eV/i6KSOyWWM\n",
+       "w4zWW0jN5xPzSuZchFrg/wA+DXwL+JJSl04sXODKEMGINva+APw74PvX29i7lmtdRHxADZdxjZ37\n",
+       "TsVDsH47bB7WIZphN7zaDJ3fVqp08GrGcYkxCtqzkrtSd7mIcQU0fgwq66Hkh7EojH1VTWuGJiL2\n",
+       "WvjsOmhog1AGLMfBcxJeiis1L5+Mb4U1HiY7mrd8AbajJehLAsfqYXcQxr42s5F53vetYFwJ7mWQ\n",
+       "HYf04Wmqu4hIHbpiM4LWFzFSzkeqgA8tgzsz2JoiVNVkcFkjpKJ5ot+F6E/nU4n2xeZ8To0REfkz\n",
+       "4LeBP1FKfXvae7fEiXo5iEg1LPtDeHACXHlhqMVM79YinVVFintA9sHY00qp/ktv7XxsIhtr4H1+\n",
+       "sCaAoG7c8My7qDvOKZcz5+UE1f+MDvF9B/jrhUTVG48Im4F/Qj9l/55S169nyWxf67qk0Xkn+O8W\n",
+       "8i1+Io3VZPoLMBGEQ2H4xXxI+NRluh1fgAcD4Ctfr6MueNYFvX83XTtCROxWWOeHVQX9SL4P6Jqv\n",
+       "ntJbaY3Xc1X9ILiX6kTk2GEIPaeUil/HfVRWw4crYbFZJ1SHhrVQZh84f8fMyvcZaIzk8A0paodg\n",
+       "fyO89bpS0Weu1xiulTk1RkSkFvj+tJdHlVK/UX7fgo5NPzT14hIRxfmx/51KqZ03dLBzhIj7Pnhg\n",
+       "u5annsqeFnjpWaXyr1/Ndo0iK9fDJ3fAkBtyBZD90PQWHAsqNX1O5pyLnqhCFfCf0Mbr42iJ96HZ\n",
+       "HN8vOyKYgf+InocvAV9Wamal0Cvb7tzclAwiK9bAZ3dAvw+yBZB3oHEPdE4o9d3ZHs90RDwPwAPb\n",
+       "YO00Y3tPC7z8r0rl9s3NyK4Pt5IxMklZbVddb2+EiFga4Av3gG1pWTV1GNwvg/s0fAMWfx4eGT1f\n",
+       "ZThjhO/XQu9fzhfvyJzqjJQVBXfMMKh1aIXMItDODKVHSqkv3ujxzQ/sXnDNoOlQkQO7/Wq3Wgs7\n",
+       "NsKEWxflYAK1CQZ6YbWIVE53Bc5HRPCjb4C/i25otl6pc4l7C8weZcPjr0X4IfBV4JMi/EeleH6O\n",
+       "h3ZV1ML2TTDuK8tqm0BtgMEeWC4iNXMh/Hc+Nh+4Z1gX3HmweC58fYG55sJqqOvGkqXg65ii3N0A\n",
+       "8bXgHobNKWxyYbsDWxFsBnSRxLwwRi7GXJb2/ir6AJnQB3jJHI5ljgl3w/AF2ggwZIX4FYdmJjFB\n",
+       "Tc2UhDbQE+7XWgbuq93ubCDCEhH+ATiNzgnYoBT/dsEQmXuUoht4EPgztB7JcyLcMcfDumJMUFM9\n",
+       "w/VROW+uj0j3hSXAAEMWSC5cB79EWMDrv0CDBnyQdIEHYkkITntwDTggFmNKkcR8Zs6MEaXUnyul\n",
+       "diiltqFb3kcu9Z1bl8JxODoO+5sgaYa4Bd5sgc4+pilJXgk5GBzWSU/n9gQS0F6o8Lt8bc4p64Xs\n",
+       "BOLAaqX4baW4qlbZC9wYlEIpxc+AVcCTwOMi7BXhP4mwvtwb6AJEMIrgfLf3Z5McDI5Muz5KwLhe\n",
+       "F+fB9ZE/BkcCcKC8LkStOkTT1QN0z/XoFpg9cjAxppNWz2Mc3Anog8DT8God9Hsha9T/fa0GAk9f\n",
+       "TlL3fGCuE1g/BPwFsE8p9Zlp791y8cSLISIucN+pOzNShMjbkNxzLYl0ItK+Gj53F0zUQSIJ5n3Q\n",
+       "eADejCj15HUc/nVh6pyLYJpv2hYLvDvlqpv3Ah8E3oMW2sqgtS8mq0lslIX/0NVPX9TfnZtrXUTa\n",
+       "VsLn74JgA8RTYNoPTfthX1ipn872eGZCRNzguRPcG4AihPdCas98TEC/Un7Z1vhrQURMNfA7W6Fm\n",
+       "NYwYodQNVbt0+fb/VEpFRGQp1L4HLA2QG4KxVy5ToG/WmLfVNGcHIfL3wM+VUi9MeW3uB7bAAgss\n",
+       "sMACC1w35l2jPBGxTMnwjXFWLPAct7LVLFLxINx+N2wZ0IlHGSO83gIHn1YqsevKtiUmoN0NjRmI\n",
+       "5+HE9LK/m4Fb7Ump3LPiD+H+GPhS0FVjpq8pz5kEhL+olIpeeiu3PrfavC9weSzM+2SZee3vwz0V\n",
+       "sHxMOw4nHPBSNZz4KlC0w1JApbUw3k0tY3AxJ8NcKrA+KCL/Ae3C7QHmTS30jUZEbLBoi+6PMZkB\n",
+       "bSvCliHou1tEdl+uOJKI2Kvhk0uhtQFyCTCd1Mf2X5RSC3kWc4rrNlirwJav4Of3tJJ2ezAUo5hW\n",
+       "jMMXReSvy9VmCyywwC8nbbCoBlZOKVSoSsH6tJmef9NBxrxEV5xyGh50ibyUUOrlORrrDWXOjBGl\n",
+       "cxbmXd7CLOEEl0EbIFNx5cBhBWxamdb0AHiWg3kCQm9C/p3pan4uuHMjNN8BfUWQMagDy9IY6i9E\n",
+       "5K/QXpKbIoHp1sNRC96UnT1rl5FqdOMQAypvx5VuZsK+H/Wr6BLZa0ar7JrWQ8X9UHJCYh/kn70V\n",
+       "cgsWWOAWxg3+srcg4NCdmPMNkPHVoWruhu/XlJOpV4DxKbhPRDqVUjNK/OscI/t68CzSunexAzfL\n",
+       "A89CA6VZQkT8dlhlB69AvyJS1K3LXeVQVcKiq2gSCRts9GH+z5X4KkrY0mMY80Eal0PvHhF5bKpx\n",
+       "UQGbV8FYCeQE5g1xqlqMeNJ+kt4Qvn8LAztF5CfzVYXxZqcs2meeWWY/3g8D6xwEtlqpLBYx5wso\n",
+       "V5GYsxHVVQkNIuJXSoWmbM9ng5UO8EegtwSnLiVYpPs7eR+14/61esTmQUoRzO+ZIPawiPzhQjho\n",
+       "gQXmhsn1Ad2KQ4mI2aDb+S5OQxSIwIhY2Lmqit51HkyNOayFCaIOL45EN/btBiKvV0HICsVluhnZ\n",
+       "cmboN6TDws2/DWudUBeD6CI4slXE+F2lip2z/duvlAVjZBYwiixbCo+uAJyQH4It+xkxJ3nVBYvi\n",
+       "Hk4tdRKpKRB1pojsqofPtFFlclLbC9BEznKQWFOA1rUQ2sOUcl8BgwFKAaiK42+xUTMBgom8BdYP\n",
+       "gnUD7DvAQingdUVE7F547yLYYAJDvcjIqJYRnxIaS78DRz8LDocJ25gAReImMyoSw90M8ShTxP5E\n",
+       "ZHE7fGoFGNyQG4E7TsCYiPzzJXKA2qw4378aszTgGQVoxccAanEnmc8Bf3djjsICCywwEyJi9cD9\n",
+       "bbDJAsY4BIwiL1bD3SuguRHSSTAfB05z0rUI98Za/HYDlRFF1ljJsD1O0SDUZ/rJrqki/SrojBLD\n",
+       "DCUAYYLNAAAgAElEQVS+Gv99sM0CK8oaNM1RqHNA8ldF5G+utC/SbLNgjNxgRMTSAh97CEJ+3cyL\n",
+       "DsBBoeVF9of9HHnvMkxGL6VwNam9PdARQBaZcJ0VNbJjyTWRcQawu8DZyhRjJA4HDsN9Fkx1JZxm\n",
+       "wJAhb5zAVICmKGQt0LmMBWPkuiEiUgW/sQXa1un2q6UBqHgNPici/2uygaFSKi5SO5CiOBCjq8aF\n",
+       "JWfGEHLgG4mSrwoQL1JudiUipnr49O1adjNRCaEOKLih6VXdgesiOVX2pRVQU4frvKelGryBUcJb\n",
+       "RcQ+H3qtLLDALwt++OgmWLFBK1cWh8H9IvxxHQTvheOTn2sD+49I395GtidJYROkskZI+Kk4kWNi\n",
+       "SQplsGOpyJI2m6BwBqwxODV9f9o72rIGlk7zmFSloMYPgzVw8R5e5dYtk41dey/V4O96s2CM3ABE\n",
+       "pMkDq4zaPZdsBJsfzpOWXg5j+yit3UZmz0oYN0NBgCw0KZQpRrqiGudZ178VgLQFcskp+zG6wXcS\n",
+       "1jZQrBeSjjD5+mFMQxNs2qWTYwsGKF6RFLDOPzCvg6r1oBRM7IfCUaXUgu6HprkFFm+cIs3cDNEN\n",
+       "YJuALejmVWVM4TR3PdvH25sWkbfYKJqDBDoGyDqDmH4KBYeIpGzwsVrY4YNYATgNuWrYuxhCe+HD\n",
+       "VSLWJIxl4JhSKnb+cPI5EwWDAcN5oTihZLJQyqBPnwVjZIEFZgERqVsDK7eihRqzYC6CvxlWpmE0\n",
+       "CT3O8vXoh7QHrNWU+qHos1IREYwlUBRJVPUSq24kn+uGym6wn4E3gT4RqQffHWBvKKvxvgWqAHkj\n",
+       "mKat0wXtlH338Ror4MOr4fYmUAmgG6Ii8p3L7Qx/PVgwRq4zTpF71sB7OyBnhuIhaEhBbREGjJzf\n",
+       "et0MFTVw0sI5cS8rjDogP07EpvAZBWNRoRijYIFADPKnpnx24wZYtRH+dT9q2Wny26E2GaQ6BcvH\n",
+       "IW2CU2ZInrjc8esy4cpPwNoOWBICJXDqETi6QkR+sJAMC4C3dtpcAtRAzAHN578aegtGfyXIB14O\n",
+       "8/JWC/nlOdqzJYzjYGqCQ5+HkVcXw7Y6iNWXPSUpsJ6G7QpylVBxGwxEYMNRuF9E/kkpNaWpYuFA\n",
+       "glQyTtLlxpkAUBSNKYK2NKWD6NL5BW4SRFgM3A1Uox9idi4oEN9U+GrKN/8EOPrhTi/YK8E2Bov7\n",
+       "4d4a2F1ZTkxVEJgAh4v0cIpkrQVPtIiSMJ6xITzDEwRHT8KhEBwDzoBxMSz5LVhfgMo4BG6Dgxth\n",
+       "6AwcXQqbprQK6PHD+BgQeLfBmmHDWti0A3omJdl7wfccfEJE/sdshXcWjJHriIhUr4QHfgUGreWT\n",
+       "sRmCT8CqM9C6DHonP3sSauOwPwiuBi17DkAdDL4D4RKJWJxeXxGnKUDeMUxhAGL/v1LqrGx+JWxb\n",
+       "A+NuyL8HjlmIZU8TXmXH7M+yewUMJqD/F1dYm74UlnfAPb3nXmqIQmYNvL2Xa5Cnv4WIBWZo7DgB\n",
+       "7jScPP/VzNvwzmIYX1fC2pRh4ygkclD9JlSFwNTk5Llf30B+qBvqI2D1QtYBWYGlWcg1wZtLy4tJ\n",
+       "DXifgYfL4SAFoJSKGkT+9iSFP2nDVWHDnMsQtYyS7A3A9xYMyJuDshHyP4A7gBfQbvXbgb8V4Rng\n",
+       "j5QidJFNLDA/iAbLeR0jsLIWLJUQjoCnBO5myA/ABj+8FAJ7ArqPQGoDOZdi0B3CVT+C0TaOpQ8G\n",
+       "Xk+S/2GiXBUnIgK1H4IdEagr55HVJMHthae8sLcfJlqhoQQRA3TFYfRHFytgqIZta2Fsam+YNgg3\n",
+       "QcsINDLFA3wjWTBGriNmWNwOyjrFJeaAYgu8vRs64lBwQW4Q7KegPwFPH4J/UwnuBoiXgDPgH4IX\n",
+       "43DAS/z2PHFnCA6V4LnpFRsGcDim9NDYRuH0YgZGX2ZgZQTDK1B6Qyk1cWW/wr8MWqdVhhiAtiyc\n",
+       "XMyCMQLQ3w8Dh6ChLM2sxsB1EGwh2DP1g0qpvIh8D4JjsLUKZAAWjYOtHDprCxVxbXER3tsO+/bD\n",
+       "1maw20ANQWURjt4NXZPba4WIH5pHwQfnbkwlpV4Ukd4k6Q9aobYAJ0PwklJqoaHaTYAI9wGPA38L\n",
+       "fFQpslPecwF/BewW4V6lGH6XzSwwPxgZhNP7YLENGnwQjoNlDFIZCHaCPQeON2HpGUiNwr+UYDwO\n",
+       "W13kAjlC5gn9ULMfGJ5mSHjB54e6add1SwS8zdD5JQjUgqUGcjGg61KdhA3gdOjKnvOwae/vBWKk\n",
+       "N4oFY+Q6UoKSmuGJuQ6Cb8PxF6HTCp6I9pB0lW9U/5yED/uhOQ+EoGsCnix7QF662P6ScLQb1q+e\n",
+       "kpjkgUweBqH0glIqdeW/Ip+C7AznRdYIhYW8A0ApVRKRx16DXzkKK61ACMKj8G2l1NAMn1ci0gvG\n",
+       "QWialmCWMeeQvj4dAB7wwks90DAK7kFo/CDsdM/c/vuCJx2l1Gngy9fnVy4wW4iwHW2IfEwpdk5/\n",
+       "XykSwBdE+FPgCRHuVGr+t4T/ZaV8vf/gDfigD+4OAjnINMKebTB+GmoOwfJR2JmFPVNK+5/h0uKf\n",
+       "OcgCRQHjlDUgb4B8CciV14HTlzveOBzphtvXcM7ITYNpSK8xs6b4umCMXEeK0HMaWAUmezkPpAR0\n",
+       "agPkZ0qpk9O/o5TqFZG/H4EV4FgC1hKEq0Ukdin3ehh2vQUr89DYpK1v+xHwjMATFzNEyvLxppkF\n",
+       "seJH4eQ90G4CRzmXJW6BUwLpC8Z/s3LxY3BplFJx4HHd4BALELnEfPVBbwpG3FBfDssVBI75S4R/\n",
+       "chC2Ai2LIFgNweNQSsCLE2CrnbKRHvAH9aJhE3HfD2YbhDuBM/O9dG+BCxGhHfgx8OhMhsg0/huw\n",
+       "Efhz4P+6wUP7pUREPGBbBc5qiA5C4cTVVKKV198feEXMS2H1XdAzmTPohEwWdmfh6SvVf1JKJUX8\n",
+       "nXB0Cayb8uBzuB4iBy/lBZmJKLy+F1bloakZQgmwHYaKUd0vbgb9pBvDvGiUNxPztW+BjtlRi65Q\n",
+       "GJ9+otpFtrbDB5ZB0QilHp2IuC8K//puNysR552w+P3QkQNzCXrs0HkIwj++1A1GRLwu2OiBjjxE\n",
+       "AvDmu3VqLNe+7/DBFhMYEzA0Bs9Ml40XcdwBjR+AxQZ9/XQXYeBnSmXfuZJjdaXMxpyLiN0D9/pg\n",
+       "kwlMCegrH4PBaZ8zA3WUnw6ux41eRFqg+ZPQbgc7OhQ78CbEngbsdtjggzVFSAVgbwlGGuCzK8BX\n",
+       "DdkQWE9Csh/LO7D0LlieB0sBehzQeQxCP7gZK57m67V+oxHBBOwCfqAU//0yv9MAHAY2KUXPjRzf\n",
+       "jWa+zbuINEHbZ2C5RUfbwxYYGISRf1JKhS+9hRm3WVGvr+HKGsiGwXICMv3wz+cnoV/Wthpq4CE7\n",
+       "dARxrUrRkijR0gMTBejrg8BjV2s8iEiFCzZ5oKMA0XF9H7ls78oV7Gd+d+2difl2ooJWx6yBj9VB\n",
+       "sxNKo1Aag+eTSu2e9rk6Byw3gDWh3VynJi1WXQ+OD+1Oi2vVvI5/Dw8PnZOHLwEvLYI931WqeOwi\n",
+       "47EDhss9AStFHt0EK9bDsB0K/eB9Fbxn4FvosNHZk0FEKoBW9M24t+wJuKHc6Dkv64N8ejMsLuuD\n",
+       "FPvBvwtsZ+ArSqkAaJG6Rvi1erCXQIYhNgw/UEpdViKXlmSmBm3EpKa9ZwUWo43ZYaXU+JT3nGhv\n",
+       "ZWxyLkTEZoQVdliUgaECDMKy34WHR8BeNjxKwCuL4I3HlSoevtbjNNvMx2t9NhDhz4A7gYeU4rKT\n",
+       "jEX4c6BNKT5zwwY3C8ynedcPmfV/BOvqKuleVknBAhCgYAkT+ldIfAvtAY2j1+/MJYQIp27baoAO\n",
+       "NzSmYCIPx7WHQ4yAo7yti2p6iEjlIvj97ZBvg2AGTK/D8iMQicMPgf4LS/7nHxeb84UwzWUiIoZa\n",
+       "+M0dULG8nF2cBNOz8Cm3yAY7TAThaAlOKqVGRWxNULMVPHZIKpGKPZDoh8b3g98DWRGpOgn0Qzsw\n",
+       "6jExXFXEVFAsGoOlUehajy7nmj4WL1S+H9pXgAGRun4Ye+piVTMiUr92Su17BixxzItqsC4fwrY0\n",
+       "g2WPiPHnShW7QFdooJ/AbiWaWqB985Ts8DbtlqwP6gqGp0Skein85oMQrIQgwDC4n4ffEpEvX2wB\n",
+       "0oaG699B24PgNkAsJ+L6KSS/MemxKBulJ6Z9zwOVH4AlK7U0TWRcRH4OFG0YfrWE744E7iQkcmYi\n",
+       "mfz/Zu+9gy257vy+z+ncfXN6OU7OgwkYECASyWVcMK652vVKxd1VKFtr0SqXvKu1Vest+w+Vtsp2\n",
+       "uaQqa+W15A1eaYNEU6SYiUQABDAAZoCZwWDezLycb8634/Ef/SAMSSSCAAYE8K2aqnn3vdvdt897\n",
+       "p3/nd74BJw03jrUC7KnD3AnefWP2roQQ7AW+DBz/aQqRHfwL4KoQjEr59u3pv8tRhNTEGFf3H8Xq\n",
+       "WThdgEm6yfNEv1VlagbIQDgOqQUIGkLkL0D966+0JS6EyCXgxHC8L7vWjPkhjfh75gmY+BgkE9AL\n",
+       "hEj9ADoPv9iBFUIMW7AHYADzaTh+ApRdO3OSA8FtsFRB/1CbkRzYDSGGF2H7azcucH6ecNOKESHE\n",
+       "bcQ21RFwVkr5392sa3mdmJyE4QM3PMi2YN9u2GfC0GF4ch6OXYLLQmgX4ZZfgrvWIVuFjgbf/i9i\n",
+       "Y81PPQSl1fhjX9wDDxwxuLZ7khcSwwgZIlnlhaMbTFyPiF4uf8CA0d+AO1NwYBWEhMUiPPS3hRD/\n",
+       "/FVySPLD8UmRwBXM031GixmcTRupDrgXePDXhRD/8l2swMiPvAzxcwSaTtwFIgnHjkCUg/4WJEMQ\n",
+       "Q9DZD9mV2Dz36Vc+fOofwS2fgjuW465Fx4DH/hacd4F//eM/vbPlp8Hw34IPFuHwakxKW0sLvvVP\n",
+       "RlnRYWbGYKLjsXo8jUmCjNdjUd/iq/ka9z72Ev/kffyc4X8H/kBKfoLw/FqQkqoQ/DnwW8A/edOv\n",
+       "7L0JqdIZmsLGQt8hB0siBqPDZBNVxtKQmIQDLtR2QeEhWDoMP0wA/+bGA8WLC/bMwGePAwXolmH/\n",
+       "BbhLCPFHsdLlyC/DnZuQr0NXhyc/CudM4FtJIe4+DB/bDZECXAN1C+whWA134j9cMK6Q/GCGjAO3\n",
+       "NOHECswPw0O/ufMceNu4Hm8WbmZnZBH4kJTSE0L8mRDiiJTy4k28nteCk+GlFUwdMiHs2wNbFdBn\n",
+       "oTYbSy0P/oD8EbhtE7IuXBmC5ROgTsOuAmy7YDwJmQ4c24C528e4vvsA0/MaxgAECRpDLs9+poKT\n",
+       "FGIsB1vflTLcIY+KwzAzEluXvIhdVahNQOU48PArXH+ruqP0qUO2R7pkka016SU8MjUY7sBJG8p3\n",
+       "EjP7341ov3gPbkQVkoMdfxAb8hGoXyF9T5d8BlQM6u4QtVUHMq904HgHaOYj8IHll7ZPkh6c2YCV\n",
+       "z+90Oho7CioTknfB5B3gDcHIJEz94CV2fM51GNsd0dLy7N4YsDF0iCCwMLU+dsuiPlrCM8/zw1ta\n",
+       "fOEHOzTpHJRfi4n/Pt4BEIJfBPYBv/QzHOYPgW8Iwe+9gc7K+/hJVE2aro7xn5+JPoOEh5ZPoJcV\n",
+       "WrMRJzpQ6oGWgsosnDoHS7uFEKNSyo142zz3adh71ECeimhqLvVnUgSVNPTzkOjCp8sUk3D7FuR3\n",
+       "+IYJH+5YgZU7hBBzR+Dj98GqtWMRcRiUP4XPnMc4nsXsq4QDk0E7Im90Ufvx9KUAeyqwPQWbh4Cz\n",
+       "b8ZN2Qn6M4DuT0u2/Wlx04oR+aOxxj43uJC+Q7G9DkoIQgXZgnwGoho4Fi+tbqah/UOUWwOGrsN6\n",
+       "CtY/AGe68EIEkz3QM7B2Ozj3gyJ11H0O/dQmV+8OUEWE1jNIM0qqUeHUOowY8MMvCaH9hzzh7Dji\n",
+       "voil6S5fGWtx6kKsLwcodiEx/irXv7qy441RBCmw5QBfX8S3uhzcKWxKLTBH38J7eLOxtAibF2Hk\n",
+       "EGwqQBmc87E/yOKQEH9TgTufwLxHZ/dqgalNAI++cYkLJ9ts/LtXOXYJEhokfuz3uGak6B0cht/x\n",
+       "oZ0U4lHIjcOt++CWNbhsgHYYNu4C9UFIdaGZS2EFLlZBw1xSqBdNXFMQJHTIRXhdgZtNM0i1ODsD\n",
+       "ZQFXz0P0/Mtd2Pt450AITOKuyH97o5fITwspuSAEFeBDvIYFwPt4bUgppSrEXzWIfjeBzAmMwKVR\n",
+       "ULD9Hmo9whDx4hLAdiHMxv8vSCANbED+83D7QdhfTXBJ2ojW08x9ap35ehY6Mk7erYHeh9KPZYWZ\n",
+       "IeQw4cQ+8K0bvKqqUMqTzC9j6GlGKjpCVtg42ieSa6Suw64bvKQKA0j+ZwGeEMJJwh0ZOCWAOpzt\n",
+       "xttFr6oQEkJYGfjoDJw2QGlDRRPi68EriCPeDNx0zogQ4hhQejnZ6zsJUspqVoizD8Ftt8BWFBtd\n",
+       "OdvQPnyDpluCIvAbsJWEpWnYFULGA30AtSzsbkN1WOW5ey2647C+PwO9U4SbGqG5gj+6iFKXZGsQ\n",
+       "apAO4Ew1zdV/fA/9CznkwhpKwWPgPM8PPrjFxx+Iw5CqCeisCyEcoAAYOTiegKMSPAceK8NfPQyf\n",
+       "yMCZkGamg+3VOPVk3FkBKKdh8KYzqN8pkFKGQog/fRA+ewH2GkAN2uvw/f3w2TPQA2pXSSkdtmea\n",
+       "aGGS0VoXkWyzqw6d3IvHEkIYGnyoEKf2JpKge2yNCZ5JeoyUJUMV2EhOcnbfBG7/QzDvgngUPncO\n",
+       "u+Ryx84DJN2HRg/GBWzPQuoiCBnQ1y0G9QEdK6KdK6JEFpo3gCikF/TwBgbdCjxwHnqXgQUgsRN2\n",
+       "1QG23ndefUfiHwJXpHxNP4nXgz8BvsT7xcibggge3qJ/vMjS/gLqIEG4cp3MiRWmtuM5vJaD0Q70\n",
+       "rNiGIwK2FaAeCxEOHgLTS/Hdu1Rae5pYaoGs65B27qW1vAqpZRiH/kNQceJ5+0X4CjQQIPwbnsub\n",
+       "kHwB87RFvlkhu/UUnpPBs1wMt4ZaGHDX2ViB+SIqJnQ2d9SAmRL8ypmYXrAVgbgMH34a9goh/vWr\n",
+       "kWYL8MtnYO9eqNXiXJAzc/A5S4j/042tI95wIf1KuKnFiBAiD/xz4Iuv8P3fv+HLB6WUD76F12KA\n",
+       "eRIKpwEFak/D4OkbfSia8PUnYes63ClBVaD+YXhqJJ78CUBcgaRP4y/h8XvAyYMhNa7s0qmMwVre\n",
+       "5PwndQJVRe2pECSpR5NgRaAloTOK2oVAe5TOLODFAoptfQRROrgz6WxSdRXC4ihRtsp37wsYvgyL\n",
+       "VZteahR+Jw9WE25NQv1OeFiD6AJ87BmYqsKfVeHrwJdg1zDs34RKHpZugcvjIO4XwjwF3jNvdVvu\n",
+       "ZmCHU/MnO2ohHagPw2/eAe1paCwgMhlS8zaavc3W9BZ516V4JWSkAi+cEmJsH3AEhg4lUHUTWTMo\n",
+       "W8cI2aLmrzNXBM+q0sqYzDklalkDf/F7aJ+TBJoAPUdrZJOLz8ORDZitwIPNeAzcYXi6BxsHGqwM\n",
+       "7ad5vsq1iSwickEqSF3i9XL4mwKiFrIHThJKX7KonCjQG80jm0FMpn7yJzNsfjbsTHAG0Hs3/m68\n",
+       "1diR5f73xGGKbwb+Avg9IdCl5G1NWH2Xwt9GPFIjM24Szep0Nrp4T/uMduP0jit3xh2RjoDcEjw+\n",
+       "BRsXpJTbQoj9Cr0jkzx9cBgnBDVj0nZqtFubhM5jcMSPHbN7Gtu9gMdH4K41yLhxhtgD+9IsiSLc\n",
+       "fQ1OlmFRgpaAdEi4Z4uqFqCpNUqDGmEEE8/A2iHYLsLQDm/s6hA83wWtBKO/C9HYEOVDCuHaFYx9\n",
+       "GpE7yWC+A1PlmBz7spllO2KHfcdgawHuHgJrBiojkDoLv7IRL3r++M1e7NxMAqsG/Bnwj16J/Sul\n",
+       "/P236VpUKPwq3LIf9pdjUujVT8G5w0KIf/NiBbnDdH585x+2EKceh89VwdFBXgd1CX4A0bfh0iak\n",
+       "/r7O4ANDJH2VDTFO4Cv0DB1P9YmUbfrOBEG9CKzAZBn8iMDqIIVPsg1JCbVJqBR1hHMJblcxgg7q\n",
+       "bhdlVMEQBjgBg0BQCY7BXR+CxY3YyKLXAeM5OPYheOJOWKrBgRpMSimXYyLVDz8OF+8F5xSkGnDr\n",
+       "tyDfhae/CBcywP1vx/2/GXiR6CuE0GyYnoRlH9QeshSxtcvEdNMo/W2c87B3DR68HfIGnF4CZgy8\n",
+       "vGDVl3giRTCao1nO4q/VWBpvUs2oRAWNvpkic71NJsox2FskVBP0Wil8Q/DEXRtkvw0TTTjyDDz6\n",
+       "i1C5A6ZOQmEpYPTBBa4esZi3k8jwMiI5TOQNEWzVoDsHdg+7BPdOZnhm/wzq+GEynkPH0gjsefjI\n",
+       "82AIIf7pG3PifQlxoZ7+CEyfAVODdlUI9T+9qLx6H68b/wz4V1K+fnfMV4OUrAvBHHAP8L0345jv\n",
+       "VcRk8twXYP8dAcN2AE0oW/DCFXh8FQpHIZiHK8NgboG0oP4ItC4LMfJ3YfxMlsa9Y8xUTFLNkK7q\n",
+       "oDhtyrki/WAypiIMhsHtoP7qCnNPwfwuSBngqRm2wo8TPbMfNq+Dchk+VgT2wNwCYWKCIKFSSy9T\n",
+       "XPA4tQLre+CFFjy0BM/thHM2r0GnB7feBR9YtXlgysIv1kiPJlCuS0RnmcZtFtvbGaIZXqEYAbJD\n",
+       "ILdhIg92cSdypAjdDGgW7K7ADDD/Cu9/Q7iZnZEvErsJ/kEsKuB3pZSP36Rr2QV798Ndiy+9VFqC\n",
+       "wTRU9wMvS6ztS/m0EGL+OuxRQfdg4QZ57XkhxP+VQR4QTAUF/ENF8h2X1WyJQQ+MroOSLcNwH7Wr\n",
+       "E6kOaqgRRQFC1/CtOI1xjwSh96goG6j3GaQ2IixbYdhzaVsutg2T3RQLt47SO6/Ebb7SEPSHYbAG\n",
+       "I9uQGILuBETPQEkIsQaMQ9iF1gCOPQKnFmISFMC9S7B9jxDiiZ9HVvZPiTDYKdw24EQWRrO4jQDN\n",
+       "3iRIFDh3a5WBBfMT8KlvQuMEFH2Tvqdi+D0enxzC6V3DHaszrMCQLJK6uE19KmDD8TDLCq2jQ6Sk\n",
+       "ghr1UEswUCYJ0k0u7OszaBks/0JAIx8yW4eTC7EKJ4h63PdXPb56zzBXl07C1TpoG5AxoaVDyWdW\n",
+       "EXh6iU5xFtW1EXSxRnVIpgnJ4H9mleA54K9+tluU+zzcehRO7njhbCbh4V8XQvyrHzfMex8vDyG4\n",
+       "g5jfceBNPvRXgc/yfjHys2IKxu+F6SkoAqYf8/X8++Cp34fWfyTuCnYAi7i4SMHsfwN3+7CVynC5\n",
+       "JfFSHm1Nw2wNkD2FfiYLskJRBoisizs+hrZUI7Gvy6QBaqTjmzqp8WdZHcvS+6oBKzlYG4o1xMNJ\n",
+       "GGRIawWMgcLy6DYDs8HxDUj6MP+XUH1xyySCXf8YPrgCZujRzwakVZ2hhkujaKOVNYYGK3Rm+3Re\n",
+       "ravRqsQqwlIBbtwZMHWolSA6Hxt/vjuKESnlv+Udo9rIzMLUy9iCl3ywPmoKkfFikurSj7enpZR1\n",
+       "IcT5EOUAZD9pCXsmy0ABljXo3ULtyUU6hyx0PaDv6ww8G2EERE4GxVtBsZaRySlEaKN1B0T6JjIV\n",
+       "kBUmWsKl34DJeplqxiHKCshJkmaIaG/i9EP2tmDlgAIjNYrWOdwpnU7gIZUEDBIQdWODrW4zVpJ4\n",
+       "kPsvYd9BmHJh43ZotuBKHw5uxp/KCmFIwHwB+LkoRoQQFuhHIbcPvCY0zv+4q+rLQUopHSEe/iH8\n",
+       "yi4YGYdqQNi4RGs3GMFutp02mwmPvc/GKZnVFDj9kB4aab+P0Jv01HUyRYf9PRXDk4ixCDXTIVNZ\n",
+       "4NLhvQgH8CJ8tUtkBBhbKo2EQeeDNpuBTWgJqHg4Vp3tEY8912FxFwQLcPDKGtedw0T9U7ClglyE\n",
+       "3Dm0MZ+9103KORikAiKriZIwyAY9unoLw9BIJhS0/1GIXBIaf/FGOiRCiBIcPhqrhF4sVkc6cKsB\n",
+       "lXuBP/5pj/legxAIYhuD/2EnZ+bNxP8HfEsIvizlT8rW380QQkxC7hbQ01C/Av7FNxrvAMY0FPfB\n",
+       "dB2cnYd7uhtTy67/TWgHQAT+NSllNT5/6iQc02B2C8q2T6rt4Q5ctvMaApVB5CDRSbsqad2HrE/a\n",
+       "V+ikdcK0wsxGks09EbqSISUH2LvPcu03huh+PwddC4I0pNNoTY8AH6+goSqjVOwWP4wiCs/DfElK\n",
+       "eWXnfgxDVsRkWAix1Q3wU0QoCEMiFZdQW8ORHp3GK9wIgPVVuJ6BXXnQk9BvgTEH1hRcW42roDf7\n",
+       "9/jmE1jfTgghhoAEUP1Rt7p+COt5GGq9RCqaLxR5/O7d9NfHILcM2gI8L4T4ixuJP/E+euHXYO/R\n",
+       "PFsH9+CYCQZhglrxOpGoQeoo3tkOwewEdreFmW/TG3LQ+nVI93CiK7jKKkI1iXIStdZE71pM+QG6\n",
+       "5jJUhqTXoatfo5dKs5jVEaJHSJMjVSgnIK+4zHoeFZkl0emxNbrEtmrCoAliP3QWIXcFAkh8GvZ+\n",
+       "EqZqYC/ASBXGJDx9Gqa+HcvMIqAl+PkpRBIw/JtwbBjG29DfDRdvF8L6ipSDp17r/X344bNw0oXj\n",
+       "XYjaoPhw+V68p108fQnYwsvvKLub0MgPEH2DrqNg9BYIR3WGEdiRwKqB4g4YmDCS22QzZVM3TXw7\n",
+       "IowCRJRikGsQqTr96iyDxQK7uyt4xQZKwaY17HElC1LAUhN6yy3kVx6AiXNwQAXRjG38/xwqH3FY\n",
+       "OBLSK20gnAKqUKgHIRlPkPXaRIOIg3WofxAu+MTboj8tslCULxUiL2KkBcarqbfex0v4G8T8pDdy\n",
+       "/18Ll4lX6Ud5DxneCWHdCkc+B4fdWN2yeggu3L7Dk3oD25KhA8KIC5FAga4db9e302DdC8d6kOjA\n",
+       "NSGE9VUpB2chOQGlnYeyvd2hfmoCTytBOKBrbCDNNqoGZiIg0iV6mMDoV2jnPQzfYMMKURIGoak3\n",
+       "QUsAACAASURBVOzjoZDw+mStLhzr020qMSEsDAj1LfxUHcv3cPp5SlUbJepSO8qPhmi2oSHjUFMz\n",
+       "BKe9zcyqz9WxHF2tjMiU0YIKxQuw/Ypp7lLGQX/nQF2HL43FmRPdSfihBuFc/Fx407do3xPFSBxm\n",
+       "VvwiHNkNGQlbCJF5FFrfS8MvzMKHNc6d6LNyoMrIcp/bnyvw2AcO0wlOIM9loX0UeAQOPQJ36EL0\n",
+       "M/BxF3tXzCMoJVX88l5EOEJxHSLRIZyZpf7cebgzAdNdIi1Bq2Ria3W0wQaD1DaYBnpQYYSQoSDA\n",
+       "dFXWTIeyImllJGoUt+z1ELqGTjJyyHZdUiLF3pbCXL5Hz9HZ23HR1EV808YzHQrlBs3cd3HH2rB0\n",
+       "P4ysQrPMqAojd8Dtm5DyoLIHGkBLhayEp3fDUA2aNmxdeXEF8M5H4gNweghuvcGufUqHv75PCPH8\n",
+       "a01OOyqbbwBTCaiXwB2PLeBHr2CebGP0oVJV+bf7c4RZAzHTxwlbCF0nanUoRjq+dAl7ITWjyyCv\n",
+       "gdBp5SEp6wh1myiyULQEq34GEawTpaYgbODfus2CZ6D32gwKPmd8hRE/Qh1AdRyezoL8g4qUD+8o\n",
+       "pdSdGIFkkqd+2yZ7oM8hZwXFaNAVFltGjihs4aqbjOz4xzTNJPxySYhsDR6OYjvq1yulb0H1Zeyb\n",
+       "t1Lgb77eEXqvQggs4J8Cv/5W+IFIiRSC7wAf5T1SjMSLj933wSfX48UTwEwdjEl48Ay8ZuDgyyC8\n",
+       "Gje/V4swGAXPNHg+n2Qxa6DXTRYO1UnVWnzgPPBZIcQCpDZg/ghcOKazcnIEzzJQA8nASSJkgcB4\n",
+       "HlOmMcIkiq+hBwFRWKdrqiStIl0lQBUhUioQgasN6ApJwlmnG9Vjo81wlWSuw6TVJuUqKEGPctFl\n",
+       "dCO2uiL94ieQUvZip+/H7oYPrMby4dZInQMbdbpzkFsHKeH7ITtO3K+EnTnzjzQhznXh8xMQzYG5\n",
+       "Ad0N+H/feAfqlfGeKEag8AW4cxpu2XlY+Qo8dI/OY5PHiXbdDfMVup0lxKktOgcvsTKRp55LEG6+\n",
+       "QPJejag3RP/qLLJ2CX5rFtEzKEwKUoNl3Nlt+rbNZrJBWK3S7PTRE30SaR3l8DCd5gC3DLjP4u0J\n",
+       "8S0NLVBAJkmXy0ykUkz2dbq2RFNVRu2QHiGboYmlFpgbqZOsq4Qdh22riBqGiN46T2VG8MwaOUWl\n",
+       "LwV6q8Nk6ywt1aZn+aiyC/+zHzvGdsCYhYOfgbqEpgHZAYzUoV+InYpXzuxQS/qwXoHWgztqp4aU\n",
+       "MorzVpyTkNkHfgMqZ6WUizdvTG9E9jjsKf/oawkfplRYmADmXsdBFjdg7QDY49BaRJldJn9qAVvp\n",
+       "cfcTKnN7Zrl8bBKxoWCt99geruD359FWQ/YuBUSJPq1xSFoWqqVT00PKOHQVnZxYoKon6Eobz6gR\n",
+       "JvcgOwI0DfQqUaJClNhDV1nmrN5j4IEIodmE3AqsjgkhWgJmknBcFWLKgcM6+gnBXs0iE0oCv4+n\n",
+       "NbCURZYdl6FqQEJqPLR3L92tIoo/AuOb8KsX4cJOh+81w/+klFtxbMHZfXBiDTQJNRueycLWX7+x\n",
+       "8XpP4cvAs1Ly0Ft4ju8Cfw/4X9/Cc7yTMBn/bSf8H315TwWePcEbKkaYh41z8NhnBEPRCEvZacqJ\n",
+       "Akbg4poV2sNjaN4LPHFrnclllWtfNGmrPS59AfajMpVs0Q99yk4WXxvB6I6gNDfR8lVsv0LPMXHD\n",
+       "Jr6exCBJW4WMOoYSRfFWuhxQVSLaiUvI6TFoWrD4JM4+lVlhkPMtHF/BjMqQCGhmIRfA0JeFKH4f\n",
+       "qt+Mw/xa34WnfVi8E6JhGKQgacVejs+PQy+E3neACWLj0Z2tWNLEc/2PLECDmBd5YT0ODQ2J87be\n",
+       "EsuAd30xEuu/D+2FYzdYnOsRnNhM8dQXTjP4hg5RFuoBnWczdDJrMKNiAzPSxGpEBMYa9VNNNloz\n",
+       "UMyR7OiMbob45h4aiTSerZPxHDaGlzHGYcLLoGoSOegysDwW1Ntpf1MB+Q3kIYF/9CCWuUnCsikq\n",
+       "oJhdzHBAy0mi4FEMLOb7Gstan4Tl4OdCOrJAd6WALDdxZodxFQ3htlC0kLQDga4SyYB8o8OBZ+EH\n",
+       "UzBxD4QSqheB26BzHHI2zA/BZg0OL8ScrPok7J6HiYdjHu7mPrj0e+A9A/0lIZQHYfwjcCIF403o\n",
+       "DMGFk0JY/yFuV95syAB89Sdf9+EG86BXPYKUgRDiT74HX3wOZvuYtzfJtKocOwdT9TxP5mcZmQtZ\n",
+       "GlJphzp08hDWoFOldh1yJ6EQKSR1DT/0aWmSUBFEoUUjdFFpcoAmTUWhrPdodhOIRA7dM9H8Mqol\n",
+       "yMthOrLGUiNk93fgg/OwmoOF3Xla9+XQPtkkO2uRSvt4eoTQk6Q7oAgXOzDQ6RMl+vhqxJE0uEUb\n",
+       "XQFRc/DaM7B5EPoeHHkidmh8na3W6r+HRz8BL5yI04Yb3Z2E0LfMAOndACHIAb8N3PEWn+p+4I+F\n",
+       "wJKSN33F+g5ECP7L8GN8NXZKf/3YiWRIAh7IpyF/PMvSzAwDbxjN0BCqg6FquBOb1JUREs0B61P7\n",
+       "kdk29v4qM0aHgq/iCgM96GFEJh0SSFfFEDo9NUBnwKi3hS6zrGqT6P46bTyW1HVGlZCk4lMXSTqh\n",
+       "TSQ6GL6K03EJ8iqpKCRZdlFTPr7Sxw888j3wNdCb8JF5CHfDD35TCPEvdvw/vi+ENYBDvwS3/nvo\n",
+       "6XDuEzBahPHvgzYEF/6eEObXILkLDh+EQgRlRYj8eah/9UYqgpTS44YYlLcK7/piBHAg9TJ73ran\n",
+       "IZJpcJcQ0xukj0nSCkh0arsrON1dmANQUDA8i1J1gcrpWfzzEjsZIB2PxlQJ4QJ2GVeRCDPPuNvG\n",
+       "VHR8kUIJl1CsPqXd3yGdsgjaKo2ui7se4Y31CEo+UhdEionud1E1gQwyOK09OJUObUeyXYSQPtRc\n",
+       "lEyZKMrglruoqQoTpiAbwURfZawT0jR9LtwGy0Nw6vtwZgUaDjz8ORgcheNrsUpoTY0NvJ7cD0EE\n",
+       "moDR+2F4C67cBcMZSNdgRYvl6F/7bTi6HhO1qgchyMK+DtR/TQhx8bXc/N56VJ+Ey5+F4g2KoO0E\n",
+       "rAyATSHsD0L+VkBA/Snon325NqOUsiqE+MNtuBOGRuDu52G6CTXbItQi1KEWYiaHiKYQCKJwADNt\n",
+       "lqRHIQXjSwqLuwRrRZWSrzOhSTqyRkcJaSOYiRzq0qAo51jJHWI7rBLZ1xH2NqYIkYGF0nEIr7U5\n",
+       "fSW+qpZh0N43jn1fg7GsxoQcRe1VGaQrVIVPYFhoAw2p9IkCF0eJSAK6qiAGaabVBtf2jrP6TR38\n",
+       "NiSnwLsCe3mdxcjO+H5FCPFtYiVB8/V0Vd4H/xD4j1K+rs7cG4aUNITgInEC8HvBAG0JlgZQdmJ7\n",
+       "doj5XJeLUPnK6z2IEOo+GPtFSOfBjaCVBP1ikTBlMBgSSEWSVEERKTx3hcGwTq+Uwl/cT7TwKLnb\n",
+       "S0yt27hOBQgIHY2S1mBNX6Wf16Afkeim2FJaYKjkOg79LYElPMKUhqJY+LpNR/okpEZCLOKIiIzp\n",
+       "kMi6uNLHtw3UXpeorREVi9hCougdyrbLZnbAr9Rj1+fKFGztB56LpfhTH44LlaQHjx6D000Y2obl\n",
+       "adj7AIw1oPZlOLAId+zMmxHw2Al4ogV8500ftdfAe6EYqcB2FBvL2Dfsk29nPKLlqzBeJXeLyXhT\n",
+       "oIY+odpHE3Wc/iWapTEiRRKl2wTmFpo6je/7uLscBoGGW9IRUYCHgWf2MWSEYum40kNxB7h0EZbg\n",
+       "lpZG13XQm1usTvvMzWn0ruvUv+BRTduYUYhvh/iagRJo1PAIMwnC9BiaXEG1IgpqgrCwTK9UJWz1\n",
+       "UdISWzPZ21GQfsBSRoKQJAT0enB6UePKkSS1PQH+IZfRrOTqaEC4CfllSJdhcRqulOGDL8DECpQL\n",
+       "YOWgVIOKDaEdcw5K+Zh/V9kPwx4k+tBPw/JuWLsb+PbNGtwY7tPw7G5oH4LJELoKzHmw8udQ/FU4\n",
+       "OQv7twEJcx+HZw4IIf4f4rbQblAtCNeBch4+PwynBZUDXb5+uIk2aLHnfJfQ7uBO6GAfwqpGKE5A\n",
+       "aI8x0BsoH13Ga0uuI6n7EUlsRjshbileXoxEHgkRUsdBiXx0VaUULlPVwEZlV2CiCwvVr9EzetQn\n",
+       "YhJazYHngyQc62In2hhFQTXtYysOBhEGFTytADoYUYdeso+lQKIJQV/SSEV0hEq6XUMZukb4CQOU\n",
+       "SkwYEkKIZ/kp2q47+8g/k2fJewU7XZHfIk6DfjvwHeBjvAeKkTjPTPt38I1fg31FcCJYVmH+Avjn\n",
+       "Xuv9O92QO2DP34n9xXYvARF87xc1rh+ICBN9tLyHUCzUUBIoPsKMMMM2fcWk0r5MYl8f3dbwPAtN\n",
+       "CnBqKFGKri5xRIc027h2QLJl0g2h4Wg4/SqVXBdF6ZKJFBwtiSUUpFRRaNBRQnah46oqUSqH3ajg\n",
+       "KQ3kaIJukAYlws8O8JUS7nYSo/ksl8clp5eg5EFqhJg3lIOCHhciELs6jLXBjIA8eBqoAobHoXTx\n",
+       "pQWcApxag6u3CyEeeDWH1rcC7/piRErZFyJ5PzzwKTi5DYVeLON+ItHE/cPHUf7+BAndRAn7eMYS\n",
+       "/VSD8RXJaLjEJW8DbVQlEQ0Y2pYs5a9z6Y7DdDoKaSK0XoBvNkEk8VQTb7CNr7kYRKidCj0VZmVI\n",
+       "mBowKA3wEwNmrga09Ue4fmIUzzGYEx1adkgKnShqUrdmaEUBhjuMjBoECRcnEJD2SSgOSWuOtB3R\n",
+       "FgamNMEw6STShDJAhi1UpQ+74dnbS/TG0qBvoVsqwx2TsldnPePT2Qusg1yHzr+Eyq1xVezaYO+0\n",
+       "P7dtSOysnPUuDA7A/quQGLz0WiYB+buFEN+7mSvlnfC5P4faNDw3Dn4qJv0qp2DPQbjjyks/ffsy\n",
+       "9Gag/GmYvA32BfFk9kJWZ8GZJfBOwNomfdNBOE3U5CKXb99C0dfpZE5DV6KYIPVtIuFRkHmEvsWg\n",
+       "BFY2JIp8ko5GGEaEnoarxt2KnKdTabRIJAJ6iQyoHRSRZSxyMVQDwzeRXojqSMxhnT+6z0c+BZW/\n",
+       "7JH9M8nkeMBEWiKVLSoih5RJbBrUhUsC0NSAuqFApDIdeeQ6ktFyh8sTSXqmRTi+D55rgHMBSnvg\n",
+       "Ez7sasKzQoi/fufwf941+DLwNSl5u7ayvkvsZv07b9P5biqkDK4LIf43WNoLhhNbKv2k9cKL2HEP\n",
+       "zgM+5D8J2b8Bp5OQGoLreyHxfJqM2WMzs03gJFCjFjoKA9Ug9GuEYRdBFSVIcdAckE/4bIU1emMO\n",
+       "qi9IKD2cqAehjkqf0cBDUw18Z52a4tEJxlmz6iS1NrolySsBQ4qHygbbSpGO7IOQpCKLKCrjWgpB\n",
+       "yUITEes66JGB2fNphjbtYArTDTFIszLd5PQSVPU4HBiAXuwSG4o4fFN48RaWECB90EJoOXEosBlA\n",
+       "JHa2fYJ4wW5qxAu194uRNx/dH8AzDVi5B9RhcBeg/EAk5YoQ6SMVrI+ZtHM+VrfJ8ackBWD+Y+AI\n",
+       "j0+9EA/oZhG8aI1OSrJuZNgyA1SjR6jnUUJwtRS6u0jF6pMgjZqqEugeRCZ11UOYLooMaB4AR+sw\n",
+       "GlZopvcyUBOsRH1MavhKE19UUO0yrlrH1zxywsXUdMinKPngRzAhJG1caqpCxS5heSoGgiiy6CoV\n",
+       "Onmf1p0Wwg0QioHd7lNN2aTKPoFWJ70B233YXIOUBldDuP8gzLTiqrmdg7UO3LHj0xHVobYfnBu2\n",
+       "Nio29FowLKGSZsel72ZhR462DPYJOHQaZkNY2AvpkfizjO+EGbYdMA7D+H1wZhNkFzYimE4oZA5t\n",
+       "01n9LssfmqLnN+kmJIoqURMForBGIMqodkCk6mhuB0foFKSk4wgi4dOyQmwJoQhwBdQkiChCCkGo\n",
+       "hggRofkeKc+lr5hoWh9DMVGlgTTa9A0LWkWcrTGqjQGR9gLOvT67ZAo9LVFVQZoIhxZXlSQZ6dEM\n",
+       "fOaDJOgJlKDHZGRg6W1WJ5qM1toM1wLOT1tE1y9DYQ5md8PKEdiag0walPvhS0KI/0NK+SPeA3EK\n",
+       "qXUL5I5C2IPts8Dc+1bwr46dMLz/mtjk7O3CE8AuIShKySvKNt9NkFJ2gNfRCbFPweQnoGjC1iRM\n",
+       "5ECRMLsRdwuyFlz8sEQWBNNql4G3wHaiSFepE0YdhGhgD1rk62Dnh8l6FvYgR/9amc1bqzgZH2UA\n",
+       "DRX6is/+qo/MChK6jzAkDZGh4ts0zQZJ3SGnJEkpdQQBScASG1wRGr6EvnAIoz66TOHqNobMUI3y\n",
+       "LIYRmmXhRQKFAQVXIPISqwz3H4WrI5C/VQg1AJZhYwUe2xXzzvLzcO045GVsF3L9JNR2w/YwrH/I\n",
+       "ZiMyEZGPOuhiL0F7hZvQAX1PFCM7k+dzvKz0rftwi7EJuGcx/vrqECxMwhULxrW4i+IJaBZgsqmS\n",
+       "rTQ470dUkgXaSgFvuw97WhjaGsJR6IYGi7KGZrgMDJVkM2TYjUiqHmTA1gTVSCcKUmhqm5TsEAkX\n",
+       "SYpATAM9QlEnUjcYUSymQwWTFogOFd1FIElKHRH5bAuNltRIu+BrEa6m0+qO0daXqToqu3sBjZyG\n",
+       "qCvM+ZJ9QsUAAh8uHoShLtx9ENoCzqdgzgWlC8M9uOvpmK2+nIXqOnSvwSNFGJLQTCpsWgqFuYDV\n",
+       "LLzx9NE3GQfg0Bk4swFXpyAqQqcAC3dD+wEoj0H3FggSMXncTUJ5EkwHjp8zudIVbGUSJItFvO4x\n",
+       "grUWofkw4aQO66OoHVBEDsVoIQ2JpbYIlQq6qrI/9NhWJL6AlpDs0sEKoNiEalKyYEicrErkGgwQ\n",
+       "lMMEdhBhWRp6KPGUAqrbBBlQTwsiZwzSixifdkh4XVxVpSZAyghLdLHRqBJSUVP0m3vBERS1Oj2z\n",
+       "zhXLpOhrNPMBXtOjs52E763A6G7gFigDGCAy4B2C5AYcAR558UbGMuKRvw3Hh2G0r7A5q3P9SxqL\n",
+       "F1Qh/u8defD7RcnL45eB56R8RbvtNx1S4gvBI8QF0M/ouPvugRBiH+z5uzBig6KBOwG7PFiwoBHE\n",
+       "c11iAEoxIGcJyoFKSe1ycNCmocOqgMwAaMDmMDjuKuExnTUlINRDMnpA14d5oVATCawgoDnkcjCA\n",
+       "TGSgRZKUYjBDh6v2gI4+wqQIkDh06BMiSBKSJsGGjJgXNUoiiS51FOkzGGSo6Q4hJwkDDfAJxXXK\n",
+       "pTnqyRbZARgl+PDjoOcVnvmfslxhls7cOg+eqHH+zoBSJaShQ6YGpQNg2dCtweTTJtXbdRLdUZIX\n",
+       "PLp2ledub7H5cHgT/rbfE8XIqyO6As/Pgz4LbhH0PZBRYXId7By0epBcgOHDMNz0qY9aDKwJ9EEP\n",
+       "I13FnfbBCVC9ACOQWF4S18nQE1l6cp35tM5QS9I0JZpqsCqytNRxPC1Co0xeQBKDiBRdWqyRY8Aa\n",
+       "GTXNWFBFqDZqJLFEDU0MCKWCRoQmBHZkMkefqqGgqzr97jSDCgTmMotmg3ZSIzBz0NmDPVdhrlgm\n",
+       "LIGZgmwVPv1gbMoKUIxMvnl6iM3LA8ojNVaPh2TKggYOtctd7C3oHNDoT+QIZZahtT6VUcmW04Z7\n",
+       "gW/cvDF8EUOnYLIHT90JUw7MdGBjArb3waUinNgCmYMrhdiUdioAPQuhCi/MuhgDSXl4BiuAhNWg\n",
+       "WYhAGYt981GRlSp2dBl3OkWo13BlGVvUGZHQI4Gi5NGljhAdrokKlu7TtsFpQJgyWbBtItXECzRM\n",
+       "GdI3Q7YDnWEsorBBGDSoOWlWlV0w3YdbDIJxSScaQRUCKfs0RBNVePRkhN31SDBBw5zEaLfwDQfd\n",
+       "TdJPtlkwevh+h0IrQeeJWSi7kC7sFI4+qB5ENrgZcJ3YA/sGWCfglmE4Wk7w/F3DYJrsq3XoHSuw\n",
+       "9neuw7e46Vyhdyz+AfC/3ITzfh/4MO8XIzcg9xswvg8ONcD24WIJtj1It+D5LDhBTKFCD2mo0IsU\n",
+       "ZiNJVlEoBgEDGywJFyYh8mBX08OzPdwh2KvGLq1bisWamkDoSZaFQUfU6OoDjoUeiiqoS522ZqGJ\n",
+       "cdqiy1V8UmRIkMWjSQePTUy2hEoU9fFkSEp4dLCpS4WB3At+C0wBShrCYWT4Ar5ikBm3KFwJaB7y\n",
+       "UPwRRhyVrp3h+skJwlyDKk2qXROuPcdm2CeagKPzcMtWiksfHaLzXI2tqTqXxsfwrh2nfe5JOCCE\n",
+       "+Nrbnfr9ni9GdvgGfwoP/gLs/gdwfBNSizC9BWd/AcwM9LIwBMgoYCVU2B6p4hsaoboLgjwETUL9\n",
+       "PB27gOJnSOGiiRSu0OmwzqPZIpYcBSXCkwp5IVHpkWWEMcp0SaCSIccGEo15YZCRIbYIUMMNpC7R\n",
+       "gGngkogICSlKuCxyEByi1fDp6jlSqz3c/CJBcRS7bLKthgRulkQYEhX7eEMu6Tr0k3DswZcKkXIh\n",
+       "z+atJqXkONuJEuHiJtXCItXx0zCfhfF1+s5Znpu2KBbT5CsBc8YQWyt3M3jq+/BBIcS5G3J5bhIU\n",
+       "DTbGYNqGfS9uG82DdxvIDHQ74FnxxNRKQd2EVATjHjxTdAm3QlxHYhg+QuuC0YYoAwwgYxA9MsLA\n",
+       "KaOZm8jRRVxFMBZFZBWN69osGWmjyog8WUwc1lmgrkaknWE69hRIg4CQpF5BCeoMyS6b2oCy9FGU\n",
+       "LH3jMH6QJEguwx4b5G56SoW2OkYGl1B0UYWBoExLREzbCoEbYBhdMAWDZotKUVJSTAwkXR227A7h\n",
+       "tx6FE0PQ24SsDmMVSPvwQhuSm3HK84+ZIOWPwWxNZW1miNBKY9YBXEYGM6y1+3CXEOKslLL29o7x\n",
+       "OxtCcAoocXOK8/uB/+omnPcdidgcbeYM3LYVFx0A0xvQnICqB8lz8Pg4aEVYkgG5BZXMHgUjVIkG\n",
+       "AVECFC/2XnJCgd1RWCqFODYMKbEzcdtR6GIjtCK2ULDEBBKdBgs8KXQyUZYuYyjCZoSQdcZo0qKD\n",
+       "IEEfjQwWITUiRmWFEAs/UNhQcrRFAanVQFERTg+NJnq0RSjAEwl0zSOZ0QmSSbRejeoZB70hSRYU\n",
+       "FH2GqFYFpQozY7BaRHy0QTpS6GU6LGwaRHqKfFXHcBNstD5N+3GAizBJrOF/Wx24b2Zq7yjwn4CD\n",
+       "QOLtrsJuhJTSFUIrw/6zcOgGP5LpJ2DuHmAmNnpqZkdZ6+9CK+t4E9Mg+8A2hvTQ9BSBatFV+gwH\n",
+       "EclIUInySBVUkcFEJaKIKRr0WSIkIkdIhEM86h0EPikqWHQQSh9NcdGRKBLUCNYVCCKJlHBNQBCu\n",
+       "obWztK1ZgpUK3akeMtEj3Rtj1oe25jIfzdEo9UlEPQ5vQb4McyVwd8NgHizPYu2gRpRvYxeWOCy3\n",
+       "0DqC5clR6s198EQOBvsBnf7eMitrB1h5NgX9CWgC7ILohTjF8SYXI5VnIf+rcOYG/ooSQboOyjaU\n",
+       "VyGbgpILjgeXsnE+T9MBJzKZzyhoLY8gEzEQtZjxFQqgDlYB3YK8mcCsKbQdla502UyHNI0MTWGQ\n",
+       "RCKRqDKiL1KkogR1y2fdmMZGp68aFEIdSzq4ooerNBnHYzGcocspUAoYigq0CZwrIIpEYsCKbDNA\n",
+       "4sg6AzHAx8DF50poMKS0MJ156pqNTCukQ4XIbzLQe0z//+zdaaxk+Xke9t//LLXf/d6+vS/Ts3PI\n",
+       "ISlxFWVro2TLi+zIih1ElgMHSeAYCZJ8SRDAiT8bMRAHcGDHMGzHgZPAkuM4lmVLIkNJliguw2U4\n",
+       "5GzdPb3ffalbe9U5558PdUmNKEpibA17JPIBGtVdt+rWv885dc5z3vd5n6cgr1fSv/w5u//HuuLd\n",
+       "LS5NKK5yu44v8cde4hPFb0nwLEeMF+p651ryN+VQzEKN4hL5q5zDd8jIb8ZP4R/E+M352/we40Ws\n",
+       "hOBSjO79rq/+g49zrJ4wq+OUjKzvcbLM/XMsnWeyPJe77f565eKFSr1f6K4xm0WzA/YnnP90UP6J\n",
+       "NWuTnntLleUsWk7oJuyHtpE1mVxdpl4tKUImhj2TpG9qURUSZ2LpUEsSzps4kdsxdknmjj1Lph6I\n",
+       "oeFMGBmFiXByUaNsKtqpqjOVmyqTkVRwthyb1Xb1qpF6QnjHyGhxkVgzvhhVg1w0pNlhVnJ5wAc+\n",
+       "IB58Vn9j0cXjA71LPZONUv24MKwvGXWhR200D8f7lvvVPMrKyKF5SfGbngt/axELZl9ne/3kHrN/\n",
+       "zb/oE5aXtb//eWu3ZmbXR8oYxGpgJR9bSINazOyqmSRd/Wx+EUurRWU2FJWmgdJYZUmCwgQ9pULi\n",
+       "CDO5XKKvZSgx01JYl+mH1CBwoDQsomOlULLwWt/Kp7/o87M3uBQV19csz5as54n+uUpPTa2bS0ZD\n",
+       "659pGRRD6/e43KT7g9x4N0+9EPSujjUWU6Ppks3dXKsqNNKeV9b2dBdWGBeEVY4Pefwx9upvMhOb\n",
+       "mSdIfSv31jdG8dKccNy5wuXuvP2yvUAYZfprHbPp2Elnanu1srJDtsO05FaN8XSJvai52nW//X7j\n",
+       "uzWqh7TvkhXEnrXzTSsPEtXkoXJl0dU7hx68c1lVNp3kqWVBkNtPckU5k4RUlMtmiVFGFTO1pFKW\n",
+       "xwZpoZ8mzsdMrF2QxVxQSRNoSsKaqQOsm4QL7ldf0Ek6Llqz4IGyGipTTtKpMu1ajnWT0JInpSwb\n",
+       "OCu6OqZ5wqhe2Xi+8FL9+5R/6x6Xv8xlc6Z2b8bD3+q7svcZXv7J0plpqWzlktnUqJbaml1l/z7n\n",
+       "vX20Qm8LhCDHnzP3+/iWI0ZVCP5f/CD+/qNYw9sMBeVdth+f38x1RvNDfjqiP+JqYOV1ek12nuFg\n",
+       "QLUXpRgHHuzz1D9l77GankQaUpdmmWxcOmgmNmKpKXUSMqmRng295Fiu1JJLw8g4CSZVZhxTEx1J\n",
+       "0pAplXInlmQuKb0mOG8cUvfiWFH1tY4m2t2XjN7RV9kRwlkZYjIxSO5qx7EnYyUP0Zkw8WoaTgAA\n",
+       "IABJREFUMDqf2KraimRma3lDrJapjudhO0ubZJsc5bYW+pYeW1JvDxRrex4uNdx7+LTZ3SHZJ7lw\n",
+       "wP/zKKYjH2Vq7wST+cj32wHVzflUybM1QsaoST7ijSaDf0xyLvLDN4yfKozWojwk6rWGZkJWVdLZ\n",
+       "iLQuD+SBIBomE9FUXV1HIsaeQah0ZYLEQxNXHWsqdByr8BWptsqKobtqRuraSvtx2V65qF490B33\n",
+       "5EVUPuTgwdjs74w1/9KG9kfOydKaw1gpqyCzzeqqyeoF9yTy4tiNDzxQtQsLnejkSunB944cLhZC\n",
+       "3rB0vK33XMPhYE27GltvHOgu/KLGuS2LVwqx03dy4VMm19foXeaAuXNW6a0fYZynUjprfhG8deoM\n",
+       "+DXM/QeSv8GN/4w8zmURtVt19/9k09HwsisPh4r+loP3Dt09H3mN5AbrP5f5hY8uGW51TEdLJkc3\n",
+       "aLVJdogHvHLP0rtZupBI1mbG44lLn2qpLjUMa2MhjxKF25axIisD1ZFJuqDmxL20UC9yaR6FatdB\n",
+       "qBul1zTjQL3oqrJKnkxNzETZKaWJSvtyQd3ENMmcl2nbl+rqJJXFZO7ovKnSxINQOZQbhkK9YpBy\n",
+       "7wp7A/KtaHn1nINPXeXF6lTMHQkP52XZr0P1Fb7yKxP7f3LHwvVlsZt5MPugvV8/oH2Hnt8l3+Lb\n",
+       "EB/FrRjdeIRr+Kpu5O8/wjU8UszNzDZ/mMvn2L3GuT2qHCtUBbdSrv0iH/oC44xP/Thpm/Elhg84\n",
+       "KOZmkPU6X/wRJkuZ4eDIw3O5q+PKYZ46J7odok6cGIWBY0sO1S1gaZ6xK8FlXZ9L1nTHpSQv1eLY\n",
+       "TFcaG9IkE0QzhbrEukqFfiPVu1rnIOoUXRtlV5XMPEhXRNsmZi6F3GJWKWeVWznrugbtmQfhWcdH\n",
+       "i5QHpDfJG5QFa116Tzi5u+dLT00sJW1hVOofBr36L/C+EfmQWwUxhND8VptZfttrRr6KGGM3hNo/\n",
+       "46f/a66co1lxL+HeL7HyYd71xEQx3nNwcWa1nTuKC8bJmk4cOwiJk9pMLdw2dt5uyMj7VNsS61bL\n",
+       "XKGpNFIL+7KkMI3LBuVNt7OxcyC4bdlI9Izcgsyhhq5EjKVMwyRuaveOvLHQM9lh9o/p/aslfqJu\n",
+       "/BNB3pgapZU8yWWxZ1qOzWoNMUy1Fhet7p5x97GaVnbs8nau3D22v3AsW+Dxfs1Kg7Qxsr3ywN08\n",
+       "lW3f0HqWd4a2je6+h4upB7V9x38mmGy9yvSAr+zxt+e5CG8NQgjpEn/8Od5/iTgk3mQYQvhfY4wP\n",
+       "vm5Pvsitf8Lkw1yWe/iuVffv55JxX75MPbZs3y8MFiee/SIHx9woc7v/8D3i6tK8CvL+DW4NqR2x\n",
+       "NLSaTz0zLHzwC/MfT/CZtaHj0PBUtWRWlB5Oxw7rLWWckvRJdmRxg6JwlN3XkEliy07SNk42NB1a\n",
+       "qRqypKYT+nqxpx6mRnKVXOohdp3Vtu7AtmBRoaFvQeWSeSX1GXPxbDesaOC84A3rbiZ3HbUK7ymo\n",
+       "pUze07Mbj2iepf9Vm6NDGiUnvg6nkzL/IoTw2QH/Tpt3X+boJu3b8xDBf/itNkX6fYCfxD98xGv4\n",
+       "OP67EIQYfdtNO4WQPsU7/gIfPOTCHV4Z8cIPsHiPtS/xoGJvxh/7IkXgVz5CeJYrCU9VHC7Ow2r7\n",
+       "bd5Y55lf4/B8obE6022WXquxWlXuhIoYDKqJu8nUWJAaWTHFsYYTq3JnqwOX1dxvrFipMlU8kTgy\n",
+       "Sh4T47FZeKCm6VrctGAsOjZKj91J7xue6xMzeVK357wVQ4kosWZJVIV9rTBSZjW7Se6gXDOJ+xqN\n",
+       "f2KUXxHLs0yG5G+wcZlPL7D23UYv9oxqv8rSR/jnr/Pcizz9PfziIuM7/PEv8t3/5gnI/2Z4W5OR\n",
+       "EMJffdM/PxFj/MRb+4mtTa7cYfk1YsKHjsifIHucD/3rsU+22Ki3PNOberB0xxuhqwgdMfaFMNCI\n",
+       "a5ZDX4KRI/1w4LzoJGTidKyej8VwZ14Fibk8mzmLjsRES0emqVCTqmuoW8JMEiYKLbOysJM19IZR\n",
+       "8T/jpwPfdZE/c1mcHZgUD1T1JyQyaTUxzB9TJKm06tnd3LW33rIQlnUmXQerTZvDvtqZhqvJWLHU\n",
+       "U41TZSitllMHMXezlTi3vmxtZ9fx2Zrdjeetf6bQWGq5t7Oh2voMpm/xHXLGu97JB7+f26n5CfZJ\n",
+       "Fv8FPxlC+OtvTqB900X007x8fonGj3O/TnHTwZmxUEvF8g5rL9n9OG7g1oj25/nLf4jegAe7XD+Q\n",
+       "LN5Q1brO1wuLfe48HawcRp0+a0tMVkuNtG4/XNYqxsa1vmkIpo7FuKAKK4pky3qMVlMy2wZhxTTk\n",
+       "0qpUSxiLVuOxrtzAZQmCmwp3LVj2mBOJnqiyhLrgTEUtzEfOo6CwJEotIDd1bF3LqiTs6iZsnnDh\n",
+       "sHBw6Rft/8B1bq2zc4X9z3P2kP/7t9v2McZd/K0Qwsr9uUZkYm4u9c2m/n5b4NRb5EfxXz7ipdww\n",
+       "b6E+hVce8Vq+pZg7q577Yd45Y3yV1xq0dvjhf8ZPP8YLP4N7nP8p+g1efSfN97Ha5mJF39yp+dV1\n",
+       "muk8FXf/Iu8wUWRBfVB5vRUcSSzHoBui3SR4MnY1w9Cx3LFMpWfT1IUYhCq6GG6b2jUJmVaxQLqm\n",
+       "Vt3TCwMzfSuuWQpjpYGxRSfWtEyNwirhxLY1tKWiTE0wVWioxWiQzduzySzKDzetzUqDVk2VXTA5\n",
+       "XhLHHR7eZnWLzjOYzCsmi6u8nJB1uHiW4RPsZcQLnCRc/gTvwa9+q/bf24WMfMNeTYzxr37LFhBC\n",
+       "g2vvn5vE1N/UL7sZqK+xc572dKwRK6PNRDOZ2Yg97ZC6p5LF62YWrYRUiFGoCnfDVMe2BSOTtCHE\n",
+       "iVYcmOEgmVkyn5BJ1OxrO2PirtIdqWuCKEHuWHTPhl4yEZql+vScpf98qPcXx4ymYucVjaUJzQVl\n",
+       "OJY7MMguitWiNCYaoW4h1hTJA3laCslMNc1YroRmqkwzR2lNrcotH5eqUCkaM9PtPZduHVs8Dg5W\n",
+       "v1t9XJOPE82Qqk2fNr7R5+J9nsYn36p9s8GH3sH+V4kIXODkMpcezLUPt77+PTHGfeyvh/CZQx5/\n",
+       "kr0m013xucV5qWBpkwc7fOrUPKkfQvbrO/L/OMifnljpjCWzuSX+uRiszzKzDfY2c8fHI8OFaNoY\n",
+       "KWNq2u5phVQW2uYzspti2Da1q5ZULlhQjw1lNVKrRhrx2FFYU6UzC2FqbE09GUm8dpoyH+Q2LCMx\n",
+       "tGdBX6EfK+thapZUlkRTFHJBqiHIZILKUKpjUbQrBKZLwcNsQWtWaV8MBp0bZJ9g64R/MJ6H5v2O\n",
+       "OK18PVJju7c5vh8vxWjnUS4iRjEEHzPXjfyBJiPzc7biTcQ4J3uecJ6VGfmM3jn2Tzh7m50h7XfQ\n",
+       "r/GzH2XjKs8VjEbzKfdxyXSVlR63F3HAhQbLM3pZ1M45k0QhrVSxkiWclzhXFdbD1EjlYP42T8R5\n",
+       "carCKPDkdGg35dz+0KjV1W2WbuRR4YLVMFWYCFInLqs7UqkJUkRTXcGGjprSguDQkZHFQM8FpUoo\n",
+       "KydFX71bVzQvyUfHZq4oX2yQdLnz+XkGSOMsxRovvo9b25xPqOccZG86v17j8EXe5duBjIQQMnOv\n",
+       "gufxr0II/22M8dOPaj1o0Ep/MxGBfELMmLZJVjOjhZrNklDWLZcDowZVUlOGhiLOjOLYKEYxpsTE\n",
+       "Tpp4TLSZzLRUpiHxRggKdZmOHblU30RhaoxEruue1MREadGxDZNqpp7tGicpBa2kbuNsota+o6Fu\n",
+       "ULYcxpYYxoYWlSLJTdEF9bImjQuyin56aDnpa2Uto3rHQbpqmGXKousgHTlfr1s4rDsejCRl3+Rw\n",
+       "5umXWu68J5c2oTKr1RX9041WZvMxsLcMKc0m069/vjF/qP1O7z3gVz7Lc0MuHvDe72Y0JFnjpfcx\n",
+       "/nn+gxDC/0j9Gd79h04s32J6HoukZ+iMGedBv71g+eZIsjazfSaxW6vrJ7nV6UQWBgZhXTQ0cUtp\n",
+       "ybyEsKsVEiKTZCyqSWZj9eTINKs7iJnEzAkmokpHEqeqkKlryb1hz4ZdTcG2bjh0glXBgeBEZV15\n",
+       "2mtORYU9qyp10ViQSEWDxoKBJbG3Jnz2yXlwT23G6gNe/apY7TSz4yLa5sZoyQLvrLF0xI1q/trf\n",
+       "sh++A/Cn/A4Vpm8xPoY/jb/5qBfyViCEcJmzf5Trl5jNQlj6NCcfR0a4yplDlibza+vCCCtzger1\n",
+       "n+LxOvEd88DP3UW2Z4xyHmApMF1ht0G1zSihvsjuZfImVZ5SRqtJYhiCaVVpJzPjJJhWmaUQDJRS\n",
+       "7MWgE6PdlIPALEu0Y2JlqWYtm1qsVnwlaZpVQ6Mw0TIy0jbw0MDIVE0hNdJUOlLpaupoausauK+U\n",
+       "uqQuM4sjk/w5rZNbRmkpzqaqWqFMb3J1wsFj/PzLLD3ky5d4/lnupXP76fx1WlffZHgIY7IRC6sh\n",
+       "/FgyHw/+Mm68lVOvj1LAWuCHHtXnfwP06PbnI7yrbxLuLBzwcsVaUVN15u31bpWYhAWdSaGXV/aS\n",
+       "uoWqr2fViXPyKoqh6zgZKIuuWRg7GzKNJBig1DZWCUqpjqnU1KFS7prGaVF+aKTvFUSJpWRi5IyF\n",
+       "WDdujZy0dk0MrUtlIfV80fWFrOYNT8q0VVYlDqTuG6YbkqouL08UcV89BGnS9HDxjDSMdS1bizNp\n",
+       "9ap7tcrqbFO1/dC01ndrlxvrQ+d2Dm1fWTCdRQfJWcWdCneoDeYqyrcMXV66xfe8l/tffW5Mejq7\n",
+       "+OC3ex/EGB+GEP7OHv/F83NPjaLJy9e4kVE+Na+uPF3a+CG+9wGf/KOpcDX31KxQTVlMKq9llHoO\n",
+       "LyTOlNEsz/XDVJoOjZuLuuG6RQtqorFS6WWlQxTEDZmWuTvMUFWvU1QKY71kYDeMJJ6TVxs6lkxD\n",
+       "qfCqY31R6Vjb1L62iYtarql0jBQqhcQDhS87UbcoWjW1onDsgWNn5JqT0qCsqaqRo1pTv1zmOMEq\n",
+       "yzUL/2kIF+/S22XpWrDyWFNcmjqpbejO3q/60gKT+3zXV9gKIfy9b2Uf+fcDQpDgx/CHHvVaTvFx\n",
+       "/I0QpI9oxPgtQwjhLE/8h3xkwJW78wDUL3wPn12bp3cv3OMrT3G1TpITBjycEdr8kU+w95F5Evfu\n",
+       "Q7oTbjZZSXi84myfWY1Xpry0wKjOjQ5lHYGYVM7FTKfKLMWpvaStUYxMk9IbSakjk8boAPeq4ExI\n",
+       "NMrKLOF+UgmxcrtRyELDYbjkpHqc8uftJXc01BzaNLIucc/ACq4qDFVqMlu2PZCpKa0o4nl5qKvU\n",
+       "TEIQkttGVxfV9x4qixPD9gbbHY7HxNf47h6/NuAff57d23ykSdajV/D595/aNEBJ+Nd81wVO3kkt\n",
+       "Id7g/a/xuRDCz7xVhOTt0qZ55IgxliHU/yW//Od43wFne2wt8soyd19m+H1Re6ESw8y9LLdcnkjb\n",
+       "HffDpv3BA+NGosguakklmFYtZkfG6ZqtpOUgFKKuGFmy6KyhQuK8gamZN3DWUN9UqbKssopUYVff\n",
+       "xCWs64QTR6Gp5YrM66axqx0qDxullgXFaVWkkoiWRD1199UlkvzIk7NSmtS8kjVVWSodLJvManbr\n",
+       "hTSsmFSFYW9FVTw0foPR3+Hn/zRrl27pFYmHq8+afKpP8gLXbsynMm6/lfumxyc/yzsrLl7ieEjt\n",
+       "Syxt8XMxxh6kITxzhu/NWB9x+4BfepO4tTtjf8TDiqOcQUFakC4QczZLnRYniwzeG51pFjp5dJgF\n",
+       "SZG6OK3cr5futqMbBUd5y0aYahipQkdm5Egw05MayE0kJmZWnISWsqpJw0CZnKW6YVqrWVG5otDX\n",
+       "dFddkeR6JqaagksSn3YgkZjZNLaMZQ1nVSqFqYmpVEvmTlwQDRUhV5kKhhJ1x4Kb+UyVZXrOOhpF\n",
+       "nv/lecVj+yuufHflyXs8+wYv/PuLXn/yceXtJY3eSPnEWD2bGu8+Ln7ucTS5/Mt80Pxi9x38Bt6H\n",
+       "wxi9/qgXAjF6GIJdvBsvPOr1/N5i5cN8V8GV0xylZjEPv9x6moOb5DWKjBs1FuPcU2SvYvWAkwtM\n",
+       "r/OgpIicGfNKY+7/VwXu5xzUmR1wvuLmBkWdp8O8WLgtej2ZmYXKPI5rJk8q60kijampShITszIX\n",
+       "Q4HKXkIj4clACC27cq9btuuI8OuEUqbmDRfQUEoVVnHd3P9ipnJeYQNfEfRVVsQQpEplTNXkpqFj\n",
+       "mt9TrMX5jdDeBW41mfRZvcNTO3PH5Ookxo+HEH7FvLg8SLn6z/nJJ1hv4GUWM8Kf5IXaPEHV9Xml\n",
+       "9L2f4ot47a3Ys98hI29CjJMvhpCO2f4B8vOMe2SRP7LPzR2K84F2pqbuQdxwHDeU1VmhOnEyWpS1\n",
+       "j8ySmm4SMVQkDdEzQriokqrcF8PLJmpyhamZW2rWDRGNJYLKGTU56iY2kZl5VUvN1FBNadFMTWLs\n",
+       "JMw8dOKSljdsqkvVRSP7RhpKmbGeTM9KVZNP6npZrqdjfbKpdXiiu1A3C835GoYDk+Oh2f0e/yjG\n",
+       "uGMuYFwb0HyZC7u8J1Lu8/GCF99qw7oY40kI4W8f813LPDNjZ4+fiTG+Dq0QPvAe/vR7OFjl+AHX\n",
+       "PsMzIYT/hfom1/7sxMIffmC22XcwPmd/eFeS0tq6Y1ofK29wvM7Oc0GzCGYSnWLmoB7t11JL08rZ\n",
+       "NHpYrdhpblpJFjF24q6h3IpjBx6KzuC8tk2JPV1DE3teSyrnZVJMkwVdbee1rcnUJI6UjpQqOXqi\n",
+       "gcoEYwvmxncLgianXeRSlMtjqlPNtMKifmwapZeJLWkYER+YVhN7s45kdEbj4IKkWOXoDhc/Y+Gp\n",
+       "meujwuOvkuR1483r1qZLBq2m0FuTThvy4UtmHy4Un8+IT7HzIu/1HTLy9fgxb58WzVfxVd3IHzAy\n",
+       "0rrCZve3Pn82zu+Ljs7Nc8aaxVwb8ljBpy+xs0z3R0ivzts3k5QbxbxysrE3t3Iot2m9ijXapxrv\n",
+       "7+kxXCQLrAcKwXbIPRdn7oapV0PNUKYepirRQJCHGUldUpaaaelxwXK5ZCup64dNjbgoDy3BWBXm\n",
+       "lfBgE0sSe+bUJzd3ahtiSZTJkGgrnZdaMBPmrsyxUJUV0x3l5N3czth9ncGUp1tUT9Cb8ONH87Hd\n",
+       "//3UWuOr03C3Qgj/w505/6k1uPyjPP9VIgIJrtO/wTv8DmQkhFAztyufYu//T37VtzUZCSGs4oL5\n",
+       "hrsdY5zEWL6KV+c/X/8Jvq9Bd4lrKaO0o5olqlpiLVS+nK3rx6+IoaFlUVlcJx6Y5jMhjsT0HHJp\n",
+       "1VaFVBUuyZyo3HffqksaZqbuC7pKha7zUjU1TaUEh2rac5NgMwyty6VSqUwqc05q6kiqZyY9/d/V\n",
+       "JYIT4+qhJHlgSWozJob1BTuzjnJwZL9Z19ioCU5MQ2lWbYsHR8LwWHpnqPzaQRdjPDj96/0QwufM\n",
+       "HVfrWHLqN/JW4rQC8onTP19DCKF+hR/5Ae61T79cT7CXz61mf/zIY+s1j29cNNuZGubRwuo92eYT\n",
+       "jg8fSs8+9PhXaD3f9KUPr3pxqaGVPfAwnWh0ok5CFqMj0VFs2a2dV6U1a6JoQcOmvrEDmaguWLHM\n",
+       "qdonF5yTCKKagzg1DI1T55hK3djMQKkmlchMTc0r/tFM1BGcWHKoFMyU+mbGZo41zWRmYeAoLfTi\n",
+       "oZF3CdU1aTETTaRpKhT3TNNz0rSrvvm6KlvSaLSN96aSs0c2fpYzu9zfbNlda1s6mZmuNNR2I9qy\n",
+       "aWve2lq8SLdyurjv4Ovxw/ivHvUivg4fw3+Cv/aoF/J7i+k2B9dYGdNv0l2ZG5nt50hYfIkXrnC+\n",
+       "Pc+U2QkUd+h9H7WMy9Vc4hYrPtPg4ZQvr81b8/URi13G59itz9s3jZS8pJ/NbwQuiu4qnIRK3QUL\n",
+       "cl17Uh0bsWbdSBYGUoXtJDGOLcdh5uU0c2hJ37IYKovamqduQif29BEUEsuCE/P+WmZ+gTpGX6VQ\n",
+       "xIsyURnmNyRRTRW2hOSOvEbSi8ZrDRZrzDaZbHLzhOSY3nUe/1W+F7/41S06t83Pn6F9hv52pTgp\n",
+       "v8FQSTXf0L/tFF09hOcv8yc2qU0IO/O27v/5pmvH74hvSzISQggdfugp/vAVc7eGNxiFEP43841d\n",
+       "wxaXnuLyIb/yXq41SZPUwjgxrSrL8a5ue+b2KFdUiZq+XjIxlxe2ZcXrpmkTiSqZiNqCRGkD+yoN\n",
+       "M211iVzDiZktiQWFVVGldKLuSGLJTK5vakNAFBVO9PU1tHS0bJlhz0RT0NZG4phkT7uqW4uZViBN\n",
+       "Rhab0X5JWVYGoUl/k5OXWbutWQ58/6vzL+GrfymE9s/GOPi1N22781z481zpUC5yvx1C8zOM/94j\n",
+       "EjeunyFrf50D7GUOg/QjPH6/aXqmZraYKM6fSBcmNsIX9VeHSj3v/ULTl566Jlk5K9zasffcdeNk\n",
+       "ywvZ0DOxtDijW9ZsHy9JVmpqiApNPHRecEMhGLkglRgZ4kRUWY0No1AJlqWhkJmZ+/S2ZaZKa1JD\n",
+       "LWPHNuWmp0TzgYmJqCN1pK1moFAXfMWaNTVUulruxaBrLAtdafW6Kq1LLKhrqrIgpBOdkLi2nzI+\n",
+       "crJyYGt9Iip1383nK/YuVZLOVFZLZMnEaCNSLZi2qAbp6R3SK3Nl4L/81u/ity9CsI4n8euPei1f\n",
+       "h1/CPwhBLcbfKv7+/Yu9X+PzzwXDs0sG1xeIY4cLI68d9/kZRgVrD3jlMmHIlVc51+J+xV4yb+HU\n",
+       "s9NDOrKwwPEBl/bZRP857mxyOGI1ocjndYpRnJORCQqFoaYjCY4ElaeVlo3tB0YyY0sWy4mDrPQg\n",
+       "XNLXVrMi2JCYmZlIZRo6Ftw3NTNzonBVbmL6NYHcClLRrtK61DlF9bqYbqBlXjk5UDN2PhTaazfd\n",
+       "Gz+u29tk2CHd54k77K/wlcfZ+TIfckpGQghnuPQXeWqRfKVm+2L0xuwlxydPs908ddouCK/ROuKl\n",
+       "b7RXQghXnuPf/SG2l0+dmV9n4+P8hRDC//TNWAF8W5IRPP0sP/BR7uSnR+VLnCv5m2f4UpPZFuVd\n",
+       "J61KdzgPVxufCxpJoaoKZTpTTypn465xo6ZZq3lQnRGql5XZU9I4xS7xHOExla5gJlrAHVNBw0yq\n",
+       "J+rqyx16h8ILXndioJAilVsy9EB05K6oMpDITSw6ENQ0BYfom2pINN1XE8xUgh3N2HMttD0eCt0w\n",
+       "M5ZoasjTd5gOMbpLq0eWaxys27wzcGmfcz2eyPinPxpCeC3GuB9CyOdE5IcCt8/TOctjuPdTvP5U\n",
+       "COGvxRi338odN2fxLpqTxrsY9eeMXfKm1/WoF7KSWkn3XN3s7KL2uBRrQ4lcs5iq4zPvX/Hw2VTR\n",
+       "uOPg6TNi9rha/zWxed9RNtENU2G2aHO/sLXKLHIv0NEytXDqljoT7ZqaC10zicuCGPYdnFY2glKw\n",
+       "Y6R03tDsdOImyqSGErcEBypDLT0LKlML9mSaSrnCgYsO5BpKQcvQWWXYk8VFRdgU0jqOJXFPUa0q\n",
+       "40hWDS2WDXmvVHWC9WxqnDA6arExdvfPlt770tDi7b4711ZsHMyMliuT4S1Hq8equ4e0v8Tqa7wx\n",
+       "4lFOvL0d8YP4pbfbBT9GhyF4zVzj88uPej2/V4gx3gkh/eVlD/+bZcvDYBo2Hd94zuC1j/Hv3TN9\n",
+       "gtUOz+/RbfD6e9kZEOOSo/qi+8lcFl7T1wlsZPPsyG7O9uV5kXwnIZvNp2B2UjYCkzA/w9zma+qO\n",
+       "A/uWRXWVXKYVgnUTD5Il5aAmJn1FXje1ZlGpMJQj1THSNT0dZYhYsO9QVMqVhqef9NWxtvnBlUpl\n",
+       "YWiSZuYnwAOMJXZVYVEn5GpGzqdfVq6NTNv3ZcVttdZYbXFZ2R87zilnIYQwb6Gc+VN8JG8YXtow\n",
+       "Xuu4MBhqLnZ99sxPmzaf45UEN8nemBPc299ov6zzgefnibJfi4h4gr3bXN7iGr+7nurbkoxs8v5n\n",
+       "Of4qEZmQ3uVDzwsXo5pVxY33Kh/8gu6113zsTGrWyXRW2i7F3KTdVeWZRpkpZ3WdtKkRDzXTkVFc\n",
+       "Vp9NJBVhtmma7c/nQ01Oq9snopHCRA875iO+R9ZOJ9SDidSRkU2JdaWJ6ETUsuTYoQ2ZqahStygV\n",
+       "DW2bWJI458GpB2BTW88wHpkEmroOMZK4ZtkrLlnUNmksGhSXVaM7TB9aPOxII8PTcdlmwRORG9ew\n",
+       "j8tcWZhH06yf452n5bcLIxau8Pk/d8qC/600JHPS43Ha5xgdUL0WYxyF0PoA13+US8n863l3yP1/\n",
+       "tMfrL3HtXTxkrgb/HOdGJv+SNz4UNRuVTJCWQVlGJ9mSonpDml6x8/yzTDZU/ZvGZ85KK/Lhimp2\n",
+       "32Rp2dJgKq9OOFuqOdaNC3ohOaUce2baGp604YEjYzOVDkqlsS1tXZsmptiKQ2th5ERNlOhg36ot\n",
+       "ly24KzNzVuqMKDF1aOAVm15VaKoUHjezikpNoumuiRUxZHKJRJDZJNxWJDcUcdfabKZdremvZxpZ\n",
+       "YVbeV4+p5n7qjon2eqJ7fWbx1TuqMHNQ65BN9GqHiv9rn088YHqa6nvjUWRWvM3xUfzCo17Eb4Ov\n",
+       "6kb+wJARWFMtfNTRx5YdDesUK4wj1tW+/57HXqPeZvcs9chSjS9vnzFpXNWsLVoq5r4d3ex1J6Hr\n",
+       "/ZH1Dv1VLnyW6RovX+fO1lx/8skNzoV5DWLPvMXzDrxuakXNVYnU2MDYVEOhUI9N4/7E/lKqUdVI\n",
+       "k/n5x7HMMVZERD0TWwq90ymZROJVlcbpWSBV6ZlnLyzKbJvEHuFZmZpcJZ4OKyTu6oWgGftaWeZK\n",
+       "7yV3046LoenCSa6qMln1BW9875Fbn6J8JoRwwlOXg6YNh2trGgdQsz5Ora2ytftzvJByPOLmb3W7\n",
+       "/g3UWF+aF2l+E5bmD+1vZr9+W5KRlE7rTb4Vn+ddLfXHF9SLYyvZoeT6noffddnin2owAAAgAElE\n",
+       "QVSkGnm9ESXnD8yyQr2cyZMgxNROlsqLQqxm8iqVe6BKNlXJdfkkKpOSohTCF8WkRAcjdFXWtPUN\n",
+       "LZq4qtKVeiBI1eZdQGOVmyrnJDasCaamcuvuO7FsqqXvWM2eFaVMaWBqU89EYlSVapEsYTHMu44N\n",
+       "lVRHGWmHiSoZqddyo+4a/bsmi0c6B6y+OTo6+I3+4QJZk6PrPPcmEVlWzD/j/Brb5/wu47a/E0II\n",
+       "C2z+BZ48x+aU44xXByGEj/H8j/Ej938jDny7w8/9+X13/vav8GM3ubpM3CZs86kZP8vrl1LnxzsW\n",
+       "k6ZZu3CcNmwn+2qz3Kx2Xr1b6NUzRbchLqwp0xdZPrZUjYRyaJxXeo1SqKbqyZ5L+maifRN76tqu\n",
+       "WpIJLliML9gPY5Wa6SmtvKBmU+bE0EnInNNRmupp6GlpaEkcmuk7o7Dg2FShp2WkZcU1PQv6ujiW\n",
+       "OaupkMvURKWaSk9HYWJBYaZZlcp4RxoSzbDn7HjXINQkgdXhzGF6xuigqd2bmXUKh7OxJ3514Cd2\n",
+       "XndzI7O9VugljP/Ktzqf4vcTQhDM9SJ//VGv5bfBx/BX8N8/6oX8XiKn02Zydm6ZCoY0U/V20O5H\n",
+       "T71Er820xrl+zae/56rjg7bDxSBLZ8Zpw6oz9nSNTqdqOjW2n6S5RNIhfoDrCdtTejWGCRtx7sz9\n",
+       "onldIjeWa0pl+l/71kbCgb31ZVvjmQuGhnFLERqCXOqewo7KgNPpuw3BxLaRpwx1VdYFawq5eRT6\n",
+       "sdQtEy21sIREIqqcYKhmQYwTJyYuiM5Uha3azGpad/6kIQtR0De6PLDebjl4/4ccju5Su+14M3dc\n",
+       "tmW/iUgE4hP0b7M9+Sb8v3rc3OJDZ+YB9F/Dw/nD/jezX78tycgxL93hh84wmJAfaj6zoT4eClVN\n",
+       "+6QwWOqobS6Y7Pc5eZfq6FO2zr8hLLOcpPJ20CqnHiQt+8WSB8k5kpkYD1TlF5X1VXpDy2FDaSCm\n",
+       "A3lSmoQzhqEtOLIoqDQcnHqpB2csuGrdocIb6o6smhfiUgV6MnVNNZftGyvdMbCMBU2Zyh01x0ob\n",
+       "porAYsKyuQQmMVfH7CnNVKfWObla6Jt1hqrixDjfd/HGXBwG43Sef1feCWHxo1z6QfbeN7fNH92j\n",
+       "sTX/vScd6q9Ti/6tj6mVH+SDG7znTRbzF1f52f+IZ+7+BhFh7gvwxCp3zu7xd/eceutzEGM8hBDC\n",
+       "Ty969bme1bV9y81MczRxococZblB1jActIxe6olXGoxfE5a2rOeZzqyjMR5rVgf2FuryakUttmUm\n",
+       "WvZ0wkjpGZVcIpwq47vW9GwKLuqoy0wEQ12HooG6LR3nnNjQt29BU2Vgx5Gupr7MxEDdqoaaYOS2\n",
+       "qcc9dB6fFt3Wt6wpkZoIKrmgYWTZyL1TypnGi5aKun428Eb7rpXuwNMhd5LX7PUXXD6YGjQre3l0\n",
+       "tsfKIXnF0zuFSUr56e8Qkd8VT5oT9ber0+mv4j1h7r7V/11f/fsEB3z5Lj926U3eGKj2VKKN0+cW\n",
+       "BhhQhNxs6QxbbcfpofH5mUaWaFUtqdROKA0jzUh1hmlCL+MdQ56s+FzK3UBSpx1ZjvPp1tVAS6Fr\n",
+       "bE3lgspYsCN1aGoUS0V9qkxn3itXmiqwY82hHaVDVyxbtS4VVDK5V9y0qO2uji0TuQMXTJw/FbSe\n",
+       "l8tPRxkK2Wn+e0PPKBRWY3ReKc0alseFEHoWqq6sFxRThvUrVl6ttLN3Odz9ILOfdvjOW+7NKhtf\n",
+       "q3gOHS8s6e1FyvgNzCYhhJDg8TXeFdDjzguUDc5cZ29M/gXO3eVl3+TN6bclGRnywufmI4qXVoiV\n",
+       "RnpH0Txv6XZTNin0zqxrdLeM1mqKO1j9sGqnZ6uzZ5BWsmRZETYVZUeZrkhCJepx/ARbn2fjrno7\n",
+       "FUYLqmZHnmxIRYumCiOVbT11Q4tSh3Jn1JSWBKm26IqRTKrSkbsnMfSEZan9UyvgdTMXcVPTq5bV\n",
+       "wZFDUwPBYphLpZaVp13JeYlxS1cMZ6VxbC3ueFjl0smuWrhv+mJhNOCVTWYJr9V445N1+Z/PLX/v\n",
+       "2JOvF+KLHH6Yh49RJVRDDkasP2SrgX9Lzcjie3j2637H1cO5v0DxDQ7qFkI9xiqa30R8Pe5sa2Qd\n",
+       "l4/XrQ5yndlEme+rrVZeHq4Yf/YMW29wfI8nj+TtMwq5cX5iVjtSJFFDZjG9YFblinAiDUOZE2e8\n",
+       "6lbcFUND6kTN2AV1LewY2hSU6g6VXtVSyj0usYjSnouGbmgZK1QGpipjiQ25uvbpCO+is7b1DZ24\n",
+       "IppIbJsaKm3LtCw6J5cYy8x0DJNd9bCoFhiVqXFy1uHSbXtVKSsTYX/bw6dLq7VSnk/sd/jcR7j8\n",
+       "AlsLfLFL92s20Kdts8KceXYwPh0N/HbHD+MX3q6BdDEahOAF8+mJn3vU6/m9wowvvcj7Ey5fobvL\n",
+       "xRvy61viCVsXuHL0G8XcL58bm3yuy/c9q3wpN0m3NM4Qqz2xnqnH1BezUrtR2kd6MtdsVnXuNbhU\n",
+       "0U0Yj/8/8u40SJL0vu/758nMOru6+u65Z3Z29t7FLi4SIAmKFCmCpCRashy2LPnQZeuN71CEQmFL\n",
+       "Yb2xHWFLtiJkyxFSSJYUDoUcoiRSJkUFRRAgBJA4Fwtg753ZuXt6+u6q6joz8/GLrAUXy6WxBEEu\n",
+       "QPwi+sV0ZVQ/U5mV+X/+x/dXuU48lFGkHMx7SI4U8jm8vS0zizUdQxdr971Slq5g06Fcw0AqN7Cr\n",
+       "0LVhxYbERDYv4x66pGbZRBDsyQyct+3m3H8qOKe0L9ESpQotdQfG9jDWKWu21ZUK9XYujKJ0xpUv\n",
+       "c+PMKbW8yWhs3G4za1C81+zZu156Zsdsdd1SUhpkNTfGjxu8+ms0yrdxYw8hhC7/1uN86CEGgfga\n",
+       "H3iBV3+R8QJPlEwO+dgJn3qn473flcFIjHEQQvg7H+eDHX5gqHaY27yXmnROmTYTMdszy+6Kkye5\n",
+       "dlv2viXNEMSlVD15SN9lMe4JyQUx1k3DnrxYFidXeGXM/mesLPe0snPuP3BeXgyV2dg0TCWGc/eQ\n",
+       "TR0zuQWFYu7GGFXdBitadp0YWrBsYkNDVXfMNdXl9t1zWapuwarM2IrCqtxVd+SekM5x4EFLISjt\n",
+       "YmjgglfdsaFXdqTbJ06/smVpL7fzHJ//F7z2KMWUfv8Kf2hN6yMLWocnbjz6ujTfc+aTvPj7uHue\n",
+       "Ux+ntsOvLHPvn7zTh1QI4Ryr30fzLKM7HP7aG6+Qvk3PSbLH9hJPvSngKFW7lvi2fiBVs+vafzSx\n",
+       "9uDUo0XPySJHa4WFgsuzVL/xGXd/qmtUtIkNsivSGC0mhcWwqiEYlDdNQqoZx8o400xaauWyLDkx\n",
+       "wYIFY7QqhwhHmtYkSoduyxRSM4uCRYUD+4JFYy2sOzFy4s78GjhSIrpoJjcyU9OzKNi1qKmni0UN\n",
+       "I8G+6MCiHZsKh845NpHpS7R1Q9M0nRqVK1butmTxnnR3anVp4PBc0C0a6nejK5+m8wJf+n6++F4G\n",
+       "v8j05yoX6+wKGz/O5TP0l0gzVg8Y5SEsfZbeL32XO/f+GP7Ru72Ib6A3+kZ+zwQjMcZxCOHvfYIP\n",
+       "tTT+fGFzqe/hl1ka8JUPs73OxdfZwfV7hfE/eY1HNli/IH9+x+D7dmSLM4vjhs5spFsPejhKWa3T\n",
+       "LVlv0Emr6ZtO4Jk+n6nxckF/uZq0SZDO76tN0WsK0xCdEu2JVsMbThlxPl9TU9dwhHvaSGXqZk5s\n",
+       "OS/qaAhGVjS0zdwyNbEqOLAkaImWpV4XdRWmxvE2dmTWFBaNi1IIR4owNMurQkljlfFOpuz23Vw5\n",
+       "7/DqpzSfONDayExmdYMvnvhyoy78gQVlGbn+KRZuV87c/bc5BZce4ns/+ibj0gfZK3n48/zdff7x\n",
+       "b4Uv8oa+K4MRqoAEnwgh/ArFX6L71Ff1Ojccn2uaLmxKe48oP3Gs9tBYPT1QnDu0kmVqsS46MRVN\n",
+       "y2gcZpJ8JCnq8mK3Kj5OM6PxuvKRsSQcatUalrTmcfGOvq4DmxbdM9YytiQ31DGZNyOVStFMsG3T\n",
+       "WGrBWKqUqkmtKtyXa1mx6JyJPQe2nVY6bd89fQ1MncxzLU3RRLSFk7Kv7PUtPscjr3LhNvUpP9vl\n",
+       "zI+TrjF6/RQf+Qjj+1qDhvZJF00ni19w/xzNr5RefGTilX7kBQ4/HWO8/U4++xDSh3jsT/O+Met9\n",
+       "dp/k2fdVLJ3+i7z2MI+/KcC4t8jRqxxNWDnPg3tMM17Y4MaXvAkTX71/OM3S97D+h3h0BUM2x8HR\n",
+       "atTKUr2woL1f6NSi+tKGUadFfkirQZGqFSO5qVboaGs7qM0clCPdpFnxW5IqOXqg4YEwtmVZ7qxJ\n",
+       "HBqGE2tmHpLYVtc1dG0+EdNQuB2PDEPuIVX2cl+qJnFeW2aorzQyVTp26AFjucxQKaj6jY7nnNaJ\n",
+       "jq7cjgNHMoUFhdMecjT3I1qY7qo3Sv31sfpoxXTtrJP9Q4oDaRxoXS88/mlufYgP4vkNWk9z9b0h\n",
+       "hI/z+If4gUMGU+4/VjXypS9y7mW+8BG+0MQ//21/Gb8DFYIafgh/7t1eyzfQx/A33+1FfCsVQrjM\n",
+       "2gcm4pMT58f8xKd+vXz76D/nH7+H//cTuK7iRxUhhL/4K/z3K7xnbDKdOi4KSwohNBxOS9vjYLA0\n",
+       "08qqcd57IbEZoxiifsFOncYxOyd0Fyru4yjlchKMRbexpHBPqYlF3AhvjAJXY8G5higotNWMTXWl\n",
+       "gqhhqK3GnKt8go5MTa4naEpV/tw0ZM7KHCk8r69Qxsuo2deT1IKV8rRmmJmsDLxSn3r1TFO7MzQ4\n",
+       "s6y3dV3nqZaneg2d0Y7+ubFXLj1lf/u0uJfS3eeh15iVvwk/apGHr1TUuK8FHHMo2ug1HjuM8Tdk\n",
+       "U96JvmuDkTdplVaXzunSlfxYtnPs2XLBq+NFcW+Ln0ylzV3dMvfEYcd2O+rUDh0lE/n4eUn9vDIM\n",
+       "xWyHjXt8aIDc8WjTQudYN/S0Qlcpyg2kps5oed3AoUx1RkszqUOpDTmODY0kmvpalgTRSFBaUhqp\n",
+       "qQoDLU0VDnjBzLJjO3KJaF/pptSyXF1wJNiKUX/cdLgz9sxtLo+onWdwji/VWLnPjwxY2+X5xxp+\n",
+       "+fd1nPzCrmke5WmQFU3jzmmHl87aeDkzO8z0Jl9l7f47bFKqzNjO/hQ/fFD1fMDaiM5SFYzs/2s+\n",
+       "fYneBU6dcNji+cj238ceH/8Qzz5DOWHvnzF79s1ReLWbf+RP82hg6z1cSYPnHliwNWxYSFnpF/qr\n",
+       "idvrifvxacnolHpMTAeBc20zxw6TxFJaAfWLMppasBPGMqVWeWIYZrZDZixYMjTQFgSHYVGp7zhy\n",
+       "PUR9qX2UplbiLeJYDJWv756qHPuA0lWZhqZFC5b0TJRW5Pq2Neatbj25ukxUWtC2qJDItSQeiMGr\n",
+       "MmfCgo6evlJZpmplQzOe6DcGRo2nnbrfkR4vSxub8uyaq4/tGbSrUcj1nZrDs+f0H13Rah5r/uie\n",
+       "6es9zY/zynt53zHLY165Qnm1QnDf/kAI4eMxxqNv2bfxO0cfxrUY7b7bC/kG+jweCsHa3DTlO1oh\n",
+       "dH6Q9/5BnhjQe4Kwxq82+b5PszCjWfLkITcO49c/FCf7Nm/t6zycqA0f0ws7DpoLmsdtraulxfqu\n",
+       "xWdmTmWUddqxdBfHeWZSFu6lUdqmWOHMiKSgvkp7nq/YVmUh3q9y6q1K4lS+24lFUd3MLQsOnVZz\n",
+       "3y09qxoayEU9QZTqGiIVnChNTS2rWdN028hEPncuoxTCY2I8Ky0ODbMN7XDXfjq2Gtvuxa48Hyof\n",
+       "WHd46xHlLw9l33NXc3OofG0opAeS8Snp6rLkfU8qn+uwWw0au/hZ/kII4S+/dYKupHg7KFr+DaBo\n",
+       "30jvajASQvjf8AE8G2P8r9+dVXR/mEceYXNCbNFr8OTnrjtOeu4+1FGu9KXtvpUYzGol+W2jWmLD\n",
+       "zHKz5SCwU55RhMcqrF9jTP8GjZsmaV8SatpJX2FBW12m4YYldQ3RaYUD0TV0RYU920pTLas6atYd\n",
+       "GNhUAeZJFKJ9Y6XN+QBYKZfryRxbVLhk5ljulpq7WBBNlXpl02M7Uy+16LXojQg1bm/QP81DL3LU\n",
+       "Jr7Exb1oJd+SP7Jp+PKW3fdlusNMf7MrnbSc1M6bPHuJmykXf6Xywfi1d/CBL7O8wuk7X//r88dU\n",
+       "pNcQwv/O3tN0L3Kyy8lzbzSkqnZ6H3u7N66aqs7+EX7wpOHu+1Nhs84ksVgvvdYqXJgm1selYai7\n",
+       "np1yMonqzbFsoYW+PF0Xy4ljI2XJOBmapQOnputGWWbPzDjkDp04MbAu2lLT01DqCgpR0A5RX5Rq\n",
+       "ObSJjkUTRXnTfrZnrOKznEi8hEKqq9DW1FB3z9CJYj76t+SehzUlEi3BVNtEYizYlyq1wkRqZmLR\n",
+       "ssKqVx2ky0bNEzEcKrLT0rIuqZPWo2HSVIuLxiv7Ot3o0jYvvnfVqHjS6ZczablndOGsovZl/+YD\n",
+       "fa0lVuZ1+DaGC7QnrJWq6b3vxmDkx/CL7/YivpFiNA3Bp/D78dPv9np+OwohLPHIR/mJW1Wz9Us5\n",
+       "61OaS7x0iQ9erY6c8aaHYrUBOv3HufAYTx2U2jtHPv5YTW297XzMTOq7djcKG6Fy8K2rMhqzSD8r\n",
+       "XU4bTh2Pjfs0ejTOpo5j0JpF+wo7abW5KHCoyoIc4dGYuhYadkVNpZHotrrUsZa63MxVhcxM7sRM\n",
+       "fd4r1lDaMdGTa1q140TLRJz7cJ9TM1JYEh1KwhnSdWVMHKMM9xyHnoVYuNxtaY8Pjc8+7/YjZ+S9\n",
+       "NQuTe+6fPtD6cnDwyMTyhS2NxtjBlQ85ubrM5Ar7N3j0uCJtf12m44SXX+FHHyFtzqFoE9JXqfd4\n",
+       "8Zs9v+9aMBJCeD8WYoy/L4Twt0IIH4wxfuF3fyXtP8KlBS7uVjfbIuHOxejS1oGdpYFH5K7EoC03\n",
+       "a+SOw9SlsKQ9Te3VuprjfZPsgoNRVtlEju/Q2NFsj1xM67qi5fm0wz1Htl2Q2NSwrLSh5ozC86bI\n",
+       "lM5YEAwxMUDTdaljrFpWOrFrYN+qxCP6JugZy80k6h4yUcOm6CVTixIrSmVEHr18qmPlpK4RJ75w\n",
+       "paPI2pq1E/VwYOf01Pu32f4ewmePrNy7bf8DR7LJ1GApGtSDcT2RXL+g94UL4i24zMFXeNo7C0am\n",
+       "TANFIH1TXXH2NWZZjPFk/l7v5P3erGVWV1LjhdNie6g4mjlpdXWHdWXWcyuZuJc23Ms3FbM63aHO\n",
+       "rG6YJZa6i4rJVf1W3cxEXh5Jp1tW0pmHi5uuxWV7tZZRed8snXhK4glNEw1jB16yYddZUV9haCTD\n",
+       "mpqFqh8oNBTpeV0T0XBeNktdxKtyU9EMHanTFu3pua6hdFlNV91UcNVEc46+u6+t74qaKBNF1wyM\n",
+       "bejE4IItJ2nfNO/YSRZloVQs9s3aheOYa8S6jWnULqlraqWpKye7dlbP6O4lTDuy2Zr+al99xGGz\n",
+       "mrIaB8os88rDwdWzM5ZDCLd/p/2Jvg31Ufx37/Yi3qHe6Bv5jg5GcIFLqp6N3Q+QrXNwjvKE7Rmu\n",
+       "Vtfp1dzXP0BPs3aO1mJF1A62Pbi94JV1inZidPaE2lSR8UyZGE0jCReT6DCJGjkHC9THNHLKIjhu\n",
+       "FpUDe8pFledcQ/Vs3laVacahqyW1L9EXJcYeNlZYlGpbd2RL3SA2zcJ1uQsKpT19wTWZzAPaLpma\n",
+       "ec11q7ZclppIRJmzclvycE0RzkniKSGsSV2TKTwaV8WYC1mpE4ayx7fdmJwzvRvkQ4rN6HsGNbdj\n",
+       "x2w6cTr9Na88+GN6L09I2xWJovvWkxBj3GqH8Av/gp94qErsx2skN/jYOy3Vv53ezczIh/z6zuKX\n",
+       "VIja39VgJISwwsVlWie/nnVKy8qI6ZNPlC4s171vNwinWUqj/bSwWCutFrsMliS1ic6LLc3TNFtj\n",
+       "497rbNynO7UWVi3KRDMThxhbx20NLYuGUqnrckc69pVaSh0TEy0jlxy4o+LFfMCRQzsOhLm3TW4g\n",
+       "es0MIxOVm0nb1FD1hairSIGHgn6x4ELJfpZaG04d1lJHlx4SWwuSek2zNxHrXTtrr3t9JffwIdee\n",
+       "HmmfuSq7csnqpKUW90zTA43pE3rPXpwHInBUbZPPXAjhryAd8KUjPhFj/A2GVjHGkxDWvspXnuR9\n",
+       "b5qO+fLZb8EpnTHVcPBAV6NX153teP2xnla9U43gzrjfPKM1mNG6J2+dlbcOqgbf2FLPd7SmL5sl\n",
+       "E+l4LDmh003k05nzzQNtdc/VKjDZZdFMS1TTNvaQa/ZcsmjdwC2zeafO2hwdP1QahlTXkqmJiVQi\n",
+       "N1S4qG+i5q5UriaIrtrQ11bt0fa0LGobiu7YUNhQOG3Rka6ZKHdO5shd1ywGlgVTXffDkmI2NEyD\n",
+       "rJaplx0L43uShbsGNW712VsqJaOGzcGBvdVNk37LyVYprde0IyvXefEpzi0GW6Nzbn3/aSeLHft3\n",
+       "Tvj3rvJYCOGnv1uAaCFYUZGvPv2Njv020cfw59/tRXyzmk9zQc6oyd4zPDCmdZujgt2LHD3Ox17j\n",
+       "RoP7nyN5eE6OPkFaZSuy+MZ9vvSBrb5Jc+DwoaiRMK6xmXPQLO0kFS97mhAkdtOgM+XxXWYx8clT\n",
+       "tGNqkObOCmaCqDRToRR6uPE144imnjXRTpU1lWkopE6kas4JrodNqTuC1yzIbItmShclFtxzNKdu\n",
+       "Lzs7d6hJLEjM9B1asOBliw603bCnZeLAcmxaKvv204mQJspaYrUc6DtRWx06rnPmZc7sH7kd1+Tl\n",
+       "ks7o2On2Tb2lu6TTirZ6+HbnZBjjp0IIr17jSiRMKyja2w4SvFO9m8HIsmpWimpe/Ml3YQ3t6oK+\n",
+       "9mBVD6/Pd3eHdXpLPLwztne6615eUqc7y6UJ4xnJ1amFWTQ6G6Ure8rmjI3T1chB0tSRq5J1qVxT\n",
+       "4VBDw9jQ0H1TPamhMzZtqCuNjN23Yyabc/cSVZTdVTjjwDK2dOxYNXLoSOWnsGBqilLUK4P3J1VJ\n",
+       "p4sj0QtpcBBbpsXQShLFomPcWHeuqGbsB/WO2iwzyXa8dP7Ie+5z8nDd8QMNT9w9tDg6MM0KmZHR\n",
+       "xk3XnhnYeaXDdET6BX7gaW58L9dT4su87zM8FEL4W2/Pqjj4l/zqCncvsh7ZCdy68Vs5cSGEJuKb\n",
+       "p3dijP0QNl4rrX6YzXHb0mDR/d2J1840qNXlCmaF5nBPCAdanab6nNmRjO4aZNdsxBPh9ar6kO/V\n",
+       "7TzctNCb2Ns4Y6e7IIQ7FmJNEoYmgppSgpZ8DhpsKbA27/XYVkdQmBnJJapelKGpmeBBiQH2RYWB\n",
+       "ly3Ys6lUU0gEy5qO5Pa1NE0FE4m23IkFQxxZnfvejHS0nDK0EDqGcWyWDMWiJ00u6CR1ab1nPH5d\n",
+       "uz7wVI8Hv0z/VO7m6SN3aismR7nd1oNGn9g3Wj/UeobulP1bfGV50+jsY2J/w/jZK4qXE4qP8d5f\n",
+       "q9KzX/2tnMPvYP1+fDpG43d7Ie9QX8FaCM7H6M43PPrbRJWR6dqP88ATiBy9zLVNrjRozzc6y1sV\n",
+       "abzV49kVHj+oJk6PH+XVSQjhH2Cb7UjR5kaNjWPuPkDrTLRXY3uRWZ76aj2VZ9FFhbNllBXczKL7\n",
+       "ceJUUZWGduulo5R7WdAMmeVYZQsbSofVMuUyC+oWdEQtK6JbruDAtnuac/LTWK5uJI9jzVDXkDs/\n",
+       "t/XYsWRNLreqUBPsGghKQS44MTKQ6kq1NLUVzoZty0ZuqOuUheOsKcRFWayLhkoH8uS+WXNkZUL/\n",
+       "LNvTgdZLN00e2DRbmmpmz7Lc5OXd6vt8821PDmKMO6qRpW+J3s1g5Nivp4Detu4cQvirb/rnJ2KM\n",
+       "n/gWr+GA6S5hxCcfrUA2k8DejHi15ublc1rZmsVRJgk9+4u3HSd9D41YzoPR5RELdNIddxuXSHJC\n",
+       "F9tGWjpzQHdNZqrpdWcNXdKwVvnG2JM7kDuWSbT0bUptKw1k2nJdwY7o+jwCX1S6ouem1FkTLaUs\n",
+       "VmWPAqMkVt40MToJVRvUcszshEQrTUxCqr9fo5syphXHBrXM0WhDntfdXeWzm6mvbjTko65nDsKb\n",
+       "epWGDps7Fk59koc36D/P+irHP8SX3jjqGbYGlfPUU6oGuq/TfLT6b1c7Gkuq6PsO/s43OmEhhDXW\n",
+       "f5IHH63+vf4S+78QY5xH8Hs/O1V8aNfh06laseLu+Q15L9PKZmrLQa14Sb62Y3G4ZHG650Kybzdt\n",
+       "i8XEymRm0qFc4XgSTN/HqXHf0aUld2rrZjE1icFJUjMrGySlqXQeXGSmaOgZ4JyRY8HEprogNcOO\n",
+       "PQOrCj2lS4KG4NgC1gzsO3HG2GOamhr6RvaNNMz0Ravz+nHuVkzshcTUqbm5+J6ZNQ/GqSzcnYPt\n",
+       "cucMTGsdZ8Z3zdKJo3pPrX7i0oiHtsk7nH2x1Dzs+dhT6+68ciwef44f7dGYGP0cX/5p1U3nylP8\n",
+       "ye/lev1NNflHObxW9X59twQjH/Xti4D/DYpRGYKPq0o1/+DdXs87UQihzbk/yw+0ePRO9ZB/8WE+\n",
+       "0eSrNYarFWTxfkL5KqebJAv84TfRQh/o8nP/PsMbrC3TXuQrm1WPyQbKGnkkLRcM2+t2amMPhJq6\n",
+       "ib30UCvmukqHSYUR+MIZDpd4KA2aofR6jEah4UGlVHXMACN1TW1TywqJzEjLsm2bVvXteMiitpqB\n",
+       "4Mg49LQdCHNbj0SwKxiKCiMzuw7NRAdGuqa6cnU1XQ2vm8WRehgblCPNWWkzTGzVVnRjaaMcm4bC\n",
+       "qCyMysSJ2x67S7jK4SNc+jTv3z+yvz50/XsTd891+cqEf3VclV1+1xg672Yw8msqi+t/ovqS/F9v\n",
+       "PSDG+Fd/JxdQ+Z0sfJzNn+SJf1ONi5YL1QRHcbmhtTHRDfdMi65gyerOBfeXXnFnr7C4uijpFuwN\n",
+       "HS01pMWuEFJlWBS17JlZlGoaSc2MrNj1FI4VdhSCpjWZHUcaHjCQWFA38zqSicQAACAASURBVLrm\n",
+       "fI68crFZUjqlsC2qGVpAW0PTioZUEk7mfSZVH8K+hFDYk2poCKFjGjIxLtqblcrxmMaMUAoxmMY1\n",
+       "5f5MvSiI3LjecXfcdf6R0jRNNd6Ufs8mE3Hrc/w/GLe5/D089dbW6rOcLFcGSb8hGJl/9tFbou4Q\n",
+       "fkODtre83ubcn+MjdR6Z7/BeucKn/lwI4f+IMY4qPkb4SyOHf2GZHzpPL3d6faKbTtQGwdJsSa+5\n",
+       "J1nvGExGToydTnP9+sRRe8FS0qI2kHZLZwOThQWN4arDWt1JqOm1l+zoeT1puKyvclfOXbViEIfG\n",
+       "4VhDqmlJrqftrqiDmam+0oKeBZmenqFSKXPiSGbborFH0DIxMRXULGm54cShEwOliZqGO2FTtGxi\n",
+       "JpEZq2lLhVAo4/K8mXZkuRhYSjPTWlPXus5sqlcWloqR6+eoLdFrMikL2Ut74tEF1i5xs8ONbZY+\n",
+       "z0/d528h1pjU39IxH6onRfLW8/V7WD+m+jy+k/RG38h3RDBC7UmeXOKJN/UgPH2vYhv1T8jvMM64\n",
+       "eFT9/Nyf4OJbeszO9Vj9Hk5f4ief49qMr/wIxVrlWxNmtNNgpXPGVlaQp1YliqSlWc6cJEeaJRcj\n",
+       "X+zQWuD7c8qy0Iwsplwz0VM1pNfwgooUtVE27SbLMqVC0JKYOtSf01Qrh7HgwLrCyF0tmZa7TtSc\n",
+       "aNh1IFGz7NB51NXta9vVmt8JSvcURjoy9dhUy3M1Y620Lo4yW+lUbRok6UQ+quuNNpzq7VkekW3x\n",
+       "xWUupqyiczhVfpXjnxvz99/MD6o2gM6azyl3eSQhOeIVvP6tCljeUTASQnh8vpjPzvkcb/z+J2KM\n",
+       "35SleIzxSyGEcQjhk/jSu9O8CsNP8qU+t3+IdJHBHumHeepMR4hN7XGpaBy6t5B5LSxJbnSEGwOv\n",
+       "PpaaTVeNh0GYji12ZuppXUPPgTNGDlxzx+Kc/7DnCVNLMgvG9mT6poKG0tRw7sxa85KHTJ3VNjZx\n",
+       "IHcO9xTzkeB7juwZmFoX53tiWnMzvCN9wZYUCyaaeqa2JQ405dM1iyGVlS9JJvuG2brsMDVwZLJy\n",
+       "Q6gdy77C9X9U8mDTwX9x4nar6YGTRBZnZtnQbpODj8cYfxVqIejxvrd+qj2ao99kTv2bV+1Jnlzk\n",
+       "8TfdoJ64z9ZTHff+wsUQzCqf8F855n9MmQ1kf6jQKRrW79XMOgWLmZCmFgpmVtwfbXs1nSjqa66o\n",
+       "yYqBo05ptTbTCrnGNNMczTSbmSyWjoon9NPXfSXedyfMNE0MZKIDp0KppSaJY1dDV92KrqGg75YV\n",
+       "0RPqrhorHBhrCVoGtq3ouWzd1H2ZQq6Y58UYzJF1TevxtFpo23fKWENXkOso7Jipq8W+aZipW8Sh\n",
+       "qVynjEIY2K6taw3HptnELK0rsqZJLerXZ5rdE5375PXzkuMfUP7sDcmVu9ofiGrphtHCjvx+lP/L\n",
+       "14nPVAZFXwtIrrKyN7ck/72uEFxRNXJ9p2WBPoa/HILw7UqM/Xp1z7P5NiXei3f55VOVgedTt6s4\n",
+       "+Nnz3B3ykbchMCfnOPvVxJf+ZKp+utRcKzyeVg3zd3NOJ3VjLQtGekk0K2gnjLQ0p0c6SWW3sjCt\n",
+       "gpJTh2wtM27Msyt4VpXYj6okb8eMZKqUy6SqTeKCUk9fQ6LuQFBI5+MGG1jDOZktQy8oDJxyycCS\n",
+       "TW20HLqk7lXRkVP23XPWmqZWuKMRE5sx15qkerNoIyzbSXjhpJCNGrpF16mje05ala9gL7D91/nX\n",
+       "G/zqk5RTjn6FwSffCERCZRT20cf4wYu4xaXA+Qd5dpGja/zgq3w+hPAz34oG9m8YjIQQ/kv8ZyrG\n",
+       "/N8LIfxXMcafmb/8P+GbCkbg3Rvn/TqtUltmdo+9V1n5CR4/xeX+yAtx5ng5KmMmnZxIdxe1jwvN\n",
+       "TmHxpG036QiTu6ZLhUvJnl7SENSdcuTAwJFgT+7YY0q5IJo6J1gTXDcy1JVoGaupu+68gStoy2Si\n",
+       "ukLPVNfAvlRi25LR3IvmSEXnri72umMLDkQLzuqISqldZ9yNp0xmB+qDE8V4olgsFPGuLN9ylLXM\n",
+       "8rbO/j2to9zGDW79232zf7Vt9OUlr31wYHet0Al9g3Tq+NP0vgZRynnlFcYXWTo/94rYo/08YVCZ\n",
+       "OHwLtXiWjbfU6XfXFuVPP6x+/aMmn9uj+0X+1Iv80x6/0FP+/q76OMiKTNor9BuHRllicXKon51z\n",
+       "HD6iv/W87rlMnqVOkpY8luqhLiSHyuZU2RladeRWY00i08gfNk4XbNkXdGy6aV3ustSiqe2Quq1U\n",
+       "CoYaUut6LknsiwaaFnS0bMsFiaFTGlrzse2h0vn5Xmo2hySlkjkAKc6nsKLUwFg9zgg1hb5BHDhy\n",
+       "3uk8VS+WxdrAbpgxTp2E1+3Xp04rTGuJ18KK7mxFa5JZPLzt/uqJyfGitcXn7X946PSphrXDIC1W\n",
+       "9GJN/kendq/dMvr5n+OnHmPaIL/BwsvVjOXz39pz/W2rH/NtjID//9FV1dPyEdWO9ttcJzsc/YZN\n",
+       "DnsdDj/DLzb4wnlCweBz5M9z84mqh+QNHbQ4jJx+smX11ILufs/9hcK0yXZSYQ2CQlIWMgsWDWyH\n",
+       "0uU8lcVcmdAP3J2oRv0b7EQOBvSbvtYj8sbIe6l6pN5TSuJIEu7ILcrlDkxVZKElpUcNlX6d1lio\n",
+       "Ws4X5x1lU7lX7FrRkci01OQGFtFVzl3Za0Z2TZwpK95RGYK9LLM+mjjtwChesj9s627n9s737T6y\n",
+       "LYncuc/4l8h/Oca9OPeZiTHGGEI4ux7CD7Z4YJXaKc7/O3xpm27O2jPs3+XyQ/zrR6rR0+/5bBWY\n",
+       "v/bbPePvJDPy5/GBeZ3/Afx0COGBGOPf+O3+8d8NVUjw5jMsP8asx/4XY4zX569d5sqf4j2B5SGv\n",
+       "fYidU5Qdrq/0dRc7VmNqauQon9k5F/VOpRrTrn56S697YLE7tpKVlmp9dVcdx4ax0lI4MDNU1xAs\n",
+       "G9sycSw6i0S0LHfHrtKaxDVdOxaMLUlMTeTq1iX2TSzK7Nlycc5hHcgduqNf/S30dRwq1eReks8d\n",
+       "XZeM4mVlzDX3OprHpdCL9h49K721IpmN5Oe3tU6uufjc1EJJ9ywbK03X3s/+ZNfxZwrDmMvGpckv\n",
+       "Uv7SW5pGT0IIf//n+eNnuJBUrrmjLf7vGOM7AqG9c53scNB8829abj+1bDI7a7rVpDjP8RKj/QpG\n",
+       "8L/e4OUz7j+8pN4iK3flo0PN446b00vuPjdj+hJPN4xHhWmrY202Va+NHCeZRTXDZKi2kDszfcXt\n",
+       "cE6RnlaEUl1d6ZT2PAt1jF3R63KTOTe1La36eKyqEEivOK1mU6Gp4UTPNUGqoW3BzEzdiWpD2FcY\n",
+       "K+2goyYah6HSOTOpxJKgIw8TypFaqCtC1x2HphqWpxPD0DGLW8ZJbiMmurOZST1YD5Rx3/36WC0s\n",
+       "22ptGoxvapcTjfLAwtNL1l4Kcyz/iDDz8E3KHxm78z+/wO3bPJXRPKgebK99t0zSqIKRn/mGR32b\n",
+       "KUYxhK+Var4DgpHx8zz/o5xa4sIxZeALH+bmA/xQzizn9T53/2GM8V7FIPnMBU4ucnZQ8aJeSNn9\n",
+       "6dTpv9LywF6QyLSHXFuush/nC6YxdzrZ9XpyXqdYM5gderk2tZj3TOttjbzrVG/iMDtyM4tqC5yr\n",
+       "c6akl1QTxOdVFelCFZz0poxCblwbKsz0rKiwhAtyE1W9o1TVNkfMyaqFIKpJnJLaMkOYl3lqKujv\n",
+       "VB1Hc5rrknZx4Fac2U+CWt5Uz2fOjnOXDu+J7ZFf7l62M56o1Q50B23ntxNxoWf/DzJ8SUWoLSGE\n",
+       "cPER/tMPMj7L8Yv80IC1F9ibUj9HscTomNV91k6xs05c4c+cC2FrwvZ+VT35psZ730kwEt4ozcQY\n",
+       "b4QQfhj/NIRwydtQ2L6dVGWZzvwnPLNaXdAnZ3jh/SG0f5bR5zn7x/gDfc7M+fuDGmnC3lkem5Ye\n",
+       "3T9x1KkbNqLYHltMhxrDmjPT4MWFliztyxZyiqg+O3avvmQQzIG/U1116yYKW7aN8FU1W6aW5A7R\n",
+       "tOCi3CuOhTkKfmKsbiaf1xZzhR1H1k08jvtS+xouKgwMnahLdA0dmxh4QvCIMk6Mw0ui66INoT02\n",
+       "aQyM10f0Tklv0+rmuoMVMew4usQDQ9YnZ3y+e0ZZa1s7Gio/sufk87fM/oZqd3UxhJCpWOYnuMTi\n",
+       "g/dMP3vP5EiVr9yOMX7TJL7fXJPneeFHWN1kc0htltg/3XK796j4tRTtItM16rdY7Cn/Wk+60XZw\n",
+       "pdQKY0s7icODy3r17+VzmxXc54u/avpn2XpgolOraSZNR+HAyyHTKnKJqWGNNPS0wwVkFvTtWjKT\n",
+       "Mc9DDVzUNVOaODGQlC9Lk2VDOe5a1rKuIzVTmmkZuyh1W9/MeZlVdfdlrqqynm8YQ1wx8fnqZhcj\n",
+       "oRAMRAsaZoZGSuuUV+TFru3kpv3aqoXBTLOxpT6bOdsOLqW0Y2YUM/uxdCEm9qd1J/l52dFtndFt\n",
+       "R+GyUF+eByID49YtaR49fYs7G2jGGO96h06cv5cUglQ1SfOfv9tr+Sb1MfxR3wH9LvPer7/Hz/8x\n",
+       "1i8wOUU4yw//Sy7OJ2luLfOv/sMQwv86P/7/ZO8plh9kfED/OdSj1/+HvuJ8ol5GJwVHCR8uWQ9c\n",
+       "K6iHXU8Mp15Il80mwYGB43ruTJnJitLd5JLF+7ntRl/aJqSEuaHthmAvLvicqAgnupHNQHdaeC3L\n",
+       "3A9tdbklA6mOPZkqlVDZaVaByNOCRGEBhXKeEa38KaOJvlTDGKldUc+hmlgGC+PE/foZw+SCTiNT\n",
+       "rx0bNrYd1rZsDgaa09T0V/seefCUK/eCYKpoXLN5fiL8iRDC//LGvfoUP/79DB6Yj/MuUjzM4Rd4\n",
+       "avFNmY+MmJPdqJDdH/pA5RWxs89jz/G+NIR/WMT4Ww5430kwshNCeG+M8bn5RTIIIfxh/F0V6Orb\n",
+       "WJ0P8cEVPvCmSO38IYM/yLX7bCxz5k2vJXPWS1YSMswKa72RvMswa7o4TGzlZ7zSXlaGxNJsYL95\n",
+       "V1EEg/SiellXTztmcc9BOFIqLZqYeVlX1wNqSifG9hzYsGvRyHWloOnERGHkSBVrtwxdVT3x6zgt\n",
+       "8YLq8oxW9E3UHaoZ6ctsGTiviAtqYWqiLfOQqec04z2L9SNTmWRyxiyr23Rg5MTRUkcW143PHbr3\n",
+       "WuZ6clZvLZG2c/XFuuVwVv09R0Z/vWf5szxYq7rYb6l8Gh7p8uC0st5+JeHGz8Q4/p0aHxy13Xo9\n",
+       "9S/+g0S3Ww0zHyXf6+Rzi2+yui4IPVpL/PgyD03sjg5svD516R6tSekk3jLyCRaW6bys+cRQlj6q\n",
+       "n0Qvh6FaqPySp2XQno1drdENNJK+83FoJzD0sEzfzKHEguiMmaFjpdqcrzhOqpLMqjt2JZY1RPum\n",
+       "ehbNLMltaJrY0tM0dGZub9gwNVDaUP3H7qluUhcIO6KagMShiRNF0hfjpQoZWSwqZ6dNx3s6k6H0\n",
+       "Ku11noxRfTFoJdFyKGVl6aaoG0pJuGlnYer8Z3aEfGyntuN4nVAcap0c+f7PVTu+kxHfMeOsvxP6\n",
+       "IO7G+Lbu0N8J+hj+RgiSGH3bQ+pijHdCCH+T7TXW/wx/7BXOvsm47eIRFy+ydRHXY4xDfG7+I4TQ\n",
+       "Yf0vRudnpZXlXD2JkpiYzkrH5iymfXZKjrNj03Bs/y6n1oMzK01ZXhg0+vaW9pRynTSRlaWdQEgY\n",
+       "xbqWjmPn1UJQ07PjjiJpuN5sW4hshoGxwpEDS0itic7JDCUaSj0zPdEZFaHkWNXvNbbkjmh5ngcf\n",
+       "GbsucaS0YlaWVotjrzdXTZM2pqbpgo7T8jTYXzlxv1GI108sLXed33tjKrIunTRk7YnVh1fd+W8v\n",
+       "hDDu8dIKj1x8U9DRYGvKIx1im5O7JBdI+oRzHH2WD5wjOcsL6wzXGa7R7vNHQwh/7beaLX0nwch/\n",
+       "bM7XfdNFMgsh/Cn87d/KH/vd19J7ufIW74h2zoWUa6fmJNs3aeEqN9/LWq+6+d6rMWuR94J6vSZO\n",
+       "F/STy7rl0CAUYtJ0IVx0LxsYBDYNDGVGNvTjrnoYiTEYhdJjTiQKR/oyC7ruKuzo2dec+xdM4jah\n",
+       "odp2Hqse+RuqfudqDjrzqkImEazbcmBXI/ZshoaX5IQjjdmEtCvEmhCWtMqblsOhMydnvd5r6Zb7\n",
+       "9s8keqfeY9ZsaMRMTI/dv7DjOAseTFIrvUQyKxx2gnunGmpneGCbj36m+qxunOfLP8UTP8dD80bV\n",
+       "R2v8zB8JIVz9nfAr6fIHPsB7Pqz/8339xrRinLz/OR67zK++Ydz0RS71OP1jDB7jbiHee87OQ5+x\n",
+       "lx1q/CKjGwOSF1mh/X6+r8F05ZZXz66r1XOjOBREWVp3kG1Ii5GT0Pd4nHjGDf/GaXthV2lixVRT\n",
+       "00BLw8BIqW5qscikSdMgDB3Gh2ThQNSU6cvklo20pGZyLamOa67aM1WY2ZY7g1TiRGpk5nHBuiAR\n",
+       "PS8KohO5RTE+QOxp1UcWyiBJZ07sG0/3bdxmaZmddtAso/VYaIfSUhL0YmEp2TftDDVvlnZ73H62\n",
+       "J/1nPSt/mPdsVXbsJ3U+fY79n/kuKse8nT7qOwAB/5spRlsh2FFZNzz7bq/nnWg+qbEXwvlYNZG+\n",
+       "Va2oiiq+ThUCvvPf8N7vi9a3CuO1zEaMzhSFl+qEMYcDmtc5O6NT8MqUU4v8u7+W2v5g4XAtl9ZK\n",
+       "Md1ycyUKabSG1SmtwNXGkn5I5/iGoGNFGse+moysxMwVI/04q4ozIXXLvrZnzdyW6gjGanqGnjO1\n",
+       "rXr3qhlp6jFHXpV4RaFuqE1RI2Sy4rYkWbOXPiwmbZyWGSlct5NuaJctebaoKPfNhoc6ZzriXjZ/\n",
+       "jhfKLDdZWzNZ+T7uPcrBVT74RZ7e5/5GFRM5zfXrFaLh1CaTXW79PE+vc23I2g6XL/HlTb/uz7TO\n",
+       "cIW1O5wNIUzRnweJ31DfMBj5zeo/84vkU+/kj7x7KsfVJKK32NpPIva4v8+z72FlVjkynrnF53ZI\n",
+       "ltnP2BiwsEPzXvTl7y/8f+S9WZBk13nn9/vO3TJvrpWVtVdX9YLuxr4QBAGCBCmOFlIzpEayZcsT\n",
+       "1jjGYzvCoQe/zMh2zMOEH+wJT0yE7QdH2A7HaEIzYQU1kjhaxqJIUxJXkQAIEGgADXSj19qzMiv3\n",
+       "vPs9xw+3APaAICQSjWkQ+kfkS0VW5Il7qk5+5/v+S9fySa0xMTEzFL4FjvKxzBCNossSIRGh3EDj\n",
+       "YmHxGjMcctYQfFJSoDAJDolpoLXLSC3TYZmCjprxfSeZ8yjWUYxQTDFYCDllDphSIWJIlRltlrnC\n",
+       "KjEelu4zsmfMmRxfC0M9pD4x3NfxcPwh+5MZ+CmTjQfJ9TzWeId4zkHlNSaVGdpKUDMbSEnbBmzD\n",
+       "fDljCKSPQvAC+BEka0VY1cHG94uRSgp3KXj9FPC927mTIlI6BU88Adse5F7h6sYn4Onfho/8Nmws\n",
+       "Q94HtQ3Jg7DfhkkITg2Sx+HSDL35LUL/BPynG2DnYF+CJ3v0OuDeN+BsGLLtOKxYHjlLhJQom4Lh\n",
+       "E+WH7Ns7nDNjTrDIlIQyHk1ihnQZHjsNCBaZMdRNSlNyGpSJZQ6bLkP2aDPEw8aiTorQwcZjhkPA\n",
+       "MgmhqRBLmwkuIQMsYjCncaSGYoywjOJ+NHsIdbQpKHQeFg0Tg7Gx4wluuYcxCZN76/Q323RsF58R\n",
+       "nvRZjGPKtsUIzXQ24Wgb8v8NbjzDmzbal1PofQKq6zCJ4egPIXzmh27QXw/8LPBP7vQi3iW+Anya\n",
+       "n5Bi5PuYvAI3HoeHbiGoxhZsA2/bqToFCx+D010I1zWV/YT+YmF8pgwMO3DyJgSXwM7gZgkGOZz5\n",
+       "WegvZRwtCJThbl3wNlyrOJVjQLTiZqlQxAzxiTBYZkZHB+hjiburIyZWxoKxCWSBMSUsphiGuGzR\n",
+       "xCGlzoQFykwpkuAXKCL22kCfiAY5S9h4qKyHSaaYGwmVxROErdNYZoWcKRyPdYQaikNyXIxOcCox\n",
+       "9x8FDE9NGZ+qkYY+KhqRKgcdzTHYvRd2fcgegd1tOPgufOjj8PQRspyivAH50VW4sg+THJ7pwOf9\n",
+       "gsvgNKC2Bpdu5WqkhXDh1An4tQVIh0BT5Dsj+PJfNrr/gKf29p6GV/6Doqh4wwZhvwbbMyABaxGu\n",
+       "3ltorW2BZ0PY+xIMFmH+FHy3BcshjFbh+qEQrTWIyyVmJiOzx2RSJmLGjAUyOUEVCkc9lrC5is2M\n",
+       "itRoHAu7BGiRETEhJ0ebDqlpMqOMj2IgHhgDskDhxXsSwUXoAdepMaKCTYUEnwFCwpY5TS+/Bwuf\n",
+       "WlolcQKMXGEiITqzUIxoqBiroQn8mLXpJfbsBWI/Qs0u4jRSGqqEkBHgABOGnovngUZRCcckzpS6\n",
+       "AmcVXv1Z8LswfQSqZZgZSC+Ac3xjFgPqveASVWqgvLe0s5oQ1eD6Zfg/Lhdj2ImP/fdu0nxyj1oM\n",
+       "kSwzuPlxold8sE7Ar3wcrgd4qwmqegZZE8brXZ4wsJRFrOcRXVFcUhntbEqoPIz2KJk2PT3ggsx4\n",
+       "wmyzKy5TLFL2GNJGWEChsdknl4SRHeARsKHnGMk+kQQ4jDggx0IYYAho4VBnniLZ18JlUVo4JqIn\n",
+       "cMOs4eucWAoCX47CYp8MD02P3MwwZGCW8BTYRmMRof0jtE64x9R4beMk2nEpaQOWTyAlLnpdnHyF\n",
+       "YLZI/FrA5PIVGE+Kk/YUsGfM5Ksi8i0KGev0veEA/eRAhBqFhP3rd3ot7xJfBP57CiXkTxDGfwHP\n",
+       "Pgj5GmwMYOrBC8uw9T2gLSI5cBLwiuby4t+F0jrsOtBbAs8DVxWikLqGG/NwsQTzr8NwAjtlOO8V\n",
+       "eVlpG1LfcF4JoVWccT6wIfACNpd8h5pA3/h0mCfhkJSQeaVwTURVQkoYOkYhrDGRFk36HLKE4i5s\n",
+       "hkwZ0CSnicc+FaCOokmRUDYDrpFzH4JDnq5irngQ/AW0LjJyfVwp4amEhMJrw6OESxWl+9gERAS0\n",
+       "pppHn55ysaE4Wo4xvoXfjVD5PFfb5+n9qX+LRP+xwt3wiV38/7BBjRmW6aKnM6IXhgx/wxjz73Sm\n",
+       "miLWBfjo48cVIcAz8BEH6r8CWyXIY7C+Ax97ruDrvqPy9gNejKTPw0snYfQwbGqYKrgSwM6/guVf\n",
+       "hM+MYOELcGUJYh9cH5JFqJ6DaAVGfsGDcK+VMXqNIE3JleBIBUVOwoTIRMRSJIHkZEzI0XRwGKGY\n",
+       "YB8n1PSIWcRBI1RNyg0yImMQ6oBDSgfLXGdRDDVcEgYc0WRKD8NVTjDlFA7rTBkT0gUWTIWZKbOj\n",
+       "+iTxBFFjJG+h7QoJO2QyopwGLHrQDhXdGPwgoF8eoib7lByfdj6P0g46q+JF15i0B0QmoWdXaAQz\n",
+       "YjnCUikKWFbgfhRMDxZ34eAkNOtw7cNw7unipnIV0D/UQvhdYDKCdAZO5ZaxYb8IFhpTkGa1iPNA\n",
+       "yLkHhZW8hD+bMlt8nc6n9rlyP8SjZcTqsvioTTPQ5DaEjTa2fURPaXwbWhaEMiOXHWzb0DIOE9Vg\n",
+       "xCrN2GPHnvGMPSbWW4yVR5Atoa1zNOgRiKFEBZt9InpM0PhqQgWLKgllFC41XDQtFA4BW7iMKWMT\n",
+       "EuAyT0BAQokJdTnD1LIxlACXMk0sAib0yU0Ipg6mh8glYtPiyLjYaoJrepRs2C7NEdouJ3IYK4Wf\n",
+       "GUJpMrYm5CbBGpcILjwIO0d4/82E8xegMYNdLeL/sTHmaf56JvK+HT4FPGMMf6WW8/sYXwV+W4SG\n",
+       "MfxAdtT7FccE1f8Tho9B/R6IF6HiwVNnofMYDO+Ck68WXI7DD8NaBE4CNzdhwYczQEtDx4UbBh4a\n",
+       "wNNluN6E2Rfg40/Bx3bhX38Cqh6IhkvK0JAi2HNkYCgQSpMFbVMmZwXFzBh6ElITm9yUORCHDINt\n",
+       "jyjjs0WZmAkd5olYpIlDQo2cBjk3mTDA417GdNDcQ8ER3KMYzTeRLEQliswVyE9B/jqW36KZJISO\n",
+       "hwLqKkQzI2GAkQ4VM2Apj5ERfO8xDWuaVhf6dsr4OtQ7DdJzdVSJW6hDMyjFVOWIz31pB6OKyLH1\n",
+       "IXzjpPD1/3hJBANZF14ALgN/9gwsduCupeJLQe3Awufg62+k+XqQPw47N+AJEfnqO+3xB7oYMcbk\n",
+       "IvK7MPg2XFgGE1F8W1ZhYRFOHld0Dx0TLv/0ITj5WfjEd2HxAF58GPbWQJ0ts02FxN/Ayj1sVSKT\n",
+       "Gbk8TV9CmmjGCMPjm3ITzepxukiLGSMcusSMcKiYiJSMjgj3SYUJioyMGRMWpEILD5sMTUqd17lG\n",
+       "GQvFCkKdDCFmDjgBXBUhFQ9jlsCukZsJRnZIUxujFUmW0DKwJTk7FU3DgFmEuB+QTXawT54ksVyc\n",
+       "mcHEPVInoBlYqHSfdlWzEkPown4OzhRMqSD3tgO4uAI7XdicwOQMPDeCmyHs/Ikx5jabnYExJqmK\n",
+       "fPUb8LeegN0mxEdQ/gtY6cJvf990Z/FvGB6/2Gfv4xmje9rAKu1Zn95yRGcpo5KWWHoRlEkJqgo/\n",
+       "sHAXLMJYE+XCwIOuMpynyimjMSREMuSSsblhRUTi0UdoM6CRu9xQ51ESgTEskB1LfjdwSaigeYmQ\n",
+       "lB4loIZiiRoJ6ZtpQi3GvMbicQBAxAiHk6KoEzPHFteoMqBeyBIJGBJh2EdkAUtclAkxcgaMxhKN\n",
+       "qxNEeTRNhO1ZeBkkxqKeOoyjlLxms5yWiYIyZy5d5eDsETcfWqSVbHF6Bg9vQ2DDl/62iHSNMdfe\n",
+       "YVv+OuGzwL+904t4tzCGQIRvAT8D/N6dXs+PguPgza+IOEfw+Cp86jJkCr5+PzyRw3gNrmk4PQ91\n",
+       "HyZzRZekqSG1oHds634ug54D7QFUFmD3E7AeFom85hBeWIO6rXhYFBZCCGxIjgck5ERKYZkyiwSs\n",
+       "yJApNjOWEVFUMTSYx2OPDmMMZRaYssUyGZBgobHI8dEsYjjEoormEB7SNQAAIABJREFUBoUldZFw\n",
+       "JcbBEOFowTVTpu0qJknBE5Q08LJdpo6gxcUDbPaZsY1rhqymhvlZxjUL7mrAQgiVoOCovmbBU88M\n",
+       "6M0dMD5xnuxlgAisF2FjymYXznbhxrzN9kLG/lyZg9OnsR97jPwrGtQleOg1+M4I/vAIfvOo0FvM\n",
+       "Adk5+E9Wjzknb6AEebUYTVTeaX8/0MUIvMlt2eH7/jKISL2YG74V3Q/BfTEsBNCvwXQBfnoEr1Qs\n",
+       "rjmrzGU2kWVAa8qUMKZJoGZ44pBRosRVHBxWjq17ARQWp4h5DQsLj4lMCYEGHjOaeASUSIlxWKZG\n",
+       "Rohhik2JGoYGEZoITUiCQR2bi9eAxFQIZRWhiicOM9bJURj3ZTBTKNWZmJSUKZvTgNNXYeMqjEop\n",
+       "/bsPaB3kJHNHxNqgnJRmPMfc6zNeWUqY2THxDMYaPtqFzQCevQu6UzgycDSCB//f4iZy+QF47TqE\n",
+       "v/9uIqT/Mszgmy9CsgOfKhX2jKMufD4+VnoVpLUTbdjcDhjvtgg2QPIZuhxStjykN8E9nZDXXdTI\n",
+       "wk40s/IEV6fg2OwqQyxF8N08MZl4ONpBVMy86XDdtqnT4h6jKB87CAyJ6GKRieBSIqeCIsbgobBx\n",
+       "WWHKNRQJDgqPMTHzKKrE5ARkhHgoUirAGhZ1DDYKH581AgJmRPQZkhQONWYeTwwtevTMGn3ZxKOP\n",
+       "YQBygiVScompmBGL6TyW1ozsHFxhUUdMZEY9aFHrCnbziMlSA+taMTuHguT9wBR2P8L3wyz/2kIK\n",
+       "GcJngX92p9dym/BF4Of5CStGvo+Fj8GD3SKF9+o8LDiFSvKlJYhOwNmwSC4wBs5ocK3iEqWBmoEs\n",
+       "h5sG6iNw+jBZhqMObM2BtQnLiUNql8HkhJLjYLFMiIvFLmVWmHFFHJRxaeVj6rJIrIQmhjZgY2Gz\n",
+       "RIUpFkcF9wyXsTHMJMMjRuMdy3khJKTohAyBHrB/TGNto8Um9IcYNQWzB0wps0VSaqPMFNvsE1o5\n",
+       "ZULqpkkjTVCzEUcJLHdgZQTRAkw9QTuGZgMubA6o9S+QnujDo3U42iv89b8GzfvKfOnRdQ7XF1G6\n",
+       "T1SzGZ1YQa6cPJb8bkI/gY98D54/Pu+3gW0RUTOY9qHcgjAo5L/tEEpHxcMfv3Unb8UHvhj5IehC\n",
+       "d1TwR1ZukYqlbWh1iiKuU4clKTIMGmTgKMjBMUKkLOwsQKsc22SEHCKiiIhoUsFhhKGOkDAiwiMu\n",
+       "SEiMKRnoiMsSHhkeipwF+oQ4WGhSIgQHmwYOR8dBamUMIRkZGUUcdgCMpImYMRibVPlkaLSKgAmW\n",
+       "WaOWOYit8XRKpK/y2vwENa0xWLZZtBOG6YgToYdKyjj9EuXDEdu1EsNhTqwLoqpfElTNcL0OegQP\n",
+       "vwqLA/jWHDSiIv9h34XwK+9lIQJvFpZPi8izgAdEt+YiFA6Cq3uw0/BIKhUaL+doNyI6pRHdYHV2\n",
+       "HaMUwcPLuK9a6LDPuNwBu8ViWMPLR0ydKYklrJkAQ8TUcshMhGUES9VZwaeaGgwjcFKW5Igj3ILH\n",
+       "YUogmpwDMqzjpKAKZSxibEaUOHnspzuigcEjQZOyhUGhcPGZEiDElMk4pITQMOfIZIqrE1bFYYpH\n",
+       "CQs3NyD1QuarbXKdoywPK28SSxedD2mZPbr2AkGa42UzYtPBNj4LVwtxsJKMir1HREH2ewPVCNzG\n",
+       "e7mfP0F4BJgY8+5dJt8n+CLw6z851vBvhVWFynEsSWYVmTAAoxq0IggsKNngZoUwvmQXOTZLGsoa\n",
+       "uhpCDc4MqgfgluDFBhw8DAvKY0CFepYzclIyFAsYQhwMGkVOGcUyKT2xqEtOJB45xxf/Y85cioPB\n",
+       "o2oOyFhCmR65tIg5AhQVHDIOCVhhyi7FgeZTzDcskBmYHVL7dGFsQhfkOkKJqt7HYcKEeZSekJHS\n",
+       "pEl5ppA4J05gfwyPRDCzbbaaNUK/gj2MYB6+9SstTPcc+df2wX0ZGMD/BfTh8lOnKZ86w1yn8CQJ\n",
+       "a2uYTBNVo+JBJRaY05BdKmZfb575xhjtiXz5m/Ar65AdwQNzUJ4Wc6/LPjz5TjPO93UxIuI+BvMP\n",
+       "Fh4r3e+CfvV2eOAX3AL5PfjK34P76tCMoOPD+BCmxyQdSQEDk4pi6MfMJ/t0K0tocYhViMWrOKbH\n",
+       "ikpxTMAhN+hKwRwBHyHDUCVjwD4OHRQhGZ4ILgmCjUcGx7kjDQJywMEBfCwy4uOYvZAjJliU0eyR\n",
+       "McVihtBHSEzGmjmkbzxSciyZIPhofYpmOkDyGbh1xF5lsnTApUaLxlAxdyXlhknwK4c02nXsWcpr\n",
+       "d1XYXWlRHjVwmLKzPkJlGY5SVMYT6hcyhj6YKgQBLE/gRguujnlThfHe4/hv4G1yKwA6X4Gn/37G\n",
+       "qsmxrYyoGTKqtJGtJVoHVxieuMTJ4S6vn60yyVxUvsGRMyRxAhy3SiBCQ6cE4pJrB0OC5JqJqZFK\n",
+       "mZKxQQZkktOkgpERq2i2qDGRCjaHpBxSxWOZlIABHsIRDboE3CRnmRCLjAkuA9ZIuQvF0wxJOMBG\n",
+       "UMXcGRubgEx2SfBQKmdGhE2ThBoVXGyTYZmcUCJsFFYm5HlG5KS4saZq7VExPSZik+URC4cab6zJ\n",
+       "MotgKSW3+0yshI9+u+iIvIGdJgxvm+JCRGrVIuHw0x6spHBzCP8mg+du12e8h/hAjGhuwesUZOUH\n",
+       "gAt3eC0/BqYX4foj8MA+rAwLCkNkQ+jA5gFsr0BbQW0Kh/WiKxIGkHlg2fCahmEPAtfH+2iJTmoz\n",
+       "O+pSe8xwwga8iIFlUTIGSzQpLjYWUzLQU3JlUMfWhjskpGaCQ5WRFBfFiIKQnpPRMhYiPbRETGUO\n",
+       "TU6Gx4hrQHJctJxF0cewi2GOgolrgyTYXCQjRahTNk1sUeQSU5YBlqmTqbtJzAFXZEC54oFfJxi1\n",
+       "SKcdDtoxOqkQJy6SKExaote6m2gyJp/cB1c/DNlLsPzn8OSRMb8zL3LQon4uwWkqrDQnqFiE0TyM\n",
+       "+zC/eqxcSkHpW7yd3kBszAuOiNeD/+GRQnAwWSqsc3e+DJ95pwPljhUjIvLzwP8C9IwxT739u576\n",
+       "RTgzKOZ9l34VXnlaRP7gNqUETh2ufstw83yGH0P4IqRfhhf+R6i2oBnCFWMR1ytEowYnu1tEpxJG\n",
+       "XhmdHaJVwAmj8YMWdpZj1xQlytwg54ZklJlSZZ4WmhkKaGBzhRYRDg4TbFYwaGxyIKfEVSJWiHEx\n",
+       "GDJGVMnIaOEzYcqYGi1yLiPkJmHeHJKqKpF2UIQsYlPWU8ZSI8jKEIKxI2w3ZVDy0fgkjo9lpsza\n",
+       "m0y/MePygsGtdxFZJCufYuFiicrQo7/Zpt57gdlij9cPDCeGhmBlSPegMCGqvQD/dh32e3Dw+bcy\n",
+       "re8UjMlfF3H/ZcLBf3aI9VSVvLVM9UaT0nCP0WJCdqDppnWy5TNUO4Zhawl7qiF9jVGzQqYPwRpz\n",
+       "HZd14+OalIFyuKzLJLliKDEtCVBKCKTYnxqaZnpIYidYIlS14qwCiwaaKRYZUKOD5lWgQ4UGHkcs\n",
+       "MWOdBjMSWoyYcYWAORaYI6BMxpBlRqxg8LBMyFi6uGaIZRwCUVTyLkOVk+QZeWpjshCHDl6Q8tDT\n",
+       "UIvg5XMJUkkId+EgVZzeM+SLI9JSRNdP6L8EF0+CpeHEEK62i/DI4LYUCiLiL8N/tQBPbUJlHYIh\n",
+       "fOganL8Kf3DbCUa3H58F/rs7vYjbhWNr+DdGNT+Bxcjwm/Ds/WBW4UQfzDZ8+aFiKjBqFrmNl3OQ\n",
+       "GugArlZgaVT45dysQHwJ6tUS7ZMKSRQrnRnJ+RJnxiELFI2ISjmkrywWjSGR4orZocGymdE3M3aA\n",
+       "SQaVGB5MelxtlplI7TidN8TiAIucSBwi6mhq3JtdZ6oy6lLiJbkHyyT4kjFki8KGrQqcBy5S+EvJ\n",
+       "MU3WwiXClyLH15UuXb3ATNURqqhkBS0TIutlCq+PM7DzTXYf7DO3pNjYz5AsYLdZ4SD7EPkzl6B+\n",
+       "CZbbMNmAfh0eEJHfX4GDTcbf0MyaKVZpnWR7COebxcxLAczAuVQYtVx+u93JIDwHz90Hew5kb0gr\n",
+       "74Hp+7IYAb4NPEThCvhD8IlbVBknhhA8Bi9+l1v4Hz8OfJHHz8PnzoNRZOYqY+sGbE/gS/D6P4bh\n",
+       "fwHNFUU/9akeNFkcCUpprO3rRG2h7iX4lkdtXMHvR2S1MU61QlW5lDjLgIiEKgFdehg0LRpMCMnJ\n",
+       "UBjTwBObDhNChCllAuQ4lSRASHApkeAwT50KFYSEq9SZmRNM5QaIJgViMyWROQSDx01GakArPUVo\n",
+       "pWinuGWPyRCTUJKIRnLAUQWuV7aQX1jBBI+QTG7AXAWvleElE2arLrlYuMESzrRDNhczkJDpJkTr\n",
+       "kH4XBlsw+SPgyu2KkL5dMCZ5VUT+UQSf9nD+wSHW3FWckzM2spTGrsvF+jKT/iqzZ7vYVbfoPrZP\n",
+       "oOQQ4V6G5hJTM2ZfZihWGccOydEB3pxF3+sTOmWWjcITzSF1jsw8C1oxMx0O5JA1sRmhyJhgUJRY\n",
+       "ZA7NITYZDRI2GFDkTJTZYkadmE0MMyCiT48+EXN4dFklNxlKUmasIcaiIdcocY2cnKltyKni5AuY\n",
+       "cUTidRj5Q+7tQb8BByXoXof2DFhXzKwSzz5Zw74pZLlLTsqZ9oh6Ct/7BPzZDsS/C+O/MMZM/pJH\n",
+       "jYi4FN3lHyr/9eChE3BmAbwPFw7YLMHEgZaBR9/PxYgIy8BZ3veeSj8yvgj8OvBP7/RCflQYY/qF\n",
+       "9fvwo1A7D/mzcPD70FqAm58pCKof/hMYVeDKo4Uqsj+E8CpE34D2P7JorlSpjxxmyRhzMuGMGEQs\n",
+       "hlEMeDSUg2UyLpOigSqKRYYMJSPUkKdwZgR2AHvtnHS6Te577FqCR4hDSkqZm1SwjcNauoNYEffm\n",
+       "ULX7RFzkJZZIWcVQoZAk3Dh+FR1zaGKIgISYGBjgsk9kfGbqdOHcrRMyq0uuqli0cYDq6rPEiwlW\n",
+       "VuLApAQlB8IS8ZFLtdVl6G/hLB0y/4kS1alhasYcDSGVHrywD5tPkb/6hovCt8B8Ex49Bf4ObFwB\n",
+       "swW//8OyxwTcMuDeIhsGKL9NJ+VW3LFi5A2HTpG/qiWFZeBUDq9u8i6KERFpn4fP/W3Ye0NjfT/I\n",
+       "F+GTz8HrxqRfK/wV9ueW4B8+RWf2OgenhpiTKwQqwRrvsrEUsbIX8cDhiNSBwYLPYLmGEgtL1VCm\n",
+       "ykxKFJrxfXwTUaPLmmScRhFKQoowwmORAS1CLAzXj+nGEXUW2MRgA+Y4h0RTIaUrXTDreKYGUsGI\n",
+       "TVt3ScxlQqlimQoTa0Ike5iqQwmNYwQVdTGS4RvhsGyx4FdoTUKM3+HwhDCdbODriKyRshBNGVQH\n",
+       "TMMb2I2QkxKw4BmaCkoCVx6E8RLsVWH4P/GDVrZ3HMcF0p+IOCdg/dfgyZehOQNlFKO6oetvwuWU\n",
+       "zJ8w+aV5/NhBLAHlYZwGkSwRJW1QVYhH0OkT6wlqYZ2ZypiqlLrJGHEWO5syloyQBTKzi5IYnxCX\n",
+       "Cgp9zPbxsFnBZQ+FIgMUE+YQUmwUE1q4rOCwj2KHAwJzilQ2MJKh6QF9FHVSU2KEi5OPybOEk9LB\n",
+       "TPdwtqA1bxGWfIYriptLHnFvRKOb8eQhrIeNQor8/ISL84pEL/GRqxrHSlBpyOlnS3z1wz7d+yfQ\n",
+       "E5GXjDFvOw4TEasCn9yEpypgTSD2Rb4SwjNvLU6bcNYDZ/kWSbYAdTALRV/7/YzPAl825t91of4A\n",
+       "4KvA50WoG/POxML3I4wxA+CPj19vQkS+ANMn4NqHwIyh/08hep5iLFWrYv3jnPJ6GTexmMqUtAab\n",
+       "mc2yp7HDhFkFIjemrMEoYQ04DRwYzbbEzASsPiwkMJqBnbns9O5C6whZzEnJmeQlmpbHfLJNVt/m\n",
+       "tIKGEkraYk5BRQtzlo0j62hKOChSZhQk1msUY5o+sAzEGI4o2J9blLGYk5AxQ45YI6WEEb9Q8pgy\n",
+       "trg0jMXEfpWWU6aapBi9SHXXoikhVvU79M71OWW1OfO6g5WNCRtj9mtw9cMpwXMvwoMZnN6ESQTO\n",
+       "AcyuwT+5Aj0N2sC14+f/9nsDezdBHi5aTG+eBTeh+U57+r7mjPwgYil8VH58eHD2fOG1/2bVZoM5\n",
+       "B7PrRavqGsV3WXdB5EXgdM5svoVVU9Qpk8uQfRUhKxn3H0JUcjh40KJkQ24cMDGOtsmki1E2xmjy\n",
+       "fJ+yFdHWhlAZcmCFMTEKB2EFYUbxh+/jskWMzZQUD5hg6GIRYJEwT8wR86TiYEkNi5hYGYw5jzF1\n",
+       "HDMikBw/PSK2poyUoak19qFhIXborlk0yiWaqokRQWmhlhyy1RgwlRY4HonEuKUd8oUhbSujNtK0\n",
+       "K5rFBGq9IvFyB8g+BcPfAV59N3vy3qKVw8LlgtyWO5BKhDuZ4ske040UljOybMjY3QcrwUouYiyL\n",
+       "eR1SZ59MbCZ+TnzXEU5/jnG6Ab0Juh5TLgfMZftE2mFmpyRqh6pS2Hgs4iJSRkioMOUmJWJsDGC4\n",
+       "SRmLEQl9loEJTWxaGDQag0uFBqGsA/NogkJDJdtAD0sbfBOgJERbNU7HE7ZrhntWK8TSoJQ71GxN\n",
+       "WwuX5sFpDolUjnWphDRK+NGUDQK6zXlK+wZSYefkaV5qrmBZPs7DkNYvwOMi8hvGmNlbn2oVfupD\n",
+       "8NNPwI4P2QTcb8AvvViw5p+99b0JjDKQ+PvOg2/8XLJbjQ7en/hl4F/c6UXcbhjDTIRvU0h8v3Cn\n",
+       "1iEy/8tQ3oD4AHrfMsa8K4+iY+vxPzt+3fI54rTh19axPtyjNCrjVSJ69YA128Kf5gzKhgWnGGum\n",
+       "ApMyKGM4r2FeoG2Ki9hrU+AIsilUZ3BzLWexusu0GoNbpZm7xEbTT4ZkZYt5nWM5UJPifzvVEGqf\n",
+       "PbVAVc+TaU2ubFIro3Bf1RSs0EXgWxTf5kNKjFlA0WIZmxk++zgscFMaIBE2A1IJsJjDWC52WiG1\n",
+       "AkpAvNoDr4odzqDaoVqvsfRSii5NyJwebpjxsWdh8FMQfKcHv/kNuPsi3JPCbAgXfhRxgjFmryHy\n",
+       "7J/C4/dA34PsGrQvFPYpPxTveTEiIkvA59/y4wNjzN/5y3/7X/1C4egJsHoNdkeQvytGuxRWtj8w\n",
+       "VrBA53DPWhGwUlsR2e3B63+E/1/aNB72OJVZ2NojiGwGnRo3rAF/dB5WV23aohkaRZcVlogYiE+a\n",
+       "pxjTJ2cPbWAjh8UMUkczkYBDJbSMoSMFhbWPZpmAiBo2GQkDoIowooKQYmGTM8+ECjcZEBGaFiXj\n",
+       "kJASyyIoYawrZFmJeFjCyPOUqjN88ah4p/GCDnFaZjlJ0aWMzFbYYwsz79O2hozjkNBrEiiFE++Q\n",
+       "Nqa4owTXzaibHBPAQQPGZdifg9oAuIf3dTFiObDxDNguBE2wYnikt8WVz/gc/Y3zELWgv1+kJJpd\n",
+       "8kVYNcusYmFph0o0IaPPTYm56xWbF57oE+Q5ef+A8TLUcMmzIxKZ0cwtLNWgwYQtSWlg46GZIOzT\n",
+       "wANiUmKWaJsDhD65eEQ4JEAXhXUs3y7C9yI0Y8DFxiFDgBuUJOKkzqkazb7S9MSiql1MxSOwSng6\n",
+       "R4lD1UypKIeW63G4HnDfH4RMHy+iy700x/ZStB0RVhocNu6huQWp52IlD5Fu2bDxtSIc7mu3PtFj\n",
+       "a/6nnoTtN8yNapB8FPb24KdF5LlbieYDeL4Pn7RhbQ1sH7IAvCPIt74fTPy+gwgt4KMUBckHEW/w\n",
+       "Ru5YMQKfvrvgcxxtwvP3iTi/ZUz6yu38BBFZh/Z/bSj9VA9zImFQA1+n6JKHa2LKFuzrjIlrONcB\n",
+       "bOiUYB6oipAgJFrjZVAZgHwDXrwMi78MD9o5VjhlvwVnVYRC0Ew5cGx2jItrQlIDMwWrRjMV4Xlr\n",
+       "npk2OJKglM1UK9Ae2AmFpfo6YMB0ClK6pCxj0QIEgzAH9KhzlZI8QMQhmksYMhxcEkpgxYQmBzVH\n",
+       "JehQnYyo9TO2Q1jrhdT3B5QOUyoDw/IeeCl4JwDXGBNRBAu/9OM+8zH84Xfg6jV4XIHXhz+OigiC\n",
+       "//2H/c57XowYYzoU7oU/BuwXihZZBlzNYPtfHxvf/NiI4NpVsO4vYnkNFJXoy3D/Gkx/Bl5pwWAb\n",
+       "Gl+g9usDNqXKcmSxkhhMPmKSt2nvK7LKlK0rKf6ihY5KZJ5g2WBjqElCzwqxdQ8JhabJsVyHTBzQ\n",
+       "OVPtsu2AkYiJ8fGkStVkTGRGGU0Jlx6GMSPqJickZ4bNCRQbJIxQDE2PPdllnyVcDKrgbiNmQJke\n",
+       "qt5lwbi0JGDOitHz+3ScDKwSSht0GCOpS9rKkUqKOApP9mklDiIw0zHszMhfBvcEUIOpD+28iHU5\n",
+       "FcPhPJTufjf78d5j8DLc/Hl4YguOrV9IVULUt2B/D9wc/FNwUMVd6OGvjziVX8e1hdhy8GZCbWTR\n",
+       "aTW48ijUpjdISoaTQ0MjtsnLU3J7xJ5rWNE2A1OigSJnQkegUFa5CDkxIaAYmh1EDGcpY9NnjwYO\n",
+       "NjOKdJuMPTLmMCbDNUdoSmSS45kenkpwTJWZ8gj0kJQApQo/G3IwZcFJbOLYIgltUs8wQbEaQOZM\n",
+       "adzoMzydMGto+nNjBg+OmFZOQN9G6YiZv0h8CeAM9F4seF1fe8tDrdVBld4ynpuDqFzw7jxuUTsZ\n",
+       "Y7ZLIr81hr/fh/sWCvJbegAvHcDvAP/te7P37xq/CHzFmGPH7Q8evgj8gzsr8T1/HMPVjKEZwOhz\n",
+       "IvLa7QpkFJEGnPzP4YFmjVGtRZ6FpOkRu57CTWNGXogpZ7gTiC7BdhO2y4rANmwIDCxQSogToTYx\n",
+       "OFFhBi2vwNmzsHYGLpzKOZtazDs5iaQEBjZVxNg4NLXFiTRnz4YdKb5totwmkRjDFUI2QLdwGJPm\n",
+       "N0FlFMlzI8qSsmrmsdhinpAKVYQuIfOk1EkxKHMZZAsPm/sp02CfKSE3VY7Ja1xTipanmMwnTLZh\n",
+       "8XvQecjQ2UxYugqbx4XIYQXCPuCJyD2qsKC9+eMaWB5fRn6goHknWsadVNM8CvzPwP0i8mXgc8aY\n",
+       "t4xgLv2vcOkExaF33Rjzrg8FY8xeXeQbX4RPnoOZDfllmO+A96vwzcrx+CYD5dKoCvFSQpp4ZH2b\n",
+       "emLjuBP2Fy3aScaBC5tTg28VnZB69RKHUkVbmlz2qeopmevSCHw6sY1nOWxZG+jEULEzOsaQqxkh\n",
+       "y8xTxtUdjmSf3CjGss6IHloSGjqiIi5tERIyHPo4tGnKTY7QjKWPlfsoASUptrVLzSrR1gors8jS\n",
+       "nHoeYMWaI5XTqbdZTntYnoPGkMmAsSg2Q4cHAtBxyk5e5pm9GZ3nCge//U/CXXnBQdoB1lJIIlht\n",
+       "ikjdGDMWkYUqPFiG1giuJfDKcZV9BxF9D174EJgNODmAyIGLjSpHuw9Dbx1GN2D/EPtnc1pVhbZy\n",
+       "WtpiZRYzdmNMeZFXrA2ycpXKEZTDA7rLI4zfAkej3THiZqxhmCQxdj5lZJdZReEzI0KTodmjQ45P\n",
+       "xCPkconqsQerRcB9XKNDE0wRjicyJs3m0FJBpRY1UhI1QtsjtD5FybhF0JZeYMZ1et4YiRyWKoUH\n",
+       "9lSVUaOMUb4G3Q7DckQ9h9jJOPfyHt9cneP6ykkGh2N2SlUae4psZUz3bJ3+6yuYXYC0IGq93f5N\n",
+       "RpCHYJdvGXf2oRzClB8IpoTImO+KyCsHBRl0GTig+J+e/NV5Y//e8R8Bv3mnF/Ee4hIFj+d+3sUt\n",
+       "+PZhPoTWPOwUF//bgtJ9cJ8LtdTQ9ys4PTCqQsOqcz3eZeZMODWAh/8F5AI3fwbq3zbEH1XYJchc\n",
+       "Q0U0talhN4RwCJ0LUHehFhYeS8MW+FmOYyKMqzHkKDIakpOLwddwV1h0lKcsAMuIXqRqpoRqn9Tu\n",
+       "o/QQVAPSJ0Dv4ZQaNNllKkNaJGRYLJPQx8elwwybAWcI2McjZRmwicnI8YlZMktcM3eRxlMG/QTt\n",
+       "HjFvGcwTFuV1Ta1mmMxn/GnPsPb/wZayOHj+NPzDuwqjFHMNqIp8eWrMv5c8pjtJYH2OIgXznd5z\n",
+       "xJu32duHCXzpOdh+FX7OgdoIXngUogpkBrgJm1/H/twMfwNWTEI7HdOpOvRSh3YeE1kZwZ4h/hOY\n",
+       "PDTj1Ew4kCqu0qx4W2yriLkkZ+kCRCZjbdPhQBJestcxfomSl9CTMkMWqZibxGTsm4QSdQJCAl1H\n",
+       "5wZxq6TmgETZQJkZmjIuFhGIoEyOK4e4eozLK0i2TubYlEzCkoypWBlGa7LQYhI6xK4mskAmYzzH\n",
+       "pWoPMYwYiFAK6qyNNUNbEZTncF/qMVeG3f8HLnwI6h+Bo2phmTwfwFEMc1+H1RiutS2RlXPwq/dB\n",
+       "XoN4Dx56BZ4SkX/+V1Fl/KiwRO5ehE/Y0Arh+hF83RjzA+mdxphARP45DB+GC/dDHsDhv8lh4wg+\n",
+       "tg6jFbi5g9+PsUo5zizBWC6S2IzLLvulJYKySxzbNGJBaOOnKWNvUtjAi1CnSosJUSlG6T774mLE\n",
+       "sAyUmbKDYYMaCU0umgqYGmUVAGN8ypR1yAm1T9kY9sk5oaEXb3NYssntNpECMTeI8PFVCS+BsZUR\n",
+       "OTZ5tkpshFByXibEZ0hZZcR+g8kIkBDresbeGLYqUD5RpiOfYvIbp+FoixvN16mcGhGXbbL8bnjG\n",
+       "hUwDF6HdhT9/41mKSBvK90Dd32d87Ztw75OwU4F0BN63C2v+3/1hXkDHZNgL/ATISY9HNE9SFCQf\n",
+       "SNwi8f2bvC+KkVwgEt6mmP3xUVmCuVDQtTHGeo3kpKGiwdERHcvjaM8lej6hW4F4Hqb7MB8YWl/K\n",
+       "ufBzMG8VZ16QwUECo9+Cye+B9Rhcvwc228V7ghIYZbBMjDnmbY4xLGhwdGE/38XnQC0Ta0hNQIcq\n",
+       "duTgWK+QezUU96HskEzKOJJimyaZ7B+7e2dsUXBDXIqc3ql5FUtsFvS9LCQdEuc6uZRQ0iZjhdA0\n",
+       "SfUQ1b6bUuc1ug8pNt0Sa7OYfD8EP6R8V8x3lqH7z+6Ch3/RfcHcAAAgAElEQVQB9t7I/noQrD+G\n",
+       "T4vI9ffa0BJ+4gistw2rG/CL56FUB70FjxzA+QnsX4d7X6byc0M253IWlEUiGUPLcEYbXvcU49Al\n",
+       "mmZ00gwuZFzfhtbqlLXRlGAGr7chyWDjS/Chb8Jey3D4dxWbUZW+ahCXhYnMMdYWRhrEapVUDkmp\n",
+       "MdU+DjYwxuQDVKaImDFyDCtkWOR0KaGok6OxpEKu57DJKXOdchhQNi6JHaLEYj4FL8zp5TFWWGPk\n",
+       "G5x4ndOdLfqnD+lKRinVlDUsaxsd+ThhjilPMSqgehkYGzP6HZGlDZg/CX4TVARz12GxBy9sAPEa\n",
+       "/J3PQK913J4/DUcVWPtzeIq3MN7fLXyRxx+BX3oYjlow2YGzz8K9IvJ/G2PeTmk1B14D8hH0XgOu\n",
+       "T6F3gf+fvfcOluw8z/x+70md081h5t4JmAyAgwEIIjEApEglipRWkZRFaVfWrlRey2W7ymuXXSUH\n",
+       "1daW7d2tpezdUqlIUV4xiJQYxASCJAgQEOJggMnx5tDhdu4+ffLnP04PMBgMSFAEMCCBp6qnbvd0\n",
+       "n/76O93feb/3fd7n4e5RKIzBQAOnR6D12LasaCWF/vQIVii0zSJ+YNH1HbTREKNn4MsonuUQUCJP\n",
+       "SDMq09ZcQqWYFY++Cmgp0MUkUAkyIgSiSFJnNydZFYe+srDEIWDAVqQRSohFrBIpIaR0l5lBlUWz\n",
+       "T1ETUtGAuiqgazqEFh2VIwybaGYOXyVIRkk0v0f/0hpbsw5Jepgdj9FjA1ItjZNeifazPWhMg3nj\n",
+       "ULxoP1T3069ehEuPw/t92FEEexlkCZ4J4DkAkcTNsPfXYL+CZOCynHqMU9EyvbEc6D0YbMEX3Z8M\n",
+       "EbNXgg8D3/kpLtFcxt8D/zNviBbfk9OwdebV3bx01mDlvgzWoSwqo5NKOCitS08J21Zg3FZcOAt5\n",
+       "gYMVGFmGlUm4qEP7X8V/J0vgboL7daXUEoBIfhzGdNi1DEYJyhmYU9DUIBdBXYPukCviJ2MFkb6k\n",
+       "WI8mGagkaRWR8R18rUvCAN9PEco6CQMs+gjgyiiayuDjkFY+vhayRkBeaaQimKWHRAnyeoVIt9H8\n",
+       "SaTrEBQC/LBP4G5AMI3yXPx0SDIzyVg3xEmkkHAOqYJZvkChmKb23ikYZGJ7HCD2lNkP7nKcOXsr\n",
+       "GHm1ISLaNPzm+8HbFpfmuAl4AHZ8C97pk73J5YCeYucgRCcgGeksZHSWVEDSdTjtTtE+czuNxx+D\n",
+       "W1ao/Sk89wewtBM0DRrrsGcVbv8etJKweruiVrKpJxVB4NLWxxkYoxDWUZYLYQAUCHSdSAdRfQK9\n",
+       "REJaBL0mRipFWo8wtFg6bRrFEllCNmgqCwefKEzSUSmClMWN7YhMwudiSjC6Frm+Rz8X0gv7tDtC\n",
+       "mLxEsMNknFmSfcGQdWqmg+7XSAYGSTvC6UeMPwJnHOIOM6DxAFR+C+49A6lhE8RzM1BeAIwZSIwM\n",
+       "5/My9kHlKBzhVQxGRCS5A372Pli9HMHvg6oJo+24M+AvX/z85Nvh0IfhoAtpD5ZvhlOrUPvUJfhk\n",
+       "Hz40ApNtBqpB1h1wx4LBhYlJFlIRZiKioZLYwQRSr1IZcShshNizJppkKVEgFwqiEjTFoa977PIi\n",
+       "0oQ0jRI9xgkloEKdIop5TNIiaKrPikCXUVajEE2z2RmFFBUciqCiLNo9RT5qkQhsEpqLmw/JSYFu\n",
+       "lKaug4QBoRvhJ0x8sUhFOfTIwkyl0FfWaE2XSRcUazdnaW29A++peZiswO7jcMsatLddcb5moeXC\n",
+       "Iw/BlwwwfFgH1mKJfcnAzl+BD1YgN9QKOEhEesca378fgtNA/9Wq879B8DvAx6/3IF4HfBf4jAiT\n",
+       "SsUaMK8v/m4OJhXUBVZXofGVV/f4/tkUJ3aPsk/pJKIUqbZJVzPZNBRT1RZTvs/WvXDoCdizEZuA\n",
+       "HqjA5Cw81Feq8W+vPmLsgbXtENx8P6zeA3YaogAu6LG7r6bAV2AJbI8gO6xD5gCTEFtSeLpCNwdE\n",
+       "5NDVgEhbwZCAZKRQojBUjjxrVDBxoxyXwg6h2UOLYEKEBDqLXpaZQYXN3AJJbxRroDAaOr6Zoxbs\n",
+       "RV0ag0we5T2DPy0kQpOoLUjaROlGTE9Bw2AEOkZsQJZMX1GaNSE04iaf1xxvumAEmJqG4rarIr3b\n",
+       "4Pufhl8MGcuaJJVNZOhkKmkEh1JCZ0ESGNUJls/uJTonQA6S4D0Nyxdg9BaQcehsgx3T0EkJT/xK\n",
+       "mm1jCW5ctDk7r7GUcrATE+Bn0QIdI6rgm5vo2k6sqIdQQ5cWPjqetQPdmyHv+wyMLmV8oENGAnRp\n",
+       "clHl8GUfaTVCFNToYDCQDU5kArb7I6SaHS6YPUbGfFwDNtsR5a0CUoHo0BiltkLpPmGxSNqtcU4P\n",
+       "2ZUJCGqgnYAnTdj47AsXmOAEnCpA7b0woUFPg8pF2PoCUIpi6YgXIYpbOV/t1s2xCdAzvFj3YTy2\n",
+       "mr0j1ohhSSnli0gWdv8i/PwGZIbP39mAxBw8dESpwaMi8vEyFMEfheBfwdl9KXqZaXK2hd8ZkFwe\n",
+       "0M5Oo05bML5Bq7OFfyBCIp2u5mNqgqtyuFEJkTJbSqcbbWPLm8TSBV0PSZlFNLXKgvQoAS2ZJ4FH\n",
+       "kwk0UuRosqStYQeb+EqRDzSmA8VKJGiBh2FF7PfBSlY572foYlAVhYsiH1TRZD8JxwBlQ0mRGc3R\n",
+       "9xXbzugE0xbObA+vNgrLc9DqwIWn4O4ifD0Lng3GY7Gq4AORUk9fY87nYafxQiByGXvrcPawUhuP\n",
+       "v8rn+LpChJ3Eu8GfJgn4a0IpXBHuBz4I/MXrP4Lj/w+xvEaHYfD7Kr/B6D6ap4XjMzVGdvhYiTx+\n",
+       "OIXZPEVtqke0BjM7Id2FE4fAugS7l2CqDblbROQrL29B0pqEfMoiZafJdmxaWY9GLl72bg+gYcIe\n",
+       "id2CB4HGwFaMp7sM2E0oGhE+PhdQ+OhMkmSaSDKYUQNHVukTEGi7MbWQhiogKiQlq5zCIxOOYPTH\n",
+       "KfYDVswKS+0NCksW4ViJVi1Np5mArge58tAN0MRuOdhKJ20aaB5AnyARUWcSWjXY3Yf0lcHIImTr\n",
+       "r1O35JsxGNH1a7T2psHVYMkmn86R6rqE2xJYXQVEGD2TUBlsBduIciNwcxukHQs2JZVSiyLpCdh+\n",
+       "C9yUgNoe+Id3GuTH0pSqHo10kRVvCs0oYZnn8PQRUA5Ka2DRQBOXVNRCAodUkMNPTaAFCrFA97IY\n",
+       "joaTKdNXWVRUoaP3yUgJWMXRfDzLQaIsbn0cO1WjV9YYa2uMbtNxXY9KC+751IAzH9B4dm+BFdMm\n",
+       "ZWkk1QDH0Sm7swy8FY53IPo+RMeg8aRSMZERnhcR+76IPA2LY4B9mWktIoONuNiam76iVfMUTDZf\n",
+       "2onx48LpX6FXoYAl2NeBg0WwdsHvLENfRP4ayMK8/kIgchk3bMGJI8Cjw8/VBJoi8j/A4//SRf+1\n",
+       "JmE2jdRLOOsGauPR2P7TbOJvbMGlW1C7kjRTIUislZvQDDyVxXYsmv5NJOoDmiWXVCIkF+poRh7b\n",
+       "CFDRFEosdKbRySJEaIEBWsggGrCsmhx2A6oFRZeIMTTyvk7RiShGDoXBeZ4pFOhaBVyjT2h4lGQD\n",
+       "CllEBhiqS5s2qYslJuoa3QmN+cDg1OxZom23wuK74dlPQOqzMF4A6cWmVt/qwcsR1eQasSYvtN3/\n",
+       "1OG/AD6n1KvJXXhD48vAb3EdgpEhz+slXK8fFSKiExOjFbF0xOUAQs/A4F3Yjx5Fy5nMd4WM6REY\n",
+       "Ic0dsNsCmnBTMxZxfOp90KlDsgt+AIwB1avGrEQKJ2D1j+GucsCqNiAsKHxLRxeDCTS6moNSiuwA\n",
+       "XEuoRWnCdp5cZguTM3hakUgNUFEbXY0g2jhjYYeB7jPQNCKSOBKiK+goE8gjfkjO1AhDodftkjRq\n",
+       "ULQYSA59q8PcMy5Lk5MMRubhiQXY7sD0FLQy4M4SLVVYuEexzVLkej7uWINK6QbqS9Ow6yQUH4X9\n",
+       "h+GsQHQJimdjZfpryr6/2ngzBiPlMgzqkBq9ov3wHEza8JBLexSmjQx21aYyBrnIoakH1NQtbPXf\n",
+       "A+UIZANGDsGmB78gIl+BAx+E99Y0zs9nGWRdtkpgpYWV0SSbajdRO0Qzx7CCDp5po8QmkESsIKE8\n",
+       "RoKQPQMFXp7nlEZbE7zIoO945FOCjk1b6yJozJLBpohg43CJVbmZvutBz6dz/gIp0yM7Dql2SLcB\n",
+       "Nz8Ig7eFHHQd6myn5vpczHmYgxxsTpJaKqPnYPCwUs3/FUBEsiLpd0HpUCzwU30SOD8kIb4oq6SU\n",
+       "CkXks/fDx/ZDMQvBBlgXYLkf6/a8alBKbY2LXDoJ22+EcgUmQjgYguyGp2+D1Q3IfQN+exX+HsJr\n",
+       "XDBDDa7Z5RPM0O/eAA+n4W1FcMqQzkIrBccegqgFfzEKfxpAxiLKteiOuIgWYEgFP/LYlCI9Q6FN\n",
+       "TiLSoGd5OAKlTpGg4GCqNIbWp6mV8CW21fJMHTMycaWIHtZ5LqeRQ2dnpJGTgNFkRDWhkEZEP53E\n",
+       "MNLM9LL0cnME0TK6WsASHVd5uJpDGKUZd6uUd2YJUyWSPSFp9LCngUUBScLaAvyfa7Hlaf+HeAst\n",
+       "w1IIt1iQveJ5F8Zg60s/1gl9g0EEIS7RvAIdpJ8afB34TyJkfxI5MiKyC7b9GkxmQAmUWyLyeaXU\n",
+       "CrC+Br4D2hT26QpbN5uM9Jr0sn0KxM69bh/KeVjeAXe50E3GGkrJLsjviMjHX9rp2TkOqwN4dldE\n",
+       "yQ2p5lOsiDCqICUJfAJqkc+KDIMaCZHREC+aIoq2g6FhRBNgN/GzJcCgqZuEmkZAiMMYES6RmgTX\n",
+       "BiOBcpLYtmDpa4QpoW32aUZ58k1FPg3zwO2PLPH5X8iwduhugu9WYPUivKcNzj7YaNB7dJPze3uk\n",
+       "tuUJ9btwnjkUp7iLc/DUAnhfhIyg0SZ6WMG3X6/y65suGFFK+brI330LPnoIwjw4m5A9Aa0ufAWa\n",
+       "3jqbH80w4ul4tR4bIz7Lz+aoOjNgLEDJBSy49HY4vQpzVRJvh3mtyIN3zdHdNYkkG0SdBbqZInrf\n",
+       "wBFFVGriZmYJZAAqg9JngAGe6iIsY6LIayEVfwDaBglLBzHpJwJ0HQxCFBYFlcUVE0WIMEKSGgU0\n",
+       "2k6eIF3GHzhUnlT4d8Khc3D4FPTnIJOHyS2PC0sOW6UxojFIGCZWqkf7xjL1LRh8CuJABKb/Szgy\n",
+       "AnNNGBTg5O/CmQe4StnwinldFpF/tw77k5DrxpyDi6/FF3kL/vYh+K1LsN2EGzVIZODMnbGSMjPQ\n",
+       "3QXbV0GDJQ8aqbjpBuKq0Zlx2Prbq49bgLtvh/xhOFaGjQYcHof0UXh3Gz7dijUx0rNQ7sMpA2ZN\n",
+       "PLtFfWwTK+iQqChGFkfov8snNMBwR9DbJv2whlZqYZEiM6izlhnBIYMii9CkLx0SWoQQIj4YKiIT\n",
+       "QWRY6AK+FpLzUzw+NgNhHk816adbaGo7GfZiqyU6qka67WJmQ7YZHuOjAUrr0MzXWR07iHMxMexS\n",
+       "OB2rTJ16pa3ySqmeSOJL8LV/AgeCuLtgJQ3nLoD/7Kt1Xt8guJu4f/1a5aqfSihFS4THgfdzXQXQ\n",
+       "fnSISAl2/w68vwOTQ/L6Rg6+9TER+fdKqW5C5EvfhF8/RGSnqJxZpnngIpoRsGcVxh8BpcFzH4Tt\n",
+       "SUjbsFkAZwl2nYb6neDNi8xehK1HwDs2zLpUQZ4F5w7oLxo0jDzpTIcwo+FLipSCpLfFc5Zi30Ax\n",
+       "pruEeodlfQo/7EFYQFFHUn1CxhCVp6mnSeMSUkCoohgQkQKjCEED/CTBVp1gIocrc+iDFmJ28fUy\n",
+       "osDZD85Gm1/65tN8+o4ltsYFenX4NwG0ViDRhRWIFiL6/9V2+HAf3IegaMDaQVh3Sd/3DLsMxZ41\n",
+       "WH0nLIyKyN8opV5zO4Q3XTACECp1TkT+bAMOp2Li46ILJ5RSfRH5QsC5pTalnwEpxs6l7e/l4Y92\n",
+       "QcWFVAoGyaHpjwkKjJRBeXYfg0yOhJUD10IP6jS9Jnopi2rrdLO70aM1XGMaor1o3iaR2UML0xTs\n",
+       "IiQqbKoIM12laGYgSOLo0FN5umGZyBiQYAJbRjGVgaJKSAQqTdofYERdwsIlknOKO87AmgvRjdAM\n",
+       "wJ+FgoJGBnr+GhNBhLmVp5O1aTsVpN8gfxwGQ7+B9Nvh1hLcdkUGZLoNnXtF5JnLvkJXY8iCf+pa\n",
+       "//ejQETGgTzQVEo1rvE+HRH58xrMjMDvfwCCfVeRZ9PDQ8Hq5+BrH4W9GqQjWDbh0gnwnxWRWWLV\n",
+       "5Q2lVCUHt+wZpmSnoDIely6yyzC1AA8opZoikjbBeSd85zzMbGC8C8bXdpLf6hKaDqlcid0bq6wX\n",
+       "TGiMkehl8JOL1EcbJKM0UUJho6NkCyFHRIoo6BBF4OtNIi3BRD3FWDOkPKehSUQ3kcQzSpjBBCZJ\n",
+       "Qm2EwGuQUqukmESLsgyMZbykTcKEkAi7ZLKnpbOrM0AfOU17bj+dZ78D8xegWo8Fr14xlHKPicgG\n",
+       "rN4IVgYaF4ALL2eO9xOMfwp86vqJgF03fJm4g+gnKhiB9M1wUIPJKwLrmS4cKML6AeBJV6njIlJb\n",
+       "h8NJKDbxPuPDAvT/GDJdSIagX4KEB800qEWYOAq1O2FHDkYXYbcGz/0qnJgAvhGvQaVTMHMXHFi3\n",
+       "CMcFO6FYCzQiGcRdkIYiFDhngKmZWI2QnNXGzupoA5sg2SMydLSgQ2TsJog0uuKDpFGXzfKUHXdG\n",
+       "6Gas/eG3wLgVFUUoCdCUTqE3jxWcws4qOpOw85zH+KUNtv7dy4mEjous74MHCzH71stC8AzZ9ycY\n",
+       "cwwmWj5vW4tbO75/I/zDJeA154W9KYMRAKVUFfjWNR4PgSeGt+cxIXJhHbbvueKiV4P0FvSgfyzL\n",
+       "6j8vMtHqEYy6mAlIkqCpbVExmiSLBwisHKE7iHu/Mmso3UXzNcxuElUtUtu2QS8ZcsjukHYW2ExN\n",
+       "IWIiKiTUCuTUNEEUd9wkCXElQxh10KjTM2zcnItoNlkf+rfBzhQ0J+HZHSmyTpq247Kp+9T7Lu9b\n",
+       "XiQd6pybijCPKg5eiFPu9+8CjkHxJthxVRCQCGEOOD8LXDMYeTUwJvLRQ3BwFMIaaCMizzbhy1dH\n",
+       "5kOux3pG5HuduIvmeYQgKzHJYVOpsCoi/xaW94M+Bu4isAGF/wlG3xWLF3URKT44BZ5/BR9FB1WI\n",
+       "+/MKvKA2Wt6Ebg+sI7Dkk7zRZKy7QW+mTTpM0DqooYcZsv4GXn+VyOygsi2KoUXkZLHTKRANjSVC\n",
+       "tQQyikgScVsEloYZJqEzoD2ZpCAehhES6BYNNFqqja31sbU8qbBIRJl0VCE7OEuz0OMGgakeqFSA\n",
+       "Y0ScL+pst2Gi0mF742mWFiL6XyLOWP3IO52hmvJ16Lh4fSCxyvavAHuv91iuA74C/G8iGErxExRg\n",
+       "pkagcI2Sa96HVOnyvWtxU0Sy34bv/By8rRFzPBvbIVmG+X+A2m6YBJYGUGrDRB/uXYLaXSLyWLwh\n",
+       "a30ezt4Hg109alMBWs4i6TlsGBFFPWQmgqKAo2uUnRx7NzTMaINnx+ucSvlEKYeilEiYGmXO4jNL\n",
+       "nK60QSYgGgO1Hjv+SQ/yLdDmSZg1RsQmJMAfWGQGOVJemq1sn6wOp6egWYOXN0BswPky3Hpg+Huu\n",
+       "wnhEzmwgymdqqO2lEbc7n7udt4KRNw5qcP8j8Ac9mJ2ETgvSz4G1CX8FLGi0V5ro74bJrEZSt2nQ\n",
+       "ZUpPMhIoejWbPsfpJkIoJSHQUSqLYfcQlaGw3GZThVguVHNwIGOzT6pEpk9daRy3ZsmGBgNVpawN\n",
+       "SEYahmaDeLiRR11GEBN0b5zEZJnWbJm5OmwPDC6NCMetAonOLNlFG9l2hvOzNvdeDJnchH0nQY/i\n",
+       "r4IMvw/RAJxrOCw68ENsoH9cvAP23wbLl9twHoVbnoQ28ADErdlXstttOHoMbtNg206oD8A8BaOr\n",
+       "MTm1CqDBxBTB3SWCkgN3rGPtdti7He7ZgIIHngZPvrfC42dP0qvcc0Wv/Trk12NS7jo8z4/5/P3w\n",
+       "sYNQbOOoZTbuLpNIhaSigPlEl75WwnMStLNVXNNhXyZA1yIip0nT7rIyPo4nM5jKIRm4GMploGfx\n",
+       "7Cq6FtBJRIzpJmOBhRZ1CEyfpqTYivaDE6IS4CXXCcRmPdhizOhz0IeMxGaSYw3YUY04XohQy7HM\n",
+       "9lg5oh0p1XsD+whdd3wM+LpSLyYsvhmgFKsiXCK27njgeo/nlaO1GgcEjgUj3TgrArEyam8NQEQs\n",
+       "SN4KI2+PjWQbR2HwFPB9OLoVt+eSBm0D7lyGfAfWJqFuQL0DNw03oIaCGQXnJ4CWUsoR0T8N5p9F\n",
+       "zGkRxcjE0H1CCWkGAX2B2RBGwghNt1kfS7O9qUhKm5wVcKPA3obLaiZPQYcFQ7BJQGTFwYgqQ1QE\n",
+       "MQEbmCSb1okkQEUFxpt9PMuhPB5QaCmc5CTLt1t0PJ/+P9Rj37DT15q1CM6ehuUEzO+BWgMya3jF\n",
+       "FWbOw+4r1G+NaPj2rzneCkZeIZRSFRH5szrcmoP5AZxrxXXltgGHXYJKk25hjFxT4aZ7kNSZD3v4\n",
+       "CQdnooboeylpCUKtSUufIvCa+LkB0WKa1X4D9ynImTA7D6MT0C8IpAyMaMAullmWnSQiCyfscU4G\n",
+       "ZPAQAvxolkIE4xp0TJOxzAzJcMBKocfNbp7dbZeqjHB+M42YCfROkfVdNqe2YPZR6GVA82FBIBxe\n",
+       "hKuPw6mPwFQn/gECbOZgyeaKC/VrgSOwdjk1oQG3wfoFuFPEqMPEu2BuTGSmDJUHlArPxXwG+fMO\n",
+       "3P4M3BRCpwrfiuAkgIjM7oeP3QuNSVhtQOGLZG4pM+b45IeBlRXBreuK1R2Pca66ivGzkyg/Iqyt\n",
+       "xP41n7qS+6KUWhKRf78K90Rk74uYGIFpgQywri/SI0KZaSg2KWR9pnp9EhWDKB+rQZrVC1ycuZmk\n",
+       "ypIKhVBqJFUPFXYIVoVSwSTs9Tk+pjCMiEhl6Kk5rChDgEPaVyT0PF3jHGbYwfMh8qDdgWQPSIPp\n",
+       "wXgf+h6UdcjUIHhZ2+83O4bE1T8E/tn1Hst1xGeB3+QnJBgRkRGYfDd4+6CfgLoNZ9cgW4ZzmxCd\n",
+       "j7tsRj8Ch/fA/hpoEVz4ADxzAKqfUCo8zfCCHZu6PvxzcPQQBDMw6cIN58FLgDnknHU0ho0PIpKA\n",
+       "mXthRxl26Q7dGQc0Ia0pcgKrUfz8UYEx16Y84VHPxurJu7MwE0GodSg5FoN0mboqMNB3oMIl8I/F\n",
+       "u0RR6DRI90ZIuwajdNjM5OkHkNVTmJFDQm2xVJiD9behnp4hXPIhehA+OhSCXLp67obSB596GG49\n",
+       "DndGMKghZxX3PBe7QFzGxXFoPXj1618LvBWM/AhQSjWBb1++LyKFafijQ1DqwO4lIsthbTxBxm6Q\n",
+       "DAdI4GIqSJp5iipDKjJAOqQ5RsVK43tNhC7u6gZ8AuzfBnMncUsXAxx0uppGmho51cHBJ+nbpEIv\n",
+       "dgAeFOkk8xR1nYztc2ZcI6lM8kGGWrpPDyEZ6uRtG7N1hI3THYyxLKLDJQf8e8AwYMWClQeIMxBA\n",
+       "dApOPQqdO+LSjA0sDGDtr35I18WPjcvmhZeRgkCQebjxI3D3OkysxAS1x39XxPzPSvmnhlyV7wxv\n",
+       "L8Io3HkLOAaYKzDnQjpW9hCjQTcD+f7wnQNIz/Y5NHqObRfP0yooqgkoH1XKfomqq1KqLTI+D8Xp\n",
+       "DGlHo1Hw0HDZo3wucZF+mCRheCRUxEgbpk56dKcCKntgOmzT6/foallszUDzXRKNGlk7YO2TEPyG\n",
+       "z5QBKdMklTDxE2na+gA72SXyLVJtG2dUkfVHSLsRJh5bkcfNX4SkDau/Bm4RNnUop2D8aFxdrL6q\n",
+       "nU0/ZbiPmOD7Zp6jvwGeE+GP3uhtzbHw2MSvw3sTsO2rsLEXBvOwsg+OnoP+Xw4vuHtg7x545xWb\n",
+       "qNHl+Llb+7lCBn+44fw2pHbB5HNQ2BvflnfAzKNQScD6JrE5F8AcjOVgxIbQhbwTe1+5hjCqCwmJ\n",
+       "TUU3DJ12XmMZj1agmNqE7A1xc0A5E1Fw6yjfxKdNXZbx/ByoELwaujFGtjPGxAWhv9sg6ftMs8pq\n",
+       "IknHMbH0ZWxDCJ77PXgRKf9W6FVjBeyll5tHh9JUmdECZBVsePCNI3B4EdIurOfgRAX6T7zc619N\n",
+       "vBWM/BgowfvugGwHch247RYC3cKM6vRTOvpgiWoIpQSkVIZcZJMRDVt0xv2QhOrQbq4Q9fr4FWhv\n",
+       "QvfLsHQ7iAX5iksr1OmmoK0rEIttgyzGmoOaElSg2H8UnrhFIxFp6KGG4Sm6BuSiEPSASCJcR6Pn\n",
+       "JtCdGdgcIag+CglwzbjsYjpwQwWKOTjxYeBzwzLIV0XkSTg1TVyaWXhpe9urj2asvfx8DXgdCi1S\n",
+       "k3DfsbikAnEq9p0hbH1ARE4T670UiLVPBlcez4JtA9jbgrEsRAqMNJ3cgL4D3hXpx40piDS46wLM\n",
+       "duKIyNXhy/eIyFGl1IsIsiJS0Bl97wxOYYwRQ0iFPn60yapsMRZBz3Q4sAyLeRg7CYkBdGoR3ijY\n",
+       "WZhdrzDVabE2qqG2QvIbLuEGrD0ZK7a398O44ZPt+GxMp/DnfBJ+AzGEVlFI93JkahpBqsToeodK\n",
+       "SWf9wIA7vwHWf4YTH4CLGRh7Gi71YPO7EF0zZfsWAPgj4P99ExJXn4dSrIlwEvgAMYfkjYwJmNoG\n",
+       "e1fiuzccB3UctiVgI61Ub7jJyO2A7cN1azMXS25kHNjWh9INXBGMxAHO1IfgvT3YvgZPd+HM/tg1\n",
+       "5MS7ofENqHzmxcJslhu3BW8PIbNlcmEqpIPCUjoGKWxyuMqnIkW8MKITLmPqPsEAVASlLjTQ8PQE\n",
+       "oV5EBgEsrIDjIisB4zv2kt9IYYUO9rYQx8qS9nsUgjpjCx6WWWWtfRecvcYEdRMw/fJTOPIrcMcB\n",
+       "OLIWZ8C7Vfj6fvhaNSb2Ns+Df+L1Mju9nq69fwD83niYgbwAACAASURBVPDuf1BKfeZ6jeUfAxHR\n",
+       "p+DuCiRacNcB8CMsFZI3s2gofGwW1CqzBsz3ekgyh6YpCgMwQwMlHXr6gEwWMh+G8XGon4KV0zCb\n",
+       "hXwVJt2QWsHgwQNjDETD9gwStSytiRRjmT6J/V0SUYtmYoTRwEBvRtSSNko65Acgyz0ujeSxBzP0\n",
+       "1zYhdwzGN8k78IGzcSR/9BBs3QYWUDwiYm4q5T8Mz5N8X9f6+fdg+jZoTMQiaoXHYKLN5PILgchl\n",
+       "jNuQ3Q7WnXGqtpiCPiLFp6F9/+XAyY5rJ/O7YOWybFef3vo3ubAX5hNQ1KCZhdOzoJ+B2StIX4kQ\n",
+       "dgFn5rmqWwdIFOnvnqIQgpEQDF/HMGYQ1WdLG2AF0A4h9wQ8W4RoDrQ8qAxUDcHJJrGqPvPfCZi9\n",
+       "AIk+fGkWOAfVL8KlP4FEHRbHNZyxHglvg0BtwwwnUX5IOGjghDB+KUmmk6e2tIE9OaCyF8SG8peg\n",
+       "8hCs9oCKUqr/GpyunwqIsJ2YK/G713kobwRcLtW80YORBKSuUkYVYpVgbfyFx7wedC146DaQWSgp\n",
+       "WBOohDC42rCxAKUpmB92Ed5+HtrLMY/v7DhUPnXV72gVah1I+7EjxYQjOCKA4pwoelESkwAnUlh6\n",
+       "l1ExGIRFKk4NpwzHZmCHo9FUJXqhyYazG+/RIpxZgpsuorIeGXcNZ3Q/ySWNdNmnucNmkGhAT0hL\n",
+       "k2oAjc924dDVE1SOF/ila02eiIzCwYNw28oLnP2cB+9chK9oSm3+9Y9wLl4VXM/MyP1KqT8XEYOY\n",
+       "qfsTEYzE0TM70vCrGbhnWJyfaJDIjjDeNzFSgJkkI3m2IuFCV5E0ttAlRzvMMxGEeIkWZW2VbCri\n",
+       "8BaoAexegzN74MEmnK1B24wzJG53guDiYfIdlwVJEuSzcOIJ/MN5WrMpjKCOaw4ou3koD5D6JvXx\n",
+       "ATiwuOQzON2l6h5Duc+AW4a/VuR/A8Z78P3bYGIG9rTiyHhCg+O/IaJXlArPXY/5fQ4+WYZ3mzDp\n",
+       "wXoVvgTyEfC12P3yMvomNPJw8EPwnjUo1uPnPH07PJkkTjljQaoGg8ZQ5C6IGWzVFBt0uN+D3Ay4\n",
+       "deh+Fz5wDSXIUGLjOhARk9inwcvBr0/h5jK0UxEp3cUwFCYmoZagxoDEOpzpwM88A4//jEVmZ4ac\n",
+       "puNpMN5x6IQWi7kR/H01lm/z6Aygfhoyh6H/KCx9BYI/hBvyKcb6SfbVW2yM9jg1pgiCCM1cRYsK\n",
+       "TFUHXMjfhfv4M7iZJo0vAGWg+hpIa/+04r8G/lKpF9SD38T4W+Bfi5BRijdEABtzM9hBfL1aI87U\n",
+       "FmCtBPUWjF7R1ntpHOwrMoDuWXj2D+EdU/C2ShyweDo8Phu7I7wIYdw0F/HCBbrgxkJ/j2aBjIjY\n",
+       "Q78mHZKHwclDMAonMqBnFUkzRRgoZkKFa/pckDQzyqCowPM0Ik1Y3jtBZU2n3a7z7KiGUgX81hjt\n",
+       "Y32UrsEvb4+7CLQW5UqN2fyA1g2T6I0+sllhc8JGXwzpPBPQ+QxwbB0+8hgcOALrCQg3IXc07vb8\n",
+       "/stMaz528dCueniiF1uNvf64bsGIUupyDS+En4xWMhHJwejvJlG/NMNgXwHf6BAkDbB10uKj6xa0\n",
+       "NZRhofwIyxynf0nnwniDXZMLWFaChXyIz4CkUhRdsEtxMIIGR9ZhcR7Wn4KbLNixBX0rSeV9IY1U\n",
+       "gcDZCc0VKNxMu2njRAnSnVGC/BaRfY6i8pl3obEGzSZUqtD4hIc6uX5FK6fI9D1wZhqYgf3N+Aca\n",
+       "AVEEt25A/V7gugQjSqnzXCU/LFJ8Cp68A+5YiclVvgZPzsZy5LdXoThMw5oRvGMFlm8WkQeUUs0E\n",
+       "eHPw/WfhkMBICFEKVubgRIXKn0OlTry47YSlj8FeeYHA1bPgQgjRck7kffNwdxr0JkyNQGoEtVik\n",
+       "bTUxxjKkNUFwcUODlqMxfjxi7dPw1d+zSNw6wYxvEgQmTujhhR561iZImJzMTWB6G+x+OuIXTsCJ\n",
+       "98LROag3oZgTJjUTlYBwWii6HlObZeqqhG4psF2OZw5Qe86EcAvqwPG3gpBXDhHyxNoiR673WN4I\n",
+       "UIqqCE8Qe9V89nqPR0R2wvaPwM5kbDx3sQR2Eg5sxGKG3/1l2P0s7LgI60U4qqB+JeGyAzRASrBa\n",
+       "ite6bgS7HoL1GREpXNbiUEp1RSYWYvmC/cOM8MBI8LW7Sqwli/AvO9DURb4Ghf3wtnfA4VWwvwDP\n",
+       "/BZkDANxp0E6aEkHX2wSZkhg6NSUgatNkSwJowR0xkt0t3bgPLKGmjwC31mF/QdgfBL6KXBTkFml\n",
+       "71dYX1ynMLGGlME5Ac4nlVJnr5qnLzwG7zsHb7dA78JWBf5SKfVybrsNqAkE8kKTAsB6AfzX3KH3\n",
+       "WngjcEb+BfATIik98uE0pfsOUpu5iUzFRKjT3HmGoNRBU/OElo3uJTE2t3Ane5j2JKmlt9P+5tM8\n",
+       "90+WyO7uMm4pbvAgFcJcD1YNGFhQ3Qbzl+Lr5YlvwqO3wcl9EM1tsbVnG8F6AgoJWGzB+H6wu7i1\n",
+       "cdyNdchn0fwmyfkqqcfg3Ssw3YXH5uCxKaXax178OaoPwNH/Fvaa8Y/T12GzFCu4z22BMXVdpvdl\n",
+       "0b4fnjJg+UicZt0iVkPM3flisSOII/2RkJhD0uzDcR/edi98rwsJK3ZJ1r4Qm3NtXi7niMg5OPs4\n",
+       "OO+AXQH4EsdEK39XgDuOwD23w1oKgqNwiw2JBnRGCJZKVLI6KX2ASA+as2TWUmwUVinuhh3rGVrN\n",
+       "NKWGTzfdprndYIeeI+c0qSR0eilYyiTYuH3AsxHsPwe1AyA7YNuKojXjY5cMUBnsVAKtlsU45dAd\n",
+       "SdNduREuuSAPQHITPvNWIPIj4/eBbyn12naJ/YThr4hLVtc1GBGRNMz/NvxCDyZq8TW6eBjqCZhZ\n",
+       "gHufhJOr8NSt8OQWOEeh9dhlz6wh9FjW/cA3oDkS8+N3NyHhQ3obkOR54j5A7Svw8O9BZQ5GgxRP\n",
+       "HD5AJX0PwTdGoF+FzHfhn5+laMK7T76wcXE3oTQ+QKIGtlWg6rXpWyE3CBRCA+VrtFSLNWWSdfIo\n",
+       "+yYGtXMwPQa5JXjvKGRNUAlo9cFKgL0fumn6GwX6F47G2d6j1zLuG65jXxORB4hr7v0ftBbEBPzi\n",
+       "k/DInXDbRpz92cjBk0WofP7HPnn/CLzmwUjcLvWSL/WmUuojIvIO4GeJlf+u9do/ueLu95RS33tN\n",
+       "BvkKICIFmL91mtAaw3ANtEgQNUJxaZatXc8huk0Q5AkHHn5hCy1MUlqcobzZg1veT3TpEUJzkdEp\n",
+       "jVSlgTvmEg7g8BY8nofWDGy/BBUNWITqElj/PRxZ7HDzqQWO3rGN2vYFBroN6RbYU7CRBieEnCJr\n",
+       "ZigswZ7lWPgK4PAGnH3HMEvwfPZJqfCiiPZJkP8D8iUggPTJeHexVgB//VpzcL0w7OD5ooh8l1iZ\n",
+       "tQX0ILUdNvKw/YrFJBy6ZNIk/ufhJ2C/C7Oz0C5D4QSkNuELVxJyh6nXr8BzR+HcDgh98C8A3hj8\n",
+       "8l2wYsYEWJIgu6D1BGgXwZ+ArTwDq45kPcz6blonx/HdNcYPKKY2LSpuQKAG9KyIaaWTNQIiT8dL\n",
+       "WUxGfSLxMX2Ym4FnJ0BrwaQJjpvFHKRR9QKBlcWK+jRSLoMdZfiLDjy8BTN9qPtwZthV9BZeIUQw\n",
+       "gf+GWOjsLbyAvwP+gwjbleK67JKH2A17EjAxFNqrT8a+dSM9uDQHe2pw4ybI0/Ctp5RqvURVWCnl\n",
+       "iUyvxRLv81fw31oJ2HKAxlXPr4vIx6G8B8wbtuFOfwCeTQxFDyegvx/UGmquh34izp4evRtmNSiW\n",
+       "oaCatCybCymL2UGAUfIgABGLEl1sJmg3CwTdPORHQPdhfQXS2wEX8jVQPXDHYdEHFUAqiLOeay/v\n",
+       "IPzC5wW8IZ3gh6D9DXi6B5fuHsY+Vah8Uim1+MNf++rjNQ9GhqqN9179+FCK+/8CfunlIjil1J+8\n",
+       "tqP7kWCBliggCpLtAfZIBmugoQfjmOUkHapst9YYPx+QX0mzdXCchcY0oS2QiMBtYeYEQ1Kk7HH0\n",
+       "yga1UkTBgLRAD/jePotlfwL+uIO1q8cNKuLQE5Dw1xj5dp2H+2WWbsrgV1OwlQGnCsUI6gH6SAcj\n",
+       "gFH7hSGnAjB04vP8olKYUtFTIsX/BNU74bZlyPpQycJTJai8xLfljYA4mhdzBH4uA4dsypkGD80o\n",
+       "7nsGZjpgG/D0LFSfuiL12hCR/1iHIwXYY0O9PSTlikhp2K59+fiKuCb9fCuviMyOAGZcx0LivzeI\n",
+       "CTbmJJyfAicJgUIZB/G+ngTvcTggmKIYb+sEVY+LIx4JA8ZCD9f06aSS4EAytMmZAek+zHfiltxH\n",
+       "JmHnurCcSzOayDNVsXHSLWojwmYwS3TMg9ORUs8Bz71+Z+CnDr8KLCj15vGheSVQioEInyPOjvzv\n",
+       "13EoVkzRuozQiksKWgBh8oXHUz5YuZc/TPmb8Ojvx8JoMy2oZ+GZAlQ+dy0l4uEm5aSIuLNwJPGC\n",
+       "+jIA49Cw6A3f7/wMzGZg9zo8XgSiiD0nBmwecUjq4JQDCvUQY8QEMcnkPS5a8/T6KUgIqD4MdsHj\n",
+       "a3DfFNACbS+cz4JzBkoDaGzGa1JbRG4CSYDaUEptXDkuidXdDo/Be7bDyJTISgW+o5RauNasDDeo\n",
+       "D4rIw8SGme71zKxezzLN/0LsC/J3wyDu516vFqJ/JBrQb7ZI7pqlUN3CKUR4KQvoE0Y60UWXxS+F\n",
+       "2BnQUkKtdwCvp4NWQ58tk8p5aMUuoWHTmS+QLBuohse6grNpaLXzNNL3EtQOQvmLZA52CEo1Hnuv\n",
+       "z75HYHt7wG/cv8nfeKOcj56A7dsgm4oJSxcWse9rsOP4i0mey0Xor738vLa/Ckd7sHgXJAzoNaD8\n",
+       "l0qpS6/LjP6IEJHiHPzBnWDsgQ0HzO9wMXuSxi0+owvg+tD4LvQevvJ1w8DkQZHUAOZ+AXbvif/n\n",
+       "khJJf1Up+wf10XeaIEEsT6sAJuHcxbg3UCtBowz7p8CehKeTQ0n5VXAi3CqU7Cbbz25j5UAAiT5G\n",
+       "CEYUEtlChgqNrE/P1/FSIY1k7KrjCtRbFvmLORbzLuWkhqgRutVZmke1eFt3DYXct/BKIYIG/I/D\n",
+       "21t4KT4B/I0If6oUP3A3/hpiE5YFDg95XNk6dPXYTiV3xRq1koX6S1pbL2No4vkfof5OSM2DV4bq\n",
+       "51/BOtceqqS9iObpQegzWIczE9Afh1knHt9oFdYbsJaD6kARNOHur8DW9pDaezyinMa62kXFHsDM\n",
+       "OSgugTMBD98BFx+GERd2R+BdgqwDE+fAbcOJOjwG8/9dnClKAcsiUjwK7S9fFmTMwD03ws/fBpVR\n",
+       "WF2D0uPwzwyRTwQ/4LMOX/+6OPP+IFxPAuu/uF7v/Y9BLANu/HWZ4s0jRGPjjC5U6Mx41EdWicpL\n",
+       "8HGlwudFt0Rk8mn4pzNwWJHYngFl4qoCDS3CKjSxcz7pNahswubHLbqP3Ak/fwus+aD1CYo9erMG\n",
+       "xXmf0yPx9e/tx1xGLm7A5zcgtwpvS0PBh2oF/9/A+jvh4ljMLq8U4KgB5Zet+w53BQ+IyPeI64z2\n",
+       "G5lzkIVbb4HEgaE0exa8D8Ez0Jg7RuP/AxZfznNFRGbg0AfhF9fjjBHATQZ89YMisjTM4L0ESqlu\n",
+       "UeTJf4A7b4fVJIQKvBU4dx7OVsFw4Og8JA+AdxqmzkFiCb4NzRZ86zc9JnIbTJhJyrrPIjb7j4ZM\n",
+       "bKRpvyePZ+tsZEYxZyuUx6qMroL9V7DSdJn7OQvp7cdY7tAprTMYXeSG2Q6yr4WdEkkuK+W8tav/\n",
+       "x+GXiUXOvn69B/IGxVFiG4T38DJO3a81lFIbcav+d2+Hg3WwHLjowlYW7mxANROvd6dXIHrZYOTy\n",
+       "sYDPXf24iExDeh9oBvQuACuX10ClVHVU5MxTsP8IrJsQbcUu3oUug/8bHnwHpA6ANQqNbtzAcuMC\n",
+       "NBPw4E1xZfegCTvPRhTXPZ78UIq17Bojc+tkTRtXSrS7Ac6uS7BxJxz9ezCqUNP/f/beOzqu68rT\n",
+       "/XblQhWqgELOIAACBHMmFShRWbYcJGfZbdnu5Ncz73WvsfvNmuk30+3uWT0zr8PMtKdf2/2m3XZb\n",
+       "cpZkS5aVAyVKpJgzGEAEImdUzlVn/jgXUhEEkxgAUvWtxSWhwr2n6ta9d599fvu3IRSA/ii8DfRB\n",
+       "3R8b2hnjOrUGeH0DvNsNHBQRRyPcvRX6nUYWvB78JlBTuhvzd67agblGLAQB6w2DUuljIvLNY9j/\n",
+       "Dw9qhYOURVCjBXB0EWxxiyQj8LbSjIrI/xyk4J/qscV9FBQ2YJck8fQwfdYoWSkkFk8ROhoj8e0i\n",
+       "+FSVXqthN5aWDA1WE3VpB6WpGJ4EjFXCttugfzdw1Ihm384dn4gch7FbwVYB0WMwvUMpNXLxz6VS\n",
+       "wDVvEX2leKCl6iyxmaYGOADuCzd/cy+FJan3AxGAgjS0paG3nQs0gAvAC3sg0Q23FoApCLFx+FFc\n",
+       "qb2g++VMQ1MfLM1AKgDHlFJ9AB6R28twLXZg6/cR2+chHtpNfF2QwiVuMl4vKXM9priJuKOAjDXO\n",
+       "mDVI6BVgCI6d6cfxh1ZGa4IUOE2sHM7glDDOHlh0Al77tIhMztca742KkRX5M+Dff5hNzi6EUigR\n",
+       "vocW+M5LMKIJPAM7u6FrE5gcMP1tSKUhuEwnK6eeh/i+D+IMLeLeAssehNa0dl7vuhu63xWRX88E\n",
+       "JFPw1A64/ySsc4ApAIExeFwp1SEih4C3wfz78EAfVIQs7Gwu4fgaL+OWNLETgzy1KkHlIGTFxBBp\n",
+       "SlJOqsedWGPFpAtThLwpuio7iW2YgGNjsGsM/jlXzyYirbqaqDzH68gELJ3Q3wsHAV8pmJ2zluNr\n",
+       "IeCAehGxqAXeYTsfjFwmSqmTIvINF/zrZdqg64zRrMDyOnx0nxZEHTNeG/aI21NGRciKJZsl5bJh\n",
+       "U/VI2MmIw4z/oIKp/VAeg4mgvjP6eylqKaF+yE4qNs5gM8SdICkYL4KRX+b2SZk1tjPk9I4REdPs\n",
+       "xnI3MnGYCEBZpRG0zWBEJxfxRLA6wDrH92bNgsVx7uPvY5zEL4vIm+gcaWhWr5oscNr49x4iUtIO\n",
+       "ZZ8l8rwpZ3jFBIOvE/zqSgr8JVhMWtdjwkPSOkysKqiV8FnggIh8/Rg8Yqblaxa8oTi+3iw1fWBL\n",
+       "w4owDG4E8sHI5fEw+azIpfA48OciVCl1dsfb64VxHsyljTqff8YlISLlsPQB+PgAOIxzuV3gxc2w\n",
+       "5zjQaew/DjwrIi+jK2+CM9dTI2A5JmL+B3juM+BsbmB05WKSo83EdvvAf4JA+WsEomE4VkzRGhPV\n",
+       "oWqcIQCFORLE5Buj2GMiVngQ3knCG3M4XVu0pGM2tozWjwAQCYEpo32U3guwp8GR0hmueV+GuRj5\n",
+       "YOSDUVULlRtzbvxOSK+BiQG4AyMYAciSCmfI2JzYsk4cQROismTFRNZagMls1wUahSE4eBTu8IE7\n",
+       "haXQRtZiQaEwnQTvbnDEwOvBaNI0g2HEVYReYokYj3mh+B6oXw1KiRTvB//rM9UW2mjO1AYlKyCb\n",
+       "hslD6LbyC3qWOAW7D8PaCrAX6ZsJ3VDSpatr5hRpvc/0Kei5TXeHn1kBzgI9Dgh2Xsr+jYvE5Vji\n",
+       "u4u1x/xZlEDUjPgKsYwOYHFMYrNB1lKGJWzWF5clwE5jn0kROZmhcW+SdYb1td8D/XUQ8YHJKyLP\n",
+       "5R1WLw0RLMBfAP8unxW5MEoxLcJP0PYLfzbf47m62FqgJft+IAJa97E4At0rMIKRGYygZE7tnVKZ\n",
+       "kyLyV6XwjTZIjePwnKJsi4CqJNRXTTzSB0HBbBas6RkNiiDKiTkSoCCehmNJ2D67nYXBoL7VLKqE\n",
+       "eBUoCxQMwYAV/C8b4wv4RI7sh+XroN8EJMC8F6om4Zm5ru26dJpGdDuNvhnR/3yRD0Y+GC4v54q6\n",
+       "iiFmBp9258MC2MB8eIKJzXZKsiksVhOoONOmKJlgAwQ6dYHGpFJq3CzykxD8mSLckGDClMAZS+M+\n",
+       "rdvfqAxMOzBKVgFECjZB/X1QbIMwIsUHwP86VP8OrKmEwiJIlsPUBjhyi4j8KZAB3xe0FXBzENJm\n",
+       "OLkOTr4N/OZ6fYEfBKVUv03k59PwW2XgyEJ4HAZHdD+diy0zdcGpw2BdCS1+/VBnEZw+yBVmFkTE\n",
+       "BLY1UHIbmN0Q6YDp7cD0+CzxK3pnFUHMpv1YGxw0iAurWJDUEOHSEN0I0TaMYMRgHAZF/+RGKyG4\n",
+       "EUqzEHPqbsG2PxCRfwGmzpc1y/Mev402qlnQv/UFxP8E3hDhPy/05nmXh0nOdR8FfTl+vyxWn9tY\n",
+       "geRFJmsFWSwbjlK8TNFssuMKWzEPjBCsj3LMEiK0z0ncFCPhGSJbXoQ56MQUTJAxxwlIAg6drzRf\n",
+       "VxEWxOHdT0N7AApj0N0OR85A9D0PqWl49l0wdcGyIsiOA2Pwehx2z96mTaS9ET7XBFYzqF7AKfKb\n",
+       "mFLvXtLXdw3IByOXgIhYrLC2DDahc/qd/TpPftZNphdKA5itUP0nJrKtboK+OkIjEyTCZxhxlYDN\n",
+       "gSRjmP0erJMTRLJDWv8xCuCC2mVw2kS49zij6zy0BIrJWvo5dEuSqV4Yf3rmpitiXgZrHtFW6IVJ\n",
+       "7aS3dy1sb4OWCnA3Q0UWCiO6yiSzBXZ9GdI7oX0p3Nv7/idcNAHJW0XkoFJqQXmM5CIilVVwTwUk\n",
+       "s2CbBPMU7FKzmtjNhRYgy89hx1HoXANKwcRvIHtcKZUVkSIP3FqodR/hMdiRhSOXli3yfBRW3Aar\n",
+       "xsDth56VsGsZ9H1nDHa+BVs26BaYyS7wvYXvljCuERO+hhKcJhsQJmPL4EkIlZkyurca69aTxtgH\n",
+       "RXyH4c21ULQUWqIwZYfhELSfhFO3g2kDSIeIdw8EX1/glWnzguG2+ufAQ/msyKWhFMdFOAR8Hm2G\n",
+       "dpMQ74bTZliW02YiC3T4nEwU1Yp8M4apUvA5FF4/xMdFzC/N1SZDTz4rvhzC7rFSLV4apyFjThFY\n",
+       "5KSkc5S6YkVX0zRVZiuJzDjuVBSKLIRLI4yHUky/GYYn5xqliNi98JVaCh+0M+2fZIc3jJs0TTug\n",
+       "BZhoxFjCMrIqPxGRYsANTCulwnNs09sMX/gYTM40JV0Llufh4yIyqM7v2npNyQcjF0H0kf3cKli+\n",
+       "FMYtkD0Nm3aD91Vo3qA9iOM9ULIdy4oQ64agNVzOWKmHVCzGyco1DO88g3PVSaw2sAULSZhMhPoj\n",
+       "8MsIvGXsx1SnnT4HCyBdQb//KP62IHafi6QnSegZpbI5EW7F3bB+XAcioOvvN/ZDx71gN0Ol0RES\n",
+       "dMFHy6QWaMWT0DQrArcoaMrAqUaMSpWFhojYauAr94NqMJbHImB9CT4lIhOXIuI0sgZHyOnUaWy7\n",
+       "qBa+vkHX/U1GwXUEHj2mtbHnGCnNem8JtG+Gu3vfd2NcNgLZaghsDhB4aR+Eu2GLDRyTiC3Asg7I\n",
+       "jMShSmG2xbCkTWQsEFVZHGMezD4bmXXAy+/vafpp2JGExrUwlgXXINQNac3Ihqy2xm4eh0O3wp5y\n",
+       "EfmXhb7sNg/8R+Alpdg/3wO5wfg74C9FePxmCeKUUsMi7tfhubuhNQmWDJwo9nGk9HZotGDyTVC6\n",
+       "ZAJHpovqzgTL/bDjKyLm7yuVmb2s2wCNlSksQyEcdQ6Sdhu2RAYn4wQWxSjrgtCKJB97Ksux9SbG\n",
+       "l0aIFGeJZmD0ZUj/m/Nldr1w/3LY5KYk4KBoWqEGBwj5jhK1J2kbhK4VzNLTGN5J03NtD8AKbW3a\n",
+       "luC9CYsT0u2Q6IeVMD9Gd/lg5OLUN8GyO6B3Jqm3EfqTYHoLOvug1AwlERiZon4IHjjm5OBWL6aI\n",
+       "GXM2RY39NGOti4gEi6B8FNPkBNntQfj7WbNXs1mrLNMAzTDZTGgHhHgGaqfPWUqwlEHFwNmPmQBn\n",
+       "RJsEemZpKKIm8MRgsqGAfcvNdETD1Awp2od0hUkGyCzkFH9TMxQ2QN/MAy5IrYDwEGzgCpZaCmHT\n",
+       "eihYZQRiRZAoh/A03CYiu5RSUxd4ezP47LpPTi51k+BeopT/eeAtEXkbsELhVqjZCIF4kmRfGMrN\n",
+       "ZGxp4mkHI6l6olYPmWoFjzlFnHZIOaHcBX0RUsd1F+AHjO9gx3JoRncwnnbq47i5D8ZaYLKWebqo\n",
+       "LERE2AB8GX2xzXN5vAj8F+Cj3ETLW0qFXxORTuhZCmarnWnPLbBkKYwepHB1MZUjRUg2wnDjGZZ1\n",
+       "w6ZpGL+PWXoSwAMlChiP4hwYwuG0EvFkMWXjFI6DfxKKLeCOpmneZyMQqSFQW4rFFMPcFCD9hyLy\n",
+       "uNJd0t9DRBxNsH4xDA+jKgEEoRpnsJ/ONVFGaiAwICK7uAzNnwUcjjlkBk5IWuEC5nHXlnwwchEc\n",
+       "UFsPmdmri3UQKILUoFJ/AyAibdBg/GAGq9NMFNnJmtKEiqtQahNsH4boarK7u6Bopxa6vjfzVUql\n",
+       "ykW6eqCqWVv/AjAFzgktWp04ewTJAd3UyJ2AkAN8US2cTozDQI2uwa8yMiCTThhIQ9LdSmJxKSOV\n",
+       "DkonpzhS0cWZhhB37IVTYtifL1Scc50lHohZofhKNuyB9vpZttAWULWgTkAVs54DEBFfGTzcDGuS\n",
+       "dKwKMV3vZ/1BaDKOXdAJ6feOo6HAT4iYh2DMDu39Wd6dzlBi81AcynCgoRUVchGLuGDSB9Md8P/4\n",
+       "YO9y6B6BpkOQ7KMvBf1ebYEfLwFfDCa84MxJH1cqoIR8MAKACHa0ide/UYqxi70+z9koRVaE/wT8\n",
+       "qQjP3yzZEQCjBL8PoEbkDxpgKgIFunDGlBW0M+cZxr2wagBsdXNUKE7BiMCtg3CoLcXa6RSLe2DS\n",
+       "C70T0B2GAq+PJ++z4S+vIFxdgXvYjHvKgUx7dIb3iyLybWPJWDB87+1gLoPxEfxZRaEZRKWYWFRG\n",
+       "pKSQmKok3jcIv31Cl1+/cimfOQZnzoBlOWerElvtLgAAIABJREFUZs6AZxI6zve+a00+GLkISYhG\n",
+       "5lA6RcGR5KyW4yGYNguHa6sZ9S7BLlZIjJC2NJJJD0FNFMIlEFoN0ydgs4i8lis4HIeX34HfjUJV\n",
+       "FQSmwXUQCowGaLNqxEe3wyt/CTVuKErCcRME/TD1U4i8CW98A+qtesYei4B1uJix5k/Bs30EpieR\n",
+       "1kq82Qwji47yZEIx9V11doOphcbYAMg6zj4YA+ANwIHzvelSSEMoDCW+WZVKUa1mO0d7ISLWKvja\n",
+       "neBshqOHiJeHibtP8s6tw7jeAG8MDvtg9Nfn7i17Co5Pgs8LLa8F2PfJFMnaRUzbnCRjDhJTJ7Co\n",
+       "CYruseGydBPfbCKg7iB+tBB8LzAxNc3LhdBWDyEbnCoDVx8055QVTwln/zY/7HwbOMEC6EJ7A/M0\n",
+       "Wm/zADpTctORhlAEvOUQUyRNWg4oxkXBlobxAkhPz2GV0A993VC5CGp3wZ7VYK3Wq/qTu2H05Tps\n",
+       "f7mYurifjGMpDr8i7h0hVFxP/OdVMFUL9SNQKyIxqPgslFeBRXo5s+44U/trCR4YoH9tCofXQajM\n",
+       "gko0Eju8BE4sB1MM7jQ0fxfVzwF9XXDodVizBKYskDkNJcegN6vPk3khH4xchCx0noJkC7jKDS+L\n",
+       "CFiPgd3PWWvPwzDQ42Xqs424ziTILkqScJlJmatQEz1Q44BXCiCeBcxaoW0mp/5bOw7Kd6ZgUwE0\n",
+       "JqFzAt6dMdA6m+I2aByFiiSY3DpFn1CQmILMOzCUgfj9UAZkonDGtIbwMQdkW8l0+JkcmGaq1IFy\n",
+       "jsDpMcPAa6GilBr0iRzeDqtWwKgDUl1QfgCiUe0W+YEZh51H4csVEJrpQ3FGWzUHYM5uri2LoXiJ\n",
+       "MaNaTGzXaYY31mKvmuCltSmkG4ZfYI4TWymVEJHvw+v3g3dFmuzLQUZqCkg2OqDvDGbGaKvxUJwu\n",
+       "wByNIalhAo1v05HdSuKoF2qn6f8rGFwE5iYI3wf3ndYXyyxwogJ6xoDeK/lObhZE+DqwBdh0M83o\n",
+       "rzdGduTP0dqRl+fRIv6aMQa7jsCy+yHgIdwfxF+XxhEdxpKACj+8Ww0T57i4KqWUiPwEdtwD3vVg\n",
+       "PgF+P0y9Bhwqgc9tJLk7yVDzJBSaMKetpJSPRMgKSYACHfm4ofrzcI8FmvsBYpywbuetO7cy9EYr\n",
+       "gdc7CHxkAqYbYFu7biuOFbKNQAfUA5ci5lci8tQObeu9UXQhwI4EHPgg5nFXi3wwchGUUmEReeLX\n",
+       "8Gg9+KwgfZAZhF8ppQZyXqdE5Gd2LA+ZKbRlMY8miRRFSdj9ei0ulEJMHVhXJ0gmwnBq9oE3SoKz\n",
+       "ftg2PYcKOud1bmhZB3cc1KLJpBUsaai3wdgWGNihVOhFEdkJfWXoGX+JBT43s40iCBahgj3gY45l\n",
+       "iIXINDy1E/pOwa0C7jDs9+va/OCVbDcLxzvg1WndYEpFwXQG/MPwxFyuhVYoKslZc/VCeA2xN04R\n",
+       "q+vC75+C786o2I1aficQmNmWUsoP/FxEfoXOvvjC8H8ugv59FD/ooiaYIVI/SdqTwDbqoio8xHBj\n",
+       "gJHOrA6WokplDgOHRcwnIPYwlBRDwgST3UbV1ULW/1wXRPgt4E+BrUrlM0VXgV+guxx/Fb3sdVOh\n",
+       "lOp0i7wQhPtqSE1NMuAbpMA3TfUReKYYxn8NyYPneW8MeE5EXgLMuXrAWpHKGhgrItY3DUSgtRJT\n",
+       "OA3EwBWHwKAORpyw2AvNOZPPJV0BAvZXmfIWE08loWc1HFw3S7eSBtQs99XZiEiRMVa/cX1YUI02\n",
+       "5y0YEZHHgN8B7MD/r5RasD9upVS3iPx1v448zehWzueYTCmlQtUir1cx7SmCuBuiJ6DyFGwFS52V\n",
+       "ylAck6mbmH2c9ISIuGa2YxZpq4FP+MATB0pFOibh13OVZgEu8ChdBQNgN5TYRQmwlfN+B8YAhkGp\n",
+       "iAROa8W0Y0ZFnQY5Ad5x+NXV/cauDcbNfCdne3Bcje0q4DUR2dupNSIJtAnQnDf0FEyO6t/Be5h0\n",
+       "EJNJwH4jgHUUw4PNsLYAxA8xu8gLCaXeW1LKCUaHvSJ7XoPb41i9WTJFMeyeCNasUFQwSqjNhX3y\n",
+       "CNQF4c2z3V8zJ0Tkr2GgBEipnC7EH1ZEEOD/Bv4IuE+pcwSHeT4AhkX8HwHPiPCkUlzRJGAhElbq\n",
+       "LRE5eBqqIZOE0CScdAD+OZxRz2Gu1hox6BuD5TEotuIp7tJthh0JUvY46bUHSDmG4TkQExTPkb2r\n",
+       "HYhQNBlWw98XkTY/fDXXVsIP9k4diMxp/CgilRXwSLuuDqRSZHAUfnkprUKuJ/OZGfmxUuqHhqnM\n",
+       "bhZ4pG3cOE5f7HWj8MoB+MrtELFApgYmD2Czj1HebcXjj+GdirG8C4a9sO1WdKO6+qXw2N0wXgr9\n",
+       "GZAj0PYOFIrI/5pDJe2HyQxELbq/ygwjbohNYKT+Zo0/bBP52a/h84vBYgfVDXJG97c5p3b+w0hu\n",
+       "8HYRurtg6CDULIchE6geKDkE6YixdOeDhzfBsnUwYAHlB/s2+JxZJJpR6pzvOwjP7oEeK/HbBIs9\n",
+       "SU2HGbxOsGQR+wTx+ig86Z+jT4gRnOSFmYAIi9EakTJgs1J5Ee/VRCn2iPAsutz3a/M9nmuBkWnN\n",
+       "DbSuyJl0CnbuhQ3VuNZ7qZs0kw2cYKJxCnsoTkE2zNBhSL0FapE2N1w9awsjHgjNSAJOnYDtUbht\n",
+       "EWSTIKch06+NH8/J/omIux5+5y7ILDKWlXvA9wb8tiGYPW8G/nozn117Z26idi7aV+TGIaPUCavI\n",
+       "E+PwgBvqwuAcpfZkhi/ueD+TAToBcmA98Eop3LoGoqUQBRBQK2C4D+rHoY6cclaY0R24X4c3Pwob\n",
+       "R6AkBkOFsLMURs/rL5FU6riI/G03NAtYFfQvtOj4RkAplRaRH74J9x+ClSYwhbRQ4wWllF9ESpbC\n",
+       "8g3QNyO2LYLEBpgchrtFpHO2CM5Q0R9LYjsI6UbwhNJ4hkKMF0NvEdgGY3o2E73+n3jhI4IL+BPg\n",
+       "68D/C/ydUucG5XmuCn8MHBDh00rx1HwPZqGjlBoRkZdG8awqJOZJY8oEWLIrwS3HIW6FJ0uVGlQi\n",
+       "cga6e2FXA6wc1r1nOsvhUAIi+4xtKeB5Edl/DGqVXrbtOt9StR2WLQXHopzKukUwNQp1w7CUOdxZ\n",
+       "54t51YwY9uS/B/yH+RzH1Sal1DER6UBrBSqg7GtnByIAWcHQHdihshRCMbAcgsUBaAIsCZ3hWMSs\n",
+       "YEQTeRsORGHgLrCWQWIERn8wl0NgLkb0POe6Z55Lx/genxKRX6PXiHMrcTy+WT1pEmDug2oLrKqH\n",
+       "6nKRU+PwqlIqtwGZAsc4FI3C3iWQLQKJQMU+8KKTZnlyMZZkHgb+B7ADWKXUwjTuu1lQirAIXwSe\n",
+       "F+GMUixo8fsCYTBE8/4Qm0d1w067cS5HbZBxloh8pg5WKiZNk7yajHGoWt8zIsdh8pXZfWOMSeRF\n",
+       "J5IuKPfNURFYDIlCqLhKn+2qcM2DERGp4NySuhGl1KNKqb8Qkf+KXq9/anbKSES+lfPnNqXUtms7\n",
+       "2quHEcFGRWQABmPa96M8JwN0vAL8bwDEYGAUlgxDUzlUrIGAFTJ7tZnEQyJyZLbxlrH9fcA+ETHn\n",
+       "BYvzw3nU5/7xWR0034U1LmjeDCPLoL8L6nfC74nId2bK8bRlve8gqJVw7+va4t+i4EwRTMQ5x2vm\n",
+       "w40IjcDfo4P3ryrFG/M7og8PxnLN7wK/FuERpZi3niY3CP3Ql4S1ZnDnXDOOVJUy5roTrEt0CaQc\n",
+       "I1W1i6HhMfinS9GpXIgwDI3DxtZZj0+APQxDV7Ltq801D0aMvit3zX5cRGzGhTyFzhDIHO/91rUe\n",
+       "37VGKZUSkZ/Bi49BezG40zBgh1N9ENkBMAXv7IAtTdCwTP8gTWNQ7IXu9RANaIfRly6wj3wgsoBQ\n",
+       "Sk0Xiex7B9avh8EpcGVgUSEkiuGYCVgM4wndUXMzkONHMv0q7KiDqXqoTMC0DY7HYeSJvL27RgQr\n",
+       "8A20SPVvgU/ll2SuP0rxjAhpdEDyN8B/zx+HudFL6+afwW++BO0CrjT024VDiTWozAoYBl15sA4G\n",
+       "pqFhTAfZx69kv0no6IC7fFDeapT9noKyDggk59HgbC7mc5nm34vIVrRm5KdziW9uFoxqnP8Og0vB\n",
+       "7oFgH9CZU+o5LCIv1UPrCSjOgrJDzyI47gd7oV6qOS8iUlIAyx3g8UNXVm/7Yl1s81xDAvDcbgid\n",
+       "hlvTUN0AWS/sLM/JblSD360vOO+hlAqKyHdhrBU8NRCZhNTx8zS8qnFDuwVsfi1C7pnDkOmmQoT1\n",
+       "wPfQF++NSs1dQZDn+qAUvxFhE1rQ+kcifA9tkHYw7+tyNkplTonI/4DBdrAVQrC3BFZXz3F9r4Jk\n",
+       "oa7su6JgRCkVE5HvvwYP7oElAEE4MQEvzlpafg8RsQAtxdCSgHAUOmZb1V8L5lPA+udoR78PBcaa\n",
+       "34VKUjsV7GuEEQtkLIb51jT44hew9TaLLFkMX1wKqgBSA7D5BPQZjdLynVvnCSMYfFVEtgGtNfDF\n",
+       "ShjNfc00FCTmMFUzUrPnNPTLxSVy5wq4vxWSFl0ZtaUT9orIL2/GgESEAuAv0P1lvgn8KH+zWxgY\n",
+       "AeHHRViOrrD5BWAT4SngJ8Ce/LHSGKX3O2b+dovU+LWQ9Cz8YImB/yrtcxL4kYg4jL/Pe18QEVsJ\n",
+       "fHEJLK6HWBwsHXCvXeQXCaWuqSfJOTbneeaNnm6Y7ANPTiDiOAzOKdg11xtExF4Hn/0oTKyBwTYY\n",
+       "uwfOrIP6Ath0fYefZy6M7NeJPhg9rttcABAA+yEonPgAnikiUt4I9z0EAytguB1GPgI9S2A9MHt5\n",
+       "+IZGBLMIX0G72VYBy5XiifzNbeGhFEeV4pvAYuBBdHnsE8AREb4qgm1eB7gAicChY6BGwT3zWB8U\n",
+       "nYR4+ipbLiil4heboFph9WpovQ/OtMHYKhj6GIzUwCMi4rqa45lN3oF1gTBTLvoqfOYw1NlBjUHC\n",
+       "6EtzvsxIXS3YimeppVth7AisA9689iPPczGMst0nXodPH4UGpz62yRH4uVLqsrsNW6GpBZQjp5WA\n",
+       "oUMJnoYVzGN/iauFCA7gi2htSBD4glLvzyjzLFyMQLED+DMRvgXci9b3/CcR1iiVF2LPoJSaFJEf\n",
+       "xuAz5VCbBcZgynB/vu6WF2WwbvEsoXwhJBeBpQcauIY6k3wwsoAw0mn/KCJlaC3TeF77cXNgpGf/\n",
+       "SURK0Tqp8avdB0LNIQK/kRDBAtwJfBb4FLAXvSTzcj4TcmNiHLdXgFdEWJIPRM7F0BT+7aDOnGaB\n",
+       "sQUqVr+mY8oHIwsQdWmdFwH6+yE5Bc7cjrMnoWwKXr5Gw8tzBSilrvhinIKuTpClYJ7JjmSBTr3s\n",
+       "c/iKBzl//Ft0EPILtHtqXpx6E6HUjZ+xu1YYFZHDF33hNWYc9p2ChyvhPcF8CGw92m5+Dr+rq4cs\n",
+       "zAAMREQppW7omd71wCzS1qwFR+KGZD84T0LvODx+owlY88f80nGLbFkED7ZBygzZbnB2wp6AbuB4\n",
+       "QwlYZ467CKabsRtsnrnJn+8LD0PA+mgrtDVALAaWE2DqgSevhoD1Qsd8QQcj8z2GPHny5MmTJ8/V\n",
+       "43zByIJepslHzZeHiIgN1pTBvU4ojEN4HF5LwD6llHKK3Ho7PHR7TjlpCGxPQ2kP/PV8CKZmjX/B\n",
+       "zpREpHIZ/F+f0I517y2NvAyL3oV/UUqdyHmt2GC1cRw8s4/DvH2IBcpCPu4Xwyay8Q54eAv0zjwW\n",
+       "AevPYHkC+gqhIA7RCdgWh103WtbqWnIjH/c8H4wLJRkWdDCS5/Kww/oV8OlbYLgE/FPgfBc+fUS3\n",
+       "u9/lg7Wts5TSSTB7dYfTDSLyZv5mOTc2aGqGjH1WBUuLNjY7q4LFDuuM4zBSAv2zj8OVjkVEzECt\n",
+       "HhZD8x1EfpgphTUtMJn7WABK62GFG4KbocsP9t3wicPggCu3rBeRYvQ5G0Ef//w5m+eGJx+M3CSI\n",
+       "iLkW7r0dBosgAeCD2BYYHIZ7RWRfFaiZiosMyC5YGocWL3iWgWkclonIj5RSV8Vs52ZCna9lAYjK\n",
+       "UZkbx+G+22HofMchp2P1ZSMi1dXwpTrw2EENAE6R52NK5XuDzA9nRQIKCMBSn+6AFgXdsXkL9A/B\n",
+       "nSLy7vmcLy+GiJi98FAbbKyCrB9Mg9AvIj85X9fWPHluFObN9ExElonIOyLyloh8Z77GcRPhKgTn\n",
+       "zA1whkJIevQM2j0Be05CKcBhqLNB62YIVcD0J+DoXVBcDp8XkXzqdBYzFSyxnAA+A9IJ7smzK1gK\n",
+       "3Oc/Dnb9vx8MEbHVwmMfAT4C/XfDwGdgZAl8UkSaLrqBPFedSdg3c04BpMCSBM8UxBtyMiZOSPv0\n",
+       "9dbzQfflgI2rYPPnoH8rDDwMfXdCRSl8+so+RZ488898ZkZOKqVuAxCRfxaRNUqpA/M4nhudWBRS\n",
+       "EbA69X9tDkinwRTRs/poCg4cgvYotMVhzUow9YG7GPbYIdUOYyegfgwqWQBlZgsJpdS4S+SF5+Aj\n",
+       "rZAxQ7YL7N3wLtCZ89JkCCQINg/vNw2LgSWil3iuZEmluRncdTkldgWQXg6hAdgI+VLY600SDh+B\n",
+       "9ji0N0AiBpZjYGuHg4U5xz8FpmkdjHzgppYlcMdaGDbnZOKWw8hxaBKR0qtRNp4nz3wxn71pclPV\n",
+       "Tq6SD/+HFaVUqlBk+wvwpULwWcGehGwQghPwgxmDLRF5YhKaqqBWwWgNDBYa6WQAt77QOebtgyxg\n",
+       "Ikq9IyJdPbDEBJawDkL6jLpUswtuq4MtZqh/Hm5tg51roC8B5nehdgJeu0KjM4d7jgfdELeC9wq2\n",
+       "m+cDYnTl/vEuaDoBTQmIZGF7OdwTBUsBpP1gfwW2JCHdAn9UJTI0As8rpc7pS3Q+RETqwO2B6dnP\n",
+       "Gees86p+sDx5rjPzqhkRkU8Afwns/SC22HnOJgJjaXCVgcursyPmNJgVVNpFblOQQnf07SwVeT4N\n",
+       "LbmBSBzMw/rCds07NN6oKKVGgJHZjxfC3avh7k0w6IQdx2HlcbjzCBxRMDYB28Kw7Qp3P9IPso6z\n",
+       "11cHoCgA+3NfKCIuE7TawBWHIaA3X8lxbTC+19PGP0REjkB8BO72ai3W6maIPgTvFECqD4q2w2+L\n",
+       "yD8opUbPt10RKbFCi2jhc08CTvdCdVPO8k8ULKM623KpRol58ixI5jUYUUo9CzwrIt8WkfuUUq/k\n",
+       "Pi8i38r5c5tSatv1HN+NRiXcfQfsK9VmNc5aiNuhPQhfb4K3zZDuBOwiTydh2y5oU1BRB1MBcByE\n",
+       "0hHdWjpfnXEZiEhBE9x+O/TNVNssh0M+6P0V2Mfgbz+oaDEXpdRwsciBbbpaZ9QB6W4oPQChKOzL\n",
+       "GU9jI3y5FewuyA6AuQtOiMjPrrYFfZ5zMapbtovIbqB9NTjuga6Z5+vBvwbsE7AZeGaubThE1rfC\n",
+       "J1sBC6guMJ+GjnfAkYLyOpiaAtcB8I3Br280g8M8ZyOCFfhddGXeL5S68qqrG415C0ZExJZzYQzC\n",
+       "uR0dlVLfuq6DuoEREVMDVFYbegIbhEag3ArNLeCvgOkG8LeD7Rn4VCf8t274rh+2FMDiDPhH4UdZ\n",
+       "ODbfn+UGxOsDsc/SA1RDwAX1aCvlq4IffrkT+k7DZgFXCHYH4R2lVAhARKy18OhHIFIBowCrgO2w\n",
+       "5B3d1TffbO46oZRKiEi2Kkc7MkMFBAv0b+McRKSkDR7+BAy7dDaT5SAvwPJ98IxfN1tsSsPkGDyX\n",
+       "Uer4tf4sea4dIpjRLRA8wHPA4yL8R6X4/vyO7Poyn5mRB0XkG+hyyR7ghXkcyw2PUipbIzI5Bq5y\n",
+       "iGTANAorCsAzCqrSEL25IbkYOA1NWaX2AU/O78hvCkLTQBrEkiMunARnUmuh0qBvMg5YYgFHWP/m\n",
+       "L3vpxOhhsdv4Nxe1dVBQMcv7YimMHYdN5IOR601wYlZJ+BR4T0NrWJfl1iilBnOft0LLYlAzgQjo\n",
+       "7EgbhHuhekKpn12nsee5PvwxUATcrxRJEX4DvC3Czg9TP5/5FLA+Czw7X/u/GRmD13bDo5thfBzW\n",
+       "WqA1AF4HTJ6C2zOwcxFMWQCL7gqc5yqglAoXiezZCbdshH47ZMJg2wWV4/ALpZSyi6xqhc+0gbJD\n",
+       "5gzcfRqOiMiTV+I7Mgdm8xwPWiBryguT54O+MzB4GKqXwXAftAZh5QTY1+lA5V8VirwZhldmzMsE\n",
+       "LLlB7QwWyEr+vL2pEKEa3SByrVI6g6YUJ0X4z8BfAZ+Yz/FdT/KmZzcBIlIANAHmw7BtEL5Qr1PA\n",
+       "QTekt8KpMFj3wvpKeK0bTCk9M89zlQjAi3sh0wObXGAKQGIMnknCARFxN8EjH4fRmXLPpcAbsGqn\n",
+       "dm49eKX7F5EqoApQfaBCYMstLT0NZX5460r3k+fCiIgLfS5agH6l1ISIPPEmfPwAbHTBukIYb4E3\n",
+       "22AsBabnYOshOInRpiEJPV1gXj4r09al/UuOzM8ny3ON+HfAD5RidmXVd4BvirBWqbPF6Tcr+WDk\n",
+       "Bsci0twIv9UMNjuoM8AIuGrhnSKITsKaM1BbCtMW8D4Ly/rh2Qup+PNcGiJiARrRZZWjfqWeF5E3\n",
+       "gAK0Dmrm+UVNYMkNDkxAG0x1wjquIBgREXMRfHIlrKsDFQFOgvdpKFoPEy5IDIL7CEyEtSdKnmuE\n",
+       "WaRtETzaDFYrqF7ArX8Pr08o9eMCkeAdkFgF/REonIKiYvC3QqIH2jGCEaXUkFfknRdhSyuELdrT\n",
+       "xnMcjmXh1PX4LEbLgUb0b3ncqCLLcxURoRT4MrBk9nNKERfh/wP+APi96z22+SAfjNzAiIizAb70\n",
+       "MQiUQjSuZ8PrTLApCK1pcNnAn4boaagZ17qBnwLvzPfYb3REpLwavtwIxYXall2KRPYAzymlYk6R\n",
+       "zVXwYDWYRqA8DI0BmPJCeGYbJm1Gd0UuyFZYswI23AU9MxtqhcJnwfcyHHDom15nCo5ejYqePHMj\n",
+       "IoVN8OjHYaoY4gCrwfwS3LtfN9HrMkNcge8MLHWBpEBGIJSEXpMu332PILywF072wCoT2CbhSFYb\n",
+       "RV7NJb3zfZaSSv3bLvWCGgSTT+TgNPzyeuz/Q8RXgWeV4nwTwx8AHSJ8QylC121U80Q+GLlBMHwj\n",
+       "HiiCdQKRALwGxFvAVmp4hfTBSh+UtsD4KNRuhjNjUCAwVQKTh7QV+cF8Y60rQ0RMlfDF+8HaaFQv\n",
+       "ZUC2wS3vwpCI+NfAJ++D/jhYj0DxMNTvgE+3w7ONhnHVaSiZPM/SiYg4LdDmgJIojGS1P8w5VRml\n",
+       "sGk5jOdGNNUQWgTFO+GYX6m8K+t1wATNi8EyE4gA2CHTDuE+WAt0hSE5BkvXwZmZyqtpcL0K6/3w\n",
+       "vdztGedoFzklwXMhIhUOWGwCcxS6lFIDV/I5RETK4Qv36PYSU93QWALeLDSHdSCdLzS4CohgAr4O\n",
+       "PHa+1yjFsAhvAY8AP7xeY5sv8sHIDYCIVJfDf1kMLQ0QTWob9zs64IDFaNQ1CuVx/fxoSDfRyh4B\n",
+       "bxkkzkB9DLrKoLMfFpFfd74gRm+eGvTyy5hSKjDrJTW1UNqYY8tuBrUCRrvg1hRMrYJAGOyHYEsj\n",
+       "WGtgeBSaj8KnemFXGsLH4VQKDs2x//Ja+NoSKCyG1BhYT8K4iPwgdywiYq6EqhikkhC35ZQQ27XW\n",
+       "4IYRO4qIFag2/hxSSqUu9PqFhoDNAuYxKBPIFoPfors8p8yGO2oZNDjg1H6oqIWU6IyaLQlTzCFY\n",
+       "vRgukduWwkcXQ9oEdMMDXpG3g/DCFUw4KqqgygyRDrirGbIeSEyAOwrfEJGjSqn+D7jtPO9zJxDj\n",
+       "4kunT5EPRm5+RKQGfBvBXq6r5kJ7lFJT8z2uXERESuB3WqH2LuiZ6UtRDkVxWHoKAoXQaIVyO9T2\n",
+       "QNUZKPNAKKCDFo8fAvfAtmEo2m/4uYiIDV1dEc47c76PiBSXwxdroaoQskMgHpHtoZxqB8DunOPm\n",
+       "4YKkCbxWUIUQOwZL28Bcr8t7/WZdSTNxAEoH4Z+AjrnS3hXw6bvB1GQEO8uAEqjaBvej/QgQkbpq\n",
+       "+IITGgegaQpSaWzTdkx+F/HhPj3zHpy97YVKHfxxNTgFZBDiZpGfZZQ6Pd/julQUFHTBraUQAkwD\n",
+       "4Ehjnx4iaxsn9biImGugaDkcnYS+PigRoAyGs+DsvEw7dxGpWAof+bh2/E0DLNNeJFv2aVH0B82I\n",
+       "2R2Q7YZVayHuMzI9XkhOQEUcPoYWV+a5Mh4FHlfqokHoc8Dfi+BS6or6Wi14PrTBiIh5CbR/GVbF\n",
+       "oTgCI7fAwY0i8r8WmFir3A0NDRCbCUSMKW/aB3VnoGoYUsthPALpAJTEgTZILdEujfa3wJIFOaP9\n",
+       "DsYKRR6qhk+asVVFIWIR+WkGnvmwL9+IiFTAo3dB8RIjEEiB6XW4a4+2255p5DjcD6YDUJeEAieE\n",
+       "W2C0G0rDcDALqQHYGIXqGgiAPmYpSLbAnlHwDsL0XIGIiPiaoaUHh/UQ9qUeEv6lxHuXwsh+WCEi\n",
+       "vwKsDfCVFeAcB8cA1JdhLjBhivnxnRoi0TpA9BVtxHtj8AmIlRneKBNQ8AJ8WUT+7npNDnRwbl0J\n",
+       "JatApWF8L2SPK6WyRqasvgAWpSGZ1NqNyZz3Vi6Beyrg+Bg0FCOlZgo8fUhDJ5VHs5Suhw5riGDJ\n",
+       "XthaBOE0ROvgcDOMH4IGLrMFg0N7kWScOdkwC6hWiHRrF88PGoyMDoK1SRv5vdd4LwCFpdDjgWoR\n",
+       "cSulwhfaSJ7zY7itPoI2IbwgSjElwi7gAeDpaz22+eRDGYxopXjtI3DvGJQYF+yKMBSUQfg+4PF5\n",
+       "HeDZiAmyacM4KQsMQH0CKuzgqYVQBEZ3QrEZUmWQXg9j3TrFXxSCTANMvg3rR+DHHtjQhPV3Syk3\n",
+       "OfCEYiRdXUz9215ilcB35/ejzjvVlVBbBLEhqCqCqQJIrIbxXtjC+8GINQLuCVhfB9EoqBdBxuFd\n",
+       "P2wH2AdrisEZh4gZGAOvQG8RhFLa4OgsCwtpAAAgAElEQVScZQhjqWLNGBW3CC3jFuypXqYqexhY\n",
+       "disT20T/BkwmWFwFJSFYvBpMEQqCk1gDI8R93URLA3z0p9Drgh2L4cYwTSrL6ZFUCtGl4BvSSaHt\n",
+       "13rf+nsvfQxWLoImv171OPlbcGyXiDzrhYebYH2TXg6TU/BRu8hTCaPLeCGsXAaZZbB/D6Q7cFeZ\n",
+       "KZiKIukwLcdhY7eT3s+tIjhaDv4aEAWmg3DHCegcgNdzq9tEpAzwoSuyRt7zH9HZzEbAagfPXMpn\n",
+       "E6jZYtjLQSkVt4m86IYtAYjZIRmGgjFtnHgqrRsyziliNX6/ZUAy30H4gtwNdM1Rzns+XgDuIx+M\n",
+       "3JSUQZlT6wdzaR4HV6uIWBaQanwsCmd6dXbEEoSiIDQXgCUEJoFSE1QngEJIecEch3hA31B7TXom\n",
+       "lx6FvQF4swH+powKCigdB3Bjj7dgjU8x9rCIPKOUGjZmgtysmRIRMc21NCXQatGaj7AJXWfpgiNl\n",
+       "0G/RNwcASuCjW2GyBF4IQpMTCqohNqj1JVPGPv7RD5btcHc9jLrgYBP09oBvWM+Cz1LQW0WW18In\n",
+       "43huS1LvSxL3KAayVWRRWC27sX5pktQ/A1hgXQQ2LwIxIyk7jmQ91ngF1kQIJQGyIlBkx/55s8gv\n",
+       "jSqMxLX8Tq82hZBw6qDtOmBqh+VNcEfv+4/V+SG6EQ7522HD/TnVSm1g+xU8IiLdSqmAA4rcEDcB\n",
+       "bmzmaqp6rBSEJwh7u0lZU4wUlmEqbsB8upzMiVFYnITqAkgdhXE//Ap0sFEMjyyGDTbwRcA+CYdF\n",
+       "5B8AXy18qRGcdqATCjugZBkMzXiRZIHjUBOH6QqRRyd0KfDxy9XfJJV6s0jk6UNwazlghd467X1S\n",
+       "5IfDc/XBsYusrIOPl4M9DlIh0jcGTyqlzukynIfPAZfjovsqusT3pmY+e9NsAv4b+hzao5T6xnXc\n",
+       "fQoScu7DCQtk0saYFgpqFH5shupXYJkdFleAcwjiSYjWQI0bJAKEQcIgWaiwwLtr4JcAb0N9WFff\n",
+       "eBxYSmy4o7k7cOGIW7FXgCwTKb0d6leCSot4d0HwzZuhJFRnwxwboeQOqHeLVJ6B0ZeVUn3G8+46\n",
+       "uMcGiSotPlQVYOqB1cfBGoVdxuuczdC+BAbMerY4BvoH0wP1IlKolAoZZlf/tRN6U9Bao5ud1Z/W\n",
+       "PYB+lhvoiUjdMnh0E/i34ckK3pMpOm5ZhCXtwTGSBuyETSZSjRb4w0XQmAJfKSSTKFeaZMaCNR4n\n",
+       "Y7Fhifl4Z2sdGWspqQIFnzsB0yLyzzfSjWEQCvy6JPY6ULoMGoNnP2ZWsCjt5fDWNlQgNwthtFQw\n",
+       "n9bmZgem4fQArNLuxtmkImsGmCRrTlE+DcECD1nMqJQHwh6dYTsQAesxcE9pi38KYWsj3OWG2np0\n",
+       "NU4nfOwoNLpgeCukenD4eimst6BkimnP02SSK/WkI3sMlgbA+TEYdoKnB1Ycg9Mi8sRMJZaIyKzf\n",
+       "nsw16QjA945BKg4VpUAnlHbD0BS8OPu1IlK/DD5/L4wWG92DT0DFNnhMRP7eaGGQBxDBBjwMfOsy\n",
+       "3nYU8IjQqNT1OieuP/OZGekF7lJKJUXkCRFZrpQ6ej12rJSaFKnogxMVsCRnrfZQNQTevh6CTiMV\n",
+       "WwpE0E6NatbztWVwbx00ZyA4Dr8ch7fK4U8rYLAI/HZY2wRZpduIq0WQOgT2EcAHVQNQNgr2IzCW\n",
+       "hiCUfqwf1RYnli0jMeLBYc5icidJqSSSguKPwT2T0DqgiwAO3w5764wb2YK+oGi5BzXo5Y8upVT0\n",
+       "7Fd4HoBVd8DaIfBOQ18J7Px9EflHpVS/CVraATMc3Q9LF0OoAFJpsO6D2gjsqBX5k2pwpWFJGKZn\n",
+       "PEOSYBnEVB/Fugo8vy/ieBsS+41GaT+chKZ9WO4C7zKtGU7cJyKvzWiTSmDTKoiWQFTIoAjYGjGP\n",
+       "CVZnAElbMI+6MNEITjesegi2PwlLT0N9BcQTRMqSmJN+rLEQKddSnH4X8WQb2WM+7UFT8Rp8BPjx\n",
+       "dTwkl8VuqGuHUQGOQ8VxGMpqV9Krjnar9d0KznpIjELSrSciMwx4YcwLQ2WCGjPNMTkxgRLDIyYN\n",
+       "HUfhNifUlZMeH2GyfZS0tR/fqM629vqChKSU7FnZsDEoTBgiYxGxlMP9KViyGAJ1EBSgFoJx2BSA\n",
+       "jgMUWeO0F7goCQAkGHWc4rh3kPBRgcJCqHkU3ikwllEaYcoELdthuVUkVQZ31UF5lcjgKBxVlLdA\n",
+       "XZNITQim3oL4npnzXCkVEJF/GIUmAa/S5ei9c10HSmHzaojmljUvgbFeqB/Ry0oXLE/+kHEvcFwp\n",
+       "LrkiSSmyIrwG3MOsEvCbifnsTZN7Yqa4ip1NL42xp2DbY9BXrzPwwwK9nRDcdi33ql07iz6h7Qaq\n",
+       "MuA3Qf+wMXsJiIhN4PZGXUETqoFeB6SPwD37oKMI9ruhbRxaK8FtBREgDKYMmH16uSA5DKajUBOB\n",
+       "p8PQC4sfg1tiCfz7o0zfPkhsZZhIuAT39AD+whCmrJZLLO3QI7WkYVMfTDXAeCML9IIiIuKBh5bD\n",
+       "LQ2gEiCnIWkReSKtVJfxGi8svgXu7AGLEfQ1TkPGBFNbgcfN4CwAtRJOHoPwflicBmcM/jd7bx4j\n",
+       "6Xnf+X2e96z77q7qnr7nvmdIDY8hRVEUdVqyLUuyHcuGV+sjiyBBgt0AySIB1kj+WCDYAEF2Y2eN\n",
+       "jZ21pWhtadeyJQvSihLPITkccjgnZ3rOvqu7676r3uvJH08NORyOxFukaH6BBmeGVU9XvW/V+/6e\n",
+       "3+97VNvg3Q93HlQ7zsr3Yc95+PQB+H4InPPY924S3dFBlxEm9nUR22Fh1/Cc+kJk7oBDW+HQNYg5\n",
+       "cG1mWAj9CVDOwo41GK9BJkq9XiG6NYTwQ4iOg7EJAxmlt96G0SRU1yHeI9U7QcpOYsSgboZohXT0\n",
+       "psRNxmi4BVpnM0Py7E7YfB52CyFC79eo+Sfg8dNwRICowxNtePp2viq3QgihAXNxmHWV8d/Fm8ml\n",
+       "t3n8FGz/fbjThUIdKtNwbAJOWzBRDXFs3xjr0yP4eo9SpAich5EUXIpAzwR/APo1QL7imNoXQvzZ\n",
+       "Y3BvCu4cUL9YxUr4ZNbhkT02l0bbdIonYN9dcCoGgxJEnod4CZ5U14Tkl1vEPh4hHn+BXuYS5d49\n",
+       "dK7HwM0rae02j+lKgtGXCb0J8rUSg0iDc1cgMI7Cjsgt19GtUDsFX5wD4wiURmHpNEw+SuGrTe45\n",
+       "DgfmoRGCk78KpzPA9288d1h4XH69c2DBSJrXKj3SanwUe73n/wPDb/DmRjQ38CjwIB8WI+8ehBAH\n",
+       "gBEp5c+VaCelrAoh/g2sz6K+MFVg6d3nSYTvhUMfUfPpG83fl/Lw6JeFEN8qwNeS8IkRmO5D+Kq6\n",
+       "+C2OwslRuK8Fcx1FjMxYgA96B2QaBhHwO+AkwQ3g2avwl30pnxFi5LfgrgHMVXy8YxWe2WFjzjWp\n",
+       "J1fZ9NpE1iDvQusu+P4IhKqwZR52bsJ4AFaed6EYGRLeokDnrfpKaLBnD9z3MCzcmJ3vgsh34beE\n",
+       "EP9qOGLKQl6+UojcwEQdQnMALqwsgnYA2A+r8+AuwSFgpgCWC64Lm3FwPgJPHYfPHYePpKG6hH5X\n",
+       "B1dsJ3k5RCdVxA1dJ550qT4thGjCvkPw0euvnO/tJTUNqtwbpUEa7khDLgHNDj2rxKrcwIrkiCFo\n",
+       "hSM0NrbinHkUPhuH6pOk7wlxuGsROtGjO9Ejl12kbHpssAW3nKdzbvqm7CGhdvHDP74/0ZLyEdRs\n",
+       "/A1DCGFk4De2wd4Z6PfAmIfP2EJ8ayDlmds/q/BZuL8N03X199QA0l34//bANw7P4O2Zwq5rNNxx\n",
+       "uo/Ow5ZrcO8x2J+CngYLa7B5FU7D6C8LMTkNfhmMx9p4Px6+D4QQKYONz8zCJ/bDlRD0zsChb8Ev\n",
+       "heBUG8qb8HUp5YIQsQfgjgM+VtGmE0lidltkI89xcfYo3es+tLto2QTJV31Hunhhl7FVWJ6C2srt\n",
+       "Wpd9MCOw60F4/EYcwRrRsSzbGx7eRBcxD+m+KtQ37hVCHLuNr87PRAeuF+Gu3E0kZICi+sB/SGQd\n",
+       "Qghs4AvAP38LT38a+B/e2Vf0/sJ7WowIITLAvwa+8lP+/x/d9NfHpJSPvZO/f0hSfd3K/+1CCBED\n",
+       "e69S6yQ+DfuuvdoFfM8GnN0To/RHeThQh+0FcHZDxQOzDCMLcNiCfFh1PlIxcNeAURBN0GZhMABx\n",
+       "CmIxOLMOxcHL7y08BxPDHZXuB6Q3exSK0J8A9wXIl6H5MMxYMNkHLwwXjoL/HDQ0cG6Zp7/t46FB\n",
+       "9H6Y/hjEDGi5QkQffStr5eDIbpW7I2/6t+4c5BaVwdtLQEd1oG5FNQLejYvlygKcehTuyCmnzHtm\n",
+       "QZTV+CQ8gHufgX0H4O8nYe0APPI9SDsYuRyh7i7G5kOYLkAC2emxObmM9XnQJIR3w2YfCjd1A0fb\n",
+       "YP+2weiYh917iUbuME33ftiAHov0fGiExlTrO/40fOwyXJ6A2ICxcJJ4FcAjtFEknPLYcRWuF9ts\n",
+       "Rq5SvhtqzBJcA7gMo03l4PoLz/25GQYc2A97P6Y4EwDsAOs/wa8JIa5KKV+1WxdC2LB1EqaXXr1S\n",
+       "pgdbNnKcdvdAKwOtrPKD2Z6HsVnlZHytohjv+UU4Idl9QKUuTKxBMa3xzP+cYL42IcRSA04AJydh\n",
+       "16/A6diwCNgBjzwDM0/A8R5895VxcOp+uGPNoTUocX5nlEHBIGJskrZP0k/VCU7VkfMu7UMGCVcg\n",
+       "gi5udBOjI3Er0K0C166AvwfsKkQXFUk2U4RIZMjjOIGYaWLH17C3pUisW3ixLo4JtqsK9YKESzmG\n",
+       "HbU3igYcPwkfCcHIVij1wDwNY8twAVh7s+f1A4zPAmekfEvH5AKQFYJRKd+cDPwXBe8lgdUAvg78\n",
+       "91LK2x5cKeUf/Vxf1LsANZ+e+sewJ6R2YWcOwkYBQsd4mUjajoRwD+2FyB2w9hyYI2C2wU9C3YSo\n",
+       "gFkHZjJQ6kJfB7OquiKNOISugrUJYhNWGvDkksqRGN5ovQrUIhBuga9DLwf1GMoAbQ6uHISH1mA9\n",
+       "DVKHXAsO+vD4EVh/kne8YIs9AAc/DUdXIOpC24Jjn4dn3vRKOtj2bUZ8lvqPCWokKMTIVTg1DQfW\n",
+       "VCHYNeBkDja+MXyMFEL8p2fgSgr+8BB4A2UotcsCYxvUXUhsqFbp0z50NHjOQB6eJDp5oxABCAiE\n",
+       "j1eA8d+AqARzHDb2w/p52PljCA3g2j2QmYpQcCR6qI1TP8aVfI2ipimZ8HkD4hVVrfkBNBPQugzV\n",
+       "EPoejYH0gCLelMeOBXANg9Yhh8LqRfqhDt2jDr1ODazz0Cl9AG28c3DnDsWLeBlxcOZAv664Cudv\n",
+       "eYqnGlw9A8I3fWYCoK+Z4GyBxSz02mB1YfYuqD2F2HqB9MclKSPAkxrtf+az8z+oUZ9jhNg8mGc8\n",
+       "blCOfpbNi9fhgafg3gkwYzeFIwLshuI52L4yLERUYT4VhmgZYsUqy4sB3b1JhHSIulfJtIq4BZPG\n",
+       "cy2uTrcIhUyynT65iwFGFR4zYHBeStkyhfjWX8F/OQ6HtoMjwY1B6RJs+xuyBZjt60S8DmvZDu2M\n",
+       "S2hFjWNvoK5xm3HL60Fx8MSftuGTEdgZwKAGj7XhiQ+qIu8t4qvAN97KE4e8kePAvcDfvqOv6n2C\n",
+       "97Iz8hWU6cv/NlSS/nMp5QcqVVRJZPNfhIc8mBkSltavgDkJa/tgx3MAOte3Z6hr41ALQIwoL4qY\n",
+       "gNhAmRiNCYhuQ0W8rkJyHvSjsFAGYxW6NpypKzOt/+grt9Cb5uYbT8DJ34F0D4o7IR2AZ0GyAvsr\n",
+       "cHIaFgKwXoI1qYolJNQDWP3rG7LQoc9BFuhJKetv8ZhYMP1RuG8ZIsMLYcyBo6tvpRipwdnr8LnC\n",
+       "TQF0LmgL6o83kcTK34Infw3md0A8gE0fNr+nRAgKwxn5i1uEOLMdLlXhrjnY3IRIG6wUeA70KrD/\n",
+       "knLSfC6Nnwqo3ytJ6gLDB9igOdImkVCeFUeuwAs6ZEPg7IKFJiTmYW06QtxI4SVjyO4AoZfYYqzS\n",
+       "2Jimu5CDiV+Hp25kmADMw+jfw1qD3qkGKV85iXcsHT8ZpphKYq5mSNbqzIlz9DKX6XUd+JEDZ2/u\n",
+       "EgghQqjz2P1FUtjcCqE8V15zs9PUOOo1IynF30keh1P3w703dUfOjUP5QgPWrsFDWVhugh0FrkDm\n",
+       "IvntBeaKceyaS2CcwdnaZ/kTkl1fh3I+h5NIE620SKT7bJpHYHkFDrZV1fuyl4Sn3GXTDgQ3VCzK\n",
+       "VC1/Ba6PK57oiFMn/1SD9azGelxjWylFEB3j7OFJGpfO8sLBFcaKAekebAxg7etSyuZw/fMBrE2D\n",
+       "Z4OTgMocNM8S3Qoz0yNsOa6B9PGvr9E91Ce6CpUIODqUY7C28FYNH4fP+8th2m/wYRHyaghBEuUV\n",
+       "8odvY5mngaN8WIy8s5BSfhP45nv1+39OSEN27JVCBGDrBaXisfZCaBECx+DUjl3UTurKeW3OBc2A\n",
+       "ZhkyK4rYGJtV8fB+B2JT0O5C9BRk8+Cm4GQXNjfgKV9p+2/pFAQvwUvfhdJnIHkQ0g3oDZSBaCWu\n",
+       "du+rcdj+NHjblH1JX1emr/EvC5HNAnlFcE2tgFsXInsBqt+5tRX+BhCFuPlKIXID8dclK94OfXjx\n",
+       "FByWMDUN9T6YFyCxAo/c7N45dIz8i+FoMAKUfxqZswOL63CPpRQKNR2urcP0FYjrYBUhtAA/lFIu\n",
+       "CSEeb9K6N8biqEZSSBAbdKe6FFyYcmD9HkhH4QogHWjsgY2+Rqw/TqFn09sSQnPCgI4t1zF39qGW\n",
+       "h+rNhQjADFRs2BdjyQTtMIx12+jjFr2wTbGRYnTToWX3qGV1kqNtUvug5UNxBeio4jhyL8x8EkZ0\n",
+       "aGlC5C5A5a2cx58Lhh3UrRbkHKXouHKD2FqGk5fh18Z4JdG0A+Y11er4KYZSzUfhRA6Ku6EQQEXA\n",
+       "8jJU/q4LwUk46MFUAVplCF/F2ppirJ3E7gDoCBkh1PGw8i7zkyZWMoLu+EhtQDfcgHABmlPQfhYO\n",
+       "P4sdG8Vd6xL0z5LZVyWRL2FdhuZ/LYT4tpSyqDwKn/gSzOQgEwF9VbI+8CksT2DnYlD0CRlH6M3v\n",
+       "YnD92yzMLrLw5yh1y81Kn2gSwvvhQgCchtlV+JgknuthRhdp7AwR2ugRo0/lJJzeA8tfAi1Ql4/K\n",
+       "373d8/V+V929h/gS8BMpeTvF/9PAv3iHXs/7Du85gfX9hGFVP4baVRXfqPGZEPpuyD8I+gg4i7D5\n",
+       "mJTythdDAYkwhh7wYs7m5G9J3IGgu1qGA1LdpbMBxK5D1IJgHYyMkpjKKWi2IdJTVs2DCyCuQV3C\n",
+       "s1V4xlMx8a95zcNdyjEhxCWYjsM9y1BoQSsOnRhYJRUwOtgPOyvKtuTFg7BFQPP3YLICXgG2LcLA\n",
+       "BuMUVHfAsV8H/vym4zeVgSM2pBpwtQsvSClvjb5uq8ZPx1Qjmhto2G/kWN/mvfWEEH/2OBxIwz5f\n",
+       "jSReQN39b7wuzYQ7R+C+CTUbu1iFx7lJijhUZczqkPOh8SKIXWA6oFvKOK7kwuJWOLkGcU/9DoDG\n",
+       "Eiy2aY1HaEkHYh2ilkEQ1WjfFRD2BTE/YE7zWZOwfB6K3wwz879GCG+6BFaLwUgI4fn0LUlfW4IX\n",
+       "Xbj726Qe7GNE0/RK++lcakEoDnt+BeeRCpc2rrCxdRG2asTTWSa6Pu1tG3Rikik/RnfTJ9dxiZrw\n",
+       "yNeEEP8HaFOw55fhoSVVDAbA6R1w7CuouPL3FYQQ8Tz87jYYGwW/Atpl5Zny51LKqgunz8A+B7ZP\n",
+       "QWegClFzRY0obxu5PuzyfV2NT89kUIXMy9J6IcSf1uBQGnZ34ZiBuXUKq+fhZAOk3gVCGK02/his\n",
+       "P+gzutiin65T2p2l0mnBXf8Z9DbhTJnpay/QGx2hu62MnveYK/exvSj2iIv7UQcxIUToe3DoC1C4\n",
+       "CnUPLt1pUs9GCBYE8YxOoeQgwzkGawAh8Oaguwj2bSwInAEEfdAvwsQADt0NHQfR8zD7VYzmJtH1\n",
+       "gKmzcPw+2LUGO36kRGOuD4/8mhBiA8XzyAMJIKErpd4GPxeC/wcWXwX++G2ucQI4LAS6lHzgir4P\n",
+       "i5EhhBDTsOU3YDwO/QisGUKI7wGP/yyJoRCRu+HQr8KdFciVYG0MnvtDIYz/B7iulJgLaTVfLmcy\n",
+       "rNwZohzJ0+rch3+lDeETcCAEiRFY74FfgpCBppURpoYvNZAZ6Lig2eA2wKiB1YH1OvxfAyJXoC8h\n",
+       "sLiNzfhNMKGfhxd3g9GH1FU4eFkZuz4xB5YJXgxKIxDtwi4PKmnohBWv1s9A7iJc2wt7fwDXtgoh\n",
+       "8lLKDUuIj8zB12ahPwkbZZg+BXcLIf705lGAlNIVIvY4HPusGs3EHGhZ8Mz4z3jdPxNDYubx4c9r\n",
+       "kIDPHYD7DsJGDKoLsOdZJXX94+G8OzwCX52FmXGQdRAXQD47NLOKK4LNwgNw4QTMrSuliiGEmN0O\n",
+       "/+ggBCE4fh0ebEEkTe+JeQa/IWnTx7U8YlLZwQQCOvuBswab5TZr6SiF1T60S6xv6bAc9vA9yPxX\n",
+       "K4QLcRLlAuPzDbzsY1x+QLLGTrg2Ac0JaI7T6J+DuQadqItjdLCikE+GaK/YmKUqVhcmG7AnAet7\n",
+       "IXMn3FFSoqBGFKJ92L8GV7cJIUZ/GnfrvUIGPnMUcgdv6nKMw+iPlWnUnw09iv7yKdiRge0udFuq\n",
+       "IC++3trDx7zmcUN/mqeBp4UQWoz+wzWauyDpSAh8sELohmC9CxYB7q4GK7PT1IsfZbA4Cp3n0e6+\n",
+       "jrAk235UYvuxEk/cLYikUojoNNHzJvpggBZdJXdfm95uGA2BaYBlbaFljBHrR+mYHo5V4fL2Js7a\n",
+       "BP2rf0X64QFWtEPPNmkNJoToByrA7lkXTkopnZQQz52Eo23YdRQaMXAtmnRw2yNk1rt08k1WViE8\n",
+       "CvmrkC+98u4PDmD146rmTe7MUDuwha6Ro3ulCcXrcEEI8VdvVfn2DxVCsAU4DPz921lHShpCUAR2\n",
+       "ooj5Hyh8WIwAyqpi9nfhkx24llZTkbkYLH8a1teFGPkJNL8Hzou3uBdaMPUp+PjqK2OG2SqYPtQ+\n",
+       "I2Xxj4UQfwM/+RrsnTTob7MpR2wup+/Hv5SB3hWY2A9GFOoliFkIM4lmbxBuZom1LRq2Ti8pQa+A\n",
+       "kQI9C8EyWB0s3WHHJ2DnFDQ1OBMVIleESEhZjWw8JofJp8pkbe73Ye8GxFOqAbC6Cx4tQH8R3Cdh\n",
+       "e1vFZnAE9riwtEeFia6kQK+DIcAPgRbXuPARMKwAtgshUpPwL3eAZyrXyG1b4MW7IdRRUdnfefUR\n",
+       "7zwFpyWsfEw1gNoDKP0t8D++C+c2uxvu+Tgs3Aga3A3rPozV1fz1uwl48AhM3zUMxwOYhOwPYWMB\n",
+       "flKAOwLQ/ho+HYHgsPKJ+GcVKHwU5qeg0YCYBz0LgjMEB8OURZUlQ7Bfgi3ACQSelHgZIKbT/U6Y\n",
+       "M5+scnWsSXwiIGFr5BKS3X4cOxQiVqvSHFlgI5Rm9GqbWcunHnyW7mWALoQWEJ/Mo6U6+Nosm90y\n",
+       "Vj1Goefi6kWsIGB0yNpPDSCSBa0AL00qVVcKuJyA56PgtyG+MhwbvC+kmEIIexb27b1FjbEDNl+E\n",
+       "OSFEUkrZGHYCX+LduThPZvEvQzHnE3YihDUNLdVgMxrQWYDpZ2D5oRC5bo9R6xFKB11aJoyEA0ba\n",
+       "UPoELG+CFDaWyEBgoQ8EEMbsRNAtH+7LUSyGQFZZGxsQ0dYZ2DFa9ihyaQv9sItnXGfb7ggzTQuz\n",
+       "t0l5LsPG5/az9PgcweZ5+OIZmBVC/DXw4+OQG4WP1aFRBJGlc7XKpUKL7nZB2IeVOch4oFfhwkMQ\n",
+       "RMAogb4Ckc/DA+fSnJk4zEgliuEM2Bg7wua1FOw5BncBx96FY/1Bxm8CfyMltx0Lv0mcBO7gw2Lk\n",
+       "gwpzF+wyoZyC0FbY04HOhJK6NQRcuQuCJMxneLUXQhqy5mv5DhMNiEwKISwp5ZoQ4v+EjT022u9M\n",
+       "0atFlSqm2wBLU3cJqUO3BLE6mh3HGFSBOJbTwwpy4J+mNxKHYEXxRvQmrEXYseozkWiwtaK6LzsO\n",
+       "QHQnHPwelDLw/O8JYX5DSvccpO6GOwTse0k1LjZ3AiY0UjD/x5BIQPteGPWgHoGyBu001CyIOrBa\n",
+       "AOlbbIbjLGctnG1d2miQtkC7B9g79BTognEcjszC41E4wC3FyLC9/KQQ4hkUf6MrpfSGROZ3GqPj\n",
+       "6vi+qr08AdU47BBCaBNwzw6oSF5hPW6FyihMnYO/ugKPlOBrR0G/ezj+aUD0GDx8FowKLJdUouud\n",
+       "UdAGoBugpwjJPvP4+ATEhcD2BFEjSuX/DqCyzCCjE8rEmNp0aU/6FAYmu0oD6nmBJ0fIbaxgUYQV\n",
+       "OHJe5/I9ZbqxBLgvEf6ohzGRJlyDrn6RIGnTD+q06RLymxTOvqLW2ghBYwUIQ3YSDq/B6QmYSkEa\n",
+       "WE7Bvik4+U+G3aKfS1Lu60DTbkNQ1VDptLyNMLg3igjM3gmLK9SDIufu3SRUkGiGYNAOGC3CYJfB\n",
+       "4XacZiiMu1RBZLtsG8lgVyJYWg1zNEDm4KoMaGf7RCo3O4AJVgvb8Y00ic4S5XyMaVMjRhfNaBIl\n",
+       "YHGwF+eZGqkH40xcaiLTZVqjWcKtBDtWF2huT1PtmJBMw1dq0M9CPArbO+C+BPUx6ORgcpJqdZ1a\n",
+       "/HnMTA933WG3AXP7lA2+3YRWGl7YC5EiRLwRnFCMRA3AIN3doLltL/0XLyg1x4fFyJvDbwPvVNzJ\n",
+       "jWLk6+/Qeu8bfFiMABBJQcKF4g441IbeGMQCFc1Qj4OZh34UsgeEMLtSuk8Pn9iDjgBfqCyLG2hb\n",
+       "4A4YSk6H8+vjcSHiSXioC9IH4YOmAw4MHBgNIJRC0yIE4gr98CLVKEivji8CNbf1siAyMGhiGWXq\n",
+       "hQHxEjSSsLoX7q5AJQr1EcjV4KA5qa4AACAASURBVGMDqHxWCHEBxudUMwAgv6l+ADqTcLEBjQq8\n",
+       "+N8qO5OcoYqVugaaC7tbsJyJUkzM0UulkK6GG+/gaUtk7s3i0KdXGeDXbTXj8mbAX4QpyU+3PR7u\n",
+       "at9RD5PboNe6jbKiCWFFiIx8oo//8FnMfhi/OkHvTGEYYmcqUoUBuGOQPXITD6UMkyOQ24B7inDA\n",
+       "hvEtEC6gexaa8RJSs7CFRTboE5ESgxjX7DhtfxfszYKzBJHLiEidlm/hGD6jA0GgaYT9Ho1oGnvT\n",
+       "xIgMaGfg6j4fb/ACJBzIuYxEJK3eIs5oj4ieIVvpsS50inWHuAbpDXA1uJCHCxUILkFEUwqc1aS6\n",
+       "bx3uQScCoR5s3QSRhPo93OTC+V5BStkbFeL6dchvhZeVYauQqKq/v+sqIBf8K7A3BfE8jojiDCws\n",
+       "x8dsn2PzcJ28F9CvdBnYIYLAxbZMZqoDlhMeMhmwbRUSfajHAupBEyMSxw1ZaE6D5VyKupEk3Gyw\n",
+       "mRyQjkSx7RCB0cUXBjm9RXDoCheTPeKlKtEWmGMWYcPEiTg4iRZmsAGJrdANQ0aHfzEL8/vg+Dw0\n",
+       "avDgdTC3wboBJR/ZP4jzowqI56nmlWGt7qluaEdANYCkBbWRgGCqhjupI7s2ouyiR20VZxx9o8dP\n",
+       "+SuhA81/qFwTIdiDSjJ+/B1a8iTwP71Da72v8GExAkBjGdY+BtKCkKeInaEebIwopvlEHxJN6Nhw\n",
+       "/neEsLtSDk5JKZtCZM/Bi3vhjhW1b/MFPD8OlR/eSjBrw/MX4MgItC5BehpaVRUHrvchaYAwcFlS\n",
+       "YXdyioGAaK9PP1ECfRt0JxXhrnkabWQHRvUEa+OwXSijRbsH3SlohVWkhaxArISyX9iE2hxkbzG+\n",
+       "qgmgA/nPw8ETajO1NqWsLTTLxsOgOOpQMsdYM2wCTeBJA83bzkhZoxnzCJstdHuN9vYZ/LOqDY23\n",
+       "CePVt2Z9DChC8a3sfCFEKgH3JeBgoGS2zwzg+Z9BNl5agtIVyG0bdm76oJ+B9Ca2liD1FZteVOJn\n",
+       "Xcz8JcyCRvMHAfSLYKN+tKiKZgegDJkAds3ARkkFiVkzYGxgaGWEkcAaTKKH5mnqfcKay5gfomjE\n",
+       "qDONp02BNqqKsJQEq0bD0YhSpxNy6Y57aD2BZ/p4tqQ7kqKejLFOnGq5CtUfY0QEbsYlNGITaFOM\n",
+       "LLfxwi4ip9PI+Zw9DevjoPWgeQZqjwC+ss4YfRwu3AvhsCJMhpYhGoCnK0fa2A7eB8UIQAm+/wT8\n",
+       "QR0mRtV3JXoGKMKf/6ybm1Bs5D15eECHTB+ulRX3602ZTQmIhqGQUzLZwRZYvYQ7uYRX2E6ouoyT\n",
+       "DqiHW3RDJej46IaOCAZIOtgOCB36IfCtgMRVwWaiyqWJBLKUobGm4U6apBp9agXIR1xCuoGDj4uO\n",
+       "rweEzU3C2/v0rkM8BrOLkvnDAich0aSJk9wKnVGoXofwndCwIH0VHp6BIAWVC7BtHqaaMKdBMwKG\n",
+       "BYs6yaZPfREenQWhgbUBO07D6S+AbVRJ6GNEOwFYLco7swyevwyjrZ/Cy7rl+Kcg9wXYvgN0AY11\n",
+       "oRLB33AeywcIXwW+KeU7Fr76IorEqr2Da74v8GExonAF5ldgbAaW05ByYSMONRt2FuG0DTM9GOhw\n",
+       "5ypUHhZCnFYXxOp34Rkbru9Q96hNDdafhe6rWplKomh8sox9uEYwN08/NIKseVBehG07QUrMYJ2I\n",
+       "3kCSoEsZ10jTzISRgY7wXaQWgpIOfhqnX8dNRRA9JRl1NNXN0HTYuQ56AJUE9OYg+dvgHoZnD6g6\n",
+       "aOIcjFTgwihc3wRtJ2Q+pwqwRBHsY1AaiWFFJRfCIUoNjfXRBEndQggb6QRoeotuNInpL+NYcdKd\n",
+       "NRwbemMRGJyHxCL8sAdv2jtGCH0H5D8FMwUhJupQfgwGLwCxLfAHdynPlVIPzHPwy6dhmp9S9Cgf\n",
+       "B/Efvwf/NAtHDWUnu1aFH6ax/rtDxOsGqXqFjW1j+KZLkH8e/XNVfDcL5ybh95ehvQ7hNlgxcBow\n",
+       "kVWtr3IDsikYb6PFNXTW0Ls2pi/RAocKHcZRBpgNX6cq4jhSIyErtAshAtIMRJ+eDEgFNlWhk9T7\n",
+       "GHaETr1OdU5jw4hSb47R3NgO6wuI5AsUxgfs6oPW7lGMdtjYkSfwcriDBKHeVvpL51kur8Gf3Kwq\n",
+       "EaJwDdqjMH4cqqb6DAxMcCyIt9W4xt243XF8LyClXBdC/OsSHE7ARBvW2/Di6/FaInB0D3zhMJQz\n",
+       "UFuBuecUYflPpZQrNx4nhBhJwEeiMNmDtbpKD3/5/edgzxwcuwa/lAfRBVFGilmkbBKkFqjYLZKm\n",
+       "wAzAmXOp+x2KToB1TjLTATOAUgL8pYAjT3V44WjA9YSHnV0itC8G4V00NY2oIRGexDYHdHQbKcMg\n",
+       "XAZWFNkx6E25LCTA8TU8u4PvG2xY0wz6YZVvM9cEcx+U+5DpqmiB6x5EpqG3AcEh6IWhXoP8CsQM\n",
+       "qn2f/+L74L+kiNVmAJtReNKHjlZjx/I15ifyeJpOO1hGG9+Ak43XGdGoiIfCP4IH4rBrWW3QllLw\n",
+       "2D8eJvj+1NygDxqEQAN+C/jiO7WmlJSFoI5KjL7yeo//RcKHxQg3FB7i30O7COV/AlNAQkKkA/Nh\n",
+       "SK+q77JXhW2bEJlE7Zj7Q/b9XwghRlEZN43hjyWEeQCiOWgWDezfnCb20CgxdIg06Uav0cp0KGzm\n",
+       "aQSX0PSAMT2BJbNIunRFmyUS9MigyR7SaEO8CNMW+DqBO6Dc9Yn3YbkADQMupeDQKTACNVZfyUAs\n",
+       "A5Evw64+9AewcA8s7oD2BSjPQ9+Avb8CeQ2296C+ReeFO5IUZyJYlsO6YUEuREiX1IREZ0BYmsSd\n",
+       "LgO7Td8vInRBO5XDWujQe2kRwhfgeBv+1Ztl3guh74S9vwv3VmB8CSphOPElOBM16U6PwOEBrHag\n",
+       "MQqdB2ChBgeEEE9JKVdfu56IFuBLw1Tckz3lEaJVYMc4ppUg3B3gmRAvz9NNg2fXCbZ8Bv7ttuF4\n",
+       "YAWSfwvRR2B6l3LnnO1B/pIiC21pYtgR0AYY0iNnaehOH33gohlwzhaYSDQh6OOh+QHQRE91CEyD\n",
+       "Hg38iE68b2L3upyNCqRWQ4/ptPtj9JlCigzaqE9grhJLG8QZ0E5DTsBeBFF6FPUqhKIEERexawLJ\n",
+       "JsbvCDG6AbUN8FaBJ+An/43J5KykMRdgzgaYm5D9iTK+OpuGze/cegxvOpb68DPev2GE925jaK73\n",
+       "hqMChBDhWfjkJ2ApPByTblcFfLahUk///fBxk1vh9w6CHIVWBe44BXcNZcPXhRDmFohMweIqnOpD\n",
+       "oQ2RLogawlrFFG08IVg2bcL00bBo9UKck222pFyKrvIiXBwzYMTiR3eE2UgamL0Occ1hd6tO4C5z\n",
+       "YTxJx9JZwmVc76MJnZRXpiOgokvseIQxGgwGUDR9hFllVYtQHVj4ogZ6TVkCnO+AMYBsB9Ln0VJJ\n",
+       "AmMTrG3QyoNXU/Pm3gBIUnM3eX4SPrKsRsyuBs9Pgn0GtLKkuG0VrVjGCyzS1Q70AopffwO5Ndtg\n",
+       "+wjsucnaYKoOByOweZg3mT/0C46jqLye0+/wuidR/pcfFiMfRAyLiu8IIX4M1Y9A7MtgPwQHy2qu\n",
+       "Wm7A1POqW9LvcIvNMzCA9EchcRC8CAQF1VXJt2BxPMTlT42TWEuiJyNorRS+CLDGrqLPCKQmSWsJ\n",
+       "bHwQEoFBDIdRaiyKLoHugwiBNMANQ3URcj0IKrijUPwEtDzoroAwIJGBzbByg87XwYrAoWWgBNVN\n",
+       "OBeF6pwKCxOHYa4BKyY4mkExNEO1MIo0DOpmGpcqmtEkjEdPC+HTJWa7aFYPl1VMz+Po+jyt6irn\n",
+       "8ybtXB//zzrwzE8zFfvZyH8KjlZgbLijz/bgaNHk2j/dQ9ferzSy8XOwuwAn9kBxEuRp5YvwmmIk\n",
+       "DHcehpG7b1LK7FLKpKMGfaNBN3oN5iQzmsAKXFaiGov+GJ2mrwiUwQQ09kLjSSU/+KgFGQlTEYhs\n",
+       "QdObID2kH8bTOrh6Dzu6TsXok9MsdosIRuBTBAY4mGKNZj6ErqXwhYPwM7gMuGwYWIMxnJVL6NYG\n",
+       "dz49TufIdvSJbZgdD0t3qIej2GKTaEzdG7NxEEYMx5jEFSPEWgJpZehEz2N8ymDLpMeWDWhtA9+B\n",
+       "cj3D6niaYhVCl5qs5fskI23SE5KGBuvflVLO33z8hBB5YFzFH43thrQNXSlE8lloPvI+lHnmRkEL\n",
+       "3xIRMKMM47bdcD7Nwy89AN1pqAMUFFs73oDfFiK3ADM7ytS2HafljxBcvwSzLRKzDUYiJWyjgiZs\n",
+       "SuyiKy16fg1PX6On30X79AnKk2ssboHJlM6kabEcm2bdyOABRqiD7hWpDza4d3MNM9Lgii6pyAFt\n",
+       "I84W6bBhJKiLOD08ZskSlmvoqQ6mtLC6DhlDYgSXWIuZDOwwyDAYZ2F3BvJhwtLGCJZxQgs44g6k\n",
+       "qIFdAnw1y700gXdykyfrsDilNl4bwPojkLsb9q2DfQ1gAAwY6PDNAm+Ip2OllVLvVmQ7EH3L8v1f\n",
+       "UHwV+IaUr3UJfps4BRzkbYzA34/4sBi5BcO29qPAo0LEvwi1h2FmAfpJuPwxuF6AzuOo0cB1uBHA\n",
+       "VfgaHE3B7jV47H5l4ujbMPcYBFtCYJRZnc6h9fo4hQ5eLEEqCFO2ozhijAGSKpu41IiTwQR0uszi\n",
+       "YmltGrJMFR8nFQGxSshaYTTd5rPPQHYAl6dhPQT1NrRTkNJAxmCtAPsuvfIOEwOI7oMDi7BpASaU\n",
+       "c4p3+ogepzybRg8b9ENRanIUP4hgai9RFSGyQRGXPj0cIjTIij5aPyBek0Q2m8x8H57X4PSFt+7o\n",
+       "GRpVGVuvQKO8JU00MwXXs0qX3B4H4xjcOQM/GFYttw2BS8LhHHgNiCWHlvE2+DNQv0avW6WzV+dw\n",
+       "2yLWh0AL6BouovME81+cxuujrscVTyXDRu6G0gamdREZzODpUwgC9OAKrt7CxaEh1pF6k7xvMCmS\n",
+       "JAMdA5+wrNERPlKk8bRJhNxEl2tEtBhp6eNqTRrZAKfdIbEKnzjl8Sf3J8Ar4llLGKEa8VCZgSax\n",
+       "JeiuQBfQEjlcooSlRyWq4UbbCJHF7M7jJZTT6v2rER7fM05z1xhpx6ZqVmi38/BCmFr3DAuU4H+/\n",
+       "YSs+/EwLiH8O9h6FfAacA+DWYfsxmKzBifvh+RDwN2/tPL9r6LVvQ1huQCiA9jCDKLodtkzfQq5O\n",
+       "gGcTfgjuewoOLQ9Yr5/he5/bykZyhXBGZ86WGHoHQ1hERJgwPkURwxK6onlZK7RnNLRImHHPwNS7\n",
+       "NGNpmmaUMJKeHEUjgW9AKVVj3texgz4pPyAWFvQ9j2VrKz3mMKgSsEZN9NCDLDY9PL1LPq4R8zyW\n",
+       "rQGaf4219A5aJQO2bEKhTLg9gik0hOkSGVjoxll6Yp2kqZPvO5i4NMwOG1PQ+Jdw1uYVR+KWECng\n",
+       "haNwz5IasQTAC1ug+iwwI4SdBacKXL19IepUoHSbUMpSDDon34Hz+wsBIbCALwNH3oXlzwJfexfW\n",
+       "fU/xXgbljaFMYHYD0du4Cb4P0P4unOvB0m/CyDawmrDlGByuw/E/EEL8OynlNdB2wO4cHFiCUgTE\n",
+       "lFKkVPJwuQ16FyKyh5GW+GYIDAfhV+na47haEgsLCGFh4bNClSYJLCbJ0MYENKJinQjzXMcnXHMY\n",
+       "M7vc3YGx4U1/Zg1KeyA4Ag+cgEgAxYIa2WzmYLAKgzBUCiBNaMZtLv9qBiOZQsOjozXpiBBSTyMI\n",
+       "QOoIBIY2QoCOJ2ySgU7Iv0jfqzPrQ6BBowyVl2DuHOSqUJqEM2PcpkvxxuB2oG4rbwwFm8pMmH43\n",
+       "C1fLcHcUehHwcqCfhumrasd29daVbCEO5eDBtpJGDdagNgkvxKDbBq+F9A20nE0l16fhaGitGqKU\n",
+       "IxZxiI7uoXG6DdEl+Hgb/F1QiaCPJDHTMXrRafBBGD4Yc0TlKl4wjxfU8b0QsaqGn7NBg0DXsJCM\n",
+       "4lLWypSlhj1okBAWk4ZACAPP6JDX1ikWXKRu8M0vgWNfwYuWyFkRAgoYWoIBGyxoJTrRgIwUBFh0\n",
+       "iNIQaQJ0QCATPRzDYDDmsOeEwXxujq6RIGqNECo16U3spdu4AmMGXI8oo7wor1I2aXtg7/3w8AJc\n",
+       "noGpNUVqPn4XjP9nle2yfKcQ4tG3mlP0bkBKWR4R4tpZmNwP66AyYU7CWOWVTA/PA+mpNuLLu9Yi\n",
+       "2pRLwoKYCauTkKpU+fLfNfj2/RaaliRWMnHdNkE2jRQWCVZYYUBLGPi6Ry+ow5RGRE5h1wO8RJVN\n",
+       "s0BLjGPLFq5eJCBHwk/R1y1W4gO26Bo7PJOS7+EGFjEk18jgE0KXOnCFqghTkDrKZk1QNpJ00Inp\n",
+       "JSaDOlfCGs5EGl2kMZ0Gom8QCdJYcZtq+kWS2XG2NgSWoWEXJePtKrEwXChIKS+++gg2fgwnYrB8\n",
+       "ALI+lHVYuwyh7XDgHhgJlOz/clkI8f/e5txfhcubMDoGe9fVCGghA2d8aP+DKUaATwMXpWThXVj7\n",
+       "LLD/XVj3PcV72RmpAg/x/ttZvYyh98XjkL4f7jwHuQ6Ehi1IAVQ/CfxbSExAfqCua6sHQE6o0NeQ\n",
+       "AYND4F9s4xDC0wNESEfILn2jTFWbxkYQkh06oouGDhi41NAx0KkRxcIB3CBCwo8SdTpEWy6ppCR9\n",
+       "k+V1tAeDPoyYcHlWSXK92rBAyMOZXTBqKjPXQQROjc/Q08bxdBuJQLUWiiBnEEFfWV/LTSQxpIQg\n",
+       "kPRkHx8HS0JXKq5cvwKJy+r3gAqzfTt8gvJjcOJX4IElsH3wRUAxlaO2MgPFJTh9BfZFQZQhcQFk\n",
+       "Ff7iVpdcIcTsAfj1SbjgwOH9UGtAfAnuMeFMGfbvg/YaWi+GNPoMrHX0sEdsySUY7YLZBbOl2MzU\n",
+       "IVzAHHWIM8AkzkC4+LqJL7roQQOXCIGeItBbtKSDW9Tpp/oMTEkTjYEI6ApJA5ewWEVaLhlCCCnx\n",
+       "hUCTYPtJplphrox49AsacmMD0wpRNycJhMAPwPLHsLBZiy/wHBqTskcDl75cx0XgkQLNQmo6bhSM\n",
+       "IMr1sTiEBG64jpwKYQSTKt/IHoFBS0mUD/IqR9KRI7CnpqLl/SREauozPxqDhRzsWVfJBSQZjjp+\n",
+       "XhgGNk6jrl8rt1q/l+HbT8BvXoHplIpU0IrwVF/ZaSOlHGSEOH0GDtxxU9F8Dv3eBmbeZvnhAemO\n",
+       "Sl+InfXZuTpgcdTE26ETxKcI6OIQIAloMIoghUkXR3RBO42QFtLsUrcm6Ik8kESKGOAjWaSqh7Ck\n",
+       "Q98IiBohHD1MX/ZxzBApGmTlJkUi6FLQEylClOnrHmBxXuiUhE5ChjCJ4uiWUub5OoHp4WZ3EVo4\n",
+       "Ry9xEU1zsfUuI7KMIeOIQQs3VSd2zeeOs1C+SwgRgEiArKGybgbAX6tx9Y1zm/00fDQO+28KFywU\n",
+       "4NHPo6z1LYjeD+mjMGVDawEe6cPpyaGydwXWv/eLHMr4FvCWE3rfAK4BI0KQkPJdt0b4ueG9DMob\n",
+       "AIN3yejqnUQW8gFM3HLSp+pgTykVoVmGugWVDNT3KuJaNwyGpkLnerOCJbNP0XoJEQpAGrgiBEgC\n",
+       "TBxhEaDTBUxqCDxiZEmiA308mlS1LP2ugd6Fjt3HIUDc9OXuWdBPQMRU6bwALR1KBtTDyi+l7usU\n",
+       "0zbLRhpX24qi6go0OkgCJC0QZ5DaNhK+TqB7DOQFNAGSPt1gHel0GfdgrAkdC7oZiB+AbhWaJlzu\n",
+       "87aIVf3n4HQUig9AVkBDuKw+kaQfR73e631YLcLIIsSr8L8M+T6vwgjccxBaM6q/nHgK5vIg1yF3\n",
+       "HrTdsD4LuR7Nks2Elybsa3jxNTrbBIPFKoPEfyB8CIJwHreeQ4pNwoHEY4FWXOLrl0BLIRkw0GYA\n",
+       "H0EbZBbPmWdpqkm+Z7EQTaEJDWhRoUkcjSlCrGoRDHIE0gTaeFoJ3RQEI3E8mcbTL9ObczEYJdCS\n",
+       "6AyIahYpBEaQQBoRWvQ4T4uGMAgYw0DDZA2PRYJwmEHQYiFr0s7F0LChDX7cxwlaEEIVcsktcGFR\n",
+       "2VX/4JUjqIfBHvIu9AZ0Q8o+3pRKBuwLKAt+/oXIzCR8dRrCJsglkGEhvt+7KfFbjRvEvytBAdXx\n",
+       "Kd+6g6/BD56F3DpMpcG4ALt85NadWA2Lrt6gnllCZATBtEap3sYbK5OJzCC0PJpzkc1QQJE9DMig\n",
+       "0adHh0CMoHkJulodJx5lICaIIgkY4GAxIDz8ti0zMMDWbHoyoGcYDISNKwQbtHFFkUDOopOi763T\n",
+       "06u0tBGkGEGSRMgmbW2RgCxCjuCa46CHEXoHJ3oGf5fPVheSuosfwEi/gSZ8Oo0MoeU2InDRAf1z\n",
+       "/P/s3Wmsbvt9F/bPfw3PuJ89n73PfM6958732tcTMYnBAWcqSRQ3oiltAlWFQK1EEZX6ohUSfVOp\n",
+       "byqBVFUgASnQCqigkBISgklIQoht4tg3tq/vcKZ7zj3jnsdnftZa/75Y+9jX5jq2g6/tmPykLZ2z\n",
+       "9/OstbSG//oN38GHnqoNnLcCb9wLIfzfMcb+CetlN4TQZvE5nrv3ZRfCsxt89ukQQpfln+T9z/Li\n",
+       "Q9ozbpzhE4Gbf0OtMdL3H1GEYB5/An/hndj+SfXyKl5QWxd8V8QfYEa+dgw4TOrZ6VtHoXttZocn\n",
+       "1NHXePWHGbxAd7mWXL++Ti+QzlZ86l2P2c1WlZN9sj2yVWKXEJTGCm3RKnaNBZetGCgVOjITmbGF\n",
+       "su9oumB4dKh8MHWY8dIKz7dJVrhxjqM5mjMO30Oa5qY6JkVqaza105yYthrayYJu6DpwDvMShbp+\n",
+       "mQg6ogcaClWIQjzSjBtSibScGlcjp3JWjxkNuL1JMaJ9il/+INuvcO//OvGK+b1Gp6nfb+l/vu+N\n",
+       "QclLFYcv82emPHaB0bA2REs2+Nm3S0Qg59Qiw5T4IT7/gFubLA1YLbjX4ntbhBfs3/+8GxdGzkqk\n",
+       "SeKwdeTg3Bkf+ELX8vHEcOXAvWen7jYbsntTB8++W0wmdYvfm1ir96cvxkUGyxwXDsefNzm3pB1W\n",
+       "RT1TE9F9Mw9t6RlpGGnrBCaWJHGqCtuOGkcm5YEsm1gJqVlMjByLKt2QyCSyLJeEXIhTx1YVujoG\n",
+       "oiCa17AgJnumndQnvyfVGg8dxDn5wcRx57TJ9AFr9zi4zBsX+fwn6zfSW+/vz3PrT9RG0otXufUD\n",
+       "QStGN/LaBuYTF9n69NfBrvhinIhgnVO7Ed/53Tyfvsr3O5f4Mz/KYL3mSxuR/Ut+IoSwEWO8/eiz\n",
+       "JzokX9WjJsY4CCH8rS3ZT7L8Myum739cs0wd9xoWhm3j5qK9hT3NYmpwvqWRHNpsblpIjg3TtgfW\n",
+       "jZ1DrlQJVUMSB9J4lnjoTtKQxVw7jOUO9c0pHYpGkjgWy2WDEG0lc7JQOa4WHIVTUtGRvqlXzcIy\n",
+       "6ZbKssx5LZmWxDg8qRLMLKvCAqbyNJPFurDJ01SWbqtMNCeVgxA8FgbG5ztm+7kqH7n2h+vR7g+9\n",
+       "pdvxO+f4jR/BP3nLqUrr1eGRqGOltocYtCi6uFiTzD70FvbM01uMz7L3rhiPPvaNXOPvkvhJ/JsY\n",
+       "vZM05kejmj9IRr4VEUL4ayTLxIr48zHGb/lIJ8a4XwubvfTcl4TNJimfPs3uz518ph9C+DsM/jrf\n",
+       "kzPMagXMUb/j488+7aC5Jhe0sz3D8KTKI9TYUKWnpnqMsSnV1NUQPXTHyJqxXKk/nbi/v6+4NvLU\n",
+       "DU6vs3GRjz1bY1Kk5BP6PZ6eNoRGRxkKx62ZPQsWQkcwMbUqN5XYO9l3gkQ0EQylOpZ1NIxNI3OH\n",
+       "fU9ei/KSz2S1e++Nuyx8nvfepjfmxmnuRHa/YSrvV8ZF/tLztJYYb9N6lXfd5//c5u9u8+QSVyYc\n",
+       "D3nlq+lNhBDO5brrvyB78ZzJ7eeNb53luFO3XS5HyQ+NVE8PqQrKJ23uTu3svSlbmDedHXimaJgb\n",
+       "T/Q7QT4YWZns211YcXDpgtgYSJOojL7YehcP0KSqaBySZ6pJbpqtCPGMtlQ7RImeypE9i4Kph/Zl\n",
+       "saERGtKQ6GMrbFlPWrigsqwII6VCJRGMJGmibWIch9bkYsjMqwSLHhkfl6FnVA5ksedWnmsXt1Wz\n",
+       "maJ1xnS3YnyL9jL/8jleebkGHn0F/XD8O/zO+ygfy4WLPYftzPbqnK3jodd/cFP5D6LJL3y917UT\n",
+       "wgev8KMX6vmhO4xDCP8gxnjrG7g9nniS1vqJQi61uN5l0pvyPxvC8qfYv1pvPnuelRcoJ+y8hBtv\n",
+       "I5T2dO7Cn5u3MnfZhmW9MDRLJ26f75mEjui2Ya/tyuRxp/fuOejueTgfbOSPKeNI0w1VeMwsJhJN\n",
+       "eTgwy0fy6jHTMjVOZo7SrhibYmjWB+ZAFlddGQfj9r6t0LQaC0fJk1KHJx2UFa1wV+mqIhRSz0mr\n",
+       "s5JwbKJUSARtlY66xdVQ6kskMpVGuCuIulrmkpGjRnCvmZhWU9MXxkYH7G22nXuQ+KX3D1y6z1Ob\n",
+       "LA9o/EAI4fPqMXoHOxxscO0Ma3tzfv0D645Wu2bNQ5vFNj8xtP42gNUzh/SufAPX9rspfgZ/+x3e\n",
+       "x3cdbuQ7JRn5KrOa9w65csgscP39IbQfxjj6hkW0/sNj75/xiYQbz7FQ1UXZ1q8w+e1azMyTKzxz\n",
+       "aPpGYXKa7hZZi72nl/QX1swnmamWqVTdOx6oT/4FNVbjAa5jTomRiSVMJO6ap6oMQ8Pe0oYza/yp\n",
+       "X6S4yfUF+lv8zrvpteskaB0vt1PjUKm09HWUFg0w0MfYVMu8O0YCeiozlZvoC1qSMJGEgbLc1NiL\n",
+       "3nWLN+e5eIf5J1l6yNNveXmd2aPR/2bQPP845WMnLIensFy7tP4n2zH+fbVr6LX6Tys/GMJaxfbn\n",
+       "cP0RADqE9Eme+a9mnk22HSaF4TO33Xhm1c5wU/7kxNzyU9qbx46bI0XjjPHgWDwzUc41lFcnsu7U\n",
+       "cevQv/v+rrQcakx2tbcXDGhudgAAIABJREFUTBcXlMtLsiRXSsQkV/O7dwlLMrk0jE2yfc3OQ/Pd\n",
+       "TAhti9WRftpTyIy1BfMKicT7jD1wPTzUNJBoG2tp49hTgp7MTK6ro69mk6faDhx7YCFEK3hTKtoT\n",
+       "TU21pAamHsrSwrpU2eg5Hj1r/95tGp/W6M2kyxPTckv7Xa/Jnz82fnhs9A/r8xcWcB6LbP5G8Kvf\n",
+       "u67z4hnV5x43uHOBwxHpL3Lm9olU/te6piGES+/iJ/4E9zsnlNstuv+CPx1C+Ktf772R0up8hVfN\n",
+       "TcmTu3rvi04d8YemXPsj3F7khX2u7DNLee09vP5r+Fdfvr3eT68608xNhsymmTLpaRUHsuUl/XFK\n",
+       "uaCRcTg+kLcGynZwKkwV8Q3tsGLOnmjfQThvK/RM7AseCGFZTC5KbInVWQ1U4VAsbypDYqE4drcz\n",
+       "MQsTHNgNlwXHKESZeUemekptqed0tIySzFRTpilSj91M0BakUqU0VKK+mXnz2vI40893NGLhqEhd\n",
+       "zzrGvWjUzORLlzTzXH6wY+PSGz6dTVyacGWR1b/KccbyyzUIf9Dkt9abrD5l0l/T2AkOhx82/fi/\n",
+       "4+JL7i9VvhKoedBh8hWjne/+CMG6mkHz0Xd4Vy+r3au/a+LbyabJ1DPqF/GxEMJfjjF+6ss/9eN3\n",
+       "vtQefCbj//vxEMKNb7Wz6MnY4R+EEJbUok97J23efIX/8plaGnxw03H7C+4tDbgYrWapyVxTSGfG\n",
+       "eqbaEs2TvOtLpmyJAQ6lunJrxoY2DbWsamlrxNJU4XDwlPxuJq7f9vqFWvL99RcIpznTbGhoOMZ1\n",
+       "XVMrUpVooO8xwQWJKNgx9bpoTkuBPTN76uToSG5DV66nUpZD7eHY8zcY5dwree81PnGFxlt0BMrA\n",
+       "F06x+cvfjHP9WF2RfTGeYvu3eSaE0IJU5y/nLv6xxMpsqrldOPw+bvxG7Y4scPajfGSPtUHfwe7E\n",
+       "F96/Kv3eI3lybCU5bRL7yksjvfIlw3JROdc1HR7QGDA3VSy/YGv1knxIGQeK8Zbj1aGynGmWlWmy\n",
+       "IgbqTtZ5XEVL6ow82RWroV62aVHTJKZyiZVq4F5ySt9AQ1eiEk1UVpQuShwpXZWZ09Awc0ZTW2Jo\n",
+       "YijR0vUZQ1EXS6KWRW8IjvVV5gSHCvuCqRXbcvMWQynYtdp8SXG5sj4cON8bK2djxfyh/D2Z+CBK\n",
+       "Drn1386F8NJjfHiZpyLpgP2BMn/B8ct/+C04oDHLp3hmhz+fhvCrFVd/t0R0mfe9cMKAevS7NQZP\n",
+       "snKbJ77ee6PkwR3Ce9TdvFe5eE3zh4eqbtPw7sRer3C2oPsunvnYl7yYLu0x+nAI4bMxxq1H28vE\n",
+       "D3DvQluZ9w3mpkZJoZd3ZaGFe2KrrTFtGZVXjReip5PEfojaYaaBkWWlPeteU+naNtSqgvVwYBrn\n",
+       "TMNZjfGhSXbgIB9LbEiqS/azgfmk57Q1uV19qU1tU5klQQsTTZmemRWZBzJrSpVSJUFhoE5GmurO\n",
+       "5raRQs2vioZmstCQyh2Hlr3GRZ1qyZ5l8i1Fa2Z8OpfMM07bGqcmhrsUs+CMhvlsYv+P8keP2IuM\n",
+       "P7fktQ91TTotx/tXlC/1GH6YwWvu/NjA1bV6PEPNhvtch92vWM//o4ifwMdifHupgW9ivIx3hSC8\n",
+       "Azom35b4dgJYC/zg7/6pt5rPtQuejLxxxYnHyLc6TtDgXwSNZrzwAs/8cW4fMrers75sqzOlVxhm\n",
+       "DdOqEsOe0mlNucwCHigfuY96Q25owbHEWWv2DHBX4oaenl3CglHxjFE1lS6dId72qZ9INE9nhqsr\n",
+       "pmHRtonSUKLUdNrAgpZSaklhCbmGQmJe6oLMdTPzmqYYKvTN62sa+cCU5W2KnJdatarr/T5PfoK8\n",
+       "5PAqGyMOL9Zd3DuBe5+g+sI7ff57/MW2sx9Z8+SbqaQamvYeOvVYX1ny2d/BIcsLrJ3oRywe5faH\n",
+       "ucb+vudOVdqzQ7sLmW4jdxyD7uiBYbzgfj9w94jOOkvPK5K2tDFUhZnYXFX0HgrpsVAMxEbUCAET\n",
+       "M7uiU6I3xHhf4VBaww6tZFPj6Zbd1rJugIHCrplCPXl/Q6mH2yaOsS26pDQ1cN/UmrZFubGhFJf1\n",
+       "7ciVgqaHFh3bt+bQJaUDXTN9xw4cY82KTNCajfSTB+YWmlrNSrMYG2SJFyep42Zi+/S86aiv/cx5\n",
+       "6UeeUx4+XQvWTG+w+DpLh7y4xcM1Brd5YsYLK3S+h8szfvpVroUQ/v5XS0jyum337+FDujXHvP0N\n",
+       "tNPuv8lnf433rTB7wA+eFXqF5u7jFrbvu/auV7Tzyvcc0j/vhNpbM4Iew6sXsAUhhIvzms9fkLR7\n",
+       "To2OdYttm62O/XSkig/M8gazVZNsR5hPjNNCEQpjuSUNiT2FNVVcFMKGRUf68aylZMcgDuXhqkl4\n",
+       "4KBBGaeKYWlhNm/Y3ZUlK3JL+mboSQ319B3oKNXvsdKWRNBQKcxL3FQJSlM1qP1sfQ7LLVmyoQp9\n",
+       "ibbEWT0HDgwM9dGw5Zw8dswVUTNdNqnmVfEzdtd3pYd9F+Y61odHdBu2O0tuzZ/T2z1weGHT63eO\n",
+       "fc8tPn25bW5zwcqgdKvZMxrCPNMVh68O/Mph7Z7QimxO2fhHMcY3QwhrKZcrysit7xBH6HcyPuqd\n",
+       "Y9F8MWK0GYICZ/2eZRS+s+I7ZUzzdUbCVx3pfPPjBHDXxMFXGrbBCi8+cZKcvKH13l3za7mD+Sfc\n",
+       "ambuhoeigTl9M9dV5pX6MrekzikNMdQxlGpbNCcXLRoaWffAil1vSuKadtGV56lpuGOwkFueazuY\n",
+       "P20aHpMYKZ1WigqbCptmDk1EmYsKFY4NZVqCoCsxk5tZdaRyoGOiE2kd8cJNGgUPF5iNuPfrPFbw\n",
+       "apeNnI2/zfQqDx8/OTcP3+rn8R8at1h+a3fkGmtHvI61ec33d5zZzSQVdDWO10wWh5ZPVeaf4ugT\n",
+       "TMNbwcaJrUuFc42m9mSo3ymk6WX9pCfIxORQGu5pzc0Z787z4TPMllTlxKTRkTgSuqU8aWnMhnR3\n",
+       "TU1VWtjSNNTRNjCShGOrsTSJ6xaqgSKMLDbvGVcM05apkWiqnvG/oH74jiTmsK2yIGqbaEksmjhU\n",
+       "SXQkZkZGUjxv30OFBexo6TiDBaU1G3b1Lcrcc9GWkWGYWM1bNuIV06SpH/ddT2dCqy3EVBUnQl4o\n",
+       "H++YZvPmqoZie9e0t2p28woHt1huMX+HM20eTHj+HEdvUn6QN3pMM576t/X8+m11JPZ4/R5PXXwL\n",
+       "86bCmzV+5Os2rzsRLPunv8Ubi/zF5ynGuptz1m8FWXlZY/qm3eeOjPe+vJChnibVZoq1oNvSn1/T\n",
+       "3GubrjMJixaGA2k8cC/fNhw9y4MnySdma03tNJi54XpVqtKora8j07FlHEqlqUNnhdA1NjYJpc3w\n",
+       "hBjXGee1YnJ1y+FRzgtNmXfJ5IIoGqp8Dnc0LMlOoKmljlRTlJrpiXLBfbl7J4D3BY14rEweKCSa\n",
+       "uiozqZ5FDVEhGphJzHTMQmWSJdJqSLrIlCItzbWbWnEqThuKuObcaCpNj2wsNBTZaRsrY6+OZ9Ld\n",
+       "I+l0JsQgmytIM8q7LEy5yfbfYPuUGsu9FWOczYXwkef4yJUa9Oc6WiH8/DjG3/56r/nvpwjBHD6M\n",
+       "P/0t2uUj3MgfJCPvfLyVwTJJuRmYfSOAt99TnNDVfpwnX6hfHfv9EPJfiHH2yld8NFaEYzrbGk8c\n",
+       "6T99RWyekYdCu+oI6XXLdmxZlDmWOEBPW2bbNbm2RQsn2hUDdabVsCDYMFJpS2JCOiU7ovPApDdn\n",
+       "M8w7Cuc1jAXnRatS4xNmzorKrhrlP8KOzGld0QSFvlTUiwPtWV82nVge12rZ65vsN5g1uIfBr3L0\n",
+       "P/O5FfUis/kWBsRr78S5/zXCJueXmG7TfK1WVfslXFpQldOvMKpsyUaZo+WpIsECW7f5wjne/RCm\n",
+       "minNZkvcLPRfWFFk8/LQQDBpLsmKA2k+4bnzTCriA3ptVT4UQ6pVToViX9HuWhEdhI7UQCa1bMmK\n",
+       "u+47UmpoVy1bIbdkwWJcVqSZU9WWA12bnlC39LpY9MgSOLgtOC3qSCwp9UUPlHpm3jQ0Fo3VwhoR\n",
+       "LcdmUpV5LalCZWhsUWZRz0j7pM81CTtu5I/rjkdmMdWdtB22Vxw2p5qTrrZD+809jdBQaCjTRGxP\n",
+       "Ze1NZfus6rM9Do9IHnAh0s7ofRprvNQ76XZcYf813uOrJCMzXv5cre9y4Qo7M9JXWb1dg2a/ISfX\n",
+       "k6LgMxdDuPk8r1xX/DFChESIp8TDI1eXeP9bkpyDJtcLvBFCmMc55k8vSB6kkiLaW53SqhRK/TBX\n",
+       "d8jWD4nBTKIsGhppV5rMXNYQTCwa2arRIPY8Zd+6oI2ZIq6LYR1nas794WWmm7VBVFwRHSuQiypN\n",
+       "lWW5a7qG5vTNa9r3vL5FDXcdaynsm9nHUBJPa4QBYaJ0WaqnUii8IpoodGVmJhLBw1r11bxB0kRR\n",
+       "A7fDWDNpaZuYhaFRryWbxrpz15hznJ83FWVh6uUn7jh1uG9u6zV3X7hocjAk3WD5M7Q2+H9PwMFf\n",
+       "HIGFEC69yA/+KHebNXvKc+T/vGY+3Y4xbn8j1/33SfwwfivGbxnd/VEy8i+/1gd/P8R3eDLyscd4\n",
+       "fECZcLXJ7X/9zazCIYTwOKc+RGOd0W32Ps7qj/F9F3jxXl1h7bb5tZ85cf28/ei727x0jZ9+kaND\n",
+       "8XxX2TivMhFCpUpapsma3F1nXLfptNSqpmisLzHEsbaJXKKmCCyq2/8NwS1UiiQhmyiyDTrLJCum\n",
+       "KM2b2jlpzZbqJ76ttKq+sInUntKewm0Tq3IDldtidSwWpdabUfOYq2dp7NC7QZmzOc/du+z/45PW\n",
+       "+8Y385z/bnGH//0hz8/VduUPCl6LMY5CCCu52e7Ig6XKUkhOKt+xabtyd/684Q+d5Xv3HDTv+6Xl\n",
+       "kTdbrBdT4+MjR7Gj1+o4aizrmWkZG8lVoRLjnCjheErZ4MyI7CzVvCTdUSabQuuhhXJZzBKJfUFP\n",
+       "R6pj08Ce01KlrhvpunEcejNMTJOe+TIaho5dT0lclBifqKQeekQHLm3hOalDLbm25ZOE656RA8yf\n",
+       "3BcJltHALaWJiYZobGLOVEulFCVGTht5w9S8SqFqBKPRWD9fMosNeXHLYZprFmuaWobxvmFemFRB\n",
+       "ki/JxmNheYPnD7lX8PnX+Df3ePIxlj9Yc0C/qLtT+yl5Gz+SOk6u38/+Oh/8HV6MTHb4uVntwBu/\n",
+       "Xq2hWvMie5aFSz3Zwo6iOu3w5Q3h3cFiGSRxqp9z+G9qJ+LHL9Sdshsld/7ZMj+2yvMZ7Ztmz27L\n",
+       "wzm9qzOtBxOz5SObz5wW08P6fM+j3KAxVmmo4oLGbKqVBYOkcEtTruFIZtu80QmQNDoyDo9jRhiR\n",
+       "BdqR5JTQuCqmUcPYqjkNUaVvqLQjk1tx2lRuzrKbXnHO0EhlR5RjSVAK4dDUohpwU9XHpyVxRvCG\n",
+       "DQt6FiTuS42slVvuJ++RxoapkslnyXNF3DXt0hhNOeyKrajfntPPV01mC7Ldmap9wSir3H/qjFXH\n",
+       "xuGWaX6dpwt+c5e3Nahc5IVnGDffcl90mT1FvF1j078bk5GP+pLK77ciXsb3fwv3947Gd3gy8lt/\n",
+       "h2vPUk05fCXGeOdrf+frjxCaL/Lu/4L3Hta0to1n+M3vY77J2YdsnGX+oDZ5et8xm98fQmOVxY8Q\n",
+       "LjF3/3f0Jzt8pNBfymmNNOJIKXOUnsOOHXMuKjXt2HNspHR0AmEMMbEbOCOTgg0zY9typSNBTnxF\n",
+       "kSyowWrnTS3hQDA9YWUUMjOJkZnGyaI1FgSlqClXUwrrsdH8rO/yzcr2K/z2y9hhfI3FD/BLHyJN\n",
+       "GX+c3b8bY/XqN/N8fz1x4mnzdsC3W1vcPudua095jjNKMdt29/RFW9f/FNeblCXhU0bFJ3zm7nFd\n",
+       "qX9yKPwPmdnzibIa200b5spEqSMZTU3CsXG1zOs7vPg+QoLXyfqqdCKrxlqjzOl4ZJqNtFKe1JAK\n",
+       "EjNBU6VpV1PqReINo3zPlokqSUSZqQ0j93Gs7ow8e/LvPameaCKRSzROAIo9HVOlOU0XzKwaO1b3\n",
+       "Y5fUK/yOA4VdDYsWFep05cgFfacVRqZuIjcJOXcTG2lLfjbIqsp+xkwqVPMmYcvQsby/xyRVZIkQ\n",
+       "77C6w3irdi/+5WN+Y8DSfI3dRd27vMrSNv/i67iuv3ry8w1HzfI58+d4fonTw2P3Ox/zyvf8sK1f\n",
+       "fsber+3YP31XOHWs+k38b7yyzqvniSVuLPOfPcEfmqfKGU/0d/esPXbTwYVzupsHdi4t688vEEri\n",
+       "MVmb0Kn9CZpNlZG9kIlhJHcK8wpDdyybOHNy9mdmUlGFIIsH2o19w/UoNmeSfKxhpmViZKaSSBUy\n",
+       "uyo9iyrlSVHRMbbooR2Jpp6o7cAZ8USvpEZsnVYzcO6pO6GXRFOZvsIDLUNrMmV1KIY7ZipVVZAN\n",
+       "NMKe+eTQJEmtJHPKwcykCsbtBXdna4bbubQcOcw6hEuyIrFdPW/0r05x/+c5+zq/+nYdjhBCNid5\n",
+       "z32N7zuUFV3TjQumry1ynBOTOg//rooQZPgx/JVv4W5fxn/3LdzfOxrf0clIjPEarn3ND/4eIoSQ\n",
+       "c+HH+ciDL/mgLGxx8zzl9zO5RZLV4+nZhPw+/gjv2mX1EusjNt7d8Fvr9I93zcfS1J1acjU0REFp\n",
+       "0a4NZexaUEgcKsKyaWwTFpWhsGlqLDVvopI71jFyQ0MiapqFUyfH3OQE/zFVqjVJOtgyc1o0qGm5\n",
+       "dlW2sGJFTf6htFKVdSq0XbnykFvbjP+Xt2gv/MojR9N34nz/h8aJNP/fKxU/teBOf+buwhHTjvjG\n",
+       "R3n1UQWWEj/Andf5viUurlHdcvf0UKPZ9ORw191OW5I2jRWOs5bx0YEqnUlePBYWbylb+cl2JmKZ\n",
+       "aE1XdcKR0GzrmtkQzUSnFMZSAz0NEwcaJloYWgzB5TAn0zJwZOJAaUGhq2Fg5JNK59AX7akMpc6K\n",
+       "MkGUGproSyxrSfVMbTtr5tfVXZXzOK3wmpu2LGloaZpYc+hiTfOUGxkSO4y67B+aHM2bHG1xaZnJ\n",
+       "Y4bTSuhsanZyyXjB6c9sGizd0j9TKLq09jZ0DZw7z8O/wPbP3uWf/jx/8mk0ibfI3uDT1Ts0tvtS\n",
+       "LP0A3ztX+z/BEzsHFopf8OsfetbBp/riaFv85QN+7uQe3jj5EUK42OOnzuIU0zHpnqN2n8Nr1tO7\n",
+       "9h+bc7j8hFK7ttsOdwmBLJFm+7JkaGJeFp/Xt60Gv+zrxUQUCcucjNNKi7J4VxHOCEVilrSExkCV\n",
+       "DxUWtBxI9UxOCPUcK+1bOhmtkCkMPDCyq6uhJ1Nq2LFg6p5n1cPKvjo5zU5+WuiJUlPrcnv6Doyq\n",
+       "05qjmZCfFaoxyT2tsvJ4OK3T7BgVI3vG0rO5o/7M3uSS436braEyGzB+nJ1rpsnjpr92vr79PEV8\n",
+       "gye9bYdj8aN95y4/sNd+3MLmULl61eaHn3b0G2+QDWs58++2+BDuxOibWjB/jXgFT4cgi/HLXap/\n",
+       "P8Z3dDLyDscyq19myFbjUiZP15YdzYrpEheG9KfsNOmcrYFwjX02qtRhecmykXy+59ys8GbWNGxe\n",
+       "kmjL444qvCI6ZRYv2khywQMpYjinRu49g3MO7Tl0pMYUXFNXv6fUC0xfXQVdPPndUL3yV1q2BDeM\n",
+       "NQQrkhOWzkxHW2reTLQrSJ2SmouZa4szry+RHD9KPE5o1lfonQshO6R8/ffuuvvORYxxN4TwN7d4\n",
+       "mtjE9cf5nxbqptEXY4uzK1x5N6/2GN0131i0NA3Gnaa5wb69zqqq2DLJ+4p2IcSh1uOlSZZJw2lB\n",
+       "U1kWsuObhs37YnOikBpVTQcpn1Z4WsO8FIX7MntOG3ogTSbOSjTsGzltzwUN805p2XUsuiozE11V\n",
+       "WRUtYEHmodKeqY7o1kmFPNG3rW0oNzYzpx4SN9SdlYsmPmUjHhCeklhS0zyneCAgxmvSuKB1qZQM\n",
+       "Hxrklerq5fq2avbFtWtmnYneqC2Zn9c5GJl0Eou3dkx7q4rWOYPH9jReONBuT+Lwfw0h3L3NUzmt\n",
+       "Ye1cfeebmcTWhYLH1Jn0Lu5y8UWe/TJF1eg9Xxi41f+0g3+KN7+a90mTP/oUvfdwe0hzhzMXWdp3\n",
+       "lA8d34zS8SnllYT8LMkeoUEIQpiJZqoYNeOiM2FoPyxqxnlFuONheMw0BjU4c+HkqNqK8HniliK/\n",
+       "ZJaWtVqwS1JruCrzpmOVwkBuYlEUNbXROOFcvekpHfMa5nBo5J7MzLJoUxQcn5B8H3VlJrgrigpB\n",
+       "X0NlQQhd3eFYa3NTXCmV62NLjaaumSrk2rML0t09w6yyc9Q22H7I4y26XUY90jtcGvDK2ZNEBFKq\n",
+       "9G3eHyGE9cT5H181XslMexvuna3kg5asvK7xU/eVv0j5Hbe2fBPiWz2iEaNBCO6rk8J3uBh45+M/\n",
+       "imSkBqRaV7eXH54snBNG4ctBsq88wdoFhqPaffdCRbNVCwC9vsaTgeWEi9s0pql/fanQPBwq2kEy\n",
+       "OaPTSTXLO/pJUIWoGVeJh4SOxJ5SVyFRV7b3kAimch2V2wpTtRDa0+oj21G3YoMaLbinXnxqW9zU\n",
+       "vCXH5u3q2bKta0fPoTmpPSMHchPnqyZFpVlVGrgSuf9YCOGpetOnPsJT5zkzrf1lrk5OHDm/IXDh\n",
+       "Ox0hpFdSC39pXni+qWhMTR7umz18SO9M/W4GuzydcbzO8U1WS63Fy/LxgTLLdaWa1YZptW+WPm9v\n",
+       "NtPpnJZV98zSNbPQlaqE0FK2VoXqqiLOTGJhKTSslomNpO2VMNVVCnoG1g2cU3lZ04FER6V0pCvV\n",
+       "OKFnTs2cxaGZEVbVyegOxiqrMrcwlGqZea/CnJaxwpaZePKdmhJ+khajR3iILZGTEcE2mhKlkGy6\n",
+       "NNjV3O5L9kfK4ao7p+YNb7wpefq++emOJ/+fnvvPDxwudPWTXLq7a9Y5ZbRwztrVlt7BTOvodXs/\n",
+       "mobw8zHGL+AdER8MIayc4b++wtIS8T7hNjf3lCnV24FLCvUz/VVN2JZ5vMdgROMBT57CBfpjFtti\n",
+       "475iISdPqe6THZM8jamGO6KzWrEgHDg2L2o5MDCLi8ZatWGjnJORWP2stggDMR6eMGauqDlP1/Vd\n",
+       "Vlg6GfRtmLjl0JHUoaFKouHAMwqrWgaCEmsaRiY2zdmV6atcUC9otzxSc65xZ1NzWuYlJg6MwsTx\n",
+       "YjC/d13WXDNXlnpxIo8TMXSVSSnOd3UHQ41yanDhmI1jtlv1uCo+ZLHJ+NEqWRBuko3exikb77tk\n",
+       "8uSzlu7kuq+OHK7u2n/+hk449L6rrGW88hdDCH/v93J/fCdGCII6GfmT34bdPwKx/kEy8p0cNYWv\n",
+       "+0e58gOcTurH9f5GCOEfxhj3Qli9zhcee8S84PgJGilPX+P2efp1gaS/XCt9Pztl3GDvNKc2oouj\n",
+       "fTsLuWowUQ4bqm4qtlJ5rKimpkmbsC/xpkql8ozaZrFmzjT0BW3BgdI2HlczJs6rk5GmWqG1p05E\n",
+       "FtX0gzHuGGtYMLYqWDa1ZuR+tWO/SMhWnKpyucziLErSsVE608HDK6SX+cCIg4z1U3zPx1g4eaFf\n",
+       "mucX//MQwl97pGz67Y4Qwnpq/a88qXHhsoWtprToO1q65d4f+iWz3o/w6jmOdujc4Nw5fqtNscWV\n",
+       "ZbPDmaq1JJkOpf2RuLqr0VgUp5XEJelsrGouiRZljiRxpEoKVWusLBNZyKwVhUaaGKeVUwqlFQ+N\n",
+       "T+QzKrUEZUdlZGjRnLHCosREif0TH5NgXXCg1k9NZJaU9kyMTQXBxMR75Cf/S/QkUqVX1YnqbR5p\n",
+       "TGip/XHueCRNmuqqcQTXRYtC2HLc2XF+kfa5VNLc1U4+4eVTMyu3+fF/zIW9A3feuO7jHzwlnI2O\n",
+       "TidGjYalOy29A8hl5XnpqF9TF98RTZkQQljjpz5S+9Xfgffjk1z5NTuzqVdP8963gCXvLbC15y0s\n",
+       "jrfZZrpOd0h5lRfOYoHjCfmY9DE+e8gP9UleJL1fM02SHUEhlchEeVyVlwcmSYfQc2yJySdpHp9I\n",
+       "DYzUbKfL6s7m2snuHxNrtT716rOArrHeyUBmAQ19r5o39S6pB1LHUoVSJRUdm5gppYKZoXuiSuUy\n",
+       "zslsKe2cdNgu6NjQtKLjntSqNaW7WWl4fmQ52aVMDZLEYliVxKEsXDXrlfrtxGiuFOLzFnf7xvPR\n",
+       "6OVlHna5+CrPvML1nPIac7e+CnB1mefOqia5tISCbtcTO0sm7UNLu7zvPmd6jH7yG2B1f6fH82oD\n",
+       "n899rQ++A/EoGflH34Z9f1PjuzoZqc1Zn/1RfvBNWieo7mur/Os/HUL4P/Bz/OZPc/siy5E7a8zt\n",
+       "875NqjbLfUYLbDSZDYmBokWvYrA8s3Y4MFu4ZHj/gTQ9dFSdliSpPGQYmrijCm9qmsnjk0aB+gXy\n",
+       "eUGqY2Bi39S20ll1j6amndYxpy75a5GtujKeqROTicKhqURHUKhsR0YF79qpbC7s2WvXep/jZmmU\n",
+       "TM3KSj6miHx/Sf99pHdr2f37f5i5X6nZQ+eOOH2BB3UD5jsieh9ckl143OLmo4VuzsL+JcPGyzbL\n",
+       "n2fU4uKM/T4fe/cJhmTK8nOOb73u4PLA4lxd3aX77J2ZNylH8k4pyQtFWgphTiLTjBsmMZeUDTMd\n",
+       "XSumk21HvZm5WWmSpfIwVTmnfsGM1UvrGXQdOdSSypUmZsaWjbRlpifJaBTN4/hETb2n1lUlmlMZ\n",
+       "i2bmTnych5pSlcwtU++VWRBMlKYqpTqJraQKwU1RT/SsqJJUqWFnZL/dshpbJBOnq6nNc/OG7bHb\n",
+       "o4H5X+XiwZ71X9nz2Yt8/Huf19Oy8sW3xcyoO2+20Xg0jXhnYnWN848SkUfxIg9eNjn7wCe3OLjE\n",
+       "mXHtkv3alIf/8KuNiB4pJM9zLq0f2lafuZKs4PiIWxlnEvnlLTHfUCRzddVRdcQwlYW+1Fg7NE1C\n",
+       "IomJcejK4lVFWCJdnziLAAAgAElEQVRcJtYFR92NOlQ/ryvq53Wofmb3BaMvMmLoKQ1FudxluTeN\n",
+       "jS3JJUqFiXuOHWhL5FIzDQcm9hx45kQ6/q66WzqvaU3pQDQTTHHb1NicZV2Hemkw63V1jVTlsq0w\n",
+       "1EruuxzrnspxSDzM10zSiWS2obrYsjgpWHne6Gfn+e2XmP7KyS73+bSvguVrYc5gf2Ywl2sPZ6qF\n",
+       "VHvUctzzRePCM8esXvguSkY+in/2bVJCfRl/5tuw3296fFuTkdoIz/vxUozxv//m72HtQ7xn90uJ\n",
+       "CDy1w7WLbJ6PMd4JIfxNts5jjtUR5y7w2Uu1M+3DJUZzbBXEIbOCnZwqY9biqDwy3brn4Ch3cKkU\n",
+       "5gpZyM3ManhaOJabWZFKwpGxe6ZGeFOUONJTaauxIKX6YR6c/Dt65KZbZwRN9UJ3qK6O24LctomB\n",
+       "oTxGpyfMTqi5D5uli9OR00l9vFu4Fep6uhdo92m0eLDK4jUGi+yusHaibpvyLRSY+9rRuDgnLR8l\n",
+       "Io+iqTvtEt7k72B84qJ8/lP8N8Ma05Alik7Pze6RxSqYizODbjSYJbozJunEtJlrhLEiPhBPJvhF\n",
+       "0tMqrinSUj47FvOmy2VfnuWSkNoxs+mSY7tqAEZeG9lpi1IPHSrcMbZsZiaxq34pHaMtOlQ/gA11\n",
+       "0jlVj2xqMGJh4kCuZU4mlcnlRjJ79rVVKolDtdniKWwrLMmkKldU9qkGlKc0ssKwmulr6ZVD46TQ\n",
+       "jRPD3pK9hZm7z0wtfJpmWSfg1T8/sP8TY43FTHNSmrSCnSI3uT96Z8GHeZN/rxPXpMyIbP1Ndp5g\n",
+       "/jz9I4pt3l52O4SQNfjgizzzAT75SQS6cxSb9WhlM6H8grM/cuRMPpZWr3gjnNUP0dg8cWgYCktV\n",
+       "1C1vG6aHpqFjMttUZkOa766ZOemwPkZP4/PqZ3mgLizeUD95y+qxbEfd3TquR2hKwaaZRK7tNQtK\n",
+       "MwN3ZR7XV+mKlkzFE4uIBdfddcoRUlHtRZNiX+qeto4Vbd16IEmM0ho85Mi6pTRXqczi1L2Q14yh\n",
+       "uGSpOjaIQSsZWpsbaadHlrq/4uZHP2DyC/uS8b5z55hr0L/M7q+GEP7dVyaCQ260jFe4e2Gsu1wJ\n",
+       "LfZj33Sj9rZ6FF+VCf77Mf5T/I/fpn1/1xjmfTu9ad6HbozxwyGEvx5C+ECM8dPf3L2kiyy8zWI1\n",
+       "X6nXhUdW43frY0pTBj/D0g53zrPZ5vAM58fELv+2wdMvMzfh2jlu9aPRZjB3cUlx6qzKnlacGIdU\n",
+       "mUS5dR1DG8ZmRkqvnkiAn0GtEFBXUefU0Oi6aqr9ThbUYg576hpiTp2EPMCmdTwhcaxypqy30kj5\n",
+       "dMF+wlrFelH3UQaN+u9N3G6Tjetuz9qAxog7C/XfihPK3U6Hh0PfQn2Rrx2jG0ONRqkKqST2jdtH\n",
+       "+isT+6v9eu1vxBiHEGO81wrhMzP+bIf1Gzz+vGLYsLsZ7RxvyEOpc/BQmp+TJfuG+aKy2RTDfeJV\n",
+       "w5hR7AplKcymjmNTO2+4nlfSUFhTmpNZ9LKRU1rOiVKFmcp9+xoyi9pGKtc03RfNCSLaUpSuqxOR\n",
+       "y+qs7576mm+rX2DzGBl7Q0NAsCoXDPS9qVA74jQEu9W2KpmpE5LT6qW+TyyVcSqJa/J415GZEDIb\n",
+       "1Rmd8g1VIzhqNPTPTm2ss/s4L68x/ltHir+74/YPn9aYrpjeLsX+Z0h2+bfv4EXe3mKyVzsKf/G5\n",
+       "vcGpYa03M8YXmiHkp/mxtfrmTdZCuLXNP4kxHoQQ2vP8yFm+L+U9Q/Z3OPgwH/842y/xkWXKc7z2\n",
+       "eQt/curpcmxpVGrmI63GPVuhdDMdmVlQFkMH4VA/2aKiSt48Ie4+JimXKIdmVZN8oH5Oezgv8ZmT\n",
+       "McppdeLx/7N3Z7G2pPd12H9fVe35nLPPfO58b9+eyWY3KZKiRFODNdqOIkuWbdARLENGEMMPjh+M\n",
+       "BEiAJH4zkMQPCWA7ieJMzmBYliVDEjRYMi2ZpEixJXaz52ZPdz7zsOehqr481G6SoilzENlNClrA\n",
+       "xcXd9+yza9euXd/6/v/1X+ue1IngzMw5LCndQ1CYqLYILROrgjruSb0mtaSFmaG61KXFoPjciaaZ\n",
+       "0rG+lrlE4kiU27Sjo764to6NjRzJzMJ5R87MHNsSbYXMyJZQpFoK0YmNZMvGfFQNEtVqLo4OTO//\n",
+       "Za99x4nH7vJDpyzN6Nf52I/yVPRF2qFTPv4Z3v0ek1uXTF67Kz27Kb1y6PqzVUUEXt3g8I9FWSQE\n",
+       "F1WFyW/k9+Lfh1ewE4LlGD+vm/tWxNtZGfmAz6do/ga+U1X++zpi9FluPs47v2BRzQN3El+izxxj\n",
+       "8WwI7V/g4IdYu8PoEg8+w9WCjRNOl3jyKs+dcRBXlc0V7c01/etRq5nLw7a8jIqwJBrJvGBkS2Fb\n",
+       "ZqK6WlbxDhUbeknldf5ela/8q4vHuz4fg9kWBEGq4UjpwENSlwUXFvNcrwRqkTJWbabunOV18oxe\n",
+       "ViUNdwKdyDRWG81GjXtNLj7PKxuE81xKObjAswl3/8kiP+ibBKNPnqrfuOHo6rJ6r3By3wVFfW5U\n",
+       "rjF6jb8ZQviZGONhCOHSu3j/n+NXP8Yje+rbL6kt1YRHpsr5jNk1G596is0bllorRkuvy7NEkq4q\n",
+       "Y5AWZ4qsbl6P0tGaYWPHPGaWTSSG7rgj07MqGFsWbWk5MJE59oCKXJwZ2rcic79TfYXEhkThzIET\n",
+       "wdBcVQ0JKro4X/wdVavxCs4UXtbS0ndO1Besaaijp4wDteLQ3EWlg4Uae+HUGSZiiIbFREzGzrQs\n",
+       "Fee1i6E87ZhMxu6VE+EivR+upl4vPMeV9/V9evc5Bz+zZ/odKe0JN/f4VzHGb9giEmOc10L4hd/g\n",
+       "rzzBpMvoLqufYu3Q+lkI5/86+/vv4rt+iFtvOsA+z4Xf4idDCP+oI/kvOro/WLNcMN4OztZfNGvf\n",
+       "4cVxJV7tnXD5Dt99oLE+1xnNdQe0G8fKctVSPTVMZ+7M7yhrbYVUEh+SJ11LcYAl5exAUb+rVraq\n",
+       "UeyyNE+GcoXgdVFX8IwlVf2ysGpmRWIfz5tZUm1AmirSsKLuwLGb6toypWbMzALn1SxZVTNTN5Wj\n",
+       "Y1nPZU1NdX09x8YmmhJ37du2rGHiVMNReMREkLilZs2O1KaJ86LSrlvpBfOirlC3oS2EMyGZS+OZ\n",
+       "IpnYvDc3zPjh5+ksYoSWZ/ypO9z5vhDCk190n2gO8GkeTdgYKUb7Bi8UwhHPXuC4xss99n4O//Ab\n",
+       "dS29hfhR/EqMXz69+huBGBUheEGlW3kbEu2/fng7yciqz5d8z1Qn8+uMk4/xqcdJd7h2SL/B0zvc\n",
+       "++gfFtgU4+iTIYRPc/N9fLDOB2/y+ndytF53Y31Vv9V2N6SK5oqdGxN2WiwvmyW7ZiEjvSDVkHjZ\n",
+       "WB2PWFJTc+jUO1QkY6iqgrxD1YK5qdohD1WK+H1kUm0dA4kVNa9KDNwveEDqUCJXqJVREqrizkZC\n",
+       "c7WqkAyRJGwn1VRQEbiRMC2qn7vdIR5y/i75kGc+zst3GR8xePqtTkb+cogxnoQQ/suXTP7OhvzH\n",
+       "3yUWhfJoXfzE+3h+h50h34d/1uVdjzArSF6x+t4VsZNLamNCqVMsS7Op+QOpznPH3vtyMP+hroOL\n",
+       "25JmpiGo1ztGemZxrnQfg2CyNJZKtaygJzdQV/OIU8d6blk3c0VitNBy1NRdsORYsGTTPRxJpUpj\n",
+       "NY+4Y0XuVOkcrkm9vJiSOFAZBwylVkQdQytYXgxy7hsIVSheGEjL+zTzkTw9spQeydMrGmUhjyVS\n",
+       "J3GoDJcoo5jumoXozMysl6vPcuNlJrtsvdpxvNFyenWq+d6B9Oi24u8h+VLZTN8IzGN8LoTwj3b5\n",
+       "9gbbRxprM+/KebxF0uz49T+/4W4xkvci4xl1audXhW/fk5+vufRXtlwcN2XlSJEeO7nCa0tHpu+4\n",
+       "WPG7uMTxEkd3yebOq1zuz1pzq5MD3WnDkxsXpdn96vmWOHld3r4mJPuKpCuZtdSLwjy7RXrdksys\n",
+       "mMmTPXUtTTcNZJasayuljjUd2DZ1y7K2FUzNtFQ9qTlywapuPBTC1Hlzt0JHrlSqqamagDOnopYT\n",
+       "l6WW1aSCzKrMxNQovkeZ/75eekMjuWzuumhDY/F7ltwy1hWNDbEqsePM7WTFPAZjNe00JQzV0qA0\n",
+       "1+jTiJ8nIm9ieUaniZbFJFsIYeUaP/2DDC/yKyV2Wf41w+UbPv5LtDtMjik/G2OcfKXOu9/k+PP4\n",
+       "x2/zMbzZqvkTMvI14ky18aMqBPw7fv4hhL/7Bf/8NzHGf/PVvECM8SCE8D/R/x7aj1AMOPgXzH/v\n",
+       "yzxvFkKYsjGjOePBjzb80l94yMH2tnSaC51z0vlt/fuGknbEBUkIaj5r5o1FD3eA+4WFfVkpU1U9\n",
+       "thdvtpquqPrJr6C3cFJty8x0tDXUNOS2RDWJl3FtUcjNYuEsRK2kEp0eqPzWHp4zrPFiWjV2LsTq\n",
+       "8cHilK+G6h5+OmX/aW4fc/BL5E/HOPqmmJz5wxBjvBNC+PvnmL+jmr0e1Rex9A+x9zs8FkL42Q2a\n",
+       "dfIXOZ9JLm8oWpd1CtJy37y+q4zR6Hzfuae4/9Ujzz297dbFJXE+l9YnmmWmMVmTx9vyekOZnOqV\n",
+       "M51Y0445SUNfaaqwbNVQV9uGqSWZYuERUqhb13ZqJGpbUndgVXX/5q6+mRPRIglo0bZbF7SkC6t3\n",
+       "gpam3Ia6poFEW1vbyEzNmUQZlhTFHZv1pnfEQjE/sK9ulpZ62UxaduX5un6WSsKBeXpXOTuyM5t7\n",
+       "ZMgDY/qdLb/94w9rvN61cjhWJrclP3VbcXMY48fe6s8ZPx9CuJ/H/zp/9oWFk0baNNvMZOf68rV9\n",
+       "slPtpZrzt1tmTfIPd2x3lzWOU6FoSMcnuvW7utc+YL/3DgYjwhGzFzWvrUmO993r8ESfpR43NjhI\n",
+       "G3r5mlQXMxodIanJtCVxpB7HBvUV2+Xr+tnLMhnJvqmpeXyHZkhk3mlJprZI5eV1M6eWbBk4L5Gr\n",
+       "NiMNDNQW240y1KoNhlVXjYyMHJlZ0lAa4cSZC451pKqB8LqmmkIqVYQ9Ic0thagelwx1xHgmTQZm\n",
+       "ZqKOUs9Mw8hYzUxUmMa6o7wpZPuupKmsKOT9ue2b3K1VPkvD2h8kJP06w6kvaKfVeM/DtC4u2rsJ\n",
+       "LtB/jNU96uM4/K235gp6axCCFXwQf+ltPpRn8PjbfAx/ZHxFZCSE8CEcxxifDyF8L96nypX4zT/C\n",
+       "a/8O/gZ+Ft+vEiD+AcQY/+4f4fe/+Tv2F6/x1eKQu4H3Cj770I7Daw9r38uNG6VcQ1jqKpYLYdbU\n",
+       "nP2+XqsQkdtWEY49VS94skjQnS7aNBOf6+lLVIr7luB5Ncc6alLrWgtd/FDQUhpXCSrGC+fGHpTc\n",
+       "SyoVSYw8MaFsUE+qbvUungtVi6aMrESSkpdTjp/l9G/Qu1Bl82TfH8LaZzn92JtVkRDCm/KWKW6/\n",
+       "VbvjL4O8ZLa6yEbZZekzWg8cap07NgsMHjrixTd4f865HVqpc3lfP7QIG+qzE8PsZTHPvZHxvz7Y\n",
+       "tX/luvK1TbW1ubRx4OjcsUaYWS2m+nmhnGwoB/fsnc+NUhJTcy3njAzcj0JwIreDVVFNbtfQ2Imh\n",
+       "trDwyu3YMzGTmVg1dllQLoLyZqpSYV2io+7YRBQMBAMthbq5QuZUIqgJsSYpa/LdU9pjq60lq3mm\n",
+       "0FdkB0YhsxzXlOWW+bhungaz9Jx4dFtdy6M3Jy68RPJQ28HaNduzmtNuJt1vyvKHheMZPxBC+P0Y\n",
+       "45cUin5jsXyda7M3fYBqPvnQqnljSX3aNUsbspWaenpstHyqOUw0aqXlYiJf6sjOghATtbOW5maT\n",
+       "wSG9O5rZsfr6UOhmxke8PGa0yVqToyzxwqimnIzV6n3zdlusDaSBUCZKiSI7VQ8jE4laHKrHQ+10\n",
+       "4D7BmZfcdFlYLPKnMkFHal1wV3tBSyu9SF21GYnqZhLVQHYdUa6U2rQmt2fXgQK5ZT3LghXB4nN2\n",
+       "IF+4mfCUjVD6rmnNXpr7bPqqs+ScGHckYWjsjrmeJW2rmkb29Mpcb1AYlTVJa+heMtTNS2dly43V\n",
+       "qXhvJj/m45f44O2KkAzqfPwiR7+4cEbe2OTPbfHnIjsvce4cz3SrW5wuk3Y1VPTHDX8GH/sm0Gp8\n",
+       "RtUu+pbGlyUjIYS/hz+NNITwEZXPwC/jvwkhfFuM8b/7Wl44xvjpEMIkhPDbKmLzddaLfOUIIdzH\n",
+       "5ndS36zkB6efwE3eeJmPPtI0fXSFSWFWD45jU9lHvS6v10mDkJbmEtF9KiKSsyjHRoXcWKktuKeK\n",
+       "dLzkTSFqpRMZWDa2rnBNoWFqbmqgbVdm4NBIX4pPqW5jm4FrsaIyUxwHPtmsqiBXcBDZjmRJlWN2\n",
+       "Hu3AzRqTgsk92u/i0R/hiZNK6Hv7CT71RAjhf6b1Lq5/P5fzyhzu1kkI4f+OMe6FEN7MbJvHGM+8\n",
+       "tdjfY+911hvMP2Lnu6MHTdQbY62b7P40z/3yiyYvbfLuNdOQWCp72mXPKCbMC3kcWzvgtX+5LN9/\n",
+       "L8mURwtxZyZv7VpJchcUluLMLL7m5c67FEct45OJ2OlrdXJroSszMjIXjQwtLyobmcSS1KqGOwo9\n",
+       "Dee9oauhlJg7dSLaVLesrjCyr9BeaAKeVFqXm6qbKh0uHC8SY12pfjUSWjANM8W0wc1jdqasRweN\n",
+       "nmGYWDZzXrAXJlJBPW0YH7WFWjQd1zSWOhrjgeny3LxoOux2rPYLJ22KpHDWumz21D7ZG9VC8jYY\n",
+       "4OUTZguvrXmy6ub1KzbfuOneY01lu60emmqjQ/2Le859LFOejISNM7HbWbiFThQNRvkhg2es76Qu\n",
+       "FHPNbiFfmjporzt96UwjL8xD5nin7vLZzFY4cmulFMtUOrsr1HJF2TZJgnkytJHsqZeH5krtLPrg\n",
+       "PLWUBGdpz0qcei7M9GxoqgsKiVUzDT2HC/LQUX3vOyqR8syxVwVntqVmRmga2Jea6UpEwZELluVK\n",
+       "ZxJLn6MxUz3n4qlRGNspO7aTqfHoWFi+rhl3zDUWRnojiQMDTTEem8REL3a04jXZaSq4pdeoOQvr\n",
+       "mst1ybQUl24bvnvXJ/4/bmyxkjKYcvhLjH4nhNC6yE9/qCohvxxY2WH1Dh9q8K+bVVpxq+cttUl/\n",
+       "q/CWu67+IXgG7wpBeJvGi78u+EoqI39eVQKqq9bOSzHGsxDCf49P4msiI/CNGef96hBC4908/per\n",
+       "Uu3qiHvv5fe+jdd/hqN/ykf/Wq7zY/smaxdMT+uyYmYjOdXrBvM0ytMDE2sL/4jrqjv3THWybuBl\n",
+       "hW6VWeEF0am6O1rmCmdmohWlUqa58GjtOJVrW9G3hzUj51VF3VUVAWmrvt+tOY9HjjNCyXFSFQ2u\n",
+       "BE5DVSTuxopQ9Ev2JnzgHr/9GOFH+IFbtBYCtJVd5vdx8t9y9X4e6lHbZ+eZyhzt138qhPRfcuFH\n",
+       "2egyCyFsvcLhv/z3OWB+PbFIef1n/4q/lmn/qcL1lVxnfKq9W3r0qUp6dPz9R17/+1NWl02ulXYv\n",
+       "BVvzxNqkNAoTo5zQI392hQ8+yq3PsLknf/ex0OZCiBrhgKxtczKQx9/14nrTfPdMPjpTNtftZIme\n",
+       "jr6egY6xC3hVsCwoJQ4Xlt1dA2vaODCwrLAp1bErGhjoKtR1DdSlel6SWVXTUCo0UViyb6TjUI6Z\n",
+       "Q0V6KiujneFE7zqjdrAXThVh7r2iTdHUXNvMJBk6bg8UjW3xdEBaxfwNr6Rqg7nLn8xNHpg67aZm\n",
+       "YeLO6rreZ7fZ7XE/VkMIoxjj0VvxGX8e4xd58Yd5sA41ZbqsNciFO28wHMsfCcpiT+dw7L3P1rzW\n",
+       "zp2+d18tzSXtoAhH9pqJyehZD+ykroTcMFvRS9a1Bdf7hRsbXafZ64qE+8uW9ZNEkt8xXdrXX+04\n",
+       "V9Zk8UVnIXUkMQ8DK6HvvkAR6ZaJMmFSBuNYM05SMWyZCRjrsHBPXZW7I4nPCWGLz4WJVOGIwQVL\n",
+       "GkbumaNZJcZ4VOaiOWp2HTux7sgbcutKbaUjHFkJU5dwYKaczXUGPc1mSu2UkIqGoomZjhO3PR+b\n",
+       "2nlLzMfOwp5aHGjWZ+rh3c4dJLJ0YNqt2e9+p/yN5/jwwO2znBfnfPykmm6KtRAefoTuo9wak/0G\n",
+       "s+OqN9y6Q97hjee5l1c6/T82CEENfxb/2dt9LDHaC5VR73nfwuYtXwkZmS3U0nkI4dU3d8KLWPBv\n",
+       "ao3Bl0OVgXHlR/j+e3QX+SYb4yqbpv8DjG/w8PXM0ms1vc6r7j52XpyuqvdGeo0DYlMMA1UVZFNF\n",
+       "F6JKJb+jEqh9RqXoOMDIFevWZRJEc3NNt23I1cwMvOKeTVPvMfW6qoZyXeUX9JCqZH2oepkrOE2o\n",
+       "T6nVmKdcjNxOqp+rqeZ1rofKWLIc8cg9WjXa6zTO0Xq9et+7Oxw/xvSdbG3xnhe4ekR/hbvfxZWP\n",
+       "cPEa6d/iR15n59Yis/Uyv/VTIYR/8FZN38QY90MI/wMr13jwRZb7bB5VMyl1ldvu6+sD/tmJ4uEL\n",
+       "Rk9wvFpI21NluS8dc/qv8eyMR3Z5V1ute1vo7Kq3Uq0wk8aJ5TAzayxrDArrN2+Le3OPT5edjgbe\n",
+       "ePSckJxHaSgoZRI7on11fW01a1SeFWibSvWs23ReU6EvF2zak7iGqcxtmab7tGUShcaizH9mojR1\n",
+       "Kjq2ZappSb0I0uW2cWjaLc6Jk0PD1k0vmVhCqlAqnCtP9EOX5NC8ccLBBdOjGw7O5WY7LIeBy796\n",
+       "7Onve9jxZ+4Xn16h/9uS77xheZ1H/xK9UDkWH/3cW5VbVGm+Gj/Hz/849ydDs/qZw637DX73Mm88\n",
+       "Kcxya+ktCXYGc51PBr98re2gmGtNSvN0btKPHk3rttZSm1lqujlzFO66q22pM1dfbzuOdfN0Znk+\n",
+       "MVtqu3gytzUZOTefmpc4G9tqly4vV0S/G1lLqpmlrtKJaBA7boXLWuWpJBmalBtiiPKwp+5UQ9eq\n",
+       "XXnYFcwNLOl7SLFwWR6Koi1dZ+4s5m8um3hoIWWuSWyKmub6VmwamjgztW+qcMlYC3uhcLfB2Xau\n",
+       "npyomwtloh6jZpKiZjdsOZy3NaZz7eE9l8e3TBqlzclFt7KGLJ2YtzrKWqLe65LtsPIBDt6oohce\n",
+       "/DhXQwj/sMv5rcVU0126TVrL1cfXPObRFynu8b8vxrL/OOF78dkYv2kW/zdFrN8sx/NV4yshI9MQ\n",
+       "Qnvh4fBtbz4YQlj1JQyKvsWwzXb980TkTdx/QOMxug/xQzcmDmYjb3zvmfXTnrutxM1LK8r0IVk+\n",
+       "UqR7YhJUNYi+6qSuqEjJTFXHOI9dbeuLocypuplCYsVc4sxUKrdkZsOufSeLZ2+oTnNbRXHSxSvd\n",
+       "CdX/JUUV9ne7wUFgcyFoPcZprOzs+6F67iNnZKscZYQZ83dyc5fGkN4HK0ldfTGZEx/h7hXat1kZ\n",
+       "c3CJ7DwXT6p39qag7dE9bl1l9z5VXthbgkpkfOl1Vuesj7l3rmX3ocS8M7UnZy3G+FQSwv/Yd/Z3\n",
+       "OqbvzDWHJ7LegN9h/3+JMU5DCDd/z9LfvGSr09GKTeVsoFUb6oTMZlGal0f26uQrc088nThXC0K+\n",
+       "4sb0AdPmZVkIUs8r9RdTMnMrmi4Z6st1F9ZkjCQSOwoENRO5JoJtfXfNpUY6trSlCrlCgkRTas3M\n",
+       "sbm6rh3R1DJpKaa08hPLzZlitmKt3DZLbnpesCLaKodaZl7VkybLOrXLxr2h2Do1rkfd+7j1Q6Wb\n",
+       "s9ed/WYUJ6d093jktu2dkb/w62yPqmvw6fv56E/g/3rrPufpp0MIr/LqfT1+d4/vuY/TnKRr/MIn\n",
+       "3f6uE9df4zefYK8+s/eLp0avrTjbmXJn7Pp38IG9udd+ciKULaFIrGW5oyxq1gppOLGmaq4+OB06\n",
+       "ujDx7FapaEQPFiPHgaIbLWVksRIft3M6RTUavxtoxGgcNs2LsZN6z4ZXjMMtRWCi0LDhYbt6Mh0r\n",
+       "nrdmTQ23FRrWlJrGhpZM7Qh2FRIdqUS+SLyZ6pibmGBNqqXhxETfmpkLRdRLKu+i04SNYuQ49O2H\n",
+       "rtYk167NNQMnTm3GmWV9N+pD/XrTvY1UUtQq19U0N84ok6bkpBQmgXaD0VVOb7B1kd9/vHJHfHzI\n",
+       "3hH1Eq/z7vfSX+fgHmubPPsgJ7/C9/hjkJ3yRfgJ/NzbfRBfgDdFrL/2dh/I14qvhIx8z5us9oty\n",
+       "SjL8tW/IUb11mDH5EvNlvQbjNg/MyMpoY/dE/7lU2Ji5OKm7t/aYcj40q9ckSalwqhrN3VPtnTKV\n",
+       "euuzqv36QN1sYWI0k5qoOZGqOyco1E2klg0MtO2rWt5RtfvaVIWXliwEbtUrDFUju4ctTgpWZqzW\n",
+       "uZNVEtkHF88vYuW6emOT9SNGE5KbLO9y5wM0Rqx2OexyeI7OtLrxduskqwzXGR5w7yrdLV67zCiy\n",
+       "9Crve4HN0ucno95CHH2Up34suBa37L17XTbM9dPgoMRPhBCOcbZv6zO055RtBjc4+sefDwHcefim\n",
+       "+5/p2/u+jrmeaRhKYrRVlLIYhSxqTiiXV8TvXHKQjYw6q1qqocwYCdakbqiHpvoi3n3mwLq5hlMD\n",
+       "dcdKURBMZHITpZZD02r5UBpZUxiamqtLBE2FqbEzUz2p0hzBfHFNCWvCrFJ4NkOUZJlaXNGMmYuh\n",
+       "0FMR2rm5lRjc15+azp+xe7HUnp939XduibXC5susNmYmuy/b/5mX2WDzJ/mJ5yq/MapK2xN3eOWh\n",
+       "EMLmWzn6HWPsqXI/ng4hPHPCn65zec7tPZP/Ojr4UVprwcVuS/KDy3ZPVwyf36NPY8LWcaH3kUL8\n",
+       "9pTtVJJFaVIaFIlJSD0YCo3FmtuOBU1ulpTzaKXOSp1xwixW+qwrOULiwrz0ap1WQhELu8nApqAV\n",
+       "Dxw5r7SuaeLAmZ4TazFTk2qGQs9AzaprOuoigg0DUxO3lOqCiXLh11u5rTacSc3NYmEQpkpTbYlL\n",
+       "scqxe90izRAG0Y8AACAASURBVCqptkWX4x0xFnZbS6Yl9XgmFgc2sJROZcmqk8kFjV5iUB+Zz3It\n",
+       "R6bNpjzNTGuFs+WXhHiokZSSa0OjWUk4R7/LtT1+/gUGHS7V6Kxz0qNzyvwqd9pMV7gcQuh8MyaB\n",
+       "fy0IQYofV03SfLPgM6pqzbcsviwZ+cPKa4ub0TeVF8VXi6oMvHOTF3Y4f8atde5e4ORh2qeZl757\n",
+       "1SunNfXBQGvUl85SJ5t1/dpnleE+kvtUMysjVVjDJu6qoWUq6GlLZOZmTk2UJhIdPfFzI3vRSGpt\n",
+       "Ec1VN1Kq2jID1Uc0W/x9qKqGHKoWwluqxXCc0JrSKTnLkfB9JSsFo5RXSt6osxw5KTk64b2/VolW\n",
+       "f/0naLyP+8/YazCr8cCocj8pamyVlRj+uXeQDviOCedPKvO4Zx/iycjxUMV6vmFYiGavqkjPqUow\n",
+       "8yk+fbHp1f+0bWs0M2q27fa/V/+Tg6pM/CNHrqzxbQ3yM5xV5/HTP1W1eQy4cq30oeeOvHr1yGzO\n",
+       "6Q7TJW5k0WpRZf2cSnS724ZnF2wdH5IlYlKqxSNzXWKilixZ87Io0zVQoC6aG0gNtHQUpu7oOWek\n",
+       "riZVSs2NVI4zlQFuz5m6VXVNhcLQxGxhdsWxoTUdbblTuVk9CiEaqelkUSjHYqiyod+M0ZuWdPpN\n",
+       "DUuyF+dWl3Lb9RPTrYmlnNojnItsP87JizHOfjGEK1lVcfpCJBbuxR1f43e/ao26pOLod2KM0y/z\n",
+       "lD+AGOPrqlV3EYS59bd5/zL1tU1nF7c9enNsvX3B062HzS79mr0rc7sH7LwWfPrPzLVqc7GW6sXE\n",
+       "blKzHEeaolbkuMlywXJa5Te9XOeJKefH9Nu8ErkdWKoxSpuSGI3zqdNYOkn2q++hxHJMhPCanhtO\n",
+       "BTVRq0ysqdkNqZaRvrkLxhqiVCKKChMdA8sSm2YGi6m5gYqQjBVeNbUaS9OycBbqapEk1BwqjMpo\n",
+       "syzVa1wrgzyWNsMbbieZF5K6pKSlcD1JXc6Dk6Rwt9x3rGtYFl5aLbXzl81r58yT8/KsL5hYEqyM\n",
+       "7tleO3JYvuH2VirmY45ijKMQwv825i+vs7IQhpyc4zNtpjkhtxgz/OODD+FejF8ytfjtwjP4W2/3\n",
+       "QfxR8Mc8KO8rwf4/51f/K1Z/nPoGyx3O9zvubV7WP7ciO9dWvPy646uZ/P7zsnTVehn1s5Gp38ap\n",
+       "JWMr+nK3nWlb0ZAZaGm4rpA78pKR81J3FS4obChEE7d1rQgLL8boUF9lVVXZr9zAU6pEi5dUN6Zm\n",
+       "/Hxo+DjQOmOwxN2iuok+UlQ31TSQFtw/YNzgTo/6x/gPP16VmucJjRkrr7F8ynLG3cdop9TGHOa8\n",
+       "lnEw42DMX/xlTj9YmRetDHn4jF95D3d+1ucNY7/uCCGssP1XuXaezche4MYbHP4/9D/xgP53POpO\n",
+       "v8XsCicpcZ1RED7AWsrpVqWlgdsp7YzOo9W5GV/nt29WLacLxxxusZbTSxbVqIzQqWk6c7jdUs9O\n",
+       "FBusxBUxZMjNkr7EiWjmgsy6NamZqbETU5mpq6Y+I+orrIk6qlW4oyKWM8FwMRTcd2BXY2EDnztd\n",
+       "5Pi25F4U3RN0bRuFNbnMyGSRZlRqJIeysuKGA9yIPNxj6+zAUTHXHEyUO1ONpJC0efg1OosNx5WE\n",
+       "2Z8JITzL9uvc3OG+LzAHnKYL4+KvSciahXD/ZT58iUYg3GaWhvC1jN2/iUfY+FGujhruXV7SSaem\n",
+       "DzWtv37Hyn0fdvhLn3G8edOvvpMr51jTND2ZuZE1rIaJnbTvrmAWKruMcU4tZBqxLoltjfmp5xq5\n",
+       "k8qN341FdeSVGu041U9aJsWS9nzF+vi2s/WuuQv2QgMzmV0dx0o1B5bE0LIacwdxYCNJZM4wl2sK\n",
+       "RoIB5hqmVlXX7IuBeaQp2Itb9mJdOp7armWuyNXD1Ena1CzH0qQ0TxP3lVEjieahtJus2V8okCZh\n",
+       "pEjqsnIsTyJxKssaht1zyvFFjeOxSXHbbPtEMjxRmyzbDlvS1sB45bPG065HnvmM0beljp7q8ftU\n",
+       "mzr8g/UQpmPue0910wLPcOGUp79a0vlNjm+2Fg1VmsjDIchi/NYkfn9CRlirxODvHHLW5eFxZn95\n",
+       "VW/9mvnpzGh11+y9Y9v5wybFskZeo5ioL3PgVOKiq0aaeoIjQycL82ZWFtkT0cxlPGziOUvuOrMv\n",
+       "WDN1VV+htfAO6uub2sa3q0rCmar5M1r82cL5RWvpesFJ5PU6o4Kl06q91Et5o1aZthVJRUSWzpjP\n",
+       "eN+zFREZZ/zedcar3P/JqsjTneCEkyY3VmnfIHmJC/eYXa0qIo2Pcu+xKmAvlpVz59EvfFEL7+uM\n",
+       "zR/lQ5s8/gXjgU9e4aM/xOm/zZk+wu5iBtSU2l22x7Jt2suVdXWtrOzx6+9mfIGLIx4bM2/TfyQz\n",
+       "HATPlXODWCU2P6GqLIgch9wnAtPyloNLmcdmpWnxoufipsI6BmLY04gTq2FsVaqnVF8MfG/HTc9L\n",
+       "nF/YXu2GvtxYG7maJ+QK0ackLittloVRMpJ507FmyZpEZux+pSPnHdpCR8TIiprPGhhqx6GLRZX6\n",
+       "/jzONam3SD4y8x2f3nfzIkcFvUvcd1gRkagyJd2r89AJx+9j/zf5xH9SFTCuHHPQ4d8+zK0nVdv0\n",
+       "wVfzCYYQutf5q3+2YoYHcEzrV/nJLxn/+hVh9Qeq87Nzkrp3pWZpQDObO7lYSvZ6LLc0HqJ+i9tX\n",
+       "o0kWTcM57f7EdnNEmjtuVsW2DgZJzc0y1dOxpmW9ziwMlOXEMiZpNVx0QSYJmULulbDkhXyumWaa\n",
+       "zptra2JZS+ayobkbZs6XwSSM3UxKwzjyxIi9epBmA8vq0jLRT2amZkoVOZoF3q3yO9oVPRL68ryu\n",
+       "0dxwbm8idvuS9twVuRtZZlYW0liaBDpJw71w1Z5Nc3WpzDy5aerQZ8LY+/NoNK+Rn9eslTQpR6W0\n",
+       "cV5t2hJHQ50Jm+OBeWuqXClNylNpra/bmDr65YWH0+dwws9+gp/a5+oW5R7hNW6ffAvrGL4YoXKc\n",
+       "/LBvrhaNGI1CcFuV1/jc2308Xwv+hIzo/AUeb/PwXV5YTy03UwfzdePuujhtCme7alsdeWNL7WRC\n",
+       "IxcbiSzQXtSrozUsiY4kcpsmDlRkoq0ax72JXVOrCqXSrlLAgaFNw0WsVdXr/ZAgiJ+Tvq6odrob\n",
+       "qgpJrtpNz1PKlEFK/YTuiKLNUSQrudOoKhyP3uH1NtMpv/M+9l7j4NuoL3Eu4+Q+XmnRukq5UWXb\n",
+       "5Dd55//L8oR/dZHRKxVJWT9j/WPMMkYZn1jxJXJ+vl6ojNceepjHvsjn4l33eObbOP2VfV5+hutP\n",
+       "cPeGcPWupcdvq60NhVAZQt5uEtNq0rC5WWlcmgUPDWoGrehO2tDLg6MsOmhWtnUDlUanDExCYcOp\n",
+       "e50NO0Vbfz7Qr5+5nB/rFDX3mnUHMTGJiXsht2eGUlTtxu+FibbMUjx1GOqiDWsOtU1FpYmgJS5q\n",
+       "ZSuGyUxp6qqgJTOVmpojFXRNbC/GOpctK6V65lqCU12ZPMu9cj+1Q5ae5OgSZnzkw9TOMWxVE1dZ\n",
+       "QnvInfvYXaK5y8EHKJv457z0M5x+P+ExwgOc2+d7r3Hnb4ew+inOfukrNcJr8I5HSbcqRg3WGb+D\n",
+       "8mshIyGEBtd2yE/p13KtwcysWVebTY1XtwxefFXj/We2E37waT69w/f2Jl56/9jd5a7BcOh0PdiO\n",
+       "HIeoENRDzXZSeioWrsS+YVaoR0YJd0uaJTuBSUjFsimJNRdk7tUT9yZtm1oLp5lK21XI5LrmsWee\n",
+       "ZG6FqAzHdhaj+IM86mW5mnwhdq00H2/6DG6r7PRmi8zmkzi1nI5dMhLOJ2ahLpQ1RZxIlUJJqFUt\n",
+       "1ruh69Q6OjLRxKo0EkPpNAw9V5u6f3fm9sauWXaibmC61lCEVVmxpJ6eKLqFXN3yQTCbTIRXex78\n",
+       "bd5Y9SUEqQvLh3+4V43/vdlOff0bu1F5y/Ef4MUYvfJ2H8iXwO+rhkz+hIx8a2L50SpNsugwWi9M\n",
+       "2i33UkI8o32mVp+ISVLJyVq5ELpirRRMpZqimtREIlVakhstQtBSj8WpVoiaKlJyR5U4U60Npang\n",
+       "wsKNImOhNWGw8HAMKiHqNdXNqada95s+H7HVUA0RPzbjhTXWs0qsuRJp1dmt8RvbpE/yY7/Crz7O\n",
+       "U+/jnRmzUyYN5o9WGTbXn2PlFZ5/B2eR39/gaMK9n6uM0z72F/nO3Wr8eZjxifMc/mKM8RsZElWr\n",
+       "3mfyRQ83CmoJsiP+xb/lwy/yBCvv7lsbHrr0FOMV4sNk19m6yfEWZy3GJRvDxLRbV5sWurGuP62r\n",
+       "J21JsS+oJpLGSUX6uoFTc+frh5rlqrNkbCOZeXBMo8hdqJdeShODkNqSqimdqdRELcGWxIr6Ygpm\n",
+       "7Lamqu4xUYhqi2vjRFupaVNNy8xARM1UlMiUogNddBeuvEyVGpaU2gpNN4xsx+A9Meok3HtP1Z47\n",
+       "+xCPdVgZ8M7X2K/zew/zRsrlMe9+hvND7qwz3aH3RIzzp0II/wfn/jY/vM+1RcumCHzkO/jkHV9h\n",
+       "uGWDlSX/bphYZzEW+jWgRMHFp/i9b585NzjUX10xaWZec8Ho8LNW3n/sgSfZGpP0eeN6YW144HR1\n",
+       "ZJrOhVhTn0a1ciYIxrWOGOZW5lMHtcS5SCzndhOGSfWd62GjnKlJzENpHGmHJUlgEqvKJQuX9EBf\n",
+       "Q6YtU9O15VLZ0gs3LddYCVXVc1cV07CqWsO7qs3HFMuifdFYbi1UYQErWhohkYgO04ZpHImCh5JM\n",
+       "Qy5bRBXsioK5qbmJnpU4MVeYxJr+dGp/febBGEzKKC327JdL7mqJw4KybrlfmC3PjPJD49jzxNPc\n",
+       "bXH48h/mK7Qgp2/ZVN3bgJ/2JdzCv0nwpMod/Z+83QfyteBPyIj8LgeP02qxM+KNZU5rd4SsrVls\n",
+       "6+SbRqNblpt3HLXOSfIq6qyM0TAcauuoOxQWS8iKuVeVthfWV6cqm/h1lW5vf9EPrh6LVqRWpYLc\n",
+       "qWhLarzwSzwWlarkmhuq+/aOz5ORvkXMVqg0IlmXy5HkmHmjqqCsRl4PXHi2cmcNj3B9pSIXV7eZ\n",
+       "ZvSWKwHj/sPMX2DtozzQ46NPMv+lN/u9ITRK7n0/rU0mQ45+gfHvfoM/oFOOz9hbYucLWgM3Vunf\n",
+       "WYycw/8aQvc/5n0zHr3F+ohf/FEaR6RN9rv0a5Ux3FHgQp4o0yCdEQVlK5OVNaGWmcRcL1Tnq6si\n",
+       "hCHyRMy9nhxS4+KApRGT9agWCheUjhTOISyi75bwmtIDaubmBqGpWRbOJRNvmC9ISHRXFLSdSk0U\n",
+       "OjJNHXtGXhRcUjiTLOzk13SdGljWkBujNJfpmYvyMLYWg6JFMatM8GqNqnX/2O9Syz+XGmJ3k7M1\n",
+       "3v98VUnbW+NswHufY+9DKrHSJS6vc+0LKlNp5PF9Xv2gr5CM9Lhxm+9+9Isev1Ox9K8aMcZ5COuf\n",
+       "IX8XD/wWN64OnPVyd3c6Dke/y/jA1oulD73AnUtsbHJ3uUqESsNQNmU+5uo9lhvsr0WDbCgGpo01\n",
+       "V/ulfjZw0giuljWtydydTnU9lKLGbC6tzYyTmbPQkiVjoRjaz5Z1VaRiiIGJlVCax5ZaDEKyigP9\n",
+       "OPY+FdGeqCbf7qrkOEG1KXlOVVHdlrshuIxCYmymq6mxUIa+LlgKQRJzIaQuxOjUzJqxV000DFzW\n",
+       "1E3mZmXfII6dohVZ7g8kcYPYdDWOzLNDu+muLHY1JzW95rHdnVPdT1bHcmePw5//Wj6zb3WE4Br+\n",
+       "FP6jt/lQ/jD8Hn7s7T6IrxV/Qkac/iZvfLhy007m9OLESla6GF9yKzlwnGayOHJaHmvEuVF9JphX\n",
+       "I5ixpowzeRLVDcwlxi4s5h96boSgZaSlujE1VMLUM9VN6AGcyDU0nGpU5VrBifA5AfoWPr34+euq\n",
+       "9kFP5TVyqrppbQR+5zxipQE4XuPcoNoF1+dVv//un2b2aXaaFXm5Grl2yivLlYvr+UNOWzSe5L47\n",
+       "dLo834lx93PCs4Xnw1MqJjR9K8qvMcaycn39yE/x7g4bffZX+HTC3j/9gz/dwQN32Rxx3KoWjuY9\n",
+       "PnvfQrQ6qM7NqOTmamGtKMQ0Ogs1I4les6kxr/wj2ioy0VZViZYE18roOOFuTtjn8HwlEu6hE6Mi\n",
+       "RF3VYv/mGntkZiOW9kNpXIzEhJqJscJMa1H1mntJZjmWdkT9EBy7YlVPMPe83NhlcysypTUHi6mr\n",
+       "GsZSJ1qOrBlJYzV2fRh5tcXwiPuG1HaqqIOlEdM0c+9cMFmK8lrhuY1o6Zjmy1x7nVCQrS/eQH2h\n",
+       "nfkidGakX8049ysv88YSVx+tFMheZvv5P5JJ08mv8/ELPHiOnSOK04nykxP5/4kz8v+csyZnT/DO\n",
+       "A2aHPH2JO4NKrJ73GZa0BuQFzdbIwWoqjyPt/alxd+5SLXV+3pT0zuy1uJVyoWCe54ZJ6tVQV05X\n",
+       "JPv3pJs3JUs7jkPHUagIIlEa6w50dEPQF5RS+Fw1dIIVNXNRqjRTmqlIyYrqetoUzaS6Gm7KHZlo\n",
+       "Ss2VjjV0jZWhGgWexNRSmFtzJEqsartkpnQsCyOdaSGUtIqaWE6kcc8kb0gauaV4LOlNvPfJI9NN\n",
+       "zr3Bc/8/e+8dZNl13/l9fje9+3LqHKYnDwYY5EyAQSQhElyJpiRLstamd9cluXbX5XK5LIdSuVxb\n",
+       "Lru89m6tg1SSVqpd2YqrsF6LUaIYAIIAiTAIg8nTM53T63453Xz8x3mDGYCUCJAABgPqV/Vquvul\n",
+       "O/fce87v/H7f0IWlf4bG+qwrpW5ayfEfMv5r4LeUemt4qXcxXgTuEsFUiveCj9hbihuWjIjI48C/\n",
+       "APaUUh+8UccB0Tp0T8OLs8ACHOnE1EQoGDHCOtspG2Nokdnz6RXX2cv5REaZfFQmg6JnbbNIkxQR\n",
+       "ittIqCLJGQaSI02HUPpEcY/ADAlGiiR5dJJxNUHx8Elj4Y1KsrvEzKIooOUVriYxndHPe+hE5PDo\n",
+       "oUa/Py0aZGeYkGtCVNLeHmYAVRPW74epAbSKUB25JLsRdAQiE8q7unIgQCcN/nexJkYT0btqmqZU\n",
+       "fElEfgNqD4I7A4OXofWdNwLooHUR1h/TbbdUpJO5w5vwF4dgMoY4gXQfDjtQsxVPpn1cM0d7I2a3\n",
+       "0MXNNkjbEVMBLLv6nNeBtMBcYhBHin1Rwnlg7wxgQ2YSchYMRFdPMsIohdDzQR/FZNQiMFNEiaJu\n",
+       "BMRABxeXNC0SFIoFFCiDKXFwEFqEtMkxy8ZrbBt75Ai8qzJkxcPgCuO0uBWdBFWANYEGihMD7cz8\n",
+       "ogXtSZjIQOMO2B2kaFuTOF5E6Ed0Y4cwqBG3Aw6c12N/YQIGV0vt27Cp9HXlXjfBXRmD7otvfgxV\n",
+       "JCK/9xQ8dBruEzCb8ERfG2b+2g92XaiuduXeOaTHwWtAcvG6St4X4Cv/CRzNw9CDvQwMd+HDT0F/\n",
+       "DL7603Cyr8X+preh5cLqfMzkWpPjNbg4BbmDJn5Ke9y4vk7uL6a01UJ2N6YqA/LOZbpGSMUIqbZX\n",
+       "OFdwUYbCVGk84yANMbCkjUWfWPlEDMigGcpL6I7TKgZZYsIRAi2N4CMcJxnpNxsEGAgW85j4BKxi\n",
+       "UadIKDWyKBwxiJOEbcMml0SEbGMaLlUG6LuhSyFMSEfQsWz23Cqhl8JtDLBUAysX48aw8ALc/3V9\n",
+       "lteLcHaI3hFlgTkR6dwAT6obGiJMAb8AvLG4954JpWiLsAncwk2IG7mRlZFvoykLP4zz79sRfcie\n",
+       "AXcd8hGUmgo73efy3SYTaeFg4NOOB/gluJBNKGDSTwxsc4BiQEFtk0NhqCo75InI4SRZ6tLCNIS8\n",
+       "KqAkoU9AS/mkJXxNuGwRnZgcAHJE7KD7xjbQUVoNdT96Z3pB6V342Oj3KrqnPADyid5BTSh4wYQ7\n",
+       "hhA6MOjBlRLEISQVzaqYuQANEy7OwJGa/j+fN3SZ3m5pumsrBacdaL3pxeadDqXUJvB9ysPDl+Dl\n",
+       "B8CZgcO7QA2euhemTbh3V9OYa3lYSul59VKQEDQHxBMDZu2YhbQi5cOOpam+KQdmRKgmwl6saMcG\n",
+       "UQidTsKlSJf8pyIQBxYF9iuDRBQxBiGKZRI6Ck4ZignlERh6fPvYPKpiYnqcFQdT2eyTkBomXhKS\n",
+       "Mi0q1KkT0qHPGMIBLrPDPLHKEqiItnRIqQ53oDikwDB0jmiPzsVuDJggOTg2gM1AJ6cF26TrBOy0\n",
+       "8uz4Y7Se32J9LMfEQoPty5qJ9awJ9SdG570nkvsr+KtPwe0tDWheL8NJH9pPv8Ux9IAnRo/XQuR7\n",
+       "6A6++c8M0b4n3+V9Mqrk/S4E/50Wbc0swt3LGvPUDSD+GsQNkCKspKA2hJ1/CeFPw6lxvXzHtZg7\n",
+       "FmM2yxqLtYBmno3vwnwAmwcifC/iqA/VXdiuZpiVKnNKSFTAjuqzTcxQOlSASeliqoRkpHQ8h65u\n",
+       "rhGQxsRVikBEN36TBAx9b6dG0ngOUMTFxEJRQACLgE1sskmKiuoxxKduCBuJsD/sUzEHZEUxBKYS\n",
+       "yAQGNcuiNxhQlDSGX8YNAtYLbZYFblvRlcVaAV50YOv/hcLjMP6Qnn+aIlJ+CVpfUEr9oJifmy3+\n",
+       "S+APlWLnRh/I94mruJG/TUbebCilWvDDTURvU2zD9jpkq1BuwkQDFquQTmKm+lqzp5GCjAMTmMyq\n",
+       "Bn2jw+UkwjZ8yniMRS5GNwXmCo30Fn7UY2hPsjSsk01HWFIlibcZNy0OEZJFJyEDNCDVQFc8BJ3U\n",
+       "9oDLAh9Q0BZdHb1NdHvnKqsyh154VoBKpGXhrRjqu3AmBytFyK6A2oVOCcZDaAXQzsLhZWgcghUL\n",
+       "3BCcS3ChqcGs2bQG1G3+kVJq690ejB8mRjvl34avPwLP3wXheRguwN1F6FjgWeD3oRJr09/4HEwR\n",
+       "EU7CtgXTsYVyDTyV0DYjptALUl8lVGPhnCmcN1IM1ocYM7AeQMMQpkJFxhS2LUVfCZYY9FEYJIwL\n",
+       "bBi6uqAMmE1gUtmURWup7sfnkrhYCqrKx4w8uqZJmgSLhL4SJiXFjAqoskXbN6hbPQYyJCuwLwYj\n",
+       "0dihIfrfrg+dADpZcJRmwQTAphic2ycEgcl6/CCdLxyE5VOcn3+RjfsbnJ+D6JtQf1IptXPtvPae\n",
+       "EpEarD+kcUmdb0P32av38Hs8XoD+k3Awcw2AC/DKNHT/FHZehsX96N3B8ogR8mXw/x5MLgAnIByD\n",
+       "idOwsAbnfxx2NmD/q3A5DxdKMN+DaFLTxDtpm7wRsIdNVtnskwZ51WdTKcrikUa9dv8eQY/LIcBF\n",
+       "8QoKTyyqSUxFGZhRQtPWVbey6GsxIuQiERHOyDZvQAVhSMyaDOhjYicRkaErq4lp0hGhqiLyol13\n",
+       "mwWL/rCCvNRh7WCHaMwl8IvU6x7dP/HxL8OZaRiehcYzkDkKdz8Kj65qinws8Ow98GwIfP7dHc53\n",
+       "P0QoA7/IdXYo7+E4CdwL/D83+kDeavzIY0ZGLrB/DFERuic0NmO1CpNLWkiwX9VAL8eAshhsJhnG\n",
+       "E49NI01fpamLR0d1mXW2MWUfxSTFThwSB9o4r5dMQrJCWpnkkz32TBiIT0MScugk5CB6LjTQQ1JH\n",
+       "76DXBMJYszr2oVsATXRZt4ve3WcD3fNuhhB7cO/XYPkADMYhdQ7Sdah8GPbycMdlMMdgbx/0QjiT\n",
+       "h8oSRKdgexs2voLOfLZv1h3PqHz8pdEDkemsTt5iH8oDnZilU7BqQKoD0QNwaxfCso3nWaQs2HJt\n",
+       "MuTxgy5dy2DQgEQMBrbLIOwxndYLSK+g/94kYSiKtjIYw6SsIMDExuQ4PssJdENYSGBcwHECDBxM\n",
+       "DMYl4iwRbeUxnQgpBT0vZsu22cIkFOEgBnFiY5MwqYbMi08auJLAaYH95jUXpPUI+kOt0C0GpBN4\n",
+       "uQB3vAT3dIQXggr1YZWNxcMjobq7UGt7dFil8/tKqQt/zXm9gFbdu6lidH9/3eXPfjlFfsKj2POR\n",
+       "DaidhMEzo+v85Te8pwn8HyIyBczCzj1QWQDy0PxTGNZgtwi9JniLMPtTQFZX3Vw7YFpLjLEraZQy\n",
+       "sVFMSsBkrNgfwauWFu9TwKKp8T05LI4IvICQiU1igV4E+T7k8lpozUTTfRWKHD7z+PjYuBjMqJCh\n",
+       "pLg1CmkZBsPEQMWKtmlQjmMuABOinbs3sVFhlX2bGVptmxe8EwzOrMDsNuzA3hevSreLiAnVX4QT\n",
+       "DVg/oqtrVhtuWYfL94vI164Dkb9f4z8DPq/UNTG393C8gBZlu+niHU9GRGQSeAPQkG2l1C+8iff+\n",
+       "k+t+fUIp9cTbeGjXhw2Nz0G3BdFdUKxB75CmjxYHMDWEJAdxLmTJdDmrDqBUEUVCxCohGbbjDuXk\n",
+       "HM2URc4U5uxNQsulEaYIgz5OWpENTNLmOAXq7Fp9BqY2sUtxDah2VeRMi1mBZeodUW/0tyCBgwlc\n",
+       "MsEWrYMR2KA6EPTg+BXY6cHlLrRKwATYRfjoKc0AUbswl4UXp+GFbTj/P6NLLstvhqI7kvOObx7t\n",
+       "AKlB+WW4eAz2OdApg7JhZwj798BzwO1CVqAd5amZRfyoT8ou4XSKDJ1t+l4OiR2SQppUOEnn1nVe\n",
+       "cfrMBtCTmHHLYiY22EOYVgnbhklKwaREOEAp0YycrgX7Q+gEEetuRCC6ktFWQ52AmgrlaZxPV0LG\n",
+       "PWjlIMZFcEm8PmZiYioTP5VQSiBK4EVTXzu9RLfuTjiQG2GBkhDKPdjNwEQnptBssVY4TO81PNAS\n",
+       "VK5oQNKVGzJE72CIyIGj8PN30Fuy6W012CpdBFmCp9+YcItIBV16rCulPKXUNrpMeFJE0vCaW7kD\n",
+       "zidh4gFwx3WiL45B1nWZQ6iqlFaEEY8rYpJNInrETPrgC5BoMULlQ2YAUQ6UFREY4CQmvSRhPFJ4\n",
+       "pqahbyRaqK5gwIbSlPbDaOzvGiE2QkoUfRXRNxJsUXQlpqOETJDQEpNqoKi7iqKCmXrEruPhl4f0\n",
+       "Ugk+UX79WwAAIABJREFUTzH+8Q7VTMD4ENb3iTh/olRwFkiBXYG9g1Ad2U4MJzROJ3WFa1LQ78sQ\n",
+       "IYuWWf/IDT6UNxsvAXfejEqs73gyMir3/tgP+N5/8vYezetDRPIl+Knb4OgUJE1CY4nzy02yG1CY\n",
+       "gUfXIeOCNwu+gi1lUzTS9KSAUg6hZEgnMYjCjVr0nYhJK0UmjnGHTcyhxVz0ImeqKTzVomULsddm\n",
+       "LecxJXCngiujNoyFFjs6h164ZtHVEQ896Vy14JtREIpWPz0zEq4SC+wMpHrwpWOw+wxUZ7WHxjCj\n",
+       "xZqsEQJe0EJXEz0wryil3hQ1U0RmYeKTcOAARIFI4TvQffK9L/O8802Yn4fjT8HONGwJFCdg7iJU\n",
+       "65pZ0XbB84Xt1DiBJIRJly16+FmDyLchlae0OKQ35lAyW9hWmik8SnbMAAhURIiFb1hIYjMfBzQM\n",
+       "kxIxLtBO4FEfvpSFyykBsamIhRCwgSInMTOBlqBvpiCOoTcyPcx5sJruYRGRNkL6TsySmdBCV8jc\n",
+       "BI7EsG7p91RNSBm67ZSygDSsjjyHZjqwobpsPn2JuDeAOR+MLdjZgT/WdFmx0Nnx4L3OmhCRIzD5\n",
+       "QbAnYLg6ai9tXPe8TMOnPwKtGV1KBGAeil+EnxCRXxtVTnJQ/Sk4fgxyCewmItm/GlVOFOgk5No3\n",
+       "Z/8DmP37kBuDyIZoDLyyzQR5nLjNltFAiYEixiFiVRSugqYNhiGoSLGrdCVzcggTIdQr0ArhoZWY\n",
+       "Qgjny3rOWS0Dy1Aeh+N5zQrb4Brw9R7AQrEOtCVEiUkqThi3IDGElnJY6AmBgrokpMOYYttnt3SZ\n",
+       "1t0uq1aBrFMi6WcYLO/Smuzw4S146udF5F8AHRjO6vbOxEhbJD/UWKTOYeD9DmT9ReBbSt0crsNK\n",
+       "0RFhBbgdnZjcNHEj2TT3Av8UOCEiXwF+8t1a2ESkVIVPTcPjB2D/PliahFfL0D4FM9+kzx7Bb8KF\n",
+       "fzSyTy9BEBtkbBffGdI3anSNDDEDlJHGTQz6tiKxAo54DVTaxVQOhuVjscNE22V9DFwrwslGHEGL\n",
+       "krVFbywKaOzHFfTG7GF0QnJ1XYjQ0t7pEVtjWTRe5ChACL1NCF+Foy/ByRkY+zjM5qCsoFcG34Bn\n",
+       "b4OHz2tH3l4aagqil7/H6fle52sCDv0SPOrBwqrGXrz8YXhhAvj9t3t83t5ITsPpL8PeR2GyA8OL\n",
+       "2ixw30UotmFzE87th51BitAYsme3yTgeGRRjqZi869BglyvZMSLpU7aFg0YaoUdkxEwrcGI4nUQo\n",
+       "X1gWYdpOiI0ID6gnULa06eDUAC4VUtyCTYAQkMclZpYWK07EB+sQGdAyNbtpcgcODOFcOeFsdYBl\n",
+       "g2nAtAcP+lq2ftmBlyKNEckYMG6NWEQ+ZBKNJznvZHh1xuC3pU/4B4rwV9ehu65Fa0L07t8UyX8M\n",
+       "9j0CaQt6bRH7L5QK35NAOJHU3XDnz8E9TRjrwuYBeOG4iPwrpdTy6GWFEozNaKToazGnfR/mtyAn\n",
+       "Ij0Y+1l4dEHbDRhoZeFv/AScbAOnX/+9MgMzvwwHsjDmQLsEwzSsGCEpuqxjUSWkgI9CWEdhKpgC\n",
+       "njGFlEqhYqHNkHRGKyY3YliyYWwXDvc0JmMiAfHB97RmUAwshbDfABGtH/MA1+aHfegW85okPCog\n",
+       "gVAME76ejtlJFZjaMdnX2KWRgq/OgGkk2LUKMp6jEJn4FBk6E1j507x0osvRFVg+onFVRh2u5CDt\n",
+       "QsWDtgOXXZBNRtz3d3Swb1CMpN9/Ge3QezPF02g9lL9NRt5MKKVOAo+9W9+ne58sAMUx+MkP6zup\n",
+       "+CFY8yG3CY848I0TsHkWju0R/iHwPBRSKbYzJtaxhCBKuODO0pLDWLHCNraAGqFEBKmYglikGcM3\n",
+       "XdKASMCwXCO0PW6JtYtu04KZUTLRQRvbHR1hUi6iJ63s6KhNrt3rJTS7JiuA0gycJtpFdH4TGiU9\n",
+       "kbqzMH4QPnwK0jH4A1iqasbMhYEGspobsKugvSIit6DbZq8DI4rIAlTv1S2e3ATcZlwzTctE8IEV\n",
+       "2DkuIjMjpst7MkY72ydF5Hm4Mo5Geabgq5+G0gwM1zTbaKbWZ/ODBkbRI23Y3I2QUhF+aNGxs0w4\n",
+       "MTtGgGU4pEaf3kdXM4ompLAoYuP1hMt5k7641PEoi5AWoeZGBNi4VLAQFAZphBQJWRxWJWI1Dc5Q\n",
+       "4zzmEkgygAPHElgNwRrCtCVUTJtKqEgIcQx4KdEsqKMx1E3AGnnxKIeL1jzbUiW1XqT1ckxovQI/\n",
+       "UYffU0q95vUjUnoc7nkY7l/X47ubgaf+IxHzd5SKf3D7mHcgdKtw/lPw4S1NQV8vw2QbHgmh9Ung\n",
+       "N0cvDQMg0qXD16o8PUh1MA7B9H+jk4HCAhz+1jWV30wE9+zB+od5QzIC9uMwVRIOGSa7+YQpOyGj\n",
+       "IFIJu5KwgE0JhYFJlhA9Sno8SmJwORrH6Y4zvXeF9kSLxTRYlu50HHMMLhxyaFsK24JqP8B2FHMl\n",
+       "qIbwrMA5A90CHB1PHIMSvYmZAWpKMfDB9CBKUuS6Npelz041ICcJSQS+A7cs5blUnkUxhnIcrLhN\n",
+       "e6GOokxidRmvabG8OAF3D6pb8OIx6M9CFEF+EZwW3Hx6Fm8hPgucVerNCfu9h+Jp4BP8gJT5GxU/\n",
+       "EgBW3Que+qz2b3LGFCsnVqids/GsFMQu9D0tsjG7X5u0KKABl8/lGPzDEhNRQjeO2banaDINEpOI\n",
+       "jTCLTQ1l1HElIBMUaJopKoEiyoKRmGC5xEaPqQgwdRIyFuukIiNwQfSiptATylXfmQy6pSJcE9GK\n",
+       "0CqI08CyGmlg2BA8oNUkT6YAW+MS0qNJIuXDzBKs3gYbArlluJDXiqb3/zjkFGwgkv8m9L6qy9bu\n",
+       "A3DHZ+C2ga6knP0obA+gtQul66pXUwm8UuWHEq56d2IEsnsNgCYivw7befRkWrDY+aUSks5SVC2U\n",
+       "xAxJyGHimiEDJSRmwN4I4xGLh49PBj12ZcAkQ2EoTHshvUyBnSTPpNkhMCMakUFsdPENkxATQbDR\n",
+       "10IMRFgIJrtGzPioBL6bhvFIL1KhQl8gRSiGgp9E7OYTQgc8Q1dLCraWLPeUdmPuZ6AdZ1i2iqT7\n",
+       "BdJrDpZ5P/6KB0ef0VSO86NzUYAjD8AjK9faeeMDeKABtY+hs+T3UlQgVyrytdsnGea08o6wx/4L\n",
+       "IZm8iKSUUr5SalARefU03HrX6BqNQb6F/YkG+4fw01uwWoHde2H1ETj4JDiBbo008hBlRMR4PT7K\n",
+       "XYB8SvDSMY4DrqWn0VlgkQQTjwDBQjPlImButIk4rGJCu85OISRSKY7FWg140gClYLEIsYopk1BQ\n",
+       "BvWixSAI2TahndGblFaok5eh0nNCMdHXSGtkXYACbwiVU4rE88nYAZVpxdRFLT/vPzjCR82U6GZm\n",
+       "sRMwTME2K6SihOGBHZIJODsLppYmoXkO/GMZhv40jbAExQ57n9il1W+RPiUin78KeH2/hAgm8N8C\n",
+       "v3Sjj+UHiKeB//FGH8RbjR+JZAQmfhZ+LAfHVg0W3RKZZp3KbIZXzF1UZgIGafA7UG5Bak8DNRo5\n",
+       "+rXjLJ5ZZ/WRKqIy+MxD1CblDLAMwVB1ImmjqDMXQ6TSbIYBSWKSsRJ8K2bPyCJJj5atWTN2AruG\n",
+       "1gZRI7nx9RFt10L3gdfR+iIGeqLpcG3XNkigbkBO9AToKY2Sn3K1mdniEIyN1///C3u6GnL+23D5\n",
+       "Jch/FH68rtsWRU+D6b72UXhuW0SuwIFPwSc2tNYG6H784BCcPwQPnb32uQ3hul78zRSjikkHQETm\n",
+       "jxIdm2BirUc6FWCNmySqzh4+U0AlbtARF9/IEKkmHTVkThQ+ujR+2YQVNWSYCMOS0BUbL0m4YhlM\n",
+       "JCYBPh2JsRGG+Pi4FIEIRYzP9kjMLAO4OdhzYc8GqwR+CLURSLliQikxwFTU0Xb2pdiglihyhiIK\n",
+       "YNzRgNmqMlCGzVzSYnMwhkQZwj2ABRic1XSgq9ocJc3usNTrz9JMG+x978qAvLXwC2zeeTeTvSKF\n",
+       "JsACiVxi8cQFehfhGnCvCV96BsobsG8c1BqMXaFqeHz6SUhHMN7VuKFiGlYXYH1cg1LTaZjqgvwj\n",
+       "Efk9pVRHf2LYBT8lGlyeaP+onoI1w8ZQMCSmLQYWFllCckCCkBWwQ6iaEQY9rDzsCczaOpntKthn\n",
+       "JHQkYQ/YlZg4EgqWcMhQFBQMQ1hSsB7D0NHgdwcwQ8gF8GxO/2wuQX302vELir4LR56H0/8h5HPQ\n",
+       "caGn8mCG2JaNFSmUAaaRwXUDvBzcU4fCQ3Dpblj9/0y+Vj1E6liJQj5B0jnM9gxTWy8R/YMO/riI\n",
+       "/MublYH318TPoMF837zRB/IDxCLgijCv1OtblO/leN8nIxrvcPscHFsDSEj3fDBKTLe6LFVO004d\n",
+       "0zVupwPxaZjegT9RSoUlkSM9uP0OglQRghUorWNaFaZUD6VWEAmZVH0M0c65XZVChgV2G11UxaZv\n",
+       "ZFGJR54d9kw4KroN01FwCr0bikQzJqd8nZz0bBiKbqOURSurukq3djaAsqErIxk0zQ8Fiwa4ogWU\n",
+       "rhiw6cFYAUqj/nOtBNs7oP4YghKUboOVMqyO6V10ZVFjKJYfgW0P5s1riQhA+TL4+6C5HzirE6Zz\n",
+       "k7CyhS7V3NQxCQ9PY1spins+nf0pnDgkljSm8lCxwcCIIeoQqw77lXZxvWJoczNbQZgYHAgNekaW\n",
+       "Wy+FtOZbJIUS6cjHshzqZpmWFEnLLqbqsiIJHiYZYupE7Ko+toItBUYNdqfh4aEGoO45sGBA29SA\n",
+       "xk0zwhSdsDqi3xPFYMc6h16I9PXTVYrENMjFQ6zMgM38cYJFgCHYgS7pXI2OxiDEon1nrkYtB9EO\n",
+       "XG1z2vfA+AfAyELvDLSeUkpdp93xrkVphl7fpXpV4Q0TURMEVo1W2LjOSViLtslv1zSoogjOLfDw\n",
+       "rZAfLZyVIbhXYPV2aN0NBzyY8WALePR5uHQfnPqgyNxJaD6vvazqA5izFb6p2AJiEwST/cTsiEtJ\n",
+       "GRgCrvIRgZZSTAHLpkV2aJLEHu2MSWBVSBKLphriW30KKqEaa4fpW2I4LQrLMnHDGMfQQ/ZwC57I\n",
+       "wJU+PJ3XrRkjgR0T2i0wFHRyYKXBsqF2HFJdOP2TcKhgMG4YWIOIi5khS0YbIy4wtBx8P0GpJpHZ\n",
+       "Z64Ld17WQNVqBdwH8rwwOEzvdAtudykvO5S7YCYTdCodJg/D3jHg1XfxGnjHQgQBfgX475VCfb/X\n",
+       "v9dCKZTIa7iRNzJZ37Pxvk9G0N4a111Q47UGWwOXKGPiREfgyVNwx6a29nxlD76ilLosIqlpuO0I\n",
+       "WPthmIN+FgqvIPkeAXs4IhxWaQqqhWdARsFUrcYr2SrCFKaXENgRGW+LKAVFH17Mw21Ky4Z3Qm1a\n",
+       "tp2GRIGK9G6nh7a494BzAgcSncDsokv606KrJTHaBM9Renc0jGC8A24TFl8GdwpyBU393KnD7peB\n",
+       "HSj9IhxcgCNpKITgxXD6OHSroBzgL7/bXLXYgvNLcPko/NlB3TPeW4S9f3fzUHz/+rCglCNuDYmm\n",
+       "fWiZ7FZ6DJyEcVE0gV5ssBfFuAnc24aNgjCwwBZtZDirbGbbCUuFiJ4T0FAWJwKPhjuNGaWZSZrs\n",
+       "popsJilyxjIOEREWuySY+JQVGB2wNiDwtcppJoGcr6+JATAfwaKCFQMypr4megqMOGHS1LoxXQO8\n",
+       "EJIIainFkD6RnycOIgxjERZsaLwK7uA6hUalVEuk8jI8e5fGjNgJtFPw3DjsjBxAi5+GOx6AO3Yg\n",
+       "04Yrd8Hzt4rIb9wA8TN3kviCy8b0kM6Y4CrFQMp0ltPEtTe+eHSNLgNo6Fj3xOtf8cAZ+IssbN+l\n",
+       "pTa2+lB6Eeq3waECDC04Bmw+Bs/nIfM1xSs/LpiRwnU0wy0DGFgYScKG4VCJe+wZMR6wLxl1unwY\n",
+       "b/i0qg5bZoEZP42yFKHhkk4cBmaDbKzPv6EUFYGUitkTsCKtGrti6oJe+Tto4b1EV3UWPGjYsF2G\n",
+       "XAjFsk4mmj3YSMH+kpBN2xjDGHEMqsEeWbOArzwilSbpRrisUrQ9TuxBMirF5n2YskLycxb9VZfK\n",
+       "rkO2c/XMWYiCcgDlA7xPkhHgcXR//Is3+kB+iPjbZOQ9GDWohdB19G7IiQYce2aFk4/k6KunodCC\n",
+       "Z7bhc9fTAoGDh6Ft6JX8lhx0UxDmsaNleuIzYYLIgD4R+dDEVzF31jx6hXPsbhWpjRskQYdcts+x\n",
+       "LaiV9cThCsx5MN+HSxnttjs5onWGpmbIjIsWsaoBm4ZOKHJK73rSBliiuyNRBLYPxRg6CQx7MFyH\n",
+       "7X8G/scgdQ/E4xC2wHKAD8BCEdJZKHhafdUFjvjwah6SKrABqz3YymuPl9USnHsQZBIOXIBdG7ae\n",
+       "UMr7i3d/KN+Z6ML5PvHCEq39HofSDumeS9fdYz0NV/wszqYickOKPdjLwRCDCQwKXsIOCco2MKKQ\n",
+       "ot/h0oSBZSTkGika0ylEQaxsClGXmlkhl2RJpM8EIQKoWGtInM/A+CEYjzVAcUPBWEv7CoU29C3Y\n",
+       "NeFECFMGIFqzomaA8nSZPu7qNp6rdELbdXqYYchS2Sd9WDG24JG+NKR7DurH0RnuKJqfg2d9WLx/\n",
+       "hE3woPZvlYrP6uribffBR5avVU5u34J4FpoPAF95l4dsZxviR/Gf6eIXAnAz0F+BfAe2RXIf1dT3\n",
+       "waU33NNAchEuDGFfCfaNkqhmGho7YD0BJy7CwIWl+0HdCX4bVKArTw+twN7d0PUSqt+AvY/oyoRY\n",
+       "0LU9jCiNoxQFZ8B2EJBBJwgLoQagpnYjxk/Csw+X6VlCx46ZFQPDhCFpbGWxZYaklAGRIjJ15bSe\n",
+       "6OpXwwaxYTIB7oIrAUx5cMsKxArUPFQU7MzAeAOakZat396vFZqHKQO3pzc8LbvH9HCJVWuadLuH\n",
+       "sbWLWd1j31BriWQ8fW4CC8yhj9ltkrgWoXn1TAZEVh0jBtuD4fuJUfMrwP9yM1ZFrotvoQG4N028\n",
+       "75MRpVQgkvoi/NXPwl0dLWK2WQhZOdVk8PtNLXD0vW4kJwtxGU6t69V6nwNikKiETNghb5vkVEwq\n",
+       "NMEPcLqwWICdskd3xSP4n8DfgL3fgnMzkLehGsOqwJqjhbcObkC2q03M1mZhHsirkUqn0vS+0NSt\n",
+       "mpxo1s0ADVpLh7Bl6/fsWJD0YDmCxu8qpbZF5Otw4DDc19b28a0p+OYjkDNBdfWiWlF6F5aYukxf\n",
+       "2YHdPGz8IXz5P4aDVWg8pEXWMmfh0ItakOsrHxaRRaXU4rs6mO9QdOA7J7E+k2LOz1JIIpQbkbNt\n",
+       "jmwUuLA9zu5LS3Ae1AE4/wlI+SZmbOJ0DSw/YLcYQiGh6UKnljCfJDh9EzsZElvglSK9mmCRRAam\n",
+       "CalEJxgtgVUbjigYN3WLJRnCgQFcLkLsaOuAJNaMKuWgd21AmMB4AhdNnWxuAbshHElBNwuTDdhO\n",
+       "fCZrPicSaOSg/AxMr8BXHheRbaXURdD3CfB5EfkqGkndUUpdxV5Mwpx6fQsHYK4JhWO8y8mIUqpZ\n",
+       "FPnWN+BDd+gLtrEE1acxDvQ4WoTbDugW6KXHRHJfU6r39eveOxSR/xu+/HMwOa8Zazt92PwdGP8x\n",
+       "uDQD8R2Qn9c2C14G+hVYmYXpC3BwC/7yDpibhuIqzBc19b+RMYjbQ4ohSFl3wnbRMi4XxuBgAIkF\n",
+       "z8zCWiPFWCYDqQYrKYPJSLCNiD1TaCm4N0nIoNWXMwHEkdaqcWI4ZOgNiLMBwyoc2wHjomYFGQ/B\n",
+       "8QBUBjJD2FfXQNRsExqi8FMRO2mDQWhqbAk9zHARqxNx8DsQ3QbteUht6+QrsDQ7a28Y0f3zM6hH\n",
+       "p6kdLWFFISKrxFadw2fgYgKD90VVRIQPoqlKf3qjj+WHjBeABREmlOK7qoXvxXjfJyMASvkvikgT\n",
+       "tj6grcP7J0eur3t/w9s2F6Fahv13watdyPdgooW/v4bn2vRiixiFZw9p20oD1RIodeDoGTj/MNQL\n",
+       "OlmYNmDa12C5joLzJjSykN/Urrk7s9o7ZUHAE10BUWhMQFHBmoJjgQbcLRmarluyoSfwpAvNGAZ9\n",
+       "GP4WBCOviOpn4NZxKDT15xxogG/Bq4/B9B44e7AzBokNvR6UzsOOB3hKqboWPFr9ONw3CfMXodLU\n",
+       "a2A6gRMd7QDMTZ+MiEgVcJqMbVrsN7pEYwlqxSPfcTAij065Rv1zwFegpSD4CSh/JqbxmEUQmSSB\n",
+       "4EQhZlYrah4xYDkNhWSAiicoPxfQPRzRmc7RiT3aOY9SqBfLoQJlwa3oNkAaXfE6mdIKu7i6beOH\n",
+       "sO5CNdAvaojgJDZOnBDGBm0nRzecoGa2EHeHDVtxpKf1ITJDODgF+4Zan8If0xiJ29uw/iBvYMqM\n",
+       "xL3e6Mo8/N445Z4L4Q1hUnXgL5+DrSV4xIRCG7a7HCvDz1y4BsQ9ZsIXPiYiF66vkCilNkXk/4St\n",
+       "STQyfEcpFYuIB0//r/BACZwuNKZhJ4S7L8LlY9Ba1hXWbBtmL8Hlh4WLt1jMh0K2l7CTdQiaHlci\n",
+       "MM7Ard/UyULfhq8/CBc9KBZh/ECLVK5CwRjHo8cVIsw4RYuEiRAkhDOWheFZ1IjYsSMKJuRiGA4g\n",
+       "qUF1DdyqBitvHgczD2MBpAPNpEvy0CxA2YOzBQgugDNM6JhCfr/BROjS6Fjcst6iruCUguTPQd2l\n",
+       "k+6tfRCGOs9bfgmib23B8w0GFx22/v2ASuxT3ISzLdj80xuEHXon4leA/02pm5uyrBSRCE8CH+Um\n",
+       "adX8SCQjAEqpJbRs4fcN0e593QbENUhVNWdysIkp25j9g3i+x/qgyXAyxVwipJMe2Iq+BzM9rQh5\n",
+       "9AFYzcItkT7NFaCX09RB39Wl3fJB2PctaG3p0nAjDYVEJx1KRgJYiSb6DPqwGoNj67LtakqXZu0N\n",
+       "+MA6NE149RPg/blI9jhM/l2Y6wAHYS2B3MtwdBWebcNWAMUM7L+kHXpXs2C0ofHKVYreaAe5BhNb\n",
+       "UG2+/gzlfLAKb+sAvcshIkUY/xm49YAWk9u+LyL7asSx56++RnOYa/PApeuwMZ8XkSdi+JBH47OK\n",
+       "iXsUZQUdD8YDKJlQM+G822estc7uQ2mS2KbWSxEOVhEvZKYDhTlopTWu0mPkEgxgwT6BdqB3paqm\n",
+       "QbKVPKyX4DYElZjkI31AW3aWKCkQD45y5ImQOH2SzQ/WsMegNNTVkKIFnQykdnVCClrZ9U2P4TIs\n",
+       "t2G5DPtH18LQglNFqP3bH24kfrAYjcfLo8dIJ+X2qdczglIxHA3hyjE0+vv69yu02Nv1f1sTmboE\n",
+       "e13YGgMKcOsGzHagXYbLU1oh2W3q/Ce1z8EToW0LZQGlTC5SYfnVBtPndPK+k4fNvHYEvlNgoQQ7\n",
+       "RptivMWiU6WgClgEDNQGBBHNHrwU26TCLMrLYjSapOcVExVFVhIOxtCbhtUCuB2o58Ab00mJZcJu\n",
+       "VjPz5i1dSekbut0XZeG5tZix+2ImE6FNgNlImP823LcOfzwOK78GKA3kvXgM0kNonkFf/xG6d/xv\n",
+       "ROTPtN7IGsDmm7GRuBlChHvQyqWfudHH8jbFV4GP87fJyM0XIlI0kU9aVD4WkVcDuscbtC6/TGKt\n",
+       "kbqzx6yRZRjNke6DWm/SpM9WaxNjIcVk5HHbeTAysP4hONqCelYvMkYCrq/pmaGh8R+hoZktu3eC\n",
+       "5cPcEM67miEzbmqA6jrQ9CH7vE5U9vsw34FeBhr36efmmjC3rDd4yT7o/EM4lIJMA0p9XfnwZmD7\n",
+       "56BxEaQFrz4Fa49Bdj+oAfiXoHMSml96wynZ1mZ9976BZbFWgvaL79KwvO0xMv/6L+Duadi/BRO7\n",
+       "8O2LsP4oVFowPvJtaaS1Jh7r179fKdUFvigiqzD9TyH1IBwdQKYHbg+mYvh6GTbdNsZlj7jvkve2\n",
+       "cMcjKvtgLKvbdBZaJXfPgh1DW9KbQEq0VP3WGNxag0kb1hXYKaGdgVys2BDF0DBZlAxeNEFaBoRz\n",
+       "AUZ5iunmHr3xBNuEggtLJaiehngI7mhRXq1A+8k3c76UUpGI/C785d+F2X06edtQsPn1FIxVRY53\n",
+       "YTOEszfONE0MXVH8rr+jK0JvNowa3NKGqZOwPQm7H4DOcX2+VjOw+wWL8omIkz9pMlVxmO8KXRWx\n",
+       "mrJYM9IkDa0DorLwbAG8deg8DUcfhVsy2q03iBV5c4v7kwY7iUUgHpkkJhlqfFh5OEacFMiuBQwD\n",
+       "SB8xKEdCWwWElpYDGE5C4sKrMZQdmK9r3aGGpTcyHU8rw+74kNmD9DrsVGDuEty9p7BjRTsLG0dh\n",
+       "Zk2DonFHFObnRo/vGaPEZOWve/4mjv8B+OdK8R63uHjT8VXgl0WQmwH/ciPl4P9T4B+Mfv2/lFJ/\n",
+       "dKOOZXQ8C5PwX4VMfMBheigUhx2i3C7t/Q3ORT3mKhkqhslaLiSOQYYpKnGBVKNHbLWoljzscQgn\n",
+       "9C5kO691AYZ9XeFIZzUllgA6KTgQalpmOKvxAAMT2pGWAU+uMm4URIHWDVkswWRFa0tcPqyrLJYP\n",
+       "y4d0ZWNyCebakPsIHH8Cepfg7J1QHIcJBWOe3lk7BpgNWP0smpJhAW2l1PdiIWyLlE7CEw/AbTUN\n",
+       "eL04By8OYXBTSQ1fDW14VvjP4cBjcLgD3gJc7MOx5+HZHXjmITj2AqyPwdIchOeg/EkRWazCrWkt\n",
+       "h9rZhiXNTCreDTMmpFIa59N3wWhCxoFWFop1n8lLCdE+uHjcYt4xyaqQwE7omSPRq0QL0pmO9pqp\n",
+       "KWjmdDWsd0DrXYQp7frb8w0uGxbZWLDCPKVslmbikO0MifYZZIYuydCg3Ui43ISZCCIXXjgMuStw\n",
+       "qAMvz8KLXei/KiIlND7kb2RFKaV2Rq2NWXQJRy3AL9wKqQp4Nbj/LHxERP71jSnZt87B4qNw5LrE\n",
+       "ORK45Oh7Qceo6pkDgu9tP7H7DLz6czDWh/RAC3/FQwhWMxwLMxR/3uFKccDKXEReUtjikSQm4TB4\n",
+       "mcqJAAAgAElEQVTL1KBPMhmR3oOFBnQmoHm3xnZM+qAMiBx9P64DpcSnKD5GpLVDUg5kTSHBoBJH\n",
+       "qP1D6koYE5deEDLMwdMZtAWBoTH17Qg229AqamxJKoSDKUjvwq4L/qvw0BOwcwTGJkC1tbeVDRT7\n",
+       "0KnA0jx027ye7v0jFSLcB9wHfF8D15sozqPn90PcBC11uVFeWCKyoJRaGRlzfUcpdd8bnldKqbew\n",
+       "o3nL339gHB60odqFKxY8VsZ6bIwxy8UK6wg7FFVEMVG8tDDBoWiWfL/GVnmKAIU59EmGLoWtFTr5\n",
+       "ZQ5MxxQCGIxraqwCdhOQBA6HkMnoXe+egnoAczGMDfXEsGvoNv0gA/0++Bm9y8u0oB/C1m9APoKF\n",
+       "W2HjUYhnwHHhdk8D2gYG7HqwNYRL6/D45/Vk+Pl/D8YOavZO04XaGtz/efjmGJz+1ZGJ4fc7TyY4\n",
+       "d0H5Y2DcD1YAuXXo1GHr3ykVX/p+n/EWxuQdHXP9HcXH4cinYf6QTkYyHnSyms459gL80ST4qzDz\n",
+       "MNy9DFN1WJnO8J0HP8jOc3fD8hUY+zLlnxoyUUgRZCJm0j4pSy8QSaTBwJEHYR8eew7OzLucn4rI\n",
+       "VhSTKUEiwUhCJtMasuEA5VgnobsmrA01k+lAWrfk+i6ElkkkVfqME8URttrDFJ+UOU9nOM78csig\n",
+       "YuL2tvGSRZw6DOvgHtYqoc1NzahoBND+gwJ9ZxyOpoAWNLfhC/EIzPomxkmm4B9/EnL7tScBAOdg\n",
+       "8qtweU+pP3xrY/LDj7tOMoqfhiMPweGBrg5cSsPiU9D5slYWNg/D1N+BfFVrttTPavaZY8NglRH9\n",
+       "FwqPw+zDkL9VCPZn2VMmjlFgfDcgGrNYnBrHVxvM5cC1bCw/IteFYtSlV/DYXlaMDyCVgU4RtqtQ\n",
+       "7sPhyxAcgPKENrGsoyteKoKMrxNaowGdnTw7+/LklM+anTAmJWKEQcYlkT6JWQfVYzKEqoJA9Mbk\n",
+       "4BpspeH8GEzuQep5ePCbuu373Mdhz4DqBoS3wommVlO+MgnPb8Hl/10p/015Vb1d8W7c7282RPgS\n",
+       "8AWl+PUbfSxvZ4jwO8CLSvGrN/pY4G8e8xvpTXO1zKf1sN/FSIvcewf8zB3QK8LgMnziAnx6H5Y/\n",
+       "RaENMIUiRSt/itjZh5gWfVUnXwqZjFfZtsqE+ZCB2cPL10lbMbsRFCowKwYSK5qiCA3BShSXbL3j\n",
+       "7Y8ou4Y3clv1oeWAIZpVMQzhaANK25rnv5QBfwkKeWjUwf47WsysGGuBqnpa0zhvGWg65npGT0rn\n",
+       "j2jF73ILDp/VLSEjD/d/VRv/zSo4PY62Av4bYwTsOwXq03D3Nuzfhuoe1DPwtc+KyK+PrNbfM6Fd\n",
+       "WDP3anG3uA87z6KV5QSmPgJqCjYOaMGr7Rgqq1rtdH0Skuegcgt86gUN/gObrdIYR8IdBlMu3ctX\n",
+       "cBYKGJUDhLkMuZ0+lwq7uM4Ox2JF1tbA1LoHhW3Ih4JdDBmvmNziRXRSEErEWlorlJfQFN0VQwNV\n",
+       "hxHYl2B6Hhb64BfhUAhrwSxn0pNM+4qha5KOcoRqkXWzSRJn6NqKULUYZtfJNXQ1bbIMj56CzQrI\n",
+       "CzC7Ac/OZ/nKnR+C6C5t9atqkP0G/D0R+U2l1JtRbCxXYGr/GwzojkLtOTh+VY79bR7WvzFG7ruf\n",
+       "gxdOw+XjGiDcOgssj56bh+N/Hz7UhOl12JiEc/8YWg04dgrWLFg8D41/o1T7iyJyMov7B4cpdyvk\n",
+       "OwHe4VVWT/SYyKQ4EbapGQb9JEWh71Ie1oncNi07pllXRA5053UlpGjAwRj+//bOOzqu67rX354O\n",
+       "YAAMei8Ee++UqGbJKnFV7DiJ7cSOY8eWX8pLntOzXvPKy4sTO8mLveKVRHYc23GNLUdyHNmS1alC\n",
+       "SSTFDjaA6H2AGWAG0+e8P/YwBCGQBMkBZgjcby2sBVxgzj2459xz991n798+XAJjNeAp05iiFKqs\n",
+       "awMqbDBpB3ccPCOw82SUl6bL6K0sw+bz01VSQomtkvJYHHGV46KCUTlFlGl8AmuicNyp2kXlcSgt\n",
+       "gvEuePAFcCfhlVVwYgMUFUFxndazeake7FP6ctT5bWNSi2qI5BOZDJqN3HwF8ebDY8BvQX4YI1ci\n",
+       "H2JG/gvw6GKdTETcLfDO+zWqLA4wqfmztgAJR5Kkw4EjaUOoxRE/S8BbiQlO4ndEqMdNeTyG19HN\n",
+       "YMkUk644k4dSsA5qisEed5KMOiiyG5pTUexFhmGHpuh6hiFVBBVOLUI2GoFni8BXoJofjQFVTjzZ\n",
+       "CGu71eXa2KnbOU/shLoa2OGB+hRE7DCUBpsDzhrw2DUbp7Qf1h+GU3Xw9Ard0om7dcErOXUxFiIo\n",
+       "aI7wfK6XC4p+B9ruhaYgTLfBWAiaXoEtSRjeBfxogYbrmlFDpP4h2F6m3qHpYjjxEWh/GkL7wLED\n",
+       "doZgeBCiZXo9h1eAPwjtDgicgRXbocx/oU0X4eoSysYmKayMMWX3Y1u7Envc4EgJ3iSEp1z4CqIM\n",
+       "OwOsj2vWVMUg9Abg1WbDRIWhJZli3C1E0vrwacxohQSNCpwVZbZJplPgr4XhOEwUapZhzFZAdLqC\n",
+       "ipTB77XhihtCKScmUk1F+CzO8DjjIsSnYjjDUNYOVYNQ7INACUxOw6qM4VmeLqBoxy4mH7vw/1VD\n",
+       "eAcUjsIdwHy2S/Ny/zkTlNqR+ZpF1R2wc1o9TnEHBHfCHUPwuhfKp2DzJLywHl7aCey3QXUbJtVE\n",
+       "2RjYbX4iHjdtaS8FtjS2ZCnVcRexAj9dTkNseJqoJJkaBucklO+A7SnApsrMpwUaYzC50kYkBaFY\n",
+       "mna3xgutSWu15T67Lkd7O0BIcFtXDwN+F89trmbIVo5xgXGmMHY7hfgoSZURZhoH6nH1pkBS0HgO\n",
+       "RgLw6qvw3UpIroWy1bApkpljlRCchHUT0DkFwZ9CerF1YvIG0SJCfwf84RKKFZnJT4Gvi1BmDBNX\n",
+       "/escsuDGiIjU8OZo3kFjzC+JyC3A21jc6OWaWnBcMEQAnBArgcgEKWecoMdGWdiGzaQwTkM8Eifq\n",
+       "bKEgdJaO0j6KysHjTBOxRagIQkMZDIxBaZHgs7uojELamSZWJDiNDdJCJJykNgqrxlVSvSIF4oZo\n",
+       "Gjb1QSAJlaehf6e61D39sH5Mc/17KsFbA2vKYG0PRJvAnanc2ZPUNL7AeCZ4sgsSk1A6BS/+CErv\n",
+       "hOCdsOcorOzVYL7TVdA1yrwD0Lx3Qts6WBWEukwswIQX+nZC2WHw1Fz584tN4W7Y4YNdF97aw9AQ\n",
+       "hMm74fQk1IS0QOGWHk21PFwNoQLomITRLwAhiNg0vkdFKNM4IwliHhuppAPSLhLuUpyRAFPFaVwu\n",
+       "wZtyUzZewogvwNAUtJ3Tz55pA4bA2Awhl42RlINbJlKMl6WoQb0iIaPS/w1ROOsEX1x1XU6KigLX\n",
+       "RqEYO70iOMVGWcRFfFQIpKDcODQGIRZnvAP8/wNIQvrt0L8dGktAzkDTMXBlvI+hEi/JN9UQqdJA\n",
+       "prp5XuTAOAx0QXkr/Gd8yGmontQg1jxc1F2NWosJYKIcihzgmVIV42ChZsysG4VTe4D9FbChldjp\n",
+       "OIFVLsrHJ3C6SihLJImkQ4TtXpxDNtJVYZyecjqmJkgfgvgj0PI12GRTVdaAS70jGw3E7ODFji+W\n",
+       "pt3uhhE7pakUA54k054UU2OAU7OevFGdP850HIYLYGIzkdERUpvGcFYIFTbwJB1EM4URIzb1ruGA\n",
+       "zgKwHYf0s9BzBNb+HqwrgLUxjUcbq1PNoddjMDkKo0tCRfkG+HVgDPjXXHdkITCGsAjPAu8Avpnr\n",
+       "/lyJBTdGMnEJ98w+LiINwF8BD5rLBK6IyKdn/PicMea5LHQpHr1YdQ6AUt0DGYlDZRH+iUmmKxO4\n",
+       "7UPE7BHS3x0ivcHAzjQ1jlqqYzEMI8RTmlrX2QQ7nofh1QYvKQo8NlxJQ2LaTtikmbLrm29rWDNq\n",
+       "PIP6MI8VqgDZaBLKu6EkDF0JKAvCSKXGlYQEIgNQVqr7vrYkuEcg5QNvAeBWpUlHDIoG4FwaNgTg\n",
+       "oAfM68ZM7BfxPAuRd8LpBhXUGuqHkX81M+p3XA7dh2/cq56a8dqLvykLwVgZ9NZAKM/cu77N0Dqu\n",
+       "ol+hYnDFVCOl2cDpFVDVBYOtkCiFliDUROB8AZx7/sIWhUjVOTjZDJuGACJUnx3h1fu3EDxgB1NM\n",
+       "ajRKcLWdsrE0/mQCRwU4nCkm49D8pGZfeMqhYUqNolcqDJ4yQ4HLEHYliWW8ZWmjFVSHBDwu3Vqw\n",
+       "JdRLdqoG7H0QaAQn05TEIvQ5y6kYFeJxL4VdAtFOYjXgfQM4lElfB3hYRCpB/itsD0JxxgsWs8N5\n",
+       "h4PpwdlXbUTdc/MKcstsezz6DHxsPTSVQ2wE3O0Q8EOeKvPGB2G0CYr9Gk9yYQkICrRkdFUcadTN\n",
+       "QBLCBTDqZSQ9yvSaNCXxKOHCNLFkCsKDJMrBEYrhGBoi+kwIPg+u34LmOKQLNBYsmmmuxsAbDiiO\n",
+       "CS4HlKFRtOuGDDF7kImyFK5noGsXvFysczUBBOIQeikA9aNQ2UD8SJL0Zj/+2igVjiBFSZiKwoCB\n",
+       "5BgU7YOac3CsBqbOA3UqRe9wgSusBmlzl+obxVNQ2AED9tlXarkgQg2aQfOWmyHb5AZ4DH3hX97G\n",
+       "yBX4n2ge6w80wJ23G2OiM//AGPPpBTjv8BAMdkDFSo0goxhCQzAU0ChGv5OIJ0DEMQSnUvD5IdgR\n",
+       "QP65gIpUmmjpFKP2NF4bJGvAVMDoeQj1wEg6TmiLG684SETguEkwWZCixaYxIANlWj8kHoT0eZhu\n",
+       "huSoKmmeLYfkeQ2CNANgOvRNLpWE46vB9MB4E1QHNfhu2qbVXYcFivy67VN7GI5Vgv/f0R2W3VC5\n",
+       "W3VKDvVA4gDQcznjbw4EbG5Y3Q/7xuGMD1ZM6qIdLNRSFKE8S/FNTkP3HeCt0hTqgKjbOtQPjMBY\n",
+       "GDY9ByMtEKgA2zAkYhDZf7GNsX+DFz4E3c2qUjtom6TzuR4SPA2NfhhNE/JWEg9F8TSBSAwkSOF5\n",
+       "WHsCzrZqwLB7HAZLITEGkfEU9uYUNgMuBySdUBqCcKlmTrnjQErl38fL1FA551Fp7qlUGmdfP3ab\n",
+       "jaHGYhKRKZzFA0w0D1M2Dp1tMPrczHL3xpgxEfkn+NGHoLVMyxB0G+j7fgBWvA5t22DACekBKD4I\n",
+       "BX6tZzEvjDGDIvL5QVhfBFUhGExC++x7OH8Y3QcHH9J6Lb4J6DDQXgHxEd1+BDhbBYGnACbgyCnY\n",
+       "+3aSpxsInB8mGZmi0RfA4wrjOx3AlrIRsaeITgGfAaahaD3INEw7YcirYoNJ0aoS8SSUjiQZr7OT\n",
+       "Tl4orhclXpwkPgGbDkO7E/xTmpWVTsPoKAx9c1StjM9tgJJSkmejjKS6CZUHqZ1WTZEzAr4oeFbD\n",
+       "vp0QHYC1HwW/W7fpykMw7dYgWYCkA4jCeAqVd16u/AXwVWM4edW/vLn5d+BvRCg2Jn+rrOcsm+Zq\n",
+       "LGSktYhU1sGvrIDyUmAApBuOB+B4Deywq0jH0Si8YYyZVg9B7b/CXVVwchtsFqg1EHGqTPPxOAwe\n",
+       "gOQPwbfRQWKLEK60URSO0xAwBFdCXSFURdUNn56CaT+80gK3fS9TGC0EURe88E5ofAX2dEDQA4dq\n",
+       "4YQN9rig4BYorYCiJJwvhkMBGPseVFRmFpwIjD0L0y9D2QdgyxZYP6rGw7kqONQHI1+5Fje6SNWH\n",
+       "4YFmLSd/ZB2EWyDlhO4pGPpvb679cUPjkoWsCtcvw67fhLd2qlgYQFclPBGBwQ9A5YdgV5vWRnSn\n",
+       "9Lq8ZKDnizMLvomIDVUk8wLjGdVOLxrEkSiFe314PlJOhS2CLd5PqnyK+pjWiZmwQ8MZWN+jZeNL\n",
+       "AnBkJZx4F7QW6vZMuQ1K0OrJSRuUCBxOQtu0PsCGA1A2Ah02GD+pyr4mamd0owfv5hTl7ijVBpIx\n",
+       "SHeA/RC0vwrBf5tpbGrMDy1oyk6fMSYoIgU+eKAMdrhAJsE/Aj9KGjNHrMXCs1hZFSL2dVD3LvCV\n",
+       "wlQNmHLY3q6ZLr3FcHwARv75glZKgcjeJnjHCpAgFB2jaus0W9phZaeKyR11QMdXjUl2iIgbKh6D\n",
+       "0lthpx3cdr1PEqgxEgzD6vMQSwmddUIfNlbFkpR0QPErmgXTfgT8j6Dlt5Poi0Mic41qnTh+uwDX\n",
+       "LUnsU9OkBqG6BtaOw3gJpFfAdBHUBVSLKDYCRUfh+XfD+jMqslcfUu/s/lro7YTefzEmtG+hr/vl\n",
+       "xyN32TQi3IV6Cjbk8wM6W4jwQ+ARY/habvtx+TFflsZIpn0H0IqmoYwaY64oay3iex+0fhQK7oKd\n",
+       "NhVSKo7qg2QwDs8mYfRBlV123A633Q+3darc+8kVcObjUO6EpjFIJKEvAkOdEA7B2l51UnUZ6D4I\n",
+       "xcVQsBpSk+DfB7F2qPolaFgDhY0wXgVDZyDwf4wxfSLizPwfYWNMQkRWwM5PwDu7Lt2Rer4F9n3f\n",
+       "mMS8NUI05qf1k7BDoC6oio9HiqDj28bEjl7zhb/yubJgjDT8EaxuBVuTOt5iaL2WoR7o+DNgCorf\n",
+       "AqW3gM0J4Xbw/9QYM3rllmefp/BO2P1eqAiBJw52H/i3QH859E7AriG45WVwZLbDYnb45koIVcOa\n",
+       "tVDkg4RL03YLS3Tfv3wSSpNwLgbFP4HbTmp5gEdKoPdvgHLY8BuwsQQce6EqrP/PURusegYOlsEb\n",
+       "882IQR+guIDQNXjLss5iPpQyRmYpOjFKoWQreEpVEDD5pngXVeqlGQ0imgDveihugekhCB68kB6v\n",
+       "XsjNf6py6hWF6lFLF8A5gVQCWnuh9QR0VsOBYzD8dSjcC76fgWIXJPtgcj+Mfm/2XNSXoYqPwK5V\n",
+       "sHVA155D74WpCDQdgIlbYFsQDm7XWLPq89BboRlUowIHtkNjD9jbVKF1vB0m/xESh5bLuF96XlzA\n",
+       "G8D/MoacKAgvNiL8PPDrxnBvbvuRh6m9uSajIngNQjDB56DrQbglCAkv+GyQcGhFT28SCsPg+whw\n",
+       "EGz3QUuXptUBOAqgbVL3Z7tD4A2pCFJ5FPafgGd+hG4u95rLlGMXkYdhtBl9nZ5A80JXiNh2ozXF\n",
+       "Oy4WNito0kqhtlmttEzB8bXojTgvMmJXX4TxPeBtg/gZGHt1Rmp23qCLdpMX7joMY2dhuFS9H3eM\n",
+       "wXO10OHJLPRPiMiTgG0+sTNz422AthGNT+lZAZGd0BzVwofFnTCwFvbfBltfhWABHK0A/+MQPgCn\n",
+       "fg9q2sDhhfhmkFFIn4PgaigYVzXX4UI1clyAux7KP6I6Yw3l4LRBTY+KVoFWYu6v07ik9hZmpdxe\n",
+       "jsyDNw+DTReOzDbWhayCabS6IKDzRw15qQQTRu+pILofeYHLvLRU7dVCkr2rYFUMJoohFYNqm27J\n",
+       "dtjhVAWMHoDAZ4Ak1P4M3PuUBs8CdFTAsx8Tkc/P2u5qhtZVsLdHf/T7oD6q9tHx9bApBVEnlMbA\n",
+       "lGiyU2kEhuph+2sar/Lqv6Dr/eC1Gt5LkE+hmjI/yHE/FpMfAf8oQrMx9OS6M3OxbI2Ra8Vo8bh/\n",
+       "gIFbob4AYlGwxzQwLOgARxLqt8OaMThbA6kq3UppOwmRJiiNa3BiRSfUZR4iR2ohbTPGXHXPMrOI\n",
+       "dsGF9NXqh6CtUYPj/MC5MRH5mjFmAuLT+qY9m2kXJK7ZJWmM8QM/vtbPLTYaWFl9RhUlV41p5gxo\n",
+       "obLhFFpG9T//FuYuhqUqrTRlfuw1WjxuFuFB8K+HegcM3wdVhVoLZKoAgilY8zzsX6NFytJjMPI4\n",
+       "pE9mgj//HELroeBWWDWlxpM3DO12WBvSGJLuCj3Py5ugeCvs9GvAcOFKmEpCxYzUbIdR2f+4QOpN\n",
+       "mTIWVyezzfJBWLFadXiCwNnJzD11VT0ezY6rHYPzUzBVrRo2ElKhsfFhmBiEoc+COaXeS+9bYbPt\n",
+       "oiECsNIPA80wsFpETqHbaw6gCupnnMuR1F2cqggki3UrqDABIZsKqAkak2JLwEgRJEfms8YsB0Ro\n",
+       "BP4A2LPEg1YvwRiiInwL+CTw33Pdn7mwjJF5ots6pQ3gn4DuGtiQ8TyMCBzzQEsvTE1A2yhMnAFH\n",
+       "CyRXQ7BXy8B3OmHCB3VRmB7RgmNjBTB26tp7U/YA7K2F7TO8Ew018MyDwNcgdQZOp2BVAVRkHqRh\n",
+       "J5xwQ+BINq5H/jL6NLzykAYXN0yoVsfhMhh+dD7BlSLOTdD6PmjJ3BvdSRHn941JnLj0L6ePwPG7\n",
+       "IL0NXNWq8dDrAv807Iio8FbNKTj85dlS+xmPxGER6YN4o3rKbEBBBwyv0/ghRxh6SyG8Fdacg/V9\n",
+       "YItDsFa1aYZ9GkdiS8OgEwon4LgNEllTxF3K6PZLyW1QsgGSYfVi7myF22bcU63l8JNfFJG/u/p2\n",
+       "xtQxOPE+zdDyO1SYsCgJ/V4YKITRLxmTnuFhKayGsjm0fsoS4NgA1e+CliINZj1Vrm3szLzRloZg\n",
+       "cBwG68HXBf21ajubKPQlwefQ4FXHELxaDSM5LbWRZ3wa+JIxdOa6IzngC8DLIvxfY+anM7WYLGtj\n",
+       "JFOXowHVHOkyl6k+KSJl4L4btuyB7d+CH78PRtaBpwimE1qELF2lmSc9O6FqGM7FoawKgq3Q0wRV\n",
+       "bmidAl+RfvbIBEx0Q/rla+yzC1q3waZZ7uK1w3B4tYiUGGMmRezfhB9+AForwS7QnYK+R40xfXO3\n",
+       "fHMjIkXg2Qa162C0Fx53qXJtwg/DjxljTl/l8wXASlj5Ebh9TKsou5OwKQQ//YCI/G3GQwSAMSYg\n",
+       "Il+B0HehqQimEuCZhtVxGGoA5xgMF6Gv2HOiGS+Vp+GNVbC9H5rOwmkfHFwL6WPw7HpoCkFLJmNp\n",
+       "1Sjs64fUKg1cfaNGNVJGh7WcQN/31DNmcSXUEGn8L7CrEFrHNPDztQcheRxmurBbx6G2SQvmXVrh\n",
+       "981MvgztH4W9SajsgxN14C8Fx2ko7YbkLI/kVC+MbrhYBfkC/YVQeRu8q1tLEQBsG4ZH3wOHx6Cx\n",
+       "H4ZKNWbldA14wuDqhUc3QnkvjMXhZAPYuiAWh5H/MCZxxdiuTL2edVC9U4UUx45C8rgxZkl52UTY\n",
+       "ADwIrMl1X3KBMZwV4WXgw8A/5ro/s1mWxojok+ueNXBPM7qMd0NYRL4x82EtIoVQ/h5Ytx6St0FF\n",
+       "DCYFfuHb8NJWiK4Atxe6mlQb4JYQSDn4G6ChC06HodsLG/vANw4tAU2rK3BqvYrAT2a/NV+mvzZU\n",
+       "kMoJBMBuA/ssoSIbmfLpzSIur/5b3X8H3dWZX/YaY0JZuoR5hYgUQ/0nYGsZNAU0KPS4F04/ZszM\n",
+       "lN05PytQdAe0vhVsKxy4dlbxfLIW+1AM0oO4pieo74bejcALsz4ehdIE7DoINEF1WANWzxfB6/Uw\n",
+       "Po/MJf8j8NJ74MwGKE7DWA8M/hvEOsHWCmv3av2cC7R0wBGnysebYQgMoim5J41WE7a4Kt49sLMQ\n",
+       "tmUywUpjWkBwtBVGzuk4XsBpmMc6qS8AlU9C79vBvlJjlZp7oWQchqK8qeRF9CgcvQtKqtWb2lkJ\n",
+       "51bACQM7J6B2xr1aGoeN+x08tbUKubUSm0kQS04QPTysgpJOMmW70TiYIVTkZOpiHNmVKH03rNur\n",
+       "1YodKej8eTi2TUT+5XIvaDcpfw78pTHLOp35r4GviPDPxpBXxuayNEaAtevhgfuh252JGxiA4sfh\n",
+       "wyLy1xffCCreC7etga298FQENgShfxOMROCeNyB4Eh6/FQoESsrAG9GKod4wnGyD4DHw9MIWO9jd\n",
+       "MLAJKNWg15IzMPUmwTA1gNxbwLcKYgEI9ELdPVBXCe40DKa0eFp3OayYUR21txSG2mDDp6DWDbEw\n",
+       "9A1C99dmiGEtUby3wk4f7LwQuDkFjRMQeruIHL+yEebYAhvfAff0wpNrWwi566lKO5kuKcbVWUOk\n",
+       "8ABndk/hiYvUNMHYMUi3ZxbpWvWCuYrA0wHjdUAhYCAwBbFHNb7HuR48PpjqB87OXOAzaaTfynjp\n",
+       "CtE04iiAiPTD2VtggwsmPXByNxSUgFkJlRO6BVcYgzNvhb4e/b8tZpMx5tvA2wKJaSjfCc0z7h0B\n",
+       "PAPgXQsjpReNkQkPDMWYRw0nxd8LK+rhLadUpBCgvxJeqgQuuQeNMVMi8mV44t3gfi9UlUL5ELQ4\n",
+       "NS7tiaNQdw7WDqlhk3CtJGZ/gMRjNpASmOoG3w8o+FSI1vNQkdQt4+HXIXhsfkbIBfHJrbfAA10X\n",
+       "qx03BSG9CvatB7KaMZcrRLgd2A58INd9ySXG8III54CPQ34VBVyWxkgN3LIJJtwzAhjrYaoNmvtg\n",
+       "BXBaC2ut2KMeDgdaW2asHGonoWsN0Kf6HdMFcNfrMFoHL63S8t6JjGBV76tQ4VJxszY/VD2vdTHs\n",
+       "KRhuYlbJbn3Dr/s4bKnQwLbAajj0Kdh2DLZlAtBCLviP1fBkEdxSoArcfi/sWwUtbRroWJCCcJlW\n",
+       "B01+VET+bKm5XC+ldCusnJUhUJDU+i8dDWiRvMtQ9RbYOQoFyRImvRWQclAYSRAvTpFyT5AoFYpW\n",
+       "wKZzUD8J57bA8WER+TYQg1QvjKyAcg94z0PMA+OFEHsBqISWX4V1bvWgDLrh9ICIfNUYc8nYZ7Ko\n",
+       "ArOPibgfgR/8AhTthU0GIl6om4C6DjjQBM3noTUNj79fRL6wzKW934SmvVe8H1ZugOYYRO1wZBsM\n",
+       "nIGKGVWKa0/ByVXgqdBaNcEiOOaCwW/P3ztQWQP203CgGmrRwNKBNBT2AhUikkDzzePAiDFmVKS4\n",
+       "B3Ydhz098PoaqN8K5YWQ2ASpMnhpBG5/1cvptWtInq/QzDkAzlLQ4mZdQ4g9J6BuRItnvngrvBYC\n",
+       "nplfn10tsCJ10RC5wIpJaN/IEjBGRBDgL9FU3jwV5VtU/gR4XIRvGnP5beTFZlkaI3bwFvJmF1Wh\n",
+       "5sS5NdK9+WehZhucXgntw9DYAe1VsMoDERd0lcERHwQPqof0tuMw0A1D5Wps+DyQPgujYTagsfYA\n",
+       "ABVoSURBVDi+CeoD4EmpJPNAMXSGYXYQVcntsLsMdmT2rcMe2JQAVyal1pUEbxy2j8Dj/fBUHxQ3\n",
+       "w3Q7eLbCDr8GbQKUAwWl0L8R+pu5pjTmmw0Tg5iTN6WpxuGqFaEdFVAxAODAkXATCiYZLwKxTRH2\n",
+       "jWJvKaBuYpLWHkiUQcMG4DboWQvh16A3CGsOQ7QEJqsh7dcgwvHvQf0vwv0xaMxsxW0AfI3wwluA\n",
+       "x+f1n5nYEY0T2l0DhSMQ3w7NAa1btDIOna1w5yGob4KhWi6berpccW6DLRvgLV0Xj4kNzr4FWruh\n",
+       "ODNnJpww+BwEX4auFoichcDBaxP1c1fC3kMQdsNwma4Du0bhRDV07IGGdVDjVO2YoX4R+Z6WW9jW\n",
+       "q8Uz42vg9gGt4DtUDzVJSNXDU9scDCVquZhOHwT3CL6mIiqn/KSdetRuYFc/nLtdRJ6fX9p6Kq5Z\n",
+       "WLOJOTQ1eUnwLlRp8Bu57kg+YAxviPAY8DngoVz35wLL0hgJwMkeuKd6hmciCdIDAo4q2Hw/3Hoe\n",
+       "BlfCmgCcr1bphg3Pw5Gt0OOG5AiMZPLU2z8GzRNQP6VfEx44Uo7qFEyJeH8Mk/drxHsU6AlB/7+8\n",
+       "OZ6gZBusmRFDEnVBSUoL4wV8UD2W+bsIFLqN6X8KLgTYlv03jUuZSXkQnC1AQVYvYN4xuh9O/Jyq\n",
+       "kl/IaB4s1ut8tZz6WBf01UDreJia/iQJdzHhYITxxmmidjsbR0MUBCDugfQWWD0OFXGVdPc1wjMh\n",
+       "eNoGLXF9A+5FH2pmHGrKoHGW5sfGQXhjt4j8+BoEp9JQOwzNPTC5BWyZzxXFtRI0ZOKFlm2dkctT\n",
+       "tQvWjl16bHsPdHTBI+vVYxkBuidh6MvGmDfV7Zk/050wsAU2DF9M2U0D58pgzR3w9rNQnHkJOlMJ\n",
+       "T38YcGpK+GC5pu86DPh6dMulPwWpKHSOx4k+NgS3NmS8ZxFwpimQKWxA8Yy326IEuF2oQM0cKemz\n",
+       "SZ2FM2lY69bYGVCBvlOF4M+zulPXjgh2NFbkT4yZO5V/mfKHwHERfsYYnsh1ZyCHxoiI/Arwa4Ab\n",
+       "eNgY85XFOvc0HDikkqKNreCPgOuEhqK/CNVbVT3TNw3j56BvLTQGNX1urBfGhmHsVSgx4CyDxAk4\n",
+       "9SxE7lJB1wRwLgm9370QUGhM6AUROQzn6jN/0DO369ckID7jgVI+CX2iFUBtM9zv/T4IzlwobJAe\n",
+       "UvXPtTOi82M2GDNA1iTb85PEITjWCsFtGkgcFjg3Df1fv7qLfeQZ2P8QSDrG5o4z9LeuYLKwkdAL\n",
+       "AVKxKdK7Q1S1Q7RZM1scaXWHSwq2DEJHMxz5Bxh0oXN5ICO5Xq96IbOxG30zvzoi0gglm6C4QQMc\n",
+       "t/RqDNJEI1QGYbQACnozsQ1xrprxsRwxNh2v43UQrFFtoKZ+qO2GZx6DrhDqUeuZb5zF5Zl4RVVQ\n",
+       "XZXQNgZhFxyuh8k03DF00RAB1SPqbIbjg5oVY0tfdOLFneph2/AkdPtAzk/DvoPqWruwZjknCRZP\n",
+       "UnQcSmbERA0WQ3gE5rcdoXPV83344c/DKrs+EjqBnqeMMUsh/fWX0Yy2H+W6I/mEMUyK8GHguyLs\n",
+       "NebSmKZckEvPyLeMMV/PBJe9BiyaMZIJHns4CHt8Kl8YGIGfpOE42G9VuwSgtV1l2zvXwHgxHHBr\n",
+       "Dv8DW6AsAiPb4MQ90PdlOHEYTmxH90c6mfVGboyZZMZ+79z498OJd8CdGXdsUwDa/XCqAfZEdL/7\n",
+       "XDUcnobwwRkfHIfIUThzByTLtRJt1AEnfTD2tDFmfM7TLRGMMSkR+T5MvKxCcsRQr9RV3wyNMd0i\n",
+       "8iWYuB88LX6iB8OMBwZISxwSE0w4VV67byW4w/pg6y6A6guaD2nAY4yZvQ02DMMhGPZeFF8DaK+B\n",
+       "0BtX8opo7JDj56D5Lljnh4ogHKiDn9bDlldgsAb6mqAvDLVReLIaBq4htmE5MX4MXvojWC2wIqZv\n",
+       "/afXw5kuNAMpfLUW5osxZkSVkifv03IO6SiMPwml28A3x3gXC4TG4al10ObW6rqlZRqHXPYakIbT\n",
+       "JTB2KLNm/eMk3FIKG1Mw7if4RTi/DSqLoToEfT54vRSG//laZN6NiR4TkS7oaOOiEvRNr9IqQgHw\n",
+       "p8CHlpPA2XwxhudF+AzwmAh3G0NOnxM5r02T0Xf4iTHmLbOO56huQe3H4O3VqjFwgZgdvlMPoSA8\n",
+       "KBerfII+XH7aCakQtOyBFqMvJWcT0PsNY5LzLj6msQHl74e2tdCUVkXFU3EYPgFVK0E8ED4KEy/M\n",
+       "NjA04LbpE1BdD/YS9dAOdcDoX83Ux8hnclk4K3N+++x9dhH3Vmh+H9RugmofBCNg74Bbj0Na4F8b\n",
+       "4cxfzyXjLyJtqluyESiNwlAhnAjAwJczMuNz9MG5ERo/orEEjSEYToPrDGw/A4/tBb8fnCGYDINr\n",
+       "ElI9mdiGG9heyC0LWxTTdSfs/GPYIFAS09T6YQfs74Xe312odPfMS5ZRxV3fO+De3epJu8BwIfz4\n",
+       "nVB7QiUD+mqhp1S9J6sHwDsFnTboehmmHr+ccSFiXw/V9+j2cqIPhp+5WbLnFr7+GH8GrDaG9y/U\n",
+       "OW52MsG9fwXcAdxvzNVemG/0fHlaKE9E/hfwCeB/GGO+Nut3OTJGpAXWfQJundI4kPFCOFQDh1+H\n",
+       "tu3w87NiAFICD2+HFUG4v/NiVPpYIfywGHo+Nx/lzxnntwEt4KqHZAjSZzPpn/P5bCkUbAFvNUz2\n",
+       "Qex4Nt/8FppcGyOXQ9Nu5Rao/QXYMQI7uyBQAG/UwdEXjAlcNhhVRMqhcAsUVUCgGxInLuexEZES\n",
+       "WPn7sMet2zN141rB90AFlL8IngQ8PmLM4FcX6n/NBQtrjNT/Jry7APDCdA3YYlDWD8dK4JlvL4ZM\n",
+       "uohUQOtvwF6jWV8RJzz+NmgIwR0zsl4ONMJz7RA+Aw4nJLpvZiPzaizsuLMO2AdsNcYK6r4SGYPk\n",
+       "i2jq8zsX0kOS00J5WvWV78w6PGSM+aAx5k9F5C+Ap0XkkdlvKSLy6Rk/PmeMeW5hezvTbT9+H7hb\n",
+       "IRWA0UcgcR7M9jk+ATjqYcP5S9PjKqehrVKLqNF+DedPo5oE1/x2k3nbzllJ8KVKxuvxhIicgNfu\n",
+       "g6MrtaLy+KMQff0qnx0HnpvfmWwrYbVNtUPI3LAOowX4OpqhqRPSVhDeNSFpjceoHeCSTCNTCovj\n",
+       "ujda1+phmLoXCtdD3IB3HPa8culfbh6EY2sh9D1j4jcYv7J8EcENfAtN5bUMkatgDEaE3wQ+C7yQ\n",
+       "CWpd9DjDBTdGMkWm7pl9XERcGe2LBBpy/iZryRjz6YXu31wYrUj7T5Ix4+CCUufYIJyfJTZ2slYz\n",
+       "azxz6HjMT73R4uYgo8771ZnzIruIU2uRlPuhNw2Vdk3hdaYh7YCzPhj7j+yfdykzegBOv/dSRdOA\n",
+       "G84nyBSeXAwy6+C3MtLrlVDz25pFMxNnCmx2NG7DMkaugxlv+eeBf8hxd24aMjE1fyDCKPCiCA8Y\n",
+       "w6LWucrlg/JPRORuNAPhO/koZT3zgaN7v/IDeOajsK4JyhMw7IJTYxpF33HXpTLSMTt0wzzLuVvc\n",
+       "PCyMIQIqoHbeBlui4D0MnduhzMC5EuidgLGDkJ63l80CIHEYjq6F6fXQHNcg8FNA/3fmE+CcbTLr\n",
+       "iB/8wTcHN3dWQqjj6iUELOYiY4h8FtgK3GsFrV47xvBZEfzA8yK8xxheW6xz5zyA9XLkcfxAITjW\n",
+       "QVElTA1C+gxgg5qPwZZ6rT8TccHJIjj9hDHh53Pd55uFfB3zxUSk9B2w7k5YNwkJN3SugDOjMPEw\n",
+       "KiWfnzfsDbDwgYwX5OCLV0AiDNFTuc4wE3GshJW/CluSUB6C4RI4nITuLxljlkWKdjbHXYQS4GFU\n",
+       "X+FdxjB25U9YXAkR3g38E/BxY/hh9trN0wDWK3GzPZg0K8i5GSo2auDp2CFjzLwzaSxuvjFfCDJu\n",
+       "/DVQtV2rQPuPLcUKqjNZruOu8XS+XVBQA1M9EDpollHV5WyNuwjvQuus/AT4HWPmI/ZmcTVE2A08\n",
+       "BvxfY/hidtq0jJGskElDTi3lB0Muyccxv14yb+MFQHR2urDFpdxs4671bnACkaXoqVosbnTcRWgG\n",
+       "/h+6LfOQMfOtx2MxX0RoQ0tX/BD4Y2O4odpXljFyg2hly5p3QnETJNMQfAMmfnozpc3eDOTTmF8v\n",
+       "6tlw74aqt0JhEUxPg/8ZiLxmPbjm5mYZdxFxQ+m94NsNTgeEBmHoxzeLrke+cb3jLoIL+F3g94Ev\n",
+       "AJ+1CuAtHCJUAI8CY8Cv3Ujqb05Te292RKQS2j4Od8WgtRcSNji6HfbXiMiXjFUl1eISCvbA5vfC\n",
+       "bf1aKyjghlfekyl++mque2dxI5T/IuxeC9v7tehlXym88DER+XtjjJVCugiIcB/wd8BZYI8xs4uN\n",
+       "WmQbY/BnrvtngCMiPGQMP872eeZVI2N5U7IbtnFRkdWZhp390NoEtOSyZxb5hYg4oPJeuLMPfJmM\n",
+       "CF8M7uiHyvv09xY3IyJSB41rYW+PGiKgNat2xqDi9tz2bukjQpMI3wO+BPyBMbzbMkQWD2OIGcPv\n",
+       "ovXkPi/CkyLcJ5I9G2JZGCOZFOLrxNsM1XOkHdcC3Hf97V6ZG+vz0mo7W20uQjtF4PVcWhAN9Odi\n",
+       "t/5+UftzU7ezUG1eZ1vlUDPHNltNEJLX096cZPva5cF1u8FzUiXCnwOH4eEwsMEY/v3G2rzx/yOf\n",
+       "7pXF7IsxPInWt/g+8DmgW4SvivBJEe4WueN9mRTra2ZZGCPA3df/0elBGC968/ExATZff7tX5W6r\n",
+       "7ay3udDtTEM4AWHnpYfDTj3ObFn/he7Pzd7OQrV5PW1Ngn+ORdbvhenaG+3QDO7OYlvZbi+bbV0W\n",
+       "ESpFeI8I3wROA2XATvhkV5YyZe7Okzay1U422ph3O8aQMIaHgR3AA+j2863An8FdXwcmRTgkwndF\n",
+       "+D8ivHU+7S4XY+QGCLwGh10w5NWf08CJGugcBpZNGp7F1THGJGDsBXilESKZLZmIA/Y3gH+fsarq\n",
+       "3sz0QW83vNGg9ahA608d8kKi58oftbgSIvxl5sH1ogjdQAfwm8DLwDpj+HVjFk8t12J+GIMxhnZj\n",
+       "+Htj+Kgx3AGf+RzQBHwSTQtOAc3zac/aw74KxpghEcdXIfSzUN4EcWD8LIw9Bnwqx92zyDum98ER\n",
+       "O/TdCSV2mEyB/ykIWTWDbmIyyqnfghffDSc3gsfAeAhGvgn8Yq77d5PTDRwG+oB+oNsYrHT4mxRj\n",
+       "CACvZ77mTV6n9ua6DxYWFhYWFhbZ46bTGbGwsLCwsLBYHlgxIxYWFhYWFhY5xTJGliEisifXfbDI\n",
+       "LtaYWsyFNS+WB0thnJfFNo2IeIwxCyIXLCLubJT8FpFdwF7ABwSAV4wxB26wzbmMTQGeMMbcsEaK\n",
+       "iGwCksaYUzOO3WqM2X+jbc9oz4vO0zm0Xq6rvazMhesZ92yMcTbHNFvjJyLbgYAx5ryI3A+4gB9n\n",
+       "Q5042+M/o92srQk3sgZk675fiHs9m/f3jc6RbM6DXK0B+XT/5+O9v6SMERH5IPB7QBLV0v/LTBT8\n",
+       "s8aYexbonE8aYx64wTb+Fh3Ep4AgUArci06W37mBdiPAXJNrqzGm/HrbzbT9N0A1kACqgI8ZY0Zu\n",
+       "9FqLyMeA3wDCwFeAj6P51I8YY75wDe0s6Fy41nHP1hhna0yzNX4i8veAm0xRQGAKmAQajTG/Ot92\n",
+       "ZrSXlfGf0d6CrwnXuwZk877P9r2ezfv7euZINuZBPq0B+XT/5+u9j8kkCy+FL+AVNF1ZgF9H85zL\n",
+       "gGez0Pa+y3xNZKHtF67l+DW0ewjwzXH8qWxcjxnfbwGeB3bf6LVGb7QLFW970RtYgJdzMReyNe7Z\n",
+       "GuNsjWm2xm9m/4FjM75/Ppfjn+15kM25kO05kc15ke35cb1zJBvzIJ/WgHy6//P13l9yOiPGmGTm\n",
+       "278XkUNo6ePqLDRdiVqfl0h9i8hPs9D2QRF5GHgStS5LUKv50A22+06YU7HwbTfYLoBNRFzGmLgx\n",
+       "5qiIvBf4BioVfCPEjLr4IplChHEAEblmN3iW5kK2xj1bY5ytMc3W+NlnfP/fZ3x/vS7XrI3/f3Yk\n",
+       "e2tCtteAbN732b7Xs3l/X88cyco8yKM1IJ/u//y896/HgsnXL+AhoGXWsQbgH7LQ9tuZ2yLdmaW+\n",
+       "70Ct9z9B3ZPbc309r9LfW4CaWcccwAdvsN1fARyzjrmA/52LuZDNcc+nMc7W+KEL2Fzj9WAuxz/b\n",
+       "8yDbcyEf58RCzI/rnSPZmAf5tgbky1jn672/pGJGZiMi3zLG/NICtf1tY8wHF6Jti4tk6zpnay5Y\n",
+       "4764ZPt6Z3NNsObC4pGNa22tAfnNUk/trVvAtrNZHMvi8mTrOmdrLljjvrhk+3pnc02w5sLikY1r\n",
+       "ba0BecxSN0YsLCwsLCws8hzLGLGwsLCwsLDIKZYxYmFhYWFhYZFTlnoAa40xZvhma9viItm6zvnW\n",
+       "jsX8yPb1zmZ71lxYPLJxra01IL9Z0saIhYWFhYWFRf5jbdNYWFhYWFhY5BTLGLGwsLCwsLDIKZYx\n",
+       "YmFhYWFhYZFTLGMkjxCRt4nIKRE5KyJ/lOv+WCw8IvIVERkWkWO57ovF4iAiTSLyrIicEJHjIvLb\n",
+       "ue6TxcIjIh4ReVVEDovISRH5TK77lE9YAax5gojYgdPAfUA/8DpaK6A9px2zWFBE5E4gBHzdGLM5\n",
+       "1/2xWHhEpBaoNcYcFhEvcBB4j3WvL31EpNAYMy0iDuBF4PeNMS/mul/5gOUZyR/2AOeMMV3GmATw\n",
+       "HeBnc9wniwXGGLMPmMh1PywWD2PMkDHmcOb7ENAO1Oe2VxaLgTFmOvOtC616O57D7uQVljGSPzQA\n",
+       "vTN+7sscs7CwWKKISCuwHXg1tz2xWAxExCYih4Fh4FljzMlc9ylfsIyR/MHaL7OwWEZktmi+D/xO\n",
+       "xkNiscQxxqSNMduARuAuEbk7x13KGyxjJH/oB5pm/NyEekcsLCyWGCLiBB4BvmGMeTTX/bFYXIwx\n",
+       "QeA/gF257ku+YBkj+cMBYLWItIqIC3g/8MMc98nCwiLLiIgA/wScNMb8ba77Y7E4iEiliPgy3xcA\n",
+       "9wNv5LZX+YNljOQJxpgk8FvAE8BJ4LtWdP3SR0S+DbwMrBGRXhH5aK77ZLHg3A58CLhHRN7IfL0t\n",
+       "152yWHDqgGcyMSOvAv9ujHk6x33KG6zUXgsLCwsLC4ucYnlGLCwsLCwsLHKKZYxYWFhYWFhY5BTL\n",
+       "GLGwsLCwsLDIKZYxYmFhYWFhYZFTLGPEwsLCwsLCIqdYxoiFhYWFhYVFTrGMEQsLCwsLC4ucYhkj\n",
+       "FhYWFhYWFjnl/wPBByFp6Gp27QAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x11dbd0090>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "X, y = sklearn.datasets.make_classification(\n",
+    "    n_samples=10000, n_features=4, n_redundant=0, n_informative=2, \n",
+    "    n_clusters_per_class=2, hypercube=False, random_state=0\n",
+    ")\n",
+    "\n",
+    "# Split into train and test\n",
+    "X, Xt, y, yt = sklearn.cross_validation.train_test_split(X, y)\n",
+    "\n",
+    "# Visualize sample of the data\n",
+    "ind = np.random.permutation(X.shape[0])[:1000]\n",
+    "df = pd.DataFrame(X[ind])\n",
+    "_ = pd.scatter_matrix(df, figsize=(9, 9), diagonal='kde', marker='o', s=40, alpha=.4, c=y[ind])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Learn and evaluate scikit-learn's logistic regression with stochastic gradient descent (SGD) training. Time and check the classifier's accuracy."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Accuracy: 0.783\n",
+      "Accuracy: 0.783\n",
+      "Accuracy: 0.783\n",
+      "Accuracy: 0.783\n",
+      "1 loops, best of 3: 508 ms per loop\n"
+     ]
+    }
+   ],
+   "source": [
+    "%%timeit\n",
+    "# Train and test the scikit-learn SGD logistic regression.\n",
+    "clf = sklearn.linear_model.SGDClassifier(\n",
+    "    loss='log', n_iter=1000, penalty='l2', alpha=1e-3, class_weight='auto')\n",
+    "\n",
+    "clf.fit(X, y)\n",
+    "yt_pred = clf.predict(Xt)\n",
+    "print('Accuracy: {:.3f}'.format(sklearn.metrics.accuracy_score(yt, yt_pred)))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Save the dataset to HDF5 for loading in Caffe."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Write out the data to HDF5 files in a temp directory.\n",
+    "# This file is assumed to be caffe_root/examples/hdf5_classification.ipynb\n",
+    "dirname = os.path.abspath('./examples/hdf5_classification/data')\n",
+    "if not os.path.exists(dirname):\n",
+    "    os.makedirs(dirname)\n",
+    "\n",
+    "train_filename = os.path.join(dirname, 'train.h5')\n",
+    "test_filename = os.path.join(dirname, 'test.h5')\n",
+    "\n",
+    "# HDF5DataLayer source should be a file containing a list of HDF5 filenames.\n",
+    "# To show this off, we'll list the same data file twice.\n",
+    "with h5py.File(train_filename, 'w') as f:\n",
+    "    f['data'] = X\n",
+    "    f['label'] = y.astype(np.float32)\n",
+    "with open(os.path.join(dirname, 'train.txt'), 'w') as f:\n",
+    "    f.write(train_filename + '\\n')\n",
+    "    f.write(train_filename + '\\n')\n",
+    "    \n",
+    "# HDF5 is pretty efficient, but can be further compressed.\n",
+    "comp_kwargs = {'compression': 'gzip', 'compression_opts': 1}\n",
+    "with h5py.File(test_filename, 'w') as f:\n",
+    "    f.create_dataset('data', data=Xt, **comp_kwargs)\n",
+    "    f.create_dataset('label', data=yt.astype(np.float32), **comp_kwargs)\n",
+    "with open(os.path.join(dirname, 'test.txt'), 'w') as f:\n",
+    "    f.write(test_filename + '\\n')"
+   ]
+  },
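+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A quick way to sanity-check the files just written is to read them back with `h5py` and plain file I/O; a minimal sketch, assuming the default 75/25 train/test split of the 10,000 samples:\n",
+    "\n",
+    "```python\n",
+    "import h5py\n",
+    "\n",
+    "# Shapes of the training arrays stored above.\n",
+    "with h5py.File(train_filename, 'r') as f:\n",
+    "    print(f['data'].shape, f['label'].shape)  # expect (7500, 4) and (7500,)\n",
+    "# The list file the HDF5DataLayer will read: the same HDF5 file listed twice.\n",
+    "with open(os.path.join(dirname, 'train.txt')) as f:\n",
+    "    print(f.read())\n",
+    "```"
+   ]
+  },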
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's define logistic regression in Caffe through Python net specification. This is a quick and natural way to define nets that sidesteps manually editing the protobuf model."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "from caffe import layers as L\n",
+    "from caffe import params as P\n",
+    "\n",
+    "def logreg(hdf5, batch_size):\n",
+    "    # logistic regression: data, matrix multiplication, and 2-class softmax loss\n",
+    "    n = caffe.NetSpec()\n",
+    "    n.data, n.label = L.HDF5Data(batch_size=batch_size, source=hdf5, ntop=2)\n",
+    "    n.ip1 = L.InnerProduct(n.data, num_output=2, weight_filler=dict(type='xavier'))\n",
+    "    n.accuracy = L.Accuracy(n.ip1, n.label)\n",
+    "    n.loss = L.SoftmaxWithLoss(n.ip1, n.label)\n",
+    "    return n.to_proto()\n",
+    "    \n",
+    "with open('examples/hdf5_classification/logreg_auto_train.prototxt', 'w') as f:\n",
+    "    f.write(str(logreg('examples/hdf5_classification/data/train.txt', 10)))\n",
+    "    \n",
+    "with open('examples/hdf5_classification/logreg_auto_test.prototxt', 'w') as f:\n",
+    "    f.write(str(logreg('examples/hdf5_classification/data/test.txt', 10)))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Time to learn and evaluate our Caffeinated logistic regression in Python."
+   ]
+  },
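+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The solver configuration `examples/hdf5_classification/solver.prototxt` used below is not generated in this notebook; it ships with the example. For reference, here is a minimal sketch of writing an equivalent file from Python -- the parameter values are the ones echoed at the start of the command-line training log further down, and the output filename is hypothetical:\n",
+    "\n",
+    "```python\n",
+    "# Sketch: write a solver definition equivalent to the shipped solver.prototxt,\n",
+    "# assuming it matches the parameters echoed in the training log below.\n",
+    "solver_txt = '\\n'.join([\n",
+    "    'train_net: \"examples/hdf5_classification/logreg_auto_train.prototxt\"',\n",
+    "    'test_net: \"examples/hdf5_classification/logreg_auto_test.prototxt\"',\n",
+    "    'test_iter: 250', 'test_interval: 1000',\n",
+    "    'base_lr: 0.01', 'display: 1000', 'max_iter: 10000',\n",
+    "    'lr_policy: \"step\"', 'gamma: 0.1', 'momentum: 0.9',\n",
+    "    'weight_decay: 0.0005', 'stepsize: 5000', 'snapshot: 10000',\n",
+    "    'snapshot_prefix: \"examples/hdf5_classification/data/train\"',\n",
+    "    'solver_mode: CPU',\n",
+    "])\n",
+    "with open('examples/hdf5_classification/my_solver.prototxt', 'w') as f:  # hypothetical name\n",
+    "    f.write(solver_txt + '\\n')\n",
+    "```"
+   ]
+  },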
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Accuracy: 0.782\n",
+      "Accuracy: 0.782\n",
+      "Accuracy: 0.782\n",
+      "Accuracy: 0.782\n",
+      "1 loops, best of 3: 287 ms per loop\n"
+     ]
+    }
+   ],
+   "source": [
+    "%%timeit\n",
+    "caffe.set_mode_cpu()\n",
+    "solver = caffe.get_solver('examples/hdf5_classification/solver.prototxt')\n",
+    "solver.solve()\n",
+    "\n",
+    "accuracy = 0\n",
+    "batch_size = solver.test_nets[0].blobs['data'].num\n",
+    "test_iters = int(len(Xt) / batch_size)\n",
+    "for i in range(test_iters):\n",
+    "    solver.test_nets[0].forward()\n",
+    "    accuracy += solver.test_nets[0].blobs['accuracy'].data\n",
+    "accuracy /= test_iters\n",
+    "\n",
+    "print(\"Accuracy: {:.3f}\".format(accuracy))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Do the same through the command line interface for detailed output on the model and solving."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "I0318 00:58:32.322571 2013098752 caffe.cpp:117] Use CPU.\n",
+      "I0318 00:58:32.643163 2013098752 caffe.cpp:121] Starting Optimization\n",
+      "I0318 00:58:32.643229 2013098752 solver.cpp:32] Initializing solver from parameters: \n",
+      "train_net: \"examples/hdf5_classification/logreg_auto_train.prototxt\"\n",
+      "test_net: \"examples/hdf5_classification/logreg_auto_test.prototxt\"\n",
+      "test_iter: 250\n",
+      "test_interval: 1000\n",
+      "base_lr: 0.01\n",
+      "display: 1000\n",
+      "max_iter: 10000\n",
+      "lr_policy: \"step\"\n",
+      "gamma: 0.1\n",
+      "momentum: 0.9\n",
+      "weight_decay: 0.0005\n",
+      "stepsize: 5000\n",
+      "snapshot: 10000\n",
+      "snapshot_prefix: \"examples/hdf5_classification/data/train\"\n",
+      "solver_mode: CPU\n",
+      "I0318 00:58:32.643333 2013098752 solver.cpp:61] Creating training net from train_net file: examples/hdf5_classification/logreg_auto_train.prototxt\n",
+      "I0318 00:58:32.643465 2013098752 net.cpp:42] Initializing net from parameters: \n",
+      "state {\n",
+      "  phase: TRAIN\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"data\"\n",
+      "  type: \"HDF5Data\"\n",
+      "  top: \"data\"\n",
+      "  top: \"label\"\n",
+      "  hdf5_data_param {\n",
+      "    source: \"examples/hdf5_classification/data/train.txt\"\n",
+      "    batch_size: 10\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"ip1\"\n",
+      "  type: \"InnerProduct\"\n",
+      "  bottom: \"data\"\n",
+      "  top: \"ip1\"\n",
+      "  inner_product_param {\n",
+      "    num_output: 2\n",
+      "    weight_filler {\n",
+      "      type: \"xavier\"\n",
+      "    }\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"accuracy\"\n",
+      "  type: \"Accuracy\"\n",
+      "  bottom: \"ip1\"\n",
+      "  bottom: \"label\"\n",
+      "  top: \"accuracy\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"loss\"\n",
+      "  type: \"SoftmaxWithLoss\"\n",
+      "  bottom: \"ip1\"\n",
+      "  bottom: \"label\"\n",
+      "  top: \"loss\"\n",
+      "}\n",
+      "I0318 00:58:32.644197 2013098752 layer_factory.hpp:74] Creating layer data\n",
+      "I0318 00:58:32.644219 2013098752 net.cpp:84] Creating Layer data\n",
+      "I0318 00:58:32.644230 2013098752 net.cpp:338] data -> data\n",
+      "I0318 00:58:32.644256 2013098752 net.cpp:338] data -> label\n",
+      "I0318 00:58:32.644269 2013098752 net.cpp:113] Setting up data\n",
+      "I0318 00:58:32.644278 2013098752 hdf5_data_layer.cpp:66] Loading list of HDF5 filenames from: examples/hdf5_classification/data/train.txt\n",
+      "I0318 00:58:32.644327 2013098752 hdf5_data_layer.cpp:80] Number of HDF5 files: 2\n",
+      "I0318 00:58:32.646458 2013098752 net.cpp:120] Top shape: 10 4 (40)\n",
+      "I0318 00:58:32.646502 2013098752 net.cpp:120] Top shape: 10 (10)\n",
+      "I0318 00:58:32.646518 2013098752 layer_factory.hpp:74] Creating layer label_data_1_split\n",
+      "I0318 00:58:32.646538 2013098752 net.cpp:84] Creating Layer label_data_1_split\n",
+      "I0318 00:58:32.646546 2013098752 net.cpp:380] label_data_1_split <- label\n",
+      "I0318 00:58:32.646556 2013098752 net.cpp:338] label_data_1_split -> label_data_1_split_0\n",
+      "I0318 00:58:32.646569 2013098752 net.cpp:338] label_data_1_split -> label_data_1_split_1\n",
+      "I0318 00:58:32.646579 2013098752 net.cpp:113] Setting up label_data_1_split\n",
+      "I0318 00:58:32.646586 2013098752 net.cpp:120] Top shape: 10 (10)\n",
+      "I0318 00:58:32.646595 2013098752 net.cpp:120] Top shape: 10 (10)\n",
+      "I0318 00:58:32.646601 2013098752 layer_factory.hpp:74] Creating layer ip1\n",
+      "I0318 00:58:32.646615 2013098752 net.cpp:84] Creating Layer ip1\n",
+      "I0318 00:58:32.646622 2013098752 net.cpp:380] ip1 <- data\n",
+      "I0318 00:58:32.646664 2013098752 net.cpp:338] ip1 -> ip1\n",
+      "I0318 00:58:32.646689 2013098752 net.cpp:113] Setting up ip1\n",
+      "I0318 00:58:32.652330 2013098752 net.cpp:120] Top shape: 10 2 (20)\n",
+      "I0318 00:58:32.652371 2013098752 layer_factory.hpp:74] Creating layer ip1_ip1_0_split\n",
+      "I0318 00:58:32.652393 2013098752 net.cpp:84] Creating Layer ip1_ip1_0_split\n",
+      "I0318 00:58:32.652407 2013098752 net.cpp:380] ip1_ip1_0_split <- ip1\n",
+      "I0318 00:58:32.652421 2013098752 net.cpp:338] ip1_ip1_0_split -> ip1_ip1_0_split_0\n",
+      "I0318 00:58:32.652467 2013098752 net.cpp:338] ip1_ip1_0_split -> ip1_ip1_0_split_1\n",
+      "I0318 00:58:32.652480 2013098752 net.cpp:113] Setting up ip1_ip1_0_split\n",
+      "I0318 00:58:32.652489 2013098752 net.cpp:120] Top shape: 10 2 (20)\n",
+      "I0318 00:58:32.652498 2013098752 net.cpp:120] Top shape: 10 2 (20)\n",
+      "I0318 00:58:32.652505 2013098752 layer_factory.hpp:74] Creating layer accuracy\n",
+      "I0318 00:58:32.652521 2013098752 net.cpp:84] Creating Layer accuracy\n",
+      "I0318 00:58:32.652534 2013098752 net.cpp:380] accuracy <- ip1_ip1_0_split_0\n",
+      "I0318 00:58:32.652545 2013098752 net.cpp:380] accuracy <- label_data_1_split_0\n",
+      "I0318 00:58:32.652562 2013098752 net.cpp:338] accuracy -> accuracy\n",
+      "I0318 00:58:32.652577 2013098752 net.cpp:113] Setting up accuracy\n",
+      "I0318 00:58:32.652590 2013098752 net.cpp:120] Top shape: (1)\n",
+      "I0318 00:58:32.652642 2013098752 layer_factory.hpp:74] Creating layer loss\n",
+      "I0318 00:58:32.652655 2013098752 net.cpp:84] Creating Layer loss\n",
+      "I0318 00:58:32.652663 2013098752 net.cpp:380] loss <- ip1_ip1_0_split_1\n",
+      "I0318 00:58:32.652672 2013098752 net.cpp:380] loss <- label_data_1_split_1\n",
+      "I0318 00:58:32.652679 2013098752 net.cpp:338] loss -> loss\n",
+      "I0318 00:58:32.652689 2013098752 net.cpp:113] Setting up loss\n",
+      "I0318 00:58:32.652701 2013098752 layer_factory.hpp:74] Creating layer loss\n",
+      "I0318 00:58:32.652716 2013098752 net.cpp:120] Top shape: (1)\n",
+      "I0318 00:58:32.652724 2013098752 net.cpp:122]     with loss weight 1\n",
+      "I0318 00:58:32.652740 2013098752 net.cpp:167] loss needs backward computation.\n",
+      "I0318 00:58:32.652746 2013098752 net.cpp:169] accuracy does not need backward computation.\n",
+      "I0318 00:58:32.652753 2013098752 net.cpp:167] ip1_ip1_0_split needs backward computation.\n",
+      "I0318 00:58:32.652760 2013098752 net.cpp:167] ip1 needs backward computation.\n",
+      "I0318 00:58:32.652786 2013098752 net.cpp:169] label_data_1_split does not need backward computation.\n",
+      "I0318 00:58:32.652801 2013098752 net.cpp:169] data does not need backward computation.\n",
+      "I0318 00:58:32.652808 2013098752 net.cpp:205] This network produces output accuracy\n",
+      "I0318 00:58:32.652815 2013098752 net.cpp:205] This network produces output loss\n",
+      "I0318 00:58:32.652825 2013098752 net.cpp:447] Collecting Learning Rate and Weight Decay.\n",
+      "I0318 00:58:32.652833 2013098752 net.cpp:217] Network initialization done.\n",
+      "I0318 00:58:32.652839 2013098752 net.cpp:218] Memory required for data: 528\n",
+      "I0318 00:58:32.652964 2013098752 solver.cpp:154] Creating test net (#0) specified by test_net file: examples/hdf5_classification/logreg_auto_test.prototxt\n",
+      "I0318 00:58:32.652986 2013098752 net.cpp:42] Initializing net from parameters: \n",
+      "state {\n",
+      "  phase: TEST\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"data\"\n",
+      "  type: \"HDF5Data\"\n",
+      "  top: \"data\"\n",
+      "  top: \"label\"\n",
+      "  hdf5_data_param {\n",
+      "    source: \"examples/hdf5_classification/data/test.txt\"\n",
+      "    batch_size: 10\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"ip1\"\n",
+      "  type: \"InnerProduct\"\n",
+      "  bottom: \"data\"\n",
+      "  top: \"ip1\"\n",
+      "  inner_product_param {\n",
+      "    num_output: 2\n",
+      "    weight_filler {\n",
+      "      type: \"xavier\"\n",
+      "    }\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"accuracy\"\n",
+      "  type: \"Accuracy\"\n",
+      "  bottom: \"ip1\"\n",
+      "  bottom: \"label\"\n",
+      "  top: \"accuracy\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"loss\"\n",
+      "  type: \"SoftmaxWithLoss\"\n",
+      "  bottom: \"ip1\"\n",
+      "  bottom: \"label\"\n",
+      "  top: \"loss\"\n",
+      "}\n",
+      "I0318 00:58:32.653069 2013098752 layer_factory.hpp:74] Creating layer data\n",
+      "I0318 00:58:32.653080 2013098752 net.cpp:84] Creating Layer data\n",
+      "I0318 00:58:32.653090 2013098752 net.cpp:338] data -> data\n",
+      "I0318 00:58:32.653128 2013098752 net.cpp:338] data -> label\n",
+      "I0318 00:58:32.653146 2013098752 net.cpp:113] Setting up data\n",
+      "I0318 00:58:32.653154 2013098752 hdf5_data_layer.cpp:66] Loading list of HDF5 filenames from: examples/hdf5_classification/data/test.txt\n",
+      "I0318 00:58:32.653192 2013098752 hdf5_data_layer.cpp:80] Number of HDF5 files: 1\n",
+      "I0318 00:58:32.654850 2013098752 net.cpp:120] Top shape: 10 4 (40)\n",
+      "I0318 00:58:32.654897 2013098752 net.cpp:120] Top shape: 10 (10)\n",
+      "I0318 00:58:32.654914 2013098752 layer_factory.hpp:74] Creating layer label_data_1_split\n",
+      "I0318 00:58:32.654933 2013098752 net.cpp:84] Creating Layer label_data_1_split\n",
+      "I0318 00:58:32.654943 2013098752 net.cpp:380] label_data_1_split <- label\n",
+      "I0318 00:58:32.654953 2013098752 net.cpp:338] label_data_1_split -> label_data_1_split_0\n",
+      "I0318 00:58:32.654966 2013098752 net.cpp:338] label_data_1_split -> label_data_1_split_1\n",
+      "I0318 00:58:32.654976 2013098752 net.cpp:113] Setting up label_data_1_split\n",
+      "I0318 00:58:32.654985 2013098752 net.cpp:120] Top shape: 10 (10)\n",
+      "I0318 00:58:32.654992 2013098752 net.cpp:120] Top shape: 10 (10)\n",
+      "I0318 00:58:32.655000 2013098752 layer_factory.hpp:74] Creating layer ip1\n",
+      "I0318 00:58:32.655010 2013098752 net.cpp:84] Creating Layer ip1\n",
+      "I0318 00:58:32.655017 2013098752 net.cpp:380] ip1 <- data\n",
+      "I0318 00:58:32.655030 2013098752 net.cpp:338] ip1 -> ip1\n",
+      "I0318 00:58:32.655041 2013098752 net.cpp:113] Setting up ip1\n",
+      "I0318 00:58:32.655061 2013098752 net.cpp:120] Top shape: 10 2 (20)\n",
+      "I0318 00:58:32.655072 2013098752 layer_factory.hpp:74] Creating layer ip1_ip1_0_split\n",
+      "I0318 00:58:32.655148 2013098752 net.cpp:84] Creating Layer ip1_ip1_0_split\n",
+      "I0318 00:58:32.655159 2013098752 net.cpp:380] ip1_ip1_0_split <- ip1\n",
+      "I0318 00:58:32.655170 2013098752 net.cpp:338] ip1_ip1_0_split -> ip1_ip1_0_split_0\n",
+      "I0318 00:58:32.655180 2013098752 net.cpp:338] ip1_ip1_0_split -> ip1_ip1_0_split_1\n",
+      "I0318 00:58:32.655190 2013098752 net.cpp:113] Setting up ip1_ip1_0_split\n",
+      "I0318 00:58:32.655199 2013098752 net.cpp:120] Top shape: 10 2 (20)\n",
+      "I0318 00:58:32.655206 2013098752 net.cpp:120] Top shape: 10 2 (20)\n",
+      "I0318 00:58:32.655213 2013098752 layer_factory.hpp:74] Creating layer accuracy\n",
+      "I0318 00:58:32.655223 2013098752 net.cpp:84] Creating Layer accuracy\n",
+      "I0318 00:58:32.655230 2013098752 net.cpp:380] accuracy <- ip1_ip1_0_split_0\n",
+      "I0318 00:58:32.655237 2013098752 net.cpp:380] accuracy <- label_data_1_split_0\n",
+      "I0318 00:58:32.655251 2013098752 net.cpp:338] accuracy -> accuracy\n",
+      "I0318 00:58:32.655259 2013098752 net.cpp:113] Setting up accuracy\n",
+      "I0318 00:58:32.655267 2013098752 net.cpp:120] Top shape: (1)\n",
+      "I0318 00:58:32.655340 2013098752 layer_factory.hpp:74] Creating layer loss\n",
+      "I0318 00:58:32.655354 2013098752 net.cpp:84] Creating Layer loss\n",
+      "I0318 00:58:32.655361 2013098752 net.cpp:380] loss <- ip1_ip1_0_split_1\n",
+      "I0318 00:58:32.655369 2013098752 net.cpp:380] loss <- label_data_1_split_1\n",
+      "I0318 00:58:32.655378 2013098752 net.cpp:338] loss -> loss\n",
+      "I0318 00:58:32.655388 2013098752 net.cpp:113] Setting up loss\n",
+      "I0318 00:58:32.655397 2013098752 layer_factory.hpp:74] Creating layer loss\n",
+      "I0318 00:58:32.655414 2013098752 net.cpp:120] Top shape: (1)\n",
+      "I0318 00:58:32.655422 2013098752 net.cpp:122]     with loss weight 1\n",
+      "I0318 00:58:32.655438 2013098752 net.cpp:167] loss needs backward computation.\n",
+      "I0318 00:58:32.655446 2013098752 net.cpp:169] accuracy does not need backward computation.\n",
+      "I0318 00:58:32.655455 2013098752 net.cpp:167] ip1_ip1_0_split needs backward computation.\n",
+      "I0318 00:58:32.655462 2013098752 net.cpp:167] ip1 needs backward computation.\n",
+      "I0318 00:58:32.655469 2013098752 net.cpp:169] label_data_1_split does not need backward computation.\n",
+      "I0318 00:58:32.655477 2013098752 net.cpp:169] data does not need backward computation.\n",
+      "I0318 00:58:32.655483 2013098752 net.cpp:205] This network produces output accuracy\n",
+      "I0318 00:58:32.655489 2013098752 net.cpp:205] This network produces output loss\n",
+      "I0318 00:58:32.655503 2013098752 net.cpp:447] Collecting Learning Rate and Weight Decay.\n",
+      "I0318 00:58:32.655511 2013098752 net.cpp:217] Network initialization done.\n",
+      "I0318 00:58:32.655517 2013098752 net.cpp:218] Memory required for data: 528\n",
+      "I0318 00:58:32.655547 2013098752 solver.cpp:42] Solver scaffolding done.\n",
+      "I0318 00:58:32.655567 2013098752 solver.cpp:222] Solving \n",
+      "I0318 00:58:32.655575 2013098752 solver.cpp:223] Learning Rate Policy: step\n",
+      "I0318 00:58:32.655583 2013098752 solver.cpp:266] Iteration 0, Testing net (#0)\n",
+      "I0318 00:58:32.683643 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.3736\n",
+      "I0318 00:58:32.683686 2013098752 solver.cpp:315]     Test net output #1: loss = 1.00555 (* 1 = 1.00555 loss)\n",
+      "I0318 00:58:32.683846 2013098752 solver.cpp:189] Iteration 0, loss = 0.869394\n",
+      "I0318 00:58:32.683861 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.3\n",
+      "I0318 00:58:32.683871 2013098752 solver.cpp:204]     Train net output #1: loss = 0.869394 (* 1 = 0.869394 loss)\n",
+      "I0318 00:58:32.683883 2013098752 solver.cpp:464] Iteration 0, lr = 0.01\n",
+      "I0318 00:58:32.698721 2013098752 solver.cpp:266] Iteration 1000, Testing net (#0)\n",
+      "I0318 00:58:32.701917 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.7848\n",
+      "I0318 00:58:32.701961 2013098752 solver.cpp:315]     Test net output #1: loss = 0.590972 (* 1 = 0.590972 loss)\n",
+      "I0318 00:58:32.702014 2013098752 solver.cpp:189] Iteration 1000, loss = 0.54742\n",
+      "I0318 00:58:32.702029 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.7\n",
+      "I0318 00:58:32.702041 2013098752 solver.cpp:204]     Train net output #1: loss = 0.54742 (* 1 = 0.54742 loss)\n",
+      "I0318 00:58:32.702051 2013098752 solver.cpp:464] Iteration 1000, lr = 0.01\n",
+      "I0318 00:58:32.718360 2013098752 solver.cpp:266] Iteration 2000, Testing net (#0)\n",
+      "I0318 00:58:32.721529 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.7696\n",
+      "I0318 00:58:32.721562 2013098752 solver.cpp:315]     Test net output #1: loss = 0.593946 (* 1 = 0.593946 loss)\n",
+      "I0318 00:58:32.721593 2013098752 solver.cpp:189] Iteration 2000, loss = 0.729569\n",
+      "I0318 00:58:32.721603 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.5\n",
+      "I0318 00:58:32.721613 2013098752 solver.cpp:204]     Train net output #1: loss = 0.729569 (* 1 = 0.729569 loss)\n",
+      "I0318 00:58:32.721622 2013098752 solver.cpp:464] Iteration 2000, lr = 0.01\n",
+      "I0318 00:58:32.740182 2013098752 solver.cpp:266] Iteration 3000, Testing net (#0)\n",
+      "I0318 00:58:32.743494 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.77\n",
+      "I0318 00:58:32.743544 2013098752 solver.cpp:315]     Test net output #1: loss = 0.591229 (* 1 = 0.591229 loss)\n",
+      "I0318 00:58:32.744209 2013098752 solver.cpp:189] Iteration 3000, loss = 0.406097\n",
+      "I0318 00:58:32.744231 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.8\n",
+      "I0318 00:58:32.744249 2013098752 solver.cpp:204]     Train net output #1: loss = 0.406096 (* 1 = 0.406096 loss)\n",
+      "I0318 00:58:32.744266 2013098752 solver.cpp:464] Iteration 3000, lr = 0.01\n",
+      "I0318 00:58:32.764135 2013098752 solver.cpp:266] Iteration 4000, Testing net (#0)\n",
+      "I0318 00:58:32.769110 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.7848\n",
+      "I0318 00:58:32.769170 2013098752 solver.cpp:315]     Test net output #1: loss = 0.590972 (* 1 = 0.590972 loss)\n",
+      "I0318 00:58:32.769223 2013098752 solver.cpp:189] Iteration 4000, loss = 0.54742\n",
+      "I0318 00:58:32.769242 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.7\n",
+      "I0318 00:58:32.769255 2013098752 solver.cpp:204]     Train net output #1: loss = 0.54742 (* 1 = 0.54742 loss)\n",
+      "I0318 00:58:32.769265 2013098752 solver.cpp:464] Iteration 4000, lr = 0.01\n",
+      "I0318 00:58:32.785846 2013098752 solver.cpp:266] Iteration 5000, Testing net (#0)\n",
+      "I0318 00:58:32.788722 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.7696\n",
+      "I0318 00:58:32.788751 2013098752 solver.cpp:315]     Test net output #1: loss = 0.593946 (* 1 = 0.593946 loss)\n",
+      "I0318 00:58:32.788811 2013098752 solver.cpp:189] Iteration 5000, loss = 0.72957\n",
+      "I0318 00:58:32.788833 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.5\n",
+      "I0318 00:58:32.788846 2013098752 solver.cpp:204]     Train net output #1: loss = 0.729569 (* 1 = 0.729569 loss)\n",
+      "I0318 00:58:32.788856 2013098752 solver.cpp:464] Iteration 5000, lr = 0.001\n",
+      "I0318 00:58:32.804762 2013098752 solver.cpp:266] Iteration 6000, Testing net (#0)\n",
+      "I0318 00:58:32.808061 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.7856\n",
+      "I0318 00:58:32.808112 2013098752 solver.cpp:315]     Test net output #1: loss = 0.59028 (* 1 = 0.59028 loss)\n",
+      "I0318 00:58:32.808732 2013098752 solver.cpp:189] Iteration 6000, loss = 0.415444\n",
+      "I0318 00:58:32.808753 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.9\n",
+      "I0318 00:58:32.808773 2013098752 solver.cpp:204]     Train net output #1: loss = 0.415444 (* 1 = 0.415444 loss)\n",
+      "I0318 00:58:32.808786 2013098752 solver.cpp:464] Iteration 6000, lr = 0.001\n",
+      "I0318 00:58:32.827118 2013098752 solver.cpp:266] Iteration 7000, Testing net (#0)\n",
+      "I0318 00:58:32.831614 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.7848\n",
+      "I0318 00:58:32.831657 2013098752 solver.cpp:315]     Test net output #1: loss = 0.589454 (* 1 = 0.589454 loss)\n",
+      "I0318 00:58:32.831707 2013098752 solver.cpp:189] Iteration 7000, loss = 0.538038\n",
+      "I0318 00:58:32.831728 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.8\n",
+      "I0318 00:58:32.831745 2013098752 solver.cpp:204]     Train net output #1: loss = 0.538037 (* 1 = 0.538037 loss)\n",
+      "I0318 00:58:32.831759 2013098752 solver.cpp:464] Iteration 7000, lr = 0.001\n",
+      "I0318 00:58:32.849634 2013098752 solver.cpp:266] Iteration 8000, Testing net (#0)\n",
+      "I0318 00:58:32.852712 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.7796\n",
+      "I0318 00:58:32.852748 2013098752 solver.cpp:315]     Test net output #1: loss = 0.589365 (* 1 = 0.589365 loss)\n",
+      "I0318 00:58:32.852792 2013098752 solver.cpp:189] Iteration 8000, loss = 0.684219\n",
+      "I0318 00:58:32.852840 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.5\n",
+      "I0318 00:58:32.852852 2013098752 solver.cpp:204]     Train net output #1: loss = 0.684219 (* 1 = 0.684219 loss)\n",
+      "I0318 00:58:32.852861 2013098752 solver.cpp:464] Iteration 8000, lr = 0.001\n",
+      "I0318 00:58:32.868440 2013098752 solver.cpp:266] Iteration 9000, Testing net (#0)\n",
+      "I0318 00:58:32.871438 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.7816\n",
+      "I0318 00:58:32.871461 2013098752 solver.cpp:315]     Test net output #1: loss = 0.589656 (* 1 = 0.589656 loss)\n",
+      "I0318 00:58:32.872109 2013098752 solver.cpp:189] Iteration 9000, loss = 0.421879\n",
+      "I0318 00:58:32.872131 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.9\n",
+      "I0318 00:58:32.872143 2013098752 solver.cpp:204]     Train net output #1: loss = 0.421879 (* 1 = 0.421879 loss)\n",
+      "I0318 00:58:32.872153 2013098752 solver.cpp:464] Iteration 9000, lr = 0.001\n",
+      "I0318 00:58:32.889981 2013098752 solver.cpp:334] Snapshotting to examples/hdf5_classification/data/train_iter_10000.caffemodel\n",
+      "I0318 00:58:32.890224 2013098752 solver.cpp:342] Snapshotting solver state to examples/hdf5_classification/data/train_iter_10000.solverstate\n",
+      "I0318 00:58:32.890362 2013098752 solver.cpp:248] Iteration 10000, loss = 0.538933\n",
+      "I0318 00:58:32.890380 2013098752 solver.cpp:266] Iteration 10000, Testing net (#0)\n",
+      "I0318 00:58:32.893728 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.782\n",
+      "I0318 00:58:32.893757 2013098752 solver.cpp:315]     Test net output #1: loss = 0.589366 (* 1 = 0.589366 loss)\n",
+      "I0318 00:58:32.893775 2013098752 solver.cpp:253] Optimization Done.\n",
+      "I0318 00:58:32.893786 2013098752 caffe.cpp:134] Optimization Done.\n"
+     ]
+    }
+   ],
+   "source": [
+    "!./build/tools/caffe train -solver examples/hdf5_classification/solver.prototxt"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "If you look at output or the `logreg_auto_train.prototxt`, you'll see that the model is simple logistic regression.\n",
+    "We can make it a little more advanced by introducing a non-linearity between weights that take the input and weights that give the output -- now we have a two-layer network.\n",
+    "That network is given in `nonlinear_auto_train.prototxt`, and that's the only change made in `nonlinear_solver.prototxt` which we will now use.\n",
+    "\n",
+    "The final accuracy of the new network should be higher than logistic regression!"
+   ]
+  },
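+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Judging by the parameters echoed in the command-line log further below, `nonlinear_solver.prototxt` differs from `solver.prototxt` only in the `train_net` and `test_net` paths. A minimal sketch of deriving one from the other, assuming the shipped files match those logs (the output filename is hypothetical):\n",
+    "\n",
+    "```python\n",
+    "with open('examples/hdf5_classification/solver.prototxt') as f:\n",
+    "    cfg = f.read()\n",
+    "# Point the solver at the two-layer net instead of the logistic-regression net.\n",
+    "cfg = cfg.replace('logreg_auto_', 'nonlinear_auto_')\n",
+    "with open('examples/hdf5_classification/my_nonlinear_solver.prototxt', 'w') as f:\n",
+    "    f.write(cfg)\n",
+    "```"
+   ]
+  },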
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "from caffe import layers as L\n",
+    "from caffe import params as P\n",
+    "\n",
+    "def nonlinear_net(hdf5, batch_size):\n",
+    "    # one small nonlinearity, one leap for model kind\n",
+    "    n = caffe.NetSpec()\n",
+    "    n.data, n.label = L.HDF5Data(batch_size=batch_size, source=hdf5, ntop=2)\n",
+    "    # define a hidden layer of dimension 40\n",
+    "    n.ip1 = L.InnerProduct(n.data, num_output=40, weight_filler=dict(type='xavier'))\n",
+    "    # transform the output through the ReLU (rectified linear) non-linearity\n",
+    "    n.relu1 = L.ReLU(n.ip1, in_place=True)\n",
+    "    # score the (now non-linear) features\n",
+    "    n.ip2 = L.InnerProduct(n.ip1, num_output=2, weight_filler=dict(type='xavier'))\n",
+    "    # same accuracy and loss as before\n",
+    "    n.accuracy = L.Accuracy(n.ip2, n.label)\n",
+    "    n.loss = L.SoftmaxWithLoss(n.ip2, n.label)\n",
+    "    return n.to_proto()\n",
+    "    \n",
+    "with open('examples/hdf5_classification/nonlinear_auto_train.prototxt', 'w') as f:\n",
+    "    f.write(str(nonlinear_net('examples/hdf5_classification/data/train.txt', 10)))\n",
+    "    \n",
+    "with open('examples/hdf5_classification/nonlinear_auto_test.prototxt', 'w') as f:\n",
+    "    f.write(str(nonlinear_net('examples/hdf5_classification/data/test.txt', 10)))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Accuracy: 0.832\n",
+      "Accuracy: 0.832\n",
+      "Accuracy: 0.832\n",
+      "Accuracy: 0.831\n",
+      "1 loops, best of 3: 386 ms per loop\n"
+     ]
+    }
+   ],
+   "source": [
+    "%%timeit\n",
+    "caffe.set_mode_cpu()\n",
+    "solver = caffe.get_solver('examples/hdf5_classification/nonlinear_solver.prototxt')\n",
+    "solver.solve()\n",
+    "\n",
+    "accuracy = 0\n",
+    "batch_size = solver.test_nets[0].blobs['data'].num\n",
+    "test_iters = int(len(Xt) / batch_size)\n",
+    "for i in range(test_iters):\n",
+    "    solver.test_nets[0].forward()\n",
+    "    accuracy += solver.test_nets[0].blobs['accuracy'].data\n",
+    "accuracy /= test_iters\n",
+    "\n",
+    "print(\"Accuracy: {:.3f}\".format(accuracy))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Do the same through the command line interface for detailed output on the model and solving."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "I0318 00:58:43.336922 2013098752 caffe.cpp:117] Use CPU.\n",
+      "I0318 00:58:43.654698 2013098752 caffe.cpp:121] Starting Optimization\n",
+      "I0318 00:58:43.654747 2013098752 solver.cpp:32] Initializing solver from parameters: \n",
+      "train_net: \"examples/hdf5_classification/nonlinear_auto_train.prototxt\"\n",
+      "test_net: \"examples/hdf5_classification/nonlinear_auto_test.prototxt\"\n",
+      "test_iter: 250\n",
+      "test_interval: 1000\n",
+      "base_lr: 0.01\n",
+      "display: 1000\n",
+      "max_iter: 10000\n",
+      "lr_policy: \"step\"\n",
+      "gamma: 0.1\n",
+      "momentum: 0.9\n",
+      "weight_decay: 0.0005\n",
+      "stepsize: 5000\n",
+      "snapshot: 10000\n",
+      "snapshot_prefix: \"examples/hdf5_classification/data/train\"\n",
+      "solver_mode: CPU\n",
+      "I0318 00:58:43.654855 2013098752 solver.cpp:61] Creating training net from train_net file: examples/hdf5_classification/nonlinear_auto_train.prototxt\n",
+      "I0318 00:58:43.655004 2013098752 net.cpp:42] Initializing net from parameters: \n",
+      "state {\n",
+      "  phase: TRAIN\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"data\"\n",
+      "  type: \"HDF5Data\"\n",
+      "  top: \"data\"\n",
+      "  top: \"label\"\n",
+      "  hdf5_data_param {\n",
+      "    source: \"examples/hdf5_classification/data/train.txt\"\n",
+      "    batch_size: 10\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"ip1\"\n",
+      "  type: \"InnerProduct\"\n",
+      "  bottom: \"data\"\n",
+      "  top: \"ip1\"\n",
+      "  inner_product_param {\n",
+      "    num_output: 40\n",
+      "    weight_filler {\n",
+      "      type: \"xavier\"\n",
+      "    }\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"relu1\"\n",
+      "  type: \"ReLU\"\n",
+      "  bottom: \"ip1\"\n",
+      "  top: \"ip1\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"ip2\"\n",
+      "  type: \"InnerProduct\"\n",
+      "  bottom: \"ip1\"\n",
+      "  top: \"ip2\"\n",
+      "  inner_product_param {\n",
+      "    num_output: 2\n",
+      "    weight_filler {\n",
+      "      type: \"xavier\"\n",
+      "    }\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"accuracy\"\n",
+      "  type: \"Accuracy\"\n",
+      "  bottom: \"ip2\"\n",
+      "  bottom: \"label\"\n",
+      "  top: \"accuracy\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"loss\"\n",
+      "  type: \"SoftmaxWithLoss\"\n",
+      "  bottom: \"ip2\"\n",
+      "  bottom: \"label\"\n",
+      "  top: \"loss\"\n",
+      "}\n",
+      "I0318 00:58:43.655120 2013098752 layer_factory.hpp:74] Creating layer data\n",
+      "I0318 00:58:43.655139 2013098752 net.cpp:84] Creating Layer data\n",
+      "I0318 00:58:43.655264 2013098752 net.cpp:338] data -> data\n",
+      "I0318 00:58:43.655297 2013098752 net.cpp:338] data -> label\n",
+      "I0318 00:58:43.655310 2013098752 net.cpp:113] Setting up data\n",
+      "I0318 00:58:43.655318 2013098752 hdf5_data_layer.cpp:66] Loading list of HDF5 filenames from: examples/hdf5_classification/data/train.txt\n",
+      "I0318 00:58:43.655365 2013098752 hdf5_data_layer.cpp:80] Number of HDF5 files: 2\n",
+      "I0318 00:58:43.657317 2013098752 net.cpp:120] Top shape: 10 4 (40)\n",
+      "I0318 00:58:43.657342 2013098752 net.cpp:120] Top shape: 10 (10)\n",
+      "I0318 00:58:43.657356 2013098752 layer_factory.hpp:74] Creating layer label_data_1_split\n",
+      "I0318 00:58:43.657373 2013098752 net.cpp:84] Creating Layer label_data_1_split\n",
+      "I0318 00:58:43.657384 2013098752 net.cpp:380] label_data_1_split <- label\n",
+      "I0318 00:58:43.657395 2013098752 net.cpp:338] label_data_1_split -> label_data_1_split_0\n",
+      "I0318 00:58:43.657407 2013098752 net.cpp:338] label_data_1_split -> label_data_1_split_1\n",
+      "I0318 00:58:43.657418 2013098752 net.cpp:113] Setting up label_data_1_split\n",
+      "I0318 00:58:43.657426 2013098752 net.cpp:120] Top shape: 10 (10)\n",
+      "I0318 00:58:43.657433 2013098752 net.cpp:120] Top shape: 10 (10)\n",
+      "I0318 00:58:43.657441 2013098752 layer_factory.hpp:74] Creating layer ip1\n",
+      "I0318 00:58:43.657451 2013098752 net.cpp:84] Creating Layer ip1\n",
+      "I0318 00:58:43.657459 2013098752 net.cpp:380] ip1 <- data\n",
+      "I0318 00:58:43.657467 2013098752 net.cpp:338] ip1 -> ip1\n",
+      "I0318 00:58:43.657479 2013098752 net.cpp:113] Setting up ip1\n",
+      "I0318 00:58:43.662454 2013098752 net.cpp:120] Top shape: 10 40 (400)\n",
+      "I0318 00:58:43.662477 2013098752 layer_factory.hpp:74] Creating layer relu1\n",
+      "I0318 00:58:43.662497 2013098752 net.cpp:84] Creating Layer relu1\n",
+      "I0318 00:58:43.662508 2013098752 net.cpp:380] relu1 <- ip1\n",
+      "I0318 00:58:43.662520 2013098752 net.cpp:327] relu1 -> ip1 (in-place)\n",
+      "I0318 00:58:43.662530 2013098752 net.cpp:113] Setting up relu1\n",
+      "I0318 00:58:43.662539 2013098752 net.cpp:120] Top shape: 10 40 (400)\n",
+      "I0318 00:58:43.662546 2013098752 layer_factory.hpp:74] Creating layer ip2\n",
+      "I0318 00:58:43.662555 2013098752 net.cpp:84] Creating Layer ip2\n",
+      "I0318 00:58:43.662562 2013098752 net.cpp:380] ip2 <- ip1\n",
+      "I0318 00:58:43.662571 2013098752 net.cpp:338] ip2 -> ip2\n",
+      "I0318 00:58:43.662580 2013098752 net.cpp:113] Setting up ip2\n",
+      "I0318 00:58:43.662595 2013098752 net.cpp:120] Top shape: 10 2 (20)\n",
+      "I0318 00:58:43.662606 2013098752 layer_factory.hpp:74] Creating layer ip2_ip2_0_split\n",
+      "I0318 00:58:43.662654 2013098752 net.cpp:84] Creating Layer ip2_ip2_0_split\n",
+      "I0318 00:58:43.662665 2013098752 net.cpp:380] ip2_ip2_0_split <- ip2\n",
+      "I0318 00:58:43.662678 2013098752 net.cpp:338] ip2_ip2_0_split -> ip2_ip2_0_split_0\n",
+      "I0318 00:58:43.662689 2013098752 net.cpp:338] ip2_ip2_0_split -> ip2_ip2_0_split_1\n",
+      "I0318 00:58:43.662698 2013098752 net.cpp:113] Setting up ip2_ip2_0_split\n",
+      "I0318 00:58:43.662706 2013098752 net.cpp:120] Top shape: 10 2 (20)\n",
+      "I0318 00:58:43.662714 2013098752 net.cpp:120] Top shape: 10 2 (20)\n",
+      "I0318 00:58:43.662722 2013098752 layer_factory.hpp:74] Creating layer accuracy\n",
+      "I0318 00:58:43.662734 2013098752 net.cpp:84] Creating Layer accuracy\n",
+      "I0318 00:58:43.662740 2013098752 net.cpp:380] accuracy <- ip2_ip2_0_split_0\n",
+      "I0318 00:58:43.662749 2013098752 net.cpp:380] accuracy <- label_data_1_split_0\n",
+      "I0318 00:58:43.662756 2013098752 net.cpp:338] accuracy -> accuracy\n",
+      "I0318 00:58:43.662766 2013098752 net.cpp:113] Setting up accuracy\n",
+      "I0318 00:58:43.662818 2013098752 net.cpp:120] Top shape: (1)\n",
+      "I0318 00:58:43.662827 2013098752 layer_factory.hpp:74] Creating layer loss\n",
+      "I0318 00:58:43.662839 2013098752 net.cpp:84] Creating Layer loss\n",
+      "I0318 00:58:43.662847 2013098752 net.cpp:380] loss <- ip2_ip2_0_split_1\n",
+      "I0318 00:58:43.662854 2013098752 net.cpp:380] loss <- label_data_1_split_1\n",
+      "I0318 00:58:43.662863 2013098752 net.cpp:338] loss -> loss\n",
+      "I0318 00:58:43.662873 2013098752 net.cpp:113] Setting up loss\n",
+      "I0318 00:58:43.662883 2013098752 layer_factory.hpp:74] Creating layer loss\n",
+      "I0318 00:58:43.662901 2013098752 net.cpp:120] Top shape: (1)\n",
+      "I0318 00:58:43.662909 2013098752 net.cpp:122]     with loss weight 1\n",
+      "I0318 00:58:43.662922 2013098752 net.cpp:167] loss needs backward computation.\n",
+      "I0318 00:58:43.662930 2013098752 net.cpp:169] accuracy does not need backward computation.\n",
+      "I0318 00:58:43.662936 2013098752 net.cpp:167] ip2_ip2_0_split needs backward computation.\n",
+      "I0318 00:58:43.662942 2013098752 net.cpp:167] ip2 needs backward computation.\n",
+      "I0318 00:58:43.662976 2013098752 net.cpp:167] relu1 needs backward computation.\n",
+      "I0318 00:58:43.662988 2013098752 net.cpp:167] ip1 needs backward computation.\n",
+      "I0318 00:58:43.662997 2013098752 net.cpp:169] label_data_1_split does not need backward computation.\n",
+      "I0318 00:58:43.663003 2013098752 net.cpp:169] data does not need backward computation.\n",
+      "I0318 00:58:43.663009 2013098752 net.cpp:205] This network produces output accuracy\n",
+      "I0318 00:58:43.663017 2013098752 net.cpp:205] This network produces output loss\n",
+      "I0318 00:58:43.663028 2013098752 net.cpp:447] Collecting Learning Rate and Weight Decay.\n",
+      "I0318 00:58:43.663035 2013098752 net.cpp:217] Network initialization done.\n",
+      "I0318 00:58:43.663041 2013098752 net.cpp:218] Memory required for data: 3728\n",
+      "I0318 00:58:43.663158 2013098752 solver.cpp:154] Creating test net (#0) specified by test_net file: examples/hdf5_classification/nonlinear_auto_test.prototxt\n",
+      "I0318 00:58:43.663179 2013098752 net.cpp:42] Initializing net from parameters: \n",
+      "state {\n",
+      "  phase: TEST\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"data\"\n",
+      "  type: \"HDF5Data\"\n",
+      "  top: \"data\"\n",
+      "  top: \"label\"\n",
+      "  hdf5_data_param {\n",
+      "    source: \"examples/hdf5_classification/data/test.txt\"\n",
+      "    batch_size: 10\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"ip1\"\n",
+      "  type: \"InnerProduct\"\n",
+      "  bottom: \"data\"\n",
+      "  top: \"ip1\"\n",
+      "  inner_product_param {\n",
+      "    num_output: 40\n",
+      "    weight_filler {\n",
+      "      type: \"xavier\"\n",
+      "    }\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"relu1\"\n",
+      "  type: \"ReLU\"\n",
+      "  bottom: \"ip1\"\n",
+      "  top: \"ip1\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"ip2\"\n",
+      "  type: \"InnerProduct\"\n",
+      "  bottom: \"ip1\"\n",
+      "  top: \"ip2\"\n",
+      "  inner_product_param {\n",
+      "    num_output: 2\n",
+      "    weight_filler {\n",
+      "      type: \"xavier\"\n",
+      "    }\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"accuracy\"\n",
+      "  type: \"Accuracy\"\n",
+      "  bottom: \"ip2\"\n",
+      "  bottom: \"label\"\n",
+      "  top: \"accuracy\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"loss\"\n",
+      "  type: \"SoftmaxWithLoss\"\n",
+      "  bottom: \"ip2\"\n",
+      "  bottom: \"label\"\n",
+      "  top: \"loss\"\n",
+      "}\n",
+      "I0318 00:58:43.663349 2013098752 layer_factory.hpp:74] Creating layer data\n",
+      "I0318 00:58:43.663365 2013098752 net.cpp:84] Creating Layer data\n",
+      "I0318 00:58:43.663373 2013098752 net.cpp:338] data -> data\n",
+      "I0318 00:58:43.663385 2013098752 net.cpp:338] data -> label\n",
+      "I0318 00:58:43.663396 2013098752 net.cpp:113] Setting up data\n",
+      "I0318 00:58:43.663422 2013098752 hdf5_data_layer.cpp:66] Loading list of HDF5 filenames from: examples/hdf5_classification/data/test.txt\n",
+      "I0318 00:58:43.663457 2013098752 hdf5_data_layer.cpp:80] Number of HDF5 files: 1\n",
+      "I0318 00:58:43.664719 2013098752 net.cpp:120] Top shape: 10 4 (40)\n",
+      "I0318 00:58:43.664739 2013098752 net.cpp:120] Top shape: 10 (10)\n",
+      "I0318 00:58:43.664754 2013098752 layer_factory.hpp:74] Creating layer label_data_1_split\n",
+      "I0318 00:58:43.664772 2013098752 net.cpp:84] Creating Layer label_data_1_split\n",
+      "I0318 00:58:43.664783 2013098752 net.cpp:380] label_data_1_split <- label\n",
+      "I0318 00:58:43.664791 2013098752 net.cpp:338] label_data_1_split -> label_data_1_split_0\n",
+      "I0318 00:58:43.664803 2013098752 net.cpp:338] label_data_1_split -> label_data_1_split_1\n",
+      "I0318 00:58:43.664813 2013098752 net.cpp:113] Setting up label_data_1_split\n",
+      "I0318 00:58:43.664822 2013098752 net.cpp:120] Top shape: 10 (10)\n",
+      "I0318 00:58:43.664829 2013098752 net.cpp:120] Top shape: 10 (10)\n",
+      "I0318 00:58:43.664837 2013098752 layer_factory.hpp:74] Creating layer ip1\n",
+      "I0318 00:58:43.664846 2013098752 net.cpp:84] Creating Layer ip1\n",
+      "I0318 00:58:43.664854 2013098752 net.cpp:380] ip1 <- data\n",
+      "I0318 00:58:43.664862 2013098752 net.cpp:338] ip1 -> ip1\n",
+      "I0318 00:58:43.664875 2013098752 net.cpp:113] Setting up ip1\n",
+      "I0318 00:58:43.664901 2013098752 net.cpp:120] Top shape: 10 40 (400)\n",
+      "I0318 00:58:43.664924 2013098752 layer_factory.hpp:74] Creating layer relu1\n",
+      "I0318 00:58:43.664945 2013098752 net.cpp:84] Creating Layer relu1\n",
+      "I0318 00:58:43.664958 2013098752 net.cpp:380] relu1 <- ip1\n",
+      "I0318 00:58:43.664966 2013098752 net.cpp:327] relu1 -> ip1 (in-place)\n",
+      "I0318 00:58:43.664975 2013098752 net.cpp:113] Setting up relu1\n",
+      "I0318 00:58:43.664983 2013098752 net.cpp:120] Top shape: 10 40 (400)\n",
+      "I0318 00:58:43.664990 2013098752 layer_factory.hpp:74] Creating layer ip2\n",
+      "I0318 00:58:43.665000 2013098752 net.cpp:84] Creating Layer ip2\n",
+      "I0318 00:58:43.665006 2013098752 net.cpp:380] ip2 <- ip1\n",
+      "I0318 00:58:43.665015 2013098752 net.cpp:338] ip2 -> ip2\n",
+      "I0318 00:58:43.665030 2013098752 net.cpp:113] Setting up ip2\n",
+      "I0318 00:58:43.665052 2013098752 net.cpp:120] Top shape: 10 2 (20)\n",
+      "I0318 00:58:43.665066 2013098752 layer_factory.hpp:74] Creating layer ip2_ip2_0_split\n",
+      "I0318 00:58:43.665077 2013098752 net.cpp:84] Creating Layer ip2_ip2_0_split\n",
+      "I0318 00:58:43.665086 2013098752 net.cpp:380] ip2_ip2_0_split <- ip2\n",
+      "I0318 00:58:43.665093 2013098752 net.cpp:338] ip2_ip2_0_split -> ip2_ip2_0_split_0\n",
+      "I0318 00:58:43.665103 2013098752 net.cpp:338] ip2_ip2_0_split -> ip2_ip2_0_split_1\n",
+      "I0318 00:58:43.665113 2013098752 net.cpp:113] Setting up ip2_ip2_0_split\n",
+      "I0318 00:58:43.665122 2013098752 net.cpp:120] Top shape: 10 2 (20)\n",
+      "I0318 00:58:43.665128 2013098752 net.cpp:120] Top shape: 10 2 (20)\n",
+      "I0318 00:58:43.665137 2013098752 layer_factory.hpp:74] Creating layer accuracy\n",
+      "I0318 00:58:43.665144 2013098752 net.cpp:84] Creating Layer accuracy\n",
+      "I0318 00:58:43.665153 2013098752 net.cpp:380] accuracy <- ip2_ip2_0_split_0\n",
+      "I0318 00:58:43.665168 2013098752 net.cpp:380] accuracy <- label_data_1_split_0\n",
+      "I0318 00:58:43.665180 2013098752 net.cpp:338] accuracy -> accuracy\n",
+      "I0318 00:58:43.665192 2013098752 net.cpp:113] Setting up accuracy\n",
+      "I0318 00:58:43.665200 2013098752 net.cpp:120] Top shape: (1)\n",
+      "I0318 00:58:43.665207 2013098752 layer_factory.hpp:74] Creating layer loss\n",
+      "I0318 00:58:43.665216 2013098752 net.cpp:84] Creating Layer loss\n",
+      "I0318 00:58:43.665223 2013098752 net.cpp:380] loss <- ip2_ip2_0_split_1\n",
+      "I0318 00:58:43.665230 2013098752 net.cpp:380] loss <- label_data_1_split_1\n",
+      "I0318 00:58:43.665241 2013098752 net.cpp:338] loss -> loss\n",
+      "I0318 00:58:43.665251 2013098752 net.cpp:113] Setting up loss\n",
+      "I0318 00:58:43.665259 2013098752 layer_factory.hpp:74] Creating layer loss\n",
+      "I0318 00:58:43.665273 2013098752 net.cpp:120] Top shape: (1)\n",
+      "I0318 00:58:43.665282 2013098752 net.cpp:122]     with loss weight 1\n",
+      "I0318 00:58:43.665290 2013098752 net.cpp:167] loss needs backward computation.\n",
+      "I0318 00:58:43.665338 2013098752 net.cpp:169] accuracy does not need backward computation.\n",
+      "I0318 00:58:43.665351 2013098752 net.cpp:167] ip2_ip2_0_split needs backward computation.\n",
+      "I0318 00:58:43.665380 2013098752 net.cpp:167] ip2 needs backward computation.\n",
+      "I0318 00:58:43.665387 2013098752 net.cpp:167] relu1 needs backward computation.\n",
+      "I0318 00:58:43.665393 2013098752 net.cpp:167] ip1 needs backward computation.\n",
+      "I0318 00:58:43.665400 2013098752 net.cpp:169] label_data_1_split does not need backward computation.\n",
+      "I0318 00:58:43.665407 2013098752 net.cpp:169] data does not need backward computation.\n",
+      "I0318 00:58:43.665415 2013098752 net.cpp:205] This network produces output accuracy\n",
+      "I0318 00:58:43.665421 2013098752 net.cpp:205] This network produces output loss\n",
+      "I0318 00:58:43.665431 2013098752 net.cpp:447] Collecting Learning Rate and Weight Decay.\n",
+      "I0318 00:58:43.665441 2013098752 net.cpp:217] Network initialization done.\n",
+      "I0318 00:58:43.665446 2013098752 net.cpp:218] Memory required for data: 3728\n",
+      "I0318 00:58:43.665534 2013098752 solver.cpp:42] Solver scaffolding done.\n",
+      "I0318 00:58:43.665568 2013098752 solver.cpp:222] Solving \n",
+      "I0318 00:58:43.665577 2013098752 solver.cpp:223] Learning Rate Policy: step\n",
+      "I0318 00:58:43.665586 2013098752 solver.cpp:266] Iteration 0, Testing net (#0)\n",
+      "I0318 00:58:43.683938 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.5184\n",
+      "I0318 00:58:43.683981 2013098752 solver.cpp:315]     Test net output #1: loss = 0.716141 (* 1 = 0.716141 loss)\n",
+      "I0318 00:58:43.684236 2013098752 solver.cpp:189] Iteration 0, loss = 0.764954\n",
+      "I0318 00:58:43.684267 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.5\n",
+      "I0318 00:58:43.684285 2013098752 solver.cpp:204]     Train net output #1: loss = 0.764954 (* 1 = 0.764954 loss)\n",
+      "I0318 00:58:43.684305 2013098752 solver.cpp:464] Iteration 0, lr = 0.01\n",
+      "I0318 00:58:43.714700 2013098752 solver.cpp:266] Iteration 1000, Testing net (#0)\n",
+      "I0318 00:58:43.721762 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.8168\n",
+      "I0318 00:58:43.721818 2013098752 solver.cpp:315]     Test net output #1: loss = 0.434918 (* 1 = 0.434918 loss)\n",
+      "I0318 00:58:43.721899 2013098752 solver.cpp:189] Iteration 1000, loss = 0.282425\n",
+      "I0318 00:58:43.721917 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.9\n",
+      "I0318 00:58:43.721932 2013098752 solver.cpp:204]     Train net output #1: loss = 0.282426 (* 1 = 0.282426 loss)\n",
+      "I0318 00:58:43.721942 2013098752 solver.cpp:464] Iteration 1000, lr = 0.01\n",
+      "I0318 00:58:43.750509 2013098752 solver.cpp:266] Iteration 2000, Testing net (#0)\n",
+      "I0318 00:58:43.754590 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.8224\n",
+      "I0318 00:58:43.754621 2013098752 solver.cpp:315]     Test net output #1: loss = 0.416874 (* 1 = 0.416874 loss)\n",
+      "I0318 00:58:43.754660 2013098752 solver.cpp:189] Iteration 2000, loss = 0.51988\n",
+      "I0318 00:58:43.754672 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.7\n",
+      "I0318 00:58:43.754683 2013098752 solver.cpp:204]     Train net output #1: loss = 0.51988 (* 1 = 0.51988 loss)\n",
+      "I0318 00:58:43.754690 2013098752 solver.cpp:464] Iteration 2000, lr = 0.01\n",
+      "I0318 00:58:43.782609 2013098752 solver.cpp:266] Iteration 3000, Testing net (#0)\n",
+      "I0318 00:58:43.789728 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.8176\n",
+      "I0318 00:58:43.789777 2013098752 solver.cpp:315]     Test net output #1: loss = 0.415907 (* 1 = 0.415907 loss)\n",
+      "I0318 00:58:43.790487 2013098752 solver.cpp:189] Iteration 3000, loss = 0.5093\n",
+      "I0318 00:58:43.790510 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.7\n",
+      "I0318 00:58:43.790530 2013098752 solver.cpp:204]     Train net output #1: loss = 0.509301 (* 1 = 0.509301 loss)\n",
+      "I0318 00:58:43.790544 2013098752 solver.cpp:464] Iteration 3000, lr = 0.01\n",
+      "I0318 00:58:43.817451 2013098752 solver.cpp:266] Iteration 4000, Testing net (#0)\n",
+      "I0318 00:58:43.821740 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.8252\n",
+      "I0318 00:58:43.821770 2013098752 solver.cpp:315]     Test net output #1: loss = 0.409124 (* 1 = 0.409124 loss)\n",
+      "I0318 00:58:43.821822 2013098752 solver.cpp:189] Iteration 4000, loss = 0.284815\n",
+      "I0318 00:58:43.821835 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.9\n",
+      "I0318 00:58:43.821846 2013098752 solver.cpp:204]     Train net output #1: loss = 0.284815 (* 1 = 0.284815 loss)\n",
+      "I0318 00:58:43.821890 2013098752 solver.cpp:464] Iteration 4000, lr = 0.01\n",
+      "I0318 00:58:43.847015 2013098752 solver.cpp:266] Iteration 5000, Testing net (#0)\n",
+      "I0318 00:58:43.852102 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.8256\n",
+      "I0318 00:58:43.852145 2013098752 solver.cpp:315]     Test net output #1: loss = 0.404445 (* 1 = 0.404445 loss)\n",
+      "I0318 00:58:43.852188 2013098752 solver.cpp:189] Iteration 5000, loss = 0.511566\n",
+      "I0318 00:58:43.852200 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.7\n",
+      "I0318 00:58:43.852210 2013098752 solver.cpp:204]     Train net output #1: loss = 0.511566 (* 1 = 0.511566 loss)\n",
+      "I0318 00:58:43.852219 2013098752 solver.cpp:464] Iteration 5000, lr = 0.001\n",
+      "I0318 00:58:43.876060 2013098752 solver.cpp:266] Iteration 6000, Testing net (#0)\n",
+      "I0318 00:58:43.880080 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.8328\n",
+      "I0318 00:58:43.880105 2013098752 solver.cpp:315]     Test net output #1: loss = 0.396847 (* 1 = 0.396847 loss)\n",
+      "I0318 00:58:43.880700 2013098752 solver.cpp:189] Iteration 6000, loss = 0.397858\n",
+      "I0318 00:58:43.880718 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.9\n",
+      "I0318 00:58:43.880729 2013098752 solver.cpp:204]     Train net output #1: loss = 0.397858 (* 1 = 0.397858 loss)\n",
+      "I0318 00:58:43.880738 2013098752 solver.cpp:464] Iteration 6000, lr = 0.001\n",
+      "I0318 00:58:43.913795 2013098752 solver.cpp:266] Iteration 7000, Testing net (#0)\n",
+      "I0318 00:58:43.917851 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.8316\n",
+      "I0318 00:58:43.917876 2013098752 solver.cpp:315]     Test net output #1: loss = 0.398135 (* 1 = 0.398135 loss)\n",
+      "I0318 00:58:43.917956 2013098752 solver.cpp:189] Iteration 7000, loss = 0.243849\n",
+      "I0318 00:58:43.917971 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.9\n",
+      "I0318 00:58:43.917989 2013098752 solver.cpp:204]     Train net output #1: loss = 0.243849 (* 1 = 0.243849 loss)\n",
+      "I0318 00:58:43.918002 2013098752 solver.cpp:464] Iteration 7000, lr = 0.001\n",
+      "I0318 00:58:43.943681 2013098752 solver.cpp:266] Iteration 8000, Testing net (#0)\n",
+      "I0318 00:58:43.947589 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.8312\n",
+      "I0318 00:58:43.947615 2013098752 solver.cpp:315]     Test net output #1: loss = 0.394763 (* 1 = 0.394763 loss)\n",
+      "I0318 00:58:43.947651 2013098752 solver.cpp:189] Iteration 8000, loss = 0.513399\n",
+      "I0318 00:58:43.947664 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.7\n",
+      "I0318 00:58:43.947674 2013098752 solver.cpp:204]     Train net output #1: loss = 0.513399 (* 1 = 0.513399 loss)\n",
+      "I0318 00:58:43.947682 2013098752 solver.cpp:464] Iteration 8000, lr = 0.001\n",
+      "I0318 00:58:43.973080 2013098752 solver.cpp:266] Iteration 9000, Testing net (#0)\n",
+      "I0318 00:58:43.977033 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.834\n",
+      "I0318 00:58:43.977056 2013098752 solver.cpp:315]     Test net output #1: loss = 0.395663 (* 1 = 0.395663 loss)\n",
+      "I0318 00:58:43.977710 2013098752 solver.cpp:189] Iteration 9000, loss = 0.399341\n",
+      "I0318 00:58:43.977735 2013098752 solver.cpp:204]     Train net output #0: accuracy = 0.9\n",
+      "I0318 00:58:43.977746 2013098752 solver.cpp:204]     Train net output #1: loss = 0.399342 (* 1 = 0.399342 loss)\n",
+      "I0318 00:58:43.977756 2013098752 solver.cpp:464] Iteration 9000, lr = 0.001\n",
+      "I0318 00:58:44.003437 2013098752 solver.cpp:334] Snapshotting to examples/hdf5_classification/data/train_iter_10000.caffemodel\n",
+      "I0318 00:58:44.003702 2013098752 solver.cpp:342] Snapshotting solver state to examples/hdf5_classification/data/train_iter_10000.solverstate\n",
+      "I0318 00:58:44.003850 2013098752 solver.cpp:248] Iteration 10000, loss = 0.244639\n",
+      "I0318 00:58:44.003871 2013098752 solver.cpp:266] Iteration 10000, Testing net (#0)\n",
+      "I0318 00:58:44.008216 2013098752 solver.cpp:315]     Test net output #0: accuracy = 0.8308\n",
+      "I0318 00:58:44.008252 2013098752 solver.cpp:315]     Test net output #1: loss = 0.397291 (* 1 = 0.397291 loss)\n",
+      "I0318 00:58:44.008262 2013098752 solver.cpp:253] Optimization Done.\n",
+      "I0318 00:58:44.008270 2013098752 caffe.cpp:134] Optimization Done.\n"
+     ]
+    }
+   ],
+   "source": [
+    "!./build/tools/caffe train -solver examples/hdf5_classification/nonlinear_solver.prototxt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Clean up (comment this out if you want to examine the hdf5_classification/data directory).\n",
+    "shutil.rmtree(dirname)"
+   ]
+  }
+ ],
+ "metadata": {
+  "description": "Use Caffe as a generic SGD optimizer to train logistic regression on non-image HDF5 data.",
+  "example_name": "Off-the-shelf SGD for classification",
+  "include_in_docs": true,
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "python2"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.9"
+  },
+  "priority": 3
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/examples/03-fine-tuning.ipynb b/examples/03-fine-tuning.ipynb
new file mode 100644
index 0000000..cc90b16
--- /dev/null
+++ b/examples/03-fine-tuning.ipynb
@@ -0,0 +1,947 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Fine-tuning a Pretrained Network for Style Recognition\n",
+    "\n",
+    "In this example, we'll explore a common approach that is particularly useful in real-world applications: take a pre-trained Caffe network and fine-tune the parameters on your custom data.\n",
+    "\n",
+    "The upside of such approach is that, since pre-trained networks are learned on a large set of images, the intermediate layers capture the \"semantics\" of the general visual appearance. Think of it as a very powerful feature that you can treat as a black box. On top of that, only a few layers will be needed to obtain a very good performance of the data."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "First, we will need to prepare the data. This involves the following parts:\n",
+    "(1) Get the ImageNet ilsvrc pretrained model with the provided shell scripts.\n",
+    "(2) Download a subset of the overall Flickr style dataset for this demo.\n",
+    "(3) Compile the downloaded Flickr dataset into a database that Caffe can then consume."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "os.chdir('..')\n",
+    "import sys\n",
+    "sys.path.insert(0, './python')\n",
+    "\n",
+    "import caffe\n",
+    "import numpy as np\n",
+    "from pylab import *\n",
+    "%matplotlib inline"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# This downloads the ilsvrc auxiliary data (mean file, etc),\n",
+    "# and a subset of 2000 images for the style recognition task.\n",
+    "!data/ilsvrc12/get_ilsvrc_aux.sh\n",
+    "!scripts/download_model_binary.py models/bvlc_reference_caffenet\n",
+    "!python examples/finetune_flickr_style/assemble_data.py \\\n",
+    "    --workers=-1 --images=2000 --seed=1701 --label=5"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's show what is the difference between the fine-tuning network and the original caffe model."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "1c1\r\n",
+      "< name: \"CaffeNet\"\r\n",
+      "---\r\n",
+      "> name: \"FlickrStyleCaffeNet\"\r\n",
+      "4c4\r\n",
+      "<   type: \"Data\"\r\n",
+      "---\r\n",
+      ">   type: \"ImageData\"\r\n",
+      "15,26c15,19\r\n",
+      "< # mean pixel / channel-wise mean instead of mean image\r\n",
+      "< #  transform_param {\r\n",
+      "< #    crop_size: 227\r\n",
+      "< #    mean_value: 104\r\n",
+      "< #    mean_value: 117\r\n",
+      "< #    mean_value: 123\r\n",
+      "< #    mirror: true\r\n",
+      "< #  }\r\n",
+      "<   data_param {\r\n",
+      "<     source: \"examples/imagenet/ilsvrc12_train_lmdb\"\r\n",
+      "<     batch_size: 256\r\n",
+      "<     backend: LMDB\r\n",
+      "---\r\n",
+      ">   image_data_param {\r\n",
+      ">     source: \"data/flickr_style/train.txt\"\r\n",
+      ">     batch_size: 50\r\n",
+      ">     new_height: 256\r\n",
+      ">     new_width: 256\r\n",
+      "31c24\r\n",
+      "<   type: \"Data\"\r\n",
+      "---\r\n",
+      ">   type: \"ImageData\"\r\n",
+      "42,51c35,36\r\n",
+      "< # mean pixel / channel-wise mean instead of mean image\r\n",
+      "< #  transform_param {\r\n",
+      "< #    crop_size: 227\r\n",
+      "< #    mean_value: 104\r\n",
+      "< #    mean_value: 117\r\n",
+      "< #    mean_value: 123\r\n",
+      "< #    mirror: true\r\n",
+      "< #  }\r\n",
+      "<   data_param {\r\n",
+      "<     source: \"examples/imagenet/ilsvrc12_val_lmdb\"\r\n",
+      "---\r\n",
+      ">   image_data_param {\r\n",
+      ">     source: \"data/flickr_style/test.txt\"\r\n",
+      "53c38,39\r\n",
+      "<     backend: LMDB\r\n",
+      "---\r\n",
+      ">     new_height: 256\r\n",
+      ">     new_width: 256\r\n",
+      "323a310\r\n",
+      ">   # Note that lr_mult can be set to 0 to disable any fine-tuning of this, and any other, layer\r\n",
+      "360c347\r\n",
+      "<   name: \"fc8\"\r\n",
+      "---\r\n",
+      ">   name: \"fc8_flickr\"\r\n",
+      "363c350,351\r\n",
+      "<   top: \"fc8\"\r\n",
+      "---\r\n",
+      ">   top: \"fc8_flickr\"\r\n",
+      ">   # lr_mult is set to higher than for other layers, because this layer is starting from random while the others are already trained\r\n",
+      "365c353\r\n",
+      "<     lr_mult: 1\r\n",
+      "---\r\n",
+      ">     lr_mult: 10\r\n",
+      "369c357\r\n",
+      "<     lr_mult: 2\r\n",
+      "---\r\n",
+      ">     lr_mult: 20\r\n",
+      "373c361\r\n",
+      "<     num_output: 1000\r\n",
+      "---\r\n",
+      ">     num_output: 20\r\n",
+      "384a373,379\r\n",
+      ">   name: \"loss\"\r\n",
+      ">   type: \"SoftmaxWithLoss\"\r\n",
+      ">   bottom: \"fc8_flickr\"\r\n",
+      ">   bottom: \"label\"\r\n",
+      ">   top: \"loss\"\r\n",
+      "> }\r\n",
+      "> layer {\r\n",
+      "387c382\r\n",
+      "<   bottom: \"fc8\"\r\n",
+      "---\r\n",
+      ">   bottom: \"fc8_flickr\"\r\n",
+      "393,399d387\r\n",
+      "< }\r\n",
+      "< layer {\r\n",
+      "<   name: \"loss\"\r\n",
+      "<   type: \"SoftmaxWithLoss\"\r\n",
+      "<   bottom: \"fc8\"\r\n",
+      "<   bottom: \"label\"\r\n",
+      "<   top: \"loss\"\r\n"
+     ]
+    }
+   ],
+   "source": [
+    "!diff models/bvlc_reference_caffenet/train_val.prototxt models/finetune_flickr_style/train_val.prototxt"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "For your record, if you want to train the network in pure C++ tools, here is the command:\n",
+    "\n",
+    "<code>\n",
+    "build/tools/caffe train \\\n",
+    "    -solver models/finetune_flickr_style/solver.prototxt \\\n",
+    "    -weights models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel \\\n",
+    "    -gpu 0\n",
+    "</code>\n",
+    "\n",
+    "However, we will train using Python in this example."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "iter 0, finetune_loss=3.360094, scratch_loss=3.136188\n",
+      "iter 10, finetune_loss=2.672608, scratch_loss=9.736364\n",
+      "iter 20, finetune_loss=2.071996, scratch_loss=2.250404\n",
+      "iter 30, finetune_loss=1.758295, scratch_loss=2.049553\n",
+      "iter 40, finetune_loss=1.533391, scratch_loss=1.941318\n",
+      "iter 50, finetune_loss=1.561658, scratch_loss=1.839706\n",
+      "iter 60, finetune_loss=1.461696, scratch_loss=1.880035\n",
+      "iter 70, finetune_loss=1.267941, scratch_loss=1.719161\n",
+      "iter 80, finetune_loss=1.192778, scratch_loss=1.627453\n",
+      "iter 90, finetune_loss=1.541176, scratch_loss=1.822061\n",
+      "iter 100, finetune_loss=1.029039, scratch_loss=1.654087\n",
+      "iter 110, finetune_loss=1.138547, scratch_loss=1.735837\n",
+      "iter 120, finetune_loss=0.917412, scratch_loss=1.851918\n",
+      "iter 130, finetune_loss=0.971519, scratch_loss=1.801927\n",
+      "iter 140, finetune_loss=0.868252, scratch_loss=1.745545\n",
+      "iter 150, finetune_loss=0.790020, scratch_loss=1.844925\n",
+      "iter 160, finetune_loss=1.092668, scratch_loss=1.695591\n",
+      "iter 170, finetune_loss=1.055344, scratch_loss=1.661715\n",
+      "iter 180, finetune_loss=0.969769, scratch_loss=1.823639\n",
+      "iter 190, finetune_loss=0.780566, scratch_loss=1.820862\n",
+      "done\n"
+     ]
+    }
+   ],
+   "source": [
+    "niter = 200\n",
+    "# losses will also be stored in the log\n",
+    "train_loss = np.zeros(niter)\n",
+    "scratch_train_loss = np.zeros(niter)\n",
+    "\n",
+    "caffe.set_device(0)\n",
+    "caffe.set_mode_gpu()\n",
+    "# We create a solver that fine-tunes from a previously trained network.\n",
+    "solver = caffe.SGDSolver('models/finetune_flickr_style/solver.prototxt')\n",
+    "solver.net.copy_from('models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel')\n",
+    "# For reference, we also create a solver that does no finetuning.\n",
+    "scratch_solver = caffe.SGDSolver('models/finetune_flickr_style/solver.prototxt')\n",
+    "\n",
+    "# We run the solver for niter times, and record the training loss.\n",
+    "for it in range(niter):\n",
+    "    solver.step(1)  # SGD by Caffe\n",
+    "    scratch_solver.step(1)\n",
+    "    # store the train loss\n",
+    "    train_loss[it] = solver.net.blobs['loss'].data\n",
+    "    scratch_train_loss[it] = scratch_solver.net.blobs['loss'].data\n",
+    "    if it % 10 == 0:\n",
+    "        print 'iter %d, finetune_loss=%f, scratch_loss=%f' % (it, train_loss[it], scratch_train_loss[it])\n",
+    "print 'done'"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's look at the training loss produced by the two training procedures respectively."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {
+    "collapsed": false,
+    "scrolled": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[<matplotlib.lines.Line2D at 0x7fbb36f0ad50>,\n",
+       " <matplotlib.lines.Line2D at 0x7fbb36f0afd0>]"
+      ]
+     },
+     "execution_count": 5,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAXUAAAEACAYAAABMEua6AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmcXFWd9/HPtzt7AlkkJCGAgbCIqCSyuIDaRECEYZvB\n",
+       "EQRFB5iMo8CjzuMwOlpdioo4IM4iM6wTgdHhgRFBRAhLM6gQtgQCIQQkYc8CJIEQQpb+PX+c01hp\n",
+       "eqmqrl5SfN+vV7266tZdzr11+3tPnXvuLUUEZmZWHxr6uwBmZlY7DnUzszriUDczqyMOdTOzOuJQ\n",
+       "NzOrIw51M7M6UlaoS2qUNFfS9fn1OEmzJS2SdLOkMb1bTDMzK0e5NfUzgAVAW6f2M4HZEbEbcGt+\n",
+       "bWZm/azbUJe0PXAYcDGgPPhIYFZ+Pgs4uldKZ2ZmFSmnpv5j4P8CrSXDJkTEsvx8GTCh1gUzM7PK\n",
+       "dRnqkv4MWB4Rc/lTLX0zke4z4HsNmJkNAIO6ef/DwJGSDgOGAVtLuhxYJmliRCyVNAlY3tHEkhz2\n",
+       "ZmZViIgOK9LdUbk39JL0MeDvIuIISecAL0XEDyWdCYyJiLecLJUU1RbMNiepOSKa+7sc9cLbs7a8\n",
+       "PWurJ9lZaT/1tiPA2cDBkhYBM/JrMzPrZ901v7wpIu4A7sjPXwYO6q1CmZlZdXxF6Zajpb8LUGda\n",
+       "+rsAdaalvwtgSdlt6lXN3G3qZmYV68s2dTMzG8Ac6mZmdcShbmZWRxzqZmZ1xKFuZlZHHOpmZnXE\n",
+       "oW5mVkcc6mZmdcShbmZWRxzqZmZ1xKFuZlZHHOpmZnXEoW5mVkcc6mZmdaTPQ11FSUUd1tfLNTN7\n",
+       "O+iPmvo44HoV5fusm5nVWH+FegMwqh+WbWZW17oNdUnDJM2RNE/SAkk/yMObJT0raW5+HFrmMse2\n",
+       "+2tmZjXS7Q9PR8Q6SQdGxFpJg4DfSToACOC8iDivwmW2hfkY4OkKpzUzsy6U1fwSEWvz0yFAI7Ay\n",
+       "v66mXXxc/jumimnNzKwLZYW6pAZJ84BlwO0R8Uh+6zRJD0q6RFK5Ie3mFzOzXlJuTb01IqYB2wMf\n",
+       "ldQEXADsBEwDXgDOLXOZpc0vZmZWQ922qZeKiNWSbgD2iYiWtuGSLgau72gaSc0lL1toZhypPd41\n",
+       "dTMzIFeUm2oxr25DXdI2wMaIWCVpOHAwUJQ0MSKW5tGOAeZ3NH1ENG82v6I+BzyHa+pmZgDkSnJL\n",
+       "22tJhWrnVU5NfRIwS1IDqbnm8oi4VdLPJE0j1boXAzPLXObYPL5r6mZmNVZOl8b5wPs7GP65Kpc5\n",
+       "DngS19TNzGquP64oHUsKddfUzcxqrL9uE+CauplZL+jPmrpD3cysxvo01FXUUGAwqfeLm1/MzGqs\n",
+       "r2vqY4FVpNsMuKZuZlZj/RHqLwOvAsNV1OA+Xr6ZWV3rj1BfGYUIYDUwuo+Xb2ZW1/o61Mfxpzs8\n",
+       "rsTt6mZmNdVfzS+Q2tbdrm5mVkP90vySn6/CNXUzs5rq7+YX19TNzGrIzS9mZnWkP5tffKLUzKzG\n",
+       "+rtN3TV1M7Ma6utQHw68np+7pm5mVmN9HeqDgI35+avA1n28fDOzutbXoT4Y2JCfvw4M6+Plm5nV\n",
+       "tf4O9eF9vHwzs7rmUDczqyNdhrqkYZLmSJonaYGkH+Th4yTNlrRI0s2Syu3FUhrq63Com5nVVJeh\n",
+       "HhHrgAMjYhrwPuBASQcAZwKzI2I34Nb8uhylJ0rdpm5mVmPdNr9ExNr8dAjQSOqKeCQwKw+fBRxd\n",
+       "5vLc/GJm1ou6DXVJDZLmAcuA2yPiEWBCRCzLoywDJpS5PIe6mVkvGtTdCBHRCkyTNBq4SdKB7d4P\n",
+       "SdHZ9JKa33zxWUYy1aFuZlZKUhPQVIt5dRvqbSJitaQbgL2BZZImRsRSSZOA5V1M19z2XEV9GdfU\n",
+       "zcw2ExEtQEvba0mFaufVXe+Xbdp6tkgaDhwMzAWuA07Ko50EXFvm8kpPlK4j/U6pKi20mZl1rLs2\n",
+       "9UnAbblNfQ5wfUTcCpwNHCxpETAjvy7Hm23qUYiNwKY8zMzMaqDL5peImA+8v4PhLwMHVbG80hOl\n",
+       "8KcmmPVVzMvMzNrpsytKczNLZ6FuZmY10Je3CWgEWqMQrSXDHOpmZjXUl6FeepK0jUPdzKyG+jLU\n",
+       "2ze9QOoB41sFmJnVSH+HumvqZmY15FA3M6sjDnUzszriE6VmZnVkINTUfaLUzKxG+jvU/etHZmY1\n",
+       "1N+h7uYXM7Ma6us2dYe6mVkv6uuauk+Umpn1ooHQ/OITpWZmNTIQQt01dTOzGunvUHfvFzOzGvLF\n",
+       "R2ZmdaS/a+oOdTOzGnKom5nVkW5DXdIOkm6X9IikhyWdnoc3S3pW0tz8OLSbWbn3i5lZL+vyh6ez\n",
+       "DcBXImKepFHA/ZJmAwGcFxHnlbks19TNzHpZt6EeEUuBpfn5GkmPApPz26pwWe1PlLr3i5lZDVXU\n",
+       "pi5pCjAduDsPOk3Sg5IukTSmm8ldUzcz62XlNL8AkJtergbOyDX2C4Dv5Le/C5wLnNzBdM0A7MZ+\n",
+       "7Mkb7d52qJvZ256kJqCpFvMqK9QlDQauAa6IiGsBImJ5yfsXA9d3NG1ENAOoqK8CO7R72ydKzext\n",
+       "LyJagJa215IK1c6rnN4vAi4BFkTE+SXDJ5WMdgwwv5tZufnFzKyXlVNT3x84EXhI0tw87BvA8ZKm\n",
+       "kXrBLAZmlrGsDk+UqihFIaL8YpuZWUfK6f3yOzqu0d9Y4bLeUlOPQmxUUa35vfUVzs/MzNrp7ytK\n",
+       "wU0wZmY1M1BC3SdLzcxqYKCEumvqZmY10N+33gWHuplZzbimbmZWRwZCqPv+L2ZmNTIQQt0nSs3M\n",
+       "amSghPqIPiyHmVndGggnSl/DoW5mVhMDoab+GjCyD8thZla3HOpmZnXEoW5mVkcc6mZmdWSgnCh1\n",
+       "qJuZ1YBr6mZmdcShbmZWRwZKqI/qw3KYmdWtgRLqrqmbmdWAT5SamdWRbkNd0g6Sbpf0iKSHJZ2e\n",
+       "h4+TNFvSIkk3SxrTzaxcUzcz62Xl1NQ3AF+JiD2BDwJfkrQHcCYwOyJ2A27Nr7viUDcz62XdhnpE\n",
+       "LI2Iefn5GuBRYDJwJDArjzYLOLqbWTnUzcx6WUVt6pKmANOBOcCEiFiW31oGTOhmcoe6mVkvG1Tu\n",
+       "iJJGAdcAZ0TEq5LefC8iQlJ0Ml0zAB9lBA/zQQr8pt0orwEjVZSiEB3Ow8ysnklqAppqMq8oI0cl\n",
+       "DQZ+DdwYEefnYQuBpohYKmkScHtEvKvddBERAlBR64CxUYjX3zL/ot4ARkch1vV4jczMtnCl2Vmp\n",
+       "cnq/CLgEWNAW6Nl1wEn5+UnAtd3MqrPmF3ATjJlZTZTTpr4/cCJwoKS5+XEocDZwsKRFwIz8ukMq\n",
+       "qiEva1MnozjUzcxqoNs29Yj4HZ2H/0FlLmcwsKGLNnOHuplZDfTVFaWdXU3axqFuZlYDfRXqXbWn\n",
+       "g0PdzKwmHOpmZnXEoW5mVkf6sk29u1D3PdXNzHqoL2vqPlFqZtbL3PxiZlZHHOpmZnXEoW5mVkcG\n",
+       "0olSh7qZWQ8NlBOla3Com5n1mJtfzMzqiEPdzKyOONTNzOqIT5SamdWRgXKi1KFuZlYDbn4xM6sj\n",
+       "DnUzszoyUEJ9DTBKRVX169lmZpZ0G+qSLpW0TNL8kmHNkp5t90PUXeky1KMQG0i19THlFtzMzN6q\n",
+       "nJr6ZUD70A7gvIiYnh+/7WYeQ4E3uhlnObBtGeUxM7NOdBvqEXEnsLKDtyppKikn1FcA4yuYp5mZ\n",
+       "tdOTNvXTJD0o6RJJ3TWblFtTd6ibmfXAoCqnuwD4Tn7+XeBc4OSORpTUzJ4cSNCqZjVFREsn81xB\n",
+       "bn5RUX8OzItCPFll+czMthiSmoCmWsyrqlCPiOUlhbkYuL6LcZtV1AjgpSh0GuiweU39NFJbvkPd\n",
+       "zOperuy2tL2WVKh2XlU1v0iaVPLyGGB+Z+Nm5bapt50o3REYXU3ZzMzezrqtqUv6OfAxYBtJzwAF\n",
+       "oEnSNFIvmMXAzG5mU26b+gdUVAOwAw51M7OKdRvqEXF8B4MvrXA5ldTUtyX1a9+6wmWYmb3t9dUV\n",
+       "pZX0ftkxv3ZN3cysQgMp1Ntq6juQmnUc6mZmFRpIof4i8A7gnaReLw51M7MKDZhQj0KsJ93Y632k\n",
+       "3jQOdTOzCvVlqK8vY7zlwN7AwzjUzcwqNmBq6tkK4N24pm5mVpWBFurLSWVyTd3MrAoDLdRXAJuA\n",
+       "x4ERKqqxV0tlZlZnBlqoLweeyz+a8Sq+AMnMrCJ9FepDKL+m/kx+vho3wZiZVaTaW+9Wqtya+sPA\n",
+       "dvm5Q93MrEIDKtSjELcBt+WXDnUzswoNtDb1Ug51M7MK9XqoqyiR2tTLufiolEPdzKxCfVFTHwJs\n",
+       "iEK0VjidQ93MrEJ9EerVNL2AQ93MrGIOdTOzOuJQNzOrIw51M7M60m2oS7pU0jJJ80uGjZM0W9Ii\n",
+       "STdLGtPFLBzqZmZ9pJya+mXAoe2GnQnMjojdgFvz68441M3M+ki3oR4RdwIr2w0+EpiVn88Cju5i\n",
+       "Fg51M7M+Um2b+oSIWJafLwMmdDGuQ93MrI/0+N4vERGSotMRLuIUJjNZzWoGWiKipcxZO9TN7G1B\n",
+       "UhPQVIt5VRvqyyRNjIilkiaR7oPesVO5Chgbc6K5wmW8CoxUUQ1VXI1qZrbFyJXdlrbXkgrVzqva\n",
+       "5pfrgJPy85OAa7sYt6rmlxzkrwMjKi6dmdnbVDldGn8O/AHYXdIzkr4AnA0cLGkRMCO/7sxQKr+Z\n",
+       "V5vXgJFVTmtm9rbTbfNLRBzfyVsHlbmMak+UgkPdzKwiA/mKUkihPqqGZTEzq2sDPdTX4Jq6mVnZ\n",
+       "Bnqou6ZuZlaBLSHUXVM3MyvTQA91N7+YmVWgr37Ozs0vZmZ9YKDX1N38YmZWgYEe6m5+MTOrwEAP\n",
+       "dTe/mJlVYEsIddfUzczKNNBD3c0vZmYVGOih7uYXM7MKbAmh7pq6mVmZBnqou/nFzKwCAz3U3fxi\n",
+       "ZlaBLSHUXVM3MyvTQA91N7+YmVVgoIe6m1/MzCrQ7c/ZdUXSEuAVYBOwISL262A0N7+YmfWRntbU\n",
+       "A2iKiOmdBDr07Ien1wNSUUOqnN7M7G2lFs0v6ub9qmvqUYjAtXUzs7LVoqZ+i6T7JJ3ayTg9aX4B\n",
+       "h7qZWdl61KYO7B8RL0gaD8yWtDAi7uxgGdU2v4BD3cysbD0K9Yh4If9dIemXwH7A5qF+O5u4g4Ka\n",
+       "BdASES0VLmYN7gFjZnVMUhPQVIt5VR3qkkYAjRHxqqSRwCFA8S0jHshr0RLNVZfQNXUzq3O5stvS\n",
+       "9lpSodp59aSmPgH4paS2+VwZETd3MN6iHiwDHOpmZmWrOtQjYjEwrYxRH6h2GZmbX8zMytQXV5T2\n",
+       "NNRdUzczK5ND3cysjvRFqM/v4fRufjEzK1Ovh3oUYl0PZ+GauplZmfqipt5TvlOjmVmZtoRQ9z3V\n",
+       "zczKtCWE+oBsflFRE1TUNSrqnf1dFjOzNj2990u3JBRB9GAWC4FzVNRuwMvA/yHdv/3F/JgETAa+\n",
+       "G4VY1UkZPgCsiODJkmHvAv4S2Bk4I4LV5RZIRe0K3AS0AscDZ1exXmZmNdcXNfV3VTORxGCJM2mO\n",
+       "B4F/BG4BHgS2YcUeE3lyxgnAUcBOpFC/RkXtrKJuU1FfLJmPoPViaP2HkmHbALOBscAuwKfKLldR\n",
+       "Q4FrgPOAmcBfVLN+Zma9QRE9qUR3M3MpIL4UwU8rn5avAz8AvhnB2SrqBOC5KESLxC+BI4G9I5in\n",
+       "ohqBXwKH0Np4Adp0ImKPKMSLmvjggRwx8yZGLW1k62fORq1bsfCoo3j4+Dv51HGfpTmOJtXUm8oq\n",
+       "V1HfB/YA/hxoBF4A9olCPFXpOpqZdURSRER3v1XR8bR9EOpXR6SasMT2wPKIjm/FK3EE8FfAfwBX\n",
+       "AMcBvwD2iWBJHmcn4F7gR0BTBJ8EUFHDeeITe3HFby/kswe/xtRbHgLOZ+let7B+5HPc+v3RHPbl\n",
+       "+3lptwZe3GN/PvK99YhNbBr8W/51wYms3GVaBE93uT5FTQduBPaKQizLwy4BlgA7knrpfC0K8Xx+\n",
+       "7wxgqyjEWdVuQzN7+xnoob4a+DkwGjiaFMhHR7By83HZClgAXA18HvinCL4n8Q3go8BhEbRK/BPp\n",
+       "xzm+mcf/TgQ/k5gA3ADMZ8SKI/m7iauBVh44ZXvu+urOvLT7ycBU4FDgcJo1F9gLOJb1I7/Gsvc+\n",
+       "wg53XwT8KgrplsIAKmpYXt564A7g8ijERSXvHw78GrgMeI7UJPMDYAXwfdIvQ50ShbipNlu1exKT\n",
+       "gfdFcGNfLdPMamegh/puwDF50H8AzcAnga+Sgu8HpJqugMYIPi8xHFgXQUgMJt2j/RekNvWrSc0u\n",
+       "SySmA/8NPAW8H/jXPP//BJblYX+MYKbEnsDDwIURzNysnO+67iDGPnkDk+fALjdt5KVdj46L5sxW\n",
+       "UVNIgT0Y+K+8HntHITa9OW1q+tk3CnF3fv1u4BzgY8ABpHb7/wL+Igpxlybf+3l2/N0pTJu1inc8\n",
+       "di6D17UAXyEdYH4KPApsjEKs7XS7FrUDK/YYy3//zzHMnP4rBq9rJF25Owh4N7cXz2fYyg8y/dK7\n",
+       "GPbKH/J2D+D1KMTSTub5UeCHwJejEPe3e0/AoCjEhpJ1PhB4D+kguLizspZLQnz0rP2Y8a1v5vX4\n",
+       "HOl8x4nAT6IQj1c0v6JGAu8D5kQhWkvWY0oUYnFehwuApcD3oxDrVNRY4N9I+9k5+ecUa0JFDYlC\n",
+       "rFdRDcAMYCvg91GI5R2MOwKYGIV4sv17ZS5rIrANqSK1FelOqYtruT4ly2ok7Vt7Ax8G7qFkm5eM\n",
+       "N4r0zfuxKLzlh3TKXZaA7YAVUYie/PDOgKOitibly9woxEMDOtQ7KpjEMcBZwDtIJ0F3JNXiD4rg\n",
+       "rTu52BmYQ+r1cmIEt5S8N4oUADdE8FQetiup18wlwBcj2JROmHI28KMIXuxgGQ3ABD7wk3+nqXgE\n",
+       "oacZ/vIwFh1xAxuHPcee/28m8Gma43lSCG9PCtLrgDnte/ioqBFtwZzPB5xFawNsGPlOVr1zEU8c\n",
+       "Opq9L9qaoasXIjYC19Da+EU2Dd4ONJhNg29h2CtXk/rpfwI4CPgNsBH4S9Zs20jj+tGodQnDXllD\n",
+       "MBUQm4YuZuHRU2ltvJ0nPjGFo7/wWxpaTyR1Dd0qz2NJ3uZzgeeBg4HDgYuAU4EZUYhH80Htb4Bj\n",
+       "Sbdavoj0jeVE0kHzQeAI4HHgKtLPFo7Pj02kwFwKvMwrk0ewZsJTbPfA3cA+wKlsGvRJkGjc8D+8\n",
+       "MO0jbP3sNF7a7Up2/MNzwEkEwaoptzJ2yaHAY8CupIP/WmAI6acSh5K+sd1FOsexNelAegCwEniW\n",
+       "tK8tBZoJDkdcCmwghf4KYF/g7vz3xly+52ltPJ+NwwYx5LVv5uX9nhRgytttEzAceIRUsdid9LsC\n",
+       "B+Vt8iCpQrAP8CHgJWAdsCqX50N5fovzZ/IaMA7Yn9Sz6sK83n+fP/fHSQH9ev48JpCC+2Xgj8BD\n",
+       "wGdI4fpCXs5rwHvzNJeSvs2uJR2UZ+Tt81iefi/SDzWszWV9iVQR+iNwAjA9l2E46YC7Sy6vSP9v\n",
+       "vwc+SOqRdhepI8ZUkvGkytk+wM15uncABeAPwLtJ33IPyp/bE6QDxOWk/4Ef5XXbSOrivIB0X6nf\n",
+       "ATdGIVbkA8cHgHeSKgb3k/bxrUn7/gZgWRQiVNSeuQx/yON+lLSv75rXcW0uf0Meb5f8ea8g9bp7\n",
+       "mNREPBX4OCkHXif9psRo4Mn8ua4BJuay3JTX91DSPnMTcBvwJdL/VAtwVhTi3i0u1NN7NJBq5hvK\n",
+       "mxf7AksjeKbM8d8FPFZNd0ptP+dAGjZcyIaR27J0+nxgMrTeDw3jSTvfBaSdZTqp9jGS9I81K4J/\n",
+       "6qAsUxmzeCPjH/kui2dsig0jviAxmlEv3M0BP1zAwiNPZ8mMGcC5wH8xcukrvOcXn2e/nz7CS7vt\n",
+       "z/N7r2T5e2Zx7HHraGgdyYX3LOT5fb8D/AvpH/NTDHl1HpuGrGHT0KdJtf1/JPXweR6YGcFaFTUa\n",
+       "OJm0gz9Hql1tB/wvcGUUYqmK+hzBT9kwYh2D3mikYdOlpJ33ReA0UthcHoV4BEBFDSYF2dH5vRX5\n",
+       "0UDamScRGseSj32YbR4bxqgX1iBeZ+24y5h12+dR61g++ON5PH3ANF7Z4Uye+OR3gD+jWQ1cOGcn\n",
+       "nt/vcva5YAZ/9reNwELmf3oIzxywJ4ed9gDpILKRdAvoffPrV/LjrlzmU0ndTiexascWLrzvM5wx\n",
+       "9U6Gvro98JEoxCoVtRewJ6kGODs3uf0dr044lQ2jtmfR4b9g2qwrGLZ6b9I/Nnm7iXSQ2wvYgRS4\n",
+       "d5IODFPzPN8AFnDx75/g8zPWMeiNMcBDOVjE7c27snTad5l8z3p2v+4BJjy8hBRUyvvZGFJQrAZ2\n",
+       "y49hpIPCsjx8LKmXWVtgXhSFePN3gXMNdzrw16TAH0lqBv0N6cCwOynMHsvTDyIF7vg83R6kHl8t\n",
+       "pN5ma0mh+0T+rCn9FqCitiOF+yZSQLYCq/L+NQ44PW+rVuB7pN5rTwFXkr6Jj8xlaiJ1SFhJqtR9\n",
+       "Nc+j7VvY3qSD0yHAiPxZ3J+XGXldd86fwWpSBWAT6UC2HbCcVLlpqxhckrfL1DysNY+/Oq9rI+kb\n",
+       "0HjSgfcE0sF4NqkX3Bjg26QD3E552Vvlz+lgUvhfTPo2uDupZn4I6cD1gyjEije3YQ9CnYjotUea\n",
+       "fe/Nv3fLHkMg3pWfj4T4GsQREEPbjSeInSD2hVgCcTzElyHuyc+/CvEixIr8mFAy7TshbodYBfEQ\n",
+       "xLSSeV4BsQziaIhPQNwNcRnEcXk+H4QYnp//HOIqiFMgHoPYtqTcV0A8AXEfxG8gppYsfyTEdvn5\n",
+       "cIgTIH6GNqxg/MP3MHTVcogP5GWdADEKohHi/XndvghxEMQOEFtD/Divx/g8z10gxkMUIG6BTQt4\n",
+       "7xUzmX7RKIjfQpwPsSfEXRCfyNN8CmIBxGCI2RB/gLgmfx7/lrfVaogZefxdS7dpB5/joRDn5G36\n",
+       "nxDPw6bzaKaxm89/XN7+n4K4KZej0+XkaQZD/FX+7K8oWac9IDZAHN7BNN/J8/9ZXobyY5v+/h8o\n",
+       "8/+ky+3Y7fTNNNBMQxfvb08zh9CcKqCdjCOaaezoM6WZEaXT0swUmvk4zQzOr7ejmZHt1mkiRKdl\n",
+       "6qIMnZYxjzO8/O1KVL1Ne/cDr75gW+IDYi+INTmkPp3/ue+CmJL/UYd0Mp0g1G7YIIhRJa9HQtwM\n",
+       "8Vxb+Ofh5+RgH9/FvA+A2I90YHoRogXi3lzWl3IQr4C4EWImxJQ87eEQayEezQeEl/PjEYj/hriI\n",
+       "dFB6AWJTDs1z8zp/Ly9rVQ7H7SAOgXga4kmIX0AM6qS8N0P8ewrgGJ3X+W6IayG2IR3kni5Zn1UQ\n",
+       "d0L8EOLb+SBwIMTQvKxnIM7O6/revA5j8nb5YV63n0Ic2/bPDPHPED/Nzxsgirk8V5EOUvtCfAHi\n",
+       "DojLIS6AWApxK8RhEH+Txz8K4pa8nW7J87sA4jyIYXnb7J6XMZd0EPkuxDqIY8rc746DOKHCfXUk\n",
+       "xF4dDG/MZbsVYlweNikPuxri/XnYqLwPzMlln0SqUOxSMq/d87bZuQ//B3eFmNxu2BCIbSGGtxve\n",
+       "kPfTr5EPohBTIVZCzOxk/g2U/K/mef8NFR4Eul8Pouppe3cDV1+wLfUBMZke1l66mPcgiK3bDdsa\n",
+       "Yo8K5rFjDrwP53/sxvx8aifjD2/biUk1mEmdjDcs/xXELIhfQ0zI/wRDS8Y7k1zL7qKMe0JshPh2\n",
+       "fn1cDtlBJeP8mHRw2SWH46EQ38rhfTrEcogLIa4r+Uf9Xp72CojHSYH/7Ry8p5O+zdwDcT/p2802\n",
+       "7cq1F+kbyo9I3yZuytOeDPF1iF3bjb8v6VvFgxAjcsh/E+KPEIty2W4oGX8GxCsQC0kHrudJB86/\n",
+       "Jn0bEsT0HEQPQPwLxDdIB60XyQfjTrZpI0QT6eD0q1yul/J6j4H4D1JI3wJxG8RP8vb9Vd5250Oc\n",
+       "kcu0GOJZiItJB9vPkb5FPg5xbV7eyblMl0E8BfEZiOtJlZChpG81Y/K478jLPpN08O+yxtvJ+o3P\n",
+       "+9wruUwNpArBhXk9l0O8Tvpm1LY//wjid6RvSS+TDqYPQlyZP9+2b02D8vgHQrwG0Zo/+6F5ewbE\n",
+       "sbX9Xyeqnbbf2tTNuiJxJPC/EXR26wcBDRFvtnG3f/84Ui+o90WwSGIq8HwEr+cT74cAl0XwRsk0\n",
+       "DaR2zleBWyJo7WDWla7Hh4GVETyau+eeReoZtYp0cvao2PzE/1eAqyN4RmLHXJ69gcNI5w9eI7U7\n",
+       "30g6gf4B0rUdx5PaoBfm13eTTnBOIJ24nAo8A/yWdAL3VtJ1FXeQTgJfQzoPMQa4KIL1Ep8lte3/\n",
+       "KvJtNHJvtB1JJx/nkdrOr+dP5xbuJp1v2gs4IoKFEqeQzuVcmtfjfaSTj5DOF3yGdNJzcF7fBtKJ\n",
+       "xPv4Uxv5aNJndiypff3XpLb4e0lXm/8KeBr4el63y0jt8UuBb+XtuS3phOZrpPbyHYEPRfBy3tZn\n",
+       "kT77L+d1+wZwUl7Hc4Bvkc6htfXGG53X5cy83Pfksm+VH/vk9yeROlXcBJwCbAucGsEGiUl5mw/K\n",
+       "j2cieLHfTpRKOhQ4n3QC4eKI+GG79x3q1m8ktorg1f4uRxuJrYGPRXB9fj2ms4NWB9MOJvXq+GPE\n",
+       "W0/+SwwhncicT+rltRcp9F4gBdtTETzXwXQ7ATtHcGtVK5Xm8RPg9xFcJXEYcAapl9qKDsYVqWdS\n",
+       "W1BfQgrQ09rWS+IdpB47++THzqSD4L2kLsz7kw6Mz5JCdj7p5OWH88Ho/aTgv510fcvGkuWPIN0W\n",
+       "5GXgruigJ1we7wukLsa3kTLuLODfIvhZfn846UB4GekgewfphOqHSMG+jnSgeoC0/T9C6vVyZV6f\n",
+       "l/Pwk0knUjfmR3ME1/ZLqEtqJJ0tP4jUk+Je4PiIeLRkHId6jUhqioiW/i5HvfD2rK3+2p75osNz\n",
+       "gO9FsKhk+BHAneUeNDuY7zBSL51/jqDbH/rJ18wcC1wcQZfXbeQDy/Wk3jendXRg6Ul29uQujfsB\n",
+       "T0TEklz7beZeAAADf0lEQVSIX5BusPVoVxNZ1ZpINTGrjSa8PWupiX7YnhEsIzWRtB9+fQ/nu450\n",
+       "sCh3/LmkZqdyxl1L6t7YK3pyl8bJsFmf8WfzMDMz6yc9CfXeO8NqZmZV6Unzy3Okq+ja7ECqrW8m\n",
+       "3f/FakFSob/LUE+8PWvL23Ng6MmJ0kGkE6UfJ12Kfg/tTpSamVnfqrqmHhEbJX2Z1PeyEbjEgW5m\n",
+       "1r969eIjMzPrW73yG6WSDpW0UNLjkv6+N5ZR7yQtkfSQpLmS7snDxkmaLWmRpJsljenvcg5Uki6V\n",
+       "tEzS/JJhnW4/Sf+Q99eFkg7pn1IPTJ1sy2ZJz+b9c66kT5a8523ZBUk7SLpd0iOSHpZ0eh5em/2z\n",
+       "lvcryLX+RtJtKqeQLvudB5R9bxI/3tyOi4Fx7YadA3w9P/974Oz+LudAfZCu4JsOzO9u+5Fupzwv\n",
+       "769T8v5b0xs0bcmPTrZlAfhqB+N6W3a/PScC+Y6sjCKdm9yjVvtnb9TU37woKSI2kO6RcFQvLOft\n",
+       "oP0VZUcCs/LzWaR7mFsHIuJO2PwnE+l8+x0F/DwiNkS6mO4J0n5sdLot4a37J3hbdisilkbEvPx8\n",
+       "DemCzcnUaP/sjVD3RUm1EcAtku6TdGoeNiEi/eA16X4RE/qnaFuszrbfdmzeHdf7bHlOk/SgpEtK\n",
+       "mgq8LSsgaQrpW9AcarR/9kao+8xrbewfEdNJv+f6JUkfKX0z0vcyb+sqlbH9vG27dgHp132mkW4a\n",
+       "dm4X43pbdkDSKNJNwc6IiM1uPNeT/bM3Qr2si5KsaxHxQv67Avgl6evWMkkTASRNgrf+nqt1qbPt\n",
+       "136f3T4Ps05ExPLISD/R1tYc4G1ZBkmDSYF+eURcmwfXZP/sjVC/D9hV0hRJQ4BPk+5hbGWSNELS\n",
+       "Vvn5SNJ9pNt+5Lrt5kUnAdd2PAfrRGfb7zrgOElDJO1E+r3Oe/qhfFuMHDptjiHtn+Bt2S1JIt1y\n",
+       "eEFEnF/yVk32z57cJqBD4YuSamEC8Mv02TMIuDIibpZ0H3CVpJNJP3j7l/1XxIFN0s9J99zeRtIz\n",
+       "pB8EPpsOtl9ELJB0FenHhzcCf5troEaH27IANEmaRmoGWAzMBG/LMu0PnAg8JKntzo7/QI32T198\n",
+       "ZGZWR3rl4iMzM+sfDnUzszriUDczqyMOdTOzOuJQNzOrIw51M7M64lA3M6sjDnUzszry/wFBsEB8\n",
+       "UlvRigAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7fbb37f20990>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "plot(np.vstack([train_loss, scratch_train_loss]).T)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Notice how the fine-tuning procedure produces a more smooth loss function change, and ends up at a better loss. A closer look at small values, clipping to avoid showing too large loss during training:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[<matplotlib.lines.Line2D at 0x7fbb347a8310>,\n",
+       " <matplotlib.lines.Line2D at 0x7fbb347a8590>]"
+      ]
+     },
+     "execution_count": 6,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAXgAAAEACAYAAAC57G0KAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsnXeYHNWVt98jgXIY5ZyQMNlIJJMMwhhssI0Dxsbr8Dms\n",
+       "zTpne9e73qa9tnFYrzMYe53WOeyuFzA4YBAYTEYiCQQCCSRAaZQTEtL5/jj3TlXXVHdX9/SMZsR5\n",
+       "n2ee6a6uqq5Ov3vu7557rqgqjuM4zv5Hv319AY7jOE734ALvOI6zn+IC7ziOs5/iAu84jrOf4gLv\n",
+       "OI6zn+IC7ziOs59SSOBFpL+ILBSRK6s8/g0ReURE7hGRea29RMdxHKcZikbwHwQWA52S5kXkXGCO\n",
+       "qh4MvAu4rHWX5ziO4zRLXYEXkanAucB/ApKzy3nAjwFU9TagTUQmtPIiHcdxnMYpEsF/Ffg4sLfK\n",
+       "41OAFan7K4GpXbwux3Ecp4vUFHgReTmwRlUXkh+9d+yaue/1DxzHcfYxB9R5/GTgvOCzDwJGiMh/\n",
+       "qepbUvs8CUxL3Z8atlUgIi76juM4TaCqtQLsqkjRYmMicjrwMVV9RWb7ucD7VPVcETkR+Jqqnphz\n",
+       "vHIxy4EXaUmXNXyhZbkK+I6W9KpGj90fEZGLVfXifX0d+wP+XrYWfz9bi4hoswJfL4LPouEJLwJQ\n",
+       "1ctV9WoROVdElgLbgLfVOH4QsLOZCwWeAQY2eazjOM5zjsICr6o3ADeE25dnHntfwdN0ReB3AQOa\n",
+       "PNZxHOc5R0/PZPUIvnUs2NcXsB+xYF9fwH7Ggn19AY7R0wI/EBPqZvAIPoWqLtjX17C/4O9la/H3\n",
+       "s/fQ0wL/rJa0Wj59PTyCdxzHaYCeFvhm7RnwCN5xHKch+prAewTvOI5TkL4k8M/gEbzjOE5helrg\n",
+       "mx1gBY/gHcdxGsIjeMdxnP2UviTwPsjqOI7TAH1J4D1N0nEcpwH6mgfvEbzjOE5BPIJ3HMfZT+lL\n",
+       "Au8RvOM4TgP0NYH3CN5xHKcgfUngPU3ScRynAfraIKtH8I7jOAXxCN5xHGc/pS8JvEfwjuM4DdCX\n",
+       "BN4jeMdxnAboSwLvaZKO4zgN0JcGWX2ik+M4TgN4BO84jrOf0tcE3iN4x3GcgtQVeBEZJCK3icgi\n",
+       "EVksIpfk7DNfRDaJyMLw9y9VTueDrI7jOD3EAfV2UNWdInKGqm4XkQOAm0TkVFW9KbPrDap6Xp3T\n",
+       "+UQnx3GcHqKQRaOq28PNAUB/YH3OblLgVF2J4HcDB0hZetpWchzH6ZMUEksR6Scii4DVwPWqujiz\n",
+       "iwIni8g9InK1iBxe5VRNC7yWVPGBVsdxnMLUtWgAVHUvMFdERgJ/FJH5qrogtcvdwLRg45wD/A54\n",
+       "XqcTXcqb5GI5I9xbkDlHEaIP35WegOM4Tq9FROYD81tyLlVt9Mk/DexQ1X+vsc8y4FhVXZ/aplzM\n",
+       "XC3pPU1fbFnWAYdpSdc2ew7HcZy+hIioqhaxwDtRJItmrIi0hduDgbOAhZl9JoiIhNsnYA1Hnk/f\n",
+       "lUHWeLxbNI7jOAUoYtFMAn4sIv2wBuEnqvoXEbkIQFUvB14LvFtEngW2AxdWOVdXrRX34B3HcQpS\n",
+       "JE3yPuCYnO2Xp25/G/h2gedrhcB7qqTjOE4B+tJMVnCLxnEcpzB9TeA9gnccxylITwv8ri4e7xG8\n",
+       "4zhOQXpU4LWke7t4Co/gHcdxCtLXpv17BO84jlOQvibwHsE7juMUpK8JvEfwjuM4BelrAu8RvOM4\n",
+       "TkH6osB7BO84jlOAvibwvvC24zhOQfqawHsE7ziOU5C+JvA+yOo4jlOQvibwPsjqOI5TkL4m8B7B\n",
+       "O47jFKSvCbxH8I7jOAXpawLvEbzjOE5B+prAewTvOI5TkL4o8B7BO47jFKCvCbxPdHIcxylIXxN4\n",
+       "j+Adx3EK0tcE3iN4x3GcgvQ1gfcI3nEcpyB9TeB7bZqklGWelGXWvr4Ox3GcSE2BF5FBInKbiCwS\n",
+       "kcUickmV/b4hIo+IyD0iMq97LhXo3WmSHwReva8vwnEcJ1JT4FV1J3CGqs4Fng+cISKnpvcRkXOB\n",
+       "Oap6MPAu4LLuulh6cQQPTKb3Nj6O4zwHqWvRqOr2cHMA0B9Yn9nlPODHYd/bgDYRmdDKi0zRmyP4\n",
+       "SfTea3Mc5zlIXYEXkX4isghYDVyvqoszu0wBVqTurwSm5p+ry56/R/CO4zgFOaDeDqq6F5grIiOB\n",
+       "P4rIfFVdkNlNsofln21gWWTXnnBnQc556tErI3gpy0BgNL3w2hzH6VuIyHxgfivOVVfgI6q6SUR+\n",
+       "DxwHLEg99CQwLXV/atiWwzNfVGVrw1eZ0FvTJCeF/4P26VU4jtPnCYHvgnhfRErNnqteFs1YEWkL\n",
+       "twcDZwELM7tdAbwl7HMisFFVV1c5ZVfFubdOdJoc/vfGa3Mc5zlKPU98EnBd8OBvA65U1b+IyEUi\n",
+       "chGAql4NPCYiS4HLgffUOF9XBX4LMELKkrWE9jUxgneBdxyn11DTolHV+4BjcrZfnrn/voLPd2Dx\n",
+       "S8u5npJul7LsAYZhYt9bmAysxQXecZxeRE/PZG2Ff74WGNeC87SSScBy3IN3HKcX4QLfGiYDy/AI\n",
+       "3nGcXoQLfGuYhAu84zi9jJ4W+C558IF1wNgWnKcTIswXqUj5LIpH8I7j9Dr6dAQvZZkkZanwvaUs\n",
+       "35OyHN/oSUUYBPwSeHET1+QevOM4vY4+LfCYIN8lZTkWQMpyJPD3wGHVDhZhugjn5Dz0dmACDYp0\n",
+       "mMU6HJvc5RG84zi9hr5o0aQFfjbwI+APUpb5wMeAbcCYGse/EKt62YEIBwKfwHL9G43CJ2J1enbg\n",
+       "Au84Ti+icKmCFtGyCD5EzuOArwJ3Ar/GXs93qe3RD6azEL8Yi8D/SuMCPxl4mt47y9ZxnOcofVbg\n",
+       "genAk1rSZ4HrpSzvxCpbKnB0jePzBH4i8DCwk8YFfjTQjgu84zi9jL4s8DOxgU0AtKT/ByBleR21\n",
+       "LZrBdBbxNmAjJvCjGryekcAmTOB9kNVxnF5DX/bgZ2KpiVnaqW3RDKJzpD0K2EBzEXwU+J0553Uc\n",
+       "x9ln9MUsms3hPIeRiuBTrCNE8FKWfsGrT5Nn0YwiieCbEfiNwG6gv5Slry1k7jjOfkqfE3gtqWIi\n",
+       "fjz5Ap+O4F9P5zVi8wS+jeYj+DZgU7iuXrkgieM4z036okUDZtMcQ3WLZkwoKXwwMD7zeK0IfgfN\n",
+       "WzTQXAPhOI7TLfS5CD6wFhgCLBdhsAh3xge0pDuAZ4GhWKbNsMyxrY7g0wLvmTSO4/Qa+ozAizBF\n",
+       "hP7h7jrM834aGAEck1nQO9o007FZpmnqefCDG7w0F3jHcXolfUbggZ9hs1DBIvjHtaR7sEheqBTm\n",
+       "ONA6jc4Cn5dF01UPfmO47QLvOE6voS958KNIctTXkgywDgn/01ZMvQh+kAjpZf+6mkXjHrzjOL2O\n",
+       "vjTRaUT4A1iCeeyQRO7DsJowYAL/POz15XnwEh7bLcLAcHs77sE7jrMf0Zcsmg6B15L+Wkv6j2F7\n",
+       "XgS/DpiHlR8YkslNjw1CFOI2YKMqigu84zj7EX3Cogl2SjqCT1PNojkGs3F2kET7kC/wG8LthgRe\n",
+       "ytI/PH9cANwF3nGcXkNfieCjjZL106F6BH8E8AQmvunjooBHIY7+OzQewY8AtmpJ9zZ5vOM4TrfR\n",
+       "VwR+ROZ/mrQHH2nHGoQngK2kBX7ONSN5xTv30IIInkp7BjyCdxynF1FX4EVkmohcLyIPiMj9IvKB\n",
+       "nH3mi8gmEVkY/v6lyum6Q+CrWTSQRPDJY2MeHswhVwr9n0lH8C7wjuPsdxTJotkNfFhVF4nIMOAu\n",
+       "Efmzqj6Y2e8GVT2vzrmaTZNsVODXhf+dLZrB7QMYtrofx3x/JrznISrz2HcBB4rQX5U9Ba4rFhqL\n",
+       "uMA7jtNrqBvBq+oqVV0Ubm8FHsRWMcoiOduyNBvBD8cW8mjEooGMwIvQnyHt9prnXHNM2Kcjgk9l\n",
+       "0hQV6TYqI3j34B3H6TU05MGLyEws/fC2zEMKnCwi94jI1SJyeJVTdMWiWU31QdZnqRT4tVhe+9NU\n",
+       "RvCDGbpmD+uet43RS+dKWQbz5rNexwE7NqeObaTgmFs0juP0WgoLfLBnfgt8METyae4Gpqnq0cA3\n",
+       "gd/ln+X9zxORi8Pf/AaucwSwkuoWzVpS4q8l3QbMCsv5pQdZBzF09V6WnbmGYauPAt7F7GuP5vDf\n",
+       "psscNBKFu8A7jtNSwphm1MmLu3KuQgIvIgcC/w38VFU7ibeqblHV7eH2NcCBIjK685m++aSqXhz+\n",
+       "FjRwnbUEfjCwhsyMVS3pmnAzPcg6mKFr4eGXPc3AzbOAT7J+9kae9/t0SeFGCo65B+84TktR1QUp\n",
+       "nby4K+cqkkUjwPeBxar6tSr7TAj7ISInAKKq63N27YoH/yTVI/hOAp+i0qIZ0t6P9udtYufIp4A7\n",
+       "uPeNq5m4cFpq/0Yi+DwP3gXecZxeQZEI/hTgTcAZqTTIc0TkIhG5KOzzWuA+EVkEfA24sMq5uuLB\n",
+       "r8EyXLKZOMUFvt+uwQza0I/NU7Zw99//BvgIj58Gw5+ck9q/qxaND7I6jtMrqJsmqao3UachUNVv\n",
+       "A98u8HxdSZN8FFuPdTiQ7h0MAR4BplY5dgtWeAym3TKeZwft5dkhO7j2i0v0z198VD67fQADtk+S\n",
+       "sgzSku7EPXjHcfYT+tJM1s3hL2vT5HrwKbZ2PDbuwYk8M3I3aSF+dkgbe/s/DMwN+zcq8O7BO47T\n",
+       "K+lLAr8l/GUFvqpFI8LZ/OlLryJaNMOeGsfOthilD+woYiZ7b8MW8YauRfDuwTuO02voE9UkMYGu\n",
+       "FsHHNMm8CP4INs6cRBT4Ie3j2DlyJ0mkPQR4hn57bwdOCMd0ZZDVPXjHcXoNfSmCb8aiGceOMQcS\n",
+       "BX7QxrE8M3I7icDHnsF1wFmhbnyXPXgpyxgpy9sKnsNxHKdb6LUCL8JIEf4S7qYFPjubtSOCzyzD\n",
+       "BzCeHW0DOo4ZuGU0O0duJRH44cAWLelj4RwvoDGBj9cViec9GagouCZlmS1l+UXB8zqO43SZXivw\n",
+       "wATgRSIdC33U8uA3Y+UKsv73OHaOGkCM7gdsbWNn2xYyAh/2vQI4j4ICH6L9wcC21ObowU8HZkhZ\n",
+       "0pbUHOC4eud1HMdpFb3Zg4+R+iHU9uAHY3VnkmyZhPHsbBvUca4Dt7axY/RmEq88nhcaFHhslajt\n",
+       "qcU+SJ13OtAfSE+gGkOyaLjjOE6309MC30+E/gX3jUJ+GCamW8kIvAj9MEHdSb7Aj+OZEUOI67IO\n",
+       "2DqcHaM3kUTasWcAcAcwmuk3DqKYwKej/0jsGcwI92enHhsDtElZilTddBzH6TI9LfC7KB7Fxwj+\n",
+       "OGB7qM+e9eAHATtV2Uu1CF77DyWuyzpg23C2TthAjkUTIvE/ccRvplBc4LNF1+J5pwNLgINSj43B\n",
+       "ovqhOI7j9AC9WeBHYIJ5HEmknPXgh2DiDZml+UQYhNk3Q9Eg/gO2DmXLlHbyPXiAexm3eDTNR/Bp\n",
+       "D34BlRH82PDfbRrHcXqEnhb43YSBVhEGi1SdfQom5Aux+vPRJ8968NF/h84R/DgsffJZEBP/AVsH\n",
+       "s372OioFPp0Fs5i25eMoVk2ymkUzHBgP/JXOETxY7rzjOE63sy8i+JhJ83HgX2vsOxy4C4umqwn8\n",
+       "EGoL/FpgG3v7bwMuZPvY7WyZspHOefCRxQxbPYFiEfww8gV+ArbQyBI6e/DgAu84Tg+xLwX+aCqz\n",
+       "TLKMAFYBK6gt8NGiqVxc26LoNcA29h64A/gX/vzFRWj/9EzWbBT+BP2fGcSwp/NWjspSLYK388Bj\n",
+       "wOzUoOrY8Hpc4B3H6RH2pQd/BDCpxr7RPnmISg8+Lb71LBqL4J8duBP4H+5702asQYipkBUirSVV\n",
+       "dratZPKd4wq8lrxB1p3h/xNa0vXAHpLIfQxWEdM9eMdxeoR94sGHAdA55C/eHYn2yRKas2iSCP7W\n",
+       "D18GvBtrEHZQPYKHHaMfZ8J9Y6hPXgS/K/x/Ivx/lMSmGQMsxSN4x3F6iJ4W+I2YR30IZlcUieD/\n",
+       "Ctwftm3G6r9EinnwN/zrai3pBjoLfLbUAGybsIwxS4qIcCeBD6mWu0kE/jHgICnLIMyaWoELvOM4\n",
+       "PURPC/x1wFmYPXMLNvGpWibNCGCLKr9W5cth2yZgeJjgBIlgQy2BT3LP60fwG6c/wuileUsDZsmL\n",
+       "4AnnjgL/CHAwFr23Axtwi8ZxnB6ipwX+GuBcTOAfwLJNqkXx2RRGwmSnrSQ2TTaCT/vziUVTKfC1\n",
+       "Bllh5UlLGPXYkAKvZRidPXjC+aPAPwQcSiLwG/EI3nGcHqKnBf42bBLQWSQCX82Hz6YwRtIimRb4\n",
+       "tZioR2IEv5VE4AdRL4Jf9NZlHLijv5RlVp3XUi2CLwMPh9su8I7j7DN6VOBVeRb4M7Z60v3UjuA7\n",
+       "++PGRhKbI50muRyYmdqvWgQfs2jyPfjdQ3ew6P9tBj5a5+XkCryW9Fta0pgu+RA23jCOxKLpUYEX\n",
+       "YYgIv+/J53Qcp3fQ0xE8mE2zG8soqWfR5EXIaZFMp0k+TlLkC+p78J3SJAM7uPkTO4G/k7JMqPE6\n",
+       "ql1fB1rSLeF655FE8D3twY8BXpJTK99xnP2cfSHwVwJfVWU3VQQ+iFHeTFGobtGsBw4QoS0M3B6A\n",
+       "Ree1BllR7ZicFNnJlikDgF8CH6jxOuoKfOAh4BRgHfvGohmKFTkrMq7gOM5+RI8LvCrtqnwy3H2K\n",
+       "/Ah+KLAjDKpmyRV4VRSzaWZgJYaXhG3bgKEiHIgJ3a5QffJZ8gU6ToL6NXBajZeSN9Epjwex9V73\n",
+       "iUVDIuxFMoMcx9mPqCvwIjJNRK4XkQdE5H4RyY1qReQbIvKIiNwjIvMKPn+1QdZqA6xQaXOk0yQh\n",
+       "sWkOBxaHbTGCHwesCaIPFsVXE/jB7DlwJTAl/UBYa/XccLdaDyPLQ1hvoR3rUQwLq0H1FLH3MrLm\n",
+       "Xo7j7HcUEZrdwIdV9QjgROC9InJYegcROReYo6oHA+8CLiv4/NU8+E4pkinSUXDaooFkoDWmYUIi\n",
+       "8JOwyVWRZ/KeIwwEK7d8aB0wObNAx3zg31LXWETgHwz/28NEqK30rNi6wDvOc5S6Aq+qq1R1Ubi9\n",
+       "FROsbNR9HvDjsM9tQJtIzQHKSDWBrxfBVxP4x6ku8BPpLPDVnuNPXPulz6FsJ6klAxbRz6yyHms1\n",
+       "Hgr/23OuvyfoswIvZZkoZSnt6+twnL5KQ1aBiMzEMkJuyzw0BZuGH1kJTC1wyvXAEBGrvy7CiSKc\n",
+       "S/UUSagUyKF0juCjRVNP4HdSXeDfAJzO9rE7qLRpJgOjsUYpux5rNZ4Or2VduN/TPnz04BsSeCnL\n",
+       "BQXmAnQ3JwB/v6+eXMrSX8qSXcjdcfoMhQVeRIYBvwU+GCL5Trtk7munHUQuTv3ND354uibNa7Ef\n",
+       "dC37I+3BZ22X5cCRWL2bx8K2tMA/ndq3agSvyibgvaw7ZCidBZ7wHEUGWK1CJbyIZPJTx/WLcIAI\n",
+       "J6X3l7IMkrKcX+TcBbEIfuziSVKW2XX2TfMB4PQWXkczzAImSVmKruPbNFKWN0hZPp/Z/P+Ab3T3\n",
+       "cztOGhGZn9bKrpzrgIJPeCDw38BPVfV3Obs8SWVt96lhWwWqenHOsSuwqPsx4HlY9F0rgk9HwFOp\n",
+       "7Dk8Hs6xKJWBE2eyTsRqw0RyPfgUS9hw0CBm3JwV+J3AURTz3wHQkt6VupvugRwNXCXCuJDZA9ZD\n",
+       "+qGU5X8L9hDqYQJ/3rsuBM4EXlnwuBkkywzuK2ZhmU/jgaelLKcBf9OSPtsNz3UwNgEvu62RRtFx\n",
+       "uoyqLsCW/ARApHmbskgWjQDfBxar6teq7HYF8Jaw/4nARlVdXfAaHsBEHUycZ2OReU2LJuS6D8QE\n",
+       "P7IWy6pZnNq2Dct4yRtkrSXSq9k8rR87R6SX3ZsM3I5F8IUFPu/6w+0RwGjec8TXpdyRnTQN68HM\n",
+       "aeSkIpxVpXDbUAZsgcl3Hos1HvXPVZYDsJ5Lkbr43Um0iGIj+xvg2G56rrF0fs+nUcxqdJxeSRGL\n",
+       "5hTgTcAZIrIw/J0jIheJyEUAqno18JiILAUuB97TwDU8ABwpwgHYAOkDwMnUH2SdCqxMpT2mc+Ef\n",
+       "SO3fzCCrnWvH6DU8M+KQ1ObJwN9oMILPkK4oOQIUhj99IYn4xp7QMQ2e9wt0jkABhnD0j7ew+qgV\n",
+       "wAgpSxHRnoJ9NwoLvJTl/G5I/5yF9QSnSlniWrcHt/g5ImOB6RnPfVp4bp8F3CBSlqG13jcpy1FS\n",
+       "lrf24CX1KVr1nSuSRXOTqvZT1bmqOi/8XaOql6vq5an93qeqc1T1aFW9u4FruB/LepmJeeS3Y41K\n",
+       "rQh+FJ3tmci9wB2p+00JPAA7Rq1A+80E+8JiPYZFWI+jkAefwzISkRrOhHt3MGjjWJI6OtOwAdlG\n",
+       "I9U28hcLH8rxlx3Anf/wGHA3xRqOWPKhkMCHkg6/xT7HlhC+4LOw9QCmkCxg3l0CPwb7PaQHlqdj\n",
+       "351uy0CSsgyXshzXXeffh/wBqDWW9BrgrT1zKX2SN0pZvtXVk+yLUgVZHsCE4RDMI78X+7FVE98t\n",
+       "2EzTWVi2TgWqXKjKn1Obqg2y1sqiMXaMeYT+uyaGe5OxmbfLwvM3G8EvAuaG28M5/rLHefjc3WiF\n",
+       "wF9B4xF8vsAfd9mhDNyyl0Vv3UpjAr+c4hH8yeF/Kwdlx2CzjR/ABH42sJcmBV7KcmCdXcZiFt+c\n",
+       "sH8/7DN/jO61ac4HftcTA8k9ReglngK8vMZux1PZmDqVzCdJsW6a3iDwa7Af7mlYpsm9YXtuBB9s\n",
+       "mE2YD54XwWf33x3OL1RG3TvCeaqzadr9DNgW7ZTJmF2wPNxvVuDvBY4MP+jhPO/Ksdzx3idBpgTv\n",
+       "eyrwf8AxRbppUpZ+csSv+wFtjHx8lJTlnanHhjK/dDJ/+dyNaP8RmMAX6RnMAO6k+CDrKdiXsXUC\n",
+       "f8uHXoayjCTldjaWnttsBH9LNlKWsrxIyhLHLcYCt5L48BOw3uKjdK/An4A1YC+qtoOUpaHxmCrn\n",
+       "OEjKcp2U5dNdPVcBzsG+ay/Ns+3C9/o4YEqBhve5yhnA9V09yT4X+CDYDwCvxgT+vvBQLQHdiPng\n",
+       "nSL4KmwDVqX9euDjmJBWZ+3h93DAzoHBl40RfBzIbUrgtaSbgNXAHGZeP4MBWwfy6FlXs2vYNkxI\n",
+       "pmE/jq0Ui3Dezflv/Av9dvfjNW+6EPiulOX54bESTx+7gXvffDdmM9yFNRwTpSxfqnHO6WHfohH8\n",
+       "KcAlwOkt86ufPvar7Bi9DmtUYwT/B+DgRp9DyjIaa9iOzDz0DZJ6Q2OxBiSK6XQsgCg6p6NZXgD8\n",
+       "nCp2hZRlMHC/lGVG3uP1kLKIlOVd2Gu7H3hHD4wpvBy4FPudzs15fCoWcK3A3uemCPZWoUzAvoSU\n",
+       "ZTqWaLG43r712OcCH7gfi8weVmUtyeSgajQl8OkNqjyuWmcm6p5BS9k2fi8m7pOBp0Je+3Kaj+Ah\n",
+       "2jTP/+nRrJp7L3rAY2yduA3LIhqLvf67gJdIWU6p030/Htk7jwtfDZMWHg98C7goTFJ6B1ddvgxr\n",
+       "mEZiFtg44FrgY1KWiVXOOQNraAfnTfSRspyYuj0YeD6WRrsNK/TWdUasGM7Wie0kAn8QNrayl8bT\n",
+       "N08J/zui/yByM4Fp4TX0B+4hEfhp2MpcLRd4Kcs4KcuA8LyHAZ8EXiZlyfP6T8XGfpq1M34EXIR1\n",
+       "+T+IBScvaPJcdQkR+VnA1Vhp8HNydjsO6yEuIxlbaYb/g47lPPcn5gMLgtZ0id4i8DHrJU4G+hxJ\n",
+       "JJ/HBuxHXteiCWyj0n8vyuNsmtaPbWNnkkTwYALf7CArRIGfueAQHjn3VqCdjTOewbzs1VrSPVj3\n",
+       "rMze/n9g15Az0wenbAWAw3j45Z9i+JNw88euBL6IzcT9D+AbbJo+IFz3yJBXfztwQzh/XnQFiQff\n",
+       "TkZMpSxTMbsjTk47DlisJd0WztuUTSNlaZOynAcgwmDaHj+A9bO3UGnRPEpY5zYbhUpZBkpZpgbR\n",
+       "zHIaVmIjbe+MxcZmpmJ+/7pw7rTAd1cE/zOsptFc4EEt6UpsIZyP5OwbP/uGI/gQGFwAnKElfSAI\n",
+       "xi+x70dLCdH097AGZamWdBXVBf54rLFeRpMNV7DbDgH+n5Sl2qpwfZUzSOXBd4XeIvD3Y0XNHgdQ\n",
+       "5duq1Mqj3xj+Nx3BF0GVZ9k+djsbDjqWSoG/jq51nxYB5zFk3XBuf+/dQDsbZu8BXkhotLSkX9eS\n",
+       "jueO9+xl/ZzXZI6/WcrywiByh/G3jz/B5QvhhouXBbG4EWss/gMTsRjBg3Wf3wcsJCcvPpxzOvZZ\n",
+       "rKWzTXN2+H90+H8KcHO43bTAY6m4Pw6iNJq2ZbBq7g4t6WZsVnS8priQ+delLHdJWc6SslyGjac8\n",
+       "EF5zlhcCP6BS4GeG/1MxsV+HCc60EIVGi2YFKYGXsoyXshxNQaQsr5KypHsOw7DP5h3YD/n28NAH\n",
+       "gbdJWV6fOcWZ2Oc5Mxz/DilLUYGejQUM6d7wL4DXdcOg7kex92kR8I9h243AYSEoSJOO4GcBSFk6\n",
+       "9cqkLIfVsJM+DnwF+CHwT12++hyqjQ9IWQ6WsmTtvqbOL2W5OOezmE8L/HfoPQJ/N/CvVeq/57ER\n",
+       "62quL7h/UwIPwMMvX8q4xR/CIrsnAbSk/64lvbqp8xn3AIez7EVr2T1sI9DOukP6Y9U6O3olIoxg\n",
+       "2RkjGLKuI/NFyjIKs0ROw6yL7TxxavyCxOj1U8AbwopSQ7D3qZ8IA7WkO0Ikl87mSTMO2KEl3YoJ\n",
+       "fPaHdzb2XsZjTwVuCrcX0LwP/xpsQtox9Ns1ijEPw8qT4/dhJWaPPYMJ/DnA3wHfw+Zd7MEmss0D\n",
+       "zk/7siG99Sgsap6TurZZmA0YBb5dS7oL+4xnUt2ieTPWWBTlEuBjqfsvBm7B0j//kSDwWtKngFcA\n",
+       "34oNQhg7OAT4FUkEfz5wacH5DEdgwVMHWtKHsc+1S6mZUpYxqdvjgfcD79WSfllL+pfwXDvDtb81\n",
+       "jAUskLIsxoKCDoEPabYr0j1TKcsIrMG+POuzS1lmYg3f94AvYSmF1ezGZl/f84Hl4feW3t4fWyvi\n",
+       "2no9BylX2NA3AAAgAElEQVTL66Qs36vxe5gDlLDxx3jMBCwY63IGDfQSgVdlmypfaOCQjWQmOdVh\n",
+       "K80K/F0X/Y31c57GfhBPZR8WYZ4IVzW4JN4KYD33v34z5uWvY90hAzExTttOh7PyJBiy7vBUNsIL\n",
+       "gF1YFHgY1pNIL2GIlnSxlvS6sG0o1sBtpnLRj2oCHyNlyETw4cv9YuDrwNxw/1QsUgOzdXZjYwmF\n",
+       "CdHbsdiM6Rcz/zMnov1g6dnx830Ss2fABP71wNe1pN/Rkh6kJX2flnSDlvQx7P1LL9RyIrBQS/o0\n",
+       "lhobq5zOwnoe00gieLBg4zyqWzSHYwPV6Qlw1V7XVKwRPl/KMiBsPhfzp7+GDaTFCB4t6T2YYMUZ\n",
+       "4/PDNT5C0uM4HGscLsk810Apy6VSlqNSm4+kctJf5GbsfUkf32kWtJTljKzAhe2zgaekLPFz/jTw\n",
+       "8/D+Z/k+1ls5H/uevhV4T7BwHsM+h1dgqcfp780xWCA0AxuwTXMScL2WdIuWdDXwP8Dbcp67K5yK\n",
+       "fffLme3vwn5L3wJ+I2WptVLafGyG/yerPD4Hs5s/lWoEjgDub4X/Dr1E4JtgA8XtGbBlAm9t8rke\n",
+       "5RdX3An8b5XnnA+8jNo5vxWED+9sFr82rirVzvqD4w8sLfBHsnUiPDtoJxbJgQn7z7Av+RGYt9yG\n",
+       "NWJ5X7Yo8JuonLDzEDZLc5iU5Z9SXc6DqCLwmAivBq7CGoe5WGS9JvW6Otk0BSL68zAP+krgLA7+\n",
+       "/Su55y1Av/jcaYG/DxtPqVY24zfABcGP/yU2+Pvr8NhSEptmJiaUaYsGrPfzT9j7vQILJg4Um0kL\n",
+       "9p7fRTEf+8WYD70YGzAXTOB/jzWKbyFZLyDydayncTEmLtcQqqQGER6PFUE7V8ryAuh4f7+NBSHX\n",
+       "SlliAbsjyUTwgVvJCDxwp5TlTfFOCCh+A/xfzkD7C7Be079LWV6KRaCfqfIe3I29h98HPqElvV1L\n",
+       "+uPwWLRoXol9fw9NHXcs1jO8AHitlCVdE2o6SboyWC/undLamdQnYNH166UsH5KyfE7Kcin2Ot8P\n",
+       "fB77DT0oZemo7yRlOTTV4zgMG+B+v1gdpSxzsN/yASRjFenFirpMXxX4dSQiVBdVLlXN/aIX4VE2\n",
+       "T5umJX2NlnRHzuNzMcH7jEjx91NLehd6QKyauZFN04dhBccqBR620X7Ik9BRdfIkLGLZiP2wFmMz\n",
+       "e58mM9Ep9CqGkCPwoWDXYqx65+eBd4eHXo6NMYC9z2mBPxv4I7AE+5G9jM6DQR0CH7rlXwDuk7LU\n",
+       "KpH86vCabgSOZ+xDp3Pf320ksYduI/j8WtL7gIOC/ZTHb7GqpDdjP8BDtKSxImT078GE5V4sK2d2\n",
+       "eK3RwvgqZhc9HRqtlVjOtmA/wE8DbwiNyAeyFkKKs7CMpZ9jYv56rBfxiJZUtaQ/yRaUCzbRe4GX\n",
+       "AJ/FhDuOAxwBLNGSbsCiwm+HXtQnMdE9E4uWfxuuqZNFk3o/OzJppCzTsEj5c6lB6mPCe7Ia+Hna\n",
+       "ksEaki+G9+KXwBu1pGvz3oDw/n0TuFFL+qfMw6ux93k+ZntlBf6uMH7wc+AfUo9Nx+yzyJ3Y7+HF\n",
+       "edfQJCdgjes7sdf7DPZevk5Leq+WdK+W9B1YY/uT1HfgKqwRBxP4a4EPY2NG/UNDEXtfc7DEkq9i\n",
+       "nxu4wAPwEyp9ze7kUUJFQRFOEOH7mcfnYa36XhqI4gMjgC2q7GHPwI1ov6eoFPgjgFtYeWI7cFL4\n",
+       "MZ+ARWB/I8kOacPso2wEPwhbg3YPnSN4MJvmy5iQvDb41a/ARBI6R/CnAddpSXdjX8KLMEFPswDz\n",
+       "4fsD38UGEm8FfpY3sBe2zQf+EER7IdsmrGX9wQ8RBF5LeqmW9EfxmODt5qIlXYo1FJ/Wkl4cuvCR\n",
+       "dJbMTCyCXIF9hu2p/b6M/ZDjGMAKrGdjYx6Wj38A9oO/CPhKGDB7v5TloPC6BBOcP2Pv5yuwgcgP\n",
+       "1et+a0mv1ZKepCX9lZZ0T3i96zEBjz/+n2LjUH/FBqhfFiyLq7Dg57zwWvO83IeBUcE7B2uQf4/1\n",
+       "TN4ftp0dXudbMCFeImW5IDx2PBZdvwP4qJY0+x3Ivp4fhOvJbo8px7dh35FOAh9ufwuL0GNPokLg\n",
+       "w3kux8S4y4ilq04HHtCSXqElfZOW9DPhe1gx+KklXYAFAM8P4wCzgROCtTUE633+BgvkfovV6XpJ\n",
+       "OHwO1qv8C/DCVADx3BZ4VbarVvwgu5PHgJkhOj8dEyMARBiERYT3YStavazBc6fr3rdz5z+8l+RL\n",
+       "DRbB38jD527FunDnY1kR6zCBh8SDf4rOpQrSC6LkCfxtWDZNCROxL5L41ZAaZA1fvmOxaAmscZhM\n",
+       "Z4F/FJvE8mcsSj4T6x0MBr6TI/IHA2u4WDeLcCjwNW765N8wMa5M0SzYQ9KSnq8l/a+ch5ZiKZb9\n",
+       "sIj1cezHOZfEokFLuktL+j+p427AovEjsJRQxayD52MDhi8J1/tJLLsDQkE6Leny8HmN1ZIeHwS4\n",
+       "GZZjkeHicI2Kva8PA6doSdMR7fexVOPH8xrD0Gu4gySKPx1rmP8R+LhYCuxLgD+FQfn3YL2Pz4RI\n",
+       "dR4WXd+gJc0GPLnUaNSWAr/DGqJDoUNgpxDsKy3pQ1hjGhuJGVRG8GDiebaUZVDcEBrdi6QsV0hZ\n",
+       "XlXt2kJPc3Rq07HAohDIFOFmzDqNRRJfgEXvD4WemgIfwmyxM4FDw3XOwVJKn8Aa61guPW/cpCn6\n",
+       "pMD3JGEy1EbsCzcXmCFCTJ86HFiqyk4skjq16HlFGIBNrok/wHau/vb62GUXYQwWAdzHo2eD/WB/\n",
+       "gGVggH2p1mPRVbUIPvrvkC/wPwBOD8/5K8wa+FXq8XQEPx14JgyOgQn8ktR9oOOH/AcsIn6ZlnRr\n",
+       "+KG8Eouar5SyfF3K8uFwyDyskTkZ+JWW9H+4892bMOHKZvDcIdL8zEdMhJ+HDbRuDrn7K7H3ZV2N\n",
+       "467AxKVjKUgt6d1a0ie0pBuxxvcj2A/49WHg7b1Y5EbYvyvzJsAao5NIRXda0vu1pG9Vmx2d5leY\n",
+       "pVNLKG4lEfj52MSah7HG4VLMokk33tcBA7DewlPhdbeCd2I9vYexxrc/9p24Vyvr/i8gyfzJWjRo\n",
+       "SduxQCs9/vMWrBFcTu3o/mTgrtRY0fGkBr8L8DesoT8F+00dj2lDx/iKlvRuYEr4vyTsM4XEav4r\n",
+       "ZlUeSLMJITm4wBcj2jQx6ySmrEVxAvtyTQ3CXIThmD0TI5t2Ktd/jWLSDv1GaUkvw6KCT0FHxsWx\n",
+       "QVBzPXgqBT6bRUPwEeO4wq+xga505Jr24NNdZrBB54+Tzzu1pBeEtMb4XFswC+tG1s9+ht2DLgl5\n",
+       "xsdg7+EYkmnro7Av/iAR0gN8E+hajfoHsPfknzF7BpKB81oCfy/2wzufnO6zlnSZlvR/1OYg3BrO\n",
+       "/xpaO8tyOfZ7rdt9D43Jz0i+m3nchllp07H3O57337DP+vbQAMZzKvBfWPbOHbQILenq0GPahtWl\n",
+       "mkHn7xrY7+uoEN0fQH6K9FVU2qRvAS7GxkxeGCzIPE7Cgo+YxXMCjQl8jOBPwXojG7CApmIAPTXe\n",
+       "cif2XXoyjLmAWV7vJOkhtgQX+GI8itkls7DWOq7yMxeLZG1SlP24T8k7QQ7ZZQnXkS/wG7B1YNGS\n",
+       "rtCSdqyUpSVdHm5WWDQivEqET5AMsEJ+BN+BlvRxYGJmsCwdwR+LZUTE/Z/Ukl5Z5Vy5K1FpSZ/R\n",
+       "kn6Bbyy9kY2z9mIRZGwkR2MLuQzHBGd9znsyhGQR8YYJjdmrsZS65SKUWHtofH+qWn7hB3cl9iOu\n",
+       "133+T6wR/rKWtOg8jSI8jqXHPlpvx8AHoGbq8Q3YBLI7gRviZxaE9g1k0jADP8Gqst6Z81greAgL\n",
+       "Yl5O53Wf78Nsr+nAE1VE8Crg5cFymYVF0VeHHs4dwItD1tibxQrNxeDhBOzzf0mwTk4jsUCL8Aj2\n",
+       "3ZyLNQy3Yb26bIZU5E7gdZg9FfkrNs7TMv8dXOCL8ijWIi/BPrROAh9oxKYZTmW9nWxZgIOxbut6\n",
+       "kjVc+6fsoTRZi+Zw7Etaz4OvIB2xpa5pSJjQcQydo6pmaePRs3ZiPu88rOGIQj4NE/sNmMCn35Mh\n",
+       "kLtqVWHUsnBeh2VmnMfy+XFMoFYEDzvargFg19B6P8CrsCJm3+zKdeawHLPECi1XGKLiqh5yiPJP\n",
+       "xyyML2ceu1lLem3OMY9hPb3rso+1iIewAf9hVFqFYK+/DZtBnfXfIw9gmnYENiHtl6kIOdpsP8Je\n",
+       "8zcx2xMs0PgqNrB8ATa+UO05OhEam79httJ2TOD7U1vgJ1Ep8A9iv3UX+H3Ao1g2yKJwe3YQ2qOp\n",
+       "7AbfhE2LL0I2gs9aNDMxG6EjgsfSrfKqQGYHWUdi0UDaokmvJFWIICbfAf6FTATfRdp45GWKTXrZ\n",
+       "FXz8+BqnURnBj4OOAdaBdFHgAbSkv9eSXgEMYdXcrdg4yPaaB/38qnu56RPw+a15tW7S596lJf2g\n",
+       "5qfUdoW/0OIaMmEA8L+1pIXniGhJXx/swe7gQWyg9S3Zxin0MB7ABppzxTcI7fexaP2fMEspciWW\n",
+       "0jgDK818AXBhGFAejn3PT8NKRmQnVhXhD1haJVgUvwtL0MjjvvB4h8CH1/dDkkmDLcEFvhiPYi1y\n",
+       "h8Bjrf5S1Yo1YW8HjhIpVK2vnsDPApapsgNQEQZjX/6z0icRoT8memtIIviRWAMxjETgn6K5olmX\n",
+       "ABdi3flOC6k3SRvLTx+ICXlsIEdjqaZR4LMRfBTWLgt8iqEsOW8T8JW6vueKUwZy7RehVdUyGyQ0\n",
+       "HC3Lruil/BY4V0taLfK9D3gpNebAaEk/g/2O5mpJ70htfwyL0l+rJd2pJV2MBREfBu4Ig7SLsYDi\n",
+       "941euJb0Mi1pXBz7duAd1XpboVexEHME0ts/piVtVS8ZsMEKpz7R91yEWR2zsXSnv6R3UmW7CO8H\n",
+       "fiXCNaodk4fyyPPg03bETJLZeusxAZwFHCHCeFXWhMdGYlbPNmBwmNw0AhPEWSQCv4JkvdfCaEnX\n",
+       "SVn+AzihhYM/bewZOBTlKqRjgZfR2Be+msDHxqtpDz6HoWyddKCW9F8K7Btnsh4GZCfsOC0gpJPW\n",
+       "KrJ1HzYxr6Z9EmySJTnbs0kBv8IGYKNF9X1ANZn/0BSh9/HTOrudR41xn1bhAl+MdZivtgiLMg/C\n",
+       "JrF8LrujKj8U4XZSKXJVqBrBi9CG9RjiIF20V2ZivvxpJJOR2oANqjwrwh4slS167UeSlDVeAUwT\n",
+       "QRqo4RP5HPllEJrFZrWuOKXE9JtjSthorPbIYcAzquwSyRX41kbwxV9XWuCdfUMsIV7YH6/DrzDP\n",
+       "/zYALen3WnTeumgo79HduEVTAFVUlRNV2aTKFiwqPh4bVM2jncRTrkYti2YmsDwlxOuxruNUzFec\n",
+       "nzqujaR88g4sch+JWTJHUjnICplUySIEr7b24iiNYQL/g5tWq1VRhETgj4YO2ytdzbKlFk2wtmKB\n",
+       "tyIMxz73w1vx/NUQ4TIRWloZcT+ipQKvNuv5SyTVUPc7XOCb41Hg9horQq0HRtepMFkrTXIWlcWU\n",
+       "NmBivQ6rBTM/9dgoEoHfTiLwi7Bocxt0LI3YlE0jgoSJWa0i1qUZnto2Brvmg0kEvjsj+Gj1NCLw\n",
+       "d9H9EfzbyB9If84TLJx/pvhCP0XO+ckWTtrqdbjAN8cjWBGhXFTZhRUnqiVGI6gU+HSjMJNkIk58\n",
+       "7JiwbRE2oSrWEclG8EMwgV+IRajpRqgpgcemqf+47l7FacOsrrTAxwheSKyp7hT4IZn/9RiOfe4H\n",
+       "ijS8ZGAhQq9iAHCGSOFsrOcUWtLPF00VdQoIvIj8QERWi0juEnoiMl9ENonIwvBXZMCqr/Nx8lcO\n",
+       "SrMeas5qrYjgQ6OwAxPnvAj+WCyr5llsZuUR4bE2kog3G8FDawT+NJpYMq4GMa1zOHTU9IlTtDeR\n",
+       "vJ703IDcQdYu9CyGZv7XI35eD9J9Ufwg7DvwaZJVkRynaYpE8D/EUpNqcYOqzgt/n23BdfVqVFmt\n",
+       "WndN1mzaIwAinCbCyXSe6AQWsU4gP4I/nET0nyZZuGIMiSDuwCLcYVg0DJX53c0K/Imp52sFbeFa\n",
+       "YgQ/ClifspHi69mS2mcwVoM8G8HfK1KxFF9RmrFoWiLwInxAJHfG8xDs83qQrpVkcByggMCr6l+h\n",
+       "Itc7j2aWaNvfiamNWd6EpWVlPXiw9Lu3kR/B9ycR/dXQMRA3iWRB8e2YMGwPx++lMoJfSYMCL8JQ\n",
+       "TNBaMvAXLKiRVAr8aJKUMVvtykgL/BBs0DVZ1s0Kjx3S5LU1KvDDSAS+qwOtp2NjKlmiwG+htdlC\n",
+       "znOUVnjwCpwsIveIyNUi0q1ZBn2I3AgeE9iTsYlSWYH/HLYk2GwqBT4KXty2ikqBj5koO8L2Tars\n",
+       "xrINumrRHIfZPSLSEtEZio1PbCAR7zEkr/EJCgo8SeXAWouJ1LqOeN4ixAb5UZLl85plRJXnjQK/\n",
+       "lcrxCcdpilbkwd8NTFPV7SJyDlZNLXdNThG5OHV3gaouaMHz91aqpUpOw1bBuZCMwKuyQoSfAW9S\n",
+       "JT2yH3tQMYJfRbIy0WQqI/hJJCmR11OZUtaMwJ+IlSiegDUeS2vvniDCcaqdClPFQeG0eI8mEfVL\n",
+       "sAYA7PUMFOEAzKJZg5VYjZyO9VKaFfjtNC7wT2auoRmGV3lej+AdRGQ+lZlyTdNlgVfV1EChXiMi\n",
+       "l4rIaNXOlfRU9eKuPl8fomOQVYS3Ar9Q5RlMYF+DVTXMevBgEy+yNSzWY/5zTA+rG8EDqPL2zHlW\n",
+       "YBk4Eh4vMuHpJKww10mYyBcS+CDKt4gwVZX0qko1BV41mYauioqwFRO7GMGnF7uej+UwNyPw8Xzd\n",
+       "KvBh0trWMDgeKRTBNzkpzenjhMB3QbwvIqWqO9ehyxaNiEwQsUL5InICIHni/hykHRgTxPTbwFwR\n",
+       "RmBe+iNYUbLO06ltAPermc1PAQ+mRGI1MDGcOx3BR4HPazgIA8PPYEWrltTJ049++YlYGeR0o1KE\n",
+       "GVgAkU0prBfBZ4n7VVg0IkwJ57qZggIfqnHGBZKH0pzArwLGhgasCD+gcj3ReK5qAr8j2GvPQkUt\n",
+       "fMdpmCJpkr/ASmEeIiIrROTtInKRiFwUdnktcJ+ILMJWur+w+y63TxEHWcdhP9xDseh9RZgZe4fa\n",
+       "Itt1UbU1H1ObotgOB/aG2bXQ2aLJYwW2fuUU6Milr8Y8LJpcgTUqjWTSxJLK2XGIRgU+G8EPCw3P\n",
+       "6dhM4vXUKYOcYiYmuGACny7QVo/hJJH4Ooo3dvOw3lqaahH8YJKsJ/fhnS5TNwpR1ZolSlX121iE\n",
+       "6lQSB1lnhfuHYiK5suoRNch01ddgkfE0EnsGkgg+d85C4Nck67AeFK6pGhcAvwlWySoaE/i4uHVR\n",
+       "ga82OzHuNzjssxebDDQXKwu7kcrFmmsxHBgVSg/HCH5eA8fGhjTaNDU/y7B4yQRsAttoVdaHxim+\n",
+       "nizRooGkYVubs5/jFMJnsnYf0YM/CBPeQwgRfFdPHLrwG7EVbp5OPbSdlAdf5djPqvJ7zOc/qNp+\n",
+       "QYguICmalk7NLEIjAp/OosmStmi2Y1lBQ8O1PIW91qIe/HDoqLY5FIvEG53oBMV9+COxErTXkyzI\n",
+       "PjRcQy0PHirfH8dpChf47iNm0czCfuAdFk2Lzr8aiz6zEfwgals0kWUkvYs8jsa+H7Fee0cEL9Kx\n",
+       "+HEHIpyQ8fTnYAOyRT34aqVT0wK/gySynYg1bhspLvAxM2U0zXvwUFzgj8J6U78DXhW2xWJv9QQ+\n",
+       "vk7HaRoX+O4jWjQHYROYDsIEtVUCvwqrT5MW+GzlyFrUjOBJ2TPhfhzYnQjcIZIsHiLSsYZmevxl\n",
+       "dtjWVQ8+bdHEDJNh2FjDKhoT+PTzDQnHUmUZxA6CpTOEpPTyUzQm8FcBZ4WB2XRefxaP4J2W4gLf\n",
+       "fWzEBv9mY930J7GBwVYK/DwqLZq4TFwrIvgzgaszzzcBW7oQbKJWTAP8DlY75SsitAVBPAhb2abV\n",
+       "Fk06go8CPzJcy7w6q2nFiHgUyXKGRXLhhwLbU4PiDUXwqqzDspdG4RG804O4wHcTIdtiCzYYuAxb\n",
+       "ULiVFs0qTBhbFsGLWI55KOB1FJWLbEcP/kxM4E4M28vAlap8EVvY+HOY+G3AJlnFuQBfEOF9ZAQ+\n",
+       "FBobT/XlALMWzbZwjjbMQ09H8G+kdpGudATfiMBny0rUFfhgV8UIHpLCacPD7appkuF2RwMo0r01\n",
+       "6J3qiPAGEd64r6+jWVzgu5eYwvcEJvDQWg8emo/gV2CWywAAEY4EHgoWzFHAY+mCaqH2/bPAy7Fa\n",
+       "Oi8IlsMbSOqX/zNm05yN+e/papBHAx/FBD8dwc/BFjepWGQ5RV4EPxtYq8qe8FrbgqBOwUrtVssO\n",
+       "6zGBxywkJfmcYunjEVjjXCRNMkbwN4e6O07Pczz23e2TuMB3L+1Y3vsuTOA3pXLWu0pc6i4vgs+d\n",
+       "6JQmCOpT0CEccWHts7Ev9R05h63CMkB+hPn/Z2Cvb1k4ZzvwLUzwo8BHi2YGJmCnYgK/E0vTPZKk\n",
+       "8csjz4OfE64lllnejQnmVEwUj61yrmHYjOBGLZpcga8zUSzaM3EMIwr8cKoLfCcPXoSBWA9lUp1r\n",
+       "3KeIMFIk+RxFGN/AZLDeTByv6ZO4wHcv60nqxzxAZQngrhIFvtkIHoIPH4TqQsxLfylwAuafZ1kN\n",
+       "XK/KJmxl+zLw35l9voo1AksJq1SF888ALsZEfWMQvi1YMbNOM3pT5EXwc6h83dGmmYKNG7y4yrmG\n",
+       "Y+KcjuC30bjAxwa01vKHp2AzgCPpCH51fE4RThThv8I+eR587AHtE4EXYYgIRUqAzwQOTjV6P6L6\n",
+       "59CXcIF3qtJOUlfmFiw6bhWrsJmVaeFpVOCjD38cNoHoEuwaX0B+BP8E8Odw+1asPs1v0zuEImlv\n",
+       "CdvXY8I7FtiFzSK9mcS22IL1FopE8GkPviOCD2zEovLJ2MpTtQT+cZIf7fbwVy8XPrs4i5Jj04gk\n",
+       "lhfWu7k+9XBckrEjgk+t3hUnauVl0cS68Psqgp8DfCTbWxHpVEhvKqYncQLXqPDX13GBd6qyBisv\n",
+       "GxfubuWsxEeA7OpZjQyygkXwLw3n+aUqK7DIeA75s2HfiUVmYAJ/vyoPZ3dS5SpVHg4DzdswD/Nx\n",
+       "VXapcmqqUdqCWT1FIvi0RTObzgI/OzzXH4HjRDoi5GkiHWUThmGNVFWLJmTiZAU/r3b/Sjqnmf4Y\n",
+       "+GB47nlYYxaJ4xEjsIZPsVWs2kiEsFYEv68W4p6MvfcdvZUwHrA0I/oxbXZ46n9FmmemAewrjCJ/\n",
+       "1nGfwAW+e/ks8M3uOLEqO1X5embzDpJiVUX4C8lCIrHcxB+AhcHbzj7ntjCwCfAz4PwCz9GOifjj\n",
+       "OY/Fsri1BD7WZElPdBpIZ4vmSGBlGBi+FTgvPPYV4MPh9nBM4HMHWUM+/B/pnIkzHDqt4PVj4Asi\n",
+       "9uMPYncsVs//VGBRZlH2tAe/OfW8I8kX+KYieBEmiXBJkX0LMjnn+edi15we+I1lqOPAcCeBx4KD\n",
+       "l7fw2lqKCB8V4SOZzR7BO/mo0q5af8CzhawjVWa0Hqrcqsp5qnwoVdL3P4EvFDh2e170nkM7Fs1W\n",
+       "E/h1YXC2GlswkdsdGpcotNkI/kiSVMvLgPeLMAkr9BUHetMWTV4E/zLsPXxPxoLIi+B/go2rxAyi\n",
+       "KdgA7k6szs/1mf3THvwWMgKfmkwVbbYYwY/DMp6KWjRHYuWoW0UU+HQPIha+OyK1rW4Ej/WyevNg\n",
+       "8fNI2Xuh0W5I4EUYWq9Ka0/iAr8focoWVc7t4jmWqPK7Vl0TJmzHULnwSGQLtf33uM94ksg2RsVp\n",
+       "gd+EiU0s/nUFJjjfDM8bbY5o0UzEqnDuplLg34algP4v8KHU+eNyfR0EH/4i4M0ijMcasYXAd7EV\n",
+       "u/IEPnrw2Qi+H5U2VHzdMYK/j+IWzURqD/42Sl4E/3zs/U/n53cIfKqgWofAhwZsBq1d27fVjMHS\n",
+       "f6NAD8YK2zUSwV+Nfd97BS7wTnfTjkVG1SL4WvZM3Gckld40dLZoDiUIfPD+L8MspEuojOCj4MeG\n",
+       "Yjs24DkROA0rrnYJ8N7UDz0vgo8DytcBL8Fsi4XAT7ESDbdkdq8VwYNZHtU8+PsoHvl2h8BnexBH\n",
+       "YVVJ0xH8NOy9HYZZaOmyDITjD6T3C/xokkJ5sRfXiAc/kfylOvcJLvBOdxPtlzyBXwPcW+f4dK17\n",
+       "SAQ+XeZ4IyYe6dmw3wU+jWX9xAg+ZrDsIiPwmJVzhSpbVXkUs1smpo6rNn/hGuAcLIJfpMoGVU5U\n",
+       "7bBaImmBz0bwUJnZE193jODvB8aHKLgeE7EGq6Ec9ODd51kLk7AZzRPDfoOxSPx/CQIfjpuKLUie\n",
+       "jtzTAh/LYvSYwIswQoRzGjhkDPAwySzt0dhn1UgEP5JeVGLCBd7pbmoJ/D8Dl9Y5ficmtmlvemt6\n",
+       "li10rF/bUZ9dlfWqfJbK2bTRatlAZ4E/ksrSDA+RpC/WE/izsQHWhVX2AbORhmGikY7g27CJWtUi\n",
+       "+HFYw7WZYpFhFNBGC5XdTP4EscnYussxgj8cE8FFwGGh0WnDZjk/Fa45Pnda6GZiEX5PRvDnAp9p\n",
+       "YP+xWGG4WM9oFPbeu8A7ThVioa1OKaIhbfLZzodU7BMnRKVn6T6V2S0KfF49m21Av1AULc58XU+l\n",
+       "pz8EOAyLQCNLSAQ+2ip517cyXM8YaqxXGwqVbcAyT2IEP5iklMVYzO+NC46nI/i1mCVVxKaJvY7C\n",
+       "No0Is7AIe1Jmez9MkBemzvt8bIbuxvA6poW/lalrzovgZ2LWVU8K/HEUbOhCL2QM5qGnI/jCAh9S\n",
+       "QAdRfI2BbscF3ulu2oEnii5PWIW0wN9HkgIZiXn/nVZYCg3EOkxgokivpzKCH4qJeXrA9yHg0PDD\n",
+       "PxqrCFqNa4B7CrzGdZiVlPXgl2NZONtTpQ22Y172REzgV1Fc4PfQmA8fK4SOy2wfhzWeT6Se+/kk\n",
+       "tjqGaqcAABLySURBVNoDmE0zFXvvY0rrcKyhyhP4estEtpJjybwPYd2CdhE+GVNcA0Ox9+1vWM9k\n",
+       "MInAD8qzr3Iss2i3eQTvPGd4CMu37wpbCBaNKntVOw3MbsQEu9oEr3YqBT5r0UzCxCjdQCwhWYXr\n",
+       "AGqXmfgeFMo9Xxf+b6VS4JdhVkiHbx+EfhtmE6zHIvgimTQTw/kaFfh2OotvXNA93XuYS77AryCZ\n",
+       "1zAc69VkPfj7gf45E8laThDfY+gcwZ+Drc9wHvC+1PYxQHsYO1mCWXajSHqgg3Ke5gYRHhLhg+G+\n",
+       "C7zz3EKVe1V5dxdPk47g81gJ3JZZtzbNOkxgom+fjeCPBZZkjo8e/EnALTXOjSpLVbmq7quw64iT\n",
+       "xbaTiOHjhAg+s/9WYENI56xq0YgwV4SDw0St2GAUEvgQmZ6B1RTKRvCTMaFux9IfR2C2R6yxczNW\n",
+       "onkOnS2arMDPDNe1GpggwgHhfN3FbMIAaSbSPgObw/DLcE2RsSQN8IOYZTcaCwaqFaSbAXyeZEZ5\n",
+       "FHi3aBynAWoKvCorVTmzxvH1LJrxVPrvYLbJBOBFdE55bJZ2kkJlO0jy+9diUXD2NW4hGbuo5cH/\n",
+       "G/CecL61mCgVFc/ZWHG4v4XjSdkRk4GngvW0Bot6HwzF5sAahaXAB7AIPm3RdAi8CP3D63ucIPDA\n",
+       "m7BJdZ0Q4VSRQguq1OJYrJ5SbEgJaw8cB/w1XF/6/RxDkhAQG/e40tgO8gV+DHAlNlGtP8m6BB7B\n",
+       "O04DREujWeoJPGQmXIXB38eA19M6gV9HZdrnJMxW2kD1CD4K/CpyLJpQTvgMLE0zrnK1meICfyY2\n",
+       "KWsNSQT/DhG+E64vDmg/jUXrHRO4Qq/m77HofQmVFs0qYHCInidjM5afIRH440nyzbN8HGsAusJx\n",
+       "WFZUeuWwk7D6SVvC65qc2j9P4KM91imCDx79gSRrG7ThFo3jNEWHB98k7VRaNDeSjAvkCnxq21Dg\n",
+       "zi48d5p1JBH8dkxgNmEikifw6Qj+CeDwnMG+UzCxmhfOV1jgg/i+H6srtJZE4Odhs3RfSSLwq4Cz\n",
+       "sIldHYRsmkNVuYlKiyZmCg3D3vvl4ZAo8PMwiyOPcVjJ6q5wLPa5bSYR+HSFzzyBjxbNQyQWTRT4\n",
+       "7GSnMVijFQfxx2ACv5m+JPAi8gMRWS0iedUF4z7fEJFHROQeEZnX2kt0nLoefD0qInhVblDlN+Gx\n",
+       "eN6sRQP2Q1+k2qXnzl5HtQh+APkRfBSdW7Hfa7YU8kuAX2DCciImoEUj+FdjDecfMIGPg6wzsHLP\n",
+       "6UXdn8YqYN6cOUfs7cTrjQK/hUTwZ5DMg1gTXvfzgWEiuWmM4+m6wB+OZVxtIXkv0gK/ClvRLGpg\n",
+       "OoJ/BGuUJlDdg0/vHyexjcSybvqUB/9DrKRsLiJyLjBHVQ/GKuld1qJrc5zICiprzzTKOqpPVtqG\n",
+       "pcfl5bD/gSo+cZPch/m/0Fng47Y0HRF88MEvAT6V2eclWAXMhViGSKEIPvQEPg18JkSha4FxqcVZ\n",
+       "PodF9jFjZhVwe2aCWZa0RZMW+MkkcxRWY72O1ZgFlhfFjwNGi1RE2IVJFQlrp9KiOQyboEWwizaT\n",
+       "TIIbG/ZHlZ2Y7XQ4VSwaKgU+TqaLAt93InhV/SvJFzCP87DSqajqbUCbiPTmehNOH0OVL6jyrS6c\n",
+       "Iv4Q88RpFXBRlfLIf1Xl8i48b/Z8d6tSCne3YxHiRkxEoLMNtRmLeCO/BGaIcDJYeQFs4tTtmMDP\n",
+       "o7hF8wIsz/6qcG3bsdmoMeJersqb4nKMwLXUL32dtmi2kMzGjemWYMJ+Wrje5WQEPowpDMFstGaj\n",
+       "+CFY9dEo4iNSM27Xp/ZLD7SmBRus99af6oOs6aybtEXTtwS+AFOoXEh6JUllOcfpDcQfYl7BsGdV\n",
+       "+X4PXw+YwPfDIvjNmP2RjeAvxnrQQIcV8j1seUUIA6Rh+91hW1GBfzFwdSb9cw1WGE6Dt96BKjeq\n",
+       "8ss656xm0aQHa1djdtRCzLaZmTlHFM5baV7g20hmN8drGImVuEjPnE778GkPHsyyUyrrBqWpZtE8\n",
+       "RS+yaFq1KG524Cc3Z1hELk7dXaCqC1r0/I5Ti6oCvw/pWH1Llb0ibCQj8KoVFTMj15KI/otIBotj\n",
+       "HZxV2KScegJ/JlYaOc1aLPskr25QEaJFE0s7pAU+HcHH691DiOBFGKXKBsyeWYP1Sj7a5HWkBT4O\n",
+       "smYjdOgs8NkIfkP4bKoNsuZZNHfTxQheROYD87tyjkgrBP5JktVcwKL3vJogqOrFLXg+x2mUWhbN\n",
+       "viK7vGK6Pk4t7gamiDABE+m44MgKTKCfxESyqsCHJQWPx2yQNF0SeFV2iaCY+GU9+Cjw0XJaiAni\n",
+       "MSKMA5aLMJak9s4dwPEiDMizz+qQjeBHkGTEpKkl8A+m9o8lpQWYptpROyg6F+uw+QRttMCiCYHv\n",
+       "gnhfREpVd65DKyyaK7BFlhGRE4GNqrq69iGO06Nsw6ab98oIPvzfQAGBD7Ngb8Dyzw8k1NMPVstc\n",
+       "VZaSsmhE+IecGaNxScFsg7cGE/5mI3iwRnQy1SP49diYx9MkHvw5mAUykyDwqqzDBqSbieLzLJp6\n",
+       "EfzYzOO3Aq8Nt6NFcyhJFlHa0klbNKuw2jX9m7jullMkTfIX2Cy3Q0RkhYi8XUQuEpGLAFT1auAx\n",
+       "EVkKXI7NqHOcXkMqV7k3CnwUokICH7gO+BhwXdpDV+3wuePAogBfxPLZ05yJWT1Z1mK1Zboi8Fuo\n",
+       "jOCnAnviQuth8fnvhn2jB/9yYC+WmhgjeLAc/Y+KdFrcHAAR/l2EN+Y8lLVoRpAv8E8Dk0MVyIEk\n",
+       "cxRizaN7wt04yDoZmCrCMKpbNLEuUq9Yx7WuRaOqbyiwz/vq7eM4+5h17B8WDZjAf53MpKMUUdTG\n",
+       "h/+vxOqvEFauuoDQ686wBssc6arAx/9bsAYjbywBLNodiaV6XoEJfCy3gCrLRPh3bKzg/Jzjz8WE\n",
+       "9WeZ7XkRfDWLZhIm1utr1Bvajs1qjfME5tB5kDVm0WwiyR6qGVCEBni+aqflHVtGqwZZHae383Ys\n",
+       "D723kBX432B54UV4AKvMmBeFQyIwh2CTds4KdVhmYLn9PyBnwhJJ5NxViyb+34Jl5WTr9wMWJYuw\n",
+       "AhPeWzGBH0GSEQRhXV0RpgfvG7BBWez15bkQjVo0B1GZQZMlzjqO6d8H0zlNcmx4nhjBF/HhZwN/\n",
+       "FGFgrWJ2XcFLFTjPCUIO+u59fR0pKgRelf9WrbkiVAfB5jgqLXiZx2O1ynlYHZ17sFmrVwGfV+Xf\n",
+       "qghKKwR+C1bXfg+JwFeL4MF8+Kuwxi1aNB25/6psw3of/5A57kSskZos0rF2aqSoRbMKi8r/E/hK\n",
+       "jWuMHvx4bK7A8zLnW4/1EPphK5BtpUqqpAjDwqAy2Nq2B9LaNXQrcIF3nH1DnNRUrYZ9V9mMZcQ8\n",
+       "DPwfllr5J1W+V+OYNdhg9Joa+9RjK0kUvwVLL8yN4AP/io3dLcMi6bQHH7kMK4A2MLXtJOAmrN7M\n",
+       "C6BjkhQUtGhCg78BuEk1mW+QQ/Tgx2MFzA7HIvSN4TzPYu/3ptBwxh5UBeH6rgG+HTYdGf5nyzS3\n",
+       "DLdoHGcfoMoeEbZRe5Z4V4gCfyUWxc8FPlLnmEeB/2rB6ltpHx5qRPCqVqlThD1YBL+ajMCrskSE\n",
+       "e4HXEcYSMIH/Ojbn5sQwEPtKbH3cohYN2HjE7XVeU4zgh2ONyqsIOfKpfdaRzAfqZNEEv/3SsM8p\n",
+       "4f5R4eFx1FjusSt4BO84+46jVJPMjRazGfOoH1ZlhSpvDlP3q6LKRlXe1cXnTQt8jORrWTSR9Zge\n",
+       "zSRn/V7gq8BHRJCQgngC5tvfimXhfA6zTiDfoom1aSoIM3R31rm2ONFpPCbws3PO1U7SG8uzaE7A\n",
+       "Jqa9FAusp2ER/EqSejgtxwXecfYRqTov3cFmLFrslsiwBnFwldT/WhYN0JHKugzL4snr1fwBS2V8\n",
+       "EVblcnXIlb8t3P8B5sfHhTfyIvhsFk1RYgQ/gaRCZVbg11Ep8FmL5u3Ad8Pcg1uwmaqzsFx/t2gc\n",
+       "x2mIzdhqTD2dGtqQRZNhGTAxzyIKGTdfwQZDJxKWyVNljQgfwgZK34iJcFbgh2GNXZ5FU4S4MPt4\n",
+       "zEJ6OOdc60gW/KiwaMLM4QtILJm/YYL/GMnM4wrC5LTRqh119JvCI3jH2T/ZjAlRT9NVga81wPsz\n",
+       "LPPlDapJGWdVvh6ybVZg1keHwIdsnp1YBN7sgPYOTIQ1NJiP0DmtslYEfz62rm8s4XILcDrWG+hY\n",
+       "aEWE14rwcxHuxno972zyejvwCN5x9k/2lcAvhI7aMRtJVlUqwjLy/Xego0571bUpqBT4tJjH1M1m\n",
+       "c823Y1VzY/roYqygW5rVJIOsW4GhIhwJfAZ4IVZaInInlm55P/Z6Dw/bP4FlPH0TuLvemEkRXOAd\n",
+       "Z/9kAfR83r8qfyUsahIE+fgGDr+RrrkKK7CB5d0ZcdwMXcoM2o6Jd+xdfJnOFXQvJdHTrViJhndi\n",
+       "kf0LVZMlIVXZIcKd2OIjQmLRHAR8X7VLi9tU4ALvOPshqvzvvr6GRlFlEWHFpSZZga3FujGzfQvU\n",
+       "zZSpRZyUtho6Gq4KgkUUiR78kcCn0uKe4mXhOk/AVtIagdlILS3U6B684zj7Cyuwgcw8gW82gwaS\n",
+       "SWlFJ4BtxWyio7GJUZ1QZX0YTI4e/CxgWatLFrjAO46zvxAtmqzAb6b5DJo4ULuL4tH1VqyUwuOx\n",
+       "imYN1mJ58LMoXouoMC7wjuPsL6zE8ujzIvimBT6wneIR/DZssPeOAvtuwZYw/P/t3VuIVVUcx/Hv\n",
+       "r9IHMwgJxi4D+uDD+OQQDJFI8yT60oWiFAIfeoju0EMiSPrQgwVBD0EEGViEJUViEGRBRRAkkrdS\n",
+       "KcEBLS8DRSQSKP17WOvk8Xgue2b2OXtm+/vAxj1775mz/LP8u2fv9V9rBMqvi3CCN7O6+J00dUHZ\n",
+       "j2hgagm+UXvQM8E3rVUwhu/gzczay5OHneHaBP8ezPil80Wm9ogGes9x0zBJmjCt9ATvUTRmVien\n",
+       "aEnwEXxfws/9hrw8YgEXSENUD/W6MJskvZAt/RGNE7yZ1ck1Cb4MEVOqKj0FPDyFQqVGVawTvJlZ\n",
+       "Fx+RXrZWJo+6+WwK3zJJWmi89HmDnODNrDYi+LjqNkzDJH14/g5+yWpmVrWz9GlaZ0X0Za3Xaz9I\n",
+       "iohonb/BzOy6lhdEXxDRfijnTHJnoTt4SWskHZf0q6SNbc6PS/pL0oG8bZ5OY8zMrjcR/NMpuc9U\n",
+       "zwQv6UbgTdI0ncuB9ZJG2lz6bUSM5u2VkttpLSSNV92GunAsy+V4zh5F7uDHgBMRMRERl4APSYvb\n",
+       "tvLjl8Ear7oBNTJedQNqZrzqBlhSJMHfSRrX2XA6H2sWwL2SDkn6XNJyzMysUkWGSRZ5C/sjMBwR\n",
+       "FyWtBXZzZYVzMzOrQM9RNJLuAbZGxJr89Sbg34h4tcv3nATujog/mo4NZriOmVnNTHcUTZE7+P3A\n",
+       "MklLSLO1PQasb75A0hBwPiJC0hjpP46r3gp7iKSZ2WD1TPARcVnSs8AXpLmWt0fEMUlP5vNvA48A\n",
+       "T0m6TJp1bV0f22xmZgUMrNDJzMwGayBTFfQqlLLuJE1IOpyLyPblY4skfSnpF0l7Jd1adTtnK0nv\n",
+       "Sjon6UjTsY7xk7Qp99XjklZX0+rZqUMst0o63VTouLbpnGPZhaRhSV9L+lnST5Kez8fL6Z8R0deN\n",
+       "9FjnBLAEmEdaNX2k359bp400jeiilmOvAS/l/Y3AtqrbOVs3YBUwChzpFT9SMd/B3FeX5L57Q9V/\n",
+       "h9mydYjlFuDFNtc6lr3juRhYkfcXkuacHymrfw7iDr5ooZR11/qS+n5gR97fATw42ObMHRHxHfBn\n",
+       "y+FO8XsA2BkRlyJigvQPaGwQ7ZwLOsQS2hc6OpY9RMTZiDiY9y8Ax0h1RqX0z0Ek+CKFUtZdAF9J\n",
+       "2i+psfDAUEQ0lhA7BwxV07Q5q1P87uDq+cTdX4t5Lhc6bm96nOBYTkEeqTgK/EBJ/XMQCd5vcWdu\n",
+       "ZUSMAmuBZyStaj4Z6Xc3x3maCsTPse3uLWApsIK0JurrXa51LNuQtBD4BHghIv5uPjeT/jmIBP8b\n",
+       "MNz09TAVr7gy10TEmfznJGnx4DHgnKTFAJJup/iK75Z0il9rf70rH7MOIuJ8ZMA7XHlk4FgWIGke\n",
+       "Kbm/HxG78+FS+ucgEvz/hVKS5pMKpfYM4HNrQdICSbfk/ZuB1cARUgw35Ms2kKaHsOI6xW8PsE7S\n",
+       "fElLgWXAvgraN2fkBNTwEKl/gmPZkyQB24GjEfFG06lS+mffl+yLDoVS/f7cGhkCPk39gJuADyJi\n",
+       "r6T9wC5JTwATwKPVNXF2k7QTuA+4TdIp4GVgG23iFxFHJe0CjgKXgafznanRNpZbgHFJK0iPCk4C\n",
+       "jSJIx7K3lcDjwGFJB/KxTZTUP13oZGZWU16T1cysppzgzcxqygnezKymnODNzGrKCd7MrKac4M3M\n",
+       "asoJ3sysppzgzcxq6j+vUsbacqJa4gAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7fbb37f207d0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "plot(np.vstack([train_loss, scratch_train_loss]).clip(0, 4).T)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's take a look at the testing accuracy after running 200 iterations. Note that we are running a classification task of 5 classes, thus a chance accuracy is 20%. As we will reasonably expect, the finetuning result will be much better than the one from training from scratch. Let's see."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Accuracy for fine-tuning: 0.570000001788\n",
+      "Accuracy for training from scratch: 0.224000000954\n"
+     ]
+    }
+   ],
+   "source": [
+    "test_iters = 10\n",
+    "accuracy = 0\n",
+    "scratch_accuracy = 0\n",
+    "for it in arange(test_iters):\n",
+    "    solver.test_nets[0].forward()\n",
+    "    accuracy += solver.test_nets[0].blobs['accuracy'].data\n",
+    "    scratch_solver.test_nets[0].forward()\n",
+    "    scratch_accuracy += scratch_solver.test_nets[0].blobs['accuracy'].data\n",
+    "accuracy /= test_iters\n",
+    "scratch_accuracy /= test_iters\n",
+    "print 'Accuracy for fine-tuning:', accuracy\n",
+    "print 'Accuracy for training from scratch:', scratch_accuracy"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Huzzah! So we did finetuning and it is awesome. Let's take a look at what kind of results we are able to get with a longer, more complete run of the style recognition dataset. Note: the below URL might be occassionally down because it is run on a research machine.\n",
+    "\n",
+    "http://demo.vislab.berkeleyvision.org/"
+   ]
+  }
+ ],
+ "metadata": {
+  "description": "Fine-tune the ImageNet-trained CaffeNet on new data.",
+  "example_name": "Fine-tuning for Style Recognition",
+  "include_in_docs": true,
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "python2"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.9"
+  },
+  "priority": 4
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
new file mode 100644
index 0000000..f29fc7e
--- /dev/null
+++ b/examples/CMakeLists.txt
@@ -0,0 +1,31 @@
+file(GLOB_RECURSE examples_srcs "${PROJECT_SOURCE_DIR}/examples/*.cpp")
+
+foreach(source_file ${examples_srcs})
+  # get file name
+  get_filename_component(name ${source_file} NAME_WE)
+    
+  # get folder name
+  get_filename_component(path ${source_file} PATH)
+  get_filename_component(folder ${path} NAME_WE)
+    
+  add_executable(${name} ${source_file})
+  target_link_libraries(${name} ${Caffe_LINK})
+  caffe_default_properties(${name})
+
+  # set back RUNTIME_OUTPUT_DIRECTORY
+  set_target_properties(${name} PROPERTIES
+    RUNTIME_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/examples/${folder}")
+
+  caffe_set_solution_folder(${name} examples)
+
+  # install
+  install(TARGETS ${name} DESTINATION bin)
+
+  if(UNIX OR APPLE)
+    # Funny command to make tutorials work
+    # TODO: remove in future as soon as naming is standardized everywhere
+    set(__outname ${PROJECT_BINARY_DIR}/examples/${folder}/${name}${Caffe_POSTFIX})
+    add_custom_command(TARGET ${name} POST_BUILD
+                       COMMAND ln -sf "${__outname}" "${__outname}.bin")
+  endif()
+endforeach()
diff --git a/examples/cifar10/cifar10_full.prototxt b/examples/cifar10/cifar10_full.prototxt
new file mode 100644
index 0000000..c16f7dc
--- /dev/null
+++ b/examples/cifar10/cifar10_full.prototxt
@@ -0,0 +1,154 @@
+name: "CIFAR10_full_deploy"
+# N.B. input image must be in CIFAR-10 format
+# as described at http://www.cs.toronto.edu/~kriz/cifar.html
+input: "data"
+input_dim: 1
+input_dim: 3
+input_dim: 32
+input_dim: 32
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 32
+    pad: 2
+    kernel_size: 5
+    stride: 1
+  }
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "pool1"
+  top: "pool1"
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "pool1"
+  top: "norm1"
+  lrn_param {
+    local_size: 3
+    alpha: 5e-05
+    beta: 0.75
+    norm_region: WITHIN_CHANNEL
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "norm1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 32
+    pad: 2
+    kernel_size: 5
+    stride: 1
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: AVE
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "pool2"
+  top: "norm2"
+  lrn_param {
+    local_size: 3
+    alpha: 5e-05
+    beta: 0.75
+    norm_region: WITHIN_CHANNEL
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "norm2"
+  top: "conv3"
+  convolution_param {
+    num_output: 64
+    pad: 2
+    kernel_size: 5
+    stride: 1
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "pool3"
+  type: "Pooling"
+  bottom: "conv3"
+  top: "pool3"
+  pooling_param {
+    pool: AVE
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "ip1"
+  type: "InnerProduct"
+  bottom: "pool3"
+  top: "ip1"
+  param {
+    lr_mult: 1
+    decay_mult: 250
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 10
+  }
+}
+layer {
+  name: "prob"
+  type: "Softmax"
+  bottom: "ip1"
+  top: "prob"
+}
diff --git a/examples/cifar10/cifar10_full_solver.prototxt b/examples/cifar10/cifar10_full_solver.prototxt
new file mode 100644
index 0000000..f30b398
--- /dev/null
+++ b/examples/cifar10/cifar10_full_solver.prototxt
@@ -0,0 +1,26 @@
+# reduce learning rate after 120 epochs (60000 iters) by a factor of 10
+# then another factor of 10 after 10 more epochs (5000 iters)
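+# (CIFAR-10 has 50,000 training images; with batch_size 100 in the train/test
+#  net, one epoch is 500 iterations, so 120 epochs = 60,000 iterations and
+#  10 epochs = 5,000 iterations.)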
+
+# The train/test net protocol buffer definition
+net: "examples/cifar10/cifar10_full_train_test.prototxt"
+# test_iter specifies how many forward passes the test should carry out.
+# In the case of CIFAR10, we have test batch size 100 and 100 test iterations,
+# covering the full 10,000 testing images.
+test_iter: 100
+# Carry out testing every 1000 training iterations.
+test_interval: 1000
+# The base learning rate, momentum and the weight decay of the network.
+base_lr: 0.001
+momentum: 0.9
+weight_decay: 0.004
+# The learning rate policy
+lr_policy: "fixed"
+# Display every 200 iterations
+display: 200
+# The maximum number of iterations
+max_iter: 60000
+# snapshot intermediate results
+snapshot: 10000
+snapshot_prefix: "examples/cifar10/cifar10_full"
+# solver mode: CPU or GPU
+solver_mode: GPU
diff --git a/examples/cifar10/cifar10_full_solver_lr1.prototxt b/examples/cifar10/cifar10_full_solver_lr1.prototxt
new file mode 100644
index 0000000..59bc572
--- /dev/null
+++ b/examples/cifar10/cifar10_full_solver_lr1.prototxt
@@ -0,0 +1,26 @@
+# reduce learning rate after 120 epochs (60000 iters) by a factor of 10
+# then another factor of 10 after 10 more epochs (5000 iters)
+
+# The train/test net protocol buffer definition
+net: "examples/cifar10/cifar10_full_train_test.prototxt"
+# test_iter specifies how many forward passes the test should carry out.
+# In the case of CIFAR10, we have test batch size 100 and 100 test iterations,
+# covering the full 10,000 testing images.
+test_iter: 100
+# Carry out testing every 1000 training iterations.
+test_interval: 1000
+# The base learning rate, momentum and the weight decay of the network.
+base_lr: 0.0001
+momentum: 0.9
+weight_decay: 0.004
+# The learning rate policy
+lr_policy: "fixed"
+# Display every 200 iterations
+display: 200
+# The maximum number of iterations
+max_iter: 65000
+# snapshot intermediate results
+snapshot: 5000
+snapshot_prefix: "examples/cifar10/cifar10_full"
+# solver mode: CPU or GPU
+solver_mode: GPU
diff --git a/examples/cifar10/cifar10_full_solver_lr2.prototxt b/examples/cifar10/cifar10_full_solver_lr2.prototxt
new file mode 100644
index 0000000..d4ed5d8
--- /dev/null
+++ b/examples/cifar10/cifar10_full_solver_lr2.prototxt
@@ -0,0 +1,26 @@
+# reduce learning rate after 120 epochs (60000 iters) by a factor of 10
+# then another factor of 10 after 10 more epochs (5000 iters)
+
+# The train/test net protocol buffer definition
+net: "examples/cifar10/cifar10_full_train_test.prototxt"
+# test_iter specifies how many forward passes the test should carry out.
+# In the case of CIFAR10, we have test batch size 100 and 100 test iterations,
+# covering the full 10,000 testing images.
+test_iter: 100
+# Carry out testing every 1000 training iterations.
+test_interval: 1000
+# The base learning rate, momentum and the weight decay of the network.
+base_lr: 0.00001
+momentum: 0.9
+weight_decay: 0.004
+# The learning rate policy
+lr_policy: "fixed"
+# Display every 200 iterations
+display: 200
+# The maximum number of iterations
+max_iter: 70000
+# snapshot intermediate results
+snapshot: 5000
+snapshot_prefix: "examples/cifar10/cifar10_full"
+# solver mode: CPU or GPU
+solver_mode: GPU
diff --git a/examples/cifar10/cifar10_full_train_test.prototxt b/examples/cifar10/cifar10_full_train_test.prototxt
new file mode 100644
index 0000000..d45fc61
--- /dev/null
+++ b/examples/cifar10/cifar10_full_train_test.prototxt
@@ -0,0 +1,220 @@
+name: "CIFAR10_full"
+layer {
+  name: "cifar"
+  type: "Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TRAIN
+  }
+  transform_param {
+    mean_file: "examples/cifar10/mean.binaryproto"
+  }
+  data_param {
+    source: "examples/cifar10/cifar10_train_lmdb"
+    batch_size: 100
+    backend: LMDB
+  }
+}
+layer {
+  name: "cifar"
+  type: "Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TEST
+  }
+  transform_param {
+    mean_file: "examples/cifar10/mean.binaryproto"
+  }
+  data_param {
+    source: "examples/cifar10/cifar10_test_lmdb"
+    batch_size: 100
+    backend: LMDB
+  }
+}
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 32
+    pad: 2
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "gaussian"
+      std: 0.0001
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "pool1"
+  top: "pool1"
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "pool1"
+  top: "norm1"
+  lrn_param {
+    local_size: 3
+    alpha: 5e-05
+    beta: 0.75
+    norm_region: WITHIN_CHANNEL
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "norm1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 32
+    pad: 2
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: AVE
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "pool2"
+  top: "norm2"
+  lrn_param {
+    local_size: 3
+    alpha: 5e-05
+    beta: 0.75
+    norm_region: WITHIN_CHANNEL
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "norm2"
+  top: "conv3"
+  convolution_param {
+    num_output: 64
+    pad: 2
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "pool3"
+  type: "Pooling"
+  bottom: "conv3"
+  top: "pool3"
+  pooling_param {
+    pool: AVE
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "ip1"
+  type: "InnerProduct"
+  bottom: "pool3"
+  top: "ip1"
+  param {
+    lr_mult: 1
+    decay_mult: 250
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 10
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "accuracy"
+  type: "Accuracy"
+  bottom: "ip1"
+  bottom: "label"
+  top: "accuracy"
+  include {
+    phase: TEST
+  }
+}
+layer {
+  name: "loss"
+  type: "SoftmaxWithLoss"
+  bottom: "ip1"
+  bottom: "label"
+  top: "loss"
+}
diff --git a/examples/cifar10/cifar10_quick.prototxt b/examples/cifar10/cifar10_quick.prototxt
new file mode 100644
index 0000000..1ad190e
--- /dev/null
+++ b/examples/cifar10/cifar10_quick.prototxt
@@ -0,0 +1,147 @@
+name: "CIFAR10_quick_test"
+input: "data"
+input_dim: 1
+input_dim: 3
+input_dim: 32
+input_dim: 32
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 32
+    pad: 2
+    kernel_size: 5
+    stride: 1
+  }
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "pool1"
+  top: "pool1"
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "pool1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 32
+    pad: 2
+    kernel_size: 5
+    stride: 1
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: AVE
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "pool2"
+  top: "conv3"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 64
+    pad: 2
+    kernel_size: 5
+    stride: 1
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "pool3"
+  type: "Pooling"
+  bottom: "conv3"
+  top: "pool3"
+  pooling_param {
+    pool: AVE
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "ip1"
+  type: "InnerProduct"
+  bottom: "pool3"
+  top: "ip1"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 64
+  }
+}
+layer {
+  name: "ip2"
+  type: "InnerProduct"
+  bottom: "ip1"
+  top: "ip2"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 10
+  }
+}
+layer {
+  name: "prob"
+  type: "Softmax"
+  bottom: "ip2"
+  top: "prob"
+}
diff --git a/examples/cifar10/cifar10_quick_solver.prototxt b/examples/cifar10/cifar10_quick_solver.prototxt
new file mode 100644
index 0000000..14b4401
--- /dev/null
+++ b/examples/cifar10/cifar10_quick_solver.prototxt
@@ -0,0 +1,25 @@
+# reduce the learning rate after 8 epochs (4000 iters) by a factor of 10
+
+# The train/test net protocol buffer definition
+net: "examples/cifar10/cifar10_quick_train_test.prototxt"
+# test_iter specifies how many forward passes the test should carry out.
+# In the case of CIFAR10, we have test batch size 100 and 100 test iterations,
+# covering the full 10,000 testing images.
+test_iter: 100
+# Carry out testing every 500 training iterations.
+test_interval: 500
+# The base learning rate, momentum and the weight decay of the network.
+base_lr: 0.001
+momentum: 0.9
+weight_decay: 0.004
+# The learning rate policy
+lr_policy: "fixed"
+# Display every 100 iterations
+display: 100
+# The maximum number of iterations
+max_iter: 4000
+# snapshot intermediate results
+snapshot: 4000
+snapshot_prefix: "examples/cifar10/cifar10_quick"
+# solver mode: CPU or GPU
+solver_mode: GPU
diff --git a/examples/cifar10/cifar10_quick_solver_lr1.prototxt b/examples/cifar10/cifar10_quick_solver_lr1.prototxt
new file mode 100644
index 0000000..d3af70c
--- /dev/null
+++ b/examples/cifar10/cifar10_quick_solver_lr1.prototxt
@@ -0,0 +1,25 @@
+# reduce the learning rate after 8 epochs (4000 iters) by a factor of 10
+
+# The train/test net protocol buffer definition
+net: "examples/cifar10/cifar10_quick_train_test.prototxt"
+# test_iter specifies how many forward passes the test should carry out.
+# In the case of CIFAR10, we have test batch size 100 and 100 test iterations,
+# covering the full 10,000 testing images.
+test_iter: 100
+# Carry out testing every 500 training iterations.
+test_interval: 500
+# The base learning rate, momentum and the weight decay of the network.
+base_lr: 0.0001
+momentum: 0.9
+weight_decay: 0.004
+# The learning rate policy
+lr_policy: "fixed"
+# Display every 100 iterations
+display: 100
+# The maximum number of iterations
+max_iter: 5000
+# snapshot intermediate results
+snapshot: 5000
+snapshot_prefix: "examples/cifar10/cifar10_quick"
+# solver mode: CPU or GPU
+solver_mode: GPU
diff --git a/examples/cifar10/cifar10_quick_train_test.prototxt b/examples/cifar10/cifar10_quick_train_test.prototxt
new file mode 100644
index 0000000..2317739
--- /dev/null
+++ b/examples/cifar10/cifar10_quick_train_test.prototxt
@@ -0,0 +1,222 @@
+name: "CIFAR10_quick"
+layer {
+  name: "cifar"
+  type: "Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TRAIN
+  }
+  transform_param {
+    mean_file: "examples/cifar10/mean.binaryproto"
+  }
+  data_param {
+    source: "examples/cifar10/cifar10_train_lmdb"
+    batch_size: 100
+    backend: LMDB
+  }
+}
+layer {
+  name: "cifar"
+  type: "Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TEST
+  }
+  transform_param {
+    mean_file: "examples/cifar10/mean.binaryproto"
+  }
+  data_param {
+    source: "examples/cifar10/cifar10_test_lmdb"
+    batch_size: 100
+    backend: LMDB
+  }
+}
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 32
+    pad: 2
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "gaussian"
+      std: 0.0001
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "pool1"
+  top: "pool1"
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "pool1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 32
+    pad: 2
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: AVE
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "pool2"
+  top: "conv3"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 64
+    pad: 2
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "pool3"
+  type: "Pooling"
+  bottom: "conv3"
+  top: "pool3"
+  pooling_param {
+    pool: AVE
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "ip1"
+  type: "InnerProduct"
+  bottom: "pool3"
+  top: "ip1"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 64
+    weight_filler {
+      type: "gaussian"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "ip2"
+  type: "InnerProduct"
+  bottom: "ip1"
+  top: "ip2"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 10
+    weight_filler {
+      type: "gaussian"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "accuracy"
+  type: "Accuracy"
+  bottom: "ip2"
+  bottom: "label"
+  top: "accuracy"
+  include {
+    phase: TEST
+  }
+}
+layer {
+  name: "loss"
+  type: "SoftmaxWithLoss"
+  bottom: "ip2"
+  bottom: "label"
+  top: "loss"
+}
diff --git a/examples/cifar10/convert_cifar_data.cpp b/examples/cifar10/convert_cifar_data.cpp
new file mode 100644
index 0000000..f4c42e4
--- /dev/null
+++ b/examples/cifar10/convert_cifar_data.cpp
@@ -0,0 +1,109 @@
+//
+// This script converts the CIFAR dataset to the leveldb/lmdb format used
+// by caffe to perform classification.
+// Usage:
+//    convert_cifar_data input_folder output_folder db_type
+// The CIFAR dataset could be downloaded at
+//    http://www.cs.toronto.edu/~kriz/cifar.html
+
+#include <fstream>  // NOLINT(readability/streams)
+#include <string>
+
+#include "boost/scoped_ptr.hpp"
+#include "glog/logging.h"
+#include "google/protobuf/text_format.h"
+#include "stdint.h"
+
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/db.hpp"
+
+using caffe::Datum;
+using boost::scoped_ptr;
+using std::string;
+namespace db = caffe::db;
+
+const int kCIFARSize = 32;
+const int kCIFARImageNBytes = 3072;
+const int kCIFARBatchSize = 10000;
+const int kCIFARTrainBatches = 5;
+
+void read_image(std::ifstream* file, int* label, char* buffer) {
+  char label_char;
+  file->read(&label_char, 1);
+  *label = label_char;
+  file->read(buffer, kCIFARImageNBytes);
+  return;
+}
+
+void convert_dataset(const string& input_folder, const string& output_folder,
+    const string& db_type) {
+  scoped_ptr<db::DB> train_db(db::GetDB(db_type));
+  train_db->Open(output_folder + "/cifar10_train_" + db_type, db::NEW);
+  scoped_ptr<db::Transaction> txn(train_db->NewTransaction());
+  // Data buffer
+  int label;
+  char str_buffer[kCIFARImageNBytes];
+  Datum datum;
+  datum.set_channels(3);
+  datum.set_height(kCIFARSize);
+  datum.set_width(kCIFARSize);
+
+  LOG(INFO) << "Writing Training data";
+  for (int fileid = 0; fileid < kCIFARTrainBatches; ++fileid) {
+    // Open files
+    LOG(INFO) << "Training Batch " << fileid + 1;
+    snprintf(str_buffer, kCIFARImageNBytes, "/data_batch_%d.bin", fileid + 1);
+    std::ifstream data_file((input_folder + str_buffer).c_str(),
+        std::ios::in | std::ios::binary);
+    CHECK(data_file) << "Unable to open train file #" << fileid + 1;
+    for (int itemid = 0; itemid < kCIFARBatchSize; ++itemid) {
+      read_image(&data_file, &label, str_buffer);
+      datum.set_label(label);
+      datum.set_data(str_buffer, kCIFARImageNBytes);
+      int length = snprintf(str_buffer, kCIFARImageNBytes, "%05d",
+          fileid * kCIFARBatchSize + itemid);
+      string out;
+      CHECK(datum.SerializeToString(&out));
+      txn->Put(string(str_buffer, length), out);
+    }
+  }
+  txn->Commit();
+  train_db->Close();
+
+  LOG(INFO) << "Writing Testing data";
+  scoped_ptr<db::DB> test_db(db::GetDB(db_type));
+  test_db->Open(output_folder + "/cifar10_test_" + db_type, db::NEW);
+  txn.reset(test_db->NewTransaction());
+  // Open files
+  std::ifstream data_file((input_folder + "/test_batch.bin").c_str(),
+      std::ios::in | std::ios::binary);
+  CHECK(data_file) << "Unable to open test file.";
+  for (int itemid = 0; itemid < kCIFARBatchSize; ++itemid) {
+    read_image(&data_file, &label, str_buffer);
+    datum.set_label(label);
+    datum.set_data(str_buffer, kCIFARImageNBytes);
+    int length = snprintf(str_buffer, kCIFARImageNBytes, "%05d", itemid);
+    string out;
+    CHECK(datum.SerializeToString(&out));
+    txn->Put(string(str_buffer, length), out);
+  }
+  txn->Commit();
+  test_db->Close();
+}
+
+int main(int argc, char** argv) {
+  if (argc != 4) {
+    printf("This script converts the CIFAR dataset to the leveldb format used\n"
+           "by caffe to perform classification.\n"
+           "Usage:\n"
+           "    convert_cifar_data input_folder output_folder db_type\n"
+           "Where the input folder should contain the binary batch files.\n"
+           "The CIFAR dataset could be downloaded at\n"
+           "    http://www.cs.toronto.edu/~kriz/cifar.html\n"
+           "You should gunzip them after downloading.\n");
+  } else {
+    google::InitGoogleLogging(argv[0]);
+    convert_dataset(string(argv[1]), string(argv[2]), string(argv[3]));
+  }
+  return 0;
+}
diff --git a/examples/cifar10/create_cifar10.sh b/examples/cifar10/create_cifar10.sh
new file mode 100755
index 0000000..a42725c
--- /dev/null
+++ b/examples/cifar10/create_cifar10.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env sh
+# This script converts the CIFAR data into lmdb format (see DBTYPE below).
+
+EXAMPLE=examples/cifar10
+DATA=data/cifar10
+DBTYPE=lmdb
+
+echo "Creating $DBTYPE..."
+
+rm -rf $EXAMPLE/cifar10_train_$DBTYPE $EXAMPLE/cifar10_test_$DBTYPE
+
+./build/examples/cifar10/convert_cifar_data.bin $DATA $EXAMPLE $DBTYPE
+
+echo "Computing image mean..."
+
+./build/tools/compute_image_mean -backend=$DBTYPE \
+  $EXAMPLE/cifar10_train_$DBTYPE $EXAMPLE/mean.binaryproto
+
+echo "Done."
diff --git a/examples/cifar10/readme.md b/examples/cifar10/readme.md
new file mode 100644
index 0000000..4a95cee
--- /dev/null
+++ b/examples/cifar10/readme.md
@@ -0,0 +1,98 @@
+---
+title: CIFAR-10 tutorial
+category: example
+description: Train and test Caffe on CIFAR-10 data.
+include_in_docs: true
+priority: 5
+---
+
+Alex's CIFAR-10 tutorial, Caffe style
+=====================================
+
+Alex Krizhevsky's [cuda-convnet](https://code.google.com/p/cuda-convnet/) details the model definitions, parameters, and training procedure for good performance on CIFAR-10. This example reproduces his results in Caffe.
+
+We will assume that you have successfully compiled Caffe; if not, please refer to the [Installation page](/installation.html). Throughout this tutorial, we assume that your Caffe installation is located at `CAFFE_ROOT`.
+
+We thank @chyojn for the pull request that defined the model schemas and solver configurations.
+
+*This example is a work-in-progress. It would be nice to further explain details of the network and training choices and benchmark the full training.*
+
+Prepare the Dataset
+-------------------
+
+You will first need to download and convert the data format from the [CIFAR-10 website](http://www.cs.toronto.edu/~kriz/cifar.html). To do this, simply run the following commands:
+
+    cd $CAFFE_ROOT/data/cifar10
+    ./get_cifar10.sh
+    cd $CAFFE_ROOT
+    ./examples/cifar10/create_cifar10.sh
+
+If it complains that `wget` or `gunzip` is not installed, install it first. After running the script, the converted datasets (`./cifar10_train_lmdb` and `./cifar10_test_lmdb`) and the dataset image mean (`./mean.binaryproto`) should appear under `examples/cifar10`.
+
+The Model
+---------
+
+The CIFAR-10 model is a CNN that composes layers of convolution, pooling, rectified linear unit (ReLU) nonlinearities, and local contrast normalization with a linear classifier on top of it all. We have defined the model in the `CAFFE_ROOT/examples/cifar10` directory's `cifar10_quick_train_test.prototxt`.
+
+Training and Testing the "Quick" Model
+--------------------------------------
+
+Training the model is simple after you have written the network definition protobuf and solver protobuf files (refer to [MNIST Tutorial](../examples/mnist.html)). Simply run `train_quick.sh`, or the following command directly:
+
+    cd $CAFFE_ROOT
+    ./examples/cifar10/train_quick.sh
+
+`train_quick.sh` is a simple script, so have a look inside. The main tool for training is `caffe` with the `train` action, and the solver protobuf text file as its argument.
+
+When you run the code, you will see a lot of messages flying by like this:
+
+    I0317 21:52:48.945710 2008298256 net.cpp:74] Creating Layer conv1
+    I0317 21:52:48.945716 2008298256 net.cpp:84] conv1 <- data
+    I0317 21:52:48.945725 2008298256 net.cpp:110] conv1 -> conv1
+    I0317 21:52:49.298691 2008298256 net.cpp:125] Top shape: 100 32 32 32 (3276800)
+    I0317 21:52:49.298719 2008298256 net.cpp:151] conv1 needs backward computation.
+
+These messages tell you the details about each layer, its connections and its output shape, which may be helpful in debugging. After the initialization, the training will start:
+
+    I0317 21:52:49.309370 2008298256 net.cpp:166] Network initialization done.
+    I0317 21:52:49.309376 2008298256 net.cpp:167] Memory required for Data 23790808
+    I0317 21:52:49.309422 2008298256 solver.cpp:36] Solver scaffolding done.
+    I0317 21:52:49.309447 2008298256 solver.cpp:47] Solving CIFAR10_quick_train
+
+Based on the solver setting, we will print the training loss function every 100 iterations, and test the network every 500 iterations. You will see messages like this:
+
+    I0317 21:53:12.179772 2008298256 solver.cpp:208] Iteration 100, lr = 0.001
+    I0317 21:53:12.185698 2008298256 solver.cpp:65] Iteration 100, loss = 1.73643
+    ...
+    I0317 21:54:41.150030 2008298256 solver.cpp:87] Iteration 500, Testing net
+    I0317 21:54:47.129461 2008298256 solver.cpp:114] Test score #0: 0.5504
+    I0317 21:54:47.129500 2008298256 solver.cpp:114] Test score #1: 1.27805
+
+For each training iteration, `lr` is the learning rate of that iteration and `loss` is the training loss. For the output of the testing phase, **score 0 is the accuracy** and **score 1 is the testing loss**.
+
+And after making yourself a cup of coffee, you are done!
+
+    I0317 22:12:19.666914 2008298256 solver.cpp:87] Iteration 5000, Testing net
+    I0317 22:12:25.580330 2008298256 solver.cpp:114] Test score #0: 0.7533
+    I0317 22:12:25.580379 2008298256 solver.cpp:114] Test score #1: 0.739837
+    I0317 22:12:25.587262 2008298256 solver.cpp:130] Snapshotting to cifar10_quick_iter_5000
+    I0317 22:12:25.590215 2008298256 solver.cpp:137] Snapshotting solver state to cifar10_quick_iter_5000.solverstate
+    I0317 22:12:25.592813 2008298256 solver.cpp:81] Optimization Done.
+
+Our model achieved ~75% test accuracy. The model parameters are stored in binary protobuf format in
+
+    cifar10_quick_iter_5000
+
+which is ready to deploy in CPU or GPU mode! Refer to `CAFFE_ROOT/examples/cifar10/cifar10_quick.prototxt` for the deployment model definition, which can be run on new data.
+
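+As a minimal sketch of what deployment looks like from the C++ side (the prototxt and snapshot paths below are illustrative and may differ on your system; `examples/cpp_classification/classification.cpp` shows a complete pipeline including preprocessing), loading the trained weights into the deployment network boils down to:
+
+    #include <caffe/caffe.hpp>
+
+    int main() {
+      // Illustrative paths; point these at your own deploy prototxt and snapshot.
+      caffe::Net<float> net("examples/cifar10/cifar10_quick.prototxt", caffe::TEST);
+      net.CopyTrainedLayersFrom("cifar10_quick_iter_5000");
+      // Fill net.input_blobs()[0] with preprocessed image data here, then:
+      net.ForwardPrefilled();
+      const float* scores = net.output_blobs()[0]->cpu_data();
+      // scores now holds one value per CIFAR-10 class.
+      return 0;
+    }
+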
+Why train on a GPU?
+-------------------
+
+CIFAR-10, while still small, has enough data to make GPU training attractive.
+
+To compare CPU vs. GPU training speed, simply change one line in each of the `cifar*solver.prototxt` files:
+
+    # solver mode: CPU or GPU
+    solver_mode: CPU
+
+and you will be using CPU for training.
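+
+The same switch is exposed by the C++ API through `Caffe::set_mode`. As a rough, hypothetical sketch (the real logic lives in `tools/caffe.cpp`), the `caffe train` command essentially reads the solver definition and asks the solver to optimize the net:
+
+    #include <caffe/caffe.hpp>
+
+    int main() {
+      caffe::Caffe::set_mode(caffe::Caffe::GPU);  // or caffe::Caffe::CPU
+      caffe::SolverParameter solver_param;
+      caffe::ReadProtoFromTextFileOrDie(
+          "examples/cifar10/cifar10_quick_solver.prototxt", &solver_param);
+      caffe::SGDSolver<float> solver(solver_param);
+      solver.Solve();  // runs training as configured in the solver prototxt
+      return 0;
+    }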
diff --git a/examples/cifar10/train_full.sh b/examples/cifar10/train_full.sh
new file mode 100755
index 0000000..4285a5d
--- /dev/null
+++ b/examples/cifar10/train_full.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env sh
+
+TOOLS=./build/tools
+
+$TOOLS/caffe train \
+    --solver=examples/cifar10/cifar10_full_solver.prototxt
+
+# reduce learning rate by factor of 10
+$TOOLS/caffe train \
+    --solver=examples/cifar10/cifar10_full_solver_lr1.prototxt \
+    --snapshot=examples/cifar10/cifar10_full_iter_60000.solverstate
+
+# reduce learning rate by factor of 10
+$TOOLS/caffe train \
+    --solver=examples/cifar10/cifar10_full_solver_lr2.prototxt \
+    --snapshot=examples/cifar10/cifar10_full_iter_65000.solverstate
diff --git a/examples/cifar10/train_quick.sh b/examples/cifar10/train_quick.sh
new file mode 100755
index 0000000..2830c40
--- /dev/null
+++ b/examples/cifar10/train_quick.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env sh
+
+TOOLS=./build/tools
+
+$TOOLS/caffe train \
+  --solver=examples/cifar10/cifar10_quick_solver.prototxt
+
+# reduce learning rate by factor of 10 after 8 epochs
+$TOOLS/caffe train \
+  --solver=examples/cifar10/cifar10_quick_solver_lr1.prototxt \
+  --snapshot=examples/cifar10/cifar10_quick_iter_4000.solverstate
diff --git a/examples/cpp_classification/classification.cpp b/examples/cpp_classification/classification.cpp
new file mode 100644
index 0000000..1c6371e
--- /dev/null
+++ b/examples/cpp_classification/classification.cpp
@@ -0,0 +1,255 @@
+#include <caffe/caffe.hpp>
+#include <opencv2/core/core.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/imgproc/imgproc.hpp>
+#include <algorithm>
+#include <fstream>
+#include <iomanip>
+#include <iosfwd>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+using namespace caffe;  // NOLINT(build/namespaces)
+using std::string;
+
+/* Pair (label, confidence) representing a prediction. */
+typedef std::pair<string, float> Prediction;
+
+class Classifier {
+ public:
+  Classifier(const string& model_file,
+             const string& trained_file,
+             const string& mean_file,
+             const string& label_file);
+
+  std::vector<Prediction> Classify(const cv::Mat& img, int N = 5);
+
+ private:
+  void SetMean(const string& mean_file);
+
+  std::vector<float> Predict(const cv::Mat& img);
+
+  void WrapInputLayer(std::vector<cv::Mat>* input_channels);
+
+  void Preprocess(const cv::Mat& img,
+                  std::vector<cv::Mat>* input_channels);
+
+ private:
+  shared_ptr<Net<float> > net_;
+  cv::Size input_geometry_;
+  int num_channels_;
+  cv::Mat mean_;
+  std::vector<string> labels_;
+};
+
+Classifier::Classifier(const string& model_file,
+                       const string& trained_file,
+                       const string& mean_file,
+                       const string& label_file) {
+#ifdef CPU_ONLY
+  Caffe::set_mode(Caffe::CPU);
+#else
+  Caffe::set_mode(Caffe::GPU);
+#endif
+
+  /* Load the network. */
+  net_.reset(new Net<float>(model_file, TEST));
+  net_->CopyTrainedLayersFrom(trained_file);
+
+  CHECK_EQ(net_->num_inputs(), 1) << "Network should have exactly one input.";
+  CHECK_EQ(net_->num_outputs(), 1) << "Network should have exactly one output.";
+
+  Blob<float>* input_layer = net_->input_blobs()[0];
+  num_channels_ = input_layer->channels();
+  CHECK(num_channels_ == 3 || num_channels_ == 1)
+    << "Input layer should have 1 or 3 channels.";
+  input_geometry_ = cv::Size(input_layer->width(), input_layer->height());
+
+  /* Load the binaryproto mean file. */
+  SetMean(mean_file);
+
+  /* Load labels. */
+  std::ifstream labels(label_file.c_str());
+  CHECK(labels) << "Unable to open labels file " << label_file;
+  string line;
+  while (std::getline(labels, line))
+    labels_.push_back(string(line));
+
+  Blob<float>* output_layer = net_->output_blobs()[0];
+  CHECK_EQ(labels_.size(), output_layer->channels())
+    << "Number of labels is different from the output layer dimension.";
+}
+
+static bool PairCompare(const std::pair<float, int>& lhs,
+                        const std::pair<float, int>& rhs) {
+  return lhs.first > rhs.first;
+}
+
+/* Return the indices of the top N values of vector v. */
+static std::vector<int> Argmax(const std::vector<float>& v, int N) {
+  std::vector<std::pair<float, int> > pairs;
+  for (size_t i = 0; i < v.size(); ++i)
+    pairs.push_back(std::make_pair(v[i], i));
+  std::partial_sort(pairs.begin(), pairs.begin() + N, pairs.end(), PairCompare);
+
+  std::vector<int> result;
+  for (int i = 0; i < N; ++i)
+    result.push_back(pairs[i].second);
+  return result;
+}
+
+/* Return the top N predictions. */
+std::vector<Prediction> Classifier::Classify(const cv::Mat& img, int N) {
+  std::vector<float> output = Predict(img);
+
+  /* Do not request more predictions than there are classes. */
+  N = std::min<int>(labels_.size(), N);
+  std::vector<int> maxN = Argmax(output, N);
+  std::vector<Prediction> predictions;
+  for (int i = 0; i < N; ++i) {
+    int idx = maxN[i];
+    predictions.push_back(std::make_pair(labels_[idx], output[idx]));
+  }
+
+  return predictions;
+}
+
+/* Load the mean file in binaryproto format. */
+void Classifier::SetMean(const string& mean_file) {
+  BlobProto blob_proto;
+  ReadProtoFromBinaryFileOrDie(mean_file.c_str(), &blob_proto);
+
+  /* Convert from BlobProto to Blob<float> */
+  Blob<float> mean_blob;
+  mean_blob.FromProto(blob_proto);
+  CHECK_EQ(mean_blob.channels(), num_channels_)
+    << "Number of channels of mean file doesn't match input layer.";
+
+  /* The format of the mean file is planar 32-bit float BGR or grayscale. */
+  std::vector<cv::Mat> channels;
+  float* data = mean_blob.mutable_cpu_data();
+  for (int i = 0; i < num_channels_; ++i) {
+    /* Extract an individual channel. */
+    cv::Mat channel(mean_blob.height(), mean_blob.width(), CV_32FC1, data);
+    channels.push_back(channel);
+    data += mean_blob.height() * mean_blob.width();
+  }
+
+  /* Merge the separate channels into a single image. */
+  cv::Mat mean;
+  cv::merge(channels, mean);
+
+  /* Compute the global mean pixel value and create a mean image
+   * filled with this value. */
+  cv::Scalar channel_mean = cv::mean(mean);
+  mean_ = cv::Mat(input_geometry_, mean.type(), channel_mean);
+}
+
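+/* Preprocess a single image, run a forward pass, and return the
+ * contents of the network's output blob, one value per class. */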
+std::vector<float> Classifier::Predict(const cv::Mat& img) {
+  Blob<float>* input_layer = net_->input_blobs()[0];
+  input_layer->Reshape(1, num_channels_,
+                       input_geometry_.height, input_geometry_.width);
+  /* Forward dimension change to all layers. */
+  net_->Reshape();
+
+  std::vector<cv::Mat> input_channels;
+  WrapInputLayer(&input_channels);
+
+  Preprocess(img, &input_channels);
+
+  net_->ForwardPrefilled();
+
+  /* Copy the output layer to a std::vector */
+  Blob<float>* output_layer = net_->output_blobs()[0];
+  const float* begin = output_layer->cpu_data();
+  const float* end = begin + output_layer->channels();
+  return std::vector<float>(begin, end);
+}
+
+/* Wrap the input layer of the network in separate cv::Mat objects
+ * (one per channel). This way we save one memcpy operation and we
+ * don't need to rely on cudaMemcpy2D. The last preprocessing
+ * operation will write the separate channels directly to the input
+ * layer. */
+void Classifier::WrapInputLayer(std::vector<cv::Mat>* input_channels) {
+  Blob<float>* input_layer = net_->input_blobs()[0];
+
+  int width = input_layer->width();
+  int height = input_layer->height();
+  float* input_data = input_layer->mutable_cpu_data();
+  for (int i = 0; i < input_layer->channels(); ++i) {
+    cv::Mat channel(height, width, CV_32FC1, input_data);
+    input_channels->push_back(channel);
+    input_data += width * height;
+  }
+}
+
+void Classifier::Preprocess(const cv::Mat& img,
+                            std::vector<cv::Mat>* input_channels) {
+  /* Convert the input image to the input image format of the network. */
+  cv::Mat sample;
+  if (img.channels() == 3 && num_channels_ == 1)
+    cv::cvtColor(img, sample, CV_BGR2GRAY);
+  else if (img.channels() == 4 && num_channels_ == 1)
+    cv::cvtColor(img, sample, CV_BGRA2GRAY);
+  else if (img.channels() == 4 && num_channels_ == 3)
+    cv::cvtColor(img, sample, CV_BGRA2BGR);
+  else if (img.channels() == 1 && num_channels_ == 3)
+    cv::cvtColor(img, sample, CV_GRAY2BGR);
+  else
+    sample = img;
+
+  cv::Mat sample_resized;
+  if (sample.size() != input_geometry_)
+    cv::resize(sample, sample_resized, input_geometry_);
+  else
+    sample_resized = sample;
+
+  cv::Mat sample_float;
+  if (num_channels_ == 3)
+    sample_resized.convertTo(sample_float, CV_32FC3);
+  else
+    sample_resized.convertTo(sample_float, CV_32FC1);
+
+  cv::Mat sample_normalized;
+  cv::subtract(sample_float, mean_, sample_normalized);
+
+  /* This operation will write the separate BGR planes directly to the
+   * input layer of the network because it is wrapped by the cv::Mat
+   * objects in input_channels. */
+  cv::split(sample_normalized, *input_channels);
+
+  CHECK(reinterpret_cast<float*>(input_channels->at(0).data)
+        == net_->input_blobs()[0]->cpu_data())
+    << "Input channels are not wrapping the input layer of the network.";
+}
+
+int main(int argc, char** argv) {
+  if (argc != 6) {
+    std::cerr << "Usage: " << argv[0]
+              << " deploy.prototxt network.caffemodel"
+              << " mean.binaryproto labels.txt img.jpg" << std::endl;
+    return 1;
+  }
+
+  ::google::InitGoogleLogging(argv[0]);
+
+  string model_file   = argv[1];
+  string trained_file = argv[2];
+  string mean_file    = argv[3];
+  string label_file   = argv[4];
+  Classifier classifier(model_file, trained_file, mean_file, label_file);
+
+  string file = argv[5];
+
+  std::cout << "---------- Prediction for "
+            << file << " ----------" << std::endl;
+
+  cv::Mat img = cv::imread(file, -1);
+  CHECK(!img.empty()) << "Unable to decode image " << file;
+  std::vector<Prediction> predictions = classifier.Classify(img);
+
+  /* Print the top N predictions. */
+  for (size_t i = 0; i < predictions.size(); ++i) {
+    Prediction p = predictions[i];
+    std::cout << std::fixed << std::setprecision(4) << p.second << " - \""
+              << p.first << "\"" << std::endl;
+  }
+}
diff --git a/examples/cpp_classification/readme.md b/examples/cpp_classification/readme.md
new file mode 100644
index 0000000..a086db1
--- /dev/null
+++ b/examples/cpp_classification/readme.md
@@ -0,0 +1,77 @@
+---
+title: CaffeNet C++ Classification example
+description: A simple example performing image classification using the low-level C++ API.
+category: example
+include_in_docs: true
+priority: 10
+---
+
+# Classifying ImageNet: using the C++ API
+
+Caffe, at its core, is written in C++. It is possible to use the C++
+API of Caffe to implement an image classification application similar
+to the Python code presented in one of the Notebook examples. To look
+at a more general-purpose example of the Caffe C++ API, you should
+study the source code of the command-line tool `caffe` in `tools/caffe.cpp`.
+
+## Presentation
+
+A simple C++ program is provided in
+`examples/cpp_classification/classification.cpp`. For the sake of
+simplicity, this example supports neither oversampling of a single
+sample nor batching of multiple independent samples. The example does
+not try to reach the maximum possible classification throughput on a
+system, but special care was taken to avoid unnecessary pessimization
+while keeping the code readable.
+
+## Compiling
+
+The C++ example is built automatically when compiling Caffe. To
+compile Caffe you should follow the documented instructions. The
+classification example will be built as
+`examples/cpp_classification/classification.bin` in your build
+directory.
+
+## Usage
+
+To use the pre-trained CaffeNet model with the classification example,
+you need to download it from the "Model Zoo" using the following
+script:
+```
+./scripts/download_model_binary.py models/bvlc_reference_caffenet
+```
+The ImageNet labels file (also called the *synset file*) is also
+required in order to map a prediction to the name of the class:
+```
+./data/ilsvrc12/get_ilsvrc_aux.sh
+```
+Using the files that were downloaded, we can classify the provided cat
+image (`examples/images/cat.jpg`) using this command:
+```
+./build/examples/cpp_classification/classification.bin \
+  models/bvlc_reference_caffenet/deploy.prototxt \
+  models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel \
+  data/ilsvrc12/imagenet_mean.binaryproto \
+  data/ilsvrc12/synset_words.txt \
+  examples/images/cat.jpg
+```
+The output should look like this:
+```
+---------- Prediction for examples/images/cat.jpg ----------
+0.3134 - "n02123045 tabby, tabby cat"
+0.2380 - "n02123159 tiger cat"
+0.1235 - "n02124075 Egyptian cat"
+0.1003 - "n02119022 red fox, Vulpes vulpes"
+0.0715 - "n02127052 lynx, catamount"
+```
+
+## Improving Performance
+
+To further improve performance, you will need to leverage the GPU
+more. Here are some guidelines:
+
+* Move the data to the GPU early and perform all preprocessing
+operations there.
+* If you have many images to classify simultaneously, you should use
+batching (independent images are classified in a single forward pass);
+a sketch of this is given after the list.
+* Use multiple classification threads to ensure the GPU is always fully
+utilized and not waiting for an I/O-blocked CPU thread.
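+
+As a starting point for the batching suggestion above, here is a
+hypothetical sketch (not part of the example) that reshapes the input
+blob to hold several images and classifies them in a single forward
+pass. The function name, the `images` vector, and the omission of mean
+subtraction are assumptions made for brevity; it presumes a 3-channel
+BGR network input such as CaffeNet's.
+```
+#include <caffe/caffe.hpp>
+#include <opencv2/core/core.hpp>
+#include <opencv2/imgproc/imgproc.hpp>
+#include <cstring>
+#include <vector>
+
+using caffe::Blob;
+using caffe::Net;
+
+/* Classify a batch of images in a single forward pass. */
+std::vector<std::vector<float> > ClassifyBatch(
+    Net<float>& net, const std::vector<cv::Mat>& images) {
+  Blob<float>* input = net.input_blobs()[0];
+  const int channels = input->channels();
+  const int height = input->height();
+  const int width = input->width();
+
+  /* Resize the input blob to hold the whole batch and propagate the
+   * new shape through the network. */
+  input->Reshape(static_cast<int>(images.size()), channels, height, width);
+  net.Reshape();
+
+  /* Copy each image into its slot of the batch in planar BGR layout. */
+  float* input_data = input->mutable_cpu_data();
+  for (size_t i = 0; i < images.size(); ++i) {
+    cv::Mat resized;
+    cv::resize(images[i], resized, cv::Size(width, height));
+    cv::Mat sample_float;
+    resized.convertTo(sample_float, CV_32FC3);
+    /* Mean subtraction would go here, as in Classifier::Preprocess. */
+    std::vector<cv::Mat> planes;
+    cv::split(sample_float, planes);
+    for (int c = 0; c < channels; ++c) {
+      std::memcpy(input_data + input->offset(i, c), planes[c].ptr<float>(),
+                  sizeof(float) * height * width);
+    }
+  }
+
+  net.ForwardPrefilled();
+
+  /* Collect one score vector per image from the output blob. */
+  Blob<float>* output = net.output_blobs()[0];
+  std::vector<std::vector<float> > scores;
+  for (int i = 0; i < output->num(); ++i) {
+    const float* begin = output->cpu_data() + output->offset(i);
+    scores.push_back(std::vector<float>(begin, begin + output->channels()));
+  }
+  return scores;
+}
+```
+The net would be loaded exactly as in `classification.cpp`, i.e.
+`Net<float> net(model_file, TEST)` followed by
+`net.CopyTrainedLayersFrom(trained_file)`.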
diff --git a/examples/detection.ipynb b/examples/detection.ipynb
new file mode 100644
index 0000000..6a03c99
--- /dev/null
+++ b/examples/detection.ipynb
@@ -0,0 +1,8392 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "[R-CNN](https://github.com/rbgirshick/rcnn) is a state-of-the-art detector that classifies region proposals by a finetuned Caffe model. For the full details of the R-CNN system and model, refer to its project site and the paper:\n",
+    "\n",
+    "> *Rich feature hierarchies for accurate object detection and semantic segmentation*. Ross Girshick, Jeff Donahue, Trevor Darrell, Jitendra Malik. CVPR 2014. [Arxiv 2013](http://arxiv.org/abs/1311.2524).\n",
+    "\n",
+    "In this example, we do detection by a pure Caffe edition of the R-CNN model for ImageNet. The R-CNN detector outputs class scores for the 200 detection classes of ILSVRC13. Keep in mind that these are raw one vs. all SVM scores, so they are not probabilistically calibrated or exactly comparable across classes. Note that this off-the-shelf model is simply for convenience, and is not the full R-CNN model.\n",
+    "\n",
+    "Let's run detection on an image of a bicyclist riding a fish bike in the desert (from the ImageNet challenge—no joke).\n",
+    "\n",
+    "First, we'll need region proposals and the Caffe R-CNN ImageNet model:\n",
+    "\n",
+    "- [Selective Search](http://koen.me/research/selectivesearch/) is the region proposer used by R-CNN. The [selective_search_ijcv_with_python](https://github.com/sergeyk/selective_search_ijcv_with_python) Python module takes care of extracting proposals through the selective search MATLAB implementation. To install it, download the module and name its directory `selective_search_ijcv_with_python`, run the demo in MATLAB to compile the necessary functions, then add it to your `PYTHONPA [...]
+    "\n",
+    "-Run `./scripts/download_model_binary.py models/bvlc_reference_rcnn_ilsvrc13` to get the Caffe R-CNN ImageNet model.\n",
+    "\n",
+    "With that done, we'll call the bundled `detect.py` to generate the region proposals and run the network. For an explanation of the arguments, do `./detect.py --help`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "WARNING: Logging before InitGoogleLogging() is written to STDERR\n",
+      "I0218 20:43:25.383932 2099749632 net.cpp:42] Initializing net from parameters: \n",
+      "name: \"R-CNN-ilsvrc13\"\n",
+      "input: \"data\"\n",
+      "input_dim: 10\n",
+      "input_dim: 3\n",
+      "input_dim: 227\n",
+      "input_dim: 227\n",
+      "state {\n",
+      "  phase: TEST\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"conv1\"\n",
+      "  type: \"Convolution\"\n",
+      "  bottom: \"data\"\n",
+      "  top: \"conv1\"\n",
+      "  convolution_param {\n",
+      "    num_output: 96\n",
+      "    kernel_size: 11\n",
+      "    stride: 4\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"relu1\"\n",
+      "  type: \"ReLU\"\n",
+      "  bottom: \"conv1\"\n",
+      "  top: \"conv1\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"pool1\"\n",
+      "  type: \"Pooling\"\n",
+      "  bottom: \"conv1\"\n",
+      "  top: \"pool1\"\n",
+      "  pooling_param {\n",
+      "    pool: MAX\n",
+      "    kernel_size: 3\n",
+      "    stride: 2\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"norm1\"\n",
+      "  type: \"LRN\"\n",
+      "  bottom: \"pool1\"\n",
+      "  top: \"norm1\"\n",
+      "  lrn_param {\n",
+      "    local_size: 5\n",
+      "    alpha: 0.0001\n",
+      "    beta: 0.75\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"conv2\"\n",
+      "  type: \"Convolution\"\n",
+      "  bottom: \"norm1\"\n",
+      "  top: \"conv2\"\n",
+      "  convolution_param {\n",
+      "    num_output: 256\n",
+      "    pad: 2\n",
+      "    kernel_size: 5\n",
+      "    group: 2\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"relu2\"\n",
+      "  type: \"ReLU\"\n",
+      "  bottom: \"conv2\"\n",
+      "  top: \"conv2\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"pool2\"\n",
+      "  type: \"Pooling\"\n",
+      "  bottom: \"conv2\"\n",
+      "  top: \"pool2\"\n",
+      "  pooling_param {\n",
+      "    pool: MAX\n",
+      "    kernel_size: 3\n",
+      "    stride: 2\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"norm2\"\n",
+      "  type: \"LRN\"\n",
+      "  bottom: \"pool2\"\n",
+      "  top: \"norm2\"\n",
+      "  lrn_param {\n",
+      "    local_size: 5\n",
+      "    alpha: 0.0001\n",
+      "    beta: 0.75\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"conv3\"\n",
+      "  type: \"Convolution\"\n",
+      "  bottom: \"norm2\"\n",
+      "  top: \"conv3\"\n",
+      "  convolution_param {\n",
+      "    num_output: 384\n",
+      "    pad: 1\n",
+      "    kernel_size: 3\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"relu3\"\n",
+      "  type: \"ReLU\"\n",
+      "  bottom: \"conv3\"\n",
+      "  top: \"conv3\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"conv4\"\n",
+      "  type: \"Convolution\"\n",
+      "  bottom: \"conv3\"\n",
+      "  top: \"conv4\"\n",
+      "  convolution_param {\n",
+      "    num_output: 384\n",
+      "    pad: 1\n",
+      "    kernel_size: 3\n",
+      "    group: 2\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"relu4\"\n",
+      "  type: \"ReLU\"\n",
+      "  bottom: \"conv4\"\n",
+      "  top: \"conv4\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"conv5\"\n",
+      "  type: \"Convolution\"\n",
+      "  bottom: \"conv4\"\n",
+      "  top: \"conv5\"\n",
+      "  convolution_param {\n",
+      "    num_output: 256\n",
+      "    pad: 1\n",
+      "    kernel_size: 3\n",
+      "    group: 2\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"relu5\"\n",
+      "  type: \"ReLU\"\n",
+      "  bottom: \"conv5\"\n",
+      "  top: \"conv5\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"pool5\"\n",
+      "  type: \"Pooling\"\n",
+      "  bottom: \"conv5\"\n",
+      "  top: \"pool5\"\n",
+      "  pooling_param {\n",
+      "    pool: MAX\n",
+      "    kernel_size: 3\n",
+      "    stride: 2\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"fc6\"\n",
+      "  type: \"InnerProduct\"\n",
+      "  bottom: \"pool5\"\n",
+      "  top: \"fc6\"\n",
+      "  inner_product_param {\n",
+      "    num_output: 4096\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"relu6\"\n",
+      "  type: \"ReLU\"\n",
+      "  bottom: \"fc6\"\n",
+      "  top: \"fc6\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"drop6\"\n",
+      "  type: \"Dropout\"\n",
+      "  bottom: \"fc6\"\n",
+      "  top: \"fc6\"\n",
+      "  dropout_param {\n",
+      "    dropout_ratio: 0.5\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"fc7\"\n",
+      "  type: \"InnerProduct\"\n",
+      "  bottom: \"fc6\"\n",
+      "  top: \"fc7\"\n",
+      "  inner_product_param {\n",
+      "    num_output: 4096\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"relu7\"\n",
+      "  type: \"ReLU\"\n",
+      "  bottom: \"fc7\"\n",
+      "  top: \"fc7\"\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"drop7\"\n",
+      "  type: \"Dropout\"\n",
+      "  bottom: \"fc7\"\n",
+      "  top: \"fc7\"\n",
+      "  dropout_param {\n",
+      "    dropout_ratio: 0.5\n",
+      "  }\n",
+      "}\n",
+      "layer {\n",
+      "  name: \"fc-rcnn\"\n",
+      "  type: \"InnerProduct\"\n",
+      "  bottom: \"fc7\"\n",
+      "  top: \"fc-rcnn\"\n",
+      "  inner_product_param {\n",
+      "    num_output: 200\n",
+      "  }\n",
+      "}\n",
+      "I0218 20:43:25.385720 2099749632 net.cpp:336] Input 0 -> data\n",
+      "I0218 20:43:25.385769 2099749632 layer_factory.hpp:74] Creating layer conv1\n",
+      "I0218 20:43:25.385783 2099749632 net.cpp:76] Creating Layer conv1\n",
+      "I0218 20:43:25.385790 2099749632 net.cpp:372] conv1 <- data\n",
+      "I0218 20:43:25.385802 2099749632 net.cpp:334] conv1 -> conv1\n",
+      "I0218 20:43:25.385815 2099749632 net.cpp:105] Setting up conv1\n",
+      "I0218 20:43:25.386574 2099749632 net.cpp:112] Top shape: 10 96 55 55 (2904000)\n",
+      "I0218 20:43:25.386610 2099749632 layer_factory.hpp:74] Creating layer relu1\n",
+      "I0218 20:43:25.386625 2099749632 net.cpp:76] Creating Layer relu1\n",
+      "I0218 20:43:25.386631 2099749632 net.cpp:372] relu1 <- conv1\n",
+      "I0218 20:43:25.386641 2099749632 net.cpp:323] relu1 -> conv1 (in-place)\n",
+      "I0218 20:43:25.386649 2099749632 net.cpp:105] Setting up relu1\n",
+      "I0218 20:43:25.386656 2099749632 net.cpp:112] Top shape: 10 96 55 55 (2904000)\n",
+      "I0218 20:43:25.386663 2099749632 layer_factory.hpp:74] Creating layer pool1\n",
+      "I0218 20:43:25.386675 2099749632 net.cpp:76] Creating Layer pool1\n",
+      "I0218 20:43:25.386682 2099749632 net.cpp:372] pool1 <- conv1\n",
+      "I0218 20:43:25.386690 2099749632 net.cpp:334] pool1 -> pool1\n",
+      "I0218 20:43:25.386699 2099749632 net.cpp:105] Setting up pool1\n",
+      "I0218 20:43:25.386716 2099749632 net.cpp:112] Top shape: 10 96 27 27 (699840)\n",
+      "I0218 20:43:25.386725 2099749632 layer_factory.hpp:74] Creating layer norm1\n",
+      "I0218 20:43:25.386736 2099749632 net.cpp:76] Creating Layer norm1\n",
+      "I0218 20:43:25.386744 2099749632 net.cpp:372] norm1 <- pool1\n",
+      "I0218 20:43:25.386803 2099749632 net.cpp:334] norm1 -> norm1\n",
+      "I0218 20:43:25.386819 2099749632 net.cpp:105] Setting up norm1\n",
+      "I0218 20:43:25.386832 2099749632 net.cpp:112] Top shape: 10 96 27 27 (699840)\n",
+      "I0218 20:43:25.386842 2099749632 layer_factory.hpp:74] Creating layer conv2\n",
+      "I0218 20:43:25.386852 2099749632 net.cpp:76] Creating Layer conv2\n",
+      "I0218 20:43:25.386865 2099749632 net.cpp:372] conv2 <- norm1\n",
+      "I0218 20:43:25.386878 2099749632 net.cpp:334] conv2 -> conv2\n",
+      "I0218 20:43:25.386899 2099749632 net.cpp:105] Setting up conv2\n",
+      "I0218 20:43:25.387024 2099749632 net.cpp:112] Top shape: 10 256 27 27 (1866240)\n",
+      "I0218 20:43:25.387042 2099749632 layer_factory.hpp:74] Creating layer relu2\n",
+      "I0218 20:43:25.387050 2099749632 net.cpp:76] Creating Layer relu2\n",
+      "I0218 20:43:25.387058 2099749632 net.cpp:372] relu2 <- conv2\n",
+      "I0218 20:43:25.387066 2099749632 net.cpp:323] relu2 -> conv2 (in-place)\n",
+      "I0218 20:43:25.387075 2099749632 net.cpp:105] Setting up relu2\n",
+      "I0218 20:43:25.387081 2099749632 net.cpp:112] Top shape: 10 256 27 27 (1866240)\n",
+      "I0218 20:43:25.387089 2099749632 layer_factory.hpp:74] Creating layer pool2\n",
+      "I0218 20:43:25.387097 2099749632 net.cpp:76] Creating Layer pool2\n",
+      "I0218 20:43:25.387104 2099749632 net.cpp:372] pool2 <- conv2\n",
+      "I0218 20:43:25.387112 2099749632 net.cpp:334] pool2 -> pool2\n",
+      "I0218 20:43:25.387121 2099749632 net.cpp:105] Setting up pool2\n",
+      "I0218 20:43:25.387130 2099749632 net.cpp:112] Top shape: 10 256 13 13 (432640)\n",
+      "I0218 20:43:25.387137 2099749632 layer_factory.hpp:74] Creating layer norm2\n",
+      "I0218 20:43:25.387145 2099749632 net.cpp:76] Creating Layer norm2\n",
+      "I0218 20:43:25.387152 2099749632 net.cpp:372] norm2 <- pool2\n",
+      "I0218 20:43:25.387161 2099749632 net.cpp:334] norm2 -> norm2\n",
+      "I0218 20:43:25.387168 2099749632 net.cpp:105] Setting up norm2\n",
+      "I0218 20:43:25.387176 2099749632 net.cpp:112] Top shape: 10 256 13 13 (432640)\n",
+      "I0218 20:43:25.387228 2099749632 layer_factory.hpp:74] Creating layer conv3\n",
+      "I0218 20:43:25.387249 2099749632 net.cpp:76] Creating Layer conv3\n",
+      "I0218 20:43:25.387258 2099749632 net.cpp:372] conv3 <- norm2\n",
+      "I0218 20:43:25.387266 2099749632 net.cpp:334] conv3 -> conv3\n",
+      "I0218 20:43:25.387276 2099749632 net.cpp:105] Setting up conv3\n",
+      "I0218 20:43:25.389375 2099749632 net.cpp:112] Top shape: 10 384 13 13 (648960)\n",
+      "I0218 20:43:25.389408 2099749632 layer_factory.hpp:74] Creating layer relu3\n",
+      "I0218 20:43:25.389421 2099749632 net.cpp:76] Creating Layer relu3\n",
+      "I0218 20:43:25.389430 2099749632 net.cpp:372] relu3 <- conv3\n",
+      "I0218 20:43:25.389438 2099749632 net.cpp:323] relu3 -> conv3 (in-place)\n",
+      "I0218 20:43:25.389447 2099749632 net.cpp:105] Setting up relu3\n",
+      "I0218 20:43:25.389456 2099749632 net.cpp:112] Top shape: 10 384 13 13 (648960)\n",
+      "I0218 20:43:25.389462 2099749632 layer_factory.hpp:74] Creating layer conv4\n",
+      "I0218 20:43:25.389472 2099749632 net.cpp:76] Creating Layer conv4\n",
+      "I0218 20:43:25.389478 2099749632 net.cpp:372] conv4 <- conv3\n",
+      "I0218 20:43:25.389487 2099749632 net.cpp:334] conv4 -> conv4\n",
+      "I0218 20:43:25.389497 2099749632 net.cpp:105] Setting up conv4\n",
+      "I0218 20:43:25.391810 2099749632 net.cpp:112] Top shape: 10 384 13 13 (648960)\n",
+      "I0218 20:43:25.391856 2099749632 layer_factory.hpp:74] Creating layer relu4\n",
+      "I0218 20:43:25.391871 2099749632 net.cpp:76] Creating Layer relu4\n",
+      "I0218 20:43:25.391880 2099749632 net.cpp:372] relu4 <- conv4\n",
+      "I0218 20:43:25.391888 2099749632 net.cpp:323] relu4 -> conv4 (in-place)\n",
+      "I0218 20:43:25.391898 2099749632 net.cpp:105] Setting up relu4\n",
+      "I0218 20:43:25.391906 2099749632 net.cpp:112] Top shape: 10 384 13 13 (648960)\n",
+      "I0218 20:43:25.391913 2099749632 layer_factory.hpp:74] Creating layer conv5\n",
+      "I0218 20:43:25.391923 2099749632 net.cpp:76] Creating Layer conv5\n",
+      "I0218 20:43:25.391929 2099749632 net.cpp:372] conv5 <- conv4\n",
+      "I0218 20:43:25.391937 2099749632 net.cpp:334] conv5 -> conv5\n",
+      "I0218 20:43:25.391947 2099749632 net.cpp:105] Setting up conv5\n",
+      "I0218 20:43:25.393072 2099749632 net.cpp:112] Top shape: 10 256 13 13 (432640)\n",
+      "I0218 20:43:25.393108 2099749632 layer_factory.hpp:74] Creating layer relu5\n",
+      "I0218 20:43:25.393122 2099749632 net.cpp:76] Creating Layer relu5\n",
+      "I0218 20:43:25.393129 2099749632 net.cpp:372] relu5 <- conv5\n",
+      "I0218 20:43:25.393138 2099749632 net.cpp:323] relu5 -> conv5 (in-place)\n",
+      "I0218 20:43:25.393148 2099749632 net.cpp:105] Setting up relu5\n",
+      "I0218 20:43:25.393157 2099749632 net.cpp:112] Top shape: 10 256 13 13 (432640)\n",
+      "I0218 20:43:25.393167 2099749632 layer_factory.hpp:74] Creating layer pool5\n",
+      "I0218 20:43:25.393175 2099749632 net.cpp:76] Creating Layer pool5\n",
+      "I0218 20:43:25.393182 2099749632 net.cpp:372] pool5 <- conv5\n",
+      "I0218 20:43:25.393190 2099749632 net.cpp:334] pool5 -> pool5\n",
+      "I0218 20:43:25.393199 2099749632 net.cpp:105] Setting up pool5\n",
+      "I0218 20:43:25.393209 2099749632 net.cpp:112] Top shape: 10 256 6 6 (92160)\n",
+      "I0218 20:43:25.393218 2099749632 layer_factory.hpp:74] Creating layer fc6\n",
+      "I0218 20:43:25.393226 2099749632 net.cpp:76] Creating Layer fc6\n",
+      "I0218 20:43:25.393232 2099749632 net.cpp:372] fc6 <- pool5\n",
+      "I0218 20:43:25.393240 2099749632 net.cpp:334] fc6 -> fc6\n",
+      "I0218 20:43:25.393249 2099749632 net.cpp:105] Setting up fc6\n",
+      "I0218 20:43:25.516396 2099749632 net.cpp:112] Top shape: 10 4096 1 1 (40960)\n",
+      "I0218 20:43:25.516445 2099749632 layer_factory.hpp:74] Creating layer relu6\n",
+      "I0218 20:43:25.516463 2099749632 net.cpp:76] Creating Layer relu6\n",
+      "I0218 20:43:25.516470 2099749632 net.cpp:372] relu6 <- fc6\n",
+      "I0218 20:43:25.516480 2099749632 net.cpp:323] relu6 -> fc6 (in-place)\n",
+      "I0218 20:43:25.516490 2099749632 net.cpp:105] Setting up relu6\n",
+      "I0218 20:43:25.516497 2099749632 net.cpp:112] Top shape: 10 4096 1 1 (40960)\n",
+      "I0218 20:43:25.516505 2099749632 layer_factory.hpp:74] Creating layer drop6\n",
+      "I0218 20:43:25.516515 2099749632 net.cpp:76] Creating Layer drop6\n",
+      "I0218 20:43:25.516521 2099749632 net.cpp:372] drop6 <- fc6\n",
+      "I0218 20:43:25.516530 2099749632 net.cpp:323] drop6 -> fc6 (in-place)\n",
+      "I0218 20:43:25.516538 2099749632 net.cpp:105] Setting up drop6\n",
+      "I0218 20:43:25.516557 2099749632 net.cpp:112] Top shape: 10 4096 1 1 (40960)\n",
+      "I0218 20:43:25.516566 2099749632 layer_factory.hpp:74] Creating layer fc7\n",
+      "I0218 20:43:25.516576 2099749632 net.cpp:76] Creating Layer fc7\n",
+      "I0218 20:43:25.516582 2099749632 net.cpp:372] fc7 <- fc6\n",
+      "I0218 20:43:25.516589 2099749632 net.cpp:334] fc7 -> fc7\n",
+      "I0218 20:43:25.516599 2099749632 net.cpp:105] Setting up fc7\n",
+      "I0218 20:43:25.604786 2099749632 net.cpp:112] Top shape: 10 4096 1 1 (40960)\n",
+      "I0218 20:43:25.604838 2099749632 layer_factory.hpp:74] Creating layer relu7\n",
+      "I0218 20:43:25.604852 2099749632 net.cpp:76] Creating Layer relu7\n",
+      "I0218 20:43:25.604859 2099749632 net.cpp:372] relu7 <- fc7\n",
+      "I0218 20:43:25.604868 2099749632 net.cpp:323] relu7 -> fc7 (in-place)\n",
+      "I0218 20:43:25.604878 2099749632 net.cpp:105] Setting up relu7\n",
+      "I0218 20:43:25.604885 2099749632 net.cpp:112] Top shape: 10 4096 1 1 (40960)\n",
+      "I0218 20:43:25.604893 2099749632 layer_factory.hpp:74] Creating layer drop7\n",
+      "I0218 20:43:25.604902 2099749632 net.cpp:76] Creating Layer drop7\n",
+      "I0218 20:43:25.604908 2099749632 net.cpp:372] drop7 <- fc7\n",
+      "I0218 20:43:25.604917 2099749632 net.cpp:323] drop7 -> fc7 (in-place)\n",
+      "I0218 20:43:25.604924 2099749632 net.cpp:105] Setting up drop7\n",
+      "I0218 20:43:25.604933 2099749632 net.cpp:112] Top shape: 10 4096 1 1 (40960)\n",
+      "I0218 20:43:25.604939 2099749632 layer_factory.hpp:74] Creating layer fc-rcnn\n",
+      "I0218 20:43:25.604948 2099749632 net.cpp:76] Creating Layer fc-rcnn\n",
+      "I0218 20:43:25.604954 2099749632 net.cpp:372] fc-rcnn <- fc7\n",
+      "I0218 20:43:25.604962 2099749632 net.cpp:334] fc-rcnn -> fc-rcnn\n",
+      "I0218 20:43:25.604971 2099749632 net.cpp:105] Setting up fc-rcnn\n",
+      "I0218 20:43:25.606878 2099749632 net.cpp:112] Top shape: 10 200 1 1 (2000)\n",
+      "I0218 20:43:25.606904 2099749632 net.cpp:165] fc-rcnn does not need backward computation.\n",
+      "I0218 20:43:25.606909 2099749632 net.cpp:165] drop7 does not need backward computation.\n",
+      "I0218 20:43:25.606916 2099749632 net.cpp:165] relu7 does not need backward computation.\n",
+      "I0218 20:43:25.606922 2099749632 net.cpp:165] fc7 does not need backward computation.\n",
+      "I0218 20:43:25.606928 2099749632 net.cpp:165] drop6 does not need backward computation.\n",
+      "I0218 20:43:25.606935 2099749632 net.cpp:165] relu6 does not need backward computation.\n",
+      "I0218 20:43:25.606940 2099749632 net.cpp:165] fc6 does not need backward computation.\n",
+      "I0218 20:43:25.606946 2099749632 net.cpp:165] pool5 does not need backward computation.\n",
+      "I0218 20:43:25.606952 2099749632 net.cpp:165] relu5 does not need backward computation.\n",
+      "I0218 20:43:25.606958 2099749632 net.cpp:165] conv5 does not need backward computation.\n",
+      "I0218 20:43:25.606964 2099749632 net.cpp:165] relu4 does not need backward computation.\n",
+      "I0218 20:43:25.606971 2099749632 net.cpp:165] conv4 does not need backward computation.\n",
+      "I0218 20:43:25.606976 2099749632 net.cpp:165] relu3 does not need backward computation.\n",
+      "I0218 20:43:25.606982 2099749632 net.cpp:165] conv3 does not need backward computation.\n",
+      "I0218 20:43:25.606988 2099749632 net.cpp:165] norm2 does not need backward computation.\n",
+      "I0218 20:43:25.606995 2099749632 net.cpp:165] pool2 does not need backward computation.\n",
+      "I0218 20:43:25.607002 2099749632 net.cpp:165] relu2 does not need backward computation.\n",
+      "I0218 20:43:25.607007 2099749632 net.cpp:165] conv2 does not need backward computation.\n",
+      "I0218 20:43:25.607013 2099749632 net.cpp:165] norm1 does not need backward computation.\n",
+      "I0218 20:43:25.607199 2099749632 net.cpp:165] pool1 does not need backward computation.\n",
+      "I0218 20:43:25.607213 2099749632 net.cpp:165] relu1 does not need backward computation.\n",
+      "I0218 20:43:25.607219 2099749632 net.cpp:165] conv1 does not need backward computation.\n",
+      "I0218 20:43:25.607225 2099749632 net.cpp:201] This network produces output fc-rcnn\n",
+      "I0218 20:43:25.607239 2099749632 net.cpp:446] Collecting Learning Rate and Weight Decay.\n",
+      "I0218 20:43:25.607255 2099749632 net.cpp:213] Network initialization done.\n",
+      "I0218 20:43:25.607262 2099749632 net.cpp:214] Memory required for data: 62425920\n",
+      "E0218 20:43:26.388214 2099749632 upgrade_proto.cpp:618] Attempting to upgrade input file specified using deprecated V1LayerParameter: ../models/bvlc_reference_rcnn_ilsvrc13/bvlc_reference_rcnn_ilsvrc13.caffemodel\n",
+      "I0218 20:43:27.089423 2099749632 upgrade_proto.cpp:626] Successfully upgraded file specified using deprecated V1LayerParameter\n",
+      "GPU mode\n",
+      "Loading input...\n",
+      "selective_search_rcnn({'/Users/shelhamer/h/desk/caffe/caffe-dev/examples/images/fish-bike.jpg'}, '/var/folders/bk/dtkn5qjd11bd17b2j36zplyw0000gp/T/tmpakaRLL.mat')\n",
+      "Processed 1570 windows in 102.895 s.\n",
+      "/Users/shelhamer/anaconda/lib/python2.7/site-packages/pandas/io/pytables.py:2453: PerformanceWarning: \n",
+      "your performance may suffer as PyTables will pickle object types that it cannot\n",
+      "map directly to c-types [inferred_type->mixed,key->block1_values] [items->['prediction']]\n",
+      "\n",
+      "  warnings.warn(ws, PerformanceWarning)\n",
+      "Saved to _temp/det_output.h5 in 0.298 s.\n"
+     ]
+    }
+   ],
+   "source": [
+    "!mkdir -p _temp\n",
+    "!echo `pwd`/images/fish-bike.jpg > _temp/det_input.txt\n",
+    "!../python/detect.py --crop_mode=selective_search --pretrained_model=../models/bvlc_reference_rcnn_ilsvrc13/bvlc_reference_rcnn_ilsvrc13.caffemodel --model_def=../models/bvlc_reference_rcnn_ilsvrc13/deploy.prototxt --gpu --raw_scale=255 _temp/det_input.txt _temp/det_output.h5"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This run was in GPU mode. For CPU mode detection, call `detect.py` without the `--gpu` argument.\n",
+    "\n",
+    "Running this outputs a DataFrame with the filenames, selected windows, and their detection scores to an HDF5 file.\n",
+    "(We only ran on one image, so the filenames will all be the same.)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(1570, 5)\n",
+      "prediction    [-2.62247, -2.84579, -2.85122, -3.20838, -1.94...\n",
+      "ymin                                                     79.846\n",
+      "xmin                                                       9.62\n",
+      "ymax                                                     246.31\n",
+      "xmax                                                    339.624\n",
+      "Name: /Users/shelhamer/h/desk/caffe/caffe-dev/examples/images/fish-bike.jpg, dtype: object\n"
+     ]
+    }
+   ],
+   "source": [
+    "import numpy as np\n",
+    "import pandas as pd\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline\n",
+    "\n",
+    "df = pd.read_hdf('_temp/det_output.h5', 'df')\n",
+    "print(df.shape)\n",
+    "print(df.iloc[0])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "1570 regions were proposed with the R-CNN configuration of selective search. The number of proposals will vary from image to image based on its contents and size -- selective search isn't scale invariant.\n",
+    "\n",
+    "In general, `detect.py` is most efficient when running on a lot of images: it first extracts window proposals for all of them, batches the windows for efficient GPU processing, and then outputs the results.\n",
+    "Simply list an image per line in the `images_file`, and it will process all of them.\n",
+    "\n",
+    "Although this guide gives an example of R-CNN ImageNet detection, `detect.py` is clever enough to adapt to different Caffe models’ input dimensions, batch size, and output categories. You can switch the model definition and pretrained model as desired. Refer to `python detect.py --help` for the parameters to describe your data set. There's no need for hardcoding.\n",
+    "\n",
+    "Anyway, let's now load the ILSVRC13 detection class names and make a DataFrame of the predictions. Note you'll need the auxiliary ilsvrc2012 data fetched by `data/ilsvrc12/get_ilsvrc12_aux.sh`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "name\n",
+      "accordion      -2.622471\n",
+      "airplane       -2.845788\n",
+      "ant            -2.851219\n",
+      "antelope       -3.208377\n",
+      "apple          -1.949950\n",
+      "armadillo      -2.472935\n",
+      "artichoke      -2.201684\n",
+      "axe            -2.327404\n",
+      "baby bed       -2.737925\n",
+      "backpack       -2.176763\n",
+      "bagel          -2.681061\n",
+      "balance beam   -2.722538\n",
+      "banana         -2.390628\n",
+      "band aid       -1.598909\n",
+      "banjo          -2.298197\n",
+      "...\n",
+      "trombone        -2.582361\n",
+      "trumpet         -2.352853\n",
+      "turtle          -2.360859\n",
+      "tv or monitor   -2.761043\n",
+      "unicycle        -2.218467\n",
+      "vacuum          -1.907717\n",
+      "violin          -2.757079\n",
+      "volleyball      -2.723689\n",
+      "waffle iron     -2.418540\n",
+      "washer          -2.408994\n",
+      "water bottle    -2.174899\n",
+      "watercraft      -2.837425\n",
+      "whale           -3.120338\n",
+      "wine bottle     -2.772960\n",
+      "zebra           -2.742913\n",
+      "Name: 0, Length: 200, dtype: float32\n"
+     ]
+    }
+   ],
+   "source": [
+    "with open('../data/ilsvrc12/det_synset_words.txt') as f:\n",
+    "    labels_df = pd.DataFrame([\n",
+    "        {\n",
+    "            'synset_id': l.strip().split(' ')[0],\n",
+    "            'name': ' '.join(l.strip().split(' ')[1:]).split(',')[0]\n",
+    "        }\n",
+    "        for l in f.readlines()\n",
+    "    ])\n",
+    "labels_df.sort('synset_id')\n",
+    "predictions_df = pd.DataFrame(np.vstack(df.prediction.values), columns=labels_df['name'])\n",
+    "print(predictions_df.iloc[0])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's look at the activations."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "<matplotlib.text.Text at 0x114f15f90>"
+      ]
+     },
+     "execution_count": 4,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x114254b50>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAALMAAAOoCAYAAACa7cU2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvUmMZel1Jvbd9+6b75vnMV5MGZlZmSlmJYulEkWCpIQ2\n",
+       "e+O2V+0GBBE25AXhCTQBEe2FbEAbw4AhayFIC9NA24s2BAluSAuBLUEDSIKqYjIzq3KKOd48z/fN\n",
+       "0/Ui8juMYIsSkZESuwJ5gUJlRUW+8b//f843HcUwDLy93l7X4TL9vF/A2+vt9aaut4v57XVtrreL\n",
+       "+e11ba63i/ntdW2ut4v57XVtrreL+e11ba5rsZgVRfmqoij7iqIcKYryrX+k58gpivKJoiiPFUX5\n",
+       "6NXPAoqi/LmiKIeKovx7RVF8V3j8/0tRlLqiKE8v/OynPr6iKP/61fvdVxTln73B5/xfFEUpvXqf\n",
+       "jxVF+edv6jkVRUkrivJXiqI8VxTlmaIo//0bfZ+GYXyq/wFgBnAMIAvAAuAJgFv/CM9zBiDwEz/7\n",
+       "3wD85qs/fwvA/3qFx/8CgPsAnv5Djw/g9qv3aXn1vo8BmN7Qc/7PAP7Hv+N3r/ycAGIAPvPqzxqA\n",
+       "AwC33tT7vA478+cAHBuGkTMMYwHg/wXwL/6Rnkv5if/+TwH8m1d//jcA/rPXfWDDML4LoPszPv6/\n",
+       "APBvDcNYGIaRw/mX/Lk39JzAf/g+38hzGoZRMwzjyas/DwG8BJDEG3qf12ExJwEUL/x36dXP3vRl\n",
+       "APgLRVEeKoryX7/6WdQwjPqrP9cBRN/wc/60x0/g/H3yetPv+b9TFOVjRVG+feHIf6PPqShKFuen\n",
+       "wod4Q+/zOizmfyo+/vOGYdwH8M8B/DeKonzh0os4Pxf/0V7Lz/D4b+q5fx/AJoDPAKgC+N/f9HMq\n",
+       "iqIB+GMA/4NhGPqlB7zC+7wOi7kMIH3hv9O4fDe/kcswjOqrfzcB/H84P+7qiqLEAEBRlDiAxht+\n",
+       "2p/2+D/5nlOvfnblyzCMhvHqAvB/4sfH+ht5TkVRLDhfyP+PYRj/7tWP38j7vA6L+SGAXUVRsoqi\n",
+       "WAH8SwB/8iafQFEUp6Io7ld/dgH4ZwCevnqer736ta8B+Hd/9yO89vXTHv9PAPwXiqJYFUXZBLAL\n",
+       "4KM38YSvFhOv/xzn7/ONPKeiKAqAbwN4YRjG/3Hhf72Z9/mmu/6fxz84P/oPcN4g/Ot/hMffxHlX\n",
+       "/QTAMz4HgACAvwBwCODfA/Bd4Tn+LYAKgDnOe4D/8u97fAD/06v3uw/gP3lDz/lfAfi/AXwC4ONX\n",
+       "iyr6pp4TwC8DWL/6HB+/+uerb+p9Kq/+wtvr7fWpvz41ZcY/BTHy9vp0X5+KnVlRFDPOy4hfxXkD\n",
+       "8EMA/8owjJc/1xf29vqP6vq07Mz/lMTI2+tTen1aFvM/FTHy9voUX5+Wxfwffy309vq5X+rP+wX8\n",
+       "jNc/SIwoivJ2wV+TyzCMv0sb8g9en5bFLMQIznHRfwngX/3kL/36r/86gsEgJpMJRqMRNE3DcrlE\n",
+       "JpOBruvQdR2TyQTVahWbm5tYr9eo1+vQNA07OzsYjUbodrtQFAWz2Qwmkwnz+Rxmsxmr1Qq3b99G\n",
+       "Pp+H3W7HbDbD3/zN3+BXfuVXsFqt4Pf7USqVYLFYsFqt4HQ6MZlMEAwGMRgM5OcAEI/Hoes6LBYL\n",
+       "3G43yuUyFEWBruvY3t7GeDxGsViExWLB2dkZ7t69i1qthmazid3dXUynU6zXa7zzzjs4PDyE2+3G\n",
+       "YDCQz2E6ncJqtSIcDmMymaDf72NzcxPL5RI+nw/Hx8eYTqfIZDLI5XJwuVxQFAXBYBBnZ2cIhUII\n",
+       "hUL48z//c3zhC19AuVxGJBLBdDpFs9lEIpHAcrlEKpXCcrnEy5cvsbe3h3w+j42NDVgsFjx79gzh\n",
+       "cBiapgEAGo1zUm8ymcDlcmGxWCAWi+Hs7AyJRAIWiwWTyQS/93u/99qL5FOxmA3DWCqK8t8C+A7O\n",
+       "JZ/f/ruQjGg0Kov3ww8/RDQahWEYWK/XmM1mCAQCMJvNsFgsMJvNsFqtGI/HuHnzJvr9PqxWK6xW\n",
+       "K7rdLrLZLCqVCvb29tBqtRCJRAAAe3t7mEwmMJvNePbsGex2O/x+P0ajEaxWK2KxGOx2O6rVKpLJ\n",
+       "JFRVRSAQwHg8BgCMRiPEYjGMx2N0Oh3EYjEoigKz2YxwOCwL0zAMqKqKBw8eYDwew+PxoFarYWNj\n",
+       "AwBQrVZhs9kQi8VgsVhgt9sxHo/h9XqRTCZRqVQwGo1gs9nks2k2m5jP50gkEmi321BVFaFQCG63\n",
+       "G91uF5FIBKvVCuFwGKvVClarFT7fuc7I4XDAMAwEAgHM53M4HA4AwHK5RDKZxHK5xMbGBlRVhaqq\n",
+       "cLvdcLlcMJlMMJvNsNvt2NzcRKvVgtlslsdaLBZwOp1wuVzo9/tXWiefisUMAIZh/BmAP/v7fqfX\n",
+       "68HpdOLg4ADL5RIA8OGHH+Lzn/88qtUqarUaYrEYFosFUqkUjo6OoOs6Dg8PEQ6H0el0oCgKKpUK\n",
+       "HA4HTk9PYbVasVwu4XQ6oes61us17HY7Go0GdF3HfD7H8+fP5Uv86KOP8ODBA7RaLVgsFqiqikaj\n",
+       "gfV6LQsrl8vh+fPnePfdd1GtVrFarTCfzzGfz/HRRx/B5XJhOp1iPB5jPB6j1WrB7XZD13UcHBzA\n",
+       "brejVqvB6XTCZDJhOBzK7j8YDHBycoJAIIBu91zdWSwW4fF40Ov1sLm5ibOzMwQCAUwmE/R6Pezv\n",
+       "7yMcDuPly5cYDocol8t49913USqVUCgUoKoqjo+PYbfb0ev15JTp9/tyk4xGI2xsbGA8HsNkMiGX\n",
+       "yyEUCkHXdTSbTaxWKyyXS8xmM4xGI/h8PhwdHaHX6yEYDMLn8+HZs2dXWiOfCpz5Z7kURTF++7d/\n",
+       "G4ZhoNvtYjQaIZ1Oo91uY2trC5VKBeFwGGdnZ5hOp3A4HLDb7TAMAxaLBdPpFH6/H8PhEGazGTab\n",
+       "Dfl8HtFoVH6fi3E6ncLtduPRo0e4c+cOOp0OQqEQhsMhfD4fFosFrFYrCoUC/H4/1uu1PE6n05Ej\n",
+       "VdM0BAIBDAYDLBYLeDwetNttWK1W9Pt9KZNMJhM0TcMPfvADpFIpWCwWZDIZ9Ho9WK1WOJ1O7O/v\n",
+       "y07o9XqxXq8xHo/l35ubm+h0OvB4PPjkk08Qj8cRiURQLpdhs9ng8XhgtVpRqVSgqirS6TSePXuG\n",
+       "W7duYTabYTgcYj6fYzabwW63YzAYYGtrC4ZhwOl0olw+1/+EQiEAkMdar9eIRCLodruYTCawWq3w\n",
+       "er0wmUxot9sIhUI4OTlBMBhEr9fDt7/97WtfM/9MV6lUQiAQOOfpX9W9rInr9TpyuRwikQj6/T68\n",
+       "Xi+Ojo4wGo2QTJ6jfL1eD6FQCC9evEA6nUav14PP50O1WsWtW7dgGAZarRZ8Pp/8P5PJhMlkglwu\n",
+       "J19gMpnE0dEREokEFosFRqMRZrMZxuMxptMpstksarUa4vG47FZOpxNnZ2cAAEVRsFgsMB6PsVwu\n",
+       "MZ/PsVqtsLGxAZPJBIfDgSdPniCbzWI+n2M8HsPpdGK5XMJsNiOXy0FVVVitVqiqKrv+aDSCyWRC\n",
+       "IpGAruvweDyw2+1oNpswDEMWWDgcxnK5hMPhwHw+x2KxQK/Xg6ZpGAwGUFUV0+kU/X4fLpcLx8fH\n",
+       "0DQNi8UCuq5DURScnp4iHA7D6XSiUChgPp/D5XLBbrejXq/Ld1apVGCz2aCqKkqlq4kdr9ViBs53\n",
+       "TTZAZrMZOzs7cLvd8Hg8ODk5gcPhkF00FArB4XDA7XbDbDYjEonAbDZDVVVYLBYoioJoNAqn0yk7\n",
+       "kqIoGI/HWCwWOBeBQR6n2WwiHo/DbDZjd3cXzWYTZrMZbrcbJpNJSpn5fI5kMgmTyQSTyYTFYoHV\n",
+       "agWXyyVlDGvWWq2G7e1tFAoFWK1WaRR9Ph+8Xi/6/T4Mw5Bd2WKxYDweIxAIYL1ew+v1wmw2o1Qq\n",
+       "SX08nU7h8XjkebPZLMxmM3RdRzabxXq9hqqqSKVSACBlgaZpUBRFPlu73Q6r1QqPx4NQKIRqtYpQ\n",
+       "KIT5fI6trS10u13MZjNYrVbY7Xb5ffYf8/kcxWJRmsadnR185zvfee3v/tOCM/9MF5smRVHQ7/dh\n",
+       "NpuxXC5ht9vR7Xbh9Xphs9mwXC5htVrx1a9+Fd1uFw6HA2azGYPBAI1GA4PBAIqi4HOf+xyq1SpG\n",
+       "oxFarRZWq5U0VYPBAE6nE6qqYjKZSNPW7/dlNwuFQtLscFcfDodwOBzweDwAIGUNj/H1eo1er4fV\n",
+       "aoXVaiWPabFYMJvNYDab4fF4kEqlMB6PYbfbpQTyeDyYzWZYrVZwOBzSYOm6jnA4LI3vbDbDfD5H\n",
+       "NBpFq9XCbDZDs9kUlMZsNmMymWCxWMhn2263MZvNMBgMYDKZpNZlGeNyuaSkYt3MTcTj8cBisQCA\n",
+       "IES9Xg+pVAo2mw3D4RBOpxOtVutK3/+1WsysLa1Wq+yohmFgPp/DZrPB7XYDOD/GnU6ndOVEAzY2\n",
+       "NhAOh2G322XRcDcOBoNwuVwIhUKw2WwIh8NYLBYwm81wOBxwuVxyUywWC2iaJjeX1WqVXZNNm2EY\n",
+       "srupqgqXywWn04lMJgOXy4VgMCiPrWkanE6noB28WQ3DwHK5hKqqMJvNePfdd2XnBM7rVpvNBk3T\n",
+       "4Pf7oWkabDYbHA6H1KhutxsWiwWhUEhe33w+h8VigdfrBQAsFguEw2HM53NYrVZsbW3B7/dDVVXM\n",
+       "ZjN5TS6XC2azGYFAAG63G5qmyfs2mUxyeqxWK/h8PkwmE0GDptMpAoHAlb7/a1Vm+P1+NJtNRKNR\n",
+       "PHr0CGazGb1eDzabDfV6XdCF2WyG9XqNv/zLv0Sz2ZRFeXx8jMFgICjCs2fPsLe3h8FgII0aS47Z\n",
+       "bAabzYaTkxP4/X40Gg1MJhO43W7BnK1Wq9wo/X4fTqdTXt/x8TGcTicSiQSazabsvIFAAIqi4OXL\n",
+       "l1BVFU6nE5VKBfP5HPF4HLPZDKFQCE+fPsWNGzcwHA4FzTg4OJCbtt1u4+TkBKFQCGdnZ9IrLJdL\n",
+       "aJqGYrGIRCKB0Wgk5cb29jb6/b6UT/l8HjabDWazGe12G+l0GoPBAB9++CHm8zny+TxcLhcmkwkK\n",
+       "hQLW6zVWqxXi8TgePnyIjY0NubGazSYAyG7PGl5VVTSbTdksrnJdq515Op3CYrGgWCxCVVX0+32B\n",
+       "zLjzzedz+fdyuUSv15MjdTabwe/3S40XCATQ7/exXq/R6XQQCASQy+Wk8ZlOp4jH42g0GrIDLhYL\n",
+       "tNttAOclxGKxQKVSgaIo6HQ6cLlcsjNFo1GsViu43W44nU5p3nRdh6ZpSCQSUFUVJpMJNpsNTqcT\n",
+       "0+kUpVJJmsThcAhN0zCfz6FpmpA0q9UKHo8HqqrC7/djPB7LydHr9aSpZIngcrnQ7XYxHo8xm83+\n",
+       "gxPF7/fL52mz2WC326WvGAwGcDgc0HVdyCI2ydPpVH6X5RPret7kqqpitVphOBxe6fu/Vjvzer2W\n",
+       "49Lv98NisQj05na75YvvdDpIJBJYr9d47733pHmJx+NYr9dIpVKYzWYoFovY2toCcF6Pj0YjbG9v\n",
+       "S2OjaZqgGiQKLBYLut0u1us14vE4xuOx7KgOhwOTyQTr9VoayPV6LSXJYDBALBaDw+GQXXQ+n8Pv\n",
+       "96Pb7Qoyw3LAbDYLqREKhTCZTORGYelhs9nQ7XYFGw6FQtLMBgIBYfF4IiiKAsMwpDEeDodCrhiG\n",
+       "gXq9jnA4jHa7LU0r2UE2qHa7XW5Aj8cjJVKr1bpUang8HgyHQ6TTaSwWC8Tj8b/v6/0Hr2u1mE0m\n",
+       "k+woxD+5aNxut1DUxItZP6qqKrUxG6pOp4Pt7W24XC4YhiEd/Hq9lkXCGhKANFeqqsLn8wmbdfFx\n",
+       "AUhzyNqWmDRJFTaLLIlWq5XU1g6HA+v1GhaLRd6D1+uVJtLv98vuzTp2PB7LIluv1/B4PBgMBphM\n",
+       "JlAURero4XAIr9eL0WgEv98PAPJY7D2IcvDzWK/X8lqXyyXcbjfcbjfsdvulv8Od2el0ygkzHA4F\n",
+       "c+Z3papXW47XajGzk2ZpQJhuc3MTx8fHWC6XWCwWwuixM282m3jvvffw9OlTwTwbjYZguuFwGPF4\n",
+       "HIVCAcC5vsDpdEq5wWOVdaPJZEIkEkEul8POzo7UxKVSCR6PB4FAAHa7HaVSCZqm4dmzZ4jFYvB6\n",
+       "vahUKrDb7Wi323A4HGi320gmk7LYisWisHAejwf5fB5+vx/lchnhcBi6rqNYLAqGHY1Gkc/nEQ6H\n",
+       "oaoqOp0ODg4OEAgEUKlUMBwOMR6PsVqtBKPnjdput6HrOnq9HgDAbDajXC5jZ2cHiqLAYrFgvV4L\n",
+       "IhIKhdDpdLBcLmEYhpAr7EN8Ph8Mw8CLFy8QiUTw5MkTAMDZ2RmSySSKxeJP/W5/luta1cxOpxPj\n",
+       "8RixWEzqVKvVina7DcMwpKsn6D+bzfDw4UOEw2EUi0U4nU6MRiM5Svv9PsLhMGq1GlqtljR0qqpC\n",
+       "13Wpm/1+P3Rdx3K5hMfjQSwWQ7vdRiQSga7r2Nvbw3q9RjqdhslkQjKZxHQ6xXA4hN/vh8/nkzJg\n",
+       "d3dXdCOTyQSxWAyTyURecywWw+7uruy4fr8fs9kMFosFuq5jMBjg/fffF4hO13XRSRCmS6VS6HQ6\n",
+       "iEajGI/Hsjun02lYrVbMZjNZkJFIRAgZAPD5fGg0GqjVagDO+wKWEzxV/H4/er0eDMOQfoBwKLH7\n",
+       "1WqFdDoNt9uNYDAopchVrmu1mGu1GtbrNUqlEsLhMBRFkcVG/JaKMzaELD/YlKTTafh8PmGsyOwN\n",
+       "BgP0+33ZzQFIudHr9aSUaLVaODw8hNfrRb1ex3K5xJMnT2AymdDv9zGZTHB6eio0b7FYFChwNpvh\n",
+       "0aNHMAxDVHXtdhsulwuDwQC6rmOxWODjjz+G2+3GfD4XjDYYDMJms8HlcuG73/2uqNXYWBUKBei6\n",
+       "jtlshnw+D6/XK83ZcrnEcDjEZDIRWI6NcbValROO+DNvDuLgFFlxMxgMBvD5fFLnt9ttmEwmjMdj\n",
+       "jEYjHB4eYjgcYr1e4/Hjx6Ihefr06U/9bn+W61otZqfTKX8mRTsej9FsNqEoijRmq9VKSpJsNovx\n",
+       "eCyNT7PZRKPREE2E3+8XUkNVVazXawCQsuHil93pdAAAHo8H0+kUiqLA5/MJ8rBer+F2u4UmXq1W\n",
+       "UFUVvV5PbrZQKITFYgG32y2NIutWvg5FUdBoNDCdTjGZTDCdTlEoFGCz2bBYLEQItVwuMRqNBFdm\n",
+       "4xcMBjGfz6HrupBFZrMZo9EIiqKgXq9Lv8FewWQyiUSWRBBJlOl0ilqtJiUStSV8zYZhCPyn6zqi\n",
+       "0Sg8Hg8ajQbS6TTMZjO63a7cgK97Xaua2eFwwOl0QtM0qV0TiYRQupubm2g2m5hMJjAMAz6fDz/6\n",
+       "0Y/wwQcfwGw2I5lMXmLLBoMB7Ha71MSBQABHR0eIRqMYDocIBoOiGdY0TZAFRVEwn8+xsbGB1WqF\n",
+       "RCIhwiHgfLFzB/R4PFgul7DZbMKasWZnHcuTw2KxIB6PC1tHUoOUOUsVljOKoggjarfbkU6nhSRq\n",
+       "NpsCnyWTScxmM3g8Hui6Lo1kPB4XEVQoFBJSio/BRtFmswniYjaboWkaXC4XbDYbfD4f/H4/isWi\n",
+       "vCaSKNSZK4oCm82Gmzdv4k//9E9f+/u/dos5l8vBZDKhVqvh7t27KBaLWK/XGA6HGI1GsrPlcjkE\n",
+       "g0FomoZSqQSv1wuHw4HVaoXHjx/j/v378Pl8cDqd6Ha7cDqdqNVq8Hq9mEwmoqngEdnr9VAoFHDj\n",
+       "xg2B56bTqezmhLFI5ebzedE7T6dT2elY7qiqinK5jGQyiZOTE6xWK9y4cQOHh4fweDzSXBIrnkwm\n",
+       "cnpwxyaeznKgXC7D4/FgNBqhUqkgEAhgOp0CgLCSy+US7XYbwWAQR0dH2NjYQLlchqqqqFQqiEQi\n",
+       "sNlsWK/XmEwmcDgcmM1moq9erVZot9tot9twOp1oNBoYjUZYLpeIxWJSdrA273a7cLvdWK1Worx7\n",
+       "3etalRnD4RCbm5vw+/0iGHe73QgEAhiNRqIeo5Ccu5LL5cKtW7dEdXbv3j0Mh0O0Wi2RcdINwSPU\n",
+       "7XbLMUwMOZVKYTQaoVqtIhaLYTqdIhKJiAC9VCpJ+bGxsSEEA3c/KtiIDft8PpTLZezu7iKTycDj\n",
+       "8cBkMsnuSRbN6/UKBOjxeFAul+FyuTCbzeDz+VAsFrFYLOD1euU5U6kUdF0X3cpsNpP6PJFIwOFw\n",
+       "wO/3o91uy3/funULq9UKkUhEqHUKmogds3zLZDIAzqFJ1vadTgd2ux3T6RSapsmmQOLkqqTJtVrM\n",
+       "7XZbFqvdbpcOm1RwIBCAx+MRwXssFsPjx4/lg9za2oKmaUJMRKNRuFwujMdjHB0dwePxoNPpwOFw\n",
+       "oFqtyvHudrsRj8eFdGADSf1BOBxGt9tFNBqVGpT4tMfjQTgcFtYxlUpBVVURIlFmSnbT5/NJrc33\n",
+       "xrqaKA4RGyrzCO2FQiGkUikpbagr5o1FRISCqK2tLYHXbDab3MyUdhLNIbFDvD6ZTEofwBKE6Mx0\n",
+       "OhWxF0mqUCgERVFEivu617VazGazGfP5XHxvg8EA9Xodi8UCtVoNz549E9IDAMbjMba3t2EYBiqV\n",
+       "Cl6+fInpdIoXL16IBpq1MzUEPGLJMNKZksvl0G63BV1ot9sYjUYYj8fQdR0Oh0OazNVqhWKxKN5C\n",
+       "Noe6rqNWq8nPCHV1u120Wi2Uy2VRq5VKJZjNZgSDQdlhy+WynCAX3StUrD158gSnp6eo1+uXqP56\n",
+       "vS6wGZtNq9WK58+fAzgnZpbLJdbrNebzOUqlEvb39wWLXywWODw8FJJmMpkgn89jsVig2Wzi5ORE\n",
+       "UCAqGXu9Hk5PT9Hv99FqtaCqKqrV6pW+/2u1mCORCFwuF3w+H0ajkSjnVFWVJovsVyQSQbvdRqvV\n",
+       "EqaK/55Op6JfIBxFFotoxnA4hK7rGA6H4vEjPOXxeKAoCvx+v/gDqZleLBaCUxN9GI/HcLvd8Pl8\n",
+       "QhvPZjOB4rrdLoLBoODYbKAoGgoGg7Db7fD5fJcE/Y1GA51OB91uV9R5s9kMsVhMqGoSSe12Wyhp\n",
+       "ylR1/Tw6mWUP6+/5fC6NJckQi8UiiEWn04HNZpOTiPix1WqFw+EQ/yB9jzRRXNX1dK0Ws6ZpmE6n\n",
+       "aLfbl2hk0rar1Qper1dqaE3TcHZ2JsJzr9crOmIC/Wze6NhgM3PRga0oimh8TSaT1MCDwUBkmPy7\n",
+       "3IFtNpuIepLJJObzOabTqezOFotFdnNN0zAej+XUASC7OmtOLloK/slwkvVrNBpCPfN1ES9nOTAe\n",
+       "j2VRDodDaVrNZjMURRFsmLs9kYtOp4P1eo1+vy/mB/4+f5dMIfXgiUQCNpsNtVpNTAlkWF/3ulZo\n",
+       "Rr1el91tMBig2+1K7enxeBCNRtHtdkXEQ8yVWloSILdu3ZJaDoAIheiqJipBAU6n08Hm5qbsyGyw\n",
+       "eAOQRSS6QDobgCwGs9ksNTjNqIvFQrQhPAE0TUO1WkU2m4XdbkelUhG4zO/3X8J6b9y4IYL/RCIh\n",
+       "2giTySS753w+RyaTkViEcrkszpjPfe5zggbFYjHpLUKhkLhVgPO6nvpoOtxpzYpEIvD5fJjNZlJi\n",
+       "kZ0kscVa/Etf+pJQ3K9zXaudmdrcTqcDt9uNbDYrWgPWoRQWKYqCDz74AMlkUnYpTdOkIYvH4/jq\n",
+       "V78q7JzdbsdisYDP5xPBDUVNbH646EwmE6LRqLwmAPI7VPI5nU4EAgE4HA7ZTe12O2KxmEhAicg4\n",
+       "nU4pc4hFc/EHg0E4HA4kEgncv39fMGZN05DJZOD1euFyuUTUFAqF0G634ff7YTKZ4PF44PV65X0F\n",
+       "AgFp7Ig1s8lkbZ9IJIRip1fQ7XYjkUgglUrBbDYjkUhgd3dXSCHGMPCz9Xg84lRxuVxwu91y6rzu\n",
+       "da12ZtZoLpdLsNHZbCY6iKOjI/HXhUIh/PEf/zHW6zWq1So8Hg+Oj49FaE58l7tZpVKB1+uV0Bab\n",
+       "zYZisYhWqyVlAMX7lEo2Gg3cu3cPz549g9PpvKTlpZ53tVrh+PgYiUQCxWIR4XBYMGHi41arFdPp\n",
+       "VBRt/X4fs9kMiURC7GC1Wk0sXGwu9/f3BcqLRqMoFAoCI/I1dDodtFotKU2sVis6nQ5u376N4+Nj\n",
+       "IW5arZb0EnyP/X5fmrxGowG73S4Sz3a7Lbgy3zOpbxJWjx49gqIo+OSTT7C9vX2lXRm4Zjtzu91G\n",
+       "rVbDYrHAfD5Hv98XQqLT6WBra0vw0fl8ji9+8Yt49OgRIpEIXr58KU0cpZcX3dW0OdGhMRgMEA6H\n",
+       "EQqF0Gq1xJ1BnJW7UavVgtPpFC3zfD4Xva/b7UY4HMbW1pZIJJllQQlpLBZDsVjEdDoVhIP2LcYK\n",
+       "UHJZq9UwnU4RjUZFl00cnbEDg8EAo9EIjUZDJJ5kEllWud1uKQlcLhd6vR50XZfAmuVyiUKhIKcP\n",
+       "yyOq69gf1Ot1iXS4GMjD+p4nTyQSkRPuKte1Wsy03qTTafkivF4vFosFgsEgAIhmVlVVPHv2DOl0\n",
+       "GhaLBRsbG6KpCAQC8Pv92N/fRyKRgKIoyGazcpwTXaDTeWdnR7x6wWBQHM4+nw/L5RI3b94UBRoA\n",
+       "WYilUklwXOpAdnd3EQqFLkUm0FOXzWbhdrslwosGXYryl8ulwI3L5VKc4IQSWTdHIhHBsi9mh2ia\n",
+       "Jhg4dd/UUbPuZbCL1+uVG8Ln88FqtYrLmyekz+cTcT4p+Gg0is3NTSmVWNZomobd3d0rff/XajGz\n",
+       "ZqVgxuFw4OTkRPBTpgQRnXA4HBIYU61WoSgKVquVeO6azSYGg4FkU1itVvGymUwmie1iidFsNtHr\n",
+       "9dBut9HpdESo/9FHHwnNPBgMxPHN0BdCdoPBAJ1OB7quiyi/Wq2i1WqJvrjX6wnaQTgRAE5PT+V1\n",
+       "7e/vYzabSRQWSwGSLLQvUWjEXZg0NJnQfD6PwWAgbGi9Xpe8DfoOWdpczOVot9soFApSVhACrVQq\n",
+       "osY7OjrCfD6XaLLxeIxKpXKl7/9aJRp985vfFI1ur9cThosZEbTs12o1JJNJgag0TRNLFb9ILiiH\n",
+       "wyGCI3rpgXcGAAAgAElEQVT8JpOJEBOkcG02m7BkrG8pKGK9DEB+3u12EQ6HRePAPAzu+Iz+oqKP\n",
+       "aAbdNIS8qMjTdV1uUGo82MgVCgVsbW1hPB4jGAxKk0yh0avPDy6XS3QZ4XAYw+FQtCcMhmk2m4jF\n",
+       "YqLvZsadw+GQml5VVezv78Pv98Nms8FqtULXdXHAOJ1O1Ot1JJNJiQaj7ew3f/M33yYaAYCu69JY\n",
+       "nZycSDLl3t4eLBYLjo+PEYlEUK/XxdVRqVRw7949SdqZzWY4PT3FvXv3cHh4iLt37+L58+e4ceOG\n",
+       "7JT0FLLZ4S5DnDsUCmE0GgkzV6/X4ff7JamzWq1KXsfFpspms2E6nUpjyXqTATOVSgXRaBTBYBD1\n",
+       "el1yKVhTU2HHBKaXL1/CbDbj6dOnl2IB6ERZLBaCP5P27nQ6gpfTCeP1evGjH/1IwhuZqKqqKj7z\n",
+       "mc/g8PAQAETu6nK5hNUbDAaX3Oq8eb1eLwaDAcrlsqA3xL1f97pWO/PXvvY1JBIJ1Go1BAIBbG9v\n",
+       "4+DgAPF4HNVqVaSh/JIePHiAP/qjP8KdO3dgsViQzWYl7GU2m+HevXv45JNPMBgMJLtiMBgIwwYA\n",
+       "iURCcjlqtZrseKlUCt1uV8oAitsXiwUCgYDoGEwmE6rVqsgj4/E4crmcMJfNZlN2cmZpsJkju0gC\n",
+       "5f3338fDhw/RarWQTCYRDAbRarUEWQCAZDKJH/zgBwiFQlBVVVRrhmFI7U1vZDweR6fTQaFQgMlk\n",
+       "ElSFFrNYLCaUORlKn88HXdeF9ez1egLrsYcg6hMIBEQF6HA40Gg08Ad/8Advd2bgx9FcpFSdTieq\n",
+       "1aqI0UulksBS6/UaP/rRj6DrunzZh4eHUlc7nU5873vfE7tQrVaTxxmPx7KAWq0WWq0Wtre3MRgM\n",
+       "MJvNBB4DzmWpvV5PvsCL7ufBYCBIQr1eRyQSkXqXiaTdblfez0WTLBV79XodLpcLnU5HYDDWyuv1\n",
+       "Gt1uF+VyWUoS0vPtdhs2m008fvP5HF6vV3LvaMotl8tCqe/u7iKfz6NYLEoQjcPhwPPnz8UCRSnq\n",
+       "ixcv8MEHH6Db7Qpt3mg0xDNZLBZx69YtqdEZA3yV61ot5mg0KouDH2AmkxGigOk5JCBu376NFy9e\n",
+       "IJvNSo1NyxDF4kyyVFVVogXS6TRqtRqsVqugDvxiWfuFQiHx11HySScKPXB0fvDE+MnaXFEU7O7u\n",
+       "4uTkRBADZiiTfibyQnJntVoJ3svXzNDvi3Af47EODw8lDIYQnsfjgaZp6Ha7SCQSAktyx2bN/eUv\n",
+       "f1nqcU3T5DGcTifef/99kafyJGK/QQOC3W6XBClmlFzlulZoRq/XEyuPx+MRmpbZxQxL6Xa7smPQ\n",
+       "t+Z2u+Vo3tnZgd/vF3aOZYTD4YDVakWv1xP2i6whAxNXq5UIdoipmkwm1Ot1YQ3JVHIRs6EMBAK4\n",
+       "deuWiIbozmaJwKkA3W5XUBXutIZh4Ctf+YqUIQ6HQwwHjF7Y2NgQ9i4ajUozCkCaRWos+v0+QqEQ\n",
+       "lsulnG6NRgOBQAC3b99GLBaTm5PiLjpZmF1y0aDK9CLqmQnZ8e8DkI3oda9rtTPv7u7KEdtsNiWN\n",
+       "nYJ1TdPQbDYxHo+xtbUlwd1M62Rj+MMf/hDpdBqPHj3CrVu3UK/Xsbe3h1KphF6vh93dXXQ6HRmZ\n",
+       "cHBwIOUD0ZOHDx+K1arX62E0GkniPj1z29vbIuJxOBwSaUvPIp0cz58/lxuLzN/BwQHu3LkDj8eD\n",
+       "SqWC09NTiR2j8IdwHQmc09NTsXDVajXs7OwIDBkIBBAIBCQrOZ1O46OPPoLf74eiKOIkyeVyAjHy\n",
+       "FKDemzkYmqah0WggEolITV+r1QR/7vV6YqAlW8v6+SrXtWoAf+3Xfg3pdBqNRgN+vx/b29t48eIF\n",
+       "EomE0K0AUCgU4Ha78d577+EP//APcefOHQlOrNfrYiS9d+8eHj9+LLFTbrdbRErUKaTTaZGKNptN\n",
+       "0T0kEgkRqNOISn0ETbLckS4iCqlUCqenpzJnhF495iYDEPVdMBiUHW+xWOBLX/oSvve976HVaiGV\n",
+       "SiEQCIhiLpVKwWQyIRaL4fvf//6lHA2PxyOjMqjE83g8EhJO7TQRkQ8++ABPnjyRxKbZbCajIxhK\n",
+       "Qzy60+kIoUOokhJTWrPYADabTfz+7//+azeA12oxf/Ob34TP55NMDCbIUzfM2rdarYoplVRwqVSS\n",
+       "fOblconJZILJZCLOFMMwEAwGJUaLBAsA2V2JqbIJ4mtheUEnB7v8YDAohA5F/rQoMeiFODMNscR2\n",
+       "AUiyEL1/dDiPx2OEQqFLxAtLBqZ5djodSTBi2hFfC0uS4XAosCIXPKcEMH42GAyi0+lIWpSmaQiF\n",
+       "QhKnQHktYx6o+S6Xy9jY2MDBwYE4TMxmM37rt37rLZoBQOhbpuik02l0Oh3s7Ozg2bNnElZCXLdY\n",
+       "LOLo6Ai/9Eu/BJPJhNPTU0SjURwdHYm4BvixdWk0Gol/jSJ1NkpskE5OTiTettvtSuYF2T5FUUQZ\n",
+       "Rziu1+vB5XJhNBpJ6UNV2mAwEMlpq9WC1+uFruswDAM3btxAtVqV+n08HouGeDgcolKpIBgMolar\n",
+       "iQ6brKbT6bykIe71erh9+7bk2fV6PRweHkLTNJn9srW1hf39fWQyGdFhc7YLwxYnkwnK5bLk2zF6\n",
+       "l2wlk5zS6TSOj49xdHSE4XCIeDyOR48eXen7v1YNIO3+Pp8PmUwGsVhMPkgygyQuWDoAuJSwmc/n\n",
+       "ZRKTxWLB/fv3MZlMZAZIPB6Hz+cT0oSLjbVrKBRCJpMR4TqllTS38r8pxp9Op+LrYwIQM+yY00aR\n",
+       "EcuJcDgsoieiFmQwqXOgwo8sJwVUgUDg0pEfCASg67o0bWQwAUhjmk6nYbPZpA8hXV8ul6UGZonC\n",
+       "7+FiBC/ZSiYlMfqB6AwjDiiXfd3rWu3M/X5f4lsDgQC8Xq+Ij0ajEe7du4f1eo1cLof1eo1f/MVf\n",
+       "RKFQEBf3r/7qr6LdbmN3dxfr9Rq//Mu/jMPDQ/lS6FBhc7Zer7G7u4vZbAaXy4Xlcim75t27d8Vg\n",
+       "u7m5iW63KwIeQncU+XQ6HVitVqRSKSQSCZnwxMgCegqZ2EnHNJPuuSi+8pWv4Lvf/a7Y/D//+c+j\n",
+       "1Wqh0WiIN5DmWjpI+v0+7t69KygMhfmLxQJf+cpXxHsYi8Ukfvezn/0sjo+P5UZIp9NIJpOy+FVV\n",
+       "FSc4R67REU9ZAbXcvLF4mj18+PC1v/9rVTP/xm/8hgSOx+NxoYopWD84OJAEIy58kgkM2bZYLJjP\n",
+       "57hx4wa+//3v486dO4JyOBwO5PN5ZDIZ2VnD4TAODg7gdDrR6XSQSqVkxz47O5OJTGz2iAAUi0Xs\n",
+       "7u6KCIgCKKrO6DKv1Wrw+XzybzKBdH9wgZVKJYxGI8nNY0QtpaFUEFosFmkgycjRysRSxDAMiQpg\n",
+       "2hEx9NPTUwmX5MSAer0uDhQmI1HjQQyeJ+T29rYwgnSGc7zGw4cP8Sd/8idva2YAl/LTTk5OkMlk\n",
+       "RKzPzDhivGSiSqWSgP5Ucem6jhcvXogQh40krVMU1U+nU9TrdSEp2L3Te0h7EbFqKueI5xKpuJia\n",
+       "xEaMfyeRSIimghYqivy5ABlzexFRCAaDkqF8kWbmdKxgMChZdsxGJvLCcqPZbEpaKHCuU3a73RKT\n",
+       "wPfD8qbdbiObzcLr9YqakDpoBk1e9ExSJ0Ip6duogQtXr9cTL1y9XhdXcaPRkGaOKjIOaCwWixgO\n",
+       "h+j3+9B1HZ1OB6VSCcvlEq1WC5PJRMbzAucTSSuVClarlUR9DQYDFItFVKtVybB48eIF/H6/sGwU\n",
+       "ELXbbWENaZjN5XLyxQPn4xcePXqEZrOJ/f19iZZ9+fKlHNvdbhf5fB69Xk8gQebPMXCl2WzKv5ld\n",
+       "x5mGpPHZHH700UeXXiNwLiut1WoYDodCz1erVZycnOCjjz6SiQP7+/uXUkEHgwH++q//Whg9Mn9U\n",
+       "EzKXg+E0tJz98Ic/vNL3f612ZsZdUYTDwBWKYIh5sllSFEUEN5FIBBaLBdVqVVKMOBaiUqkglUpJ\n",
+       "LAEhtXg8jlQqhfl8jtPTUxHqM2eNKT8Mn6FYh0gFa06Xy4VAICDHNIXzbKoikQh6vZ4YcGOxmEgn\n",
+       "iSUfHx/j/fffx2AwQK1WQyQSkcfj6eB0OhEKhVAuly9N3qL1iuaE0WgkzpfVaoVMJoNGoyGoTTab\n",
+       "lXR+ZnhwABBw3gDev39f6PF4PI5arYZsNivZcqzh+dy6rl85Of9a7cyse7vdLjY2NjAajcSSw2gA\n",
+       "fpBMHNrd3RWtcLFYhMPhwAcffCBw2XA4xP379y8lwnOK6Ww2k6ziVCoFTdOQz+dl+A4F8bFYTHZ6\n",
+       "1qBUpLG+Zj7c2dkZlsulBJlfLDvu3buHVquFWq0mU6+Ojo4AnLtXLqYIcehQLBaTG5PBjjw9qF+h\n",
+       "0L/f78Nmswkzubu7K3G+DGrkfEKOq+Bzc+oVoxaIXjAzj3U+iZ/pdIqzszO0Wi10Oh1EIhFx4rzu\n",
+       "da0WM0MEedRdPMq501AcztqVc7OPj49FYD8cDkWrTHz34OBAJJccJkl9Ba1YFDKx1tY0TSBARVEk\n",
+       "uJs5dcFgEFarFbu7u9JIUaNMPx5vSmK2xMoZysIoMTq3V6uVMIz0JRIn56nF/Ds2aD6fD9lsVnKs\n",
+       "yUgSleCQn9VqJZ7CVqslOXIul0sE+2Q4OVtQVVWZ90fzLMu/3d1dsaIBEJ3I617XajFHo1HxyjFr\n",
+       "jSIb4LzepcTzopOYXT/F7aPRCMFg8JJL5aIwhynwwWBQ0ISLY8moT6AskjNHmLQJnBM8vNlo108k\n",
+       "Epfm41HOyh2amDTjAbhDslyg04UiqYupRZx2xRuUN5TFYkG9XpeIrvV6LVOpSLCwDOBnVa/XJdeZ\n",
+       "aBDFWhy1xrFwlAZwhBwAubEp2KLV7arjhq/VYqbHjaMJSGK43W5EIhHJkgiHwzCZTPjGN74h9enF\n",
+       "3ZM7zu3bt6GqKjY2NpDP50VzkEwmpYRgCihJCS4aTdOkkWJYDONgZ7MZ0um0mDkJZ5G9JEphMpmE\n",
+       "OSNxQmPp/fv3xZ2xXq9x48YNRKNRCcC5mOi0ubkpRAwpd2Ljq9VK4nsJ9/GGDQaDcjLYbLZLUtn1\n",
+       "eo1sNivGBZZZvGlJhtAl73K5kEqlJCeaTCgD2JmncZXrWi1mBqMAkJ2XGC79f4xvtVgs+N3f/V2B\n",
+       "kJj03m635QN++vQpPB4PCoWCDNCJx+Oi8wUg5ljuRjy2qVVg88UZH9xtdV1Ht9uV3Zy1K28KwzAQ\n",
+       "CoXEo8jFdnh4iNlshkqlIrssiZdCoSCxsfQAMqSRr+0iEUNJZqPREP0IP5/1ei11fyQSwXq9Rj6f\n",
+       "lxiGxWKBs7Mz0bgUCgUMh0OZgtvpdFCr1SRdaTAY4PDwUDLw1uu1jMAgucTv7nWva7WYCa15vV6Z\n",
+       "trpardDv99Hr9UThRYeyrut48uSJNCrValUWf7PZRLPZxGw2kwR6jnoYDAbo9XpYLpeo1+sAzskF\n",
+       "SkSn06mI+pfLpUw25fxpjmTg8E2KcihCYrYFQ2lqtZrMHWQUVj6fF3qe8BthOr/fj1wuh7OzMwlS\n",
+       "BCAWrVAohHw+j+l0KtoKDjSi2Ii1M8Nu2CMQmuTi43QpPn6tVsNyuZSkfAY8kjKnhIAacc7q5glz\n",
+       "letaQXPdblfS4Cm8Zy1IZRe9emyCqEzjF7VcLsV8ydkhTNFknUn5JuEx4tLMkqOAnmqzP/uzP8PN\n",
+       "mzelsWLcLlP4J5OJ1M2cbUIEoFQqSbJ8Pp/H7u6uRBWUy2WZoMoTwul04vj4WGxZxKQNw5BTh8J9\n",
+       "yl1nsxnG4zEajQba7bYYU3O5HOLxuDB4RGAYS0AGks0x7VakwGnv4vAhOsGp075oAm40Gm8H9Fy8\n",
+       "7Ha7hB/evHlTglISiYTMKSFExQ79wYMHmE6nuHPnDlRVRTwex+c//3nRC6uqir29Pezt7QGAfDHE\n",
+       "bGlz4iTXyWSC0WiEW7duodVqST1rtVpl8DyDwWmMJYvGkoei/kAgAIvFIlG9X/jCF7BYLGCz2ZBM\n",
+       "JhGNRuUGZRA5bf0mk0kEV3zf8Xhc3Dcsjbxer9TWZA8pZHrnnXdgGIY0nDS87u3t4b333pMmc2Nj\n",
+       "A5qmSTOqqqrAmbxBORErlUqJu50eRTa2b0NgLlysvxKJBE5OTmTHYbCKy+WSBU+56Icffgiv1yu5\n",
+       "cqenpzg7O4PX65XdsVwuo1arCRFis9nQaDQAQPDqSqUi6fbBYBCFQkG0FFy49NNxNPB8PkcoFBJI\n",
+       "ajQaYWdnR2r/fr8vtDRwznD6/X5ks1lBFKivGI/H6Ha70HUd9+7dk3hceh8pSqJovt1ui/aZ5leq\n",
+       "DBl8Q6wdOO8NOCelXC6jWCzKjcjPhG4ap9OJs7Mz0XirqiqnHxvG6XSKTCYjoiuGU17lulaLmePO\n",
+       "Dg4OsLe3JzYdMmfPnz+XcJf1ei3/jyHYtFwxmJBA/0UJI3FVAGK9Z/RUv9+XnYbjwjweDw4ODkRj\n",
+       "bbVacXZ2Jk1krVZDPB4XAdDx8bFkTlD0RAd1rVZDrVYT10w+nxcUxul0yjFPkQ+H3qxWKxwdHaFc\n",
+       "LsuUVe7qjNsi9HcxXIZzDKlb4XzCyWQiZYnFYpGsDeZSc1GrqoqXL1/KaVKtVqWUWa1WaDQaKBQK\n",
+       "6Pf7Qt1f5bpWNTMpW5vNhrOzM3H9khb+hV/4BYlYJcZLS5PH40EqlQIAHB8fCxXLY5Y1bjQaFa0D\n",
+       "m0IaTZkFx+aGOuV3331XBkl2u11sb2/j8PAQuVwOd+7cwcuXL5HNZmXeB61KzOoggcFRxXTChMNh\n",
+       "afhY5wYCAaG7Cf01m03cunULjUYDDocDW1tbElAzmUxw8+ZNsWVRQ71YLJDJZGTMMo2zJFrcbjf8\n",
+       "fr8I6ymBJc7MyNxMJiM3GrFlirRYc1NPAwB/9Vd/9drf/7VazPyCAAjiQG0EXdDUKjCSi116r9eD\n",
+       "z+dDvV4XbJQNJZVkw+EQw+FQxjSwSSQZ0m63ZSYg0/UXiwUqlYpkdZByp0KPdXQ+n5dxCkRKKDxi\n",
+       "gmir1RIyp9VqyQChVqslC4mjH6jKG41GKBaL2NzchMlkEhKl0+lcYuqoFmw0GjLtlWo2lhsUDjmd\n",
+       "TgkPJyZP0wPnCc5mM9RqNcGsq9Wq3FgARB8D/Dg1dDweX+n7v1ZlhtfrhWEYQooAELaOXTU/dMJJ\n",
+       "jL/iImcsFus6Cvu5g9AadHHsA1N66ARhtgSz6OiqoG6CyAfrRJpSiX+T4CEdTe8c/YgApAShPprJ\n",
+       "QbwBer2ePD4TUCnx5ONejOHt9/uXyin2Fsy5YF0MnDe5xNEpKeX4NFL7JF2YcsoShcmjfr9fSCc2\n",
+       "o2/p7AtXPp+HYRhyxIbDYclEowCJaAfhNuDH0tH5fI5kMimIw/b2tjRQ1WpVspNp8eGRzClPZPIo\n",
+       "pGfKZrVaRb1eFwz64OBA0BRS5P1+H3a7Haenp1Izq6oqxANd1s+fP8fBwYEsZir5eJOs12vs7+9L\n",
+       "xCxxY07d0jQNtVpNfH4sWS6eIEQtyPA1m03k83kEg0H0+31sbGxIVDB9hKTMSbxwI6CICoCI+Dud\n",
+       "jkSSES8fDoc4Pj6+0vd/rcqMbDYr0FupVMJisZAoAArKWZtNp1Msl0upNznfw2KxoFwuY3t7G51O\n",
+       "RwgPzgcMh8Oye3NqKScmud1u+X0KkkwmEx48eIBcLifYbzabxYsXLwBAdtubN28CgCzKg4MDaJqG\n",
+       "O3fuoNlsSt4xg84phmKaEWFETdPwxS9+EScnJ3LDWiwWiT5YLpdIpVKXBksyTejGjRuo1WryfiqV\n",
+       "CrLZLBqNhhA0LpdLCCHe9BsbG5LrQadMLpeTGz6RSFyaheLz+YTc4qajKApu376N73znO6/9/V+r\n",
+       "xUzT6Gg0ws2bN2W4JDFYhh4Sp2Xjx1RNqti+/OUvXzK1MvqKRyqd2Ryyzi+R5lLGupJkGQwGUrcz\n",
+       "RsDtdiOVSkmjBUAyLKxWK+7fvy8qQE5TpaieTpLVaoVUKiUjlEmH53I50SdPJhMZLq+qKmKxmCQW\n",
+       "UU3H8oE0Nhc4zar0U7KeZ+NnGAa8Xu8llpGqxI2NDQlUZNnH98cJASR8SPaQCn/d61otZgBCXfPD\n",
+       "yufzuHHjBp49ewafzyeeO2ZA5PN53L17FwBwdHQkKZxMQcpkMhiNRkilUmg2mzJplWMOMpmMDLBk\n",
+       "TobX6xUfXjQaxenpqZAZlFFWq1WxQtXrdWEuB4OBjJKgXgM437HL5bLkJNfrdRH3cN4IAJlv4vF4\n",
+       "RILZ7/elxqcvkjoNUtMUP7GM0jQNP/jBD3Dz5k2h6UOhEEqlkuiiOV6OJxFnZTscDpRKJdGxkLpn\n",
+       "w8dm9y/+4i+QSqWENudp9brXtVrMgUBARPjEkblTkDHL5/PQNE0cIz6fTySWsVgMzWYTiURCnNjR\n",
+       "aFQICtLTi8UCkUhE/IYWiwVbW1vi8ubgRu7ioVBIbiSOLPP7/UKnMwaBijcOn+eQyovyUWK2Ozs7\n",
+       "siBjsRhevnwpJ8Lt27elAWM5Qe9iLBZDLpdDOp0WMoXZdgyUofz0/v37MJlMQjYtl0sJPGezerG8\n",
+       "YcgM2VE2pZTjEl+m129zcxM2m02mxfKEet3rWjWAF8MJg8Gg1GSr1QrxeFx2QNbJDBnkbBIuPIac\n",
+       "0BVBOxFNmOPxWBIvKbfkUKCLORdMMGKICwdk0p1MfBqANKZer1coeEJfxKuj0SgcDofMGnG73dLA\n",
+       "+f1+gREZpEisG8ClkW7vvfeeCPndbre4wTVNExSDNwazLjRNw8bGBnw+H9555x3E43FprAlRMp/D\n",
+       "brcLle9yubC9vS2oEV//bDZDNpuVuYOBQEAkAa97XavFzN0DOC832u227GzEoLloGa/V6XRk5t1s\n",
+       "NsNsNkMul5Nalzg05/Qx8YcKL5IEq9VKfk7pJ5NDya5R08Fxv/TDkV2j+P1iMpGiKKJEs9lsooku\n",
+       "lUrC2AEQGahhGPL3uTPSB0mamTsrcI7NE1lgiUMTA3PnWPJwnBtDXwhB8vMDICZfMpcOh0PKHAbM\n",
+       "DIdDzGYzOX14IrBUet3rWpUZDodDZjAXCgUEAgGpTUmdcmcFIEQE4SjivdVqFTdu3JB8t9FohFAo\n",
+       "JNQrmbbj42PJVpvP53jx4oWMLeOIhWw2i48//hg3b97EaDSSDp8RBxxaTxMudRgWiwW5XE6cJnQ1\n",
+       "Ewmg/467f7ValezmRqMBn88nQ4esViseP34s4qOLcQesV5kNQudJOBzGixcvcOvWLamz1+s1KpUK\n",
+       "7ty5AwAy4o2qOo4/I4O5s7ODDz/8UNAMEiiDwQA+nw8nJycolUrisKGc9nWva7UzD4dDxGIxgaIo\n",
+       "0Kf+wul0iheQx6/H4xHBeSwWE3WXw+GQbAqyeszgIImQyWQkItbhcGBjYwN2ux2JREIym8fjMdLp\n",
+       "tAyOZCNmt9vFrGqz2SRgnHnRrNdJPCiKIuTH9va2zMter9cSCAlAAiOpAGQCPnsIlgekqvk50Huo\n",
+       "KMqlYHMK+4mY+Hw+wYqZthoIBLBYLC7NDySUmclkLvUlpOhJbxN14ed8letaLWZqhRl8clFj7HA4\n",
+       "EA6HcffuXdnpvv71r+PFixfSXPEL59/d3t6WCazPnz+XLGLOiWbKPFVnHLfQ7/dlpAIDwxlGbjKZ\n",
+       "EAgEkEgkhJpm+cPRYiyLdF0XmSjpcvrx3n33XcnQcLlciEQi2NnZkRqa7B6jCbi4xuMxVFUVCnw0\n",
+       "GmFzc1OylcnIseS66CKnu4YL+otf/KIYIO7evQvDMGSHppyVtLzD4UAsFhMaezAYIJlMCmxK7+RV\n",
+       "rmu3mDksnblubMbIVp2cnAiM9Du/8zvSsTscDmn0qG57+vSpTIn67Gc/i/V6Da/XK8J0UuZU1tG5\n",
+       "zLwJDuBhPgZdK4qiIJ/PS6O2Wq1E6EQNM4MMiaIwXouumMePH0sJslgsUC6XxWHOHDdGDnBUMmN2\n",
+       "6dEjRX94eCi0PN04F6WbDIqs1WoSx2u32/Hw4UOEw2EJnAQgWpXxeCz5cV6vF/P5HNVqVXZ9pqrS\n",
+       "dMu5Kle5rtVipsKMNSE7eLJq/X5ffsaGZ7VaSQI+pZsAhOigOu3p06eCKFD1xWGYTqdThEntdhvD\n",
+       "4RAAZNwC8V2GwLAhm81mWCwWwpaxGaVnzjAMcWdQ4wD8WFA1n89xcnIionqO/3358qUM4Wk0GjCb\n",
+       "zQiHw7BardIQE5HhTTedTmVoD08aNop8LjZ1HBwfi8Uk3ovzCAHIeGbe5LRWsV6v1WoSKcxcDeqk\n",
+       "r3Jdq8V8MZ3T6/XKFKaLugVCciaTSUYHZ7NZ6eAJ7VFZxgRQ1qs0krL+pD7jYs4Gm0WWOBe1yYw6\n",
+       "YEnD2SuEyWaz2aVwFr/fL4IiLmafzyejzliLspxhOj5PJeBcGnt8fHxJa8zyhK+dsb4U2XMsBgNi\n",
+       "FosFksmk4MK0hlE2qqqqSAYURREFIeE6mlXJ9nHYJwDs7OzITn6V61otZkJoJpMJ3/3ud8X/RmF7\n",
+       "r9fD2dkZcrkcDMPAxx9/LJFcFOZTCMTjtlQqoVqtinlTURQR2p+enkodPRqN0Gw2pVkiccPxxWxu\n",
+       "CHfVajVxKr98+VLeQygUEsr78PBQhj5SvEP9NTMwLBYLFosFjo6OJAjxyZMnUtPzPft8PokzsNvt\n",
+       "Yv7t9XrY399HvV5HLpeTkcI0InCQka7rODg4EPaQPj6iHMys43tyOp1y+i0WCxQKBei6jlarhXw+\n",
+       "LyeRYRg4OjpCs9mUPLvXva5VpO03vvENAOeoxmKxQCqVwmg0QiAQkKHrmqbJh2i1WtHv9xGJROTI\n",
+       "j8ViqFQqCAQCqNVqSCQSaLfbCIVCcLlcODk5kbhYHsG6rl9iz+je0HVdEoiAH4fQsAHlbl+r1aAo\n",
+       "CrxeLwqFAnZ3d6EoClqtlkTvskzhjMLxeIzbt2+jUqkgmUyiWCxKvkWv1xOpJYfqkATh+7fZbPB6\n",
+       "vYKIUIP86rNEIBDAeDyWnZgpTqxrecoBEKkrB3ayvGDqkdvtliE89EISdalWqxJa0+v1rjTT5Frt\n",
+       "zMWIwIUAACAASURBVPzwmB3MLx+AfGDMauNR+vDhQ/T7fYTDYezs7GA6nUrGM0dGUNbInZb1KGWb\n",
+       "TqcT0WhUnq9cLssOyFnWTBulmIfDNRmUyAZsY2MDg8EAlUoF3W5XalFOUiXURl1Fs9lEqVRCo9FA\n",
+       "JpMR5u0nU/CZMLS1tSV2L6IZFBCREOFjhEIhSUelk5uqP5p5+dlQn00Eh8pCjm5jCcXanL0MCSwO\n",
+       "PbrKda0WM+flLZdLsSExSYhlgt/vRzKZlKC+nZ0d+XtMsOe8kHg8LtJLkh1utxuZTEbqTSIjpVJJ\n",
+       "glWYt8bSgNroiwlErG25Q1HYTgUelWqcnzKbzVCv1wWTdblcCIVCoi3e29sTXyEDYJgdnUwmJeiG\n",
+       "C8nv9yMej0NRFESjUamhfT4fotGowHfUlXC8BCdUcb4KNSCksdmfMPOagemhUAiBQAAAZEIAMWcA\n",
+       "gthc5bpWDCDLi9FohCdPniCVSiH3ag51pVKRBUlqmV8KVW8cTs6gFHb6xKnr9TpWqxVOT09hMplk\n",
+       "5APrbe7ezNIg2cGGidOr6LrodDqIxWKCgrAWpdSUuzjw41xohh/+ZC5boVAQtIV5eazN2+02dnZ2\n",
+       "UC6Xpf5l0OJyucSzZ89EZcfdlKpC4LwMInxIlKfT6eDo6Ag+nw+NRkPCKjkOIp/P4zOf+Yy42uv1\n",
+       "upyGVPNRW16pVMTCdpXrWu3MTqcT8XgcgUBA7niyetytaGciAXDjxg2MRiPcv38fiqLA7XZjc3NT\n",
+       "sGoGJJJ4YNYzmUZKNPln1tAckh4IBODz+SS3gySJw+GQNNJqtQqv1yu6aVqskskkFosFEokEotEo\n",
+       "7t69KzQ2d0/CgNRHc+fmc1MvQgMqJ7/y7xK9GY/HsFgsYp5lWTKZTATF4HvkwCOyp7RjMcaLMxXp\n",
+       "qzQMA4qiIJVK4ebNmwiHw6LRps0rFApdOdHoWjWAX//612V+HTHe4XAoi6zT6UiUQCQSkWiqvb09\n",
+       "1Go1mXGiqqrY+y8SFVxsbOyIWTN+gKybqqoCyVFcREvRYDCQnTeTyUiSEgNROGbhohEAgGRyAJCd\n",
+       "0zAMaQY5NJMQGLXDtExxRATHyZGJYyQYE/07nY5kWVA3wosZ1tvb26L7oD2MiUoM2Dk8PJSGk0QL\n",
+       "4Tev14tWq4V4PI6PP/4Y8XhcXN/f+ta33s40Ac4bQCIFAKRBymQyUmb4/X7k83mZiVcqlcTQWiqV\n",
+       "EA6H8cknn+DGjRsoFotSa2cyGSiKgv39faTTaQDnx+/t27dxenoq5AZhOWZZ+Hw+CQgkKbO1tYVW\n",
+       "qyWlAC1QnU5HRDt0ceTzefj9fvR6PXGJ0LKfyWTk98vlMhwOhwiRaFz1+/1CdDBat1gsyk5JzyLt\n",
+       "WERyVFXFyckJdnZ2pNZutVoCUzJsZz6fS7AkMX6WKCx5Op2OSGbZfHP8BU/K4XCIv/3bv73S93+t\n",
+       "yox33nlHpJN0h4zHY/j9foGsCoUCer0ebDYb0uk0bt26JfFRzH4gW8WOu9Vqwel0Qtd1Cd02m83i\n",
+       "FOEX7HA4sLe3h0wmI/44nnyRSETkqUyMZ0onSRiOEKMYirU6GTaeDBw5RnsTUzQzmYzQ0ER06Ptj\n",
+       "1pvZbJbH4/Pruo7T01OEQiE0Gg0RVHHXZ7QBm1SKmkqlkliuqtUqhsOhmCMo2OJ74aINh8Oiw7bZ\n",
+       "bIKQ8Ma+ynWtFvNoNILdbsft27eh67qwcMPhEOPxGNvb24hGo8hms/L7T58+leOWOmEmcZIgoAOE\n",
+       "WDM1vIvFQgaqr1Yr9Ho9Yc04IZZ5yPP5HLFYDGazGVtbW0ilUqhWq3A4HJLyQ2EP6WxqJGazmexy\n",
+       "zMpgXVutVkWmms/n5fgn0cL4r1AohHQ6DV3XZUfmPwyQ6XQ6yGazItzXdR3j8VgGVM7nc0SjUTQa\n",
+       "Dek7mPO8sbEhs17G47GMcrtYj5Oh5fPSNEGChWaI172uVZlBRVs+n0ckEkEwGJRwQIvFglarJeId\n",
+       "BsL4/X4oiiINXzqdFpaNkBaF+0RKmBxKZzUdHABET3FRNRYIBGT4OaOumOcxm83EgkVVWzQalbgv\n",
+       "wnM0oNIdTSE/cE5X7+3twePxiNE0Go3KDcJkUgASGH4xuszhcAhuTNNsPB7H9vY2FEUR7yFPvAcP\n",
+       "HiCfzws+bLPZRDDFBZpOpyVEh2OMiV+rqioumKOjI5mJ8tZpcuHiLLrBYIBEIiGEAcNRqBpjorvd\n",
+       "bsfe3p4sFsZ70d3N0b4cEQGcL9ZEIiGa4VQqJdpn6ik2Nzehqira7baEG3JSLMkFm80m7g9KUFVV\n",
+       "lUDGdDqNjY0NjMdjaJom6UO1Wk2GaVJcRLZuPB5L4A2tVByswxKHo9CIaVOboaoqRqMRNE0T/Jnl\n",
+       "xI0bNyTqS1VViXFwOBzi2GE+CWtzNqScB1Ov10WZyJtmPp8jm83C5XIhHA7LHO/Xva7Vzsxhj5qm\n",
+       "SaPW6/XEDa2qKnK5nEBrxDt7vR7u3bsnCjRFUfDkyRMoioJisSiDdZj1fHp6eilln/kUvV5P/j99\n",
+       "dX6/H/v7+/D5fMjlcgLV8Yter9c4ODiQm4KZ0C9fvpR6neo1UsqcPciprjabTY7y1Wol+dOcsnV2\n",
+       "doYHDx5IrV8qleByuUTUVCgUYDKZkE6nRS7rcrlQLpehqqroNfx+PwqFguiQL0Z8UYlHRSFRDWpX\n",
+       "FEURBKZcLosYi/R4PB6/smruWkFz3/rWt8QFTbiL+RX0nGmahmaziXA4LEA9gX6OSqP4vN1uIxqN\n",
+       "otvtioictTHtV/T/EaMmKzgejy/Nhma9SHiK0BTDvinKobOExA71HhwqxKaJ8B0AoYh5IwMQRo+D\n",
+       "KtPptLB1hmHI9FWOgCAuzc/m/2fvTWIjTdMzsScYjGDs+75zX5JVWdndVdXVDbUgoaUWIMD2QbB9\n",
+       "MDAwfPPBgiEImjnqMjB8kQUddDIaAx0MDGBpZB1aLVVD6pbVKk11rZlkMrnFvm+MIIOMYGw+sJ6n\n",
+       "gpZkA0nMtETUDzQ6i5kMBuP//u973+d9Fopt+/2+DBOtVqvkaGyKWdOz5LJYLHC5XHjx4gVWVlYU\n",
+       "mtTtdtFsNnVCVatVxGIxHBwciLvt8/nw27/9219xMwAo8anb7SoMnuy3RTvWwWCg3D1292z25vM5\n",
+       "jEajUA2WKhSeLvoQkzBDtl2/30e32xW7jg9Et9tFvV4Xx5pQFxtDAKrHFw1VCGfRQ48sOJqJ03OZ\n",
+       "OyJPB4bVk5dC+ii1jFSvUE3CYB3ymLvdrnBu/u6cYM7nc8TjcSlISB0tFAryZl5aWpJXHh1Tyf/m\n",
+       "58TPjIkGvH8PuR5VmcH8PRqJs2FhhgdZYAyeoXjVYrFgPp+jUqnAbrfj+PhYjSJwN13joq/X6+L/\n",
+       "slHkhOvq6koEnXK5rHQnBkESCru5uZHzktVqVWaJ2+1Gs9kUt5lWYTRx5OIhlEfiExvMRqOBJ0+e\n",
+       "4OTkRLs/3z+xbFJj2fhS7sXFvyhwYIlGSI+BQp988sk9X7nFPER6WxPbbrVaeP/99+FyuTQQms/n\n",
+       "sFqtqFQqMmpcWlr6ajEvXtT8+Xw+PH/+XJozjk37/b4ytdfW1mRgyDhhGhXm83nFJdAEnMd6KBRC\n",
+       "uVyW2XYoFFLOx3Q6lT6QtNJUKiWnHrPZrOO01WrJ5Pvk5EQDCL/fL7NCngCMKO73+9jY2MDl5SXy\n",
+       "+TxWV1e1E/d6PU3m6KBEeufFxQW++c1vqoyhRjKXyyko02AwCJEYDAYapWezWbkmVSoVGI1G+ekF\n",
+       "AgGMRiMpdmiQw5OBGDRtvlKpFLLZrJxO+/0+vF6v3J9yudyD7v+jKjOq1ap25EQiIaql2WyW0plj\n",
+       "3eFwiFgsJk0eFRtLS0vY3d2VDu/y8hLj8VhwV6FQ0FjZaDQqKm19fV0DjpubG43D6VZP7JY3mgwx\n",
+       "xjqQgEPOBkWp1CNeX1/j3XfflQMpANXtVG3kcjk9SFdXV0ilUmK9MQAoEAig1Wqh1WpJSsWfSTst\n",
+       "p9OJTqej7JHz83OsrKyIXcif53A4pG+Mx+MajcdiMXQ6HfHAiYTQ0YibBi3PMpkMEokEvv71rz/o\n",
+       "/j+qnZlukjQxJG7sdDrRbDYRCASwtLQkM+2vf/3r+L3f+z1873vfQ7/f140ol8uS0FNizykaSUP5\n",
+       "fB7xeByJROIeMZ5DlWAwKDUGH6ZKpQIA4gHT0jYYDMpmgLU7c1BWV1fRbDZFZN/b25NaejabSVlN\n",
+       "3jWZd6SbMkKZSQDME+dYvFqtIhKJSKS6uroqeiktgClrItxJJCIajaJSqWhgwkGIwWDAzs6OamHW\n",
+       "/SzFrFarBjqJRAIGgwEul+vBCa2PajFzxEvcM5FI4ODgQAhHtVpFIpGQWvmv//qvcXFxoQV0e3ur\n",
+       "3WM+n+OnP/0p1tfXtZsAdwuxUCiIRHR+fq5FzmleKpVS7shkMpFsi7vxaDQSsYcppcxFYYxEsVgU\n",
+       "ZmuxWPRQnZ2dod/va6o3GAyEP7Nc4I5Nf2oy05hJmM/nRS6irGtlZUU0VJKhWq0WvF6v8hHfeOMN\n",
+       "jEYjpNNpnJ6e4ujoCH6/X0Y6FxcX0lMeHx9rgbMBzGazWF1dRbVaVT/A/gbAgx2NHlWZ4Xa7EQgE\n",
+       "7imfGQlGEj1r0EQigZubGzx79kxNT7fbRTweV5fPiRv9nUnhpIcdWWNckGTXkWhEwxia0jDPLxQK\n",
+       "yRaBDkQczpAjQggNuPPQoxuR0+nE+vq6kBgS6jOZjDzq7HY77Ha7FjEpoNQ0kqq5vLyMZDKpBpY+\n",
+       "HlarVTAdhaypVEqec8+fP4fdbleNHQ6HMRwOJd/qdDry0qPRIjNlaJDDptTj8Sj+Ymtr60H3/1Et\n",
+       "5larJaiMHyitZ6m8praOuwCJ4TQ+zGaz99AQBlAyzIfjYZLyLy8vUSqVhBww+4OjZO5ANzc3aDQa\n",
+       "Sjbl2NhoNKq2pvi2XC7j/Pz8nkh2Mpng6OgIo9EI5XJZkn2qoOlHxwg1ohHX19figFBGRYiNjSQN\n",
+       "ZgaDgWLTFk84+tqRA12r1XQ63dzcKFGKjfJ0OkW321XaADeTfr8v1hwbZnqL3N7e4qc//emD7v+j\n",
+       "KjOoPWMEwng8VvY0rVdpMUA+MY1ZKFUymUzI5/OiQTKhlAMCaveY/+f1etUI0nSQuyfVF5ubm+Jl\n",
+       "cNTLJCzu2g6HQ1Fku7u7SKfTODs7Uz27tLSEJ0+eyDO63+8jGo1KEU03ImZ4MzWAUjCeFJRD8YRg\n",
+       "zHEkEpG7Pl2NAoGAfEKAuxOCJwdfkyHwlHuxTn/y5Il6Ao76Kczl570IGXo8Huzv7+PHP/7xa9//\n",
+       "R7Uzc9pGT2AAWFtbg8fjkT6PcBERD5YfVBVbLBY8ffpU3nHtdhvxeFxTMgo7eYTTrd7hcEhEy12d\n",
+       "/A36qBEXpoKbRCP6UrCpy2azaDabKl+i0SgcDodISdPpFHt7e+Is076WCpJWqyW5FnV4tOWiiyjp\n",
+       "q6PRSPKli4sLNceTyQTRaFTWvNPpFKurq8LAl5aWZDlG/2W/3w+XywWTySTlDADxOOh5R19nTkBD\n",
+       "oZAcjh5yParFTMcdyp3MZjNqtZoMSrrdriTxjUYDn3/+uaZ9i9a35D189NFHikVg3Tyfz/UAMAeQ\n",
+       "WHa73ZYBOTkf3InYWLHM4f/oRkRVNHdS2oItLS3p4by4uJAq/Pj4WOWRy+XS1I6EKe5+zEAslUqa\n",
+       "kLInWFSqMHyHsCLLBv6OzDShjIyjfzbOzWYTJycnmprm83n5i9Btlb8bdZS8Z/V6HdfX11+5gC5e\n",
+       "HMlS2UAVBg2z6dc8n8+xvr6ORqOBjz/+WDesXq/fI/CQSUbrVyYnEbdttVqCxFiDz+dznJ2dKU11\n",
+       "eXlZN5mqEu5YbC65yMm9uL29lTEKPZhLpZJsb0ejEarVKgDcy22h1xxr0o8++gjZbFZlwnw+R61W\n",
+       "k2av0+losPLixYt7UivgrqzodrsSLZDeenJycm+BM6+EZQNJ+q1WS/ZjRGpqtRqazabIXzS0oRPr\n",
+       "Q65HtZiZn0FF9Xw+1wIlr4KKB8qQ1tbWMBgMxNug1eyibJ6dPxdRoVDQkUj4jP5qREuoEuGxCkD2\n",
+       "WmSmkQtxe3t7L/GVpwPN0AEoQ7vb7crl/vLyEs1mU80nQ4goHiDxiTtot9vVJJFHPrPF6RNCxTkb\n",
+       "aUKXbK5Zo3Msv7KyIq4I7c4MBoM+Dy5QnobcTMhbuby8xNramsI4H3I9qgbQ7/dLjUweMXVxhI48\n",
+       "Ho9y8DweD3Z2dhRvRhGq1+tFMBhEOBzG6empMkuYyrq8vCz4y+12C9q7uLhANBrF5eUlMpmM7KYy\n",
+       "X8QQEwlwOp3ySaZ6gw9SMpmU+/ze3h4ODw8Ri8XQarWwv7+vBNlAIACn04l6vS4FNAlKNpsNLpcL\n",
+       "kUgEVqsVn332GWKx2L1m1eFwCFKjeJX1Kwc+a2tr4o6k02nZ/NJWN51Oi4uytraGXq+H1dVVQYGt\n",
+       "VguxWAyJRAKtVgubm5sK2mw0GvjlX/5l/OAHP8B0OsXOzg6cTid+9rOfvfb9f1QU0N/8zd/UsXh8\n",
+       "fCzSzfb2tthlhJii0aiErbSNslgs6Ha7ODk5wRtvvCH3916vB6fTKUUF4xs4GeQRzOOVmdCLlrcM\n",
+       "vATumtLDw0OF5JDIw9oagOBE4tRkwIXDYRgMBlSrVQXIE4fmQ8UkVKYGXF1dYX19XTUwgzY3Nzfx\n",
+       "6tUrAHcPAO26mHf42Wef4c033xTDr1AoyMe52WwikUjA4/Gg2+1qOEKrhOPjYyEs9KxmrszKyopE\n",
+       "wjSlcTgcKBaL+NM//dOv1NnA3Q3xer2o1+vaBb7xjW8gFouJMONwOMT9/Y3f+A380R/9ETKZjPSD\n",
+       "dAl1OBz4pV/6JTQaDaTTaVl/XV1dIZPJoFqtSnXSbrdhMBg0uiZhiOUKUQh6ylmtVuzv74tFR7UG\n",
+       "c0TsdrvKH7fbjUqlIppmJBJBv9/Ht7/9bZVPe3t7aDQaKj04zGFgD2mYVHzf3NzIsZ7QGkk/HH0z\n",
+       "u48PyHQ6xZMnT5DNZhEKhTAej7G/v696mFNPcrf39/cxmUzkgBoOh+9lKrrdbsnFVlZWpCd8yPWo\n",
+       "FjO5zIwF3t3dxYsXLwDcBcdUq1WkUikFNf7kJz/BYDBAuVyWf1s8Hhd984MPPkA8Hsfx8TE2NzdF\n",
+       "MG80GlJG0K6KTpfJZBLxeBxnZ2dihxGB4Eh7Mpng4OBARueVSgWxWEw1LP2Xy+WyXIdYw5+cnGB5\n",
+       "eRn5fF5EqVKphH6/j62tLdW1FxcXop7Sl5puRByGMJCSRzuFCLTK+uSTT7C+vq6fFw6H0Wq19Bof\n",
+       "fPCB8kqIQbPefv78Ofb29hQiz82CfBDW+7RmuLq6wqeffvqg+/+oGkBygknYYe3IuAT6FlMpXC6X\n",
+       "JRh9++234fP50O/3FZHAhi7zRQb1fD6X2xH1fLQIsNlseOedd7CysqLIXr/fj3Q6LX0crQSMRiMy\n",
+       "mYxsb2liyAxv5gASv6bmkCN2NqSEE5lexXzCWq2GeDwuFUy/31cAJiVP8XhcwfTsNdLptAYpAFTy\n",
+       "8DObTCYaxbNnYELVovnhZDLB22+/rTSAxRwZpnft7+/DZrPJWNLr9crf+XWvR7WYF6GinZ0dkdnJ\n",
+       "XlsUktLIm01To9EQPTMYDMLlcmFjYwNerxflclnSIGZkL+r+GEhDp3qGXzLugTtWKBQCAJUBxF23\n",
+       "t7dxc3Mjh3kA2NzchMlkkscdx9qMKeNr0PJg0Td6e3tbam9yJ7gzDgYDCW/JPQG+FAMzr5ClCNU6\n",
+       "VKDQyZQIkN/vFxUAgEze2+22ZGNUyVCixaRX4C5dIBqNKhb5IdejWszU9Q2HQ3S7XQCQEw9H0Pl8\n",
+       "Hs1m8x5mTPiJ/hOExg4PD3UzFj3VOLXjpI96PUr4J5OJyoxFI0TyIsh+I4Zcr9dF9KGmrtFoiFvB\n",
+       "QQ+taPv9vqBE4tSE9MijJixpNBoVIsQ0WYpKyQOhAIGnGndhui4tfpZseBkeNB6Ppf0zmUxy/a/V\n",
+       "alrAnMz6/X4hOuRxUK1Dn5KHXI+qZp7NZojFYlhaWhIzK5lMIplMCsyPxWJSiAwGAxQKBZkaUmtH\n",
+       "VyIqV2hGSF8KAHLuIRY9mUyws7Mjgj8HEIT5iFfzwfB6vYjFYsK+DQYDUqmUSEgXFxfY2NgAADV9\n",
+       "5CEDwJtvvimVCL9G+RLH6E+fPhVRiKlR0WhUZQgDNjnVo3qbO/Y3vvENmEwmlUp+v19ezIt2B4QJ\n",
+       "R6MRIpGIpF38nWw2GzqdjhTewWBQJRAN14PBIFZWVvCTn/zkte//o1rMHDRwFEuzlV6vJ+yWH2Sr\n",
+       "1YLT6VSOc7ValekKhy6cBtIqi1atXPz0U6vVarJ2pUcG3Y2CwaBU3STgU19ITsbFxYVqzmKxiL29\n",
+       "PUSjUTSbTXQ6HY2cS6WSsPCjoyMkEgkR9fk7U33On0FyUT6fh9vtRi6Xg9lsvmeHxYkhR/acmLKW\n",
+       "pyNppVJRnc/MbWoAyRFn6USzce7ALENoTMnXpUSLtIKHXI+qzOAO4fP5YLFYxFnmxUWczWYlvmSZ\n",
+       "QMNvp9OJ4+NjSYAsFosolNw1CS/x6GZQZbvdlvFKuVxGJpPReBmA6k5OIGmE4nA4sLa2hkAggJ2d\n",
+       "HblvcrLXbDblnETrWtapXDA2mw1ra2twu933vOn4bzY3N3F1dSVCUqVSuccZ4fCkWCzKe4/1Nf07\n",
+       "GHVBshaNGInBEyun2ICDGrfbjdlsJhnXeDyWEIBTS+BfIDnfYDAkDQbDXxkMhgODwfDCYDD8T198\n",
+       "3WcwGP7SYDAcGwyGvzAYDJ6F7/k3BoPhxGAwHBkMhl/9p16bdSMXCWMaOE6mm9HGxgbMZjPi8Tia\n",
+       "zaZU1CsrKygUCjIJNxqN0sExooDUULLhms0mQqGQpnckLXE34nHKhFhi2H6/H4VCQcw1RjkQ8SiX\n",
+       "y4qYoPKZBJ/JZCLiPyeH3A0HgwEymYyaPyqo6WfBppK+cBxDE6HIZDKSRxEPJkGJ7kqLKawcXZM1\n",
+       "SPX7YlyF1WrV+yX2TIkaHzqLxSIPwNe9fh5lxhjA/zyfzz81GAwOAB8ZDIa/BPDfA/jL+Xz+vxoM\n",
+       "ht8B8K8B/GuDwbAH4L8BsAcgDuB9g8GwNZ/P/4Ez9e3tLYLBIFqtlhZbt9tFIpG4F25JP+VOpyNb\n",
+       "K4L3Ho8Hp6enagRJdKcZIgBZurKB5Gv0+31cXV0hkUiorrTb7SgUCoLBhsOhFl0kEhEPmpO08/Nz\n",
+       "iU/J3+DAhikAsVhM7krMB6R1gslkQqlUkqSKooJSqYR2u60UAS484M6yy2w2o9FooFwuYzweCyMm\n",
+       "GkS3fe7MfF+xWAzT6RS5XE7OT2woAQiqm06nsugisnJ6eipP6PF4rGDM173+s+/M8/m8Np/PP/3i\n",
+       "z1cAXuJukf4XAP7dF//s3wH4r774838J4P+Yz+fj+XyeA3AK4J1/7LUX+QXlclnNVq/XU11IMSVN\n",
+       "FXmM0juj2WwC+JIMdHNzI8YdvSqYFEXkhMoU7py3t7caFpDHSx+5+XwOp9Op98t/Wy6XNf2bTqf6\n",
+       "b6YxcYGQj8EdkdRPADqBWNfyAer1enIkqtVqKqn4sNLei/YAJFXd3NyItGSxWGS2SBah2+0W25Bw\n",
+       "IdESfjbAlwY67F/m87mwa46/6dH8kOvn2gAaDIYMgGcA/h5AeD6fk9BaBxD+4s8xAIsu1CXcLf5/\n",
+       "cLGLn8/n2NzcxHA4RCqVklKZKVJer1e7IXcLhpmT9UaOMF8zEAjA4XDg7OxMcB71hhyEkKlntVrl\n",
+       "HXd5eYlvfvObikQAoJExSe7hcFjDA7PZjNXVVSEHPp9PO1skEkEwGES329W4mp4diyVBMBgULk5H\n",
+       "f1rzUoKVy+WQSCRUgvBUYh08m82wtbUlvjWpqUQdaEVLC2GauZB4tLGxAbvdLqNELnRuBGazGcFg\n",
+       "EO12W7503/rWt/BXf/VXr72efm4N4Bclxv8J4Dfn8/nl4t/N70ZP/18MqH/079jscSchl8LhcMjQ\n",
+       "hOR9h8MhMxhmBrbbbfj9foTDYXXwhJNYlgSDQeHJ5DuTgE+iPnP3GNLTbrflSkSXJZqfE9NmZBux\n",
+       "6FqtphKCBB5yRAiPAVDOISeDoVAIhUJBJQRppDy1eMq4XC7t9AaDQYGgjICgfnE4HEoqRqEAHZWG\n",
+       "wyFms5lI/Uzgury8VMDRzc0NKpWKsGbGdHD3rtVqmnbSiuF1r5/LYjYYDCbcLeQ/ms/n/+GLL9cN\n",
+       "BkPki7+PAmh88fUygOTCtye++No/uD7++GP8+Mc/xt///d/j/PxcNSvrvFQqpVByGmm/fPlSdSZz\n",
+       "TjhsKJfLkuRTjcGdvVarYT6f66FhA+V0OpHJZNDtdpFMJrVIybdgI0hZFXcvLm6n06lcwevra+22\n",
+       "0+kU+Xwefr9frzebzTRtI/OOMq/FqGGiO1TceL1e6RrJX2b5wBRV2oqxZuaOTliNfBEOZejLwclh\n",
+       "s9mUH14ymRRzLp/PC4NnQ3h4eIj3338fZ2dnD1pXPw80wwDgfwdwOJ/P/7eFv/q/APyrL/78rwD8\n",
+       "h4Wv/7cGg8FsMBhWAWwC+I//2Gt/97vfxXe/+1289957WF9fh9ls1iJi88JO2mazyRibPAfu1vRi\n",
+       "406zmHe9tbWlv6f1AO2uAGiqxbJhEYaLRqMAIJsANmyEqzj2Zh0diUQ0YGF+YK1WE8JC+wE6CwEQ\n",
+       "8uL1esVUYxwFvfZ4wiya5ZAlxwUKQDAe/ZoXp30MBeLCpIKdOYRut1tYP/+O6AYHKhcXF0in03jj\n",
+       "jTfwne98B0+fPn3Q2vp51MzfBvDfAfjcYDB88sXX/g2A/wXAvzcYDP8DgByA/xoA5vP5ocFg+PcA\n",
+       "DgFMAPyP83+ChE2CD8lCzL6LxWI4Pj7WzVp06WT07qIxCy24SL1kWVEqlaRqZijPs2fP5BTE8oaW\n",
+       "VUajEcFgUDYATLciU44LL5vNIhaLoVgsyqX/9vYWR0dH2rEZJ7G0tAS73Y6joyNsbW3BaDSi3++j\n",
+       "0WgIFiOB//LyUlyLdrstLnGv1xNJ6fb2Fp1OR/TXUqmkB+Gzzz7D6uoqyuUyms2mpojkbpBkJGAf\n",
+       "twAAIABJREFUXy6X0Wq1RA01Go3I5XLY2dnRiJoUVZ6CPp9PbMF+v4+VlRV89NFHD1pY/9kX83w+\n",
+       "/7/xT58I3/0nvuffAvi3/3+vzc6aRnzdblflBN2GLBYL/H6/HOpZNnzxc9DpdODxeKTTMxgMSlIl\n",
+       "2M9BwKKUyOFwCJpjfVqtVvHkyRMpM2j8wikep2wulwu5XE71Mz3xms2mLGd5qjCmjNNDmpFT3Gq3\n",
+       "2xWISSOWZrMppTb9NJjbMh6P4XQ6sb29jfPzc/UX19fXSpCiqQ6RGVokMJNwOp2K9ERONi1sGTFM\n",
+       "l9HFnZxCXkJ9RGNe93pUSpMvMuRgt9txcnIiHJdcB8JmDKW5uLhALpfDG2+8IViMOrV4PI6joyO8\n",
+       "8cYb8mVjx06Xei5Gj8cjKI35KXTr4QImf6LVakkBs7y8jFQqhVKppHqUjLJ2u63kKdqF0dHIbDbj\n",
+       "9PRU3iDX19f3FBvMRqGNVy6XwzvvvHNPZZ7NZrGzs4OjoyNJwTKZjFKnaMtFBIh+dhaLBePxGKPR\n",
+       "CLFYTIgNfTQ4STw4OBAcaLPZcHZ2hlQqJToscxqbzabq7ouLC/z+7//+V2bjACT5pwMPdydip/1+\n",
+       "X9Mr8mrJfONCDYVCwllJPL+9vdUOmM/ncXFxcU+Kz0aRr8shS7FYxM3NjRYrORg87imGpZXA5eUl\n",
+       "Tk9PFVlMKI/KZUZEMMlpNpvptQAIxisUCvKKBiAfOT4UXJw8SQjd1Wo1ABDfYjqdypKApCsGeZJF\n",
+       "yH/Pet5qtUroenV1hYuLC+HHZN1xQ6G9AAn95G+87vWoFrPT6YTT6YTb7QYADS0WrQO4s3HETM4F\n",
+       "ORaTyQT9fl8LmiJQk8mE29tbPH36VFl2dDsKBALaOROJBCwWi2J6rVYrdnd3MZ1OJUsKhUK4ublB\n",
+       "sViUupkB7NTVUXVNSyxi4LSoJRLAnZtj7mAwiF/7tV9Tbgo1f4twGxtg4O4B4AApnU4jEAiIuE/H\n",
+       "VPKSKcliY8vmjs0o4xy4oMPhMJaWlhSLlk6n5S1HPz3SUxfzBV/3elSsOTLM+EFarVa8ePECb7/9\n",
+       "tuq509NT3N7eIhQKCRajYplw1+3trVwtg8HgPTfNSqWi0oUBkJTtLy0toVQqCYWgK+bh4aF+PgDk\n",
+       "cjkd6yyLzs7ONP3jjSUXmqPpVqslGI0m3zzaiV3P53P8+Z//uXR9jL148eLFvTSoRYst7qyLHnZv\n",
+       "vvkmCoUCCoWCnPTtdjtyuZyQELIQ6SGXyWTkMc1BD6eshOqonVxeXsbBwYEyWtrt9r88aO4/5cVd\n",
+       "mbvd5eWlrJ84cuYY1e12i1REz2LyoGnFCtx14fSlowplNptJ0c1deXl5GePxGLPZTCgIEQ42eTRs\n",
+       "XPTR6Ha7oqNykdJmy2Kx6Huur6+VOEvaJ49yOp1SGNtsNjXm9ng8aLVaWF1dVRA8ADWCTKdiL8Df\n",
+       "bdFnhEw4CgQikYiwZdolENlYWlrS5HUx74VOo6Tccgp4fX2NZrMpIfFDrke1mBknzKxqZpBw5yTX\n",
+       "FgDy+TysViuq1apyOrgQer2ejm3CdoVCAcPhEMfHx3C5XFJqAFB9SmUIxavdbleK7H6/r52Xk0Bq\n",
+       "5yKRiFQZtBXgzsdGtV6vo1AoKHCI3G1yT25ubnB+fi67A9bUDMM8PT2VmQtrfnIkWOIQ3+bwg2aI\n",
+       "JFkxqKfX66kWprspkRUOkF69egWTyYRGo6EThaofg8GghpXcED5YD7ke1WJ2OBziBOfzeeGu1AXS\n",
+       "9GQ0Ggle4k4VDofh8Xi0eMnl4BFLUSZdf0iSYRAjF+vy8jLS6TS8Xi/W1tbkl8yGyWKxiGnGq1qt\n",
+       "qn5krC8XOuvpRCIBv9+PQCCgkEp6dxDm293dBQDEYjFRTVnDklttNps1+OFAhYobUmG50LhTj8dj\n",
+       "xGIxTCaTe/0IbcrYQ7BkMxgMgjvj8biExDRD56kyHA7lnE/W4kOuR1UzM7z94uICm5ubiuH1+/2o\n",
+       "VCqYTqdIpVJoNpuw2+1477338Ad/8Af42te+pskWiTlWqxXvvPMOXr58qeQmkmbi8TgajQYSiYQU\n",
+       "GJwiUgnOCDHgDk1Ip9Pq3OfzOdLptP4tGyaqyvkQsflivQncuSP1+32Zs7hcLnFN2PRdX1+LTGUy\n",
+       "mfQg0laWkWxUquzv7wv3ZV633++XPS/9OYLBIM7OzmSzsLu7i7OzM7mL0v3f5XJpgDIajeRnQoUL\n",
+       "J4hMAbPZbAiFQjg6OnrQ/X9Ui/no6Eg7R7lcxurqKlqtFoxGo3BncpgHgwFOTk6Qy+VQLpfV/Fmt\n",
+       "VuTzeUQiERwdHaHZbKJer8s7jTed9TBrWDYy7Mi5OzmdTlSrVdWGtCe4vr6Wsz+TSVl78uexdiec\n",
+       "yAUN3E07vV6vXqPT6eDs7ExpVUQx+v0+2u226m9yt/v9vtQkLKHoukQHVRKhaJyeTqfFua5UKjrp\n",
+       "Go2GBL38jIvFItbX1++VVxzukILL5pnQX+6rtKkvL2aE0FOYQweaYpfLZfEFvF4v3n//fezv7+P2\n",
+       "9lYeauTlWq1WHB4eaqoWiURgNBqRSqXQarU0FTMYDHC73VJTkHfALp/iTvJ22RBdXl7q/3u9ntyF\n",
+       "5vM5otGodtbRaIRgMCgoiwoWckIoOuj3+0IZer2eFmG/39e42mKxqPSgb7PT6ZSZ+WAwuOdtPRwO\n",
+       "Bc8xQZZU03Q6rfE98CWSxJKMfBKbzSYHVd4jytGKxaI+AzIDH3I9qsVMLHaxM+aImLUa67PF3OdF\n",
+       "X2fipotWUoxNYBLpdDqVdQEtBBZTT7lAyAF2u91yx2RmCJELvk9GCy+OsTn0WST+cFReq9VU1y7K\n",
+       "mkjr5DSQfQObWQAKAaIFAut5ci46nY5EBPzdKO8iYYmRGre3tzAYDEry4i7carWUo0jvjX6/j2q1\n",
+       "Kq9sDrRcLpfEwA+5HtVi5k5jNBrlzENne+KdrHsZWcAPlz4Qi65DjEQYjUbadbl7D4dDRKNRrK6u\n",
+       "isLJGjcYDAIAXC6X3PLZ0NEAkQ+IyWTC6uqqyoVFqigHDMRrWTIBEDne5/NpQZCFRyd9vj4d+Umm\n",
+       "L5VKGqhwcETIEcC9FAEmEbDfMBgM8Pl8Ut6QGMXGjzs7SxZyqAkjUg5mNpsRCoXUN9BQ5yHXo6qZ\n",
+       "uZi4IGgcSEvaXC4nWT4XSTKZRDgcFkIxn88VDE/bK07alpeXxfaiTwSRDx6vS0tLKg04vaMqhLnY\n",
+       "XNDkKtCrgm5FVH4Ph0NhuhcXF+IQB4NB1ZnkYBPHpcqFtlxkxgF32dSJRAKZL+LNVlZW1Jh1u12k\n",
+       "02nVwmx6J5OJmkFyukejkax70+m0jCdZKrHBY0m0u7srYQSnhBQMn56eIpFIwOVyYX9/H++///5r\n",
+       "3/9HtZhJPTQajRpgEB1oNptCMcg5IPYM3CEhxKdbrZaQER6n3DkWw3rICiNaQNnT1dWVOvv19XVp\n",
+       "9shj5uJvt9vaDdlw3d7eagfjrk5sl2YqwWBQNrvAndHMq1ev4PV6hZ3zQbq8vMTV1ZVQg3K5jHK5\n",
+       "jGQyqabs1atXKj2m0ykODg6wv7+Pg4MDRb7lcjmpz6mPJFpRqVRkcMOypdFo3HNIpQSt3+/rpGs2\n",
+       "m2IQjkajB3kzA4+szCAHgEcaR7A0c6Fae7Hmo2cwFRgANDRgKI7b7dbomimwtJOiSpuDDE4OqQFc\n",
+       "bABJdg+Hw/eO9dPTUxGU6H/MI5tQFk8IIghsasmRJueCVmMUCzCn8OnTp1KbJBIJDUfYfBkMBnQ6\n",
+       "HR37w+EQ6+vrGu9HIhHRadnAcbHS29lgMIhzzQkpKZ/kX9DHg5g2zdWpmH/Q/X/Qd/8zu+r1um7S\n",
+       "eDxWZgenfixBaB3LnYSqCY6Db29v5dLT6XQ0bQMgGIvKDzLlaLXFUoKZ2awnCYsNBgMtbKIBDocD\n",
+       "Nzc393zyKIWiUeHNzQ0SiYRIRyylxuOxHtBOp6Nyh+Y2HHU3Go17sRDMxmYJwnCixei0brcreI/N\n",
+       "LQDh6dQCknXIXXY+n6NSqYgeQASp0+nIuuHq6upeP8LS7CHXo1rMq6urMuErFAqIx+PodrsSnp6c\n",
+       "nIjbTGJQuVxWXghDZkjcp2SoWq1KPcJdn6VFMpmEz+cTZk0G3vLyMtrtNpaXl5HL5RAIBDTZy+fz\n",
+       "+PTTT5XNTZ3g9vY2AEj1QeyXRo/n5+caQ7Np5dSNPnuE5hi9TMrqzc0NyuUyrq6u0Ov1NKjhKdPt\n",
+       "doWjMw44l8uJy1KpVGA0GlGpVOByudDr9VAsFjV65/dQGADcOTidn58rANTj8SCZTOrnEvPm+2UC\n",
+       "1etej2oxc7TKLp+7InkGZKPRZoAUS4582ZFPp1PdGIpJyW5j5C7DdBhFRhcfwmFUaJAHzCOYOYAu\n",
+       "lwsABJ1R8LmIQrBJ5ciaDwqzwC8uLuSBQV4Fa1e+X5ZKixEThCT50LEJpfKaDkcUtVJDSaSHqhmW\n",
+       "Nnx9qkz4fii4pS/JYpQcSyzu8g6H46uAnsWLDDgGQVqtVni9XuVDUzbFiIXt7W1ZaNEgcTqdYnd3\n",
+       "V8E+sVgMfr9fUz86hXLxcpchIkFaJRGBTqejE4N1Msn2HFkTuguFQuh0OkJh+CCwObTZbIjH42i3\n",
+       "21hfX1dYJ5GJ2WymMX4qlZL9F1UxVKYTRuR4+dmzZ/eYbRzFv/nmm8Lb+VBsbGyI+01UwufzSYNI\n",
+       "jgZPJQYBjUYjJJNJXF9f69Qh1s3f8Tvf+c6DRtqPamfmolg0BqSQlFlzk8kE1WoVKysrEn6SqM/6\n",
+       "stlsit98dXWFFy9eKEqBtSEneDabDYFAALPZTP4XhNeur6+RTCaRz+dVO3I8TfOWbreLUqkk/JhH\n",
+       "OC14Wc+zVGJoJadqtVoNw+EQZ2dnsFgs8j4mw67Vasmckbss86zp8FSr1dBqtdRLcLyezWalsnn5\n",
+       "8iUqlQqKxSLMZrNOJEKItFmgTpInEetwIjzz+VxlBZtdNqIP5TM/qp2ZsiiDwYBf/MVfxGg0QiKR\n",
+       "0FHMJCUqKRgQ4/F4YDAYUKlUFJHgcrlUUqTTaXX8s9kMfr9fWjpOzajmsFqtOi5p5RWNRqXHI0uN\n",
+       "zvoAsLGxoVEyd3oAGs1Tlu/3+3F5eSnTF7LOqFlcWlpSnDAALRxKtbjgWq0W4vG40BFCZ3t7e3j5\n",
+       "8qUQG5PJhPF4rFhgmkuyWSVD8Pb2VsoVDof40DNjnBkqLGkcDge63S663S52d3c1KPrRj3702vf/\n",
+       "Ue3MrBcHgwFevnyJyWSCfD6P0WikepTYKLm+bI6o1DCZTKjVauLXEmpjmORgMEC1WhWiYDabtdNx\n",
+       "vMtBhdPpFE+CyhMeszy2iRZwRM66nlg00RQGUXLH5+KgiTdtAhqNhnZrq9WKZDKpo547JbWMRFnI\n",
+       "ISE+zSB51sr8GSxLuBszEpkedQzbASBn/0gkcs+/j2Lfy8tLOBwORCIRTRNp4fu616NazDabTQuN\n",
+       "OzQ7fTYtRqNRxoKRSAQABK+xCbJaraovibtypyYXgTwL7kR09+TpQJtaypMACDYbDAZot9sIh8Py\n",
+       "sSDLjkcw1TKsX/l+2GQFg0HVrnQ74g7KB6RSqcjVkzAka1VafxHLrtVqaLfbSo4yGo3iT3BX5qCE\n",
+       "ggLCdW63W78r+RbkuBDy5AbCySVxbXKqCZE+5HpUi7larSLzRYrT4kKmTxp3NcafdbtdvHr1SkGY\n",
+       "9E7j8GE0GiGXy2EwGKBer0sQC9xZBtBFs9frIR6PazzN5i4SiWgMzN2JCEgwGMT5+bkWJjFyRk1k\n",
+       "s1mNmTkip/v+5eUlarUaKpWKVB5msxnFYhHNZlO7XjKZVK3Oh5hDDQAav3PHBCCjnMlkgu3tbcxm\n",
+       "M2kBOdanxQKbw3q9Dq/XKzIVJ6GUgBEGZQjSbDZDLpeT+p2G7g+9HtViDgQC6qhp1kc4ik0Vechr\n",
+       "a2s4Pz/H1772tXvsr8lkgs8//1xYKamSvEksWRa5GYFAAPV6HblcDul0WlAf4S+6edJTw+FwoFKp\n",
+       "aAE8f/4cgUBAzD4OYDju5u54fX0tOihra5KHbm5uhDPTW/rs7Exyf4fDoV2f5CGy4BahOYpLDQYD\n",
+       "crmc/KYZFrpYlrEUInYPQPU0m2meCuTD8AFKp9PI5/OiDwD/Ap3z/1Ne9FJm3QxADphsqjqdDvL5\n",
+       "vIy4Of6lbKfX62lnYQ1Zr9eVyX11dSXjQk7ygC9D3klIInuMN5blB2/YxcWF0I9FY3HW6eQqc2cn\n",
+       "T5rSp1qtdi8QnsR2q9Uq6iaRgm63K5ISHy6WL6PRSGNq+twxQo2+ILlcThEWy8vLqNVqwuCJSjBo\n",
+       "h9Af/7w4kaTqnbwWOjU5nU71Ig+5HtViJmC/qAJmDUlkgkaFBoMBmUxGpHpOo+jHTIk/m0Lu8GyK\n",
+       "6LHhdDrVHFEzR0yaU0KbzaadnMMWppX6/X6EQiFxkFkaud1ukZJo+E1UgtnfixhyPB4XnXPRrXMx\n",
+       "wJIPCG0S+HsyB8VqtQpa5GJfWVlBJBKBx+NR3AUxavIt+ACSB86SZXHQwywYNpGcUDIGmU6lD7ke\n",
+       "1WKmspkCVlIeOW1j00YvYxoaTqdTPH36VDtHMpnUrpnP57GxsaE6k8aE5C9w0rWysoJ4PI5Op4O/\n",
+       "/du/FTmJRB36UbB8oRSfEQtsCvP5vOREvLk+n08PBJOjiCgwB4WMuPF4LOSGxCXuzFyg9EOmap0O\n",
+       "RDR6pH6PYoWLiwuN+DnoIJHeZDLds9alSxENxPl14uTkr3Dgwt7joQsZeGSLmbASox4YLk43eKo0\n",
+       "qMW7vLzzOLfZbDg/P4fL5YLP5xNhfz6fIxgMyjKLRy89Meiwz5RShuPs7+9jNBopa4819NbWlhhm\n",
+       "9OPg0IayKE4lSdjhkUyhbiQSQSaTEb+CqaYUsTJl1mQySenCXXE4HGpX93g8mnKSwcYdk4udu2oo\n",
+       "FBJGzPqcDDtCkfTvo1BgUYzA7OzFv+dgyeVyweVyqcd5yPWoFjM9KiwWCzY2NhRxwBg1t9uNVCol\n",
+       "9tnm5qZG4IlEAsAdPvorv/Ir8Pv92NnZQbfblQs8oTzuRExSpWNmIpEQwkHuBx8k4A6r5m5MI3KX\n",
+       "y4VIJCIvC9bOJN7XajVEo1HM53O88847yOVyKJVKsv5iGP3a2ppKEWoBiWZQwUJLACI1LpdLll92\n",
+       "u10cZ54w/Mw4rWSDabPZFNlM1Uk6ndbn4nK51PyxtHO5XMLLGbUcj8fRarUQDoeRyWTw1ltvPej+\n",
+       "P6rFnE6nAUBUSx6pwN1Cr9fr0r2xI6cFVzabRbFYRCgUwocffohGo4FsNotEIiEuM0WiDI+nWTeP\n",
+       "4mKxqB2zXC4r42QxCIgK5efPn2t3Go1GCIfDuLm5EbRGP7pgMIijoyO0222USqV7tgX8d9fX1ygU\n",
+       "ChIN0LTl5ORETk7T6RSlUkljeJZHNGphmXN5eYlisQiv14uPP/5YueOkbXLo0m63pWp3Op04OjpS\n",
+       "6A/jhulD3ev1lAvOCaTH4xGpv9lsot1uI5vNPuj+PypL29/93d+V7dT19bWiFDKZDPL5PCaTCQKB\n",
+       "gBxA7Xa7FjN1buQ3OJ1ODUwcDgeq1aqaMrphcuJF/jN5yYTlWLMuktSJUDC+12KxoNls6vuvrq7g\n",
+       "crng8Xhwfn6uOpdoCSeHALC7u6sRPYMvfT4fisWiIorZqLI8WCTIc3hEezK+7tXVFUKhkAzEq9Wq\n",
+       "6ulF5IM7O8lG1WpVJjaMkgAg03OPx4NoNCrx6mQy0YLmfz/E0vZRcTMod7+9vdWxR8kSieLZbFZm\n",
+       "KMCdlxwzoEm8r1arcLlcOD09RTQahcPhkGUtE1YdDgdarRYcDoekWOQEA5CZSiwWQzabFSRHXzoA\n",
+       "GnszNoxDjX6/j1KpJDEo9YSLkWP9fh8vX76Ex+NRucOpIQW7FCYQ/uJAiIQs/v7Ly8vIZrMKJKLt\n",
+       "FxtYckPoo8e+hIlbJCKtrq7KQ4PZJtPpVBpGk8mEo6MjTQHZILJpbbfbD7r/j2oxc7TK5CKHw4FU\n",
+       "KiX/s5ubG0SjUUl/kskkPv30UxiNRglPWU7YbDZ84xvfQK/XkxyKI9xAIIBGo4FwOIxIJCJ8lLte\n",
+       "o9FAJpMRu45UTbpt0lOCdrQ+nw9Wq/Wec3w8HhfXl5Iqejqz/DAYDIIAidDw/dNf2mw2o9PpCKFg\n",
+       "o0dif6lU0o7LwREhtkwmoxqbdNVer4dUKoVsNov33nsP+XweJpMJOzs7Kh+oyqGo1Wg0IhqNij89\n",
+       "/yLIMxgM4vj4WPfpK6+5hYv2ADTpo6UtfedOTk7QarVweHgIAPjhD3+IUqkkUhGbuXK5jMlkgr/7\n",
+       "u7/DYDDAhx9+qAHIaDSSJRUX70cffYTb21s0m02VI8xVaTabcvHhBPLw8BA/+tGPZDrI3YxWs9Pp\n",
+       "FCcnJ6jVauJg012oXq9jOp0qUctgMKBYLKJSqUiaxJ03l8uhVqvp3zIznM0gd8disags61KphEKh\n",
+       "gMFggE6noxPi+fPnyGazyOVyqFaruL29xU9+8hMl4h4fH4tfMZvNcHp6KqaiyWTC+fk5Pv30U3FL\n",
+       "SMI6PT1FuVzG8fHxg4lGj6pm/q3f+i34/X5Uq1UlONVqNaTTaU2zBoMBKpWKSOY8iuPxOA4PDxXs\n",
+       "SAn/8vIyksmkGklK+GnISNir0+kgFouh2WxqBA7c2R8Ui0XBhoPBAPF4HIVCAdvb27i8vES5XIbJ\n",
+       "ZMLW1haazaZU4NPpFOHwXbbnYpYKoUVmllCAy1E5f+eXL1+K60FEAYC+32azyfOtVCphd3cX1WpV\n",
+       "aaxHR0dyZgKg9AE2tPxcV1ZW0Gg0sLm5iUKhoF6DJxjlWFar9Z4SZdHckZHNf/iHf/hVzQxAzDf6\n",
+       "lq2trSnmloMJCjHPz88RCoXwZ3/2Z/jVX73LliedczQaaVGZTCZ17AT2uUszDy+fz8uHjUMW1pPE\n",
+       "bkmyp46O2HYkElEA5tnZmWpdlkyFQkF1cbValYkiSyUiHL1eT6NkckKCwSAGgwGurq5E6uGEE4AG\n",
+       "L9zhT09PtasPh0M10KSA7uzs6OuDwQD9fh8OhwMnJydwOp149eqVyplXr17pdTgm5/Tx5uYG/X4f\n",
+       "T58+RS6X04T1q5p54VoMs+EukkqlFN3LuAOSyff29nB8fKwaFoD4DCTxl8tlDR/IxWWcmtlsVoNI\n",
+       "hlgikcB4PL5ndsKMPnpnhEIh8Z/JQEskEiI00V6MMb0Oh0PvmYzAaDQqewNmsTBygegIcEe+mkwm\n",
+       "8Hg8SnPl7ur1ehWvTHSHo3aiIA6HQykDrOlJed3b25PMazHLcDwe48033xS5iT+XsKjP58PGxoYG\n",
+       "O8T4H1ozP6rFDEDTs9lsBofDgdwX0b80iInH4+j3+5jNZvjZz36GbDaLjY0NMdDy+bwWzuHhoeTv\n",
+       "3F1MJpPCeqbTKc7Pz2E0GsW96PV6iEQimhAC0Gic3hKVSgWXl5dSf5MC6XQ61UCdnJwICiS+zZ/Z\n",
+       "6XSkR2T4TTablVSJC3fR9bTRaAiiI5easioqSBjOyQXOJrHdbovlN5lMkEql0Gg0cHx8LN0im91n\n",
+       "z55hPp/j5cuXiMViorVeXl6iWq0ilUqJOx0KhdDv9/Hxxx8jk8koIOh1r0e1mI1GI2KxmNhdixke\n",
+       "oVBIx7DX60U0GkWpVMLq6qqIRgys5Gu53W6B/JQj8Uin0npR7UFbLZKRKEeinIqOlxyVr66uiqcQ\n",
+       "iUQ0bjYYDNjd3RV5x2azKZ9veXkZq6urKicIjdFtiBNPwnckQFEpHo/Hpf+jqaPf70en00EkEkGt\n",
+       "VpMsi7tzMpkUhOZwOMRNoR9dOp3W4qRCJRaL4fr6Gi6XC6urq8hmswiHw3A6nQiFQqLUXl5eSgWz\n",
+       "vb2NH/zgB699/x8VmrG0tKQMkKOjI3Q6HRl8UzTKHYSRaAw05/96vR5evXqlWo+qZ5JyuIB4rNJU\n",
+       "hhNFst4YbEPftcXwn3g8DqPRiGKxCI/Hg3w+L4I8hxilUgn9fh+5XA7n5+eo1+tSbVgsFiEq5JdQ\n",
+       "rTEej9FoNDR1Ix86Go3CbDbj+PhYBCrgDs6s1+uaCpICS4HAbDYTmalQKOD29lbTRzLgms2mkgRI\n",
+       "bT04OBDllcT+eDwuTPzk5ASz2Qyrq6sAvgysf9D9f9jy+ed1UYt3fX2N1dVV1WCcolGhTbmTx+NR\n",
+       "Ph6VETTwBqDdhznZvJE0bgFwT1VNpKPRaGhX5i5I6RbJ73a7HTabTYmnXEC8oYuOQaRu0g+j2Wwi\n",
+       "EAiIUM9aepHuSrsrlky5XE7c4m63K4mVzWaTGyizXMj9ps0tveZWV1dRr9c13FmklfLzo0MqR/4c\n",
+       "cdPgnPwT9gGVSgVms1lj9Ydcj6rMoGKZww1yeVkLkgvB0S5wl/8BQIOLarWKUCgkByPulG63G+Fw\n",
+       "WIuFlgY8/umSD0BGMhx0UNHByRmhKA41aPfFGpsjdRLx2Rim02lJoviQ8Pgn14L6Pu6Uw+EQwWBQ\n",
+       "pQ2TV8nko0CBfGpaHTCHBAAGgwG2trZQKBSwuroqMevt7S1cLpdOFH6mJC3RNIecZWLwVMOT1ETF\n",
+       "SjKZfND9f1Q7Mx2KqtWqMu2IGhiNRh17nELRfRK44w80Gg3tmJ1OR2UJd18ORrjD0MlnOp1qoEHV\n",
+       "Ch3xeTGfkKUKCfFUx3DHZpQaU2VZOnBnB+5OoFwuh/l8LnOY2WwmxGBRpe7xeNDr9bR4WeeSOVcq\n",
+       "lSRoAKBShoJU5mMzP2Vxs/D5fIIvuVFwbtHtdlGpVKRdbLVaMkYcDu8yzk0mE8rlskbrvBevez2q\n",
+       "ndnn86mxuby8RCwWUxd+e3uL09NT0TgZ5P7ZZ58hlUrJ7op149ramkJziAxQ+Q18iVBQBV6r1dBs\n",
+       "NuH3+xGPx1GtVmEwGLCxsYFyuYx6vQ6LxSICEtl1rFX584PBoFhr3OnoqcG8QJqb1+t1IQX5fB6p\n",
+       "VOqeOWSz2USxWBTfArg7varVKsLhsCRdlUpF8WmMQ1tdXcXx8bHUJq9evUIymZSggQ9xLBbDYDBA\n",
+       "LpdDJpNBsViEzWZDqVTSUIhCYpY4fB8vX76UqY3L5cKLFy8edP8f1c5M61faW5FTPJlx/k4sAAAg\n",
+       "AElEQVRMZA1LRyNaxL777rvqwP1+P+x2O8rlsoSZRAVIKieLjgR07lwsH1ZXV7WjZTIZABCbjGUD\n",
+       "v4fQG4/i9fV1OJ1ORCIRWCwW9Ho91bGkj8bjceG9NJ6hkU0ymZQZJG3CFqd8FMtmMhmZsNMujG6i\n",
+       "bBgBaLdedCtiWP1in8FpH+0S3G43LBaLdmtO/KjmpnYxFAohlUqpnOEO/brXo9qZecROJhPs7++j\n",
+       "Xq9jf39fzkB2u10UxMlkgm9+85v44z/+Y2kBSREF7hZaMpnEaDTCW2+9JfdLq9WKUCgkMhLrXNa2\n",
+       "5+fnSKVSCIfD6Pf7sFgsiEQi9wwVKRZg7ondbke321XtyiY1nU5LxUHlChEWwnPM7CbLzWazYX19\n",
+       "HSsrK/D5fIq0oBiWvhf0ox6Px4ItOYnj0GZ3dxfxeBylUkmLlEMVDox6vR78fr8aYp6Oq6urGAwG\n",
+       "8Hq9uLq6gt/v18Mci8XUuN7e3iIWi8FisWB3dxeffvrp69//hy+hfz4Xa1kAohrSI+Pi4gK1Wg3n\n",
+       "5+ci2z9//lyj5m63i2KxiE6nI8X28fExjEYjyuWynHqcTqfIPBw9k9JIC9p6vS6XHgAapxM+A4Dz\n",
+       "83PRK2u1mnZ82hcwJYr/hg8qFzZJ/IPBQO+fTqCkldI3r91uo9frodFooFKpSIFDbjUht4uLC0Ft\n",
+       "V1dXOD8/v/d7sE4nS/CTTz7ReyP3hb1CPp8Xz3qxhON7orKGvPBarfaVCczixcbH5/NpFEu6ISdX\n",
+       "4XD4Hjc4FotpzErVBhGBxeAZo9EoV1GLxaJjn/9N+iQlQMwCoSZvUTlNLzuiJGx8zGYz0uk03G63\n",
+       "ShjyiGns6PP5VNvSkZTHNodCHJ5wlG6327G1tYV0Oo1IJIJwOIxisSjeCrOyV1dXlfcHQNPPdrut\n",
+       "E2tRyLu5uSmeRyAQuNeIcgEHAgH4fD54vV6dkORsUDLGrBj+jNe9HlWZQSVFpVJBtVrF+vq6uumr\n",
+       "qyuVF8SiaQ5Oji5z6er1OhKJhAy6yc2gJIhqEDZdHF1zYEFYrVAoIBaLyfuNmC/pkZwu0ouOo2cS\n",
+       "ibi7UmnSarWwt7eHQqEg4hEZeqVSCclkEgaDQaw40kfL5TJOT08xHo+l2eMDSGcnn8+HbDaL8XiM\n",
+       "k5MTvPPOOygUCkJV6MvB0HamyTJEk8iR2+3GxcWFNg265g8GAxwcHKgBTqfTODs7k41CMpnERx99\n",
+       "9KD7/6gooL/zO78jWI27FfOoc7kcRqORSEI2mw3Pnj3D97//fXzrW9+ShJ4RBk6nE0+ePMHZ2Zls\n",
+       "CxjKHovFZBBD6IuUTw5X2CCSief3+3W0cuelrIjTReBLc0EA8rCgkY3VasX6+rpIU2T4UY3O0oEB\n",
+       "l5PJBEajEe12W+lQXq9XZUkymcTJyQmi0ahG53x4KJuixzSHJ8fHx1hfX0elUsHbb7+NXC6H6XQK\n",
+       "v9+PVqslVh7jMGhKTlIRbQfo3lQqlRTddnh4iO9///uvTQF9VGUG67fr62usra0piqHX6wmQNxgM\n",
+       "ytOgyoFMLgpSDQYDQqGQambuqCwvSqWSeLjn5+cSyC7Gkd3c3AjDvbq6kvdwv98XdjwajcTjpWMm\n",
+       "LWIZZUFGHSmnvV4PpVIJtVpNUivi2s1mE+FwWDERHF0ToqOdLrH2bDYra1367HG8TSHu/1vtQryc\n",
+       "TMTl5WWsrKzg6OhIxCI2s4sj6qOjIxk38pTiQ7+ysoJisfhgq4FHVWbQPPD6+hoHBwdIJBKo1+vw\n",
+       "+/04OjpS5hwAGRNSElWr1TS1YxNERh2nh/V6Xd07A3y4QzLyl75u5GrwlCC1lNavrVZLLvfkRRMT\n",
+       "ZtRDq9VCqVRCuVzWwuDrcpLGZpQiVT4szWbzni8zldVEM/gadNfncc/TYjQa4fz8XNwJn88nvz6n\n",
+       "03nPo5kBPp9//jn8fr+UJpubmygWi4LnqJIhYsJm3OPxwO/3Cwp83etRLWbuwHSo5wh6PB6rWeFC\n",
+       "o7qCC5XCS/pjsEEZjUbK3vN6vUilUjg9PYXH40EgEJDmkHo3m80mToTRaEStVpP5CUe+5F2Qgced\n",
+       "nObo5C/QJNHhcKDdbuuUCIfDmtrRO45+db1eT+PrdDoNs9mMbDYrbPjdd9/F+fk5HA6H4ERqABnn\n",
+       "QBuFRVuv8Xh8b+TNMisej8sb2u126zNhMPzGxgZ2dnbw6tUrtFotRCIRWK1WRbiRsOTxeOB2ux90\n",
+       "/x9VmUFZDj/ITqcjfPj8/FzwWLfbVWhPNBoVrNbv9xEOh/Hy5UtcXV3J8w2AiOs8TjlqJtmHO3O1\n",
+       "WsVgMEC320WtVlOHTvhuPp8rAIg7IZ30XS6XIDc2gJ1OB5999tk9tTYXD4lALIGoyuZono0Xifjj\n",
+       "8RjPnz8Xjxm4I0iVy2VFU9TrdTXJlUoF8/kcVqtVA6RmsyknJvpnkGgEQJ95uVzWg/7RRx+hUqkg\n",
+       "FoshEAig3++rIY9Go/Lrq1QqD7r/j2oxkxzU7Xbl5BMMBmVjRa83RpZReUxPYpLtCbsR5spkMuIf\n",
+       "mM1muQjR7IU8Az4cnI4xLoKdv9PphMPh0E1mBh5trOjvRi0fvYxZBxM9yOfzikKmYTlH6wzTTKfT\n",
+       "etjMZrPqd/qDcOqXzWbh8/mk2I7FYkKFEomETGUYJEQdod/v1+fBh4OQIA0TSZGNRqMiJDUaDSV7\n",
+       "TadTYdc0jXzI9agWM+mcW1tb8hEmVDUYDPC1r31NZic8pjnOpa8zACEEnIrRe9hoNCIcDivAnJa2\n",
+       "GxsbsqXiA0RxgMfjgcVigdfrFXuPMqR2uy0XT9oDMMC+UCjIPZONFydzW1tbqn25q7KMqdfr2Nra\n",
+       "Eh2TnIy9vT2N6ykc4GsCd9PTvb09YeWj0UjRwqPR6F5uCYdKfB0AWFtbk5DVZDLB7XbL5ZQnCqmq\n",
+       "PJW2trawsbEBh8MBv98vL5PXvR5VzcysO0p+6G7EHZvydtZ/l5eXwnkJ29XrddWJ+Xz+3uia+rlF\n",
+       "IjvDZ2gMfnt7K484LqpKpaKalROvm5sbUSTp2dFut+WuRJ8PMvI42OCkjSbiLKs++eQTpFIpxTgQ\n",
+       "LaAZziKDcDabyUuDuz+V7FdXV1haWoLf70cul0MkEtHPqNVqwqfJByHMuJj9N51OUSgU9HC3Wi3M\n",
+       "53MUCgVxRsgt53ubzWZf+TMvXhaLBbPZDC9fvtQRbLFYEI1GEQgE8Omnn8p8hRpBSpr4tUQiIUIS\n",
+       "j+xIJKKkJ9bS3GGYh8LhBWmSNCSfzWZ6IADIgZ4DBuoHTSYTksmk9IV0z+eDR9NFeiuHw2G43W4k\n",
+       "Egk4HA5sbGzA5/Nhe3tbEz/W60tLSzg5OUGz2dTQIhgMqo5ftNFiHW6325HJZNRE+3w+rK2tKd6C\n",
+       "fA6ecicnJ4I8Wf/ToJyGi+RvMz/84uICn332mfzrHlpmPKqdOZ/Pa6RqtVpRr9cRjUa1Mzx58gRO\n",
+       "p1Mezkxj6vf78pXgjeURzV2YsboANP4m0jAcDhVGwx2LxJ9IJHLPPJz6QafTqe8ht4G7FIWtfr8f\n",
+       "2WwWwWAQpVIJ4XBYyozr62u43W5RXEnuZwPKARDJ9sCXQgTW+AaDQWNoojwcV/Mzodqb9l4sERbj\n",
+       "2qhhZIlBfz9OX3lSVKtVqcuXl5cRDocxm30Zk0w/vte9HtVi5oKhkTjN/FZXV1Eul2G1WtFsNgVH\n",
+       "sd70er149uwZXr58KdB/fX0dzWYTRqMR6XRagwm73a7jsNVqySiQdTUJTWzaOF5frJFpEcZyod1u\n",
+       "w+PxiPM7mUzw/PlzJVMRGiMyUyqV1GgBEDrD5pYihdlshuPjYxQKBbz77rvS8jEDfD6f4/r6Wr4X\n",
+       "6+vr8t8gykGEqNVqiStit9v1WdIbJBqNiqhF83JuEIydAO4mnMViUQOW0WiEg4MDxGIxfPjhhw+6\n",
+       "/49qMdPhx+l0wm63I5FIaCjx7NkzURlJByX0tba2huPjY3g8Hjntt1otyZJYG4fDYZmUM8ODvGnG\n",
+       "5hLLpvVVuVxWfAJ3P+aGcNcmh2QxsjgUCskB32azodVqyUt6c3PzXrNF9IYqkrfeegtms1nwGVEM\n",
+       "s9mMt956S3U+g4y+973voVAoKFObdE+a6EynUzWyFosFyWQS9XodwN0JRqdQlmMrKys4Pj7G1dUV\n",
+       "rFYrvv71r8sqzOVyYX9/X1PWDz/8EJFIBPF4HPF4HB988MFr3/9HVTMTfyUWSrokc+mInxLdILON\n",
+       "trB09vz4449hsVhk+FKr1WRN2+l0lPZkNpvh8/nUVNESl7EL3KHJS+DxzEVLxQankGwgr66uhF0T\n",
+       "/242m+h2u0JsWFZwh2UjS79nNoDM8qM0q1wu47PPPtPnsrS0pGhi8jg4dKJVASeG3W4Xw+EQL1++\n",
+       "FCTJuGKPx4NmsylXqZWVFe3quVxO9rpEV0hA4glWr9e/ihtevKh0Hg6H+OlPfwq73Y4XL17INHE2\n",
+       "m8lYkd5yFotF2YHM6/v8889V85I1RptZIiVWqxX5fB5PnjyRIzxjxoiKuN1u2O12fPzxx3LppEPR\n",
+       "4eGhRKZnZ2f34D6OsTnYIbb7ySef4N1330Wv18Ph4SGSySQCgQCurq6Qz+fx3nvvod1u4/nz59jZ\n",
+       "2VFqVDabxfe+9z3xRcLhMIbDIV69eoV2uy38+sMPP4TNZsPR0RG++c1v4vj4GH6/XxEQzWZTihvC\n",
+       "iJVKRQgPDWaYTejxeNDtdoX9h8Nh6TP5vSaTCe12W/yOh1yPjjXHXRiAMqvJXOv1egiHw6jX64jF\n",
+       "Yuh0Ouh2u1hbWxOmyrxpu90uk0LmcpCny1356uoKkUgE2WxWGPXl5SVSqRQqlYqGI5eXl+I0t9tt\n",
+       "JJNJFAoFlRkAdFPD4bAyRRazr+kJTXU5BaeskYvFouRiXFjT6VSfx2w2g8vlgtvtltiAJZPNZkOl\n",
+       "UsHm5iZyuZySpchXMZlMKoNIE+BnnEql0Ov1JNalZKzRaAjxIMGIlrxUYwNQ6ef1evH8+XP8yZ/8\n",
+       "yVfGicDdzhyLxcQ79vv9KJfLamwKhYIyqJ1Op/6bEqfFnJKNjQ188MEH2N3dRa1WQzAYlFrCZrNh\n",
+       "Mpno39IJkzVuu91WihRxWmLEw+EQ2WxWnGhqDEn7vLq6QrVahdfrFbRFd/2trS3VuicnJ1hfXxcz\n",
+       "sFqtYmVlRczBRVOaXC6HUCgky1zW2FycZ2dnsNlsImkZDAY5EhEeLBQKcDqdaDQaiMfjwoZZEpH+\n",
+       "yfKl0WggEAig2+3qtCH8yUHV8vKykCZmiz/kelQ1c6/XQ7Va1Q5BthnwJTeY2jcS2yklMplMMnhh\n",
+       "zEMqlRKnl69D9hm9l0mnpLg0n89rgEA4ixa19EB2uVxIp9Oy+6L0n+NfIiCMIiMSQOivVqthZ2dH\n",
+       "jZbRaMTOzg6azaZqdk74AIizQdMZNqF0dGJjNxgMBElyUMNIC/5+JCgBQDgchsPhwPn5ueA1jslN\n",
+       "JhOKxaL43X6/X4YwLH/o70Fs/qHXo1rMmUwGy8vLYmkxLZXTr1AodG9gQjSA7kCM/2Vo+enpKdLp\n",
+       "tEjubrcbsVhM3Amfz6chwsXFhVTYdNGkGeHa2hqi0ShSqZTYbMzL8/l8aDabwrj5b6ngpmiU6upg\n",
+       "MIhnz56hWCyK1G4ymVAqleDxeOR8enFxgXq9rodlZ2dHrLetrS1ZH3CXvr29xdramoj0tNpiAmsw\n",
+       "GJQYlzIvNrWpVEq8DA5crq6uEAwGtfiZyBUKhcT6m81mSKVScv7f2dl50P1/VIuZux5NANlNc3HR\n",
+       "gZNQ02w2QzqdxtXVlRACwlNMaO31emLOEa7jzez1esrr4M2hFo/5gMFgUAw2vpfb21sAd2QdSpvI\n",
+       "cWCX73A4NIl0OBz38qmLxaLyQnw+n6inwWBQ9XQymdQiouMRJ5a0CyiVSoL2KD4lt5nQ32Qy0YNJ\n",
+       "vJhTPH5m1EmyRr+8vFQf0ul05MtMiJCvSz/mZrOplICHXI+qZqY6+vr6Wjo0OnQOh0OUy2UEAgFc\n",
+       "Xl7i4uICpVIJlUoFv/7rvy4nokgkgoODAzidTk2sSqUS9vf3NbYNh8NakIT4eIN41FNKxZqTxzj5\n",
+       "u6VSCTs7O2om7XY7jo+PMZlM0O/3pZ/jcGU0GiEajYq7TFI9AHE06FrPcoW+cHQXomyKg5FIJCL6\n",
+       "a7lcRjAYVJbi5eUlstks1tbWMB6PUSqVEAgEUKlUtPPzQaOq2+VyadN4+fIlNjY2ZFDJxRsKhYSe\n",
+       "UDUfj8cxGAxQLBYfdP8f1c5sMBjQ6/Vk4UrWGtXBpF8GAgEsLy/j6dOnwl8jkYgC1kOhEDwej4YA\n",
+       "NBm0Wq1wu90yM7FarQqcpzzK4/HAbrdjNpuh1WohnU6ryaNXBCPOrFarGjY2THRIojGKy+WScTmd\n",
+       "96mAJhLldDpVBrGUYQorGXtUwBAloV5wNpshHo/D6/UKxWAtzxLG6/UqLJOjfZYQ/FxYjtzc3Kih\n",
+       "I3YfjUYVskl6KaPbqGC32+2aaL7u9agW83A4hM/nQyQSwYsXL+D3+8VQIxm90+kgm83C4XDgb//2\n",
+       "b7G2tgYAcrt3OBw4PT2V3o6xwDy+3W63EA3eVMrlOX202WyIRCLY3t7GZDLB6ekpvF4vrFarFtpw\n",
+       "OJTHB6VDlEt5vV5xQubzOUajkbJW6BzEUbDNZtPQhGjMwcGBrAgYO7G9vS3s1263w+PxCE2hbwZl\n",
+       "VQyQZ+PKWIlcLidrXMq7iOrwwSRzMZlM3nNN4jCHLEH63pF6S/+Sh1yPqsxgrBibLh7hVBszg4Mw\n",
+       "2d7eHprNpiZv4/EYq6urygnxeDxIp9M4PT2F2WxGOBwWLRSAwieLxSJWVlbk/O7xeJDL5eByuVCv\n",
+       "17G3tyeqJKEvNj0sV4LBoBht8/lcLj9EFubzuRq3tbU1KbS5k29tbcnMkE0rw+MdDsc9ORcX5tbW\n",
+       "ltTrVqsVTqdTeYBGoxFra2uw2+1CQ4xGIxqNhhbfcDiE3W5XbDDlTwaDAQcHB0IwWFM7nU6p1KlE\n",
+       "Ae5ONdrfPsTR6FEtZhoLspli/cpxM2VJDNqhHRXLgclkgnK5DKPRiH6/j+vra5RKJZGEuCA5mr25\n",
+       "ucHh4aFI+4PBQDAVYTnCTvTooPIjGAzKsJD1eqfTEXGn1WqJdUbMl7tZsVgUgy2fz99z6KdNAvCl\n",
+       "eQvxcA5K6ApKI3FmrbDxJB7OHmQwGEhWxQaOCp75fC7dIHFs4K70YePX6/WwvLys+LhAIKByh54j\n",
+       "5Eg/5HpUi5m8ZC4ujl45fWOsLlld7XZbsqjl5WVRJefzuWrHcDiM4+Nj2cVubGyoiaMglPgp60cu\n",
+       "JvKQb25u4PV65dVMvJjcCI6XGUvMEEi6jvJ9LSIrfDASiYTw4WKxiHQ6LVJTvV5XaeFwONDtdvUe\n",
+       "qL0jOYryLKZi0SmJPnKktjLKmWGda2trqFar6Ha7YtPd3t5FCweDQfE3yAbMZDLKBuT/aJfwlT3X\n",
+       "wkWXeQDiVVCbRnegVqulepMfMgWgxKVZ21UqFTV4qVRK8BWtuMiqo7pjOp2q0eQpQViQkBwjkamA\n",
+       "WVlZwcXFhZKq2Pg1Gg15JbPxY71psVgkeuXO2e12kUqltAgrlYqOd6ZLcZhjNpsVXNlqtVCpVMSr\n",
+       "zmazauD40FGvR5SFjSeRE5qJm81mkZEajYagO74WuRu03QUgMtViJvjrXo9qZx4Oh/D7/crZm81m\n",
+       "UpKQxEO5FLFTt9stmiUX3dOnT3F9fY1oNCqDGHoa83gkErBYCwJQxl0ikZAcPxKJKCaN/IlEIqFk\n",
+       "rMWGj9wGu90uojwZdpyYcSJHFp/P5xNBnsHqHITQxoBBlZSOra+vw263IxAIYGVlRRFrpKsyJMhs\n",
+       "Nktk0Ol0YDAYkEgkcH5+Dp/PJ2ydD3AoFILNZlOqFDkt9AbhVNJkMklIyzjob3/72/iLv/iL177/\n",
+       "j2pnph8w2WqsNwOBgLi+wWBQ07Tvfve7ir7l4IHZJx6PB++99x5cLpesrkajkfwsyBXm5I7uRExR\n",
+       "4q5LbjPhNS5QNo/8Ghs0EvSn06lQEQphR6ORIMdf+IVf0EPK9z8ajWSHy1g2ogWsT7lzU/PI8oLU\n",
+       "TSIqhMrMZjPcbjcGg4GaN6Is6XRaO6vdblcUBnO7w+GwxuGE/Vh+uN1uwXNseB8anfaoFjOtrgDc\n",
+       "iy6jNcBiYzMej/HDH/5Q6mPgjsHFGIbhcCgCztnZmRomhjbS2KVSqcBgMMhbglwJmnDTrpVxBxTP\n",
+       "srnkAqKjEZNKWdOXSiW0Wi2RhohsfPzxxxKQcldnZggx5GaziUKhINIRHZco3eIOztqaaEo2m8Vg\n",
+       "MEC1Wr2XPcihDkuYv/mbv1GZdnh4eM+mlkaLtOfNZrOo1+uy7B0Oh6Kf0t+vUCg86P4/qjJjcaxa\n",
+       "LBaxvr6Ofr+P+XyOy8tLWVldXFwgmUxKeEpjQTp5LhoQ2mw2IRIrKyu4vr5W112r1bQgiEtTyDqd\n",
+       "TnFzc4NUKiXLrNFopM6d7LdqtSq3UO7a8/lccQuVSgWJREKEe46HOV2kCqbZbCIWi6FYLIrBBkB0\n",
+       "S3qAjMdjjZxJuiedkyjQaDRSaZLP56UFtNvtqNVqyjQhOsRSA4CSa9lQs+5nUla73YbBYIDb7db4\n",
+       "mp/zQ2vmR7Uz86ilRS1ruVQqpbqN7C/mhzCXj3gtJ15utxter1cwUiaTwXg8Fi7r8/kQCoWQyWTg\n",
+       "cDhkVUXPDPKQOarmgMRoNCpSze/3w+FwiEfB5ClyJbxeL/b29oQ9c5Et5hDu7+/D5/OpPNjd3UUq\n",
+       "lUIoFJInHEOLCL3Rf49oCq3HMpmMnOydTqcUJJFIREQhBgtx2sqp5fX1tSKPaRlMmRhjLRKJhMoX\n",
+       "svjY/DFD/CHXo1rM1Lsx05nOmeQXb29vi3Nrt9tVBozHY1EtabMFQMJVugCxWSL/mLsdp2m0q41E\n",
+       "IvfypFmnk6O8tbUFp9OJ09NTTRQp0VpbW5NwYDweo9PpiMQUDAaRTqclElhdXUWtVpMxY71eR6PR\n",
+       "EBON1gkmkwnb29siVW1vb8ujeTKZaBzNTBLqCVdXV2VDtr29LaiTA5N4PK54CbqXMn6COzXxe5ZE\n",
+       "TqcTwWBQ7qbpdFpw5Pr6+oPu/6MqMxiW2Ol0kPsihPHk5ESB5PRxq9frkgWZTCZUq1V15oPBAKen\n",
+       "pxpeMB7C4/HI7XNpaUmmKH6/H7VaTcy55eVlHZmU3S8mmN7e3qLVauHk5ASbm5uo1+toNptKwWId\n",
+       "yQDN5eVlUTfPzs7kz0y9nNfrVT37zjvvKGGW7LhyuYxcLodEIoFyuYxkMol8Pi8Vzc3NDQ4ODuBy\n",
+       "ufDq1SvV/W+//bZMXwjtcfhBfJ0Wv5xSUuFCo51WqyVuM51VKQUjgZ/ezwCUGvu616PamVdWVkQn\n",
+       "jEQionICdw0hlSg8TjOZDK6vrxUi2Ww2kUqlEAwGhQhQNkS1SK/XQ6vVUkdPcg1wdzO4k5LfzBKB\n",
+       "msLhcKhygSR3WnsBUOwDJ2x8IJi4ShkX0RI2VA6HQ7Upd06askSjURQKBT2Y0+lUJt8A1JiyPPD7\n",
+       "/YL9mIO9+HmFQiHFWDAAiI2cyWTCzc0NisWipqQkIbEs4USUjS0X91cTwIWLUBgNRRhDRq0d61iS\n",
+       "w5lRQl4AVddMJ7VYLLJs5XSLdTBVxZTwW61WjclZkrARSiQS4hgTVVlfX5f7D3HrdDqNYrEopCEU\n",
+       "CmkX5MJhCi3hLZJ7CInRBiCRSKBYLMJisSgbm/ki5GQT8aH1LQBZc5lMJuzt7amW5yKMx+MaY/Nh\n",
+       "4ENEPjM9rWlOEwgEUC6XxSkJBoMqlUg28nq9ePr06YOsBh7VYv5/2HuX2MbXNL3voSiRkijeREq8\n",
+       "6a6Squqc03X6nD59mzFmpoE4i0Hs2SWzCbzILpuBYwdxso8RZJO17VUwNgLYm8DxwHA8iCcz4x50\n",
+       "t/vUudRNUkmibhTFOylRlERJzKLO72mqPXaAUpzxCP0HDk5dVBTF//f/vvd93ufCbplMJvXy5Uub\n",
+       "cePkzsCDsW+xWNTZ2Zn929rttq0I4GccHx+rXq9raWnJkB0ZJyhF2D0JvwQCxGLr5z//uZaWlmzO\n",
+       "PTU1ZV4GJUE0GtX29vYdqwJ2O/gOuBmx60Ki73Q6zsPGJB2oD9ekxcVFM+Cogxk9l8tlN7DhcFhH\n",
+       "R0c2A4cui/80pUa32/XDDb21UChY8IsYAYiQrJRAIOBhDqgMvG3I+u97PajFfHp6qtnZWTsSgRSM\n",
+       "jo5qYWFBh4eHd8IooV0Sh4ad69ramh3vGUqgxJ6amtL29rZlVpFIxLXy1NSUkQs69VqtpvX1dXtJ\n",
+       "gBMHAgHbeDEe7nQ6ppEiAmVogRsR9EwGEghxiVA4Pz83P+Pp06dOV8Xi4NNPP9Xe3p6J9ciYIFJh\n",
+       "kTA5OakPP/zQKhWULaOjo3r06JG2trbMwVhaWrKPHV597Xbb2TEff/yxXr9+rXK5bG7zxcWFlpaW\n",
+       "9NOf/tR86sXFRf3BH/zBe9//B1Uzs6OBOLRaLZVKJfV6PW1vb5sMhPdZq9XS1taWpf27u7uSpD/+\n",
+       "4z82ToxzJ6UIfsaMYEl1Qmp/cHCgbrerVqulYDDoxpK0VY5fkATYdbD9Tk5OtLu7q/39fVvbbm1t\n",
+       "WQECzZP8FUkOh6QJhSvCgIRxfSAQULFY1M7OjimvIyMjevv2rSYmJmzHcH19rWazaZIQEQ4YhG9s\n",
+       "bLie5j0DSZ6cnGhvb0/VatWl15dffmnJFYJgfDgoRSKRiH7605/e6/4/qJ0ZoguezKAHNBhjY2Mq\n",
+       "FosmteDX1mw2XTfSgBHsiC0WTp9MGKGCNhoNhcNhLSwsaGNjww0lk0ZUJ2dnZ+780SDirUHzOdwg\n",
+       "ofeDyDMcbgl6kkqlnF3SaDQ8XKEePj4+to6R5Few8qOjI0WjUSMmmJEzOOLBweEIo3CIXKhQSMqC\n",
+       "YDQ9PS1Jd/jW2C6QQ55MJu+ExmP7y7993+tBLWaEmnhNAOgDb7VaLePMlA4ctYyV8UCDAgp1kRKA\n",
+       "aDByOeBKo6/jRsEoQ8SKjReDG9w+oZlK8k4/MTFhKwQeCMhR0WjU4tTR0VFzrJfUCdUAACAASURB\n",
+       "VDm+ISNBlOdBokyg/MGPj0ZYeicqODk5MWeEMT28auig8Cn4zHkNXP/hmvR6PWUyGX8eJApAxwXB\n",
+       "oQnkc3jf60GVGZlMxvG6sMAoO05PT7WxsWGft+3tbXvFQVHsdDreeTiuqYtxkz85OTHcxPes1Wo6\n",
+       "Pz/3wmFBM8plzM6EkXp8mFyExRXyLnZZID2C2ilX0OXBa2bHZ3cbjg+emJiwPxyhm2DqU1NTHhJB\n",
+       "faV86Xa7XpA42yMXgzZLM1mv1++M3FutlsbHx1UqlcxFQUYFPl0qlWwOAzx6n+tB7cz1et1sucPD\n",
+       "Qy0tLdkDbjiJ6vT0VPPz8yoWizo4OND3v/99L/zb21sVi0VTLTEyQdpDycGNh9vcbretwN7f31cw\n",
+       "GFQkElE4HFaxWFQymdTe3p5mZmZsaQtBCHcjOvxer2fFCLvi5OSk7buoU4djzjCbwcARESuSsGEl\n",
+       "diKRMDzHsKPf73vsLP2C03Fzc3MncxAnz06no7W1NS9SsHAkVVBZgQbHxsZMCWXTGB0d9YInw/s+\n",
+       "14NazNAlu92u1tfX1ev1tLq6qoWFBcViMcvfi8WiRkZG9Lu/+7v6+3//73uXAycF3ltZWVGj0VA8\n",
+       "HtfOzo4//MePH6vVapm/S5NXq9U0NTWlmZkZL3BJHu9+8skn6nQ6ury8NOQlyRYC+H4woKA8gIif\n",
+       "z+e9ED/++GMnQEFawpeCdFTQGlxM4U4z0WOw9PjxY/Oiz8/PfSI8ffrUwUYMU4AA4U8zoMJ2C1UL\n",
+       "WDRG64FAwPkmTC3D4bDevn2rlZUVU2V//OMfv/f9f1BlBjtMJBLR9va2ZmZmtLm5qePjY21sbGhk\n",
+       "ZESbm5va3t7W9fW1/uiP/sh14v7+vvNMGOtubW1pYmJCf/Znf6bFxUUT5iENkWLKtI/FPD09rTdv\n",
+       "3tjhBzYYabGBQEBv3771BLHT6SibzXohkDFCwhRDj3A4rJOTE83OzupP//RPzQhEywcXmjG8JPtu\n",
+       "EEwEe29vb88ck6+++sp8aZyggsGgk2CxNsAjhIknuDH2vbjs93o9/Zt/82/8cxAkNDY2pkgkomKx\n",
+       "qGq1qlKpZBLT+fm56/D3vR7UzsyMH+y4Vqvp6dOnPloZbYML09gtLS1Jkk0Ih9Oger2elpeXjTkj\n",
+       "0BwfH1c2m3XE7/HxsSdxjUZDS0tLNjan3AGayuVyLn0WFhYsI6JBwsSFeAZ28Ovra62urno6yEgZ\n",
+       "iA2JF6YqWO9SbszOzhpJYac9OjryuDqdTjsQk1obGBBSFdNBBAYEbi4uLjoACKN3ml+abxIAEomE\n",
+       "ut2uCoWCSqWSJ7c0ou97Paid+fLyUrVaTb1ezxEPBwcHCgQC5lNQryG5J2CnVqvp7OzMIemYk2M6\n",
+       "zo4WCAQ8vABL5e/QFmLq3el0dHp6qp2dHd3c3PgoZiFTEoCJYzvAQ4BRODl7wIiVSsViXB4Cfn5I\n",
+       "7yzm8/NzJzsxqcPc/OjoyLnZ7Nq3t7ceTXe7XXOSydqmB6FnQGxQrVa9WDFlhOvBCcLDzESWr8P2\n",
+       "lhi3970e1GLmOB0bG1OhUHDcWDgc9i7U7/cdYQABPhQKKZvNWjaFXIkunEBKbk6z2fRNn5iYUK1W\n",
+       "UzqdNmwGJRJn+5GREbXbbd9UXDCR5+PEyeIHoyU7cHp62hYBiFuRd0E4wkgRUS8TymGBLAaN7XZb\n",
+       "jx49crJrtVqV9IuHYXt7W5FIRNFo1Lg0wyLp3aS12+2qVCo5fg0SlqQ7uYTwm8fGxhwNEY1GPW1F\n",
+       "nVKr1e7Fy5AeWJnBTYYsgzUAi3RsbMx8AnDoSqVi5x+OyMXFRScxFQoFx6VhWwD3F0cgTGFSqZTx\n",
+       "ZSA3BAL4rREkiS6QhRAMBi0m5TRAtwgEB0e43+/ryZMnTkENBAIObOd98vNKsj0tZUAymfT4nWQo\n",
+       "BkszMzOS5GaRsufjjz/W/v6+x+98lsOEouEyBN3ksL0XpdPExISazabm5uYcAB+NRrW+vq5/9a/+\n",
+       "1Xvf/we1mEEXkPIwWfre976n3d1d1et1TU9Pe9pUqVTUbDYt/4fv+/nnn2txcdFEI3aQi4sL7e3t\n",
+       "WdKPi2a9Xr9TMszMzJjsP1zSMG6/vr7W8fGxj140gzxYyPtp7Jg2AsuNjo5qf39fhULBOO/19bVq\n",
+       "tZpj1hi0oBiJxWKOHx4m+PR6PUej8XMmEgm1221tb29bRf6zn/1MhUJBr1+/1vr6uiPdsDoACWq3\n",
+       "2xoMBoYhyTSR5CaUevzNmzfG5QOBgF68eHGv+/+gygwWAGJQjm+GA1hHzc7O6vr6WpFIRAsLC3Zt\n",
+       "LxQK7txnZ2eVzWaNXLBDIt1nPA2XGChqWIEMmsHolmlYOp02rjwxMaHz83ONjIxofn7e2DTMNKZp\n",
+       "pFCxg09NTfn4xok+n8/bkovmja9H1c176XQ6biCB7Six2FEvLi7cLDNc4nUgOoFQENLJ2D0ej7sE\n",
+       "gjgFuQnoELMeyFmccO97PajFHIvFDMZzYzEVp6mizMBkEANAXIgYGaMmgXyPSSGdeCQSUTabVSQS\n",
+       "sUQrkUj4e8RiMXOEUZ9QfpRKJU/POp2OxsfHbUYIY40RPJL9SCTicTwsNth/TPsoldAuwskm6/rm\n",
+       "5sZ85EePHtnEhihkRAmUOysrKx7F83nwUKPC5qFiwBONRu1Vx4m1srJiO4PBYKC1tTXzzDc2NnR4\n",
+       "eOh7dp/rQS1mkAlk/DRdgUDAmR/hcNilCB8eDR3TuKOjIzdvyITQ4fX7fR/3dPG4EQ0GAw8ukNUz\n",
+       "VcRSdmRkxOmr8CDg/WJ9xTHPkby9vW0VNiNjMg+RfGHCSOlCUgB6x0QiYV/ndrutvb09x7PRBGMs\n",
+       "g1UWE7tisWgkZXgoA/w5HLUBMsTPO6xCWVtbM6OPIRQ7OIOm+1wPajEz8YrFYspkMlpYWLhjUIIO\n",
+       "7c2bN7bVyufzdg1KpVL+Wiy0YJrhw4aBCsSdR48eqd/vO8R9YWHBLj7hcNjEGsLooZb2+33Nzc0Z\n",
+       "G5+dnVU+n1cgEDC3hFOARQEDDQOVq6srra6uGiLj/QN7dbtdGxjyUGE1m8lkTCiCGjs5Oem4YiaE\n",
+       "YOG4f+JeSvPJZwWKg4f1wsKCJicnFY/HXa9XKhUlk0kVCgU/aExVgQrvcz2oBpAOm9BKjj0IPNVq\n",
+       "VWtra3acbDab+slPfqLf+Z3fMU9CeqfoKBQKNhGkMaLJYmrIrokBzBdffKGlpSUnn0ajUYsCUIfc\n",
+       "3t56isaUjigGjFjIGWFHYzFeXl4qFos5aw9zSExeGo2GLQnIrIZkRVN8cHCgq6srVSoVvybE/M8/\n",
+       "/1zRaFSdTkdXV1fa2NjQt771LWPGkJBoImHUYSIDsnJzc6ONjQ19/PHH2tvbs5cIzqWMwMHLCc7c\n",
+       "2dm51/1/UDtzo9FQNBq9M7qV5GYvkUh4d6NUYGdm52NnhjQz7C8My02SNYB8DcoRGhv8ncFaY7GY\n",
+       "+dNM2XCyB7KC0QfkBQUU939y/7CmhZ7KbinJQ5B+v+9/gy8Fp8DCwoK1i9Tr2NQOy6emp6fthB+P\n",
+       "xzU7O2tYE0wb9TXO+5D5sXigtuczYyfu9/sKBoNulDOZzL35zH9hizkQCAQDgcDzQCDwf3zz++lA\n",
+       "IPAvA4HAZiAQ+D8DgUBi6Gv/+0AgsBUIBN4EAoH/9N/zmncYbfF43PUdgxIUwagmIMeTeVcqlUz6\n",
+       "QRERiURcdnCzmOpJ73BfdGwwykAFgsGgTwoeDEkWrZKvgjEMjDe0eRjbMGFE+c0pwvFOUzoYDFzz\n",
+       "N5tNcx5ohuFugPgwyGF6R34J743mF4f9QCDgEwsCP+6e/X7fJ9jMzIw97piK8lkxNCIgk2EQm8/7\n",
+       "Xn+RZcbvSXolKfrN7/+OpH85GAz+50Ag8N998/u/EwgEPpD0X0j6QFJB0h8GAoH1wWBw+8svyNz/\n",
+       "+vraw465uTmHwrBbocP7zne+o+PjY+8oIA7r6+taXFxUt9t1HY68H7n/4uKiDg8PjTywUIH2hkk/\n",
+       "S0tLhukgxvOay8vLikQibigZ/ExMTOjJkyeuddvttubm5jyYoBeA5TczM+Pp4K//+q+r2Wzqhz/8\n",
+       "oUqlkl8fEtTLly8dAVev1xWNRr2DMkwhygFmnCStrKzo4OBAyWTSKVmkFcDNgMfS6XRMUWVAhDXY\n",
+       "4uKiarWalSiJREKJREKFQuFeC+ovZDEHAoE5Sb8t6X+U9N9888d/XdJvfvPr/1XSH+ndgv4dSf/b\n",
+       "YDDoSyoGAoG3kr4n6c+dfbLrFotFFQoFhyh2u12dnJwoGAzq5OREIyMj2tvbsz0U3IJ4PK7NzU1b\n",
+       "37bbbTvaB4NBJ5XC38AbgjocUhApqwsLC3rx4sUd1yCQgnw+r2q1qsPDQz8c6XTaNer+/v4dU/Ji\n",
+       "sXgnYSqVSrkhbTQabnh3dnY0Pj6uL774wgQkxu4w/TDJYcAxNTWl5eVlnZycqFwu69NPP7WY4ZfD\n",
+       "3gnlhFGHri+fz2tra8sELgZT8Xhcx8fHxqNPTk4ccccJEwqF7h0E/xdVZvwvkv5bScO7a2YwGJx8\n",
+       "8+sTSWho8pIOh77uUO926H/rormCIMQCQNqEjJ5mJJlMWscGPIcHBnUftgKMeamb4SWTz41ekPoY\n",
+       "ST6RC9TLwGCUB3hGh8Nhzc/PW6pF3jSsNIYb2OaiDcTPjQaNh4VaF4sAJoe4k3L0Q0/FVJFhysTE\n",
+       "hPkVwHAY12CzMDxm5/OSZOydzPGbmxv3F3zmw+HxvD9KsPe9/n/fmQOBwH8mqTIYDJ4HAoHf+vO+\n",
+       "ZjAYDAKBwL+P3Prn/h3TulAo5FDFbDbr+DGoiIg78W1molar1dRut50dQjjNRx99pHa77boZwn4o\n",
+       "FNLr1699XLKrXl1d6dGjRyarszAxER8ZGdGHH35oOKzZbDoEiIkbZcnY2Jg++eQTnZ2dqdVq2bgc\n",
+       "tQrfA7YaDkZkpoAbgxqA+GCyPjIyopWVFQt/yefGM5nmGI7zN/fHglbKomQyaZsHuBnDAZl4ZpdK\n",
+       "JQ9zMB4fHR01Ln6f6y+izPg1SX89EAj8tqRxSbFAIPD7kk4CgUB2MBiUA4FAThJxnUeS5of+/dw3\n",
+       "f/ZvXV988YWB91gspu9973v2NJ6ennYgZavV8gcLPtxsNl2rknrKqHhjY0Orq6t3AiOldwoR6Z2v\n",
+       "M9M0VNns6MiuSH7FQLBcLltPB6aLMyn+yZI8esepnxwQJoGzs7M6PDz0wARNIEproLP19XXjzMBl\n",
+       "NKiSnA3Ybrc1MzNjRiAC2WFbA+r9i4sLraysmFZL5gpWva1Wy9NWgjNxPo1EIup2u/qDP/gDR3L8\n",
+       "pcsBHAwG/8NgMJgfDAbLkn5X0v81GAz+S0n/VNLf+ObL/oak//2bX/9TSb8bCARCgUBgWdKapD/X\n",
+       "YOG3f/u39ezZM62trZn/S+cOBRJMFix32KkHqVG1WrXUX/oF6Z+Qeeo8vCzm5uYcPD/MVMOU5uTk\n",
+       "xIMNJndAdvCEGSODIEjvTGGKxaIHGSyKer1uiwCUK6Ojo2bqVSoV+1ygpuZUOTo6Urvd1tHRkbF4\n",
+       "+gqyTVDc7O7uuvkc9rkmLxHzRR5wBjAnJycW3TYaDdVqNVUqFY/JDw4O/P0eP36sH/zgB/r44481\n",
+       "Pz+v+1z/MQxNKBn+J0n/OBAI/FeSipL+c0kaDAavAoHAP9Y75ONa0n89+Hfoa6rVqlUW+BOvr69r\n",
+       "ZGREc3Nzkt5lBQ7nYuN8xL+5vr7WX/2rf9WOoWNjY/ZmhvM8jO3ifwz+G41GzZhDALq6umpeBOlT\n",
+       "6ALHxsb06aef2pyGvG3qZWpqmlaiIPBQ5vtLMo0T9IQMFkbHy8vL+uijj3R8fKyTkxOl02lJ704x\n",
+       "9IY0iRMTE/r000/NJ1lfX7dsbG5uzqUTPzfmktls1jwNKANra2va2dkx7rywsKDT01N9+OGH+vGP\n",
+       "f2x+yO3trf7JP/kn772Q/kIX82Aw+L8l/d/f/Loh6T/5d3zd35X0d//fXo+SIpVKaWtry9RKIDLI\n",
+       "RLgUgQEXCgVzjs/Oziyy5NhvNBpaXFyU9Av4j3oPDJejFr9jBiF4One73TvJVVjUfvDBB9ra2jLB\n",
+       "KRaL6fj42Po8nP45NWCtYSADrRO/O7gn6A15n9lsVmdnZ9rY2PDUEAU2jeX8/LyazaaHJeDAQIZo\n",
+       "9X72s595HI/iJpVK6fr6Fymr19fXymQyVspXq9U7tbn07sRbWVnx2P9XXnNDF5ZXjUZD9Xpd+Xze\n",
+       "WDCSptHRUdVqNevwcOqBWIQh4dnZ2R19Xz6ft6kL/Irj42MtLS1pc3PTjvLUsQxOarWak6pAF2q1\n",
+       "mtNih3dCxtE0i2C5mBFC3SQ0XpKnhLVaTdls1sMNdtRhr+RWq6V8Pq+XL19ay9hsNq173N3dNQNw\n",
+       "fX3dD1Gj0TCKAc7OEIi+4ejoyIgN4e8IEnA9Oj4+dv0fCoW0ubnpxK2pqSltbGzc6/4/qHE2kBML\n",
+       "SZLhKqxhWbT8Gd369fW1KpWKVlZWrELGlhbeLkEyTMskeVgCfsrNabVa3v1IfaJxBIeFpim9a6ym\n",
+       "p6ft5wxScHt7q16v598zBgbOg81HhsnU1JTDMMGgqcrgh9DwAqsx+kf6xGcjyc3p2NiY3aJIASAM\n",
+       "E/UNjS9mjZKcnXJxceFGmM8LBToOUQxZ3vd6UIsZzBJVMHXwcKQXU8CJiQnl83kNBgP/HgJRNBq1\n",
+       "kSHwHpgsujiCexqNhon25AF2u10tLCzo+vraTR6eyjh2krEC6kBziCxpamrKRCJI/MQwMJ3L5XL+\n",
+       "3njcoSaHmE89/fbt2ztuTaurq8adZ2ZmjCfjTQ0vnOgKyoNQKKRkMqlMJmMrLiBMuBXBYNC+H/yM\n",
+       "eO71+33lcjkbmu/v7zvy+S8dzvwf8uLoe/TokXZ3dz2sgHr50UcfmVvMQqUehA/BDkJWB4lK6Ao5\n",
+       "OiEFnZ+fu65Op9MuN4ikQEzL8Q36US6XVS6X9eTJEw9xoGaWSiV7JrNDYudF7iDuP2DCjLiDwaAN\n",
+       "x1moR0dH+vVf/3Uf91NTUyqXy8pkMpqamrLi5PHjxyoWi16U6PPGxsYsdkBdTX4gD2omk/HPEIlE\n",
+       "7jR1o6Oj2tnZUS6X84l2c3Oj9fV1P6jcp/tcD2oxswt2Oh1PskgXhUZJTZpKpXR6emqvDEz8aGJ6\n",
+       "vZ4bP5ohdHVYFJAKdXZ2pkaj4bixQCCgcrmsZ8+e6eTkRLVaTZFIxFRPLK/y+byPeHKpr6+v7xzB\n",
+       "ktw4ogVkqHN+fu7d9urqyijKF198ofHxccdgEGjJRA5KKyaKfI9ms2mDxtvbW9XrdROOMK6B6Xd9\n",
+       "fW0EhFMLnFuSU3Jvbm782SJawIcPshMptn9Zx9n/QS68JGCDochgVNrpdDw1g63GgKFcLlu9LMmE\n",
+       "fbK3wWKr1ao9iMFx4WRcXl7q8PDQQ5nNzU03jJQRPBjSu4cPfw4eEtyLGJkz8pXkCWWv11Ov1zN+\n",
+       "jckLdgW4+Pf7fZXLZYfPl8tltdtt48g0j5CwSqWSm8VQKKS9vT0vUpAd2If0J9TmBHIyqaSGHx0d\n",
+       "tWd1sVg0w25qakrFYtF9y8HBwb1Zcw9qMdOoIF1iAgY1sVqtqlarmUNRLBZtHJPNZh1Kw9exACET\n",
+       "AV9h3s2xyGtK8s5FibG0tKRKpeKdb9jjmUZwf39fp6en1tSR7AT7jLoXQ0eEAZwO4XBYjUbDtS9E\n",
+       "I+DCSqVi6y+4JSxMml0e8MFgYI/rdDpt9GR8fFyvX7/2yTHc6LKbk60tyZg0D4Yk5fN5G8+g4Lm+\n",
+       "vtbW1pYGg8G9yfmB+/p7/cdyBQKBwd/6W3/LcBcmK1NTU8rn89re3nb6KhDT4uKi/sW/+Bf64IMP\n",
+       "fETiLs+ABMEpMn4GAbgFpVIpCzUlefFls1mjEOyCPBzU54y4u92u6aGSjLB0u1078w8rsHu9nmZn\n",
+       "Z+0YynhZeuctx/eA4IMOUJKj5c7Pz/X48WM9f/5cCwsLarVanohOTU0plUrZOw94LxwOa29vT/Pz\n",
+       "86rVavrOd76jg4MDD094DUkO3+E9sMlQFvV6PaVSKQcSZbNZ7ezs6B/8g3+gwWDwXmLAB1UzU0dS\n",
+       "25JGen197d0NJXM4HNZPfvIT+09QJ6Ktu7q60uvXr5XL5e4ouDmquTHDOxVGiAw1ZmZm7gTWUK+v\n",
+       "rKzo1atXd6LLRkdHdXp6qkQiYUZeOBxWv993qYFdLH7LS0tL3pXJa0mn0y4ppHcTT5pTHOwh+PNz\n",
+       "VCoVnZ2d2dsDkhWLmAVIniFmjGQs4oREmREKhRzYSVJXqVRylgvfhxMUaJTm8H2vB1VmQCXEIYc8\n",
+       "arwpKAtoxgqFgp4+feoINNzt8Y9IpVKmQ+bzeSuncR2CaA/PIh6P2+qViSHHLoR8giYhBA3j3DDq\n",
+       "JicnbTMG8R/jRpAPfDTw1cDeAAuEfD5vAn4wGLSIlnE7Uz5IT7FYzLAfsij+L71DTWKxmOkAnFjk\n",
+       "s7DwcW0aHx93VPJw5AbMQGi2qFw4Ue5zPajFDIzEWBpDPsB4dkDwT5ovFhq1JX83Pj5uGAt4D24x\n",
+       "N4yRMfU2JQNNFjgsN4whAaXJYDCwSJTXxMWfMTwPGLsfv+50OpYcgZkzGCHm7fz83GUN5pAgMPF4\n",
+       "XOl0WrOzs4YcIWdJMrxHrU0ZMuzzTOkEdIiA+OrqStvb2+Zew0ch/4+HiXr6+vr63r4ZD6rMqFar\n",
+       "Fna+ePFCq6urVkZUKhUNBgMzyrLZrBUgIBmJREJv3rzRl19+qd/8zd+0kpkdB/IP9loHBweu0VFS\n",
+       "z8/P+wiHRvry5Uutrq7aVw6HfBYBHhNMLvGq6PV6zg8JBAKeUMJ6IxTz7OxML1++1NOnT1WtVrWx\n",
+       "saFcLqdGo+Fx9He/+12XLZRF5XJZlUrFKhC+dmpqynazKGSCwaDZe+zoqVTK6BCsw7GxMafFUtqh\n",
+       "cUQ4gCHlMFJzeXmpP/3TP73X/X9QOzOE71qtpt/4jd9wk5HJZLS8vKz5+XktLCw4n2R9fd2LZ3V1\n",
+       "1Q3Pr/3ar1kvFwgE9OzZMw8k4FeMjIyYGx2LxZRIJFyyUDJwcxcWFix2xfyQsS4awm63q5WVFe++\n",
+       "+H+k02nbdi0sLLiBXV1dNRV1fHzcKbCRSERra2t69uyZDWeSyeQdESmZ36izP/vsM4VCIS0vL3t6\n",
+       "GA6HVSgUbPV1c3NjSy6U2ZwSlEoEuSMmpsYfGxuzmePk5KS51PjTIXxdX1+/1/1/UIuZumtqakrP\n",
+       "nz93t9/r9eygj4UAuSH7+/v2v7i+fpd/t7+/72O63+/rxYsXHrdCUuc4Zxfudrva3d3V9va2nYko\n",
+       "GYrFot3lsYPllMDPrd1u6/PPPzc/gl3/8PDQ/s3lcvnOe5qamtLbt289TKlUKup0Oh748GAwkgbx\n",
+       "AKEAa9/c3FQoFHJ8G+VSs9n0/8GxW62W6vW6tra2XM8jRfviiy9MGsIUcbicwqPj4ODAvBA41ldX\n",
+       "V7bWfd/rQS1mTMAZSRP+gjKZm/fixQt33YxhqXXJMSGeASXH+vq6RkdHHXbDhA1HI0xMpqenzbTD\n",
+       "TT+RSNg8EHSiWCx6XBwMBrW+vq5CoaDx8XH/HnbZy5cvzXir1WpaWVmR9G5w8eTJEx/ZcB6IsIBH\n",
+       "0mq1lMvl7vQM+/v79rogSmJ8fNzxFcOefPQRpGphKonYAUwZvgkPOicM7qlTU1NaXFw0TwbWIpHN\n",
+       "1Orvez0onPn3fu/3lM1m1Wg0bKGFDIhmj6kXyuGXL1/qu9/9rrnAML6y2ayCwaDi8bj29/eVSCTs\n",
+       "iTEzM+MdKxKJ2OGn0WioXC7rO9/5jqEtmkn8inu9niE7xursrMVi0bKpfD6vZrPpwcT4+LjlTJIs\n",
+       "ph02RJekR48eeQeHttlqtbS2tqZqterQTGBIbHNBLeCVrK2tObx9b2/PAl1Jd0QJ8Xj8TlOMAQ99\n",
+       "CerwUqlkHSbKHjz2EB+fnJzoH/7Df/grnFl6B8Rj6DI6OmquMkOBi4sLVSoVXV1d2dwF8jkZJoyW\n",
+       "k8mkyuWyHj16pFwu5zF0r9fzWJZUUtTfoVBIhULBhPTb21sT2OEqsDihmGLa0u12vdBisZgjgbHH\n",
+       "pRTK5XIaGRmxOaQkP0xjY2Pa2dlxTBzvczgDEZ4GFFJOj06n41qW6d75+bk2NjYMqd3e3nqgRK1M\n",
+       "gxoMBm0GyWeJxxw6wvHxcSd9QZ2FAgvP+z7XgyozUDTTXAD9QBCnXkYrB6Go3++rUqlYaAm8l0gk\n",
+       "dHZ2ZiiPY5obgHEMSg+GKOx81MKQlKiVaaAIk0SkCm8auBDkBEU4wxygr2g0ekeyhI+HJDsNhcNh\n",
+       "N4gYGzKs4LMCB0Z9g/8GJQCnDiVHNps1FCfJ8B3qF7IRsRgGlSEujQcBExu41ffNNHlQOzPdM/oz\n",
+       "4n+BwOD+QpznmIdUxDABKie+w+xawWDQDDgWifSOzsjCQ6UNtkr+nSR7QqPQJpJ3eCBycXHhiSRk\n",
+       "f45m/i02XKFQSGdnZz6+Dw4OzGADRmSxMHnDu4PanNIJS1sIWaAWCAFo4ubm5kzQh4fNa/E9rq+v\n",
+       "9ejRIx0eHnpkz3QTigAnZ7lctgUDzfX7Xg9qMRNZxqBi2ELgW9/6lssISXbvubi4cLYJcNWTJ0/s\n",
+       "0wa0hPE41FH0hkzxMFak7MArAtMWDF8Izzk/P1cmk7FQFp41R7gkE+zBmUElMIohPIf0J4Y/cJQh\n",
+       "1SM0CIfDWl5e9tCEU2Ztbc1iU+y6mEZSK/OQ7+/v69mzZ2bJkeN3dXWlVCrl4QruqIFAwA0zlNtM\n",
+       "JqNGo6FkMmlZWzweVyaT0e///u+/9/1/UIt5fHxcf/RHf+SdgiYtl8upVCr561KplDY3N8304liE\n",
+       "51sqlfThhx8at0W5EolEjAJcXFz4RqCZ29/f19XVlZ48eeJBCjU5Rzz1KiGQjLKpF0Elfvazn+n2\n",
+       "9tbE/16v52ObcT2j6WGrWTLDYdxRokC7HB0d1fPnzzU6OqqFhQX1ej0dHh5qamrKTenBwYEk6U/+\n",
+       "5E/0gx/8QMViUbFYTMVi0RRRSqm1tTVtbm6az8Ewh6DON2/e+H0Pm8GjEaOx9AAAIABJREFUECcl\n",
+       "izyY+1wPajGHw2F9+umnkqT9/X3lcjlH+FIijI6+C1b/9NNPdXh4qF6v52EENTfqEbK4i8WiPvnk\n",
+       "E11fXzuJ6vLy0vG/sNAI5gmFQjYCDAaDajQavmEMEEqlkkWylAT4shGTDHRFRjVlEWFCt7e3WlhY\n",
+       "MI2T3BFG52NjY55IPn782Gbei4uLtpLF40LSHfQnnU7r6dOnVsYgJ2OqSMOXz+fVbrdNbY1Go8rl\n",
+       "csbBnzx5Ikku7dj15+bmLHIgGeu+QfAPqgEsl8sG4NvtttW/OOZj/8RiSqfTbrhIe4J0BCmJ8oDm\n",
+       "BSQENQj/BpvXk5MTNRoNHR8fq9lsGhqTZG40rkl7e3u2O4DpRrjN0dGRzs7OdHx8rFKpZOwbo3Ho\n",
+       "p5VKxRERjKlRcnPasDPi7onNAZ8NMil8+n7Zfw81CGN/eBUoyodFBDjg02CD7fd6PeVyOUeo8VAg\n",
+       "USsUCsbP3/d6UDtzLBazgcrjx49teTU1NaVcLqfj42P/GVTJ2dlZzc/P6/j42LDXs2fPVK1WNTMz\n",
+       "o8nJSa2srDhhtVAoaHd31xmAwzawNzc3mp+f19XVlU1nzs7O9OGHH9ooER706Oio/fDm5+e9+MB7\n",
+       "P/vsM52enro5ZKDA4lxcXDRaAc6MgSHSLcSxjN/Hx8f15MkTZ5VEo1ElEgmHvWNsODU1pWQyqU8/\n",
+       "/VSnp6e2q/3ggw8s9wIbnpiY0OPHj90rMHqXZFbh48eP9ebNG4dwghrlcjk3iVdXV1pYWLjX/X9Q\n",
+       "ixlrq0QioVKp5KiFfD5vFly5XHbkGTsHtlpnZ2e6ubnRj3/8Y33rW99y140XBrs3MiiI5+zK4+Pj\n",
+       "trKlbgd7ZVedmJiwXS7/BqfQsbExk3tgvKFsBpID193Z2VE8HjeHGNNEyPws0FAopHQ67UnjxsaG\n",
+       "jcvJ1YarcXJyckf6BK4NQjJcKyOjokxidD5sc0sZ9ZOf/MQCBfLIm82mNxNgxb29vXvd/wdVZpD4\n",
+       "SdmAATecY+KFi8Wij2QaL8a54MQ48EMNZSEP482tVsuvD46K5cDZ2Zl97pBnUZOXy2XX76VSyW7+\n",
+       "TNRg6gFpoUZhAXGEg1+zOHEm2tvbs6PR8fGxnj9/rmaz6TobSwPwZkl3yqJarWYjckSzfGa8Bgy8\n",
+       "wWBgmdmwRS/4+sHBgYW7ePbh88dDge8civP3vR7UYgZu6na7XqTIh4CvWHwTExN2pZTe5W1gVM6R\n",
+       "SeeOjSzjbuRNNzc3hvWwo8IwkdEv+Sa8FgMG6lQaJ/Bd/p5pIwR2VM9o9fh9t9s1J4PviVsoPwdN\n",
+       "MAMe6Z2YltMFQe74+Lj51ihv0OqBNTMlROaFVW4wGLQotVarmaeBcJWfgYeU04cTcnZ2VvelVjyo\n",
+       "xcwVDAa1vLzsIxuMNBqNan5+XolEQhcXF/rBD35g0L5arToMZ3l5WZOTk1pdXbUyYnd31/q5+fl5\n",
+       "m5vgEQdRplgsuhwAviPgkqHBsJ0AsQg3NzdWVMMVpoaFBD8sB4MkBD85Eono8PDQY/GrqytlMhml\n",
+       "02nrBkdHRz00IZwoGAzahJEHj1JmeXnZpCDYfcMBltTt+DWTghuPx+12RLYgp2QikdDs7KzLQWIz\n",
+       "xsfHf0XOH76AeJD/4OmAqzvZeq1WyzxjuvxGo+ExLYgGGR4498AMw1YA8hGCzX6/rw8++ECRSMSO\n",
+       "P9PT08Z4G42GE5Y4MZB6Db5JumL8zi5IWUFyKxNN3gMcYXY3CEQ8PCxEBkgoxzudjndwYtqYgiIN\n",
+       "y+VyTg0AMcHIhmEIERMTExOeeGK3xZifBAISAcbGxiz1QvnCoOde9///o3X0H8XFBwdJBnlRs9n0\n",
+       "MckY++bmRm/evDFSQOglu1YikdBPf/pThUIhdbtdR5/RjK2srFgIcHNz45p4bm5OoVBIW1tbxnFR\n",
+       "WDB5gwAFU4/3x8PCIAZ+Nb9Gzzc5OWmHfwSleOUhcZJkGwBI/MFg0DZbfC6UEWDYjMnJScHyi4XN\n",
+       "w5tOp/Xll18afYFIBOF+b2/P3Be8q1HehEIhxx8TYTEyMqLl5eV73f8HtZhpKIC/kEQRkUsULoR8\n",
+       "hghgwSw4GrJhuRA7LyNxFhnecoPBQMVi0c0Nejtiz1BR09TRLKLxY1QOoR2CDzZdkPrxjCuVSob4\n",
+       "8NAALtzc3HR9jvMpNTLvqdFoSJIV7NI7E539/X1HOfT7fRspDlspDGsMKX8wlESHyImGPGs4l4XJ\n",
+       "ZKVS8ci+0+loa2vrXvf/QZUZ7DRM1MCG4/G4Op2OPzRQBTRs2AOcnZ157I1C5erqSktLS2o2m961\n",
+       "EYOen59rd3fXgllkVLjgD0f1Yn5CREI8HjdPpNfr6e3bt0omk05xrdfrxmmZstVqNaXTadXrdYXD\n",
+       "YXU6HfvrIdXCaCabzRodwN0JXLjT6ejRo0cKh8M2aeH0mJubU71e19LSkmOHpXcLHY8QCEtIyaDX\n",
+       "YhozOTnpQKPhQVE4HNbBwYEfPKaUvBaU1ve9HhQ5/2//7b9tE0BgNpwrhxUoxA4fHBzo6OhIH3/8\n",
+       "sQF91MZ4NeOPHA6HLTfiuIYlBs46Pz9vUxOk+pCKbm5u3CTmcjmVy2XzKOBo4JzPgwOpabjuTyQS\n",
+       "HsljoojhOIT4arWqqakps/p2d3f1rW99y6cA/GfKGfyq4TpDA221WndqaWrtRqOhVCqlsbExG7lQ\n",
+       "KpBwi9KdySBIzXATGolEVC6XXcIUi0X9o3/0j35Fzpd+kZQKCb9QKOj169f66KOPdHR05A+YkoIj\n",
+       "+vDw0IaLpVLJE7z9/X1ze6EukmmXz+e1s7PjmjUQCOjVq1caHR3V/Py8tre3TX9kFI3ItVKpWKOH\n",
+       "O1KlUjGnmZwTckRCoZDK5bINWCQZV6aUgNfADkzONrsmKpiVlRV9/fXXzvUmIqL4TcYgDSS2Yclk\n",
+       "Uq9fv9bl5aVPqNHRUX311VdaWlry5sH7hCW4u7vrhpoyazhVFjEDJdvs7KxLn/e9HtRipsumEWFh\n",
+       "0T0DO0myjAlGHKPfhYUFvXnzxrBSOBx2OUANyKKEbgkMxTQPuf6wGgMOMv8el1Iu5PnDptx4SoC4\n",
+       "5HI5fy+I/DwAkrzQh/8MNTe/ppfgfcPbgPuNcBc2HwaInEiSHF3MAmV83u12NTMzY9gvmUyagssO\n",
+       "De+70+l4uMQD/Jc2O/s/xMXkid2KxYeJCqaEQG+4V4bD4TvHH+76sVjMyIj0iywU0AteH8MW5Eaj\n",
+       "o6Mm+1BqYHGFcADlBgMTMFeIOPF43AMVoC3MbQiFpCSoVqu2t6L0IOUV4lK73VYkErEPxu3trfkg\n",
+       "gUBAR0dHDrVkiMLPy4ibngFv5larZS43AxbEsbwOny0lCra7YMq9Xs96wPvuzA9qMe/t7dnkcHNz\n",
+       "U5VKxSUFknZgqJGREdXrdauxSRUdGRlRIBDwv+t0OkqlUoaSJBlSqlar7uLh5lYqFW1tbdmNk1oX\n",
+       "ByJqaRCPq6sr7e7uqt1uW+18fX2tvb09HR4eOvEKXBgDGqBHPI1BZo6Pj/XixQtVKhVP5N6+favx\n",
+       "8XEz3GC3EbFMs4k2sdfreZrKZwcfZXt725O7eDyuvb09NZtN1Wq1O2XeMCWAzwlC0eHhoX9ebL2C\n",
+       "waBr/Pe9HlSZMTwho7lCH5dKpRzzQPc+MTGhly9f6pNPPvGCxrQ7k8no6OjIaU/hcFiJREJbW1u+\n",
+       "KdPT0x48gPXCJgPRYKiA38Xo6KinYcBSkjxgAPdNpVKq1+s2rMGAEKXK3t6ek6WwTYjH41a+MJYm\n",
+       "LTUcDvsYj8fjOjk5sSkkk8TR0VEnP0nya8K4I59bkss50m4RCtDoSvLnv7y8rE6n44EKYuDZ2VkP\n",
+       "fXiv97ke1M48Pj5un+VIJGLTQxx5jo6ObG5IVBqxwEj+b25uXFfncjlP0jhyce1BG4gtF8c2Rze1\n",
+       "5vz8vKFCxtnUrTDjSqWSDcYh6qAdZPLG8CYYDNriQJK97vg54UxQ06LwiEQiOjs7UyQSUSAQ0PT0\n",
+       "tJl2PCDSu9qdBxSXfQhJ8XjccCF1NbtwvV53WYVYFU8Oml1KsNvbWy0vL/uERIh73xiIB7WYLy4u\n",
+       "LF0iVbRcLqvb7drWFVVErVZTLBYzk+7k5MTG3NVqVVdXV84ShCtMSCVCTsbYku6w6SYnJx1ttrW1\n",
+       "ZfhN0h2MGF0hN71eryuZTEqS3Y8Ir4fM1O12bWiDEBTK5eeff24PakmexjGoAUuvVCqmWxKPMezz\n",
+       "DFR4enqqUCikZrMpSQ7WxAcDQhG5MMCQt7e32tracp2NrS5ErvPzc7169coPCyP7QqFwr/v/oHDm\n",
+       "v/k3/6b9LSYnJ+11vLS0pGq1atEmVEQojESCkVaFofjr16/16aefegoGXgonAaMVjARZtMj2W62W\n",
+       "ZmdnFYvFvAuyWIgwi0QiHm0z6ctms1aiSPJkjaYVPkUmk/EQY39/326k4ONMMG9ubuw7jUcIA5xK\n",
+       "peLSAtRmdHTUwZhg5kBoEJYuLi5MKOKziEajHgZtbm4qlUrZZqDdbmt2dtYNOCUIFINEIqFyuay/\n",
+       "9/f+3nvjzA9qZ6ZZIw4XDsBwOurNzY3H04yHaYBAAMhEIX+DoHWon5hu85rDQZVIpBDSBoNB77KI\n",
+       "XBlWkHjFAwVPuF6v69WrVz4xkGNhDTA7O6tKpWJZE5gzxPmzszPvqrw/rGYpFchqGRsb83u/vb3V\n",
+       "4eGhkRGaulqtZvydMT4kfCIyIBIhNID/wqQUpyRQCx4APq+rq6tf8ZmHLxTZuOdDjEGRgcyJ3Qby\n",
+       "0OjoqL3dIpGIXYkIkCfeiykXHNzhXZlG6fr62goN0ILhmpUSCBgREj2LnFOFDBXYf5Cc+v2+9vf3\n",
+       "NTs761OEKSW2VzSsCFWLxaJOTk78M1C+9Pt98zCGLQYkGXmBT4JBzPX1tWKxmBYWFty0MjBhx2+1\n",
+       "WnY0pQGm9JmYmLDpSzgc1tu3b9XpdMwVv8/1oNCMTCZjUj2NHBMxEkZRUXS7XaVSKYenN5tNLS8v\n",
+       "q9Vqmby/t7enxcVFRydMTk56tA2vIZvN3jFVgRXGAgZDjsfjPimgg05MTNx5ULCNpYxgVxsZGXFp\n",
+       "0+v1lEwmPXCZn5932QRj8MmTJ0Y0UJ/DEUkmk3r79q0fZBCUpaWlO7pBFtbs7KyazaYJVUCf7LoM\n",
+       "XvCPnp2ddXgRmwrWuVAF1tfXfVp+9tlnOjo68s91n+tB7czDlq/Yxe7u7jqS6+XLl+YrDwYDY8Ow\n",
+       "w6rVqiYnJ/X8+XPvcBzJkH8YVvT7fUNyHM+lUkntdtsunxMTE34vBwcH2t/fN+cXQtPV1ZVH2dgc\n",
+       "4Ex0fHysRqOho6MjW9sy+Hj16pXZcjSvTD1PTk7sqk85sbu76+YWyRK8FeRNaCgrlYqCwaDevn2r\n",
+       "s7MzZ6CAwQ+jQyhfGKcT6fb1118rEokYb2YMTloW+styuWxt4K9yAH/pIjt7GCojOJI/I4SSpgdI\n",
+       "CIQBaQ8JSXTqGGzf3t7ao46bgyLl6OjIY2vk9LD0JicnPR0LBAKuw/H0GPa/oJwJBAKuMylVGGqA\n",
+       "BtB8RiKROxM5xviSjF/jDEpwEfVxIpHw2Bk8m4cRghA2BmDplENMERnHU/dL8oAF/D8cDruZJYy+\n",
+       "2+3ag+Q+14NazMSJQUHEBhZ39unpae9sKDaIGh4fH9fs7KxrQnBbeA0sOoz/CN6h+clms5qamtL8\n",
+       "/LxisZjy+bxHyywscFmINpCUksmkuRt8fTQaVaFQsO0WC3xsbEzhcFjz8/Oul9kt8aKmHia+AsXJ\n",
+       "zMyMm8KZmRkvUFANkgfAiYfH15QwsAQxv2FQg9Ib7jThQNFo1PcGrnQ2m7XmL5FIKBgMql6v6/Ly\n",
+       "8l73/0Et5tPTU01OTt4xGMRwm6gCjmJUH+zA0i/Sqqh/WRRYyFILD4P9vB5BOZJct0tyXUrdiJE3\n",
+       "O7kkZ1oz7GCSxi5HLcsDQSmBAyiNGpNHYEkmcwgU2OkZkPD+JHlHl+TPhZ9XkhOvKGcY4HDKwEXh\n",
+       "52YXxraLyGIEwfwawS4ozX2uB9UA9no9k4kqlYpD0IkvYLGR2wwufHx8rEQi4SMPrLrZbLo0YKer\n",
+       "1WqWWqGiGCYjATVdX197zAwP5Pr62to4BjeRSETVatXmiVggMBW8uLjQ0tKS5V63t7fmg/T7fW1v\n",
+       "b3uHhf9MbY1O8PT0VIVCQRcX72IxGNUDPYJdk9gaCoX8ELNoQXIw0ME7D3gNTSO52PQEIDCcmBin\n",
+       "Q7DCHAdnpvtcD2oxLy8vGzNGcZxMJq1CPjw81OPHj5VKpdTv9zU3N6dXr15pfn7ecBFyJYSfhULB\n",
+       "Zts8LGNjY44qGxsbs3QIgSrdPHUxnAjqRY7ieDzu6Ao0dHjWjY+P6/DwUOl02lTSwWCgQqFgk5dE\n",
+       "IqGlpSXn+EnSwsKCNjc3lc/n7VIkyelXDMkogchDZKJJ/mA0GtXs7Kyte1lok5OTVphjMcBGAIQI\n",
+       "1RZsGzuvTCbjUT5+GwiJf0U0+qXr4ODApUWxWLTioVKpaHt7W4uLi25MksmkSqWSTk5OdHx87NIh\n",
+       "FAqpVCrd8U0jtw6SfCKR0Pb2tjqdjhKJhBs3Fj2aw2w2q2Qyqc8//1zZbFYHBwd30qVglDWbTU1P\n",
+       "T1shgh4OTjUG6ATb4LbPrthoNDwlRHaFSoSQeDDg4bhfGkkGOIVC4Y5K5+XLl1ag9Pt9RSIR7ezs\n",
+       "aG5uTo1GQ6urq66nKUGgcw7bKVSrVYVCIW1sbCiRSHikT5gSGPe//tf/+l73/0Et5mg0asd7TALZ\n",
+       "fZDhU16Ew2FFIhF7UCB5v7r6ReZ2qVTS3NycO/FoNKpHjx4Zw6aRHBsbU6PRMCowNTWlZrNpf2Ka\n",
+       "QqZ0CFmHE5dwM6Khy+fzrpsZceP5wQ47MzPj0+Lt27cmWUny4uOBZleUZK+L4QwSGs12u22fan5m\n",
+       "6V0/Uq1WvbiB59LptLrdrrLZrPsSppAkyaKuSaVSxt6Pjo4c0cY9+uSTT/TFF1+89/1/UA1gv9/X\n",
+       "zMyMlcXkgQwntwJnsQgjkYgbJoSXLGgoiuwkHJGQe7gJ0WjUQw4W88rKitlmOAdJMkmIGpQpXyAQ\n",
+       "0MzMjGKxmBlmDC5WVlaMqkBmIu0qlUopmUwqHo9rdnbWdl1YEpyfn5vrfHt7a3Ny6KcMlnhIoMwS\n",
+       "Jg/n4vb2Vul02uw5lC70EjyYpBA8fvzYmYeUQMOREisrK5qenvZgh//f53pQizkYDKpYLKpUKhnR\n",
+       "OD4+NuQDrjucEzIctINjD5gsJBweBIB+OBksymKxeMfSCkrk4eGhPZ7xVy6Xy7a8BceGhbe3t6dS\n",
+       "qeTG7fr6Wrlczuw/lDFo7oLBoBs8QnaGs/6gXvb7fR0cHBiRqVQqNrVhTI16He83vj/fo1aruYGr\n",
+       "1+tqNBoO70FMgA0X0cvYDjBBxRZ4bm7O77vVajmo577Q3IMqM9LptJlaFxfvUptyuZzH18SmgTBk\n",
+       "MhmVSqU71rTIq4aTVcfHx12KMHWjhOl2uyoUCo5JA6dOJpP2W0NIy0JbWFhwqYO5YiKRcCNZq9WU\n",
+       "SCQsDqBUGQ7xkeSaE/gNm16a0lQqpYuLC83NzalQKNitlAdkuCxghx7edVFk39zcaGVlRZ1OR/Pz\n",
+       "85JkhiENHgYygUDA42/qYh7m4XqdfgC52PDw6n2vB7UzQ9phuifJNShDABowPmiySKBBMsxgp8ag\n",
+       "pVQqmYIJXgu2C8wE8RybW2AvKJPgymQSgt1KMlEfNhlKFSaScDTAxYEGsYOdnJzU/v6+J5X8/DSM\n",
+       "cJLB1xl0BINB02IhYPEz8JnwEEgyR5mwTH4GRueXl5f2qoNiixSMB3PYRBGN5rAS5n2vB7WYkd5L\n",
+       "UqlUUr1eNxmcupfGplKp3HEMRV3MqJdjEItZoDMUI1dXV1pcXDQzj10WMjujbB6gYRUy+X/Fb7JC\n",
+       "ms2md2Wmfdvb22o0GpY04X9MzQuMeHx87IWJoSELCV7F6empVlZW/q0dm4eKRc1uC5TGAuckOjw8\n",
+       "NP7c7/c1Ozvrmh9DRAZOu7u7pnpS+8/PzztCgxOC0yYQCCiXy93r/j+oxQwVkqgzpk2gAF988YVV\n",
+       "JNTHWMjCRYabwDQMiI0F2Ww2Xe/y9fv7+3fGxzhlBoNBUzkvLy8t12fwMqzJA6GAqD83N+fdn4YT\n",
+       "2T7Iy9jYmNbX1z1KluRAePLBcd7/+uuvbZEF/DY6Oqpqteoe4c2bN6a3UiqNjIwol8tpYmJCT58+\n",
+       "teSMh5zT6M2bN45shlkIJRZBL1NIsPiXL19qa2tLvV7PlmX3uR5UzVwqlUzfpB5EECpJ3/72t5VM\n",
+       "Jp1wFI1GzX9eWVmx+SHcCthmHPk4XTLsuLi4UDabdX0IFfL8/FxLS0va39/3eykUCg5oJ0Ma/jFl\n",
+       "DST9er2uo6MjPX36VF999ZUNBcHJQQzYAaemprS0tGTTcYhOWM7WajXNzc2Z34GKPZlMKpfL2csC\n",
+       "gxxcP7HYYlIJ+sFGgLi13W7rt37rt1yCDAYDPX782J55w9mI2Dc0m039xm/8hv75P//nSqfTymQy\n",
+       "2tnZudf9f1CLGQfNy8tL7e7uan193aNWVAy9Xs8y+WKxKElOlBoMBspkMnrz5o1dkRYWFiy5oo5k\n",
+       "LHx5ealms+malPqXAQniWcoYasWxsTE9f/5cT58+VTAY1P7+vnOxEQUkk0ltb29renpaGxsbGhkZ\n",
+       "8S6KgTcE/evra2vv4GdgUXBycmKIbGtrS4lEQuFw2AT+WCym3d3dO7yM29tbLS4u6mc/+5lWVlZM\n",
+       "qJdk1IMyKZlMKhAI6MWLF4YtB4OBSqWSVlZWjOKAr8MrCYfD+slPfqKZmRlbKvAe3vd6UGVGo9Ew\n",
+       "L4BdUJK9j+Ets4NVq1XX0wTq4IhJWurR0ZHd4rGgpU5sNpt3Xocmbmdnx4lWKDHgLWPyQh429SMl\n",
+       "y97eno968kmgRkJ4Pz4+9micqIZyuWx6KqbjrVZL/X5fxW/yqiE6MZpGmAo0iRcdJjb4MdNUYzXG\n",
+       "QwcVFHX1sEUtmwpTyuEkLHB0PhMU5m/evLnX/X9Qi3m4SwfQH5YtkXCEreqPfvQjHR0dKRKJ6O3b\n",
+       "t5LeWVtls1lFo1Gtr6+bOwFycXR0ZC84OnCQA2AscGK8JiDdo+6QdGf4sry8rFQqZU820BEgLnas\n",
+       "4Zhi8OlUKuWSgtiFRCLh4c34+LgFvcNMPPw7Tk9PlU6ndXNzo7W1NROFfnkEjv9GJBK54/rZ7/eV\n",
+       "TqdVKBScIAs/Ay44TR42tpLMHuQU4IS7z/WgFjO0Q2pKfs0ABVgunU5rYWHBNrL8ORBRuVxWr9dT\n",
+       "p9PR9PS0R9O4u+fzeeXz+TuUS+pv6sTb21tTGkOhkGZnZ73j8YCgXAYxQIRKWGUkEjHjjxIoEonY\n",
+       "4wNvEPgOTOuowcF1IS/hyREKhRSLxXR5eXnHN2Rvb88NLGSphYUFE51I5Uqn0woGgyqVSpqamlKv\n",
+       "19Pe3p7a7bYDRCuVihc2ppPBYFDz8/Oan5/3OB8l+zAJ6n2vB7WYQSdubm5MbpFkiAmFdL1etzUU\n",
+       "Ub0wwJDic8RWq1Xb20LgOTo6Mk5aKpW82OLxuMWww87zt7e3llPhA4d8C686EABQEOpYBigc05eX\n",
+       "lzo6OlKr1VKz2bScCuTg4ODAxCGwbyaGkvx/4o7fvn1rzJgUWUb62MyC/tDgMoBiweOfQfnBlJVJ\n",
+       "I7xq+pCTkxMr3KmnR0ZG9OGHH97r/j+oxYw6hFKBiyYpHo87Xo1SAguqYWokBoUYfQ9LmbLZrCOH\n",
+       "4/G4d2lQDMoWdmsGG+l02qgBJCdMt+GJ3N7eupRYWVnR6Oiocrmcxa9TU1OamJhQNptVLpdz+A7l\n",
+       "y9TUlObm5pROpz3lxB9j2AIrFAo5LXV9fV3RaNS0WHR7DJbW1tb8QOLkORzaQ+kGR5zPPxKJ+GRC\n",
+       "ZhaNRpXNZrWwsKBcLmdRLCY4NOTvez0oNAMjwlarpePjY3344Yfa3t62pB0ojV3t8vJSGxsbZrHB\n",
+       "9nr16pUSiYRevXqlxcVFHR4e6rPPPrNJYC6XszsPwtNIJKK9vT1Fo1Ftb2+b6TY+Pq6DgwNPxpia\n",
+       "bW5u6oMPPlC1WlWxWLR6Gbejw8NDLx6mfNVqVYlEwkMVSoLz83Pt7+/b3LvZbCqZTOr4+NiZgtI7\n",
+       "4QDE/1gspnQ6rd3dXbMCZ2dnLQpIp9N69eqVwuGwPTOmpqb085//XB999JEdkmjc+Gw7nY6mpqa0\n",
+       "t7enTCajL7/80nU3zeBwhszOzo77mFevXt3r/j+oxQwPF4J5KBTS6uqq4vG4VlZWHGqDrzF1KHjr\n",
+       "7u6uBoOBlpaWlEwm9fjxYy0sLHgYwq6CNVY6nVar1VIul3PjRkQE0cU3NzeOLSYkaHJyUh999JEG\n",
+       "g4GHEpQwpFHlcjmXMzSGKysrFtKyMyPnxzqMmhUKLMY46AehrUJ/xUBcknd4pqOYiUvvUJ7b21s9\n",
+       "efLEuj+GH4VCQZ1OR6FQyM3msCYS7nQoFFI0GvUDSv+xtrZmUevGxsZ73/8HVWZgmI0iA9wZZGJy\n",
+       "ctIZf7lcTs+ePdPp6an/TS6XcyYdiopOp6NcLmfhar1e947b6/X05MkT8z+y2azm5ua0sLDgehAD\n",
+       "lGEjQ7gT/DqVSrk5oxbHIxp6JzxsBLnPnj1TMPguzphgnqWlJTeF7JpLS0tuvhKJhCYmJhzhQBmS\n",
+       "zWYViUQ0PT2tQCDg7zkzM6NoNGojR0ozAjLhdo+Pj2t+ft4G7vQctVpNIyMj5jED5RUKBU1MTGhu\n",
+       "bs5wYSqVcsDo+14PajHTZAUCARWLRVM9z87O9Pz5cxv6Yc79xRdfeHeDnthoNDQ2NqZyuazNzU1d\n",
+       "Xl7q5ORE09PTZr0dHx9bxEouyvARyv85HfCWgITPa1OfHx4emhkH6R/vOHZrYuGQXf385z83rl6t\n",
+       "VvXy5Ut/b4KJcHhqNpuq1+uOOAZmBFEpFosql8u29cJa4OzsTLu7u44SrlarOj4+tpH45uamEaBS\n",
+       "qaROp+NkXMx0sHmgyTw9PdXx8bFubm5Uq9XU6/X01VdfGWu+z/Vmi5xbAAAgAElEQVSgFjPuPufn\n",
+       "5/qzP/szjY6O6uDgwDXbq1evbDyCbxyyIfDVRCKhr7/+Wqenp1ZWswhgjjWbTVMaOWa73a62tras\n",
+       "zGDYIL0zAoetBkWTRXp5eelQH0QEqEe2trYcwdZqtcxXrlQqJgKdn5/r6upK6XTa9gmvX79WtVrV\n",
+       "/v6+ud2zs7O6urqy1g8PO4ZI5+fnOjk5sWXY5eWltre3LQPb29sztMgIHNV1rVaz+oSHF2gQY5mj\n",
+       "oyMtLCwYbx7OP4lGo2q1Wvrqq6/udf8fVM2Mp0S329Vf+2t/TTc3N1pdXVUymbTwM51OK5vNuqwA\n",
+       "m4Vi2e/39cknnyiXy5lY/1f+yl+R9M7+i/Ew/sZAcLFYTI8ePXLJwjDl7OxMn332mdrttknpg8FA\n",
+       "T5488ZELNRQIL5FI3DHehnXGQGQwGCiVSpmFNkzFpJxKp9OeFHa7XZ2cnFjCRN4gCMfq6qrVHsvL\n",
+       "yzau+e53v6vJyUlls1nnhwN3UpsD4/HgxmIxW56xIaTTaT+olHqS7A6VyWQUjUb1wx/+8FeyKS6i\n",
+       "G2i0kCZVq1XVajWb/cEfGB0dVT6ft1VrLBZzHASKCZw48dGIx+M2Gm+32zZMCYVCHnsnEgl7uzF+\n",
+       "HuYJgy9j78VuhhcGBi0saKyvksmkjo6OfByfnp7aVkt6xzvhCEcihqPTo0ePrG1sNBoeyRNkn06n\n",
+       "tbi4aNYh6hTUO8COGNnABMT1iIcJByOMXqLRqEZGRlxyQGcl/HN5eVmRSMQeG/e5HtTOzA41MjKi\n",
+       "g4MD+1kMa9ngAsTjcSeHRiIRzc3NmR4J3ZIanIFLIBAwDMdNGfaH6PV6HlpAA81kMqpWqx7jhkIh\n",
+       "0zsxUcRNCLX08fGxJDk6DSiQIxlYEP8O6Z3NLjxl+A/QW4H2GGAkk0lVq1X1+33zVygjGLTADqxU\n",
+       "KrYWuL29dckFFZQdGcadJH8O4XDYbkVwvYf1hpRK8GBAVd73elA7M4MPjPkwKSF+YGtry+6dmKnU\n",
+       "ajW7WfKhV6tVx5uhhqBm5TUpBejGa7WaoT+kVTwMmLZIshigXC67QWNixn94KEMc6na7Oj091cbG\n",
+       "hrHlVqulcrls1hqqDlhphPocHh5qb29Pt7e3Ojg4UKfT0dHRkUub6+tr1+ULCwuuo29vb23OyKST\n",
+       "Gh35GA/y7e2tzXBarZabOd47dAL43GDbp6enOjk58evcN6H1Qe3MkUjE5BYmaOl0WpFIxFkky8vL\n",
+       "CoVCymQympmZ0T/7Z//MdrFAXN1uV/F4XPPz86ZPXl5e2hL38ePHDrZZWFhwlARY8/z8vKeINzc3\n",
+       "9rgbHx+30yg70/T0tB49euTuH4LU4uKiY4lpULPZrNXTa2trNm6EaA9feGVlRaFQSIVCQWNjYy49\n",
+       "stmswuGwCoXCHQiNYQdQH43tt7/9baXTaY+hCT2anp5WrVbT4uKid/JMJmMLsqurK+/8YMsgOXz/\n",
+       "drutxcVFvXz5Uo8fP5ake8dAPKidGf0eFyaFsNfW1tZULBZ1eXmper2ufr+vx48fO42KHWTY4xnC\n",
+       "UCqV8g7/5ZdfWr1cLBatokAYgLcdWC0NHO8Ft3m8JqhhIfUMBgP7w11fX6tQKNh7mjp6a2tL5XLZ\n",
+       "qAoPE0rnfD7vEwIbLHZ73O0xbxl+uCYnJ83rIBGA6R6CX8br8DcmJiZUr9edb0Itnclk/KDBz0D1\n",
+       "Dkc6l8vp+vra2dz3uR7UYoYQE41GTU4ni6Ner2tra8seD0jucTN6/fq11SdgsGSDfP3115bNM4lj\n",
+       "ocDvoFEkx3pkZMTaPpo+rArIy2PnpmY/Pj72Qs/n8/bG2N/fN5WUU6PT6SiTyVj9AhIivSsdSqWS\n",
+       "2XTDVE6YhJRfk5OTOjo68klCs8tCRYMIFxufEMwVpV/0KsCgDITQVHY6HXU6HaMbpFIRiwG0R7nx\n",
+       "vteDKjMgq+DbQJcNUiHJtSq7GRo9iEQ0gWj6qP/wRBv2bwP2YpLGwqRposnDt7jVat2hbCLkRCUO\n",
+       "FRIvj3a7rWw2aztdGjHizIZ9pQn8wdeDxpDdEM4yu2a9Xrf9AaYvuC7xmWElgAfd0dGRxQXs5nwO\n",
+       "qVTK2SUkVfF3nJYgIQyFyMvGz46I5Pe9HtTOjL8DKmwGHbgKUdtSa+I/zA4JjIYkfnT03bM+DPfF\n",
+       "YjFDa8M7D7HAQE4ou6VfxDPE43Ef4SAVktwETk5OeudCns+uNzo6qkqlYuJQIBBwQxUOhy3jAn0B\n",
+       "mqTxg8BEaQGycHFxYUISuylWDZw05+fnajabLr1wSqVR5AEafjCHjRVRyKPG4TNBcAukivPS+14P\n",
+       "ajEPW8HSsLVaLY2NjdlDeHiHDoVCRjbYibChQqFC6lS1WvXRyhSPwJ2zszObvtze3nqU3O/3TSBi\n",
+       "hI6ae3JyUqVSydxg5E/sbJubmybZB4NBR0O0Wi37Y8AVYRd+8+aNjo+Pjaogeo3FYo5sYFemUW40\n",
+       "GuZkQHyiyUSXiHoaM/ZoNKpGo+ExOzg9kCeWZqAwlDfHx8dGT4YTsEBJMpnMve7/g1rMOBChZxtW\n",
+       "Otze3lr9wI2U5OmWJDPeyPTAulZ6J+GnURkMBvYjnpqaMg68v7+vqakplxXVatU1eD6f9xEOJyOX\n",
+       "y9kSgBAfJFM/+tGPvHujSwQdGB8fVzweN7EpHo8bffn2t79tHw+ywyORiHkbOBm1222dnp5accKw\n",
+       "BGf8WCzmhjCXyymXy7khjUQiWlhY0OjoqG0dGDRR2tHwYRXM0ArHqPX1de3v79vPBAHCfa4HVTNf\n",
+       "XFxoZmbGtS81LDZTn3zyic7PzzU2Nqa5uTmdnJwok8mYq0sjx/E/7J8xMjJiKRZH+KNHjzykgPnF\n",
+       "TcRY8fb21mYneEpwOhCxEAwGlU6nTXIC2sKMkRAg2HZzc3OWKeFGNDIy4qRTalS8Nfr9vubn510m\n",
+       "ZDIZoweEaQJXnpyc2OIM32gQD+A58lpWV1c9jSR2jQYQZuHc3JwhS5z+8bRbXl5WPp/XxcWFy777\n",
+       "XA9qZ+b4J5MOIWYsFjMzLh6P+6aOjIxYWZHJZFSr1TQ1NaXvf//7bhDZvSYnJ3VxceFSA8ssdnB4\n",
+       "IZIc0l6r1WwEg6M9pcvs7KxqtZoajYZVypCGbm5unI+NF0ggELCDEIR2bHfhITMsgrjPxVSSBxw+\n",
+       "BCcLDR9oDVNGeMoIejnhOJmIP+ZzB/HhgaLeZ3hECUbWYbfb1Zs3b4xr/yo6behClsRiBUKi7qS+\n",
+       "Q7dHg3Vzc6ODgwMfc3/4h3/o6Vs6nb7jr4FWTpL1gpQUBwcHSiQSxrORUwEXwiG+vr5WuVxWLpez\n",
+       "UeHc3JwpoMFgUAsLC3Ye2tzcVL1eN3S1urqqvb09n0SMg+Fw83PTA8zPzyuZTKrX63l48/r1ay88\n",
+       "IosnJia0s7OjUqlk1Qk2teDltVrNBKphxAJbA1TjX3/9tYlc+PrBz8b+oNfraX5+3o1qtVq91/1/\n",
+       "UIu53+8rk8nYlHA4nFGScU7MSIhE44hjEPLhhx+a9AJsB1yHmSHHMK6fMzMzSqVSOjw8dK2MTInd\n",
+       "iJIlEAhYAApKgkcH418WMuoQUBR2RkmGzXgAGBKxAPlMQAwoc66urjQ3N2cWW7/fNy8EVcpwqBEN\n",
+       "GooWampMbVC2876ur6+VTCZtMHl+fm6yfr/f1+TkpObm5swN4QFH3vW+14OqmcfGxrS9va1ut6v1\n",
+       "9XW1222LO/lw0+m0+RMTExPK5/NKp9M6OjrS6uqqaY1Pnz7VH/7hH7q+pWm8urrS0tKSDVlSqZQb\n",
+       "RtJJ0+m0yuWy4yQWFxdNsfxlb4zBYGA0g51/cXFR8Xhc7XZbpVJJS0tLarVapo/u7++rUCi43oQA\n",
+       "BewVCATuGH3jj4zdQSwWs3i11+vp29/+ti1siTzr9/taXV3VYDBwJgxGjuQR0hwTsoMpJWaR+EPj\n",
+       "AYi6O5lMqt1ua35+3v7Ok5OTtvt63+tBLWacgjqdjv74j/9YH374oY1UmKKxQ62srOiLL76wb0M+\n",
+       "n/ei3Nzc9OBhY2PDdTblwsbGhlKplN68eePdV5KxamRS1KTPnz/3AiADGyPFm5sbvXnzRoVC4U4Q\n",
+       "/YsXL5xR0u/3nfsRi8XU6XRcH8M9xpUJke7c3JztDEqlkr7//e/r5OTEtbokl1kvXrywcQxj+Xg8\n",
+       "rpcvX2p2dlbtdlt7e3v2jF5dXfVUMJFI3AkDBREJBAJKpVKqVqu2FeO0qlarmpmZ0cbGhnf9Z8+e\n",
+       "6fnz5/e6/w9qMRcKBX9wy8vLVjUAh+FjAaUTdfT09LQHJ9Sr+XxeZ2dnWlxc9DGIrSuICYaJMOlQ\n",
+       "ZGOezQQQOf38/LxVzVAiB4OB1tbWPGRh6LC+vm63ULBnFCEYM/LAcOJQk1L6HB0dKRQKaW1tzQYu\n",
+       "fAYMhvr9vpaWliTJiAjvIZVK2QckkUjo5OTEQlVKFghXPOwY1GD7tb+/r6WlJX8/kA5Mxo+Pjx1G\n",
+       "+qMf/ehX5Hyuw8NDCzorlcodUxKgNaiRHIO/vMNJcnqT9E5XuLOz446dhcCxj29dv983T4JjFWND\n",
+       "eBPb29tqNpvmOMBt6HQ6LitCoZDq9br29/ctYRo2UgECxL8ZK9jz83NVq1W7McGVDgQCOjk5sWSJ\n",
+       "KDbw55ubGyMdGExSu7bbbR0dHalYLOrg4MCIR6PRMC2AQM3h3MHb21v//4MPPrD0CtnY5eWl3r59\n",
+       "69INMcHu7u697v+DWsyMR5m84QgEtMQOwoeeSCRUrVZtvI3hNzAeOCuqk1/2Nr65uTHri6HMsPsm\n",
+       "i5YGk4UNG43mEzYauYHAhpisVKtV78qQ5nG5Z8qHhQJ1PQuEKGJOCppESaZk0pgyCkeMS0wyWS1o\n",
+       "+ygrMDPHngBMGvRIksM8edgZxlByZDIZw5oHBwf3uv8PajGvrq6qXq+7XKDGhQ3X6XSMQ0vvatzF\n",
+       "xUWl02nHeJHElMvlPEzI5/NOfo3H4zZNYYfLZDJ2KSoUCmo2mx6CAH0tLS356zBNhMMAzkt8wuzs\n",
+       "rDKZjOMp4GywqHZ3d82RxkgRhyUsDdj5QXDgU1AOTExMWGXNZ4VHH+VONBpVIpFQLBa7Y8GwuLio\n",
+       "TCZj9t34+LjLBsxlJNmHjocTQcDk5KS51LiXBgIBlzvvez2omnl3d9fH3fn5ufb29ky3pAbc39/3\n",
+       "btPtdo0NE2HQbret2aNmBXVgB4rFYrYy2NvbM/m/Vqvp9evXvpnpdNrQ14sXL2xSA77MtI2pZKvV\n",
+       "UiqVsgni5eWlR+E0c1iQHR4euieg9gyHwx5Tw1tGvEpNzVTy+PhY6XTaGSmUQiAu4NK8FwI9JyYm\n",
+       "rH2Eu4EFA+GeTFKhsqIXpDSbnJz0hJNBCZzw+1wPamcGhqNhWVpa8s41Njamer1ujgFH/szMjNLp\n",
+       "tK25njx54ro7m81qeXnZdlWIUlm8iURC3/3ud40m3N7e6tGjR3ry5ImpoSwERr/sZrVaTY8ePdLk\n",
+       "5KRSqZSWl5f15MkTTU5Oanp6Wqurq8rn81Z1EP1weXmpubk5188EZgIrzs3NuWxZXFzU5OSkgzuH\n",
+       "U5442ldXV72rT09Pm3jEzwv3BKW79Isdt1qt2jASE5tkMmlbBfoLSplsNqu1tTVb7cIrhxY7PBN4\n",
+       "n+tB7czNZtO7BbKeaDRqwxG692AwqEAgYG4E4D66t2HqJey6ra0t831xGu10Otrb25Mk7+KlUskN\n",
+       "EWP1P/mTP7FUiRMhFoupWq2a2IRjJvzjvb09zczMqF6vK51OG47LZrPa2dnxoKVcLnsUD/H/5ubG\n",
+       "Sm44y5VKxZg0LkTdbte6O8j6MzMzprfigAoCwpj77du3Ojs707Nnz3R9fe3Phh0eI8jhGOZ4PG4y\n",
+       "FqPysbExtdttnZ2dmXx1n+tBLWb4s+Pj43r27JnGx8ftS7G4uGhTP3Yl6suZmRl39clk0se99C48\n",
+       "5/Xr15qdndX09LRGR0ddGnAShMNhZTIZ7e3t2RKLB6bdbnswgx6P4QEnxMLCgs1gcMn/4Q9/qO3t\n",
+       "bT+c2GQxoSuVSkqn0/Z6ZtKILo8dk5+B8HXEslhvTU5OOrASg3ASAjKZjAqFgsOJarWaUqmUc1cu\n",
+       "Li40Ozvr9yy98xbBfw+vEkk+qRYXF525CPID7fRXCa1DF2Pdbrerr7/+WsFg0C6UlUpFh4eHOjw8\n",
+       "1Pb2tq6urlQul81VZiJ3cHBg0k+xWNTp6amurq684EBI8BVmZz05OTGBiCkd/GluEgw1/C2w/Nre\n",
+       "3la/39fTp0+Nxe7s7NihHnJQsVhUvV5Xs9m0OppBTSAQ0OLiog4ODryA6vW6Tk9PtbW15UGGJDsN\n",
+       "YbfVaDSc2wJXhZobH+dhx9F2u+3FSUIANNZGo2HUhO/faDTUbrfvOJyCclxdXRntuG/a1INazBi+\n",
+       "dDodffbZZ7q6urLJCP/P5/MqFAoaHR3VysqKj2IooaFQSOvr60omk5qZmfHflctlp5hisBKLxVQu\n",
+       "l410wMmlDDk+PtbU1JSNZiS5Ycpmsx7EzMzM3IkNptEjjAcxKcR3VN2SPCYeDAY6PT1VJpNx44m/\n",
+       "RTabtf4RVQzSL3jR0WhU6XTaeeH8HdBkrVYzOR81tiSLGOCRsODBkDn52PUlGTFBG0lZ8ivW3NB1\n",
+       "dnbmm4F3MtBQvV5XJpNRt9u1l1qn01GhUPAuglfa/8Pem8W4mqb3ff+vdq7FnawiWfvZ+3TPTPcs\n",
+       "PZIVQbDsXDibA8QOksALYASyIyMXUSLpOjGsIIiQQFGgwIHgC8NyvMSJBRuxJDua0YxmJtPdc06f\n",
+       "pU7tVdyXIllkVbFIFvnlos7vaZY8stqn0NKoMB/QmDlbcfne732f5//8F4YfDA3gZ9Tr9Wv+Fycn\n",
+       "J3aDLi4ujCUG8uHz+XRycmJ/j+iDUChkWK7X6zWVSzKZVLfbNZ4EF2NxtHIsDLB03juEJthxDI3g\n",
+       "I1OToh6BXcdnA3kBEep0OqbS5kFFZAveLslw9PHJJgOZ8SYYk5jRaKRGo2GRbGR0jyvr3+S6VYuZ\n",
+       "ZocvfLwOnJ6eVqFQUK/XUzKZtJ0DqIuufzgcqlwum98FmXh4x4HFMjjhxjO6ZljA+JZhBna2sNtg\n",
+       "m0EjdRzHms+TkxOTYU1PTxtnpNvtmqMQMBZ4LU3VeA4KC4oHFJI+iAbMwG63q1gsZsoR2IAgEjAR\n",
+       "WbRgz0xFWYy9Xk/BYNCQIWIiYAbyHYIwIezlYYco9abXrVrM3KS5uTnt7OxYfVYulw11IHdvbm5O\n",
+       "h4eHevXqlVqtlo1Sy+WyKpWKOp2OXr58qVKppP39fSP8MMqWPnFOAtuFuTYYDNRsNs2lf2try3Zr\n",
+       "oK9ut2t1da1WM+U2D+Dh4aEajYbV0/ClIbWPuxSRa/Ls2TPt7OzYz2MgAQoiyVz6qXtxYqrX6yoU\n",
+       "Cte85qrVqgqFgl69eqXBYKCjoyOdn59re3tbH3zwgT1U5XLZhK0Q7fnewc9xNCoUCup2u2o2mybB\n",
+       "gvOChdmbXrcKzYAVNjs7a74T4LH4zY0f7cQmjBu/rK2tWWNHLcmYHKUFU7l0Oq1oNKp6vW6umODQ\n",
+       "TNimp6ctkmw0GlmtSmorUB96ORrRQCBgp8rc3Jza7bYeP34sScavjkQitosi58JR6PLyUvfv3zf+\n",
+       "MPV3KBQyVfX8/LxWV1cNIvT5fKrX68pms5Kkx48fG1TH6FqSiRNwkEIQwNic/G3yFwlCmp2dVTab\n",
+       "NR0iOze5KD+MGx674CVAcqFLhqhPpw3uymJhyIARIHnQMNTgYbCAqtWqDTDq9bp6vZ6psTHfxlIA\n",
+       "+T9+ERDlu92uyuWyuc1jik7IPELRcd5xrVaT3+83425qbcxbwIFHo5F5YfAZxk3EsQgjnYqdejAY\n",
+       "WN0K0X44HBr5CKuu+fl5223JIWRDQFVCI4wxD7Ck3++3iWy/3zfvulAodGNo7lYt5mg0qkgkokKh\n",
+       "YOLSYDBoqAO79cXFhSWyOo5jdV6/31c6nbbMkng8bhgtJB5MEweDgebm5rS4uGgkdOpwGqZSqaR7\n",
+       "9+7ZZAs9IdwI8vTG6aSSzLd5MBgY3XR9fd0kSeOpTjSO8D1odpmEjjv+czrxsyHEwxqkIUQIzPSO\n",
+       "XPHZ2Vk7RR4/fmykpHg8rkqlorW1NaMPJBIJC4zHcyOTyVgjiusSiVSoT25y3aqaeTgcWpAMPhXj\n",
+       "Tu6O46hWqxkP4ejoSKenp2q323rw4IFc19Xu7q6VFb1ez3YTr9dreG6hULAaGuspWGWDwUB7e3u2\n",
+       "29PQwf2l2UkkEub8Q/oVu9Q4rRQJPyHrkoxKiqAUfwpqZOwQ8K9AroT5DdNAtJDlctkoo3jZwbsm\n",
+       "ExxDGOwWwPMhNL399tv2OXH+pxGn9EKWdnR0ZL6AYPS8xk2uW7WYoXpClSS4Em9iVMnoz7LZrJUe\n",
+       "3/72t826ikRWnDeRYlH/xeNx88AA2oJcdHZ2ZrIq13XNfAV30GAwqOFwqEKhYF7PxPQiIAU9YMJG\n",
+       "3ggS/3A4bAgMTReav06no5WVFWPxjdfpeEoD68H8Y7oJlEZp1mg0bFQ+MzOjXC5nKMrx8bEkmSh2\n",
+       "b2/P/iwYDNqmgvcefUa5XLYyJRaL6ezszPqDu3fv3uj+/5EsZsdxQo7j/APHcV46jvPCcZwvO44T\n",
+       "cRznNxzH2XIc5587jhMa+/s/5zjOtuM4m47j/Knf7+dOT08rEolci0qAAE8dV6lULKidkJzz83Pz\n",
+       "njg7OzM+QrPZNOcecFdJ5itMJAR5KaSOttttOY6jeDxuDc/ExITtiDSmOPrAj8hkMnJd19yLqO0Z\n",
+       "O1Mzw2mgnMFii52Qh+/Vq1fW2DJAcV1XqVRKk5OTNvk8OjpSu902FyIU1bgucbpxwd3gIfF6vTbh\n",
+       "G41Garfb9v+Hw6Hy+byazaZarZbm5uZ0fn5uU85MJmMWDs+fP7/Ruvqj2pn/J0n/1HXdB5LelrQp\n",
+       "6Wcl/Ybruncl/dbrX8txnIeS/pykh5L+bUm/7DjO933fiEYxGp+dnbXpGR04+DBTKhzoKTNojODc\n",
+       "wqzDgwPeQyAQUCKRMGNE+MVgtfPz8xYdxrROktXmNEaSzK4AxQf4Nzki4zHANHyu6xqxn2YTDNjv\n",
+       "9ysQCBjagUKk3+9b2tXc3JzV79Fo1LJO+FmUCvy54zhmAIN4F4QHtTbNsuM418LqU6nUNTNGPpPH\n",
+       "4zEagN/vv7Gl7R96A+g4zrykP+G67l+QJNd1LyWdOI7z70r6t17/tb8t6f/V1YL+9yT9Xdd1B5IO\n",
+       "HMfZkfQlSd/6vT8b1hYkcelKkoRe7uzsTKFQyGAhqJ+INuF2EAgJNZGpHLIpNIXo++AZcCLQFBKA\n",
+       "Oe4bDbTn8/mMAplMJm0Xm5iYsOEKY21qZfjQjH89Ho+Gw6H5QO/u7hqJfnZ21oYuLEpgPhpBHEZn\n",
+       "ZmasVobMBNKDD914rHAikdDR0ZGFEUlXo+9+v28qGNxYx08EPid8ZxpX8GZEE296/VGgGauSao7j\n",
+       "/KqkdyR9IOm/lJR0XZcOoCIJF71FXV+4eUnf12IdIsu4USCwFDgpjRZ0THI6KDP44hkTh8NhVSoV\n",
+       "vffee9rd3bVGyXVdO2oZomATAC7Lw0GpI8ketlwup4cPHxoSEg6HbXdlR8c6Fs+K09NTexBprFzX\n",
+       "ValUUq/X0+Liou3CTNbGbWcZ6AAjnp6e6vj42BTVGxsbOj4+VqPR0Be+8AWre0FoXr16ZQ0msROS\n",
+       "rHbH5R/oMJPJWMPL0ARrAdydaFbpL25y/VGUGVOSviDpl13X/YKkM70uKbjcq2/J/df8jO/7Z1//\n",
+       "+tf19OlT/c7v/I7K5bKx3eAFAC2NRiNFIhGr4Uajke7evWs1M+mq0lWA5dramh3zS0tLxkNgN0Wa\n",
+       "v7S0ZE6kHPGUHkz/eGiWl5etdsb9k+YVhTOYLUJc6JfoB7GKzWQytthhrLE7k3GCpAo3VPjMTB3x\n",
+       "uwiFQlZ2YCZD08hnmZ2dVSaTMd4HJ1W73TYc/f79+8aPdhzHyPgMdVKplPFfXrx4oWfPnv2xNBvP\n",
+       "S8q7rvv/vf71P5D0c5LKjuOkXNctO46zIInZZkFSduzfZ17/3r9yvf/++woGg9rf39fjx4+Vz+dN\n",
+       "lf1n/syf0Xe/+12l02kLbL97966Oj4+VSCSMHonuLR6Pa3Jy0tyPGE9DwifUZ3zI4vP5jFyPsoUA\n",
+       "n36/ry996UsWSIn75vT0tBKJhIbDob785S9rNBppb2/POA/pdForKyv6xje+YUYrHo/HzBghwUME\n",
+       "gqyUSCRUqVS0vr5u43z42DRulDmZTEa5XM4WNLXvvXv37Ncw/BAQSJ9MAgmUf/fdd61hHBcwzM/P\n",
+       "q1Ao6K233lKhUDDbsoWFBSUSCX3hC1/QcDhUrVbTr/7qr77xwvpD35ld1y1LyjmOAw7zJyU9l/RP\n",
+       "JP2F17/3FyT949f///+W9Ocdx5lxHGdV0h1J3/n9fr7f79edO3d0eHhoFrGdTkff+ta3bOdFqey6\n",
+       "rnGDd3Z2jOlGbIQkiy7GT2J6elrJZNI87KhRUX7s7u4aW02SjbQDgYCOjo4sCapWq5kbfiAQUCqV\n",
+       "0pMnT8xCYHFxUdFoVGdnZ/rggw/k8/ns10RGcMyDHCCQxbETE5ZkMmkuoOz40pWYYX5+XgcHBza2\n",
+       "H1fk5PN5q9Nx65d0zXCcaWUwGDRUZnZ2Vs1m81oWOK5L0F0nJydVLpetzKKxvcn1R4Vm/LSkv+M4\n",
+       "zhNdoRn/naS/KeknHcfZkvQTr38t13VfSPo/JL2Q9M8k/VWXu/h7LqRI2FrhDo+vMPBQo9FQo9HQ\n",
+       "2dmZNYGUG6gqkFOBeLA4YYjRCKJ3k65KksXFRfl8Pu3s7NhkkYeAcoMhCOYxDEhw9gyHwxbtwMBn\n",
+       "3INuvAzC9w6E5fT0VI1GwxYMI2YWMEkB29vb14ZLlBUw/4gD5iSC+TaeMYhwgLoetQt4P98ffQLf\n",
+       "seu62traMniS/oWH5U2vP5Jxtuu6TyR98fv80Z/8ff7+35D0N/6gn7u/v28+EjRBzWZTmUzG/Ihp\n",
+       "dlgkH374od59913DodvttprNpu1IhUJBHo9H+/v7isViRpf9F/kAACAASURBVNLhz4DUQAoobfCP\n",
+       "wOYKwg4wHrgxLp6MoPGdK5fLikajZiWLOsXv95sGEDtckJpqtWqvg3Ib1hp4NGT6YDCok5MTVSoV\n",
+       "TUxM2E6PpAs9Yb/fV6FQULvd1v379+0hazQaeuutt8zRkxocJAYNIUIG/pzPBhGJh9l13Ruz5pzf\n",
+       "Z5P7Y3c5juP+1E/9lHEvBoOBZWQTlwDXgBqQnZyJGlwHkpok2S5KXvbOzo5FKwCdkaFHA4ZxeaFQ\n",
+       "UCqVMgMURtIYhKPLI9RSkqEgpMBCHOp0Okb+l64SWe/fv69CoWA539FoVK1WyzIF3dchl61Wy0wd\n",
+       "JyYmzKAGywRilsHoCdyp1+smppWuBk/j/Gswe4j9Z2dnZopDucL0r9lsWrTb5OSkTk5OlEgklMvl\n",
+       "lEgkND09rU6no1/6pV+S67rOm6yBW0U0kqREImHxB6SNxmIxM/wG/mK3LhaLZpoCL5luv1QqGeGf\n",
+       "3ZFFxoAD0j0LgeOUUTDaOSxxwcCfPXumjY0N40+ABkSjUZs8MoZnEQK10RxiNH58fKxKpWLlDGN3\n",
+       "8vcKhYLV2T6fT5KszJmamtLBwYHFSBCjkUgklM/nDc0YLyvGVTLjkCH6SPyaeah5IDmZKLXw/CCk\n",
+       "fnt7+0b3/lZxM46Pjw0tQLlALciuMC56JZnK4/GoWq1aKA1JpixGlBydTsdEpBCCQD2QNkHex64L\n",
+       "otH4EU+cAqNvuAnjJKBSqWTUU6it5XJZZ2dnajab8nq91zSJlBeSjOLJg5tMJnV0dGTfB4McShEM\n",
+       "aeLxuJ1OExMTisViZuCCUyrNM+XD5OSkTUr5njAWx2EVSsC4IABeBu6lYPM3uW7VzgxhptFomJAU\n",
+       "90t2HaynIpGIyuWyQqGQ6edCoZA6nY6Wl5dVr9fN1w3CfDAY1Oc+9znt7++bnhAbXerTSCRi3T0L\n",
+       "m3EwgxYsajn6x03ACe3JZrNqNpsKhULGkSAUiBEzyIvjOEbFJA9wfn7eCEi7u7s2+XzvvfdULBZV\n",
+       "KBTshIrFYsaRwO2JYB7q52QyqcnJSS0vL2t5ednMJNPptObm5nRwcKCJiQkzsnnx4oUl0r7//vva\n",
+       "3NzUxcWFotGoTQD5jJCn/vSf/tP6xje+8cb3/1YtZjr2QCCgjz/+WFNTU8rlcgoEAmbvSr7H0tKS\n",
+       "GZY0m00jj09MTOg3f/M3zawchteDBw+uWbiSUbK0tKRqtWrSpuXlZUMF7t+/b5ZeOArhWt9sNk1j\n",
+       "t7u7a4pxKKb9ft+wYaiseH34/X7t7+9rYWHB5P3tdtuGK1BJMX+UPrFheP78uS4uLoyuiaMnwtJO\n",
+       "p2N9xatXr8yGd3xHHw6H2t3dNbencUFAsViUx+NRr9fT0tKSarWavvGNb9g0E5YdBpPYdw2HQ/3O\n",
+       "7/zOje7/rSozUP0eHx8btivJ1MKFQsEcjvCE2NzcNLEl4lAWO+JOasTxcJ5kMqmlpSVFIhHL4kin\n",
+       "09dU1B9++KGRa9iRp6enTbNHPR4Ohw2VYPH1ej1ls1lNTEyY6z9OoShX8NigfAGOoy5nYpjL5Yy3\n",
+       "zMO7vLyslZUVyy6kcWU3x8ARwQInHPxrn89nJ1ClUrFyCG4LiA+e0aBDEI8WFxeVSqUsExFx8E2u\n",
+       "W7WYwSnZMRk+zMzMKJvNmko6FosZqwyXHjgFo9FIb731lo6Pj7W0tKRut6vHjx+bKhomWKlUUrlc\n",
+       "1t7enrHCkNofHx9b8wX1E8dRFtv9+/dtTAwPQ5Jl6VFrM21stVrGqCuVSkYGgi8cDoctg5rm98mT\n",
+       "J+p2uzbw8fl85vMB6nF6emqEIyaIYMOgNYT9oErHYyObzaper2txcdE4HECGoBz5fN7gv5WVFTWb\n",
+       "TRUKBWMKYugoychOb3z/b7Z8frCuWq1mjRhlxuHhoYbDoV69emXHOAOUXq9nNxKzbho3+L0XFxcW\n",
+       "84tFFWUJNlOSTJpPAwdrDE+7mZkZPXv2zJAR1CqSzD8Ck/DLy6tQ+bOzM5uSgRNTElCPQ6Fk0AG2\n",
+       "izkM6a0Er1MeNZtNy8YulUr2GXFwmpiYMDU2vwc6hCIbdAXsGUI/f7fX6ykSiZjC5OXLl+YuhdKF\n",
+       "B50T9SbXraqZE4mETdXu3LmjYDCotbU1U4lwzDFChhoZCoVMgXJ+fq6lpSVrwkqlkpUE4/IpWG6x\n",
+       "WEx7e3vmNgRvAlx1enraGqnFxUVLJKVkuLy8tAAcGsB8Pq8vf/nLJiWCggmCwQ7LLglBieEEdgU4\n",
+       "gsIBSafTpslDezhOJ11cXLQsRDzwOIlIfcUzgzIDvvjS0pI1eHzXBHIS8wBCgjMSpye/f9Ohya3a\n",
+       "mfGIqFar8vl8FlDOl39yciJJNr69vLzU9773PSPqP336VNVq1QYse3t7SiQSNnKVZAQaRrR7e3u2\n",
+       "wPF1brfbKpfLyufzxvnlJJCu4hVAF6CtssjPzs5MQU2ADdxp/DxyuZypzDFZYZzMe52enlaxWDQM\n",
+       "Op/P25gfqT/OozwsvF/G9+PKamwATk5OrOQBm8cnhNIBXd9wODSYFJErOkhOsl6vd213v8l1qyaA\n",
+       "v/iLv2i1LTRDMN1SqXStpvN6vfJ4PKrVagqFQkZER1SZzWa1ublprvTUxXTePp9Pu7u7unfvng4O\n",
+       "DuTz+YwzHIlETHAK3r24uGgZJSAUeEXTXPLzedAQxKL65oHE1406HU42JwElByLWer1u3nDpdNrk\n",
+       "SclkUtVq1RYsxKTZ2Vlr+miiiUKGXI8Ui1IGtYjX67UcGU6bbDZrPGl4IgxbyB+cn5/X4eGhfu3X\n",
+       "fu2HE0Dpyq2HQcLBwYE8Ho8KhYKZDm5vb5tNAFG+jK57vZ458DD5oubb3983b2acORmXE8eGVdW4\n",
+       "9wMu9ASh49V2cnKiZrNpGDbZJRcXF0Zc4lhmbE5tSUmCkBXBK3g3fwavo9fraX9/3xrPaDRqC5fp\n",
+       "pCQzYwGaQ51OE8tirdfrCoVC5uCJvS9WBJwOPODwPhBFYCrOqPv58+dm27W8vHyj+3+rFnO/37dF\n",
+       "gocx2je0euxseNJhtCLJaljsqbhJ8I1ZeDhuwl5DvoRChRvPlAwMFl4FuzxqcASio9FIhULBdIgs\n",
+       "NuxomdjhSgQXA/MWIDZgOvJMHj58aJrBvb09SdLR0ZGSyaRhzYgVQGSIwcDsJhwOX3Mpvbi4sJwY\n",
+       "EJmzszNr/i4vL80sHWV5LBazJvvly5cGb3LyYKH2ptetqpm73a6WlpasAfF4PEaux3gE21tEm0QZ\n",
+       "jJsFokaGCFQsFo2CCU4NnAYDT5Lxdlm4kNfhNkgyPwseFOkTWinowGg0Mv4FMN3l5VUcMlg0Pso8\n",
+       "kFBbYaDVajXT1T179syGNkicCCOirsW9E485ooY7nY45GgEbjqu4R6ORNjc3jSJAGq4kO8WA3mq1\n",
+       "mj34Ho/HOCzAksjP3vS6VYt5ZWXFDLPBMon8PTs7M34wg4FxC9bLy0tTi9DggOHiFgRrbNz9R5I1\n",
+       "YJiwcJNoinK5nHw+nz0Q9XpdjUbDbizN38LCgtFHJyYm7H3AQIMjQWwCTDX4zAsLC5Jku3U+nzeI\n",
+       "sFKp6OTkxEhU2CEA62G2yAKfnZ1VpVLR3NycEomEDg4O1Ov1VKvVtLa2ZiP6fr+vZDKpWq1mFrc0\n",
+       "gZyE9Xpd9XrdNhYyB/EWwc/jpov5VpUZmLycn5/r/fffl8/n0/Lyssnoo9Go2WuRQ51MJs0wcGLi\n",
+       "KmotHo9fE43SkYNaxGIxnZ6e2uKZmZkxmTwcDpJfB4OBRQmHw2FVq1WlUin7M4wVYZzNzMyoXq9b\n",
+       "iik163jshMfj0e7uriQpHo+bcWOpVDJbW04WdnvyrQnXYeKHfxyjcHjGo9FIqVTKyjCGJpRf1Pce\n",
+       "j8caUSwdMJ5h9w2Hw4ZpI92KRCLK5XKmeD8/PzdF/Ztet2pnHo1GNgDAL5k8O6Z93Gik+Pl8Xr1e\n",
+       "T48ePTILXOT5uLljGUBJAb0RrgZH7sTEhGWGIAtikXD802SxUCcnJ806llSsSCSii4sLe4jINcEq\n",
+       "i9AeiE6YLbK4MAKv1+sW0QBNs9VqmZdevV7X9PS02QNgqcWonBRb2Hvsqixk6uSJiQmbrKLmgbFH\n",
+       "rgpNKREax8fH8nq9kq4eSDw8bnLdqsWMDJ/AcrR4tVrNYgw4wmliEomEer2eqtWqstmsDTRQWjSb\n",
+       "TUtiotyg5Bh3mQcSG+cUh8NhHRwcSJLtmI7jaGlpySZ9UC6DwaDtzKAm0WhUXq9Xx8fHdsowzEHF\n",
+       "jQwKrLpWq9lInVg213X16NEjs571+/2mIG+32+YoSo0MVElDSYZ4s9m0CGVG54FAwBiHPATs9ozf\n",
+       "GdBQRiBNA6KjYU6nv6+DxKe+btViZlekloVvixQIEji46mAwsHoWbzl8MGZnZ21iiNIEPgYjaUmG\n",
+       "Ow+HQ8OWaf5OT0+NWklYDaw49IaYLuLKiTggk8nYGHlyclKNRsPMwJF1EYaJajoajZpqhh2RyIvd\n",
+       "3V1VKhW5rqt+v29TOJyfRqOR1e64djabTRvagGM3m03LWSRuzuv1KpfLqdFomHhgcnLSUgcwJefk\n",
+       "OTk5UTqdNtNHHJtQtLzpdasWM1a2pCnBSwBK63a7Ojg4MKioUCjozp07VpaMRyDQWGFmiFkJTDbS\n",
+       "S/GhGPcvrtVq1hzB+2C6x8KDSYdqGRy42+3K4/HYuBoGIAMJkAw0jDSd4MU44NdqNTu2MW2k/h4X\n",
+       "FzAyH4+owMiRUgqWHv+fEoLGGWIVzSuE//Pzc/n9fh0cHJgiB14Jqh5Jpg6v1+s3uv+3ajHjZClJ\n",
+       "X/3qV008GggE9JWvfEXhcFgbGxsGHf3Ij/yIDRJc11UoFJLP59OdO3cUDof1+c9/XtFo1PKyA4GA\n",
+       "TQzZ1djVfT6fKbDxgcbhh1Li4cOHdnpgyILPHQMK9HDoBBOJhEGNmBOS9uT3+5XJZDQxMaG1tTVz\n",
+       "EM1kMlaCLCws2M4eCASMhzE/P28DlHv37ikSiVhGNtL/bDZrTL9kMqlQKKSFhQXzqctkMrq8vNT6\n",
+       "+rpZLpBUgFCA4RJjc5K2JicntbS0JL/fbxvEvXv3bnT/b9Vi5smemprSkydP1O/39e1vf1u1Wk1f\n",
+       "//rX1Wq1VCwWjZH25MkT20k3Nze1vb2tqakpvXjxQu12Wx988IEmJyf17Nkzo4OenZ3ZlJEF3mq1\n",
+       "1G63lcvlVCgULHuPYxqFyc7OjkqlkrHO0MK1220Tqx4cHFjdC3b79OlTC6jf2dnRwsKCNjc3NRp9\n",
+       "ki4FJNloNFStVg0poDZmKMJg4/Dw0DSG+XzeTrMnT55Y2bO/v29KFrL8Njc3rcd4+fKlBRrhI42r\n",
+       "0ZMnTyRdIT3U8jSEyL9evXplSM3x8bE2NzdvdP9v1WJm9Iv6gpwOEqAkGUke7wuv16u5uTlFIhEl\n",
+       "k0mrCRlo0Cyii0ulUmo2m2Y9xYLAfmpjY8N2psXFRRugTE1NWXkBGZ9pYrlctp9DID01OM0V3GV2\n",
+       "MYSlkPOpV/lcLN5+v28KFyy7ms2mTe0YM1cqFftMJycnchzHRAQ8SEwNIWwxhmfow9AIF1Lw9vn5\n",
+       "eftM+Nf1ej1Fo1GdnJxYHARkrje9btVipg6s1+vmfQxFEZ4AQxByAKkDHzx4YM0b6gjw54cPH5rR\n",
+       "X6/Xs1gFjBlRHqPJOzo6MolWPB43SA8rWjgVmCpi/wWjjPo5lUqZsyaN0+TkpHZ2doybDeFHkpmw\n",
+       "dLvdaza+eH10u11LmYWWOe5QCjKDTnJ9fd3iLsrlslKplIbDoSlIQDLG7RMQD4AzozpHpYJ/HfU4\n",
+       "aV2Li4tGanrT61YNTVgYUCsdx9H8/Lw1K3BtUTQsLi6aQThaQZosHOXn5uZMv5ZIJLS7u2s7PU0Z\n",
+       "qmdqZmpRFiA7FaPuyclJ86qD7ww/hIEGqabjkB8BnKhKxnfLy8tL87igJ4AZyOtA5GHRj9vzIlJg\n",
+       "kWIOiXHO8vKywXfU/XiLzM3NmXqFwQeuUdJVPuN4LiGYfyqVMgHxcDjU2traje7/rdqZganm5+dt\n",
+       "6kT2M0GPHN00c/l83gYY9XpdpVLJ0I5ut2v13Dh0BfmdLp8d/fT0VOVy2WrdcDis8/Nz40wwZOl2\n",
+       "uzbuHvdwAxtnOrm3t6dyuWziV5Qf8DtOT08Vi8WMQffy5Ut7T9LVWBu73mazqVwuZycJtlrS1QNU\n",
+       "KBTsBKnVauYeSkNNn8FpVq/Xbdw9GAz08uVLM1qv1WrGQDw5OTGolO+gVCoZlo76u9Pp6MWLFze6\n",
+       "/7dqZ85ms+ZLAfk8nU4rkUiYCDUYDNpU66/9tb+mX/iFXzAPOISo3MAvfvGL2tra0uPHj+3mjEYj\n",
+       "ZTIZFYtFOY5jtTCUURYE0BSu+Xja9ft9G6qM80NALiAynZ+fa3Fx0SZ3jKeJKtvY2DDVNAOOt99+\n",
+       "20j6ExMTSiaT8nq9RtuEuca4GuNy3itjfxzx4WDgQx0Oh/Xq1Svz0FtZWTG+OEbiTAIzmcy17EBU\n",
+       "OL1ez/4MLv14JMa//Jf/8o3v/63amTudjnkTY+iCGkOSTbTY/f7+3//7Njwpl8uSZBo7n8+n3/3d\n",
+       "35XX67UdKBQKGdG/3+9fcxsCTsOHA981COnjOzjMNK/Xa7RR+A2UQ1BK5+fnNT8/bxBju922EwNO\n",
+       "MUqZw8NDG/hQMvHaMzMz9vP4+5xkNIfspOSN06gNh0OFw2HlcjkbsmDZG4/HJclOn0AgYM75NJJw\n",
+       "pw8PD42aSpoV43mcp25y3arFPA7sAxXRyRN7Vq/XrYGjAWLc2u12jVPMTcaSgOOcmwtxqd/vq1ar\n",
+       "mYkLAxMW5sLCggVMnpycaDgcql6vG4MPqAxuhPRJklO/37dFzWcgP3B/f/8a+y+fzxuSAfrR7/ft\n",
+       "825ubqrdbluYJycDn7tUKhlVMxgM2tQOR3xckngQEfdyYVhTq9VssQI3sslg5+DxeMxU5uzszKao\n",
+       "P6SAjl00Y5KME0w9eXFxYTAUhHFJhhOfnp6qUChoaWnJCDxwPbjJeKhxU1GzwAkuFovXvJipsyXZ\n",
+       "RBBOsqRr1lbBYNDkU5Cazs7O1Gq19PHHH1vpVCwWzROZiLNxx/tEIqHDw0Pt7+8bguH3+3X//n1D\n",
+       "VC4vL/XRRx+p3W6r1+vp6OjI0mnBnycnJ40mKl31I6FQyEoJTjnG9B6Pxx526eqEw8qr0+mY7ApD\n",
+       "yd3dXeXzeWvKmbLe5LpVi5nYBUa+uFHCM6hWqwbY+3w+22GweI1EInr69Kny+bxlfbDjjsvnwV5b\n",
+       "rZaOj4+vmZgztmbBhkIh5fN5i3qQPgms58httVoqlUomuSIsEzEqENjLly/l9/tNUABBH5717Oys\n",
+       "CXK9Xq92dnZUrVb13e9+1xQhcI6Xl5dt5N/v9413Eo1GrwXCY7dQq9V0cHBg9E94JtgAg5lLnyTR\n",
+       "AjcC5w0GA21ubqpWqymTyWg0GllOYy6X++HQZPyCF3BxcWG0SZ/PZ2lMsVjMjmF4FkBnCESTyaTV\n",
+       "vjjpE3wDsR1FMbIgj8djP4Njc25uzvw1IpGIxZaBQnDMY6VLPQ2WjEYuHo/bBA/yEA5NlAjU3Ax3\n",
+       "UqmUvSev16vV1VVDMkiUBesmBoLoMoZIlCx+v9+EqalUyqRc0WjUeCQ4IQWDwWtiWnD9ubk5eTwe\n",
+       "G3kjgqXnIBXrj1102md5MV5mLAztEhgJVhaowscffyzpagyOLxrURXamg4MDQxMg9/AfWC8TMcj7\n",
+       "R0dH8ng8xtRjV6TGjcViarVaJrlnJ4MoNQ4posZgMjk3N6disaiTkxOtrKxYElSr1bJhDdg5dXmh\n",
+       "UND7779vOzluoUCVCAnGTxhKA04YnJl4LzTJUGl9Pp+2traUSqUM60bjiAL8448/Nm43jEHqZBTc\n",
+       "N7lu1WLGzIXdmKw/dhGv12uKB6/Xq3fffVetVkupVEonJycKBAK6c+eOJFlIYywWU7FY1MrKis7O\n",
+       "zozMRKkCRk19S22JEIAwdHLyGo2GWQZMTk4auYldeDgcKpfLaW1tTaVSyVCF+fl5ayYfPXqk7e1t\n",
+       "LSws2EMDbkys2+TkpNLptLrdrpkyhkIhzc/Pa3Z21rw77t27Z4oQdsZQKGSKG1QyfHfsrNTMkJaY\n",
+       "4qVSKfV6Pb148UIbGxsqlUomCGaIRdgRuzcKoLW1NT179uyN7/+tKjMcx9GLFy/UarX08uVL2xUx\n",
+       "KkGDRuYGam74wXjIcQwnk0nDQMeDa3q9nvENKpWKKUCYLDJhG/euYDrJkUpJNDExYdyMUqmki4sL\n",
+       "3b17V7lcznyXJVlmH9xov9+vi4sLvXz50thpqEowIOz1emYFlkgkJMlcQvHzODg4sHID1yKGLHjv\n",
+       "lUoltVotbW5uWoMKDRU3JcbzEPTpHegPoAigAD84ODAfQPd1NPT6+vqN7v+t2pld19Xq6qqmpqZ0\n",
+       "fHxsVlegBtgDXF5e6u7duzatw2qLUoCatFQqSZK58WAqjhPnaDRSOp22KR91KLo4ak90ewwriGAI\n",
+       "BoM2WPD5fObNkcvlzCim2WxaMiociNnZWcPF79+/b6JXEBwI77xfGjDw93HUJxKJWD43vBHeDxku\n",
+       "GNjgkMruih0Y5Qw7NjESjuMoHA4bp5nvdzgcKhaLWdlFljbf95tet2pnlmQDEnY+HInI3uPIh7zO\n",
+       "eJcRMpgvjQ6TL+RUDCN4ABgUINSEtwFKMhgMrHOnVod8VK1WrUypVqvmu4aamsULhMZrM1WDozEY\n",
+       "DJTNZo07cffuXUMtpCsOSTAYvGYrO276uLS0dK1koJSiacOEkcaU75WHA/4HwlfkVzSmNMxwpply\n",
+       "BgIBQ5GIobjJdasWM7TJV69emZ4MXu/k5KRxG2ZmZmwUzPgZNhwLDiYbqgoom0+fPrVhCqE4z58/\n",
+       "V6VSUb1eV7FYNA7C8+fPDXaC/zw7O6tCoWDSKLzlKHm2t7c1Nzen9fV1I9VTo+/v7+vly5cql8ua\n",
+       "mpoyeytJ9nPm5uaMu91sNs0v+uDgwIYcNKTn5+cWK0GzWqvVlMvlzIc6n8/bAsYWAfgTHBoFD8Mg\n",
+       "fPZ6vZ65e3Y6He3v7xunut1ua3d312YAw+Hwh9Dc+NXtdrW4uKhEIqG9vT1jiyHTka4w3EKhYImm\n",
+       "0BNpDEOhkCWxBgIBW4RwoHGgpw7ErsDr9SqZTJqk/+zszCISOPK73a6q1ap5w3HUExlMXdnpdFQo\n",
+       "FIzrAYa8tLRksRCQ93E9hTxEHBsstXFjGxY+WDGG5PwaTH1+fv6aoxHoTTAYtAaTB7bdbst1XYPw\n",
+       "CMoMhUJGzIItB0zJ+56bm7s2PcVM502vW7WYOdaxiR0MBkokEmaKCHAP9ZEjsdVq2YLD1gv3+NPT\n",
+       "U6NEQtRBSMoIFlNuKKCtVkvxeNzIPSwGeMncYMoaBKODwUDdbteSnGgiPR6PsetYoJjQEEEsfeIg\n",
+       "NBgMzHeO3EOSnhjeUBYh6yJmORKJ2HhZkrkiQaxnikmuN1Zo5JJzRSIRbWxs2MgaByWGK3BoGNn/\n",
+       "0Dfj+1zj9E2spCCzMLSAOM6xSNMEdIZyYnyQwcJjLMuE8fLyUrlczky7yfTAplWS1Y2MiTGcwTJs\n",
+       "NBqZ6mN8gcPHQKVN6TAcDpXP582yq9lsqlwu2zAG0hAXbMBxYny73bamEcSn0+moXC5f46x0Oh2z\n",
+       "QHBd1/gu8EwY8ZNmBcEKXgg/C/iN/BWStXq9nsnOwL/f9LpVaAZfGPBbIpEwwlEwGLTdDb4uQlRJ\n",
+       "tsij0ahyuZyxylKplNlhoaR2XVfhcNgmhjDfRqORiWA//PBDra6uWlOEhVWv11MymbT4CJh1i4uL\n",
+       "isfjxogjAQBXJRYjNgHEp8F847RJJpP68MMPDb9m941EIjo9PTU7he3tbT169MgQD6BJDMj9fr/i\n",
+       "8bipdIDpeNA4JYigazQaJmglb/wrX/mKMfYKhYJWVlbs5Op2u4pEIgqHw4buHB0d3ej+36qdGYyz\n",
+       "2Wwqm81a182oFLfMaDRqZiePHj1SMBhUr9ezXSIcDisejxtRhiMfiT5cYEk2FQT2A79NJpPa2tqS\n",
+       "JIMD4Q67rmtoADDf0dGR8vm8qajhSFMS8NonJyfmm0wphAMof8apcnx8bEc5C4UyLJVK2WdCKQ4N\n",
+       "s9PpmIiBrGwml5IMkUEkwGmGb1yz2bSyDt+54XCoQqEg6eqkQB8Zj8fNBgFriDe9btVippZLpVLa\n",
+       "39+Xx+Mxz7iDg4NrBoHsQjR4iUTCdl9UFoFAQPv7+1YPUlZAhzw8PNSLFy8sUD6dTl9TWaRSKSUS\n",
+       "CXMSJccEUSsEJEoX13WVyWSUy+UsPJOpYjAY1OPHjxUKhUwlTi4IgxssdlnkeOKxq09PTxtezpSw\n",
+       "0WiYqSQMQGwUUGnzOoVC4ZrrEpndKNa73a7ZGYCZz8/PW94h7vzjfiIHBwfa3t6+Zoj+ptetWsx0\n",
+       "5gxKMMkOBoMWUwaJHg7E3t6ewuGwarWaDQSAoRzH0dramnmnnZ+f281qtVpaWlpSLBaT3+9XpVLR\n",
+       "1taW6QWhW/IeQDgCgYCazaYqlYoWFhYUiURMI7i0tKTd3V1NT09bQtTS0pKazaYk6cmTJxoOh0ql\n",
+       "UvL7/TYup5nNZrMKBALGGcZ9qdFo6PT0VLVaTUdHR9fQHFAOgjF5SKhpx8fcaAuhfhILhws+BCJC\n",
+       "jjgdYBvOzc3ZePz8/NweOgY5PzSBGbtQLAcCAe3s7Ei6UkDAZa7X6zo+PtarV690fn6u7e1ta0qo\n",
+       "7RqNhmq1mhHZyScZt7yFoYbxIGNyCDcgAtS4eKnx0OAiBAGqWCxKkgVB4q5Uq9UstAaONcJRvCqm\n",
+       "p6cNBwbNgPMMjoxam9ra6/UaRMhkkQcYxbn0yQCqXq+r1Wppd3fXsHkebumKeovyBRwfZILyi3qd\n",
+       "soUafDzGeX9//2b3/0b/+gfsajQaVoNiLgjZxuPxWOhkNBo13wZwXJ/PZ0qOhw8fWpPjuq6Wl5eN\n",
+       "dPT8+XMjzkB9pDZNp9OmQaR2xm6WJotdaH193bwr0um07d7Hx8c2Wt7Y2DDCEg0rECEq6Uqlovn5\n",
+       "eSWTyWvuSaRo8X1MTExoYWHBYDxJRrx3HEfpdNoek5NwsQAAIABJREFUjFgspm63a9/PaDSykM9U\n",
+       "KmWm4eOMOtAXhjQzMzMW7zw1NWUiCMdxjB9CU57JZCRJ9+7d07e+9a03vv+3ameu1WoKh8PGe2Ch\n",
+       "wpUAtoO8TiY2scKhUEihUMiUw9Ink7V0Om0WAOxO7CrjtSi7EKUGIe8Q4ceHL/CeaZxAKrrdrpUI\n",
+       "+/v79jnW19fN9wJcGPNx3IT4M/IHiS/DCgEvjYmJCQtsh6sBnIlvB7g4nngYO4KcwPlGzQKvgwEJ\n",
+       "9gcXFxcmskXjOD58wo0VUtWbXrdqMQcCAZ2cnFjeBrUtO0UymbT85/Pzc9OopdNpQwPK5bIODw/t\n",
+       "JpJGWqvV7FiVZDcHTLper9sNgt4I9Adsh9l3p9O5Zh5OgwnmCy7caDR09+5d833+6KOPjMTjuq4O\n",
+       "Dg5s+NHr9Yzn4bqu4vG4/Tz4HdVq1ep/MgVZ4FBXCQjiIYBQNK6bPD8/V7fbVT6f12AwMHSILEDK\n",
+       "BzLGyTCMRCJWUvR6PZ2dnVn9X6vVLAXrTa9btZhhniEZAj5jiEJKK/L9Uqmkzc1NG8uy63W7Xfu3\n",
+       "s7OzJlfCHTQajdpQIZ1OW629u7trpwD0U3ZMpnsQ6eEu4PPGIsI6l9p2OLwKXS8UCgYBomaB7smk\n",
+       "kkaNupnFmMvlrun2oMSCfBwcHKjdbqvRaFgdj1RKkjkaMUJnjM9IW/qE6M9ixuCcqWGr1TKy1LgL\n",
+       "P/EUUGBvct2qmhnrWIB/eBPseqlUym4+Zt0/8RM/YclIuBnduXPHXCuLxaLVwpQO1WpV6+vryufz\n",
+       "Ojo6MkgtmUyq0WgYR2RcnoRKA5kS07JIJKLd3V27yfgrs7MfHBzo7t276nQ6+tKXvqQnT57YeDge\n",
+       "j5siZXJy0nBcxs8LCwuGb8NUwzcDD+lWq2WOSvQJ4M7Ly8v2oFM/VyoVQ1sgaNVqNTv1kKKBtGCn\n",
+       "AKTIWB+ivyTD3H/yJ39S3/ve9974/t+qnRluBrXqYDCwY5XjFII9HIdyuWwumhB/yA4cjUZaWFiw\n",
+       "UetwOFQ2mzVN38LCglZXV826Fgx5dXXVcFMWNzo+UJHDw0O72djF0gAuLi7ae00mk6pUKqpUKtrc\n",
+       "3LT6nbqdYzqRSJjWEV8L2ILUt4hyYdsx3ZucnNTi4qJBbKQJ8MCjm2RDmJ2d1UcffWTTwPn5eesn\n",
+       "4LKAJTNyH41GCofDtrlAf2X0HgwGb4xm3KrFLMkaM75MmiPgJ0k2qm6320ZkR3EC/5lJFna0cHnR\n",
+       "9zUaDdul4CJwk/L5vCYmJuyhQG2CL0e5XDbEgdIGsxZU3yAWNIZYEhAVPM65HjcER/FNSQJrDjUK\n",
+       "3tBwLLBAoPw5Pz+3E47GTZINb8rlsvnFoVpnCgqRiMYaMQJqdt6767qmJOd7q9fr14hKb3LdqsWc\n",
+       "TCbN7Jtuen193VhoyHSQLiWTSWN8ra6u2tBl3EQcw+7Z2VnNz8/bVC2dTmt1dfWaIWAoFDJuBibi\n",
+       "uAWFw2H7/bt37xqCEAwGjTAfDAa1urqqxcVFmzbGYjFDKGZnZ7W+vm47siQbAnGSzM7OKplMWvg8\n",
+       "zR3NJ+UMsOD5+blxQhYWFoxmymteXl7K6/Ua0pDJZDQcDg3dgQGYTqetROE1+PPV1VWl0+lrukoy\n",
+       "WqSrzWVxcVFvvfXWje7/rVrMp6enOj8/1+HhoWKxmO2mSNyZMEFvhKnGDskOCqtubW3NFCk7OzvG\n",
+       "a56fn7eoBdhpYKdM2MLhsF68eHEt62Q8nw9iE+GTuVzO5E8c1Vjo4jcN8R8/DBJlkfOz88KFoFnk\n",
+       "/Y67FGHZO86F5iRC5YLcrNvtqt1uGxWVhKxxORenCbj24uKiJiYmdHJyot3dXcOdoXvOzMwYZAgv\n",
+       "Jp/P3+j+36oG0HEc4+S+ePFCjx49MvdKQstZuDi4Z7NZGxZEIhH1ej2jjXY6HcViMVWrVS0vL5uS\n",
+       "GB4xu56ka3wISWY8TlQvjRjhPpVKxUbflUpF8XjcGHwHr3OnKY8QnP5eNTS9AYstmUzKcRytrq5K\n",
+       "kkmpdnZ29PnPf94miODBWGYRTs84OhAIWK3Nd0WTSZ0L24/3IslIVOgn5+bmNDU1pYWFBZVKJTuF\n",
+       "MFHknuEIxbj8Ta9btTMTxLO3t2e1XrfbtfovEAhYbh1TwZOTE52dndn/ohVEnYKjPcJLjAX9fr8d\n",
+       "17iKMtJGLgTnGRgNO1vG5RB7qLvhGAMfTkxMWJnBuB2/DeKK6Q0kqVgsGmLApA91SS6Xs0wUxtQs\n",
+       "aIYvkqy8oWxh9MzDhzKbeDaMKOGlsPvTn0DMAkFCoY6/HdAemYM3uW7VYuamozSBQH5yciKv16tS\n",
+       "qaRkMmk5Ht1uV8lk0iAiBgNwcy8uLhSLxa6pK2DEnZ+fG29iMLgKU6c5wrzl4ODA0BVGv8T3gofD\n",
+       "dDs+Pla5XLagenDtnZ0dI7SfnJwYOoK6BQsD6mKI+6VSSUdHRwoEAjbJm56eNvgOohCLj50WwhE2\n",
+       "CIPBwCDEZDJ5TdU9HnlG6m2n01G1WlW1WrV/2263zYeOBhhBwsrKilZXV63Rvsl1q8oM/CSwWyUr\n",
+       "r9vtan9/37znUD0fHh6qXq/rwYMH5ubD1BD3TCiWCwsLqtVqlnPHYpienjbnH8dx9J3vfEdvv/22\n",
+       "5ufnTdlBQA1wITX04eGh+W+gTcRNHuU1DkKoVBiklMtli1K4vLy0Ic3du3fNeIb6l6gI0Amcn/r9\n",
+       "vpGoUGGT15JKpdRoNEzSRU1NTV4qlcyoEScoyjigNkk2Rk8kEiaURRne7/f16tUrxeNxi0G+yXWr\n",
+       "dma/36/19XUjsGSzWYOvYHqdnZ1ZAwYpRpJZ1RJVRtjMuK8bdaUka76wGUCgmc1m5fV6TUENkoJ2\n",
+       "EM82PDfw0wCB4Ne8R+pmhjrg0tSjuGjG43G99dZb5kfHqTE7O6vJyUkj8M/MzBhfghoYWA1cGucj\n",
+       "8r6hqSaTSc3OzioUCpkahc+zsLCgZDJpDw59CvYIxWLR4tbweJZkzXkikbBS502vW7WYJRleOt61\n",
+       "x2Ixc72nSSSrJBqNanJyUuvr62ZMAv5KDU4q0nhCEtAYQevo4dDEUeb4/X7zhpZkYgHqdWpHhKe4\n",
+       "EFFGoAph6IOnBlAZKVGUHjSwSMfwgwabHhee4v3M4GLcmwOCfqfTMTIWLkfY77IgUXdLsoaY7D8Q\n",
+       "Daab+IcA3xHuCUR6k+tWLeb5+Xm9evXK5DgMHSDDhEIhJRIJU25/9atfleu6SqVSqlQqptAOhUKK\n",
+       "x+NKJpNaW1vT5eWllQy7u7tGZVxaWjIEBGI7nfrFxYUNCwiDRzdHHjXTM+pjwoQkmT9cMpm0h4tJ\n",
+       "JQ8jDvnU6ZjDrKysmKAWfBkBLIuJGp7QeIYv0idBR3A3Jicnlc1mFY/HTdGOMyjTRdhyDHfIaykW\n",
+       "i1byABnCp5Zk6BNayZtct2oxS5906Oj9GCRAfEH9zM0olUp2nI/voHy57DQ0Rn6/35od/g3HJkQi\n",
+       "VODIhjglxp3hKUvAiDFhxL+t3+9f88bgs7GYeGgoncgQpKHEHoydniBOdI4wCXnQ8PdAHQKRioFO\n",
+       "sVg0PSETPH4+tTiuRxCfeEB5GMH+aaylTzD/cTeoN71uVQPY6/W0sbEhx3FsqMCxCiQ0GAyMnba3\n",
+       "t2ewFLsgQ5JwOKy9vT2jPw4GA62urqpcLhuMxO/t7e0pFovZDWLRQXnEUouGNJlMmpMQixlS/8rK\n",
+       "iilY4GTDU8YSDLMXIo2npqb0/PlzjUYjO31w/8QCIR6Pa35+3miyDJRGo5ERgYAfiRxmkoqAdXd3\n",
+       "V+l02pQ5RBsjICYyIhqN6t69e8Z79ng8Oj09ValUMo5Kr9fT4uKiqeeBTm9y3aqdeTxSjIhc3Nzx\n",
+       "O/N4PAaN0X1T89ZqNeuq9/f37bhmR0KxzOiZ+hU9XbPZ1NOnT23Awa7FzinJjmheh4bKdV0dHR2p\n",
+       "2Wzaboq6GQ72+IgYfzYml8vLy8ZJZqLGCYUXHBO3eDxurqVwqsGtcXYiQiMejxvllN+LxWLG2Ybg\n",
+       "32g07LOdnZ2pUqnYqXJ6eqpms2mqd8oSeN3g/j/cmccudhlqwng8bg0M1gMYm/T7/WtSqk6nY9ZR\n",
+       "77//vh31k5OT1vDQKFEzY0RI4+Tz+bS0tGSqZrBdJmqQ1TmWV1dXr5USGCrC9SVokqkj74dFzTSR\n",
+       "qRrcEhhsDD7QGPr9flOQc2LAM0FZAurg8/m0sLCgSqVi8W8MbYApEUCQsDWOEIEtI9TlhKP5HI1G\n",
+       "unPnjnl8cL9uct2qnRkmHGlMTK0uLi6u8X4hi2P4R8TX+fm5SqWScWoxDUQ1AT6KCyjNFDvk8fGx\n",
+       "7VRg0HT046y2TCajqakpPXv2TB6PR8Vi0cSpsVhM9XpdW1tb5n3MMEWSmc+wmCDQn52dmc1tKpUy\n",
+       "lfo4BjwajQzrpUHDdQiBqyQTClDuIHQlXPPVq1eGRzMVnJ2dNUgOD2dqfcS5SM56vZ6mp6dNZ0jT\n",
+       "+8Ohydh1cnKiSCSifD5vhjDo/cZDKWlgtra2FI1G1Wq1TMSJETk7yatXr5RKpVQul82nji8dp6TD\n",
+       "w0P1+/1r4TbRaFSlUkkLCwva29uz3R97g/PzcxOHEizEovX7/eZJgeJb+gQ7rtfrNvAAEmNUPRqN\n",
+       "9I1vfEOJRMIguWq1aq+P4TcPRL/f19OnTxUMBlUul62kITGAU2QwGMjr9erk5MRq+UajYUKCcSMc\n",
+       "pqUkTUFz3d7etrqcDQfr29FopO9+97s3uv+3ajEDHVWrVX35y182emM0GtWDBw/0W7/1W9Z0ZDIZ\n",
+       "y62DmkkE2dzcnJLJpClMwKTZpVzXtQkWJtxAXNlsVv1+X4uLizo4ODBJlMfj0aNHj1QsFo0Aj08y\n",
+       "tex7772nQCCgFy9eGCpx//59C9jMZDIKBAJqNBp6+PCh4drVatVKkuXlZbXbbWUyGbVaLb311lt6\n",
+       "+vSpWYoxoACtmJ6eVjqdNjcj1DR+v1/vvvuuITaIAegVJCmRSKhSqWhjY0Pb29taWVmxvzs5OWkZ\n",
+       "JfPz86rX63rnnXes6fb7/YpEIubpEQqF9M477+i3f/u33/j+36oyg0WGYz4k9LOzM33ta1/T7Oys\n",
+       "7UbtdtsciZrNpvL5/DUTxZOTEz158sSy8SgzSJzCSPvu3btGvBkMBjo/P1coFNLm5qYtjkAgYOhJ\n",
+       "u9223ZqckKmpKW1sbKharerjjz+2B2xxcVGVSkXPnz/XysqKYcEbGxs6ODgwLJsd2ePxKJfLKRQK\n",
+       "qd1ua2VlxUg+IC2gI5Is0erly5cm6K1WqxYKVCqVrKwaFwFg7HJ0dKTZ2VnlcjklEgnLHSfMB0SG\n",
+       "YQ/oED4fOzs7FhDUaDR+KGgdv4bDoblwVqtVNZtNE7biGwcTrNVqmUyeAPOpqSmVy2Vtb28bK0yS\n",
+       "kXywERhPFKWenpiY0NHRkRmOV6tV4+2yoOBFoL+jXi8Wi9rc3DSPDORLjUZDpVJJ5+fnJnWCNce0\n",
+       "jKy+jz76SJVKRfl83ohGW1tbNpIH48aajHE7JRYi13G1Ry6XU6VS0fb2tolqKdcgUVFKYAscDAZ1\n",
+       "cXGhw8NDU8OQMUiPQbwzzqLhcFjT09NaWlq60f2/VWUGU6vt7W1z0KGjd11Xm5ubBkvxRcKYg7wP\n",
+       "moAxNgYnkHZYyBydExMTZtAC0f7i4sLc6i8vL80bjoHH2dmZGo2GHjx4IOmKP8FwolQqGYRYLBZt\n",
+       "CIFZIpHJoVBIkmwYEg6HbTgCLDYYDLS3t6dAIGCMPmpcmH7wIWhYSZVlYY371DH8yWazymQyOj8/\n",
+       "N2dTuNJwLVjoL168sNE28CVOqPl8XsVi0RxQfxidNna5rqvFxUWz0fJ6vVpaWjLTvsePHxslFE85\n",
+       "bFrhaDQaDUUiETODAcqLRqPKZDLm1Xx+fq7l5WUL2ZGk1dVV4x1g0u3xeOw9YU3r8/ns9TBXgeRD\n",
+       "4A11tSSrz+FjU/JQpzNqRwmDRzMSr3w+r0ePHqnRaCidTmtnZ8dc/c/Pz22kjx8eE9GNjQ2bGsL/\n",
+       "CIVC5iOXTCYNhx43YZ+ZmdH9+/d1fn6uhw8fGlaPNx0CAcI0JV2btr7pdasWM4YjuPOcnp5qd3fX\n",
+       "AhRxPCInBKgOWqLP5zNRp8/nM6wUFQlWWPV6XbFYTHt7e9f8JxhlZzIZbW1taXFxUbVa7Vr5gPcG\n",
+       "devCwoLRRCWZoSMuptLV7pjP522BEsa+vr5upH8GLLDsGKVTwyJghUiFema83GL8j9UWAyG4yel0\n",
+       "2qBH1NW4H6G+wWw8l8tpcXHRdmJ25nFSEqcEYgBEA2963aqamYEBypLxXZLaDG4FRzIG5EiE4Cww\n",
+       "jUK2D6WRXYppI68xTrPkhrMLESYPnwP/NpTg0lXHDwRHUBC7NX4bYMzT09NmDcC4GBIRsiUWsNfr\n",
+       "NVoowltG65KMoTfu98wuSTgoZRaZLQxqgC+xtB0MBlaiQayampqyMTx2YVgN+3w+k6zx729y3arF\n",
+       "HI/HjZEG7bBardrCbjabOjg4MNJ5sVhUJpOxkTYO+Nvb26a6ZtfhhkLCIR4CV85er2dNFXKmdrtt\n",
+       "XhLAeoxxJdnRDHoAAw4eBkHqPECVSsXgPKwOUJf4fD69/fbbSiaTyuVyCgaDyuVyqlarevr0qY23\n",
+       "MVAkywUUA0cnRAOocUhT9Xq9JnCQZPxjsklwZmIYhVodOwLYfjTNDHZozpGk3eS6VWXGxMSEwU0r\n",
+       "KyuKRqN6++23TZ6D5RXZfUtLSzo+PjZpVCKRsFIBvJkdlx0JP2HHccwbGUI9tTYNITwRZPZ4MwcC\n",
+       "AStfJNlxzUPT6XTMGGY8FwWh6rjjJiPp6elpFYtFw2slaXl52XZ4HDk9Ho9KpZI8Ho9CoZAJDPr9\n",
+       "vrLZrCEvvV5PqVTKyFIYvUhXPBdsAxYWFhSPx014gKIb43DeIwsd2RrUUUSx0WjUxttvfP/f+F/+\n",
+       "AF4Q0sF5O52Ovve97xk/oVqtql6vW2xupVKxnDpJJj/66KOPbLQ9HA61s7Nj4e0sZBYinGLqWBQg\n",
+       "uHsSwTBO6YSFxy6G0Tm4Mw7/lUrF4seoYfn3QG2gDHt7e1pYWNDZ2Zm+973vqd1uq1gsql6vG76L\n",
+       "KxOG4L1eT6enpyoUCjo5OTECfrFY1OXlpQqFgvL5vH0OfOGgDcByY3JIuUWQD73BOCGrXq8b2gRe\n",
+       "PzMzYxYNN7lu3WImYzqRSJiCGu0cR/3MzIy5djJpc11X5XLZ9Gho+uA1o9rArYd6OBqNand3V5J0\n",
+       "584dDQaDa8LTi4sLpVIpMxAHzuLYnZqa0vz8vMLhsM7OziyBCYNC6n0QBwwXKXvOzs5sdx0OhwqH\n",
+       "w8pms3ZyzM3NmZ3A8fGxDZZGo5EikYipPcbRDyKTSQUYjUba29szwQN8bVCcmZkZw6jZnXE9IrEA\n",
+       "O9xUKqXV1VXNzs5aiTU5OaloNGrigje+//8mf9lxnElJPtd12zd61c/oopnr9Xp68OCBqtWq7t+/\n",
+       "bzslLjqxWEyXl5eanZ21YMdyuaxwOKyZmRm9/fbbSqfThkhAQoejkclkVKvVzAfi3r17NiHDMJAY\n",
+       "Bb/fr9XVVSsLgK/wiFhYWDCr2bm5OZ2cnGh9fd3QFDjAPHShUEgnJyeKRqPKZrPq9Xr2v51Ox+p/\n",
+       "Rt3AdJREBGhyOgSDQSUSCUMTHjx4YIt9bW3NHv4/+2f/rIW4wyQEIqR8I/QSuzJQEHSQ8/Pz9jDg\n",
+       "WgrRaG5uTnfu3NHXvva1N77/f+Cj4DjO33UcJ+g4jk/Sx5JeOo7zX7/xK36GFwgBEnh8g6n1RqOR\n",
+       "qZilT+KJIeBQd1J6IKfCzZOdo1arGaTHRG58V4FiCl+a477dbtvuzBEPEsDi8/v9FolAljZTQ/Dk\n",
+       "hYUFKxcYyrA7MrAA2SgWi/a61WpVPp/PLAWoc3O5nPkw8744vcDZ9/b2VKlUJMkYfvl8Xl6v1+wK\n",
+       "mHoCc2IGSSMNH3p6etpw62KxqE6n86/4X7/J9Wn29Yevd+J/X9I/k7Qi6T+70at+RheKEfi01MJg\n",
+       "n/jHccQWi0Xt7e1Jkg0PsBxgIUBrhAbJkARuNFwHOnsoleTqAcUx5Rt3l2cBIkbF7gt4DGol4gDY\n",
+       "bozF0fV1Oh21220zLWQaiFSp1WpZA0qTjGP+eMoUAyV4JuygXOOGMyhuKEMYlvD5eMjYib1er0Uo\n",
+       "YyDp9/vt9UBJbnJ9msU85TjOtK4W8z9xXXcgyb3xK38GF+UEEncYc6hLJF07chcWFvTOO++YoBTB\n",
+       "6Y//+I9rNBpZ/QzKgS0rZCPootls1hY2+XhQUDkFwK2B1hh5s7jAXAOBgNbW1q69d6xqGV0Ph0Pd\n",
+       "uXPH/D7AwiHL83qhUMgUNYhPMXxkUfr9fq2trRksCcrh9XqNc8J3kc1mFQqF9KUvfcnyUeB2SLKY\n",
+       "NOwP4HlsbGxYc0gpRynDgg+FQjc2Tvw0NfOvSDqQ9FTS1xzHWZF0M0DwM7pwITo/P9fu7q6CwaC+\n",
+       "9a1v6Ud/9Ee1s7NjyVGNRkNf/epXdXBwYP4XENJrtZp+/dd/XV/5yldscre9va0f/dEfVbvd1v7+\n",
+       "vkFZhUJBmUzGDAr39/f13nvvaWZmRrlcTplMRtlsVt/85jd1//59mxYi6sTIGyQgHA7r4uLCnOsR\n",
+       "B1BrDwYDPXjwQJeXl/rggw+sbsachViyr3/963r8+LFFyOHk1Ol0tLu7aycLusO9vT0jy4NaLCws\n",
+       "6Gtf+5ru378vj8ej58+fG2nq8vLSaKfn5+fa29uzkwtvup2dHX3uc59TPp/Xhx9+aDK0arWqRqOh\n",
+       "5eVllctlzc/Pm4XBd77znRvdfweN3Kf+B1cF6NTrHfoH5nIcx/2Zn/kZG3KwG5dKJWUyGZXLZdVq\n",
+       "NcViMcukgxA/btDCIiMBCWdOJEfVatXQEca0kHQocwimxMibB2Z6elqVSsWQEgQBHPdg4EwLOQGk\n",
+       "q2M9Go1a/Y9hYjKZVKlU0vT0tOr1ujKZjHq9niEY+N01Gg2FQiE9ePBAh4eH18oMJpypVEpHR0eG\n",
+       "L+fzeVOQFAoFK6lWV1dt3P7WW2/ZRoHxOY6ed+7cMSLSYDCw8ofvrNVqqVQqaXFx0T7Hr/zKr8h1\n",
+       "XedN1sAfuDM7jrMr6VuSvi7p667rPpf0A7WQucYd8V+9emV2rPV63XYvgs3ZUWGP+Xw+80A+ODiw\n",
+       "iSGEdLBe5PfYaY3Xw5QP7XbbRr0gHgTBY1VFmCZ0yng8rmKxaDUu0iZifsFjoazm83nFYjGLESaI\n",
+       "h6FHvV43K4JWq6WNjY1rhurU4zShWAgMBgOVSiX5fD6DODGkGadvLiwsmAxL+iRQlFE3r4tTKqaU\n",
+       "ELtAiHBXgl56k+vT1MyPJP1vkqKS/gfHcXYdx/nHN3rVz+hijIq4k8YEISocAXYHJFWYgY9GI8vZ\n",
+       "4z+OzfEAHbSAKysrymQyJqWiHq/X6wZZkfREcynJCPxwIaLR6LXBiHT1YFLnX1xcyOv1WkYgTRm2\n",
+       "X9iDjUYjLS4umuZvPMxnOBxeS62lSWPnnZ2dVSKRsHQtvjeaVTwwELvyWYDnAoGATTah2PKQU6/j\n",
+       "N02/QFnI93/T69Ms5ktd7cRDSSNJNUmVG7/yZ3BhLzAcDnV0dGRJpohJga2oXaPRqD766CODnsbh\n",
+       "MgYY0WhUuVzOEIGzszNr8hCwglocHBxY/Ytx9vHxsdnXMinETRRjlXK5bLwLGr1ut2sWr6TGMv0D\n",
+       "m6V8IOmp0+kYf6JWq5kRJA78LCgmjIy5MYDBEBI0A5Eru/yzZ890fHys4XCoFy9eWHxEv9+3GIfh\n",
+       "cGjeHBD+KbNQgY9GI21vbxs2jdsopd2bXp+mAWzrCl/+HyX9Ldd1bxZw/BleGK/0ej198YtfVDqd\n",
+       "NrPDRCKhcrmsVCplqMP5+bneeustxeNx4w0EAgEzCccHIhaLGQ6cyWRULBYVCATMwQdoKx6PG2cB\n",
+       "HgX5eKAY1Lqzs7NaW1vTxcWF1tbWNBgMbHTNZBHXUZw4KWP6/b7eeecdY7/hjg93RJLtntTY4Mjv\n",
+       "vPOOcrmcPVxM6nhwcB0KBoP64he/aMw7ThnU5YyyOVUQKqyvr5sPx9nZmXw+n+7evau9vT3t7e2Z\n",
+       "sQxmOB9++KGSyaQZM97k+jSL+T+W9Cck/VVJf8VxnG9K+prrur95o1f+DC52AVhaHo9H29vbevjw\n",
+       "oQ4PD804EecfLGPxlaD5+fa3v6379++bEvni4sJYbNSweE8sLS3p8PDQUAqiziqVih2fqL7JBOSI\n",
+       "HzdEJ9i9XC7b0AfYamtry6y0Tk9Plclk9OTJEz18+NBGy/Pz82YVViqVDFk4OTkxK7BaraYPP/zQ\n",
+       "6npOiidPnpiqptVqKRqN6uLiQltbW8pkMrZj+3w+1Wo183JmkYPIzM3NWaJsvV7X+vq6Tk5O9J3v\n",
+       "fMdOQp/PZ8kAT58+NTKV3+/XN7/5zRvd/z+wzHBd9/9yXfe/kvSfS/qnkv6ipF+/0at+hheiUtTH\n",
+       "1MnjvGK4vpOTkzo6OtLKyorhqhiTo22DTI4ok4YxHo8rlUoZooEyHK5yIBCwwcm4uxBORpB3OKaZ\n",
+       "usE9Hsdfl5aWzNwGhh0DDZyO9vf3bVERKeHxeBSNRlUsFrW1tWU7+Gg00t27dw01YfHCN4GHDKtu\n",
+       "cnJSzWZT5XLZegmGIoyyQXJwB6WcgArK73U6HS0sLMjn85lglzKQz/6m16cZZ//D14jG/yzJq6vp\n",
+       "382r9c/g4sgl9heerHQ1HSRllAUZDAaVSqVUrVaNcQeRCPYd0zJ4zBi8zMzM2ESLnGomXTDHfD6f\n",
+       "TQ/hNAyHQ6sTEZdyJCMuwNqAEwYUArZaoVCwYUa9XjfOBGQfTF3QREYiES0uLlrje3Z2ZoFAYM7I\n",
+       "tcbVNxi57Ozs6Pz83EoLOB7AcODYPJCSzLXUcRzjsbCYSQmo1Wq20OFS3+T6NA3g35R013XdP+W6\n",
+       "7n/ruu5vu657M4vzz+jCOBvZOugExB8QhnF8GPyWKx6Pq1armamh1+u1rEBonFjLer1eSZ/kcdD0\n",
+       "kd1H6hTlCfo6iOh4zGH7Ct+5Xq8rlUrZLogNeCw8AAAgAElEQVS5TaFQsIeLnzU1NWVc4ZWVFUMr\n",
+       "BoOB9vf3Va/XDa5rNBqqVqu2YDm9xmN/m82mUVkrlYq63e61NNZ2u61wOGyeIixqWICIdxEUtFot\n",
+       "RSIRHRwcWMwbRoyQp0Ch/jC4GU8k/Revd+h/6DjOT78eb//AXbj/xGIxrays2JGJ6iQYDJpDKI0S\n",
+       "quDJyUlDHfr9vqmf2aV8Pp8SiYQNBxhPM+iQdC2x9OTkRPF43LDUcZsuJPlo/DY2NqwEikQi8vl8\n",
+       "Bl2xW+EC5LquEomE8aXBx1FTp9Npe9+xWEzxeFzLy8tmWEieYbFYNH4IavN0Om1xE6hdKJUQBnM6\n",
+       "0AeMfw/EOMzMzFzLZuH98LNQ80hXQoNIJKJoNGo9xJten2Yx/6+SviDpf5H0y5Leff17P3AX7kPk\n",
+       "0bXbbQP6ic+VrjpwZEhYZ1Fro4JGesWuAZkebjFMNgYFXq9X6XRax8fHlgHCkYq6mlNhdnZWzWbT\n",
+       "rAsQw+7t7dnRz3iYnZNAGwYelC88hOScYJlFP3B6eqqdnR3LYGG3h7M8PoxBqAuNFvErpQTWuuPJ\n",
+       "t/Qgkoznzfssl8vGL5+cnLR8FcdxdPfuXbNQQKp1Uw3gp0Ezvui67ttjv/4tx3Ge3uhVP6OrUqlo\n",
+       "aWnJGirAe3a6cDgsx3GUTqfl9XoNVqLOJKB9fHDgOI4x5SAiDYdDa5jgCxOnOx7T1uv1bHQNzg3f\n",
+       "eH193RAM1BycBrwnhKEQjIimIIAH5GZ6elp+v99chOr1ugKBgLLZrNndQpLHUZ/xOoMTSQb1jYt1\n",
+       "edCAPXFA9Xg8RtACu6fkoQEOhUIKBAKWq0hdf3FxYRtENps12PMPIzrt0nGcDX7hOM66rgYpP3AX\n",
+       "ZQCIAmJTal+YYi9fvjQzFnbRO3fuWLAPI1oSq5rNpvkeM1XMZrOW/wFhKBAIKJlM2kMC/4HxNgrx\n",
+       "drutSqVi1gZEl0FgBzWZnZ01jw+EuugNB4OB4cHEU6TTadPvMQGdmZnRs2fPbKfl4VpZWZF09eCM\n",
+       "/yyEuKhXsBNj55ZkJwzNLLwP6nl415RomUzGQj1pSAOBgNbX1+0hpQe4yfVpduafkfQvHMchcn5F\n",
+       "0l+60at+Rheq54uLC3OPJ7Ac21oUI+wU7Lj4nVFzM3LN5/NaX1/X1taWwuGwxYvRCBKbAA/5448/\n",
+       "Nlck5PjIryDHY57SbDYNvcDUhZvKTk4TxaQRe15JFqg5MTFhYlRscSmTpqamtL6+btO8cVsu6RMn\n",
+       "I4Sl+HtwCgQCAfOci8ViVgZRe/PvhsOhjfvhdvNaW1tblnTL7g0ve2dnRxsbG4rH4/awvOn1aXDm\n",
+       "35J0V9Jfl/TTukI2/sWNXvUzuiYnJ6+hApj/4Q/XaDR0eXlpTQ7qj8vLSxu99vt9G4hgmHh4eGg8\n",
+       "CZQS2OWizmaXx9YL8vp4/BmiAUSkXPh2jCtY4AqzKAnrxC631WrZwwvUxt+p1+v2HyNu+MNgv6Aa\n",
+       "/X7fKAAEc5IQUKlUjIBUq9VUKBSuiXJLpZKdeM1m00oHIMVAIHAtPZamm/4EvB5K6f7+vm5y/b47\n",
+       "s+M4/6GuSPiOrpPxN14/tf/oRq/8GVyMbi8vL/X2229rMBhoaWlJ6XTamg4sVfv9vvlM4I8GC45S\n",
+       "AcL+j/3Yj1lgfDqdVr1eVzabtag1auHxvDvc5hOJhGHR8/PzCoVC8vl8SqVSFmsMlLW2tmYnCF55\n",
+       "aBaZHHLEJ5NJcwKC7ENdOzMzo0wmo+fPn19LsyI/vFqtanFx0Wr+xcVFtdttK03gayCMDQQCWllZ\n",
+       "sbgLSfY6w+HQ6JvLy8tmfr6+vm7aRYYuWOAyGWX8jqrmvffe07Nnz974/v/rduZ/5/V/f1nS/y7p\n",
+       "P3n93996/Xs/cBcWtgxP4Bng3bazs2OcW3ZNwPpWq2WTMxYNu+7e3p5BZVNTU3aT+/2+0TPZlWh6\n",
+       "gsGgDWcQjmLYQtcOGoHhIMYzoVDIOMWXl5fK5/Mm8R9n5925c8e8PUAqGHS0220tLy/bEGM8FBPb\n",
+       "LgzE9/b2bGGP8619Pp+9Byx8A4GAPB6PRTLDzgsEAsrlcpqenlYmkzGjSGifEPs9Ho9xQSYmJpTP\n",
+       "5027+G/Krf+91++7M7uu+xclyXGc39CVDrD0+tcLkv72jV71M7oqlYqSyaSVECAHBM5IMoYZpoGN\n",
+       "RkPJZFJTU1MWAww3gsECJQPBkAxUUGBLMltclB08WIhsqSNBRHAqwv4ABAYIDkhQuoISG42GxRrD\n",
+       "UvP5fMbNQLoPOQrMnCnhxsaGBcjTsJHPAmIBUT+fz5utLn8nGo2arx0Kc3ysa7WaFhYWrLkDosNg\n",
+       "cRzqk2RUgVQqZRAm9fdNrk/TPmYljbOmK5JuZqT7GV3pdNo4tHAvaM4wXEHrRo3YbDZtosZuDjYK\n",
+       "LopLT7fb1dHRkR318HQhL6FgwQgR/gVKC8hAKEDOz8+ttse8cGJiwh5EYKyjoyPzNwZflmTlC2Y1\n",
+       "+NAhTn358qU1i1A7cWaCv8L7Pzs7U6lUMtHtzMyMeULPz89re3vbhiRwj4EnM5mMhYLCN0H4wJS1\n",
+       "3W7r7OzMMPbl5WUTQdB//GHEDf+mpP/HcZy/6DjOX9IV2eg3bvSqn9G1tbWlUqlkit+TkxO1Wi3b\n",
+       "mVkMNHzJZNImhChRMPIGXmMkXSwWDYIjehi1Bg2jx+PR0dGR7V5wfyHTHBwcSJLBhghG2ZHZDaem\n",
+       "psz1B1jP7/erUCiYRhAsmiYN3gcpAJJs4uf3+20xEXLfbDYlSYeHh2q329rb29PKyoo1qgx3Wq2W\n",
+       "jo+PTUlDY0mJAg+EOpgGkvg2fPFg1x0eHqrT6dgw5vj42B4O3tObXn+gBvC15u8/kPRjumoEv+a6\n",
+       "7v95oxd1nJ+T9J/qiuz/sa6gPp+kvydpWVcC2v/Idd3W2N//y7oSCPx113X/+ff5me5P/dRPaXFx\n",
+       "UdKVjGhpaUnFYtG8NAqFgk3QyDM5Pj7W5z73OYPNyOWDJUZHjmvl7OysDg8PLUN73HCRJoudmweF\n",
+       "cTYTQyiV9+/fN3YctExSVsdRF4hHNEosHCaDKEn4XNT7eGdsb28bHPn5z3/eRKXhcNhKs1qtplQq\n",
+       "pVKpZMGY7My8TjAYVKVSUTabVbFYNEUP/iHkpXi9Xm1vb9s0dWVlRYeHh6pUKuaSBEvu29/+tjKZ\n",
+       "jJUnP//zP//ZaQDdq9X+j17/d+Prtbr7r0h64Lpuz3Gcvyfpz+tKnvUbruv+947j/DeSflbSzzqO\n",
+       "81DSn5P0UFJa0m86jnPXdd3R7/3ZKC9Go5F1/+VyWaurq5Z5DUqB3xrEpMvLS+VyOUUiEf3u/8/e\n",
+       "m8RGmqZ3fv+PjGBEkIx9j+C+JXOrrF4wXdOC1GrBGvukMWADvowxNnTTwePlYBuQL9bFbWAGXgAL\n",
+       "sC0bmsPIkAFjoIPGGEEQ0FZ3Cb1VZlYWkzuDZARj3xgRDC5Bhg/M31PBbnWjkVRrWkR9QKOyM8lY\n",
+       "3+99n+f//JePP9bS0pLVcqiwHccxmyp2H+rbwWCgZrOpRqOhubk5G5H7fD4dHBxYfYi5zNjYmB3N\n",
+       "uF/m83kbN4fDYR0fH1vO9M3NjRYWFuwxX716pUePHlnjR24htSo3yPBd9C/ko52dHXPhZ1S+ublp\n",
+       "ZU2j0dDR0ZE++ugjvXz5Ul/+8pe1tbVlnyelA3rG6elp5XI5czwdHx9XqVTS4eGh0um0Go2GhRJh\n",
+       "w9VqtXR6emqNbTgc1tnZmba3t++1tn4eCui/5zjOjuM4p47jdN797z72XKe6lWFNOo7j0i2t9ETS\n",
+       "b+nzxvIPdevTIUn/UNIfDYfDq+FwmJO0K+nv/XUP7HK5jPZIbDDcDGLIOGqpH6mXwYcZYsBpLpfL\n",
+       "SiQSVh9PTEwYMZ1mklDLVCplahV4FRMTE0aCZyxMnAM7MDVwKBSy0qNQKFhkGfRNOn7KF6xoQTiw\n",
+       "2GKCyEjZ5XIZ6WpsbEyRSOTOsIgbiM8PEev5+blOTk4sBQB7LSRQpGxNTEwok8nccUh1uVxGBZie\n",
+       "njbSEoY1jOHpI3Bqvc/189TM/72k3xoOh4HhcOh/97/A+z7hcDhsSPqnko50u4hbw+HwzyQlh8Mh\n",
+       "2sKyJDQ0GUn5kYfI63aH/olrNOcDDgURDNAWs9ms4akgGEBdmAjSFLlcLqVSKUkyGumTJ09MHvWl\n",
+       "L31Jbrfb5D4TExNaXFw0LgW85S996Us2QUOuhA4wHA6b7xq+yVBDgel4rPPzc83NzRm5nXKC/Oxo\n",
+       "NKqlpSX9+q//ur1u3lsymTS23agIIBQKaWFhQZFIRPPz88pms0qn05Zqtbi4qLGxMeMuM63Euovm\n",
+       "lR6AhCmMeCBiBYNBra2tWVTx9PS0VlZWbCIai8XMVuF9r59nnF0aDodv7/UsI9c7bsd/qtuxeFvS\n",
+       "/+04zj8a/ZnhcDh0HOdnFfN/7b/lcjlTSxQKBfN7WF9fty9iZ2fHdqdarabd3V3F43Gr/1qtlmGt\n",
+       "b968MQ4HOxKPe3x8bMcukzTsvJDdY9n68ccfy+VyWWOaSqXMhRT8eGtryxTR7LY4ikYiEeNGcDoc\n",
+       "Hx/bgiyXyyb/L5fL+t73vqdkMqnNzU3Nzc1ZPTwYDAwVKRQKev78ufnModxmihmNRvXZZ5/ZuH58\n",
+       "fNy0j3x209PT8vv9JqAFyoTfjRNSt9vVycmJ5ufnbWqaz+dt6MK4Hqu0971+nsX8g3d17b+UBOF0\n",
+       "eI8J4FclfXc4HNYlyXGc/0fS35dUchwnNRwOS++w7Mq7ny/oFh7kmnn3dz9xofxgQvb06VNjZlUq\n",
+       "FW1sbGhubk67u7uamZmRx+OxwUM8Hre86lKppLW1NZuKERLZaDTk8Xg0HA5NfjQ7O6vvf//7Jvkn\n",
+       "ShgvuIWFBXMdlW5LoVwup7OzM62urhrSMjs7a+UDsv1RAWu/31cikbBdkpNkcXFR+/v7qlQq1mgW\n",
+       "i0V5vV6trKyY9g8vurW1Nb169UorKytyu93mMkRcA7+bTCZNCbO0tKS3b99aRgkuS61WS9lsVvv7\n",
+       "+xbtgHQsFApZNjgDmePjY62urlqkxfj4uF6/fm0sPKaH73v9PIs5KKkv6R/82N+/72LelPTfOI7j\n",
+       "k3Qu6d+S9D1JPUn/WNK33v0Xb44/kfQvHMf5Z7otL1bf/fxPXN/85jdVqVRssibdTr4gvTNOfv78\n",
+       "ucbGxjQ3N6fx8XEtLS0ZZIdwtFgsanZ2VuFwWLVazSRBREokk0ldXFyo1+uZZEi6zc6bn5+3o31p\n",
+       "aUlbW1tG7mGxj3oSb25umrr7+PjYXgNum9fX10qn0+p0OjZZzGaz8vl8lgkeDoeVTqdVq9UUDAbt\n",
+       "5kHVQZkC9ZObEY860rdG6304KOPj44pEIub8hA6RCR/E+kQiYQQsBLn9ft/MbpaWlgz5YKDzG7/x\n",
+       "G4Z6FItF7e7uvuey+vnQjP/ovR/9r3+8V47j/HNJP9AtNPcj3ZrM+CX9seM4v6130Ny7n99wHOeP\n",
+       "JW3olnr6O8OfgifCkzg/P9dnn32mpaUli0YoFApGsCkWi1pdXVWtVjPZDyoJLGWR+kMgf/TokarV\n",
+       "qrHJ6vW6Go2GFhYWbHgBCgC2HAqFtLe3Z1wIkJFoNGrxZYRnJhIJ84vrdruqVCpG1ul0OkbKAZMG\n",
+       "GpuamtLV1ZWZpXMiEOaDQIGPDGbg69evtbCwYDZZnU5Hy8vLpo2kOYQNSAkCBAjllWFNr9ez0mHU\n",
+       "GHE4HFqjTAxFoVCwcpBGEGLTfa6fijM7jvNfDofDbzmO8z//Nf88HA6H/8m9nvlv+AJnBiP2er2W\n",
+       "M724uGiTMHgYNFe7u7taXV01dtz4+LiZeUPHJGsalKHf71tWNf4a7HLdbtf4C+Pj40okEvrkk09M\n",
+       "ZzgYDBQIBNRqtcx48OTkxAY1IC9MJnl+wi7x+MCTAt0j+LXf71etVtPS0pLevHljzyXdNnvo8SKR\n",
+       "iN24PCZEIfwvXr58aQ79KFJIaJU+l2gRdcHOO+qPnc/n5ff7zY6XIdH8/Lyazab6/f6dsfy3vvWt\n",
+       "XwjOPOE4zt/TrfvnqDjrx1l0vzTXqJXs7u6unjx5YpyHUQiKpNLPPvvMfM7wt7i8vFSpVDLvB4g0\n",
+       "WFQh82+1Wmo0GsZ9htBEehOlB8c1Uy/Ce0YFnEBz4L+gKUi/kGmxWDFDhKtNc8hxTm1PY1YqlfT8\n",
+       "+XNb9ChlENd++umnhgCBR/PYKEd4L5wWkPIjkYhlfR8cHFicBWJiXg8TRDgsjUbD6LDlclkLCwt6\n",
+       "9erVvb7/nwXNhST9D7qF5n5b0rqkhqQ/GQ6Hv5REo0wmYx/awcGBJicnDXW4ublRsVg0eiVj3lar\n",
+       "pcnJSe3u7hrMVqlUDIdmQEHADf5wNGBgw71eT3t7e5qYmNDCwoLtlBzPfr/fund+FhSEkyCdTtuw\n",
+       "BUoqo3HqWBYVvGS4DdTNOPOz642KTIlo43E4lSEpMarHibRSqdhkkzKt1+tpeXnZNo5R1iEsRZpO\n",
+       "r9drPUw+n1coFDL/Ed4PAgBU6fe5fhZr7r+QJMdxPLpFIP6+bsfO/6vjOK3hcPj4Xs/8C7iYTEnS\n",
+       "8+fPTaqDKSByonQ6LbfbrbW1Ndt9+QI9Ho+Wl5cVDofNRIVRMCJNiEeYwFCzosQIBAKanJw0S61k\n",
+       "Mqmbm9tMasJpGCLAFgMhyb0LWE8kEtbI4teBWhtFNqcJCbHhcNgcO5H8w1uGjI8OL5lMGj6MBRe5\n",
+       "K4RkRiIRM3ZJJpNGKGLSyEmDGh2qK6mrCGD5/V6vp3A4rEgkYoQpPluv12ul0fteP8/QxCcpoFtU\n",
+       "I6jbQcdfvfcz/gKvWq2meDyuubk5Cz5H4FkqlWyQcnR0pMFgoJ2dnTuY7tXVlXK5nOr1utrttkWt\n",
+       "sdt7vV4tLCzYIj4+PjbqJCJO+M21Wk2rq6uWk3Jzc6NEImEyI8bUOCZVq1XDji8vL7WxsaFOp6NW\n",
+       "q2U85e3tbeN91Go1GxPDad7Y2DAiEXYB3W5XzWZTa2trCgaDljEofX7zAzeCSlD2sPtyopXLZQUC\n",
+       "Ae3s7Ghzc9MQknK5bAxBmsV2u23BO5CgGOHzmmn8rq6urOm9z/VTF7PjOP+b4zjfkfR/6XZX/q6k\n",
+       "f384HH5lOBz+UmoA2WVoKtxut03cqDX7/b55JqMSBttF8AlzjMYHeKzf7yuXy+nw8FC1Ws1G4YTG\n",
+       "A0cR9jg6OIDX6/P5VC6XzSIWJTclAg0kMi2GJBMTE0qn08ZTZmDB1HM4HNp4fNSTg7H24eGhMfLY\n",
+       "RUebZEnG/Gu320ZpdRzHxvzU1NlsVrOzs8YhwSxytE5OJBKKRCImmIWGC8JBw7e9vW0MPV7z+14/\n",
+       "a2eek+TRLZe58O5/rXs92y/4wuSFLBCQAZqw5eVlI9l7vV5lMhn7kihJ4GjgjE+WXigUUjqd1uzs\n",
+       "rLLZrJaWlhSNRrWysmK7NlwEMNt4PG4LJpPJmEUV6auEBkm3w5R4PG64ONFk3CTRaNRG0tFo1B6X\n",
+       "GxBVOgKC09NThUIhPXr0SIlEQrOzs5qfn5fL5dLc3Jwx/iRpbm5OLpdLyWRSw+FQS0tLCgaDhstP\n",
+       "T0+bMfmoAQ7vB6V1PB43/JhyLxAI2Ag8lUopm80qmUyaEfvz58/tPT169Ohe3/9PXczD4fDf1i2h\n",
+       "55/qFr34z3U7DfzXjuP8t/d61l/QRb1YLBZNY0bjBYeZY5whCObheBS7XC5ls1mbCgK9wa6jbqX5\n",
+       "AT0hMpcsPhZ0rVYzr2VixChtwMXj8bg1Z+yYNFCRSOQOBHZ2dqZKpWJcYgYbyWTSbmZ8j+fn5++M\n",
+       "4tvttlZWVrS3t6dYLGbeHmNjY0qlUmbs2Gw2dXZ2ZkptjBLx3lhcXDRCFwY3YOGxWEzJZFIfffSR\n",
+       "NaDT09Om/mF0DbUWES285/tcP7NmHg6HN8Ph8FPdRqb9K0nfkbQi6Z/c61l/QRfmgzgYjTptgr/i\n",
+       "gxwKhVSpVOTxeIxUhMHf0dGR7XZut9sk85i54HHBVIzamYw+mjUYbJDPCcUh4oEjHstcsGp2O+kW\n",
+       "thsOh4Y7M1kEm2Z3Zrrn8/lMNABEdnx8bKbn2IXBCgRaq9VqdxIAaArZ4aGT3tzcKJfLqdvtqlar\n",
+       "3Qm+9/l8qlQqFi40ekJKsiEP1rahUMgyGPHiu8/1s4Ym/0TS13VbLw90WzN/591/3wyHw+t7PfPf\n",
+       "8OU4zvB3f/d3Jck6bgg6a2trKhaLajQaSiQShhsDSZEehUlLpVKxjh5KJvIpbABYlNiBwaOQZISl\n",
+       "0dfAVA0tHPwR8kngXFSrVYXDYfOP5nkbjYbVmYhgM5mMOfoXCgW1Wi09evRI+/v7pphGroWiA4Ep\n",
+       "3GPgNho1vEWy2axOTk6sJAD3pi8YDoeWYMXJNhgMzBQnn8+bwypZM+Pj44rH42a2zgQWJKjZbOoP\n",
+       "/uAP3nto8rN25gVJfyzpo+FwuDQcDv/RcDj8/eFw+OqXbSFzEUbJUILdh66eQQVfHmppHI9goQE9\n",
+       "TU9PGw+ahYpjEMoIVCxwFQjj8Xq9VkdTEuB3h4UXsiK0e2dnZ5bJR1Ks4zjmnwH1E1kTY2iIQpeX\n",
+       "l8rn80omk8rn88pms3bz0oQ+evTIfC8g5yOoxZXT7/fbScWgZHd3VxcXF2anywib3bxYLFrDCNmI\n",
+       "RhAJGjpHCP6SLIUqEAjcO9fkZ+HM/9m9HvnfwIV9QDAY1Pb2tpaXlw3yoXYEevL7/frOd76jQCBw\n",
+       "R/rU6/XMOqtQKCibzaparRp6UK/XbUzLrsNjIkCdmppSu922mIfd3V1zHALFoKtn6oiAFJcl0AG4\n",
+       "FX6/X2/evNHMzIzGxsaMbhqJRCxjm9ff6XQsiYrBDM1as9lUPB43Fh2vidH5qO7QcRydnJwYtk0Z\n",
+       "QKj8ixcv7AaAmM+Usdlsmjzq5uZWFFQuly1KAl7G6Ibx8ccf3+v7v5+51y/ZValUzHkSGIvFygJi\n",
+       "8oTJYqvVsgyRVqtlzu9+v98GLEiVLi8vNT8/b7s5gerj4+NGREdAyyABf7jz83OrVfv9voXY0PAR\n",
+       "nM4OGYvF7OZgcSwuLtouGo/Hjdgvfe5NDQLCe4WHUq1WzdUU3BvzSEjxbrdbU1NT5pkMwZ7auV6v\n",
+       "G087EAioWCwaJEgsHQ01Wkr8RdrttiFDICOodkCR4vH4vb7/B7WYGWrwBeKNAUoRDAZtiII0KhKJ\n",
+       "2OiZ0gAiO7Xf6uqqHcmkOEky0jrWAktLS1pdXZXP51O327XYNaIggKvw0UilUjbKlaSPPvpI0q16\n",
+       "u1qt2kJPJBIKh8P2vjAoxB2IiRuUUrgVwI08FnpFOBJAl1A7MU+PRqN287NrEiUMi47YtouLC+Mx\n",
+       "Yx+AYSRIkM/ns4g2DGUkqVqtmicgNNP7XD8Pn/nvzOVy3b4dMGTQiNEMQBQShNaQucHuGI1G7e+g\n",
+       "QJIDPYowsIMD9ENOpxkCQ/Z6vXZEA235/X6Vy2VrwIgtY9diRI5gdHp62hpWxs7Y0DIahzQfCoXs\n",
+       "JsBay+12G6cEngpeHzwHI3IQEPB27L+on8lKoS/gfbbbbaVSKRu4MKhCUIu2Ei1lMBg0qBI73l/k\n",
+       "0OTv3NVqte7IkDAXqVQqyufzev36tQaDgWX9vXz5Un/1V39lmXws/N3dXZXLZaudX79+bRAY0iE4\n",
+       "E9TR5+fn2tnZ0e7urprNpvb399XpdFStVnVzc2P4d7fbNfMZIK1YLKZwOKyNjQ1ThdBYVioVFQoF\n",
+       "5fN59Xo9bW5uyu12a39/X2NjY6rX6+p0OiZurVQqOjk50Q9/+EPbpev1uk09p6amrPzAQvbt27cK\n",
+       "BALK5XI6OjpSv9+3XgP0BK4F8GK73TbokRiLra0tG1BtbGwol8uZVAxCErs4sF6hULBk2vvwMqT3\n",
+       "yM7+Zb0cxxn+3u/9nunbqJkxALy4uNDW1pbxIzAM/NM//VN97Wtf09jYmH3YLFhJtnNWKhXDRePx\n",
+       "uHEWRt192E2heUI6qlQqluTabrdtWMJJ4nK5LMYNhAXkhb9DdQJ/YjRagjLl7OxMkUhEx8fHCoVC\n",
+       "BschUCB+AkRh9GdHA3jOz8/tZMtkMuYw6vV6LeD+7OzMpGTU5PBPCOKhyQUxgYQEzRWjnEAgIL/f\n",
+       "r2q1qt///d//xflm/F26cPicmpqyTD8avOPjY7OMAq/99NNPDVe9vLw0NcTBwYGWl5dVr9eVTqf1\n",
+       "8uVLra6u2uLr9/t3jAxZxEQa4GcB3MZjs/BGx8+QdVwulw4ODowEFIlEdHJyYtq4wWBgiygUCunl\n",
+       "y5d6/vy5TecKhYI5arKT4pMnyX4OxAOe8c3Njfb3902Eix9dMBi08Et8LqiDCXDHnBGuCqXKYDDQ\n",
+       "ycmJKX1Aas7OzpRIJCy5C2FCJpNRr9f7W8nO/jtz8aHlcjnzktvf37fdFd+K0S5fktrttkFz+NDB\n",
+       "FQbG83g8llHSarWMgYaTPVM3BiPNZlO5XO4n/Juhh4IKjPows6DAxbGDRYlBWHq9XrcEKI5xXI4g\n",
+       "5CNIiEQidgMx2CmXy9ZEMjUcdS6itLi8vNTx8bH5fmAkjsv+xMSECR7A5cHkGdlDfuK9jGL9pNNS\n",
+       "htEgv+/1oBaz4ziKxWIKhULmx0beCMaBlUrFuMv7+/t2BLKIgdP4kjD0ZjyO9wUjc9hl5+fn5i3B\n",
+       "okokEgoEAkbqqdVq5l7E+FeSwWQoVjA8x/YA7JqAd8J4jo6OzLgF1Qp4LyXI4eGhmayw4xLJzO/i\n",
+       "1olub2JiwmzM0Eey8CVZ2hQwIacbeDNeGpJsuMLv0l/wM+DoY2NjFr/xvteDKjMwDqTDR9WMixAJ\n",
+       "rNRymUxGW1tbFgnGbkd3z+5IPU396Ti3gY1kdZDD3W63tbi4aKoLhgjxeNz8NKampoxJhqv+KCqC\n",
+       "GpqGk8V7cXGhx48f224Ils5NR61+c3Njiauzs7MaHx83OzAMcaampqzOdpzbwCJUNKA8+HtA6A8E\n",
+       "AsbTgJSF3wg8bkb9lHFYCMCak2RICfwYoobdbrc++OAD7e3tvff3/6B25m63q2w2a6QgiPEQhCDn\n",
+       "YCkAdHdxcaHDw0OVSiV5PB7jXjDoIISKR+8AACAASURBVP4MTjM83WKxqHg8bgw4MlDOzs4MNaEe\n",
+       "R+jZ6/VMj8jOzpGPgBZyETsXdq8MVsCD4WrTTMLfoBmVZFixJGvKGMOP2uzS3NbrdWPS8Z7ITpme\n",
+       "njb/PTBqSVbSMaaH4wFhqdVqmYsq5Z7P5zOZFOStL8qMkQtesqQ76gnqzmg0qomJCcsBXF1dNS+1\n",
+       "6elpJRIJC8K5vLy04QnHsvS5TRfcA74gvjh83Xg8yheOXXbhmZkZW3A42UsyMg87NeR8Hj8QuHVG\n",
+       "A5uG1zExMXGHtA8HWZJhu6N4s8/n09jYmDmcIh4AW8YmbGpqyvgko6QseoFRiy5eN+bnLGzouPBH\n",
+       "oOYyykZPyXt73+tBlRnsvnTy6OP6/b4qlYq5AlFXAkuRyoQqm7iw4+Njm/q9ePHCMF2CfcbGbvOv\n",
+       "EWtisj18F8jT7/dtkII97tXVlUqlkgk54UozDmbYwOugKS2VSgaP4VYPN5vHHz1p4GzAtwYfxg+D\n",
+       "G7dareri4sJKoWq1ar54GKbDMoSmCd/i6upKy8vLVtdTruEtIt3WyAgm8NVg5wa3Z7O5b0Lrg1rM\n",
+       "REAMh0ODjZjizc/P3wl1hCcBofzi4kKJREKtVktPnjzRcDjU8+fPLf200+loZmbGcFrG4D6fT5lM\n",
+       "xrzqUGfQSN3c3NjfoY1Doziac+LxePT8+XPb5bG+GhWN8hiEAXFK4Gz6+PFjIz2hOEEGxY4JxxiF\n",
+       "us/nMwOaZDJpzR67p+M4lvVCZMTc3JyOjo4kyf4dLw2QDqaY4PBsJLiR4tfBOB2F+32uB7WYGdHi\n",
+       "kwZyQB2J4oEsu/HxcbVaLQvcoaMvlUpaWFhQLpfT6uqqNZJwEuAeU1unUimzJpiZmdFwODToDW4G\n",
+       "amnCeMBrnzx5YqNoSpRKpaLT01MrCdjlILd7PB6zvUKLSKaI1+tVNpu18HlQkrm5OdtV8QJBTT4q\n",
+       "G+t2u0ZWwo4WYpXH49HJyYn5P6P8pnTjJmKHHk3sOj091ezsrDXf8XhcyWTSft/v92t+fv5e3/+D\n",
+       "WswsRkoKGkCO7r29PQWDQcvpYLxMXBkLhzHwcDg0SijNFpIi+BEYkDMqrlarymQydtOcnp5qYmLC\n",
+       "jv/BYKDPPvtMlUpFL168MCwarR/BPYPBQIPBQLFYTNItfZJBEGSfXC5n/nS9Xs/QmL29vTuBOvv7\n",
+       "+3ajP3nyREdHRzaAAY6E3DQaJwwBSLplJIKJQwjK5XL68MMPJX1uYIOdLS5KoCsTExPa2tqydNZY\n",
+       "LKZGo6Ef/ehHZjqJJvF9rwe1mMmbGyX7QGMsFArK5XJaW1uzWtfn8ykYDCoQCMjtduvw8NByTWKx\n",
+       "mMFVHMfUjrFYzLjGsMEwPMGxBzk9rv3AVaMDE5qh09PTO0lVNzc3NohggY1mmHASQBoiVIgb8vLy\n",
+       "0gKHer2evfbz83PVajUdHh5auYJHteM4ymQyhuJcXl5qf3/fBKiSzBgdewVJhrbwWJIsT4aYCXBw\n",
+       "pomoY3BspZHd2dm51/f/oBbz6ELhw4Mi6TiO1tfXNTExYTarkmzyhZ0tHhhMrEb/zAKg6WJqiIEL\n",
+       "0iEMAqlnKW9gkIEQAI9NTk6aJhGnUeiczWbTuvxIJGI3KgMNYDEwW3IIkTWBwlAmcAOfn98G33c6\n",
+       "HWMFYkHG0CUcDttJRjkGrXX0M85kMtYUYn2AS+nq6qrt5rVazWBNpqyYmTuOY+jO+14PCppjJAxm\n",
+       "C20T/zSO+1ETbI5YSQbr8Wev12uKbBo2GhvHcayxAi1gB0LVwgBhcnLSOviJiQmLUJBkkBvDklFH\n",
+       "/H6/L5/PZ+JPSUZlpaRiwEJwvKQ7aAPvgd0VTw/qfbfbbYoPsHnKLRJUr6+vze0UygCpBIPBQGdn\n",
+       "ZwqHwzalhGtCE4qtAhg/426c9omJ/oICOnLxoVEvI2eHW4FEiWOQJgb/NTjJTLX6/b5WV1fV7Xa1\n",
+       "trZmeCtDjmAwaJgqTdT6+rpCoZBRLuGF0LihsoCGylhcuvWSxu0IKiX/hQ8ChXJ098TrGZ9l6mq8\n",
+       "3y4vL3V+fm7Oo5Qo4+Pj6nQ6hgFLMiiRhQ1ezedIT4G1wKjF7+j3wJCJngEP5ng8rlgsplgsJo/H\n",
+       "Y70HZdl9rgdVZhCySGoS7kIEk6OrQ2R6eHgot9ttYZI0RPgbT01NGTZ6eHgoj8cjt9utdrttnA+g\n",
+       "MwwLRxXKjuMolUppY2PDBjBYX7Fj9/t97e/vm4s+ITg0sZJs4kYNS5lDg4krPZFmmIaXy2U7ibAE\n",
+       "AEtvNBp2etTrdbMc4LGhBuCST+4hqATTQ6Z2GDFiGt5uty3YCPsBamZQF+ijpVLJLM/ucz2onZnj\n",
+       "rdvtan193dha0WhUCwsLWltb09TUlHFsV1ZWbBeenJy0RmZ9fd3M/YbDoRYWFu5k/VE3D9+F6ExP\n",
+       "T0uSVldXTcWCNo4Gje4eeA1oLxgMWuO1uLhoFrWQbyDuEy4Pg25packwXpyULi4uFAqFLKhnlFdB\n",
+       "hDDsPU4hJqHkJuKkBI4NtDccDu35KRm4YTGZfPz4sZVnDH5GTyW86+hbeJxnz55ZuXSf60EtZmxb\n",
+       "SQrt9XpGnSwWi1bLxmIxa85w1el0Ospms3K73crlckbpjEQiKhQKCgaDVs+Gw2EbWNBUeTwe7e3t\n",
+       "aTAYGNsO931JBj/RvSN+pY7HVjcSiZgWkVKlUCjo7OxMyWTSKJvdbtc8KFCMsJMz3ZyYmDAUZGFh\n",
+       "wU4bHJTghuBexO8ydBpl5Pl8Phv983553Zwo1WrVZFr4hoCfS7JaHU75zMyMRWlwo9znelCLmZ14\n",
+       "YmLCdlF4COwMqJYh01A/0jxJMvokO/2ojzG7lCQbFHQ6HcuF9ng8hlxAeJJkzDuaQOwNwLTJ2qOz\n",
+       "73Q6xoHgtTLoobbE3oCamBD4ycnJn9AW0nhR89P8IXTldWBiKMmEstxUNHCjo3HKJur40cdFLCDJ\n",
+       "eM3Ak5FIxE440KXRBvx9rge3mOEpIObsdru2m7FzMkTY29tTsVg0g29yQlhQ4LpbW1uqVCpGm8TO\n",
+       "CpkSzvMXFxfa2NhQr9dTMBjU/v6+8Q1ubm7MZ+OHP/yh9vb2TAFDHU6DiqqZBZhOp9Vut1Wr1ezG\n",
+       "2d7eVqlUMjiS0oQ0V5z/qcn39vbM6pa6F7SlXq+r2Wyq2Wyq3W5bc8wJMPqZ5XI5M4bhxGq1Whbd\n",
+       "TOkBR4OMlF6vp9evXxu9FkOYfD5vrLovlCYjF5AcujVGswwCiEvz+/2qVCqan5+3nZOj7uTkRJLs\n",
+       "KAyFQpqfn7cR7+TkpMLhsMUHV6tVffWrXzVGWjabNRwXM29OA4hFX/7yl7WysqL9/X3FYjETozLo\n",
+       "oValmSQMh5RWslIg/jMGpx7nuKb+jsVievbsmYLBoGX+1Wo1K8PIOqHeBokAC7+8vDQbBIhavFew\n",
+       "cXjao0gLCAbDpKdPnxpkSlj96uqqstmshVze53pQixk65vn5uVZWVkx2HwwGjcnGYmeKl81mjVoJ\n",
+       "uM+XlclkLIaBoxlWHM0iI2/YeUdHRzaIoKzBeBvMNpfLSfrck25+fv5OFAL1tiSLbYPbQZ0LS216\n",
+       "etr8KDqdji1UGj+sAMi/pgQA4qNmvrq6MqlTrVbT5eWl5SBizI4hDQOTcrlsJU2xWLTyg/g0HEUv\n",
+       "Li7UaDS0sbFhv+t2u1UsFnV8fGwG5PeJTZMe4GLGUw1d39nZmQ0ykD7h6IMIs91uG04KZkwuCpM/\n",
+       "6mRI5/i1wfa6vr62yAhSpxg8RKNRgwAvLy8tTjiVShl1NJlMmkceYerssKAm0EUh42O7xXOAIODQ\n",
+       "iWzMcRzNzMyYATsEI2pavDKgeIKewDCEFMTrJU6DP8OhTqVStvPjZJRIJMy9FFxeko3iM5mMpqen\n",
+       "TVRxn+tB4cyjnT87BJIiRtaYi4M3Hx4e6mtf+5qNpxuNhlnbTk9P2wgW8Wqv1zN8lEiJy8tLW9wM\n",
+       "Q1BUwIIDFRl10a/VajZJ5FTh6G+1Wkb5pHnqdDpKpVLKvUt4RaolyULuYQeGw2EdHR0ZOoHODw9k\n",
+       "EAfqXKIY0C/+uPAUSzE0kZRwbBx4bSwsLBgnBCf9er1uLk7oBkdZeCzw0cHL+1wPamdmgfX7fa2v\n",
+       "r0uSZmdnFQqF9PWvf12SzM2eXTMej5vhSiAQUCaT0crKilKplO0aMzMzRirHg5laOxAIKJFIyOfz\n",
+       "KZVKaeFdvPDMzIwRhqTbHf0b3/iG3G63KpWKotGoaejgE5N2Co1VktE7UXaDyszNzVmdy8IYHx9X\n",
+       "Op22Zu7i4kLpdNokXLxfEAhMGJeWlgwSxC73+vpaa2trxmUmPQAkgtLI5/NZeHwqlTLR79zcnGWm\n",
+       "QH4CR4/H41pYWLDT6NGjR0qn03rx4sW9vv8HtzPzgf/whz/U+vq6pYHu7+/L6/Xq5cuXdixSu21t\n",
+       "bWl2dlb9fl/ValXlcllnZ2em2Mjn82YtQDbKzc2NmSJWq1Vr1CRZHUkuCZKjly9fyuu9zaWu1WoG\n",
+       "g21vb1vpASJzfX1tte/ExITl8sEjoWaHHER9i3M/Xs+E+Kyvr9tujlh3a2tL19fX2tjYUDgctloa\n",
+       "X2Zi1KTbEm5nZ8dKHaadJAFQw3NCADuO7sRMK+lDGNEfHh7aFPM+14PamScmJsxHYjRjA47B9fW1\n",
+       "vvKVrxgpfH5+XuPj40YaJ8LA4/FodnbWUAKgrkAgYH501H/JZNJ4Bmjm0um0GZug/IhEIuYQj7AU\n",
+       "zggZItSak5OTmpyc1MzMjNXk1NhYGJDWCl/j+vparVZLw+FQ6XRaLpdLjx8/ViqVstAhPECwJYhE\n",
+       "IhYdgelko9FQJpMxPJpGGXN0dm9gykgkcsciAU/mdrutTCYjt/s2bB6uC/U/Kbf1el2xWMymhfe5\n",
+       "HtRiLhaLCgaDlqgECsHuBgcZVhlN0iirzOPxaHFxUbVazRhi4KYME2B8TU1NmaHK4uKigf7NZtNy\n",
+       "rAm9hCtC5EMmk1G9Xr9DPGKo0ul0LEf6+PjYQntWV1dN7Qxr7ujoyPBu9H48XqvVsgEOEqbR4RFD\n",
+       "C4S3xWLRSjBQC/BuSiIa2+XlZUuhvbq6MqkYUN74+Liq1apisZg5QI0aW2JojiF8IpG4NzT3oMqM\n",
+       "yclJlctl09qNjY0pl8tZKUBDM7pwILfs7u6aOvnVq1daWVlRsVjU8vKySqWS+SXD8iJUJhqN6tNP\n",
+       "PzVRAHTJo6MjhUIhnZycGIaMyLbVamlvb08vXrywqAakRiTGkkft9/v19u1bw4xBTY6Pj0313Ol0\n",
+       "TEWOuSGYdqVSMVrnzs6OksmkyuWy4dkMRObn55VKpbS1taWFhQXF43G9fftWCwsLZmOAZQPEoXq9\n",
+       "rmQyKcdxtLOzo1gspkKhIJ/Pp/39fQWDQZXLZZ2enmowGBgFYGdnR91u12Ik4I3f1zjxQS1mZPAc\n",
+       "iRzvCF0rlYqpMoga7nQ6ev78uSRZvRkIBDQ1NaW5uTlLq6KRo95MJBK269FkMUJHps8NANYLujE9\n",
+       "PW07OSoXsksoazCKATWAx4FAdHZ21soeMOP9/X1r6sj0Rp6EgGByctJeL2SqDz74wLjRH3zwgQKB\n",
+       "gJVf4XBYY2NjKhaLmp2d1dbWllKplJ0YwIcLCwv2eV9cXGh2dtbsCKABBINBk4KlUikbArHzczq+\n",
+       "7/WgFjN5eYeHh5adjWr4+PjYSgyGKtjCApnxb7VaTclkUvv7++amDw8Da67Ly0vt7e0pk8lYXcyk\n",
+       "EQjw4OBA6+vryuVydpOhUQRCY1iwsLBgDWQoFFI+n7cBCc+NexHNV6FQsBJqY2NDMzMz6na7ZjuG\n",
+       "9VWj0dCXv/xli46DjwLlc2dnR5FIxIwmgdgYXXs8HhUKBbNJCAaDJnqAT7K/v6/p6WlLEuB9ACPi\n",
+       "3ES6bCaTsfzwbrerSCSiH/zgB/f6/h/UYgaam5mZsQUGFprJZEwtgWD0y1/+sl6/fm3kHVh3kUhE\n",
+       "0WhU3W5Xjx8/VrFYNFEmtaTL5dKHH35oeDB16rNnz8yc8Pnz5xY7DN2x3W7r0aNH+uSTT9TpdLSw\n",
+       "sGAu+vCfS6WSNZfEEgeDQc3NzUmSaRjj8bgNKdLptKTbcTN2tvCJA4GAnU7hcFiO46hQKBg/4qOP\n",
+       "PlKtVtPc3Jz9zNjYmJ4/f65oNGpq9W63q+XlZY2NjVndi8F5Nps10tL09LRev36t09NTpVIpeTwe\n",
+       "VatVzc7Omivr2NiYvvKVryiXyykajSoQCOib3/zmvey5HtRibjab5qQZiUQsEwQOA00QrpksTOxd\n",
+       "cf7kS6Ih4oiE7I+F7fX1tcUEwwYjv9txHIubqFQqNg6nFpZkMFUikTAi/OHh4R1bLJThw+FQm5ub\n",
+       "mpycNM0eNgYMRsbGxnR0dGT2AfV63f69VquZAAHjF4/Hc8fP+uTkxBz9WegMoPDV2N7eNhvbDz74\n",
+       "wOimiAYCgYBqtZqJYsfGxlQqleR2u/XmzRvrB5gAsovj3nqf60GhGdSxYMHUc8ic4vG4TcKoMykb\n",
+       "RvVvoA7EfoGQQLmUZIw6vrR2u23KDhzlp6amLC0Vq4JAIGCcCbBqrAJgyDHSptYm8AYzb+BGFg+w\n",
+       "nN/v183NjdLptEKhkAkI0PiBzqRSKcXj8Tuvnx1+fn7elOJoJHFJLRaLZoKDtZbH49HCwoKhI2DL\n",
+       "qVTK+hMoq7FYzAxtMJ+knoZMdZ/rQe3M7JjwehmiQHb3eDymJJZkaEM4HLa6lB2DsgMGHNkmEIjG\n",
+       "xsYUj8fVbrdN88YR3e/3jf/baDQMBwYH5+fPz8/t+J6dnbXAHV4XvAuQk+fPn9tuBmaMoz6vZXFx\n",
+       "0WRjlFuw30AfwOITiYQhOPV6XalUSicnJ5qbm7MbJxqNmvkknnlwPiDaEwY6Gl386tUrey8ej8eM\n",
+       "2+lbSJo9PDzU5OSkqW3ucz2oxQw+CpcBjjK1LhjpqL0qUFu9XjdmG8cxLDZMs7PZrN6+fWt2ruz+\n",
+       "7Xbb1NtQIplyoVzBdAVPDRQm3FjFYtEEoC6Xy9QoQG2BQEA7OzuamZkxQhC499nZmQ4PD03lgdpm\n",
+       "e3vbFjRhlHCqeU9kWUO6Z7IYiUTMWQlZGEJWIMazszNls1kbxVNq4Y2RSqXM4RM2IAKGQqFgr33U\n",
+       "OPI+14MqM9gtXC6XTZjAcKl9B4OBYcA0K6AMxBhQXwYCAYO2EomEBoOBKaChfqKkmJqaUq1WUygU\n",
+       "UjAYvKO6wPNZkh2pOC1RCkxOTt6B+6ampgw1Ga3bObZRnZCt53K5tLa2Zp4ULEosFjh5qMUxAOfU\n",
+       "QK3daDR0dXVlhHqGG0CHozZn3Eyw61DPwM4bHeTAq6YU4qTCSRR05T7Xg1rMLNZqtWq45+rqqi1m\n",
+       "0I2FhQUzPwyFQobdMnnLZDJyuVxKJpMqFApyHEdbW1s2DaQp4kYAV56ZmbF6MxKJGG9ieXnZBgdM\n",
+       "JzEq56aD64x5ICKCTCaj2dnZO/zjSqVihizwt5eWloyHMjMzo9XVVc3NzVmJ9PLlS6tLz87O7qAS\n",
+       "hE9KMpuEy8tLZTIZSZ/7L1MXo58EN2cKyM2LhzPPTVwdpVcymdT6+rqazab5WYON3+d6UIsZKiFx\n",
+       "D9A5KRvYEUAiWq3WHbI7bvrk3NXrdfn9fj179sx2NqAoToBRC1t2a45iYo45guFtwGMgealWq6lW\n",
+       "qxlhiJuDCSVu9+Vy2ST/7HSdTkezs7M2SgdRYdLISHp2dtZI+nAo4F+w+KrVqilo0C/Cm3j27JmZ\n",
+       "PmK2E4lELG6DlCmErgyJCEWikf1xLSRuTZCq7vX932/5/HJdo0EwUDZHrVZHhatEJow2HRB9sH9F\n",
+       "ncxNggIF05jRP/O7cHj5+R9/Dfw7/ybJNIc0dxB3EATwJUPzZHfkd0AQ4F2MEuAhyXMjjr5P/j9o\n",
+       "A6+P5+S04L+S7PcoPUb/DjSDkoXXT8NKL4N7E802C/sLQeuPXbjHM75lUoW06PLyUqVSycg/+DdP\n",
+       "Tk7aQoeIDxKwsbFhjK7p6WnV63Wdnp4aMgGOSugOJQpHNuPwRqNh2C/TOXb2dDqtRqNhdSZ0yF6v\n",
+       "Z65B4XBYiURC6XRa+XzepnEnJyemrAFrJ0dbkp1O5+fnWl1dtRE9C5b4BzynR+VZRDVsb2/r/Pzc\n",
+       "8PBut2vPBTQJrk9zS47MqF4SBfhov0LP8oWgdeTCbV6S3e2VSsWGFUBWqCiwnAL5wHEHl3pyOvBx\n",
+       "o8Yd9S0eRSWQ8uMmxM0D0YiGjp0OY0een9EuC8JxHEMVUFCjFJE+j7pAsU3ji4YPXna/37+zi4NS\n",
+       "8H7wrqCJBd2ABIQG8eLiwmxyyT7hhgQZAh0iwwSko9/v35GBAdHRSPI673M9KGgODoAks4nlKB2V\n",
+       "zxOPBuEGqigWtfjGMdVCQT0+Pm41IjvM8fGx7erBYNC8niEyUZfy2vBiZiFKssEFEWlgwiAYQItM\n",
+       "DCkbgP+kzw0V0fKR5oROkUFQtVq1gQe1MeUTXAyU6UwzJRnxiRtidXVVjUZD7XbbnJJARjCU5LWB\n",
+       "unBBmmIkj1HjFw3gyIUvBHc6pBf4tCgxrq6u1Ol05PV69emnn1p6ab1ety+o2WzaLnR+fq5CoWBi\n",
+       "Uvwkzs/Prbms1WrWUPZ6PRWLRaXTaQUCAR0fH9ti4ngNhUKmAeTxkXydnp7q4ODAPPIajYZRNSXZ\n",
+       "KYDdFQQefg6eNAMNIi64kSk5Wq2WjdpJnCVtC8bd6empqtWqms2mQYBnZ2d6/fq1wZtsAggALi4u\n",
+       "tLu7awgH6I8kyzZsNBo2aqfZhPD/vteD2pmz2axp6Hq9npFrgOOgPQ6HQy0tLalUKimTyRg0Fg6H\n",
+       "zWFneXlZR0dHlve8trZmvskMQ9ADspiJN/B4PIrFYkZ5pNYdtc+CHgnnwefzWY2bz+f11a9+1YZA\n",
+       "8JQHg4GpPcrlsoLBoKLRqBqNhsmmWBiMsweDgRl++/1+zczMqFAoaGFhwT6zarWqxcVFYw5yw0Jh\n",
+       "pZ7HNRXuhtfrtXF9JBKxps/r9epXfuVX7kwq8bq7uLiwcTr8FWis902belA7M5l75XL5Tozazc2N\n",
+       "IpGI5f9xHPZ6PduBOdZxkAdSk27H5FBBOX4JvIS4g3bv8PDQ3IWazaa5/tC5M4kkWF6SJS3lcjmb\n",
+       "7HFME+COQz8Z1IFAwOrVZrOpyclJk2lhVA7agPVBu922Mgp0Ay4HTEJODj4nbjjIRtTRGKGDU1er\n",
+       "VSNwBQIBe5/wR+hfpqenFYvFjDOyv79vU1DiJd73elCLeTSo0efz3TEFHFVzwC2QbhXKkowmeXl5\n",
+       "qXw+b+aKkF9w+4FEE4/HjSzPFxQKhZTJZGxaNjZ2m+4EFAXKgisSEzj88fCGBsLCgKXb7Rp5CUiP\n",
+       "unxyclLBYNAGKCxyn89n+SG4juJFTW4JJHxODzzlaABpbkmWBba8vLy0x2C3ZRI5GgcRCATk9/st\n",
+       "go3nkmSlycrKiiFA9BDvez2oxQykBJUSLwzcP9kBgMVohCTZTsNUjHEx42HUxKiPqfVAJyC6083D\n",
+       "xKNZIyKYETZu/ldXVyoWi7bwCcZpNBrGJaFZBNbzer12CvHagCJZ/NLnXhrwMXBWmpqasgxB6m7+\n",
+       "x46OExNj+W63a5pBSpDJyUl7TLBnRAj0CAxoUKHg6I8ImAHPaMza+14PajFzXJZKJYXDYeNFUGKg\n",
+       "HIlGoyqXy+YVIckWG4gDkzhJevr0qU3k6vW6OfMTlomCgzJl1JCFGpQjHjQAhTK1InU+po/4bsCz\n",
+       "AOkYHx83UhISLKwTut2uTezAgh3H0fLyst3cJycnVtKMYuXj4+MqFAoGnzF0gkgfDocttoFaGZSF\n",
+       "kTVID5sKnynqG3gYIEcHBwfqdruG2LDZvO/1oBbz6empURHn5+ctK+/m5kbr6+tKp9MmwMReACk+\n",
+       "wTYYpWQyGWUyGVu47ObsnuDCkP+xteIGWV1dtXqx2Wwa644FwFAF/sb5+W1g/IsXL+RyuVQuly31\n",
+       "am1tzeito8R8himSrLbFFUmSqU0ajYa9N9AKRvI0n9LtZgBJCOEBC0+S4d6UF1gfzM3NmaodNIcR\n",
+       "O1QBhLg0pZOTk1pfX9eXvvQly0tkcvm+14NazGDGoVDIKIbgt1tbWyboHAwGZj6+vb2ts7Mz1et1\n",
+       "jY+PWy3YarW0u7srj8djOyFNnCQbJMRiMQUCAQv8AZ7b39+3aRrO8fl83oxj8vm8Ybp42R0fH2t3\n",
+       "d1exWMycji4vL1Wr1eymw1GT4HYGI8i2PvnkE2OpZbNZq7VDoZCR/5niBQIBOz1omIEC+/2+kayI\n",
+       "jDs4OFCpVNLFxYWazabFNRNyiRIem1vpdpBFWfPJJ59IklmTlUolHRwc2HPeN274QS1majqmbTDl\n",
+       "MB/BoQc2FzXdaJIqi4QYMOpNJnWQhuDl0gxRC/b7fYOtIpGI5ZFQhxJfzCLimGYggmwfM+7Dw0NT\n",
+       "hFxcXMjr9SqVSunly5fGnR7lkYTDYWPGIc7FK4/QTQhMqMnPz8+Vy+Ws1mewg6+e4zgmYgCrJtyH\n",
+       "KeeoSypTv5mZGY2Pj5sXHacItgOY29AAYyf83t//38Qi+mW5IBphtI2BH7yAcDhs2C3NIYhHuVw2\n",
+       "mKtararf7+v4+Nh4DtwAeNVNT09bCYIdLoqOwWCgWq1mkBicYgIy5+bm5PV67TXxWrEWw9sC7jKs\n",
+       "PG4iyot0Om0GhfAbINkD7bndbsORWUzU16P84VEkBSswSFbn5+daXFy00E8kY7FYzD5L4iVojK+u\n",
+       "rowJB8pBzgxcDuwYcD9NpVL3+v4f1GJGutRoNIxEnsvl5DiOer2ednd3tbe3Z8Yo5XJZ09PTRiyS\n",
+       "ZCVJt9s1N856vW5ec7u7uzo9PTWXecj2yPBh0JEXGAqFdHR0ZBitJO3v71uwPFNAdIo0ZS6XSycn\n",
+       "JyY+bTab2trauiNIPTk5MaFtp9NRMpnU2NiYNjY21Gw2ValUlM/ntb+/bylUo3UplNNcLqdWq6Wd\n",
+       "nR2dnZ1pc3NTLpdLe3t76na7CgQCVqbhkAqPhKQCHh+8GqPGUqmks7Mzy5U5Pz+3tIKTkxM1m02V\n",
+       "y2W1Wi19/PHH9/v+7/Xbv2QXcF8LeQAAIABJREFUzR4exFNTU3ry5InBXuCoCFfn5+eVz+eNczA9\n",
+       "Pa10Oq0PP/zQwnpIWUKgOkqFRKDJhXG51+s1yb8k81pjV2OiJ8nQA4xeuGlIfRplAI6G+Yw68Uej\n",
+       "UcNyb25u9NWvflXX19daXl42MStIChiy2+02kS22WslkUsViUclk0pxIUaYggZqfn7ebVJKJASBP\n",
+       "ob6em5uz7wEyFwgMwgXeM83pFxPAkQtegySzvYIDQFME3gpsBAsNxfMoF4HmCcwZCy60bAxHgJyA\n",
+       "9dhlqR8pd3DKpKECwup0OuYkii8cXAlwXZzzwZmpvXlfqL3RPUoySOz4+PgOTEjkMX0A/BIWOT7J\n",
+       "9AB8jrxOOBU0bM1m02i23Bh4QeMfTSMMG5GQIsbvo+Yx73s9uMXMhImAx2QyaWNeosPI6wsEAsat\n",
+       "oFFkx6KxQptXr9fNy5m6mYGKJKs3WYz8PKqM0Vo0Ho/blC+VSikYDCoUCplHxuTkpCTZ5A5eBLxg\n",
+       "+ByRSMQYauSLkNDKa5qamtLy8rIZOJIuQI0/NjZm1gS4guKjHAwG7fPD0gCzGj4bzA8ZWUuyxhYU\n",
+       "A/iSqerU1JQx5xDvZrNZM7l53+tBLWZUx/v7+6a88Hq9isVimp2dValU0sTEhDUwsNLAP6mdZ2Zm\n",
+       "DC8ebXS63a4Rfvb3981BH2hrdnbWuL7pdNrsv/g7POVyuZzS6bRisZjq9bqZIlLvM7yhFs5kMpZw\n",
+       "1ev1VC6XjfIJ2wxsmGaWSRzj50qloouLC5Nf3dzcaHZ2Vufn5za5I5OERnU08/rs7EwzMzNWzzPY\n",
+       "wdsjGAyaQQ5jfiai1NJ8hmdnZ7YxoGqnhLrP9aAWM11zMBhUqVTS9PS0Xr16Zb5rHHG7u7sKhULG\n",
+       "dwa+ikQiarVa5rrZaDQUj8f18ccf25cFiWh+fl6JRMI6d1AQiDflctnQA2C+fD4vt9utsbExffbZ\n",
+       "Z1a/MlZm54L4z5QR0xXKgGw2q83NTQtgZwIXj8dtwog3R6PR0M7OjkVcwB1eXFw0828yXvDBy+Vy\n",
+       "RpTis2m1WkZzRV+I7UIul7PSBlgSPw98peGXc0KA0OCctLGxce8YiAfVAKK4JqMPHLPdbuv4+NjU\n",
+       "EORIj4ZEVqtV4zg7jmMSHoLSfT6fuYTe3Nyo1Wqp3W7bGNrtdiufz1tJ0Wq1LJEUyf5wODTDl4uL\n",
+       "CxOcwm2AfHN2dqaJiQnlcjlNTU2pWCzaogc7x1IWOA6OtfS5nQGLw+VyGT7caDTk9XpVLBZNUtZu\n",
+       "t80FCvEpGX4zMzM6OTkxuzM+E0n2mvELgQ2HvAqVN8aRYPDQQSXZTUb61H2uB7WY4UZMTU1pbW1N\n",
+       "4+PjWltbk9vtNnYbx6fjOGbXBSoACQeHIzgPXq9X9XrdWGoQ/pmq4TyEIjsSiWhlZcWOek4MSoPh\n",
+       "cKgnT57o8vJSU1NTSiaTxilJp9N3aJrgxG63W7Ozs5JkKAG/F4/HVa1W7QTAwDudTqvT6WhlZcVs\n",
+       "sYAvYe7BJqRWT6VSJmAIhUJKJpOWkQK7DmEtpCq42dItooQFGB56P95XgIwQ2sP7wPzxfa8HVWbA\n",
+       "xBpt+JjYkTON/gzvuFErW6ijfOEnJydGGR098uHtYisFod/tduvp06fm6sPQAv4DAxTq3kQiYTIj\n",
+       "XIS4MUYVKTMzM/Z71Wr1Tvg8ODnKE0Si0WjUYDsITOQLor2TZIjI1dWVMpmMGo2GWZr5/X5dX18r\n",
+       "EAhoOBwaZIcaHGN0Biaw7paWlswZNRAIWMQF8iwCffDgA68m+/B9rwe1M2NWwk7QbDbtAzw4ODC+\n",
+       "AIudYxOaKEMSSUaIabfbNgABqgqHw6rX68ZSQ28HjEXnD3yH7g7ZPiNkmHTU4aOyo/Pzc2O9jToj\n",
+       "+f1+M1ocjXSQZCPhyclJ7e3t3RHugpD0ej0b6XMjnZ6eWjAPtXI6nbYoCoZDg8HADBYh5jebTauj\n",
+       "JyYmTBB7eHio5eVlG7nzGY5KwyYnJy2ldm5uzvg073s9qJ0ZlhpxAwg7O52ODTcQoiI8pdMGMoL6\n",
+       "eHPzeWA8jkgTExNaXFy0o5FmDpK79LmqBYdLSSZClW6nbhCe2JE47jFIgUAPaoC3HHYCOGfCgWCB\n",
+       "wKkulUq2o5Limkql7nhiSDJzcAQCSLT4Wfge9BqBQMC8m8fHx41uOhwO5ff7zeuOTQJKZ71et5MM\n",
+       "/BmO9MzMjPx+v66urkwA+77Xg1rMdN6jvGPqQ9h0kH9SqZQFzCCNL5VKmpmZsVICo5fR1KidnR3F\n",
+       "43GLJKMOZeiAnwWG4Dc3N5aOyuID14aXcHl5aUrxZrNpX770eYD81NSUlQykTZHKivtoMBjU1NSU\n",
+       "0UHxQeZxGJhEIhHDyFdWVgxVgNPMIsR/b1TOVKlU7Abxer3WUDOQYXA0iuuDtkiybEWwdkS32Dzc\n",
+       "53pQizkYDBoODLeXZsbr9arVatk07vT0VEdHRwYfLS4u2gdaLBZNxsOiwpQc8/CJiQklEgmdnJxY\n",
+       "XY7zJfUiuzn0RqZ3k5OTqlarlojV6/WswQQ7hhzFOPr6+lqFQsGwaHK6GVAwycRDA1X4xcWF0UbZ\n",
+       "YVF048vM54UxJDg04UKSjBA1NTWlSqVi+DHOSqhLKIcuLi6MZsuGwusD36d5ZDD0haXtyMViubq6\n",
+       "0vz8vCYmJrS+vi63263l5WXza/Z4PHd4GjDMUHJ8/etft90OhQS7h3R7AsA1JlDn/Pxcy8vLVluv\n",
+       "ra2ZOTmcDRY03m6JRMIifmlc4TKDV7OzgWOn02l7HXA2er2eQWpAdyAvqFlOT0/l8/m0urqqo6Mj\n",
+       "E/USydZsNjU7O6ujoyNls1mFQiE9efJEwWDQnisSiaharRqKAxeDphnVi9vt1tLSkg1jPvjgAx0c\n",
+       "HFgoKI06rvsul0vxeFzf+MY37pVr8qAWM7ZPNFfhcFjNZlPT09MmmaLBwguZqSDddrlc1tXVlebm\n",
+       "5sy9EoIQnGS4uOVy2WAm3DCBr3K5nFKplLrdrkqlktFGLy8vzRosn8+bsJUdENplLpczce7Y2Jjy\n",
+       "+bwWFhZUKpXMxDwQCOjk5MRMHrkhvV6vIRy4I0UiEZVKJVWrVR0dHZkDKgy8ZDKpXC6nwWCg3d1d\n",
+       "PX36VMVi0XB6ScbjwBxyenra2Hng6dAEsOydmJjQ9773PaOdSrelCo002syzszN99tln9/r+H9Ri\n",
+       "TiQSVvPBUkP1DOwD3IW0Ci8I4tRcLpdOT0+1uLiodruthYUFY6r1+31dXl7agCGZTCocDt+xlmI3\n",
+       "X1hYULFYVDgcViqVshExvsl4sGGeAqEIBUg0GjUJ0unpqUUtXF7eBkY+ffrURuWkn9JYYpNAA4ry\n",
+       "gxKBaOXRmwXl93A4NO8RanpcjdA8RqNRm/ThsM8p8eMsQozWm82mMQkZqLA7O45j9NW//Mu/fO/v\n",
+       "/0HVzNVqVSsrK0omk7ag0dhBkez3+8rlcvJ6vTo+PtbExIQZIQJrYbtVLpdVKBR0cHBg0BOlAx7F\n",
+       "1Isc2TDbIN5D4u/3+6Ya4XE46m9ubrSysmKex+Sc1Go1VatVQz3Ia5mZmdH29rY1WWNjt8lUNF/U\n",
+       "2tgdQEzi1EBlg9kLcWlTU1MqlUoWR8GElIkn3nJAe3wOkuwzODk5MYSEBhMYFIQIrBoGHqqdL5Qm\n",
+       "IxeulT9uV8vkCZiNThwHfEkWbEngDOyyTqej+fl5MzYhVAeH+Gq1alNFhhHHx8dmbTU2NmZQIIGX\n",
+       "TP7Q1EH6R34EZMjNR1BlNpvVxcWF3r59a6GR+XzecHIiJigBYMUVCgXDh4HwwMIvLi5MZAvCglxs\n",
+       "NAweigC8ZZAeDHLA1JlcEqrJaYZRZLfbNVVMKBTS/v6+Kbm/8JobuTDuZoExFmYKxSSPhYd9bDab\n",
+       "VaFQMJbY0dGR7YLz8/N3DABxr6fpwUS72WzaUCWdThtNkhuGcTkj6VarpcXFRaM9RqNRJRIJq3XJ\n",
+       "kV5eXjZYa39/X36/X0tLS2o2m6ZZBBFZWlqS2+1WJpOxGGU4IixcGkTG79Jtc8fQ5/LyUsViUbFY\n",
+       "zFAdRvqpVEqNRsMmevw+pw8bxeXlpQ4ODsxQEfiQTQXPasoxMPovBK0jF2Z/LMirqyu9fv1a7XZb\n",
+       "tVpN7XZbhUJBH3/8sRqNhg4ODmxadXx8bOSizz77zESep6enevXqlUUYYLd1dnZ2J2H07OxMOzs7\n",
+       "km5Pgu9///vGt2AAcnV1pe3tbZ2entoUjxICJQgeGtiG5XI5vX37Vjs7O5YNPj09re3tbVvQQHtI\n",
+       "sKrVqnkhU/rQoELT/O53v2ufGdwLl8ulzz77zDSG/B2+HYeHhwYLUo5cXV2Z9VexWDTI7eTkxEqo\n",
+       "VqtljSoN39HRkXk6X19fq9ls3ivQUnpgixk+hN/vt2OWXQq5/nA4VDablcfj0ezsrFkMQGg/OjpS\n",
+       "OBzW+fm5jVifPHmivb09tVoto0MyhKhUKlbPfulLX5IkFQoF89CYnp42jSBc4UgkomfPnqnRaFhN\n",
+       "++bNG8OKMTinMVxcXJTf7zfp/5s3b4xjsbOzI7/fr2g0qmazadzqdDpt4TrsypQC0EjhekxOTlpJ\n",
+       "8PjxY8v6wxeDMiOVSlkkca/XUzwetyaU5pWdn/BQGry5ubk7TkfAcfQgTFLvcz2oxYyvBBHAPp/P\n",
+       "lL9LS0sWKLO8vGyYssfjMayUSR2cW8xKIColEomfSKaanJw0mAuVy9OnTw3vPjs7UyqVskYHQ/BR\n",
+       "BXU4HNbs7KyJQFdWVvTo0SMjH7lcLgUCAc3Pz5tuDwd8vDHwwmC4g8qFcojnCwaDZh8wNTWlpaUl\n",
+       "S30ilJ6JIPyL6elpBYNB5fP5O8oUYtUo3UBkoBL4/X5TpCC7wqgRbL5UKsnv98vn833Bmhu9SDIa\n",
+       "NVnJ5/OWLgo68e1vf9tU2C6XS9VqVQcHB8rn80Y2b7VaOjw8lM/n08HBgR3T1M2tVst2tVKppEql\n",
+       "oq2tLcvHps6mQYKeOsqjYCGAlGBsyASuXq/b85ycnBiWHAgE9IMf/MAQCfjKcI6Pjo60u7urfr9v\n",
+       "HGHyChnMYBqDXzJKm/39fZVKJfN1JpKZ045dmoiM09NTxWIxHRwc2OdM8w1LEZ40wmBU2sB1NK7f\n",
+       "//737/X9P6jFTI1brVYVjUaVz+eNrgmZBXUwC+Ht27d27HLkezwes2O9urrSzMyMDg4OVCwWTegq\n",
+       "fW62CDw1NTWlTz/9VIeHh7q4uFAulzNXfrBoFCyBQMCmYjc3N8rlcsrlcjo6OlK1Wr0zaIHWyciZ\n",
+       "m67T6diNJ0lv3761G6fT6ajT6eji4kKbm5uqVCqWaY21LBAheDEY+sTEhOkpOUVevXolSaZHhFDv\n",
+       "crm0u7trDa7P5zNVCaNykJnT01MrrSBnwZRjMHOf60ENTR4/fmxqjUKhYEc1/OXV1VV5vV4bCkxP\n",
+       "T+vZs2fGGkun00bhjMViRuDnJqArJ1RmbW1NvV5PmUzGFCcffPCBTbqSyaQhDq1WS8vLyyoWi1pa\n",
+       "WtLr16/NCJ2yaGFhwRbS5uammZYD1yWTSYsVxn6rUqlY6CVO9r/2a79mo2LMDj/88EML6gF9gP22\n",
+       "tLSkTqej5eVlY9oNBgPNzMwYwvLixQsVCgXNzc2ZFArL3dXVVfOwo3Q7Pj42y6+nT5+qWq3acIpp\n",
+       "57Nnz0w4IUnf/OY39ebNm/f+/h/Uznxzc2OLh8anWq3K7/eb1VS5XDZuAtpASUZu53fOz88Nt4bF\n",
+       "ViwWdXx8rEKhoJubG+3u7trud35+buQheBEoPzY3Nw1/vrm50f7+vi4vL5VMJtVqtcxhnuFNrVZT\n",
+       "MBjUzc2Njo6ODFLM5XKGV5+enqpQKCgajRpa4Pf75TiONjc3jTVYKBQMuYHNB1LCZ7C3t6fT01O9\n",
+       "fftWzWZTm5ubmpycVLFYNFuFUqlkCxsODBBfo9GwxCtOIHoPRuBMD4vFovFEjo6OJEmlUkm1Wk3f\n",
+       "/va37/X9P6idGcVEv9+35gycl24fD2MmZGCmQFyzs7OanZ01kxPk/B6PR+l0WoVCwcbTsVhMMzMz\n",
+       "ZiSDOXksFlMul1MikVC1WtXS0pJ96YPBQOl02iaUcDFisZjpEqmlyfPGrZ5pWiwWM70jzSlmLnNz\n",
+       "cwYxYhkQjUY1Oztr+dnRaFS7u7uam5tTMpk0u4Dp6Wnl83nNzs7K6/VqfX3dfEGWlpZsAVOWAOHN\n",
+       "z8+r0WhYYzwcDlWpVLS+vm6mNo7jyO/3KxAIWI1Mg45d7tjYmJUz73M9qJ0ZWAquAWqGUaMRamIW\n",
+       "KcB+OByW1+s1iy2mh5VKRfF4XIeHh+r1elpYWLhTl25ubt5p4mq1mnZ2dmzYkc1mLd0KtOHi4kKB\n",
+       "QED1el29Xs92e2ptScbhuLi4UDabtckaihB4FESUnZ6eyu1227+53W6zLaBmZeSO4xAJqthpcVpw\n",
+       "U9EE0izSkEoylALMmSkfhKy1tTV7L0RmAM3d3NxYwgDWXf1+/4tx9ujFlGlyclLb29vyeDzWFKK+\n",
+       "ANGglsbdvlKpmLKY8gG3+L29PSUSCZ2fn6tcLtuX1uv1TDSKIhrLglH3TTjVqKpvbm7ucKYxZYzF\n",
+       "YlY/klhFedRut3VwcGCcajgcmJJzE5I6heMQpt69Xs8Yb8QpwwVhetnpdNRoNMyilgYaKic4OJKu\n",
+       "s7MzS7Nlp+ZxuNlG3aBwCcW0Ef60JJ2cnKhSqdzr+39QixkpEl8cDpTQEofDoRKJhO2O8XjcvNU4\n",
+       "PvFcwwJgenraQH1qV/jGICSw35Dej8qQwKkRmDIClmTavsnJSUszHR8ft+QoiEBYDIDCILLFX1n6\n",
+       "3GwctTf5h5OTk5qfn7emdTTInlKKMT4nEnCfJEttZefv9XqmXIfYBE8D2RlCYqaeOB2NKmL4bmq1\n",
+       "mo3L7xvQ86BqZmC4wWCgDz74QNJtAI/L5bJFzFCA4UM0Gr0TmgO1c25uzhoVFhYEJWRVdPHHx8dy\n",
+       "u9168eKFscvYcbrdrubn541sj9KCBqnf72t9fd0W6XA41MzMjE5PTzU7O2uk95OTkztyJALlWSzo\n",
+       "76Bk8viStLGxoYmJCQWDQWUyGR0eHiqRSMjv9xu9c3x8XIlEQgcHBwqHw3K5XHrx4oUkGSkJKwYY\n",
+       "c8Cc6Ckpg9g8UOPwOpB8wWZE5Q2py+Px6C/+4i/e+/t/UIuZ6RwoAFyGUTX1zc2NHZcYrqCXa7fb\n",
+       "Rg2F54v+T5Idx9SSRDfA1Ds+Pr5z7I96O8O4g4W3v79vo/RqtWokHpz6GS6Ew+E7Tkkc5d/5znf0\n",
+       "+PFjs+2tVqtKJBKSbssAEgAwICdFitIF05ZarWboiSTjdIRCIf3whz9UJpMxfJu+YNSknShliFpX\n",
+       "V1cql8sqFos2FqfcoT/BWQlDSIxgNjc37/X9P6gy4/r6Wvl83tx6+OJZSOjrVlZWDO1gp8QEBTSB\n",
+       "IQJaPlx/qAfBUKFTYrJCuCU3CZTPUddPThB2O3b7vb09s8qFXzwYDMxckOFMt9tVJpORx+Mx+uX5\n",
+       "+bnZC1CmsNtDC8UOjIYNZiD5KLD8IPUjKGAhUw8Tq4zR+dbWlinSuemwHBhtaGu1mrlCUbbw+cHK\n",
+       "u8/1oBZzMBjU0tKSfD6f6fskWWkBTvvmzRu5XC5tb2+bnOfq6kqVSsUceZD9Q9ZnTM3OTPOYyWSM\n",
+       "D3J+fq5UKiW/32/oCPAbbDUGL1BFaQLdbreePHliE0NeFzug4zjK5/OGzDDRQyw6NzdnYTfcMBMT\n",
+       "E/aaqYu9Xq9mZ2etlGi327bAaCoLhYJFGEtSKpVSOBw2mRNJsdy0wWBQ5XJZ8XjcbH8XFxeN7skN\n",
+       "m8lk7DsJh8Nqt9vKZrNG6L8PLCc9sDLj7OxMsVhMS0tL2tjYkMfjUTabNSzz0aNH8ng8BrcNBgMd\n",
+       "HByY3B7WltfrtUUtyerS8fFxzc3NGZ8Zgerq6qp5PlMPhsNha/4kmVbQ5XJpYmJC6XTaHDUJkCRy\n",
+       "+PDwUF6v16RbDFdoEhG5UufjWMTrJYSSm7pWq8nr9RqRCMcmdkPHccy+tt/vWxIXTkqQkyBt4ZvH\n",
+       "e2y32xbySXPIzs/PwPGAtcfzlkolyxR88eKFNjY23vv7f1A7s8vlMhUzihBJpm4eLTXAcVlk7Ewk\n",
+       "n2IcMyppwgT8/PzcRrmEl8diMRvM4I1RKpUswxuCPibiPOfExIQNccCMYawB/TFZZNwO1ZPoCRor\n",
+       "pmuZTMbKIZfLpaWlJXMukmSliyRLXWUcj3H6+Pi49Rtkn1DOwKaDcgtXA1PGfr9vblIQpnD+9Pv9\n",
+       "BllCaIJNSNP8vteDWsykgfp8PpviUTdDCSV1aVRS5PP5VC6XdXFxYWbhg8HARrHslOzQNzc3Ojg4\n",
+       "MPd3bK5gjnG800TijUz9PnrzXFxcaHl52aZw9XpdpVLJeNMQcQaDgQqFwh0nJkSnHOUslmazKb/f\n",
+       "r4uLCxUKBV1eXt5JkJVkcjE4x5CsEPQiFIAqe3Z2ZiUPXiBXV1dmAIOdGa8D7gfvncELjSjREHC9\n",
+       "y+WyiRve9/qFLWbHcf4Px3HKjuN8OvJ3Ecdx/sxxnG3Hcf614zihkX/7rx3H2XEcZ9NxnH8w8vdf\n",
+       "cRzn03f/9j/+rOdMJBIWcoMCotvt3unAsZylAWMHi0QiFuIIjssuTvzZqLn30tKSDT/g8DKqxaEI\n",
+       "ohKDA0nWmJGP53K57gxo4JZQj3u9XiP8uFwuPXv2zLDcUaOW6+trw8cZdYNf4+9BzAQ3KRNJSiaU\n",
+       "KQTJw7CDsz2aQoXbfzQatbwXyjS425CVZmZmjKjFoudmSafTNuj6ZW4A/09J/86P/d1/JenPhsPh\n",
+       "mqQ/f/f/5TjOE0n/gaQn737nf3FA/aXfl/Tbw+FwVdKq4zg//ph2wcvAV4JFjHIYSGpvb09XV1c6\n",
+       "Pz/Xo0ePVKlUrNtGuDoYDEzmM6oiJpEUtcX8/LyZMlYqFdvNGE1Xq1VTVJA/QqgjUqnz83OD1RzH\n",
+       "sUVHatbr169tKvejH/3Idv7RsHeGF4PBQL1ez9yOGB7F43EbnDDVxHKX9K1oNKqtrS1DZEabUxQl\n",
+       "fC04J0HColSChE9J02w2VSwWTUYViUTU7Xa1ubmpTqdjXnm4mt7n+oUt5uFw+P9Jav7YX/+WpD98\n",
+       "9+c/lPTvvvvzP5T0R8Ph8Go4HOYk7Ur6muM4aUn+4XD4vXc/989HfucnLjR+kMHBM9l1KC2I7yVk\n",
+       "kp0CtTBEelAIBgXYuNL1X11d2dEbCAQsdpcpIDcJRzzdvSSDzAjhOTo6Mg0ftFDCHqlhR4Mqg8Gg\n",
+       "rq+vVavVDHo7OjpSvV63RpHnoLZH5FssFjU+Pm78C25U5E1YI9TrdU1OTmpnZ0fX19eWzgrGDk4O\n",
+       "dk3utyQjHAFPIh8DimPIgxSMEuk+1982mpEcDofld38uS0q++3NG0l+N/FxeUlbS1bs/cxXe/f1P\n",
+       "veD/MqHiiAOKgg6ayWQ0Njam/f19410gCWKMzaiY+nI07J2Fge/z2dmZmZr4fD4lEglDDtgNuQGw\n",
+       "OQCmSiQSloLKLriwsKB6va6rqyuTGfF7oxxo9HvY2wKpUTKQ5YKu0ev1an5+3hyf4EP3+33L2c5m\n",
+       "szYGHx8fN61kIpEw+RWnCRM/FienDTa2fO6Y5BB+hESMTEF6kftc/8agueFwOHQcZ/g3+ZhwgMvl\n",
+       "svL5vGVoY1RI/cdu+8knnxhJhklXpVLRzs6ONVkzMzPGuYD4D0qCOpoc7W63a6bbYNHLy8t6+/at\n",
+       "pqenrYSA1skE7ujoSKlUyqA0jF+wGKDO3tra0uPHjw2XxYeDgM61tTVThayvrxuZp1QqaXl52WRe\n",
+       "1Kh4YuCev7u7K7fbrZ2dHaXTadVqNRukXF5eWoNGTMSoOSJMQEItGf/n83lNTk6q2WxqZWXFXlM0\n",
+       "GlWj0bAJZiqVsmzt973+thdz2XGc1HA4LL0rIaBJFSTNjvzcjG535MK7P4/+/U91pP7zP/9zO47h\n",
+       "/i4sLBhBB89hFtTi4qLliaRSKRUKBblcLlNZBwIBOY6jpaUla9Aw+2ZHZpoH3CbJoLtOp6OzszM9\n",
+       "evTIGh/4C8QrYAMbCAQMWaAk+cpXvmJm3M1mUx9++KFZbMF/6PV6knRnyPPBBx/I7XbbTsv7xnUT\n",
+       "jLjb7dqAp1AomJTq8ePH8nq9+s3f/E3zf6MJhBiEskSSlTRQa/GI5jl9Pp9pM1dWVu6MsA8ODlQo\n",
+       "FHR6emon4Ptef9uL+U8k/WNJ33r333858vf/wnGcf6bbMmJV0vfe7d6njuN8TdL3JP2Hkv6nn/bg\n",
+       "T58+NWI4I9P9/X396q/+qqEbyeRtZYPl7eHhodLptPb29swK9u3btzawyGazOjg4MOLR4eGhJYxe\n",
+       "XV2Z0oM6s1qtam5uToeHhzo9PbVQ9lHD7k6no0qlYsc5BouO4xh0ODU1pd3dXYMPWbSSzCkUzjY7\n",
+       "IST+4+NjRSIRbWxsaG5uTs1mU4VCwRphJEyM88vlsvGNT09PVSwW9fTpU21sbFisXKvVUiaTMZ84\n",
+       "8GVu6kajoUgkYv0HFmg852Aw0OHhoQKBgPUy/X5fCwsLZgN8n4GJ9IuF5v5I0nclPXIc59hxnP9Y\n",
+       "0n8n6Tcdx9mW9Bvv/r+Gw+GGpD+WtCHpX0n6neHnt+nvSPrfJe1I2h0Oh//vT3tOpEDBYNDsqPB+\n",
+       "kGR16tjYmK6urvTkyRNtb29blANcBkbOp6enRjQiv2M4HJoOT5I1Z8Fg0LzlKEcw9Qa6op5Fmg+S\n",
+       "4fF4lEqlzA63VqvpzZs35ssxmgWSSCQMv8atHjsBFgh8Z0br8XhcPp/PNIWUNzSXTPbi8bjloOBW\n",
+       "StIsDTTwJE6pEP5h6t3c3CgYDN5R8IC0YK07GAyUSPz/7L1ZaKx9nt/3LS0llWpXrSqptEvnvDrL\n",
+       "291Dz9uDjQcTE8LcJJCLkItAcO587YDN3BtMIJAJODdxArnwQMDBZGCmJ8MwNtP99uJ+l7NKOtpK\n",
+       "qr1U+y6VVJULvZ/f+6g9Y4cjjz0R7wNNn1fnaKv6P//n9/+ucfl8PmWzWa2trcnr9VqG88def2U7\n",
+       "82Qy+a//kr/6O3/Jv/8XpEKkAAAgAElEQVRHkv7RX/DxLyS9+P/yPUmjZM7FBYzDAW0ytOvr16/1\n",
+       "8uVLFQoFJZNJG1EoaUc2eXV1ZTjtxsaGldlId1BaJBJRtVq1TAjiB549e6aLiwujsEOhkImgSqWS\n",
+       "pZOCgZNl7KwWW15eNikqCxK4jYMlqUW0PPG7E9sFdY1eGbqZ2d/tdlsONQcyyJB4PK5Wq6VoNKp6\n",
+       "va5UKmWZG7VaTdFo1HyB6L3Bop1tt51OR/F4XKVSSV6v955YqlqtKhKJPHgxPyoGEAKEkzOSROJX\n",
+       "0QtwWKvX64rH41pcXDQXNrsIcBLB4lNTU6YppimVXR9bPtASckzczPRjE77S6/Vs12s0GrazStLh\n",
+       "4aGV9bAIsOXz+chZ0UWzuKCKGaHI7CDls1wua2pq6l5MADUXPCFGo5HBkuzwvDabm5tGHuE5lGQz\n",
+       "P85ybn7+HmY1m83a+xSLxdTv9+/1dCNs+tjrUS1mZJ9ER7G4sL+DFXMSJ+VzPB6blvnq6kqLi4u2\n",
+       "ONxut9msIEui0ajcbrcd4GD80A47/W2YS/kZwK4hINjBOp2OksmkVT7wu5C5TKoR7B6VEcTDjsdj\n",
+       "9ft9sx5dXV3ZWBUMBo2sYffkBkfoRKUyvw8kBg4dbuzZ2VlziDtvimw2q4uLCytIgtZHvcjrOjMz\n",
+       "o42NDVMiwiAS6viQ61EtZpqclpeXLREehRi7BztWKpUy7S//hgUD89dsNg2V4AZIJBL29WZnZ21h\n",
+       "MWYgsCGhCC0DTmqaq3BZIORnxk6n04YdkwwE3Xx5eSmPx2Oh6HSukC6EfYuRJhAIKJFIqFAoaHl5\n",
+       "2TTctEZxWHOKjqD/WeBUBPO9xuOx5V8vL99B/pNvqpKXl5ftKQEbiSOb6AcO5oS+o88gifQh16OS\n",
+       "gHY6HZvTstmsUqmUVY5BB0Nr1+t1K01fX183NVun07G4rsFgYLsROcawWlNTU8pms2ZmZS4HLsvl\n",
+       "ctrd3dXc3Jzevn2rjY0NY/D29vZUKpVsFkXPUavVrEJhMBiYTqTf75sPj2oLZmQoduSsHEBdLpfK\n",
+       "5fI9qvv29lYnJyf3VHtgzwTE4MbB2BuJRLS/v286DmqM2+22Xrx4YX3aMH2RSESDwUD7+/va2dmR\n",
+       "JMvSwPZFnjQoDKVJb968+Xe8w//261HtzHjiwEKhndl5ms2mzWgLCwtGHzN7NptN0/zOzs5axNT0\n",
+       "9LQ5mUulksUMcLDDxu/1erW/v29aDoLE0Waww/NGYhsKh8Omcut0OhbEyMGOA1w8HrfdlO/HyMDv\n",
+       "j5WfAG9cLshD+XeYX/1+v+r1+j2yAzaOzwXFYeHFYrF7+mxn5jImhMXFRaO+y+WyWc1wkCN4Ojk5\n",
+       "MZ/jQw2tj2ox82il7pcmJk7+FMPzJlCHS8gh1cFUg83NzdnXQJ8BHc6bwmGITOInT54omUyaXmNp\n",
+       "acnmZsgFDjrOxiXknEg6KcHB5ErOMbMuYwjqN8LT0TswErBrLy4uajweG0qC+o8FOz8/b/gvFLfL\n",
+       "5TKordfrmQAJTyMtXOTRjcdju7HRiqOwu7q6Mve5JNNQJ5NJlctlk9A+5HpUixk70eXlpZaXl+3F\n",
+       "QgpJbFQsFlM+n9fLly8tr5mTuNvtVjQaVTAYVCQSUSwWUygUMmf2eDxWOp02RwU7GMlEjUbD0jOZ\n",
+       "n2OxmNrttkVx1Wq1e9AVB0aiuQh+JPPOSYwUCgWzICGjBIUIhUKKx+PmqKGbOxKJKJvNaji86wfP\n",
+       "ZDLWeY27xe12m7OEHZKDH4vWaTLw+XzG8i0sLBgCBG6NiAkNCF5DJ2LBpsOZYm1t7UHv/6NazM7O\n",
+       "EsQ29XrddlL8geSnMR82Gg0TyvOG4apmZ0NQTiALmmAwWrfbbcgBc6Pf79fp6alCoZCWlpaMOUyn\n",
+       "0wYjTk1Nyev1GiMnyYwEkuxpIn3rxQsEAjbjczAlJkGS5TUT+8UNi1E2mUya4o2DGU8qxhiETcSW\n",
+       "rays2BMNdAVyidZbcH1nsSWtsox4S0tLikajtgF4vV7bwR8qA31UB0BO9lNTU1pZWdHCwoKePn1q\n",
+       "ZEmpVJLP57PK20AgcE955na7dX5+rlgsZuMKFiUOX0tLS6YDBtpaWFiQy+Uy79z09LQ2Njbk8/kM\n",
+       "tZC+zUhutVpaWVkx5zb5d9PT03bydyIm2LUWFhaMjVxZWbF5tt1uW8p/IBDQ7u6u9buUy2UtLi7q\n",
+       "5OTEdBg0yeKUWV5etqaAbrer1dVVUxAyD1cqFfNSUkzE68c5xO12m0IunU4rGo3q8vLSkCSXy6Xp\n",
+       "6WmtrKwYjAgj6na7re/lY69HtTMDtQUCAdMlv3v37l6xZKPRULPZtEXxh3/4h5YOKt3Be+VyWd1u\n",
+       "V7/85S/NOMphi0c+Viko4Uajof39fdXrdWUyGZ2cnBjsdX5+brgz4wKkDa5xOlempqZUKpV0dnam\n",
+       "y8tLffjwQaenp5ZzjH+v1WopEAioXC5rOBzqxz/+sVUQn52d6csvv9TBwYEikYhpTKLRqBEndL5c\n",
+       "Xl7aCEQEAcgGehVm2mq1ahQ/6AQuF3Z4DoAHBweW5j8cDg09arfbyuVy+vLLL01wBUZ+fn7+oPf/\n",
+       "US3m0WhkgS/Mz8x7kBzEUHH4onrA7/db1ACGVqq8oHbb7bbNhGRnEMvKLI7fDf0GGg4UdWCr4MeQ\n",
+       "C/jpSqWS4df9fl/xeNyqkZ2MGrJT6e4g+fTpU0sInZqaUiqVktfrVaFQMHsVMBomWwgTEB/w36ur\n",
+       "KxPz86Qg+UiSZV7ARIIiMZ6hNaHcB+0yBmHatIbDoQ4ODuwGwcn+sdejWsxOCSb+OmIBgMSwQaFR\n",
+       "lmQkAxemTN40Sab8KhQKJhuFlga7hibnjebmcYqKCBwEJ2Z2BE3wer2mXgsGg9ab1+l0LHgRJlOS\n",
+       "sZcgF6QulUole/xj2r2+vrYcPHyHtHNhdOXsACwIFd5oNIxAcjbJgkrgwOE1B8sHvcBWRWuX9O1N\n",
+       "gaAfrcjHXo9qZg4EApaMeX5+roWFBS0vLxv2urW1ZUZR5Jvs4pzWYbxY3OCn6HK///3vq9Vq6erq\n",
+       "ynKSWaxIIsPhsImDJJlSj7LI5eVlg/4ikYjOzs4sYyKRSBjpwgzNgk+n05YNcnh4aMlC4XBYxWJR\n",
+       "U1NTRlm7XC4tLi5qaWnJ8u5wXaMbATdGw4IUFVES1RhIUr1er9bX1yXJFHsIjDY3Nw1LxmhATh89\n",
+       "LYiYiFzg6YGQf25uTn/8x3/80e//o9qZOYWfn59reXnZMF1knThD0DjMz89rd3fXRhOCu5PJpJ4/\n",
+       "f24RWW6327QQ1WpV6+vrikQiJtwH6QBpQG9wfn5u6USkzeP0xrkNxY2ajViDvb09pdNpM4HOz8/b\n",
+       "fHtwcGC1wdQo4I6GQCH0nHRNdl4IJBKXkK5Go1F74kh3Yn+oe9qv5ufnrSgUhR3RDDwF0GvzNch0\n",
+       "5r0BwuTwvLu7q0gkYibkh1yPamfu9/sGf/3Zn/2Zfvu3f1vtdlvxeFzn5+eWRN9qtfTixQv9+Z//\n",
+       "ua6vr/Xpp59a6MnU1JS++uorcx9nMhnVajWrAh6Px/r6668VCoV0cXFhMyewFegH83csFtNPfvIT\n",
+       "a1Uir65YLBpEd35+rq2tLZ2eniocDqtcLqtWq5nYnhDETqejSCQij8ejDx8+2LzKPJ9MJs3kinGV\n",
+       "Cgaqz1ZXV1WtVuXxeKzMMpvNKhKJWMoS54U/+7M/06effqpisaijoyOzoa2vr1tGNTY0FjPoDTsu\n",
+       "VPft7a3VGhPpdXR0pGw2q1qtpp2dnQclgEqS66FWlb8ul8vlmvz9v//3TdV2fn6u1dVVtVotJZNJ\n",
+       "QzM4fIHBvn79Wmtra3K5XLq5udFwOFShUNDu7q76/b4SiYQymYxpbweDgeG7xNeGw2GbaZkjMco6\n",
+       "O7tpPV1fX1cul9PNzY22trZ0dnZmhgGqG77++muzMaH7WFm5c5Dxc6RSKYMbh8OhhTs6c6pJ0/+t\n",
+       "3/otZbNZ+Xw+3dzcmEEVCSp+RZJIA4GAstmsksmkoRJTU1NqNpsGza2srBiRA75MItLh4aHS6bS9\n",
+       "Xq1WS6urq6bDoDSTpyXk0D/+x/9Yk8nE9e94u//C61HtzLOzs5YFt76+boczxPDMd5VKxSJkR6OR\n",
+       "fa7X67VETEgVpIuBQMDqFNBVBINBLS4uKpvNajKZaGlpyUwAoVDIEACYQHKggbH29vZsjAD/xTj7\n",
+       "4sULe3yTzYYKjR7C0WikeDxuWmDwaqrZQEGIFvN4PAZPRiIRhcNhi/tiNmcHZWeFJr+5uVEoFLKx\n",
+       "jbgAAtxxpNB6hY4EDJv4sUgkYn0wEEsQXN/R2Y6LmKd6va7PP/9czWZTBwcH6na7CoVCKhQKuri4\n",
+       "UC6Xs/yJ4XBou8dkMlGn09Hr168l3YnsZ2dn9e7dOw0GA3U6Hfsc9AlkctRqNfu6qOCOjo5ULBZV\n",
+       "KpUsoAY1WrlcVqPRUKFQsJ8LC9LU1JQ9gtGF5HI5w3vBnGEwa7WaxYVNJhNlMhnLqM7lcnr79q3K\n",
+       "5bIqlYqRGs62LX43GNPXr19biQ7Y9vv37/Xq1Svl83n1+321Wi2LRqCThTl4OBxaLcX5+bkmk4kl\n",
+       "i5I2RdA7X+/g4ODBUQOPajGjCeZxL8l2lcvLSwtPxG+WzWbNXsQLCRkAPlwul5VKpVQul02jjEmz\n",
+       "UqmoVCoZlEcuBnit3+/X9va2uT0QNNGuhCyz2WxadRr5GxAryCddLpdptIvFos3Q/FzhcNic18Bu\n",
+       "LMhwOKzl5WXTGHPYKhaLJkuFQJmenjaCCP/heDy2aF/ERODtlUrFKPVKpWJpRjyB2HERLhEeU6lU\n",
+       "9OHDB7ndbrOtfRc27rg47YM+ODv9qOriEJfP5/XixQvTA1erVbNAJRIJo6ihpHnsMi+D14Kf4ggZ\n",
+       "DoeGjni9Xh0cHFgpJW4Xcu5KpZJlSqOAA9cmeV66Q2kqlYo91kn/B+rqdru6vb3V+fm5hsOh0um0\n",
+       "zeOBQEDxeNx6TJCvMl/DKKJXhkiCgPJ4PMrn8xZGw+9GxBkVGYjygfXK5bKp/Pg7dN/D4VAXFxem\n",
+       "PFxcXDQi5yHXo1rMTmiqVCopFAoZrIVFCS9dOp3W+/fvzVgJrjw1NXWvBhgSggoDqHD8hOgTENf7\n",
+       "fD4bDSaTiba2tiTJNBYQBEB2zWZTp6en9/DhyTe9hQjZnbMxRT/OMBvQjbW1Nfl8PjuwES9wcXFh\n",
+       "NyKYLh4+n89nNDM3E0WaUP8IgMiRCwQCevLkiRmAuaHRUDMrh8Nh02Vw0BuPx5qdndXOzo7G47HZ\n",
+       "wzAKPOR6VAdA0nqur6/NFoX6ixcWN/VwONQPf/hD0yjgvNjb21Oj0TB6OpVK6fXr15qdnbWU/LW1\n",
+       "NRPYI2Mk4urq6korKyv62c9+ptnZWTWbTe3u7lpGBdEFaDRYnPgRqSlzLuZCoSCv16snT56oWCzq\n",
+       "5cuXajQa8vl8Zun//ve/bzfd+vq6xuOxIpGIwWROM4Db7VY2m1U8Htf19bUd2nw+n4rFora3t+3Q\n",
+       "GY1GNR6PLf7LaaeCAEkmk6pUKjaeTE9P6/3794rFYra5cPMmEgkVi0VjBxlHQqGQPvvsMzuvfMz1\n",
+       "qBazJKNikT4ym/r9fvP7BQIBjcdjXV5eqlwum2653++rUChocXFRiUTCymrQYnAzUDrDaR9FGN6+\n",
+       "UqlkRZRcWJkkGfFAtNfMzIzq9bolGUkyZGEymejJkyf2+WRJo97jxiBNU/q2dhnbP27x6+trxWIx\n",
+       "XVxcaHt725wpaENubm6s9ow5GSgTAb7H47ECIV7X4+NjS/eMRCKSpJ2dHYssIOGfoiGeDrxWOFSI\n",
+       "I/vY61GNGbz5w+HQgr8vLi4saahcLlu2GwcZEAAklqANKMZQdI1GI2UyGXtTsejzKC+Xy8pkMrYT\n",
+       "IgZyu906PDy8F17O45tDZLFYtJIgrqurK4vJrdVqarVaevfunSUIcXgjI7pUKlmf3tHRkYbDoYrF\n",
+       "oiqVimXKESGLZczZBFCtVlWpVAwRGo1G2t/fV7/f12QyMaIlk8nY7E4z7XA4VC6XM2jQ7XZbqA6t\n",
+       "q3QiQtW3222L7AUZIYrgY69HtZgZKQjxQ/rofJzhLiZsEJaKbAhJRl37fD61Wi2l02mdn58rFAqZ\n",
+       "ZhnxuZPVIh3T6UFEA4yqDAwX0Q76iJubG1vULpfLVHX4/wikaTQaxgzOzMxYZdni4qLdmOx+zhAW\n",
+       "xP/8G8Yjvg4pns5EJwypoD8conHJkBNNulKr1TIZKN/X7XZbEA4xB+g6+v2+RYylUikTJX3s9agW\n",
+       "sySDpxDpYyJttVomyEfiuLm5qVqtpkgkoouLC9XrdS0tLdlBDCgqn8+b0+L4+Phekj4J+YiJgAVz\n",
+       "uZxub29tAbCLIfZhIQWDQa2srCgYDJoNCxcISUE4y0FiEomEjTmrq6u245O77PQ+IpJCCI8LhYQh\n",
+       "ZlrGCppVgTOB8cjpcx72CI10GhDIzEPBiHqx3W7b0w/MnXiGdDptf/eg9/5hS+ev10VANy4Nstuk\n",
+       "bwU2ZC87A1KwRd3c3JgEUvq27BxblBOeA5FgXGCnhyrv9/vmO0TAQxYbdixJhm4gPqKNiR1s8k09\n",
+       "GYJ50opI4GQ+5qnDzkuBJt19LGx02GiNITlAJqjOADajgJ5YW0lGGg0GA9Myc6BD3glGjRWLkYjD\n",
+       "JN5Ep2vnodejWszQrU6xezQaNREQSaCc4IvFokFy6XRay8vL1pgkyYJW0um0CYicNWaMAM4QFiSP\n",
+       "n3zyiVmPYN1wOuPCRvREjQQ/L6U/dHOvra3ZzxYKhYxdQ2Mh3S0wnh4gDjzOiaPFRY48k12W5Kfb\n",
+       "21sz92JjqlQqtoNfXl4a6gGD1+v1NDc3ZzYwXo9nz54Zbh0IBJRMJrWysmJPDEa5UChkZ5LvGEDH\n",
+       "hYAeGA6HNgk9lOLAiqHHALMlWBvEg+gpAgR5pIJXI/CJx+P30u35e8IG0XE4Y6t4DJO+ubS0ZIlE\n",
+       "PDX6/b7pS25vb7W7u2s3AkQO8Be1Djc3N9rc3JR0d3Mjt4S1xPOIThstM9pqtBnslmhSGF+YddFO\n",
+       "+3w+Kw/iey0sLNjC5KzCE45dGU0KP+fs7Ox3HkDnRUUDEBQHJYTx8/PzJoJh/CCbmEZTZxMUj2eI\n",
+       "AQ5yIBlY/hH4A085682urq7uLWBoa5RpuFzozmOXZ5dbXFy0XZRMC+xbUO+SLNUTxzc3IsmhLFLo\n",
+       "ZEYmfm/CGQmYYSTg5iVEETYTSxiLnacdryuSAMRd09PTps9wRvJeX1/byPbQ61EtZubRfD6vg4MD\n",
+       "o7MlGfSEUKjVapl9ajQaaWdnR4VCweSXtEVVq1WtrKwYUwcdzGO3UqlYLx7ifacWIxqN6ujoyJzS\n",
+       "PNolWeBhqVTS7e2t8vm8zcbValW5XE65XM7kpefn57q9vTVIC1oaa5jP55PP59PPf/5zDQYDgxgz\n",
+       "mYy9Liw6NNrD4fAeC8mMPTU1pS+++EK9Xk8ul0sfPnz4N3bp0WhkFDo+PgqJJJkbm3o3n8+ny8tL\n",
+       "vX//3oy+pJPOzMx8p81wXuh8vV6vBRA6YS1sSQiOtra2DPnI5/M2z25ubioej5sQCLYMkgOpKLYk\n",
+       "yBi+byQSsURNoD1ob/pMyO5wu92WHE/gSrPZ1OLiolZWVoyCnpmZ0c7OjlHbkozsmUwmJk91u93a\n",
+       "29vT/Py8nj59qrW1NdNmuN1ug+dAdgiBIeSm0+lodXVVHo9Hz549kyQjcoD8CFJ0uVxaW1szjUci\n",
+       "kbCwRij5m5sb04pcXV0plUrp2bNntttLspYuzMUfez0qBpAZEScD1iTeMMYGFs9kMlEwGLTFglIs\n",
+       "mUyacJwUTTpCUIzNzc0ZS0dqEtpfzJ5EZwWDQXk8HhsJ/H7/PXYRyI2QdJqsKLxk9OFxDYXNYdF5\n",
+       "c5Gl4URiIpGIWai40SaTiVmfeFIwg4NvExU2Ho+1u7tr9XI8CUCO6Cjn4Onz+SztH7EUyVC4ZRB0\n",
+       "kY46NTVlN+nHXo9qMZ+enmplZUWlUkn1el3r6+s6Ojoyiw6QGtgxkNLy8rIJ1IPBoN69eye3261y\n",
+       "uSyXy6Xj42NbXCwOOkv6/b5lTlBq+fTpU8OjFxcXdX5+bofSZrOp1dVVi9kKh8M6OTmxwxflNc5k\n",
+       "e2fWXSgU0urqqt68eWM+RkwEr1+/1tLSknK5nNH5CHyIC9vf31ehUJDL5dLTp09tLODQVy6XbdH+\n",
+       "7Gc/0/b2tpEhc3NzNgc3m02tr69rfn5eFxcXarVaWltbUy6Xs0q6733vexZXhkDf4/Ho66+/tswP\n",
+       "TLrz8/M6OTl50Pv/qMaMXq9n9n7gMHbHcDhslCoah/X1ddP3BoNBSXdz7Pb2tubm5qxv78mTJ3YC\n",
+       "v7i4MF0xGDDIBLamXC5n3x/cmIVMulI0GrU0Ig6CsITM6pAXy8vLdjMRjMgh1rlzoxXh90DhNhqN\n",
+       "VKlUDPPmUHZ9fW1JqLjGnWmkqVTKdnFCJoHiqDMmviASiSgUClkvDKmq6Lv9fr8KhYJ1gS8sLGhh\n",
+       "YcFKgJjdH3I9qp2ZIBRsSVtbW8acgVBAJiwvL5uplPmRGbndbiudTqter+vTTz/Vmzdv9Df/5t9U\n",
+       "uVzWkydP7nV68OiE2v3hD3+o0WhkWRS3t7eG+YZCITsIFYvFe8WWTgq90WhoaWnJXB6cBfb29owl\n",
+       "e/v2rdbW1iwUkd8FjHg8Hmtzc9McKRsbGyqVSnry5ImOj48l3SVxxmIx3dzcGLSIRxA3OQuUcMXl\n",
+       "5WVDc7gp0Vqj1Esmk8YoEmSD+AqSik0DpzxM6Ndff/3R7/+j2pnZAbEVwVRx0CDHAQYskUjo+PjY\n",
+       "SAOv12u4KwU6pGuy46HZRfnW7XaNHcOlTZVYqVSyghuwWeJqSdiE5CEfOZfL6fLy0vBxyh/5upgN\n",
+       "WFD8D+gOxwhh6FDXnU7HXhcOb5NvKn5hFsvlsv1+UO6QHpVKRb1eT+1221LyGYdCoZAtbq/Xq8Fg\n",
+       "YMgN6Adjymg0Uq/Xs4Bzvhe/w0OuR7Uzoy9GSE9wCTsBxIIkOygSqcUjnV2Ckksn7RwMBs3oCoWN\n",
+       "LmHyTVEk2gaSPwlZ8Xg8FkFF9BYGVeSdCwsLCoVCZl/iBlheXjaNCaQHOyXhNaSKDodDbW5u2u4K\n",
+       "Vc54hUkBWJBIXvoI8fJB9XPjrqys6ObmxtoIyKqm5IfDIgKptbU1ixljLEE5KN1xAlS4Qd87VYMf\n",
+       "cz2qnZkDDzZ2UoZgppBIoglAl4w2wxn44vP5jFFDx8ApHrgPFwW4bDgctsMOXxuiBBIEgT4qNRYo\n",
+       "cQLsnIRzk13M4m42m3ZYhRzBMV0oFCxYfXZ21jTDkEKQN/gMCZ8hgQjVGuwgDmxmdUnGbkK5O9GY\n",
+       "Vqtluy94uCQrRWIEhCEkUsypfXnI9ah25lKpZMQJwSrFYlHStznHzWbTWC3kinjxstmsms2mGV0R\n",
+       "7IB6SDJzKWQJ8+dkMtHh4aHm5+cNy4aBdCIjVE68fv1aKysrCoVCJlDCtexyuQx9QYyPLR9Hy8XF\n",
+       "hba2tmyBd7tdo9eBIFEKYq/qdDoqlUqaTCba39/X3t6eJTkBCbZaLUUiEZVKJWUyGe3s7Ojo6Eh+\n",
+       "v1+ZTMYOfuVy2dpfUQIuLCwYovPFF19oe3vbDuPkbCDGv7y81NbWlikTodwfcj2qnRklGW+MpHtV\n",
+       "CeCpnKTJunAKi3CNMELwZ07u7Dw0jxIcgy0IRALMG0EQpThQ0+DG7FqId9jJw+Gw6bD5H+ozHNTs\n",
+       "dKR/8rN7PB5b+E7amEMpzg/pWxaS38vr9WpqaspaucjAQ4ONOo/dGz30eDy2EQvBEr8DYxznDj53\n",
+       "OBzagROz60OuR7WY6c8AfPd4PGb4RNiyvLxsyjkamXBA8IKDelDYA3sWDAaNoIhEIrq6ujKRETT1\n",
+       "2tqa5UMgZiLRCNIFoRCJmhx+UKpRPcaCxVyLrgSDKT0ljE348ljwkkywhFAKwoVRiI/B2Emyosxo\n",
+       "NGpKQ4wKku4llwI9Enzj/Lm5ORYWFuzwzSbDCMNsvrCwcC8296Pe/wd99l+zi0Vzenpq1Q6c3Eul\n",
+       "kk5OThQIBNRut5VIJPThwwfDYKm9hb5GHba/vy9JFkhOa+ri4qIlFaHJZdTA2nRycqKNjQ1DAgaD\n",
+       "gTwej37605+q1WopkUgon88bhvv+/XuD8FqtlslRWRy1Ws2cHvV63XbU6elpFYtFra6uWrA3vzNC\n",
+       "+JOTE52fn2t9fV2VSsVc5IPBQF9//bXVVAyHQx0dHek3f/M39eWXX+rly5dyu9362c9+plgspsPD\n",
+       "Q33yySeWdxGNRtVsNlUsFi1Mkq6T29vbe+9DvV63UWRxcVH7+/sW+bW6uqo/+qM/etD7/6gWMzkW\n",
+       "c3NzltRJ78je3p6y2axmZ2etMGdjY8MWAY9hHCjb29uan59XNBrVxcWFMVXT03ddd91u14LA0RlX\n",
+       "KhVFo1Ftb2/rw4cP5gghf4KILUic4XBo+opms2lRAdQ5RKNRuVwuVatVs0Whsjs4ONDTp08lydLr\n",
+       "udGi0ajRzji8cZcnk0kr6wQ92N3dtVELLHwymejFixdKp9N2cw8GA21sbNx7nZG5RqNRLS4uanFx\n",
+       "0ZLzmdc3NzfNYQ6hws/z4cMHbW5uyu/36+nTp9Yw+zHXoxoznJW6xEBRW4a6bGlpSVdXVzZXYo+C\n",
+       "gdve3rbETh6ZpHuORiPlcjkT2UPP4t7mEMOBjqxjBDTg3l6v1/oFXS6XHVJJx3cGnIN2cADk9wTq\n",
+       "arVaJn4i2IURyePxaGlpyRANMPi5uTkzA6BRhoyBpURNOBgMFI/HTYDFz8bXaTQakmRNU6A/qAE9\n",
+       "Ho9h6B6PxxCU2dlZe/Iw538XAuO4ksmkOp2OLU5JCgaDNp/V63UL8f51GxQWoEKhoEajYaQEc3Ei\n",
+       "kbCDGgudmZs3p9VqaWNjwxYJi4Y8DUQ5/X7f6sQ4ONLahLyUv3fKRZndCU+hpzsSidjuTWd4IpGw\n",
+       "uVyS9aLwqGcRHR0dmRqw0+nY3yHc50DIbgyeDp2O6o/FT6cgemq0GVDokFgUXr579842nYdqmh/V\n",
+       "YkaTiyKsVqup12xYVkYAACAASURBVOuZAyWVSpnegORJ0ADKLGHdRqORAoGA8vm8zYfoIKanpw3S\n",
+       "gp7l0X1wcKBCoWD2fq/Xq3a7bYclbE3OqKpqtWqWfdCQt2/f6uTkRP1+31RozWbTvvbt7a3Ozs7s\n",
+       "5kNznclkdHZ2pqurK/uZe72eAoGAjSLOUG/yojH24pa+vr5WvV5Xs9nU5eWlhTQ2m037ODgx+mgi\n",
+       "DIi4xYBQr9c1NTVlxUdnZ2fy+/2q1Wra29uzhd7r9R70/j+qxRyJRMzr1uv1FA6H740Ik8nEpJde\n",
+       "r9cOSiyYdDqt0WikfD5vRAP1aZPJxMgLREfkRWCShd1bXV01manH4zHUAVlkIpEw2z2PbZzReAPX\n",
+       "19cNDoQdpEMPxzQECf0swHH4HtGdkIJPHC74LkGOku4Vb3I5dSPBYNDievkY8F6/379XEITgCeIF\n",
+       "XTb5fOhOyHcmG/s7d/avXegcwGZ5FPKmUyID3IRIPpfLqdlsWuUwJToslMXFRbNHkRuHIRONBmgI\n",
+       "8yKsHTuTy+VSt9tVpVKx3fH6+toanehkGQwGFpfLU8EZmgJFj37YWd4pyUghDK8o7VDxcXMiYMKK\n",
+       "xc02Pz9vhBGzNjQ3LmvK4J0pTlD009PTBk0SVoMS0OfzWU4f3eQEW34XAuO4UGkBVWFkhVb+9cjZ\n",
+       "TqdjiZfMhjQJIHuUdE+/AA1MWCJEBzsvQn9JJgji/6GFIXDQS7AIUffFYjH7t0RsQVRIshwK6N9A\n",
+       "IGD493g8VrVaNXERMVosevoIabtCjO/3+41BlGSNVBgIGEFY7EQMMAODK6Nbcbvd5vohgDKVStmG\n",
+       "ws4NsUKR6EOuRwXNweoR5zoajbSysmI5coj1yWWjq5kFFYvF1Gg09P3vf992tqWlJcttQ5fA4r+6\n",
+       "urIkz42NDSMhXC6Xdnd31Wq1NJlMLDiRmZOdHbENoTOURBJUs7GxYXJVr9ers7MzY85+53d+x0Ja\n",
+       "qFCGUfv000/twIikdX193RCDr776ytw0uVzOrE8ItHgdv//975swiXR+pLJut1tLS0s2c6fTaV1d\n",
+       "XdlrShOAz+ezm7/ZbCoYDGpra8u0G6lUSplMxsRJD7ke1WLO5/OG2WYyGWOtOp2Ocrmc3r9/bwTE\n",
+       "zs6OfvrTn8rr9SoejyuZTOri4kIul0tv3rzR3t6eTk5ODF3AOo8pkyR+DpI0sgJZ8Qh/8uSJ3r17\n",
+       "Z7sec+RwONRkMrHyG7THwFtISEejkWKxmI6OjiyEvNVq6Re/+IXJN2dmZkxc3+v19OWXX1qGxmQy\n",
+       "UalUUjgctmB0dt1CoaBut6vz83NFo1FztKNf/vLLL7W6uqr5+XkdHx8rFAqpWCyaTpxEIhoDPB6P\n",
+       "oSPchLVaTefn5za393o9q3RutVqGdsTjcf30pz990Pv/qBYz2giPx6O/8Tf+huGdMzMzSqVSGo/H\n",
+       "djianp7W1taWvv76awtsAVYC1vL7/Xr+/LnNysBqpPwsLy9bKDcaZsgAxEvoHBANDYdDE75DZrx8\n",
+       "+dJy77B2MTIg1OHfwwAS6gg5QkyXz+ezKFraWZeWllStVu1rgJ1juaI/BYMtGgvMAxxiKXAnGoCR\n",
+       "C1yakWF6etoO2rOzsxbrJd2NSMQn4MPkiURm9sdej2pmBnxnHgYJYA4Mh8Nm0aFYkYZSxhAkoxQ6\n",
+       "ttttnZ+f266YSqXMmHp7e6tUKmXjBHMn8s2ZmW/rg53lP8zlLFQWtRP3JhoLrTBMJmMKDCEppUB9\n",
+       "zoKg1dVVc5AwG7OwGE+mp6eN/EGHwqi0ublpZlPOBsz+RAtweIW+9vl8ikajxnAC0dFzgkLOSTJB\n",
+       "+nw3ZvzaRTG60xns8XgsGLHVapkrJBgMmriF/jyYuXq9bppliAqCTyBDrq6urCA9kUhYNx41wexS\n",
+       "zMcQJ3weyjoe16RzcjMMBgMNBgOdnZ1pNBpZHzXlQFDcdBFCeDAKobIbjUZWk8wsi5GXxUR0F0gH\n",
+       "mRj0HV5fX9uNCjnS7XZ1eXlp5xRnSEyj0dDu7q4ajYalGqGQA9WZmZlRsVi0ZCOETB97PaqdeTQa\n",
+       "mWoOnfHNzV2fNEHWdJjMzMwol8vpiy++sDeCMhzyk+m/ZqckFJF4L2fJeavVMqSAvGGv1yu/36/T\n",
+       "01OzEgGtQaIgwCGwkOaqk5MTu/lYBJlMxna5wWBgITHoLyAlXr16ZawaPxMZcoVCwQ5ks7OzRtiQ\n",
+       "A40Yf35+XsVi0XyIRIxNJhOdnZ2pVCrZDXxxcWGvjySzYJF01G63TWNdq9XMHT4ejy2lv1qtmtn1\n",
+       "Y69HtTNj1e/1evrRj36kmZkZy2L70Y9+pLOzs3syzh/96Ef60z/9U3k8HsXjcWWzWYVCIQUCAa2t\n",
+       "rSmRSKjVatnogcUJIT1yTPTM4/FYyWTSHunValXxeNws+fF43HZ0Zs3r62utr6+b5SkUCqler2tr\n",
+       "a0snJyfa3t42gX4oFDLKNxgMKhQKWY1yMBjU1dWV9SAGg0HVajWrLD49PdXTp0+NrZPuyI6trS1L\n",
+       "x19fX9fl5aXtkM+fP7cxBMx9fn5en332mbrdrnZ3d602Ar8jTz2nmTgej6vb7Zp9isMiysNwOGwQ\n",
+       "4k9+8pOPfv8f1c5M2iUBgSRvMkN6vV6z+gMvpdNpIz+YHUEIMLPiY4O1Y94Lh8OqVCqW57awsGCL\n",
+       "Cr0Hj31ERDgyFhcXLYC71+up3+9rY2PDIgMYSWDOeEzzSGdWRdsxHA4t0IZ6BixJ/B6NRsPCEBln\n",
+       "nDM5ZAavjd/vNwzaGYzIYsSjyPdMJpOmbcZeJn2bphqLxUxpB9kCdOfMqfvY61EtZsiRlZUVvXv3\n",
+       "znQZ3W5X796902QysSDver1u+XPtdlvlclmJREJut1v5fF6VSsUeyc1m03Irbm5uLGMDsB+XBrju\n",
+       "zc2NKpWKnj17Zsowboipqbuy+VKppHa7bYs/kUjo6OjIAhnr9brh469fv7aid1AY6e7mrdfrxryh\n",
+       "c/Z6vcrn83agmp+ftzEDsRNfH0c57B/5eGTAodWW7jKXcZTwmkmyjI1yuWzumHq9bo6ZTqdjyAjE\n",
+       "FMQSWXrLy8sPprMf1ZjRbDaVTqdtpyOKCikjj1NKfLDkSzKxEaQBsBJzK0wcNCxO72AwqHa7LemO\n",
+       "NUOYjpCGmwE/ICIlYr6QrTJ+FAoFs13RNxIKhYwuR1dcLBYth47gQunbInZgvampKVUqFSudn56e\n",
+       "tlo5bj4SlQqFgon7nz17ZhLWy8tLi9T69R3U7XbbOEVGHjsuh2bEWPv7+xani8oPRWEwGDS57sde\n",
+       "j2pnBvQHW56fnzeM+Nez0YDC8Ag6U/cR4CCfnJqaMjqXnTMcDmtubs7EO9Vq1WZRgllg8lqtlh36\n",
+       "iBUAcYFhA6d10urO3Qu/IQE0zn4SFheoC1Q3PdzgwMg4ybpDa0H6EvkXjDDc4IifeB3xBwKzES1G\n",
+       "tvVoNNLFxYWNSOhbnDsvqBFxw+122yxVH3s9qsXMQgCdIKKVncBZnshj0O12W+wAMk52UkpoWFxo\n",
+       "k1HBgXQMh0ODvNAozMzMWOAih0SanogL4CnBjMvNgyjK2b9XLpcVDAZVrVat74SnB7UX0rc5zexy\n",
+       "zOgQNpIMV5dkowtnBQgTbhYWLhlzmUzmnu4YgRLIDuIoRFPAhtwUzvDEUqlkoxBw50OuR7WY2+22\n",
+       "QqGQpVJ6vV6lUil5PB6LaYXo8Pv9pvElsIXdjMoCdijCDBcWFrS+vm4G1HQ6rW63ayQBuDHySHZv\n",
+       "dkWn8zsWi9nX4Y3HKU1GHOlHsVjMdCN0GHJQhBHkwDc/P28/Pzs31n7+/Wg00tOnTy1ViadDr9fT\n",
+       "0tKShsOhGXyxQfE6kPbJRsFNQrQYPzepnhAiBCViLA4EAhZAyUj00OtRLWbcCzgm2ClRqi0tLZlG\n",
+       "eDwe22OdBTAajYzxgn4lbw4Grl6vm1SR2RwGjhw24CYYMElmfJVkI87S0pLN5fV6/V6dWD6fN2fL\n",
+       "0tKS4ec8km9ubnR6emrzvCRDHiQZmgPqQAoROmznrA55QnQZMQj0+aFDTiaTFppDTwkwILENVBfz\n",
+       "c4BcuN1uOww6O7WDwaDJbgme/NjrUS1mQvyur691cXFhWg1gsc8//9x0EdQU8PelUsnqyKhWI5P5\n",
+       "7du3hgMjq8QdEQ6HbU7lZI4HkUgtmDEqJ5zVaUB4kBgwh8lk0hZdoVDQYDBQqVQyzbTf71c6ndZk\n",
+       "MrGRI5VKWaVZJpO5F15DQCHnBW7YmZkZXVxcWK0x7QIul8vaqNBP032Ip5Fsul6vp+npaQvYQZvR\n",
+       "arVUqVSMhGI+ZnYHJUE19x0D6Liw78Modbtd5fN5NRoNawotl8vW0XdwcKDRaGTRq/SQsMjpeEaz\n",
+       "ixAJzJbQQnQUiHhwtGCrR2ONDJLx4fLy0hKSyLkjdoukIDQmGFcR+ZdKJZVKJatem52d1fn5uXq9\n",
+       "nlVS8LnOInjYPKIR+H2B55zdJ8y75NhhFSsWi1b1wJmj2+3aE0OSYdwkK9FG2+/31el0LCqBA3u7\n",
+       "3dbp6emD3v9HBc0hYPd6vfre976nubk5vXz5UjMzMxbp6qws6HQ6Ojk5Mes7J/FEImGWfFLoe72e\n",
+       "WfMhGxYWFpRKpXR5eSmv13uvumxnZ8cWwd7enh0McYHjj6MaQZJFw4ICOKllnCuElsfjcUUiEZu3\n",
+       "wbdnZmb0/PlzDYdDPXnyxHZ+SdZ4hZaEgEefz2dtU7RM+Xw+vXjxwlAXyCfIGTq3k8mkCoWCUqmU\n",
+       "pfYHg0F7QnGQBFenKZaD69dff22E1ubmpr788suPfv8f1c4M6uD1elWtVu3FlO7mW2fVGVoCXlSn\n",
+       "JQr9AzMirmlEPbz5UMPkInPK55EZCARMT+wMAcdVTZImhzJm/larpVqtpvn5eQtY4VDFXE/VBeIh\n",
+       "mEksYhAog8HA1IDY/AlugZ6GTLm+vjYWcTQaqVqtKhAI3NvNGRMWFxcViUTUaDTMoMBODI6O6QGr\n",
+       "FWcCnDntdluxWMwCK79zZzuuubk5ffjwwaC1TCZjrBSjwuzsrG5vb1UoFJROp+X3+1WpVMyRzbzM\n",
+       "okXKSb4G1PHU1JQCgYBqtZrFFDgzI5wkDbs7uyxubkypqNIwdYKY4HJ25kUzOkBiZDIZY+awL4GH\n",
+       "w1SilYber9frarfbBkcyqoC7wwRyYC2VSmo0GjZ2kcXc6XSsCoNFSR4fOmVkuKFQSBsbG/Zz8jPx\n",
+       "/UKh0HcHQOdVrVa1tramy8tL+Xw+bW9vm9glEAioVCrdOyw542+RJPr9fkWjUQWDQWUyGdPgYnWq\n",
+       "Vqsm6SQDGoyVnmwOQYSfoKS7urpSs9m0PhUSQHF8I8h/+/atHQ57vZ61vjpd4UdHR9Yjws1B7C5K\n",
+       "PpfrrlC+2Wxqf3/fVHTdbtdGFJfLZQgDiaLYnaDV2YEJvuEgDOYcCoWsQo1DHXMyQeK5XE4fPnww\n",
+       "Idji4qLdGPV63Wb7h1yPamZG6JJKpewASDBgPp+3kEHeuFAopJ2dHROVj0Yj272vr6+VSqWMlUJc\n",
+       "g+vbyfKRIgTRQdo8VDS1vIw08XhcuVzOHu+wiqPRSD6fT8vLy0Ydh0Ihe9zDsN3e3mp7e9tiEKh8\n",
+       "QxvsdruVy+WUTqetuoLwGoRG+CA5OyB6oj2An2t1ddXo7HA4bAxiIpEw1wx6aL/fr2AwaOMT1DUx\n",
+       "ZKjj0MzQiOXxeKyi+J//83/+0e//o9qZcWBzAEGAg9A+mUza6Ztynf39fUMxJpOJFfBcX1/bgREZ\n",
+       "KOmfvNGEuEiy3ZkF2u1274nvcZigHqNtdW5u7l4gDfoN4C7sUbhMWIztdtvQievra2uMku4OqLFY\n",
+       "zNCBarWqZDJph1/6U/BHIoDC7Y0znYRSMPSTkxPd3t6q1WqpXC4bgTMzM2MbCSMX/06SvSYLCwuq\n",
+       "1Wr2++LIRmP+HTTnuBDOg+9ie7q+vraoKnZBDnBPnz615HuwY2odnDJHJ9RFDgR4bb1e19XVlSVr\n",
+       "IhGlv4OcOxYmnSXxePxekidaCXZ44DNQFFRsMG6YU5lTIYRwi0AAgV9LssMXMzI9JSjdiKvFGuUc\n",
+       "d1h4HKR5kjHmIOpyPqGAE4fD4b1K4mq1aiNJr9fT9fW1qfA++v1/2PL563UNh0PLWuZg02g0LBG+\n",
+       "UCgok8lYKEsul1OhUDAvHnhrsVi0GZhoKjQYZFJw+GIxSHeqvXa7rVarZSJ+BP6o0YbDoSUsUcng\n",
+       "VPaBXbMoGZcGg4HN+9yU9IFgU2KmPj4+thq5crmss7Mzi9RijmYnHA6H9rrAmBJJxgFPkiWoViqV\n",
+       "e2eGbrdrbng2DnoJQS/q9boajYYqlYqurq5UKpW0vr6uXC6nUqlkAq2HmFmlR7aYwS/ZedBidLtd\n",
+       "e0Gvrq50dHRkhkzcKYSysNtymmcHlGT/XpKpzfC89ft95fN5LS4u3mPYmFNRhXHjsKgvLy9tETnN\n",
+       "AVwsDNqiRqORPR0k2ciESo2oAAgOoDwOb8yseP9AQHjNiGXAWOCMmG21Wga1URXHmMNZAzMArxOv\n",
+       "HSo5RP50d0Nr06L1kOtRLWZYMvSx9GXMzMwYFhsKhbS1tWWdHEgZ2YVAD5hlWZSk83AgDIVC5jJG\n",
+       "EQaS4gxIBGFwLlB2WA5v4N7sxNyIhM0w96fTaZvtna1N/PvDw0ONRiOtrq5ajjTin2KxaGMLvkG/\n",
+       "32/IBloKn8+n9W/KPm9vb7W0tKSLiwsbD6hThjTh4IwGhQYAdnwSlgh2dLlcRnd7PB4jg7xer6LR\n",
+       "6IPe/0eFZsBixWIxvXv3TrOzs9rc3JTP57M3mF1xe3tbxWLR2DRs99VqVdfX1/YxDk3smDRHXV9f\n",
+       "a3l5WePxXec1LmTiacmzwANHAIyzZoIQ8m63a8U30l2YDYuLg2cymdTV1ZVWVlYMr0aVR+AiwY8c\n",
+       "NmHvarWaYrGY2ZkIKV9YWFA6ndbl5aXdVLlczhqk4vG45ubmtLu7azcNIZM4wbGeSTIqPxAIWJ3F\n",
+       "zc2NVldXVavVbOHjZWQnR2m4s7Ojzz///KPf/0e1M+ORo0fPiQAgeOFNxkFSKBQM3AfzJaiQAxCw\n",
+       "HE5vbgpEPJze6aeem5tTqVSyXR4MlnBBxgoWPLs/cy15xuC25+fnhs6gDQYblmRhhbBonAP4Nzwh\n",
+       "yuWyHUoJRgRZ6PV6qtVqJk7yer06OTmxUYA0JHQfmBiYnUGAiAZuNBrWNksjAQdVwhLJZCY64Tuc\n",
+       "2XGVy2WFw2HLV6vX63b4a7Va8ng8Jj6XZD64VqulcDhsaAJZa+12W2dnZ5YIxOO01WoZ9Ypw/ubm\n",
+       "xpzVR0dHJsGkVswJ44GtMoMzr5MPHQqFzKsIZutM5eQQRh4yEtBcLmd2KKxUCPdBJTg3YAwgCkCS\n",
+       "RQDTgYi+m5gGkvx5LWEO+Zk5dIOPowakJgMokKeTkzWFoXzI9ah25ng8br44Kgjy+bxRxbVazRg4\n",
+       "qOFOp6Nut2u47vX1taEUaHWPj4/tjWMm59CCDmI8HlupeSKRsMcxlipYSC5O8CQmtdttRSIRa0Jt\n",
+       "NBrKZrMWWlOr1VSpVEwQxS6Nmfbi4kKLi4uqVqu6uLiwRdPr9VQuly1SDMaOuAQ0FaAz3W5XJycn\n",
+       "drOiyWYnBfa8vLy0cabVaimTydgTS5KJn/g9z8/Pza/Izdbv923Momb5Idej2plJfy+VSjYjr6ys\n",
+       "WL6cJDvI3Nzc6LPPPtMXX3yhH/zgB0asBAIBffbZZ0qlUrarUSkGIkDGm9/vN6yU+ZJdkAXe7Xbv\n",
+       "FdRD1rBTBgIB7ezsWCcJBk+CDMFx+/2+Xrx4YcL3TqdjcWC0rJ6dnSmVSulv/a2/ZXpnHCYXFxem\n",
+       "5QZ18fv9ur29VTqdVrvd1tLSkmq1mhKJhDwej2klIDyQBEDwVCoV7ezsWIkRO+zi4qLevXtn4vte\n",
+       "r6e1tTXNzMzYzO/xeHR2dmb4NV3cJycnH/3+P6qdeW5uzmztsVjMnBXAbPl8Xs1m0x6bmUzGegE5\n",
+       "2DUaDZsdS6WSKdd45BPgQmqPJJtnKefhUINNn8c9eDIkAl8XvTK2J0m2izF7M+vzcUYFbrhms2m9\n",
+       "K2ixEQixg1PGzhOJ1+X9+/f2hOp0OioWi4YXS3dPD84ZPLHY+amLgNZuNptG6sBoAk2SntRut3V8\n",
+       "fKxoNGq/N6/TQ65HtTMji+z3+3aCR3WGO8PtdlvlApoN9BvIGSWZH45Cmn6/byU/XHjnCHRhN2bn\n",
+       "BdelDIj5GCUeQh9nrjTxB9DClNlj/8LTBxtXqVQMbuSpU6/Xzeofj8c1NTWltbU1EwjBPM7Ozqrd\n",
+       "blu7Kwwo9PLs7KztukCbkuyJwNwrydKMQqGQpDtnC2MRmhSv12tPSOS0QJy3t7f2BPzY61Et5nA4\n",
+       "LJfLpcXFRW1sbNjBCvoV8+nl5aXZgXCOhMNhJZNJ5fN5G09OT0+tyw4xPRcQE2MA/w7smZ2cXZyg\n",
+       "bg483W5X6+vr95RwpHpiu2LnRc23uLho+g8Sg4DmJBl2HIvFbGFyCKPnMBaL2cF3fn5eT58+tdHF\n",
+       "5XIpmUyaFpuK44WFBRuDCDkMBALqdDpaW1uzBiuIFlKOMLSSeETjLdAfehmIFDaSj70e1WJuNpty\n",
+       "u926vb1VJpO5lxUHAZLNZi17ghgC2Dh27Q8fPphz2ykeGg6HarVa5kjBjEoKPJoM3lBwZr43OxU7\n",
+       "OTDeYDCwXZL8Z0YLHNZoj8mvKJfLVtADCQMaAbFCbwvMIT+v3+9XqVS6h9IQSUB/S7/fN80xwZBo\n",
+       "tIPBoIrFohlYUe/Nzc2ZlDaTyZgzHjtVOBw2zTidhdwAzPIPuR7VzMwjFScxqe/RaNREQCwO5sdk\n",
+       "Mqn5+XlzO4BCgIxgwSephxefGohAIGDfL5lMam1tzbLbgL8QMSEVrVar9rGbmxtrkp2fnzfxfyAQ\n",
+       "UCgUskMbkQI8/nF8JBIJxWIxExmhpGO0wW3CjE9/H/0sXHNzcxbTAK0ej8eNHOFjvA5EgPH9uPH5\n",
+       "upubm/fy+5CL+nw+RSIR+/md5wvGmI+9HtVi5sXhDgfIBw3IZrPGqGGopN4BOpv6MzLYgsGgNjc3\n",
+       "VavVbFxwluUQQTUej1WpVIzggFQBt2Xhh0Ih0zOMx2Nzq9CV3Wq1lEwm/41drFwu3xOx8xRwEjcz\n",
+       "MzOKRCKmTUZQ5fF4bBbP5XLWXotHEXUdrhnID+j96elpra2tWZE9MzWifV43btZ+v2/wJgZW1H24\n",
+       "uSFgODgvLi6aM+Zjr0e1mAuFgkXS/uIXv7BTfrVaVblc1mQy0dXVlT58+KDl5WW9evVKX375pVU4\n",
+       "cBJ/9eqVGo2GcrmcMpmMXr16pVqtplqtZmgA1n92YP5cqVR0c3Ojk5MTC4chEPHw8NCQlFwuZ8RM\n",
+       "Pp+3SgS3261KpaJXr14pn88btkv9mXTHdB4dHSmXy5kOpFwua2pqSqenp8b2oUU+OztTOBy2Gxg2\n",
+       "k50wn8/r9PTUkvcRMaGEazQa+uKLL1StVrW/v2+51cQiDIdDy2gmpgyJLBsMgqpms6nT01PL9cP1\n",
+       "3W639a/+1b960Pv/qBYzpYy4HkjZQSS/sbFhyZ29Xk8/+MEPTLWFLoLYgPF4rKdPn9oBEUMrJlC/\n",
+       "32/sFoQKJ3b6Q3BiE8HFqBAOh/X06VMjDZgXp6en7ZCJdYlHujNQhlSgaDRqWuR4PG6LxO12m/uE\n",
+       "wzAYOp9PfVuv11MymTRBEhG+19fXdkhk502n04Ybc+iTZBUa0l3O3vLy8j31IsmnHJhBl54+fSqv\n",
+       "1yuPx6PxeKznz58/6P1/VIsZK73L5bIXVJKhDTRHsWOxA6Gsi8ViFrNFUeX8/F2v3+bmpgmPmP/c\n",
+       "brei0aiRCk7jJrAVHkMWOoL2arVqijvMnGg8pqamtLu7azl1SErJvvN4PIbrJpNJ0wwnk0mzIlF1\n",
+       "jO4jkUhoYWHBbmTmbhakMwgdMwKNt1D9l5eXCgQCFuMr3TnRWfTkSIP+IJN1YuFer1exWMzS+jmE\n",
+       "S/qOznZeULvMlbBUKL5ub291fn6uy8tLzc7OWu0BWgMsTufn55Jk4ppsNmvBhM5AFEJaqO7F/QxF\n",
+       "DNWN25tkTxJ+FhYWdHl5aXMnYiEe97g3bm5uDOEg563ZbNpj/vb2Vvl83tqeyuXyvdbZw8NDm8n7\n",
+       "/b7Fg/Fz5/N5q2Zot9tWIXd5eWndKDc3Nzo4OLBxC/koC5cRjXMC2R/Ak6BFJDIBo5JgipHgIdej\n",
+       "WswEHQK+k/5DUPj09LQleEKgQE3Pzs4qGAzargcEhYE0FouZe8Lj8RhGTQbG/Py8Op2OotGoEomE\n",
+       "LSyQE3Y35lUc106BvzOQBY2JJBt7vF6vif/BvRkdnKo5SYbEOLMoqK3AbYIlCxiS34nF67wh+TsM\n",
+       "uYxNXq/XUolAefg40CS0OcgP4xWSAEijh5ImLnac/79fLpdr8g/+wT+wEz96hX6/r2QyaSd1VGXY\n",
+       "ltAkYOwk8Scej+vNmzd6/vy5ut2uCYUKhYKi0ajNnGCr4MnORy07GOwaVqTZ2Vm1Wi3FYjE72F1d\n",
+       "XSkcDqvdbisej8vv9yufz6vT6ZhQH38gdqx4PG7fA0SEYHJuQq/XazJYzLyQLYwO7KQcCsmjLhaL\n",
+       "CofD1jRVLBZtgZIgOh6PbfZGk03RJcExHo9HlUrFCn1YvAicON/0+3393u/9niaTyUelwTwq0gQG\n",
+       "6+rqSq9fvzafmd/vN6lkq9VSv9/X2tqafvKTnygYDJpA6OrqSpVKxaxCV1dXqtVqqlarhsFCOuDz\n",
+       "YwaV7mbvYrFo3YBPnz5Vq9XS0dGR0um06alXV1ctpAWYDkwYGvj8/Nx0zRwgm82mNjc3rXAHOhg1\n",
+       "ILkcw+FQqVTKRqzRaKTNzU2beYHhkJ1yqEXGyY3FXD8YDEwlR+dhtVrVD3/4QwUCAWv2Iq8OcokG\n",
+       "WSSh5XLZ5KXOThUQnn/9r//1g97/RzVmEEqCKL7b7arRaNjjFlF4u922w5NTYkkxPOwd1DIh4ZLs\n",
+       "UYtP0Kn9IEmJ9MzXr19rMBgoHA7bbglujKgGOI6F1Gw279HYwIbEBVSrVYsEYKcFHgO3vr29NaYT\n",
+       "RAWRENVwVkvA8QAAIABJREFUfN7S0pKxg1QQgwlLsqyQer1uMtp4PG42KA7JjC0gH+zyvB/ENvh8\n",
+       "PkmymF3iBYBUH3I9qsW8uLhoijd2JVgz52wK5MWjdnZ2VpFIxA5ACMWdSADhiSTvc8Ch5BKkgxAW\n",
+       "DpV4AUFFsGeRXYEB9OTkxCJp+XuE78zfQIiI8Jn5CbEhzFH6VvjDeIWjptPp3KuDy2azpnzjTMHN\n",
+       "TAQu/Yr8t9ONDvvH/wjJIXqLZCMkAI1Gw8J4JKlYLJorJpFIPOj9f3RjBrQoHSS8aIRe0ylCdFS7\n",
+       "3Zbb7bYT/vT0tIrForxer8rl8j1GDFwXXJTqXoRM7Ix8P1LzeVSjaU4mk5bGD9THrE5PHjtXPB63\n",
+       "WZ+EoFAoZPnLzOqNRsM6XUBqJFm9Mon3NMGizWDUAbuuVCqm5yA1n5w8Wgb4MwffwWBg2XdcvLaE\n",
+       "koO8AP1dXl4aHIlW+qES0Ee1M7tcLgtD3NvbMxVaMBg02IeUIGJlUWwBhd3e3trBJZVKGbLBDMkb\n",
+       "y7jAYYbPY1asVquWME+OMp1/tVrNZmXmSfDm0WhkPSHM5sCNhULBPIDMpxzsKNEJBoOm6GPMIqQR\n",
+       "byQRWyAexAIQZ+sUHY1GI8OI6RPHTTIejw35yGQy9vry88bjccP0cfOgvkun0+b5cx6YH3I9qp0Z\n",
+       "8Yvf79f79++VTCZNWBOLxUweWi6X5XK5lEgk7NDlTBFiMSBVBD8lbHA4HGp9fV2NRsMqJyh5h1Rg\n",
+       "XJhMJpa/jNAGtVi73baDGZFeyWRS29vbevv2remsWQigAR6PR8+ePbODHAuTcJcXL15YqOL09LSy\n",
+       "2aw2Nzd1fn5+zx0j3R3aqFXj9eAGpwWWtHxQG24AFIqj0cjkrPgVk8mkjRs8jci7W1paUqPR0N7e\n",
+       "niqVijGYoVBIf/qnf/rR7/+j2pl5wyWZ4ByigYxloKVYLKZ8Pm87H24TSdrc3NTMzIyOj49NYMSh\n",
+       "iMchCywej9/r3O71eqpUKqZrzmQylrhPvluhUNDl5aXi8bguLy8tnosZvVqtKhaLmY55a2vrXqYE\n",
+       "CAVRsJhUB4OBVS/g8CB/7uzszOIDOGxhlmWBIgIiUgsJLVplapTj8bglIlEGRFWFU9F3dHQkSVpa\n",
+       "WtJgMNDXX39trCCQXzabNeIIVOhjr0e1M09PT1vZTb1e1/Pnz43xOj8/1+npqc1lBIAfHx9rb29P\n",
+       "0p3gBjPp1tbWPSoW6AwvHfkapVLJZlQgvPn5eXvs/uAHP1A2mzW3NTAgVDZ6ZIIGqVxbXFy0iC2n\n",
+       "3SoQCBgbiJ7C4/Ho4OBAq6urRmUTJYa7hEX91VdfqVgsKpFI2NhzcXFhkB1pR6urq/rlL39pGRvn\n",
+       "5+eanp7WxcWF+v2+hYk3m02dn5+bxezk5ERzc3M6Pj7W7u6uBoOBjo6OrLSICggiGDhwl0olvXr1\n",
+       "6kHv/6NazF6vV/1+33ZLQkl4rK6trZnJlF3rk08+MYOr2+3W9va2fv7znxuIv7q6ajt6NBo1JouG\n",
+       "15WVFeVyOYPLnCbT1dVVEx1FIhFVKhXNzMxYKAzjAV0shULBXC6wlzRGoYHAcJrJZGy3Ho/HWl9f\n",
+       "N5auWCxqbm7OPIG9Xs9Cb+bn57WxsWG2MuA9dt1yuayVlRXD7GEMd3Z2DO2IRqMWNjkYDJRIJGx0\n",
+       "QgcOk0r4JK8NEQtUFK+vr9sTb3t7W7/61a8++v1/VGPG5eWlZcelUik76WMBIlEIdAHRvtfrVTwe\n",
+       "t11RkiW+j0YjraysaDAY2GmdR2Kn09Hh4aEJlXw+nyEf6XTaDkHj8dgOgWRXcFAjZ+Lg4MDgsMlk\n",
+       "onq9bnh1KpWyQktaVN1ut4UpDodDC11ETLWysmJS1V6vp/39fatj6Pf75nahv5sdGVsVoqZut2sB\n",
+       "i+z2zogwmlyvrq60vr5u+mTcOWg3aMjNZDKm94DVhIl1+is/5npUixlrOzJDFiQHtWazaYvc6/Va\n",
+       "VgMaXFg5aG1EPGdnZ4pGo9YJgjl1bm5O6+vr5pQej8dGoDjZPEkGwbVaLYP4gAQjkYi2trZMmgmR\n",
+       "Qr9etVpVu922sG8OVaT8o4mQZHoOerbx4q2srNzDgKmyIEgRTQhPEuZXcuiA0IA8MfjC5C0uLhq2\n",
+       "Dj4OdU6KaqvV0vLyspljnS6VeDz+4Pf/US1mBOiSjJzgQIhGAsex2+1WPB7XJ598oqWlJQsMHI/H\n",
+       "Jtl8+fKlHRihXCElgsGg6TAk2cLk4AWVy8GPz5mdnbXFj6aB/7+5uVEsFjNIMBAIKBwOW0Ycrmyn\n",
+       "OEqSkSOMWRwMCasBmgNyg11kFneOQNwMzPZ8HnAjgiiUe05tBi4cDpTs2iAt0p0cN5lM2muGis7l\n",
+       "cln18Mdef2WL2eVy/W8ul6vscrneOD72P7hcrn2Xy/XK5XL9Xy6XK+j4u3/ocrmOXC7Xgcvl+k8d\n",
+       "H/8Nl8v15pu/+5/+bd8TlVe9Xrf+jIuLC1Oq8diFkMC6wy4ObV0oFNRqtXR8fGzlOuPxWMPh0GA0\n",
+       "sF7eQElGF8N6FQoFix+4vb01ZwXfi1YmaGEKKsF7OfyxGDn9QwPTBzgcDo12R0PtxIwxDpCNDK0P\n",
+       "Y9jv920sYccmQJzMOyIJIH+mp+/6vmFF6WzhfQBhwYBLyLskk+fip+z3+/9eogb+Knfm/13Sf/Zr\n",
+       "H/t/JD2bTCafSvog6R9Kksvl2pP0X0na++Zz/onrW+3i/yLpv5tMJjuSdlwu169/TbvYMaamprS/\n",
+       "v2/BfZPJRI1GQ6enp7q4uNDh4aEJdw4ODuzNx692eHioTqdjweJnZ2fyer1qNBoKhULW5soC4xH7\n",
+       "6tUr25FwTPOzYIxFSE9kVqPR0MXFhT3COcA1m01dXFzo+PhYNzc35iAnu6NcLiufz5sBFX10t9tV\n",
+       "oVBQqVRSJpPR+fm5eR+pJgaCa7fbKhQKyuVy+tWvfqV6va5ut6vj42PL0CO4/e3btyqXy8pms/bU\n",
+       "Y3YGmoQwkWTjGK8hpBWkETcXeXdnZ2f6F//iXzxowf2VoRmTyeTPXS7X+q997E8c//kLSf/lN3/+\n",
+       "zyX9/mQyGUnKuFyuY0mfuVyuc0n+yWTyy2/+3f8h6b+Q9OO/6HuSk+F2u/W3//bf1u3trT755BPN\n",
+       "zMxY0TuObMYKbO6IiiTp5cuXSqfTBkdtbW3J7XZrfX1d2WxWKysrVgQECTM1NaUXL16YK3p2dlbr\n",
+       "6+tyuVz63ve+Z07qbDardDptemVK0t1uty4vL+X3+xWPx00HDGGztrZmdRDr6+sqFApKJpOmiSYs\n",
+       "MhKJmN4ZTUetVjNtNiTMYDCwSAG+z87Ojt68eaO9vT35fD49efLE6HRMrLu7u5aGtLy8bPkZ0PdA\n",
+       "gCSyrq6u2rgBMrO1taVSqWQpqM4O84ODg49ec/8xobm/K+n3v/lzStLPHX+Xk7QsafTNn7ny33z8\n",
+       "L7xKpZLi8biq1aqy2ayWl5dt10BRRwALLxyULimVKMygYqvVqrmbCVlBylgulw16IkR7NBpZeAyH\n",
+       "QEYdSRYUiPMEkiOVShl7SS6Fs5EJRIGiHKfAiTkVbUomkzE9BEKnWCxmjhc0Iq1Wy0rn+VmI/4WG\n",
+       "50xQq9UsTgsChQMfB1IUipgPoLqLxaKp96DrV1dXLYwccdhDYDnpP9IB0OVy/a6k68lk8s/+fX5d\n",
+       "ZmMyKmDv8PeBBIB4rK2tWRMqM7XT/+dyubS6umrWH1hEqnrpsuOkz1wsyYJVoJ8hO9xut9UzoMxz\n",
+       "u92q1WpaWVkxFABIb2lpycRBPNr9fr85OjhcohumgdUZASbdVRlz6MTU68y1SCaTFk0g3ek0wKoZ\n",
+       "gYrFou2yzN5g9JgIQqGQotGostmsHRIp+OEACyoTiUQMPRmNRtrY2HjQ+/8ffGd2uVz/raTfkfSf\n",
+       "OD6cl5R2/PeK7nbk/Dd/dn78L429+dWvfmVU72/8xm/cK14HnyWgpVqtyufzaWNjwwJi0D4sLy9r\n",
+       "b29PP/3pT03WiZLs5uZGa2tryuVy1lZ1eHho1iLGi263q4ODAz179kyxWMyYRKA3dA8o2UhGcrJ8\n",
+       "CNpjsdg92aXb7TYTKZgxBzev12s4Ljg6yAiQJLsz4iYSTImgJXv55cuXqtfrWlpaMhQim81am20w\n",
+       "GDTtNYJ7SZbFQUg7cQwgMn6/X6PRSB6PR7u7u3rz5o1mZmZUKpUetLb+gy7mbw5v/72k355MJs42\n",
+       "lv9b0j9zuVz/o+7GiB1Jv5xMJhOXy9V2uVyfSfqlpP9G0u/9ZV//t37rt6w+rVQqWTE7kkcyjQuF\n",
+       "ggUJQqWS7xAMBq3sxuVyqVgs2mGNKKpisWh0Nrsr6jBQhYuLCy0vL1uaJo9rbFGlUslkk91u10LS\n",
+       "mX3b7baJcDjE0sXNzgdWWyqVrJuEeje+LwHmCwsLdhAEDkNgDxsYiURMK720tKS3b98qlUopm83a\n",
+       "Tt3r9Ux8RSQYSI8ku+Elmd2Lf9vpdLS8vKzT01MTds3MzFjp5uzs7INm5r9KaO73JX0u6YnL5cq6\n",
+       "XK6/K+l/luST9Ccul+srl8v1TyRpMpm8l/R/Snov6Y8k/b3Jt+bEvyfpf5V0JOl4Mpn8hYc/SRbR\n",
+       "yu6Gg2IwGGg0Ghn9y04pyYoaoamRTALoo0nmoAhVzWjCIubR7XLdNU8lEgl7jDvz6njsY/ik4RWB\n",
+       "Ok4M0uWBtnCNkNEhydhDyBooZp4+y8vLJoOdn583+prdEuaQ187j8RgjCgHFrAuawgJFF47UFEaT\n",
+       "QzgYu/Stkdbj8RgsB1bOkwmDwYPW3GMytP7u7/6uUdIEaM/Ozlrw+Oeff67NzU1ls1nF43Elk0n9\n",
+       "wR/8gT777DObBYmJhREDCy0UCiZ+j8fjljdM2QzwGXM5Yw3wFG+oM2fO7Xbbjl0qlTSZTMwQSnYb\n",
+       "bBq/C7oTXNpQ0pKMweTgScJTqVS6V1fMmBGNRnV+fq5QKGQ4MomkHJ4jkYjy+byNDPl8XqlUSt1u\n",
+       "V8vLy2q32ybCkmTudzYEbgTC1RnH8P+dnZ1ZeVChUNA//af/9KMNrY+KAaSiIJlM6sc//rFSqZQO\n",
+       "Dg50cXGhP/mTP1G/39ebN2/05s0bjcdj/f7v/75Ze7DznJ6e6l/+y3+parWqn//852q1Wnr37p1V\n",
+       "O+CHk2SHsVAoZITK2tqaIQHhcFiBQMAe04jrA4GADg8P1e/3VSqVdHBwYKMAMbK1Ws28f+RwkMo5\n",
+       "NTWlX/ziF5Yf1+v19OWXXxrN/MUXX1hgeKlUUi6XMwqdw5b0bTffmzdvVK/XVSgUVKvVlM/n5XK5\n",
+       "dHx8rC+++MIIHjDvarVqss2bmxtdXFwYG0igOOVBbAbFYlHSnU3q8PBQs7Ozhq93u11lMhl99dVX\n",
+       "D3r/H9ViJgCl3+/ryZMnqtfrFsZHd3QoFNLu7q6ur6/15MkTo1LxqE1PT1vJeiKRMDwYaxNs1fT0\n",
+       "tEkaLy4u5PP5lE6nLY+OVM5CoWBZ0fQJokXG0r++vm7QVrfb1Wg0sogxRobp6Wmtr6/r8vJSXq9X\n",
+       "qVRKt7e31gOysrKig4MD3dzc6OXLl7YLLyws6MWLFzZz4wFEVVir1Sz9aX193Zg9SYYrEweAXgVF\n",
+       "H/G3CI56vZ7djLwfyGiXl5etgWp9fd0MrrCyZII85HpUi5kTNwA8eQ9OJwnQG91zSEFRlzGSSN86\n",
+       "k3FUIIqJRCKG1YJRS7L6taurK21tbanT6SgQCFiSfiAQMNE+nXjT09NGYaOqYwQBZnS57nrAsUnR\n",
+       "XUgMGJQ0Og6n3xBcHdMAX5sx6vnz5yYLZTcFLw8Ggxb2ArkB9Mesz8+NrgP1IU8t4monk4lCoZBS\n",
+       "qZS5W9goPB6PgsHggxfzo9Izx2IxE6RfXl4qFospk8koHA4rm80qFArp+PhY/X5fS0tLNsNho5Jk\n",
+       "GRE8sj0ej66vrxUMBlWpVNRsNhWLxTQej42Fe//+/b3uEyrXvF6vaXj5u6mpKZVKJRWLRQukubq6\n",
+       "UiKR0NnZmZW2I5EcDof3lHfFYlErKyva39/X8+fPTaiPl7BQKJgznfkdzTF6Ehq3pqfvmlLb7bZp\n",
+       "kU9PT+3gR+D4YDBQtVq1kWlhYcH8egiIIJdOT08N2qtUKmaOKJVK2tzcVLvdVqlUuhesjtPnu3gu\n",
+       "x4VMkkwLshw4WNFfMjs7q3Q6rePjY3uss0uTtUzyJocw8h1wMFOV0G63bUTx+XxaXFy02K5gMGiP\n",
+       "dnZ3AgnxDaKrAAGBoCGDAocG0bh+v99y6cjUIHOaoBrIDvTb/A5zc3OGcqTTadMo82QoFosKBoNG\n",
+       "vpCvwe5/dXVltWtQ5mg9oPE5CLOjEwFMljQ9jKA76KVvb2+/W8zOy1lsg4yTiFpiVSXZYxBLEIgE\n",
+       "lDMWfhan9K3kE3cy8zM1ZxRAokNGXgnNzfzqtDEBR8EsAmexWLFg8W+RdbK4xuOxQYXOMcVJjDBq\n",
+       "OGFLYDUYRIJvwHrRqGBqIJODjDk0JYw9kmyBsykgRWWkgIUFeYGlhfzhd3nQ+/+gz/5rdlGzi4YX\n",
+       "JRn4JzkOzJEQDGQIl8tlDYdDbW5umgwSWWOpVLKdfm5uTsFg0AISmc1RzzUaDWP0isWiOS7y+bwx\n",
+       "YdL9EBVy31hwpH/ys7EIcKrAoKFrmJmZ0eHhoQaDgXw+n0qlkmKxmBXgwM6RPQdjSLkn4whxvaje\n",
+       "pqamVCwWbd5GU4K2udPpGAGERoUAm1wuZ/S8dOcE4hBZq9V0fX2tXC5ndDkHx4+9HtViTiQS1uhE\n",
+       "ChHJQ3SXOE/XqLXw9EUiEWMGEb4TYsIuWavVTL/ATIuoCMEPTVIul8t2ymAwqLW1Nbu5YO/Ynckz\n",
+       "ZidmROEg6/V6tbq6KpfLpXA4bHUPOE3G47EVVUp3RgUcNjjHMbtCX8PgQaKggeYQOhgMjKxBGgAd\n",
+       "vbKyYj/Xy5cvbf4lnsHn85keptVqmeMbNR8JUqQ31Wo1PXny5EHv/6NazOPxWJlMxmYyv99vmXMQ\n",
+       "FdLd3Eu/39bWlonv0TVgNE0kEqpUKuae9nq9SiaTNkM70/HRcHi9Xi0tLVmGBiHk7HbOvAlQFhYA\n",
+       "kkhgNeeiDgaDpv0YjUbm1pBkeRVouREO8XOQWkrcb6fTMRJjMBjY7rmzs6PRaGQMIAdkn89nLhny\n",
+       "QQhSREXIARhWlKfNwsKC2bcQOZHMjxab1x5U6GOvR7WYnTnCHOzQ3rKLcfLv9XrK5/NWeSvJhDg0\n",
+       "kubzeXk8HiuShD3DuUFkLoufpiocFhyOOASCmCQSCUMhwuGwhZTDCCL3JFmeGbrT6SgSicjr9erw\n",
+       "8NBw65ubG6PMJRn+DNTm9/uVSqXMGUIeBv4/YmxZ8HNzc/b3mFoZ3ahCY25HVxKPxy1wB10L5wC/\n",
+       "369EIqGVlRWNRiNVq1VzuyNaImnqIdejWswcYG5ubizIkN2ERYFM1OW6q4pAi4B+gbqGUChkownz\n",
+       "KYsKurpYLBpFjQkVySmRXcy/GE6vrq50dnZmRAGsIgc5xpRoNKqNjQ01Gg37POZ+dkQSmtCMsBMy\n",
+       "OqyurtpOjisGBhOTKq8XcQC4P/AK4qTpdrva3t62UBhcMtxA0PugGsFg0MYKKopLpZLcbrcWFxeV\n",
+       "SCQMD+dmeSia8ahwZtRl7KrQqOFw2BqQMGmGw2G9efNG5XLZ6Oj/l703iW00T9P8HmoXJVIiRZEi\n",
+       "qV2KiIzIyD2nq9CDBvpiw30a32wffDB8m4MvBgzY1wF8NGD40BfDA8xlAJ8MH+zxwJgFVWigcmq6\n",
+       "KisyMiO0UiLFfZcoiRIp+qD8PfmpptszCLnHM0J9QCIztVL8/t///77P+yzlclkvXrxQoVDQ/Py8\n",
+       "Tk5OHDXMzgrvGC0hQwAGE+xmhOUsLi56pMvDABYtyRyMRCJhoevs7KzK5bLFotiGzc/P6+zsTLu7\n",
+       "u1ZqYK8AFHl5ealCoWD1Nr8rk8k41mFiYkKNRsPkn3q9rkQi4TiKpaUljY+Pq1wu2zdjYmLCu/3K\n",
+       "yoqHIggaaJTByA8PD7W7u2sz9nq97j6iVCoZvgueZD8p5T7selI7M40MkibpfrcG06UejMfjtodl\n",
+       "58Gyip2SHbbX6xnCgojebDat8UN1Qpe+vLysy8tLvXz58gGRqN/vO0AHGKxer5sQX6vVPCkEFSB+\n",
+       "jF0YZQc7ZxA5wMZAuh9tLyws2DGoVqs5UJ4jHfQA6ma73bbd2NnZmRGS29tbW3ZR14O40ADe3Nx4\n",
+       "sBNkGA4GA1UqFcN2xWLR7xvlBfkpiCQecz2pnZlGDFvaeDxuU5WJiQmPUIkbY6GAXqTTaRUKBcXj\n",
+       "cX388cfqdDo2+wuFQrZwDYfDnohtbGzo+PhYU1NT2t7e9i6Lxk6SHX6INeN3ElIDAQj+CJNCGkwY\n",
+       "asik+FzQqDyfzztWAjTi66+/tnp8fX3dMRHHx8cPPKdJoZqcnFSlUlE2e69MI+EWjd/FxYX5FLe3\n",
+       "t2YC3t7e6tmzZw80mBsbGy7XwPVBRxAToCGkgQXC+9DrSe3M/X5fb968US6Xc5PX7/d1cXHhiIdG\n",
+       "o2GC0OLiov3fIPpIUrPZ1NnZmW2oMPWmFKnVat6hjo+PH5DqoVZiOoNrz3A4VL1e9y7H8czYmQkY\n",
+       "sn/gtEql4mP67u5Op6enJtNTxxOoORgMrMKen59XtVr1xI3EKRpLbA6AIAn9xCByZmZGBwcH5kpT\n",
+       "nkCBheRPuRU0LG+1WpZN1Wo1XVxcPNjt7+7u1Gq1VC6X9fbtW01NTRm/f8z1pBbz3NycXr9+rUQi\n",
+       "YbiLXS9ot4XnGjcUqwEYbXhZYDoOUYadjWwTJmw4HOEOCiuNySDQHoMLnPsh1jN2Bm/e39+33wV1\n",
+       "ftAOdn5+3gaGQesxSa7fiUoD1UmlUh7t7+7u2jar0WhYOQIGDwrDBA9iP68fe7JoNOqUKyBNsGp6\n",
+       "F0hS4OIseiZ/6+vr9gZ5jMpEemKLmXqQWAU67aC8iAVOh12pVFzXQoqBa8DYlv+HrwBmDaoB7txu\n",
+       "t01wl6T9/X0LaFutljHmfD5vsnuv17M5DMqPWq1mBGVubs67NJ4bwYcLTHc0GrkUQVgbjUZ1c3Oj\n",
+       "Uqnkr6O+pTGliQyHw6pUKnb1pLaHrFQoFDw1pTcgonlpacmuR91u105KnI5TU1NqtVoWFDB04sHr\n",
+       "9/s21HnM9aQW883NjW8wzRq4MCSZVqvlWF285wiKZDqH29HExITOzs5cFw6HQ5sMgrt2u10nNyUS\n",
+       "CZPp8edArIofMsOScDis6elpe1cgdyLRFRchfOBw2URYy2KT5IcLx/3z83OfCBMTE36Y4WmQdwLm\n",
+       "CyZM47aysmJJFrszTS6LsdlsuicgcAivaWLp4vG4rRXga2ACA2RIk51IJLypfOj1pBYzDdH09LSn\n",
+       "Sevr616gcHCDurOlpSWHVUqyiR/TKaT4DEqwBQiaDaITjEajzjeBiMP3Az3hSA+7DxsBYEQmjfwu\n",
+       "ToDZ2VnH/7I7go+jwGZnY0weiUQcQsQED74Jjvd8nAd5ZmbGcjGGRJiIg8VzCsD5CIfD9rjGdiCb\n",
+       "zer29tbQJR554PoIgWmOsWt4zPWkFjNvEFAcWjRG10ydGN9ubW2Z9A6sRRY2qAE3IxQKuUHZ3Nw0\n",
+       "V4PuHa7wYDBwHYjzJfwJfjdcYW4uDD+8jSGyB0MimTRWq1VtbGzYqZTaG27J7OyszSLj8biWlpa0\n",
+       "trZmuRVOQwyN4Elks1nrB6GgJpNJw5ZgzzxkTAPRkF5fX3tH39jYUKVSMcGL6Sjj/mg0+iDtFhNI\n",
+       "NpQPvZ7UYkaKD/US2wHpnrhPt87ErNvtGvynDCHQkThidhoSXJH+M66+vb11WhLHOLxqTgQaPZQa\n",
+       "+EPwc/k6dsi7uzu7DDGGZ0GDzoBrY0pIOQP8lUgkzL3ANy8Uuo+BCIfDymazDxybODnILJTkJCoo\n",
+       "pPw39ma1Ws2KbiaUkKPw+RsOh1bAwANnSsgABTbhp59++qj7/6QWM+NoGsGpqSmPpKlzEWeCxXJE\n",
+       "gwPPzs56QbBQZ2ZmrEiW5N2TkgDfDRyEGPeygGOxmPPzcPUJRolJ8uKnbtze3nZZgNIllUppZ2dH\n",
+       "8XhcpVLJDRz8aV4XjkyYrkDqAUYLwnih0L1/dafTsUwqiGxQjlG2DIdD+1qjSgfSlOQhDKw5BAF8\n",
+       "LhwOGxEiK5yN5ne/+92j7v+TGpqcnZ05944dFz5DvV73mz47O6tisejmjbw74hAODw/daNVqNZ2d\n",
+       "nblMOTo60ubmpmU/uI12Oh17Qmxtbbn+vLq6UqFQsKEhlriMsefn59Vuty1+xeNufn5e+XzeHhnU\n",
+       "ruFw2P52YNfn5+f+WzY2NswZAfcmsbXX62l5eVmFwr1934sXLxx/NhqNVCwWlcvl/PP/xb/4F/r8\n",
+       "888djDk2NqbT01Pt7u76fUVsAFsuk8moUChob29Pr1+/VqlUcvNMuVGpVLS0tKRWq2Uvjkql4lLv\n",
+       "Q68ntTPjesmUj1xn4hlCoZDtYBlns2hZJM1m07sLAthoNOpuGx5H0FMDDgURYpgWSlImk7FLJ8aL\n",
+       "lBZoCXHdX1tbs5woeGPZwWAA0rg1Gg1TUIfDoT7//HPn/DGIACLD5w5jG7B3yD4sWMb+GCJeX1/b\n",
+       "CRSUItjIAUNOTEwom82aLCX9xPfG9iyZTLoUopYmGFSSIzg+9HpSi7nT6fhI46hl12U0vLq6alus\n",
+       "0Whk53r0bvF4XOl02kcsOX/tdtsU0VQqZbgMsxQaH/R03FBIPEF4LJVK2RkIBAQHJh4ylM5LS0sP\n",
+       "HJfgFt/d3Wl1ddUWV5ubmw67R8mSzWb17Nkzu/FDj6Vpm5+fN6V0a2vLpdba2ppDNWG9ZTKZBxKq\n",
+       "y8tLy82C0i5kXcCZ2WzWxjfD4dC+IlgvzM3NGS7c2dl51P1/UouZHRi1RDCmYWtry8SfmZkZd/xI\n",
+       "nhKJhBse0qFAK1is8AyQZ2WzWTO+2A0B/5m4wcfAgzkozwKvhTIqyTpDjv5YLKb19XWrmEOhkCFF\n",
+       "ZGGQn6hzg0qQer2uarWqfD7vk4VkJ0orHvylpSULA2j0+B4oovBGyGuhLuZv4H2EHw2zEKTk9vZW\n",
+       "a2vLh+SrAAAgAElEQVRr2t3dVSaT0Q8//ODhye7u7qPu/5NazGSKBGEggmnwDgYdGAwG2tnZ0ccf\n",
+       "f2yL1cFgYBLScDjUzs6Out2uO3OaIzDkYLkQj8cde4ZcaHp62l27JEN4c3NzFnkiXAUGhLWHooMH\n",
+       "hN+DopkgeNAEgtqx2mURLi4umodM00qtzYOP1o9TDVydciv4wMJIDD688KGDDeFwODTXGk4Isi5U\n",
+       "Kby3CwsLJjc95npSixmnH4jfkIdALUAGTk9PNTc3Z34DA5WlpSWPp+PxuI6Pj7WysqLBYOBsPoJ+\n",
+       "uOGkU0FYmp+ft+snNxeoimMVWIvas91um6jPTkdZgRQJVXg0GtXq6qrd8AeDgS158XLGwBtjxm63\n",
+       "q88++8zZfCxy+gRI80CN5+fnfj8ZQgXLk2azaT0lpwR+0uDTjPWBAektqKnHxsaUTCaVTqcVjUbV\n",
+       "6XTszvSh15NCM0qlkgWcLFqST0EvMLvGq+L4+Fizs7M2DCSjo1AoOHgGtQjoAKoIJml4Gg8GA52c\n",
+       "nGhtbc3jcgxOqLdhx01OTnqELN2rv+nyR6ORlpaWVCwWLYliSNHr9VQul/0AQIrq9/v63e9+p62t\n",
+       "LRP5QXWmp6dVLBY1Go10cnKidrttZcr+/r752/A+QFfq9bpisZgd9GOxmE1j2u22TRJ5nzF2rFar\n",
+       "psIWi0VrGclIYZFjPXx+fq5YLGbBwodeT2ox4/7OwmFUHYvFHoD6LFDQg0wmY10bKAU8DJrFmZkZ\n",
+       "Y629Xk9bW1vq9XqOGEM0S1g8N3dra0t7e3tOj1pdXdX4+LhyuZzr9nq9rlQq5ZtPOUPdT6YJ5VNw\n",
+       "ZA8mXCgU9POf/9wLEsUHHOFwOKxer+fBENPDjY0Ntdtt/33s9uDk09PT2t3dVb1e98fA7kFEMK/Z\n",
+       "2NhQt9t9IGplMokT093dnXq9nnnbQdEEviYfej2pMuP09FTpdFrS/U7HgAQmGvo6sM3r62vHpJ2d\n",
+       "nXmHBY9GCRJUaLdaLT80hULBi5mSAlPzfD5vrwy8L3DWJJ6C6SDkHRh5MPCGw6FrS2xxiU0gFIhB\n",
+       "xuLioo6Ojsw7QXUdFLnCyhsOh/bBmJmZMf2VRAEefuipvI/kw7DjM1y6uLiwsTmUUTBxYpn5evoG\n",
+       "8lfgOY9GI/3www+Puv9PajGn02n1+30tLCzYXYjun1qQpk2S3r17Z1ZXNpu1ZAiNGtkjNHGor/f2\n",
+       "9ozjFgoFN569Xk8bGxsWnM7NzSmRSKhUKrm2DdoPIDQ9Pj424yyRSGhlZcXlBB5w4+PjOj4+tuv+\n",
+       "5OSkI4klmWvd7/eVy+UUDod1enqq6+trFQoFL3rcQKempoy5o/CuVCqKxWIqlUpuCIHVIN3T9EGY\n",
+       "wlCSMbskswShgzJtpZZnNH51daVYLGYPvz9wMwIXuXszMzNmbSHIzGQyxm0hDT179sxfDwMuHA5r\n",
+       "Y2ND29vbD7JQsJeamZlROp3W9PS01tfXlc1mjUlnMhlzP2DT9Xo9ZbNZw4QsgKAjP4gLC50HaPPH\n",
+       "cPdkMukSCNNHyENYyoJhLywsaG1tTSsrK/roo4+cFgBHYnl5WcPh0G5MPGDz8/N69uyZyVh40lHO\n",
+       "8NqICoZRxwh8c3PTgfAQmIDwQJbgxsBKfPHiha6vr202/vz580fd/ydVM6O4gLcQDocdp5bP51Wt\n",
+       "VhWJRFQoFOxnfHd3p1qt5mDzqakpvX//3i75HJ+vXr2yFRUmJldXV9rZ2XEzWC6X9dVXX2l6elr5\n",
+       "fN5JTUQVQ0LCzX5nZ0f1et1DHRpKOMSorJF/oStsNBr6/vvvtbm5qdFo5L+B+ndvb88EfUlefCS7\n",
+       "0rRFIhG1223j5ijMT09Ptb29rd/+9rfa2NhwNIYkB7f3+32HBd3d3enk5MQTTbjVmEySG7O7u+vd\n",
+       "Hn4Mwt5QKKR//I//8aPu/5PamZEtzczMWG9HPToajbS8vGzjFaAiSgKaHVAD/Jw7nY42NzedVBqN\n",
+       "Rm3R1Wg0HAXBtAy3e0oTBgo0YxzPxK5RI1NXAsuBRyNiTSQS5j0z6AFqk2SjchQfnASw/hjBd7td\n",
+       "ux7hkYHkCpiNciXoOw0zDu4xzbB0P/YHzYEHTiP5+yruTqdjr2qgPBrotbW1v/rG/hteT2oxB7to\n",
+       "RshEliWTSU1N3aeswlqDN8HORQJTPB73EYn5+Pz8vLLZrAaDgady29vbthaAXE5TRhNZrVa1srLi\n",
+       "8oYHS5K5xOl02ho7poPLy8t+6BhybG9vG8aCB8zu+9FHH/nhxYMDpt3Lly9NLaWhq1arSqfTfm3x\n",
+       "eFzJZFK1Wk2bm5uOpWAKGo/HjRdfXl56ETI+X1hY8AMWj8d1eHhoZiDsPTz/cEzi9a+ururVq1f/\n",
+       "7gbB//9xMTomiJwJG7IgYsoYZBBAI8lDDmLLJNnJEky4VqvZ9RPaJDznoHdavV7X8vKyCUGlUknN\n",
+       "ZtO7M0MHShtomdhxBX2i2VHhMxD4EzSGQQVCvY1VV7PZVKlUUqlUcuMFrRQcGv53sVh0UsDl5aX1\n",
+       "hpQf4MWEADGYYsTOGJ8h1OaPUc6w8qamplymtVotS8wYZbfbbeVyuUfd/ye1mOFhBB3jkeGjWIbg\n",
+       "AlR1eHiocrls2Twj3qArEh+jocPBHsYYxztWrclk0uNvOnyidrGsKhQKJuYwSKE55GuJFD45ObGT\n",
+       "Phg52SmUCkCDGJoHd01YacGwScoT7G3n5uZ8KmBjxpDo5ubGVg1YFRweHvq9ZuCCypzQHiRVCFYp\n",
+       "+XgNTBoJpX+so9GTagAjkYh1gKurq5qYmNCzZ888+4doxDEMkWZzc9OqbI6/TCajq6srRaNRp7lK\n",
+       "8i4Inj05OWkuBAlTlBuhUMhk+WCGH6JVcv/W1tbMT4YmyTBhNBppZWXFWYAc41h9URf3ej0brc/P\n",
+       "z3vKiXvRaDSykz3EJz4H7RQi1cXFha3FEomEms2ma2yQDvwvglnilGCUbZg9kn0oyf3FcDjU1taW\n",
+       "Dg8PHyQJPOZ6UjtztVp14/b+/Xu7AdGoIU/K5XLOKGF4wi5B1h0jYngLiFhZjMPhULlczmUCudJj\n",
+       "Y2MPcF1cRWkmGWFjLzs2NqZisWiaKfg4CnNKEEl22Gw0GkYrKF1o0sbHx1WtVlWpVCy1gvfMqB6T\n",
+       "cr63Wq36yGfUjfUBwxskVahlKG/QJoLDM/bPZDI6Pz9XJBJ5YJxer9dtOFOtVj3iH41Gj+ZmPKnF\n",
+       "DHdhfHzc0WnAdRxtCwsL2tzcNJZbLpeNCSMyZayKFQBkdepRdj3MEGnKGCuPj4/b6w50hHp3ZmZG\n",
+       "1WrVmSWMwLFIgGO9urrq5isUCjloE750UIiLqePe3p4kmbMNfgsJKEhRBW2hfl9dXbXEiQeb0Tml\n",
+       "D2UDfiO4m4LEnJ+fm7ctyWqXu7s7ZbNZtdttP5jBhAOmin/YmQMX2rulpSUfjeyCxIMR7jg2NmZ9\n",
+       "HrgpolRYcBgTUnem02nt7u46KxD93YsXL8xBfvnypebn57W/v++dEqI9nOSvv/5ak5OT5iODc29t\n",
+       "bWl7e1uJROKBQz9eFvF4XMvLy+Zmw97DVD2TybhkYOHi67a2tub3Bg8+BiTZbNaKksXFRW1sbJjR\n",
+       "B/F+e3tb5XLZ9Xg0GlUmk3G5tru7q+3tbRtTQuQClbm4uNCXX35pduLd3Z02Nzf17NkzpdNpv4+P\n",
+       "uZ5Uzdzr9ZwrIt0LKNltiQdDwIldFFZTBGKORqMHnz89PfXnYrGYv2ZxcdGaQ1hx8/PzqlQqfhDY\n",
+       "bWiWKBkoXSYmJixobbfburi4MMuvVqtpYWHBQlP82iKRiBqNhtrtthEV/JPZ+fj7g1azNLmULzSf\n",
+       "Jycn7jUqlYqbvYWFBUfI9Xo91Wo1vXjxQnt7ey4lsDZgQCTJeDS5isCQg8FAR0dH5qCA7zPyTiQS\n",
+       "Ojs7e9T9f1I7M1O5Vqv1IP4LkjsEI5wtQS+YjKEeCdoUhMNhdTodO3bCYYZhtr297RgDcu+Wl5fV\n",
+       "aDS86Bit4ztxeXmpYrH4oKlMJBIP5FmZTMYNFrUtzDcsFWjQeO00c5gd4kcBDIhLKqXR0tKStra2\n",
+       "NBqNHONGGQPrjxE5DxT4fDwed63LAzYxMeFgedThwI+UfIy1EeyiqAkmu37o9aQWcyqVslJEuseJ\n",
+       "OdLga0QiEW1tbdlDglo3Go1qeXnZiwnzE9w2adhQkfD9+/v7Nlth+CHJQxuMBBnYwMGIRCLe+ajp\n",
+       "Ly8vtbKyYhMZ6V4QC3UV9IG/dTAYmKBDnRoKhfTxxx/btBHYEF4Fi35paUm9Xs9cDxCgbDZrqiee\n",
+       "ckB5jPJbrZYNcUBriIBg0IIqZmpqSolEwmUOpc/ExIRevXplstHU1NQfAnqCF2SY6+trnZycaGNj\n",
+       "w1RHQuDz+bx+9atfKRKJ6OjoSJVKxXgto+ZOp2OjwF6vZ7Th7OxM5+fnTpY6PT01uaZcLuvXv/61\n",
+       "g2mYKK6srJgAj+F3tVpVtVpVPB5XoVBQLpczsWd8fNxZKs1m0+mv5+fnRlNarZaD2MlL4b+Hw6H+\n",
+       "6T/9p6pUKtrb2zOzjqEPKVKUE4gYcrmc3r9/r9PTU719+9bUTeLPsCCA10JjCEJSq9VsME49TgLs\n",
+       "5eWl9vf3HYtRr9c1MTGho6MjIxynp6f6i7/4i0fd/ydVMxeLRfMFvvzyyweGhdhjzc/Pe1dbWVmx\n",
+       "HD5oIYWOD+ehtbU1T9uAm2ZmZtyA7e/va2xsTNvb23YvIv633W5rZ2fHHsbsWkB0kUjEYlD8L4C9\n",
+       "UqnUA487mjqOfIJwsBKjXEBpzTEP6gCygys/tfbt7a3S6bRV1xCYMpmMWYdM9IDfUL7THDP+pi/g\n",
+       "ayASJRIJRz8gbg2iHisrKzo4OHjU/X9SOzPlBESbSCTiGF5CYaampkzlZNHAv6A8WVpaeuDlhuIZ\n",
+       "NyJ2SqAkyodQKKSVlRX7zyHgpEYMBviAmMD1JXoNmI6dnQUZDJIPuoiCyEh6ENWAbq/X62lubs4c\n",
+       "ZUlW11ByQOOkrOB9BPHhtSCWHQ6Hjp+YnJy0IBdEhJBKavhMJuO/A0ejnZ0diwPwruPv+NDrSS1m\n",
+       "SDTlctnHWqvVUr1eV7FYdFb20dGRZmZm9Ktf/UqFQsHDFrr9vb09R6vhMs8xn8/n3WAeHBzo5uZG\n",
+       "79690/n5uWZnZ60kGQ6HPhkYGcNQq1QqajabDsPE/CSYKnt2dmaeB/yParVqZl65XFaxWHR4EFZd\n",
+       "lEXsjJeXl3r37p1Za3hroJwBJTk5OVG/3/dwaHx83AkCEPf5mzGt4QSCjzEYDFStVu0dx7j/9PRU\n",
+       "zWbTrymIS5PFMjEx8Wiz8SdVZgSPUnbf7e1traysqNvt6quvvrKKWpI+/fRTL+zJyUkjFp988oki\n",
+       "kYjdPqPRqJLJpPM+4DXE43ElEglDVevr60okEjY6XFtb09XVlTKZjKGofr+vdDrtgEdcRwlgh4b6\n",
+       "+vVrB72DB7PLMuAhFhnEgsEIxoaM7nO5nE1gcBJdXl52CZZOpxWJRDze5zThxMGylkEQu3gymXQ5\n",
+       "sra2ZjlUOp22NGxzc1OZTMaw38LCgkW8/D6SYl+/fv2oUuNJLWZqMCAmYgfg1WKzGgyvZEq1vb3t\n",
+       "FKlOp2PSC/U0imUWRzweVy6XswL77u5OBwcHdtpfXFzU6emptra2vAPhPoSw9OjoyGIA6I/T09Ma\n",
+       "DAYmr3e7Xa2srNh5dGZmxkpr4hPwocAeiwgHQiURtQbjkfleHiDc+iFojY2NPeBct9ttNZtNdTod\n",
+       "7e7uWk9IFB0lAzXy1dWVlpeX9e7dOydLra2t6eLiwppJyP2tVstw4GOuJ1VmEHrDmyX9xAqDlEOz\n",
+       "xK50dnZmzWCQdhkc69ZqNVWrVfstY/iC3xrYKbsqWSEMAlBy8JAFgxxnZmbs5IMrEaR+xLCNRkMX\n",
+       "FxembEajUev2GO5I0tu3b13OkCRF+VOr1Yxi4CWN6xLvDzzvq6srZ4zQH1Dbh0IhlctlHRwcWE1C\n",
+       "EA/2uYgjoJyiuTw8PLTDPi5R1O1BhuGHXk9qMScSCbXbbZ2enrqUgHIZCoWUz+ddThwdHWlpaUnb\n",
+       "29uSZBok7K1YLObBxPb2tpsvXC2DxzGG4gxSWKRHR0eSfrIGS6fTjheGU8xiQOmC+pvmFcYZIezE\n",
+       "TKysrBg3Jt96c3NT29vb9prDBxkpGeLTWq1mhqAkDz7gevOeMWzBsYlmOhaLKZ1Oa25uTs1m08gI\n",
+       "DzhBnDx8QHiM9Cm7wMmx9eJefOj1pMqMs7MzcwPAmXHOhDDP7rGysqLT01MVCgV9/fXXkuRy4eTk\n",
+       "xJL4XC7n8gV6aa1WU6fTsUyo3+8rn8/bAoCEK8jux8fHisfj+vbbb7W8vOzMEhqrVqtlSJEmjXRU\n",
+       "JmSLi4uqVqsmDWFSw2Jh52ZBwgbENXRlZcVjZowXh8OhST6dTse0VhYhDDrCgebn51UsFrWwsGBD\n",
+       "SU5AoDbscEFE0P/Nzs7aSSqYPx4Mvnzz5s2j7v+T2pnHx8d1enrqIw+yPMT2arXqBrFQKBjmoq7m\n",
+       "OJ2ZmTFDDgta1Crtdlurq6tu8ra2tlwfcpNAMAiumZycdNIrdFMol9S4FxcX9jumJOKhuL6+Vq1W\n",
+       "s1Po2NiYc0+IfWPhEA1HeYE4NhjGUywWTb8Ewkwmk5JkDSWDD3jNvG5OHdAIal8oACRhUeLUajVv\n",
+       "KCQSUKdzoQ5/rN/ck9qZGYlOTk7qd7/7na1S5+bmtL6+rkgk4vru5z//ufL5vEWhDFBqtZpubm60\n",
+       "urpqYnkul9Pu7q6dfoKZH9PT0/roo4+Uy+U81oVSSk4HCafdbtfiWQwPkVuBvEj3vGzG1gxd6vW6\n",
+       "Li4utL6+7uYTt6Bms6mjoyO9fPnSihpKrvn5eR0dHemLL77Q7Oysstms5f6YshDAA8OOkHqosuDG\n",
+       "lAu8X5FIxDrF9fV149iUPzwEm5ubhgevrq6USqUUj8c1PT2t4+Nj1+1/CLUMXEFXeWAkorzYeTEQ\n",
+       "pwEjE6/T6ajVaimdTiuRSKharbrDDh6N1WrV6aLgsKhTmKC1Wi1r9JgEgiiAQUO8v7m50dLSkmKx\n",
+       "mL755ht1u10f2dTDhUJB3W5XyWRS19fX5hJfX1/7n6mpKeegoA6pVCo+bcrlshqNhnMPcfAPRprl\n",
+       "83lJ8kQSJIPvw7iG4Qc4NtYJ4NN48dFI5vN5+9zxM66vr3V0dKSTkxPNzs7q/Pzcjv4fej2pxYwq\n",
+       "AvM/TAexucJzAl/km5sbpdNpFYtFnZ2d6eLiwg5D7BLX19denDc3N955GQtDRu90Ol5k8XjciaU4\n",
+       "AbGjsbvDU2C0e3l5qU8//dTsuJubG0cNo/5GNxiU+NNoBR30MYgk6+T169fmKjM2r1QqZrMFx+Qw\n",
+       "14AFUchQ58/M3CfbFgoFlxbY3SL0BZXAyByTcgQHeHUMBgM9f/5ct7e3xuwfc4Xgm/77foVCodHf\n",
+       "+3t/z4w2Im07nY7W1taUz+f17Nkz7e/vmzOAjo2FwGCE7n5/f1/Pnz9XsVhUKpVyd399fa2lpSXt\n",
+       "7e0pk8moVqtpbm5Oktx88QBFo1H1ej2tra2p3W4bY+52u1ayIErF544HcnFx0SIDHgoeJBznpfvG\n",
+       "NJ/PO2SSr+E0KBQKevnypVNS37x5Y0SC3ZgouEKhYGd/mk8eVrByONIYKBJRFwqFzHEmiUCSc1ku\n",
+       "Li7sWY3nM2FDWPv++Z//uUaj0QcpW59UzdxsNs2txbEnn89renpa5XJZp6enku538C+++MI3EtRh\n",
+       "b29P09PT+uGHH/TixYsHsp5Op2PEACNBvOAoYXASRawq3S+0q6srj4tZHCcnJ3r9+rVarZbev39v\n",
+       "ZyTstorFohlrlAIMMgaDgYUBDFmur++zCnmdSLVQdON0j4Ch1+tZmIAYAIrm1dWVPvvsM/3617/W\n",
+       "+vq6RQzo9DKZjMuQubk5S7c4+fgcihaGTvA6EDBcX197N7++vvb9+NDrSZUZcBgikYjS6bQDJhmW\n",
+       "hMNhJZNJbW9vm2iPJ7Ikk4pYMNjfBqX5mIBLcgQvquebmxuLNyEXUTfzM/CFQ/U9OTmpzR/Tq/A9\n",
+       "bjQaSqfTtjdgh2M3Xlxc9AiYWAYWONIvyO6w5xgUofpmV0fahWoaCi0nCLUxQxOQlrGxMfX7fSvR\n",
+       "+X++hixEBL+UF0S2QWBimvnY5k96Yot5eXlZ+Xxe7969Mxnm+PjYZPOjoyPjwxBmgnAZuCfwWC6X\n",
+       "s76Nm48UCKYZllNAVjQzl5eXJiIxgJFkcxZGx6hLUKvQvBL7hhcG3hLValVHR0e2zuK1MuhgVH17\n",
+       "e+uJH8T6q6srR5RxkuAN0u/3PXbnocHABaI/ZRaELjjXjPppbPv9vur1um192X3RD0qyAp4Thbr6\n",
+       "MdeTWsyMctlRsZDCYyKdTmt2dtalABRFJmEoQzgaaXoODw8Npy0uLmp2dtYEc2pN6kUWNNRImGiU\n",
+       "C5JMh2Q3oyaWZOI7BH1w2dnZWTtrMm0EP2dEfnt7q7OzM8ORq6urHu3zgFCjg8ODtExPT2t1ddUo\n",
+       "yu3trc1gjo+PdX197Y0BJ6NqtfrAJ4PhDQFIqE6YGuIlQvAosi6cmP5gaRu40MdFo1GTcsCRMekb\n",
+       "Hx/3LhWM+oIWiUwK6AtJE8ckrpt4wM3Pz/vmEizJlBAcFrYcuxcoADufJJcUUCQpB5A9MbEjUKde\n",
+       "rysSiTgDhTg3/KbhZzAkgliEzIlgeUm25EUniFFMLBZTs9nU4uKihsOhzXLYtZPJpAc4PPyE/QQ1\n",
+       "gEwiKWPYaCA0kQmIGOJDryfVAJLn0W63tbm5acIOY1aOYdw0gbFSqZTNADn+KU0YGsA2k6RcLqcv\n",
+       "v/xS1WpVx8fH2tnZsUUWRKGPPvpIY2NjWl5e1tnZmR2ScBhl4Y2NjdnqgJ8PbEcj+fnnn9sghUZx\n",
+       "ZWXFY2AeMMJ/GEyweOFbkAfOsEWSxQxB8xa4Hufn50qlUqZ8TkxM2MUTlUkkElGxWHwQkzExMaHV\n",
+       "1VXnAFJaMY1EUACDjjLvDxPAwIWTEDxgIsMYZNDYdDqdB5BaoVAwwwu6JdwNRrtwKc7Pz7W5uam9\n",
+       "vT0NBgNtbGzo5OTEnfnNzY0ymYx50nAQ2GF7vZ5WVlZUKpUciEksWi6Xe2BRi9r5m2++sSrmzZs3\n",
+       "SqVSqtVqev78uebn550shY0AJw9Z4UE8nbLj6OhIr1+/liQTs2ZmZvT+/Xul02lPJ7vdrhYWFnR8\n",
+       "fGxIMRwOq9FomGrK68Q4cXJy0hI2Itqq1aoNFxuNhvsGSGDRaFTff//9o+7/k1rMU1NTRie63a47\n",
+       "cnZCTEtACJ49e6Zvv/3WhirD4dAUxuFwqO3tbftiAItR30GW5/fRtUO+oWbGDYlBCUR8yFBbW1se\n",
+       "uLCTz8zM6M2bN8a7Z2ZmDIlBAqL+r1QqbhwpXZ49e2ZIjtAfVCjEMyB76na7NoFJpVLWFDIpZeEu\n",
+       "LS2p0Wg4I3E0Gnn8zRSQJhT5FSUPjLqtrS1zQlqtlnZ2dtRoNJwUOzY2pn/+z//5B9//J7WYufGT\n",
+       "k5N6/vy5ecVMvoDFrq6utLq6qkKhoK2tLZN2YIhFIhGtra3pu+++89FI84SDPC6ia2trtnulpLm+\n",
+       "vtb29rYdQ4kdg8iPwBSlBiHzg8HggahUuocbQS54GKampmw2uLu7q3a7rUqlYh8N6lM8N3AXQjgK\n",
+       "9LewsGDrXRQvWAHMzs7q+fPnTpcCjej1eq7vz8/PbReAWhvWYq/Xsx4RtIXdHWSEAQpNI/YKH3o9\n",
+       "qQYQpUK327UTESR1tIEXFxdmrUn3xHtgPCIO7u7uTKXkZ05NTVkPCG+Dpo+RM00X/ASOeeA3YCjk\n",
+       "9aAQ3W7XaABNHxM/eCWMw+GAnJ6emrWGsoUHNjhCl2QFdrVatbkLdFcULJxkWOiGQiFzoEulks7O\n",
+       "zlzbA1/ynqFDvL29NU4O1gxjj3KLJAGijnlN/X7/DzmAwQszcGRHGFtDjcT+lYgESC9YSSWTyQck\n",
+       "oEgk4nFyrVYzKoArKDv9wsKCer2eSqWSmyJUL8iyKHWC0BlxCjc3N/ZbZsrY7/dNOKIWZ3Gw2+E3\n",
+       "jXs+i7NSqbg8IAxIup9GFotFc1fAskFY+NuBznK5nJUmLFyYhLFYzH7VIBQrKyvGlGu1mpvMarWq\n",
+       "q6srMwiZdBYKBZ86DKcecz2pxby0tOSbvbm56eYH4g55fMPh0CoMOn8GFBcXF673grsY1gFBZUcQ\n",
+       "62WBTUxM6OOPP9Zvf/tbB6KDX/N9QVgNNhzHNgsJGwOGHvxdwGJ4Xuzs7CiXy3mEvPljzAW7J6cB\n",
+       "431MY7BBWF5ediZfIpFQq9XS8vKyHZhub2/NQwFjxyNvMBhobW1NjUbDmwjI0OzsrJLJpOttoDpS\n",
+       "u1KplJlysVhMqVRK1Wr1Uff/SS3m4XDoHemXv/yl/uRP/kSHh4cKh8PK5XIPvOX+5E/+RL/4xS9c\n",
+       "RuB42e/39c033+iP//iPdXh4qJ2dHVsGYF8gyZ7GOzs7zsFuNpv67LPPdHh46PF1IpHQ999//2Aw\n",
+       "Mjc3px9++MHZHgRWMnLv9/s6ODgwXAh/+pe//KV+9rOfaTgc6vT0VJlMxlq7/f19ff7556rX63rz\n",
+       "5o3TsW5ubnR4eKg/+7M/e3BSXVxc6ODgwEoXoLLBYKDDw0P96Z/+qfb39810m5ub08HBgT0/MDsk\n",
+       "gFO6JyuVSiWNRiO/zyTmFgoF7e7uOuuFxhGeytjY2KPRjCdVMxPIAy2TuhW8mRIDGRTQkCTzJthd\n",
+       "2YEl+SiGnwAJnZ/NRROEUaMknwT8TkodMGkGFpOTk8Zc+TpOkuDXUEPDOaYsYBIpyb0CvQCMPl4j\n",
+       "P3NiYsJupMCIvGa+hr+D30lCV3A0Lckfp2zi74UiwP3gNQZptLxG2Hgfej2pnZlmhPq02+1qc3NT\n",
+       "4+PjnlIFPTA+++wzlUolT7iIffjZz35mvJrygZEwqa4rKysPFN/cJMSm1L4cvUB6PCSYFRLZAFxW\n",
+       "q9UsosUtlAki5QbWYhzVi4uLuru7U7FY1PLysnZ3dz3pY6HiBUcDDBei2WwqmUzaHw8TSaIqss3d\n",
+       "TJUAACAASURBVNmsSqWS+Rk0fsjEbm9vTW4iuWt+fl7Hx8c+GaC6AnsCWcLC297e1mAw0N/+23/7\n",
+       "USE9T2pnJguEGpB/BxlZKEbgXaRSKVtLsYMxOcNsheYE9hej716v57Ev2Gu5XFa1WvXPZLeCWokr\n",
+       "EbRSdkGwbXR+lUrFVElG4UiugMqwsg1i471ezyIDcGdJdvPk9Go0GpqZmfF0j4xCILWJiQlnhgPv\n",
+       "URbNzs6aHCXJzlFwL0BSkE7ROGIHjM0wHHLQmsfuzE9qMW9ubhpPRYoEngnLjboYvJTFhdWs9NMR\n",
+       "D3KBfzK0ToYJ3KhWq2ULg7W1NSUSCS0sLGh1ddWNJCPnyclJLxKyQNDYseviXIRmES4IWDjZehD4\n",
+       "GctT266srDjCeDgcuinjdfF9nCpM4SAwscPShGKIKMk+diA4sVjMFr2QpsgEDH5tNpt1GQW+DsZ8\n",
+       "fn5uEexjrie1mKnJwJQxLCEXsNFoqNvtql6vm6eLNAhYCp0fOjU0cCipcfeRZAUzwZaFQsE8CEa7\n",
+       "DEsWFhYeHOM0SZDVQQYYXiCuZYGDBTM6hgeBMhsyvCQ7CpE9eHx87L+LiRyLB14E9FB2VngpPEwE\n",
+       "6pTLZW8I8EDAmOfn511KsXOXSiVj1jDsrq6uzImm1gZ3fsz1pBYzYk92tvPzczcwmKywQ3EzkExB\n",
+       "rL++vlY4HDbyMDU1pY2NDbO7cI9nYsfN5zhmOCHJgwjwaZh8DDtoVnHEr9VqqlQqD8zDsRoAamu1\n",
+       "WuZLBOvwYG4frDXstGZnZ83jhio6HA5NLuLvB6mhyQTaQ2XOyRU0d8HVFBECECjDKHgtmL4QLIp2\n",
+       "Ea8P+ofHXE9qMff7fTvlg0xQq7Go0Zo1Gg3vxKAGtVpN6XRazWbTbLrBYKBCoWDGHQoKjmL4B7Dx\n",
+       "SH1FdXxxcWGhKGYrdPhQUsGpg2GPwREv6mkMYDKZjAqFgur1uutd6aeUWWrZbDarRCJhES0fv76+\n",
+       "diIrGPfi4qJWVlYsb+JnsWvCJuRhgfYKagKygnaQoRGlCg8w2kumnLyvsAkfcz0pNAP+ctBG6tmz\n",
+       "Z/ZdBthPp9PKZrPK5/NuwMbGxrS2tqZQKKTd3V0HvlODQn/c2try+BWUAoroZ599pkajodnZWW1t\n",
+       "bSmdTuvu7s4nBMR4rAComVdXVx+4gDKEwAMPn4vhcKjl5WVzGwifZ+FR6ycSCfth0BRChGfs/erV\n",
+       "K/OdIeHzHiCFSiQSDsREFfPy5UsPWCQZA0eIgA7yiy++8APICYb+kJOHzST4UD/melKLuVwuu4Q4\n",
+       "PDz0tC1IXWSRwg9GkNrr9cwVYBwcNC2EI10oFBSPx3Vzc+PpFVev11Ov13OqEg3Nt99+6yHGYDCw\n",
+       "TW6pVNLc3JyKxaLK5bJ39YmJCf32t791LR6NRnV0dKTRaKTt7W0dHx8bScEuC1uxZDKpcrmsV69e\n",
+       "OWpibm7OYlF203q9rmg0augPRIQSAXd+0Bni28rlsr3rfv7znzuCAswedQlWYiRrYSr+7t07m5jP\n",
+       "zc25ESeh9jHXkyozCHthaEHjxxHJoiTMnJ0AnjKex5CIer2e7VhXVlY0NjZmXJjdPJPJaGxszHBa\n",
+       "uVz2Tg35aHt72zIpeBc0oSQwsSPCcFtZWfGQplQqaWpqynTRra0t/6yVlRWP1RHYJhIJG0PCc8bJ\n",
+       "iAVLkhVjf5pS+BIMXhKJhHHzer2uTqejZDKpVCqlTqdj4exoNDJ1lLII56Lr6/uMmdvb2wcmlCi5\n",
+       "Jdnc/DHXk1rMZIXgI4E6mhqOWvHs7Ez9fl/7+/tuhlKplDKZjPb393V2dqbb21s3UCxEUI2DgwM3\n",
+       "Ze12+4Fa+auvvvLuNDMzo6WlJRttw3oLh8OuiwuFgs7OzrxL7+zs2NaqXC6bfipJ33zzjeLxuPL5\n",
+       "vNrttur1usuYt2/fGjl5+/atwuGwms2mSqWS3r9/7wWP2eL8/Lw6nY6KxaLTAEBE9vf3FQqFVKvV\n",
+       "PMKfmppyhjgCVkoZGjvek4uLC2WzWe/aWOGCboCy8HO73a5KpZK1mB96PakyAwz16upKf/qnf6rL\n",
+       "y0ulUikvKEmWD93c3Oirr77Sd9995yYJZ3e0bIlEQrOzs5ZgUZdKsgCgXq8rnU4bZoIRJslG4QsL\n",
+       "C57e4bu8trbmRSvJqEe1WtVgMFAkEnH4ejKZVLfb1SeffOJmFZtcBAG7u7sevrx+/VrxeFynp6da\n",
+       "WFjQ+vq61Si8P3jLDQYD7e7uuh4PyrcWFxe9uw+HQ+3s7Oj9+/cWEcAbCdoWsNPCf0aqBa58c3Nj\n",
+       "XSaDHAZUq6urev/+/Qff/ye1M8NTGBsb8zCk1+uZX1utVi2NHx8f18HBgfm7qEbgJNCUgJHOzc15\n",
+       "8bGrTU9Pa2trS6enpw982ihJWGzwgKFsLi8vW0YEP4MygCENKhAMYeAVE5/G7giaIN0/zEFTFngg\n",
+       "2AXQIMNeGwwGLnn4GfQTePSBNqBU4YFh8IT9Ge8Xihegu2DZQSmCvUO1WjV3nGi5x1xPamdmV2FR\n",
+       "TExMKJVKWbIkyXKf8/NzbW9v6+joSJubm27M2u22lpeXlUwmdX5+/iDgcXx8XDs7O47MhWQONbLT\n",
+       "6XgxAkOBw/K9OCQlk0mdnJyY8wCbLBqNGgpcXl42cgIKA4WS188ImITYsbExh7pDxWSHB+kpl8uK\n",
+       "xWI2mlxaWvL7FovF3DMQRElID4oUhklzc3NKp9MPLMWSyaTla/Pz8yqXy0omk/4bgkQjmmAQjp2d\n",
+       "Hb19+/aD7/+TWszr6+s2AEdbhs1Wp9PR+vq6fSwghf/RH/2Rrq6uTAGlyYN4c3t7q9XVVUmykHQ4\n",
+       "vA+rpIljBIxfRigU0tbWlkfM7OD1el0zMzNuisbGxmwaAy+E45dkKnZy6JLscs+fPzcLMBwOq9Vq\n",
+       "edgDG44mstPpKJvNeleH2Tc9PW2no2g0ajx+eXnZej5orBMTE47MYJCCDnJjY8NoBM33p59+qna7\n",
+       "bUiQkCRIX4y2d3Z2nFSF2fmHXk9qMe/t7SmZTKrT6ditvVwua3NzU41Gw8gGYs18Pq9KpaJPPvnE\n",
+       "YTmpVErv3r3zsGRhYUEnJydWRoCYHB0dqdvtamdnR4PBwEc1ZCHKEYYIKLgJaz87O9P6+ro935aX\n",
+       "l7W/v2873aWlJUuKqIuj0aix4W+//VaffPLJgwFFtVrV+vq6qtWqyy3ITPQEcCkgy0tytASO+NI9\n",
+       "flwsFk0uqlQqSqfTZtCVy2Xb8VIGQXY6Pz9XPp/Xy5cvjR5VKhXFYjF/PaaNCHwJOHrM9aRqZlwz\n",
+       "yZqmucI1kxuG5Oju7s64KbwJsGEaMr6HsWu1WrVkX9KDOAigMnjAMO6Y/mF0CFQInCdJlUrFtTW1\n",
+       "KGE5xBnX63VPJ4HmsAfj9+C0dHNzo/fv3xsWAyZklAwBCoUNY26wa9AP0BqGGzwYDD3gfmNRG5zC\n",
+       "np2deROhNKNODoYfQStluPWh15PamWma0Oxx5PZ6PQ2HQ+3u7jrlCPohShI4wYzC5+bm9Pr1a+c/\n",
+       "X1xc2LwEXBXQH6sCBJ3JZNK8ZGRTkmwdRm0+GAy0ubmpw8NDB8PH43F9+umnOj4+VjgcVjwet1VX\n",
+       "IpHwgltaWtLk5KS94lA9Y1ozGo30/Plz/+6XL18qn89rcXFRxWLR2SaSzGhjyMGUkJNqf3/fFr9L\n",
+       "S0vmVmNxMDU1ZdMbFnkwi5HIYpo8xK7r6+uKRqOejj7WPPFJLebhcKj19XUnhMZiMb17907xeNxB\n",
+       "8MBBn3/+ub755hu1221LmrDqajabFoaura050AfXeUkPXO/L5bIZdWSaMEZmbD47O6tGo6FoNOq8\n",
+       "wRcvXiifz6vb7brGrdVq5ikQBAQH+d27d1pdXXWAD0c7zRm77cHBgd2Q4F3ncjljvoQPYca4v7+v\n",
+       "ZDJpQcFwOFQymTSunEqllMvllEwmdXZ2po2NDXuJUMc3Gg2trKzY1Aby/c3NjfL5vG5ubjQ/P6/p\n",
+       "6WlPZbH5Jcrtn/yTf/Ko+/+kFjPHKME7kHx48pnMSfdkm/X1dcViMSUSCVvh4i5E5ggKFUmehN3d\n",
+       "3YdTorwIBspTkrDjIOiEIRcKhZRKpWwpgEgWfzt2V1AXdnyaTsonfgdiA9h1k5OTymazrtV5Hfw9\n",
+       "NJ2E0jNan5yc1NLSklO5+v2+zSXxGWm1WrbbmpmZ0cLCgmZnZ209FswfRxaFjTBja5rCm5sbbWxs\n",
+       "mE4bDof1R3/0Rzo8PPzg+/+kauZareak08vLS/v/BhNGGWsDodEUnp6e2t8Ygg6RwOz0yP1XVlY0\n",
+       "GAx0dHTkBXVxcaFer6ff/va3pmqCdYO9SnJWNmJV8FhQB6A7WH/dbtcTOlKyWDBTU1OmgoLYML4u\n",
+       "Fos+RYrFovr9vhtKFh4+yWj2iMCAg4z7KKJYkqoYAOFzwZh7enra9l9MTmdmZpTP55XP53VxcaFo\n",
+       "NKpQKKSFhQVVq1U77lNjP+Z6UjszMBcYLWhCkCLJsTwcDk0ikqRkMunSgXowOLkKh8PGrgnkgfZI\n",
+       "3Qw0NjMzY9srhgxwhokpY0wNkw05Fq8VeuT5+bkymYx95HhNpJsCpwGJ0bjBk2DoAZF/cnLSLDaY\n",
+       "bDRfg8HA9FYmpf1+385H1PBwnzn1hsOhVldXXT8H45zJ+qaJRM7W6XS0sLDgARHUg8dcT2oxMxgB\n",
+       "DYjFYl6AwGJB82sUx2NjYybm09W3221dXFzo5uZGzWZTsVjMC5LMFBQi0WjUE75qtWouRblctkki\n",
+       "Nx4zGcxQ5ubm1Gg03N0TY0FAJnkn8D2CtrfJZNIjcEI1seeCSwFngqhjmra7u/ugeqC/k5MTxWIx\n",
+       "uxoBH2KE02w27auBMhxord/v6/vvv9fU1JTW19e9y9K/cHW7XSUSCZVKJT179kyFQkHtdlupVEr5\n",
+       "fP4PfObg9fz5cwejVyoV46EQfK6vr92QwZ8guHJ6evpBohJJTezITMYQb0K4X1hYMOWx3+9rfX1d\n",
+       "6+vrqtVqNjjM5/Pa2Ngw+T8ajTosh92KAQfjZ7wp2O05iuFNAAHi7zYxMaEXL14YZqP04HUzzgYJ\n",
+       "odxgZM2ODucavjTcC3oN8gopDzBgZ2QeNG9kEETjmE6nbfWAuQwIBp7Rj7meVM2MnRVOlNVq1VTQ\n",
+       "wWDgRoQdm/IAJAADbG5yOBxWtVq1+6YkT7mkn0y6gbrW1tb8OuAqsCCx/EKhjA4vaKrSbrdtOdBq\n",
+       "tRSNRt04IrcC3wYBYHSfSCT8eRpgHJLA3SmxGCkDRQK1zc/P23ZWkn3sGFNHIhEtLS2ZDYfcS5JP\n",
+       "MzBuHPgZvQffA6IieC/RbiJV+9DrSS1mRsvdblej0cixvxBhisWiGx2mdTRftVpN7XbbuwtYNf5w\n",
+       "HK1k8rEAMW+ZmJiwIJRygkYJmI4bDe+Z+AhuJrtas9n0IoTlhoxLkn833Ohut+uROo0lYZ1B7zw4\n",
+       "29fX1zo7O3N9z0IcDAaG0VjEDHigZ/J1NKwMi1CeX11dSZJOTk40GAx8QrHokYxx8b1BA5wPvZ7U\n",
+       "Ypbk+T+NEvxhTE/Gx8edecIYlYkYuw6UUPBnxsiE09AE4vMMNMZwAPJR0IKK6VnQ6BB8eHp62kw4\n",
+       "FC9EFv++m1FwUWAQDnSHxAqbL6DAlZUVn0ixWMw0Vb53YWFBkUjEAl+IVXA8Li4uTCHl75+ZmXEu\n",
+       "IaUWJRpe2JLMt1heXjYhifuApzUPDxDoh15PajEXi0XNz88beyVC9+bmxpOnZrNp2iJvbqVSUalU\n",
+       "8lDk+PjYGSBATCizsQqg5gRzJX8Pdhg72Gg0cug7dE2kUexwSP6xCJuYmPDroVShgQUGC0JaDC6w\n",
+       "PiBQiMaNRq1YLJqKKcmMPLgduNmjHOdEoAw5Pz/XcDjUycmJnaCwNMD1v1arOTKZRns4HNoscjgc\n",
+       "6vj42KN3VC0IeR9zPanFjDMn/scYm0DBhOQDFAXEFg6HTbfkDaV+u7q6MtRGWZLP523kDYbMjgTv\n",
+       "AH8LGhyw6JmZGXt5SLJ3NBM/iDdgyVjISvJgg9MEvgi8kmQyKeknewIWN+UColFcSXmdQGIMlvh6\n",
+       "yrVgk3Z5ealsNutmmdMPR1U2EZAWGsugmSLpBfPz89ZNBgdaH3o9qcUsydEJOPVQJyYSCaXTaR+9\n",
+       "UA5xrWSHJS4X/dvW1pZub2+VyWSUSqV0cXGhubk5R6XR2EBCD4fDymazxlgZaScSCSMDGxsbhgQR\n",
+       "iobDYXOMKYHC4bBrUmpxQnOC2XxBiJHIBSaE8Iuz2aydh3hQ+H5UNb9vQbaxsWGUAe9lJpCEDSFC\n",
+       "SKVSnrQuLy87JwYkCauuRCKhZ8+eeTq7vLyslZUV29o+5npSixn3HjI2UE/f3t7aY2J8fNw1ZDab\n",
+       "1fLyshEQdjSgLmRAGJKz4FFnQN8E3stkMkZOtra2bGnFbiTpASzFKJcj/vT0VBMTE/r88899CjD4\n",
+       "YAeUpP39fdfM6BJTqZRPllQqpcXFRWWzWde3pKzSjAUHR1htSTKPBdMadHsw+KR76RnuSqFQSPV6\n",
+       "3XwMGsyNjQ3L2DDWubu7c9nGrs19QbD7mOtJLealpSVls1k3TigxIpGIFhcXzasg6ou6k8EB2C9N\n",
+       "GPUoUBc3a2lpyXHAmUzGlmA4zc/Oziqfz5s1hg5OknFkEq2CEcc///nPNTk5acYcjSl1LmR8Ysmw\n",
+       "BqMMWVhYUDKZ9AgfP+p4PO5dnXwV7LeWl5d9zE9OTjodKngyrK+ve3LJTowFLc0u6nAabewW4Lb0\n",
+       "+33F43ETmiDzI1RgY3jM9aQWc7fbdboUuxqRaYg4qQelexyVN5Vjs91uG5/lZ5RKJcNgklwXApsB\n",
+       "lcGBZurINA20AfQAUhK7PrkfQQ87anfySBDLSnIDx+++uLiwmXq1WtXk5KR99HgAwXSpS9nx2aVr\n",
+       "tZqzDPl9TBDZpdl1ee3YLYBvUxJxusEODLoYcVphJMnAptfrPVoD+De2mEOh0P8SCoUqoVDozV/x\n",
+       "uf86FArdhUKheOBj/20oFNoPhULvQqHQfxj4+FehUOjNj5/7H//ffieNBQsnqDSGID81NeW6kURW\n",
+       "Fji1M14Y2BOk02nnSkejUU+qgNXgKkxN3ccEdzod17IsRsxXQAuCukScjTj2oXKyq7F7swMD11EG\n",
+       "xeNxbW5u2oqM5FOI8wxwpJ9EB0BjaP+oa5laXlxceKwd5CPDwAvKr9AZBi27WKiRSESlUslGkvxM\n",
+       "EgIQGoTD4UfHDf9NjrP/vqT/SdI/CH4wFAqtSfoPJJ0EPvZK0n8i6ZWkrKT/OxQKPRvdb0F/Lum/\n",
+       "HI1G34RCof8jFAr9R6PR6B/9Vb8QDV7QOBu7gBcvXphr0G63NT8/7x3tiy++cPN2c3OjnZ0d28xi\n",
+       "qEh0cDQa1cHBgRYXF02+j8ViVoQgV0qn03bDBOcGQgvWjMBzHNl8H00cjWOz2fTpcnd3p9XVVS8+\n",
+       "Ps4i2tjYMMwXiURUKBSsR3z9+rUKhYK/ptPpaHt72+VIcHG+fPnSr4WSi40ATJpMlGKxaC0h9ryE\n",
+       "AH355ZfK5XJqt9uampqyv93V1ZWOjo58316/fq1/9I/+ylv7b3T9je3Mo9HoF5L+qiLof5D03/ze\n",
+       "x/6OpH84Go1uR6NRTtKBpJ+FQqG0pMhoNPrmx6/7B5L+47/udw6HQ+VyOZXLZR0cHOj29lb5fN5y\n",
+       "feLKRqORDbHJ7RsOfwoxf/funRNIcUgKh8O6urrSr3/9a83NzbmB4ohmaJHP5508hV3YaDSyLlGS\n",
+       "DR3JF2GhBm1mwWibzaZ++OEH7e3tSZJlU7gJNZtNXV9fu5GamJhQuVx2XX5+fu6H5eLiQnt7eyqV\n",
+       "Smq32y4rIPpXq1Uv/MFgoLdv36rX67lMgYRPU8dJVCqVTGTqdrvK5XI6OjryRPDw8FC5XM4Pf7lc\n",
+       "VrFYfGAGA0X2Mde/1Zo5FAr9HUmF0Wj0u9/7VEZSIfD/Bd3v0L//8bMfP/5XXqVSyVAU0n+ok8H6\n",
+       "sV6v26EIsWWxWFShUHB9e3d3p0KhYCyXEoZjl10F2AwtHnUrg5ler+cByu3trT0tpJ8yUIDUKpWK\n",
+       "arWaarXaA6ok43cmZAsLC5ZyYUqOLxwPQi6XcyQGHA5OLBY3fG2wbgYesAslGV7E3xrfZTYENIFM\n",
+       "DXkogfiQnQXH1QyCIIVNTU2p1WrpF7/4xSNW179F1lwoFApL+u90X2L4w/9f/o6TkxOLTS8vL/XF\n",
+       "F1+oUCiYS8uxSk2LLAqy/cTEhLrdriqVij766CPr8mB3YQZIGiqWBORBr6yseIwNUhAOh3V0dKSP\n",
+       "PvpInU5H8XjcNE2aQ47jzc1NIyPFYtFmhisrKxqNRmo0GlpfX9f5+bmNGdfW1kxs2tnZUSgU0ps3\n",
+       "b/TJJ59YUFupVCzpB16Ea8KOfXt7q1Qq5QFJcLpH/U4EBTs6WSZMIlnkNzc3evnypUsiEgAYXF1e\n",
+       "Xmp9fd2nF8Oif5+i03YkbUr69kfW2aqkfxkKhX6m+x13LfC1q7rfkc9+/O/gx/9aOcLXX39tn4Zv\n",
+       "v/1W4+Pj2tzcNOiP2HVhYUGj0cj+EEwAQQugh8IUC7LNCNOZmZnRzs6O5UeM0VF3gDlHo1GHBEEm\n",
+       "4jWEQiH7VoB+BJEPxubgzOzONH00uWNjY5ZupdNpPXv2zBZk8/Pzpp22223TTLGoHQ6Hzs7e2tqy\n",
+       "zx7+e5jO0G8w8YxGo5aNIb8C0UBBQ5wwDlIEGRFngfnM5uamUqmUvvvuOxUKhb/u9v5rr39rZcZo\n",
+       "NHozGo1So9FoazQabel+sX45Go0qkv53Sf9pKBSaCoVCW5KeSfpmNBqVJXVDodDPQvdPwH8u6X/7\n",
+       "635HIpHw7gB+2el0zIPApw36Ibatl5eXOjs7cwIqgwPiDZDvDwYDW2S1220Vi0VDf6g2KEew+uL4\n",
+       "ZhcEJSFzj9o3yJlgigksRhlDLSvJO+twOLRhIT8b2RcRx/A0JJmOiWK90+mYlonMC3Sm0+lYiAAa\n",
+       "wuvp9/sPXicJs+zE9XrdBH4ecMS1/D1AkFAEHkvO/5uE5v6hpL+Q9DwUCuVDodB/8Xtf4hC50Wj0\n",
+       "vaT/VdL3kv5PSX93xJ2V/q6k/1nSvqSDvw7JkO4x2U6n452TOg0ICUNsOn3cheAzLy0tudve2Ngw\n",
+       "t5jakjICRhgGhrVaTaFQSBsbG/apGxsbs4sogZVEU2CVNTMz49gyVCRg2RCSFhcXXVvPzc2pXC5r\n",
+       "bm7Oo29chxDIIse6u7vT+vq6lpaW3Ggy5MG6YHZ29oHUHx9o6urd3V2Fw2E/9L1ez0JZYMdIJGLW\n",
+       "HFPB6elpTyQZ0HB/KJtAj1jo6+vr/+4u5tFo9J+NRqPMaDSaHo1Ga6PR6O//3ue3R6NRM/D///1o\n",
+       "NNodjUYfjUaj/yvw8X85Go0++fFz/9W/7vcy7aPJw3kSHBkFNvROgtoZC2OUiFKFBuXH12LnzGQy\n",
+       "aS85vOqOj4+NrYJhg5JIMpdhd3fXgTnwptEerq+vP/A5hq03HA7NL1lcXFStVrMChOkadWkoFLKB\n",
+       "zcTEhJaWlhSPx21YmM1mjXRQMoDJ47OHjRnDmo2NDQttqY8XFhYc0TY3N2cjSJh1GDvyur744gsN\n",
+       "h0Mz6vACWVpa0mAw8CbxodeTmgDyJrdaLZcJDCbYAQaDgXcy6I2gFyggJFmEyWIJQlIc6be3t9by\n",
+       "BZtKTgI0g4ykg1M4/psmKcjvZYeldoXAj8o6GKADbs1ro6ZlN2QYA92TcgPcGJ0jPslM/ySZ8A91\n",
+       "Nfha+NuCHs1AlVBW8cVjMcO3ZvJKYw4777H+zE9qMXOcjo2N+fhuNBpOOEIPSCpTv993DUzQI6yw\n",
+       "4JuM++X4+LhyuZwXnST7SnCT2u22xbI0R9L9QwT1k0kdR3E8HtfCwoLq9bpyuZxlR5i1MPLGShcF\n",
+       "Cjg5zD3pHva6uLgwFxrFNX8zP4NTAlgPn5FgjY8/CC5IsVjsgXedJItz4ZJwovD6UWnjZc3JAVkJ\n",
+       "24fZ2Vnt7+8/6v4/qcWcyWTMzKJ5i8fjDyiIxO72+31lMhm9fv3a+O/d3Z13jYuLC/ODOeqpoQm1\n",
+       "OT4+tniUr8O1Z35+3gaG4LxAfCwIGHf8O2jIkkgk1O12NT097XEwnAiYcMHpHLs8fQL+FbxueMu8\n",
+       "Lzc3N5ZZYUnGg8fPZwyPsSInBuUDzk8TExNOv0KmBQQJVwPVSpCeWq1Wjah0Oh39rb/1tx51/5/U\n",
+       "YuaG05RgHwUtkl2g3+8rnU7r7OxMP/zwgxYXF7WxsWFBKsy0YrEoSR6UAGthBfD5558/cM7ENWh6\n",
+       "elqxWMyeciSSMkkjICcSiTxwQULaREQbY/CPP/7YjRUnhSQvYEhOEH2Wl5e1ublp/d7l5aVhR7jT\n",
+       "6+vrFjMwEmcxNxoNQ3NY+galWJJsxEhqAGbsExMTWl1dfZALiJKdEmdtbU2RSES7u7vK/ZiVPRwO\n",
+       "HZr0odeTWszAR7FYzGB/uVx2PRzESTlqgZ5qtZp5y5QcwHHwEBhtB1XE1Mq9Xs8Zf9PT0yqXy1pe\n",
+       "XnYksfRTTt/a2pptBqLRqHq9npsvxty4Il1cXOjs7MxDDOIXUKcw1SR2AQITTv7Sva80dNRGo6Fw\n",
+       "OKy9vT07cI5GI6MVg8HAtge5XM5DFTjPcKrj8bgymYwuLi5sezA/P6/Ly0sdHByo2WxaFAH5C1Zi\n",
+       "Pp/X4eGhms2mNjc3fe/+EDccuIgmQwvIsSbJujyGKsHjcmpqylRKmicI44D8aAJptKA+gmuDjsCu\n",
+       "I+gdmy2mcbe3t9bFUXuS9NTr9fwzQS84/hk8oCeUfqKihkIhe1lQswLFSfKE7vr62jpImHsMbyDS\n",
+       "83dfXFxYdSPJfs68PuKcb29vlcvlXLfPzs56YHN1daV2u23eeCQSsdE4pybY+fT0tCmwH3o9qcUM\n",
+       "ZkpOhiRjunTqcHQleRFDsqnX61pZWZH0kysSNW2QC42TPQ0nnXmj0VCj0bAKm8WP7o4mCQ4Ei4HT\n",
+       "gAuqJeULfOrp6Wn/TgYaSJ8kWQQQ9KeAnw0XmeYMbgT4ND7NBLNPTEzYh4/yAoEC5Q9kEreghAAA\n",
+       "IABJREFULGx3a7WaWq2WERR8+G5ublSv180lAeunrEAE/FgK6JNazDDE6NSpn4Gv2A3Y2YC4Zmdn\n",
+       "LcM/Ojqy4gH+AzsS0QiwzEKhkA4PD90sgeeCwUajUU/U+H24FwFpQTEFSYDYgyEMNScoDOiJdF/i\n",
+       "NBoNM/KIQkNYCx8FoSnDkKB9Ag95oVDQcDi0gpoNod/vm4V3fHxstTmnHqcLfA9OuKDHH9knV1dX\n",
+       "nqbSdNJk0qQ+5npSixkQ//z83JAboTngnBi7EB3GwgU6glR+fn6um5sbzc3NaWxszA0lgP/t7e0D\n",
+       "gj1DCth31L6SvNuBcZdKpQeUR/BeFNoMNihFwHERml5dXRktQCyAcxL+09FoVOVyWcfHx+r3+/rL\n",
+       "v/xLj/er1aoXDrg6FmP4d1CWoBKhxwjaijWbTT80wZE3KhXQo7OzM3sxByHLqan7xFbpJ5X6Y64n\n",
+       "5TUHwB+LxfTJJ594ogRBiLqNbp1uHMdPjuhkMqn19XW/4cB1oVBImUzG4TadTsfYK538xMSE4vG4\n",
+       "ms2mlSCUPxMTE6rX60ZFUGfg0dzpdJROp122vHr1SrVazcd70PKqVqvp/Pxc8Xhc3W7X30eK02g0\n",
+       "0u7urvr9vi0G5ubmbNuF2//S0pIf0o8//lj5fN4j8mw26/cV4v329rZhTkoMxMFkaiNCQNhL6Dul\n",
+       "0c7OjqT7pu+jjz5yTMcfEloDFyR3doPz83NzjqmJGW7QnOFOz3EKTgrXmQEFJPVyuexjkp2LocDN\n",
+       "zX2edrVaVa1WU7PZNG8Yf2Qw3LGxMd/kYrFoDLfZbDpXhJjh8/NzG7Tc3NzYNTRI6JFkCwX8lYHx\n",
+       "yuWySfHn5+cKhUJqtVoezgwGA2cTgpLwuuEsU7IwUEL53u12Va1WXcvjxl+pVMyN5t80pXhAd7td\n",
+       "9wtAf4+5ntTOLMlURKiTMLnQz8HcSqVStpdi+ACXF0Em/hXdbtecDKAqRAB08NR+y8vLvsmUM+zw\n",
+       "7K64jtLwLC4uWgfIMT89Pf2vGH6DvBD5S1MIGhCsXYOIzMLCgtLptBdv0GWfZhXpEtg0tltkvQC7\n",
+       "SXLZw5gavxEs0ZjyXV5e2q2f4B7orezinHIwDR9zPamdGf5vkGyDKJR4BVCJZDJp7BOpP/4ZENX7\n",
+       "/b6urq6867A7UU5gasLnw+Gwp41ra2vu9iORiGMjIKGDScNpAE3AJJ2RL40kZRAPBoQeCPU8PHd3\n",
+       "d4rFYuY3j42NaX9/35yRdDrtfEQsZ+GkUB8zjsdlaGlpySUAHhjxeNwRF3jgIaSNRCJaW1vT1taW\n",
+       "7YHBvjHByWazTn2V7h+QpaWlR93/J7UzczOoD0ejkbOr2+22fvaznznTr9PpaGtry94Ul5eXrkW/\n",
+       "/vpr0xdnZ2etVoa2yQ4HF3l1ddW+xix0yP/hcNjB6OCuQF29Xs+eFhcXF3bIh42XyWTMfFtbW1O9\n",
+       "Xnez9+rVKzdTZAciFEilUvZmvru706tXr7SxsaFms2lJP+pt+gl21uApgX7x5uZG6+vrGo1G9smL\n",
+       "RCIOtXz9+rWxa3JOVldX3SDOzc0pk8n490syNPfixQs7pv7B0ShwEWnAMCEajVq3dnl5qV/+8peW\n",
+       "4oN90uX3+329f/9ey8vL2tvbc0cPfIW7/g8//KBqtaqzszPzb6+vr7W3t6fj42PX6Pl83mPe29tb\n",
+       "7e/v6+DgwDBhLpfzQ1WpVMysazabDuQ8OjpSLpdTsVg0aw/E4PT01IOfcrmspaUlnzblclmlUkmt\n",
+       "Vsv00lKppNPTU+caMu3r9/uq1WoqFouqVqvqdrt2Vnrz5o3x+larZWoruDY9ytHRkVl43W5XBwcH\n",
+       "2t/f1+TkpBqNhsrlsqeYcLpPTk40HA51dHQk6R4B+f777x91/5/UYkblMTExocPDQzdy2GmNj487\n",
+       "LGZ+fl6Hh4fWpcEpeP/+vfb39x3zAH4r3fMRgrVyqVTS5eWlMpmM5ufnHYcWtLm9vr7W4eGhfSGQ\n",
+       "VtH4VSoVVSoVN3grKyuOhmAkL8kBQxzrBwcHloENBgPt7e3Zsek3v/mNbm5uLKz97rvvNDc3p/n5\n",
+       "eZcsoAeUBxgYohaH/1EsFt2gvnnzxkLWVqv1wCyRoQiLnRMS3J9mGvX85uamfx8ql8dOAJ9UmcFo\n",
+       "NRQK2SeDWpcwHDDOWq2mr776yrsLZUk6nXbIJf5ywFfLy8te+GC5Nzc3Ojo6cn349u1b7e7uamFh\n",
+       "Qaenp5LuaaIcryyGeDyuubk5LS8vG4Ml1HJ3d9cNFI3d9fW1tre39Zvf/EaJRMKEoUKh4DIAoelX\n",
+       "X33l180kE3YfCM/Y2Jg+//xzZ7BAjDo4ONDq6qp5JJRe+I0Ew4Du7u5c4jDeD7p7ZrNZlxoE+8Ae\n",
+       "JKkLDw9IXt99990H3/8ntZjJ/6CZYuoUDoe1sbGhQqFg5CGVSqlQKDgxlSFGMKe6VCopEono7OzM\n",
+       "kh+I6mDYmKdw5MNWY2dGAcJghHKBAE3MY3AaxUAlGo0aj8UPRLrvCwjWGRsb0+Lioo/y6+trY9Px\n",
+       "eNwT0ampKQdrfvrppy5LwuGwhb6j0Ujr6+s6PT1VLBZzc5lMJhWPx5XL5fTRRx85T5vSand31+6i\n",
+       "5XJZ29vbToglZm56elpnZ2caDAZ2RGWsju4SPsljridVZqA9w22IYQgeGpLs8IlcSJJ3WiAuRsfQ\n",
+       "HZEzgVgg4iyVSra2YmwNpTMWi/k0QEJPp7+wsGDVMscsGrzr62v/3HA4rFar5RMGSZgkm6dLcvYJ\n",
+       "xzmTUGrsTCaj7e1tIy3IuYDk2u22RqORKpWKR/AwChnpMwACsotGo3rx4oWFuQyJgBZbrZYymYzL\n",
+       "D0or6K/D4VClUskml5jWPOZ6Uos5aO4HvISUh1pa0gNVc7lc9hEMqM/EC8wVAjwqEbjGmMLQYJL2\n",
+       "ymgaw/Fnz55ZnR0cZiBUlWT2XTCMp9lseurGiB3/ZeRWwJGrq6u28gJ1YfHncjkrZ/idGItXKhVt\n",
+       "b2+bHTcxMWEfDlAZPgecSMPM3wNTjs+RwgV1AG5J0JOj3+9rdXXV/I3x8XEtLy8/6v4/qTKDnQrj\n",
+       "Fbrzdrttayx229nZWSudsZ2CjVapVBSLxXR5ealisahQKKSDgwMlk0mPrsGKeSiAxfL5vNbW1lQu\n",
+       "l7Wzs6Pb21v95V/+pdbX151oSqQbkzbCe1i80EfZzcCAc7mcd7d8Pu8hBRNOVCl4S0PXROEdiUT8\n",
+       "fSy0m5sbnZ6eampqyhg8Q45CofCvOO8jYG21Wvrss8/cZPPzMU88PT19kAkIxZX3Gi860J5er6ff\n",
+       "/OY3j7r/T2oxI33H6QcjlXA4rGfPnunw8NDHtyQHr4NPS9Lq6qqlUalUSpFIxEORUCjkoMZYLOZd\n",
+       "nA6eUgMnUerBdDqt8fFx79rEDWMWTrQDQxeGPpLM+ajX695Nx8fH9fz5c0/TUGpjLnN1deUdG+0h\n",
+       "C+jLL7/U0dGRSw2w9Ha77QcYRGV7e9tYdKlU+ldKokqlomQyaUrs+Pi435egH93W1pZ9n4M49nA4\n",
+       "1K9+9StFo1GtrKzoj//4j/XrX//6g+//k1rMQQ9mYK1qteruGbPA6+trJZNJ1et1vXv3Tjs7OzZc\n",
+       "mZycVD6f1+7urgqFgpEE1NXlctnG5eVyWdls1iJSiOhzc3NqtVpKJBJaXFz0CYAmMJPJeNIGYWl6\n",
+       "etqsPZzrwaPhQY9GI2cdnp6e6vnz567ty+Wy+dfQVUEhUHpfXl7qzZs3mp6e1tHRkdLptDkolBWt\n",
+       "VssSrLdv32pnZ8dC2/X1dYcYNRoNpVIp9Xo9nZ6eusSQ7pviw8NDbW1t6erqSr/73e88Xq9Wq+4J\n",
+       "KO/gV/+zf/bPHnX/n9Rinp+fd94Io2kMuQmtnJqaMnIxNzdnHzdqZBACPDYWFhYkyRESTPWIZSBK\n",
+       "DCckGkIQE6AxOBLgzDRjkpwGC6zFrnx1deWJGgSfRCKhdrttvJjaPJvNuoHr9XpGVSDvT0xMeAwt\n",
+       "yYJWSeadcGKhvIY7Mj09rZ2dHUNulB6UQQx/+P6FhQVzMhjRIwKg9s9msy6lOG2SyaQpoR9yPakG\n",
+       "EGJ7Pp/XDz/8YDSA3fjo6Ejdblfn5+c+dsmWhnMQDoe1v79vbvFwONTp6amSyaQGg4EtXxlQkCPS\n",
+       "7/f17t0714ClUkmFQsGICnActeK7d+8eKMFpMlutlvL5vNl7V1dXOjs784ADGyuGKJQ533//vWKx\n",
+       "mOr1uhl6eIccHR3ZKheL2cFgYE+Ld+/eqdfreQpIJjj85lAopL29Pb17987MOklWqmAWQ2nBCYG7\n",
+       "53A4VLFYdANKGdLr9bS/v69Go6FKpfKohSw9sZ0Z3kM2m3V2NGNW6t0go47dEYIM0h0IQdSG1JWz\n",
+       "s7NKJpNaXFxUq9XyTomsCoiOtKWFhQX1+31Fo1Hd3d0ZW56fn1c8Hlc6nbaUa2ZmxgaGqVTKeDkN\n",
+       "K8gJLDXgPx5AnIiur69VqVTsvjQajbSxseEdnVSrYFTD+vq6a/pKpWKUhPp3enra9TOC2KBJ4sLC\n",
+       "gtUrQYNJBk2gHeSZoFSfmprSZ5995teytrb2KCfQJ7UzS3LXTR2I85B0j/NyhEOAD4fDPlbD4bDJ\n",
+       "8Hw9aantdluzs7NKpVIeYaM0AX/GUV76yXuCUwD1NRxofDWA6djR+FmowyX5a8F9JZl032g0vJDO\n",
+       "z8/NpZB+0kQC0yGZAk5EmT36MW8bxuCzZ890fn5ulQ3OoPzO7v/T3pnERpqmef3/hR22Y3Hs4Vht\n",
+       "RzjttKszO6u6ekEjulsaMXBEQgKEaAEHhIAbHJFAnODACSE0h9EIBiEhREsDamkaxIFlUEs9rVqy\n",
+       "ypWb006v4XCsjnDYYUd4+TjYv6c+dx8G2V3drcCvVKoqp9MOO97vfZ/n//yXoyMbKDG69/p2oHOk\n",
+       "jOr1egbvccC0Wi27Sfne6C/vukZqMzOJYtzq9/sVjUbl9/sVj8dv8X1hfCGfJ+KBGpjoYaQ+lAvI\n",
+       "h/j4YDAwpIPBArERl5eXFicBcZ4sDwxjKG8Y5EgyBUc4HLbQ+nK5bKebd3hCuhbWAjxENFgQe6h1\n",
+       "GVA8fvzY8hIRGriua7RPHkpJpqPE8851XUNrMpmMGScyxYtGoxZLhykjcCPmjbOzswYtggLBl77r\n",
+       "GqnNzKQPWT5XfCAQMOgKuibypv39fSMPSdfQHBuBNwkyDzwDJnd+v98cfSCXU+cmk0lzAGV4QpnA\n",
+       "EEeS1Zk0n/CxyVxhLO4NtPcmvxJrwSDC64gkXTdzjuOYVAweCqoVyq3JyUn1+31rIDlx+b2GQiEj\n",
+       "18/MzJhZDcoVoDdJxoGBocjCcpjUWU5u3p+Hk9mzmFBh3H11dZ0jTebHYDAwR3pkSouLi6rX61pf\n",
+       "X1elUtHExISdQizGvcBPs7OzRtfM5/P2+Y7jmBfd1taW1a/o/TASZHrGxxlseC14oZ3CNJOuy429\n",
+       "vT2b3LH5h8OhMpmMcaRPTk7UaDSshMJw3CsEYBjEz4sbE/wVoEBvXLIkU6gcHBzYz8MDw+tlmAQN\n",
+       "9+DgwN4PvECazaba7baJihk+3WeN1GZG/o/RNtYASNoZdzOJwk+CUwiKqDckh1MZMSlEJca8dPJ4\n",
+       "v0HSAVbDzguvCMbYruvq4ODATlo8JGiiyFzB+Pvo6EiHh4fGhKMPkK5Pd3L8uBn8fr8Fv0M4QnaF\n",
+       "oz+bGoMcfo5Wq2VIS7/ft5iGg4MDTU5Oan9/3+pkPOy8v0NJVqK1Wi2jgWJnC4LEQAqUhQnpXddI\n",
+       "oRlo9SYnJw1KKxQKpiaBRDQ2NqZgMKh6va5isaipqSnNzc1ZkA55JYxll5aWzFQlHA4b0TyXy1nD\n",
+       "0+l0NDc3J+kaH3706JFxIHDtgYqKeoXJGajB9va2OWp2u13Nzs6q0+lodnbW3OYDgYDq9bqi0aiJ\n",
+       "B6ifJVmtj1TMdV31ej0za6RUoRnmZKQvwMcZhQgJA1hp0czSLPJgwuMm229+ft7onicnJ6aK5/ef\n",
+       "TCbNswM67H0FrSN1MnMK88Zj2CLJ0AJOqpOTE2WzWTMWJwaC6FyiwmZmZixfWpI5+khSvV5XKBSy\n",
+       "k6vZbGpnZ8cyQM7Pz00SJH1phTAYDLSxsaGjoyO79mkaKQ0YFzuOc8sOtt1uG4VSkk0F8VLma7qu\n",
+       "a01rPp832iYnKhseh9NwOGz+zeDePKhEx+EgCm+D7wMnhduHg0L6MmEWl1O88ra2tqysou5+ELR6\n",
+       "FkoKTEuY+LHJaVBwFdra2tL4+Lg1evCLm82mGSwyUCE3EKcflNMHBwe36tX5+Xm5rmunLS7+sO/O\n",
+       "z8+tGeV1bW5umtVWMpnU1NSUhasjEMD8Bf4EdrQgNNAsyUvBfuvs7Ezr6+vy+XxmSQA9lTpWktX3\n",
+       "TBt58Hi9WDMcHh4a3EcIKEw5hkhgyJJM9U5ZRFNbKpWMVgB8GY/H7/X+j1SZIck80Z4+faqpqSml\n",
+       "02nLI2GAwvi0UCjo4ODARtnpdFrValXLy8t6/PixVldXzRqALI5arabHjx8bXzeVSpmHBGGXUEiB\n",
+       "6iAvAX9NTEyYO+bU1JQeP35sjSs15+Liol3pp6enGgwGhgbgLMq4HT8P7GwLhYIcx7EhinRthFMq\n",
+       "lRQMBlWpVMx7GluxVCplmxY0AsN2xLWNRsOMZbBWyGQyarfbymazury8vKXMASb1+/3m+BSLxVQu\n",
+       "l3VycqJisWhxGRg33meN1GZGekSy6cTEhN68eaN4PH5L+MmVyBUNNLa6uqqVlRV98skn5nnBm7Cw\n",
+       "sKBms6mNjQ1ls1nF43Ht7Ozc8l6rVCrmN/fpp5+qUChYWE6/3zee8MTEhN69e6dMJmPm6MViUUdH\n",
+       "R9re3rbp3/Hx8S1y+/j4uDqdjh49eqTDw0N7CDFqYWixvr5uDZ8kazCRg52dnemTTz7Rs2fP7KYh\n",
+       "U2R7e9sw4Ldv32ppacksgGOxmL744gs9e/bMyjYa14ODA5ueHh0d6ac//am+973vmdXu9va2lpaW\n",
+       "FAwGremkLKK0evBn9iyutlwuZyQZZEvHx8eWDY0KJRKJWPYzENbBwYFtfm9cL/ZZ+FGgisB6lvEv\n",
+       "sWuhUEgzMzNKpVJqtVo2MGEczfQMFQcRZhDUveVMMpm01wSpB6ycKR9NGX4aMzMz5iY0GAzsRgqF\n",
+       "QgoEApqfnzcVCYT9wWCgx48fm+s+ZRGuQ8RhUDdjxHh5eWnTRfgnS0tLkmSYerFYNK+74XCo+fl5\n",
+       "44D7/X5Tkd9njdRmppMGA0UdghoC7wvePO+Ym82dTCbtRCZEHkpnOBxWo9EwX2NYbl58G0IQAxk2\n",
+       "AI0p7pcMDyAKwa7z+XwqlUrmTYE+EYYZymnMzxHNcpKfnX0Zf0wZhI0s2C+1NvyNqakp5XI5TUxM\n",
+       "qF6vW7oAzkjE0IE5s9E5UamtgT8ZefNQg6gwtsaCADsvRujf+c537vX+j1yZQa16fn5uFliXl9cp\n",
+       "pODDPp9PyWRSlUrFUA8k84hJo9Go+Vd0u10z3p6bm7MGB6IQDRhWVl46qVcxIsnQCUbtuI6GQiEj\n",
+       "GDUaDdVqNWsGwa5xqfeyzkBkGOrAcUin0+Ze32w27ecvl8tqtVrGycY9lKkllFGI9vBNUJejdsdY\n",
+       "Z2VlxWRUwI7AcJLMbRVWHYQmGs1arWZG7vcVtI7UZgZvbTQadhJ2Oh3l83nt7e2Zsrjb7ZrXnCS9\n",
+       "e/fOLKjy+bw2NjZseME0C38NfukMKQi15OFhzLu7u6tQKKRUKmWmKq1Wy0qSra0tM6nBCZTanTiH\n",
+       "vb09K51wPQLZaDQallaFoSEMvkqlYpDX+fm58a753bTbbbXbbUWjURv/0+RielgsFrW1taUnT56Y\n",
+       "sSSmjdPT0+anjEkkJzRIEHAe6bfHx8dKp9PmsYFaPZfLqVqt6vT0VD/+8Y/v9f6P1GYmugAZFFJ6\n",
+       "NhXX4szMjPr9vhYXF7W6uqrl5WXVajXlcjn5/X4tLS3Z38f6FQNzGq1+v28n8ZMnT9RqtdRsNs2o\n",
+       "EbI+I2tOJIhNSPxh6JGvB+MsnU7fsiHodrtWZzIYmZ6etqHMycmJ1tfXlc/nVSgU5PP5bJqXSCTM\n",
+       "vZTXgko9m82aAPbq6soCLa+urpTP5zUzM2NNMkIGbhPsc6EHUNpg6sLwB4td6ZpzjlgC/jZ9y9zc\n",
+       "nN68eXPn93+kambpyyBGmjemX81m0zjI/BnWr41GwyTv1WrVuvt6va6LiwuTVFHHAiexiarVqqRr\n",
+       "TSHxZDxQ0CUxXYTny0AHkjwnJIy9k5MTS5elQQUSY8zNyeitf8lNoX5mQ+VyOeNE87oxPidTkBhm\n",
+       "WIE8WODPRFdgWyvJmkWyUfDgANILhUJmuwuejBgYNTw1OEaOd10jtZk5dbrdrr773e+aG2Uul9MP\n",
+       "fvADxWIxlUolzczMaHFxUd/+9rdVLBaVTqf16NEjMwtcWFhQsVjU06dPNT09rSdPnhgPASkS+Rx0\n",
+       "5JKMdD81NaX5+Xm1Wi0FAgETASwsLBiCkM1mlc1mLd1Jkj744AMtLi5KkoX6kFWIN0a5XFa329Wz\n",
+       "Z880Pj6umZkZM/CemJiw9Cb0d7Ozswaj4WtRKBTMYWhhYUHLy8vy+/3K5XJmyh4KhfT06VNlMhnL\n",
+       "BE+n04pEIiqXy1peXtaHH36oi4sLLSws6OzsOnY5HA7r4uJCmUzGmtKlpSWdn59rdnZWkUjEoL/F\n",
+       "xUV97Wtf0/T0tPL5/INxonfhZBQMBvWjH/1IR0dHpoT+4Q9/qFqtpv39fZMhvXnzRhsbGzo9PVWt\n",
+       "VjNzwJcvX2pnZ0fValVjY2P6+OOPzWzw3bt3dr32+31zBhoOh1pfX7dQm+fPn0uS0TMDgYCeP39u\n",
+       "JJ4XL17cIjfF43Gtrq5qa2vLTFKomz/66CNjrWFntb6+buwzWIG9Xk+VSkX9fl8vX75UIBCwJjca\n",
+       "jZqtbb/fN3bc0dGR1tbW1Gw2Va1WtXWTEHtxcaHnz59rb2/PmHiDwUDb29vGMlxdXdXZ2Zlev35t\n",
+       "rks00C9evDA7r3q9boJanI5c19XLly/t99Bqte4tmxqpzYyrDnIjsGKQCq5qYDRKiIuLC9PdQTL3\n",
+       "bpTz83NVKhWjMKJIoTHi8xkOgE1LshEwZQMUT055xsX4Z6Acka6HHXjTMdYGPeFnREQLJOit6ykf\n",
+       "CM2RZIw+0ATIRECMUDqBNkEi4Jmcnp4aZIcyhq8LzwRKZ7lcNjQIliK1MjAedgx4Yd9njdRmjkQi\n",
+       "FhRD5AKnDMy1SqVi4T3o9Gq1mlKplOLxuF6+fGn8g16vZ/J/L6F9bW3NnDObzaZht/gwe2EyHg5o\n",
+       "j5B/CAA6PDw0F1Bqcuk6Lm1/f98SXicnJ7W1tWW6PVxKyQV/9eqVksmkxbcR1lmtVs0oETMX6uKT\n",
+       "kxNtbGxod3fX+N5+v99IRFBEId5vb29rOByaqQuEJep3fkZKMQhSbHh+d0Q7IwsjMcvLIb/LGik0\n",
+       "A6nP5eWlnj17pnA4bN16NptVu9022RR/dnl5aXgudd/R0ZEKhYJ2d3eVzWZ1fHxsymcmd8lkUpFI\n",
+       "RLlcTj/5yU+Uy+VsWgd7bG9vT5OTkyqVSnYLQGKfnp62CGRG581m08SvcB7Gx8fNHgGJlOM4hiWj\n",
+       "K1xZWTFRbCQSscxwLBIQ8ebzeQugxLne5/NZ4urW1paVRZysExMT2tnZMcNwHEERrLIJaXrz+bzZ\n",
+       "N2DxQPO6vLysYDCoZrNpSbqJRMKoqPfxaB6pzQx1k+katk8MDhqNxq0ygT8n9Ql5EYoUkA4wYuwA\n",
+       "KC2wxGKke3Z2pu3tbXPLhCRUr9eNJITxSTgcNlFou9022RIO9I1Gw2AwTji6/snJSQu0JxjI5/Pp\n",
+       "3bt35h2yt7dndrSgGJKMpsogCWsw/gzO9eXlpTY2NrSwsGA+z1ADSJslk5ByCCemSqViGHmv11Oj\n",
+       "0bAHC6NxuNno/1qtlsF3d10jtZnhOUiygcJ7771nXT+CV2/edb1etw7+8PDQPIX9fr+ePXtmm4RJ\n",
+       "2WAwsGlaJBJRp9OxzD7UGoPBQKVSydJK+b5+v1+1Ws2+DtYDnNBc3f1+XwsLC9rc3JQks5dFHOrz\n",
+       "+VQul20a6fP5zC9udnZWtVrN+MuE9FDy4H46OTlpATnAdCARR0dHhjF7hxqcwGgmIQvBdb66utLK\n",
+       "yop2d3dvZbPw0GHXAF4OTOo1bbzPGqnNjAH25eWlnj9/ru9///t6/vy5VlZWjMPw7t07u+5evnyp\n",
+       "YDCozc1NTUxMaGZmxtCEYrFohoftdluZTMZO+/X1devOHz9+rO3tbVNykIf35s0bxWIxJRIJra2t\n",
+       "yXVdQySGw6FqtZpKpZLa7bYZfOMsOhgM9PHHHxu3AfLQ6uqq3n//fYsuQwaFO//jx4+1u7urt2/f\n",
+       "amVlRVtbW5qamrplI0Y93el01Gq1rLlEZULjNzk5qY2NDSM7XV1dqVarGef75ORES0tLRmuFj7K5\n",
+       "uWk6wEgkYhwNLzfEK5aNxWImaHj79u293v+R2sw+n89y+eLxuMmksKXiTYIY9PTpUyMQgXSUSiVD\n",
+       "NpaXl1UoFPT5558rFovZaT4/P69ut2skdhKfOGXi8bgODg5MFpVIJGxYgkuQJAvxOT8/v5V7PTY2\n",
+       "plwuZx50DF0ePXqk8fFxpdNpHR4e2mtqtVoqFAp2bZdKJQ2HQ/ue4Me9Xk+xWMxEtvl8XmNjY6b2\n",
+       "zmQyevXqlfL5vAVXUmZgWQsSA1dbkj0IsVjMkrwqlYohHJlMxiRqZ2dnRvWMx+MWLBoIBPTkyZN7\n",
+       "1cwjhWaAXEgygan35ICeCJOOcEu0dJQKdOetVsuuf6aJ0BjPzs6MTMP1OxwOb2XtDYdD4/iiViEu\n",
+       "gaB0LGAJtOT/OcnxcPNa3cIRxugGWAvRKA8EPz+oCyNnb9Qxr5tJKfU4jSoNKp4YOHuSQoUaR/oy\n",
+       "0J0bBUYcE0xvpDD2wZIMBcH7465rpDYzOjw2JJgyTdNgMNDi4qLxHiSZ/Ws0GrUOHGFqOBw2RTZ2\n",
+       "A+jtcDDCoTMcDmthYcEoqKSasnGRWUky4g96PdTgkINQpcCdILfbi7rAicb5iDLULiPnAAAbvUlE\n",
+       "QVSITYz7Etxtr2YQYQIU1mw2azkmk5OTFl2M0p26GjUNpCfyDZFUwYpD2we1ttlsSpI9CAgNODAm\n",
+       "JyeVSCRULpfv9f6P1Gbm2gOrJQiSXBOSlCCbY91KPccbAd0RPwmaJeilJycnRkJC5Xx+fm6xZPCH\n",
+       "IbRD8YScj2sSrykWi91yY2q32/ZnSPZ7vZ4kWUnC6Y27PfwMhiZY24IaYA5DApTXmgtivDdkh+8P\n",
+       "6T4Wi1mTx0PDTRiNRo0HzuGBRQLjfgZSCAho1r3uU/dFM0ZqM7PpMAakJpWkWq1m5Bmc6PFqoGa+\n",
+       "uroy1bDruup0OqrValY+EFMsyZw28VzGdBFLAmxrQ6GQ3rx5Y+6jCAIwGifWbWdnRycnJ0qlUuZs\n",
+       "71WihEIhc19CuYJFAIrvbrdrkGMgELA/R7iLRwf2sefn5yYgYITOx6Xr7D++Jmy/4XBoMCeYd7PZ\n",
+       "NGdU4ERJRiRiotnv97W9vW1TRDSLDLD29vbu9f6P1GbGXhYd3WAwUKVSsQaFDA5q2Hw+r1wuZyfx\n",
+       "8fGxEY4uLi5ULBatufNycyVZAiq2AEzOkP6gCfx5ISo6PgwaOU0JvUHpwVXPmFm6tg7rdrsKhUJG\n",
+       "hAfL9fv9FsJDxLLXmKZWq5kaG/k/1lx4KYODcwuMj49bvBun8GAw0MLCghYXF43rUSgUTJzgDQHl\n",
+       "gcKEMZFIqFQqyXVdO0wo9zBhv88aqc3MYAOTleHwOh/v4uLCNH6cmJyMOzs7doKEw2FDNxCiQldk\n",
+       "EkdUA1o2x3EsR4Tw+KurK6XTafsauCJheXV6empKDzjEY2NjFlOBkoXNwegZPZ4kG9hALe10Onr5\n",
+       "8qW5B3lLG/K2k8mkMpmMfZzohsFgYEMNbxPH5+I9glk4zaTP57M4ZeImBoOB4vG4/f75vfn9flUq\n",
+       "FbVaLUUiETswdnZ2TEjhdWm6yxopaG5+ft42FL9EGqpsNmtdM6Sjcrl8yzWT0xtJPIw33kTqQklm\n",
+       "gohLPMMU13W1uLiotbU1I50jYmVogqkiSUyYwFDb7u7uWoYgNTOGjtPT07ZRksmkZaiwsXBRokkj\n",
+       "Ai2VSpm7J6bf5J9MT0+bC9RwOLQxdCgUMm0ktr7YF5yfnyuXy9nn4qyP3hL1OFFt+/v7yufzNkSB\n",
+       "xJTJZDQYDJTJZO5NNBqpzVytVs2EZW9vT9lsVmtrawoGg5ZljaNmNpvVu3fv1O12Dd8NhUKKxWJq\n",
+       "NBrK5XJaX1/X4uKiJSYxco5GowbVYdiC1RSEG9d1tbm5aWHulDZnZ2cKBAJqNpvGa+h0OkokEkYS\n",
+       "8jox8WCyaS8uLuzvUPJgEEMNurW1ZRg7LkWJRMLIROgMiV8DWqxWq8a5GA6H2t3dNUdSEri2t7eV\n",
+       "y+XU6XSsvMFugXq51Wppb2/PAjU3NzfNSQqvEhptDp1ut6v9/f17vf8jVWZ4nfCZMiEJQrRJM0JZ\n",
+       "wcmDAjmRSNgJBUZMzQxWipSK65uUJ9QqXmgNj2TGyYTRgCyQ/ee6rimp2QTEkYFDUx7x+YyHXdf9\n",
+       "BdgOU3HorpLMaN1rWg5C4/V6QyIGOoF/NHpELGips6nf+Tt4YvMzghJxkvM74+vyujCmuesaqZOZ\n",
+       "rn58fFwHBwcWmtjtdk3Oj21XsVjU2tqaQqGQtre3LYH1k08+sYB48keurq7UbrclXf/CqQ1brZbB\n",
+       "YHxdfDW81+fPfvYzG0sTwg4Bajgc6vXr1/ZGIxY9PT21FFaYe5999pmePn2qt2/fGtKB0WKlUtHK\n",
+       "yorVzh988IH29vZuGceAI0NFrdfr2t/fN5oq9fnr16+N24LDvSR99NFHBqsB8WGfy4bEIhdfD7jN\n",
+       "RDB/+umn9jPB8d7Y2FAqldKLFy/u9/7fb/v8Zi3Gp9SYnHLSlxJ5rKDIdR4fH1c8Hr8VOYbrOyRy\n",
+       "Mvq8RHWc7BG1gnxgKg6xZnt7274/DDGvb0YsFrP86ePj41vaunQ6bUhIv9/Xe++9ZyR9It3Q3kHT\n",
+       "7Pf7evbsmS4uLlQqlawZgy8cCoUM2ZiZmbFbAwMbyEoYvRDgeXV1pVwup0KhYA8b0cYIZTFZhIVI\n",
+       "g9nv9y0vBQ8+7BaOjo5ULBYVjUbvjWaM1MmMBhBCzenpqbLZrIX1lMtl49WSUw2TbHZ21poWyEpw\n",
+       "H/DMoEGho2dChsccdTdwGE0S17yXu4EUX5IqlYoWFhaUSqXsFKO+x5mUm4N6vVAomLUCrDj8Orze\n",
+       "0MRAcFLi5o8ns1eYGw6HLQg0FAppfn5e5+fnhttDV00mk5qenlYymTS1O+UECV9YLgD7MXghbSAe\n",
+       "j2tra8u+P5HE91kjdTJ7hx107G/evLGmaWNjQ41Gw3i7yJQkmZz+5OREL1++NLYXCg82K40ZWjYe\n",
+       "DnzhMIEhtcnn86lardoQR7r2ZUOKj0EhnBLGzRMTE2YW3ul0tL+/ry+++MKmcKenpwZFoiBJp9M2\n",
+       "5SSAc3d3V1tbW6rX65Y0y7QP4hRhnycnJzo8PNRnn31mnnWXl5c2DKJcQ2GCsrvRaNgAqNfrme4P\n",
+       "Xjh+Gqenp6YhBOFptVoWQQyz8a5rpDbz7u6uWXNNTU1pb2/PVM1ch5lMRqVSySiLfr//FgkmFArp\n",
+       "61//ujUofAxL206nY40kmR5YvGKIuLu7q/HxcW1tbVk96NXuMczAooCNxGvnxAeqk2RstP39feM+\n",
+       "YEwDTXN1dVWnp6eampoyLjVQHthxp9PR7u6uGUsyLcTyS5LZ3UK+wgAmFotZ3jcIije/G9szmm9Q\n",
+       "HXyXd3d3DTt/9+6dBf5kMhlrEO+zRmozT05OWgbgxcWF+U8wwma4gDKZUwUFBGtzc9PqXsdxTD4F\n",
+       "5xlqJ2oLRtjHx8dKpVKan5/X3t6exavhKQf3+OjoyGpGBjY4CtGwSTJyUqfTUb/ftzwRPJrxiGPz\n",
+       "Ya1FnY9mkVsKrziGIXwNHmxJJjDFOxozdbw0sA/gRvOaJQJPRqNR01Eiz8JFlIMF0QDun8Q832eN\n",
+       "1GZOJBJ2pVOnTU9PG0OMRmRmZsbcO4HUvKcSv2w2F9kbp6enarfb2tzctFgzJnCcRpxMiURCnU7H\n",
+       "rGXJuwbWQ5PHZoCB1+/37b8xCPf7/cZXDgaD6nQ6xuHodDrmqVwqleznApkAO2+32zaA6ff7t6xk\n",
+       "vXYAPp/Pmk4ePhygqMPRHoJHx2IxJZNJSTJCviRj6qExpLQBPqW/YbDjjbO4yxqpzcyAIRwO2xgX\n",
+       "3R61n+M4Vm9SInDFQpDhKmU6SLfuOI5yuZx1+6SxIsPKZrO3BKSZTMbyAQeDgfL5vGG3NE+u65pI\n",
+       "9vj42Nh4Xo8579WOe1Cn01E4HDa5UzKZNAsujMbxf87lcsbmY6N7rbp8Pp/i8bjm5+cNt0Z0K10z\n",
+       "9bDdKhaLFnLE7xmlNTgx1rmgI6AtcFHoJeCC8HoeMk08C14ALLdGo2FTMghAMOey2ax8Pp9evHhh\n",
+       "glTAfJoW1NG8AePj46pUKmYpRd0bjUZNQXF4eGh17enpqfk6U3/2+33z0SBlSZKN3iHpey3DqHXB\n",
+       "eznJ4W1L11wNEBwSoqrVqiYnJ01LCFNNkim0A4GA3R6VSuVWrBxQ3tnZmVZXV9VsNrW5uWkPH1NA\n",
+       "IDjKE0QP9AcIcuE8g8ow/MHD5L5WAyO1mVEvQLKfmZm5ZQyIDxuNGK6a8Bxw3A8Gg1YvgzF7TU9g\n",
+       "nUEckr7MU4FTAQ7MZI0oByZy3BySzOeDWhxCFLh3IpGwkTz4ND5wvMZ0Om3cEdKd8L1AXMBmYQLI\n",
+       "BmJjBQIBJZPJW2lU2OhSSsTjcXMqoozhNXW7XbVaLbs1+H6RSMSaR3oARu2SjPvMzXjXNVKbGWrk\n",
+       "xMSE2u22XePeuIHx8XGT8WNyPTU1pZ2dHWugQBEI60EpAQbLZqvX68rn83bieYN5/H6/NVyHh4cm\n",
+       "FkWNAY86GAyq3W5rYWHByDdQLREWHB8fGxGKmrXX66lcLpuD089n8Q2HQ2PujY9fxyvTdPl8PkuW\n",
+       "hXdBqXR4eGjOqWgmq9WqlWxEEtMcImpAkoV3M5pCSUZuAmkh1u3s7EzJZPKXUmJII7aZab6oKYGM\n",
+       "IJZzNVOngmIg7iTsBigJBYg3S3pyctKw1lwuZ+NaThmGE964MngQqVTKBgcISfn+zWZTiURC2WzW\n",
+       "XhvTQiiiPAC4bNJo0cAlk0m7acbHx42miWl6PB63PgB5P/AYpVYoFLoVwsmgx+fzGXmez+f05uf2\n",
+       "2gnDT0E6xY0F9Hh6eqrFxUU7oTudjr2mu66RmgBiBA4H4ejoyPzTgLfgEMO5QKMWi8UMyqIM4R9M\n",
+       "vNlQfr9f3W7XOnJgPST/uVzOqKachLwGMGmQClALTLglWQlwcHBgXwtFCRwKdHxYWwHFgdxwolM6\n",
+       "VKtVo6BeXl6aPwY178TEhGHtyL3q9bqy2awODw/tNKaWpv7nlK3X64bQ0PCScksWIpK1i4sLU4jT\n",
+       "eJMicJ81UiczDC58Gri6vKLTYrFozYrrulpbW1M4HLag9UwmY1ROBiGA/8PhUO12W5FIxK7iSCRi\n",
+       "JxQTNpz1qUOpXwkI6vV6t7BkvhecjVarpbOzMzOXweET7Jdam5E4J3W73TYbBdJkgR0Jcef7oAzh\n",
+       "9mEUjRoHZhx8i4ODA8OwvTcesikQGgwZUaLTUyAcTqVShq+DxMASRPFz1zVSJzNUT0k20s1kMgY3\n",
+       "4UCUzWbtysMzmSndxcWFqbNTqZROT09tgkaDh5EKjSLwXyaTsSFELpezpnBjY8NOK+iWXrErJ/fP\n",
+       "GycyXUulUpKuSwL4EkzSTk5OTOeIWxGlCbZfnOQQ6UlWhe1HBgwELWy24vG4PSiZTEaJRMLi1+C9\n",
+       "8L1o/BKJhLnpT09Pm0mjJONKn52daX5+3hAdvt+DP7NnMahoNBomfQL+KRQKJtkBjQDEhyMcjUbt\n",
+       "ZCEtKhaL6fDwUJlMRicnJyoUCobrBgIBLS8vG7sslUqpUCgol8uZW2etVlM6nb7F4CPYHX0fDwlB\n",
+       "PoFAwDIEQRIkmX6Oh5P8bvjOPJjU6ODSJLXSiDLiDofDZkJDsA/ly+Xlpf0cOPJTViBvOjg4MANz\n",
+       "qK1g1jxAPJzYCsdiMX344YdWVx8cHBhz775Eo5E6mff29kyA+vbtWz169MjGzo1Gw3jJEIi++OIL\n",
+       "w1tRcqD7Q5SJvs9b4wH0E/gTDofVarXME5nmh1N4bW1N0WjU/JiDwaBt7HA4rJ2dHc3NzZmyGs6F\n",
+       "JPPFcF1Xu7u7yuVylhLV7XaNegmZh1IIWA3yz9zcnEFnvV5Pp6endhJSNzcaDRtxX15eam1tTeVy\n",
+       "2YZK3FwXFxcmzvV6kwwGA/l8Pm1vb9vno67hhG40Gtrf37dJIU6kkgwPv+saqZMZQjmNBJvL5/MZ\n",
+       "/xbCDSbegPmc0LjEw4WALcdVDD4tyYSq4LaoqIfDoSlNoDziUMqpCRHn8PBQh4eHJlAFWsRUkBvk\n",
+       "/PzcsG82I03ocHidPsupeXh4eOtn7/V6NkJHKgWc1uv1zMkemy4eYJTaICOgHNBMr66udHV1ZQ8J\n",
+       "AymclLzfy2v4jnMSDSMYOA/wXddIbWbHcZRIJKzpYsIVDAZtfDw1NWVKj1KppE6nY00UGSSwuILB\n",
+       "oIrFomGpZJgUCgVLHgXb5mrGLBE/DLJCaL6wqOVEu7q60uzsrJ1y0jW7DL4whCDMVqhFQURAbgij\n",
+       "Jx+EcgRBbb1el/Rl+CUj50gkYha7bE4Og2g0qng8rlgsZicrzSoPPqUDpCdc+KmHq9WqfW1SbLkR\n",
+       "gDnxLXkI6PEsygjGsJOTk3a1UxcChXEaQaL3cmzR6yFLIvWUMoPrFuTg6OjIRs9gsVy9cIWpxTmx\n",
+       "sa+dmZmx6xaUhSubk5TXSv3PwAEyPafxwcGBTQ2BGPkeuJYy4MG2FzU2p2Sv11M+nzeEBrSC7w9d\n",
+       "liFUNBrV8fGxTk5OzMwdtTmCAQ4M/o03Hli2pFvY913XSG1m6tVUKmUYM/UfUzBCKrGe5d+lUsli\n",
+       "0Rhbo9zgTffmccRiMdO7cZ1DPz08PDR2Hrgv0zfgME5NxtFs8kAgYLcL3nPIn6TrJheus+M4BnP5\n",
+       "/X5T1XAzeTdTKpUyZhuvzcv0Y2oHXRPkBYbb7OyshV/yoDLwuLq60uPHjw3K8/l8WllZscYQf49Q\n",
+       "KKROp2OhRuVy2dAWGtD7rJHazN43AukPJzJlAB5zIBS8MbVaTZ1OR3Nzc7cyo3GRR4HMlAyneDa4\n",
+       "67p6/fq1EomE5ubmtLm5acQayhGgNDYvHGGu89nZWWOYMYaenJzU7u6u1cH7+/vG/qOuRZ0tyVKx\n",
+       "4BtfXV1pfn7e7GO9rxUMmZtmampK1WpVwWDQsPFYLKZ8Pm8REyA3HBKo2JvNplFdJyYm9PnnnxsD\n",
+       "kN95LBYzVGhra8tKEMx1aIrvukZqM4NIQG2k1mTiRaOWSCRsLAwiEYlElE6nTfrU7/eNdE+NTSnS\n",
+       "bDYNl93d3b1FMAI9oRnFS5mOneAaXh8km0qlYr522OZSjszPzyscDqteryuTyajT6WhsbMywdG8T\n",
+       "x2QzEomo0Wjo+PhYe3t72traso3sOI6KxaIikYgNl8ixLpfLOjs7s8FTr9czJIXXCh2Vm4T6n2mn\n",
+       "JMOgu92u4vG48Um2buLVSqWSPXjD4VDVavWBnO9d2KRCjaSbZiM3Gg1TRLCRJVlqExuOpoYxNSNw\n",
+       "jAPxK4YQj66QUwoyO9wMmjVq4lgsZjUvGHc8HjfvC5yJQE9IcoX/izUApQS+bY7jmAIcxUcoFFI8\n",
+       "HjdPDn5HQHsYuOAxwgPrbUi9jkgMQrxZKgyAuHlAM3BsOj4+VrvdtqHO1NSU9vf3jTuCKujBnsuz\n",
+       "aLTwNhsMBvZmc9Wn02krD5iSzc3N2Ym2sLCg/f1984PDLEaS0T7p7lkkTI2NjalSqRj5hgYRrSG1\n",
+       "O280D5xXQoTEiXGvd9OtrKyYqQwDHx6aJ0+eGOQIEw1NHnAlpyq5gFNTU5qbm/sFC1tsFkibhUI6\n",
+       "OzurFy9eWNOMDpCyi6koTSDxyfBZsACDlgpNAHRmYWFBn3322Z3f/5HazJC8SVpl80iyN51aV7pO\n",
+       "p9re3lY+n7eYhFevXplbveu6Rn88Pj42eO7ly5daWlqy0Et86VzXNVd9pnbBYFB/8id/osXFxVu8\n",
+       "h+npaZu2kTIVi8Xsagd263a7mp2dtUy/Uqmkd+/eSbqewNEEVqtV4zPDQX7z5o01gMCKYOR8X5z4\n",
+       "vfKnVqulYrGo/f19BYNBy+dutVo6Pz/X7u6uORbx+zg5OTG+B+qS8fFxsyFD2c7D/vMj73a7/YAz\n",
+       "exeDDt5AQi45AagLa7WaudZjE7u2tqZ+v690Oq2LiwslEgnzjCByDUlSuVy2EuTJkycWX7y5ualk\n",
+       "MmlBP5iFg5RQPoyNjWlnZ8dujEAgoJWVFcPDwVtbrZampqb0+eef26Bhc3PTcqkLhYJ561H3A0fi\n",
+       "tTc2dp15zX8zCofBh9k6/tIbGxs2Wo7H4woGg/ZvXE+JWoaJSAmTTCYNVgRnhlaL8z7cj1qtZiUK\n",
+       "2DSawbuukdrMmLGA7wITodr2+/12ivK5lCbf+ta3tLy8bE0ZmCcEdLgGJL1Go1EbqBA2ubS0pFgs\n",
+       "pn6/b1AWWDHIBMJONgcyJzwwwI6xuiWgExvY+fl5Q1m46r3GijRpi4uLphd89eqV3QrAX9/85jeN\n",
+       "gA/X23Ec5fN5NRoNOzVRrbTbbdNFcioDX9ZqNeNpx+NxRaNRlUolI0rBRGQSG4lE9N577+np06dW\n",
+       "zrXbbSNU3XWNVJmBI7ska4b4BVUqFePjktFB/YgrpiQ7fY6Pj83vLRKJ2Cg8kUgY/fHNmzdGNg+H\n",
+       "w6pUKjb9kmQNHU0WnA1Yc9SO3kAbrAEYPEgymVSr1VK5XFa32zWRgVeEQCk0Nzdn2sBWq2XNrNci\n",
+       "gJMVMxt+fzRpl5eXajQaWl5eNldRYtWYbEoyOLPValldziLSgmkpURk0mAx5oNxCB7jrGqmTGegN\n",
+       "wSpDAUnGoiOMJhAIKJ/Pm0SK63F+ft7G2fPz85qbm9PV1ZXJliDpF4tFyzABrUgkEqb4QPDKm4tr\n",
+       "kVedUSgUlEgk7GuAyUqyiGSGJ9K1/KpSqSgQCGh2dlbSdVN6eXmplZWVW4oRtH3pdFrvv/++GZDD\n",
+       "Vtvb21O5XFYoFLLvA+UzEAgoHA5rY2NDPp9Pc3NzdprGYjFls1mThPEQYr8VDoftAeXURZkdiUTs\n",
+       "ZoH2SVZMKpXSBx98cK/3f6Q2M8gDLvk/H3PAyYfoE6yWLh5SEFg1VlkA+0BTjH2B/hCB8mAwNYMD\n",
+       "7XX5YSQM6iLJ8FlIQahbQArQyAHz8TAwiKB5ZQLoDajEK5mfm+9BJh8PO8iIV2RLLe793QGfwQLk\n",
+       "AWV4wykbCASUSqXs1oHY5PP5jEgF/Ie8amNj417v/0htZr/fbx5nXKHIplCgMDjwJiOB5+K9zJXN\n",
+       "NMsrMG2320axlK7JMvV6Xb1ezzKmJVlNy/gbDjP5f9T2MMoGg4GmpqZ0enpqUCC2BJKs5oT15/P5\n",
+       "zNWfYBzGxlgAICBlosnmZQNx4vf7fbNEoImGigrxiIcJEQEwICUVcCKjfxAawn/Q+PE6+J0QxSbJ\n",
+       "fu67Lue+cVW/KctxnNH4QR6WXNd17vL3RmYzP6yHNVJlxsP6/3s9bOaHNTLrYTM/rJFZD5v5V7Ac\n",
+       "x8k6jvMfHcdZdxznI8dx/shxnCXHcVZ/3a9tlNZITQB/E5dzjeH9Z0n/1nXdv3bzsa9Lup9JxMP6\n",
+       "hfVwMn/167clDV3X/T0+4LruqiRLPXccp+Q4zh87jvPxzT+/dfPx3M3HP3UcZ9VxnD/rOI7PcZw/\n",
+       "uPn/zx3H+Qc3n/vIcZz/enPy/7HjOMs3H/8rN5/73HGc//2r/dF/tevhZP7q11NJH/8pn1OT9Odd\n",
+       "1x04jrMk6T9I+rakvy7pv7mu+89vTviQpG9Iyruu+3VJchwncvM1fk/S33Vdd91xnD8j6Xcl/TlJ\n",
+       "/0TSX3Bdt+r53JFcD5v5q1//L0D+hKR/7TjO+5IuJS3dfPxnkv6N4zh+Sf/Fdd3PHMfZkLTgOM6/\n",
+       "kvRHkv674zhhSb8l6YdMJm++piT9RNK/cxznP0n6w1/KT/Qbuh7KjK9+vZD0zT/lc/6hpKrrus8k\n",
+       "fUvSpCS5rvt/JH1PUkXSHziO8zdc1+1Iel/S/5L09yT9viRHUsd13W94/nly8zX+vqR/LGlW0seO\n",
+       "4yR+2T/gb8p62Mxf8XJd939ImnQc5+/wMcdxnul6c7Eikg5u/vtvShq7+bw5SQ3XdX9f15v2Q8dx\n",
+       "kpLGXNf9Q12XEN9wXbcnadNxnL988/ecm+8hx3Eeua77M9d1/6mkhqTiV/jj/lrXw2b+1ay/JOl3\n",
+       "bqC5LyT9M0lVfVmC/K6kv+U4znNJy5II9/htSc8dx/lE0l+V9C8lFST9T8dxPpX07yX9o5vP/YGk\n",
+       "v33zNb6Q9BdvPv4vbhrFVUk/cV3386/yB/11rgduxsMamfVwMj+skVkPm/lhjcx62MwPa2TWw2Z+\n",
+       "WCOzHjbzwxqZ9bCZH9bIrIfN/LBGZj1s5oc1Muv/AuHZAPr9VeA9AAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x114254ad0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "plt.gray()\n",
+    "plt.matshow(predictions_df.values)\n",
+    "plt.xlabel('Classes')\n",
+    "plt.ylabel('Windows')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now let's take max across all windows and plot the top classes."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "name\n",
+      "person          1.835771\n",
+      "bicycle         0.866110\n",
+      "unicycle        0.057080\n",
+      "motorcycle     -0.006122\n",
+      "banjo          -0.028209\n",
+      "turtle         -0.189831\n",
+      "electric fan   -0.206788\n",
+      "cart           -0.214235\n",
+      "lizard         -0.393519\n",
+      "helmet         -0.477942\n",
+      "dtype: float32\n"
+     ]
+    }
+   ],
+   "source": [
+    "max_s = predictions_df.max(0)\n",
+    "max_s.sort(ascending=False)\n",
+    "print(max_s[:10])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The top detections are in fact a person and bicycle.\n",
+    "Picking good localizations is a work in progress; we pick the top-scoring person and bicycle detections."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Top detection:\n",
+      "name\n",
+      "person             1.835771\n",
+      "swimming trunks   -1.150371\n",
+      "rubber eraser     -1.231106\n",
+      "turtle            -1.266037\n",
+      "plastic bag       -1.303265\n",
+      "dtype: float32\n",
+      "\n",
+      "Second-best detection:\n",
+      "name\n",
+      "bicycle     0.866110\n",
+      "unicycle   -0.359139\n",
+      "scorpion   -0.811621\n",
+      "lobster    -0.982891\n",
+      "lamp       -1.096808\n",
+      "dtype: float32\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       "<matplotlib.patches.Rectangle at 0x118576a90>"
+      ]
+     },
+     "execution_count": 6,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAXAAAAEACAYAAACqOy3+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvdmPZVl23vfb0znnTjFHZGZlZmVWVdaQVd1FqmVSomjD\n",
+       "Ei3BEiVYFgzD0LMBCzBEw4IN+C8wYMCCAL/IT/SDn/xkA6Rk0pxsUjRpkt0NsqeasirnITLmO51p\n",
+       "D37Y+9x7IyurSRhsJxuM1Z3Iyhs3zrDP3mt/61vfWkeEELiwC7uwC7uwHz+Tr/oCLuzCLuzCLuz/\n",
+       "m1048Au7sAu7sB9Tu3DgF3ZhF3ZhP6Z24cAv7MIu7MJ+TO3CgV/YhV3Yhf2Y2oUDv7ALu7AL+zG1\n",
+       "H4kDF0L8XSHEx0KIz4QQ/82P4hwXdmEXdmF/2U38eevAhRAK+AT428Bj4I+AfxxC+OjP9UQXdmEX\n",
+       "dmF/ye1HgcB/GrgTQrgXQmiB/wX4hz+C81zYhV3Yhf2lth+FA78KPFz596P02YVd2IVd2IX9OdqP\n",
+       "woFf1OZf2IVd2IX9/2D6R3DMx8D1lX9fJ6LwhQkhLpz8hV3YhV3Yn9FCCOJln/8oHPg3gbeFEDeB\n",
+       "J8B/AvzjF7/0P/7z/w4kKGOYVxWPnz6lalqUUmRZRq/XAwJVXRICVGWNc548L5BSUZYlbduilCTL\n",
+       "DFJJsqzHZDJhMBiQZRlVVaG0wjpH0zRoYzBaUzY1OIuUEqUUSikIgRACvbygrmuEDwghcM4hlMSH\n",
+       "AAKcdWidASCEQAhBCCEeA8iUxDsbB1ebRTgipcR7jzEGIQXBe4QQIOJxfvO3fpu//XN/CyHAWoeU\n",
+       "AiFk/A4gECihQAqsc/jgkVrT1DVGaaSU4AMyQBCSFgEEjJJorZFCEghY2+AJ+BAQUlAUPTJj0EKi\n",
+       "lUEEiW1a2rYlpDGxwRKT3QEQ8V4IeMAYQ5ZlhAB4DyEg0tg4D8ZkgMBaS2tbEJJ0afGaEbjg00SM\n",
+       "9xrSuP7rX/lX/PzP/wf4NL5CgPcBKQTee0IISBnHSEqJlHJxzQAhBHwaZ+89Ll3fi9/rnqX3LOYE\n",
+       "gHceCAghz50HwAe7OEc3DzpbPbYQgtZavPfpmkHK5fV219ddkwua1gWUlCgt8M4hEITgQQR+/Vd/\n",
+       "mX/wD/4Rzlo8HqHicdrWLY/j47z13i/OoyQEb7HWEgQE4nwQQi5GXhGfRyAQREixtERqhZSKqqrQ\n",
+       "KkNqRUBircVZiyRer1Rq8eyFWI5Bd5/OOYIU3SCdG3spBIIXfJRwyzH1Hu/FYl0sxj0egF/65f+N\n",
+       "f//v/YcgQrobn54/+PR38PGWXPDx8AFC8AQXFmPn04oVUiCVjHM63kV87unfSmnEyn0JIWhsG387\n",
+       "CEK8grh+fCAE4riuzJHVv5dz0H/p83/53/4zvsr+3B14CMEKIf4p8H8ACvjFlylQtgZ9qrZGZZrh\n",
+       "YIPd3W1aa5nMZhweHzGZnDKdTvF1m5x2dDBaACGQaYVWEh/iQ3bOMZ9Pcd4ymZyxsblJ0S+YTqc4\n",
+       "59IC8TRtgwiB1rWooFBKIkWaqwHatsF7h5YSKQUBQeMafPBkWYHODEIE2tZijMFaj1KK1sbFIYsM\n",
+       "kQbf2hYh08MQGiHB+SY6BgEEAWmj8N7hXBPntQg4LxaOQQiBRBG8AC8Wi034wKDo4b0j+IAQ0Tl4\n",
+       "wCiFUSo6DecI0ifnE1Baxp8Fh/cttra0Pm4Swguc9WilEULig48bg5IordHaoHScNkEIlFYLh+Wt\n",
+       "wzu3GEvvHZPJmBACWZaTGRPvNQDJiTnn8DYsFrn3HoQApYBACBZCWsDEBRSIjkLKtICDx1mPF9Ev\n",
+       "xH0xOiLrk2MTEqWWm4d3Pjoplt9FgHUtTVsv5mnn0H33PBIQkmnjXv0jVj5bbLxCgFBIpZAKVn2U\n",
+       "9wHnIeARPo2hUGnBW0JQeO9QUqClwBgNBNqmRAqB1goXLN5BZjQgcM6S5QXeS7wLacOQiOBASkxR\n",
+       "RMfrPUIqnA908XAILu3RgiC6Tc4nh2fRWuO9xTUWhEBJickN0oO1Nm6OKl6ztXGDQ0CwadNSauHA\n",
+       "F5tcemAibZCrjkt6t3g+cdLHDTl4SwdrSBuQEqBV+rYAj4Ag8CoAEoLAuc6hEsFRCHgvQK48O+/T\n",
+       "tRCBmPcJSMXvaC2jM/Y2HRdkWndGKQIdcAhpicfNyRM3b+/jPTsh0x4WvxvnTLyu5AT4s7DRPwoE\n",
+       "TgjhV4Bf+WHfqat5XPzWcjI+o+j3kEqxu73BtdcuUdctk8mEtbyPEIKqqTk9PeP09Ix5VaKFx3oH\n",
+       "IaCSk/NIZrMKk2WMx6c45+j1ekgRyIyirmucc+R5QdbrR+fhHDaA0TohDUm/KDDGpE1hTi9PCDwI\n",
+       "mqYB4k5praAoCvKsRwiB2WxGAExmsNZSNw1aK6y3mJRtkAJkQk1SaBZpCCHxCLy3rIC5hSk0Xq7s\n",
+       "0iIiCB/cAm2p5GwCAt94vEwOz3mEAJObNEkjwgoh0PqW2gUkIIWKi1KbOH9EcpXKLJBcVddQ17Te\n",
+       "LZBHnudorSMaNzmZNmitKasZ6+vraK1p25bZbMZsNsdai1ImRiM6RkBtU59HxbYlBIcUAR883gV+\n",
+       "4b/8hT+fCfpjbM+A/+J//6VXeg3/4n/4xRSdWpyz4GOEJWyMlDrk2UU9QkTnLaWjbi1KyMV8kSqh\n",
+       "WA9KRue1iGz9HFKk0M0/2S2XFC0sEavH2jbNn2VUE92gJBCjAohOWIr0My+QLCMo5/3i+l0AHdQy\n",
+       "evIBH0RyvBF8pZPHtYBcuF2hRIIZpIi3u8+IwoOIhwhBIFNkGUJAipA+D+fXw1fYj8SB/1lM93Ia\n",
+       "a1lfH1GMBiitaBuLrRqaxtHUFVQ1k/mMPC9QWnPt8i6vX72EdZ7JdMqsLHHeUVUVVd1QNg2Xd7Zw\n",
+       "Pjq0LOtTJPRunWWYG5x3GG1QJku/V+GcjQPowAFN6ROVk9Pr5chMkuUZWZZHp9c4yrKibW0KpQKj\n",
+       "wYjNzY2I3rWhbWpOz87IMkPd1IuJGkIMe633eNviXEAKyes3Xqf1Pu7YLFGKVgofAs47hAWl0wwO\n",
+       "oBBUto3ICI8yktour0kIFVGq0ggBTdOgjKZtWkLaSKy1GK2QSsewLwRa4SA4pNAE0oR1DoFa7jeB\n",
+       "SF0l5Gxtm6KXSBPFjcKh1HyJrIKg6BUrC8KBjTSFDzYtujTpvefNmzdpmuocPfGX3f7mq74AQMno\n",
+       "8JSMFKHzHqN0ijSTC4uPG6klUkZKynqfogjwwVPXFcCCnnLSoYVESBmdqs6Wjkyo+GcFtYcQ0vwM\n",
+       "vPPu+ykKCckBR3BBECnS8RH9AkLKBcjtwJ8UiZp0ljZFbUopBCICjY6Gcy5FSkv6LEY+LkYz6d8y\n",
+       "RQIxoI4UkweUjr/nkAu0DhCCXGx6HY1FOtYPs1fmwO8+eYgxhpPZGIB+3qPf64MPOOHJkCAVyqgY\n",
+       "JfmW4BQBhZaCXq7Z2NiL4ZdQWGcjfxWgLEtms1l00GVJOS2pm4Z+r4cMgf6gh84yqvkMGRLP2RFk\n",
+       "BEajIVJKZrN5QoAwm03ITBZRaBUdVV235HkPYzLmTJjPSwICY7IYpjpHVuRY51FEuqGuLVJpWucZ\n",
+       "9EcMh+uE4Ni+dCnxiYLx+BSIznRWlQQfEmKJHJ11bdwk2pambcnynKapKWQvLhoJMgict5Tzkl5R\n",
+       "4EPc+b216DzDOsfpeMysnLE2GrCxvo4UkrppUSncU8QQXCgVJ7+1BBc3N5FQSAz7wsIp13WNx2Nb\n",
+       "S57rBbVDiNxlR2fpLC14Ik3V8c5KxU3H4bn9wXvIjn+WF04c/mI48IhEPRABh5cStIDkhCBGipo4\n",
+       "X4WInLD0AhXAW7fIXyilovNzDu/Adw41oe2lvw7I4BM6j+hbIOKaF4Fbt95NGwiL/IpITrKLJhf5\n",
+       "C+/xwa1QHNG5dseSMjpka1tynS/uqXPeEZDErWKZe2GxmXjv0qYkI33SUYDOEnzKl4kU1bPMpUGk\n",
+       "omL+SCQ68i8oApfG0PrA6dExg16fZ0+fg/fsbu+ghSZYS24ysl5M3OV5Rt02aGEwKiLpumkSF6px\n",
+       "wVMUOd47NtbX2FgbYVsLnRMSgiLPOTk5QQhBf7TB1voGdV1zfHJCXVUYkyGEIMszCCB8YDqZoguD\n",
+       "lpFzdrbFNXWkU+ZzbF1T5AVmbZ35ZIwXku2dXYKX7O8/5emzxxR5zuUrV+gVfUIIjIYj1tZGnI5n\n",
+       "3L1/D6UUg37ksgf9PqPRGk1T470jM4a6rhkMB5hM0+/1OXh+gA8OqSRa6JjA7fXiZFQK6x1VWaGU\n",
+       "pugXWOcTWgEhNePJjFlV0rQts1lF0zQ0rWPQLwjWoZRGSYkQKiEViVAabz1aRppHa423lrrxiRtN\n",
+       "jl0lTldB6yJ/HZ2vSIliFk65C0uVUmgdHbp3LgbHCWFZbxF+icwv7NWbtTY6pOAJBLxWyyTgIgeQ\n",
+       "OPSUpAfouANtYlIdAt75yI8jIyWTcoZCCJQx5yiSjj4BFs64c/AS152AmC4UCOEjmkEsnTkCZWRK\n",
+       "YAZEl7/pqBspSdkXrIsgjOSYSVy3EJKQonyR7k8qlegVv7j3LkHd3Y9WEjSLyEB0mLGb2gmpd5Go\n",
+       "FyJtlF9tr8yB3/n0c/Jej6ZpGI2GaGWYTuZIeUaRF2xvbiKV5tHh04jahCArMtbX19IRBFpnSBHp\n",
+       "iPm8ZntjyHQ2WyStgvdoo1MWP0TOTQiKLKOta0aDAUWWYZuGwd4eW1tbACglGY/HKWwKTMsZ09mM\n",
+       "qirJ85zRa1fRxiwSNQfPD5nN5wyKDKE0bTkjzzNu3Xx9wf3Ox2dMz06Yz0tOTMbW1hbWw/7jxwxH\n",
+       "QzZH1wkSgmupbYvRmsZGNczW2oizyRl1LZicneBsVOM0TYsPgf2DI3wI2BAYra8z7PfIMo1Sirpp\n",
+       "6BW9pKJRTOclx6cT9g8PUEazNlojhApCSBGQpy6rqMRJ0YQLFhV8jCoSxClETnAp6UjkAH1QOJ94\n",
+       "qKRSiQnYGBL7NEEDHuscznmkkBidRYVISlB7F86ncMQy0XZhr94GvV6izSwueKyKfHAIAZwn+KiY\n",
+       "6SKs1YcptMRbnxy7jPMhJbWVMEvKhCUP3CmOrGVBx6wifSEEjXMxlkt5IiEjUBBeEJIT7Rws51Qo\n",
+       "AeETyAhRESWkQClD0dOEZpWu8bjEU3cRo/OOxrY45zB6kBKeccPxyclDjCw7QYO1FqlkJEpS5L/K\n",
+       "dbu0iwkRE6M/zF6ZA1dIhr0hNot88sbmkCaMmdYNJ9M5z45P2Nvb43RWRbmglsiy4qysmM/nKKlx\n",
+       "1rG+sYmzgbZtOT05xvmYuOz1eigZB81kBm8tZdkktNgSZnOkEFRVxXgyoZxOOTk8ZDgc0u/3kUbT\n",
+       "NA1FUXBpd49ePkZKSVEU2LYlyzKKNJHX+gNm83mkTLKMyXhMWZYcHT1nOFxjb2M9OizvyHZ3qKsm\n",
+       "KlQQ7O1sopTi9PAAIQW2qdnd3WFjY8jh4RRnG8bHE0yec3B0xO7OLoU2eOcxUnBwcExdNxhjaOuG\n",
+       "mZxipEDkktYJyrKmamqcg42tLYTSFMMBGz7QtBbrPMMsY20wJFMG6xvyzJDpmPgZDUdM5/EZ4D1o\n",
+       "hRJqkWdwbZuouigbtM4ilMB6T5ZlWGepk6rDGIOzAmddlG/KSH2Fao6UGiGXUrK4AFNomSRYL7N/\n",
+       "8d//82UI2m0AISSEt0rJhhh0p7jcJwTp02KRSpEVvXPyRFg6kbZtF6oZ5xxKx/nh031mWbbgSVfl\n",
+       "jR34iNLBSA91XKpPCgedEOzv/M7v8K1v/VFSP3mapo5jaNskmYsJuH6/x/b2Nof7R8zGc9Y3RmSZ\n",
+       "xnqL1pK832M4HJHlfYaDNabTOe+8c5srl66ijMFah4dF5CSkQBH5ZUlUMTXeEQL81//sn3xpzDsa\n",
+       "zBiDCh4pAlLFhFznUaIzDyn536HclPeITyY5uiQ7XURYMVoL3oMUeCEj39wl+ZRIScMkySSOXZAd\n",
+       "6g34kCga1dEbguAjgAgehPILLl2k6FyHqBQSnZQwIe6QEHc8bTpHcDgXqQ5tNJmMsuLWrsgKA4BE\n",
+       "6xXaJslSo2LLAmIxNyJqX+HYfUhc/V9QBP7h+x9ydHLM2XTK+vomSMWVa9fZ3Nzk4OiQd999l63N\n",
+       "LT6/d5/T0xOUkjRNlWiVYRqowLxu+cFHH3P50mV2N0bx4FVL3bj00CMi6Phr7+KCM0TZU57n5CZb\n",
+       "IPWqLGOyL0nwpmdjgoiJmm5RGmMYjUaUZbngu/LMoFRBP89RwdHPDWfHh/RzTV5kDAYDtDYAHB0d\n",
+       "Y4whHw7J+gMGgz4ff/QDTk9P2d7d4uzkGN+U9Hs97t5/zPP9fbwU2BDItaEpK8qyRkrNdFZy/cYN\n",
+       "RuubTGdzGmvp93rkhaCcl0CgrmusA9t6xpMpk1lJVddIYzBZhhaWclZiqwatIiudZ1mkcYo+rqmR\n",
+       "QjEaDKiDo5dHXjA4TytlUpV0gWdE0m3bJL1w1N12FIuUEhc8MkR1gfOO1raMRsU5tKVYOtAOxL3M\n",
+       "PEknvZBjppA7LWgVzqM1GSLvajIdw+OoPVs4yMhj+kjlyCWHmZssOrqiwDuP0Iper7eShAoLx7bq\n",
+       "/LXWeNdirQfaBTXUXaf3ntms5ezsjO98509iwt1GgNA2c5p6htZRgiqlxLeeMK+AU2zr2NnZIcs1\n",
+       "1jasrQ3IcsPJ2Rnee3r96AzqynLn00/QUnPlyhWUjKojERxKkGiEtF6EpKs/+KrcsXPd2iLSCmqp\n",
+       "mJBBghBoIQmSKC1MSTkfwGu/3HCJ3PDqeIkIvUHJ+KejL7oEaYjP2Cu1QPhCgBQFS9mjO3e9wQuC\n",
+       "AhHi30r7qKNPDpU0R6Ni1tPaWPegM4N0umNhFjpupQza6LQJB5yz8f5cVG8ptZTWQqRdOlpJpJ+v\n",
+       "5guilNDSSQm7Db0b6x9mr8yB52uGa+uvMRiPuX37fWZljZCas8mETz+9w/HxGbu7V3htVHBj63WE\n",
+       "kpRlGUOQEDg9GdO2nt6lEU8fPCUXOW+9dQ3nXJQLtu3CsTjnKMuSpo0EwLycIZVBSsW8dAunEXWe\n",
+       "esHndUU+qlNVJL2ysC3j8oSqrGjaltwUaGMSdmjRIvKEm9uXMVqjhEF6RTWes762xs5oA3ygVwzY\n",
+       "3NpBKcHh/Ud88N5t9nZ3+dbBH3B9uE05n5NNW/6nf/mL/Of/2X+KkwF7fBg3FRfY3d5lfPiMWimG\n",
+       "TctWXvDg0RNGW2uY3FNPauqTMU4anMkpdq6yXowoJxWzgxN2Nrc4fnbAdKjRGz0aHNQtb127gZUZ\n",
+       "R1j2fcVJecLlfp9NAkJntLMZfa1Y21gj7KxRe4u0UM8atNlkLuD7D5+wtTlgOByQ9TOkc9hgKfKC\n",
+       "fq9AS4UXEldF/fhMSlS9j6DByhE63yMPlrp8DIVgrrKXzqOzcozOM1SWx+KaRLeYEGVcFYbgohxR\n",
+       "p8KMuPmQCjEkVduglaGVc+Y256CcMWmeo71jgx7X1nK2eoKq1JjBLmM3w9t+oltj0ZXSIGTAtk3k\n",
+       "/YVGCo2UmqyfETXWUVWkRRSYTafTJEuF3/jNX+fk7BhhMryQ1LMKJbLoNFx0HlJFeaYIgWpc0gbL\n",
+       "tJ6T5wbfVPSnBWvDPpmSzM9OOTs+wfR6HB6f8uYbb/Hk6X0uX96Nkj5hoqZfKRAktVMbkSWK3C2L\n",
+       "yF40pR1Rn58KVNpOvx1ARSmq8zHqEnKZmFQibrR+Qb3FQi/n3KK4RkqJSnJeb9t0RkG3mwgRC5xk\n",
+       "UBCWunEd2vS1KGrwgmWkk5LsHQoILkqPg1oWy3WFVK0jbRxRcCV0qpWwywSmQ56LsjoAEUSz4MOB\n",
+       "JfUTQ4V4buJGpdItLcFFvLyoZlnWH4g/pdvJK3PgIQTKek7bNjjnePTwAddfv5miYM/dzz/nyuVL\n",
+       "QCArctq2ZTgcRpQlwFrPb/7G/8X2zi5v3LzBX/vpv4Znshi04GKY3raRn2rbNhZvpIfqfJcACYvv\n",
+       "dCiqU1TYVJxDXaG1QiuNbT3Wxp9rndHPDMbE0FglIb+1LUFCVZaIfh8tDCfjUwgBe5qKinzgtJpz\n",
+       "/+ljtjY32XvtCvcfP2KwNsARuHP/Ljevv05lG/6rX/invPHmm3z6xWfcuXOXrc0Ntre2OTw+ovWe\n",
+       "h08fUQyHTGcl9x/c5+xswNpAcXx8Sp71yfojHJ7gG8pywsnxISZTVE3FpUuX2NldR2WSh48ecvPm\n",
+       "Lc5OxvgskAvB7PkYdzxm1q/ZuzagtS1SQFO33H/wEDkoQGmE9eigcG7KSV3j24b21OMciKKHlnEO\n",
+       "69CijUarVC3YOgwCqpqhyXCtY94KoiTB0reKsirp914+VdfyXkRBdYNUMe+hgkATkD7gjcTLSGOA\n",
+       "S4hRxQQXAScceWEQQvL8aJ/D00CJwCpHY2sMniprGF3aZhgGHE2iJFSmCj4XHPiAt+BSdW8IAbyL\n",
+       "cjbvcGXUBUsgWIeFRSFOlg34/d/8PR49esRwNEzzx+Fcu0i+x5A+RqBtE+L9SYnKNLnJMFLRW9sk\n",
+       "uAYlVdRZS03b1jRlTT8veP50n3424ODwkM3NHYoiYz6fL5C29x6pQix2QhK8RuuX869aL59FzC+t\n",
+       "RhUxYuqS0qsUQIysRKwziJ8QEEgVKYwFTRZlG+BWabMU6eCjPnHl/IsjifOfd8hehoATKYkOiXfv\n",
+       "zras7IS4kaku8Zn4N8cKnUfnmBO9lqSJkkgbIsK5y15F2SGwGCPxgl9eOnKJUmLhl/40Ce2r04Fr\n",
+       "jfeBTz/5lHv37mFt5Py++c1v45yjKPr88Tf/iL/x03+Foig4OztjfWMt8Yoe71uePH3Iw4cP+Kmf\n",
+       "+mmePnvE3XsfcevWLW7cuEFTVTEMbdtFSJO2vxiWiGUW23u3CIdsSkh4l7LsIeCaesFPNU2D0/Fh\n",
+       "WGvRSkBo8FYggkIXGW0QIBSOgM4zRKY5PT5kPpvT6/Xo9/vkJmPeVBwcHXJ0doIAdq/scTQ549nx\n",
+       "IVJIBqdHHE/HaCG59+ghJ+MzFILpbE7bWuazko2NTUyW88ff+Q7v3v4apiiYzGfUTcv7793GOc/O\n",
+       "3hWOz8Zg59y98xEqK3j79m0ePnhEVc3wZYE9KVlvYD0Ydl6/yayx5KYg1C31aAsXLOW0xEvHfDKn\n",
+       "n/XJih4y7+GVomlLCqHRGnLXsjnsoWqLth5pU9sCEZKcymFlmyagQCrNqMgplEMNBpzUBUFvEWYT\n",
+       "2pmimlaMtkYvnUdhXpMZg8VFXXEImCwnF5rgPVa0CBELpK2z1M6TZSYhv0BVRxlmXbccHj0hqB28\n",
+       "Lahrj5EalUlcmCNUH6MKmsOKbG2IDAK0wLuQ5o8leIeWMWkWK4RjZWPwEucDtkMNsEB9VVXx/e99\n",
+       "j15RUM1Lsizxyol+cN5hnSUr8nTHIlX6euqqZmdvl2o2Yzyfs7E2xLWO3cs77GrDoyfPcD7w4PFT\n",
+       "fvbf/nd49/bX6Q+GqaBtqYs2iQ4QPiYKbWsRwrxktKNVVbVw/Eu6RSTkvESfy++sOCHnU4VoPLdS\n",
+       "UX8NpLzFkjLJ9ZejLvclaV38by+WNMyCRksLXABGSISOGvPue6uJw+BlovnOO0yJwKWS/oUWPD2H\n",
+       "7j6740mZLWscVq5jWQcRFlXaQi3bcKyaD+LctXXH+Cp7ZQ6863fycz/3N7HWMZvPqMqav/Lh15iX\n",
+       "c0CgVAxvH915zJMnT3jvvfdo25Yv7tzhD//wj/j6++/y8cef8t47b3Hz5jWq5pjZfMK9+1/Qy3L2\n",
+       "9vYwpkMRUZkSd9IsDhQd97kMbWRSnoTkvBECHVp8Sl5BzOV134mI3C4eWOUdbu5wDvJBjswUTjj6\n",
+       "60PyYY979+5yJb+CKjRVWzPaXKeal4TgOZuNMVKh8oy93V3Qmv76CKUUa6M1nh8dMlpfp5cXjCdj\n",
+       "tnZ3aVuLtxaVZ+xeuUTQirt3P2Nja4fPvvicwhgePXqIA9752tfZXOvx5rvv84ff/DaZ6UcHv7nO\n",
+       "2f4Br1+6jBEaF2C0sUk7rVFeMOhvMK7HPD854ObNy5i1LZwNeG2YBoe1AaVzJALpPdujEf1BjlY5\n",
+       "goB3Fmdr2oRQREqQKSmRRiNE4NnsiFy1VB7GYRMz8PjWRUnpYJNsZ++l86joGVI1NJnWSCHo5QW5\n",
+       "0gTrmFSTyDtqtQBu1jXMqxlCKqwLCC2oXcWsPMP5jJAZYgweUD3F6fiYzZ03ODoqKQY9pmVJL1NI\n",
+       "IXBJ76+1QatY7YuIfTRU0q+HxKMLrSP69J6mboDYm2Y6HYMQ+GCRQaCANlha5whCkBUGKUlONzpe\n",
+       "JwR53uPu3fu8/eZNgitQSjKbzchMwfbODltbe9y994BBf42333qXn/iJn0RJzXe/+12GQ4OUsYLS\n",
+       "OrC2WaLxlCf6KvPWLtbOkraAtl3sTwuKIeZGujL1QNHp/ZNOOrILSz48/XI8zkv6N8Vq+aXji0g6\n",
+       "Jiy7dd6h447iCF1UHjzCxRYCoTsHUf8tCCgpojpm5RghxCS97IqXVGrJEJb69uDj2rd0uZeYMI1P\n",
+       "cjke5xQ1K45/1YkLqRdOu5Pa/jB7dSoUrfHOMRwMcM7R7xXkeY5rXdR81zVKa1rvuHr1KltbWwyH\n",
+       "Q3ywFNl7vPXWG2xsbPL+7ds4WzE+O2Jvb4/9/X3m8znDXp+qqhLVEW9TJnmhEF11YvdQVKJuOq00\n",
+       "KKmXIn2XUE/y9D4lO2JSNGlZE8r3siuJj4tNSon1kYO31pLlMaHpgc2dbcZnZ9jhgE8//ZQiz2lD\n",
+       "w6XXLuFay6wu2bm0x+NHjymrijfefAul4nTPewUB2NrdpWpbTudzPrv7BTt7e/RGI3YuX0UKxddv\n",
+       "v8vZyQnPjw54/vQhxwcH7F25Qts0tE3AhcAXTx8hyzlrTYmqZoy213h+esz0ZMrl0SZaSz782jfg\n",
+       "Y8NaL5CLjLYRhKJHU5Zk2mBcoHAOX5WgQGeGoPIYxViB0xCCo2lqgoghaOMtobVxs8x7VN7hB2vc\n",
+       "eTalmR9SeEvRNgx7BcG9nAu0KpY+ewFIhZdgdSAIh8dhjFoUZbRtG1GW1pg8i/rzEBdlJjQ3b9yi\n",
+       "ths82p8QvMMUhrPJmM0Cpk3LR5/dIe+9jsiLhORCLJpKTU5ig7BODeFoXUMgoGREs0apWBQVYisw\n",
+       "pTS7uzuyo8crAAAgAElEQVT87M/+De7c+ZymqanGY6RMtQjexerAhPAjcPALNU0zmzEYDnm2/zwm\n",
+       "MQc91tfWuHrtGscnpxyfnHH92ut8cucOt29/wKXLrzGdTNjY2EhghYWUrWtApZRC6ajeqqv6pWOO\n",
+       "8AsJHimRuXDoyRkrmdQhzp9zXLOqjHUGWkeViXMgup4/sWpTahXX6ZcceFjozBErraHCl5N9AnBd\n",
+       "P5YUeccNR5xrRNb9HV8kFvMnofsdEcGHkjIm3b1LDlzERHbnoGXsHSPFisLJ+3PovDukThuY8B7P\n",
+       "ebQdUmS1SPCG87TQy+yVOXCTZXGStw1CQJEbBLE3B0GiJQTfYvI8FpBohfOWTCmGwyGbm5vYpuWd\n",
+       "d95iPp9jjAEzYmNjI950QitZltE0CV2ElRArLMMbt5Kw6egW75ca09isRi4Qh9KSTEmsDWgjFlxl\n",
+       "CAGbuPdoAusdxitG/T6tc2ysr6O0SqhNR822ybh5/Tp5nnN8dETbxIZXbdPSNA11VZNlObfefIv9\n",
+       "/X0+v/MZV69do6oqTK9AGM3mziaXr73GpctX6PV7TI6PQGX8m9/9fX7y6++zNlyjcrEAYzqesLW5\n",
+       "yXTWIJRkZ2+X7z75Nq/dusHW61f43kefMJnXvPPubeoA+8+fMLFTnj19RHZlyLwVZPmI1nkOjo64\n",
+       "fPU6g6Kg7xwYhcxi2OmauDF6ZXCuiWXKSExmQHYZ/yivPLOeor/O2GU4J6iDwDUO1wJCUD2fvnQe\n",
+       "lW1FY1tkZpITl9TeRVlca8G5hLTiIlU6i3MlfoKSAtu2qTHTJpOZw7sWIWp8cOQmw8sBT56XzBqJ\n",
+       "0y3GwNHJDIFEG0NmCqyrmc3nqXVDTGTppMwR3uOtX3DbSkUZpvWOum34xje+QVEUPHnyBL+5jfeO\n",
+       "2jscscVAlG8kpBeg9Z7GOfIs59qVK4yGfba3t8BZxmenfPfjjxlPpmysb/L8+Ii333mPrCioa0uv\n",
+       "P0jAIqJKRNRPF5nBdsUpUhBw6Ozlm2bHLRNCqhZ0aQ0sUXDk1bsiGFJiV4CRtM7GjptK4v1yI4ib\n",
+       "AAgVnb9K7iny2/G/VOKoFwx2OqURL+jDE98c13pY5EGFYFEB2c2L+H+XPhNprkR/oUXsX0RSJvmQ\n",
+       "Igff9eNc+gmVNlaR1DNpkFIydakLDyE68i7qX+reI82n5FKO+qfZK3PgPrjUWXQpl1FKoLWgaerF\n",
+       "g2pSJlrpOGht2tXauiHgmM2nUVsbWly7bAsp09+dEiUWlCx3Nr6isu8cL5YG1yLAdZ3a6Gi32LNB\n",
+       "LDupxXqv+F0pZayQ1FF+5kPAGEVIDaU8Ub412N1baKqFEAx7/QVPX1c1UkquXL5MAIbDEcO1ERtb\n",
+       "G4xGIx48fIjWGus9W0IwnpyhMgNESuPk9Akm63H33kO2tre58cZb7F67yXc+/gyC5Cc+/DqZyTk8\n",
+       "3ifvFxzNxjx4+IC+lDgP+48fUfmWYBsefv4Rm0XOd55+yiAb8td/5t/le3fu0grBwfN9Lt16h2ef\n",
+       "fcbRs0dcunqJeTNng3W2t7co8oKsP6AsZ/Q2thitjZA6ltOfHJ8ynk7ZdZpiMOS3/+QT7JmAXGFw\n",
+       "vHPtBlvbe3xy/+GXHxaQpZUZrCckRCdlTls7XOtQSPqDPhsbG6l5UpxXDx7cZz6bQwgURcH45Iz9\n",
+       "45raarwrMXnDbHbGzqW3cGXg6X6FKXZiQljOKHp9pIwtBuZtTfAOoaBs5jRNg8nMIok+yDKCc5gs\n",
+       "Q0pNWVXUbYNzniLvRRlqnscNWRk8JJle7EfTlZF7HykVlMYCo/U1JtMx4+kZeS+jV+Q83n8ae91I\n",
+       "wWB9jf1nB/zD/+g/ZlbVqMmUzY21CDKI3LDEpcZKDimXKBVsSmh+2VxbEylORVggxhiNLOlHsUja\n",
+       "dXy7FAKpRezroyQKEztCduqQAF4KCA48uCTci1LOqCLxbZ1o0LgWpRA479HCrHDKIRaPSZk206WC\n",
+       "JXZmXKUz0loWJGoGFjw8JAosOnmRJLAiBJROrSW7XSdAcA5EbC+w4iaA2JJLSIlWkXprIOnS/VIr\n",
+       "H0c1nj/RNP5PceKvzIEvvWH6V3A433XpWyYMpOh29vM9lyHE/thLhdG50GP1vjsqo+O3hBCL0tnu\n",
+       "97r/XqALlg5cKXPuYZxz8mnCL4Ik78Cn1qcr31epiY8QYlFeHDn5yLMrGaVTXVRmrWI4GEY54uZm\n",
+       "TPRIyc7uDm/zNt473nr7LcqyQiZK4Ps/+Ji816epGh7Xj7j9wfvMTk9iEbFWND6wvrnB9vYWs7Jm\n",
+       "fHJEYTK28gJfNQzzjCwElHVsbG5QSUGjFW/ceIePfn/CdP85Nq+ZHY15cPdzjJQE23J6+JxHmeaj\n",
+       "7/4xG4OCjz4+4tH+I/7WX/33GPo2th9VPWrb4kvPrCoZDodsbG/RH62RDwb01YjD41M+/87HNOtX\n",
+       "MZsZWWjZGwx5/bUbfPtPPnvpLPrwrXfYP3jOtGmY1TW66HN4dEq/GKCF4Xg6Ybixw87eNSaTCUII\n",
+       "Xr/xOru7lzk7OeLOZ58xPjlGCcHl3R1msxZdDAh6TL/3Gn2zi/TblOUpShoyM8Cplmoe56bSkqAC\n",
+       "rasxucYohWwiH25UhjES6QFU7HWTK+o2Fl4pHamwyWTCr/3GrwMwPZujsgznY2SaSYlsLet5j6as\n",
+       "yIZD5iJg1kbs7z9lrdcnBM+9L75ga2czOjkBvUE/VkkGywdf+4DnByfUVnB8fEwIsd1rwDIajXDO\n",
+       "UrctKjU0szaCKx/al465SAlWUtK2dU0M9zGpI6ZO8l2Pd6CXLQQJWiJcp71PG5Xoeot0Fbix6Mf6\n",
+       "WOyS5xm2cRFBJ4faUTZeRKo8UkxLOXDnABaR88JRn/cjMq27iICXvb+7X1mAtOSPFpHQgldPLYFT\n",
+       "ncMi0lhZ+6vJSiFigZwSEhFiS+PYFjuC2NyoBaj1K4nQr7JX5sAXzlaQerAvG8Ikr50QbZcBJuLb\n",
+       "lWxzLJvtnKlfhFmrm0M3eCF0qYpo4YWfdxZecOzxM784rxQihngsd9jV70vBgrvrGMF4RWFxrI46\n",
+       "dGnnXY5JbMAjU99qH2Jv4rKtKYpiEWKFELu69XWP4WhIVVcEoXj33bc5ODhkd2uTQksypTl4WsTE\n",
+       "3qBHf2ONwWjEz+zuIELcNJpZxeeffM6l9Q3eu3GTDInuj8iyHqflHNUzuLMT2ukZvVwxvHKF+qzk\n",
+       "4NlTtl+7zs5wiBUSb2uuXb3MoFdweHbET735M7RGc9jOyYVmUjYEPIUUtE3DZOY4shVN06KMYavn\n",
+       "qYPHisCsOmM95BSZ4M7H3+b4bErd8Zkv2FZ/xN67u8zblqPplO99cgfXgMgzil6fvlDorM9kVlE1\n",
+       "kc8cn014/PAe77z5BpPNI9xsSr/I8AZCWTPqD9HFCCEM5WzG8fER9+59hrOaYjAi67XUTaygG40K\n",
+       "1jf6zOanEBoybRgMhmysbzHoDzg7qynnNf3+IPaa7vq+CIHSmoePH/Frv/prkRppGoZrI1rbooIB\n",
+       "Z1Ftw4bJuX3tdfIs47OH9xlPx9S+InhBdXyCyTTTU8nRwTOUVgwGQ5xreXD/PkXe4/jwKNY9eCjr\n",
+       "hsY2sUVyazk5fR6LaUyO8gapNL1ejs7UVzoPncrOIVGOwiwSrBG2RDVXpA+69rDpfR9WLjTVXbVs\n",
+       "lz+K3HZEntEh6kRpplZTISBCTBJ2rWONMXR97mMUkNrIChYqm5D+F3NfAtd22B6Esx38Xq7lFFVD\n",
+       "lCt3/Hxc5ilxK0W6fhkbdPlYZbr03V0PpsAqsIy0UvyZ87HzoDYGmX4W2YJUUOVZjNFX2avjwFfU\n",
+       "ITFwSLxP+iSkhJBIbUY7iqJzpH4xMN3D7RoepaonVjjslFDpjhtP+3KNa4fWz6F9HwAXNwsRJ2/H\n",
+       "X0GcOotdXJDCQnHOsasXduXu87CySHwIy4SY9zgcVRsVLvV0itIqopngkU4steapt3GR56yNhhwe\n",
+       "HrK3t8Vbb77J8eExTdPG5lXOIozi+pVLkRJyDiU1vbU1RptDfuKD92nahvXNTU7OxlxFcHp0yJOH\n",
+       "d8l6mp/88CcJWvHozj2asub61T2O65qty5d4eO8+WRZfkLGxs8XutdeYTZvoeAOcjmcIApnJCN6T\n",
+       "W0ebXraR5znjecnx4SkuD9z+2puYfs7R5x/x5rWbnB49x9mXh5JP9vdZ29yAPOdPvv8D7j55Tq+/\n",
+       "ztyOmU73eXLwiDfffCNKN3VsSlZNJ+xtbdDWJbdu3sC4ig9uv8f/+sv/Mz29xU5vC5FJ7t97ytbm\n",
+       "dU5tRT0/Rul15qeSRw+e4aRib28TYxycTTk720crT12V2NbzjQ//LX7+7/wdjCrYf3bM84NDqqpi\n",
+       "WlZMZnMyk1HWNb/zu/83Z9MphFioU9bziOylQPiGvdGIr1+/gRvP2B72Gb79FvLhF9w7PWLYG+Fq\n",
+       "R6E1vX6BdZadrW3uP3jI1vYuTdWSScPnn33CzbffxyEwmcZ7F7tYajBoEJAXBfPaUtclLjj8vD0X\n",
+       "jZ6zDhAl1NzJAFcrI0MI57TgnT7eB4mMgXPS40uqql42bRKpdwigRUDiadsW3a3/RI0YIxcJ3bCQ\n",
+       "JXYVoURAmHqPrL6dSEuN8y1dZU3nI0LwSW+/ROsx+lbn1mv379gLXy0cuRAC4bqWyMuGVKzo0Vff\n",
+       "yhTpqdT4KxV5idBVh35ZXvhV9uq6Ea5MDiGSg/XpEQm52Lk78iJmieXC8abbB2IGeDV7K6VErtz/\n",
+       "atjU8V/xWOeR9up3V5G7EultGR2Nk2ibEJbfwccIwAuZmueEc865u99uI5Hdq8XkUs6IjzxujBQE\n",
+       "DsiKfOX6Qkym+Fh62zQWrVTqy6AAz9bWJsNBj9xIjo4P0NpQ6ALVy2i8i53YVCA4i1IBhGP3xlWu\n",
+       "v3YJ0bQEDXMVWLu2i5tV7G2tcXl7xI03r1DZmksbu4jasbm2zvreLnu5QWQZO2tryNYzr0qOZxN6\n",
+       "m2tc2srjZCUmZL1tEaHbdBTT6YSgDArNzLcM14d88MF7tJmkrk55bXvAds+wPthlQvXSefTk6IDj\n",
+       "piSYDDHoQ55z0tTMZhNm0zkKwfODQ2bTCaPhkPlkyu76iL/+U9/gyvabHDx9zLf+8A842n/C7OwR\n",
+       "P/PTX+f0tObJo8c8uveYja/vgZ0jqehlG7S1JtRAHuj1CvCWuqwY9DSvvbbN7vYWRwcnBFdx+Owx\n",
+       "g/46W1vbrK2ts3flCk/3n/P9H/yAz764y6effsazZ89ikt2LBZrUStDLFNs761wdriHahus72+w/\n",
+       "22fz2mVy4RnkCuEchdKs9fpkRazazKRiczRicnpCrzeknM/417/8r/gnv/A2rRO01ZyiyGirKVVb\n",
+       "Q4i9g6xtIUlnm6biPIN73jrZbORtBTqPc9Rau6ipiJFsdFrWWYyJb3jCpQAbQXAO51uUFGQyj1Gp\n",
+       "s8nBKbSWsa2FEOktVulVZYsXKZAURpast+xFY60lyKUj7d6wg4hFUfEVgKmfCrGMvqM0RUfn+Bix\n",
+       "+0TnLDTdziY0nl6d6Fg4bZVecPJiVN/l4GAJ3rrEaIw2liAwfscvKZwVmvll9uoqMe0KCsVFgn/h\n",
+       "5EDrDgXnEU2voOJVmmMpw1ki6iizOo94l+MZHXDXnvRFmc6ybWkXkpHeJ5mmdHLALnTtJ88je7n6\n",
+       "AMXyc5Wa0XdNedxKBrt7RlIo8OnhdlSR+zJ10FFOWmdxJ3fLlq7ethR5HsdQ6AVakEAuBMEG6km5\n",
+       "GCcpHaI9Y97p3ps41r5pY5N7JRns7TG6fBkIaKXZvf4mTV0vxgMBDBKH1wzYZCc2+krIp0NAq7Kz\n",
+       "LqIKSQVgvUNKza3XZ8zrGu+gnFdMZ1N6esbbbvzSeXQ6ndL3HgFsZRnDq7vMyjnrt64gtUZUmiAF\n",
+       "j588xkvBsZG8du0aAPPa8+DJETOb8/mDUxq7gVNDWjHm6LTBhpxnz59zcloBmwSZM7MTGgJCl1hf\n",
+       "IssAdcPkbMJuu8PRwwmnp3NcKDm9Pqd3dZvJLPD5/Sf87MY7bO1twb1DwvCQO/v3cb1AW7X01RDV\n",
+       "FjgjsKFkIBSvD3pczhWybjmZHpHtrtEOc4Z727jPjyj6fbQ3XN7c4LWNNQgtR+MTNl/b4rtf3OOs\n",
+       "HBNkTnNwyLd/8Mf85F/9kIP9J8xnh+S5xnpwOqNRUE3OGPUKMgO1rWi1gJdT4JxwRiYNoo09vp2K\n",
+       "Kq+6buj3BrR1g3eBqipRSpH3eoQQ3xnptEc6iUajpEovFgl4Uac+4SCcBOtoElevtY5No3KDVAYb\n",
+       "YpVi01iUVvRHA5pmjAoRDGQii31YlKPyc7wEFTR15dFk2NBELj14Ek+BlqnRFQKZErAqCIzOQCqc\n",
+       "j+/NRaQKYtcifIr808bS9ERKdsbXtzkbKRXfRCQvlYxpshAorIgOXASkl8TujfGNPbFFQESJS0Xb\n",
+       "y+2VOfDudVywdMiLHVOslMF2jnslTHsx+bj6ey9+p/veqr2sAupFO/c7YrkPrpbnnk+GnA83X3bO\n",
+       "l5070j3nv7usYhMru/KXNaHLkPX8n3iM5QbZ9Xd5EQWsnu+FAy90tS41mF81rfWicVP31pLu8+56\n",
+       "M2NwUn7pXN1xX9yAjffxNWvasBFSPwsfqJuWLM8Wb2950W7durXoT93RVq2zVFXFrJzTTj3XXr/O\n",
+       "5vZtfAgcHB+ysbmJkor7D77g9OyYjc0Rg/4AFzTPDg6o5g3G5FFpIGKFsDEmvndRRFy2ZkbIWjIY\n",
+       "9jg5LQHDvQePqcsZ/bxPb9Dj6OAJ2ztrfHLvAb/0q7+F2VznvQ8+5Pn+Kd/8/T9helQzP5iy1R/R\n",
+       "jkuKbFnEMez12BqtkQMSyfPDp5SnY66MhnhrWR+OmM0r+v2cXq7RCowwrPf7HM6mfO32u3z+eJ/D\n",
+       "sxnV/JTf+zf/J0eHj7m0sx0dos6wTYUU8Rn3exmEQN3YWLEY/EK19aKpEFUV1jmUzvA+vh9WKUHT\n",
+       "VgQ8Usc+/iHE8ZOpSMbZNhYOOYUUCilBOAiptYFIL2yRCJDxrU9CRiRuXRv70jsX32sp4ztby6rF\n",
+       "2opMxBeZRGoyYEOLw0YgJQRFlpOpHGfb9NaexHUr0N18dLFFAsQo3/rlO3ch5uE6cLaa4AwhvifX\n",
+       "+6UcMb6HVcd2Cy52I+zWdPdCdiliS9qOQRCie9l0pF/+wiYxu+KaTs7TOYPu310YtpAArlAQLzqz\n",
+       "1cqllylKVh1i97MXz79Ayp3Q/ksIf2kvbhQ/7FrOtRVdpVHEea3ni5nr1e8t6ZwvX0f32WpWfLWR\n",
+       "/OoxV0t7VzeUJVd5/ryrY/Pi9a32i3nZWHQdHV8csxc3m8WGk8LqRes3YpFU18GwyIe8zLZ3tuK7\n",
+       "OLWhaVta20a5l4/ISrZR5VA1dey82IPxeMK8bVEI1tZ7BF8xn51x+eo2bes4HZ9xcnbM4eFzqmqG\n",
+       "yTRGBhSevZ0Ntrc24iL3gXZmqWYN9XzOoJdjPfTXBxTDgv3Jc57+wTO+OJgyqU7oD/t881t/zG//\n",
+       "1v+DYZ1bN79OqDyTo+esDXuUszO0MmRGs9kfUJiMUNe0rWMwWqMwGf28YGu4zng8RfUEl7ZHrA0K\n",
+       "FAEtoJ9lrPmCk5MT3rp6hQ+/tsXaxg5ZnrO+tUmvN2AyneN9rDjWzoFvaUuLRaOzPliPlssCpBdN\n",
+       "O49QsUjKuhYlBGU5RwiJMbEniLXtIjqLFabLDodCgnct3ll0ov4WdRk4nND4oAmuTYm+aEblNE21\n",
+       "yBXZpo495rXGZJpgPU1TIYVGKUEbapyM7+0MriGTjtY3IHzqLgo69c33IUZxCBAq9ZpBUbuIortK\n",
+       "WOfjS2Kk1CnZGfNwIQRMEAgZOxLGVhwB6Vx8eXLi6YWMbyVSRifk3vm6lHuT3WaxbNfxw+yVNrNa\n",
+       "ILiVxd39rLOXo0u/cIhf9fMQwuL1RKuOpHM0qxHAi074q5Dvi05xdWPoHODLHNWL537RWb547Jc5\n",
+       "39XrWT3+i/0WVhOwf5YoY5mQeVGmef5azucsxLlS69VNZPW6VqvjVq9ldSNb/Lx7ozgSraKW3WSK\n",
+       "XBpsa78ylFxfX4+hZ+vQRpHlJiajSDJOZVE69jQRUnJ5sMelS7sopbBNSzUvqcuSqqxAB4zKGAxG\n",
+       "TKdzdna2WN8YMpmMmc3n7Gxv8Natt5lOS+4/fYhtPPVkzru3brExWuNb3/wWbdOii3Vufe0D8rUe\n",
+       "Tw+ecfXNTa6+cYvf/b3f5e69Q2Zzx+HRCRsbBeu71wlG8nz/AaPNAnc2Y2+4Td9k2KZFAZbA2++9\n",
+       "y6Pnh5RlRTmZUQhF3i/YHOYYLMKBkTlG5zjjaV1Ae8fZs6f8o7//97jzxRdRpz6b0zaAUhA0wrdo\n",
+       "aynyjMYZghrg2hrl3Fe+DSYHqrrFK09W9Ajtsv1yXcdmX+mdw7ExlIgFbd7G3uGxWVZsL+CdjQ4u\n",
+       "vSokvZsHgUWSeo8nBUhZzZaFRiHmvbSSaOWZzUu0NEgREW9dW7xwmDxGAv8vc+8dY1uS3/d9qk66\n",
+       "sXN4r1+eeTM7aWdmd2Y2cZdLchMpkivLf8iQLFsCJMiGAmQZBiQYsAzLVrAJC0q2JVAQTEqyTZES\n",
+       "STGI3KWWK23eHc5OTi/NvNTdr+PN94Sq8h91zu26p8/tNxQlDOuh3733hMr1rd/vV7+glMKoBJUl\n",
+       "hLUIjbb2F2kGGYBB5uIQIcQkZKAW1lI3CAKrwaIKM/scPyaHbZpQ+vncNyhpuQFyat06qMrXOhop\n",
+       "rDFfmiWT8wL33MESVt7sg+Q8ve8UeFlUANOL3aWIy9R68awLYuU8XYBy77lg424ixfWy/maRl/vb\n",
+       "fa+oSxUlDRAVPrSdv/eSJpouDki67Zi1uRR1rMqvDLhlrRt3YyjyKe67YF/uH3cTgWpuZla7Umty\n",
+       "aRVHlZqwqkYdBZit7h+VT3aJlxtNqXyRa0zuZlajhWXrdV4/YwwEHvPzbYKlRQaDATIKSJOUpaVV\n",
+       "Hrr8EOPxiMC3KpLD8RApPGpRk/3DLucvL9PtDRh2xoSizs13NnnsmY/QHyZ0hgO+9PWXiFo1tCdY\n",
+       "XV9ke/sGvU5GGC4hhM/C3AL97j4CQT1ocuHcRe7t3OH08jKLjRbSwHA8QgQ+MgrYOewgo5BRkjDs\n",
+       "j6iHEWEE840avjYEwkNqK7Ndqoc0Gi2GWcrTT36QbNhjrhbiRw26211u3N4hyTSXzq5x/uIpxoN9\n",
+       "PD/k+u199kYJUinWWwLq9co+97SX+4IXDAcD5vLgFp7v4QcBqcpIktQGzXbOoqQorKEBT05ENMqk\n",
+       "VrlAMPkTwnp1NUqTaWU3Agm+J8nSjEzFOeUsGSVj/FqIURqlY6uvH3mkSk08RHo55RsG0lp7A0Us\n",
+       "1+JMxioQ5Fynto6ntBSWWzE28LnK8s/c4tqKjiyWWZVDmVun5mbzucqlkMV6sYeUfuAjPZEfdR3F\n",
+       "1kyyFG0EWue6+veBiffRkGea7S4inRTAWDSoLAM6CbjKIFb126UoTwKWcj1dYHevuRSxC/KzgHXW\n",
+       "RlUur+q3C45FqnL4XgBelcil3F9VXELVGUKZEi+3w90AyxzSrLKnvhf3zYSbRgpJMs6sPvAMfVhr\n",
+       "tesjDCR55JrCylb6HlrZPgvDiDAKSdJ0ImNsNpqkcYwnJPPz82jfim2SYUyj0WDRzJNmQ4Y9Ta0+\n",
+       "h1YQhnWajSZRI8EISUpASoMHn9Ps91J++h/9LIPBGJMakpuHoDVv8Dae7zEeZsy3enQPBzz3zHOY\n",
+       "pk+WjpHMcXBwD3/OMB8KaqFPphTDLEEpz4oI5JhUa4bjlJWVFcbxEE9mmCQFYZ1vaW0IfJ8wCJir\n",
+       "h6hQ8NQHHqOXjlhemCdWPkr1WD91DhlEnNtYYXWtTtzzubO5zb3dAwamTSgkq5fOEi3OV/Z5vdZG\n",
+       "hxla2MAfF1dW6fV6HHY7pFmKMdbthecHuaaINcjxAj/3xy4xntVKEcIQCg+RU+BKCIzwQUpUFk8i\n",
+       "HSldmLIbvMCbWIlalUKfJLcDMUKTqRidaetAzAOjPetpUWUEfq4Z5XloxORQ0crqM2spYo78JmXG\n",
+       "BiKeKATkBjuF/D1RKULnYsQ4m0SwB1BaToJ6CymPDHQUIGQO6po0zaw4x/etuMY78q8jZsz7Ir2v\n",
+       "AF4ANdjKl0UdVfLXsiihuOZSgO79KtCYJYeFI6p0VnLfKT9bdl7jchZuRGs3ryoqeZYoo+q6u8Ed\n",
+       "OxytEMVUlXM/oL9f3SYULUyNpzsW5XE5VoeJ+4gjlU2Mdf3KCcpUYZTLabUpMpgYTWmdIZQ3MZhI\n",
+       "0xEFV2uMIR1n1kxb5CbZwlL+9UYdMEitCaMmSwvtnCLSebRwCSpGGTBBjau3tvjy17/H61ffYTjO\n",
+       "aNaa9Pr7eJkmFB7+uIHvSdpCM9rfo+37PP/NL7O8sor0PZZWl+mnmrmVswy2b+DphGbkUw8DlCcY\n",
+       "pskkRFdmFCbTRLWIUPhII4jCiMD41L06GMny3CKthQUWz6xyuNch9jTG8+in8Pa16/SykDhT3L3Z\n",
+       "oP3pZ2gGAa+//jZbuyNMXZAMR1yNFHOnTlX2uR/W2d28w5s3b6KBO60Wa2trrK6vYYRgd/+A69eu\n",
+       "Mjc3ZzfC+QXSJCFJNY1aSJLmaq/SQwqNVBqMYhzHiCBEiYyo3iBTY+IkzilRTRRad61RFBFEvj2o\n",
+       "zFLCwEdmGUIYG2UqCKwriixD+D5GGdaWlphvtOju75MNB3ieIM61WKzfFEPgeda0PTvyZyS0ttal\n",
+       "wvq3GWUjgtyWwYZ1s5F8pJR4UTDRlrPYpUBCpjS1oJb7ZLJy7nE8yOerQeXReIhzJS1hA4r/vj7E\n",
+       "LAPXLGq0LC91Qdq9Xz6orKLSXfApDgfcDaE4NK1KJ8mHZ9Xfpdwn5vyOSKeIFlTV5irKtQpwy/Wt\n",
+       "2rjKbS7fq+JyZo2HW5+ymMl9ZlaqclUABWDbbwaDcOSvJ+YnCg2bnP02JtcwsJ9FNCW3TFe90xhj\n",
+       "Q74JQTK2Pj4KWagUYExGpzuiVqsRBBHI0Drxb59FK8Nvfem3+M53XmD/oENy0EPFCVsjq2s9Tsas\n",
+       "Xt4QZZoAACAASURBVLzAeuM8t955l3HaJWxH9OIOC2fm6Aw7nD37AJ3RmNbyGqPRGB0EGM+QYPCM\n",
+       "sQezqcJDYMMh5P5LjI23KIwkTlLqzQZgf6+srFFvNzEK0lQz0GNSAWF7gWef+TAimmcYJ8w17eaY\n",
+       "acMwTqxIw2gevHSeRx97gJfferu6032fO7c3Cf0aRhj6vREPP7zM4UGPN956k/3DLqdPn+bM6fMc\n",
+       "7B3w+itvMRqOqEUh+/v3CGoNPvmZz3Dl2nX2drdpBQEXz21wam2NQTzm9s4Oc4uChXadTqdDo9Fk\n",
+       "PBxRy0MpCiHYP7ReGz3PQwa+lWVjfawMhj2yTONHNYaDEfEopVVrcv70BhfOnuHfffc7gKFWsyqC\n",
+       "CMGgP8LzfYxnDQgLjs/LtbyyLENpRRQEqDTJ1RvDIyOhICATVoNF5NS79KxWlhdacZ7xQOnMBiX3\n",
+       "fEf8eyQG9sMg9wJpTwTS9PfpIeYs8Ub5kK58H0pGMbPYco6OBKoAZpa/41lU66y6lMFh+rBvcmfq\n",
+       "+rR2xnT5BXVYBY7l32555U2rCoCDoBju44BYls2/F/FOUZ8qEQy4OvVMZHkup3SUl0FrMflePsw5\n",
+       "KSmVW7GRq4QJJvJGIQBdsYEXbZ30hgABkQ0vAUbmsloFwqMeNBHCY5QohFAcHA741veusnnrLq9+\n",
+       "53dQ+138UcqzZ87RbjcYm5iRzBj7iqAd0Vg8w4effJivffu3eHf7Gs35kMP+PWpejRs3rvPgmQeo\n",
+       "GUk7atKLmmzde4eNtSWQHlmc4AcB2hh8X1pvhjJAeoYkSWxQDXseiRAQegHX373KqY2zyEadaHmB\n",
+       "QAc2ilAQknYO6eztI6VkfeEcB7u7jEZ9VtdWyUwXjWZ/+xav0mN5udoH+6/9xq9TazRZXFplc3OT\n",
+       "tVNrrK2t8a9/4zdI0gxPemzd3cYXITtb9/Cl5OzaBjpTBL5Hdzji+o13GY5HGGM3zqtvvs3lCxf5\n",
+       "/ksvc3d3lwsPgU7qLC0tEccxtXqbUazY3d2lVqsThhFJlrK0sMhoPGYuatLvH6JUShQGgEIpgxQB\n",
+       "d+/eZKk9z7e+9R021ldoNuvs7e2DkIzjlCCsobTGCNDKinK0p/GExJOKOE7xPSsa8YMAncu/LV/o\n",
+       "59GeBNLjKFi1J0FoDIV64dGaDqOALE4mhn7W1ZNVM8wy6+WwUEH0ZoQSLNLvCcCFEO8AXUABqTHm\n",
+       "I0KIJeDngAvAO8AfNsYcVrx77NNlxavY+PuBq3uwmX85BnLFZ3FqXqVaV87/mGk9s52tz8rHFblM\n",
+       "H4hWA1T5oHMWVVwlwilfL5JS1cBYtTGeBOAnbWDuofJ0PafrWM7fw6dwzwky1wt2yp3hPXJSZ8NE\n",
+       "9l3SPM83DzPZHCeiq1IdZCH79PJI5iZnaX2PJFXUWnP8lb/y1zl//kGGco7Xvv8iK1GDh5+6RDAc\n",
+       "Ex92ePLiRZRQXL17k/1Rlywbs9AydO/t8MnHn+Ar+3foHnSYb8+hE4FUkv3NbZ576kPcvHaNs2c2\n",
+       "6PbvkSljnUsJDx1nyDAgkBJpfALPRjeS0sp8hSfAU8ggQOmEO7fuctDrYOp1fuALX2BpfRUlDcKT\n",
+       "nNnYwGjNaDRiHI8JF5qk7Tq+73Pm9FniQYzxBETS6nZWpJXVRbrdPp3tbc4uLdNqNdjcvIMQBqVS\n",
+       "BNaIZ9DrMR4OWV1aYb45T6dzyHg0ZH//AK/VJMPQqNeYi0IWwoCN9TV6h4csLcxz9/Yt/DNnWV5a\n",
+       "4/y5DQajMYPhkKVlj9t37rKxcQZBQKY9PvjUszywsszuziZb27e4t3uPyPMJG3O89sZVdnYPaTwz\n",
+       "x6XzZ9nf3QYpSNOUWr2BkBlxHBPWImq1Or1eF50ppAe1Ws1GJ8KqEY7HMXGc2wTkroKjIMAYiOOY\n",
+       "KAitOWGu3FCc7YRRbaJyG8dx7kvJILzcH4wo/KaQ6757KGVIsxirbTM7/V4pcAP8kDFm37n2l4Ev\n",
+       "G2P+NyHEX8p//+Wql6vEA/8+qcinbCRSLNSqzWJqIVMtW6+iul0AOwn0q9pT3mDcDauqTe5nOd9Z\n",
+       "5c4Sa7j3yu10ZfnlzfCkVG5/AeBljZ2qepTrYKPUixzErRy8OGw0wvWvXE6uSGZ68zEGpCicnE1q\n",
+       "nYux0uN9io1nqHWu52uVghkOR7TmFsi05E//13+Gr3/ju3zz536Fs6dOc35tEa2H6Br80Bc/ixqM\n",
+       "ifD4A5/5ArtbO9x59xbD2yOMjBgOhjz6zOf57pVXePHGNUyjRTeJCaKIX//OV/j4xz/Cwd4WG6fP\n",
+       "MOrs4wsfP6qTmFHupljTrEXYUG3aBn3A4IWSYTwkiYfUvRpnzq3z2BNPc/PePo12EzxJpjNMkiB1\n",
+       "RiQMzVaIaoWMjUGGEadOnSKUHpEQHPQ6HKqUVtCo7PGoHnC2sc5CY57xaIzxBP1Bj4uXLrC7t0+/\n",
+       "P6Req1GrRcy12tap2vY9Op1DtJ+xvLKIEIZGvU42HBA1a8y1W6TjMZfOn2Wr22WxvkC32+ell17h\n",
+       "7LnzjJOY1dV11k+d4eCwz/dfeoXx2Hri7PbHNJ76IM1mjdXlVa5cu0KcKuptw6uvvg5GcOPGO9T9\n",
+       "S8TxmEcefhylDf3BgHqtQZykHB526PX6NBp12nNtayykUjyTa5MYw/Lycn7oqanX6mRZRppkJGlK\n",
+       "rVYjiYd5IHTr1VEUFIC0XhujWt0G3PZ80lADHlpnk/Xn+z6pShHSxtjVWlgu8IT0H0KEUl5ZXwQ+\n",
+       "nX//GeCrzADwMohCBWWeE6ki/+c8aFkXYa8abSY1OekwbxZVX6b8p4FgGtRdirWKUq5qZ1Ubq35X\n",
+       "abCcRBGX8yieLR+wniTfr7peJZt3yyr3Q1Wdq0RAbn2mDjn1EXBbIbVHPrxYaJ2RJBijc9/KR/6s\n",
+       "rRzSGl9Qsla1HuvcOlgKKI56mNTDzyICFeATooWmVgsYp4rX3nqDv/v3/gE7e11+7IlP8eADD1Jv\n",
+       "N7i1vcnK6VMMV1boyH0+ePEy33z1ZYKDDi2lefhcg+bCHAeDASEBj/hN9DDja4NN1HwIo5hFHfLi\n",
+       "y68jSXnu0UdJBiOa9QbJ4IBxMkBEIamBmueDslo+Yb1OIH0gINGKtUuX2O92+dQXfoyVU6c5J33w\n",
+       "fJB5FKDcaZLRhYsD6x1Sa4OuaVSmkAja3gJBHB/5Qy6lTz77LJ7nU49sVKhUZwyHI5RWXDq7ysH+\n",
+       "gbXSlB5pssR4NKbb7SIjjztbY86un2d5bZUrV68SBj4H3Q5BFPLClbfRjTob83MsLS/nvlU0wvNZ\n",
+       "a6ySKcPm9ibD4ch60ySAGG68cZ27bz3P0tIyly4/SKx9dvY7jDcPQCukMOwf3mOvM8fNd2/w3GMf\n",
+       "ZM747B0MORiNuXrrLsM0ZWF5lblmRquuWF1e5tT6BbxWh/W1DTZvbjHsxqSjlIW5eR5/9DHOnt1A\n",
+       "Gc3vvPAiL734EtIMrc9/37cHnQiMFiwvn2Z+YQ2VSXbudVhcXEFGinE8wvMEO/fu2jOTcR8z6CKN\n",
+       "wg8kBJAmM/wZ5Ok/BAX+W0IIBfxDY8xPA+vGmO38/jawXvWi57AZxcKu0gCpotKrAAOsU5rKSlZQ\n",
+       "zS5AVclyy3mXr1VR81X3ivJPirAxi/s4iZo+KRW7+UkbUbmc8veTOICpw0chJs6NZrW/SK7YqQz0\n",
+       "1rfYbC5sVt29XNyBN123InZkAd6uZaybn33efvoysk6QPIlQGkWK0dbf853NLf7+3/k/GA81Tz3y\n",
+       "JGfPn6PT7TBKR6gkoV6rcefOXc6dPcfGxYv44zHe3h70etzr76FGI8JGg5V6g0dPf5Afahpe+tIv\n",
+       "0xnHmMzH9yMCLdBCMooTWu221U3PMlrtJkYKMiEYjUZ4QhD5AWmmiRo+0guI45gHP/AIn3/kUQZJ\n",
+       "wiBLrQVkHhrQqvIdEUw2YIpXbS2bk0qzNvGlxcUjQsEYIi+k1WxM+vjcmQ3SNJ0ai/F4TBwnjAZW\n",
+       "3zuMQpr1kMFwyGG3S6vdRGnF9tYWg8GAxaVF1tdXWV8/lfvKTmhEDeLxmMO9HRZaDYL5EE/4jMdj\n",
+       "PNkgU5Kd7S79bsa77+5Yl7TUGA16bN/ZY6G9SLu1ws7WNttbW7z6yssMtUDUm3jSZ9QfsLq8wtz8\n",
+       "PI88+iiL7TbBch+hPG4pxRtvvcVCY57OQYeV5WWCMKBWr9FotFhbP80oPaDX61Fvt1mYXyBNrTdR\n",
+       "FQb00oQ7t7a5ePEhbty4xa3Nm3z4mQ8TD0esnL6IEJqWTgkCASZje/suc+0m3V61D6Ai/V4B/AeM\n",
+       "MZtCiFXgy0KIN92bxhgjjhx2T6Wf+ac/P5kwTz/5OE8/9XilhZ7rj6EMku5hn6W4quVFZbAQQhyX\n",
+       "l3Ncvuted8ssf5YBrVxXOFKrKwNY1SHg7ybNovhdrZf7bQKz/K24bSj3tQuAZVHQSd/LG0C5DVX9\n",
+       "VzUmRSqceJXrWSWecjmTKi5Kj61JuicM0ldAhhaCeq3F3/wbP4WnAj73A58E7TEY9RgN+9y7vsXq\n",
+       "8grXX36FMxcusv3OO/zCa2+wGIU8uL7Kgw9fYhSc5bDfRWWKpLlAs7XAf/bkH+Kdrdv81ne/zdDT\n",
+       "DGSCSD2CwFod+r6PUIIoqmHIrOFL7hPE833bbgTjTFGvS8J6g/nFRYTnUavX7AFoZij0iL38gPdo\n",
+       "c9O5KElN9QUc6UDPSqdc9UJh/Vprra2/klzTqVDlK+Z7FEVW7W4OEII4SZi7/ADCsyHIDFaP//Kl\n",
+       "c2RphhFYs3hPWu0cbRjFY9L+kPOnV/A9n163y9LiPE8/+QlqtYioVuflV15n++4e48EIgWA4GtJu\n",
+       "thn2Rmzf3eGhBx9g9fQ6/STm4eGQzAvojWJSBPd29tnZvMO9OzeJ+x12d3e4+Og8C3NL7G13iHzP\n",
+       "ujmYX+R73/su3/jG1zh3/jy1WoPr16+zcHqRuZV1uoMB927eYTgcceHCRcZG0e3tY+pQWwpY1Yv0\n",
+       "zYCwVUeEHr/4r38dYwyf/sEf5KUXv4/QGdt37zA/P89oNJw5DvB7BHBjzGb+uSOE+EXgI8C2EOKU\n",
+       "MWZLCHEauFf17p/8E39kJpVXJCGs5db9KLsi3c/stAqQpu9Xq7kV1OysPN4L6LqLpgCxQn+9qo6z\n",
+       "QLCqTVXXymKbk9Ksfi0Da9k6swyIVeNX1P+kNpQ3yPK1orxZqcpKtni3sPiF4/5iynWLsnmETDBy\n",
+       "hBYjUpOiTECWJPyPf+V/4Td/+cvUdZ1GELG1/w7bm3do+AHjw13a7UW88RghPR5//BGuvfUWA0/w\n",
+       "wrvXSBuSRx95mA8+9gSvv/kG17a22Dhs8Mc/9mme/+3fYlyPSMOQ5UabO3fvsDlX46nLF+htD0nT\n",
+       "xPqvNh7jLMMPIoQXWtEFhjSJ8UyTP/cX/xtGSUqcpUjfByPwPAG5cTp5kBC3b8tzzO0LdQLHOBgN\n",
+       "pw7kC8o7DEPq9foUgeTOGa01yXCM0ookTqzpfZrkB3yWY2jXAmrzLYLcgCdVGXFs9bWNgfF4jCXr\n",
+       "BDozVgNJDdAqYfPuHd65/grjcZe15UZ+sBig9ZDTp+bpdTZptx5k52CfN6+8xWicsLC8RlSvce7c\n",
+       "Rbbu3ePSgw/Q7/fwPMnh+gKJvodJE+bqdYYMeP673+bDT32I3Z0dBsM+aZby2ONP0m63afktvvSr\n",
+       "X+bHf+InmN+Yp9FoWI7Jk2z2NpmrhfzOt7/K448/xsUL69y9e50rb1/hYx/9MPV6EykD5heWCMOI\n",
+       "S5efoNlsIaTk137xX8yc+//eAC6EaACeMaYnhGgCnwf+J+BfAX8c+F/zz1+qer9q53fyBnK5rfSO\n",
+       "AUlFXez1+4giiu/uNfev/Ows8YGbbzkf9145lU3LXQrFzdvdMI61saJtVanK0vS9bDSz3nFBumw0\n",
+       "Va7fSX1dfr74K9j18jvF5yx2vjCQmtWGrMKbotsWt14+AUJotCetIyzpo02NwFvgrTdep+63SfY7\n",
+       "9Lt3Gahdmj7MN0LS4Zhk0GHzzrvIZouX33qNZqvFvf17tGoh436Prdeu885LV/ihH/1hzHgEdzo0\n",
+       "On3+2p/48/zpf/y3SPw6wyCi3rCimMXIo+1r5lttRuM+Xi0iHgwxniTLQ3rVGzVqzTrNxUUOej2i\n",
+       "RhO0si5JsdaPkwhW1kh8qh+MMVNz0u2nKkveItVqtYl8GpgiTNI0nXB/5fGWUuIH1mtiq1UnTa18\n",
+       "t/Bu6fpesT7vbX5JkGDjjyra9Rqj0YgwDBmPxzQbTXq9Pkl2wMJCwGd+5DmUMvR6ffqDAXt7e4xH\n",
+       "Q8bjIUG4xMK8j/HhwoMXSZOM4TBmZ2ePr/zmr3HqzBk2b10nM4oHLz9ArVHn0pnLDPsxh1mfS+fP\n",
+       "ceH0OdI0YTDo0et3WF9bs0GIleL221e5uLbBWy++SppkLCwsEgQ++we7LC7PM7/Q4vFLF1mfa+Lp\n",
+       "DhuXN3jq4fP0e0Pm5hdRGq682mNhoU2vu8edd67/RzXkWQd+MR8kH/hnxpgvCSGeB/65EOJPkqsR\n",
+       "Vr3saozMEl3ANIXkAkQZDIGJE6NyqtIwqTq4K5flLvRZk7kMZMXELXe8C3xlCrkM/JWcyIz+mTXA\n",
+       "99OqKedfvl+1eVW9X2WQVR4v97yjqh8LIHHB+72KllwAP4kbKbeliqL3PIPxDUZoUiNITYD0Wnzr\n",
+       "m69iTJ3Ir7NzcJ2st4sXZdQCH5kleCiSdMj50w8zFD5b71zn4x94iLlGg3OnTvGt//eXWX7gQX7n\n",
+       "V77Ey89/h49+4lmeXNzghddf5dmPfZw/8Nwn+JXrrzJSMa35ObZuvctoHLOxsYpWQ/67v/yXWNs4\n",
+       "w63NTW68c5MbV65a7ZZ4iFev89Szz7Kwsspht4P0fOtn3RTnSspapEpvEtzX7UtrHThbq6oqFeNV\n",
+       "9gVUNecnetH5c+NczFKvWdU63/fJkmSyHnyZr2tp322325P3oyjK5fmK4aiPlAskaUq9EaJ0g85h\n",
+       "D+EJkiSlWauBykgbDR44f44oCvF9nzDySY1Pe34eDGwEIR+4/CBaP0e338WvhcRJzEG3w3y7zvrS\n",
+       "MmIp4BvvfourV95lZXmNBx54AMQpanXrYrjbOaTVbNCq+ZxaP838/CK9/oj5hSV2d/dY7a3iB/DS\n",
+       "Sy9w5eqbJFmMl6UMByOiKKLWaNLpDmi3F/CCkNv9Q7q9AT/wA58iiiL+7xPGQvxuZa7/IZIQwvyb\n",
+       "f/1zlROm/NuXwbEFXQbJyYKUxyntgsU7CciqQNt9torSdPOsAq1yqro/i3Kv6pNZm8h7Ea+UOYhy\n",
+       "favAzBVLlDfYsrjifptbmav5923Hk8995ti9F7/z5fcknilT9pVtzjyUTIkaPqMsJagt8rWvv8Kt\n",
+       "dw5Yba9x8/WXObj5JjLrMhfZKDZR6NOPR7RWV1k6d5762in8WhNhBBvLq9x48y2e8Oe49dZbPPHM\n",
+       "k7zy7lvc2rnF4xcv86GNB/i3X/oKanmBX3vzZV7v7dKeW2T3zibn1xd54sFzPPLQRX7iJ3+cBI2W\n",
+       "HkJ4hNIn9DziLCZFI6UgU1bf3cvPBIpoUWAFDrmuzbG+vt98/cinfvzYve99/dfv+365HPLyVW4l\n",
+       "izFWECKEFe0YY1UinLmlyV1JI3LjLPLDUX3kqS+X6wcitC4TjA1CnmUZSZzYDR5jvQFifZ+nvkGl\n",
+       "mTWhT1Wu621VWIejIQqNF/gMRgOiQBB4EQKfLLE+xOfmWlx68BIIwd7+Ia+++gb9wZjMH9EbDFmY\n",
+       "W8b3Q4zxaNRb9AZ94nhEe65Js1UnHo+oJ9ZQaP/wgIND60NmMIp599Yt2vPz9Pp9G/ih1uBf/vy/\n",
+       "xBQ7cim9r6b0Lis+c/c305PEZc2qHDuV1fUKKqD4XWb/q1jHY1WYATguZVFl7FN+fhaF6NbVLa8M\n",
+       "wO4772XhlDcG13hplhFSOY+qjcot/36+Y8r5Vv12r1XV4aT2FkEkXLl2OZX9xRSO08opE2N8z6PT\n",
+       "HSFljX/1S7+KJxfwtc87V6+SjQcsr8yzv73HcCCp1QKG45h6q8nKqTWu3XmXp8+fQwnY3tzGz2Bn\n",
+       "e4ffObzG+soyr777NhsXz3L+8Qf57gvP89yPfJo/+tRf4O/81N/m3MIyvUiyM4g5vXEWnQ1ZXT/N\n",
+       "53/0x/DCAKEVRtjo5bHKrNMoYTASKzIRxaG8ACTGFUcUAQLyafRezkZmbbRFH5ZFieVUqRqMwIZP\n",
+       "zN8ReTjB/D+L39YiEmMQvnVSZjcfENp6+xPC+hw3uXMrgyEr4pIJCLQBrJOrwkEUudooYch8q2Y9\n",
+       "BmqNyTQqsw6rtNHMqzZJlpCpjGbDhibMUo3n1VCZXTf1eo1OZx8NjEdjlpcWaLcVCR3ObpxCyoAs\n",
+       "M8RxSrtdRxhF7AlUqmnVWjSiJk0RsbOzw/zyBuceeJRhPKLb6xMLq0PuN9vUGnUOO50Tx+l9NaUv\n",
+       "g1cVK2fUyXLvKZAzx0UVLqXuyuqqNC+EEFM+Ulxqu+rZWSb973VCu4BYXhBVC6iqjJNEQVXAWbSr\n",
+       "7ELXzV+I42qWbt+dJA6pSuV+q0onyfreKzifBPIFcAthY0+W8wcwniFLFauLp3jhe6/SpEY9avLa\n",
+       "22/SPzykGRqiRsDS6hpbtw/wpDVxv3DxEvv9Pj6S3e17PPTYU9zbvEe92aA+N8fe3l2GnZi1U6cY\n",
+       "Zgln2mv8qT/zZ9k6PODezl1+/I/9EWS9xn/+F/8stBcJ6zX6vUNeevlV/tyf/a/oDTrWuk9K62ND\n",
+       "G6QGpMil2sfBtGzV/F5A1+lILGFc3Z/e5GA459D08bzc8o7GyDoOK8LoTew7pKDQVSui3ACYvB6Y\n",
+       "3O22lPmhtPUKqLH6/wAeaU7IG5RKiurZOeFZ9VVjLzDopta+RkqrneNJpC/wDUivTlNYD4qeZyMy\n",
+       "WW8N1lNloU+fpAlKZ2idMT/XIEkyUNayMk0V9VaTkRzjGc3iyhKdbpcwrCMy6PdH6IUmYa3FYeeQ\n",
+       "g94mRsA4HbO0sobneyhjCMKQM2fP8/OzR+r9A/CyRgZMg2+VQyd3IbueDIt3Qz+sBEd/wlYen8Bl\n",
+       "UKzSQqkSHZSv3w+g3OQCYdl8vYrKdetb/n0/4HPTLNez5ToXh1Bue8oiq/fa1qpnT6LAq66dpEPv\n",
+       "RhyvyqPgPNw8y5vPpI6eTyto85Xf/G3efOkKpB7Nxj41NcCvQ5qMUGkNKVr4yx6HoyGPXb5MtzdC\n",
+       "pRltWUP3Y9q1OqtLy4xVRm1+jl1S0tGQ3o0eWzdvsXf7HmvrZ3j4scf4yvbXObW8QDOs8yf+6H/B\n",
+       "P/v1LzEYjohqdYyBr33jGzz+1GM2fiUGI5SN6KI0RguK2Km2/Uf+Z6b7y5B7i6kUoZWTNmYS6Lcq\n",
+       "TSj6wkCq4uypPJ8n6zZ/L0dXW7s8uLAxZgLIQO6C1YZ2m8xyLfPNxSAxTHx560I92AL/JHsh7AFo\n",
+       "LkASQuKluVM5bR1UWSoejNY2zicSlWRgQPuaIIhQyuD7AZ5vXf1GQYCUAcKA79VIUwXjhCAIiOOx\n",
+       "DcgcWde1wvOp+fMoZRiPUhubNuky15DUgjbjOMYLAqS/RK8/oNVuIf2AXq9nQ9OdkN5XAIdpACnL\n",
+       "VuGIAp8YH1RQjkU+Vb62q+TbJ4HRSaKM4tr9qM9ZQPJenz1JK6csPpolCplFvVfVY1aby3UrU3Un\n",
+       "tcFNJ4FFkcqHmO67J4lQyi6Iy8nlJsrqhuWxN0LyzvVbfOPffosLq+dZXJjn2tUrDIZdllcW8X3B\n",
+       "aJRQCxuESw1WGmcYZwkizWj4EUmmyfojku6A27duI6OQYTym29lDDwY0wgbJ2DC6vc/WO5v8qb/6\n",
+       "P/ADn/0st7e3ePu1N3nqiSf50jdf4CAeIMaKC5ce5LDTJQgixunIBtLGUqtSWgMkivmYu8xFgDLK\n",
+       "aacVs0gh8cRxo51Kd8ZgnXrdZ6wmZdxnLciCcjbWWVR+42jcPEdTxs1X6MKDtlUnBtA2HLg5kgih\n",
+       "hEFQJ1Xa+iIxZop30DJ305A3KcqDfQgp0cJuBJnW1nsgEl8IRBAhEYx0jBTexJeJUhnapGSZDYhs\n",
+       "Mo0UYzAS32uQKQVBgPLACyOUsS6K2/PzqAwWpE+mDDLtHfVb7lRrNI5ZbS3ZOLC1gIVwgayCu3HT\n",
+       "+xhSrZgILnDnrJs4Csor84HPjEalR5ErJixePoE8KVAimnSKEEfPGG0mhyASMSnHPlcGBlc0Uyzw\n",
+       "ItvC3NvNQzCdhWvVmVMaFO8b5xmbr1LT4ZSK56Q8qkcZcGadA5SBbxZQTWo6AfWjNhZ1seXa7/bT\n",
+       "rpgqn1KF5d6xJKctXE/aRIwpg7SZzAGqc69s0/HkQx59vKDcLBtsD6QsqEsQkppo843f+HlOz61T\n",
+       "qzegIdG6S5MBMpbIxhyH45QFQs5/4AOszje58vy3aQcSEw/JhmP6e7tcf/55GuOE5LDLUhTQU4KF\n",
+       "epuGF9Benyftj0k6e/zDv/HX+S//+/+WZhQx3N5kezxk7fQyi9kid25f5Ymnn+BDz3wQpRUhnvWN\n",
+       "kbfB9ms+XjnlagrgEkd9bAwYZTDCoES1KuYsTm9W3wpD7oTJjpOpttWrpMCnRjiv+4RYY3quT767\n",
+       "ZVPIxAuPkgaZf5pCpGLyM7bJIgeDdfWqjSHJw6CZ4lhX2EDN+TEnSuflCEGQR9vxAm9SnhBRvl60\n",
+       "RVAhEEiSNJ3ULkkyyOX5UlqXv8bYjVEIQYB1aeDlIiE/9Jivz2EMNB38uN95xfsK4EodRbkpJqRN\n",
+       "Rws+mwqHZm9pk+98UiLy97SR6Fyv1JVzC2FlbBgLCKoIh+T7CCkmLOd0PYqKFGIKF5Bt/QqOoChv\n",
+       "UvMZYFnlhe+ozOP9o5wI1naj0xQRr8vGEVXA7P65ZU2PQbFpFuKS6XtHG5fDGXGcW2Ki3zCdXFg/\n",
+       "yRcM2HBU5aVabJAnpVkbWJH0BLNsmKoi8osXeEjpgZFkmSLwA/75P/05djZ3Obdxkf5wyNUbb7IQ\n",
+       "+XhKYOIhola3ZuytJhfXTnPtjVfYv3MH7SlCnVhvj9qzDpqCkCzLiPwQLT16nR6Ndpso9HnqfaY5\n",
+       "hwAAIABJREFUmacQnZSbh/t89Rd+iY9/5rPUDvt4kaAdBXTVmI3T6/zsP/kZ/uAX/x92d+8RBiHG\n",
+       "WABHGLRFPzuuTFOcx8REJxBxVYZYE1HiLMq6RCi45brj4uZXxV0VFIMoni1xf5N33KKPSsv/t+33\n",
+       "PWttekx0VGCBMRhpw6RNZm2RvzbH56SwVLp0xE5grVStnN09E7J+vgVHfv7DwJ+0oRDzuesl01iP\n",
+       "k3lA5DRNjwVBeS/pfT3EdHfbskx7kibAwaQzpJSTgxStFEZpUNo6gKEYWktpSQRSHOVtpDnyG+0M\n",
+       "Wjk8miumKRs7uGqMLkAW16om9awBqaKmy5S0NUY5rrJ3kuy9vBnNEkPMImBn1cUTx9syq22uPPN+\n",
+       "2ipSklOUVRv6/dNMMYsBKyfWWIcrBrQ9eIvjmDCsk8aK2zffZefOLssbZ+jEIwIN/iBlmPVZXKiT\n",
+       "pSnJYZePPf1JGmtnuf3qK2y9/RreoG8DJwcetTBCa+iNu6ytX2T/3oCl5Tnm1pft3E0y9u5ucXUQ\n",
+       "86Hzj3CmPkcmAnZef4MlP+IX/79f4HBxgZu9Qx566DzPPvMhxvGIZrNOluViIOFhhPXzIoWcmLzP\n",
+       "AsnytfJzs9wg3G+8TgoC4oqt3DxniTmLe+7nrFQlGizKLvIvcMK9X5bJV82XKo6huF6OBVuFHVWi\n",
+       "Xbe+03Yg1nK1uB/m0YbKdblfet8AvHywdARUYsrwQ0o5Ae+iY4uYi5POzGVo8cSdY4AU0oY9Mtbx\n",
+       "vR/4Ez++Nrr0ccu/oi4u6LplwpHVmFKKWq02NegwQ6boDHYVoBbvF06hXHn/0X1dECxTaRaYFi5o\n",
+       "7rej2z4+vmiqxC9CCDD62CSeOdHMtLl6Oc9yPU6Sdc9K92MxPV9iUTx/TtivRsNcu03nsMf62ml+\n",
+       "9Vd+g3ZzgfMPPUymDd/45V+jGaeEvsdhr0uzVidMFUn3kGh5hV7vHmnaJ82GJEbTrjVRKiX0ApJB\n",
+       "j8AYxr0e49GIqF5j9dwFxnc22drZxIwTvnn3gLA1x5n1Z3n2Cz+M7Mf8ws//PKQpKhlz9/ZNfuZn\n",
+       "f5qde3cJQquVYPlxkQupXY7wONFRNeeqUtlS9b1QgO5arXIHUQWy7r3y/WJtFfkUxlknuTS+X/sK\n",
+       "g58yWBebS3m+HXG65th8nXAkpTaUN60jMeh0+1w116KtSh1tni7x59brvbh0fh9FKMflupWR5rWZ\n",
+       "nHpP/UmZE1f5BDLge0cn7VpnE5AOghDIwyLlEaONduXp1QY1Rb2Kz/JEKazYXM2ZKkCdtdsXO7er\n",
+       "2gd2QqdpOtNKrjzximQlDo6UsLRYqia65VyrZNLHKXgAydHEvR/Yzpr0Ve8pleXPlO/87iiyY28L\n",
+       "2xcT5ttYPxq+7zMcxMy3F3nx+ZfYv3fAU49/lDGG/YMDlleWkbv7aD0gjRUDNUKNFFfefov6aMDC\n",
+       "SotxssC+6qK1YqQzWvUmWWIQqSLuDqjJgOFej1pznquvvsGz5y/x7Cc+hsbgjxRhvcnOYo3Xe7uk\n",
+       "3R6f/kNf5Mr2Fq//5q9Sq2mSeEQQeiRpjMz9nxiTcxCFKFEfH9fy2Ln9dGw8K+b1/UA8CIJK7an3\n",
+       "Oi5VADdr/ZVTua6z6g7VHk+L8socieurqMhXSjml4VRFhEzP62njt+LZMrEohHCiYx3Ve1Z4xJPS\n",
+       "+wrgLngVyQVEAE/61hzYFLElcx8PUtpDCCFQWAbZN4rCYMkTkjAMc5DOD0tFzkgLkQe6PSrTndhl\n",
+       "0CtMft0Yli7L406CMshW7ebF90Js5N6btDunBmZN6nIdi02tzMbdj6oVAmYYeU2VVd7Q3HbNUuHL\n",
+       "HEqnnF91PapA/uRJfD/ZehFzsJxPHKc0621Uanjhey+ysrjKuzvbzM3Pc/udm0ilWF5dRsU+XuKx\n",
+       "s7eLkD71VkirWSMQ0GrU2YpThFH4Gjwvw3ghB70u7TShtrBMrT3HwoU1/Myws3OI2N7j4cceZiFo\n",
+       "Eo9TzFKbyx94hNFhl91vvsHu1hZpMuYv/Pm/xKB3iBd6SM+zJueKfN7mQJArU5fnSVn8VT5/OYl6\n",
+       "PYlLKlKWZZP5qYrwYiWAm7UxuHV1530ZPN9rKpfnrt+T1l1503HFokUdq7DJnffl5wvjqTKeTHy9\n",
+       "5JKD4nohDnbX0f18n5TT+yoDL6uAuR1XPFMEEzWmoJjzs2KlrF6sBOkHJFkGQqK1IgxD0jjFN4U2\n",
+       "S35YmgN4UbbgOHtXrmPxV7beKzrfrW9VKh90VgGvu/sbYyYe9Kqoh/JCcb3Cufrb7kFwEY8PjruO\n",
+       "tZzPbHewRTpaYEX+xWIrjimqFp2NpFNok1jNoOMb1jQLOb35ldU9y0mIk9nMsjdjYwwID09CmmY8\n",
+       "/+0X2Nvd44lHn+Qg8unf2yfrdGgEPj01ZnFuHvqC9bMNhhKaq6fY3dtjPpCcWllhpz7P4PCAXpzg\n",
+       "BQ38KGDl3DnOPvYBRHuBu4eH+CureMt7DPe67HQ6bH7z25xZWOW5xz9M3WshDxJWo3keO/Mg/+JX\n",
+       "fonPf+4zPPvMs3T7e4AlFgwCX0iE8dBGgTBWG4RqMC7PNyGOu5QoOJ7ikn2/ANfZQBI4gaLdgzc4\n",
+       "vsEfH49qcUfVO/ebk+VUpn6rABiOPIO675WtuI2xYqIgCKbyKNaBC9zFBuZi71H/W2Kh6Fsvj61Z\n",
+       "1M/Nt6ov7tfu9w3Ai4E/abcrnhNKoI1CiNxQQYDWtrFvX7/B5YcfIag3GA8OqUc1PD9A4KGShEat\n",
+       "ThLHVv1KCLyCHVLKHmbClOy9XH75upsKGVvx6VIQ5Y4vJsKsCexSBWXquxhsV/e7mECuu1R3YpX7\n",
+       "tdgMC8B3KYgy11A1HpPFr6epqOJeVbsKXeJyv7j9c7QxTPv0fq/JzbuqDp7wcidnFqyEsLrURkg8\n",
+       "JN9//gVMqunsH9K+fIadvR1WGnUykzHSmt1eh8gIMt/nwac/yJlLF3n5699jtL/DYX9Ic3GFODX4\n",
+       "aMJmm4XlJWLfYzQe01z0GSUZg3FC5nsMjcZTBoXmdmeP5c3bPHPuHEtRi348ZG59jR/53Gd5+oc/\n",
+       "SlaE6VIpSimiWsNySoY8YrpC5m5Vq4CrTH3avplm8afGaopYMJxECJY5TleW646zm7f7vTxPXc+k\n",
+       "7jhWEUaz5oZbZplSLlP2VRbZVcRSOS/Xe2hV3abPrKo3tfsldy3NcnnhpvfdkGfWQBcpyyxwF0FE\n",
+       "pRRIz7Mx6xB853vf4//8R/+YT/3gp/ncj3wakyqUkZBpamGNOEltp8h8oolC19yfiDBc8KzyqFYG\n",
+       "NBdo3XpXAVMV5e3+LjYId7JVnXgniXXM40mPQoOmAGWRG3BYYDrurKlqsbl1KFtolhfDBHgBP9eL\n",
+       "LYN41eSU3rSxkZtXVf2q+nhW3kWqWozT93Pd3QljIsBYz3Zvv3aFYX/Axvo5DvcOuLZ1HQ4HLId1\n",
+       "/NDD9xscxockytBeWqW9fIreMGXj/EVWPvo0m3fvcvrRh3n1+e8jk4xs2CPwfITSjHf3WF07QzTO\n",
+       "qGWGOAzpBT6hFOjQkPiCt/fvMP/uVeT1ZZYeuYh4aJ3z3UdYXFxGSkOaQa3eIAgD+oOxrb7tEMAg\n",
+       "5FHAhvL4ueN4RFkflzO7/X10z66zWf1+EuHlluvWpVwvl8ssj3XVWnHLrEpl8HUJhTJR42p/HOdI\n",
+       "pvGoTJwV7a2yaq4ihtwy3st8djezKjwqp/dVhFKWW5U7AIqDCEBYxXdj7ILMtCaoNwiiGk8+9TR+\n",
+       "FPGPf+afcubMBj/6mc+xONdGpakDgnmHYK27PI4s/zzPm1CnVVR4sRO6O6nWeuJTo2qAyru7S/mW\n",
+       "WbDi0y2vDODWiCbXXcYBQ6YXSnGtAMmiHoUjK7c+7qQs6n2/xVlWmZo1maGQ0U7LKas4FftbTOKa\n",
+       "GvsSiEKTfLYc3+VAqpLRGopysByEQKOV4rd/+6ssLy6zv7vH0sIyg6s3CQ0cej71Rh3jBzTrTQZa\n",
+       "sHbpAeKxDXXVqtdpGkXmS05duMC1q+9wcPs2gdYMuh2iKESPBrR9j9OLc9SDEFGv0Q0lDIf0R0M6\n",
+       "vmGofL7z2wfgCR5uB8j5BsqHU6c3OOjsoIR1tKR0ShhG9hBHmNywXKK1QZvj4FW1jqrEV+5GWh77\n",
+       "Ki5yMq4Ol+bOicr+r6D0CyKlTCGXlQruZ61dzrf8OQv0Z20E5TLKYO6+V5aju/UtlzWLKKxKbuSu\n",
+       "Kg2fY8+fePc/YkpzcC3AZRYLbGV3gCioDkmcpSwsLNAdjWi12ozSQ5ZWVlleWeHG9ev8rb/7d/mJ\n",
+       "z3+Bjz3zLL70MSoDY6mwKploGWBsueLYdXcgpJQTh/TuxCuzUe4mUQXw7nMuR1Ck8uIoNppiAdTr\n",
+       "daev8skrBZ4np54vNpwqqqDMDRSgXN60bIWOL7hZi8podWxBuZuH2zbfC9CWvHRzOAY05VTeKI/V\n",
+       "wdrgTaz0tDH4wuflV17h7u07PHTxIbyWT7/TZVkJxmTgCXo7uxgkLC0TbJwhmJvDJIJsqFi6eJr9\n",
+       "uzd45fsvMu/VObt+mtH2PTyd0esd0ppbJ2j4xGZMPxvRu9sjjgfs7tylvt9BR4JxHdq0aA0VyStX\n",
+       "0A+cR15Y5+Mf+SgvvvgiZ8+dttHO6xG9YZ8giPKzWGtVKlEgfYQ8zgVWUb0mP6eoAsYyoFgOtbK7\n",
+       "7VjlIFMGTfdalR64C0yFF8kyten+uW1xtcFmzYMqyr14x73vyrvdze53u4GV5335vMvt5/JmMCvd\n",
+       "j9Mop/cNwIOgOJm1B5THnVPZT19oUm1QKkNkCuEL/DAk6fQJAo+l8+u8u7fNqheQGMOHH36Upx76\n",
+       "AK++9hpXrl3lx77wBRYX5vClRChF4PsYpZBZan2Na33kB8ErJmauO1wCc1f+5Ype3FR4Myzeq6Im\n",
+       "yvExywNWJV4qfhdlu9S/++eyi9Lz8LycChdySjRTgGOxqIs2lr0xloHeKJNbtjKJBJ+3Nm+fW9/j\n",
+       "VH6Zsp9MdnX8sNJyE9bPx6z5XK5vud+ksEENijHwvYjRSPOVf/MtTp26QL/TZ6FeZ39/k2atznjY\n",
+       "QZgUz8/ItKDT3eHspbMMuoeMuyPOn11HJCP23r5GNBzzwr/9Kj/+4z/B3mIbNRCkPozHGZ3xDiZ6\n",
+       "i26SEnfHREGApyTGC6n7AWkywpgxMYK93W2GuwfIxQaNxgLbt+9y6tIZZCYhyQjqEZmAUIGvvdy3\n",
+       "h9WsKix23Y3vOLgW433Uv2XZ7/H5djLrXgal8ly9HyHjEiFlsCoICTdPV3tjVn3gOIfopvI6Ka5V\n",
+       "le+2cRaglwG8StbtEiDuBlZF5VfV5/etCEWprAQc09Rs8aeSlMS37Hjk+SDtYvcTEJ7HmUvn+fK/\n",
+       "+ypRnFJv1DnsdlGex/qpU7QXF/jpf/KzfOLjH+dHP/dZhp0uARLf85BG4UvITOGnTeT+JGxZRitr\n",
+       "5ensiF6uzlVQD4WeNlQfglQNZAEkBZiWT6JdWXVhzCOlRClNlmZTeYHlZIq+OvKNfbSwXY4jUylZ\n",
+       "luWgZ/C8gto/6u+i7DJ7bCeSmJThyqyn1SkdUDbHdWhnTcgsy6bKF8LV6T/ZKrCoQ5XoIIg80lHC\n",
+       "8vISnc6ANFFsbx+gVAAyotWIGOzfI+7tkxpJvVYDHTPQCf0kYX79NHWjuXPtCr3+kGYIO1t3STfv\n",
+       "sb4wRz8Z89orL9Bq1dja26V/2EelB/hBQC1qsLA0z9Z4hBeGRM0WWQbNhRbzPmTJkGE/JqtL6s0m\n",
+       "KqwT+jX8VCOlRygDUDEmgBTrhdDXHgaByjWyPKcP3M/yHLT9f1xsVy2OgJPEVkU6icU/TtUfiU5m\n",
+       "PV/mNqu4u7LqbhUX4BI6Vam8GVRRx+6G4yoNuOWV83c5WbfNVWH9ykScWzcXF3/filBcirt8yAC2\n",
+       "IVmWIZUkA3zPy2WZGqUVSguUkiwvLZHFCaM0pu0vsr4xT9RosKEUg3jMD//QZ3n9tVf5G7/zU/yn\n",
+       "P/mTPHz5MsNBn5rnE+eROIQnSVVqT/h965FMeB44VIQLzkkeAsoVdRRqhkW7ygMchiFw/IS8SFWD\n",
+       "OcVOIibybze5vq2P+nGS6wRQi8lQr9cdKqgQk0xTRVrbCONFOgJVJtaiBVhWhbYrt6Gcj9u24neV\n",
+       "V0F38zgp//Im4m6m3W6fRqPGzZt3aNTbRLUmL3z/qzzxwafYvnkHkSXc3ryLjyAA0JBmikZzHtnQ\n",
+       "nDl3if3+kGFnwPryMru3bnHz2hWaKiPTKX6jxp3bt1laXKTX64JWJKMxUkr6nUNW1lfw2jXkXI1G\n",
+       "tkgvSzAC1tdXGB4cIJRP2uvz0je/w4dO/wQ6U+zcusPNF17l8hMPM9KCIDWIwEP71rOeZyQmV/0U\n",
+       "pfa6/VcGt+pD3uMbqh2baqB18zlpQy5zrSeJBoq6F4RBGQDdTcidP+67RSo2ieKzqrwqnyNVG6BL\n",
+       "dBSgXxWsuZqDObrnipyqrD3dOlRZqJ+U3tdDTJiW8Zbl4cYYfAKCvE3CntjgiwBhDJ6UhEgW5+a5\n",
+       "uXWXx1bOs384QPZGjJKERqtJ4EU898zHMEbzz//lL/Lxj36UT//gp0jSBBmFCGMAjZTWFaXWdoMA\n",
+       "AdIeGiqVIcWRv5YCNAuDhuIgrdhtC3ByzY2TJJk6oHEp3pP6p5hAWukJMJcnWfmwsrjuTm7brmyK\n",
+       "CpLSw/OmfaUX77iHg1OiIDUtv3frWAYSl3pxx7RKrOKC+6TNJ7DDRer3+5P6lsVKAFG9TrfbY2l5\n",
+       "jSw1fP3ffYvdnT3qG3NcvHSRrRvXac0tMe4eoOIhsc7QGHrdAXOrawRhAz8esdCSRNqweecWYZLg\n",
+       "SUEgJfUoItHKmsxHIVoZZL1GFIXEo7EV12lNFo/xPUmSxDYAsTCgEkb7h5iR4k6asfVvIj7x8U/y\n",
+       "o5/9PH/tf/6r/NQ/+b84vNenKTy8DIYBJMIQKm1tIDBgjm+e5bGsuleeZ9Op2qrYfcelmE+KYuXO\n",
+       "eaj2SV9+3q1XFTiW61UAf/HdFdOU14FbB3duuRyJ++cSZOV+rVo37txz532ZY3UJnzIHcD+xiZve\n",
+       "dwAvy5fKjZHSAy/f8SQ2Sr0RCGlItMLLJJ/86Me5/uJrdPo9rF9en7lGRKPWIDOag8N9kizmU5/8\n",
+       "NC+8+Dx3tjf54k/+QWphiNAZgbBGEjoZE+TBHxA+wvNByNwJfUkWbMzkULCgvpVSE4tNl0L1fX8i\n",
+       "q3UpkVksnHsCfWStiQ37pI+iC7mp+F1QyGX2FY78uBTPFODvsovFJCu4jHIdpZimlMuT3i03U9Mi\n",
+       "lOLQ100TUVnJarPI634sZHGIW5Y/FotrnI6pN5qMhjHJWPGdbz/P+fOX0UoTJwmb9+5Rb7WIfEk9\n",
+       "rbN/cMAw0Yhak9byKbb3+/QGQ85vbCCTMSIeExmF0IJhv0c/GSKjAKMNZ9dPMdCHZJjcDaKif3iI\n",
+       "TBRZZqxaZaYxCrbu3uNzX/gM7165BkoybkREa8vc7uxx4aEP8vADD9GoNag1Gsg0I5CS1DNkwh5S\n",
+       "B7nqqMdsgCtzefcTiUw/N/vZYq65FpjlNFMz6QQKvKhzFXiX53M5H3eeuBxHFWVdntNV3HDx54o/\n",
+       "3LaWtXfca+XDy6r+cQmUqvv3o7yL9L7qgbtilDKrVHxmRqFyB72uu0jP95BoakgubJzlW1/9Gp87\n",
+       "dZpOt0s9jOz7aUo6HtMMQhpRyFiPefrpp9na3uJv/u9/m//ki1/kuQ8/xbCzh6czWvWINIkt8PpF\n",
+       "JA+7YATiWF3LwFU+WS9YL1fmVh6YYiDhaEEUeqrTlCj5uer0gJeNIKrqCHZyFZOxsKRzxRhunu4E\n",
+       "LC9SrfQUBVNlNTppv5lebC77WWZBi03PDb7gUiwnsepuHscoqNBDpZrQq3HlnXdZmF9iZWkZreDm\n",
+       "9esMhwOEFMw1W/gZzIc+jBLqC0t4zTn27m4y325jjOLGtStInRJFPnGSoZXi8KBLKgxhGLK6uESj\n",
+       "2aQXp8TjGL8eMej2qC2ust3rsrRxirlTp/H7Yxqex6kzF7lw/kF8GfLa7Vt84EPPcGvzLkko+eBz\n",
+       "zyCiBqNxTK3dJuv1ERp0CGMkNS0QBlQFzpap2N8tiNu+u/+z7jwpJ9das2pt36/8k8QHVdxalQbZ\n",
+       "LO6j6rCxStXPJbSmuNB8nlaJgN3+mNV/xdx2KXiX8HDbcL/0PlLgHraurvhkWnZcgJOPzGPp5Q01\n",
+       "hsQY/ChEZIbTy6uYmo8wCaiELDFIIAxCVk6t0+n3kKEkiJbojQesraxw+dGn+bVf/RUOD/b4zKc+\n",
+       "QSgMo2HfWrlpG7Xayw82rbN3gUJPgUMURXlrHEDNlN1scsrbpQaOg3L1qfZ4PJ7I04Ep9qtMlfoV\n",
+       "cvGCglJK5Sw2WNNra7lXuAbxZM4tGFWpYlhssNMTVOP5wdSpetkXxmQi6+Pm8AXQFg6RiucnboId\n",
+       "HxtlJ0BVqegXpdRENXWqLtpgMkEg4caVG5xZO824P2BpaYleZx9pFKPhmFAYAk8RC0l9aYnLTzzF\n",
+       "IMlYzDSB0XS6hyTJCJMkBKFPEISMsiTfGBNqQUCqFSgIowjf9+n2e+zvaZ77yMfobt/l9GOPYmoN\n",
+       "etfuoOOMl15+nQtnzrAyv8Tjlz/AamuZ1WfPcOPdG3zgE89x/c23CKOAzYN95j0fmYIRkPiKWmYj\n",
+       "7Bj/+EGay2HdD0yqxB82mMhsaHCtqKFaa8rVxnLHqTzHqqjUYs5U1f1+FGtRRtUhYRXl7eZ5Ehfj\n",
+       "rrmyqLJ4tpC9z6qbm1IndoH7eT+Os5zeNwCH476wy5NMCBsE1UNYdTABwggkgkxYK81QeCRaEbQa\n",
+       "7Nzbot1qk8YxUa3BcDig0+kQ1Wu0ghadww79YR+kIPVq/Minf4i333qDv//3/wF/7I/8YU6treAJ\n",
+       "GA36RLUa43hswcYPchn09A5bNXGFEFbNjmnqM03TKQ0T9z2X4nBFL7NOud2Du7KBjhtyrninyF9p\n",
+       "dSxfw7SbSxdkq8bEXYjFvWKxVlErLqC69S76owzaLrVefJ+1MIApTaByP/i+T6oT6vU6Uvtcf/sa\n",
+       "ly4+xGg4pLe/izQJq0ttzDhk1OvTNzEJknPnLtFYXKK3t8/Djz1C3RO89M2voY0irEfESUo9lOjU\n",
+       "4NcivMTQWpgjVhn97oBWEFGLIuZ9jyCKkEKysXGWGFBhxOHw/2fuzYMsy+76zs85d39rvpd7bV3V\n",
+       "1VW9V1f1pl0CWwIZiUUwgwwOzLAMWCwTJsYT4LGDiImZMbZngnHMGAwzDkAgIxAIkAwSIEC7hJaW\n",
+       "1Gt1d+1b7svb737P/HHz5jvv5suWYDzRnIiMzLzvvnvP8ju/8/19z+/8fgFZprh2+w7dXo/HHzmP\n",
+       "a0ria9c4duFBEstktb/LAyfuZntng9Dx8qw8SUYWJ8QC0iTLU4GVUp8V/VuWy7KC0/u2XITYC1lx\n",
+       "SCnvd0wLDnWYxVQGLtOokWmWQ7kd5XJY/PEyki2/T1/kCipP/265HdMWnGlK97C664p6mlVy2N+H\n",
+       "lVc9oUMxoIVZcaCzTQlZLlDKEJiZQCqxlwdP4GSS1DSYO3aEtbU1Fs4tEEYRW51dwiCiWquRotjc\n",
+       "3iUIQ6q1GrWax64/Ymu3y733nMGUZ/mlX/m/+cmfeA+uZbK8vEC/s0uzOUMU+PkRcm1Ff6VOTrMM\n",
+       "lR48SakrmWmKWleWBZIsm3BF/5RRffHcw0zHcZ3F1JjjhblbRhPl03Ll+hTtL9D0NOWgt1sf67LX\n",
+       "SZlSKzaBi3ceFo9Gpwv0TdMwDPPJZ2RkoeJzH/84rWaLF55+lhPHj/HylYtYhsKouLSrDertFrcG\n",
+       "OyzOLXHPfQ+wNRjRG/kszM/SW79NHAXML8wx2O0SRCmpH4BhMgp9KjMzzC4u0dnYwnIcvEqdmUYT\n",
+       "y7LY7uyydvEK8/fdw9ZuH8+wcBwbI0lJSBgRcWNrhSPeCRbaVdrtGWaGXQhjNm/eoVKvsjPoYVVt\n",
+       "lCExhMSREmlCmhyM4ldWOtOUwGE+85Myc7jy0GXhMESsy6j+vWnz6DD0O81q+JtYEsWzykp7Wv8U\n",
+       "ddLBVbGnNa1e+sKgy3z53sOuTbMCvt73p5VXVYFPowR04ZBSkigQKrf6i+zRmcoQpoklJMLP3eCO\n",
+       "3X0Xz33gLzlx8iRRmlKdmaHleEjDxDBM0iSjtedXLA3B0fkqi+0WW1s7hEnCo4+9hj/40J/w2IXz\n",
+       "VOt1LMclDAPiKMC1vDz3nsbJlhVaUQyZx2wpFEnZ/ahAltOQcnnXvDzgZaSr96XuTlhQOuNUaXto\n",
+       "XZoTdc+yLE/CyqTgFkq5PFbl8StKEWe9fF2fLEUdy0i/aGN5AurIbhrdVJSyJ0shN/vcvJkiYos/\n",
+       "+9M/5bGHnmRupsWws4uRxvjDHmZoM9rapO5VSV2PhaUjuF6V7eu3qdUrRIHPzWtXyeIQ07LxKjWi\n",
+       "KCPwuygzRZgGi0eWCZKYzJA4rsPm7i5JnLC8vEymFP2VNU6dOU3omDQrMyQLswTb22Sk+PGIyzde\n",
+       "4tmLX+PsyhW+c36OumGTCbhz6yrtSpXmbIMokwyJEAKsMCUoKIbs66O/shLT0ep0hZj7jB+mLMub\n",
+       "0WV6RAixv99SdgGd5laqK87DrIOylVUu03jt4ntlUDjNw2aap0kZWU97xzT6pVx0i6Vcr3L9vt7i\n",
+       "Wy5fV4ELIX4NeAewoZR6eO9aG/hd4C7gOvC9SqnO3mf/HPhhIAX+O6XUn097bhFPu0Bakx4XYwUu\n",
+       "pYGVKhKRkbC3eZBBIpI8QWgKwpTcffYMf3LzN+j7Pl6ljrRdjEoF07S5c2uVYX+AFIKaV2Gm0aRZ\n",
+       "sxGWweL997HTGzG/dJTFI8f5xKc/jldxuOeuY5CEuLYkTnLyUbcasizb9+0GXZGA2FMg+uAUnxeJ\n",
+       "GsobQOWDB4XwF26L0+iEct+NBUSR/5mhx78Iw3BisqVpnifU9ewDKF+PvKZPTP09Oj94GNIp5wPU\n",
+       "PXf0xarwkCmjL91KmVbK/LvuUimEIEpGrKysUK9UsQwT0/O4fOkGUiYYKsE2baIwZNTHoIa/AAAg\n",
+       "AElEQVSLibCQ0uDOnRU812V+do6tO9fp7exAHBFmGa3WLF6lScc02Op1aM7MIIw8XnijUqNSa3Dz\n",
+       "0jWGgyHVRgOExPEkqUgxHJuYhO3uFsONVQypyDIfK0kwMnj+U5/kypU7/PhP/FMW52ap33OWj/3h\n",
+       "h3nHd307HTNhV4VUFBhBzECmGJaJmaqpikVfIMuW2Tdipgtx+GevtOldfm55PHV+vvjMcZypezzT\n",
+       "nnuY6+1hXi/F9/X2l8Na6PeUrcRpHHrxvfK+lj4Oh/V1eSEq10N3Sf5GyjeCwH8d+L+A39Su/Rzw\n",
+       "MaXUvxVC/Oze/z8nhHgAeDfwAHAU+AshxFk1JbiwrkzKm3V6ybKUbM8DxDVMTFNBRo4QgMzKE4ou\n",
+       "OzVm7lpit9/Fsh2G/pCV1TUqVoVUGswszGMkKa6QdLq7bO/0EFISpxmG5VKp11GZ4g1veDOf/+sv\n",
+       "kyQJD9x/BmUI1CiELCOVEO+dLjRMA5Wp/CScyLlkZQhQGXryb31gCiVdoFt9kKch3EL56RseZeGY\n",
+       "JhBSFpMgP6xT3GM79l4CVxDSwLQEKJVHCylNKt3a0NGtjr50JSsYb+WWKRcdXelWhn5fGaVM4zSn\n",
+       "lUyp/SzpYo+7lYYgTWOyTGHbFV54+gXiIKHWbPD0xS9ipRG2oag16hBFmBmM0oS64+RZ5VdWOXrq\n",
+       "JPHQpreziSMltldHpBn+KESaFq0jR5DNOo35OXrBCMeqUHEqrK+uYbkmioSdzhZHjh0liiOev3OJ\n",
+       "0aZJxfCItnepphKVJkhp4WCAyOinIVt+n/e+9ze478QJvvkd38q5Jy5gBQkVx8ZSFikZmQTTYBzl\n",
+       "JU1RKRiGSaLAsE0ykUdjNwBjL+hVwqTFeBgKLETxsG4fn/IFEHvx5Mt3FYqxkIdcPgvQoy/e36iX\n",
+       "UdlS14vcs3z3vqW1YTodM83CzZ8zeTJav0+/X5fp8TvGIA4tEfh4wVD7vw+zGPRr/0UQuFLq00KI\n",
+       "k6XL3wG8Ze/v9wKfIFfi3wm8XykVA9eFEJeBJ4G/Lj9XR3r6YJZNDdvOzUkyRZYkqL2GG3sD6psp\n",
+       "AkG7F/Pm734bFz/xJe49dZqeH9Ku1KhJj53QZ31nnaZp0ag2WFpu4zZOkES5chsMBqxvbDIYDEiF\n",
+       "Yn75GCs7fT75Wx/gx9/zY9TjDraAUOZH/i3XwUiBKIEkD5IUkxELsC0Dq+QZIqXcP/JecM57fTux\n",
+       "sVkosyiKsCwr34SL4/3VHqbTGcUEGCMYPdPP+KCFflI0/944S4hOPejvKKMM/Z1lZV8W8GmeBPoi\n",
+       "of8uZKE8gYrPDzPnM5XviWDIPPqkKYnimCSNcF0bVIWXn7vKvfc8yG63iz/os2AK2PNzN1NQwsSs\n",
+       "2MzVqsT9DmY6Ih5scfHpG9y6co35+gwVt0aSJkRxTBpEhDWLxdOnOXrkBM8/9zw1zyEajhh0e6gs\n",
+       "RlgGftij23OI+wGDLMGdncVtztJq1OjEXRIlMBOoKAszS0lUxsi2OP+ax3npS1+il/ksPHyG1Zfv\n",
+       "YGRVGjMNNqNdTNuikiiyOEY4NiLJ4xPatotnuwyjgIwMVIpIU2SaooTYP1k8zfor5KP4f1IhTpY0\n",
+       "PWiZlYu+WBf/T6MuvpGiL/S6PE6+S782XjzGf0+6AU6Ty6JfdM+m4jPdoplGUeXP0sHnwb29PJRB\n",
+       "kdtWTCx6Xw+ZH1b+thz4olJqfe/vdWBx7+8jTCrr2+RI/EDRD27oZnmZDihOMBYdV3gXlIUhSRKe\n",
+       "eOQRvvAnf8FmZ4MkBkdUUNUKC615FqsunmnQXVsn6PdZ2djMBz1TzM7Ocdfxk0hDIgzJIBjiRwFb\n",
+       "W1u87zfex/e9823MVD0kAkulECQIQ2K6Nqi8E22Vx8uO0mTqIOjmWXFNV3y6kircE/Xd/WkKsOiv\n",
+       "goIoK1Kdd1dKTbh2FZuUQoj95Mz6Bmv5YI3enqJME/6iTochqrJfefFb3+TU5aJsGRws+WRMkgTL\n",
+       "tomigDRJqFaqpGnGH3/ooyzdfQ9mKgl3OhhKEagEVyjMJEUZBjgGx5eXmT1yjJevXqbVmoUsY3t1\n",
+       "FcKQUHWxazVsaRGmKf0gRJmSertFLxhheB7zi/NcfvZZkiTBVAKRKpJRyO7aFu3UZsE0cdM8r2U2\n",
+       "V2dzsI2TSdw0Q5g2tcTintocN3opz3/5Szz8xtdSa7TAdumY4EY+QZzCbIXR9jbHajOEoU+sFKEB\n",
+       "hmsRqpTM72MpsZ+7NDUkgZG7wZocRI26d9Q0y2ha0ZVNMSf168VnutIuLEkpD1qSheyWn1FGxcUi\n",
+       "P032dMQ9Vs5QoF69Pbp7r/6eslIvKJQyNVKW36/XT9PvyW1WvR66lfH15T4v/583MZVSSpRjtJZu\n",
+       "mXZRb9xBZHgQeekcma5gdMVjphK35hKLhPn2PKZwGPoRg9UOhmWBhEatSsWusHT0OOvrm/T7A7rd\n",
+       "Hhtrm9iuQ61epVqrgWHz6EPn2ens8lt/8Pv82A/9IE4KFhLXtBhFISEKJQUGAlMJrDTDMU2UPRl+\n",
+       "Vhf4aXGQi/bpDv5RFO0LWkGl6K59OtVQKLsiZ6ceoa94r45eClSvK0m9HtrYTozDQQSi9lFEuU6T\n",
+       "E2k8hoe5uOmKfRqiO0yQreLsgGIv1ZjAsj1GQcqtm7fZ2RpSmZ3h+MIRnrvycU4sH6E/3AK/hwpj\n",
+       "eiIiq1Q4Yjrs+gmxMFhYXGJj/Q6j3V2qhoEjMrqbG9SbLUzHwSGjdXQZu1bhxo3bVBpVDNvCcEzc\n",
+       "SoW414W9BT1OYzYbJp6haPk+C7RpVRo0mzH+yiaWdFi4a5koiZhfPoZ65goPvektOG84x5W1bU4f\n",
+       "rdNeXkRud7Btg4997pO85fHXcuX6Kovzs/hpQGJKlIgwpYFlgBmnGEqQCEUiFIHMqT5zrwv1DWM9\n",
+       "Fn6Z4jqsTFBnJUpm2ngViqlQqGWlpcts8V1dzsYpyzLtWZPK0TQPi899ULbLc0//Thno6O3T3//1\n",
+       "XAeFYGI+7N0xUZ/D5PwbVd7wt1fg60KIJaXUmhBiGdjYu34HOK7dd2zv2oHy67/5O/uVPH/uQc4/\n",
+       "8tC+4OgovLzhoG90FQpuX1GNfO45cw+Xr1/FPeGAMnHacxxfmEcmAlyTYegz6A3oDW7jBwG27dBq\n",
+       "zOA4LtVqlW63Q6fTpdfvYloGse/z6Gtew/t+7wP8t9/3A0TDkDCLcWybSCoyoUiVwshSskwRhckE\n",
+       "11heVXVlq7dL3/ATQuyj8GKzVBdaXag8z9v3AilQbPH84vv6u3VkXEyccjo33a1RV976+/XrUw/Q\n",
+       "TFnAXsmS0JGOjuzK1Ey5CKEQoviuRAiHYJTQbM7x9Fc+xrGjZ9gOenz+83/NzJ6l0l6YI941CHo9\n",
+       "giTEa84hK00ur6/jORVWVjfYvHWLhmVTt0zSICDwRwRRiHIc5o/fxZmz9zIYjRBSUm3U2FpZJU4T\n",
+       "ao06wjRRYUToh6goQcwbWJZDnKXIRFG3bJZOnKSrXC5fv4awLM6/9nFsBTVMvvCZT3Ky6XL2/guQ\n",
+       "GbRPneTirb+kuR1yrm/y8T/4KI9929tZEQpHWKASjCiPE97v96h5HhGCBIgzSSYFpjTIOOjNY9v2\n",
+       "xJwbI9qvn5GnbEUdZvbr89YwxlH2dEur/GxddsvvnK58xwGsxrI25vPLi4wu0+WFRwc85ToV9ZqO\n",
+       "wHW6ZpK6+ZuULz/1NF966mvf0L1/WwX+YeAHgX+z9/uPtOu/LYT4RXLq5AzwxWkP+NEf+kcTQlLm\n",
+       "toqGT8vcXF6BCyGw0owTx0/yxc9/hcfPPIofpvT8Ef4gxAwUvSwisSQNZeB6Jq7rkCQpQRQQxyG7\n",
+       "nW0c22Z+tkW71SBNYoajAb0o4My9D/Brv/lb/OC7v59gMMQTApWmqL0MPwpFZghs08bRJkLRtvKB\n",
+       "E90UnBbB0DAM4jjeD1+rT6ri2Tpa0umSopT7Ut8sKi8IRb2KZ5ZP0hX3Tdsl179fPLu8SaOP8zQl\n",
+       "Xp4Q5bYeVjKVQMErYmEaHvVala995Xmq1Va+ZxLG9Ld2cIXAVBEiU0ivRqps6p5LY+kYK6OYbhDi\n",
+       "eh47nS1G/SFVElAphsqQjk0/DohVRiMK6Gxu0R+OOHX0GCjFauBTnH2p1OukTkRKftp43m4ikpRR\n",
+       "GLDud9i1UkBSP1Jj0LXZ6HeJvvAUqjvi6NE5TJWxefEKteoCfnfA4pFZbNsifuEqr/FabIgh/+H9\n",
+       "7+f+h+/nmy9cYN5tYAyGmEmMVamSioxYZKTkeT8dJSGFOIsPKKY4jvdpSb3vX0np6GOpj7s+hoW1\n",
+       "dRBFJ/veU8Vn5XMExd/FhnlZbstyUlBoBxeOjMKfvfyZ/r7yYlWACSnlBKjSQdG0RWQyDd1B7xXd\n",
+       "AjksabQQgiefuMCTT1zYv/Yr/89vTr0XvjE3wveTb1jOCSFuAT8P/GvgA0KIH2HPjXCvIS8IIT4A\n",
+       "vAAkwE+oQ5bxYkXW3eB0BK4jAdu295VYUcq+qEopXNvl3nvuZWV1g5Ef4NXaCNeFUUIYDFjf3sJo\n",
+       "VsmUSdtycFyHdrOFYZj0e32Ggz5bQUAax1SqLvNzsxxZXGTOUtxeW+Gh84/x73/91/jpH/8xIj/A\n",
+       "NW1klgIKYUnCNCZLIojGpmGh9MomYBmhlo/d63G+dUVWVmjFBCz6VFem+uZU4b6ne8Dok7Tsc6s/\n",
+       "p4ghnlNdYv95hV932XVvbGaOY4cXz9fpr2LcCmQmRHECcLJeRcKPaSX3N1YgJUJlCGFQcWr88Yc/\n",
+       "wmMXnsRxTJLdXVqWiT/q45mCqOcjanV2DJdjx+/jvgfP84VnX+BUq0HY67CxukYNgWta2KZAOBY9\n",
+       "PyBFMdNuUa3XePFrz2CYFkfa86ysruJ3urhCQByDZSBsG7dlIS0Pt+KysrGKU/G4tbXBa++5m/kj\n",
+       "y1y8chlzqcXWZgcjAykhSALe9oY38ofPPMdxWxDtdBmtbnH+sXv5zPZnqM+7nDtxD8P5Rf78S1/k\n",
+       "9uVrvOf7vw8bgWU5eBWPIPHJsgxLSGQqEVFKnKakMp0qh0UYgkLecmWrXkHJ6GFdJ/ljHQDoG/aF\n",
+       "HBXIVKcMddnWg60VMqbz4zrdo8tPmub3x3E84aqahwWYzJQVx2OlrCvv8oa+74fYtr0vu6Y5yduP\n",
+       "+2M83wrgU4TjLeS/+KzIlKXUpOtrUbfy/D3MfXb/3d8Iz/Jfuggh1F/96e8fQGU6daKvumXf4TIX\n",
+       "XAxCGKWoWpU/+pOPEOyMuP/cBUJh4SmTmnCxGnW82SZJd0gYbZOkMWGYJzkwpIHnulRcFykFSRwR\n",
+       "+EPSJEWaFtKxCEVKpGL8QZ+//8Y3IgYjZBwhhCKSKZHIsDAw1ME8l7rA6pbF9FgUcr+tOhrJsmxi\n",
+       "w2cad6ej1+IenT4pK1odnZctIG289n8XCKp4tm4F6O3Lf4wDddXLJOpJJvpr/L5xwK37z7/5wDOe\n",
+       "+8pfgswnRNVt0O/6fPFzT3PpxeuY0qE+W6d/9TpJvwcuEMTIzOSm78PSEc7c/SBJLOiKjPPH61x5\n",
+       "4Xk2r11B9XdQwx411yQTGZGCyDC5++wDxAg2Nzao1euYhsXmxgauZeCaIo8HHodk0iRI4N5zj3Bk\n",
+       "eYHnL10kVgleonjwrnvIbMlOFvL88y/QSE28zCRWKUY65PGzD3FrtYPRXuTe+x8g7O6wdKzNiePL\n",
+       "PPtnn2Gxucwt0+BSNeGlzVv0tjd59/d8F+2qh02GkWW550mi8tg+SiFMgzCLDyDrXElPykuBFJVS\n",
+       "XHjttxzo8y995iP7NF0+7mP0fpg+Kd6bppMxQOBgJEHdStPlsgxeJuuczyM96mfuQjvpEpv/MFH/\n",
+       "4n7dsi2K4zhEUbR/MKmQ9cJi0eVbV9blhapA3VIWwM6YUND6vNOtX8MwuPCat6GUmmoSvWonMceB\n",
+       "oA56MehmF0xyrkWDixVe90Zp1JqM0oRve+u38O9/6Ve5zwAXwcbqOqFbo3v7Js25OVzXpd40ME2D\n",
+       "enMW13EJwpBer8fmTgcBVCsOrdY81WqFnc1dYvLQpNv9PvNzbf77n/s5fuUXf5He+jpZHCFsA8M0\n",
+       "JnzAi7YU9Z/Gremc/rRNRz0++rRA9dOiERb3wuSO++HCLzQhO+jaN14E2J8QxfN0ZFOMS/G/YVgH\n",
+       "zNPyZuVYiP9mUdj2i8yTXhuGxaA/Ig4zbly9QRrFWLbJYGuFzVtXEVFIba5BFmegbGqtNo0TdzEK\n",
+       "A7bXdzh1/mGuvPhVVu/cpG6bGI0GoyTGVwlhmhGkGQtHF2m029xZWaNaqWBLydbGOt31DWLLpLbQ\n",
+       "xjIl1VqNhbtOMcwkr3vLW1icbfPg44/Smm/z8Q/9Z9ZW1zh7/iF6/W2smkfaC5GGxFCwFvh88GMf\n",
+       "4R8cf5TNr36R5MgC9XMn6cYBfcvg6KnTdJ+6wj2PPUJUi1leXuTO7ib/8n/9BX7hF/5nWk4Ff6fD\n",
+       "QrVGmgX4gU+11SCIwgNJuHP5meZjPbn5Vy6O45AkCVEUkaY5TaEDMMMwcBxnH3XrHk06oCmKPj90\n",
+       "eqWY22Xwo5ex4ss38AvEnD/ToMhCVFgZud6wDuic8rPL1IxunRTPg0l51y1epdIJoAN5GknDkPuL\n",
+       "iJ7dqgCmkHveeZ43ce2w8iqmVEsnlI1SasIsKQShfExbn/xFKf72/SGWaTHXbGBXbUbBEIIB87NN\n",
+       "avUZGjMNurtdBipiOMr2Om8HJQRCSKqVCl61RdVzEUIxCkJ6gx3SICbJMgzH5PSRk8Qq5p3f8S7+\n",
+       "3a/+Kv/we95Fxa2QxREyk5hSYpS4waJdhYAWXjfTlFWBFIr7i7jc+ue6wp6WxKD4TOfv9P4uI119\n",
+       "E1Kvk5RyYgHIr2cTY1bcV3xP35zV/y4+LyZO2f/fsg7mUCxP1mlFShMhTYQStNttPvbRvySOIo4s\n",
+       "LZJGKTevXMIwMxqeS9DvEqUZkfSoLyyyUKuxub3La197jqE/5PrKDUQSkgmBbVrYtRq7/Q6jVGF6\n",
+       "Faxagxu3V1BJSrNdY9TvM+x3qbk2jlL01tdZWlrkrmPHmVlcwpiZpVZv8My1qxw/ukxnfRsbgySM\n",
+       "WL12k+pMlbc88Rq+8MnPsrmzQ3eng20JZqpNZMVkN9jkzs1LnLq7hWtX6A58mK2xUROojRVk6LK7\n",
+       "GWJ7Fv/kh36Mj3zkYxxdWuINj51nY+TjqAyn6hEnEVEcYlvOAeSrL8Z6f7/SIiqEwPM8TS4mPcqy\n",
+       "LGM4HE7Ixli5HfQ20TfOi+vljWyd8phWyrKYy5+FvhAV96TpGJzon5UDrBXv1Rcb3VlAl9FpMlt8\n",
+       "b8ylFz8ZaTp+f0FfFUxD4To9LRVbubzqCR1get65okOKI/c6naCvnPqgWq5J5kcMdne4/9z93F69\n",
+       "xYW7H2B7p4sfh8gw5f4z99GXCaaoEkUJa2trbO3sYEiD4SA/QDPbapGmUc5zOxYmgjRJyKKYcOQT\n",
+       "GwkpMHvsKP/2V36JX/j5n4fRCDX0J2LDlbnkwvSCcXIF3WQrBkzP5KPzhMUzdSWn84NlJF58r+AA\n",
+       "dW8THVmXn1HUoYzu87Ga9DMvng2TE69sUpetCCi7TqYH2qa36TCFEsUJArBMh7XVDT776c9y5q6z\n",
+       "bK6vMdeeQw36pET4saJhmfhSMsgSahWH2y++QOZaBP4MweoGKvZxDZPI97EcF+G4eMYsSexTbTSI\n",
+       "lWDQ7dLwKvSGfcJwRH6IMsOWEgsTEcVs3rqNnyhee//D2IYJ0iDsB3z1s59nvtXgdW9+M3/0R3/A\n",
+       "6173GsLdDkIKgiigogyMfkStUeXZzWs88e638pnPfZ67zpxi5vhpdlc6LD54hva3OrR6MU6k6Gxv\n",
+       "IaVDvxty/uzDdMIhv/y+3+Yff//3Ii2JkcRYcUzVcYmzybCoOrWm93n+9ytsHGu0VjH+eugFKSWu\n",
+       "6x6w7PL5PN4E12W/PM5la7CQhbL31nhhUBPhK8YIe5qHjJz6bJ0W0a2Cct3KDgBFHYIg0GiUMf8d\n",
+       "huHec3MaRQiBbTv786EYA90jLIqiibSGh5VXTYHrpkOB9HSFUzSq7FNcXvn0Do4Dn6rlEGUJd999\n",
+       "F9evfYr5uTau65EIAxmmvPTSRULXwh+mWKaNV6nw8EMPIqRBlil2d7YJQp9erw9pipA1UimpVDxM\n",
+       "KUEogjTAc6ssnzyGMCX/8b3v5d3f/h00bRuSlDhKUBIQYAhJtpdEGMNACSBTJFG+6gpjEnmUFy1d\n",
+       "mcNkFvZpMYnLForen/mGcYF4C2GfvoGpRyjUr+dH1Ccnnj4Gel2LjZrD9jaK+/NxJO+j/Qzqegq5\n",
+       "w2NdJFECUhJEIR/8wB9CarG106M/GGAg8Qc+tbkmKuiTphlRGlNvtXBtydbqNmazyvNf+RLhxgae\n",
+       "Y2LbAstzMTGJghBpOcwuzHHsrpOs376DMkYYbo3BaJOt23doOx71iofMUirSJQsTLGXQ3d7lpRcv\n",
+       "YjSazC8ucf25F2h6FV689BJHHzjJd//g9/Gxj3yE2XYb2zLIgpCWtLA9D5WkDIhZ6W8T9Po897kv\n",
+       "Unv7HEvH7mJna5dTFx5i9emLDNZ3qdoW/cCn6nl0d/ookfLYI4/xf/zSL/OP3v1fc3p+DtepEo98\n",
+       "LMcmVQphiHzBzBRk2V44CAOk2LNGBfIVNzGLk4/Z3lhN38/RwcJ4rh8Mv3rY/+V4PFmW7Z9h0N+R\n",
+       "K3YLKcU+nTNWvgcpmvze8bzTZbdQqoW8NRpNsiwlifON/CQdz78sy0AVG7mKarWay2SSkGVF4Lni\n",
+       "VGtufRSx/i0rmqBHi/YVFndhDUwGqTtYXjUFriuGokzjaPX/dZOiPIhSSiyREskYTMl8rY6RpNxa\n",
+       "uY1MDExhMzu3iHmsgnAc4mTIaDgkCHyuX30Ox3Fo1GeouiateoP52Tq9bp/haIifpPhZgGM7zNQa\n",
+       "NCpVQCH6isfOXOCZ6Cs89eKLPPLoIzSSFNu0GKkYy83DhtpCIg2DUMIoiZFC4JLHjygrQ10B6yak\n",
+       "LmRlt6oyn1f0iS4Y+fNy97XJPhQYhnmgv/X66OM0RswHIyceDEY1RjplDlMf93wCFl5GBdcuD7xv\n",
+       "WrGxMNwaf/7JL3Ll5S1mvSa15jJ3djpsXb1KNhKEm0OUjNg1MoRQHK3XIPRxTMWMZbK1s0Ovu40t\n",
+       "DNRsC6/RhGGGk1kMY4VRaWLMzOH1YyyryYiY0epN6qGgGgWYtZS5U0dRYUa01sPE5ejxk3T9AVG/\n",
+       "Q9DZ4vadmwRRwNXb1/ihUz9KFIccPXmUnfUN2o0KnbrJIIxoRQZJkiIcm5cuXubU3HH6V1dY3V7H\n",
+       "ePwMwncIr25jziyRBhF2OMCpSFIX7CSjoUz8nZC3PfxGXnj6Mp8bPMX3fse7aFcqJNEQYUuiLAKR\n",
+       "YYoMA4WBBCVIMUgw8lybWcwUkdgbE3PCpc4wph9cG99fKEu1T6Ho1nSZ+ivKtIQi5U38sWdWQppO\n",
+       "7pONAcG4DgUC1wFi8V5dqe/XL03JVIY0JI7hYCt7PxdpuudpojJFkhYJwidPkhsGSJnuswymae/x\n",
+       "8dEEdZSm6T7iLiwa3Qo5rLyqCR3KB07KK2HZ3CsrfR3BSymxpEuYpDhVl5mGw4ljR9jcXOOJ84+z\n",
+       "dmedly89h+V5hCql3ZzBsR3mjyxTqVQY+T6WadLt9li5fYNMKaqVCrOtOpXGTI7yRj5RELGzuUV/\n",
+       "0Kc128QdObz+NW/gP/zHX2ZmZoYHT9yF7/vUKhVGoxHSEMTSIA5DhBTY7JmBe6c1JRwQyDLnr5uF\n",
+       "OhVS3oTR+0bvn0LRFpNHf64QkmzPP1jv3/Ix/kLYdDNzvDAc9B3Or4sDil5fVHS6pMydw8GY5NNK\n",
+       "vdUgigWf/dhfYjpVfBXRnqljRTGGY5M6NtJUhCkkUUy9NcMoTBkGHU6dPEkYjNhau40rMxKlGA36\n",
+       "RElCo9IC16TqOZw6eRcbGxtEgc+5Bx8gTmM6JJjLQ5Q/YKe3RdwPmKvNELZNkoqFmK3RXVtnBosv\n",
+       "P/0sJ+45xac++yl+6Ed/GN8PkAa84Y1v5IO/+wHcSpX7HniASy9dIiRGWpLAD0AKjCOLVEwTWwga\n",
+       "WKQVm9WNDnefuQ83CxkIwfXVFSxb4Mw08awKSRjRkhlGb4e7T53kf/wffpb3/JMf4e57jmNFETUp\n",
+       "ycIYUyowJYkQKAQqy3JUDqhXOFxdpjF1OmXMMx/cuITcstP3ZnQqozy/Jz041L4Vqd+nK1ydUtEP\n",
+       "t5X56cK61PWI7hBRuM3qsqkzBDp42vfE2XOHLp5X9JFhGPt5W7Ms0zZ3Y4Q4yPcXNJBOK79SedXj\n",
+       "gesNKJtSxWe6y08ZeeoTWwoLCInDmEQolpcWefbp5+h0t5mdbzC/3MZ0XMI0ZrTjE4URd25cp1qr\n",
+       "YUiJZVuYQtJuVPA8D9M06Xa7bK35mLaNFAZVt0L72HEMQzAYDRkGfdZXNvjhf/zDfPpzn6FZqbDQ\n",
+       "bBD6Ea1KjX4wwlcJ0pTYWZ6MNlUQCoUUYGSTwYXKYWgLZFsUnZcu/i/36X5/aAsDsJ9rszwOBVIu\n",
+       "0zD6z9gFMf9eGTmVaZJ803SM1PQxLdMyeh2L9pY/O0yBZ1nG777vdzjSaJI5VVIhuHb5IjOGgfBq\n",
+       "mFUHYQp6/oBqq0GtMUOnM8BE0huN2Lh9jaolIQkRpksah8Rxgh/E2I0GJ+86jWMaEAU8cuFhXGmw\n",
+       "e2MNzzJpzM3S9JZxbtmkSUx3t4OyXO47/yDXu9vY0mD9hcvcdfcxXrr0Mm6lwhve+AaCJCSTYFg2\n",
+       "b3/nO/n0X32CGMX80SPcuHYNI81wHZtGo4Fs1rj33D1ce+ky7bvuImjPMFIweuYZ3nDuIZ67fZsZ\n",
+       "LHZ3+/QtycAaIeM8wFqz4jHo9vmf/sXP86E/+zDb6ZDH77kX264w6g6xqx5RlpEZilTmLocy3fOJ\n",
+       "ntrbedHPZOiLcdli0y2usRKdHvtdt+AKZVr4fOsyqMuYLkdhGO6/Uz/UVk7IoP8U8614tn6oqVDY\n",
+       "Y68RnbotFpLcYsy57ZQkyQ68J4qiHMhpXixSShzHnlgoLMva98wrz8NXKq+aAkS5HNgAACAASURB\n",
+       "VNcHUV/ZyhO3EBY9mQFMxiUo7t3tdPBq1bxDDMl9957l4nPPstPdoj+06Pf7WLaL5bnMuHOcOHFi\n",
+       "//nDYZ9er0enu4thGIRRRrXW5sziKaLUIAgiOjsdtjfW8f38oMTsXJv2bBNhGWysbvLG17yeT33p\n",
+       "03zvd34nchQxGI4wbItIxdiOhR1nGBmkKiNOMxzLwtI8bXTB0hWxvriVB7Sc+09X6LpJqHuhHFb0\n",
+       "9xZ0VdlX1jAOUiBFvfXIiUKI/QlRRubTFojiQJeuAIpDHK+EQrbWN7j+8mXOnXkYe2GB2JTc+OLX\n",
+       "WGjUSC3F5vYGSSpwmy3uu/AEvWHAWvcyzWoNjNxbIh4OmKmYpI4BoSLLYKPfwXIkS1lAxR8x47nM\n",
+       "zTbxMLj65S021m/jex7u/EJubUUhV+7cYenUab7ywvNYnkP39joPLC1ybXeXrzz9Vf7Vv/kFUgGG\n",
+       "ZZKoBMt1WDx6lNe++U18+Qtfwg9jjFYN5QdUDBfbMtno7XCqYqCiiOGNNbxWk6jiYfg+F59+hnDo\n",
+       "YwUxs24Fs24RGIJsGLE0O8dApSxWHbrb23z3d7yL3/iD97Py8nW+861vZWlhiSQckiUxhiLP+yoU\n",
+       "ytiTr8PDa0/w0GXXujJFNk1p6h5Yk+BgHKunyIajW9hlNK7LlOu6+3OoULg62NPBYnGYpvhMr2+Z\n",
+       "A5dykhHQPUXK7a3V6hM0og5Mi99F231/OGHp6m6/Ot0z7US0Xl7VrPRlhK1P4DGKkxMN1AWjbKJZ\n",
+       "lpX702YZSZxgSsnc3BxRFHP0yHGWl48jpcnA90l9xe07K/k7hMCruJimRXt2dk+gFGEYsbK6Shxn\n",
+       "WJZDvV6hXvMQKqce/GDE7vYOwgTTMthcXePuk6f5hf/tf+efvecnkJnCSmJMUxIHEVmSYUlJJiWG\n",
+       "FGRJSpglE9TEtA0+vZ06J1woSF1I9P4tI5WprmJjmnC/6II2qfBVzs9qyGCaJTWeqJPJb6dx58UE\n",
+       "0tGSboYXLo6Hlc9//FOoIKTX7yBUglVxyRIf0zUgjEgNSZTBTGuRUDms93osnboXU6VcefrLDP2Q\n",
+       "pldBECOFgSQ/ZOF6Dk6jxvbmBtvXb3H+wgXsNOW5rz3FXbNtkoUm4e4ug34fx3MYJgGt+07jtGbp\n",
+       "3t6AIKbuuGRVm6ee+io/8dM/xekz9xDFUR4/J8vAEmQqpdFuE6WKWnuW+5fnWL9zG9EZIlJBHIV8\n",
+       "/JOf4O2PvRkvyejdWuFS0GMmMKkZktc/+QQ3vvgMfhjRiXoEromjDO7cuk1sQmxKZAaDwYBve9Pb\n",
+       "2B10+MBHP8r3vOvbabouZiiwVIZUGYnKSEQK5CGJD1s4dVRbjFcZfB02zvk+DBP0RnFPIXN66Igw\n",
+       "DPcVarHIF3pCf58OHsoblLp1l1MTTMhuIZv6/Br7aMfIvbMGOU89Rvrjwzi5v3mx8ai/twCfZZfF\n",
+       "/OTxGKToNKLOLryS7MPfAQVeVLDsbQJ5QwvXHH1QivuKVXbfdLIFUmSYjo0jBEjJ448+yUc++meY\n",
+       "dhUpHOqVOo1GE2/ewzTzHd4kSfBHI8IoJI0TXM+hVqvhOA69Xo+036HX22Fraw1TmjQaTRqNJvPz\n",
+       "bZaPLDLyB2ztbLO7u41yLX7gB/4b/vCjH+Vd7/g2LNOGIEAqmSditvN3OjKPoKfEGL3C2DLRXbX0\n",
+       "fimiFOrIepr5WvRT8VmOHCZjqIw5O/a/pwteManGJT9dmd+rm8nF8WoDPZdima8shHQaNSJlfhoz\n",
+       "b28xrhLLKlzUppuS11+4RM326A66eFnCypVN/H6fykxGXZgoy6FRb9JaOMLV2+tsd0c8+MBxOhur\n",
+       "pMLAdD3CUYjrOag05+0hR1PVmRZmBkuLy1QyuP7cc3gopO8TZBFexaM77LM7CIirNvede4gb124S\n",
+       "hRHR0Me0JB/4iy/yT//5z/LgI4/kMbrJ+8q0DOIkIUxTbMejvbjAztY2p44ew7ZMbl68RLA7wjNN\n",
+       "nIrFzu4WQdqj29nk9vY6cW2WdK7FVneH1sI8qrtLwwTDMRCpouq5KMtC2QahH1JzKgxHPkfnlzEr\n",
+       "Hv/Lv/s/+Zmfeg+ztgMIPMOC2McAhBSk2eHKo2zJ6a5wxU85rnwx3kmS7W825h4aBcgQe/7ZxRFy\n",
+       "c0+hFYgbLOtgncYKc3KDs5BxPVpn2TuqPFfK9KwQgiSJUSrav9c0zX1Fm7c1P/GZK2j9eP2khaAH\n",
+       "pMvlPp367sk2iQOLZbm8qhRK0UllgdAn9jQuWF99LcvaVzJJmrvgqDQGBMowsW2bkR/Qai8Qh4qV\n",
+       "Oxus3t7Bqlg5520YWLaNYUhM08BxPTBMOv0hYjAiSRJsx6I9e4SKVwNg0B/gj0b0eh2KCEa2ZXL8\n",
+       "+AlGYciw20cZFi9eu87502eoCANDCqQliA3I4gQrynIUZk7uehc70OWi95XeP9M2CXXEO2k6TvqH\n",
+       "688sJl7Zk0R/tv5e/drYJBbo2cz17CvFd6Z5KuTvPxhHHdS+4tbNXr00qzM4XoXV/g7J9haj1TUi\n",
+       "A26HIW1hs1vxuHDvg8SpIvIj2q0Wd27dYuX6JcwkojHTIhYpveGAVI1wDYswjlk+eQKv1mDl2nW+\n",
+       "+ZseJ+j3uXjxRZrVCo5dpTXTYJBGbCQ+fhJxduFerjz3EsNRRK1eZ3s04uUbl/mpn/0ZHrrwCHGa\n",
+       "5vlz8vVhr+G5XPQHQ/pDn6Wjx9lY3aJSq+E2GyT9gKrtkRqCi5de4k0PPU53a5v09hrD+Qy7YjII\n",
+       "fY7PtTCrLjYxz9y8QioliQzJhCRVAiUgGIyo1aoE3QHRcMhP//hP8lef+CRvedPraHkeSoJUgrrl\n",
+       "EMVx7vJ6CP+qnz7U5WzaqV997AsKQpefsnzr3ykCwI2twYMeW2WlrM+HsqdLId9jEDI9QYNeN8+r\n",
+       "Tnw2poPyuDvje+We/MsJEKbTMfq74jjn0Mt1LffJ3+lNzKIUlSxPbn1QdQWl/+i7w7blIDJAQ2vN\n",
+       "eo32bJvnL77A8aOnOH78GO3mHMPEp9vrsL29TdpLcRyHarWKQtBwHJIwIghGRFFMlgzZ7XSR0qBS\n",
+       "qeA4DnbFpT4zg23b+L7PcDgi8CMcy2EQ+Jx/5Dy/8zv/idl3fz+n5uaxLYM4TcjzB4ElIBbkfrla\n",
+       "e/Ud+vImUNEPxX069aLfU0bYOlooC0l+79j8LUc1LFMg5Wu69VAeM31Tuvx8fbzHpm02wWPqpbzB\n",
+       "XZRBlDAz38Tod/C7PeZrdXwzYxBGdPsD3LkWuzs77HZ8Fo6coFavc/3aJSwVotKIFHBrTRLDZdDv\n",
+       "sjkc0V5YwHQrbK9vMVNt0pppsTMc4WbQW1snsSzsLY+eAyMX2gvLXHr2IhYW/TDiwSce5eqdG/zk\n",
+       "P/sZzj/5CGGc7qXbI49ameW/4zim2Zzh0ktXGPo+zTRja2sHuSs4e/Y+bgcZnTtrDEZDUPD0xWd4\n",
+       "86OvY2dri5euXsadrXP95g3WjRXOnbmPU+0lNjbWWA+GxBKa1TrBKCCRAstxCJIIMzOZtapsXLrB\n",
+       "27/pW3jfB3+bb3vHt+IsLeEBw94Ix7IQ5uGnHsunc8seRcV462NX/F9W/jrtUbYkx/I5edhGV8rl\n",
+       "5wghJugWHaiUUbEuU7rFO/n+yQ318gJRvD9fZMZ8dnnelPvGNA2y7OA5ibKyP2wR3R+LV/z0/8cy\n",
+       "rYJlgSmUlc5JFdcLZaCb+yqTqDSFNAORO9djWdz3wD388Uf/gieeeIz1m+tsrN9BmSaNZpPTp09R\n",
+       "qVQIghDf9+n3B4xGI0ajIY7j0mg0cMwGAgiikDDM2Nnewvf9XJHbNtKQeK5LtVrDti0qQtLv9fn5\n",
+       "n/uX/Ol//jDzb3kzFSUQhshP+gU+KWCY5j6CL/qk/Hd5AMuWSDkQftGHulvUeBEcH5CZRNIHlWN5\n",
+       "gugLaZnzPCxegz7G5YVIr2/ejklvmnLWlMOQyMbIZzZT7KxsUktTUiKIFI5jE3qwNNtGxQF+Z5uh\n",
+       "YbB2+UWGww41T2J4BoE/IhE20q7h1iWiMUPiufTCmDRW2FWHWyt32Lp5AxHF2AgGwYA0CMGu02y0\n",
+       "WLt8nTnhMegMWDh5DGGZZAY8/uRjdPwu0nTz9grI0oxUFe5pJqORz82bN2k0mqSJYvn4Ce7cuEHY\n",
+       "H2FVK+yEQ2wDRJqxtrnK5uoKDxw/wZ3eFs989SvMLC9x/JFzZKZEdgbMKou+YTAwDeIowkgVQkji\n",
+       "JMrpwVodU5ioNGX9+m3e+ff+AU8/9TUqT1rMOBbzM3WGIx95yMGpw+SjPDfLXip6uIxpSklXjNOs\n",
+       "vOJHD9hWBnL6nomeDKWoX3G9ECWdZinL2Vi2J+WwSByht62g+8oOBHr9y3tbaRrvuzMWKF23eA3D\n",
+       "wLbtv7uxUMocuNoXagN9A61YFcsdMk3ZZyo3ZQwpkAJSMtI05PSZkyQfGRHEPZaONJFZi+4wJoxj\n",
+       "1lbvYNk2ju1g2w4L87OYhslwNCIIArqdXYSwsCybaqVCtepRqTWRQuIHPt1Oh1F/gO8mpKmJsHxs\n",
+       "UyL9hM/9xadotds8d/US5x9+EBmGCD/CEQbCEsQqIz+yOUYEOm9XXC+nkNMHtYxGpm5UUnikFJno\n",
+       "J8Np6ohZRxpFncp9L+U4TvI01FKUaXSP7iapb7CWg2LpQv9K3jP3P/k4l556Gk+YeLYBWUTc69Hr\n",
+       "D7AW5zDJ6HW2qdsGZjQi3F4hGO5i1h2azSZutUIQQZqa2LUmS6eO0Vic5/aVq6T9gEqlzktXLrN7\n",
+       "8xZzlok/GhC7ktqxBXaHfXqXb2DujkhlzFvf/q0cf/IClWOL/NVH/wTilMzM9zkked5KpMQUxb5P\n",
+       "xnPPPIvjeNimg+u4DIIRp06e4tnPf4Hlk0ex203CnR3MNKVZq3Hx5ed55P6HWV6Y5dSx47ztbW8j\n",
+       "rdpsPHeZFz//LMdPnKBRcemmPoa0saSJrxSuZYMpGfojTNNCCEnUHZIMfN7yyGv4wue+wH2P3Ieo\n",
+       "unh1h2p4uAI/7ATwNDkswFcZbJURell2i+focYTKMYR0Ra7HEynu1/WEjmyLIGtF0R0Iygi7DBym\n",
+       "WaN6nXUng2KxKW/OT9NfRR+WFf7fWQqlfALqsPCOZRJfiPyUXt7Igq/dix9iuEghMQwwUAiRkApF\n",
+       "1XV54okLPPXlL3D25N0kQUyjeYSZeo3KQhXDMBmOhoxGAWvb23tK06LVanF0cQnbqzMcjugPBmxt\n",
+       "bTMYDnBsm3qjzgMPPIRjO3Q6HXZ3dxj4ffpBQM2ycKTJ3WfP8v4P/x5ezePCqXuwRZon35WSLM3y\n",
+       "pBAlVOo4zphO0cxBmDydVjYvCxcnHankPqrjPi845UnO7eCmoo6GdKpD9xoyXoEnnTZZ9RNmOlco\n",
+       "pSSOwxJllMcGzy2rPa+NKcWr1wjCkKbpILKQGEAY1Gs1zNl5At+HNGN5fpFbV6+g/D4LTQ9FzOb6\n",
+       "LdrzR7HsKo5wmF8+QuPoIt14xNFjR5k/fR+3Ll/mzu07MBzgWiaj0QinNkNfKZAGdpyx5Nb5ptd/\n",
+       "ExuJ4ubaKnfX6zSdBsPdAXEzxTT2zOpiwdprY78/YGdnh35vRHtmljAcUJ9v0bl5m4cfPs+nv/wZ\n",
+       "zj5wDytRSNId0B32SXzF1u42TqXC33vr32d2bo6rO2usrq3SkCZyFNKebdHLBPEopmE3UFKRkGEY\n",
+       "JngGCQLP9nAMC88wWHv5Gv/VO7+LD33qT4nqNsfm56mIrOycpE3C/Pi4SjPQ/LoLgFCAjlw2CvTM\n",
+       "AbnTFVYuF+wnRwF1IIx0mV/WQU4ZuY+BR3ECVN/sH88ffdPVNM2DHiPZWOewH1cFsjSvY16FfI7p\n",
+       "dSvaWsyFvF7s94OO3nWQtM8mlCyDw8qrehKz6Ggd9enuPLkiiibuLSKf6QOWd5xAiARE7sKaAioT\n",
+       "SGGhfHjra7+J3/jN99J+Yo4kTemtZwz7Qzqmj+XkcSAMQ+JWKyRJiilNglFEv7OKaa9iWblJc/L4\n",
+       "HErNopRiOOizvXZtvw6epWjXW8RxTBAnmFS4fnOVb3nLt3H71jWOzR9ltlFBmhlZlmDK/CBz0Z59\n",
+       "tLAXG9vYS4WFUGQqQ6npOSWLMv4soziKYZr2Hkede5AYxkE6RFfaxU+Zf9evjzeDDnq+6HUrK3Dd\n",
+       "PCzoH/2Z+Ribe88r0slZxFmMKaeLanjpNmdnFxEioTPs0PcNtmNBs30MkgZrw3Vcy+Pl67cx/R4L\n",
+       "VUWW9NkJIkIMAlKMuEM06LM96MCXP8lg8yZZxWZoWEg8Zi2Xu5bn2A26MNPAqcxgjVo0Z+oYbspS\n",
+       "ZHHl+Re5pgS3r17izm/9PqfcI9hGHWF3SOIICwu1F+MlS3NF3tna5tbNO8wvHCFMJK5XZXWjx9zy\n",
+       "CdJuh3uXTpKtdjl24gRXb11neGsbA4/P3brF9/+rf4FcWuLGxho3r1wjGUS02m2sKMXZHrJspmxk\n",
+       "CbtGyPoopt2egWhEnMUYBgyiIZ5h4AeKRsPj+aef48zx+3jmqStEp1OMo22axvQ4HKGKEZnAVBKp\n",
+       "IEqDiTk9dkMcK80CcBWAYgK1ColAIIRCMFbySTYGAYUVqu8R6bInpTNh0Y29UQr5LnJyTgIVHSDq\n",
+       "Hiv78ir35hTFO2Ec/4S9ebU/A4sZMDE/9bkxrsPkOQ19fhT116mow8qr6oVS5lOLDBx6AJcoivYH\n",
+       "JkkmXeDGSr24Nhlj2jAMlBCYtkU8TKlUKly9epXmTJN7732UilslThI6vQ6dXockjUnThFqtSqvR\n",
+       "wjJMer0+u51tut2cUjEMg1qtRqXi0pppUa1UUErh+z6DwYDdTic/VeV6zM40iZKY3W6HY8vH+PSn\n",
+       "P8M//O53MUpiBCKPWW3JfQ4sTRMyle0FxcnRDUIiEUCGjomKNialI7zJXuAs0xxvipTN2zLHWEZF\n",
+       "xeqvb66UUUXxfSiC9xxcfCf594OnLnWEUgTwKu4rDnKsrq5y9OhR1tfXp8pRd5gw2O6wsNDEEhLb\n",
+       "EFw49xCWN8ONm+vUjlcIVzoMN28zP2uxNdolHirSyCIzJUGoqFYaNGuLGGca3PzSNl5ljsFoh9kl\n",
+       "j83eFnFtmaudAbVWC9fJePHFL2CbS3g1iW2MGHlNor5kKzVZ3R7x7MWv8IYf+QH8yi6jCFxlkGYZ\n",
+       "hhQYmnud67nUalV2dndZXq4jpaTlVagbJpESXH3pRRZqFWZlizMLR1mLJTcuXud7v/0HOFufpdsZ\n",
+       "8MH/9DucXjjC6eXjRJ0ud3q7LPgZxxfn2Vy/g6h4nD17N5u37jDvVYlJyCyBaZlYpkSkGY7j4Scp\n",
+       "iWVw+tRJXrz4HEdmHiWZnkOAcOhjSAOkhSEkxp6izxFjbr3q3kzj8TZI9/zsDcabiGM9ANnePC90\n",
+       "RJk/niZL+iE1nWrUi26pFvpj8t0HwYZSaj/rlE4N5bTQwdSFuqtr2QNsWinTpoV1WxQ91Mhh5VVT\n",
+       "4OUj45B3RBiGhGG4/z9wQCnrRVdIxf8TG3tCoAIfr1bl3rP3cv3mDe6//37WV66RpCkKiet51Cou\n",
+       "lm2Tphmj0YhOZxuBIo4jarUKi4sLSCno9/uEYUCSJKyurJAHr7exHZsgHOF6FRSCMAqId1KCIMCy\n",
+       "bWzTxHMqfOLTn+LR8+cwTAtDmpApDFMiDYGFiVIZURTu0wfSMFAyD3ylo5lpPJkQRTLkws1JTPCG\n",
+       "BfV0ODoYC++04P9lE6+8GBT3H8Zd68gcJieoEPnBiOK+AqEfOXKEMAxZWFicKkdxNMCtVdju91Gm\n",
+       "xJlpcvK+e7iztsX5Jx/GT3a5ufoc7ZlF+uEOgyil4boY0sFqzlA7fZJ+t48XCTZuvUTX36XqeKRZ\n",
+       "k2A75nRtmYXGMmm9yc1On6RvcNw+x5BVdjc7NNptNmybyEkYBUP667e5/+4jPPamx1A1GzOUmJmB\n",
+       "IfeWXzVuv23ZdDpd2u0FPNchTWMMx2Q36tMPO5x+4hGufe2r9G71cVo1WGxw5vTrufv15+htrnP5\n",
+       "xUu0I6h2A5JKl6EI2VVDOlfXWO7ucOzYEtdjn1s3L9OwPOLAJxaKOFZke0gyDAI810WaBkGWYFZc\n",
+       "Hjv3CL//wQ/xjrd/69Q+N1QGGYTpHgVAAQT2YviTIASYhpXTLWmeFg8hMI08nEOaZcRJHjDLkJrP\n",
+       "uBB7flp5Zx3GtxcyWFbauryWNyR1nlt3jCj+h8lzFQXlV363Tm+8Ekgpgxi9lOdCMV908HMYPamX\n",
+       "V02B60pW94GG8aqaJAmu606srmVFoZscxfXis/0V0zTYWt/g3Llz/N4Hf58nnniC9myFWqVOnGTs\n",
+       "7vYIRkMMaWCZJvOzs7iuTZyEdLtddnZ69Pt9sixH8QsLCzTrDeIkJPAD1tbX/l/m3jzYsuwq7/zt\n",
+       "vc98xzfmy5dTZVZWZo1Zk1QSoAmNqFFrACEZEWAEmO42gQnb0RFtYUfTJhocwWTobsRgBMbYEkhI\n",
+       "QsJCI5pAElKVSqpJlVU5VE4v8413PvPZu/8497x33stXQNiOEOefd4fz7r1nn73XXutb3/oW48mo\n",
+       "NHoSAt/DdX1sy2U0HJae+WjE/Ow8o3DA1dUNjhw9BDrFkpVKYHXDDLY9TXrIckLrbdhod8KnfoN3\n",
+       "IKdyXC1L1jxybpro9bHay2SpPOK6Ma4mZ3WP6ptv3XvZbyFV59Q/pzqq52ma7UrWSilxXZfxeIyU\n",
+       "ijwL951H0oS4jSZoydpgg4W5BbYmW+Rmguul9Mc+Td+wZFmshB7W/GlUBn5eYM10mTlynKfii9iu\n",
+       "w20HfILMpthKcRsW7/zhH+TCE49iWZJX/cBbOLu6yZULK1i9jFkfPvBnH6OXGfDnuXLlLFYx4VB3\n",
+       "jv/rF34R3WkxHGiarg3swE1yO98g8FyPRqPB3OwMRue4jo8WGtd2CC3FzPISq5e6iNGEqDeisziL\n",
+       "O9/hwOGDnP/YX7N17hLXnz3P/a9+DZicvEhJZc442qIdOzjr4LQ8jp08Tm8yobAcCm1ASIQWeI5D\n",
+       "y22g8xzHd/F1BrYkHI1401u+n9/53d/mDfuMuSMUWGoqggW22aGeVhtvGE12YbtSSoQUGJ0jhcKS\n",
+       "EmEptBBTPHkHH96GXsyO9kk1p+rRYH2O7VfwUuWL9hbHwW6xq7pd2dt8pdxTdjshZa3DzeJxQtws\n",
+       "hb33nOp4PhZP5bzuvb7nO77tWih1Q2zM7u4clmWRpTu96Oo7WjXQdaqcVLt3QikESgjiNGVmZobc\n",
+       "GF70wod49OuPcmRpDsf28Nwmrtuk1Wjguj6TMCbPQgaDPtpkuK7NwaUDuK6P1gVxFNPb2mB97QaO\n",
+       "4+D7HouLC/ieT5LGxFlOpguG62sUWYHvBviOR7PRIM5iojThG088ycyBRZwiJ8szlAQ1ZYlUTX31\n",
+       "9tiAoUBSaovXvYM6hlb3LqpkS32cgZtw62qCVHSm6ti7YOpeRMX5rp9fh8PqR31x7GWZ7PWiqoRV\n",
+       "udgByutxXRfLdhhPO7zsPbLxBKKEQhpmXIczp06z3t8qDYQU3Lh+DaXHDMwY1WjimCa2pTFRH993\n",
+       "8bTLAa/LyfkW54aX8AqfgzML3Hpkia9+4eMIO8OfnePi+ioXN9Y585Lb6V8+x4FrXX755/4dX7v0\n",
+       "LE9trrA1WKdjzfKPvu/NBI1ZEuGz1HSJ+jcwntq+n1UkxVROOI3LVn6tdhfPD9BpxmQ0ZrzRZ/bo\n",
+       "QU7eeRef+eMP0nYcYvMcL7rlNj7yH9/Hi9tHUMMI37FYmWzhygbXV1foDdaJ0gHx6oQXzN3LDIa1\n",
+       "SxdxlxZYH/dwbZ/AblCkBRKBMmVeKQ9jbE+RpynKGPq9Ma983f8E7/kvN425MIo006TKYEQpCSGE\n",
+       "gDyj6jpjMKV2fFb2gbRtG0tYCG0w0mCQaCPQRpOlGUaUlL1tiM7cbJT3zrG93m59btej0/0M/n4F\n",
+       "N3Uncccj3908vX5Ujma19ixrNx13r1Oz97295wkhththVL/r7zLi39ZCnjqeWseMKg9sh+kApYda\n",
+       "/e8eFsV2gUQNa9Jltt9gCHy/rHyS8H1veQu/9du/zcte/IKSWTKMGI02aLfnkXg0g8aUlKHJdcpw\n",
+       "1CfSKa6b4Ng2tmMxP18mMQeDPv1+D6FLMXfP8/AbPtK2CRyP0WCMzjOyIiOODcqzmZ2dJ+i0+OP3\n",
+       "f4Cf+KEfRGXJ9qQXotQfllIgkOxAkPvzoevYWXWjdzRlbtYK34/KtR+EUn/+t2GC9d9S97Crc8ti\n",
+       "hRIXreAcY3YL9lcejpRWjSssS3U5WWA7LoXWdDoz+84jK0qJx+toJWgvHaQIDbluEMwtMNQe/rUv\n",
+       "kEQ5G6JBoRUUQ4yVEBYJXrvDpcEWW/2rHJ1t4YeSqNBsRBusP3GFmabLbUeOkfSgq7o8/Ncf492/\n",
+       "+qu86r57eeXSnRBkWIHgxSfv5tMf/wBv+J/fxu0PPEjmQlZsYmcunlQkUiGmokjosrpUF6V4fxSF\n",
+       "XL1yhcUDGZ7rkirYjIZECh6/epUiS/BO3sLGhavEF1cIHn6MF9x1L5lj0YtDxuR8c+UiQkK4vkFR\n",
+       "JCQiZuy7XB9vcGL2BEWa8dhjj7ElwbF9fL+FpVza7Rl8N0Cbgk67yWTUoyhyMgSt2QWS55GUzbTB\n",
+       "SIGybZASJXZyU2XOycGyFGmWbLcXLJkrBcqUazNNS40TxFQmQyiU0Mgpw0UXBZmpPOTKky7ndVmQ\n",
+       "Vq2LncYNO966QYidSLycm/W5vDuRX/9b542X37lTTbzjyJSJ2Dr0Uj7mpnVRt2d16KVeiV7//vqm\n",
+       "8A/aA9994TuP68aheq9ecloPUfYaoOoQZscHlZTcnSLP8QIfy7F54P77+eojj7B88Agz3Xlm5wIw\n",
+       "il6vx2g0oqBs/dTuNLBsm0bTn0IK5aJLp+FXu91iYXaWoBGgtabf77O+vs4kipBG0mm1mJstDY9l\n",
+       "O4yiCZMkIQgauI7LU2ef4fTxYyih8AKPKBxjb4sIVcoZEgSIPRtcNUaO49SMY2XQ2TaM9QlUH/u9\n",
+       "4wk751dhcPVa/X+q1/b+7/6ekd7l5ezge3rXIip/946XlGUZhjI3ACVfzhj0HQAAIABJREFUWj2v\n",
+       "JkSO0wgYJhnDVPPIk88wyDULR3L6ozEL0SpHgw6uLkil5rq2eS4KiG2X5c4cgejj3jJLfvReVp74\n",
+       "CxqO4NjxZRaXFhhu9Hnu/DUWLIf1r32eX3jn95P98Bv41f/7l1jvrHH+0bOIxQdZNBbKEbzmLa/l\n",
+       "0kYf6YPJJmQ6ohCzFGaq80JpwI0BgSDwfMbjEb2tATdu3OCJxx/HWZxhPB4z22ghGz6ZMCyfuZvl\n",
+       "uUMMLl9jtNJnc2aTD55/hCPHDrPQWGBMjrAFw7VVdBwh7YJRWvDZx/+GXBhOHTjGnUGbs70Nmm4T\n",
+       "r+FDo8Gl9etcWVun2WySxQkN2+bEocNI5XLh/CW0tb8B8VszTJKYPC8oTFFWmlJgWzbCsikkGCQZ\n",
+       "AqRFxc7IdYGHLKNJyyr56NMu7UYXpHFSFuJIWeaUonQqx1DmovJ8Jxlfcb4rjnlpL4qpw7Azzyvj\n",
+       "XxrOHRphNdfqxrY+p6u1VBTZrnlefWY1f+vQZHXuftHmXoNcX8d7oaDn89r3O/5BtFSr46b15+X7\n",
+       "dYW7ko+51+hUHmd1LyR7Qi0psS0LiSCJYk6fOsWffOCbPPjC7+T61eu4Tkqr0WVmps3igXniKCHJ\n",
+       "EgpdMB5NiKOQRiPAsiziOCZNE/K8YDwaoqZYred5BIHP4VYTgyBLM8ajMePJGKkkJHHJLpGCSRLx\n",
+       "0P0v4C8/8ylOnThJrgvCOEVKC8u2KPIcozWYUmgfBLrQ6FrJ+zbVap/S5NLL3es13CxVUI1/nT5V\n",
+       "ffbenEJ9I3g+w733PhbFDjS2H05e34wdyybN06nmSdnaK8sypGMhlWQ83h8DfwaLUZwznOQsNzUz\n",
+       "2YB8eJW8eIpG3OOCbGOiLdqDNTaziK8Xszw+OUbbDWgVj3Lb7CrXNlKevujSbWSsrfVpbUg2V59D\n",
+       "yAmzS7OIYJGvbwLXUsT4WX7qn7wK/+A51odn+MsvtXn0kU3+5b/+RS71VylsH11IBAFaCQqvgTYT\n",
+       "pJBQ3YtpaPXss88iDJw5cwYhFc1Gk2EypnX8Vg4vHmTu4EH8mQ4NN6CDwxc+8BE+82cfIeuHdObm\n",
+       "ePB1r2bmyEGQksSkPPvs0/zFn7wXr8jwA4eQjL9+/GFGV25w9623c9/cAv0kIRn0OH7bce570f0M\n",
+       "hcEoB1MYrMzQUh5CWEwMZPp5pEz9sppzNOjjegG5zEr8PS4V+ZI0AqDVauAGwdS50EjbwhQavX3f\n",
+       "C4q8KGmEUuJ4Fo7nlYa40Hiet91irK6hUs/d1D3m+jzbBa3WEoN1ymp9Du6d8zv4/e75XX5/Kci1\n",
+       "bV+25/T+RW31pP5e6Yv6WoCbqYx/1/FtZaEAu7CnvZBA6Y2ltXBpx5Or47bV+bZdCqIrsZtfWaQZ\n",
+       "rueiTemaz8/Oceb+B/jSl7/CC1/4EHmaEyUT1m9cpNls43ke3e4s7e7MNMwdkWYp4/GILMsIgoBO\n",
+       "x6PdbE4xYc14POb8+WukucZ1XNqdDt1uF9d1y4rO4YD+cIsoilCWg+25vOIVr+Rd/+bn+He/8PM4\n",
+       "UoDOiOMIW5W8WCVk2aMQgRYao3eaE1cTpRKyr9/wcoLlu/C/MhzcXYFWjXG9+06ddlhN7rqSWx3X\n",
+       "rmOHe41yeYhd97c6vw6hVL8tiRMKiulCNSWfvyhI86yMKJ5nqn4ttMm1h+MHRMOIOx3N6TlDN76A\n",
+       "Y1b5yDNLPC0WOHP65SQyZrCxwrIlEeGAp85dILhdcmrBwYm+Ss/PuZCPmDhLHGrPMZskHF1e5MNP\n",
+       "ab7EYX75feu8WF3nV1/ikLSvMeOfxNcS11ris3/zDD/5T7+P6+eewc4LctVgkMVYrsY2GjPlsiul\n",
+       "EKpsYfb1R7+OZVlceu455hcXWF9f5fCth/FbDrGJIM8xSUY/GbCZF7zoja/lysYKwzTj//i5f0Pq\n",
+       "2Wz1+ywvHGSYhRw4cRRLGj713veSjEMSk2Eri7Mrz6F1zrETxzh96jRXNjb41pe/yKH+HXSOH8fu\n",
+       "dIlzjZIeBQo0OMrabu6w9wi1IIw1nttBAJbj40gxlQQuW7LZtmJra4Ozz1xmOByyuLjIzGyHwPLI\n",
+       "ixwpDFJaOK6HkqXqX5pnGFNgCQvLEaRpTjDdAKqGv/UkY9UQYS+FtW6Mq/Oq52XbspudwL3Gfy/r\n",
+       "re6tV6/fDGfu36S57uRUv7me86k7ZHu/7+/igYu/r6v+P/IQQpi//uxHd5XM1w1APXud5/H2gNVD\n",
+       "m/pN28Fn/RLg2s5mlwNg2zZJliKVQiqFkJLCc3jP772HUydPcvDAAdxpkiWJ07KaDEEQNImznCDw\n",
+       "cJwdrqsxBlOUfe2MKWGMChsWqLKjSxQRZym2YyOVwvM8XNelmHrMk0nI6laf7sIia9ev8cqXfRcm\n",
+       "TzBJhBJiSiOchmuURlnXsvLVeO3t0FOO504hzG72x/4qbPWJWXkZe2Vq9074vXBO9Vm7eeJmW5ui\n",
+       "vknsFxFIYyGsalPI0KLUYA+jhGajzWiS8oKHvuumuXTy9T9DGKVkkxArGTFnjTk1azg+kzFjx2zO\n",
+       "nea5G5or4xa9FJyix1EnpaEUT69u0Z0LeMlcyEM8xyVsImaI6KLznIOupoFN2DjJheA4W1HEXcUq\n",
+       "b/IHtB9cp9mdISwO8anzLb52o82L73uQFx3M6VrrRM2ANeFhbB/LpEhjkEiEEdPCFcnWVo+nv3WW\n",
+       "paWD5HmpVqhlQiYyokmCiMGTPldHfbaKlCxLOdadY9YLUK6i0Ba+06AQ0Ow28QKLpiv51Pvfx+a5\n",
+       "Z+g4LoNoQigNWIZO4HLHsVs5deQE0TAiysGbX6R79DjN5aMYt8FwkiAtC8exKOKMe86cvmnMP/+N\n",
+       "p1CFwc4NSii04yJVxa+ujJdGKoGYJuYFMBwOKPKYMIxYOrCA7zlcu3IJx7bwPQclQRc5ejqnfWtn\n",
+       "099P26Q+5/erc6gb+8pWlLRae9e5e52V6rzyu3ZDteXnlAnour0q5/X+trS+Rvc6Mc/ngdfX2j0P\n",
+       "fjfG7E/K/7YmMZ/Pe6uD/UkS3WREqlAEdlMOs6w0vJZSJT4mFSgLIQWtRpM4TUrjrQvSLOMlL38Z\n",
+       "f/Hnf85b3/xmnCk8MjPbptnokOeaKEzY7F/nypXncByHIAhoNBp02m2Cho/nzRDHMf1+n62tLSzL\n",
+       "ot3q4Lke7VYLo8qmD1u9Hpcvr5ZFSsqi02qxOL9AZ+4A66MRG1s9Vlauc2CuS+AHFFmKFOVC14Ap\n",
+       "NLkptuEkqNOc9stmlwa/jmlXk7w6qs9RquzZtxPxlML4nufVcMAdSUzY2cT24+bvhJkGpfbQyGpe\n",
+       "SZm1361BEY9jhIRGIyDJEyxLMTc3R55rlpcP7juPZrSgbTlEjsUwdRmYeR4fap7ohTQbis7lT2MB\n",
+       "ca9gYp1m6B4iG+e85MxxDroHefzpq4yfGdC+rcsLX3+azWdWefbRc+SdJSa3nuDq1nnmrvwxb16C\n",
+       "WSfj+Kl76I0skq15FrxV7MYFTp9+CdmhV/PkV69zx50Jy8vX2Ag1y/e8jOtbIzwnIM9yTGFKGEVa\n",
+       "WMpiYWGBA4tLRFGE47hobZB6gpQGZTkwzGngkTUD1j1D7iisSYIdxownI+JJzsWrq5y/cZUPffiD\n",
+       "6CTEdQx33nqYRQ2NQoHlMfRyrqVbNOIR/adHjNY2WLLbzDbnmG8tEF5ZxbhdGkdmKRoObhBw6eI5\n",
+       "bj10dN8xl46LyjQ6SYjiCOMKbMfBUDX1haARIIQgy2LStKDVatHuOGR5iN/MUa4LluLW03cSeB4r\n",
+       "1y5x9fIlpNAsLCzQbAbkk/Guwr4KLqzPpcoWVPS7ugOYZdk2a6ruBE6mjKb97EplxKvcklI7kWaW\n",
+       "ZVOPf6exSX0jsazdjbvreH2dHlt57/tFrdWa+PsmMv9OD1wI8R7ge4E1Y8w909d+DvgJYH162ruM\n",
+       "MX8xfe9fAT9GWc3+z4wxn9znM82XPveR7cVu2/a2GE1954H9NVOqwalXCVYDUheS30le3Mw/DrTD\n",
+       "RGh+9wPvxW01uf3oCdrapeH5OK0GozTFUhYNy0U5FpZjb2NuvV5v23halkWj0UBKSZqm5Hm6/Rvq\n",
+       "N833ffI8J01T4jguf7dRjCZjfM/hr/7qs7zjH72VViPAEoYiy3FdjyIrm1q4rgcl4WbHc5gaacsu\n",
+       "fxvTMFbnORJNicqZKYUNbKv0PCQgdJnc1QbyQmJMTqlIUCCELjm7xpDnIKSNUg6m0MhshJAOxnIo\n",
+       "LJtMCwqdYymDMimOSqFIUGiMdjBQVtAag7AdbMsl1xJQSOlgTFnAkzkFnm2xcvUqF8+d58bqBoNR\n",
+       "QlxILl6+QqvV4Q9/5zdump9n3vgzDCbjkoqWG0SmsbSgE7QZbvVJ/Ii80KAcbNulyFNUEXKgJVlu\n",
+       "GczgCsdmHWYaipW0B2HKA0dP0hqE6LUttG+z5gaErVn8RoNbZl2OdeFQZ4vWgTFNW2GNOwhvlpWh\n",
+       "xGGelgePXF1l3buLUw+8GmVN8FKNUhmjICQWHgEB5AOEzHG1hdHQtxyktmlKB6M1uiiwpiXpFT1U\n",
+       "UxbhKKWwRYAtJegR5889w+//7h9x6fwN5ma73HPmOIPhCrbj8uSTF0mlhwhcjnTbLNoWdhzSCTzu\n",
+       "vvc+mgvLPHb+GlZnEdwWR47fwt33nSbNMm47dftNY/71b5wly1J832c0npDogsBvIoQgTVKEsKYl\n",
+       "8Yq8yGg2S4ZWmmfouOx25fs+aZaR6wzXLTu9N9tNjCno9fpsbKzTbhYkSYw2Bc2mj5QChQGdYQFC\n",
+       "56A1Skp0aiOkJM8zHM8hiiNc1y4rO800WqakzHrCpqjYQEKWj025uWpdoATIqZqpQe6yLVXStNIC\n",
+       "rx9Vk4Y6JFP38Ku/e6VjK5u2V5GwOu66/+X/XR747wP/D/CH9d8K/Kox5lfrJwoh7gTeDtwJHAI+\n",
+       "LYQ4ZUpxjj0Xu9uT3Js0qx5XJdbTz9+1o9WlJStjXeeUVzuZbe8Y3+pc17URAt7xtrfzy7/+73ng\n",
+       "1F1YWiItVcrKjkekSVZ26VYSzyuLLhynbDbrui5RFDEYDJhMJnieR6fTYWFhHiHYfn0ymWxvNo1G\n",
+       "g7m5OZrN5rTYISUIFlldW+EFL3iIj3z0Y7z9bW+lQNNutinyHKn0tNGEg8aQFzlFYaad0CSWskCA\n",
+       "sncy8VJJlFAYUTJyFGXerMiLaSZBIpScKu5qhIwRWsN0UkphkWUFujBYysKSZWJVaw1+m1bDp7e5\n",
+       "jq8cVJ5iOQ4aSSYdUtUCzyVodkiiaZcek5GmCUkakRcZQmpyXTAZrLOxuUkYThiMXOJwwjcefpgw\n",
+       "DEkzg+M1sLw2wl1kZRDvnUIAFLmNMH5Z9ScLomSA7UhaM4JTd5zGUR7rm1ucu3CFNAEpPAyKQViQ\n",
+       "JSky67I+nLAw47Ka3sZ4WLAatXjtqQ4teYNhf5MsPchhXzArL3BCJ7R7Ln6yhPAKhm4IicFrFMSN\n",
+       "lEBsoMIRhxp3snKjydVzj3Hy1J1oVzOOh7TwMGmMZh4rn8GyniOyMoriCI0YUBNyXSaxjS62i1mk\n",
+       "nC5yo8HkGFOQmpjRJCbwJMdPnuLt7/hBfukXfoX1jTUuX7I5sNRhc3MT17JJDIyjmPbx44w2V+nY\n",
+       "isySpJZg/ugyb/mO7+QXf+O3ePjxs9x++x186Wvz3HffA9y2z5gbk6GUIElC2i2fTGuWDx5kdXUN\n",
+       "R7plUxRjqDruRNGEcByDAaUcbNsiyyMajQbrWyO0TtFGkxcxQgoaDY8gOMxc10UpSRiFPPvsWdI0\n",
+       "YXFuBkvZFGlCnuR0Wi0G/QHtVkAcxeQ6RWqBkJBPq5AtYSOcqcaOFJii8n5rzJLta5NTAw6gEXIn\n",
+       "SiyKgiiKAIHn+ds2qjLaaboD99bzVHVnrrJtddtVx96r3/P3xcD/TgNujPmiEOKWfd7ab0d4E/Be\n",
+       "Y0wGPCeEOAc8BHxl74lVaFSF7NXOVvveXZnj6rX6brjX6NcFb6pEZ91ThlrlX15QGINC8+qXvYyz\n",
+       "Z7/FS1/8EjZurJHHGYcPHcJybIyUaG0Yj8eMx2M2NtaxbQfHcaZGvexgL0SZULx06RJQed0B8/PB\n",
+       "dnInjmOuXLlKUZTetWU7ODYsLhxgY3Od2fkDPHPhEqdOnmAYxegsxZt2HBqHY4RdTUKJrer0qRLn\n",
+       "r8IuJVWpFKfLZKDGYLTBsu1dOYLSx9AImZfMAAxSWAgUgd8gS0uOuqDAsQUoi562EFmG41nYJsG3\n",
+       "NZOwz2ovZiO2OLsy4PzKgEGYE0XjHXyR6UQ1GiFL5TolyxDVUhZYTeIwRLRP0JqxieKU3CgSU/5v\n",
+       "4Ub7zk8lPQLPJU5i0mRMs9UiT3vMH+6Q6xFWnKHjCRQxwlhoYZNkECEJHR8lbGZmjhHNdTnlD1mf\n",
+       "FJy7dIn5NY+XLB7iqNtjZT1i9foWA79D1Otx3N4kb80i7Qi/Ca04AZ3gdjTYK6iZMcPJGVqH3sxX\n",
+       "n/ogB+aewmodJmjcihqvM6tyhiLEkpIgb+HplNikuEVOKgoSNQ3HhQPaTJ2VDKaVfqrKi0hNoxOU\n",
+       "MCOS2+++m+96+XfymU9+ikkUAwdIE0ma5kRZjN1qMxiNuOXgMsPVqzQdG6/b5svffJjHPvinfP3p\n",
+       "c/SShCfPPcX1a02OHDm275gfPXYEpSSWkuRpihCSyXjE8uIsWZptc/pHoyGWsGn5Np7vlc1Psnw7\n",
+       "sbixucLMzAzD0YDuTIcwjHBslzSNmJufYzgYlTRCYXH7qTPkRcblS89BkWIrheu0GUcav7FAmPZw\n",
+       "PBtl/NJpm0JSxpTMNbsyirkGVcpNa1MSBRBT5pMxpacz9cQxlBHb1CDbtj2lE5efWY/8YXcHqjrs\n",
+       "WEJJ2bbBrtZt5WVXDcTrUAvcLHa13/Hfg4H/tBDiR4CHgX9pjOkDy+w21lcpPfGbjgrPqrzTCseu\n",
+       "jnoYUU9qVka8So7VcdjKk68Pwu4k3k54YzkOMtc0lMND997Hex77Q85efIZbjx5HJBlFHCEkXN9a\n",
+       "Z6Y7SxD4tNsttC47mY/HY8JwzHhcik8FQQCAUtaUuRIRx+n2tXS7XXzf58CBpe1rDccThOUwmYTM\n",
+       "Lyxhez6f+PSnaXW7HFxcpNNqEg76eI6FzhXUMtRpmqKNQU538HybemgopEJPKXyoqlCi9DKMKAsd\n",
+       "pqNcjpOWiCmsgVRoA1EWYVsSyxJokyEsg7RsHO0RRmMWuh2efPQRpBScOHkHbdfhg+//OJuRRWwa\n",
+       "tGeXsfz1MiFsLCQWRkvyVGP0TgQmEWgNcRKj7BmM1kySjDAV2I6HcpyyAUbxPJS2PMZWHlFeYEkb\n",
+       "iebEidt47rlLSJnTMAHrWz0m6QSjXFAejU6DQhss20HZio04ZvPGJtK6waGDbY7ePsfKhWs8Z5rM\n",
+       "zhhOLMc8Moj4Uv8IyWiWF830ObQe48Q5x5dmOKUCsmxImnVxmzbOIZsJDp9/4gbXEotm+A2KsMfj\n",
+       "mze4/8zdWLlFK9BMTI+YDsrkoDYYmQAhHYTIKPICPZV0LXQpUGa0KesBEGgMcTLBdh0syyNJCnzH\n",
+       "4p0/+aPMzLX5i49+ku9cPsn6xohGOyUeDknGIdlghGiXNMADi0toDcPhmG+dfZpmo4sddMhTze23\n",
+       "38HSweV9h1wIw7Vrl2k1AlzH4fq1a6yvb3DnnXfhBw08z2U0GrO8vIgu9HSDMWRJhO06uE5AnCYs\n",
+       "LNxKr9/n6OFlwiii3QxACALbI0sTGn6TySSi0WyQpSl5AYeWjxP4PkWWohBcW7nGKEwJWi45ZcMM\n",
+       "pMKybGRhtj1rOR0zRGmcSzpyuQa0KcvjpZQIozCm2GZ/obNtQ70Dn4htwa26gc3zbNvW1N+r11RU\n",
+       "a3cvAaOOOuwtzPvbjv9WA/5u4N9OH/888CvAjz/PufuC7KW2yFTUZ6q7UTfGdZyoTnfbq40Au/nM\n",
+       "e3e3yvuuzhGibDA6yhMcJOkkxrJs3vzmN/Ebv/1bfP8b30y2NeLo0jJKuJw8dSuDrfK39nq97YTK\n",
+       "7OzsNsVJ65JG2Ov1cByXRqNJt9ul2WwSxzFhGJIkCZubm9tY/8GDB3FsmzSJWZhf4OrKdZqzs7z2\n",
+       "9W/gfe//U/7Jj72TKBzRcC3SLC27ueQaKUp2ilKgjJhK0k69gUrEygjcSoyqpJEDhiSMSrhFyWmI\n",
+       "V3ogqmhMK39AKEluChCQSU2SRgihcZRNHsWgBStXr/OtsyH3PfCdfP2Jp/m1n383UQrHb72TNCmY\n",
+       "6bpsXDmPP9vCUjZKOihhARK88nvSNCbOYyxLYdkKPS5xUy0EljQ0PLtsP2cMrpJId39p02hyg5mZ\n",
+       "Jdq+pNCKfj/i61/9BktL84wnY+yGIhcubtdHSEmWFygLFIKiiEmjAqUstDY8Pj5FcnmLl5/w6R4q\n",
+       "GKQbrCBZLGKWm0OuF1t8ddDlg1cP0kwT1KbHbRdHfN9yzm0HDYI2MmzQ33yC7ozhi1/5I04+dBw3\n",
+       "S7D0ExTFSX7pj8f84NveyHz6DMZO6dkaowzKWBRKYecGuwqSCo2yFHmWoyyrxFi1YUomwnVsLCWI\n",
+       "w5xOa444GlCYjDf/wPcznhi81jzaauB34a7lg+SjmAMzHYrhmMMLSxRxTjiKuHThMq7wKbTi6JET\n",
+       "vO3tP8SD9z3A1ZXr+475+fPnmemWMGIJRRruv//e8v5iiOII2y6hhySJpswrzdziAv3RmM3NNRYX\n",
+       "F0mTiNnZGZI4odstm6QMRyM8x0UbjYOLNxcQx3GZ4BUWtm0ThqV2vHJcbjt9F1obvvH4X+K5ZRm6\n",
+       "JSVpbkpY0BgkBkzpUUspkJYqheQoa0cUU9lZrRGypHgKU9kivSva39EacnYZ3fK13dDHXi2Uvd70\n",
+       "XjtVN957HdDnO/6bDLgxZq16LIT4D8BHp0+vAUdqpx6evnbT8b73f2Tb+N1/393cd+auXYyHela2\n",
+       "IvNXF7pfuFENWFWdVTU7rg9odSPiOEZ4JTamitLILR84wJve/CbOnj3LG17+apLhmCxL2Vq5iqcC\n",
+       "XNtDGAPakOcZRZYThxG+7+O6Dp1Wm06rRZxkhJOQUZKUGKuUWFLSmZ1jcX6BwWBAFEVkSUqeJOR5\n",
+       "Tm9ri06nQ39aBXr7HXfx9DPPcOau20nyBMd30VmBq+wyuaXLFb69aUkJGmxlg5wWOWRpWUBENQkM\n",
+       "jcAny1N0UVAUpb44WmIbv2S4WAVCUZZTK42wLDZ7KeNxxMzsIoHXRqUJy4dv45tf+Ar/9TffR3vh\n",
+       "EAfvfikSSTYZoxjQEBNaCxabyG1hLiF1WbmHRjk20pGITCA9hbQtutIHDOE4xBRFWdAjd4oeKl34\n",
+       "vUcyWaNXTBDSxrF9mq6gc+gwr3vN6/jsZz/HpnGQMiNNYpQBW4IsQOcZ0ghs5WIyMCi2WgHn05DW\n",
+       "lRu8+KSDGeQ8cVVw59xdHFZbvMJdwTRHfNm+nyezZRz7EMXgAuudMYdGIVL0CLMCu2XxzNnP0WnO\n",
+       "ojc7JFmbQ0c2OdPVPDE+zR9+8RLvfNMS7fQ8rumRZ5KmZVHoq0Sph3DnabY6JEmCMRoKMIgyb1FR\n",
+       "1gBLaookw5MBaZRS5AYtBIWQ/ORP/zM++5mv0E9Sch2SZxMOuD5JOKTpltoyc0tL9IYhW1sTXvXK\n",
+       "13Pfi16C35wBIblw4QpRnOw75u12h4W5BXzf4aknn+TQ8mGSJMcLAjKtKXVeDKNwgGs7oBRJlFHk\n",
+       "4DoeMzMWeV42BQ7HYZngn0pdtBpNbMsmy3PIDbYSuK0GpuETxsk2RJrnOeMwZByGIOD4iXuQAsbj\n",
+       "Ef2tLXSR0m21KPIEdCnbrHWBFIY401hWiSYWucbostmFEGWTlR0YRSPUbknY0giLXQ5hZYuqSszK\n",
+       "2axqVfaySerFPHsNtlKKrz38Db72yDf+x7BQAKYY+EdrLJSDxpjr08f/HHihMeYd0yTmf6HEvQ8B\n",
+       "nwZOmj1fUrFQKhJ+XdZxL62wMt71waozTxzHwbIsNjY2WFhYYDKZbHvJURQhpcRxHIqi2MatLcvi\n",
+       "mSsXmG/PMOe38QOfRMKYnL/8zGfxjeTYwUO0Ox3cTpvB1gRb2WURjlK4rrvd3HgymTAcDknTdNoY\n",
+       "ubXD+S4KJpMxcVx6361Ws2zNNoVciizl+o3rGCGJi4JcG/xWg9W1NS6cO8trX/3dHF4+QJ7G6CSl\n",
+       "7TWI43g7sauUIsuyXeX0UGFx5fuTyWT7fGmp7SjFCMjyHNdyEZmNUQWTZIIKFP1wwtPnLqKFR6F9\n",
+       "wglo7TIehbTsiH6ccWVtROfgMXACwjQnnQxQeUgx3CQdbTLXbiBmD6JR5LnBdgK0tkgLQZwbjLLJ\n",
+       "jGASJyjXxUuGmKkKne/6FBrSooTK0Bkmi/jwb/6Lm+bmT/3rX2Ort87m1ib9wYje1hglPU4cP8X1\n",
+       "6+sMTUAUhlgSijxD6AJd5NMeiqWxUbYLBvpOiqVS/OgKD7Qj7j8wSx7bxFtDTrDJYX/IqtPiy85J\n",
+       "wqigO3eKojfiYNHHs2N0N+WBky7zbPHlsxd5eK1Bp/FK/sXbZrh15hzCafBI/7v5r5sv4snNS/zT\n",
+       "t97CYrTKjPTRJiQyCR//9MN87vNfodlscObMPdx5550cPXoMratilp3oKZ30WJifY2szAlyUC82O\n",
+       "xxf+6q/58J99ku/93rfyx3/yXoyIGN64yJLvMtvu4CqLRqPJKMk4fvtd3P/il/LQS7+bi5dXSTPK\n",
+       "sej3uOfM3Rw40L5pzK+trDMaDli9scLp06cQEoJGm/5whB8EaF3q41uWxWQ8ptNuY0lJHMW4DZ8g\n",
+       "CDj37Dluu+0E66sbBEHJUsmyjDRNcWyboNEgTUIaDZ8kyYjjGCksXL/sLzqJ4tIzp6zIlJRsNtdx\n",
+       "aAYeV648x6VL5+m2AhpNB0xKUaQ4tiTLd1hvnuOSpSm6qKLbaXgz7XIvbbY3jHourdhT5FQ6ljvy\n",
+       "t3WPunpeP7feCBx2YOIKDq1z0+976NXPy0L5+9AI3wu8HJgHVoH/E3gFcF95q7kI/C/GmNXp+e+i\n",
+       "pBHmwM8YYz6xz2eaL37mQ7t2r3p4Ud+N9r5e7VjVRdYHoM5Prt6gTUDoAAAgAElEQVS3bXvb665o\n",
+       "QEVRkJiCtt9ATptE5BJyS7C6vsEnPvZxXv6SlzLT6TAJQxy7iS7YBd9U3n+9MWmdpF/xU13XRQix\n",
+       "XZFZJTMALKXQRmO7LsqyCZOYJMsYjAZIKXjiicd5zatfWdKwhICk1IZI0/JvXuRYlk1e5Agpt7+r\n",
+       "unbHKbvx2LZNmk6TKEpuN4YojCZPcyajCL/lE+cpmTA0urMIK+DDH/4koxEkoUWaKBzXx9hDkrzg\n",
+       "0JHjrG8OaLTaJEmKMDmBAzodk0YjkmiEzkKSOKPZbJcestvAclpg+WQ4xKkkFwotFJkIydOELI5x\n",
+       "bZs4TlGOT65LHZs8jfnEu3/6pvn50Pf8BFk+JElGuK7D6o01PLeJEBaWcklTWTZTmIa3JVQkQFjk\n",
+       "GoxQSNslyzSBtkgcjWVFzGSb3L/oc6wLM05Ia3ID2b+BcVzCuSVuS4cMOcBlcZinRk2+tJIRNVJu\n",
+       "bU+4PS845AzQ+QVmgiW+71X34s/lTJoRwjnFf/7yGT56/RTM9JlNvskx1ce3Fd2ZOxmsnef61ScR\n",
+       "UjA3P19Ge7ogSRI6nQ7dTpfA9wh8n27TpdNscvz4Kf7ma49w+doVHn/yMTb7fdbW+pw5cz8rK9fQ\n",
+       "JsWRMQ1HE08iZruzFBpaM/P82P/2U/jtWYLWHAaF7/lsrfdoegF5oTl9+uYU1rVrN7hxY5XlgwdQ\n",
+       "StJstdjY6tPudNjqD2i2O+U6EIZoEuE5DmrqdDlTpyfwfbTOyLJy/fmey2QSsrS0wOqNDfr9TbI8\n",
+       "JGgEzM0ulFRdIVDKJskypBQYIUmzDCkVSVwQRxFKSvIso90J0EXBoL/OYLhOmkzwA5s8T9B5WQXp\n",
+       "2FZJuRWCIi83dbldpFOtkXwb1t2J5iu5it3OpFK76x3qtqzuwdcRhrqXXX3WXmf1byvk+TZWYv7Z\n",
+       "TQa48iL3Et/rHO7qgivjBNBoNHj88ce59957ieN4u+xWa102VJjCKfX/j7MUKSRFVpYbW5aF5XkI\n",
+       "z2G9t8V//qM/4sd+6EcYb/XJitLozc6UwlVQ3pCrV69uZ5VL77tBp1NO3rW1NQaDAWmakiQJrVZr\n",
+       "2sknwFIWhS62f3+WZWXI5dl4ngdSsrq2xmA85tz583z/234ADxBxVG5KlkVaFDiug1SS0WRCp9sl\n",
+       "mUIylu2QpkXp1aQp9rStmmU5SEtx9doK/VGZ4dcGFpaPAJIbmxts9sc89a1n2OyNGfQjlg4coUgg\n",
+       "8JtoJD09QgpJK2jR8BqE4zG+45KmKYXJ0FKTpDFRHOL2rjKejFBKE4ZjpBKkeUZ3bpGZhSWcoEOm\n",
+       "IUlz1lnENpBMRsx02pjCYHsNtHTIp574e//tD9w0l17xtp8liXoYEzGZDHCm1XutVptxGGHGQ4yQ\n",
+       "WLbNKAwxUrFwYBnL9dnqD4mTjDjOKAwYW5eslTBBSYGtCg75GQ/MaW6fSXHEgDBMGQxy+vYsHa+D\n",
+       "ZbfpqyNc0Qe4EI145vK3aMeK1x6zuI1HuPuQg3v4Aa4evIvNuVN4meLUkfv4lfdfZKNzF3HyNNba\n",
+       "V5ErV7ljvsl4fAlUQV4UtDptMIZRGJZdojodAMbDEWmS4FoCQUGSJWSFptHosLHZI45jPNfFsgR5\n",
+       "lqGERa5D3EAwGY2Jo4R3vetnWVnb4FOf/zz/+7/6WVbXepw8cQqTaxxp8D2f6zfWufeeUzeN+cc/\n",
+       "/kkOHVrm8KFlfM8lL2ASJRghcFyXrNCMxyW11rUUo8EA3/VoBD5pnuP7PmEYsrGxzpHDh6cMpYIP\n",
+       "fuhP+cQnPkG306G31QOVcePGDYos58SJW3njG9/Ea17zGhrNJlprtvoDut0Z0jzHlmUzFqNLIxgn\n",
+       "MVIabFsiZNnc/JGvf42DBxdxLQdTFERRSKfZYDjoY1tTaHVaJbvNx1Z7dfatKaVztz0qCRT72jqy\n",
+       "LNtGGipm3F5JisoW1kvsq+/9B2nAv/KFP9+VuKwupCqbrbzoOs5dHXXa4PTztnfILMtKI0gZ+nie\n",
+       "t23YqpL3JEkg12glKYzGsRx0kpVetGsxkZpPffpT+Fpy+/Ix/M4sytlpNFxpGbiuux0hVDchzzNc\n",
+       "10MpOYUyyl6bZZFPzmQy2b4uIRWWbZetrTAUeXkOQpAB4yjl8soK/eGYV7/8pRya7WLZFlmWlypu\n",
+       "xlBQdrORSqFFyb3tD4akiSZPM2ZmZ1lf3cD3GwghabRa9IdjpG3hej5hkrOykXLp8jW2+mPyAowW\n",
+       "2JYiCYdEkx5FNmZxrk2URGSNBSxp03BbuMonTwxFrpGWjbAE/ckI27fLoolJjzQZs75+DSVSICPL\n",
+       "4pLJKG06sweIU83s/AF08xAWmng4oOW5ZGlGkhlSY5EbRVoYPvjLP3rTXHrNj/4iRZogTIakwHUU\n",
+       "nW6LOJ6QpDFWmhMlCUmW4fgeQbOFMYZJGIGQeI6DzksJ381iQNsIfCRhYdgYhcxIi07Y50BQkJsR\n",
+       "SMNwa8jnx03uPhBw30KTXj/m+saE1f6IseMjGzZ3tQwnrAlzXZ/hwjG+vhKwnt+BFoZOc5VXvOp7\n",
+       "+dI3Umj6LHcybjz8cbrJN9AotOwwHI1KYTNKZyWOIyylkAKUVKhpRfE4HBDGA06cuAXf7ZBGkKcZ\n",
+       "ppjQ8CUKRZFI/FaDftTn13/93/PENx/jzrvvJtUFTsPnd9/zHu44fQd333EXS3MLDEYjRtEE3w84\n",
+       "dvhmJsqFixeY7bYREhSSPBPkusD2PHJT6vJPwlLTJ09yHKXQRblmMq3pdtsMBgNc1yPPUp566kme\n",
+       "fPJJHNuiEQSAwXFtoiQiDiPW1ta4dvkKW1tbgGRmZoY3vOmNvPZ1ryObrkVdlJKxRguUpYiiMgoN\n",
+       "4wjXczBCT9kzV8jCATrP8H0Xk2d4tsUkHJVqiqIs/y+F5EqYsW6fKuOqNbu85fK10sjXC3OAbVtW\n",
+       "b0heQTh7S+rrSEJ1/j/IUvr9Or5UEEd1UTsDo286r/KwK2NYee5VCXhFlJ9MJtsCVJUBVUphG4VG\n",
+       "l9KXAiwpsaUiMhqhJA++6CE+8Pv/iduXjuC5Hm6jsb259Pv9UqBqMMC27e0S+yAIMEYTxxGbm5vb\n",
+       "kEtVrRkEPs1mY5qcKiskoyhBWQrXEqAVRZGT5gXDfg/HbXLrrbdx8fI1PvGpT/PWN3wPw+GQ2fm5\n",
+       "aekySNtiNJkQpymXLl+i1++xurrBaJggheR1r/0eFpaWKQpDu9WhNxiihY2yfZ4+/xwXLt9glM9T\n",
+       "FBLXXYZUYwPoiE7HoRVoomhCll8lzyb0NiLmZw8wyQqasy2MEvhBg3EYE8cJrc4cURoyGA5x7BaL\n",
+       "R48yc8tpAleQRhOMLmi3ZikKG4NPGGmSWKPtNVqeRyIysnCMbyvwPOJCERWCKN0/G58LiRYOFg6+\n",
+       "53L48BKjcY/OTJswGuFqH0YjrDyn0AWFtum0WnhuQpGk5EmEyQuUMJwW9zHSa2TeEM9KefDYYbqe\n",
+       "ZLZ7N82ZU4yygKWlWRy5wZsmIU98+WP0oz7hwWMstTrclzd47omnGYdn8c2YqHkrjwWHySZjbpcu\n",
+       "36E2WWmlrDRbfO3LDzNvBUw2XVJcVDfE848iJxLfatDsdHE9b8osMkRRiO95GF3q8AggMyB8h1uX\n",
+       "TqCkwGQOndYsOk7xnJTALej4LcKh5oGHvoMHX/5CPOHyuU/+Fffd8wKUpWm3ZnjDa76H3/6t32Tj\n",
+       "0mXe+sY3lUShVpMo3r94qtn0SNKYTrtFf6tHw2sTtFqEcYxjO0RJgmfbYCBouggDaRzh2TaubXH5\n",
+       "8mUOHlwizwsef/ybnDv3LCdPHqfVbCCEoN0u2VvKtjGFZjgYcHHpAlcuXS3X3WjIb/6//x+PPfYY\n",
+       "P/KP/zGLBxZR0pRNo4VFnhW4blngZrsOaaZxXMjygsUDx2hYCVevXWFz7QadZplXch0HPTW4uiiN\n",
+       "tzYVj3x3qb1l2VSJzN3aLGqXOmi9crwOpdQLF+uaQ3UcfC+d8PmObyuEMn18E7Zc7XQVvlyn2lSP\n",
+       "dwkhbRtwa9cF1z+zwqir5x4WGigk5NKQ64IsS3CUTVFo3EbAn37oQyAtXvLQyzCFZjzq4zoK24JO\n",
+       "uwmUwvZRnBEnGZMwRucJvudiWTa24xJFMZ3uLHGSMRyNsSynpENaNrooE2oGU3qLjk02Fbo3xpBm\n",
+       "MYHr4/k+11c3+eRnv8APv+MdDAcDXvCCB9FZQZrlZLkhTjI+9ZnP0Zmd5wUveBHN9jzv/t3fwg0s\n",
+       "Hrjvbl505m7WnrvC0vwS41SSN7p87tGn2JxEFHmC1jkCjetYZGmKJSRFZhBakiYGSzoM+1sU6UWy\n",
+       "ImNp+Qhe0CErHHTuUxRlYUcU91AqIcsm3HPyDrI8L3U+jKbXHyKVTa41aZoTxgmtdgvHcQhHZQVi\n",
+       "qx3g2DDorWIrTRpHZLlGS49fe9f/etNcevs/fzde4JKkCUHgMg4nOK5NGIUYrSmiIb7v47sOJs/o\n",
+       "93p4rkOaxLheUPKqpyqBcVTioRLDLUcO02m3MHk6nY8wHI4QUyfhlsO3sLJymbXVFZTKiJMRpshw\n",
+       "XJeVlTWCoItUPp7XINIFjueVSTIDpsiRxpTjLBVa52RxqSq5sOihlGZzfYMi1TiOj5IOUnoUWESp\n",
+       "xnYbGGkj9ZDAKbn+S8uHcfyAOE2RgGtLjE55yxu/l8C1cRzBcDBmq7fFRz/yUX70nT/KJAxZXJzH\n",
+       "cVwcR/IHf/Cf6PcH3HX33XRmZ/A8h3vvuvOmMb9y6SLzs3MUeU6SpGgEzXaLMEqI4xQvaJCmKcqy\n",
+       "aloiGa2gwWDQJ/ADpBDcuL7CF7/4Be66+87SDlhWqdSILJtFpCUuXeVx+v0+Fy5cYHV1leFwyKOP\n",
+       "PsrrX/96fvzHfxzHc7edvZ2oveTQ1mEOgDDN8DyHyWTC1sYqUTgqIwRpStgpLSG0UsN9RzwujGOC\n",
+       "oEExpRtukytkSZ+URu/yqut6RVX0vuON7whm1aswK0e7jo0/8OLX/sODUL76Vx/bNtawQwOsKxTW\n",
+       "Odz1ctOdndDaFXpU5PrqqJekwm7lL5FqkBJhS7QUU4lmjTQlNIGS9Ecj/sMf/Efe/sYfwLUctM6I\n",
+       "ogkCzWDQx7ItXN+n2eygEdv0vjzPy2TNeILfaJZJDwRZrhmNxsRJKbKTxBmWdEBA0PCJ4ghtynDe\n",
+       "VorxeEwcRXQ7Hc48cD/tuTk+/KEPkoQhDzxwP77rcebMGQyS9fVNeoMxt50+xfpGD8fv8ju/93uE\n",
+       "6YQsmXDqyBECIXnZd70Uy+9waWvMY89dY5Jp4iSj026RJBH/P3tvFmPZdt73/dba83DGqlNTd3X3\n",
+       "HXgnXlIcREqGIpEaKFlEHCGRYweBjUgZESASkhdFGZ7jIAiSQInzYCcvNig5tmwlQpwAphRIDEVS\n",
+       "A6/E+fKOPVbXcOY9jysPa5/Tp5uTX8JLA3cBja46VX266py9v/Wt//cfyjIn8LxuMAlSGJS5ltUX\n",
+       "eYbIH4JsWCcRH/7wh0jSmjxXFGmL5/mUZUa/5zIYBLRl2anitHptuVoThD1WUQxCMhiOyIucVmnu\n",
+       "LlJycXGG75mItqQuEoa9gOU6YrB3xH/5H/4b37dr9N31nde9u3cI/YB+r09elOSlphumWUnTKiYH\n",
+       "E/KOPRbHEa7jYNsWdV1hSVNbBAj4rU99ipfe+xJOx/HfFHAhDAzzcTvbDaTQNA3n5+fMZjO+/vWv\n",
+       "c3Fxwcsvv8yv/se/AoitB/3m3n8k2BFsZotVq3SgedNgmYI8T7i8uCBJVtDWeI5DEkdYtoVtaKMs\n",
+       "x3V0qlfXWVfdCb9VO0rxpnqMUbepU0+Go+j1uBHdhp5YFOVjjwkheP8P/9QPHoSyu/tsfH53O+XN\n",
+       "50VRPKZa2jAsNsUeHhHkbVtj309Odnez9TYbQ9MxNxR0RyetxjKlSVUVGIbB/sEBZdvQltpQxzRN\n",
+       "er0elm1zdOMpyrJisVxxNV+zcSgzTY+mVdx/cAfHdYE5tqON6U3bwbQspLDJ6gbX30e1Bk3dkJaS\n",
+       "WhnYtokUgiRN6A+uc3Bg8/xzz3K1nHPtqX2ee+H93Ltzhz/4w8+xNxrzxVe+wi//8i8xnc4J+30u\n",
+       "zy+xHZ+mafnABz7AV77+VUR/wJ2zC9okxfeHnD79LPcvpqRRjNsb4btj7ZnhWhzsX2O5nCGEiWXb\n",
+       "NE1N3qQoBa2pGAwOqZuEloqvfOkLPHXrKSbDISrU+Kxtj7cbq+lbCGmT5QWmZeH7JnWd0g9dpGmy\n",
+       "XFzSonAchygpqJuG4SDUAb+mSWsKsiRhGAZcnN37fl6e767vsoS0MCyHvG54eK797Iqqoj8Ycng0\n",
+       "ZjpfYVoWRZ5pXYZl4roOWdYyny0YDYZ8/Rtf5QMf+ID2AS+1XF0BspVboRJKF7/+YEASx9vT+I0b\n",
+       "Nxh1QrpXX32Vy8tLfuVXfoVf+7Vf4/T0lKIocF1/8yQdq2Q3klHiWCaN1JoHzw24cfMmX/nylzvY\n",
+       "osTxA20+V1UoIE0z3Yh0sIbrdMPIpgudAehM7HYh390m0jCMLR1RqcdnfZt6ZprWY/XrezXY71gB\n",
+       "3z3q7Bq7mDsvwgbo310beh48LrffGKXDI1hld5q7W9Cl1HmTrXhEsjcAo9VyWN91SaoSy7SYHB4Q\n",
+       "xStOnnqWZRSjlCQrYBotqBtFUbbEGdBC00BSpBpzCyZYtk2SpkynMfvjfeKioEmLDhoyyMuStpHa\n",
+       "JyVJuH79Oo5jEfoevuvqgmiaDMZ7lNLi7tmMxSpnMD7ipJXUZUGaxvzO//F/8oH3/xCB52E7DnGS\n",
+       "UamGW7duMV/HXM6uGIxNYjXlz778FVrbZbpaczA5JCkLWmVgixbHMCmznLLQobRVU5BliR4CqRaU\n",
+       "JEtqVGvg+z0so+Dq/G2ylY9nh7z4/A/RKJMyb5CmqTm0baNVlW2LZxs4nk8cJ6Bann3qlCzPSbOU\n",
+       "0ahHVTesVzGe6+E7JpVQFNEKQ7Uc7w2/b9fmu+u7r6vZgm+8+jrXT2+glKLf69OzbK6mM6arNUVR\n",
+       "4HkuJyfHJElEPJuTei5pmnK4p61iLctmvVxgmNovfBOFppTaJmoZlg1SEMXRY37gWZFjWiYn168R\n",
+       "pwlvvfUWzarmlT//IsfHR9391ezUDvHY36rtao1hdvWjpakU73v/+4miNefn5ywWc1zPwzWB+lHU\n",
+       "mezgkrLrlLV9hVY3t+KRSRV8exXlo9r1iDa4C6Ps2jb/86x3rIDD47mYu5/vUnZ2DZt2VU27RfrR\n",
+       "sODx54PHu/HN522r1YCbj4UA2baIRuFYDmmaoUyT6WKOGwRcXl6SRQmO30M6IVfLmEoZpFmBtCwM\n",
+       "BI7tsFquKSqDg8MT5osFgTAI+gec3nqB+WqJ0bZkWUYcx5ob7hp4PY/BYICUUockRxVL1yKLYixT\n",
+       "cHpywu17b3JwdMJg/4h1UhKvV4xHQ6bROQqL2WzFnXv3uHHtGqvlgqOTU3qDkIvbdymKCtUapEVD\n",
+       "0Uqeef4FLmZX7B8cce/BHXw/pCFjMhqxWsU4tknoWFStwnEdsiyh3+uxipYURYnrhpRpTpxWONLG\n",
+       "UDXRYonyGrJ4gVIWQW+EECatUNA0WLZJFKc0VUktJK5lUrctZw/u4nkelmEwvTxDCTiYXKOtG+YX\n",
+       "l5we7rHfDxgP+yzXy+/HJfnu+udYYX9IXjQUtbaFffhwyipac3zthHSVcnA4oVUt33ztNeq6oipL\n",
+       "9kZDnn32aWYXM77xjW/QCwIiYwVsKHia2aHaFkMnIFOXuoELgoCq0iyxjS2zZVlEUcTBwQHL5ZKq\n",
+       "zviTP/ljhsMBP/nxnySKI8JgI0LaNHpdEyf1CdwwTAxpAAaYgihe0+uPCHtDsiznm998ldbQIc2O\n",
+       "45CnGdI0qcuyc1vUGLsQOsjCMK3HYF3YPfE329nWJmtzw4rbQEOaWfcoOEZnHDwu+HlyvWMF/BF3\n",
+       "Um7tXnf530+mrcOjHWt37VrIWpahPRPkbnrG4/6624FGd+xRSmEI7eAnJdRVhet4pG1Nv9cjz0ui\n",
+       "WtOKskYSXa0RdkBSVpQ1mErgOR5FK3DCIa7wiNMSx+1xNVsQhg2rKOtEDDZNC54f4nQ8XWEKZosp\n",
+       "dV3j2A5hP2S5mKNokC1UTcEzz95CWj6LxQrbcrl5ax/XdbEsmyyNEaohTUsWixUvv/wy6zgmjpbc\n",
+       "vX0b1bT0e0OG/THiuGY8CPA8m7LIeOrG9c4jWXL//l2G4ZiDccjtew9pakWcZxwMh9A2OIZJf69H\n",
+       "VYHjjPCNIbYqqdMVRluQJRGW4eB6AbWqMEztJ2F5HlmW0/MDAtfDtG2SJEM2Nbeu32C9XpHnBZNx\n",
+       "j7oVxOs5prQ4vXaMbxkMQwdBTfAdvFDeXd//9Y1vvo5l29x7eM5oOEIqQd20PHx4TqMa5t+cMxzq\n",
+       "SEFahaAhz3K+8IU/xbM9XnzhOb72ta92qks9xDU3Xi/ShLoGNB2wqmu+8MdfIElSmqbh6aef5umn\n",
+       "nyJJUzzfw/Vcbt66yXR+wXyx4Etf/jLvffllJvsT3c2Lx0/wis5iWRoo1c3WpLYoGPRH2rfeEHiB\n",
+       "yYvvfR+zizus12vMRoCUGKZJUzeapGBI7TPeKixTku0gBrski43x3i7TrmkejxXcfH0DGW9CJTaq\n",
+       "8u+03tFQ483Os6tygke49WYYsCnQpmluZeObx54M9d2l4ABbkc0jvmaHS3Xe1w3VTpcusF0HpIFv\n",
+       "2VSGxcOHZxw/8wLS8aiUQBha/KIUjEYjlNA2sUXV0DYNliHx/RDTkAT+EWEnOljHOlczS2P6/QHR\n",
+       "esloPNJUK9cnSVLaRnF+doltmzimgxSKyf4hTQ1RtOBytmI0HDAYjkjTFMPyOLm2x2x6wcXFlNVi\n",
+       "yfve+z7yvCDKcgwBP/5jP8adew+ZzueEYUASzTm/OmdvNKIqc6RowarpuYrJyKVM5wQ2UNcEgU+S\n",
+       "pown+7imzWw2xR/26Y/2WU/nrNcNQ3+P2cV9UCYIi6ptaNqalpqmMomiCN/3SdK1VrOahlZGGibx\n",
+       "aoVt2xjSoFExlimRlpZ5H+7vUacxRZ4zm55h2xb/6B//Ll4vYDwcMpvNtI/MYklRVKRpimlY+H7A\n",
+       "wcGRtvr1A/JiSZZlpGnMfLmgKIptAEcvHDAYDlkuVpRlRdFIFnN9dI5jzV6hE5lYhk2apBRpzng8\n",
+       "5uhoiG3bBGGPMq9plOLOnTukWYoQgjJP8AMXKQWn10+5mk5ZLFfkZYOSkiDsIQ0by3Kp85qr6ZyT\n",
+       "4xMQOYahGSpFnuOYWm9gmzoRyjAs6qrFcTyEDWWTsl4sMERDHkfkeQqqoVUwW6y59cyzfPONN7h2\n",
+       "/RaqlSB02lJTNxwfHeJaFqYpuXvnDrdu3aRpWpI4RSmJH3iUZcFv/Mb/wL/+1/8ah4eHuH6AIQ2i\n",
+       "tbbujeOUMi8JQp+L6SVFWXB6eg3TNLl//z77e/s8uH+f48NDnnnmPdy/e5+3b9/j5o3r3LlzmyyN\n",
+       "ME0NSyilDaXqusGybJarFZ/97GeZTqcIIZjP57zy56/wwgsv8Au/8AvUdc16vSbs9djb26MoCr7w\n",
+       "hS/w4//SjxOGIa7ja//0rqaIJxywBZ01787DEp2CJaWB5/kcn5wS9iLu3r2LFApDgmk5SFpq1aIQ\n",
+       "2I7moe9SBneJGJvEq01HrZXZ9s5Mr9EECMRj9OofaBrhn3z2/wLYUms2w8zHyfKPGCrweCzSBjrZ\n",
+       "SNM3viC7lJzu/3pMDCTQftpN3WjKkgCEwBKGtmKtWtwgIKkqSin4r//7/46//BM/hW1a2LZHmtdM\n",
+       "Fysct0etBEHYRyHwPJ9GKUQriNdRd4roFIhSJ/J4ntfJ+02yPEMKk3WU4Do+cZrhe75+01pFWxfc\n",
+       "vHHKZG/EajVn/+CAqmkoqwbX97n/8IKirIijCNFqqbljGVR5ynhvyNPPP8Nrr9+mPzikrCDJdVxZ\n",
+       "XReMhj1m8xkGgn7okcXnxHHGapWwWKVcu/4Ufm/MfLnGdjwQJqsoZm9/zLqYE68S+s6Q0A5oipzD\n",
+       "/QG22WLZLWm6xu952nSrNjXlSgjW6zX7+xPW6zVSGvoY27aURdkdiUtM28X1Qr1B1g2ha1EVGeNh\n",
+       "SN00lE2DMEzyNOlOVi1BEFJV2lJAIGmbhrpqSJKEsqroBwF1U+O4zvZm1iremjTNEUjSNMM0LKQl\n",
+       "qJua6eyKycGEu3fvMJlMWMznHBxoBd94vE+R5cTrGU2rA6yjNME0LUajUYe/ttiWSVnk2lxpuSAv\n",
+       "ciZHRwz39snzCmnYLJdrVAPTqzmjwYhRf4jpNgjZYhpym2Rf1y112Wo+ddmQxBlZVhCMfPy+Q1vX\n",
+       "mIYA1dJUFU1dUVba7KltoShL7p895Md/4uPM5jM8x+XgYIIhDbIkwXUdhoMB6/WqOxUbWJYNKF55\n",
+       "5RW+9rWv8tOf+Gn6/QFB2CPLckzLJopiemFIvzfgajrF8z1aGtbrFZ7n4Fi62To9ucZ8PqeudBjJ\n",
+       "5eVDbp6esF7NsExo20eEhKZRKCUQ0uCP//RPuLy83J6ioyhCCEGWZXziE5/ghRde2J7W79+/w1tv\n",
+       "vcmdO3dxHJe/9V/9Lfr9DYTSnb7ZnLi/VTL5napg09Fr1+s189mUssi1EZroRHu2RdvZyGrk51GM\n",
+       "2pMsk03sm/7a40PKzfdWVfNYfZNS8tIHfuIHj4WyKba7DJEnf/ENB3Q33QK+9UXaFfds1i5kspn8\n",
+       "AlvWiWg1boXQtMFaB49h2Fr9BoIv/tkXOdg7QEhBkecI4Oa1a1w/miCEZL5YkWQxWVGSlGtapfBs\n",
+       "l1HPwbFt7VNS94iiNVEck8fx9oL0XJcgHHLz5AjTclmvE/KioCxrEIqsKvjm175MfO0Y17FIHIOr\n",
+       "2RQlDPrDfaoyJ04ypGEipIGhFIYUhOMJb91+nVJlBMEI2VZk64SyqvF7ATWKhxcXhEFIUzWcnV3h\n",
+       "W+B5fVxvQMslbVsymz1kONLFxvUcTLOHKVt6ZsXx6RGidXBND1P0KYuIqq2osoqqKilzk7ZpKatc\n",
+       "m3y5DoYpWa0XnaTfxFBm59diYlsGozBAWg6W4+F4LkkcMYcO+akAACAASURBVLs6px/43H/4gCAI\n",
+       "MWwfIcD3dQpLHEeoVndmvuchpUEYBLiOQ+CNkVKQJhVlEXN2dsl4b0TY6+kEIVVw/fSQMq+J45Q7\n",
+       "t2/j+wZZnvPCc8/x+huv47kOWRqhVM2gF5DECRfnD+gFAddO9AC2qEpq1ZIVOet4RZokWJbOvNwb\n",
+       "jvG9kHWUomTD/QfnvPHWXRYr3fH7fsDeaI9Rb0idr6lsyXodIU2B5/u4jrMdco3HE5I4xZES3/fJ\n",
+       "shzbs1jFC0CQC4ijhLIs8DyPo4MJrl8iJbz99ltcOz6iLlJCz0EIqIocy/fp9UMc2yFJMwzDYjwe\n",
+       "sFotSdOYui557bVv0O+H9Hs9XMeiyFMEAtsw2B+NSNOMe3fv4gYei8WUXi/AMg0EUBQFe6MRt9++\n",
+       "w+HhIdiC+WJBr9enKDXTq25LUA1S6eKtbWMtLi8uuus0IC8KsiTh5No17t67hzQMXn3tNd7z3HP6\n",
+       "FN40uK7XpdjrmVVV15RV3Q0fv7VQPrkE357xITBoWhgO93EcF9XWXF5eUOSZZkq1DYbjUOYFhnjU\n",
+       "MO7GPW6JEh3bTj/2SMDzaBYntmjBk83rd1rvaKjx5u8NDLIp1k8avzyya3xEPXRdd/vxJnB0gxdt\n",
+       "XpTN2o04gu6IUzUgJY3QeHgXEanZLEWB5/f5/Gf/iA9+9EfYH48RtCSrNVW6oOcHqKbhdN+najwM\n",
+       "1wNpUTY1WVpQFgV1nSAaCxMY9x2eun6AYZqU5a3tYGaxWnB5OSWNtR+H6wYcT8b6dRB99vaHVEVG\n",
+       "nqe01ZrAllieT6M0FBOGfeqq48ELgWVKlqslnuczX0yJVjH7oxMsaSIdE0MoBoMeffoslyuiVcrx\n",
+       "4Q0cQ9E0NY1quH4jpKprnr9+jdu375AVCaZtEMUp/TCgZ4LVZAyGIa6jMybbQEuQi7xlMj4gTUo8\n",
+       "L2CaTOn1Ndzjui7SkCRJg1KNVsG2NVXH3V/NFL3hkCROSIucXt/n6OiANFlz8+ZN8qJmFeeUWYUr\n",
+       "WkxpMOgPGPQGXDs+RrUtZZHpMI1kTZIkKKXY2ztkcjBicjwmSROE2XJ+8QBpSO49uEtVVNRlw/HJ\n",
+       "CYPAw7R1CPZHP/JhkiymrCru373L4eGEhWUw7A2YTqcs1gmz+RylWkb7Q5zAx3ZMxgf72rohr7l3\n",
+       "do7OXTRRwiPoDeiNDQbjA6q64MH9+0ijpd93OD1+ijSOsfyxNhmrKqpGBzmYhsU6WlCWNXXVCdaA\n",
+       "tqg4PjziwfkFnheyWMZ8/Kd+js9//vNgOPi+ji97/pn3cHZ+RhpH1LU2f1J1TRJHOK5LGPQoipJe\n",
+       "r8c6iqibBtOSlGWDaRm8/4deRgitMPZcHyEkaRIhpYllmhwdHZLlCY7dYx2t9HstdUGu6xrLtIhX\n",
+       "MXGSoKSg1+uDKjr8WTxmjdy2igbBxfkFVVWRdq6GtqMpuJuT9oMHDzC6IHKE9sjv9/us1zHz+ZIv\n",
+       "fenLfPxjH0cgUErbQ4AeYH7bLDGlvgVgAf2zSGlSljWeF9I0FUfHJ9y5/RZRkuO6Gj4RpolqHg0c\n",
+       "nyy8Gxrho4bT+hbYV6lHyszvteFs1jvKA9/sQHVdP0Yf3MWQHMd5LMj4SYaJbdtbfuVmEPpkGMRG\n",
+       "cr/7YthKJ9S0QumOuws+MCyTsNfj1dfeZLlc0vMD6rpA1TWWAbOLh/jHR13oQJ9SNGT5ikZaIE0C\n",
+       "zyBwfYxuWBFHMVoNVhCvV/ieR5ZkiEYHAD//nkPiJEWhi2RZLPEchzheUBQthlTM5ndYXK2w7QG9\n",
+       "kXYK7AUBqzTFtn2SJMM0BHGS4XoBhtWwTmL6gyGqrcnSDC/oIYUWVkwXK27deoYwaDClSdtK8irR\n",
+       "1CvDoqlK3nr7TSaHB/hhoDm+MsAQcDjoEUU5si7Iqpyg71IUKY5rUZX6SJlFFXlaYlkGeZHiBzpP\n",
+       "tCwLXHeE9od5lMzt+z7tMsJyHWohMOMVaZqQJksG/R5REtO0BmF/QFEpSNaUVUnbVhRZThprHHWy\n",
+       "v4chdF6NEA2mYbJYnuP6HoZlISSsoxXhwCWKE1xfcnrjBgaSsqxYr9cYpkFe5NiuiR94mKbB6ek1\n",
+       "zs7u0+/1ODt7wM0bN8gKC8f1yYqUKItQqsEoBbZlU5U1B5MjLNMjzwrOLubMlxGO5+D6LoYlmBwe\n",
+       "c3xyxN6wTzSbcXn1AEsaFG1Di8QwtZeOkmxTl0yn0T7aSvOXi7zk3r17WLbHCy++hGF73H1wzsHR\n",
+       "NYqyRDUlIk2IFgsc2+by4iGDwZBwFGJbNo7jajOoqmY4HJJlGU3TUpY5lqVTkoLA5/DwANDFxLK1\n",
+       "F894NCRax6i25erqgslkwipaYtkm0uiwXNl2boQuhmHQD3tEecbDhw954fmnKbINtxvKqgTVeYw0\n",
+       "Wui2sWvesLTyPN/OlI6Pj8mybEfIZ7BYrLZ15Ctf+Qo3b95if29CEPg7czaFNHaK4maqKbrB1u7j\n",
+       "aCFbqxSmZVLXTVdfLG7depp7928znV3hOBaGkJg7qstdKf0uc26TxqMx72/lehuG+VjxfpJG/eR6\n",
+       "xwp4EAQAWz/rTYdcVdVjg8mmaR7DjjaBDZsjyUZyL6VE6ix0XeiVQpr2I2+BDp/aTKUbQ2ipbqto\n",
+       "pCQ39S5tVjVZmvB7f/Q5jm89hcorktYlTkosQ5FFJTDj+skhV/M5vWGfwPdASrKyJE0T2lawXq2p\n",
+       "yprBQAsOBHCwt0/ZVBiWyXQ2RbSKq4dzzUixDPb6fbyJDwis66csF2uiKCFwDujfPEAaLctVxN7A\n",
+       "Z7m+YGzZCFkhzZK6UTQSXM9m5B6wPx4wGAy094sjWSwv8ZSWOBfLOdk8xHY8ppczPfDxPNK0ZBll\n",
+       "eLZNr9enjUuGjg0SaplRFAVJ6XPt1lMIaZKmGVla0TY2q6RidrXAPrJxXYHnSZapR0WFVAbJOt1u\n",
+       "wE3TkOcFaapzLpum4drRgDZrUQomk30O9/ewLEs7PlowvTrHtnXREY7E8x0cK6BtKswWyiLl7GFC\n",
+       "o1oEBkG/j+8HHN28QZ4XNEpwNZ1zeTXH9wWhP8ANbaLlkjyLGYQh+8d7WwvgLE1Zr1csZivqsuT5\n",
+       "559nuVgQ+C5vvvk6IHE9j4PJhBvuGNu2WS6X2I7Dm2+/xeLqIcvVitVqRa8XcvMk7E6CkrqsSC4e\n",
+       "0DQtVn3C4dEJlm0TJdorpiq14jdJdLEWQjDo96jyEkMIAj/AkJLCzHnz4UPG168zvfcGoVHTNgWq\n",
+       "LemFDm2lKIocFUIjJO/78IdACFzXx7AcBAbCMAl8gzzLsCyHIo8xDZssK5jOFgSBh23qGMH98YS2\n",
+       "qVgv5mTxGtfTuZOjYch8doFpmrimQ5mXWK5JpSqqqiAIfPJSu4RatuS5555lNp9RNGBgIKRBWVZY\n",
+       "tkFd5zRtjTQrxnt9ZvMLrEzbyMbrJb1ej8DzuHH9OqppKItKB4V0TeD5+RWW4zPaPyItGs6nc4Ks\n",
+       "oBf2sG0Dx9w0gzvMtHbTMG6jjUFs9CE6pR6gy5VGmhbKMLl18zkCf8S9e/fAsmiFhmEtQ6LqGql0\n",
+       "2Epba6FgXVYIpD4NiEe2IZv6rQkdzbYhhe/dgb9jQ8zP/j//+xYTesSBfLT77O5a8EjR9KR3wK4N\n",
+       "bduyTcIQG8x8B07ZdOGGYeA0LdJ1WCQRgecjkWR1A47NH37mj3jlc3/MJ37mE5i+i2OGNFVNWaQk\n",
+       "6zlFFnM4GbM/Gevsvl6PVimqtiEIevpNUlCVDUJpXLNuW0zL1Ck5hqBpGxzLoamabuMqiWM98Kyr\n",
+       "Gi8Mmc+0Y9vBwRFVVegDnTSoaz3MLMqG5TrCcX3SXBt2hWGP1WqJ51j6IjEkCoVhaGXlYrHCsGyE\n",
+       "sHBsh6ZRSFMSJwlh2CdNss4/vCD0XCQ1nmPR1AV1VWF6DllWIITE80Ok0E6LtmlSpDH9wCOOV8Tx\n",
+       "inB0ojsr6GiaOs9RwQ61Sm/KQuU0TbsVQuhOBIJAH9k3fuqr1QrTtVGqwbUtaBoGgwDVtJqCZRg6\n",
+       "AKCsKKsSKRVu58SItLh77wHXr58iEFhWt+W3DUq1lN1Ns/FVNzsXvSLLsG0bKUT3HgnKqkYISZIk\n",
+       "2+93XbfrUm2m8zmr1ZrRaITn6cdty9JB00o3E3medy6S4PlBBwHuWEagKWiWaVIWxdaWtK4qirwg\n",
+       "TxI81+WZ597DxeUVluNovjSKpiqBhsBzUaplulhwcHKK43iUVY1h2kipnTlNw9S+Jnmufz70gPns\n",
+       "wW3miwt+9Ec+RJkX+K6PlrnblGUBou2+19j+zHleaNio1X70nud1ls66mWpUw2qx4Nq1E6q6okhj\n",
+       "8iKDtsa0JFmSAHDv3l3yumG5XDJfzCnzcltg3/ve9/Hiiy9SFIUetirBxcVD3r79Nq+88hdMDo/4\n",
+       "t37plzk5uUYcx5yenlLmOt1o2O/jd3MAfY1tjKUepxpvGGvfbrz5pK4kSRLOzs5YRRcEvqf9jeqa\n",
+       "tq6wLRPVbKCiroa1IKS2pd9g35tOfQMZ7z7+3g9+7DsOMd+xAv7Fz+uch7Ist0PI3Z9lNxShqqot\n",
+       "oX3TqT+Z5Aza2W93mLlLJdzlnQshCKTBosowBz1cTJxGkKP4f7/0Cn/7f/zb/Orf/Pfo+SH0PaJV\n",
+       "RpGkHB0fYtAShh5Xl5eUecKNm9fJsozDw4nOv8xzjfW12mrz2rVTLMvGMEyKqiIvCxarJVEcY5sW\n",
+       "nu2BgLAX4HleF9hQaxqTH/DqN1+jyEstTOiFnJ2dMR7vs7d/gGk7FEWFZTsslxFh2EOaGpe0TIOq\n",
+       "KsnylChJyLKM5XKJ7/c4Pjkhy8pHrB1TYnUCJkOaGj9uWoRoaaoCU0JVZlRVwfs/+EFc1yXPK1ZR\n",
+       "qgtu3WgXxSJjb9THNAx6PZ+8Nuj1esxmM2az2XaTzfNcC3gsnXLU7/dxHc3iCAKdzrJxc7x37x55\n",
+       "VuK6Lp7n0ev1cDyHLEuZXl2QJjGmIbl+csJyudRqv+MjxuNx19XBbDpnvoqoqobRaLL1aBcd3BKG\n",
+       "PmEvIOz1Wa/XRFFEHEdbXLQfBqi2JQwDZjNNaTs4OCAMQ8Iw1HTG5ZJvvvZN3U0pwdXVFcfHx1tr\n",
+       "46LQ12bebQaO44BSlHXN8ckJy/VKX9elFr5s4EEpJYPBQFNWu6am6Taa9WJBtF7hhyGO5zOZTLAd\n",
+       "jySJKIqCqspJEz3YXK0jRvsHjPf3qeqWulHUTctgMKapa+7fu4/nelRFQdNC08Lrb3yd0cDn6aeu\n",
+       "Mx4OUUoxn62o64YwDHTClG0ym0d6M5Ky4y7bBEG4pfltYhGllLi+h0R13tvQD33SNCaJ1yha6qpE\n",
+       "AGdnZ0Rp2jUTZbdRehweHjKZHGB2mZab+/+1N17ni3/2Ckle8Eu/9MsIafDMM8+wv7/P22+9xWg4\n",
+       "Ym9vqOGnumI4HJLnOYOBngeNRoNt7Xm8Vnz3Ar6Bbauq4ktf/VOklPR7AbQtvmOTJwmqrTuKZGfG\n",
+       "Jx4xYnah5J36uP0/DMP4riyUd9SNcNdu8duZVQGPDTGfPE7sYuX6iXVxl90L0I0tOsL+RqqvjyeV\n",
+       "bPXwQ0mqtkEaBp/5w8/y2c98lp/6qZ+m1xtyfHRElqRYlq1pWUmCNCRlUWAYkixLMA3YHw9BtRwd\n",
+       "7CNMQasUtmkTRwlNrXm1Qgid+I7C6YpRURRYpk0cReRFhmlpw3fX9WlbODt7iOcFWLbdyfIzhsMh\n",
+       "dd1y5+7drgPMuHHrFkVedvQvE4WezNd1zXw+5+jokPl8RlnVBH7AU7eeZr5YUtediMCWFHmJIQ1t\n",
+       "yF9W9MOQ2ewKzza5ujrn+PAApVqatsG2HIRhIqWJaTmYhjYqUm1FHMfYtkmWJlRKn0Qsy9zCZHle\n",
+       "cP36DeI41hxxz+PO3bvcvHZ9i2kWXfKQ3nh1gHRd18Sxpg/Wqt120LZhsFot6PVCPM+lrmvNyy9L\n",
+       "hJSYRg1ImlYxHO5RlDXCsBDoay3LE534U1eIDpJzHJ0gY5gGTV1TFTlZnhEEHqPBkLZtWS6n2p62\n",
+       "0alPAolhmNsuOY4THNvRG3Kl7ROazpPec9wOb25QAoJeoH1zJLiWS9ZBS5ZlkaV558WR7iiTFVVd\n",
+       "k6cRR5M96B6TpkWRl+RFQds22kK5KnEcm6vplNObN8kyXaBH433SrKCsGsqyhC4YWyioG0VZNXz+\n",
+       "85/h5fc+x+HBHqaAfm+gN0XLJopWlGWBaZvYTrA9VSilts2Wvh4tPTRVirrRkXBVWWI7NkKAbRo0\n",
+       "dcV8MUN1X0coZrMZaZpun8uyLPo9HRSuIxJ3TexqPvf5L3B5dcXk8Ihf/MW/ymi8pxs71WBZjq4B\n",
+       "VY3jOIyHQxYLDceYpok0wHU8hNxQlcUW1vhOCMaTXidN05DWCRfnD1nNp5imQVMVBK5L21RIdkNp\n",
+       "Ou9y9cglcYMs6AyBR8VdCMHLH/r4Dx6N0HGc7qaMt7j2ruk5PBpSbi6KJ/11N7/8dmBgSVTT0ABC\n",
+       "KU3UNwxs69GLIoTAsA1io0ZmFX3ToKLl7/7W3+etr73Of/Q3/x0cx2VeF5wvpth5g/B9PN8HAupW\n",
+       "Eac5Z/fuMZnsE/Z73L1/xs1rJ3z1K1/H7zl4gUfgBVimReAHDPp9jfU7LnlR6JQPAGxMw6Q/CDkJ\n",
+       "jyjLgqIoubi8II4TTk6usVpFmIaD57l4foBSigcPHjAc9Lm6utLQRpHiuy6WZbJex3heQFlVnJ/d\n",
+       "ZzAYELgOxy+8iJR6YHx1dY7vBShLIqTAC32SJCGJU4xWYphweXaHLEs5eOoGzz79wzRNqbnJUsdY\n",
+       "LRdrLq9mZFmO5/rYrottWxwdTUjimMDfJ861Ne56vWY+n5IkCXmW83uf/mdUVcVqpRkLH/zgB6kO\n",
+       "95GG4saNG9rmc7YkiiLSNGU+n3Lr1i0sSw+BAi/Ur1WSscwTjo8PkVKf4vr9HmmWUFU1VVVSJEui\n",
+       "JMU0HaaXF4ChaaN1y95kn36vj2WZVFVJ1bQsl0uuLqYo1dAf9PA9D98PuHnzBnmRcf7wIft7e1y/\n",
+       "fkxZlSyXK5pGcf/+faqqJvBD6u5a9R0bU0Aroa5yxqMRtGAYgnB/jGUbLFcrgtAnThKKsuDhYq09\n",
+       "5j0f0et1xaTl+rUj6kZ1G1xBUZaU+ZqyynAtbW3sOgamaeP7DlmWaTtUqe+Bk5Nj0kif0lZRxGo5\n",
+       "Rxombd1QFSVRkhCGPfK8wDRtTEvTYEfDEXle4LsOq9W6K8YwmRyQpgmraEmSxNt5lOtoa4jVatWd\n",
+       "ArRRlecF2JaGvnzf5+LinJu3bhJHEafXTlgs50jLJk+TLizFIwiCraW0ZTnkmY4kzDKdcKW7V4GU\n",
+       "BvP5gsPjYz760R+l7EJdbNvm1VdfZTQYcnLtGEsK3nrrNmVe8vTTN7m8nJLnOScnJ8xm804oJfF8\n",
+       "d6dZ/M4N7oZ0sXVRlSanN24Shj1uv/UmlmmQ5gWoFtMQXfh402lBHOCRfcimEd2lR2+a2u+23rEO\n",
+       "/DO/90+2roPbwroTCPqI2F5tf6GNDHWzdhkrUkqKDk82NPETKbQYQqCPImVHuDdMg9LVz1MVJZ/6\n",
+       "1G8SrSP+lU/+FfIk0xmOQhAGPWhb8u6GUUISJRlBf4jtOETrFVkSoZoC6pLjwwluYGFaElOa1FVN\n",
+       "nmU09ab7t3A8D9PS5k6WaWEZJnmpj/NN22wHsj/yYz/7/X1T3l3vrn8B19/7+7/JK198BWGZPPfc\n",
+       "c7z88vswLR1evlqtuXnzxpap5Hsue+M9iqLqIDSYTPa4uLhif39/C8v6gccGE7ftR+ZTHWVl25Vv\n",
+       "1Nsb645SbQIhKubzGfduv4Xr2drlTtV6GNrpNTaU5d3CvWHk7c71hPjuocbvWAeuj21sVYq79JtN\n",
+       "0d7gUBtZvVJqi5lvTGDKjed018VvpfRNixKqy2GssaXO68vyDIEBaclZNOdv/y9/h/fefJaf/4mf\n",
+       "JHRcvEGf2eUUr4KzsyvsvQHDQR/XdcgrnZazXCywXRfTkAwGQxxTEi0XvH37HqO9HkGo8d1hb8De\n",
+       "fohQunPUhjzaPrOsa0xp6BgnKXBdB6SgqrSt6rvr3fXu+t7r/PySr37t6/y7/8G/r5OxwpCyrGhb\n",
+       "sCyb27dvc3J0RF3XhGHIdHbF8dE19veHXF3NOT+/5Pj4kDTNybKM4+NDkjQlTZMOumtYrdaMx2OE\n",
+       "gKKoOphDdUNVpamPbYswtFxfGDb7exPapuHq8lzz5Q2Lusy1VbWQmDshNvCom9+V0j+pZ/l26x2l\n",
+       "EW4MW+ARprQpxrsT4d3B5OaXK7tBz6aYQxeN1g3xUJq20yqlU73LgiKJccOASrXM5jN+8zc/xYde\n",
+       "fB8ffOllQtvXDIfS5WhygCe1mjCtS1bLJXlRIA2LwXDIaDQm37ACmpo337rNZLxH2BuRFQV5uWY0\n",
+       "GnJxNSfwbPIsx5AS23W6DStg6HnUZYVpmMRx1NEpTQxp6hPEu+vd9e76nuvhxRW//p//FzRtw2Aw\n",
+       "YD5fYFk2q1XE008/zfRqSpFr0VASa3hmvV7z4EHE9evXCMOAPC90wPGwz9X0CtM0GI26oe18znA4\n",
+       "ZLVaAdDrhTqYwtq1hVUa6toJOkaaHB9dwzJN3n7rTexegDQsAsuirErEDutll2m325XvKsu/0/ru\n",
+       "LPH/H9eTplMbr4C6rnFdd2tctfneXRHPJlghDMNt162hk84JrOtgDcMAAf1Bn1opvF5IUZXceXCP\n",
+       "//Z//p+4NjnkR59/H55pY/Z9wsGAoDWwy5aHyxmLKsM1LYKgx+TgEMe2eHD/Hg/v32G9mOKakmEv\n",
+       "5MXnX8T1QzBsTMtHGi6XlwvOLy5plcTzA3qDgcb0bIuiKjXtaLUijRN8z8dzXUAfobIs+/6/Ie+u\n",
+       "d9e/gOvn/vInSdIChCYPHB4ecXR0TBAE3L9/X0vx8xzLNEnTlPFoRBStCEKPt2+/jZCCs4cPaNpq\n",
+       "y0oxDMl0ekVdl0hhEEWplri3sFysKcuGqmz1IFUp2gZUC7RAqzHspm4RSPb2DnjmmWdZRzFCSMpa\n",
+       "h5DrP2LbjG6G30/ahvzAduAbiCRN060lrG3bmKZ2sNuA+xsZ/KZQb3jCG1fCDZRimibStDVFLM8x\n",
+       "DAPXdjBNk9ligRv4mI6Naip+/zN/yE9/7OP8+Ic/SrFOqFDce/sutjBwkazjiPHN62BIVldziqrB\n",
+       "dhx6/T7XT46pioI0TXn48Iy6hbpVuF7I5PAYpRryPCWO77FarIjjr3PzxilZEuukes/BthwGwyGD\n",
+       "cEgWxxR5QVkV2LaFNOVW5PTuene9u777mq9WHB2dUBURdd1yfn5BFEW89NJLjMdD2lpx9iBnMh7T\n",
+       "NDUX5xcUZcFheEhVVZyfP6Qoio4GnG1VpQcHB1xdXWKZHrZpkKW6ix8MBpRlQRTF20Lb64XdPE9o\n",
+       "XYcwsC2TtgVawd54gmmavP7aq3i2RhcM+a0xkZsOfHf9wA4xP/cHv7v5GGA70NxIrJ8E8nf+7Xbn\n",
+       "2sArG5+UjUWsUArLtHBsnTdZ1jV5XTFbL/mH/+QfszfZ52c+9JfI6wrDtQlNF0/azFYLpO9S1RWW\n",
+       "0tP2yhKAQVPVqKrEsU2oKyzDxPMDqgamizU1BnlR4TgWgpaqKoijJQYNQugMzUHXhZuWRdgLKbOS\n",
+       "dJ0wmexR1SVSCqpGc7P/yi/+0re8bn/8h/8UIbR5UxiGNG1LUZbcu3cfpRTvec97dB6nbdI2Ja7n\n",
+       "k6YFrhtgWg6rSEuXTcfGtu2tMX5btQihsA1TR0Y1erLu+T5lVZJkGXfv3eXg8BBpW1imZi+4Hbun\n",
+       "aRocx6FVijQvdPQUCtU+fjw0TXu7WW844HVdc/bgnF5/jG07lGWB7wcdH76k3+9rD5BaS909z0Oo\n",
+       "FsOyqeqG86spnq8dDIXQ4hfb0cNhANvTsxbb0MNsz/VIk4SqqiirSouH2lazFix3S1s1TYssyymr\n",
+       "mrrz6MiyfDuMnkw0tbFRCikNirJCtTVSteTpGtmUGKpiMOjhhD5N2wWUCLPLVoSmbknzgjfefJu9\n",
+       "/Qm9Xp9GCHr9HlIIzYmWYBkSRKuNqtxN42LQVApT6vdDCKirAim0W59h2VxOp0RxxvMv6nDpPC+1\n",
+       "iIUWUwqaStP22lZp10jDJE5SXNvkt//Bp/j5T/4coLQqV0kQFnkFcZqCCZ5vU9U1nulRV42+Xzra\n",
+       "n+M4HYMEmk5kpQBpGniet72vy6La+psEYYg0NGc8TVMcQ3J8fML5+SWWY/OzP/1j33JP/NPf/zxZ\n",
+       "ltMPLAzDZDgckeU5bdNFliHohT7rxYLRaIhA4QU+0+mU09NTZrMrHekXRezv7xNFEYPBgKLIGI1G\n",
+       "zGdrRqM9TFOS5+VOfdL5m67rbotu6NlbZo+wjI7rvW3Nmc0uefutN/BdB1M8boO9Wwd3bWSF+AHN\n",
+       "xNzI4jed+GZtjhHQSUuVQEhJSxdEKh4VccuyUI1O7tATY4VRNkjXZlYk9FWDSiriwGAZGvyvf/dT\n",
+       "/PDhM3zsw3+J0jUQnTnWOk1JzRwzdHBdG8fpkSQJ0+mUKiqwrIDxaI/ewX5XQGdajFMWOI7D0fEI\n",
+       "13NYLOasFgmz2RrLsnnPUy8RpxlRFOP410iziLNphOvWOL0xg0mfycE+cRJjWzZlnnJ4sP8dY5X+\n",
+       "/Et/Qdu2BEFAVuS8/vrrvPjiexmPx0wmE9q2ZW9vj/V6Rb83oqoqPNfBcU2SZM2w53Ln3j2eeeY9\n",
+       "LJdLFIoWi2QdYRom67Lq5g/a43y+XPDpT3+amzee4vT0FNew8V3dQdRCkWURm+g7Q3TWnVVOlq21\n",
+       "glAGnUCls8e0LKq6QilJAzS1TjF56b0/hO+2tI1ivV6zjjIuLy5RQnL7zpvcfOoG4/GQvcNr5GVG\n",
+       "WWqBj21ZIEpGfavjCWs+sus4nF+csV6vmV2lRNGawNe0tMPJPpfnF+yP9xkHfQa9AWVZs1qtWMZx\n",
+       "R32rCEMNm1mWxWw21xho0zI5OCAIAtq2YDDsUeQJUbREVrkOaO5OiYdHJ5iWq0VZTctw0KOqCoqi\n",
+       "oGy0rPyN117nS1/6Ep/85L+MYUikyDkY9UGlmKaFnL5BRAAAIABJREFUETxKbEmTHKffI81SFssV\n",
+       "pmmR5yUCSVlqDLfp0o/KsuBP//RP+Bt/49/khm1jGA1S1bih0YUrFCRFge14WswlDRzf05uv6xAM\n",
+       "Ai6WV3iDPmmacbFYIaWBwGA4GnM6PqEoClariCqvqcUa13UZj8dbEsJ8PqcqH6VmDfoBYagNt9Jo\n",
+       "rXnyjqtteh2HJM+IoojFfEkYhgRBQBD6nF+dIwzB5eX5t70nVF1wcrhHmWc4jsPDB/e186FpMBwO\n",
+       "eeONN1Cq4eYzz1BVFd/4xjc4nEx48aWXSNOCppUoTEbjCXfv3WdyMEGaBrPzJes4YTgYM1/OWC4W\n",
+       "3Lhxk34/4M6d+/T7fQaDHovVGgyDMAh5eHHBYDDQzUxZd54rCtM0qKqGyeSYtpXcuXMHz+0cCdsW\n",
+       "29Quj1VZYtuO3vTaFim/d3n+rh24EOIU+HvAAZpD83eUUr8hhBgD/xtwE7gN/DWl1LL7N/8Z8G8D\n",
+       "DfCrSql/9m2eV33uD353Sw3c7Dq7CTpbQ5iOy6069RLykb+3aRh6t1NgSEmZ5wjbRDRgGQa1lGQG\n",
+       "TNdLfvsf/iNOj6/zw+/7AOV8TWnodI3hcIjdGfRo4UhN2qkDVdvi+T5ZWpBl+ZZcb9s2Yehrj42m\n",
+       "3qZoADhOQNtAXbcURc0qSpDSwHZsqroE2RDHa6q6pOc7DPshAsWgF9ILA/IsRUrJx372r37L+/GZ\n",
+       "3/sd7dPRiUDKsmSxWLC3t4+UWm5eVTWGlNRV2QkeGuI04c6dO2RZhuXYfOQjH8W0LGzLZrVa4Tlu\n",
+       "56uuTe7bVnHv3n08z2Nvb0JVVhRFSZqleJ5JGIbkeb5Vsz1pHgZaeKFaE7r3uKhKLaJqWx2eUFQ0\n",
+       "LejkFQuJLkaO42A7PgpBXpb4vs9sPqXp1GxIgRAOZaUVi5P9Paqi7LwwdDNgGiZt0+B5Lo7nURRa\n",
+       "IJQmMUHg47seaZqiGp1tuF5G7O3tg2vs+FMIyrIijrXN7+mp5qcniX4/TaNTVxoK17UxDIE09GtX\n",
+       "Vw1xmiKkxXodYcsWgaLXCxFC0e/3uLy64OLhOR/5yEcpO4+MtlWYBmhWmqAVj0IBmlpfe4apFbrr\n",
+       "daRpqba7FacZpsFqteThwwcURcHLL78XUFsRkJRSc5ERqKazKxBaRUhHs63qFtsS/M5v/wN+4V/9\n",
+       "16iqmuFwTJ4VSGmQZTmW7VFVNW2r6PV6FEW2jTzbnIo3bqGbbnLjJOp0YcOmadG2mtOulKJFIQ0D\n",
+       "39OY9eZEvjG1UgJ+/mc+9i33xP/9+38EQtDznG1DmCR6WNnv91FKMZ1OGY1GTKdTrl+/Tp4k9Hra\n",
+       "cuL09DpJkmoTLiHIcn3Kcl27I1Q4nYrW6uT8gvFY2+hK08AwLYqOgCBqhWHIrT2IYQg83936nTdN\n",
+       "jVItd+/eJV4/1DJ+oZWhnuPQ7sjoq7pGSvN7Sum/V4mvgP9EKfUXQogQ+KIQ4tPALwOfVkr9N0KI\n",
+       "/xT4deDXhRAvAX8deAm4BvyeEOI5pdS3NbV9MoRht3BvCkHZ1JjowQCGvgA3fEmhFG3daiNBBbbn\n",
+       "scoTBk6ASkuWqmQdmvzmb/0Wt9wxP/b+D+PtDak9n6pSxEnCm2++jW1bnJxcYzjsI6XAcTLyXHfX\n",
+       "y0XEaKQVYGmakaYpeZ53A48Btu3gecFWNhytr5CGieNoB7zDwwlRnDKbzWhVg+2aeJ6PVZtkZc7q\n",
+       "7gOuX7tGVlTcvvMqh5N9jO8wWn7ttde20IMQgouLC5bLJZ/85Cfp9Xqd4MHCsS2aRg9FvvKVVxkN\n",
+       "9/jID38E1YkS6qamLBqSJMYyTZIkwvN8XfxNk9t3bvPsM+/pLkILKWE6u+wMqFp838eyLF0Eu3lE\n",
+       "mqb0ej183yeKIt1VpTn/H3NvFmzZdd73/dae9z7zOXfsCY0GCIIASXAWKVKUSIqmqIROHMliUnKV\n",
+       "qxK5ErkU5ykPyYMrT67yS8rlylviSqpSZSu2wyjWQDGS6ISkJJIiQUwkpkajp9t95zPueVh5+Nbe\n",
+       "3SRAKsMDuasaQF9033vOPnut9X3/7z/MFwtzUFsMhyMsSzMe94ljuceD4YiqrrFVICrCLCfNCtKs\n",
+       "wPVcFos5Fy9dpKwKFouFebgLzo5PmE2nuLaD8mDn0iXWqwVFIQERq/WK88UcZSkx6wo8dvZ2DdRQ\n",
+       "YtkWylEEYchkNuXg4AC39qgb2SS3trZwcsgyTc/2qMoYz1X0ticoy6IsBKpaLeYkyUbaYtfBD0I8\n",
+       "z2enN2KxWtHvDVicHWIpuH79OoeH96hrga7+zm/+pnRnfkDTyH1EN9RVCWi09YCloJSkV0W9iOVy\n",
+       "YQQfHlmWkmYJ3//+9wF417veheu6XL58Gd+XTXQ49LAsRVNXD+hptkA4cbwhCCNcz8VybCLbZjjo\n",
+       "STc0X+B4PovFAsf2GAwiRqMJeVZyNp8TxwlJkjAe9+n3JbeyMDmWJycnRFHEcDg0vvg1VVWSJGL5\n",
+       "W5QVoZH/13XNcrmkLAqO1zH9fp/hcEiSZKYKtTpCw49eaV5w6fIl8rUIh/b391mtVgwGA+7fvw/A\n",
+       "e97zbu7ePcDzPI6Pj3nysce4desWtm2zXq65dGmfF1/8AVEUsbe/S78/4PsvfZ/dvV1sz2d9NicK\n",
+       "Q3Z2tkCLQnp7d5eyqjg7P+fS5X3mixWqaugPBixWK6IwwPECTs/O2Nqa0WhNo0WdeuHSJV564QDf\n",
+       "dQDNcNAnS+XQqR7KSdBavQUT/9Hr/xUGrpT6PeC/M79+UWt9pJTaA/5PrfWTpvputNb/2Pz5Pwb+\n",
+       "G631N3/k++iv/emXOhZKK9Bp2094KOC4NpXxQ1iqZVlQN7iOI05iWr6eoVF1A7UmGA04zTb8wR9/\n",
+       "mWKd8oVPfZYyzhhvbXO8WTD0RSnpuu5DasFzXNdjNBpSFAW27eD7Hmke43muwfXE10QpxXK5Mhif\n",
+       "VOWe5xm/YUVRSFBA0ygsYyRVliVllVMUGVVT4zo26+WKOBaRwWQ0oBcFuI7Nv/vv/+Zb7v/f+49/\n",
+       "nb29PVarFdeuXWVnZ4dbt25hWRYf+9hHWa1WsoGu1oSez3K5ZHt7u3MlbCuaKIpEEapMxWiYL2EY\n",
+       "EgQhWVYY+CXqqpkkSdhsYu4e3Obpp58GFK7jMhgMumF0m7bT7w+kakQzHA2wbZvYJIs3NCwXK7TW\n",
+       "jMdTkiQBLKpKEwahVKGeT1U12I6Dsizm8zlFmRP1IlHwlg29XiQ/z7ZJ4gTdNDiO1y30fr8PQF7X\n",
+       "NE3NYjGnrgq0rrCUhW1buI4jSUl+KMpDW9bCaiWfq3iKw3A4MEVD/UBYZrkGN3fAtMJ1XbNYLlks\n",
+       "18wXS7S2Wa7W7EwHDPoRk+mYqipZLOY0Tc3e7g5R1KPIBTeu6wcVuNaygXciN8vt5hZFVbJYLLh3\n",
+       "TzaoCxcuMJ3KDKEs806S3ev1WK/XBEEgqkylqarazJocmkZjWw4NmtVGEoyaRihxf/XNv+RDH/ow\n",
+       "YRhiuz55Xhh82yLPSrGDiHridqgr2ZSLQgoXQw8uy4osy7tZS0sPljxYh6IoSbO8M6CzLAs/CKTr\n",
+       "yTJCL6TSEq9WNzW/+rlfesua+P0/+Tp1U3N1b7fbO1oSRL/fl7CPSJ6VixcvYts26WbNdDpjOBrw\n",
+       "2quvm8Fkwfb2Nnfv3mUwGIrPi+ty895dHn/8Guenc9Ca4XBoAjVSbEdM8xarJVEvIrBt4jjFDwLT\n",
+       "dZRmhiN2Bq4ruZ+245Bu5hwd3idPE8oiQ+nGPJtGidkVtIr3fPBT//8xcKXUVeD9wLeAXa31kflf\n",
+       "R8Cu+e8LwMOb9V2kEn/LVRlfCHm4HigqWxpN25ZTPfBB6aa2Shz9LGXRqAbLnFg+itBzKDyLY53z\n",
+       "v//hH+ItMj798Y9TKNje28OppNJZx0sc5TIejyjLitFoyHQ6Fdz07JzpdCrmO0GIFzjE8YbDw0Oz\n",
+       "0fWIoogo6jPoDymKnMViyfHRKbansGyLXtRnd2+b+XzFcrkmLzJcz6NpGsKwh2XBcrXGdgPCSGFZ\n",
+       "mtPzBXfurpk+ZKzz8PWLv/hJTk9PmExGNE3D/fv3uXTpElVV8MorL7O9vY3rDtnb3SFLMmazGaPR\n",
+       "SEzvoTMUakVUYoGuKcuCXi9is9nw1a9+lV/5lV/BdW2qqmQyHaKUTRgFoODpp5/m+PiYixcvkmUp\n",
+       "Gs1oOMJ1Xfb29tBaglnDMCQtC+7dO8T1bJNN6uE6DluzWbe5OLYZVleKPC9kAMWawWCIqxyUBaPR\n",
+       "gDSV4dZysWA8GnN4/x4XL16kqmpCP+gw2aqqSLOM2IRIBP0RKNi/cImiyCnylKoq0Lphs9mQZjmz\n",
+       "mUO2jlnNT7h69SrjyQRbWXiOI4cIUiB4A4GclGWRZCWnJ8e4rhzyriXDOdt2ODo8ZntHpO/jyYws\n",
+       "WbB/8RJ5moj/dlFw8eIFqkK8Y6IowvcD0ljyLNuw3bJ6KNhb52RZxs7ODi+++CKXLl3ine98wnSB\n",
+       "HnEc43kOdS10OLGlcHBdH9t28FyHqsoRiUQbINCgLGiqmuFAbCLadv/evfvGXbDA17LpHh4eY9s2\n",
+       "g/7owSDb92lMhXxycsLx8SFbWzumM/OZzUwm7GrNcrFGWZrJZNKlrruO1X0WLeRnW4rJeESVF0S+\n",
+       "dHQ/7tJ1wd7OLuv1muFwaJwP3W4Td12XXq+HUorDw0O2trYo65JNsibJEp545+PEccbR0RHzxYLt\n",
+       "nV0ODg4YjUZUVcMjjzzC7dt32d/fx9INt27d4amnnpSiJsvxfJ/Z1pTj0zMaLyDq91mv1531xf2j\n",
+       "Q2azLTZJQhgEeL7HJk7w7JDZbJc3b1zH9wLyRN5/VWbUddlt4Jb1k7ng/482cAOf/K/Af6G1Xj/M\n",
+       "CtFaa/V2ll0P3eO3+2IURV2STovzta2D+ZmymT+Ul9dRbvQDX5OHja5GeKx1xcrSfPs7z3L/4B6/\n",
+       "/sm/wTjsYfshZ+slvTCi5/oEOxGWZYlhTlVS1WJnOugP6fUuYNsuWZaxXq/AknZvMBgYW9OCs7Mz\n",
+       "5ufnHNw9wLYdZrMtLl26TFGnhlNa8PIrL+M4HqPRGNfxqZuaIBiwWa/IixzP89na3qMqc9arJZXn\n",
+       "4vkOh0dHb3fLODi4y9bWFkdHR3znO99hPB5z6dIF4zURcufOHfb29njt1dfoh33+vb/5N5nPF2jd\n",
+       "POQpIYrVLEtwXI8kyYiTNa+9/gpxnPLudz9NnmfmvtodzVMpGA57bDYbrl69wsnJGQBvvvkmo+GY\n",
+       "0Uj8x8GiNd+fr9Z4nlgOxMkGpWAxn7NerXj22e/x27/996XlVhZhNCIIAgbDIZskpmkqDo/uS0Xo\n",
+       "OAz6fUajERcvXmS5mNOLtjk6PCQvCtbrmO3tbcLQZzAYUVaVwThXHB2eotEopbvqVKmG+fmZGa5a\n",
+       "vPDSqziOyyhyeOmlH1BmOR/60IeYTid4nkZZkKYplmWxWW+YjCdsTac4uzs0TU2RZoZWesjJySnv\n",
+       "fe8zFGVDGEbYrktVbIGCqNfj5PiEOE44Pz/Hc1xGoxGbzYbVckk/Egc/lCxc33dFsWcpPFcO0OvX\n",
+       "r3P58mVzeJXk5kBqmob1es1gMOD87JzpbEpViqZifj6nP4iwVEMbJtCumyQVx7/cbPpJkqIeovA6\n",
+       "jpiKrRdLLl26QFU1oC0TwFBg2xZFlaF1w+7ODlcfeYRNnFCWYk+7mAvsNRyNePTaIzR1yXq9Jksz\n",
+       "8ey37A6Sk1Qt2xRRGRdmE0JXoQOHHxcsZqsazwbluUYpKcWKZ34/Gg1Zr1fG6dMlSWJ6/RA/DLn5\n",
+       "5pucLxaMhiMee8djvPHGm+Rlyc7+HkVdEW9S+lbPJP2ssIEnn3wnR0cnFEXBZDbjfH4uthujIapq\n",
+       "mM/n9HoiDspzOag3mwRQbOIEJy+kurcUnuvj+xF5FuMFATQVlm1jGTZdZeYGP+n6azdwpZSLbN7/\n",
+       "s9b698yXj5RSe1rrQ6XUPnDc7jHA5Yf++iXztbdc/8P/+M87rPvDH3wfH/nw+7u8RN/3uxAH3/c7\n",
+       "miBdIIPGN4O3dujZKE1T1FQ9hz//1l/y0jf/io+89/0UnkVZV/hxzmTcpwxc1DqjKDIa3YCCwTAi\n",
+       "zwqqqmATrzpIR2CTPkWZo3XNcrHGdT18L2B3e4c0zZhNFXGcUBY5CWB78nrEGMehrjV1VZgAVB+a\n",
+       "mrIocCwbXWvu3r5DELhEkU+v57FeNfj+2+N9vV6P2tCjLly4wHg85oknnuwqjtPTU55//nlGwzGe\n",
+       "H3Bw/5Ct2YzA98mL7KHqZEVVldy/f58kTZhtbXH58uWuI5LK2O04+O2B3WLleZ4zmYxQSqxO0zQD\n",
+       "DYvFgjDsce/ePUmKCQYEYYDr2uzubrNcLrl86Sr9Xo+PfvQXABgNBwCcz5diOas1US9CKdV1R0Uh\n",
+       "9gPr9Zo0TQl8r6N71U3DZDJhvY7J89zMLgIqE3Y9mcxQyiLLEnLb4d69u/i+h6U8BoMBuzu7PPbY\n",
+       "O4nCkDLdAHB2eoJj29y8eYcoinj66aewcHBdm17YI89z5vMzHNclNB3kYCDvIwwj4/PtoGlYr1b4\n",
+       "niWbS1WxvbvDuBihlCbwPOqmYtDv45qwB01rBwF1WdMYA6s0TY3Aq2F3dxvLku7Jc93OslY3UtD0\n",
+       "ez2UhqauWC2XzKYT4lhcIptGoruaRiCqNoNxMBATLt94eLeww9aW2CT3+hFZKvCA43q4nnFAVIrI\n",
+       "FmOmqqpYrwVGC3wXFfgMBmLAVpUNeZrQ6BrHtnAiec2N1pSlFE+27ZDEawLfoxeFuDY0RYaui25+\n",
+       "86OXS02RrAh7YwmOyNIHnu1FjuPYOI5FksQoJdGBtu9wvjxnsr0lzpKbBavXNuzu7nHr5i3cwCeI\n",
+       "Qvx+wHq9EujUhEyfnp6yvT0lTTPWmzWjwZDlekW8XrO3vdslgwlMmXFyckavJ4XPdDoRMkCS4Dse\n",
+       "Smsm0y1efeW+CeDIgZpnn32e7z73ktkf3/Ztd9dP3MCVrNx/BvxAa/1PHvpf/wb4u8A/Nv/+vYe+\n",
+       "/s+VUv8tAp28A/j2233vv/t3/vYPmbi0G3aSyGCkTaWwjGTV0q2yUv78ZrPB9/3OoMqyLI7qhMXx\n",
+       "hu/926/xiY/8PPuXLuL2I+L5msX9E+IbGf72mMl0ytZkgNYNaZpxcnxM1IuYzWYAnedKmsXUhhM9\n",
+       "Ho+ZTmfUdcPZ2RlxnHTGM/v7F0wayCFlWZPmKZayiXo9+j0fz5MW/+DgHr7vE/gug34fP+jheJ6h\n",
+       "n5Wcn52wXC0Z/RgIRdzlhHq1tbVFkiQ8++yzRFFPKqDdffK8ZG/vArqGl195jatXr+LYiiiKmE6n\n",
+       "JMkGx3HxPJ8rV64ym00oq4ogDLvWWoIh+ty6dYu8SPG9UJ4HS5GmcceEWa1WXL9+g+vXr2NbDmdn\n",
+       "Z1y9eo0PfvCDfOELX0A5EcfHR9iOYOAvvPDn/PzHPkpRVNR1ycnJCePxhJ2dHWY72ywW4kBYlgXn\n",
+       "5+coZbG9vS32rrZLv9/n8P4xaS5MneroiMceewytNVtbW51DnWDYmpOTE7IilhScuqDIM6Ig5D3v\n",
+       "fhoN1FXFfD5nE8fcTVNGkXRlW9t7+J7Ho1cfx7I08/k5/Sji/PwcP3C5cuUKRV2x3mxYL1fkWYbj\n",
+       "OJyfn7O9vcNkMsByxdPC9z2yNCHLUvI8Zbmsqcucu3fv8Mlf+DhlWpAYPvzQQFEtrVY9lOXqejYn\n",
+       "JyekaWrmMzb93qDDsWXgLEKQvb098tZStidmcb0ooq5KNIq6EeYUBivXWrNcLJlOZniuT2KgHMuy\n",
+       "SNPEKJ5d4jgGrZmfn+L5D9z0rPpBF+w4xjq2Kn5kM0FYOlWN7fzwPMu2Q8pKIJWmUR2M43gWdVWj\n",
+       "LHB+DJQQuC51kXMU3yNOxJ99vakIgpA8z1iuzrl69SonJyc0jSaON1zqXTIznSUoi4986MM898Lz\n",
+       "9PohO3tbNHXNa6+/ynQ6Iwo8krX4q1979FHyPOfGGzfY27/AztYWx6cnDPo9PN/nxo3r7OzsEvUi\n",
+       "bt++zWQ8pt8LWa/XbO9sc3p6xnA4xFLgBw55VjKeTMiLgpnbR3k2dZnxkQ+/nw998Bnh5iubf/Y/\n",
+       "/Yu3fe/w19MIPwF8DXiBB1DIf4Vsyv8SuMJbaYT/NUIjrBDI5Stv8331N74qe37bIrT0mYej1Vox\n",
+       "h21LyrVSSihihnJouQ5lXXWGV/Eo4l/90/+eC8MJT77/GRrXwrNsbMdhGPWw4oLNes3N5BxVVExG\n",
+       "YzHK54HrYetC1oqKlFKUldCjkiTB82SQKdRHoUEpFMpSEqTgiaClZUOI3aRwt2UaLxmVtmVTVBLc\n",
+       "oK2GMAy4efsGSmkUDf/Zf/pfvuXz+Kf/5B+itWa9XktwilZkacrJyRlPPf00i/mcOElEnGISuW3L\n",
+       "YjAY0u9FNE3NY9cepWlqVqslj1y5wunZWacAq6qyU7VmWcZoPOzYNkmS0It6+H5IHAstcTKZ0TSa\n",
+       "2WyL+/cO8X2f01MZGo3HYxpts7UtPuJlWRi/iRGNVuhG2uc4TlmuV7iew2KxwHVdrl17lC55Js/R\n",
+       "GtJYMH3fD4lzqbJa++HWegHj+e4HPkVeYFsWcYYRhmyIehFh6BuLVafDSLUZgpd50dHd8jzFUlLF\n",
+       "eq7NdDoh3qzFz7oscQIfBTi2jW05pGnKG9ev8/g73oFS4jGvbGN/rIUP7DqOHICew3q1wLYstiYT\n",
+       "aZfLCm1w77oxLKyH4EHHleqv1xPvcMuy0A1Yyuo24daOwnEsqroiDEMzJNYSgKAVlsmMzMuigynr\n",
+       "upZEoyxDAZ7v8Wf/9s/49Kc/zWK5JIqEZSUME9sEDHsdi8yy7M5WtWlqiThrA5mNJUbdCLbtOR5l\n",
+       "WeB6PkEYmBSm2kBbFpZtGQ60RZ6mHQ++aRo+8Qufesua+ItvfJ08z/DCiDAMKYqCzUagJBmeekRR\n",
+       "yHK5Mh7pPVw/6NKv2r1mMpl0PiS6aQjCkNVqxe7ONqvlGtuyxC/f84iiiMVSuPG6TfZRCt8RGCjL\n",
+       "Mra3t431bUK/L92Z1kLddMxspalL0BWH92+znB/j+zZlniASToVCntH3ffSz/9+GmFrrb/Dj/VJ+\n",
+       "+cf8nX8E/KOf9H2Bjj/c/mqpRP1+n+l0ahI7FL7rdfhtUzfYysJ25ZRPkwTlOkLRsRS/+7u/S5ln\n",
+       "fOqXPkVGLYuiLDk8OeHQOWGgHfb6E565/BRlUXN8dMTrr71BWZbs7Oywu7uL68pJn+c5aSaJ2Mpw\n",
+       "O7e3t8nznNVKhAu9XsRo1O+goMViwfxEKEyB7xMFoQQbbzbMz84oikJcCsdjer1+hzO/+vrL3Fwt\n",
+       "mG1N8Dyb09Pjt71nh0eH9Ho9LNsijVPB4k/P6PeHvPbadYqioN8bUtcNaZrT6zkEYcC9w/uMhkP2\n",
+       "dnf587/8Jm/cuM7v/PbfxzEWnBJN5XbRYO1ibIUhcbxha2sLgB98/wfcuXPAZz7zGVxXPN3v3z+i\n",
+       "3++zt7fH448/QZKYAAssFotz8iKjNkzS7DjD86QrKaqazSam1+uz2ax4/PHHuXnzJi+++CLacIjf\n",
+       "+973Mp1MqWu4fv0N3njjBpeuXSWKInb29gg86QbyLGF7e1uyLBcSbrtYbrD9CN+3ePTRd8hwyHRv\n",
+       "dV2xmC8oy9oc2B6D4Zgg9GXRrlZYaJJ0w8nRIXcP1uzv7zMdj1BKsU4z1uu1VPlZxrA/YDqdEkUh\n",
+       "682aRktlfHp6hucGpFlGLwwAje/1ZEhW5hRlQS+KaGqJywujAXWtaXRFwwPztryQJJ88z6Vbosbz\n",
+       "Q+pSm/eg8X2fMAxZb8z7X4hiV3Jma/K0pKwqqqamQTJlB4M+tvGJn02nMpw06sGmqbAtxdnZKbPZ\n",
+       "jPF4xGq1ZjKWDrEB6qYi8ANT0Mgz0+ga13WEbVVVNLowWH1FlVcPnDmbhloLa8TzPEajEbPZTOAo\n",
+       "28YJI84Xi64zg7du4HlZk5cN2ClKtWpuRV2XrFYieCoK0XWcnZ2xWhaAY2CjAa7tkKxXVLmwZwLf\n",
+       "l+fe1kSBw+L8nDzPGfT7pqBrSOKYi/v7JFnKJo6ZbU2kEFIu08nYeOuv2d3dJUsTgYxM3kEQCDxc\n",
+       "NWIfXabCST8+uiNiKQW252IrG9tygZ9RKX1bgZvf/xDL5OF4NVvJKSkRXwK51E1DXkr+YZJnKNfh\n",
+       "u997lm9/8zt8+vOf4/LePmGhCZWNCjwqG0oLagvqrCBY5eROgGPwvqLIybKURgsvVuvGUAalWmlo\n",
+       "xL/bFSpX69uyXq9pKe6d8Eh5aG0Z6t2mo0+JGqsEszm2uYPrzRqtGvzAw3Zt3rjxGv1Bn3/wO//w\n",
+       "Lfft61/7Ei+//IoR8CzJ0pymEWn3crmiF/UF39SaIAqkoisKojCg0Q1FLrmWyhJq3PZsC9BE4QM5\n",
+       "cFuF+OZBruuayvgbbzYbxuMxH/7wz1GVFWmaMxyOpF3NCsMkCLqqZjwaSnWmhVudFQV1oymKkiQr\n",
+       "ZIAaJ6RZxmopm47I60sa04KXZU3gB4Rhn2vXrnHt2uPcvn+fNE0Yjoaslit8T4Q0tYl3a5qKN998\n",
+       "k/FoxIUrVwTzthRZJlTJsixxXBfHcXFdr8v43MQS89VocSfyfU+k14GP0mKPsFot6UURXtjHsR1h\n",
+       "YdQNumlYrVb0B32qusb1XRzXoawqHFvsGSwLyjw3Vgspru1Q5hlhEIocHkVeNDSNksxE+0GwSYsg\n",
+       "xHEsdMS6QuGg6wcpLnmekucZQeiT5/JeNpsN+/t7ZEmBhYtp3MCS9ZamqSTUex5FmmIpidj7gy//\n",
+       "G37jN36DPBean1gcWx3c2aY5BVGPusaQD4wNqiVDY4FHhe1SGqgzj3OUUeY6tovreSRZ2tE05X0I\n",
+       "rKSDAZZlEYURVVnytz7/q29ZE7//R1/Bcz2qUii0QSBpR21gTPtMO47TeQwtTladfL7f75khv4R6\n",
+       "CLGikBQsxyEpS6mFbYc8z9maTGUfMh1DlhdE/R4aTZlL9F9koLj1es3Ozi5Zlpvq26bXE7582Wiq\n",
+       "KieJl9iq4uT4gH7k4RihotIKtMwZnvm5z/xzeg5nAAAgAElEQVTsSelbxVb90KS1xZRbXnIURaTk\n",
+       "DMM+DtBkFY7nUaHRrgu+DN1ODo+48+ZdvvCZzzEdbeGUDa4fUDYNTVURL2M838MLfAZBn8byqauY\n",
+       "qk7ZJGvCIGQyEwVY1JuwWW/ARG6FYSA8bsdluVpwenyC57qEUcRoMMR17a4q11oThi6O7aAcm15/\n",
+       "ynodM1+cdWq0yXiM64uScL1ZEoQOlu2SZSl37xzy6U9+qhuI/eg1Gs746M99grqueP75F7h58yZV\n",
+       "VfH+97+XW7duk2WZCFI8B7uSzdL1XbBrjo+O5UF0PCxtc//olCSreOSRKzz2rieRxVeB1vSiHmmS\n",
+       "cHhwyLPPPsvHf/7necdjT3Bw94DRsE/P9/GGQ+bzOWm85OjwLmHUJ4p6lE3Beik83k226VzWoihE\n",
+       "o2QDLWoWizXb27vkRUmAwrIDXMdhPwjxfI/ZdAvbkTSlo6Njrl+/znyd8Z3nX2Jrd5/+uIfjemzt\n",
+       "jEnTlLOzM7I84fT0hOVyQZalWL0h6RvXeeaZZ+j1IlxXuPhJklFVclAHQUi/3ycKA7a3dkiThNpw\n",
+       "4xeLOUmcMGcNCpHzb1/EcR3ycoMduFRU1MgB5UUhWV6QxCmbTcxoOBZGT+RTVhX9qI/WDVHQw3VD\n",
+       "As/BsQqUktAQrRvC/gjH8ygyGbI3jRz68UZyI13XQVc+riXh0HW74FH0ez6OJYVPL+jRAElac/36\n",
+       "XS5fuYIb+BRlQZkXWNrCcT2GfoBl2dRVTX8cYikLTQ3KZrFaEscbdrZ38PwA1di4rk/oaWpdkmcp\n",
+       "aZJS1gILVpXwyB1Hmc7aIgxCHNeRwy4ICCyZp8g8S37uMPLFJsPRlFWNa4V4dkijfIEUK3B+TCUq\n",
+       "KfPSjSjLpSgbgp7MEnwDn9qWTZKnWK5AbRPHo65qpvvbrDdrST+yFL0g5O7dA/auPGJSkUJcu4dl\n",
+       "KzP/2TCfnzMaDYmTGGVZ2JkUMVmWs7OzRZZl6FrjujZbW1PqWsKlt7a2yJIUmpq6LCjqlKos6Pc8\n",
+       "bt08oBdEuJZQKptaqNGW9dcX1z/VCrwd2LwdlbBlTFROQ50V9L0Qq9aARaE0KgzIa8lJ/IuvfQPf\n",
+       "cfjA0+/tKsd2EOkYsQbQGejkeQ5WZdgOluFJi6glCOTPOrbbyYM3m9hMg5sOc+28e00YQ0t/LAxe\n",
+       "LgKGmsCQ+lscL8sy6romTVNGoz7L1dywN0KeeeaZDnd/3wff2i6+8NzXOiFOSwlcLBbcuHGD7e1t\n",
+       "Tk5OuH37NnVTgdWIeZCyODo8pijEQEkb06eqqvFcl9nWjPnihDzLeOzao53owrEcelGPvd1dirxk\n",
+       "OBiaMGLxfFktFyhgMpkw3ZqJ4VTd4EcRti0HXPu+0bCJxUgrzwuzgYtJkG279AdDai2dWFEUrFZr\n",
+       "g6PW5HnOeDxl0O/TaE2WZri9Prdv3SZJEk5Pz80MAi5c2GcyGREEPpZtUZY5yeK0CwxpqzDf92mD\n",
+       "ruU1Cce6ruTetJCK40hGqTbCmixLqKrSVFAZaZrg2gKtrVZrVoslO9s7bE1n2LZDkeVYtkVWF1R1\n",
+       "TZ4VOI5PnmYdhdaxhCFjWTK8azDGUmiKPAcappMxYegxHgwIQp8iTwkCH40LloNlKapC7AREEOOQ\n",
+       "FSVxklKU8qzduXfAYDhge3sH33XNM1AZzFZk8jZyj5QFYWjTaMHR43iDbizytMK2RALvejZBIGEk\n",
+       "nrmf7TOudQP6QT6mQDiV+e+Kpm7MIN3DshyUaS9c1zOiOF9mII2wd2S4XvOZz372LWvij/7oywDY\n",
+       "riUpXLaNZTvQ2rXywK5DKYeqrvGcpns2JcvUxmozUtMUhUC8k+kU1/LNYHgX27WM6dyDAWxRiklY\n",
+       "WdVEvojK2j2ovRxjZet7gUDDgBvanJ+dMpuNuXdwl1EvQmmBDS31IOu30T+jiTw/GqXWvuEfdSVM\n",
+       "45TI8+VBSzKCKMIKfKqmpqwrXn/1Ne7cusXnP/s5oRpZCteWyLImz4hTadGHw6EkoXsuXu6Rl6kJ\n",
+       "RhW5uOcJXXExX7DZxAbjFsHOYDAQnNW0k+3wy7Zt4jhmMV+YpCDwggjP9xkOh12ittCr1kRRwHx+\n",
+       "Rq/XZzwe8s1v/SVnZyd88YtfFErcQ14ib3dJ56vklDc+woHv89i1a7I5RREX9vfZxBvyIuPw8FAy\n",
+       "//YvdEZRWSaKuf5QZO/LxZyqLGjqmhs33qQsS55+6mkm0xmb9YbXrt+QAGAlLeF73v0UYPHYO96J\n",
+       "Y1s0Tc3JiWQL7u7t41g2y+VKsE8xO5EDD0Vi0uhPj++yv3cRUJRlzf2DuziBqCjl0JiR5ZmoBJuG\n",
+       "N998k+OjisFwyGQ8psoyzk6O2N7eYXDlElkuPjWB6wjVVGtUU0tgru93ifZJIpYGrW9M23JHUWQ2\n",
+       "eUl4WsxXFGXcDRN9L6TXD7FtV+irdcnpSYmtety+dZtXXnkT17YYj0ccH3+fT37i41A3lE2GZzuE\n",
+       "rkWlNFt728RxyqAXkmUlWDZ5VoDjU1UlVVFwcPdNgsAzMwmPrdkO8XpJozXLxYoLF/Zp6oq6EWZK\n",
+       "GHnkeYrtSJh1rWV+U9caZdkcHt1DWQ5bs21cX0KP43ViKKM+lmUzmUxFRJIXRsXpkOWxWQc+4/EY\n",
+       "hY2tfHQjHjFVXVDXsjFr6J5dz8CSSj1w2HPM4PThsAKFBF5XVU0DXYYpQJJsqKoa3w8leFwJZfLt\n",
+       "LtsEJpe5wB5ZmgrRwPMpiwLXDykMvXQ0HkFZ09TiNqmbxvh8a3xPNt3hYCKK5nBAXWqyYo1SmuVq\n",
+       "QZLGXLp4gaqRsPCmafB9US+XRUyaPBDKdU6fpmjrRUJxtG15P4eH90iTBM+ziYJAIFpL4Xkujm2Z\n",
+       "2cdfH9jwU02lL8uyO7nhQcJzu3nbto2yFdpgWU0NludSATg2t+7c5o//8Ms88653EzkeFy9fksqm\n",
+       "kqQb13UJwhDHtknSlKOjI4qiIAwCev1WTRlSlZWx+NSdzLfFsGSKD57nMRwOO550W1UMBgNTMQjt\n",
+       "arneUJYVi+WCqqq4dOmSeV819+7d67qBr371z7hy+RJf/I2/LbxrM+RoceAP/fzn3nLfXnrua9R1\n",
+       "YxR0wtopikLcCZMUy36QkN2+jyRJuHnzpqlsVxSFiJAaY6HqODYNFUrZzOdzrly5wuH9E87P550/\n",
+       "exhGRnRh4SjF+9//fu4d3OXs9ITxSMRN29vbjCdTWXSWJUyduqZuKhotD7GlLJbLFf3+EDT4nnDl\n",
+       "o16PtBQpflUJPc/3AizbZjgcMBwMaXRDmuS88cYbjCZ7ndWCUjaWshiORigFSRITxysRIDUVtrE5\n",
+       "2DEugsvlkvF4TFVVXeXdmel7Ln7go5RNFA2QrgziTcJ8Picviw6eofHY29ul34+wLEW8WaF1hR84\n",
+       "OBYEgct0OqEoM+q8oMgLslxa/aqBRlsox+PsfEGcJGzimO2dXcZDD9+1CPygY16NjN90VZXE6w11\n",
+       "U+N7PptEBExFkRsYRYOW4dhqHXP79h36wxGD4YjRaGK6jZTKSObrqkIbpkgURWRxu7F7VHXO/fsH\n",
+       "PP6OR42q0UU1NrYtQdy2oySFRmuUbXV88jzPheqYpx1brO10WqVlu9YdW5gsfuBT1Q/sU5taUrTQ\n",
+       "bTp7Rd3UfPqzn3/LmviTr/yJdKOehef5Qo5wHDzfp24kF7fRigaNbhoswwqqyop+r9etodbiWAHx\n",
+       "RqiynudR1ZkpqoQSOhiIU6nn+7ieRxwnZKmswbJIGI1GnZ7F81zOTk86Uy/bFpaN53pgaeLNBmiw\n",
+       "lcZWCs+xyLMUx7E7iMmyrJ+YSv9T28D/6s+/3H2YD1oc1X3oYPxQ6pq8KAh7PfKyoLEVbhBx49ZN\n",
+       "nvvus+xMtxhHfXa3dtiYh6a9YS01MAxDwlDsRNshR5mVhukCUtvKfRDPZ/E8aD2Ny9IICbTqhDSt\n",
+       "D3CeF9RmwOq6rjADGhmGSjp6SdNI+zgY9Dk+Oea5577Hr/3ar7GztUUUhvJ6ylI24lSc895uA//B\n",
+       "c1/vKvS2uhHObdX5M8gDLyKNFjLyfaEv5kVBnuXE8YaXXnoRraE/6KNVw9nZOf3BkPVqw2K5pihL\n",
+       "/CBEa1NBWYrxeEKy2XB0dMTFvV2m4zGDfoTnuWzWGxLzPobDkfBkt2cdHTRNE1OJSeWlkEq91xsI\n",
+       "W8jRBkZoiEJJaS9yGYzOZrPuUDqfz2lUgOt4nM/n+J6P78mQcDKZIOyJGj/wqMqCLE6oDNS2NZsx\n",
+       "GA4oChGs1E0tdEbbRjcNWV3Q+n5bygZsPC9gtdyw2qwJQ8HLfd9HNZ5RwAYUZUaRJbiezSZekCYb\n",
+       "fM82JmjCMCnyAj/ssV7FaGWxSXJWq5hbd+7y2DuewA8C+oMBuoqxVfNAVq4sbNtF64YiL8QHvpS0\n",
+       "eWUrwxUWOK0uxQwsyzJev36Di5cuEUZ9LNvG90N0I8+OpawfMrYqjdDMMxRd0DS65ODgNs+8793G\n",
+       "gsHBswPqmm59QYM2A/6HO1OxuxA6nqb5oXXewprtWm8ajcbYDVu2eIzY0p3b6kG4OQo+9guffsua\n",
+       "+NOv/B8G3pQCTALD5TO1LaHrad0yOdVDXYFLFInXj2d8WBQQJ4kQG/KcuqpBFT90CGnA9wM6f/tG\n",
+       "KLvKsun3fIl6NPc1y5IOqgsClyRORIXp2CjH4eTkCNtS+K5DmYvXe12XYAQ8GjkgP/ixz/3sQSjL\n",
+       "5fKHaGvqoQ+r/XfTNDjKFkN5W6GbGuU4LNYr8jznxRde4De/+B+xM5pS5kVn9iMeJgH9fh/P81gs\n",
+       "FiyXS27cuCGihl6Pi/uXGQ7HeJ7HarXoLDdbf+I0jZnP551XS+u0l2VZJzZqGlFHDgbihZ3nOUcn\n",
+       "R7ieT68X4rhi75kkGY5j88Ybb/Da66/wD37nPyeJYwLf5/T0tPOzKMvSYI7x296zVs7dHjctXt7S\n",
+       "yyylcD0Py7bJsgLXdzuOvK1KPMfFAgLf5fO/8isCT9x8k9PzU2bTGUUpcFIcJ+xfuMSdg7v4vo+v\n",
+       "FOvFmvuHR8II6fWYL5dYtnhRn54cd1DTcDhkd3cXz3V5+eWXmU6n7O7udOrSIi9oas1wOGb/iSdY\n",
+       "rzcisCgSTk9PBUNtNI5lM9vf6w7KOI45ODgQ4Ydq6IUOjz/6PvJcMPMWzjo7OzEsEzFI2t/fx3VF\n",
+       "BFRVFZvNBtCUVSk2Csau1LIsJlvCUZ/NZihlc3j/mLOzU/Ks7A508Z6pSGOp3E/PFuIlE4WAzd7u\n",
+       "BYajPpvlEtAcHR0TDQY4XkStHPqTMacn54YW1/C+9z7N5ctXWK3X9KIeZWVjWdLtrFYrvCCgyMU6\n",
+       "t98fcnBwr1svRZVTVjm6loLBtm2uPfooUa/PdDblytWrxl9GDsLCpLFXRWmqPKurLre2ZzRGeu95\n",
+       "LlVV4LoeR0cnhqIYENcprhvIMDLwRQ0q/yDPM5Ik7tSydVkZOXgpxmGGCtx2da35m28qZSkUbKIo\n",
+       "fLAn0MYuapr67QvN0OTMYsT2rQiqQUzlRIMgeoGiKISqGfjkWcxyIcKaptGsje1zLwxBF1hWjbZr\n",
+       "A1XKM7LZxFy5csVU5wGr9RqwKE3QehLH5iBShskmh3AY+OimJgo98izBN17iN2/eYDYZYyuF5YnF\n",
+       "ReDL7K3RYmT1cFbC210/1SFmy3FtYZQWE3/YpVDV4AYBWVNR0VChOTg44Ev/6l/zNz79y+xtbROY\n",
+       "qbLypIJo/bnbSj4IAlrflHaAKMP91oMcQCrZLMu6g6WlESqDOxfGn3pra6sTNrQHRmvAhW1TViXz\n",
+       "+dwozwQve/3117j22KN87pd/mThujXa8zqymHWq19+EDH33rwOYHz37NYO0/HHjaCira16KwAMsY\n",
+       "yrfvU8RKQegDLUwlYc8YUctiseF8PidJc+7cPWCxWorUeLlkuVoQRT18Yy86nYxQusF3HB65cpnh\n",
+       "YEgUhuRlwdHRCevVmsp4j2RZSr/f6xbsxz/2CZIkFS54XuC5PpbrdNSvdtDbdhZtTmrbXaWFDAXr\n",
+       "qsH3A4qipBf1zGcFjuuYBB9xBuz3+w8SYMxnKD7W8vqGw6Ek8bhSfadpRp5XJEnGdLoNyLBLo1GW\n",
+       "CGc81wyxdavcFZpcYTxukiQhCkPiOEEFHus4ZjE/x3Mc0nTDxT3hlHueawJbTCK5I+2+OP/JM2tb\n",
+       "LmmSinDHVH7L5ZKyysnLFM91sJTFarXk7p07jMZjnnnmfeRlgev6aBSu46Eb81qzHNd1yPPMrBWF\n",
+       "49pQN93vPc8lTlYMRz3TwdXUZYNj+4h7ZCnrRkFjujTbthmPR1RViWPZHd9bWUq8VurKDCWFYFDX\n",
+       "Nb4XYJvN/WHoFDB0y5IgkMSfX/rsW2mE3/rzbwihoBQltza5snXTYLtuJ9F3XHkPlmVh2Vr84oPQ\n",
+       "4NVt8Vfj+y5ogeJae1fLEnhms4lxPXEjbRlV7XBcYFQJRMnzjCAIiDdrijw1XYeF53sEvqRO7V+8\n",
+       "xCsvv8xoOCDPUqLApzH3R+YJdIPYn8lEnnaTLEsxt2k5ku3m3bmwKYv5akE4GlIWBX4U8o2vf50P\n",
+       "f+CDPHHtMRqjdrRdm02Sdk54LXziOE6XsiJOgCGTyQQbqdha7+ooEv+NFmJZrVb4ftCxPfb29lFK\n",
+       "cf/+fV566fvkec6FCxe4fPkyWmvOzk7I84LFeonlyAO4vb3NC88/z7Pf/S6/9fd+i8cevcpmvcb3\n",
+       "A1OxJMYvpZbAY9N+uq77tvesqEoaNL7JA1VKIJ0Gjet7aPPhu65NUzXCLLBt4liEK77hLOdF1g2d\n",
+       "bNvGtV3iOGU8HuF6Po7tcfXRa3iexzf+4uso1VCUKVpXlLUijHzieMPuzhYXdvc4PLzPnbu3qSuN\n",
+       "7djs7e3jBwHvfeYZvvvsX/HEk09y+eIldna2ODs9pyxLcYUrK2xlcX5+TqWt7rkYDgf0+16HSy6X\n",
+       "SxaLBWma0uv1mG5POyimrjRK2RwfH3eqwyAQTP7C/h6PXrsmg+bFgrIU86f1esnpqXQNOzsi3tK6\n",
+       "JtmsyIuCXtCnyFYM+n3KImVn5wL9ngg5Nol8r+P5qRF+iG/I1miM7wvckiQZtu2yWG5I05RkuSJO\n",
+       "E2jEbnbUj7jyyCUcZaGamqYqqcsKypQ0r1AmRMC2bYqsMOtBnoF7B4e89NJLfOADH8CyLcIoIAoj\n",
+       "ozeQIAvPDEAloEHCOOqqRjdis9tu3q7rMplMKKtcrCmCoCsGtFZEkfi+BIHHdDpFaQtLibhEGCUC\n",
+       "16R5IeZl8znn5+cURY7r2NiWje/7xo897Nbjw5CEZVnUTU1Rtpa6DyjFbSeepTm5OSB/9Kp0TVOW\n",
+       "9IOgU0C3ebmVbnBdD1OhdQeDrksUMiy1bRvPF1WkheQLSPVfi+e7I/sUGra3t7Atp2N/1UVOWcsQ\n",
+       "VylFkkhXnRno1HVdoigENK7rkGw2eI7D8dExvX6f4aCPbVl4jiRgtTkAQSDU5bpufnYr8K//2f8G\n",
+       "0H2g8GCS3TJU5CQ17UXokxQZX/njr3Dj1df4W//OF9iezqjriuPTU8JBn77x7GjpS71er/PBbjfF\n",
+       "ltYkWX8DgkC8CirjxZDnRYdxt16+ICnT7WCs3+9hWUJHXK+Xnbw2zzPyuqLWwtt9/vnn2d/b59d/\n",
+       "7T8ApamKUgYpRh6tmwdJRO2h1dqAvu/nPvOW+/bSs/9X1/K3XUZbWbYD2Ma0fIEn4p1Wzi+bFNR1\n",
+       "afBkz+CfFVLrKCQBRHjpvV6PP/mzP+XLf/xHTGZjtnem3L9/nwaHyXiEhaQgDXqhcKbnC2azLSaT\n",
+       "KWHU5+TkhNAP2N7aIopC5otz8jRjOBiwt7vHoD+kKk1Ki+vhBMJckE6i7N6XuFZWZtPJyfKcqN83\n",
+       "c4OKPCuZzbZEvdiIIKOqSoLAZ7Ve0jRQFCV983c28UaqWS3V/HqzAS1r3PPh+PiU/f0LbG/v4jo+\n",
+       "QdijadoJiRY5te/hBcLAEV+XmrrSxnSr5ODufcKoh6UsZrNtcmoxN7MUuinRdcVkNKDMMmgkn7L1\n",
+       "+shpcMznaBmWiu/73L59m9deu86TT76Lra1t0iSVahYtSUR1jaXg9PSU4XDIZDoVdojWZh4ivvny\n",
+       "Wquuw2mfo6Zp6AWhuYcCcxwc3CTq++zuSscZ+hF1pShLoRc2jUjnHc/v1nCLceumpjJd4Q9vQoo8\n",
+       "z6Sb82Xo6Lji+x4EvsGztelGLWpj5tXUmo//4lsx8D/4fdlHqGtU58fiiIAMJKOyedDpaxSOJSEw\n",
+       "Td3geXLw1caSoxMgId5DCtesKXEkBQRnr2sMSmP+fo1liZd8kggtdDDom7lWQmAOss1GtBG2IxAf\n",
+       "WmMrTW2e2dKYsJlpBZZt8/T7PvmzV4G3VxuC2g5BgG4q7DiOJMnbFufzOSfnZ3zzL/6Sz3/mszRF\n",
+       "RRYnOK7LpcuXyXRF0NislivhpVoWdVnRCyW8oKlrqQg8jygIWW4WHNw7YLlcMpvO6PcHDAZ9kuSI\n",
+       "8/Nzw/vNuHr1UYaDCePxlF6vz+npCUVREoY+URQSRQGL5Zy7B3ek+o18kizj+e99j//wi1/k4sVL\n",
+       "rDcrAt/HQtHIdAIzpcA1/iiu45qW0u+YLz96PQwLtTBKO+xq4Ye6rg0+qfCcgKLIu0pdMHK3kz03\n",
+       "WjyblVIox6HIa2azGefnC770pS/x+vVXufrIJTbxivnJMf3QZxFnnJ4d0VQNdVXwvmfeS5YlhFGI\n",
+       "47mcnp/jJymXr1ymLioOj4/JDY1qMpVQ55dfeZU8y/n4xz4u841aU2mRe1u2xag/AA1ZnpGma9Is\n",
+       "Nd2JRRSFxm9DmBRVXfLGG28YfDVgNpsYqCagrxs81+98U55/7jne+c530u+JlL3l8rdyc61THr/2\n",
+       "GFleoJTNdDIW/NNw2tMsJU0TsixGxaqbi7RFSJrkvPbaa2xtj+n3xU6hbhpC28N15cDuDftslith\n",
+       "mrgS5tA0DWWtqYuKkrYjtfD8kCzLuHPrTe4fnfDJT37C5HWGpGmM4zqdSlmjJcSiqam1fDZxmnem\n",
+       "U22V6Pkubi8kTVIcpwdo6lo2zLooO0dH23bEC99zOqZTFIQ0tUKpNnyloG4EXmyj0QTyCHBtB8ex\n",
+       "Os1EC4/IPEjYG8ulZKcanmDHxHrY5M73fWzXI4p6b7smxpMxRVniKgyZwKKqayzbRjWa2gRCgEIr\n",
+       "qexDL5IO39ZYFuhG7Eceik7Hdh3QMm+ybAcrcHBdx3wvcAxVsqwL6roiTlYsl3O0htFoZKjRJfFm\n",
+       "zWw260gV6/WaxWIhiUmeR1XmVGVFU1ckSU0bqm07LqCNA+WPv35qG3jbqrUv+OEPrf1l2zZZI94K\n",
+       "uxf2+Rf/8n/hIx/5CO9+11PovGR+ekpjQXxQ4PYjhniMx+MfoieKpHvUYW51VVPUBaPRiOl0YgyA\n",
+       "StI05eT0BNu2eeSRR5hOxTJys4m5f/+QW7duo7Wm3+8xnU6MTLfh7OwU27G4fPkiq9WK7z7/HNiK\n",
+       "3/qt/0QUjWmC5/umKvZQysJ1PCMeetBtPFyl/DgIpdINjZKYuaKuKI1rm+u6rJN2gGJRVCXKeEy0\n",
+       "ByHQwTPtz5MW1qbRDWWa4nsRJycnfOtbf8Xrr73GZDJmvVkQBR5VXeG4Nr2eDBVd22Zvd5eqrnE8\n",
+       "l74foSybskq4svcoB/fuoWtFU5c89dRTWJbF6fExr92/wXQ8Zm93j1dffZUokirlwuVdPF+SX9br\n",
+       "JSCdVBj5DIYRyrKoTTUXbzImkylNo0WIlbZtsy3inSTh9ddfN94c4h6ZZRkf+MAHOpiuqqqOctl6\n",
+       "1DS1hReFYjSWl6zXS2zbI1/OO5OvremYMAzJq5qyqlgu1lR1RZYJ1XB/f5/ZbGo2ImFDqFLjeULp\n",
+       "TJZz8izBUkM2G+GaW5aLZXko36XvhYR10Vk2+J7LSy++yFPvehdZFuN5AQcHd4zbXo5qNGEYYHse\n",
+       "y/ncxJpJukzQi8TGtSw6TnKSbGQwZw4O12DDWZazM9syXRwEfkCSLlnH50TRlLOzM871OVpbeG5o\n",
+       "OmXpRloPltZ8rmmEMQN0qupWcR0EPSzLxnUVrusxGDiiFjbPZ/u8Nk2D5bhmmLshSd5+I1uuVlgW\n",
+       "2J6P1g1V3RibBIeirHCUZWyjrY5ajHFPbJq6Y8SIoMeideRU2CilaaoSS4nQp8hFiPSw8VutS5TS\n",
+       "xMmGIPDNc1XTNJrpeILvBybgIWSxWFDkuUxULGVsDGwspQiiqHsOq6oylhc2gR/8xH30p1iBy5S2\n",
+       "UdK/tq5+TVHhux51VZKWBUUYEXg+f/H1b5CtYmbXxizOl0yGYy5ffRxtWRR1RZJnVGlKo2y0BctN\n",
+       "SpKck+clnnfWJe64tlQMJ2cLwVsHA6oGev0RYSTm+sp2OLh/iOO4jEcjhuMJcRJTFoKll7rg/PzM\n",
+       "eBvb5FXB8y+9aBLGf5X3vuc9ANRFiefYNHVtNm9F4AvbJIgC8iwjN+KFRgv3tdYNtvNjrDM9vxti\n",
+       "Wh7GFdG0h8ruHsYai6ZqcF3fLMj2ZH9oQGyLRwlo6rI2D3/Jt7/zHZ77/gtMtsVD2g080DWu1jRl\n",
+       "Tb1J2N3ZZm9v3yRwh0xmW/zJn34V2/Xwwx4vvvwKWmsG0ZDtrRm37hxQmir8nY8/ged73Lz5Jrdu\n",
+       "3aLIc8Nfl8rl8uWLXLlyxbADpK3NsoyqFCjBsi2Go4HQrbC6NHbbEajEtm36fck2XK/XfO/b3+aR\n",
+       "R66wY4zIPMuEA9uQJCmB66CrnDjdYNliKBXHMePRGMt2qYzDYd1UFFWDVSrKusQyEWfecIjWkCY+\n",
+       "2SbF0hAvVgBYlsIPAgLXI4szhr0+qe3g2pZAKU2JasTDRNkORVERBQEKTZ4IJbaoS0bDHpOx/Bxl\n",
+       "2ezv7kkwdBabTVhRlDWu54kDoutwenrK3t4O1DVZneMHPl4YkdkO/zdzbx5k2XXf933O3e/bX+/d\n",
+       "07NjAAwwGCwSCZCgKEI0SYnRYomxaFouypFV5SQVJ7FViWOpUpWSaJWrZFGyLVmOo1RsWZYUaxct\n",
+       "0lZICVxAgARB7Nvsa0/v3W+7793tnLgBUB8AACAASURBVPxxzrn9BhtdrnLRl4Wq4fR0v9fv3vu7\n",
+       "v9/3911kKZGhrB5erieIWwH9/h5KKs1vHg+p1SKuXV/j+Ik76LSpsOCy1EyhJBkjy5JCJdWuyMKP\n",
+       "nqdl7L7nUas3UCij0ch0x+wGKPS0XWYlEwM/+V6A7wdkRUm3pXUC9XqEEG99T3TbczrOLh3jOS5h\n",
+       "oLv4fDIhCLVLY6lKFBKhJAIBruasC08iKaCUlDJHCEd7FakSJbVjpnA0k8YRWoWZZRkIgSxLU+w1\n",
+       "DBN6ggJdiKMoJvAC0qwgzxOiKKYscnr7O2RpyuqhZeo1rcrU1EcPqaAwcZCe5+G4wjz03jkT89uq\n",
+       "xCyNSskoFvRSKKiTTVLAoV6r4cYBN2/c4DN/8ic89l0f4O5Td0GpuHnrFllaIFyXRrNBrdHQvGUE\n",
+       "aZYThCHdmdlK1trv97l85SpJMmR+fp6l5QWsrL7fH5BnBZ7nG8MbYXDVkvWNDWoNPb41203iOOa1\n",
+       "11+lVosZjxOUgCefehLP8/i7P/VTRH5AadggnpGRuwbftF1fmqZ4pWZbWIMd+zXgbdWYmfFMtxJz\n",
+       "JfUCz3phpCZfUL+eIApj8kIvmLRoSS9otfWqpMgywiDQVqOuw0uvvMbT33yamdk50jyjVBJPOJR5\n",
+       "iSsE/f6AY0ePsby8zJEjRzl16k6uXb9BuzvDj/6VH+XZF19kY2uLKIrp9Xr43YCd3T2ySULo+YRB\n",
+       "wOUrV5hMtG/G0aNHOX78mKH5NXXE2XjM+vo6X//61+l2O9x9150URcHKyjKTyZjJeEImdc6i5wYE\n",
+       "vo+UMBwO9EQhBKNkzMbmBoPBgHvPnNZ83yQhDAKGI50RmeUZrucQxwc2CEoUSAlxHBvK4YBkot0T\n",
+       "a7UafhBVGapFkVeUUlmWrN1c48iRVea6Mxq+Mx2n67jkeYFwHMbphO3tbYTQN22r2aAoJDUBVfq5\n",
+       "41HkOWWp4bHROOHw4VXQtwj9/Z6mijqu9rxx9H1T5AXC0+k9Yagf9Lu7u8zMdnEnOowjGSZG/aiL\n",
+       "gu9r9a7Gf11qcaS5+EAuS8Ig4MKFCzz8ru9kOBxSrzcMzKCo1eoGx47IjExem1zlDAZ9ze82O5g0\n",
+       "zUxnXhJFATg6kSuMIjxXIFyPuVYLlGA80Vz8IHLZ3d2uOOd6ufnom+6JnZ0tfX78kDxLjTWtvh8E\n",
+       "ms7nux7CtYZzIFyFMBERSpY4jiJwdV5lkecolJ5004wwCphMEqQsK9KFffBZXD0MO9RrMcrX5An7\n",
+       "gPI8H0fAZDJmd3sLz3VYPHyIRj1mYqYSOMj/PbDlvT1i8h3r6Dt+9T/zEUeR7gTNcihNDU/TZNhZ\n",
+       "744L5y5w/9kHaNSbhFHEeDDk2LFj5HnBcDRikqak6aRiJGSpFjRsbGxUvhea/jfLeByjkJw7d67y\n",
+       "xuh2u0RRTJbmFc0wSZJqg47Q3fHW1pYZjXXnoBkpL/G93/thzp49W+GR9oR4nsdoNKrG74ObX99A\n",
+       "1kLAQj6WMfN2xzS90qaweJ5fLUIjwyIQmDgmWVTS5izLjEjCNR2rR+TUKKX2TCnSnCe/9hSdTqda\n",
+       "wtSimGTYAynJy5zTd59mdnaOBx54gCiKGQ5HLMzNc+XadU6euoszd5/m+vXrZEnC6soyk8kEWUpW\n",
+       "V1dpNxqMkxGDXsqhQyuVsvXmzTVwHOpxzOLiovHunvDggw/SbrUQQsMdFy5cZHZ2hkajQSw8XNdh\n",
+       "a2uXV8+fN0VRT1YYpsV4nHDmzH14kU8/0VBSmqQ06g0muabROY6DErpDK4sSqXIcR9MZ7fkLfB3N\n",
+       "JaU0BcLSRl1arRrtVp2bN28Shjoerd/rVfQye46FgcnKMufYsWOMJwmjZESZKBR61+J5AWEYMzGv\n",
+       "IRyBB5TSZEf6Lo7j0mzVCfxIS9BVDkjKstAWrsLF9Xzj/CdxhMelCxfodDvU4jq1WgPPdRmORhpb\n",
+       "VSVFmeIIhzRLcISDMCZZrueCyllZXuDK1cucPHlCd/tegDIWA2UpyYsRpczN9efjumFViKZFdeYC\n",
+       "ZjzSC+Qsy+gneyj09BlHNYJQQzOu4xN4bhWwAgfakDceURRqHcHIRMONU+0+6ek6YF0OhdDnwvU8\n",
+       "8kIn9SChLHIyWYCUGopX1stfw5CjROdx2lzZOI4rG43piUMIRztPep72iHcdgtBHlZL9/RFZlrGw\n",
+       "MFdFxyl1oEC3DZuFjqah5P9iC7hmfhQ4dhnnR9r4JxkT1mJKJZEoJonkq088yUe/93s5tLTMcDAg\n",
+       "n6RMJhonbHdazAchUklu3ryuucNxg6NHFyuq2cbGBltbG5piWAs5dOgQx44dZX9/n1u3bnH+/HmE\n",
+       "0EupkydP4noCR+psvs3NTQ4fPoRwhNn0623+n/37/8Dq6iE+9bM/x87uFplZ4nRaLe3oZkQLQCUw\n",
+       "sic9iqLbaHz2/0sp6XQ6b7u4sKIdO3pZAYrtCizP2Rpe2TR5eyFYxZ5WhkVmMTchDCN+4Rd/kbmF\n",
+       "ecaTCa6rp4d+f59Os8n+zjYnjx2jFtVotTosLi5rYUhWgOtw/31n+cY3v8nd99zDxz/2Mf7vf/n/\n",
+       "UAsDsmxCv9/n1q2Cy5MJvuPy4P0PkOc5e70+Fy9fYWZmhnoU4wiXF194keFoyNmzZ+l2Z3nlpRd5\n",
+       "7bXXmJuf4b3vfa9hLaQMhj0ju8948MH7GY/HdFpt8ixnMByQG/x1MBgQRCE7e7vVgyuSBckkOfA/\n",
+       "VwIXF4mkHscURcloNDLdkaReq1EziTS2IGmztTGDvva4DsOA0ajP/GyXnZ29amlsYYXM2NcKx2Fn\n",
+       "f48w1BYPSaKX8LNzC2SZpnmmk4lRE1qjtIgiTcmnLEmRBVHggfBI84k5t05Fz5NKEMdNZJmzsrLM\n",
+       "yy+/zDe/+Sz9/pC77rqLI0eOaCqt6xqzsE5l8SCM+KUsc5LBkDP33M2f/ulnOXHsqPYZaQW4vo+U\n",
+       "AuGijZ1cu9QsGE/GVYMCVPstu4Bv1Rt6WSl14IP1C0rGCYNBT5vA5SWlLPGCoGps3q6QNWqhlqgL\n",
+       "jSF3ZrpkWcpoMCSOQ1zPxgJisOWCIHTNA7vQy1NHkJeSdDyhKHSAjCxLklGC4+l7ajLRexc7MVu2\n",
+       "C2ilrOM4eEoyNHunVquF6wg9MZqdRBD4DIdDswyNKiol3F7EbQMIBySPtzu+fV4oj/8RDjoqqchz\n",
+       "BC5eGJCVBaUjIPBwXJd/8ulfZX52jtOnTzPTahNHIb5z4HyWpikjE7zQ6WhDqMkkRZaS3AhBXNc1\n",
+       "KrfMnIyxkV/brzuMjIqq3+9XjBhbHLXgQ3/Yr7/+Or7v89BDD3LmzBnTPblMUs1RLvO8KuC287AG\n",
+       "N7aLCMOwojfaRdp01yal5N4H3/+mz+2V575cCX1st15BKGYBpJQWm9gUo2lxlA721c6PUimGyYgo\n",
+       "jnn2+Rd4+utP0+522NvbM+nYGZ7jsLO5wd2nTnHnHSd5+N0PM0xzbly7zrFjxygLXXTyvCAvNd+4\n",
+       "3mqwsbnJa6+/zqUb1zT7x3Xp7ffpNFscWjlE4AeUpTR8dk1hS4ZDfM9jcXGRvMh59dVXiKOI1dVD\n",
+       "zM3Osru7w/7+PlEc0mrPYu0OyqIgNF40nrlRPVd7SO/v7XFla53V1VU9/RQlYzNZ+aYoKAXyDfCV\n",
+       "MinseiFYGEMl7btuoT7Pc8gz3XnmWc7a2hqNepN2u43vB9V5CYIAZZWFjnbWk7LE8RztYNjr02rp\n",
+       "MGiFMLmvwghg9Dkr8kLTNs0kqZ0aJY7nGBqhdgjUKT1aWSlliSoLNjY3ePXVl7n3zH0cPXrciJoK\n",
+       "c+04hrpZGIjAsYQQyjKl3tBJPD/zM/87P/upn2M0HBOGMaUEPf0LfM/HcYuKaeK6XlUwLSXQxhJK\n",
+       "KZEThXCUoRfqh5SUFkpwzXnQfHEpXBxjl6uU4uFHP/Cme+Lxz38Gz/dR8mCB6vte5SMjZYkwKVuu\n",
+       "o3MtM3XQIJVFwWQyppQlvuGpC6E7cwBJSRhG2tDNQJNKYX5PTVH0DW3RjfwKpskz7X+jlCQOQ7rd\n",
+       "tglO0ZTFaWbltC2Gvf+r91eWPPjwf2Iiz3/Ow3VdPMfBEw4qkwRhqEUqYUwuoDcZ8bVnnmZrY4sf\n",
+       "+aEfplarkSVj9vb2GPUHzMzM0O12abdbBKFPqSSDQd9Qj2I6nU6FUe7t7dLva7724uI89XpMkSsm\n",
+       "+z3Wb21Qq0cV9afdbjMajdjc3DTCkRpBoPMOP/vZz/IjP/IjPProe/VirSgMXppXwaWR8diwFDXN\n",
+       "l5WVcMl2cLbo2geRHc1Ho9Hb+oHbFKNpWpYVLdj/7OuOkoTA+KEoKQn8gKzQyTul0rBJrV4ny3PO\n",
+       "nT9PXK/RHwzM5zbGc122NzZ44L77EAruv+8Bdnd2kV7AnXef5rVXXuGhBx5gfX2dWq0GyuPm1k06\n",
+       "nSbzMzMc/Usf5IULr/HsM9/k4sXLmobWaNAfjWjEChBEUc3ALKVmHPgeN9dvsbu7y8LCIkeOHGZ9\n",
+       "fZ2nvv519nZ3ec97HmF1dZWba2t0Ovph02w0CEKPItfqOseB3e1d9s3Dt1GL2d/dIU1TDq2sIIuM\n",
+       "erdNnmVQasy8KHUhDqMaNpZsMpngOA61WkSSjHEcj3SiVXX1RoM8LZiZ6SIl5J7D8vISQmgmQ5ZN\n",
+       "SFOjjEWQS72obzRr1BsNLaQa69SadrvDYDik3Z1BlpAVKZ6r/Xs83yHPs4qWd2ttTecrzi9SrzfI\n",
+       "igypNF/ccQwVF71zKcuc4WjApUsXefe7383KyooWBqWJYVlIXMdcjy6UgOtafxItBivzjDAKadQj\n",
+       "9ra36czO4XshwvEpS8VgMGQ0GpKlB4ZgjuNoeqNwqonTD7RHvuM4+IEPQgu+SmwzEiFLzeqZjHXI\n",
+       "h35fkRHaBPj+W5eqZlMbUvUHY1zPQUhFGGqlaJ7l2pNFKVCSUjhIAaUozINUR5c167WKlWKVkCgN\n",
+       "VZbK+M4Ijanr2mXphMJg6Pr3HBubYGs8VpbagqPZqFcTuYZRvQqSsfe+Ldp2IilN4/N20JE9vm0d\n",
+       "+Fe/+McIqaCUCKWxt3GW49djMiTDLOXX/sX/yXvOvIvl5WXiMMRzHeZnZqvuNk0nFc85CANcP6zE\n",
+       "BVmWaRpaHN/WTY/HY3QQbFB9rZR5RXfKsowoCqufm+c5Vy9fpiwLPvzhD7OwsMBwOKzk2QdCmvL2\n",
+       "LlhoSa+lg9n38EaMy1IG7ULEjp93n33zwub5p//8tgKuT672MK7UZ0WB62lIQJkuV0lFnmV4nmYp\n",
+       "ILRc2/FcnnjyKb7xzDM60Xs8xnEEeZoii4Juu8Xy4iL3n7kPWZQcPXKUvvGc6e3t4zlCR4OZ9+x4\n",
+       "ggsXL7C6uopUikSV2iB/aYlnnnkWFIyTCYdWVrQxWKE9srUHMuR5VvnA1+ox29vb7Gxvs7S4wNLS\n",
+       "EqPRUAuE4ohjx46ytLRcObr1ej18z8VzHZaWltjb26fTaTGcTLSFrBCEQcj8nDbd91ydoen7hrIm\n",
+       "BJmRxetzYiTZpRZw7e/vM5lM6O33GA4HFMYA6q677mJ2do79/R6HV4+glKDZbDFOElzXR7gOwnU1\n",
+       "DbLMyfKMuKYl9rrDz+n3h+zu7dPvD5FFSafTNr4gEcloRBD4JMkIm+MZxTWahmdeb1gLAcdIygsT\n",
+       "SCwYjQbs7Gzz0EMPUpjp0/5+02Zv0w9+Pb6bgmecHP/k332GpcVlTt112gimQDiaJ+37gdYXTGVX\n",
+       "TlM0LeVOR60Zl0GUEZQVCJR5CB10tRZb9oVfdd8g+cBHfvhN98Tjf/b7pGlKJhXNZkMrTpXEM9OI\n",
+       "/d30VFFQlgWeESpZuMgWc2n54wd1CsmBUM4R1lrDNf+Zz916q8uimpDzPCMMfRpT/PXpmjBt6FVx\n",
+       "v123ahxsPXAch9P3f9d/eR04pgvE+NCUShE36qSGV/zqs88xPzvPu77jIZTU0U+3bt1i7cZNwiAw\n",
+       "KeQNut3ZqvgWMq3I/0ppEvxgMGA00vaQMzOzzM7OU5Yl+3t9g0f5uJ5mo+ixEqxp/+c+9zkWFhb4\n",
+       "vu/9CIdXVxkMBlXBtjl+9kI9KMjubU9PezFb6MLKh+3XLVY6/SDV6s83H1ZqP/2gsBFm9iFhT7pS\n",
+       "CjcwnFI76uc5RVpqDFMoKB3+4vG/4MSJk+zu7hor2xGqKIjCgHarxSMPP8zS4hKTUcLzL7zAvQ8+\n",
+       "hOM4tNstrl6+zNzcXMXi0IyJw4yShJMnT7LR79Pvv85jj93L7k6PT3/60ywsLJKmOcvLK+RZrgUf\n",
+       "jgeOwgGaUUyjWWewr31Yao06e/0+61ublGXJiRMn6HZajMZjPvvvP8fK8hK1WlzBJAq4unaTsijZ\n",
+       "2tsxwinF7MyMzoscDDRsJhwKVTBJTHBuEBBOLZEcR7C2dpOXXnqZ4XBAZBzoTp8+zZ2nTjI/P0tu\n",
+       "PFliI2WXqmQ4GBKGgY5Qcx2zFMuNak8QBgHpWGsAdDTeHrfWNlhYWGR2ZoZBf2jS6TU+7Qc6xxLH\n",
+       "JUk1TLV3+XoFU8zPzhCGEQsLC7rL9jyisM5gsE8ca98eWwwC362aEoTA93SAcJ4dXJvC9yuqIAgm\n",
+       "k4xTJ+/kytWr3Ou5ZKqkXquxv9ej1miCKhHK15L/8sDPvig1pBX6AZGvE3ekLPHqPnmWalqf0MAR\n",
+       "QkM+2i3QMQ+XAhdQ0vJe3lpS7rkabpV5iuuUuJ4JJXdd49QIvtkRSOlQli5KaStc0AIehUC4Lq5z\n",
+       "EGFn72vHEyiJdh4sdbdtd1plKfE8HSYCepqTBhpq1utm+i5wnIPkMd2kSYpiUn3mFhK1E3m1y5AH\n",
+       "hIi3O75tBbyUBbLQeK1nsLNJlmn6zjDliS99me/54Ifo9/YojCXpyuKCMTXSktUrV65q/DiKqdVi\n",
+       "HN8sNByHPC8rBoYOI85IkjE3bqyhJJXfr8a4JyB0Yd3e3mJjYwMpSz7+8R/lnnvuAVmSZxnzc7Mm\n",
+       "307SajZJDKvDEQLPWFLmWVp98NOduGWhTHfgVlBi/256jHqrY3prbUcr+/S2HVSWZTiug+PprsN3\n",
+       "D9gvYRjiKl0Uk3TC5/7Df6DT6bK7s0tZ5CjpaM6y4a0fO3qUxaUlBv0BoR9w/ORJNjfXWVhYABSH\n",
+       "jx7h/PnzHD58mFJpUYjEY7y3z2vnL7B46DCD/pBP/dw/4PkXX6TTnSXNcp59/gUajRZxVEcqgesI\n",
+       "0iKvFrTj7V3yLCWIa3Tn5tje3mJ7dxdZFFy9eo2rVwt2d/fodDpkZcHpEye4fPky/UG/Wu62Wi1m\n",
+       "5+bpdtsMBwOuXb1GmqYcWV0ljGM8z8HlIDgkzTJGw6E2IRqNuH79Ojs7OywvL3Hk8LtMEHW98oIf\n",
+       "DYd6JPY8At+j225RKkUUzQIlUeybsGCT2q40Nl0U2iwqGQw4//o5Wu0Od999J6Nhwmg4QgjY2tqg\n",
+       "LPUyVZqRPssy8sIYMwURea7pb5s7u+zu7iPLFzlx/Dgnjh/HdQV7u9ucO/cax48dpdVuEYU+rkMV\n",
+       "YG09qx2zjKsYUVlGFOkCXa/XybKU+flFnnzyKTY2NlhePkQUBMzOdkDppHad26GxY3t96kJuMmzL\n",
+       "AlUUFEXG5s118iIn9DzCKEDL5g39VTgUpfYpB6gFmg0kpTSahTcfjtCTXD2KCM2C3jFQln4HUBYl\n",
+       "RalhE4S20z3o7EX1c7RLZVE9wC3GXRQF9XpoKJiGO++6uK4wNMmJKbT6Z9XrddPcWZGiwjo/uq5v\n",
+       "7tv8NijUptnbGmAbvW91fNsKuCxKXDPSlwqSyZhmu02a5nzl8S/h4rK6tETdc83iMaUodWcqhKhu\n",
+       "KBQUueaaamWeIssK0333kbI0kucGruvRqDfJ85I8T6vkjNzkU/Z6PebmZnj44Yc5fHi1YnEEroOS\n",
+       "esPseR6qLBkYTF0okFIngkgpkWjjdjsaaXeytBqtrNzddq1we0G2I+1bHVauDFRK0+mFh/anjhGu\n",
+       "AEfhCo/SeCvYaSHPc/r9AZMi49r160RxzKg/xPcDlCyZTMZ0Wk2U8YnY2dmhUW+glNCCDNfh4sWL\n",
+       "3Hv6Hh3aurRIYRRtjqdNpY4cPcq169f51V/5Z6xvbgIOy0uH2N3bo9XuMD87z/rGJqfuOIVrOi7h\n",
+       "OBRSanxe2+4xSSdsXbvGcNBnkqXMzc4R12pEAczMdukPBly+coX+cMQ41dt+rQwN6bSHvPDa6zRq\n",
+       "PgsLCywvLVPkBbuDHju9PRbm5omMh0c2SYnjiPn5efb29qqH4qlTp8yCyiFJEiaTMe1WC6QkHY9R\n",
+       "RYkf+Gxvb2tec6izL/f29+h0ukilF3a+Z+TdjrbfLcqSSxfOcfjwKt2ZWfb3+pRlThwFTFLN397d\n",
+       "3UV4Ltdv3EBKRac7w+bWDgqIa03a7Q47W+sIVVYuj6+9fo6r165Ri2PuvedufuAHfwhZ5ly7fpO7\n",
+       "7zxJbhgr1q9k2kfHWtQGQcAXvvAX1GpdwtDXDZEL6SRjPJqwsb6G42iKXr3WQEoFjovjOtU1af1i\n",
+       "tIeJLppBqKGfWjOikDmesGZsOZ7bJk0zlFTEcQNZlgwHA5qtCPPc4+2Q3jiyOZ4Sx3hoG3NRMxFY\n",
+       "GwHt0KlQqFIile6+i6kJ2H4GjuOA0hGLOqwiYjLRtafRaGhPpNJ6nGs4RUpJWUi63TaOq+EyKQ8g\n",
+       "En2vgpQZ3pRRlX1Ne9jzouPmnG/JQvm2FfBaXCMvJYVSSAVxo844Tdnd2eOVl1/mse9+DEfCzvYW\n",
+       "QRjSbDaN3FkZwxhN14rCmHq9QbcbkJZaGDQea4bH0aPHCAKPW7fWuXHjBukkI44bpsPVT71z586x\n",
+       "v7/Pmfvu4f3vfx8nTpwAVOVgGEY6H3JoQpbtU9MKcsCpiq4dgcqyeMvxCKhohMBti5/pTfTbc16j\n",
+       "ahQDpgQOB9a4vu/j+m4VeeUgKJVkf3+fer2pxz8l9ZShFFIpjWMiSXNtJl+La3zndz7ImXvOsL29\n",
+       "w8VLFzm8epRSlsRxyNmzZ3j15VdYWTmE7/tcvHiRE3ecZDIeM7ewyJe+8gS/+Vu/Ras5S6PZ0osh\n",
+       "pfjAY38JqwLd2drm6aefZraroY1ap2Xee6BtNR2HmivIshSpFLks6A/6NFtNkmSAVIpH3vMejp84\n",
+       "SRhF/NIv/zKO67G9u4vn++wP+oySBFTOxWvXadTruI5Lo1ZjptNlkhbMz8/TbDQogP1hws2b62YK\n",
+       "2+bYsWNsb28z0+3qJaLvkoxGnD93TusDanVqRj7u+wG7+3ta7NKsm65sXE09OrRAsyxq9YitLU07\n",
+       "PXxohSSZMDenMzSfeeYZQKsoz5w5Q3umy4+srKAcVwcxS4Xnh9xc28D1fFQxoRZ6DIdDbq3d4tKl\n",
+       "i2xubRMGPtevXeW55+a5//77WFpc4Oq1axw9vAIcsKLszmU4HJIkCRsbG1y6dImPfOT7cZ0GeZEa\n",
+       "PrPLytIyL7/8EotLp2nU6iYfNSaMIjAQXlGUZprUIRPatvjAu0cIEBICJwAhUdKaj5U0azUc4ZKO\n",
+       "M1whWJybZ1KMqvvh7bpRawdgGyKLtReFhoCE4+IIUMrClRJVAI7+XlVqurK9r+w9ZDvvuF5HypLZ\n",
+       "2ZkKjrWvaVkv+r52CX1fC7eyDKkKXN8FITU2X0ryvGQ8ThmXE8LwIFJSO0n6twXb2ObsW7kRftsK\n",
+       "+GQ0ogCE54HnkSQj0qzghReeZ3ZmhpXlRYo8p1Zr4DiCoTH+j+PY5FV6ZuOr6A97OEKQloJmq4lf\n",
+       "arvHnZ1dTf8D4lpMGIWAYG9viysXL9JsNjh16hgPPvCA/j7fI5uMtVOY5+IailOaTypmiOV/WjaC\n",
+       "VJr9oFVdumstS6fqlKcx+Wl+rL1Ypsn7053AWx2FKfjSLGeEo5VkruvhefpmHPQT3Q15LqqUCGOY\n",
+       "1Wq1yc2CyQ18zp87Tz2IGCYj3bM4gnQyZqbTolmvc3hlleFwqEMa2m2GozFSKbY2t5kkE5ZXltnZ\n",
+       "3WH18GHuuvdu1je3EI7Hv/w3v8O58xeI6x28uE13fonZmVnCKCBLU+JaDFJx9Mgx7rrzTjOCSwZp\n",
+       "j73dHdY3NkmzlFa7RS2uEcYB22vXGAz2EUjas3W+/7EPs7i4iG8ZP7LkPd/5EE88+RRz7SbD0ZDR\n",
+       "/p42vCpLxr0B/+RffZrrV6+SjEb8+Rf+nCdfeBElBLVGg6XlJWqNOh0vYOXQKuzucvnaNY4cPYrj\n",
+       "uly7tUZRpMRByOLKovbKzlLW+lvkaYErBK1Gk4X5eTzHYTxK6N3aZtTXXuROPabV7SCUIs9ydjZ3\n",
+       "eNd3vItkpOXyw+EeZSnptlusb27ywcfeR3dmhmScQp6T5WN8YVhbjuDE6hI4DkJIHCRZ1ubo4UM8\n",
+       "+t6HAcX6+jq9/T1u3Vrjq089xfqtWxw/fpQ77zjJ/fefZWlpkd3dbU1ndAVe4LF5dRPHdfi+j36f\n",
+       "gQKHWuxCQZ5BFGsVa6fd0YEbfkBc0wlSjqeFVZ7nIqVZgmIKtrDBxMY+1qiu7UMNBK7naRk5JW6o\n",
+       "cfpMZlVEIOrtEjGpdguFYX84BkKlFCB0xy0NtRKM2tHT0nqE7tcdBcLxdRgy4Lh6iV2WIEzA8P5+\n",
+       "D6V0DGGaptUD0DJOwlArsEfJCNd19ERZ6GI8Hqem0RKGA041pVuoZHoCmG7g3g5Otce3rYDPzsyS\n",
+       "Fjml45CXCkRBuzPDc88+x4c/9CFQJqF8NKTb7RqurKbS6TQT3UnEBs+M4xhSyauvvkIQ+LpjDzQm\n",
+       "NknHSKXhgSeeeIIwDPn4xz7G6uqh6mmXpWOUNEn047L6QJVSxsjGeJbIqdDlLH1Tzt8b6UF28Whx\n",
+       "8OkN8/Qychrzejs3wrI4EP/YnxUG+mejtE94FIYgMIIcD0cqkPoi1kyIgmatzksvvkS33Sb0fITv\n",
+       "0u/1aLVbDPp9Dr/rXcY3PMF1M+1v7ugg2IX5BQaDARcuXeDs2bOcv3SB1cNHyKXiZ37673PX3fcS\n",
+       "NdrcdedphFej1WrRH+wTexGR6+MIBy/0QEBeKEqz5Y+jCG9hgSPHjvHSy6/QHwzY2NqiWYtY37zF\n",
+       "X/3R/5rTd50iyyYs1ef0BJQkun+P9wAAIABJREFUBIGnHSG3t/EFyDwlS7QQJ/B9lOtx+eo1Nm+u\n",
+       "0Qxj5lsdXnvpZXAcGp0W/eGQ/nDEVm+fr507j+O4tLtd7n/wIc5dvMja+i1muh0W5udpzXYRnsu1\n",
+       "m9dxRjmNWp3IcVlYWqLX6zORKfuDPl/+8hPUmg16g76G4fZ7pOMxjUaTRx99H1EUMb8wz2Aw0OKy\n",
+       "qKuvmSJjbq7NzvY6WZqwsnKY/d1dZucW9VI+TUmHY4IoRHguSTLC+rzb68nzPGZnu3S7bY6fOE6t\n",
+       "9iEuXLjAFx//C77y1af4s8/rLNYf+7G/ius4FHnG008/TRRFvOeRR0DBaJToYAMOpkMpFd/93e8n\n",
+       "GSfmvtOWExbnthzuMPRus4W4neMstNLTsrCm9kSOf1CibWlXpV5q2iL9lofQalbfBJ/b+6s0y0NV\n",
+       "vbbBlYWGF5VSVZKiQIDSwd0C/ZpKoaccdcCr1wKuSdWIWefFdrtz237L1gPfO7j3LdXQ3vP2vRZF\n",
+       "YXIB3Cp9apoS/K1w8G8bjfCpL/wBSZYS1WokaUbcaPK7v/d77O/t8z2PfZDID81CJTAp22W1CLTj\n",
+       "kv37fl9HrHleWHmfKKXpX2u31hiNBiglOXr0KHefvpsTx4+TjccV9Uln/jmVsAYOCrXnaZP8sjhw\n",
+       "TQSjrjQmUXZxZlNxLN48/WeLZVloxb7GtBPjgSeCw+n73/emz+2lbzwOHFjv2uXWNKtFy7D1Re9o\n",
+       "TzVAUALSEUgB/eGI3/yN36BmEkmcUGPljpKcvvOUNp06dYfuIkrN0W+021q0oczF57rcuLlGWpSc\n",
+       "v3iZP/6Tf0e92aUzM8/S8iphVAPXULWkxHEEge9rvw0EspQ6SsrR00IuMzS9rGCSTQyXesT+7jat\n",
+       "Rp2P/5WPMRr2aNTrjHd7+MYFbzAY4Idalv9L//gfs7G5UQmZBsMh6UTjvQ/e/wBHDq2CUnzjmWd0\n",
+       "RqLjIny9PO/OzrK6OMurr7xOXhQ0mm2UgJWVFWZnZ80YXjLs7WsvnahFvVajkJLN3V0kilE2IclS\n",
+       "unNz4DlEtRqD4ZC4LNm4ucbi4iJhGLGzvc2DDz6kl3tKC4dsOtCNWzcZjxLa7Rnm5hbw3QDfC7Sp\n",
+       "luNo6aNQlEp/plakYmGR6evKFghbJPf29rh65TK93j43blzh2NEjzM50mZud444TJygM7c9zPRNH\n",
+       "pqrrKs8LhHDY29tjZmYGpXRD0Wq3qyAFq3uY1iXAgQWE/nkHTYuFCHTBfvPU6YoDVgjAw+//oTf9\n",
+       "m6e/8hlzXx0oNQ9gSM2ymZ5oNa48Mf/O/nsLrxgxkbEKEI6D46iq3kyzQuxkPQ2pWkjqNjvsKWjH\n",
+       "ft807GN/nqZbFtXezBIPhBDvGGr8jh24EOIw8BuAph3Av1BK/RMhxP8B/CSwZf7pTyulPme+5+8D\n",
+       "P4HWBvyPSqk/e6ufPZpMcAOfvJQooRdjzz33PB/+0IfxPJ3Con8Z7VPieT6TyYTd3X1GIw2n2Kg0\n",
+       "vbyrMTRbfCkLLl++zI0bNzhy5DB//cd+jFpdjz6NRoM0TWm3tTLKFlhbEO0FYP+cpinSdL6BkfZa\n",
+       "/xLH052w5ZcD1c+zJzRN0+pk2pNjBQ52uTl9gpVSVRf/FuejOuFWMGCl8faJrpTezkgUnhdUr5ul\n",
+       "GY7vUTqCJ5960vh9KKI4okCSFgWh77G/v8+HPvhBBEoveNAMm+3dPa5eu0ozDrnzzrtBOBw7cQef\n",
+       "+dPP8YXHv4zjN1g9fArHj5lbOMLNtTVm5pv0+j0WFxYZDQaEcZM0L5BFaRRpZmmTgxe2yfOU8aRP\n",
+       "u9nl8tbrLMx1ufjqKxxdvp8v//nj3H/ffSwcWiBeWGY4GvLiSy9RypLHv/hFXOM7kxkbAYSgM9Nl\n",
+       "Z2ufdqvFjRtrfPhDH8FV8MHv+RBxvcZnP/dZnnn2WdLxhGGvxxdefp73PvpdXLx4kdfOvU632yVJ\n",
+       "M86dv0AY+LiOw/z8LFeuP8fRB84gxnvMz85Q1D0aUczJmRPcvHKN5e4cWzdvIYY5tTRnN+0xuzhH\n",
+       "e6ZNkeXML8xxa+MmK8uHmIwn5sGrlbqLiwt85Ymv4jguy8vLRIEWjAiJnqCQJprMIfA8lHGjlEWJ\n",
+       "g6jOt8a2R5Veod8f0Om0ie+6i6LMuevUCZ568gk6zSZHjxzR+5xS0mm32d7dJa4f8Jf1tewg5cHe\n",
+       "xvM8EwqiO02llAnfOKASvtXxxq9ZLvZbHdOQwtt1otOduaXlVveSUU1OT8j63tb4+MFbsQ9Bk2fp\n",
+       "aGKCUiW+H97Gn7f3vxA6Um160rD4uRXv2TpgbTTs71BRFE3N0fm6siroWZZV/32r41tBKDnwd5RS\n",
+       "zwkhGsAzQoj/D13MP62U+vQbPsx7gI8D9wCHgM8LIe5Ub3GGarUGwnPpDYd05+Z48qmvcfbsWRSK\n",
+       "tVtrKKlo1RvVeJHnOcl4TFkU+H5Q4U+9fl8vNUcj0mREHEcEYcjdd57ixz7xcVqtlpbKliW1MEIV\n",
+       "Ob4jGA6HOI4WwdhFwjSNx8IkruviRjq1fBq+UEqRl0WVoDFtVGUFEdMnzXb4Qogq83GaA24vViml\n",
+       "LkBvcdj3Nt2x2++b9osQjjZpUoU0/HNtdKUcgR94XL16FVdoxWEYRWR5SpZnHF09xLC3jxA6w1BK\n",
+       "hTL4YqvV4uzZs4hSu61dv3mdJ576Ol/6ytdozyxxz5nTdOcWCII6e70xi4vHGGZ9ao0OgyQlipsM\n",
+       "kpzA83A8H4SL8hSF0ruDbChxXB/PjcjzgsX5BbY3b5COR1Dm7G3t87u//f+yt7PLsTuOMBqNiOOY\n",
+       "erPBoUOHWF5Z4Ttclz/5d58hScYoRy+wGs0mk0nKa5dfY21tjXd9x3cSeD5CwXsefoRLly4zmUyY\n",
+       "abRwjxzhheef576zZ8mygt29PVxXj8ajwQhZlvR6fU7ffTfD3T5ZlvP6Cy8ji5KluQVuXr9Ou9Hg\n",
+       "jhMncB2hI9OU4MjJ47ieDr/tJ0PyrKDMNX20UW/iOm7V0a1vbXLHHSe4ce0GtVqN1ZVVilLSbneZ\n",
+       "pBPjER1QlCXpeIJAJyMJoaetwrChHEdQCyNcNI7aWFpkd2+PWq1Gkgz1fiGMuOvOOxFSGQVjxLA3\n",
+       "oNNsk5g80+mRPo4j6vVaBR2kqY5ia7U7hpaoF/iWJ31QJFX1n5QHux6lVMUHn45TqI6pe8MW4Tce\n",
+       "B3DNgVBumopncyWn7xvfD6de4mAa1iJAOx1ouwJbfKc99ZvNZrUHs122fR/Tk8Ub8expDcc0lFqv\n",
+       "16vvsQ+Haej1nY53LOBKqXVg3fx5KIR4FV2Y3+YT54eA31ZK5cAVIcQF4N3AU2/6l44gzTLtQhYE\n",
+       "XLlyhTP33ac9HoSDchSJUUbap5jneRRlya31dS5dukRZlszNzbG6uso999zD6vIcYWi8iF0PpSS7\n",
+       "uzsEgY/jaMBLSlVl/tmCbOk6tjDbDtl2G0h9ku2Ha3HvMitvK6q2IE+PTXYJYQv3dNGdxuYqXP0d\n",
+       "yPt2VJse2+zJtj8nTVNKWSIcoQsD2u1NyhLh+VWknBdG2pkty3TobDLBDwLe+973Vuo6S+8rpaRU\n",
+       "2n88zXPiRoMbtzb46lPPsLB8lMNHT1FvzeD6TdJCEdU7bO/1CJuefs0so8RDKfDDBkWWUyIosxzP\n",
+       "c0C5eF6ALFPiuIYsBxT5BKEK/u7//D9QpindVgcXLfqJ2lFl4FXIkv39PuubG1y+epUkGZMXJaMk\n",
+       "odvt4rgBSuR0OjP8xm/8Jo++532UecYLL7zAAw88wI/91U/w6muv8fQ3nsZFMNNu8cKzz3LPPffy\n",
+       "Wpox3NvTC2ypF17rN27x/d/7UYrhhM8/9QXe/fDD/Jvf+S3e/ZN/k1FvwIuvvsTnn/gyJ+44wZHj\n",
+       "R0mzlOjiy6gsx3McZmZnKTJdFC5dvMwHPvABGvWY8XjCaJAAwpQ6Ra+/z8L8vOnoMqA0PjtauRqF\n",
+       "msY6Ho+rCbEsclqtFkVZWoiXLNOe641Ggyyb4LsOIgyYMSZWsiyJw4h0kuK7HulkghuYhd7Ugi1N\n",
+       "U2o1LYaq13Vajy3AnucY9gkcFGzMfWcLubityCHk21AED4ydpov9Wx22QOpCp6eEigGG9sVRhuoL\n",
+       "tqu3ego51f0r815tso/E88A34RW2+57WYli4yMJftqOe3mlN1xdbKyxN0Hbi0/DqdN15p8nDHv/R\n",
+       "S0whxDHgQXQxfhT420KITwLfAH5KKbUPrHB7sb7BQcG/7UiSBDfwCQIfTzjs7uygSkmZ5xQypdFo\n",
+       "kmc56STFZvcNBtrzeWZmhr/8Qz/A7OwsMzMzB1hfrkUKjqPpZ57v0mxov400K26DMRBuZbFqL34r\n",
+       "R69y6UxhlYV++lubUWvLOl10LddzWlJsT4ot/PakTOP4tlufHj3f7mK1i5Lpp7odtWznAOB4LoEf\n",
+       "oEqFzAtjD+qB45CO9MTh+T7JZILveQyTIc1mg/3dXc69/jqB57O0uEAUhZX/g5LaHEx5PrkU/MEf\n",
+       "fYaTd54hbnSJGl0KfMgVSng4UtCamSMvhxSFJIzqFKWk0WgbLru+8OJGg3wyQSFRqkAp3ZmOkx5X\n",
+       "Lp3jv/nxv0bkCXpZyu72FoPekLKQuDWfWq1OFNfodrvMzs6xuLzCI+99lJdfepW4UWdzc4ter0dc\n",
+       "q+ulXC1mf2ePT/2Df8DxoyeII5+5uTl++7d/m0996lOURcHnPv9ZZFky2+5w4+oV3vfIwzz77LP0\n",
+       "en3N9HFdDq+ssLF2iy9+7Sn29vaQLz7L9/3lH+T3/ugP+fFPfpJbN68z7g342z/+k9x35gwoxebO\n",
+       "JlIpWo0mnU6XZKhDb1949lm+9tQ3OHHsBPPzC2xt7rE72OHW5hqqlNRi7TlvvTg810M46AR2xyXL\n",
+       "JjhK+3orqUCV2mM8T3E9jyzN8DzNXhkNh9qmWenotX5vn7vuvIsiz4nr2lytFtc0pOM6SKPOLKHq\n",
+       "Dq05V+hrNWmj0QDzgBZCm0WhNCuLKfzcFtOqo0fhCN01K/M/fX8cLOmllFXz9k7LvIMdVGDuuQOh\n",
+       "nDfVEHmeQ57rZaKwcLJwDSPG3juKNNXmdFaVaRWxRaFoNDoVGqCVncWUwhrStLjNgXT6Pp7u4Kfh\n",
+       "FXtYCMUWc2t+Zwkbb3f8RxVwA5/8HvA/mU7814CfNV/+OeAXgb/5Nt/+ltWo1WoxyTOKPOOf/cqv\n",
+       "ko7HvPD8czQaOqk5iiIWFhZZWVk28lVdGBuNpr5wjBQ5y3RBUEoiixQc8IW+0BHadc9xXVx0BysN\n",
+       "XiwcXQhtGn1R6IAFGwxsi2SWZdTjWtWB2MI9XTCnL1S9TDUBAeYBYDsJmw9ov9c+AOyT+Lbu5C2O\n",
+       "adXlG8fCaQimkCWTNNWYqKNv4AL9ILHdQmm41qV5+ES1OrKUfPKTn2R7c4syz0jHE/JSsr27w9z8\n",
+       "ArV6A9eL+dmf+4d4QYNme46w0UG4IY4b4Xia8yyVInBdCimo1xqaSaAzJLAuidKEK2NUajJPiGOf\n",
+       "ZDhke3udn/iJH8d3JJ4rWFxawBMe4yQl8AKk6+huMQxwXY8SRZoWIFI63VnysmRuboFr126ws7uP\n",
+       "lJJWo8kwSbh2/QYf/f4f5Nb1a/zCL/wjbt68wc///M/zXd/1XexubXHi+HGuXr3G/Nws1y5f5H2P\n",
+       "PMzlK5e5eOEScRCihMRFct8DZ/nSl7/M1evXaDR0dN7+zg73nz7D9fOX+cVP/UPO3ncfn/j4j9Jc\n",
+       "mCMvS2QJG+ubOMIhCkIefvi93Hv6LPV6natXr7L84ApZOeZf/9a/5sjqKs1mk52dbWZnZ1CqrOCc\n",
+       "stTOeXEQ6jzFoqyMkuz1l0307iU3/75ei3E9h8LVCt1Go04UBiSjIb7rE3i+DgH2fIIwoJxaKk6z\n",
+       "K9JM2wBs72zhuMLoGmJGoyFK+UyHA1tzqIqBInQhtfTf2/FjwRv3dNPLw7draqZhk4N/Y6TwxUHH\n",
+       "e3CfanGR49j7QcOZjotJEvLMJCGqYm3j4qTUlh6TiaYV287cwhxZdsA6szsq+/7s7ymEqCiI04tN\n",
+       "oGKtTf/OrVbrLX/v6jN6x6/qH+4Dvw/8plLqjwCUUptTX/914DPm/94EDk99+6r5uzcd//hX/i/9\n",
+       "AQQBDz10lr/xNz5pGBVela0HoAyjxHW9qjPGpGOEvl+NaLoQHkAJpVSMh0NGo4Q4jqoP03VdSgQ2\n",
+       "C9CehHq9jnWis8wOS1NMhqM30X/KskS4TrVEtEZY7XbbjKoZg8GgwtWtP7cdiyxulmVZFYxrVaG2\n",
+       "03/jUa/XK6c8+8S2T3x7EYVhiO8E5Ko0RmEgXJfAdemNhuyb5bDFXO1+IK7V8FxR4Xq+oxPrvcDh\n",
+       "yJEjZHnBq6++xvOvXmY0Ljh2x2laMwtIEeCGdXq9Ac0wZjDYZ25uhltrN7nj1FF2d/aIwogyLwk8\n",
+       "46EsS1zhocoUzxV4voMfeGxu3GR3+xYf+5EfYLYzS7+3TZaVuKFevkrhgOfrghVq//jxOMUxdp5C\n",
+       "uHS7szz+xS+Za9PT7JfAw/dDmq0WFy9f4p//2q9y37330uy0edehFV5++WW9hItjFIq8mHDs2GGu\n",
+       "XLnMyy9+k/sfeIBWs8alS1fY3tnh0qVzHDl0nNOLh3jl1Ve5/NLLfOz7/iv++A//gO/+7u/m7gfu\n",
+       "49z5c3zx2a/zxEvf5H/7qf+Fe+8+rQvsKMH1fXzXIxklRJHOTDx8+LBmcTRmWbu5xsrSMufOnePo\n",
+       "4SMkSULDuF0KoV0cFYJSltTqUbUIK6UevZPBkHq9jos7BUNo9XPoB+SFvr4DP2DiuvTMZOt5upAz\n",
+       "TlDIapq095QfeNSiBqPRiPn5efb391lcXOLy5UscO3asSnsqy6IqhBa6sF2q7Y5tAdNNiUBKUT0k\n",
+       "bDMjDH49vZh8i/p0m6r54LW0pF4IpyqmtpA6wr1t8nY9XUCtuVgchwcmdOKAzWOnbB0YMr6No20b\n",
+       "KNs5Txfi6UJu64olPdgmzr7GuXPnWFvf4vVzl257z293vCONUOhHw78CdpRSf2fq75eVUrfMn/8O\n",
+       "8C6l1F8zS8zfQuPeh4DPA3eoN7yIEEJ9+Qu/r4uZ0PhQEIaVFFnvHjSGK4vSFJoJnuualO0D8rvr\n",
+       "6gsmyzL8wLqF6cQUu1S0F4stoHYs6/f71RPWPm0tp9YWc4AyL27DwCzMkuZZ5Sxmcfo0TYmiqCrC\n",
+       "dhRMkqRSYE5j19N4mH1fSZLw0CMfetP5eOkbj7/pwpheeNivTdIUPG3VSymRSqEcl0JJzl++xJ9+\n",
+       "7nPUoxiZa+HBeDJhaWGBTrPBT/7E32AySlBlQV6UKKETunVYbMgv//PfYn17l/vu/w5y6eFHDbJC\n",
+       "4UcxaTqhFukgh26nw3DQBwQOOlA2NCpLz3WQMiVNE8LQAyTnX/4m29ub5NmEh+4/gyq1m1uZF0bN\n",
+       "51Sq0dRwkQUCx9OZiyBwXI9XXn+NXk8vGDc2Nwkjn15vn9XVVfr7PQ6vrnDhwgV8z6Xb7rCytMg9\n",
+       "95xGScWXv/oFGs0Gvb1d5udn+NpTT3Ly5ElmZma4445TuJ7HtRs3WVu7Rbc+T6etE9GVgCPHj+F6\n",
+       "Hs+/+BI7+7tcuXIVHF1k3Szn/e97H5/4xCdoNVv4fqB1BcZ7W9vBGngv9vnDP/pDdne2qMUx9Tjm\n",
+       "wQceqAquDgMTlKVegI3HyW07G0u1tbS+MAwNbCHIsxLHs1Q6bdym909XGY1G3LhxE8/z+eD3fA+o\n",
+       "qUWgOaztLujvG48TBoMhMyZ+sG6StCzUZ7vIN2K5b8S17fLQ1IbqPlDqILVKSsl7PvBmN8InH//D\n",
+       "2xhk0z8fISgKWTVM9vORhSQv8koxDfDGHZSU+vdRiIqabAu4rT22ttjmDg6i9OxnYN/bNDxiodxp\n",
+       "GLWCfcx0D/qBEgQB3/Gej6D+E90IHwX+OvCCEOJZ83c/DXxCCPEAGh65DPwt88G9IoT4t8ArQAH8\n",
+       "928s3vYoioLJeMysMevvuB3GlhdbnVRFHEX0eiNqtRp5kTJKBrd5bKfpuOJbD5LxQXdsPgxrbCWl\n",
+       "pFB2zBI0a9qpDW7naNoFgu1o4zjGb3jV8s+OOfV6nbrTqLqfoihoNnVmpn1wwIG3SRiGt7kJTnN2\n",
+       "LY49zQF9q8Ni9NN0pMFggO/79Pt9Hcbb6RDXa2SldkXDGP0jjMy++kxy0mQMCDAKs+FwyK//+q9z\n",
+       "aGmZWhTiuh7t7gxxo0a706WQgqe+/gw/9MN/BeX6CNcnKySduQWuXLnM4uIC6SShVou4uXaNdq1F\n",
+       "vV4nScb4YUSSjAh9D+FphVscB2zcuskz33yayM3Y2thEFgWP7+6wub5WTWJRFCEc7deNIzi8MFdx\n",
+       "4LOipNfvGyWfIIhjhoMRfmgnkxyUfhgPR0NeP3eeoshJRhn9fp8XX3qB3/2D36NZqxO1PD7wgfcj\n",
+       "aZLlGY88+h6uXblCo7XKN59/hlN33smRE4eptWJGvYJrGzeYmZtleXmZQaKThJ57/nnSSUpsTLEa\n",
+       "9Qb1huDprz3JM09/jb/3v/49zp69n729Ps1m05xLSZqNDBSRcOLECV5+6UVWD60QeDr2ryxLBgO9\n",
+       "A6g1GhpOCj2iKDCT3ATP80iSIaAzJ5XSqUKOA/39Pu1WF6kKev0eMzNder19tra2COOIJ7/2FL4X\n",
+       "8gM/8ANIpfCc2x0zc9OsWBgxSbRKtyxLtre3OXr0KMPhkDiOK8Os6cOWAbt8PpCMH3S39to+gC/z\n",
+       "CmJ8p0ZTSllNovbBoRfwmgtu7zH7OtrAyzXkhmljK6oIxG63q4uqc+Brbt+/hT9tivzOzg6zs7MU\n",
+       "hWRvbw/HcTh0SIsEh8MhvV5PK3KnsPzpBuyNUKqFcqYXnG93fCsWylfQhq9vPD73Dt/z88DPv+Or\n",
+       "oo1mGo0Ge3t7LC8vV0k4drtb/ZKlHu2KXBdZm+YiS8lEpQR+gOf6lIXEd13CKKy6Zx2bZOxe0bxQ\n",
+       "0E/1wUAnh3uuXmQIBI57wLG273EygUGmL7owDEBBmurEa6n0T3Q9D4FVxh08DPQT1kUZc3h7Mxxc\n",
+       "NJpNEJplbpEf2L6+1TGZTPRC0nUrd7O4ViMzE0yr3QGTsei4AoGDH/ogHB0ZlU707ykchPBwg8gk\n",
+       "wEy0zD3w+e/+279Ff7+HK9AULNdlmIyROPzOv/1djt9xitFkQrPdwvNrjJKc0SChWW+RpxmNuEaa\n",
+       "JBxaWKEo7YNDf07NRg1XKFSZsb+3zcb6Da5cvUSRj7l2a137SruC7Z1dZucWtI1rXNMRY0oiHUEy\n",
+       "HnPhwjn9kC9LHNfXUm7PpygLiiwlrgUIx2U8mTAa9YijmMuXLmp/dM9jPB5Ti0Ncz2NxaZFH3vuI\n",
+       "6VJzhBQoKWi229y4fp16q4Pnx6wcOsyVazdo94c4jstwmHLXnXfx9ad16Mi7H3mEX/6lX+ZDH/kI\n",
+       "l65cxnEE3W6Her1BKCSz3Q5CCP7pr/xTHn3f+/jwhz6kXRjTlDAI8F2HIpsghMvZ+x7gT/7oMySj\n",
+       "CfVancFoRJnnNBrNisPseQ6+55LlKa7jGhhC0mg09XJ+klKv1RlPxowMHzyZjNBJRk329/ep1WJc\n",
+       "x+WFF1/EEYLveewDOA6MJwlCGdWiUgSenj6dUtGM6yRj/WAajkYUZUle5Fy+cpnFxUWScUKtVq+Y\n",
+       "NMpOywYJ8D0ff5pdwsHSUk1J8B3HJUJDYFpt+dZQgpICFExKy5lWVUesSr0bys3963oeSoIIfEqp\n",
+       "vVtc10VISEZjFNBptWg2W5r15nvVbmwayrAPID2FjInj2ODiLQ4dWiHLMnq9fUBDmtrf3SPPtSNi\n",
+       "alTdusezv/+By6ENg55mtL3d8e1zIzQfSKvVYjAYVMXbjoK2Iy3yA1qdbyxbLTcVgNqBGUxWpKTj\n",
+       "gy7c8iwdY1bjOiaqykSGtVqtAxzOdRkNB9US074eSuL7rlkiORXEEoYhw+HQKNd00ofneaR2Iz+N\n",
+       "HfoejuNVEI0dv8LAx5rJu46DMt1Inr81gd8L/Or3PpA4mzQjqarXDoMQpTSeXsoCKcwC0fOoxbF2\n",
+       "WlMAHmlmgo6LEuW79AdDyjJHCcFoPCGMYiQCJRzWNjZodpaJ6g2U8EiGGVGtyXicMNOZYdjfQ0hF\n",
+       "5AYEIqA3HtJo1Gk06gSuIB0PCH2Xv/jS46BytrbX2e/tMxj0qDVi/cDNcrI85db6munSYzZ3Nml1\n",
+       "Wmz3dkjGE2pFTiklcS0mGaf69zfL6tm5OZqNlrlGpO44e7t0Wi2GvZ6WQuc5wyylUAXxpAGuw9Ly\n",
+       "Mq0wIA5rvP7qOZa/8xB5BsdPniLNclw3ZHFRe8I7jqTZbvPquXOEcYwSgsuXL3P4yGG2NzbwhKBW\n",
+       "rxOGOvHeRQdGBEFAs9Xi8ccf57lnn+Wxxx7jox/9qHY8RNDr9QjiiI2NLaIoptlsM5mkZHmu7RCU\n",
+       "TrP3XK0W3N3dZTDsE8cxS0vLKKVIkjFRFBOGddY31mk2m0zSnDAsqNXC26bTNM0Ig4D/n7k3D7Lk\n",
+       "us47f/fmnm+vtauqG9UFNBaCAAEuEMBVFEVJJLWZ9EgjKyxZ4/GMwxEKj0OjmbAdY88S9jjkcNgh\n",
+       "Ozx/zESMRdlhSbQ9Q1kOitplihYpkiKABtBAN9B7Vy+1vjX3zDt/3Lz5XkENaP5xQBnR0Y3Cq/fy\n",
+       "ZeY995zvfN93JPDjP/ZjDURgSYG0LCbDIVVWEHQcopGedpUXBa1OmzTJcF2fTrdPv5+TZhmHtQnY\n",
+       "bBZR1kmKHvJro2RtrZzlJ7DuSunpPErpwQ56uINEVBUik+RCe6RU3B8Dt9D2DKmKsaQFggYONevK\n",
+       "8k8OUikqM0lLx5Fr166xtLTCzgPbui+GoKwUs/EUyxJN72y+nudMs0VCw+3bu7iuW2fQVu38Oaph\n",
+       "oDlFWQhjRFfV1f6cMKEFUh5KGU7428fRd3Qq/SLAb7JTs9uZIBn49glOpaHtGHjEZNt6oEPeQBSL\n",
+       "wR4WPYp1dtztdpvhtQaLNmWSwd2MNNZ4dpsybBEeMd/DYHemObEo4NEioezE9zXnXRR5E/ANx9xU\n",
+       "IG8+bFv7kRTFHKIxjVXVkFCaAAAgAElEQVRtaFVn+VWJquaj2xCCQimkZeO5LkkUa0ZJPZ0ny7X9\n",
+       "rq4EXCopuHXrFqur2vY1KxRnzu5wdHTM6dUH64BfYNkO0+mwpqUd0m752JbmMZdVSi8U2DKliCfc\n",
+       "O9jjYP8eVy9fRFUFw+Gh9qSgYGm5h++52FLQCVs88fijfOB978VzHfK8YDgaEXbb9Pp9XN/HTnP8\n",
+       "MCAIAuIkoawgbIWkWca/+IXP15n3DNt18RybsigYHh3zX/7Mz/Dxj36MJElod9vMoql20lOKX//N\n",
+       "3+A//vavM6iWOLWxQZpl7Jx7SFNQk1SPPev3UEpSVoqb93Y5ODgg8H1+8Ad/kC996Ut893d/N7/9\n",
+       "W7/VQA0a4tOVDYJG+XvmzBkO9vf58pe/zK/+6q/i+z5//nOf42Mf/RhpUdBptzm1vsrdu7cJQl/T\n",
+       "TqVhLQhms5gsySiKin5/pYYtCixpY1se02mM61UEQQulBMvLq5oOlyXaiCuaIdAw5vHxMUjJdDrD\n",
+       "cXQCYqpE1/exA0lWVQQ9ndl3vS5ZnpImMR7au8OxLYosoywKXr90iZWVFc0+QlAVJRWiWT+e7ehq\n",
+       "WM4nAZWl7rWYONAIXgBbAKKq/dT/5KGqnLyq8ELt7a8n0WtDLSHmtD3jR2LbNgh1Qkz3zDPPatV0\n",
+       "nOiquB7aoFXbcbMZLGbERtNhEiqlFGtrayeYXoseNYsxwuDoi3i3lLLpmxmYxkC2b3e8ozMxFxsW\n",
+       "i14kJnjatk2apPXOLKiULi8ar2FVUJWAELieg1N/nUX+qHkY4KSwJk6Spnm4WB4tZvuLP1/0NVjE\n",
+       "zAyebTYMk2Gbz59j2lnTXGo67VVZ06dU0xU31+V+h5kwr/9YTTfb7NhJEmNZdj1IQNO5VP29C6Xw\n",
+       "/IBup6MnkQ+WGA4n9cMkyIqCVqvF5z//eT78oQ+xvrZKFMcsLS+jhIVtWQx6Xaoyx5IVWTqjVDb9\n",
+       "Tp+wbTMTEktktMNQc7Ul7N+9zbe++S0cWxBNJwyHxxR5SpLEOJ4NZYXlOkBOkWTM4pi/+GN/jccf\n",
+       "eZTR8RGua3P2gU2msxnD8YgyizgYHbG5tE4SJyilSOpRcffu3qM36DMejxFS4noeWZ7hubrJHQYB\n",
+       "rVaLmzdv4roOo9ExXhiQZClBq8UXv/hFHFkyS2JObW0ym0VY0qIoKy5fvYaUFnGUsrS0RKfTYXnF\n",
+       "xvO1h/i1Gzd4+umn2d3dpawD1dLyEnatsLRFRZrGxHFSz+B0OLW5jio10CCEJIpnlKqgKCqOjoeN\n",
+       "dbHJmHXS4jCdznBdD0VOVUlsy8d1QoqyIM10E73b7WrVZt3Qj2YxQeAS18O/QTKNJhwfHRHFMbMo\n",
+       "Is0ywlYLWRYN93g6nSKkZG1dw1mO4+BYkCQ6g53NZmRZiuc6dV8o4N69/Wa6k14TFkWZ17BnCY5T\n",
+       "27cWqGqexOmFZdVBtw7WtVFVVWpq7H0PUWJbgslk1FwnDSfNVZ/aKCpsAuetu7u6VxQEhGGLu3fv\n",
+       "al+bGnMvirmAsN0Om3iQZVkz/NywxkxgN2vR6EkM7XiR0WYoh4u9tkW9iTl/4wW+SEF8q+Ods5Ot\n",
+       "OdfmpE1z0Jy8eY2UEt/R7A3zxbXS0mrMrESt/jKy+BOUofq9Fo3SdfCsiNKU8Xi8sNvqh3+R92oe\n",
+       "RHOO5r2MCZbv+w2FME21gALmTRvzIHQ6PaIoOtGNl5ZFVRZNQ3Zxcs79DgMv6YdSnuCWG0qiUtrD\n",
+       "2XPnRjlCCBxpMZ3N6PWXkBJmsylCaBqnJdpkqW6CJXHGmQceIE3ihsZVVVrR+sjDD3MwzbFVTjf0\n",
+       "UDigIu7cvMmZrXWm0xF3bu1y5Y3LxLOItl/x/icf5Ad/8AeI44S//bf/FlWR0e14pHlK0Ao1YwaH\n",
+       "laU+/9VP/Q06rYA0ntJuB8ymE65evcL29jatdqhN+JXi2htX2dw8TZIkdHs9XN+j3W7zyqsXaIUh\n",
+       "B0dHtFot+v0+8WyGYzvs3bnHgzs7dSarF0+aJEhLsnfvHs8+8wwvXXiBvKo4Go1oBSFHh0eEYYtz\n",
+       "5x5BIOl1us1zW6ANzJ544gl2d3cJfZ9vf/vbbGxsMBwOmU6nDV+45Wm5dK/XO1HFObaD57kkScq/\n",
+       "+bf/hldeeYW/+bf+Drbj8P3f/2n+zt/9n3j00UdPJBm27RDNYhzHY3m5g+v5JEnCbDbBdR16vR7T\n",
+       "2aRpDuZZhu3YHI+GzOIpr7z2qq4884xer8t0qt0+kYLdO7dphSF+EJDEMZZtM00ipknMNNI+2EES\n",
+       "EXgeSZbWrBOtHQ1bLQ4PDtg6vcnw+Jjd27sM+gMGgwFFoTehdrdNluvBDZVSVOh5uJZtaVFNTTOc\n",
+       "j1HTQ1pKpRD3bcVBmk41Th/o4Q9VpSjKgrIo8f2grqbnjU2lFE888Z56YHmL2WzGztkdhkdHBJ5+\n",
+       "D2M9YUgSs9msWfudTqdBAkyQXazEFxuS5p7N7519ImE1vHJdQevfMTHFUJj/tOOdG+hQ86JNZ9h8\n",
+       "SQ0rzK1X9RDiqglyZtczXxjmEnLDyTbDihdtH01Wa3Y+t35tv98H5tl5XA9uMMFbd/ajE+pK85lm\n",
+       "N12kAMmFDHquApv7JpjfMc2KxfMyPPC3E/IsQkL6vd3mehguu1KVVrst0KukgF6ng0Dw5JNPcvHi\n",
+       "GxRVRWAF5Kl+7WQWQZmjFIStNkmSEEURx8MxrVab73jm/Xz+l36F0dE+k0lCGHQYTSf4vkM87LM0\n",
+       "6DIdj/nuj72fF55/gb/4o5+l3W5T5AXD6TF/+Sd/nMtXrlKpCr/dYhZFKODSG6+j0pRf/MVfJE1m\n",
+       "PPXkE5RFgRSCl156iXc9/riexBOE9Pp9fNtlb3+fRx55lL39fR7YfoDeoMOlS5c4PDzQ+GU0Iyty\n",
+       "ZFVot8vQIwx9jTPbDrbj4NkWt27f5jd/+7f0rMxWm6OjIwDcUx7rpzaoCsWP/MiPcvmNy5RFyfHx\n",
+       "kNFoxMHwkJ2dHdbW1njxxRe5d/cujz/+OG+88YaeRN7paKvQ2azuSSieeuopiqLg5s2bHB0dIS1J\n",
+       "WgfYc+cepr804Oj4iCAMEULysz/7s/zar/1aXW0qxuMJruPT6XQ5OjqmVBGBH+I4Nq7n1vDICMuS\n",
+       "HB3tMx5PyLOMzc1Nut02y6tLbG9vNwFoPBrx0vkXtKz/yhWKomBvb49+v49jWQStkFmWcu/4ENtx\n",
+       "6Ha75KokiiPafsDh/gG2bRGnOsB7nscsjsCSlEXO3uEBSZbWTX7wIo+8yll0DpTCqiGPWshTVw3a\n",
+       "4nVKnlfo5fBWknJtYZsUZkSZjW0LqP2SQNQDObR7pef5RFnSeB+hVG31224ERnriUEpZFBRl3sQr\n",
+       "mHsamQD9ZvMuI6RaFOyYGLHoK2NgqkVItaqqRrjzZojmrY53LICbzM40BEzmbb6cgSIW/QYMRNF0\n",
+       "resgZgK0FLoRUhYZWBaibl46tsSS4AaaT55lmZ5gvpAtm+C4yBXPsoxpPfsQaD7L+HWbjcO27YY+\n",
+       "uLi5mBsmhYAKvchqPxIppJ4jWcM4BrJZxMXefJRFgaqq2gPdbjrsGufWlDnT2S+KrMbzNJVKY38W\n",
+       "wi546sknefmVVymqumqBpqP+4M4Ol69e48GdHW7dusWZM2dYXlnD8zyiaMbP/9zfA+mQZxXTWYzj\n",
+       "Otg2XHr9VSxLcXR4hFDwP/z1v0qRp5SF0iPpiownHnsXvuvywY98lDhNsByX/+V/+19BQdBqc+/O\n",
+       "bcIgYDJLePjhc5xaX+UzP/ADDEdTLr3+OteuXeP6jdtce+MSlm0zGAy4c28PBayvn+LoeEi702kM\n",
+       "jGazGWUS4bouS0tLXL58ueHjX7p0ia9//evkZYFjyt76uRsNx9y6eYvtM2fxPI+vfe3rfPr7PgUI\n",
+       "Njc3efXVV/mVf/dLHB0d8a//1b9iZWWFH/6hH6Lb7dLr9Th//jz9bo9WENJtdxCVvm/nHnqExx57\n",
+       "DNCb+6VLl7h9+zY3btxAofjAB56h0+5QFHqQ8drKKu2wxWQ0ZrC8hG8FHB0NkdKi1W7j+QGO62po\n",
+       "qsooopgonnLr1k1GIy2yefjhh7Bth42tLWzXa9aSbdtw+jTvefLdXLhwgaIoaLfbnDlzBktaRHHE\n",
+       "/sEB337pRa5eucZ3ffcn+N3f/V2iWcQH3vc+jipFr93BKiXCscC2eOXia+R5zvXr17l5c5eqqnjs\n",
+       "scd45pln6PV6lHlKSYXr2jXH2SNOU22XW1TYtoOqlP55nNCyjYWFT57dH0IZT8YURUZneUXj61LW\n",
+       "laPE9XT/wLF1LEmznLyoELYO7KISUOnBzQYW1dbHZv6s9k3xPKeJERrfzppYpaFeQ7fMm7igBVd+\n",
+       "EwcWVdiGn78YwE2/zWDlrVargUHf7njnhhrXwc7g3UATLBclweZLNQZSJzik851c74pzsY55jeFO\n",
+       "m6Ds+35tAznPkk+wTqjpemruj7JoHGXmD5pAned585kGYzO4l3Fl0zetarJr872FAD/wTtzYt6MO\n",
+       "haEe6GyoTbZtY9k17icWHNMkCDEXB5VFgULiuB6VEpw7d46yLPD9kEpVOFJSoQVAcZJw+cpVULC+\n",
+       "vk6lYDw6JooTup0246OU0TSiFXS5ffsuQegTxRPObJ3ixs2r9NoeVaGoioQ8Nz7qlmZvCJvNjU0m\n",
+       "oxHCsvA8n5WlJQ6PjomjiMHyKmVZcGv3Djdu3kRKSa/bxbZsXM8ny/WGeObMGVxP+3fs7JylKPVG\n",
+       "GIQthqMhvX6PPC/I8pxuR9uiHty7xz/7Z/+0bpSnBEHAyko9GCJJWBoMWOksI4VkujRDVRWbm5tI\n",
+       "IXj94kX27t6rvXMS0jhBuorReMQHn3uOra3TPP30UyRJwhOPvxv1F36c/b09bt64ieM4TCcjijxn\n",
+       "eXmlhrpyQLG+for1U6d49rnnNKSTpcyiCN/zoWZd7OzszLFXJN1uBylthJCkWUqaZ+R5ysHhPa5d\n",
+       "v8J0PMKtjaje854nOLW+TpFXJGnK3sERrVao2SrTGd26UnjgzDZplpAkCXdu39G0N8eh0+3S6/bo\n",
+       "9/Rm9Nijj3H5jTdY6g+YjseErRZlWeAELmcffJDB8jIo+KEf/iy+77O3t8eVK1dYWl4hiiI8IcGx\n",
+       "yMuCrCgaQVKexdj2XPhSFDlXr15heniDslSkWVFn4T/xJ9ZE0GojUDhOAAhc10NK/WxpLFppnxjA\n",
+       "rTcwpZT2PLc4CXeoOo4s+Kn4vncCIQBjr2udsMlYdCNcJF2YZqaprs1h3FCBhmdeFMWJXhjwp1rK\n",
+       "vqMB3GTUJsCa8sHgS0YlabLvxQzZBDzHcQjDsFkYBuc2jYRGfg9N0NVlifnDiQtuyp8oihq57GJg\n",
+       "XbSZNeoqg2GZKmKRzdIosFTeNE1N09PzXG1OtCCD1+q6+L7XTMNJWo242CtI05Q8S5rrqA3/cz1O\n",
+       "zdK0JNf1GE0mhK0ux0dHvPfpp7lw8RJlXtJqaxx/Gs3Y299ne3ub0w9ss7LU1w9a3c1P04TpcEIQ\n",
+       "tLn06gW2t89iezanT6/y8svn8T09sSdLc4qsoFQCUfvPjCZjHn3scZIkYXg8pNvvkcYxj5w7xze+\n",
+       "8U181+X06S1++qd/mqOjA8qy5Mzp04xGI5RSTMYzBoMlwjDk6huvsryy0gzVQFh881vf4ktf+hLt\n",
+       "th5957gOLRHScTR743/+O3+3KYM7nW7N5Z4yHo8bOfvB8QFKaf+JP/iDr9Lt9rAsqxlafPPmTYZH\n",
+       "R3zgAx9gaaXP5/7859jb29P3OMtJZzGppWG4TqvNux9/vF74mkY4m83Y29tvfOyRetNPa1xcSkmR\n",
+       "l2QUNT/aYXt7m+eff57eoE9VgRAwmYxwHD3k4er167x28QKj4SGrq0u0WiGinuDeabeJohhL2oR+\n",
+       "iBSOFuQ4Dp21NYQQ3L17l7t37/LgzjYPP3QOgN3bt5lEWnzy8ovnObW5gSpKZFlxZmOT2WRKnuXs\n",
+       "17TBvMz54v/7q7zrXe9iZWWFy5evEEURh4eHnDun33MwGBCEIa7vEYYhvu/rBCjTvQApBFk6Z1Op\n",
+       "ouT88S0kMIpHCHn/UHX9xi5nz+5gWQ6e59UCJr1WpbBBVORVTlVpFpcOkmlT+ZuRadS8d6UUZT2o\n",
+       "pSgKVJI1AdWork0wXoREdBwpTwhyTKJmGtGLKs5ebz5hzNhj5HnO6upqk3SaOPl2xzsWwM0XWdxh\n",
+       "DE/aKPC04GZUXxRjyk7DkzZZd1nqrvai5NVsEKZ5aWAXE9iNQsscZhc152ZKHtM4NUHeYF6m5Fls\n",
+       "OC6qK81GY7BvSzqNNN8E/TTTtKXFLMDg//c75hz1k1NNpJQEoY0Ulm7kqFIzR6RuDBkVnxYhpQgp\n",
+       "2Njc4OULrzbmXnme44ct9vf36Xa73Lx5kyJPNXbp2o1IKY4TWt1lHth5kFanw/7ePe7cvY1CsL6x\n",
+       "xWg0xbV90kKRKg0dZElSQxsVb1x+nSefeJIKiOMp3/mxj1AUKTtndsiLjDSeooocW8KF8y/oJqXj\n",
+       "4whIZ1Omx8csLQ3Y3d1lMBhobnu7zcsvv6w3YVuLftyaNaBUxWOPPdroDaqqYn//oPHCcV2X0Ug/\n",
+       "Y9tbZ5jNIqS0+PT3fqpZPB/8jmeB2nSobg6KquT44BBHWviuR5okDAYDsjTFtvRgW9Be3QiB5Tr0\n",
+       "eoPmHguhudaaq+xosZMQSGWmoEdMp1OWlpbZ2dnhypUrSCnpdnoMhyOSLMOyXF44/xJVVdTvpUjT\n",
+       "DNfSSUaRK2xbMotTiiJC1glFluaNaMx1HM5ub3N8NOTOnbtMJxP80Ec4DhcuXOD7vvd7eenF81x8\n",
+       "6RXCICApSpb62v/76vVrxHHMw+fO8f6n3stTTz/N8HjI2pIe3bf5nVtaFdsES0GcxsxmEfE40n0O\n",
+       "ObdatYTEtiwEgqN7d/Esj5zavVDeH1ZcWdvAb3Xod3s4jl7bZVWCsmrhjKobk3N6ojGi0tVwSVkW\n",
+       "evqP0ApwIy4C0LbUOgE01fSi8dwiEpCmyYm5AG8e7mLUwydgLGiqfMNcmU6nTbx5q/GK5njHAvji\n",
+       "ZHaY86WVUg0zRE++9poLYXBf82WFEI21q8lcDT5seJ6GT2lcB01AdhyvyWAXb4YJumYXNJxOsyMu\n",
+       "NhVMpvzmhqrZJAz2NS8NdcA3eLmUog7i+vPDMGw2hvsd5obPJf1uAy3pTWbeyNEMlwpVKRzbxnMt\n",
+       "8qIkL7Uqcnt7G9u29MCGBU+HrKg4PDzkK1/5Cn/lv/7LHOzvceXyTR56cId2q8Us6YK0qZTij59/\n",
+       "kW63w3g85MEHdxhNIu7eO+TBnYfZPzjGbkvaYYt0mnF6+zRXr19h8/QphKW0n4mQJFHCxz/6Yaqs\n",
+       "JGwFfO0r/5Hv/Ph3MpmMWRr0KIuK2fiYlaVV8qLAkXpI7kMPPcj16ze4fvMGr1y4oFkMaGtVCyjq\n",
+       "TbssM87ubDMcHqGU8axQjZeIgbosKYmnMba0GI3GrKyuoMqKLM8oiqwe3zajpKLValNmGa0wbPjN\n",
+       "rus2Qz88x22eG9uyUQjiKK2fEa02DNutOpBb2I5NXuZ6UElV90scXw9YmI148skn2d45y8HBAVEU\n",
+       "cfbBHQZLyxwcjvjaN79NK/CospKyBIFFu9snmk6ZzWJaoYPvtXDbDpUqGQ6H9RqxaidISzNPXI/z\n",
+       "58+ztDRg4/QmaVWwtbFBGsdsndrgtQuv0ttqsdTu4UiL4+kx3/rWH+N7HmfWNwjDkN/58m/zkY98\n",
+       "RNPvegMO7uzheT5xnOMHPqPhCD8McLBpBQFJElNVJd1uC6HQ3iToWZnL3T4Hd66Tp0nNdLl/Jnru\n",
+       "kXdRlWaClp70ZFkGelVNAC/LokmgbHvuu6KTPAuh5hoNFoJ9lhdNk9Fk4fPgX51IvBabkcZozmTf\n",
+       "izxyy7IYDocnEjXze+YzTAKqnVff+ngHlZggLd1oKAs9gCDPMz0r0dY3odvVNznLcgTzAQi2radG\n",
+       "O46HbbsIIYmTBNsWGkapKpRpJoo6K6sUZVVhVQolpKaQmWkdUnsZC7loZjPPlPRGoB0RtX+CnsJt\n",
+       "WfIEu0TWXF2DgQe+T1HqhyqrN5QMRZrUHHIpqGrcXptvFSRJfKKrvXjEsbbyXNyIjEGPebAMDxah\n",
+       "O/wG/6uKEtt2sGypB3bbNmtrq1y9dh3L8yjKEoRkMFhib/+Qdz/2GL/8y1/gQ89+Bw8/dI4knjFT\n",
+       "CisMydOCw8OjuiGnWF9fIQx97u0dsLF5mqJUrKysUVpanjwejdh+YJsXbt7k9OnT3Lxxg63TW3pD\n",
+       "s3TP4/qNXc48cIbtB85y9eo1BoMBYRDW3tJw4+YNWq02vV6P2XTGbJawurpGmhdceuMK+/uHepq4\n",
+       "0HapjiVwLAeVFDz+2OOMJxMCP0AKiVdjo/pZskjihLzSJXSWZ3Q6beI4IopjlpcHWgkJ9Ps9ytpP\n",
+       "x3JckjhBWpbGVoHJZEKn3akZU3rjdlxt8xCEWsBSVdqjI4qiBh6sqA2NKqjyAlvaVKpiPB4xWOpz\n",
+       "fDzE9TxWV9Z0z8J10batulFvqkfbthseuuO4LA2WEFhY0qq59w4rK8tYls1oNCQvchzL5o3XL7O5\n",
+       "tcXTT78Xz/cZTUZ0Om1Gx0M8z+eZ7/gOwiDgjTfeYGNzE9dzGQwGrCwvc2p9nW6nS5blPHzuHBdf\n",
+       "e43V1RVUVdEOWyRJghCS2XTCYNAjTmp2h9BmdYHnIaRAGud5BWVVMJ2OycqKJC+Qlo207s/GmE1n\n",
+       "WNKh3fJJEh2gTaVsyAcwHzJu27YWGJmkjdqnPNcNf9uxMeZaQugpRJohU9WMuTlsMld3GorvfAiD\n",
+       "wdoNO84EZOp7bZK1xeTxrdh1b3e8YwFcKYlQtd1llWn8yXJxfYe80Ioq29KZSFmWGicsM3zfIysK\n",
+       "bMuhShL9FURFlik8T2dnjmV8hDWZ33ZcpG1jS6GlsqKWm1cK3/dQ1NCCJbCYX9SihmJsy2qoT3le\n",
+       "AHOoRADKkki0/7EWHJX1mBHN887KGuN2bZ351L+rhNCNxbosk1IsGNPf79C4/aKXsj6nfCF4Sy0p\n",
+       "rh9Opy6nK9BqUBRIbebz6U99kn/yT35el4BCIiyHKM6Qls8rF17n/e99ipWVNXzXxhbg2Ba7e3fx\n",
+       "fZ9Ox0NaJbd373BqfZ08K0iimH6nT1UUONKmyktu3bzJ5vo6rmUReD6hrwUgRT7vsEshaXX7+GGH\n",
+       "pRWr5sYqZkpzYts9HyfQnN3LN26wsbYKCopC8fwLL+GHPUbTXVqttr7fUuP1WZrwuR/+DL4f6ClD\n",
+       "NWynext2rSjNcRyrqbz80Guez57bacyPijynXMA8BRIpbaSQZKkO8I7jUSkQsi6/pUVZ6rF0hjIq\n",
+       "bTO9fe47bwmBsGzNWvDmsFjYDnV274V1sJC4jn5uhYJW4GNJAdTZo2VhO7oJWGQZeZEQuB6OJZCB\n",
+       "QyUEN2/d0HzuIKDX7aMUPPLoY/XzZzMea6GQhSDwfa2yHY8Jum0++env4+rVq6RZRpokPHD6NPv7\n",
+       "+xxPR7z3ve9FSkl/dcB0OuX67g3chcClIQMHx3YJwoCWbGmGSDBvLAoJWZLS6Xa5cv06le1RSpeC\n",
+       "Cnl/Mz5818OSNkkyt7Itirm//pv1IFmW4TpaSWwCsGVZ5EVer4P52irLkuls1oyPM702A4fo50RD\n",
+       "mlosNHchzfO8Cd4m6zb8fxOszTkusu/MDF3gbRlp5njHArhXU4mmk6h5oLXTjML3XWxblyN5pnfS\n",
+       "IPRqFWNOUWoxT1EWoHKktOvsO8Nx7Dor16ORyrKgyDKKrJ5OYju4rkeZFZSqIsmzpuHgOzZZPXFd\n",
+       "KS20KVHkpcKSAsvS5Z2qNNc6CEPN8FAlpdKlcV5UKGEhpM7+XU9DRWXdgYqzulEjLapqziYxXsqG\n",
+       "Onm/481SXCHmlrRm09GbisZXizwnLXLsUtvzDwYDZlFEJRRpkXNqdY1Hzp1j72jEaDTBcWoZvxIk\n",
+       "ec6li5egzPnMp76HPM2YzfIGr1taWmL31i4ry4MaA1QsLfWxHYl0bSxLUCSammZZFlevXmVtbY08\n",
+       "zzl79qyWmNdQUhRFeuh0FuP5DvsH+zxw5gHu3buH53sNDNTtdFlbW+PSa5dYWVvj2s2bHA6HXHjt\n",
+       "Iv3+gDDUjodxnCKEAlXx3HPPcXh4eAIKM58LnODlL8Jmi4rYRe491BQzJbGsuahrkY1g7lXzt9D2\n",
+       "Bm9mKhgqWdMwq0vsRXc+s9AXfaON3YTj+vVz0274/2UJ0rNotVtaJt8bkJFxNBrR7nY5ffp0895Z\n",
+       "lmNb85F/WaaHflu2ICs0N3oymfDaa6/xwQ9/mN/+zd8kjmN2dnaIooh3v/vdfOhDH+Jb3/oWN2/e\n",
+       "JAgCVldXkVJy5vRp8jyn3W43lDrdi5k1VWNRFNy+fRuYszLSehDJdDbDbYVUlaoz6bcKZBoirdTJ\n",
+       "RmEDh5j7xVxHYV5nAnGe57Rq75o0TRtLaE3Jnd9LQz4wz46eLqUaQkFVVYxGo4bssPhMmGdPkxe8\n",
+       "5n6b8zONT5gz4/40Iyt4RzNw1SgvF5uEWZaeKEGClh6JluU5CAhaIarOfHXTQdUPfEVQPyRxNAVB\n",
+       "Hax1AyWKYoqiwoxB8rwWCEmaFkhLIi2HWZzWxjo64EpA+wlb2tBJCWRVT68WWuGlEAjpIJReQNQZ\n",
+       "cFlVFDX0o5sqmrrk2B4KpaezVxoy8TyfoqhwHLMI33rXXVR9mRJrUXFpAk9ZldiOU1cF+iEZj8d6\n",
+       "0ryooYPZjM/+8J/jn/9f/3c9hCJHYGlXOCGZJjGHwyG/95U/YPvMac6de5CSgtl0yt3bd8izrAmG\n",
+       "e3t7rK+foiwLbM/TJlpVxdraGmEYMplMKIqimW5ixFFhqLPM2WRGnqUIFKdOrXJ4tM+pjTXu3LmD\n",
+       "7/sMBkuMRiOSNGZtY5Ov/dHXuXdwwHA80cZRvkecxGjuO5R5zvd/5lPcvn27+UyDUZpNcxGznM1m\n",
+       "zaIx52mOxSzOQFcalkgajw5jCBYEgbYxVWgLiLJEiXmvxbyPUfIuCsF0hpYv3EuJ47jN7+oJN4og\n",
+       "0MHFb7Wb12tlcjlNTB0AACAASURBVEan06IsSlpByGuvvsZzzz4HlsWp9VPkC453hiJnyXlAchyb\n",
+       "ssyRlt1seq+//joPPfQQB3t7fOITn2B3d5ft7W3297Vkfnd3l83NTU05rSqGw2Gjas6znMlkwmQy\n",
+       "4dFHH236P3P+9HwgyebmJnEcNxv717/xRyhVkecZtqMl9vc7bNtGOBZpujgIYt7LMkQG04C0bVvb\n",
+       "LQjxJ9gexlTPvH5xTZn+3KzOyM01M8+W8WNa7EmZ62wqY/OsmSzb/L9F/rdhsP3/HWr89izx/4yH\n",
+       "bbuUpWIWJWR5iZBSu6i5Ab4X4roBAos81zQx7WXgkKYZSZJSlCVV3ZCzpMR17KZccxwHS+pJ8kmc\n",
+       "MBqO9Jg2zyPwA5YGS41as9XWkmvf92mFQZ0dVc0CtywLp7bTVDUHu8j17MYkSUnTjCzLa/jDo9Vq\n",
+       "E4YhYRgQhCFe4OP5+o9SiizXczjzQo80s+25hwoIzV/O7m8nm2VZ07E35RbM6YqLWbnreeRlobG+\n",
+       "2khJURL4PmmSoMoKVVSEns+TTz7J/t4+oP2Qi7Ik7LQR0ua11y+zfzzEC9ukRYXveWxsbCCEYG1t\n",
+       "jaT2lJlMJmRZClTMZhOKImc4HDY0vOXlZW0+FceMx+OmC2/mnJZV0TA4UBW3bt0gSeImMIwnY1zP\n",
+       "pdUKmaYZB6Mxt27faeyItWukJI6meK5FK/B55gPva5rXxijNHIYRsFjRtFotgiBoaH6G4mUyZSFE\n",
+       "Y62Q5RmWbdFqhQSBTxD4hK1AM2nShCxPqaoSy9ZeM8aczbCsTIZmPq/dbjdZYBD4hGFQL2LjjV82\n",
+       "jI3pdFJbAxtFs1b6qlqxmNbahMlsRrvTpVJQLGTyVVU1tLXj4aE+T8tqAvbR0RHvfve7efnll3nq\n",
+       "qad45JFH8Gt/c0OL/eM//mMef/zxJhj+4R/+4QnSgGNr6GBzc5NnnnmGCxcucP78ea5du9Y8szoO\n",
+       "2M05dbvaP/727dv4QYBlzTNo8+/7rYksmw8VNkGziQN1lm02i6qq6Pf7dDqdxm/GBHtDfKiqiiiK\n",
+       "GI+15bTZ8MMwPEEV9n2/MQQ7Pj5uvot5D+ODZCiT5vzerLBcrMrSND0xk/fPLITy73/9N5BS0Ov2\n",
+       "6PW6TfllOzaqbkRaUmKVkrzIyHO9C/teGyEUeZE1zUXQZYyqrSRtDFOjBCHwAx+BOLFghTRUq7Qx\n",
+       "w3cch77X0dafC+VUVRohgFN3qevGYFmBKjVNSenxb/FsSl4UWLYO/GVZNBN9PM8BpfBczeJAgRM4\n",
+       "eop8vdN3u96JQLN4LJrWm3LbZAYm8wC9yZRVCWj8v2mo1NlXp6XLWulIcqX41Pd+Hy8+/yJVVVKU\n",
+       "OY5jNxx4v9Xi9cvXiOKUJx5/F88+8ySXXn6FtbU1tHDC5c6dO5w+vYUeX6Yhlv39fVZWVpqHcTKZ\n",
+       "NLYE3W6X3d3dZhNVStFutzjYP6i9JmyeeOKJxhq11W4znc4YjcZMplO++s3zZHnGZBo1CtKiKkjH\n",
+       "EbYUHOzv8d/99E+TphF5Xja2rr7vN6pdUwov6hEMvrm4+A2MYc6zoQDW2dRi2WsCxWI/Qkqpx9qJ\n",
+       "udf8HD+dT3s3C99x7Br+mw84MDTYJEkaqAdgPB7z8Y9/nP/0n75aY6ylHvgQtlBoBk5eaKWp1hwE\n",
+       "Jyo3g+tOpuNaQOdyamOdKJrx/PPP89xzz7G6utqwwkwj8OLFi3zsYx9jOp2yvr6uB1e021RV1Qx5\n",
+       "oNKsqv39fY6Pj3nPe97DwcEBCE0xPDjQ2a7nuhQ1tOF5HoOlJQ6OjomSBM/XIxPfDkrQvQyHoshO\n",
+       "iOFMlmzu9SJVt4Ggam8hkwkbDrlhhJlAvHjvFxuP5j4HtVGaec1ihWX+ezFgL2b5piowm1+jsF7g\n",
+       "mb/d8Y4F8NcuXaYo6tFDqqjFEBnSstja2uIDH3g/W5tb2NLFcXxarZDxeMx4kiCkQkqBIx1s29LE\n",
+       "+0oibIVlu9pMvlRYro0tZJMV2HVWUOQ5Ao0V54WR4etgnsRFc6NsW/s0zxdfRpFrtoKmn2k2TFVV\n",
+       "mnONHs6QZDozqDC7aMFsEjcNDQN55IX2eTAPlmmEyLcQLZgM0GSIJovU5zYPHkWZIy2pB1KkEVLo\n",
+       "QN/pdCiLQpf3lYIKfMchEYKf/e9/hr//v/8DQj8kLwvdLEPhBQFpnPDa65fp9Ppcu3aJj374I7Q7\n",
+       "fS1Lt2yyXNucmgkqVVVx+/Zt+v1lZrN4AeYxWUZOv79EWer7mOcloe8xHk84u7NDFMe4jk9RzoiT\n",
+       "TPtwDAYcHI34vd//Awi6XL12g96gi+c6TKba30QISOKYv/BjP8ry8oC4ts01C6KqqobWaXBZMyHG\n",
+       "lNvGE8csRpMBmXJ78Xen0ynT6fREVg0nMyrzelXpxei5NkpppoN5beVYqKrE9+YzU6WYe2Y4JrjX\n",
+       "G3aTSSP5nk9+D7//+79fBzFNnZtMpoRhC8fxODocsvPgtm4Av/5604PodDqNSM1kvpdqK9g4hulo\n",
+       "wuapDYbHxwR+QDzTU6f27t6j1+vhWPU5WTZ37tzh6aef5vLlyzz00EP6OtXzZpeWlohjbQi1vLKM\n",
+       "Au7s3qbdbhOGLcq8xLEVbs8jzXL+6I++yYWLr5FkORUFonbWfKu+0GJ/wGTbSqkmCTKZ/qI1q6qK\n",
+       "BkozQdhsogbSNdl1nCSEYdiYmB0dHdHv95tM3FR2eZ5zfHxcV9/hiY3BfIYJ6ELoST3mnAeDQRMb\n",
+       "DPXYqDL/NBz8HQvgD557GDNZfn9/XwdNR3sW3NrdZW9/nyzLWF/WirHNzU067U7DrZWWRAqwXRff\n",
+       "1/L2vExQKNqtdj2MARzbxvc8nFrxWSmF5TgUedxg31VZEc1S8jxDmukmRUmZ183VunqzhIWw5+rN\n",
+       "NNVNUyooVIklrSbj9DxPD0QVEjtsNzeyLAqqssS2bALfxvGCRtnleZoe9lbqK4O/LtrlLvLMzc8c\n",
+       "6Tb0RB1E9AY2ynONxUtLb0IIirJAeD6eLfmeT36CL3zh39JbGjQPe1VpnK/V7vDyhVc5vbHCN54/\n",
+       "zwfdgE6nzXA8w/VCwMLzXMbjCVmW81jNbOj3+1y/fl3PpByPSZKEfr9PVVVcvHiR97znPQAcDYe0\n",
+       "Oh2SpCCKMxQO02mMF3QYjmdcvHiJb3372wDcu7VLu9OFSg8IcG2HJJ7hSMHDD+3w6COPUJWl5mkr\n",
+       "tbAxzp3jtChprng1GZSxADXZt9mQFrMhkykZ+MMsZKMgXtQNmIzX/NuU42bBmvczY/XM50ZR1Nxn\n",
+       "bcbkNEpdc2RZiu06tNstxuMJluWTZTrYZVlOu9/n8tUrjCdjTp1a59FHH20qAJOhDofHgODo6IiV\n",
+       "lWWiaEq/32N1daUWDnUbeMNscltbW83z5jgOzz33HL/zO7/Dzs4Oly5dYmdnR9P46s0mDEOOjo4Y\n",
+       "Dof0Bn1anTZlUZIlaQMhZlmGJW12zj3Ey6+9SqvdJo6GJ6w17nf4vt9YNC/CiCZgL9q5msNw/402\n",
+       "Y7ER2VToC5CMMdgDmsx6cbSaSfY2NjaaoLwIgyx+rnkOWq3WCXvpLMsaqM4Y+Jnv8XbHOxbAn3n/\n",
+       "+2svD6kpPPWOeHx8zM2bN9nbu0s0m7C7ewPX9RgOj0Do4a+Oo8suQE9Otyw9BNmV5FmG69W7Y64z\n",
+       "ErfmfiIgDEKWl5YoshllWdDr93j0kUdZX19HOj6BH2gKXt0gzfOcPMnrstqu+eKaN17kOWmmZdKW\n",
+       "KZ2lA0pSVhVCSkoFZaohDcvS09P1iDMFZUWaz5pAUZYVSom3tJM1D4PJBIuiaCx1TelVlppTrwy0\n",
+       "I40oQSJYNMzXfGmhIE8S0izjg88+Q5JE/PqXfws/bCEtm1JVBK2QOEqwbYd7h0MKbH7zd7/CY48+\n",
+       "TL/XZXN9TfOds5TlpXUODvbodQfEabKgfJ0rzgxHd2VlpSkxv/CFf8dP/dRPcfnqdc7u7JDlBctr\n",
+       "G3zxi19kMou0z8l4Rp4XBK02ZZGj6ilEUlSoomSwtsxP/sRPkMQRvquhKCVkY4+wWI6a0to0ikzw\n",
+       "NRCKWdymGjPnba51FEXNxrnIdmiUtwtBwGT/juM0TV8tUguae7poombKcsPtN5myUemZcr8sS6bT\n",
+       "KTs7Z7l69bp2UhRGyZmSZQXHx0M+85nPcHR0eOI63Lp1C9u26XY7TYNbCFhdXeXKlSssLy/T6WhO\n",
+       "u/Gc/+Y3v8nOzk5TtQgh8HyP8XjM9vY2Uko+9KEP8fLLL9PrdBvcP45j+v1+MztWSonrOxRZznQ6\n",
+       "o9vtUlQl/UGPr33zG+RFQa/TZjo5bOCit1oTeZ5qRlg9YcdUUObamgC4yM8u8npY+YLM3WTIi9au\n",
+       "pgFq1qeBeRZhS6D53IYWK+c2s2ZzXjS6M8+H2WTM55jKIc/zxnr4z6yUPvQsfF/PlQwc7QncDhxO\n",
+       "rfZ58vFHCENdMklLcnh4xAsvvMTdvQOGxxPSPNeOTZaFNH9bFllVIN2QtChBVehJ3wWlUrh15jIc\n",
+       "RxyPZqR5hJSC8sZtvvat84gaP1eVwndd2q02YSvEqWctGp8F39dwThAGBPXN1L4LAsfR3iZl7fGN\n",
+       "YSgIWZdh2ig/CAI818X3HGwhmh3XcR0sSxLHs/teM5PpLe7ki0Y78zJTNZm85pvXDAr0pO+iLCmL\n",
+       "DFWWCAXScXEtSZVnfP+nP829e3s8f/4lLNvBcQOGQ+29kWc5wpLcvL3H2soSX/mDr7G1tUnnox/G\n",
+       "tl063QF79+7h2D5Zqq/BjRs32NjYaLIM40dSliVLS0uNH/V0FushyVmJsBz27+zz8oUL3N3XCsrX\n",
+       "Ll3RsnovQFRG7RhR5jlCQq/b4a/+lf+GIs/rmYvaxU4tZMKLwiyYK1vNYnoz/mjgqkXc2vxOEHgN\n",
+       "RGKCgG1LhDC0PDOsAYLApyznG64JGIv8fRO89YxEiWU52Pbc4dJ4dnie02ThnnTJi4qHHnyIa9du\n",
+       "aLhIaf/sotDnO5lFfOMb3+Ts9hn2J2OqSittt7e3m8adgeGklNy+fZszZ05TlvMJMp7ncXBwwEc/\n",
+       "+lEuXrzIbDbjoYce0jh6on3dZ7MZQRBw8eJFVlZWODo4bH5/MBgwHA7pdDrsHx8QeAGB5+M5Lr1e\n",
+       "n1u3dun2+9y6vcsLL76I32px7fpNlrouYRhqb5lO561jSRg2/iaLvQnbthu+/WIz0DQVTUA1393c\n",
+       "b/Paqqb+moRj0SfJvMZk7MbTafGzzDOzCLcsVnKLTV+zpk3FEUVRc/5vd7xzPHALbKXLTtuxqPIc\n",
+       "oSoENmWRMUl0hoynaW2ntlbxQo8kvUIySvWE+ULj3BWQZilFJXBdvXjLomgWVJoXpGntEGjbegis\n",
+       "J/UQ3KrEDgLc+uaXRYGQFpO0YpxM6htdNIvVlFxCKqpcDzrWN04RhgFSimYzyDNtSO95rjaxr7Sn\n",
+       "uYZ4bGRV0u+0WVoasLa2yubWpv65vH/H3WTbix1tmCu2DC9VVRW2VWfblkTUHfwsTxFYVGXZDGNG\n",
+       "KVSZE6cZluNwVOZ89rM/TNhu81u/83t0ehaqEsRxQq/fJ1daOHJwPMaxBLdv3+U//Idfp9tusdTv\n",
+       "4jsOH/3IhwGd8ezt7bG5udlwa43fTZZltNtthsMhRVHwwPaDpFnB9Zu3+PaLL6GE5M7duwxHI8oS\n",
+       "BksrJJmmuKmqIMkSqrJAKEWe5fy1v/HX8T2PKJriuz55rr0o8lo5ZxYf0GRQZgEBDbvCZNuLsIdZ\n",
+       "fIuCjMWgZ+7XIpd3kdJphueaRinQcI5Nw3ReLp9kHyyW+OYzTeAvKq1R2Nzc1H7mYYtZrDPvTqvN\n",
+       "vb09VpaXeeH8S3iuQ7/fxbZtHnjggZpG5zCb6cDX7/c5PDyshWSq4WUbX+pWq8XFixfpdDrcvXu3\n",
+       "YUEZvFcpbdB0fHzcGMyZCe+GvZIkCYP+gDRJmIwntFstiqLkzJkzJGnOV7/2NXq9HklRYjla7dxA\n",
+       "EdX91ckmay7L+f0098lcv8WjqioKNRdLNT97U2BdhNpMUI+iqAnYJhExfZJFV9JF+MW8x2Jj27iU\n",
+       "mntvBD+LzfFuVw/b+DObgZdZTJUJPbVaWbVDhFY25UWhHdNaLcb5jKqsGPS7bG5ssrV1msk04bWL\n",
+       "r3N3b4+s0FCB41pks4w801PsldAX13M9PFvj4ZasM9CqolACx2/hS4mZMSkqCcIBJNKWCLSHsJQ2\n",
+       "SAUSXNev5b4ltqN0ExTNTJkmOa6t5c1C6PPK84JZlDXlcJorJjMdiB1RcRtNW6xUqRuMAtbX14CP\n",
+       "/IlrdjQc43s+ti2oyhxVy+od22mwXtuxkYAldRZWllVjGlSVFQi98UkpkbYWmWgamdSzNPMKG8ln\n",
+       "/9wPkaYZX/3Dr9PtLaGQjCdTLCeg1+0ym01QSrK3f8DevYq1lWXGownxbKphkO1tds6dZjyNubd/\n",
+       "RFVpI7GD/X1WV1cpioJZdMj+wTHD0ZTJLOKf/x//J7bj0O50+fbzzzNYWkJKm/6gz6SerJ6kKaJM\n",
+       "tOVAVbLU7/D3/97PcXR4QBQntFpt4kjj7EmSUGblnA1SZ4SLAdLzvDk1s2GYCCxLS981RGfjOnrR\n",
+       "lUWFbdkIadeZ8SK0JZvSffEzmkaU0toE8zuGM6yDiT63Ss2tkg0+LKWeWamUwqmZUJaUZJFujG+d\n",
+       "3uLs2W1u3NzFdmw9GT7wqcqyCcb7h4c89OAO05luulZlyWQ6par7FEkck6UpD2zrhufW1iZZqp/b\n",
+       "q1evsrm5yfb2NnEcMRj0EUKwurrK/v4e/aUl3SD2PCbjMd1ul36v11Qc0rLwfY9Wu0WaZQR+wDSb\n",
+       "MJ1OCcKQvf09ZlHMrVu3iJIUNwgY9Po4MmU2ndLpdgmC8L5xxGDgjq21F0YGD8ZjX1dB5lrmxXze\n",
+       "JNBs2I1+wkAqC/j3IrvE3FttETAXgJnfg/lGbu6dqeBMZWDYRHktDBJSNGwkE8CNtfafWQxcusb0\n",
+       "xSGry9dZFBMEIaUQVJbFLM2wa1c937KgLFhqeSy3Ax47+53MZjPiNGE6jSnLkqhUDI+PGY4m7B8c\n",
+       "cHhwRFpEOK6vxzQ5Lrkpd11BpTKKvPYzsES9sEDYQp+TZeOGPqosa4Mbo5JSCLt2JrQF0prv+FWR\n",
+       "Iyot10cJhHS1paUSKAQVWiikhEBJi9iwRygppc7Udkf3b9j8y1/5MtTiECk0991zHXr9Pp7vaSsA\n",
+       "SyKp8GRJt9ul29XWqRunNlAo3BofNNztvf19Ov0W7TCgXYueJpMJk4N9PvWJ7+KRc4/wC//yl1BS\n",
+       "i66scsokj/W1cUOssEOpYH8Uc/doiueHHOcRV/ZeoX/pOp7n8dIbew3UVJUllnijgVSyLCOKIibR\n",
+       "RDdoiwh3MmPtzGksS+B7PqPREe0wJI6OyNKEwLGp8phnnnmGT37ykxweHmuIIi/ICz3+qjgeUhYF\n",
+       "rdA/EUxNsDULzODcVVXVDBFVVyjaq8MWNkJJyqwCJZFKojJFZSlKpZlMhuutF65q/q0w1ZhNVepR\n",
+       "X9IyakHt/SOEIs+zWsBREQYhRS3kqhQodKO92+s3lZbB4x3HoigzyjLlv/iRz/EPfu4f4ooA2wmI\n",
+       "s4xuu8Pd/QMee/Rh3rh8lfe/9/34XojnuNzd36Xb6RAOlgG4O5qyvLxONMuxbZ/pWCtz9/b32Nrc\n",
+       "JE0TUC62Zenxe5ZkOhkRBgFlXmBLTQkc9Ae6aXl8xPLyMo7jcG9vD0c52gs3LvFaPrnjkhYZcRaz\n",
+       "urXG7/7yF4jiKUtLqwyPRwS2S1nkBJ5Pyw849+DOfddEmVfY0tHy+LJE2n6T8WrKrzEwq1BSYLs2\n",
+       "sqhQUltKGEFPmmV6GIRS2jahKlF5hqhOPjuLWbJpUC/+bV5rmpJmfsAi1KmUAlViSeNfBHE0JUlT\n",
+       "PC9oAr+51293vGMBXM+QzBqQv93unOiy610uwvMcHMdlNotwXJdWq0WW5fNhvkIbtVcVrLoupzfW\n",
+       "UAp8L8BxPMbTKS++eJ6rV64xmkxAgR+EKCqKskJUtbVtpVWaruNCmRPY2sK2SDMsW2evQkocaZOU\n",
+       "uX5oHQfHcmozotpYCiOvNhM+JLawUfWiLspSbw6Og7WYvWHhOLKR1d/3mlUVlgLLdhFUxGlGmuWM\n",
+       "Zhpush0b23GgKsgTnbFJS5LECb7nEkUxrufWXPAeUkqOjg8RsvZpkZKN9VUeeeQRNjY3aXe6PPHu\n",
+       "x/ib/+PP8I9//p9SVSmlkNiORV6kHBwnDUPC9x2UskmyRDd2peTm7pHGBtV86rbJRsqi0KZGRUEU\n",
+       "x7TabXzfZ325r2eM5gkoSS4yHGlxZ/c2/X4H27IZHu7zkz/5kzz77LNEUcRoeEQY+gSexp/LPEVS\n",
+       "ISzBeDxsIJNFPq5pDBZF1iiCldAKTtuyqVRJluY19imbCq4otHLXcVzK0tbNdAVVWVMzgVIY62NZ\n",
+       "M5Nqy2RhfKJz5MJsTkNLTFM90EHKuYLPlO6L/YN5Gd9CqZI0y+n1unzw2ed4/oXzjMdjpLQYlWN8\n",
+       "32P31h1c2+Jf/9Iv8VN/6S9x9dp1VleWcF3NEtEQh0UY+Fi2w3g0Znl5hVdffZWtrS3abc3njiNt\n",
+       "mdtqd0mzHLvui0glcF2fLCvZ3t7h8PCQJNHDL+7evcfGxgZ37tzBXXVZWllmNBrhBQEqlURJzC/8\n",
+       "i89zcHDMyuoyBwd7dDo9ijLDlrqiWVpaYmfn/gF87mciamX23GVSCNGYe5VlCZkgK3JaNUVWWnOL\n",
+       "V2uhwSvrQG77LtS9C/PMLGoBGjhVzC1lLctqlKiGvTLvb9CobwPfPZGdO46DZdtUFc06eTN75n7H\n",
+       "OxbANSfbWfDyQEu5hWxoRaaBlOe173bT8NN2oOYCuLVQw7UthNDeI1WpqKqMfsvlo8+9n+/6yHNU\n",
+       "peLg8ICDoyPSrKgbPYqq0grE8Xjc/CkqDe9Y0tLUQgFCWKiqwBUK4WsRQEUBaFaKG3oaY66HtioB\n",
+       "WIoKDblIaSFtG8vg6VWlp25jgrjSJbvr3P+auR5lXmiGi1IgLf0ZQqGwyZQgy7Q3i+PqUr5UiqAX\n",
+       "EkcRlt9C2g6lUhyOJzqYCQeUIEo0H/fVi1e4uXubB7YfIE4ijkdjhG3T7/rc3dsnwWaWRvi+xvuz\n",
+       "PKGs5ja2rmM11q6+32nk5kIIoniGqoO5QiEdi3YY0Bv0UUpfi+FwiO1Y2Jb2cpklkVYcBh6T4ZgP\n",
+       "vO99/MCn/1uWl5ZJ4ykS2NpYYzKZIGqnwE7o14we1QhLzMIBmqBoRBSGrke96WZGfAEkWQwZOLZL\n",
+       "nmT1+DmPND05rNaytR8M9b2uygoqnUVrd0g9fX5OK9NBOAiChulRVVX9vEl9zdCL2bFt2q2WzvBq\n",
+       "jLcsS1SpvaxbYcg0Svjgcx/k1VcvIYRNmqTgCWzbZTiZsdTvo4TD//PFf8/HP/ZRLFsPhFCqYhJF\n",
+       "NWynmM3GeJ7D9Ws3WV/bwPcC7t07wHU8Do9HLK+usbKyRhxHNW+5RCkb23KxpEORV2RpwenNMwyH\n",
+       "QzbWt8jijEF3mdHRmMTT3vBZntPqDHjl4hvMopxur89wNKZCIURFp9uhSDXUcPr0adL0/pNpTDUi\n",
+       "JCg1H45uWRZZzZYxEAmA73lkNSVRqAW+fp43mD3UM3IXqInmZ4t4tgnoQLMJz2azhuNtzK/KN72P\n",
+       "4zgNF928L2jKs+sFTT/EQDZvG0ff9v/+ZzwWaW8aH/UJgrDxVjbkecOVNDuULkl8RqNx3ZX3USj2\n",
+       "9/dZ7rTrslTUWKOW4IZBgOtpXxCh2qwMWhSlzo4M7mX8DsyF8/2wYQ5MkhmT2ZQrV67y2sWLTKIZ\n",
+       "WJJW2EYJQVVBWc/irISF43pNs6usMwIHuUBZqnGyIicvMpCm8aJvx1v5H1RCOx8WeakN+anVY1Ig\n",
+       "1NxPQQibvFIIqb/bdJZgOz4tzydJEz1yzbVBgUCSFxWSimkcAZJZnPP8iy/RH/R48qn30Ov3Offw\n",
+       "w9zavc1v/P5XOX/+PHY0Y3VpmbRurDVd+lp1aNsWulwQNR9e1PYJdb9BirqBkzCZTPB9j263i20J\n",
+       "oqneXFRVIqqKJI6YTSb843/0jxgMBqTRMar29BYojg4PNRXNGAQpkOhyOS1OTlN6M05pgnuv19M2\n",
+       "u0KLOqgDaJ5nWJZNmtXe1R3tP1LkVbNJaD8S1TSzmlK7rn5tR1NPzSanMzLtsXN8fNwwJfR5zlWg\n",
+       "5hyNXYHjOBRZRpSmWlhkadqs7nUoVpaX6fd6XLl2A9f1Scmx7YJer8doMqXX7nBv/4g3rt7gyccf\n",
+       "w7ElZVmxtXUa39eDLVphyGQ8pV+Le7KiYDaN2Hr0NG9cucL/196Zxkp2XPf9V3XXXt4yb/bhUIsp\n",
+       "yrJkSaQo0atiy7Ei2fESBPCOwEgQJN8cIIBjy0AQ5Ivj2EicIHEMBIkMRXGU1ZZpx9BqRZAdSNbC\n",
+       "RbvEhBTFZYazvqW771K3Kh9OnXvvGw6Hjm1xRE8fkJh+/fp131tdderUOf/z/4cQ2Nzaoo2UqF3X\n",
+       "0VRtXwQGRFnLQ5mVPPHYE5w6dUpQXOWU8xfPM93YBJPx8P95hPe970NMpiWbmxv4DmazCUluuHDh\n",
+       "KU4eO8mJEyd58Ytf8qyR6IAEks20beV0riiiYoQ40XU+Rn1c29I/dphpmrJYLHqFL/E3Rd9DcC2a\n",
+       "REEK4y5L/cwx5LcoCtqm6n3OmKvn0mURzd7e3ub48ePP2pWtZm7k4Y2Esx8GCiAHfieE8DZjzA7w\n",
+       "X4AXA48CPxpCuBr/5m3A30JE/X4mhPC+67xv+N3//m8B21exlTNgf3+fyWTS37TubF3XRW5t4c5u\n",
+       "2yhCkNg+wst8R5qlVFWN8CUPquzxcwGJiNJs0g+8ojGcE06WNImipyZuNPLHdEiLbes6Hvvq4zz2\n",
+       "1cc5WK5YLpZUdUPTtjRtFx/LsSxNM0JQBrvQ5+RCQIqPnUw+4Rm2KO/Ff3z7bzzj+/jJv/N3yVOJ\n",
+       "zoMPKKrZB4EK+iARmsfQOXEIeSqSZkEjB/mDnr1NhA0yQQI1NcY7uq6B0B1qT267jjwvcIlQ4u7v\n",
+       "7bO7e1WigAYsOgAAIABJREFUxEwU0bMsI7UDi5/Jh3b0rpNGJ/ygN6gpAhC+ctA8Y+xm7BwWOHH0\n",
+       "KN/31rfgnaPMC7AdwYuGYZZlmETSG2mkZD26c0SkxFILiYkLpsFgYueuF7k5RR7pHDESRQt6SZzs\n",
+       "cPRd9QiCulphKOXrjGgbIp1wnCoxDSKMmLVbIsRUctpxTqGBtoev1XVDGr9b3Vg07aTBjhbU1Imk\n",
+       "kU8+TTNskkUJu4xf+ZV/xv5yRVW1bG1vM5kI50eWptLV6Vrufu2redkdL6GplhzdOUKRp7iuZT6b\n",
+       "ce7cebY3RO3oShSAyIqUz37uc3zzq78Z5xxPPPEEx48fBwxlXnLp8mU2NzaidqfhyPY2YNjf3+9h\n",
+       "eMYYTJ7w2GNP8NT5p/ngBz/E0eMnmEwn7O9fZVJmHDkyY7HYZz6bcPrEWe65554YicI33/09z1gT\n",
+       "n7//I7LR+fZQpO29nGv1dDWmoVDEmW74SqyWX5OrHiOmNPga4/R1g9UUivYB6HuPkTC6OWigY83Q\n",
+       "NCY+SfL1bTt0VOvnv/YNbyaE6/Pp3jACDyFUxpg3hRCWxpgU+ENjzHcCPwS8P4Twy8aYnwN+Hvh5\n",
+       "Y8wrgR8DXgncBnzAGPPyIIQlh2wymfWDoTepkld6vM3zHILl0sWnyIucNEtJkpaNjTl1vcLahCRL\n",
+       "hf0NMCHBJIasKCjsBFVeSVOp2vdbVRChVJvEhpbYtGMTQY80bROPQBOqSo/aGQFDdeBIspwzJ49x\n",
+       "9sxpmrYlYPv8m4u58raR1tpz587x9IULXHj6gshL5bl0bwIJgIEuNFgjatzGypH5emYNVNWSrCcA\n",
+       "Erw3gI2LM0QVeh8MIUaXWZZhs1wad6xEpxoh+BBYNTVBanQy6RLhg07yGRhhZCwVdoe0zW9uHWFr\n",
+       "a1uQDq6jbYRkbOWWEB2lWw0wvJ4cLDYQXVsQSmMXqywAUeWezeeYEGialvvu+z3auiJLErokMibG\n",
+       "MUmSJJJgCc3BZFriOheheylFUXL61EmKvODIzhHu+IY7KMuSK1dlvm1szGOEOKd1Nb6TQrHrhKPd\n",
+       "AKGDLMtpqprOdQRqjBFRDx952m0qqTSJ+GJwYGCzlJyoa1vSJCePEfZqWdFUNSEPZKkUCFVJSDcX\n",
+       "dRLaTaqRuixwR16IknvTNWAsKYaf+qmf5B3veCdN46TQv6rY2dmhiRzV21tbfOwTn+Spc0/xEz/+\n",
+       "I9TVitA58jThq199nCNHjhCMxySBNLNMZ0KM5lwtm7rSPk8Ecuh9S1lmVPWCo8e2uXDhIouloChm\n",
+       "8wkXLlzAB4F1FumULz38MA88+BDbR49RlCVXr15hNpswmWYcHCyYzUo2Nze5/eztUjg2aeT2eaZp\n",
+       "cbxxdR8Fa9qCEKTWFOeYnM4CVWybl1NSIimdWoQmxnhvdbDz2UzSGs5B/D685qjjHA7e42MzkDbr\n",
+       "aN1Fm4w0AjfGYBh4dDQH7lxHGmXhNP/9Z0ahhBBUlC2PPucK4sC/Kz7/DuB/IU78h4F3hRBa4FFj\n",
+       "zMPAvcBHr31fQyKQtnjk1qLftV1srvU9JCxNM/b29iSKijulJ1A3kb85WOxKothyMpEF4aQzTuhn\n",
+       "ZYGB6EYaa2WBWiL+OsJ2IqxnVVcR1kW/IAMZXdtIC3rMlfZQvAB5JpDCwqbMTh3jxWdO9Bws3nsu\n",
+       "XrzII488wrlz51mp8K+Z98UR/fd6tr0xoVpJoS20ArX0iICA805URRIRGvCtpG6K1OK7FqPV7A7q\n",
+       "yHme59KkNIlj7luJKBWZk9hMNrgkyEKy0LESB+cjpW8KttS0hDQvaTW+9nXvbEJMLyRm4I/oye07\n",
+       "Kc5mRR6Pk1l/OvBdR4tIqfkANhjaYPuIx1qDB9p+3AKrvQpl2GsuXSJJU86dv0DTtHg9xRnDkSNH\n",
+       "OH1KmBUPDvaZ5hM2NzeZTuUEOJtN2NzaYDqdMJtMaVuPtblALlHcMeRpRucc1bIe9A07JaMyrFZS\n",
+       "2ykLUUz3vsO1gi+eTmcYY4dOUNOvu36xa2pmHEWC4Pg752PhLsV1jqZuOLazzTd+453c/9BDwnpp\n",
+       "otqV90wmU3b398iznIcf+Qr//u3v4K1veTM7R7a5eOkis42tWGtpWK5WBOOpY+fiqVOnSBLLbDJj\n",
+       "f3+X1WpBmmZ458myhLYV0rJjx45y/vx5tvMdsixle+eI4Pmbht/77d/iS1/6MkePHcdYy8HBLuDZ\n",
+       "2tqgays25xsUZcqxnR1e+tI7+trFs+GhFwvBSudl3iOKNCL2IYjylh3mizrVcR9AX0uLUF8lchuf\n",
+       "HPoNIdIg6CahJ7S2bWli4KnR87hwqR24fUevE00DNXXurRv6PMYNZM9mz+nAjTA5fQq4A/j1EMJn\n",
+       "jTEnQwjn40vOAyfj4zMcdtaPI5H4M0x3JSCqm5heDUMB7lrIjNeBSSxnz55FeIKlkLNYLXsMdBIH\n",
+       "NslS9hcHdE4EBXDK52wEwhcCdSv41DQ6NmONSGpFUQhNq9g0xXSCUjeS4JUJYiI5k+/oOlkcUtjz\n",
+       "cYdPSNKEpl5B15JQ4F3H9nzKva+7SyZK4yARx57lGdPJlCtXpLHlX/7qM8fsjhed5sqVK6wWS1Yr\n",
+       "qXC3jTjJNM8oi5IueLxvSRPZmLS4kyQG57sYaQvTY2oEF24IgtrIE1Hyzg3WJtR1S/BQ5CK5Jrjg\n",
+       "BJuJYw8+9Dlt+Y4SYWWMTjs1yZAuiffQR5DdkEoJIZApjwwy5sTOgCzNJTURhE8dDElexkgrOjIr\n",
+       "fQRd5yXfbsEHS9dBVgjD3P6qocwKylKcN0G6cnd3H5YieS7SfCqEnKWppGmMZ2Njk8mkYHNzg+3t\n",
+       "LW47c4bbXnSGyXRCFnO/nYcsLyU9Y6SDL2DASColTVI6F6idbmqePDfU9eHmIde2mFG+V7DMvpfP\n",
+       "06akqqqwUU/T13JqoXO0bU2SWP7q97+F1WrJJz51PztHjwvkbyZScZPJhOVKmB53Fyv+22/9Dq98\n",
+       "xcs5dfIE3/iyl9E0FcZarly5wvHjxyR48YHJZEqaZLRNC8EwnUhh1UShZ3BkWUFVNSyWNUd2UroA\n",
+       "Jsn55Kce4rHHHmN3/4Djx49Tt8KUOJ9PKcsC1wiufzrZ5MW3385rX/MaXAdFMenFgq9nRSmbWhpP\n",
+       "NYc6Hhl4ZoiPnXMUkb9GA40kSWK9o2UymTCZTA515g61pUHGUKkR1DfNZjPmI5STIlDGNAjKdmiM\n",
+       "wZrQQwV1vRRFQesGWTU9hd3I/iQRuAfuMsZsAe81xrzpmt8HY8yNSqXX/d2/+vW3A3L8ves1r+Su\n",
+       "176qbxNeLpf9ZN2vhIb0ypUrbG5v9Q62bQVnubGx2Tc/tG3HfC6pmfl8hncdV6+oiKvIX+ED1liS\n",
+       "JCdJkyg6G6PG4MmyIjqS2KThO0KnajiiqakiuDphgvdUdUW9XGBTkbRKbEGSZVJZzoq+Qy1LMqp2\n",
+       "SQhQTApccFjr8Y3j6uqAjfkGTXN9Csnv/a7vILEJRZbjXEfrHMWk5JFHv8L/feRRLl+9wv7BPtVy\n",
+       "RfAOEJ3LvChYLiq6VvJz5UQccttW5GlCYqSZyRiLsa3wNHYdk8xgk4wQPGkSmE1K6moBIm8h6ao0\n",
+       "hVggdq2n7TqhyjVQ5tLaLwXJoTXbRAfqfSdKRQGC68jzRLq5YjcrKMQzgBfOGe89lRP0SJENGoME\n",
+       "sEkuTTjWoLGNjSe7IpWcdd10/YkgsZGjOy05OFiQlTlJXkCAqnMkkRXywtUDkoMF5y7t4tyjJOln\n",
+       "8K7CEDhz223cfdfdnDp5MnbAJuSZIJekEGrIkgRnXWR/sLFYHXrYoY6Jj2MhcUI81cRTSGqHHKtr\n",
+       "5CQjqAsvJ0aCcN6XBc7VpAR+5Ef+Oj50fOqBhyjLCft7LbP5nLoOlOWMg+WSNEnY3Njgw3/4vzlz\n",
+       "8jTGptx25jQGWFYti0WDMRJFlkXBaiU55Ukx42B3GVM7E7w3BJ8SvKWYlBw/foonnnqaxWLJe97/\n",
+       "XggiY7izc4yLl55mtjljmmekWcDQkOUZmzNpALrzZS/HNZ5Vo927z06rqgFB5wc9Sk1B6YY45oDJ\n",
+       "I30tMUrXKDmNr9cctiKUxtG0vr9wsGd9oOmcY3d3Nyp5DU1c3vs+hanSdEPNxVPEwnTfBZym2AQ+\n",
+       "/okH+PgnH/wToVBuWMR8xouN+YfACvjbwHeHEM4ZY04DHwohvMIY8/NxQv5SfP17gH8UQvjYNe8T\n",
+       "3vPu30RI6geKzs7LEVhpHYWhrIjV3Uza3uPN5rlEIdP5nP2DXUIIlIVUnPMYtRikmDQu+DnXxWhf\n",
+       "dm5No8hxyFMUeX8Ul0ngadrYdhuE40LSNYGmrWUzsCP+5jTFhbirB6WpzMhjq2xi0z4CrNsV2NBD\n",
+       "1nwQhsPEJtz7xh98xvh/5lMfJHQB34WolZgP+dckgcTQ+YBrG4xvyDNB6FR1y2OPP84TT56naVuq\n",
+       "umFxsCQY2Nna4czRY7SdFFq8BTB4A3v7e1ENqWF3f1c21koiRDmYGaHuNbYv5vm4qHwn95VnA1QU\n",
+       "JMohNkf4mAZT9AqGnstDH/cwLf20AB32UGSUWElb+c5z7TqvXYdlYIJTFsbgvdAbxFxmmqS4TDca\n",
+       "4eYxxuDqRjYpE3Ct6xEGqfHIcUbSTqJI34loSBEFGeJxez4tIQjVwtbWNvP5lNlsxomTJxAVnIFj\n",
+       "XKPEcRefbnzjln7nnGi7lgUhyIbqu4F4KxhDVTc4D7/7e7/Pgw89JMcxLJvbOyRpJrA/58jSTOia\n",
+       "r+6SGOFbOXnyKHd8wzfwile8QgiuHn6Y206dYrFYgBbY1AFF9NbTFyRd9eS5czzy6Fd49LGvihpU\n",
+       "lvYCzsE1YDpmGyWtq5hOcpqm4tTxU5w9c5Z77n49+JTQeTob+kgY4M5Xfesz1sQXH/qI3MNExlhP\n",
+       "gDpn9LGe+qy1kKcEH7A6H2HIaUfnnUdx8kbppu1h/nc9CY1JqopIAztO02itSdPD/SZhhhSZzk3x\n",
+       "gfTBq2LZv+WNP/inK2IaY44BLoRw1RgzAd4M/GPgPuCngX8a/313/JP7gP9kjPnnSOrkTuCPr/fe\n",
+       "EonEHGbMV9qkJEmG6vBkUpKl00gANHB+CIWnwnwOZGB8IE0MIlPWEjrJCyZJQhbTFHkuijyZTWhD\n",
+       "Qhs/R3ZsHzGtpt88+hb7JBUOjojmaJqGrnXkhfCFd52DzpPmGSYdlHy6LjCxA3/GtCgjxGnAHxvT\n",
+       "xZw/lOWUjdlcFsl1zDU1TdVSZBllllDmObVzNK4lGE+WlYIvxZOYgGtWJEnKtMi448Uv4uV33klR\n",
+       "TAgY2k5EbG0Af7CS00chAs/eeIKFqqmxqSErMmH/w5ClEx599FG+/OWHOXfuPFVds1hVBBKMEa4Z\n",
+       "Y8SxrxYrouJXPHKmZElCEkWEJbqJsmLSdkgwkFkL9nDHmxanEpuQR9X2mNEi0rXLQvGKuImRdlFi\n",
+       "rYy/Dfq6gDUJSWpIioSmrvHBsKrVcYJrOrAS2fWsdcCqEjKq3Iv4QpalEY0ios/4QNvBwUGFj6im\n",
+       "1bxgf2+X5WopdAnAxoZoVgphUmBzc4PJZCINVPM5J0+c4Mxtt7Fz5Ag7Ozvs7e2RJCmr1RLvRTfV\n",
+       "B+k36FyH9w4fN+HlckmSplzd2yPNcn7qJ34U7z2fevBBDJYrVy4ym2+R5xPyvMT7wJXLu5KTNZAk\n",
+       "GZ/73Bf5ymNP8Ad/8GGCDxw7usO33XsvJ06cYGM2Z7VY4B0sfMXjTzzBF7/4RR75ylfIilJSmrmU\n",
+       "zKazCU3bcf7CZVGrN4HJdMpiscv2zoyua3jR7WdJk4SX33knbdtS7S85cfwET+9e7BWVNO9/rSl0\n",
+       "r1L5NDMwbnadwG21qKgTpY0plgRFpUVfpLnziFIqy5J5vnFoDuprNUXSNE2fTlmtVr3z1tqY8sho\n",
+       "2kUdP6E7pL0pcNY2wp9Nz+L5bPfd++jngBG+GilS2vj/O0MIvxJhhP8VeBHPhBH+AgIjdMDfCyG8\n",
+       "9zrvG37/t3+jHzyIEawd+HjHFz7mTdZdUJENuruNcZ2aT9IdUB2yFjc0atMUyPXyTDoBdFcc58v6\n",
+       "QtVoRx7nc68F/+sXb2KOHejTMJ0bBInHkKR7vuMtz7imBz76gT4CU129QHdIVFWKLHmEq7m+lpDn\n",
+       "OU30plr1NsawWC7Iy3xIU+g4xuOhWugiYU85sK7pv3rvTSe0rJcvX6aqKpaNpHlc23JwcIDrOnav\n",
+       "XuVgsej5nauqEmFoW8Zx0/SCx9i0XwSa3/Yh0NWxOcOKmIe+RvUp9bo67/G4/nvr/FB0SpKk1zCU\n",
+       "G4TQDa+99vvVcde/z5Okl+AySRJx3kJqJvBD+npJaiTCk4ULrnNMJ1PquiKECFUNOq/Bd8LbLvPU\n",
+       "YAMcO3qUM6dOc2xnh9lsJnn11Eoj1NYWR49sybXHk0vb1mR5BgZa1+JNykf+6GN8+A//iEW1Islz\n",
+       "kqwgL6VjOcukBkAI4IX7XlMQNsoTXg9PHYInzeJJNxjapqOczMjyjKapwHQ0TcXm5oy2rdiaC6Sz\n",
+       "LEt85zlz+jR3vuxl3H72rAQ6TTs6gQwCFiEEXvW6Q9lbAD79iQ/GOSvzsCwlSFLMvfehryP0RGKT\n",
+       "vH88dH4j0QND+soY0/OV6Pc/rvnoOtcNvg2DdqpG3vpeY14Ta21f89HPUXphY0x//5q6ef23f/+f\n",
+       "LgIPIXwaeN11nr8MfO+z/M0vAr94o/fVix6rnFgrudqhNVYmy+XLl9nY2Oid+LUDr1+uOmrducbk\n",
+       "+sulAGmkCj/pd1GFLSrWVojwh4JHHyXbgXZUaSN1Q9DP0N14uVz2jUjq7PUaVG1Dd+v95VKEjuMR\n",
+       "dFy0vZ4pXlUn48bGRt8Fqbk6KW5ZibTDoPeoY6XXphvUbDYjmIBvHT5+JyEEFgcHfc4uz3PyLO8n\n",
+       "YTfaXNURr1YrbJqwsbnJ8ePHY0ST9/nIxUJa+zciVlhhccp69+gTT1FXFefPP83jTzzO7t4urm0i\n",
+       "U5ucgIxNKJIEXwjfubUBEAIjQ0qWiFyXa53UGPKM1osQtqAGhJzKAM41FGlClhjyvGBvb1cKjla4\n",
+       "3L1GUnG8k1Rk8HSeYWzfTSybg8dF3Upj0n5TTpKE4LTRI/KgBMOqqsnzkq5zUkL2gTTNSRJDyPKo\n",
+       "BIXg5ruOvf0lTz75IGWeRWrfJvKqREWoxDKbTjl2dEcoCU4d5+Spk+zs7JBPcgwJb33r9/Hyb3oV\n",
+       "//rXfo2mavDecPXKHrP5BvO50BQkxoozjgyWWZy/+/v7HDlyBPwgJaaanZ2v6TqtDQXapiJ4x2q1\n",
+       "pCgzygidzfMZu1cv9nTKx48d53V3383OkSPCpljKnN3e2qKuapLsucmc+jRavN6xIEddN31znDJN\n",
+       "6tpWBkBtRtK+kh6CGH2UBka6zlXnUmmB7SgQbJ3rifi6VGpBWaTGSIyhiIRceZHTua4X94aB4nbc\n",
+       "FKTXeiP7/8qB/3mZMSa8977/cMhZJ0lcPCPoFAxVeH2dLqCxkv2Y1a2HrY1er1H3+AjjYp5J30ML\n",
+       "HRrV688hhNgOfZhm9FpKUv0bVa221rJYLPovWBuWdCOS475ElXoNGuV3Xce3fvcPPGPcHvjoB4Bh\n",
+       "EzPGgPH9Y3XUwQecG04qep15jJ51UchmNqSwFKrmnIhkjNt5lSlNTwia+9QIQ78Pjxzrq6pikk8k\n",
+       "pRHvryjyWLSU3EdTN/142xGda55ntK2TfL6TnPtyueTqlas452lDx5XLV6RLzYg2adO2BCMbyWq1\n",
+       "kg16b4+9g4rpZMJqVcUClusjqSIvek4day2rCL3UzU2/17HCuJ5ecKG/t1gcGf4uPrbGCMS0c1J3\n",
+       "UfR6YNS4NWgiFoVwi7hOajBSX5H6QmolOrZW8P0+jrWJqUQ6ke6zsWCMhaZtMIllZ+cIs8kM4wNn\n",
+       "zt7O8RMn+P33vofzT18gzXO5fGvJEmmmy/JYzI1zTdOQ+l0rJ4ykKiDgoqCyAT+kIdq2Is8Tlqt9\n",
+       "vHdMJiVHt2Zsbm7y8jvv5JWvehUmyH22TUOeZRzsH/QINZsOsL+u63j169/8jDXx6U+8/xA8UAOK\n",
+       "IkI2lRd9DM0NSDdk50X0HIRwzMTx1s07jwVKddxd1/XSa7re9Nq0cWh8yrdm4ECxo/Xku446Bmvj\n",
+       "rlBtaNQNRP3Z3d/ylj9dBP61tOVySdd1vQhBFVMCGplpC6keRw4ODnoV6cVi0SufjLGaqiVnre13\n",
+       "4TFESyNdoG9fHjt3zZWtVqu+xXk2m2HtwJfRV63T9NBmoV/omLBJu7Xquu6J8/M87++5i4gWnVy6\n",
+       "GT0bDlxzbGOdvLwYRHT7jc+Ynnt6f184zbe2tqgamdzqkITIaEVZFn0UoZuhOnSdTFpY1o1VMKuD\n",
+       "gGsVtQO9cxgM8/mcbtVQFnl/ymmdE9Yn3bQtZImlamuM6cgt1HXFal+k7bI8h64Db8ht4MSxbWbT\n",
+       "WWzesriu6zlEtFt3/2AfFz9vUpY89eRFiqJguVriWkfd1IQAFy9eZH9/j6fPP83+/p445o0dDFKA\n",
+       "JQTSJBFkSjKXnDtSBG/bFptIbYDYbSn9BLZ3wILHj/C6xJJmwwnSWkteFv0mMS5K2jwnp5ACfKyv\n",
+       "+M7RdILCCa2Mn6g9BbwL1M6BD5KP74SStms70rwEA+cu7eLby8zLkiefvhRJkwzb2ztcvXp1SEHa\n",
+       "QLVc4YGkLA81tUgRdoo1BmftQL1rDNbGlFeQFNKqWpIYaF1N03jSRBq7sgTuuftu7rrrLgmaYlC0\n",
+       "WCyYllLkVMRYYizZRFODgaq6PsGbpKsGeDAwSkMEqqruGQE1yCgLWX+uc3FTFCRZQL5zvMe1LcEP\n",
+       "HDVjLngNDseyacaY/vo1764nbXXMWZbJyckOKk0qsafjrKkXubevY1V6ZVYb72bdqFilDklJ1Ofz\n",
+       "OWmasre318saaQ5Knc7Vq1ex1jKfzw85NF0kY5zohQsXeseWjHZHFRrQKNo5J3Jp0MOG+q4uhtyo\n",
+       "Dv5qtWI2m6EcGypArPe0Wq16jheFiPU/h4FP4Xr2bW96JjJlbWt7Idm/+fU/3/fT9TVOJGhgZ21C\n",
+       "nof+1Dnk701P1Tqfz2OrvIAc1DTwU1+iQZnCB1UQWaNway1FlmOCnAhb1+Gatm+H995TLVe9OHIW\n",
+       "WUG1nqYpWT3p3qg+N7ab5sA117RcLiNRVcTORlNHOZvN+qOItZb5bDa0FANNjG4Ta4XQKMKbuq4j\n",
+       "TZJ+F9X29KZt2NvbF7hXzId77/svVHdA5xzL5VIihUzyjrPZLKIABpXzPM9oGsGIz2YzNjc2Ygoi\n",
+       "jeiXwGw6o6oEEVLkOcTqt4uq20mWkubSzRdMjAzWtra1PaepOEKSqNMeI5cGB6s1N4mYh4LkuBtT\n",
+       "T+q6KYBobsoJS/R4pZGnIUkUciiIJ+c6vPM4Z0ZpFGkerOuKPBdIcpJI/ca1NdWq62lAmqaRGkQq\n",
+       "sOcOHzeCr1NBh7HKtNBxpn0BoiiKPgKuIrPY+KiuRxdFUmTpoBC+t7cnzjgeU5xzHN3ZOYRUOXb0\n",
+       "GCBHlOVy2Ufqe3t7PZHWODcakKaTxeKg16oD+t1bj1Dnzp1jPtsQXHAY8LxK7B5C6PHSmoPDSEt0\n",
+       "kgjHBwxV8LWtbW03Nj3dggpqdCMAQ9Kf0lX/Uxy0II806pX3OFw81E0gy5JDiJA0TSiKnLquyfMM\n",
+       "yPruTO0YlRM5vQNXkrI0FXZU6RdJSJJhAxHaj5ayHGoPLhZEb2Q3T5En7oxVVXH58mXSSKqvTlnp\n",
+       "GrsIhdOEvxYUdLfUL8hay6VLl3p+ZS1AFkXRR9J6VBkXLZXe8ciRI33eXPPXA/xsgAfNZrNDOeiB\n",
+       "kyOmcpynyIt+MmmOfVyYFYy5i9zHg8wX0LOfrW1ta3tuS9PsULOTrjXJUx9Ooeq6FbqBQcwYBlTb\n",
+       "GOU29heaPlH0SgihF15RdFpqD3Ot9A2K0UmPU7gmDHh1rS/pRiD3JcV9Rak86/3/+Q/pn8x0YHei\n",
+       "nl5d14JMaJrRUUfag4ui4MKFC4QgCtd69OnTLzH1sbOzQ1VV1HXNqVOnWC6XrFarPlWiOWzNO3Vd\n",
+       "1xc+VZG66zopxnltz29JRsUR/Tv9vfIbtG3LdDpleTAQ2uiXM0afKD5dmP1EFUR3eC10Durya1vb\n",
+       "2m5kuobVhyjwQPDaAg/Uda+BkwlQr0TTsm7aCBNMaKqmr5V519F2DWme9ClTLeqOi/kwIhfLIqgi\n",
+       "ClZXq6p/v851dG5oBFIfpoGoNC5O+tqZBoTPZTc1Ak+ShIODgz5lEBjQHYou8RG6s7293d+UKmVn\n",
+       "Ixiboi40BbK/vy/dj9EZA30hUYuNQJRoaw4NnDLl9YPLQISjiJfxJhNihX65XFJOJhDoo+i+Cyya\n",
+       "RgtpmrKqKlxdM5/PefDTn+Oeu1/T1wbu/9gH+pxeXdeURYHhcAONyHvR5/hAK/EGwoBhJY6rqtOP\n",
+       "j5oQyKylbupIKyCF27woqKpKFoP3Mjmdo0yyvhMty/OoNDT8nUYYNkmEewYlt6Ifj6pa9ZulKs+Y\n",
+       "oMRXArYzFj7+yYd43V2v7GsCPUontrR774V/IiJvNJpyneslslaLZX+s1fdQJIEuat1sXecJwZMX\n",
+       "BdYY2lZk6rz3VCupYZRRLLhrZH6Uk5JqVfXdeABtN6T8rBUKXSlUpz3kMc9zadu3pk+5YaSW0jcy\n",
+       "IRGaIURCpIoHH/o8b7jntYJDTiyNa7DGMinLiHtOpCPYGpL4/axib8Asn8hrTJSISxLKooiKUcKp\n",
+       "0gXBvDvX9WNWFiVZRMqM4XGaZ/Y+RB7+ijQTCb9ghB9dT7pVVdHUNUnk1Ff2vTGpHUBRloQgY1mt\n",
+       "Bl4SPXn3zjKuTecsi8WKj3/yfr7lDXfTNE3fyaxc/CAILu178DGvrGtY/IeM3/hELJQEvo+4q6rq\n",
+       "r0X9w7hRT1Ou+jrttxhDD4c1OpimS8at9eOi6Y3s5qnSx0i3rmuWy6XID42ctTGGg4MDyhEiQyE9\n",
+       "xpg+qlZnpovRWnsIvqfFCuAQ9FAVf9R5K5JFO6108PRvdTGqCrY+p85QP7upa1nofXV8OEppLk5h\n",
+       "SXkxwLTlAAAI6UlEQVSek3iZBH/88ft5wz139c0Fy+VSmmxCYDabsYyY8rEIQZqKGHLfeRidcmIT\n",
+       "0jQ/jA0PwvsxLtyE6DydFzIoTUcpvKksS3Z3d+WU4qUtOViRt1ssFmwmCU1sttnc3DzUQOW9p4gC\n",
+       "szYRyJhzTuhriwmhC9hg0Rq7D77nyfZRXegTn7yfe1//6kNH4Wq1IMsn1DGnqVGRnsS8gcSOAoJC\n",
+       "FmkXcdWegAuyuJq6oekck7IU6tQItV1Vckw21nKwWLJcLvsT3aXLV9jY2KDIpBnD1Q6V97PxCF3m\n",
+       "pZBoIYuzrWrh4zGW1CSUce5KcBC7i51oa6ZZhvcOFyW31ImmNmValnzm81/me9/0xp72OFiBhaZJ\n",
+       "wmQyjdd8QJrE1u44T+k8TZxDRSHcM8oyuVwu2N8/IC1KkjShnE5xXUueCNugJbC/txs3FYgsI/of\n",
+       "xsqmbEygc8pZ469xkIVAM83gOLUdXdeaJ1DVVR84+G6A6I07ofU5heoZY3jo01/gL7/pjb3DBKJu\n",
+       "6bD2VquVnPDT5NBmAESo75Ba0YANe7gHYuxPdB3qdTz55JOcOHGi9zNFUbC3t9cHC+pjdPPSDUDf\n",
+       "d9wcqA7/udKpN82BG2NYLBYURcHW1lY/CYv5nCbmmdJE+Ep6HGaa9oxfV65cYXNzs4+GNQVT1bUo\n",
+       "TEcstLWWVSWF0aKc9lSRAREHDiFIVOU9W1tH4kIzffRrTSdyaZ0IGaxWdXSCoc+RqTOdTqfYTPhY\n",
+       "Glf3jjvPcxZLKYoWE8G4ewKta3uayTaS7adpyqVLF8jznIODochio/KQ0sdqfr+Jx0YDTEcaos51\n",
+       "vUYkcKjFWCOGEOQaiiJHlWoC0ETcqqBsBItrMGRFSdt27B4sKMoJBzHyLCczFsuqH3PFjwea/p4A\n",
+       "bHQ2s3RGMFKB1wWZkOG8p6qldTkI3yFJKimnPDaHFEgXpTpu3SDVEYwXhJ4IjDH41pHHOkpmE+rl\n",
+       "Sr6zosRg2JzNn9HcZYwhm5ZszAq6dkWZW4psBnh8sCSZdGcqdn/c/OObgZQ/mUSRDSOR/6qVrtTU\n",
+       "ZgNKIo+RX2pxbSApZW5qe3awEKylcY5lREClNsUEQ5oWET3VkWWW6XRO13XMY7EuhIAzjrSMp9u4\n",
+       "/vYaoRIoN2aUG7M+xwswm86pqorFYsFysc98Pqd1VR9FpmnKweJAggwva3cMrQ0hCmrH/oDOeyZR\n",
+       "61W/F00VysY3+IUy6rmGbMTSSBRMifJ1485sDd60Bqbv07aL/u8FX27x3lG3owBGg5zQUUzyQ1Fy\n",
+       "MB6RZUx6HyRBWC38+77DE+giH9Hp06eBIRugVA263jRgHGcFxigY9QPjefRcgIabmkI51LUUj5zq\n",
+       "jPULriMuu2cnTIR0RtnDNPpVBz4ueGq060fdgPq8Oii9BqCPVsbdimmaktqBUF6du/6N5ql0Msi9\n",
+       "JaRpFI7whyMAN9qQ9EvURaPHP5X0Uny6pG4KQpCUj/Kg6L3rlz3uRtXgQusD2hh1+fJldnZ2sFa6\n",
+       "Gzc3t3BuSCNo5KBHSL12vY4QzKHvTsmTNE2hJyr97HEE1XW+b6lPUtOnRMbt/poz1HvslYP8wEsy\n",
+       "m036fKY6bL32wzAy34/HtS3ZWmsZY2214UKPy8YYTJQt0/vpP8umhzZDvfbx0Vr/pnZNhJAlfcHL\n",
+       "WkvX+j5iVLyw1nB0zqtp2nBc8NZ5p+tJf9a5NV5X13Yj61zVQEWP8fp6bfba3NzsP1dPtuNioX6m\n",
+       "jvUYLaZObNzNqn0OepKU9STcMXoP+l663sZpQ4XujhVvuq7rkVz6fWvqUeegUlmM0Wvj9amp0TGP\n",
+       "t5y23SHuI4jc4wyduSCw5y624+t4HfrdNVH7OO05dtjjNaPvcyO7aa30z/uHrm1ta1vbC9TCs7TS\n",
+       "3xQHvra1rW1ta/uz2437NNe2trWtbW1ft7Z24Gtb29rW9gK1592BG2Peaoz5gjHmy8aYn3u+P/9m\n",
+       "mTHm7caY88aYT4+e2zHGvN8Y8yVjzPuMMduj370tjtEXjDF/5eZc9dfWjDG3G2M+ZIz5rDHmM8aY\n",
+       "n4nP37LjYowpjTEfM8Y8YIz5nDHmn8Tnb9kxUTPGJMaY+40xvxt/vuXH5FAzytf6fyABHgZeAmTA\n",
+       "A8A3PZ/XcLP+B94I3A18evTcLwP/ID7+OeCX4uNXxrHJ4lg9DNibfQ9fgzE5BdwVH8+BLwLftB4X\n",
+       "pvHfFPgo8J23+pjEe/37wG8C98Wfb/kxeb4j8HuBh0MIj4YQWuA/Az/8PF/DTbEQwkeAK9c8/UOI\n",
+       "ZB3x378WH/8w8K4QQhtCeBSZgPc+H9f5fFoI4VwI4YH4+AD4PKKlequPixJgiLCkzJtbekyMMWeB\n",
+       "7wf+HUrneYuPCTz/KZTbgK+Ofn48Pner2skQwvn4+DxwMj4+g4yN2l/4cTLGvAQ5oXyMW3xcjDHW\n",
+       "GPMAcu8fCiF8llt8TIBfBX4WGCsc3Opj8rw78DVm8VksyNnvRuPzF3bsjDFz4H8gItj749/diuMS\n",
+       "QvAhhLuAs8BfMsa86Zrf31JjYoz5AeDpEML9DNH3IbvVxkTt+XbgTwC3j36+ncM75a1m540xpwCM\n",
+       "MaeBp+Pz147T2fjcXzgzxmSI835nCOHd8elbflwAQgi7wP8E7uHWHpNvB37IGPMI8C7ge4wx7+TW\n",
+       "HhPg+XfgnwDuNMa8xBiTAz8G3Pc8X8PXk90H/HR8/NPAu0fP/7gxJjfGvBS4E/jjm3B9X1Mz0kv8\n",
+       "74HPhRD+xehXt+y4GGOOKZrCGDMB3gzczy08JiGEXwgh3B5CeCnw48AfhBD+BrfwmPR2EyrJ34eg\n",
+       "DR4G3nazq7jP432/C3gSaJA6wN8EdoAPAF8C3gdsj17/C3GMvgC85WZf/9doTL4TyWk+gDip+4G3\n",
+       "3srjArwa+FQck4eAn43P37Jjcs34fBcDCuWWH5N1K/3a1ra2tb1Abd2Juba1rW1tL1BbO/C1rW1t\n",
+       "a3uB2tqBr21ta1vbC9TWDnxta1vb2l6gtnbga1vb2tb2ArW1A1/b2ta2theorR342ta2trW9QG3t\n",
+       "wNe2trWt7QVq/w/Uvjt8hhUJzgAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x116bcb210>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# Find, print, and display the top detections: person and bicycle.\n",
+    "i = predictions_df['person'].argmax()\n",
+    "j = predictions_df['bicycle'].argmax()\n",
+    "\n",
+    "# Show top predictions for top detection.\n",
+    "f = pd.Series(df['prediction'].iloc[i], index=labels_df['name'])\n",
+    "print('Top detection:')\n",
+    "print(f.order(ascending=False)[:5])\n",
+    "print('')\n",
+    "\n",
+    "# Show top predictions for second-best detection.\n",
+    "f = pd.Series(df['prediction'].iloc[j], index=labels_df['name'])\n",
+    "print('Second-best detection:')\n",
+    "print(f.order(ascending=False)[:5])\n",
+    "\n",
+    "# Show top detection in red, second-best top detection in blue.\n",
+    "im = plt.imread('images/fish-bike.jpg')\n",
+    "plt.imshow(im)\n",
+    "currentAxis = plt.gca()\n",
+    "\n",
+    "det = df.iloc[i]\n",
+    "coords = (det['xmin'], det['ymin']), det['xmax'] - det['xmin'], det['ymax'] - det['ymin']\n",
+    "currentAxis.add_patch(plt.Rectangle(*coords, fill=False, edgecolor='r', linewidth=5))\n",
+    "\n",
+    "det = df.iloc[j]\n",
+    "coords = (det['xmin'], det['ymin']), det['xmax'] - det['xmin'], det['ymax'] - det['ymin']\n",
+    "currentAxis.add_patch(plt.Rectangle(*coords, fill=False, edgecolor='b', linewidth=5))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "That's cool. Let's take all 'bicycle' detections and NMS them to get rid of overlapping windows."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "def nms_detections(dets, overlap=0.3):\n",
+    "    \"\"\"\n",
+    "    Non-maximum suppression: Greedily select high-scoring detections and\n",
+    "    skip detections that are significantly covered by a previously\n",
+    "    selected detection.\n",
+    "\n",
+    "    This version is translated from Matlab code by Tomasz Malisiewicz,\n",
+    "    who sped up Pedro Felzenszwalb's code.\n",
+    "\n",
+    "    Parameters\n",
+    "    ----------\n",
+    "    dets: ndarray\n",
+    "        each row is ['xmin', 'ymin', 'xmax', 'ymax', 'score']\n",
+    "    overlap: float\n",
+    "        minimum overlap ratio (0.3 default)\n",
+    "\n",
+    "    Output\n",
+    "    ------\n",
+    "    dets: ndarray\n",
+    "        remaining after suppression.\n",
+    "    \"\"\"\n",
+    "    x1 = dets[:, 0]\n",
+    "    y1 = dets[:, 1]\n",
+    "    x2 = dets[:, 2]\n",
+    "    y2 = dets[:, 3]\n",
+    "    ind = np.argsort(dets[:, 4])\n",
+    "\n",
+    "    w = x2 - x1\n",
+    "    h = y2 - y1\n",
+    "    area = (w * h).astype(float)\n",
+    "\n",
+    "    pick = []\n",
+    "    while len(ind) > 0:\n",
+    "        i = ind[-1]\n",
+    "        pick.append(i)\n",
+    "        ind = ind[:-1]\n",
+    "\n",
+    "        xx1 = np.maximum(x1[i], x1[ind])\n",
+    "        yy1 = np.maximum(y1[i], y1[ind])\n",
+    "        xx2 = np.minimum(x2[i], x2[ind])\n",
+    "        yy2 = np.minimum(y2[i], y2[ind])\n",
+    "\n",
+    "        w = np.maximum(0., xx2 - xx1)\n",
+    "        h = np.maximum(0., yy2 - yy1)\n",
+    "\n",
+    "        wh = w * h\n",
+    "        o = wh / (area[i] + area[ind] - wh)\n",
+    "\n",
+    "        ind = ind[np.nonzero(o <= overlap)[0]]\n",
+    "\n",
+    "    return dets[pick, :]"
+   ]
+  },
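+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A detection is suppressed when its intersection-over-union (IoU) with an already selected, higher-scoring detection exceeds the `overlap` threshold. As a quick sanity check of `nms_detections` on made-up boxes (the array below is illustrative, not detector output): the second box overlaps the first with IoU of roughly 0.68, so with the default threshold of 0.3 only the first and third boxes should be returned."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Toy boxes in [xmin, ymin, xmax, ymax, score] layout; values are illustrative only.\n",
+    "toy_dets = np.array([[ 0.,  0., 10., 10., 0.9],\n",
+    "                     [ 1.,  1., 11., 11., 0.8],\n",
+    "                     [20., 20., 30., 30., 0.7]])\n",
+    "\n",
+    "# Expect the second row to be suppressed (IoU ~0.68 with the first) and the\n",
+    "# first and third rows to be returned.\n",
+    "nms_detections(toy_dets, overlap=0.3)"
+   ]
+  },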
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "scores = predictions_df['bicycle']\n",
+    "windows = df[['xmin', 'ymin', 'xmax', 'ymax']].values\n",
+    "dets = np.hstack((windows, scores[:, np.newaxis]))\n",
+    "nms_dets = nms_detections(dets)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Show top 3 NMS'd detections for 'bicycle' in the image and note the gap between the top scoring box (red) and the remaining boxes."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "scores: [ 0.86610985 -0.70051557 -1.34796357]\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAXAAAAEACAYAAACqOy3+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvUmMZll23/e7wxu+KeaInKuys+aq7ibdraZE0oIgU4Qt\n",
+       "mrAsGISgrTfaWAa88tYbwzagnQEbhGUv5I1XNiBKIE3SNCi2SDfZbLC72TVmVWVV5RQZ8ze+9+7k\n",
+       "xb3vfV9ERTYJg8Vim3G6oyLjG95w371n+J9z/leEELiSK7mSK7mSnzyRX/YFXMmVXMmVXMn/N7lS\n",
+       "4FdyJVdyJT+hcqXAr+RKruRKfkLlSoFfyZVcyZX8hMqVAr+SK7mSK/kJlSsFfiVXciVX8hMqX4gC\n",
+       "F0L8B0KId4UQHwgh/ssv4hxXciVXciV/3UX8RdeBCyEU8B7w94BHwB8B/ziE8M5f6Imu5Equ5Er+\n",
+       "mssX4YH/DHA/hPAghGCA/w34B1/Aea7kSq7kSv5ayxehwG8Bn638/TC9diVXciVXciV/gfJFKPCr\n",
+       "3vwruZIruZK/BNFfwDEfAXdW/r5D9MI7EUJcKfkruZIruZI/p4QQxGWvfxEK/LvAK0KIu8Bj4B8B\n",
+       "//jih/7Hf/bfggSVZcyrikdPnlA1BqUUeZ7T6/WAQFUvCAGqRY1znqIokVKxWCwwxqCUJM8zpJLk\n",
+       "eY/JZMJgMCDPc6qqQmmFdY6madBZRqY1i6YGZ5FSopRCKQUhEEKgV5TUdY3wASEEzjmEkvgQQICz\n",
+       "Dq1zAIQQCCEIIcRjALmSeGfj4OqsC0eklHjvybIMIQXBe4QQIOJx/q/f+V3+3r/3dxECrHVIKRBC\n",
+       "xs8AAoESCqTAOocPHqk1TV2TKY2UEnxABghCYhBAIFMSrTVSSAIBaxs8AR8CQgrKskeeZWgh0SpD\n",
+       "BIltDMYYQhoTGywx2R0AEe+FgAeyLCPPc0IAvIcQEGlsnIcsywGBtRZjDQhJurR4zQhc8GkixnsN\n",
+       "aVz/9a//K37pl/4jfBpfIcD7gBQC7z0hBKSMYySlRErZXTNACAGfxtl7j0vXd/Fz7bP0nm5OAHjn\n",
+       "gYAQ8tx5AHyw3TnaedDK6rGFEBhr8d6nawYpl9fbXl97TS5ojAsoKVFa4J1DIAjBgwj81m/8Gr/8\n",
+       "y/8QZy0ej1DxOMa45XF8nLfe++48SkLwFmstQUAgzgchZDfyivg8AoEgQoqlJVIrpFRUVYVWOVIr\n",
+       "AhJrLc5aJPF6pVLdsxdiOQbtfTrnCFK0g3Ru7KUQCC7oKOGWY+o93otuXXTjHg/Av/y1/4N//+//\n",
+       "xyBCuhufnj/49Dv4eEsu+Hj4ACF4ggvd2Pm0YoUUSCXjnI53EZ97+lspjVi5LyEEjTXx20EQ4hXE\n",
+       "9eMDIRDHdWWOrP5ezkH/udf/h//6v+B58heuwEMIVgjxnwH/J6CA//myCpStQZ/K1KhcMxxssLu7\n",
+       "jbGWyWzG4fERk8kp0+kUX5uktKOC0QIIgVwrtJL4EB+yc475fIrzlsnkjI3NTcp+yXQ6xTmXFoin\n",
+       "MQ0iBIwzqKBQSiJFmqsBjGnw3qGlREpBQNC4Bh88eV6i8wwhAsZYsizDWo9SCmPj4pBljkiDb61B\n",
+       "yPQwhEZIcL6JikEAQUAyFN47nGvivBYB50WnGIQQSBTBC/CiW2zCBwZlD+8dwQeEiMrBA5lSZEpF\n",
+       "peEcQfqkfAJKy/hecHhvsLXF+GgkhBc469FKI4TEBx8Ng5IordE6Q+k4bYIQKK06heWtwzvXjaX3\n",
+       "jslkTAiBPC/IsyzeawCSEnPO4W3oFrn3HoQApYBACBZCWsDEBRSIikLKtICDx1mPF1EvRLsYFZH1\n",
+       "SbEJiVJL4+Gdj0qK5WcRYJ2hMXU3T1uF7tvnkRwhmQz36o9Yea0zvEKAUEilkApWdZT3Aech4BE+\n",
+       "jaFQacFbQlB471BSoKUgyzQQMM0CKQRaK1yweAd5pgGBc5a8KPFe4l1IBkMiggMpycoyKl7vEVLh\n",
+       "fKCNh0NwyUYLgmiNnE8Kz6K1xnuLaywIgZKSrMiQHqy10TiqeM3WRgOHgGCT0VKqU+CdkUsPTCQD\n",
+       "uaq4pHfd84mTPhrk4C2tW0MyQEqAVunTAjwCgsCrAEgIAudahUp0jkLAewFy5dl5n66F6Ih5nxyp\n",
+       "+BmtZVTG3qbjgkzrLlOKQOs4hLTEo3HyROPtfbxnJ2SyYfGzcc7E60pKgD8PGv1FeOCEEH4d+PUf\n",
+       "95m6msfFby0n4zPKfg+pFLvbG9y+eY26NkwmE9aKPkIIqqbm9PSM09Mz5tUCLTzWOwgBlZScRzKb\n",
+       "VWR5znh8inOOXq+HFIE8U9R1jXOOoijJe/2oPJzDBsi0Tp6GpF+WZFmWjMKcXpE88CBomgaIltJa\n",
+       "QVmWFHmPEAKz2YwAZHmGtZa6adBaYb0lS9kGKUAmr0kKTZeGEBKPwHvLijPXiULj5YqVFtGD8MF1\n",
+       "3pZKyiYg8I3Hy6TwnEcIyIosTdLoYYUQMN5Qu4AEpFBxUeoszh+RVKXKOk+uqmuoa4x3nedRFAVa\n",
+       "6+iNZwW5ztBas6hmrK+vo7XGGMNsNmM2m2OtRaksRiM6RkCmqc97xdYQgkOKgA8e70Ln0QQfjWY0\n",
+       "aq7zxtuf6K3HceoN+934hBCiBxoCWuu4ELuFI/AOtNadNxY/E2ia5nNe9mQyic8lRXDtOaOSW3r9\n",
+       "Ukpqmxb0ikGGGKEJ6c/dtxAy6qTgECKQaYUUILzl4OAZjx8/5Pf/7b/h5s0bbO/uMFpboygKpBTU\n",
+       "dcOgP6CqaqRQ0Zh72dkMqVT0HIVGeAdSUGhN8B68wFsXo4A2WknKsFVKxphuHsTo1OKcBR8jLGHj\n",
+       "2LfPqY16hIjKW0pHbSxKyG6+SJW8WA9KRuXVRbZ+DilSaM8r2+WSooWlx+qx1qRxXEY1UQ1KAjEq\n",
+       "gKiEpUjveYFk+Uyc9931uwA6qGX05AM+iKR4o/OVTh7XArJTu0KJ5GaQIt72PqMXHkQ8RAgCmSLL\n",
+       "EAJShPR6OL8eniNfiAL/84juFTTWsr4+ohwNUFphGoutGprG0dQVVDWT+YyiKFFac/v6Li/cuoZ1\n",
+       "nsl0ymyxwHlHVVVUdcOiabi+s4VLizrP+5TJe7fOMiwynHdkOkNlefpehXM2DqADBzQLn6Ccgl6v\n",
+       "QOaSvMjJ8yJO5MaxWFQYY1MoFRgNRmxubkTvXWeYpub07Iw8z6ibupuoIcSw13qPtwbnAlJIXnjx\n",
+       "BYz30WK3UEIIaKXwIeC8Q1hQOs3gAApBZU1UGnhUJqnt8ppEWsRCaYSApmlQmcY0hpAMibU2Kgml\n",
+       "Y9gXAkY4CA4pNIE0YZ1DoJb2JhChq+Q5W2tS9BJhoqhoHUrNl55VEJS9cmVBOLARpvDBpkWXJr33\n",
+       "3Lt7l6apusXVTmatMiDgnIEQw13VQRu+g3uECEynkw5q0Vqj1NKzPB/GClxc5tHjt4Ha1kuITEZY\n",
+       "pYVQ1tbXY6De3kua103T4LxHrih7v6JsQjqHEAJjms95ncZbkDJ6mSHQ1AtMXdHUCyaTMWvDHtPp\n",
+       "EQ8+OuPhw5KiLPEhMBwOCSGws7PH3rVrZFmJVjnON2hVRO9TKKxx+AAhQXLGtlBJQEqNALQKqBbq\n",
+       "IHRoh7XReWkNnpIRInTekymdIs2kwuLjRmqJTGNnvU9RRHxOdV11YyqEwEmHFhIhZVSqOl8qMqHi\n",
+       "z4rXHkJI8zPw6mtvpiikNfTRuSCIFOn46P0CQsrOyW2dPykSNOksJkVtSikEIjoaLQznXIqUlvBZ\n",
+       "jHxcjGbS33LF+IkQISYPKB2/55CdYYxzUHZGr4WxSMf6sXr0x777BcrHjz8jyzJOZmMA+kWPfq8P\n",
+       "PuCEJ0eCVKhMRY/EG4JTBBRaCnqFZmNjL4ZfQmGdjfhVgMViwWw2iwp6sWAxXVA3Df1eDxkC/UEP\n",
+       "nedU8xkyJJyzBcgIjEZDpJTMZvPkAcJsNiHP8uiFVlFR1bWhKHpkWc6cCfP5goAgy/IYpjpHXhZY\n",
+       "51FEuKGuLVJpjPMM+iOGw3VCcGxfu5bwRMF4fApEZTqrFgQfkscSMTrrTDQSxtAYQ14UNE1NKXtx\n",
+       "0UiQQeC8ZTFf0EuLPADeWnSRY53jdDxmtpixNhqwsb6OFJK6MagU7qm0eIVScfJbS3DRuInkhcTF\n",
+       "HDpFVtc1Ho81lqLQHbRDiNhlC2fpXHeKMyA63FmpaHQcnjfeeh3Z4s8roTd2iYH7EAguEFrPMPjo\n",
+       "UQoRz63ortE5E8fVe5zzKCUTvh2NlLcuYcLRoyJdl1IyGtt2ASdFoJJHq2SEZgQCpSVLnzeOj0R1\n",
+       "SswYkxa76xTCYDBgsVgwHA4pZcaTJ4959923mU2n1PWcItM09YL5dIrzBqUcY9OwqAz3XnoVpSTP\n",
+       "9p/gvOPDj97nhTt3uXnrDoPBiPX1LYxZ0BiPDkVcIwm6wvrOAIeQoq3WuImlJ6wS/CRbWMXHqAii\n",
+       "w+GlBC0gKSGICl4T56sQEROWXqDSOLdGVSkVx8M5vAPfKtTkbS/1dUAGn64pGkSBiPcjAi+//Foy\n",
+       "IHT5FZGUZBtNdvkL7/HBrUAcUbm2x5IyKmRrDYUuuntqlXc0utFULHMvdMbEe5eMkozwSQsBOkvw\n",
+       "KV8mUlTPMpcGEYqK+SOR4Mi/oh64zDKMD5weHTPo9Xn65Bl4z+72DlpogrUUWU7ei4m7osipTYMW\n",
+       "GZmKnnTdNAkL1bjgKcsC7x0b62tsrI2wxtJ6EEIIyqLg5OQEIQT90QZb6xvUdc3xyQl1VZFlOUII\n",
+       "8iKPnp0PTCdTdJmhZcScnTW4po5wynyOrWvKoiRbW2c+GeOFZHtnl+Al+/tPePL0EWVRcP3GDXpl\n",
+       "nxACo+GItbURp+MZH3/yAKUUg37Esgf9PqPRGk1T470jzzLqumYwHJDlmn6vz8GzA3xwSCXRQscE\n",
+       "bq8XJ6NSWO+oFhVKacp+iXUpTAeE1IwnM2bVgsYYZrOKpmlojGPQLwnWdUpJCJU8FYlQGm89WkaY\n",
+       "R2uNt5a68R1soJRCqITpKjDOdh4ynXKgU8ptWKqU6uAK71z0V5OHZb1F+NYzjwrRtUZBRG+K5NcG\n",
+       "mcJ9SIuWmLniXM4MAeRZVKrOOUwTDbJUOayE6xAXe1VVKaLLY7JNy3TPy+uAgHUO4SM23X43Lm6L\n",
+       "Cy7i8AlvJbTjmxG8oVdmzGZj/uT73+Wd937EYjan3y+xTUMIjjzTaCVRCqo6OhbT8Zgf/eD7jNbW\n",
+       "yMsc5yI8dHDwFOctvXLIK6/mFEWfvNBYH6jrGp1lCQMPOBtxb5ESm7TzJNmgqNxdxNQTHNLCWD7E\n",
+       "iMJrtUwCdjmApOyTtx4fAhAEOotJdQh45yM+joyQTMoZCiFQWXYOImnhE1hGQq2ClziWTz/FOcJH\n",
+       "bwaxVOYIVCZTAjMg0vX6FrqRkpR9wTrbGfLWOHRJ1GSA22hEKpXgFd/dezte7f1oJUHTRQbCr8zV\n",
+       "dDOqS5YLvBDJUD5fvjQFfv/9Dyl6PZqmYTQaolXGdDJHyjPKomR7cxOpNA8Pn0SvTQjyMmd9fS0d\n",
+       "QaB1jhQRjpjPa7Y3hkxnsy5pFbxHZzpl8UPE3ISgzHNMXTMaDCjzHNs0DPb22NraAkApyXg8TmFT\n",
+       "YLqYMZ3NqKoFRVEwunkLnWVdoubg2SGz+ZxBmSOUxixmFEXOy3df6LDf+fiM6dkJ8/mCkyxna2sL\n",
+       "62H/0SOGoyGboztRATlDbQ2Z1jQ2VsNsrY04m5xR14LJ2QnOxmqcpjH4ENg/OMKHgA2B0fo6w36P\n",
+       "PNcopaibhl7ZS1U0iul8wfHphP3DA1SmWRutEUIFIaQIyFMvqliJk6IJFywq+BhVJBenFAXBpaRj\n",
+       "8lh9UDifcChilUpMwEav1KcJGvBY53DOI4Uk03nEXFOC2rtwPoUjYqJNpaqcLkHWHr/DZJeQRrfA\n",
+       "V7L6sWKh+09U1FqS6xKtNVUTvezgo+fjUrKPELDGopToEkztAo0RQ5a8aUubkG6vyRjbXYsNKZkn\n",
+       "RIR9lKSua/K8QGvF2ekx999/l2o2ITjP+HQR4T3nyDIdYUVgbW3IjevXyYsa21iaukJryc72JmfT\n",
+       "MXW9YLGYMRmPqeuaO3fusr29h1YF5BqlJcZEOC9qpdbWWUjedUgVObGCJRrY9j4kJNjM4oLHqogH\n",
+       "hxDAtRFQi9+Kc/k4oSXe+qTYZZwPKamtRLaETFjiwMv8BV0ks+rpCyFonIuxXMoTCRkdBeEFISnR\n",
+       "VsFyrgolIHxyMkKsiIrPJqPsaUKzCtd4XMKp24jReUdjY1SV6UFKeEaD45OSjzpFdQUN1lqkkhEo\n",
+       "SZH/KozmkhUTIiZGf5z8hXOh/Hkk1oH/5Z/3Sq7kSv7/J//dP/vn2LBMBvuQIBrVwhuC4KMDETwI\n",
+       "5TssXaToXMtYKYSgKyVsK2iWSnl5fJdwcq2X0ZqxK2WF6RBt1BDzHynf4z2CWDXUetvRa/dLOMkv\n",
+       "S2T/m//qPyf8JdaBX8mVXMmV/KVJpgVSlF1y2gV37v3gBUGBCPG30j7W0ac6f4i14bFi1mPaSqU8\n",
+       "QzrdojC0ddxKZehMd4rWORuhJBert5RaltYCKccSOkMQ+07kitcd8N7SVkTpBEkBCcJ5vlwp8Cu5\n",
+       "kiv5iRalFDqYiNakogYv6Lxhn5LsLdYcXCw9DmrZLNc2UhkHqCVUJHTqlbDLBKZDdknY2EwkUmlu\n",
+       "0+HhwBL6iSB9PDfRk1eCzquHlGiHlOBe9h+IP4Pt5EqBX8mVXMlPtIiUs1it0xcsu2ZlCDghugSt\n",
+       "ajtx20+KpTeslUolqW1lSMARG7fa47cdtW1FDoBEYBP271bQ4VUvOwS6nJG4oJdX+wCUih67Mebc\n",
+       "PV0mVwr8Sq7kSn6iRYmAF21VSlhWrsQXEEAmJELLVLXEOYgDIKQqG6XOK0yJwKWW/q4WPP4FsFIl\n",
+       "E5AyX6FMWF7Hsg8idF3aQi1pOFYlJlqX19Ye43nypSvw//V/+e+x1jGbz6gWNXVVM1/MaWtw33zr\n",
+       "DQ4PD3j8+DGvv/46xhg+un+fP/zDP+K1V1/l3Xff51d+5R9x9+5d3n7/BwD0+316ecHe3t45CyZT\n",
+       "XWZs5xapXEp0SfLYCSVSV1RKdAiBDgafkgsQ6RHaz1jju+4+7z2Vd0zmc5xz1E3NcDhCCJjPFzjn\n",
+       "ePDgY27cuMFwMGQ8noCSVPMFIXhGgwGZVDz4+AF7u7vkWvPBB++jlGJttMbbP/oR6+vr9IqS8WTM\n",
+       "xvoGxliEVJycnvGzP/+3eXZwyMcff8CN7RGzyZgyyyJOB7z61a/x4cPH3HvtTf7wu98jz/o457l3\n",
+       "5xZnDx/zwrXrbN+8gRv2yAYjzLRGVg1FphjXYz47eMTdu9fJnMLZgNcZ0xA7WaUN9BFI7/GZZJEa\n",
+       "SAQB7yzO1rFxR8RGliCiJySz2LJvjKdQhsrDOGySDW7ip2N0dUjwNddu3aQUksXTJzDqU+QZtm7w\n",
+       "zuEFuOBwxOfXL3oUShOs41k16XDFWIsbS98aYxBSYV2gLPss6or7H/8Q53cJ+TbGV2TCc22Uk7uH\n",
+       "/PIvfpOjI8PJuMe08fRyhRSCpomlZlrHZo+6rkFEHg2V6teDEF2ZWqYz8J6mbgA4m0741V/9VRAC\n",
+       "6yz9PEMBxlqMc5GuINOJdiAqh5anpyh6HJ+c8sq9uwRnyZSkms944/U32N7ZwbrAxw8+xVjH3/y5\n",
+       "v82/862fQUnND3/4Q4bDIRDrjpVWWNt2m0aagTzvAUul1ZZ9tq+1a2cJW8Q11uqj1Trvtmon+ECp\n",
+       "lg1RQsiuEaj9Tvs7BMjk59XTP/mn/+nnXhPBR96S+FfnHbcQR+g6dT3CRQqB9hwQk5aC2LwUUj1/\n",
+       "52EHE2v5ZaqJV4mSISzr24OPa98CISVNfQix8Y3leJyrqFm551UlLqTuxnl1zJ8nX7oCHw4GOOfo\n",
+       "90qKosAZF2u+6xqlNcY7bt26xdbWFsPhEB8sZf46L730FTY2NnnzjTdwtmJ8dsTe3h77+/vM53OG\n",
+       "vX5Xv6sTd4dM5YVCtN2J7UNRqYKtrZUGJfWySN+JaDmTpvcp2SFl5JvwzkeehxDwsm2JX5azWe9Y\n",
+       "LBZYa8mLnMFggAc2d7YZn51hhwPef/99yqLAhIZrN6/hjGVWL9i5tsejh49YVBVfufdSbD4Bil5J\n",
+       "ALZ2d6mM4XQ+54OPP2Jnb4/eaMTO9VtIofjaG69xdnLCs6MDnj35jOODA/Zu3MA0DaYJuBD46MlD\n",
+       "5GLOWrNAVTNG22s8Oz1mejLl+mgTrSVf/+o34N2MtV6gEDmmEYSyR7NYkOuMzAVK5/DVAhToPCOo\n",
+       "IpakWYHTEIKjaWqCiCFo4y3B2Ggsix6Vd/jBGvefTmnmh5TeUpqGYa8kOMlGoXFrJU01RVaBTGly\n",
+       "HVu3vQCkwkuwOhCEw+PIMtU1ZRhjopelNVmRJ+w0LspcaO6++DK13eDh/oTgHVmZcTYZs1nCtDG8\n",
+       "88F9it4LiKLs6s+lbEvb0qKVbQ2ww7iGQEDJDIj8NEKpVD/tUUqzu7vDz//8z3H//oc0TU01HiNl\n",
+       "6kXwLnYHepe6hUkVDLFeuZnNGAyHPN1/hrUNa4Me62tr3Lp9m+OTU45Pzrhz+wXeu3+fN954i2vX\n",
+       "bzKdTNjY2EjOCl0pW0tApZRC6VjyVld158j0er3ULZvWT1v6lioFO4WePFMlYyIuOH9Occ2qRewz\n",
+       "0BpkbFZBtBQDsWtTahXX6ecKL55TuSY/n+wTgGvLTaN3lgyOOEdE1v6OG4nFqpTQfkdE50NJiQwy\n",
+       "dRZHiMM7v1TQMnLHyMRj0q79i8RUgQjRAAjv8Zz3tuPvZe34stPz+fKlK/DYTgxlkSGI3BwEiZYQ\n",
+       "vCErithAohXOW3KlGA6HbG5uYhvDq6++xHw+J8syyEZsbGzEm07eSp7nSy6LsBJihaU34dyS9a/N\n",
+       "IMdmvrbNdtnui4jt7LmSWBvQWbTWXeOJc12NKAisd2ReMer3Mc6xsb6O0ip5bTrWbGc5d+/coSgK\n",
+       "jo+OYmOJ85jG0DQNdRVrhV++9xL7+/t8eP8Dbt2+TVVVZL0SkWk2dza5fvsm167foNfvMTk+ApXz\n",
+       "e9/+A376a2+yNlyjcpbgPdPxhK3NTaazBqEkO3u7/PDx97j58otsvXCDP33nPSbzmldfe4M6wP6z\n",
+       "x0zslKdPHpLfGDI3grwYYZzn4OiI67fuMChL+s5BppB5DDtdEw2jVxnONbFNGUmWZyDbjH8syTqz\n",
+       "nrK/ztjlOCeog8A1DmcAIaieTTFbPYZZwVqpWMznBO9ZGENjDTLPkhKX1N6hUARjwbnkacVFqnQe\n",
+       "50p8BSUF1phEzLTJZObwziBEjQ+OIsvxcsDjZwtmjcRpQ5bB0ckMgURnGXlWYl3NbD5P1A0xkaUT\n",
+       "IZfwHm891jqci4ybzjusd9Sm4Rvf+AZlWfL48WP85jbeO2ofIwofYm02Pnl6AYz3NM5R5AW3b9xg\n",
+       "NOyzvb0FzjI+O+WH777LeDJlY32TZ8dHvPLq6+RlSV1bev1BciyiV4mI9dNlnmHb5hQpCDh0Hkmg\n",
+       "rLGEYJGSlblNqtl3BFxaA0svWIhIZtZ6xKQ1QiYxzkbGTSWJZfZRa7YNRCLxyqikniK+Hf91qWjI\n",
+       "xIX68LDsCSA5X0l/dx2Q7byI/3fpNZHmStQXWkT+IoKP0V5IkUNXFrjUEyoZVqFS2Uq6+rbzt60L\n",
+       "DyEq8tVa92RjsM6i5LLT+M+SL12Bt9wXzjmUEmgtaJo6egJS0tjY/qx0HDSTrJqpGwKO2Xwau5aC\n",
+       "wZklLaRMvyNxUuoUY6UdW15u2c7hYmlwLQJcy9TGsikhnadlUhOJA4HULi6VJNc5QkbvLMsUIRFK\n",
+       "eUCEwGB3r2urFkIw7PUhxMaYuqqRUnLj+nUCMByOGK6N2NjaYDQa8elnn6G1xnrPlhCMJ2eoPPKE\n",
+       "KF1wcvqYLO/x8YPP2Nre5sWvvMTu7bv84N0PIEh+6utfI88KDo/3KfolR7Mxn372KX0pcR72Hz2k\n",
+       "8oZgGz778B02y4IfPHmfQT7kb/3s3+FP73+MEYKDZ/tce/lVnn7wAUdPH3Lt1jXmzZwN1tne3qIs\n",
+       "SvL+gMViRm9ji9HaCKljO/3J8Snj6ZRdpykHQ373++9hzwQUigzHq7dfZGt7j/c++YzpmWX31iZZ\n",
+       "fciibnjy+Al3bt+KjRbWE5JHJ2WBqR3OOBSS/qDPxsZGIk+K8+rTTz9hPptDCJRlyfjkjP3jmtpq\n",
+       "vFuQFQ2z2Rk7117CLQJP9iuycoe8P8LJGWWvj5SRYmBuIjwkFCyaOU3TkOVZJOkyhkGeE5wjy3Ok\n",
+       "1Cyqito0OOcpix5SSoqiiAZZZbFa2Ec+Dyk0bRu59xFSQWksMFpfYzIdM56eUfRyemXBo/0nketG\n",
+       "Cgbra+w/PeAf/Ce/wqyqUZMpmxtr0ckgYsMSl4iVHFIuvVSwHVRA4vqIOT0RuzcTxBk6jzFGI0v4\n",
+       "UXRJu6i8IrwltYg11EqiyCIj5Ar84qWA4MCDS4V7su1IfI4zOpnP0CJbwZRDbB5LjKJStd2QLd3C\n",
+       "KpyR1rIgNexA13kJCQKLSl5IGRt9QkDpRC3ZWp0AwTkQkV7gXCMakZJLSIlWCXqDVJfuE1Feiv5J\n",
+       "kFMae/9nKPEvXYG3EoLD+ZalbwV7E61lP88GByHyY7fzDVgNPVbvu2tpTviWWMEk2++1/24z18vj\n",
+       "BJTKzj2Mc0o+TfguSPIOfKI+Xfm8SiQ+QoiuvThi8hFnVzIyIbZRmbWK4WCItTZGGy7Sge7s7vAK\n",
+       "r+C946VXXmKxqJAJEvjR2+9S9Po0VcOj+iFvvPUms9OT2ESsFY0PrG9usL29xWxRMz45osxytooS\n",
+       "XzUMi5w8BJR1bGxuUElBoxVfefFV3vmDCdP9Z9iiZnY05tOPPySTkmANp4fPeJhr3vnhn7AxKHnn\n",
+       "3SMe7j/k737zFxh6E+lHVY/aGvzCM6si78fG9hb90RrFYEBfjTg8PuXDH7xLs36LbDMnD4a9wZAX\n",
+       "br7I977/AVNVs7nR46XNDdSs4q2/+XOUZcH+wTOmTcOsrtFln8OjU/rlAC0yjqcThhs77OzdZjKZ\n",
+       "IITghRdfYHf3OmcnR9z/4APGJ8coIbi+u8NsZtDlgKDH9Hs36We7SL/NYnGKkhl5NsApQzWPc1Np\n",
+       "SVAB42ojtV96AAAgAElEQVSyQpMphWwytFJkKifLJNIDqMh1Uyhq05BlGUpHKGwymfCbv/1bAEzP\n",
+       "5qg8klAJAbmUSGNZL3o0i4p8OGQuAtnaiP39J6z1+oTgefDRR2ztbMZGFAG9QT92SQbLW199i2cH\n",
+       "J9RWcHx8TAiR7jVgGY1GOGepjUElQjNrHUKCDwZBwnzTvPTOJbbEONe1FBEqCgFBlhgxdeI48ZHh\n",
+       "cUkhSNAS4RKMIJKhEi23SNuBG7tBrbdApNGwjUM8R4EHQYKY/BJ2CG0NdoqcO0V9Xo/ItO6iB7zk\n",
+       "/m6/0jlpSR91kZBfNg45T8eQ2UUaK2t/NVkZcf/ILyMS50ykxY5ObJFFAi1oecz/imPgQZA42JeE\n",
+       "MElrJ4+2zQAT/duVbHNsm22VqV8Js1pXeQlthNCmKtJ5L7zfXc8FxR5f8915pRAxxGNpYVc/LwUd\n",
+       "dtcigvGKQnesFjp0yfLSHScS8MjEW+1D5CZemJqyLLsQK4TI6tbXPYajIVVdEYTitdde4eDgkN2t\n",
+       "TUotyZXm4EmJFILeoEd/Y43BaMTP7u4gQjQazaziw/c+5Nr6Bq+/eJccie6PyPMep4s5qpfhzk4w\n",
+       "0zN6hWJ44wb12YKDp0/YvnmHneEQKyTe1ty+dZ1Br+Tw7Ihv3ftZTKY5NHMKoZksGgKeUgpM0zCZ\n",
+       "OY5sRdMYVJax1fPUwWNFYFadsR4Kylxw/93vcXw2jSyLvZxnsyly0GO96DE7OuXmKy+z99ouc2M4\n",
+       "mk750/fu4xoQRU7Z69MXCp33mcwqqibimeOzCY8+e8Cr977CZPMIN5vSL3N8BmFRM+oP0eUIITIW\n",
+       "sxnHx0c8ePABzmrKwYi8Z6ib2EE3GpWsb/SZzU8hNOQ6YzAYsrG+xaA/4OysZjGv6fcHkWu65X0R\n",
+       "AqU1nz16yG/+xm9GaKRpGK6NMNagQgbOokzDRlbwxu0XKPKcDz77hPF0TO0rghdUxydkuWZ6Kjk6\n",
+       "eIrSisFgiHOGTz/5hLLocXx4hFSRt3tRNzS2iRTJxnJy+ozgPTIrUD5DKk2vV6Bz1XnPtjGARAmB\n",
+       "8ZEOlrACOYqsS7CSug2tNUT4oCXFSvt9WNnVVMt2Ta02viQcOSpEnSDNRDX1HG80rVJiFNBSK9Cx\n",
+       "D4b0v5j7EjjT+vYgnG3d7+VaTlE1gLVLAq24zFPiVop0/TISdCUKhqXubjmYAquOZYSV4nstZa/O\n",
+       "MmR6L6IFgSAkwtON0fPkS1fgWZ4RA4eWbpMuoSikRCSa0RaiaBWp7wamfbhLwqOW96LDsFNCpT0u\n",
+       "AOJyjoHWWz/n7fsARH7maJpFh19BnDqdFReksFCcU+zqglVuXw8rFtaHsEyIeY/DUZlY4VJPpyit\n",
+       "ojcTPNJF8qZqsUiKH8qiYG005PDwkL29LV66d4/jw2OaxkTyKmcRmeLOjWsREnIOJTW9tTVGm0N+\n",
+       "6q03aUzD+uYmJ2djbiE4PTrk8Wcfk/c0P/31nyZoxcP7D2gWNXdu7XFc12xdv8ZnDz4hzyNz38bO\n",
+       "Fru3bzKbNtQ2ElqdjmcIAnmWE7ynsA6TNtsoioLxfMHx4SmuCLzx1Xtk/YKjD9/h3u27nB49w9nA\n",
+       "7TsvsrWracaPOTg6QgCD/X3WNjegKPj+j97m48fP6PXXmdsx0+k+jw8ecu/eV+j3+xQ6kpJV0wl7\n",
+       "WxuYesHLd18kcxVvvfE6//uv/Qt6eoud3hYil3zy4Albm3c4tRX1/Bil15mfSh5++hQnFXt7m2SZ\n",
+       "g7MpZ2f7aOWpqwXWeL7x9b/BL/3iL5Kpkv2nxzw7OKSqKqaLislsTp7lLOqaf/Ptf8vZdAohIFXG\n",
+       "op5Hz14KhG/YG4342p0XceMZ28M+w1deQn72EQ9Ojxj2RrjaUWpNr19inWVna5tPPv2Mre1dmsqQ\n",
+       "y4wPP3iPu6+8iUOQ5RrvXWSx1JChQUBRlsxrS10vcMHh56ZrQ7dNZL8s87KdwCuOSMz7LOd1olEN\n",
+       "oSMoC6El9Yq82DIGznEHHyGpqnpJ2iQSdwigRUDiMcagxSU79iQRrk0EthzaRIcwcY+s7k6kpcb5\n",
+       "aJDaNRidOJ/YTJfeeoy+1bn12v4dISXVKXIhBMK1lMhLQipW6tFXd2VSSnbXK0Lc10CEtjv08+WF\n",
+       "z5MvXYHH8pykYH3yuoXsLHcLXsQssewUb8tBBzEDvJq9lVIiV+5/NWxq8a94rPOe9upnVz13JRLp\n",
+       "fwvjJNgmhOVnSMxkXshEnhPOKef2IbaGRLZbi8mVkiwfcdwYKQgckJfFyvWFmEzxsfW2aSxaqUSi\n",
+       "pADP1tYmw0GPIpMcHR/EhadLVC+n8S4ysalAcBalAgjH7ou3uHPzGqIxBA1zFVi7vYubVextrXF9\n",
+       "e8SL925Q2ZprG7uI2rG5ts763i57RYbIc3bW1pDGM68WHM8m9DbXuLZVxMlKTMh6axChNToqcnWr\n",
+       "DIVm5g3D9SFvvfU6JpfU1Sk3twds9zLWB7tMqDh68oyiGLJ/dsz+2TGjXp/HRwccNwtCliMGfSgK\n",
+       "Tpqa2WzCbDpHIXh2cMhsOmE0HDKfTNldH/G3vvUNbmzf4+DJI/74D7/D0f5jZmcP+dmf+RqnpzWP\n",
+       "Hz7i4YNHbHxtD+wcSUUv38DUmlADRaDXK8Fb6kXFoKe5eXOb3e0tjg5OCK7i8OkjBv11tra2WVtb\n",
+       "Z+/GDZ7sP+NHb7/NBx99zPvvf8DTp09jkt1H3um485Sglyu2d9a5NVxDmIY7O9vsP91n8/Z1CuEZ\n",
+       "FArhHKXSrPX65GVOIJBLxeZoxOT0hF5vyGI+41//2r/in/zTVzBOYKo5ZZljqimVqSHETU+sNZAq\n",
+       "TpqmotVibdIy+LgtISlobOuaAwJdxDlqrU0VM23XYiK9cpYslYviUoCNIDgX6XGlIJdFjEqdTQpO\n",
+       "obWMHPZCpF2sLlfgzaIh7+UdzGGtJcilIm132EFE+tm4BWDAOZ8iYd9BmqKFc3yM2H2Cc7oSStdW\n",
+       "4qStEx2d0lZpg5OLUX2bg4Ol89YmRmO0sXQC42f8EsJ5HvCf5EtX4LiYARZSdliZEKB16wUX0Zte\n",
+       "8YpXYY5lGc7So45lVuc93uV4RgXcJk8vlulIuaIs2zKxVK8a0sUJIvwRJ8d5z16uPkCxfF0lMvqQ\n",
+       "WNvcSga7fUZSKPDp4bZQkVsy73V3kDwMrfNoyd2S0tVbQ1kUcQyF7rwFCRRCxI0KJotunKR0CHPG\n",
+       "vK17b+JY+8ZEknslGeztMbp+HQhopdm9c4+mrrvxiK5wwvCaAZvsUPZ6MWvftjO3WfUUYnUtyKkK\n",
+       "wHqHlJqXX5gxr2u8g8W8Yjqb0tMzXnFjtq/toFSD6fVY395mMZ9zOp3S9x4BbOU5w1u7zBZz1l++\n",
+       "gdQaUWmCFDx6/AgvBceZ5Obt2wDMa8+nj4+Y2YIPPz2lsRs4NcSIMUenDTYUPH32jJPTCtgkyIKZ\n",
+       "ndAQEHqB9QvkIkDdMDmbsGt2OPpswunpHBcWnN6Z07u1zWQW+PCTx/z8xqts7W3Bg0PC8JD7+5/g\n",
+       "egFTGfpqiDIlLhPYsGAgFC8MelwvFLI2nEyPyHfXMMOC4d427sMjyn4f7TOub25wc2MNguFofMLm\n",
+       "zS1++NEDzhZjgixoDg753tt/wk9/8+sc7D9mPjukKDTWg9M5jYJqcsaoV5JnUNsKowUY6PmcPCuY\n",
+       "NwsqM6Ec9mjqhlxmCBM5vp2KVV513dDvDTB1g3eBqlqglKLo9Qgh7hnptEc6iUajpEobiwS8qBNP\n",
+       "OAgnwTqahNVrrdGZJi+yS1VIf3NA04xRIToDucgJElCOys/xElTQ1JVHk2NDE5OTwZNwCrRMRFcI\n",
+       "ZErAqiDIdA5S4XzcNxeR9ht1BuFT5J9yYE1PpGRn3L7N2Qip+CZ68lJFvmMfAqUVUYGLgPSSyN4Y\n",
+       "d+whCEJMlJ2v+rlEvnQFvkqS3llMsdIG2yrulTDtYvJx9XsXP9N+blUuWsjL5Nx3xNIOrrbnnk+G\n",
+       "nMeqLjvnZeeOcM/5z7YVKavjcFlN6DJkPf8Tj7EM+9ptwi56Aavnu3Dgrq7WJYL5VdFadxsztLuW\n",
+       "tK+315tnGU7Kz52rPe5FA5x5H7dZ0xkbIfFZ+EDdGPIi73ZvMaahGpbcunEjeluNiR5XrPtECIFx\n",
+       "lqqqmC3mmKnn9gt32Nx+Ax8CB8eHbGxuoqTik08/4vTsmI3NEYP+ABc0Tw8OqOYNWVbESgPhaZo6\n",
+       "Jh1FDOsFnrVshKwlg2GPk9MFkPHg00fUixn9ok9v0OPo4DHbO2u89+BT/uVv/A7Z5jqvv/V1nu2f\n",
+       "8t0/+D7To5r5wZSt/ggzXlDmyyaOYa/H1miNApBInh0+YXE65sZoiLeW9eGI2byi3y/oFRqtIBMZ\n",
+       "6/0+h7MpX33jNT58tM/h2Yxqfsrv/97/zdHhI67tbEeFqHNsUyFFfMb9Xg4hUDc2diwGj0TEbdRE\n",
+       "LIncGJZUpkKFWFVhnUPpHO/j/rBKCRpTEfBIHXn8Q4jjJ1OTjLMG68A6lbZ9A+EgiNRinjZskQiQ\n",
+       "cdcnIaMnbtOGHBdlUc2wtiIXcSOTCE0GbDA4bHSkhKDMC3JV4KxJu/YkrFuBbueji5scQ4zyrV/u\n",
+       "uQsxD9c6Z6sJzhDiPrneL8sR4z6sOpJrudhE1q7pdkN2KSIlbYsgCNFuNh3hl7/yScy226hVBm15\n",
+       "T6vYuxLAFQjiojJb7Vy6rKJkVSG277XNPatNBrAMGT/v4S/loqH4cdey5Kp252EUcb7W82LmevVz\n",
+       "Szjn89fRvraaFV8lkl895mon3apBWWKV58+7OjYXr6/F81pu7otj0TTNOe/hotH9nMFJYXVH/UZs\n",
+       "kop825KyGFJVFXmuWRvGxKw1cVf0LMvIdUZjDMaaWO7lo2clTaxyqJqaLM/RPRiPJ8yNQSFYW+8R\n",
+       "fMV8dsb1W9sY4zgdn3Fydszh4TOqakaWazIZUHj2djbY3tqIi9wHzMxSzRrq+ZxBr8B66K8PKIcl\n",
+       "+5NnPPnOUz46mDKpTugP+3z3j/+E3/2d/4eMdV6++zVC5ZkcPWNt2GMxO0OrjDzTbPYHlFlOqGuM\n",
+       "cQxGa5RZTr8o2RquMx5PUT3Bte0Ra4MSRUAL6Oc5a77k5OSEl27d4Otf3WJtY4e8KFjf2qTXGzCZ\n",
+       "zvE+dhxr58AbzMJi0ei8D9ajZWxAsiHukuOcw0wqMq1jN6OKTVLWGZQQLBZzhJBkWeQEsdZ00ZkP\n",
+       "FhnatRmDS+8M3ll0gv66vgwcTmh80ARnui5NgEwVXCbO1mS5JlhP01RIoVFKYEKNk3HfzuAacukw\n",
+       "vgER9+qUEnTizfchRnEIEEqgpUShqFP5pJSRp9v5uEmMlDolO2MeLoRAFgRCRkZC72J5oHQubp6c\n",
+       "cHoh465EKtPJc291Xcq9dU1JETVY5b6/TL50Bd56h3BeMbZyuXfpO4X4vPdDCN32RKuKpFU07Ya8\n",
+       "q+dYPedlnu9FpbhqGFoFeJmiunjui8ry4rEvU76r17N6/It8C6sJ2D9PlLFMyFws0zx/LasRhhCC\n",
+       "PM/PXd/F8Wif0cXPrB7r3Ni0O4ojuz0vs1xRyCxtpqBQg9je7VOzg+iVcTu14HHGoTNFXmQdn3Os\n",
+       "3rMorVF55MG4Ptjj2rVdlFLYxlDNF9SLBdWiAh3IVM5gMGI6nbOzs8X6xpDJZMxsPmdne4OXXn6F\n",
+       "6XTBJ08+wzaeejLntZdfZmO0xh9/948xjUGX67z81bco1no8OXjKrXub3PrKy3z797/Nxw8Omc0d\n",
+       "h0cnbGyUrO/eIWSSZ/ufMtoscWcz9obb9LMc2xgUYAm88vprPHx2yGJRsZjMKIWi6JdsDgsyLMJB\n",
+       "JgsyXeAyj3EB7R1nT5/wD//Dv8/9jz6KdeqzOaYBlIKgEd6graUschqXEdQAZ2qUc/jgWTQGEwKF\n",
+       "UhRK4hZxi7eqNnjlycsewURj3W6pJ5VMVH5po2IRG9q89bFsFpn4ueOzFCHg01YhaW8eBBYJaTOX\n",
+       "iGEvqtml8zhTntl8gZYZUkSPt64tXjiyIkYCzjmCa3C2IS8LPD72XxgLFiAgExwihOi2DPQidupm\n",
+       "WRYrWFzbZp/0R5ds8+RSp7kfcGlbQ5K3Hgmq0lrHI0XaDco2Xb5gWdfWOlbqc5H9RfnSFfhFqADO\n",
+       "L/ZVj7j1/JYcC5/3LC8ec1VBrb63qmxWkwyr20aterjtsVb/Xv1eey2XedIARVF037/Mm36edJUu\n",
+       "K0py9T6eZ1zaa7zseBcV7sWqm1XD0B6nfX9V2V8cn1UjApdHM8+7LxNbLmPhaNrhO4RAcOc3mPXO\n",
+       "ETR4J85RI2gtUalpyqVuQk9A5bGd34sY1vt0fSEEyBTr6yOyrU1msxmyyDCNYWtrl1defoWqWpDp\n",
+       "WCI5r+ZIoSiLAcenY154eZvxZMb8rCIXPT598IQ3v/kzTOcNZ/MZv/nt71MMS7wS7F7bZH//YyZn\n",
+       "ljzfQgjNxtoG0/ExAkEvG/Dinbs8O3jEje1tNvtDZIB5tUBkGllkHJyeIYucRdMwny7o5QV5Aev9\n",
+       "Eu0DmVBIHzHbrV5Ovz9kbg0//fWvYecT1socXfQZ74/5+OEBjfV85fYeL9y9TjU7Rumcjx4ec7Ro\n",
+       "kM5xbSig1yPrj5Ba44zBTaf0pEZ5RXAeIwXz2Yw1rWPEpRU6yzDO0jQmbpq9kouSou2GBtRy71AX\n",
+       "TCwuEHQ/QkRW1+A81rtoCJ6jyxbzKbrMCc7jfB3r9QuFcZGCIO4qFD3fPJOx2xto93JtczKxgCBF\n",
+       "nT4ST3kpYrQS4sbnzqbfqeM6QkdRjcaSQ5m6U1PbfNrpR8h2vcQkpc40UomU6lrurdlYgw9xV6fI\n",
+       "sXT5PbfypStwoPOIV3HV9oYuYkA/TnFdVGKX/b3qUf44xbIqq7DEquK5CIGsKvnnKdbnGaqL57vs\n",
+       "71Xl2MplhO+twrsMcrk4XpdFCZflEC564hfvY9UAXoyQnnfuc/9u3w9dNI0UkqaysR447VjSnoMQ\n",
+       "IokWoJRGBGhsot9MSkNqhXdxzPK8IC9yGmM6jHHQH2DqGiUk6+vreB1hm2Ze0+/32QzrGDtnPvGU\n",
+       "vTW8gzzvMegPKPoNQUgMGYY+L33Lczwx/E///F8wm1UEE2g+PQXveYf3UVpRzS3rwwnj0xnf+ua3\n",
+       "CAONNRWSNU5OnqHXAuu5oMw11jnmtsE5FblbZIXxnnll2NnZoarnKGkJjQGRQYJEMq3Js4y1Xo7L\n",
+       "BT/12ptMzILtjXVqp3FuwrXrd5BZwZ2bO+zu9agnmkdP9nl2eMIsjMiFZPcrtyk213l4csLR6Zgb\n",
+       "W5tsjbaYHTyjV/bxucWLQJHn3N3ZZTKZcDo+w1hDCJH2QukMQqwsgUjKhfeEIAkqVqUIEciFQiQP\n",
+       "3AlBEDpuvGxr8jxWlzgfW9kvk7IsaVIfSBAe62q89fhA3KPVK4SPe1xmOlVGKYVHdEnFiNXb2CkS\n",
+       "lrxJNsSNiLuCgNSwI6TEB0/jDMInGLG23Q72AM7LblNvIeWyQccBQial7tO2ey0aAVIt+XWW9ASX\n",
+       "y5euwFsvTWv9OajjMvz1IpTQvrbqAa6+f5nSeB4OC0uv9Hmy+p2Ln71IXrMaWawma1ePdZmX/Dwo\n",
+       "47LXVw3c55Kjl0Axl53nz1L0f9a1dR4ty+fT4urt8S8+l89dQ0cfsSzZJHjy1CcQUfFo3L1cblnV\n",
+       "XY0P3V9t05T3FuFU1zBhzII2qg0hYCob27RFXLBORM+/1+8BAek9eTFga2OUPCKfdguX4GpcgJCV\n",
+       "3P/sKb/17T/i7fsPmFeWQTlgMj1GWU8uFLrqo5VkJDyL4yNGWvPd3/8ttnd2kVqxtbvN1HjWdm4z\n",
+       "2/8Y5RsGhaaXZzglmJsm3a/EBkewnqIsyIVGBkGRF2RB01M9CJLttU2GGxts3trl9OiMWnmCUkwN\n",
+       "vP/hR0xsTm0djz/tM/o732SQZbz99vs8PVwQeoJmvuB+4Vi7fp0z2zCeznnt3it866tv8Ue/93so\n",
+       "KTl88oh3P/0UDzwaDtnb22P32h5BCA6PT/jow/usra1FQ7i+gWkaGuPplzmNSWWvUiGFRzoPwVHV\n",
+       "NSLLccJS9PpYV1E3dfJEPcUKbLcqSmuktQgRkFlGlmWRisJahNYEF9jb2mK9P2R8fIydz1BKUDeR\n",
+       "iTHypgQylRgr7ZLPSHgfu0tF5LdZ2AVZ6mVwLu4r2u4fqoqsq5ZrN4NGgnWeMisTJ1PEuat6luZr\n",
+       "wKXdeKhTkZaIG17/RCQxW3meN3oRL11V0qvvX0xUXualryqfNjmwahDapOll8uPw4edd/6rn3rXz\n",
+       "r0A6sd34817F8zzXyxTuxeu9zHBdvOeL710W5Tzveaxez0WYafUzz5PLqAqgVdjxX4GAWNmNuzte\n",
+       "937M/kPka47XlMLvEFKFQfytsuzzEZY4H9W44PFC0FQ1pDKy6IFBCJaz8YKyLMmyAmQeSfxHt/Eu\n",
+       "8Nu/+dt85zvf4/jkjOZkgqsbni5irXXVVOzefZFr/Rf47MEnVGZMPiqY1Gds3FrjbH7G7dv3OFtU\n",
+       "DLf3WCwqfJYRVKAhoEKkvTXGoRAEVDJkjhBkLLUNkrox9Ab9OBpBsrOzR280IDgwxjPzFUZAPtrg\n",
+       "b3zzG4hinXndsDaIxtH6wLxuIqQRPC995QXeePMeP3jvfR4dHLAwNd+ZVYyfPGZzMAClePTwCbku\n",
+       "CSIwnSx49dVtTk8mvPPeuxyfjrlx4wa3brzAydEJb//wPRbzBWWRc3z8jKzs8+/+wi/wwYcfcXS4\n",
+       "zzDLuHvnJtf39pjVFQ8PDljbFGyMepydndHvD6jmC8re8NI5ZYOOWDaRY2U2n2CtRxcl89mCemEY\n",
+       "lgNeuHGTF2/f+n+Ze+9gy5L7vu/TfdI9N7wcZt7k3dnZiN0FdhcZBIlMAQQl/UGVZNliWSrSIiVL\n",
+       "crmKtKtMy7IlymJZJVGUTZkumiAlywwimECRAAmABJGXi81p0k56YV6++cT2H336vr7nnftmIJJe\n",
+       "9tbsve/cc/p0+PW3f79f/wJ/+I2vA4paTZsIIgS97gDHdVGOdiB0Cs7XKegrTVOyPCPwPLIkLswb\n",
+       "/QMnIc8jFdqCRRTcu3S0VZbja3WeciDLU52U3HEt9e+BGtj1PaKCDjXT8ef8ENOUsk51krgPJaeY\n",
+       "SWI5B0cCVQDjT9jNJ3Gtk9pSBofxw77RL2PXx60zxt9vuMMqcCz/bb+vvGlVAbDnmek+DLBl3fzd\n",
+       "qHdMe6pUMGDb1DPS5dmS0kFdijwXo+/lw5xRHcI6LzH29IU6Rdv+a+coo28UAsgrNnDT19FoCBAQ\n",
+       "4CKQoGShq81AOIReAyEcBnGGEBm7ez2++s1LrN1Y5cWv/zHZTht3kPDkiVO0WnWGKmIgU4ZuhtcK\n",
+       "qM+e4G2PXuBLX/s9rm1cpjHts9e9Tc2pcfXqFe49cQ81JWkFDTpBg/Xbb7CyNAfSIY1iXM8jVwrX\n",
+       "lTqaofSQjiKOY/xaiNTnkQgBvuNx5doljq2cRNZDgvkZvNzTCX49n2R/j/3tHaSULM+cYndri8Gg\n",
+       "y+LSIqlqk5Ozs3GDF+kwP79Eoxmyt7vP3MwUge/x2d//HCrPqdUbzM4tsra2xtKxJZaWlviPv/M7\n",
+       "xEmKIx3WVzdwhc/m+m1cKTm5tEKeZniuQ7s/4MrVa/SHA5TSG+elV1/n/JmzfOu551nd2uLMfZDH\n",
+       "IXNzc0RRRC1sMYiqVSiDJGcqaNDt7pFlCYHvARlZppDCY3X1OnOtab761a+zsrxAoxGyvb0DQjKM\n",
+       "Ejy/RpbnKFHEegFyJ8cREkdmRFGC62jViOt55IX+W8uFbsFMCKTDCNCFI0Ho2PPavPBgTfuBjmVv\n",
+       "HP10qCdtZpimOsqhMUF0nGqcMuVPBOBCiDeANpABiVLq7UKIOeAXgTPAG8D3KaX2jqhj9GmL4lVi\n",
+       "/J3A1T7YLL4cAjnzaU7Nq0zryvUfcq1ncrD1SfXYKpfxA9FqbrV80DmJK65S4ZSvm5Jl1cBYtTEe\n",
+       "BeBHbWD2ofJ4O8fbWK7fwcWE5wRZ2AVb75UClHaocmURL0Kp0TPauMGoUMZ6XWwearQ5jjaBUhuk\n",
+       "0X06RSZzVYi0rkOcZNSaU/zYj/0TTp++l76c4qVvPctCUOfCY+fw+kOivX0ePXuWTGRcWr3OzqBN\n",
+       "mg6ZaSratzd578OP8PmdW7R395luTZHHAplJdtY2eOqxt3L98mVOnlih3b1NmikdXEo45FGK9D08\n",
+       "KZHKxXMkjtBOZkrkCEeAkyE9jyyPuXVjld3OPioMec9HP8rc8iKZVAhHcmJlRYfhHQwYRkP8mQZJ\n",
+       "K8R1XU4cP0nUi1COgEBCLnBzwdKJ46ycXCFKh4R1j3pYp93usr+xwcm5eZrNOmtrtxBCkWUJAu3E\n",
+       "0+t0GPb7LM4tMN2YZn9/j+Ggz87OLk6zQYqiHtaYCnxmfI+V5SU6e3vMzUyzevMG7omTzM8tcfrU\n",
+       "Cr3BkF6/X0mTb3nsSe5ZmGdrc431jRvc3rpN4Lj49SleeuUSm1t71J+Y4tzpk+xsbYDU9u21sI6Q\n",
+       "KVEU4dcCarWQTqdNnmZIR+vW0yTVUo+E4TAiigqfgCJUcOB5KAVRFBF4vnYnLIwbzNmOH9RGJrdR\n",
+       "FBWxlBTCKeLBCBM3hcL23SHLFEkaodSfrQ5cAd+plNqxrv0o8Dml1D8TQvxI8fePTqqgSr3x7RYz\n",
+       "WGUnEbNQq1QpYwuZat16FddtA9hRoF/Vn/IGY29YVX2yP8v1TnrvJLWG/Vu5n7Yuv7wZHlXK/TcA\n",
+       "XrbYqWpHuQ1SySKEQOHQIRgdNiphFOTmoFMwLmOZesc3H6VAChPkbNTqQo2VHB5TdD7DPC/sfLVR\n",
+       "MP3+gObUDGku+YH/6of4oy9/g6/84m9y8thxTi/Nkud98hp85yc/RNYbEuDwFz74UbbWN7l17Qb9\n",
+       "mwOUDOj3+jz4xEf4xsUXePbqZVS9STuO8IKA3/7653nXu97O7vY6K8dPMNjfwRUubhASq0ERpjin\n",
+       "UQvQ7su5TvqAwvEl/ahPHPUJnRonTi3z0COPc/32DvVWAxxJmqeoOEbmKYFQNJo+WdNnqBTSDzh2\n",
+       "7PZhAhkAACAASURBVBi+dAiEYLezz16W0PTqDNp9XOEy6HXY7ewyNd3EcRxO1peZqU8zHAxRjqDb\n",
+       "63D23Bm2tnfodvuEtRq1WsBUs6WDqm3cZn9/j9xNmV+YRQhFPQxJ+z2CRo2pVpNkOOTc6ZOst9vM\n",
+       "hjO0212ee+4FTp46zTCOWFxcrqSnr3/zW9QfewuNRo3F+UUuXr5IlGSELcWLL74MSnD16huE7jmi\n",
+       "aMgDFx4myxXdXo+wVieKE/b29ul0utTrIa2pFnmWaRt3VViTKMX8/Hxx6JkT1kLSNCWJU+Ik0Qep\n",
+       "UV+buxZRHYXhAKSO2hjUQoQCz3FJ/BxwyPN0tP5c1yXJdKYoWVi/KPVnn5W+jFSfBN5ffP8U8EWO\n",
+       "AHA4bGM89lkwqaL4z7pRiy5CX1W5GrXkqMO8SVx9mfMfB4JxULc51ipOuVwOq1YmSxZVFixHccTl\n",
+       "Osy95QPWo/T7VderdPP2u8rjUNXmKhWQ3Z6xQ878ALi1ktqhmF40tFLQAeQ2UUtQKi9iKx/Es9Z6\n",
+       "SO18YYDf/NMR6+w2aA4oCjqoxMFNA7zMw8UnFzm1mscwyXjptVf4yX/102xut/nuR97HvffcS9iq\n",
+       "c2NjjYXjx+gvLLAvd3jL2fN85cXn8Xb3aWY5F07VacxMsdvr4ePxgNsg76d8qbdGNu3DIGI293n2\n",
+       "+ZeRJDz14IPEvQGNsE7c22UY9xCBT6Kg5riQaSsfPwyLtGMecZ6xdO4cO+027/vod7Nw7DinpAuO\n",
+       "C7LIAlQETVK5CXGgo0PmuSKv5WRphkTQcmbwoghyCOemEAKaeY3F5RlOn1gu7PJdwkBnhUryVKcM\n",
+       "zDPOnVxkd2dXe2lKhySeYzgY0m63kYHDrfUhJ5dPM7+0yMVLl/A9l932Pl7g88zF18nrISvTU8zN\n",
+       "zxexVXKE47JUXyTNqtfA1VeusPra08zNzXPu/L1Eucvmzj7DtV3IM6RQ7OzdZnt/iuvXrvLUQ29h\n",
+       "Srls7/bZHQy5dGOVfpIwM7/IVCOlGWYszs9zbPkMTnOf5aUV1q6v029HJIOEmalpHn7wIU6eXCFT\n",
+       "OX/8zLM89+xzSNXXMf9dVx90IlC5YH7+ONMzS2SpZPP2PrOzC8ggYxgNcBzB5u1VfWYy7KJ6baTK\n",
+       "cD0JHiRxtfepKX8aHPjvCSEy4N8opX4GWFZKbRS/bwDV22ZRqmyw7WIDxVGAATooTWUjK7hmG6Cq\n",
+       "dLnlusvXqrj5qt/M+4/KsDFJ+jiKmz6qmN38qI2o/J7y96MkgLHDRyFGOUEn9d+Ucl5F8ymEKGKL\n",
+       "TZbC7D64spxrUIAz3jatHtO7vy2JlSUtfb+pN9BBkByJyHIyElSu4z3fWlvnp/7lv2bYz3nsgUc5\n",
+       "efoU++19BsmALI4JazVu3Vrl1MlTrJw9izsc4mxvQ6fD7e422WCAX6+zENZ58Phb+M6G4rnP/jr7\n",
+       "wwiVurhugJcLciEZRDHNVkvbpqcpzVYDJQWpEAwGAxwhCFyPJM0J6i7S8YiiiHvvf4CPPPAgvTim\n",
+       "lybaA1KafJrag9CMgU6Y4lR7yxaskk5Fl44sqJJEx9kxDnJK6YQDgePTbNRHY3zqxApJkozNxXA4\n",
+       "JIpiBj1t7+0HPo3Qp9fvs9du02w1yPKMjfV1er0es3OzLC8vsrx8rIiVHVMP6pW0UXMljqyTZpLN\n",
+       "jTbddsq1a5s6JC01Br0OG7e2mWnN0mousLm+wcb6Oi++8Dz9XCDCBo50GXR7LM4vMDU9zQMPPshs\n",
+       "q4U330VkDjeyjFdee42Z+jT7u/sszM/j+R61sEa93mRp+TiDZJdOp0PYahX5anU00cz36CQxt25s\n",
+       "cPbsfVy9eoMba9d52xNvI+oPWDh+FiFymnmC5wlQKRsbq0y1GrQ77YlrAv7kAP4epdSaEGIR+JwQ\n",
+       "4lX7R6WUEgcBu0vlHwLwc7/wEo8/+jCPP/ZwpYeetLjuMkjah32a46rWF5XBQghxWF/OYf2ufd1+\n",
+       "Z/mzDGjltsKBWV0ZwKoOAb+dMonjt61e7rQJTIq3YvehPNY2AJZVQUd9L28A5T5UjV/VnNj32ht/\n",
+       "WY1TNS6TpKh8qF3SHaGQbgak5EIQ1pr80x//CZzM48PveS/kDr1Bh0G/y+0r6yzOL3Dl+Rc4ceYs\n",
+       "G2+8wa+89Aqzgc+9y4vce+EcA+8ke902WZoRN2ZoNGf4K4/+Jd5Yv8nvfeNr9J2cnowRiYPnaa9D\n",
+       "13URmSAIaihS7fhSxARxXFcHL0MwTDPCUOKHdaZnZxGOQy2s6QPQVGHsiJ3igPdgc8sLVVI2NhZw\n",
+       "YANtxtjYNodheJh4hI5rnee5jldSWDoZUz5D70EQaLO7KUAIojhm6vw9CEenIFNoO/7z507pFG4C\n",
+       "7RbvSG2dkysGRTyccvnkd3+AWi0gqIU8/8LLbKxuM+wNEAj6gz6tRot+Z8DG6ib33XsPi8eX6cYR\n",
+       "F/p9UsejM4hIENze3GFz7Ra3b10n6u6ztbXJ2QenmZmaY3tjn8B1dJiD6Vm++c1v8OUvf4lTp09T\n",
+       "q9W5cuUKM8dnmVpYpt3rcfv6Lfr9AWfOnGWoMtqdHVQItTmPxXyWrurhN0OE7/Dp//jbKKV4/3d8\n",
+       "B889+y1EnrKxeovp6WkGg2q9vyl/IgBXSq0Vn5tCiE8Dbwc2hBDHlFLrQojjwO3qp/8hAH/z+3/1\n",
+       "EJdnihDac+tOnJ0pd3I7rQKk8d+rzdzK7v7lOu4GdO1FY0DM2K9XtXESCFb1qepaWW1zVJk0rmVg\n",
+       "LXtnlgGxav5M+4/qQ3mDLF8z77PbMQmEy/eamDdwOF5MuW1BOo2QMUoOyMWARCVkyiONY/7HH/tf\n",
+       "+N1f/xxhHlL3AtZ33mBj7RZ112O4t0WrNYszHCKkw8MPP8Dl116j5wieuXaZpC558IELvOWhR3j5\n",
+       "1Ve4vL7Oyl6dv/HO9/P0F36PYRiQ+D7z9Ra3Vm+xNlXjsfNn6Gz0SZJYx69WDsM0xfUChONr1QWK\n",
+       "JI5wVIO/8w/+PoM4IUoTpOuCEjiOPiuQSO3WWFIxlmnMHovMGitThsPh6ADPPpA3nLfv+4RhOMYg\n",
+       "2TST5zlxf0iWZ8RRrF3vk7g44NMSQ6vmUZtu4hUOPEmWEkXaXnsSCYmsR57FrK3e4o0rLzActlma\n",
+       "rxcHix553uf4sWk6+2u0mveyubvDqxdfYzCMmZlfIghrnDp1lvXbtzl37z10ux0cR7K3PEOc30Yl\n",
+       "MVNhSJ8eT3/ja7ztsbeytblJr98lSRMeevhRWq0WTbfJZ3/rc3z8E59gemWaer2uJSZHstZZY6rm\n",
+       "88df+yIPP/wQZ88ss7p6hYuvX+Sd73gbYdhASo/pmTl8P+Dc+UdoNJoIKfnMp//DxLXznwzgQog6\n",
+       "4CilOkKIBvAR4H8CfgP4G8D/Wnz+2lH12EGerLqBQm8rnUNAUtEWff0Oqgjz3b5m/yvfO0l9YNdb\n",
+       "rsf+rVzKruU2h2LXbW8Yh/pY0beqUuVpejcbzaRnbJAuO02V23fUWJfvN/+MSqv8jPk0dGJvGpP6\n",
+       "Zb8/rYimaPfFbpeLhxA5uSN1ICzpkqsanjPDa6+8TOi2iHf26bZX6WVbNFyYrvsk/SFxb5+1W9eQ\n",
+       "jSbPv/YSjWaT2zu3adZ8ht0O6y9d4Y3nLvKdH/su1HAAt/ap73f5x9//d/mBn/3nxG5I3wsI61oV\n",
+       "Mxs4tNyc6WaLwbCLUwuIen2UI0mLlF5hvUatEdKYnWW30yGoNyDPdEhStPfjKIMVBxY7piilxmjS\n",
+       "HqeyJ69SOo62WatGPw2MMSZJkoykv/J8SylxPR01sdkMSRKt3zXRLXNlg702LU2ShNiL0flHq89l\n",
+       "Al8Sp7vMzHh88ANPkWWKTqdLt9dje3ub4aDPcNjH8+eYmXZRLpy59yxJnNLvR2xubvP53/0Mx06c\n",
+       "YO3GFVKVce/5e6jVQ86dOE+/G7GXdjl3+hRnjp8iSWJ6vQ6d7j7LS0s6CXGWcfP1S5xdWuG1Z18k\n",
+       "iVNmZmbxPJed3S1m56eZnmny8LmzLE81cPJ9Vs6v8NiF03Q7faamZ8lyuPhih5mZFp32NrfeuPJn\n",
+       "6sizDHy6mCQX+HdKqc8KIZ4GfkkI8TcpzAiPqsT2UJwEUFXqiTIYAqMgRuVSZWFSdXBXfpe90CeB\n",
+       "XxnIDOGWB94GvjKHXAb+SklkwvhMmuA7WdWU6y//XrV5VT1f3nyr5suxzKqqxtEAiQ3IVaqlsgni\n",
+       "JPVVuW9Vm1GVdOI4CuUqlMhJlCBRHtJp8tWvvIhSIYEbsrl7hbSzhROk1DwXmcY4ZMRJn9PHL9AX\n",
+       "LutvXOFd99/HVL3OqWPH+Oq//3Xm77mXP/7Nz/L801/nHe9+kkdnV3jm5Rd58p3v4i889W5+88qL\n",
+       "DLKI5vQU6zeuMRhGrKwskmd9/tsf/RGWVk5wY22Nq29c5+rFS9q6JerjhCGPPfkkMwuL7LX3kY6L\n",
+       "4CBwks6EnmrLBuewpKu9AydbVZnxEkKMEoSPdOilWEBVND+yiy7uGxZqlrCmTetc1yWN49F6MOcb\n",
+       "QupnW63W6PkgqI5GuLQ8R5bX2d/rIBxBHCc0ajXIUpJ6nXtOnyIIfFzXxQ9cEuXSmp4GBSuez/3n\n",
+       "7yXPn6LdbePWfKI4Yre9z3QrZHluHjHn8eVrX+XSxWsszC9xzz33gDhGLdQhhtv7ezQbdZo1l2PL\n",
+       "x5menqXTHTA9M8fW1jaLnUVcD5577hkuXnqVOI1w0oR+b0AQBNTqDfbbPVqtGRzP52Z3j3anx3ve\n",
+       "8z6CIODnKntdzM23q3P90yhCFGmegS/87q+Ufxv725XeoQVdBsnRgpSHOW0j4h0FZFWgbd9bxWna\n",
+       "dd4JRMrvKNdVBqoqjnHSJnI36pWyBFFubxWY2WqJ8gZbVlfcaXMrSzX/qf0w91VZ2EwqZfA2bans\n",
+       "c+qQyYSg7jJIE7zaLF/6oxe48cYui60lrr/8PLvXX0WmbaYCncUm8F260YDm4iJzp04TLh3DrTUQ\n",
+       "SrAyv8jVV1/jEXeKG6+9xiNPPMoL117jxuYNHj57nreu3MMffPbzZPMzfObV53m5s0VrapatW2uc\n",
+       "Xp7lkXtP8cB9Z/nE93ycmJxcOgjh4EsX33GI0oiEHCkFaabt3Z0isYfJFgX6HKmwtTk0XneiV/ue\n",
+       "surqqOer5kWhdKZ5pUCpkRetNiMrIMGirZwilDSicM6Ct7/3Y4fe8/U/+gye8EdOXY7jkaYpcRRr\n",
+       "BhGlowGiY58nriJLUu1Cn2SFrbc2Ye0P+mTkOJ5Lb9Aj8ASeEyBwSWN9YD411eTcvedACLZ39njx\n",
+       "xVfo9oak7oBOr8/M1Dyu66OUQz1s0ul1iaIBrakGjWZINBwQxtpRaGdvl909HUOmN4i4duMGrelp\n",
+       "Ot2uTvxQq/Orv/yrKLMjl8qb7olpOK9Juz9qnEhs0awqsFPZXM9wAebvsvhfJToeasIEwLGBpMrZ\n",
+       "p3z/JO7Gbqv9vjIA28/czcIpg5ztvDTJCalcR9VGZb//TrFjyvVW/W1fq2qDzb3neY7njWdmsVUr\n",
+       "VaUcL8YETiuXVAxxHYf99gApa/zGr/0WjpzBzV3euHSJdNhjfmGanY1t+j1JrebRH0aEzQYLx5a4\n",
+       "fOsaj58+RSZgY20DN4XNjU3+eO8yywvzvHjtdVbOnuT0w/fyjWee5qkPvJ+/9tjf41/+xL/g1Mw8\n",
+       "nUCy2Ys4vnKSPO2zuHycj3zsu3F8D5FnKKGzl0dZqoNGCYWSaJWJMIfyAjgI/AVaMhVCGJ7prs5G\n",
+       "qjbaqg3cnqfynJSfEwh0+sTiGVGkEyz+p/Fbe0SiFMLVQcr05gNiwvwO4pjU5CUT4OUKkHieOwoQ\n",
+       "RWE2iu8z3azpiIF5jkpzslQHrMpVznTWIk5j0iylUdepCdMkx3FqZKleN2FYY39/hxwYDobMz83Q\n",
+       "amXE7HNy5RhSeqSpIooSWq0QoTIiR5AlOc1ak3rQoCECNjc3mZ5f4dQ9D9KPBrQ7XSKhbcjdRota\n",
+       "PWRvf//IeXrTARzGrTvKXKnKjtZ7j4GcOqyqsDl1W1dXZXlhxEQYTw4x6d5JLv13S9A2IJYXxFEL\n",
+       "yH7HUaqgKuA0/Sqbb9r1C3HYzNIeu6PUIVWlPG5V5Shdn71ZlqWIu+EGjQWQsbgobwCjDcpRpEnG\n",
+       "4uwxnvnmizSoEQYNXnr9Vbp7ezR8RVD3mFtcYv3mLo7ULu5nzp5jp9vFRbK1cZv7HnqM22u3CRt1\n",
+       "wqkptrdX6e9HLB07Rj+NOdFa4m/90A+zvrfL7c1VPv7X/yoyrPGf/YMfhtYsflij29njuedf5O/8\n",
+       "8A/S6e1r7z4pdYyNXCFzQIpCq30YTMtezXcDutZAohljfQhqXS6eHdWi682rGZwRJz2aWx04zKTR\n",
+       "G/l3SHEQnEwexLlRRTtQRdjtCRuP5/k4JAUjr8iy2DRP04SjzVeVvkCvnWj/Gim1dY4jka7AVSCd\n",
+       "kIbQERQdR2dkyjJQSkeqNPb0cRKT5Sl5njI9VSeOU8i0Z2WSZITNBgM5xFE5swtz7Lfb+H6ISKHb\n",
+       "HZDPNPBrTfb299jtrKEEDJMhcwtLOK5DphSe73Pi5Gl+efJMvfkAbttH2+BbFdDJ5rKMLs4UpRS+\n",
+       "61eCozsSKw8TcBkUq6xQqjiP8vU7AZRdbCAsu69Xcbl2e8t/3wn47DIp9Gy5zeYQyu6PvRF+O32t\n",
+       "uvcoDrzqWnnjLVuTVB2amWIkj3J/q1QEwnFpei0+/7tf4NXnLkLi0KjvUMt6uCEk8YAsqSFFE3fe\n",
+       "YW/Q56Hz52l3BmRJSkvWyLsRrVrI4tw8wyylNj3FFgnJoE/naof16zfYvnmbpeUTXHjoIT6/8Ucc\n",
+       "m5+h4Yd8/1/7z/l3v/1Zev0BQS1EKfjSl7/Mw489pPNXolAi0xldshyVC0zuVN3/g/gz43OvKKLF\n",
+       "VKrQyiVXapToFw5oMS3F1cc4SFWcPZXpebRuhdkU1GhHUEVyYaW0XfmoDil18gMERx3lOVI7zGjG\n",
+       "RAP/qHoh9AFooUASQuIkRVC5XAeoylXB/ee5zvOJJItTUJC7OZ4XkGUK1/VwXB3qN/A8pPQQClyn\n",
+       "RpJkMIzxPI8oGuqEzIEOXSscl5o7TZYphoNE56aN20zVJTWvxTCKcDwP6c7R6fZotppI16PT6ejU\n",
+       "dEeUNx3AbTvUsm4VDjjwqoOTcuyPqljbZTG8zJWXAeYoVYa5difucxKQ3O29R1nl2CobqAbwO4m/\n",
+       "R90/SbVk3lk1JncC8qPAwpTyIab9bFllc9RBcVXfzMZVNjcsz70Skjeu3ODLf/BVziyeZnZmmsuX\n",
+       "LtLrt5lfmMV1BYNBTM2v48/VWaifYJjGiCSl7gbEaU7aHRC3e9y8cRMZ+PSjIe39bfJej7pfJx4q\n",
+       "Bjd3WH9jjb/1j/4H3vOhD3FzY53XX3qVxx55lM9+5Rl2ox5imHHm3L3s7bfxvIBhMtCJtNHcqpTa\n",
+       "AQlDj0XIXARkKrP6qdUsUkgccdhppzKcMeigXhX3ltVpRp89qZh16bquBkTDRNhryrEsZex6RW4i\n",
+       "aGtz4gnv0PFXQpIs17FIlBq7N5dFmIaiS0GR7ENISS6Uzu+Z5zp6IBJXCIQXIBEM8ggpnFEskyxL\n",
+       "yVVCmuqEyCrNkWIISuI6ddIsA88jc8DxAzKlQxS3pqfJUpiRLmmmkEnnYNyKoFqDYcRic07nga15\n",
+       "zPgzpBXSjV3edACH8cBOQui5NUl5ZTHxqcrJkoPMFSMRryAgRwoyEYwGRYiDe1SuRocgchRTg+K+\n",
+       "MlnYqhmzwE21xt3brkMwXoXt1VlwGpjnlXWPrjfLxtMpmfukPGhHGXAmnQOUgW8SUI1aOgL1gz6a\n",
+       "tuj36u/6U3M1smIVGc+9Q0WOe7getYkoVd4w1IgGDpaubkt58z26uFBkH9f0YMRgfSClQUmCkNRE\n",
+       "iy//zi9zfGqZWliHuiTP2zToISOJrE+xN0yYwef0/fezON3g4tNfo+VJVNQn7Q/pbm9x5emnqQ9j\n",
+       "4r02c4FHJxPMhC3qjkdreZqkOyTe3+bf/Pg/4b/47/8bGkFAf2ONjWGfpePzzKaz3Lp5iUcef4S3\n",
+       "PvEWsjzDx9GxMYo+6HEt5qvgXJUBLnEwxkqh084JRSaqzfAmSXqj68o4R4kDPbowduW67qpSxYGP\n",
+       "zXDR9hGzxjitj77faYaFMzKTzJVWz0hLFaNJV4d6zZUiLtKgKXOsK8BRFJJERpYXK1cIvCLbjuNp\n",
+       "BkPH5wmK9ZJrBBUCgSROEozGPo5TKPT5UuqQv0rpjVEIgYcOaeC4LqBwfYfpcAqloGHhx53OK950\n",
+       "AB9XG5irBwt+XGzTP+Wq2PmkRBTP5UqSF3altrgthNaxoTQgZCYdkusipBiJnLbId0DQRk1hA7Ju\n",
+       "n5EIzPtGLZ8AllVR+A7eeXhcMiuDteZO85EdbNk5ogqY7X/2u8pjj2UrbFdj5uMAxIsNlcPSEiP7\n",
+       "hvFiw/pRsWBAp6MaX6oHG2T52iTn3qqN4cB0WKepMplfHM9BSgeUJE0zPNfjl/7tL7K5tsWplbN0\n",
+       "+30uXX2VmcDFyQQq6iNqoXZjbzY4u3Scy6+8wM6tW+ROhp/HOtpj7ugATZ5PmqYErk8uHTr7Heqt\n",
+       "FoHv8tgTjyH2E67v7fDFX/k13vXBD1Hb6+IEglbg0c6GrBxf5ud/4VN87yf/H7a2buN7PkqJIo6X\n",
+       "IlcH6g2tILFGqawmOoKJq3LEGqkSx9ZCUc8EVZ55rymTrLTG5qjgGIS5tyT9jZ6Z3HzzNlxHe5se\n",
+       "Uh0ZLFAKJXWatBHVmvpzdZgmhebSpaV2Au2lqvXs9pmQjvMtOIjz73vuqA/GlNJeL2mOjjhZJERO\n",
+       "kuRQEpS7KX8uANx0unK3GQEHo8GQUhY7lxbJVJZDlusAMJglrzktiUCKg7qVVAdxo61JK6dHs0X2\n",
+       "srODLcLbAGmuVRH1pAmp4qbLnLR2RjlssneU7r28GVWpRPT1ymZNbIsjDvdlUt9sfeadrFWkpABn\n",
+       "eyOtvrfqdZP6pxWfEi096dCd5PrgLYoifD8kiTJuXr/G5q0t5ldOsB8N8HJwewn9tMvsTEiaJMR7\n",
+       "bd75+HupL53k5osvsP76Szi9rk6c7DnU/IA8h86wzdLyWXZu95ibn2JqeV7TbpyyvbrOpV7EW08/\n",
+       "wIlwilR4bL78CnNuwKf/319hb3aG65097rvvNE8+8VaG0YBGIyRNCzWQcFBCx3mRQo5c3ieBZPla\n",
+       "+b5JYRCq5kuV5rM8/vb6qUpoPUnNaX6zP7+dYoeMMN9tyzN7Q7DXxaQ1Y0sM5no5F2wVdlSpdu0+\n",
+       "jvuBaM9V87vv+4eeuxsQf9MB3Oi0D4BKjDl+SClH4G0G1uRcHA1moUOLRuEcPaSQOu2R0oHvXc8d\n",
+       "uQHr7NKHPf9gHAjK74QDr7Esy6jVamOTDhN0itZkVwGqed4EhbL1/Qe/54ZhGSuTwNRwqXfa0fUY\n",
+       "H140VeoXIQSo/BARTyQ0Ne6uXq6z3I6JIFxx790Wx5VoFC/mReivKoepVov9vQ7LS8f5rd/8HVqN\n",
+       "GU7fd4E0V3z51z9DI0rwXYe9TptGLcRPMuL2HsH8Ap3ObZKkS5L2iVVOq9YgyxJ8xyPudfCUYtjp\n",
+       "MBwMCMIai6fOMLy1xvrmGmoY85XVXfzmFCeWn+TJj34XshvxK7/8y5AkZPGQ1ZvX+dTP/wybt1fx\n",
+       "fG2VoOVxUSipbYnwMNNRRXNVpeypWqYXW11lH26b56rCQZTpoVx/+XfzDlOPce67m5DG9nPla1VO\n",
+       "gmZzKdPbgaSrDtHrSCIp9eGwerN6rdlmrqavWabGNh6bITT/7qb/bzqAm1JWSYwIJ1cjO9axf1IW\n",
+       "zFVBQApc5+CkPc/TEUh7ng+Ig8hqgMptfXq1Q41pl/ksLwTjxWZbzlQB6qTd3uzctmkfaIJOkmSi\n",
+       "l1yZ8EzRGgdDSOrQYqlayFpyrdJJH+bgASQHhHsnIJ1E9FXPZVla3FP+ZfLmcjdFb2TmH6B0clnX\n",
+       "den3IqZbszz79HPs3N7lsYffwRDFzu4u8wvzyK0d8rxHEmX0sgHZIOPi668RDnrMLDQZxjPsZG3y\n",
+       "PGOQpzTDBmmsEElG1O5Rkx797Q61xjSXXnyFJ0+f48l3v5MchTvI8MMGm7M1Xu5skbQ7vP8vfZKL\n",
+       "G+u8/Lu/Ra2WE0cDPN8hTiJkEf9EqUKCMKrE/PC8lufOHrND81lB11WbvhBiLMRDlfXU3c5RFcBN\n",
+       "Wn/fbrGfsxlBu+4q9Y4dq8j0y1g82b4j5XEcp+tx5zdzb5lZFEJY2bEO2j0pPeJR5U0H8DKh2YAI\n",
+       "4EhXuwMrk1uyiPEgpT6EEIIMLSC7KsM4LDlCjmI3SKkD4ShRCNJCFIluD95pE3aZcI3Lr53D0hZ5\n",
+       "bCIog2zVbm6+G7WR/duo35aDkz1WVd/tTa0sxt2JqxUCJjh5jb2rvKHZ/ZpkwpdanE65vup2VIF8\n",
+       "1b1HR520ix4HC8CLEkUJjbBFliie+eazLMwucm1zg6npaW6+cR2ZZcwvzpNFLk7ssLm9hZAuYdOn\n",
+       "2ajhCWjWQ9ajBKEy3BwcJ0U5PrudNq0kpjYzT601xcyZJdxUsbm5h9jY5sJDF5jxGkTDBDXX4vz9\n",
+       "DzDYa7P1lVfYWl8niYf8vb/7I/Q6ezi+g3Qc7XKeUdBtAQSFMXWZTsrqr/L5y1Hc+STJy9CyzR2b\n",
+       "v20p2qaTO20sNt2XwfPbKTbI2uv3qHVX3nRstahpY1U8F5vuy/cb56kynoxivRSaA3Pdju9j1tGd\n",
+       "Yp+Uy5sO4GVAMAMHxSFmkUxUKcMxF2fFWabtYiVI1yNOUxCSPM/wfZ8kSnCVsWYpgh8VAG64XcHk\n",
+       "E3jzfvOv7L1nBt9ub1UpH3RWAa+9+yulRhH0qriH8kKxo8LZIq59EGzy8ZXH2zznOJPDwZpysMBM\n",
+       "/WaxGZ101aLTmXSMNYm2DDq8YY2LkOObX5XFifbmuztHovKBp1IKhIMjIUlSnv7aM2xvbfPIg4+y\n",
+       "G7h0b++Q7u9T91w62ZDZqWnoCpZP1ulLaCweY2t7m2lPcmxhgc1wmt7eLp0oxvHquIHHwqlTH4X4\n",
+       "VAAAIABJREFUnHzofkRrhtW9PdyFRZz5bfrbbTb391n7ytc4MbPIUw+/jdBpIndjFoNpHjpxL//h\n",
+       "N3+Nj3z4gzz5xJO0u9uAZhYUAldIhHJ0QguhtDkh1WBcpjchDoeUMBKPuaSfN+B6+NDdpk1z3T54\n",
+       "g8Pr+fB8HKaTSc/ciSbLpcz9VgEwHEQGtZ8re3ErpVWaJpSuKWYd2MBtNjAbew/GXzMLZmydIrem\n",
+       "aZ9db9VY3KnfbzqAw2Gu21yDg7jIucoQonBUEJDnurOvX7nK+QsP4IV1hr09wqCG43oIHLI4pl4L\n",
+       "iaNIm18JgWPEoSzTh5kwpnsvv7983S6GGzGfNgdRHnhDCJMI2OYKyty3mWzb9tsQkB0u1SasMhdh\n",
+       "NkM7kt8BaB7mtsrzMVr8+TgXZX6r6pexJS6Piz0+BxvDeEzvqnI0qFePrSOcIsiZBishtC21EhIH\n",
+       "ybeefgaV5Ozv7NE6f4LN7U0W6iGpShnkOVudfQIlSF2Xex9/CyfOneX5P/omg51N9rp9GrMLRInC\n",
+       "JcdvtJiZnyNyHQbDIY1Zl0Gc0hvGpK5DX+U4mSIj5+b+NvNrN3ni1CnmgibdqM/U8hIf+PCHePy7\n",
+       "3kFq0nRlCVmWEdTqWlJSFBnTM6SOElIJXGXuU4/NuIhfHtuDMVQjMLJpqTyPNvNg00SVFFs1h6Zu\n",
+       "2xfEnsc7mdCN5thxKttX7qsN4FXMYhUN2XXZ0UOr2jZ+ZlW9qd2p2GtpUsgLu7zpAD5pok1JUw3c\n",
+       "JomolALpODpnHYKvf/Ob/O//18/yvu94Px/+wPtRSUamJKQ5Nb9GFCd6UGRBaMKYorljkdXMoFc5\n",
+       "h5QBzQZau91VwFTFedt/mw3CJraqE+841oF5HOlgLGgMKIvCgUMD0+FgTVWLzW5D2UOzvBhGwAu4\n",
+       "zgEHdpT0AiCdcWcju66q9lWNsV234RjL95QXo13yvLDdHQkmApSObPf6Sxfpd3usLJ9ib3uXy+tX\n",
+       "YK/HvB/i+g6uW2cv2iPOFK25RVrzx+j0E1ZOn2XhHY+ztrrK8Qcv8OLT30LGKWm/g+e4iCxnuLXN\n",
+       "4tIJgmFKLVVEvk/Hc/GlIPcVsSt4fecW09cuIa/MM/fAWcR9y5xuP8Ds7DxSKpIUamEdz/fo9nQy\n",
+       "A6E7DiiEPEjYUJ4/ex4POOvDemZ7LA9+0+usPL72c1Xrwn5/GTTL7bKlzPJcV62Vo7jRKvC1N50y\n",
+       "U2NbfxyWSMbxqMycmf5WeTVXMUP2OyZhgV3Kjo1/7gHccKY2yMDBZOvdFRDa8F0pvSDTPMcL63hB\n",
+       "jUcfexw3CPjZT/1bTpxY4WMf/DCzUy2yJLEmtxgQtHeXw4Hnn+M4I+60igu33bXhYGBNTI2qCSrv\n",
+       "7jbnWxbBzKf9vjKAayeawnYZCwwZXyjmmgFJ0w4TyMpuj02Upt13Wpxlk6lJxAxGRzt+IFzmiA7+\n",
+       "FqO8pko/BMJYkhdjIIwK5O4PuFSeg3kPWoIQ5ORZxhe+8EXmZ+fZ2dpmbmae3qXr+Ar2HJewHqJc\n",
+       "j0bYoJcLls7dQzTUqa6aYUhDZaSu5NiZM1y+9Aa7N2/i5Tm99j5B4JMPerRch+OzU4SejwhrtH0J\n",
+       "/T7dQZ99V9HPXL7+hV1wBBdaHnK6TubCseMr7O5vkgkdaCnLE3w/0Ic4QhWO5ZI8V+TqsPRRtY6q\n",
+       "1Ff2Rlqe+zKw2nUZkLElwkmgVMXpGyalDMxlo4K7PccZ7+f45lXefEybKiXGCsag3G4bl8q0XAW2\n",
+       "RzGFVcXO3FVl4XPo/iN//f+hJEkydlpcLlp3R7FotQ13lCbMzMzQHgxoNlsMkj3mFhaZX1jg6pUr\n",
+       "/POf/Ek+8ZGP8s4nnsSVLipLQWkurEonWgYY/V5x6Lo9EVLKUUB6m/DKYpS9SVQBvH2fLRGYUl4c\n",
+       "ZqMxC8BOczUiXilwnPHA+2bDqeIKytKAAeXypqUbdHjBTeKOVJ4dWlD25mH3zXU8cs1e2jWMAc2o\n",
+       "DlktalfRj9I+eCMvvVwpXOHy/AsvsHrzFvedvQ+n6dLdbzOfCYak4Ag6m1soJMzN462cwJuaQsWC\n",
+       "tJ8xd/Y4O6tXeeFbzzLthJxcPs5g4zZOntLp7NGcWsaru0RqSDcd0FntEEU9tjZXCXf2yQPBMIQW\n",
+       "TZr9jPiFi+T3nEaeWeZdb38Hzz77LCdPHdfZzsOATr+L5wXFWaz2KpVkIF2EPCwFVnG9qjinqALG\n",
+       "MqBoCXV8HMv1lzdxQyM2yI/mwKI1Q1Ou6x6SuAydmn9Vc1xVjHVMFedunrV/t/Xd9mZXtYFVlUkb\n",
+       "XPm8yx7nqo2wqkzadCb2/a7u+jMseXFAeTg4lf50RU6SK7IsRaQZwhW4vk+838XzHOZOL3Nte4NF\n",
+       "xyNWirddeJDH7rufF196iYuXL/HdH/0oszNTuFIisgzPdVFZhkwTHWs8zw/iIDhuMXiF7XAJzG39\n",
+       "l616sYuJZmieq+ImyvkxyxNW5bVp/jbvtrl/+58tLkrHwXEKLlzIMdWMAUezqE0fy9EYy0CvMlV4\n",
+       "tjLKBF/0tuif3d7DXH6Zsx8Re1at11boOB+i0AKoPAc13i67veVxk0LTmJkD1wkYDHI+//tf5dix\n",
+       "M3T3u8yEITs7azRqIcP+PkIlOG5Kmgv225ucPHeSXnuPYXvA6ZPLiHjA9uuXCfpDnvmDL/Lxj3+C\n",
+       "7dkWWU+QuDAcpuwPN1HBa7TjhKg9JPA8nEyiHJ/Q9UjiAUoNiRBsb23Q39pFztap12fYuLnKsXMn\n",
+       "kKmEOMULA1IBfgZu7hSxPbRllfHYtTe+w+Bq5vtgfMu638P0dng+yrGH7PvLtHonRsZmQspgZRgJ\n",
+       "u85JUp75zZZeJwFgeZ2Ux8p+v93HSYBeBvAqXbfNgNgbWBWXX9WeP/cqFKNrS9Nxbtb8y+KE2NXi\n",
+       "eOC4IPVid2MQjsOJc6f53B9+kSBKCOshe+02meOwfOwYrdkZfuYXfp53v+tdfOzDH6K/38ZD4joO\n",
+       "UmW4ElJl4rSJIp6EfpfKM+3lae2ITmHOZbgHY6cNk/Vw9nebAy2LocAY52E2CLOxZVlOmqRjdYGW\n",
+       "YMxYmXbZC9uWONIsIU3TAqQVjmO4/YPxNu8ui8eakMToHbbOetyc0gJlddiGdhJBpmk69n4hbJv+\n",
+       "A24sVzlSjUtKpg1VqgMvcEgGMfPzc+zv90jijI2NXbLMAxnQrAf0dm4TdXZIlCSs1SCP6OUx3Thm\n",
+       "evk4ocq5dfkinW6fhg+b66ska7dZnpmiGw956YVnaDZrrG9v0d3rkiW7uJ5HLagzMzfN+nCA4/sE\n",
+       "jSZpCo2ZJtMupHGffjciDSVho0Hmh/huDTfJkdLBlx5kEcqDBB2F0M0dFIKssMgyslq532Ua1ON/\n",
+       "WG1XxWXqKg5z8nY8eXutVpXDXP2B6mTS/WVp8yjpzi5VAfGOapttymeesdtpX7P7XN4Yy/XbNGn3\n",
+       "uSqtX5mJK/fFPPvnXoVih3oti2BpmiIzSQq4jlPoMnOyPCPLBVkmmZ+bI41iBklEy51leWWaoF5n\n",
+       "JcvoRUO+6zs/xMsvvciP//FP8Je/53u4cP48/V6XmuMSFZk4hCNJskSf8Ls6IplwHLC4CBuc4yIF\n",
+       "lK3qMGaGBnDLE+z7PnD4hNyUqskcEycRI/23XezY1gfjOKp1BKiGGMIwtLggoyYZ54ryXGcYN+UA\n",
+       "VBl5ixqwrEptV+5DuR67b+ZveyxtEdR+l/1bFYCb6/Zm2m53qddrXL9+i3rYIqg1eOZbX+SRtzzG\n",
+       "xvVbiDTm5toqLgIPIIckzag3ppH1nBOnzrHT7dPf77E8P8/WjRtcv3yRRpaS5gluvcatmzeZm52l\n",
+       "02lDnhEPhkgp6e7vsbC8gNOqIadq1NNZOmmMErC8vEB/dxeRuSSdLs995eu89fgnyNOMzRu3uP7M\n",
+       "i5x/5AKDXOAlCuE55K6OrOcoiSpMP0Wpv+WxsT+rD3kPb6h6bg67optSPguqKmWp9SjVgKnfMAZl\n",
+       "ACzPebnYa8BsEuaz6n1VMUeqNkCb6TCgX5WsuVqCOfjNvM+WLMr/zL3lM7g7bV5vOoCXD9PK+RNd\n",
+       "PLyiT0Kf2OAKD6EUjpT4SGanprm+vspDC6fZ2eshOwMGcUy92cBzAp564p0olfNLv/pp3vWOd/D+\n",
+       "73gfcRIjAx+hFJAjpQ5Fmed6gwABUh8aZlmKFAfxWgzBmByBtm2srXO23Y3jOB47oLE53qpiE0We\n",
+       "5+RZPgLmMpGVDyvNdXvh6X6lY1yQlA6OMx4r3TxjmyeOqYKycf293cbyYre5F3tOq9QqNriP+lzi\n",
+       "esy1KIoOEb7ruofUSgBBGNJud5ibXyJNFH/0h19la3ObcGWKs+fOsn71Cs2pOYbtXbKoT5Sn5Cg6\n",
+       "7R5Ti0t4fh03GjDTlAS5Yu3WDfw4xpECT0rCICDOM+0yH/jkmUKGNYLAJxoMtbouz0mjIa4jieNI\n",
+       "JyAWCrKYwc4eapBxK0lZ//2Ad7/rvXzsQx/hH//P/4if+IX/g73bXRrCwUmh70EsFH6Wax8IFKjD\n",
+       "m2d5Lqt+K9NZifoqrZXsgzWbYz4qi5VN8zBZFVJFE/bnUUBmgN98t9U05XVgt8GmLVsisf/ZDFkV\n",
+       "01C+ZtPemORYklhtxqcsAdxJbWKXNx3Ay/qlcmekdMApdjyJzlKvBEIq4jzDSSXvfce7uPLsS+x3\n",
+       "O+i4vC5T9YB6rU6qcnb3dojTiPe99/088+zT3NpY45Pf873UfB+Rp3hCO0nk8RCvkAgQLsJxQcgi\n",
+       "CH1JF6zU6FDQcN9Zlo08Nm0O1XXdkW7Z5kQmiXD2Qjnw1kSnfcoPsgvZxfxtOOSy+AoHcVzMPQb8\n",
+       "bXHREJmRMsptlGKcUy4Tvf3eNBtXoZhDX7uMVGUlr01Tl8012gfFZc6prH80i2uYDAnrDQb9iHiY\n",
+       "8fWvPc3p0+fJs5wojlm7fZuw2SRwJWESsrO7Sz/OEbUGzfljbOx06fT6nF5ZQcZDRDQkUBkiF/S7\n",
+       "HbpxHxl4qFxxcvkYvXyPVAe+BjK6e3vIOCNNlTarTHNUBuurt/nwRz/ItYuXIZMM6wHB0jw397c5\n",
+       "c99buHDPfdRrdWr1OjJJ8aQkcRSp0IfUXmE66nCYhsbVJnd3eGYXfd9hicfzvDGuumx/bZeJlklH\n",
+       "cODmPVXgfVTby3Ri00QVZ10lzZXHz/yz1R92X8vWO/a18uFl1fjYDErV73fivE150wG8yvjd/kxV\n",
+       "RlYE6LXDRTqugySnhuTMykm++sUv8eFjx9lvtwl9nb1aJQnJcEjD86kHPsN8yOOPP876xjr/9H/7\n",
+       "F/zFT36Sp972GP39bZw8pRkGJHGkQcI1mTz0ghGIQ5NWBq7yyboRvWydW3libJ2iWRDGTnWcE6U4\n",
+       "Vx2f8LITRFUbQROXIUZ7IdqHKnabTH3lRZpn+RgHU+U1Ouq/Gl+ctvhZFkHNpmcnX7A5FvtsoGph\n",
+       "TBJPHd8hS3J8p8bFN64xMz3Hwtw8eQbXr1yh3+8hpGCq0cRNYdp3YRATzszhNKbYXl1jutVCqYyr\n",
+       "ly8i84QgcInilDzL2NttkwiF7/sszs5RbzToRAnRMMINA3rtDrXZRTY6beZWjjF17Dhud0jdcTh2\n",
+       "4ixnTt+LK31eunmD+9/6BDfWVol9yVueegIR1BkMI2qtFmmni8gh92GIpJYLhIKsAtfKXOy3C+J6\n",
+       "7A7fWwZlm07KxdDYQX1Hq0HK778b9YFdqizIJkkfVYeNVaZ+NqM1JoUWdFo+JyozFZPG2tC2zcHb\n",
+       "jIfdhzuVNx3As8xWn4zrjg04ucgil17RUaWIlcINfESqOD6/iKq5CBVDFpPGCgn4ns/CsWX2ux2k\n",
+       "L/GCOTrDHksLC5x/8HE+81u/yd7uNh9837vxhWLQ72ovt1xnrXaKg00d7F2QkY+BQxAERS8sQE0z\n",
+       "vdkUnLfNDRwG5epT7eFwONKnw+FYDza34VboxQ0HlWVZIWKDdr3WnnsmNIgjC2lBZZUmhoYjHyfQ\n",
+       "HMf1xsTociyMESHnhz0nDdB6njc2JqMwwVaMjUkRI6u48izLSCy7/1FbcoVKBZ6EqxevcmLpOMNu\n",
+       "j7m5OTr7O0iVMegP8YXCczIiIQnn5jj/yGP04pTZNMdTOfvtPeJ4gIpjPN/F83wGaVxsjDE1zyPJ\n",
+       "M8jADwJc16Xd7bCznfPU299Je2OV4w89iKrV6Vy+RR6lPPf8y5w5cYKF6TkePn8/i815Fp88wdVr\n",
+       "V7n/3U9x5dXX8AOPtd0dph0XmYASELsZtVRn2FHu4YM0W8K6E5hUqT90MpFxl/lyKasKyvfa1lj2\n",
+       "PJVprGozNjRzp7aX22PuNcyUTXt2PVWMYlUb7N/KNFfFeBrd+6S22SWxchfYn3c6tCyXNx3AzUBU\n",
+       "6YOF0ElQHYQ2BxMglEAiSIX20vSFQ5xneM06m7fXaTVbJFFEUKvT7/fY398nCGs0vSb7e/t0+12Q\n",
+       "gsSp8YH3fyevv/YKP/VTP81f/6vfx7GlBRwBg16XoFZjGA012LheoYMe32GrCFcIoc3sGOc+kyQZ\n",
+       "4yLt52yOw1a9TDrlNvcY8KzimO1nTP1Znh2qVzEe5tIG2ao5sRei+c0s1ipuxQZUu91mPMqgbXPr\n",
+       "5nvZG87+e4zjL42D67okeUwYhsjc5crrlzl39j4G/T6dnS2kilmca6GGPoNOl66KiJGcOnWO+uwc\n",
+       "ne0dLjz0AKEjeO4rXyJXGX4YEMUJoS/JE4VbC3BiRXNmiihL6bZ7NL2AWhAw7Tp4QYAUkpWVk0RA\n",
+       "5gfs9YfkueLqzVvst9s8+djj1FxJcvUqJ9/6MKnnstbZ5aHT97C9c5soCHVWnjQnT1ISAVma61Rg\n",
+       "jNOIGd8yXZYBzh7bchGiCFnBYYCxQdWmlTIATtLjlhmXsmqjCuTvVIwKblJfj3qfvcnZ9djctl2q\n",
+       "Npwq0J0E5DZQV0klk75PKm86gJvFaB+OmCKEAFdCXpgbOgI3F0glyIqYFkEuyVyHhZMrrK+vs/To\n",
+       "ElEcs7W3SzSMaTSbZCg2t3cZRhGNZpNmM2R30Gdrd5/7z9+HKy/wr3/6/+SHf+hvU/Ncjh9forO3\n",
+       "y/T0DPFwoF3IrR39qEHO8hyVHfaktEGmCqhtsDScZFmEM+NT5upNvZNEx4M2i8qY40bcLYNk2Vuu\n",
+       "3B7Tf8NNV4GD3W/TDpuDMXWYzcgWS219vj1O5qC4TCv2oWkURXrxOTl5pPjKF77A7PQsLz/3AqdP\n",
+       "neT1y6/gOQqnXmOuMUVrbpYb3R2WF45x/oGH2Or2afcHLC3O0964SRIPWVxaoLu7zzDOyAZDcFz6\n",
+       "0YD6zAzzy8fYu72FFwSE9RYzU9N4nsf23i7rr1xm8YHzbO12CB2PIPBx0oyUlD4x17ZWWQlPszTX\n",
+       "YG5uhpnePkQJm9dvUW812Om28Ro+ypE4QhJIiXQhSw9H8SuDThUI2LQ2CSQO0tkdFJu+7M+qOmwa\n",
+       "tdtStY4mcb93w3mb+ybFKyqvh0njY9pkM1fmTKuqXfbGYNN8+d5J16qkgDs9X1XedAA3emNTbKCQ\n",
+       "UpIqEEpL/SZ7dK5yhOviCYkYaDO4k/ec4cVf+n1Onz1LnGU0ZmaYDUKk4+I4LlmaM1vYFUtHcGKx\n",
+       "wfLcLFtbO0RpytueeAe/+uuf4Ym3Pk6j1cILakTRkCQeUvNCFAfWHmWzR3vyHKljthggKZsfGc6y\n",
+       "ilMun5qXJ7zM6ZoihBgzpTIqnYNUaQW3LsdNNvM810lYGSdcA8p2/VXSkSkmznr5ur1YTBvLnL7p\n",
+       "Y3kB2pydabPpv9kwqsDL0M1IN+9miMTjd3/nd3jikbezMDNLb28XJ0sY9Nq4kU9/a5NW2CCrhSwd\n",
+       "W6EWNth+4ybNVp14OOD61SvkSYTr+YT1JnGcMxzso9wM4TosrxxnmCbkjiSoBWzu7pImKcePHydX\n",
+       "is7qOufuu5cocJmuz5AuzTPc3iYnY5D0uXTtNV545VkurF7mexcXaDk+uYBbN64wV28wPT9FnEt6\n",
+       "xAgBXpQxNCqG/M7cXxnEbG61GiS1zXgV81FVf1k9IoQYnbfYqhC7DnOf7VNgz+PdtXO8DVVtq+LA\n",
+       "y4yi3QebnsqcddU7qtQv5WJbo5TbVW7fnTbfcrkjgAshfhb4OHBbKfWW4toc8IvAGeAN4PuUUnvF\n",
+       "b/8d8F8CGfBfK6U+e6d3GCcOOz62GUApHbxMkYqclOLwIIdUpDpBaAbCldxz4T4+c/3n6AwGhPUW\n",
+       "0q/h1Ou4rs+tG2v0Ol2kEDTDOjNT00w3fYTnsPzgA+y0+yweO8Hyyim++KUvENYDzp85CWlEzZck\n",
+       "qVY+2qqAPM9Htt1gHxyBKADEnhzzu0nUUD4AKjseGOI3ZotldQIccKm2+aUuCv01x45/EUXR2GLL\n",
+       "Mp0ntBb6h7h8O/KavTDt9xjCLUsGdvvL+QBtyx17szIWMlWius19Gw58Eidom1QKIYjTPqurq7Tq\n",
+       "DTzHxQ1DLl28hpQpjkrxXZ84iui3E2I8pHS4dWuVsFZjcX6BrVtv0N7ZgSQmynNmZ+cJ69PsuQ5b\n",
+       "7T2mZ2YQjo4XPlVvUm9Ocf3iVXrdHo2pKRCSIJRkIsMJfBJStve36N1ew5GKPB/gpSlODi/94R9w\n",
+       "+fItfvCH/j7LC/O0zl/gc5/+DT7+F7+HPTdlV0XUFTjDhK7McDwXN1OVwGJvkGXJ7G7EdCHGmalx\n",
+       "Gq8GnTJDYn+a32z9vPktCILKM56jDkntYsfpr+5LdRyTcvtsZsGsqSodunmufK5lz8OksZ60GZrP\n",
+       "crjeO5W74cD/b+BfAT9vXftR4HNKqX8mhPiR4u8fFUI8BPwV4CHgBPB7QogLygQXnlDKh3V2yfOM\n",
+       "vLAAqTkurqsgR3MIQO7phKLHgyYzZ46x29nH8wN6gx6ra+vUvTqZdJhZWsRJM2pCsre/y/ZOGyEl\n",
+       "SZbjeDXqrRYqV7znPd/BV7/2NGma8tCD96EcgepHkOdkEpLCu9BxHVSutCec0Lpk5QhQOXbyb3ti\n",
+       "DAAZ7tae5CoO14CffeBRJo4qgpDSLALtrGPu8QO/SOAKQjq4ngCldLSQ0qKypQ2bE7NNAW2QFRwc\n",
+       "5ZZVLvbCt6UM+74yl1Kl0xy9a6SfLa4pkEI7GulolYIsS8hzhe/Xefm5l0mGKc3pKZ575Rt4WYzv\n",
+       "KJpTLYhj3Bz6WUorCHRW+dU1Tpw7S9Lzae9sEkiJH7YQWc6gHyFdj9mVFeR0i6nFBdrDPoFXpx7U\n",
+       "2Vhbx6u5KFJ29rZYOXmCOIl56dZF+psudSck3t6lkUlUliKlR4ADIqeTRWwNOnzqUz/HA6dP810f\n",
+       "/yiPPvVWvGFKPfDxlEdGTi7BdTiI8pJlqAwcxyVV4PguudDR2B3AKYJepYxLjJO4QEOK+pIaqVOU\n",
+       "KvwjzDwITfw6njylokZ16TnW9GmYHnvzLlsl2cVu66QDPu2N7FgUKKw+VKtjqiRcGFf52GpJ+x77\n",
+       "vnGu/YCJw0oEfrBhqNHnJInBvvanwoErpb4khDhbuvxJ4P3F908BX0SD+PcC/14plQBvCCEuAW8H\n",
+       "vjapfpubKotsUGS+EUCuyNMUVXTcKSZ04GYIBHPthO/4yx/mlS9+k/vP3Ut7EDFXb9KUITvRgI2d\n",
+       "DaZdj6nGFMeOz1GbOk0aa3Drdrts3N6k2+2SCcXi8ZOs7nT4g1/4JX7wb/8ArWQPX/x/1L15kG3J\n",
+       "Xd/5yTz7Xeve2t/W7/Xr13u/3qVGEhIGCWQQYMEYGRyYATzYYDtiHOMZ22MHYcfMIHvGwTgmvMDY\n",
+       "4TEYGxASBgzd2AIktNIttaTeu9Vv32qvuvvZT84fp7Ju3lP1WgwxE81kRMWte8655+TJ/OUvv7/v\n",
+       "75e/hFiWS/4d38PKgSSDrEySlFKQCnAdC6cSGSKlPFjyrjnn/badcWxqZZYkCY7jlE64ND2Y7eFo\n",
+       "OsMMsyvvZe70M11oYa4ULX833SXEpB7MZ1RRhvnMqrKvCvhRkQTmJGF+atRfHUD6vFmnrMgRolwP\n",
+       "AKVPBEuW2SdtSZKmZHmC77uganz95Uvcc9cD7PX7hKMhS7aA/Th3OwclbOyay0KjTjrsYecT0tE2\n",
+       "r71wlesXL7PYnKPmN8jyjCRNyaOEuOGwfPYsx4+d4pWXX6EReCTjCaP+AFWkCMcijAf0Bx7pMGJU\n",
+       "ZPjz8/jteTqtBr20T6YEdgY15WAXOZkqmLgOj7zzCd740pcYFCFLD51j7es3sYo6rbkWW8ketutQ\n",
+       "yxRFmiI8F5GV+Qld1ydwfcZJREEBKkfkOTLPUUIcrCw+yvrT8qG/mwpROzR1CoajrKRqMSdr/f0o\n",
+       "6uKPU74RTVHKpnl+OnlM/58NAzxKLnW7mJFN+pxp0RxFUZX3MieYKZKfjqGC6d62YmbS+0bI/Hbl\n",
+       "T8qBLyulNvb/3wCW9/8/xqyyvkGJxG9bqmZ5lQ7QKxh1w+nogqowZFnGkw8/zLO/83ts9TbJUvBE\n",
+       "DVWvsdRZZLnuE9gW/fUNouGQW5tbZacXivn5Be44eRppSYQlGUVjwiRie3ubX/q3v8QPfugDzNUD\n",
+       "JAJH5RBlCEti+y6oshFdVebLTvLsyE4wzTN9rBrTrN9Zhyea3v2jFKBuL01BVBWpybsrpWZCu7ST\n",
+       "UghxsDmz6WCtLqwx38fsO33cVO5aeR9VqnHl+tN0curnHhXXXRQFSFnCw/0BKkTJuTquS5JE5FlG\n",
+       "vVYnzwt++zefYeXOu7BzSbzbw1KKSGX4QmFnOcqywLM4ubrK/LETfP3SBTqdeSgKdtbWII6JVR+3\n",
+       "0cCVDnGeM4xilC1pdjsMoglWELC4vMiFl14iyzJsJRC5IpvE7K1v081dlmwbPy/3tSwWmmyNdvAK\n",
+       "iZ8XCNulkTnc1Vjg6iDnlS9/iYfe8xSNVgdcn54NfhISpTnM15js7HCiMUcch6RKEVtg+Q6xyinC\n",
+       "IY4SB3uX5pYksij3AeUwajSjo46yjEx5Owo46DFZPV8FJtqSLLtuVqZNP9hR8vZWaFXLsom4p8q5\n",
+       "lI/qe5jhveZ9q0pdUyhVaqQqv7er1+3G7P5ZtOya11d9Uf9fKfCDopRSopqjtXLJN/gjOE/XAAAg\n",
+       "AElEQVT9EcjwcKYwkyMzFYypeOxc4jd8UpGx2F3EFh7jMGG01sNyHJDQatSpuTVWjp9kY2OL4XBE\n",
+       "vz9gc30L1/doNOvUGw2wXB578BF2e3v8u1//OD/xoz+Cl4ODxLcdJklMjEJJgYXAVgInL/BsG+XO\n",
+       "pp81Z9ej8iDr9zMD/JMkORA0TaWYoX0m1aCVneYCzYyC+rl6wlBKHaB6U0ma9TD7xuyHwwhEHaCI\n",
+       "ap1mB9K0D28X4mYq9qMQ3Wybif3FVWUdhBBIxf5WYwLHDZhEOdev3WB3e0xtfo6TS8d4+eKnOLV6\n",
+       "jOF4G8IBKk4ZiISiVuOY7bEXZqTCYml5hc2Nm0z29qhbFp4o6G9t0mx3sD0Pj4LO8VXcRo2rV29Q\n",
+       "a9WxXAfLs/FrNdJBH/Yn9DRP2WrZBJaiE4Ys0aVTa9Fup4S3tnCkx9IdqyRZwuLqCdSLF3nwm9+H\n",
+       "9+7zXFzf4ezxJt3VZeROD9e1+OQX/pD3PfEUF6+ssbw4T5hHZLZEiQRbWjgW2GmOpQSZUGRCEcmS\n",
+       "6rP3m9x0GJu58KsUl+47E5FWKZjq96rszN5j2l/mOVNmD/f1dNK/HSgoo0Vul5/7sGxXx575myrQ\n",
+       "Md/PnJS+UeigEMyMh/0rZurzjeT8j2Ot/EkV+IYQYkUptS6EWAU294/fBE4a153YP3ZE+QcA/Jtf\n",
+       "eI1Hzj/AIw8/eCA4JgqvOhxMR5dWcAeKahJy17m7uHDlEv4pD5SN113g5NIiMhPg24zjkNFgxGB0\n",
+       "gzCKcF2PTmsOz/Op1+v0+z16vT6DYR/bsUjDkMfe+U5+6dc+xn/zgz9MMo6JixTPdUmkohCKXCms\n",
+       "IqcoFEmczXCN1VnVVLbme5kOPyHEAQrXzlIz9MsUqiAIDqJANIrV99e/N59tDgI9cEykWw1rNJW3\n",
+       "+Xzz+JELaI6YwN7KkjCRjonsTLRzoAiKkjNG6GgJ/VuJEB7RJKPdXuCFr3ySE8fPsRMN+OIX/4i5\n",
+       "fUulu7RAumcRDQZEWUzQXkDW2lzY2CDwatxa22Tr+nVajkvTscmjiCicECUxyvNYPHkH5+6+h9Fk\n",
+       "gpCSeqvB9q010jyj0WoibBsVJ8RhjEoyxKKF43ikRY7MFE3HZeXUafrK58KVywjH4ZGnnsBV0MDm\n",
+       "2c/9IafbPnff9ygUFt0zp3nt+u/T3ok5P7T51K8/w+Pf+UFuCYUnHFAZVlLmCR8OBzSCgARBBqSF\n",
+       "pJACW1oUHF485rruzJjT/XYUEtf9Vu2/233qYo5by5pm2TMtLfNa0yo76ni1lBPLNIHVVNamfH51\n",
+       "kjFlujrxmIDndrJ6NAI36ZpZ6ub/Sfny8y/wpee/9se69k+qwH8L+BHgH+9//oZx/D8IIX6Wkjo5\n",
+       "Bzx39C3+AQA/8eO/DRy9W4uenc1z1ZhpUyk6ecGpk6d57otf4YlzjxHGOYNwQjiKsSPFoEjIHElL\n",
+       "WfiBje97ZFlOlESkacxebwfPdVmc79DttMizlPFkxCCJOHfP/fybX/x3/MhHfohoNCYQApXnqP0d\n",
+       "fhSKwhK4totnDAT9bmbqWTO8UNfdjJ6AEnWkaXqQvrY6qKqUg0mX6FJtS9NZVJ0QdL2qIXtm0VSN\n",
+       "ee/q7/W9q2avaQ4fpcSrA6L6rjP3VfuObHIKkYHmFXGwrYBmo87XvvIK9Xqn9JnEKcPtXXwhsFWC\n",
+       "KBQyaJArl2bg01o5wa1JSj+K8YOA3d42k+GYOhmoHEsVSM9lmEakqqCVRPS2thmOJ5w5fgKUYi0K\n",
+       "2aeKqTWb5F5CjkVRwKLbRmQ5kzhiI+yx5+SApHmswajvsjnskzz7PKo/4fjxBWxVsPXaRRr1JcL+\n",
+       "iOVj87iuQ/rqJd4ZdNgUY/7lL/8y9z10H3/m0UdZ9FtYozF2luLU6uSiIBUFOeW+n56SkENapIcU\n",
+       "k95QRcdnmxPzUeUoa+0oyrAa5jlF0dlB9JQ+V11HoP/XDvO3cnJq+TPvN61rYThgZ8+ZzzNBgn6e\n",
+       "ljUTVJmg6CgEr1Nj799lpn7lJ2gL5HZxHUII3vHko7zjyUcPjv3cv/rFI6+FP14Y4S9TOiwXhBDX\n",
+       "gZ8G/hHwMSHEj7MfRrj/Iq8KIT4GvApkwE+pb2AHRFE0M/vrDjSRgOu6B0pMl2piJKUUvutzz133\n",
+       "cGttk0kYETS6CN+HSUYcjdjY2cZq1ymUTdfx8HyPbruDZdkMB0PGoyHbUUSeptTqPosL8xxbXmbB\n",
+       "UdxYv8WDjzzOP/u//g1/46/8BEkY4dsussgBhXAkcZ5SZAkkU9NQKz2tQKvvpzutuuzezPNtKjLz\n",
+       "d7odTCelqUxN55QO3zMjYMxBWo25Ne+jc4iXVJc4uJ+O666G7k3NzGnucH1/k/7S/aaRmRDaYTZb\n",
+       "L73hBxQkSYrMHZQoQJQKXAgFUiJUgRAWNa/Bb//W0zz+6DvwPJtsb4+OYxNOhgS2IBmEiEaTXcvn\n",
+       "xMl7ufeBR3j2pVc502kRD3psrq3TQODbDq4tEJ7DIIzIUcx1O9SbDV7/2otYtsOx7iK31tYIe318\n",
+       "ISBNwbEQrovfcZBOgF/zubW5hlcLuL69yVN33cnisVVeu3gBe6XD9lYPa5/aj7KID7z7PfzHF1/m\n",
+       "pCtIdvtM1rZ55PF7+NzO52gu+pw/dRfjxWX+y5ee48aFy/zkD/0gLgLH8QhqAVEWUhQFjpDIXCKS\n",
+       "nDTPyWV+pBzqNARa3kplO40cmcqCBh1mvnhVuWYKAEyHvZYjjUxNytCUbTPZmpaxo/hxs5T7xZbX\n",
+       "p2k6E6papgWY3SkrTadK2VTeVYd+GMa4rnsgu7Y9y9tPx8t0vGngo9tKy78+p3fKUmo29FXXrTp+\n",
+       "bxd5c/DsPw7P8v92EUIvzYHff+bjMwrA5Lu0oqrGDle5YN0JcZKjGnV+43eeJtqdcN/5R4mFQ6Bs\n",
+       "GsLHaTUJ5ttk/TFxskOWp8RxucmBJS0C36fm+0gpyNKEKByTZznSdpCeQyxyEpUSjoZ823vegxhN\n",
+       "kGmCEIpE5iSiwMHCUrMUA8wKrGlZHJ2LQs6sUNVopCiKGYfPUdydiV71NSZ9UlW0JjqvWkBGfx18\n",
+       "agSl721aAeb7lX/WobqaZRb1ZIcmFVMOzLBGMgukQokcRQ6yHBB1v8WwH/LcF17gzdevYEuP5nyT\n",
+       "4aUrZMMB+ECUIguba2EIK8c4d+cDZKmgLwoeOdnk4quvsHX5Imq4ixoPaPg2hShIFCSWzZ1330+K\n",
+       "YGtzk0aziW05bG1u4jsWvi3KfOBpTCFtogzuOf8wx1aXeOXN10hVRpApHrjjLgpXslvEvPLKq7Ry\n",
+       "m6CwSVWOlY954u4Hub7Ww+ouc8999xP3d1k50eXUyVVe+s+fY7m9ynXb4s16xhtb1xnsbPGR7/9z\n",
+       "dOsBLgVWUZSRJ5kqc/sohbAt4iI9pARLJT0rLxopmvKn0bqJqPVYlXKK3m+nT/Rz83w2B4iWd/M6\n",
+       "00oz5VIpxWNPfeDQvZ/73DOI/SyZZtbPMoR2NiS2/Jutv77etGx18TyPJEkOFiZpWdcWiynfprKu\n",
+       "TlQadUupgZ01o6DNcWdav5Zl8eg7P4BS6sjZ621fiWlGQOhiCgnMcq76hfUMb0ajtBptJnnGd77/\n",
+       "2/ln//znudcCH8Hm2gax36B/4xrthQV836fZtrBti2Z7Ht/zieKYwWDA1m4PAdRrHp3OIvV6jd2t\n",
+       "PVLK1KQ7wyGLC13+u7/zd/i5n/1ZBhsbFGmCcC0s25qJAdfvout/FLdmcvpHOR3N/OhHJao/Khuh\n",
+       "vhZmPe5HtbHZrkcp/tm8IxwMCH0/E9noftHfLcuZuafu02p/lvf7xlnYDvrfEiAFqlTfCAGW5TAa\n",
+       "TkjjgquXrpInKY5rM9q+xdb1S4gkprHQokgLUC6NTpfWqTuYxBE7G7uceeQhLr7+VdZuXqPp2lit\n",
+       "FpMsJVQZcV4Q5QVLx5dpdbvcvLVOvVbDlZLtzQ36G5ukjk1jqYtjS+qNBkt3nGFcSL7pfe9jeb7L\n",
+       "A088Rmexy6d+8z+xvrbO3Y88yGC4g9MIyAcx0pJYCtajkE988mn+7MnH2Prqc2THlmieP00/jRg6\n",
+       "FsfPnKX//EXuevxhkkbK6uoyN/e2+Pv/y0f56Ef/JzpejXC3x1K9QV5EhFFIvdMiSuJDm3CX8nM4\n",
+       "xrpU6rMUgRCCubk50jQlTUvQUyLfkqYwAZhlWXied4C6TeenCWjMfjWVp5nuwaRNb4dGSxkvHfga\n",
+       "MZf3tNC7EGkro9QbziGdY04auk4mNWNaJ/p+VXk3LV6l8hmgA+A4NpYlDyYRc3crDUyhtCiCIJg5\n",
+       "drvytitwmEYtmGaJFoTqMm1z8Oui/w/DMY7tsNBu4dZdJtEYohGL820azTlacy36e31GKmE8KfYb\n",
+       "bxclBEJI6rUaQb1DPfARQjGJYgajXfIoJSsKLM/m7LHTpCrlQ9/zYf7pz/88f+H7P0zNr1GkCbKQ\n",
+       "2FJiVbhB/V5aQHXUzVHK6gBl7l+v83Kb502FfdQmBvqcyd9pxF8dSMCME9Ksk5RyZgIoj0/vV+W1\n",
+       "tcCZE3DVStADp7oS03EO76F4lMkshEDJMoRQCVnuniRthBJ0u10++czvkyYJx1aWyZOcaxffxLIL\n",
+       "WoFPNOyT5AWJDGguLbPUaLC1s8dTT51nHI65cusqIosphMC1HdxGg71hj0musIMaTqPF1Ru3UFlO\n",
+       "u9tgMhwyHvZp+C6eUgw2NlhZWeaOEyeZW17Bmpun0Wzx4uVLnDy+Sm9jBxeLLE5Yu3yN+lyd9z35\n",
+       "Tp79w8+ztbtLf7eH6wjm6m1kzWYv2uLmtTc5c2cH363RH4Uw32CzIVCbt5Cxz95WjBs4/NUf/Qme\n",
+       "fvqTHF9Z4d2PP8LmJMRTBV49IM0SkjTGdbxDyNecjM32NidyXUaj0YGiDoLAkIupfGiZG4/HM7Ix\n",
+       "VW6Ho01Mx7k+XnVkV+t3SCYqsljKn0N1IirrMAUn5rlqgjX9XHPiMIMFTBk9Smb176Zcuv4ryPPp\n",
+       "8zV9pZkGHTp91FZs1fKnQoGbSYhMxKpUmWfZRKSmgoNZheb4NkWYMNrb5b7z93Fj7TqP3nk/O7t9\n",
+       "wjRGxjn3nbuXocywRZ0kyVhfX2d7dxdLWoxH5QKa+U6HPE9KnttzsBHkWUaRpMSTkNTKyIH5E8f5\n",
+       "X3/un/PRn/5pmExQ43AmN5xZNy2AWpnrpeOmyaY7zNzJx+QJ9T1NJWfyg1Ukrn+nOUAz2sRE1tV7\n",
+       "6DpU0X3ZV7Nx5vreMDvwqiZ11YqAauhkfujdzHfSFBHsT/gClJKwH07o2B7ra5t8/rOf59wdd7O1\n",
+       "sc5CdwE1GpKTEKaKlmMTSsmoyGjUPG68/iqF7xCFc0Rrm6g0xLdskjDE8XyE5xNY82RpSL3VIlWC\n",
+       "Ub9PK6gxGA+J4wnlIsoCV0ocbESSsnX9BmGmeOq+h3AtG6RFPIz46ue/yGKnxTe99738xm/8Ot/0\n",
+       "Te8k3ushpCBKImrKwhomNFp1Xtq6zJMfeT+f+8IXuePcGeZOnmXvVo/lB87R/Q6PziDFSxS9nW2k\n",
+       "9Bj2Yx65+yF68Zh/8Uv/gb/0Qz+AdCRWluKkKXXPJy1m06Ka1JrZ5uX/s+ixmurApLV0+KrpbPZ9\n",
+       "/5BlV47nqRPclP1ZkDCrlKuycJT+yHM1k75iirCPipCRR97bpEVMq6Bat2oAgG5T7c8r22bKf+sd\n",
+       "pEonp/breQfjQfeBGRGWJMnMtoa3K2+7AteCYC7O0Q1aRWr6+up3/X8ahdQdj6TIuPPOO7hy+TMs\n",
+       "LnTx/YBMWMg45403XiP2HcJxjmO7BLUaDz34AEJaFIVib3eHKA4ZDIaQ5wjZIJeSWi3AlhKEIsoj\n",
+       "Ar/O6ukTCFvyr3/hF/jId38PbdeFLCdNMpQEBFhCUuyvYMOyUAIoFFlSzrrCmkUe1UnLVOYwNdlM\n",
+       "/ryqKKvIwFTK2qnjOFrYj3ZgmhkKzePlEvXZgWf2gVlX7ai5nW9DX1/2I2UbHeygbm4hZyIfhWc5\n",
+       "JBQoIE8ykJIoifnEx/4j5A7buwOGoxEWknAU0lhoo6IheV6Q5CnNTgfflWyv7WC367zylS8Rb24S\n",
+       "eDauK3ACHxubJIqRjsf80gIn7jjNxo2bKGuC5TcYTbbYvnGTrhfQrAXIIqcmfYo4w1EW/Z093nj9\n",
+       "NaxWm8XlFa68/CrtoMbrb77B8ftP830/8oN88umnme92cR2LIorpSAc3CFBZzoiUW8MdosGQl7/w\n",
+       "HI0PLrBy4g52t/c48+iDrL3wGqONPequwzAKqQcB/d0hSuQ8/vDj/O///F/wFz/y5zm7uIDv1Ukn\n",
+       "IY7nkiuFsEQ5YRYKimI/HYRV0lL7/Sz3nZhmMjYhyom2VGxaySuUOtqfY4KFKQg7nH71dt+r+Xhu\n",
+       "F4kSRRGW5SClOKBzpsr3MEVTXjsdd6bsaqWq37vValMUOVlaOvKzfDr+iqIApR25inq9DrAfV68T\n",
+       "z+lVraX1oXP9O04yQ4/q99MWt7YGZpPUHS5vuwI3Z9WjOFrzu2lSmOe1knBETiJTsCWLjSZWlnP9\n",
+       "1g1kZmELl/mFZewTNYTnkWZjJuMxURRy5dLLeJ5HqzlH3bfpNFsszjcZ9IeMJ2PCLCcsIjzXY67R\n",
+       "olWrAwoxVDx+7lFeTL7C86+/zsOPPUwry3Fth4lKcfwybagrJNKyiCVMshQpBD5l/oiqMjQVsGlC\n",
+       "mkJWDQes8nm6TUzBKO9Xhq/NtqHAsuxD7W3Wx+ynKWI+nDnxcDKqKdIxnTnVfi9RnI4y0ly7PPQ8\n",
+       "jVZqmYuyc3JVYGUSy2/wX/7wOS5+fZv5oE2jvcrN3R7bly5RTATx1hglE/asAiEUx5sNiEM8WzHn\n",
+       "2Gzv7jLo7+AKCzXfIWi1YVzgFQ7jVGHV2lhzCwTDFMdpMyFlsnaNZiyoJxF2I2fhzHFUXJCsD7Dx\n",
+       "OX7yNP1wRDLsEfW2uXHzGlEScenGZX70zF8mSWOOnz7O7sYm3VaNXtNmFCd0EossyxGeyxuvXeDM\n",
+       "wkmGl26xtrOB9cQ5ROgRX9rBnlshjxLceIRXk+Q+uFlBS9mEuzEfeOg9vPrCBb4wep4f+J4P063V\n",
+       "yJIxwpUkRQKiwBYFFgoLCUqQY5FhlXttFilKgXYOKlVO6nrRjxlSZ1lHL1zTZaos1QGFYlrTVepP\n",
+       "l6M3FDlcSgs+I89n/WRTQDCtg0bgJkDUzzWV+kH98pxCFUhL4lkernIP9iLN9yNNVKHIcr1B+OxK\n",
+       "cssCKfMDlsG23X0+PpmhjvI8P0Dc2qIxrZDblbddgZthRtWZsGruzSqHWWeflBJH+sRZjlf3mWt5\n",
+       "nDpxjK2tdZ585AnWb27w9TdfxgkCYpXTbc/huR6Lx1ap1WpMwhDHtun3B9y6cZVCKeq1GvOdJrXW\n",
+       "XInyJiFJlLC7tc1wNKQz38afeLzrne/mX/7rf8Hc3BwPnLqDMAxp1GpMJhOkJUilRRrHCClw2V+c\n",
+       "tL9aUzK7QqwqVKaC1ArdXHChSxVNmO2jFa0ePOZ9S1SVHmrf6jJ+LWymmTmdGA6buOVxcUjRm5OK\n",
+       "SZdUuXM4nJNc93Oc53jCIhcKt9MiSQWf/+TvY3t1QpXQnWviJCmW55J7LtJWxDlkSUqzM8ckzhlH\n",
+       "Pc6cPk0cTdhev4EvCzKlmIyGJFlGq9YB36YeeJw5fQebm5skUcj5B+4nzVN6ZNirY1Q4YnewTTqM\n",
+       "WGjMEXdtspqDmG/QX99gDocvv/ASp+46w2c+/xl+9C//GGEYIS1493vewyd+9WP4tTr33n8/b77x\n",
+       "JjEp0pFEYQRSYB1bpmbbuELQwiGvuaxt9rjz3L34RcxICK6s3cJxBd5cm8CpkcUJHVlgDXa588xp\n",
+       "/sf//m/zk3/1x7nzrpM4SUJDSoo4xZYKbEkmBAqBKooSlQPKWIhiKhl9zJQ9k06Z8syHHZdQTgKm\n",
+       "b8akMqrjezaC42ifiNYhpsMTpimOq2NAX1+14s2ACB02a8qmyRCY4OkgEmc/K6ipy/R9gyA4OD51\n",
+       "7qYIcZjv1zSQSSu/VXnbFbj5AlVTSp8zQ36qyNOcoaRwgJg0TsmEYnVlmZdeeJlef4f5xRaLq11s\n",
+       "zyfOUya7IUmccPPqFeqNBpaUOK6DLSTdVo0gCLBtm36/z/Z6iO26SGFR92t0T5zEsgSjyZhxNGTj\n",
+       "1iY/9pd+jM9+4XO0azWW2i3iMKFTazCMJoQqQ9oStyg3o80VxEIhBVjFbHKhahpajWx1MXlp/V2X\n",
+       "qhKthijpvTbNYiLlKg1j/k1DEMvfVZFTlSYpnab2bc3qan2r71s9Z56PnIJ6ViCLgsIp+NVf+hWO\n",
+       "tdoUXp1cCC5feI05y0IEDey6h7AFg3BEvdOi0Zqj1xthIxlMJmzeuEzdkZDFCNsnT2PSNCOMUtxW\n",
+       "i9N3nMWzLUgiHn70IXxpsXd1ncCxaS3M0w5W8a675FlKf6+HcnzufeQBrvR3cKXFxqsXuOPOE7zx\n",
+       "5tfxazXe/Z53E2UxhQTLcfnghz7EZ//g06QoFo8f4+rly1h5ge+5tFotZLvBPefv4vIbF+jecQdR\n",
+       "d46JgsmLL/Lu8w/y8o0bzOGwtzdk6EhGzgSZlgnW2rWAUX/IP/x7P81v/uffYicf88Rd9+C6NSb9\n",
+       "MW49ICkKCkuRyzLkUO5vcVhVG2bKhirark7S+rhpcU2V6NG5300LTitTc+OOo6xMU6biOD54prmo\n",
+       "rbohg/mnx5u+t7moSSvsadSISd3q6JTSYiy57ZwsKw49J0mSEsgZUSxSSjzPnZkoHMc5WH1dHYdv\n",
+       "Vd52BQ6zNEB14OoFPOZmBjCbl0Bfu9frETTqZYNYknvvuZvXXn6J3f42w7HDcDjEcX2cwGfOX+DU\n",
+       "qVMH9x+PhwwGA3r9PSzLIk4K6o0u55bPkOQWUZTQ2+2xs7lBGJYLJeYXunTn2wjHYnNti/e88118\n",
+       "5kuf5Qe+93uRk4TReILlOiQqxfUc3LTAKiBXBWle4DkOjhFpYwqWqYjNya3aodW9/0yFbpqEZhTK\n",
+       "7Yr5XE1XVWNlLeswBaLrbWZOFEIcDIgqMj9qgtB5zU0FYG7cYBblCMgKonHIZDTmytcvcP7cQ7hL\n",
+       "S6S25OpzX2Op1SB3FFs7m2S5wG93uPfRJxmMI9b7F2jXG2CV0RLpeMRczSb3LIgVRQGbwx6OJ1kp\n",
+       "ImrhhLnAZ2G+TYDFpS9vs7lxgzAI8BeXSmsribl48yYrZ87ylVdfwQk8+jc2uH9lmct7e3zlha/y\n",
+       "M//4o+QCLMcmUxmO77F8/DhPvfeb+fKzXyKMU6xOAxVG1Cwf17HZHOxypmahkoTx1XWCTpukFmCF\n",
+       "Ia+98CLxOMSJUub9GnbTIbIExThhZX6BkcpZrnv0d3b4vu/5MP/213+ZW1+/wve+//2sLK2QxWOK\n",
+       "LMVSlPu+CoWy9uUrn/aXLjpawpxQq6F1VYrsKKVpRmDNgoNprh69G45ped0ujFCIMiGbHkNa4Zpg\n",
+       "z+TB9WIafc6sb5UDl3KWETAjRarv22g0Dzl6df31p373MBzPTIhm2K9J9xy1Itosb7sC14PcXJwD\n",
+       "GChOzrygKRhVE81xnDKetijI0gxbShYWFkiSlOPHTrK6ehIpbUZhSB4qbty8VT5DCIKaj207dOfn\n",
+       "9wVKEccJt9bWSNMCx/FoNms0GwFCldRDGE3Y29lF2GA7Fltr69x5+iwf/d/+CX/rJ38KWSicLMW2\n",
+       "JWmUUGQFjpQUUmJJQZHlxEU2Q00c5eAz39PkhHXbmUKii2nGmQJVVbwGTXhQTEGbVfiq5GcNZGAq\n",
+       "5Kq5qrl1c7BX0ZoeQCZaMs1wHeJotkdm5cSqoNVq8vu/8Z9QUcxg2EOoDKfmU2Qhtm9BnJBbkqSA\n",
+       "uc4ysfLYGAxYOXMPtsq5+MKXGYcx7aCGIEUKC0m5yMIPPLxWg52tTXauXOeRRx/FzXNe/trz3DHf\n",
+       "JVtqE+/tMRoO8QKPcRbRufcsXmee/o1NiFKank9Rd3n++a/yU3/jr3P23F0kaVLmzykKcASFyml1\n",
+       "uyS5otGd577VBTZu3kD0xohckCYxn/rDT/PBx99LkBUMrt/izWjAXGTTsCTveseTXH3uRcI4oZcM\n",
+       "iHwbT1ncvH6D1IbUlsiiDAH8zm/+AHujHh975hm+/8PfTdv3sWOBowqkKshUQSbKnN9Szm5moJHx\n",
+       "YZlgRglX6c1qP5d+GGboDX2Nvr+ZOiKO4wOF+lbKzAQPVQelab2V1MR0fJl0jTm+pjHaZa5xse9I\n",
+       "T5Ip0p8uxinjzbXj0XyuBp/VkMVy5bGYoX+qY/mtaCNd3nYFritYjTaBfXN5PzTH7BR9nZ5lD0wn\n",
+       "VyBFge25eEKAlDzx2Dt4+pn/jO3WkcKjWWvSarUJFgNsu+TfsywjnEyIk5g8zfADj0ajged5DAYD\n",
+       "8mGPwWCX7e11bGnTarVptdosLnZZPbbMJByxvbvD3t4Oynf44R/+r/mPzzzDh7/rO3FsF6IIqWS5\n",
+       "EbNbPtOTZQY9JaboFWY3t9DcotkuOkuhiayPMl91O+lzJXKYzaEy5eymfWEKnh5URm+hnVpCmGZy\n",
+       "id7kfiSPLmZ+Cn3fo1a/QYl0iv3JrMxhUe4m5Dg6RE3tLx5JAQcrCBj2Iq68+iYNN6A/6hMUGbcu\n",
+       "bhEOh9TmCprCRjkerWabztIxLt3YYKc/4YH7T9LbXCMXFrYfEE9i/MBD5SVvDyWaqs91sAtYWV6l\n",
+       "VsCVl18mQCHDkKhICGoB/fGQvVFEWne59/yDXL18jSROSMYhtiP52O89x3/7d/82Dzz8cJmjm7Kt\n",
+       "bMcizTLiPMf1ArrLS+xu73Dm+Alcx+baa28S7U0IbBuv5rC7t02UD+j3trixs0HamCdf6LDd36Wz\n",
+       "tIjq79GywfIsRK6oBz7KcVCuRRzGNLwa40nI8cVV7FrA//xP/w/+5l//SeZdDxAElgNpiAUIKciL\n",
+       "2bFoprqoKnEzFE7/VfPK6/7OsuLA2VhGaGiQIfbjs/UScnv/mZr/Bse53QKvcum/OS60jJvUTzU6\n",
+       "qjpWqvSsEIIsS1EqObjWtm1MGqkoyhWfpYI2l9fPWp1mQrpS7vMjn63rbj7vrVNIecYAACAASURB\n",
+       "VMrbrsCryBsOc5768yjnnuM4B0omy8sQHJWngEBZNq7rMgkjOt0l0lhx6+Ymazd2cWpOyXlbFo7r\n",
+       "YlkS27bw/AAsm95wjBhNyLIM13Pozh+jFjQAGA1HhJMJg0EPncHIdWxOnjzFJI4Z94coy+H1y1d4\n",
+       "5Ow5asLCkgLpCFILijTDSYoShdmzXm/tga4WLVBVE/YoJ6GJeGdNx9n4cPOeeuBVI0nMe5vPNY9N\n",
+       "TWKBTv4Ps7uv6N8cFalQPv9wHnWM3WDK/vFKB5RUhFnOaDCiXZ/DC2qsDXfJdraZrK2TWHAjjukK\n",
+       "l71awKP3PECaK5IwodvpcPP6dW5deRM7S2jNdUhFzmA8IlcTfMshTlNWT58iaLS4dfkKf+ZbniAa\n",
+       "Dnnttddp12t4bp3OXItRnrCZhYRZwt1L93Dx5TcYTxIazSY7kwlfv3qBv/63/yYPPvowaZ6X++eU\n",
+       "88P+i5dyMRyNGY5DVo6fZHNtm1qjgd9ukQ0j6m5Abglee/MNvvnBJ+hv75DfWGe8WODWbEZxyMmF\n",
+       "DnbdxyXlxWsXyaUkkzGFkORKoAREowmNRp2oPyIZj/kbf+Wv8Qef/kPe983fRCcIUBKkEjQdjyRN\n",
+       "y5BXg+IwrTtT8ZhydtSqX7PvNQVhyk9Vvs3f6ARwU+R/NAdeWq+zVuAUXEyt0Gkk0+wK7yotadYt\n",
+       "COoz56Z0ULE/cehr5b78yxkQZtIx5rPStOTQq3WttsmfeicmTDu7OrjNTjUVlPlneoddx0MUgIEC\n",
+       "280G3fkur7z2KiePn+HkyRN02wuMs5D+oMfOzg75IMfzPOr1OgpBy/PI4oQompAkKUU2Zq/XR0qL\n",
+       "Wq2G53m4NZ/m3Byu6xKGIePxhChM8ByPURTyyMOP8Cu/8u+Z/8gPcWZhEdexSPOMcv8gcASkgjIu\n",
+       "13hf00NfdQLpdtDXmdSLeU0VYZtooSok5bVT87ea1bBKgVSPmdZDtc9MzrJ6f7O/p6ZtMcNjmsWk\n",
+       "dexcYDcafOqrn2aUZMwttrGGPcL+gMVGk9AuGMUJ/eEIf6HD3u4ue72QpWOnaDSbXLn8Jo6KUXlC\n",
+       "DviNNpnlMxr22RpP6C4tYfs1dja2mau36cx12B1P8AsYrG+QOQ7udsDAg4kP3aVV3nzpNRwchnHC\n",
+       "A08+xqWbV/lrf+tv8sg7HiZO8/3t9iizVhblZ5qmtNtzvPnGRcZhSDsv2N7eRe4J7r77Xm5EBb2b\n",
+       "64wmY1Dwwmsv8t7Hvond7W3euHQBf77JlWtX2bBucf7cvZzprrC5uc5GNCaV0K43iSYRmRQ4nkeU\n",
+       "JdiFzbxTZ/PNq3zwW76dX/rEf+A7v+s78FZWCIDxYILnOAh7FgzoPqmGg1apsyqy1X13QH8ZS8/1\n",
+       "pwkqjpbPb5whUd9HiGl+8SpQqaJiUz5Ni3f2+bMO9eoEod+jlM0pn10dN9W2sW2Loji8TqKq7KuT\n",
+       "S7W87Qr8KM7X/K5fsLprh1YGprmvConKc8jLXNGgwHG49/67+O1nfo8nn3ycjWsbbG7cRNk2rXab\n",
+       "s2fPUKvViKKYMAwZDkdMJhMmkzGe59NqtfDsFgKIkpg4Ltjd2SYMw1KRuy7SkgS+T73ewHUdakIy\n",
+       "HAz56b/z9/nd//RbLL7vvdSUQFiiXOkXheSAZdsHCB4Ox7ZXj+nvpiVSTYSv29AMi5pOgtMFMrNI\n",
+       "+rBzqBp7a06kVc7zdvkaTCGsTkRmfcv3mI2mqe6aYnKJvnAJ90Z87YVXIIL5QrF7a4tGnpOTQKLw\n",
+       "PJc4gJX5LiqNCHs7jC2L9QuvMx73aAQSK7CIwgmZcJFuA78pEa05ssBnEKfkqcKte1y/dZPta1cR\n",
+       "SYqLYBSNyKMY3CbtVof1C1dYEAGj3oil0ycQjk1hwRPveJxe2Efafvm+Aoq8IFc6PM1mMgm5du0a\n",
+       "rVabPFOsnjzFzatXiYcTnHqN3XiMa4HIC9a31thau8X9J09xc7DNi1/9CnOrK5x8+DyFLZG9EfPK\n",
+       "YWhZjGyLNEmwcoUQkjRLSnqw0cQWNirP2bhygw9965/lhee/Ru0dDnOew+Jck/EkRFZ8Tlqmqoqs\n",
+       "Ornr/qxGqZjpMo5SSqZiPMrK+0bKTK8U1T4TczMUXT99XKsck2Yx62E+27Jm5VBvHGG+m6b7qgEE\n",
+       "Zv2rei7P04NwRo3STYvXsixc1/3/Ry4UmA52M2RQN5A565svaRYhBIUqTRlLCqSAnII8jzl77jTZ\n",
+       "0xOidMDKsTay6NAfp8RpyvraTRzXxXM9XNdjaXEe27IZTyZEUUS/t4cQDo7jUq/VqNcDao02UkjC\n",
+       "KKTf6zEZjgj9jDy3EU6Ia0tkmPGF3/sMnW6Xly+9ySMPPYCMY0SY4AkL4QhSVVAu2ZwiApO308er\n",
+       "W8iZnWqaiKZiNdsVdESK3ol+Np2miZjNAarrVG17HRN8O9Siy1F0jxkmaZrm1aRYptCb9JJlWXjC\n",
+       "59lPf4Fmvc2J8+d48/kXCIRN4FpQJKSDAYPhCGd5AZuCQW+HpmthJxPinVtE4z3spke73cav14gS\n",
+       "yHMbt9Fm5cwJWsuL3Lh4iXwYUas1eePiBfauXWfBsQknI1Jf0jixxN54yODCVey9CblMef8Hv4OT\n",
+       "73iU2oll/uCZ34E0p7BLP4ek3LcSKbGF9vsUvPziS3hegGt7+J7PKJpw5vQZXvris6yePo7bbRPv\n",
+       "7mLnOe1Gg9e+/goP3/cQq0vznDlxkg984APkdZfNly/w+hdf4uSpU7RqPv08xJIujrQJlcJ3XLAl\n",
+       "43CCbTsIIUn6Y7JRyPsefifPfuFZ7n34XkTdJ2h61OOjd3k35aU6wR8lhxp8VcFWFaFXZVffx8wj\n",
+       "dBS1qJ9t5hPR15t6wkS2OsmaLmYAQRVhH4qAOsIaNetsBhloH1DVOX+U/jLb1pT9P/UUilYAt0vv\n",
+       "WCXxhShX6ZUvqfna/fwhlo8UEssCC4UQGblQ1H2fJ598lOe//Cx3n76TLEpptY8x12xQW6pjWTbj\n",
+       "yZjJJGJ9Z2dfaTp0Oh2OL6/gBk3G4wnD0Yjt7R1G4xGe69JsNbn//gfxXI9er8fe3i6jcMgwimg4\n",
+       "Dp60ufPuu/nl3/o1gkbAo2fuwhV5ufmulBR5UW4KUUGlnudN6RTDHITZ1WlV81KHOCk1TS1bxqiW\n",
+       "99bOQJhFCJqUrVIjVWHS/+u+MhffVMtRg9VcYWZyhVJK0jSumOZlbvDSstqP2thPhh/FMdevXGWh\n",
+       "0yVoNojimLbtIYqYFEBYNBsN7PlFojCEvGB1cZnrly6iwiFL7QBFytbGdbqLx3HcOp7wWFw9Ruv4\n",
+       "Mv10wvETx1k8ey/XL1zg5o2bMB7hOzaTyQSvMcdQKZAWblqw4jf5lnd9C5uZ4tr6Gnc2m7S9FuO9\n",
+       "EWk7x7b2zWo9Ye2/43A4Ynd3l+FgQndunjge0Vzs0Lt2g4ceeoTPfvlz3H3/XdxKYrL+iP54SBYq\n",
+       "tvd28Go1vvX938b8wgKXdtdZW1+jJW3kJKY732FQCNJJSsttoaQio8CybAgsMgSBG+BZDoFlsf71\n",
+       "y/xXH/pz/OZnfpek6XJicZGaKDcqEdPqIvZztat934TKCzDiujVA0KCjlA2Nnjkkd6bCKuWCg81R\n",
+       "QB1KI/1WYYRV5D4FHnoFqOnsn44f0+lq2/bhiJFiqnM4yKsCRa7Qq4ahHGMm963fVY8Frbt0O5jo\n",
+       "3QRJB2xCxTK4XXnb84F//lO/PdOZVbSnw47Mc/p7Fb1p54QuSimUKJ04cZbyb3/xF/jWb/8AWZ4z\n",
+       "2CidDpYtcbwyD4Rlyf3g+hxH2jiWS5qk2C44TmnS1Ov1A0U3Hg1R+52llVng+6RpSpSWW6uFaUKz\n",
+       "3eLG9cs8+djDzLdqyCxBFBnsL2TW73OAFvZzY6MKBAqEolCg1GFO/Ch+T+9+LaXEdV0jFO9wzGtV\n",
+       "aVf5xOpx8zfmc49C/UfF9WskremfKaJP9yen6YRdFMVB/SVTzvLSXp/nP/cl/FQSbY0I+32EyOiN\n",
+       "ewzCiJ1BSHv+BJ7fYjLewHd80tEIe7xFx0koipjdKGGARWd5BYuCZDQkdxehv8Vo6xpFzWVsOUgC\n",
+       "5h2fO1YX2Iv64Dq0al1stUptLsNK1jiWODiTBpeV4EbL5ubaJme8Dv/D3/1J9ro9VGrhKKdU4MbE\n",
+       "e+PGGp//wnMsLh3DdRv4QZ1+NGGhM4fq99h54w2KeIzVcrl0/Qrh9R06RUBar/FDP/P3mF9ZIe6P\n",
+       "uHzxMtGVdc4qn0ZSIOcCNuyczSIl8utsxCnd7hwkE9IiLZd3FzmBZSEzheN6ZNJBtBq8eOkip8+e\n",
+       "4tzxLm3LwQljrDwrdz9yJZktKYSFyAV2IZFAoqZJl0xazVSaGnBpJ/cMahX7iYNQCKbjPCtm5VtK\n",
+       "yV33P3lIn3z95WcP5OUohPxWlI05KVRpy/LzMMWr1OxGFlPZn13IU/2dWb6R3jUV/GNPfTvqT2s+\n",
+       "cJgOdL0Dh5nAJUmSg8bOstkQuCntoI/N5pi2LAslBLbrkI5zarUaly5doj3X5p57HqPm10mzjN6g\n",
+       "R2/QI8tT8jyj0ajTaXVwLJvBYMheb4d+f7KfNMei0WhQq/l05jrUazWUUoRhyGg0Yq/XK1dV+QHz\n",
+       "c22SLGWv3+PE6gk++9nP8Re+78NMshSBKHNWO/KAA8vzjEIV+0lxSnSDkEgEUOxv3lYW/Y5ZZQlv\n",
+       "tp84y7anTpGqeVvlGKuoSM/+pnOliir070En7ykqfTLLfVcVuXkvpaYJvPR1eiHH2toax48fZ2Nj\n",
+       "g263i8oL3PYprMZ1rDCnP+4x2umxtNTGERLXEjx6/kGcYI6r1zZonKwR3+ox3rrB4rzD9mSPdKzI\n",
+       "E4fClkSxol5r0W4sY51rce1LOwS1BUaTXeZXArYG26SNVS71RjQ6HXyv4PXXn8W1VwgaEteaMAna\n",
+       "JEPJdm6ztjPhpde+wrt//IcJa3tMEvCVRV4UWFJgGeF1fuDTaNTZ3dtjdbWJlJJOUKNp2SRKcOmN\n",
+       "11lq1JiXHc4tHWc9lVx97Qo/8N0/zN3Nefq9EZ/497/C2aVjnF09SdLrc3Owx1JYcHJ5ka2Nm4ha\n",
+       "wN1338nW9ZssBnVSMgpHYDs2ji0ReYHnBYRZTuZYnD1zmtdfe5ljc4+RKcGc5+Lu72KXxjFZLFBC\n",
+       "YkkLpIMlJGXaVk1TlNarGc007W+LfD/O3jIm5KmCh2J/nAMztMtbFXMRWHU9hC7mRGAqbtPKPIoe\n",
+       "0rtOmdRQSQsd3rrQDKGtRoAdVaq0qbZuddFW7luVt12BV5e6xnFMHMcH34FDStks1dlO0wcH34VA\n",
+       "RSFBo849d9/DlWtXue+++9i4dZksz1FI/CCgUfNxXJc8L5hMJvR6OwgUaZrQaNRYXl5CSsFwOCSO\n",
+       "I7IsY+3WrZKT9VxczyWKJ/hBDYUgTiLS3ZwoinBcF9e2Cbwan/7sZ3jskfNYtoMlbSgUli2RlsDB\n",
+       "RqmCJIkP6ANpWShZJr6qIuCjonfK5bg6zKlUhFOfwuH0AyY6MIX3qOT/VROvOhno66vctdk3Zn1n\n",
+       "eVHJNHGSjsHNOHbsGHEcs7S0TDiZYNk21mgDO9wlkDXSZITfqLEzHKJsiTfX5vS9d3FzfZtH3vEQ\n",
+       "YbbHtbWX6c4tM4x3GSU5Ld/Hkh5Oe47G2dMM+0OCRLB5/Q364R51LyAv2kQ7KWcbqyy1Vsmbba71\n",
+       "hmRDi5PuecassbfVo9Xtsum6JF7GJBoz3LjBfXce4/FvfhzVcLFjiV1YWHJ/+lXT93cdl16vT7e7\n",
+       "ROB7pRXi2ewlQ4Zxj7NPPszlr32VwfUhXqcByy3OnX0Xd77rPIOtDS68/ibdBOr9iKzWZyxi9tSY\n",
+       "3qV1Vvu7nDixwpU05Pq1C7ScgDQKSYUiTRVFCXiJo4jA95G2RVRk2DWfx88/zMc/8Zt81we/A9d3\n",
+       "GaUJNcfCth3sfVRIAXG+TwGggcB+Dn8yhADbckBAkZfb4iEEtlWmc8iLgjQrE2ZZ0ogZF2I/Tqts\n",
+       "rKOioarFVNrmNVWHpMlzzy7Wmd1+0LQUqxElVXrjrUBKFcSYpToW9HuY4OePw4687QrcnD3NztKh\n",
+       "QL7vz8yuVUWhv1dNJjP2U9oW2xubnD9/nl/7xMd58skn6c7XaNSapFnB3t6AaDLGkhaObbM4P4/v\n",
+       "u6RZTL/fZ3d3wHA4pChKFL+0tES72SLNYqIwYn1jndF4WCo9CbXAx/MCHNtjOBiUyHw4ZKG7wHDS\n",
+       "58bGNidPHYciwZY6S6DuMIXj7Ds9ZCnQhdICMuvwMTtYH9eTl21LA5HPzva66LaqRrJoRGwqY9M0\n",
+       "1t+PokqOGkj6GvM+uujvSZLOOGullHiex2g0QkqLLJ3sp+F0ufzlT2MNBmzsRUg1was3oJBs9rdZ\n",
+       "nF9kd7xLpsZ4fkJvFNAIFCu2za2Jj71wD1YKQZZjd+bonDzDq9FlHM/l3HJALXXIdxO8us2P/vAP\n",
+       "cunlr2Lbkm/78x/mjY0drl+6hb2X0g3g47/5NHupgmCB69ffwM7HHJ+b5x/+zEcp2k0G/YKG5wDT\n",
+       "UDR54G8Q+J5PvV5nvttBFRmeG1CIAs9xmdgWnWMrbFydQwzHhHtD2ktdvIU2yydWufj059m9cJW1\n",
+       "Ny/y6Ps/ACojyxMSmTEKd2lFLu4WuE2fO+46w954TG675IUCIRGFwHddml6dIstwA4+gSMGRTIZD\n",
+       "vvfD38//+a9+np/8qZ+gXvOJsgy3KHALgS0ssK39JFjgqGnoqZ54J+F4htuVUiKkQBUZUljYUiJs\n",
+       "i0KIfT55yg9r35bY3wu1arlVi+awq0X7i6qL42A22ZWpV6qbr5RzyiwIKWmgw8njhDicCrt6jVln\n",
+       "U0lr/aXB61u9r1nedgWuZ0A9+M0Ui7ZtkybTvejMGU03tBkqJ63ZmVAKgSUEUZLQ6XTIlOKdT76D\n",
+       "r37lq5xcmcd1fHyvgec1aNbreF7AeBKRpRP6/R6FSvE8h9WVZTwvoChyojBib3ebrc11XNclCHyW\n",
+       "lhYJ/IA4iYjSjLTIGWxtkqc5gVcjcH0a9TpRGhEmMV97+RU6y0u4eUaapVgSrP0oEb2pb4GepECR\n",
+       "Iylzi5voQLcTTCe0opjdDsu0WI5KEgXT7Gy6VDlrE0XomG/zejO3hVnMwVGNMjEVvqbPppQOQPk+\n",
+       "nudhOy6j8ZhGe44/+qNnuXRpB0/YXLpwjZYfQBiTS0XHczl/9z1s9XZLBSEF62s3sYoRfTXCqjdw\n",
+       "VQPHLlBhjyDw8AuPZX+OuxaaXBhcxc8DVjuLnD25wnOf+V2EkxJ057m8tcHl7S3Ov+deetcusHxz\n",
+       "jn/yD/4RX7r6Jq/u3GK3v0Xb7vIXvu/PUat3iUXASsMj7K2jfOugP7UlxX464SQqt/JrtubwgxpF\n",
+       "kjIejhht9+ieWuWu+x/g93/112m5LpG6wjtPn+O3fuFXeKp1EmsQErg2t8a7eLLO2sYt9vpbhEmf\n",
+       "aGPME/MP00GxefUy3soiW6M9PCeg5tTJkxyJwFKlLyibRDi+RZYkWErR2xvxrd/xnfzupz7Nd/3Z\n",
+       "76BV85BhjEQhioIkLUis0sdU7KdXIEvRu84oVJk7Pi33gXQcB1vYiEKhpEIhKZSgUAVpkqJEGbJ3\n",
+       "QNEpjghgODoO3JRVfY1pnZpOeF2OWnBjgsQpIp/dPL36THPdgm3POlmroOZ272LSl3ojDF2vb6TE\n",
+       "33YFbipiU1GbkSnlO0w92RqJHnTYwQIJg2sqSve5QlELgnLlk4Tv+/CH+bmf/3ne+9QTZWTJIGQ4\n",
+       "3KbVWkDi06jV94MyCrIiYTDsERYJnhfjOg6Oa7OwMI9Sin6/R6+3hyjKZO6+7xPUA6TjUHN9hv0R\n",
+       "RZaS5ilRpLB8h253gVq7ya/+2sf5y3/xB7HS+EDohSjzD0spEEimbovZeGhdTO5Md/R0ZevhXOFH\n",
+       "hXIdRaGY39+KEzTrYiJsfW25WKHkRTWdo9Rswn6NcKSc7vhi27LMLidzHNcjLwra7Q5f+MIf8fTv\n",
+       "PMOjZ99Fb3Mdv3Cxw4RotEVhCVorq+QTRVbUqc0vMih8gpufIQ4ztkWdvLAgH6DsmEke47faXO3v\n",
+       "stu7waluk2AiCfOC7XCbrZev02l4nDt5B/EezFlzfPnzT/Mvf/Zn+bZHHuZbV+6HWopdEzx114P8\n",
+       "3u9+nA999w9w72OPk3qQ5js4qYcvLWJpIfaTIlGUERxFXibvD8MJN65fZ2k5xfc8Egt2wgGhBS/d\n",
+       "uEGexvh3nWb70g2iy7eofflFnnjgYVLXZi+aMCLjhVuXERImW9vkeUwsIkaBx9pomzu7d5InKS++\n",
+       "+CK7ElwnIAia2JZHq9Uh8GoUKqfdajAe7pHnGSmCZneRWCiCxhzPfPL3+MiHvps4j5GqfAclBZbj\n",
+       "gJRYYuqbKn1OLrZtkaTxwfaCiBJZW6ocm0lS5jhhf4d7KSwsUSAF++2TkyqNkDWSPjoKRW9ePEXr\n",
+       "6v+m7j2DLEmv88znM2mvLdtV1W66p6fHN3pmgAEIYOAIgqSAAEYERO2SIQXBXXFjpSDXKDZiRa1C\n",
+       "DDGWYoRErqgfIhQUQS5FCqADQEI08I6CH7gxGNNm2lV32WvTZ37f/sh7q27V9JAMShHk5p+qupWV\n",
+       "tzJv5vud8573vAch9jPx+t6cvZcPFvJnv87qxuv33O8m3g9katXJLPVSf89LnotZPJulXmatCWbf\n",
+       "f3ZR+P9FBH74IsyCw/R3sy2nsynKYQCabvsaF+qU1VqqssQPA7Tr8PBDD/HVJ55gbfU4c91F5hdC\n",
+       "sIper8doNKKiHv3U7jTQjkOjGUwohfqhyyfpV7vdYml+nrARYoyh3++ztbVFlCRIK+m0WizMzwGg\n",
+       "HZdREhFlGWHYwHM9nnnuee4+dRIlFH7ok8RjnD0ToalzhgQB4tACN71GruvOgOMU0NkDxtkbaLod\n",
+       "7nqbvYlm+efpa7N/M33t8N/ePoI/aPy/z++ZAw9R/X/vR0lFUWCpawNQ66WV1vzq+3+NTrtL1xNk\n",
+       "IsdVJZgKtxEyzAqGueGJp59nUBqWjpf0R2OWkg1OhB08U5FLw03j8GISkjoea50FQtHHu2Oe8sQr\n",
+       "WH/qT2i4gpOn1lheWWK43efFizdY0i5bX/scP/fed1P8vXfwi//3v2Krs8nFbz6HWH6EZatRruD7\n",
+       "/vbbuLLdRwZgi4jCJFRinspOfF6YgJ8FgSD0A8bjEb3dAbdu3eKpJ5/EXZ5jPB4z32ghGwGFsKyd\n",
+       "e4C1haMMrt5gtN5nZ26HD118guMnj7HUWGJMiXAEw80NTJognYpRXvGZJ79CKSxnj5zkvrDNc71t\n",
+       "ml4TvxFAo8GVrZtc29yi2WxSpBkNx+H00WNI5XHp4hWMtigUzaDD5cvXWW13aIYNXCWJspSyrKhs\n",
+       "VXeaUuFoB6EdKgkWSYEAqZmqM0pT4VMrV5TWtR59MqXdmoo8zWrFlJR1TSnJJ3YMdS2qLG8fgbuu\n",
+       "M8GLahIw7N/nU/CvgXNfRji91w432MxGzbWwoDhwn0+POb1/Z6nJ6b63yzZvV6u7Hdd92E76L9r+\n",
+       "2gF89gQOn1AN5rMOd7Ue8zDoTCPO6WchOXgBhJQ4WiMRZEnK3WfP8ju/920eedVruXn9Jp6b02p0\n",
+       "mZtrs3xkkTTJyIqMylSMRxFpEtNohGitSdOUPM8oy4rxaIiacLW+7xOGAcdaTSyCIi8Yj8aMozFS\n",
+       "ScjSWl0iBVGW8OhDr+TTn/oEZ0+foTQVcZojpUY7mqosa3mirY32QWAqg5lped+TWt2mNbmOcg9H\n",
+       "DS+1KpgulrOGP9NjH64pzC4ELwfchz/Hqtqnxm7Hk88uxq52yMt8EmXVo72KokC6Gqkk43HMm970\n",
+       "Fp78zpP0yx7raY+B7zCMBaO0ZBiVrDUNc8WAcnidsnqGRtrjkmxjk13ag012ioRvVPM8GZ2k7YW0\n",
+       "qm9y1/wGN7Zznr3s0W0UbG72aW1LdjZeRMiI+ZV5RLjMN3aAGzli/AL/6B98L8HqBbaG5/j0F9t8\n",
+       "84kd/vH/9S+50t+gcgJMJRGEGCWo/AbGRrVUbvpZTFKrF154AWHh3LlzCKloNpoMszGtU3dybHmV\n",
+       "hdVVgrkODS+kg8vnf+8P+dQf/CFFP6azsMAj3/9W5o6vgpRkNueFF57lT37nA/hVQRC6xBT8lye/\n",
+       "zujaLR648x7OLyzRzzKyQY9Td53i/KsfYigsVrnYyqILS0v5CKGJLBSmpLL1pJhm6BNIB1NkoDVa\n",
+       "aEaDPp4fUsqi5t/T2pEvyxMAWq0GXhhOgguDdDS2Mpi9z72iKiukkDVo+xrX92sgrgy+7++NGPvz\n",
+       "RotNDe9m77MD1OpMYXCWsp29Bw/f8/v8/cH7u75/a0OuPXzZu6dv39Q2W9Q/bH0x+yzAQd/1v8z2\n",
+       "1w7g0wjysJxmenJFkSMmYnnYj+Rmedvp/o5TG6IrcdA7uMoLPN/D2Do0X5xf4NxDD/PFL32ZV73q\n",
+       "Ucq8JMkitm5dptls4/s+3e487e7cJM0dkRc54/GIoigIw5BOx6fdbE44YcN4PObixRvkpcFzPdqd\n",
+       "Dt1uF8/z6o7O4YD+cJckSVDaxfE93vSmt/DT/+xn+Pmf+1lcKcAUpGmCoyQCUXPeoq7KG2GwZr8b\n",
+       "cXqjTI3sZz/w+gYrD/B/dTp4sANteo1np+/Myg6nN/esk9ssrz3LHR4G5XoTB7jF6f6zFMr0f8vS\n",
+       "jIpq8qBaEBP7zrKoMwo0r3nta7l4+QpP9Aue3crZHqWUMqA0Pm4QkgwT7nMNdy9YuuklXLvBHz6/\n",
+       "wrNiiXN3v5FMpgy211nTEhEPeObCJcJ7JGeXXNzkq/SCkkvliMhd4Wh7gfks48TaMh95xvBFjvGv\n",
+       "P7jFa9RNfvH1Lln7BnPBGQIj8fQKn/nK8/zEP/whbl54HqesKFWDQZGiPYNjDdYU6InDnlD1CLNv\n",
+       "fPMbaK258uKLLC4vsbW1wbE7jxG0XFKbQFlis4J+NmCnrHj1O9/Gte11hnnB//kz/4zcd9jt91lb\n",
+       "WmVYxBw5fQItLZ/4wAfIxjGZLXCU5rn1FzGm5OTpk9x99m6ubW/z3S99HoODBAAAIABJREFUgaP9\n",
+       "e+mcOoXT6ZKWBiV9KhQYcJWuhztYiRYGbTVxlqO0S2kscWrwvQ4C0G6AK8XEErgeyeY4it3dbZ57\n",
+       "/irD4ZDl5WXm5juE2qesSqSwSKlxPR8la9e/vCywtkILjXYFeV4SThaA6cDf221Ztt8ENgXew9La\n",
+       "6c/12LKXBoGHwf+w6m02Wp++/lI68/ZDmmeDnOkQh9maz2xAdvj9/iIJ5V97I89XvvCnLwGA2ep1\n",
+       "WaYH9MizIH+YN3ecoCa49qrZ9QVwHIesyJFKIZVCSEnlu7z/V9/P2TNnWD1yBG9SZMnSvO4mQxCG\n",
+       "TdKiJAx9XHdf62qtxVb1XDtraxpjyg0LVD3RJUlIixzHdZBK4fs+nudRTSLmKIrZ2O3TXVpm8+YN\n",
+       "3vKG12HLDJslKCEmMsJJukYNymamKj+9Xocn9NQ3BJOM5PC8zdu7sM3emNMo47BN7eEb/jCdMz3W\n",
+       "QZ243Wsiml0kbpcRSKsRerooFBhRe7DHSUaz0WYU5TQaLX71V9/Ph5+Map4UgeOGxElOEcXobMSC\n",
+       "HnN23nJqrmDOSdlZuJsXbxmujVv0cnCrHifcnIZSPLuxS3ch5PULMY/yIldwSJgjoYspS1Y9QwOH\n",
+       "uHGGS+EpdpOE+6sN3hUMaD+yRbM7R1wd5RMXW3ztVpvXnH+EV6+WdPUWSTNkU/hYJ0DbHGktEomw\n",
+       "oo44kezu9nj2u8+xsrJKWdZuhUZmFKIgiTJECr4MuD7qs1vlFEXOye4C836I8hSV0QRug0pAs9vE\n",
+       "DzVNT/KJ3/0gOxeep+N6DJKIWFrQlk7oce/JOzl7/DTJMCEpwV9cpnviFM21E1ivwTDKkFrjupoq\n",
+       "LZAGHNclw1I5iijLajCtLE5pUUJhXA+ppvrqKXgZpBKISWFeAMPhgKpMieOElSNLBL7LjWtXcB1N\n",
+       "4LsoCaYqMZN7OtD7i/6UWrjv/GMvwZNnvvWFA0Hf9F6cVZRMsaKW1ToH9j0crEz3qwOMg1RtfZy6\n",
+       "AD2LV/V9fXssnX1GDwcxLxeBzz5rDz7yZuzf1Eaew/alsyANkGXJS0BkmorAQclhUdTAq5Wqq95S\n",
+       "gdIIKWg1mqR5VoO3qciLgte/8Q38yX/+z7zn8cdxJ/TI3HybZqNDWRqSOGOnf5Nr117EdV3CMKTR\n",
+       "aNBptwkbAb4/R5qm9Pt9dnd30VrTbnXwPZ92q4VV9dCH3V6Pq1c36iYlpem0WiwvLtFZOMLWaMT2\n",
+       "bo/19ZscWegSBiFVkSNF/aAbwFaG0lZ7dBLMypxuV82uAX+W055GzNNtehyl6pl9+xlPbYzv+/4M\n",
+       "D7hviTn7md1Om7+fZlqUOiQjm4lKptKv2UgjHacICY1GSFZmaK1YWFigLA1ra6vEccbp06dxvnkB\n",
+       "TEGrEUJV0dYuiasZ5h4Du8iTQ8NTvZhmQ9G5+kk0kPYqIn03Q+8oxbjk9edOseqt8uSz1xk/P6B9\n",
+       "V5dX/eDd7Dy/wQvfvEDZWSG68zTXdy+ycO23eXwF5t2CU2cfpDfSZLuLLPkbOI1L3H336ymOvpWn\n",
+       "v3qTe+/LWFu7wXZsWHvwDdzcHeG7IWVRYitb0yhSo5VmaWmJI8srJEmC63oYY5EmQkqL0i4MSxr4\n",
+       "FM2QLd9SugodZThxyjgakUYll69vcPHWdT78kQ9hshjPtdx35zGWDTQqBdpn6JfcyHdppCP6z44Y\n",
+       "bW6z4rSZby6w2FoivraB9bo0js9TNVy8MOTK5QvcefQE494QX3ukacIgGSM8F+l6qMJgsowkTbCe\n",
+       "wHFdLFNbDAgbIUIIiiIlzytarRbtjktRxgTNEuV5oBV33n0foe+zfuMK169eQQrD0tISzWZIGY0P\n",
+       "NPa93EAH13X35HezAWBRFHuqqdkgMIqiQ8/LS6PgKTOg1H6mWRTFpGltf7DJbMCi9UGnxtmmw1l5\n",
+       "7DR6v13WOn0m/rKFzL8wAhdCvB94O7BprX1w8trPAP8jsDXZ7aettX8y+d0/AX6ceijTT1lrP36b\n",
+       "Y+6FyV/87EdxHGfPjGZ25YH9NHt6sWaLYrNdgtMLMmskv1+8eKn+ODQukTD8yu99AK/V5J4Tp2kb\n",
+       "j4Yf4LYajPIcrTQN7aFcjXadPe1or9fbA0+tNY1GAykleZ5Tlvne/zD7oQVBMBlIkJOmaf1/W8Uo\n",
+       "GhP4Ln/2Z5/hR/6799BqhGhhqYoSz/Opinqohef5UAtu9iOHCUjrybRwJmmsKUskhpqVsxMJGzi6\n",
+       "jjwkIExd3DUWykpibYlSgKgQwtSaXWspSxDSQSkXWxlkMUJIF6tdKu1QGEFlSrSyKJvjqhyqDIXB\n",
+       "GhcLdQettQjHxdEepZGAQkqX2h5AUbgVvqNZv36dyxcucmtjm8EoI60kl69eo9XqIGTdDfjkDUVp\n",
+       "KhzfBSkZRONailZaRGHQRtAJ2wx3+2RBQlkZUC6O41GVOaqKOdKSrLUsdnCNk/Mucw3Fet6DOOfh\n",
+       "E2doDWLM5i4mcNj0QuLWPEGjwR3zHie7cLSzS+vImKaj0OMOwp9nfShxWaTlwxPXN9jy7+fsw29F\n",
+       "6Qg/NyhVMApjUuETEkI5QMgSz2isgb52kcahKd265b6q0FIxNR8DW3+eE8WFI0IcKcGMuHjheX7t\n",
+       "V36TKxdvsTDf5cFzpxgM13Fcj6efvkwufUTocbzbZtnROGlMJ/R54BXnaS6t8Z2LN9CdZfBaHD91\n",
+       "Bw+cv5u8KBgMI+aXVhj0hxRVRSNskqUZRZETBAGjcURmKsKgiRCCPMsRQiOoOy/LqqDZrBVaeVlg\n",
+       "0nraVRAE5EVBaQo8r5703mw3sbai1+uzvb1Fu1mRZSnGVjSbAVIKHrn/e16CUc9++1OY3EFISVkW\n",
+       "uL5LkiZ4nlN3dtpJtkwtmfWFQzVVAwlZf2/rxdWYCiVATtxMLfIAtkyNuaZe4LPbdEjDLCUzG+FP\n",
+       "vx62jp1i2mFHwul2/0NvfNkI/C8D4I8BY+A3ZgD8nwMja+0vHtr3PuA/Aa8CjgKfBM7aevrn7H57\n",
+       "AP5nn/6Dl+iQZ0n+aYv15O8OAONsmi9l3VY+W8Wd7uM4++A73bfjtciEZSwM//qX/g3vfsfj+EYS\n",
+       "aJ8CS288Is+Kekq3kvh+3XQxpUs8zyNJEgaDAUVR4Ps+nU6HbreDEDAYDIiiiCiK9habRqPBwsLC\n",
+       "BBhL4iRHKc3G5jpCWJ797lP83R9+D4KKdtisi5mVIc9ywkYDg6Wsypprm1w/rXQN7LMcnaj17xPF\n",
+       "V30tBVRlhQAUk0jBUutwbQZm36BeCkFR1IUkrTRa1x2qlTFIz6HVCOjtbBH4LlWRo10Xg6QQCqs8\n",
+       "cDzCZoc0qaf0VLYgzzOyPKGsCoQ0lKYiGg3Y3tkhjiMGI480jvjW179OHMfkhcX1G2i/jVCaNEkZ\n",
+       "j/qMR33wTlIag/Z8rJBEaYoxJdiKZDQgdCVHlxZYXVnGVT5bO7tcuHSN3EiMkGALAlURqhxZ9PHK\n",
+       "iKW5Bht5i/Gw4vRSi7edNbTG32TQHzGSqxw5dox5fYvT7Yy28GiHKzgn+lReBlkTf6HFRp4zLwKa\n",
+       "8Ygb0X187db9dO4MOXP2PoQyxOkWrbbHuEgRHEOXHlq/SKYLquI4bgWoCCuY+KZU6EMZTmUNlS1r\n",
+       "jgyHLEkJfYmrFE9/+7v8q5/7Baqi5MydRzmy0mFnZ4ftrQFDq4iwvOa+eyl2NuhoQSvwuP8Vr+Ds\n",
+       "Kx6hc/QO/uW/fR9ff/I57rnnXo6uLnL+/MO89vWPkWQFrusTxQlRFNUqL1tneb7vUxjD2upRNjY2\n",
+       "MZWth6JYy3TiTpJEtRmZBaVcHKfuOA7DBlu722glMdbgerUqZZr9LXQ9lJLEScwLLzxHnmd8/5t+\n",
+       "4CUY9cQXP0a71SZNUsoqx/W9Pd5bKYVFUE2jXSnQlaEyFmsFRkyFAlPsMRMABzAIuZ8lTqNwEPh+\n",
+       "sIdRU1zK83269+AEoINj0oCXYNfhTHo2Mz33yrf81SkUa+0XhBB33OZXtzvgu4APWGsL4EUhxAXg\n",
+       "UeDLL3f8acffdGWbed8DlePpa7Or4WGlxPS1Kce0x0uL/cafvYtdVlTWojC89Q1v4Lnnvstjr3k9\n",
+       "27c2KdOCY0ePol0HKyXGWMbjMePxmO3tLRzHxXXdCajXE+yFqAuKV65cAaZRd8jiYrhX3EnTlGvX\n",
+       "rlNVdXStHRfXgeWlI2zvbDG/eITnL13h7JnTDJMUU+T4k4lD43iMmHRoCilxlN5LxaY8//SmUFLV\n",
+       "TnGmLgYaLNZYtOMcqBHUMYZByLJWBmCRQiNQhEGDIq816oIK1xGgND2jEUWB62scmxE4hijus9FL\n",
+       "2U41z60PuLg+YBCXJMl4n19kEoFYg5C1c52SdYqqlQbdJI1jRPs0rTmHJM0prSKz9d9WXoIrQnRV\n",
+       "d2e6XoDreJRGEfoeaZaSZ2OarRZl3mPxWIfSjNBpgUkjqFKE1RjhkBWQIIndACUc5uZOkix0ORsM\n",
+       "2YoqLly5wuKmz+uXj3LC67G+lbBxc5dB0CHp9Tjl7FC25pFOQtCEVpqByfA6Bpx11NyYYXSO1tHH\n",
+       "+eozH+LIwjPo1jHCxp2o8RbzqmQoYrSUhGUL3+SkNserSnJRkalJOi5cMHYSnBQw6fRT07qINDQ6\n",
+       "YU0zIrnngQd43Rtfy6c+/gmiJAWOkGeSPC9JihSn1WYwGnHH6hrDjes0XQe/2+ZL3/463/nQ7/ON\n",
+       "Zy/QyzKevvAMN280OX78JMPhkGa7ixCC1dUjlMWk/V1JtJKUeY4Qkmg8Ym15niIv9jT9o9EQLRxa\n",
+       "gYMf+PXwk6LcKyxu76wzNzfHcDSgO9chjhNcxyPPExYWFxgORrWMUGjuOXuO8mWaaoLGEnHew/Ud\n",
+       "lA3qoG1CSVlbK9ecqUqlNKBqu2lja6EAEzMta20d6UwicSx1xjYBZMdxJnhVH3M2859i2RSPZmnH\n",
+       "mkoq9gB7+txOQXs6QHyWaoGDQ8lfbvuv4cB/Ugjx94GvA//YWtsH1jgI1tepI/GX3abR6eGhpbNp\n",
+       "xGxRcwri0+LYLA87LVbMXoTD0fv0GNp1kaWhoVwefcV53v+d3+C5y89z54lTiKygShOEhJu7W8x1\n",
+       "5wnDgHa7hTH1JPPxeEwcjxmPa/OpMAwBUEpPlCsJaZrvnUu32yUIAo4cWdk713gcIbRLFMUsLq3g\n",
+       "+AEf++QnaXW7rC4v02k1iQd9fFdjSgUzFeo8zzHWIiepWbknPbRUUmEmEj7UtFGi1t9aUTc6TK5y\n",
+       "fZ2MRExoDaTCWEiKBEdLtBYYWyC0RWoH1/jEyZilboenv/kEUgpOn7mXtufyod/9U3YSTWobtOfX\n",
+       "0MFWXRC2GonGGkmZG6zZz7IkAmMgzVKUM4c1higriHOB4/oo160HYFQlVZHgKJcwSBmOt6mqHC+Y\n",
+       "w1E+SVmhpYPEcPr0Xbz44hWkLGnYkK3dHlEe1dmB8ml0GlTGoh0X5Si205SdWztIfYujq21O3LPA\n",
+       "+qUbvGibzM9ZTq+lPDFI+GL/ONlonlfP9Tm6leKmJadW5jirQopiSF508ZoO7lGHCJfPPXWLG5mm\n",
+       "GX+LKu7x5M4tHjr3ALrUtEJDZHukdFC2BLXNyIYI6SJEQVVWmImla2VqgzJrbN0PgMBgSbMIx3PR\n",
+       "2ifLKgJX896f+DHmFtr8yUc/zmvXzrC1PaLRzkmHQ7JxTDEYIdr1UIcjyysYA8PhmO8+9yzNRhcn\n",
+       "7FDmhnvuuZeV1TVazTZSKXZ2drly9Srdbocjy8vcuHGVViPEc11u3rjB1tY29913P0HYwPc9RqMx\n",
+       "a2vLmMpMFhhLkSU4novnhqR5xtLSnfT6fU4cWyNOEtrNEIQgdHyKPKMRNImihEazQZHnlC/jrjqK\n",
+       "DWHLo6QemIGsfVtkZZnCn5xcM4SdNP7U0mOo6ZO6C1oirMLaak/9hSn2gHqfPhF7hluzAFuWxR7W\n",
+       "zP5utqdi+uweFmDMsg6HG/P+vO2vCuC/DPyLyfc/C/wC8D+8zL5/LkfT6/X2ovBZMJ7liaYneDvP\n",
+       "cDioZz68uk2j7+k+QtQDRkdlhoskj1K0dnj88Xfxb//9+3j3Ox+n2B1xYmUNJTzOnL2Twe4IYwy9\n",
+       "Xm+voDI/P78ncTKmlhHW5+LRaDTpdrs0m03SNCWOY7IsY2dnZ4/rX11dxXUc8ixlaXGJ6+s3ac7P\n",
+       "87YffAcf/N3f5x/8+HtJ4hENT5MXeU2HlAYpanWKUqCsQEu1x9OZqYmVFXhTM6paRg5Ysjih9m+W\n",
+       "kxSvjkBU1Zh0/oBQktJWIKCQhixPEMLgKocyScEI1q/f5LvPxZx/+LV846ln+X9+9pdJcjh1533k\n",
+       "WcVc12P72kWC+RZaOSjpooQGJPj1++R5SlqmaK3QjsKMa97UCIGWlobv1OPnrMVTEuk5COERFxGe\n",
+       "Wxdqs3xEEmXMza3QDiSVUfT7Cd/46rdYWVlkHI1xGopSeHjdACElRVmhNCgEVZWSJxVKaYyxPDk+\n",
+       "S3Z1lzeeDugerRjk26wjWa5S1ppDbla7fHXQ5UPXV2nmGWrH567LI35oreSuVYugjYwb9Heeojtn\n",
+       "+cKXf5Mzj57CKzK0eYqqOsO/+u0x//0Pv5PF/Hmsk9NzDFZZlNVUSuGUFmeaJFUGpRVlUaK0rjlW\n",
+       "Y5m6Jnuug1aCNC7ptBZIkwGVLXj877ybcWTxW4sY3SDowv1rq5SjlCNzHarhmGNLK1RpSTxKuHLp\n",
+       "Kp4IqIzixPHT/PDf/VEeOf8w19dvsr6+SavTRQjN8vIRsJaLFy8y123jeR6eVw/LfuihV9SfL5Yk\n",
+       "TSY0iSXLkonyyrCwvER/NGZnZ5Pl5WXyLGF+fo4szeh26yEpw9EI3/VqSgUPfyEkTdO6wCtuD1f3\n",
+       "PnCebz35aXyvbkPXUpKXdcs/1iKxYOuIWkqB1Ko2kqPuHVHU3Z/WGISsJZ7CTrHIHMj2972G3AOg\n",
+       "W7+2r0WfUi5TDAJeEk0fxqlZ8D4cgL7c9lcCcGvt5vR7IcR/AD46+fEGcHxm12OT126z/QwAv/XB\n",
+       "J3no/AOcP3f/AcXDbFV2KuafvN9t043pBXMcB6313rDj2Qs6/SDSNEX4Ci0cVFWD3NqRI7zr8Xfx\n",
+       "3HPP8Y43vpVsOKYocnbXr+OrEM/xEdaCsZRlQVWUpHFCEAR4nkun1abTapFmBXEUM8oyiqxeabWU\n",
+       "dOYXWF5cYjAYkCQJRZZTTiRZvd1dOp0O/UkX6D333s+zzz/PufvvISsz3MDDFBWeciZ+0vUTvrdo\n",
+       "SQkGHOWAnBhUFXndQMT0JrA0woCizDFVRVUZrJVgJI4NaoWLrhCKup1aGYTW7PRyxuOEufllQr+N\n",
+       "yjPWjt3Ftz//Zf7o332Q9tJRVh94DImkiMYoBjRERGtJs4PcM+YS0tSdexiU6yBdiSgE0ldIR9OV\n",
+       "AWCJxzG2quqGHrnf9FCWOUqUNEOHca4IHEWZJcRRn14VIaSD6wQ0PUHn6DG+//u+n8985rPsWBcp\n",
+       "C/IsRVlwJMgKTFkgrcBRHrYAi2K3FXIxj2ldu8VrzrjYQclT1wX3LdzPMbXLm7x1bHPEl5yHeLpY\n",
+       "w3WOUg0usdUZc3QUI0WPuKhwWprnn/ssneY8ZqdDVrQ5enyHc13DU+O7+Y0vXOG971qhnV/Esz3K\n",
+       "QtLUmspcJ8l9hLdIs9UhyzKsNVBRO2WruuHF2pq11dJQZQW+DMmTnKq0GCGohOQnfvKn+Mynvkw/\n",
+       "yylNTFlEHPECsnhI06u9ZRZWVugNY3Z3I773LT/I+Ve/nqA5B0Jy6dI1kjSj0WqSpnUBUDuQJQnt\n",
+       "doelhSWCwOWZp5/m6NoxsqzED0MKY6h9XiyjeIDnuKAUWVJQleC5PnNzmrKsay7xOK4L/BOri1aj\n",
+       "iaMdirKE0uIogddqYBsBcbpv9DS7bfd2OXX6QaSA8XhEf3cXU+V0Wy2qMgNT2zYbUyGFJS0MWtds\n",
+       "YlXWHu316LR6yMo+jWIQ6qAlbA3C4kBAOMWiaSfmNNic1vYOq0lmm3kOA7ZSiq99/Vt87Ylv/bdR\n",
+       "oUxA8w7gozNFzFVr7c3J9/8b8Cpr7Y/MFDEfZb+IecYeepPZIuZnP/77B2wdpxdgst8eeB8m/Keg\n",
+       "7LouWmu2t7dZWloiiqK9KDlJEqSshxpUVbXHW2utef7aJRbbcywEbYIwIJMwpuTTn/oMgZWcXD1K\n",
+       "u9PB67QZ7EY4yqmbcJTC87y94cZRFDEcDsnzfDIYubWv+a4qomhMmtbRd6vVrEezTSiXqsi5eesm\n",
+       "VkjSqqI0lqDVYGNzk0sXnuNtb30zx9aOUOYpJstp+w3SNN0r7CqlKIriQDs9TLm4+vdRFO3tL7Xa\n",
+       "y1KsgKIs8bSHKBysqoiyCBUq+nHEsxcuY4RPZQLiCIzxGI9iWk5CPy24tjmis3oS3JA4L8mjAaqM\n",
+       "qYY75KMdFtoNxPwqBkVZWhw3xBhNXgnS0mKVQ2EFUZqhPA8/G2InLnSBF1AZyKuaKsMU2CKBIkJU\n",
+       "ObaERjOgM9dGiILd3hY7uzv0ByN6u2OU9Dl96iw3b24xtCFJHKMlVGWBMBWmKiczFGuwUY4HFvpu\n",
+       "jlY5QXKNh9sJDx2Zp0wd0t0hp9nhWDBkw23xJfcMcVLRXThL1RuxWvXxnRTTzXn4jMciu3zpuct8\n",
+       "fbNBp/EW/vcfnuPOuQsIt8ET/TfzRzuv5umdK/zD99zBcrLBnAwwNiaxGX/6ya/z2c99mWazwblz\n",
+       "D3Lfffdx4sRJjJk2s+xnT3nUY2lxgd2dBPBQHjQ7Pp//s//CR/7g47z97e/ht3/nA1iRMLx1mZXA\n",
+       "Y77dwVOaRqPJKCs4dc/9PPSax3j0sTdz+eoGeUF9Lfo9Hjz3ABubO7TbbcoqY2dnlxMnVjCVZTQc\n",
+       "sHFrnbvvPouQEDba9IcjgjDEmNofX2tNNB7TabfRUpImKV4jIAxDLrxwgbvuOs3WxjZhWKtUiqIg\n",
+       "z3NcxyFsNMizmEYjIMuKuttSaLpzB6fEA1y6dgtJbZjluS7N0OfatRe5cuUi3VZIo+mCzamqHNeR\n",
+       "FOW+6s13PYo8x1TT7HaS3kym3EuHPcpztpY224k5xSoh9u1vZyPq6c+z+84OAod9mnhKh85q088/\n",
+       "+tb/KhXKB4A3AovABvDPgTcB5+uPmsvA/2St3Zjs/9PUMsIS+F+stR+7zTEPyAgPk/Wzq9Hh16cr\n",
+       "1vQkZy/ArD55+nvHcfai7qkMqKoqMlvRDhrIyZCIUkKpBRtb23zsj/+UN77+MeY6HaI4xnWamOrg\n",
+       "UNdp9D87mHRWpD/VqnuehxBiryNzWswA0EphrMHxPJR2iLOUrCgYjAZIKXjqqSf5vre+pZZh1ZwB\n",
+       "ruvWN7nrUlYlWjuUVYmQcu+9puc+nWbjOA55PimiKLk3GKKyhjIviUYJQSsgLXMKYWl05xE65CMf\n",
+       "+TijEWSxJs8UrhdgnSFZWXH0+Cm2dgY0Wm2yLEfYktAFk4/JkxFZMsIUMVla0Gy26wjZa6DdFuiA\n",
+       "Apc0l5RCYYSiEDFlnlGkKZ7jkKY5yg0oTe1jU+YpVRZRlSk69YiTEVUVYUxEUQ7JshGe57JxaxPf\n",
+       "ayKERiuPPJf1MIVJeltTRQKErie+CIV0PIrCEBpN5hq0TpgrdnhoOeBkF+bcmFZ0C9m/hXU94oUV\n",
+       "7sqHDDnCVXGMZ0ZNvrhekDRy7mxH3FNWHHUHmPISc+EKP/S9ryBYKImaCcI9y2996RwfvXkW5vrM\n",
+       "Z9/mpOoTOIru3H0MNi9y8/rTCClYWFyssz1TkWVZrXLqdAkDnzAI6DY9Os0mp06d5Stfe4KrN67x\n",
+       "5NPfYaffZ3Ozz7lzD7G+fgNjc1yZ0nANaZQw352nMtCaW+TH/+d/RNCeJ2wtYFEEfsDuVo+mH1JW\n",
+       "Btfz6k5HLel2mvT7fRqBw61bG6ytHkEpSbPVYnu3T7vTYbc/oNnu1M+BsCRRgu+6qEnQ5U6CnjAI\n",
+       "MKagKOrnL/A9oihmZWWJjVvb9Ps7FGVM2AhZmF+qpbpC4AfBSzBqEKVkaUWaJCgpKYuCdifEVBWD\n",
+       "/haD4RZ5FhGEDmWZYcq6C9J1dC25FYJqomCTe00602dkRs2yl81P7SoOBpNKvVRRcpgKmeLULA08\n",
+       "G7AejuyF+PMbef7aOzG/8KmPAPsXYxYAp2A8q+GenvAUnAAajQZPPvkkr3jFK0jTdK/t1hhTD1SY\n",
+       "0Cmzf58WOVJIqqJuN9Zao30f4bts9Xb5rd/8TX78R/8+490+RVWD3vxcbVwF9Qdy/fr1vapyHX03\n",
+       "6HTqm3dzc5PBYECe52RZRqvVmkzyCdFKU5lq7/8vinqcmOM7+L4PUrKxuclgPObCxYu8+4f/Dj4g\n",
+       "0qRelLQmrypcz0UqySiK6HS7ZBNKRjsueV7VUU2e4+j6WmntIrXi+o11+qO6wm8sLK0dByS3drbZ\n",
+       "6Y955rvPs9MbM+gnrBw5TpVBGDQxSHpmhBSSVtii4TeIx2MC1yPPcypbYKQhy1OSNMbrXWccjVDK\n",
+       "EMdjpBLkZUF3YZm5pRXcsENhIMtLtljGsZBFI+Y6bWxlcfwGRrqUk0g8z1OKPKfol2ALhM2J4x2y\n",
+       "pIe1CVE0wJ1077VabcZxgh0PsUKiHYdRHGOlYunIGtoL2O0PSbOCNC2oLFjH1KqVOENJgaMqjgYF\n",
+       "Dy8Y7pnLccWAOM4ZDEr6zjwdv4N22vTVca6ZI1xKRjx/9bu0U8XbTmru4gkeOOriHXuY66v3s7Nw\n",
+       "Fr9QnD1+nl/43ctsd+4nzZ5Fb34VuX6dexebjMdXQFWUVUWr0wZrGcVxPSWq0wFgPByRZxmeFggq\n",
+       "siKjqAyNRoftnR5pmuJ7HloLyqJACU1pYrxQEI3GpEnGT//0P2V9c5tPfO5z/B//5J+ysdnjzOmz\n",
+       "2NLgSkvgB9y8tUWr1WJze4tut0MzbJEkQy6+8DxHj65x7Ogage9RVhAlGVYIXM+jqAzjcVRnolox\n",
+       "GgwIPJ9GGJCXJUEQEMcx29tbHD92bKJQqvjQh3+fj33sY3Q7HXq7PVAFt27doipKTp++k3e+8138\n",
+       "0Lvf8xI8GacZjqylrtbUIJhmKVJaHEciZD3c/IlvfI3V1WU87WLKMNFkAAAgAElEQVSriiSJ6TQb\n",
+       "DAd9HD2hViddsnt6bHXYZ18jpWIK8FM8qQUUt8U6iqLYo3CnyrjDlhRTgJ9tsZ++799oAP/KF/74\n",
+       "wIlM22anUfQszz3dZmWDk+PtrZBTTTbUqY/v+3vANtVwZ1kGpcEoSWUNrnYxWVFH0Z4mkoZPfPIT\n",
+       "BEZyz9pJgs48yt0fNDz1MvA8by9DmH4IZVngeT5KyQmVUc/arJt8SqIo2h9iIRXacerRVliqst4H\n",
+       "ISiAcZJzdX2d/nDMW9/4GEfnu2hHUxRl7eJmLRX1NBupFEbU2tv+YEieGcq8YG5+nq2NbYKggRCS\n",
+       "RqtFfzhGOhrPD4izkvXtnCtXb7DbH1NWYI3A0YosHpJEPapizPJCmyRLKBpLaOnQ8Fp4KqDMLFVp\n",
+       "kNpBaEE/GuEETt00EfXIszFbWzdQIgcKiiKtlYzSoTN/hDQ3zC8ewTSPojGkwwEt36PIC7LCkltN\n",
+       "aRV5ZSnKiTRLGOLxGGktypZUeYawBZIKz1V0ui3SNCLLU3RekmQZWVHgBj5hs4W1lihOQEh818WU\n",
+       "tYXvTjWgbQUBkriybI9i5qSmE/c5ElaUdgTSMtwd8rlxkweOhJxfatLrp9zcjtjojxi7AbLhcH/L\n",
+       "clpHLHQDhksn+cZ6yFZ5L0ZYOs0N3vS9b+eL38qhGbDWKbj19T+lm30Lg8LIDsPRqDY2ow5W0jRB\n",
+       "K4UUoKRCTTqKx/GAOB1w+vQdBF6HPIEyL7BVRCOQKBRVJglaDfpJn1/6pX/DU9/+Dvc98AC5qXAb\n",
+       "Ab/y/vdz79338sC997OysMRgNGKURARBSJpm+EFQN6gpXQ+fxjLfbSNk3VNQFmLSWOVT2tqXP4pr\n",
+       "T58yK3GVwlT1M1MYQ7fbZjAY4Hk+ZZHzzDNP8/TTT+M6mkYYAhbXc0iyhDRO2Nzc5MbVa+zu7vKf\n",
+       "fuf3XoIn1cT8CkTtF6QVSVJnoXGa4PkuVtQNRDduXKOIB5iyIAg8bFngO5ooHtVuiqJu/6+N5Gqa\n",
+       "cRafpuBqDAei5fq1GuRnG3OAPSybHUg+pXAOt9TPMgnT/f9Gt9JPT2DKL81WfPcvzEFPj6mcZ9ao\n",
+       "Zhq5T5sApgNzoyjaM6CaAqhSCscqDKa2vhSgpcSRisQahJI88upH+b1f+4/cs3Ic3/PxGo29xaXf\n",
+       "79cGVYMBjuPstdiHYYi1hjRN2NnZ2aNcpt2aYRjQbDYmxam6QzJJMpRWeFqAUVRVSV5WDPs9XK/J\n",
+       "nXfexeWrN/jYJz7Je97xAwyHQ+YXFyatyyAdzSiKSPOcK1ev0Ov32NjYZjTMkELy/W/7AZZW1qgq\n",
+       "S7vVoTcYYoSDcgKevfgil67eYlQuUlUSz1uD3OAAmIROx6UVGpIkoiivUxYRve2ExfkjREVFc76F\n",
+       "VYIgbDCOU9I0o9VZIMljBsMhrtNi+cQJ5u64m9AT5EmENRXt1jxV5WAJiBNDlhqMs0nL98lEQRGP\n",
+       "CRwFvk9aKZJKkOSGFCiFJVExxi0RRlIWEiNcNC6B73Hs2AqjcY/OXJs4GeGZAEYjdFlSmYrKOHRa\n",
+       "LXwvo8pyyizBlhVKWO4W5xmZTQp/iK9zHjl5jK4vme8+QHPuLKMiZGVlHldu864o5qkv/TH9pE+8\n",
+       "epKVVofzZYMXn3qWcfwcgR2TNO/kO+EximjMPdLje9QO662c9WaLr33p6yzqkGjHI8dDdWP84AQy\n",
+       "kgS6QbPTxfP9ibLIkiQxge9jTe3DI4DCgghc7lw5jZICW7h0WvOYNMd3c0KvohO0iIeGhx/9Hh55\n",
+       "46vwhcdnP/5nnH/wlShtaLfmeMf3/QD//n3/ju0rV3nPO99VC4VaTZI0xXEEaTbG93zWVlf4nd/9\n",
+       "IN/3ljeR5Smddov+bo+G3yZstYjTFNdxSbIM33HAQtj0EBbyNMF3HDxHc/XqVVZXVyjLiief/DYX\n",
+       "LrzAmTOnaDUbCCFot2v1lnIcbGUYDgZcXrnEtSvXXwZFLEpaLBYrNGVR4Xl1g5vjueSFwfWgKCuW\n",
+       "j5ykoTOu37jGzuYtOs26ruS5LmYCuKaqwdvYqY78YKu91g7TQuYsSIM64A562AwLOBB9w0sbeqYA\n",
+       "f1hO+HLbX3sE/sXPfvQl3PJ0pZvyy7NSm+n3B4yQ9gBcHzjh2WNOOerpzz4aA1QSSmkpTUVRZLjK\n",
+       "oaoMXiPk9z/8YZCa1z/6BmxlGI/6eK7C0dBpN4Ha2D5JC9KsIIpTTJkR+B5aOziuR5KkdLrzpFnB\n",
+       "cDRGa7eWQ2oHU9UFNYuto0XXoZgY3VtryYuU0Avwg4CbGzt8/DOf5+/9yI8wHAx45SsfwRQVeVFS\n",
+       "lJY0K/jEpz5LZ36RV77y1TTbi/zyr7wPL9Q8fP4BXn3uATZfvMbK4grjXFI2unz2m8+wEyVUZYYx\n",
+       "JQKD52qKPEcLSVVYhJHkmUVLl2F/lyq/TFEVrKwdxw87FJWLKQOqqm7sSNIeSmUURcSDZ+6lKMva\n",
+       "58Maev0hUjmUxpDnJXGa0Wq3cF2XeBRRmpJWO8R1YNDbwFGGPE0oSoORPrujDLRPkUgsdZHacx38\n",
+       "0CPLM8LQYxxHuJ5DnMRYY6iSIUEQEHgutizo93r4nkuepXh+WOuqJy6BaVLzoRLLHceP0Wm3sGU+\n",
+       "uR9hOBwhJkHCHcfuYH39Kpsb6yhVkGYjbFXgeh7r65uEYRepAny/QWIqXN+vi2QWbFUira2vs1QY\n",
+       "U1Kktavk0rKPUoadrW2q3OC6AUq6SOlToUlyg+M1sNJBmiGhW2v9V9aO4QYhaZ4jAc+RWJPzt9/5\n",
+       "dkLPwXUFw8GY3d4uH/3Dj/Jj7/0xojhmeXkR1/VwXcmv//p/pN8fcP8DD9CZn8P3XVxHo7UgiSI+\n",
+       "/alP8tjrXsfy8gqL8wtUZUmW5RgEzXaLOMlI0xw/bJDneT2/VO0blLXCBoNBnzAIkUJw6+Y6X/jC\n",
+       "57n/gfvqLFrr2qkRWQ+LyGteelrH6ff7PPbGl5pZJVGC63t7wd5+1l5raGdpDoA4L/B9lyiK2N3e\n",
+       "IIlHdYYgbU075TWFVnu475vHxWlKGDaoJnLDPXGFrOWT0poDUfWsX9E0e9+PxvcNs6YYBjANtGe5\n",
+       "8b/RU+lnHcNmB/ACE95W74Hv7Co1jWynJ+m67kSatH+esy33cLASnOU5SIlwaoc4LRXa85FWIGU9\n",
+       "EeTNb34z/+HX/19e+eAjeNolDPw6Gs0NN2/eQjsaLwhoNjtYFL4fgq0jfSU9ev2IoNGkP0zqdl3p\n",
+       "0RuMSbPaZCdLC7R0QUDYCEjSMcbW6byjFONxQprs0u10OPfwQ7zqdW/hIx/+EFkc4/khgedz7tw5\n",
+       "LJKtrR0ee+wx7rr7LFvbPbSj8DyfOIn4zGc/z42LlwmF5A2v66CDDjc2blHkCQpDUUGnPUeWJeR5\n",
+       "SiNokWU5ygEpFIY6RXVCD1d2CGTFrVs3eeSRVaK4JE1jstjgOyHSStqtOTqdYxR5WlNOso5kTJUT\n",
+       "BB7pKEZKydG1ZdKsboNvtoKa+99YJww0wtYPZrfVoD8cMTfXpayGWOmQV4bKlBw9ucL6+jqiMrQD\n",
+       "lyyPWJ5r1alnldVaXd+vvV8QdOYXcKWHAEZ2hJYaO3EIbDfnCf20thJutxkP+wz6BZ12i2Ji2dvp\n",
+       "tImimLIouHrjGr7ncuz4ccLQwZiMfn+H3d0+i0srRHGOdh3CVhNf+URRjNKKubk54tGQuW6H/u4O\n",
+       "VVXiuhoqQ6MRYmyM60oqoxn2h7SabbR2WFxaoTcYkxUW5YcgJCaDhisZjiPa7Sa9/gDHrZUYZZlx\n",
+       "9s5TfPe7zxCN+rSaTdrtLi+88DyD0ZBLly/TbDa5di1jeXmBsqx4/PHH6fX6XLh4ka9+9Wt4nsvK\n",
+       "kdpYatDr8ZnPfo63/613gJCkWU671UaonDTPiOOEOMmpjKXrezDx0hmNR/ieh+t6RGlCEDQwpsIK\n",
+       "+OQnP8l999+3D4RmfwalNbUqZBb0OpMawOHtfe97Hz/1v/4k1u4XAetnf0IvSgC5N/JMaEVZVQSe\n",
+       "xx0nT5KmEZsbG0TRgCovCLyAaDzCcR2U0mRZhud7+Oz/P1MxgrEzneLV/lzXWY57Wus6uMCYvb6W\n",
+       "KVaBIMvyvfOaxcKX2/7aI/Avf/6PDphS7Y/V0nsyudnK7nQlnYI97NMwruvv7TNb2Z2drTeN4qus\n",
+       "Vm5YWasx6vZ3gSMdkjxDeS4q8PkXP//z/Ojfery2m9UalMRxXbxGkzwv6PUHDAdDpg5lWgdUxnL9\n",
+       "xjqe7wMC16uN6bXroR0HKRRxmuC7LaxRtam9lpSTh1kKQRJHtBoNfM/l7rNn2Orvcu+D9/GVr3yZ\n",
+       "a1eucPXFSyzMzaOU4r3v/TGeeeZZmu02ruvjeiFGBTzxrW/y5DNPISSMd7cxUcyrzj/C8dNneHFj\n",
+       "m+v9MV5rDqka7Ozs4PkO83ML9Ps7E4WNS1WVDIejCQCXdJSlrCLieJs8G3LqjlO0W11sqZDCw3WD\n",
+       "vYVVqwohBUmaobXDKIqojEUpF6k1/f4Ag534ymSUVUUYBvWAXw0mT8iTiCDw2djpU6Fw/Qari0fY\n",
+       "2d4hL3Iqa1hdWSXLUlzXYbvXY3NzkxMnjjMYDBGEBL7P9tYmR9dWuHjhAljDqVOnGQ1HddQn6yJ0\n",
+       "UkbkacLaygqtVoOG7+E5Lus3btBptVm/eZO5+QWKSaF4OBqQxCNcV2MmWUydNYJBIZWm1epS0SCO\n",
+       "M7SWuE49asxWJVLVbptQYcq6duO6GmnB0ZIwCBgNhpRV3SugtMPSygoGQZxn+EIw12wwGo85euwY\n",
+       "cRxhTEmWZcx12zTCAMfRLC8vAJJ+f8SVFy9hjGFjc5PHXv86kiRhcXGB0Pe4cuUKR48eZbfXJ2w0\n",
+       "6PV6XLx0gUYj5MVLl1hZWeb8+fMcXTtGu9VCac3NG7cAyIqCdqfL4tI827sDtOOQpQmOo/C8afCT\n",
+       "0N/pM9f5/9h70x9Ls/u+73POs293q7q1dnX37JzhkBIpkbEkS6JEKbIJJ0YiL4Agv3AiJ28CwwEc\n",
+       "2foH4iAIEiBI8sKKAEGGZEtRbMgQ4oW0BUqkKFGmJC6jGc7We3Utd3/29eTFufdWdXNIOQkiSsAc\n",
+       "YDCFW13VfZ/7PL/zO9/fdxnw+htfQyitqVBryqvc+NyvbSIMpRlVvX6fNEkQQvDR7/7oN9STn/l7\n",
+       "P8MyXvDTP/3TnJycUJYlrut/U/ihExIptN6hbRtMKVG0fPUrX6HrKizDwLZNDc+ujcS6rsNawyya\n",
+       "YbYWz7Xr0Jlr9eVqqCmfYMQZhrGlIyr15KxvU8+uW91u1nd87JN/ejvw63j1pthuivmmeF9fG3re\n",
+       "5mev+wk8ncB+fZp7vaBLqfMmO3FFsjcAo9NyWN91SesKy7QY7+8RJ0uOnnmeRZyglCQvYRLPaVpF\n",
+       "WXUkOdBB20JaZhpzC8ZYtk2aZUwmCbujXZKypM3K9UNuUFQVXSu1T0qacuPGDRzHIvQ9fNdFCrBN\n",
+       "k/5oh0pa3D+dMl8W9EcHHHWSpirJsoR/9mu/znd++DsIPA/bcUjSnFq13L59m9kq4WJ6SX9kkqgJ\n",
+       "/+4rX6WzXSbLFXvjfdKqpFMGtuhwDJMqL6hKHUpbtyV5nuohkOpASfK0QXUGvh9hGSWXZ3fIlz6e\n",
+       "HfLyS99Bq0yqokWapubQdq1WVXYdnm3geD5JkoLqeP6ZE/KiIMszhsOIumlZLRM818N3TGqhKOMl\n",
+       "huo43BkwXcXUZczsLCFNEm7evMVyucIVOeP9Ab7ncbw3QHzgOZbLBX3HIC911NXN4yFVsWJ/rAOs\n",
+       "pSg5PhyyWCywbAvLgiDq0TQeStXQNcxnMaZh0It8DEOwvzemaVqaquRyesH+/h5SdgS+T1VWVLXm\n",
+       "6G+G10ma0TQlZQmGMDHX8V69Xo80S7AsA9uz19JuHSHXlC1tp6iKilUyI17MybKEXj+iakuarsS0\n",
+       "LaokwQ8j0iIDWk4f3cGxTFzHZtTzCDyT4TAizTLeefdddnf3uXPvHmWli0UQRrz2+uucPnrEyx/4\n",
+       "AB/96EfwfJ/JZAJIptMFaZpx+9az+L7LnTv32Ds45vxyhu0EvP7GW9w4uYlSil7UI7JsLidTJssV\n",
+       "ZVnieS5HR4ekaUwynZF5LlmWsb+jO3rLslkt5him9gvfRKEppbaJWoZlgxTESbytC++1HM+lXTb8\n",
+       "/h98icPDg/Xz1V6rHeKJ/6tuXWuMze/saGvFhz78YeJ4xdnZGfP5DNfzcE2guYo6k2u4pFp3ytq+\n",
+       "QqubO8G2lsF7qyivahdPNK3XoeD/J+vbXsA3x43N13BdrvqkYdN1VdP1In01LLiuPNTr6d1sszsa\n",
+       "trX9WgiQXYdoFY7lkGU5yjSZzGe4QcDFxQV5nOL4EdIJuVwk1Mogy0ukZWEgcGyH5WJFWRvs7R8x\n",
+       "m88JhEHQ2+Pk9geYLRcYXUee5yTrbsJxDbzIo9/vI6XUIclxzcK1yOMEyxScHB1x98E77B0c0d89\n",
+       "YJVWJKslo+GASXyGwmI6XXLvwQNuHh+zXMw5ODoh6oec371PWdaoziArW8pO8txLH+B8esnu3gEP\n",
+       "Ht3D90NacsbDIctlgmObhI5F3Skc1yHPU3pRxDJeUJYVrhtSZQVJVuNIG0M1xPMFymvJkzlKWQTR\n",
+       "ECFMOqGgbbFskzjJaOuKRkhcy6TpOk4f3cfzPCzDYHJxihKwNz6ma1pm5xec7O+w2wsYDXosVgt6\n",
+       "PZ+oP6BI5tjWTYSQfOeHXiKKerz22h/RiJZ33noH23YYDkfc2Nuhv9PfekovVzPm0zmPH58CgtB3\n",
+       "8b2IJIkp8iUQ0e/16IUhaRLjOCZFmvLowT0O9g6p64YkScizkvHxDlWZMhoO6NoW1wwZjW5SVtof\n",
+       "fjKfcPPkBo9OH2IZUlvuCohXGa7nUNUVRaGNvVzX1UZJjo1jClaLJY8fnyHoONrf49atGwyHfSbT\n",
+       "CU3b0I8i4tWS0PdxHZt+GBC4FkK1FHnCO2+/w4c+/CHuvvs2YX/I8c0TwESaNqYSWCjiOGY2W/I9\n",
+       "3/u93L55izt37mwHeUII2gZ8P0SphrfeepdPf/ozvPDii9R1w8uvfIiibCkbbQv7+PGEZbzi8PiI\n",
+       "bJmxtz+mUx1ff/NNmqamrip2hgOef/5ZpudTXn/9daIgIDaWwKZb1cwO1XUYOgGZptINXBAE1HX9\n",
+       "TYvbaDSibnK++MXfZTDo80Of+CHiJCYMepunfl2orxwJ27bBMEwMaQAGmII4WRH1hoTRgDwv+PrX\n",
+       "36AzdEiz4zgUWY40TZqqWrstaoxdCB1kYZjWE7AuXDvxr/HuDaTbtvWWFbfxgtLIw1VwjM44eG8D\n",
+       "r+3v/3ZDKL/7W//X9mixoc1s3ux7YUDXqTzXfU/00NPWMnOuKDlbPudTPHIl9SBMtR2GUJjrsNW2\n",
+       "UUjbJusaZOTzs7/wj9gzHSLPx3RC4qJF2AFp1VE1YFoOnuNhGALVtQjhUdcae5sv5oRhuP132o69\n",
+       "NiYCZ83TFabYctcd2yEMfBbzGaprsCQcH+5zcuMYafnMVyVvvvkmURTiui6L2SV5liBUSy/w2d8d\n",
+       "8eqrr7BKEopW8MUvfYVVVumBl5AI1TDqB3ieTVXmtEqtPZIlDx+eEYYj9vZvcPfBY6pGUbYdQRSA\n",
+       "gfZXdj3qGlRT4BsKW1U02RKjK8nTmFc/+CquF9AobRS1ETvkeYFYhzWbtk2a5jRtQ683YLVaUhQl\n",
+       "hqtoOkFR1JjSYhhG+JbBIHQQNGRZguN5ZFVJka2QQuPJs9mcOE7o9wYURUUU6YI9ny9wXZesmuM4\n",
+       "NnmeMuj38YOAuqlQSlAUJUmc4rs+WZ4TF9rASAiBbZtYprk+IiuyJCdLc4ajEbbpIIyKNE1o2g5T\n",
+       "6oF1UZRbuKBqCvI8pWlqDOGQpgV+GGBYFlXTooSxpruV2LajO/iq1oITz2FnOMS1Ldpa/85uc7qU\n",
+       "gniVYNqWFmI1Ff3Qo8hiLAGOY+Cv3TGlZXP/0RnRYMj5+Yy8rNkZjvB8lygMcWybuizwPIfRYHgV\n",
+       "X6gEdQ2WZZKkMf/qX/1LWtXykz/5k9x/8JAgjLBsmzhOGA6GSCXWsJKpMzTrisGgz2DQg04haCny\n",
+       "gtlshmd7vPyBF3jtta+RpStsSw9xN4VPyCuetWEatG3HG2+8TppmtG3LT/83f/cb6smv/fN/zpd+\n",
+       "/4tcXl7y4osv8hM/8ROMd8f6/hNPnuAVsMkkVevNQ2xYbwjtCy51Z1yWJdPze6xWKzzHpa4LrcEo\n",
+       "Cs1aM6T2Ge803zyvr6LbrpMsNsZ7Tyo5nzS+2qyquhIObWrX/yc72f+/1wYbelp2en0YcN0SdiMb\n",
+       "37z2dKjvdQoOsBXZXPE118PNtfd1S32tSxfYrgPSwLdsasPi8eNTDp/7ANLxqJVAGFr8ohQMh0OU\n",
+       "0DaxZd3StS2WIfH9ENOQBP4BYRjSdR2rROdq5llCr9cnXi0YjoaaauX6pGlG1yrOTi+wbRPHdJBC\n",
+       "Md7dp20gjudcTJcMB336gyFZlmFYHkfHO0wn55yfT1jOF3zogx+iKErivMAQ8P3f933ce/CYyWxG\n",
+       "GAak8YyzyzN2hkPqqkCKDqyGyFWMhy5VNiOwgaYhCHzSLGM03sU1babTCf6gR2+4y2oyY7VqGfg7\n",
+       "TM8fgjJBWNRdS9s1dDS0tUkcx/i+T5qttJrVNLQy0jBJlkts28aQBq1KsEyJtLTMe393hyZLKIuC\n",
+       "6eRUJ4+rDj8KuHH4AtPplKqq1g+H5Oz8DNPQIom9vQOOj48I/ICijHTijTSYTmacPnq8DeCIwj63\n",
+       "Tm6xmC/xHIE0JfOZPjqvFjM8z4O1yMS2bZpaMrs4YzQacXAwYH+nTxBGVEVDqxT37t0jS5cU2Yqq\n",
+       "SPEDF8cQnNzY43IyYb5YkMQtSkqCMEJ0Nv3QoykaFssVR4dHIDwMQ1FXKfO4wDEtaB1sUydCGYaF\n",
+       "ZwY4joewoWozVvM5nudRJDFJsmKqJnQKpvMVt597ntdee43jG7exbY+mralrg8vLCYcH+/iej2lK\n",
+       "XnvtNW7fvkXbdqRJhlISP/CoqoqvfvUr/NW//tc4PT0ljCKkNIhX2ro3STKqoiIIfc4nF5RVycnJ\n",
+       "MaZp8vDhQ3Z3dnn08CGH+/s899wLPLz/kDt3H3Dr5g3u3btLnsXr4eK6seq0Z45l2SyWSz73uc8x\n",
+       "mUwQQjCbzd6zhoRRxM7ODmVZ8ju/8zt8/5//fsIwxHX89Xxr3Qk/5YAtWFvzXntZolOwpDTwPJ/D\n",
+       "oxPCKOb+/ftIoTCkbtokHY3qUAhsx6Jruycog5uCLYTYJl5tOmqtzLa3r+nAjnX9WRv7PW1V+83W\n",
+       "t70D/8Jnf33bKT/dUW+K8HWIZbMzbaCTzTR4M/C8TslZ/11PiIEE2k+7bVpNWRKAEFjC0FasdYcb\n",
+       "BKR1TSUF/93/9D/yF37gh7FNC9v2yIqGyXyJ40Y0ShCEPRQCz/NplUJ0gmQVr49KawWi1Ik8nuet\n",
+       "5f0meZEjhckqTnEdnyTL8T1ff2idomtKbt08YbwzZLmcsbu3R922VHWL6/s8fHxOWdUkcYzotNTc\n",
+       "sQzqImO0M+DZl57jzbfu0uvvU9WQFjqurGlKhoOI6WyKgaAXeuTJGUmSs1ymzJcZxzeewY9GzBYr\n",
+       "bMcDYbKME3Z2R6zKGckypecMCO2AtizY3+1jmx2W3ZFlK/zI06ZbjakpV0KwWq3Y3R2zWq2Q0tDH\n",
+       "2E6HVWhhVIVpu7heqDfIpiV0LeoyZzQIadqWqm11uEOWrruTjiAIqWttKSCQdG1LU7ekaUpV1/SC\n",
+       "gKZtcFxn+zBrFW9DlhUIJFmWYxoW0hI0bcNkesl4b8z9+/cYj8fMZzP29rSCbzTapcwLktWUttMB\n",
+       "1nGWYpoWw+Fwjb922JZJVRbaXGkxpygLxgcHDHZ2KYoaadgsFitUC5PLGcP+kGFvgOm2CNlhGnKb\n",
+       "ZN80HU3VaT511ZImOXleEgx9/J5D1zSYhgDV0dY1bVNT1Q1JltF1mnH18PQx3/8Dn2A6m+I5Lnt7\n",
+       "YwxpkKcprusw6PdZrZZrOMPAsmxA8fu///u89trX+OSPfpJeT29YeV5gWroDj8KQXtTncjLB8z06\n",
+       "WlarJZ6nB8BCCE6OjpnNZjR1Q9sqLi4ec+vkiNVyimVC110REtpWBy0IafC7v/dFLi4utp1oHMf8\n",
+       "ws///DfUk9/47Gd5+PAe7777Dvfu3cdxXP7Bf/sP6PU2EMqm49bF1XiqK998771Wu6bXrlYrZtMJ\n",
+       "VVloIzSxFu3ZFt3aRlYjP1csmKcZcZvYN/29J50Mr5h37RP1TUrJK9/5A396O/CnQxyuv/ENB/R6\n",
+       "Cjt840W6Lu7ZrOuQyWbyC/qD6lpty6mZO1rR2OjgMQxbq99A8KV/9yX2dvYQUlAWBQK4dXzMjYMx\n",
+       "Qkhm8yVpnpCXFWm1olMKz3YZRg6ObWufkiYijlfESUKRJNsb0nNdgnDAraMDTMtltUopypKqakAo\n",
+       "8rrk6699heT4ENexSB2Dy+kEJQx6g13qqiBJc6ShWRSGUhhSEI7GvHv3LSqVEwRDZFeTr1KqusGP\n",
+       "AhoUj8/PCYOQtm45Pb3Et8Dzerhen44Luq5iOn3MYKiLjes5mGaEKTsis+bw5ADRObimhyl6VGVM\n",
+       "3dXUeU1dV1SFhh6qutAmX66DYUqWq/mWGmooc+3XYmJbBsMwQFoOluPheC5pEjO9PKMX+Dx8/Igg\n",
+       "CDFsHyHA9yOWiyVJEqM63Zn5noeUBmEQ4DoOgTdCSkGW1lRlwunpBaOdIWEU6QQhVXLjZJ+qaEiS\n",
+       "jHt37+L7BnlR8IEXX+Stt9/Ccx3yLEaphn4UkCYp52ePiIKA4yM9gC3rikZ15GXBKlmSpSmWpTMv\n",
+       "dwYjfC9kFWco2fLw0Rlvv3uf+XJJVdX4fsDOcIdhNKApVtS2ZLWKkabA831cx9kOuUajMWmS4UiJ\n",
+       "7/vkeYHtWSyTOSAoBCRxSlWVeJ7Hwd4Y16+QEu7ceZfjwwOaMiP0HISAuiywfJ+oF+LYDmmWYxgW\n",
+       "o1Gf5XJBliU0TcWbb75OrxfSiyJcx6IsMgQC2zDYHQ7JspK8cLYAACAASURBVJwH9+/jBh7z+YQo\n",
+       "CrBMA4FOi98ZDrl75x77+/tgC2bzOVHUo6xKDZ12FagWqXTx1raxFhfn5+v7NKAoS/I05ej4vaMF\n",
+       "mrbFdb11ir2eWdVNQ1VvErq+sVA+vQS8Z7crMGg7GAx2cRwX1TVcXJxTFjmOaerNx3GoihJDXDWM\n",
+       "15l1W6KEYVyztr4S8FzN4sQWLXi6ef1m69tewK/DIJti/bTxy5Vd4xXx3XXd7debqKONC+HmomzW\n",
+       "e0UciboFKWmFzpZcR0RqNktZ4vk9vvC5z/ORj/8H7I5GCDrS5Yo6mxP5AaptOdn1qVsPw/VAWlRt\n",
+       "Q56VVGVJ06SI1sIERj2HZ27sYZgmVXV7O5iZL+dcXEzIEu3H4boBh+ORvg6ix87ugLrMKYqMrl4R\n",
+       "2BLL82mVhmLCsEdTr/F+IbBMyWK5wPN8ZvMJ8TJhd3iEJU2kY2IIRb8f0aPHYrEkXmYc7t/EMZSm\n",
+       "VKmWGzdD6qbhpRvH3L17j7xMMW2DOMnohQGRCVab0x+EuI7OmOwCLUEui47xaI8srfC8gEk6Iepp\n",
+       "uMd1XaQhSdMWpVqtgu0a6rUx/nKqiAYD0iQlKwuins/BwR5ZuuLWrVsUZcMyKajyGld0mNKg3+vT\n",
+       "j/ocHx6iuo6qzHWYRroiTVOUUuzs7DPeGzI+HJFmKcLsODt/hDQkDx7dpy5rmqrl8OiIfuBh2joE\n",
+       "++Mf+y7SPKGqax7ev8/+/pi5ZTCI+kwmE+arlOlshlIdw90BTuBjOyajvV1t3VA0PDg9Q+cumijh\n",
+       "EUR9opFBf7RH3ZQ8evgQaXT0eg4nh8+QJQmWP9LYdl1TtzrIwTQsVvGcqmpo6rVgDejKmsP9Ax6d\n",
+       "neN5IfNFwid++Mf4whe+AIaD7+v4speee4HTs1OyJKZpaqQ0UE2jB7WuSxho3n8URazimKZtMS1J\n",
+       "VbWYlsGHv+NVhNAKY8/1EUKSpTFS6jnBwcE+eZHi2BGreKk/a6kLctM0WKZFskxI0hQlBVHUA1Wu\n",
+       "8WfxhDVy1ylaBOdn59R1TbZ2NbQdTcH9JkUEITS7Z7VKmM0WfPnLX+ETP/gJBAKltD0E6BnCe2aJ\n",
+       "KfUNAAvof4uUJlXV4HkhbVtzcHjEvbvvEqcFrqvhE2GaqGuJQU8X3g1UfNVwXsVIXhX5K2XmH7fh\n",
+       "bNa3vYBv6H/X6YPXMSTHcbad9XVJPegP3bbt7RD0eg7eprBvdrPrXTiArfTQshNKd9zr4APDMgmj\n",
+       "iDfefIfFYkHkBzRNiWoaLAOm54/xDw/WoQM9KtGSF0taaYE0CTyDwPUx1rYASZyg1WAlyWqJ73nk\n",
+       "aY5odQDwSy/sk6QZCl0kq3KB5zgkyZyy7DCkYjq7x/xyiW33iYbaKTAKApZZhm37pGmOaQiSNMf1\n",
+       "AgyrZZUm9PoDVNeQZzleECGFIkliJvMlt28/Rxi0mNKk6yRFnWrqlWHR1hXv3nmH8f4efhhojq8M\n",
+       "MATs9yPiuEA2JXldEPRcyjLDcS3qSh8p87imyCosy6AoM/xA54lWVYnrDtH+MFfJ3L7v0y1iLNeh\n",
+       "EQIzWZJlKVm6oN+LiNOEtjMIe33KWkG6oqoruq6mzAuyROOo490dDKHzaoRoMQ2T+eIM1/cwLAsh\n",
+       "YRUvCfsucZLi+pKTmzcxkFRVzWq1wjANirLAdk38wMM0DU5Ojjk9fUgvijg9fcStmzfJSwvH9cnL\n",
+       "jDiPUarFqAS2ZVNXDXvjAyzTo8hLTs9nzBYxjufg+i6GJRjvH3J4dMDOoEc8nXJx+QhLGpRdS4fE\n",
+       "MLWXjpJsU5dMp9ViNaU5zGVR8eDBAyzb4wMvv4Jhe9x/dMbewTFlVaHaCpGlxPM5jm1zcf6Yfn9A\n",
+       "OAyxLRvHcRkMhlR1w2AwIM9z2rajqgosS6ckBYHP/v4eoIuJZWsvntFwQLxKUF3H5eU54/GYZbzA\n",
+       "sk2kscZyZbd2I3QxDINeGBEXOY8fP+YDLz1LmSfrZx2qugK19hhptdBtY9e8YWkVRfGeNWQD+8zn\n",
+       "y20d+epXv8qtW7fZ3RkTBP61OZtCGteKogKEYhv0ef111kI2pTAtk6Zp1/XF4vbtZ3nw8C6T6SWO\n",
+       "Y2EIiXlNdXldSn+dObdJ47lOtniy2TSfKN5P06ifXt/2Ar7x6950yHVdPzGYbNv2CexoE9iwOZJs\n",
+       "JPdSSqTOQteFXimkaV95C6zxqc1UujWElup2ilZKClPv0mbdkGcpn/n8b3N4+xlUUZN2LklaYRmK\n",
+       "PK6AKTeO9rmczYjWggmkJK+qtZhCsFquqKuGfl8n9whgb2eXqq0xLJPJdILoFJePZ5qRYhns9Hp4\n",
+       "Yx8QWDdOWMxXxHFK4OzRu7WHNDoWy5idvs9idc7IshGyRpoVTatoJbiezdDdY3fUp9/va+8XRzJf\n",
+       "XOApLXEuFzPyWYjteEwupnrg43lkWcUizvFsmyjq0SUVA8cGCY3MKcuStPI5vv0MQppkWU6e1XSt\n",
+       "zTKtmV7OsQ9sXFfgeZJF5lFTI5VBusq2G3DbthSFVu+B7jqOD/p0eYdSMB7vsr+7g2VZ2vHRgsnl\n",
+       "Gbati45wJJ7v4FgBXVtjdlCVGaePU1rVITAIej18P+Dg1k2KoqRVgsvJjIvLGb4vCP0+bmgTLxYU\n",
+       "eUI/DNk93NlaAOdZxmq1ZD5d0lQVL730Eov5nMB3eeedtwCJ63nsjcfcdEfYts1iscB2HN658y7z\n",
+       "y8cslkuWyyVRFHLrKFyfBCVNVZOeP6JtO6zmiP2DI83qSLVXTF3V5HlKmupiLYTQitCiwhCCwA8w\n",
+       "pKQ0C955/JjRjRtMHrxNaDR0bYnqKqLQoasVZVmgQmiF5EPf9VEQAtf1MSwHgYEwTALfoMhzLMuh\n",
+       "LBJMwybPSybTOUHgYZs6RnB3NKZra1bzGXmywvV07uRwEDKbnmOaJq7pUBUVlmtSq5q6LgkCn6LS\n",
+       "TCvLlrz44vNMZ1PKFgwMhDSoqhrLNmiagrZrkGbNaKfHdHaOlQsMwyJZLd6zhmRxtrWDPTu7xHJ8\n",
+       "hrsHZGXL2WRGkJdEYYRtGzjmphm8xkzrNg3jpljqgq71ITqlHkCuEVppWijD5PatFwn8IQ8ePADL\n",
+       "ohMahrUMiWoapNJhK13TAoqmqhFIfRoQVxTqTf3WzLt225DCn4EOfAONbLDRTUG+fozYMFKUUtsC\n",
+       "f90MZtNtdx3bJAxzQ0EUV/4pTXflM+60HTIImKcxgefgIsmbFnybz//m51mcnvOjP/KjmL6LY4a6\n",
+       "gywzRFexWCU4js3ueMRqlRJEkk4p6q7VqfRIRsMBddUilMY1m66jLAudkmOI9ZDHoQ020tyKy8tL\n",
+       "rcyqG7wwZDbVjm17ewfUdYmiJYwGNE1Lv9ejrFoWqxjH9WmamijwCQOP5XKB51gsFwvkOnz2YG8P\n",
+       "y7aZz5f0nr2NECaOKdnb3UWakiRN6fV6mOvrV1UloeciaPB8l7YROsLLc7g4P0MIieeH2JakUgI3\n",
+       "8DHFPp7nkSRLptMLwuERSrU0TYshjTU2qo//lmnSi0KE0JuyUAVtq09K0+l83YlAEOg/89xzL2wN\n",
+       "xAxMqrJGKqDt6PdDfNfXvtWGQVnWpEnGfL5ASoW7dmK0TAspBLs7OwgEliXxdnZQ3QClNEcfoKoq\n",
+       "XTT7A6IwpMxz8jxfv7eEwaBPVTcIIXnw4P7Wh911XbIs5WBvj8lshgCeuX0bz9NRX7Zl6aBppZuJ\n",
+       "oiiQhsF8PsXzAw0BGuDYJr1egERT0CzTpCrLrRVpHK8oi5IiTdnd3eHmzRucX1xiOc72OSoyrfAM\n",
+       "g5DA95nM5zi2heN4VHUDXYeQBnmWanvjptFzHqlT2k3LxpAmhmkhDBPbdmmaFiEM9vb2qaoSRLf1\n",
+       "9AjDcN0ll0gp9WZm24RhSJIkWJaGc1rV8s47b3N8fIRpmZRZQlHmerC9YZchsUwbD4ObN06YzWdk\n",
+       "WXytwD65NENlTtM0nJ4+Yrx/wNHREYZhsFwu6fV6LOZzlOoY9Hr46zmAvseue6fodR0BeK/x5nXR\n",
+       "4cHBAVEUcXp6yjJOCHxv7W8kKOt6S1UWUqCTilraFoTUcW4b7HsD815HDf5MdOAbaONp46pNR74Z\n",
+       "ZG4I7Ztue/Nz16e1eV5eqZmumVddpylKKddmNAazeIHZjwATqxW0luS3/uBL/NzP/xx/+2/8LawW\n",
+       "EILpbEKZZhwc7uO7+4ThbS4vLrj/4DE3b91guUrY3x9TFAWr5UJjfZ222jw+PuHoaE97KtQ1RVUy\n",
+       "Xy6IkwTbtPBsDwSEUcDu7mgd2NAgpcHzzz3PG19/k4uLx1qYEIWcnp4yGu2ys7tHr+/juC6W7bBY\n",
+       "xIRhhDRNjg8PsEyDuq7Ii4w4TVnlSxaLBb4fcXh0RJ5XpHGur58p8R2HZDnFkCbL2RzVduSrjrYu\n",
+       "MSXUVU5dl3z4Ix9hEI0oipplrK1xVdOStA1VmSPpYZomJycnFI3B3niH6XTKdDrdbrJFUWgBj6VT\n",
+       "jnq9Hq5jMRyOCAKdzrJxc3zw4AFFXuG6Lp7nsTMa43gOeZ4xuTwnSxMml5IbR0csF4lW+x0ecHK8\n",
+       "t+7qYDqZMVsuqeuWGweHVOuMUrGGW8LQJ4wCRmuWTBzHJEm8xUV7YUCWpmsnSX3N9vb2CMOQMAx1\n",
+       "LN5iwdff/LruppTg8vKSw8NDRNdQpAllWerCmufrk4QDSpEXDYdHRyxWS0qpu/N6nUJl2/oE2e/3\n",
+       "2dkZbpuadt2dreZz4tWSy8szPN9nPB5jOx5pGlOWJXVdkKV6sNk1LWWWEfg+Shk0bUNVVfT7I9qm\n",
+       "4eHFBZ7rUZclbQdtB5ezBcP+kKKoGQ1GKKWYTZc0zYowDHTClG0yncVPpF/p1Cl9XVarlX7fhf6+\n",
+       "63vs7u6wXC4RAnphiJSQJisdOoJGMw3DpCsrdkYjojAkTVMc5xvDHACk0J4r9+7ex/UDfvzH/4qm\n",
+       "5XYdN2/e5M677zIcDNnZGdA0NfN5xmAwIMti+n09DxoOn/RZufIy+db1q21bfN/n1q1bfPlrF2RF\n",
+       "RS8KoOvwHZsiTVFGs6ZIrvFvccWIgStxIVxpV647sn6r9W2nEX7xc/8C4Ikd5zrj5PoQ8+k38+RO\n",
+       "CQhd9OVGvKNfRKlue2QyTX08qWWnu3MlqbsWaRj85mc/x+d+83P88A9/kigacHhwQJ5mWJataVlp\n",
+       "ijQkVVliGJI8TzEN2B0NQHUc7O0iTC0GsE2bJE5pG82rFULoxHcUzroYlWWJZdokcUxR5piWNnx3\n",
+       "XZ+ug9PTx3hegGXba1l+zmAwoGk67t2/jxCSNM25efs2ZVGt6V8mCkGzVnLNZjMODvaZzaZUdUPg\n",
+       "Bzxz+1lm8wXNOuZb2JKyqDCkoQ35q5peGDKdXuLZJpeXZxzu76FUR9u12JaDMEykNDEtB9OwtKdF\n",
+       "V5MkCbZtkmcptdLpJpZlbmGyoii5ceMmSZJojrjnce/+fW4d3yDPc5RSlOvkIb3xaqVd0zQkiaYP\n",
+       "NqrbdtC2YbBczomiEM9zaZpG8/KrCiElptEAUpssDXYoqwZhWAj0vZYXqU78aWrE+tTmODpBxjAN\n",
+       "2qahLgvyIicIPIb9AV3XsVhMtD1tq1OfBBLDMLfm/UmS4tiO3pBr3T23a096z3HXeLM2dQqiQPvm\n",
+       "SHAtl3wNLVmWRZ4VKCDLsmvKZEXdNBRZzMF4B9avSdOiLCqKsqTrdGFp6grHsbmcTDi5dYs81wV6\n",
+       "ONoly0uquqWqKlgHYwsFTauo6pYvfOE3efWDL7K/t4MpoBf19aZo2cTxkqoqMW0T2wm2p4rNKRlY\n",
+       "34+WHpoqRdPqSLi6qrAdGyHANg3apmY2n6LW30coptMpWZY90bj1ogE/9VN/6xvqyS//8q/w21/4\n",
+       "HS4uLxnvH/DjP/5XGI52dBFULZbl6BpQNziOw2gwYD5fEEWRPt0b4DoeQm6oymILa3yz+nm94G6g\n",
+       "3qxJOT97zHI2wTQN2rokcF26tkZyPZRm7V2urlwSN0iCtmG4go+FELz60U/86aUR6uOVxrWvqzHh\n",
+       "aki5uSme9tfdvPntwMCSqLalBYRSmqhvGNjW1UURQmDYBonRIPOanmlQ0/Gz//gf8e5rb/Ff/Y3/\n",
+       "HMdxmTUlZ/MJdtEifB/P94GAplMkWcHpgweMx7uEvYj7D0+5dXzE1776R/iRgxd4BF6AZVoEfkC/\n",
+       "19PZlY5LUZY65QMAG9Mw6fVDjsIDqqqkLCvOL85JkpSjo2OWyxjTcPA8F88PUErx6NEjBv0el5eX\n",
+       "tE1JU2b4rotlmaxWCZ4XUNU1Z6cP6ff7BK7D4QdeRkrN+Lm8PMP3ApQlEVLghT5pmpImGUYnMUy4\n",
+       "OL1HnmfsPXOT55/9btq20txkKanqmsV8xcXllDwv8Fwf23WxbYuDgzFpkhD4uySFtsZdrVbMZhPS\n",
+       "NKXICz7z6X9NXdcsl5qx8JGPfIR6fxdpKG7evKltPqcL4jgmyzJmswm3b9/GsvQQKPBCfa3SnEWR\n",
+       "cni4j5T6SNvrRWR5Sl031HVFmS6I0wzTdJhcnAOGpo02HTvjXXpRD8syqeuKuu1YLBZcnk9QqqXX\n",
+       "j/A9D98PuHXrJkWZc/b4Mbs7O9y4cUhVVywWS9pW8fDhQ+q6IfBDmvW96js2poBOQlMXjIZD6MAw\n",
+       "BOHuCMs2WCyXBKFPkqaUVcnj+Up7zHs+IorWxaTjxvEBTavWG1xJWVVUxYqqznEtbW3sOgamaeP7\n",
+       "Dnme03UNUupn4OjokCzWp7RlHLNczJCGSde01GVFnKaEYURRaBdH09I02OFgSFGU+K7DcrlaF2MY\n",
+       "j/fIspRlvCBNk+08ynW0NcRyuVyfAioMw8DzAmzLRim9sZyfn3Hr9i2SOObk+Ij5Yoa0bIosXYel\n",
+       "eARBsD2ZW5ZDkb/3EFNKg9lszv7hIR//+J+jqjfGYDZvvPEGw/6Ao+NDLCl49927VEXFs8/e4uJi\n",
+       "QlEUHB0dMZ3O1kIpiee715rFb97gXvcyMQwDIU1Obt4iDCPuvvsOlmnoIGbVYRpiHT7errUgzvrf\n",
+       "/mT2wXV69Kap/Vbr296Bf+7f/tpVYb0WCHpFbL9yI9wEPmzWdcaKlJKyXk+JNfETKbQYQh/JDKo1\n",
+       "4d4wDSpX/566rPjFX/wl4lXMf/yp/4gizXWGoxCEQQRdR7F+YJSQxGlO0BtgOw7xakmexqi2hKbi\n",
+       "cH+MG1iYlsSUJk3dUOQ5bbPp/i0cz8O0tLmTZVpYhklR6eN827VbiOjj3/djf4KfyPvr/fVnY4n3\n",
+       "KKh/5+/8XYRl8uKLL/Lqqx/CtHR4+XK54tatm1umku+57Ix2KMt6DaHBeLzD+fklu7u7W1jWDzw2\n",
+       "/im2fWU+taasbLvyjXpbSv1CpTaBEDWz2ZQHd9/F9WztcqcaPQxd6zU2lOXrhXtDqd7Uu00H/q1C\n",
+       "jb/tHXjXdXhr74ZNV70p2hsc6voQczNg2pjAVFVF27bbLn4rpW87lFDrHMYGW+q8vrzIERiQVZzG\n",
+       "M/7X//0f8sFbz/MXf+CHCB0Xr99jejHBq+H09BJ7p8+g38N1HYpap+Us5nNs18U0JP3+AMeUxIs5\n",
+       "d+4+YLgTEYQa3x1EfXZ2Q4QSW9l32+rk7appMKWhY5ykwF17KNe1tlV9f72/3l//futrr/0RP/Vf\n",
+       "/hc6GSvUNs9dB5Zlc/fuXY4ODmiahjAMmUwvOTw4Znd3wOXljLOzCw4P98mygjzPOTzcJ80ysixd\n",
+       "Q3cty+WK0WiEEFCW9RrmUOuhqroaRhprLx3DZndnTNe2XF6cab68YdFUBXVdasrhUz5Pm477upT+\n",
+       "aT3Le61vewf+h1/8N8AVpnQ9Ygj0LnddZbl5cxsJ/fXOXRkmhpBYpqklyFUNSnNMpSEp6xo31DDE\n",
+       "dDrl53/pF3nm+ef4yCuvEto+SZJgei5R1MeTWk2YNRXxckVRlkjDoj8YYNkeRVlqvLNtePTwAePR\n",
+       "DnVV0qgSITqGwwF0EHg2RV5gSIntOmtZfYDjeTRVjWmYJEmMYRrb3b5tW773h/7yn+hn8v56f/1Z\n",
+       "WO/VgX/mNz5P27X0+32SNMWybIqi5Nlnn2VyOSHwHLI0Ydjva3695RLHMTduHG8hkDRN6Q96pGmK\n",
+       "aRpEUQQIVqvleuCp5xJRFG7tMDY49rYj5xrGLfQrk8k5d959h34U0DYVptT1yLj2Pq4X8utd+WZ9\n",
+       "q0zMb81R+RNY14cBm/Bh13W3xlWbP3NdxGOa5jblfdN1a+hk7QS27mANwwABvX6PRim8KKSsK+49\n",
+       "esD/8L/9LxyP9/lzL30Iz7Qxez5hv0/QGdhVx+PFlHmd45oWQRAx3tvHsS0ePXzA44f3WM0nuKZk\n",
+       "EIW8/NLLuH4Iho1p+UjD5eJiztn5BZ2SeH5A1O9rTM+2KOtK046WS7Ikxfd06ADoI9SGzvb+en+9\n",
+       "v/74lWYlCE0e2N8/4ODgkCAIePjwoZbiF9pFMMsyRsMhcbwkCD3u3L2DkILTx49ou5qiKOj3exiG\n",
+       "ZDK5pGkqpDCI40xL3DtYzFdUVUtddbStFv90LagO6IBOY9hto+mQOzt7PPfc86ziBCEkVaOdSvV/\n",
+       "uuvenM43as2nA2y+1fq2QyhZlm0j02zbxjS1g90G3N/wIzeFemOYvnEl3EAppqn9jquqoiwKDMPA\n",
+       "tR1M02Q6n2uesmOj2pp/85uf5ZM/+Am+/7s+TrlKqVE8uHMfWxi4SFZJzOjWDTAky8sZZd1iOw5R\n",
+       "r8eNo0PqsiTLMh4/PqXpNL/c9ULG+4co1VIUGUnygOV8SZL8EbdunpCniU6q9xxsy6E/GNAPB+RJ\n",
+       "QlmUVHWJbVtIU8vs31/vr/fXv9+KBiPqMqZpOs7OzonjmFdeeYXRaEDXKE4fFYxHI9q24fzsnLIq\n",
+       "2Q/3qeuas7PHlGW5pgHnW1Xp3t4el5cXWKaHbRrkmZb+9/t9qqokjpNtod105aYpaOsWKQxsy6Tr\n",
+       "gE6wMxpjmiZvvfkGnq1tJwx55bK6Ydlt6NHX15/6IeYXPvvrAOsLcBXg8DSQf+1ntzvXBl7ZcL83\n",
+       "FrFCKSxTZwMioGoaiqZmulrwK//0/2RnvMuPfPR7KJoaw7UJTRdP2kyXc6TvUjc1ltLT9toSgEFb\n",
+       "N6i6wrFNaGosw8TzA+oWJvMVDQZFWeM4FoKOui5J4gUGLUJ0OiFm3YWblkUYhVR5RbZKGY93qJsK\n",
+       "KQV1qx0V/9KP/81vuG5f/Oyvr0ULM8IwpO06yqriwYOHKKV44YUXaBody9a1Fa7nk2UlrhtgWg7L\n",
+       "WEuXTcdeZ4jqCLCu7hBCYRsmqutQrT5Wer5PVVekec79B/fZ299H2pb2yW5a3DW7p21bHMehU4qs\n",
+       "KLFsWyeEd08GS5umvd2sNxzwpmk4fXRG1BtpX+yqxPeDNR++otfraQ+QRkvdPc9DqA7DsqmblrPL\n",
+       "CZ6vHQyFkMi1vadl6N7E9nRyim3oYbbnemRpSl3XVHWtxUNdp1kLlrulrZqmRZ4XVHVDs/boyPNi\n",
+       "O4wejzW1sVUKKQ3KqkZ1DVJ12q+8rTBUTb8f4YQ+bbcOKBFaiq4UtE1HVpS8/c4ddnbHRFGPVgii\n",
+       "XoQUgqauEBIsQ4LotFGVu2lcDNpaaS/ytWFTU5c6ji/PMSybi8mEOMl56WUdLl0UlRax0GFKQVtr\n",
+       "2l7XKa2NMEySNMO1TX71n/wif/FTPwYorcpVEoRFUUOSZWCC59vUTYNnejR1q5+XNe3PcZxtvFzb\n",
+       "am9tBUjTwPO87XNdlfXWSTQIQ6RhUlUVWZbhGJLDwyPOzi6wHJsf/eSff08I5Vd//TfoBTq/cjAY\n",
+       "khcFXbuOLEMQhT6r+ZzhcIBA4QU6eejk5ITp9BLHcYjjmN3dXeI4pt/vU5a59pufrhgOdzBNSVFU\n",
+       "1+qToG31fbMpuqFnb5k9wjLWXO9ta850esGdd9/Gdx1M8aQN9nWjvus2skKIP91+4K7rPuFvsjlG\n",
+       "wFpaqgRCSjoUzZo3uynilmWhWp3coSfGCqNqka7NtEzpqRaV1iSBwSI0+Lmf/UW+e/85fvC7vofK\n",
+       "NRBrc6xVlpGZBWbo4Lo2jhORpimTyYQ6LrGsgNFwh2hvd11Ap1qMU5U4jsPB4RDXc5jPZyznKdPp\n",
+       "CsuyeeGZV0iynDhOcPxjsjzmdBLjug1ONKI/7jHe2yVJE2zLpioy9vd2v2nyyB98+Q/puo4gCMjL\n",
+       "grfeeouXX/4go9GI8XhM13Xs7OywWi3pRUPqusZzHRzXJE1XDCKXew8e8NxzL7BYLLShPRbpKsY0\n",
+       "TFaVNgQTa4/z2WLOpz/9aW7dfIaTkxNcw8Z3dQfRCEWex9t5hCHW1p11QZ6vtIJQBixWS6RcC64s\n",
+       "i7qpUUrSAm2jU0xe+eB34LsdXatYrVas4pyL8wuUkNy99w63nrnJaDRgZ/+YosqpKi3wsS0LRMWw\n",
+       "Z615wpqP7DoOZ+enrFYrppcZcbwi8DUtbX+8y8XZObujXUZBj37Up6oalssliyRZU99qwlDDZpZl\n",
+       "MZ3OWC6X0HaM9/YIgoCuK+kPIsoiJY4XyLpYBzTrU+L+wRGm5WpRVtsx6EfUdUlZllStlpW//eZb\n",
+       "fPnLX+ZTn/pLGIZEioK9YQ9UhmlaGMFVYkuWFji9iCzPmC+WmKZFUVQIJFVVkuf6JGvZJlVV8nu/\n",
+       "90V+8id/gpu2jWG0SNXghsY6XKEkLUtsx9MWzNLA8T29+boOQT/gfHGJ1++RZTnn86VWSmIwGI44\n",
+       "GR1RliXLZUxdNDRiheu6jEajLQlhNptRV1epWf1eQBhqw60sXmmevONqm15HBx7Hccx8tiAMtfI5\n",
+       "CH3OLs8QhuDi4uyb1pCj/R2qIsdxHB4/eqidD02DwWDA22+/jVItt557jrquef3119kfj3n5lVfI\n",
+       "spK2kyhMhqMx9x88ZLw3RpoG07MFqyRl0B8xW0xZzOfcvHmLXi/g3r2H9Ho9+v2I+XIFhkEYhDw+\n",
+       "P6ff7+tmpmrWnisK0zSo65bx+JCuk9y7dw/PXTsSdh22qV0e66rCth296XUdUv7x5flbduBCiBPg\n",
+       "F4A9dMv8D5VS/7MQYgT8MnALuAv8NaXUYv0zPwP8hqeQ+AAAIABJREFUZ0AL/G2l1L9+j9+77cB/\n",
+       "7/P/crvrXE/Q2RrCrLncaq1eQl75e5uGoXc7BYaUVEWBsE1EC5Zh0EhJbsBkteBXf+X/4OTwBt/9\n",
+       "oe+kmq2oDIlhmgwGA+y1QY8WjjRka3Wg6jo83yfPSvK82JLrtUTY1x4brQ6R3UA+jhPQtdA0HWXZ\n",
+       "sIxTpDSwHZu6qUC2JMmKuqmIfIdBL0Sg6EchURhQ5Dqx/Qf+w7/6DZ/Hb33mn64DgLUIpKoq5vM5\n",
+       "Ozu7SClxHEe/Dylpai2GaduWJEu5d+8eeZ5jOTYf+9jHMS0L27JZLpfrtJEapdapJJ3iwYOHWvm4\n",
+       "M6auasqyIsszPM8kDEOKomAwGFCs4arr5mGghReqM2FN/yzrSououo6qqinLmrYDnbxiIdHFyHEc\n",
+       "bMdHISiqCt/3mc4mtN06a1AKhHCoaq1YHO/uUJfV2gtDM5dMw6RrWzzPxfE8ylILhLI0IQh8fNcj\n",
+       "yzJUq7MNV4uYnZ1dcI1r/hSCqqpJEm3ze3Ki+elpqj9P09B2qaahcF0bwxBIQ1+7pm5JsgwhLVar\n",
+       "GFt2CBRRFCKEoteLuLg85/zxGR/72Mep1h4ZXacwDdCsNEEnrkIB2kbfe4Zp4PkBq1Wsaam2uxWn\n",
+       "GabBcrng8eNHlGXJq69+EFBbEZCUUnOREahWaSm50P5DrGm2ddNhW4J/9qv/hL/8n/yn1HXDYDCi\n",
+       "yEukNMjzAsv2qOuGrlNEUURZ5tvIs82peOMWuukmN06ijusDYJoWXac57UopOhTSMPA9jVlvTuQb\n",
+       "Uysl4C/8yCfeswP/F//2t4k8Z0vBTVPNJe/1eiilmEwmDIdDJpMJN27coEhToihiuVxwcnKDNM20\n",
+       "CZcQ5IU+ZbmuvWa3OWsVrUVVaAbcaKRtdKVpYJgWZV1j2yaiURiGZs1pbrjA892t33nbNijVcf/+\n",
+       "fZLVYy3jF9A2NZ7jbAOTu66jbhqk1IjEBz/yg/+vO/Aa+K+VUn8ohAiBLwkhPg38TeDTSqn/Xgjx\n",
+       "94C/D/x9IcQrwF8HXgGOgc8IIV5USn1TU9vrIQzXC/emEFRtg4keDGDoG3DDlxRK0TWdNhJUYHse\n",
+       "yyKl7wSorGKhKlahyS/943/MbXfE9334u/B2BjSeT10rkjTlnXfuYNsWR0fHDAY9pBQ4Tr6Ox3JY\n",
+       "zGOGwwFhGJJlOVmWURTFeuDRx7YdPC/Yyobj1SXSMHEc7YC3vz8mTjKm0ymdarFdE8/zsRqTvCpY\n",
+       "3n/EjeNj8rLm7r032B/vYnyT0fKbb765hR6EEJyfn7NYLPjUpz5FFEVrwYOFY1u0rR6KfPWrbzAc\n",
+       "7PCx7/4Yan3zN21DVbakaYJlmqRpjOf5uvibJnfv3f2/2XuzWMuy877vt9aehzPfserW0N1kjySb\n",
+       "pEhqoEyZlCiREuQMssUEEhIkVgLYyADEyEOeoofAiPMQBEGeDDhIEMCWFUZhNJGaaMlSRIpDk81u\n",
+       "suehhlt15zPuea+18rD2OVVkVzft5IEEog1Ud9W999xzzj57r/V9/+8/8K5H3t1dhB5Swtn5SWdA\n",
+       "pYnjGM/z7CLYzSPyPKfX6xHHMcvl0lZVecl0NutgB0m/P0BKw3CYkmX2HPf6A1qlcERoVYRlRVHW\n",
+       "FGWN53vMZlMuH1ymaWtms1l3cdecn5wyGY/xHBfhw87BAcvFjLq2ARGL5YKL2RQhhTXrCn129nY7\n",
+       "qKFBOhLhCsIoYjQZc3h4iKd8lLaL5NbWFm4FZWlIHJ+2yfA9QbI9QkhJU1uoatGFDsdhgOu5BGGE\n",
+       "7wfsJANmiwVp0mN2foQU8Oqrr3J0dAelLHT1q7/yK7Y7C0K0tucRo1FtAxhMxy+2i7j1yYiTmPl8\n",
+       "1gk+fMqyoChzvv3tbwPwxBNP4HkeV65cIQjsItrv+0gp0Kq9R09zLISTZSvCKMbzPaTrEDsO/V5i\n",
+       "u6HpDNcPmM1muI5PrxczGIyoyobz6ZQsy8nznOEwJU1tbmXd5Vienp4Sx7H113E9GwbdNuS5tfyt\n",
+       "m5YosvJ/pRTz+ZymrjlZZqRpSr/fJ8/LrgqVG0LDg47J9jbV0gqH9vf3WSwW9Ho97t69C8B73/se\n",
+       "bt8+xPd9Tk5OePyRR7hx4waO47CcLzk42Oe5575DHMfs7e+Spj2+/fy32d3bxfEDludT4ihiZ2cL\n",
+       "jFVIb+/u0rQt5xcXHFzZZzpbIFpN2usxWyyIoxDXDzk7P2dra4I2Bm2s6OjSwQHPf+uQwHMBQ7+X\n",
+       "UhZ202m79dAa9om3YOLfe/xrYeBCiM8B/1P356eMMcdCiD3gT40xj3fVtzbG/KPu578A/Lox5svf\n",
+       "83s2Ffi/+IPPbgQ66/YT7gs4Vl1lfB+WKqUEpfFc1zqJGfv1EoNQGpQhHPQ4K1f87hc+T70s+MWP\n",
+       "f5ImKxlubXOymtEPrFLS87z71IIXeJ7PYNCnrmscx7V5ilWG73sdrufjdGnW8/miw/jUJo3c+g0L\n",
+       "6toGBWgtkI6L5/vW06WtqOuSVis812E5X5BlVmQwGvRI4hDPdfiFf/NX33L+/+P/8JfY29tjsVjw\n",
+       "8MPX2dnZ4caNG0gp+fEf/zEWi4VdQBdLIj9gPp+zvb29cSVcVzRxHFtFqOgqxo75EkURYRhRlnUH\n",
+       "v8SbaibPc1arjNuHN3nqqacAged69Ho9tNbkeb5J20nTnq0aMfQHPRzHIeuSxTWa+WyBMYbhcEye\n",
+       "54CkbQ1RGNkq1A9oW43juggpmU6n1E1FnMQEQUDbaJIkts/nOORZjtEa1/U3N3qapgBUSqG1Yjab\n",
+       "otoaY1qkkDiOpZtGUUQURFZ56NhrcrGwn6v1FId+v9cVDeqesEx6HW7uQtcKK6WYzefM5kumsznG\n",
+       "OMwXS3bGPXppzGhsvThmsylaK/Z2d4jjhLqyuLFS9ypwY+wCvqHKSm8zt6jbhtlsxp07doG6dOkS\n",
+       "47GdITRNtZFkJ0nCcrnsYMoWKQxtq7pZk4vWBke6aAyLlU0w0trgOIKvfvlLfOhDHyaKIhwvoKrq\n",
+       "Dt+WVGVj7SDixLodGuurUte1LVwcZ5N6VJbVZtZi4TnR5cG61HVDUVYbAzopJUEY2q6nLIn8iNbY\n",
+       "eDWlFZ/+uY8/sAL/3B/8Kdf3djdrx5oEkaapDfuI7bVy+fJlHMehWC0Zjyf0Bz1efumVbjBZs729\n",
+       "ze3bt+n1+tbnxfN4885t3vWuh7k4m4Ix9Pv9LlCjsEZfUjJbzImTmNBxyLKCIAy7rqPpZjjWzsDz\n",
+       "XIxROK5LsZpyfHSXqshp6hJhdHdtdkrMTUEr3pFG+K+MgQshrgMfAP4K2DXGHHffOgZ2u79fAu5f\n",
+       "rG9jK/G3PeI4/i5F5ZpGswk0bu/5oGymtsI6+kkh0UIjux0rQBD5LrUvOTEV/9fv/R7+rOQTH/0o\n",
+       "tYDtvT3c1lY6y2yOKzyGwwFN0zIY9BmPxxY3Pb9gPB5b850wwg9dsmzF0dFRt9AlxHFMHKf00j51\n",
+       "XTGbzTk5PsPxBdKRJHHK7t420+mC+XxJVZd4vt8JlxKkhPliieOFRLFASsPZxYxbt5eMv8dYZ338\n",
+       "1E99jLOzU0ajAVpr7t69y8HBAW1b8+KLL7C9vY3n9dnb3aHMSyaTCYPBYOOjvPYitt4Xawt0Q9PU\n",
+       "JInlwX/xi1/kU5/6FJ7n0LYNo3EfIRyiOAQBTz31FCcnJ1y+fJmyLDAYBn0bJLy3t4cxgqqyqTBF\n",
+       "U3PnzhGe73TZpD6e67I1mWwWF9fphtWtoKpqO4BiSa/XxxMuQsJg0KMo7HBrPpsxHAw5unuHy5cv\n",
+       "07aKKAg3mGzbthRlSdaFSITpAATsXzqgrivqqqBta4zRNmG+rJhMXMplxmJ6yvXr1xmORjhC4ruu\n",
+       "3UTobI97FnISUpKXDWenJ3ie3eQ9aYdzjuNyfHTC9o6Vvg9HE8p8xv7lA6oit/7bdc3ly5doa+sd\n",
+       "E8cxQRBSZLndDIzuOo17EASmoixLdnZ2eO655zg4OOCxxx7tukCfLMvwfRelmk1ItuO4eF6A47j4\n",
+       "nkvbVliJxDpAQFtXvFbR71mbiHW7f+fO3c5dsCYwdtE9OjrBcRx66eDeIDsI0F2FfHp6ysnJEVtb\n",
+       "O11nFjCZdJmwiyXz2RIhDaPRaJO67rly81msIT9HCkbDAW1VEwe2o3unY2/bDh/7/b6lDHreZhH3\n",
+       "PI8kSRBCcHR0xNbWFo1qWOVL8jLn0cfeRZaVHB8fM53N2N7Z5fDwkMFgQNtqrl27xs2bt9nf30ca\n",
+       "zY0bt3jyycdtUVNW+EHAZGvMydk52g+J05Tlcrmxvrh7fMRkssUqz4nCED/wWWU5vhMxmezyxuuv\n",
+       "EvghVb7qcldLlGo2C7iUzju+93+lBbyDT/4P4D83xizvZ4UYY4xYB7w9+HjHEv/+xOZ169A9p13M\n",
+       "78vL21BuzD1fk/uNrgb4LE3LQhq+8rVnuHt4h7/9sZ9lGCU4QcT5ck4SxSReQLgTI6W0hjltQ6us\n",
+       "nWkv7ZMkl3Acj7IsWS4XIG271+v1CIKAsqw5Pz9nenHB4e1DHMdlMtni4OAKtSo6TmnNCy++gOv6\n",
+       "DAZDPDdAaUUY9lgtF1S1TSPf2t6jbSqWizmt7+EHLkfHxw88V4eHt9na2uL4+Jivfe1rDIdDDg4u\n",
+       "dV4TEbdu3WJvb4+XX3qZNEr5N/7W32I6nWGMvs9Twuvc4XJczyfPS7J8ycuvvEiWFbznPU9Z21sp\n",
+       "kdLZ0DyFgH4/YbVacf36VU5PzwF44403GPSHDAbWfxwka/P96WKJ71vLgSxfIQTMplOWiwXPPPMN\n",
+       "/t7f+/u25RaSKB4QhiG9fp9VnqF1y9HxXVsRui69NGUwGHD58mXmsylJvM3x0RFVXbNcZmxvbxNF\n",
+       "Ab3egKZtO4xzwfHRGQaDEGZTnQqhmV6cd8NVybeefwnX9RjELs8//x2asuJDH/oQ4/EI3zcIycZt\n",
+       "b9UlsW+Nx7i7O2itqIuyo5UecXp6xvve9zR1o4miGMfzaOstEBAnCacnp2RZzsXFBb7rWfHJasVi\n",
+       "PieNrYMfwt64QdDZIkuB79kN9NVXX+XKlSvd5tVQdRuStZld0uv1uDi/YDwZ0zZWUzG9mJL2YqTQ\n",
+       "rMME1vdNXuR4nkfVLfp5XmxS2i0915qKLWdzDg4u0bYajOwCGGocR1K3JcZodnd2uH7tGqssp2ms\n",
+       "Pe1samGv/mDAQw9fQ6uG5XJJWZTWs186G0jOpmo5XRFVcmkyIvIEJnR5p2Ax3wHhe51S0hYrfvfv\n",
+       "waDPcmnZS77vkecZSRoRRBFvvvEGF7MZg/6AR979CK+99gZV07Czv0etWrJVQSqTLulngQM8/vhj\n",
+       "HB+fUtc1o8mEi+mFtd0Y9BGtZjqdkiQJZWl9jS5fvsRqlQOCVZbjVrWt7qXA9wKCIKYqM/wwBN0i\n",
+       "HQfZsenabm7wTsf3XcCFEB528f7fjDGf6758LITYM8YcCSH2gZP1GgNcue/hB93XHnD8OgD/8//6\n",
+       "Eh/+kffzkQ9/YONlHATBJsQhCIJ7FrGbQAZD0A3e1kNPLQy6VrSJy//9V1/i+S9/lY+87wPUvqRR\n",
+       "LUFWMRqmNKGHWJbUdYnuFFO9fkxV1rRtzSpbbCAdC5uk1E2FMYr5bInn+QR+yO72DkVRMhkLsiyn\n",
+       "qStywPHt67HGOC5KGVRbdwGoAWhFU9e40sEow+2btwhDjzgOSBKf5UITBA/G+5IkQXX0qEuXLjEc\n",
+       "Dnn00cc3FcfZ2RnPPvssg/6Qn/75f8yq/ccQ2Ep7jaRl7b1/VLW9AvoT+2d9GOwEWnVXiOiukhYI\n",
+       "+/br4z37tfX/gbfcZMPud8a9t76Xj30C4E8J7qO8190v8K3PD9vhWx+3WoHj2tc4Gtuv7e199894\n",
+       "LpQFBD4c7L3lVwBw7dK9v7/3iQf/zP1H3ANf/AlJlFBVFdPpOa7nEXUsKqvcgyiKO59vF4NmuVgQ\n",
+       "+NaWtGpbtnd3GNYDhDCEvo/SLb00xevCHgxrOwhQjUJ3BlZF50kOmt3dbaS03ZPvWSgHbCCJlJI0\n",
+       "SRAGtGpZzOdMxiOyzLpEam2ju7S2ENU6g7HXsyZcge8TRdEGdtjasjbJSRpTFhYecD0fz+8cEIUg\n",
+       "duwH1rYty6WF0cLAQ4QBvZ5VPreNpipytFG4jsSN7WvWxtA0tnhyHJc8WxIGPkkc4Tmg6xKj6s38\n",
+       "5kFHnS+IkqENjigLVqsVUgrrlug6uK4kzzOEsNGBTuByMb9gtL1lnSVXMxYvr9jd3ePGmzfwwoAw\n",
+       "jgjSkOVyYaHTLmT67OyM7e0xRVGyXC0Z9PrMlwuy5ZK97d1NMpiFKUtOT89JElv4jMcjSwbIcwLX\n",
+       "RxjDaLzFSy/e7QI4KkDxzDPP8vVvPt91Se98Xb7jAi5sqf1PgO8YY/6H+77128C/D/yj7v+fu+/r\n",
+       "/1QI8d9joZN3A1958G//dQD+g3/vcwghNgt2ntvByDqVQnr2JUqzVlbaynu1WhEEwcagSkrJscqZ\n",
+       "naz4xr/4l/zkR36C/YPLeGlMNl0yu3tK9npJsD1kNB6zNephTfxLTk9OiJOYycSuOGvPlaLMUB0n\n",
+       "ejgcMh5PUEpzfn5OluUb45n9/UtdGsgRTaMoqgIpHOIkIU0CfN+2+IeHdwiCgDDw6KUpQZjg+n5H\n",
+       "P2u4OD9lvpgzeBsIxbrLWerV1tYWeZ7zzDPPEMeJrYB296mqhr29Sw98/F8f/9+OxXxFEHpcvXqV\n",
+       "WrUsVyuW8wVVWeK6LhcXF2xv7zAa9ZCe9bQIAp+yyCnLgqoqmM8Vqqm4ffsWH/sbH6UpavKOD9/v\n",
+       "oKg1rVbcl+Xq+Q6np6cURdHNZxzSpLfBse3A2QpB9vb2qNaWsolNr0riGNU2GARKW+YUHVZujGE+\n",
+       "mzMeTfC9gLyDcqzHft4pnj2yLANjmF6c4Qf33PSkutcFu25nHdvW333yBJal0yoc97vnWY4T0bQW\n",
+       "UtFabGAc15eoViEkuO8AJai64ji7Q5Zbf/blqiUMI6qqZL644Pr165yenqK1IctWHCQH3UxnDkLy\n",
+       "kQ99mG9+61mSNGJnbwutFC+/8hLj8YQ49MmX1l/94YceoqoqXn/tdfb2L7GztcXJ2Sm9NMEPAl5/\n",
+       "/VV2dnaJk5ibN28yGg5Jk4jlcsn2zjZnZ+f0+32kgCB0qcqG4WhEVddMvBThO6im5CMf/gAf+pGn\n",
+       "LTdfOPyT/+Wfve17/34V+EeBXwW+JYT4Rve1/wr4b4HfFEL8XToaIYAx5jtCiN8EvoMt2P6++T5T\n",
+       "0vXFuvY4uT9abS3m2GRiKoXsEkCklChsnqWUEuE6iEmPP/4fP8uHHnqM6wcHaE/iNy3RsEf/0h4y\n",
+       "q1ktl7x5dsTJnduMBkNGoxHDgQ0SzlYZaxcy13WJO5Otphte3blziO8HG5HCmgZ1fn6CkIIgcPH8\n",
+       "mDRJN2yIvF6xVAuSJOHqwWWL0eYFqm6Y5WfWnlNqoihksZwSdUKLBx5CkOXWR1t3wQFhGHF8fMqT\n",
+       "Tz3FbDrF8yNuHd7lAx/+Pp/sXx//2ofr2e7vzTffwA0DKwyKAtIkoSgKstWK/f19qqqkKTKEY2c5\n",
+       "nu/g+QnD4YCiyDaJO9PplK3RCBHHtE2L6ehjSncsrO46t+203GDgQWjtTvOi2EQErm8zSwdVtKol\n",
+       "iqJuSGxwpQtGIF0Hzw3QZg1bOmitiMKY2XSKAPzAp9/vMxkPmc3nlKVD2y5RqiHLloShDRheC3DW\n",
+       "1DfVDYzbVqHWgcydJYbSClM1+K5P01R4fkAYel0KU40UgsB3kY7sONCSVVHY4WccfZdW5HuPum1J\n",
+       "4pgw8MmzFUa3OBJ8zyWJYzskxCDQ7GxvkS2W6KYlDiOklDz7zW8wHo04PzlGChvycf3qZctmSQcY\n",
+       "1RKFPvPZFN/3Obi8z2w+I8+WCAFNVVLXFTtbWzRVyfF8xsH+Pk3TsJjNSdMe+XJF5PvopkW6LlVl\n",
+       "z5U2LQ8//BDz6QlB4KB0C8bO+jzH35A53vaafKdvGmP+grf3S/mZt3nMPwT+4Ts+633HGuteU4nS\n",
+       "NGU8HlN2fMzA8zf4rVYaR0gcz+7yRZ4jPNdSdKTgN37jN2iqko//zY9TouxN0TQcnZ5y5J7SMy57\n",
+       "6YinrzxJUytOjo955eXXaJqGnZ0ddnd38Ty7WVRVRVHaRGzRcTu3t7epqorFwgoXkiRmMEg3tMfZ\n",
+       "bMb01FKYwiAgDiMbbLxaMT0/p65r61I4HJIk6QZnfumVF3hzMWOyNcL3Hc7OTh54ro6Oj0iSBOlI\n",
+       "iqywWPzZOWna5+WXX6Wua9Kkj1LvhBj+9fH/9gjjkPFwgBCCZVGyXC45PT2lLEv6aY/xeEwcRyxX\n",
+       "S7SxlfHZ2Tm+F1KUJUkUAobAT+yQrKmom5okjtFKIaRDFPdQyqBNi+ZeYVPVNsmnqirCKMKg8IMI\n",
+       "1RiaRiGEIQgCoihiubLhvrOZVezaGEJFVTQ0bUurFRqbKdvrpTidT/xkPLbDyU49qHWLIwXn52dM\n",
+       "JhOGwwGLxZLR0HaIGlC6JQzCTsthMVttFJ7nWrZV26JN3WH1LW3V3nPm1BplLGvE9/0ueWhi4SjH\n",
+       "wY1iLmYzZjPrDf/Rt/lcqkaDUyDEWs0tUKphsbCCp7q2uo7z83MWc4sbWtioh+e45MsFbWXZM2EQ\n",
+       "WP63Y4hDl9nFBVVV0UvTjlihybOMy/v75GXBKsuYbI04Oz9HCo/xaNh56y/Z3d2lLHILGXV5B2Fo\n",
+       "4eFWW/voprCc9JPjW1YsJcDxPRzh4EiPzhXrbY8fuBJzvcO4rttZNt6DU4wxm+obbXDW6ReNomqq\n",
+       "jkKWITyXr3/5GY6+9RKf+PTPcdysiGpDJBxEGHIluUQjQUlYlDXh7SMqNyRNhmw/uU9dV5RlwXKZ\n",
+       "kWUrjNFdlW2TqDXa+nd73oYu6Lou5+fnrCnu60U89H3qssEoyPPVhj41Gg0tDUtr6rKgLkuU1ixX\n",
+       "S4Qw7O9u43gOr73+MmkvfeC5+vSnP8ULL7zYWegaMIL9/cu4rsd8vmA4GFt88wdgj/D/h6MsM158\n",
+       "+Q5JHONHKX7Ht9ZKY7RmsVigtbUV8AIP13NJ0gjXsfYMUkJTVZ3VQoHnulRVgRQ2I1QgNqHYQoJw\n",
+       "7lXXYRgShiFZlhFFAa26R7ddUyfzPKeqSsIooOyKj7OzM/b39yjzmjRN6Ro3kJbVVRQFvucTRQnL\n",
+       "xQopbMReXdcMh0OiKGJ3d2djcTwY9CmKbJPmFMb35jJO13EIaYfGrhsjhGW7RLGFXKqsQnTKXNfx\n",
+       "8HyfvCw2NE2wNM68KDBhDykl4509+qP7hjTfcwjHJ88X2O457Fg2mji2EWxVVTKdtvS6+2p2anHt\n",
+       "07tHpGmCMJrVbEbT1F0+b21TsFyXvGkQCBZdQMXWyK5RJyc2F7asai4uLjAYmspG/8WxJUh85zvP\n",
+       "s7Oza7/frWVJYvnyjTa0bUWezXFES6sajBEbnYuFoayFwzsdP/AFHNhMWteY8pqXHMcxBRX9KMUF\n",
+       "dNni+j4tBuN5EFjj9tOjY269cZtf/OmfYzzYwm00XhDSaI1uW7J5hh/4+GFAL0zRMkC1Ga0qWOVL\n",
+       "ojBiNLEKsDgZsVquoIvciqLQ8rhdj/lixtnJKb7nEcUxg14fz3M2VbkxhijycB0X4Tok6ZjlMmM6\n",
+       "O9+o0UbDIV5glYTL1ZwwcpGOR1kW3L51xCc+9vFuIPZfv+U8DfoTfuxHfxKlWp599lu8+eabtG3L\n",
+       "Bz7wPm7cuElZllaQ4j/4Y/3sZ38V1/WR0qGpawaDIdeuXeW973sC62vcgjEkcUKR5xwdHvHMM8/w\n",
+       "0Z/4CYIg5PD2IYN+yv7+Hn4QMJ1ObQ7ockkUp8RxQqM1y6Xl8cZxvHFZi+MIgyCKIppacTFbsr29\n",
+       "y8npKU3TUjcGz3UtbTPwmYy3rMWu53N8fMKrr75qlXXA1u4+nm+VpFI4FEXB+fk5ZZVzdnbKfD6j\n",
+       "LAuuP3QNzzQ8/fTTJElMUZS2rV0sOlM0TRhGpGlqvaSTIUWeo7Shad9a77XGY7J92YbxNiuc0KOl\n",
+       "RWHFP34cUVY1eVawWmUM+kPL6IkDmrYljVO7sIQJnhcR+i6urBHChoYYo4nSAa7vU5d2yK613fSz\n",
+       "1YpstbBc4jbAk5YRrTplskCQJgGutDd8EiZoIC8Ur756mytXr+KFAXVT01Q10khcz6cfhEjpoFpF\n",
+       "OoyQQmJQIBxmizlZtmJnewc/CBHawfMCIt+gTENVFhR5QaMsLNi2lkfuuuuQXkkURriei+tYdWYo\n",
+       "7aJq51n2eftxYG0yXEPTKjwZ4TsRWgQIYRAtuO9QiYquGxHSo240YWJnCYF0cFwXRzrkVYH0rMvp\n",
+       "yPVRrWK8v81ytbTpR1KQhBG3bx+yd/Val4oU4TkJ0hEIKSiKFdPpBYNBnyzPEFLilBX9/oCyrNjZ\n",
+       "2aIsS4wyeJ7D1tYYpWpAsbW1ZYOmtUI1NbUqaJuaNPG58eYhSRjjSUup1MpSo6X8/oXYD4WZ1YOo\n",
+       "hGs+Z+tqVFmT+hFSGUBSC4OIQiplcxL/8l/+BYHr8sGn3kfQtUDrQaTbiTXgXoJ9VVUgW+Iktrto\n",
+       "WUInIQ9D+7Ou423kwatV1k2DLUYfhuFmo1mHMazpj3Vrn9sKGBRhR+pfm+CUZYlSiqIoGAxS5osp\n",
+       "s9mMKIp4+umnN/z3p3/kE285b8998882Qpw1JXA2m/H666+zvb3N6ekpN2/eROmWH/2x/+4tj//s\n",
+       "Z3/VyuWFpG0Vvucx2ZownZ1SlSWPPPzQRnThSpckTtjb3aWuGvq9fhdGbD1fFvMZAhiNRoy3JtZw\n",
+       "SmmCOMZx7Aa36aAMrDJrpFVVNU2tmM2sSZDjeKS9PspYOK2uaxaLZYejKqqqYji0wbbaGMqixEtS\n",
+       "bt64SZ7nnJ1dWBN9CZcu7TMaDQjDAOlImqYin51tAkNc1yVJEoIg2ARdryPKyrJEtfbcCCFIer/4\n",
+       "lvNXlp+nbZuugiopihzPsdDaYrFkMZuzs73D1niC47jUZYV0JKWqaZWiKmtcN6Aqyq7Sa3ClpCxz\n",
+       "q5TsoA2lrfS+ripAMx4NiSKfYa9HGAXUVUEGVtwGAAAgAElEQVQYBhg8kC5SCtra2glYQYxLWTdk\n",
+       "eUHd2Gvt1p1Dev0e29s7BJ7XXQMtTdN0SfIODvYcCQlR5KCNxdGzbIXRkqpocaSd/Xi+QxjaMBK/\n",
+       "O5/ra9wYDeZePqaFcNru7y1aaVzX60KbXUQ3oPQ8vxPFBVbUpy17x1bUik988mcfKOT5/d//PI4n\n",
+       "bQqX4yAdF9Z2rd17shoSl1YpfFdvrk2bZeog1xmpRYHABm+PxmM8GXSD4V0cT3amc/cGsHVjTcKa\n",
+       "VhEHVlS2XoPWh9tZ2QZ+aKFhwIscLs7PmEyG3Dm8zSCJEcby/qW4l/WrzQ95Is/9GDi81ZWwyApi\n",
+       "P7AXWl4SxjEyDGi1olEtr7z0Mrdu3ODTn/w5SzWSAs+xkWW6KsmKnKIs6ff7Ngnd9/Arn6opLAyB\n",
+       "lYv7vqUrzqYzVqusw7itYKfX61EUBVVV3WMFdEOcLMuYTWddUhD4YYwfBPT7fTzP2wwzl8slcRwy\n",
+       "nZ6TJCnDYZ8v/9WXOD8/5TOf+QyDweC7vEQeeK4AKYTd5Tsf4TAIeOThh+3iFMdc2t9nla0e+PjR\n",
+       "YEBZWsVc2rey9/lsStvUaKV4/fU3aJqGp558itF4wmq54uVXX7cBwEKyXC5573ueBCSPvPsxXEei\n",
+       "teL01GYL7u7t40qH+dxWuLrD4sMwRCLIuzT6s5Pb7O9dBgRNo7h7eBs3tNCU3TQmlFVpVYJa88Yb\n",
+       "b3By3NLr9xkNh7RlyfnpMdvbO/SuHlBW1qcm9FxLNTUGoZUNzA2CTaJ9nltLg7VvTBzHm5bXLvI2\n",
+       "4Wk2XTzw/DmOZ+mrquHstMERCTdv3OTFF9/AcyTD4YCTk2/zsZ/8KChNo0t8xyXyJK0wbO1tk2UF\n",
+       "vSSiLBuQDlVZgxvQtg1tXXN4+w3C0O9yIX22JjtkyznaGOazBZcu7aNVi9KWmRLFPlVV4Lg2zFoZ\n",
+       "O79RyiCkw9HxHYR02Zps4wU29Dhb5vi+bzc66TAaja2IpKo7FadLWWXdfRAwHA4RODgiwGjrEdOq\n",
+       "GqXswmy4F8TidzREIe457Lmue8/+Yl2oYQOvLUTJJsMULPTYtsqm0AsbYfaghXt9SCFpKgt7lEWB\n",
+       "7wd4fkBT13hBRF1VlGXFYDiARqGVdZs0Wnc+34bAt4tuvzeyiuaoh2oMZW0hzvliRl5kHFy+RKtt\n",
+       "WLjWmiCw6uWmzijye0K5jdNnV7QlsaU4Oo59P0dHdyjyHN93iMPQQrRS4PseriO72cf3D2z4gVfg\n",
+       "f/4nn9vg3Gs/lPXi7TgOwhGYTiKrFUjfszRm1+HGrZt84fc+z9NPvIfY9bl85cBWNq1NuvE8jzCK\n",
+       "cB2HvCg4Pj6mrmuiMCRJ12rKiLZpO4tPs5H5Sik3oboAvm8n82Fo0zzWVUWv1+sqBssYmC9XNE3L\n",
+       "bD6jbVsODg6696W4c+fOphv44hf/hKtXDvjML/8dq0jshhyu61IUBT/yE596y3n79jf/DKV0p6CT\n",
+       "GzP4JEko8gLp3JeQXX36LY9/4fn/hrq2IiTdWai6roOmRQiH6XTK1atXObp7ysXFdIP1R1HciS4k\n",
+       "rhB84AMf4M7hbc7PThkOrLhpe3ub4Whsbzop8TwfrRRKt2hjL2IpJPP5gjTtg4HAt1z5OEkomnwD\n",
+       "bSxXKwI/RDoO/X6Pfq+PNpoir3jttdcYjPY2sxMhHKSQ9AcDhIA8z8iyhRUg6RansznY6VwE53Ob\n",
+       "sNK27aby3pjp+x5BGCCEg+O8NZP05q1/uoFn0D57e7ukaYyUgmy1wJiWIHRxJYShx3g8om5KVFVT\n",
+       "VzVlZVv9VoM2EuH6nF/MyPKcVZaxvbPLsO8TeJIwCK1SVggGnd902zZkyxVKKwI/YJVbAVNdVx2M\n",
+       "YuciSisWy4ybN2+R9gf0+gMGg1HXbRS0nWRetS2mY4rEcUyZrRd2n1ZV3L17yLve/VCnavQQ2sFx\n",
+       "bBC344oucNkgHLnhk1dVZamOVbExqFt3Omul5fpedx0rEgrCgFbds0/VyqC0ArNOZ29RWvHxT/78\n",
+       "AxfyP/zCH+L5Et8PrLma6+IHAUrbXFxtBBqD0RrpOGAEbdOSJsnmHlpbHAsgW2XWVM33aVXZFVWW\n",
+       "EtrrWadSPwjwfJ8syykLew82dc5gMNjoWXzf4/zsdGPq5TgS6Ugbsi4N2WoFaBxhcITAdyVVWeC6\n",
+       "zgZiklL+cKfS359leb9cfpNO3yrquiVKEipqGhReGPP6jTd59hvf5On3vJdhnLK7tcOqu2iUUhSd\n",
+       "LajFsSOiKOLKtauEYUhRFDRlQ5GXlEWFrW3thTGdzrGOcX2CILDS28YKCeq6piiKTSKQXQRq8twy\n",
+       "ZjzPI4l7HQ3KIS+yjn9q28deL+Xk9IRvfvMb/NIv/ZKlHXXRcHme24X4HdJ4JKJTyXWJHdLBd1za\n",
+       "umHY71MU1nVPPviz5sMf+hBVWZFlK55//jmMgbSXYoTm/PyCy5cPmE0XlFWN6/t4YYQx0GiDkIL+\n",
+       "YEi+WvH7f/hHXN7btRa7aYzve5yennPj5m2apqHfH1ie7PaEtqODOoDrSLYndtAqcJjPpyRJj/Ns\n",
+       "hXENniNwhMPo0j6rLKOuao4Pb9NOJhvP8X4aotoS6fpcTKcEfkDgh9xZzhiNRoAmCnyGg5S2qSmz\n",
+       "nFa13Lx1k63JhF6/x3wx77zi7U0ZxRFGa0pV07QNRbFkPHrr+RNCcHBwhSAIENrvFLAhdVPiuy6e\n",
+       "77DKZhT5Cq1bgiAgTWO049k4vknCcpHhCMkqr1icn3Pz1m0eefej7O7vk/Z6mDbDERrHtV2pFIKs\n",
+       "sNL/uqpJ+yNUY9PmJ3FCozRy7enTWDOwsix57fU3uHxwQBSndtES1s4zCGKiUH6XsVXT1lSVNW+z\n",
+       "g80SbZqN+jSKIqR08Z0Qpehk8Iq21ZhuwF+W5aYzjeOYNEmstzx6s2hb/5N6U5Fb5oqh3hjaORRF\n",
+       "bl0SjV3UpBQ48j412QMOAbR1TVNVuJ5H0+XkOtIF17XwpwBhBKy7wsDH9y23PeqiDgWQ5Tmj8ZC6\n",
+       "qmjqCkSz2YSqsqAsC4IgpCpLVKvB2OvaMsASa/CmNU1dM73IN5BokoTkWU4Sp3iug3BdVss5jhT4\n",
+       "nmeH256D53tgjC3UMO/YkcMPwQJuyft601rBd5ubu8KxhvKOwGiFcF1mywVVVfHct77Fr3zm32Vn\n",
+       "MKap6o3Zj/UwCUnTFN/3mc1mzOdzXn/9dStqSBIu71+h3x/i+z6LxWxjubn2Jy6KjOl0uvErXzvt\n",
+       "lWW5ERtpbdWRvZ71wq6qiuPTYzw/IEkiXM/ae+Z5ies6vPbaa7z8yov8Z//Jf0qeZYRBwNnZ2cbP\n",
+       "ommaDnPMHniu1jfUertZXxxrepkUAs/3kY7D/AH7QC9OkEAYeHz6U5+y8MSbb3B2ccZkPKFuLJyU\n",
+       "ZTn7lw64dXibIAgIhGA5W3L36JgwCImShOl8jnSsF/XZ6ckGaur3++zu7uJ7Hi+88ALj8Zjd3Z2N\n",
+       "urSuarQy9PtD9h99lOVyZQUWdc7Z2ZnFULXBlQ6T/b2NYX6WZRweHlrhh9Akkcu7Hno/VWUx8zWc\n",
+       "dX5+aisgzxok7e/v43keaWpl6qvVCjA0bWNtFDq7Uiklo60B2ggmkwn5Az6CNE0775mWIrOV+9m5\n",
+       "ZS8kcQQ47O1eoj9IWc3ngOH4+IS418P1Y5RwSUdDzk4vOn8Pzfvf9xRXrlxlsVySxAlN6yCl7XYW\n",
+       "iwV+GFJX1jo3TfscHt7Z3C91W9G0FUZZV0HHcXj4oYeIk5TxZMzV69c7f5ma1WpF3aWxt3XTVXly\n",
+       "U11ubU/QnfTe9z3atsbzfI6PTzuKYkimCjwv7BgxgVWD2v9QVSV5nrFcLq3dcdN2/PXGGod1VOB1\n",
+       "V7dmcwVdpWwMuK5DHEf3Cjm6gHJt0OrtF7I0iVjrgNd8cY01lTNGb6yi67q2VM0woCoz5jMrrNHa\n",
+       "sOxsn5MoAlMjpcI4qoMq7TWyWmVcvXq1q85DFsslIGm6oPU8y7qNSHRMNttxRGGA0Yo48qnKnKDz\n",
+       "En/zzdeZjIZ2o/JtIRsGdvamOyOrd+K/ww8BhPJnf/RbmwHI2t9kzX113c7bOwwpdUuLpsVweHjI\n",
+       "b/3vn+VnP/Ez7G1tE3ZTZeHbSn7tz71us8NO+LCW5BdFgR3urz3IAawrX1mWtsISAs9zNxtLWZbU\n",
+       "nT/11taWdYWrm82GsTbgwnFo2obpdNopzyxe9sorL/PwIw/xcz/zM2TZ2mjH35jVrIda6/PwgR/7\n",
+       "2bectxee+bMOa78X5AxsBBXr1yKQXBQ//ZbHD4I/IIwCYA1TKaq6AsdCRrPZiovplLyouHX7kNli\n",
+       "bqXG8znzxYw4Tgg6e9HxaIAwmsB1uXb1Cv1enziKqJqa4+NTloslbec9UpYFaZpsbtiP/vhPkucF\n",
+       "vh9SVzW+FyA9K+tefwZrs6M4jjc5qZvuqrZDQdVqgiCkrhuSOOk+K3A9t0vwsc6AaZreS4DpPkPr\n",
+       "Y21fX7/ft9WuZ1N3iqIkTX/5Lecvyz6P6Gh7vtcNsc1auWsVwXXncZPnOXEUkWU5IvRZZhmz6QW+\n",
+       "61IUKy7v7TMeDvB9rwts6RLJXdvuW+c/e8060qPIC4y+l2w0n89p2oqqKfA9Fykki8Wc27duMRgO\n",
+       "efrp91M1NZ4XYBB4ro/R3WstKzzPparK7l4RuJ4DSm/+7fseWb6gP0gIAmsHqxqN6wRY98jG3jcC\n",
+       "tGEztxoOB7RtgyudDd9bSGG9VlTbDSVNR4VVBH6I0y3u90OnQEe3bAhDm/jzU5/8hQdCKF/90l/S\n",
+       "Nh31uMuVVVrjeN5Gou969j1IKZGOsX7xYdTh1eviTxEEHhgLxa3tXS1V0y7gnm/dSNeMqvVw3MKo\n",
+       "NhClqkpL+Vwtqaui6zokfuATBjZ1av/yAS++8AKDfo+qLIjDAN2dHztPYDOI/aFO5PF9n+VyueFI\n",
+       "rhfvjQubkEwXM6JBn6auCeKIv/jzP+fDH/wRHn34EXRXwTuewyovNk54a/jEdd1Nyop1AowYjUY4\n",
+       "2Ipt7V0dxzHWyN1CLIvFgiAIN2yPvb19hBDcvXuX55//NlVVcenSJa5cuYIxhvPzU6qqZracI117\n",
+       "AW5vb/OtZ5/lma9/nV/7j36NRx66zmq5tC1YVXaWAVYGnSTJxm7T87wHnqu6bdAYgi4PVAirStUY\n",
+       "vMDHdB++5znwgArccRy00lR1uRk6OY6D53hkWcFwOMDzA1zH5/pDD+P7Pn/xl3+OEJq6KTCmpVGC\n",
+       "KA7IshW7O1tc2t3j6Ogut27fRLUGx3XY29snCEPe9/TTfP2Zr/Lo449z5fIBOztbnJ9d0DRNB021\n",
+       "OEJycXFBa9aBGAH9fo809Te45Hw+ZzabURQFSZIw3h4jhB2CqdYghMPJyclGdRiGFpO/tL/HQw8/\n",
+       "bAfNsxlNY82flss5Z2e2a9jZseItYxT5akFV1yThg3n4168esMrt7zqZnnXCD+sbsjUYEgQhi/mK\n",
+       "PC9xHI/ZfEVRFOTzBVmRg7Z2s4M05uq1A1whEVqh2wbVtNAUFFWL6EIEHMehLuvufrCv4c7hEc8/\n",
+       "/zwf/OAHkY4kikPiKO70BjbIwu8GoDagwYZxqFZhtO0i1ou31SeMaNrKWlOE4aYYMEYQx9b3JQx9\n",
+       "q9EwEimsuMQySixcU1S1NS+bTrm4uKCuKzzXwZFOByOlhJ3qcT1fWuPiUkqUVtTN2lL3HqV43YmX\n",
+       "RUXVbZAPOsqmIg1DdLcxrPNyW6PxPJ+uQttsDEY1COyw1HEc/MBFqwaJzRew1b+ynu+uLeYwsL29\n",
+       "hSPdDftL1RWNskNcISyHPwwCyiLrjLm8jotu8DyXfLXCd11Ojk9I0pR+L8WREt+1CVjrHIAwtNRl\n",
+       "pfQPfwX+pT/73U2lfH+ihyWytyjdtRdRQF6X/MEX/oDXX3qZf+sXfpHt8QSlWk7Ozoh6KalvB4Rr\n",
+       "+lKSJBsf7PWiuKY12ay/HmFovQrazouhqupNosjayxdsyvR6MJamCVJaOuJyOcf3feI4thWNalHG\n",
+       "8nafffZZ9vf2+du/9G+DMLR1Ywcpa7L+fUlE601rbQP69I++Vej67Wf+dNPyr7uMdWW5HsDqruVb\n",
+       "lG+t4MfJn6BU0+HJfod/tthaR2ATQCwvPUkS/uhP/pjPf+H3GU2GbO+MuXv3LhqX0XCAxKYg9ZKI\n",
+       "tm2ZTWdMJluMRmOiOOX09JQoCNne2iKOI6azC6qipN/rsbe7Ry/t0zZdSovn44aWuWA7iWbzvuI4\n",
+       "7gIQ7EZcVhVxmto5RtNSlQ2TyZZVL2rdia8awjBgsZyjNdR1Q9o9ZpWtbDVrbDW/XK3A2HvcD+Dk\n",
+       "5Iz9/Us8+sR/+ZbzVxW/g9OlK/mhZeDUdWvTZlpbhdd1w+Htu0RxghSSyWSbCmXNzaTA6AajWkaD\n",
+       "Hk1Zgrb5lGuvjwqN232OsmOpBEHAzZs3efnlV3n88SfY2tqmyIsO1jY2iUgppICzszP6/T6j8diy\n",
+       "Q4x1c1TK+ubb19puOpz1daS1Jgmj7hxamOPw8E3iNGB313acURCjWkHTWHqh1lYO7nYMLmCDcRut\n",
+       "aLuu8LsXIUFVlbabC+zQ0fWs73sYBh1d13TdqER1Zl5aGX7ip376gRX47/7O/wlKITZ+LC5SOmiw\n",
+       "GZX6nk2HQeBKGwKjlcb37canVNe9Cvt8Aus9JPC6e8o6kgK4XYZAh9J0j1dI2YmQcksL7fXSbq6V\n",
+       "E3Yb2Wq1su/NtRAfxuAIg+qu2fVMzL5LiXQcnnr/x354K/B1COpmaAmbqbDrujZJ3pFcTKecXpzz\n",
+       "5b/8Ep/+6U+i65Yyy3E9j4MrVyhNS6gdFvOF5aVKiWpaksiGF2ilbEXg+8RhxHw14/DOIfP5nMl4\n",
+       "Qpr26PVS8vx4o5wqy5Lr1x+i3xsxHI5JkpSzs1PquiGKAuI4Io5DZvMptw9v2eo3DsjLkme/8Q3+\n",
+       "nc98hsuXD1iuFoRBgERYlaRNOwUDnut1vshe11IGG+bL9x73w0JrGGU92V/DD0opu3iVb32849oh\n",
+       "yVr2rI31bBZCIFyXulJMJhMuLmb81m/9Fq+8+hLXrx2wyhZMT09Io4BZVnJ2foxuNaqtef/T76Ms\n",
+       "c6I4wvU9zi4uCPKCK1evoOqWo5MTqo5GNRrbUOcXXnyJqqz46I9/1M43lKE1Vu4tHckg7YGBsiop\n",
+       "iiVFWXTdiSSOo85vwzIpWtXw2muvdfhqyGQy6qCakNRofC9gtVrRti3PfvObPPbYY6SJlbKvufxr\n",
+       "ubkxBe96+BHKqn7ryQNcR1AUOWWZITKxmYusq8oir3j55ZfZ2h6SptZOQWlN5Ph4nt2wk37Kar6w\n",
+       "TBPPhjlorWmUQdUtDU3XkUr8IKIsS27deIO7x6d87GM/2eV1RhRFhut11Ww38ErT1ApFjP1ssqLa\n",
+       "mE6tq0Q/8PCSiCIvcN0EMChlF0xVNx0zp8JxOlGV726YTnEYoZVAiHX4So3SFl5cR6NZyCPEc1xc\n",
+       "V240E2t4xM6DLHtjPrfZqR1PcMPEWhck6/AHx/OJ4+SBnwlAnCZ4woYnCyFplUI6DkIbVBcIAQIj\n",
+       "bGUf+V0GgWOQEowGre8XrQsczwVj503ScZGhi+e53e8Ct6NKNqpGqZYsXzCfTzEGBoNBR41uyFZL\n",
+       "JpMJ6+zY5XLJbDaziUm+T9tUtE2LVi15rjY+T47rAeYdSQ3wQ7CAb3Db+z609R/HcSi19VbYvbTP\n",
+       "P/vNf85HPvIR3vPEk5iqYXp2hpaQHdZ4aUwfn+FwuMGS1+3gsEvcsNWHolY1g8GA8XiElJKqshP3\n",
+       "07NTHMfh2rVrjMfWMnK1yrh794gbN25ijCFNE8bjUZf+oTk/P8NxJVeuWPObrz/7TXAEv/Zrf9cq\n",
+       "GoscPwio64bQ9xFC4rl+Jx66123cX6W8HYTSGo0WNmauVi1N59rmeR7LfD1AkRuHxu89qqq65yvT\n",
+       "QVRSOmijaYqCwI85PT3lr/7qq7zy8suMRkOWqxlx6NOqFtdzSBI7VPQch73dXVqlcH2PNIgR0qFp\n",
+       "c67uPcThnTsYJdCq4cknn0RKydnJCS/ffZ3xcMje7h4vvfQScWyrlEtXdvEDm/yyXM4B20lFcUCv\n",
+       "HyOkRHXVXLYqGY3GaG2sEKtYt82OFe/kOa+88krnzWHdI8uy5IMf/OAGprNKTEu5XHvUaCXx4wjn\n",
+       "bZzvHAFbYysvr1pF07bMZ0ta1VKWOdPplP39fSaTcbcQWWGJaAy+b8Uc+XxKVeZI0We1yizVTXpI\n",
+       "6SMCj9SPiJR9P1prAt/j+eee48knnqAsM3w/5PDwVue2VyG0IYpCHN9nPp12sWY2XSZMYmvj2tQb\n",
+       "TnKer+xgrts4vA4bLsuKnclW18VBGITkxZxldkEcjzk/P+fCXGCMxPeirlO23cjag2VtPqe1Zcys\n",
+       "7+91hqstLhKkdPA8gef59HouXmCv9/Xwcs1Kk67XDXNX5PnbL2RVVeD4AcZoWqVxO2Zb3bS4Qna2\n",
+       "0XJDLaZzT9RabRgxVtAjWYddCByEMOi2QQor9KkrK0SylrF2jVGmQQhDlq8Iw6C7rqwEfjwcEQRh\n",
+       "F/AQMZvNqKsKBzvotDYGDlIIwjjeXIdt29poNekQBg/wVL7v+IEv4GESQqeOFAh03RJ4PqptKJqa\n",
+       "OooJ/YC//PO/oFxkTB4eMruYM+oPuXL9XRgpqVVLXpW0RYEWDkbCfFWQ5xdUVYPvn28SdzzHVgyn\n",
+       "5zOLt/Z6tBqSdEAUW3N94bgc3j3CdT2GgwH94Ygsz2hqi6U3pubi4rzzNnao2ppnn3+uSxj/ed73\n",
+       "3vcCoOoG33XQSnWLtyAMLNskjC0VqerEC9pY7qsylkL2wHPlB5shpvStcm3THgpnczGqt6H/r61D\n",
+       "bfCvgzYKsN4y9uJv+MrXvsY3v/0tRtvWQ9oLfTAKzxh0o1CrnN2dbfb29rsE7ojRZIs/+uMv4ng+\n",
+       "QZTw3AsvYoyhF/fZ3ppw49YhTVeFP/auR/EDnzfffIMbN25QV1XHX7eVy5Url7l69WrHDrCbe1mW\n",
+       "tI2FEqQj6Q96KGXjvdZp7I5roRLHcUhTm224XC75xle+wrVrV9npjMh82YUDO5DnBaHnYtqKrFgh\n",
+       "HWsolWUZ/QfcN3VbIxtBoxpkF3Hm9/sYA0UeUK4KpIFsZoVAUgqCMCT0fMqspJ+kFI6L50gLpegG\n",
+       "oe0CJByXum6JwxCBocotJbZWDYN+wmhon0dIh/3dPRsMXWbdIiyoG4Xn+9YB0bMeKHt7O6AUpaoI\n",
+       "wgA/iikdF600OtCbzctxBVHfZ7GYYrSx/OZiRRyH3Lx1h4cefhfDARssWCnLFMrzAq0Urck3s6I1\n",
+       "/Oi6VsbuuS5xkmIwnUajpm4aPMfHYLttVSvKDn7yXB/P86lbxahvdQJJEiLE29vJ9pMxqipwpUPg\n",
+       "2yq+KUv8wF7vyigMGmE0AgGO5awLV6NpQWmUbhBCgtb257WwcY3SMmmksCrMuq5BCLRSG88V3/cI\n",
+       "XEGLXYjDMMJ3faq6pWlywjBCtQ3z2Tl1VXFweZ8ktqpMYwyu56INtF0cpOu6G9dVpd45E/MHvoCX\n",
+       "Zdlle9mKO/QT6rICJEkc40Q+h7dv8zu//dt8/G/8TR5/92OgDId371JXLcJxSHspcZoShNZvo6ob\n",
+       "/CBgNJ5sZK2LxYI33rxBnq/Y3t5mb3+Htax+sVjS1C2ua7FfEB2uqjg6PiZObfvWG/SIoogXX3qB\n",
+       "OI4oihwj4Etf/hKu6/Jf/IN/QOj5qK6rcDsZ+bqiW1d9VVXhKsu2sM9373twzxr0e4+6M/laS8yN\n",
+       "tgO8tRdG1eULvl0FueaZu65VLLZ1TeD71mrUkTz/nRf56jNfZTzZompqlNG4QqIahSMEi8WS69eu\n",
+       "s7+/z9Wr13j3ux/l5q3bDEZjfvnv/DLfeO45jk9PCcOI+XyON/I5v5hSlzmB6xH4Pm+8+SZlaX0z\n",
+       "rl27xkMPXe9ofj0bcVYUHB0d8ZWvfIXRaMjjjz1K27ZcurRPWRaURUmtbc6i6/j4nofWsFotbUch\n",
+       "BFlecHxyzHK55Kn3PEEcx2R5TuD7rDKbEVk3NY4riaJ7NghGtGgNURTxIBAlisJNhmrbNhtKqVaK\n",
+       "O4d3uHr1gK3R2MJ3XcXpSIemaRFSUlQlZ2dnCGFv2n4vpW01seBe+rl0aZsGpSw8lhU5/w9zbx6k\n",
+       "yX3e931+fXe/95w7szN74sbuAuAFEOAFUjzEkKJIKiIpKpQtq+I4FeeQK3EsV6pUFqVSlSJKtiU7\n",
+       "lVCmJMuUYikyKVmEDlIEDwggQRD3sYu9d2d2zvc++v7lj1//et4FZ5dOpVxUo1BY7O47M2+/3U8/\n",
+       "z/f5HqurK6BuEfrdnqKKGia2Y2Ea6r5Jk1RZKhsKdohjZbI0M9vCDGE0GjAejgv1oyoKdmG3qvBf\n",
+       "k8D3lKQfSPIM13E4e/Ys97/xDQyHQyqVagEzSIKgUuDYHnEhk4+iiDRNGAz6qOCIvNwrqc48w/Mc\n",
+       "MFSUout5WKZAmBZz9TpIwSSMyXOJ45m02zulCCbLMu6/QQ0ZjoYEtksSK/sKIdT9IFB0Ptu0EKY2\n",
+       "nANhSkQRESHzDMOQOKbKq0yTBIlUk24U43oOYTgmz9VORttr6ElDRQU2qQQ+0lbkCf2AsiwbQ0AY\n",
+       "TmjvbGOZBourB6lWfMJiKoE9Qz/DMIv3en3E5M2OH3gBrwUVZW1pmERRwdMsMuykVFDx2TNnuefU\n",
+       "vVQrNVzPYzIYcuTIEZIkZTgaEUYRURSWjIQ4UoKGzc3N0vdC0f9mmUx8JDlnzpwpvTFarRae5xNH\n",
+       "SUkzHI/H5QYdobrj7e3tYjRWnYNipLzA+973Hk6dOlXikdMOi6PRqIimsqZufnUDaQsBDfloxsyN\n",
+       "jml6pU5hsSy7XIR6BYtA3MD4ZzKZlN/TtCw8IyDLcySQRgmPf+sJms1muYQJPJ/xsAd5TpIl3HnH\n",
+       "nczOznHvvffieT7D4YiFuXkuXr7C8Vtv58Qdd3LlyhXi8ZiV5SXCMCTPclZWVmhUq0zGIwa9iIMH\n",
+       "l0tl69raOhgGFd9ncXGROFbCqPvuu1sA0v4AACAASURBVI9GvY4QCu44e/Ycs7MzVKtVfKGc77a3\n",
+       "27z86qtFUVSTFQXTYjIZc+LESSzPpj8eKHrpOKJaqRImikZnGAZSqA4tSzNymWAYis4Y7/MxqAKh\n",
+       "aaMm9XpAo15hbW0N11XxaP1er6SX6c9YFDBZliUcOXKESThmNB6RjSUStWuxLAfX9QmL7yEMgQVk\n",
+       "eZEdaZsYhkmtXsGxPSVBlwmQk2WpsnAVJqZlk6Tq9w1hcf7sWZqtJoFfIQiqWKbJcDRS2KrMSLMI\n",
+       "QxhE8RhDGIjCJMu0TJAJy0sLXLx0gePHj6lu33KQhV95luUk6YgsT4rrz8Y03bIQ7XmE5/oCZjJS\n",
+       "C+Q4jumPO0jU9Ol7AY6roBnTsHEsswxYgT1tyA3uDEajIhpuEuG6Doal6oB2ORRCfRamZZGkKqmH\n",
+       "HLI0Ic5TyHMFxRdMljRNEBJGY5XHqXNlfd8vbTSmJw4hDJJCSW0VRl6OayOznG53RBzHLCzMldFx\n",
+       "Upp7rJiiYdPQ0TSU/Le+gHc7HbWMsz1l/DOe4AY+mczJkYTjnL957HHe/773cfDAEsPBgCSMCEOF\n",
+       "EzaadeYdl1zmrK1dUdxhv8rhw4sl1Wxzc5Pt7U1FMQxcDh48yJEjh+l2u1y7do1XX30VIdRS6vjx\n",
+       "45iWwMhVNt/W1harqwcRhig2/Wqb/5d//hesrBzk0//sF9htbxMXS5xmva4c3QrRAlAKjPSH7nne\n",
+       "dTQ+/f95ntNsNm+4uNCiHT16aQGK7go0z1kvhl97aMxT25NKKZmEIa7r8Su/+qvMLcwzCUNMU00P\n",
+       "/X6XZq1Gd3eH40eOEHgB9XqTxcUlJQyJUzAN7jl5iu9897vccdddfOyjH+W3fvtzBK5DHIf0+32u\n",
+       "XUu5EIbYhsl999xLkiR0en3OXbjIzMwMFc/HECbPP/c8w9GQU6dO0WrN8tILz/PKK68wNz/Dgw8+\n",
+       "WLAWIgbDXiG7j7nvvnuYTCY06w2SOGEwHJAU+OtgMMDxXHY77fLB5eUp43BceI24CCkwMcnJqfg+\n",
+       "aZopIdU+d0atVi2tU8NwwqCvPK5d12E06jM/22J3t1PSOzWsECcJlq0SzHe7HVxXWTyMx2oJPzu3\n",
+       "QBwr5WYUhorlkGujNI80ikjCqLxeyFM8R6kToyQsBF1GSc/LpcD3a+RZwvLyEi+++CLf/e7T9PtD\n",
+       "br/9dg4dOqSotKZZmIU1S4sHUYhfsixhPBhy4q47+LM/+xLHjhxWPiN1B9O2yXOBMFHGTqZeaqZM\n",
+       "wknZoADlfksv4OuVqlpW5irwQfsFjSdjBoOeMoFLMrI8w3KcsrG5WSFrNQIMoTDk5kyLOI4YDYb4\n",
+       "votZyNJFscBMkxTHNYsHdqqWp4YgyXKiSUiaxgXdNmM8GmNY6p4KQ7V30ROzZruAgWkqrN6SOcOB\n",
+       "ahbq9TqmIdTEWOwkHMdmOBwWy1CvpFLC9UV8OsThRveyPn7gBbxZbyjAHmVviZGSZBk4FoZp8m9+\n",
+       "4zc5fuQYhjCJJhG+51Lx/D13v/GI3fYOlmVx4MAiWaYWMv1+l6QQgrRm6qysLJUYVhhO2N3ZJUlT\n",
+       "Dh5c4dChQ4wKFVWneKAYhoFlG8wvzNLr90p60enTp7Ftmw9/+Ec5ceKEMqnyK4SR4iinSVIWcJ3+\n",
+       "HkVRiV2D4jpreqNepCm5snHd33vtoW8C3/fLbj0rvD70AshxHIRhsLH7va/3vL0c0dF4zHA8wvN9\n",
+       "vv3E41i2jUR16Y6jYsB812P96lXuuPVWjh45wv1vup9hlPDiCy9z5MgRXFulEw0HQ44fO8qw16NS\n",
+       "r/LRD/0Ir5w+zU5nh8B3ESbESUxQq9PpdXFsB9OyWFlZQQhFYdtttwmCCkePHiOOE7785S/jex6v\n",
+       "f8MbmJud5dr6Bt1uF893qTdmsW2T2dlFOu02ruOyu7uDZZpYpsCr+QR+gGsbnD7zMisrK2r6STMm\n",
+       "4zFB4GEXRUFKyNMEpGQ0Soubav/zPx6NQIjCs8ZUZkiOTRInzM40OX36lSLYWS1US6VsQQuk0Bnk\n",
+       "eYZhGWRS0tneoV5XeP5wNC5yX0UhgFG+2jmUMYNBEBDHym/EsIxCbr3nRy9ziWWapEmMzFLW166y\n",
+       "duUS73z47Rw+fLQQNaWFiMkoqJtpAREoGp3qRHMaCy3StMblS+cxTKXYjKKQLAc1/Qtsy8Yw05Jp\n",
+       "Uq3UyoKpKYE6ljDPc/JQqWAVvTAljlX0m2kIZpr1opApvnguTIzCLvdmdOc43jN3G40UldR2TBWY\n",
+       "EMYqjUcor21TGIThnsw2S1PCcEKWZ9iFSEsIyMip1SvkZDRbDWXoZuiOWcEdKs1InXMJas/gOEgJ\n",
+       "SRzTLQLF69UKrVaDKIqKWMa05PXD3nSh3+M0oeH/d6jxf+7DtixknKuLHInr+iQCeuGIbz31JNub\n",
+       "23zkQx9WF+54QqfTYdQfMDMzQ6vVotGo47g2mcwZDPoF9cin2WyWGGWn06bfV3ztxcV5KhWfNJGE\n",
+       "3R4b1zYJKl5J/Wk0GoxGI7a2tgrhSIDjqLzDL33pS3zkIx/hoYceVIu1VPFh0zQpg0u9Ij5KU9QU\n",
+       "XzYv1Z26g9OdiX4Q6dF8NBqVAbmvPcIwLB8u+nW6eOt/9ffd7xiPVeeZSQWbBJUKcZJw5tVX8SsB\n",
+       "/cGgOG8TLNNkZ3OTe0+eREi45+S9tHfb5JbDbXfcySsvvcTr7r2XjY0NgiAAabG2vUazWWN+ZobD\n",
+       "P/Qunjv7Ck8/9V3OnbugaGjVKv3RiKqvzCk8LyhglkwxDmyLtY1rtNttFhYWOXRolY2NDZ749rfp\n",
+       "tNu8+c0PsLKywtr6Os1mk06nQ61axXEt0kSp6wwD2jttusVkUg18uu1doiji4PIyeRpTaTVI4hgy\n",
+       "hZmnmYIAXC8oWEkR+/U9UahUdZVqlSRKmZlpkeeQWAZLSwcQQjEZ4jgkigplLIIkV0Tzai2gUq0q\n",
+       "IdVEpdY0Gk0GwyGN1gx5BnEaYZnFQ9o2SJK4pOVdW19X+Yrzi1QqVeI0JpeKL24YBRUXtXPJsoTh\n",
+       "aMD58+d405vexPLyshIGReOCZZFjGsX1aKqgatPU/iRKDJYlMa7nUq14dHZ2aM7OYVsuwrDJMslg\n",
+       "MGQ0GhJHe4ZgqvFRylA9cdqO8shX2Z42CCX4yjALNotHnilWTzgJGY3Gxc/lFUIbB9u+calyHYv+\n",
+       "YIJpGYhc4rpKKZrECUbBbkPmZMIgF5AJ5QWvCqWgVglKVopWQiIVVJlJ5TujmCoaFtN0QlFg6Op9\n",
+       "TgqbYG08lmXKgqNWrZQTuYJRrRKS0fe+Ltp6IlGLdvP7QEd/Cwr4ZDxBSIMojpjECXbFJyYnThKe\n",
+       "eOJbPPz2tzPo90kjdWEfXlktseAoCul2R0WX41CrNUpxwfb2pqKh+T4LC3Ml1LC1tYUKgnWoVqtK\n",
+       "Rp0ney5qcYznuTiOhevWSZKE82fPkmUp/9s//TkWFhYYDofUC3l2XES/pWmGX/gYl91QQVfTF7Ye\n",
+       "jcIwLAuvHuX1QkQv1PY7NDVSCy9c1wVE2TVrCOXGLBZfpbwIJdc2LJPvPP0svf6ASiXAEIrnnEQR\n",
+       "eZpy+NAqnudxz4mTXLt2jcOHDtOP9sbJK1euqGiw4oFx9OhhTp8+zcrKCt1OxNzMHHfeeTcPP/wu\n",
+       "nnrqaZAwGYc0Wi3lmpdmgKECbwWlEVOt1sCwbZ55/gV2d3Y4sLjAfa8/xm6nyytnXsX3PY4IOHBg\n",
+       "idFwSK/fV0tTy8QyDQ4cOECn02VhfoZhGLK7u4ttmvQ6Xebn5pgMR1jF2G/bFo5pYRQLbQ177Ode\n",
+       "evXqZXrdHsPhgDRVOOntt9/O7Owc3W6P1ZVDZKnyTp+YY0zTRpgGwjQVDTJLiCLFmR+NxoU9Q8Jo\n",
+       "NOLK1TX6/SF5mtFsNgpfEI/xaITj2IzHIyWyMgz64wm1gmdeqVaKrrmwZ03SIpBYvZ9Dhw5x9OhR\n",
+       "0jRWnap+f6jOVTcStm1eZwuBzMtp7eSpk1y8eIFbXY8sGyjOtKF40rbt4BaFaboZ0ZNlmqaMhiNV\n",
+       "2AqmlerKE7IsRSCLh5AqjGbpIupiC3tPtZneeDfU3tkiziW1WhUpM0bjCZZ2Z0RgmIrPnWXKM92y\n",
+       "HSzbKeEiDRnlBX/cKKBKVbQ1rHQ9xVlxzgVmEeI8Ho9J8vS6ohsELr5rkyQxiu4ulJNhcb701Dzd\n",
+       "lIWF5fL079/s+IEXcGmglglS4lcrRAWv+OWnn2F+dp43vv51yFxFP127do31q2u4jsPc3By1WpVW\n",
+       "a7bkmqZ5VJL/pVQk+MFgwGik7CFnZmaZnZ0nyzK6nX6BR9mYlrrY1VhJadr/yCOPsLCwwA+/772s\n",
+       "rqwwGAxKrFnn+OkLVX+wtm1e9/TUF7O+GbR8WP+5xkqnR0QNvbz20B+6/uC1eEUXdP11b/ShJ0lC\n",
+       "GmUKwxQSMoOvPvpVjh07TrvdLqxsR8g0xXMdGvU6D9x/PwcWDxCOxjz73HPcfd/rMAyDRqPOpQsX\n",
+       "mJubKx86ijGxymg85vjx42z2+/T7p3n44btp7/b4zGc+w8LCIlGUsLS0TBInSvBhWGBIDKDm+VRr\n",
+       "FQZd5cMSVCt0+n02trfIsoxjx47RatYZTSZ86c8fYXnpAEHglzCJBC6tr5GlGdud3UI4JZmdmVF5\n",
+       "kYOBUkUKg1SmhGMlEjIdB3dqibTfce7sq9x5553cdutx5udnSQpPFr+QsucyYzgY4rqOilAzjWIp\n",
+       "lhSqPYHrOEQTpQGI45hOp8O19U0WFhaZnZlh0B8W6fQKn7YdlWOJYTKOlCy+c+FKCVPMz87guh4L\n",
+       "Cwuqy7YsPLfCYNDF95Vvjy4Gjm2WTAqEwLZUgHAS712bonh4qVxVQRjG3Hr8Ni5eusTdlkksMypB\n",
+       "QLfTI6jWQGYIaSvJ/5SffZopiq1rO3i2StzJ8wyrYpPEkaL1CTCQICR5noEUJaSWJCkmIHPNe7mx\n",
+       "pNx1LPIkwjQyTKsIJTfNItUd7GJHkOcGWWYipbLCBSXgkQiEaWIaexF2+r42LIHMwbKcQmRjljut\n",
+       "LMuxLBUmAmqaywtoqFapFIU4xTCsPVaSaZKmOWkaludcq8+nXVh1k/q3voCnBSfZNC3COFb0nWHE\n",
+       "Y1//Bu9817vp9zqlJeny4kKBISrJ6sWLlxR+7PkEgY9hu8pz1zBIkqw0l1dhxDHj8YSrV9eROaXf\n",
+       "r6I/hSBUYd3Z2WZzc5M8z/jYx36cu+66C/KMJI6Zn5slDFX0Vb1WY1ywOgwhsGzVLSRxVJ54/dTW\n",
+       "Pt9aTDFt2qU79Gkxz42EPNNba/2U18otDZ3EcYxh7v+hu66LKVVRHEchj/zFX9BstmjvtsnSBJkb\n",
+       "irNc8NaPHD7M4oEDDPoDXNvh6PHjbG1tsLCwAEhWDx/i1VdfZXV1lUyqBWmOxaTT5ZVXz7J4cJVB\n",
+       "f8inf+EXefb552m2ZonihKeffY5qtY7vVcilwDQEUZqUC9rJTpskjnD8gNbcHDs72+y02+RpyqVL\n",
+       "l7l0KaXd7tBsNomzlDuPHePChQv0B/1yuVuv15mdm6fVajAcDLh86bLqSFdWcH0fyzIw2QsOieKY\n",
+       "0XC4lzvZ+N7z92M/9tHSC340HKpOyrJwbItWo04mJZ43C2R4vl2EBSeFWEphyGmqzKLGgwGvnj5D\n",
+       "vdHkjjtuYzQcMxqOEAK2tzfJMrVMzYuRPo5jkrQwZnI8kkTR37Z227TbXfLseY4dPcqxo0cxTUGn\n",
+       "vcOZM69w9Mhh6o06nmtjGntiLu1ZbRTLuJIRFcd4nirQlUqFOI6Yn1/k8cefYHNzk6Wlg3iOw+xs\n",
+       "E6Sh4JocwEJP+3sNiUD7ecs0JU1jttY2SNIE17JwPQclmy/or8IgzZRPOUDgKBVpnueFZmH/I0sz\n",
+       "Kp6HWyzojQLKKlxkydKMNFOwiZr29ha+Wn9pCOXxmRQNl2EIzEKGn6YplYpbUDAL7rxpYpqioEmG\n",
+       "RaFVX6tSqRTNXVbK87Xzo2naxX2bXAeFqmSksKwButH7fscPvICbtkUmYRxOqDUaRFHCNx/9OiYm\n",
+       "KwcOULHMYvEYkWaqMxVCFMnuFSjGxiiKC2WeJI7TovtWIbNK8lzFNC2qlRpJkpEkUZmckRT5lL1e\n",
+       "j7m5Ge6//35WV1VHNx6PcUwDmasNs2VZyCxjUGDqQkKeq0DTPM/JUcbtmkKl3Mmi0mVQy92noZLp\n",
+       "gqxH2v0OPeLDHpwyvfBQ/tQ+whSwjx2qngT6/QFhGnP5yhU832fUH2LbDjLPCMMJzXoNWfhE7O7u\n",
+       "Uq1UkVIoQYZpcO7cOe6+8y4GgwELBxZJC0WbYSlTqUOHD3P5yhV+8zf+FRtbW4DB0oGDtDsd6o0m\n",
+       "87PzbGxucestt2IWHZcwDNI8V/i8st0jjEK2L19mOOgTxhFzs3P4QYDnwMxsi/5gwIWLF+kPR0wi\n",
+       "te1XylCXZmPIc6+cphrYLCwssHRgiTRJaQ967PY6LMzN4xUeHnEY4fse8/PzdDqdGy6Odnd3aNTr\n",
+       "kOdEkwkyzbAdm52dHcVrdlX2ZafbodlskUuJMCS2Vci7DWW/m2YZ58+eYXV1hdbMLN1OnyxL8D2n\n",
+       "DMltt9sIy+TK1avkuaTZmmFrexcJ+EGNRqPJ7vYGQmaly+Mrp89w6fJlAt/n7rvu4IM/8iHyLOHy\n",
+       "lTXuuO04ScFY0X4l0z462qLWcRy+8pWvEgQtXNdWDZEJURgzGYVsbqxjGIqiVwmq5LkEw8QoAo21\n",
+       "DkGLaPJMFU3HNfD8gKDmkeYJVglFJFhmgyiKkbnE96vkWcZwMKBW9yiee9xkh0klCJDkGLLoplGv\n",
+       "yQtXP2UjoHz0JRKZ5eRSdd/p1ASsz4GCkFTEogqr8AhDVXuq1aryRMq0x7leZuZkaU6r1cAwFVyW\n",
+       "56KsA+pehTyPsaaMqvT31If+XFTcnPG3n4WSSUkuwa9WmEQR7d0OL734Ig+//WGMHHZ3tnFcl1qt\n",
+       "VsidZWEYo+hanutTqVRptRyiTBnLTyaK4XH48BEcx+LatQ2uXr1KFMb4frXocNVT78yZM3S7XU6c\n",
+       "vIu3ve0tHDt2DLVtV6pL11P5kMMiZFk/NbUgB4yy6OoRKMvSfccjoKQRAtctfqY30Tcq4JpuOB0/\n",
+       "N/1rbahv2vtj4GmaqvFP5mrKkJJcSmVrS06UKDZG4Ae84Q33ceKuE+zs7HLu/DlWVw6T5Rm+73Lq\n",
+       "1AlefvEllpcPYts2586d49gtxwknE+YWFvn6Nx/j9z7/eeq1Waq1uloMSck7Hv6hUgW6u73Dk08+\n",
+       "yWxLQRtBs1787I6y1TQMAlMoloKUJHlKf9CnVq8xHg/IpeSBN7+Zo8eO43oev/brv45hWuy021i2\n",
+       "TXfQZzQeg0w4d/kK1UoF0zCpBgEzzRZhlDI/P0+tWiUFusMxa2sbxRS2w90z+1yracKrZ84ofUBQ\n",
+       "ISjk47bt0O52lNilVim6sknpUZMXLAXDMAkqHtvbina6enCZ8Thkbk5laD711FOAUlGeOHGCxkyL\n",
+       "jywvIw2T8TgkzSWW7bK2volp2cg0JHAthsMh19avcf78Oba2d3AdmyuXL/HMM/Pcc89JDiwucOny\n",
+       "ZQ6vLgOUbCc1ziuP9PF4zObmJufPn+e97/0AplElSaOCz2yyfGCJF198gcUDd1INKiVW63oeFBCe\n",
+       "SlEvEqOEqrymuefdIwSIHBzDAZEjc20+llELgoJpFmMKweLcPGE6Ku+Hm3Wjmo8NlFh7mioISBgm\n",
+       "hgApNVyZI1PAUPeMzBRdWd9X+h7SnbdfqZDnGbOzMyUcq+m4WsyjfgYT17aVcCuOyWWq7kORF/TT\n",
+       "nCTJmEwiJlmI61rle1NOktcH2+jm7Pu5Ef7AC3iepmBZjMcjojjlueeeZXZmhuWlRdIkIQiqGIZg\n",
+       "WBj/+75f5FVaxcZX0h/21BIqE9TqNexM2T3u7rYV/Q/wAx/XU0u/Tmebi+fOUatVufXWI9x3773q\n",
+       "dbZFHE6U6ZNlYhZBs1ESlswQzf/UbIRcKvaDUnWprjXL9mhP05j8NDtEXyzT5P3pTmC/Iy0Kvpo0\n",
+       "FPYm80yN1Za6GQf98Q0hlLzgwpqOzatnXqXieAzHI9WzGIIonDDTrFOrVFhdXmE4HKqQhkaD4WhC\n",
+       "LiXbWzuE45Cl5SV227usrK5y+913sLG1jTAsfvvf/QFnXj2LX2li+Q1a8weYnZnF9RziKMIPfMgl\n",
+       "hw8d4fbbbitG8JxB1KPT3mVjc4sojqg36ooK6DvsrF9mMOgiyGnMVvjAw+9hcXERWzN+8ow3v+F1\n",
+       "PPb4E8w1agxHQ0bdjjK8yjImvQH/4nc+w5VLlxiPRvz1V/6ax597HikEQbXKgaUDBNUKTcth+eAK\n",
+       "tNv7nr+dTpvF5UXllR1HrPe3SaIUUwjq1RoL8/NYhsFkNKZ3bYdRX3mRGxWfequJkJIkTtjd2uWN\n",
+       "r38j45GSyw+HHbIsp9Wos7G1xbsefgutmRnGkwiShDiZYAsTyzCwDMGxlQNgGAiRY5ATxw0Orx7k\n",
+       "oQfvByQbGxv0uh2uXVvnb554go1r1zh69DC33XKce+45xYEDi7TbO4rOaAosx2Lr0haGafDD7//h\n",
+       "AgocKrELKUkMnq9UrM1GU1EVbQc/qBR0RiWssiyTPDcoel5VsIUOJi7sYwvVtX6ogcC0LCUjJ8N0\n",
+       "FU4f53EZEYi8WSImhdy9CAQuIFQyAUJ13HletPEUakdLSesRql83JAjDVmHIgGEqy9wso1gQC7rd\n",
+       "HlKqGEJNC95b2iYqvCFNGY1HmKahJspUFePJJCoaLVFwwCmndA2VTE8A0w3cjeBUffzAC3i1WiXJ\n",
+       "JIiURnOGZ55+hve8+90gi4Ty0ZBWq0W9rkDJMAyLNBOmONHqv0Q5L7/8Eo5jq47dUZhYGE3IpYIH\n",
+       "HnvsMVzX5WMf/SgrKwfLp10cTZB5kUQ/ycoTKqUsjGwKz5J8KnQ5jr4n5++19CC9eNQ4uF446iXm\n",
+       "dPHWXcaN3AizdE/8o7+W66ivjVQ+4Z7rgoDuPl9CMSFSakGFF55/gVajgWvZCNuk3+tRb9QZ9Pus\n",
+       "vvGNhW/4GNOMlb+5oYJgF+YXGAwGnD1/llOnTvHq+bOsrB4iySX/9Of+CbffcTdetcHtt92JsALq\n",
+       "9Tr9QRff8vBMG0MYWK4FApJUkqHGeN/zsBYWOHTkCC+8+BL9wYDN7W1qgcfG1jU+/uM/xp2330oc\n",
+       "hxyozKkJaDzGcSzlCLmzgy0gTyLisWI9OLaNNC0uXLrM1to6Nddnvt7klRdeBMOg2qzTHw7pD0ds\n",
+       "97p868yrGIZJo9Xi7nu+9/zVZ1sIy+Ty2hWMUUI1qOAZJgsHDtDr9QnziO6gzze+8RhBrUpv0Fcw\n",
+       "XLdHNJlQrdZ46KG34Hke8wvzDAYDJS7zVH5bnsbMzTXY3dkgjsYsL6/SbbeZnVtUS/koIhpOcDwX\n",
+       "YZmMxyOyLC3Vxrq5mJ1t0Wo1OHrsKEHwbs6ePcvXHv0q3/ybJ/jLL6ss1k9+8uOYhkGaxDz55JN4\n",
+       "nsebH3gAJIxGYxVswN50mOeSt7/9bYwn4+K+U5YTGuc2DLPYP1jX2UJcz3EWSumpGR1TeyLD3ivR\n",
+       "urTLTC019QL4RockVyHGRQZAnudkxfJQlt+7wJWFAKPA6ItoK4EAqYK7BaJk9JiWXbxWLVmVgCss\n",
+       "GzHtvNhoNK/bb+l6YFt7976mGup7Xv+saZqWHH+dPjVNCf5bv8SMo5hxFONXa3z+85+n2WpSrdXL\n",
+       "k9CcnSGMIka7k3IR6BfeJFEUMZyoMNwoirAsl5lWo1jGKA74+rV1RqMBUuYcPnyYT33qv+LY0aPE\n",
+       "k0lJfdIuYNOKr9LO1lIm+VmaXQd1OI5Dmmcljui6bpmKo9kp07/WWNY05jhNL5wu4PV6fd9zNf3B\n",
+       "AqXz3PQyRMmwb9CBF9/v6tpagbUp8YPhWpiGQRxFpThpNBoq/44sxHFdqo2GEm3InErV5+TJk1y5\n",
+       "ukaawV995Wt88U/+IwuLq0jhcsstx7GcAEyLcRhhmi7jMMaxbRzfQ6CUb3leJM5YJkmcAxaDXsTi\n",
+       "wkFazZAoGtFt7/D6e17H607dx2jYo1lt0mnvYhcueO12m8FoyAc+8AF+7Z//c3a2dxDFZNMfDIjC\n",
+       "mNbMDJ/73Oc4dHAFCkhLCpiMJji2zcbaOq3ZWd7wwJt4+aXTjG6ghD1/6TLDXld56VTqVIKANM85\n",
+       "c/Y8OZJRHDKOI1730ANgGXhBwGA4xM8yNtfWWVxcxHUddna2Cp8PTbdTwppqLaA77BCGY1zXYzgc\n",
+       "4PseSTQhzWXBVXeVtUMa43sOAru8JuI4IksihMyxi040HA85uLTIJ3/iE3Q6HS5dvECv1+Wzn/2/\n",
+       "OHL4ELMzLZaXlrnl2DGiUHvMWGVToa93VVhUzNvMzAxhNGE0GlJvNMilvE4XMU23g+sLqO48r7sX\n",
+       "ZF5mVU4fprieFXKjIy9EPLCXKaDuY8F+MISOZ9Nfcg9eUR26MneTRRctS3hmOiRGCKGmq+Le22OY\n",
+       "pCUxQdcQzTbTtUJDMDpaTp9fzf+2C0KEfi83O25awIUQq8DvAop2AP+nlPJfCCF+HvgZYLv4qz8n\n",
+       "pXykeM0/AX4apQ3476WUf3mz75FkOVKoxdgzzzzLe979HixLpbCoN6N8SizLJgxD2u0uo5GCU3RU\n",
+       "mlreBQyLLX6ep1y4cIGrV69y6NAqP/nJTxJU1OhTrVaJoohGQymjdIHVJ0x9oHtP0SiKyIvO1ymk\n",
+       "vdq/xLBUJ5wkSSl/119PX0RRFJUfpP5wtMBBLzeB67637uL3+TzK4q0FA1oary8uKYvtzD5Hnudk\n",
+       "huDxJx4vOOQSz/dIyYnSFNe26Ha7vPtdyjg/zTJlgG/b7LQ7XLp8iZrvctttd4AwOHLsFv70zx7h\n",
+       "K49+A8OusrJ6K4btM7dwiLX1dWbmVYDw4sIio8EA168RJSl5mhUm+MXSJgHLbZAkEZOwT6PW4sL2\n",
+       "aRbmWpx7+SUOL93DN/76Ue45eZKFgwv4C0sMR0Oef+EFsjzj0a99DbPwnYmTRAmLhKA502J3u0uj\n",
+       "Xufq1XXe8+73Ykp41zvfjV8Jr9fh8wAAIABJREFU+NIjX+Kpp58mmoQMez2+8uKzPPjQWzl37ty+\n",
+       "5++ZZ55nfn6Wi1ee4fC9JxCTDvOzM6QVi6rnc3zmGGsXL7PUmmN77RpimBBECe2ox+ziHI2ZBmmc\n",
+       "ML8wx7XNNZaXDhJOtI5AKXAXFxf45mN/g2GYLC0t4TlKMCLyYoIiL6LJDBzLQhZulHmaYSDK60xh\n",
+       "26MyTq7fH9BsNvBvv500S7j91mM88fhjNGs1Dh86pPY5WU6z0WCn3cav7Plv7xXevb2NZVlFKIjq\n",
+       "NKWURfjGzYvOa/9Mc7FvdL2+tmm50X2h/zu9I5IoP6XpCVnd2wof3/tRNM5e5FkaipggZYZtK2Mw\n",
+       "zZ/X978QKlJtetLQ+LnmcU9rQabpvSVFsag5Kl83L6fqOI7Lf7/f8f068AT4n6SUzwghqsBTQoi/\n",
+       "QhXzz0gpP/OaE3kX8DHgLuAg8GUhxG3yRp8QEEYxrbk5Hn/iW5w6dQqJZP3aOjKX1CvVcrxIkoTx\n",
+       "ZEKWpti2U3acvX5fLTVHI6LxCN/3cFyXO267lU9+4mPU63Ullc0yAtdDpgm2IRgOhxiGwWQyKRcJ\n",
+       "0zQe3Y2bponpKdn0NHwhpSTJ0jJBY9qoSgsipj+06Se3znyc5oDrizXPc1WA9jle27nocVGPzmWW\n",
+       "oGHsy0JRqjiLS5cuYQqlOHQ9jziJiJOYwysHGfa6CKEyDPNcIoXCF+v1OqdOnUJkym3tytoVHnvi\n",
+       "23z9m9+iMXOAu07cSWtuAcep0OlNWFw8wjDuE1SbDMYRnl9jME5wLAvDskGYSEuSSrU7iIc5hmlj\n",
+       "mR5JkrI4v8DO1lWiyQiyhM52lz/8/f+bzm6bI7co6wPf96nUqhw8eJCl5WVeb5r8yX/8U8bjCdJQ\n",
+       "C6xqrUYYRrxy4RXW19d54+vfgGPZCAlvvv8Bzp+/QBiGzFTrmIcO8dyzz3Ly1Kl9z//mxja9Xp87\n",
+       "77iDYbtPHCecfu5F8jTjwNwCa1eu0KhWueXYMUxDqMg0KTh0/CimpcJv++MhSZySJapLq1ZqmIZZ\n",
+       "TnEb21vccssxrl6+ShAErCyvkGY5jUaLMAoLj2iHNMuIJiEClYwkhMBEkBZsKMMQBK6HWXzu1QOL\n",
+       "tDsdgiBgPB6q/YLrcftttyFyWSgYPYa9Ac1ag3GRZzo90vu+R6USlNBBFKkotnqjWdAS1QJf86T3\n",
+       "iqQs/83zvV2PlLLkg0/HKZTH1L1xM0m5mm6N8n6YpuLpXMnp+8a23alvsSdfVyJATelVXvK6+Oou\n",
+       "XAjlmaT3YHpK17DR9HT9Wjx7WsMxDaVWKpXyNfrhMA293uy4aQGXUm4AG8Wvh0KIl1GF+QZnnA8B\n",
+       "vy+lTICLQoizwJuAJ270PXwvwHEcLl68yImTJ8myXHmJGJJxoYzUTzHLskizjGsbG5w/f54sy5ib\n",
+       "m2NlZYW77rqLlaU5XLfwIjYtpMxpt3dxHBvDUIBXnssy808XZE3X0YVZd8i62yBXH7I+uRr3zuLs\n",
+       "uqKqC/L0RlwvIXThni6606NliavfhLyv2S/TTBj9YeuvE0VRoXbb5zB0lmGC5XrKmS2OVejsOMR2\n",
+       "HB588EHFSikuNGEosUcmlf94lCT41SpXr23yN088xcLSYVYP30qlPoNp14hSiVdpstPp4dYshGWT\n",
+       "xDEZFlKC7VZJ44QMQRYnWJYB0sSyHPIswvcD8mxAmoQImfKz/+N/RxZFtOpNTJTox2t4pYFXmmd0\n",
+       "u302tja5cOkS4/GEJM0Yjce0Wi0M00GKhGZzht/93d/joTe/hSyJee6557j33nv55Mc/wcuvvMKT\n",
+       "33kSE8FMo85zTz/NDz28z/lHsHH1Gh943/tJhyFffuIrvOn++/l3f/B53vQzf49Rb8DzL7/Alx/7\n",
+       "BsduOcaho4eJ4gjv3IvIOMEyDGZmZ0ljVRTOn7vAO97xDqoVn8kkZDQYQ6EflEh6/S4L8/NFRxcD\n",
+       "WeGzk2MI8FxFY51MJuWEmKUJ9XqdNMs0xEscK8/1arVKHIfYpoFwHWYKE6s8y/BdjyiMsE2LKAwx\n",
+       "nWKhN7Vgi6KIIFBiqEpFpfXoAmxZRsE+gb2CTXHf6UIurityiPwGFME9Y6fpYn+jY6/QqSmhZIAB\n",
+       "VuE1n+d7hVp7i2sVpv6e6mfVyT45lgV2EV6hu+9pLYaGi3Q49jTsMX2f6vqia4WmCepOfBpena47\n",
+       "32/ygP8PGLgQ4ghwH6oYPwT8QyHEp4DvAP9IStkFlrm+WF9lr+Dve9iOjSUM2ru7yCwnSxLSPKJa\n",
+       "rZHESYnLqfxJ5fk8MzPDj37og8zOzjIzM1MuA2QSFfiRop9Ztkmtqvw2oji9DsZAKOx7+uLX8tky\n",
+       "l64orHmqCqJbxFNpW9bpoqsxsGlJsf5QprEv/SHpIq+79enR80YXq16UTD/V9ag1rSA0biClNwyD\n",
+       "aKQmDsu2GYchtmUxHA+p1ap0223OnD6NY9kcWFzA89zS/0HmuRI5WDZJLvjjL/wpx287gV9t4VVb\n",
+       "pNiQSKSwMHJBfWaOJBuSpjmuVyHNcqrVRsFlVxeeX62ShCGSHClTpFSd6WTc4+L5M/zdn/oJPEvQ\n",
+       "iyPaO9sMekOyNMcMbIKggucHtFotZmfnWFxa5oEHH+LFF17Gr1bY2tqm1+vhBxW1lAt8ursdPv2L\n",
+       "v8jRw8fwPZu5uTl+//d/n09/+tNkacojX/4SeZYx22juf/4NWF1eZnP9Gl/71hN0Oh3y55/mh3/0\n",
+       "R/ijL/wHfupTn+La2hUmvQH/8Kd+hpMnToCUbO1ukUtJvVqj2WwxHqrQ2+eefppvPfEdjh05xvz8\n",
+       "AttbHdqDXa5trSOznMBXnvPai8MyLYSBSmA3TCWFl8rXW+YSZKY8xpMI07KIoxjLUuyV0XCobJql\n",
+       "il7r97rcftvtpEmCX1HmaoEfKEjHNMgLdWYGZXdoGMrsybWVmrRarULxgBZCmUUhFSuLohGaLqZl\n",
+       "R4/EEMq0Shb/qPtjD7/WuLYuhDcrZIqu6RT33J5QzppqiCzLIEnUMlFoszJhFowYfe8osy7L2lNl\n",
+       "akVsmkqq1WaJBihlZzqlsIYoSq9zIJ2+j6c7+Gl4RR8aQtHFXJvaacLGjY7/pAJewCd/BPwPRSf+\n",
+       "r4F/VvzxLwC/Cvy9G7z8pih8msT8q9/4TaLJhOeefYZqVSU1e57HwsIiy8tLhXxVFcZqtaYunEKK\n",
+       "HMeqIEiZk6cRGGALdaEjVAalYZqYFKnsBV4sDFUIdRp9mqqABe1LootkHMdU/KDsQHThni6Y0xeq\n",
+       "WqYWAQHFA0B3EjofUL9WPwD0k/i67mSfY1p1+dqxcBqCSW/QgesgCPVQSfe66yzDCyrkWc6nPvUp\n",
+       "dra2yZKYaBKSZDk77V3m5hcIKlVMy+ef/cIvYzlVao053GoTYboYpodhKc5zLiWOaZLmgkpQVUwC\n",
+       "lSGhzhWQF+HKFCq1PBnj+zbj4ZCdnQ1++qd/CtvIsUzB4oEFLGExGUc4lkNuGqpbdB1M0yJDEkUp\n",
+       "iIhma5Yky5ibW+Dy5avstrvkeU69WmM4HnP5ylXe/4Ef4dqVy/zKr/zvrK1d5Zd+6Zd461vfSnt7\n",
+       "m2NHj3Lp0uV9z1/gWEiRY5Jz8t5TfP0b3+DSlctUqyo6r7u7yz13nuDKqxf41U//MqdOnuQTH/tx\n",
+       "agtzJFlGnsHmxhaGMPAcl/vvf5C77zxFpVLh0qVLLN23TJxN+Lef/7ccWlmhVquxu7vD7OwMUmaY\n",
+       "pl02C7Zh4DuuylNMs9JfQ19/cah2L0nx9yuBj2kZpIUvSLVawXMdxqMhtmnjWLYKAbZsHNchm5Ku\n",
+       "T7MroljZAOzsbmOYotA1+IxGQ6S0mQ4H1uZQJQNFqEKq6b/X48eC17pA6vvoZvYG+vXX/51CCp/u\n",
+       "dbx796kSFxmFkVcUKTjTMCmShKxikhBlsdZxcXmuLD2UX0+t7Mw1zBHHe6wzvaPS97v+OYUQJQVR\n",
+       "/1zTi2I9Tev3cyNCQ3mObvqn6ovbwP8D/J6U8gsAUsqtqT//LPCnxf+uAatTL18pfm+f4+cB+K1/\n",
+       "8wqve90p/s7f+VTBqLDKbD0AWZDvTdMqO2OKdAzXtssRTRXCPSghyyWT4ZDRaIzve+XJNE2TDFFm\n",
+       "AeoPoVKpoJ3oNLND0xTHw9H30H+yLEOYRrlE1EZYjUajGFVjBoNBiasHQVB+uBoP15tnHYyrVaG6\n",
+       "03/tUalUitDZsHxiT2+xQU0JtuFA/3tfH8cx3WI5rDFXvR/wgwDLFCWuZxsqsd5yDA4dOkScpLz8\n",
+       "8is8+/IFRpOUI7fcSX1mgVw4mG6FXm9AzfUZDLrMzc1wbX2NW249THu3g+d6ZEmGYxUeynmGKSxk\n",
+       "FmGZAss2sB2Lrc012jvX+OhHPshsc5Z+b4c4zjBdi8wQ5MIAy1YFy1X+8ZNJhGGpFHIhTFqtWR79\n",
+       "2teLa9PCMASOY2HbLrV6nXMXzvN//Ovf5OTdd1NrNnjjwWVefPFFtYTzfSSSJN1/iew4Jju7u5w/\n",
+       "f4ZDB49y5+JBXnr5ZS688CIf/eH/gi/+hz/m7W9/O3fce5Izr57ha09/m8de+C7/6z/6n7n7jjtV\n",
+       "gR2NMW0b27QYj8Z4nspMXF1dVSyO6izra+ssH1jizJkzHF49xHg8plq4XQqhXBwlgizPCCpeuQjL\n",
+       "cjV6jwdDKpUKJuYUDAF5muHaDkmqrm/HdghNk14x2VqWKuRMxoqaV0yT+p6yHYvAqzIajZifn6fb\n",
+       "7bK4eIALF85z5MiRMu0py9KyEGroQnepujvWBUw1JYI8F+VDQjczosCvpxeT+x2vNYArd0TCQAij\n",
+       "LKa6kBrCvG7yNi1VQBWrSwWW64KtZfdaQa2ayCqTgsWmD91A6c55uhBPF3JdVzTpQTdx+nucOXOG\n",
+       "9Y1tTp85vy+D5nve+83+UKhHw28BL0kpf33q95eklNeK//0w8Hzx6z8BPi+E+AwKOrkV+Pb+X/3n\n",
+       "Afiv/+4XFZ6ZxASekiKnSYRpCkzDJBfgugoGqRQLSoVXaW8BdcHEcYztaLMngyColktFfbHoAqrH\n",
+       "ssmkX44sGovSHinTxVy/dppvmySJih0rZPGVSgXXdcsLwy3Uo3oUHI1GpQJTM000JKJHRd2Vj0b7\n",
+       "bCBBOe4Vqi394WoKpOd55UUT3iDVR00X6sEDSmqcJAkCNYFUfUWLskwTScHtFUpogTA4efIkX3ns\n",
+       "eSy/zuz8EkkmsD2XKEqoNVtEUcjsbJMoGnP8+GEGvb6yC85yHNvGtW3y4ubO84goirFciyxNeOXF\n",
+       "77Kzs0USh3ztq19FZjGua5MlaaHmM0rVaFRwkQUCw1KZi8p1zmJ7p83KyiHiOGFzawvXs+n1uszM\n",
+       "zWJZFidPnODs2bM8//JLtBpNZmdcPvThH0Xmkmvbl8nIaM3sD6Hc+/oTXL66xvr6NdbWr3Lw4BLz\n",
+       "C3NIAUEt4GMf+3Geff4FwkgtgFUwQ8Yv/+Knedtb3sInPvEJ6rW6eg/hGCEgTUM8zy6wWMX8eetb\n",
+       "3057d5vZWcV394vwjVqlopgVKDc80zLpDfrKqMpTNFbLtmgEfmkTIUyjgC0ESZxhWFqJqYzbGs0m\n",
+       "Fy9eYjQacfXqGpZl8653vhPk1CKwOPTXBG3b4LG2dpWZ2Tk6nU4ZGD19feqCrRsg/TWnp9c83yvQ\n",
+       "e/e18mrX99vNIJRp1th1X1/kpGleNkx6z5WnGcJQ13+chJDsFVLVPBmEYaTeT9Hs6YfKdGOnz4V+\n",
+       "f6a5V+inO3BdI3St0WyTaXqiPk+33norx48f560PPUBSeEB99nOfv+F7/34d+EPATwLPCSGeLn7v\n",
+       "54BPCCHuRcEjF4C/X5y4l4QQ/x54CUiB/1bebPZBeQm027s0zSaTghcrodgIq4u31xsRBAFJGjEa\n",
+       "D67z2I6iScm3Howne91xwczQxlZ5npNKPWYJaoFyagOuG2X0AkF/OL7vY1etcvmnx5xKpULFqJbd\n",
+       "T5qm1GoqM1M/OGDP28R13evcBPWEoYvw9IPiRuorjdFP05EGgwG2bdPv9xmNRipdpbI/i0U5L+pz\n",
+       "khCNJ4CAQmE2HA757Gc/y8EDSwSei2laNFoz+NWARrNFmgue+PZTfOjD/yXStBGmTZzmNOcWuHjx\n",
+       "AouLC0ShCkxYW79MI6hTqVQYjyfYrsd4PMK1LYSlFG6+77B5bY2nvvsknhmzvblFnqY82t5la2O9\n",
+       "nMQ8z0MYyq8bQ7C6oPJK4zgmTjN6/X6h5BM4vs9wMMJ29WSSgFQP4+FoyOkzr5KmCeNRTL/f5/kX\n",
+       "nuMP//iPqAUVvLrFO97xNnL292PvDjocOrZKUPcZ9VIub15lZm6WpaUlBmOVJPTMs88ShRF+YYpV\n",
+       "rVSpVAVPfutxnnryW/zj/+Ufc+rUPXQ6fWq1WvFZ5kTxqIAixhw7dowXX3ielYPLOEVzkGUZg4Ha\n",
+       "AQTVqoKTXAvPc4pJLsSyLMbjIaAyJ6XMUGIc6Hf7NOotcpnS6/eYmWnR63XZ3t7G9T0e/9YT2JbL\n",
+       "Bz/4QXIpsYzrHTOTRCVgaRhxPFYq3SzL2NnZ4fDhwwyHQ3zfLw2zpg9dBvTyeU8yvlf09LW9B18m\n",
+       "JcR4szKS53k5ieoHh1rAC9RiMy8fPlmWFQZeZkFumDa2ooxAbLVaqqgae77m+ufXjZZunnZ3d5md\n",
+       "nSVNczqdDoZhcPCgEgkOh0N6vV7JGdf37jRT5bVQqoZyphecNzq+Hwvlm7BvxPkjN3nNLwG/dNPv\n",
+       "OnV0Oh2Wlpbo9/vlm9DxTnmek2dqtEsTVWQd2yHLcvIsJ5QRju1gmTZZmmObJq7nlt2zik0q7F5R\n",
+       "vFBQT/XBQGEMlqkWGaLwDYa9gp5nGWEIgzgpFyVIiCKVeJ1L9RVNy0KglXF7DwP1hDWR8npC/95F\n",
+       "o9gErmPjOLZK8zFubGAThqHKKzTN0t3MDwLiRIU41xtN4MY88igK1fsUBkJYmI5XJMCESubu2PyD\n",
+       "/+bv0+/2MAWKgmWaDMcTcgz+4N//IUdvuZVRGFJr1LHsgNE4YTQYU6vUSaKYqh8QjcccXFgmzQwQ\n",
+       "BratzlOtGmAKicxiup0dNjeucvHSedJkwuVrGxhCYJuCnd02s3MLDIcDPD9QnazMyQ3BeDLh7Nkz\n",
+       "6iGfZRimraTclk2apaRxhB84CMNkEoaMRj18z+fC+XPILMe1LCaTCYHvYloWiwcWeeDBB4ouNUHk\n",
+       "orQwfe1x8fJVGv0hhmEyHEbcftvtfPtJFTrypgce4Nd/7dd593vfy/mLFzAMQavVpFKp4oqc2VYT\n",
+       "IQT/8jf+JQ+95S28593vVi6MUaT8tE2DNA4RwuTUyXv5ky/8KeNRSCWoMBiNyJKEarVWcpgty8C2\n",
+       "TOIkwjTMAobIqVZrajkfRlSCCpNwwqjgg4/DEUJI6vUa3W6XIPAxDZPnnn8eQwje+fA7MAyYhGOE\n",
+       "LFSLUuJYCkoxMknNrzCeqAfTcDQizTKSNOHCxQssLi4ynowJgkrJpJF6Wi6QANuy1VSm2SXsLS3l\n",
+       "lATfMEw8FASGkDe1k5WZIMw0Z1qW3b7M1G4oKe5f07KQOQjHJsuVd4tpmogcxqMJEmjW69RqdcV6\n",
+       "s61yNzYNZegHkOM4TCYTfN8vcPE6Bw8uE8cxvV4XUFOv8ne3SBLliBgVqm7V48lyCtHYuA6Dnma0\n",
+       "3ej4gSsx6/U6g8GgLN6aRqM70jTZo9VphZLmpgIQ7JnBxGlENNnrwjXP0ijMasyCRicchzSJVZiD\n",
+       "xuFMk9FwUC4x9fdD5ti2WSyRjBJGcV2X4XBYKNdU0odlWUR6Iz+NHdoWhmGVEI2mRbqOjU4GMQ0D\n",
+       "WXQjSbI/gd8qICLY6+zzvEgzymX5vV1nfwwdFM6LFAUz0iKKi6DjNEPaJv3BkCxLkEIwmoS4nk+O\n",
+       "QAqD9c1Nas0lvEoVKSzGwxgvqDGZjJlpzjDsdxC5xDMdHOHQmwypVitUqxUcUxBNBri2yVe//ijI\n",
+       "hO2dDbq9LoNBj6DqqwdunBAnEdc21osu3Wdrd4t6s85Ob5fxJCRIE7I8xw98xpNIvf9iWT07N0et\n",
+       "Wi+ukVx1nL02zXqdYa+nFINJwjCOSGWKH1bBNDiwtETddfDdgNMvn9n37C0uKk94w8ipNRq8fOYM\n",
+       "ru8jheDChQusHlplZ3MTSwiCAlKzLAuTvAwJqNXrPProozzz9NM8/PDDvP/978e2LAwEvV4Px/fY\n",
+       "3NzG83xqtQZhGBEnCRTLYctWobkyl0qFOuzj+z4HDiwhpWQ8nuB5Pq5bYWNzg1qtRhgluG5KELjX\n",
+       "TadRFOM6DgbwEx//eInxmobAME0G3S55nOLXbMY9lXaVpCmVWpUojHEcj1q9SbOZEMUxuzs7HDly\n",
+       "RCXqFE2KCvm1kEZhrRwn12HduZRlKEIu8yLcwUDkOSI2SITySMm5MQZuC5dITjANEwTl8lHfV6a3\n",
+       "RxwQQpDmaQmBCCG4ePEiMzNzHD10WO3FEGS5ZNQfYpqi3J3t3c97TLNpQsP6+hqO4xQdtFk4f/YK\n",
+       "CHePoiyENqLLi2l/jzChBFIuKmQiQ+zfS+zVhJv/8X/+Q+NEujvVTztdJH3Puo5TqWk7Gh7R3bYK\n",
+       "dEhKiGK62MP1uJsQgnq9zmg0KmGLac72jaSxegybhkf0e9DYul5OTAt4lEgovm6hoX/uNE3Kgq85\n",
+       "5noCee1hWcqPJE33IBq9WFWGVkWXfwMWim3buI5DOJ4oRkmRzhMnyn5XTQIOuSG4evUq8/PK9jVO\n",
+       "JatHjtJud1iZP1YU/BTTshkOuwUtbZdqxcMyFY85yyMagcAyItLJgM2dLXa2N7lw7jQyT+l2d5Un\n",
+       "BSkzsw0818EyBLWgwom7bucNr7tP5U0mKd1ej6BepdFs4ngeVpTgBT6+7zMJVUZjUAmI4pjP/fbv\n",
+       "FJ33CMtxcG2LLE3ptjt87Gd/lne89W2EYUi1XmU0HionPSl55C//gq99+RFa+QwHlpaAZ7/n/A36\n",
+       "I6Q0yHLJlc01dnZ28D2PD37wg3zpS1/iXe96F1/+q78qoQYF8anJBkGp/F1dXWVne5s///M/54tf\n",
+       "/CKe5/HRj3yEt731bURpSq1a5cDiPBsb6/iB2m1YhmYtCEajCXEYk6Y5zeZcAVukmIaFZboMhxMc\n",
+       "N8f3K0gpmJ2dV3S4OFRGXOMRArX47nQ6YBgMhyNsWzUgekp0PA/LN4jzHL+hOvu6WydOIqJwgovy\n",
+       "7rAtkzRWaTevnjnD3NycYh8hyNOMvJC0x3GsvHcAaeyFHWdZhpyCT0rMHLAEIPLCT33/I0knuIHy\n",
+       "9ldJ9MpQS4g9hor2I7EsC4S8Tkz3xjfer1TTk1BNxUVog1JtT8qHwXRHrDUduqGSUrKwsHAd02t6\n",
+       "ZzZdI/TSUhd/XSc0eUHDNBqyvdnxAy/g04uB6ZBfTdeLwqh4MgtyqcaL0mtYpuQZIASOa2MXb2ea\n",
+       "Pzqthpou0pMi1uy1tL3pbn/69zUdUP+d6YeBxsL0hzBN5N/DtOPSgrLctOdZQZ+S5fJTf+D7HePx\n",
+       "eOoCN8tttn5ih+EE07Qwjf1fnyQJ9VpNJZG3Zuh2B8XFJIjTlEqlwu/8zu/w0IMPsrgwz3gyYWZ2\n",
+       "FilMLNOk1aiTZwmmkRNHIzJp0aw1CaoWI2FgiphqECiutgHbG+t858nvYFuC8XBAt9shTSLCcILt\n",
+       "WpDlmI4NJKRhzGgy4Sc//g+467bb6XXaOI7FkUPLDEcjuv0eWTxmp9dmeWaRcKLiwcI4xrIcNjc2\n",
+       "abSa9Pt9hGHguC5xEuM6askd+D6VSoUrV67gODa9Xgc38AnjCL9S4Qtf+H/Ze/MYy7L7vu9zzt3v\n",
+       "22vtrq6empruWTicIYfLkEOKFClZtCRKliwldrzEtuI4MBwIipEoiGXYjhN4gWwEhmPYAWxEiWTD\n",
+       "suhNjg3boERJpmWRIinOTM/aPT29r7W+/e7n5I9zz32vht0jIwo8guEDNLqnpl69V/fe8zu/5bv8\n",
+       "PJ6smKUJp85sPfD63buzx8rKCp1Oh9U1lyA0GuLXbtzgueee4/bt21R1oFpZXcGtGZauUGRZQpKk\n",
+       "tQenx6mtTXRlGg1CSObJjEqXlKXi6HjYSBfbjNkkLR7T6QzfD9AUKCVxnRDfiymrkiw3fIlut2tY\n",
+       "m/VAfz5LiCKfJE2wTurT+YTjoyPmScJsPifLc+JWC1mVDfZ4OjV6OBubpp3leR6eA2lqMtjZbEae\n",
+       "ZwS+V8+FIu7f32/cncyecCirom57VuB5tXxriVaLXrDZWE4ddOtgLUyLRVUPh8YCuA5MJqPmOpl2\n",
+       "0oL1aYSi4iZw3rp328yKoog4bnHv3j2ja1P33MtyQSBst+MmHlgwgk0gl81Y7F60fBILO15GtFnI\n",
+       "4fKsbRm8YD+/BVEsQxAf+ru/6//9D7As7MYOB+2Hh8UpF3oGvWF/ccO0dBpXdlGzvywt/gRkqP5Z\n",
+       "y0LpJngq5lnGeDxeOm3Nw7+Me7UPov2M9mdZEawwDBsIYZYZAgUshjb2Qeh0eszn8xPTdOk4qKps\n",
+       "BrK2EngYjNC2l8xDKU9gyy0kUesMq8H8zjWfzej1V5ASI1YlNJ7v44g2eWaGYGmSc/aRR8jSpIFx\n",
+       "KWUYrU88/jgH0wJXF3TjAI0Hes7dmzc5e2aT6XTE3Vu3uXL5bZLZnHao+Mizj/G7ftf3kyQpf/pP\n",
+       "/wSqzOl2ArIiI2rFNWLGY22lz3/1I3+STisiS6a02xGz6YSrV6+ws7NDqx0bEX6tuXb5Kltb26Rp\n",
+       "SrfXww8D2u02r73xOq045uDoiFarRb/fJ5nN8FyPvbv3eWx3t85kzebJ0hTpSPbu3+fjzz/PK6+/\n",
+       "RKEUR6PRA6/fk0+8r+EKlBik0TPPPMPt27eJw5BvfvObnD59muFwyHQ6bfDCrcDQpXu93okqznM9\n",
+       "gsAnTTP+4T/6h7z22mv8qZ/4s7iex/d93/fyZ//cn+HJJ588kWS4rsd8luB5AaurHfwgJE1TZrMJ\n",
+       "vu/R6/WYzibNcLDIc1zP5Xg0ZJZMee3NN0zlWeT0el2mU6P2iRTcvnuHVhwTRhFpkuC4LtN0zjRN\n",
+       "mM6NDnaUzomCgDQ3KA0ouh8PAAAgAElEQVQhDHc0brU4PDjgzPYWw+Njbt+5zaA/YDAYUJbmEGp3\n",
+       "2+SFMW5QWqNQCG3EzBBOzcrUSzZqxqSl0hrxwFGcWUk6xo+M+YNSmrIqqcqKMIzqanox2NRa88wz\n",
+       "H6gNy1vMZjN2H91leHREFJifIYThoViQxGw2a/Z+p9NpOgE2yC5X4ssDSXvPFvfOPZGwWly5qaDN\n",
+       "a2xMsUiX32y95wHctivsL2naCgvp1SRJmosfBEFz6tlfGBYUcovJjqLoRPvEtl3sv5UysDatNf2+\n",
+       "gYzZ7DypjRts8DaT/fkJdqV9T3ua2t6a67rIpQx6wQJb6CbY19hhxfLnsjjwdyPyLLeEzM/2m+th\n",
+       "sexaK8YPmGP2Oh0EgmeffZaLFy9TKkXkRBSZ+VmT2RyqAq0hbrVJU6PoeDwc02q1+djzH+Gnf/bn\n",
+       "GB3tM5mkxFGH0XRCGHokwz4rgy7T8Zjf8e0f4aUXX+K//L0/RLvdpixKhtNj/ugf/gO8feUqSivC\n",
+       "dovZfI4GLl1+C51l/MzP/AxZOuODzz5DVZZIIXjllVd439NPGyeeKKbX7xO6Pnv7+zzxxJPs7e/z\n",
+       "yM4j9AYdLl26xOHhgelfzmfkZYFUpVG7jAPiODR9ZtfD9TwC1+HWnTt88Rd/wXhlttocPUQLHOB9\n",
+       "73s/x8dDRqMRB8NDdnd32djY4OWXX+b+vXs8/fTTXL582TiRdzpGKnQ2I/BNVfbBD36Qsiy5efMm\n",
+       "R0dHSEeS1QH2/PnH6a8MODo+IopjhJD8+I//OP/8n//zutrUjMcTfC+k0+lydHRMpedEYYznufiB\n",
+       "X7dHRjiO5Ohon/F4QpHnbG1t0e22WV1fYWdnpwlA49GIVy68ZGj9V65QliV7e3v0+308xyFqxczy\n",
+       "jPvHh7ieR7fbpdAV82ROO4w43D/AdR2SzAT4IAiYJXNwJFVZsHd4QJpn9ZAfgnlAoQoslR0MJtu0\n",
+       "PGoiT101GInXKUWhMNvh4QE8CFzS0lqUubiugFovCUQNsTXqlUEQMs/TRvsIrWup33ZDMDKOQxlV\n",
+       "WVJWBlBg9YmWOwWWVLWMkLFEqmXCjo0Ry7oytk213FJVSjXEnXe2aB623vMAbgcCNvO2v5xtRSzr\n",
+       "DdgWRTO1roOYDdBSmEFIVebgOIh6eOm5EkeCHwW1E3huHMyXsmUbHJex4nmeM629D4Hmvaxetz04\n",
+       "XNdt4IPLh4u9YVIIUJhN5posWgppfCTrNo5t2Sz3xd65qrJEK1VroLvNhF1Ko5lclkUz2X/QSpMU\n",
+       "4ZZ88NlnefW1NyhVXbVAM1F/bHeXt69e47HdXW7dusXZs2dZXdsgCALm8xl//Sf/AkiPIldMZ4mR\n",
+       "QnDh0ltv4Diao8MjhIb/8cf+OGWRUZXaWNKVOc889T5C3+cTn/o0SZbieD5//n/9X0BD1Gpz/+4d\n",
+       "4ihiMkt5/PHznNpc5/Pf//0MR1MuvfUW165d4/qNO1y7fAnHdRkMBty9v4cGNjdPcXQ8pN3pNAJG\n",
+       "s9mMKp3j+z4rKyu8/fbbzOeGPHPp0iW++tWvUlQlni176+duNHwACwr42Mc+ztbWFm+88QY/949/\n",
+       "lqOjI/7+3/t7rK2t8YM/8AN0u116vR4XLlyg3+3RimK67Q5Cmft2/twTPPXUU4A53C9dusSdO3e4\n",
+       "ceMGGs1HP/o8nXaHsjRGxhtr67TjFpPRmMHqCqETcXQ0REqHVrtNEEZ4vm9aUyqnnCfMkym3bt1k\n",
+       "NDIkm8cfP4frepw+cwbXD5q95LoubG/zgWffz+uvv254AO02Z8+exZEO82TO/sEB33zlZa5eucZ3\n",
+       "/I7v5Jd+6ZeYz+Z89MMf5khpeu0OTiURngOuw2sX36QoCq5fv87Nm7dRSvHUU0/x/PPP0+v1qIqM\n",
+       "CoXvuzXGOSDJMjzXpywVruuhlTZfT1JarpWwCCnyh7dQ7t6/R2d1zfTXpawrR4kfmPmB55pYkuUF\n",
+       "RakQrgnsQglQxrjZtkW1MrBLE/yNbkoQeE2MMP3tvIlVptVr4ZZFExcM4Sps4sAyC7ssjYb7cgC3\n",
+       "8zbbK2+1Wo3Mxrut9zyA21MKaILlMiXY/lKNgNQJDOniJDen4oKsY7/HYqdtUA7DsJaBXGTJJ1An\n",
+       "1HA9vdBHWRaOsv6DNlAXRdG8p+2x2b6XVWUzN0012bU9uYWAMApO3Nh3gw7FsTF0ttAm13Vx3Lrv\n",
+       "J5YU0yQcP8DQIQwDlBacP3+eqioJwxilFZ6UKAwBKElT3r5yFTRsbm6iNIxHx8yTlG6nzfgoYzSd\n",
+       "04q63LlzjygOmScTzp45xY2bV+m1A1SpUWVKUVgddcegN4TL1uktJqMRwnEIgpC1lRUOj45J5nMG\n",
+       "q+tUVcmt23e5cfMmUkp63S6u4+IHIXlhDsSzZ8/iB0a/Y3f3UcrKHIRR3GI4GtLr9yiKkrwo6HaM\n",
+       "LOrB/fv8jb/xv9eD8owoilhbq40h0pSVwYC1zipSSKYrM+Ar33L9/s7f+dskSUqWpEhfMxqP+MQL\n",
+       "L3DmzDbPPfdB0jTlmaffj/79f4D9vT1u3riJ53lMJyPKomB1da1udRWAZnPzFJunTvHxF14wLZ08\n",
+       "YzafEwYh1KiL3d3dRe8VSbfbQUoXISRZnpEVOUWRcXB4n2vXrzAdj/BrIaoPfOAZTm1uUhaKNMvY\n",
+       "Ozii1YoNWmU6o1tXCo+c3SHLU9I05e6duwb25nl0ul163R79njmMnnryKd6+fJmV/oDpeEzcalFV\n",
+       "JV7k8+hjjzFYXQUNP/CDP0QYhuzt7XHlyhVWVteYz+cEQoLnUFQleVk2hKQiT3BdrwlmZVlw9eoV\n",
+       "poc3qCpNlpcoBd/9kBjS7Q3wvAgQ+H6AlObZMr1obXRiAL8+wLTWuI4LDifbHbqOI0t6KmEYnOgQ\n",
+       "gJXXdU7IZCyrES6DLuww01bXdlk1VKDBmZdleWIWBvymkrLveQBfbt7b8sH2l6wOgc2+lzNkG/A8\n",
+       "zyOO42Zj2D63HSQ09Htogq4pS+wfTlxw+3nm83lDl10OrMsys5ZdZXtYtopYRrM0DCxdNENTO/QM\n",
+       "At+IEy3R4B3HaZAs71ymnWTYiMuzgizLKPK0uY6IB2fg89mUuNXl+OiIDz33HK9fvERVVLTapo8/\n",
+       "nc/Y299nZ2eH7Ud2WFvpmwetnuZnWcp0OCGK2lx643V2dh7FDVy2t9d59dULhIFx7MmzgjIvqbRA\n",
+       "1Pozo8mYJ596mjRNGR4P6fZ7ZEnCE+fP87WvfZ3Q99nePsOP/uiP1mYHFWe3txmNRoawNJ4xGKwQ\n",
+       "xzFXL7/B6tpaY6qBcPj6N77Bv/yX/5J225BwPN+jJWI6nmnR/c9/9s81ZXCn062x3FPG43FDZz84\n",
+       "PkBroz8xnPy9b7l+9+/fZ3h0xEc/+lFW1vr88H/2w+zt7Zl7nBdks4TMMW24TqvN+59+ut74BkY4\n",
+       "m83Y29tvdOyR5tDP6r64lJKyqMgpa3y0x87ODi+++CK9QR+lQAgzsPM8n1Jprl6/zpsXX2c0PGR9\n",
+       "fYVWK0bUDu6ddpv5PMGRLnEYI4VnCDmeR2djAyEE9+7d4969ezy2u8Pj584DcPvOHSZzQz559eUL\n",
+       "nNo6jS4rZKU4e3qL2WRKkRfs17DBoir4+X/6z3jf+97H2toab799hfl8zuHhIefPm585GAyI4hg/\n",
+       "DIjjmDAMTQKUm1mAFII8W6CpdFlx4fgWEhglI4R8l1AlAxzHMKUNgcnsVSlcEIpCFShlUFwmSGZN\n",
+       "5W8t06hx71prqpoZWpYlOs2bgOr7/gnJi+WWiIkj1QlCjk3U7CDaJnhaa3q9hcOYlccoioL19fUm\n",
+       "6YQFJ+Vh6z0P4MsnzDKV3Wbmk8moviiWakuDk7ZZd1WZqXYQBM0Fsz0mO7y0bRcb2C1Dyy57ipr3\n",
+       "kE3JYwenNsjbnpcteZYHjsvsSnvQ2N63I73aBJkm6Ge5gS0tZwG2//+gtcCoL8R7bNYSxS5SGPqx\n",
+       "0hU8iMujTbAXUnB66zSvvv5GI+5VFAVh3GJ/f59ut8vNmzcpi8z0Ln23ISklSUqru8oju4/R6nTY\n",
+       "37vP3Xt30Ag2T59hNJriuyFZqcm0aR3kaVq3NhSX336LZ595FgUkyZTPfPunKMuM3bO7FGVOlkzR\n",
+       "ZYEr4fULL5khpRfiCchmU6bHx6ysDLh9+zaDwQDHdYnbbV599VVzCLuG9OPXqAGtFU899WTDN1BK\n",
+       "sb9/0Gjh+L7PaGSesZ0zZ5nN5kjpMHzA5fvzf+7PgKWmq4rjg0M86RD6AVmaMhgMyLMM1zHGtmC0\n",
+       "uhECx/fo9QbNPRbCYK0NVtkzZCchkNq6oM+ZTqesrKyyu7vLlStXkFLS7fQYDkekeY7j+Lx04RWU\n",
+       "KuufpcmyHN8xSUZZaFxXMksyynKOrBOKPCsa0pjveTy6s8Px0ZC7d+8xnUwI4xDhebz++ut89+/8\n",
+       "nbzy8gUuvvIacRSRlhUrfaP/ffX6NZIk4fHz5/nIBz/EB597juHxkI0VY9239ZkzhhXbBEtBkiXM\n",
+       "ZnOS8dzMOeRCatURRsZBIDi6f4/ACSio1Qvlw9EYYatDv9vD88zerlQF2qmJM7oeTC7giVaIylTD\n",
+       "FVVVGvcfYRjgllwE4NTuREBTTS8Lzy13ArIsPeEL8E5zF8sePtHGgqbKt8iV6XTaxJuH2Sva9Z4H\n",
+       "8HfipbXWDTLEOF8HzYWwfV/7ywohGmlXm7na/rDFeVo8pVUdtAHZ84LmvZdvhg269hS0mE57Ii4P\n",
+       "FWym/M6Bqj0kbO9rURqagG/75VKKOoib94/juDkYHrSWtRNM9eE3rSVzyCwGOQ9aYRBQVIYVubOz\n",
+       "g+sarROrFyOEIC8Vh4eHfPnLX+aP/dd/lIP9Pa68fZNzj+3SbrWYpV2QLkprfuPFl+l2O4zHQx57\n",
+       "bJfRZM69+4c8tvs4+wfHuG1JO26RTXO2d7a5ev0KW9unEI42eiZCks5TPvvpb0PlFXEr4itf/jd8\n",
+       "5rOfYTIZG3u8UjEbH7O2sk5RlnjSmOSeO/cY16/f4PrNG7z2+usGxYCRVnWAsj60qyrn0d0dhsMj\n",
+       "tLZ6HMaVxWZEvu/jSEkyTXClw2j04B74eDyiQtFqtanynFYcN/hm3/cb04/A85vnxnVcNIJkntXP\n",
+       "iGEbxu1WHcgdXM+lqApjVKLqeYkXGoOF2Yhnn32Wnd1HOTg4YD6f8+hjuwxWVjk4HPGVr3+TVhSg\n",
+       "8oqqAoFDu9tnPp0ymyW0Yo8waOG3PZSuGA6H9R5xaiVIxyBP/IALFy6wsjLg9PYWmSo5c/o0WZJw\n",
+       "5tRp3nz9DXpnWqy0e3jS4Xh6zDe+8RuEQcDZzdPEccyX/vUv8qlPfcrA73oDDu7uEQQhSVIQRiGj\n",
+       "4YgwjvBwaUURaZqgVEW320JoqCqj2SKkZLXb5+DudYosrZEuD89Eu93VOlM1Tk+OY1uvugng1rbO\n",
+       "7KOFOqBJ8hyEXnA0WAr2eVE2Q0abhS+CvzqReC0PI63QnM2+l3HkjuMwHA5PJGr2dfY9bAJqlFcf\n",
+       "vt7zAO4HIVVZIaSgKHJUpXBccxO6XXOT89wILtkg6brGNdrzAlzXRwhJkqa4rjBtFKXQdpgo6qxM\n",
+       "aSqlcJRGC2kgZNatQxotYyEX+sHLmVK1pIho9BOMC7fjyBPoElljdW0PPApDyso8VHl9oORosrTG\n",
+       "kEuB0tYDz7iZpGlyYqq9vJLESHkuH0RWoMc+WBYH+6Dluh6OK41ht+uysbHO1WvXcYKAsqpASAaD\n",
+       "Ffb2D3n/U0/xD/7BF/jkxz/G4+fOkyYzZlrjxDFFVnJ4eFQP5DSbm2vEccj9vQNOb21TVpq1tQ0q\n",
+       "x9CTx6MRO4/s8NLNm2xvb3Pzxg3ObJ8xB5pjZh7Xb9zm7CNn2XnkUa5evcZgMCCO4lpbGm7cvEGr\n",
+       "1abX6zGbzpjNUtbXN8iKkkuXr7C/f2jcxIWRS/Ucged46LTk6aeeZjyZEIURUkiCujdqrolDmqQU\n",
+       "ypTQeZHT6bSZPiAF7/d7VLWejuP5pEmKdBzTWwUmkwmddqdGTJmD2/ONzEMUGwKLUkajYz6fN+1B\n",
+       "hcnutAJVlLjSRWnFeDxisNLn+HiIHwSsr22YmYXvY2RbzaDeVo+u6zY4dM/zWRmsIHBwpFNj7z3W\n",
+       "1lZxHJfRaEhRFniOy+W33mbrzBmee+5DBGHIaDKi02kzOh4SBCHPf+xjxFHE5cuXOb21hR/4DAYD\n",
+       "1lZXObW5SbfTJc8LHj9/notvvsn6+hpaKdpxizRNEUIym04YDHokaY3uEEasLgoChBRIqzyvoVIl\n",
+       "0+mYvFKkRYl0XKTzcDRGnhW0WyFpagK0rZQt+AAWJuOu6xqCkU3aqHXKCyMx63ou1plHCONCZBAy\n",
+       "qtZSWbRNFuxOC/FdmDDYXrtFx9mATH2vbbK2nDw+DF33bus9D+Cu46NVbvpPjo8fehSlYVS5jslE\n",
+       "jA5xTlnlhGFAXpa4jodKU/MrCEWea4LAZGeeY3WEDZjf9Xyk6+JKYaiywtDNtdKEYYCmbi04AofF\n",
+       "RS3rVozrOA30qShKYNEqEYB2JLJ2rjaEo6q2GTE477yqe9y+azKf+rVaCDw/aMoyKcWSMP2Dlunb\n",
+       "L2spm89ULAVvaSjFD1hZmqPQII2Yz/d+z3fx1/7aXzcloJAIx2Oe5Egn5LXX3+IjH/oga2sbhL6L\n",
+       "K8BzHW7v3SMMQzqdAOlU3Ll9l1ObmxR5STpP6Hf6qLLEky6qqLh18yZbm5v4jkMUhMShIYCUxWLC\n",
+       "LoWk1e0Txh1W1pwaG6uZaYOJbfdCvMhgdt++cYPTG+ugoSw1L770CmHcYzS9TavVNvdbmn59nqX8\n",
+       "8A9+njCMjMtQ3X80sw23ZpQWeJ7TVF5h/DAZAsjSxZxBIJHSRQpJnpnhlOcFKA2ivv6OdKgqY0tn\n",
+       "IaPSte7tC915RwiE4xrUQrBoi8Xt2GT3QVwHC4nvmedWaGhFYU3aqrNHx8H1zBCwzHOKMiXyAzxH\n",
+       "ICMPJQQ3b90weO4ootftozU88eRT9fPnMh4bopCDIApDw7Idj4m6bb7re7+bq1evkuU5WZryyPY2\n",
+       "+/v7HE9HfOhDH0JKSX99wHQ65frtG/hLgcu0DDw81yeKI1qyZRAi0WKwKCTkaUan2+XK9esoN6CS\n",
+       "PiUKqR8ewEPfJ00XUrZludDXfycfJM9zfM8wiW0AdhyHoizqfbDYW1VVMa1VRI0bUdxUq7Y3bVQk\n",
+       "RU0WCk+wKW3wtlm3xf/bYL2scGgTReuhC7wrIs2u9zyATyfj5oE2SjOaMPRxXVOOFLk5SaM4qFmM\n",
+       "BWVlyDxlVYIukNKts+8cz3PrrNxYI1VVSZnnlHntTuJ6+H5AlZdUWpHWkrBKKULPJS8KpGN6ydJx\n",
+       "qNAUlcaRAscx5Z1WBmsdxbGB9umKSpvSuCgVWjgIabJ/PzAkpKqeQCV5PaiRDkot0CRWS9lCJx+0\n",
+       "3knFFeKk1KV9YK2LzjvXYDBgNp+jhCYrC06tb/DE+fPsHY0YjSZ4Xk3j14K0KLh08RJUBZ//ns9R\n",
+       "ZDmzWdH061ZWVrh96zZrq4O6B6hZWenjehLpuziOoEwNNM1xHK5evcrGxgZFUfDoo48ainndSprP\n",
+       "58Z0Ok8IQo/9g30eOfsI9+/fJwhNwPJcl26ny8bGBpfevMTaxgbXbt7kcDjk9Tcv0u8PiGOjeJgk\n",
+       "GUJo0IoXXniBw8PDE60w+75As6Fs6WrbZg9admM7jgNa4jgLUtcyGsHeq+ZvYeQN3olUsFCyZmBW\n",
+       "l9jL6nx2oy/rRlu5Cc8P6+em3eD/qwpk4NBqtwxNvjcgJ+doNKLd7bK9vd387DwvcJ2F5V+eG9Nv\n",
+       "xxXkpcFGTyYT3nzzTT7xbd/GL37xiyRJwu7uLvP5nPe///188pOf5Bvf+AY3b94kiiLW19eRUnJ2\n",
+       "e5uiKGi32w2kzigqzpqqsSxL7ty5Y+JAjcrIsgyJYDqb4bdilNJ1Jv3wQFYUBUqfHBQ27RB7v1jw\n",
+       "KOz32UBcFEUjB51lGVmWNRBA6yQENOAD++wYdyndAAqUUoxGowbssPxM2GfPylLb+20/nx18wgIZ\n",
+       "95sJWcFvgwDebrdPDAnzPDtRgkQtowWeFwUIiFoxus58zdBB1w+8IqofkmQ+BUEdrM0AZT5PKEuF\n",
+       "tUEKghYISZaVSEciHY9ZktXCOibgSsDoCTtG0EkLpKrdq4VheGkEQnoIbTYQdQZcKUVZt37MUMVA\n",
+       "lzw3QKONO7syLZMgCClLhefZTfjwh3WZ9WVLLPs1+2A+DPw/Ho+N07yoWwezGT/0g7+bv/l3fqo2\n",
+       "oSgQGC1whGSaJhwOh/zyl/8tO2e3OX/+MSpKZtMp9+7cpcjzJhju7e2xuXmKqipxg4BKmQxnY2OD\n",
+       "OI6ZTCaUZdm4m1hyVBybLHM2mVHkGQLNqVPrHB7tc+r0Bnfv3iUMQwaDFUajEWmWsHF6i6/8+le5\n",
+       "f3DAcDwxwlFhQJImGC0LqIqC7/v893Dnzp3mPW2P0h6ayz3L2WzWbJp305+wrSvTlkgbjQ4rCBZF\n",
+       "kZEx1RgJiKpCi8Wsxd5Dy+RdJoKZDG3hj+g4Es/zm9cahxtNFJngErbazfcbZnJOp9OiKitaUcyb\n",
+       "b7zJCx9/ARyHU5unKJYU7yxEzpGLgOR5LlVVIB23OfTeeustzp07x8HeHt/5nd/J7du32dnZYX/f\n",
+       "UOZv377N1taWgZwqxXA4bFjNRV4wmUyYTCY8+eSTzfxngZ9eGJJsbW2RJElzsH/1a7+O1oqiyHE9\n",
+       "Q7F/2ArCgCxbNoJYzLIskMEOIF3XNXILSxBjoD5cJs3nWj6ILcxPa93o+ttrZp8tq8e0PJOy19lW\n",
+       "xvZZs1m2/X/L+G+LYPv3NTV+d5T4f4A1m6fkRYWQ0qio+RFhEOP7EQKHojAwMaNl4JFlOWmaUVYV\n",
+       "qh7IOVLie25TrnmehyONk3yapIyGI2PTFgREYcTKYKVha7bahnIdhiGtOKqzI9VscMdx8Go5TV1j\n",
+       "sMvCeDemqTElyPOibn8EtFpt4jgmjiOiOCaIQoLQ/NFakxfGh7MojaWZ6y40VEAY/HL+YDnZPM+b\n",
+       "ib0tt2ABV1zOyh+0NBVRGJKlKbpS6FIRByHPPvss+3v75n7UEqFxp42QLm++9Tb7x0OCuE1WKsIg\n",
+       "4PTp0wgh2NjYIK01ZSaTCXmeAYrZbEJZFgyHQxzH4eDggNXVVSM+lSSMx+NmCm99TitVNggOtOLW\n",
+       "rRukadIEhvFkjB/4tFox0yznYDTm1p27jRyxUY2UJPMpge/QikKe/+iHm+G1FUqzyyICliuaVqtF\n",
+       "FEWN8cY7lxCikVbIixzHdWi1YqIoJIpC4lZkkDRZSl5kKFXhuEZrxoqzWZSVzdDs+7Xb7SYLjKKQ\n",
+       "OI7qTWy18asGsTGdTmppYGqnJ8P01TVjMau5CZPZjHani9JQLmXySqkGtnY8PDSf03GagH10dMT7\n",
+       "3/9+Xn31VT74wQ/yxBNPENb65hYW+xu/8Rs8/fTTTTD8tV/7tROgAc81rYOtrS2ef/55Xn/9dS5c\n",
+       "uMC1a9eaZxYWGkOWhdhqtbhz5w5hFGElIUy2/PBQZcwpFqqB9t8W/WJ/bwsu6Pf7dDqdRm/GBnsL\n",
+       "fFBKMZ/PGY/NMNse+HEcn4AKh2HYCIIdHx83v4v9GVYHyUIm7ed7J8NyuSrLsuyEJ+9v+xbKS6+8\n",
+       "Rq/Xbcov13PR9SDSkRKnkhRlTlGYUzgM2gihKcq8GS6CKWN0LSXpYpEaFQhBGIUIxIkNK6SFWmWN\n",
+       "GL7nefSDjpH+XCqnVGWJAF49pa5x5ZUCXRmYkjb2b8lsSlGWOK4J/FVlgr0pnTzQmsA3KA40eJFx\n",
+       "bbEnfbcbPFTPe1m03pbbNjOwmQfw0FM7q7OvTsuUtdKTFFrzPb/zu3n5xZdRqqKsCjzPbTDwYavF\n",
+       "W29fY55kPPP0+/j4889y6dXX2NjYwBAnfO7evcv29hmMfZlpsezv77O2ttY8jJPJpJEl6Ha73L59\n",
+       "uzlEtda02y0O9g9qrQmXZ555ppFGbbXbTKczRqMxk+mUX/36BfIiZzKdNwzSUpVk4zmuFBzs7/Hf\n",
+       "/eiPkmVziqJqZF3DMGxYu7YUtoe+LWvtBnvYaiCAdTa1XPbaQLE8j5BSImp9DaB5T6DZoPYwq+qB\n",
+       "t2n/LQwOLAw2TdOm1QOmovrsZz/Lv/t3v1r3WCtj+BC30BgETlEapqnhHEQnKjfb151MxzWBzufU\n",
+       "6U3m8xkvvvgiL7zwAuvr6w0qzA4CL168yLd/+7cznU7Z3Nw0xhXtNkqpxuQBZVBV+/v7HB8f84EP\n",
+       "fICDgwMQBmJ4cGCy3cD3KevWRhAEDFZWODg6Zp6mBKGxTPzNWgnm3uUnyHA2S7b3ehmq27Sgam0h\n",
+       "mwlbDLlFhNlAbJ9R+3wsB1zbkrNuRBaNYiss+9/LAXs5y7dVgT38Gob1Es783dZ7HsC/+vUXUbqs\n",
+       "yRA50nE4c+YMH/3oRzizdQZX+nheSKsVMx6PGU9ShNRIKfCkh+s6BnivJMLVOK5vxOQrjeO7uEI2\n",
+       "WYFbZwVlUSAwveKitDR8E8zTpGxulOsanebF5sspC4NWMPAzg4ZRSuHWg6vA90hzky0r7ClaMpsk\n",
+       "zUDDBoiiNDoP9sGygxD5ENKCzQAtEcRmkeazLYKH1W9451pZXaEqS1PeKw0KQs8jFYIf/x/+e/7i\n",
+       "X/rLxGFMUZVmWIYmiCKyJOXNt96m0+tz7dolPv1tn6Ld6RtauuOSF0bm1DqoKKW4c+cO/f4qs1my\n",
+       "1OaxWUZBv79CVZn7WBQVcRgwHk94dHeXeZLgeyFlNSNJc6PDMRhwcDTil3/l30LU5eq1G/QGXQLf\n",
+       "YzI1+iZCQJok/P7f93tZXR2Q1LK5dkMopRpYp+3LWocYW24vK1a+c9nNZV87nU6ZTqcnsmo4mVHZ\n",
+       "79fKbMbAd9HaIB3s9yrPQauKMFh4pkqx0MzwbHCvD+wmk0byue/6HL/yK7+C43gNdG4ymRLHLTwv\n",
+       "4OhwyO5jO2YA/NZbzQyi0+k0JDWb+V6qpWCTBKajCVunTjM8PiYKI5KZcZ3au3efXq+H59SfyXG5\n",
+       "e/cuzz33HG+//aMHrRYAACAASURBVDbnzp0zvfvab3ZlZYUkMYJQq2uraODu7Tu0223iuEVVVHiu\n",
+       "xu8FZHnBr//613n94pukeYGiREjRtN4etkQNh7TZttYLUxOb6S9Ls2pVNq00G4TtIer7RpbWZtdJ\n",
+       "mhLHcSNidnR0RL/fbzJxW9kVRcHx8XFdfccnDgb7Ho3NnTBOPfYzDwaDJjZY6LFlZf5mh9d7HsDP\n",
+       "nDWT7LIscTyjWXDr9m329vfJ85zNVcMY29raotPuNNha6UikANf3CUNDby+qFI2m3WrXZgzguS5h\n",
+       "EOAFZtqttMbxPMoiaXrfqlLMZxlFkSOtu0lZURX1cLVOyBzhINwFezPLzNAUBaWucKTTZJxBEBhD\n",
+       "VCFx43ZzI6uyRFUVruMShS5eEDXMriAw8LCHsa9s/3VZLncZZ26/5kn/ga8fjcemFy8dcwghKKsS\n",
+       "EYQEruRz3/WdfOEL/4jeyqB52JUyfb5Wu8Orr7/B9uk1vvbiBT7hR3Q6bYbjGX4QAw5B4DMeT8jz\n",
+       "gqdqZEO/3+f69etsb28zHo9J05R+v49SiosXL/KBD3wAgKPhkFanQ5qWzJMcjcd0mhBEHYbjGRcv\n",
+       "XuIb3/wmAPdv3abd6YIyBgG+65EmMzwpePzcLk8+8QSqqgxOW+ulg3GhHGdISQvGq82grATog9ay\n",
+       "kNhy+8NuZMsgXuYN2IzX/tuW43bD2p9nbfVsC2A+nzf32fqdWqauXXme4foe7XaL8XiC44TkuQl2\n",
+       "eV7Q7vd5++oVxpMxp05t8uSTTzYVgM1Qh8NjQHB0dMTa2irz+ZR+v8f6+lpNHOo27Q17yJ05c6Z5\n",
+       "3jzP44UXXuBLX/oSu7u7XLp0id3dXQPjqw+bOI45OjpiOBzSG/RpddpUZUWeZk0LMc9zHOmye/4c\n",
+       "r775Bq12m2Q+PCGt8bBlD+HlNqIN2MtyrnZZ7L/lZiwPIpsKfaklYwX2gCazXrZWs8ne6dOnm6C8\n",
+       "3AZZfl/7HLRarRPy0nmeN0NtK+Bnf493W+95AP/O7/isgfDUJ+Lx8TE3b95kb+8e89mE27dv4PsB\n",
+       "w+ERCAcpXUMjroNcUZYG5icFji8p8hw/qE/HwmQkfo39REAcxayurFDmM6qqpNfv8eQTT7K5uYn0\n",
+       "QqIwghrbqVTNUkyLuu/l1nhxgxsvi4IsNzRpx5bO0gMtqZRCSEmlocpKKlXhOMY9HSEpKg2VIitm\n",
+       "TfukqhRai4fKydqHwaITyrJsJHVtdlhVVe0F+K2rgcAJq/hmWj9FmpLlOZ/4+POk6Zx/9a9/gTBu\n",
+       "IR2XSiuiVkwyT3Fdj/uHQ0pcvvhLX+apJx+n3+uytblh8M55xurKJgcHe/S6A5IsXWK+LhhnFqO7\n",
+       "trbWlJhf+MI/5kd+5Ed4++p1Ht3dJS9KVjdO8/M///NMZnOjczKeURQlUatNVRbo2oVICoUuKwYb\n",
+       "q/zhP/SHSJM5oW9aUVrIE3INy9dieVBkg++7tVAsUsC2guzBuYx2aJi3S0HAZv+e5zVDX0NSi5p7\n",
+       "uiyiZstyi+23mbJl6dlyv6oqptMpu7uPcvXqdaOkKCyTMyPPS46Ph3z+85/n6OjwxHW4desWruvS\n",
+       "7Rp5VNPLhvX1da5cucLq6mpjym0157/+9a+zu7vbBEwhBEEYMB6P2dnZQUrJJz/5SV599VV6nW7T\n",
+       "90+ShH6/33jHSinxQ48yL5hOZ3S7XUpV0R/0+MrXv0ZRlvQ6baaTw6Zd9LA9Ye5L1Tjs2ArKXlsb\n",
+       "AJfx2WVhDoNlmrvNkJelXe0A1O5P2+ZZblsCzfsukxLtH3s4Lwvd2efDHjL2fWzlUBRFIz38255K\n",
+       "H/kOkWc0gduRx6n1Ps8+/QRxbEom6UgOD4946aVXuLd3wPB4QlYURrHJcZD2b8chVyXSj8nKCrRC\n",
+       "SoMLrbTGrzOX4XjO8WhGVsyRUlDduMNXvnEBUffPtdKEvk+71SZuxXi116LVWQhD086J4oiovplG\n",
+       "d0HgeUbbpKo1vrEIBSHrMswI5UdRROD7hIGHK0Rz4nq+h+NIkuTBrvQ201s+yZeFdhZlpuZo+q2v\n",
+       "NxrihqVYlTm6qhAapOfjOxJV5Hzf934v9+/v8eKFV3BcD8+PGA6N9kaRFwhHcvPOHhtrK3z5336F\n",
+       "M2e26Hz623Bdn053wN79+3huSJ6Za3Djxg1Onz7dZBlWj6SqKlZWVho96uksMSbJeYVwPPbv7vPq\n",
+       "669zb98wKN+8dMXQ6oMIoSzbcU5VFAgJvW6HP/7H/hvKoqg9F42KnV7KhJeJWbBgttrNtNx/fNiy\n",
+       "r4mioGmR2CDguhIhLCzPmjVAFIVU1eLAtQFjGb9vg7fxSJQ4jofrLhQurWZHEHhNFh5In6JUnHvs\n",
+       "HNeu3TDtIm30s8vS/I6T2Zyvfe3rPLpzlv3JGKUM03ZnZ6cZ3NmKQ0rJnTt3OHt2m6paOMgEQcDB\n",
+       "wQGf/vSnuXjxIrPZjHPnzpk+emp03WezGVEUcfHiRdbW1jg6OGxePxgMGA6HdDod9o8PiIKIKAgJ\n",
+       "PJ9er8+tW7fp9vvcunObl15+mbDV4tr1m6x0feI4NtoynQcbTQP192SNqcryPMLi7ZeHgXaoaAOq\n",
+       "/d1te9J+r6qhvzbhWNZJst9jM/bAVvhL72Wfs+V2y3Jfe3noa/e0rTjm83nz+d9tvecBXBQZrueg\n",
+       "igKhFQKXqsyZpCZDJjCwtlNn1gnigDS7QjrKjMN8afrcCsjyjFIJfN9s3qosmw2VFSVZVisEuq4x\n",
+       "gQ2kMcFVFW4U4dc3vypLhHSYZIpxOqlvdNlsVltyCalRhTE6NjdOE8cRUormMChyI0gfBL4RsVdG\n",
+       "09y0eFykquh32qysDNjYWGfrzJb5+kMyQJttL0+0YcHYsrhU/ZDJdV5kCBxUVTVmzGiNrgqSLMfx\n",
+       "PI6qgh/6oR8kbrf5hS/9Mp2eg1aCJEnp9fsU2hBHDo7HeI7gzp17/It/8a/otlus9LuEnsenP/Vt\n",
+       "gMl49vb22NraarC1Vu8mz3Pa7TbD4ZCyLHlk5zGyvOT6zVt88+VX0EJy9949hqMRVQWDlTXS3EDc\n",
+       "tCpJ8xRVlQitKfKCP/Enf4wwCJjPp4R+SFEYLYqiZs7ZzQc0GZTdQECDrrAl8IOW/X+2NAdOZODL\n",
+       "WN5lSKc1z7WDUqDBHFtEw6JcPok+WC7x7XvawF8qw1HY2toyeuZxi1liMu9Oq839vT3WVld56cIr\n",
+       "BL5Hv9/FdV0eeeSRGkbnMZuZwNfv9zk8PKyJZLrBZVtd6larxcWLF+l0Oty7d69BQdl+r9ZGoOn4\n",
+       "+LgRmLMO7xa9kqYpg/6ALE2ZjCe0Wy3KsuLs2bOkWcGvfuUr9Ho90rLC8QzbuWlFqIdbqk0mE4we\n",
+       "0mIQaO/hOwf6SilKvSBLNV97R2BdbrXZoD6fz5uAbRMRO4hcViVdbr/Yn7E82LYqpfbeW8LP8nC8\n",
+       "2zVmG7/tM/CqSJHaqRUiDLOpKEujmNZqMS5mqEox6HfZOr3FmTPbTKYpb158i3t7e+SlQYF4vkM+\n",
+       "yyly42Kvhbm4gR8QuKYf7sg6A1WKUgu8sEUoJdZjUigJwgMk0pUIjIawlC5IDRJ8P6zpvhWup80Q\n",
+       "FINMmaYFvmvozUKYz1UUJbN53pTDWaGZzEwg9oTiDga2qHRlBowCNjc3+EsPuFZHwzFhEOK6AlUV\n",
+       "6JpW77le0+t1PRcJTB6QRJZFBcIcfFJKpGtIJgZGJqlUhS4ULpIf+t0/QJbl/OqvfZVubwWNZDyZ\n",
+       "4ngRvW6X2WyC1pK9/QP27is21lYZjyYks6lpg+zssHt+m/E04f7+EUoZIbGD/X3W19cpy5LZ/JD9\n",
+       "g2OGoymT2Zy/+bf+Nq7n0e50+eaLLzJYWUFKl/6gz6R2Vk+zDFGlRnJAVaz0O/zFv/CTHB0eME9S\n",
+       "Wq02ydz02dM0pcqrBRqkzgiXA2QQBAtoZoMweXALypEOValwHRch7UBzubUlm9J9+T2aQZQ23AT7\n",
+       "GosZNsHEfDalF1LJtj8spfGs1Frj1UgoR0ryuRmMn9k+w6OP7nDj5m1czzXO8FGIqqomGO8fHnLu\n",
+       "sV2mMzN0VVXFZDpF1XOKNEnIs4xHdszA88yZLfLMPLdXr15la2uLnZ0dkmTOYNBHCMH6+jr7+3v0\n",
+       "V1bMgDgImIzHdLtd+r1eU3FIxyEMA1rtFlmeE4UR03zCdDolimP29veYzRNu3brFPM3wo4hBr48n\n",
+       "M2bTKZ1ulyiKHxpDzB4w3AtLgwersW+qIHsti3LhNwmLQ9ketk1LZan/vYwusfc2rVm5y9ol9gCw\n",
+       "B7m9d7ZVYysDiyYqamKQkKJBI9kAbqW1f9v3wN3QI6/L19k8IYpiKiFQjsMsy3GFgysdQseBqmSl\n",
+       "FbDajnjq0c8wm81IspTpNDGMvkozPD5mOJqwf3DA4cERWTnH80Nj0+T5FLbc9QVK55RFrWfgiHpj\n",
+       "gXCF+UyOix+H6KqqBW4sS0oj3FqZ0BVIZ3Hiq7JAKEPXRwuE9I2kpRZoBApDFNJCoKVDYtEjVFTS\n",
+       "ZGq3Rw8e2Pzdn/vXUJNDpDDY98D36PX7BGFgpAAciUTxsRe+9fWT7PtOfuFBs7oKKGA6h+/4jPnz\n",
+       "W1mPnIOkToKyFIIOJ9yCVk6Zvz/3PSdf933f/+//Hq9d/MLiP2pDnTsPN9b5/74qidQSnWuUo6m0\n",
+       "QTJZrLfZuLr5t8ZWYy6qMlZf0rFsQaP9I4SmKPIa+aKIo5iyJnIpDRozaO/2+k2lZecInudQVjlV\n",
+       "lfGf/54f5i//5F/BFxGuF5HkOd12h3v7Bzz15ONcfvsqH/nQRwiDmMDzubd/m26nQzxYBeDeaMrq\n",
+       "6ibzWYHrhkzHhpm7t7/Hma0tsiwF7eM6jrHfcyTTyYg4iqiKElcaSOCgPzBDy+MjVldX8TyP+3t7\n",
+       "eNozWrhJRdAKKTyfrMxJ8oT1Mxv80j/4AvNkysrKOsPjEZHrU5UFURDSCiPOP7b70NviSs/Q46sK\n",
+       "6YZNxmsgv1bATKGlwPVdZKnQ0khKWEJPlufGDEJryqKok5kcoRYH8XIWbVsl9qC2f9vvtUNJ6x+w\n",
+       "3OrUWoOucKTVL4JkPiXNMoIgagK/vdfvtt7zAD4cDutyunNiym5OuTlB4OF5PrPZHM/3abVa5Hmx\n",
+       "MPMVRqhdKVj3fbZPb6A1hEGE5wWMp1NefvkCV69cYzSZgIYwitEoykohVG1SrAxL0/d8qAoi10jY\n",
+       "llmO40pUZYaSnnRJq8I8tJ6H53i1GFEtLIWlV1uHD4krXHS9qcuqMoeD5+EsZ284eJ5saPUPWrlS\n",
+       "OBoc10egSLKcLC8YzUy7yfVcXM8DVT4wgP+n9VtbjmdQC7JmSFaVa4bpGlRVQzOBSljpY1kjk+qT\n",
+       "Ulid6AK55M1pYYlZZgwdpFww+Gzpvjw/WJTxLbSuyPKCXq/LJz7+Ai++dIHxeIyUDqNqTBgG3L51\n",
+       "F991+Ps/+7P8yB/5I1y9dp31tRV836BETIvDIY5CHNdjPBqzurrGG2+8wZkzZ2i3DZ47mRvJ3Fa7\n",
+       "S5YXuPVcRGqB74fkecXOzi6Hh4ekaU6SpNy7d5/Tp09z9+5d/HWflbVVRqMRQRShM8k8Tfi//6+f\n",
+       "5uDgmLX1VQ4O9uh0epRVjitNRbOyssLu7sMDuIH9ipqZvVCZFEI04l5VVUEuyMuCVg2Rlc5C4tVZ\n",
+       "GvDKOpC7oQ/17AIWrTfbbmnaqWIhKes4TsNEteiVxXxj4QEchf6J7NzzPBzXRSmagP9O9MyD1nse\n",
+       "wAeDQX1iYqjcQjawIgt1Kopad7sZ+Bk5UHsB/Jqo4bsOQhgWnao0SuX0Wz6ffuEjfMenXkBVmoPD\n",
+       "Aw6Ojsjysh70aJQyDMTxeNz8KZUwusDSMdBCAUI4aFXiC40IDQlAUQIGleLHgekx16atWgCORmFa\n",
+       "LlI6SNfFsf10pYzrNjaIa4Oq8R+CQ/YDqqI0CBetQTrmPYRG45JrQZ4bbZb/tP7/X7N0UtvPBWTZ\n",
+       "SbNaxzV6MGDutaoUKJNFS2lMNkTNSTDLBOEoihqkh1Kqft4kShuWcRAEeK5Lu9UyGV7d462qCl0Z\n",
+       "LetWHDOdp3zihU/wxhuXEMIlSzMIBK7rM5zMWOn30cLjn/z8/8Nnv/3TOK4xhNBaMZnP2dzcADSz\n",
+       "2Zgg8Lh+7SabG6cJg4j79w/wvYDD4xGr6xusrW2QJPMat1yhtYvr+DjSoywUeVayvXWW4XDI6c0z\n",
+       "5EnOoLvK6GhMGhht+LwoaHUGvHbxMrN5QbfXZzgao9AIoeh0O5SZaTVsb2+TZQ93pjEtD9B6YY7u\n",
+       "OA55jZZZnmuEQUBeQxLFMl6/KJqePdQeuUvQRPu15X62Dej2M1i2qsV4W/Gr6h0/x/O8Botufy4Y\n",
+       "yLMfRM08xLZs3m295wHclha+HxJFcaOtbMHzFitpTyhTkoSMRuN6Kh+i0ezv77Paaddlqah7jYaC\n",
+       "G0cRfuChVIXQbdYGLcrKZEe272X1DuyFC8O4QQ5M0hmT2ZQrV67y5sWLTOYzcCStuI0WAqWgqr04\n",
+       "lXDw/KAZdlV1RuAhlyBLdZ+sLCjKHKQdvJjb8TAmpRJG+bAsKiPIT80ekwKhF3oKQrznt/U/ylVW\n",
+       "Od2O0R8pC1XDPnWtR6KbYVZTatfVr+sZ6KkZbLl1RmY0do6PjxukhBnALSjgtpdq5Qo8z6PMc+ZZ\n",
+       "ZnQ8HAObLUtjarK2ukq/1+PKtRv4fkhGgeuW9Ho9RpMpvXaH+/tHXL56g2effgrPlVSV4syZbcLQ\n",
+       "GFu04pjJeEq/JvfkZclsOufMk9tcvnIFrTXdXo+ilkStqoo8LZohMGCctRSEXsjtG7c5deqUQXGF\n",
+       "MfcP7hN3uiA8Lr99lS9+8ZeJ4pBut4OqoNWKcHzB/v5dNtc22djYZGfn0XfNRE1v2RymRWGqcysd\n",
+       "GywhTuw+X0Z9vJPSvxwwXddlNps1Dl8m3gQNh+CdaBILUlhmWdr3XIb8BkFAkadNzFnW6jk8MqbZ\n",
+       "/X6f9fX1h7Kym8/4bv9TmHT23wAB4AP/TGv9E0KIFeDngB3gGvB7tdbD+jU/AfxRTDf1x7TWX3zX\n",
+       "T8ACXuO6btPIXyY/WMdoS7lVKmnKHdcx2cr6+jqeqmi1YtI0w+j2qlozO7O/j3lHKfC9qC5haw9K\n",
+       "XZImGUJKpHBIk0k9/HDptUJ67YjTm2t87nd8hqKsuHHzFjdu3mI6T5jP5qRZTl4U5EVFmuWkmSnL\n",
+       "fNfDWC2BZ8y3UY4RxvLiNqoyD19VlVCZIQzqwRm0Qhtj5BrVYkdtSiuk8Ex/XGvUQ9QI/9P6rS3f\n",
+       "s0bSGUKGpirDlrkaoQwTWAjqNohRxMzKOY6QONJDKVP5mcN2gTMuywrX9agq1aAPLMXfZumz2ayB\n",
+       "9ZVlCWox7AxDj0oL/ts/8Sf4q3/1f2MyT0jTlDAMSRKDJJmlKZ7r841vvkRVVZw/9yh5OgcGRotf\n",
+       "KxxHkmYJ/c6ASlWMxyPW19eYzsYYCoNgOp1wcGCG0db13Q98XNepvTsFrZpebltANgD2V1a4cvU6\n",
+       "d+/v8aUv/TKr6xtEccRkMiSKQvr9FrPZhLW1FTY2Nvnwhz9cE5re5b74PlVVnECHKGXqWts+WVYM\n",
+       "7HQ6J0g8VljNfwfk0LZpsyxjPp9/C05/mUFt/9j3s2xuWDjVLzN2pWg3pDHL0KRu36yvr6O1Zm9v\n",
+       "77cGI9Rap0KI79Baz4VJ635VCPEp4AeAX9Ba/xUhxP8E/CngTwkhngb+C+Bp4Azwi0KIJ7QRLHng\n",
+       "skG1LMvG8sriLH3fBy05PLhrHhDPxXEKOp02WZYgpYPjuQ1xRWgH4Qi8ICCQEdZ5xXXN1L55BrQx\n",
+       "SpVOTWipSTvSMeiRvMjrhyEiTc1ncV0PjSCdljiez9bmGttbp8mLAo1s+m9l3SsvckOtvXfvHnv7\n",
+       "++zv7Rt7Kd837E3AARBQ6RwpjBu3kKZkftCSAtJ0jtcIABkrOQDpugb/LAUg+al/8mPour/neV4j\n",
+       "mC8khoFZZwhKa5LciEhpVeIIA0N0ENY8hrJUTTZYsKgiBNogHcqKIjciY1WZAUZvvKyvicXYSilx\n",
+       "agLROwdCbs1iNcw548odhwFCazzpIIAiS/Ech8qpFRPra+I4Ti2CZWQOojikrMoauucSBCGnT20S\n",
+       "+AGDlQHnHjtnqPDjSb2h23WG2KYoM1R9kJaVESkTQJ6neI5PnmZUZYUmQwhj6qFqnXbpmlaayfhq\n",
+       "6z0B3dBs1rIocB0fv86wk3lKnmZoX+O5ZkBYLlWDNpGxWZyFs9myvyhK/MA4uedVDkLiIviDf/AP\n",
+       "8NM//XfJ89IM+pOUlZUV8lqjut/r8evf+A3u3rvL7/99v4csTdBVie863Lx5i8FggBYK4WhcTxK3\n",
+       "AoqqpCwzozliZZ8jAzlUqiAMPdJsxupan/39A2Zzg6JotSP29/dR2sA6Azfm0uXLvPTyBfqrawRh\n",
+       "yHB4TKsVEcUe0+mMViuk2+1ydvssaIkUrlH0fMiK45i8zE5AfauqVgD1PJz6GTPDQ01aB1RTJTmm\n",
+       "pZMZo4llvLcNsO1Wy7Q1yhLq+6Fsj7p+hrVSqJoMZMk6dpBqSUY2AxdCIFjo6NgeeFlWuLUtnO1/\n",
+       "/5ZRKFpra8rm1zHnGBPALT7hp4FfwQTxHwR+VmtdANeEEJeBjwFfffjPp9kIyxhOu7nLQjWQMNf1\n",
+       "GI/HRv+kbr0oNFluprVCS2QiUaoijCKzIUrDjDPys2aDgfGNFFKaDSqp8dc1bKeG9SRZWsO6aDak\n",
+       "xqMqckNBr6FFDRRPg+8ZSGEgXVqn1tjZ2mg0WJRSHBwccPXqVe7du09ijX9FuxmO2L8ftPqdiDQx\n",
+       "Uqm6MFBLhTEQKFVpXEUcYzSgCpMZBK5EVQXCTrMryGrNc983JKWovuaqAIFokDmO9MwB52izkSRU\n",
+       "JHWAq/N8F2Ro4VOGvGSzmExlTbDRdXvBEQv9iEbcvjLDWS/w63LSq1tbBnFTYKzUlAapBYWWTcYj\n",
+       "pUABRXPdNMk4xSrs5YeHOK7Lvfv75HmBqjeLFILBYMDpU0ZZcTqdEPsR3W6XOI5wHIdWK6Lb6xDH\n",
+       "Ea0opigUUvoGconFHYPvelRlSTrPFv6GlRWjEiSJme2EgXFMV6qiLAy+OI5bCCEXTFDR7Ltms9vW\n",
+       "zLKYGRgcf1WqOgt0KauSPMtZW+nz5JOP8+KFC0b1UtRuV0oRRTGjyRjf87l89Tr/50/9NN/z3Z9j\n",
+       "ZdDn4PCAVqdXz1py5kmCFoqsrmBPnTqF40haUYvJZESSzHBdD1UqPM+hKIxo2draKvfv36fvr+B5\n",
+       "Lv2VgcHz5zn/4p/+Ey5deovVtXWElEynI0DR63WoipRuu0MQuqytrLC7e64Z+r0bHvro6BA/9Fk2\n",
+       "THAcY6ad1Th2+7zYoLrMA2hmaTXU1wq52a81768XAnL2kLCdgqIoyOvE08IUlweXloFrEURVaTwN\n",
+       "7LLBvSgXPA/bc3+39ZsGcGGUnL4JnAP+D631a0KITa31/fpb7gOb9b+3OBmsb2Ey8Ycuz/XrMlQ0\n",
+       "bhgW4G4HmfXnQDiS7e1tgxUvzCBnlswbDLRTX1jHc5nMplSlMRSgtK4cwkD4tCYrDD7VrQObkMJY\n",
+       "atWmEJbkIl0XUZlUVBhZOfOAiFqcSVVUldkcYRghpapPeAfHdcizBKoChwBVVvTbMR/78HPmQclL\n",
+       "cExg93yPOIo5PjbEFv7a3/qWa3XukdMcHx+TzOYkiZlwF7kJkq7vEQYhlVYoVeA65mCywx3HEZTq\n",
+       "/23vXGMly667/tt7n1fdqnu7+/a7Z8ZxkulhPMZ2t8cPktjETpzYMdhBiEAQQhaCzyAhhRBLCPgC\n",
+       "ASQeEiEoQBRhwDwEOA5YdhzZseIgP+dpjz3xiOmZzIy7e2a6+z6q6jz35sPa65xze3p6TBL3nfat\n",
+       "pW7dulV1q07tOmft9fiv/78jeAhWmB4T48F4EY3FYzInSt6ZwVpHVTUED3kmkmuCC3bYVBx78KFP\n",
+       "F+U7csLKGJ12YlwfafflHo0gO98/FmKU3TSCbbdJgob/aZJJWSkInzoYXFbESCs6MitzBF3nhX/G\n",
+       "gg+WroM0F4a5nWVNkeYUhThvgkzlbm09IU3yTKT5VAg5TRIZiDKe9fUNJpOcjY11Dh8+xB1nznDH\n",
+       "a84wWZuQxtpv5yHNCmkwG5ngCxgwUkpJXELXBqpWNzVPlhmqqtnjXNqmwYzqvYJl1lJggw4llWWJ\n",
+       "jXqavpKsha6laSqcs/yp97+X5XLBVx54kM2jxwXyN52xXIqU22IpTI9b8yX/7X/8Ovfdew+nTp7g\n",
+       "j919N3VdYqzl6tWrHD9+TIIXH5hM1khcSlM3EAxrEymRmCj0DC1pmlOWNfNFxZHNhC6AcRlffeAR\n",
+       "nn76abZ2djl+/DhVI0yJs9kaRZHT1oLrX5ts8H133cWb3vhG2g7yfNKLBb+crU0nEmmPHF/btn2m\n",
+       "pwNQGhzlkb9GAw3nYobXNEwmEyaTyZ7J3KG3NMgYKjWC+qbpdMrMDqyDikAZ0yAo26ExBmtCDxXU\n",
+       "6yXPc5p2kFXTLOxm9p1E4B44Z4w5BHzKGPPu6x4PxpibtUpf5rG/B8Cv/OqjnHvjfZx70+v7MeHF\n",
+       "YtGfrDul0JBevXqVjcOHegfbNIKzXF/f6IcfmqZjNhNSotlsim87rl1VEVeRv8IHrLE4l+ESF0Vn\n",
+       "Y9QYPGmaR0cShzR8R+hUDUc0NVUEV0+Y4D1lVVIt5thEJK2czXFpKp3lNO8n1FKXUjYLQoB8ktOG\n",
+       "Fms9vm65ttxlfbZOXd+4hv2eH/0RnHXkaUbbdjRtSz4pePLCU/zfJy9w5dpVdnZ3KBdLgm8B0bnM\n",
+       "8pzFvKRrZBq1mIhDbpqSLHE4I8NMxliMbYSnseuYpAbrUkLwJC4wnRRU5RxE3kLKVUkCsUHcNp6m\n",
+       "64Qq10CRyWi/MUZEOEY1RGFE7ESpKEBoO7LMyTRXnGYFhXgG8MI5472nbIWCM08HjUECWJdhrRH1\n",
+       "pLhmNmZ2eVIQAlR112cEzkaO7qRgd3dOWmS4LIcAZdfiIivk89d2cbtzLr64RdtewCVfw7clhsCZ\n",
+       "O+7g/LnzxgMkvQAAGwdJREFUnDp5kuA9iXNkqSCXCAGDIXWO1raR/UFFp0MPO9Q18XEtJE6IWU3M\n",
+       "QhI71FjbWjIZQV14yRgJwnlf5LRtRULgZ37mz+JDxwMPPUJRTNjZbpjOZlRVoCim7C4WJM6xsb7O\n",
+       "5z7/fzhz8jTGJtxx5jQGWJQN83mNMRJFFnnOcikEW5N8yu7WIpZ2JnhvCD4heEs+KTh+/BTPfvsy\n",
+       "8/mCT376UxBExnBz8xgvvHiZ6caUtSwlSQOGmjRL2ZjKANDZu++hrT3LWqd3b06r2nVd3DiHCcge\n",
+       "8hcdsTrgLNLXEqP0fhI3Pl/Jr6RsmuyJpvX1hYM97QPNtm3Z2tqKSl7DEJf3vi9hqjSdbjDgyWNj\n",
+       "up8CThKsgy9/5SG+/NWHvyMUinmlJ+x5sjF/B1gCfw14VwjhojHmNPDZEMK9xpi/HU/IX4zP/yTw\n",
+       "d0MIX7zudYL69U9+7D/0O1znJQVWWkdhKMtjdzeVsff4YbNMopC12Yyd3S1CCBS5dJyzGLUYpJk0\n",
+       "bvi1bRejfUlHtYwi6ZAnz7M+FZeTwFM3cew2DNSVJgTqppLNwI74m5OENsRdPShNZUoWR2WdTfoI\n",
+       "sGqWYKMmHkjKV9U463jrOz/4kvX/+gO/RegCvgtRKzEb6q/OgTN0PtA2NcbXZKkgdMqq4elnnuHZ\n",
+       "5y5RNw1lVTPfXRAMbB7a5MzRYzSdNJG9BTB4A9s721ENqWZrZ0s21lIiREnMjFD3GqmwW2vxxGGn\n",
+       "Tj5XFkntNRVsmgbicISPZbAkSYRi3dBzeejtAScf3y1Ah90TGTkrZSvfea6/zqu2wzIwwSkLY/Be\n",
+       "6A1iLTNxCW2qG41w8xhjaKtaNikTaJu2RxgkxiPpjJSdRJG+E9GQPAoyxHR7tlZAEKqFQ4cOM5ut\n",
+       "MZ1OOXHyRFTBGTjGx1N/erHrxqe/6zEEI2RSIfjYj2iHaN4Yyqqm9fAb/+sTPPzII5KOYdk4vIlL\n",
+       "UoH9tS1pkgpd87UtnBG+lZMnj/KDP/AD3HvvvUJw9cQT3HHqFPP5HKJT6x1QRG9dfl7KVc9dvMiT\n",
+       "F57iwtO/jw/g0qQXcA5tDaZjul7QtCVrk4y6Ljl1/BR3nrmT+8+/BXxC6DydDX0kDHD3638onnHX\n",
+       "XxefJZ3IGmsGqOeM3tasz1oLWULwAavnIww17ei8syhOXivdtN3L/66Z0JikKo80sOMyjfaatDzc\n",
+       "bxJmKJHpuSk+kD54VSz729/5AUK4sSjoK6FQjgFtCOGaMWYC/ATw94GPAx8C/lH8+bH4Jx8H/pMx\n",
+       "5p8ipZOzwJdu9h5pmvX1SusKnBu6w5NJQZqsRQKggfNDKDwV5rMrC+MDiTOITFlD6KQu6JwjjWWK\n",
+       "LBNFntQ6muBo4vvIju0jptX0m0c/Yu8S4eAIoW8Mdk1LlgtfuCJIkizFJIOST9cFJnYQNF3Liwhx\n",
+       "Eqcl9b0u1vyhKNZYn87kIrmBtXVFXTbkaUqROooso2pb6rYhGE+aFoIvxeNMoK2XOJewlqf84Pe9\n",
+       "hnvOniXPJwQMTScitjaA311K9pGLwLM3nmChrCtsYkjzVNj/MKTJhAsXLvCtbz3BxYuXKKuK+bIk\n",
+       "4DBGuGaMkctsOV8SFb9iypmQOoeLIsIS3URZMRk7JBhIrQW7d+JNm1POOrKo2h4rWkS6drlQvHxH\n",
+       "IcRIOy+wVtbfBn1ewBqHSwwud9RVhQ+GZaWOE9q6Axvo4kXbRf6YZSlkVJkX8YU0Fe4dg4g+4wNN\n",
+       "B7u7Jb6VPstylrOzvcViuRC6BGB9XTQrhTApsLGxzmQy4fSZM8xmM06eOMGZO+5g88gRNjc32d7e\n",
+       "xrmE5XKB96Kb6oPMG3Rth/ctPm7Ci8UClyRc294mSTP+0l/883jveeDhhzFYrl59gensEFk2IcsK\n",
+       "vA9cvbIlNVkDzqU89tjjPPX0s3zmM58j+MCxo5v80NvexokTJ1ifzljO5/gW5r7kmWef5fHHH+fJ\n",
+       "p54izQspaWbSMlubTqibjkvPXxG1ehOYrK0xn29xeHNK19W85q47SZzjnrNnaZqGcmfBieMnuLz1\n",
+       "Qq+oNCajut7yPKdU+TQz0Pl2ncBttamoJ0qjKBEUlRazaa2dezm3iqJglq3vOQf1uVoiqeu6L6cs\n",
+       "l8veeWtvTHlktOyijp/Q7dHelIGiJsKfTc/iebPPDa8QgRtj3oA0KW38/5EQwj+JMML/CryGl8II\n",
+       "P4zACFvgb4QQPnWD1x0i8F//90CMYO3Axzs+8DFvsu6CimzQ3W2M69R6ku6A6pC1uaFRm5ZAblRn\n",
+       "GvMidH4QGlXugzG2VI9NndL14H/94k2ssQN9GaZrB0FifT/nHG/+kfe95Jge/sKn+whMdfUCeyFS\n",
+       "0mTJYDTUoNj6OnpT7XobY5gv5mRFNpQpdB1jeqgWukjYUwysa/pTP3vdCS3rlStXKMuSRS1lnrZp\n",
+       "2N3dpe06tq5dY3c+7/mdy7IUYWhbxHXT8oLH2KS/CLS+7UOgq+JwhhUxD32O6lPqcXXe42n7763z\n",
+       "Q9PJOddrGMoHhNANz73++9V117/PnOsluIxzEectpGYheEKg75ckRiI8uXCh7VrWJmtUVUkIUSgi\n",
+       "6HkNvmsipbGUg2yAY0ePcubUaY5tbjKdTqWunlhmaxMOHTrE0SOH5Nhj5tI0FWmWgoGmbfAm4Xd+\n",
+       "94t87vO/y7xc4rIMl+ZkhUwsp6n0AAgBvHDfawnCRnnCG+GpQ/Akacx0g6GpO4rJlDRLqesSTEdd\n",
+       "l2xsTGmakkOznCb2tnznOXP6NGfvvpu77rxTAp26GWUgg4BFCIH73vxjLxuBg5yHAptc9ph775UT\n",
+       "ZZiozCdZf3uY/EaiB4bylTGm5yvR73/c89HrXDf4JmivzfaRt77WmNfEWtv3fPR9lF5YIKVtj0AJ\n",
+       "IfCWH37/HywCDyE8Crz5BvdfAd7zMn/zD+CGXEw3NN1prLU9DnPsxK9cucL6+nrvxK9feP1y1VHr\n",
+       "643J9RcLAdJIF37S76IKW1TwvRDhDw2PPkq2A+2o0kbqhqDvobuxYl7X1tZ6Z6/HoGobulvvLBYi\n",
+       "dBxT0HHT9kameFU9GdfX1+n8MLHVNE1sblmJtMOg96hrpcemG9R0OiWYgG9afNz5QwjMd3f7ml2W\n",
+       "ZWRp1p+E3WhzVUe8XC6xiWN9Y4Pjx4/HiCbr65GKYV5fX+8n3lTVxlrLhWe/TVWWXLp0mWeefYat\n",
+       "7S3apo5MbZIBGevIncPnhhA6rA2AQP4MCakTua62aaXHkKU0XoSwBTUg/PEGaNuaPHGkzpBlOdvb\n",
+       "W9JwtMLl7jWSiuvtEpHB0/MMYyO6KOouek8bdSuNSfpN2TlHaHXQI/KgBMOyrMiygq5rpYXsA0mS\n",
+       "4ZwhpFlUggK8J3Qd2zsLnnvuYYosjdS+deRViYpQzjJdW+PY0U2KouDkqeOcPHWSzc1NskmGwfG+\n",
+       "9/0U97zu9fzLX/ol6rLGe8O1q9tMZ+vMZuskLsEZK844alCm8fzd2dnhyJEj4AcpMdXs7HxF12lv\n",
+       "KNDUJcG3LJcL8iKliNDZLJuyde2Fnk75+LHjvPn8eTaPHBE2xULO2cOHDlGVFS59ZTInvVZVM1Oh\n",
+       "fxIl1/1wnDJN6vOVAVCHkZqmHYbvRmUSDYz0OledS6UFtqNAsGnbnoivS6QXlEZqDGcMeSTkyvKM\n",
+       "ru16cW8YKG7HQ0F6rDez/68a+B+VjSPwT/zPX+sjHjOq+ekH0N1TLwa9gMZK9mNWtx62Nnq+Rt3j\n",
+       "FKaNdSZ9DW10aFSvv4cQ4jj0gBTQ4xpDkfRvVLXaWst8Pu+/YKXd1I1I0n2JKvUYNMrvuo63v+sD\n",
+       "L1m3h7/waWDYxIwxYHx/Wx118IG2HTIVPc4sRs96UchmpmyEroeqta2IZIzHeXXASjMErX1qhKHf\n",
+       "h0fS+rIsmWQTKWnEz5fnWWxaSu2jrup+ve2IzjXLUpqmlXp+KzX3xWLBtavXaFtPEzquXrkqU2pG\n",
+       "tEnrpiEY2UiWy6Vs0NvbbO+WrE0mLJdlbGC1fSSVZ3k/rGGtZRmhl7q56fc6VhjX7IU29J8tNkeG\n",
+       "v4u3rYlsj10rfRdFrwdi3XvImmRCT7hF2k56MNJfkf5CYiU6ttYQYqPYEzCxlEgn0n02NoyxUDc1\n",
+       "xlk2N48wnUwxPnDmzrs4fuIEn/jUJ7l0+XmSLJPDt5bUiQBvmsVmLvTXT+oGOl4XI0ydBwi0UVDZ\n",
+       "9MNFsm4lWeZYLHfwvmUyKTh6aMrGxgb3nD3Lfa9/PSbI52zqmixN2d3Z7XHTNhlgf13X8cff8pM3\n",
+       "jMAfe/Az/fM0oMgjZFN50cfQ3IBMQ3ZeRM9BCMdMXG/dvLPYoFTHrRS6PYZ81KfQwaFxlm/NwIFi\n",
+       "R9eT7zqqGKyNp0KVklY3EPVn59/+3j9YBH4rTD9gGUsCGpnpCKmmI7u7u72K9Hw+75VPxlhN1ZJT\n",
+       "LpXxhJTCctR56fjy2LlrrWy5XPYjztPpFGuTXt6q71onyZ7NQr9QHbXtuq6f1qqqqifOz7KsF17o\n",
+       "IqJFTy7djF4OB641trFOXpYPIrr9xmdMP623syOc5ocOHaKs5eRWhyRERkuKIu+jCN0M1aHryaSN\n",
+       "Zd1YBbM6CLiWUTvQty0GI9Ozy5oiz/osp2lbmTKNm6uzkDpL2VQY05FZqKqS5Y5I26VZBl0H3pDZ\n",
+       "wIljh5muTePwlqWNI80C4RTenJ3dHdr4fpOi4NvPvUCe5yyWC9qmpaorQoAXXniBnZ1tLl+6zM7O\n",
+       "tjjm9U0M0oAlBBLnBJniZlJzR5rgTdNgnfQGiNOWMk9gewcsePwIr3OWJB0ySGstWZH3m8S4KWmz\n",
+       "jIxcGvCxv+K7lroTFE5oZP1E7Sng20DVtuCD1OM7oaTtmo4kK8DAxRe38M0VZkXBc5dfjKRJhsOH\n",
+       "N7l27dpQgrSBcrHEA64o9gy1SBN2DWsMrbUD9a4xWBtLXkFKSMtygTPQtBV17UmcDHalDu4/f55z\n",
+       "585J0BSDovl8zlohTU5FjDljSSdaGgyU5Y0J3sSHSMmqb172ZYhAWVY9I6AGGUUu11/btXFTFCRZ\n",
+       "QL5zvKdtGoIfOGp0PkX7c0MJabj+9fi17q6ZtjrmNE0lc7KDSpNK7A0Tue2e6PuVauD77sBVoNdq\n",
+       "6ur9aHc3PYn6bDYjSRK2t7d7WSOtQanTuXbtGtZaZrPZHoemF8kYJ/r888/3js2NdkcVGtAoum1b\n",
+       "kUuDHjbUT3Ux1EZ18ZfLJdPpFGU508+nn2m5XPYcLwoR638PA5/CjexPvPulyJSVrey2sl/+L3/k\n",
+       "Lyllv+F3DeysdWRZ6LPOoX5veqrW2WwWx98F5KCmgZ/6kjFroLWDILJG4dZa8jTDBMkIm7ajrZte\n",
+       "wMR7TxnpDfI8J82zPf00Lclqpnuz/tzY9t2Bb29vR6KqiJ2Npo5yOp32qYi1ltl0OowUA3WMbp21\n",
+       "oq0X4U1d18XxcNlFdTy9bmq2t3cE7hXr4d77/gvVHbBtWxaLhUQKqdQdp9NpRAEMKudZllLXghGf\n",
+       "TqdsrK/HEkQS0S+B6dqUshRESJ5lELvfbVTddmlCksk0XzAxMljZylb2HZlk2+q0x8ilwcEqskMi\n",
+       "5qEhOZ7G1Exdgy6ApqlihiV6vDLIU+OcQg4F8dS2Hb71tK0ZlVFkeLCqSrJMIMnOSf+mbSrK5aBf\n",
+       "W9e19CASgT13+LgRvMoFHfroOCIrlAhdI+AyMouNU3VNXRRJkSaDQvj29rY445imtG3L0c3NPUiV\n",
+       "Y0eP9e+tJDXee7a3t5lMJj1CQWujARk6mc93e606oN+9NYW6ePEis+m64ILDgOdVEpsQQo+X1hoc\n",
+       "RkainROODxi64Ctb2cpe2aTkqIIa3QjA4PosXfU/xUEL8kijXsmQ9zYPdRNIU7cHEZIkjjzPqKqK\n",
+       "LEuBtJ/O1IlRycjpHXiSCJorSYQdVeZFHM4NG4jQfjQUxdB7aGND9Ga27w68bVuuXLlCkqY9ZaaW\n",
+       "GowxdBEKpwV/bSjobqlfkLWWF198sWdu0wZknud9JK2pyrhpqfSOR44c6evmWr8e4GcDPGg6ne6p\n",
+       "QQ+cHLGU03ryLO9PJq2xjxuzyj7mGaCHWpJRvciVrWxl35kNFL17BaIVoaN+Qq9boRsYxIzlNfZq\n",
+       "aOp1rf5CyyeKXgkh9MIrik5L7F6ulX5AMTrpcQnXhAGvrv0l3QiAvrmvKJWXs3134JtRT6+qKkEm\n",
+       "1PUo1ZHx4DzPef755wkh9AIQXdexWCx6Z962LZubm5RlSVVVnDp1isViwXK57EslWsPWulPXdX3j\n",
+       "UxWpu66TZpz3fe3MjZoj+nf6uPIbNE3D2toai92B0Ea/nDH6RPHpXdf1qiC6w2ujc1CXX9nKVvZK\n",
+       "piXVqqp64IHgtQUeqNe9Bk4mQLUUTcuqbiJM0FGXdd8r821H09UkmetLptrUHTfzYUQulkZQRaSg\n",
+       "LZdl/3pd29G1wyCQ+jANREMIPeGVbhr9jMJNbN8d+O7ubl8yCAzoDkWX+AjdOXz4cP+hVCk7HcHY\n",
+       "FHWhJZCdnR2ZfozOGOgbidpsBKJEW71n4ZQpr19cBiIcRbyMN5kQO/SLxYJiMoFAH0X3U2DRFCaZ\n",
+       "JAnLsqStKmazGQ8/+hj3n39jj0N96Iuf7mt6VVVR5DmGvQM0Iu9FX+MD7cQbCAOGlbiuxu1VT1cV\n",
+       "9NRaqjpyocfGbZbnlGUpF4P3cnK2LYVL+0m0NMui0tDwdxphWOeEewYlt6Jfj7Jc9pulKs+YoMRX\n",
+       "ArYzFr781Ud487n7+p5Aj9KJI+3ee+GfiMgbjabaru0lspbzRZ/W6msokkAvat1s284TgifLc6wx\n",
+       "NI3I1HnvKZfSwyiiWHBXy/lRTArKZdlP4wE03VDys1YodKVRnfSQxyzLZGzfmr7khpFeSj/IhERo\n",
+       "hhAJkUoefuQbvPX+NwkO2VnqtsYay6QoIu7ZyUSwNbj4/SzjbMA0m8hzjNBCOOco8jwqRgmnShcE\n",
+       "8y785LJmRV6QRqTMGB6ndWbvA0kq1BZJaimXpcwW+KHBX5YldVXhkGtF2fcUMqiWFwUhyFqWy4GX\n",
+       "RDNv3vRSHzKfL/nyVx/k7W89T13X/SRzkqT9ebdcLvu5Bx/rynoNi/+Q9RtnxEJJ4PuIuyzL/ljU\n",
+       "P4wH9bTkqs/TeYsx9HC4RgfTcsl4tH7cNL2Z7bsDd07Iq9bW1mhHztoYw+7uLsUIkaGQHmNMH1Wr\n",
+       "M9OL0Vq7B76nzQpgD/RQFX/GhO+6aLqYY0ymXoxFUexxzuoM9b3rqpILPTZCxqmU1uIUlpRlGc7L\n",
+       "SfClLz/IW+8/1w8XLBYLGbIJgel0yiJiyrUL3k8GMuDm1Sk760iSbC82PAjvx7hxE6LzbL2QQWk5\n",
+       "SuFNRVGwtbUlWYqXseRgRd5uPp+z4Rx1HLbZ2NjYM0DlvSePArPWCWSsbVuhr80nhC5gg0V77D74\n",
+       "nifbR3Whr3z1Qd72ljfsSYXL5Zw0m1DFmqZGRZqJeQPOjgKCXC7SLuKqPYE2yMVVVzV11zIpCqFO\n",
+       "jVDbZVlFtXDL7nzBYrHoM7oXr1xlfX2dPJVhjLZqUXk/G1PoIiuERAu5OJuyEj4eY0mMo4jnrgQH\n",
+       "cbq4FW3NJI2iD1FyS51oYhPWioKvfeNbvOfd7+xpj4MVWGjiHJPJWjzmXRIXR7vjeUrnqeM5lOfC\n",
+       "PaMsk4vFnJ2dXZK8wCWOYm2NtmvInLANWgI721txU4HIMqL/MFY2ZWMCXaucNf46B5kLNNMMjlPH\n",
+       "0fVa8wTKquwDB98NEL2bjZWvr6/zyKPf5Mff/c7eYQJRt3S49pbLpWT4ietBEmoC9R1KKxqwYffO\n",
+       "QIz9iV6HChl87rnnOHHiRO9n8jxne3u7DxbUx+jmpRuAvu54OFAd/iuVU/fdgX/gz/3V/T6EV4l9\n",
+       "nn/9b9613wfxKrPf5l/9yo/v90G8yuy3+Re//I79PohXlVWVOGbtgUEcg2/mvfMXfLnF+5aqGQUw\n",
+       "GuSEjnyS7YmSg/GILKPrgy4Jwirh3/cdnkAX+YhOnz4NDNUApWrQCF0DxnFVYIyC0WG/8RDZKwEa\n",
+       "9t2Br2xlK1vZH8aU6kAd7XgqU7NkpbIYo9f0b7UkqtF7OwJO1HW7h/sIJEtyDJO5ILDnLo7jj6ef\n",
+       "+8eui9rHZc+xw9bj1Y3llSbl93GUfmUrW9nKVvad2MuN0u+LA1/Zyla2spX94e3mc5orW9nKVray\n",
+       "V62tHPjKVrayld2mdssduDHmfcaYbxpjvmWM+flb/f77ZcaYXzXGXDLGPDq6b9MY82ljzO8ZY37T\n",
+       "GHN49NgvxDX6pjHmJ/fnqL+7Zoy5yxjzWWPM140xXzPG/PV4/4FdF2NMYYz5ojHmIWPMY8aYfxjv\n",
+       "P7BromaMccaYB40xvxF/P/BrsmcY5bv9H3DAE8BrgRR4CHjdrTyG/foPvBM4Dzw6uu8fA38r3v55\n",
+       "4Bfj7fvi2qRxrZ4A7H5/hu/CmpwCzsXbM+Bx4HWrdWEt/kyALwDvOOhrEj/r3wT+I/Dx+PuBX5Nb\n",
+       "HYG/DXgihHAhhNAA/xn46Vt8DPtiIYTfAa5ed/cHEck64s8/E2//NPDREEITQriAnIBvuxXHeSst\n",
+       "hHAxhPBQvL0LfAPRUj3o66IEGCIsKefNgV4TY8ydwPuBf4vSeR7wNYFbX0K5A/j90e/PxPsOqp0M\n",
+       "IVyKty8BJ+PtM8jaqH3Pr5Mx5rVIhvJFDvi6GGOsMeYh5LN/NoTwdQ74mgD/DPg5YDyOedDX5JY7\n",
+       "8BVm8WUsSO53s/X5nl07Y8wM+O+ICPbO+LGDuC4hBB9COAfcCfxJY8y7r3v8QK2JMeZPA5dDCA8y\n",
+       "RN977KCtidqtduDPAneNfr+LvTvlQbNLxphTAMaY08DleP/163RnvO97zowxKeK8PxJC+Fi8+8Cv\n",
+       "C0AIYQv438D9HOw1+WHgg8aYJ4GPAj9mjPkIB3tNgFvvwL8CnDXGvNYYkwF/Afj4LT6GV5N9HPhQ\n",
+       "vP0h4GOj+3/WGJMZY74fOAt8aR+O77tqRmaJ/x3wWAjhn48eOrDrYow5pmgKY8wE+AngQQ7wmoQQ\n",
+       "PhxCuCuE8P3AzwKfCSH8ZQ7wmvS2D53kn0LQBk8Av7DfXdxb+Lk/CjwH1Egf4K8Am8BvAb8H/CZw\n",
+       "ePT8D8c1+ibw3v0+/u/SmrwDqWk+hDipB4H3HeR1Ad4APBDX5BHg5+L9B3ZNrlufH2VAoRz4NVmN\n",
+       "0q9sZStb2W1qq0nMla1sZSu7TW3lwFe2spWt7Da1lQNf2cpWtrLb1FYOfGUrW9nKblNbOfCVrWxl\n",
+       "K7tNbeXAV7ayla3sNrWVA1/Zyla2stvUVg58ZStb2cpuU/t/6S2bnP6vZqYAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x114f15d10>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "plt.imshow(im)\n",
+    "currentAxis = plt.gca()\n",
+    "colors = ['r', 'b', 'y']\n",
+    "for c, det in zip(colors, nms_dets[:3]):\n",
+    "    currentAxis.add_patch(\n",
+    "        plt.Rectangle((det[0], det[1]), det[2]-det[0], det[3]-det[1],\n",
+    "        fill=False, edgecolor=c, linewidth=5)\n",
+    "    )\n",
+    "print 'scores:', nms_dets[:3, 4]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This was an easy instance for the bicycle class, since this image was in its training set. The person result, however, is a true detection, since this image was not in the training set for that class.\n",
+    "\n",
+    "You should try out detection on an image of your own next!"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "(Remove the temp directory to clean up, and we're done.)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "!rm -rf _temp"
+   ]
+  }
+ ],
+ "metadata": {
+  "description": "Run a pretrained model as a detector in Python.",
+  "example_name": "R-CNN detection",
+  "include_in_docs": true,
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "python2"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.9"
+  },
+  "priority": 6
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/examples/feature_extraction/imagenet_val.prototxt b/examples/feature_extraction/imagenet_val.prototxt
new file mode 100644
index 0000000..b0a1cef
--- /dev/null
+++ b/examples/feature_extraction/imagenet_val.prototxt
@@ -0,0 +1,238 @@
+name: "CaffeNet"
+layer {
+  name: "data"
+  type: "ImageData"
+  top: "data"
+  top: "label"
+  transform_param {
+    mirror: false
+    crop_size: 227
+    mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
+  }
+  image_data_param {
+    source: "examples/_temp/file_list.txt"
+    batch_size: 50
+    new_height: 256
+    new_width: 256
+  }
+}
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  convolution_param {
+    num_output: 96
+    kernel_size: 11
+    stride: 4
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "conv1"
+  top: "conv1"
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "pool1"
+  top: "norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "norm1"
+  top: "conv2"
+  convolution_param {
+    num_output: 256
+    pad: 2
+    kernel_size: 5
+    group: 2
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "pool2"
+  top: "norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "norm2"
+  top: "conv3"
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "conv4"
+  type: "Convolution"
+  bottom: "conv3"
+  top: "conv4"
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    group: 2
+  }
+}
+layer {
+  name: "relu4"
+  type: "ReLU"
+  bottom: "conv4"
+  top: "conv4"
+}
+layer {
+  name: "conv5"
+  type: "Convolution"
+  bottom: "conv4"
+  top: "conv5"
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    group: 2
+  }
+}
+layer {
+  name: "relu5"
+  type: "ReLU"
+  bottom: "conv5"
+  top: "conv5"
+}
+layer {
+  name: "pool5"
+  type: "Pooling"
+  bottom: "conv5"
+  top: "pool5"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "fc6"
+  type: "InnerProduct"
+  bottom: "pool5"
+  top: "fc6"
+  inner_product_param {
+    num_output: 4096
+  }
+}
+layer {
+  name: "relu6"
+  type: "ReLU"
+  bottom: "fc6"
+  top: "fc6"
+}
+layer {
+  name: "drop6"
+  type: "Dropout"
+  bottom: "fc6"
+  top: "fc6"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc7"
+  type: "InnerProduct"
+  bottom: "fc6"
+  top: "fc7"
+  inner_product_param {
+    num_output: 4096
+  }
+}
+layer {
+  name: "relu7"
+  type: "ReLU"
+  bottom: "fc7"
+  top: "fc7"
+}
+layer {
+  name: "drop7"
+  type: "Dropout"
+  bottom: "fc7"
+  top: "fc7"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc8"
+  type: "InnerProduct"
+  bottom: "fc7"
+  top: "fc8"
+  inner_product_param {
+    num_output: 1000
+  }
+}
+layer {
+  name: "prob"
+  type: "Softmax"
+  bottom: "fc8"
+  top: "prob"
+}
+layer {
+  name: "accuracy"
+  type: "Accuracy"
+  bottom: "prob"
+  bottom: "label"
+  top: "accuracy"
+}
+layer {
+  name: "loss"
+  type: "SoftmaxWithLoss"
+  bottom: "fc8"
+  bottom: "label"
+  top: "loss"
+}
diff --git a/examples/feature_extraction/readme.md b/examples/feature_extraction/readme.md
new file mode 100644
index 0000000..6c8917e
--- /dev/null
+++ b/examples/feature_extraction/readme.md
@@ -0,0 +1,74 @@
+---
+title: Feature extraction with Caffe C++ code.
+description: Extract CaffeNet / AlexNet features using the Caffe utility.
+category: example
+include_in_docs: true
+priority: 10
+---
+
+Extracting Features
+===================
+
+In this tutorial, we will extract features using a pre-trained model with the included C++ utility.
+Note that we recommend using the Python interface for this task, as for example in the [filter visualization example](http://nbviewer.ipython.org/github/BVLC/caffe/blob/master/examples/filter_visualization.ipynb).
+
+Follow the instructions for [installing Caffe](../../installation.html) and run `scripts/download_model_binary.py models/bvlc_reference_caffenet` from the Caffe root directory.
+If you need detailed information about the tools below, please consult their source code, in which additional documentation is usually provided.
+
+Select data to run on
+---------------------
+
+We'll make a temporary folder to store things into.
+
+    mkdir examples/_temp
+
+Generate a list of the files to process.
+We're going to use the images that ship with caffe.
+
+    find `pwd`/examples/images -type f -exec echo {} \; > examples/_temp/temp.txt
+
+The `ImageDataLayer` we'll use expects a label after each filename, so let's add a 0 to the end of each line.
+
+    sed "s/$/ 0/" examples/_temp/temp.txt > examples/_temp/file_list.txt
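+
+If you'd rather build this list from Python than with `find`/`sed`, a rough equivalent is sketched below (a sketch only; it assumes you run it from the Caffe root and that `examples/_temp` already exists):
+
+    import os
+
+    image_dir = os.path.abspath('examples/images')
+    with open('examples/_temp/file_list.txt', 'w') as f:
+        for name in sorted(os.listdir(image_dir)):
+            # ImageDataLayer reads "<path> <integer label>" per line;
+            # the label is a dummy 0 since we only extract features.
+            f.write('{} 0\n'.format(os.path.join(image_dir, name)))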
+
+Define the Feature Extraction Network Architecture
+--------------------------------------------------
+
+In practice, subtracting the mean image from a dataset significantly improves classification accuracies.
+Download the mean image of the ILSVRC dataset.
+
+    ./data/ilsvrc12/get_ilsvrc_aux.sh
+
+We will use `data/ilsvrc12/imagenet_mean.binaryproto` in the network definition prototxt.
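+
+As a quick sanity check of the download, a small pycaffe sketch (assuming the Python module is built and on your `PYTHONPATH`) can load the binaryproto and report the mean's shape:
+
+    import caffe
+    from caffe.proto import caffe_pb2
+
+    blob = caffe_pb2.BlobProto()
+    with open('data/ilsvrc12/imagenet_mean.binaryproto', 'rb') as f:
+        blob.ParseFromString(f.read())
+    # blobproto_to_array returns a (1, 3, 256, 256) array in BGR channel order.
+    mean = caffe.io.blobproto_to_array(blob)[0]
+    print('mean shape: {}'.format(mean.shape))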
+
+Let's copy and modify the network definition.
+We'll be using the `ImageDataLayer`, which will load and resize images for us.
+
+    cp examples/feature_extraction/imagenet_val.prototxt examples/_temp
+
+Extract Features
+----------------
+
+Now everything necessary is in place.
+
+    ./build/tools/extract_features.bin models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel examples/_temp/imagenet_val.prototxt fc7 examples/_temp/features 10 lmdb
+
+The name of the feature blob that you extract is `fc7`, which represents the highest-level feature of the reference model.
+We can use any other layer as well, such as `conv5` or `pool3`.
+
+The last parameter above is the number of data mini-batches.
+
+The features are stored in the LMDB `examples/_temp/features` (the database backend given as the last argument above), ready for access by other code.
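+
+As a sketch of what that other code might look like, the snippet below reads the saved `fc7` vectors back out with the `lmdb` Python package and Caffe's `Datum` protobuf (both are assumed to be installed/built; the key/value layout follows what `extract_features.bin` writes):
+
+    import lmdb
+    import numpy as np
+    from caffe.proto import caffe_pb2
+
+    env = lmdb.open('examples/_temp/features', readonly=True)
+    with env.begin() as txn:
+        for key, value in txn.cursor():
+            datum = caffe_pb2.Datum()
+            datum.ParseFromString(value)
+            # fc7 activations are stored in float_data as a flat vector.
+            feat = np.array(datum.float_data, dtype=np.float32)
+            print('{}: {} values'.format(key, feat.size))
+            break  # peek at the first entry only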
+
+If you run into the error "Check failed: status.ok() Failed to open leveldb examples/_temp/features", it is because the directory `examples/_temp/features` was already created by a previous run of the command. Remove it and run again.
+
+    rm -rf examples/_temp/features/
+
+If you'd like to use the Python wrapper for extracting features, check out the [layer visualization notebook](http://nbviewer.ipython.org/github/BVLC/caffe/blob/master/examples/filter_visualization.ipynb).
+
+Clean Up
+--------
+
+Let's remove the temporary directory now.
+
+    rm -r examples/_temp
diff --git a/examples/finetune_flickr_style/assemble_data.py b/examples/finetune_flickr_style/assemble_data.py
new file mode 100755
index 0000000..09bfa26
--- /dev/null
+++ b/examples/finetune_flickr_style/assemble_data.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+"""
+Form a subset of the Flickr Style data, download images to dirname, and write
+Caffe ImagesDataLayer training file.
+"""
+import os
+import urllib
+import hashlib
+import argparse
+import numpy as np
+import pandas as pd
+from skimage import io
+import multiprocessing
+
+# Flickr returns a special image if the request is unavailable.
+MISSING_IMAGE_SHA1 = '6a92790b1c2a301c6e7ddef645dca1f53ea97ac2'
+
+example_dirname = os.path.abspath(os.path.dirname(__file__))
+caffe_dirname = os.path.abspath(os.path.join(example_dirname, '../..'))
+training_dirname = os.path.join(caffe_dirname, 'data/flickr_style')
+
+
+def download_image(args_tuple):
+    "For use with multiprocessing map. Returns True on success, False on failure."
+    try:
+        url, filename = args_tuple
+        if not os.path.exists(filename):
+            urllib.urlretrieve(url, filename)
+        with open(filename) as f:
+            assert hashlib.sha1(f.read()).hexdigest() != MISSING_IMAGE_SHA1
+        test_read_image = io.imread(filename)
+        return True
+    except KeyboardInterrupt:
+        raise Exception()  # multiprocessing doesn't catch keyboard exceptions
+    except:
+        return False
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description='Download a subset of Flickr Style to a directory')
+    parser.add_argument(
+        '-s', '--seed', type=int, default=0,
+        help="random seed")
+    parser.add_argument(
+        '-i', '--images', type=int, default=-1,
+        help="number of images to use (-1 for all [default])",
+    )
+    parser.add_argument(
+        '-w', '--workers', type=int, default=-1,
+        help="num workers used to download images. -x uses (all - x) cores [-1 default]."
+    )
+    parser.add_argument(
+        '-l', '--labels', type=int, default=0,
+        help="if set to a positive value, only sample images whose label index is less than this value."
+    )
+
+    args = parser.parse_args()
+    np.random.seed(args.seed)
+
+    # Read data, shuffle order, and subsample.
+    csv_filename = os.path.join(example_dirname, 'flickr_style.csv.gz')
+    df = pd.read_csv(csv_filename, index_col=0, compression='gzip')
+    df = df.iloc[np.random.permutation(df.shape[0])]
+    if args.labels > 0:
+        df = df.loc[df['label'] < args.labels]
+    if args.images > 0 and args.images < df.shape[0]:
+        df = df.iloc[:args.images]
+
+    # Make directory for images and get local filenames.
+    if training_dirname is None:
+        training_dirname = os.path.join(caffe_dirname, 'data/flickr_style')
+    images_dirname = os.path.join(training_dirname, 'images')
+    if not os.path.exists(images_dirname):
+        os.makedirs(images_dirname)
+    df['image_filename'] = [
+        os.path.join(images_dirname, _.split('/')[-1]) for _ in df['image_url']
+    ]
+
+    # Download images.
+    num_workers = args.workers
+    if num_workers <= 0:
+        num_workers = multiprocessing.cpu_count() + num_workers
+    print('Downloading {} images with {} workers...'.format(
+        df.shape[0], num_workers))
+    pool = multiprocessing.Pool(processes=num_workers)
+    map_args = zip(df['image_url'], df['image_filename'])
+    results = pool.map(download_image, map_args)
+
+    # Only keep rows with valid images, and write out training file lists.
+    df = df[results]
+    for split in ['train', 'test']:
+        split_df = df[df['_split'] == split]
+        filename = os.path.join(training_dirname, '{}.txt'.format(split))
+        split_df[['image_filename', 'label']].to_csv(
+            filename, sep=' ', header=None, index=None)
+    print('Writing train/val for {} successfully downloaded images.'.format(
+        df.shape[0]))
diff --git a/examples/finetune_flickr_style/flickr_style.csv.gz b/examples/finetune_flickr_style/flickr_style.csv.gz
new file mode 100644
index 0000000..5a84f88
Binary files /dev/null and b/examples/finetune_flickr_style/flickr_style.csv.gz differ
diff --git a/examples/finetune_flickr_style/readme.md b/examples/finetune_flickr_style/readme.md
new file mode 100644
index 0000000..ecb9d3d
--- /dev/null
+++ b/examples/finetune_flickr_style/readme.md
@@ -0,0 +1,165 @@
+---
+title: Fine-tuning for style recognition
+description: Fine-tune the ImageNet-trained CaffeNet on the "Flickr Style" dataset.
+category: example
+include_in_docs: true
+priority: 5
+---
+
+# Fine-tuning CaffeNet for Style Recognition on "Flickr Style" Data
+
+Fine-tuning takes an already learned model, adapts the architecture, and resumes training from the already learned model weights.
+Let's fine-tune the BVLC-distributed CaffeNet model on a different dataset, [Flickr Style](http://sergeykarayev.com/files/1311.3715v3.pdf), to predict image style instead of object category.
+
+## Explanation
+
+The Flickr-sourced images of the Style dataset are visually very similar to the ImageNet dataset, on which the `bvlc_reference_caffenet` was trained.
+Since that model works well for object category classification, we'd like to use its architecture for our style classifier.
+We also only have 80,000 images to train on, so we'd like to start with the parameters learned on the 1,000,000 ImageNet images, and fine-tune as needed.
+If we provide the `weights` argument to the `caffe train` command, the pretrained weights will be loaded into our model, matching layers by name.
+
+Because we are predicting 20 classes instead of 1,000, we do need to change the last layer in the model.
+Therefore, we change the name of the last layer from `fc8` to `fc8_flickr` in our prototxt.
+Since there is no layer with that name in `bvlc_reference_caffenet`, that layer will begin training with random weights.
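+
+If you'd like to see this name matching from Python, here is a minimal sketch (it assumes pycaffe is built, the reference caffemodel has been downloaded, and the Flickr Style file lists from the procedure below already exist, since the data layers open them when the net is loaded):
+
+    import caffe
+
+    net = caffe.Net('models/finetune_flickr_style/train_val.prototxt',
+                    'models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel',
+                    caffe.TRAIN)
+    # Layers whose names match the reference net (conv1 ... fc7) received its
+    # weights; fc8_flickr has no counterpart and keeps its random initialization.
+    print(sorted(net.params.keys()))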
+
+We will also decrease the overall learning rate `base_lr` in the solver prototxt, but boost the `blobs_lr` on the newly introduced layer.
+The idea is to have the rest of the model change very slowly with new data, but let the new layer learn fast.
+Additionally, we set `stepsize` in the solver to a lower value than if we were training from scratch, since we're effectively already far along in training and therefore want the learning rate to go down faster.
+Note that we could also entirely prevent fine-tuning of all layers other than `fc8_flickr` by setting their `blobs_lr` to 0.
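+
+To see exactly what a given solver file sets for these knobs, you can parse it with the protobuf text format; a quick sketch (assuming pycaffe's generated protos and the `protobuf` package are importable):
+
+    from caffe.proto import caffe_pb2
+    from google.protobuf import text_format
+
+    solver = caffe_pb2.SolverParameter()
+    with open('models/finetune_flickr_style/solver.prototxt') as f:
+        text_format.Merge(f.read(), solver)
+    # The fields discussed above: a small global rate and an early step.
+    print('base_lr={} lr_policy={} stepsize={}'.format(
+        solver.base_lr, solver.lr_policy, solver.stepsize))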
+
+## Procedure
+
+All steps are to be done from the caffe root directory.
+
+The dataset is distributed as a list of URLs with corresponding labels.
+Using a script, we will download a small subset of the data and split it into train and val sets.
+
+    caffe % ./examples/finetune_flickr_style/assemble_data.py -h
+    usage: assemble_data.py [-h] [-s SEED] [-i IMAGES] [-w WORKERS]
+
+    Download a subset of Flickr Style to a directory
+
+    optional arguments:
+      -h, --help            show this help message and exit
+      -s SEED, --seed SEED  random seed
+      -i IMAGES, --images IMAGES
+                            number of images to use (-1 for all)
+      -w WORKERS, --workers WORKERS
+                            num workers used to download images. -x uses (all - x)
+                            cores.
+
+    caffe % python examples/finetune_flickr_style/assemble_data.py --workers=-1 --images=2000 --seed 831486
+    Downloading 2000 images with 7 workers...
+    Writing train/val for 1939 successfully downloaded images.
+
+This script downloads images and writes train/val file lists into `data/flickr_style`.
+The prototxts in this example assume this, and also assume the presence of the ImageNet mean file (run `get_ilsvrc_aux.sh` from `data/ilsvrc12` to obtain this if you haven't yet).
+
+We'll also need the ImageNet-trained model, which you can obtain by running `./scripts/download_model_binary.py models/bvlc_reference_caffenet`.
+
+Now we can train! (You can fine-tune in CPU mode by leaving out the `-gpu` flag.)
+
+    caffe % ./build/tools/caffe train -solver models/finetune_flickr_style/solver.prototxt -weights models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel -gpu 0
+
+    [...]
+
+    I0828 22:10:04.025378  9718 solver.cpp:46] Solver scaffolding done.
+    I0828 22:10:04.025388  9718 caffe.cpp:95] Use GPU with device ID 0
+    I0828 22:10:04.192004  9718 caffe.cpp:107] Finetuning from models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel
+
+    [...]
+
+    I0828 22:17:48.338963 11510 solver.cpp:165] Solving FlickrStyleCaffeNet
+    I0828 22:17:48.339010 11510 solver.cpp:251] Iteration 0, Testing net (#0)
+    I0828 22:18:14.313817 11510 solver.cpp:302]     Test net output #0: accuracy = 0.0308
+    I0828 22:18:14.476822 11510 solver.cpp:195] Iteration 0, loss = 3.78589
+    I0828 22:18:14.476878 11510 solver.cpp:397] Iteration 0, lr = 0.001
+    I0828 22:18:19.700408 11510 solver.cpp:195] Iteration 20, loss = 3.25728
+    I0828 22:18:19.700461 11510 solver.cpp:397] Iteration 20, lr = 0.001
+    I0828 22:18:24.924685 11510 solver.cpp:195] Iteration 40, loss = 2.18531
+    I0828 22:18:24.924741 11510 solver.cpp:397] Iteration 40, lr = 0.001
+    I0828 22:18:30.114858 11510 solver.cpp:195] Iteration 60, loss = 2.4915
+    I0828 22:18:30.114910 11510 solver.cpp:397] Iteration 60, lr = 0.001
+    I0828 22:18:35.328071 11510 solver.cpp:195] Iteration 80, loss = 2.04539
+    I0828 22:18:35.328127 11510 solver.cpp:397] Iteration 80, lr = 0.001
+    I0828 22:18:40.588317 11510 solver.cpp:195] Iteration 100, loss = 2.1924
+    I0828 22:18:40.588373 11510 solver.cpp:397] Iteration 100, lr = 0.001
+    I0828 22:18:46.171576 11510 solver.cpp:195] Iteration 120, loss = 2.25107
+    I0828 22:18:46.171669 11510 solver.cpp:397] Iteration 120, lr = 0.001
+    I0828 22:18:51.757809 11510 solver.cpp:195] Iteration 140, loss = 1.355
+    I0828 22:18:51.757863 11510 solver.cpp:397] Iteration 140, lr = 0.001
+    I0828 22:18:57.345080 11510 solver.cpp:195] Iteration 160, loss = 1.40815
+    I0828 22:18:57.345135 11510 solver.cpp:397] Iteration 160, lr = 0.001
+    I0828 22:19:02.928794 11510 solver.cpp:195] Iteration 180, loss = 1.6558
+    I0828 22:19:02.928850 11510 solver.cpp:397] Iteration 180, lr = 0.001
+    I0828 22:19:08.514497 11510 solver.cpp:195] Iteration 200, loss = 0.88126
+    I0828 22:19:08.514552 11510 solver.cpp:397] Iteration 200, lr = 0.001
+
+    [...]
+
+    I0828 22:22:40.789010 11510 solver.cpp:195] Iteration 960, loss = 0.112586
+    I0828 22:22:40.789175 11510 solver.cpp:397] Iteration 960, lr = 0.001
+    I0828 22:22:46.376626 11510 solver.cpp:195] Iteration 980, loss = 0.0959077
+    I0828 22:22:46.376682 11510 solver.cpp:397] Iteration 980, lr = 0.001
+    I0828 22:22:51.687258 11510 solver.cpp:251] Iteration 1000, Testing net (#0)
+    I0828 22:23:17.438894 11510 solver.cpp:302]     Test net output #0: accuracy = 0.2356
+
+Note how rapidly the loss went down. Although the 23.5% accuracy is only modest, it was achieved in only 1,000 iterations, and is evidence that the model is starting to learn quickly and well.
+Once the model is fully fine-tuned on the whole training set over 100,000 iterations, the final validation accuracy is 39.16%.
+This takes ~7 hours in Caffe on a K40 GPU.
+
+For comparison, here is how the loss goes down when we do not start with a pre-trained model:
+
+    I0828 22:24:18.624004 12919 solver.cpp:165] Solving FlickrStyleCaffeNet
+    I0828 22:24:18.624099 12919 solver.cpp:251] Iteration 0, Testing net (#0)
+    I0828 22:24:44.520992 12919 solver.cpp:302]     Test net output #0: accuracy = 0.0366
+    I0828 22:24:44.676905 12919 solver.cpp:195] Iteration 0, loss = 3.47942
+    I0828 22:24:44.677120 12919 solver.cpp:397] Iteration 0, lr = 0.001
+    I0828 22:24:50.152454 12919 solver.cpp:195] Iteration 20, loss = 2.99694
+    I0828 22:24:50.152509 12919 solver.cpp:397] Iteration 20, lr = 0.001
+    I0828 22:24:55.736256 12919 solver.cpp:195] Iteration 40, loss = 3.0498
+    I0828 22:24:55.736311 12919 solver.cpp:397] Iteration 40, lr = 0.001
+    I0828 22:25:01.316514 12919 solver.cpp:195] Iteration 60, loss = 2.99549
+    I0828 22:25:01.316567 12919 solver.cpp:397] Iteration 60, lr = 0.001
+    I0828 22:25:06.899554 12919 solver.cpp:195] Iteration 80, loss = 3.00573
+    I0828 22:25:06.899610 12919 solver.cpp:397] Iteration 80, lr = 0.001
+    I0828 22:25:12.484624 12919 solver.cpp:195] Iteration 100, loss = 2.99094
+    I0828 22:25:12.484678 12919 solver.cpp:397] Iteration 100, lr = 0.001
+    I0828 22:25:18.069056 12919 solver.cpp:195] Iteration 120, loss = 3.01616
+    I0828 22:25:18.069149 12919 solver.cpp:397] Iteration 120, lr = 0.001
+    I0828 22:25:23.650928 12919 solver.cpp:195] Iteration 140, loss = 2.98786
+    I0828 22:25:23.650984 12919 solver.cpp:397] Iteration 140, lr = 0.001
+    I0828 22:25:29.235535 12919 solver.cpp:195] Iteration 160, loss = 3.00724
+    I0828 22:25:29.235589 12919 solver.cpp:397] Iteration 160, lr = 0.001
+    I0828 22:25:34.816898 12919 solver.cpp:195] Iteration 180, loss = 3.00099
+    I0828 22:25:34.816953 12919 solver.cpp:397] Iteration 180, lr = 0.001
+    I0828 22:25:40.396656 12919 solver.cpp:195] Iteration 200, loss = 2.99848
+    I0828 22:25:40.396711 12919 solver.cpp:397] Iteration 200, lr = 0.001
+
+    [...]
+
+    I0828 22:29:12.539094 12919 solver.cpp:195] Iteration 960, loss = 2.99203
+    I0828 22:29:12.539258 12919 solver.cpp:397] Iteration 960, lr = 0.001
+    I0828 22:29:18.123092 12919 solver.cpp:195] Iteration 980, loss = 2.99345
+    I0828 22:29:18.123147 12919 solver.cpp:397] Iteration 980, lr = 0.001
+    I0828 22:29:23.432059 12919 solver.cpp:251] Iteration 1000, Testing net (#0)
+    I0828 22:29:49.409044 12919 solver.cpp:302]     Test net output #0: accuracy = 0.0572
+
+This model is only beginning to learn.
+
+Fine-tuning can be feasible when training from scratch would not be, whether for lack of time or of data.
+Even in CPU mode each pass through the training set takes ~100 s. GPU fine-tuning is of course faster still and can learn a useful model in minutes or hours instead of days or weeks.
+Furthermore, note that the model has only been trained on fewer than 2,000 instances. Transferring the ImageNet pretraining to a new task like style recognition can require much less data than training from scratch.
+
+Now try fine-tuning to your own tasks and data!
+
+## Trained model
+
+We provide a model trained on all 80K images, with final accuracy of 39%.
+Simply do `./scripts/download_model_binary.py models/finetune_flickr_style` to obtain it.
+
+## License
+
+The Flickr Style dataset as distributed here contains only URLs to images.
+Some of the images may be copyrighted.
+Training a category-recognition model for research/non-commercial use may constitute fair use of this data, but the result should not be used for commercial purposes.
diff --git a/examples/finetune_flickr_style/style_names.txt b/examples/finetune_flickr_style/style_names.txt
new file mode 100644
index 0000000..73090c9
--- /dev/null
+++ b/examples/finetune_flickr_style/style_names.txt
@@ -0,0 +1,20 @@
+Detailed
+Pastel
+Melancholy
+Noir
+HDR
+Vintage
+Long Exposure
+Horror
+Sunny
+Bright
+Hazy
+Bokeh
+Serene
+Texture
+Ethereal
+Macro
+Depth of Field
+Geometric Composition
+Minimal
+Romantic
diff --git a/examples/finetune_pascal_detection/pascal_finetune_solver.prototxt b/examples/finetune_pascal_detection/pascal_finetune_solver.prototxt
new file mode 100644
index 0000000..829b200
--- /dev/null
+++ b/examples/finetune_pascal_detection/pascal_finetune_solver.prototxt
@@ -0,0 +1,13 @@
+net: "examples/finetune_pascal_detection/pascal_finetune_trainval_test.prototxt"
+test_iter: 100
+test_interval: 1000
+base_lr: 0.001
+lr_policy: "step"
+gamma: 0.1
+stepsize: 20000
+display: 20
+max_iter: 100000
+momentum: 0.9
+weight_decay: 0.0005
+snapshot: 10000
+snapshot_prefix: "examples/finetune_pascal_detection/pascal_det_finetune"
diff --git a/examples/finetune_pascal_detection/pascal_finetune_trainval_test.prototxt b/examples/finetune_pascal_detection/pascal_finetune_trainval_test.prototxt
new file mode 100644
index 0000000..9dd2120
--- /dev/null
+++ b/examples/finetune_pascal_detection/pascal_finetune_trainval_test.prototxt
@@ -0,0 +1,391 @@
+name: "CaffeNet"
+layer {
+  name: "data"
+  type: "WindowData"
+  top: "data"
+  top: "label"
+  include {
+    phase: TRAIN
+  }
+  transform_param {
+    mirror: true
+    crop_size: 227
+    mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
+  }
+  window_data_param {
+    source: "examples/finetune_pascal_detection/window_file_2007_trainval.txt"
+    batch_size: 128
+    fg_threshold: 0.5
+    bg_threshold: 0.5
+    fg_fraction: 0.25
+    context_pad: 16
+    crop_mode: "warp"
+  }
+}
+layer {
+  name: "data"
+  type: "WindowData"
+  top: "data"
+  top: "label"
+  include {
+    phase: TEST
+  }
+  transform_param {
+    mirror: true
+    crop_size: 227
+    mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
+  }
+  window_data_param {
+    source: "examples/finetune_pascal_detection/window_file_2007_test.txt"
+    batch_size: 128
+    fg_threshold: 0.5
+    bg_threshold: 0.5
+    fg_fraction: 0.25
+    context_pad: 16
+    crop_mode: "warp"
+  }
+}
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    kernel_size: 11
+    stride: 4
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "conv1"
+  top: "conv1"
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "pool1"
+  top: "norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "norm1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 2
+    kernel_size: 5
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "pool2"
+  top: "norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "norm2"
+  top: "conv3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "conv4"
+  type: "Convolution"
+  bottom: "conv3"
+  top: "conv4"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu4"
+  type: "ReLU"
+  bottom: "conv4"
+  top: "conv4"
+}
+layer {
+  name: "conv5"
+  type: "Convolution"
+  bottom: "conv4"
+  top: "conv5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu5"
+  type: "ReLU"
+  bottom: "conv5"
+  top: "conv5"
+}
+layer {
+  name: "pool5"
+  type: "Pooling"
+  bottom: "conv5"
+  top: "pool5"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "fc6"
+  type: "InnerProduct"
+  bottom: "pool5"
+  top: "fc6"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+    weight_filler {
+      type: "gaussian"
+      std: 0.005
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu6"
+  type: "ReLU"
+  bottom: "fc6"
+  top: "fc6"
+}
+layer {
+  name: "drop6"
+  type: "Dropout"
+  bottom: "fc6"
+  top: "fc6"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc7"
+  type: "InnerProduct"
+  bottom: "fc6"
+  top: "fc7"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+    weight_filler {
+      type: "gaussian"
+      std: 0.005
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu7"
+  type: "ReLU"
+  bottom: "fc7"
+  top: "fc7"
+}
+layer {
+  name: "drop7"
+  type: "Dropout"
+  bottom: "fc7"
+  top: "fc7"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc8_pascal"
+  type: "InnerProduct"
+  bottom: "fc7"
+  top: "fc8_pascal"
+  param {
+    lr_mult: 10
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 20
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 21
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "loss"
+  type: "SoftmaxWithLoss"
+  bottom: "fc8_pascal"
+  bottom: "label"
+}
+layer {
+  name: "accuracy"
+  type: "Accuracy"
+  bottom: "fc8_pascal"
+  bottom: "label"
+  top: "accuracy"
+  include {
+    phase: TEST
+  }
+}
diff --git a/examples/hdf5_classification/nonlinear_auto_test.prototxt b/examples/hdf5_classification/nonlinear_auto_test.prototxt
new file mode 100644
index 0000000..53eda6e
--- /dev/null
+++ b/examples/hdf5_classification/nonlinear_auto_test.prototxt
@@ -0,0 +1,54 @@
+layer {
+  name: "data"
+  type: "HDF5Data"
+  top: "data"
+  top: "label"
+  hdf5_data_param {
+    source: "examples/hdf5_classification/data/test.txt"
+    batch_size: 10
+  }
+}
+layer {
+  name: "ip1"
+  type: "InnerProduct"
+  bottom: "data"
+  top: "ip1"
+  inner_product_param {
+    num_output: 40
+    weight_filler {
+      type: "xavier"
+    }
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "ip1"
+  top: "ip1"
+}
+layer {
+  name: "ip2"
+  type: "InnerProduct"
+  bottom: "ip1"
+  top: "ip2"
+  inner_product_param {
+    num_output: 2
+    weight_filler {
+      type: "xavier"
+    }
+  }
+}
+layer {
+  name: "accuracy"
+  type: "Accuracy"
+  bottom: "ip2"
+  bottom: "label"
+  top: "accuracy"
+}
+layer {
+  name: "loss"
+  type: "SoftmaxWithLoss"
+  bottom: "ip2"
+  bottom: "label"
+  top: "loss"
+}
diff --git a/examples/hdf5_classification/nonlinear_auto_train.prototxt b/examples/hdf5_classification/nonlinear_auto_train.prototxt
new file mode 100644
index 0000000..fc0688f
--- /dev/null
+++ b/examples/hdf5_classification/nonlinear_auto_train.prototxt
@@ -0,0 +1,54 @@
+layer {
+  name: "data"
+  type: "HDF5Data"
+  top: "data"
+  top: "label"
+  hdf5_data_param {
+    source: "examples/hdf5_classification/data/train.txt"
+    batch_size: 10
+  }
+}
+layer {
+  name: "ip1"
+  type: "InnerProduct"
+  bottom: "data"
+  top: "ip1"
+  inner_product_param {
+    num_output: 40
+    weight_filler {
+      type: "xavier"
+    }
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "ip1"
+  top: "ip1"
+}
+layer {
+  name: "ip2"
+  type: "InnerProduct"
+  bottom: "ip1"
+  top: "ip2"
+  inner_product_param {
+    num_output: 2
+    weight_filler {
+      type: "xavier"
+    }
+  }
+}
+layer {
+  name: "accuracy"
+  type: "Accuracy"
+  bottom: "ip2"
+  bottom: "label"
+  top: "accuracy"
+}
+layer {
+  name: "loss"
+  type: "SoftmaxWithLoss"
+  bottom: "ip2"
+  bottom: "label"
+  top: "loss"
+}
diff --git a/examples/hdf5_classification/nonlinear_solver.prototxt b/examples/hdf5_classification/nonlinear_solver.prototxt
new file mode 100644
index 0000000..b4aacf6
--- /dev/null
+++ b/examples/hdf5_classification/nonlinear_solver.prototxt
@@ -0,0 +1,15 @@
+train_net: "examples/hdf5_classification/nonlinear_auto_train.prototxt"
+test_net: "examples/hdf5_classification/nonlinear_auto_test.prototxt"
+test_iter: 250
+test_interval: 1000
+base_lr: 0.01
+lr_policy: "step"
+gamma: 0.1
+stepsize: 5000
+display: 1000
+max_iter: 10000
+momentum: 0.9
+weight_decay: 0.0005
+snapshot: 10000
+snapshot_prefix: "examples/hdf5_classification/data/train"
+solver_mode: CPU
diff --git a/examples/hdf5_classification/nonlinear_train_val.prototxt b/examples/hdf5_classification/nonlinear_train_val.prototxt
new file mode 100644
index 0000000..8f7ef04
--- /dev/null
+++ b/examples/hdf5_classification/nonlinear_train_val.prototxt
@@ -0,0 +1,98 @@
+name: "LogisticRegressionNet"
+layer {
+  name: "data"
+  type: "HDF5Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TRAIN
+  }
+  hdf5_data_param {
+    source: "examples/hdf5_classification/data/train.txt"
+    batch_size: 10
+  }
+}
+layer {
+  name: "data"
+  type: "HDF5Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TEST
+  }
+  hdf5_data_param {
+    source: "examples/hdf5_classification/data/test.txt"
+    batch_size: 10
+  }
+}
+layer {
+  name: "fc1"
+  type: "InnerProduct"
+  bottom: "data"
+  top: "fc1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 40
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "fc1"
+  top: "fc1"
+}
+layer {
+  name: "fc2"
+  type: "InnerProduct"
+  bottom: "fc1"
+  top: "fc2"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 2
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "loss"
+  type: "SoftmaxWithLoss"
+  bottom: "fc2"
+  bottom: "label"
+  top: "loss"
+}
+layer {
+  name: "accuracy"
+  type: "Accuracy"
+  bottom: "fc2"
+  bottom: "label"
+  top: "accuracy"
+  include {
+    phase: TEST
+  }
+}
diff --git a/examples/hdf5_classification/solver.prototxt b/examples/hdf5_classification/solver.prototxt
new file mode 100644
index 0000000..8587b5a
--- /dev/null
+++ b/examples/hdf5_classification/solver.prototxt
@@ -0,0 +1,15 @@
+train_net: "examples/hdf5_classification/logreg_auto_train.prototxt"
+test_net: "examples/hdf5_classification/logreg_auto_test.prototxt"
+test_iter: 250
+test_interval: 1000
+base_lr: 0.01
+lr_policy: "step"
+gamma: 0.1
+stepsize: 5000
+display: 1000
+max_iter: 10000
+momentum: 0.9
+weight_decay: 0.0005
+snapshot: 10000
+snapshot_prefix: "examples/hdf5_classification/data/train"
+solver_mode: CPU
diff --git a/examples/hdf5_classification/train_val.prototxt b/examples/hdf5_classification/train_val.prototxt
new file mode 100644
index 0000000..13ddf47
--- /dev/null
+++ b/examples/hdf5_classification/train_val.prototxt
@@ -0,0 +1,68 @@
+name: "LogisticRegressionNet"
+layer {
+  name: "data"
+  type: "HDF5Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TRAIN
+  }
+  hdf5_data_param {
+    source: "examples/hdf5_classification/data/train.txt"
+    batch_size: 10
+  }
+}
+layer {
+  name: "data"
+  type: "HDF5Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TEST
+  }
+  hdf5_data_param {
+    source: "examples/hdf5_classification/data/test.txt"
+    batch_size: 10
+  }
+}
+layer {
+  name: "fc1"
+  type: "InnerProduct"
+  bottom: "data"
+  top: "fc1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 2
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "loss"
+  type: "SoftmaxWithLoss"
+  bottom: "fc1"
+  bottom: "label"
+  top: "loss"
+}
+layer {
+  name: "accuracy"
+  type: "Accuracy"
+  bottom: "fc1"
+  bottom: "label"
+  top: "accuracy"
+  include {
+    phase: TEST
+  }
+}
diff --git a/examples/imagenet/create_imagenet.sh b/examples/imagenet/create_imagenet.sh
new file mode 100755
index 0000000..e912ac4
--- /dev/null
+++ b/examples/imagenet/create_imagenet.sh
@@ -0,0 +1,57 @@
+#!/usr/bin/env sh
+# Create the imagenet lmdb inputs
+# N.B. set the path to the imagenet train + val data dirs
+
+EXAMPLE=examples/imagenet
+DATA=data/ilsvrc12
+TOOLS=build/tools
+
+TRAIN_DATA_ROOT=/path/to/imagenet/train/
+VAL_DATA_ROOT=/path/to/imagenet/val/
+
+# Set RESIZE=true to resize the images to 256x256. Leave as false if images have
+# already been resized using another tool.
+RESIZE=false
+if $RESIZE; then
+  RESIZE_HEIGHT=256
+  RESIZE_WIDTH=256
+else
+  RESIZE_HEIGHT=0
+  RESIZE_WIDTH=0
+fi
+
+if [ ! -d "$TRAIN_DATA_ROOT" ]; then
+  echo "Error: TRAIN_DATA_ROOT is not a path to a directory: $TRAIN_DATA_ROOT"
+  echo "Set the TRAIN_DATA_ROOT variable in create_imagenet.sh to the path" \
+       "where the ImageNet training data is stored."
+  exit 1
+fi
+
+if [ ! -d "$VAL_DATA_ROOT" ]; then
+  echo "Error: VAL_DATA_ROOT is not a path to a directory: $VAL_DATA_ROOT"
+  echo "Set the VAL_DATA_ROOT variable in create_imagenet.sh to the path" \
+       "where the ImageNet validation data is stored."
+  exit 1
+fi
+
+echo "Creating train lmdb..."
+
+GLOG_logtostderr=1 $TOOLS/convert_imageset \
+    --resize_height=$RESIZE_HEIGHT \
+    --resize_width=$RESIZE_WIDTH \
+    --shuffle \
+    $TRAIN_DATA_ROOT \
+    $DATA/train.txt \
+    $EXAMPLE/ilsvrc12_train_lmdb
+
+echo "Creating val lmdb..."
+
+GLOG_logtostderr=1 $TOOLS/convert_imageset \
+    --resize_height=$RESIZE_HEIGHT \
+    --resize_width=$RESIZE_WIDTH \
+    --shuffle \
+    $VAL_DATA_ROOT \
+    $DATA/val.txt \
+    $EXAMPLE/ilsvrc12_val_lmdb
+
+echo "Done."
diff --git a/examples/imagenet/make_imagenet_mean.sh b/examples/imagenet/make_imagenet_mean.sh
new file mode 100755
index 0000000..57f4376
--- /dev/null
+++ b/examples/imagenet/make_imagenet_mean.sh
@@ -0,0 +1,12 @@
+#!/usr/bin/env sh
+# Compute the mean image from the imagenet training lmdb
+# N.B. this is available in data/ilsvrc12
+
+EXAMPLE=examples/imagenet
+DATA=data/ilsvrc12
+TOOLS=build/tools
+
+$TOOLS/compute_image_mean $EXAMPLE/ilsvrc12_train_lmdb \
+  $DATA/imagenet_mean.binaryproto
+
+echo "Done."
diff --git a/examples/imagenet/readme.md b/examples/imagenet/readme.md
new file mode 100644
index 0000000..b1ebfaf
--- /dev/null
+++ b/examples/imagenet/readme.md
@@ -0,0 +1,105 @@
+---
+title: ImageNet tutorial
+description: Train and test "CaffeNet" on ImageNet data.
+category: example
+include_in_docs: true
+priority: 1
+---
+
+Brewing ImageNet
+================
+
+This guide is meant to get you ready to train your own model on your own data.
+If you just want an ImageNet-trained network, then note that since training takes a lot of energy and we hate global warming, we provide the CaffeNet model trained as described below in the [model zoo](/model_zoo.html).
+
+Data Preparation
+----------------
+
+*The guide specifies all paths and assumes all commands are executed from the root caffe directory.*
+
+*By "ImageNet" we here mean the ILSVRC12 challenge, but you can easily train on the whole of ImageNet as well, just with more disk space, and a little longer training time.*
+
+We assume that you already have downloaded the ImageNet training data and validation data, and they are stored on your disk like:
+
+    /path/to/imagenet/train/n01440764/n01440764_10026.JPEG
+    /path/to/imagenet/val/ILSVRC2012_val_00000001.JPEG
+
+You will first need to prepare some auxiliary data for training. This data can be downloaded by:
+
+    ./data/ilsvrc12/get_ilsvrc_aux.sh
+
+The training and validation input are described in `train.txt` and `val.txt` as text listing all the files and their labels. Note that we use a different indexing for labels than the ILSVRC devkit: we sort the synset names in their ASCII order, and then label them from 0 to 999. See `synset_words.txt` for the synset/name mapping.
+
+You may want to resize the images to 256x256 in advance. By default, we do not explicitly do this because in a cluster environment, one may benefit from resizing images in a parallel fashion, using mapreduce. For example, Yangqing used his lightweight [mincepie](https://github.com/Yangqing/mincepie) package. If you prefer things to be simpler, you can also use shell commands, something like:
+
+    for name in /path/to/imagenet/val/*.JPEG; do
+        convert -resize 256x256\! $name $name
+    done
+
+Take a look at `examples/imagenet/create_imagenet.sh`. Set the paths to the train and val dirs as needed, and set "RESIZE=true" to resize all images to 256x256 if you haven't resized the images in advance.
+Now simply create the image databases with `examples/imagenet/create_imagenet.sh`. Note that `examples/imagenet/ilsvrc12_train_lmdb` and `examples/imagenet/ilsvrc12_val_lmdb` should not exist before this execution; they will be created by the script. `GLOG_logtostderr=1` simply dumps more information for you to inspect, and you can safely ignore it.
+
+Compute Image Mean
+------------------
+
+The model requires us to subtract the image mean from each image, so we have to compute the mean. `tools/compute_image_mean.cpp` implements that - it is also a good example to familiarize yourself with how to manipulate the various components, such as protocol buffers, leveldbs, and logging, if you are not familiar with them. Anyway, the mean computation can be carried out as:
+
+    ./examples/imagenet/make_imagenet_mean.sh
+
+which will make `data/ilsvrc12/imagenet_mean.binaryproto`.
+
+Model Definition
+----------------
+
+We are going to describe a reference implementation for the approach first proposed by Krizhevsky, Sutskever, and Hinton in their [NIPS 2012 paper](http://books.nips.cc/papers/files/nips25/NIPS2012_0534.pdf).
+
+The network definition (`models/bvlc_reference_caffenet/train_val.prototxt`) follows the one in Krizhevsky et al.
+Note that if you deviated from file paths suggested in this guide, you'll need to adjust the relevant paths in the `.prototxt` files.
+
+If you look carefully at `models/bvlc_reference_caffenet/train_val.prototxt`, you will notice several `include` sections specifying either `phase: TRAIN` or `phase: TEST`. These sections allow us to define two closely related networks in one file: the network used for training and the network used for testing. These two networks are almost identical, sharing all layers except for those marked with `include { phase: TRAIN }` or `include { phase: TEST }`. In this case, only the input layer [...]
+
+**Input layer differences:** The training network's `data` input layer draws its data from `examples/imagenet/ilsvrc12_train_lmdb` and randomly mirrors the input image. The testing network's `data` layer takes data from `examples/imagenet/ilsvrc12_val_lmdb` and does not perform random mirroring.
+
+**Output layer differences:** Both networks output the `softmax_loss` layer, which in training is used to compute the loss function and to initialize the backpropagation, while in validation this loss is simply reported. The testing network also has a second output layer, `accuracy`, which is used to report the accuracy on the test set. In the process of training, the test network will occasionally be instantiated and tested on the test set, producing lines like `Test score #0: xxx` and  [...]
+
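+A minimal sketch of this pattern for the two `data` layers (batch sizes here are illustrative; see `models/bvlc_reference_caffenet/train_val.prototxt` for the full definition):
+
+    layer {
+      name: "data"
+      type: "Data"
+      top: "data"
+      top: "label"
+      include { phase: TRAIN }    # instantiated only in the training net
+      transform_param {
+        mirror: true              # random mirroring during training
+        crop_size: 227
+        mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
+      }
+      data_param {
+        source: "examples/imagenet/ilsvrc12_train_lmdb"
+        backend: LMDB
+        batch_size: 256
+      }
+    }
+    layer {
+      name: "data"
+      type: "Data"
+      top: "data"
+      top: "label"
+      include { phase: TEST }     # instantiated only in the test net
+      transform_param {
+        mirror: false             # no mirroring at test time
+        crop_size: 227
+        mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
+      }
+      data_param {
+        source: "examples/imagenet/ilsvrc12_val_lmdb"
+        backend: LMDB
+        batch_size: 50
+      }
+    }
+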
+We will also lay out a protocol buffer for running the solver. Let's make a few plans:
+
+* We will run in batches of 256, and run a total of 450,000 iterations (about 90 epochs).
+* For every 1,000 iterations, we test the learned net on the validation data.
+* We set the initial learning rate to 0.01, and decrease it every 100,000 iterations (about 20 epochs).
+* Information will be displayed every 20 iterations.
+* The network will be trained with momentum 0.9 and a weight decay of 0.0005.
+* For every 10,000 iterations, we will take a snapshot of the current status.
+
+Sound good? This is implemented in `models/bvlc_reference_caffenet/solver.prototxt`.
+
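+Under those plans, the solver definition is roughly the following sketch (the `test_iter`, `gamma`, and `snapshot_prefix` values here are illustrative; consult the actual file for the exact settings):
+
+    net: "models/bvlc_reference_caffenet/train_val.prototxt"
+    test_iter: 1000            # how many forward batches per test pass
+    test_interval: 1000        # test every 1,000 training iterations
+    base_lr: 0.01              # initial learning rate
+    lr_policy: "step"
+    gamma: 0.1                 # drop the learning rate by 10x ...
+    stepsize: 100000           # ... every 100,000 iterations (about 20 epochs)
+    display: 20
+    max_iter: 450000           # about 90 epochs at batch size 256
+    momentum: 0.9
+    weight_decay: 0.0005
+    snapshot: 10000            # snapshot every 10,000 iterations
+    snapshot_prefix: "models/bvlc_reference_caffenet/caffenet_train"
+    solver_mode: GPU
+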
+Training ImageNet
+-----------------
+
+Ready? Let's train.
+
+    ./build/tools/caffe train --solver=models/bvlc_reference_caffenet/solver.prototxt
+
+Sit back and enjoy!
+
+On a K40 machine, every 20 iterations take about 26.5 seconds to run (while on a K20 this takes 36 seconds), so effectively about 5.2 ms per image for the full forward-backward pass. About 2 ms of this is on forward, and the rest is backward. If you are interested in dissecting the computation time, you can run
+
+    ./build/tools/caffe time --model=models/bvlc_reference_caffenet/train_val.prototxt
+
+Resume Training?
+----------------
+
+We all experience times when the power goes out, or we feel like rewarding ourselves a little by playing Battlefield (does anyone still remember Quake?). Since we are snapshotting intermediate results during training, we will be able to resume from snapshots. This can be done as easily as:
+
+    ./build/tools/caffe train --solver=models/bvlc_reference_caffenet/solver.prototxt --snapshot=models/bvlc_reference_caffenet/caffenet_train_iter_10000.solverstate
+
+where `caffenet_train_iter_10000.solverstate` is the solver state snapshot that stores all the information necessary to recover the exact solver state (including the parameters, momentum history, etc.).
+
+Parting Words
+-------------
+
+Hope you liked this recipe!
+Many researchers have gone further since the ILSVRC 2012 challenge, changing the network architecture and/or fine-tuning the various parameters in the network to address new data and tasks.
+**Caffe lets you explore different network choices more easily by simply writing different prototxt files** - isn't that exciting?
+
+And since now you have a trained network, check out how to use it with the Python interface for [classifying ImageNet](http://nbviewer.ipython.org/github/BVLC/caffe/blob/master/examples/classification.ipynb).
diff --git a/examples/imagenet/resume_training.sh b/examples/imagenet/resume_training.sh
new file mode 100755
index 0000000..d1febff
--- /dev/null
+++ b/examples/imagenet/resume_training.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env sh
+
+./build/tools/caffe train \
+    --solver=models/bvlc_reference_caffenet/solver.prototxt \
+    --snapshot=models/bvlc_reference_caffenet/caffenet_train_10000.solverstate
diff --git a/examples/imagenet/train_caffenet.sh b/examples/imagenet/train_caffenet.sh
new file mode 100755
index 0000000..94558ec
--- /dev/null
+++ b/examples/imagenet/train_caffenet.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env sh
+
+./build/tools/caffe train \
+    --solver=models/bvlc_reference_caffenet/solver.prototxt
diff --git a/examples/images/cat.jpg b/examples/images/cat.jpg
new file mode 100644
index 0000000..b4efc6c
Binary files /dev/null and b/examples/images/cat.jpg differ
diff --git a/examples/images/cat_gray.jpg b/examples/images/cat_gray.jpg
new file mode 100644
index 0000000..43c5ce3
Binary files /dev/null and b/examples/images/cat_gray.jpg differ
diff --git a/examples/images/fish-bike.jpg b/examples/images/fish-bike.jpg
new file mode 100644
index 0000000..39d9bd4
Binary files /dev/null and b/examples/images/fish-bike.jpg differ
diff --git a/examples/mnist/convert_mnist_data.cpp b/examples/mnist/convert_mnist_data.cpp
new file mode 100644
index 0000000..2749e45
--- /dev/null
+++ b/examples/mnist/convert_mnist_data.cpp
@@ -0,0 +1,198 @@
+// This script converts the MNIST dataset to a lmdb (default) or
+// leveldb (--backend=leveldb) format used by caffe to load data.
+// Usage:
+//    convert_mnist_data [FLAGS] input_image_file input_label_file
+//                        output_db_file
+// The MNIST dataset could be downloaded at
+//    http://yann.lecun.com/exdb/mnist/
+
+#include <gflags/gflags.h>
+#include <glog/logging.h>
+#include <google/protobuf/text_format.h>
+#include <leveldb/db.h>
+#include <leveldb/write_batch.h>
+#include <lmdb.h>
+#include <stdint.h>
+#include <sys/stat.h>
+
+#include <fstream>  // NOLINT(readability/streams)
+#include <string>
+
+#include "caffe/proto/caffe.pb.h"
+
+using namespace caffe;  // NOLINT(build/namespaces)
+using std::string;
+
+DEFINE_string(backend, "lmdb", "The backend for storing the result");
+
+uint32_t swap_endian(uint32_t val) {
+    val = ((val << 8) & 0xFF00FF00) | ((val >> 8) & 0xFF00FF);
+    return (val << 16) | (val >> 16);
+}
+
+void convert_dataset(const char* image_filename, const char* label_filename,
+        const char* db_path, const string& db_backend) {
+  // Open files
+  std::ifstream image_file(image_filename, std::ios::in | std::ios::binary);
+  std::ifstream label_file(label_filename, std::ios::in | std::ios::binary);
+  CHECK(image_file) << "Unable to open file " << image_filename;
+  CHECK(label_file) << "Unable to open file " << label_filename;
+  // Read the magic and the meta data
+  uint32_t magic;
+  uint32_t num_items;
+  uint32_t num_labels;
+  uint32_t rows;
+  uint32_t cols;
+
+  image_file.read(reinterpret_cast<char*>(&magic), 4);
+  magic = swap_endian(magic);
+  CHECK_EQ(magic, 2051) << "Incorrect image file magic.";
+  label_file.read(reinterpret_cast<char*>(&magic), 4);
+  magic = swap_endian(magic);
+  CHECK_EQ(magic, 2049) << "Incorrect label file magic.";
+  image_file.read(reinterpret_cast<char*>(&num_items), 4);
+  num_items = swap_endian(num_items);
+  label_file.read(reinterpret_cast<char*>(&num_labels), 4);
+  num_labels = swap_endian(num_labels);
+  CHECK_EQ(num_items, num_labels);
+  image_file.read(reinterpret_cast<char*>(&rows), 4);
+  rows = swap_endian(rows);
+  image_file.read(reinterpret_cast<char*>(&cols), 4);
+  cols = swap_endian(cols);
+
+  // lmdb
+  MDB_env *mdb_env;
+  MDB_dbi mdb_dbi;
+  MDB_val mdb_key, mdb_data;
+  MDB_txn *mdb_txn;
+  // leveldb
+  leveldb::DB* db;
+  leveldb::Options options;
+  options.error_if_exists = true;
+  options.create_if_missing = true;
+  options.write_buffer_size = 268435456;
+  leveldb::WriteBatch* batch = NULL;
+
+  // Open db
+  if (db_backend == "leveldb") {  // leveldb
+    LOG(INFO) << "Opening leveldb " << db_path;
+    leveldb::Status status = leveldb::DB::Open(
+        options, db_path, &db);
+    CHECK(status.ok()) << "Failed to open leveldb " << db_path
+        << ". Is it already existing?";
+    batch = new leveldb::WriteBatch();
+  } else if (db_backend == "lmdb") {  // lmdb
+    LOG(INFO) << "Opening lmdb " << db_path;
+    CHECK_EQ(mkdir(db_path, 0744), 0)
+        << "mkdir " << db_path << " failed";
+    CHECK_EQ(mdb_env_create(&mdb_env), MDB_SUCCESS) << "mdb_env_create failed";
+    CHECK_EQ(mdb_env_set_mapsize(mdb_env, 1099511627776), MDB_SUCCESS)  // 1TB
+        << "mdb_env_set_mapsize failed";
+    CHECK_EQ(mdb_env_open(mdb_env, db_path, 0, 0664), MDB_SUCCESS)
+        << "mdb_env_open failed";
+    CHECK_EQ(mdb_txn_begin(mdb_env, NULL, 0, &mdb_txn), MDB_SUCCESS)
+        << "mdb_txn_begin failed";
+    CHECK_EQ(mdb_open(mdb_txn, NULL, 0, &mdb_dbi), MDB_SUCCESS)
+        << "mdb_open failed. Does the lmdb already exist? ";
+  } else {
+    LOG(FATAL) << "Unknown db backend " << db_backend;
+  }
+
+  // Storing to db
+  char label;
+  char* pixels = new char[rows * cols];
+  int count = 0;
+  const int kMaxKeyLength = 10;
+  char key_cstr[kMaxKeyLength];
+  string value;
+
+  Datum datum;
+  datum.set_channels(1);
+  datum.set_height(rows);
+  datum.set_width(cols);
+  LOG(INFO) << "A total of " << num_items << " items.";
+  LOG(INFO) << "Rows: " << rows << " Cols: " << cols;
+  for (int item_id = 0; item_id < num_items; ++item_id) {
+    image_file.read(pixels, rows * cols);
+    label_file.read(&label, 1);
+    datum.set_data(pixels, rows*cols);
+    datum.set_label(label);
+    snprintf(key_cstr, kMaxKeyLength, "%08d", item_id);
+    datum.SerializeToString(&value);
+    string keystr(key_cstr);
+
+    // Put in db
+    if (db_backend == "leveldb") {  // leveldb
+      batch->Put(keystr, value);
+    } else if (db_backend == "lmdb") {  // lmdb
+      mdb_data.mv_size = value.size();
+      mdb_data.mv_data = reinterpret_cast<void*>(&value[0]);
+      mdb_key.mv_size = keystr.size();
+      mdb_key.mv_data = reinterpret_cast<void*>(&keystr[0]);
+      CHECK_EQ(mdb_put(mdb_txn, mdb_dbi, &mdb_key, &mdb_data, 0), MDB_SUCCESS)
+          << "mdb_put failed";
+    } else {
+      LOG(FATAL) << "Unknown db backend " << db_backend;
+    }
+
+    if (++count % 1000 == 0) {
+      // Commit txn
+      if (db_backend == "leveldb") {  // leveldb
+        db->Write(leveldb::WriteOptions(), batch);
+        delete batch;
+        batch = new leveldb::WriteBatch();
+      } else if (db_backend == "lmdb") {  // lmdb
+        CHECK_EQ(mdb_txn_commit(mdb_txn), MDB_SUCCESS)
+            << "mdb_txn_commit failed";
+        CHECK_EQ(mdb_txn_begin(mdb_env, NULL, 0, &mdb_txn), MDB_SUCCESS)
+            << "mdb_txn_begin failed";
+      } else {
+        LOG(FATAL) << "Unknown db backend " << db_backend;
+      }
+    }
+  }
+  // write the last batch
+  if (count % 1000 != 0) {
+    if (db_backend == "leveldb") {  // leveldb
+      db->Write(leveldb::WriteOptions(), batch);
+      delete batch;
+      delete db;
+    } else if (db_backend == "lmdb") {  // lmdb
+      CHECK_EQ(mdb_txn_commit(mdb_txn), MDB_SUCCESS) << "mdb_txn_commit failed";
+      mdb_close(mdb_env, mdb_dbi);
+      mdb_env_close(mdb_env);
+    } else {
+      LOG(FATAL) << "Unknown db backend " << db_backend;
+    }
+    LOG(ERROR) << "Processed " << count << " files.";
+  }
+  delete[] pixels;
+}
+
+int main(int argc, char** argv) {
+#ifndef GFLAGS_GFLAGS_H_
+  namespace gflags = google;
+#endif
+
+  gflags::SetUsageMessage("This script converts the MNIST dataset to\n"
+        "the lmdb/leveldb format used by Caffe to load data.\n"
+        "Usage:\n"
+        "    convert_mnist_data [FLAGS] input_image_file input_label_file "
+        "output_db_file\n"
+        "The MNIST dataset could be downloaded at\n"
+        "    http://yann.lecun.com/exdb/mnist/\n"
+        "You should gunzip them after downloading,"
+        "or directly use data/mnist/get_mnist.sh\n");
+  gflags::ParseCommandLineFlags(&argc, &argv, true);
+
+  const string& db_backend = FLAGS_backend;
+
+  if (argc != 4) {
+    gflags::ShowUsageWithFlagsRestrict(argv[0],
+        "examples/mnist/convert_mnist_data");
+  } else {
+    google::InitGoogleLogging(argv[0]);
+    convert_dataset(argv[1], argv[2], argv[3], db_backend);
+  }
+  return 0;
+}
diff --git a/examples/mnist/create_mnist.sh b/examples/mnist/create_mnist.sh
new file mode 100755
index 0000000..06ecc27
--- /dev/null
+++ b/examples/mnist/create_mnist.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env sh
+# This script converts the mnist data into lmdb/leveldb format,
+# depending on the value assigned to $BACKEND.
+
+EXAMPLE=examples/mnist
+DATA=data/mnist
+BUILD=build/examples/mnist
+
+BACKEND="lmdb"
+
+echo "Creating ${BACKEND}..."
+
+rm -rf $EXAMPLE/mnist_train_${BACKEND}
+rm -rf $EXAMPLE/mnist_test_${BACKEND}
+
+$BUILD/convert_mnist_data.bin $DATA/train-images-idx3-ubyte \
+  $DATA/train-labels-idx1-ubyte $EXAMPLE/mnist_train_${BACKEND} --backend=${BACKEND}
+$BUILD/convert_mnist_data.bin $DATA/t10k-images-idx3-ubyte \
+  $DATA/t10k-labels-idx1-ubyte $EXAMPLE/mnist_test_${BACKEND} --backend=${BACKEND}
+
+echo "Done."
diff --git a/examples/mnist/lenet.prototxt b/examples/mnist/lenet.prototxt
new file mode 100644
index 0000000..cb42610
--- /dev/null
+++ b/examples/mnist/lenet.prototxt
@@ -0,0 +1,128 @@
+name: "LeNet"
+input: "data"
+input_dim: 64
+input_dim: 1
+input_dim: 28
+input_dim: 28
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 20
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 2
+    stride: 2
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "pool1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 50
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 2
+    stride: 2
+  }
+}
+layer {
+  name: "ip1"
+  type: "InnerProduct"
+  bottom: "pool2"
+  top: "ip1"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 500
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "ip1"
+  top: "ip1"
+}
+layer {
+  name: "ip2"
+  type: "InnerProduct"
+  bottom: "ip1"
+  top: "ip2"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 10
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "prob"
+  type: "Softmax"
+  bottom: "ip2"
+  top: "prob"
+}
diff --git a/examples/mnist/lenet_auto_solver.prototxt b/examples/mnist/lenet_auto_solver.prototxt
new file mode 100644
index 0000000..fa4bbf0
--- /dev/null
+++ b/examples/mnist/lenet_auto_solver.prototxt
@@ -0,0 +1,24 @@
+# The train/test net protocol buffer definition
+train_net: "examples/mnist/lenet_auto_train.prototxt"
+test_net: "examples/mnist/lenet_auto_test.prototxt"
+# test_iter specifies how many forward passes the test should carry out.
+# In the case of MNIST, we have test batch size 100 and 100 test iterations,
+# covering the full 10,000 testing images.
+test_iter: 100
+# Carry out testing every 500 training iterations.
+test_interval: 500
+# The base learning rate, momentum and the weight decay of the network.
+base_lr: 0.01
+momentum: 0.9
+weight_decay: 0.0005
+# The learning rate policy
+lr_policy: "inv"
+gamma: 0.0001
+power: 0.75
+# Display every 100 iterations
+display: 100
+# The maximum number of iterations
+max_iter: 10000
+# snapshot intermediate results
+snapshot: 5000
+snapshot_prefix: "examples/mnist/lenet"
diff --git a/examples/mnist/lenet_consolidated_solver.prototxt b/examples/mnist/lenet_consolidated_solver.prototxt
new file mode 100644
index 0000000..b81f30f
--- /dev/null
+++ b/examples/mnist/lenet_consolidated_solver.prototxt
@@ -0,0 +1,263 @@
+# lenet_consolidated_solver.prototxt consolidates the lenet_solver, lenet_train,
+# and lenet_test prototxts into a single file.  It also adds an additional test
+# net which runs on the training set, e.g., for the purpose of comparing
+# train/test accuracy (accuracy is computed only on the test set in the included
+# LeNet example).  This is mainly included as an example of using these features
+# (specify NetParameters directly in the solver, specify multiple test nets)
+# if desired.
+#
+# Carry out testing every 500 training iterations.
+test_interval: 500
+# The base learning rate, momentum and the weight decay of the network.
+base_lr: 0.01
+momentum: 0.9
+weight_decay: 0.0005
+# The learning rate policy
+lr_policy: "inv"
+gamma: 0.0001
+power: 0.75
+# Display every 100 iterations
+display: 100
+# The maximum number of iterations
+max_iter: 10000
+# snapshot intermediate results
+snapshot: 5000
+snapshot_prefix: "examples/mnist/lenet"
+# Set a random_seed for repeatable results.
+# (For results that vary due to random initialization, comment out the below
+# line, or set to a negative integer -- e.g. "random_seed: -1")
+random_seed: 1701
+# solver mode: CPU or GPU
+solver_mode: GPU
+
+# We test on both the test and train set using "stages".  The TEST DATA layers
+# each have a stage, either 'test-on-train-set' or 'test-on-test-set'.
+# test_iter specifies how many forward passes the test should carry out.
+# In the case of MNIST, we have test batch size 100 and 100 test iterations,
+# covering the full 10,000 testing images.
+test_iter: 100
+test_state: { stage: "test-on-test-set" }
+# The train set has 60K images, so we run 600 test iters (600 * 100 = 60K).
+test_iter: 600
+test_state: { stage: "test-on-train-set" }
+
+# The net protocol buffer definition
+net_param {
+  name: "LeNet"
+  layers {
+    name: "mnist"
+    type: DATA
+    top: "data"
+    top: "label"
+    data_param {
+      source: "examples/mnist/mnist_train_lmdb"
+      backend: LMDB
+      batch_size: 64
+    }
+    transform_param {
+      scale: 0.00390625
+    }
+    include: { phase: TRAIN }
+  }
+  layers {
+    name: "mnist"
+    type: DATA
+    top: "data"
+    top: "label"
+    data_param {
+      source: "examples/mnist/mnist_test_lmdb"
+      backend: LMDB
+      batch_size: 100
+    }
+    transform_param {
+      scale: 0.00390625
+    }
+    include: {
+      phase: TEST
+      stage: "test-on-test-set"
+    }
+  }
+  layers {
+    name: "mnist"
+    type: DATA
+    top: "data"
+    top: "label"
+    data_param {
+      source: "examples/mnist/mnist_train_lmdb"
+      backend: LMDB
+      batch_size: 100
+    }
+    transform_param {
+      scale: 0.00390625
+    }
+    include: {
+      phase: TEST
+      stage: "test-on-train-set"
+    }
+  }
+  layers {
+    name: "conv1"
+    type: CONVOLUTION
+    bottom: "data"
+    top: "conv1"
+    blobs_lr: 1
+    blobs_lr: 2
+    convolution_param {
+      num_output: 20
+      kernel_size: 5
+      stride: 1
+      weight_filler {
+        type: "xavier"
+      }
+      bias_filler {
+        type: "constant"
+      }
+    }
+  }
+  layers {
+    name: "pool1"
+    type: POOLING
+    bottom: "conv1"
+    top: "pool1"
+    pooling_param {
+      pool: MAX
+      kernel_size: 2
+      stride: 2
+    }
+  }
+  layers {
+    name: "conv2"
+    type: CONVOLUTION
+    bottom: "pool1"
+    top: "conv2"
+    blobs_lr: 1
+    blobs_lr: 2
+    convolution_param {
+      num_output: 50
+      kernel_size: 5
+      stride: 1
+      weight_filler {
+        type: "xavier"
+      }
+      bias_filler {
+        type: "constant"
+      }
+    }
+  }
+  layers {
+    name: "pool2"
+    type: POOLING
+    bottom: "conv2"
+    top: "pool2"
+    pooling_param {
+      pool: MAX
+      kernel_size: 2
+      stride: 2
+    }
+  }
+  layers {
+    name: "ip1"
+    type: INNER_PRODUCT
+    bottom: "pool2"
+    top: "ip1"
+    blobs_lr: 1
+    blobs_lr: 2
+    inner_product_param {
+      num_output: 500
+      weight_filler {
+        type: "xavier"
+      }
+      bias_filler {
+        type: "constant"
+      }
+    }
+  }
+  layers {
+    name: "relu1"
+    type: RELU
+    bottom: "ip1"
+    top: "ip1"
+  }
+  layers {
+    name: "ip2"
+    type: INNER_PRODUCT
+    bottom: "ip1"
+    top: "ip2"
+    blobs_lr: 1
+    blobs_lr: 2
+    inner_product_param {
+      num_output: 10
+      weight_filler {
+        type: "xavier"
+      }
+      bias_filler {
+        type: "constant"
+      }
+    }
+  }
+  layers {
+    name: "accuracy"
+    type: ACCURACY
+    bottom: "ip2"
+    bottom: "label"
+    top: "accuracy"
+  }
+  layers {
+    name: "loss"
+    type: SOFTMAX_LOSS
+    bottom: "ip2"
+    bottom: "label"
+    top: "loss"
+  }
+}
+
+# Expected results for first and last 500 iterations:
+# (with portions of log omitted for brevity)
+#
+# Iteration 0, Testing net (#0)
+# Test score #0: 0.067
+# Test score #1: 2.30256
+# Iteration 0, Testing net (#1)
+# Test score #0: 0.0670334
+# Test score #1: 2.30258
+# Iteration 100, lr = 0.00992565
+# Iteration 100, loss = 0.280585
+# Iteration 200, lr = 0.00985258
+# Iteration 200, loss = 0.345601
+# Iteration 300, lr = 0.00978075
+# Iteration 300, loss = 0.172217
+# Iteration 400, lr = 0.00971013
+# Iteration 400, loss = 0.261836
+# Iteration 500, lr = 0.00964069
+# Iteration 500, loss = 0.157803
+# Iteration 500, Testing net (#0)
+# Test score #0: 0.968
+# Test score #1: 0.0993772
+# Iteration 500, Testing net (#1)
+# Test score #0: 0.965883
+# Test score #1: 0.109374
+#
+# [...]
+#
+# Iteration 9500, Testing net (#0)
+# Test score #0: 0.9899
+# Test score #1: 0.0308299
+# Iteration 9500, Testing net (#1)
+# Test score #0: 0.996816
+# Test score #1: 0.0118238
+# Iteration 9600, lr = 0.00603682
+# Iteration 9600, loss = 0.0126215
+# Iteration 9700, lr = 0.00601382
+# Iteration 9700, loss = 0.00579304
+# Iteration 9800, lr = 0.00599102
+# Iteration 9800, loss = 0.00500633
+# Iteration 9900, lr = 0.00596843
+# Iteration 9900, loss = 0.00796607
+# Iteration 10000, lr = 0.00594604
+# Iteration 10000, loss = 0.00271736
+# Iteration 10000, Testing net (#0)
+# Test score #0: 0.9914
+# Test score #1: 0.0276671
+# Iteration 10000, Testing net (#1)
+# Test score #0: 0.997782
+# Test score #1: 0.00908085
diff --git a/examples/mnist/lenet_multistep_solver.prototxt b/examples/mnist/lenet_multistep_solver.prototxt
new file mode 100644
index 0000000..9b22b45
--- /dev/null
+++ b/examples/mnist/lenet_multistep_solver.prototxt
@@ -0,0 +1,29 @@
+# The train/test net protocol buffer definition
+net: "examples/mnist/lenet_train_test.prototxt"
+# test_iter specifies how many forward passes the test should carry out.
+# In the case of MNIST, we have test batch size 100 and 100 test iterations,
+# covering the full 10,000 testing images.
+test_iter: 100
+# Carry out testing every 500 training iterations.
+test_interval: 500
+# The base learning rate, momentum and the weight decay of the network.
+base_lr: 0.01
+momentum: 0.9
+weight_decay: 0.0005
+# The learning rate policy
+lr_policy: "multistep"
+gamma: 0.9
+stepvalue: 5000
+stepvalue: 7000
+stepvalue: 8000
+stepvalue: 9000
+stepvalue: 9500
+# Display every 100 iterations
+display: 100
+# The maximum number of iterations
+max_iter: 10000
+# snapshot intermediate results
+snapshot: 5000
+snapshot_prefix: "examples/mnist/lenet_multistep"
+# solver mode: CPU or GPU
+solver_mode: GPU
diff --git a/examples/mnist/lenet_solver.prototxt b/examples/mnist/lenet_solver.prototxt
new file mode 100644
index 0000000..2dfbc83
--- /dev/null
+++ b/examples/mnist/lenet_solver.prototxt
@@ -0,0 +1,25 @@
+# The train/test net protocol buffer definition
+net: "examples/mnist/lenet_train_test.prototxt"
+# test_iter specifies how many forward passes the test should carry out.
+# In the case of MNIST, we have test batch size 100 and 100 test iterations,
+# covering the full 10,000 testing images.
+test_iter: 100
+# Carry out testing every 500 training iterations.
+test_interval: 500
+# The base learning rate, momentum and the weight decay of the network.
+base_lr: 0.01
+momentum: 0.9
+weight_decay: 0.0005
+# The learning rate policy
+lr_policy: "inv"
+gamma: 0.0001
+power: 0.75
+# Display every 100 iterations
+display: 100
+# The maximum number of iterations
+max_iter: 10000
+# snapshot intermediate results
+snapshot: 5000
+snapshot_prefix: "examples/mnist/lenet"
+# solver mode: CPU or GPU
+solver_mode: GPU
diff --git a/examples/mnist/lenet_stepearly_solver.prototxt b/examples/mnist/lenet_stepearly_solver.prototxt
new file mode 100644
index 0000000..efc6a33
--- /dev/null
+++ b/examples/mnist/lenet_stepearly_solver.prototxt
@@ -0,0 +1,28 @@
+# The training protocol buffer definition
+train_net: "lenet_train.prototxt"
+# The testing protocol buffer definition
+test_net: "lenet_test.prototxt"
+# test_iter specifies how many forward passes the test should carry out.
+# In the case of MNIST, we have test batch size 100 and 100 test iterations,
+# covering the full 10,000 testing images.
+test_iter: 100
+# Carry out testing every 500 training iterations.
+test_interval: 500
+# The base learning rate, momentum and the weight decay of the network.
+base_lr: 0.01
+momentum: 0.9
+weight_decay: 0.0005
+# The learning rate policy
+lr_policy: "stepearly"
+gamma: 0.9
+stepearly: 1
+# Display every 100 iterations
+display: 100
+# The maximum number of iterations
+max_iter: 10000
+# snapshot intermediate results
+snapshot: 5000
+snapshot_prefix: "lenet"
+# solver mode: 0 for CPU and 1 for GPU
+solver_mode: 1
+device_id: 1
diff --git a/examples/mnist/lenet_train_test.prototxt b/examples/mnist/lenet_train_test.prototxt
new file mode 100644
index 0000000..b18fc26
--- /dev/null
+++ b/examples/mnist/lenet_train_test.prototxt
@@ -0,0 +1,168 @@
+name: "LeNet"
+layer {
+  name: "mnist"
+  type: "Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TRAIN
+  }
+  transform_param {
+    scale: 0.00390625
+  }
+  data_param {
+    source: "examples/mnist/mnist_train_lmdb"
+    batch_size: 64
+    backend: LMDB
+  }
+}
+layer {
+  name: "mnist"
+  type: "Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TEST
+  }
+  transform_param {
+    scale: 0.00390625
+  }
+  data_param {
+    source: "examples/mnist/mnist_test_lmdb"
+    batch_size: 100
+    backend: LMDB
+  }
+}
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 20
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 2
+    stride: 2
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "pool1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 50
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 2
+    stride: 2
+  }
+}
+layer {
+  name: "ip1"
+  type: "InnerProduct"
+  bottom: "pool2"
+  top: "ip1"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 500
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "ip1"
+  top: "ip1"
+}
+layer {
+  name: "ip2"
+  type: "InnerProduct"
+  bottom: "ip1"
+  top: "ip2"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 10
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "accuracy"
+  type: "Accuracy"
+  bottom: "ip2"
+  bottom: "label"
+  top: "accuracy"
+  include {
+    phase: TEST
+  }
+}
+layer {
+  name: "loss"
+  type: "SoftmaxWithLoss"
+  bottom: "ip2"
+  bottom: "label"
+  top: "loss"
+}
diff --git a/examples/mnist/mnist_autoencoder.prototxt b/examples/mnist/mnist_autoencoder.prototxt
new file mode 100644
index 0000000..563c7c9
--- /dev/null
+++ b/examples/mnist/mnist_autoencoder.prototxt
@@ -0,0 +1,323 @@
+name: "MNISTAutoencoder"
+layer {
+  name: "data"
+  type: "Data"
+  top: "data"
+  include {
+    phase: TRAIN
+  }
+  transform_param {
+    scale: 0.0039215684
+  }
+  data_param {
+    source: "examples/mnist/mnist_train_lmdb"
+    batch_size: 100
+    backend: LMDB
+  }
+}
+layer {
+  name: "data"
+  type: "Data"
+  top: "data"
+  include {
+    phase: TEST
+    stage: "test-on-train"
+  }
+  transform_param {
+    scale: 0.0039215684
+  }
+  data_param {
+    source: "examples/mnist/mnist_train_lmdb"
+    batch_size: 100
+    backend: LMDB
+  }
+}
+layer {
+  name: "data"
+  type: "Data"
+  top: "data"
+  include {
+    phase: TEST
+    stage: "test-on-test"
+  }
+  transform_param {
+    scale: 0.0039215684
+  }
+  data_param {
+    source: "examples/mnist/mnist_test_lmdb"
+    batch_size: 100
+    backend: LMDB
+  }
+}
+layer {
+  name: "flatdata"
+  type: "Flatten"
+  bottom: "data"
+  top: "flatdata"
+}
+layer {
+  name: "encode1"
+  type: "InnerProduct"
+  bottom: "data"
+  top: "encode1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 1
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 1000
+    weight_filler {
+      type: "gaussian"
+      std: 1
+      sparse: 15
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "encode1neuron"
+  type: "Sigmoid"
+  bottom: "encode1"
+  top: "encode1neuron"
+}
+layer {
+  name: "encode2"
+  type: "InnerProduct"
+  bottom: "encode1neuron"
+  top: "encode2"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 1
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 500
+    weight_filler {
+      type: "gaussian"
+      std: 1
+      sparse: 15
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "encode2neuron"
+  type: "Sigmoid"
+  bottom: "encode2"
+  top: "encode2neuron"
+}
+layer {
+  name: "encode3"
+  type: "InnerProduct"
+  bottom: "encode2neuron"
+  top: "encode3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 1
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 250
+    weight_filler {
+      type: "gaussian"
+      std: 1
+      sparse: 15
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "encode3neuron"
+  type: "Sigmoid"
+  bottom: "encode3"
+  top: "encode3neuron"
+}
+layer {
+  name: "encode4"
+  type: "InnerProduct"
+  bottom: "encode3neuron"
+  top: "encode4"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 1
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 30
+    weight_filler {
+      type: "gaussian"
+      std: 1
+      sparse: 15
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "decode4"
+  type: "InnerProduct"
+  bottom: "encode4"
+  top: "decode4"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 1
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 250
+    weight_filler {
+      type: "gaussian"
+      std: 1
+      sparse: 15
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "decode4neuron"
+  type: "Sigmoid"
+  bottom: "decode4"
+  top: "decode4neuron"
+}
+layer {
+  name: "decode3"
+  type: "InnerProduct"
+  bottom: "decode4neuron"
+  top: "decode3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 1
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 500
+    weight_filler {
+      type: "gaussian"
+      std: 1
+      sparse: 15
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "decode3neuron"
+  type: "Sigmoid"
+  bottom: "decode3"
+  top: "decode3neuron"
+}
+layer {
+  name: "decode2"
+  type: "InnerProduct"
+  bottom: "decode3neuron"
+  top: "decode2"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 1
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 1000
+    weight_filler {
+      type: "gaussian"
+      std: 1
+      sparse: 15
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "decode2neuron"
+  type: "Sigmoid"
+  bottom: "decode2"
+  top: "decode2neuron"
+}
+layer {
+  name: "decode1"
+  type: "InnerProduct"
+  bottom: "decode2neuron"
+  top: "decode1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 1
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 784
+    weight_filler {
+      type: "gaussian"
+      std: 1
+      sparse: 15
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "loss"
+  type: "SigmoidCrossEntropyLoss"
+  bottom: "decode1"
+  bottom: "flatdata"
+  top: "cross_entropy_loss"
+  loss_weight: 1
+}
+layer {
+  name: "decode1neuron"
+  type: "Sigmoid"
+  bottom: "decode1"
+  top: "decode1neuron"
+}
+layer {
+  name: "loss"
+  type: "EuclideanLoss"
+  bottom: "decode1neuron"
+  bottom: "flatdata"
+  top: "l2_error"
+  loss_weight: 0
+}
diff --git a/examples/mnist/mnist_autoencoder_solver.prototxt b/examples/mnist/mnist_autoencoder_solver.prototxt
new file mode 100644
index 0000000..6e35cb6
--- /dev/null
+++ b/examples/mnist/mnist_autoencoder_solver.prototxt
@@ -0,0 +1,19 @@
+net: "examples/mnist/mnist_autoencoder.prototxt"
+test_state: { stage: 'test-on-train' }
+test_iter: 500
+test_state: { stage: 'test-on-test' }
+test_iter: 100
+test_interval: 500
+test_compute_loss: true
+base_lr: 0.01
+lr_policy: "step"
+gamma: 0.1
+stepsize: 10000
+display: 100
+max_iter: 65000
+weight_decay: 0.0005
+snapshot: 10000
+snapshot_prefix: "examples/mnist/mnist_autoencoder"
+momentum: 0.9
+# solver mode: CPU or GPU
+solver_mode: GPU
diff --git a/examples/mnist/mnist_autoencoder_solver_adagrad.prototxt b/examples/mnist/mnist_autoencoder_solver_adagrad.prototxt
new file mode 100644
index 0000000..cc0ed9e
--- /dev/null
+++ b/examples/mnist/mnist_autoencoder_solver_adagrad.prototxt
@@ -0,0 +1,17 @@
+net: "examples/mnist/mnist_autoencoder.prototxt"
+test_state: { stage: 'test-on-train' }
+test_iter: 500
+test_state: { stage: 'test-on-test' }
+test_iter: 100
+test_interval: 500
+test_compute_loss: true
+base_lr: 0.01
+lr_policy: "fixed"
+display: 100
+max_iter: 65000
+weight_decay: 0.0005
+snapshot: 10000
+snapshot_prefix: "examples/mnist/mnist_autoencoder_adagrad_train"
+# solver mode: CPU or GPU
+solver_mode: GPU
+solver_type: ADAGRAD
diff --git a/examples/mnist/mnist_autoencoder_solver_nesterov.prototxt b/examples/mnist/mnist_autoencoder_solver_nesterov.prototxt
new file mode 100644
index 0000000..2a59fd4
--- /dev/null
+++ b/examples/mnist/mnist_autoencoder_solver_nesterov.prototxt
@@ -0,0 +1,20 @@
+net: "examples/mnist/mnist_autoencoder.prototxt"
+test_state: { stage: 'test-on-train' }
+test_iter: 500
+test_state: { stage: 'test-on-test' }
+test_iter: 100
+test_interval: 500
+test_compute_loss: true
+base_lr: 0.01
+lr_policy: "step"
+gamma: 0.1
+stepsize: 10000
+display: 100
+max_iter: 65000
+weight_decay: 0.0005
+snapshot: 10000
+snapshot_prefix: "examples/mnist/mnist_autoencoder_nesterov_train"
+momentum: 0.95
+# solver mode: CPU or GPU
+solver_mode: GPU
+solver_type: NESTEROV
diff --git a/examples/mnist/readme.md b/examples/mnist/readme.md
new file mode 100644
index 0000000..413d4a1
--- /dev/null
+++ b/examples/mnist/readme.md
@@ -0,0 +1,287 @@
+---
+title: LeNet MNIST Tutorial
+description: Train and test "LeNet" on the MNIST handwritten digit data.
+category: example
+include_in_docs: true
+priority: 1
+---
+
+# Training LeNet on MNIST with Caffe
+
+We will assume that you have successfully compiled Caffe. If not, please refer to the [Installation page](/installation.html). Throughout this tutorial, we assume that your Caffe installation is located at `CAFFE_ROOT`.
+
+## Prepare Datasets
+
+You will first need to download the data from the MNIST website and convert it into the format Caffe expects. To do this, simply run the following commands:
+
+    cd $CAFFE_ROOT
+    ./data/mnist/get_mnist.sh
+    ./examples/mnist/create_mnist.sh
+
+If it complains that `wget` or `gunzip` is not installed, install it first. After running the script there should be two datasets, `mnist_train_lmdb` and `mnist_test_lmdb`.
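+
+If you would like to verify the conversion, the following Python sketch (an illustration only, assuming the `lmdb` package and pycaffe's generated `caffe_pb2` module are importable) reads the first record back out of the training LMDB:
+
+    import lmdb
+    import numpy as np
+    from caffe.proto import caffe_pb2
+
+    env = lmdb.open('examples/mnist/mnist_train_lmdb', readonly=True)
+    with env.begin() as txn:
+        key, value = next(iter(txn.cursor()))   # first (key, serialized Datum) pair
+    datum = caffe_pb2.Datum()
+    datum.ParseFromString(value)
+    image = np.frombuffer(datum.data, dtype=np.uint8).reshape(
+        datum.channels, datum.height, datum.width)
+    print(image.shape, 'label:', datum.label)   # expect (1, 28, 28) and a digit label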
+
+## LeNet: the MNIST Classification Model
+
+Before we actually run the training program, let's explain what will happen. We will use the [LeNet](http://yann.lecun.com/exdb/publis/pdf/lecun-01a.pdf) network, which is known to work well on digit classification tasks. We will use a slightly different version from the original LeNet implementation, replacing the sigmoid activations with Rectified Linear Unit (ReLU) activations for the neurons.
+
+The design of LeNet contains the essence of CNNs that are still used in larger models such as the ones in ImageNet. In general, it consists of a convolutional layer followed by a pooling layer, another convolution layer followed by a pooling layer, and then two fully connected layers similar to the conventional multilayer perceptrons. We have defined the layers in `$CAFFE_ROOT/examples/mnist/lenet_train_test.prototxt`.
+
+## Define the MNIST Network
+
+This section explains the `lenet_train_test.prototxt` model definition that specifies the LeNet model for MNIST handwritten digit classification. We assume that you are familiar with [Google Protobuf](https://developers.google.com/protocol-buffers/docs/overview) and that you have read the protobuf definitions used by Caffe, which can be found at `$CAFFE_ROOT/src/caffe/proto/caffe.proto`.
+
+Specifically, we will write a `caffe::NetParameter` (or in python, `caffe.proto.caffe_pb2.NetParameter`) protobuf. We will start by giving the network a name:
+
+    name: "LeNet"
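+
+For readers more comfortable in Python, the same message can also be built through the generated protobuf classes (illustration only, assuming pycaffe has been built so that `caffe.proto.caffe_pb2` is importable):
+
+    from caffe.proto import caffe_pb2
+
+    net = caffe_pb2.NetParameter()   # the message type the prototxt above is parsed into
+    net.name = "LeNet"
+    print(net)                       # prints the text format: name: "LeNet"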
+
+### Writing the Data Layer
+
+We will read the MNIST data from the lmdb we created earlier. This is defined by a data layer:
+
+    layer {
+      name: "mnist"
+      type: "Data"
+      transform_param {
+        scale: 0.00390625
+      }
+      data_param {
+        source: "mnist_train_lmdb"
+        backend: LMDB
+        batch_size: 64
+      }
+      top: "data"
+      top: "label"
+    }
+
+Specifically, this layer has name `mnist` and type `Data`, and it reads the data from the given lmdb source. We use a batch size of 64 and scale the incoming pixels so that they fall in the range \[0,1\). Why 0.00390625? It is 1 divided by 256. Finally, this layer produces two blobs: the `data` blob and the `label` blob.
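+
+If you want to double-check that scale factor, a quick NumPy snippet (illustration only, not part of the tutorial files) shows that it maps every 8-bit pixel value into \[0,1\):
+
+    import numpy as np
+
+    pixels = np.arange(256, dtype=np.uint8)           # every possible 8-bit pixel value
+    scaled = pixels.astype(np.float32) * 0.00390625   # same effect as the transform_param scale
+    print(scaled.min(), scaled.max())                 # 0.0 and 0.99609375, i.e. within [0, 1)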
+
+### Writing the Convolution Layer
+
+Let's define the first convolution layer:
+
+    layer {
+      name: "conv1"
+      type: "Convolution"
+      param { lr_mult: 1 }
+      param { lr_mult: 2 }
+      convolution_param {
+        num_output: 20
+        kernel_size: 5
+        stride: 1
+        weight_filler {
+          type: "xavier"
+        }
+        bias_filler {
+          type: "constant"
+        }
+      }
+      bottom: "data"
+      top: "conv1"
+    }
+
+This layer takes the `data` blob (provided by the data layer) and produces the `conv1` blob. It outputs 20 channels, computed with a 5x5 convolution kernel applied at stride 1.
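+
+As a quick sanity check (plain Python, no Caffe required), the standard output-size formula reproduces the `Top shape: 20 24 24` line that shows up in the training log later in this tutorial:
+
+    def conv_output_size(input_size, kernel_size, stride=1, pad=0):
+        return (input_size + 2 * pad - kernel_size) // stride + 1
+
+    print(conv_output_size(28, 5))   # 24, so conv1 outputs 20 channels of 24 x 24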
+
+The fillers let us randomly initialize the values of the weights and biases. For the weight filler, we use the `xavier` algorithm, which automatically determines the scale of initialization based on the number of input and output neurons. For the bias filler, we simply initialize it as a constant, with the default filling value 0.
+
+The `lr_mult` values are learning-rate multipliers for the layer's learnable parameters. Here we set the weight learning rate equal to the learning rate given by the solver at runtime, and the bias learning rate to twice that value; this usually leads to better convergence.
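+
+In other words, the learning rates actually used by this layer are just the solver's `base_lr` scaled by these multipliers (plain Python, illustration only):
+
+    base_lr = 0.01                 # from lenet_solver.prototxt, shown further below
+    weight_lr = base_lr * 1        # first param block: the filter weights
+    bias_lr = base_lr * 2          # second param block: the biases
+    print(weight_lr, bias_lr)      # 0.01 0.02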
+
+### Writing the Pooling Layer
+
+Phew. Pooling layers are actually much easier to define:
+
+    layer {
+      name: "pool1"
+      type: "Pooling"
+      pooling_param {
+        kernel_size: 2
+        stride: 2
+        pool: MAX
+      }
+      bottom: "conv1"
+      top: "pool1"
+    }
+
+This performs max pooling with a kernel size of 2 and a stride of 2, so neighboring pooling regions do not overlap.
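+
+For intuition, here is what non-overlapping 2x2 max pooling does to a toy 4x4 feature map (NumPy, illustration only):
+
+    import numpy as np
+
+    x = np.arange(16, dtype=np.float32).reshape(4, 4)   # a toy 4x4 feature map
+    pooled = x.reshape(2, 2, 2, 2).max(axis=(1, 3))     # kernel 2, stride 2
+    print(pooled)                                       # block maxima: 5, 7, 13, 15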
+
+Similarly, you can write up the second convolution and pooling layers. Check `$CAFFE_ROOT/examples/mnist/lenet_train_test.prototxt` for details.
+
+### Writing the Fully Connected Layer
+
+Writing a fully connected layer is also simple:
+
+    layer {
+      name: "ip1"
+      type: "InnerProduct"
+      param { lr_mult: 1 }
+      param { lr_mult: 2 }
+      inner_product_param {
+        num_output: 500
+        weight_filler {
+          type: "xavier"
+        }
+        bias_filler {
+          type: "constant"
+        }
+      }
+      bottom: "pool2"
+      top: "ip1"
+    }
+
+This defines a fully connected layer (known in Caffe as an `InnerProduct` layer) with 500 outputs. All other lines look familiar, right?
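+
+Under the hood, an `InnerProduct` layer is a matrix multiplication plus a bias. A shape-level NumPy sketch (illustration only, using the blob sizes of this particular network, where `pool2` is 50 x 4 x 4):
+
+    import numpy as np
+
+    batch, in_dim, out_dim = 64, 50 * 4 * 4, 500
+    x = np.random.randn(batch, in_dim).astype(np.float32)    # flattened pool2 activations
+    W = np.random.randn(out_dim, in_dim).astype(np.float32)  # the layer's weights
+    b = np.zeros(out_dim, dtype=np.float32)                  # the layer's biases
+    y = x.dot(W.T) + b
+    print(y.shape)                                           # (64, 500)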
+
+### Writing the ReLU Layer
+
+A ReLU Layer is also simple:
+
+    layer {
+      name: "relu1"
+      type: "ReLU"
+      bottom: "ip1"
+      top: "ip1"
+    }
+
+Since ReLU is an element-wise operation, we can do *in-place* operations to save some memory. This is achieved by simply giving the same name to the bottom and top blobs. Of course, do NOT use duplicated blob names for other layer types!
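+
+In NumPy terms, ReLU is nothing more than an element-wise maximum with zero, and the in-place flavour looks like this (illustration only):
+
+    import numpy as np
+
+    ip1 = np.array([-1.5, 0.0, 2.0, -0.3], dtype=np.float32)
+    np.maximum(ip1, 0, out=ip1)   # computed in place, mirroring bottom == top above
+    print(ip1)                    # -> [0, 0, 2, 0]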
+
+After the ReLU layer, we will write another `InnerProduct` layer:
+
+    layer {
+      name: "ip2"
+      type: "InnerProduct"
+      param { lr_mult: 1 }
+      param { lr_mult: 2 }
+      inner_product_param {
+        num_output: 10
+        weight_filler {
+          type: "xavier"
+        }
+        bias_filler {
+          type: "constant"
+        }
+      }
+      bottom: "ip1"
+      top: "ip2"
+    }
+
+### Writing the Loss Layer
+
+Finally, we will write the loss!
+
+    layer {
+      name: "loss"
+      type: "SoftmaxWithLoss"
+      bottom: "ip2"
+      bottom: "label"
+    }
+
+The `SoftmaxWithLoss` layer implements both the softmax and the multinomial logistic loss, which saves time and improves numerical stability. It takes two blobs, the first being the prediction and the second being the `label` provided by the data layer (remember it?). It does not produce any outputs; it computes the loss value, reports it when backpropagation starts, and initiates the gradient with respect to `ip2`. This is where all the magic starts.
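+
+Conceptually, the layer applies a softmax to the `ip2` scores and then takes the negative log-probability of the true class. A NumPy sketch for a single example, with made-up scores (illustration only):
+
+    import numpy as np
+
+    ip2 = np.array([1.0, 2.0, 0.5, -1.0, 0.0, 0.3, 1.2, -0.7, 0.8, 2.5])  # 10 class scores
+    label = 9
+
+    shifted = ip2 - ip2.max()        # subtract the max for numerical stability
+    probs = np.exp(shifted) / np.exp(shifted).sum()
+    loss = -np.log(probs[label])     # multinomial logistic loss for this example
+    print(loss)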
+
+
+### Additional Notes: Writing Layer Rules
+
+Layer definitions can include rules for whether and when they are included in the network definition, like the one below:
+
+    layer {
+      // ...layer definition...
+      include: { phase: TRAIN }
+    }
+
+This is a rule that controls whether a layer is included in the network, based on the current network state.
+You can refer to `$CAFFE_ROOT/src/caffe/proto/caffe.proto` for more information about layer rules and the model schema.
+
+In the above example, the layer will be included only in the `TRAIN` phase.
+If we change `TRAIN` to `TEST`, the layer will be used only in the test phase.
+By default, that is, without any layer rules, a layer is always included in the network.
+Thus, `lenet_train_test.prototxt` has two `Data` layers defined (with different `batch_size` values), one for the training phase and one for the testing phase.
+There is also an `Accuracy` layer, included only in the `TEST` phase, which reports the model accuracy every 500 iterations, as defined by `test_interval` in `lenet_solver.prototxt`.
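+
+As a rough pycaffe illustration (assuming the Python bindings are built and on your path), loading the same prototxt in the two phases instantiates different subsets of layers; `_layer_names` is an internal attribute used here only for inspection:
+
+    import caffe
+
+    train_net = caffe.Net('examples/mnist/lenet_train_test.prototxt', caffe.TRAIN)
+    test_net = caffe.Net('examples/mnist/lenet_train_test.prototxt', caffe.TEST)
+    print(list(train_net._layer_names))   # no "accuracy" layer in the TRAIN phase
+    print(list(test_net._layer_names))    # the TEST phase includes "accuracy"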
+
+## Define the MNIST Solver
+
+Check out the comments explaining each line in the prototxt `$CAFFE_ROOT/examples/mnist/lenet_solver.prototxt`:
+
+    # The train/test net protocol buffer definition
+    net: "examples/mnist/lenet_train_test.prototxt"
+    # test_iter specifies how many forward passes the test should carry out.
+    # In the case of MNIST, we have test batch size 100 and 100 test iterations,
+    # covering the full 10,000 testing images.
+    test_iter: 100
+    # Carry out testing every 500 training iterations.
+    test_interval: 500
+    # The base learning rate, momentum and the weight decay of the network.
+    base_lr: 0.01
+    momentum: 0.9
+    weight_decay: 0.0005
+    # The learning rate policy
+    lr_policy: "inv"
+    gamma: 0.0001
+    power: 0.75
+    # Display every 100 iterations
+    display: 100
+    # The maximum number of iterations
+    max_iter: 10000
+    # snapshot intermediate results
+    snapshot: 5000
+    snapshot_prefix: "examples/mnist/lenet"
+    # solver mode: CPU or GPU
+    solver_mode: GPU
+
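+The `inv` policy above decays the learning rate as `base_lr * (1 + gamma * iter) ^ (-power)`. A two-line Python check (illustration only) reproduces the learning rate reported at iteration 100 in the training log below:
+
+    base_lr, gamma, power = 0.01, 0.0001, 0.75
+
+    def inv_lr(iteration):
+        return base_lr * (1 + gamma * iteration) ** (-power)
+
+    print(inv_lr(100))   # ~0.00992565, matching "Iteration 100, lr = 0.00992565"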
+
+## Training and Testing the Model
+
+Training the model is straightforward once you have written the network definition and solver protobuf files. Simply run `train_lenet.sh`, or run the following commands directly:
+
+    cd $CAFFE_ROOT
+    ./examples/mnist/train_lenet.sh
+
+`train_lenet.sh` is a simple script, but here is a quick explanation: the main tool for training is `caffe` with action `train` and the solver protobuf text file as its argument.
+
+When you run the code, you will see a lot of messages flying by like this:
+
+    I1203 net.cpp:66] Creating Layer conv1
+    I1203 net.cpp:76] conv1 <- data
+    I1203 net.cpp:101] conv1 -> conv1
+    I1203 net.cpp:116] Top shape: 20 24 24
+    I1203 net.cpp:127] conv1 needs backward computation.
+
+These messages tell you the details about each layer, its connections and its output shape, which may be helpful in debugging. After the initialization, the training will start:
+
+    I1203 net.cpp:142] Network initialization done.
+    I1203 solver.cpp:36] Solver scaffolding done.
+    I1203 solver.cpp:44] Solving LeNet
+
+Based on the solver settings, we will print the training loss every 100 iterations and test the network every 500 iterations. You will see messages like this:
+
+    I1203 solver.cpp:204] Iteration 100, lr = 0.00992565
+    I1203 solver.cpp:66] Iteration 100, loss = 0.26044
+    ...
+    I1203 solver.cpp:84] Testing net
+    I1203 solver.cpp:111] Test score #0: 0.9785
+    I1203 solver.cpp:111] Test score #1: 0.0606671
+
+For each training iteration, `lr` is the learning rate of that iteration and `loss` is the training loss. In the output of the testing phase, score 0 is the accuracy and score 1 is the testing loss.
+
+And after a few minutes, you are done!
+
+    I1203 solver.cpp:84] Testing net
+    I1203 solver.cpp:111] Test score #0: 0.9897
+    I1203 solver.cpp:111] Test score #1: 0.0324599
+    I1203 solver.cpp:126] Snapshotting to lenet_iter_10000
+    I1203 solver.cpp:133] Snapshotting solver state to lenet_iter_10000.solverstate
+    I1203 solver.cpp:78] Optimization Done.
+
+The final model is stored as a binary protobuf file at
+
+    lenet_iter_10000
+
+which you can then deploy as a trained model in your application.
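+
+As a hedged pycaffe sketch of using the snapshot for inference (the deploy-style definition `examples/mnist/lenet.prototxt` and the exact snapshot filename are assumptions here; depending on your Caffe version the snapshot may carry a `.caffemodel` suffix):
+
+    import caffe
+
+    net = caffe.Net('examples/mnist/lenet.prototxt',    # hypothetical deploy definition
+                    'examples/mnist/lenet_iter_10000',  # snapshot written by the solver
+                    caffe.TEST)
+    print([(name, blob.data.shape) for name, blob in net.blobs.items()])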
+
+### Um... How about GPU training?
+
+You just did! All the training was carried out on the GPU. In fact, if you would like to train on the CPU instead, you can simply change one line in `lenet_solver.prototxt`:
+
+    # solver mode: CPU or GPU
+    solver_mode: CPU
+
+and you will be using CPU for training. Isn't that easy?
+
+MNIST is a small dataset, so training on a GPU does not bring much benefit because of communication overhead. On larger datasets with more complex models, such as ImageNet, the difference in computation speed will be much more significant.
+
+### How to reduce the learning rate at fixed steps?
+
+Look at `lenet_multistep_solver.prototxt`.
diff --git a/examples/mnist/train_lenet.sh b/examples/mnist/train_lenet.sh
new file mode 100755
index 0000000..1b6bf7d
--- /dev/null
+++ b/examples/mnist/train_lenet.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env sh
+
+./build/tools/caffe train --solver=examples/mnist/lenet_solver.prototxt
diff --git a/examples/mnist/train_lenet_consolidated.sh b/examples/mnist/train_lenet_consolidated.sh
new file mode 100755
index 0000000..c855467
--- /dev/null
+++ b/examples/mnist/train_lenet_consolidated.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env sh
+
+./build/tools/caffe train \
+  --solver=examples/mnist/lenet_consolidated_solver.prototxt
diff --git a/examples/mnist/train_mnist_autoencoder.sh b/examples/mnist/train_mnist_autoencoder.sh
new file mode 100755
index 0000000..cfd67e8
--- /dev/null
+++ b/examples/mnist/train_mnist_autoencoder.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env sh
+
+./build/tools/caffe train \
+  --solver=examples/mnist/mnist_autoencoder_solver.prototxt
diff --git a/examples/mnist/train_mnist_autoencoder_adagrad.sh b/examples/mnist/train_mnist_autoencoder_adagrad.sh
new file mode 100755
index 0000000..95fe1b1
--- /dev/null
+++ b/examples/mnist/train_mnist_autoencoder_adagrad.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+./build/tools/caffe train \
+  --solver=examples/mnist/mnist_autoencoder_solver_adagrad.prototxt
diff --git a/examples/mnist/train_mnist_autoencoder_nesterov.sh b/examples/mnist/train_mnist_autoencoder_nesterov.sh
new file mode 100755
index 0000000..cf19ea7
--- /dev/null
+++ b/examples/mnist/train_mnist_autoencoder_nesterov.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+./build/tools/caffe train \
+  --solver=examples/mnist/mnist_autoencoder_solver_nesterov.prototxt
diff --git a/examples/net_surgery.ipynb b/examples/net_surgery.ipynb
new file mode 100644
index 0000000..ff780fb
--- /dev/null
+++ b/examples/net_surgery.ipynb
@@ -0,0 +1,6911 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Net Surgery\n",
+    "\n",
+    "Caffe networks can be transformed to your particular needs by editing the model parameters. The data, diffs, and parameters of a net are all exposed in pycaffe.\n",
+    "\n",
+    "Roll up your sleeves for net surgery with pycaffe!"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline\n",
+    "import Image\n",
+    "\n",
+    "# Make sure that caffe is on the python path:\n",
+    "caffe_root = '../'  # this file is expected to be in {caffe_root}/examples\n",
+    "import sys\n",
+    "sys.path.insert(0, caffe_root + 'python')\n",
+    "\n",
+    "import caffe\n",
+    "\n",
+    "# configure plotting\n",
+    "plt.rcParams['figure.figsize'] = (10, 10)\n",
+    "plt.rcParams['image.interpolation'] = 'nearest'\n",
+    "plt.rcParams['image.cmap'] = 'gray'"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Designer Filters\n",
+    "\n",
+    "To show how to load, manipulate, and save parameters we'll design our own filters into a simple network that's only a single convolution layer. This net has two blobs, `data` for the input and `conv` for the convolution output and one parameter `conv` for the convolution filter weights and biases."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "blobs ['data', 'conv']\n",
+      "params ['conv']\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAlIAAAHNCAYAAADVB5V4AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvWuMZdl13/c/tx733np393T3PPkYDUccPsQZiaRkCYpE\n",
+       "CYklOwYhfwjCIAEiJDLswAkQf3AQIEoC64OcIEDiIHESBAiCCAkkJ4GtJHCM+KHQjmGZtmxKJBVC\n",
+       "wxkOZyac4Uz3dHe97q1bt+7Jh+r/rt/517499ER008xZQKGq7j1nn73XXns9/mvtfZq2bdVTTz31\n",
+       "1FNPPfXU0z86DR52B3rqqaeeeuqpp57+SaXekeqpp5566qmnnnp6j9Q7Uj311FNPPfXUU0/vkXpH\n",
+       "qqeeeuqpp5566uk9Uu9I9dRTTz311FNPPb1H6h2pnnrqqaeeeuqpp/dIvSPVU089/b5T0zT/RdM0\n",
+       "/87v97Xv0s4HmqZZNE1T1WtN03y5aZp/6v/rc3rqqaeeSE1/jlRPPfX0vUBN03xA0suSVtu2XTzc\n",
+       "3vTUU0//f6Eekeqpp55+X2kZItRTTz319L1IvcLrqaee3pWapnmuaZr/s2maO/dTZH8E3/2399Nz\n",
+       "f7lpmkNJn7n/2S/hmj/dNM03m6Z5vWmaf/V+Cu5p3P9L9//+yfvX/Kmmab51/55/Ge384aZp/mHT\n",
+       "NPeapnm1aZp/7x9hDK80TfNT9//+95um+R+bpvmVpmn2m6b5naZpPtQ0zb99/7nfaJrmn8a9P980\n",
+       "ze/ev/alpmn+WLT9oPENm6b5j+63+eZ9Xo3+Ueegp556+u6k3pHqqaeeHkhN06xJ+l8l/RVJ1yX9\n",
+       "65L++6ZpnsVln5P0S23bbkn6vyS193/UNM3PSPo3Jf20pA9J+sl4RLn2Pt2UtCPpcUn/iqT/vGma\n",
+       "3fvfHUr6F9u23ZX0hyX9iaZpPvttDiXrGP5ZSf+dpCuS/qGkv3r/88cl/ZKk/wrXfkvSH27bdkfS\n",
+       "z0v6j5umeeHbHN+flfSMpE/c//2EpH/32+xzTz319F1OvSPVU089vRv9iKTNtm3/bNu287Ztf0PS\n",
+       "/6Zz58n0l9q2/TuS1LbtSdz/z0n6b9q2/b/btp1IqqFIDf4+lfRn2rY9a9v2f9e58/T999v+fNu2\n",
+       "X7n/95ck/aqkn3iP4/qbbdv+1bZtzyT9T5KuSfqz9///NUkfaJpm5/6z/nLbtl+///fflPR/SPrx\n",
+       "dxtf0zSNpF+Q9Kfatr3btu2hpF+W9M+/xz731FNP32W0+rA70FNPPX3X0+OSXovPvnH/c+kc6Xn9\n",
+       "Afc/JukL+P9B10rS7SgWP5a0JUlN0/ywzhGej0palzSU9Bfepb1l9Bb+nki61V7svpnc/70lab9p\n",
+       "mp/VuYP0IZ0HoBuSfuf+NQ8a3/X71/7WuU8l6dxp7IPYnnr6HqF+MffUU0/vRt+U9FQDT0DS+yX9\n",
+       "P9/m/W9Iegr/P1W55tvdPvw/SPpLkp5s23ZP0n+p77Aea5pmKOl/lvQfSrrRtu0VSX9ZFyjag8Z3\n",
+       "S+dO2Ufatr1y/2fvfoqwp556+h6g3pHqqaee3o1+U+eo0J9ummataZqf1Hl90a/e/76p3NPg878g\n",
+       "6eebpvlw0zQbkn7xAde+G21JutO27axpmk9L+hf07Tth75XW7//ckrS4j079M/h+6fjuI2v/taT/\n",
+       "pGma65LUNM0TTdPw/p566umfYOodqZ566umB1LbtqaQ/IulnJb0t6T+T9C+1bft7vkSXnZnyWdu2\n",
+       "f0XSfyrpNyT9nqS/c/+akyX3P8gx+tck/ZmmafZ17rD8WuW53w4t6/Ol/9u2PZD0b+jcYXpH57Vh\n",
+       "v14uevfx/VuSvibpN5umuafzonYW6vfUU0//BFN/IGdPPfX0j5WapnlO0pckrX8vHpz5vT6+nnrq\n",
+       "qUs9ItVTTz19x6lpmp+7f57SFUn/gaT/5XvJyfheH19PPfW0nHpHqqeeevrHQX9M52cxfU3nxxv8\n",
+       "iYfbnd93+l4fX0899bSE+tReTz311FNPPfXU03ukh3KO1BNPPNE2TaPFYiHvqB4MBhoMBlosFuV/\n",
+       "O3n+3TSN2rYVnb/FYqGVlZXqc05PT7VYLDQcDstnvLdpms5z3Jemacqz+FzT2dlZ55n+brFYlHH4\n",
+       "Jx1VjpfP8/0cvz9L8rPdL/PS95DIV//tdufzebl/MBjo7OxMknRycqL5fK7FYqGzs7NOv9xX3ucx\n",
+       "um3zb2VlpVzvfq+urmplZUVra2vl89XV1dI3f+/71tbWNBgMtLKyUr73vb7O/fAzJWk+n+vs7Eyn\n",
+       "p6c6OTnR6empjo+PJUmz2Uyz2UxnZ2c6OzvTdDot43Pbq6urha85d23b6uzsrDM2X+f+sR32lfeb\n",
+       "b6enp6W/Jycnl9bBYrFQ27YdnrkdEvtLWXAfSOvr6+U6yvLKykpZU+6Tn3N2dtaRi9PT0yIz5oHX\n",
+       "p+cq57Bt2zLffObZ2dklHZAyzb7kGC2P8/m8fMd1nOs++8223EZtTS1bl9RVlo1cp+4jn+371tbW\n",
+       "tLKyUtaI52gwGGg8Hms4HGo0GpU2LZ8ep/uc7fr5ptXV1fIzGo3KnGU/Z7OZjo6OdHJyXi9/fHys\n",
+       "k5MTHR8fa39/X7PZrHOv1+dgMNBsNtPp6WlHH3D+qE/5v+eLvLWs+Huubz8v9bWfSX3n+9q2LfJt\n",
+       "vtV4Zdvk/1dXV8sapL7ydysrK1pfX+/MI+ns7KzMK9e9pKKL5vO5ZrNZGZ919Gw203w+78ilx8Hx\n",
+       "5/z7/tXV1aodSVk+OzvT2tqatra2tL6+rvX1dY1G528yGo/HGo1GHVvHZ7m/bdtqZWVFk8mkyM3R\n",
+       "0VFn/LQltD2eY/LIY1wsFh09NhqNihxT1wwGA62trZX7PW+cX//YP+AYTk5OOuvTbQ4GA62vr+tv\n",
+       "/+2/Xd1d/FAP5OTkp3GpkYW7ptxSWbr9FOhUvlRwXNxcvOwrhY6LM9uuKWE6HxQMGxLyo+YUuR90\n",
+       "xmqGM9twX7y4OEb/b8UkXRhezwevYz+TJzlGj82fe6HZQNvBPT09VdM0Rei9+Gu8oMHwuOikeBFQ\n",
+       "yfNzf0cn0f3zdTZSVLocHwOAmhOZPHe/05Fi36yI3A5/WxH6s1zo6USQas6Vx2eniW36ev+4Xx6D\n",
+       "nU+PwfflWkonM+Uz1yWNDOfE8lbTC/zMz2a77puNPOc428hgin9TFvkcyg+NOR0o3pPt+38avGXO\n",
+       "oQ0Ency1tbXyXDvGkooTs76+Xox/ytNoNCpGkkZrNpsVZ97P9H1ux/JKPnrsdrBozNx2zQiTGBT6\n",
+       "mXTO2VeOJYljTVmkITWPvG7szDMoonw/SD/bAbY+SpmhLPh5/JyBrXShE8kH/+9167mogQm5HpYF\n",
+       "IPx7fX298MUOYwZZvp5OiHnlZ9jx87gsjx57ggnU2an/amtQurAD7hPXBfuba926NGWQPPZaok7k\n",
+       "2GvU10j11FNPPfXUU089vUd6KIhUQrn+jPAeI5OE52sRrz8n2fs0qpJ9yL+XpfLyvloUZGLkW0sL\n",
+       "Ealh5OroIlNUCb8yYuczE4FJpGRZipFtJIrk69KrTwQvowf2MXnrMRLpWBZZsm1GXgnVJkpkfrmv\n",
+       "hMKTl0wFcYyJspHMH/LF0WimpXiN++7ok+1ndJ9RG9uuESNnPt9tE+1gSnR1dbWkGCR1/jZ/2KZR\n",
+       "KCJTfC7TTSbKVkah7kvKlL/jmBMFznRAppm9/i1zNWKKJJ+Ra8Tk9ZmIOsdBhNd98N+1SNkoD++b\n",
+       "zWZlXayurmo2m5Vo2eiU59XolPm2vr5evtvY2CjtNE2jtbU1DYfDS+j+6elp0Uuz2ayDco5Go/L/\n",
+       "cDjU1tZWQZ88Bqd6c70wzWIko5YZyPVCvZI6jql3y1eiGWyH3zFd5LY4T5mKdB8TJed6MoLj/i7T\n",
+       "r9PptIN0MIVHOaFey/Xk750GJI/5P/mW65F21qijZcufU8aJVhnlNLHf8/m8yJd0IcMea9oWzgOf\n",
+       "R31qtNa6iXLpezKN7jWWKHUikkTkh8NhR7+kj/CgbNlDcaRcf/Fuxt1Uc6DSmDJ1lQ4RnYma8ksj\n",
+       "RZg+IfmaEmV7tTSGKVMstVRDpm9qfaNgGKZcljpIZZFUUyx8tttIaPhBqRbynIqByiH7QMeC/6cS\n",
+       "rEHUNn40DHRoamkBGi8aWjs7mWo1eRxe3CmH7EfNYGTagvfW/qahogzm9w+613VblhvWszgNQyVF\n",
+       "3pBvNvRUnBybx03nz2Pl5zXFV1uHOXfpcHK8NbmiwiRRRjIwyrXJOc60L/ubTikdVxuTWuqVfXU9\n",
+       "Uuq7tbU1zedznZ6eajweSzp3bE5PTzUcDotTZePlmir3Y21tTRsbG5LUSfNxLJwnp3S4Dj3no9Go\n",
+       "OFDr6+udtK9rVuxQUV6X6Xp/TqeZDom/s5OVASmvqzlSKQM1R9/EObKusZzSUTU/7EzQqWAKy8+z\n",
+       "TuA95pn5armhfiDvycO0cymzXNt0UsgPyy9tyXw+13g8Lk6465DMG9dImS+cQ/d/Pp9rOp2WNl03\n",
+       "5vor8t68qQEP7Bf7kGRniXWzGYBxjdqZo8NF8lpjepZB3zJ6KI6UF4SFS7pcgFYz1Mu+s2BZ4DhR\n",
+       "ifRQGOmAJWqVTlQ6QdLFxFppsdbFPxTgzDcvMwrJq6RliJT7zfFln0kWuHQaaTxS4dXQoFq7OR5G\n",
+       "eUYkzLc0IHSe2J4VP4lzV3MibYASAXJBPcfN+7wg0/GmE8L+nJ6elgXNaI/oVdb+pANtviRRcTEA\n",
+       "YXTlfidfbNzIRyKAvocKfjablTXFfvK35SZlkQhBLSp3PyhXlI1a/RINajqWjLhrwQCdu3T0jejw\n",
+       "Gdkn8sY1M0ROEo2lImbUTIch5c28MiKUhtE65fDwsGOwiFKxEN3y4s8530YW3Bfy1M82atS2baf4\n",
+       "mePZ2Njo1MK4bW+YmEwmHcTEsmLjzfusbygfUleW+LnbY11dGjmui1zH+Xmi0v6faycdcmYUqAey\n",
+       "XsnriN/T4bMT7Pmn82TKNUZnJvV+Xlfru5+dn5mnGxsbGo/HpehcunAkE6RwW9YVRjo9jpOTk0vB\n",
+       "ittMJ4/rn85eAiJeX9ZDGSBbxiwDXjM5hpqudV8pLw9yoEwPxZGqIUy16CWNeP5tIrPTuFPYsu0H\n",
+       "OWwPui6NgP/f2trSYrEou1aYJnFfahD0g1I2y/rIsdfuz+hpWXTO67NQl22z6JTOIgU8F1cqr+SX\n",
+       "+WOlYCcrHRRCvORbRlN8Jp1rOgUe2zJidOO+1CJct51zyfHVEJRlz2SElPPNtOayaKtGq6urGo/H\n",
+       "BWHgzhYTETj3n3NsR9TP4hzTQNBYZWFmyhwRwORVBjQcZxpSts/ou9Yuv2O0nw5YRtyZcq31xd9R\n",
+       "4ZM36bRJ3eJ6ts35d8GxkbDFYlGifbc3nU5L0bh/M4jz3Cca6X6lvNJBbZqLtI/lwCksr8ssql5Z\n",
+       "WSlIhh0pOoCZikkkcVn6jP30d6mj89osneC9lBcaaDpl5Jvn07KUc8ni+uRpkp9np9t8zLS626L9\n",
+       "Yhsu3M6AmTwl+sI2OPe+zw6OHWXu2vPGBfMisyAMZtimHTMHZ3ROzUvPZdM0nfRibWMTx+Q5465E\n",
+       "o6UMHIiaWi5qjiUdd+rEZYEh6aHXSOXkZzTo73xfMraWusnrKUzLnDQKbEaFqYRTCXJS1tbWyk4Q\n",
+       "RnFMi+T2WaJiOU6OaZmx8PcP4lt64eY9lUrNQawZjexD7m6wMUklQqXExcBnZ/uG2wkNW+F4LrjA\n",
+       "OXYa0ZzD4XCoweC8ZoEQt6FoO1E1VKo29uTRMgWakL+vpbKko2H5riGYbpPjZd9Go1ExfHQIrTCS\n",
+       "7/4ud9xQdigbtUiQqY1Ee2ikUmbYb/fTz6OyzfVrfqX8p9NOxeiaFtaskei85Nbx/J9rP50nGyFJ\n",
+       "HSRpWX1JGj1f73XIMR4dHRVEwwbKCK+PNXD7dL69ZigDdBz8HP6YWAs0HA4vbcl3W0YMmE6cTCaa\n",
+       "TCalTfaPfeLcEkFNBMW8zpS+7880UNqN1HWeW1IGbzT81CmWd/Yz26/ZN/PCa9N1aJI69WfZT/eF\n",
+       "Nsv99fe0d+lEel2srq52HI3hcFhknulIP399fb2DJtLhp7NP/lt/s+bJ8s5Uam3Ocodd6o4sUSAP\n",
+       "0kHjZ9mGx0J542/Td50jZcNVixRraT9peY2Pr2UxJwdsRcRoS7q8JdXPdxtZdFZLV9UMLRUwhdTn\n",
+       "GTnq4aRyLKaag0dllf2mYUo+8HmpjDhu9sNKxHynQlldXdXJyUkR5OSplXsa2nRslzkhdBjIJ6N9\n",
+       "jlqcguAC5pi4aDlOR5D+YbrM33FuyYNaFMb2H4SqWBYtI4mK0DAxuk6lxmtSeZq2trYKMmA55BiT\n",
+       "T/7OxbBO0XC7Mucx5akW6XJeE33NNmtyaaVt+aUO8PNpGBOd5f+cRxrudKLosKaDXgsoMuBiSsHE\n",
+       "SDjboQwlslBzEpb1M5HDZWlSo0Scd39HXrN/kgrCbid/ZWXl0hZ9/3bAkw6h+zibzTprkQ5IyhQd\n",
+       "/pQny0Q6Xrkm2Re2l8FlIra8j+k+857nQBFdYmDGZ5qfnO+1tTWdnJyUtLHJ/5vfDMLolJlqtsPy\n",
+       "yPVFJ8q8J/9cX+dn2jln4FFzUt2GA14Wn/se2xTKs9vMAIJjMVrHdDBlv7beag6S+0rbn8FpbcPA\n",
+       "sr6R+uMPeuqpp5566qmnnt4jPbQDOTPKq6FG9KLtmdvDpIf4btub7ZHXoLlEpdgXEqOUjJoyIkp0\n",
+       "ROqecJ31R0zfZX/ordfgz2XpUPcvd5cxQieMnMgSU5isr3G06wiMCBn5xkidffV15GFNFjJiIIJF\n",
+       "apruDrq8z5EzecCxZHGk++7xJ4pX47XbSpSHfOD9teLFZeki3+OxMLpMlCZl0SdjJ1rj6NKpFp7s\n",
+       "7qjRhcS5LjJNlM9mio98ItReGyPRkUzp5L2mRLRr7Zp/mW5wn31PDTFIMpJIPZTPtpzxwEq2lyla\n",
+       "98upx5QvRvcei8fgnVTr6+saDoed0+qlCxQoU4mudTLKQTTfJzsbcUl0inUjHH+tEJwImVN90+m0\n",
+       "s0s06+USTSIim8iK5TtloIbmE6mnPPD51mdMCfm+1Lvc1epTvLmzL/Uex8W++H/zk8+ZTqcFESKS\n",
+       "46yN9UGuC46fetyUpSmsf2V7zLb42UQbieS5Xq+GEBFNTH3FbEnOI/mWtaFEnCgXtCGLxaKzucKf\n",
+       "my+5zolAZSnEu9W6PrRi84RqOQF0qkz8nLAmHZvcsr4sx1z7nukOCkme8JoTkbuiMi1QG3M6WZzA\n",
+       "WrqBcHS2nUJbG5/vpTDkIqnxhqmFWlrExtbPsQKuOYT+u5aarPWBCns2m5XCz6ZpSsFtykC26e/G\n",
+       "43FV4dChS55L3WJ46SKnbwcjnWHKbebseS3Tekk5127DhjudePLSZ/ywz3aoTk9PL51kvb6+rslk\n",
+       "0nE+BoNBqYtKntJJ4v/uS65p04MCnVwL6dSQz7mGmfbw9emcS11D4X5n6j7byDQ6yddwvec81NK+\n",
+       "NVmgk5tOlmstaRhNbduW18bYmWIK2k6mj15gOtxOgHWC15P153Q6LXJuJ+Hk5KTU7XitppNlXZi7\n",
+       "YRk4DofDjp7mRgrztBbs8RmcS/Yhi4M5jzTgdog572y/dk8G0Jyr2k7dTDXZWWaKzs6mZYXrx8Gf\n",
+       "5yIBhlpqjzqXa/Ps7OJsNPPXNi3H5P5m2pf6x33hphynNy3/eTZVHjPDfvs3bUDWidYCL96f7ZFP\n",
+       "tM8eS8695SH9ELe5rLSo3P/Ab79DlB6xpEtOUCoNDrKm5KloKVC5GKmUiKC4DT4/nTbpcrFerd7h\n",
+       "QQXINBKpMOgQJoJUUxxE8JaNnUqa9S6JHGRk6MJSP5vndNjL39zcLAiGpPJOO+/oSWfB/a1F6nkd\n",
+       "58F9pAKSLuoWjLBwYbB+ywo6597jT4TTfXc9h/vrV2cQeWQ/+ZnHSmXj/qURrjkS5A3XCxWDDaT5\n",
+       "sLW11eGjjdrGxoZ2d3d1dHRU+LZYLLS9va3RaKQ7d+6UNnwQI5V7IhxE/2p1UP6byF+2ybmmovfc\n",
+       "+TvKbN7H+fVz0iG2A+57cq5oGDgGritS1riQWBz/IMoaEuoBFvOyP5ZJRuj+2zUtvM+8MAKVemrZ\n",
+       "GUZ8rxvfpejaKq8VoiLJ76ZpOs+0HNZqmIhGZ7Evn+f7OH46Tpwr94sOXOqOZSgI+0RdlYdVeh7d\n",
+       "pvVNDSXjLjW3a57O5+fvdyMSLKmzycOoejp9bIsyTEcg1xydS7bj+wgMJL/TiWWAaVnyRgR/58DO\n",
+       "6FDKqNtLnuW851jz+mW2tMajmm6l/LHuzv1LPZD00IrNM8KkN5reZio+Ks9EWUheCF4ED0K5GPnx\n",
+       "Oct2SjFVlEJaixj8PxUcBclCT0hRulC0NaPt8SfsmX3xdTXEh5T8JgpkgXMagUW1VAwnJyelSJlG\n",
+       "wuN3P2qITE1RZBrUPPH1VmY1x6XWf99H5ctFxIgwn+d5SDl1mzxnJ5E5olC1uXI/mD7ytcsUsw/I\n",
+       "cyR48+bNslPqnXfeKdecnp5qd3e33Pf222+X+bh69aomk4nu3r1b+O31lYqf823+1tApU64nUhrQ\n",
+       "WuCSCs/r1G3TWJCX/Jv/p7OUn7vdmoLmdwyg3FemhdIxJw8sP4z+a333d9ztRCTNhs2yk/rB629l\n",
+       "ZaUEOOYjdcpkMuk4S0Q/WLCeaF/bdl8+TSTH+pFrKA0j1zADOF9L3lMfk9/We6nbPC88r4j3mV/L\n",
+       "HGXzO3Wk9Y1l0f30phXOB8fqteTfNYc3X0xs/UEdQseKzgvXpK/jeqJc0V6kbuHc+R7zkAief3t+\n",
+       "fQ4g1yjn1A4mN5J5LrJ/tP2cm2WOTM0hTB1APcY0N51Gy6EdYT7T6+BBAdJDfWlxzdNLB8O/GdVy\n",
+       "UVKhZhSZ3icdsHSq0qPP74gyeRGn05OMToXsyWB7fp5pPp93YHorJEPAtYh7mYfetm1nl072iahU\n",
+       "wpzul3ex+XsrDSNWdChPTk7K2SNEp6QLYeQ81RQKHeokf8++LjPWnEsrvExt1QwrDYd5z+sodySO\n",
+       "j4suFbhlmONINCFRHo7F50K5baKHbduWk69v3LihtbU17e7u6u2339bR0ZGuX78u6fxcn9lspsPD\n",
+       "w9Ju1g3RGfQY6CTQkSYPa5Egx5Jkx9HP5trLtc01yr7WiP0xcS5qn9eIc+F55VpchnLVomvKKB1s\n",
+       "95MpbD+PazvrS7z+bBxtJPy/9VTuvGTazrszJXWcLc8j09uWbbfLQJMy7TXDl78S/aND7HvdL+kC\n",
+       "xSD/0rmyM1ALWjPQoQxTj6ceomNSs0F2MqwP3ad83Q4dW8tv7nZkP2vItOeBvMv7arYz/2eQTr6Z\n",
+       "31yvzEBYr9euTQczn8s1zN2H1il5vqLHknqYgAPlxfaT81rjnX8vC/D4TDtQPCDVtEyfdZ639Jvv\n",
+       "IDG1k8rQSp1MIrOWwX1s10Tna1k/an2g8NGIuE0LU0ZQjAJM2feMVPK5NbTKW1LzoM+MElOBJyKW\n",
+       "4/CPo41EAMwHKw/pomaMyBTn04Wk9u6Pj48ldbekppPBhcGo3v1gfj0dVyploj5OdzCKSx7UiBGO\n",
+       "lQcXfipn/5+FuaZMP/jvjJrSQaHctW3bORiPxteo0GKx0ObmZvnuySef1M2bN3VwcKCjoyNNJpPy\n",
+       "3Qc+8AG9/vrrZW6YSl5fXy8pQLfrvrKPHk8qHMt1Takn4snvOFeJEPmzNJZ0rLh2/L/vf7d70zmj\n",
+       "M7wMOeWp3RwDU3OMvNk+ibojnVken8IaTqn7qhdTbrDwuiJvjXycnp6W2iiiIGybcmFq24vXgWQG\n",
+       "4Ozs7FI9mvvuYGEwOD+7rZYez/FQzmvBLddIBpl0ZLwxxp9nDRd1dKIXWRtjJ2NlZaU4fDbAGQCw\n",
+       "baLNXstOnRo1pI7w3JlPTrm7n3TkExRg8EEkTFKnRMPz4e8SRWf//Qw7+wwWrRcsczUEkGsqbSDt\n",
+       "EJ/rdhPl8vW09ymnvt6orufJPE2AgKlyBjKmd6uR6o8/6KmnnnrqqaeeenqP9FAQKUag9AIdtWYx\n",
+       "Mj1XwvzSBYxKDzyjHXvoNXTK9xiG5n2MhOid5q6mHIPbleq1SPa2s76Lz6WHzfTbcDgskWTuwMn0\n",
+       "FseyjByhJTxuqNPeu/njQnOfAM7IzGmP09NTra+vazqdFt64borRPOeZEQERKc/bsnoHp0L9HWuc\n",
+       "vHtkGQLFCGcZ8iFdrhEyn/muMcPXhLQpb45wKTNZNJ8pahMRCkZGa2tr2tzcLJGuEUvpPI25u7ur\n",
+       "nZ2dMi9GoK5evarr16/r9ddfL+/CMgrld7OZL5kCYyEmUVWmUGuolGU607fmEefORP4k3J7P+XbQ\n",
+       "xkT5/Dd1jqNfR8+JDpovNaTLfU20mXNtqqWOjDrVULbFYtE5+dqoIHcVU9aMmJ+cnHRQJKdWvD6I\n",
+       "uvl+oyOca88do/WcX6fgBoNBKaD2PBoZMPKQNVIp8x5jTf/6+TU97756TnP9U3e4nUQ6vA5ZYM0U\n",
+       "K1FZ6WKzi9vwvLh9rzGj26yt8jWuIaJObNu2HNZJniWyYz5ST/n7zJ4wxWbExqldv7DYesx/u6/U\n",
+       "B4nUGnk6OzvTbDbrvMrI/aghzrkBoWYDPQ9ZT0hEjvcxE8W5kbqZllpfPO+ZPXJby+ihvSImoU8q\n",
+       "p3R6qLBS4aZjlSkTTmItx8kUhZ9nITPTs0DQ189mMw2Hw046qJbek7q7c6y8MiXoSSTM6X7ZmeF3\n",
+       "8/n5m7a5RXYZ7G7KflGJ0JhQwVhxmKy47US5P3agzs7OdHx83Jk3GlH/JJRL2JUK27tAEr63M2Ml\n",
+       "7q3e7qPbzfScx5MpGPOBcDl3z7iPi8WinL2USjyNZi5YK3grMc8VDRqdkpRFzqGV9COPPKKrV69q\n",
+       "Y2Oj1D299tprOjs70/PPP6+bN2/qox/9qL72ta9Jkt566y09+uijms/nevPNN3Xjxg298cYbki7O\n",
+       "rjk8PCxjSXifxj8dF65fKl1+nulAy3SmcNJBSsPhdUl4nqlUznemK2igm6ZbJ5PzWktJe3x0CMwf\n",
+       "zzEdoizArfWtZjBMLIJ1X3JuPHYWmVueuRuMDhSdf6/PxWJR3q2Wzo0pa+TcB6+tmlNsObBDTt5Q\n",
+       "b1JPMmXjdQ3XAAAgAElEQVTmsbEPNYeWupz1aVI3jc2Uln8zcGOtls+Isp5kP1nL5jmiDuBOQZ7Z\n",
+       "Zp75eazH5JxmKpLymPV+Hn+m9vJ5Juph6zvrIzp9rN/L1B5tWwaUlEuvGcobbY774+eZbPMpF9SZ\n",
+       "DLS5GaUmgwYjaIfcN65df0Y+1fyH0vbSb77DxHy+yYOhopa6RbX+30QFZsVSi158H+8lCkIB83OX\n",
+       "1XI5ak1kidFMkoXmQYhHjtvfsSaFyJmjPBdFE5FIHqTCoaHLKNERlGugMjqwl88IVFJHQM1bCn8q\n",
+       "nozS6VhwjshbzqHbc/TD9mgUJF3ajcGaq+QLo1YrFKn7mh+jgZxLjikNCuWMBsD9TiPNdsk7Opmj\n",
+       "0Uj37t3TRz7yET3yyCNaX1/XU089JUm6c+eOptOprl69queee07r6+v65Cc/KUn6W3/rb+nWrVtq\n",
+       "mka3bt3SzZs39fTTT0uSXn75ZY3H4yIDHveyeck1STmhMeN6ZzDAufF1NTSu5iDlOiNKkcYy17rv\n",
+       "z8g+dQCfx3lLRMbkuaoVufp7yonXYU0vZD8oz+ms5/qiI85de9xR634QDUjDukyXmXIe+W5H8571\n",
+       "g2ncs+CZ75hjQbudw1owkWvMn+drW0y0MenUSBfvS01ZYzt0zoiomoeJHvmeDOASkXMfvM4dsLNG\n",
+       "yqgibSI3FNjpa5qm8xotPpPjJRJJuU0UyfNLdDGpFjjyu7zH9XzmH201bSllhvzKoI38tM1wm0bb\n",
+       "bdusc8gXBkk1gGQZPTRHih2WLhu+7DQhVSo/MjMdErfFk3zTqcq2pK5xqDlHXCw80JCQYc2RqSn1\n",
+       "GlHYapGfF8XKykpJ9+T7mqww6bXnWDk2evBGunxaMu+z8FEIs2DWO/c4TjuS7sva2lopuuQ1jpJN\n",
+       "LIhM40g+5a5GojhWOHTUKHtZtGhDkzsPeSihr8u5TxkiWZ64lZjjoPznXFMeeH7NI488ojfeeENP\n",
+       "PvmkmqbRk08+KUl6/vnny0nDu7u7Ojs706OPPipJ+rmf+zm98sor+o3f+A198Ytf1O3bt/Xxj39c\n",
+       "0rkDNplMOjsy6cRkVFyT41RuVpJUrjRSuY0+EZlc69Ll1IKJfaWM0imwnHp9cK54v40FnUQGe+m8\n",
+       "0FFLBIrPoXLnRo+a/jKvanog+WviWvOPZfjo6Kjzkln/li6/A9B6js+jHqYsrqysdDY7mAf+24Zw\n",
+       "MplcQpXT4SS/2QfqgKZpHojy2SinbufYcnzmW81hzfVMx8X6xbzMAJJjoZw4ADYRYUxkxw6H26It\n",
+       "M3JXmyuuAT+j5uCbqAuJkltWuP54rBCRP+qMmu5gH2sInHmaPKwhssvsKG0ZgwofYJs6ymhwzX67\n",
+       "L7UyHdNDc6QywmJ+WOoqzZo3SONrhUPFY2rbtqSGEnXy5GZEWPuM7fG5XIyMLjMdSGXi6/L0VypE\n",
+       "pqjSM+dClFS2xHOXj3fPuR+LxaJzuGSOh5GdpOJI2JmiwNqBS0fKnzl62d3d7QgqPf5UwuY1Uw/u\n",
+       "l6Mxzov5xV0knlf/pvLKM214GnGiSjQ+dKT8ORVM8sXPdrsZXZMYlVNZpKKzonQQwV0/TzzxhNbW\n",
+       "1nT79m29733vK/UOH/jAB0r91PHxsTY2NjrG4YMf/KAef/xxrays6Dd/8zfLePb29rS6uqqdnR3t\n",
+       "7+9rNpuVs6nsrNaMV40Pte8T5RgOh5d2PDEV4rXkwCWROj6vthY5B9Qn6RBzXXBe+Bz3z7+5Xmv9\n",
+       "MjF4s8HLNu3ceczsB4POdAbT+PB5PteNuojrKxH6JKLfHpeNptEJ7iYdj8edde51s7a2puPj4xLQ\n",
+       "+hTs5Fc6hBxL8p/6vuYseb0ZqefaZDvkAflGZM5EpI0vXmZg6d90Ij22WpqNMktngzVHadeIcllf\n",
+       "0AlgDW/WBNlhIJ8YxJhPltNEssyvwWBQMgHZL64njt/31Wwoeez77Cj7ANuac+rnp27Ndk22UUYc\n",
+       "aWct27ad+RqjWnul30u/+Q4SDSGVBhVeKuGMKGvtSZch17wuryVUz+tqk2XKiUul6EnnIqXR9iRT\n",
+       "AVjgEy2hAq05fL7GyJRzy/bYLZBte/5ONX+fC90RBWk6nWp1dbVTB5ZGh4rPffC1VrLkjdtN9MZ/\n",
+       "GzqmQvEb0lmv5HlwZJZpIc6b2+PBcUZc3A+Oz/PDE4fNU0fCXnSUX8uR26SSSgeJfzOiJQyf1/l5\n",
+       "5vnOzo5u3bqlZ599VisrK5pOp3riiScKH40M3r17V2+99ZZ2d3dLm1tbWxqNRvr5n/95Pffcc/rq\n",
+       "V78q6Vxp3L59W0dHR0WReR5v3brVUaxpxAi3M4IjUmpZdztZDMr167Vkw+S5JF+WOQK+z+uG/aEu\n",
+       "qUWZnkPqJ/ZP6m5Q4PhJD0LXM7peJhs0ooni+blek+aj0Wkjy4lsUEdxzTCgyQjcBtCoizc6+DR9\n",
+       "R/mM7JnaW11dLRsajJa7Xc/PdDq9NHbq8gySqS9zLqgbciwcR81JJg85/zxjiGvb93IsWXdF5zJR\n",
+       "Ko7R9zEtad1Ane0atpoz74Brsbh41xyDb+otrkU6Oa53pa1hip/zS/3IwJ1jJAJMnZi1x/6Ozu+y\n",
+       "miWvQY6PNpKOpHThRPlz6h7y07rdtLKycum4k6T++IOeeuqpp5566qmn90gPDZHKVBu99kQVMs1H\n",
+       "BMFETzej5Myzsx0jNlmMyjQSUYAawkQiGuXn+HpHGPbQCSUTlchtljWkKvvg3+6n05ncESNdbLl1\n",
+       "VObt0X4O+ekaMEYtUjeK4edMXWb9xXA41MbGhlZXVzWZTDroEusKHNUwtcd0G8fCCM1btjOn7zER\n",
+       "6WHUnKkm/+9+sC98aSujKs6R5zxTCYz6/X/KSKYS3B/3aTablWMlpPPTy0ejkV5++WX9wA/8gK5f\n",
+       "v64rV65IOo+w3nzzzVJUvr+/X3izubmp2Wymxx9/XB/5yEf0Iz/yI/rhH/5hSecHeX7+85/Xiy++\n",
+       "qJ2dHY3HY+3t7UmSjo+PC5roiJnjYoTNNcA14ojYKJf54/Rezhl1BaP3lFVTIsWMXIkmLSuWJd8f\n",
+       "hFqx3exDjsPXU96IvhMNTj3k56RO5JrPvgwGg1KgnCUG7A8jcLfp9qTLB4kSOTci5ZP0R6NR2cGb\n",
+       "KRwjwX4Gj+JwOUKmWvlc95fjp84wypBrMtM2kjpF2pkupq1omqaDkFlenBZiX1Ivs99sN1NZRP4S\n",
+       "4WzbtqS0rAN5Unwt80JdQyTG9WzZN6NVlOFEizIDwCyH26wdrZEy5XE6vevrs9+JjmY2wGSEKNeF\n",
+       "EUDLh0te/ByvfaNRfAUOUU+vYfL3QfTQTjbP1IB0udKfE2xlk4bP1zP1lQVzmUYxURh9rT9nOo1K\n",
+       "2G3U+s3vqCjdZjo2CVtL3SLXGl+o6MwLwqYJYbN+RLq8s80GkS8o5Xis9OgAMD3jv/kdU1BeNIb+\n",
+       "5/O5hsNhqZnwmN0f95njcF1Fjd80FLyGdQPpMPk7Kxumarl4CB1L3ddS1FJ1djBIGSi4nZRTGoja\n",
+       "GK1YCZtL0s2bNzUej7VYLPThD3+4GKjf/d3f1dnZmd555x1NJhPt7OxoMplIkvb397W2tqZ33nlH\n",
+       "r7/+un7oh35I165dkyR95jOf0ebmpn7lV36lGGKnb3Z2doojVXO+M/VMnjLd4rQj73EaplaLkE4U\n",
+       "5yd5ZaoVv7JfXitMybjdZW1ynDXj5fVdKy2gHiCPqEcyrev7MkVZ60c+Lw1zli5Yx9RqTzwPdGra\n",
+       "9rx2bTweazQald88X25zc7OzY4w8cnueZ6b9uIWdu9MexHePwXqVRpG1oi43MC/sHFnfDQaD4ugl\n",
+       "r/k89zk3TfA3X+GSThHtE4N6yijtDNPiKSvWo9Szqd/YN9a6JSBBuch6Kq6ZlPn8339z3bnP1KWe\n",
+       "f/JGurzjnZtO/LLjZQ5NtufnUMdLKvK6srJSdqSzTpf1aAQ0ci3V6KEhUu4wJ4DKiDloT2g6QVJ9\n",
+       "NwCvs/GjsyXpkkKqCUoKHYmL40FjZD+WOYJEqzx2GlYbgqxjqil/fsa6CTujuZuitquJyv7s7KwT\n",
+       "mbkIcTqdajweV2savFgyGnCU52ttNGxouehoUHgoHZU7ayD8ORVQOpzpyORWX5MVe62Y10q65kh5\n",
+       "jMztZ4TlvtSCAUZvGV1KF2c8+d7t7W1tbGxoOBzqySef1Gg00sHBgSTplVdekSTt7u6WImC/a+/g\n",
+       "4EBN02hnZ0f37t3T3/t7f0/PPfecJOn973+/fvRHf1Tj8Vh/7s/9Ob300kultmp7e1u3bt3ScDgs\n",
+       "O/tqyttjqJ35MxgMyo5Of055Ic8sJ9QDiVYl5b3uT35HNK2mJNMwsX3qo6w19PzXiqRpYHLd1IrW\n",
+       "+fzUUb6PNX35uREFGiE6OenweCcWDSgde4/JheV2pqRzI+X17ppKHgHQtm05HJY1l8fHx2W9Oyiq\n",
+       "6dRcw4lisPCdSJTl0s5S256/j3I2m+n4+LjzPKIsdhwZGCXKaOImjET/GPzWAnDqmVwvPKIiA29m\n",
+       "FtwW73Vfc7NS8nQZyJBBtR3cxWJxaSe3+8YsRKKaNdCA9/qZ387mDY4xgRKuFdsM2yBnIszb4XBY\n",
+       "zgo7O7s4FNXEXZL2I5bRQ3OkclspvXIbHTpZUtfA0wgnFGeqRZspyLnjgp9LFxNTK3JNRyq9bio+\n",
+       "omqLxaIUYEoXsKLh7fl8XlUK3u1CA0DnLJ0398N9zyJAeuDkMyMS981C5VTfaDQq51ZZGG2cGD3T\n",
+       "AWPERYcgF3TOIXlAZWMDYgeKBom7P7yguEuSz6PT5rlftvCtMNJ4eV7oRC07Cdd84nEMLHBm35yK\n",
+       "GI/H2tzcVNu22t/flyTdu3dP73vf+7SxsaEPfehD2t/f1zvvvCPpfCcnnTOujUcffVS3b98uBaUn\n",
+       "Jyd66aWXyjgef/xxvfDCC/qTf/JP6hd/8Rd17969wksrJZ/JQscgnUTuRrUhcfqHfPKasKJLhTWb\n",
+       "zS6NoRYQpTHy2HNrOJ0IX2M0M9dAznPqnkQQTHQImRJhBC5dTj+lsU7jVou+ibrxc/Yj0RMaQV7r\n",
+       "9HHy13z02jfCvLm5KUklbe8f8oaHJTqtzBS85cjpQgcDx8fHnefWguma/na/fR/XFI29EV7LN7MB\n",
+       "RCvIB88PUS/pPMgxcs5+2tbZEU2AIOeH9pDBLG2X15LfaWr9UUMz6az5XpYscC3awXBK0denvNWC\n",
+       "BzrBtIvD4bDoMKKPlgdSolWZHTDRuXefM4PhPo3H47Lr2HK7vr7eKTx3W3SYjL6aLy5HWUYPxZEi\n",
+       "/EbHhJNH4a9F8bVJkOqKlWhA1qjUUCde475QELN/CYXaSBoVYlv0oi2IfAlxRjbT6bQ4VRlh+28v\n",
+       "/vyc/SS8yv6YlzTs/p588ziMipycnJSddL7PzpIXI5W062LovDJKdt+86PmdFSIjBPKNn1H4rbgy\n",
+       "H+5FxAiICB2VGCkVcsoAIfTc5s4goIY6sT98tuVyOp1qc3NTu7u7xei//fbbOjk50QsvvKDNzU29\n",
+       "8sorZWfUeDzuGBJH4B7H5uamJpOJhsOh9vf3y3dvvPGGVldXdf36dX3yk5/UH//jf1y/9mu/VuTE\n",
+       "Dv3m5mbn0M50jIhgmAdWZKzZYK1aIs9U+jT8nAvzOI2sece5JVHXZAqHCj0dfAYuNWJ/OId0OGtR\n",
+       "NNtMncF2Ui44Hn9nw+LPuK551MQylCJ1BvlkJ2I8Hmt3d7fops3NzeLoJxopnc/XaDQqiBSP8ODu\n",
+       "NPJhdXW1yCWdbfLUDlG+fsnG0zz02hwOh8Xo2/FhXyzTlsd0TrwGmBJ1/4zapLxZ/6S9oLPu+WLA\n",
+       "Th2e99GhN69qQEAGJ+Qh+57tM4VpIqpG+bOcZLbD91C+yQeibqlvbT8Tacv2s4+eN6d0LVeSyi7m\n",
+       "jY2NS3o29S9tg9TdRVijh+JI1RAiTj4jRv9vAUiUigVxTC+4TS6IjMw4yWyTBZMW5HTA7Mmnckvj\n",
+       "SzTIDhKRJPLBjh6L9Tx2e9FS1wh4TFRG0uX6L1/PBZWpARqrmmPqZ9uBmk6n2t/f76CLnqM0Nv7O\n",
+       "/Ga7jjw5HhoT95Nnq5AsLxsbG6Weh3UX/skCecoc+2o+1QxwLeVmYvRpPibaZR4SefB4E6Hk+H3N\n",
+       "yclJOb3cZzCNRqNy0CL57eMbtre3y3k60gXCY8XStq1u374tSbp9+7Z2dnbUNI0eeeQRffazny08\n",
+       "+ut//a/r1q1beuONNwpvc55yzjwezz/XHakWLNkRoIEhb/09EWkikbw+5Zjrj/PKYGYZlF/7PNdL\n",
+       "puG9tmsGg0Rlzb/tNCQST2cnDbT/rvGUupKIlJGcROL9HCLiDo6kc0RqY2NDbXvxmhG273SeX+NE\n",
+       "dJiGu2masmFie3tb9+7d0+HhYXl/G9cI9Q7bdDqH43E//Zm/51pz4GGdyPokyhWRZPKPzn3Kac4R\n",
+       "76s5tERu/Kxsk31IO0S9k1kZykTqMK4l1yaR6PQksGHUqYaqWp+ynwyc/f+yWljy1GvLjhJ1Ltfv\n",
+       "YHBej+ngmnV9NX8gecYx0xmvUX/8QU899dRTTz311NN7pIee2kuyx8xIK3cCSN3TgumVp3fPIteE\n",
+       "042QEAnxd0ahEuIk9J1pDf9v+NdtsU1He/S+7aUbceEOOkf2fB+Rn+M6o6zHcB8Y+WaemZA9UTJ+\n",
+       "xzF5PIvFQkdHRyV1xKiUp50TRWRbTrExFTEajQoqZf4y1cK31fOt8szpr66uanNzszPHTCXx8DVC\n",
+       "y0lG9/w3EUt/lvNO/vhzX8f0tH9nOol89r3ug1MRx8fHunLlis7OznTr1i1J56+B+b7v+z5NJhPd\n",
+       "u3dPg8Ggc0DidDot8u+onfPQtq2Oj481HA4LyvXWW2/pm9/8Znlx8fb2tn7mZ35GkvSlL31Jw+Gw\n",
+       "vAqEKSojYIxgKfs8AJXwvmUk6yNMRKWy4DNlheTvslDXfeVxC7X1kygJ5/DbodRDWbzL/jiqzoM1\n",
+       "eX1NjrJeqsYDj9dEtDhTkP48i65zXOZNFnhbXyZaQV09mUw6qV2+SirRupWVFW1vb6tpGh0dHV1C\n",
+       "g6yDbTPMI89tns7t+3ywZqagXcvDFBbLFvycGm+o9zIzQvQw58K8y9Qlvzffc8243bSP1HNcg+wz\n",
+       "0UyiW6xzytTaMp0mdXdMJxLLdvLFzOYP54Xj5701ftTG7DlwnRNrpCgTiWQxm0L/hHV/y+ihvmuP\n",
+       "TEoHZTAYVIu7crHVlJyZyxxzwtlmIhcajRknY1kaIuF7P8f3OB2V/VoGqXsCCQUzFeYxMK3pZ+WC\n",
+       "Ia+YussCPvahVvxs/iXseXh4WNrxd94FYSNLwTOsT8eVRngwGHS2PvNskhTgNNBbW1udRSGdpxpc\n",
+       "O8E0gPlCqJltpszw+Qn3kprm4ugDywblmw507mSh88zFa/Kuu/l8rmvXrpVjDO7du1fOeDo6Oiq8\n",
+       "l85lw68O8jO4Q8XpDab5pPPjFKbTqU5PT/XWW29pPp+XZ3z2s5/VN77xDR0eHhbDz/Xk4lfzjfzK\n",
+       "gIR8t4JmutkywFREprEywGEqxm3znpRTpyJc72fesG8pe3ROavorZYJj9L1Zp0THufYsqZuySKK8\n",
+       "kuyEZtG05S0Nhv+nI2XyOVHuv+uceCaQ5YHjM9/p7NCRog5lIOr+uADdgRDfTpA6knzzZ6PRqKNP\n",
+       "neZhKpmBE4NryoIDEf9PvcAAsTYPHCuJutfjYMCa9oN8YZCf/K6lCik36XgyXewaSPOP4/Uc2PGh\n",
+       "A+71n7qNfcqi+dS9Kdt0dnk9++T+8Dm2C9vb250dwizzsNwycJIuCtW9Rthmja+mh+ZISd0dUYwA\n",
+       "zUAKXm3Bs51Uam7T7WZ9TBaP83lUPInImHhfLdedn7EgPCefB3IaqcozlvwcjoU1IYmq0YCmQ0PK\n",
+       "XU7J67zeSptoh9tfX18vhXzHx8eduqRapFbjl8fPwkUqNO7cMwLjxULH24vFirEWlXuRUhFlzVjN\n",
+       "OHFeajvz6IRSvnPREgG0o1RTtk3TaG9vr/TNr4F57LHHdHBwoMFgUN6rx7nwWpK679jKiHKxWJSi\n",
+       "3qY534llBODu3bt6/vnnJUnPPfec/ugf/aN69dVXS7s+zsIKisqPjkPTNJ0dQrXAJ/+3c8KC1JSZ\n",
+       "5HFuIaczYWeRhdJnZ2c6OjoqO8W8O2eZY0OekdyPWtS6WCwKsuj/ExXN3XIcQw1x8+c0TPmbR4+k\n",
+       "0V5ZWSlzZ6Jc2rF3NO/59Vry2qJjaX1UQ2Rc32dZ5xk9RCJSLrzDb2VlpdQEShfvvaS9MCoyGAyK\n",
+       "k0cbIF0UZbPfzAwQ7eH8UJ97HdYQ7XdDDjkHXCtN0xRHlXzhGniQ7ubaqq1B9s9y6uvZPx+BYd7z\n",
+       "6ADKLeXO/XIfszA712wN/MisCANa8pXPTvnjs8xPZyXMI/Iq+2J+5TpKB7BGD82RIjQpXThDiSxI\n",
+       "deYTkqNTQ3TG1xMFIZMYfSSSkyhPKjg+k/e5P+5zOiRMabF9pjDoyBB2Tg/cCsLIVKIudkxTCfiZ\n",
+       "iRqR7x4HF6ev8TgN07qNO3fuaGNjo4yHi5/jcrt0lty33L1SQyXpEBh5srFMBIgRCndH+jOPp+bU\n",
+       "ZeTGOZO6BaHpaPvamvPC1Jafs1ic72piFMR55HsLPSdXr14tzs729nZnZyPnKZFTO/WLxfnuPaMy\n",
+       "bntjY6MUoR8eHuq1116TdP4i5J/+6Z/WX/trf01//+//fQ2Hw04Eyg0VngM/j1uOfX3yquZE04ki\n",
+       "QpTBUo33vmZ19fx9kUYjNjc3S6G9nVCflWWnygXOGUHXdBPny5/VrnP/aZBqyrlWwL/sWj47fzP6\n",
+       "Tj1iHhH1oIGvjcMOCueRc0X0PFM0fs+nn8/1QmeBzzeiTF7wXaE0tImAEikjcuZ1b91Hp86o+Orq\n",
+       "annjQ65394W6hkF8Bpcco+Un9QvRJY+HtjB1VCKQLv3g/FO3Uw+nDSR5rLYni8Wi6AXysxbsWYd7\n",
+       "HmqbInztsvVkOfB9DyIHZh6b2/RcOxPB3ZzcIZz2mTbY/OffecRC0kNN7ZGRhOvSm+bA+b8pvUUK\n",
+       "DR2ZTC+QGLGnocv/0ws2WZC8rdaT7edzIaW3bQXhA+1yh5UdDC5aw7CO9mg86LC4rxmheNFTgMk/\n",
+       "54XTyeR30oWCefvttztGiwgJHUGmmPzbfXA6h8o9HT5GFl40nuucE+6upJIiakR5omJNOcuFRqeB\n",
+       "0b/b4XzkMRyMaJc5YX6ODztcXT0/w8epNu9COTo60tHRUUHnfJ+Vsf/muU48ysDX+buDg4OyTbhp\n",
+       "mlKTdeXKFe3s7OgXfuEX9Morr2h/f/8S0mEZ4XpyKs0GLAOTRFY8T0aGlim3TD+0bXvJQJtvw+Gw\n",
+       "1I8Nh8PO29/NV/N0a2tLd+/eLTvGshaJ67eGkqeO4Vr3d4km2AmmrjEyRCPNdWpngvLiZ7hNH9jK\n",
+       "wMzGZzqdltf+SOdonE+u39jYuLRLaTQaaXNzs5wK7Tny8y1PDpJshL3jji8e5zqx3nS/6NjYaZlO\n",
+       "pwV99Tim02lHpknWw0agiCwxg8HvzDd+R1lMuaXj8aBAP4NtrvuU4zzA2DxKgKHm0JDm83kHzePu\n",
+       "ceox98NkmTMaRbTJ/cgxEgBg/9g+5SFBCTrFtD1+pueMQWnbth0AgbrGZ5p5/lk/ZSSViGj2iXbJ\n",
+       "/cua46SH4kjRqSBCYsOUkQAdq5rjlUyoRWi+n/VQVDjShUB5cVsAKPz8P59DuDEF3wLq79hvQ+gW\n",
+       "KAohoef0sH0tD/JkxG7BdxtUxIwcLJQUOI49Bd9zyDSbpFK4fPXqVW1sbFxygPxcj4dG2FGVx0LF\n",
+       "mIbZ/XFNEGHxmiPlRZpRo/lFRVKL3hKpq6EqrP/yew5JdKAsWx6H/0+5Yv99Bpd0ntKTpOvXr+v0\n",
+       "9FRXrlzR0dFROQHePKKD3LZt5/123/rWtwpKd3JyUr6zw/nyyy/rySef1M7OTlHub7zxhnZ2dvR9\n",
+       "3/d9+tznPqc//+f/fAcBTAeRqfJ0GhOp9bP5WfIwgwB+nvrEz3JtjzdHSBebIlzrw/XN9WXeZH+s\n",
+       "p2ho340YDCZKTMeW7dmYzufz0s9lPGBQ6sDCr3JhytAp35OTE02nU+3s7JS05p07d0p7W1tbHWTB\n",
+       "NUC7u7vlEM50wGm0vTFEOnfQZrNZqUWjUaqlZ8gDf2b0k2gdt+eTN/P5vByQaUokM3lm3vj5RolS\n",
+       "f1E3PSjtxGcQnaE+9Zp3m5xP62X3MWXN/as9x/22TjU/PBeUOc6h+0hbTBvFYDSDTEkFzSNvmO7n\n",
+       "mNxP88/ONFE3IvR0Bq0jqW/pgDrATDAhUSg76f6uht6THoRI9ccf9NRTTz311FNPPb1HeiiIFD1E\n",
+       "eoeGOe3B1iJSe+iMBjINmIVn9koZgRDy8zUmXpcpMXrDGSEmesMiyoycMirNMTIScORs9Iw8My8c\n",
+       "Yfo5rkfw27YdpdRSeO4fa0ocjTgSIILAnWFEegaD8xeA7u/va2dn51KkVsudmxw5JYyaUQqRKR6s\n",
+       "xjRAzkcN4XR7WdPBOq1EG9ieEYREURxZZUSXc8Z5Z+SUtYOOkFwPtbKyUg4sXCwWOjw81NbWVgcC\n",
+       "931N05RXEY1Go/L6mBs3bmhnZ0eHh4flOIPs087Ojt5+++2yU1A6j2Zff/11PfXUU/rZn/1Z/Y2/\n",
+       "8Tf0la98pcgM35uW0bXnJ1MgHgdTwEYyrAcYIed8eM48ZpN56fTW5uZmQd34olLzyZ/x9RLb29s6\n",
+       "Pj6+BPFzTDUZqaXDvd4T4TQxDZztEY1KXeFxUGc4micSR9R9dXVVu7u7Jd3ilOd4PNbx8XE53dtp\n",
+       "EukcoeLmDacLqcM8bs+hd5c69Xx0dKTpdFrejmB+14q7PT6XBBhx2d7elnS+yaFt25K+IpJPBMTI\n",
+       "CwuO3V6mjGg3skyAO1yznsb/ExWqyYQzCjn/RJf42+Pw2k67Rt1Lm0iZHI1GnfGPRiNNJpOCXrMe\n",
+       "1WO27kv0jfq+VlrD+2jj2/aifpXpS/+f2R6P3zymXXKbRPX4zjzXBPI1Rn4e++BnEH0mkkey7XpQ\n",
+       "KjnPKggAACAASURBVPWhpfZsrCmMUtc5oKEhcRKpaBOay5qJGiN8L40iUywJxWc/c1HQYCQMT8XP\n",
+       "GqOac8WCXQtL1mpJ6jg1fIfV0dGRDg8PO4oo+89UE5Uii1FNXGyZYjVf19fX1bbnBcqTyaSTjmIO\n",
+       "3M4ZF5D7njl8G9larjxfvZBwN52brK9ZZpS4iGj8zGv3hw6hf7uP5gnrPehgpFNPma3VdKytreng\n",
+       "4EAf+MAHdO3aNd24cUPSxXsPV1ZWOu9udH84N3aqJZXi9Dt37mixWJSXyXIcOzs75Vr38+mnn9bd\n",
+       "u3d1584dXblyRT/+4z9e3tHHPngsTgExnVdL6dlgch5yLtwu55BknpJvliPWaEkXRcxMN7Iuh6nf\n",
+       "jY2NToEz+5zk+2xQsg6FNTWcbzqXqVNYK1MzOqytoRH2uxntUNkh4vqxo82TnyeTiQ4ODnR0dKTV\n",
+       "1dUyh1tbW9re3u7UJJpHnGMXldORmk6nRR9NJpNLtTQOiLJ+zvWS7jeDG+uS3NDi75qmqZ5tZIPr\n",
+       "52RQnnNL5zSDHM6770sblAF56pxlujTrsUjWpZRVyluuET7Xc+e6M84HeVlLbXq+M4DJeaL+8tqy\n",
+       "48Z3rHoNso419T5fNZbz4vlz2lk6l2E/3zaTfeHOSJa6UJ9btpgO5jqs0UMrNk+vj0KQyA6/l7qC\n",
+       "kUzmwpAu76jI+yjEiaBIlwuBa0xN71zqFpiyfdYBpDKmgfd3VpBWuIz2M1fMgj575o4Ca7n7RPWo\n",
+       "6ImI1ZAlf84jDoiCnJycaGdn55Iz6cVI48nC8tzVlPxlTZt5TMRjWR6bjkTWYBHJ4Ti4Y8zPM+/8\n",
+       "PfnC+fNcL6ujSKc+60PoHLqW6c0339TTTz9dovLj4+Ny3IR32WWNnJVh0zQlKt3f3y+v03HBsR0J\n",
+       "7tZ79NFH1bYXL0l+6aWX9PTTTxcD9ZnPfEZf+MIXJElf+cpXyjvYPA4GClxn6cRarr1TKpWax5OG\n",
+       "iBF7KjnPh2Vxd3e340AQ3Uxk2nyaz+fa3t4u9SWJlNaM6jKqGWyPy455onXkoYO9Gio3GAwu7XAb\n",
+       "j8elVsSvb5G6Dt3JycmlQMTHRPi9eeaZD2H12p9Op6Wu0+QNL0bHvd6MlFsXEzmnYfX6zfF7nnks\n",
+       "hXVdrUaNZ4IZmSDiTP2R69AOO424eeMgg3Vgfg71fda/5nzTVvn/DDCI1qROsyNkPcJ15r76O/Oc\n",
+       "gYp1aC3AJD/zuXS4zs7OOvq7NhbykwFAOoveYJU7IS0L1gt08hiwMiiyLrRdqqF9zCZxzZ2cnJR1\n",
+       "neOrIVWkh+JI0VHg71RKNApETxLtyfY4aURcEgrNScuitGyb/2fEwbG4vVT6fv76+npnWz8XhK+x\n",
+       "58x0ngWHHraNgRe/PXMW2h4eHurw8PCSICxD17wAiRrVlJ2VO2Fz99MFpjasHJv7SjTHz0vHmI6b\n",
+       "+7Ys8mKxbkbvVHyeC8tEzWCbP+kAJVLKSIwy2rbnxd3cKJAF/OnUu082NtJ5im13d7fsrnr99ddL\n",
+       "KsbGxQaF27w9P8sCDKd0DPtbHr1LzTvX2vY8rShJ3/zmN7W2tqannnpK+/v7un79uj73uc9Jkn75\n",
+       "l3+5nEVl5yNTJaREXYgOuM/mn2WDc06+8fOcL6e4KYt2HhghU76NJI7HY+3t7XWeeXBwcGmDQlIt\n",
+       "+Mr5zbm3/sl5IvqQn3uMTA1LKs6QkSgfESKpUyJg9Mgyar5sbGx0Dr6U1HHGzs7OdOXKlY5jaWM3\n",
+       "n89LetAy7LSeC9B5BMPJyYlms1k584cpXPPMYySKb7mgw8FDfKfTabmWeiERl0zB+rk1PlvP+dkk\n",
+       "7pDLIIEOaepU/zBtlXLBYNnjM9Vkwp8zKKeuNzmDYKKO5C5rPsdrikS+pXPKlKf5TZTPx5BwZ7J0\n",
+       "UV4yn8/LC9LZLsdFB2wwOD/OxHYiAyXzn84Y+WV7QBlt2+7ZjjV6aK+ISQdF6uaFU1AZkSbMx4ms\n",
+       "tWuEhY4EhZnwKNusQbTsT418n710tsn0BCNDO1JUloyevZPIcCUFkc6LJ9zf+TMbhcPDw6IguTuy\n",
+       "Bqsm6sa/ueXfi0G6gGrtbFH4iRxZEWVNQzpG5jOdI6kL4XMea9dauedCsJLKZ0vdLd0ZaVJB+RrP\n",
+       "F99kL+nS4rdMZFpQuoxImccnJyel9ujatWva2dkpDkvW+vDZnDeiUtK5UZxOp7py5Uo5NdpGggbR\n",
+       "NVR+3iOPPKJXXnlFkvToo49qa2tLP/iDPyhJ+uQnP6lf//Vf15UrV8q5RUQ6SJl+tazZUHKebATp\n",
+       "jHhcNYNCR9IoyOHhYamTklRSzozMuaa820k6l3GnOf0MGlIiVL4/dZfXGuWfz/M1RksyeMkxkm+1\n",
+       "dJT1pA/IdUrO/DSfnN5JI0vnn7U1Dtq2trZ0enqq0WjUOX/MaUE7MpYbIlRO41OvOhiYz+cdFMj9\n",
+       "8o5jzgtlImWDc+Qxcfx01Kj3GOQlgs/gjz+c41ow5mfWgq90KhIQqAX7vta6hE40x0HggI4UUaPB\n",
+       "YNBJdVHnMOjM7x3YpszbHtVsG2XSMmXnmSk46gYequy+cwxe47lm7FyZL5TvRPjoLDGgTvR7mb0v\n",
+       "c/zAb79DRGHK/LLULTL1dxnFkdJrJ3NqxiUpUa6muXxCsimNbhLh0dp1hFtTgRlB4EK0YBqqtJBI\n",
+       "l9+czsiHaT9GyHZ6eIJzIiSpwKXuIloW5Tgf7v5RSTP1NBgMOhGj+bGyslLQFTpVVlKpYP15LmyT\n",
+       "c/eOotIQpdPlz83/B6EollfLhqO7RNdShvMMLbdrBMcnYNMZ83sIr169qg9+8IOlH8fHx8XZsXHj\n",
+       "dmH+9tj8vIODA+3t7alpGr322mu6fv26JJX3FR4eHpZzenzfZDLR008/rVu3bpWCd8/JH/pDf0j/\n",
+       "4B/8gwKDM9pN5JOyZnQza9PY35rjSiNB2UjndD6fazKZaH9/v6REfd6WI9laarAWsPlEdBdMs1+O\n",
+       "3qmMTYlc1pBOOti1ddi27aW5JD/pKNtwO+3hQEpSOaOOAQ8DI+qf4XBYeObPHnvsscLTw8PDok/8\n",
+       "2crKSiknsKwfHR110EGm6KizF4uFxuNxZ66ti/nj75zyJrJNnnvbvwuJOUbz12UP/s58qRnvNPQZ\n",
+       "2NUCh0RoMmD1/+4j59BE5I9tZPCYRLTOlGtpsVh0shj+3g4nA2/OE/tDx9C6r3aauH8c0PCQ3lyH\n",
+       "tJF2qDLYdXBOu2eZoZ3MdWE7yzSybQk3G5GPtTdYkPrjD3rqqaeeeuqpp57eIz201F7CqvSsCd2Z\n",
+       "GCHYm5S61faZ1mPqypHgsr64DT+LxXgZRRAurD3PkQCjRLaRkYuvZWozoWEjUtx95H44leI0nr+z\n",
+       "p15DJ/w8w+xE/Lg7LfvOMTGSky5OsDaqwkjJJzS7qDhrpIjesG0fYkqkg2NwxJoIEcl9ycithjqY\n",
+       "h0RNWL/A1FvC6cy953icsmKtBIua3Uby4vT0VFevXi01KbPZrPqeMm9ZJ2LjPhPql87lb2dnR2+8\n",
+       "8Yb29vY0Go107949Sec1Ui7Y9An95Nd4PNbNmzf11ltvaWdnp5yy/gM/8AP6sR/7MX3+85/XeDzu\n",
+       "FPUmypepW6ZmiEwxhZG7fKXL6VDWybi/5u90Oi0v2t7b29Pu7m4H4eb6NkpDxMdk1LUmb0ZULW+W\n",
+       "7zz0j+05Emb9D9tj1E2+pZxTJ/r7rKeULlAAIgxc5y7Mns1m2tvb69Tj+aDd4+Pjcp3bPTg4KDK6\n",
+       "v7+v9fX1zsu1XWDutczdfp73GoJOBJ96kbKSfOManc/PDzM9OjqSpLLJwvqA2QevedbPEL1x/410\n",
+       "mizT7Lfvs462vuUYuLa4a9FjYEkB7yM66bXBZ/vafB6/8/wbneH8N01T3syRqS+uE/aV9Vsso7H9\n",
+       "mc/n5a0BlEmm39Lm8busS+Q9HANLZJj29HfWI7QdHvsyf6TmWyQ9tGLzXBiZWuNvKjoyzERG5nOY\n",
+       "6qNSNLxdWxg0hMvqO7i4KVy+Lx0iO2W11JH7wUllLQPz2ePxuJO+Y/7d4yJ5gVqxG25PA0VF8CAj\n",
+       "zDGyv5LKrq2VlZVSP1WD6W1obPRy9555JV0cjZBC7fGzHfad8uScPw1rKqZ0lphL59j5N6Fo85Dp\n",
+       "nZp8u26KTgqdQ9eJcDv+wcGBHn30UV27dq3jTHj+bAzdrr8zP1dWVjppVs+Nzxh75JFHdPfu3fI8\n",
+       "pxn39vY0GAyKETo7O9OLL76oGzduaHNzUy+99JK+//u/X9J5uuynfuqn9Fu/9VulH56X7FMqb64z\n",
+       "Ow1+HgOTTM2mE0wZpiI/OzsrDqKkcuK3HdlMdzhtYLlg3SGPUCC5H2tra5cMPOWmlqJin5M3/HxZ\n",
+       "iQJl0uR0SNM0nZPNmSIxf53281EXjzzyiLa3tzUYDEoK98aNG9re3i4bZZzGM28Wi4Umk4leeeUV\n",
+       "3bt3TwcHB7p9+7akcyfLcp2pZwckDjRynFzDdGz43r408uS79b+duoODA21sbHScZfbFMuR58HfW\n",
+       "d54vrkPfY1lLvUd5oRzTTtQKuK3rrLsZ6DNgltTZaMLnZZlJreyEOsrjyzSm+UoHlm15HP4+Azq+\n",
+       "OSFLQmjbsy+skcrNUgxk3Kb7bnua8k7HkDxggbl/aEseVGguPUREKhnN7xKtSuOYHjaNewoIJ5vG\n",
+       "lJPp6Jl9SVQpc+ZepMwXu22+HysXTTorbIuLlWPm7jwWgLpGytdYEXEMZ2cXB3om+ubizVTQOTYq\n",
+       "MCstKg86du4Xc+P+zblh8TcNTxoTL0z3g44UUUH3mbJBhy0Vbcpg7hSqXeddSFY2vK9mABlF0SGw\n",
+       "A0bZ5HXk5Xg8Vtu2evXVV/Xcc89dir7m83mnxoFIrevSjFy5rz7uwKgCjy0YjUba3d0tdUV855qN\n",
+       "8ze/+U0988wz2t7e1te+9jVJ0vPPP6+Pf/zj+qEf+iH99m//dqeIlTUJKQvpBGQxqNcFgxBTLfhK\n",
+       "ZctgwQ7h22+/XXbkOYJ9UCBGA+W16LlNVCz1jL9bLBadukD3k7UZeZaWg7UMukyscyJ6QCczkWEX\n",
+       "mA+HQ929e7cU/EoXxuSZZ57RE088UWpZ3KZloWmaUjtjlM9y+MQTT2g2m+nOnTudnadt25ZCdD/L\n",
+       "Y/W484wpz0s6Sh67P89gi8GjdK6P3RejLhsbG5eCcr9qhDLJfro/KRN0qmqoI4GDtDF+tsdBnWRZ\n",
+       "cv0Ui8G9nrkDk4ec+twmB9ocP20Sdaafz01RdHzpQFK+ySsGDP7cbQ6Hw0u7eRlc0TFK3ri/Jjo8\n",
+       "DETNY/fDgImJtpU6yXx2XRg3DyzzVUgPDZGykjClIlvmMS9DrR5EVmy1CNbMpLPg5xMGZV+o6BMt\n",
+       "80QmFGiBIkqQSJZU3/XjiN3CSMPJXXtZrGcD6iiSkYzb8PjZn+wzHQb33f3JQl1+xqgmIyvyzYuT\n",
+       "xeFUgu4jHU1+RyWVY+Di5gLiSzlrEVRtkwI/q6EDVKKeM0ZMNtpWLLWNAU1zcUaMdH6UgFOljOo8\n",
+       "fvN8sVh0dgmapywg5X1OeRwcHGhtba1zCvXGxoYGg/PzgO7evVui5a2tLV27dk0rKyt688039eST\n",
+       "T5Y233jjDd28eVN/4A/8AX31q1/tKH46vJYLosScIzquRGoyIkw5sZzSWSPKxbTA8fGxbt26paa5\n",
+       "eDkvHWmmB1ZWLl6ybWTP17VtW9YRneNEKx1Ysdja/fS4ZrNZcfRIteCAc05jwzEyrcj3zjnNaaTq\n",
+       "6tWrhYcf//jHNR6PyzEHPAvq4OCg6Jr5fF62mHuuvDPw7OxMx8fHevHFF8tux/X1dR0eHhZ0wIX6\n",
+       "OZ/Sxa5RjtFGjevbTobT3Ua6PXY635z/yWSiO3fuFF1IXe5AjjqlllHwT6IbbIuBsK+xDqDTb0ff\n",
+       "88M2vbvT64UbVZxCtt40uiypnGbPF5jXUpEpWyYGB5Qby5h5b0pEiQ6h5d46jw6S+8K0JrMNBDnI\n",
+       "G8+Fx8xANEtGMuXJFGQ6fj4KxH/nLu8M0kgP7UDOGrIkLXeMUqjzvmXtSl0lx8nI692WjWzNgTNl\n",
+       "xMG2fD2dojS87CMXLe+XumceJeTq2qlEcKSLt5XTkDONwL+zBoPoWI4/HYFEVjJyTEoF5DYdrVuZ\n",
+       "UcAp8Ibq2S86ZRkRsS80zF68OU9ECog+8DMq6UTFEh1LRMrXEmXgCe3kq3Quizdv3iw1C1kjxZcl\n",
+       "S906Pzr9RIGsFLxD0kZSUjnt3G0SXZ1Op+VU883NTX3rW98qp6xbeX7iE5/Q+9//fr366qvFIOdr\n",
+       "PDzH/i4jdZK/YwrH91EmaJSki/Rlyj35Zscl0WErYM8ZX2lhw0wjTdnys7lm3OfRaHRp/N496zcY\n",
+       "8Dsew+G+UY/4+XZaGTDQkBtpki7Sd2dnZ9rc3NTe3l6nxlE6d5oWi0XHsTPPPB/7+/saDAYFtRqP\n",
+       "x+UF10dHR3riiSfK8QdGQO2cE1XmWqTO8Xd2Aiw/lBsHrBwvx++AJJ2lyWSiyWRyKcVEBJq6mHzk\n",
+       "Gqfek7oHRSYxWDIxPefvaAedVq7VgtlR4lEF5A3Tlk3TFMTZgQKdUup56jP3TVIJNvy7VotYmwu3\n",
+       "5QCEjmTq7CQH+Nmm9cD6+volxNU6hvqYAR11Nx1MO06WNcoNAYBl9NAcKQsNPVAqykSkzBQLAiPv\n",
+       "GqLk+/w7J89EtIqeMgWthoR5EXNh0KAnJE9KFIewcRp2OlE5PqJmvofnpbgOhwiS28iIlsaM11uI\n",
+       "OG63beVIheLDD33eVUYRTClSUP3bCtufpdPE+pKcZ7bFv6m0pctQbUa6td/mGduuBQOUIzpPlAf/\n",
+       "7evMRyKN/vv4+FgHBwfFmRoOhyWt4joVO6J8Xq3+w+RonA6Yi8Z5zMbJycmlV6RYeTs1dufOHUnn\n",
+       "hvT27du6du2afuInfkK/+qu/Wgyz59XHONDBI4Kb8yd1z+5yCofzkvLhKDxRpVxzs9lMk8nkUkTL\n",
+       "Z1rO6bAxXcqUjr/3WFibkc4WD8H0GrGD7Ejbc8+1R2rbtjwjr3EbrGUispDnS3l877zzTnm2ZdRy\n",
+       "wwJ7vrbDmxSm06meeuopvf/979d8Ptfbb7+tN954o/DTZ0ml8+F0EcfFcTDYcr/8nWU8nR4iShnc\n",
+       "ORg5OjrSysrFq5DMGyKSXgeSOnxhvzj37F9+R0TK97qGzc4wgxZf73f82RHx2P08llFk0E7kOWXQ\n",
+       "iNcyJ8a6g+d2ea68rlkfx3mlHfU15hvTcP5sOBwW20A59fqrpbXJf9sd88P9sfymjqasZEBn+8Sz\n",
+       "zuiYLaP++IOeeuqpp5566qmn90gPNbUndSMMIiLp9WfkQTQhIdiaB0qoT9IluJf3JPJAymsTlXC0\n",
+       "UEtvJTTPNphXXhaFJp98jZGIjI4JzzJ9yHvIN+4scXTNvDL543otv9eIzzXiwAiC8LSjSSJSTK0x\n",
+       "KneapZZeNdzKnRokIh+sb2IhZO5ASXTMCAfv804jzkHOT0aJ/t+RXqaGnd5zlOr7Njc39c477+ix\n",
+       "xx7T1atX1bZtOSSxlupxv1w/xlohzgdTyOT3aDQqBy1aDswDoxyurfJrHaTzlJH/fuGFF/TFL35R\n",
+       "L774oqSLOh1HdpwX84vpJ/LbP5nyJeTvcXOHD+WcSLH7Y+TTqZFMszLtkOuCaG7qKP4mMmk95XVj\n",
+       "VNF8JcrOlB5TO3yeI2ZH7Jl2MPJiVNH3ra+va3t7W4899lipvfPuurZttbe3V1JyTJUahfIrXYhK\n",
+       "Sedr8eWXX9bx8bHe97736amnniqI1MHBgWazWVmvbNd1kSkTHj9RfSKXTt2SP5wz6j+md4i0JNrO\n",
+       "sVhP1bIcqb/8eSJinHuWZng8Ror8mZEZqVua4M1C/s6viyKaSznMbEIiWJYfp8ZYZ0d7zNIQ1jx6\n",
+       "XFk/lWlBtuP5OD4+vrROfA/TaSyi91xThxqFMgLFNCX5n74Cn2fkyWN3loQHG5syhZj00F8Rs8wh\n",
+       "krpwKeE6MlzqnjFFyjRPLnw/w0zNepeawSTcnUVwNYcmFyn7kBNL45Y74Hhf3s8FQGVkQ7psgbN2\n",
+       "jAbZ4/D3rK+xQLnonekk1rG4XS5MQqdURHSePM8J3abD6jYJtafTx8XJwkM+I40seWuZ4Pg4Z0yN\n",
+       "OQ3AtCjnKCHlnE8rJhbq+5mj0ajU0qRR8P9N03ROjGbtjJ+dTkittm57e3tp6nV1dVWTyaSsCzoZ\n",
+       "t2/f1mg0Kum/F154QV//+tfL/NJRYB2QeUpnOp3ZDIJ4jechAyz3l0ERee96pMPDw3J2jttlHRxl\n",
+       "wkePsPYqg6haXyV10hdMCY7H43Latp06OlmuU7Pj5k0BqWtYsNy2F0Xwg8GgPENSOUZjfX29HCng\n",
+       "WjaniF1/c3x8fCmotMHPtKXH5Xd67uzslB19k8mko69pkOkIcD3Vxsg5YV2VdUeWEdAusL/Wqaen\n",
+       "p2VzBftjouNaW7/U+8sKnF3D6HtdC+f2WR7B4nemFdm2pPJuzIODAx0dHRVZpo1aWVkp7y5Mcn+8\n",
+       "Pjxmy13N0aGeo041z1JH+3fWiKZ8+z47dix3kM7XDc8gdJseo3Wfr3dtMIEajoNpugzgrUPTznq9\n",
+       "flcWm6eDQQSEkTOpZkxr6E86MqzHYnRDR4TPtHedhsREQ8I+0VDVIhn+zu+8aK0AuNg8Tu768vO4\n",
+       "m4K1AHRqKOQ0DrUIgv0h8sXtvlZaPKvG13GXBZGZs7OzcijfdDrt7M5hFGCnyn1jcWBGRUTaPH4q\n",
+       "HY/B8070iY5dGk06TMnTdBAzb+8fKnLfK3Vrl+gwWO7NT19vh7VtWx0dHWlvb68TmXpXGR0/z4Wf\n",
+       "kw6/eeMt1E1z/soYSSX6tYNBnrImZW1tTUdHR7p586Yk6cknnyxnB924cUOf/OQn9du//duSpC9/\n",
+       "+csF/fCOMcuT64WIDDEap9Odc0F5TVlm8JU1cVyzdgzzkFMHD+yP1731CZ1DKuZ0pIispHw1TVNe\n",
+       "nOx6GAZvXvd2pkzus5/l89u4LuxM+6wwSXr88cfLLrq7d+9qNBqVQzfNaxcUz2az4mS48Nl9tYzb\n",
+       "sTP/XMg9n89L/dTx8XHp7+HhYSeA9jg9L/zcTgiDaK5/FjBnYOI+Uh48F24z9buv9QYT2gXOXxpa\n",
+       "6wXLS47NP5wP85TBF/Wlx5DHMUgqcuL31uWLoP0Mn5fFPuVL1KnLXPtkZ4LOIj/zM4gccr1kbSj5\n",
+       "bjnzeGwPiNZ6jK4Rs8xxI4n1JFFok+ePdZJ+LgvJGdB5fEQqU5d8VzpS6Zyw01wIy76rOSTpEPgz\n",
+       "e7SM9LmryTA1i5GpRBmB2WkhEsAIhM9t2/aSA+J+EzqksvazCX1vbGx0JpMQp3f8WBBYGGzv30WF\n",
+       "XnAeR+7WoPPCBUBjRIifaTXpQjEsFouyy4xKyhGg0zw8kJNpnFq058VRi3rMl0T4fJZKfm/lRQPJ\n",
+       "fhLF4UGWiXqRT0w/JprpPtYCAUkdRWLHx7uhbCxt3BeLRdmBY8fU8uPI3H1z6iCDCMv9/v5+MZg2\n",
+       "Srdu3dKNGzd0eHioyWTSiYwXi4W2trbUNOdpo93d3YI63Lx5U5ubm7p9+7aGw6H29vb0Yz/2Y5Kk\n",
+       "b3zjG8VZMr/9N/vsYxgSWXCxMonri06Tiam/RF0t+0ylUUlyfdsBdL+Hw2Hnt7+zzNpJyt1J7kdG\n",
+       "6nTMrE9YMM+XRtP4e/w2wr5WUjGgq6urunbtmp577rmyu9LOj78fj8edFw/7hdbz+Vy7u7udAnmi\n",
+       "4zwSgzw9OjrS4eGhtre3O+82XCwW5Qwh8tv60WuVAWwi/5yj2WzWQWKo21PvkO/mEY02HRDziHIm\n",
+       "dXddJ/qSG4H4HV/Ia96lA0J7Y6JOdpE39Y7bs+5nitO8YIrPtFgsinNCGeL35gnfUWg95H7wAFHL\n",
+       "dg0AoBNpvUo7fXJyUnYPM303GAzK0SQuIWEq0Q5UHutj3tgpTTTK65OZEbeZoE6iqOmzkB7aOVLS\n",
+       "csfH1zxIiGt583TAfC8njwbaz/H9Fg5H+L7GELifQeVMg2xhYr64lvYjDOsx1PhT+34ZqkSIW1J5\n",
+       "sSoj/qwncF9SUVGY05HisyaTSWdHiL/zYmPEbqNjo03H1ULshcr5I6/pbLK/fmYaL85NKhOPPb8n\n",
+       "gkc0i3Of6VL3hf1ynzLFJ3UjXukCUmfaIaNdG1k7pH62+WVHy0bX83pyclJeK0Q5tRJOtOr27dtq\n",
+       "2/NambfeequDgNnAbm5uljHaWL766qvl9OvDw0NtbW3pmWeekSQ9++yzevnll3Xr1i1NJpMSAEjq\n",
+       "IBSWEc6/x0fEmrJW0xecF8su15SVur+jfuDRJ1wHbsuBiwMFyg+DA0mdeUqEl/1msEAnxfpla2ur\n",
+       "cyq426bjRQdkdXVVe3t7evbZZ/XMM8/okUceKam9w8PD8iJsG1Oi3l6bfIOC2yTysrKy0jl/y47N\n",
+       "YDDQZDLRlStXCppFxDxrdqgfExmyPjdvanrR37NNrtM03EY5vHbYLp1a/8/vuBYTcSYaQ5mh02O0\n",
+       "j4Eg7VAafn9u5IWIFPXN6upqOVyX/LN+YzDkPvl5nhepixQl32o1n/7bKUnPF/WXx2S+e90kETXy\n",
+       "fUZnadfyHuoN3+dn0mHluDK952t4QLSBApORs2X00GqkpMsOAyNnevX8nI6TdPmwvzR8CfsSaaEC\n",
+       "p+J0WzRSCeslzMwxME1QM7hExTgGIm6e4IQt3VeOgYaGxXPz+bykzHy4HpU0n01DX/uM47ZSsLAl\n",
+       "muZ7jaR4/P4s0zR0XIhGuc00kukUWYHxPvIuoXH/zWcl+knFSH7bOWQNivmdTjnvraX52K75aaed\n",
+       "9QCOnv1M3s+am5WVFV27dk3SeYrl4OBAOzs7JW3GQlMXDNtYeJ6uX7+u4+NjXblypRzWuLu7K0nl\n",
+       "ZGIbYjtt0nndxp07d/Tkk08WRW9k5Q/+wT+ov/gX/6Jee+21gvJRTj0PXi+eA6JQRBaTiBBQ/jLt\n",
+       "RmOQaUTODwu5a3LjfhNtdpDAVLvJCB4NLeeQssu0Hw2T58h8Yw2JHSmeQP+xj31MH/vYxy6dvrc/\n",
+       "rwAAIABJREFUNeT5c1qIemw+nxe0k4Gi+U8EINO+liOiFXakxuNxObcpEZCcz0wL0R6YJ76OPLSj\n",
+       "4zYzPWN++YBiox8ZtLRtW+SXNiiRQCJLNUcl0TFf75IIPs/rm/qDZQupS63zvH7tnKTusT1I/jGj\n",
+       "wbqspmnK6fKUCfeHmRnKomXI7xLl/LqPBhess0wM5ms1Ys7iJALIPrNNpvXSQczAlo6U59sBCkEH\n",
+       "64ka8FPmfOk3PfXUU0899dRTTz09kB5qsXkNdeJ3JOZhE8LnvYlYMcqtwaZug78Tgq+llfxc5pF5\n",
+       "n68l8sOomaiTP7PHTlTNEa6RB46PuWAiUJIK+sQj7xlB8pUCmZ92JJq8lrpQvGsi2A75lFEjkTMi\n",
+       "Wb6XqZsH5aN5j3+bb5zLWhoz57kmF+4no06PwXPjiJGvafFziJ4wUuV883OmmbxzhQfhuZDY6R3e\n",
+       "S2RnsViU2qoPfvCDun37tvb390sKz/M7Ho+1WJzXzB0dHWk0GpWDCV2k6jm9d+/epZSvC9L9XOmi\n",
+       "ANZydevWLT3xxBOSzuunPvrRj+rrX/+6RqORvv71r1/a4u/0Bw9tzHS4PzMP+TvXPefc8kx5oewx\n",
+       "neb2t7e3L6XgPVde71xPXGt+VY+f5znzHGQayykl7hiSuqiEr+F78c7OzrS1taXNzU2dnZ0V5PBD\n",
+       "H/qQPvGJT5Q0GwuWXV/iOiqn5KSLHVKeY84va6pMREHOzs5Kql86r4t63/veJ0n60pe+VJBIZhY4\n",
+       "fq+Z2roncsj5MBLhua6l6WtZisViUY5tIdJDquk+t5kpNPLBlKll2wmmkKQLdMVb+TNb4faJ1BEt\n",
+       "N/JLefahsUZNyVMiPqx/8nNcx2Z9lkhc01ycks7UN9FMfud2PM5MJRo9slzTlvjHa4d8Z73Zsg0/\n",
+       "HlOt1mnZ+1f9zLTBy2SzjHfpN99BSgeGn5m42HhN5m5r7TA1ZUbX8ut0bhIepDLnThovBvaP46FA\n",
+       "53NTodfG7HaoDFicS6GxgNE5ch7XgmJl6bbIKxqKWmqvVpfC/rVtW2otpIttuXR4afgswKenpzo5\n",
+       "ObmUQkwF5b/Tsa7l2HnysMfGugm2S6ctZTFrNlJJsn3D68mbmiPm9A7JbTntdnp6Wk7/9nyPx2Nd\n",
+       "uXJFW1tbRTG6wLttz0/y5u4l75R68cUX9eyzz+rGjRv6vd/7vc66WVtb03g81vb2duGF03RUoDs7\n",
+       "O5fqZFZXVzvOmR23vb09vfPOO3r77bf1+OOPa2trS3fv3i3ffepTn9KLL76or371q521Y7h9e3u7\n",
+       "GP3kJ+dgWSrIay1T1myHae1MvdLp9ny4loY8cPDkMTBtMJvNykn0kkr9mNtyP3k2mbe/MwWeqS2m\n",
+       "wi3nW1tbJf24urqqra0tPf/885KkH/zBH9T6+rr29/dLoOW5dyrQKRwaVAYJJpYf1D73b+vIzc3N\n",
+       "kh65fv26pHMZtm7i7i7LFNdJzhsLgvk9U4TWK2kvfG06Jf5tOedYUr9mfQ31BuWCuiJrNX29HQ46\n",
+       "GQw8Ux8nr/J0dbfB8fhvp9n4mZ/DoJ3OkqQim4PBQEdHRx1nQlJZE7zPMkTeeR0zRWqnjvPCE8S5\n",
+       "HmtUq/NiDRo/J/8pw94tzlRr8i+DNa/1lE3SQ0OkpMuM4UKgAGW0mYZ12QBr9VNs320lYsXiRgs3\n",
+       "PXMvIhtTCrgXiwududi9WOxMcXw03ByPc73Og9cKQL04lu1CqDlDHov7yMXEZyS//DcRuf39/fKd\n",
+       "x8KdGKamacpWaBohOjFZm5BzmMTIJZ9nZyqjFCqCZYuGTnWNd7PZrER+fB5rRei4Ek00OkgZtDGY\n",
+       "TqflBa+SdO3aNY3H44IicZ7sxI7H44IG8fUNX/7yl/XpT39aP/qjP6ovfOELnV1z0+m07LjiqyLW\n",
+       "1ta0t7enjY0N7e3taTgclgMbrbRu3rxZNjP4u+l0quvXr+vu3bu6ffu2nnrqqeLwra2taWtrS5/+\n",
+       "9Kf1xS9+Uffu3SvPu3btmkajkW7fvl0KvRPFowOURa+pI/Je8pjyRmQv5d9zMB6POzUtNEBZQ2LZ\n",
+       "nc/npX7M49/a2tJ4PO44UkSBiJwx8nX/WQNnx9UbE2ycXnjhBX3qU58q/Day0DRNeRWOx+pNA96B\n",
+       "6bE70KOxJZ2dnRV01I4TdYwPap1MJjo8PCx9HY1GOjo6KhtTuHvZ+rOWOSCvuZPS47dRtA6nk+Qx\n",
+       "eI3TcfNYjPARkTF5rhhYEY16kD0iscYs2yBSaR3vdliDSTTHcph94jwOBoOy25Q2iJS6h2M2r9fX\n",
+       "1zsv6SaaSN3pAMvPotzQGfT8kXfc6ETUyXNG5yrXdr4OymNIZ4rO6bIA2faoZpf8Wa0+s4xj6Tff\n",
+       "QaoNJCk9bE52DZ1IzzvbMtGY0nEjskO0ytcQzqfDlTvWfD/bYB8o8Bnp+p6Eov3MLJz3c3xCLXcv\n",
+       "WNhtfGyk8l6Po8YbOpnkVRp0Gzqfa+Q+5qIx0uIIu/bcdJgcXaVzSbITkjCuF3A+h/cRQeIz2Qb/\n",
+       "ZwSbznBen/d5oVrpUA7Mb74/TVLZReXv8rBG78qzcaMyXVtb09/9u39Xzz33nD71qU/pi1/8Ypmv\n",
+       "lZUV3b59W+PxWMPhsKT2RqORRqORZrOZfud3fkfXrl0rZwUdHBx0ItWmaYqxPDk50d27d7W9va13\n",
+       "3nlHq6urJdV0cnKizc1NffjDH9bVq1cLAiWdozY+qdmK00YkAwCmSskjGpZENVksSwfFha1EtDj/\n",
+       "TpNTnqhrnI5JBW4ZPzg4KCjfbDbTzs5OcWyI4jolOhqNSh/ZTyKrGxsbHSSvbVvt7OzoIx/5iD72\n",
+       "sY910Anzw8+kQW/btqQDiSS4/3zpsvlomamhPJYbO2+LxUL37t0rsmEn38XtNb3q5zC9af1Dg5pz\n",
+       "7rESfaFT5nElb6SLwnMGcgxyOfd0lOmYkYwQUu8zsGqaizPD3E8H3rZx/s5rwO2lLuGp5H4Wgwzb\n",
+       "DAYB/s1gNXV5on8ep4M195lrzbLMsRJcsENKR8z95f98Hn9nxsKfk9+cA17Hde8+ZGDtcfvHjj2P\n",
+       "EkpdkPRQHKkak7jYPclUhnRslqFLKfyMcNJYppOTCFfNGSJZ4BhdSZePDiBs7udYIVGAPEZuF+XY\n",
+       "6FARWXBkZiVDqJJ9snGkoWFemLzzXHiBU2nWFpL7yC3VNe/dnr2h/xpilouNjgkVninnkHUS3ObO\n",
+       "OaYD5Pmlckv54hySx4TpM7XkvjG6sUz9v+y9aW+kx3X2f3U3t17Z3MlZNKNtZMmRBVmILTtAYiBI\n",
+       "XuQDJB8zrwMDToAEcmAgtmF5kceWRtss3Ju9sJtLd/9f8PkVr/tM9fhBgD/4vGABBMnue6k6VXXq\n",
+       "Otc5dQrLDWsPIEWMhMea1Ot1bW1taXV1Vfv7+wXFwHsZgz5WUTK1Wk2/+93vdHZ2pnfffVeSUqLM\n",
+       "1dXVl8YizBDMWblcTiCr1+up3W6neBhYEUnJlTQ/P6/t7W11Op3EeBGPs7CwoH/4h39Qp9NJbR+N\n",
+       "Rur1emo2m1kWk4WORcMNBQAPMvG+oX9w0+X6lD7z+UYf46qLOY9inKLHelE/r6N0BSQ6nU4BCPE+\n",
+       "AMjy8nJi43zsA9ZYFPmuWq2qVqvp/fff1w9+8ANNp9euSxaC6XSq4XCYwDHfwW5SvO2MO2ff+D9+\n",
+       "7vPW9R19yEHYMKpnZ2eFDNTStTGK/nFDJWdw8h3sK+2OAJT7AYbO8JBihDEQXe48K8c6RWDEOGMe\n",
+       "RXbMZYxud0Oc+jOu6CeMFGQZZYB+djlR3PXs7JfXBUPAgQfXe/yQvxdDjvr4uInrsRtDMW7W6009\n",
+       "YdBcLzuL6sCH+YlxSR38N22JBhc6mXr5muAy8v89UfSscqOuvag0XTFKxbgFSYUJnANSPMPBGdfl\n",
+       "6E2vx6xn8L8LnBIBhSNtH8S83wMmo5JCCcXB5lR0nJyOrKMFyXuweBcXF1+iXKH4I5PjFGickF5Y\n",
+       "OBzMEFCa2y46a8HjXn+/09juZo2Fz5FZbmI4iKUAXN1K4X1en8iMIo+4zRfZO6vo48i307pyoS88\n",
+       "v4orzuXlZa2urqb7/LeDKK8730lX42Zzc1NPnz5N8njnnXf01VdfaTKZFI6EkZTOZeO+Xq+XtrEv\n",
+       "LCxob28vndN2cnKSWJeLi4sUH0OyzuPj49QGAPmPfvQjPXv2TJ988omkq7xGc3NzheBn6l6tVpMy\n",
+       "d/aQ9jmgjf3vGZxJ6kc5OztLDBBHongBgPtGDy/UJbKdng/M5xtjkbbGvgRM8JmDDbKL48KD5bl7\n",
+       "967eeustPXjwIMkN5pD0BuVyOeWDclckCxcyc2MA3RoNDNfBzElnQ3q9XtpoAOD0MQWIgEnxfowG\n",
+       "pRf62RdtZEwfO+Ph/7Ogx74g4N6ZOJ7JQhvfFxlIXzPcwGf+AyRoswPPCAiYby4X3GWeEoAxzHPo\n",
+       "Azemab/Ln+d5/6M3YzC6G3s5r4EHj0e9yLU+X3I6MgekfKy5TNGF/r64TkZQ5yW2gXnNOx1El0ql\n",
+       "ZIS4N8VJlVnlNv3Bbbktt+W23Jbbcltuy/+y3Cgj5QVK190Tbr3lEKtU3CKdYxMokcVy68AZBO6L\n",
+       "/uNc8HGMAXImJiJjrBwsFd9pAOLGao8umsjQuMycAYmWG9aA+9mxPthBA9qOFoYzVU7x5vzVbrU6\n",
+       "MxNpXSyw/5uSe0/s38gI5O5Hlu4yiNf4Lhsv0QLxMRRZTHfHYPXEuByvp9cFFxrf+3ur1Wpyy47H\n",
+       "43QauvTyIaMxNgHZnJ+fq9FoaH9/Pz13a2tLz549S3LxhIV3795N57GVStfn8O3s7Gg8Huv58+dq\n",
+       "NpsqlUqJkapWqzo5OUnM0ubmZnLt9Xo9bW9vp3H093//93r+/Lkk6csvv9Tq6mo6u8/7olKppFQC\n",
+       "jN/IVk8mk8RAeP8wT0lWiXUvqRBgDBvkO7fG43HBwo9sLX3oO8WcxXEWkn7h/1xMEC5x5En8GKwA\n",
+       "MtnZ2dH3vvc9SVeM1MbGRtrsERlgdGJMKsrRNq4/fRyii2i3x5j5XCqVihtfPEi5Xq8XmJRarZaO\n",
+       "E8Jl4y4678/ofnW2LvaDMyuuc91FiFxpJ24+5orrBeass518B2PM974OePwP7iZPmOrF2wGrRhud\n",
+       "OYPdh6113Z1zqcH08T0/rAk+7qhvZGLpGx8Tcb65ro/rqbvKXc/TV8xR6hnjqHLxxu7x8bAZ+hBW\n",
+       "ytvn3h2fE+jmuLZTR4qvWcjA51Ku3DiQiguNC44SP/MF1EFWdN/4OzxAmBIX5lgnv4YSXUG54oPI\n",
+       "FRGUN/V3xY7fnmf7QuHuzBiTxQTmJ4LB6I7ybdbIEqrT+yK6M2McgU8kjxVwGUZXm/eVKyvq4e/P\n",
+       "9U2MD/J2eFvj85Bf9NXzd3Tf+ef+XQS4noMHJcKmBffNS8UjNrg/F8jsLimvOwHZUnFzADFAUP2u\n",
+       "3NklxcK1srIiSWlH2srKivr9vlqtVsG9MR6Ptbu7q/F4rNXV1eRanJub09bWlr799tvCdn7qOTc3\n",
+       "p16vl1IfkGVdUgpsr1arWl9f15tvvilJ+sUvfqGHDx/qyy+/TItwjPNzsIfy5WDcCJ5wb5HegfiZ\n",
+       "ubnrHEgeNO1uLx83nqLAXYj+PhYGisdfeN+SQ8jBsy8mlUpF/X5fk8kkHRZNYbdbs9nUBx98kI7d\n",
+       "wX2DHHxMMxZzAcnj8TgdJxJjupAxYy5uhnFjNbo26/V6ci+Wy1e7xtjNe3BwkPKD8R1GgMe4sGU/\n",
+       "hnT4WIj6hL/jtQATXLu0JR6L5Pe5DJEb8xG9xSLuMqVdDsbdMGGuu37mmWSId93FuJCUYhHdwOK9\n",
+       "gIlofDlgiK47ZOOGneeEc/eyGwVc60ZLNK75PoISrvcNNv69g3BfU5CZAygv/r/r8wj6vf5eZ5+n\n",
+       "/O1B575+uIGRKzd2REwEL+5fzgXu+gSL7JE/NyJQrpkFfCL4kGbnM/K6xvZwH4Mp904GDR3jPnoU\n",
+       "TByccau8A4ToF/a2OEBwAMC9npDTmQyuiTvLPGaLQZiLZYsTxRfaaMHFgenxBy7jyCZF68Pb5e1n\n",
+       "cswaK7PGUQ6MesmNJZ7nIMrPIvPnUDePY5pOp2nnGjFtkhKjguyl4hlu0Tp1ObAIAHq4ttFo6PT0\n",
+       "VA8fPtR4PFa/3y/s7un3+8maHw6HiXWCFZmfn9fz58+1sbFRCGKGdTg5OVG73U7vr9fr6na7SbFX\n",
+       "KhW9/vrrkqS/+7u/U6VS0ZdffqlWq5UWFuRE3jFig6gLKSLK5XKy3GM/1mo1bWxspBQNyJRga2Jl\n",
+       "SqVSAmAe3O2ypj459sPHADrAYyqiFc/BwNL1OZ6DwaAwXviu2+3qnXfe0d/8zd/o3XffLYyZ+A5n\n",
+       "nRhHcWFj7AFmfHEhsSNxJxE0utHGO/0gWTf+FhYWUh6x4+PjpPt8Wz6FdtOmWQZu/Mxj5+L85V2c\n",
+       "M+l6yA0AB7jR+HRD0BdzB1XIzUGox+xEo5Rn8B0gwjc0eJlMigeR8xtA44HuDoKk4maXyITybJfp\n",
+       "3Nx1agoHP15vX99c3m44O0BBVm5I+3v5389qpNBvThRQDwAW4zc3ZsAD/pn3aW58zTLm2XE6q9x4\n",
+       "Qs6ceyUCohwL4YtdziLxvx0g5RZBlEROUHHB9et5Zlx4vV1u4cRdGxQGBZOK66TrbMN+iGJu4nN/\n",
+       "DqCWSqWXrFY/Zy1aZi7zuIgzIWYpPL6jTUxEFnNn9OI7ojWE3Px3LA5KvC+c2s6BSd8pFJksCs/k\n",
+       "M6w8V6QR7KOA+S4mWgRk5erqk5iFfTKZqNVqpfxbw+EwXVev19OCSf9HOno6nSYQg6uNpIyHh4dp\n",
+       "Ud/d3X1JpixuruQrlYoePXqkn//859rb29P29nYaA6PRKD3Xd5hJ0vr6ui4vLxNoe+eddyRdWdy/\n",
+       "/OUvtbm5mWTk4HMwGKher6dF/uDgQJKS27HZbOrw8FD9fl+j0aiQZ6nb7erOnTtaWlrS7u5uem6t\n",
+       "VlOj0UjB8O7O6PV6BVDqIJ6+BWhEpoffkX1gXNBPjUYjyQYwIykxRfx/eXmphw8f6h//8R/1ne98\n",
+       "R81ms5B5fDAY6PLyMuUzi6DKx6iDLPqGccf72MXldfe+976JOwwBLuRCajabevHiRRrDKysr6T2e\n",
+       "eNUX0eiqpt7Mm5wB7nWNYGVhYSHNEWek4n0Un4/MfTfg0M38eAoLngcD7CcTVCqVQrJixgrXoQsd\n",
+       "cHHd5eXLyZgByugR9Jm3x92akfn2dcTDQZyVcX3F2HBQ48+h/hg9zlTGtd6BIbJy8BVZNzcEvH9y\n",
+       "bkBvk5MhnjLGQ1acKPD54kDV5RZDQ7zcKCPloCIHnFxATPwo1FeBKJ4bBc3n/LiVEe99FXrl82hx\n",
+       "0inxHhRqXLRzlLJTwyhtrPHImDEAvLNd+fCOaJlGoOGyoQ0RwLgrINdObwvWi8uTxccBSqyffx5B\n",
+       "l7ebvs/1H7KIoNyLy8bBsFvpPqEcpPg7KN6mONldfs5cSMUcOLjqyAoNK4QbYDKZpOzl1NsXSp5D\n",
+       "7iYYLZcfTMf5+bna7bY2NzcLB5ASi0U6At9F9utf/1rNZlMff/yx/u3f/i0pxfX19SSDxcVFdTod\n",
+       "bW1tSVJy8+Eqcbnt7Oyo3++r2WwmV1y0okkCeXJyosFgIElaWVnRYDDQ9vZ2WoTPz8+TG/L8/Fyj\n",
+       "0Uinp6e6c+dOYRdTq9XSxsZGmleDwaAAhKiDsz/Ilv7zRcP7dhbgZ0HF1ei7utziXV5eTvVcXV3V\n",
+       "v/zLv+jDDz9MC5AnJ/X4Ltg1xhA6gQXQD8rlWCcHiPyOQCrqYFy/MfbGGQyP95SuAD8u236/r+Fw\n",
+       "mL5zN5P3Pe+LYMbnDEZQjEcslUopWanfEwvpE6KOJHcb4FG6BiYOopApfed6z2NaeQbAKc5VBy3O\n",
+       "PvmC70apg3hYJ5e3M/7EYfn8zoW5eH1cN/m1TlqUSqXCWGQcuyuU9zmz5V4Lxr3rLx8XDvId1NFm\n",
+       "d20i47iL2cGSr/kAVAxW6uceIF8PMVhmlRuLkcoxPd5RPnmiVeXFJ/2rlJgPEn675enfRRDiEzEC\n",
+       "M+9gt/KYBD4wQPQM5rjQ+mT0giIslUoFZer5g+JAdMDiSsHb6NS/g5VoBfrgd8siV89o6Uag4axc\n",
+       "lBtK0+XmblIHcF6c5ckpTW+HPzOCea7le4CmTyiPr/B7HdjHxcd/A5oi8HMrtVQqpZQDq6urhbgp\n",
+       "4mWk6w0DLGrRIkdJLC8vq16vF5L8Sdfb6z1+6ujoSKurq1paWlK329XR0VECbtVqVdvb2/r5z3+u\n",
+       "73//+3rnnXf06aefSrpSRLjmkLG7KZ4/f6719fUU7IwcG42G3njjDR0dHaW6IO/hcKg7d+6oUrlK\n",
+       "Hor7ULpanEejkba2trS0tKTj42PNzc0lQNftdjUajXRycqK1tTXdu3cvxeyQH6rZbCZXoh/Jg6w9\n",
+       "BQLyctYslzrALVrfch7Hrh+FA/hksUaGP/nJT/TRRx+lxdQTrp6dnaUxQlySxzH6ppZqtZr6Yjgc\n",
+       "prEdXUK0m/EYxxQ6hb6NBgDhAnNzcxoMBml8k1eMcx2dhUOfOgMa57dnAOc+mC83TNzQQYa+4Ho7\n",
+       "AKbOnvhvdLT3E0HkDqi8Da7XeBbzk/a5nnWd5TqM+2BromuL692QdN0OG0ecjzPV1NHXyhhIHvWh\n",
+       "/x1BepRbpVIpJK8sl8vJm+Jy4b0+h9z4ZL30+CnXkV63uB7nSATGEYYM49XHYfQUOIj+S+U2/cFt\n",
+       "uS235bbclttyW27L/7LcWIyUU4QULBO37vk8d38skZbmGe7Ci35P7vPfkbmJriU+i/WI9ZaKO344\n",
+       "YwtrA2Tt8UcxUN0pdtiKGMjHdTnXpLcHi1e6tkz9x60PD6qNLI/3ndcBVO80u9cBSz7uwnDLF3eF\n",
+       "WxO+ldrlS8wK78n1UbR8uQ8Llrp7H2AZUdec5YNbIBa3GiNzF92XOVerM5q8BwaAA43dJePxJe5q\n",
+       "WVhYUKPRSBS5W4kkhsS91e/3C1mou91ucsdtbGwkGRMDNT8/r88++0zvv/9+YSegJJ2cnGhpaSkl\n",
+       "+pSuY96m06lOTk5SrJZ0xUh98MEH+vLLL7W3t5d2WUlXc+bNN9/UdDp96Vy49fV1DQYD9ft9ra+v\n",
+       "68GDByqVSsmd+OLFi7Rt/Pz8XPfu3UuyYdciMT0cU+N9wbmHPv7ZIYcrh9QM0jVDAuvqTAZ9BOPm\n",
+       "Jwz4Nm7k/d3vfleS9PHHH6cYIuYOLGNkij0mDVYAZpN7pesEpeiZuDvJYxhhc/iO96HHfHzzu16v\n",
+       "J9chcwHrH6aoVCoVErnCftEfHrTONZS4yyo3ByNrHOeZs81+P890pi6GX9B217fuqor6hHFBH56e\n",
+       "nhb0tG8cQeYUr0fc1ODB2Yw7Z85h24jNoh3oNR+r8Z2483Fx0n760IPcaT/rAN9TX1hU34zg7Yiu\n",
+       "Z5eNB/C7bqMdET/4/XF+0L9SkSGO7FMMu+FZHg+ZKzd61p6XSM9KL+9K43tfFB3Q5J7B/fzEhU56\n",
+       "eUeDu/yoK/e50uF3nIw5UAM1jyKJgMip2/g+BjfvdpcJ7iYW/5xbKbo1+SzStj6YiLtwCtn7gN9x\n",
+       "oPr7HTS5C8Tlx/uoP0DKARLvdjeWv496xnFBO6K7gD5jcriLlXoRMO4FGSHn6EpEMXCNj7EccMr1\n",
+       "U7lcTjEl0jX9Px6PX4qxQJ7T6TQtjr6dnLPEeBY7xarVqkajkV68eJEWBZ5JTNb8/Lz6/b7G43GK\n",
+       "O+p2u+p2uylA/eLiIgWNf/LJJ5pMJmm3Xr/fT66nWq2WXA3n5+cp8zb1X1pa0vb2dprb1LdUKmlj\n",
+       "YyMFkQNeJOn+/ft69uxZajPKn/azK/H09DTFTtG39Xpdr7/+ejoTr1arpfE2GAw0HA7T0SoeG/H8\n",
+       "+fPC4uKbO+IGg3q9np7pQCIu0Mi2Xq/r0aNH+vGPf6y7d++m9hOcT7wa7/PjOkir4gv7ZDJJ7fL3\n",
+       "+bhkAXMXlYcC+DP5HEMMUO+yqFarKf6JnWy0g1xo0S0TF6cY3uC6OQKfaFC6geHueR/fDhSje348\n",
+       "HqfjQHDxutzQNa5XqDNz1PWj1991lwc/O+hxfTGZTArxOw4wmevVarUQfkA/siuVthGHJF27ganj\n",
+       "xcVFwXCJa6XrWtqIcRL7zOMHXUdFgzKSBOPxWLVarQBWfEzmwmDQ24BPH4fI0o938uKbmbyO1Cm2\n",
+       "3denWeXGGCkmYQQccSB6ieDJ75GuFxX3z3pQ3iwAx/OibzjH1jh7kmO0fOI6s3B5eanRaFQ4ssGt\n",
+       "1tjGHGKOSsGBI8jdFV+Mf/IB7YsuStrl6Qye3+fvjBaft3cWqOJ7X4Sk66DTGACbezeFfong2tvA\n",
+       "fXFLsjM3sX+jn9yfQz/E9rm8aa9PRrfk+DzGFszPz2tpaUmtVkubm5vpPq5xS5H6oJgnk0kh+Z/H\n",
+       "7GAhelzS3bt3NRqNdHx8rPPz87RrjzHx4sULtVotLS8vp/tarZb6/b5OT081Ho/1hz/8QR9++KGk\n",
+       "q2Nndnd3NTc3p7W1tQT8GF+M9Wq1WrAIy+VyauvZ2Znm5uYKixiLPYwN+adWVlZ0cHCQ7iVuBfBW\n",
+       "rVbTjr+VlZUCS3R+fp7ivV68eJEC05Hz6uqq+v2+ut1uYvYYC91uV9VqVXt7e6pWqylZ6eLiYprb\n",
+       "EZwTy+a79ohzm5ub04MHD7Szs6Mf/OAHevToUQqo51w8jl/xnWIsypPJJC3MjLVqtVrIxzULSMXv\n",
+       "nGHGGPBnELPiDFHOIAIY+DuRR6lUSjuRee4sHUyhjtHIjgHY/h1g0AGTlzjnkWluF5df4+tBbLs/\n",
+       "O8rX2S4/z9CDs31xh7lyvcm4gGmGIcbopQBmAb7O6GMUjEajpBMcFCFT+j8GYUd96H3IZ1EvRk9K\n",
+       "lKXPGV9TfY3wcQoAjOBTejlJNv3KdzC8MUYK4Orrohv/eJFmlRtjpKTZOR+YtDF4PLIOs57rHZUD\n",
+       "TxQGOJ0dF0x/Z5x0fOef5awsB10XFxeJ9naWZJYC8Xq6fLx9HsD+lwAfVib1woLIMVbNoBA5AAAg\n",
+       "AElEQVT8H9vktDsT2GXijFGuDX69W6xMiOj287r4s/iMCcrf3mYHz66IYwB5rAvyjPKj0AbfKeUB\n",
+       "7MjWAZvLE6XqYLnVahUAA0yFM0ywTNEyAizB9khXSmA4HGo8HqvdbhdcLQSRz83NpUOIed/i4mLa\n",
+       "7dbv9zWdThOQIMAZmQ6Hw7TFfXNzU0dHR9rZ2dHR0VHKui1dAYK1tbUEdnI0/fz8vFqtliaT62SY\n",
+       "WJXSdWZpXHflclk7OztaWVnR6elpAl8csHz//n19++23qW6VSkVfffVV4Z1ra2taWlrSN998kxa3\n",
+       "ra2txKhUq1UtLCwk0LO8vKxf/vKXkqR79+5JUsrQ3mq11Gw209i5vLxM7tLLy0vt7+8nQLG+vp6S\n",
+       "lU6nU/3kJz/R66+/rmazWdgphsvW5wxjhnmPLqG+PJOxG4PC3XXF/7NYZZ//LKIwcoxFZ3oAL+TG\n",
+       "ikYbCzPsVHwH/eosDgA3GtgReHAv13JNTq/6LjhnzaOR6Bt7pGICTC/OwvgpEhQHZ36KAQYF48X7\n",
+       "wTdA+C496QpkLS8vF9IveFvH43Fy9cZksLQRttrXIeonvRw+47o+rhes1bMYIOofx5p7UtCbMYCf\n",
+       "9zkYRn5sbvA6OVhjjEQwyI5b3K6UXKZ36unzMldubNdeXPidpZKKYMQXRQdb0stb1x2URNbLQUjs\n",
+       "1AjauJ7nxEnqVkmcWKDi2AYUERYqFrKja97hbJUj9bgIUeLgjjKKCsD/zsU7uEuJ334v1gz38hxn\n",
+       "BN1aYNLE/vH2OdPjQIqYEj7PTdY48X0x8rHDd0xaxohbH0xaLHIHp66U/D2AC2el/J3l8tVOGg6K\n",
+       "dgW+urqq5eXlQpZjwATgCCDuCTqh5d3a9fpcXl7q+PhYnU5HS0tLCYQ0Go0U34OVS+l2u9re3tb7\n",
+       "77+vb775Rufn52nXnscEEbtFXqf79++rUqlob29P9+7d02g0SjvsOJqmXq/r9PQ0ue4YAyh7EoXS\n",
+       "dsYeliPAhvsAeLguyJvDvYypxcXFgrsUpocUCp7QczKZaHd3V0tLS7p//74uLy8T67S9va2NjQ0N\n",
+       "h0O9+eabOj4+Tu9bW1tLB+SWSqWUdkG6isk6Pj5OuwRXVlb02muvJRlsb29rc3OzwC7RhsFgkHSF\n",
+       "L7YsyL64O7uB2+fi4qJw9AhWuRth0fD0xcf1jt+HHH18X1xcaDgcpiNicE3htmEeA7ioD7o4GqSS\n",
+       "klvT6+7y8RL1irMqnlID4BKNwxzbDpBzYywaSFLxwGOvvzMgl5eXic3lOweiEdDCYJPdPMrHTyRw\n",
+       "IxnjEqAUZZvbeUeh/nH9Qk86MRHXC5ez61M3VuNa5nL1tcP1aAwHYQ1x74X3oRvXDu54Pu/LxYf5\n",
+       "nIhEx/9zrj13OVFyHRcBkX9HcRdaDoz5gI/0awRITuNG4BCZnhyTkwMKEYxNJlfHfYDSuQ/wFBdE\n",
+       "961HEOXMSZSf5yOJbZGKE9cDq/06romWqU9qR/HuPnpVX0Uw7FZyToH7/dTX+8V9+7n3RoDj488X\n",
+       "Xq6dZZnEz/xzmAGnlv0aFJQrGGc61tfXC3JlUaxWq6luvMOpZw/69PPhptOrRHQeIAojs7y8rFar\n",
+       "pZWVldRnyOvk5EQnJyfa2dlJ7IMv7hz1Ua/X1e/3U3+Nx2P94Ac/0L/+67+q2WzqwYMHCfSsrq5q\n",
+       "b29Px8fHybJ1sD2dXsd4EeTtckeBN5vNxLqwmI1Go8RkEawsXbk3Wq1WcoksLy+ne2GfUY5vv/12\n",
+       "Ss5J8tJyuaytrS0dHBykDN3Ly8vprLvFxUX1er3kaiQf1tramkajkY6OjhLQ29ra0tbWltrtdqrr\n",
+       "o0ePJEkPHjzQysqKut2uGo1GgVX09Ca+oPN+xtTi4mJyTfAd90XWARkAzOOCEQ2VaOGjc3KuL8IX\n",
+       "YEvctUsdMb58caQ/Yhwlz3QdGHUG88H1jjNu/hljiXrnGHnGGqyF3w/wyQVG+4YAN74uLy9Tugfm\n",
+       "Y9Rx1CGCGt6HG8p1lQNpAICve9zLc/075rvHFzLe3F0GIOE719tuXPv4ikHZORIjpx9zfebXOHBy\n",
+       "uUWm0kEqfZHzGNFu17cOsNyVR91yBEaq58xvbsttuS235bbclttyW27LK8uNxUhF/3V0jznDEl10\n",
+       "UtFajbTjrOc6LRn9sfGeWeyY18198Xweg92iq2E6naadE46icduw84jv/IBRp0K9bjwr1ps2I6/I\n",
+       "BHhf+H0xwNktDiwWrEqnPB25R9nwt/c9snE62etMiS49ZxGhi5G7x3A5W+fWR2SkIruUk633Z2TG\n",
+       "vJ+x1tzS9oKrpd1uJ9cXiSyx4huNRiFOqFarJRePW8rIE8uScUUbCU4mlob6Xl5eptQBxBhw38rK\n",
+       "SmLKWq2WPvvss0LsDZZ3v9/XyspKkvdPf/pT/dM//ZP++Z//WZ988ona7XY6YFdSCmzHNcb7iO9g\n",
+       "DnBen3R1XAsJHM/Pz7WyslJweeO+4jnuNmg2m5qbm9P+/r5qtdpLAc9YpbgUiTdpNBrJyr93715h\n",
+       "R9/Z2ZmWl5e1tbWlo6MjbW5upjMDcSG+9tprOj09LewgbLfbeu+993R2dpZioWCk7t27p+FwmHZI\n",
+       "uruHDSndbldzc3Oq1+uFecVZcoxjZ5uIRXJmgvsY/9HCdgYkMuO569wVw5iDKfPn08eUONf4bmFh\n",
+       "IW1G4H9cY7C1pE2AjYJB8PQPzmTFbfcwsLCfOV1D+3zuexJR3zSCvEulYqZvCmwS7cgFviPLqGvc\n",
+       "JZZLH0Aak0ajUWBMiN10t7jrLMIxmPvOxqPfyIbuSTd9fXWGyF18zjrSBt9gE9dw5izPi54I17fO\n",
+       "SLkrPvahrzPU3dtAXVzvwdwiIw/hYI3IzQXKjR0Rk/vbK5obUP5/BFIMpAgK3BXnricfNO4ik4rb\n",
+       "anOgyoGAd7RPiAi2eC5t5JwsPmeL68LCQmG7KvVmsWTw0TYHQj7RUCbuWnLKmffy/Nh+H3A53zCK\n",
+       "zb9HmTmt7TS2pylwBezuAeTj9H6ki2MbKQ4yaVsOIHk/RJDubq44IePCk3M7UmIwu4OrWq2mVquV\n",
+       "gpHZhcOus3K5nOJryMe0sLCgfr+ftrxLV6Cn1+sVYlc86z11pA4eSLq0tJRSIJRKpbTl/vDwUIPB\n",
+       "QIuLi3rvvfc0NzenP/7xj0k2CwsLuri40Onpqcrlsh4+fCjpKg7mD3/4g/7mb/5GP/nJT/T48eMU\n",
+       "iP3aa6+l3Yaj0SgFuEtX8yDGUQDkOGPy9PRUlUol7aLjuk6nk44CKZVKKdUB3/d6vXS8Tr1eL+RQ\n",
+       "YwGpVqs6Pz9PQeqNRiMBrJ2dnUIG8efPn6tWq6lWq+n58+d69OhR2jWI+5D3EA8nXc2DRqOh0Wik\n",
+       "u3fvamVlJbn9mCPlcjmlnWA+cYTF4uJiih9jnNVqtcLxQb4zi52o6BPGvI9Zro9g4lUGqeuZ3Lwh\n",
+       "TxY5z3zxRp/4pgzGov/tcW5kRwcQcai3z6+cEeSAEBkzvqiLH/HjOsRjV12nOnhE7/rOQ3SX52vj\n",
+       "uxhG4bqd75Fl1BezAOjFxYX6/X7Sv37kD/d7oHkuhMFBDmNiNBoVdJdv9oi7OP1ZzGFPpUCdfYOJ\n",
+       "63gHUBFoIWMfM5FA8L99DXH554Ab19br9eRGjwefu8wwSnNrCeX/mYScCNXZHf5GqB4r5H59BxPO\n",
+       "OknFAemLCgLnebNYp1f9HYFWfMcsZotO8h0pgCmpuGDTJiwMX+gdxMV3uYLMLfgRXcdB7GxNjMuK\n",
+       "fmvfWeZH2Tiqd7AZg0QdtHmbeaYra1f63j4UucsGQIgF7s/hubTD5ZHbeOAy8wB2Z8d4N0yUB8fy\n",
+       "PWfKOetUq9XS0RONRqMQBI51CAiTrpNfkqSSg3Ynk+sUCFjs0jVbQyEoGiC0u7ubxuLi4qL29/dT\n",
+       "rNJHH32UgsVZtDmmhmM/JKVUAp9//rnW1tZ0//79FFv05MmTdN7dYDDQ6uqqjo6OJF2N05WVlcSa\n",
+       "Ef8hXTE5n3/+uQaDQQKdFA5Hnk6n6azAk5OT1P5Op6Pj4+PEhngSTPoNgIRlLynlOgJYwWwxLhqN\n",
+       "hg4PD7W9va2dnZ00Z2GGlpaWNBgMtLm5mdiTubk5nZycpP53UEdaA7ajOxvtfUVsmlvq5Lwi31Uu\n",
+       "nQqpHpyN9E0Uvgghe58zbs3zG7DjDKBvkmDXo4N6AG88mBggyzPcMONZ8/Pzhdg3qWhEOosmFRNZ\n",
+       "+rzkHc6y+K48wEBkQKQiG+vrUSzOZiFv6ueAC7nwHgenPMe9G9EoBJwBPn3XJmsLG1Pi7jT0FzFb\n",
+       "sY9dH8aYo9zmHTeCWB9z8UxRz+aCyGNx/ZoDUoxVnxcY6nzmYx/miX6MORl9/Yj1jMaylxtz7TlT\n",
+       "JBWDvyPI4joXWGQiCLp11CxdT87IELGwuuUewZlbynFi+KCIgYyzAJjXyf+HNqYNkTaN9CzFd6BE\n",
+       "MOagxJknfwaLM5MtHnoqXU92X4jdredACgBBPiR2nEhK1mR0fXqJioa6OECNQLVcvj5XSlKBXmdh\n",
+       "Rt5x4lJmWToAtwjMseLiThsfa5EBcPaOzN8s1ix0c3NXKQvW1taSUjw4OEgMTaPR0PLycnp2v99X\n",
+       "o9FI9YhAEvkhV2dkyNLdbrd19+7dAogmPcKf//xnTafT5KL77LPPUsLNarWqdrudFsZms6nJZKK9\n",
+       "vT1VKhX1+33dv39fkvTs2TMdHR0l4HJ6epr6C9bJ3XQRDBOAvbi4mNoOqzQcDvX06VNVKpXkAkQ2\n",
+       "JDCdTqfq9XqFjOrj8VitVksnJyeJ8eKdnjuq2WymNt67dy/lxHrvvfd0enqaZMpuzGaz+RJwnZub\n",
+       "U6vVSuBxc3OzkJhyNBoll1g0gAighxFAhufn50kG1NkX/hgADMhiDrprzMcKAAmd4Kw548i38FN4\n",
+       "N8bAZDJJQNqZ8Og2kZSyhQPeXId5UL2zEvSZy8t1pq8hzjr5WpErDgR8HtNXtMcXYWfYo25Bpr5G\n",
+       "uPHL+yJL78Wv5x3cBzMVE6tSf/SXyzSCUd6L69HXSGf+fL32NSzu1va1MQeE+M4/yxEkcYOQ6+ac\n",
+       "68770L1TzrgRtM96621Adk4WuNxfVW4s/QHF0agzSpHpcUSbE1xE81JxJ0BkZ3IAx5/ntGPOzeiD\n",
+       "JT4jx6b4NZHNYEIwyH3iUWcW2lynspDG7cFeF6eQkQ2f+zX+nQMtL+QD8ngDr+dkMnnpoFBJiTbG\n",
+       "VRn7IMrE2+9/u3wdODu1jPLMKThXsLHvXFnH+3zbMHKPIMzl76wQ7WNR80URGQOeut1uAk8wRMvL\n",
+       "y2mM40765ptvUl4y3gtbBYhl+ziLvKTEiJ2fnycGCcZnc3MzuajW1tb0+eefp636b731lp49e6at\n",
+       "ra20Q5B8SJPJRBsbG3r8+HGK9Xr27Jmkq4Xy4OBArVZL8/PzOjg4SIrLY17q9bp6vV5ix87Pz3V6\n",
+       "epp237EDSroCUiS4PTw8TOObsUiM0tHRUdr5GN81HA7V6XTU6/UKOygBbMgQUAXwYoel97/3Wb1e\n",
+       "T4CUvsAVurGxUdiqDkNydnaW+tF1FSASl68fZYPBkmNH3Z0ymUzSfaQiQDd6pnbmOzl2HJj7tnYM\n",
+       "iWjwsQgvLy+n7Pb+nRtlnljUM7TD0lIf+hvWhkIfue53FsWNUi/oCGf/XScCGHKuNZ6NDnI2y0GR\n",
+       "60w3DqM+LJfL2Tgtl7e7Fr14HCtydkDM0TC4mf3gc57nTA2/Ac/uPeG+VzEygHlnwakfMqHe0bWZ\n",
+       "Y3xcp8d+BETlXJbUBZzgOjmGdPg7Z3leeF4uts3Ljbn24t+RzYm+TkpcfHM0Y6QcX1UPBzpxAQWd\n",
+       "guy9XpEx8+flkLkruxy4inFSvuUcpQ1Qoi7u7nNWSnoZlLhcpGvXn3/ukwwQgO/dQZaff+TUqLMu\n",
+       "TDwGJwoSK9KtgVmgxutOXSN4AUx5P9AGB1q+yLpcZk0s5DwroNzfkyuAXq+bM1goOOk6psVlR+4i\n",
+       "4p5wRbEpQZLu3r2rg4ODQiyMA0lYDNxOjKm9vT3t7u4m96Iv7E+fPlW5XFaj0VCr1dIHH3yQsp5/\n",
+       "/PHHKfblzp07hfQHJOF87bXXdHh4qHv37iUgQXwQGdidybm8vNTJyYmq1aqWlpYSuHHZkgRUuorh\n",
+       "Qn5cQxZzADzy73Q6Gg6HWl9fV6lUSvVZWVlJQMzjtnjXYDBITFG3203f+fEh0+lVHqu9vT1JV3Nx\n",
+       "ZWUlMWCueAkmb7fb6XzDmBsJ4O/noqEPms2mxuOxTk5OCpnbfUz6/AEcxAB7iusvNjDwOXLGxReN\n",
+       "K+obj3rhXvQfAJV7ACjMgQj6kIEv+gSv49Z0dtjdePGZ7tpDNhQHY9Et5HPevRg8IxrlOUOQee9t\n",
+       "8Lr7WkJxNscZGgBN/C664hiXXj9csLzb+xFdCIDN9bkDKorXjTWH+kc3mt9HuyOQ9M8iGHbihLbG\n",
+       "a/06lyUGNG1xeSF/Z55oUw60eVti/b3cpj+4LbflttyW23Jbbstt+V+WGz9rzz9zZB9jU/w+t/Yc\n",
+       "4ef8svHZXtyi8Ngbtx6im83r7e5Ify/X5dgTvoufY8mxiyfGOlF8G67/dutAKh6D4vEN8b1uKbql\n",
+       "hKWCLHiW76BwFoXvoEj53PvJ73Nrz/s6bvF1tixagN6WuLPQ5RDZJi/eTq53H7l/h5xol1tDtAmX\n",
+       "AnL17+MOKq+DBwq7TGEkCLx1BqFer6e4qtFolCh+6YqZGg6HGgwGKb6IY2Cm02nabt/v93VycpLi\n",
+       "mVZXV9XtdvXs2bO0sxD249tvv9VHH32k//qv/0pB8wSbj0YjnZycaH5+XicnJ9rc3EwxYGQzx2UU\n",
+       "4zDm5uZSwPd4PE7PdHfQ0tJS2qVE+/r9fspaXi6XE5MkXbFuJIdkZ6AHeBM4zvNhndi0UCqVdHJy\n",
+       "osFg8NKhrsyBXq+XXGacL+jZ351V3traKiQp5b1cs7S0lA4C9nHA4cfD4VDVajX1BSVa5BRYKbK4\n",
+       "x7ntbimKu9LcVcf/uMR87rpudAaj1+uljPgwStQnZm93necsLjqLjSvO9uMuGo1GqS0xLMDnr8sV\n",
+       "l6WzM7Sf+9zF58+mzh7n5euSewK4lrZEj4W77Vxf8Zn/758xT3x9I1Yq9gn9iEzZnYb7PDJyfiyO\n",
+       "93OMp/O64bnw58VwGG+Hx4u5nH0NdhnPWr/xqsDoI1PWaHS/uw4jE+XF5ZbDHq8qN3pEjPun3f3E\n",
+       "dz6IvXN8q2903zld6ILzv7kuuuR8wfVnuBDjzo5IK8b3xLpFwBjrzAGqEZz49fE+qNgczUm7oH5z\n",
+       "rqp4X44ijyCGAc6ZTXyHK5CJkMsODNjIKSnqk6OUZ/nDXVFEZRgntFQ8UDjGUHkgql/n97vS9B1d\n",
+       "sS0eixFpaV90AEiMG3fvlMtlbW9vpzQIfiwEu+iov6cVQKkuLS2pVqvp7OwsxSxVq1Wtra0VQDLf\n",
+       "vXjxQtvb22o0GumsPgDR/v6+VlZW9KMf/Ui//vWv1W63k6xqtVoKIh+Px9rd3dU777wjSfr000/V\n",
+       "aDRUrVbV7/c1GAzSM5eXlxMgKZfLyR1B+0qlkjqdjqrVqkajkdbX1yVdufZIf1CtVpPb0OXmO8U8\n",
+       "SL/T6Wh9fV2tVkt7e3tqNpupHaenp1pYWFCn09H+/n4hoH4ymajdbmthYUGHh4fqdDppASmXy8nV\n",
+       "iXLnSJpGo5GymhPk78HPABvGFAvZ8vJyWtzYoRmPLJGKqUgYT7jdoz5yoI9LzMeuv8MzYpdKpeQi\n",
+       "nk6vM9H7/CIHF24fAHG/3087yOL5dcxfAJXPFeYkOy/dYMTtie7y+NAcwPG5y7z1sADeG12CLlPX\n",
+       "DVEnMt5ybiDqGF2CXnJhA9GYm2WIo2/pNzc6yYPmIRjoYNyH/jmbJnzd8Ho5+ImAzceUgyXuibGx\n",
+       "LovorvT+i/o3gpoYv+RGu6/VOdn52sp4zgE3xwe5ciNAKsfSxMBct0z43zvKOyAyQ5HpYtBHNA17\n",
+       "Qp1y90cg5ZM9x6rxO8eexMU0AjcUwtnZWWp73GLs9QMYuV/YZexWU87CQTYeb8Dn0ece5T2ZTNIW\n",
+       "emerqK+DXZe5x1J4wK3X260tnp+bRMjMA1EdBPl1OYYTWXrf+I4mZ5m8n/jbF7PY92z5jkwZz/Zn\n",
+       "ETjLZ8T8UOr1ekoZ4IzHcDhMC2/c2dnv99NuMIKrHfCenZ2lNjabzZRYcjweJxas0Wi8xHL96U9/\n",
+       "0ocffqjvfe97+t3vfqeNjY3ULoDX0tKSdnd30/EpDx480LfffquFhYWU6yla4+fn5+r1eoWFliSc\n",
+       "l5eX2tvbSwHXFOK4Tk9PdXp6WkiTQODyZDJJweIebF6tVtXpdFQqlVSv11NcFuOh0+mo2+1qZWUl\n",
+       "PRNZnJycpHgrz7NTqVQKgfLEsrVarRQXValUdHR0VOh7mCh0FIlap9NpYsSiweBxUNHQ8kLgsxuh\n",
+       "FMaLL+LEyw0GA43H45eO1XHwHVmG8XicAu4PDw8TI3V6eqrRaFR4htfVAZ0zNj7HAIWA00qlopOT\n",
+       "k7QLK7e4unziAcBe58hI8W5nYCILhfz8ep7v4DTKPzJMDrCkIoj0Y2Fi2zwONbJYvnMZ2Xl6E2cX\n",
+       "o25HbnE98fWK7xzY8T4/fskL66XLIm6eijsw4666CM6oO2kZ+M7X++hRoJ3udfD6OQDzdnnf5MqN\n",
+       "ACka4gJ3cCS9zPRIxQU957abBWxmuQn93U4ruvspChUUz6CIuxAc5efa4J+76y0OKLaokiDOF163\n",
+       "rgAsUbFRVxZoH3z+fe47B4IRhfv1Dpx4Hp9HgIEyy2X7pv3xN+/LKUGvD5alKxXkG9NkUBfaFunq\n",
+       "aJFGCj0nZ68b9XHLN/YJ1rTn1OFzTjSHsQFknZ6eJpBBfT0BJYsf/7NhYTQaJYaL+2q1WmKTjo6O\n",
+       "Up4o6XrXXq1WS2yW57uZn5/Xr3/9a/34xz/WgwcP9PjxY0lXLsFms5mykddqNX322WeSpHfeeUdb\n",
+       "W1s6PDxUtVpNzIaktOtwPB4nBgr5kvxzOBwmMAjAnE6narVaajQa6vV6iSVhwTg7O9P6+rpGo5HW\n",
+       "1tb0xRdfJNns7Ozo4uIiMU6VyvXhy2SY7/f7Gg6HSRa0fzgcpms9NxUuQRZ7LHuuI/AfEBcBGODS\n",
+       "3Y24JH2MUdCdjDVP5AmoRj85WHB9xqYRd6XBXkS9B9gdj8cpU7zrb+YtTDWuXtrIgdy+GErXrj3c\n",
+       "uJHl8va6cU2KCgwGX+SiPvWxn2PUXb5xIafkwjuiPkF+/n5n5HOAl7nv45428C4M5ly4R2StuAdd\n",
+       "yLmu7vHwPHde3JXO3677Li8vC260yJC57vc2UrdYT38/oC6u2c42er7C+NychyquM85GRRAY1/FZ\n",
+       "BMmscmN5pFAALpRIm8YFK+emcQCUE5yzBjlQwPc++HOupPjuyIJxPZMzUoRex1gPJpIzVQwyXAHR\n",
+       "kpKKh9ZG1IzFEAdZHAw5CzEqsuhT5zeT1d1qruzZSs3fsT7uFstNagqLQW4iu8wcaPO90+LeB67Q\n",
+       "3c0GMJzlKuDzCOhcbihkbxMWJM9nt5h0tWDW6/U0Xk5PTxPTs76+rkqlkoDU8fFx6mMWZIBE3GWD\n",
+       "WygqNrKX3717V++++67G43ECPU+ePNH29nayvuk36Sq+olS6cvH84he/0A9/+MPEQJDZu1Qq6fj4\n",
+       "WGtrawlwbGxsaHl5WZ999pnef/99DYfDxOgQq7S2tqbDw8PUTr7DeibBIDIlj1C73U79vrS0lJ7r\n",
+       "QLRUutqx50zP4eGhFhcXk8uQHWZkDO90OikDOUAT9ypjsdfrJYYEcFyv19Mi7WARQEr2cgAhu6fO\n",
+       "z89Vq9USy0jdfVy5geIWd07xs+jEWKiYxwlGjDYwj2CiuN/dRmxzx1UnXemdVquVdjQeHBzoxYsX\n",
+       "kq4TwgKkHBTAqLreirrdx7XrBY5TYnz4fHNdF9l1xjR18bnhRxbFtcWZGP8OudBHrof8vXEdcjaf\n",
+       "+nqKCPRWDBuIDBwAweOUGCscVwQ7ik5gjERGLK6l3kZY0EiCsH5hOMb1za9zOVCY53EtdYOduvvz\n",
+       "XMe6PvZxE+uRc+vFZ7uB4t+9qtx4ZnMq68Aqgo8cEpzFTPh9lEjR+vty11McrUa/uwM9f68v8v5e\n",
+       "B1252CGvq1sG1NkT03G9M1X8OGp3atgBDP9T3+hqcjlRd7daqR/KyJUcn/E83El+xEClUinEDxEz\n",
+       "Ehk03o2ijdZ1rj0RzDhT5f3p8oiWkPe1T1a3DmO/0W4K73TQ6xnFYZ+k6+SSyJkz1CSlWKbj4+OU\n",
+       "/RoXU7/fT3ViAfZJ74uW1wVAcXh4qFKppAcPHujjjz+WJH311Vfq9/vp+IRarZb6kKzdFxcXGgwG\n",
+       "+tOf/qTvf//7kqT/+I//ULfb1erqqiqV4nEun3/+uT788MOk2Gu1WmIr6B+Py8HNhpvT28N1uO2m\n",
+       "02mKTSLbuqSUDbvVaqWxR7sZY+vr6+r1egl8SUouv/n5eT169CixMPR/r9dLR6D4+Ot2u4nFgFX0\n",
+       "BKS4ttzNwndnZ2dqtVoFd5RUPIMyxxD5vHUm3DckxKNDYMcWFhYSuPNgesZ/Tic68EC3oTOYU8zR\n",
+       "r776KqXNaDabBeDi9fF4QPSGMwPOWPFZLO7CkpSAqYMU1wvIl/e68edpW2AZeYfr3siuIXvYwMhy\n",
+       "07bIcmF0U9/oofH+dmPZjWcMWHfd0z+AJdhv0ktQZ89BFw3I2BfULweGousuelsiGxX7LxdLyu+I\n",
+       "A1iPXafTPuaCG9gRMM0Ct9Er5d/lvFqF9s/85rbclttyW27Lbbktt+W2vLLcCCOV84NH2s9RfbTK\n",
+       "pJfPBZrFRkl/OUaK/ymvYsmcLXHmxe9zRO/Pd1QeGTF3F/q9ntk310be79aCdL2Tx5/p97q7EuvE\n",
+       "LbMY60OJKD3Wh3uwUN2K4L0wUl5XLM/4TH93jH2Iso8smltApdJ1+oH4XLfA6V8fa96fbpG5tUMf\n",
+       "YtESB5Qbd24V81xYClg8/77RaGgwGKQz9Zyx6Pf7Bfrft9Xzbqx+2nN5eXU0DNv/+NEAACAASURB\n",
+       "VLv5Pvvss+RK/Ou//mu9ePFCv/nNb9Rutws7AwlY55iUP/3pT8m19f777+unP/2p5ufntbq6qr29\n",
+       "vcQQvHjxQqPRSN/5znf0+PFjvfnmm4VUEH4gM+d/IQuez/xxFwW76y4vL1MyTwLDp9OpOp2O7ty5\n",
+       "o5OTk8KzeD5xUCcnJ8lFSX0Zw6PRqBCsPhqN0u5EUkxISnFtHNexsbFRiO1jbFxeXhZciXNzc1pZ\n",
+       "WUnsoo83DxFgfrubh0SMuNr8iCfGYHQPuWuOQHafF7QNViuyuDGmkLHIYc/I/euvvy4wb5eXlynj\n",
+       "vVv73h9eN5cD7fTPYLDRXe6+jjrDC0wGzJjHllFc57vegrF5lYsuriuRifFnoveIRYrhFegw+j4y\n",
+       "3vQFOoHvnYVEH/FsXzdhgmg/qXXQpT5unPmMupV6OKMY174Y8+WydsaKQqD5rF3m0W0X5eaeIR8D\n",
+       "UR97G+J64mM/d6+XG01/EMGLL/qu+HONz4GK3MLl18RrEVr0d0dXTw6IuMuM4oPBO5L7ZlGE7Jbw\n",
+       "2IroovJMuV5PlEKObuV6B6HRHeauPRSauy9zz+Td7pLzukXFIhUP8kVZOiBgkYnxaUxKJpaDRV8A\n",
+       "fOAjU+ofA1wd1MZ20g+4LqPCwN3CIufyceULre71c1eMj3XiMYiT8bgN6sd5ZJ7ZnN2dBNxyFArP\n",
+       "RG7UEeVDgHe73U4uPOKAPvnkE/34xz/WeDzW48ePC1n2J5OJWq1WYXz+7Gc/kyT97d/+rT7++GP9\n",
+       "53/+ZwKKjKfV1VV9+eWXeuONN5KL2uPjut1uOryWQ3wlpYznuNsGg0FyeQK+CDp+/vy5SqVSAoTj\n",
+       "8Vj1el3T6TTFJVFYmDmrkENfpeucR2trayk3Dy466kVAvR/Ci/tpfn5e1WpVw+Ew1ZU6jMdj9Xo9\n",
+       "HR8fp2eurq7q4OCgoB8cEDHucDF64D/uLOaTZ8PHJco9vquJcToajTQYDFLby+VyOv7JA+l5JmOb\n",
+       "nX2ckShduUSp997envb29hLIRK68Z25urpA2wtvJLlPGm8vF9RfzEiBExn2+o63+29vvbkZ39cWY\n",
+       "oXgWKz+5+BqKz21AbXSBUpdXHYTsYQQ8N7bHQWHuulyOLdf77qKkvnwenxU3UeXWtigPX/MiqKfN\n",
+       "ubXU18hIPLixG6/1+nBtdB1j1Hhf+3XMj7hevKrcCJCKbBPFhR3LXwJZOSbqVWDAhR0X4Rwb4u/j\n",
+       "J7d7y+vkHcDCnrvfF+b4DAdmOXYJ5ULOHJ/AXEPshreT9/tgoj7O0ETZuH+5VCql+JOc7GKOllm7\n",
+       "HT12ivt8gDs4iLtpnNny4sCFNrkSnjUuXLEDcCi02Vk2f360RL3Qdx7z4XEVPGNhYUGvvfZaWoS6\n",
+       "3a6WlpbUaDR0cnKS+ky63ogAMGDXG++LQNG/YxFtt9taXFxM+ZlOTk707//+7/rud7+rN998U0+f\n",
+       "Pi0k1uz1eqltZ2dnaXH85JNP9JOf/ETvvfeeHj9+rLfffjvFQVUqFR0eHmp7e1vtdrvQf8T/efwT\n",
+       "iv3s7EyDwSCBneFwmN5XKl3ll1pdXS0YACwcS0tL6Rw+gCjfVatVHR8fp5xcvnAAwFZXV7M7iZBj\n",
+       "PJSbpKCTydUROLAX0vXuu/Pzc52cnGhhYaEQp8aGkbiQ53YzurHCoj4YDBLYQg4AeYCGz0tnjTkG\n",
+       "B7l4igKPTYLRY74BtABEMOCMcQ+oJz6M+DL6h/vo8/F4nOLy+M5jhdyoof6VSiUlLfVzJt34cbnB\n",
+       "TCMjZ8f8sxyz5HVx/YGs/FoK8qbdUcc7iIk6aRZ7ghxgbCIRwLNyejGuaaVSqXB+I3PBUw/4vdwT\n",
+       "mUnaHdcSH2+lUqkQExfzlEXZ+H0RSHG950rz+/w53r/kP0PWcY3i+d6W3HiI5cYYqcgeOYiKSJXv\n",
+       "vTFxh0ZO4H5fpPykIkqPwMgXzdyk4v/oTpo1ESJlGJkcf0cESx7E7c90Cw2ZudWAAnXZxOKDLLJr\n",
+       "3o4I0FxOEehwTw7J85mDPs9cHCcoLgyXQ2RYfAL44M/JiHpGSjdn6VLf2CYOZEbGUtGV6u2Myozn\n",
+       "O5CiXXNzVwcZLy8vJ3dSr9fTcDhM6Qg8WauzOsjJZSEp5aSKliesG4kw/Qy3fr+v3/zmN3r48GEh\n",
+       "e3mlUklsCu0FxNRqNf33f/+3fvjDH+r4+FiHh4cJnJE4czQaqV6va39/P7Xv4uIiBYS7K0u6OhMP\n",
+       "ppSFF7AwnU5TmgG38gFjyARZT6fTAmDl3cfHx2q32wXGl0Dx8/PzQjbxvb099fv9BOaRnXQF6AeD\n",
+       "gS4vL5PLlPd5biUysR8fH6f6EhhPADjtx40Ia+JuL85J8/HjDDvti/O1Wq0WNg9wuC0yLZVKaVcl\n",
+       "6Upon+sBZLOysiLpWn+Xy2Xt7+/rD3/4Qxo3tI/rfBfwwsJC4aBiX8Cr1WoBJLixCVij3cvLy6lf\n",
+       "B4NB2gzgoIPCAuqbiCjMcXeTIZtZBhj/+4acyMg7s+X9BLtHHzpzGg11b4Pry6gzI8ng/eZrIHKh\n",
+       "bgSfA6Zc3r4zm3t97UA+zvTzXW7N8Hrm5OUGfJS3h7FED5DLgH7MuSf5cfZzFlDKAdJYbhxI5cAS\n",
+       "f3tnOCDwweyf+//8nXtvfNer6ujv5D63TCMAis/09kj55F6vqrtfF60MR+xOO8f7uNcn8asKiN2f\n",
+       "H4GGKw1KHNi5+jJR3RqIC0DOEgIkxFQFOTDF/65I+d9lG60l3s3kcmXlcgFE+ER010fsa28/MmH3\n",
+       "FIX6AZhY7LDkWVA90Sk5lIg9OT4+LgAN2k1eJAABixPt8ASZsGylUknffvttSnopSQcHBwUL2McT\n",
+       "7Ngf//hH7ezs6IsvvkgxSZIKC/Pl5WViQVjM6ANneZrNpvr9viaTSXL9eU4n3FbT6ZXrDIArXadV\n",
+       "6Pf7aRcdfQWw63Q6mpub0+rqagGc8myMHZi1w8PDNKbYbk992IXqiw8A6uLiIoFBmEXGiOeBg7Vi\n",
+       "YQNkYnS4BQ+w4jPfpQYAcZemg3Zip2JsGCwPdcoBCk5emE6nWl5eTiDIgfrjx491cHDwkoXP+Ped\n",
+       "gs6kkXrAY6Y8/YbPRXdn0U/xIGf6zRdzxgJpMVy3M58B5jD9tG8Wy4PB6u2Nxrwv7BTkgpspMln+\n",
+       "d3wvYyKn1328R1adfvb1wvUVLmQ+c93H/25oevt4ro8FXI8w57MMa36cdXNZ5tbVSJrEEtdQ3OAQ\n",
+       "DuVyuTAOkRVj1Q2uWR4Myo259tx95MU7MS5IdFSOvXFGK4KzHGp1cBDBEt9Ht5a/j9/eDlcW/uP3\n",
+       "+n2UHADJsVk5dsMRtS/s0rVF6otELlbC2+PFB7bL2SeuMz456nmWjN1SoN5Yq94uf2ccDzEFg/e1\n",
+       "B3DGieHxVrHdLPTOOEUmh2dFxeegKrKHbgAQW+Z1ZSHAFePsCYkuWeR8PHi+GS+0ATZrbW3tpQWL\n",
+       "3Ee0mffV6/WUUqDT6SSw8OjRI/3+979PR7y4S4ws5AcHB1pdXdX29rZ++9vfSrpKyPnWW2/pm2++\n",
+       "SS43cgytr68XkpMuLCzo6Ogo/Q0bA2hyNxT9WKlU1O/3tbOzUwCgk8lEe3t7KpfLKQeU98/p6alW\n",
+       "VlbU6/VSbNXFxUU6VoO0G34sC0BnMpmoVqsVguFZYKrVqlqtVqprt9tVo9FIwDfGazHWvH8kpWN1\n",
+       "nFGLqTdgKXEn0vbpdFoINneWi884VodYrnK5rKOjIw2Hw5SGg7pwzXA4TGklGo1Ggammbl988UWB\n",
+       "OY3jH+AQxzCLpqcxoF78jgHV7sql0KbFxcUCm039y+VyYuP8+VEfuJ7Luboo7gWgfa7P0RU5Q5a+\n",
+       "8jABCuPU3W2xvr5eevF1ahbYQL+78Ukf0Rf0P2CRseRMofclbfF3+LyL6zHPiW3n8wg+vU2RJYtt\n",
+       "p66+5g0Gg5QPD4OD90W2zcNjcu8p1HfmN7flttyW23JbbsttuS235ZXlxs7aiy45qbjl05Eo1CwI\n",
+       "0+nKiJBz1rn7cyND5MxERNZ+vTM3zkJFdyT1iCyNF0f9/r+70WL7Irviz3Y6lffCXBDP4Qh7Vp9E\n",
+       "dsotIH8n/QFl61uTx+NxsuTdx4/FBXPjfeIxYlht7pKIQZ65MRNlwv/4+qPF5+xh7BtKjKOItG9k\n",
+       "P93diwvMZe50+Xh8fcgorrVy+eoYHZI7eh80Go3CziTq6vFafvTIdDpN5+mdnp5qdXU1WZewVH4+\n",
+       "n7sEkRf98/z581Tvv/qrv9Jvf/vbxLJQTxiX6XSqJ0+e6P3330+uld3dXdVqNT148CC51VzGjAt2\n",
+       "E/Jetuefn5+r0+mksYUsSYwJ++IyJXM8FrufUUiMTr1eV61WU6/XK7hw/Pw2d7kgU+arxyzh8iNb\n",
+       "Oi5FSYnt2t3dTQcQM86azWZKIIkrC/YHGUyn03TUj+90JZi8VqsVDl5mZyR95pnDYfjm5ubUaDRe\n",
+       "0jVY+hzl47GCyI34KM803+/300aCzz//vHDWoOvZ6M733WG5mB2uja50nussD8/2eCTYHK7Bvcwu\n",
+       "11lsDnojegNiLA/1ZE1wN60XxpE/09/HNTzX3cRRb8d0D9ENi070NcjlzHfI2tcRZwvRYd5+7wNv\n",
+       "B+uWxyH5dzyLMA3azXPdC8C7WfMdL3gfuPzi+3xNoO0cW3R+fp7YXmSLq9xZOu8Xdznnyo3HSPnC\n",
+       "5YMxN9h84vgOM3+eDzCnKqNwfeLOKgxOFnJKfPcs12HOt0vxyeH1jT5i6pkDOu5C83bxXQRl8b1O\n",
+       "10a/eK5OFAeCXi9++0Sb9Z23sVK5znROfXzBQBky6aPvmzrlQBZtdFBHvXLyczrZY6GQl4MlL1zH\n",
+       "ZIwuDL8vN24mk6szBQ8ODrS9va21tTVJV2Ot1+up0Wgkl59nRC+VSqrVappMJoXA4VLpamcZrqle\n",
+       "r5dinXCF3blzJwVJUwAvvgGA2JMnT57o+PhYDx480N7enjqdTsrbtLS0pOl0qrW1NX311Vc6PDzU\n",
+       "22+/LUn6/e9/r/39/RR8PhqNkrsQMEgQt8fPICMCyl0J12q1pAhZyPgtXcczLS8v6+LiojAPzs/P\n",
+       "1e12E9B1fQKoJf5pPB4nQNjpdJJ7lTgij4cCfA4Gg7RxgL5/+vSpJpOrc/iI+aKfqPfCwoLq9Xrh\n",
+       "iJzFxcW06CNj6Tr4m+t8LuB+Qx+6C5b7PcM+4BOg7ptU4qJHigXGLMAWF9XPfvYz/eIXvyjMfQfl\n",
+       "xKjEkAx307iuBQSib3gm+sLnl8vA527UGeQGI5YoGliMQZcr/Z0zkh0k+G+Kt99dl1IxX5LrYAcS\n",
+       "HrrgxXVWdI85gHRjNxqOHmLgcyjKk/7MuUEjkeCyQQ/6dZ4LDpDHdRE8x3f5cyjebjeAaF+Mj+O5\n",
+       "fvIHxgt9HGNdGaezyo3FSDnAkfTS5PJdbc5QRLTI4HOrZ5aCiXXIoVj/n/f5/17HaAnwfbw23h9B\n",
+       "pC+yEUhxr6NyZ8vi5z4QmdBY/R6zwOSPu7n83T4ZKLQ5go/c79inKAQGpstnfn5e9Xo9WYIocBZP\n",
+       "HyteFwdC8XsmDXLwAEhn7mLxseH9y+TKxR9g5ZfL17lwYEpcJihNB7T47OkPtxKr1aomk6st9eQu\n",
+       "8tgy4og8jsfL6uqqer1eSnYpXSmyUqmUzoaL89APl4bNka62+B8cHKjT6ejh/9nRxzEg7XZbo9FI\n",
+       "Kysr+uijj/SrX/0qtYHz8J49e6aFhYV0jIyktHWfmJxoffti1+l0EgAh2Ju+JUkodWUbPfmLYGZ4\n",
+       "JgqzXC6nnY2SEgBhwRsMBinHFs+o1WppPLH7ENZoOp2q2WymYzh4X7/f1+uvv54OknZjDPah2WwW\n",
+       "Fmr6pNVqvcRQXlxcJHYIoOnjGz2ZM8zYRQeocJBFDBvv8DkOw8kBxC5TjIb/+Z//0Wg0KuSDijFR\n",
+       "cS57ey8vLxOQnk6vg+jdoGQsejwl7ZKUGCe/L74PMO9B+sjW3+9AivfHXcC0ywEo8o5y8A0RPCN6\n",
+       "YCgOUKKxHAO6I5Ci/own5oV/F4sDCNoZGfrIzOWKr7sRADrL5aQJ+jKuHb62u072z+Oa64SLvw95\n",
+       "U38/gzC3WzIG2Odklto185v/H8sslia6Lf5v7nEw4f/783Lo2d/B9/7OCILid3TmLKZo1jt8Usd3\n",
+       "8LzYabPAoE+sOJGcYp1OrwPzInjMBWo7xctzc4MxFpdLtIAdFMf6475hMfBJSpbvyETmZBHZTVeC\n",
+       "UlFhRyvPxwoTOwaMswPF5e7bwwH8OWofWXhQPbvTYHVYAE9PT3V4eChJKdXAwsKCTk5OCophaWkp\n",
+       "HUrKOIQFKZVKaZFst9sFWhowNhqN0s4uZMN5egCTqIiazaZGo5GePHmihw8f6sGDB5KUDk4eDAZa\n",
+       "WVnR22+/rW+++UaS9NZbb6W+63a7Oj09TYxbo9HQs2fPtLi4qFqtVkgQiTtrY2ND3W63YD2zm+/O\n",
+       "nTtqNpvprEHaAWODK40M5rSBnEjIzXNeAebn5uaSK4Ax0mw21W63dXp6qmq1mvrZF3wHh9x37969\n",
+       "5L4k672kJGvqOBqNCocrw4wRkM9iT9A4bfCAfT5nUwGg2cegu0ApjDV3+zDWAa4LCwvJJeb90Ww2\n",
+       "dXh4qMePH6cx4uwK4zsyHa6/+d4Xa8aNt5P61Gq1wmLnQIh5mtsUIqmQnsJ3s+JKY065Tndg5oCX\n",
+       "+vE+13+uRzC2XP+7PGIoRFyrouHphjLGAe2gzrTH74061HeuUWi760fX0dLLa6n/xA06sR8ovuZ5\n",
+       "yAxj0Osxi5HzZ6MDeJ6z2xEgwyzzGc/if5dnlH8sN8pIRQTti3kEKJ5QMqJzR/VxMeV+f7dUzBvh\n",
+       "CJdrIsKN9fe/fWDm6sX7/PscUONZkcl5VT1dls66xIGNBeM+aWepooKL7Y1AA/lGpcgPLJJbH7CM\n",
+       "DMw4cZ3idUXkTEUcN3Ey8z/vyQFwv86vl/QSUPLfcTcKypFrsHLoo5gIkd9RwRPDtLy8nNg42kuu\n",
+       "oqWlpcRmeH14Xr1eTwkfKQCtxcXFwjZ3j+/y3VaSkiXPmIFpkVQAH8PhUM+fP0+AaHt7W71eT51O\n",
+       "R5PJRFtbW3r06JEkpV1xx8fHWl9fTwf1Ikd2WE2n05RLSbpypeFOK5fL6RgS6frIEel6dxdJO6Vi\n",
+       "hnrkRjuWl5c1NzdXcDGwQMMI7uzs6PDwsJCvaTKZJFchYxR537t3TxcXFzo4OEhuWMBwq9VKQI++\n",
+       "o54wYIyZxcXFl2SDm1K6XgzcZc11PtaY39F16ZY+xwj5PHT3ui/OGA3Ulfgqxs7S0pJevHihwWCQ\n",
+       "2Dj6h3Ea89rxXPoqzlfXa/QRdT07O0vPR67OWJCtHpYwB+oODw/VbrfTfaREmMWA+5x3wwRWmLa6\n",
+       "3nM9lFvzaGduIWc9jADJgUJu115kI70f/bn0ZXRReh187Md1x3VnjMHiWg+HcD3Dc2gPrlpfj3yM\n",
+       "eHtyxriDb9fhs3atOyj2eqObfU3PvTeWGwVS0svAwhfZiJzpeO+UyP64cLhnFqJ06yKHeON1uef5\n",
+       "PQ4aaENEuLHjve0OinJsGiXe6+62Wf5ip/ula4WCTzwCy1z7JBUUBrLwNmHFovy8Pq5QXKnEtpbL\n",
+       "1/k9PKgWl5wHHEcLNgJQ6Gh3Fc9iF70OswAY10RXabw39hvuPMaO5yDCxbK+vp6sfZdNuVxO8Svu\n",
+       "lkRJ+3EkvtUX8BHbPD8/r+Xl5bQVfG7u+pR7z8nD4k+fE6sjKQUj4xJ78eKFHv4fd9/nn3+uWq2m\n",
+       "ra0tSUrs1tramobDYXo27WPx6Xa7arfbhQBnWB+UI+0hfgSgSYA0rkaAFnpkNBolgFIqldK5hZXK\n",
+       "VdZ1Tz2Aa+ji4iJtoZeuWBfG9NLSUkq5gNx3d3eTpdvpdNJ4gz1ifHKeHfcRXwWrR/txazK2PTga\n",
+       "wJ0zPAEC5ATyBdENEs73gxEtlUopEJeUGFzP+ML1x5jwTPPkjiII3ecHLEXUP4yBuB7wd2RXuI/4\n",
+       "QOLGXJ8AfsnHxoYE5Mj1zBt/B6DIA6f9Pmf6vS+8vq7znIVDJ3i6BF/0ud7/dtbvVSW6cX1ceCxf\n",
+       "BHSxHyJIieuJ3xuD2ykuG/6OOtbf5zJ2Wfi6lGP4Zxm4Odl4/WcZ2G7Ax/v/Uh/cpj+4LbflttyW\n",
+       "23Jbbstt+V+WG4uRihRg7nv/3691BBwtAK7nN6g3RynPctlFZO4WHc/IMQ/+/FdRuJGFi6xW/H9W\n",
+       "PR3pe5yB3xfp8ZjUTSpSntJ1nEguWN/jAairU+peR7doYnxEdG1GOtzrQrwKrI7Lm+udiaFNLsP4\n",
+       "TGfgIqsWrSQKrAL3ujylomXq7Yp18DgX6SpOqFwuJwqeQGLpiiHpdrtpezwuFuk6Lqler2swGKhU\n",
+       "KqX4GjKnkxLB5w9xVT42fEyen5+nw2vZvYcs2NVG3BZsxvHxsb744gu99dZbunv3rnZ3dwtuz8Fg\n",
+       "oI2NjXRWn8sb90ulUinErszNzenOnTuJ/SFRpnTFkBwcHKRjSk5PT3V8fJzOW1tZWUmuoFLp2o1F\n",
+       "XxADc3x8rGfPniWmq1qtpizya2trevbsWRrDsBvNZlOl0tUByeyE3N3dVaVSUa1W08HBgZrNZpqL\n",
+       "sAqkYBgOhymRJzLe399PrkN2xhGAz7Eyk8kkMXkwh6QkINaKvvedZLEMh8Pk9pyfny88s16vp2By\n",
+       "YsXoQ092yjzgiJjpdKovv/xStVpNm5ubOjw8LMjb52rOZebzJbIgzhI4kxePyYl6kp2TMX4M5oln\n",
+       "xOOg2ADAM/jt9Y6bVKKXJecujQHV3p7IgsNGodNzCUFzepDfjDnWLpexrwHTaXFnZgyr8DahL5yd\n",
+       "4ztfE3O6nXti7JS33XUy7BQyiOuxe314D/3L5zw3rmGwgjG9Q9zkEduRwyqUG0t/kBO4lE8JHxdo\n",
+       "/yx2dBRu7HAvs1x+sZ6z6h1BgdcdheG+aQc2sU1xEsY2zHI3eT1iQWl4YHW81mUQwcirZDDr3T7Z\n",
+       "AAVeUAg5ypjnRXed74gjZkq6jhXwieiBkF43fPDIxXeLvKrNsW+4z92ptIvJCfij/rwTdwlxRij3\n",
+       "o6OjlFH78PCwENAJGADE+JEZAEtkRDslFVIhAHwBZ6SUYIz6zqVKpZIWUwLPARk8x2NS6Ceu/f3v\n",
+       "f6979+5pZ2cnZSiv1WoppmZlZaUQW3V+fq5+v1/Y0YSbbXFxUcvLyzo5OVGtVtP8/HzKFs691J8+\n",
+       "AdiUSiX1+/10BpvH81xeXqbnHh8fpx2R9DeB7wA4z09EsHmv11O73U47+o6OjrS9va2Dg4O0K47+\n",
+       "dfe5dH3AMeOi3++nLOIARklpiz7uW8Y8fS9dxZEdHx8nN5F0DVYYk65PAP+lUinlEqMsLS1pcXEx\n",
+       "7QScTK6znrN7FODNzjyeu7+/n0DmwsKCms1mQd4s7tQvtzDGOQyI8LUBGY5Go/RMP42AMcz4xMWb\n",
+       "c+e7PJApoQmz1iDXGf48gGckAZgjtNHbE3VUDNlAl8Y1g7lLmzyujXul61MafDMJc9/1ZE429LOD\n",
+       "WIwP1x08YxaYQu/52aouHzd4S6XrHGLueozuthgDhZy5L24q8uczT2IcmceMSUU3I3L0I71iufFd\n",
+       "e7kYmhzQ8EGSA1ZxMEjF42Z4bw7A5CZNrEeOaZrFhOU+i+xQrDuK3n3Ksc4RgLjcpCJ74pOWCeug\n",
+       "wLct89utowho/F0OyqJf2weoD+r43BjMx+TOAVvui4faslA5WM69i3fk2pHbXejxBV6wVD0Y0Rkw\n",
+       "t54cTPM9DAJt9P7qdrvJqq7X64WdRDADsBi8k/iQ4XCYApy9eAoDrHDkxgISGTg/Uw2FBcvD4ghj\n",
+       "5TE7zWZTrVZLe3t7+vbbb/Xw4cOUNwo5Hx0d6c6dO5pOpwlkNRqNFFNDu/xYknhsCjFJT548SXEy\n",
+       "x8fHiWEBEMG29Pv9FMDuCnY8Hqvf7yd2DmA3nU61sbGRFO7y8nLqC1IUDAaDtEvtyZMnkqQ33nij\n",
+       "wJDBMHl/cTizB6n7FmxA6+rqqiSlg3cZF6VSKQGbwWCg4XCY4s88ZQjy9nnlgMkXu+l0moAymzro\n",
+       "U98VR86u6fR6QwBsmHR1DiPzghgq2k/us5gnSCoaNdTLWXVf1H2BZnx6OoE4/svl8kvj1AOvc7tr\n",
+       "HfR43zkwoES97LtnfQ2Kuj+uVQ4wYn5B5BO9JP5/9B44cZALnAasxu9harwdvivS5RMBLjokgqUY\n",
+       "fO/P4p2+frmcATye+oj7y+VyykkWAZEHlOc2RLFueeyYfxeJEq6fRVhINwSkfED5YPTspt5RPtii\n",
+       "K4LveUZOaLmJwSBx1E+JFsmsxTX3zFyd+Y6BkVv43Q3i9zkzw4DJWRBu8fn7Hb37wIkZdiN17O/1\n",
+       "z93ag32h0Gc+MWPwJIrFJw3PdUsnBhACpFx2pE2Iz6H+uKV8h5P3xSwFFUEz32PNeQCoL5al0nUG\n",
+       "7cjuucUWz/9ikrKzzHeYtdttnZ+fq9frpbP43K0wnU7T585yEczurtWYXLFcLqcdgoA0gqt5lp9h\n",
+       "xnNwk7HFXrrOA4b1xvcUdtTt7+8nNoiyurpaOBDZmRUyfff7fTUajUKeKNyWa2tr2tvb02g0Su8E\n",
+       "gJEtfW1tLcl0Op2q0+no6OhIlUpF7XY7MVmA00qlklg5ZONZ0BcXF/X48WO9/vrrqR3or6dPnxaY\n",
+       "w/H46gBlEoE6eDk9PU1gD8DR6XTSdw52XMmPx+Pkfp2fn9fZ2VkK/G82m7q4uEjnM/pi5fmV4nZ2\n",
+       "Ukq02+0EopA3Z+fV63UtLi6q3W6r3+/r008/lSQ9ffpUz549S2kqpGvwS9JXNy4cUPlGDN/pyvcU\n",
+       "Z914Rr/fV6vVSrnCuMfnsB+ejewAhQ5Ambe+9Z7ncP6gj1Ha4GsV8nLGh7ERQygim+5sHC5v9JED\n",
+       "pZgMljEdjTvXUw4OnFVyhg1DEcDuxp7rwmggUz+u9b+p19LSUpbJiyEYvtHC1wNfg319cIDsz6Rv\n",
+       "c4QMsvNkrD7O4noRwXGu3NiuvRh/49RaZIn4zF0KFAZhrpGRaswxHfx4TwFGhwAAIABJREFUR8V3\n",
+       "5wCa05l+X66O/h1/++/YbkmFTowskN/Pfb4TJL4j0pY8n8U0x9ZE+cUyC517O3KTfTKZFNiOXF08\n",
+       "RoH/fZK7dcRCkdsGTHGFJb28y8Pfx3UoPt8lyL2ePdwXfe5nl5gr92jJuhUFyCHvj7sbut1uco0N\n",
+       "BoPEiElKO9V8YWLxcmq8VCoVGAkSV6JM/H5yLF1cXOjk5ERnZ2cp7oodZvV6PcU6UZfDw0MtLS2l\n",
+       "pJIcaispHajL4gu44btWq5UWpnq9ntxYzWYzKXRX8NIVqCGX03Q6LWyHR8bsIptMJtrY2Egs2MXF\n",
+       "hV68eKHz83Otrq4mpklSyhE1Nzenfr9fAFKeFPWbb77Rd77znSRn4qI6nU7qI9/tNxqN0vOk6wXX\n",
+       "E46i5AFEDnrYoYdsAHMcanx4eJjaT16ti4sLDYfDgpFUqVzn1mGhhHGEjQCcEQ/H+K7X68kQkKS9\n",
+       "vT39+c9/liQ9e/ZMz58/T/mmfDHF1c0C7s/w+cxY9QU5lriwMzY855VUdEMBUmmjszCeANS/5x2u\n",
+       "C6NR6+CE/6MBFY1b7wv+9/UuhoJ4mhreDwvpMo4pGZwl8ne6Low6nOc7U+3tYc7yXGfbnY13efI/\n",
+       "Ojwayf5cYta4z+vlhjfPAtC5256x68asy5R+zK0Vvta7fo6pgnLlxoCUlA/qll52wVF8MMRANWee\n",
+       "fAI6rejvcnYiMkQ+SF4lcO5zVOvM0V8qESjG9nhdIrijDdEycBk6tT8LVTt1THGmyQGdt93bn6Pp\n",
+       "Y3ELgwU8WiOU8/Pzgq88x+xJxVPX/cfllusLlDkTKvYhn3m8hLfP7/dxxP/R2vbvqXe04ACYABz/\n",
+       "DkajWq0mECopATVXwrS/Xq8ni5Y2AQhZSFhk6vV6Yoj6/b5WVlbUbrc1nU7V7XZTX5DLqtlsanl5\n",
+       "uZBpfHFxUXt7e0lxHh4eJtfe3bt39cUXX2h+fl6tVist3LSPWKbT09NCP3nQdMwWPp1OdXx8rHff\n",
+       "fVenp6daWFhIrI90tbAzH8mvhdwGg4F6vZ6azabK5bJqtVqKS5pMJin/0NnZWSGZ6fn5uZrNprrd\n",
+       "rjY3N1Uul9M5hH5kCcCQPhwMBhoMBqlPPE8YdSiVrpOpspisrKxoeXk5sYPOZLGwDodD7e7u6uTk\n",
+       "JIE0smkfHx8ng4B+Ylx2u90CK8m4gMXsdruFc/+Wl5eTW5MA7idPnujZs2eSpK+//lrHx8eSrty1\n",
+       "jD36Siq6jaNedcPKmQc3Lp0d9zAAnxO0D9kDUnOMCnJ3vepMdvR8wFTA2ObcgrQ3gkBAkbuI0ANu\n",
+       "OPmz/HnRoKUNyCgaks6qeH24Ltc+13PIis+cTYt1hX0GmDtrjj5wDwhAy2N3+T0LrPjnrl+5h3oC\n",
+       "2kql0kuhILTNvSr+3auAlMd45cpt+oPbcltuy225LbflttyW/2W5EUYK1iYyKJE5ir7yyAL4fc7k\n",
+       "5Fx/0cXiSJT6OOvi90dGJJacHzbXPv6HcvZ7nIVw+pPngaKjdUWJMTsxritHc0pK1rFb0FCZzvL4\n",
+       "u5z6zvnpvR/cEuJ6WKbY97zLGTK/nrbkfO3RWsj1U2SW3MqLTJ67Jn3sRKvT6X7+90Nn3dqNY84t\n",
+       "day5paWlAo1PQOVwOEzuMncJnp+fF8a0M4icNUe9vK1xhxdpDIbDoY6OjlQqlXTv3j11u920Uw73\n",
+       "I4k5t7e3U3vW1ta0ubmpr7/+Ol375Zdfpu8fPnyow8PDFP/jcsC9R0B4ZJ5pC+ySdMXytNttDQYD\n",
+       "VSoVra6uajqdpucPBgPt7OxoMrlOsuhM3ng8TiwLLkTpipFrNpvpuJrpdJrYHDK3r6+vq1wu69NP\n",
+       "P02xVbANGxsbmpub097eXiHJKQlQOdAYeRMsfXl5qV6vp4uLi/RMguVxKzSbzUKKA5glSVpfX08u\n",
+       "2J2dHX3zzTeJKXBXFkyRu0KZTzCc/X5f4/HV0UI+ZkjkCuM1GAzS+2EL6TPfKBB1k3TNMDBXIjtA\n",
+       "iUyLjw1SGLjrkPZ4SISvCc4I5dxMzhbFGBrXMzEkwtch1xMuv+jug5FCfq5r0E/+vwdRo6PcpZlj\n",
+       "wSLLhHvcw1N812L0rvhuYK6P6yXXS9fZ4WFHeQb3+uYAnulhFc7gezs8VQGyQ4d6HxI3GHWe1zPG\n",
+       "+VJPLzGeOHqtYrmx9Ac5msxpyjhond6MrjenLv077nFQE104PMMFGRfXXB2je8vb5fWMPu+cO4//\n",
+       "o9uMetLx8TtoTad5o5JAcUSXoVOYuUHigy0CFXff/SWQ6c8DZMQ4ghg46FQ3sSHIMbpgXe5RplER\n",
+       "OQiKSpD7omsSSt4LSsTdePyNYnAglGtjrg5LS0taW1vTysrKS4dpslvOFwXmideVwnfEL7iSvLy8\n",
+       "TC6l6ErExYbrp9Fo6M6dO5KUdonNz8+rWq2mLejS1fb3+/fva2NjI7m76OejoyMtLi5qbW0tHdfi\n",
+       "7QYocpiyu0vn5+dVq9VSfJMbWx988IGm06sjbLrdbiE24969eylA/f79+wXXBGOBYPKtra2Ca8Az\n",
+       "rzt4q1QqKYfUkydP1Gq1Ul0XFhb09ttv6/DwUE+fPi24KdgNt7CwoO3t7cIiNBgMUgyZL0CSkqsM\n",
+       "l9vR0VGSHf2Fu3Fubi4BW462ITj+8vIy3YfbHBBFSg3+lpQ2Q3jwPkDq7OwsudIIaOcedE2cL/SX\n",
+       "Awf/nODpCE5Y6Nyoc/cSICqmHMG1xFxkdx/1lK530cbFGdkChqJbiOvdFUm9GcuzAI0bkjzPQyti\n",
+       "GINfm3Mj0r4oN+qUixt1d53Xjb+JL5L00noBsESWOYAbQa2fCQjIimttbhMWesANUJeFg+SYQgH5\n",
+       "x91+Pi6p46xd68jK25aL2aPcCJBy364XX+jcYvfvGfzRj87fcXHyToiD1FmeHLqOFkKuxM53688X\n",
+       "8pwfexbwiEyctyUu8pIK8UR+ve8yife6bDjV3QdVZG8cAPJ9ZFlif0Vrj+fE2AmXBbFUDGrAA/3n\n",
+       "8kXeKP7Yxw6iHXSgPHJWcO45XqLypC4XFxfpwF2sLn8Gipd3+nfEBhHg6+OGOBW34KPVyPMiWCLR\n",
+       "ZQShi4uLmkwmqlarqlarKf6G+9bX1zWdTnVycqLDw0NtbGxIumI6zs/P1el0UvyQA8P9/f20oAMo\n",
+       "pKsYqadPn2p1dTXFMnEO3fn5uRYWFtJxNXHss5MNBoTFfnt7OyUcZUz6MTDr6+sajUbqdDqFJKK8\n",
+       "c319PTFRvjhw7A1xMHt7ey8tYi9evEggjLl3584d7e/v6+uvv9b8/LwajUa6j12OvuuTPvdUErBH\n",
+       "fDeZTFLKBMCLgwWPkVxeXk5B6hyTQ7vL5XICaHHRp89d5uyymk6nSd5bW1sp0H5ubk7Pnz/Xt99+\n",
+       "W0gc64Ze9Br4HIxgy3VTLkbGwbPvBHTd5EYqC6TrCWekKBF8OFPO9TFInT6KAIT3UHIMObJ1ubtO\n",
+       "9vscbM1i62gP7YyB5+icaIi6IRgJBQed/kyuYwz4e6Vr8Ersp7fD11NYRNrq60g0aDFMfSely8lJ\n",
+       "FwwD2useAl+fXQfnxoIDuly/zio3mpDTBeeMQnR95QZTZJ18MMRreFdOqN7hOSbqL4GF+D11pSMi\n",
+       "ePF2+vu87bENgEe/JrbdFaR0rUxA75EFkl4+P+ovtYvfrkT8ur8kN1dw3hcsckwMduHwnQ/+XB29\n",
+       "PpGKz4F2d+vNYrH4PE4o6u27jngmu/+cuvadNFyLIoqgrFqtprPMottlFv2NFSldMwnS9YLkIMnr\n",
+       "NZ1O07l3npiRxXN1dTVlB8d9c3R0pNXVVT18+FDn5+cpaaV0BQYbjYbq9bqOjo4KFvuTJ0+0sbGh\n",
+       "/f39lG+KOvf7/SSzo6MjVavV5KKinwhO9vY1Gg0dHx+n3Uunp6eFvEeekZ30AsiEnFGNRkOLi4s6\n",
+       "OTnR5uZmeifzgZ2JjKl2u62jo6NkXVer1fTM/f19dTod7ezspN1HAJpS6Srj/GQySekTkDeygAHi\n",
+       "sN04xp3RpX9xP7G1HBat1WolNyRnEXqoAAA6MiTMGZiDfr+f+tDPLWRe+Fj3TRi8L+fSY5FzfcOC\n",
+       "jqEUd58xD6LrKRq/OSOTOZkzsHie3xeZKN9oMZ0WdxtG0J9ruzPcABpf8+LGKC/oPndVcp/rxOgO\n",
+       "Q0dQF1KyIA8HQQ5OMbwBRQ6IeP/i4mJqkwMR5rsf4Iz80PvIyF2Jrr+jjo8g2ccw7fVdlnyGfnOg\n",
+       "5f1OX/hvD0GJfeN9MKvcaPoDKZ+cK9KZjlgpzjRxbw64IGyu8XdxTWRXfEJQfDJEEBffGRWT9HK8\n",
+       "TGRWqAcd5laGA6bcJOUnunpoAxMkKk2UjLt2vF3ezthfOTbJnxFp2UgZu/sO8MQzPWmey4G2e2oE\n",
+       "nxA5+tstsRxwncU00g9eF1eE9JUzOa7sooJ21ysLIDuwAFXr6+t64403tL6+nurqOxMja8Nht6QT\n",
+       "8LiUSG/7mMKdwnURLJbLV0enNBoNbW1tpQSRz54908HBgdrttpaWljQcDhPImk6nGgwGarVaWllZ\n",
+       "SRmupavdZ3t7ezo8PNTe3l7haBVPM7C4uFjY8eOU/+XlpQaDQTqS5PT0VPfu3dP8/LyePHmi7e1t\n",
+       "jUajBCbm5q6OV2k0GmmnndeHfEcsHN7HAB2+Y6EiLQI5s0qlUooJq1arunfvns7OzhKIoQ/b7bYu\n",
+       "Ly9TXJK7t9F3MG++MJRK11nbkTHfjUYj1Wq1lJ9rd3c3AdDT01O9ePEipeAYj8eFQ6JhTJlTOWaF\n",
+       "dzmT5Qu4J22kPrTB3VWMMfQkfZljA3yR5XPACzu//D76iXnncpvl9YgsSGTAoqsoMkNeT2f/3ZCL\n",
+       "xm7Uy67TXDY59xzxaPzv33G9h3fwnR/6LhWPtHFQ5M/FMPA56O+gnbwvxlnSB143xpqzRN4XDvD8\n",
+       "N/L1dcRBDr9ZA/jOCQPGm6/B3t4IYn0cxnq+yq0n3XCMlAtHyi/S/O8LofQy2+RsQRzgkYWKdWBg\n",
+       "xIWYEpE59faO9Pf5AMm5seKE4V5AD+4G6eWtns5kAECov8fm0C6PDfEFwwdTjrqMiiNaVTw3IvUo\n",
+       "C6flkQvWq7s3PLeUTwwHq5EN4kw0+sLb4vWgbx2gM54iaMTH7+3zc7pcBg7+vF1cc3l5mRYiLDIH\n",
+       "MCxu6+vrWltb0927d9VqtQpxOTArvM+34RKYPD8/n7b3u9XmBoQrTrJrw2R4WgHaDuDb3d1NeZSI\n",
+       "wWExJiGqdJ1p++DgQLVaTffv308Le6PRULvd1u9+9zv95je/0d7ent566y1J10wHBg8uG+pNIkT6\n",
+       "he9gVQ4ODnT//n21Wi396le/SvInMJtUDp7G4ezsLKVmGI1Gunv3biFLfrl8fUwNzI10HYD8/7H3\n",
+       "Jr2RJdcZ9psDp5yYHIs1s6pbPbpbguV2t2XBhlcybMswDHjjP+Bfoj9hLQwv7YU2hleGBcmyBQuW\n",
+       "ujWrq4fqanYNJItDzskpM78FvyfyvYeX+gADH8oLBkCQzJv33ogTJ87wnhMnOELm+fPnunbtWppf\n",
+       "5rXX62XQquFwmHgV+nkZg9FolMoODIfDjHyglpTnW9EvQsHUGdve3pZ0nlvFPLJemF8M1tFolAw6\n",
+       "52EQqfn5eXU6ndRPakjxHa+6Lk2rsEej0NcNfEe9sLzGPNAfZFrMu8LA8qOY8iIH7nj493y9u0Mb\n",
+       "nW3+J18MY8EdZl9zUe5LU0PSc6/4ftRR3m/KYWD4eA4X33MHmXdjCLGuvOioy2r0jL/bHVZHcV0f\n",
+       "RPQvT3/QkLNuYLpsHo1GF4w8aZpbSl2yKBd8fhwsoXnZC3cQ3Dj3vjugEJ9Hvl2e3k79vvTKVbtq\n",
+       "V+2qXbWrdtWu2lX7re2FhfZiTBQvn8/ycm8us3zdMnUPK3oljjzkvdc9s+jhRE8DT8bvc+jb/6YP\n",
+       "HhbKS8b0xOIIDXtIzhv9AK3KO/QRjzYvHOf/e5/IwYgoHda6JyV6rk+E7/E2yGVxyNmRFeYh5k9F\n",
+       "7895hmt58+vzEceK5wV8DO0ZNyEmxu4oHp68lx2ABowbL8w9R57n26qZy3a7nRCMiHKenp6mzQBx\n",
+       "p8l4PNbz58/VaDTUbDbTLi/6zI9D7tI034Px+hhHo1FCB4H5QQnIX2LnXa/XS8jN4eGh2u12Qis+\n",
+       "/vhjbW5upv4sLy/rzTff1PHxsf77v/87FeV89dVXM4VGHVVrtVqpTADhS0827na7Go1Gaadhv99P\n",
+       "+U+lUimF+hqNRgZd4UzAcrmcDib2eSwWzw8JJgne5Qhz0Gq1tLy8nEEj2IHHcS7saINGVKSuVCpp\n",
+       "LsjfAl1yFODo6CjDl9IUkQINJFwI7aVzhJPiqCcnJ6lgKWOgsCqoMHRhbguFggaDgVZXV9OOTXjJ\n",
+       "Q321Wi2Vm2A9sCvw+Pj4QhK7y9cYwsvLbyI5n2seTnMZGWW1py04isM4kGFRXvBc+gPyTp8IZ0EL\n",
+       "R5cd4Xcki2f5mqbx/Lx0Ft6Zh9ZEZAu5m7eVn3yhvBMPGKOnD7jcjkg98iymvXjOKYiPo/eMg3fF\n",
+       "yuesOUeP+Bv9R64f7/PfzluuS/NymgqFQioZAdrL+JiPiEySo5m3EYL2Qiub+0RFZZgXxvN7vTk8\n",
+       "6kS9LByYd837gjL1PBoPsUVF6VClh/5iHz3c5de9pD2Lwhl/ZmYm1X+Ji9H74QvaG4ImCo4ovKLh\n",
+       "yt8xXMq9LK7LaBgNKRcAvqsGgXd8fJxykrwPeVuVoRswLcnePhfQLxrP0ZCHhtI09MGzyuVpzR/p\n",
+       "XLmxUH1buYeX83InPNRBKIZ8H86oQxF7Um0Mx7oQw3De3d3V8vKy5ubmUs6Sh0ViCLZQKKRjYhCc\n",
+       "GOCEBfr9fjKgyGfqdrva2dnRcDhUt9tVu91OFdEHg0FGIEtKdaSq1apu3Liht956S++++6663a4+\n",
+       "+OCDNBe/+7u/m+hNjRtoTV6UG48+9kajoXa7rW63q+vXrydDo9VqaTI5r8x+7do1NRqNFE4iVLSx\n",
+       "saF2u62zs7M0RsJwGCrk4EjTXXDkt1Wr1Qy/nZycaH19XZ1OJ9WEct6gDpjLDK8eDq9ieKEA4VUP\n",
+       "73h18Gi0HBwcpPAeBwjDQ7VaLSlMjDCMTww5Qm++K3MwGKTSD5PJRPv7+9ra2krhaY6iwSnyDSMo\n",
+       "KF93zivufPm698R/1pobC8hiD81JyhgnGL+eM+gyNG+9IpuibMOBzHNm+U7si38HI86dF5+bPBmH\n",
+       "/I+Gp5Q9DsodTO8jz3TZ5/LOaU5/XIchAwgzuoHiuoA+Rb3ndKNFh93H5M+hb8PhMHNMF9+LBhXP\n",
+       "i8ZfNMCgqYfMXaZ6eJMxuKGa116IIcXCcYZmwl3x5RlNlz0rzwDLQwz8f97B4nHvjwnweiVSdicB\n",
+       "fY3GGoT3Fo3GaCzEBFC+S2yYvvg1mD3G+Z0u0JUfZz6PCUevEPrljSV6UZ4/xf9R6UNfvuu5EG6w\n",
+       "8l0/c8nRPV/4Lizz6O3NFwbNhQ3KjIKYfM4p99K0TIHv4PFFSt9Go+mBsp4AC+JCfhHe/a1bt1St\n",
+       "VtP/PkZ/ni98+gfNdnd3tbi4mIRrr9dLAjFv+zD5McPhMON50zyHBcTm5OREvV5Pg8EgGfa8n2NQ\n",
+       "QCZmZ2fT+XW9Xk+7u7va3d3VO++8o69//etJAf/sZz/T+vq6ms1myvXy8/QajYb29/eT0QLK02g0\n",
+       "1Gg0MsebzMzMpOdKSjWbTk9Ptb6+nnK2QBzhH/f6QbLckIwHLC8uLqZEbwzpo6Mjrays6PPPP9f+\n",
+       "/r7G43EyzvDIQb9w0uClSqWiTqejk5OTlCMHT/nxJ2dnZ5mjXsi7QslCm263q0qloqWlJQ0GA43H\n",
+       "43RftVpVv99Psuv27dvp/na7rVu3bqUcqclkkkE52RVZKBS0sLCQ8uLo69nZWeZ4p+goueyK69Ud\n",
+       "zagXaKenp4nelL2gxXIK8H1EtZAD7uS5HGAMHnHgmTH6gHInh4bPHZHy8UQUzJ04v4f74jvdaHeU\n",
+       "nmuelI7ecBnPNf73HDeeyz2+s4659PpcHiXxvvF3RIgYX5TD5fK0Ph5zLCmdFdnr9XR0dJThq3K5\n",
+       "nDn4PebpOu1wJPk8LyGfZ/qZgP48jyxd1l54QU5XRLHD0RCKVn7ec3wSY2jPmxtWIAwIG0dA3GCg\n",
+       "uVJyJc67Y7jJ3+fM5EzMJMbdML5DAi/UPSAWKko2IjKXIXL+WTRE8CJ4v9M30t7H4EnoGIbc77tB\n",
+       "CoVCStiVsmfGcTCvJ3g7miVlC62x6NmdRPOQLf/TfGHxf9wCTL88JEphRTd0HBWAlnNzc6pWq7k7\n",
+       "gkCjVlZW0m44BBhKcTgcZubYlYDzcqfTyQjU58+fJ/QM2hBm8uZnUFH6gO9Qe0qahlxdmZCYPplM\n",
+       "tLW1lc5a29vbyyjVQmFaXHJtbU3r6+v6/PPP1ev19M477+i9995LY/joo4/0+uuva319PbPz7tat\n",
+       "W4nH2GVHTSsgen6q1WqmmjjoCYVHZ2dnk0G4vLyc6metra2lZ9AODg6S4u/1eokX6/V6GhPolBsN\n",
+       "jx8/VqfTSYg28gQeps6To06gJZPJ5EJtKuiIMejXUD6TySQV9GRdLC8vq1qtprAeuxelqYIaj8eJ\n",
+       "V6DL4uKiSqVSQis9SZnDqOFJisYiJ1izzj+OADhq4vLGx+jKj4bB4uuB3474ufJz58vXOPd54nKU\n",
+       "fW7E+qafKOejPCHsGGU7Y4qVy2PDOOE6eiAqf8bgqQk+bu+r6yEHCfh7OBxmdG4EMByRw/Fyg42G\n",
+       "ER3RLZ4BAkoo3YujuhM/Go0yjgmHbiMTfC4uAyy8FAwFhGNqBjrTZT3Og28K8qjG/0lEKioh6eKO\n",
+       "vTwDIFrs8XtReUYUKvbBITtnKPcY+a4r6Rg6dI8xD53y/x069cbkgWK4IYXgjQuL+1wRRzpGQ86f\n",
+       "63TLW1AufBwh4zs+bj5DaeWF9tglBs0Zhx9jUSwWNRgMEvrjyB8C67LcABeQ9Mtj9pE2eB+E86Sp\n",
+       "Uef5OI6UORSN8PP5K5VKqbhiXHiEJ/CSab1eLx0Ey04zlLCjt3hhLqyOj48TD5ydnSWlyMGxfM/7\n",
+       "ipD2XWhueJ+cnKSjZUBnaDMzM/rkk0/08ccfq91up/ltNptaWVlJBwR3u13t7u5KUipU+fLLL6tQ\n",
+       "KOi73/2uvva1r0mS3n77bf34xz/O5M8xBs/LefLkiVZWVpJBACJWqVTS4byLi4uZStuHh4epLldE\n",
+       "OqgGHnM2MIjn5uZSuNLzi1Bi9Xpdh4eHicbj8VhHR0eqVquJDhiE5AdijOzt7WVyvQhZYcDAN5VK\n",
+       "JYUoaV7eASQjOhGEZlutljY2NjQ3N5eKo8JHlUolGVv0BZStVColQ5F1we5Q1ujW1lYy4JgrR2Th\n",
+       "L+aC9XuZIQEPe9X734YoeJjPQ3fcVywW09qgf1L2yBKfU2kq0+C5mP6BDIGP3Mnk+b9Nf+UZPN4c\n",
+       "wY7ymXEyFpe/7sBznSgL44sABQ6f6ygv4hlRGZ7jMuiyCI8jWh7xkZQxbAgjM1cuF3mO53rmyWHu\n",
+       "h0as30JhehwN8oSadfQXR5LnnJ6eamFhQcViMcOHkRZ57YUhUlJ2e6kjNlI+9HvZ/x7LdCMEBssz\n",
+       "plxxwFS80xPOohGH8OVaREriePIQNg+5eYNB/HMWz2WIHEzkXkikny90f5eUzZ9y2lzGOL/Nq4qG\n",
+       "iqTkwWNYuEfkW7lRNggSlLd7K3nGCc+OxpmP0ekkTQtXYvR55XA8E2jgiA/3AS37O1C4hAJdUDnt\n",
+       "4MlWq5XGuLKyorW1NXW73XRkB7TEyKMf5JHRH/rEewhDgSwQKot1bwhpoeTceAZRYS5JKB6Px3r0\n",
+       "6JEePXqk1dVVvfPOO+mZGF+DwSAJS0J70vmxJT/60Y90+/Zt3bp1S//5n/8pSfr617+uW7duqdVq\n",
+       "aTQ6TxynL9TNarVaSfG7x8r5dZw/eHh4mMJpHJ3C2LwcATWYSqVpFXFQJzxnjFQ3vqrVqqrVqkql\n",
+       "kvb29iQpjRGkgxy6crmcQolSNjG20WgkgY6QL5fLKemeNeDH1BDm8/XN9v88VF6S7t69q1KppKdP\n",
+       "n6Zx0n8PbRLGrdfrajabqWJ8THyHN7rdrj766CNtb28n3vDk7rhJhfe5Yo/IOTSYn5/PGKfQlnn0\n",
+       "8wvpG0fe+FzxTi806++Dbi7PovyMCdXRAXaHBqM2bsN3Zw59EeV4RG/ynu/6CXkdoy8xlOp08Ocy\n",
+       "P25EQn+nNXSg8VnUGc7bbigxNz7XbpxLU+c30gT0i/C56yScAfrAdfrGuNA1rMNqtarhcJjOksT5\n",
+       "4X0e1o3AQp7u9XZV/uCqXbWrdtWu2lW7alftf9leaGgvxsOxTGNYioZlmBcuk3QBXuX6ZSiKow6e\n",
+       "yItHGa11aZpwnLc7w3dm0A+3onkm34khNA/rQReS/AjNxHi3NPXOYsIl93uOgickYrF7aMPv9/F7\n",
+       "WMjzFfx7oB6Mzz0sL3LJM+KuGI7RIL4tKW0/d8g5eoqEqEBhoI3nUPl8eg4KeVuOnPnWX0/mhWY+\n",
+       "B5E/Yq6df+4e29zcXPKUlpaWtLS0lLbF+8G+HrpkDhw98d03Hgahyvh4PE4Jm3yX890Ijzl0DX3J\n",
+       "83Ke+eEPf6jHjx9rc3NT9+/f13g8TqharVbTvXv3NB6Ptb29rQ8//DAViGw0Grp586YODw/161//\n",
+       "Wp1OR6+//rqk8519GxsbOjw8VLfbVbPZ1N27dyWdo1i9Xi+Ny+F2UEOOueEoHBALwqP9fj/xFfNP\n",
+       "Aje5dRzNAo3L5bIGg4G63a7m5uZ08+ZNSeeIzWg0SiECCpTS13K5nBK8fU15Mj+8xPomz4jQ7tHR\n",
+       "USZ3rlKppBBj3GzgoSA/mqNSqWh1dVXHx8c6PDzMhGZBNiaTSSoNQa4UoT7CIn5eoKMGBwcHevbs\n",
+       "WQZ54JxDD+nAUxQP5Xnxe47Qe0jQkVb+j6gL93jyM5EJ1qnv+IKXyOmKyc+g1NDfdYK3y1D5PJkY\n",
+       "9VhMGne9wXOZ7zw94zoNGefj9wOkY46rz2NMRaGhZxw5Qg64TPMTeIfZAAAgAElEQVQUiUKhkNn4\n",
+       "wXPJDeNdcTccz/Z0DX5Dk5mZmUyJEs+Roq8+DsYdU2FYn41GQwcHB5mdxsgEohuMA3qTunFZeyGG\n",
+       "FMrpMqOD/z38xvUYMosQZzQAYj6T3wdjeE6P95GJcKPHq0DHfvLMvFCfK3ZPTqTPl+UnMR5PRI2L\n",
+       "zePgzuRudPhhm/78crmcyTuBVozDv8dvTxr3PhMKYes4Y3aa+o8rG3YkUaPJc0HcQPSQKELZ87Hc\n",
+       "cKFPvguHZ7oBzmL1a7QoZKELMLXTF8FLnonPh4doKYWAUV6pVNJxHhhTHlpzge+hamBtpy3t6OhI\n",
+       "tVpNMzMz6cw4F3RUDC+Xy5mzuNhtt7KykujKmXGtVksvv/xyygH65JNPUj//+q//Wt/85jc1mUz0\n",
+       "7W9/Wz/72c/S+7a2trSxsaHr169rdnZWn376aXrf6uqqms2marVaypnAEF1YWEjG7vHxsRYXF1MY\n",
+       "ajAYqNPppHDY8+fPUyiAeTs5OVG1WtXCwoKePHmSqpC7oGXnWwwZd7tdNRoN3blzJ/EpBomH3uAp\n",
+       "cutI0vZdqXNzc+mYGkK3zl9UT2fsbiwQNiL07ZtAMIow0n2LOs5Ds9nMhDAItUwmE62urqaDpKEp\n",
+       "/YG/WWvwt6Rk9B4dHaXnDgaD5MwQBvPQPXK2UqmoXq9nTifAWYoymudgXGHgMYe+/d/5P4bnvOQB\n",
+       "OoS16u9zY47m8pK1F0M9Hg5i3vLe50Yj11y+eGM80RBivA4OxGcQBsbZyDNIXZZAN095iLlVPge8\n",
+       "z/O5XPZQwZ5rrudxeumnb0iIvzGS3NihLxFEcAfa6/ThFDLuYrGYSs8cHR2lXansRqY/MfE/LyTr\n",
+       "7YWVP5CmsWU+c2MoWuBuIPl33cJ2tOeyez0eDaFhnFj+wBcgzyQJ0S3heK6Rx8TdMCKZj8mPcWhf\n",
+       "tO4JOnLkAt8RIJ7jaBFjcis7Wu4umJyJIxoXc6rcKHNDB6HIFnNPOK1UKpn6Hb6gyI8gNyXW0eIZ\n",
+       "9IexkBzoBp40PYeLz/0QW77vgtvH54LGBS3KiyRs956ZGwS078LkXkfEXCnym+e6kVmpVDJCGCEq\n",
+       "TTdFYJi5cVgul7W9vZ05FobGdmKEjOcPNRoNXb9+PR0KPBwO9cUXX0ialkTo9/s6OjpSp9NJ1+7d\n",
+       "u6evfe1r+uyzz/Rf//VfevDgQSYBFPRnc3NTZ2dn6Yy6L774QpubmwkB4+gVaXq4cKfTUbVa1eHh\n",
+       "YernwsKCer1eMmYWFha0vLycFDv8B/38oGTnW+bX+ypJL730ktbX17W7u5sM3mLxPHEVXn769Gmi\n",
+       "69raWkIbG41GJgF8PB6r1WppZmZGzWYzs2YpU0GJh9FoenSS57dAQ+dv1kChUMjk0LCearWaBoOB\n",
+       "FhYWUu7c4eFhQhpJuPdSIyA4c3NzCZVi7unHRx99pF//+tfJGJfOdwqWy2W1Wq0L5T0iCu3rwvO/\n",
+       "kN+OxtMvvutyEdnA+o1OKHI9yjzkjxtqPB/+QH66QeDvdkXrxmNMRI9Ik6N4bpAgV6MT50rckRzf\n",
+       "wYlOcfni80g0g/FHnRbpTT5qdL64lmdw4pgwFjfc3RjyvkAXdKkbVjEq5QBFdCjcWIJezCEODs8g\n",
+       "lxEnAzS20+loMBio3W4nECCijpcZvdILMqQQbl4dGqXmFnQ0iPw3zY2qy8J3Pil5VqUbINLUyMqD\n",
+       "Q52JokdAX9x7YUF5COUyWNhDV+4J8ByMg7goHLXxcTBmxuf9QcAgkECSpIuwebT46SOM70JiPB6n\n",
+       "itNUUJay9bAYX0wM59BdR8mkqeERz4byMGM81JR+etjTFx8L2o0f5oDfEYJGgLK7LhpZ0FVSQpbc\n",
+       "S2auKpVKpj6V0xtjMHrJrJXRaJRJoHXUz2FsSaleEPzoyCOKhERdlB51jIDSt7a2MrA6zxiPx2q3\n",
+       "2/r4448lSf/wD/+g999/X9vb2zo8PLwAxX/yySfa3t7WX/3VX+nOnTsp7Lezs5MMZ3amgqy4MUsi\n",
+       "/r179yRNE0cRsLVaLe06o62traWk8MXFxTSPviOS/32HIyjZw4cPNRwOExK1vb2dDKnBYJCSsuE/\n",
+       "aludnJxobW3tgsFPmNWVQqVS0XA4zChGxo+xzvNdLrC5wQ0FD+d7EV+MJp7dbreTMePoAcU74Tl3\n",
+       "YpCjn3/+uZ48eaLZ2Vldv35dd+7ckTQ9KJn3sUNXUtrMwRp1p4KxoUzdWMI4gM99rKyZPAeTPvj9\n",
+       "0fHOQ3s8zCZdlKWuk1wnuEEQ54JrbuRFHYSh5P30Z0QjC3q5LnFjxdd1XuiL70cnAoTb6Ro3C3na\n",
+       "RtRjbvC50wK6Sd9iaR2fozz9nocGuc73uYevmV93LpGrDqLQz8XFRZ2dnWlvby85kI6AeXQpr70Q\n",
+       "QworGQUhZb3DSNQ8WNIVTbRw/buOcknTxQETQtTotdBQwu5d5oWrGENcsL7wfBE5IzIekB9nHGcY\n",
+       "qj5HONQ9qZjP44apI1NUTHakxoWU09f74yGFuPBR9qAEvssKo8qVZJxff5eHEjGGIqrGIuH7cUGB\n",
+       "HlLMLQpI6O4GJgsQNAuEjO9HWrkwdto42sd15/E4RvjBjQefY//tUHepVEo09R0yGKSE7bygHcq7\n",
+       "0WioUqmo2WxmtrljdJ+dnen58+cJIVpfX9f9+/f19OlTPXjwQF988UVCqbrdrr773e9mlCdCqtfr\n",
+       "aX19XX/yJ3+ibrer5eVl3b9/X9J5uHB7ezsTDnNjYX5+XoPBQKenp1pdXU3ywncyIRN6vV4y9Kgb\n",
+       "hWEKX0rnQhM0HBkEbywuLmp1dVUffvihtra2tLe3l4wxPHz3bL2AYK/X0/b2tubn53Xt2jXdvn1b\n",
+       "klKxVZDGWq2WUSz0BYONNcOBxI4MOArhvO/PPDs7SzV0vCSH8zwGOAcsQzPCUp6GQD/ho8XFRb31\n",
+       "1lsqFosp7Ht4eJhqXVFSwQ0RR4fc+I9r3sfqDnUeCkLzQ5kZY8xDik65p1e4PPdr0RH250MTb76m\n",
+       "aY42RV2U56y7Lotz7LqFZxP6d0cRmmBQudz3vkJj+kzJC0nJUXCZS6Qhponk6U9kH+MnxcKjHXnp\n",
+       "ON6vmEfnY/P38CNNkTGMwIjSQ2MPl/uzrl27poWFBbVarVRKBRpFw9rbCwvtIQTpHLA4CEqeZR5D\n",
+       "TnzGdy6D3pyJnInzlC/XWHwwsDOvCwKfKDxNlJ0vYO7xYmSeK+EG1mUT5go3b4x+rxsXLogiIsU9\n",
+       "k8kko6QQYpFGLtjwIFGAIFInJycJteFeknqHw2E6Sd5hW0e3HCEhbIWX7bzB3EAT7qFRP+gyHuCd\n",
+       "k8k0eRyli6HpwtJp6CFO5wueQf/8fkcb3SCiinS/38945v5OTwz1CuhOtyjs+RkOh6rX68kgWF9f\n",
+       "19LSkubm5rS8vKzV1dX0TN5Nrlqr1UrXqIL9+PFjHR8fq1arZbbv00/6Tu2ir33ta/rnf/5nXb9+\n",
+       "Xd/61rf0wx/+MOUr7e3taXt7W2tra6rVahnDnjPkMIYoh0A/q9Vq5oy9vb29JPxWVlbU7/e1vr6e\n",
+       "BCcGg8P/rAPmaW1tTVtbW/rkk0+0tbWlk5OTBP8fHx9rf38/vcP5u9lspurr7XY7U3/r7t27mpmZ\n",
+       "Ub1e19zcXKb6er/fTwavo42SEj0KhUIyqHi3r+3Z2fMzAb1ulTRF/t2Bm0wmqlQqiWcoc8A1UM9S\n",
+       "6bxWlssitq5vbGyo1+vp0aNHevjwoSSliu2lUikpYS8O62vC1wUKDKXNPHMfSiwiNr5+ovHiKLEr\n",
+       "Y5rL8+g054X3aK47ohGV1w8pq1Ni//yZ7tzRMHrduPNnIJ9cDvh7MaJ8vNzrBpwbGsgRZDL85jly\n",
+       "3i9ohV5wB8XfxbWYfM+8eppFpE0e3fjfIz00dBkbHBxVjYac1wpkLeGY8pvNJXnzm/py6ZWrdtWu\n",
+       "2lW7alftql21q/Zb2wtBpGIYRTr3EoHwpWyYzi3ePGuY7/O9COu6BexQpSMMjryAfLi34Ie6ekJk\n",
+       "DHvRp+gh+PvjAYz+DPrnniDjImnav4MlHT2bGELKg7h5LiiEJ56DAGHNx/FyL3A/38HjYOs176PE\n",
+       "AIm0bFmHbtwbkzNp9IW8J/rioTkPJ0lKOQI8O26rZl79fXg5EeHxuQHBil6mh1N9bqWpxzM3N5fQ\n",
+       "SMJpq6uraVwk3Tp/gyzxOeE0kCLfbekhI77f6/VUr9fTdn3QJGjFOXHQrNPpJN4i1wKeAtUajUYp\n",
+       "X8jnIo+XqdYuSd/4xjf0y1/+Ml0DceQ+RxUJNbAb1HOZOJvPER08UNqtW7cyyKgjJLybfuB9DgYD\n",
+       "PXjwQHt7e+n577//vqTzbf/j8TiFNwqFQipI+ejRI62srKQDmmdnZxOtoNft27fTGLwEByUH5ubm\n",
+       "Moc2exXpwWCQCZl4jhDoIfME2g/qRC6gNEVx4fFGo5HhH/hyfn7+QkHRfr+v58+f69GjRyn06QUN\n",
+       "QZ3Zru6IqyO5jojQN5DgvDwZR2Sct8bjcUJdPKzGu5ALLhNiuDFuTuJ95MS4fnG94iiP6zPChY7y\n",
+       "ECL1SEecQ5c7NO5hw4HrLniDNY8c87Expx7qBIl1FNuTzPPSHqTz0DU5qjEq5KkXrGNPGifKwPOj\n",
+       "XM3Tz9I0r5aiyR7S4zehYPrNOgDh96OaYi6xj4WixZ5o7ykdHvbMay+ssjlE9wUuKR1NEaE6Wh7T\n",
+       "+zOjoRCNMY/d+gKI7+NZTFaEuJnkmAjnCXdepZi+8jyH9z0/wPN+fFw8mzwG+uGhIs87wvCAKaBp\n",
+       "jEU77X0MMHDMEXADggXt4QgXFJyTJCmFH+iPjwNY340dF1okqca5QKD5dQ8lukHk4QVXpIzf+YT3\n",
+       "RLjZ89ycd7jP5zEaYhHCdv7D6OHvvIZwZ2cfY4SWsb4ZCobkXg6klc75qdVqpcOSS6VS2gZ869Yt\n",
+       "zc/Pa29vL13rdDpprJzfxgGyvrXYeX00GiVF/OMf/1i/+tWv9NWvflXvvPOO7t27p0ePHiUekabJ\n",
+       "np7PUSyeJ0k/f/48JTcj3I6OjlLdKencAKrVaommCwsLqcYU+W5u2LlQ9TDUb37zGx0cHKhUKunj\n",
+       "jz/WBx98kIysV155ReVyOdHl5OREL7/8sqRz+H9vb0+dTkfb29va/H9rbdHXR48e6fT0VNevX8+c\n",
+       "N4bh7IYBfMORMfCTH6jteV3UrfJEe2QT8+LhDPKgeJevXwwPwu+04XCojz76SA8ePNDu7m46UxDD\n",
+       "ldINVHf3texhcH77eoOPvNwD19yAiakL7iT6Mz30XigUMgc/j8djdTqdjKK/zMjEmXCa+t9OH2Rm\n",
+       "dKaQ2R7ii4aIO/yuh/xeNz793Dv64sayPwPZFx1Txh7HwD1OX0kpzNdoNC4Yux5mhA7uQPIdZBH9\n",
+       "9ntiaNNDdhhwOOzoi4WFhcRzHr7zvlHeRDrP/8TIQl94TnHcUAVtvL7fZe2FIVIQz2PVoEJ+sCbX\n",
+       "sHLzkCiEUF7CmjOpeyckf/oCdwHuk4G3KCnFXrGCowfucVs/qd29n3K5rHq9njEanNHjDgHGHhOP\n",
+       "/dnRO0JY461FoQADe66FX3N6oZSd3ixCf6YbC3hnMN/x8bG63e4F71SaJukTQ8+L2/vYnE5x518U\n",
+       "ir67xQWy13rxFvnMkzw5kDVu3aZdlr/mz/YaJ7Tj4+N0JhzfcXTT+cHfwaGjlA4ALZHODQm8Y/7G\n",
+       "WFpaWkrjQgixqwtEolqt6vj4WM1mMxk9JHNjsMzPz2dydpg/R6do3/nOd5Ln7GjkYDBISeHUfPKD\n",
+       "h9kZRzI18wLyMTMzo4ODg4zBDW1wJFqtViqTAQ0lpdyc5eXllFD/ySefqFQq6YMPPtDW1pZ+53d+\n",
+       "JxmprVZLtVpNrVYrGU0Yi3fv3tWtW7dUKpX05MkT/epXv0oG7+///u+nk+wPDg5Uq9USqsjcgrrF\n",
+       "/BKvSZbnXIGq4ZkzF752XUZ5DguKxJF4EE8SdWnPnj3T+++/r52dHXU6HVUqlQwC6geQ+1rmnf7b\n",
+       "/3Y0HIPAnUFvLjOglTs70aElP4j3MOeTyUTdblf9fv+Cs+vJ3t4cwYnr1xEndILLLzf2XI77zjZ+\n",
+       "oqHoxlV0zBijb/GnId/i55PJJIPEuoFGf6GpG0yg0/BpnpxDx4HqMA84C1HP+j3+Hh8jMr5cnu4c\n",
+       "9/NQ0XMur73vp6enSUaNx+OUG+rGMu8tlUqpbInLeZ+zy9oL3bXnDMdEoWQxpqRpnR3PuvcWEwlp\n",
+       "DpdicSI0CAPBOJcRih0o3IfgcYXoC9+VtBsEpdL57iqu+YGaLEL3XC5D3mIisu/gi54C48vbzgvt\n",
+       "YWIXWg5rw2ARwcLjdIMXeh0dHWXqKkE3qh+XSiW12+0kfKXpAbwxJIrR5mEBNyiZW0cQI72id+ke\n",
+       "eKR3Xo0veMoVU0yadQWIUnAlRf9coNBAE/CwIq25ZzQ6rzHkByyDUMVdRihqR2I9pLy8vJwEVL1e\n",
+       "z2y5J4zW7/d1+/Zt/eQnP5EkffbZZ3r77be1sbGhg4MD1ev1lGwOneEdRyRefvll/eu//qt+8Ytf\n",
+       "aG1tTWdnZ9rf309zgeCl7pivmcFgkNBFDFnoMjc3l6rAU16Ae09PT5NxiTfruxuhO8YZu89KpZLe\n",
+       "f/99/fznP9d7772nhYUF3bhxQ9J50vzMzIz+5V/+RR9//LGePn2q3/zmN5Kkhw8f6t1339X6+rru\n",
+       "3LmjV155Rf/zP/8jSfr5z3+uP/qjP0qCOu5Q9Lnz9e3hFb7POoXnMX4iz7MuCZU6MsAP6zvyqMtl\n",
+       "5vfBgwd6/vx5CqXWajUVCoXMRpN+v5/ORCwWi6l2FQY4KCbvYoyOFGOUSNOyCcw5JUcYI84qitWN\n",
+       "A75TLpfVaDQya351dVX1el39fj9zigLoBXTx8J3Txku50G8+Zx7c2XKjwneKeXg3hq0YXzQQ4QGc\n",
+       "JJ7tzpbLGZqjfN5iArsjatHoOTk5UbfbTQgNvMVvZJTfE/vhOgq9R+kPd5Sgqzu8DoK4rPPGHDnC\n",
+       "76Hr09NTNZvNJAuc3ugexu0OVwydxvbCdu05AiVllQLXvRAewsZDPvzO+0y6WGsCL1qahr48NMH3\n",
+       "Hd1BIbjXRtjL86akbKVXR0P4DMFXqVQy9ztyEE/kjjuySqVp7YsINTqS4SEm6BENO98h5iG7PEHi\n",
+       "hgVGm4cJ6Cuxe+8n9+EdcA0GR7HAyDHEF3Mr3HBzr8EXqUPTLI5ocLv36MYp70XZRGMLgcii5T6n\n",
+       "l3+fvyOs75464QcMhHgf4/bDWd2QZY0QhuNoFOZydnY2ha8kJQPliy++0K1btxKywhw2m00Nh0O9\n",
+       "9NJL6b6nT5/q/v37ajQaunHjRiasu7OzkxwM5pJnbmxsaHNzU5ubm2q32/rss89SHSlCZeQgDIfD\n",
+       "dEQK9OI3/XLazczMaGVlRd1uV8fHxwkFZrfe2dmZlpeXMwUrWRPkUOzu7qb+7O7u6ic/+Ylefvll\n",
+       "lctlHRwc6G//9m8lSffv39dnn32mTz/9VI8fP86EZD///HNJ0le+8hVtbm6qUCjoq1/9qiTpgw8+\n",
+       "0MOHD/X2229rfn5eS0tLF5xEZJ3PPeMDBfDDtTGMcfScLwj9ECZF/knnSJ2HbvydHimAr0Hqut2u\n",
+       "ZmdnVa1WE6rIu7gX2Ug4zeuWeb4WcyBNHUBkgpdeoY84HnHdsw5xin1nYAz9uQFKIVHCieyuJJRK\n",
+       "OYFYVsEjGB5tYP3RJ1fCjizjgPnuaF/bLqOcBp5H5n2JqJLrL4/CuA5ANzny7yFYL9/ihvh4PE5A\n",
+       "APPr749OkOtQ5j2GHuN8Rn3J54wlpn3grLtByfy4AQzPtVqtBJwUi8WEqtKQlS7XvX/RCPX2Qgwp\n",
+       "GI7fkpJlSjjKjQK207pXGhebM08MFzqC4JPpC8Ohcb4vXTw13EMYQH4OHzsjucVM3+i75zu4V8Di\n",
+       "d0YE8kaZ+PcjeuNWNJ4aRmgst+BGhi9MDJIYuqM/0Jjxe4iO552enqper18IQRwfHydUB0OQd7li\n",
+       "9sXtc+lz76HSaDT6onQjUFJKXHTj1BUUhlwU3tAGOju9oXOEgZ3fvN8+Ni+sGIXGZDJJStKNVGiK\n",
+       "AUJoDCRqOBxqMBgkVMYRuWKxqKOjo1Tn65e//KWePXsm6bzEAfkxc3NzajQa+ou/+AtJ0re//W1t\n",
+       "bW3pS1/6khqNhmq1mjY3NxNNd3Z2dHZ2pnq9rnq9nkoc3L9/X/fu3VOn09HTp0/V6/US3L60tKRr\n",
+       "164lujv6OxqNVKvVEm+4MmEeQDmWl5cznigKazgcanFx8QKSDTpULBa1tbWV5uxHP/qRlpaWdP/+\n",
+       "fe3u7urTTz/Vt771rTSODz/8UL/4xS/07NkzFYvTQpeDwUA///nP1Wg0VCyeh+ReeuklSdK9e/f0\n",
+       "61//Wq+88ko6XsdzOlhPzH9EQz3M7kfkgA6VSqULqRDlcjnV7skLpzgdWIcoUt7p9zWbTRUKBT19\n",
+       "+lTb29uJ/xk/RgnozmQySX+jzHy9xPXhoRiUImhVXiK6RxSQAdDUNwNEFNuPTWFzB/dVq1X1+/2k\n",
+       "b7woI/IcerthjhFBGDY6Zr6BypU0PI+jAxrMMx0Zd32CIYAcI5KBzAAphF5Rz3K/yz/nRQ8pusxE\n",
+       "X7KO4pEthP7iGvZ5j/wd5SjONXLO+d83Q9Fvfpw28EOkG7UM2+126qPraNflUa/6JpC8dlX+4Kpd\n",
+       "tat21a7aVbtqV+1/2V4IIuWQs1d/9nCbw9g0UI3o9Tv8L13MJ+KZviPIUSrPF5KyOwccAaHF/BgP\n",
+       "J+FF4Gl4c+TGUSU8Ct+Z5mPhWt72T/di3RJ3Sz8vEZ93eOl/9xa57igJNPWwnoc3vG+S0sGZ0NIt\n",
+       "+5jkyfwyx44A4vlERAqP0hNy3fNx+sR58FCuezRxnvLi+/CLe3ru/TmdHBVzT5/vcM0RRK/Czvc8\n",
+       "1OD8xvyCzDAOjioBWYq5JtJ5AvH6+romk0kKb2xtbaXdcXjvX/7ylyWdH0z8T//0Tzo5OdFrr72m\n",
+       "V199NSFZjUZD165dS5WvFxYWtL6+Luk8Efv4+FhPnjzRs2fP0lEi0jlac+/evcwuGkcyCEWBfsQd\n",
+       "i51OJ61tEuThPRLX+/2+6vV6Ji8JBGN7e1snJycp1+lXv/qV/viP/1gzM+fHpxwcHOj73/9+4gvy\n",
+       "M8gTefz4saTz5P5ms6nHjx/r9u3bmp2dTXlXN27cUKvV0uHhoTY3N3V4eJiQHPg77mBlPfDseGYi\n",
+       "oT5HK10mgu6BbHhI38PxyIDI3/AqIaz5+Xk9evQo5eiVy2UtLy8nmvouPg//QG8PpUfe95BgRMsc\n",
+       "aSKEJGWPlfFoBvcREuJvD4c7CgiaBL3n5+cTUutFVRkLGxg8dQGEKk8OcT3mPfpcez6lh/24j+9E\n",
+       "BAnUDRkIj4COsf49GsH3Pf0ElJOwPvrA0z3Qoeg3T1iP8+zz63nEHvWIcxzTQTwqQ19i2BCkymU8\n",
+       "vO5IHrLUUcR+v69isZhJaXA6Oy8yv//nyh9gMBUK091w3nFCETHmy9EjfkafQ5F5hhSMErfHe95N\n",
+       "vIahRp9cCXk4iZBZzB/yyfSdK/Gdnn9EvN+ZTsqe+8e27XgfeRRugNDvaHjm5R9Eg5V4MCFMh4H5\n",
+       "HAZ1hR93wnlyM+fLkQOWR1P+R6B5X3x+aZ7LFYVVNDDdaIcmUbj7vd588btAQHAyBr93NBplFq40\n",
+       "FaCRb8jJKBTOD9eNMXrmAJ7zZGlyF2IomvpP8I7vpIH/hsOhDg8Pde/evZQLMxwO1W63tby8nLa0\n",
+       "M4b33ntPZ2dn+vd//3d99NFHeuWVVzIlFZaWlpLBs7Kykvr57Nkz7e/v6/Hjx3ry5In6/b7eeuut\n",
+       "9MylpaUUhpKm8L6HMqPxSSkNkvNRHnwPZUHYC2HoczUajbS7u6ter6f/+I//SOMYDAZaXFxMPMp9\n",
+       "MVS2v7+vd999V5L093//9+p2u/rLv/zLlEtG3lO/31ej0dDDhw/10ksvZUoLHBwcpBCtK3v4pVQq\n",
+       "pVIHHtZtNBrpvEFCQjEHxY/icp5GWZFAy/ji8U3+vrm5OdXr9ZTPhiJyxYTRxkYhngv9Wa9x3dDH\n",
+       "qIRZYzzHQ0bu0Hi4SlKmz9CQZ2I4eWI79CZv1WVv3LnlxqafBoCBhbzyNAnvL84BdOEaITMvbRNz\n",
+       "lLgG/VyfeIoFaSOlUinVLHPni+YOKc8tFqfnevq6Y7zu8PC+4+PjdPg2a5Fxs1EEg8flmstn3uU6\n",
+       "3zf+uHMJ38VcP7/mNPV15Ruwjo+P01xwYHiU/d7XqCe9vRBDiol2hMgXE9Yh3hCM6YrbGc7jtj4Z\n",
+       "fO5x2ohIEWuPgojFhzESvQGu8bc0tb6Pjo4y3oSkTN/jgpemQjNa1M4o7G5wA4Ux+f3+uQs70Atv\n",
+       "LhgQGp6T4P3nN4gagjYib57jwPtWVlYyuzUdiYzootMIpU8phWjUFAqFzELNi2NftghiYdTYZ3jO\n",
+       "G/wAvZwH3PvyhF/64M/wnTrulcUcQBfMeKmeiAmCM5lMMsUsOR4nb9wuXCaTiXZ2dlIS98HBgQ4P\n",
+       "D9MhwZVKRTdv3kz9/tM//VPdunVL3/ve9/Sb3/wm9XN+fl6VSiUVnPzss8+SkHry5In29/d1cnKi\n",
+       "27dv691339VXvvKV1B/QlFarpeXl5QsKHTnAUTfSeY7QYDBIGzcoxeA7Qbvdrmq1WkrWdidqNBrp\n",
+       "4OBAJycn+sUvfpGOOnn99ddVqVTU7/cTyuJzgUGKwUUe1BtvvKHJZKIvf/nLevz4se7cuZM8fZQi\n",
+       "R8fcuHEjGYutVivxDgVOuc8RtF6vp2azmRK4WXfkwjgCTm0dFKLzP7vckE2+BqIB4M6ldL7Tk/wh\n",
+       "Cp0yHxsbG4mnQUYZB0izI0H+Hl/HGCTwKfweZaYbQO4w8kwpm4zs73MdQk4P/IbxC784wuyRCuaF\n",
+       "5lvyvbmBQ1QBWUCRSt8BTd+9vlF0yAqFaeFadCYOlXRuFKD7Ym4PRp/nULmMIwE75g9hJPouUd6H\n",
+       "7Op0OsmA5l6KDzO/oEvMJfLS88Gk6Q5NRxldB7sjGuntxtSHDXsAACAASURBVJk7EY4oxQ1bbhi7\n",
+       "0cr4fpuRJb1ARMo9CulixVk3INjaPBwOMwwmZRWYL0qu8TlM7tAh/18W2suz3uME+sJ0ryomoktT\n",
+       "gcq7HT1yS9qZGOHD7+i18ONGGPR0dMsNQ677ova+02Agfyd9wphwpnODiPGw2EjC9Z0qjkB6crfX\n",
+       "BXEvJm6P9fsvS6CELg7xEk6IgpdrLijj9mApm+gaF58bsD7H7oF7P/2dIK7+LgTfeDwtSMccelFX\n",
+       "N4Jpk8kkCaRCYbpV3UNGo9EoY4DcuXNHn376qdrttk5OTrS/v5/CG3fu3ElFJf/8z/9cT58+1dbW\n",
+       "lqRzo6XX66nf76dyDI1GQ5L05ptvSjpP1n7ttddSbSfpHMlYXV1Nhsv+/n5mmzNnyEFXFF0eAhnX\n",
+       "BmfNYVR6An+/39fBwYE6nY4ePXp0wRFjzbgh57skUQhUPaf90R/9kf7xH/9RxWIx0W0ymSQkR1LG\n",
+       "ADk5OdHe3l7iQUdquR8E6+xsWguMHYeOarrCdgXrCsnXILX0PHTmzkC73dbTp08Tz/iGB0cnpOkh\n",
+       "0SBkvqYIwzm6HCvi8/fMzEzmnE2cEeSzjxHHMk9ex52wXhEe2cU73bByB8/TN5hz5Gh0SB0dc6PH\n",
+       "E8BpjnK54xVlKWOIpWSYI+iKYQzdovHhOpF5B0XyNcOuV+q3uQHukSP6HY0Nkud9HN6Hy5LF+cwN\n",
+       "d482EMb2ufVdgG6sOhLlfeNadBRopJqA8rtDze//c4gUSiYS0hefMz+xZI/7xu2sLFAnqiv7aGU7\n",
+       "LMr3WNzsLMObyVPmjqA5oR25cc/fDUWHLaUs7Ivw9jFwD4vAFzdbi6OX5EYZ/fawIH3CSHMm9v7g\n",
+       "1frzPVxALgn3OXM3m810NAjC1xeI9xXYlXH6YnMB5kYIf/v4ozFHv3wREd93oRg9PubQaYYScuM0\n",
+       "hkn4wYhyzxEh7krTeQPkxPMBWAduYDqfQlfmw9cQ9IlGB8LTFQgGymQyScgH92P0/PKXv9TGxoaW\n",
+       "l5dTHtTdu3fT+Hu9no6Pj1PoAeFHXaHxeJzqPnldJL5br9e1uLiYMfjw4ieTiWq1WmbHkPPJaDTS\n",
+       "wsJCBlGoVCrpfz865vj4WP1+X8fHx+noFeaCMTSbTa2traVipDwTfigWz0OYBwcHkqTvf//7WllZ\n",
+       "0RdffKHV1dULKAo7KJFhoDWSUj2jer2uyWSS6i+BwJXL5zui9vf3M2jG+vp6urawsJBZe6QBsKsL\n",
+       "HoanWUuOdji/cw0Dm12RXvdrOBymOlPD4TCFSqJDRugZ5cy7oA18jbPgzi4/8LAr2slkklkbvr5x\n",
+       "HAhN815Hn0ajkSqVSlpPjs5hqHitp4hO0zzHh3v9OciEqLxdt0QDzA03xu10Jf2hXC6nuXc0n5Aw\n",
+       "/EZzhNyPQGGMR0dHmbUfHVIvT8Fv5oS5cLDBU0EwpqIsykMZoYcb3YzPneMo23m284kbdYwLmvJu\n",
+       "6O3z4EZtDCFe4IFLr/z/2BB6UlbxMwFucUtTaxGh64odr8qPJXBF5YiOv5fPHcXy0J6jHHzf+8tk\n",
+       "upXqaAMLzwUHiIUngvIsxl0qZWuUuBeHAeoGT7lcTudaRYQvGlPOcBhJQOmEOaXp6dmSMh4a4/Dw\n",
+       "q38H+o9Go5R87BWcCTe4MSZlK7tTediRHN7nc0Y/+U1/XNjkhXt5Jt+PixR6uWHngsLDCf5c984Z\n",
+       "kyspV3L8RslzJIlvn3ZepD+VSiUjFOfn5zN5BG7U48kiiNyoo7I4Qmxubi6hHw8ePNDrr7+e0KTl\n",
+       "5eWkLLe2tvT48WO1222tr69ncrNqtZpu376dnAnQCeaB5F3oSCixWCympN5qtaqDg4PE/zdv3kzX\n",
+       "YvXm/f19zc7OamlpKVVuh57MB+9nbhgHhhly5fr166k/ID7UrFpZWUkJ5V5uAJSJd/zZn/2Z5ufn\n",
+       "tbm5qddffz3jOC0tLenDDz9Mxmez2UzXZ2dntbGxkYx0H4PnQYE4gWRhmPua97p7jhxEBCbyyGUe\n",
+       "9/z8fOKDVquls7OzlPPS7/d1dnaW5pRilh6miUoxbrDhnRhtLtelafkDlKajV55aQCK0ywMMtOhc\n",
+       "R4TaDQP4pFAopDyvuA5dznj5FkdUXC64jKO5XGJu8kqycJ2+er0vngky6AYDxpmffZiH5HtaCDRl\n",
+       "Tuv1esYodjmSF0WCj+AP3zwRUXJaBBOcT+EXN7ScNvBxdISjHcFapbmj4O/jOfQJPe28mJc2kmh6\n",
+       "6ZWrdtWu2lW7alftql21q/Zb2wtBpNyLd4vXEQXgNGkK/wJnel4SrVAoJOjYw2uO4HhYiP95t4eH\n",
+       "gMTzPDXPrfFkRN7n/XcvgebhnejteB6No1LQBuSId/h2V+D2mMgHbfCmHHnxcKKPw61vwlTe/6Oj\n",
+       "o3S/h1l91wvHMNDHuG3Z4XYPzzE+35Lt0GqEjfGU3LuAhngdjN+PifD55FnQzcNh7s2BinmxPM/P\n",
+       "gC/wjguF6Y4ovChyCDz05Vv62YXm+VCMHTrAU3iOc3NzKcE3zj88QEjNx8q1+fn5FPqan5/X7u6u\n",
+       "bt++rZmZGbVarTTGlZUVraysaG9vL+UreliXM+SYM55ZLpfTLruYe3F0dJSQlGfPnqnRaKTk9u3t\n",
+       "bZ2dnalaraaChYSaqLxO+KJarWowGGR2RFE2oFarpZQASemok2LxPIcPpEg6r9B+eHio69eva25u\n",
+       "Trdv304oFaEsX3fuJd++fVs3b95MHi9zcXR0pPn5ed27d0/j8TjtCKQtLS2pVDo/ONqPEDk7O1Ov\n",
+       "11OpVFKz2cygOu5Be94SDbTcEU34AATHQ/SMwcPo5NCwThiTo1p+BA3PJeXAk8ZBl0B5PORNdfKI\n",
+       "EoCaOgpCA1FnPbLBh77Ck65H+Ix3kqzveWYRRfbjRRxB4TpzwTuRYS57I2ri9HYky/OgPAISc7I8\n",
+       "1IesdmSJ++hLDH3RJ89f4hp5WRy7xPh9HplLGvoZ/vTSNi4foJXrFubIdSp8RfFpaO85rdAgpi64\n",
+       "TvFxQxMPBTpPOQ+A9EJz+vt/rvwBE+KJfvGaNwSBJw56UqkzkCsan5woNFxBwhi+APlcysKRMSE9\n",
+       "LnCYwnOBeCafMR6vTcJCiBPMQuK6LypPjmRBusESBZkrVsbvRpw/18OeHj6g5o3npHneGefFoZyc\n",
+       "pv7jdIZOMfbNd+ATD8MyL4RQPabOPPBdFlpefgbN4+EeSvV++iJEEUUB52FLF4wuXD3kJE2VEbzr\n",
+       "ix+F5MLGt00TXoDWMVmdkADhb+hGWPD4+FitViuFYFGEz54908bGRjoYWVI6sLhWq6W54kiaSqWS\n",
+       "hO/KykoKu0nniehPnjxJQoqwGOPDIJqfn9err76awoyE7xYXFy+EIfjbDyeem5vLGFLwJvzhByWf\n",
+       "np4mo+wrX/lKSqr+zne+o1arpW63q6Ojo1TlnL4+evRI/X4/5QthgL3xxhtaXFxUpVLR+vq6Zmdn\n",
+       "df36dUnSp59+qo2NDX3pS19KBz5D04WFBbXbbR0fH6vRaGg4HCaaHh8fpwOm2Q3nBj+hynK5nMm5\n",
+       "cgXP77hLNtZr43PGxTwho1jTGHvNZlPz8/NprjCaJ5PznaeEqpgL6B3DzOTM4AS4HB4MBikP8DKd\n",
+       "wNx6GgX9Zjen0wKZyfrwJGaX3WwO4JnkmmG0ut7x/zE08px3fjM+FDbhMN+xiLyOubw01zGkS8DP\n",
+       "hOiga5xnjE8fL59jYLLBwx1ND6tiODNGjFNo6/XO3HiL4TF3FF1+uy6DFk5HPotpGy6r81JPPEXE\n",
+       "+cLpEw1T13mXtRdiSEnTE+89iRtC+g+NwWNoOMFhJI+3SvlZ9s6M/M+EONPExEGP0zrRPTcGYsMY\n",
+       "Mblbyu7mYwwoO09487G7co7GAnHpuPARSDCg7xiENhhIeUmSzvw8j8Y4o7c4mUzSoaW1Wi1T9yUa\n",
+       "sR5Hj0aWG9gYj84fbsyiMKNQiDHtyE9unHlzRYWAjgY1dIlJvDFvzlFB5x9H2XjeeDxOxgAFJn0e\n",
+       "HaUgb8XzFuhTXPyMIa4l6Ibhi9Lb399XtVpNO+bq9Xo6zoX6QeTr1Go1vfzyy5KmhlSj0UhePsKU\n",
+       "HX3wC8YNdGMHGqUE9vb20jPZIo6hj5HBuieZmBwdRzLxrD2Phnv5DgbA3/zN3yT6/tu//ZsWFha0\n",
+       "ubmpxcXF1C9ytXZ2dtTr9TIHGq+srGh5eVnNZjPVW6I213g81nvvvadicVpry8cPL0VnqFarpXUC\n",
+       "audGRql0fhB6vV5PO7ecTx1Big1j2vOHHEXnbD2nmaMcOIKuvCmNgIx2WUM+EgaTI9WOivsczs/P\n",
+       "azAYqNvtXuBvrh8dHaU8HHcEPRcGY0OanlHIs8ijYox+LmC5XL5gnFA6wOWwO3OsZX8mfWc9xh2L\n",
+       "jN2RWuYlOoHch1zMyyFinlgzLnujg+rXyJMdjUbJmHZn3J1830HNnDJmR0ahhecMR+fSc9NcxjrI\n",
+       "4u92PRk3AXj+a3T0Y3PdBZ3yDDmci8s2G0gvsLI5gtYFH2f1sBAcxpamXmhUkggbBCS/+dyNkDyh\n",
+       "4te5H2MNRvY+RIal0T9HnbwvhHMQntEgQsFFVCImC8YEcASJhxqkbH0VBI2PH0ECPf0au/W8j4zR\n",
+       "oXH6Lk1DU6AjHq7MQ2YcYfECilL2/Cfo6kYTDQGVVyk9Gg/xkGdHoeKigV5Od+YCIeZhRgSJI2De\n",
+       "3Dsaj8eZooTQi23e7hTwLt9Z5IiUG+Vs9+c9vAtl44Kf7eMYaTxzcXExVfvd39/XaDTS7du3JSmd\n",
+       "UQUidXx8nIws+kPYLKLGJycn6nQ66dkYGZQCoCDe06dP030U3IQvBoNBJnzK2AqFaXKwIxCgB0dH\n",
+       "R3r+/HlS3uPxOFOC4Pnz5+lcwL/7u7/T7du39YMf/EDb29uZxPg33nhDd+7c0dOnT9O7QN0ajYbW\n",
+       "19c1MzOjnZ0dff7554nfvv71ryd6Ly4uJgeA+WQTAQfm0nzchK9cRp2eniZ0rFQqJVSKEIsrSd9I\n",
+       "4oaGKxOuz87Oql6vZzaUgJYSembuuI7cOjs7S3PjssNLMlA+hDF6+gUIDc/k7263m4yxPP7m/dK5\n",
+       "Ae6IQ55D7hsOeLeHHklRcNnG+3mnPzOG4P0a1z0iQd/c2fZdgr5bMG5k8bnCqCgUCqlPrBl38vKi\n",
+       "Kqx9rrm+YLefyxPmEn5yGkNXdx7j+9xR9ve5QelyE4c9Jrl7n93h9eYy2sfHHEQHw/WTy3He4YZ6\n",
+       "Xnth5Q9KpdKFwpIwqsOWUjaj38M/fq80NVbcqne40eE+t4S5FhET3h3hQ+6PiwbDyo0ynunvjXCi\n",
+       "ozaSLjBFtJz9+xiKjhjFZ7iF7kyOwo4GmAsymhu1eILOzNDNi606OkPf/VkojV6vl4xrN/x4tiMo\n",
+       "l4VLyf1wIeWGtKOYCB6Emgt9F4bRiHbh78oKWjpyxTuhJYKW/sZ3ouDwsBBcXqSP+fOwiHukEf73\n",
+       "uffG3LA2+v1+ConBS+RAUWtJOq8jRWHNRqOhcrmc8ocI9ezt7SVPlrDXwsJCKpzJYcrQqt/va2lp\n",
+       "Sc+fP0/0dJ7Z39/X0tKSOp1OhsaEik5OTjJGkh8vUygU1O/3U0gKeUPeFU5NpVLJKJdvfvObun//\n",
+       "vt5//33t7e1pd3dXknTv3j3duHEjoRR+bMT8/Hw66HZ3d1czMzP6gz/4A0nS9evXtbi4mKrAu9OC\n",
+       "gURdH4x45oK8OvJ3XCZSvsEROx87Ctx5v1QqJcMdJRTlkZfX8C3yGBnQyx0QLyQbUysmk0lCVmN4\n",
+       "y5HXGNZ3RHs0Oq935sqUsaI7nDfQJdHBos+uZGO6Bu/Mmwuuz8xky74gbxivz5O/y3kYeeeos6M5\n",
+       "tLyQkst3csloGCCEDiMigyGNXvS+QlOv8C5NjQl3uhwBdSfd+RvUk888jxdaxCiO08Zln/OT69po\n",
+       "LDtgEiMKrI0YhXEQhPF6VMSN3Lz2wgpygiy414oCc+UhTZWpW/ZMIorVESC3JF3ASNm8p2h5RqOI\n",
+       "97qC8smMC889BEfP/Nko4RhmcwvaDRunB8LGvUAWe0TjECJuSFwWxosWPd6nx7lpbt1DD0dbPKbv\n",
+       "Rkg0grxCu9dEifkA9N29pwj5+uLLQxzpP9ecvyKky9iYHxcKPl9ReGKcYUzG8RMWkJQUJ7zf6/VS\n",
+       "iIZnOJ/hYYNIwPt40+5lu2d/fHycqg2DdjFPGDQgOghw7kFZNpvNJECePHmijY0NDQYDbW1tZeYe\n",
+       "ZOr09FTLy8tqtVpJCbNVHgHvypYq4o5GraysSFKqk4TiOz4+TqG44XCoYvH8rCzQHPcyEfjwEyE5\n",
+       "51PmOVaEPz4+1o0bN7S2tqbd3V09ePBA0nn5h36/r7W1Nc3OzqrT6aRxzMzMaGFhQd1uV+vr63r1\n",
+       "1VcTQrS8vKw333wzoQbdbjfRhm3mEXFxvkXuueE+Go20tramcrmckEEPX3nivz8LXvRTBmigOhhv\n",
+       "vAc+9FAT80l/UDY4Ai6joxL2xGHWPHlefo338dzT09NMiNPDVB7BcF6DLzzfhtAzazI6rhhzvr6L\n",
+       "xWlNvUKhkElKd3QK+e3IymVRjEKhkJDbvBCs6yg3YFyGgzQjx5hHN3SjQeKGtiPHUVf6/9Bzfn4+\n",
+       "JYI7vSLw4PIbY9aRJx8jY3FgwkOzMUeU5t/3CIbznzsR3i9/xmV/+xiY88vaVfmDq3bVrtpVu2pX\n",
+       "7apdtf9le2GhPazkeHI0Vr+jFzHG7SEch57xXGOozGPQvsst7uCIUDSWvlv7niiN9+ZWdR7E7u9z\n",
+       "BMWh0Jij5Pd5oqaHIfC4SU52KHoymaRdLx5j9nc7XT3M6t5iTOaLfXXP06HqiCwxnyAgoBT+zLzQ\n",
+       "lHvi5DA4H7nnEPuCV4ZH4fPrf8fkZ7xkUCafE88B8Gsk+0JTP9yWcXg+w2QySWEoShMcHh5qZmZG\n",
+       "1Wo15RLhMTqSyU6piJbG8CJrCVSHa6wZeP/o6CiF70BboLdXfiYE+PbbbyeUhjyjVqulZrOZPH6Q\n",
+       "HfpZKBRSgja79OgDzwdl4L7RaKRarZaKY5IL5bzJTkHCG3HNg9SMx+PM2Z3j8TjlVfFu6Txct7y8\n",
+       "rGKxqO3tbd26dUubm5uSzsszfPbZZ+r3+ynna2NjQ5LSurxz546Wlpa0tLSkO3fuSJLu3r2bvGd2\n",
+       "usFvlJIYDAbJ4wV1mZubS+VeGJMj1YTT/ABneNFzq2LVc3gFnnQaELJCzvJM37zg/YBu8AnoU8wP\n",
+       "jSg7c0zpDT+CxMN9nr5QKBRSmBVEknHFXXSRTq4TWBPIQEddPNRfLpcTv3nKiSMl8CIHSzNOR+49\n",
+       "jyeGoNBbCwsLaecqYwA543sxBIkcgUYeMuO6pynwXD73VBau0U8/cJpxcJ0ctzwE0HWAz72P2xFH\n",
+       "vhPDlz4unutzzTgdtWPMLs89n9jXEbrb0T34Im7M4t68aAfthe3ak7L5Qh7aizHPmBDnTIVyizkB\n",
+       "0lRB+24Qh+c89IdB4u+MuVL+PhohFZ7n59e5AeaGBWGFmNvi4UoPGWAEIDDdyCJMgZCOyd2zs7Mp\n",
+       "jyLmn9Avvu8LA0aGSV0A+/z5gmIO3AB25vUdZg5VM59Oa38mi5jdYv58X7D+2+nOHHkOhQsbTwBl\n",
+       "3t1gdoHDnBIacQMMIes86bSCtz0USp9RhMDYKBoXTswDhhQ8i5FFjg7PBIanVhPPJEcIh4TDhqXp\n",
+       "UR/VajUpY3ik2Wzq8ePH6vV6un79ura2tjIGEXk8sXYYIeilpaWMESGdVxJ3nlxcXMzMKWHHaDSy\n",
+       "65e5hAZeOoDf9Xo9HXHCZ4Q+l5eXM7zI92/evKmzszPt7++n+1566SVtbm7q4OAgbfGHT/v9vmZm\n",
+       "ZlSv11Wr1VLld655MrRXjPZSAyRvsyszHtp+cnKSSh6g6DEgOfbG+cplDrxIrlretnaMfHa2ebKx\n",
+       "O7cuF3xrOUnmbpAx/mKxmHbzEj72eWKN+LrBKKPf165dS7TZ39/X/v5+UoieK0gf4RHfXetJ+O54\n",
+       "8H5P8vZ58jBYdPDJOxuNpqUIvKo/zcfl72T9u87zkJ479JJS7TRSFKKBwbvQbZ7LRp/43FNFnDZx\n",
+       "fpHlOKHuJMZDlX2M0RmPG5Ccv/IMFX+fy1N/j+sc9Bx0dX5i7vicMKyk5LDgQLizDV/G0KK3F2pI\n",
+       "uVESE/fi99wTcIJjQV6WPBm3lXKNnBGIHHNvILQLDe8PfXEmdSvWDQGuMcEYQM587j24cYYQQLl2\n",
+       "Op2EAsQjEHwnGIoUb8HzaWg+Nvci4uKPiA33+Xj8+/7bcy1Y+NzvBihjcMPM54u5dM/W+YfF5tei\n",
+       "9+xIHuN3LwSaci+L3sfA1m0UhSNkjlZFh8ARMObMx+bz4kUZOZ3ehYkn6iIASOB2Id9ut9Xtdi+c\n",
+       "t+aJoAsLC1paWkr39Xq9dJwLXrgfsFsqlfT06VPNzs7qlVde0ccff5yZ52vXrmlubi4lpdO84KIr\n",
+       "PeiLJ+65Lxh4vmPNjVA8Z3jYUU7qD/G+g4ODC7l0IFWNRiMZIdK54m80Gtrc3NTy8nI6mBmHpFar\n",
+       "aW1tLd0vSbdv3047VkEXXJkOBgN1Oh212+3ER1wjGd/5h7nAAUCper0vDghm3WDAzs7OqlqtJofE\n",
+       "eRTjGuXmCtHRdwxsPxhbyh4/43XicFRBOKNc8N1x7igNBgO1Wi2tra0lxNydIMbKO/mMQqrPnj1T\n",
+       "r9fLlH/w5HNQHYzavOTjaARwzdeTO708PxpU9N1lOwn7OGFuuLrjhdHiaE2j0dB4PE5lPFwusm4c\n",
+       "4Y7INHzmaBXvRR67E4NsgQ9cfqEPQSYjmOBghvcFJ9hlvRtn3Iej7cgp/fSiqcxhNLppPk7PCaW5\n",
+       "4Q1tGTsFSBmrjz3SMLYXYki5weBKLn7mqIF7V27YYBx42Mq9DAwB/57fx3e8MVGXWcHc4wuC+7xf\n",
+       "3lhkLJi4w8x3gvkY4k4BQhmS0inejnDF/pbL5YRauBCRpohcTA6FST3k5ve5gHGUi7njXR7mhJYg\n",
+       "CA6VuvCkX96gD7uWomHkcxLhWIRCRMrcwHJe4HO/5vOM8ILWcZ49fJdn2Pnf7vGcnZ1pOBwmRe0V\n",
+       "0V15ueBHWfMuR+vOzs7r+kwmk5TEToP+JCWfnU3PTPNwtyd2S+eKHaH405/+VN1uV6+99pqk8yrk\n",
+       "pVJJOzs7mp2dVaPRSMYRAovQRblcTsiKJHU6HQ2HQzUajQyi5cUroSXjAP1wD9drvRBuQZlGBLhU\n",
+       "Ot9R2W63Va1Wtbq6Kuk8FMh8YGRRK+vp06cqFM6TZ3d2dlSr1bS8vCxpuhbb7bYODg6SIUtfQYxO\n",
+       "T0/TepSUhDdhMUrAOJ9AE1c08OHi4mIyiuA1SiJgPPuZeb6BAqXiiKukhKwQrqJ5gj686SEz5ATG\n",
+       "lKcRuIJ2HgZhbTQaSR5FpN6dZN95yc7R/f39lKzvdEPx+9rDoMpbhy7X/Fm8j3mLUQopmxjOnDF2\n",
+       "R0L4Lu9wkMD7Ca1Ho1Faj466QwfWo+sqlz8eGpOyTpqPnf9dzvNZ/B9edR3gYcUoazEuYz9jgrh0\n",
+       "sRI6z4jOPe+IwAV84oag09SdefhUOl8zbsz6+JibSBtvLyxHKi9kJk0VWYQAHQFy1EnKHiAcLXNH\n",
+       "sBwqvsxAkrIeBlCtowe0PIXo3pR7EL51HkMqz8LFI6fPHC8B3Hh2dpZg8ajIo6EUDVanGcYTNPOc\n",
+       "Bv72ReRM5ULGPTP3SpyxGZfDtB6iJJTJos9Ddng2z5GUqu/SPw9h8C4XPh66dFp7P+l7Xi4UC9MV\n",
+       "kBuRKDc3RPNCopPJJOUfSVNkqVwuq1arXTDg4Xno7eEHDI3xeJwQCqebdG4cYCBJSmhFq9VSp9PJ\n",
+       "5MIgpFHmbgwPh0PNz89rbm5OlUpFrVYrveOtt97SgwcPEvo1mUwyyrvT6ajT6aR8plarleaiUqmk\n",
+       "HXClUint2qvVaolWPu/c52sIw9DlCoYViATzjiKAfhyYLJ0bNr1eLxnQrVYr0ebu3buSzsOR8CRz\n",
+       "wa5FQm0YI5JSeLDZbGp1dVWTySQZks+fP09ozdLSUmaXJP0ndFQsFtN7OKjcUVU3+KEbx+vQ+J6v\n",
+       "F6eZhzucD0FffY1G5YJ8hZaO8tEYkxuSyDQUsstaDsYmf4n7yG8rFovpmCTeF3PzYjqAh4MceXDa\n",
+       "RQfSDXJfi1xzneMKHsV8fHycZFBUyMhUDA6eQ4t6BpmLfIm5t64/HAX2e/29jirSD67H6I+HF/ke\n",
+       "+ol3gy5JyuUlR/8JU8aUFEekGEdEEpHREcn1eYifMca8cGGv10sGvfclomx57YWF9rCSo+WJcnJm\n",
+       "8Obb4Pnfn4FhIE0RF9plMVhX6DQEtYd9+NzDSW6ouSCKW1URbCT+ej4WC8WtaDfm/DgHXxTdbjfl\n",
+       "ungOkDRNrEeoRIPQtxnHxc3CdG/V0So8fZ4X0SpHnXyRuoHhuQaOvsXxQxc3WmKIyo0hD236nHiM\n",
+       "O0LrvuBc8DD3LgAcvYoOgfOeP9/7g+Dz7zlqyHV/hzsVjspEDx0Uge85UgGfwDd4uvSZBGeUAAab\n",
+       "oxI4AKAqfg4dob6dnR3Nzc1pZWUlw3uHh4cJNvcz3ECpNjY2knDDURgOh6pWq8kYALGTsoVR3YBl\n",
+       "XZGXw2eOFg+Hw7QOMZKgL+u+3++rUqlkio4SEqvVarp165YODw/TOm+1WlpeXk5lEU5OThJaRaHU\n",
+       "o6Mj7e7uZsofNBqNZEyAhMEbjka3Wq3kcNBfR04xJJkLHDme6xW64TNHQKQpkgGPuzHo/Op1mJzf\n",
+       "xuNxJgEbRBMDx2tNxdxGShsUi9M6adFZcd7n/cgML+9BhfW8/CRPXHfnimc6XVxfFIvFdO4hSfDR\n",
+       "GeczlxHoAhBCn1+/n7G6I45T4mvR54nP3fhgfqJMdFnvecbIDebMk61xRugrtPPQGPNKyRdfZ5Gu\n",
+       "PjZpGn5nc4obTvydl5cEnTxE6A5VnFOXl5737IYiNkW/309OixvHEaSI7ar8wVW7alftql21q3bV\n",
+       "rtr/sr0QRAqL0eGyGOby+Lt0bvV60bKIZrgF7l6WpnlYRwAAIABJREFUw5ERLnWPztExLGiSa90b\n",
+       "43sxd4O+eJzcty7H+7zPHp6JSB1eF/TwEBxhnHq9fgEB8rHiDbg35JZ+tLTxKkhU9O/i7UAbpy/0\n",
+       "xGuLCYlA9XhDsS/s7oIm0AjvM4ZuQSfdW/KxxDE73O5zH3O+3BuLKBPfiV6W52F5zpmP0Z8TE9VB\n",
+       "QvCu47lwjMPzgCibgJcYn0li6Onp6YWigXjwEQEkNEWBUA8lg4IQUjs8PExzvLu7m1CuVquVQW7r\n",
+       "9bru3buXSbZmXP1+X9vb27pz546Gw6FarVZmLZMDc/36dVWr1QthBzzshYUFDQaDlM/FPHh+hoci\n",
+       "8DzhJxA5r/rN2YEehvMkX6/83Ww2U8ju7OwsHVgsKSFXoHeFwrS4nyMglCpxXiQVgP44gjYanVf7\n",
+       "dj6E1+bm5lSv11WpVFJ4zJvzaVxrjm47euu8GkP30JhwuydcU6iU8caQN9/znarMEzssvSyDNK0I\n",
+       "T+5YPG7MkXN4BNqAUHvaAeNnPkAgXA6Dxnq0ItLUEXDG5+vLS7j4PPO/o+0gc6Br/j4PsXp+sfON\n",
+       "I+YxGsNYR6NRWjOe1H9ZXpCvJ66NRudlSCgc7HPqiBCywyNGbExgjI4Qud6JuXrQir8jwu95ZP7+\n",
+       "KIdjdIGxlEqljAxmfV7WXlhoD4HkQhMmdKOD5nF+h0AhMELXBYpDiRFOdQZ3uFPKLghnFr+P5/J9\n",
+       "7xv9Q5FL2WTz2D/uY3HH2DC5WrzLc5TYEeK7UqTp4sNgcLiT5wDh+tjjeKCrM7JvKfYF7PlMHhbk\n",
+       "mv9G2NJXDARqYUV4mAUQIVYWALT1vvh8xnExZu5zvvAcCJ8jz3vK29gQDS7PJ0PxewjXhdtoNEo7\n",
+       "vuI5dQhe4HMvY+BHo2BUeV8IYdF/KXuUj9cHkqb5WhhNvkaHw2HKgeF8PM6hW1lZ0f7+vsrlstbX\n",
+       "1/XFF1+kkM6NGze0sLCgarWacSQY93A41IMHD5KgJSTEmX67u7va29tTo9HI5F0xTgwFD4EeHR1p\n",
+       "PB6nhHXPyUMpQ08EpzQ9p63b7aat+t7Xs7Mz1ev1FKaCh4fDoRYXF1Wr1VIY/fPPP5ekZIhxkLcr\n",
+       "dniNWk+EO+AZjAHyUJzffKOJ8ypOAJXgnQ+LxWIKGeXlfDj/ezmVmGODonGe93URc+tmZmbUbDZT\n",
+       "yMplKPlHfIbRPxgMLhxjkrfGfG4ZPzIc2eKyIO6o9jQMHDNoFp1rfij1IU03PtAXf4enUuSFhtxR\n",
+       "jGkuHK+CU+Iyx8N4HlL0OWRMl+kcxsF9HuKLYTr4BjkWUxh6vV7iYz+Wx0uEcF+sHelz4Dt9XUe7\n",
+       "USspY+xFeRnH6LwL7aKcdJuCcCONOY285+2FIVIoWzdUnACxhpNvHwWJ4FqMgeYRJ8aRaUySE4oE\n",
+       "PBZUzK+JBoJ7AihLR1G8nxhMbix4P5lkzyFA4dP36A1yOrqfxcXY+C4Lwxk3eofeQHmgqfeP/0GQ\n",
+       "8oxU/z7XnDbRw6C/3OPCDhpwj+/U8DnyuXHPKjaMU4SbC2Ke44nJvpvJ+x8NYUd3oJ0vWBcwzjd4\n",
+       "024Euwfr3hJ955ko99PT0xTbp98Yu9Hz9fGT5OuJ75zhJp2jCRQHxVA5OjpSs9nUnTt3UkFK6mCd\n",
+       "nJxocXExIUnSuWIkgZvSEYyh0WhoOBxqZ2dH7XY7c5YdXjle/P7+vvb29iQp1bmamZlJRo/zEMaT\n",
+       "F3d0/rp27VpS3l5jqt1uq9lsql6vJ/TBd2CBrrTbbTUajTR+0DAMv4ODgwwiA1oHmkd/oTXor+fm\n",
+       "kP8yHo/VarUyCsNRY+QT95XL5VTmIDYcFhR3dLBYe6PR6ELNL3f0+D8mdjvq7oU+eaaXZYBuo9Eo\n",
+       "c2ZgrC3F+nalTPK25zG6sQTtPe+Q53Edg9aNHnd8HBXBIHGHDkfBd/T5sUQ0N8xcJnk/oyPohm5E\n",
+       "0FkPzEVE1uDXvIRq3umomOuEaHi5nnVD17/HmNgtGo3a+H7+j7miMUrgfB1lu+v1GOVhTIwz6ieu\n",
+       "xYiJ63mQWX9HXo417YWVP2CSvXPA79HC9glD8XtyGYwNokFjEUVPTsomW8ekZfrg3pMjYlJWEcZJ\n",
+       "dOQiImQIufg+/s9DSHzscSFijOL1OUyNgvV73HBFAMEkjtxgBDrCxjU+82J89NWFex6NXajSFzcM\n",
+       "YpK+G5RR+DN2Ry4dUnceiIKNcUNDpzPj8ARgf7Y/x+nqhil9cx5xj9ZDdAhNVz4YLzMzM6lQY/TK\n",
+       "xuOxBoOBzs7O6zB51ft+v5+ZTzfOR6ORWq2WisWims1mZkcbCgpjxsOCa2trWl9f1+LiopaXl3V2\n",
+       "dpYqTVerVS0sLCRE5tq1a3rppZckTcN+MzMz2tvb0/HxsZaWltL4OCC53+/ryZMnevjwYaL322+/\n",
+       "nalBFR2ier2e+LTb7aYQEn0hxOWhn1qtlgxEjF6eS82sYrGYUUiSdOvWrVSramVlRfV6PaNoQIY2\n",
+       "NjYyPAdqzLMGg0EqjcC4QK1AIZxnkBuTySSF9nBwfF1ggGBEM1aMNmgaEWeXCePxOJVrcLq4IwTv\n",
+       "erjJEVUKh8LLlG/o9/sJIfME/pOTE1WrVQ2Hw/R+nokxByLjicq0GNpiXB6Kjjt2oaevwxhNQK5y\n",
+       "DQSOdcizjo6OkqEML+bJfcLv0bGJ6zo2n2saKRJuNESEjM/9+S6XcK5dvrkD6062n5JAnz0dh2cR\n",
+       "Mne56O/IQ4yYQ9cJjqhFMMTnijl0fenrB+NWypao4DseVYImDoDQN/9uXnth5Q+kiyEkfrvnz/dg\n",
+       "zsiEKJr4PClb5ZrnRw+M+9w74F6ER4TGvS/+Tmd4R2VoPvlu4fv1+Ey8D645EuLGGl6XMx39xkCN\n",
+       "wobvIDB9YTP+iBa5IuO6LwYY2+FhaOPeZRQehFtAbdyw8bF680UQjWEXir5wfOzej4hG8nekNzQG\n",
+       "xXKUw9FN9+SlaQ5JNGjpC1vg5+bmVK1W03VQExSDw+2gO9xP8U3o7R4dh+UyDmgzGAwyVbfdY+eI\n",
+       "Ep+LmZkZ3bp1K5VpYO11Oh198cUXWl9fV7Va1bNnz1JtppWVlWRcg5RwX7FY1Guvvaaf/exnSQCT\n",
+       "k/Xo0SNJ0le/+tWUS+F5IxzKu7e3l8lroq/Ly8t6/vy5ZmZmtLq6mikb4YUu3YuGP8mD8aNuWIvM\n",
+       "77NnzzJClu+vrq6m3XnM4Xg8Tjk+boCheEHYyFOBZ4rFYqZ6uedtTCbnRy4RSozoZ61Wu6BIL3Me\n",
+       "nYd5nh8P5OEOz1V0Be1Oj4eicEbcwfS++hz4WsWYg+cdOfZ1hMPosg9ax13XvBtaRjnkSLKjfMgm\n",
+       "6Bl5kdwa38rP2PNQZPriO089SoGR6norprf4uJze/O8Ii/fHHXGXYYyPH78XOnNPNHqi3Isy03WQ\n",
+       "P5PfLjv9Gs+KRpQ/33PnHL3nur/P6ev2RHy+j8H122XthSFS0chwQyYaGRGS88/cI3MDgGsolMss\n",
+       "TUfG/D4IDTP6pPN5XiKyNI0x+4T6IkHQRBr4oo+QL/f5Ncab56VhkLl1DsLm1z1fI2+B8S6Hvwk3\n",
+       "YThFww6kxJNcHbFBUXHNi/X5Nl+nFfPl0LAbQTHp0FG9aNj4QomwMYYgfYn0duPMBa17xXmIYl6h\n",
+       "Oeh9dnaWcnNAZ3guBgtz5/yNouj3+9rb28uc1VUul5NRsbi4qPX19YRm+JEwEdIej8fpSJlbt25l\n",
+       "wsGdTkeTySTVpapUKplK23Nzczo4OEjHzGAIraysaDgcamlpSW+88Ybu3r2rnZ0dSefe/PLysh4+\n",
+       "fKhKpaJarZbZ/j43N5feu7y8nMbeaDTU7Xb1gx/8QMPhUOvr67p582bqz82bN5MCu3btmiqVSqb8\n",
+       "g5/RWavVEgKIHOE9cYMGZ/7t7e2pVDqv8s41jDlChSi+ZrOZQqU80z1ijm1xj5lroMyj0ShTTBK5\n",
+       "5Xlu9NPRRWScG06eM+JhHvK+CPGQjA+fOMobHRt39Hxtcy/rGNTJEQcadYV83Z2cnCS6ttvtzLpC\n",
+       "xvLj8gQnydcktGHduzFPX1xWRLnvTpWvcYwWp4HLG3eqHTmiWjm6ydMIMDw4ncLpDV9joHpfaPQ3\n",
+       "Ikf+OTrMdYRHBdzw4R6X59FY8fvzDEeXt1yj72xggHf9xAJ3eJyn4GU3vrnf590NTje46bPzQDTS\n",
+       "fFy/zZC6Kn9w1a7aVbtqV+2qXbWr9r9sLzS0Fy1MrMq4s0fKFr+MaBXJeDFkhPXru/ncUsaad4+F\n",
+       "Z/JO/8019wzyIMdoPXMtjimiEzFBmc/xOt0z9vt5L8+gb3hkjNefGyF/p6l7sTEUhWeKNxP7Sh6A\n",
+       "pMwZSR4y4tkewsBLBZaPuzdIFo5esHvakb5eAC/Gznl/pLc0RSH4gd6efE8ozufE8y/yaMp7QBAc\n",
+       "lej1eims46FoPCveRSiHRjjppZdeUqVSyXhWJOeyRvCEgcMpXMj3Jen69eu6efNmJkzhOULkwTBe\n",
+       "Em5B0zj3rFarZSrw1+t1HR8f6/3339fKykqmyvrp6aleeeUVvfrqq/rpT3+aEsrX19dTUjioJTRu\n",
+       "NBpqt9uan5/Xl7/8ZZ2enmpjY0Nra2uSlBCg1157Le3cgzaLi4vqdDoJ5XPecPQABNDlFeGmcrmc\n",
+       "kCRpyuuERzz3hjybUqmkfr+vdrudyQGs1+uZs+Kcx2dnZzM5bI4cUhYBOefP9NBODPnH6tlRniwu\n",
+       "LmZQEmjBmgeduAyRKhQKF8JUhEVBUTxFww9e9yOC2HnJYe2O9Li8uCzMeHZ2lo7f8fF7qkRML3F9\n",
+       "4PzGeAm5OTrlOUGOULMu6JufFiBNiy27bPD3gVDFHDDkl48/6hzPJ3ZU2fWTy1/e7UfqRP0BH/A+\n",
+       "L6kCiuwomfNU3i5RR6cI27JuXe7FKBR9dR3k9IsoMo359p3hUX/HcKd//tvaCzGkXAG7wvBtkXlG\n",
+       "hw8o1p3Ky6WJho3HfTFQYtyWZ+T97c/knR7HxjDx6uvOUHmwob8nT5j7fQiqGD/mcw/deR8RCm4U\n",
+       "RIXtfXVI1KuUx37mJekjOKLQIAEWmsd4P1vDI4zqELL3w+mdZwzzfU9I90RK+MHDdD4WX4get6cP\n",
+       "hUIhkxjs8xgTWn0c8L7fh/Kl9IGPE2UymUxS2QFCPAgDkjzZgsx9nU4n5UB1u900F5VKJRk7lLIg\n",
+       "1Lezs6OtrS3Nzs6mvCT6WqvV1Gw2Ux6UH1o7mZzn0NTrdTWbzZTzIZ0fZFyr1TQajVJYkGTrfr+v\n",
+       "2dnZVAX83r17qWwA32F+Dw8Pk5Ld2dnRysqKvvGNb2h9fV2dTicl/DL+jY0NLS0tpRAY4yD5vVqt\n",
+       "plAS97EumDs/gYCE9fX1dZVKJT158iTxzeHhYeLjw8PDzGG/3EdOFzvXpKmCYZ16uQvCugh+D6/B\n",
+       "U3y33++nd87Pz6ter2tlZeVCjhC8xU4zDzXhsHlaAv2Efq6Y3chmzbrz4XKYH56T91w2NvBMQtWe\n",
+       "OxlDNxiMLpfcyKHOFErf0zx8PUtT5Zw3BvgZ2rmT6degnctVrnsNKfriOWDQimuj0ShzDJYb+Hlh\n",
+       "VsbhBpXTl/64DIrGhNPVZZTfE8OlkpLjxVqNG5D8XV7r0MPZlFZhDl0H+Pj9ndSWuywM53TBgKY/\n",
+       "seyP0yLqmbjDMLYXhkh5zPj/q7myzLO+EUTRg+Q91FpxQ8aVKj/+mS+2SGAEQhxLRKpcCdEvR8jc\n",
+       "WHJG94WIhc+z4sKPnqN7tQhRvgeNpGxNDZ7lSt8XdqzrhLCNxzz4vRiT8X1sAZemxrBvUV9cXNR4\n",
+       "PE04ZsyMwenGc6NR7c0NV0c882LifM8VgiuhiAa4oI2Cxb1b7vW5cyOLuk7j8Vi7u7sqlUopURtj\n",
+       "AMHmieGDwUAHBwfp0N8opClTcOPGjZRvQf89Afrg4CA9c35+Xjs7O+k4lnq9rhs3bqQ+j0Yj7e3t\n",
+       "XdgNOj8/r+XlZa2srKQE7T/8wz9M4zs+Pk41oA4ODvT8+XNJ50jO4uKiCoWChsOhNjY29Hu/93uS\n",
+       "pO9973tqt9u6du2ahsNh5nwvvOdGo5F2dvX7/eTRbm5upnPtfDerpHSe1sLCQjrj77IdqD7vXuD0\n",
+       "yZMnOjw8TP3hfMLDw0M1Go1UM4n7MZiQRfQT55E+sjtKOs/lYr2g4P0IFlCw+fl5ra2tZRTt3Nyc\n",
+       "arVaoj1j8YR5jDTPS5mbm9PJyUnGoJEuJuPCC87PvhvXZbQrIMpS+C4ul3kcFcM4MPSYb2/uCHpD\n",
+       "Hrrij3KY8XteqctoFLAraMbFNZf7nj9E3/gNneJubUnp0HGe48f8QE8vVePNUTn+93f7/Ph3XGZF\n",
+       "o8MjG1EuwhvIz7ixBfTQnQ/0rkeUXLdE5Mh36kd0z/vNNXJxvZ/8dqdIyj9jNfIFfO15he7MX9Ze\n",
+       "aGgvfuYejn8nWolu9HiiX9x9Fqtrx/c508SwEf+zeOIi8V0SUci4ceJojRtYeUiYe3A0klD9fp9g\n",
+       "Whw7cDhhHEeu4vj43xPRoSmL0Q1H0AeMGEdzHN1hfrjGOykG6coNZYPiYzFSI8u/616bG9f0L9Iz\n",
+       "/h0RxzyliRDzOYyhAPckfeG5F5YHZ/PjZ5AVi0VtbGyoXq+ncJ10zsP9fl+tVkuDweCCl1gsFtNZ\n",
+       "dGdnZ6l6tiS9+uqrmpubS4U6MV4ODw/VbrdTeNEF040bN/T2229rcXFR9XpdhUIhKbB2u629vT3d\n",
+       "u3dPMzMzevLkScaQmp+f19LSUkooBgVot9taXFzUycmJKpWKBoNBotvi4mIKG/V6Pc3MzOi1115L\n",
+       "8/yjH/0onc3nybiFQiGdNUl4cG5uLhmgJO/Do76+KSIqKSFWNEcoozc/Go10eHiYDJ7hcJjmaW1t\n",
+       "Taenp8mwdXSMit+sj1hKxStzgyZJ00Oi5+fnk2HrmwngVWpRYaguLi6mUC4omSssxsX8uAHmRReL\n",
+       "xelZjhwuGxPNY/Ix97oc9MR4D3kyj1xHtsGL/js6KnlIiitHd4b9e6BJ/l3vT146B/c5Yu46iHWI\n",
+       "go7Iuctrn3uMEeRuRD1w1kAaY9qG089lE2kgOErufGLwwOcxhMV4oJPrC3fc3ZH2sDN0YcygcU7z\n",
+       "+L6oe6VsTauo/91gxCn0cLUbUW5MsksTfvJNEcgK31R1WUpPXnshhpTHd71zEVZ1uD2iQ/GIBYdz\n",
+       "4+4NRwcgKu93RoxKGGPJr7mFy8J3YeJVkn18LnjyGAMhlGcscY+jJdALlMLDA5KSp8nnvMNRN++X\n",
+       "/x29HxcMbtRFFEy6KIAQxHEO3DBCkTvaFz0p/+19dqHCWHlm5C3vI8aw84H3Jc5PHCd8E2H3+Ky4\n",
+       "RVk6V5CTySQhPZQHoB0fH6dilpJSjg+KMdJkNBql42Lu3LmTnnl0dKQnT56o1Wpl6NHtdtXr9VSv\n",
+       "1/Xmm29qc3Mz4wnjkXW7XT18+DDVOPp/2DuTH0mzq+w/MWRmzBE51VzuqnKVwe1u2thmYQMWyF6w\n",
+       "QLDA8sIbNkjwB/AHIGGJHYIVYo+EkNhhscFCCATGlsCiy91tuqvb1V1DjpEZGWMOMXyL+H43nvdU\n",
+       "lBeWvq+8yCuVsiLeeN/33nPPPcNzzj33F37hF/SlL31Jq6urqYAm/dzY2FC5XNaHH36oK1euZDzF\n",
+       "er2uvb091Wq1lMuEAULJBsoZuKL5xV/8RQ2HQ73//vuaTCYJ9ZHm/L2xsZEOBC4UClpfX08hQnYO\n",
+       "zWYzPXv2LB1hIWVDRqVSSY1GI9EfIwNDxY341dXVZPhtbGxof38/7UwsFova2tpSpVLR+fl5JtQm\n",
+       "LarST6dTHR0dpbngeCcKi+7v72fyxwjB1ev1tBsSfsMoZbedoyteniKirsViMdHfETCcB+jnaJXn\n",
+       "s7COfe3BX4TvY30m593xeJxBnTxUFHP5YlglIkTugPv7YijUZQHVvCPiAQ2WKU1kho8d2YaBBdoW\n",
+       "ZQ9oJAaGyxdKYVxcXKT8O+dRaOe7mV3Xca8bjDhI6AqXQZ6H6boMvkA+x5AwNHR9Rx9cF2PEuXEO\n",
+       "ksp6ipEP5i8ada4TPUrjBhyOvId83Rn2fvJcvvPwpfMA7/u5N6QkvaBMXwZdSgs0x8NbXFuWFxTD\n",
+       "Nq6o47OlbK0mKZt8iRHjCjMmuNEQbjAyyak8i78RDaI/XgkY5nQhuAy1c0TGFw0CCBrBxMvQomXh\n",
+       "MfdoPDfBjR1JGW9gGWLj/WEcFG1bZtz4/EkLxYYR6d4V73PjOyJA0Dl6phGJ8rg9itT77v3x+YyL\n",
+       "yz2xOD62+t+5cyezHf/s7EzPnz9Xr9dTuVxWo9FIyAN5UYRoff6Y48FgoGKxqPv376dnPnnyRLu7\n",
+       "uwn5Ojw8TPkH9+/f1+uvv65Go6HpdKqTk5NkLD158kQffvihnjx5onv37unLX/6y7t+/L2muEHZ2\n",
+       "dlI19OPjY925c0fSPBH90aNHunXrVqrUDa0wtnK5nI6Pj7W1taVOpyNpjsbVarVkeFDQU5qfbVev\n",
+       "1/Xaa6/pv//7v1UsLorZlkol3bp1K/ULw5FxsKan06n6/b46nU4mFA0iihHCfHLGIHPp/IwSGgwG\n",
+       "qtfr+pVf+RU9e/ZM0hzlY47q9XpGEDuqM51OVS6X0zgwgAjDFgqFlBs2nU5VrVbVbreTse2hcnLq\n",
+       "6H9Md5hOF8U13Rlgw4I7DTRQlUKhkEFAkQcYLKxDV2oeuvH6Y6Di1FviPVyjeXV/xkCfXQb4HPr2\n",
+       "f5fR/P5ljuMymYDMjmkOPk8YQq7YY0jd++IoDzLQHWE3XjC24BXQQ2rJeVjbK5s7qkdfHWFx9Gw2\n",
+       "W+T9scnHmzujGKM+Du+HX3OH1HUbMgqa+LqIqGHUifH+6NDmcrkMiABN6SM868aiz7ejuq4nYuN7\n",
+       "f05sl+UPLttlu2yX7bJdtst22X7G9kpDe26dT6fT5MViSTu64DAcno+UtYpBlRwhoUWPDQvUkRKP\n",
+       "j3v/PLbtFq9XwKbl8/lMbpJDiR5KdCg6wot+HIB7HctCTo5ERTTId6q4FwQ9HEHyQ0OlRTJvDF2B\n",
+       "xkHrZYX3JL2QC+G7uE5PT3VycpLCMHhMjjbGEgJOD0cOPXbv3owjf+7x8AxHtvg9tOdIlkIheyYi\n",
+       "4Q7CmiQO0xeeC/+ANEjz3KObN2+muT08PEzb/GezmdbX17W1tZWB3aE7OUXc60m3FxcXKaTV6XT0\n",
+       "v//7v5IW4Ya9vT3dunVLb731lq5cuSJJCTV9++239d5772k4HGaS+3/9139dv/u7v5sKJHJky7vv\n",
+       "vqu1tbWU/P2lL31Jn/nMZyRJH3zwge7fv6/BYJBy4Ci6Se5Xr9fT1atX1W63E5JTrVa1vr6eksW7\n",
+       "3W5CZCaTeXXyUqmUPGmOO8nn8zo8PNTW1lYKAcQzEx0NXl9fT7xEQiweP+ERflsqlZKH7+fVgUav\n",
+       "r6+r0+mo1+slnqZvIAVnZ2cpfBbLlziy6Ghwp9PJhCdB1yqVik5PT9O5gzTWCYnzhC6LxWLKr5rN\n",
+       "ZpkxeOh7OBxmKqmDiudyucy8QLOzs7OE8pGYzpg8ZSGivJ6rw3rx/CfQW0JunpRNLmbc3eboDQWC\n",
+       "vTmCwO5EaY6Gx9MTXMYgg+AHR6T8N45Yx7AUuYWMD7lPOJmGjGJtR1SeMReL86K73hfkvW+coTnS\n",
+       "wljon/fVc9Ocn3h3TIXxXe4uh52OoF2MExnMrj7WKrRhXXiIT8rmspHL5PobuRw3Ifih79DHedTR\n",
+       "SqcFctfDiVyLvLCsvdLQnpQ95sMhQBSrtFAKhUIhwb8x+ZvJdXjOE3sjZOehwhhLdQibWi0uaIHE\n",
+       "HZZlDBgXCFRPOEXYAxM7zIjh4VtsvU8ueDx8xBhiXSPot2wbqn+m327YOYQew6YYuzCeL9IIIUfD\n",
+       "xnPRvB6SH67L2B36J0fA4WJpsUXWF2dcGNGY8t/E3/FMfgvP+UKM74jXeMfR0ZGuXr2q119/XdI8\n",
+       "FDUYDPTkyROdnp5qdXU1JQejCKCNCylCT4RaXGGdnZ1pY2NDa2trev78earhJM1zj+7fv69f/dVf\n",
+       "1XQ6rzSPkfX06VOdnJykPJpcLqevfvWrkqTPf/7zms1mevr0qd5++22dnZ0l46XZbOrs7EwfffSR\n",
+       "fu/3fk/NZlP/8A//IEn6whe+oE6nk0JOBwcHqaZTo9HQ0dFRRily1h6J0iTck0/BtW63m8Jh7O6T\n",
+       "pN3d3RROKxaL6eBinjscDjUYDFIJAEnJsNnc3FQ+n1en00nV2TG6qMWFYvN16gqnXq9nQnT0OeZn\n",
+       "8F25XM7MG8fHjMfjlEt1cXGhg4ODZEg1Gg3NZrPEL+7soEhWVlaSjIqGg4fEWB+Mp1KpJFlCvpKf\n",
+       "6chZeYyH44I8/OJj9PUXw0HIUd9F7E7WaDRK8oAdrNxHf2NYhbIVvBfjnfuiLkCeUHrCFbfLNpdR\n",
+       "0Wnh2fl8PmOc4TRLC8cHg4ISIR4SpJGDhkPmNMHo8qRv+IR3oadimskyYzOmNcTf+mefP5dr5IBJ\n",
+       "2dQFaEZf/HeM4fT0NPGx76Jz+eppLM67MYyMUQXPY6BBE59znBjo4ZXj+T3Nx+o0cgPvZe2VGFJu\n",
+       "KPhxEK7Io3XqExCRqlxucVaZx+F5xstivW5tR0TDUQxPgKVv0cCSlLGafXcE1zxnyN/HgliWz8PC\n",
+       "5bu4CLgvjg/r2xnSx+zjcXQOertgdAZ3uvNbN7b4PcIyGoLQ5PT0NOXJ+JlgESX0RYMQdsOV/0cv\n",
+       "0xEpWkwe5J+PNyKf0ViibkmM58Ob5OJ87nOfyxSlfP/991UsFlWv15Mz4CgnPFAozI/RQNGurq6m\n",
+       "BNyoTDY3N1UozI8pYVfXa6+9JmmOgF1cXOjjjz/Ww4cPdXp6qs3NTUlzhGhlZUX7+/t688039cYb\n",
+       "byQj4/Hjx/rud7+r6XSqO/83lwuaPn36VLdu3dI3v/lN7e/v6x//8R/1jW98Q9LcsGm323rw4IGe\n",
+       "PXuWMU6fPn2aFMZsNlO3202Gy7Vr1zQajdTpdFSr1bS5uZnuY+MGByxTiwq6oPAprTAej9ORLRsb\n",
+       "G1pZWdHBwUE6JNllTqPRyORCuWEH7yJzlhlCppsiAAAgAElEQVRHp6enaWesND+SZjgcZoxCFJ8n\n",
+       "N5NT6CU/JKXiorncoihhq9VKNAPZJM9NWmz9hj94JgdWk7wPcg3dcIBAIDCUY86Rrx/y8OBfrnkC\n",
+       "NDxMf1nzvtNvMsluGuA4Igzo2Sx7cDnyHX7yYr8gaM4T0DTumHaEyHWL6wuXc1EOkd8GasEuRu7D\n",
+       "kJhOF6VFuObRhOiUg+6DfjvKRV4VPOtOcsw34tnQG7kG/VyGOV19jMhT14duaKFH/H7oTdFc1z88\n",
+       "E7nMvPuGERBR74e3QqGQduBGlM/H7GgVQAY8Fw0kDDDXwe4sxSgI8xeNTm+vxJDyRefGEwsOIsRE\n",
+       "SAScewMOm7KwIQDKtVAoZGpbSItznJZNoDMU/YzJ3whYfidlk7dBFqLydsjVkRVHY3yxwfguDJyJ\n",
+       "QKVisjj9w1pflnTpXpsLTf+Nf/YxuDHjY0LYMfaYfI1wyOVyGU8c796NMd7HYojoEYLU++EGpRtW\n",
+       "bvDCY7lc7oWKu8wdxlUut9ghyli8rAY0Oj091WAw0LVr13Tjxg0NBgO9/fbbie4bGxtpmz50cDQW\n",
+       "bw5aojDhXe8Hc12r1fTJJ5/o/Pxct2/fVrVaTdd2dnb08OFDTSYTXb9+XaVSKSnojz/+WDdu3NDX\n",
+       "vvY1ra2t6dGjR/rP//zPNI5r166p1WolhUGI7jd+4zf0rW99S3//93+v7373u/qTP/kT7e7uSpL+\n",
+       "53/+R7/1W7+ld955J6Ed+/v7iRd7vV5ao71eL5UpYIciCE+3281URB+NRnr27JmeP3+eCoLCa5xD\n",
+       "RkI1nq80RwQ3Nzd1dHSUwtwo2lqtpp2dHTUajYSaOW+AjDC/8CL1jGazWapF5UphMBikAqNukGO0\n",
+       "+G4q5p4xsJbX1tZSiI534Siys0uaG1nVajUpCUcBMJ4o0YCShm7w/draWjJEJaVaZMhi53NQEyqF\n",
+       "QyuvTZbL5ZJR5sVqmRsfj69TKuXDb27YQB8pu8MQtAqZ6SHBqAAdxcUQc+XqSKEjPNHpXCYPJWXk\n",
+       "KCEsGmvOFX5Ev/nnCJCXoaBPzGGlUkl99eZhQeYQw8+NTcaGDHLH0ekWoy3QOqblsBbhAV8zfM89\n",
+       "vhOU+fRd6ZGu3Of6D7p4H/xengk/eckU6OHz7WOHHu5c857YP2+vLEcKoiwLq0WDgYFAvBgy4nvy\n",
+       "VuKxF75N0mE/mD8iHR4qwrr1PANX2tGi90XonoiP0WP9/kynhU+ab8V2rwkjjlwW6Me1WD/HFx6M\n",
+       "6WHRZdtdY3/IEfJxOZ1h3rgwHJGjyKDniTCH0cPA21kG3XrozpWTt2VehKN9IJnRMOL50+ni0M+4\n",
+       "IJ3X1tbWdOvWLa2ururRo0fa39/PFGVkQeN1RzqzgKMB6AYkixtl9eTJExUKBX3uc59ToVDQ3t6e\n",
+       "nj59mvrIrrZcLqf9/f2kMH/t135Nt2/f1s7Ojr73ve9pOp2m/CnCUCjMbrerb33rW5Kk3/7t39af\n",
+       "/dmf6d///d/1V3/1V/r444/1N3/zN5KkP/iDP9CjR480HA61sbGhZ8+epRAV5R7y+bwGg4FarVai\n",
+       "wf7+fkIk9/f3dXR0lJn7Xq+XQngHBwfpGjTAYMD5cAMUob++vq5+v58MtGazqZOTkwza4sYLeVCg\n",
+       "Eq6EqUXDuue+2Wym27dvq9frqdvtpp190jxnZjAYaDweJ2QBg4/SBuwgdKeH/8OH7kB2u12trKyo\n",
+       "1WqpXC6nqunch+HjRUzhaVc+VL1n7B5W8xpHKPLodNJXeHOZg0LlfMK4HrpHTniBV+aYtYK+cHnq\n",
+       "aFihUMgYWYwTGrgSZh25AcUzI52Q/VK2sCS0c+cUA8KNQP4SMuU3NJ7hzqwbM/QD3oi7z0A6vV/M\n",
+       "HX1mbnxsjrAs251MP9zIis4ehjbXYmjS6Yh+ZSy+0xP5zbuiDOc7l+PIRw8lRpmOzPR8Yzcsya1y\n",
+       "nec6OxpuPifL2isL7TEZLGoEYYxRSwvhxtlfJD1KWebHy3KPyT0c95JcIUe0xicFyxwlxLlKwPoR\n",
+       "DnYl6KhaDAM6WgVKgzfoC4p+OJ2cjv7u+NcZwuF13unM6McOxMR1F2DRA4ihMxSPw9vMDQYL8+9j\n",
+       "8WNS/LkonmXeAM9yGNdDdD5OFxhOP/rM4naaYaS6t+ILdDweJ8V9/fp1HRwc6OHDhyoUCtrY2Ejo\n",
+       "EzTF0Ed5wFN4UCAAzvvkRfFdo9FI5QRarZZu3ryp4+Njvf/++8rlckmx8//pdKrd3V1tbGyksF+x\n",
+       "WNQ///M/q9fraWNjI+M4tFqtlM8wnU71R3/0R3rw4IEk6Y//+I/1ox/9SH/xF3+hZ8+e6a//+q/1\n",
+       "+7//+5KkDz/8UO+9956+8pWv6O2339bW1lYydp4/f65Go5EEHLlA0tzIGg6H6YgVVxgkom5sbGhj\n",
+       "Y0P5fD55+L1eT+vr67p7926iLUYxvEgpA451oQDqeDzW3bt3M0fgYNhgTLhj4or24uIi5SvhKdPX\n",
+       "fr+fkC/QHknp2e5A+FrL5/NJbrkAZ7s9hokbMl6Elf67AYKhByLtRgNhSWlxdBPjm06nKfQYz7v0\n",
+       "dRBReBQhChX6QEOQiIh+gJaRguGKD345OztTv9/PGEu+5jGaoTdH/0A3KZuT69EEeIXmBkFUnBhC\n",
+       "UbG7XIFHvHAqSCn9cb3DPw+1SXO5F+WNn20XoyLQmbF6eM7LSsBv9N91QnRSlzn76F9Pcse4ch51\n",
+       "3c24HLSQFuUu6K+/zw1PnAgaBqvrX+83ujXqY5A5N6Kcdq7vI2DhdFrWLssfXLbLdtku22W7bJft\n",
+       "sv2M7ZUgUliCbp3iNYFouAWI93F+fp62N2Kd492BuPhZbngwy5IJHdqN+TVYrnjPHtfmXVjYETIF\n",
+       "PWMc7iFLCyTK0SqPOwOn+xjc+vZ3OgoTz9SCbn4gaYSjl4WzeG5EBX2M3tyL4V63+B2OxrMA/o9J\n",
+       "ooRo4tEx7lG4Z+zXPCwW++noEvSO6JXTlOeBTrjXzfyTwEyS8s7Ojvb29jLFMz1/rFCY747ifuaE\n",
+       "/uAF4ZVznyNlIBR+9t2jR490cnKiarWaQSXW19dTmOn+/fuq1WqpCvfTp0/VaDQySe940OThNBoN\n",
+       "feMb31CpVNKf/umfSpqXOPj2t7+t4XCoP//zP9c3v/nNRLfvfe97+trXvqZ33nlH5+fnunXrVkKA\n",
+       "HKE8Pz/X48ePE6pGAvx4PE67AkHqKEwLD5OHxNi73a5+8pOfaHt7OyGZ9Ofk5CQV+Mzl5gnc5GV5\n",
+       "flIul1Oj0UgIzebmpkajUUKcCU1ISrk8vuPHk61Z05RXIJ+L9z958iSFOZFfoJTINZdX0gKVIiwM\n",
+       "D29tbaVjacitol9eSBNe8iTtQqHwwi5AGmOH7r6OkDOeb0VzWRFRVc9dk5RBeZERyE5kIHRzOeso\n",
+       "PsiCI3ye+wId8/l8Ji3DUzG8f3x22kfaxFQNv854GQPP9Dyf09PTlNrA3PNe0FRkgqPkUnYjE9f5\n",
+       "LSFhxgWa4/98bqABOiGmpiwLZblczOfzmXQLDkFnvTqaya5UEGKQbqc7esxRLr4HQZUWu279uiNv\n",
+       "fg863PnCx4T+8eiVz3HcrPDT8qOkV2RIufJlkMPhME0GDBljtyROrq2tZeKzENNL1fu7mHQ3LNxQ\n",
+       "wGjysFWcGD6fnp6mEIxPFs1/G5nbf+Phqphs579FUHreSIz3swBdSEh6IW/AjUdpkdPEAqL5+COM\n",
+       "Da15tzMqxgl0cUOCcfpvvDHv8fwvp+WyPDeO2BgOh5n8khj/j2OAVvCiHx5LGJbvfacUeRMbGxtq\n",
+       "NBop2XowGKjRaKT+YxQ6HTmjjbmPOXGE+1yIorhIZL97927amfbs2TOVy+VkvG1tbaWSCvzmzTff\n",
+       "1Onpqd55553Ulxs3bqR+oqD5PBgMtL29rTfffFOdTkff+c539MEHH0iS/vAP/1ArKyv69re/rS98\n",
+       "4Qu6deuW/u7v/k6S9JWvfEW7u7t6/PixfvM3f1NHR0fp3D/4YDwe691339XBwUHKycKIWFlZUbfb\n",
+       "TYnqtEqlkpKcY2i+VCqp2+2q1+vp+vXrKdfIebHb7SaDgOT3arWaDEmMNuQC4eVer6dKpaJarZaZ\n",
+       "07W1NY1GIx0fH2cMm36/r9lsfuTH2dmZnjx5koR0uVzW9va2Pv/5z2tnZ0fD4TDlJRGy4kxJ8p2k\n",
+       "xWn1s9ksGZB+Dh/H/GCYMIbJZL5V3r9jnITJ4H3Pg2Ke2CEa1y5yK4aHoI3vMvPrxWJRlUolY9Dx\n",
+       "bIxjHEZPZu/3+8mwi3k+KF3yc8iTYhzxnLooT1z2I08wSJCTHpr3vFf0k+8ixxDwv9yHwR5DRO4I\n",
+       "e4gLesKXXuaBFhV+TMXAeVwWhowy3Pvq+UfRAPH8KE/LYcee3xv1Hc/w0C0lLxyYcJ2PY4EO5zmk\n",
+       "LPBstyegacy387HDO17Z3Q3ISNNlhmVsr2zXnpStv+ELJSZ5xTONJGWYOE4awrRer6d8D1pMLoOB\n",
+       "x+NxJmeFiUBxxnipnx0U4/7RG+A7FDv3eZ4AAsHf7+9zD8SvwdC5XC4lwtKcGZbllNEiquQGD/31\n",
+       "EhAxjyIKBt7jVj39Jt7vi8YRMBdk9CUaQd5XDKx4FIYbmC4EoEukQ8yLw+tC2UiLwohXrlxRo9HQ\n",
+       "wcFBJqeBfBMEsOemMEaMJTeynD88l4VxdLtd3b17V9euXdN7772XQXNASTBGHj16JGm+Vl5//XXt\n",
+       "7u5qd3c3UyQQ5AoBPxqNUiL2nTt39ODBA/X7ff3Hf/yHnj17pt/5nd9J1/7yL/9S169f15tvvqnv\n",
+       "fOc7+tznPpf6//DhQ73xxhs6OTnRzs5O8iA5Eqjdbms2mydls0ZJRMaAmE6nqSAnhTEvLi7UarW0\n",
+       "traWntntdjUej1Wr1dTv99Vut9VutxMK5Nv0e72eNjc3kzGC0ba9va3pdJpKL0jz3X7Xrl1L/cbQ\n",
+       "ZC7I4dra2tLe3l7GUD84OEhn+l25ciXN4fvvv68PPvhADx480L1793R2dpYMnM3NTe3u7qbSDhgT\n",
+       "9NNRCd8Qsrq6mg5HrlQqGfQon8+ng4pBqd2o4zid4+NjnZ2dZersYEBFNNqT3vk/yJb0Yg0eR5ZY\n",
+       "Ry4naOS2cY8jwOTMYpThREtzxxsjwfMNpey5psj1iLogR1zWggxx3RVxjAa4HnG6k4PFvHFckiPL\n",
+       "nm9K8j2ffe6X0Z3vkNluUDl9mUvmE1SOunTuTLreoUXD09HSaEiR2+g6ItbfQh56Aj/vH4/HydCO\n",
+       "eg9ZHDdNoH+hgzvc3D+dTjMlStBryN9icVH6g+8w6N0Ai1GpZe2Vlj+I4RWuRUXnqAGLzWvwuAUu\n",
+       "LWpTsf2XZ7higzFhRkdPllm3fOdIFCG4yMA8fxk0SovhQv/snoB7ThgmnlQak/d8sfEXYzPC1u41\n",
+       "LoO8uceNHlAh3w0TaUMfPawHfV1A0JgHr3zuwg2FT58iDX1B+wJ2YeOhVH7rnlNMvuSze+ydTidV\n",
+       "6X769GnGaEeQubfufY0GvNPGw7IYDyAW0+lUn/3sZ1UsFvWjH/1I0+lUzWYzzTe0nkwmevz4cUIz\n",
+       "3nrrLb333nvq9/spMdlDJqur8wN4B4OBhsNhMoi2t7fVbrf16NEj7e3t6Ytf/KLeeOMNSdLf/u3f\n",
+       "am1tTV//+tf1gx/8QJubmwlZ+qd/+ifdu3dPzWZTDx8+TMpaUjIcqMFFQry0qMItKRUXZQx4rCAk\n",
+       "HhIC3cNgpjwA1eLxkCn7wMHP0hyRGg6HaSdlt9tN7yyXyzo+PtbVq1d1eHiok5OTNEaQO0K+t2/f\n",
+       "TkbteDw/Y293d1ez2UytViuhg/fv31er1dInn3yijz76SJ/61KdSSLjb7WpzczOz08/TFigLUijM\n",
+       "a10x/nK5rFarlRwZ5328fJwLr7JObS2cQd/peHp6mupcRQTEDauIRjlf41h6oV5kJE4U5+5J86Kj\n",
+       "7BJlDL7rmjA6ss+RWl/7ntYA8oVeYFMB64n+8zvWBfzkCJOPzZvLa5ddpJa4MTgcDtVsNl9A/3Gc\n",
+       "6JPTOK5VeIwxOMpLf6IRQbiU58R5ZAwxnOYOfkykx9gEZaQ/k8n8ZAJQ0KjPXb5GwGFlZXHWbTSy\n",
+       "3CDEWGbeWPfuJMf3+Q5RjHtHv1zPoNfdLuEZUWbH9soqm0dL0hX9spikI0Su6JzY0ROKjOKK0xEC\n",
+       "D7FIi91Cjib5IvVn0C9+xyQsM46WhQ5p7hksM3r4PsKV/r2jeuQp+S4VaOL3+dg9bOIonOcQeUgs\n",
+       "Mq1/Zp6iUcg/wmfQ2+c0jh20TVKGmemLe2a+gOGxlwl+DKzoeUbPmRDVzZs3Va1W9cEHH6R3RQ+K\n",
+       "RUeffEwYvRFVBfGDn3q9XurPF7/4Rc1mM73zzjva2trKIBSOmh0dHanVaqVK6j/84Q/V7XaT8mW3\n",
+       "mzQvHYASPT4+1oMHDxKS0+/39fjxY3U6HX3qU5/SV7/6Vf3Lv/yLpHmpgq9+9aupQvqDBw/03e9+\n",
+       "V9I8J+vq1av66KOPMiUBpDk6BGKCcQA6JM2VKYLSqxSPx2Ntb2+nWkJuEJDDw/b9brer0WiUlDCo\n",
+       "FoaJO0r9fl+NRkPD4VCtViv1T1oYS4TN2FUoKRlm7NDL5Ra7JPFor1+/rk6nk3LmJOnq1asqFou6\n",
+       "evWqjo+PtbOzk/qC3Nrc3NT29rZ2dnYydANtpWYUeV4gV9DLHQjqDFG5HVROUqasBQg265ADo5FV\n",
+       "jjCwnjwC4IYM9HHD3u9HniBzuIahkM/nU/6YI8cg3yC4zD8GCv33d6EDYkRAenF3lhtuoGPIYnda\n",
+       "I8LMe7gPQ593sLbhFxwC1xcun5BD/szZbH6qA31y586d+dhXnsc15sr5zcfE/GPwujykP+gk3xEX\n",
+       "5eR4PFan01G1Ws2EmT186cgZzwIJch3kBZZdn8Zn4rC6QQSvcJ/vlnd+nkwmyThznQa/RNQxpqN4\n",
+       "e2XlDyCCw5yxPIDnO0Cs6XSa8iUkJRgVD8ULcEmLMvFskY6Wsof3YBp+70iHM4ujDTG2CqNGQ9DD\n",
+       "hT7JjNUNymhgRu/DFzPCC7pwzfMFYojN+wSNovfinmVMAoyfPV/Ncx+cBj5u5t29S5KcHZWSlHJS\n",
+       "MAhdkEe0zY3VZWGz+B1CPdLbjUEQA2leGuD999/P5DxEg9iFAzShRZ6Iwh0Pazwe60tf+pKkeS7M\n",
+       "kydPtLW1lcJ4sTLwYDBQtVrVgwcP9MMf/lCSUuhqNpsl4UZ+DTRrt9u6d++eSqVS2jp+cnKi4XCo\n",
+       "crmsr3/96/rggw/03nvvSZrXn5pOp3r27Jm+/vWv6/vf/34aAxXNz8/Pk3HBPHEGHWMcDocJrSHJ\n",
+       "mi3VIEXS4igXkuFdkeEtr66uamtrS5VKRfv7+5nt7lTNxmjHWPTjSjDGIk2RB2tra8kIqdfrySBY\n",
+       "XV1NOVf09fDwUO12W81mU81mM4VLyaeqVqup6CjPLJfLKhQKOjk5Ub1e12c+85lkZBJyrNfrqYK5\n",
+       "G2Ae/nWnzfO7CP8hW6lnlc/Pa3p5xXzyqtzQcCSBNQOy5A6WK0hksstMR/ZdTrhMZDweappOp+lY\n",
+       "FyrHS9lcUfghIite/HaZE8tadz3jjpAbRJ5CAY2cNm4cehgOQ5CNJg4SOAoS9QblVTwVJKJVjq7x\n",
+       "GZryvRsb3lx/xc0DzI8bL9CYvkqLvDsMOXdefW6YR57reh4dwxp1MIUwOmkTPrf8defZxxYNVmlx\n",
+       "QoqHdmkOHCwDCCL9Yrssf3DZLttlu2yX7bJdtsv2M7ZXVtl8GWrjiINbgVTFBaJ3OJaQAYnP7pnF\n",
+       "EJtbqR4Lx2r30Foul8skn9PcYvZ3MC7QMjwNt3KxxuO4sY6BlB1B8uQ3fxcNZATvI3p6NJ4f7/dd\n",
+       "HQ638z7y0TxEGUNmEblb5mW6hxI9JLy22WyWPE6Hos/PzzUYDDIesT8rzoc394aW3edQcPxMlW4K\n",
+       "WT558iSTs+C8Bv1JwPV58PHQV/f2oif8xS9+MXOcy9bWVvJw3aMDEi+Xy7p586Z+/OMfp/fdvHlT\n",
+       "hUIhJXrX6/V0rVKp6OTkRJubm6rVahoOh2lMnU5HpVJJv/zLv6xOp6Pvfe97mfypf/3Xf9WXv/xl\n",
+       "ffLJJzo4OEjI2f7+fgoXMW+OVnCeGuE5Txrm+Bj6QLL3ZDLRcDhULpdLOUfOh6xfkJ5isZjQHBLq\n",
+       "z87O0tE5eN4cIAxd2BXGc0GqSqWS1tbW0lxMJpPMkS6UmJDmSepbW1sqlUpqt9taXV3NHD4MsrCy\n",
+       "sqJyuZzkFzvTrl+/rna7rcPDQ92+fVvSHAE9OTlJfEGIS1qUqWAHs6+vUqmUDnFutVoajUYJjRuN\n",
+       "Rjo4OEi7pOE/SWnrOghSDPGQq+K5avGsPZAFR4DjX/f2nffz+XwqeAwP+fu9ErWXGYj5jvAFqFQM\n",
+       "7aND6AutUChk5sZD9/TH5aj3k7BgRHeQ2WxUqFarS1F7T0+QssnmHh3gt9AceevrLSLvUfbGtJoY\n",
+       "xiPfzHW0R2u43xP8QdCYT57tRTddVvp9Ple+KSqmMUQd5FEBD+F6mDtGB3yXYNSPjjJ6WNT56mXt\n",
+       "lZU/gDBO3Bh2g6h+ICeJyIR+/MDbmD8VQ3A82/sQ4VZvL1O8LNxlzEainYcguD/C5MsYnmd6EqFD\n",
+       "zX4tQpH0j3e4Eo+hPR87zO+5O24oOUPD7AguLzHgRumyBUVfCYH63DBXlUolIzDJn+K5KEDnGQwf\n",
+       "FrP3xQ0pp7/f799Np/NkShKU7927pydPnqSx+1lvMf7uysWhblrcAeSKhVyKN998U+fn53r33Xcl\n",
+       "zUNG5+fnScE5bYC2b926pQ8//FDn5+cphwbnA+PDw2nklty/f18ffPCB1tfXdXR0lPp55coV1Wo1\n",
+       "/du//ZsqlYo+/elPS5IePnyou3fvajKZ6Mc//nHaoSdJ7XZb5XJZs9niuAhXeoTa6DMHVhNGG41G\n",
+       "GgwGunPnTqakwMXFhba3t9OOLYTb4eGhisViynUqFAqZ0gmNRiPRmWNYaLPZ/EBfHB/CB8wda2w0\n",
+       "GqlarWaMEDe0kT3S/NBmwqmNRkOHh4eJNzwHaDqdqlKppDVVrVZTWPXevXvqdDqJptVqVdevX8/0\n",
+       "x3NvCoVC4lMMTuaekLi0yH2Cv90YdMeA61F2Od3cGXTjhdCay0xf36w/N5r8uW4MeZ4Kn70MgrQI\n",
+       "7eFM+Dp0w4O+esiXtYeydHmATGNMbgTCq/CwyxqMqWVhTYwCjC36wkYAdxBdTyH7kc2u5H1Ti4+b\n",
+       "v8ucTil7ogXzj0GELEWu+fzTb88V4n2UF4I2uVwuc1KAO9yeG+ty2R1JrvkufU89ic6pAwhuC6DX\n",
+       "Yx6w/3Vn1sPB6AGnWdyJmKHrS6/8P2wwckQsmCQG4szIonHjQXpx6340ijze7sYFBoDXIHFjhAXF\n",
+       "PW61x91+rhCZnKi0Yx5PFCbu/TjK4wvK+0FjXCgwT5zkOh4L+QL+DsYRc4ig9TLjww1UN+IcTfO8\n",
+       "Kf76HPr4PTmQPjoiR/9Qeo4IuZBywcH7lnktywS58wwIzb179/T8+fOk6La2thKdoreKkPBFHnPf\n",
+       "aFEAcqbc66+/rkKhoB/84AdJebvh6YUqec7du3d1eHio8XismzdvpmeSE4ji8x1fBwcHeuONN1Ji\n",
+       "OFvppbmCvnLlih4+fKher6e33noroS6TyUQ3btzQw4cP9dprr+n09DQZYHiWKysrKadtmSeHseve\n",
+       "HjzMQcAIPpQgOUTNZjMhWSAXrLVicX4+niMN1J2aTCapxAJ8ShK285GkVDcKw2o0GqX/s7vMnTfy\n",
+       "wVqtlnq9ng4PD7WysqKNjY00XycnJyoW5/WyUO6u2JvNZupLvV5PtCF513cAxvXN9m2vBcZ2f0/M\n",
+       "dqXA4dmerM77kBUYf4668Bd55LuzoKPLRZrnvsRcoLgjzWU0vH92dqbBYJCS0eGbXC6X+uD9cBSK\n",
+       "Z7BmPB+Wa8wvypq/GO8+BpwEd8ygE0ZCPAbGnWaQWcbqDpfzpBs60MONE9/p7JuJGIcbi06faIDF\n",
+       "XXkuN7z5OpXmfOhFfHluRMdcLtMvT4BnLPCeG98+p8yH85rTBqfBDVmPPjEGR8B8PP77ZXrWc+CW\n",
+       "tVeGSEVvgAXkW0edGV3hRyKjLCCs38dnRzWkxena7k24QeCWa9z26My3bBeVlD1lm7G4Be5j94Xp\n",
+       "wp37CFPAPP5MlDfM78wArdzDdCUMYhQRK198LlwYFwvXUQenB3PrNM3n52UrKpVK8phdgHk9Ga+H\n",
+       "xftZIA7/updTrVYzULPvPooo309DLkejkVZWVnT//n3t7u7q8PAwGTXQHW/YFx+Cwo1Fn2P/nRtY\n",
+       "0jw5eHt7W+vr6/qv//ovbW5upi3ppVLphSR7DJvbt2/r9PRU+/v7KREaYYvRCXKyurqaDjS+cuVK\n",
+       "QoCuX7+uZ8+epZDYm2++qbOzMx0fH+v69etqNpv6/ve/n64dHByoXq+rUCgkowEa0uKacUMfGmAA\n",
+       "sOWeMFi/33/hvEBq7TjcXiqVUlFK+Mnf4d5sqVTKOFigVawrL2EC33O/8wbPZoeRG4icrYhjcXJy\n",
+       "ksbhSeoxZOGOXj6fz5SpINyJYRQNF+hHKBKaEkpl5yFrh2u9Xi8ZZigx+uBJ5DH04fNHX52/HVXH\n",
+       "OZWyhy+jbL1+IPxBzS760+l00tgpdBpDcvTLjRU3eHh3RDPcEXeZ7YiYh39Ys/TXHUhkgRcVdsMN\n",
+       "A4vnxffR2OggLeQo7/czYx3hczr47js3FrlO8wR8N5iQrchtvyfSIqLxrgscIEGHOGrkKB86Cx0T\n",
+       "3+lRKnfM/LcYcjRkL7rRjTPGEcONcWefyzN3NF7WXokh5bUcfCHCrExU9PidgSEwQoJ7MVKkrDED\n",
+       "8Vwo+kKIMK4rSW/AxChmRzdAYpYJWpjbJ9PREzfc/L0oAIQ0O7ekbL4Mz6E5I/g1/42UPc7BLXdn\n",
+       "VheqvgjZeh9RN1AFjDXeG70NLHyUi0PKTgMgfebPCwhGQy1CvPSDvkM3N8jds5nNZvr0pz+tw8ND\n",
+       "7e/vp5pN3BcNYV9cLuigqxfCg095N7h5EYgAACAASURBVAJ1Npvp/v37+vDDDzNFJ6UFbC5lC/FJ\n",
+       "87DYxx9/nN5BCQBJaafQxsaGisWi2u12osPGxobeeecdffrTn9bR0ZGePn2awnflclnvvvuu6vW6\n",
+       "bt26lXK0GNdgMFChUNDx8XEmhOEoXS6XyxTQY35Resw1fW61WklJ+nE1w+FQa2trCVFgVx80A1Fp\n",
+       "Npsql8sZR4bPGOZxe/x0Ok15Y27w0r/JZFHh28Pl5Hyw9v1929vbOjk5SXPLfZubmzo6Okr3E7Zg\n",
+       "DuEVvHx4Gjq1Wq1kFDgPu7Ls9XqZHLBGo6GjoyO12+2UIyYplT5ANnqxSj+6hHxTmq8XZFUMdUwm\n",
+       "k4yj5buAMZDimnGEl3XiyIs7Tr57ixAlhqTLU69L5DmW3hwVZ57YNey5tl4MmvfyrmhE0hgn1wjf\n",
+       "ebqCpFTSwvWZI4AvMxzQhS4j/fnRWYPHoY3f4wYvOtL1pNPL3+3hO0/vIBfR14zfE1EgRyrd8Y7o\n",
+       "k/OTG2mML6YR4GC7bIeOrmNdr7GmPELi9/209koMKZhw2YQxaIQZjQlwr0b66VYiSssZMoYb6EdM\n",
+       "jsQTjedROVEjyuXP82R2vnfDMYYgY/NFkM8vtoL6AsLLZcGzCBi709SVF/1B0CBsXIBH9MuFoguh\n",
+       "aGTF0KnDq87sLgQiIuCookOxCGoXfGw2YDyM2yvfYlBFqNi3NNNviizu7e2lc9pi0iH/j4YEig6e\n",
+       "c6FRqVQS7fC8UQpvvfWWTk5OdHJyoq2trWQk8B4EXavV0sXFhe7cuZPmFRqBQvgRIlQ7n0zmSdv3\n",
+       "7t2TNA/tEQra2dlJuTjSPNfn7OxMt2/f1mw20+HhoX7pl35J0hwhwHhAWbrgo5SAI5LSXGHMZrOE\n",
+       "rk2n0xRKrNVqWltb0+7ubtrm7gUpye0CpeQaBoejDh72nc1m6XiZwWCgs7OzDGKzurqaSkdg4DCP\n",
+       "lUolHaPi4XI3yN3JcF5cX19Xv99PeUjSHGVbX1/XYDBIxSF97YNMQBuUHc/wyt/OfygFEDaMTLz7\n",
+       "arWqvb09HR0dJb7Y2NhIhUORCY7EYwCyNjyMzrPpI8Yt9zImHB+XffSZe11JTSaTdMTOZLKowo1S\n",
+       "IxyGUc08YQT6OqOvhHXdEeU+D295or4jI8gLl3/+O6cJfXUEKeonrjs/zWaLcDbPdic56hVH1Tz6\n",
+       "AO94OG1Z/7zvL4tGuA70cjRuzDnYIS3yWD0czDUHMBiDR1uYf3+u98V1lDd4BX0RK9vzzmhfeFgx\n",
+       "jpfv/cgb6B2d+9guyx9ctst22S7bZbtsl+2y/YztlVU2jxCee0ae8ChlD98FtoyICxatW+7ErTmC\n",
+       "w98LUuHImG//pzmUzX1StsK2X4vwqo8Ja53+R2iWPvlz/C/hBqzpGJaI4/PcIIctoQ395HvfXovn\n",
+       "EMNi9NNDavwmhrYcoSKUhyflYUlCL+4BuKfg+R2eqOyJxqB9HtbFc4vevHs/oBnb29vpHQcHB0u3\n",
+       "8ft5WXHuCWeBmhGmdJrwDEp13Lp1S9Icefjoo4/UarWSt+xhT1AuttQTbvzJT36icrmcEIBGo5HJ\n",
+       "k9ja2tJgMND+/r7u3r2b+vrJJ5/ozp07yft68OBB4rPHjx9re3tbtVpNH374oT71qU+lZ3Y6nZSz\n",
+       "E7fcxzCuo4NsCyfk6/Sr1+vq9XrK5/MpCdoriROWAXXx9QBKRYmDZWdNeuVveN5zU0DWfF2Anjlq\n",
+       "wTvdo/UxwrP5/GILP3PY7XZTkjo08PCG78bz0hkXFxfp3fBWDLGAHvmZYp4bsrW1pclkcXjv0dFR\n",
+       "Juxdq9WSbGR8oC9OTx+foz2OcEgvljOAN1xOsP59nnwnLs9knhwVhjfOz89TtCDKZvpWqVQyPMl9\n",
+       "y/oIvaUsEsh1wssuXz3E5DlMzoPwu8u0uGvP86J4vufUkccHDVyuQEvnN8aFXPcQGjohFsWkr85f\n",
+       "Thufb/76fVHWRxntNHfd47zq+sYjHvyW98XcMH+mp0wwdp8n15mOZMYcLQ8zgnzH/ERvr8SQ8h1d\n",
+       "cRFIC+jSt8y74OE7aWHoOKzq0CawJzF6h04hJIvCDRImF8HihoQrN4dG3QCKCtQhzBgW8LFFmFrK\n",
+       "VgYmLwQ6OoTpBp/XQpEWoQeHpRmz00GaQ/OEA3kXjXF435ctcIzYmOdGDoX30YUJRkSEyLnmAgD6\n",
+       "R6bntx66cKPWoenJZJIOuJWkZ8+eZcJGkUfjllzfYcUYWegxGZd+wVMksR8dHWXqKPEbxkjezunp\n",
+       "qba2ttKRNZyLRm0iwlTSPAx4dHSUduzV63X95Cc/kTRPtkbJrKysqFaraXd3N/HU5uamRqORzs7O\n",
+       "tLW1lZQXOSrQLcLt0MHHyBwyFxgSJNNj5PgxICh96j35jjP+3+v10vEZGC4YFdJi7RNK8zwpDHKc\n",
+       "LMoy0Nh55ZtcmFsfpysMd+JwNLiv0Wik0BUHX8d1yvMuLi4y9Z2oiB0ru0fnLobYp9Oper2eisWi\n",
+       "tre3085TjrdhPUAH5gLF48+D9h5Cmk6nGUOD8UYFSH+QNR6Sgaaz2SwdsoyM452e9xjrA/EvGgcx\n",
+       "vQF6QmdoGEO2LwurOU0Zq8tL1gP3xfvduWYt8EyqydN884anH/h46KOHTz3XCefAU1RiiobLQ5dR\n",
+       "UT/Q+J6wrBsd5Hl5LhLXyAEkdcT1NHzkaQIxYR3+WOYIw3PeF9dlnvLB+2JOqxvfDqp4SkVMi1nW\n",
+       "XllBTppPJpPOAD2BDSJwDWF7cnKScitoy4jlRI8txpZ9YlDIEUFzgUJjDJ5UySJmXPw/9sMXZrSw\n",
+       "fSzkSkmLAzLph48Pj8obAkpa1KGBrhg+Uva0dvdupGws2WlHczo5muRekRt+cVwkkMaF43zCczyP\n",
+       "iMKEPj6nSUycxfjGsGMbvyfLgti4gRPzASIqw/MRLNzruV6j0Ug3b95M4z8+Pk5b9xmrJyMjgNip\n",
+       "trOzk/oKz7I13Hdjce3q1avqdrsZgyiXy6Xjb4bDYRp/o9HQ6uqqRqORms1mJvkXI8gNKDd0oZuj\n",
+       "eIzBd8JGpKNUKiX+j3mTXD87O0tFKOlLsTgvicF9nuCNAHdHweeH30JrLy3hAtw9feiKQeG86Dtk\n",
+       "URaeqLy2tpbZYUfyMwYt8gyeoy8Y46PRKPGC96VQKGROrpfmMhEniPIJbBhot9tJRkUv3BW2O45c\n",
+       "QyYPBoNk4CPfQP5YN45u0Ffe58aBK074wx0txo+hRJ6bNF9vw+Ew7fzkWezYov+e78J8npycpHw+\n",
+       "rvmuO+acvjAud64iKs+8R+TCjVMvRcH7iDQgk2Jz3cD7MKIwUKKRwDhfZtS50euyHP70fDTGH9He\n",
+       "SBvWPQgaY+Qd0MF1lDs/HulgvplLd2Kcv3wOpMWOc1+Xy5x+tyfoCzTxCBfXvIbdsvZKQ3tu6cXd\n",
+       "BK6EmAg8TJ/g4XCYMXQgspQtyhhRHkcOHE7m/dy/bCJZ8PxzFIb+LjOy/H73ylwASNmKt7wbpnHF\n",
+       "xbsi4sL30WB1SN3hVA8n0D83pnxuXDHRNz+VO9IuevHU6IlG1mg0SoUKfRzMk/fXkTNpAcV7UiJC\n",
+       "3oWW0wIkBGHMOyPKiHJgLugTgtHnyeuXOALDc+k7ioqE6/X19QzShUEiLUI6w+EwheMcquaZ0+k0\n",
+       "HXIrzROca7WaxuN5HZ79/f2MwphMJnr8+LE++9nPZvh0ZWVFpVJJ+/v7L6CR7D6KiCT8Np1Ok8Bx\n",
+       "A9SdDmjjYV0PZTnPjsdjVSqVZMBGfmJdIDh9nUIznCw3bHgnOykdJSA04LtI4e+oXFxuuFCPaxEn\n",
+       "hT4Ui8VUGoECq6urqzo4OFCz2cwkW/NsP/8PmoIsxdpc1WpV+/v7ms1mKfSLAVIqlXRwcJAcRJeN\n",
+       "jrS6rJAWu+TYVYkh62iOe/XR4KbP/jze5aGfaNjRT1BZaMM7MKCiQeTP9rICIJw44258L5PRjuzT\n",
+       "R+R/lK9uGMT5j6g/9zha5rqEviOjXCYh1/ykj9hcTi4zxDwFwOcdvneeg97IIubI5wnDqlgsvuDU\n",
+       "evgbow8a8xzGG41vB1aWNfiA++KcuFHtCJ3rWH7rPOm8T799TLG9EkMKgRhDGFL2AE4fjCskFoc0\n",
+       "F+7Ao0yGGzYQE4+I5kwTIUf3ZD1u6/dy3YVDRE5cKLjRxjtdCHOPf/b3oTSXTaaH0HyxuTCEYXkH\n",
+       "IRpXdm4EITToq3sN0NIFJ9d8oU0mLx5QyTEi5MPQN1AB3hm9OgS3P8/r9rA4oA87thxxcwMEmuLR\n",
+       "eR4MNMUT8flw/nOkwo0anzdoQ60jDH88aUnp2AjWA8KB/pRKpRSe6/V66RBlpwUoE6hTPp9Xt9vV\n",
+       "1taWLi4u1Ol0tL6+LmluEHS73bQGOaJFkprNptrtdjoepd1uL0UjoSW0ccdhNptljDDPYcPo9XxE\n",
+       "+NBzqWgYp74e+X46XRwPE50dr6lVr9cz3j4CFT731AGQL5TCbDbLbJ3HSKTPUUn4Goc28F+5XE5H\n",
+       "wmDYOILdbDY1Ho/1/Pnz1O9ms5mEPmFIaEwlefoCrw2HQ127dk29Xk/dble5XC6NgWNjjo+P03yx\n",
+       "tqE1CBlHN0lKBpTvzIryxRVWlFWsaTcOeLcrSoyjOE8R5ZTmiHS/31+6sxraeuiGayhXdpnSQMBc\n",
+       "jsXwliOnNJANL8Pj6wNDD55wOvGswWCQdglDT/gqAgHIK48wuG5DntJ/50WniecK0Z+IskeECF3j\n",
+       "78aRWVtbU6PRSOgzzyT1wlMdoDe6gntcfsbUgEg315sxBSSO26/x1yM40UFzZ8hPqnhZe6XlD9wC\n",
+       "dQPDPXIpm38SjY6Li4t0ijmEcwKw6FHEoBggMRgcLohiMqgbGQgaFJ+HYaKFLr2YyBe3k0rZHLDo\n",
+       "DbrFjoewzDr3cBTPj6iYK2j3KmAoNzSWbTf1FvNHeD7IAO+KaN3p6akODw9TWBIa8Xs8dubZw1fQ\n",
+       "2o1oQjoYEs5PnmPnAhMDCQ/Lx+rJxHHufeMCvOiekAtdFB/0phwBZQcoBkl/oNtoNMoYhBge165d\n",
+       "02w2S2evMccIae8j/cEgoCo4vxkOhyoWi7px40ZSmih2aLK5uZnyetyD9NwiP1sMYe/oqKPNGJ7c\n",
+       "B51d2KOkl9UXQzjG0DpJyswp/MZ6psq6o1nUawLddofHc29AxRkHiBzKxBFY5AKKOOZVephvOp2m\n",
+       "Y2BKpVIqJkvtK96HcsV4435pgVqDTLlRXy6Xtbe3p83NTe3t7cnb6upqOoeRzQMuSz106coryiqQ\n",
+       "HEdrQY4ieuZORlRS0cF1g4z3ufNGMdp2u61Op6PRaJRxlnlmRCFidIB5jAoSJA0Z5nlH7oiC2sOn\n",
+       "HtaK6A9/oYPzDHoE4xyk0hGTZeF0nsfv3HiLzq0DFjTPeVtWlw955AYRNOE7z0X2DUNuQCN7+W10\n",
+       "MNwg8ve5PmOtRUTT5yKGbplfv+bhd3Sj0wlegHfd+I4GZmyX5Q8u22W7bJftsl22y3bZfsb2ShAp\n",
+       "oLqYFOiwZ0ycc2/ALWyPdQNpxriztAjnYFX6MSigU25p8zy8ZYfwHRWhD9LCK/XYrKMnPt6Xhe88\n",
+       "74P3ee7Fsti8o27QiERVYFCq9Triwnex7IN/F0OGeD+OaMUwnPctzkOlUklQNqEm3zkEZO0hSsYH\n",
+       "yuR5GdDXUR3eTZ7U2dmZhsNhJpQYQ8Ee3iB0Bz0dPYE+8EnMI8Lrgda+fd3REZAp+u5ek3t0QN6z\n",
+       "2Uz9fj+TX8Q6ICzmPMmOO0J8jUYjobEgIH4mW0Td+v2+Op1OJpwK3aiu7/0GFQNtdh6KIXPewTVH\n",
+       "Tz3PjblwuoNAkZjPmuD9vr7IJSH/jrmCVnjY+Xw+0cY3bzgCxzyBHHGUkCevwksgadAND7nX66X+\n",
+       "MffT6TTNE/PMbk4qvoPSeYmDtbW1TO7WaDRKOXf1ej3t2rxy5Yr29/cTXxwdHalaraYE/1jIkDE5\n",
+       "MsG1QmFxYgHzDG3gQw8X+TVQIa+iDd8gN6EV1wj1n52d6eTkRN1uN4NwIxvIvfT7QHaWIfggHPTR\n",
+       "5bcjzS6/YvgohsRIFvfIivO3o3sxRER/+v1+Zh0iYx0xlV48XHmZfqS/jvwxF/7OmM/lSI2fsIB8\n",
+       "9f44Uh378LLQv+809L4yZteLjnw6AuXoZgzR5fP5FGb06IS/j40oZ2dnaR3CB45EMU/I5J+7HCl2\n",
+       "RTj8D5GA9GLNjphQ5wbSeDxOYQhnWIgJ7OuGDIaVQ83OmD6hcSK4z5lLWoRFPIna4UHizDHPy0N+\n",
+       "McxIc1iS5pPrDMvv3eDhfX64I4I/JgE6LWL+QTR4vbmx5cbdsntJBJaUhB6LwJU39HRFGUMm0NaF\n",
+       "CwvD4V1PsPRF5H2Hbg4Ve/gC3vAwQhy750qg+Mrlctqqj6B2qNpp6sJlOp2ms+16vV4KD/JuD6O6\n",
+       "QpIWtXbIR/PwFRXDNzc39cknn6QxEuI7OjrK5IpAN54Td8eMx/OSBqxtDxl4cjrzi7MzGAzS2J3v\n",
+       "GHsUkl4ugDAh68XvHQ6HyWiF990gi+vS83I8h46cM8bPETC5XC45R9KiFIXzCvzlxr7ngklzWcjB\n",
+       "yBhCXKvVaiqVSur3+5pOpyqXy2l+STR22UHb39/X7du3VSqV1Ol0UvkFv95sNtVoNDLGCXzLnLsc\n",
+       "JGyKzImbMlxBY/jF/CJXok5/7keW+mYh/7uyspL4s1CYb9bo9/tpJxxzyCYCz6+j8Q4PQ3sC+7L0\n",
+       "DJ9fl9sxrcHlko+d9Q6Pej4wziCGD/PkMnFZfk50QDx86e9nrG68uEHnSfp+v4c16asfneMJ3m5Q\n",
+       "xYaco4+e7sDYPOzpciJuGvO5cWOXZzM2aIWD5SkG7vy4HGJM0MrlKDSJ4Ie3V2JIocRcAPjCcqXE\n",
+       "dwhmGCjG7skV8YXo9VhgYveSPLk5xmqjZezM4p5LNPBQ7iwYjwfDoHGBR2TImbtQKGTQBhecEWWL\n",
+       "dHRh7n2jP57LED2u6I25UetM5YsnIjf+/5gzMR6Pk5JqNBov0MSfj2CGyb2kQMxXiomUHrN3Lxih\n",
+       "iVKFvi7oUAq+gLkHgcIz/YgdjPZogJPn4Tk4Ths3lnnncDhUpVLJCBrnzdPT04yn6M/0hHgXTKA1\n",
+       "pVJJw+FQx8fHyfu8du1aQntA3OBFDghGeHnuTqPRSOPb2trS6elpymdhNya08fPkoDfjzuVyGeMU\n",
+       "wYgA5j6SwumHtMhDk+aKn0N7QV9pjImSEX5+5enpaabGVswJmUwmOj4+1vb2djr4mb6en58n1MsN\n",
+       "8F6vp62trYSOuhJqNBo6ODjQzZs3VSqVtLe3lxAplECtVkuGgj+fPmJQ+zN3dnZ0+/ZttVotPXv2\n",
+       "LJXFIKe00+mkg8PdwHb5OpksjmthxyLXWAtuSDuP5nK5jOxxxZ/LLXarYZiQ67gM+QGJdh7GMWKz\n",
+       "kdP0/PxcvV5PtVotzYM7Zi4jPB/RDazYD4wol2M05A7PcWfax+y6gPfxGz67ocx6eFmeGs8GdHAe\n",
+       "d2cg5ke5g02+IM375jIN2sOT7vQgn13murEGzd1A94bcch5GLjA2jClpsS5wkt2I9dyuuDPPHQD4\n",
+       "hbEjW9BNTjdHFV/WXokhBdzui1jKLjbpxeJhy7wF32be6/XSdmLu9+fjvUrZM+PwFPmtJ8H5byRl\n",
+       "mAjjLFqvTKQzjPfXGZlxw6jRK5eUUY6erOh082fxPh+jCwnGiJce4WM3fvy6tEBsXDi4R8f4HYWg\n",
+       "Xxg18ZrvluFdKAyEK7zi97KLCMM1himZI/ruStiRB7xFSal+kgtXD28u+ywtFjfC2Xd9eX8wpNyL\n",
+       "wnhgTXgjVI1g87nGcMHoiXyI8wE/+UkB9Gd/f1/5fD6dxTadTtXpdFKZinx+Efaijg/PGQwGmZDs\n",
+       "eDxORh9JwJJSwjSGhq81+AgB5vSl0KYbbnjsfOcVwz1MxWYCykCAwsGf8AAK2qt7S0rhLxwZeIH/\n",
+       "9/v9zDolJE3BXFcm5XI5hfXG47Hq9Xqm/tfFxYX29/d17do1NZvNTKjJlbSjqNT6OT8/13A4VK1W\n",
+       "S2gNu5ifPn2qzc1NXblyJaFdhEdxAiPywmdkH/10z34Z8gHtHAHhd+74uBNKgy4oTXhqMBhkduau\n",
+       "rKxk5IKHoXw9TafTtCmDNephb5Q78tbRDHiS8bjyhDej/JayDkFE4pAvrHNf9yh7N2y4z51p5zV3\n",
+       "fn23myMtbkR55MLrWPk8Mj/u5FGfjHc6os9ccj9GOQ4j13AcXEY50oTxFOfQHWD6HA1JlyFRJmLc\n",
+       "xbIyjrTGqICnvTgfuC59WXslhhQMMxgMEoTpuyJgDI/PuwXqCgNisBiBIKXF4nfUwKvmRqQlQpxu\n",
+       "pXpD+HPNJ1FaCHnfbcFCwRiKhddYaNE4cW8zekku3OJ16BghXxo0xnJ3A9YZk2fRyFECJXMER1LK\n",
+       "VWARuOGG8sbQ8mrmzIUjTtJCQYGgOPODqJF75EaGG4DLcgG83EVUJih8Dzf4/PpY3IDl/cViMeXl\n",
+       "+OJzIeOK1qv+giJh2OBpOr85n1E2IvKb51xFZXJxcZGOXGEOaaPRSMfHx2o2myoUCpmQEDQpFovJ\n",
+       "iPKDgKfTacZo4RphzZhTJCmDTETlPZvNUj5iPp9P6BvPxMEZjUaqVquq1+upsGg+n0+5Qzha7rFX\n",
+       "KpXMTjjeSSXx1dVV9Xq9NFfQhtw+cqRAwAqFQkJtQEC4Ro5Po9FIxU85HgjlheLf3NxMMsrzNzwc\n",
+       "CG2Y37W1NR0cHCR61+v1NEftdlvXr1/PhJPgW5QhNEWZsT593cN7rBPWoaMwUS77kTVuPDgaKy1q\n",
+       "ziGrmRfWkhum/g7kIs/3UDJyCEXqSEt0LB2hYh3jlHm/WW8Yd97cKfOCs47cwa/uYMfwFY2ixC5H\n",
+       "o4L3yIwjbdDKIxfRCIhzTR8wrqFhNDIdJPC1DHrtckxaGEE8MxpSGDvIRm/IWHSGOzQ8x1Ej7sFe\n",
+       "QCZ6mBSZHp0Cj4TRF19vP9eGlFeQZSAxP0palA7Awo6TCMNRh4OQAswCsSaTSYLiyTvBYHOhwXMj\n",
+       "yhO/xyCKFr1Pki/gyWSSOVfLkR9fNBGxkbQU+YpxbM/9wFBwL93H4guc+L8LFH+/zwXv8JwWZ0bC\n",
+       "PREG5zufLw+9IjThgYgigYa4VwqcDHJxdnaWKcCJoPctzNLCAMNbKpfLmQr0bly6kYERFI+qoJ94\n",
+       "VuQSDYfDTMKml6/wZGQMXk8kduOfMYD2uIcFXbnHnQCEKciNC0OQXHdemCcM1uFwmEnu99IUuVxO\n",
+       "1Wo10QLjk7wcSSmchLFHWNCdCJQ568jzGMfjcTLMHAWF7ryfJHxqdUU+HwwGqlaraS69rg31tZin\n",
+       "wWCQ7u12u5pOp+kcRtBBDHdXQvAO/6bTxekLhUIhhT7X1tb07Nmz1JdGo5Hqqk2nUx0fHyderNfr\n",
+       "GgwGGaSVvnmdp1KppHq9roODg0TTZrOpWq2mfr+v/f39NAaUGjSM6IkbTM77HiJGFruTOpvNEtrI\n",
+       "d/7X0xBcLrjXHzcMQAfqRbEBgrkZjUaZUKSPYzqd16wrlUpJefoYWa+OgtCgj4egoR3z6WuG/jP3\n",
+       "MV8PZ8XTSOhL/C7qC+gR3+UOEnPuERs3MFyfYOCSY+dGLX3H2IuOOfzip2JwzR1WB0E8jSIa29A0\n",
+       "psHEecBJiZEap7uXICI1xulEX7Ah3N5wurtB7AjVshCpt8vyB5ftsl22y3bZLttlu2w/Y3sliBRe\n",
+       "gO+0kBZbX6MV7TlKeNNuffN9hDB9VwLWJ3A5Xjx5Jx6iW5bI7qiWtIBVydHxPvgYPHTI/REWdm/T\n",
+       "UTfGF70/moevsLCdLuSL4dXE4nt+5EcMYcatqIwxJgo7hOyeDP32Pjv0/DKY1HMWoBdhHWjl7weV\n",
+       "wrPzHVKgMSRV837fnUSI0mFwD3sug8Qdqvex0Rc8t5jo72E0dng6rUHQ/Nlra2tqtVqZ3V2OrNBf\n",
+       "iksyRiqT5/P5dOQJYyyVSqpUKol3IuLIswmpgOKyY3AyWRSq5BqI8MXFRTo30cP2nn8Sd5uCUJAH\n",
+       "E+kqLZLAHcGQlBKxybPjPfSPhGkP33ruWK1Wy6QY1Ov1FAqqVCoajUYv8D6J7s6LJycnKhaLarVa\n",
+       "6na7mTAmsoudkq+99lpCViiWWiqVdHh4qPX19SQTDw4OVKlU0rsIG0pKhx8jc6rVahr706dP05mJ\n",
+       "XOeZV69e1eHhYUItnPehvW+2iOkOPmcgbMyHh1P8OdzvKEoMeSMvHCUgrM/8np6epkKmg8Egg675\n",
+       "/DoSS5jJ+x+jGTTPxQK18DF4WDLK4xjCYwzwPv1zPcP1ZZEPR82ibAaBQd5BTx8Lc+n5SdwbkWrX\n",
+       "ey77PFWCcSM3HXXztBX0jaNuUVbGHFPfUOP84ii0I6TINdYDaKjzgYeLvaFjY7qOpGQLsK6ivv25\n",
+       "C+3FGKa0UK6eMOehplxufvwF8LFDtcCsCGlCCoPBIC0OzwPwaw5B0iIM7TkULlxgSodlXan7ex36\n",
+       "dYXuY2fBLEtW5Lozt+9UWQan03dnZn7v35Mo7MYbv/N58P4wB77Io0J0iNeVNXT1kGAUSjGHjFwY\n",
+       "jkfhORGG9dIAHlJgx5TPdb1eTweeevgPvvB4O2N3g8WFF+En8gToj2+f59muVBgHApYkbXgYY9F3\n",
+       "FXpI2JPXi8XFdl5Ch14/zBUkpwF4/See6YKVXCBo67TxHD12XnIYrNdzgx7wjdPUQxf8DpkQwyO9\n",
+       "Xi/ljjGOXC6XzhR05c14PXXAeYlQKPLEE3AJA8Y8P9Z+rVZLfENeEnPELqjRaJSZe+7v9/spnCfN\n",
+       "DaLz8/NUCoQdgZJ0eHiojY2N5Ei4bMARPDw8VLVaVaVSSeHJ9fV1dbvd9Pn4+Djxz61bt5TL5dTv\n",
+       "91MI10PM/GWNunyGfzG+oowmpyY6oi7XPOeM5o6X50Exp+R8EY6XlGrDjUajlLvmTjnOEmvbZY3L\n",
+       "QDd6uMfTLFwuYZygxD30487ksrFhfPn1ZcaXG2D8nv5CNzdeMWzcifbwFDLM5wSZ4RuopGwSdwQz\n",
+       "4H1fFxHMcMPG86DizmKah9wY7wx+SQAAIABJREFUl++q5xr3MgZSBFZWVpKj4yUpXH/5fe7AR31J\n",
+       "fx3kcN6PvBDbKzOkpOwuGBYRdYGcmX3LJcmgL1P0nkPiyjx6JeTHIDTiuUu+8GPekxtlnrPjkxhr\n",
+       "QblBgYG1LHkQL3qZVx4T5bjm8XBHvlBQMLAnrFIXxlENV7RxzJ634h6sN+aM/i+bFxceNM+PirRh\n",
+       "HnK5nLrdbsqVcrp4bg3KC0VG3hxJq4zPhbcLOEfpEFKMHYTPk03dqPYYPAaK57RgZCGwobcbsvTT\n",
+       "BRi5IPCUe4l8T/I0yoT6TJ7PQp9Zc8tyLVZXVzM5Z9PpNClk3wQAT4DkrKysJHQqn8+r2WxmjkXB\n",
+       "AHFBzrW4y8Z3zuXzi91lvta5JikZyZPJJPEGyi2XyyWEzNFhp3m1Wk1jA7mMNIE3ML4qlYqGw2EG\n",
+       "kWN++I0rfWQF6KDTEsOsXq/ryZMnqVDt5uZmMtbK5XI6MkZa5N00m03t7e1ljGj4T1IyPNvttiTp\n",
+       "k08+SUgaRnJ0zNit5aUvkAls5kGmeF06X7P+GUcJ5MmVqSu9WEqmUJjvMPRdYr6ZwlE/5Dnvg2/g\n",
+       "gShvPJ8tlirwxPeYa0S+oiNLEUlytMbf65EKrsX6bm4AeH+9MU+uB6Nzwl+MLXe+3DF3HUxzpMkN\n",
+       "jhixcGQPGTMajTJG2nQ6zaDvEamkkVwfEU7klveF/FpQc0kZxI17ItDBvLpucoAk6qWov6KRnKHZ\n",
+       "S6/8f2oMxFGZWI5AUkIV+v1+ggJpKESHh6WFx4FH4krIt2iyaNwzQcm4B8YznUncM/EE4ig0HKJ0\n",
+       "b4FrrvSk7AKMi8sXhe9MdEaAWfw7XwiumDAWfFEiLFgUy4w+3w5Lv/BYfNsrz3SGxmjmmgtCn0Pn\n",
+       "jdFolDz6+Ezm370kdmx5ojH9dAPI6c32dRCX6IWMx+NUksGVLQYS4cJlCZQo6Bi+5Z0kS/sWZVcK\n",
+       "7F7knWwBPzs7S4nRCBZ2tXm/3OtCaLEVnvuYF++/hwTZxeeGCX2B30iqd14sl8sZoy0meVYqlYRW\n",
+       "0RcMTujrCac8k3n3Q7AZBwn3IDaEWkulUlLM3O8bHxyRjggGic/senQEwWUGoUinD8qg0+mkWlHS\n",
+       "3NjZ3d3VbDbT/fv3tbOzI0m6cuWKZrNZojk7CRlDr9dTo9HQxsaG9vb2kgFG+BejbWtrKxlgBwcH\n",
+       "6axFjAZXvI4yu7zkr4dpMUi5Dt2QKS6n3ADC2OQZvvYcdWONMPcbGxvJAOVA53a7rcPDQ62srCRj\n",
+       "cTgcZvjMDRR39DCMXA7TX490SFl5DGoa9YDLV99Zy7hdXjlN3QD19/g8uOHizv0yxN/nCVq70+nl\n",
+       "Ynwton+Q226cYbxwn6PYLkPRl5407yUfPNrgG2TcePP+05/pdJoxeqITCA09fYK1HEGQSKtItzg/\n",
+       "6MMIHGTue+mV/8cNQeXKhIUdyw645ctRCfFIB6oMe26PL14gxmgQuNXuXiJ9YzIc/vZQlgsbXzTk\n",
+       "YdBceCDk8PRd2Xp/JC1V5jFMiWJ3w81/Ez1FWoSP3bDxMGS01KGVIzreVwQgNJdehEljWAx6uoHp\n",
+       "42XhttvtpDAwXD3M5M/kO4wTF94uBCP6h2EJH7pQRMCAzMW+QuMonJ2uvBN+IwTFNb6Dbuz0xFjg\n",
+       "Nxj1IBxxjNDTlSD3IRQpPgl6QpgJHq1WqynshLfJM30MhUIhra9Y9gHUFvQMhIY+wS8YN76O2I0H\n",
+       "Es2acdRZWuzwoiFHQDMckaTvGKWuAFdXVxOS7Uguz8TYoJinOxHkHcYdRqCpoH3Hx8eJ3q1WS7lc\n",
+       "TvV6PckE+lksFpNRUCqVkpHOGDgCh/wSP3amWq1qNpvp9PRUR0dHKSS+vr6e4a1YooW59TAufOwG\n",
+       "V2wefnJji/44oo1xxDXWE0rXDX54mPu8LAxGfrlcTv8kpSKdONCu9EHumFt3CN2QQbbE3EkQJEel\n",
+       "3UiIYSDeH1Ef7nNDwNdMTPFwow5UzGWypAzi7iF71yfMN7IR5JG+uuPsO3ahG2vSnV03mvyvzy8O\n",
+       "Bv1j/CCX1WpVuVwurQv6Dh3cAPIwHjI/6m43imN40sOMEU2MMlrKRlpe1l6JIeXK1xU/lmycfP4P\n",
+       "E4BOcB9QNZ6gG1BMeNweHpnWlZgXFvNQjLTwLN3zcmbzyXBhykSAvDgi48qEMUbPxQ2b6CWxEH2x\n",
+       "+W9iAiA0RRHEfCYXoPzWn+dJrE43+gJNvfRAhPBdaHk4D2Xq84QhRyiBRN1Wq5UWBOEi91pIUpWU\n",
+       "EqGlrDCP6JcbHs5/PjZHIL1sAsYyOTh4kjwLtMYRU+6l/xhF9JuQIOGfZrOZoTVIEIrd+wzNmM8I\n",
+       "+TMfrrzK5bKq1Wo6wmdlZSUTLoWPp9NppjgnlcwJxWD4SUp/XRn7GkVpw08uFFlH8KnnOyDo4TVJ\n",
+       "GZTUESi+4zesa/rkfOMGrgtU+gji5l4q6xJj1uVLtVrVxsaG9vf3M4i4ND+updPp6M6dOymx+ubN\n",
+       "m5Lm4dlcbh6a3NjYkLRQlqyhQmF+VEq9Xk8IGDWUSGAfDAYp72p9fT0ZLO5QwI84gIzHUQdHOOLa\n",
+       "iblobjhgrDhCEGUYdPU1y/PJsfFrg8EgI/fW1taSg4XDRT9clzCvzoeONHAmZwzbuTLGMIgIPk5b\n",
+       "DHO6U+eOmZTVbV5KBznEO9A78EA+v9iA4+gTz+T9OHvu7COLGBv9iY6Yr0UcEUdefV3wFwPPESJH\n",
+       "nx3hHo/HyWmDnu7QwROu+7iGTmP+IoAA7ZGPPpfuOMQxLPsd8/bTEKnL8geX7bJdtst22S7bZbts\n",
+       "P2N7JYgUHjH/5y/oSYzB+v/xTN0KBk4HGsTKBBWQsltipQVUipfkcDQokyNDHm4gqdg9EknJK8GL\n",
+       "LRQKS3MvxuNxpmK0hyVizNif7+iMtEBWfBu209h3UWB9ex4JNC+VSpk4NgiG5625VQ+aViqVXoj7\n",
+       "S9ljCNzDcm825o/xnBjXZi58PGyB9u31Drszv4yj0+lkvA88Grxsz3uLSKUfseJ5EA47S1rKW/48\n",
+       "xgBE7iFozgoDbfDjR+r1etrizxhAYz1s4Eiu040QBXzCuNwb9VALiea+bZrfehI8eUfHx8eSlHbU\n",
+       "El7xkCD9ogwFHih9cWTJ+8k4crlcQr/cS8Rj9nCDIyMcqUIyuXudyAue4+iuh3IdqSUUCv/5/FLI\n",
+       "FP7yKuy1Wk0bGxtpLgjJwcO7u7uq1Wq6fv26Dg4OEuI6nU5VqVS0tram/f19VSqVTF+YPzYjkFhe\n",
+       "KCx2W167dk2dTkdPnjyRND9LsdFo6OTkJK1xRyuQXR5i4X3QBXkTc+v8rLwoH32+l/1lHhxdZA6J\n",
+       "VCDrnS848cALHZfL5RReBmGLaSKg376GCUsPh8OUauKyDVlKzpfvEuS3RBw87EW/YxiZMTv/uc5z\n",
+       "OeKoKSH0KO88ooKcyOVymfWETGP+HGXxlI6IwjjS7XMlLVAnZImvs5iDxe9pKysrqlarGT0F3Xxe\n",
+       "pAXKGVFG3zDhyJznbPE7Px7M+xTnxHmWOf+5C+0ty4ORsnkt0gIu9tAPBPSdNAzQt/T7NReO0UBx\n",
+       "5ReNumVQnicNejxVyu6mQJF5zJw8EPrpApm6PlG4eZiTd8fdA4QjnEFYZD6u2Fxw+liBaGGmeD/z\n",
+       "FMMtbEP15H7mwuclhtNeZsTQRw/DTKfTtFOq1+slujHHbmRiBCAY/UxAr4DuNAKyxuhxQepKnJ1L\n",
+       "9JOcMeD4yWRxBE5s/M5LGpCbE+FvnoWy6HQ6SYB7KIF+A/djPBDmITEb2niYyoUG9Lq4uEh5Ni6I\n",
+       "vczA3t5eorcf48GuNnc+SH53GktKSeh+/puHMPr9fgrNuyEFHQmzESbwUDJHuXgCqjQPCxWLRTUa\n",
+       "jZRb6flT1KSify7AJWV2SMK7pVJJBwcHKcdpZWUlnW/nmx4Io/phz81mU51ORwcHB7p69ap2d3cl\n",
+       "zcsW5PN5tVotHR8fq91up35Wq9UU9sW4g1cbjYZms1l6no/96OgoGRrlcjnxB/zE3+h0MR9SNhzH\n",
+       "OOAZtqa7sewGmPTicRueL+WGDc9uNptpo4krZXLSqLHG8UCc0cda9PG7gYCc9XVdKpXSDsnhcJg5\n",
+       "box59pQP+k2/MCqiEUAozvOuWEse2o9pBp77CR29KrnT1+mIYeWywmnMmvcNQe5g0S9fs562Eg0Q\n",
+       "D126TuR3LivdGWAd4czGtBzG5XSBv7y5rOEdkS5OG/rJc9zgjnMHryzLc6O90mTzuPXWDR4pG2fH\n",
+       "w4GJ3fOGqRD6nguDIuL5TlQEeoyhwiwgTO4NYNTwTGcoR1RIzPMcAjfmPOmTmjRs54yJqjC0K0LG\n",
+       "7tvmoyfA/f5bz+lxo89zBYg5+5gjbfB6Z7PsjifPt0HQ8X4fU/SEY86Fo1WMP9Zqgf4sYgwReIf+\n",
+       "nZ9nz3X0vCHmw/OePPfC0U9frNFbjXF8aObCgO39GE6ef0Bx2uihgiyQByUpoT6+MYO8KlCg2WyW\n",
+       "aqX5Thv6QpLuxcVFRmGMRiOVy2W1Wq0kaKANNC0Wi8mYhaYIIBACzy1y4344HKbDpqE3zgWoI/y0\n",
+       "urqqbrebHAWKjNIXeAjB6qgTieascYxRaa4M2+12QgZB9Hgn84FCZJdot9tVv99PxoLvoiPHo9Pp\n",
+       "pKNy4Nd+v5+UKIoWJO/s7Eybm5u6du2a9vb21Ol0Uo7UaDTS8+fPVSqVklHrMgSDYX19XblcLuWy\n",
+       "YXCRP+U1vfDKJ5OJGo1GRsaCmIGauqz0HEyX2e4oMo8Yt3G9LHOgmRt36GJOGrIBtAia9vv91CeX\n",
+       "O9SOQjY67/OZAqY+BpzRXC6nZrOZWRedTiedh4h8XJbcHQuAujPtiBbrkH6Px+PMXHgNtojgRxQZ\n",
+       "PeXyGsfTaUB/XGe5IcncO/rt8+VoD7/nGeg+zyHzOfRIi/PFbDZTt9vV5uZmJtcpRgEimuybuXw8\n",
+       "cWxuKHqL4AHothuKnieHM/iy9soQKSlb/A8C+4J16NQ9nejNO3waCelM5Ra/MyLMHqE792p8dw7/\n",
+       "PITFc9yIiAmO/hnBIM0X/mAwUL/fT0aBG1N+GCpeOGNA4HOIrHuRjCuGeHim0xvPXFIKPYAwQQPG\n",
+       "j1GGcojhBmmx2D1k4uiGL2D66ugh9HZUCKPNkwW51409voM+9B+0JgqVKNxZUI6AMk8YtvCLe/GO\n",
+       "SlGLxwWqoy0ejoXPSbaVshX0p9NFUbtarZbmfzyeV98uFovJwHEDDFSJPkLTZrOZFO/JyYkGg0ES\n",
+       "Ehg5hIpWV1dT4i5n2rHrx7fw5/P5lGgO4uKGryN9zsMgUigD3+aNUYPR53OOondDilAP72TcGHnQ\n",
+       "FgOy2+2q0Wio2+2mOQBJG4/n5/wdHx9nPFPO4ltdXVW1Ws0Ydvl8XvV6Xe12W51OJ+2UW1lZ0fHx\n",
+       "cQZ983PkDg4OVK1W1Wq1MsjSrVu3NB6PdXR0lMocEPbb399Pa5jq6Kyt/f39tFmg3W6rUqlk6LK6\n",
+       "uqrBYKDBYJCKXcJ/Kysr6vV6mfCUtNi2jtzEcHTHwefbP/MdssjloPMEz3Sd4Oi/8/Dq6vxgb0fB\n",
+       "kb3T6VRHR0dJHziS6YhJDLHj3PCbZrOZ1tPNmzd1fHysw8PDF+QIhtrFxYUqlYoGg0HqC3ICZeyJ\n",
+       "2NAIGRuTpplPxu9hPJfdIOz+e2SNI/qMkZAfutPlq6Pb9MPXk8vVmMqArHW9znwv07ukTVxcXKjf\n",
+       "76eNELwXmhJdYI3GKEnUwf4XR5r/u33gupKIhTvJUb9EFMzbKzGkIAQCi8YgY4jPF/HLYpUgDD5g\n",
+       "z8dhsh3K4zdMnuc4oEBjuXgEOOEAP3oEIenWuS+2aDnzvmq1qmazqV6vp06nk6nDgXKMuxHoy2g0\n",
+       "Sn2M6JiPCUMFj458C6dZDBl6PJ3nubEgvbibhLnyujnMoSMI7iWw2MhVi8YwjOwhHK5hAHqoVVrA\n",
+       "scu8ESlb3iIKMJ8zX1DOBzH0598zVje0YtiVIy68IcDc4HT0CCic+6iQzeGsoJrc50aXI0tra2s6\n",
+       "OTlJlahBoJjPfD6fUI5er5cEGErm4uIiGVkuMJkXFI0LftYSv4EvvX9eYI9GYU/oHUOJjh572QWQ\n",
+       "Dectp7OHElx5g+LBF6enp3r+/LmkRRHQ09PTNEavebWzs6P19fV07AxrrdVqZXZFOnK4sbGhp0+f\n",
+       "psOJq9VqOnz44uJCN27cSEprPB6neep2u2q326meVb/fTzxYr9fV6/U0m83UarXU6/UyW/xbrVZ6\n",
+       "jit2jAsMIb/2snCfI66OPLiB4msoOi08N4ZseTY8RmjPHRMMH/8nLQwi8mEwcrmG4UFZDjfqKVHB\n",
+       "+mUOqWO1sbGho6OjVM8QfnLkGqSLsftY8/nFzjRH4h1NlxaIK0rd830xuqCdGzI8F752xAuecmTL\n",
+       "6Y0+QFZ42kbUxT6PPB+eiXPsURynB/IRtN2dZJB4R/lorAU35J1PGTeARPyNj9vnbDKZJEfKkTv0\n",
+       "yM9djhST7MRxDxxl5IrGjSDPhaEBWfs1FiiCiDCCtEB5IJIr71wulzGm4nZYJhzP3WOxDmnGpDqY\n",
+       "31Ei7gO+r1QqyZiiRcVMX0lQBhWKi9Yha5jLoXGO1oiJyowD4RAhfBQasCkhJ4e3PQ9HWuRPYfVH\n",
+       "i58xQlenNwIHujJOX+TRiOY55IBEYc2z3ZD2732R+n3D4VCz2eyFGmCed8AzvK/+1w1Kfyd5VVQq\n",
+       "Z+6bzaZms1ky2kE6CoVCCiVBJwwUN7xIEmecFxcXac5qtVpGQVUqlVS8sd/vZ5AlQkLr6+tqNBov\n",
+       "5EJgnNIPFBRhR5CoGOYFvcVodIEH71LTyoU+1cOhJ+UXnKdc0TAOeAVFQZkH3nl+fp5o6qgKYc9i\n",
+       "saiTk5O0hqDbZDLfCNFoNLS+vp7Cn/1+X2tra4kX8/m89vb2JEk3btzQzZs39fjxY3U6HV2/fj0l\n",
+       "jTO3IJWj0SgZCxsbGyoUCjo4ONDx8bGazWYqcXB6eqqtrS3l8/mUDwafQjP4z1MOMJqZy8ifHhFw\n",
+       "NAReJNcF+YCx6bIPnqb5M3yN++eo6Jknz9dzY9kNAXiLZ3iVexwUl5PkjzGv6CCOPiqXy2o2m5nx\n",
+       "EK7O5/PJIIghZuS3K3b41Mfocvb09DTj5PlGCtaJy2k3pKLT7mE4eBoecN2L7HXHi2vISpcl0iIv\n",
+       "zKMuzjv0B0fYZQbzNZ1O1W63E91Btd2I4ZkR+VwWVeC96BRo6sgaz6Bh0LEunJ+QIS9rl+UPLttl\n",
+       "u2yX7bJdtst22X7G9kpzpDwvya1ELFCsXodmsWrd4sfLixWssdJ5llvYeB+gEsRheSY5Jngsy7bz\n",
+       "Az37zg7PoXFvi/AbeSTAuTwLpIIkX3b0MbaYM+M083HF/B9P9CQZVFpsscd79G3+9A1vyMOXHiKI\n",
+       "oUB+47C2Q67QGfSJRjI11z3h3p8B7UFTqIbrYTV+U6lU0vM8KRUe8pi9o2N8xpMBMfG+4hnzDnjN\n",
+       "m4cbuc5nQqCMkZ2HhBocQSAMCM03NzczCEK1Wk27lnyMlExwpCgmvfpBtzHZ/vj4OIWqvcTB9va2\n",
+       "tre3k2fn/AbsX6vVMonMIDyE7xwFYN5YS77DDvQXr9l5n3dyD0iXh2JAoxmHI1148iAT9JXQz3Q6\n",
+       "zXjxtOFwmPG+KWMCPc/OzlLOErTFm87n86kUB+NHLlSrVXU6HXW7XW1tbaX5JndyY2MjMxcgH1ev\n",
+       "XtXp6alOTk7S+xqNho6Pj9P6cH4vl8tJZlar1RQCpPnpBhGtZQzuoTtaxPeeH8Q1p0MM+UOPiM6A\n",
+       "0iLTPbTL+6rVakIuaCDirGtHxkFnhsNh2nrvaF21Wk0hPtalNA9FHx0dKZfLZULi9AXZ7yVCoCdp\n",
+       "Dsh735zjsssjH15IkzXsCA7zgN6bTrNlQ+I8eiPC4JtVoM2yXDI+cy9z7PPE/TGC4yHG2BfWGCiY\n",
+       "hwx9Fy3NUy5Yo8idGK70MUTZDA966oXThmKv0GIZChfbKw3tRXiQheOTICkxL/DtsoXoytYJhyHl\n",
+       "ORP8zkNInjhOTJbPnpDp7ywUCpm4rm+H9rCgtKg2TG6Vw6D0DRrk84ujMOgD43f6xHABz6ahiFgQ\n",
+       "hLukeS7IycmJarVaUnBc4x62pJOgKGW3lcbjPpxpPWeCvjjMyuKSFiEcF8o0n1voxHXCEB4GY34R\n",
+       "hvADCpBn+sLz8BXX4T8Ox2QOuE4OQ6zD4mFSD0V4/pYbbdJcKcLbhJY8QZKq1hcXF5nq2OVyOSUf\n",
+       "N5vNTBVfNyIQnL6rqdFoqNVqpfXhQhrjoFQqvXCcye3bt1NYxQ0+nI21tTW1Wq0XDGDWQrE4r8UU\n",
+       "6cZYPckWGlIXxw1Fz1tjXJQEkOY5RPQD3o73YLw6f/sxOIRW3KFjDDyLXCdoQgV6D8PBA27c+4HG\n",
+       "bIm/evWqjo+P0xomv6ndbms6nWp7ezu9jx17udx8h5lvVMDR/Pjjj3X//n3l84tK39Cy1+upXq+r\n",
+       "XC7r8PBQ0sLZ8ZIgMVzEPGD4uAxHkcawoCs3NwyYC3eOPZTF2kc2uGL3UguDweCFGmOeUiBlD1jn\n",
+       "nUdHR6pWq5lD0Ak/k2bBfa1WS/V6XU+fPk3r2nOkMN7gWc/Xgmb0P4a0uG82W9Tvwtl25xQ5h2MM\n",
+       "cMCzYniLMgluaBDq9PxCGjTk+S7rXSaTvxyNWt917noQR5nP0bBBPjmwwjy5/nCZCP/lcrlM6onr\n",
+       "yJin5uHFGIrkXujkub/sRP65M6Q8UdStWowCz12SFrFQj4W78I2eavSE3BuAcRCgeFGTySQJ00Kh\n",
+       "kMkr4WR23he9KE/o9t11rtjJV3Cjx/MLXPi4scA4XAnH+DuoGEYP78N4cg/YE4exvGu1WiauD5OR\n",
+       "D1KtVpNy7ff7L2yZdgZzIzkuVOjmNa6krLBbhjrgWXp+EeMnd4aFzDg9iXk6nWpnZyfNr3tgvkCZ\n",
+       "Cww76Oa5bCwwhCnjw9vybceu+MkZId/H5xihiVDwhHqMY99Vxv/Z5o4RRa4G7wPlmUwmCdWS5krB\n",
+       "k4o9H2JlZUWj0Ui1Wk29Xi9tTZbmeTnkEZBb41vAy+Vycjp8Vw9jn0zmW+7dqPHET8bl65fdSPCA\n",
+       "K0hHEjF6oJsrAebNnaGY/8Y4jo+PkyxgV6rnSoA+uNEhLRwxch056w0+Y/yVSkWNRiOjIAaDQVoz\n",
+       "d+7cSWj0cDhMOWIrKyva3d3N5MCR/9Rut1Wv19PYz8/P05w9f/5cN27c0PXr1yUtjhyazWY6PDzU\n",
+       "xsaGrl27Jmm+2w/aMTeO0mNMIWfoG3yDvMMYijtQkZfuUPFsd2gj8uLN6wdypA7y3XkYeRELZIJA\n",
+       "sc6Pj4+TbGfn1tnZWUJrvbwHBpc7UsxT1Gc0lxGeCwhfoNMwDlzWuG7DwZKU+os8BBTwHGJoiqzx\n",
+       "eXJjyR1VdIUjUzGfDQMK0MObz2vMr6pWq2lzFAaozzXvjAY4SJYb4xhsroPdOYVW0N6NJc/txRnk\n",
+       "/egnDD8cZ9erL2uvrI4UzVEIFIkbSdKCyBDWEwtRVjBEJJq0UHIxgcyTes/PzxMKRKVgdu240kMJ\n",
+       "wEg+GUxqnEhpUaOl3++niefd7gVGhM3LC0jKKBPQD4eMPdnPBZdXv+b69P+w92bNcSTJubZXFdba\n",
+       "CwBBsls8PdMtyWQmk270/3+HTBppemOTxF47tirUuajv8XwyAM4xmxt+FwgzGghUZWZkhIcvr7/h\n",
+       "8fSUBQ9BXxhT/oaR93Wz2SyVhh0bI4U4o54DPscg2ZGwYbNT6TF8CYp9eHiI+Xwe/X6/FmEY/h6N\n",
+       "RnF3d5eLESPsiNpRsN/Z0Q6GA6NoObTz6OJ0VkRcV5JcQQ5RzhDO6SvXHR4exv39faZ3cBym02mM\n",
+       "x+M0pBFVpXFKHAwGg1rKCDmkFADPA+WcTCZxe3tb21aPs2qyqguAUnDSBFjmje97bCzDODuHh4f5\n",
+       "fRwyI0fICagdRTg3m00SvekPuqR0epvNZiyXy0z3eX0Nh8N4fHyM33//PW5vb6PX69UMJrLKVnfO\n",
+       "d8MIg1j1er0syEn/jTpjlCOihl7d3d3ljrrLy8v44YcfUg/d39/nPb2eKYtQBlHff/99XF5expcv\n",
+       "X+KHH37I50HKJ7jj/XAiTIou0/Wk/HkXZw0IPrneiBx6qly/DprKdCHvYqfYz6NsDDQMnD7QIU4M\n",
+       "cCBhvdtqtWI2m8Xnz58jotrtt7e3F91uN25vb2vfjYg4OTnJnbPc004z7+T3Mhpmp4bfccIc0KB3\n",
+       "XL6FQPDg4KBGK0DPGHlBx9AX9892pqylZP3n+eE+zE9ZKwskz0GPn4dOsGNDoGJ7Y/DDSFfZ0L+l\n",
+       "Hudap+/op51U981/MxWGz0D6PVZl+2aHFkc8R1e8WDwANtr+PaJeUt/oBZ+Zf+PUF9G60QCUG8gR\n",
+       "k7RarbLw3tHRUUbzTLDTG/SF7bW0p6enhGJvbm5itaq2MhsdK1EQSgmAPhmyNHqCk+g6TQituQk0\n",
+       "c8eIfDkQF6HmPoxdxNZAobS43ve141im0Lif89u8I3ONo1qmd90XO9r0A36ZHRvkaG9vLw84jog4\n",
+       "OzurFb90dOXUAs/wIqOPdpq4jvdx+sOQM04YP82V4D4UT+R3EJmDg4PcUo/DT8kMUhTv3r3L6s6k\n",
+       "MjebTRwfH6czyVwwPhgdxnmxWMR0Ok1EkmiXMUVRLpfLWK1WGR3DM8KBJQDhM6fl7dAj61bQDgzs\n",
+       "fHo8nRZBSeMA8kzqBCEj5XrFadjb28soudnc1oI6OjrKHZolmttsNrPK9j/8wz9ExBYl8K7hw8PD\n",
+       "RIVwghjj2WxW01seX6rwR2yNxcePH2M0GuW7mJPF2DQa212SfIZczGazeP/+fbRarUzfwbVC3zIf\n",
+       "PI/+WG7pJ/Lp+WKOmTtQCa995r00eNzXKR0jNjjApnxwPWsC1HQymeQc8l7WBbS7u7ta6vjp6SnX\n",
+       "jJEhIx4RVX2xRqMR/X6/Nk93d3fx5cuXdFhKZ6m0S/x0yQOQetM9IiKrz9uh4B3M1/QzkTPeE/5k\n",
+       "RD3NZXSROUT3Oo1G4znQlcQrAAAgAElEQVTofzsmDlL4x+/MG2AGzwNdh3Pm9zdfinnxuDEX6AE3\n",
+       "vttsNp+VajByZRCg/NzoGrucX3LoaN/EkeJFHQl6sGn+nIWLcPt7pOe4j68xAhJRrwpNdMXCx8ki\n",
+       "Kid6RhlHVIaN/jFZ3NMpNfcfoW+1tvV0Li8vcxFRCM/kVysy0DgXKPV9nb+1E8n1NDt/GCGihdvb\n",
+       "27z/yclJonJE00aF6A+L206P58UOSukU21H2WKGk/TzGrYxmiRRIN/V6vVpKz5C30z5v3ryJq6ur\n",
+       "Z2fWRUSWaLDTZg6Y03aOrpBN38upD/rj8TLS0263U1mTKmA+WdDr9TrJsBGRROS3b9/Gu3fv4vz8\n",
+       "PB0JoHQqOF9cXKTiI9rmHo1Go3bsDvN9d3cX7XY7HX5k5fHxMZERnO/JZJKcGxxjnucjUuBg8RmE\n",
+       "XgcKNIwaypA0JWOGPDsQ4v2n02k8PT3lVnUcRsbUKU8McERVvBMk8/r6ura+1+t19Hq9RHJIh5EO\n",
+       "bbfb2S8/D0QB/mGZRgdh44gS2nK5jMViEf1+P4uv0hfSfefn5/Hu3btaBN3v9+Pu7i6ur6/j+Pg4\n",
+       "n+f5BQUtaREOFLhnv9/PscZR6ff7tWCANf21lL4DjDJYcQDlOTYK4n4y38vlMkajUQ2RMpqPrXDt\n",
+       "JgfsDtTOzs5qqS3P03A4zA0cm82mVrmedTYej58R7bkfSKsDQSNCfs+Iuq7B0fS2fwqu4owRUPO5\n",
+       "y594TB3QYt9sa0iVYte4jrG13JpT62ft7e09qwcH2uWswXq9zlMACLyQC4IyB90voZ/o4ZKHZ3kx\n",
+       "Il7Oq+WUAIPvORCkT19rr+UPXttre22v7bW9ttf22v7O9k0QqdI7jqgQG8OCL6V1yobXivdo5MJo\n",
+       "ARGhOTy+jpxyROSZTkTWTik8PDzEeDyObreb6JLRLqI9oinD1I7I1ut1QsqHh4f57yUeCv0tkRz6\n",
+       "jAdtUp5TnUQwjnhIc8Jz2Gw2yb2BF0OEbFQEBI9rzCEqI0iQJBqRRZnXNnIHT8gRnSO4rxE67+7u\n",
+       "Yj6fJ+pEpEVK1VBtt9vNcSTVxj2J0LnW/Xczt8fzY5JoCXHzXqSzjDQxxhC4TTYHOdjf369xdhqN\n",
+       "RhwdHcXp6WlcXFzE1dVVTYYjIneQHR4e1sjIFF/kXDwQKeQHJPb4+LgWQW42m0ylnJ6eZjoJefLu\n",
+       "Wqd5SV/v7e3lwbkRkegOCBa8loiopcPm8/mLu2cdKVufEBGTCt3Z2amRRyOqSNqVv3d2duLm5iba\n",
+       "7XZu4LCOgB94dHRUK5uAfgKpNSeLNCD9PTo6yr7e3NzkGmPN8RlI4GQyScQKeSP19Pbt27i+vo7p\n",
+       "dFrjTi6Xy+j1es/Ghf6hmyhaS4Pgz0+nQ50JACn02ofyALroNAprxUiUm9NeXnNePxH1qvmbzbYw\n",
+       "LlX4fYAy6TnKh5R8HlAxo2etVisPjEZfcB2bCbrdbq5V1hgNFBKUxe9lZMU62iicsxv0z1xP68nH\n",
+       "x8cYj8cp26ZDmIu0v78f/X4/7Ql2wNw3I4DwbOlvWe7GXDnbY5A6l12IiFoqj3krsy3oB1LpERVf\n",
+       "j7EzkshGF9Bs22DWGciSx4W/I2NO7TEvzoS4OQX6UvsmjtRyuUxDXC5EG9eXUmMMhuE6Gy4z/0tS\n",
+       "pHcM9Pv9GlmcFFlEJIF3uVzmoJvrQ5Vh0hhMOKRh18yx42K41FwnyuHjeDiNV+aHDeH2er0kHZKf\n",
+       "N7fJabX1el1LDbpys53EiC3JdTQa1e5Z7myjMnxJFmWsmBf6jZOA8sChiqifS1gSI73Dq7yOOUVx\n",
+       "w++J2KYn+X7Jw2COn5621XSBpCO2C4Zq4OxktMNTjmnJPSgNAQqM+SdVWpIoSQN2u90aERmHnrG+\n",
+       "vr6uOQKdTid+//33+PXXX2vwt+uz7O/v1w58Zf1BOLYjwWdHR0d5BI2VIjV4UJDMzeHhYRow1raN\n",
+       "rrkzTrVA4J5MJuls4dCzAWRvb3s4MNvcI6LGI5rP51nV+6VAAk6JHQ3k7PDwMPr9fo17BHcKZ4/3\n",
+       "YPcunBX6yPMIAnCoPE84mKT9vIZJk5Q7ER8eHmI4HObzdnZ2stxFRMRoNMo0ignPHGGz2Wzi5OSk\n",
+       "5hDg7Ji74p1wm80mdwOSiqYvOC5sImFnI7LhnXyUJ2ANWBeU3CqoEHzHzhayYl4b78hYoYPNYfTa\n",
+       "LDfsoG9MWI6onLUvX75kCqokY9NXc2qPj49z8wb60ilYGlSEkn9qvqb5Spa9kgDeam0PjR+Px5n2\n",
+       "tS3F+Wg2t+VhTHmgtlWz2cwdnNzX5Hqc6YhI6oedFHNccdzMt/NP8wBpHlvLDWPF707hWX5wPkt5\n",
+       "4Tmkey1rdqJMLzJlp+wn41H+ze2bOFIoDNcO8ouVAo6j8NKAmfVfogcmOeLRGnWByGfUIyLSiOL5\n",
+       "enGjhGezWU1pRlRRGwgROWeui6gfl2FkAWPJ9Xyf92bC2+12EnyJNBwJo9iIflA4fM9ePlGto5OI\n",
+       "alcP48P7cJ139ZhoiLAhwL63uV5G13h/R26MZUTFY/DiodnpZp4wiLu7uzEcDmsOjI1fs9lM3sf1\n",
+       "9XUNWWCnXKmQnWOH62J0DDmDk2OFihIm4qNeEfLGuEIadt0ucyks49QA+u2336LZ3O7eAnXa399P\n",
+       "5wbjyDuyBdx8CRzQ/f39OD4+zhIJds5ms1ltl54LFhKx9/v9DF5sSJFvxtDzDc8DJNZOHcHV/v5+\n",
+       "zOfz2m5GO43j8TiOjo5qSBS6wdwWvg+azM4/b+4wVws0EDnF8TCiGxFZfgInGa5URHUsCQHgZrNJ\n",
+       "NJqz8iaTSa5H5ALjjNMN18bvzHjjEEZE7uK0E4nOiIh8NjJgcjKGnkDBvBR0NgbTzhFGF46V61qZ\n",
+       "/8QaKblAyKGdLAdQjLMLNYKQ0Ed0DbqV9VlykVxuxe8ACnt/fx8fP36scX3evXtXQ73MAXr//n2+\n",
+       "P+gu64TSMgTrDrzpj5ESO2BweYwYMZ7YyMfHx+Qy0tA95lVxLbWxyr9H1DM4ZSbGtpi+GszA1pVZ\n",
+       "Cl/LuDG/3vTCGL10sDyZIda+gYPy+Cv6jj9gW2o7w3f8O/JZcvicTfha+2aIFPCjvVKEvzRQbDfF\n",
+       "GBmaRKBsVC0cEdXuQBuM8Xicu6Ps+NBAqIBCSyfu4eEhC1rSVqtVnquFI8Y9Oc+M/mIgI56ft+S/\n",
+       "IbikE9g1yHjR77u7u1gulzXHjR0RXA+iEFGv3cTfrZAgpTpC57MSBbTTw9+NwHke/E52JB0VeT75\n",
+       "DGVq4beA43iwaKbTaabIdnd3M2r3+D49PSVZmoKFKGbQG4qO8n4Yw3I7vjcmYIzKBYlBcXTHM/f3\n",
+       "92M0GsVyuYzpdJqfsTuHMd3d3a0V5ru8vKyRkO2A/fHHH7mmdnZ20slqtVqJuu3u7sbHjx9Tpt69\n",
+       "e1c7a4wUV0RFTG+1WjUZjKjScDYIGG/vnMH4ITOcUWcHwcVo7QiQUqRRKA8iL2e90bwxwrKIgcII\n",
+       "NRrVGYXj8Tg/Zw4wmsi+yeNGejCS3NO7kwhiQHJo0+k0jo6OkjiNg4a8UawVp4fdw6DhIOPL5bI2\n",
+       "phFVxH9/f18rqnp8fBzj8Th1kKN4jCznLJakcKPpNm5e8/6c64xUMpYR9a3qvKPXN7q5vCeBMYGe\n",
+       "nQMCRxxop4xsP1izRhp2dnYy2P348WOO5eHhYQYWd3d3tcOON5tNHi4dEXFxcVFLcXHPfr8fq9Uq\n",
+       "bRC0AtaFA90yW2CHwOPWam2r36PjGDdnUjyHvINtHboDOSO16+dhswjMnbpmDNHFpp80Go0Yj8c1\n",
+       "Z8/Imfvg8j00AqKX0r22LfTVwArjVqaN0f12XO10EehZDg0YvNS+mSOFo1HWdiihuoh6yf+I52k/\n",
+       "rmGBeNEwIHi13OPy8jIhUXbgGHK1cJQcA/rCAkWAfcgkCsZw83Q6TY/bSsXva0SDd2BR9fv9OD4+\n",
+       "rm3fZUFTKNPoEnD/SzsfykqtpQG6u7uL2WyWKbwSdXLK0gqTMStzzM73G8ou71EufqdBQDoYb+65\n",
+       "2WwSLaGuz2azyd1hOLSOjHiGDX7E1rCRCigjEaJw7u9xK51hnJ4yMnNEi7PWarUSmcCBpeFEAX0T\n",
+       "hUVsiy2SZnMJCT4DugfFNNcJZ5ZSB6enpxFRHVrsVIudVJAuZNw7COk3z/WYGFWcTqc1hA1DaLSW\n",
+       "z5h/UtpO+2LQUIzUzoqokKTSKY/Y6h+UOsbYZVHgwkREreo96xv+htGNRqORTvt8Po/JZFKjCsCf\n",
+       "nM1mNRSl2dzWcgIhd+FUZHS1WuVRKIw3qaxGo5FpHYIBdBm7BHEoIiL7hVxjFLknjsJLKAifIxfI\n",
+       "ON/DGLK2S0NsQ+TUj1GA0mChU0BhcCRxWp1iZ06hbTiVylpzCYuI58e0WH4Xi0X89ttvOYd7e3tx\n",
+       "enqaa8K79h4eHuL09DSf6xQsBpy1w5iy/gj0Pe78HzuJc887WKdHbNefHRUH2dZBrEHWRWn3LJsl\n",
+       "dYMA0s5yRKWj2u126k7kbWdnJ4EQAioH0N6xbp1BORPWmNex5w29ZweUMfD3Pb92Mj0uZeqZvrAG\n",
+       "PRZl+yaOlB0iT4YJ2eWCInoyehFRRUlfi4RQsiX/YDqd5nli9MOViLneqAF9tyPnqMKNCN31QFjU\n",
+       "5GPt0ePskRpAsaM8gWQxOtyz3W5nVP709FQ7IgPl4cjTwsACsMJzIxq2ssWgGdp11MQYMU82mBCb\n",
+       "PcY05omF7c9Br7inESKczL29baV2HAIW0nQ6Ta4PRsjwMcoEZ2p/fz/G43HWBLLxwnh47mlEMRgF\n",
+       "5rfkSHiMGBsUyGKxyLl8KUrGqPP+pF+pE2OEzBw4jkpB9jl7cLFYxGw2i9FoVKshxjiD2pToJQ5i\n",
+       "u92ucYvYdm1ieUQVCB0eHiaixD3H43Gcnp6mUTT5GafZ6Xka/BUI88gx/Tk8PIzpdJoy5+tbrVZM\n",
+       "JpOsL0ZR14jIEgpsL/f823ihr2x8qR3X6/VqnCWcNcb58vKyljrcbDYxHo/j7du36Ux73jE0RmuQ\n",
+       "d7huBwcHmdpDzlqtVgZvFGNFznBOmTtkEoNXcly4J3OPAeMeln/mztE+uofPuS+o0EsN54v7EtTR\n",
+       "H28Isq4muFytVskjfIk0XRpT7A/oZavVirOzs5wnMg3dbjcDQsYPxInrnGJmvggoqC9GUNRut+Py\n",
+       "8rI2DuhcEFlnTEgpI4/YFuQG3UM/SsTfa9z6ySCEx/5rnzm4Hg6HGeigNzzPPinA84S9I0h2cE2t\n",
+       "MGrWOa2PLCGXzkiVjlIJzDjgLxEwnMASDSxtUtleyx+8ttf22l7ba3ttr+21/Z3tm5U/AE4vSV3A\n",
+       "aEZ9TGorURV+N0TpnQMgJ2XxMaoge7eeIV6iE6NkEdXRDEZo8FS9VdxQLPcEYscrN4wJOkL0Tp96\n",
+       "vV40m9tihiAXjoJBL+B5gMiwo8MRiMcNCJ/IvkSl6D/ImgmSRAdEyOYmfC3CJIIhumLnSETUECYi\n",
+       "JafIDPn73iBEIC+bTbX9ttz5BkrCPU2iB02IqA47ns1mWQyR8QaZ8E42o3IgUEQ63tUGaoq8Okok\n",
+       "VcDYeNs/HB/QSqMum80mi11CNId/0W63o9lsZuFAE2Cvr68zHUTaz++PrLhQHrJIfzudTiJaXOex\n",
+       "KNEa0m6kf4w6lfJkJOv+/j5LjXjMKMTI2EJidRmDiCrl4fQd6Aay6rMM7+7uEuUrd6yCbEyn0yyh\n",
+       "Ye4gP0tKAPL1+fPnGA6HcXJykmm4+Xye5UY415B3cIqZ1BnIIf25u7vLOXdK+OlpewzN09N2ZyqF\n",
+       "QweDQepf5MmlVowYGB2EgwVxHqTMZHSQt3INmxeKnka/+YxP7mPUmjXD+5G+YrfmZDLJzRnIovlJ\n",
+       "pJxchRw74vQ13zfiaMTmt99+y/V8enr6jBvbarXi8PAw5QI9dX9/XztyzDqB9wFJHo/H+X4uwYOe\n",
+       "duoJu0W67O7urpYWfvPmTXS73WeoozM76HiPt7M6RqGMyDgVH1Eh4yCnLt/DGsammuPY6XSi1+vV\n",
+       "9KH1JRxHMifuJ4hdeV3pT+AbML9896XrvIZLpNJZopfaN6tsDunS3AQgY/NpIuqpL64v4bqIisNS\n",
+       "OlJc59om6/U6rq6uotVq5WGsXlBwjJyH57py4fM8titzD3avRVRcGlIe5XbL8p3NHXMqE+PIdzA+\n",
+       "ZYXmXq+XaQOnapyidArUTi3vaD6YF2TpkJnEa6i/5AeRf+c9Pb+kAvibn4dhsiPGMyIiFY3LW3Q6\n",
+       "nRw3uB1eiOaAeBcZ0PxgMEgip40zysaKKWJrsHu9Xo5PuduO56P8Sr4W88OmCjvqfMamAhTmZrOJ\n",
+       "Xq8XvV4vBoNB7kLjeRi66XQas9kseRtPT08ppygQVynm0FacNB9MDBeFQARZhMzPM73RAqNlo23Y\n",
+       "HGcKSJ7PkF+nLklfff78OcbjcZycnORc8M6MGw4xir80Jjs72xpSe3t7ScTHKCGvTm0if/yfOmue\n",
+       "X/7v9CwpnIeHhzg7O4v379/H999/HxERv//+e8xms3jz5k08PDzExcVFzg2OASR6+FsRkSl9+Dns\n",
+       "tLVMR2x1xHfffZcpf9K6cOvsKJqrxDrAeTY1ATk1dcG80nIe0Sde75Z9gsjyc+tjpwkjtnp0NBpF\n",
+       "u92Oi4uL+Pz5c82p44DccrcfPDYH5earUavLzg73/PnnnzO1f3p6mk4PdIPd3d3cBEDjOBlvNnJA\n",
+       "t7Ozk84+toZGEFTWaLI98Lj6wPLxeFxzdmgmerP2vE5JzZV2zo0+IqdsdrFN9lyQZsbGIteXl5fJ\n",
+       "3bTsMBfoG9vtiMo2YJvKoNTy5XXJvHJfBzu2Azs7OzVubglgvNS+iSO1WCyi2+3mqecRlYNQ5q0j\n",
+       "qtonNjQl8Szi+eGNEVXESzOxDkK1OS18dn9/n9diqCMi0S2T5miz2SwVnL3biEjiJ3lrO2fkilGW\n",
+       "nnyTxyHUoty63W7uyAFxcATpBRtR5ZZpLJaXOChGDNww7DgFNvrOjZfGi3cxP8zRAO/OvUsuhccJ\n",
+       "hcFzcCIajcYzZcP7+XBWO3k4KeaeYPAZZ4yQeR+QX41MeYdVSdy0I9hqtWpOr7eJl3WSuNYKHYNJ\n",
+       "+YtGoxHT6TQuLy9r5F/Q0svLy2fRKmR3uGPeNRdRIQWgSdzz4OAgLi4ukoeCA2K+Ds4X48YYIB/w\n",
+       "ixh/b2N3gOHxZc1hEI+OjuL8/DzG43EiU3Y0eA6cJ7gy3BdHC+ffc0DfkRNkic0LvV4vDRyNaNdr\n",
+       "zLv9QCx2dnaSmxmx3TpPLbNutxtXV1d5Peub8QJFoi84Beys9KG2GCbkyBsqeM/JZFLjlFoXY1TM\n",
+       "LWLtEUjQJ88xrTTE5pRhyHgWusC8Ke7JWkLv8xwQUHSpDyVfLBZJ1LaepR/ozJdKGURUMlnyWOfz\n",
+       "efzlL3+JiO2affv2bW1s0aPwVWnU3mK8zQOy7nXJCAjW9K8s90FggSx7Mw3BAI6Kg6G/NUdc6+DB\n",
+       "gTdj8/j4mJwxvz/zYV7heDzOABDEivfiAPbhcPgsu4FcMF7mfzIGOMIlclaid+aHlcR7fw8nys+I\n",
+       "qIpQf82xjPiGdaSA1S04JrI2GlXNDtINwO2QwSLqSBYL1AaaKIsB5TMWGdtj2ZFAs1duh8AGytEY\n",
+       "11xfX8f79+9zG7wjZCM3VtAoKXYuONWwWq2yBhDXcwAphoNxKdNvCAZCUO6cscJ0BPn09JR9YDdc\n",
+       "iSQ4vUlzasVpjYioGUeMJvcy6ZvvlM1RiZUbCp3UDg4DCBHOgNG4kjRpZw8ZIVJzygoHl0jeuw1J\n",
+       "g5IyczTt8SaC8n1BLFCMfv9Wq5U7hjabTa6FiLqhubi4iMFgkKTi8XicAQGpQjugJrY7AudcN2Tp\n",
+       "6uqqtmY+ffqUaSP6HlGhAPzfxGjgfeScjQfMKwiQU7t85rVO8BNR1VMjdYIMowiNuJJWQgZx+pA7\n",
+       "nGDmcTAYRK/Xi/F4XKslhMHiEGKnIXlf1oB3JqKXkJtms5nPYzcuaES3262hKAQkpSxFRNYiI/VB\n",
+       "Pzn3j2dbTrnX7u5uppyMnJEmJO3reyLDTrcYuXEw6PXGdfyzsbJeZm34fU0k9z3v7+9jOp3GfD7P\n",
+       "INKpZe6NETbST//L9B3zBBpZkpFJt/33f/93UjwiIt6+fRv9fr+WFkS+CfCo9wXhmnXhcSxRet4b\n",
+       "h6IM/spyHh5rdgSuVqssMRIROT+murhaPJ+VZHGuZeeuz4QkZY9N8BzTN9YKp0wwT5QLQmegv12n\n",
+       "r3T0WWPIh6kQ7HI0SODnObVp++xgmubf7ZC91L6JI0VU4BwukCIDGFEtCIwaEwlcT/OC9o4TrjH/\n",
+       "yErKQmUIFEH1IncUgVEnJWDEgpQHBs3KxIrQPBv6ybMbjUZ6+YvFImvTcN3FxUVEbKPSfr+fEDYL\n",
+       "w2NHH1AY9s4Zu/Jv5LkjIg+9LT9z+orxRmiJmGwEiSiJTO3U2VB+DVJmPnBYIup8nlIRYQRxXgxh\n",
+       "42AaheI6Fibj0mw2n/EhnBb03FJ9m3e0kUKWzJHi+fAqQB8sDygGnCIcK8/xZDKJwWAQx8fHKRs3\n",
+       "NzeJ0LGVGwd8NBrF0dFRzt3Ozk4t7Uf0XMrm5eVlFr6cz+c13kCZHnPKBMSHQ27hBzGGZYrFaxSn\n",
+       "xQEJ3+FoFYIu6wHu65Qh6xRZeXp6ypIDyPdiscgdhoeHh7VyDCh7jDHpf97ffD7Pz3q9TlTPfWbu\n",
+       "2u12HB8fx2QyqaVqCBBAI7w2QE1IL1qeneJpNpt5rE1E1OZoMBjUPiPtDBLPESQRUeNi2pGiMf9l\n",
+       "mi6iChydDbD+dtBXUhkw7mQj0IO3t7exWCzi+vo6ZrNZ3NzcJHeQcSGrQP+4H84aHDCjLU4rmi+G\n",
+       "c9Dr9WI2m8Uff/xRQ9Dev38fw+GwFoQhhw8PD3lEE44//bRjRzqSz0xRcbPxJw1lXcNzcdycwrLz\n",
+       "UKI8yLepHXxOAOD15RpbRnaMAjK+pPBMIUGGOJLJKHxEvShniSxGVHxlI/bYGOyrx9H0HeTMAT/2\n",
+       "hb45GEYvfq19M0eKCWVx4Sx0u91EpgzxItQYHRe1sxCVSM9ms6lxhfge0C19QPlHVAsfQTWSRX/M\n",
+       "a3EF54eHhzQUJQHXxtSCaCSM97EA47mv19s6KiiM8Xgc3333XS1Sd/TsiAalWiIk/M1OCNfh7FAf\n",
+       "xOOGcff9LNQlQmZIn7pHJScCBe7owPMF3O2/MVe8L9eBpkG4xZGjL0Rt5fgzNyas2tB0Op2asXMk\n",
+       "xHl/m80mick8A0XO4t5sqvpjT09PiargSHlMzGcZDocp36Cpj4+Pefo8lfsp7gjn6e7uLo6OjiJi\n",
+       "uykC9PTw8DCPPYnYppoitk46zgTOGTVvxuNxDIfDGAwGNePNlnNkx2PocXZ6g/FAcTFXbqASNgIR\n",
+       "9XpYjImLJPL/3d3d3DwQseV2OJrHufGY3t3dJRLochnIp+tX8U7oC1LQ5l9gXEkPYyQ4qmi9Xsdo\n",
+       "NEoHNaKqiI7+wrHj3Y3GEYVz3dPTU4xGo5RDnGgoBryPA0GCANK68/m8ZtzYiAFFwc6UdSuOq3WD\n",
+       "0W47SzbWpY7iu4xP6ejw/ZKaAMWBcSiReE4S4O+uQeTMAXKEXNCgpfz+++8REbk9v9FoJDezTN95\n",
+       "ffMZRZRZO+YAEkQQJBtZKYNHnlHyIymZYi6fU2W2eTSjcoAb3JN/LiVDfxy42OGHG0bQYuSauSHo\n",
+       "to4ukTCCYfrCdxgX6xP0D+vM64lrcKIcTGP3Sl4ZY/a3Unuv5Q9e22t7ba/ttb221/ba/s72TRAp\n",
+       "V0AFlcEDhqjmHVx8F++SLZMRzwnUTmmRljCHwMToMp9ryJHo05wgGh44SJCPdOD75HrL9B3Pw3uP\n",
+       "qLZj4xW7EfltNpuE4bn3bDbLgoRs68Wbxksnp00qgvubl0BzJEhU5bw/fYWXYb5TRHWUD1G5ycJw\n",
+       "PZhn+sW4GUJ3lEoapNlsPktfUrCRSKkkxnNWGRGtU4lEsMD/JYGdOTcBkQjKRe6cxmEXC+jh7u5u\n",
+       "LedPJE+EZs4WUSXjYggalJY0j6s7r1arTNd4LcABgYR7cnKSpOxms5kV3N+9e1eDsQ8PD+P333/P\n",
+       "atvz+TzTfp4DCLXMhdNJ/B10DJnneqMXzBncs7JyOYgRXEDewZwu5o/iqxHV0TZw2Q4ODmpbr9l2\n",
+       "PpvNaqn0RqORPA54m47m0UGz2axGQ0BuSb964wDrl/WBvHBPdnrB40L+XGyVeyBjy+Uy05mLxSI2\n",
+       "m00WemQzw/X1dTw+Pubh5lxHdE9fWYf7+/tZeb3T6USn06khnMwTqIJRHvNI+Y7XKTsPnSaNqBAb\n",
+       "6xyuM++pTAeScoeyQQqYe3JNibbTyrQNc0EzV5JmXWZu0ZcvX/JZ3333XWZNIipOEjtgjcjAXzQn\n",
+       "y7u7fWSKU8wgn+i2EgE0gluiK4yxETyPK3YDGeE9QI2Rf3S5x8VZBLImnP5hhJdme0img2b+mGk2\n",
+       "fMY8W5/SB8aynPeSMO57+vlkmoyo/620XsQ3cqTgffj0cNJgy+UyD3p0jhTBgA/w0q4C7waKqCox\n",
+       "M/mGlBFMLzTXmYFo7PPreI65EE4R2dBCBiy5AsCU7ouJcUyihZz7Qy61crm+vq4ZKj6DtI4DFvF8\n",
+       "txqQJcLnviKcJiZyD67HSHFPDKGrIHvXGtA2Qu/UH/fBYfYCZvGXhFM4WIx36YSuVqtadXen13A0\n",
+       "GE/SGyxMUsl26Hkm8uSFihyh2E1oZu6oQA2J1TvFzLtzmgaFwE4aFFQ5hxgrzz9jz84uuF7spHn/\n",
+       "/n28f/8+rq+v01Eej8fx8ePH+Omnn2J/fz/++OOP2vPYmec5jYgsvcBzz8/Pc54IfEib9Pv9WorK\n",
+       "81DW3oIUz+5Z9EWj0ci6PdPpNI0LzhL9wFBRwZw5pB8oTO+KfHh4yBSNS7QQAKF/LFPIgnfY2Tn3\n",
+       "BoVymzvpPOTK+oS0IEaHtUw9qZubm0yN8O4nJyexv78f19fXcXl5Gbe3t+koHh0dpRzhoOLsI3/W\n",
+       "TU418R3mxtyUiNIT4uAAACAASURBVPqZiqXjwtqwE807Wpas+1jX5uyYI4ccUuPKaRrGmmtMlShT\n",
+       "eE4X8jsBgVNbJVWCuZ9Op/Hzzz9nKvH777+vkfTR+RDNTSlA19iZ5B2wSegrPrNDznt7V58dw3KO\n",
+       "nPJysOY55R39TPSMuXJeMyUXzYAF/eE0AZ6Hc4atsCMLtae0V34/NveUTryDe2wbn9nxcvBt2cOB\n",
+       "NMfT7/RS+6YcKZRjRCX8cFt8ECsDw8JAwURUpG3/4zOQLeeTza/xYnG0ZOfIzkFEfWssiuElcimI\n",
+       "lksQmExdKiCfG2akg37wHAwmz+G5OAZ2ePb29qLb7cZsNkuF6kjQjlTZrPD8HROsebadPqIj3tXk\n",
+       "d39mAqQXyUvRFU5NSeQkp47slA4I48czeQbOCTL1+PhY49yZy2KjgsIyqliO2cHBQQwGgxiPx7Vi\n",
+       "nkRl3W43I2krqf39/ej3+9HpdGqEdrgSlisWPwRuo7m8NzsWaaPRqMaB6/f70Wptj8C4ublJwzyZ\n",
+       "TJL/BKLhdQgyhsNII2hpNpvJEyy3RbN2HSThJMAH6fV6uWa4/2KxyKAGo9/pdJ5xbZjLiMjjYVCk\n",
+       "m011Fhv3pvbS09NT7lajdg99tTGFd+UDmr3e+J3Cu6UM4/S3Wq0aemIOFX2IqLhOyATX8xm7C2ez\n",
+       "WUwmk1qtqNFoFN1uN/UjyNLd3V3WmHOQwjwhl+hGxhc0gb+zc4u58jiXQZ3J0f5/RDwzlNZ9BLTs\n",
+       "LiQ4Zf585pwNdkTUdIERCyMXfk/aS0ET1/l+pVFdLpfx6dOndIg+fPiQ3+X8TJwozz3oD/LrjTd2\n",
+       "9Kx3HVRGVOcuGjFjLl3OhHHh/Wl2Qvx+Rsj4O8iSv49Tiw4y/3c4HCY/cblcxng8zoDOa6l03OxQ\n",
+       "l05WRKWLIPI7iPZ7l/23g4cd4n4RVSBd8mbtAL/UvpkjhXLlxahU3Gg08rBcYHwcIe8Y88ChMCHm\n",
+       "mZRH9F8So5kEDCOVjiPqOw9emkATX7l/RL1eDs6Zya8+T8zIEue6NRqN3KpaLiKiGveHseB0eSLK\n",
+       "iK0h7Xa7MRwO4/T0NI6OjuLjx4/x6dOn7I/RljKdRtQCoueonEVpyJ7rQK+4v50Q1+1x1MC7WwE4\n",
+       "dQt6aHSHcSOqhiDrKIt+eZu4ZdCOL8+DZMt3mAM+s3PpqsxEMJCCcbQg+W42m9qZcJyxRXMK17tg\n",
+       "vB3dMhMR6QxFVHA+le13dnbSAen3+9Hv9+OXX37JeSNt5nGmn8fHxzGbzeL6+roGaTPeRLXz+TzH\n",
+       "+O3bt2nYKA1g2WLnICiCkYjDw8PatU55+7mDwSBRF8j0rDHWFmPKVn6Mk9NE3M+puDLap7iq09r0\n",
+       "mX45GLC8+fBymoMKgh7eg6rQGCru3+1203kHzfAGFXQMxTI9T2dnZzEcDlOv0beHh4cYj8e1mnZ2\n",
+       "bkj5euck7w6yB1JnJxuZ47ukHRkbjzFjh7w5ELXO5X7oKeuAzWZ7KDkZA3Qhn5UImNNCRpbskCDX\n",
+       "LuRJXzy+nkveD1v26dOnrOsXsUUACSL5ng/MdkbA5GeQX+ocgpggf9gQgqzSSTDqQnDjsbS+s63h\n",
+       "Pr6GBuIMiuvxdnbB5XIIjPb3t2eYejMF88DmI48pDVtQ2m/0frk5oEzzEahEPC/MXM5p+a72Ixi7\n",
+       "r7Vv5khhoFlsi8Uibm9vYzgcxnq9rcxqZAkhQ2mWRQxBYxyVRtQPYeV3mp0JFwIs8+r+/+3tbUaH\n",
+       "KGlzDIzAkIZ0X15KQUVE1p4pUS63l9Ajw8wYJcYM2PPg4CD+z//5P9Hr9fL63377rbZw7UwwtmyH\n",
+       "7nQ6z1J0TpfSMGzsfIIr5ftznatw8/58r0RrSv6T+Q3A4nxuY8U8Mp7mp6CULRs8nz6wuMoImXn0\n",
+       "bhgca3hM6/U6C81FbFMwLM5ytyBKkaNscIIiKqMABw4Dzzyt1+tEbz98+JDzf3Nzkwqs0WjEx48f\n",
+       "0ymjvyAWcGoiqiNEPn/+nAiMq8Xj6OM4ukK6+YJ2Eh8eHmpVoMv16EBhNpulowxvkPWOgxoRGWwR\n",
+       "/WJUmH/PA0ggYzOdTmMymeSOTo7T4TOnZm2EGQvvOrZSJjgEXSnRBYwPCKrHbWdnJ1E20negek6J\n",
+       "I8MYK4JA70SEW8WRI5SN4XnsYsTBAmFoNps5lre3tzXqhWkMjKPLERA4OLBk3Fhv1oVGQaxv0Q80\n",
+       "1j8GmbHBdrBTlh1wEfVijS+labATpeFG75jfWaZ+rBuMUDnQub6+rqWTkK+yZATBL7XDVquqMCzP\n",
+       "YcydJcHZ5HrmwoiSUS9nRuyolnbG70WgyJgSKDgVztg4aOD/zC8OP+l9H9zO2vd4GDWnv6w1j53p\n",
+       "FS5uzFwQfON00hfbEds/1qydVSPRphO91L6JI4Xxc54VztTu7m4MBoNYraozxVhgEZWxttMDxIz3\n",
+       "WW67jqgKcFrxmXsFzyCiUho22I5aICgjbDQiFStgC6qjVgs7wsmCtxGKqIwP+WV7zwg/aSVQPB8h\n",
+       "wdh1u9346aefImLrnV9fX6dht/OCUJX5dfqNUkBp2glicZRl9UuyngnVjAOwagm3c1/Gx6gTi4E5\n",
+       "9LZcpxfNu+Iz8wz8dwwWkbnROBbb7u5ujXsCemAC5Gq1So4JKetGo5GGjsZY4oDCRYiIJFHD2+G4\n",
+       "CcZlOBwmInt3d5elCnAqWCs4CIxzp9NJJ+jLly85z4wTqI25KJztxljjvEdUpFKu6/V6NXI3SAtK\n",
+       "3ak9z2WJ1JpXiEMUUSHDk8kk1yFHOiFfBwcHWazRpPx3797Fr7/+GpPJJA4PDzOdyVzQjzII47mP\n",
+       "j48xGo1q8oQMErUTLEZUyh3U7fb2Ntc4SF2z2czaetSgm8/nuQ7hkJbIAqgQ1bO5DqRqOp1Gq9WK\n",
+       "N2/e5FzjRONsgZru7u7GcDhMFNOlH5xGIx0DtyWiSqF4LZUpf3SHxw2HCGTExhWdxHOMvKFbQKWM\n",
+       "1iMbjJObMwoue8H7o0OYZztE6Ogyw8E8YKdw/iMizs7OYjAYRL/fT7TLxpo1wPyyLthYhBPselfI\n",
+       "ptPPdrIZu1JfMidOJ9q2svYAFYzGAjY4qOXnfD5PDibyaF1DUMPmFY+bwRTzA5EXv4vXE/d239zo\n",
+       "50upYt+3HBuanVTG+yXELJ/31U9e22t7ba/ttb221/baXtvfbN8EkQKKM6ROtOjdTbSSiNxut2v5\n",
+       "TbaKgkgRRRF9cE1ElbYi2ii9fK5z3p5nRzzfjeLcrmFEPP5yq2ZE5WE7v813F4tFonM8F/SGNEwJ\n",
+       "xxIl2isnAiJ6IqKk/2/evMldUOW4ma9E2tRoHTs3HM3w/jzDxUhp5go48qSPRrYctRFxedMB80TU\n",
+       "YXlhTBkfEB9D3/7n7zsq4xk0okyiYefbneJljLyLCcTUkDz3plimizPSnA6CbI2MjUajJJvTD0oV\n",
+       "rNfr3P1KqgN0zMTw6+vrTA9GRI1nSPFGiOjr9bYg7GAwSETCZ7k1m82MTg2TszGDXTZeX57XMvJj\n",
+       "HszT4TPu2Ww2M93OO3neSP9Np9P48uVLREScnp7G6elpnJ+fJ7p2dnYWEdtipaQjymNwWq1WdLvd\n",
+       "rKZdbnN3BAySxHvs7OwkCd27Dw8ODmpn5a1Wq0Sk2u127nRiPTo1wc5ckEjmt9vtxs3NTdzf38fp\n",
+       "6WmmeSO2aNzt7W3tXNDPnz/n3LNrmirlfgfSlWzbd2HGErkreThO+fi7rIWX0n6sXZPCzZvyhiMj\n",
+       "9Tc3N7V0nrmMcM1A9FzklP6AtnqTERXvWYtlio7rSM0bbRqPx/Hw8JAHHTslxvuhA0GA2CQEKlva\n",
+       "nVKX+TvWMUZWPE8eU+tK72b3GBphQhasv4wE2656w47l2J/RvHnDfK8SvXZDB9CQC/PzLGv+vtG1\n",
+       "krLi76Gb/lb7Jo6Uc79eiHt7ezGfz/P8HZcjcOopokoVke6CK+C0gQmPJsnyGUoNY2WyuEmK8J0i\n",
+       "qoNr7dwxyIbd6S8ChRBxv5eEIqKqKG2SekSVUvMuLjtm7EJid8779+9rBGyUlQn1kHCBcO0EsojI\n",
+       "h9uhdB7d/TB5Ex6CU6nMO5/RcNY45sHj5gUFhMtckLu2wjCviTktOUkYJBZPSXAu03M0O1+bTVW9\n",
+       "PCKS00ffmCufUQbEDZTPHDN3BBMR1aHFm80mIfPLy8tot9uZhvIOMI5egVuFMWTH03A4zPcnRbFc\n",
+       "LuP29jYNMM/lvZl310QjWMDQuEr24+NjOhdwQ3h3O4iMH+/nTQOWJwcH7D7z/G42m1pQxc445hg+\n",
+       "5d7eXh6lFBFxfn6e1y6Xyzg9Pc00JGOMM1Te//DwMIbDYe5CshzjgMFnImBk3tFNDpQeHh4yLQz5\n",
+       "HYfv7du3WZkeA877XV1dxXw+z40k8/k8rq+vIyLi+vo6hsNh3Nzc5Jicn59HxNbh7XQ6cXR0FJPJ\n",
+       "JO7v7zM9jY7iMHdzwJgXBw7mgNqBLIOskjPl/5vrik3gnjaQcIa8gQQd1uv1ckMD482OU9az7QCG\n",
+       "HhkpUz+kWZ1KJMBwSozmVBMbB8o00e3tbczn86wJhwyjd91XvzunZJiyAtfHQaavQdegH80v4r1Z\n",
+       "27ZDUDrQiTs7OzUSOY10ozlbLjNSliPwmvBZkgRGTrGV69ubFPiJfBG42z4/PT0lxw1dZeespOv4\n",
+       "npZtpxzNT/ta+yaOlCMdIz9MAJFZ6UmbNMxLW+lF1POeOGclxyqi8kbhTxhZstKCe1Q6LRhtGwV4\n",
+       "HDzXdZTw9B19lVFGxDbyYSs8jf6ASpnESnRAH+DInJ2dxZ///OeIqKNm9u5RROYyRdSj/VJx4Cwx\n",
+       "BnZ6PEdE5GVES0RjYry5co+Pj1nQknd3P02AtbCzMPx+kCPhgXn3lftqArhliPm3sXx8fEwekzco\n",
+       "REQeVOv6MCa/866Mh40UxOCTk5M0yMwFCq3X69V2fEG23d/fj5ubm7i5uUmjGLGNzN+9exftdjvL\n",
+       "MfAOnHPZ7Xaj3W6nEWY8QdSWy2U6bnCROp1OlhdwLR+IuuzOY2wODg7y7D94QOZGWX7Kmk44wiYB\n",
+       "05gDnE0jmRidbrebO4W4L8URkcXxeJzrzXxEEGLvsqL+krlaljfWi/tqAixrzeRfrsVpYp4uLi7i\n",
+       "xx9/zHpQ5kCenp7GbDaL8XicBoq5o4gqKCYOQ0TkeZD9fj+Ojo6Ss8V4QuhHLzpYctHViEhuF5+j\n",
+       "L6xHPVfMqaN9xoyxLPUJP12ehuvgG8HDsU3Z3d2NyWSSx/3QIFC7bz5KCJ2F3rGjx4aP0sFGP1G/\n",
+       "yw6RETj4jSCONvKsA6NVBwcH0e/38/BhGn1DhyOrHmtsA0i59Tc8Y+aNcSVQcNDrHX3Wl3ZC7Nw+\n",
+       "Pj6mvvCceg4NWHgOsJ/0hXEpuV44SThaRn/R7eiv0nZZPiPqO02NJpZc3BI9K9s3caSIQDlDKqIy\n",
+       "3nj0EVVUjjFx/Smu8w46tjgbqjWZ0QvRA9hoNGrbZ2kmDzMZbDlfLBZZVdnfZ2Jd04d+skAdRURU\n",
+       "njKRhreP4nnTdzsdhndZaOxq+f333zMtQ7TgCIT7EiUTVTBm9JV3MOnSEZ3fEcSNcTOsDDyKovbu\n",
+       "DZwkl7tghxmIAn00wmTFWUYNbPsGHSzJ5jakTichQyhmO8MRkc4TqI4RE8afcaU8BH3keqe0+Wy9\n",
+       "XsfNzU0isTa09JsUK+k7vutzKvnuyclJ/PnPf47lchk///xzbhWOqB9EfXx8HJ8/f07jPRqNYjgc\n",
+       "xq+//hrz+Tz++Z//uVYVm8NFHfkxbswrDqxRB2TBKQ/mmnQRP/kOzyIt7bQA+oJ7r9frRBsiIonU\n",
+       "EVuDPxwOawocWWq1WrkJICJqKAuyhh7yzjmTxbknShvZZg7ZtMD9ymAKA8V72Rn8/PlzljGhDk9E\n",
+       "5CHVpAmNLA2Hw3TIkDWnMJrNZr7z6elpzhP6zP1xw4DRP4zf175HY97soNiB8Bi4ryCg/N33BMkB\n",
+       "mXKwvL+/n7tbcTiQCxfxxa4Y4XbWwrvo+v3+s4DURHtqc5UoHn3l2aChEds0MmsIh8Zzwd/R29wX\n",
+       "mTdoUAatzAGfG3lhNy/3N9JlPVrW0UJ2cIYcKNDu7u7i/Py8tmHqawAC9zOyxHqCkmD6i9+FTSH8\n",
+       "3fbRqVv3zde/hJqCFtvu8+wShSzbN3Gk4D3YsUEw4BLYCOGEYORQ2hFVntXIhJ0bL+oSbiUatEce\n",
+       "UUF5XsQ0Sgrg1ft6eCFlnpdnM4H26vkek8f2YxpCBkRv6JvnoaBd1+Xm5ib+53/+J969e5fCZQVp\n",
+       "hYZAITgU7iQ6dMqMqAyj7pSox4n7enu0OQ3sooyoc6eOjo5qc2jDymIuI136X0bQ3mJeLgI7qDYO\n",
+       "GGyQH1cIp+9sqfZOGuYVQ1vubCHqB+Wy88a7LRaLODs7i7dv36byhRO1s7OTqRhQIJxElJDTficn\n",
+       "J/Hly5f4y1/+kv3zDsPNpqoX9fnz54ySj4+P4+zsLH799df4t3/7txiNRolWWX4xHjjuIFdOjzA2\n",
+       "pBeRX6J65qaMcC0z/KS4KfNE1EgARbkC5Jt1NB6P4+rqKtN7jPV0Ok0E0WmKiCrVzPsa0eC5rENz\n",
+       "ID0nBFPMuWuh2Tn22mQdeJcoyBAHSCNH7DRkTq+urmrlD0hZLZfLWvqK9XxwcBDz+TwajUYeLbNe\n",
+       "r/NvZb0vHEXQcfpq9LB0dkp9a2e6XMN2tLxTmLHCqfKa9r1KRAZaAzqSNCsZA3Y6Wi+xhlxjzfOE\n",
+       "LLDuGXsKozqQtGFnLnd3t0dGkbpdr9e5S5L+WbcbzbFT5NQZ3/OYGpHC4TH9gnHmsxJUMD3D92TO\n",
+       "+d32koYjaX1HfwEXPN6lnJjr6rXC2mGMCFT4WdpaxuQlCg0yZkCDuUeH2PHj2S8FDbRv4kgRuboy\n",
+       "LguFgXZ6A8ElNWAym6NQBJ/rgMxfguVs8BAmC1TJb7IzRokGCgnaaDrHbAWMAWXyjDI5V1ymgEyU\n",
+       "JJowcR4BiKjOboqIVB7z+bzG5fEc8E7l+FDg0+ifieCPj9vCgERKJqLjZLJ4HQUwN1zv8WbR7u3t\n",
+       "xWAwqD0P561cGDyj2Ww+g7Bfmu8ynWDj4kVq5edUKoYc5NBFXDGIZWqpJNAbCXPj3rPZrKYYSDc4\n",
+       "rWsiPGvp3bt3cXx8nPP/yy+/xHQ6zaNOOp1OGszJZBLtdjvu7+/jl19+iVarlfys+/v7GI/H8eHD\n",
+       "h3j37l3M5/PkaQyHw5r8N5vNTCX3er1ajTdqNDH3pOFKYjC/o8C95X61WmW6y2uAe2LQIbguFot8\n",
+       "j263G7u7u3FxcRGtViuurq5qDjG8osViUdM1oGJOG7h0ByiYgynPhcm13JO0JH8j6kU+HBARFdPM\n",
+       "2cH4R2x1InXnms1m7QgcUw9AjbzWlstlOsOTyaSGkJC+RO78fnakcX6c+kKOy9QcDgY6z4GN0WUC\n",
+       "ytLJcurdDgrlLUD8cEYw2qw3r2mCddKY1lFlOYSIqK01+KmksZE10Cg7krZJ/GSMQH+vrq7i4eEh\n",
+       "hsNhGuoyDYUzGBG1NWAebqNRnbhAs7Pg9394eKihcLYDlhs7aJ4fxsuoIt+jkLBlH33pFJznHBvy\n",
+       "UvoM5L/MKPEM7KVTm3DJsF32Byy7OHhG1bDRRspoZUarbK/lD17ba3ttr+21vbbX9tr+zvZNECm8\n",
+       "XrZXR1TpHVAHIqaI6rgHvlOm0xzhOq1Cjtv5WrxhQ6cR9ZOg4QuZ4OocMz9NzIyoDtk00dqRJ5FE\n",
+       "yZECojQnx6mCMlIFIen3+5lLh2zqCGq1WuVWbXviEdUuxvL96A8RD1GRUQLG0DslGEM3eCvM79PT\n",
+       "U0aKoD00ECLuBWy+Wq3i06dPNZlwpE8EATrAeNNPoxCOMEjLGLrnJ5Esc2GEymkdkBe/O1EQiKej\n",
+       "KKc2DRW7j6vVKrflR2yRE9AIdn8xx0TU8Jqurq4yKifVsF6vs/gmxwPNZrP48OFD/P7777XvMg/t\n",
+       "djtLHPzXf/1XjTT++LitdN9sNuPs7Cxl4c2bN9FsNnPXJegC/VwsFnF/f5/8LG//h+dIpI7M3N7e\n",
+       "JuLGdaQMXO4EVMtnSTKHoCx3d3fJL6JYJRtVqJJuuQDlckrFKACy7J3FToN5rTkt32w2a0flgBqA\n",
+       "upacndlsVkPHQEH29rZVvklDNZvNODo6iogt4vH0tC04aY4JcxFRbVlvtVq1Y4g6nU5uvTfa3GxW\n",
+       "h7qDrlhu6a+Rp5KozHecRrWclOkwo/boBtYr6x2S/nq9TvkZj8epv9kFDlJrkjVjZBSbOYB0bfSf\n",
+       "MeC9jVSD1L6UAnKKESSNOQDd4h14X3N0sCfWr4w140wWhf4Y8fb/yRYwT6UtMupotMopwXJOjcSW\n",
+       "5G9sEmvA/GXslncVWg85Q2MEDLQJm1pSe/icfjjzgNz67353xtlySfv/HUeKF/cRA6PRKCeuhH9R\n",
+       "vHd3dzkQziV7EixQm80myxsgXObeuLq04UgTmq0sI+qnjuMQmE8AlAgPiPuQnvT7lVwonC/qCdFw\n",
+       "sJzO4O/NZjMNCNWdIyKP2iAVEVFPUdrZLHPWKOYy1857wEkBtvUCN9HeHCAWC6RnUlgRVe0RHFvn\n",
+       "w9m94maFTr9NhORvhoDtKHuueJbLQhhKN9yObABzo8QtM9SegdvCHOMIO13sBR5RGRAO8KU/du4a\n",
+       "jUY6mcDTjcb2CJj7+/sk7GMIu91u9Hq9+Otf/5ppuH/4h3+I6+vr7AOORcR26/xyuYx/+qd/irOz\n",
+       "s7i/v4/vv/8++4JcTyaTuL6+jj/96U8RsXX4qEeEUTG8z/U0r1FkCGWJXLCeSdm22+2s6u5UD/do\n",
+       "tVq18g+TySSJx+bBwTNrtVq5Ziw3yBHv4DIVbJDxzuGIKi1kQ0Nj/drIUKZkMBjkPLCbkLno9/vp\n",
+       "BN7e3uaOyIitQ9Tr9fI8QY8JKRGcTHZBR9SP68HBcKmV+XyevJ1Wqzo6yBwlAgzrTBuy0ijyuR1C\n",
+       "ZB3jzBq2TrTTyucmXOPYOOhinnCsKDvA+u71etl3TgswSdsUE2p88RmnBUDdsAxzHf0qid+uis67\n",
+       "E4ShKyIqLmV53JhpC6Qynford6cxFyXlgPuZjmJnhb+x7uzYoYuYF8ubbabnCVACp9XOGUEA82c6\n",
+       "DQ4R48r6sSwtFosMLO0M02/e28G8OVpl+o7xNAWHd/9/tW/iSCFojlparVZuyTYxNaJaGNQ2Iaec\n",
+       "L6H8f0RlkDA0zp1bgKzwbbxNfkZImQy+78E1zyuiyqdagP0cFizfJyLHWHAwaERF7ja3onSkMO6t\n",
+       "VnWqPErJx+V4MZHzNzfFAscYec4iKtStRFx4BouMe3vHE0YIUisN48+7lk4m6BmOFgufd2ShOUpy\n",
+       "n5h7I44eEzvtzHnE81PVPdc4IEayMDL0uVRgzFXJ40IJ8hlISUTUtqYTQTI/OG0gKJxnxdwdHR1F\n",
+       "o9GI//3f/62VMWi1Wrn79OrqKtrtdjpB0+k0/uM//iPG43FMJpP405/+VON2YNzn83kMBoN0sggU\n",
+       "iPpANRjnZrOZu5ow8owTHI+SbE2Q4I0NyKiP02H++v1+EnnH43EiZPAIcUIcUCF/DixK3pbrW/Ee\n",
+       "IJKus+N5K51GjAZGkXty5l+3242dnZ2YzWbp8D4+Pkav10sZIApHLkajUR4bRF+RW8uvOSQU5+Tc\n",
+       "RIyRx3uxWGQRVgez3IuAzOuC57sfNtDmQZbcSX/uYMgoNU42fWVevIMYmaJILXO9XC5rXEWXJzFX\n",
+       "Ep3NWjY66KKTBC9G3HBOGK9y3fNeluFms5m6H9vAO5Q7PyHJc2/GxFkVmjdyvBRE4pzj1JtfxHiU\n",
+       "G4jgWuGk2H69NJfmKbNxjADGGwbIlpT8KPsBL70DcsguaaOcrVZVWsd/517oW29qMO8aOXVdSNb+\n",
+       "19o3caSYFEogRFQkQG+pNgmw3W6nYvYL2XiWRE0GgAVgeJAokf7YQNuz32w2uaWbe7qhcLknjk7p\n",
+       "nLFLDQ/aaBgCjxDv7+8nhE8dFJwMjwtRCv2zQOHBf/nyJRVEGUXbgTBaRZ9N2LOQY/DL+zlKIerk\n",
+       "XovFIueY75FSWK/Xeb7i7e3ts8gU+N2pFfrvui1W0I6WIuopX+YbZVo6p3ZCgcLpJ3Lj/3NPGjt/\n",
+       "jMp4zB0EMI+kPIlE6f90Ok2UBkTECoxgABTCjjSE5GazGZ1OJ43jYDCI/f39OD8/z5pd3PPf//3f\n",
+       "Yzqdxs3NTbx9+zYJye4rNa9+/PHHnEPqez0+Psb79+9ryDBGwakNFx+ldhrIsaNg3os5NXpyd3cX\n",
+       "nU4nlaBR0JubmxgMBnF5eRmj0ajmZHqDCGkVxoZ16ajc+sBrFJlg/hlHo5Zch0EhYOH9IUvz7tZf\n",
+       "k8kkv3t2dlbrC87saDSqUSUYt3a7nboLRwuZmc/niXAdHh7WUD7Gu9y9ZxKxEfcyrU/gUeoZrgcB\n",
+       "t/EtEYDyOtBEB4bM/3K5zFIPzCk7StF7dly5D3rE5x4yXhGRBh/75E0P9LcM6HDSyjQvc4Ls29nC\n",
+       "QIPaYF9wPuyg0kB/SC8b4UMWsYu8p8EG+o4edzBgRBF55Zl2gMpd97x/SZnB1nFff8Y9WMcObgjI\n",
+       "CKJs51kv5WYgnjebzTLA9o5Vnm09YZuHvbPTH1GVdvlb7Zs4Uru7u3kMjA9njaiiPhucdrudfCoW\n",
+       "+0vfL1EXDCyD4PyseVYINM/jXkykBxXhKhd4RJWuMcJkhW1nwGkfvm+0A+EYDAZxcHBQ40PQ4ExQ\n",
+       "HM4CBe9qsVjkifXcm/6U/xzJ0Fe+S7OBJCX3krPA4ud5KCHm0NfgLLIbzgubcWOePcceU3+/7AvO\n",
+       "UOn0WaEy9+zEw/Fy6oPrO51OLnBD/yxCO+WlU8DYGlYG5cAZ8Nig+Oinjfju7m5GyqQxnGrkmBC2\n",
+       "/r979y7l5suXL3FxcZE7Z/mMQ2zfv3+fUT/je3R0FHd3d3F4eBhv3rxJ7mLEdm1NJpP48OFDNBqN\n",
+       "mEwmKfukz3AEHh4ect3zDGS/THmyFghCGDMCBYw6yINT0IzV/f19HB0d1Zxq5AinlzH10T4YMqf2\n",
+       "LQcg3jwPAE5hCwAAIABJREFUJwFuF89DP1EM0ty6vb29dOyYCwd+Z2dneXyQFTpcQZ4F0sX8Ird7\n",
+       "e3vPnIxGoxGLxSJPkCC1wgHAIC42sow7awkn7KXGOkX2jcgQXDht4u8ZIUJ/oJuNLhCQPj4+5vEt\n",
+       "Tl8a2bYRxrkiI+Lgw4gEuyl5b8pp8CwQScY7YmuL2NHpNJkDVgelOE7oRae2kF9SgiWPEn3vXWpG\n",
+       "UMq5cMqs1OvuG84EffG6NE/L/2eesKnW0fSV59pRs+PJWqPhYGGLDZKUjqRtADsHoZ+UTqZ3QZfO\n",
+       "J0h1GYwzfmUA7PZNHCkWLk5ARIVycO7US3WkUHoR9e3t5aQZAkUR41BZEFjcLxlZ/k4E4QgDZ25v\n",
+       "b68GG3PfiEp5um9WEBYM5+TL55HTpdQCW8jd106nk3V0SpLf8fHxM3Ih/cHZiqhQg4gq522iu+F/\n",
+       "uEAsNISYBY/w+/1xOIn0rRSbzS0Bt9vtZmTgaN78CCslX89Y+DPfAyeN3/lXKvanp6c8S5BoyqkK\n",
+       "5h8EjWgex8oRr+UUeWPeS+XOfZ2q4h3m83kqCwxuRIWacL3R3C9fvsTT0/Z8PY4FMX/u+vo62u12\n",
+       "vH37NlqtVhLRV6tVjEajNDj9fr+GvoCQUJuJfp6fn8fR0VF0u93aWW4RdUI5TjQ6gNpwpOncWPc4\n",
+       "J4bXGQ+cEPhnyCn13g4PD9PZNCeP4An5dkFSlCny5Ll0WsVGH8cGI2QjjGOy2WyyZhByA0cKOWu1\n",
+       "qlIUyNf5+fkzsj1oCRsRkAEaaE273Y7j4+M8IgbHeDAYxGQyqSHdh4eHacDMSWFdeCxIMzr4fMlx\n",
+       "Kj+zbuY9+P0l9AXUGB2OA4qeaTS23DIQHdZFmWLld/cNWXHNI5OUbaAxvs5yGOVh3eOA81zkxMib\n",
+       "ZZifOF8u8Iu+LtOFpfxRssOZnPKaMsA0guTUHsip0dWI6lxcxqG0lw4+nTovEX87Z5av0pnFcUMf\n",
+       "e+5xlqbTaXKg/TwQfBxinmPqSGlnDCaUwECZxn6pvZY/eG2v7bW9ttf22l7ba/s72zdBpEajUcLU\n",
+       "eLx44ERtRiyI7rwLjobn7Dw73rijVe8qiKg4UnACSvIcKTryyWWEY8Ia183n8/SE8d5fSo85vRNR\n",
+       "oTygbsDKfBcEp4wCgURB9oCxI7Ze+9PTU0KcLrZGg+DL+72EANK4LykK+ASGPP0dw9f8NNLn1Kqj\n",
+       "J1IP/szIAXPnVsLX/I2oDfnwThpSL07v0k++T3Tmwqr8PaIqLBtRFTEl3VtGMKAN5PSRL48XyILR\n",
+       "KpAx754qd6yaQwWfaXd3e5grxNvpdJr3JAV4cnKSu1dJUfGsvb29ODo6itvb24yS4Uydn5/H+fl5\n",
+       "/Ou//muSu5fLZXz48CHu7+/zqBvI7WzxdukLk3+R75c4jUTcpLG9RknhuOQDRG3QwX6/H81mM5bL\n",
+       "ZaJg+/v7MZ1OE/32bjgQpfV6neiaURNHvE5v0CdayRMBRWw0trsu6TNn9+3sVEdcse4oQ4E+cLqQ\n",
+       "MfVmmpLW4PQIpRFms1mmUTgPkesXi0WmF42yMGZ7e3u5/b9E5Iy8Iqfl+nbqzH8rKQ4eR3Qlusop\n",
+       "HOYNBJjyFug0Ut9GGczrRCa5rtfrJUrDeHqNIqtGuv2u/CvHxTQCnut3QBdYlmxX0G/Wf6Q2ndJl\n",
+       "DYO2Oo1Jf8wlJV3qFNbj42MN9UQ2yh13nnOPATbTGRzktCz7UqJm9JvP6OvR0VFSgXxPTsG4v79P\n",
+       "/vR4PM77QXlxdofM0EsoE/rbxw5FVPawTPG7fTOOVEQFaUZUC/D29jYrxZZnv5FOi6igQBoD6p0T\n",
+       "XGuCop0pjFu5648+4sSYQ1OS9pyiW61WydPwYvLzEG5PMEqfrcdOa5owCJmXMSOdAFfHfBYWCie9\n",
+       "25BFVAbMBDveA4fMZx2WitJpljJX/lLqAy4AzgsOB9fjPGAw+Iz58Th4btfrdRoGO4p8D+VgJeCx\n",
+       "Z1F5yz0LB8K2nSzabDbLVAjX4WTZKTJUzZbykhPnvnItjflhF9r+/n7tOIl2u52E29vb20wLQeTE\n",
+       "EYLPxvwOh8N4enqK2WwWnU4njQl9xYCbl0NZh0+fPsUPP/wQ+/v7cXl5GRHbqthwekgn8e6LxaKW\n",
+       "0sTBpy+eAzvJpPuQTXMOcaSY12azmbvfIqrDu0nvcbxOxJZSQLqUtca4jcfjXL+c+4mTRfV2gjOn\n",
+       "r1iLcFbKtPbd3V2NZI5Sht+GnJPG5DrkBFnhmJvd3d24ubmJx8fHGI/HMRqN8jtsIMDxMmmY47fg\n",
+       "m+3u7qbjtlgsYjab1Wp0OdBAFk2JMK8ThwXdUKZIWL8lcZfxdnDFPfn8/v4+D5+m0Q8cPJxFKtZv\n",
+       "Nps86BvZn81muduPPvPZZrN5xlWyjuP9GAtXkmcOv+ZcW/e7XA625SVnCV3h4Jl72W7B47Njx3g6\n",
+       "nY2cEpzyPrwrOgSaQFkuh8DFqVOa72XqBoFjs1nVXfTYOnh0OhlnuNvtRr/fj+FwmOvCO3b5PjaR\n",
+       "6vN7e3vZJwe+pFDNieZ5zBv3sK9gp/ql9s3KH5DnfMmrXS6Xz4htJRHNxpt6VPP5vEYAxji3Wq3o\n",
+       "9XrPJp9m40YzEmSSekTdq+f3iOqEcHvldgjg6UBwtHOyWq3i5uYm+v1+HB8f15w/cuB2eiIqD346\n",
+       "naaxwcj6Pfr9fvJQTJDEszefw9exNR1UgDHFyUIB2EB7F5sdD4wBC6fMj+O8lWRrE05ZqH+LM2Bl\n",
+       "gnJizFx0kTEw0sX7gS4xXy5GimPC2PjdI6LmyL+0HdqLn35gdOwMlvwpHG3X1aLEguXE26XhdGFI\n",
+       "mA8iOWoQcdRIxPaMvuPj47i9vU1ekhX/bDaLH374IT58+BAfP37MvnBg8tPTUwyHw+h2u1lSAYNr\n",
+       "JWpngeOCvCOPuScQYi3ZAcEpJ9o1fwynDIeJDRuMG4fQQkZnHo1QTyaTLD4asTXCw+Ew9YvHNKIK\n",
+       "viyTyAF1veA2cU8MoI054w2SzK5GNt1EbJ3B9XqdZ+xdXV3lzkOcAwwOHEueNxqNYrlcxmQyyaDB\n",
+       "c2iU3zoRuWQeza/h3c0DMv8S/VeuU3OCynHzWqH/5Zl5BC82dAThOMDwq5ApHy3jYA++TatVlTJw\n",
+       "RgF9CZeJvoMOsjvNtovfzZGzjkKvY/i9KQgdav1Iw/EoESCutS4xj5U1ZSeS96CQK+jL3l51HFm3\n",
+       "242Dg4Pc0cnYRlR6jHVjgMRrgrl0AImjz1zwGU4XZ0xSuJNxA7TAsYNz6B1+Jd8Jnc544eBFVPYQ\n",
+       "Hqht9/+LHxXxDc/aM5kuooLrcDIQLD7jOgsEn2HsIGPbATMZknSXrwdJKCMoP8cKw9FxmbZD8bCg\n",
+       "PAFMEg6NYUyUy2q1iouLi2fn0Hk3CgsronJIqeHT7XZr6TkWY6vVyoNqEVQrH0dlfueI+kLgmShU\n",
+       "E/Y9h95JYqfBBFEraIoGEkWXAu77lOPNvf0ufIdxBbXw9w1D27FzGtCy4zFtt9uJQLwUdQMNO51M\n",
+       "UUWQGRvhMvr1+/r/L6Xh+I7J18yb0zBOC+Fg7OzsxHg8joODg0RyTk5OotGo6rusVlUNtdVqW+l5\n",
+       "NBpligiEBIfkzZs30W634+bmJpXb4eFhbm9eLpfR7XZr6xfD43pREfVzJnGkyzpeGL71el0bm8Fg\n",
+       "kMZnNptFt9tNYz0ej/N9n56e4s2bNzUna2dnJ1EdIwiz2Sym02kiH05RsduNMXeg1G63c+fiZDKp\n",
+       "obm8vw/Z9U6p5XKZQRYOCeMNujSdTms7KNlVulqtsnQK+ouU/f7+fqbGXOsNB5Q+uoYW6TICCqP9\n",
+       "OFjonDJN85Khj6ijURi5MmhGL3gOJ5NJjMfjdAScgqWvh4eHiT5ZbhycujmtizNhG8R7gcw6eLIu\n",
+       "sL2wYw2ibH2C7POOTkPZoS3BA+anJK7zLGTE9sDX4jiUtm13d1unsdPpxMHBQe2gc8CPk5OTmjM1\n",
+       "mUyy6KkRQt7D+tRZkzIVuLOzk/ccDAa5RiHvs+7YMToej2M2m8VyuUwHm/WKHSkDGmcn7Jyie7Dt\n",
+       "6BX6+VKGye2bIVIMnAWNhU9kWfJWIiKhca5DiBlMFg7Piah2BrFV0v0oHQHuzeQzgDx/PB4nKkB5\n",
+       "BDsLPt7CSA9pBN4F+J/rIrYTNplM4vLyMgUYp8XRkB0pIF5H1VyH4eEdOMCUZ6LsEXBD0oayvWBx\n",
+       "2jyPFkbSli+l4UB/XkoJehu455doHYcXeSjlyf3h/dmlQXTl6Bk0g0Jx3lpr9Mi5cuYd5Wf0D6O7\n",
+       "Wq3yfu4nyr6s98W7olDMV/Bc0Mx1enp6yiKQIDJOlYN8PD4+Jl+AvqzXVYHbo6OjWuQ/Ho9TuQ+H\n",
+       "w1rEyRZ+EElQECK54+PjuLm5iel0mn2hphHpFiJ+xtvOyO3tbXKrdnd34+zsLKNhR5gYHvpFGpax\n",
+       "wXHzESl2el0qgfIQEVVNK8ogePzZ+Qj9wIYYhxVF7Xd0KQbk3Gv+/v6+dugtzgm7eClT4CrmrAfG\n",
+       "ZjqdZn/u7u7S6eB+vAMIpE9a4J6grfTfc4NBQmdbvpBhr2enjNCRyFRJvbD+ceDgNc/cIVMgijc3\n",
+       "N6kj4Qcy9kbduQ9oFSgPcxZROTY8z2uUccSJBGHku+h1B+iMhdGl2WxWQ2boQxmQeecfes16AHk2\n",
+       "CMCzmQsQd+tFEDBnTEqH/+TkJB0ngghKDlFU15mDyWQSFxcX8eXLl0TNjbZbfqj7xrh5N7jt/HQ6\n",
+       "Td2DrqVw87t373ItgdRbXzqgtn13MVFnDdy8Tkod7GC+bN/EkQI2da4VQwSU7Tx1RP2sMje2MuMY\n",
+       "4KxE1CvQAsWW6R0iMASSZ+EIlAt/tVrl1u7BYJDRD/dcrVapwHg2DQ8ZI4/iM1LTbDZrXAAbAD63\n",
+       "h2042FESzgef43Q5p0wEQMqkVBwIpAWohJq9oFCWOAXmifCe5lvRUNAQbv0eVrLlAn5JPvhbyVPw\n",
+       "IsXweu4cudlY2JHi+8yJiyc6jegjbTx2yAPGAiOPkraDamSNOXcV84jIjQk4py6Sh1LEYcfxYdwu\n",
+       "Li6i2WzGn/70p0x3RGxRl8lkEu12O3q9XoxGo+wLHC/m3+nynZ2dODo6iouLi5hOp3F4eFhDbs7O\n",
+       "ztIRfXqqOFInJyeJvJQI3/39fTq9yBuK9+7uLobDYRoC82AiIonpjJ1lkfXJfSeTSQYuGFqcKVBA\n",
+       "3h8Hi3F1oUenFOBXRWyDr/l8HqvVKrlnyJvnDF3E86bTaTorOKomhiPTIIrML6kOc2l43nK5TN4K\n",
+       "fCGaNwHwTPpGSgr9Rl+dirLDYn5VGQQZ8WaNOJApr2NcrQv39vbi+Pg4ms1mXFxcZLAQsTXsLgJp\n",
+       "buhms8l6WWUpFqNRERW/LiJqWQb0rtNb9JE+lzoOFAj9xpihR5BTb2xAp30tyDVqwv24FufO+svf\n",
+       "KYNYz816vT2fs9Pp5Pu32+3o9/u5PsqyGaenp7G/vx9XV1d51BHzyRhjSxwsUtgVmgLyDeL4p//v\n",
+       "CKqIqNlLBxi+J6iYuVZG/rHRzJODIQc/zohZfr/WXssfvLbX9tpe22t7ba/ttf2d7ZshUnjbJiSa\n",
+       "0FdG0EQ/L6FSRKrwHJz2wyMGHizRqpLgF1GPkEgdmo8CeRS40ygI7wTXgqjFRDzQFzxeUBATac11\n",
+       "MTpjIp/TlbwL1xEhE0X5ufQHJAK0zrwl8yQcCXIfPP8S/jdq5pQoCN9L3r1TcyCTHm9kxTwN3tE7\n",
+       "kIxaMqaeX1f+Bf3kXQ19g/LwLMshssR7esemPzf3B9kiMkOGyk0McBQcUXEvpzrpN3NNuujw8DCj\n",
+       "cu7DmB4eHqYsXl1dxXA4jH6/n7sPzeWCHxGxRUW88wfonvsz3p1OJ6bTaczn8zg8PIx+v5/jPp1O\n",
+       "k7S+t7cX19fX+e69Xi83aJTrwOgb/DTLN2sK9MnzX3KV1ut1DX00Z+b6+jplsNvtJmpwfHycCDl9\n",
+       "6HQ6Od6gPfSHdJA5fhGR6BQ6ylxN1hbIsPlqFNXkHg8PD5n25FByZMSEcsjUpIE7nU6tn8gKRTlp\n",
+       "ICnmOprnVPJyjIiUKSzTNlgL6HGnAXl/aAfmMpYcxZI7tbe3l7tPXSKk3+/H/f19pkJNGWAeeUdz\n",
+       "hDjmaHd3NzqdzjPEgjWPHbKu8fp01sDjgAyASNlegLTzzu4jNsgFnEE1bRPMvcIucF/3weP4EoEd\n",
+       "efTmFdZlu92upWkjtutpMpkkSuq0O/PN2HlDCMgmyHC/36+lruHvnZycJDKHfG82m2i324mce10w\n",
+       "V8yvqSQvcd4i6kdCoVdsZ5xyfql9E0cqImoGJ6LiCsDELw0uLwMR2/UtgCLLVjoShnip4RFRKQRv\n",
+       "/0dBt1qtrKsTESmgKFgbWsimCA7OVMQW3rcjZ2WK4baTiJJkxwVQKoQ/PmMcymrZGGxvC3WDw4NC\n",
+       "cjqJBY2xNMSPQWIhe9EwjzYS7g8OdJmiY+xw/nhfnuf0BlyTiOdbhCOeVxKnHygVPqMBt5fEWDuv\n",
+       "NDtY5iDw3jYmfj73Ne8CnhJ9Nr+MHYHc105A6fQhN5xHaRnm3hgTZIXSBJPJJDlhvAelRyIiU1t+\n",
+       "/+FwmAqu1+vlO04mk+TsQNTFAfHxMHBMUHwofQjqb968yXdAcQLhe4dRufOHs+EYJ2TtJUODs3B9\n",
+       "fR1HR0fR6/WSGM9643NSD8gRTjvvwrhhaNgs463nfEb60QYTmSFVtFgs0iEidcpacOBJ3Ti4TlSj\n",
+       "Z9zYgUddHe7poBMjxpiaIE8Kz5whO0AOsvjd6VNzJO1A2ajyGY5CaYQdhJS8V/NpGU8+Q3fiTMFl\n",
+       "5Z6Hh4exXm83J3gnM2e0HRwc5Lt7V6EpBQ6CSse5lDWaAxsajq0Da77rVJt1B7KHLkCmmONut5u7\n",
+       "TW0T6Lvvb3oEY4jceA5sr3EibfccBDsgd3r56al+EgYyjH5zYLa3tz2Q+NOnT9Hv92upctb509NT\n",
+       "DAaD3LEfETVZKpvTdeUckj51CtU25W85URHf2JEynwnhoh6SvXwf6UDEXZaUh6lvTo/RAr5b5ovN\n",
+       "rfEgRlR1RUo+ixcCzhR9iKgQj1arlSRWHEUiCyMlEXUis9+dhU2UZwFGKWA0/G7O/TsqMXEawjWL\n",
+       "wxwLhBEhsmHG2D89PSWpnnmiMaZGB1G8cNUsnDhrnhP+bifI8+moECVvw8911BVBdnB2MXg20EaV\n",
+       "kM8SBUXhY8j8fp1OJ4nfcP2YKxt5lE5E5dRybx93gbJk/I3KMkcYate1ohAr48WOlIhIRwgkh7pR\n",
+       "3B+HiF1m8BTa7XZuK+/3+zVuAuOKscAp8NywPr0pA6I1xoTdpbwDRW4Hg0H88ccfz9AKnETOoWTn\n",
+       "GvdBH3gNU6YEVHB3t6oVZQ4RjqePbMEhIEhxkUCQnv39/bi5ucmxwYFip5yRYwIjxsbIOhGynWOj\n",
+       "IHA97u7uktcWUTkSg8EgHVT+RiAKR5Pgj3vyDAIbZNS8KFoZLDDHpUOEzkCWHbTZAfW93JAHjCdj\n",
+       "MZ/P80xHry8QRcoZ0Ad+8rz5fJ71qSIiZQG55lgUnodjYieQcbCz4t9L0nKppxkTdvq6n9hIvufN\n",
+       "D5ZDdBtIJqUCuLcdYNBfxqa0ew5KvduTsxlHo1He344lc/4SL8tghscJ+UAOncFoNBqxXC7jr3/9\n",
+       "a3S73dpOdnSFuYXmN/udSn4v4+hjZbgOzuNLOoZx/1r7Zo5URLUdMaLa1dVsbgvrmXnPIjEpr0x1\n",
+       "AA/7IE2ntPg/SondTjzbUQXXEFnbeKHcOBAxooITXX0VR8GHyK7X6zg7O0tDiSDyLAx/uf3fKQ4v\n",
+       "ZP6P02RBdASJILu4ohUdC9aKj7+h9LgvTiJGxM4LDh3v5oWDwjcK5uKEXMP1jvSdtijlwgRiRw6Q\n",
+       "e3l/qjZH1HdR8f7e3cU1yJU3ExC1QFalLzx7d3c3d0yBakRUu8QYS6f9UGxOmdlZL1Msjio9vrPZ\n",
+       "LO9pZWGSPPcYDodZc+3NmzfpSFBD6/z8PO7v7+Onn37Ksb+5uUmnlIgeWez3+wnH23GKiHS6GVOn\n",
+       "w5fLZTpSw+GwdnYl6X4czfF4nDKD3JGKw7B4JxVjS/BjFAtj6iiV63hHHEOjk8iCHRPmn1IEvV4v\n",
+       "jo+Pc+5vbm6SUEs07RQ7fWa7uY0//xgDy4GbN68wt7PZLPr9fpyenqaz4Cr9EVWtqojKSfduMho6\n",
+       "0alCI2tln8pxIxgrEVCjxegwf8b13BvnFnSf3bDMtRvoJ+PHmOJksKvTwRCBK880Gsma3d3dTTQF\n",
+       "mbG+c5BM4OC1YGcY+4Vd89w7Y+G0FBt2ms1mnJycxGg0yvM0I6qAnnfkLELmiB3eOO4+NcF63rLy\n",
+       "9u3b3DSBzCM3nN3J2idgiKjOvESOLVM+H9YOFM8/ODiIq6urODs7i+Pj47zOB7y7dhdjit5HPplD\n",
+       "9LptrQNUHFQcQaNjBBlfa9/EkcKgl3lH5599HAwGC2PValXHSKCIyM96m3fpmRoiJqL04LphpHDe\n",
+       "zK9gCyi1j2g3Nze5Wwlnw5yN0WiUUVS5swBOR5mic6qp9JT5HAiaBc13vKWY7zki4HqUl6MhBBIH\n",
+       "oOSC8E4ggYyNnS732TwJlH6ZejOCUDqZdgZ8JIajWws/ix2F+/j4mHA1KTA7Fu4nsgZqZbSI8QSx\n",
+       "4h2QCcaj3NV0e3sb0+k0VqtVLkyUDeUErIzdN5QCn7nOEE400WU59kD8ZVoxYpviGwwGcXNzkzJ1\n",
+       "d3eXu+7+5V/+JXZ2duLjx4+5Joge37x5U+sLjg5Ov2vJGIWF20DwwXgC3eOs0T92wOFgMU/IAcaD\n",
+       "delUFGkE1hD9WSwWMRqNMqq2o+5AgMYcYrhZr/P5PI34cDhMWaBa/Nu3byNii2idn5/HbDbLsQE9\n",
+       "IEA0j8cRsPkz1KKime7Q6XRqOhGe1OXlZS3A4z34HnwYZJh5QoYdtCCPBF1Gap3WQ19aFq17rL+d\n",
+       "6isLspbUDHQ2Y0S1+sVikXWtPGe9Xq+GSPK8vb29Z0eO8BnrnUDbaJJ1aYmGe94c7PJ3nDfrBBA/\n",
+       "bCFOX0S169rv7lRxt9uNXq8XJycn8ebNmyxXEFEhw+zeNGcrorKnrCHzDnkutov3R2bX63Vyahm3\n",
+       "29vbuL6+TlS4PFrM8mAdtbe3l/QAOIK2ewTrFxcX8fnz57yOEgqMnxEwp5BZP0by+D/p3bIOmndV\n",
+       "O/PDevha+2aIlJViRDXgX4NOidZRelYGl5eXtbowL8GK/LPBiKigVyMPEfEsAjDSBPy3u1s/dXu1\n",
+       "WsVkMskT3bmWdxiNRmmE5/P5s8VtVMOGvSRK+p5GjEqvmYX48PDwTJmyOBGSElpnQRGlsRCJql/y\n",
+       "+JvNZjoE9NP3xAlDgJ3ztsNYCjGKgMXFO2IAcAKtLOC2tNvtrHhvmcFZhDPibewvGVOPGYrHUTeG\n",
+       "jrkD2WAe2+12HlVCLRnky1W+TRKNiNqYlP3BkMJzKQMHIPj5fJ6IRMTW6FOj5fPnz8lRYixHo1ES\n",
+       "rc/OzvI6qmh/+PAhlRXt/Pw8NptNPtM1hkC+4HFA6o2oUvKk3rwOcQR3d3fj/Pw8DVJExZtC1kAP\n",
+       "eP/7+/vke4FAMf/U8+r3+7UjnRhveJcmqtNASymFAFnb9XNAdJCpdrsd3333XVxfX2etLNY0ZSqo\n",
+       "i2SU3s4I+stIR6/Xi6urq6y5Y94Z66vUNUTzjFer1ao5w+iBMsB0msjGruRHlpwg/5/rLd92jP0M\n",
+       "PmOdsP7p6+7u9hw9Ni4YHcWAsjGA+3NPOFOWPcYNtJnvligX34uoHCcHob4f98CJQh86o8B3+LtR\n",
+       "avfZDgpHIR0fH8fx8XGMRqMa0uXMBmND/6iDBvhgvYiux6G07ub4Keut0unhPubaYTvshJR8T9LL\n",
+       "FJ+NiOQEdzqdmM1m8eXLl1oxVmdg3CfI8GRyeCfLk8fJQZK5WvSdn6V/ULbX8gev7bW9ttf22l7b\n",
+       "a3ttf2f7ZkfEEP3Q8LZNirbnC6cGb9yRxtXVVUK4jpCA/ZzWcCMyMyRN/0zms+dKhWwfKeFidxA/\n",
+       "v//++0SJIioO2GAwyPcGiQDFMCzpaAC40QRC99epMb8bKAZcAUctvh+5eROjndp0lExRRj/H6UtQ\n",
+       "Mg6VdPRFuhCeBOPmSAfI2OfJ0UjXODIwCRRZ4XkgI6RESn4IHBzzrnhXw7vIB7C/eUnMJVwk8wcc\n",
+       "vdOXbrcbk8kktwtzHxdmNBoGpwjuUpmChptjNDOiqpoMB8SIBf3+9OlTNBqNWmXz4XCY79xobI/D\n",
+       "+e233yJim7L6x3/8xzg6Okr0lfcHoXGBW6dFQAxAnoxwEpG+tA6bze0RLxcXF7WjZUBwkRe4Hy6S\n",
+       "eHd3F/1+P49Q8Rl5lGTgOiMKpGzhiZQ8zogqfUaKDmQL5IS+8TwQ6VarFbPZLGWROfPGBsYNukK3\n",
+       "2817mwO2v78fx8fH8fPPP8disYiTk5OIqPM/WRs8zzoXlNq6kY0+ROGgAGQEQBiMftIf5q1ETtHB\n",
+       "rB3TGpBhj1MpB3zXOyEbjUa8ffu2htL5uZSHaDabcXx8nPNGMU7QQ/NRkdu7u7uYzWZJ0aBvfMfp\n",
+       "f/5m7pr1kxE90si2Ud5IYKTOckmanNQdx6dAwGYuTP6GPkHaE1oDRzvd398nLcbVxUHOGo3GsywG\n",
+       "RzAhH6YKlHxNyxm6H26ZU6KQ3km3IlOsa747mUwSHUcX8t4u8kza/eDgIMfAdBcoNKW8mF9b0mSc\n",
+       "Qvxa+2ZHxJBSQVCddmHQnTIjL3x8fPxsgqfTaUK8hhAZjOVymXAlCoVjK1CIdnpQ4F40Jp4BObIr\n",
+       "y1uL2S01n8/j5OSkRmREATHRJQ/IcKj5BU7vGHrEsJK2wEiV42zyOO/BgsGAe7cG6T4LpwWcbdMm\n",
+       "YPIe5gdhCCIqDgWOoB1CeDU7OztJjCQVdXp6Wsu505+ISNL/09NTpoCsoHEG7ExFVAbKaRs7mGyJ\n",
+       "RtF64aMwkJ0yzUjeP6I66JM+c583b97UlC0y4/Sc5x/DYz5aREWQJI1jhxLl3mq18uwsV8zGQcO5\n",
+       "Mmkhkt0rAAAgAElEQVTaHKBPnz6lXPz4448xHA5zfqbTaXz//fcRsd2qP5vNkh9kxx4lv9lsnjk1\n",
+       "7XY7FotFchst+5BlF4tFEstpOKlwS2j8v9frxePjY+6apSQA98WYMJ/mOpEOGAwGqacsT+gJ6wWC\n",
+       "OxNTS7I19+cYHuYeRc3zvL6pB0VZCesI6iQdHR3F1dVVHhLd6/VSR2JomcMyYMKR5XfWMPrQgRCO\n",
+       "J30z36Xk8TmIYK3hpNqRMuWiTHmxFng2mzV4XrPZjHfv3iWV4uLiIiKqndQ+fxV5Y5ezd0mXwUmz\n",
+       "ud39ae4kzrW5tuZysW7os+/pVJF3FZsP63sgM8gQARpOrY9HIQVHMG254d9yuYyrq6uIiORE+VgV\n",
+       "7zonqME+OAVrWgNEfd6L5xLsWrez3gBD0InIBGNmXes0LqAL74Df4DReuQOeMbF9KmXRtpRSMuif\n",
+       "ssaf/Y2X2jdxpDBgCEPENlLodrs1h8oC2Ov1otvtxmAweLaLrtfrJQfDEYa5P+v1OksrRFSCQKT1\n",
+       "UqTPdXZOUBAonPKIBQZ8Pp9Hp9PJd7i7u0uP26eS01j4Jpry004Mz46oHByEAuMfUW0RRTFbYCOq\n",
+       "Mg54/RgtmqNLowXeMs7C9+JnfECu2I5Ozt+L15wGBJnf6ctkMkkkwjn1iMhCkpAnqVWEXPj+cIb4\n",
+       "HcSsRPkiqoJvLGZzIUz+Rh54P5w/5LBE71C+RJn0r9frpUOE019ypIzwWG4ajUY6w5YNR9soThck\n",
+       "3dnZiV6vl+/nOmmdTifm83n89ttv0Wq14ocffkiZmM1m+fPDhw/x7t27iIj45Zdf4vHxMR2Yku9h\n",
+       "vt16vc5dOBDwefbt7W1thw7bwr2jkbVAYNHpdGIymdTOokPeMYiHh4c1gjsOD/LE+9/c3KQjuFgs\n",
+       "kksVUW0Hhw8C8Z7nEZCg12xgzEXp9/u1s/2oPUVJCq/DTqdTc9iN0j8+Pia68vbt29RTIBGLxeJZ\n",
+       "fT2cdProulV8hgPW6/We8XRwvJAtI3nIKfrdXCcHUs4CRMSzde8xjah2hxmlZ7yQ3ePj4+Q2zWaz\n",
+       "nNfHx8faMT+gfqwnb1CgD5vNJnlFXt/oPO7r/ltPWy4YN+yZSzIMBoMM4sssDI4fusubNwgeAAcg\n",
+       "lLuMBRshOPeSsQFVp78GJQgEmGPPE0619aQRKH56juirHS4K0jK/XttuzmBgGxjz6+vruL+/Ty4n\n",
+       "epl7An5g+x0o8BP7YE6fkaiXHF6/e9m+iSNlD9GQa0R1fpgJzgjSwcFBIg8M3N3dXRrQciLxSL3t\n",
+       "GWO6s7OTqTi8ZUf7EXVkyIsNhc5EMOh8B0V7c3OTC4LnOP3C+3nRWfmWf4uoH2ZpR6bVqh8iasFi\n",
+       "0TjCREiJhktCrYXGSAeLkJo5VnyOaJ1GjKgIgihTFFJEVQQRobcimkwmuRMSpWKUp9frJXkSo8J7\n",
+       "ozDLVEO/34+9vb086BXHppxD+shnpHvYUuw5RA7Yudjv93NsI+rVeBl/pyy4/0v95fuO5HjHMpK1\n",
+       "Y080zrg6LeICkc1mM2Hz09PTWK1WcX5+Hu12O46OjvJdHZV2u9346aef4j//8z8jIuLLly/x/fff\n",
+       "55g7JYaTiLM7GAzSYHz+/Dl+/PHHRHF3d3ezphUlLCIiKxjbgPEsEN7r6+taIMK7objRMSBuTg16\n",
+       "XgguMCQYIeoKgUyQjmMOHdHaCFuBs4UcRwrjRSDlYIgxNBLqgqSs7Yioka2Nitl55N3t8BnBR77R\n",
+       "F6Sc/dn/Ze/NfttKkjzcIClq46bNdpXdNd3ThcG8z8z//zbvszxMowvdXVWu8iJLJMVVEkXyPhBf\n",
+       "nu+EWXMvGhjoPigBw7YonpNLZCy/+GUkjhB/m2aADHoOeHc+CGT9ApLF851KZ24oEWKn+8uXL8X5\n",
+       "pC4Ua+j0k20Jz2G9nGoz2szY6IsPJ7muodce5y2ntXl2pp6QDuZ2CdsY0DcCIh9Acn9ADq3bHx8f\n",
+       "yxhx1HPmB8fNwT9OKQVJjWT6uzyP/2MTcGLywSWc16enpxiPx2WvcdIVe+D0MPuBFKpTa+yBxWJR\n",
+       "Ds0gF7bZgCXMKw6R9asP+jhdmIOEiK9Ljrg9iyOFoXReFUTFhp5FBGoEdnRkCvxoFr8dJ35nX979\n",
+       "6uqqHPE3IsJ3zdfKCEREvRQA/+cZ+fJdPGyEzekqn9CwgPI+KyU7Ofyco98cX2bM3mygWeZlOVrw\n",
+       "WvAOlIznBUVMmhDEI6KKonB2XI02IuLz58+l767ezfwQnWRFZJTAGwqZQOHhOERUl6yyUR0xNhqN\n",
+       "gmJOJpOYTqdlI5pX5e/zPaf5zK3i+ZvNJobDYTnlwjvZ8EbNnMJxCi+nxVg/UEYaa4TSdzkGGwXk\n",
+       "ysejKRrKZyBL/X4/3r9/HycnJ9Hr9eLm5qbICHPSbrfjj3/8Y3z48CF+/fXXiNjVmen1etFq7W4C\n",
+       "IHWFLFrpNZvN8jlXyXz58iVOTk5qx/hns1lJGVLoFIQT5Utqo9FolAKUEXXFSGqHlCvGhdQA5Qci\n",
+       "KmNiOUWm+I7RFaMQ/L6d5Iio8RNZJ8YBaomsgZSyLk6h2ZiCcNMH73eOoeP0g5AyPtARZDsHH+io\n",
+       "jCDjuOIQ2Olx6svpE9YC3Yc8O/jCCTBixR7z3np8fCxyMxwOY7vd1U776aefagjCZDKp3SQB8kRf\n",
+       "cDLYI+bd2TEEDeH/lhHkgf5hh3xaknlzZgV7gsxwMTVcVFqer5zuJ2ADmSOIo6/8Xg607JRnxwDZ\n",
+       "R5/4e6D+rIOzNDirppA4S+Gxr9frGI1G5bsEVft4S0b/cxqZ/X97e/tVCpaWKQHIL/OaA1DQOnwI\n",
+       "15/ah7bV5u43P/k/bJBNM78moiLp2rnAyOBQmSsQUU3C/f194T5F1AvamfMTUTkXh4eHcX5+XgTT\n",
+       "ffGGt/OCcsLZQ4CdIgT+dRqPiJwog2gPw8uC+eipNzqC5X7a4XC66PT0NO7v72M8Htfy+a7fYvjd\n",
+       "ggJ/i7WwMgAyJUoAQeJ7kAm9KVnzu7u7WtToHLiJmCbwHx7urv+4vb2Nb775poaAkQpmDhyZgDbi\n",
+       "NHgTMI84TBR+i9ilBQw5k86IiBp0jvOeG4oKkiPOC7VPkDk7x0Sc/DxHdGx4Uo05vcP4HHmjzKik\n",
+       "3el0as77ZDKJzabilqFsbm9vCyry/v37WCwWBT0hzfv999/Her2OX375paBHkNTZDzlNzd1+y+Wy\n",
+       "5pxTDmG1WsW3335bS+kfHR0VxJDvei88PT2VUiibza72D5wvUoYYZ6OujoI56OAUhh1G1oc55boo\n",
+       "Uil2pHESMUTWQwQeFN1EvpElxujaVOhJy7D362q1u7KHAMlXJ6FDcL4cXcNHwzgbBffvOqjjWayd\n",
+       "6QXIBnKDrs0omH9GY96sX7Jjw7sp+hoR8Ze//KWUdZnNZrWq94+Pj3F3dxej0WjvO3E60BE0HE76\n",
+       "awd0s9mU4MxBDt/DGcyBEJ+xh0FCkYvNZhPT6bR25Q/vA2kFdTL6CJeU+xhtB1gndEJOb+EU0XJq\n",
+       "zw67i53iQFFY2QgVeg0ZNRptp7LZbJZnsu/gf5liYRlj/DRz0VarVQyHw9qBAyOa+AQenwEPyz4p\n",
+       "0+12G5eXl+V7Dqp+q72UP3hpL+2lvbSX9tJe2kv7O9uzkc3xsJ0qIXVDysL53Ih67tunB5bLZYlk\n",
+       "fRWII8RMGMMTBZ4178oeaGb98128cPNlQC84yulrBHxCDvQBrxj0yDA+jdSVI1/Dpk4LZGI06RsX\n",
+       "WvMxf3OfQBsidkgPiArpEx91hXsGIudCaaBSRHT0td/vx/n5ee24Os90Cq3RaNQKrrK20+m0kCB5\n",
+       "Jgif8/JGwCi8eHFxUTudBBpHtH9+fl7m7fPnz6XyfLPZjF6vVyO3A20z10ReVKbOJFM4KqQMF4tF\n",
+       "Qao8DlJMrC9yg+wR6eVjxUS8ufAgEV2j0SicB/q6WCwK+R5uzZ///Ocip1wcvtls4urqqqBqj4+P\n",
+       "hZMGcsWpPTgN7LHlchkXFxdlDOfn5+VdFNnzWsCXcCoVOeWOP1Jk9IVn+UJvk/RBnFxgMaJ+d+fR\n",
+       "0VG5EobvwVdh7xjl4y4+iqBa3s7OzopucqoNlIt97JOd6BDW0ukl5IoLhnOa3frOqaGcnrMOI5W3\n",
+       "Xq9L9G19wv/z/XdGXMxDYQ8zJ6TNXAAVtNlrYHQwo2B8ZhoBKAmI47t37+JPf/pTOY15fHxcTi1C\n",
+       "tAe9v76+LnMGgs6+MfnbiB/vN3pkjiHzynwbQXYajv0AsmYkGp3tAsG/deraGRquJyMTw/s8x+wr\n",
+       "dPg+JNDUCGTRtIrtdls7dQ6CZ9pNRHXlljlGNBBPp25pli/G6cwP68WhCMtFRHWbAbo+Imo2Bf1u\n",
+       "ZJz+OCXNZ+axIR8RFaUhnx53e7Y6Uvvys0B2FsKIiq/EJaZ2LIBoUdIWRlfhdh6b9xkeNK8h9y0r\n",
+       "9oj6qZmc24aUahg3oroPDkVkweCkFA6NlT7v5TSG+wdcifLEkJsH43oa5lhYMbPRI6r7rJz3z2P0\n",
+       "fBmqHY1GtbsSPX7nnd2YE893zk8/Pj7GfD6vEY4pXQG/zmuHHGGw+/1+MV7Ox6MQ+YzULWkBH7k3\n",
+       "9M8fO7gYdOqaWE6n02kxyigik+3NPbHTYwXK85z2pQ+sh9NRrCm8JQcmOHWbzSZubm6+4hUeHByU\n",
+       "K41YZ47ncyEvvDVkBsNwe3tbg+k7nU40GjsC+jfffFOrst7tdmuydXh4WOSJe/jgpzjFTPkG0kL0\n",
+       "P9emg+/glCjryB42n+fg4KCWQnx8fCx7kHc1m82yp3wCibRw3i+ZrDqZTEq6lEuOfR+aDz64XlZO\n",
+       "l6Hw5/N54Y4yb+wNc3voA4aNAM6pYhtjX9rLHuQ0qoORiJ0+YT1szPiuqQKu+5MdKq8xP3eqlGd+\n",
+       "//33MZ1O4+effy57mGdyKTdpqsfHx8LLIViPqNJVdlxt2E3E93jZd7YXDoqgNfh96HtSvH4mOjYf\n",
+       "+OF7OEbMC79r0rdTf8wVTj22yH1FN1g+sJ/mvxmwADzIlBaew7p7LQj06Yc5nsw/e8Z2wGn57PBZ\n",
+       "Xpgnp/74jHIL5qQR4LpeGu/DGXVql/nM78/tWRwp35HliCaiykNaESGAkOtM8iVfbKKueTJm7OOh\n",
+       "RlSOAwrWgsiC47ka6fFkmrjod+wzbC4+aVQmokKWUELk/pkHPjNRlL7QB4yOyeYRUa6eoFaRyd8m\n",
+       "v2dnEUcTwTI3w/NgxcRJOG6iz+Rnk6OtNMwLy84skS3v4Th7RIVs8jveTMzN09NT3N7e1hzefr9f\n",
+       "5tf9YAyvX7+O9XpdSKusPQ6ueRM+DYPsse7mBzCHh4e7m9p9TYg5UHa23SdkgEtA6bsdYDuuPA/5\n",
+       "wAgig1YioGX0BfmHAP7dd99FRMT79+9LuQF4OD4J980338Rf//rXgtjRF+48Qz6/fPlSHIl2e3e/\n",
+       "Hg5Uq1XVQluv14UvB5cCzuHj42MMBoNy3Q7Iqfc7z8Dw55NX/r85REavfTzeAaBRK+YebmFGE82x\n",
+       "Yl1w8EGocJ7t9KC8XZ6AqDiTmH2whbnHcTFnpdGoymUg03bUMSDZGcr8VAy8kTzrYOtJ81TR4+ZY\n",
+       "8V3WwEiWUSC3VqsV3333XaxWq/jhhx9iPB6XS6LzNVWQ0plvo2lPT1UNIjss7DnztbLOsk7EmWMv\n",
+       "ZSfSyBTOPmvBWB0g571sp3W7rV9jw3hMhqeved54nrlcRt2Yf3SZZdkHqH7rpDey5+CBAHKzqd+P\n",
+       "yfdyPSrPKTIMQsozsZW2t3zGOuZx2dml8UzLApmynC3LqJ7bszlSEVGL9vedmjNZk0ldLpe10xTA\n",
+       "jUYJeA5CbOWQoUR77vaWgQRttCPqpHicDBNHSRHiRLBJ8YJZlExGRZE6VcczgcbZqBYEPiOKR/Fg\n",
+       "HEgvTiaTWoFMKzungZg3lB19zg6h4W0TGTebTXz8+DHOz8/LJsj9iqg7ITyPZxpiR1njGLkKNesA\n",
+       "apCVDf0jIjVakZ1jK7eIKCkEn1BxypENZ5QDBwj0xOli+kZUjQLlHTmNQN+QWaJ9EAEa91PxnuzU\n",
+       "s69AN1gDR7NOeyLDkMK/++67eP/+fUTs7tP7l3/5lzg4OIj/+q//ipOTk3j16lVE7NCqu7u7uLm5\n",
+       "iYeHh9oJuuPj40KMHo1G0Ww2y95nP5+cnBQHjMb36QupNtYCB2swGNT2IWNcrVYFBXXFcxwrZNx7\n",
+       "itTPyclJQW0dYPkAxsPDQ1nH5XJZCosif3a6+Bmpa/pDtG5Umd93LaN+v18zrugDn5BiDCaS814b\n",
+       "K4KPiCoFyr9B63LAlJ0a9BljRN68j63fIio9472f98i+tHYOkCKq0hDdbjfOzs5iOBx+ha47lWZn\n",
+       "kb2KfkT27QTiODozwHx7bvns6ekp5vN52btZlxIgWyc6Bcf/mWMHxYzB9AocGZAp20v+DeqaM0Am\n",
+       "1FuHIYPoQztLnGjDEbXcoa+dQrNjY/TecoGso989r6wZyBF6ivlA9jJKZPuBw8n7MiptB4y95YDP\n",
+       "yPD/5kRFPCNHCpjQJ6EYCM6DlRtGgdM4hjkRSBbMgunN3GhUx/+d42YBUTa8z6mwfWMAIfCx44OD\n",
+       "g1KBGmXDMxk3C2OjDG+C/rmuDUUa7TFH1IuWNZu7o9PwUlAk/M7Z2VlMp9PixFq48e4zPIpQMUf8\n",
+       "nO9ZcTKnmbPGWuAIYJgcuUdUjit9M/TfbDbj4uIiNptNjMfjEl2ykdhUTiuguKxQmdvPnz/H09NT\n",
+       "XFxclOs3aCim9XpdjtxbBtjw7i/f8+ZEdoz0oLhcUZtnGv1zetvy4XpbXot9R7lx9H1qC3kHwcTR\n",
+       "6PV6ZS045XJ4eBhXV1dxfX0dP/30U0RE/PM//3NcXV3Fv//7v8dwOIx//dd/LcqGAp6r1SouLi6i\n",
+       "1+uVyJNIdbVaxd3dXa3QIykh+m+nEM4NhsnGAM4R8+UghHeCxOB822CyD1GuDgY4IccfdJSjV97j\n",
+       "9R2NRnF0dBSnp6dxe3tb5AM0A+fK6SRzwAiK+B5GBHQEh5Q+MH6CwGzIrL/sKA+Hwzg5OYlut1sL\n",
+       "PpAT9o0DW+TNDpMdBpBYI6R2bBzY2CkAMaSBavF962bXykIv3t3dFZ7jt99+W+YUBDYHZk45s/7Z\n",
+       "kWLNfNqXZ7AmDnZZH3QvThXy7XSaUR4KCmP7DAJQOgbd5SDCts+OhlEwvo/TlB0pI0UeI2vCz+0Q\n",
+       "gsqwJ5yWRdcgG8wxKWaQZdKCyBvlQkDXLcNGvtx/mp0yo77oWcAMAiyP9elpd20aY/B7mTNnHv63\n",
+       "tF7EMxbk3Gx2VYfhprD5DTd6EUlPAeHnaIXFiqgEwkRRCwKfkU4Cyt/Hs0FBZIcK4adYZESUu8eI\n",
+       "sM0Hyn3K0WVERazjGfQhC7ohRzsr3CPFfDin3+l0ym33PCvPsfPFkEeNGvFOoiiUt4XThGZvRMPb\n",
+       "fg7vxtBlB6vZbJZrBUBmMN6Qwp0eyAKPkXGqCbI0Ssx5dKJRNo8RoMFgUIPqGTPNKBKlJ0xUJ9WW\n",
+       "o33PrbkRNL6DwvY7I+r3eTk4oC84YSgU114CjSEtAupGam80GpU73H7/+9/Hf/zHf8Tnz5/j3/7t\n",
+       "3+Lg4KDcwweacHFxEZ1Op2bYIf2Px+NinO7u7iIiyvwSqXvclhUUW1ZuzClOodcR9IG0t6NKp7iz\n",
+       "XsDQMEdGfChrAkqUkczr6+v4/e9/H4PBoMwp84qSdnXn9XpH+saozufz8j6QDiJ2owB8F+fEe8by\n",
+       "wVF/p3GPjo5iOp2WquYucJs5S5YnHCfQIKdoPLfoAOstZIo96vQ0Tk+O/NELjH84HNaoIOjRX3/9\n",
+       "NT58+FCrMYb+y6lG7ADy6WDI+jjrXPpNf7wORu3zdWNZ39uOIO/+mR1MO6I4h7yPdK5tofeGUT07\n",
+       "xZZr5NzvJDDNDhGyYOfZhynM87Q8gIYSkFoWjV4yT0admHNssPc26+xglPHRRwcrXif+jUzzfZxB\n",
+       "yke4EZT9Vnspf/DSXtpLe2kv7aW9tJf2d7ZnQaSIgrfbbYki8AINM2evkOOOERUxjWj31atXBc3I\n",
+       "RHWnBTKHxnCiv+Ncqb+XoU4QE8ZALhkYmsjT5F9QFEcYQMwHBwdxd3dXoGFQJ8ZgLxoEj0ji6emp\n",
+       "nE5xocGcQuS7GbWyx79arQqXxH01zwfSrlMRoIdOtzF+5of3Eb3QR9bcXBcXkiSV6iPw4/G4Fn3k\n",
+       "dAScj30RBkRpl3BABpHRRqNREMfBYFDSMhzH5b2WXZCu2WxW3uvrjUA1kFOjCVl+Sd1RcDFD8Ya8\n",
+       "c/rLc+so2b97eHgYHz58qJ1aQ/4hf799+zYidoVjf/nll/inf/qnaDab8cMPP5Tn9Xq9glQis6RS\n",
+       "KbrJGt/d3RUOWrfbraWSHHmDUBKpWp4gLVMtfR/Pj1QDe8R8Ra+XOR2gFIzB8kSVc1CJdrtd9BdF\n",
+       "NofDYSlOCtrOaT5OnZpG0O12y54h1eQUItwxUFCnzeDjUWrFyAJyStrESHG73S5pPd9DR5kMo0PM\n",
+       "p9FQo+smamfU38greiNzQPm+9YVRDfpwfHwc5+fncXNzExFRkI3VahWz2Syur69L1XPeY6TE6TD3\n",
+       "LaNBIDXsT/oHdwiujrlNRpXg74L8I3eupo4Mm6vldDJzy+9EVFeMMXbSUqZYmH5BSswcNMZhBDvr\n",
+       "HsaVv8ucMJ+et+l0WuNist8sF5R9MVrm8bP3nYJ1RsPvM9/Up2/ppw+BOP0H2gaH1adSmXuKg4Ke\n",
+       "MWdkxH6rPetdezYKuYZFRAWr+pjzdDqtOVksFEezqSHi75uv44Xi+0wQv08/yAXnnK8JqoaiSYdx\n",
+       "rBxnhGe3Wq2iGP0+w8ZsZHhAzAuGwmOgTz4tyPdGo1EMBoMiuIzfuWAElZQcY0FpzGazr4y335lh\n",
+       "XOaAvjpd6E1hUj7PzFwEGu8xNI8jxf175tPxPvM1SCXQX5cnmEwm5ToXZM0pouPj41K9G9gew0Xl\n",
+       "3IhqA6NsmQPWmtN/+5SbTzJlhwBlBx/CcsO7rODNyzGJ3ScoI6rLiVG4jJFgBeVsnsiPP/4Yg8Eg\n",
+       "Dg4O4qeffqrtQ4wavBSud4mo7iEkvdftdsta4Fizp+BM8b31el3qkrmEw2KxKDwQFKpTNexNHGTL\n",
+       "DYbCqXGXDeH3ebYPKTAnOOLmlcDjIWXGs0hduoQHqU0CHhP/aRj236I74HjhCPG+w8PDmM/n5YJp\n",
+       "OwStVium02ms1+u4uLionYayc4BMe1+iJ3BqkVn6gx7ACWNPkpY2l4fP0G3sU1Me0AV8p9/vl0Dx\n",
+       "06dPZf/haHGAgT3tZv1ASQS/x+PHaYUSEVHtNVNJkEWCcfZvpoGQpuMAQ05DZZ1Hc3radtHpKObX\n",
+       "up13klKz/s7pzkwo93wZZDAvzCnXiCicYM8ffeP/7HM79TzTZHPr74io6T2aaSmtVr1avKkX1PZz\n",
+       "ag/7jVwxBkAG9IjlEAcs98PtWRwpn8JyLhfuBpvfXIHtdhuz2eyr0194wiyE66kQBZjPY+8UD5p3\n",
+       "o8Q48mzF4U1DTp8J9zMbjUatcNc+j98kWhqb1E4FY2DD2TGIqAieRol4H0iNDbEVscm1/pu+mIRn\n",
+       "ATePANQhH1HO/Cs3b+j8MzshfE4EnSPkiChEcdZvPB4XYXd0heLwprGMmFt2dHQUl5eXpf8YGxoG\n",
+       "FufNXCnehTNhJGy9Xsd4PC6cFK8xRscbnHHC/WMNPW+sqb+Xlfs+hMAcFu5sMz+u1WqV8gzv3r0r\n",
+       "BQ3n83m8efMmRqPRVzwYnHWcJDun0+m0zMnp6Wmcnp6W+W6323F2dlYrAGlnIvNYTLiF94Wz4b1j\n",
+       "3eGCuTyDtTPi4sYzzTujzArzZMU6mUyKEV0ulzEYDGpGAQXOOqMjZrNZnJ+f1/a/+2KF7n2BU847\n",
+       "4Z9FVLWpbm9va/IQsXPczs7OYrlcxmg0in6/X9bJKHuzWV0v47ljbtCN/NyBlZEd3m3UOQd0OIWZ\n",
+       "f5iDDTiRrOtkMinXax0cVJfvbre7y6HNu2PeTHTGCTMyjqHH4SIQ9ljQHy78zNhAbjwGozmscZY1\n",
+       "nr1PZxJoZztjMr6dU+wrJ44dmKEPQEF9yIh5wgm5v78vzim614RyNx+gMDqGXKPjzKNFJoyk0bDr\n",
+       "BKoEhW6np6eFe+q5Qvf7PsKISkcTKOeA3air14gx/P8OkUIAXQzOpxqIThnMdDotkbEJ4jzDxjIj\n",
+       "KxCNjUDxN9+xoaTRH28svscC5VNUPoaaIU76aCE0ugN0aCFmfETBRi1o9/f3JXLkhEREdRs8pyLy\n",
+       "0VtHXRZ6GhsOpZg3yXq9rtWgYb7tvPAczztzYEK9YXcrWMZxdHRUijpmcreh2KOjo/jy5UttLdjg\n",
+       "RIQRUUMvQF981x7RyT7EjY2P7Jocyd+OTP1ODDOy7RQeP8uHG9brdYn4qOPC9wxn4/zbKWUfGRX0\n",
+       "3KHYSCdH7Gps3d/fx2QyiTdv3hRSckSUC4SROc8p+wQD7wurqUr/9PRULikFOb26uqqltI1yeX5B\n",
+       "MbMz6HnPSIdTe/tSnqzJPuMVUZH37bw6qs4pMwzcarUqRTE9RqJyggOex7w1Go1YLBa1u8Gc3vIa\n",
+       "YhScTkOGDw8PYzAYxOXlZQyHw1gsFjVndDAY1Iq+IudGVdin1jXMox1E61MjLXZemENSJH4na4HD\n",
+       "4aDNcz6fz0s1/YgoqZn1elfaJdM5kIl8QtZrBvpLAy3ySTHS/ybH08+ctnfQk4MX1shzwNo76LRO\n",
+       "zMG/aRL8n/2Sswa8D0fKFBXmw7YpoiKxozOQS2QKCgpyYEQMPZsDOmRjX4rut5xG5gFHDzn0HiBz\n",
+       "4EwNDRTKJ8g9pzTsgufWiFruS5ZLt2dxpHL0SGMRnCKIqC6hHAwGZUCuRE3OE0OEMFJp2sgAk+MU\n",
+       "GY6ZNyDRsbkdfGYUKC8GjhyolRUfAsz3Mupjz9kKmg1jQ0lf/LehdrhGhljzOJhzDHwWfiMlOdok\n",
+       "YvA62Vm0gucz5p+xu6w/x40zUkZ0gJPVarVqhoZNxdzQz+FwWFIscAqIvPv9fkH5Wq1W9Pv9MhtC\n",
+       "9csAACAASURBVPfj8ThGo1G5VgauUET9VCVpHRfUw3lpNBo1tIjvUtOIOXdqNxsiO6AogG63WzPQ\n",
+       "nst9yCLFP70eNIIOInMbttvb2+j3+3F4eBjX19e1S025WcDHxiMqnpkRGXPn6Av9N7IAsgkny4YG\n",
+       "pbxvfHZA8l4gzb/dbr+6mJi153lGSLzHnPrmd/luhvqREwzqarWqnYRkDY6Pj2vHrk9PT2sGLKJK\n",
+       "sdh4ZFTCOoM+Iac3NzexWCzi7du3cXJyEp8+fapF9uv1rrQHCIn1IIEhhtfp5Jx6+a3+sSbeNy4C\n",
+       "6jnNgSF7ww2DuFwuy5w6C5FT3/BgnPFwIJ2RL2SGOUUGMxJPYIIsZS4jc2QZYl54b9bfvpjaOtiB\n",
+       "KE6PP0OOWDOPlcY6ZIQsol5RPqfh0PsEBP6eHUP/DNTNzhXzho1AXrz2/l3LIv1gDbzvSWmDGNpR\n",
+       "Zt32lU1ANul7Djz5PnNtR9N7b197tjpSTIaPUOI547wYNvYGv7u7i8+fP0fEblK5EoAjxI5aEGJD\n",
+       "uHzPnrkFdTKZlPdtNjtODEaYRUBI891ILJ55DxFVJMMCYWwjohwl5nlGjpgbNkM2mnZ2ECDmjD4w\n",
+       "VnN2UKKsg9NGKDMbNKNWQNsWdsZv/lHuq//vjcia0R8r18ViUYo4np6elhIEEVWFcqITOGoRu036\n",
+       "6dOnaDabJXWVSbwoNztnIE1E/Mvlspb2g+fCMXWnNa2QQRDpa6vVKmkv0kom2rKGrLOLweFInp6e\n",
+       "lut3WAvkFjn0++gXCtxpXRAp1pO1ABWK2Bnkk5OTgjrhBJ2entaQWBpV9I+OjqLX6xWEhLFThXy5\n",
+       "XJaUAc/AMXdai3Ejj4bcQWDNZ8u8O5AxG3vmBLQHJyobLtI11guOaJEPv49CpHCUkJvJZFLumWQ8\n",
+       "fMY8YhS979ATBCx2Fk5OTspeIxBhDDhxh4eHcXl5Ga9evarxXxyYGDkkELWsECjY6PAuIw04EZSV\n",
+       "saNtZBuHynOKfDr1x2c8p9frxWg0KgjRaDQqe4mrgByIoivZG8gV/cQhcvCFEXbAuY+a4SrvEdVd\n",
+       "bOxd6wKPFZ1pZ2a9rooME2Qx3xnFsSFHDvdlU0yQx07ZnmRagtfQNZc2m00tLc5tFYzP+x9b43Qz\n",
+       "jWdZfnivnalMk4moHHkDHwYWWG90opHinPFgv/B31hf+/Qw6+B372kv5g5f20l7aS3tpL+2lvbS/\n",
+       "sz0LIhVRoTf2ToGAiUrwJDudTrmvCxgUb3E4HBZ4m9QADSg9R7K8L0cPJtxyegjvnb4AJeLtG+Ll\n",
+       "ma5ebk8ZZMnpvYiKqGfYme+Z3EckmcfoNF2upA4MTVTrKsJwvZjT3IjIHZU5ktvHsbA372ie+SL9\n",
+       "5TQkaVqQSJPB4XJMp9Ov0qytVqugUPATQJaIrrgvzpEu/fGamQtA/0CKHBXBZYO/gzwxB/TfRFDG\n",
+       "0el0ylp6/pxeZT0cQdNAF00AhfuWo0sTYA1le+zA1ZkAyrvOz8+j1WoV9ATCLKmr4+PjghAQTT8+\n",
+       "Psbl5WVst9vCu3r9+nXc3NzUIkhOCSLrIDzmV4BiGC0yx8Gpa+bM4wB5Y0w+qs+cIY+ZX4L+MeeS\n",
+       "SDYjiMhup9MpJ7PgNvLZ2dlZ7bQSn0G+59Jic9pIXbLfTD/g+piDg4OyPtxfyOXny+WyoIBG+tB7\n",
+       "yJPTmv7/er2uFevkfXALM5ePueP53hMglcy/v+f0EjqQNeVz0Fini9FfyKOv5yHVSRrazXxF9ilj\n",
+       "dDYgp5G9R9nnNL7jK7QYX5brfSn9XC4G+TLdxPNi7hConhGyiPq9kaZ8gEbxPc+p18LjQdfSV1A9\n",
+       "t30onNN4UEJymtH20FQQ1n+z2dRoHx4zupj3QeNYLBbl1B7rT6ke9DNzQV/Qp/TJ47Fc7mvP4kgh\n",
+       "XDnHTPO1KxERl5eXNeVmSI4FYELyUUg3Q3MIl6Fl+tBq7a5dIH3jflp49zlg/DyTVHkuStHQ6sPD\n",
+       "Q0lTkN7zpZQIN6kvK33Dj84V+7QfQmHnBW4RZDwLqlOShtw9DpzgbrdbOy3z+PhYq3HiOcjOgSsj\n",
+       "t9vtUmHc78KxWa93FapJu7LWlCdwOoy1oPYKc+sN5ZOjHp8rImNg2VxWMN1uNzabTVknDDlyZEct\n",
+       "ojpJZQVkp4d1xRC7BhPrinzSH3hT6/W6EGX5HStpxumUAo6lT7DRjo+Po9/vx3q9jru7u9p6NRqN\n",
+       "mM/npW6bHeXhcBjdbjdOT0/j48ePNY7BbDaLt2/fxmKxiKurqzJ2k6uRGada2PPZcDHHrBHGhblB\n",
+       "6ZNmyukkjD77g8b6oRc875Q8wfnabreldhE/e3x8jMlkUkuLcO8ka7BcLmsne5GFfLyalA8Oix2p\n",
+       "x8fHwp/EqSAgefPmTdzd3ZV9mnki6DAcGr/35OSkpged4uPAj51zp5StQ3LQgs7L3CrG7r9zehdZ\n",
+       "9R5HL8FF9clqKrfbuTHXyzowE5U9H6wp823ZcH9tQzglZtvAPOMY2DlCdnmO+5Fl1in9ZrNZLt7m\n",
+       "3XZQPE95/dk7dnwjqjJD6E1/bz6fF71p3ULLoERucETN5aPlAzbMW54P5BK9g33x7y6Xy7i7u6vt\n",
+       "txyI4RSi55kXvsPBFq+hSf/72rM4UkRk7pw3dzaKLDboko8i+tilnxNRGSgEMp/ow/HhcwutT1b5\n",
+       "tJCjyexgRFROIv3KUYSRMCsQrjIBgfE77DQ6V8x3UbDmSHmzmkeSCbdcG2PUjZNsFjK+B4mTwpjm\n",
+       "QjAfGGcLI3NHP3yShJ/zvHzM36dIjORERHE6KHZpx6Xf78f5+Xnc3t4WdJKx0wecRvME2LTInfP2\n",
+       "/IHvxPhwXDE2djD8TkfBzK8NFHwOHDQ7EE9PT4V7FVFFyVzr4popILy824VTGZORMPp3enoajUYj\n",
+       "JpNJQTUonkl/cN6enp4KOsbJvIuLixiNRjGZTMr7XQ6g0WhEt9utnThkDTNX0QGSOS+WH9bIDq9l\n",
+       "ln5STJN5pKFzMv/EPCbPv3lOllN0GSccvYcPDw9LTSe4Scz34+NjDSX2HvHJPMsf/cZpIyBiXBym\n",
+       "QP8YxfOpMiPVPJ/Cif1+v7Z3vT9wUO2E2gEBrTLXCZm37PE8DCm6LuvX7XZbC/iYbxwk86/y2kdU\n",
+       "qJf7b32Y+XGWB+tL/nZGgrVgvDhc5pfaeWTd/Cz+YLDdT5rl00gd+tL2xDXzmPMcvNEXo5XsRes8\n",
+       "vjeZTL7imjmgx1HKBU+t67D56IzM1fL3CP58ctCymJFsnLP7+/tasWTuDaXPHq/BG+ya0S0aeigj\n",
+       "cG7P4khl5nxEFe0b2kYxLJfLWhTrNM1sNqvBjo44HH2tVquSBomoCi9CfrUj4X6iII0eRFTk2Kz4\n",
+       "nTbxGOzx0kwIRpggnXoj5pMtdgaBN7OhMRLTau1OplF9mv6hwKjwahQK+B4HkOPqs9msEDyppmxj\n",
+       "YkXgzYZhMYnfhsYb3uNHQVjJG+2h72wOKz5QKCI3YOqzs7PyTKBek/Adydh44eTiwKA8IuqVh7vd\n",
+       "bjnubhnmO6B5bMzBYFAzJC7ah2FmnC5Y6Sj17OysKB/mwcqVOeFv5N3KkHGgoNg7duSRN2q4/O1v\n",
+       "fyvvf/XqVQyHwwL98z3WdDab1W4BoC/ed74zE0eGk4cumZERaeSK+Vqv18V5NCLLd3wowg44cmxn\n",
+       "ne+BAmVEgf4Q3FHj6Pz8vMjpbDar1eui4QDjJHst0FutVis6nU7NWWIMpPEGg0FBxwj+fKDAhzB8\n",
+       "0tSNgHGz2VXuN/kZmSf9yLr5WP1isahlDLLcgP7YUcnZAxfKZO2Y97u7u9rhJHQ6joWJ8RRB3Ww2\n",
+       "pfAsDUeSPZ77YDI573NAbuSZz2zUnU3hO+i+HODYKfL+xTm0Pst2kfmzvmbeeKbfwXPQi0aimBcj\n",
+       "kHYyQao43OOTvkaUmWcHqw5OszPCvBFkGHiwDUCGvE7YOFfnZ3+ia72+/r/T1/zNHzt2/1/bszlS\n",
+       "CJ0XignDAGSjjxK3sCJEKIEcgRkFMofCG4SJZVJt8A2vRlSX4PJsvwshsZKmsUAWanNvaAhb5gyg\n",
+       "EB2xue+kdeBJ0Afms9vt1hwOUlMooNPT02LkuJYFgzudTktF4Zubm5jNZsWwRdSjJUfQmXvCnLFu\n",
+       "VkSkTbbbbTkSzvfIkV9dXcVwOKxFszgkjIP32Jgb/aGhuFByKAM7ShiyvK6srRFO5oHSCKQ881F+\n",
+       "Ih87z61WqzitPi3GOKbTaTktd3JyUjuSTP+JmByJ2nnI6CiyRXMwYJTXaCwRW6/Xi8PDw/jxxx9L\n",
+       "Py8uLuLLly8xnU5LZGp+Dc8EeYRbhdHjRCRONeOz07her2scFAwpKQkj3OawOeqPqAzlvlOpdp69\n",
+       "Dp5DHC2nIXFAQZ9saFD0PBuuHGP0dSIeP+gXgUu73S6O4nw+L6dYr6+vYzAYlNSejQHOkCN9dMLx\n",
+       "8XEtDcX4SaNz/Qw/d7CZeVfsocViEYvFIrrdbi3wRR4dlPKZjet6va6lS32adj6fl8/u7u5isVjU\n",
+       "ys14j/N//tDYM9ng+3PrWJprGtlJ9Jx6DH5e5tPRsi7M+xE9470ZUe1t1sDoLA0aAXbNto3Peee+\n",
+       "kjPMH8+ECoHddHCPHWde/D50tJFABx/sTzvEPNOBisfPvgQBNBXFJyfRK+bbunZcDjyxs/Q5gyIZ\n",
+       "GXN71jpS/DuiXjMmok5IXK12VW1R8laoIEvm2xgVceVjIyYQ0djY+wSZln9uQ5sdAjsvhi1ZNH5m\n",
+       "Hg1j8sJmiNNEbvfNYz09PY0//OEPERHxzTfflFpAKACuhmDeyHmjJHDCLi4uCnrlO94idh4/5SbY\n",
+       "QEakiKAzIsV8OJ3qqIXfyfW3OE5NBGrCaUTUkJ19iog1cRQHomK5YA3NQ2EtfedSRP26BDcTGTFQ\n",
+       "dpascB1EGPrfbDa1NBTcMQyUAwX33xErfWFc/HwfAsr/M6q6Xld1jRwZomB++eWXmE6n8Y//+I8R\n",
+       "UdUuQhlxPD9ilxYAHXI6lebUjfe2jRafmScGoRvlbh3AOrGmTvs5LcdagWbY8cp7kTlpNBqlRpXn\n",
+       "nOfe3NzEdrstTsh0Oi10hG63G0dHR8V5coCUG7wUHMyzs7Ov6kH1+/3CCWL/sv+Qt4xms098PQ9j\n",
+       "JyjJFALzM9lXzElEFQifnJyUK1qcsmSuLYP0x/rAKf8ff/yxHFggoKCOFI6rHSnXrcpVqL0PMyLm\n",
+       "z2jej/QTR9loFg09jd7Z5xDk9zgr41Sbn5mRVPpMJsHBtZE82x2PFydhvV6XPeqK+Ov1rkbjzc1N\n",
+       "DS02Gpf1DSgi8+IsRZ53+ue/XbvJBzKs53P6nb2HTWF/8x70DfXreAbBtm0/fSGNaB+EZzr7sK+9\n",
+       "lD94aS/tpb20l/bSXtpL+zvbs15abC8TDzNHAfxtBAteQ0R1RNepQUeiJmI7kgKqzoRL3mEv39B/\n",
+       "jvqJ4mhOqWVOQ/aE88kuIitDoxEVXGluS0SUO6aOjo6i2+3GH/7wh3j79m1ERLx9+zZev34dFxcX\n",
+       "EbHjNs1ms9oN4qAfoAA+Xpr5NY4GOJmCB+/0EWkF0lB8xpyAZEEWzc8nSqYvPkoOkuESB+aYOIom\n",
+       "0oQUbzRusViU+QfRyjJHis/jZr5BedxPTovmS2f9b6ORLvEA/8L9cfTj9JGRFbiB5ld5HI5M3R+i\n",
+       "NaJV5Io5Zf5AB/M+fP/+fSyXy3j79m1J0a1Wq5jP53F4eFiuILHsk3o1GhQRBX0zkdVE9Ha7XfaR\n",
+       "9y/cIFASyOqG4ZEzUCsib9bWvBcaew+emtFM9i0pN0fQpG05en12dlaTN/YGiBbfm81mZZ1ms1lB\n",
+       "IPke6ZsvX74Urhj9BE0mmrZeZa4sE8gMc4IuMeeLeTd6zzPRdei2xWJR43WRwuGkKY15A7Fwf5A5\n",
+       "+DXtdrsga91uN3744Yd4//592WsgebPZrKR12FvIIqgRpUqc9gM1ps/eHyBilllkkrW27XF60NQF\n",
+       "/vAcf25kJu9xpxSdWja/lPVxWo7DJk67MlbQGo+Dz0A5mW+4T6A4vV4vPn78WOvPPp4Rssfesb3E\n",
+       "rpj7+luZg8xTBpXaR0WIqC5mNhUB1MlpXacS2fvmyTEfLn3A7zOfWZfm9iyOlCczOxrAooZHgTEh\n",
+       "8ObTOXmy/UzXDDGnATKdDbEn0NCtnbu8MBmyxanj/2x8uFUYMDZ6xNcXpnperCgMtfI7bLZXr14V\n",
+       "QnlElIrUHAuGw+ATC9PpNCaTSTEOFn4MkY1DRJQUDMIL/yWiSgv5FBpzikLYx0tBHphvX6HhtWm3\n",
+       "23F3d1fjlrHxTeZmzTBKrAnv5lQHp4tcD8qp2cxpILXs/iMXOECu++UUg1M4mYOB42QOEZ/jSNJP\n",
+       "jjvTH4/VhiFzwPibvttomODrlCrf8RpmR8GptsViUaqbO53GuwzB40S7wniv16uNgXlErqgLRF98\n",
+       "uXjm63kP+znMG2tFX/fxL9in2dGiwrv5gA8PD4Wzg9zB58GYEIBQ+Zx+ku4j/YcjtV7vyka8evWq\n",
+       "lP7wZcd2RglSIqq0rnlpNHN2rDcZOzqSdbV8M9/cvmB9xL7BiO2rsZRTJl4v7zs+f/PmTfzDP/xD\n",
+       "fPr0KX7++eeaY4Ne4m/vfV94i6OaCdUOALMD6n3itCe6n72fnS70pfUE62GOrNP25jRa1ngWcpaN\n",
+       "OHKNfeNQkZ/l9JqdZMZ9cHBQ+I4R1Z5Hh5+fn8dkMinyZhsRUbdRtpfWNU9PT6VMB7LH9xhDXgeP\n",
+       "Mac7mRvey/tyqtY0C+s20zasA9G76Gm/E93hwDi3Z3GkuEjRCorOo8gwDhGVo2HByPlgC5qVG5sP\n",
+       "z53fQ0idh7YC5/8Wdt6dOUCMA4XokwruC0oWAcpKDG+aSJoxwxHJRticqdvb27i7uytH1REIvG82\n",
+       "JnMIMoQR4lQMfeX0w2KxiPF4HNfX1xER5eJQO0GOchzx4QTwbzgNKLKMAkEOZK4iKqWL8nGNMRwV\n",
+       "jPF2u62dCkMpgo4Z1eT3MWw+1ZSNqBUUyFdWpqwN8mbHO/+NA2huD2uBrBoFYtMjd2xoHAHkBXSR\n",
+       "/iAH9DnzkuxgGJVgzjGKVryQuClyyjrd3t7WDjA4aGFuTERnPZfLZRwfHxeZAS2MqE4TEUBlDiH7\n",
+       "meKUliVH56BPNPPKspKOiHJpOL/Hszi5BFJnLshyuSxOLtwgDmiAAKFz7BCiP1jbjDqydw8ODsr9\n",
+       "jqwNe/zgYHekHI7QfD6vRfmZH4bh8ykvr5M5e/4MZIf1MoLQbDZrwaKdbOSKsWYHzOvi1ul04urq\n",
+       "Kl69ehWfP38uPDzWlz1EaQlzGJnv1WoVr1+/Lg5oNqz7goh9Db2GPgbd8BryPoJGf4YutjEngHIZ\n",
+       "D88Xjvd2W5WPoOGsWu4zX8/rh2wYgYd/SnPpHPYwzYTx/PsEJoAY3lfojPl8XkMC6WPODNkm8Md6\n",
+       "xO/2qWjmAd2JPFgX23ly4B4R5UCHA37z3DJCm9uzOFJAslmR2ZGKqBdwRMF6siOqUykIKAY7ImqK\n",
+       "DoXjUygoLjtQEdWE5z8R1f197rOdOgTVhoxnm6DthtF2VGWvPY+B5k10d3cXv/76a0GkECjSA0S0\n",
+       "PAsh5FTeeDwuCvf29rY4J5zYc2SCUiEStjJlg/NzNpuLdLrvfI/oiurkHjOONSfTjICB1jAe5pa6\n",
+       "UtvtNnq9XvR6veK4OAWcU2nZObYDYifWp/toRlptsDxGZMtwtCs+ZyTATjvPsXFjHKCKtIzQOl1s\n",
+       "xWL0xZ/xb6dFPAand5jvo6OjUmzSyHC32y0nPdfrdUwmk9phERwiyOpGajFOGBTWkEMmoIvcA+YT\n",
+       "SDZq9Js1zWlBIygEBxgv5n69XhckifG56jvvbTZ3aTp+5nsFswOCPsEJc3QNAmkSLPN2enpanEvI\n",
+       "8qzTarWK2WxWHGtkzvKMI2CEBCQCPbYvTY0MnJyc1MpR4Dz5cnA31iEj1TaYdrQiKoI5znu73a6l\n",
+       "kh8eHmqnDhkHc/b4+BgXFxcFlWbd0FM5E0F/9jlWBPaM3zQCUwsIFu3U4AxYv0fU64IR0LD2yKj/\n",
+       "uGwCiKzXJ6N7/J71EjaIvd9sNuPy8rLIooNYzw1pZWeETGsgzZyzK+gnp81+K6BDhrxODnqzTTBI\n",
+       "YL2Zi7b6XTwDxyjTL+wLeA35/Lfas6X2iIhyNEREnL3ziMrQ2KAhDBg2BCWiyrFuNpty8ozvkS4k\n",
+       "WmVj8Uw88OzZs6jeMK4ldH5+XsZhLxbPG0Vh1MFKOI+PPru+UebWsIE/f/5cvvfhw4fo9Xo1Lo5T\n",
+       "EaTfiL7H43ERlOl0WjbNcrmspREYF+vFc7wWFl7PN0JOn5364XcoxWDOAQaFSB7HAycQZMhoAI7U\n",
+       "ZrMplbqJynhOLnXAM73OHguKxbJqfhiN73kunKrOCKv5LVaYPNcOljc7xh5ZArXkfcwd77Aj2WzW\n",
+       "i97ug7+RDxu29XodnU4nnp6e4u7urqwFNbZms1ntOif6zJH42WxWLpyNiJJudo03o0qkBbbbqthl\n",
+       "xM45cd0uZNsOsHkXeX1sWCiSyfdQ1Ow15sPFOBeLRY0DaQUNkgsKQloSFM9OJmm/09PTcprPaBpc\n",
+       "Qp7F9/j9VqsVs9msoDK8jz6x1zIagM4kOrdsGbFzs0PtcgARVYV65txyFFGllDCkpk3we/yM/4/H\n",
+       "4xgOhzEejwtPkjVHz+/jaq5Wq1gul9Hv9+Pbb7+NXq9XmxsbZ88XDogDnoxy2WDn1BJ2IZfIARWx\n",
+       "/vX7jCx7LeyoYm+YHzvCOSWGg2bUzGthRNU6CocfJ5QK/bnlWlA8i2yL9RA6HwfMto2+OP1Og4u6\n",
+       "b76ZD/4YbGCsyKKBDuYNnWlk1HzBzWZT6iVGVMj4PvS69Pc3P/k/bEymNzjohr1Q13rCkWAy+QyF\n",
+       "yQS4mCHCizFwuhBPlujJi4SBtEIz5MjGQeAsrL1erxRH3CeEjNFRgo2yo7WIeg2WvLkRCMZ5f38f\n",
+       "P/30U+lLTgu9evWqjJd0l9NYNCJ0ogzDyk7d0V8LGvO8b4z8nZUshgXFYRTEBGFD0vTl6ekplstl\n",
+       "Oca7z7BRbweZcS2nzWZTyMD8vg1QNsI5HWBo3NwQK0Q+I4XH/NPs/Dv9yzgyguE0If1DBjL/wpwq\n",
+       "r2FO69B8LYn7xJgwRBi2nMZCpiynnz59Kr93fX1djFzEbq/ZIeFqk4hdhW76MZ/Pa1XWIf7jZJPi\n",
+       "5Tkc03bh3Jz2xOBmNI/5zqjqer07Mk4aLyOOrBXf974g9eGAjM+8DkazQdvRlQ48mX/SKqTzaK1W\n",
+       "qzhuRrLMi+MdONSgqBhhGz2cWWTOSD7vc9rLzYEl85gDFjv1ds5ms1mRj9lsVvYwP+OP9yEBxOvX\n",
+       "r6PX69U4kBh97EROw7OP8rpwbN5IjsdHP7J9MocUXZXBA6NA1sME3kavWSeXSOFZdnT9HT/T6Sr0\n",
+       "gwvfdjqdstaAHTxzMpnUHB4jZNbpdl7QTfxxOhLZ9p+cyeFZDj6wldb1zCllfdB/OKm8L4MmzqZg\n",
+       "f0A5vU5G7fe1l/IHL+2lvbSX9tJe2kt7aX9nexZECh4PKauI+nUAREQmFhJRkgrAO+Q0gSMiPHVz\n",
+       "HfZFguZROJIy6gEilGFtPNSMVsBN4DnO+RKRZoSAZs5P9vrzBaaMgWf7RE1ERX5l/PAXvvvuu9JX\n",
+       "OGOgM/AP8Pg5CeIoGUTCpEXe6agSNIfv5cq7hsmZe9ICvJv3Ad+SAjL5m2gb2aEvpJJAKUDgeCbz\n",
+       "vF6vy/U3EbuojHcSkTtqcYqk3W7HYDAofWm32yVyBWkx3wUOivk3ERWXjz+MhzV2FOl5A0HwPDvN\n",
+       "CroFquj8v7lnTjnkfZQROU6DzWaz6Pf7Ze6Wy2V88803ZQ06nU45vLBareLk5CSGw2FBDowac8ko\n",
+       "qS1QUw6lgEY4KmUeOEGH7JgjR9TtPW6Z4gLsfegwiMt8Pv8KjSZq9VrQWEuXMXAKgetjzC/h2fCS\n",
+       "HM1nsiyNiu6gwp1Op/AYjWRAZjafJyIKSo/cek2MOBuRMPJJoV5H7ehN5HFf+s78JMZtVHSxWJSD\n",
+       "LT/++GPc3NyU+SRrQV9ZcxAIp8EuLy/j1atXhVuVdQbr+FsUkswZBJ0BlfIBBqOdNMbkTAPvxj6h\n",
+       "z9AlOWUGwpznk3XEZoKe5DQga+uMilFs0Ni8xhEV9QN96u9jO7K+dzqT+QC55XtGeninm+0e46a/\n",
+       "RobJotAPI1KgrKSC6QtpevaH9Rr6Mp8gZ+ym4uxrz+JIuQ5Q6YhOugGl0XELXq/Xi06nU5TUwcHu\n",
+       "1nmcLhsMjGur1YrJZFKu2fD7fA0Dgsh34AH4NJRz0iZsMx6qOJMW8+kNp2S8kCbPmSzHZwhuhhaZ\n",
+       "KwQtp30Wi0XtuorxeFyc2IuLizLfOBZWUu43HBTGb8VmZzFzg5zeYf2c1vX7nBO3s8hzXQfFBFcT\n",
+       "IDl9yLtcSTqiqnzd7XbLFRhOtUbUT6Iwl3bqbFRNqiUVyrgg1zudhlzCAcMJ4fd8EsUwOfNhBUEf\n",
+       "zPewMnNqIh9ggPjtk4xuOCSso8fQaDSKg+6ThxcXF7Fe747rX15elis8InYpuvV6d/VHfmbEzglD\n",
+       "8Tvd//j4WNtjNsBHR0cxHA7LaVyUK86ygxKUJ59tt9uSJsIZcaAFiZnx2rGBU2jj6DWMqBxu3ucT\n",
+       "UyhxE4XZKwR8JrV6b5h7Yl7hPgcIvg7cOT5rt9vleXyGw+u6e8ynU3AYLIy/Tx9SJds63XNjMnUO\n",
+       "SPiMNNft7W1ERAyHw/jy5Uvc3NyUfjCn7Pmjo6Na6i5it7+4VDyiCjhYi/l8Xpxrp0tJW7EGDlCc\n",
+       "XkIPZHqCA+4cQNPsrFh3M347vDzP9AzWnL3CuzL/Kn83Uywi6ulk+or8kDbn9+ERIgfIkcdIn1ar\n",
+       "VbEzzLlTxt4n9I858DOzXaEhR6QJj46Oio63Q+p38n8feMjyjZwyd56ffUGT27MV5KRjTKr5P0ag\n",
+       "InYL2+l0Sr7bXKfLy8tyuoPfN2vfJ198XJJNDwKQvdNc98mKD6HNRhjF741KM/pGiQN7cuaZAgAA\n",
+       "IABJREFU+0Zw9kVJzjPn5/K3NzInVY6Pj2MymZRo1YRrol0QJ96FAOLxm9th7sS+/ngtnU9H4aMQ\n",
+       "LJR2HHL0YWQwol4HhfcTkRodQ2HacLCGg8GgnEZEDs0twunx/XcR1Y3z3mjmchHxgK40m80y38yd\n",
+       "x+IoHfn0VSl8xkbGONuRYo585RGySO01F+20XIFYeZ38b6M/rCHPxvFzNHt9fR3v3r2Lx8fHuL29\n",
+       "LcVgW61WQUDY10bH7u7uiqNkZ4kSHEblGN9oNIqnp921RhDfOegREQVtshHhu1wwjYEFYaGvrAEH\n",
+       "IzK6AKfOhHcrYpBzrzt7DfK8uUrIE9w7O0kYKYI9notT52icv3GGMhpPX+gPjm5GG/hjRMrIBnw/\n",
+       "rslBVlqtVgkiHXCiC3imZcpZAAj8vvOz0WjEdDqNu7u7mkOCTNsYw7vjeivGsVqtiv4bjUbFwfYf\n",
+       "+uRg0HPD5+bLZITCqG7mq+F4O1DgWXZK7Chnuc3vyjwkyw0OBXvA/DT+oL/4HgVzWU/khDmNqLiJ\n",
+       "+4qJmvzOGLmmbLFY1AKiiCoww75YLzCH7Ll9iHJExYXMTibv8il7I1roGfsf9IF3+5nes/vas57a\n",
+       "c2TiiMUQfkRFGPalnBZwECMMGMIAYuLFyKkFnuf3sThGmvievXwrWP5er9dF0WZ0jN/B83ZxTIwN\n",
+       "EauFhu9ltAIHaF/EgYM0GAyKU+XSEHd3d+VyYMPWfldEfKUAbSTw8C2Mnh+iTfrKJsMpcIrT76Zk\n",
+       "A435oB/8LlGjjYn7iUywvswbztfp6Wl5Hv10VVzkyugQlznTB9aQlKChfJNPeY+jX69tji5pJmIy\n",
+       "D5yuQdmRxvUYGQsKgNRvfq5TmJZvO+VGDj0OR3+j0SgGg0EcHx/Hzc1NzUA9PDzU0micqEGGbm9v\n",
+       "o91ux9XVVQ3lIaXHOhwdHdUKnOIocmLNaTCQYaOG/i6OIM4xDSSTvkXU7wPNhOF82ADlbQXOAQoc\n",
+       "YgJC5pTvshd5dqfTKbKBYbChcRBH2pi1NaqELuAzBx9eZ+ZovV6X6tbIBQEuCADpdx9rd0rMe4Q1\n",
+       "xcBaRjFcm80mPn78GB8/fqyhIOx75sDGjGet1+vodrvFPmADWK/5fF4cKcopoDN8uTT6hSAb+WDt\n",
+       "s/PiMaAnmYeMzGU7wJxiA52moy/ekwYd6BcoEXonIz127GxPCKLYB3bY2C/0w4R21w9DtzLfRhit\n",
+       "h5BBkN+cRuf9GXVjL6NLeRbvcDbFjib7hXkxoMHPQdWto+zcGeVHzvZlhNyetSBnxG974M1ms3bM\n",
+       "nQ2XYT7gaRaVCDyiqprcbDYLKmWDzXuJtuwQdDqdGl/Eih+h9QZ0P1Fe3hg+RbRYLGppODsVhhv5\n",
+       "LKLaDPboOZHE9xCSiKpOBv188+ZNOaXEGszn84K6tNvV1QwHBwelrhSCbmTICs0Kk4gUAbXwZcVj\n",
+       "ZYPQ2oB6LegDa4/xyjLhuVsul19d6WIl2G63yyWwpM0Yw2ZTpVet+PicyMUKkrlmI8/n8xgOhzVj\n",
+       "DLJgY4hMoTBAshg/6Rbk2gqaueNZv2XIcKic3jGaiNPPOOgnishjZD+gcBhfp9MptY2QVd53c3NT\n",
+       "ngNSxjPH43GsVqt49epVbY6RUQwgR9iNHOH8b7fbePXqVRwcHJR0Kf1yqQmvP0EVDpX5JaxFs9ks\n",
+       "6HLELq0E5+L09LRWK4o5Zy/ZOQaFAgFEL7EW/swpG+YQx4a54Hs4GMiU0xvZWPoUlNfRkT7yh6xY\n",
+       "nzCudrtdUnsOWtl77B+nshkLv2ddT2s0GjEej+N//ud/alyv4XBYgg4bffYf6TmuguJd6Ajq5+Ec\n",
+       "45h5r7oiPDo2IxDoc+su17Nj7tA1diKRUebF68ReyWkjO5vZgDvt7sDSmRh/17bKTibyz/epEcj+\n",
+       "9HNwgAAYXE/KJ40dWNAX7MA+gMIOIHuDZrvgvhCws17WQ+wVZ7XyiXyekZE1z31GInOQmtuzpfZA\n",
+       "V+x1I8Sk2kyMJL0HidQGGqV/dHQUJycnNSKnc8URX9dsMgRKY6FwpvZVGiY9kBf58fGxcDYspDzT\n",
+       "KZYcmTkKsKKxIsybG+VFxIZgoExxJNjs5onglCBUvB+j9eXLl6JsvGmazWZBFWz0czRmw29HcTab\n",
+       "FePCmHxlT85He/1cBRujZ2XC3/BxVquqSByfHRwcFFTn5OSk5iw4PWPlx/f53AY3okpDrdc7ntB4\n",
+       "PK5ddWP0Edkz0Zh1hLPB99jscNI838wVCF+73S6KjHQfht/H3B00MAZHiY707IBlHgHoC880tG/+\n",
+       "Fc+hRIERzul0Gufn59FqtWI+n9f4eNQwOzzc3QmWUWoU5uXlZUEyaexpAq3tdlvmO9d/MlLdaDQK\n",
+       "0mKibESUK0dwQFwviL3Ivttut7XinZ1OJ2azWTG2fMbeZOzHx8fl0Md8Pi+6pN1ux8XFRY0OYAR4\n",
+       "X4rKQYYdKe99O7zdbrdwi9CjdrIODg7KtSy80/okI2o0B0n79jd9PT8/j81mEz/88ENERFkDHA1n\n",
+       "Djy/V1dXtVQqKPVsNovJZFI7MICxNippZ5B5zKi5ETfmz0FMDqzcsjNqZ5AACf2HnsXBdSCZOTv0\n",
+       "OZdMQQcb9bYNw+7hMBnpQS/jlDrAZD/wXdpsNis8O+xNTouZU+h9jKPDM22nT09PS61Hk8at90hj\n",
+       "0+xcOpNB35AlAg/bWfY032cMDhB+q72UP3hpL+2lvbSX9tJe2kv7O9uzIFJ4fYY5ncckteDoa7lc\n",
+       "xsePH+N3v/td4SFF1AskEj3zTLxhIganAEnvEB35Ql/4FkRX9Ie+m+Nl5MynWohmTIzm/4bhI+op\n",
+       "wdVqVSumlnPgPqFANAliMZ/Py7xA+uQZRH1454Y0STkalQAuJ32R89P8ntN+pD2JZI0G4dEzp0Sw\n",
+       "EfW0QYb+Gbu5YE5tIkv39/c1/oWfDZJjFILn7LsihnUAgTKXy2k4TnDxPdaXIndOERoFog/MI/07\n",
+       "ODgo62ekw1GnUz+kYUCDvL6UKCAd5aPjcAUcsTqSZm9mNJF3wDHw+Ej5cBKOasZ8HxknCvbVMp1O\n",
+       "J25ubgqnyVd9kCbNp+Q8P5BZ1+t1QUjMU2TPMR7k5uzsrJYG8vg5AZq5TkTDeZ1Mokb2+d5gMCjr\n",
+       "a54X3wPJIso2Wsh63d/fR7/fL3uf/XpyclLG57W0jBuRcoqMvWuS+uHhYSkbYmTJZT32Veo+ODgo\n",
+       "l0475U4DzUF+8v1/8/k8Wq1WvHv3Lv76179GRMRf/vKXMldGQJBFOLFOqyOLyE3mwvD7ZD7W66r8\n",
+       "CYdzzImkWX9xwnkftYODIs5EGJHKtAh+x2lFv9Of79PZzLXRHNaI5zklStkM0LDMn2JtSFVmCgxp\n",
+       "aCO8EVFOiIO6+io20HdTdLyGTm2a9oCu4BAOaX2nZVkrj93FYp2iZbzOhNh2weFifLaHeW1ye7bU\n",
+       "Hik54Nl9REQb7O12d+/br7/++pXy8xUPNg4IHgbAKSIUD5PLwkVUqSSMFD/j7+zE0Xy6ACeC76EM\n",
+       "UZqZm4DSwdmzEnLe3kqRNBdCDs8gYkcmRzHgVGUuFpsJ5Y+goqAMWTOOXCU455+Bh7mbyRuD57Va\n",
+       "rej1erWNQVqS8bBOTi/kzeO0RualoEibzV1tL6fSSGUwt05HopQx3CaqIh/MiU+Bkj5CcXv96Ctz\n",
+       "h7IyRw5lgpNi54W5gOjs+SYNmZ1znL7pdFqD11lDxm5Z5x18hlNppx4Zx4jb2WIdUNQmd/NeHCx+\n",
+       "9ubNm0KWJ8XnvQ38ztqavI9D6Jo53kco88yjIDXL/uWEntcWJ86pD9/7x/zntUDmHx4eSvqYtI2v\n",
+       "ZWJNcXKdmnFwieyuVtX1OxFRHCjz++zwO3hysMNn5qv4vayRT3wxZ5YB7xnkm1Srj+7z3cxz9BH1\n",
+       "L1++xPX1dTm9eX5+HhG7E1+sz2QyKfIYEV+l9eywsP7I2eHhYSFR41ijD1wtnc+tA/I1Sk7xmU5g\n",
+       "59QBNLLDuNnfrCFEbZP2mWfsGjrOOop3npycFF1rh81XOzHmiPpVPXkd0dHor/v7+6/2EH30wYej\n",
+       "o6M4Ozsr9eDMceX3Scs6aMXxJI0KVYZ+5sDJV9hkPeDmOUMnWQ5x6H0LiteH30Un4hhbTnJ7FkeK\n",
+       "/LvvEuLOK5P17IRERHGmvKFQ6iwWiE7E/gt/zYWBmGYCZkR1CtAnmuyouViZERqe6xolfObTNe6D\n",
+       "x0Cfs3PG99hAvvA1olp0bxgULwgJCBNzavRsNBoVZU8fbDC8Kc2DwmgYQeDdOGlshG63W5xnjKIj\n",
+       "XjZqPo7P/DpHzffsYBgl4Xsce2Z9fS0GRGUXFuVZ8OPYvBQInE6nxXnAYNj5NCKFY44iOjk5KQqa\n",
+       "KMyGLyKKExJRlalA1n1C08TKrHhsFJkDHKqM5vD7R0dHNZlDEeVIjDXDaTNP5OnpqTZG9hbvYY1w\n",
+       "cJkX+sZccRiDhpJlfRj7fD6Ps7OzYtTyiS7ewZ7YbKoSJr6yw46X55H18T133PdoZI/G+JCzw8PD\n",
+       "WgFYz5NPipn/0m63Y7lc1vQD1zUR0TM3Rt7Mn2PO7FQ4Kke3Mp8Y84j6lVOg6uarwfkDseZd/M1Y\n",
+       "WHcHbehR8wojojg50+k0/vznPxf0IWIXuPT7/RgOh2VvWxbfvHkT/X6/hi6zlvP5vNiTfGKVPqGn\n",
+       "zE3FHvCujNagoxzQ8Szz0RxgmnNjXqFrwjkI9Lp5Tfy9x8fHghzybJ+cs23KNsrBF9kefk5RWOvi\n",
+       "iKqgMDaIE7MRVSYCGwu3EZnieaBk5pbRjJ4xBmqcAUBkJMu8RjugRvzQD36HMw40Aj+cqX08vyxD\n",
+       "tbX6zU/+D9tgMIinp6cC2UdUG9E1nfZ5/Cb70fgMJZWNMIuybzHwkBGuiChRPBOfC74RJWcSGu/y\n",
+       "ouSIzgrUhhSY1saKMZjg7MjAkRb9otK2U4pcagp5LyLKEeyHh4eYTqc1UjFwP0egQZkidoam0+nE\n",
+       "4eFhSVOwaVzPC8PoU4Kkl3AADDc7mjXUi3PCGtqpIxXBnBjJsuIl8rFjiDE7OjqKbrdbDBOGm8j8\n",
+       "9PQ03r59GxG7i6A/fvwY0+m0GFynEp26cLoyokKzDLX71I/RKUefRNXIi+UwHyW2I2Vj5tQn7wCZ\n",
+       "QMaNMtIn0q92sDEoHLawEeZ7rFFWRkSBEZUjAEG52+1+lfLFqOFEWg5pmVhqMjLvY208V5BUiWjt\n",
+       "UPF/jGx2vCy7GR0FcTWSgfE4PT0t83Z5eVm+z95zmg9ZZJ2NqCIXXHjcau1qX/n+RAwvz3IaGUOf\n",
+       "5cnpG1LD3k927iOqUiGsO303OuPPLCfMN87J0dFRPDw8xIcPHwoCTDDm0i085+rqKi4uLgrq4EAY\n",
+       "fcfzbStYx31IBuvtAxg52GF+fa+na6Txe9bftO22XqqFuTZy58AZvQBC5LX03X9ZNph/9lpGB0Eo\n",
+       "6Rvrip05ODgodRstH3bkcrYFHbJYLOLk5KScAMdeYEc8DkAH9oRJ9F5/DgyAQiKLOH527AFP0AXY\n",
+       "eM8N+9GOm9OOzhCwFpaFfe3ZECk8SqfJUCZGnPjMnAh+FlEvkmZBjqgfjweWd6TAd4wiRVTHOfnD\n",
+       "aSO/1wowIwTeHHzm3Kx5LhH1iMX5/Ih6Ne2np6dSE4pmRbPZbAr0z+Lf3d0VxTidTmunQhyxcxop\n",
+       "IkpF8PPz8+h0OtHtdsupjHZ7VyZhMBjEwcFB3N7elv644vV8Po9Op1PWdz6fx/n5+VenLGi8H4Qo\n",
+       "p1RRfrkuiIv/2ZFgXTKfLSIKqoDSd8oXtKrb7ZYNykW5r1+/jm+//TZ++OGH+Omnn2qOMgoB5QhU\n",
+       "bcfEacucFvIm9rhZWzhARnEjvr740w4hcgDfh+85TZxlzkoj86BwdLiWxcqbz1DKRrlIkbCP+/1+\n",
+       "kZmnp6fodrsxGAyKMbXDg6PbbrdraZiLi4vYbDYlGON5KFu4Vqy1nR8cVT7DeWbe7HSDgrNOpB55\n",
+       "hmUZeTDyGxEF1To+Po7RaBTdbrcYmi9fvhTejiuFe16pbeV1Ql8S9IBO0A9+x+P1M5E50yIcyeNw\n",
+       "8SxQepDNRqNRDCTzZLTBXBh0jNEr1mk8Hsd0Oi2lQqbTaaEnYNhJi67X66Lfrq6uyryxt5GN0WhU\n",
+       "6uRhMJ3aJHXO3rdtITjBbiDDGFZslDlLPN+oDA0nHtk3hSSvVXbQWbd8GtIBFPbPzqplmsLLDqyw\n",
+       "P9hEZzEYG+vFvFGyCCcJPcH6bja7WwXOzs6KXeEz9Ln3GfOGDMKvIigHEGk2m4Ub6MCCtSM16ufy\n",
+       "bJ5vm8+c2yeIqCqiQ4Oxw8sa5L3p9iyOlBWz0zM4Ihg+p/Qi6l5+9rAj6gXaIqLGVzEUyLPYOEDW\n",
+       "5oegLDFMjjxBxPyHts+o80xvTBtvzwnHPf08+spYzWchEjUxMGJnSF6/fl1+h42BoC6Xy1J1FofD\n",
+       "qS+M/Wq1uyft+++/L+/E0YjYOcWOhDEkpAIMm/NdhNbKHUXovDzzwsZHRowCOS3CumUZIRLMUDyf\n",
+       "+2gt88RVRL1erxi9VqsVg8Egvvvuu/jTn/4U//mf/xnD4bCsL0bW8DTrStSKI2VEEtTJt5bbgLHu\n",
+       "KAejjTiEzIH5T0boLBvmXvBzFBGRPc+m0Cj9BOEBceT5RpNBHu3wub6YDUGn0ynOOd/xFVI4y6vV\n",
+       "KhaLRRkf/BajypPJpOwd7u1j/3ivYWAc/duxg6SMM4GBtrJGvrLj4CryyBRR+XQ6jc1mE+fn5zU0\n",
+       "g8Kbs9msoL3MKVE58m/n1U4VBomfo18w7Nad6CbW2PrV6Y8czDp1OZvN4vT0tFbAMafrbaQyOokj\n",
+       "9de//jVubm7i5uYmvnz5EqPRqHbHqp2gi4uL4khh6OCKUeogoqp677XyGK1PPI/MicuheI6dqiZg\n",
+       "jqicTPa7qRrIBuiJUTxSdqyJMy12kIx2Mh70DX/b3uRg3ugRQQLvpaRBRNTmbD6fF3pKRJRUHraN\n",
+       "qvo8v9lsxmAwKOUznCpnXo0wM6fISa/Xi6enpxKwj8fjGI/H5Wo3gxlXV1clqwV5nn6iW0Cb7ACZ\n",
+       "e+xMB82p1Jze+9+cqIiX8gcv7aW9tJf20l7aS3tpf3d7FkSK9J0JmeTA8c5NArTnmKNLokJH8TRg\n",
+       "PaNbjsyI7vHQDXHzBxItzeRW0kw5dwp8nI8QO5WYYU6e7Zw4P8PzNnxLP5fLZZycnESv16shK0RA\n",
+       "g8GgHAflZFnELjIZj8clmjHScX9/X+654udEwhwZBynhpA9z6xTtdDqtpW45jt/r9Uqaj3nh+Zy0\n",
+       "c7TCWuS1N3cORCYjYEbzvE5E0ETRPBeiJLwdrpKJiFIcsd/vl2j8v//7vyNix58immV9OZEVESXd\n",
+       "Q9RDusMyBVpocmxOJxithCdAhEqqw+MzGZ1m+crROXsPJMipa6LXfr9fSyVFVAVJfQqJMZCaY67N\n",
+       "2QBhcArHiCtzBtrE73uujci47IW5aKR5eC4IIRGs0UHGn1PJlq+cnvfedVorouJUNhqNODs7K6kM\n",
+       "5DQiSuTtS7aREVLGRqMiKg6JTy7yPpel8PicyrJei6hO6zIvvjnBd+ixVk6BHh0dlfQWaAoNdGBf\n",
+       "qnG5XMYvv/wSNzc3MR6PayejWAf2InLE+9DPy+UyptNp3NzcRESUA0kej/Ui68ffToM7HeR+ooc8\n",
+       "NsuCT95ZT8GnMvLrkiiZo+jsijl/tm3m8NAnZ2accqMotVE+xpdTwNjZ+/v7ImvoPqPi2CKfggUh\n",
+       "uru7q6FORgKZX5p1HPab752fn8doNIrb29u4vb2tpei2223JhDAe0wGcsTCdJcuBqRnOLpGVYH5A\n",
+       "0X0gJ7dncaQMX9q4sehZUIEUUXImgtnostA53cBGtlGwYPq294jKkQKKNMTpMaBkrbzNE7Bid5/J\n",
+       "PTtdyN84BZ4Xb0znyk9PT2O9XpfqzxjciArGxKHKRFE7hAiQlcuXL1/i1atXtes8IqLwWRBscwVI\n",
+       "tZ2cnBRY1afafK0A9YNYdxOUzZNptapq3/w7H6unL05t9Xq9kkqh9pYNtMefa9Q8Pj7Ghw8fipJi\n",
+       "cwMh4/Sfn5/HH//4xzJnP//8czHA9M1ke1KakJF9Qo/vLBaLWK1WNeeN9edIvlMSXjPzXZgXnIFM\n",
+       "nrSxxfGPqO6OI3VLfRi+d3V1VQ5nWGbg6TC/BwcHZQw+4YWh9TVOELwhKzsdyd43p493MC/T6TS6\n",
+       "3W7hbiFfOFOcfDMH0EECZG3Gj9NOanofcZh58/gxNjg4yDckXMZ/c3PzVdrMxHGnIni2dQfvpuU0\n",
+       "hJ0Dp4b4zKevTLrm/zjlJlR7jUipuT/oGTsFTjUhY+bLsP7w37jSibWg7+fn57XDCBFVSQl4NfP5\n",
+       "vAQivnEiOxnI0j5iOGuIo5ADV5PQ85wyfqetmQOCOQIJ82ztrDm4MoeL5+f+8gxsm/mVPjDhsbLe\n",
+       "Xnf+9tyzf7PdIUC0TUQv4dS7Vhpj9z5yStDcJI9ru91Gr9crJzdxrCKinMhsNpvl5C6O4mKxKAEs\n",
+       "NoUDWOgA9JffBzjAHwfX2JBcb87t2RApIi07NgxwH3HRvBOUcUTl9CCEv3UaA+OVo90cqfAZKA7H\n",
+       "5BGMXq9Xi/JyVIp3jBfrz/OxUnvYmd+VlTdzY0/ZxMDJZFK7RgOHk/eiyFyqwIiaBX29Xsd4PI5P\n",
+       "nz6VInteJyKd4XBYnFA+I6ImmspFGVFuNiSNxq4WDgqMUxyM24RFbzhQCis8RxEoAtbdPAHeAVJH\n",
+       "Y21Ho1F5rj/v9/tlXMhQxA6pe3h4iOFwWE4uOorCECHXJp0iWyjxZrNZfoZzimPsvWBFzJz6KDN/\n",
+       "81k+vWIjQWOfwIcyIjsYDGqkbDtnzCVzYkeRnyP3j4+PBVmyobGjxztwrEHIMm+Sz7iXLyt371Hm\n",
+       "lNIIrIk5HeYNsXZGJRy5Zh4MsoS88TmBgU/yOlCYTqe1k8Pwh3CiGWNGpPJdmzwTh5r5Mr+H3+Xd\n",
+       "x8fHNZTT+xK0h59D0sYJ84lhAhk7GEY52eM4P+adcfqY+TWKPRgMCprrZ7NX0NVwelgL+owM2JEw\n",
+       "Nw7Zo4GoIgdGI1lHZDyjSOwH633PtYM5non+Bi3JzyIzs4+jg244OzurodXsNetgB9jMPX8bPYXD\n",
+       "yqEmH5Biv+D8eE5Bxwjmrb9AW3HQ2fsg271erwYmMG/sFUrRoC/JIFm3ucYfVwKhvwjwWN+M+vFe\n",
+       "uF6MzXNuXb6vPYsjhaA5ZcciUYsmoo44+Hi5UShHQHzPzsvBwa7iro9UR1RwIv82pOyjtrlmEwoa\n",
+       "RYkDwHvZtPzbDtH9/X0h7x4dHRWFmcmduQaJj8lamZg0mNEhb2afhLNTsNlsahdX+oRZRMTPP/8c\n",
+       "h4eH8bvf/a4QOc/OzmKxWJRNul6va0eNqevCOjraYa0cDUdE2Ujr9boUdWPecCqog+Xostvtxng8\n",
+       "LmlG+mC5oFKzDRupM5OYMVKGrUejUfk8YpcyePfuXTl16jlrt9txfn5eNud4PK45B5vNphRbpZ9O\n",
+       "b1HzKh8tRqFzIMCKwAaCuWK+UfoYbTvpGE8Uh1E3DAunffr9fq0vjup86pZnsDfsuCJ7jMPGiwAK\n",
+       "4w9hH7ll7/Ne3ud0wOHhYQ3Cj9idAmWd7UBH7BBAB17IAb/D2tLPjEowDqcw0GcuIEozCsEYjGI3\n",
+       "Go0SgNhxPzk5KU4suggdh7OGgjdiAZkeo8d88Rnr2263a0VVI6qsAOiokWwcL6ejnBnImQajpfTj\n",
+       "4eEhrq+v48OHDxGxu9B6u92WuwhJ4zFG3/lnfYIzZtQw2wEMO3/TN+8fp7iQJ88Hn7laNpkNIxY8\n",
+       "L1M97HDxu5nEDJJnpNbUEz+LuXbakv3mQxrsa+qEeR1Jw6JnrBetz/LJNf4mEEQ2jfjzPAchLhzt\n",
+       "vUFAy7Ndm8oEe4IyZw6MdkbUbT52EefeqXJsBX2xrCAbIJ9e1wya5PYsjhTXN1i5w6lwusLKDUcL\n",
+       "A+S6ETgsmYXvWin27mn8m1M7OdpFkFHk7hMbzt68I4yIqI2PvrLRDg4OSkRHCmq73Zajzl5Ep1Ai\n",
+       "6mgDEfBms4nxeFw7Een0m3PqfM4fjAf95mTJ09NTfPr0qbbByIHj4JjvwfyyoTi6zjiMlqCw3U5O\n",
+       "TuLi4qLmnD09VSf92ARG8nAOObWFXBi+Pjg4iMFgUFN8zM8+B9Och+FwWIz3x48f429/+1tcXV2V\n",
+       "vlhm2u12dLvdchLUChwFtNlsas53RJRSCzhChtSdTsBo2CjyO0a8aHbOLft2uLMRQsmcnJwUR8bO\n",
+       "MO+9v78vR8xZu/F4XL7PdS80X9KbT3AZ3bLRZb5Q0vP5vMgQSDFjmM/ntdQEjj9Ij1M/8PDy3EVU\n",
+       "aVY7oMw3eySnaDwOR7s23kTQcBORfRxQAh87505pwm9kj+IAohvNAWPeQJBwcJFD5pqUGuPgmDnO\n",
+       "XaPRqBkvn9g0AoBsZP2ZeTKbzSYmk0nc3NzE+/fvI2LHLZxOp+WkpGvj5bSQUQF+H7qAg1bQGfYf\n",
+       "e5z5zs5OPkXOnDnNCvqDg2FEBj4pfbUNYxzIVj45bgfI82l9QeDptCZOFH0hOOdzbA160KnrvE6W\n",
+       "WWeEvFcZE/bNgRJOFb/vsfMs+mL7i5whk0axj4+PaydCTZPhO7ZtjAmbTf/tyLKmzFd2vplj5sB2\n",
+       "Ntuq3J7trr2IekSNgC8Wi5qXzO8xmdmjt4OTc8mZN+L3odgwxuYD8LtEnxGVgIPgIDSOYJw7t7Ph\n",
+       "vlowvbldNfjk5KQYHsO9/G52qoBNMeARUbvviOjJhFRHMhjr/BlHi3/55ZfyPjYKzzMSgBA7heTr\n",
+       "Lugj68mmGo/HpTZJp9MpRUJZJ37O77iiLZEsRofvMcfMj4nf1DhiDb3ZqLdENLfdbosBvrm5ievr\n",
+       "6zg6Oiq1tLzx2ZxET8iy5xTFYmeKqJT0IGhZRBUJU+IiEx7t9Du9Y8csNyJxp1wcpbMXMWyuobZe\n",
+       "r0tpCx80GI1G8fj4WI7vcww6Iso1O8fHx3F5eVnQtX3zZmdhu62KWObrGUBnSCuY94PJAAAgAElE\n",
+       "QVSwnuZ44GA1m82vCvrRb2TJTqmd8OyYImc4Wk5fgSqxd3jm4eFh3N3dlSr7nU6nli5cr9c1nei+\n",
+       "GBGwzuC59NXBHo3negyZk+Pv8Bz012AwKEGESfggFXYm0InIh9EMdMt8Po/RaBTX19dFp/z8889l\n",
+       "XeCn7HNCqHhuJwD9wdwYeWCeKbZrpGNfHaKISlc5Tcn6Ukep3+/XSr4gO1nvOzuC3aLmGbIPqtnp\n",
+       "dIoDa12a9y79w+mgYDDvdWrc+hzbyb+dDcIp4TOoECA5Rn6c9nbQwPtAY/cFagZO6FdOXZLGQxYY\n",
+       "Hylz0zdAnAiGnSqnL6BgyEmmBjgrgL50cGZd6nna117KH7y0l/bSXtpLe2kv7aX9ne1ZECkiOnt9\n",
+       "IA1E5M5dR1Qn8CIqz5xngdo4qqZlbxwPNJO6IyqIk1QZUYUJzuSusyfL951+dPRHdEmaarvd1nLM\n",
+       "nKwDIs6pBiJhv9upJZNJI6I2f0QB5k0ZkQIVMCJnyPXx8TE+fvxYPnN/XOiUuTafIeffibR8pQVo\n",
+       "A+lVUkMRUS5/7ff7cXZ2Fp1Op5zeoHgn7z08PCzRLRyC4+PjGrE4IgqR0vA5c5qRyXa7XQpykl4a\n",
+       "DoclzcRnRqcajV1BTxNAiYDgpZkL6OrHHDGnEaGDHJGqolGdF/k3QmDo3PuA3yUyN/JKOmO73ZaU\n",
+       "I+tLZM/cRkRBAJfLZUENT09P4/GxupDcldDhZ3iOQZ2I7B01kk5gHzEGkGHkb1/qYD6fl+dxMCKi\n",
+       "OilIP4xuEO16n2d9wt/ef4PBoBCjiXzNzeI+y8vLy3KCL6KOGu1LheX9nnmgPt5vHovTki6M+/T0\n",
+       "FN9++21Zn1evXtWQGF8XQnqQ7202mxqnynJozo0RTr57f38f4/E4bm5uauly+mi03il4dBYIqfUX\n",
+       "qInTmMgwXKter1e7TxCOGDqKdHxElbr23COnoNfw1rwPzVXi3x4DXC4u5zYRnfkCrTYCxhyB8PgC\n",
+       "Yb6HHjP3yOhK5hIh8yBjtl+M0ffqGZXhO5nWwrtZR8+Nsz408wOZB2TYfDX/rnmc6C37AT4FbOI4\n",
+       "v88YeB9/3E/rJGea3M/fas/iSJFnt5JCmbFghvJQdk5BeXAokXwKyXlOfteTYSfKBF/exxHqLDQ0\n",
+       "E8D9PvrkmiEoChwKw+pWCjyLaxLgFiFkwM55TOv1unYCh7niGRkmxmjjKLnuj51baifRbm9vi1IF\n",
+       "5vcpGxSH6y/Rz4gojs9qtSopM04GQgLmvRE7p2cwGES/34/Ly8saN8ZH8/meDS4OAQqSucGJg2Rs\n",
+       "XgpkZxxCG0QcC5z66+vrr4jIPKfT6USr1SrOhDkO2XCSQrChpK9Uusc4+AAEyqXf79fSB6w5f5t/\n",
+       "SF8YXyZdcu2HFbTLP0TsHBR+jqGlZlm32y1zk3lXcEmQG75HatPGjX7ijMORywEGBs9lTlgL0rNw\n",
+       "aVgr+G/MleXTwYeNE59l5ZvvroTgbsVMgESq3Pucgw7oGe9tZA0Z9jrZ6Nop83dx2nEc+Z3RaFT6\n",
+       "6TQnv8dcIXeWUeQok6gz/cBtu92VU7m+vi4pMo6kOz2ZHUf6i/PB6S3LFO8y+ZuggxS703AEyDgL\n",
+       "rorN/HvvmANmI22Hn5+ht+ycIMP39/elrh7OKsE0e82cpOVyWTtV6nRZPiCQObQ4LgRMtl84Uug4\n",
+       "02hciiBzGVn/fbYWGUWGLafIinl6lmGn/MxJcyqRMZijad6hHTUfdMmfmSKUU/c5Jcm8IoNOA+5r\n",
+       "z4ZIkce0cmdBF4tFiWojqlN0VhgmAdrDjKgjUlmg7LVHVJGtT/9F7ISVSMWePXwM+uuoFGGirxZg\n",
+       "nCeIeZC8eT4C4FNZjBXBN5LAZwgu9TMcsToK9ZHoiMoJzBFJRB2ZazabBWXguXDZIPtlwvV6vS53\n",
+       "p3kc/L3dbmvcGxRoRBQukB2pXq9XDJUdglzozsfBKRgJyulrOYhy920OSLy0HIW02+1ymedisSik\n",
+       "ZiNvdrB5FkYeZWO+mhUR/ATL9z4eg+fUXCWOFi+XyxiNRjVukeeb92I8/H1OEbo//D7HzHu9XtnD\n",
+       "rBNF+a6vr2O5XNaOHXOlDPObo2ATRB0A4DxhYLn3sN1uF86REVLGa4SPUg6eL1AyuFRGDmmZC2Id\n",
+       "wbyBSBIg4UDBzaMvzNf9/X25sDiiQs326Sbkx84s/UNGMBbZiex2u3F1dRXD4bAWUCK37D/zchz4\n",
+       "8ZmdM0jdyI33Po19tc+AsQ7Hx8fxzTffFDlFpphro0cELaAojJH+cTDH/TBaYx4o/YPUTLCH7Ltk\n",
+       "QHZcCbaM+toxwJYhwz7NaWTDwQABBCimneNer1fj6nkf0qzHMn/H8uFj/qBQ7p9lw2vm/zM2n6i2\n",
+       "3s+Ode5P5kcxfvalHXRkzXwzj8GIK33LfC36lfuAc4bM8Dsmydt583j+N47UszhSEdXmslHMpQZQ\n",
+       "LK5ia2URUZ3QcLqQzzxwNoGFJqJeA8QeL+kfo1/8PpMOouZNyu/Td6d7GDffNbLCO4l0MELr9bqc\n",
+       "UGETW2g8T2y8ffPM73lunN7LiAVz2e124927dyWCPDg4iMViER8+fIjr6+ty1NRziIL1vXMRO+eG\n",
+       "Qmlee6pio0x4BvPGpsGhBEngokscUSt+iKTeAD7pSZ/pK30BqWKuXHsLInqr1SplLPxZLvppdAXD\n",
+       "CjKBEaAPGWmFdOm0TE7rMF9GRmgcqaYir42knUWfaOIz1/ixQzCfz2O73Ua/3y9oCXJBSYjb29uY\n",
+       "Tqdf3cGIXK3X69qJHyJ51jDXSkKZ8z2fArUDleePVC/pZxO3nY7OQZRT2tmJzk5UruoPQooDZ4cV\n",
+       "J3KxWJQLvZlj5iaiIuCyzqvVqoYYOdVkXZNTEaQuz8/PizPFOlHM0vsmoqpLxR2DnK7mmegtB2me\n",
+       "N36Wg1bQee5TYywRlW4nNejTWjhCOGEmm/Oex8fH2gER5tQHJFhz+hJR7U8/E6ceGbTu9NxmKgl/\n",
+       "gxQ5jQ5SttlsirzwTJe5yXaDvWKk3X3nOw7a6SN72nKc0SDmyPbE5RDsDPKunG6zk4XTaxuS598Z\n",
+       "n4ioIetGqegfwRzP9cELbI33gdfHQYazV54rk9SxMaxXdqL2Oatuz4ZIWVAiqguHSY84usTb57SP\n",
+       "uT4oUzaVI3anL1CWjiZc3dn/NqQZUVUi59/2xP1/n7TIkZojZBSdDaRz196k8FIYZ04z4gAxpz5e\n",
+       "mzemlRvPw9jbeUF5PT09xdXVVbx7965E0XDZUEKu0sxcoJydhuSKBxTV01N1QSXKzDwaR0bb7bZE\n",
+       "rg8PD4UjxWWnpImbzfqlxYeHh8WZWiwWRUHb8BrVi9gpb4w1StqpS1If/K6LanJaCYXK/PJdw82O\n",
+       "kkk1ohidFj0+Po7JZFIiea8pCginpd1u12qTRVTH/532pNbXPmeXsXAdB+OK2O3Rs7Ozks61zGw2\n",
+       "mxiNRjGZTMoaY5C63e5XqKidN/YFStFyaDlHtiIqFA45Y12sNFk/5pp3mk/D33xmNMjPiqiuVLHO\n",
+       "siPE6cJmsxm9Xq98n9Sd69tkPgaGwwbHCLfTJpZv+uZaWOiPyWQSl5eXcXV1VagCj4+PBRHPJSqc\n",
+       "ZmGvWSfakBA0OkBEpnKgiB7nJgbv06enXcFXir0eHx/H+fl5RFQXWm82m5hOp185vBSX9PUnrA/7\n",
+       "jHUxv4Y+YGP4HmMB/bZ843wakbPMYD9arVZJyzFPToWC6tAX9KxRPq8HqLBr0DF//Az9nU/XGm2y\n",
+       "k8n/87iNKoFkGbhg3rBHNH7PKBeN72E3vE68y7yyjN4x3znNzvwbyOB7ZEroi/WJ05m23TxzHxrF\n",
+       "c3Jg5fZsjlTOiRKZkitHOCPqV8Uw4c4zO41mD5QUAgLqlAK/7yObNnoYdoSZ75G3diTo5hQBSpyf\n",
+       "O9LJUR3OgmHKiIpfAYKR04UR9UiIfqIgMpRKy9F8RNTG32zujqOfnZ3Ft99+W5QbimA+n8fFxUWN\n",
+       "xEsDnneEQiSGMnRfSYeYJ5TnYLFYxHa7LXVoInaOFFXE90UNfAcj5Tz6ZDKppVFdxgDndTab1coN\n",
+       "EOkBz9tw46zbwWg0GjXEwiiA1xGD73U0UntwcBDj8bimNFlPIjWiQqclURLMC/LnO9bgLTn6zKgi\n",
+       "xnYwGBQ5BVXzPpzNZsUxtzPIAQH2qvcTc0Pw5OBqX7rL+gIUDB6ZycGuep1T5uZeuA/MtxEeI0Q2\n",
+       "FDhyTnMYMTZnx4VzncbhmSDSEXWUwgRukEnWkGCF9+V0NJXCHx4e4uLiogQH5iV2u92vUpcusmrS\n",
+       "sB2pnO5FxtiH7EXvfdJ6yKJ1NH93u92SAmZuCDJ5nknFjUajxo3LurjVapX37iu4Sr9zyQGccHQB\n",
+       "v2eZPTio6j/1+/1arb19NQkt40YVPV/W18wRAQjrijyxV4w+edz8HvrP8sZaOrhzX/m3gQEH5Pvs\n",
+       "BfNjqk1E5YA43UtfkWHsutFRUoiM0/1jDf1er72DVQcfyA2ygpPKs60XbIP+39CoiJfyBy/tpb20\n",
+       "l/bSXtpLe2l/d3sWRMq8JLxNkB48TUjZERUE6sg5F+jjuY7YHanjpRLR+Fl4tDyTyACY0MRgIEaQ\n",
+       "hcxJ4r0RUXumI2O8crxhCOxEWY7YSE9Cms2et087MRaeCfzriMARBlE373AqA+Tl/Pw8zs/PC6mW\n",
+       "MRGR93q9EsHyXubTcCwRkBE9omM4DnBAKAQZUXHnptNpjEajGsEb0uxmszutlhFHIF4QAtIboGPc\n",
+       "ydRut786WkyF5cViUb7nQoMgIiawwzEhskGWmDfzuEA8WWOOR19eXpZIO6Lil9CXiApRAqbmea6I\n",
+       "Tz/4Tq4kDzzPeJk3n2KKiHj9+nV5H3eaudI4awgyBEJGWobPQBtJhznVQLROFLnvdAzrnPkO7G+e\n",
+       "YZ1BQw7yQQz+D+GXfyMHGeUwVwVUxgi00wTsd55Jior0tm9IAMEgtZWJ7+xVEGv6ws/pp6NrUr4g\n",
+       "GhcXFxER5aAEXKmMyvl+P+s9kEanzbLOsO7KqASIFQiYUQnmB1n25cOkG41gR1TpXtBSI4DIGIgK\n",
+       "KcCIqJ1KhGJhPhPIdZZTz5M5Qcw340ZOzQfy+M3PY//RF8s988d8k6nhfTldaKQLnh7fz5kJ7xmn\n",
+       "+Iw25TUEoeJ5mROHns0ps2xT/DOjwvYJLN/IisfnPmcCPuNzCnYfny8j404L0y+vef5/bs921x4E\n",
+       "WCuqTIpjkOZBIUSZxxQRX8GcTushjBYU/m8YMCJqBu7u7q4cteYdbHz3KaJevRvIOJ8iQsE1Go3a\n",
+       "VQi87/HxMbrdbg1KBdL3JoyonBYfo+dZKCHmgLGa6Ggh8nzCZeEklisDNxqNcoy30djVSxoOh6Wv\n",
+       "vIP19aYhXYTRd6VpGv2lkfZgQ1CLhTGgFEgb2hCycXHcmDvGjoK1U9vtdktKkFQRY3eqmMa84Xii\n",
+       "nDMXxOuEcvW1DfP5PMbjcTSbza+u+0DhGvpmvlkrO6i8o9lslgMMVN2OiCJ/PjThqu849Dj9cNIe\n",
+       "Hh7i9PQ0Op1OjMfjuLu7K+M/Pz8vzh77zWlmO/sm9LJuKDCcFH7PTpiblSAOgOXIqSV+3/w5GyP3\n",
+       "h7QkgQtX/dAfHNRWqzotyDNxnigZwglKZI7ncGkycoODzjvMWbEzQwqYMftwgxU9PCzW8ODgoHzv\n",
+       "4uKiHJRwatDzjZPvNCvzgMNgg8P7bVhJcyM32VmhOe0KfcE6ykaPtCjr6YMS3pc4xvQNKgJzwx/2\n",
+       "iw9TOJhGrhiTU+WZ5wa30Zcns2bMGfrdTkY+KJXT8ayRHRE7eKxBPjDiFJWDk+wg2kFxmo059QEV\n",
+       "/rZzw/f4m+fZJtqJRp95juHcGgTJB0gcqJgaYeDAfzslSrPNxkG1DfS8ICu0HCjl9mx1pEA1zDGI\n",
+       "qOpV2NP0orMomQti3pI3d/Y+feO1c8QINf1brVYFjbAy9ak3E9loOC9EdNlRBAnx9xCiw8PDUuoh\n",
+       "EwfZ1CipiHokZOGIiK+EYh+HiD7tO1p8eHgYvV4v+v1+DV1Amd/c3BTlgcGAr2RkwdwWNia/Y+cM\n",
+       "RUg/mTeUD3eCUXbBMmPFzmeQna3cnX+3HDIO5AJl02w24/9h7816G9uO8/0iRWrgrKlbPZwcx7Gd\n",
+       "xBe5yvf/CrmKgQSGY5+xWwPFUdTA4X9BPMVnL6nzAwwE+l9oAwc6LYp777VWrRreeqvWbDbL59B+\n",
+       "gWdaHhgr9wIhKBFA3tuKkUObiZTX63UFIeNdQSu4Z6vVSie72dydTu/1dlEFMudjOHgnV81w1Mtm\n",
+       "s0mSr2VqNBrluY6sPf2jQHqMuiyXy9xDyBLybeJteSQNCtPor5W2ESj4LDinRoNZWxsDxsO6lRG7\n",
+       "+T3WP8yrCcIRkc7m2dlZrhXjwDGv1XbnJfLeGBbzjqwbjKCbs+LWDQ4m+Fuc/hLlQ74YC+NizlwN\n",
+       "bX1C0QGOD/e23ubffM8VrUaBeD+ew/2QbRBn7yHW3xWl3gu12o6PCPLptXPTUeaM90QGkR/0YRn8\n",
+       "WFZKPYQzjB4uOWsEEzg3XAR1jJP3dXEEsm6bUNo47oXM2sFwUMk7IgPmFRoFM3rDZ3bYjA67kIR3\n",
+       "eMnBfykY4vcEhdYZ7K9y3tAJyJsRN9aX35nj6MCgfE8j3gRLdpTtcL50vRrZHI/ekSnRHMrbhzAC\n",
+       "yTJICziTRqqjREFsVDwZbHaTObnKqjYuOx4lyc2wtpUe74KhtyKO2LUUKEt2+QyFz0+MBU6NEQVH\n",
+       "l5Ab/Xt+lv2NTH5/fHyMXq8XJycncXx8nEhSxK4/0WQyqVRxROwqYlqtViyXy0rkidLiXUtvn7ll\n",
+       "bR2VktLg/nxmJMrjZlxl+sDpFH9vs9nEzc1Nju/4+LjSfqJMm7pSy6kkV1ExZiOLjppL2Biir5Gw\n",
+       "iEgnCceOXj68mxEltw6o1WpJDjfszjjYLxg4R/MoRObIChO0gmfyPcr6QQ0cwd/d3cV4PI7T09Nn\n",
+       "qbvlcpkHPS+XyxgMBhUSvpFjOzxOwfJOzWYzU3Q44KS2bXSNABg95WKeQTztZCF/q9Wq4rhtNpv4\n",
+       "9OlTrvXBwUGmqJbLZeqhsqAEpIX96ojdxODlclmpsOO+rLXXd7OpUhgODg6yTQUo27eaDWMwmW/u\n",
+       "6SaQdsDsSFnO+X/eEyNV6tOyn5VRBJ7PWNw0GFTbwZQd1/v7+wxQPFcgXiCP3ofYH+TOqLEDPAoZ\n",
+       "QDgh0e/t7eWJBg6wnJosg10yDUZb+Gl94u/ZObLT46q+Ui4sG8ivG1OWf29nhe+VqCdjtHOP01fu\n",
+       "U/7fziK/s71zGg5ZYC1eyrzw3g6S7LCXqT+nij0e5tdj9/h4l48fP8ZL16s4UpTfGhpm8vf29ipQ\n",
+       "b8R28CiAiGojRnvKbGxvbgQ/Ip4ZBd7B1QMRu8V1btbGG+XKhHtRcRJ5vo2eo3/SVDwvIjItYo5K\n",
+       "t9tNw4TBszBTOo9Rs5E3j+Ilp8/polqtVulrhFFiM/PZcDjMXkERUUE6SDExjs1mUzl+wfCyv0cv\n",
+       "KBwsp6gwJmx4O1lErHt7ewmps/bL5TIbiZaOratznNLleWx2KtOYb9YF2fHam7tiKNpGx6kWR/PT\n",
+       "6TQajW2PJH5vp8cw9tHRUUWB8zdE5DyPPWY0xfPmVC97gHsxjyAlvCfOFYgw88CF01dykFgbd/3m\n",
+       "YrwYE7fMwLjyPjZ8/lnyRSIinfnxeJz713uYOeEzR+VOx5RUAWQVx4TvnZycZAqJZsI2bGVazIEJ\n",
+       "vCnm1I5brbbtEUagVzopDsqc9np4eEjnez6f5/vSBd9pYsZAdSzrZ0TnJU4NY+OZGCKOTyK15/1m\n",
+       "Q8k4jOx4vzm7wJ5yqtyVya6EZK5IGRnhRodaT1uWMJqk+fgeDp9TWuV7rtfbNg1lypX0LO1YjDQZ\n",
+       "TbRuL/meXien60BquFdEpIPpLIT3vmXEOpN7gBg6gC51q4NXZ16MSPOuZAf8rp4bAsgSEeX9S54X\n",
+       "qBm/cxrPDlxpn0E9zVFzup99yrt5f9h2vnS9iiOFQrUBIx1mpe6XJ8IrkSwGD2xHuWvETuAwevy9\n",
+       "L4x06Z3ymSN137NUaFx2zCKi4tXyfRSK/+alKI73tYMU8fwIF/+/HSkUgcnbfk+E3pAtYwWtG4/H\n",
+       "FY98Pp/H1dVVxZHiXTG6jO3+/j6jcvrEcE5V6dgYKnfKxJuSsTvt4jmP2PHbvC4o6JKHQsRiWWy1\n",
+       "WhkplqkWNqI3ael82+CU5FEcSIyf4WUceZQysmFiaEQ8i/aQXZxfv7Ojfz+vbNtgQ+PfEy2bM4jM\n",
+       "YKRKbgKO0NHRUfYJe3x8TJJz+RwQAIz7YrGopFlRkvTS4XKKiN9b8ZNKIQVUIqA2gmVEzBqXaXue\n",
+       "Qed9Chz43nA4jHa7nXMKgkTK9eDgINNWfk/4PG53YHl7qfyfMdspKNtbYCjr9XpcXV1FRMT5+Xki\n",
+       "jpZP/t4yab1UBmJ2cjyvj4+PMRqNYjKZ5Fhd8GJDGVFFwTDWDjjtPDkwZQ9j/FmXiK2s0werdP5s\n",
+       "L0rUxYgTMsI6cS+coYidQ848O2BjfUgxko2wrsFAOwiwA1JmU+ys2EkHWfJakQYt18jvWepKO3Ps\n",
+       "uZe+R2BnFMiBg+WUlDV70ek/9j3tKdwAln1Arz4HhbbT5gIjgyXowUUgQ4sPZITLDqqfZ+f7W9db\n",
+       "+4O36+16u96ut+vterverr/zehVEyhBmvog8QBAbIy9EbF+/fs0IIGLXFRVv8yWEiIjOeVe8fxCi\n",
+       "x8fHZxwSOBg+M8xpP3OlPA7gW+fYSRcQfZSQNvd2RBQRCY9TyfWSBw2079TH/v5+JRLyO0fsCLUg\n",
+       "CEYlWJ/JZBKXl5cxn89zjIvFIptDgiAYfjdMXKvVErm6vLzMKJ1xOlrwmUu8f0Q1lVrmykEk+N3T\n",
+       "01MlLdpqtXJOLHNGuZADIihIwuYGOPIyP8ERG6kpGlh6nZFToiy+zztAFPe6OBI2OuV0Ycl/sBzy\n",
+       "PubRlGhQKUc8w+lAw9pEa6TpXF3Knux2u9Fut2M2myXvDITRcu/IezweJ6/IqMVLKTEjGozRqT1z\n",
+       "/0C5PL989lLpP3JH9Oln80z0DWgL8jaZTHJsnPPo1NbBwUEi5bVaLT+zzuMZrD0I5WKxyK7gXE6h\n",
+       "griA/q7X62y4WSJL7PdyP0Rs036k540UMc4yg+DP2WOkS+7v77NtyGQyybWwfuQ+INRO7XAxp6Cu\n",
+       "Rl1AlCk0MOqE3ivXl3EYqXKatF6vV1LQyBP/b46uW7QwbtA3I9Xs9TIlxmeWeSO7RnzNmyRV6H1b\n",
+       "Vtvxk33guWFe0R2MH/2FTfQ+BRUrW4lE7NKE2ALrT/QF/GYj4/P5PCaTSUyn0yxmYN7oLO+xlf6B\n",
+       "EUX+1ilmrxHjY6z4AeZou9mt9aALN751vYojZTj8pTQVEJ8NBsb04eEhxuNxLobPGGPA3B8yrY1m\n",
+       "aRRQjK4g5FnNZjPLxl1NYAHld9wfISuNt/+O3zFeFIgFmqtMQbD5eE/4KqVSsGLCGBviRticRuFA\n",
+       "2Ha7HZ1OJ2F6Q9Wj0SghYypcmF9KxuEucGZXROT5a8y1nQW4TvAySGexvqQ+MG4mx+JAeQ24p1Nd\n",
+       "3hg2zNwPmeEdgY1xnlgXjDYKGUfZaQH+zr2bHCggW4aODeWbS4Iswk2yDJMKsUL0+pPSBFb3OMwj\n",
+       "fKnC06kzKykcdkr8kYu9vb04OzuLXq8XNzc38csvv1ScHj/74eEhq/0oJmCt9/b20uFgLcwh8bxQ\n",
+       "5epAxHID38dGnr/jd8w53yv7PJXpKK9xs9ms9IlbrVYxGAxis9nEly9fct6Yq4eHhwpHLmJXFeie\n",
+       "YE6R0LUd54Z34Z4ljwvZf3x8zM7vrgi0nvPfc7HvTZC2nPjvS74Uzs5LKWHSKdAPvBcptPF+9HPY\n",
+       "R66EhFPI+Gy80ROuYmUt0D1OCzp4Q+59aD3ri8OHTjDdgj1tPpTniJS8gxsCATvETtkh4yV/iL93\n",
+       "xZ/twsHBQQY5nAHq9J15U6QA+Qx7gcNnR4SghL1kpwQnsky/uYCAcbh9Tdn7rpQl3tNBtp2aUn8R\n",
+       "oJdcLJ7Pfbrd7jMfw4Gm0/+M+3/jSb2KI1VykiJ2+WYLpD8jv8rkwL+4v7/PzYliLKMaR2V2qngX\n",
+       "8rB2spi4b+WfEWRvGjYlQgCfJGKHKJngZm4RwoKwefwRO6cBEjHf5z3xzM1xGAwG8d1338X5+Xm0\n",
+       "Wq2KMWEz2alAcdATqNvtJoeFPiyz2SyVXuloMI8oG5en7+3tZRNAjlOwMR8MBtFoNNJw4yBjeMoq\n",
+       "TeQCRcUmsSNlZ4J59k9vLDsLlh9zKDC4oH2utkEWarVaOpLmGeAIehy8K+Pgd+UzI3ZOnJ0g/g5l\n",
+       "UvYcIhJFIdsB9Rwhr6xFKYMlz8pyDA/o+Pg46vV6/Pzzz3FzcxPr9boS5IDSULXGe5qXQ38lG2zP\n",
+       "Q8lnMukXQ2J+I8a0dArMH3PJOmOFH4czYGVrJQ7iwjg4hxDd5BYH3Ofo6KjS84rAAq4HbWGYb8sJ\n",
+       "+43vbTabLEbBGeWyLEVEBallv+Bkm8SLE4XzZSPEVRotPmce2ScOFAkkcJTKwg10ETLIOqHfCEyM\n",
+       "UtiZcGBWOkasKfOG0cTJ9vid4SgzDZ5H26/FYlEJMGxn7LTd398/O07LSJP1jmU1YleswnsSfPKe\n",
+       "dpY5K3C93h0VZD6Ugy7QPp6Pw4+Dilx5/AcHB5XeinzP728bhc1A1sp+buj9EiXF5iNfjKF0suxH\n",
+       "MNcGElzNjGw6mOL3PAO9aZl3kPrS9SqOlHtqMBCiDgwKJMyI54eE2mGgDB/hsSHkJHEqWGww7JE7\n",
+       "RcTzUKQ2WhE7Q4tBsYIGZrYidiRgheJIgPfFsBvCRkD5XlkeDBR9dHRUUVBEVN9//32OqUTIeA+n\n",
+       "SSN2yABGzaRFHK7RaBSz2awStfD3rsC008K8OvXFGLk/CpjPkAEcUxSgv1c6H8wpBEi+42jWG8My\n",
+       "46qxUjEzJ6yDDRsG2+OyYfeckzryOkK2LNEFO78gQE6fGrEp03yOqrzOL0r9hz4AACAASURBVP3O\n",
+       "c4dcl+/C/sFB8f3puXZ7e5tIru9Zr9dzHx4eHqbs39zcpEOGs4Eclmiyf1oOHBRYEZNeQzbKlAqH\n",
+       "Pbfb7WeNF7mXnSyu1Wp7qG8ZYBHlPjw8ZHEF42AvcLahURcQEAyyAzN0kBHCiKhE8uzX0qkhSCwJ\n",
+       "uI7mPS92bko5tRyUTr5lir/f399PhLvT6cR8Pk+ytaN/5Iy9Y5kxisw82CjyDuwRrxMpOJBpp5Ih\n",
+       "8GMsjSxxHwwyzye1aj1jNJYABwfArXt4P9a9dLDZ34vFohJkg046dc09jWyWjp9/8re8K/Ns3cgY\n",
+       "sVugwk4XMk4cP9Au1ubu7i5Tey+thQn8pk3YGbIDip4BZHjJibGe5p6MCafY54qyruwr20KvRakT\n",
+       "3bvsW9erOFI+zJcJAolAkRkSPDs7q6AB6/U6ERJY/44AHLWsVqvk5XhjIJj8zoYNQXGlnb1vIEnn\n",
+       "Zvk7M/+tMCJ2R2WU71mmpdiQEZHpMZyp5XLXrNNdq2mc6YNinZZxGsP/5t44OqwFCIbTSoxhsVhk\n",
+       "ry8rcO6JoLIGjJdndLvdilNrhIn343coIs+nq/0ceXgNidAeHh7SkNkIs4m5p40skTmGtIwOee58\n",
+       "Pq8cj4OSQMm9xGGAK+IxOnePgfQzUQx3d3cVRxrkzsiUEYQyhWUFgbPQbDYrvA0+93whn8w9zoK5\n",
+       "R3S7xzlZLnedrUnBHBwcRLvdjvV6nRwjl+m7WSDPc/TJe/szO9F2GIjYQczKI6VIa4OQ2rlE6ePY\n",
+       "2TmDy0FQxNyw35gDZIh79vv9NHy0u4jYdZnnvc3/xAFgjNYl/J53LXtMkfqLiIrzgWzZmHndmW9+\n",
+       "X6Z1mTsjwdyX4LAMQJhDH95cogteY6eHQEeQ17LlB/sJ2eK5HLtT6nf0kINgyxTpV+7tzwjcTDPx\n",
+       "vLFOBAsR1SOHarVapmr5HqgQ+ob5xlF3IO35BG2jythOqNsfcFnv22EwWIAdMOfMz8Qh4p2NcEN1\n",
+       "KOeUABZns3TIrbOcvgP4YL6tj1g/ZN+ABWvG+hlZ5/vYb1/MM/cymlvazpeuV3GkHL1zTSaTNAoM\n",
+       "ngm4ubmJ9+/fV9JbFxcXEbHdGNfX1xGxg7Md0TldZ8PGxOBNm+vTaDQqTdmswIigMEDm4fhyCsrP\n",
+       "J+r0uXkIE0JuRypi1y6CCJNIr9frpQOFE+UNbJjdabOInWFHQbzETSBn73QSUCkG0Uq5zEVHRCUy\n",
+       "Y1M4zcTziMR5lhW0N1KJALo8vHx35p1/28ii/MrvReyOsWDuzCNDHsr1ZQ4pjy55RyhzI6Ceb2Qe\n",
+       "JWjH1albyzBjQg55Dz8Tg2+H11E66+/0ldEIo4VE1lZcRlXZC/QS42JPksIiamUNO51OOjYR1dJ+\n",
+       "FDsOjtFbK+ASOWYezYHyOBhfrbZNxRrNQS/wOXvK84ETyTv3+/0cM58xDvf0Yp6McuIk8zfmZ3lf\n",
+       "2pg4pYN8uImsZc6y5rUmALWuKVMlvr71e9Z/OBxm/yXLAAEXAbIROQw0PEgjsDgfNpZ2CsfjcUwm\n",
+       "kwzOGBd0BBtExohcYMTt1BHcWhaQbzt0XiP+Dr0MbcGcLL5vhCxi197h6enpWcoXuX3pNIX9/f1n\n",
+       "fFOey7NIETMHvofTe6ZN2KlhzxlM4KfllTESPGM3S7TSOpw5YT/xX1kcAIpdIuMeh5FF1rdE6nGw\n",
+       "mW/aLRjYsD5hH3rflaBIeb21P3i73q636+16u96ut+vt+juvV2t/QITq/HBE1fN1ftxw+MHBQZyf\n",
+       "n+ff1ev1jEyMDhF5ANGbC8G9iFzwZHk/R4dl8zpXiZRRGpGL78E9I3ZwvSvaHL3CsXqJbN7v9+P4\n",
+       "+DgRqZOTkzg5OYlOp5P3dTlnibgYdSs5Fa5KAP0ADSovUEFzzJhv7odHz2eOyl9Ci0Cx+K75S8wf\n",
+       "//kzZAli90uctBJBIqp0tUz5Gd9zJOQIjfUllUqqGeTDuX/e1ZwA39scn9VqFbPZLNtGkKJiv7hR\n",
+       "KBG001tOi/C5Uyfl88rCDCNQ/J3HwJhANElvgP447cVe6/V6icKCGrvM2UhfCfXT0gKUmKtEHplf\n",
+       "oytGJRyVU/RQq9WyZYD5J/V6PQ/HdkrU6XhQLMYxnU7zeBAqW09PT/OePAtOFu9vvpWRGd4flA/k\n",
+       "kDEQXYNu8V3uCZ/I5f6MgfE6nek58Bw7feN5L/92tVrl8VF0f/dFA0SoCS5CgbdkYjlrityiE01V\n",
+       "ME/V5fLORLD3ywtd5H3Id5jTkmwPr2ixWFT0M4U8JSUiYptpYZ+5KzvPs36zDsZeGTn3+Fh75MDo\n",
+       "GegtYzDvzkiUES4+Q5eY5sGFHeVz3sfyVtpFo+geA+PAxrJXGT8Vty5MK3WG19JrZ/3mtK5pCeY5\n",
+       "M3ZzqKwv+L3f/Zk8ffOT/8MLgaP6KaLK27BBjIiKgel0OhUYM2I3CfB2bKCB050LjqgeFAycW1Yd\n",
+       "+Pe8D++LMDrdgECYTOhKMUOGFgR4PKR29vb2ctPxHoeHh9HtdmMwGORxD/1+P51DK1su4Gt3fmbe\n",
+       "ECIg8zJlxGfmCHEPBA3n16kENpBTcVwWTDs2QNBs/NJxZSy+v58LBF5+BoT7ktInpYtyNMRrgw4X\n",
+       "is9QeDhSXE6fMu67u7sKb8NKsuQKoERQGK5429vbVizBh3AVHf9h9O1ksZeczvX6mmjrNCX/NmTu\n",
+       "9eX7ZcGE95KfV3K0GEtEVNIlOAplhSbpIKeRS4fWijtiu9/m83mlw7TfCw4dh1bbyYzYVqdiBHG2\n",
+       "Tk5OknfJmiAbe3t70ev10jiWfXAwpCWBN6KaHi2JunzHqbmIqOgm84UidmkZ5NH7wuvpg3x5vg1Q\n",
+       "eTllw37kfjiIw+Ew01gm+k6n0xgOh0lPYF+wn9BVnh/0noMeOx4UGC2XyyTxezzWXXaGMZLs+9KR\n",
+       "Ilhxyhzj/fT0FMfHx2noeQ56mKIpdON8Pq84dawJ33N6nnEx9pIK4fQVOsp8L2wGwQx2xHw587NK\n",
+       "Hhx6sV6vP9OdOEqr1Sp7RlmfWM+UdADLnNtU4CTh1DlFT4BBmpgUdcS2KpE1MoeW5+E/MH92epGh\n",
+       "Mv3Id5HB0jcpeaTl9SqOFIrB3q4/Y+HttHwrJ3p4eBjHx8c5kdPpNAfMwqNkOJcuokrYI+r3RJUI\n",
+       "FJNO9YYjayt3Frd0FGkOyec8N6J6GCz/76gMQWi32zEYDJILQW4aB8oePQLKBkUZWWkbkXCE8RKf\n",
+       "w0aR79vg8T2+WxJOXQ1j5c9nRBUoVBs2bxLPm9eL55pbZQNbVuYYBZvP54kAMa9uGuiqOeaUcbjK\n",
+       "hujqJQVeGusSBSNyQ24wNETMNipGM3DAN5tNxXChDOBSlO9kx82KDkeWy9ElCt+RnflMrBtryBio\n",
+       "KmO+zdlxBa8rJrmnnT4jWciyo0iPg+/aebVj4/5xfO4xwpskUkYuCOJoOsr6ttvt/B5z73Mmedf5\n",
+       "fB7Hx8fPKuTm83kiM8gbetDcTz4DAWMP22Cw9+v1eiLHNvrw1Ox08xlrbqJxKQf+W/+u1WrF2dlZ\n",
+       "pRcV74osLhaLRPsYP/qXefMasiceHx8rhHKjjA7qeFf2DQ4t62tSN/NipGhvby8mk0k6zC60oAlt\n",
+       "xNapNtmc90YueR/zLZEh5NC/512M6ltfWrej890nq9SLRq/s2LAHy3YZETuHnwxNeSHDAB6WU/it\n",
+       "OB3sReYR3e49bsTcup95BGBwdTRzaGfHAaR1RhlcMW/fQikJgLH3Rj/xJb51vYojRQrK8CLOAwrD\n",
+       "jhQKpOwSHrEbJH2Ibm9vnx0GzN/b8FHeiUPHYZ4RUTH0LBZC6vQUxslwu2HIshEeqIU3dkQVkSgh\n",
+       "8dlsFrPZLKNcHDKeV8LEVnCMA+E3cR4hMSQKSsBGxGgYSgXdw6GwsrXX7k0fUT3YMqLaKblU9GXK\n",
+       "ibFx/5J8yf+bSItjiuIsz5djDkBH+Iy/x6Ep78n7oCCtvEBPTKwso31ko3x3fl8qAae5/OyISIXN\n",
+       "nNgQsZ5EipYtPxuDY6cWRYpi9Pjr9XqldLpMFaH8+/1+9Hq9fJ7nn4g5IrLAxIqb5xHcuJKG98Sp\n",
+       "BFFiznEmn56e8vDcx8fH6HQ6FZQPJInneYwgdXQrt+O6Wq2y0SXGnfk2quk0Ow0nI7YEaVAmPnPk\n",
+       "3Ov1KujB4eFh3N3dZbDIe4PSm7hsnYIBoyu6969TSHaEcKxLJNnz8i3ECv30D//wDxlIcb4fa06F\n",
+       "3Xw+z15bOKyQtUHleCYBEfLswBCd8NJei9hWi56cnDxrqExAVDqLrBE6zvoN42rkhcvBvasumRfL\n",
+       "rh3ziCqR2VmTcs96rdAfBG2mKHgty6pPfg8aD6rmTIEdJWcA2u12xV6bUM+cmIjv5/k+1us8z/Lo\n",
+       "LIWzG8iDn1eiTRG7fVGigJYZO9LlM2xj7GOUgUR5vXpDTsOLrug4PDysNJFjkah6s7I1d4NGiRGR\n",
+       "B+E6D2r4l/41RKgoWibN0W5p2Bzle8IjdgJi5WblWkZfhndLCBZFCZRa5v9LbgljtdOAk1GiYDy3\n",
+       "FHKcKMZQjt9jdprSOe0SIWBMzEPpPLF5S5jdskIEZ0eyREts9EAh3ZgyYufU1mq15Ok5VeQmjd7k\n",
+       "vA+yw5xylQqjTIUgVzb8yDAb10hZxK6CzakUfoeMgP6AmEREVnAig1burJODFz4Htkepl/LhVILR\n",
+       "I5BkIjnzH+2Mkjbgu/v7+zEajWKz2SRKZLlAVh4fHytNc5EjHPqyxBkdQosQzw0VZYzT69TpdOLo\n",
+       "6Cj6/X7SCcyLQk/ByXPTSebIFYqMA90C4meHAL31+PgY0+k0uVWknknPG80wVQC5cEAHgvISr4O/\n",
+       "tZFnznAGSK97r5WpXu7FvFv+G41GnJ2dRcSWJ9RutyvHQKH7Hh8fU9czJuQb59oojeWYvktO1fl7\n",
+       "6/U6kQR+h+OGDjbKGbFLNZPa4jOcZ3qb2YCjB9AXTsWi7325pxPPZJ+xXk5/od9LVA1g4FsIo9e6\n",
+       "RFwJFo1WoZP4jN6EEbsejugEUn0ROyeTVCJ/53fgOy/xmZC1MlAyqmZ0GVnBttmu4TSWVZsRO32C\n",
+       "3vcaYHvsK/gq5b68XsWRIlLyyyKEeLZGQVgEoHZ7vD4+BO/TpEujMm7Qxf1BXSzs3NvRhx2oiCr8\n",
+       "XUblLIpz+ihroit3cDbkSMTA8/b39zOdiDKz0ua+Nj78tIPGRi3hSZQFyjpilxJljCgej5t3i6j2\n",
+       "mmEM/J0Rx8VikeMoOVne/IbUuS//BoFgDdm0PmIj4nmDTMPDKHLWyLwFKx7Wq0yPls5TxC5d7b8x\n",
+       "sub0FH9jTgFyhBLge6yd0wEocZQgcmFIHQOBsrSTaeTBqdqISKcTFPju7q6ShmHdUESMH+4i7/ZS\n",
+       "6gCn3Kli1on5K+fX6S4jLqAbDiJKpMTGxdwyDOJoNMpxU6rf7Xbj48ePue9ms1l+RmoKBMVR+d7e\n",
+       "7mgbHB4rd4ISCMTlWXtHR0fR7XbzKBXemaNJ3KIhYpfa6/V6z6Jq5JuAwagDsvgS+kTaDcfXaV4b\n",
+       "1ZcCU/7tgIG5+fjxYzw8PMRkMsk14Ygg0pO8q/d6q9Wq7HenZF7ah9bhZDB8Tqgv97FCZ9B4mD3v\n",
+       "ZxOUkWYk4OIeDhSs25FJt5owOsYzvDciqvu8RAedtsaZdMBjNAfdZkqLnSPf3+vKu5Ryw/1s2/xO\n",
+       "6M5SPzDPJurzLp4/yyL7y8Vb/I2pIyWAYPv7UnoaPet7lrL0/0Kgyuut/cHb9Xa9XW/X2/V2vV1v\n",
+       "1995vQoiRdSNhxux8+SJSs2DMgrTbDZjNptV+Ex8D6jWnuRqta1EaLVa0W63s+IN6JBqC6eJeEci\n",
+       "b0c0RsmMcPCTvy3z4URQjMvvSTQBauBUA+9qr53vAXUbPSrz3eTS7amX3zW8yZwSBdj791hJGTnq\n",
+       "9/1KzpbhWSMSjMeImc+jMp9psVhUogh+wqugeZvvCdTsgoG9vd0p5D6Lje85Bed0AvczIuo14Tus\n",
+       "o+ebOWc+WRfuQXQGImkkK6J6dh7vY9JnCet7L1jOvPaOQP08p2Udwfoz/h5koV6vx3w+r9zT1atG\n",
+       "ijkuhHE5Ted7m0uETnCE67QUqcQypQ3fqWwCyzqR5kCfXF9fZ1UQ62V0DR6To2DGAZJepsBcNk+z\n",
+       "SGScDtLr9TrTkOahWMaMTIAar1arRAiM4h4fH2c0D8rPvdiDPId5ubu7i59//jkRI9MInOIvES7W\n",
+       "GFmzfkJGTk5OYjQaPePDwS1DVnkOa4TuYs5MqEfG0N2me3A00MPDQx58zjhcqWlOFrrFdBDrIjeT\n",
+       "NW8WJA1kyLYEdNkpTC70DjzUzWZX7UeazEiTdSlotCv4LG/oEfad541CGqNIETuqAP9vHeGWISWH\n",
+       "jndFr1pn8BxQvpJzauTfdt52zVXnrIPPRLScIofoe2d3/G9/h8/M5yrRqP8XOvUqjlREtS9HxA6C\n",
+       "dG7YJ6tjmBEuSpIhYnJEgxcRRwjFcH5+nlUflNzDJbETVCozbwzSaCxI6SzYcBreR/DNbbCBdkrI\n",
+       "m6fdbsdqtaocemwYsyQplkosYnc0jeF2ExlRHM4bG6b1JkWZojycu3Zqz+vM85yDt/HGCOFEbTab\n",
+       "SqFBrVarVNaU/CwUpufe6VycPjs9GGZIw76nFaONIunoEj5mDOU8lXPn9zcvC4XKOvozy5G5BIzD\n",
+       "KXGKESKe9+3y9xgbStXOcrmWJYfECg7Dz9w4RWlCPX3OaO9BKoN5I03HcSnu7owjxHqRTvERIHDO\n",
+       "ms1mpUs1c8062MnC0eK7yD69kNbrdeWEBMbulLDnhXdjbY+OjlJHRew6apdOtOeKquKSb4U+KI0q\n",
+       "fCIoC3YIeNfSCPN+dL72PmSfQQR3+op9XXJZuOysvxTwnZ6eZvXedDpNeePZ1ouWNfau0/YRUdnX\n",
+       "flZE5JmVDib9XVMwXI3FeNFRbm/B2HAYLIvI6XQ6rZTsI084NAcHB9HpdCq8NRwUeLp8bzgcZqq1\n",
+       "Xt+dp8p32NcOKk0xsb50/zVS9qUsW26cAuNijzL2krbAWpqu4HVFfsqiCJ7LfuR7OP+WQV+m7fgd\n",
+       "DFJgG2yD+Ftk34GfbZd5bsyRdUJ5vZojhQIzh6YctDcbCpfIi0E+PDzEzc1NLJfLODk5qZDUjIBw\n",
+       "nAqIlPuE+D24rIh9WZnxd3ZsbGQtiCi1knPF91CypXPGpnHU4Dm6v7+vKGkrTiIEHAcLDpwNR3tu\n",
+       "roizxN+VeXVHJnYWI3aKqlRe3rSr1Sq5IDRDvbu7i4eHh2cl0DgRcHe8WT1PZWTC5xg9rxPjs/PO\n",
+       "OqGIcGxMDvW8oHg8H1yes/JznAOeSwsA5vIlpW+Oj50eGzhImH4+37eSctNMoxWWS+btWygPChFn\n",
+       "ASfOCBCK/+DgICaTSdzd3cVms6kYE/rsOOhwBErFFc8teRwlWdZVbEZBPQ4bCxSqHQH4NSBwPiII\n",
+       "2ej3+xVC/d7etiEmfXsspwQyPiLHFWKME91hvcc4eV8739wDbs/x8XGukx1P6yz2EOthOeTdMZKO\n",
+       "5nE6/T6WF+aOvy2NFAgC+9vBVjleF1Ogf8rDlz2H7AEjciZS39/fVxwL/h4HxeiJuZvmMXpvlkES\n",
+       "pOnj4+OKrPOerDF7mHvSRoN38BjgJ47H41ittsTuk5OTiKg6X+xDEGjemywGQSpzaoQeveJed0bN\n",
+       "vYYgd0aMvd5Gs+xk2WHj37ZB7Fk7+tyTIK9Ef9n36CGADcue5cpZCtYfXWJdShaGgIL7uGnw/68c\n",
+       "KSJMR7sIJ8JhyNWKgAm08DuCt9PRarWS+NfpdKLT6VTKw70IpbfJhsHQlsiKoycvsqFKDE/EzpHi\n",
+       "dx5TGcWVqJqf53dhYRE4k5S5DFeWa0BF2mq1rVKhlxKC/1KqwlEFSoJ3L9MGRqv4LkalTOE5rUn0\n",
+       "w+UKmnq9ngfeopzYjE5TPj4+xnw+TyKz54D1JrVXEtuJ4hyd+DPkr0zbMX7ey0bR8+X3QE55P9Ai\n",
+       "y3OZ9nIbENYXcrRJno68mWPWAdlGzsoKUu5txcQYWd/5fJ7Po+qsXq8n8ZzP6GiNzJrETKoMOSNg\n",
+       "4p57e3sVON5pNubZCKoVMpVWbhkQEYk0gvZ6bozg8b6lYu52u9mfiXsPBoN8frfbzdYLPA/nkf1G\n",
+       "awhkEATRUT1/64CGq0SgV6tdbyDObavVahXjwBoyBigS3Pf+/j6urq7S0D49PVUQfN+n1CeeO+sL\n",
+       "f9ZsNmM6nVbWCeeQUnwbVQJg9ken03nWBw8kj88jIh1FZzDsDDNe94XiM9ae/9wShv1QOv7IVqvV\n",
+       "yoDdhHIj7g4wWCOj1A4MaZtB3y1kjc7x/X4/74cNQKaMRtqRBIHHvrbb7bxvq9VKXWPknbkh4C2z\n",
+       "DuhzvlfqRuaPueC92HNUNpZZg4idzsGx9zpxeV9gX+wQ2uHl3yU6xj7BL0HfRuyyDS/JfL7DNz/5\n",
+       "P7zgVdiZIOpwtFimvnBq7GFH7LqN4wjwbxaIA33tGXtTgIaVufLSEYjYRWZMtHO+CKEXiO87j196\n",
+       "2SgEQ5yGbMt0lh0polkcQvfJcjoNBM7RkJ3RWq0Wo9Eo70seuoyuKRu2YHkcZQqJ8TO/ID12bOwQ\n",
+       "AZ8breLvqWIqBbzT6VSUKrJgpWfHl02JQ1XKoQ2BS3lttMu14KfHUkatdkbLYMDzao4U70/AQJQe\n",
+       "sUOW2u12lu3zWYkUGpFB2dEry9GXUT07/YyRMVOF5L28Wq2Sm1NGqSBLyCNzCoLMvnLHaI+XPWpU\n",
+       "jd42/K3XDbSZ5pAOtsyNQnka6cABMeKH7DPPOP7oGoKyXq+XaJajZNZ8uVzGbDZLY9pqtRLB8Poh\n",
+       "M94vOFx85pSRKyZBWjD2Jd+UdAmHnXstZrNZjMfj+PTpU7x//z4/43slks5lXYBuY97QFRcXF/HL\n",
+       "L7/EZDKpIGt3d3eJbMJbjYhKaxrGUvIxSePhhPNZxI7n46o1Ak90vA9BtjPDOOxIGsVw8OVgCZ1s\n",
+       "dJW/Jw2LE03F4ksUg81mE4PBIPb39+P29jYzDxERV1dXcXBwEIvFIo6PjysBpNcKYIKAmPHbDs3n\n",
+       "85Q5pyvLVjvWOWUGh7kkOAeB5Hu2oQ7M+DcZJv/eGSrskd8FfYk+9t72XHh9uJfpEB57xG7/Gzlz\n",
+       "sGLH29erOFKUAFv4mVAm2oYPRWIHxz1hmBQWEWEk3WCinC9D/mXulsm0YeInC8IY/FlEtZeNvWWn\n",
+       "RkoHzZFQqbDKCMAOJhsGo8jfuVGjkb7SeLvkl4teKygaFFrEThGhbOy5R+xIzn4/xr5eryvlryaA\n",
+       "Gmms1+vpSBGlYkBwxnhPyKG8P99DiTKPjoQctSBvzAv/9tzZWbLxtRwyXygAjLHRLOYLhekNz/qx\n",
+       "yZk3lAVOhlO7oD5HR0e5RjZodl4x/FwQX9l3VjZeh729vZxT5BQ0wKmter0eZ2dnKRtloDMej+P+\n",
+       "/j76/X4lPWD5Ho1GGVCx9hg2ggK/hwOSRqNR4SSZY+GjLSwjpLRLw2jH0c5bt9uNfr+fjliv16ug\n",
+       "fZB6F4tFdDqdvA9pmIeHh0QHMIroJkrr2ZO8B3KCIbFu855oNBqp5O1A7O3tPTsiBONeBkSDwSDO\n",
+       "zs7i6uoqhsNhdLvddCTYY+zjUkdZNlgb65TNZtt5//T0tOJIU3hA/zJ3y0cn4IC4iS/B3cHBQTZh\n",
+       "Zt6sYwm6S/SDe5VG3UHAS4G+G8va0BIA8rfm8rF3Sb+zTpzNGLHricV7gkahb6fTacot5xYOh8O4\n",
+       "vr6OXq+XGRfGCMG+2+1Gq9XK3mQcm2QbwkVmAtsHCZ6/I5jFoXXwVdI/bDdJ6aOvcLJw8tEXTgki\n",
+       "I+wVOH3smdJhdxNuZK8MvngmOtGpuvV6XeFm2jllj/xviNRb+4O36+16u96ut+vterverr/zerUj\n",
+       "YpyrjagiPnzuPHqZV/e9QGWI5k24dXrFED4eJ6iT03gmpJE+chRC7t0oUsTz9ge+uIc9bv8/0VHJ\n",
+       "xwD98vw4mmg0GpUT440clZV6TicZDiU95tQTYyO68PeIIohATUB39Zyf91LDUxAER+AgPY5mI6pH\n",
+       "rHCRrpxOp8/I3bwzESbrybu4fNZRGd8zF6DkpvB+Xm8iT1IA7gzu9ycK87uSkqRCx6kII6Dm80VU\n",
+       "KxOdgoyI5Nu4oWYZXdfr9SxUMFfA6JWP1/A5ZUZBI3aVeYyRueB7e3t7cXx8nKgNa8EcLhaLPOTZ\n",
+       "8srflpwNokZS/uaXMDfMf5lqdFdzIlOj0SBUrD+f0VkdfpXPY9zb21YzzWazePfuXRZORET0er1M\n",
+       "7Ww2m0zpMEZQzvl8XiH/Ov2IfjKJud/vZyr4/v6+kjJiHGUqharAyWQS19fXFVnb29tLQvPNzU0c\n",
+       "Hx8ngR0OCUhQicow58yPOTqmFFxcXMT9/X1WBpJROD09TT4biBRpXloHkHVgjKQw4ROZqE3aeTab\n",
+       "Vc7RRKch204HIwesu2XKyCNyZ3tkGgGHi/P3Tota7/vs14gqknN0dJScsPv7+zg9Pc15ub29zcOh\n",
+       "n562x72QrWGMNHAFmTLCPZvNUtfw91ymOpTZFNJpjNk2x2uPjkM+jAwig9yj0WgkXWOz2VTmDR8B\n",
+       "moptt9+7TNuX2RxTQbBZ6Eae5+akoI6uAPbavHS9Wh8pJhdIDkUBdGiYz04GisWKDw4M3YZduYNB\n",
+       "YUIt/NzX/CU+8zvxDvwkzQDUWRLmMXwWRnLFVtrepOZ6OD/rXiLcwwLFu2AoyiNFSOuRZrLgYODg\n",
+       "jvEOjIs18HyzSXhHuGdcTqnZiXW6hE1Vcmj4W6d1MeQYPVI1EZFcFfd2spK0jJgL4TRR2d6CDe3/\n",
+       "XiJAlukLp/DI+Tt9h+FHFuxoNBqNyuHDJrlioG1UbYT5m5IXwLOXy2VWBdqpZR2RN74PP83Kn5QB\n",
+       "Bgpi9P7+fhrtw8PDJMTCLbFjfnFxkfLhvd1oNGI8HqdD7nQK6QOfVeegCGiesZE25bt7e3upFHFk\n",
+       "LANWsrxP6XSTGkPe6DtX6gVSU5yJN5lMKlxNUu3wR5zWZp65h51zZty5IgAAIABJREFU5gPnCqcY\n",
+       "5wkeE2ensb6sn3khXKSkN5tNFmREbB2+wWAQ5+fn8ec//zlub2/j06dP+TzmjHcxRcF6kfn1vmE+\n",
+       "e71ezk9E5AHBR0dH6dyXQSMBBE5DxNYoci/SUcw3jhzjs55Bbzt9Z2I478tewz459YMDx1jZa3Z4\n",
+       "uRfvjd7a29sVTEwmk9QTpOjNLdrf34/BYJD3gxR+enoa8/k8ZrNZchUPDw/TkTZxm0CDi/Sl9b+D\n",
+       "AeaeQNH/RhZxRlyggp1lvbx/0K/MMwGGKQwR1cKOl7horD29wSKi4oiyTgSVJYVms9mk00YKnjEw\n",
+       "V4+PjxUqA/OJ7fzuu+/ipetVHCmM4mQyqZC8yH1inGyEMDDk/b2BUaRE515ghMWC5av8bkS1HQGX\n",
+       "nSs7e5AhuZdJch6D+Tg4Jy+dJehoKGIX0bzkDaO4XqqE87g3m03lfLmI6tlJKBQT3BFilw0zfnPE\n",
+       "yKszBngnlN8aBWE88EScY4fcXvYCQ9HxO6MgzH2z2axwrzzv5irYOcVxRGF4fm3kjYQaGXELAeYH\n",
+       "zhBrwv0Zo422KxaJeOzYgNa57Ydl1GtiJM/IktElc8Qwshhjj3k+n+d5YrwbChoEEKVP9VrE7lgK\n",
+       "95vBYGDovbfZv6PRKLrdbnS73exFxcW6gpx5r3G5cMGE65ITVxp9B0LlfrPjYUIyjToxmsgtzzg+\n",
+       "Po5arRZfv35NI8hYceTQEcy7jRxOPe95d3eX+wgOVnle4Hg8jnfv3kW3283v3d/fV/af0WfmhKo8\n",
+       "Owv1+rby7J//+Z/znZFDxm7DbV1aoumuBqTQgmDQxSvIHA64nUzrByOEyM1kMsl97AADOUGnQKqP\n",
+       "iCw8cjDvve9WJF4bileoiOv3+xlEWCaMoETsAivkxXLIGm42m0pbHr6HE8R+LZ1veHfwj3gf9Lz1\n",
+       "TYnK8H3WhGc6qPa7urChtG0OwAmSbEtKuSt/Vxbi8DwCJYIM6yjujd7z941io7N4Ns9C97EPWTf2\n",
+       "Pwic19BOXXm9miMFzM3EEXljoL3R2EQIvZUNG6hETfie008mlDu6x5Ba+B2VOJ3GpmZRTCzEMcEg\n",
+       "sJBc3MPoD89zJFymCN0N3R42UYBhdkdXjIE5sxDzXgioFSH3NNnXwmgFbeIiY0dJ2Qnh/VlL3olx\n",
+       "OO3puXEaDfKjHeWIXeWXWxawVm4LgNFjzKQFXlonZMLpOUPzoHyeb77HvFkmUVCO1IwuUGGGo2mE\n",
+       "xA6BI33GjOK2Axqxq3h8enrKrtIREefn5xkhllE5zhxOoJE92hogo14nnK+Tk5Not9uVoGU2m8WX\n",
+       "L1/SKTTZejAYxGAwiNlslvc1wRljWDoeyIxlPiLSsVutVs+iahupTqeTMmw95GianzhSOJRHR0cx\n",
+       "GAwqyh0jNp/Pk1TLvqDP3Xq9TjSrdPoJdpx6IoKG6mA0qNPp5P1ns1mcnZ1V0sFG4O3Ez+fz7Pfl\n",
+       "c/UitqkmUmKnp6cxHo8rRh9HxwbFASYXDhjvY0OJw+t7kO6lp5gdAnQ21YRGemi/MRgMKtQE9k+t\n",
+       "VsuSft7PvbXKjtkQ2tF/TpfjALNn5vN5fs9NJ/lbX/69U6F21Ph/3un+/j7u7u4qjnCJ4PO+Tqnx\n",
+       "XfQBY/VnzE35GYE8Py0bUGYIwCzDpsmURS04gbxPidJbzxmpZkz1ej11tG2wD1dH5hiDnSD/P/vA\n",
+       "tBzLLTad5t4GAco1La9XcaTgZZToEIuL0WdybKwxWChMO0OlJ44gvAQxR0Tl78o0nqMrvhuxi7wR\n",
+       "rDJ3iuEHtvQCIIQ82/dGAEGK7JlzLxtyX077OOK384PRsfFjs7AeRiF4FgqKuUIpMV+eT1BFxmfn\n",
+       "zP8PemOHyBvBUSLRFuMwQsAY4SUYdcKI4kjRw4bne60pr+Yq00dWNI7mQJ4itkbIfDrQIKInlBCy\n",
+       "Y2eCjU0bh9IZZDwoPe8bFOxqtapEX+T4QUJc0eaO7ay/kRXkqByj0yk4OXyPeV8sFomQsE4Yebo+\n",
+       "TyaTHCM9fzBw+/v7le7OtC9g7v085hKn1Nwr5IP9Z5mi6SSIolOjGFEiUNYlYme84DzZWQDZcSNE\n",
+       "IwI2EsvlMt69e5f3vL29TfTKe5T3Qj/ZwSb9y5Ez4/G4QjFg/Pxng4i8vXv3LmazWQyHw3w2nEO6\n",
+       "j/O84XAY+/v78f79+0rQYVn0c1xJy/xw+agUOoX3+/18H+sCG/DpdJopQRA1HAjLxnw+j06nk+vl\n",
+       "lO90Oq0EgZZd0ousgYNkDCuUBHMVqR72XHu+cXrpeF6mz0Cbms1m6oTpdBrj8bgSYHDPdrsd3W43\n",
+       "BoNBIvLsrYhIDtR6vc4UZGn37NzyE3QXygR6jjUD7GDflylDrwVygY1wWs+UBv+t6R4EFryT9V63\n",
+       "262ker234fxhw6wT5vN5OuPouPIwZ9r9lClP3udb16s4UnjmHiQoTwm/R8SzyMAIkR2aMjoyIlMi\n",
+       "Ol4YOx08A6NneJl7WhE42vE72bGL2B3L4fE7SjM6g+MTUeUm+Ds8z4RSc6CAmf18O4i8z3Q6zZO2\n",
+       "jQ76dPsS1sRDd08jfm8nAqfDa+LowBvRzlnJaWA85cZg/HZczTGwk+s0Fs6YYWzek7/HCbFzimGB\n",
+       "01Gr1dJRQpEBR7vFQsQ2FYGicT4+IioGAHTNRtfBQnkRWSPbzB0ybc6aI3YMD6iKo0s7Guv1rplh\n",
+       "vV7tTO20gJEfDLs5Qhj+VquVTkNEJCkbuXl4eIivX7+mTADfk9Y0pxIn0YGGHSLGZaQ3YpeidBrR\n",
+       "QZtl0k42aSee69QexsmXCdaz2Syenp6y8zWoy97eloR/dnYW8/k8z7iL2BrEwWCQZxiSUo3YcdJs\n",
+       "OLz2ZdqjROWm02keWcKFjJJaxnBGRHz9+jU6nU6cnJxUUh5lMMyYXiKk8/ePj49JYqeRK4Yd3g/j\n",
+       "iIhMk3748CGfMZ/PKy1fHHARkDqjYCSe37t4gPfmHe/v7/NsRNYSR8sE74hdGgo7g/5kfDj6tHYw\n",
+       "R4iiBdBH1tD6g2DRuhOOjx1NIzbsXTew5ZmkzJkXy7cdIqNgOJfQR0hxsj7YIWy1u9MvFosciwPh\n",
+       "6XSa6DV/Z8eG/e9CAr8L7+jAkzYSs9ksgwZ09Hg8TpQPvqMDVZxd0ro8k33vwKC83tofvF1v19v1\n",
+       "dr1db9fb9Xb9nderIFI0net2u5WUAHAw5FQfd+F8v73okuhWpvJMGHYEDWzp/K0vf6+Exg37ObI1\n",
+       "98fRCZ+5SsH3NApH+uqltJzJ5L5HmT/mJ6kBR6VuWkeUDKfDc1Ov1xOVKvPz5OXLc8qYj4hdCtQo\n",
+       "ChGLIz+/s+/h6Jq5dKqC9eVepLbKiM5oHnNkNJGUi9HB9XpXAgvMy2dElLwPqQY4DSVqSGS2WCwy\n",
+       "2gbp4AK+BmEzodMIYhl5mi/C35lfYySiXq9XiJWr1SpLzS3Pnmfvs4hdStAkfaJSGoKC2O3t7cXl\n",
+       "5WVEbGW41+vlGji6hne1WCxiPB7HZDLJyhgaDzKuUgYh7ZeFIVzNZjPu7u5ynzBfpFhANEq0FfSK\n",
+       "dyyPweF+Lpgg9cPBvCAu3PPdu3dRq21PDzg8PMzP4YQxR+12O6uGTC7u9/uZxouIlBPWx80qQbxB\n",
+       "eYzG8f/NZjOGw2EeJ8M9QQ+QPeR7NBrFYDDIPVA2OGZOeHf4ZP4MXQE/kfUH4aCKyqg632u32/Hx\n",
+       "48eck19//TVub2/j8fExptNpHB0dJWGbPWsCOJf/TSf5Ml2KbrLe8xmJzWYzWq1WpZDIfCzzcthn\n",
+       "k8kkDzW2HLN2EfHsMxAwPydiu99/+umnaLVa8fnz52i1WpUO9XDdSH+RrWAtQKE3m20bD+tT0CGI\n",
+       "8MzNdDqt8EKhUzCnoMW8g9OMpPJB6vis2WwmcmS9zthNGuf9IiLT5mUxmmXYSBM2k7Y3/DS9hLnh\n",
+       "vpZR9pFtb3m9iiPV7/efEaVZeKcIUKJAm6WR9uW0idsfRFTLLF9yiMoc8ktkcH9mx8RpL94ThWGO\n",
+       "gR09vyvfsxPonK/5QEDI3li8e5m69P2YV48DJ2N/fz8VuVMzODMoD0iu5qGhaEziJu8MsdYVOHYG\n",
+       "XjJezKVz6lwoRhO8cXR5l9LJgqjLmpSlrqS3mNeI3WZ7fHxMhcQ68V4mW9o5wXFeLneHantD2zjb\n",
+       "8FnR8878HcrkpTlBLsw3tJwzJ/x0t/iDg4NMq6GIucy34DwvrxOOIDwUPiPlzVph2AgAlstdXyvP\n",
+       "AyX3h4eHcX5+nob98vIyvn79mkqYd2KdPEfmhjHvpSNpIm8p915j9h96xEax5P255Hy1WmUBw4cP\n",
+       "H/LZm80muYb1ej1OT08zvQkvg7PSHHyw3pzjt1qtMiVIwDkajZ5xNR2gMXe8M2kpAoYff/wxn3Nx\n",
+       "cRF7e3vJ+cEoRUSutx35cn5Jozw9PcXp6WklBch8Ybw9hxhSjJu5o+w/HF86dLOncdapqkOmcGbK\n",
+       "tBjrhMNYr+8O3iZdt7+/n0UB5f5GT9mQ8z4Onpm3xWIR0+k0gyh/pySMs2a8C845vEG/J7YR58z0\n",
+       "FOYNh8N6n98zH6bJIHukrk3mHo/HmX6MiMq5tegTU0NstyJ2nf/dl86yiW607ceeAU7wPeSB/VLa\n",
+       "NQ6C5vsELcynA3cHnoAG3NfFBHAgv3W9iiMFv8QbnFLE2WyWSrdsjbBcLp8hHSh3lByGkM8c1Vg5\n",
+       "IvA2qiYBskAIlhcfgw6HyGgQThjfc57fqJcFGI4P72CUy/wunmXBZxw2xtwfJwpCp4UfIiLfh2vD\n",
+       "WhjpqdfraRS9EZbLZSoK3gfjSgk2Df6IDNgwRlFwEsh7LxaLigNmlMToiT9DNphvVw/S98ib2orL\n",
+       "hwTjzOF4MBc8zwrGjhuInpVNyZGDn4ACRR7gTiBn7iVEWwSTwK0I7EDaocI4sf7srYht/5p2u51j\n",
+       "8D7EUUJe4YNFRFZM4jAZPSjXZr1ep5Nxf3+fDQRZXwyBW4CUvb84+mIymeSxK1w446ztZrOpkFVd\n",
+       "QUQ06R5TyD5G0bJBNGpdwT0ho242mzg+Ps53wvnCKWy1WhX+FEYCUrEDSNaQ9i8YxHK9MfCeAxqY\n",
+       "GsXEEJgTyQU/ptlsxvn5eczn8/j5558jIrInHLLkgojT09PKWYA2XlzD4TCdhaenpzg/P8/7Gm3o\n",
+       "9/s5pwQrIKB2KkAwyh5cyAqGcTweV46IabVa0el0cj+iX5lvI9F2bB4fH2MymWThio2py+XLAiRQ\n",
+       "Vs5/5F6MD7lkndFtIGd2Rrkn2Rre1foLvtx6vY4vX77E1dVVdDqddDJBKuEq2Qnhwqk3Lwv5ALFG\n",
+       "jvx71n+1WiVaOR6PMyDAiWK+W61WxbHCnkRE6mz+xs4g+9D6xM43z8D58fE8OFLYcfsK2AuDD7xX\n",
+       "6QCaU8n+/Nb1Ko4UG8PnDi2Xy1yYiK2nXSo3lGaZSkLh4927Uy+OgjdQxC464vtl2i9ih2SV8CJR\n",
+       "y0spKf/OhpaF4/dWmCWa4FRdSTT2GHCqcBBLlAujDOpgpI1IkPG5d1Cn00nFUaYMy54pEPj4Hqka\n",
+       "0CiUBukUjJbTRhhunBejNVyeozKdC9HRf/f09BSz2SydH883/2bdDdOXFWwPDw+pFIhkUTTInNea\n",
+       "En/elfsapmbjGz0ySdNKA0Iz8LmhaN6JMVlOnMqlWaMPgnZUaqSHMfV6vTg+Pq6k39mDdqK81/jJ\n",
+       "+to5pfM5KW5XUD49PeV+9/52ZGiSqsfeaDSyms3IImO0EbOSZq5x7P09HEIqPm34WDMcKt4VJ+T4\n",
+       "+DgPkWbv48RiOIjgGb8rOtFHjO/x8TH3khFnZMmHRrsTc4mMcpHq5Do7O4tff/01IiKur6+j2WzG\n",
+       "aDTKTvNl2icisk+c5Y13gjjO3zE31qOcWcczifR5X1fzGiVx81/k+x/+4R/iy5cvMZ1Oc/yz2Szl\n",
+       "iOIP3p1AG7mxjttsNnkfnDHvfewQOtepYhwk0CH2DM6ou4+zZgSi6FqjK65QLnWxEfzLy8u4vb2N\n",
+       "TqcT//iP/xgRW6f37u4uKy+dwbGzQoViGXwR0BN8+8Jhsm6bTqeZ9nexBPLd7XYzVYjjHLE7g9J0\n",
+       "GKfvmBPmyfckwMMOs/bOWrGmzkyVtAWvPTbQBQl8xnO/db2aI+XNErHjCtgLR4iZ1Iid124DTzXL\n",
+       "0dFRVjFE7BQf9/NCEa2+hEhF7Jyoer3aOJNFJ3KCE8JlR8e5WyNc/Nscl4hI6NwX0eFLjpc/Y+N5\n",
+       "DAgNyFmZ52V8KG82OPNDVdfj42Pc3Nzks1Fu9PIwT8ZIhisqUJ7T6TQjfkeXKGbQKTcJRHGVKQxH\n",
+       "k0YXGDu9glBELuPHEep0Ool2shb+Wz+PeSZlYNTCDpbljMtjMjISEdkHx+lkvk/FFvLksmvfG2eC\n",
+       "9QX1JQ3A+0VsjSkGmujOfAAcJDeAjNhVQlr5sYZHR0f5rowFBMyBCdWc5nvgyIzH46zi4TOca4xs\n",
+       "abjLd/Kc4FywtsgL1Wmnp6eZUuTIEpwdI7KmGDC20omwjKxW26o3o54475PJpIIcgsItl9u2A+X+\n",
+       "Z1zoqjIY4//tfDutz2XklPeBk+Z+Zg8PD4lQ2ZiQlqWS8PHxscLfwSEwWoMscuAs8miHEAe/5LlE\n",
+       "7OQdvXVwcFBJF8PZIhC7urrKzwgU+dzOGvdG97uFCVVim8321AY3ypzP5xUeJPdEN79UBet0J++P\n",
+       "PD09PcX19XU2Ae12u4niYuOGw2HyK603bONwtli3m5ub5Dk5A8G7EpxYr0ZEBpS1Wi3Tr7bTzWYz\n",
+       "UWKvE20aCAqcdvceKoNPbKhtiCsTcfSwX85QsW/YHw5oWXPGYn0CKu6/s1yw1wxKOEgmqCqvV3Gk\n",
+       "2IQWRoQDEmmpGMo2ByguTih3p2WXXfO3GDffs3RMXOrJ37CgRkEQRj7333sBrMxwEF4qZydyXi6X\n",
+       "KYR8RoqKVCeRdsSuf5QNq6Nu7gP6YljVJHLIo2yQ0og4ZcIYfLSE58bIAj2AIrZw/sePH19M3WL0\n",
+       "cEyIbCOqKSPmzQ4U788clGRc1qtsqVCm9vw9b1Dn7RuNRoWYW6ZoWJunp6d06rkvcDNRIo5KRCTS\n",
+       "an4RRoj3wTA5LYlcUTTg93FLhdlsFrPZLBEp0CBImXAXmJtWqxW9Xi96vV4FAUMuFotFpX8N74G8\n",
+       "MUfwgHDkut1uclYs+/v7+3k0zXg8TjmEQGtyr1NK7GnLCM4pv38J5YXgihEy34V7gGSyPhGRARtr\n",
+       "RCqbiz0FksTcgCQul8sYDAa5fyKqPZVAyMq+aaXTFvG8hYoRXkfZyL+dqna7nWmsxWKRzsL9/X1c\n",
+       "X19X+EBGCGazWfK5cEDQ36PR6FmvMuTs8vIyzs7OMt1p7hUBD7zEktOzXq8zZbi/v5/zxhwMBoNn\n",
+       "aMZoNEo6iA2p18IIr8ePM4hMIP+DwSB6vV7ynXD+uCfPQAcwPsYLFxBHLWKbCr29vY1arRYnJyfR\n",
+       "6/XyXY6OjnJPuK0Ka02gzx7FyY2IdMj5e/c0sywQiFgPky6mNUAJWIAgO7CHqmCagBFXUp7ePzwP\n",
+       "O+LsAt/D7uLEsLcJ4nge9obvkUWgeawDKZxI9jfvUrb7wcn0nL2Uzk5Z/OYnb9fb9Xa9XW/X2/V2\n",
+       "vV1v1/96vQoiFbFDb1zmTxoOUjQRBtE1HiIRc8Suyyu/A+aPqDZfK/Oe5hQAIePV4qkTdZdduM0d\n",
+       "cCrP0KMrGiKqh6Jyf5+CTarPUTZjBloEPnbFmzkEnk/fn6hof38/yd9EMUDOpF24iHhOT08rCOD9\n",
+       "/X3c3t7msRdOYQFBN5vNuLi4yFw68wB35P7+Pm5ubrLihkoi1sh8FhAJ5KNM1RC1MWaTjb22/D+y\n",
+       "x5zzfe65Xq8T+SSSK/lZJkA6SgE52N/fz7QiqBPRD5ENURzf46wx5tpcHJNejbKAtPG5D1IFxr67\n",
+       "u4vb29uYTqdxe3ub8rDZbOLu7i5arVYlgl6v1/Hhw4fkh7iBIBGlU8tccEQYt2WRKrFut5vRIvdg\n",
+       "fagUBJmM2O7tyWSSyJHTRUSJruRxeq8krnodifo3m012OvbeBSWjczhrSLoDBNQoJ6gDJH7uxXwj\n",
+       "w6ytkTMIwRzfwkW1WSlnXOZt+iqRca8VSDx7ZjweJ3K0Xq+zkzbNfE0S5mw7UNyIXWX1ZDLJcdJC\n",
+       "wBwxUqkUGjgbYESURqDMN6gnc2YE1BwZ5I55I4UDr4r5RieBZJti8O7du9xTf/vb3+Ly8jLRSLiC\n",
+       "ZA8Wi0UlXcj68LlTnEZWPHdwhU0hccYApPrg4KCS4kdekUPub2oAe4GWBdbtvJtToMwbmQlQS5/d\n",
+       "iv1hXaGC9Pv9Co2A9DjyBk8MvWAkz4R4tyKhjQiFCS4icxEUGR5XjjOf6GrTWdBp2HCnGXlfUCn2\n",
+       "CLLnOSyvV3GkDNFZ8WP8MTpOvwC5NpvNhFkjtpArsCGGpCSN45zYyUKYTGg1L8ZKyqkmFh5j7Od4\n",
+       "AUkZlL0nSKeZX2MHjs3B2MsqpJIcyLsDR5ojw7sgJFQMcT8UP0oPnsh8Pk/eDrC7HUNg6OFwWHFA\n",
+       "IXhzoOfp6WnOM1D/4eFhwtAUFxjqhwhqZwkeh40Pn8HD4vsoPhtzeGKG39nQ/I1TQFQXomic+qU9\n",
+       "gavbWEOgYZTYarU7nNaON8qV78Kp4H7O+TMe5MJwe8TufKgylcU7oPycMiH9sFwuK5VOEbuzJCm/\n",
+       "Nu+F1B6p2Ol0+ozX1Wq10jHFQMORcTDBuLjXbDaL//mf/4lPnz5VeB3mL5YOiA2LeYDMCzIC6Z65\n",
+       "6Xa7eU9aQJiTR0Uba2UHHNIzhtXrw96m+7kvnGd+z/dIAeOAOD1JdaCDv5JD5fSd+Zhlaq50fn/8\n",
+       "8cf45Zdf4pdffokffvghIiLXDb3h3kSs93g8juFwmPJrWkOZIqWCDQ7i9fV1DIfDuLq6yvf5/Plz\n",
+       "rFarXIf7+/uURThDh4eHcXl5WTla6OnpKf/daDRiMBhU9v5oNIp6fXtOG5w1xsH7LJfL6Ha7cXZ2\n",
+       "lu9Zq9Xi+vo6Wq1W9n6KiPjrX/8am80mixFM4KZikDS6U0a1Wi33m6t2mTMcdrh35nHyfdof8C4E\n",
+       "SRRbsN7YRJO4S04xuo3fHR4e5vdwyPr9fnQ6nQp3ydXMvLsLoFjDm5ubrNhmHPSXYqx0qOcYKZxS\n",
+       "+oIhPw5WsFURO1vIO9jusW/MozU/DOcQveiebcgd+5R7WuaojCyvVzsipiSZmaMB2dXcG7xqNjCL\n",
+       "3+12K1VEVrClsxRRRYbKxbJiRoFjcEy6NEnPRh/BZSGN2KDkMepsAr4HImXDx8W7sCnsIBKtosh4\n",
+       "FyrWut1uEqrhRUTszvADzfM5XggzKJ+JfnB9Wq1WnJ2dJZoQEelYrdfrPO/KjgbGdTabJaLFZzg6\n",
+       "VFawEak+eXp6ik6nE/1+Pw0NCh9jbyN0eHiY0bUbF/pdeK433tPTU9ze3maFGvwd5MgOrWUHJ5Uj\n",
+       "JFD0boAKFw1FZySBfzebzcoZdjaSbsFh2cCg2VlAMUBwNZJ5d3eXCALfN8o1Go3i4uIi59XGm3nm\n",
+       "M8Ywn8+j1+tVOCYl/xFZtVNLddvt7W1cXl7G999/n2sI4ReD7v1kbhTOsM9pA91lbzjwMRfJpd7c\n",
+       "l7/B4XXgB9dlMBhUIv29vb0MRHDwuRgz/DFkj+cgZ0Tkrk7ECS0vO03+6eulz9Af3W43fvjhh/j6\n",
+       "9WsF5YO4DErgyjT2Mghjp9NJPdzv95Pv1ev1KoRy3v/y8jKGw2H8/PPPFeeFvwP9M1KPk7xcLvNM\n",
+       "xojI/m8cIG10EINIQOq+aIvFIvl833//fXz+/Dnfj+qzZrMZHz9+rHA8r6+vM0C28xKxa0PR7XZj\n",
+       "Op1W+EpUTeOAuZgCfW3+qxFAG3DOlIuo9h5D/l0JisOGk27Ujf5ZzJ/RQPYUPM9Go1FBSGnLUga0\n",
+       "kOmR61arVRm/Hd3Dw8NEhLl3q9WKH3/8MatHI7boIIEo47NzhF7CmfJa2HcwKMHvcbDN/eUe6BLr\n",
+       "KObeRWXl9SqOFB4/qYKIXZrGREuUBZAnyAiebURktZ4hfSYOMi33QjlGVNM73vS8Cx47gmM4FAOO\n",
+       "0eciSjHCZGXGIpYwPc83cdROHY6Vn8U98chx4Bx5Imh2AlEoJktDHvQGKTe7WwKYqOd3Xa+3J7dz\n",
+       "OOT+frWBHmOgaswRTbPZzLSIoVrSCybSlmgdisXOCQoTAqMhbJNDnX5jDKB5QOtuD+BGfXYwOPvL\n",
+       "zrdLdjebTbb7oEqtTF8C+1vxMzZkHIjcc0GUWcob84Fzhwz3er04Pz/PoMCEzL29vWzCiWww9xSI\n",
+       "EFg0m7tDVnEuSYc2Go1ce6M9/OQ9HQT84Q9/iH/6p39KeUKZsUalM4mzZqTWjhSyj8zxmZ10fucW\n",
+       "BzwLeXUUjtNN9Rb6CyfPzifzTVoCh7Hb7eac4KyQYrdcszYgp95rrDFz4ZSJr9Lh4vr8+XP88Y9/\n",
+       "jEajEb/88ktEbJ26i4uLdOZB33kG+xAdQJuMiIiTk5NKisnoCo56u92OXq8X33//fTZJRO4JLF1B\n",
+       "e3l5mfNNmgd5I3im99Z8Ps/xk1qnrUZEpOOGLJycnMR3330XjUYjq91++umnqNfrcXJyksEn3+eM\n",
+       "SWTVpHfQu/39/TwzEaf6+vo65vN59vsiwEaeQP673W4iwKwXc469scNJ2pM0FX/Dd5F9uq1bt9Me\n",
+       "giABpMdpx8vLyyx0QhbRX4yVtUAnIefD4bBS5X1xcRGnp6dxfHxcQeS4SM0eHx9XwA90OM6d28qw\n",
+       "50tainVMxA6t5nc8m7Qun+Gs4jQZxTMq9q3rVRypn376KQ/AZHImk0luGLxKVwyB5BwcHGSVXsSu\n",
+       "EWLEzrDYKPgqnRUz+3EmuBDkiGr1HdEIC2whBeXiWf4en6HwXYEEYkZkAlQasSub9/f5HgqNyx2q\n",
+       "2bR2QBy1wB0gHWeB63a7cXx8nM6uOQDAtFTzMVesBc4xBtV5ZuYYZIbnkbZCeTtFAGepXq9nbxsj\n",
+       "K0QkODAuVQelLNM9Ebv0FhGiEQSMqnkGXkNXCNmpw5gy7+4UsmdeAAAgAElEQVSpwrrhDODEWN6I\n",
+       "wGq1Xcdslzrj+JuP4UojpxvgMhilNWxObx93eGaOQOGM6DAGUvAEF9zz7OwsOQ9GYJgjy5dTVMjJ\n",
+       "b37zmyynt8PrikxHmlRa4kSU6AdoIYaGCJzx48DwXSNk7Dnv/4hICgF71cYNjh9BQKvVyrUg2KEx\n",
+       "pR0+UHlKwc2vKZ0j6zPzRMrP+NyyZYcKR/ff/u3f4vz8PP70pz9FRMTf/va3jMxPTk6iXq/nfgI5\n",
+       "w7EldW+uar1ej8lkErPZLFNyjIMg+P3793F2dpb8Ghxz5s6ouRFy5h3ZgL+GY2I9jL5Arh2Ut1qt\n",
+       "dPqoRByNRilTpGVJGzK/2BuqzLx3I3aVsDihXBzBwjtafzebzTg7O4uLi4vodDqV+xoNLZ0DgkYo\n",
+       "Edy3lAF0nzuhl8BBibjCqYLX5s7upMDJZphislwu4+bmJn788ce4vr5OO/Tx48c4Pj6Oer2eus1Z\n",
+       "Ggc8FxcX+Tz6eZlS47lBxzBGxg4tASTL9JpyT5hSApIIV5U0PfNOFuZ3v/tdvHS9Wh8pBAoBRdkA\n",
+       "RcIZiKiSxolQmACUK0rdyA0RFBNuBMEGGLTIaRiUHRGqF8opBtIL3BPDY15FRKSBsZPgz8qGlb7Y\n",
+       "nBgxnodBAHKGm8R3UMxwGxiP35Vn9Xq9inMKFPvw8FCB1P1Ow+GwQthzygtugCMs5oBxGBUiAjFE\n",
+       "z2c4OiBzbg2AcmWjlRvPyrj8f55rpQj6CYfLaT8rDjapnXjWFwSl5AXZ8TRpnVQehvGlxoOklcqj\n",
+       "ZTilHmXk1IdRjLLHD+sVEc8CFu4LAsU92UMYKUjZzBv/Zu4dtPR6vUSEnUrEwdjb20s5A62A81ii\n",
+       "unwPeUPpGVUFLSOV7NQAwYAdDL43Go3i6Ogoer1e1Gq1CveGdzGHzbxKnndycpLcj4hdihxODo5I\n",
+       "xM7xQxat7EEjmLPyYr1BO/kuMmjUzwEXOoriHMZ3fHwcP/zwQyVAtL5g7kGWnOrAOWu32xmYcoHU\n",
+       "WuczfpOzkVXz22iJcXR0FJ1OJwMzsg04bEaVSXsh86vVqtIb6/Pnz/Hw8BCXl5fRarUSOcXxs5Na\n",
+       "oiAgzZYBvkOvNgfQIDgEJp5TAk/k3I45xhv5cUoNZJo97gIQv6s5nqwdzzcSbfQbcGEwGFQI3rPZ\n",
+       "LKbTaTYX5V4RkfQP+GHtdju72r979y7pGI+PjzEajZ6BDQ5Y0VEuggENs1OP3seOOZXnFKP5ik7r\n",
+       "4lw70+LCHSNnZVbkpeut/cHb9Xa9XW/X2/V2vV1v1995vVr7A/LXPiOnTKW57BhSecSu8iBiV2WE\n",
+       "Z+roixwqHnvptRslMcGZz0AJTAwnbQWSYfiXdykrhhgvkVeZ1omIZxEE98DrJgppNpsJp4Nq4D0b\n",
+       "kYrYdrgdj8eVqMrnBc1mszg4OIjz8/M4Pj7O+RmNRnF7e5tpv6urqxwHJeBPT09xfHxcicoobWZO\n",
+       "zEkDigd9Ia3EBSLJd3keZEo4KU4ZQfY36lKmec1ZKc8+g9zrFAroz3K5jPF4nKlkv2ez2cxKHSOP\n",
+       "pC+BsEEakVneGfny2XfwPfgPdIG0K3C+OXbA2jwbtAR5Iu9vLhrvCnRffg+5JkJzIzyiMlCr9Xqd\n",
+       "c8NZeCcnJ5n2dfqAdyp5hbVaLStHN5tNBf11I8Fyv4Aqcq8yLdZoNLJQAaTTvAkjJlTY8r1Go5G6\n",
+       "ibQE8xaxa1wIf433odksRFmjZ666fHp6ynmjaenBwUGlOMLvZX6m0Tn0RKlTQK2tm5za43c//fRT\n",
+       "3N7eVhoY+2Bi0IWI3eHV3BfU2e8DQtRobFs5ONUGmowcG6k1b9IUC/ZQt9vNfW/dyl5jTfiMOaYU\n",
+       "v9lsZuPUyWSSxHGQY6O/7AkoI6wheh1CvOeWfUl7D37H2Lm3ic58ZpsF6o48L5e7o5WwR8goVdDo\n",
+       "Nmcq4KchS65II3VHwYCzAOg2yPHmItdq22bLVEN67Z+enmIwGKRtwlZFbNPF7969yxMEsCnsGeTO\n",
+       "FYesL3YNuos75XPGK60xnE2JiCyEGI1GqSMajUbKMPdE9imS4HP7CmU7k5euV3GknNYxHO20gSFE\n",
+       "Jh4yMpsuYqekKB+1AuM5CJoNNFCzKwAQejYKn1nxY8yBVc1TYCyukDLXCRjbpFnehffEULp3FmlH\n",
+       "jL85G+ZWmQD59PQU4/E4SdGcgYfgkEOGq4QTELGtvpvP53F+fh79fj8FkrExJ+fn53F6eprjphIP\n",
+       "he9UKmNibsxboNKPdJuNScSuvBiombnlfSE/WnlHRCpmO2X+CX/AFTERO/gbUqmNM4YrYpePj9ga\n",
+       "WXqekNLs9Xr5txhSOA3wMCIibm9vM4WEQvUY4SqYtxMRWQlnA2p+IAaFTuSsPRwtZM18AO5FqiIi\n",
+       "KsrUMmtyLIaVdOfNzU32rfr06VOlKhM+G3NIOshEb+Zlvd6e3k7lqGXGnEg4HIbq4UAwPubG6wxH\n",
+       "h3lttVpZcUl6yOnC0pnlYo0wXE4FjEajuL+/j4uLi9QbdhA5YHc4HMbd3V38/ve/zznmvexc8NNp\n",
+       "X1+l4+cUph2AiIhff/21shakmajEYn3h0tGhfLVapT7ivlQK2wGJiOSp8ZnPW8Owsycs41Qzsn4O\n",
+       "rr3PXIwQEan3cMwbjUaSzd+9e5fONfLhAMs8QvY7Y8Bg8yz3WGK/8X52os0psj7abDZJQuff5hE2\n",
+       "m81nnEDWyUUxpMC8F1ln0ocGF/hbFzogJzinFH69e/cuIrb74vz8PHmfOFURW71/fX2dsr2/v5+6\n",
+       "7eHhIb58+RKfP3+O3/72t5WWGlQ3kvqM2AUWyB56db1ep32bTqdpo2q1WuVoNVLAFHfM5/PcH4Ax\n",
+       "jLOsji/9Bj4jBfpSej2/+81P/g8v8pauFrLQIAAm7bnizYtvZ8ZRe8QuakOoysi7JJv6+Y7WbUB4\n",
+       "X75vwhoT7aM8jEiZO+LokucQJfmAWQiuEdXjSzwvEZF9TJgXzoUyKdh8H8aHMbGRQqFdXV3luxkh\n",
+       "wUHBkHFPIiScO3MFUGxU5pTVfigj5huFxLuxdo+Pj1kRgwNNPh9SKhdzyvoSXeMIs0a0V+Az+A/N\n",
+       "ZjMmk0muhY8cQLFBMkU5mC8GGZ41RkGVJG6M983NTUZJdsBQligpomsCA+8HG0nmjEDCzhKRMEEL\n",
+       "ShqSPPJghIggBXmwIqLvDITddrudlTu3t7dZHYRM2JB6PxiNQ1lTseX9hDOCM4Excdm1uUIO3vg+\n",
+       "ssG8sm4YgYuLi3j//n06oKAjROwPDw/Z4BY940OEicprtVpWJF1eXmY0HLFrDsraXV1dxcXFRc4N\n",
+       "a8feL7lOPq7HqCqf2/HyxTEnrJsvdK3fjYOM2+12GtcSAaXJoVFLyyRkZZotRuzadCBXnU6n0rCS\n",
+       "Y4WWy2UliGJPE5QSaPC8h4eHSpDI3v/06VNcXV3Fjz/+mH9vJ4PgaTKZVCpPHUwQNDDfkJRxvspC\n",
+       "KS72hwPvo6OjdBr29/fzPV19yl5zxsSOEtkdZB99gHMLZ4h3KHsnOTihgq7Z3DY/paLz9PQ0UZ5m\n",
+       "s5nBTcS2Jxm2ECAE2UHP0yeuXF/QSc4UZD9RHW1nz/J0dnZWCdC40D3YDXNRsYMEciXflmpcOGLI\n",
+       "BUjd/8aRehVHqtfrZfUVAoeRR6ggUEZslTT9Rdg4DAohgnBtEqArrEqCNwtvhWxSLUqI71uwXWXm\n",
+       "ijZHT3yf55WOG85ExI5UawKuy/95BgbIZGM2L4icET7DxlbsEZGQKIRODBIXzsavv/76DMlbLpeZ\n",
+       "hjAigrMUEdmjyO0tVqtd1aBJpURyGFEjUihLkLjJZJLPoFM6UW7Z/NTpY1JmXKQFSf+xvm6QiLLE\n",
+       "sELCxvnwe/L3pBIwLEYzkGGIklyuvnt4eIjb29scI4qBtfeJ9LRJsJxZ3phn5p8xvkRCxjljD+Hc\n",
+       "4FRzT5dwu5JmPp9n5Vmj0YiLi4uMIC8vLxPZfHh4SKPJO7CnqMJCLjqdTnz58iVubm4SBfHl/cS7\n",
+       "IgPj8Tg7R5OO4sKhx8AYWSEAGAwG8fHjx6jX62l4Op1OBitfv37NMnq+NxgMot/v53p6bgkMv379\n",
+       "Gk9PT/Hb3/421wXjM5/PkyjNviC1amPhtWPOynSpf/rid3/5y1/i69evSX6PqBLmkTsCBc6WI5r3\n",
+       "gbPIHc7GbDaL8XiccorRvbm5Sb3utbSDeHJykg7/aDRK4i/Ov59HyfzR0VEiZr5nxO4sv48fP0bE\n",
+       "roUHAbD1ED33aFECshWx1QsnJyd5Xp4D7H6/nwcFWzZ5D/QsQSQXusAUBC47otgGf45MzufzyskD\n",
+       "EVEx+OhPNwYm1erKNr7X7XZzb3BuasRW3i4vL7MIxRmMDx8+5Fhubm7i8fEx3r9/HxFbBJB7zWaz\n",
+       "DCQiotLDirV030Gcrtvb23S6IrbOsPUgKDj/z1yCaPJ3puo4pcvzsPvoBFePImfful7FkcLbNC/J\n",
+       "PTrgrhius1Iw94R7kB4gguOeEbs2ARZENpOREITf1QFGXriXK8Kc0+edHHWXXi2esPPCKHtSNVQc\n",
+       "ROyiOJQNqE/E7qBcKhfN2Tk+Po5ut5tOG/NtYzqdTmM6nVacFuYXJKJUUEDFdujYUMw/EZs3v6tz\n",
+       "+En0hQNK0zrGG7FzMuGL0MuEd0Ehong830bpnKagfQYOE8aP77m7up1T+EY0uDMCRv8bokKUjbl1\n",
+       "pJ7NiWCN6cCMgkSWcFxecvpIkRDRW7nxfBSt03esNUqjjLZA20i7oKhRJn4PDJtThKSMkW+qfVwp\n",
+       "ZCcatKHZbMZ0Ok14/+LiImazWXY+dnNUno/zBKrG5yAoyCHOIWPHmWU9uZjHz58/Z4QKKkHAAc/D\n",
+       "UTl8OuTf1ZU2lvf39xUuI++F03RxcZF73zSFMpXotSJ1478xEv/SdXZ2Fn/+858rUTkBKXwXc0je\n",
+       "v3+fmYThcBjHx8eVSlgHHvV6vVLVh7NM1oCu8BE7ngxzY4dsMBgk3469ZNSNYA39D1JNzydoIEbW\n",
+       "/uM//iP+9re/JQJqFAhH6eDgIA9wdpNVgiiCVqNjFxcXGWAh74wd2ggUBKPmpMOQSXQC6+dWMdgg\n",
+       "Ag8jfebVEliwFiUPiipAjgdyS4mrq6u0ieaj/vrrr3F5eZn7xg4KegLbhTOGTLnNA9V/rP1sNsvK\n",
+       "SYKSiN1egydFo9SIHXcQvW9aDgEujp3Ts656pNLTHEdQbeaQMczn8+SGfet6tdQeL1miR6AWODFc\n",
+       "NCpE8ThfjIOCw2Pjzabi9ybkAY1yH6MpNr5lZIeQowAdXUfsjpkw9I0D4ny/eQ/mcTkqZ554ptMp\n",
+       "tdquLHM+n2fkHhHxu9/9Ls7OzlLZ3d3dxWKxSMV4cnKSKUQrOq8Jl9N+KFenVu30mBtg5Q5s71Qk\n",
+       "yoYGccDMTtE0m9vu68zL6elpKj7Wlw1vrgibCyccQxWxc0AgkptbZIgaJ9KRtZ0OOzVE509PTxkt\n",
+       "YVAiIlMUoI0YgIgd6RRehOfNJcOgB3aWcbQYn5EJ7k1EbzSWZ5lLxboh1/BIUHDwFnHI4b2wTqQv\n",
+       "acaKHHW73XRqHfn5e+xheDYRW+T64uIifvnllxiPx9mhPWLXTBNHj+eSTnRKlvVzKh1FaeSZ8eME\n",
+       "jMfjXD/WolarpfL/8OFDBf2Bs8i+9BqQ3jg4OIjvvvuuYoRYb+TGTq05ayXnECQLZMpGmJ9GHfle\n",
+       "rVaLDx8+xHfffRd/+tOfcl+s1+uM9klPMY5+v5+BAHqMwhLmFK4L+sIBH0R25NdkbNoGNBqNisGC\n",
+       "F8dVNlBdr9dxdXUV4/E4ut1uXF1d5d9++vSpksr/7//+74jYpqHa7XY6TJ5XN1TudrvRbrcrfZSQ\n",
+       "U5xijK6dwdlsVkH+4Sbyvm4Zgt5uNBo5d+xD7JMLNRzoghzbufIeNg/R6S36ZsGpfXx8TJtxd3eX\n",
+       "AUir1UqHGlm6uLhIp84ZjM1mk73a3r17lzy0iIirq6tETWu1WnKbkKkPHz6kvqGfGrJIsQPAirvT\n",
+       "w88iu8VagIb2er3UucwLndVxgh14LJfL1LHT6TQ2m02ll1/JTy2vt/YHb9fb9Xa9XW/X2/V2vV1/\n",
+       "5/VqVXv9fv8ZikAUQ8qh5OxERPKjiLJo3mkkxCiBkShHZvw/qIJTGE5dAJmaiM47A3OaswT5GmjU\n",
+       "sKKhSRNn+W6ZiuAivQWPjPE9PDxEr9eLwWAQs9ms0pQPGJr0C+gNUQrdyyFmkl5hfUjBEXkxX6CJ\n",
+       "vIcjOqBpyNucvxWxRU+ISIG/mQ84KXt7e9nh3pAr9wbeJzL59ddfk/sEWmLSN9wJkEevuSMyCP4R\n",
+       "WySj3W5nBF6v17NBJJC40w1EQrwTCBc8EsYBYkWq1VVdoJ6gjfBPIrYVlHAZIHkyb05xR+w4UxE7\n",
+       "zhPRoFMBEbt0KpEu72IEjlQLc+Mzp0B/TbilupKUmDkIFD+4fQZjIAWM7MJNAP3sdrt5aC3R83w+\n",
+       "TzlbrVYxHo/j69ev8eXLl4iIbMLpZo+G9tm3oGtGkyO2lUgcqsp+A9kGGQWZYQ1Bl7g33yNVwnuU\n",
+       "CCgoFrxEX6y7OYq+zDcyqlYWiXA5ffTp06f4z//8z/j69WtEbLtQU5FFusa6lEIH5PPk5CSfyfhd\n",
+       "Ecp8s+YgLFSbITfozX6/n5WvjAPSb71ez/eMqHY2J51DupRnn56eRr/fj9FolCm63/zmN3lP9Dry\n",
+       "FrFNRXGI88nJSaKxyB060aijK/u8xyMikRGqip0VsN0A6XXRCLaNLt9lk16eYbsVsTuuistI12g0\n",
+       "SltxfHxcscHwzricMmOcvJf5kegEF22QDm+328kZfHh4yGdHRLbccUW4Dzv+y1/+Eo+Pj/Hu3bsK\n",
+       "pxQ+LHrbPDeeR8aHTA4XSDq2hjHQtoH9ad4pTV+/lV6PeCVHCoIwXKKIHcHaUDSTymIDZXpiSF2g\n",
+       "IJz6QqHhSNmZgluBUTRUyb9R5O4zQ2oRCNgOj50gv1fEjgQXseNQmcM1HA7j9vY2jo6O4uzsrMIF\n",
+       "gW+B02LljXJ+9+5dDAaDLDnnMFIcxF6vVykzpSU+aUYrPgyQjygoOzDbGSyrGZrNZh4wbOPNvLqU\n",
+       "mDXkflSN2JFy6sVVJ9fX16nQmRN3jPYGY+28TuamADePx+NUDDiNjI9UAwaYiilklZw+z3AKezKZ\n",
+       "xGQyiffv3+ehruaXMHbS2nx2cHAQJycnCW2zlsiwKzLPzs4yJYqRwIA7XYwjAXfKF9/ZbDaZjsAI\n",
+       "8Ty3DWCPNhqNVHDIuVMGw+Ew96DT2jb+vCvfpwUBzvloNKpUxV1fX2e65/LyMn744YeUDVIbTj07\n",
+       "nWR+n7ll5h26fxjPpOoKjh1zM51On6XeGCOyzTjtNBLsNBqNbDOCc84zPAYrc5ws7lOm8JiH8t+M\n",
+       "t9lsxsnJSVZmXVxcRLvdjtvb27i9va0Y2f39/ZjNZsmFwfFjXKTfn56eKkUVvCdkdSgRDkx9TFOp\n",
+       "L1xp/C//8i8Vh+bo6Cg+fvwY4/E4uWLIHSn/6+vr+K//+q98HgUDi8UieaSlc0p/osVikfrUtgqe\n",
+       "JrIPb5WO3m7Pg2Pp9J5TsI1Go1K0ZMcV3cT9kQvmCeerPOYJ2UN/m1B+fHwc5+fnqVOto8zDxB6a\n",
+       "7oKsQlnwHnIHfMtNr9fLoB7Agz36888/Vzh64/E43+XLly9ZWdlut+PDhw/JdQOU4ExDgi3mhrQ0\n",
+       "h7M7ONpsNnlW4V//+teU/cFgkNV+OPTsLfMUqagtr1dxpMjvll4tXAKicxaDc6vgdNhbRGiILB3R\n",
+       "Ifw8y0LsEmYiDCMroAYvlT26Ms4KjHfDWDi/D1JhQ/GSMn16eqqcgk0+nzy0S5U5uBKSMgowIioV\n",
+       "SzhiJo1jmMy9MdHO5FAUE99jva6urrKiISIy0gS5MRF/MpmkU8icMd84RxDcjZwxTzQ75NkRu8rC\n",
+       "+/v7bNpnQ4sCKvlaJlkjP3wPQvPt7W3c3d1lg8GIyL4lJiszBhwX/m3kBJn6/PlzNn0zQsSYuU9E\n",
+       "VGQEZTkej/MMsIgtx6DValVIoW5iCzLGs8z3idg1nywDGivR5XKZUfnd3V1cX1+nvG42m6zOGY/H\n",
+       "iW5yXxdTLBaL+Otf/5rvyBgIjnDQzGlAXnD44Yt5fYfDYfzyyy9Z/l46y+7NZXlCJpALO5n7+/vR\n",
+       "6/Vivd6Wj1uhPj09VQoL2A8gIxgiE1kJPlyk4mIZgod+v18x3qvVtscUjkvJkzLS5gAUgjLyXiId\n",
+       "zHOj0Yjf//73cX19HRFb9BMngfdkPs3fw7Fzs1f+DWJhfXN7e5tFA3zuRoinp6fR7Xbj6Wnb+45x\n",
+       "wFEimIIvE7Fz3AheTEbGOR4Oh3F9fR2r1So+f/6c+4tiBypljQ7u7+/HH/7wh6jX68mVYX0jdsU/\n",
+       "oOARO87O3t5eHrXFPiQQsWNqp9aBi4P59Xqdfcl6vV5mFXgXZBi75vWnsMOOmREbkPQShUXGCRQN\n",
+       "WHDQNnbawQH6nMpTAsKIXQaDNg+Q1nkW3+cIM/Rwp9OJ7777Lm2BA2KyGlSOl4EQsjUajWK5XOY+\n",
+       "RPYbjUbc3t5WirOwQWQyptNpIpVfvnxJbuO//uu/xkvXqzhSGGorN2A1elmQzop4XhHiRfa/rTAi\n",
+       "qtEYRp1n4kRZsTh9BcRpUjn/Ngpl5wylhqE1ub1EVFarVTognU4nzs7O4unpKS4vL2M2m1VSMwg6\n",
+       "iBJCOhgMYrVaZentcDjMzQYi5qpDN8KL2HWqxng41eQ5Yw2Yb0iSpCzc0I4Ig/n0mWRuzIeBiNgR\n",
+       "JSFrU5USsUuXsoGdLgWaRolMJpOMMChHxyh44xNV2yEBirYThcPgA0kpjUZZo6hWq1Wl4Ruywbz1\n",
+       "er04ODjIChDkLGKrpLrdbnbh9Zwyf/f393F6ehrj8TgV0XA4jF6vF/1+P8meyAo/SX3a6WONTMJE\n",
+       "gbl1B8gYYyQQuLm5STQLgu9kMomjo6NMp9jQNBqNdISbzWZGft5POJcuCliv19nAD3njXZ6enuLq\n",
+       "6ip+/fXXNL6Qmnnm/v5+7sMyILLzaDSW7xE5+zMj2qPRKNFDLirPQFGcJiEQwhHBwUa2QQBBKCIi\n",
+       "x2wEzeR+UMCbm5tKew/3PeNykQ0FHfRp+vd///eIiPjzn/8c9/f38f79+6xAdGrdzjl72A1+F4tF\n",
+       "XFxcxN7eXlxdXVXSJpwIQWDrVjMYRQJXO0uQ6NHRrD+2gvHa8NnB/OMf/5jBW8SuSo6siPfFbDaL\n",
+       "er0ep6enqedcsdrpdOL6+jr76xmNbbVacXt7G4+PjzEYDDL4QHbH43FsNtsGnD6hgzYZ0FV43nA4\n",
+       "zDUsq2fZowTAzWYzyfPIGvOIw++ihVqtlojLarXKeSNd2Ov1YrPZVA6md7NN1sL91QhiIXsbBQLp\n",
+       "wvHD7rFP6MdmSgt945A1n6WIXf348WMS/HmXZrOZ6b6IXU/EiJ3TTqFVt9vNsYOaEVw53U/BgB3O\n",
+       "8noVR2o0GuVklGkTHBcizojIKoKyd0TEzilztRUbzCgEShBhxOiAjjl9BDLmlKCdOlAl7m+kh0ox\n",
+       "IjpHCfA6rMS5Hh8f4/r6OkajUQwGg/jw4UOOgQZ2bH6X82L0cTKZT3coJvdMaoL7YvAwinxGrvzw\n",
+       "8LAyT8w74yKv7Bw9qZiS74JTgbFjk3udWHfn//f392M0GqUid0oQ5OvhYXuwMohAxFb4z8/P8+gN\n",
+       "K2FgZqMXLlf2cUSkynhexBaVc1TI83C6eE/Sg4yfZqblkSVUQ/EujgQZEzC+2wpQsQTChpFkjPRK\n",
+       "YZ95/DjROOpOKcBls6xHbB2u9+/fZ7PW4XCYn83n80z9oNjMa6CSjxSIy9hJT/L/Tr+jwIbDYeVI\n",
+       "Kar8kBVX/vBdl6lTNcsaI+dl+tcVbziOlmF+T/NAo6TsQ9AVyxQO6GKxyCNKeA6OPfwbBxFHR0eJ\n",
+       "hP5/7Z3bT2PZ0cWXaWMa2/h+AWygQd1z0+RlpEjzNA9R/uZIUf6GSEmUzGS6e7qbuzE2Ngcb8AU7\n",
+       "D9avXMc9Tzx8LX3aS4pmJmBz9j5776pataq2NyaDwcA0Qt1uV/1+386Fp6cn5fN5NRoNc+wZsy+D\n",
+       "Zw/SY4keTugJaTnB2IfDoTG0s9lM5XLZfu6ZQh9YsjdoLMzvw7qhWUGz49cw84PzcXt7a/uRWwzG\n",
+       "40WDXpxHSbEeQVw+jWNCdSB7yTughUIhlkb1aT/YmGRy0SMtiiIbM+mgYrFoQS3znEqlTCPFmcLY\n",
+       "fcUx5yOfQz8E00c7EtYte4Bn8sFnIrFoAAsryhnP+/dOIMwkc9Pr9azSHUJDWuojsSFeO8j7A163\n",
+       "R2YJFsy/X9bEdDqN2SLWMA4NAZQ/izc2Nqwz+tPTU+yKGGwCJAu6Sc4yxlWpVGLNrjc2NhRFkW5u\n",
+       "bixwl6S9vb3PslKr+GKOFHlOf8UGBoiXhiMlLfqerN7qLcU95dXGljhZXpzOP3E8+JxPbXEg0pTO\n",
+       "pwYw+Gh9VoWqvHwOFBYbEdNkMjFD7e+J6vf7ur+/j/V/YnywB3jXUPGMhwjf66d4HqJrFqSfN3/I\n",
+       "ekeKUmUcH1/q64Wk5N854DFoLHif10+n06pUKhaBemeL78EZ8NojhMsYPH8Y4wASBUlLkSMCxX6/\n",
+       "H2OUeC6cDy/gZHz5fN7WWSaTMUMaRZEd3slkUvl83lgHjAA9oXzKyn8WJxvjKS30bL4Tt3fcYXJo\n",
+       "DUG5tLQwfHd3d7q4uLCDnO9kbBy2vmCAKBedQavVsvFXKhVlMpmYTocIkkN/Op2qXC5bBM+eoS8Z\n",
+       "zgfr4P7+3kTorH3ffyuKIpt33/bk9vbWUl69Xu+znmf05vHOFfOdSqW0u7urXC5ne9TvYd4Nh60v\n",
+       "RAB8xjsJXkbgtV44egcHB3Z3IoYtiiIlEglz6DudTswpIogcjUax9D3P9vj4GOsIzb749OmTOfTd\n",
+       "btfE2AQ5iURCh4eHtsZY+1yjkU6nY+lQuq/joNLzin2B0ebM8OkmdGX8fXpAScs+aQQ8RPjSIm3y\n",
+       "4sULFYtFY128EwLTSfoHNpbg4Pb21hjVarVq84Yjvds4flYAABpXSURBVCojuLy8VKVSMQbEBwqM\n",
+       "zeuEGD8sm9fq8jlK6nkXJycn+vjxo+3RSqUSS2Ph1LF/vH3yDBD7jRYFPoD2bDfnLnaBdw0DuLa2\n",
+       "FkuZkW2AdfdOCM4cbCw6QXp/ITHAxjGm9fV1W+f5fN6ehQantLhJJBK2Lkhdsjc8G4m8Zjpd3DTg\n",
+       "2ejxeKybmxtdX19bcMI53Gw2VS6XrR/dN998Y/vm9PRUNzc36na7xjwyPjRssMypVCqWpfAp/t9D\n",
+       "aH8QEBAQEBAQEPBMfBFGisgSz15apoyIeL2omiiBiNAzPXzOs0M+XUgEwb8TmZDaQH/gWS5y2TTm\n",
+       "8+JQIlHYJ88e8D1EKz5iI/r1FKFPtSDApE2+pzHxzCmt9VE3bA7RrNcekGZBFLqaZ26323YPmk8p\n",
+       "Ua7qU5Bem4KQ1bM4kqxBny8/ZbykJom+faqCVImvuOK7vfgZ9o3IgL9HOs6zVfP5XFEUxS7s9XMP\n",
+       "lcuaIKLhxnjP4nhNQyKRUKlUMrbOp6g8k0ZkzdqA7vaVekTXzGsURapUKrHOz2gWSKnQDkFaUOOU\n",
+       "aHe7XQ0GA0v7oVMj/env/4JV444wIjRpQWP7zxBtsmdYZ8Vi0VJCfr6ZcxgWaZkW4bn81Tlra2vW\n",
+       "yRwdG2uYVDjVUIVCIaZr8lE7TLAvXoGlbTQaFmWz3tifpCt888LVFCP7Ah2FL/HnPdF4sNVq2dzC\n",
+       "km5ubhrbBotyfn4uaVmZxtodDodWfp/JZFQsFjUYDCwdwfhgXa6urmxvsZ9I+VarVV1dXcWqDz2r\n",
+       "SVqeNcn4SI3V63U7hyaTiQqFgqV90MOwvmHuYWN99TSpTtJw6KukpS04PT3V5eWlVU9Jy6tAjo+P\n",
+       "bd/5DtbpdFrffvutcrmc7u7uLIVDSh0dW6PRsGqr6XSqVqulXq+n3d1da8DIGGBIYBR9yj2dTqvT\n",
+       "6RhzzPh6vZ5ub2+VTqfNDrG3T05OYvqaRqNhFyhvbGzEpABek3RxcaHxeKxisWjsmdfs+AKp8Xhs\n",
+       "ejJpUX3Z7/f19u1bTadT1Wo1NRoNSUu92uPjo7GDvvCB4hWfqeBzSAzQXPobQPh39LPsb9KjsLGk\n",
+       "8JjTfD6vp6cnE3/zOa5SWltbU7/fj2lqT09PrZ0KFYOcw71ez4oLuIMVZml7e1vv3r0zFnK1dQxn\n",
+       "P4wqa41Kep8hW0XC56P/r/CHP/xh7juKS/HOvb6qQVrePF0qleyaAg5pShlJAUBPSktdFNVgXliI\n",
+       "ocO5QTAnKdbrhJQXC4VUIY6d19r4sUAN+2oPxoTjxBxQiVUoFFQoFGJ6Hi9EXK2k8BUHOAYYCyp3\n",
+       "vHaMPjg8D/obRKS+vHY+X5TPYpg5UL2zhR7M38pN1RYb1lfYcXijWfFCVRYwug1f0dfpdKwvjK+M\n",
+       "89oSxsq88Tv0+2Fc/AwHGI0RhxsVHVT7+ENwfX1dg8HAel15fRhOFWlRUr9Q6rxn5hCaXJJpPxD6\n",
+       "stalxdUc3nm+u7uLafJwmK+urnR+fm4GAz0S2qDVi1RxJEajkY6Pj23d7O3tqV6vm0aCdA7PjbOO\n",
+       "QN47SzjfpDh8FRWaqcFgELsf7MWLF7aGWKf+3V9cXCiKIkvdMAacUu/o+3TLcDjU1dWVXr58qaOj\n",
+       "o5hg26exMZZ+TpELSPFu2uPx8hoJAhMOV+4lo/LHH9TVatXaXuBA4GSVSiWrTuNqDap2EeJj3Lmv\n",
+       "kDn1lc/VatXu70NTRnBRrVZjmlKq1Xgf3rn3GstEImHPyTlJawPmwwcuqwVEjB8ZA2lFr6nhwu52\n",
+       "u22OEHuxVqspkUiYpsj37aLgYWtrS+VyWY1Gw/7e4+Ojzs7O9P79e1UqFX399ddmvElNzedzC1p5\n",
+       "11EU6e7uzopkstls7Cqf4XCof/zjH2q1WlZJzbyVy2VLaW9ubprGdTgc2rUrs9nMzkVpmYJFD+xT\n",
+       "R/72hVwuFxObJ5NJczbb7bY2NjaUTqdj/dem06lVVfOM0kLE7SvLCYwl2XdSiOXtF1WO8/ncbNT7\n",
+       "9+/tc999951+/vlnnZ+f69WrVyYHmEwmVrFYrVZjmkNsGmelT7NeXV2ZNhDNH2NAZ8zZtrW1ZWlP\n",
+       "bFK9XtfNzY1++eUX2/dc+5RMJk1Ty3qChFkV1EsLG/LixQu122396U9/+vwCS30hRmptbc0Wva/4\n",
+       "Wl9fj/Ur8oyHL/30bAaHBRvcfweOEwI0aekI0HuD7/Flk2xuX0Hm8/ZeaLyqn2KBIub0wlm0PkQX\n",
+       "fCdOXDqdNgbJN5dDI5FMJnV3d2cGmJyyv27Fl3Hzt7i80Y+D/49oCPEq33tzc6Pb21tzGLy+SFoc\n",
+       "avl83iJDPnd8fKwoisxArGpO+BuUOktLUS0OnXeqiXIxGNLygKbBo2986ZvjTaeLcuZUKmVVSowB\n",
+       "bRxrw98Ej1OLRsUzefx//X5f8/nyMlA0DGhyJpOJNfXk/Xujg26En2HEiIo51HC40On4y3ARB6dS\n",
+       "KTUajZgmDQ0U7CG6BklWyQhz5LUwtVrN5gxhOIfpcDhUNptVFEX6+PGjXr58GdOlsI8ox/fsEUYf\n",
+       "FpDnxCHDEfHaCwpMBoOBGVPYK94xjrkXpjJvu7u71vx1Z2fHnBgcvnw+/5kDRrk9/02jWNYG0Tdl\n",
+       "8J5Rf3p60qtXr5RKpXR1dRWr6GT+0ZH4829nZ8cYG9hHSeawZTIZbW1txYpzuMQ5n8/bO2Psm5ub\n",
+       "+uMf/2hOt69ikpZnA/uR8U0mE9NEnp2dmZPC2KlalWSVxexJzksKMWhPwDhms5lVl/p1ivaRJrj+\n",
+       "XEAjx88RiEuyd3t+fq7T01P9+uuvtk8JKJvNpumT/HnCBbgEYN5phhm+vLy0fSVJ+/v7kmRViVzS\n",
+       "zXeyBrPZrAqFgmq1mj1Lo9GwSt9+vx/TjsFKYavY59ij+Xyu6+vrGFtDtfZgMFCn0zG2knfDfqvV\n",
+       "auakMEaCx2KxqFwuF6sQz2Qyarfbury8tHXGvPG7vrCJYI++VNgwX0jVbrf1888/K5FIaH9/X6lU\n",
+       "KuacszZWK1ZhqllfPhNBQOIzUfysXq+r2+3qw4cP5tijGbu/v9fXX39tLVXIPDHfZJl8wCgt+l35\n",
+       "c/f38EUcKV6sd5a8ANRHCR7Q8CxWaekskbrxgjWMCYccDSH5GVVLMFK8RDxvnomDQVpS6kRglO1K\n",
+       "MuMLwzSbzWKfI6UAo8VhQrNMIkSfaqI6hMh0MpnYRsRRQ8BNVAuurq40GAxULpftQPKVPbAgPL+f\n",
+       "c++g0UZAWkZK9OfxY+RAvr6+Nq8e8JzMi4/6+S5SDr6ij8ibCAm2h7mRllU6ns0gDeK7gnu2CoaG\n",
+       "6MynYPl9L+Zk3aZSKe3s7KhQKNidVdLSIYBBHA6Hlu6QluJvbj/P5/O2hjlcMHoIM/2cEmV6VgmB\n",
+       "Pv+sVCqWFoJRXF9f/6xhIVE3hg/hvCS7ccALSZlv3gtj9OwDQk0ON290cRx8xZhvnOqDFtLYkqwn\n",
+       "Tb1eV7lc1vHxsfWiqtfrxprgTOHA8s5SqZQxdDQ2lRbCcIKZ1QDOV1nCkHpDy7/Dhnr2m4o9qjc9\n",
+       "fOd932NqlWH1UTKXa1NE441JsVg0doGzjLU4GAz04cMHOxP93/BVU2trazGGiRYG9GvyDBWGnBYO\n",
+       "7C//7Gtra1ZtNhqNzEg9PDxY6s2LwKWFker1etrZ2dHu7m5M1sHav7i4ULvd1uHhYayaud/vx6oM\n",
+       "WVPMCeyXvzOR85ssw/v37038vbW1pUajYa0OfLuYx8dH5XI57e3t2VlzcHAgadE24uLiQgcHB/Zc\n",
+       "f/3rX+3db25uqlqtqlwuq1ar2edgx7BhVIIy9qenJ/373/9Wt9vVV199Zc7Z4+OjOVc4jY+Pj3rz\n",
+       "5k1sbp6ennR5eRlLpZKi7Xa76nQ6+uqrr+w5j4+PLcCjnx7I5XJKp9M6Pz+3NeADpVarZa0/fIFP\n",
+       "p9MxZom0N2xVpVIxSQupQtjITCajfr9ve2U8HltWhH2Gffad67e2tpTNZu08ffPmjZ0BOLG8y3w+\n",
+       "b5/DtsDCUpAgLc6LQqHwWR8+jy/iSElLw7BaTUeE6L0/r0WRZAyUpFhUwMHBYYdhJQLCuPjPScvI\n",
+       "1lfu+FSA/11K2HnOVCplC2NjY0O9Xk9RFJlh8uwBDgRXl3iWLZVKxQ5aTzn6hURuXIrrQGARfOqO\n",
+       "AwWD4Z0byvFvbm60v79vz8ff9NV62Ww21tuH3+PfMV48697enu7v79XtdmPMCvOcyWRMTyDFW/6T\n",
+       "5vOUMiwfhhNnAmPqy/R9+wMOKFJ0bAR6LvE8/A3WAhEO0aq/uBSnpVAoxFI7OBZoJXDQvd5HkqXD\n",
+       "RqORbWK0X8wpjirrTZKxlb4ykxQpTCjODHOKgUGz45me8Xis09NTiyz5WbfbVaVSsaaD3qmCydjc\n",
+       "3NSrV69ipeI4it4I+opc5pFUnq9mJXIkKmW+ifRpnVAul+1S4tFopHq9bqkftFnMDbokjGkymYyl\n",
+       "p5PJZKwa1AcgrEVYUNY3jiLVXL7FA3uPYI10DOPHMPr+PqwpHEbmh/3ty+ZJA/n9S4n34+OjisWi\n",
+       "ORn9fl9nZ2exNKgPaljz3lGSZC1SZrOZKpWK5vO5rW+MDAEk/+N7PfOKDsr/rFgs2lUng8HAzsft\n",
+       "7W2bS3SrrCmY0mw2qw8fPkiSVYNxsflsNrMGioyfliewO9PpVH//+99tHD/88ENMK8Waurq6UrFY\n",
+       "VLFY1DfffGPMBXNKehVHAaaD88obaJha9FS3t7eW5oLl2t3dNe3PbLbo1M/az+fzxhaxdxnP/f29\n",
+       "vQ+qoX/99Vf961//kiS9efNGe3t7lm73QeRgMLCbMzqdjtrtdkw2AAt8e3uru7s7c8B++eUXTSaL\n",
+       "ZrTdbteaFUvLimQyF94BOzw81Pb2tq6vr+1KMt/nrNlsmmNN5oWfkeYmJbxaOV6v11UqlfTbb79Z\n",
+       "Wvjdu3c6OjpSOp3WxcWFOp2OzTeMKHKN1bYzrP9EYtGTkO9EcuOD2FV8EY3Umzdv7I96r9azVJ4d\n",
+       "yWazajabajabdhXKKiNFPtl/bnNzM0bT+V4eHGb09PG0OX8TI8LhIC2vOaGkFO9dikcY5IN9SwGM\n",
+       "KekkL+QkDUa6yIvicQRJOWH0cbwwpOSbpeXhiWCYnileIEg39GKxqHK5bAc/hobfY76AT9PA4PA5\n",
+       "5nI2m8Uas2HwPdPCd2PQ6NQNCyjJ8tnj8ViVSiUmusTB4iDzuhxvCH27BsbkmcHZbBZrrocjiYbA\n",
+       "dzDmbzDnvj8LzifsTyaTsUODd8PcIZDnXfnrUNDZSDJDAgNA2llaOrw4qYPBwETMk8lEzWbTnHDK\n",
+       "haVFzn82m+nk5ESFQkHFYtHGkU6ndXR0ZEGIF2JDfTMOr4GDcUokEsZUcngzxxyI6+vrsdvacZIz\n",
+       "mUyssaJPAxLt8g4vLy+Vz+dVKpVMlI5j7PcbLC3sjbRwGCjEYJ/jMGD4WNe+mSNGiBQsncj9+ycl\n",
+       "6tPtOMY+bcm6YO1ykPvmqDC1fD6bzZp+6vr6WtVq1fYbjLUkffz4Uf1+X5VKxXRABJ6sJVh6f/6j\n",
+       "b/RzBDCQPgDxzhntOXq9nvWfYm2Q1vPCZu8s8844N3CW7u/vTRf38PCgdrtt5xeSBa+VI/2Gg8V7\n",
+       "297etu+czWb67rvvNBgM9N///tfmhGeCASSI5NnQ5uKcE4Qyhul0qlwup3q9rul0ao2BOUvp7l6p\n",
+       "VIxZgsWhaMQLuHk3tIB4//69sc3FYlHNZtMC0kKhoMfHx1ivLNjLFy9eGLsmLdiV+Xxua+ef//yn\n",
+       "rUlauLBPYCiZ70KhoGQyqX6/b20kpMX9hTBLPvCQZFf4UDCw2sKhWq3GdHO+8IFiGPYSNmhjY8Ma\n",
+       "vh4dHalWq9m8+X1Fip3zq1Ao6P7+XgcHB0qlUjo9Pf3MzpF9effunc13rVYzfe6PP/74u/m90P4g\n",
+       "ICAgICAgIOCZ+GKpPR/1A9gGSpuJaGi5f3h4aBGBb8znNRt48ZKMHYCN8lQeeWUvOvNVe6RQoBzx\n",
+       "2onI0WNR0isty/FzuVysek5adv1GaEyULS3ywVDl5H6JoHzFIZczetbJ642iKIoxPgjCqTxCayUt\n",
+       "rzuRljQqgl4odbQOiBn5XaI92ir4KMIzUC9fvoyJJ2FbEEaSvoI59NoRvyYon+U9ep0KrRRWq0Bh\n",
+       "fohMYG2kZSsGnseLi3lHjMVXdMG2sG589QxMBu+e7t6rVY+k9Gq1mr0rWC60G76qqdFoxNpf+Co6\n",
+       "UlCsL0+pUyk1Go2UzWZjFW++ioW55DlhsHhnPsXu0zXMtW86+fT0ZCJjr+fyjANj5Ge+mSzr2N/R\n",
+       "xz9hBoloi8WihsOhMUFeD8TckJaGWeTZ0UCORiO7fJWIljWQSCRUqVS0vb1tUXm32zX2qFarxdIU\n",
+       "rEvSML6CbW1tzS7D9oyetGAc2dPZbNZ0UcwvImXSf+g20JewFhEC8956vZ4mk4mq1aqlzhib36/s\n",
+       "E9Youin+vmfj8vn8ZzIBXyVJVV+pVDJmivfOheykvP0NE7DniURCJycnxpx+//33ur+/V6vVMlaN\n",
+       "ux0zmYwxmA8PD1ZJJi3L4/f39+09UdGYSqXUarUURZEymYwqlUosnba+vq5Wq2XXbXnRNZeHYzNW\n",
+       "dZwHBweaTqemS5IWtms8HhvjynnIvMCc3tzcqFQqxdJl7AW0WrQbqdVqenx81MXFhYnVacgrLbRO\n",
+       "19fXlmbE1krLNgYwvWRBpOXdjsxrJpOJMTU02iSteHZ2JmlRFLC3t6fvv//e9pIvwkBPxt737Ws6\n",
+       "nY7K5bKKxaJubm7MBlEhy92kPo3smdS3b9/GtIpktWazmYrFog4PD40dbLfbKpVKdu+fP4Npm0Cb\n",
+       "lL29PdtrnU4n1ij29/BFHCn0PF4czMv1OUsqRprNpnZ3d1WtVm2heR0Uh+Jq7x5+zu9jlKVlB2MO\n",
+       "AwyOtDhsqPJCk4Oh9v13cHa8PgD4tAv/jfKfNKJPM/qOsqSpJFnPHp+DZuzoiXheUiSMi3nx6VNA\n",
+       "moV0kn8en9pjzrxjiFPKeHylpbSk6P1GJP+NaI+KM0km2vYlyjwrjiTl8b7s2Au5MZReNM6cYcAw\n",
+       "3qQg0bj5d0cVG04oTiZrBiMZRZFGo1HMsHuRsNeU8IwYGsbAoVGpVLS/v6/xeGwpKl9FRtsI//x8\n",
+       "J3uI9UOKCoeX52Wt8E7b7bZevnypV69exQwiDgn7yd8diGH1zhxGyAv4STXxOa/V8gJ/aXkZKul8\n",
+       "r9sh2MBh8p/jviz0UxRvMP7NzU29fv3aCjF8FSHp+XQ6reFwqPl8bmuYzusIWqVlpWCz2bR0DC0r\n",
+       "/NrAseVZ/ZnB2m00GrHziTmjapYUGWufefC6KRBFUawTOs56oVDQTz/9ZOl8f0m0tLy5IZvNmjMl\n",
+       "LYwsvXS8/k1apso5f+hkzrPidBDclEolGz9BK+ezD5IfHh40HA5N3Hx9fW3iYAqESH1vb2/bM83n\n",
+       "c/X7fX369En/+c9/lMlk9Oc//1mS9O233+pvf/ubvdurqyt7lv39fRO8U8HH/F1fX6vX61ngQQWY\n",
+       "tDijRqORbm5uLDDlO0ulkg4ODpTL5ayS0t8GMJlMtL+/bxd38zPGiXgcHZYk7ezsmGHf2dlRs9mM\n",
+       "Vd1GUWTONSloNGKlUklRFOnt27f68OGDisWiOaC0wuh2u/rLX/6iy8vLWNsM0Ov1VCqVLA25vr6u\n",
+       "Xq9nleOz2Ux7e3uSZPo8HHgf4GF/0Ce/fv1ax8fHkqTffvtN+/v7pv3LZDIW0JBibbVadmOCdzLv\n",
+       "7u6so/unT5/sZ1QSl8tl9fv92B6tVCrWfoKqeR+UTyYTs0tessJND6tBvscXcaQwAD6CRI+Bx5/P\n",
+       "5+0FHx0dqVQqWdWbP/gwOl547V+i1wBgcKRlybkXY3v2yH+nd14kmcYDLYuvvmLRUEHkI3iqmWA6\n",
+       "vBeNsUBfgdHMZrNmFEejkW1saeFhJxIJE/H5y2Dp6USeH20KefT19XVjD+7u7jQcDs1xLRQKiqLI\n",
+       "hMVedOpF1Gg3mBsqZdDVJJNJE/pxsSq9YbiZW5IZNd+rZJV1KxaLarVaJnjk/cIgrYp42WgwEl4D\n",
+       "x5hon4DWSlocamhn6FPCoe/XmL8+iDFgfDDCvE/mBp1QOp02po/P9no90xkkk0ljOemngnbL9y3z\n",
+       "wmzGxvNQHcPfZQ74e+gO9/f3zdBJS90Za9M7WRSH4LzxvKxhHL7VfegLQFj/vsEtBpZ97VkO1jzP\n",
+       "7PuxPT0t7q/b2tqKNQCVZD2ZqIJj7nlWxjAajWJCUuZ3OBzq+Pg4psekMo3rb3BUWIu0i1htYYEm\n",
+       "slarWSNLH3TBcMFqegaPv1Mul62VA2ux3W7b3+eqDJ6F99bv91WtVmPsJ3osKX4mSkstkGd7JVnA\n",
+       "gGPD//gs+2w0GlkLBOaUwJnz2/dE4qwrlUqaz+dqNpuxYJerbjxjydpgDb5+/VrpdNrE3/P53Kq1\n",
+       "YLN5/lwup9FopNPTU9uLvmGptHBwGo2G5vO5vfv5fG6OPWcgWj5fsIF2lv2Lxomg0velQ7TuGW/W\n",
+       "fq1Ws7ONd4GTcXJyonq9bvde4qQwN9wZyN+cTqfmvO3s7GgwGKjdbhvb46uSh8Oh6Sa73a6tE/RT\n",
+       "0rKdBU7W0dGRstms2RyaB/OdvEscSeaNZsJeowiZgQOcTCb1ww8/6OzszMb/8PBgVc8EugQY+Xze\n",
+       "An2cTMbHuQNp4jWeBPIEXv4sqVarsV57v4cvIjYPCAgICAgICPj/gCA2DwgICAgICAh4JoIjFRAQ\n",
+       "EBAQEBDwTARHKiAgICAgICDgmQiOVEBAQEBAQEDAMxEcqYCAgICAgICAZyI4UgEBAQEBAQEBz0Rw\n",
+       "pAICAgICAgICnongSAUEBAQEBAQEPBPBkQoICAgICAgIeCaCIxUQEBAQEBAQ8EwERyogICAgICAg\n",
+       "4JkIjlRAQEBAQEBAwDMRHKmAgICAgICAgGciOFIBAQEBAQEBAc9EcKQCAgICAgICAp6J4EgFBAQE\n",
+       "BAQEBDwTwZEKCAgICAgICHgmgiMVEBAQEBAQEPBMBEcqICAgICAgIOCZCI5UQEBAQEBAQMAz8T90\n",
+       "n59+FodZjgAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x112797f50>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# Load the net, list its data and params, and filter an example image.\n",
+    "caffe.set_mode_cpu()\n",
+    "net = caffe.Net('net_surgery/conv.prototxt', caffe.TEST)\n",
+    "print(\"blobs {}\\nparams {}\".format(net.blobs.keys(), net.params.keys()))\n",
+    "\n",
+    "# load image and prepare as a single input batch for Caffe\n",
+    "im = np.array(Image.open('images/cat_gray.jpg'))\n",
+    "plt.title(\"original image\")\n",
+    "plt.imshow(im)\n",
+    "plt.axis('off')\n",
+    "\n",
+    "im_input = im[np.newaxis, np.newaxis, :, :]\n",
+    "net.blobs['data'].reshape(*im_input.shape)\n",
+    "net.blobs['data'].data[...] = im_input"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The convolution weights are initialized from Gaussian noise while the biases are initialized to zero. These random filters give output somewhat like edge detections."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAicAAACbCAYAAAC5xzv6AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvVuMbVl2pvWvfb/FjkueW568VN5dXSUbl4sHbBBYbYRK\n",
+       "jRqEJW7qfkD90MItN4gGgQC3QHYJiwdejJFfcNvgRtBuaBAPyA9gt5FBcrnc1bbLVemqPFmZlZdz\n",
+       "TuaJc+KybxH7sniI8839rxFrx4lMU7mjKveQQhGx97rMNeeYY/zjH2POleV5ro1sZCMb2chGNrKR\n",
+       "qyKVdTdgIxvZyEY2spGNbMRlA042spGNbGQjG9nIlZINONnIRjaykY1sZCNXSjbgZCMb2chGNrKR\n",
+       "jVwp2YCTjWxkIxvZyEY2cqVkA042spGNbGQjG9nIlZJPDTjJsuyHsiz7x1mWHWVZ9jezLPuVLMt+\n",
+       "7vF3P5ll2TvrbuNGNvJxZKPbG/lBlY1uf3rlUwNOJP2Hkv6vPM/7eZ7/13me/0ye518uOzDLsrey\n",
+       "LPuL36uGZFn2lSzLXsmy7KUsy/4wfLeXZdn/mmXZ4HE7/s3vURv+8yzLfuOqXm8jH0m+X3T7Z7Ms\n",
+       "+2qWZZMsy37te9iGjW7/4MiV1+0syxpZlv3q4/sfZVn2tSzLvvQ9asOnRrc/TeDkM5K+ccljc0nZ\n",
+       "x7lJ9lgu+L4u6fk8z9+Q9EVJfxgO+W8kTSTdkPRXJP1KlmWf+zht2cinRr5fdPs9Sb8g6e98nPtv\n",
+       "5FMp3w+6XZP0XUn/bJ7nfUk/J+k3syz7zMdpy0YeS57nP/A/kn5b0kzSWNKRpFcl/bqkX3j8/U9K\n",
+       "eufx378haS5pJOlY0n/w+PN/StL/K+mRpH8s6Z+z6/9DSV+W9P88Pu+lC9ryBUm//fjv/1LSz9h3\n",
+       "XUknkl6xz/47Sb+44lqZzibCW5LuPz62H5/Jjn9L0k9J+tLj+5w+fsav2XP8oqTfl3Qo6X+TtPtx\n",
+       "r7f52ej2iuN+QdKvPeG5Nrr9Kf/5ftRtO/6PJP0rG93+c4z/uhvwCSr670j6a/b/r0n6+bIBlPQd\n",
+       "SX/R/n9G0gNJX3r8/z//+P+nTDnekvQXdMZG1Uru/289niDDxxPhkaTp40n3UGcRwhckDcN5f0vS\n",
+       "/77imf6apG9LekFnwOZ/kfTfX6CU6bkk/Wcca9//Q0nvSvqcpI6k/1nSb3zc621+NrqNbofjv6wn\n",
+       "g5ONbm9+vu90+/E5N3UGqF5b8Uwb3b7Ez6cprSOdp/wuSwH+VUn/R57nvyVJeZ7/n5K+KulffPx9\n",
+       "LunX8zz/Zp7nizzPZ/ECeZ7/ep7nuzqjA39c0j8h6ev5WS51L8/ztyX1dKb0LseStla0669I+q/y\n",
+       "PH8rz/OhpP9Y0r+RZdllxjXT+efPdaao38jzfCTpb0v61y6iO59wvY18cnLVdbtwyiXatdHtjSDf\n",
+       "N7r9OP3zPzy+7rdWtGuj25eQTxs4uYxRLJPPSPpXsyx7xI+kf1rSLTtmZdX44yLXgyzLDiT9hM6Q\n",
+       "7uuSfujx9f7dx4cOJPXD6ds6Ayhl8rQknxzf1Vn+8+blHqtU/Dm+K6ku6dqf43ob+WTkqut24bRL\n",
+       "tGuj2xtBvi90+zG4+A2d1Qz+7AXt2uj2JaS27gasWVYpffz8uzqjyf76x7iW8jx/KGkny7J/XdJP\n",
+       "5nn+M1mW/QNJv5zn+W/bod+SVMuy7JX8rPBKeozUV1z6fZ1Rg8jzOsvR3pf0rM4oPklSlmVVSdcv\n",
+       "0d7nw99TnVGhw495vY2sR66abl/qeiYb3d7IKrlyuv2YpfhVnenNX8rzfH7BPTe6fQn5tDEnWfh7\n",
+       "VQR3X9LL9v/flfSXsyz7F7Isq2ZZ1nq8xv6ZFddeJf+kpH/0+O8vKKxmeEzx/QNJP59lWSfLsn9G\n",
+       "0l/WGRovk/9R0r+XZdkLWZb1JP0Xkv6nPM8XOgM6rSzL/tJjqvHnJDXt3HuSXgjUXybpr2ZZ9hey\n",
+       "LOtI+nlJfz8/S05+nOtt5JOTK63b0plhzLKspbOgqJplWfOxsSyTjW5vBLnyui3pVyR9VtK/lOf5\n",
+       "yROut9HtS8inDZzk4e/4P/KLkn7uMXX3t/I8f1fSvyzpP5H0gc4Q+b+vomJfBoH+mKR/lGXZU5Jm\n",
+       "eZ4flhzzNyS1H9/n70r6t/M8/+aK6/0dnQGX/1vSmzor2PqbkvT42n9D0n+rs2KpgYrU399//Hs/\n",
+       "y7Kv2jP8hs4q4u9Kakj6d/4c19vIJyffD7r9t3Wmo/+RzuoBxpL+0xXX2+j2RpArrduPlwz/dZ2x\n",
+       "3PeyLDt+/LNqj6qNbl9CsscVuxvZiLIs+x2d0aCbfSg28gMlG93eyA+q/KDq9qeNOdnIk+XK0Xsb\n",
+       "2cj/T7LR7Y38oMoPnG5vwMlGomyotI38oMpGtzfygyo/cLq9SetsZCMb2chGNrKRKyVrWUr85S9/\n",
+       "OZekCIyyLEuf+Xfz+Vzz+VxZlomi4tlspkqlcu68LMs0ny9XcVWrVVUqFdXrdVWr1XTtxWKRrn1y\n",
+       "cqLpdKrT01Odnp5qPp+rUqmo0+mo3+8ryzLNZrN0D87ld6VSUa227Eramee5ptNpOrder6efarWq\n",
+       "PM81m83SPfM8T8/Eb4R7rRKes1KpFPpoNBrp8PBQh4eHGo1GmkwmkqRWq6V+v6+dnR11u13V63W1\n",
+       "Wi3V63VlWabFYpF+8jzXfD5Pz5HneXrGWq2m2WymyWSiyWSSnqNarardbqvVaqlWq6Wf2H+z2UyD\n",
+       "wUCDwUCnp6eqVCpqNBrq9/tqNBqF56ZPFotF6jPuxzP/0i/90lrpzV/+5V9+Itr39kapVCpPHOt4\n",
+       "jYuud9H5ktKceNI94/3iuWVz+Xsh3g7mELJYLAr9x/9IpVJJduSjSBwTtzVl7XKhjd622K4n3Q/5\n",
+       "2Z/92bXp9q/+6q/m2LQyvfPvsixTtVpN8x374cKx3pd+jfl8rsViURgvPwZ7Uq/Xk82bz+eaTqea\n",
+       "TqfJ5ktKfR6FNmHv8jxXs9lUrVZLdm48Hms2m6XjKpWKqtVqwZfQJv/b27xKoj9x27m3t6ednZ30\n",
+       "HHmeazKZJBt+enoqScln4TvcB/A37XBbmee5Wq1W8hvIZDLRdDot9L/7JMa13++n/qXfR6NRei7v\n",
+       "78ViUdCLarWa/pakn/7pny7tpLWAE1dIHsIH1X/TQThHHKJUBAeu6NVqNSkeA7JYLJIiRwDkk4iO\n",
+       "5LjJZKJKpaLZbJaMigMAfrvB4brT6VSTySQpXLVaLYAkFMCvw/O4cG2fqP4MgKdqtZqO5X7dblfT\n",
+       "6VTHx8eFezl4wtEvFgu1223V6/UCOKH/mcDcs1qtponLM/mz0GfxWfgNMGPSTafTdJ2Tk5PC9eKk\n",
+       "bzQaaVLw+6rIk8DCRd9F57nqWv4Zf18W2JSdf9G5ZQ6pXq8XxiuOzyq5LJC66LndgWHoIjBxo4qU\n",
+       "AZPLtCeCCZ8HZeevumbZXF91P5ePMq7fK8EeRr0ps9m1Wq0wZ7FLZcdK520ZgRzAbjqdnutvArvF\n",
+       "YqFGo5HscrvdVqdztq0HNoa//VkcmEhLH9BoNJL/wLbQ/2778RMuEZSUjRuflQE7+rjZPFvpe3p6\n",
+       "qmazmYJnAl138LPZTPV6PbWTfvdAzn0e7SSoZKzoW9oT/Ztfl/H0gBj/io9+EhB3gLdK1gZOVhkz\n",
+       "V1SUCCVzpFn2wN6pDlBwgigyqDse32g0JJ05xvl8npwm1+ZcQI63l78BOKenpwUk2W63VavVkqJE\n",
+       "R889eD4HMPzmud1ReLQSjW+WZUmpa7Wams1mgb0BOTP5K5WKTk5OzvV9jHrimM1mszQ5nOHiWb1f\n",
+       "Yl8xrovFIhmDarWqk5OTwiSj78oM+3Q6XRkdfdKyKk16Wad8GWCyShys87dfo8xYOuux6l7+OYbp\n",
+       "9PRUW1tbOjw8TON8ERtQdq3LHFfWB2VO0Z8FvYlOpOy8y/Y38yu2LwYL8RnLQJIf/yRgcxWASZmU\n",
+       "AVYPPNzxuO2SljrkOkp/wELX63U1Gg01Gg2dnJzo9PRUs9ksARJ3qOieO7xKpaLT09Nzjpd7u45I\n",
+       "Smxtp9PRcDhMPsCBQ5n9KdMld9jeXxEAcGxZH2APZ7OZxuNxsovYSreLbi+jTjMePkawHwBAAnhn\n",
+       "x+O8oG3z+TyBwQjEvH1+Lvf38XeGfpWsDZz4gJYZT0fdPKArFY4Qx9hoNAoUn6QU2U8mk5Q6kKRO\n",
+       "p6Otra10DgKFxjVRjizLEtXnk4BJ522ez+caj8c6ODjQu+++qzzPde3aNV2/fl27u7vn0iX+TGXU\n",
+       "cewvZy3cOPj1XHmhODudTkqz0KeuhDAPtMEpc48cmCTev0wg2A4HElGBXQBP3q/uaGIqD8PlkYBH\n",
+       "FFdB4qS+rJQ522j8Vzkqp7WlYpRYq9XOgcuyNrtReVLKIcsytVotjcdjtdttNZvNRAf/eaUsOl/V\n",
+       "5pjCicwg7fUo8qLrMqcjOPC+LRunVePs7Yu2bdVzl9nCqwBQyvTanRbHEM27vfbghPnbarVSSt0D\n",
+       "T/qZQKrVaqnT6Wgymej4+Lgw7gQ3BCcEYNgp/IPbGA/wuFej0VCn09He3p729va0u7uru3fv6lvf\n",
+       "+lYBxHiKxO1wTJdIRV2LrLrP5/l8nnwLwIE+nE6nqX9w6O7geVaCT+4V2X8HAbBSLm4jYGHo0zKW\n",
+       "CyAzHo/TfX3uR9CBXjAGjUaj1B9EuVLb18fJyISI1BPoF6Wez+caDAZqNBqpbgKFAlkS7ZFGWCwW\n",
+       "6vf7Ojk5SU6PHwbdmQRH85ERoO0MynQ61d27d3V8fKw7d+6o1Wrptdde087OTjrfjZ1UrKvxSYRE\n",
+       "J1+WYuI6brABE51OJ01CFB82yettOp1Oaluk3SKIoH8iwPJnojYIVio6UcaI7xnvmNPlbyYq+kBE\n",
+       "tSpCXaesAiqr2IB4jn+HnkfnBT3rn7luODApc3CrAoVVMpvNUkqtVqtpMBjorbfe0quvvprSPE8C\n",
+       "Q2XyJPamTFzPpdWgCp2LYCY+r9sc7AzzJ9okjnlSn5UBi4vGn+8Z2wgA1iVRT8qYImcxnBXlWBwZ\n",
+       "gQ/znDmM7s7n85TSkJapFmdxZ7NZsj1+H2cYCJy4p1RkNTwYqtfr2tra0u7urgaDga5du6Z+v6+v\n",
+       "fvWrOj09TYGXAxO3ac7oRJ3it/cZ/9NWB0C03dPtMWW/vb2tWq2m6XSqo6Mj1Wq1VHsSWQq/N+1E\n",
+       "t/BZHijzTA5u6DcPDGkn13Uf7deJ9gUfi/1eJWsBJ2URR9mAOgBAkRytukIvFosEOnB2gJp2u63h\n",
+       "cJgAyng8LkTmlUpF3W5XnU5H9Xo9gRMv4qxUKmo2m+doMB9Q/j88PNR8Ptd7772XFPj1119XpVLR\n",
+       "j/7oj6rVap1jI4j+YW98Mrlxc9Dlfed0JROW8+r1umq1WipO5RiM9sHBQUpB7e7upmO8FofnRdEA\n",
+       "gEyK2Fan1RlTV+qYMuK5pSU4WZXC4nz6bTwep/OugpQ5H5fLOOA47tF5oSuec45swpPuA7BBd570\n",
+       "LO5QpLMxxaB/61vf0o/8yI/o3r1758ahjBW8yDnH+z/pWdyOxGfxttBni8UyP17WRgciRJVlrN+T\n",
+       "2hgDDI6Nx7lee/uvCjApk7JncGePrXb7TP8zt3k2D0QAGpPJRNVqVVtbW0nvtra2ku12Z0ywhW5i\n",
+       "v6QlYI3sh7c7z3P1+33t7u7qzp076vV6Go1G2t3d1WuvvaY/+7M/03Q6PcfoxYJsrue/yySCOMTZ\n",
+       "FQcMkgoByNbWllqtllqtVlqEcHx8nOYzz4ofAQB6mguB8fB+cWDjwMnH1ot30XPaXMZ+wtjAiNVq\n",
+       "tQS6VsnarPmqCer/0wH+PYpdrVbVarWSg3MmAjbAacTt7W0dHBxoPB4rz3MNBgPl+VktCJG9MwMY\n",
+       "fgdHeb4szooUtg/u0dGRPvzwQ12/fr3Qtq997Wvq9/v63Oc+l5AmE9cLQF0hHAVHZfYJ6MoelcfT\n",
+       "IdKyGKler+v4+Dj9gIxZJZPnZ8VZAMHJZJKcGSxVZLicQncj5Mg6gjoHOzxPjGzoF66d52eV9JPJ\n",
+       "pJBvvQqyiv34KOITvuxaDhgwzBgmJj46EZ0wv8uKBS9qN846y7IEZmu1mkajkV599VX96Z/+qV54\n",
+       "4QWNx+NzLAXyUZ1tBFyrjnHdiM/qc4h+jaDM+9vBAde5KMXyUcbWxy2yZdgDTx84wL8K4kDK7ZMH\n",
+       "E94fMZCEiYYNKWOhOe/k5CQtSCAVlOd5Ykyk5Xh3u10tFot0DHaGa/mqFPwCz1Ov13Xjxg29++67\n",
+       "6vf76ZkePHigl19+WScnJ/r617+eGENJKYD1e5WB8LL+879dD8rAidtM2I56va5er5f+h0Vqt9vn\n",
+       "0usANoABLHNkEGMaKjInDsRoN230AFM6X/vJMVmWJQbfGaxVsjZwsspI8dBx6arTS16Y2uv1VKlU\n",
+       "CstxpSV9RwdDhTmDMhqNCoWwDBJ1DL6s2BXaC7OkInXGtZ3+ZnBarZZ+53d+R9vb27px40YadBSG\n",
+       "NsQIRFqmfWLNhRsCn3z+nSs9AmKdTqcaDoc6PDxUp9PRaDTScDhMFP3Ozk6KMukPnE+r1Up9FyMU\n",
+       "V2D6gHbHHG1kg5xyjODQQd14PNZwOLwy9SZl8iQW5Unn+AQuG/PIapQxIe6cy6K9J4m3oVar6fj4\n",
+       "OBW08dnJyYlee+013blzR7du3UqRZtRJb0vZfSKYiueW9eWqzz0d48EEn3n9lKfDykBVbIvfO0Z/\n",
+       "0dCXtdXHN9a5OO1+VdKVcT6uAiZxoYGk1M8EjNgRZ6W5hzu6xWKhg4ODlN7xAAfWhBWJsGHScixd\n",
+       "19BFZ2l4DtrMKh/sb7fb1f7+vl555RV98MEHOjo6KqT5ELdJDty2trZUqVRSAOX9dhkh0KDfZrNZ\n",
+       "WlgR9YVAsd1uK8uylLL3gBOGibmAzXCGJj6b+1//oZbRAX0ZuOY3qRxA6WX6YW1pnbLIAeXE+Xva\n",
+       "xsEJBbCgQtIxdLIPihubfr+vyWSiBw8eFFAvUTv0GPdm8HyZFuKpE58Ep6enGg6HiUrziKDVamk6\n",
+       "neoP//AP9VM/9VPn2JgITBBWDh0dHanRaKTalWgovD9daWjrycmJ6vV6YovI7boBBOgx2Q8PD9Ne\n",
+       "Lyg97WWyMOl9nBAin0j3emU99CMKHp2rK7IDwNFolIAg7MtVk4/D5jggpT8w5mV7zUhFJ4lOO+D1\n",
+       "65RR2xe11Z02UZh/x/UfPXqk3d3d0rlS1h/cNxbVMedie1a1z/UdXYxRnt8vgodY4M6xq+7nNmVV\n",
+       "SuxJUSHXdwaRz914XwVggkR2QzrvgMrASWREAa5ZliW75Owv18uys4J59AqbBLCsVCppXxIvJPW2\n",
+       "OcDzFIQX7WOXHWQ6ADo8PNQP//AP6/d+7/cKwTHX9lU93BeGoNlsqtPpaH9//9z2CNzHWTmENDwB\n",
+       "7uHh2bsGm81mYkAkpS0Y6NsI0PCVnkWgX13oE/ezPgdIoTN2+BS3KVy7bCybzWYqZ/Ag5KLAcq1L\n",
+       "iVehJx7cnbsPIP+jcKDySuVsVUpc0uQFkxQ8eWEQyBy2hOO5NgPEPWO+2qvOAREuXItamLfeekvv\n",
+       "vPOOXnrppcIyZ681QbIs02Qy0cHBQapYn06nunXr1rlVNyhEBGW+YinPc21tbaW86nA4lHRWdNbt\n",
+       "dgt0IaCIPCGrMlBO9nBptVrnDKwXvcYo3pUVwxLzm4yBF5pxzOnpqQaDQUoz4bivas3JR2FPykA7\n",
+       "fTmfz9P+ItFpAdiazWZhXwhnL8qcs9/Hj4/HLBYLNZtNjUajwjgxF/is0WjoO9/5jl599dW04V98\n",
+       "Nn9GB/+NRiPNhdimi8TBEPoSlyo6EPJVYJGd8H5dBS4uSvEgzPkI2Mv6N4I5b3ds/7olOmHXmfgc\n",
+       "HIPNpJ/ZyoDjDw8Pk+PmGH9e3wQNB+k2we8LK+sRvKTkVNEL199Op5PASWTJmUfb29u6fv267t+/\n",
+       "X7DRzqxH0Oo1H1tbW6XLoMv6F/vY6/XUarUkSe12W6PRSJ1OJ+ksAdpkMkmZBEAHARvp12azmZ6b\n",
+       "tK+3x5/BWVZp6fdIZVHbSb8yn/Cl/DgAAqh5H/sqrTJZqzWPhqfMCDnIkJSQNx2G8wbZbW1taW9v\n",
+       "L6FbV1KQGh2HAjlz4jUidKSnI/icamPoOtqJQfYVC6BZihdbrZa+8pWvpHbGCNcVnPMpaJrNZikF\n",
+       "s729nZzCRVEW4Obg4CBVd3e73QSMqtWz3Vy3t7fTzn/0qRfH8hyHh4eFfQdA0J7S8bFzOpDnYh8B\n",
+       "IgxQPUYBgAIThbHnc3ak9Wr8i4qrPilZxWRdFqCUHeOpEXbR9XtFiWm9+Pmqc5/UvhhdMlc85ZPn\n",
+       "uV566SUdHx8X5k+kvLlfTK3Ezy4q1uX7sr99eaWPCY7Ha9T8ntHJuETgclG7uGYZExjnt9uKSKl7\n",
+       "361TygJJt3s+z6MNkJbzE6aKbRxwjMfHxynY8Wt3Oh21Wi2dnp6mFIdUXDKLzYTNRTxlBiPMs/h4\n",
+       "++IDxNkZ2JMvfvGL+t3f/d0CsF0F5ofDoba2tpIPIvijPs4Z0LJgIcuytLqStpElaDabyR/AZE8m\n",
+       "k7RNBM/M3PQlvzGthb67/+Bzzxigf/gAgnEHNwQYgBDf0M6fId5/lVyNUNMEJXZnE42aU2mLxUKD\n",
+       "wUCj0Uiz2Swhyd3d3YQ6XZmOj48LkbbXr0Sa2Vf8OICIjIu0rDRnJ9ZoVFBQPj8+Ptaf/Mmf6Id/\n",
+       "+IcLhYw+mRBSOe+//356DlYE0RaYmbjXC0rHLrGj0UiDwUDtdlvHx8eJTen1enr++ecTc9JutxPQ\n",
+       "8sIqKEKvDi+bpB6JxsjW0f1wONTp6WlaLeVGmPF0YOaOmijJXwmwbomsR/xcejLl71EM53q06sAc\n",
+       "AbhBkT8JiFwkcQw5lw2x/BgvHsdYtdttPXr0SM8++2xiQlaxpW74ynS/DCz4/57Omk6nqQZqNBql\n",
+       "+3r/cB8cV7wHz8hcjP3ubeBzZ17KWBX01schGu04Xv78H3X8vhcSwQbithEpq2vydFWlUkl7Lrl9\n",
+       "9GthTyuVSmFvJt/XiPGmj5xhdJAEQKC9nuIAADjzTHs4D1A+GAz06quv6s0330w6X8ZM8kz7+/sp\n",
+       "JQ6r74XAnirx1D7PwHUbjYZ2d3dTkMozTadTHRwcJFs4HA4TewKj7PpJCkwqrsjxOci9OZ82tdvt\n",
+       "1D+AJHyu22KCXeyTL8xwpsTLA1bJWpcSl004qDaQ7Sr6KyqxVyk3m820vNTpYwbWV/I4OHFjxiAA\n",
+       "TkCMzgI4WvfNfvy+7mRQqkajoa2tLb3++uu6deuW+v1+ul803NKZcet0OnrqqafSUjqiDBRDWlJn\n",
+       "bEGPOFW3WCz04MEDnZycpM1wFouF7t27p1deeSXlKD3qJf+JEd3e3k60oufH3WH4+nX2hwE48R4d\n",
+       "H7/T09NEsUpKBV6+F4tPaqhPwIkX565TLkrp8PdFwEQq7o0AG8H5q4SUT5wzq0DRKsATgaY7UV/O\n",
+       "yLnOJjIWJycneumll/Tw4cNzqbYyRwujiLgD53tvY+wrImCCBQASbWw0Gjo6OirQ+R51e99Sl+Wp\n",
+       "hCjOEHrQMpvNEqiJ876MUYvpZ38mB5/rBiYuEUxI51dv8NsBXOxzfzaPvCWd00NAijN1HuC4TvC9\n",
+       "38tTOlzfA1Lvf+w97cGpSmfg9/nnn9fdu3cLjtl/JBVekwJrwfJZxhVg1Gq1CiUBrh/cd3t7OwVp\n",
+       "gJLpdKper1cAcWVsHs93cnKSGJdY08RvbDx6zSZwzI12u53OZ9556hI/DChxP8L96DPf22aVXLnt\n",
+       "66UiQHGH51GEG9o4URgkf3g6BJoVFgCwUMY2oJwMJk7RC2ERFMyLr/xYR7A+Ed9++2392I/9WFJo\n",
+       "ABGTxCu1O52OKpVK2s6Z1UZ8P5lMVK/XNR6P1ev1CuCD9h4cHKRnRrj3/v6+nn766TQGTq3TNz6x\n",
+       "3QBHQwwy5jsKcaFyffURE86NuqN1trEm1+lRASm2uNvvumQVY/Jxo98IMhyo+GcOImKUGKN6b5tT\n",
+       "u9L5FwF6VMw9ABOuV5JSpIkOHh8f69atW+eMbhlw8g3c+O3Ax9sICPD2NxqNwl4U7nRI/zEfvG/L\n",
+       "AIAHKPRFBEURKHoBL4yi93e0d6tW4bi9uqoSmWGeDefjDGZkFaRioSo/Dgax7+48qesjHYy4j6Bt\n",
+       "/oONQDexydPpNNXSRUDlLI37FnZFvn37tu7cuVNgDZwF85qmyWSSbC2+hPNgfTqdTgqsmSv1el3d\n",
+       "ble3b9/Ww4cP0xJpQMRisUjpbhdf0eR9DuD2eeNz3/2cAyfPKMzn81SM66wR/ppnp46x3W4XFq64\n",
+       "T8SGx/a7XLm0DsIDOzXkfzttRCdRiOObvDAIsTgny7ICzcY93Vj5oPG3G2RP/9Tr9UJxKYjfn4ff\n",
+       "LL2sVqv69re/rdu3b+vll19ONTAMphsBT62gVBhjVu7AZjBZAQNHR0caj8e6d+9eyr060oZmRbiH\n",
+       "52IdEKKYPE80PB7d+KQfj8eF/Uz8xVqknqD+nA2CeiUv7crtIPMqgJMyWQXGL5Pe4XykzJl6hBnv\n",
+       "W+YAy66HlLUHZ4tRpWao2+1qPB4n44ue0vann35a+/v72traKgCmMom0fAw+vO/cAWCsvQg49h9t\n",
+       "j8yaj4kDLP6OgRESGQ9vqwcnDq4cTHsA5e2MIOiqAZQYbTswkc4v+faatchaOePktRNeJ+Q6je45\n",
+       "W4E4YPe+5b5e4O/to/6j0WikGinpzC7B9jrIwOa88MILun//vg4ODs6x/FzfAwVYBHyXB7L4ljxf\n",
+       "7inVbDa1u7ur559/XoeHh2l5sLOZ9F2z2Ux+h89cnOHwANzrSdwX4GM4lwDR0zP0ny9p9g00fczo\n",
+       "ex8b/MOVBCceOV3EnjiSc5Tn1J2kROujBCA+SYU8FwrCeTi+sloFH7iYX3ZE7jQ6VB/sgkdSRAcY\n",
+       "yXa7ndIZ3/72t/VDP/RDkooRWKQ8QbTOklDVTVThWzqPRiPN53MdHR3p9ddfT0oDWHNWhWv6+MS/\n",
+       "HZzEfCbfuUGu1+upDZ5j9T5lzGjbYDAoGHtH+uRcPcp3pukqpHXKpAxQSE92Pm6I4rER2Di1G89d\n",
+       "1aYyBiMpQdUAAAAgAElEQVRKHAvGuFKpaDQapT6fz+cJqKCzTmdftE21t8FX7EjLZdHoBzaA4IDi\n",
+       "9gh+ot5GEH1Rf3hQ4Ncs+wyJKdnoqNwplPUvziPeOz7LuqQsRcX/rt8OJImSfb5zDvOYfndWzuc8\n",
+       "OuWpg9gfDkZivwFwPGrnOrPZLNUlwmjgNwicSMXQvsFgoCzL9OKLL6b3taGzrj/cx5lt6kHm87n2\n",
+       "9/fV7XZVrVbT3iWz2Uy9Xk97e3t68cUXdffuXXW73QKTT78yt2GkAQ8O+qL/igDK/Zr7VUBPzExE\n",
+       "NorrkIp33XYw6j7c5wHB9CpZ+4v/yiI7JKZAYuEZD45BJO/u1LMXQ8WqbCJul4gIIyXujhgDjBFm\n",
+       "mS+5716vp8FgUIg8fQK1221Np1O98847+vDDD7W7u5vaTsWztHQKTFhf3VKtni1T29nZSVvzHx8f\n",
+       "K89zPXr0SKenp/rGN76RlNHBCG1xqg0nAOJ1YIiB8NwvCsh5fAfz1W63NRgMCikemB6vIXHnw1h5\n",
+       "vtdRdqTLYc2uCnNykdO/bHrHDXd0WDGSLxO/x0XO5KJniMbeAwKPMom2+v1+Ai/V6tk+P+TLy+4f\n",
+       "GSUciUdfnqpxB0kRNE4h2hFsA33HRl2rntX7w+/v93XmA/12QB7lMixIZFw8TepA6KqwKBH8+f+u\n",
+       "n3yHfYm1fW4nqtVqAgkONKXivksXAcxoE/gd9Y3jvJ5jOBwWbJ37iUqlkuqouOZ8Ptf169eTrUfX\n",
+       "CL4Wi0XaJsHv529Pr1arqT5jOp2mgPrll19Ws9nU3bt3Cyt1XAf5jX8jpRkDFPwshcduR5xJIUMB\n",
+       "++gMPudFEBHBhjMiMDDebr837QJQrZK1bsLmNGyM5CJ1KC2LVBeLRVpKykP7Rl7QdSBSp8F8NYsb\n",
+       "D1dkAIpUpOgioHJkyvlON2NonXImpcNkbLVaGgwGeuONN/TZz362sKKHIk/qSxaLRYpUccSOoKG4\n",
+       "K5WK7t+/r4cPH+rtt99OBhXlYZK6YsGcOE3vRcAcByUZx8z3KaB+gD5sNpsaDAYFI+4Rtiu2KzXg\n",
+       "j/YRdaAL/HaEfhXE9Tu26bLARFrSomUOq0w8aoq6LRXBwJPa5OeQOiMf7oC+VqsVGJTZbKatrS0d\n",
+       "Hx+r0+no9PRUH374oW7cuJEcE2CWZ+FllDyDt6fMwWdZpt3dXR0cHBScSmy/Py+6VdZ/Hr0zTyI4\n",
+       "cFra+xsQ7f3vQUCZeFtjPU0ZEI39sA6JtvgiHYr97qwvgQnBCk5SWq6SJEjyKD8yBj4WHvn7/z5u\n",
+       "nOPpNlZl+UoZT7VUKmepaGwiQPzk5ESj0Uif//zn9eabb6bnunfvXmJ5YPPzfPmqFBy8B7oA7Ha7\n",
+       "rS9+8YvK81zD4TAxJqukVqulnWdJvWAn6QNP53hKB7DowTttJlB0hhq/6+Prfe/pe0/NegDt/hHm\n",
+       "ijm58hlXfvMJSYzkfEBQSJ8QGKL5fJ5e0+479oEevXN9MxxJBQcdGRE+j4Y8IvM4MSJFyXf8Pjk5\n",
+       "UafTObcklxc4feMb30jfZ1mmnZ0d9fv9VGtB5XSv11O32y0t4kUxxuOxPvzwQ925c6ewj4qkQkTo\n",
+       "tB3KNBgMEmJ3JgLwEAvSJKV0kjuvGDWBzmezWWFVldO49JWPlwMo/5GWS5yvyjJil4sAyqrPOM/1\n",
+       "CIYrpg1W3bNM3LhfxOqUtQ+DHR2In9/tdhMobbfbaVdh6PTnnnsuFW878OZ6MCtPYiCoc9nd3dWD\n",
+       "Bw9KgUlZH1FM6cxLdKb8dqAgFanwstSaf+cAIwIWF783ztCvWcYCXDXxKNiBoAc9/p33PfOdBQpc\n",
+       "z5lSaWmnfTxiH7s9iAFmBCnxtzPnvm0C1yWAOjk5KWzQSZp+b28v2avT09NUMEvw6YCYcwFlgB3p\n",
+       "jEF/6aWXCqlRB3VefO7MPas+T09PE+jzVZA8j+vofD5Xt9st2EzGIII0fGmWZYXNHdFZZ7kARowL\n",
+       "13f77cdh3y7S77W/ldjpprJjyj5zcACLQirFQQcTBcV3JOiV1g4wUEhvG2jPAYBPNraAZw8Vvuc8\n",
+       "tnBn4rVaLY1Go5RzbLVaevDggb7+9a+nDdB2d3fV6/XUbreTIsxmM127dk1bW1tqt9vq9XqSzsDI\n",
+       "1tZWymV+85vf1P3799Vut5OyQI26ktN/GODBYJB2t6W9zkr4JkYwI0xGJhrHepU2+xSwp8l4PE5M\n",
+       "CjseSsV9ZRws+q690pItisDEt4e+KrJKj1exKjGl4MeUGWHpyamiaLQv2+YyQx/vhaFZLBapQHZv\n",
+       "by/VGvV6veSEfI4DumDDKGr1wm8MPgWK3W5XzWZTH374YWqT0+QxWsaI+1b+0Un587pNkc7vRRJp\n",
+       "aY4p6zsvGr9IcHS0fVVNyrqZE6l8pViZDvtxCEtZ3f4QWbte+1g4MPE+8L7y+7ueRlDiwAldhZ3w\n",
+       "qJ40PIw2tvH09DTt8orNabVa6UWB7EUlKYEF7CXpTna13t7eTun8mzdv6vOf/7yOjo4KBcHurxys\n",
+       "xr+r1WoqpPUVb85wRh9G4O7jQ0Eyxx4fH6vRaGh7e7uQPpKWQI7+xwZ4MXwZ0wJj5u27qE5wrW8l\n",
+       "XmUsLzK2lUol0cV0pufb3LBG+imiT0eUfh6f8T8d65/TFpiAR48epTf7gmop2MMAAVLq9br6/X5a\n",
+       "CsY1j46OUq7ywYMHevDgQWIQyNEeHBykXRO3t7fT81+/fl2SdOfOHd2/f1/dbje10YEJEz72P0rG\n",
+       "ToP0qae3EGeh/D0PvnqJCnOcTa/X08HBgR49epQUlHvQTq7N2Hhxb3TMXggLEIzvi1i3lIGHyGJE\n",
+       "I48j9vyxg2K/9mVYkHj8ZQEN4gXftMVXnAAQK5WKHj16lPZtIFgYDoeFVCX39iDCgTBAHIDrugtw\n",
+       "R38jHc08jlE8+ua1BJ4CKBsjaclq+BzlnAhCaKuzQ2UMTexzZ6ViWkhaOqCLVjV8EhIZ7chWxb5z\n",
+       "ACcp7c+E7cLBe996MMkYx927nen2NFxkzOhDHyv0jF3EnSUhTeLMPLUg6OpoNNLOzo46nU56gSw6\n",
+       "OZvNUl0TOsdqUFI8BwcHGo/Hab+Rev3sbchHR0eF9jq45n/3QV6r4/0FUPEl0PSXrw7ylBrP6nOT\n",
+       "3ycnJxoMBtrZ2UnAif5kPBwwck3aHcGVA8wry5wgbpjjJEbhPXJgoOr1unZ3d1Wr1dIyWe9wp/Nw\n",
+       "bjhfN3w4uGh8+Jx2uLLQPo/W7969q8FgkJwpqSYcNxR2o9FIK3qgC3n3Qa/XS89AAZazP7Az77//\n",
+       "fkL2/X4/PQ/LbA8ODgrpK/rOWROcuhtxd4ZQmBRsuRIxoWNE7wW0gJc4eVDymHsFxERlZgLE8XGB\n",
+       "smd8r7qsmowOWkiHRAcaj7/oeh+1HWWO06Mfj+59LwV0r1ar6fr166kYG0cN5Y3RIjXDZmdOfTOv\n",
+       "/EVwXgzutDPHe+2VR8HofZkRjMEIDqqsb5yBuagfcTa+Gs1ZFq93KTPaZZ/Hvl+nRHbJ+5r/Oc7t\n",
+       "t5/PuOLcPOXs58OqYBcYW16p4e2QVKh3iixJDFCr1eUW8JVKJTlfzqemDmACi02qGyDSbDZ1fHys\n",
+       "nZ2dpMsAFrfZbGM/n8+1s7OTNjID9LRarXQe/YD+kGr3vsc2AERgaTxzEAGHVEwvwmDAeESw7+eR\n",
+       "joUB9Wu5T3Cd8Pussk+XWVl5JWpOLkrpeNrB0THROIM1Ho8L18IwudP1LXMXi0Vh+2R+s7wJRfbv\n",
+       "YtQonRklgAHtYldA0jYUUR0eHqpWq6U3THJtEDaK4G/JjHUfTPLBYJCQO9ElEUTsz7KoGwNAH52e\n",
+       "nqrX66VJwcukKpVKWjkU2Rb6wHO3cZyYaDBGe3t7CaBgtGNeP0YKMfqJUZIX+l5FuYgddFDi4kWZ\n",
+       "ZZFz2TVXOTc/J0bA3hb+xlnHdnlRrBt+xvDw8DBFkNKy4FBSMsLs3oxjYjwx1G64HRDx0kFJhcLa\n",
+       "OObxfwcyZRE+fRuZJSQypav6lmNX1YysoubLzr+qgm0pAx9Ska3wc/gO0Opg1HXJbbczJw5Q2fnb\n",
+       "ARu2OjpX7ulMMedi+6j/o8aCN8pLyxqn8Xic2Hr8xmg0Urvd1nA4TLuosq/U8fFx0mX07+HDh9rb\n",
+       "20uF4tPpVDdu3NBkMkkpTKkYEABMYD1gL7AJ7veYi7G2kj7gu7Ixc9uLuC6T3nLgF1nQMvvlvjKC\n",
+       "1jgPy+RK7hDrx0jFqI3vpOUab5TT9/Hw853aoqCTDkP5nSHxIs1IrcXrMYCgUBSY+1AIyq6trDBC\n",
+       "ydnvBDDjRaJOczabzYTqfe07hpsUEbvIes6btnt9DUoMWzOfz/XUU0+lGhcYJU/vuDEo22/AgUlZ\n",
+       "ZC6dpYF2dnbSpHIWqwykeiQR9aLsvldBLtJr5CL9jxP4spT+RY7T7xsdSAQpzlxR5MffjMd8Pk81\n",
+       "BLCDRJvHx8fJUPMZG0Wx/NLrBciBE7F2u920AohrA0x4TvonRuw8B6ADXY9sifetBygXjd2T+tf3\n",
+       "ybjseCDObn6ce3+vJQJS16OyzxH/HsaEwMf3hHL76nrI+fRrDMY8gCxzkFyHe5AC5Lvt7W09evSo\n",
+       "sI0COt7tdpPtARhMJpP0Zm4CLJ4DRoLXamAzAeXxeHSaa8d5Tjt4dtrNrq2NRkOHh4eFNCV9hs57\n",
+       "utT3j/Hr099ufwEWznhGm+TnRabQWZkyPWLMrhw4cbS2yhCUfe6Dx0N3Op0ETBx4+HVitL+KGfBc\n",
+       "XYww4yT0CB/jCZvB24aZeKenp5pMJglJ93q9lN6BOoQBAnF7kSnUHceDZllr74WEgCFHql5j48vE\n",
+       "PNed57meeeaZ1EcODKiDAD37mn3GwiPUaLwAN0TDtJH2eZ5eOq/ADtQ8Gohg9aoAlCcBE5eLnOFH\n",
+       "qTPw9GdkkWI/rWJMytpBv3e73VQsyPGMKztsbm1taTqdam9vTw8ePNDW1lZyJjgj0oaMv+95w28v\n",
+       "bGZextVKOKYItjjGI7dovMtSxfTLRWP3JNCJrn8UiYCwTJ7E2HwS4vMxMiPuhJ4UcErLvUwcmOBM\n",
+       "o47CcuCkfZmx1+hEhob7wS47WJCUmGI+w45mWZbS8u6U8zxP9YOj0Uj9fj8Fb7B9HvTyDNSxtNvt\n",
+       "xLAA9LHx+C/u7WlGZ9NY9gzYIPWOfXYg7rZdUtoXCB11QOW/y/rUt7BnjGKhuAf3PLsDTtcjZ7Iu\n",
+       "mk9r3SGWvy+KHpFIr/rDoTSkTzyq92ugiO6kWbZFDpJzy6Io/xsUy3UlJeWrVCoFxQe1eoFpp9PR\n",
+       "wcFBUhKuQ3SaZZlarVZSWgAF92S7eYw3IIt2xpwubfZUjkcs9XpdN2/eVKWyLKD1zY+IgJlI9B+5\n",
+       "T+7rrJKn43hOJrAj6viaAq7hNKRTvlCUPoGvGnsiXc75X/azy4g7Ywd68bplc6Psnm602JHY91Hw\n",
+       "czudjsbjcVrl8NRTT2kymaTjOA+WLIIHxg8g6rl2dNyXRbrBdMfouhVZIOYiALmsRiGCPBfXxY8C\n",
+       "Fvy6Zd+tEmc2r4I4o7cqOFh1jLS0Sxzrf2MvYhDoq0wcYPi13A54m3CcXvsmLW13v99P774hsHTW\n",
+       "jWCSth4fH2t3dzcxKg5GHJg6U0ib40v08DOkOZ3BJ/h1W0cKp9vtajKZ6ObNm2lxgfs7MgnOtrhP\n",
+       "i0xiBJ2eiikLCH1MnIkpAyc+Dv75YrHcr+vKMScurryeM3dlipM6Oi83RnyO4vi5rthScdc+BnQV\n",
+       "q4OCMtherYxzxjgTITIQAAVSM7Sh3++nzdV4LpSRiNKZCNDybDZLG8yRs5RUoBQpFAW00S9Ohzpw\n",
+       "6Pf7aWmyrzyAxYGy3traKuSMQfoRPfOdgzNH2/QvYxiBifc/Ch4NkhuIyBZcBYlRedn3Uvlqh3jM\n",
+       "k8Rzyu4sypzEk67rjpSxAhz4d6RtML5ZliWdGwwGBRaPa0GrMzdOT0/TCgNnyDD06KB/54aUom2u\n",
+       "T/uYG9S/wDgins7x/lgFTGJfP6nffNwvut5F3zmFv27mRDrPUMYoOR4XdTo6ObcV2DBnyNARt+Ow\n",
+       "LewT4sWZzoRxnAc12FRJqViVZb3UMXkAiW/wPakcsLhjH41GyQ9EIE/bKY7lM3SQ15BQaOsbEqLz\n",
+       "6DI+ZzKZ6M6dO2m7CPoWf+DbLPCiWOaAsx0+l7zvHdTgT+IGb5H5cAadcY/gB9DHWFw55kQqN45P\n",
+       "moD+MJFG9EIg74gIMDiOgZ/NZim/Tb2FVFQMJhMrZvJ8ufeGdAZ62u12KrCFVfD8M9flPQjSkmnx\n",
+       "iQWLsb29nZbfwWYAAlA8FJxzcBL0I5PXKVQMHudyzvb2dgIqcXI7kyIt30nkII22OzCK6R4mBYDJ\n",
+       "o5I4/vSxT4BoAHnusvqXqyBPmnzIk0BKvM5F142OzKPZeL/LCPrebrdTnRDX9b1tpGVQ4Pn3WKcC\n",
+       "YOUaRFC+2stX8qCnOHEAPufgvAHcvuSRa/imWFFgAr2fvEgWiYAt9vEqoyxdbNcu+i469XXKKtC8\n",
+       "irGMnzlQZpylYv0QOuLP6oGkM8ySUv0eQvDlRaWIA2VvB2kZbCvMntsdD5YJPvkcxh37SOEsjp5V\n",
+       "Pezc7bpCyh8gDQDHXvL3cDhMby5mKXS1WtV7772Xns/7jDlLX+ELR6NR2qoCJt99Zhwjntn3BJN0\n",
+       "jvWgn3xbB+ZkZGdYVu3nrZK1bsJWZpQR7+zIoETk5wgwrhpxY4rik3/DuVLkhLFzh+vsBfcmP8kx\n",
+       "AJ5ut5v+n06nqeDJ834MNNvv53me3lBcqVTS2zEpbEVhQdQs6WLlhLTcCt7TIEwWFNEn3WJxtmaf\n",
+       "5W/VajWxOBH88Vyj0Sg9U6fTSREAtTNuSJl4DkgiW+KTxkEef3uBWpZlhaVnPrb04WWBwDrko4KU\n",
+       "i/6O/0dAs4p5ig7uojZFJo/8uBcxSstISSpuWEa0yVyLbAS/iRTRHWfYMHCkDQGiq5yPAxOeHdbu\n",
+       "4OAgLRctEw9mmN+eNvQ2x7+9n1dJBDNl//t13J7F/ZvWLatYkzKWMDo7ByU+Z3H+XuDKddArB46S\n",
+       "ErssFdM53ja+A4QAJObzs51YDw4OUt+SivegFXDBWJAmRyepccQmHx0daXt7O92j0+no3r17un37\n",
+       "tqTlTso+T/EFgHgCxHgcz+8rO9l+wrePR2ByAFNem8jzMJ+kJfjzNqzyzz5OPqbMN77jGq7vHvyX\n",
+       "zWOXK8GcXIZFKUPTHq04nYdT90nhk4Gljy5eKOoTMHa8t9UroXd3d7W/v5/W4g+HQw2HwwQMKpVK\n",
+       "YkJms1kqlIKNYRBxuIAJIrvF4uz11ZPJpJAf5b0KPmkdXBEx+DMTvbIMjt1m2TAr9g1g7uTkJD2T\n",
+       "MyBMJp4hKqOnG8qYgXg8EzNuj8x5XCfWr1wF6tslPjPycYFU2XkOJi9ygKuu59fwaAn9yrIs0d4U\n",
+       "CqKPUjHF6uABnfS0ZbPZTICaSNJrqTqdzjnGArDCNXzpOfl2GEvmK20mMqVoHr33fsRIeqrzo4xN\n",
+       "1OmyICuOC32MREaG/igbq3VJBCDIRcA5SgwkY82Gz3G3Gc4Ee32a109wnrQEK86IwQB4vQV2BhBN\n",
+       "6poiWAJGQDOpCIJB7xsYPIpUt7e3dXR0pGvXrhXS+ZJS0Ij+NhqNtGzZ00gEtwSasB71el1f+cpX\n",
+       "Cr7OXwXgfVCtVtOKUPqY4NHbDyBxkOMAxD9jTDwQ8DQSx8Sx8jovr31cJWuvObms+ASWzqNjqfge\n",
+       "Af7ne1dEvvPqZ2m5ix4GxH/8etXq8i2a7hwZTF8JxL29AHc0GiUFJU/INsgc78W6FGxVq1U9fPgw\n",
+       "nVOr1Qq7DfqKCPoKpZ9MJundPc7gnJyc6KWXXtKNGze0WCxXUrix98nMe3RwLkx2B4kOXGKajGM8\n",
+       "okIwJvQV+9JgaLyv+Zxr+9heFYnMHvLnASurQMeqlIOfdxG48b99nKTiC+58fjgFj0Nx1gFwzByD\n",
+       "IcSQslvmfD5PdSc+rr5TbtRFj6K5J22nLVDisKGRoUA8beTR/GWkrP+ik/Bcvh8bGa0I+K4a6C4D\n",
+       "SB4suK2M33GuB3vSsnCeMfaaDw8U6UPArL9Kg3v5+EXWfDqdpqCPdna73WSjfFsIGAq/frPZTMzJ\n",
+       "0dGRut1ugUXMskxbW1uJbcnzs4JRtq5Hr7HnnItuVqvVVEDO/XkdCiuBTk9PC6uE6J+oY8wNB1je\n",
+       "3+6vmMdS0UZ7n8eUEcEqNoF7SCqAHnQ7gkjaGEFSlLWmdZ4k0ZnxmS9L5TOUyhEgiNdzaxxPkarn\n",
+       "p924cLwbLYwyaJQIbTabpSVdZR3uu6JS40I6qVarpRQOk4Tc4mQySe9zkM62t+cNlr46idoWVzzS\n",
+       "QJXK2S6I/X4/0aCAB87p9/uFCNwnNhMLNobPcABc0wuLnamJURH3ROgHJhy/OY7zPFqJxtrH/ipI\n",
+       "ZCSepO8XgZVVTvMiQOMGK9K1q9icVeLREzVKDgYd9PiSxvl8nhg+38tHUgF0Y8SZp1mWFXLavtTU\n",
+       "GQV36g6YYEwAUkTDHkFf5vk/Lrvl4NCDk7LUtB/jjCO/rwooQdxRrZprZdF2DBhdB9FTZ8M4x1N4\n",
+       "eZ6nFAVsHPrAvTy9g+33PVHYdZv3NO3t7Wk8Hms2myXWDp1DCLZYgXNycpKCRWqqnMVGz2u1WtrX\n",
+       "ylND/loHBxBcA+Z6OBymYI1AFtDCPiv4ntgXzvhwXR877ycPNPhfUiEdy+cOQtyX0E8OLl2HncXx\n",
+       "Y580v64cc+LG2HONvrTQaSgGzjvfd9tDKdyQSUpvNN7a2kosCPd3IEM7UEoUj/zlaDTSZDLRw4cP\n",
+       "U1vYbEdabjmM4Cyazaa2t7dTzcbOzk6qQwGhc42jo6N0HRArUaFTZIAVBx9Q8bQ7bnglLTezI/2E\n",
+       "0A++PDv++CSICk/7vT+YbL6XBeMYqWyPhnmmVVHRVTPkZRNv1YS8iFlZlapxwy0VV6gxHpHGXgWW\n",
+       "IgDieg5AB4NBum+lsqzLiECM/+fzedoskPomSYWiPwclTnsTSeJUInihnZ7W4xlhYnguj8ovAxbj\n",
+       "80Qm6knfl6Vo4jh6P3LN2P84m48DkL5X4kyGVEyroytebO/Pgi4CXklre2TPGNI3/uwU4XPt8Xhc\n",
+       "GNfI3viSWuwX86VWq6UFBzAdXA/QzDVhPL773e/qww8/TGDjpZdeSky7gwdPYfhnMB29Xk+DwSC9\n",
+       "c8fZM3SAII2XosKMw6awGg4d8XmIX4S98RoQZ+s8PeO2nLZEW+DpVOw540nw7eO4WCw0Ho+TL2Y8\n",
+       "8SOM50VyJcCJd4Qr+3g8LtQsRNoe6l9aTgBf5hqjO67Npmg46Nu3b6fB9UIg2sObd8fjsdrtdmFn\n",
+       "TElpTTsG1NMZPpnzPFe/30+vrH7mmWeS4X/77bfPpVycBfJ184AIPtva2kpLKh2hSkuAxhIz6gAA\n",
+       "B61WS71eL1GcPgYxveVOg+MYu0jX8n1kNHgmLzhD6HtnsRjDVqulra2tc3Qw4814XgVZ5VAu62i8\n",
+       "T70g8iLWJEayGC4MEQ47nkv/lZ3rKbSYI+Y6vuwR4+VV/b4HD0YaHZpMJmq324Xoy1NH/rlfP+qc\n",
+       "z3mYRNrBSjwPUlYBvstI7PdV59Ie/i7r5+iEow0k8Lgqeh2BWaTpoftd7zgPR+3Le+N4eGTO9+5g\n",
+       "2SyzVqvpvffeS9fzY/mbtDN2L8/zlKpZLJZbsnc6neQHsEfs6IrOHB8f6+7du2m7hfv37+uP//iP\n",
+       "9dprr6ler6etF2ifB7j1ej0x5dvb26mehQ0LY+EpOsGqzqOjI7Xb7UI9yuHhYSpQBwhhDwGFnoql\n",
+       "b5k3/rcHmnGOeHG6v7xzFUiNjDhz0Oc03w2HwwQIV8nawcmqaJJNa3BMdAAdSt7R99mIOXBfT01n\n",
+       "Yhhx0MPhUL1eLympOwOuNR6Pk5ITAbLja7VaTecDlDzSY2ICiNrttp555pn0Tp7r16/r2rVrmkwm\n",
+       "+s53vpNQLytnWKNOesXz5+QiDw8PUx945MizMglQVq4Jc0TKJ8/zlPd0gOMvIkRZMUDObnku08Gg\n",
+       "VCzQ4v84/u6AmQyMG5PQoxPuzTLuq2TEPyoQcXEn7RGNf+8GpUwwNN4nGPoYqbkzdEeBzsQVI+ga\n",
+       "bCJtxOiwm+y7776rF198UdevX9f+/n7STSJBzvWi2Pl8rna7XajLQhecKfE5DdChpspTIjgqvwfP\n",
+       "5IbYDbvLZYCLsyM+Vt6fMU3jUWaZHjgI59UA6xR3XogDAkCD70Lq5zmblWVZYkH4H/DiG08ylgBc\n",
+       "D1xY+cJxpLG9MJ9xrVQqaZ8plu26ffO+JcVMbcWNGzf01ltv6fbt23rjjTf03HPP6eHDh+r3+3r4\n",
+       "8KF6vV4BRJ2cnCQQ5Kzlzs6OHj16lEAMthpdRtc5j+fZ3d1N/cfxb775pnZ3dzUYDNTtdlMQQNvp\n",
+       "S57d7UkEc4xfzD4wPtw7BkfYBNh8AmBshjOoi8UilSLgQ1khe5GdvBLgRDpPZUMVoZDecZ5KIOXi\n",
+       "UT7OHHCQZVlSQOiuo6Oj9NnBwUFqA/uXgFIlJVTtkSPpHKebmSgOFKTlAKE8eZ7r1q1bunv3rp5+\n",
+       "+mm9++67eu655/T2228n2hBFopaFNtBPtVpNn/nMZ3R6eqqDg4OU+sG4O4iAYaIynPbwHhNyo3me\n",
+       "azAYaD4/2z3R33SJcXVQUubcpCUDwuf0mdfxOCDxFRmkthh3vvNxB7iyz8H+/r76/X4hPbdOicVf\n",
+       "UXj+mMZySjTWPMVjPOLw6BqDzrWjM3FnKZ1n9kif8T15dahmZ1PcKANWtra2dHx8rKefflrb29sa\n",
+       "DodpGSdOixefsVkVhpPrEyT0er1CatB1340gTs2fqdFopP0hKGCMYxMNLp99FHAZAUYZ4PAl2D5n\n",
+       "vJ8d4DN+6MBwOLwwwvwkxB0tvyOY4ofniCCS60hnzs8jZ9e76Fh5fk/N+EaRnEtRtYMeBzr4Ed+b\n",
+       "BL0m6CMAbLVa2tvbS6vLHjx4oPv376clvF/60pf0W7/1W3rxxRcLgB8g7Xat2Wzq61//evIXtVpN\n",
+       "O7tsnBgAACAASURBVDs7unbtWkGXnfEg6OY53WYSrPtKTEmp/ZGlcxbSg3fsjAeWUnFDRw9IPbD0\n",
+       "NB1tYMsBSeeuSWDMpnHb29sp6F0lawEnh4eH6Y29TmlJZw9OjhADy+C4USbXhXKhfOTDYDF8tUee\n",
+       "50nx/B6wGjgG1q+DtGkbqN5Xk3hBZ7PZTO1i0pAWIl+5u7uryWSip556Srdu3VKe5/rGN76hV199\n",
+       "NTEhGEicsG9RvL29reeee05PPfVU2iL84cOHOjg40DvvvJNYGi9clJQAGn06nU7V6XS0s7OT6l7o\n",
+       "P69TcSqdAkP6pSzKZMJzz0qlkgxGs9lMfQQAZCKgtFyHQjUHOCcnJ2l1Eud6cTFb+q9bYgSN4Mil\n",
+       "8++J8sjaDQmfx4nu7BjAZLFYLjnEgMT8sUf50SHyN4YLA+ibSmFAY5tY4k4kR8rxxRdf1B//8R/r\n",
+       "+eefT5sOOqMYgRjOodfr6eTkRKPRSLdu3Upvo+W5HESg4x7FM0/j/j7+3FzPo2g+9/SE9xvnlElZ\n",
+       "H0tKqV7GyW2e972L2x2PRNclOzs7CWhGUIJN9b6LKSv6nufyMWDuR+BAWkY606+jo6MEWEl90AYi\n",
+       "fVgWZ3F9/B1E4VjRl8VikcCyAxnY2Z2dnWSb3n777bQNA/bLyxB4Zkn65je/qdPTU/3ET/yE/t7f\n",
+       "+3u6detW6qNnn322MPeY217/B0hpNBr60z/907QCk5dy0q+k1bgOwV2/30/z2Df29BU39LkXtLo+\n",
+       "e/qH7/GdtN0XQdAmlj870Gb7C16HskrWAk4Gg4FOTk4K0bm0ZExgC6SlU3KEiZMEnKCY5Jw5l85j\n",
+       "kOmkavVsKTDvVvCqbpBirPp3qtKdPk7CC0zpcAykU8kffvihXnjhBd27d09PPfWUsizTF7/4xULa\n",
+       "hAnJczJxms2mrl27phdffFH37t3TM888oxs3bqT2fPDBBwkY0RaPEpm49OnNmzd148aNtINhr9cr\n",
+       "MBNeGAZoIZ3FVsxx1ZKnxqjNcYbLAYhTmp6qAhy6k6xWq0kvYIFY6QRrRL+tUzzyLUvXSOdXmrhE\n",
+       "x8k1/douXksiKekyRgrn65FPZAn8mk6FM6e4j7fHDSnXbrfbevrpp3Xnzh3t7e2pWj1b+s7qCF+m\n",
+       "7sCHe3o+/s0331Sj0UhzBeDuTtpXAEUn79vnOxPhgM/ZO4yug3f6M/a5j0MZAI2sGHOOKNiP9fnp\n",
+       "TjWCkcsyOd8rabVaqtfrOj4+PrdZI4GMs9e+ygqnzzi7HvEdNsOvQf/AbDiwdSAdGXW+YwxwwlKx\n",
+       "9kJa6rUDAvTy6OhI/X5fr7zyij744AM999xzOjk50Y0bN3Tnzh299tprGo1GhTe5+7Pmea7Dw0O9\n",
+       "++67unnzpn7zN39TjUZDOzs7SQ+w8bQNn0NJAv6KawLO8Ce+pBow4i+dBbQA5lh15N87MHFGPLKo\n",
+       "+AY+A0iyRQD3935mXBgTFkPwve+iHmUt4GQ4HCZFu379emHpFvtoOI3ra8Sl5YubcIiSEhrDsXGc\n",
+       "Kz3Rvnc+CuBK71G9O1sMPwrBdWLxEeyJ5y4p5vzggw/U7/d1eHiowWCQHOvt27fTLrXxWaWzAe71\n",
+       "enr55Zc1nU71kz/5k6pUKnrnnXdS5TdKAENBPzJhvU30zXA41OnpaaK9URb2MolAkLTLeDxOb+ck\n",
+       "5+n7VDDJPDVE7Q59DPihXUQibDaH4/IIObIJTAqe6SpJWR2B051OkyORSXSJDiqCFa4VDUMZMFnV\n",
+       "Vj/ewQ3O2hkEDBXOd3t7W2+88Yb29vaSsWcDtP39/TRHfG5xLwBvv9/XvXv31Gg09Nxzz+n+/ftp\n",
+       "jwneiuz9ik4Q1TGXmQ8+F2OqzOt6Yt+UgZgI5Lw/0EdPffnqOMCX0/dRygCPB2XrFBwPAaTXMuFU\n",
+       "HRB4wEefABKwCW5b6ZcsyxKIc9tdqVTSHh9E5d6HjD22wPXfN5ZE//J8udmls/bOumVZpsPDQ3U6\n",
+       "He3u7qZnPzk50ec+97kUMHmNIcEVYOrevXtpB1l05Pnnn9e3v/1tzefztPs2/s7BBH3sATzXp68J\n",
+       "ADkX2x/HzmsVve98paf7Bc6hPV73A6gA/JOF4NrOurhvl4rbgpQFWy5rASdEuFmWqdfrJVQuLfNm\n",
+       "vkTYKWyUeDKZJOcLymTSewEP/7M9MMp9eHhYeHcLRs4jQo9ypOWyYlJADDaoVVoCKUfznh7Jskxv\n",
+       "vPGG+v2+jo+PU3+88847kpTe7grFTd1JvX72cqi9vb20E+3W1pb+6I/+SC+++GIaZIpcSZ94eqTZ\n",
+       "bOro6KhQIU50QF9KS+p8PB4XwAypJgwudTA4Mq8bQNygwWp5HRB7A/AzHo/V6/VS9EKbvAiLZ/XU\n",
+       "0kepE/ikJLbJUxBScQlvrFPx1BDXKQM6fh83Ur40153mqjQGOhvb68CbucP/njqp1c5eJb+zs5Mi\n",
+       "OgD/cDgs1ELhgHz+MEcBQq1WS7u7u3r48GEynnEHWq878P7yfo4MqEfmHj171B6Bg+t0HBPEU20I\n",
+       "KVDf6BHH6mwWoCfeAwdVdu1PWtxGEpljRxz0uZ65TSA1ApChaDSu2PCUEA6+Xq/r2rVr6V4OSLEt\n",
+       "3D/+po85F7uCA+Y47DTPStuuX7+u8Xic3meDHt69ezc9J2CEZ6fG6tGjR9rd3dU777yja9eupdTP\n",
+       "7//+7+uFF17QYnFW18hqS1+4gH7GeU4hOS+ZrVQqhb+jfkYGhGt5RoD54+DOQYikQhDCcxIQYMPd\n",
+       "fpWlr2P9EZ+tkrWAE2coACpQtDj9GEWQi3bDyOez2UyPHj1KqM/BCdfFiI1Go7SMyZXcgQmgyGlt\n",
+       "lBCA4imjWENB2gRFc6qc9elHR0c6ODhIwGNnZyetNvDVOkQagK8HDx6o3W7r/fff1+3bt/XjP/7j\n",
+       "ev/991NOdjAYKMuyVOXtyyo9lZJlZ8W/g8EgLYEjvQMAo/LcJwoK6n0DuHQAwSTFMPlvV243OIwr\n",
+       "+wAgtN0NeJk+rTsvLxXTaDEi98/8WSJ7EqlnruHGir50sOZO39sirY5SympfPLWAcfJnwYFznLMD\n",
+       "fo0sy9JSSNKCFHsDdKncr1arOjo60tNPP63xeKxGo6G7d++eCy78uZi3MYVGP3hhtqcSor5EnXIw\n",
+       "EMeJzznfr+MADhDiu9cSnfLcTplHBsb70VMX6xJAgDtx1ynG3qNi9NidEscDcHwlJud7bU6tVtPe\n",
+       "3l4BTDrA9nbxP/3lNgv7AsPR6/VSG7HTjI8X2mIfSdfgY2Dy+v1+quPwFDXB3s7Ojj772c9qMBjo\n",
+       "+eefV7VaTanoz3/+8ymI4EdSAvn0G5/DzMCOHx8fazabpTovfAdsPMDcwTf+JAbf2GGvtyGAdH/o\n",
+       "LAzj5sCFQILxYX6UpSk9iC+TtYATBh7QgEL6qg06F4X0CIKHAmGChH0vD2db6CAKX0kr8T3O1pc6\n",
+       "+YRxx8jkigW7pKN8MnB9KN1a7WzXQFYKsawMNAy74bsK8pykTv7gD/5AL7/8csrns4x4f38/7emw\n",
+       "WJwVbXU6nbSMMoItwANMyt7eXlIi9nSRzvZQ2dnZKeRDmeBEyETFHhECcHxnV4CcGxlfukzbvDYF\n",
+       "J8bfkd5m/KKDX5d4e5yVkFYXyl4kXkgZNz6L1/I0hTs39Bew4QyUzxXO8+8jEHHn4REqlfcYSKem\n",
+       "qYHxqJe0j9PxN2/e1Lvvvps2BORzVv0Afn0Vnb9i3qM5jwi9H2BmGCdSiHGJpfdtGauBrAI6ztT4\n",
+       "MQ5guJ+nbiNIiX+vSzwFE/eNwcZKSzbTwYY7X5/XOEoK5r0ImFUcDnqd8fDI3NORzjL6XPE5OJ/P\n",
+       "UyCGnjrDg83BrhFwYl99bjx48EC7u7vpGjwLYz+fz/WZz3xG+/v7mk6XL4P9whe+oDzPE8AAiNE/\n",
+       "vAzQnw2bzbt4YJmcjeRZASj+PePEPRxs4acoMuY85pzbfknn5rKPNWPp9i8GWj4/VslawInvnueG\n",
+       "xYFJGXXqA056wEEOSND3+kCR3aFyHQAAhXh+X2lJZflqmZjPzLIsGVNpSdsSFUhKSNcpN/ZlQMko\n",
+       "ZKI/eDZPZQEa/uRP/kSdTke3bt1KTMP9+/dTbQsRG+DAl4L69UHGrIrgGTDcgDloWJxFrMZnHKEp\n",
+       "uf7JyUkyAgAsj064B32M+OR3xF3GZhHZew58neJRfFma6UnAxIEt/eJOCpr6IpqfsfHUgVO1fkx0\n",
+       "xDggDwrQZ8CGM13OdFYqZzsmV6vVtJLAa46cSYyMB/rNNuLsA1SpVBIAB+TTfoCGR2fohzNN6BI1\n",
+       "Aqy6q1arhSJqgIKPW4zI0X2eqQxExH6WlrUOOFokAs1o5CMYX5f4/MLmOiNEu2PahWfF/knLmkEc\n",
+       "PfVxbu+d0ZBUGNcY+EnLlEEMSr0WBpBB+3G0jCO+AYaClYGVytmGlzdv3iwEWKPRSN1uN6UrmXOw\n",
+       "4YzjdDrV7u6uJKXNMg8PD5MP8PS2z1X6LabK8jxPzA/PDcPTaDS0vb2d7uX1MzwrYzifzwt9IC33\n",
+       "BcLfuT1wEoH/GUfGlWs7uPG6k2q1mlhUGJpVsva3ErtSoFxMBI8u3EjCVETamS3pSWc4clxVVBYN\n",
+       "s1NdKLFHrtwrFvr48lnAENFfrKEhHzkYDJTnZxXdvV5P0nLrfSYTO2iiYL4Py3vvvZeQ9M2bN5Mj\n",
+       "kpZFuR6NlTl3BxEYhF6vlybmaDTS/fv3tb29XQAobiAc2HlkSyrMDZFTrCg3zoPr+P4X3tdetEVf\n",
+       "Y9x4tqsi0WHFtNSTGJRI9/s5MZ0jFd9p4oYsRiZlxsANl+sKgrF0YOtACbbOHQIggLdYR8AFa+JR\n",
+       "lxt01wFfAk/huzMS3NeBHM8VozYMa+w/N7re/3EMo5P2Yz1S538PqHxO+nX529Mg9DMOZN3g22l6\n",
+       "xt37SyruCM24eMqEMaNPsHXoj5/vTpPxhpGDVYhMatRtricVC5xdfxCCOWfZ8SXT6VR7e3uJvWZM\n",
+       "FotFes8PO2wDZggQfT76LtaAVH+1gz87eoHNi6k9mBJqFKln4d6wW9hfn9c8H3PV3xbOPKxUKikQ\n",
+       "8HStM1OrBEDjY4feeLqU41bJWsAJnc2EhGrG6WI8fJJiDCWd25QMFO+Rm6cPnC3BAHA/irToLCYV\n",
+       "9wdBOoKXllXgEZHSdugx2tjr9dK1BoOBjo+PE8KG/bh586Y6nY4ePnxYOBZAEukzwA8Tl36JhhUA\n",
+       "gALGaJBCVO/b7e1t7e3t6eDgQPP5PBXHQoU6TY0TAdy5UXDAhxFytgxWx8GJR0keaXp7eX6nES9C\n",
+       "4Z+UuBFELprIUSJbJJ1/s+0qwOKfScs3iK5ybPRbDAa4Btf2JbBu5AAtrEIBqABG/T1P0PEYdliQ\n",
+       "Xq9XqE3ytIu03IAMBsaZEGeDPKqkfRzvDp95QHvd4HqUGMfRDaz3v38fI0dnr6Rlca6PR9QNT/M4\n",
+       "UFo3g+JpLwcW0vll3P4bJ+6BCc/o6S4Hv4wNfcr40gb00VkWqViXJRXfVs33UnG3agIb9JR24qBP\n",
+       "T0+1t7eXtlngOo1GQ7dv39brr7+u/f39VPB6dHSkGzduqFo9qy1xANrpdDQYDBJL7bUd9BUCs+CB\n",
+       "M0wxesk2DrVaLe1Bc3BwUNjw0H0b+o3v9D5xPeWa3u/O6tCvzD/muzM+HhhH9tv16coxJzjLSL/i\n",
+       "nGIEIhVrD1xhMWieciCSosNw7K6UODnqPpgYbiS5L2jWQZTTrjAbOHDOZ4DzPE9AZDAY6IMPPijs\n",
+       "epjnuR49eqR79+7ps5/9rCQlULK9va379++nTa24P2iZ/SN8Z77FYlmvg1NwFoq+caBANIRytVqt\n",
+       "tEmb061MEKdtuT6GiB8fR0CTT1ZJ54wubfA+lnTu7zIHcVVkVdtWRQn+bBGARKcbrxmdm/c5feuG\n",
+       "J7anjKGJ90GnAKZHR0ep4I8NnlgOyXujYCJ4Jwg7DwOo0V/moVR8W3GsL0BnKVb06BtjylxyGpr/\n",
+       "eX6Oc3DtDnMVEPC8vDNXXM/ZFe8/Z8oceDqA9PtFVsyPW6fEqJ05T6rWAa7bRbcdnBtBh7O5UjHy\n",
+       "duZsNjtb/dRqtXR0dFQAks6GeK1RTEtij2EPSN/Qxvl8+cJKnmkwGBRqMQiMRqORXnvtNb3xxhtJ\n",
+       "927cuKFWq5U2t/SxJRUEsOd+tVotMYX0dSyYxm85AHAbycaLgBQfg8iWOwHA3OZ/dNnvKS2DEfoQ\n",
+       "8OH2nftFW+L1pD7O3HuVrAWc+EYukRKlvsNzmihZZFK8M1FAf5WzU74cB9BAIdjVj9xejJZQIpCu\n",
+       "TzofePLsvkS51Wqp1WppNBoltOr7L6BsTJZ3331X9Xo9FVgdHBwoz8+KpmiT/87zs132vEAQEMVq\n",
+       "IZ4dytInoeeH3TjPZrNEx+OQMA5EgyihOwJfigzoi5PAxwaaDxTN94A6Z4uYuF6z4LR9BD1XRdAh\n",
+       "f2Y+p71OPUcmQDq/gsMlRj5+fcakbHUF142fex9iqGOhYLVaTcvP2cyvWq3q3r176vf7Ojo6SjrJ\n",
+       "Pjq9Xi+9UG2xWKQ3rjLGtNXtwNbWVnp7baz5oo+IfD2v7oAOI+0sCc/moA1b43Pe+8aPpY9wys6y\n",
+       "uPGNQMZtEp9H4FlWa8GcW6fQZrd/6IKDQ0kFQMnYOVsCoAF0RLbUI3ie28fJAUbZnOA8Uv9ScYVX\n",
+       "s9ksFEKz1B0fwsaBW1tbGg6HevbZZzWdTgsgxdP0165d0/7+frKXnU4nvRIFPwbIcp8Vi2djKpvg\n",
+       "2hkKntltB8ujfUECfYY98eDE2RrYfx8vjvc+4/oRfNBeBzi0i/s7K+ZgO86XKGtjTtyxz+fzQtW2\n",
+       "VESKjpy9aphruJOD6pJUMEpc2wfG84uei445tYg2uS9g4OHDh2kSOfU4mUzU7XZ169YtvfHGG2mv\n",
+       "Ekn6zGc+k9iRbrebjPC9e/fS1sS1Wi1tXuXbM/tkd+fjL5Ki3UwI0jYYB/o3rohiZcVsNkvUom+l\n",
+       "7JMD5XblZFkdq3hwUp4C86JZ71s3Sp739PF0AyktAVkcs3VJdPKuz25M/bhoQCNo4btV0bl0nh5l\n",
+       "bDnGjRbihsIpeNJ3vlcJ+XY3thRwP3z4UJK0t7en09NTdTqdlDqpVCppTpIfJ5XnYBrwyfPgPHyT\n",
+       "LEmJCseAx1oOB9/SMmdf1keMh3/nDKC3hz6NEagzI+hxGfsbGZCY3vEINdbo+NxYl7hDon/c4fDs\n",
+       "/I/d9a0MfBx4Lq8n4hxsvbPP9CkLCLyuzZ2wdH7HX28XAIAl7thdtm9gkUKj0dD+/r4k6dGjR6rX\n",
+       "6wmQE6xRb+JgzVffUKOHTtRqNe3u7iaGnT4YjUYFfwg75G33ue86689JcI5fYe54Gh4g7zrqdjwC\n",
+       "brdBTg5gbxkzacnqe8rG/XScKzzXKlkLOHGnifHDwdCB0hJ9oYBS8eVkXiCJkSK941E4g+ab9XgE\n",
+       "JamwBS/3ccVgUBlY8ovNZjNVXAOQ+Pv09FT7+/vq9Xp6/vnndXh4qHq9rldffVX3798vOKlr166d\n",
+       "c2i8kA/Q4I45y7JElbfb7aSATFwHH4eHhwVFZsLwjBgC/uYdNlSTO4JfLBZpySh9xfcYLU/heH7S\n",
+       "iyDdALtyM76wWTESiCk9ZxWugpSxHU5rXnRsBB9lkWFkVFaJ1zfECB7x6IbrAVIYY/TGgTuAFkYE\n",
+       "VnBrayuttpGW0VG1Wk10tjMisB6ka6Ix9JUKDl4crMb+jfrggYszbG4cI5CINTrOfnkEHEEmx0WQ\n",
+       "HO/BuT4HYnqHc5wRWqe4vZCWO8A6ve/94cDP0+jOlKJTETjH1IuDE2dfsCWeUpCKK85YUFCr1dLL\n",
+       "Wre3twsBEuc1Gg0dHx+rXq8X3gR9+/Ztvf/++3r06FEBCACw0V1W4jA/RqNRWvlCehMg5PtgbW1t\n",
+       "JRYSVsmDLfeX0tJ/YbO5Hz6xWq0WXtsCWxkzD9gH37CR8cOHon8UIMcgC10ASPKeOXwRUsYyogOr\n",
+       "ZG37nPiD4vCgkF25XBkciNAhoFhp2bFujJwa9E3c3MgNh8M0CHQqiukGg1VC0lmn7uzsFKqc+Z3n\n",
+       "edrpFGaFHQ7v3r2rra0tLRaLtMxXUlJaUkHsporx95c8eRTi+wGs6ueYSqDGhueNNCAgBYDlG+/4\n",
+       "e3C8z1E6JqOn7ubzeWJAIgXuQJRaBCYxoIu2eWqOcUFHrgpzQt+5I+SZ6cMYNayKJDwalIqbfEXx\n",
+       "z9xISyqMb2QSYrqC6JT5546BOUg+3PPTpBiHw6GGw2GqLYFBAWiiP6Q7+S0pGVfvv/F4rH6/n95h\n",
+       "gpPziG1VSoZ2OTChH/jfdTCCOAdlZaxIHDdnbvyzOM5x/CJb48/gAGyd4u8gw+ETwDAenl5jLB18\n",
+       "8JuUr1TcFM/HCGE+sacN4JAxdb30FJCDKXSM4+PeSp1OJ62aIbj88MMPtbu7q62tLX3ta1/TrVu3\n",
+       "0rmw4pJSMMg29Owe3u1203j7bsjUB2IXqtWqhsNh6jMWaNB+T/14qs9tOyDX57mkBOzcVjtjg067\n",
+       "jvoSXx8Tgvsy4M1LCambgc1nVV7Ue59zF8lawInvM+Bpm0ajUaie9sLLsvfZkKP2yCvStC44CC+U\n",
+       "hZr2NeG+PTMKxKQAuaPcPMvJyUnaVZUUFe8LoYBrPB7rxo0b6e2tktJeEN1uN+Xnh8NhMug7Ozs6\n",
+       "Pj5OeVFpiWpB+L7k0tNhtElaGnAmOIjb2QsUDwTvyl6v19XpdAoOwDcwos/5ATgywbiH9yPiY0VE\n",
+       "Tr97FMwKEHcEPsGugrgRZkLzf5zYZcyJS3SAq6Jov04EM4ALqbgMNBoymDf0JdKwgHbmXHzeVqul\n",
+       "Dz74IL1SHjDqG3b5VvPT6TSlf3ye43iYn/V6PRXPkmZicyrmIYYvpgf4G/EAIj6jU/8OTOh3ZwEc\n",
+       "lHlfenTv4xL7mZo0HIuDHwf+Mehap9C/9A1stKf+vL0cTx9iE7Ar9JWkBGCRCMqjA/UFAdjCyWRy\n",
+       "Lk1HX1IDRSBHaob9R3hJJMCh1WolJvvOnTt69tlnNZ/PdXh4mBgVxo3/YZjRDYI66Txopm0wLfg8\n",
+       "Ns2MfYg9Rkcig0UfMReYK/Szv0gWYOh22hkVabnSxus8AdBxPJ3d8nHDtwBY6Qc+KwPpUdYCTqDZ\n",
+       "JKVcHyCFKB2D6KkWAIhU3OwKB+YdhtGMBt1pas7tdrvKsixVOns1P9dhMvlyL2n5ojxPGfkSNopT\n",
+       "JWl7ezvlOgEH0OC+pT4reaSzHVo93eXGAWPPM/veJExgULojXGdImDhcH6ViddBkMkm5WIq04hj6\n",
+       "WABkHGUTRftEig6AZ4IxidG8O3oU2yNZANdVkRgtSuep+yhlKZwYua86DvE+8VQEuhxpWfTMUznR\n",
+       "2fr3jJFveLhYLNKS806nk/LdUnEHVc5lHCeTSdqBk/s5a0nE65/hAHwjQHQJ48lz+1x3wwlL4cDN\n",
+       "wa6ncbxvPUjiHHeiDubKxgZg7sytAypnspxxuwriAQK1dLQTe1nGbkvFeitsss8NZ6h8IUQMeqRi\n",
+       "ioi0s9sQrjObzdLeH6xo9IAzyzJ1u109ePBAe3t7qlQq6R06bg9feeWVtIs2tXgAHXS92WwmsEKK\n",
+       "pl6vF5hfdM7rIt1e8a4hxNlhDyI9+HQd53t8J+3xNwAzv7wfuY6ngRijWMTq7XW2DIbTx9uZc/df\n",
+       "zNPoh8tkbe/WibQpoABE7lQW7ABRB+I5LxQZWhjUCPJkwJzKRcG9AIlIgJUmGHTPlznNCqhibxBW\n",
+       "6zDwi8WiMNC0FWfNJGYwKVyl2Orhw4fpGWBjuHae5+lzvx4/kbFwSjwiWJTIlYX7zefz1E84E4/W\n",
+       "PYqEzkVR6TeiXQdGPqG4BnsDuPPwqAFGiWdkEse6gXUJho+2EaWUAQOpCABo/yomxK/v/3Ms14hR\n",
+       "KDpW5ugwth7N+zWYCzgg3/PEi/Yo4KZNkWL3dgJoJKW5yT1dt5zFZM44W+JGjzw+OuL9A1PjdW2S\n",
+       "ClF8ZEK8HoR2O5DzovR4jKfDfIz82Jju4V5loOgqiI8Pv9FlZ00iw+ERt4N0T62hz85QY68BzFyH\n",
+       "H+wS4IAxZgxarZYGg0EK7tBdauV4v40Hj9h1d8osB+Y3vsB1GBsPoOR1Hw64uJ7Pf+pQXNccIPtq\n",
+       "Jvc9jIEHKwjLo6lpwUc4+0eb+d83yvQNNj3odFvk4A0ggg13sOl2niADPbiMfq91h1iPpjwv5XlE\n",
+       "R7t0nDtTouroYOkofz03n3M9nL60fK00gMVTFgy4p3VgGRaLRTKwvmLA8+DVajVRbTgBR6m+Ex/f\n",
+       "ocikijAAfn8iOc7BaDD4kcZG+RAHiLSZ+9Bfk8kk9QfGn70saI9PHNrRbDYLbxyFvvT35nik6Xl8\n",
+       "3/DLz3fnTQTtOdSrIPSDRxXO8pQdy3HovQMN71eu5UbGgbK0HGscLN+5YXO2yYtmnVlwY0X0NZ/P\n",
+       "NRgMUlQGeJWUisK97RhmB2c+T7y93g/+bMxXH3//m2LY0WhUqC1xI++6Tduk4kZRzsgSLETG1QOT\n",
+       "uKLHGVVPzfhY8T8GOqaEfJykYsooXueTllj7h02i/7yo3leHEPU7uyGp8L4lAlNsrgNP9MVX7bju\n",
+       "uG/w8SY9E+0S7aDGER3FefIc6CO6AJvr15OKWxugq7GepowlcJCBfnuQQiDpab+YHo6AnTQ/fQNo\n",
+       "gBViVZBvN+/Pw/3cV8KEO1sjLQNKxsrnegw0fK56DSHtXKlzH19dP754wV2WZYnmipMRxZSWyL0s\n",
+       "QqZjSItADeK8iJDcOLkT8S2Hm81mSrG4gfFIwRkRZx74n8EkDeWokgGMlcuR+WEQXRH9f8Rpc56R\n",
+       "6Nap1LhduAM5Z1K4FudT4EU/kB/mfUKeanOnkGXFF1kxfl5T5BEWz04/k7LCAMUlpYwN11238Ubc\n",
+       "kESn5SmasvZG50xfer846EXnoqDzzB1pmTLhezc+buydHWCZZdQbfmq1mvr9fgGUuPPl/1V95IDa\n",
+       "v3M99M/cGdIGdBDw7xGbp3hoB/dCP7FF3g5/YWUE3/7b03T8HR1QZFcisKKdPiel4ovVvN3raEGS\n",
+       "pgAAIABJREFUEuYfOoWjY+7hFLFxMaXIc7vN8fPdPsLUuf1H1+NCBwIplu3CqMR30jhw4l7MHfaK\n",
+       "4jgKZrvdbqon9OJenyN5nqe0oxfs4icc8MegDNvmPs1ZE0mpXoQ5SDtInTqL5Wwec6GMnfZaEvqF\n",
+       "a/OsnONMErruQNPtugP7arV6Ljhl7Dz4vojtXtsmbBhQImqMDBMZA+FpgIi+fVAADCBmIj6AB6DF\n",
+       "Iylyhp4CYrAoZGXwcdbSkipHwZm4IFSWXcEuOP3pyuD35TiUAQVjsnqE6cyPVKTOPZJ2cFWpVAoT\n",
+       "FmTNMzu4AszQZnKqCCkj+seNsOeJiaAcvHj6KU5OnsmL2zyCANjE/iPHexUAikfgGBFfpRP7wh2c\n",
+       "gxY3ZO6YHKwg8RjG1j/HaDImGChPM3r6h4jJ2+eGEPBf1k7GmPb6/PXjI0j36MxXDXCver2eVo24\n",
+       "rrdarcTuuX57ZB3bGZkVdC8CyAgm/VoYbx8bd8gRUHgwJC1XEjEHGLtVY79OcWYPXUJfcHxeS8PY\n",
+       "8DzuyLxQlN9uw6NDo3+4L6llUjXMCZhexJkIB03YUk89S8tdrLkvb8L2FSfoFHtKAQDKarK8VlIq\n",
+       "roZ0fVsF5F2/ou9zJsn/pw95fxvPCHhykIjQj/ge/57r8lzYA/wm53JdHyufzx50xA1IV8naCmLd\n",
+       "ueR5nhz6YrFIqNEpTwwSkmVZWmeP0vmD+rIwruP0oOfGJKVrcV+Mna8SKGMayiIbb6+DBaedI2vi\n",
+       "LIykhEZjJbz3gVNmOATahIPneXgmz5W6o/eoBkq7Uqmk1RL9fr/w9s08z1OKhvMYPxQYw4XS4yic\n",
+       "uYpLAgeDQRpfn/yc646ZtkoqGPh1igM1npHCT09XuQNy9sJZCeYGn3sfl9GhTqdKq2sjPJqi3zyF\n",
+       "Kp1/10aZvsc2u+PiHjgPJBpH12HqB4h8eU5nZnxHWHSGueoAnWvHokwXn2+AaS+gj88YgYrn8f0Z\n",
+       "GSPvQ9oQmRhnsSLLEHVlnRJTVWyoh91y5xMdkqTCSkJ0xAE0feHpIbf5zt5Jy8JQCmIJUNyeuZ9B\n",
+       "fBWig3P+Z+yxvXxPigib5n4gjqvrBXMqgk/XG19aTTvoczZDlIqr75xB4rquv6RiB4PBOcaDWjHv\n",
+       "a+y2tFzN5Asg0EkCU56XdlDX4sGur4p1RpB+ATCukrUtJfbohY4hx+1gwzsFhfV8l0d50nJDGAwc\n",
+       "gpIy4DG/V6vVNBgMCq95j4Yalod2OdvAPaRiLUe8B4ZVWg44QMKRsCNyd9DOyHBPJgvHMbH8PlzX\n",
+       "t12ODs8/Y4L5tsVUmvNMTh/6TpCMQzQuDv5wJEQlzgShA05h8uxMTO9n2uxGaF0SGQSPNBy4SOVv\n",
+       "UeUa8Vox4nc63A2Ui4N/9IiIDgDh0Zm329uBOEvAvQFA7jSkJSiBAfHAwJ895viZNwASoiyKILk3\n",
+       "x0Clk250IOXsm7fN54Pbm1g0uEqnIliIgQnX8HP9uh69u464M4y6sm7g7eOEbQag+J5Q7vwdHAOc\n",
+       "eU7pvL3kPogHMP45fzsTjaDPnlrxrd1h5Pk/6hU/vgmhrxQ8OTnR9vZ2wT5LZ2kuavF8rLmH2zi3\n",
+       "afV6Xd1uV4PBIPlFr3dy4O9MC3rt98MudDqdtGqIOj8YqzhPOY/+5B7uv7DVkgrZDQelPA/j6syS\n",
+       "B7xcz9N1q2Qt4ITB9j0TcD7tdjtF6OwqGY2Eb5kund811iMPp3opPHUWxB0dvwEwoMR6vZ4oMe5N\n",
+       "u9iYDeOGOHCJkZgrlztekCfRo2+6xoTjuVFEEDxGnEI1p1g5hr5zytqNdyw8zPOzzeQePnyoTqeT\n",
+       "HAHH0g6MPffxqJjP3KA46IvgSzqLWlBu0mZs1OW0phv46EjXJZHWjnSrR/YxdSAVNyh0NjBOYhya\n",
+       "j188zh1bo9FI+yq4XnhaLTpWxo/reg0Q18Wh8wzM6bgB0//X3rsst5UkWbsLAEmRBAHwJqWUWdWV\n",
+       "1tWTfv8n6KfoQVn34M+bUuIFd1IkAfwDnC/2t0PMLLNj5xQ0QJjJKInA3nHxcF++3MPDFSedP2SA\n",
+       "ZS/aawrz1O/3SxzbwGiz2bRqqyRt4MdaeK5gpkxPO/ZfP8PsksHFH7ErfrcdjtqLtJPh9ah13q6B\n",
+       "NwYeNhp9C8NVMyteU+bfOjtpwLMBODqM37umTdIYa2TIR2UpZlY7XDyXd3gPAhD4LH848eIj8bDZ\n",
+       "DlkQ0mB+fHmlHVL67nckKaXsOfbc7XZbzqVrkliOXhunGWhOoM5ms9Lvbre5kbjeJ3X4iDVljMwX\n",
+       "JIDzfBySS1JspfNvWCPbSDM/r7WdaHQvMoM3q1CfisEzB3ERTsAAsxHIjUAQ2TAUtzGFZMPJBJ2d\n",
+       "nZX8l36/Xyoh2vMyg2H2wErKeSwgYRKr8PD4ztPTU6mySAljPkfNEo4L2xOmP4yXOeBGTeaUeaDP\n",
+       "Dw8PBRB5bGYzrGxfXl5KP3w6iWqIZk34bg06WHMSz9wvmBfmAaXHJgNUoqyRAZR4zYLtutVMRu35\n",
+       "Whnz7yRfzXud52HPmTnCGwGovDZ+5Nx1CFCeNCs++vAa4PNaJm2AlDT33hj0o+A8J8iKn2slxak2\n",
+       "Pn9yclL2AUmAlrsk5Qh+rXx5fu1ho18Mcl+rgHl8fNyioL0W9kLt3Lj58wbodlK8tn6WDem3AL7R\n",
+       "WYBCQjM4R97vSZPDAYiBMcDJS/KV0TXQdTi4Ds8kzUlHDOlisSg6CSDFu10MkD2BXMDq8Zk6x8S2\n",
+       "ijASjjQ2BEMNMOD72AuD76enpzw8PBRZTtp6AXC22Wwyn89LH2zMnVeIDPH7N2/elL1hvXx0dJTR\n",
+       "aFTGAJAycDR7yPqxzn/E0hpw+N9mtW0b2aP/TK53VueEY08gK9e3cF0SAAAG6+npKdPpNOv1ulQs\n",
+       "ZaPUNSUwZISLzs7OSpYzjAqTzh+U1dHRUTmJwIQiyPZoLXQII5vYpdwpve1iQNQyeXl5KQAFRoLv\n",
+       "wcrwHkJOvIt5dFIXmw+BhP1J0nqH45ZG1QgUz5xOpzk/P28J2ePjY2azWZ6enjIcDnN+fp71et1K\n",
+       "nvXcYGS73W7rWB7CbwOCEUC54MXYCyGc5Qx5NvUuG2trg1MbLCsuFL2ZBeTc+TU1yIGVIV7Nv2tF\n",
+       "mDSxdeYfZez+ITNWenVYhL/zDgMiG56kfW8PFZOdkGgDTR/t/dkTwxDQDLapMkuf2Nf2DP13jwnZ\n",
+       "r6lz5srsJ+NzSJVx+99mof6o1QCIPtWsgvfOrpmTJEVP2dAAdCkmSbVVnEFARb/fLwwo+x02D6BS\n",
+       "s77r9bqUnTfbAEuctMOIhDCS9rUFNfBFv6NLkS2D6i9fvpRrRpbLZa6uropM8hnkhPpbNfOHTXO9\n",
+       "KOTW4aSk7dSwp7vdbgHHrgdk1sT7BjmCnTw5Ocl8Pi+OLxXHT05OMplMcnNz05qzpH3akJAnDBU6\n",
+       "hvA+TiLfc80YrrBIGt3FvDCPh4eHLSepbjsDJ574ZCsYGNnxeJzNZpPLy8si3NQxAG1x3Hc0GhXg\n",
+       "YqDB0baHh4c8Pj6Wd/GThQbM2Iv58uVLKXnsDYbi4nsg7ZeXpvy9lQiZ4wcHB+XoHUqOI7oGFe6P\n",
+       "6WcMCieGzs7OWhRqTf8yBhQlmdgYDJ6JoNTMkNfEXiVzBNC7u7vLr7/+mul0mm53W3wIBcM4UCIG\n",
+       "WoAK1gsFhDJxnxxSY3xsRjaRQeyu2x9R+vzbhgZAggwhn8yhPXkrINbQ+Sz1SYQ6xGAlyl4x2OTz\n",
+       "gBTLup9Jf/27mtIF9PtZHrdpaOaFOXD+FM9Ejvm+HRFocVdqNjjmfYy1VubIE+927ke32xydd3jC\n",
+       "xpN1dI6D2SSzZx6rw2ZmpAA2NnQY7F02+mWnxmDu5OSk6EHmnRwHHLGknXsDSLCjuNlsL5CELbPx\n",
+       "s2fPHwz+yclJzs/P8+nTp9Yx26Rx2JIU55c5RzfiGCFbHCN+fn4upzp7vV4rfGjGwCyecy+QRd/F\n",
+       "gzx6DyDvPAP9bfmAYa4TcutIgUMosNMAlZOTk1xdXSVJZrNZqVtkB9IOfpLC0iKP3l/IA7YQW8X8\n",
+       "AF49X0RLPFevtZ1whWx+aHl7cSic8XhcBskC4SENh8MMh8PWEd7VqrlLBmXJxXs8D8UEkkya0uos\n",
+       "MKeGxuNxJpNJoQWTtJAifcWzB+myeCjPpIkr1hvcFFfSgAH6b5qTRGDTlXjK9rIAPwgtxeWMuu2h\n",
+       "ooTdN97NZ0H7SQozcnx8nLdv36bb7eb29jY3Nzet9XXIis2MgDKPfKauycL6cImc69DYUHhc9jx2\n",
+       "2ezt160GkihaU7t16IJmNgQ5drGnOixDcwgCI85nbPBQTO67jSR9wuO0MeY7PplVZ+GbvXN+AuNE\n",
+       "OdrD5DPIH33BmD08PJS9S9+d7O3QsD3vmvVM2iEpzykOk/NsalYEQGSWqw4duBmk1YCtDg/VRmtX\n",
+       "DX2RNKG6pM2wOVmefY18T6fT3N3dFbYFAzUYDFqsno3ja/KNgfZckWuC00Qfa5as291eVUKpCYdf\n",
+       "er1eOQxh9psTm85XxBjzbOtwO5zot6QNPM0O8sz6WgSveT0OO5BmL+28UwWXiwTZU58/f87t7W26\n",
+       "3W2pfYAZc227UTvE2Ft0sKMF7NE3b95kMBgUAGXAn3wdOv0zhnFngcw6KZJNDyqHQkxSyqZbiK+v\n",
+       "r3N+ft6iwyeTSSsfBG+Iz2AUvdEx+izq8fFxCTsQwwQUWPGv19v7BObzeau0PIvFcSwotjphlf/D\n",
+       "IDmkwR/6zhxxN06tBJlPPsdcoezt7dGshO1N0hBmfgIUHh4eSp8uLi7y17/+Nf1+P9PpNLPZLN1u\n",
+       "t1C3vAcgAohbLBaZz+ctRsreODHT1WpVnlsjeoOUbwWYJG1vvQYMhAbxLpxMZm8cpW5g6rwaDJYr\n",
+       "nFo50uqN79CQmZmaQUNZ0+ekDRAwBniT3W63dd2D9xNj94kL9oUNM0oXAFMfc6bPprMJlRq48HuP\n",
+       "gfnzvvfnaGYjn56ecnp6WpISTbVbZ/Fun2pjvgzI6+e7ua9189h22XxhqHNBki04oKYUexjG2vkF\n",
+       "DpWdnp7m6uqqsBKsPaULADfowqRhnWqQjy6mUQeFPplhYAzIO8zYbDYrdznhdD48POTi4qKAT/ar\n",
+       "QxHdbrfkvbhMPEwQDig2xHuXk0113pMdSjMr7MWkCakh8wYz1jvuN3N/f3+fp6enDAaDXF9fp9PZ\n",
+       "5nWxh60XsFcHBwctsJM0ABBnEr3f6XQK6ERecLitr+wcvNZ2FtbxZgd0MLmj0agMCEOOUJHwZPT4\n",
+       "/Pyc33//PUnyl7/8pQgjSt7xTzMHbAh7qfwffSIhyRUQnaTFIq7X67LR7DX5hMFkMslqtc1AZzNZ\n",
+       "uSKwPMuCwTw4PJOkZexq0IJyt7CZ+UFIeL49EtOJvOf5+bnUSkHw/vrXv+bx8TH39/f56aefihJa\n",
+       "LBaFnnW+CRubSwl5tt+TbC88PDo6yu3tbbkIsd/vFwWDwtq1R1m31zxrxs66oBBZC5SUq7rWBjZp\n",
+       "19XguXg4VtoYdCt1+gZFmzQlyfnsa7kkSTs0MZ/Pi3d5cHDQOiHDM90Ps4T0l/+jnw41GiyjHLlo\n",
+       "jZMh9M2gFgVt1oI5qvd8zSYxDu+Hbreb2WyWwWBQQryLxSL9fr8YIxwI1sWAlHf/Eagw2GI9DF7M\n",
+       "JDhMtMtWM3cGl7PZrMiFQ8BJw1Cx9uSokMxeh/3MRtghxQ54jxAaShoGi/fBwjHXAN+kAcuup4Ms\n",
+       "wcqNRqOiE2FPfEKF77BnDIScFIr8O1xq+4EsJU3Yh/6anWD+HRZhHSzzjCFpcnHMIn769CmHh4cZ\n",
+       "jUa5vLwsfSbBl0rrOBrL5bLMk2WA9R0MBuWCWIAn+8x2nLlCJjzu19pOwAkCtlqtChVYK1KEyqEJ\n",
+       "FIizt1erVUHonHCBirWyQchNpTlWzqQjYBYSUDXvBlUT22SSqVdC4hdCMhwOC1PAM8zusMFchZZn\n",
+       "gtRr78tGi88yHtOKjq0naWWkW9hqmpaGEAKA5vN568TN8fFxfvzxxyTbGOZvv/1WhPPp6SlXV1dl\n",
+       "7lgLCgsx90bi3FA9GAwKQMMLYS7qPiavF9naRbNhdm6Bc4HwxGmAUtbfe8EsB+vld7mQFYaMdXdy\n",
+       "LH0hodEx8qShq53Ma08naY5v813H6tmL7FPT2O43cg9I4nkoXCrAoszYh0lal8CZ2fFeZfxmEpN2\n",
+       "PlXNsKArzMowXupXAJABSxzzr/UHCYZepzpU5PwiJxE7pOPvfwtyzdpyoZ4Ba7fbLbkmBlroHdab\n",
+       "BEiMf5KiQ2t2CF1qjxsmPGlYSEINOIVJU5CMPpo5MHimn91uN6PRqOQNPjw8lMv7aNPptLAB6Fdk\n",
+       "DPkzqERODLjq/WkHgn4jxzWg9sknjDzA2HmQvMtrQRgLFufNmzf59ddf8/LykvPz8/T7/cJU4SCZ\n",
+       "mWL8rjDr8Azrt1gs8ubNm8I+Yb9xYLBjzJEdhdfaTuA4tzGCtGoKuNfbZjo7KxvlhXCs1+tyKR1J\n",
+       "Pu/fv0+SknvCZUcusgMrYsYBAwLNt9lsyoLybgAQzInZGIQNYZzNZkX4QJyM8+joqJzdrz1eK05A\n",
+       "EoKIYDAn3vw1GjVt6dAAdKLDIg7dAFIQchtH/r1arXJ/f19O6qzX29ye//iP/8j19XW+fPlSlBdM\n",
+       "F54mG2g0GuXs7CzHx8cl1MNczmazkjeEkYNFcSjQuSrIyLcQ2sHjAVBZGRlwJmltUN/fYUVDjk7y\n",
+       "tdFiTu1d83dTxQ578T4rfZqVrZWjARaghH2C0UE2AVo29PboanbPgAJgYuVlT5HfMZeAAffdgAej\n",
+       "RJ/dH36SH2adYG8Vuv7s7KzsJxLsWSMrWIdoaWZJvL/QSzUL6BDft8CYJE3VUMoBMJ66fAMyzFyy\n",
+       "/y2PvV4v9/f3+eWXX3J7e5vk6zouHP81m5akZS+QQ/QXIWxkG5aGfpMAagCF/uUE6GazKZdYbjbb\n",
+       "2inoc4fhsQ3ezzgd3tPoXN5lwG9GPPn6vieHzniX9zfyyNzwfOsX7AN2z3P422+/5fPnz8WJAUSc\n",
+       "np629kXNShoE+bb5WrckjY5CHmx32PN/1HZ2K3Gvty2ty7XWGHbH1RgwE85mcAiDCbq+vs7T01MW\n",
+       "i0VeXl5ydnZWkPZ6vS7I3hQpiogFgNZKUsI4fM4erD176i8YEYI+UWCgd55PbNO0lgEAfQDcoJyd\n",
+       "L2Mq0nkbPMv0m2lNCzBKmz+vGXcAnecBpeDEsOFwmL///e/5+eefi5fJBqX5JAZCbqrdiL1mHmoP\n",
+       "OGkbU/q362aAmTS5B2xanyZB0bB2zrFgLK5uiaeE3LmxT6wETT/72TXbQvPvDRhNufNsji9D+fp7\n",
+       "zIPZSwClT9kAYpAlxgFwZ+/WHiJePPLCPrOSRkH6Ejnvj6R9/xF99r6xgl0ulzk7OyuyDQh7zcu0\n",
+       "o4EMADj4WXuRXgMzJX8UFtpFA5TZaQOU1Dk26BvkdrPZFF3A3HAP2nA4LMma7O/Dw8PixLK2lll0\n",
+       "GBVfsQvIDwYedp71n06nJXcCGYLNRM8AqqbTabEjgAYzNYzV7DaOJb/DtrA3vObsK8t2p7OtVYVM\n",
+       "ERKCkQNsmY1L2owke5F9jk0F1HBSdL1e5+PHj6VvsOlmZer5d4jGa4F981w6fIxMOILxz8D3zi7+\n",
+       "464RkBMUEAYbwQHJUonVnv/5+Xnm83kR5sPDw3z8+LHcJ3BxcVEW/e7uLsvlssU6bDabkuhFVna/\n",
+       "3y9on4kjsxtFCJq+vr7O5eVlxuNxQcmENJ6fn7NYLAoLZAqPvBkLq8M7pp0RRv6OssJztZCYQTGI\n",
+       "MVXufBo2kGPHfJ9/0z/Ago3F4+NjBoNBQc8kLt/d3eX29rZlBJ0M6vABf1B01L6ZzWYFwXvu2Mhc\n",
+       "9mZU/i0ocp9KcTjNRr6m8W2QDAIMUpJm7NPptMwH62pDyk+vVR3br8MRKG+MDUqfz/sIZNKEWBzf\n",
+       "BiQgVwaoVm44DYAnn/7BwCFrHovfjw6pCxvCaKJw/R2eZcXOvJOAiJya9qcfPJM1BmjWeVwAsz8K\n",
+       "bWH88JgNUh2O8nN33WysYH/pL/k4nHY5ONheBYL8JU1CJ04b84gskHBpRpTvO9Thk1rIFM+wswYL\n",
+       "3uv1slgscn5+3rIjyC3POzw8zN3dXQaDQZKUPCeO3yaNkTUjUQNZO1wG96xvzQwyN94fSQPuGRtH\n",
+       "fgmHmblzw04AqqbTaZJtjh85exTQJCxGBOPi4uKrkPNr4X7GzN4ghaHuk3WAdRnj/LN8k2RH4MQo\n",
+       "CiOXNPUCfNwLAANASJp4I4ILc0DZc46hgrwBLlZ4AAiy8k29omAQWhQ2i4nC5cw5qBMljEANBoNy\n",
+       "Xw+KHNSKQPM+/i9p0LJzD2xoWPQ6b8ZGxkLCGHq95g4SmCGAg5u/v9lsslwuy7Hqi4uLcpppPp+X\n",
+       "SrpJE/46OTkpNCog0JuW9TJK513O3jc74FCDWR7mraZAd9WIN9f5BEnjsZs9cX4Km5c1wUtjnQeD\n",
+       "QSaTSYslqVkyyzh5LA6JAUCsaGgYGrNryAo5FuRz4SFyooU9wx5gXIAOG3z+Tt9gUSyzjMWKnn4x\n",
+       "t4Q+GQP9JAm7VviM3z+9r/z/zCmspT1IanA4NEvzfPq9rC3zbwDH/zssVyfJ7rqx/0gypa6JQ1Do\n",
+       "QyeNouvt3OCAouc+f/5c1skhQcLGzA3OG4ADPeJwh+cRY0y4x3uE3BJYlcVikYuLiwJeuJuGd7KH\n",
+       "ALGMNWnCLWb/AJ8+Zo48GvAawPLv15ws7jECTDEOmllB9g1sBn3APmIDYKeYm8ViUcZEfx2OYy7Y\n",
+       "uwbiDpPa+Wftea5DYHX4s247ASdQegio2RAmhUVYLBa5u7trHY+lmbbiRMxf/vKXzGazIrg25mdn\n",
+       "Z0XhsCBfvnzJ/f19rq+vW/kB/LG3RCG1Xq+X4XDYyv6GmnQoaT6f5+LiIvf39y3602yACwQxNwYY\n",
+       "zIU3OayOF9YCwb95n71e2BqEq45b0oekEXjmC+YKAXcs+fDwMOPxuLzb9QscxkGh1YwPc9/tNsdS\n",
+       "HS7DiHiDwxpgqGp6fReNsJ6TW+u59Xw79GJwXDfWHY/VANeeNw1gnzRg3sCFviBTDkEkaa3dw8ND\n",
+       "YbRMzQJGkpTwDvvWILMGy8gVwAS2EoDCs63QfZzTuSkYBZwNF8xij9XhHBpj8RFYx9XZqzWb4vmo\n",
+       "PWav8WvsVG2IWHuPhfd5Lr6FxtyzF50/Y10MC83eRQ5xPliTfr+fN2/e5O7uLt3u9jRep9Mpzo/3\n",
+       "NMm0yOBgMMh4PC65SN4/nrNer1ee63oonU6nJPc7ZI9jQCI0uo5+Pz4+luex3sxF0oTlLeNmdy0n\n",
+       "yBQ63uCJ7zgM62P27HlaDebpB7reoK/f7xcbhO5lvs2UJ+2aNowvafJjzFbxfdbEibrYUsbBuF9j\n",
+       "f2g7AydsyvPz88xms21n/h+FQx4DKOzp6amEakCA/D+5JLPZLF++fMnFxUW+++67LJfLkg9CApNp\n",
+       "1IeHh5yenibJV7FzI0F7M0lKYixH4VxA7OzsrNRaIQnr8fExFxcXmc1mRcFyAyWVVZP2aSEzIRjp\n",
+       "+hSAE14t1LVSR+klTbKmvdTX8lVq5gR0nKQ1h0nKunCjJgq20+m0CuwBEO1tIKSMkXeDtE3D4o0A\n",
+       "Dm1YvBm/hcbcODZPA1glbQBgBozPGbihpNgbSVNDxrQp84BnV+cTmZ3hGZYbhyCs4E1jm85mz6Cw\n",
+       "GRN0OM4A64aCQjly4Rmy4P7ZmAP4kLV63JYHvFyHaWg1UDGDC/sKMDF97wYoYxyeMzsXnlOPC8Nj\n",
+       "x4N9w7o4n8PrtatG6BYHwd55khJi8/wyPwBr1pFTWRwl3mw2JTRuBhsmPWkcWnQn76yNtMOi6Abe\n",
+       "YzYNRw0QAjB9enoqpyt5hpOuT05OCpCuWbhku6dns1mRZxwtwpc03pc0Cf7IIQ4I3wM4sb8sP+6D\n",
+       "dTfAjn6iP1gDPzdp8gkBZ3Z+vC/tEPlyVpxXO0/YGu8Jvksf/6ztBJxwkgaQYu9ovW5yJJxklbRR\n",
+       "HII5m80KNTeZTPL4+JgffvihKE0mwFQYhjPZnhzivaakvdjEsvv9flFYRvYoQICTz9vD/mC8fUTO\n",
+       "tUxQ+owZxYZx5x3O43CyZdIkRdlj9700SQMueJa98JomtJJgY+E1Esqxt2RmA6FHCeC5G/igMLy+\n",
+       "9NEljzmeSpjEdCNzhwx8C835A/aW+Il8edz2zB3qcZVNA1XnkNgw2itPGian9ubdHLrAwzF4xcOv\n",
+       "k7JZF8CuT5+5qqaPA9dKCnYiaZJtzRog34At64zXGBEDhKQNAGn8nvmmD+wDlD/j90++X/+s38v/\n",
+       "A0CSpqAdY6XvHu9rOUe1Ad5FY10BZi8vL2XfY/yRac+Hx4ze22w2Bdz1er1SqZQwO7rRjpbnxbf3\n",
+       "mvVjLkmQpaJ4XZsHPYXugxkBMKHjFotF3r17V5zXJCVUUoe3GDtOnNkQ+s0e8lzUDsrBwUHJVcTA\n",
+       "W4/w3VqneA+gg3kG6QWLxaIFprFdyKLnBl0LQeC+Mv8AE+wnzigy0O/3c3p6mtlsVsZe5yH+Geje\n",
+       "CThxLJ4wAZckkafAImP0+ZwZFehD0DnCfXd3l+FwmG63W042+MiwjQKby4wFz+d5bCaOvjoHBZqT\n",
+       "CT87OysABAFAufN8WBAnlNWgiGfWeRQWJH7PRnVRMyvLfr+fpJ1RXytihNsCbzBB8hoesalBknYB\n",
+       "FPQRlgkQgkA6fOexf/nypYCMugLily9f8unTp3KHBgarXstdN9butTAGgAJjY48EEMZPK/I6ORLW\n",
+       "IGnnRtQGuDZqBgj8G8XjMtt1SMI5JrWxZB1IhERpYzgMovCskZvValVAO8rZ/UShO/GWd3N6ok64\n",
+       "dKsBcf2TseHIeJ55Xx0qqMGHGaV6TnkO/XutiBjfOz09LTF/+vCacd5VW61WxUAfHm4Lc3HJH+tL\n",
+       "f2E4yUfiz3q9LqcpMWrIAU6ic/KSRh8RjkAmYLpYD55F+fokBTh0u93y3KSdRA7YIW9fRaVLAAAg\n",
+       "AElEQVSQ/LzlcpnRaJTxeFyqqCIjTvB2CI69ioPq0EfNQvAcGiGjGihTm8ThSebF85O0i2qyD52Y\n",
+       "zZxjTxeLRetZ7DM32wLARLfbzbt370rkgfIPdtyxT+S8jMfjVt4g4O3P5HpnR4nPzs5aniBC5QJH\n",
+       "SVpn1KG8EKLXBsfmnk6nJanWCgVjTJ4KaDlpZ03jSSJkhBUQZJ9QsbFIGkWTpLVQ/JuNxLMcrzV4\n",
+       "wsCwqGxIxgFAQ/AQQgTE5/WTtMIsDl/VSNyNTUU2NmACkIbBOjs7a3kKrxno2gMCzMzn89zc3GQy\n",
+       "mWQwGOTt27ctg87zHh4eSoEk5+2wKf8ZTfivaA63Je1L7vhjEMXcWnGYQUIRO+RphgSP1qybjWzN\n",
+       "xKFAauCAkfV+sjGn8WzABuDUIIKGl83zUVTsQUAoa8j7DX4I57AHTPX7OC4nBRwOShovk797XIzH\n",
+       "x5qTJhfEAMXryZ6rGZmkCeUBfNgjNEJTPKvX65UrHTBQvpeL/u66kU9wcHBQQALXSiyXy+JUsF6M\n",
+       "gzXnfjGAHLoCxvrk5KTlqNTsFH9H58HYmWE22EM3oBORdwMgwCb6czgctsrmv7y8lCTZzWZTLv3j\n",
+       "vRhskkSZH4fqXGTOsuhkaN7P9+i7c7aQFduHmi1BHs24UweGvz89PWU0GrWAjAEeesOOMk65w0AA\n",
+       "abNXziVinpOUFAjGTB5M8ucJ3zsBJ46bY8Sh1pgE8kkQCo6o4i0xSCdFMll4ecQ5kzbNjQK3EFkx\n",
+       "W0GS/5A09xmQ6c1zCDEx4ShS2BZT9PaMTHMlTUVKKqryPhuTpO0Vm9qn7yhwV7O0ENoztsKm1cqQ\n",
+       "TbxerwtaPjw8zHA4zP39fUuRAIxQIqwV/w8bRe4AoKfe+KwL8dvhcJizs7NMp9NMp9PWJWM2LN9C\n",
+       "AwywLovFohwnN/NQU6pWXjXbhtz5CK5ZIzMSKGP/vVb2KD/+rzaYKGj3mXchmz6NQHM9B4wV/49M\n",
+       "1Bf2MR7mjv1cy6wT562MGYt/x1j+SJnbg7ODwd6xPDl2bwXtUBzjqZU788T3PM/M22QyycXFRTl+\n",
+       "axaH8e9avjudTs7Ozop+4WfNusIckDD/5s2bjEajVj0SA0N0tMEE82ZGPEmxB+iNuoYNewT5BDgg\n",
+       "Y4PBoAAn9hZsFiALMMAxdIe9AQ7IHseikWf0nJ0JH2YwE8begnVk7pK07APzw/o7NFSz3F4HLkxl\n",
+       "TxwfH+fTp09J2kd50VNm4n1K6e3btxkMBlkulxmPxyWPM2lCV+PxuLV29PXu7q7koBjcM78121u3\n",
+       "nYETJv3+/r5sRAwRwrbZbMrFb6enp3nz5k0mk0nu7u5yfn5ekB4CzHc5qcAkAkT4N6EgNk2v1yuX\n",
+       "VxEeccEhTiEYyd7c3JQNdH5+XsJI1DCBWeHZ9ma5KM//h2KDxcE7Sxoa38JnWh2aESBkqtGVZtlU\n",
+       "9kBgqSzkyddeJnkgLlPN/5kNYnPSJ451Pj4+5vb2tgjy27dvy2VTbM6Tk5OyzlwkhZLp9Xr529/+\n",
+       "VvqLEdxsNuWc/beQc+LcD+hTQhdJ+yivE1aTJg+oHoeVSb1OBqcoUmQV8OB+8dMXU9ahHFPWNuh4\n",
+       "W8gOBgejAIPhEJEZNN6LwjJrw15ARjE8VuTuZw2meQZgAaXLe0191+Fdym47ZICD4bmjOaeI9aQP\n",
+       "PqX12ndZAzNQh4dNHQvH4+vQ0i6bnbj379+Xu7Mw2knDGpKXARtoet9z4bW1E2l2CueP8hDsjeVy\n",
+       "WULJrl6L48g6I5voeeoy3d3dZbFY5Pn5OcPhsPTJIcPz8/PC/CWN00HIBFk/OTkptsvsGzowaVeD\n",
+       "TtosEM+mhgngjHlHLvj8a7JQs4PsMdh5bNvj42PG43GRW54L2HQOEbZrsVhkOp2WPKHRaFTqoWDv\n",
+       "CPlTvoO8xF9//bXcVJyk5F9anv6o7QScIKyr1So3Nzfpdrs5Pz8vCh1kbsqbDcDv7L3Y4wE5M+F1\n",
+       "vJaS9tfX160kURSnwyYk26LEQbnHx8e5uLgoAIWaKp50noERJ/sbpQ5qB4w4Ucgeq4+lMi7mg7Gh\n",
+       "aAFfvMcGkX8zn9CX9ljd/G824cHBQekPioey3gChmnrlrpz1el1qopydnRUwN5/PS80IQCDgjrAA\n",
+       "d16QYEWpacZX5wLssmHYam/BFK7zY+rEb8BeHa7EINZ5Nc5dQalDqWOUUU42AE5oNFOALFnRE66p\n",
+       "84ZgBBwqAlgk7evRWdc6Dp40OWhmkQyqHeJ1bROvOc830LYhpKEQ6Xev1yvhT37PT8uX2Zo6P8X7\n",
+       "0IUSAZP+nJ/N3GO07HHXa/zaWP6VDSPPvjQD4BMayEeSwhQDHgBvBpgwF/P5PMnXYUpygi4vL1ul\n",
+       "zm3MeRdyi6dPTuL9/X1eXl6KUeb76H/LIbrLOSDr9Trn5+eZTCY5OzvLbDbLcDgsa8iFgDC/fr7X\n",
+       "zkCf+at1ugEL+525Muv/mmFnHybJp0+fMp/PMx6Pc3p62tpbdujdJ+wbtogaXbbLyCpy7VweGBvW\n",
+       "F/v7/PycyWRSdAPOLev1R21ntxI7CzhpCrBxXw4NdIey5OdyucxgMCiIFKV+c3OT6+vrEsNk07AR\n",
+       "ENLJZJKrq6skKcKVpCjjTmdbVpubhLldeLPZJmadn58naWJvCDZ9ZtOS2Y7QJimnf0DfPDdplABC\n",
+       "TaKRKV7ewf85Ju4cEQtIHf5h0zmmbg8zaViTk5OT/Pjjj3n79m0BWgA2U4q8s/YSki0ABMD5lBDv\n",
+       "PzzcFgTC4LEZh8NhS6H0er2iGJKUo4UHBwdFwe2ysVY0jyVpjKOTHvl9/T0bYf6Nwsdw8516zS0v\n",
+       "i8Wi1B4w6wTIR+E5QY93+hZeg8A63o2TgOJhPC5G533quXGYludbQfNdxkr/6VM9B47R1wygmUEc\n",
+       "EHujzhlJ2vfiuLFXTNvzf95nNRPpUBZ70kDRoSj6+Boo/Vc3OyWcisTweK87/OCj/4wZ1gigcnh4\n",
+       "WMAEpwrRATwf58S5HehNGMKkSYq9uLhohd4xxBwdRkbQRewj9iFsg/MLyauhpD17AVlz3aikufaj\n",
+       "BhgwOOjL2sHk/8zA8lzn17mZNUEf9/v9UqV7Op2W+Tw6OipgBZl1/hhrRk0r+oCsYutgdxi3c34I\n",
+       "Y9MX5qcuKgkI/KO2E3CCB9Xr9TIajcqxMdcvcUweBWZPI2kqbcJCHB0d5fPnzzk6Osrl5WURIMeq\n",
+       "nc3NJK9WqyJ8gJ+Dg6ac8nK5bCVrsRimwDabTQEiSXN8kskHiXNMmveafkSgjYpZTN5pDwpa0zQ6\n",
+       "Y+Vn7SlaaQIMkvYJiZrKx+gz1qurq5Zw1rFDMt95LkJNOI21A4SRaMuJD9aYNfDmY62TFIbgNW9z\n",
+       "l405NPCiOXSDgVqtVq0Mf5RyHYNOXk/w9O+Yz5omJ7wENetEPj4LeHec36EgK0uHUuhTt9s+HQeA\n",
+       "4CfA254zMlon5nrMZiMwILA3KEf2t/vn/tKs7DebTTlCyvxYlmsQVQMUgIXBC+/lHQ7LmOVinvmO\n",
+       "AY0NkB2PXTYcxm63W8LUm82m0Po+8p40Ce+Mg3HDMFhnDofD9Pv9Uu8KUJA0BySo5sq6dzqdXF5e\n",
+       "Zj6fF1Z3vW5KqZOT4svreC4sM/NqJoI9RO4fn0U+ABDURzGw4BkOTfkdzAtHm9nvZoutk1erVetE\n",
+       "GvPL+F4D26wFNsBgnbHDgNA3l9BAXwEeASs+pZo0trMGFzj3SftaBuaYxGjWxvu8bjsBJyjPzWZb\n",
+       "6Y/YFpQhVI9DMi58wySRF4KBBzCAtEHhpnvX63VGo9FXSUqcxBkMBsUzXa1Wuby8LKd6iGWywCBv\n",
+       "b0gEpKZq6R/lglerpgAVVBuK0YKO0sZgma5mk+IFmxUxI4JCJ9kURYPRchjBSpgxUAbazUesMS5O\n",
+       "DgRU8Lmjo6OSfwKFyXhZb7wXQJ3Xj2fbwJFF7hyXXTcbWPeZubQXjGJjHn0S5bWNS3gGZez3Je3q\n",
+       "rrXn1ev1Sv0Gx7MpxW6PJmnAtRkTP5P3YGQZ7+3tbZG15OuifnXo0r+v6WszL1bGBtR4wyhe7xe/\n",
+       "L0nrPcx7XajL4MTPq2n4PwI9zImpfINVAzOU92vNc1yzartozq+pveCkAcGeb7Nm1imsF59D/gwa\n",
+       "0c8OH74G2M7OzsrhidVqVUo5rNfrcvO5nT3XvUoaxp7cEQNlmEROpKDLeQb949kGA8iwwcp6vS5F\n",
+       "QQHFZop5LzqZOUiaECHvrfdCDY7m83nRyci8WXk+x5idgM34GBd2ot4z2FucSdgXOxyev8fHxwJs\n",
+       "0S91+NptZzknLPZwOCxIdL1ujgQ6wZFFgFFhoyTNwBH04XDYin+aXvMCARaMcslwJmzDZ8mkH4/H\n",
+       "X93vw79Z1E6nk9FoVBK4eL69McANFF/SZj2S5iQP40aY7KXCSDB/VhA29DBTSb4CBt48tXGnL8Ph\n",
+       "sMSGARJQtKPRqCStcmytNjQ+6cDY2AAcC2eu+Q5KCc+FsZMke3R0VPJWxuNxWc9dt3qz4TGY+uYz\n",
+       "KHKzG/y/4+32tpmbOgznsJH7gPzzbIP81WpVvLg696WmlpN2PgBrTEPuXA7czgXf5Xm1MXKioOfH\n",
+       "/ajZBhRiv98vSa3Mr2lzGu/h/YASktNfAzTMtefQHmG9Dih6xoF3CWBxWOw1JsbhTjzvb4EVZN2f\n",
+       "n5/LHrSjyO9ms1krL4X9yzOQc7632WyK08c8OefOyf0YWwzlYrHIcDjM1dVVFotFbm5u0ul0cnp6\n",
+       "mru7u8KIo9ORJQNjwCZhIPaGk7DtdL3mCBtoGczbOUTu2MPj8bgAK/aiTx9Np9NSS4p38Szm0vuV\n",
+       "/sG2UHUZ1sesFnaHP8ik5ZXPG+h77wKccLQs28yRc2g4PYrtYy7+zKHc2cV/LN6bN29Khm/STkQj\n",
+       "DOCbi5O0QizE0lEc0IYog1pYHGbxM8ni/vnnn8sdMTAjxEY5XeLnI9CEpFg8qsk6y7nf75fjgsRv\n",
+       "TW/buJpm5N9JO3YNdQk9DeJO0jJ0zBkAEBDhvBPmx4aITTUajVoxcjaTGQ4E7zU6ns8CtuxtORRB\n",
+       "gSYAqr01GxVAIYDQRnHXDRlzSMBMSR0CsPKjsT/MvvB55sThEcB3kq++hwdIkUPytQCZBjVJE9ox\n",
+       "64NiJAaNguH5Tua2gYUxg2qu995rXh+KzeOm3DlAjX4hi1D+ptiZR4Mt5sXgy9dWeD7quamBxJ+F\n",
+       "W9A7jMf5RV5L//Tf2We851sAJ4Qe0L3j8bgwqhg79rqPoaOn7dXjYPIdEklxVJLmxIoveSSPjzWm\n",
+       "+FeSXFxclBwTnNwkBZiQm2ZjDONqJ9WeP418F4d8krRyMpJ2CfgkX4EfO9s4dIASf+f5+bnYRDOQ\n",
+       "SVqVs5Ft5BkZ73a7xS5RHZb+sS5mwWwja/adubEdOTra3kLtCwRZH+bODjDpDKwF9rpOB6jbTsAJ\n",
+       "AmyqExCC4TFlhgCAIFFuNYXr0zYsPr/j0j8W3HQehpOjw9PpNOPxuBxXNuJz38mbAM1zmgTlykZM\n",
+       "Gi/w5OSk1HbBsENDsmgIitkNhM+gxSEmFCDjZ1PDLPm6ANN1r8XYaZwmQkDn83lBwvTFng1eS705\n",
+       "raSZK5QSf7+8vCxIH7kAePBObqd2PhBskzf4LlsdzjGFn6QFXJB9mj0Pb1pkgVBikuKN4inVe8Js\n",
+       "Q6ezzaKnUJY/WxtYclFsUJFnswcvL+1TSabtMSpWUJ4fsyrev8yTQYGZFL5XG2z64T7YSEC3I6M1\n",
+       "yHsN/DI/Bkx/tNZ2evh/K3uvR31U3PNs5oY59X7dZXP/0XVmm+inwz0YUoeO0ck2dj/++GMxlA41\n",
+       "/PTTTyXxlpLxOC/dbnNBKO8HxJJcb0YLucGIUr12PB6X/CiHaJIGePtE283NTS4uLpK0QauZgtp2\n",
+       "AdjrOZhMJjk/Py/MkcEFwJ659zvMRNfRhfF4nN9//711whPHF0fJDiI6wpEK6w3mmjk8PT0tegTm\n",
+       "g/3oongAReacsJmZliStfVO3nYETBmGEy6QATpwYSWIqyZZ1YhAK5B//+MdXl/o9Pj7mw4cPef/+\n",
+       "fasPGEMUoMEMGyVJOZ3iWD7Ik8v9MNy+J8JGGg80aW7KTFIQLuOndbvdcsmUq+kh+PSFXA6O1bHJ\n",
+       "HPtkU5iWs6JOvmZqkq3S//vf/15OPzFvGBCE0LVZGCPALWkE0HONR09hH7MhSTuUR1iKTeZ7imC1\n",
+       "mOdvqaGYrCBrqtifsTJN2qEQvE3WGS/NNDdejJkp3luDHcCwlaa9Hj7HOry8vLSO8dJ3/0Qhwzii\n",
+       "CD02xod8Grj57/5JBdEaCKAjrDw99zyz9kx5BrUf2FMwsZbvOizk/eJ1qoEx64W8m9mxh8q7DHTM\n",
+       "TmIYd80KAkrJA3OIgpoX9JMxHhxsa5RQRgH9hRO5Xm9DzhRgZE8TipnNZi3g6DvXHMKGOTs7Oyt6\n",
+       "GO8eveokcbOHw+GwMCE0h1iQF3IDGevBwUE5lcK+TlJkyswZdgU54d0AMj6TNBcs1o5kt9tthcuS\n",
+       "tPQenzs5OSmXKGKD+v1+kSPWxc4ln7P+sGNbyzeRAW55RmZtp6w77GTyrDoZ/7W2E3BitAQaxrA/\n",
+       "PDxkPp+X41oGDPP5PI+Pj3n79m0ZGEg62RrmH374IYvFooCBwWCQTqfJA0EBEA7qdrvluPB6vU2W\n",
+       "Xa/Xubu7y9nZWU5PT3N7e9tCkWwO+nV6eloWfzabtSrgEgayUibL2wwLWel4rTYgKEkWlfnjWFjS\n",
+       "JDBipMjLQDBcPhhWwsJUhwoMvi4vL4uXwp0aCJtPQpGgyhzB1lCQablctvJKSF4jNmwDCUKvlbir\n",
+       "59JflIiPa++qOW6Ol+PTVIAJnzCycTQgMUPAnJi9SBrji+K3geP3BkJJkx+FrDhJLkmLGbM3Zwqb\n",
+       "Z7vZi/b72ItmA01z41wYxHk+/e71et1yHPwMK/LaY3YYhd/55EfSJFfWSe4GOjQDTFP3ngvCReQP\n",
+       "vRYGslfMO9z/Ogl+V409TCVvDh2sVquyJ22k0S8YW4f6ABaU7f+v//qv9Hq9vH//Pr1erzDLPv4O\n",
+       "UEu28sVRVY7K9nrbAmvD4fArJgJ2mTmFSZ5Op0madWXdyXHju4AR1pe9c3Z2Vu5xe3x8LGwNehEA\n",
+       "bCeAED9A2KEfxsb+pK/0n9AaMu1wC2P7+eef0+l0ynsHg0GxQWZbfDQc4GcdZGBkub+/v8/Z2Vlx\n",
+       "sK2DDfJ8mitJOZHJ8znl+WdH5Hd2KzEThGGjABf0fdJ4fklasUcm2YmuKIC//e1vpZLs77//XhgP\n",
+       "19ZImrAP7wdVQglOJpOStFQfhURw8BDxEBA6jFKS4lkAUAA1pjVrYIDQ2ICgtGAPMEpJY4AYj4ur\n",
+       "2Ztljgxc2CQYPDb0mzdvcnV1lfPz8yJUT09PmU6nBX2jbOy9stEYvw0fY+X/Dw621XWn02nr4igQ\n",
+       "PIALQ8lmfHp6+up69Tr5cVeNeSfsQWlrU6woBDMmNdPlcB7KfLFYZLPZFG8PltGJmskf5y7QJydo\n",
+       "WmFgWFy7woCmNsgoYmSBMZldYE/5HT6uD9BwX2t2ws/lXfz0HjYLQeO7BoJJWvkJyCw6Ablz43Nu\n",
+       "rwG2+v/sELwmy2ZpkubUi9fyNVCzi4ZM+oQJMu4+moWlKBe/96kPchoosOacBDN2yJBDEg4bdLvd\n",
+       "wmKQW9fpdArbi/OHc4t8oEMoJIksole95hhS5Isxc5CCPlAtFtZ7s9km8dvZJMfG+XXMpfWfnRUa\n",
+       "jik2yGtze3ubxWJRWJ7T09McHBwU/cqdSMy/dRIy6WRkQCE2zPkqjN25OmaMWGv2LPuL39e5LK+1\n",
+       "nR1vYNCc8mAxT09PW7dZMmlJWrfr0jBKeIDL5bIYBMI2KD8WwwlWKF0r8Tdv3pRLoJjg1WpVEL2Z\n",
+       "BaNYgBV1UQ4PDwvFRr//iMLFM4a6dOU9NrfpUnsFVmyAILM7q9WqVXYeJQmar9E3LMh3332X0WhU\n",
+       "gM90Oi2nZJKtgrcX7zFCx9M/0/HUlXDVUmKksAC+0RiF7qRO5pHnM5+7bsiaDSfr7Lo9fK6m+R32\n",
+       "qY0860fhMIy7jWzNNBkQ06c6edugo+4nzYra/alDSIyJfwPeUbaUG2eN6xAX7+D7Bi6uaUJugR0U\n",
+       "U8n2+gz2aDaA6A47DXWrGRqzWu67f+K9Mp9ea/4YjLJe3s8e2y6b8yVms1lWq+3Fosxdt9ttXfDG\n",
+       "/sUDR48DHLx/X15eyr09m832BBVJnITdAAacgHT46Pl5W6jt5uamgI2kCaPaLsDkuAQFoIFnJe3a\n",
+       "M96PrI3B8dNTc5ke80G4BFnBJgHOYJWYW8JOyL0rPHe73VKDq9vttu6Xo+G83N/fJ9kCr6urq6Kj\n",
+       "+/1+2TOuP4N8Ol0A+waAcp6cw2r0FZsDeATIuTI1wM9lOlxA77W2E3AC+Dg8PCxHiaGW8NrJ41iv\n",
+       "t/FCMsMRLhaNRcSTpJAN5YsREt+DwOZJmnPuKABO3rx7964V/+T39MPC+fLyUrLBWVhOSJCTwsaz\n",
+       "p5Y08c2aumRzgzCNNvmML9FC2aGA7R2b7cFDmE6nRch8OiPZbszT09NcX18XtI1SIDGN00sodYMQ\n",
+       "+uNcCCdS4snwXl8Uxtx6w9BH1itpjlrbc67DDLtoVmTOF3C+hQ2ywZyNEXNqehlGDuAOa3VyctIC\n",
+       "Ckk7J8JABGNtTxYlPBqNMpvNinzacLsvGCpCfVaivLs23Ky7nQGeC3BlXKbg8a6TNqBAwdpwm4F0\n",
+       "iInwCuMh7Ov+MBeMz3H518Ju/DTA8HgNQgwybYj8O3/PToWdn102mIJut1sMrQtKIluu8oqeRW9Z\n",
+       "PpOmiCT3p71586YAHwzxwcFBbm5uiuPK2tr7h/0jN5DwMmEHvk+1V9YZHYKuAiyQs8j77ehhqAEz\n",
+       "vjMIefa6scbOQ0Q3ADRcfI1cF/IpkW3kkXoutVF/enrK3d1dut3tCbR3797l+fk58/m8HB5g39dO\n",
+       "Me92agDrSmidEgSU9OckEPaSvjNfgEGckJOTk6KvsJlOaH6t7azOCZML9cZEoLgQMiaVBfHGTtpX\n",
+       "T+NZDYfDVmweqmu5XObs7Czv3r3LfD4vTAiKh4mlL1CEm80mt7e3mc/n+f7770tsdDweF8ADs0Ll\n",
+       "u9VqVSoeAlBYYECH47NsEDYB3wEEobQstD51YaXMhqUZRAFOjG4BQqzD8fFx3r9/3zp5tF6vCzCz\n",
+       "d4wn7FAW68Ia1YXaSGp23PXu7i7L5TL39/eF8YI94r4MU7vr9bqEAgE/NdW5iwZbBMuRpKXMknYd\n",
+       "DRtSGziavWg2N4ZqvV4XeXI5d472JQ1ITZq7adg/yBZrOh6Pyxoa7AAWHfo0M9Dtdlsgy8weDS+K\n",
+       "z6IY+cm7MHQ0lKi9M8+NqWaHKGFaANBeH3vdGFEfdTaAM71tI2yGw142fbYz4jli3uh/7bHyOdqf\n",
+       "nWb4Vzb0CuOHZWBvr9fbPD30X9IGacwfYQzGvtlsT4CgS5Ptnri4uChl7ZOtDl8ul+XaEDPi1kNJ\n",
+       "AzL7/X7+7d/+LUdHR/nll19aFaXn83lxdjH42CLnObKm6BkYAJhxHGISSPmugb3HnzRhS+fQWK7Z\n",
+       "E3VtE8CRGRl+9/PPP5fcSWp0Mac1W8uJPOfFuW/oGv+byIZPqCKzXNaKHDA3R0dHmUwmhVAgvAR4\n",
+       "NYPzWtv53TpJAzAwvMk2hENGcJ3tmzRHHK3goP2Ojo4yGo1asa9ff/01z8/POT8/z/Hxcd69e5f7\n",
+       "+/vWZU6wE/Ymid2t19sblP/7v/876/U6//7v/96qLeIy2NyiybhMVTMWUD4hGJA1G6Tb7ZakYFfk\n",
+       "IyZqpgCjD6gDgNCsZBEue8EWzMPDw1xeXubDhw+ty99IjCUkxvwAMhiDDTNKiA3OXDFOxgEljPH1\n",
+       "0eqnp6eMx+Ny/xDvOzk5yWg0SpIWw7XrVlPwKBEbHMBp0lDPNuT+vsMyAAZ74tx9Azvn8AnPNaC0\n",
+       "MUcOVqvt3VHj8bjE7k1Rw2aaAncYyt4+wMTG3UDHgCppco/s1RkcOB5vb9RHVpFF5M5MlNk8vs/p\n",
+       "DowLfXBoir7WjIXBsdcIT5zfYVgMZvAw+T3yzv5xmNlMlIHPrprXHXBiIJikhFfq8RPCQH48fozn\n",
+       "6elp5vN5AffT6bSE1mFAyO04OjrK7e1trq6uCoNjxmS5XObx8TE//fRTye/gRBA5RT4skDS62Sww\n",
+       "Hr9P6cCe83eSVNGTPAfnEjtl5xE7Q+4chp1TUMg/z/HJNqqi26j71KLBl0O1Blnsm263W5z92rFA\n",
+       "/ngm/aI/dSLrarUq4PDk5CQnJyd5+/ZtyXt5eXnJu3fvis53GsUftZ2AE5BajebcMFj21j15TDYL\n",
+       "mjR3ukAnUhr/+vo6s9ks0+m0CD6K03TzbDbLaDQqz4AJOTo6yvX1dY6PjzMej/Pysr38yuf4bZh5\n",
+       "JgbItPrLy0vrfpMkJXHK4SEWmL7gpaKoURCcQGKxXQ+AMYL0if3xXgML+nl0dJSLi4t0Op3iUbA5\n",
+       "8TApwYzw41WTQ0DfADHMyWq1at1jwsZwAi+XeZkd6vV6mUwmJeRgj4E+7Vp50wwA7H3UoAWAYhYF\n",
+       "IFJ/1h55kpZ8PTw8FOXtuUZWFotFicED3pAfU72Pj4+lEjIhSYAFMW76YWXm/rOPzNr4plbWDuNk\n",
+       "JsXxfa8lex85RGc41EQ/7N3jjda5Oy8vLxkMBgVA0U/mhWd5/g1yDJbYdw77OBxjZqXum/OmzKok\n",
+       "bQaNPrs/u2ibTVOdmfkw8EvSMjoAZYwuupKQu4GBQ3sYwfl8nl9++aXldBGe4B1c8GdWGtaZI/c/\n",
+       "//xzer1ezs/PW7VPyLuyDYJZRk+ZuQC4cFIJmYYp+vLlS7nFl+fU689ewy6ROEufHBZN2se3yWc0\n",
+       "s8F8/vbbb5nNZkUPECoCCJCrSWQC547nEqojnMX+5fsOowH+WU/6ip62PcFusK99ei35up5T3XYC\n",
+       "TqDqoeXxlOv4K4thpYbnDrVFAi3AgEUDZYJQR6NRzs/Pi0HnOCyeJyEFjudi1EiSfa8AACAASURB\n",
+       "VDHCnKO/vr4ux2lZNIBM0tBgIE5QMoiesSUpRoES2syDj6oRWnmtzgMLPZ/Py2YhMzxJy0ggHIvF\n",
+       "4quYpY3BcDjMxcVFnp+fc3d3Vyg6AACK5fn5uVScRUEAMnq9Xuv+IjxUxue4a5ISx3S4wmCU9bTB\n",
+       "R1mgEM3Q7LLVRq/Oi0iaUAVjxMB7szuUSaufiwH48uVLOZaNPNb5ESgW1hKamoZB4H0oe6+J85gY\n",
+       "h40Q8sizeYZzXKzgWNt6Hth7AHhCePTb4NR5AM4jqBvvYe5qw8gYUZzOQ2OsdijsMAGWeK4VukNA\n",
+       "rLG9U5prsfjk2bdSw4f5Zw/CZDIvjJN9jyyQP8J8wUJzZUCSMnfkhKD7zs7O8ssvv5TwCyG6+Xye\n",
+       "4XCYXq9XShCQIE4jwRZWdTab5ezsrADDpMlhYg0JL9nBShqn4fDwMPP5vMWmsD/QQThLXn+zpDBI\n",
+       "ZsmT9u3kyJOPGicNs22W5enpKWdnZ+Xfw+GwsHE4tOxZ1sqX35IzQ8KtIwWbzaY4F94HXnMDMOtg\n",
+       "gKYLxmGP0GN/BkySHYETFs+5A0mjuFhkKyGEg88lW6GeTqflCBkxZqO82WyWT58+5f/8n/+T//zP\n",
+       "/8xgMEiSkg9CbQMWltMp0It4p5PJpCw8TASTjWBMp9NCuVGrxUfKfLKChSPEg2GHhWGRYTEQ6IeH\n",
+       "h9bpn6enp5JgnLTvbUnaxdAAATaEzHfSCNSvv/5avO46zwBFwXq4P7PZrBgLvGLWimdYaQFubOz4\n",
+       "PJsAT9OeAs9DhpyAtutm78fy62ampO6z859gLZApswBWhm/evMnnz58zHA5byXvkohggeV4NCGBH\n",
+       "YK1sfGuPB8DPPgB80D9T9zBEZh2QsxrkGKQl7Vwd53y4/6aok6bsNrLGu0xV86euX0FfcDDMCnmO\n",
+       "6IvL3vPd5Ouj0PZ2a2+a+bIxNMjmd/9Mkf//3egreRW14U22sn93d9eqb9LpdHJ3d9cKpbhx9BZn\n",
+       "Eq/83/7t39Lr9fLbb7+1gB0s7ZcvX/L7778XJxOGhdMp19fXSVJYE8CzD19QhI2TjMlWtn17uoEF\n",
+       "zAL5LbAesNtcxcI1HEkK++95JFyEPq1BOo4tgIk9hq5w+MdMUK/Xy3Q6LeUwcNwdmfARbPQLDbBj\n",
+       "9tDF61wegs8zJphMA6mkyQO1LcDu2TF4re0EnKAg8Q4ZGIaTDiM8PpWB8OC9Y3CdK8LGMJvAGfea\n",
+       "OgedkqOC1+eMafcNj8FKmLL3bA4MAhSfE7WShskgPou3S3P45/HxsSBdh1VYWAwA36efbBrQLz/N\n",
+       "rPAulDaeMmvCpgKkEM5hrDBfjIHicgYkfgdUqzeikycxnklz4gPa0bkWPNPPr3+/q4airL0uy7Vz\n",
+       "JJg/fm9Dz3zAeNAMLJyMZ2aBuXXuBobVzbWGkGmHjQA5vIe+Wrm436zVH8mYDTTNjAb/9nN4lgEp\n",
+       "SX027H424+ffda6OmTr2OR4nc+4QE2EAJ63TT+bN4JG+oEu87vzdfTYw5xlej12HdZhj5/kxX2Y4\n",
+       "WS/rNeaNeTg7O8tgMGhd4wHgob158ya///57qXhKCIIkWfY8a4jsM5fUHDFgZ47RmQ6zoO8BMk9P\n",
+       "TyWPizUYDAYF2Jgt9EEKZIm9gt5lDgEe/BtnDp3qvetogNlAy5vvzQH4mZlyWQCHgp1AzN6DyQE8\n",
+       "mJUkB8YOCn1l7XBasZNms2z7cGjZnwDJuu0sIZYBseBJWh4iAm4lzWKzkCS1LZfLVu0EX5+OYA2H\n",
+       "wxJrRtFgqK1kOYLMYtugcLSZvtXJdCyUT98gvFRyRUAAGNDb9rq8oGZnmDMfPXbSI2NwXgm/h5JN\n",
+       "2hUtrdwpuFavFaj86OioVcb+5aW558LrlzT3HBl8mvaHSQHIOUGLGCjKgSQvh3KsAJnbb4E5Sdpl\n",
+       "59ncZgf8b7NC/p4NMvuD79vTwhvlmB8nmnyCyh4QILQGzEkb8PlEGSEay2gNqF8LW7E3bHQNGmx0\n",
+       "zaoYrKL42fc8g1AhPz2HNkTur3Ma6j+vGTAD/DqB0CDIFD5j593+Wcvua8+0DJutqEHlv7rBYKFv\n",
+       "2cMOH6LbACPPz8+tmlXodp9qZF0Ya6+3rVPy6dOnTCaTUjMK7x+QPhqNyj5I2ndKwerCohHyZD19\n",
+       "GMOna2CLkQHrddbg4GBb1Ozq6qrFHJjdQkbRs4CRJCVVgfnjj8NLDqHQN4dJkLenp6dysMJsnp1s\n",
+       "9q33Ms7u8fFxsZu9Xq8w+wAa9i9JrXwfe4uuwY4C6gGPdXjSziPMkZm3r2Tu/xPJ/X/RmIDlcpl+\n",
+       "v9/K+kaIEWRiYb5nxiyKQxCcrWazQ2Nx8Z+LABG/5D2155I0RpiFMRUHyHJcznF8H6mzx4miWa1W\n",
+       "GQwGhdZ04R17TbyTvmBQDKLot5WhPTOe5TCJQwUYeU4d4fGhaBxjxih1Op1y/wXvNYB0QhS/e3h4\n",
+       "yGKxKJsHBgYjxHFxnkdCMnRtnW+Dcf4WqO+kWQN757UhB2i+BkoMRGqalFNgBhmwgev1ulV+HaoZ\n",
+       "Ga1Bda0AUUT8zrKbtK9op5ltqWUNA2C2o2YV+J3XzvPEv82esHf4npNpeY/77eewj+p18hg9PkA9\n",
+       "78MI1ODRTKFPfBhcJE11VN7Dd2vWlHcz1po520VDhg4PD3N+fl6OndvIek7JY0AOanZzvd4eQSWU\n",
+       "zj4xE8GJD+Ydufzw4UOSdrI04IV5Y3+5sjiMOv3w7cIYSzONDvWgY8bjcVnPxWJR2GKMO/3CFsDu\n",
+       "eU+ORqNi7wyazUIbUPNMg2vmk8hC0gAAHMNOp1McN++bJAUwnpyclDGgS11LBTCHvUUHOyGWvUBY\n",
+       "zqFkAx3WnLnyd19rOwEnRmyfP38uA2JSXJqcxUgaJoEkK9PAj4+Pub+/z93dXf7nf/6nRUGb6mOR\n",
+       "6gRNK1Umn36xMIQsMC6msqG0UH7kU5jmRPgxyPaI+DtGwIiSRXYSMMJudsc0HQjdIMfeFwrRyJY8\n",
+       "Avrg3x0fHxcAmKS1Pg7T+eg34JECPKyvjbWBEkKMR0z/GDfJZKwHsuC8hG+hISvINKcEnMdRb0qU\n",
+       "kQ21P2MK2Zudz5lC5/c0e/O1ATbI43nIDN6P2UHWnLyOmhkwnc+znNthJsgGwMAaAIz3y3fZW2Zb\n",
+       "mFOHavkdzyX3hjE5Zm6QTD/tHGAU6JNZQAMgGxjez3gd1/f46U8N8vic5aAGW//qhuwlKXl7GPfD\n",
+       "w8Ny2zqshfPLjo+Pi3OIbjg6OipHgdEBTjDFsfEJM9bGsgTbwncAHcvlsoB5O4027qwh+nS9XhdH\n",
+       "mM+ancap4uQiSbe1zLHPqCHV7XZLDiIGnn4Q5vD9WDBUdnRfc5xJGyCEUx+N5hQR8mXG1s49dhEw\n",
+       "SZkH9CkX/LH/AVnYtOFw2JoH5hUiAAcTmeCzTrh/re3sVuIkBY3++uuvefv2bQuRJs1RVz6XbA3w\n",
+       "zc1NASgIyu3tbWazWWsToyAw7jAcfMY1P8wu1Emjjlvyf0m7ZPd6vS65MCwg5+2TJiHURsJ9eO2E\n",
+       "QdJmXQ4ODnJ2dlYUIEdrUcAYBdPTgBL+JM0dGY6R867BYFAMPXPBM3zMzMaI76L0B4NBAXLMOxsD\n",
+       "oOON5DDeYrEo68/vicnCFJCBT3P9lF03xkk7ODgo7B8G3UesARL2tg2yzD7Q6vCBQTC/N4PgdTZd\n",
+       "+xowtvJOvr7ML2m8OP+ecdMf11PAcJthcz/piz1MKztk38/i72aXaAZtBu7sP7xkgyje4X1P/7g1\n",
+       "m/U0kKidKOdg4EB5jeo9mDSshMMNfv8/8zD/FQ3n4ODgoLDdm82mhA87nU7Oz89bYAFHp9PpFA98\n",
+       "tVqVat528Lx/qTyaNGuIHfCRdkLjABzXg8LrX62aqztsGHu9XmHsefZgMCjHcXGIcIAIqWBQzXDX\n",
+       "joENOwmqgAcDm6R9bJxnWNaYe/YGMtLpdPLhw4dSHHS5XJY8HmSFAwqj0aicXEUXw5ZQ9DJJiSIc\n",
+       "HR2V5zhXZLPZtHSXw3JmSLDjHOZwHpz3q+3ga21nOScvLy8l+zlJxuNxRqNRUSaLxSLj8fgr42eg\n",
+       "kWwVKwlNFlyEm9LIbCAEwMmuCJ6peNA/DAt9cJKPY3mbzaZFa1HREM8hSXkmmwZDW7MVTpxz3slw\n",
+       "OEy32y2Cg8K2N+6Yueun1BsTQ290TF+vrq6SpChYNsXh4WEmk0lZQ/6fRKk3b96U43qOIaN4AGje\n",
+       "yDSU3GuGOUlr7VH4GNM/ovJ31Uzzkk/g/AvnDzFe+o7HxJpi1A2Ka+NrA8nvMbQGDcylEweRXX/W\n",
+       "1HLSPk3jMJQNrcfN352QSv/5jD04Pt/pdEqBK37PPgbM0l8Uqve9wbnZOPa5w3/Ik0Mu1McwhY7c\n",
+       "Q8PX4TbWyawp4Z26VoYdBIdCoMENRgziasC6i8baPT09Fb1G4cSLi4ui28ziWU7MxvlOGXSbHVDq\n",
+       "cfhwgkEPup938Xz0KBffUXTNBtDOnNkv1o/3WVeaLYZlTJpwEP2z04DMOPdmtVoVloE5RcbsEDAu\n",
+       "yxiOqI8Bv7y85Pz8vNQyQhe4+dAFz16tVplOp8WZvru7K6zRcDgsZS04rm1WxkeDCTeSc4IcW18B\n",
+       "YJO0dJLX44/aTsAJx9Gge05OTjKdTjOZTPL8vL1g7suXLyU0Y3TsBpK7vb1tAQrAg9kJKDDTw8lW\n",
+       "UbtGRK34fILFl0Xx+6RRzgizvUBaTdXzjF6v16Ie6SPAw/+G6kT5+bkWCAw9v6cvbMyTk5NcXFzk\n",
+       "06dPrRozX758yf39fYvGxkCgRKEh/S5/niNsoPlut5u3b98W+pF5hklwkiuGhDUDxDh8ZvapNqbf\n",
+       "Ajih/ygsFBj0qgFH0sgD6+gxYGQBZK7IiTGzwfNzUHR8x4rB1LhDQABHg2OaZRuQxRq4DLWBDcCY\n",
+       "33s/0GeHUBhPDdDpk0MLgC2zIwYOyI4ZIBtU1goFy1iQZYM2DI+TC5FLwgKcrMMwAZjsecIgsNeQ\n",
+       "AYdQzXDV87nL5vwO5KTT6ZTilDhRPkHFOlpeAHusB/Wf0C+Pj4+ZTqetMvgOsxB68y3v1puACRtj\n",
+       "5r4Ob6Dn0WOwCbWcueFMU6jQjBwAC9lF9jlhyf/zf87hsDwDWJgzfkfVXAMZ5ufx8bFUPsZZ9x1I\n",
+       "w+GwOMXUBasdysFg0Dr5R2gMJxWwZ/sKI2OWxfoY0I8tYS+9xhLXbSfgBOFhstjk0+m0RemjQJMG\n",
+       "ZTIB9eDW66bcsCvQvnnzJu/fvy+VR/0s3k8SJrRh3RAE0CNelz2zOnwzn89bXpE9fzYZYSjeDXXK\n",
+       "gidNTgCl7Dudbc7J4eFhPn78WASQd5v65vuuoGoQMRgMSr9RfvP5vFRThN2B1rZiYZ34DgLJuFer\n",
+       "Vdm4HIeG+bm9vW3FmJkPBBow4pwDxk1irpXPrhW3G/Rm0jYwyJqBCc0eZm2M+Ls3P7LvBDzmnO/A\n",
+       "KFhRYxzNeNhDA5g7pOKcDjyg2nDTLwCMmRrHvW2MDbT4ye+RMd7B/nKOCyymvXIciLu7u3z48KEV\n",
+       "+ttsNqXgFHJGHspyuSwKmdMlljH0jC+NYz9tNptMp9MkTeEvvEU8f8snesO5KayF6XEML47IrhNi\n",
+       "mX/6iaOSpDhXLnOetAvLGRADRizrOKo1s9HrbU9Jmv0yo4KMozu73W4xvkla8unQGWwuurHf75dS\n",
+       "B9gRnLzBYFBSCBgHxhog471pBytpnIx+v9/af3WBM5pzdmCCkGvfrMweuby8zP/+7/8WRodIQa/X\n",
+       "KzklPAdQslqtWiUqut1uxuNxYU8cwq+jDOxz9AlADOYsSZlfwm1mUrHVq9WqdZKpbju7lZhTB6Ax\n",
+       "BuBEqjqkwyQmTc4DwubENT5vhsG0k424gcN6vS5389CHpNl8LAxCYDaHeCSbBQYHhZWkJEg+PDxk\n",
+       "Npu1jnuS68HnWMzaG6QPVEa0wgclJ41woFjtQZIHwVFssxd4JDxzvV7nu+++K7cGI6BO2GVzLpfL\n",
+       "AmBQYNRGQeE7nGBPHmPrNTVdyvtIXrNnzTiceLirxlrZ+0GR8DszBQYezqEw+HXIBkXHXjFl6zwW\n",
+       "/pC8lrSP8CdNMSfTywZIhDRN69a1UljjpM0KIpckSdI/My98lvlgTgxOeJaPHpKXgqzwDE51AWLN\n",
+       "uPEOAAh7HObUoB4ZRTegdA3eAEckzDNX7pdlnXGwLg4TeU39e/d/12Ed+oUcms3wsWLGyHolaSVQ\n",
+       "+u8OY/Ms7pqB4WIu2Qsu2AlgA6zA6mK8zVpRD2S5XBYHkXcQgmXNALXIkp0BqodTMXw6nRYH0bko\n",
+       "6/W6nCRlXQ8PD0v+Gf1NUhxPnEY7D4TB6PPnz5/z448/lvfQ5/Pz81Y4HZYFUI0NY2/BXPd6vbJf\n",
+       "kNebm5skTZ0xWG+zYOgRKtQmyWQyKfk+7K1Op1NO0OKwAozqpN+67Yw5scLivhU2qBcYL4QFc0JO\n",
+       "TcXWIRdimy6PjsGz0fYEdTqdElNN0gIlzjWp7yZA8fF+cg36/X5B8aB01zhhfNBp3LvgvhntJk21\n",
+       "Qhrje3x8LDcwY/AYL2MzQOAkDV5esjUWVKxlUyDgMCnMNYARJYA3vF5vj7US+3XxovF4nOvr6zL/\n",
+       "9TFoh5KGw2G53t5AldM/yRaYUGDvWwAnSdtIO7Rg4GHvGAWfpDXW2kM3I8H3AA4opZp1oD+mjU2D\n",
+       "W76ReYNuvCYzmA6/8NN0Nv2Greh2uwWgAjwZj49U+h29Xq8Y+k6nU+6ccnVk56Qw32dnZ7m7u0uS\n",
+       "Vn0Le7x2KDgOyvcZP8bIFVEd6nJdGeaU7xh08EzXhEhSEiMdRuLd9op5758p8X9FGw6Hmc1mJUy5\n",
+       "2WxKkqvZMhto1gfd69Cs5dyfJYeCtaQmCrqcvVXX5EC/wDyjV/l/dDcgBEYXgE4fkNXlcllkpd/v\n",
+       "l/L5sDfkkgyHw1aelKteU0UWUNXr9UrBzrdv35YwIDbC+sEMFHqXMAxgZzqdlitZzs7OSv0Z9Kjl\n",
+       "E3uArvHlqpxuury8LDbr4OAg9/f3JVTnveB57/W2dwS9efOmBUjJZ+HvvHcwGLwKcl9rOwEnnz9/\n",
+       "Tr/fL2iPP6BaAwyDlKShrI12ASgsJhsCOtDPYlFhSEzvooiTtJQSRWhcCdbxThe/SppLr1DO5+fn\n",
+       "pdwyZ+5ns1kBEaenp2Ujfvz4sVCmLCZAwgmuxLjn83kmk0mZIwStjgEiEMfHxxmNRuW4GPPF3T3c\n",
+       "O3R2dlYE8Keffiq3FFMcDU8Vj9pJbvydPB+El9COAUntTQN+mCeML5s3acJIq9WqJHMxzl032Cd7\n",
+       "GVbYSfs4oA1RDVpQlvbqa1Dz9PRUkvKShir2RY3OR0raV7S79oYZMbMF9ioBI6w962+Wx8wO4MPH\n",
+       "3h2GYXzsQQx90lT9pLAiYT7mzewac8++ZmwYiX6/X/aPy3ATggWgmBkaDodFhwBuksZhcSVNAwh+\n",
+       "8hzXj2Bf2UGzoq91nR2UXTbWh3Lx3PtVh9/xnM0+ITsvLy9FVuv8FQ4KPD1t70FjvgAcAG+cV3IW\n",
+       "0Q/sDYq2wWZ0Op3i/QP2AA3kglCUkLmfzWaFJfnhhx+KnGIrGM98Ps90Ok2/3y/63g7acrlslX1w\n",
+       "QUTfccZ+MTto4Pb4+Jibm5tiG3HYnp6eyjFiElw7nW0+0+XlZcnhZG7Ze+hnbBB26NOnTzk8PMz3\n",
+       "33+fu7u7HB8fF0cVVopnUUmX577G0HNaDeaSz7FnHeJ6re2MORmPx0lSQMR4PC5F1rxRQXiOjZvG\n",
+       "ht5DWSXNLb945kwu8WQMt5Pb8ITYXH6uQydsQsCL2RnXRGHTQf8dHx/n48ePxXNC0blEMpsd4UZA\n",
+       "EQjmzoyFQRKgzciYhtBQcwCUfH19XWKNeCv2EjFyHz9+zPn5eZImj4XNxm3Px8fHuby8LEqm3+/n\n",
+       "l19+KYoVNgvwiFKj0qGPBvJ+EDveDO9PmvhunQS3y4YiQ/EmTQjSYQoa4AEZ9+ZmQ/Nv55JguAlz\n",
+       "EB6ELQAI1AnEVob2ZpN2QTbei8dXn1hg/9gr9NUIeMfsE+hf9iYyYYbH7JCBFP0E0Fg+aFDNPl2W\n",
+       "NLebG5TwDkAJ7Bs5KXyGXJSaZsewEqZFfxjAoFdQ6hh0M0WMDwfLYUz6jmLfNSuI8cTYj0ajYswZ\n",
+       "J/92bRIcDfQD7LLHjXMEeFssFnn//n055cKNu93uti4WenY+n7cYKBwjJ2EDihxKplihw5jonV6v\n",
+       "l6urq6KDAVKcvgLIEBq8ubnJxcVFK+zOn9PT0wIeAPuAIhwIZMrH570/Op1OPn/+nPF4XJ738PCQ\n",
+       "4XCY4XCYzWaTjx8/5t27d+U4NKCF/YYz++HDh9zf37dOz7BPqeHS7/cLwHt5eSml5X1IBObMR7sn\n",
+       "k0k6nW2CdK/Xy6dPn1q5nIzdJ19J6fijthNwMhgMSt7F09NTbm5uCvrlj+lr0DHAAAEETePZIHAk\n",
+       "cHKSxLUxKCDExPEs2BEjffrB7/HwQLPkzjjc4mS/zWaT+/v7Et4ALSYp5+kxNMvlMm/fvs3nz5/L\n",
+       "AkKJs4kAHpQPpnQx2esYqTpxkr4lWyF7//59AWPX19c5PT0tawCFCWLebDZ5+/ZtZrNZJpNJTk9P\n",
+       "yy3K1DsYDocFyXNjKP9OmsQwMvHxWBw2QkE5qZlj4N1ut8RPURjOjUEJ7pr6TtoFwjCSDnuhMPlc\n",
+       "HXKjAaIdiknSAiYAcudEGZSaIsZYOmQEPVwrGjMUzDnvR4adz4VhN8vJZ5zUx0kznu2wBrLGH/pn\n",
+       "8PRa+Mvz1u1uk60vLi6KHDnEQj4CMkSuCUaNPlIKoAa9zAWGxSd96I9ZKEIAyRbkkZPlOeCEBXOF\n",
+       "IXe4Gcp8l+3Tp0+FPUYfAdDQw4BadJ3HgV4y0HOeHpemrtfrsn7owX6/n/l8Xhi4pDkpCWvtyuDo\n",
+       "AoA/evng4KB1uzuAK0lhO9mz2AUDVBg/O8Xv378v8orMJimMG+9dLpelRIMdu6TJ2XM+DXOUJD/+\n",
+       "+GMrD/Ho6KgcTe52u7m+vi6y9+nTp7JGT09PJRyXbCMWRAfMmFIlFnZytVqVUNN0Om2dwOL3OBn3\n",
+       "9/fFeXz//n0JeY1Go9ze3rZyFZ0bhP3+M4dyJ+Dku+++K7TYr7/+WjqL0kgaj9LCTAO4cConScnb\n",
+       "ICSBkpvP50UR2asajUZJ2ln/UL/2+Jy7AbpF+AEdJH+SBwL9hbInDJI09CheII2wj+OMbHKus2aT\n",
+       "OyuevroUM58zAqdNJpP84x//KHkfk8mkABs2J8d9uYob2pnEKI4Ls2G92Z6envLx48eSCIuRhgHB\n",
+       "0Jo+RxmQCAf4OTg4KN5Bp9Mp8UpAJCeG2Ji7ThpMGjk1aE6aM/7IJYbNORxJmxVE9lA0DpMgQ47z\n",
+       "OhHbMV3nhdigO//EBQSde1IzBKbwUdJJu0gbYyC5EfBppwOljVK2PHNywgAC+XPisOccY0FxMIcc\n",
+       "iZGzHvZQ+Sz5AA6vAeBRog6T4iwBTviMw2f8ZDzkaBBenUwmLZ1E+BZmgXlCXnbdmAtqIpHUiPw6\n",
+       "FMnet4PnUzaEN9brdcbjcZHjs7OzfP78uYR/Tk9PMx6PWyA1SWGH6UfSsKxm28m9MPuHLHW73cIS\n",
+       "mLGm/0lzMs2hO4AHSfiLxSKnp6dFpwNIkVvrcoy6k2/Pz89LMbTXjPUvv/xS9udqtSoOdpISImQP\n",
+       "MKfsOeqHPT8/l9OhLy8vubi4KGF8n8Kibz6B9do9XHzeckG43Swm15sYXGNz7QS91nZWhA1BPDo6\n",
+       "ysXFRW5vb1sdRQmy4fGYLKQYxaSpEooX4qIxxMdM3dm7TBqBtQDxfxyrYvFJuqqpexI/AS0IOooL\n",
+       "dsBsAcoZANTtdkvlQk4S1OEBjJSFg8UG6fomTNPmg8Ego9GoeIar1aqAKTw1gEeSFoBIthvu8+fP\n",
+       "ubq6KgqVvBLi9ggx3hWKHqEEpGCcaYvFIhcXF0lSlHTSnNYidstJJYcn/hlF+K9qyAwN42dPHw/P\n",
+       "XrkTWVkXvGg2sY2ewyWsncEPtCxzxHdsKMy0AIJhEgz26pwXJ2w77OmTdvwfYzXLBYhHNvjpEAaG\n",
+       "hr4sl8uMRqNScdPzTZtMJrm4uCh7ybk4zJvDSfSJtcDR2Gw2OT09zWQyKdU1ofQZD/uZvzNG1pk5\n",
+       "A4DhicPOcPoBz3U2m5VcBOfcOMS1yzYYDAowpl/MDSCXELqdsSQlodWMUtLkFMGUJMnNzU0Bw0dH\n",
+       "R5nP562SDxjT2WxWTjlyMov5dWiEhHz2EuNItvoGubYjYUehrqjqPDj2HQw6DDi3KBP2ZI/A5qH/\n",
+       "2XOE0y2bBilHR0e5u7vLd99918rXSVLsEUCH7wFmcGpJnrXNcZ4abDe6wuweugDGmj7DRB4dHRXd\n",
+       "P5/PS07jeDzOu3fvWhV6Aeg885tLiP348WNRMHjNzupmEEwkE5Y0J17wPhDMJC0PxJOQpEXRouz4\n",
+       "O54TeSMu9wuNdnCwPY5GXJTEJP7/+Pi4MAoOQVlJIYwO19ze3rb62utt70R49+5dRqNRPnz4UKh8\n",
+       "DIKVHQ3l7rABPxk7Xs+HDx9KaAUBwRsEYZOEljTH9mArhsNhbm5uWsi609leioXxQTGAmA3WACn0\n",
+       "lY2aJPf39y1QyO9B/z7yZgDrmgq7bDXrUBvnpDmNkjTGFeBlWh+ZQZm8dk8GzwNY21t1sjYso3Mq\n",
+       "6BO5Tj5tZrmh1TkUdUiKPpghq8t3A9wdtqjngmdgAJLm8jV7sRg6gPxgMMh4PM7FxUUxaABjmFiO\n",
+       "c8IoEXIyo4G+waj1+/3MZrMis/SFNbChMmAz2+Kj2OghJ3QCqiwDzLe91102xojedM4HoMsAbrPZ\n",
+       "FIBJGBwASSjNdZTI5eBZSaM/ky34xOg6rw7bwXoljZPAmlBuALnjQAAev/P1zDwjH+PxOC8vL6Ua\n",
+       "K+NBFjnhOR6PW3dpJSmOw3Q6bbFi7DUcXeRms9km5cIU+7Z4HBWiA9ieTqdT7u6pWeRud5t35Rvl\n",
+       "YYmQL4clfaACZ55QO/ucd+Dk+D48F+D01SWbzSa3t7e5urpq1V75/vvv1OWGhQAAB0pJREFUX5W3\n",
+       "nYATx43tUUEvYby5cMjUdz35hDTY2CBFBDFJC2UmKQuOADrhFDorSVFahIpQEiBkP5cKfYeHhyVx\n",
+       "9MuXL61kOLyAwWCQ09PTfPjwoSR64Tk4J+Xy8rIIvFmE1WrVqvg3GAxyc3NTKGIy3AFyKF7m8/ff\n",
+       "fy+KmYRUo1o2MJRcst1A5+fneX7eVvDF2wQ9A/LwDii0hDJzKGKz2ZT5s5Ew0+R7G0DpTjhcr9et\n",
+       "BOdut1tYl103gwN7ZfbW+R39xyuELqY6Ixsb4A34cTIfQAjDh4G18nC4wkYWeWLdYFoM+vCE8fr6\n",
+       "/X7ranlACUod40GoDoWKh2baHcCdtG8ato7g+Un75l+AweXlZYmrQ7UnDciBYTKNj9FjfIC3yWRS\n",
+       "ZI81AHQD2jkh5fViHv1+jDN7xeARGWAPcdoCZy1pEvAnk0nryPQu2nw+L3OO7rSjQWgFXYisEVbB\n",
+       "aTk/Py96EvYoad9Sj1zBUqCTLy4uCvhwAiynYtBhsM/cNO8QJmEQ2DuSe7nUFF1yenqaT58+5d27\n",
+       "dyU/0CEtFymDaR8MBmWPAAR4H2kMLy/bAyA4pTA/7FHk6/T0tFWXi6qvJAkjQ5wGAnDAFpL3AThg\n",
+       "j7E35/N5FotFAfTMD3PEQQ7W4e7uriR0w5ACKNnH2DnmfDKZZLPZlr0YjUY5Pz//in0i4fa1thNw\n",
+       "cnV1VSaM4jhJQ7ViqJ2sljQhHbwLNq+pLZgTFtWeFsbw4OCgCG7SgBc2DcYaQ4+yxgvgvfQJQHJ3\n",
+       "d1c2MXFBAACb9OHhIYvFosTjfWw4Sen/8/NzPn782Ep2xQNFgfk+BeoDQCPzPd7NnxoYYFxgZCz4\n",
+       "rrY7n88zn89zcXFRjt8ZvEDrnZ+fp9fbFvIhhHN0dJTb29uyxtfX1616AEmK0XTYjjoUvV6v3POA\n",
+       "Yb25uSnH3fg89/7sutUhPQy+ATbKjo3uvAVySepYuZtzTvA0ydZH2fEee58wDSgj5ARP1EYXdi1J\n",
+       "Ab5JinfI3vLeSFJqLgCOXTDw/v6+gC4YIcsgbGmv1yvAnT3W6WwLOhmooexrFpS97QrK7I3n5+ec\n",
+       "nZ2VfIHRaFQ8Z/bAly9fSrgBDxanpPbGyX0ajUa5v79vlTj/8uVL2af0t9vtFiaQ5FrCITBT5N5g\n",
+       "/P+M/v5XNHTjwcH21CNrw3ohT9TaQP5IyiQnDpkGPAPcmDP2RX0a7fT0tIBIdD/rBWipLyZl3gk9\n",
+       "OS/GuYKwIVS3dn4VsgPwHI1GrRQAZIk54EQN4MRHj7FB3333Xfk8cmpW/fDwsFzpgj7GmYHlZP84\n",
+       "Xw02nWdiW9EvHz9+TK/Xy+XlZbrdbs7Pz8spOoOZ4+Pj/PLLL7m8vCxsliuXA3jQbTAqnz9/bpXb\n",
+       "4HQXoR1OZDqfaDKZfFvMCYlnKLD7+/s8Pz+3PB6ED48JQUSg7UmxaLVnjreBR8+kOp/CoRDeSzIX\n",
+       "iwyz4eqDgA6EebPZFBTpOKVDTHiBJBk6Ix0vL9l6f6enp1kul63jYHim9JvNnzQ5ISh2F2QzM5E0\n",
+       "FLNDKhgqnuXwEMfS+Dx06A8//FCOgHNyiPlmDh4fH/Pdd98VT4BTRskWyPB3DC1z6hg13/Emd0Ip\n",
+       "NQr+rBTyv7Ixz05GTRoQ7JguSof8KR/DZHwOESUN64IHBFMC24hS9f6pQ172dmAnqQ/B+mPweZeP\n",
+       "hLKPzF45yZCGbAHCkHufdKvDr8zXcrksoA2vEqAH43lwcFBAlStR8j3+wN5gbCaTSdlnzLlP+5HM\n",
+       "BwgnTOky5svlMrPZrCTX393dZTAYZDqdlpu5fS0GLOJ6vS61fyaTSVkrG3fndlgudtW8fpeXl2X9\n",
+       "fcqF36PHAQUYNWQQZhemD6AL6+cCk+RNwMSyXrwHMIcXTz/ZYzCEDnOik5ElwvROCH16esrV1VXZ\n",
+       "G5YT7wdYOGSfvtEIj3D6hfkARMPS+fQN7DQ6HrtBCM3MInIOuwxYBGytVqtSa4s9CghbLpf5/vvv\n",
+       "W3k6zMn19XU6nU7r9ul+v5+PHz+W00Aw5AbzgCaYxufn59ze3ubgYHsIBSLBl9f+Ues4Fr5v+7Zv\n",
+       "+7Zv+7Zv+7brtvuqVfu2b/u2b/u2b/u2b2p7cLJv+7Zv+7Zv+7Zv31Tbg5N927d927d927d9+6ba\n",
+       "Hpzs277t277t277t2zfV9uBk3/Zt3/Zt3/Zt376ptgcn+7Zv+7Zv+7Zv+/ZNtT042bd927d927d9\n",
+       "27dvqu3Byb7t277t277t2759U20PTvZt3/Zt3/Zt3/btm2p7cLJv+7Zv+7Zv+7Zv31Tbg5N927d9\n",
+       "27d927d9+6baHpzs277t277t277t2zfV9uBk3/Zt3/Zt3/Zt376ptgcn+7Zv+7Zv+7Zv+/ZNtT04\n",
+       "2bd927d927d927dvqu3Byb7t277t277t2759U20PTvZt3/Zt3/Zt3/btm2p7cLJv+7Zv+7Zv+7Zv\n",
+       "31Tbg5N927d927d927d9+6baHpzs277t277t277t2zfV/i+IAQDEy/wsagAAAABJRU5ErkJggg==\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x113e04090>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# helper show filter outputs\n",
+    "def show_filters(net):\n",
+    "    net.forward()\n",
+    "    plt.figure()\n",
+    "    filt_min, filt_max = net.blobs['conv'].data.min(), net.blobs['conv'].data.max()\n",
+    "    for i in range(3):\n",
+    "        plt.subplot(1,4,i+2)\n",
+    "        plt.title(\"filter #{} output\".format(i))\n",
+    "        plt.imshow(net.blobs['conv'].data[0, i], vmin=filt_min, vmax=filt_max)\n",
+    "        plt.tight_layout()\n",
+    "        plt.axis('off')\n",
+    "\n",
+    "# filter the image with initial \n",
+    "show_filters(net)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Raising the bias of a filter will correspondingly raise its output:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "pre-surgery output mean -12.93\n",
+      "post-surgery output mean -11.93\n"
+     ]
+    }
+   ],
+   "source": [
+    "# pick first filter output\n",
+    "conv0 = net.blobs['conv'].data[0, 0]\n",
+    "print(\"pre-surgery output mean {:.2f}\".format(conv0.mean()))\n",
+    "# set first filter bias to 10\n",
+    "net.params['conv'][1].data[0] = 1.\n",
+    "net.forward()\n",
+    "print(\"post-surgery output mean {:.2f}\".format(conv0.mean()))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Altering the filter weights is more exciting since we can assign any kernel like Gaussian blur, the Sobel operator for edges, and so on. The following surgery turns the 0th filter into a Gaussian blur and the 1st and 2nd filters into the horizontal and vertical gradient parts of the Sobel operator.\n",
+    "\n",
+    "See how the 0th output is blurred, the 1st picks up horizontal edges, and the 2nd picks up vertical edges."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAicAAACbCAYAAAC5xzv6AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvWuMbNl13/c/9eh6V7/uvT1zHzNDzgw5HNIWNInpMCEi\n",
+       "2wkCwYElBFASBTLg2DCM2LATSAkSJ5GlWDJi5EMAA0ngL/EjkQPFcuIQgREEcCIbAkJD9JhDgdJ4\n",
+       "yOFjHnfuq2/fflV1VXc9Tj7U/e3+1+pTfe+MqOkmWQtodHfVOfvsvfbaa/3XY++T5XmuJS1pSUta\n",
+       "0pKWtKTLQqWL7sCSlrSkJS1pSUtaktMSnCxpSUta0pKWtKRLRUtwsqQlLWlJS1rSki4VLcHJkpa0\n",
+       "pCUtaUlLulS0BCdLWtKSlrSkJS3pUtESnCxpSUta0pKWtKRLRT804CTLsk9nWfa1LMsOsiz7C1mW\n",
+       "/fUsy37+8Xd/KMuy9y+6j0ta0kehpWwv6QeVlrL9w0s/NOBE0n8q6f/N87yb5/l/l+f5n83z/K8U\n",
+       "XZhl2TtZlv2R36uOZFn2lSzLXsqy7JNZlv2z8N1GlmX/R5Zlvcf9+Pd+j/rwX2VZ9iuXtb0lfSj6\n",
+       "fpHtP59l2etZlg2zLPtbv4d9WMr2Dw5detnOsmwly7K/8fj5B1mWvZFl2Y//HvXhh0a2f5jAyfOS\n",
+       "3nzKa3NJ2Ud5SPaYzvm+Kum5PM+/JelfkPTPwiX/g6ShpGuSfkbSX8+y7NWP0pcl/dDQ94tsfyDp\n",
+       "lyX9zY/y/CX9UNL3g2xXJL0n6V/N87wr6ecl/VqWZc9/lL4s6THlef4D/yPp1yWNJQ0kHUh6WdLf\n",
+       "lvTLj7//Q5Lef/z3r0iaSDqSdCjpP3n8+b8k6cuSdiV9TdKPWfv/WNJfkfT/Pb7vk+f05Ucl/frj\n",
+       "v/8bSX/WvmtJOpb0kn32P0n6qwvayjRbCO9Iuv/42m4ck13/jqR/TdKPP37OyeMxvmHj+KuSflPS\n",
+       "vqQvSVr/qO0tf5ayveC6X5b0t54wrqVs/5D/fD/Ktl3/W5L+raVs/y7m/6I78DEK+j+S9Kfs/78l\n",
+       "6ZeKJlDSdyX9Efv/hqSHkn788f//+uP/N0043pH0Gc2iUZWC5//7jxdI//FC2JU0erzoHmnmIfyo\n",
+       "pH647+ck/Z8LxvSnJL0t6QXNgM3/Lul/Pkco07gk/SLX2vf/WNJtSa9Kakr63yT9ykdtb/mzlG1k\n",
+       "O1z/V/RkcLKU7eXP951sP75nSzNA9akFY1rK9lP8/DCldaSzIb+nDQH+cUn/V57n/7ck5Xn+/0h6\n",
+       "XdK/+fj7XNLfzvP8n+d5Ps3zfBwbyPP8b+d5vq5ZOPALkn5E0m/ns1zqRp7n70pqayb0ToeSOgv6\n",
+       "9TOS/ts8z9/J87wv6T+X9NNZlj3NvGY6O/5cM0F9M8/zI0l/SdK/c1648wntLenjo8su23O3PEW/\n",
+       "lrK9JOj7RrYfp3/+l8ftfnNBv5ay/RT0wwZOnkYpFtHzkv7tLMt2+ZH0r0h6xq5ZWDX+uMh1L8uy\n",
+       "PUn/smZI9y1Jn37c3n/0+NKepG64fVUzgFJEz0ryxfGeZvnPracbViH5ON6TVJV05XfR3pI+Hrrs\n",
+       "sj1321P0aynbS4K+L2T7Mbj4Fc1qBv/8Of1ayvZTUOWiO3DBtEjo4+fvaRYm+zMfoS3lef5I0lqW\n",
+       "Zf+upD+U5/mfzbLs70v67/M8/3W79JuSKlmWvZTPCq+kx0h9QdN3NAsNQs9plqO9L+mmZiE+SVKW\n",
+       "ZWVJV5+iv8+Fv0eahUL7H7G9JV0MXTbZfqr2jJayvaRFdOlk+3GU4m9oJjd/NM/zyTnPXMr2U9AP\n",
+       "W+QkC38v8uDuS3rR/v87kv5YlmX/RpZl5SzL6o/32N9Y0PYi+hclffXx3z+qsJvhcYjv70v6pSzL\n",
+       "mlmWfVHSH9MMjRfRr0r62SzLXsiyrC3pv5b0v+Z5PtUM6NSzLPujj0ONPy+pZvfek/RCCP1lkv54\n",
+       "lmWfybKsKemXJP29fJac/CjtLenjo0st29JMMWZZVtfMKSpnWVZ7rCyLaCnbS4IuvWxL+uuSXpH0\n",
+       "E3meHz+hvaVsPwX9sIGTPPwd/4f+qqSffxy6+7k8z29L+klJ/4WkB5oh8v9Y84L9NAj0NUlfzbJs\n",
+       "U9I4z/P9gmv+nKTG4+f8HUn/QZ7n/3xBe39TM+DyG5K+o1nB1l+QpMdt/zlJ/6NmxVI9zYf+/t7j\n",
+       "3ztZlr1uY/gVzSri70pakfQf/i7aW9LHR98Psv2XNJPR/0yzeoCBpP9yQXtL2V4SdKll+/GW4T+j\n",
+       "WZT7XpZlh49/Fp1RtZTtp6DsccXukpakLMv+kWZh0OU5FEv6gaKlbC/pB5V+UGX7hy1ysqQn06UL\n",
+       "7y1pSd8jWsr2kn5Q6QdOtpfgZEmRlqG0Jf2g0lK2l/SDSj9wsr1M6yxpSUta0pKWtKRLRReylfiX\n",
+       "f/mXPxQi+l4UEtvpeGfazbJMpVIp/Yam06kmk4nyPP/QfeBZ3uZ0Oj3TjyzLvifjK3o27VcqFVWr\n",
+       "VZXL5bm+TCaT9JPnuabTqabT6VxbRf0rlUpnriuVSiqXy+kexjkej1OfYr/4XS6XValU5u6jX4uI\n",
+       "ccT+/cW/+BcvNLz5l//yX84jbyAfX6lUSr+n06mazaam06mGw+Ecb5gX/5+/ua5cLhfyyuVtOp3O\n",
+       "zRs8529Jab6yLEvXweeiflerVY1Go9Q+c12tVueeNZ1O0zrgb6fJZKJqtTonUz6+eN9oNJIk9Xo9\n",
+       "VatV1Wq1M/KDXMGDRqOho6OjOf75syLf+Iz7K5XKXNv8TCaTuXmhvVKplHglSZVKRePxeE6XwDe/\n",
+       "P89znZycpHbK5bLK5bKyLNMv/uIvXphs/8Iv/EJOP73/UV9K8/qUuapUKmd4xby6jDk/aB9eO4/h\n",
+       "iaQk+ycnJ6l/9Xo9fRfn3NumH1wHv1038rxoI9w2xPXha3g6napcLs/pQO9TlJ1qtapGo5H0Af0Y\n",
+       "j8caj8eaTCaq1+uqVCo6OTnRysrKmfF4H+Ap+n1lZUXj8ViNRiPxjOuOj4/nxsrYfc6yLFOr1dLx\n",
+       "8bGGw2Fq9+TkRK1Wa279MD7ajDIwnU71S7/0S4Vy/X11zokb1g9LRQLuFBUjxMT4Z24wznse90fl\n",
+       "U/Sc7yXFcUZgsAgouJJY1EdfAJEHvlD9+vOMNb/9B+H2RSHNz5ErtUV9vSiKRpIx+f+TyWQOHAwG\n",
+       "A00mE9VqtWTEJpNJUgZx/viNMq3X6xqNRkmZwp88z5OBQElmWaaVlZX0MxwOdXJyMqfEHDi5UpdO\n",
+       "ZWA8HqtcLms0Gmk8Hqtarc6NnWdyL226IZCklZWVubl1OULxOQ8AHZubmyqVShoMBmnso9EoAWVA\n",
+       "eZZlGgwGid+AKJ+LaPjgQbVa1Xg81mg0OgNQHNRFw8146E/kp7cT9Uu9Xk/f05eLlu0iYCKdGr4I\n",
+       "lv0+QIobTV8PyGrUA7QPOT/8ueVyWePxOM2PdCqb8LEI6LAGef5kMlGj0dBgMJiTi2q1KumsfqQd\n",
+       "N7r+OeCBeV8EZGgzGm3nn+uOWq02txbH43EhiItrlzbQKycnJwmIwYNoY+Gp95U2mGvmAz4w//45\n",
+       "jo9/F/V7pO8bcBIN0Ye9F8TrCq+IXAjcO+e+85gZnxeVl3u3LsgfZhxPC8yiZ+jjdo86Kka/flHf\n",
+       "IjCIisXbLvL645giL3yeoKJ7fUFdtPKGFoFc/x/ejkajM3JG5AQlRGSiSKETLeDzer2uyWSi4+Pj\n",
+       "pLSyLEtG2b0YPPTj4+M5Beny796YG55KpaJOp5NADWDHjYwrT1fgrgD5PkZ9sixLAMLXkCtI/qb/\n",
+       "o9FIx8fHGo/HWllZSUo3rv0I8uCR95HvmE/4SkQD2UQRF4ESnkVECM/SjaGvEY8yMT50iBvdi6Ki\n",
+       "56PTisBIlFf/znkcwQ5tMf7xeKzhcJgAZwR7gADvAzxENlwXE+2jP/C/Wq2q3W7r0aNHajQa2t3d\n",
+       "1crKyplx+frjcwf19AmjHQGNrwPGALjwSJKP8/h4dmxKjHRCHkGBt75WHMg56GCN8dvnhGf5unA9\n",
+       "wryz/llj/iyupQ+sFXfYz7NnlwacROF3xc4gFqUTuDYqFtqBca7Mx+NxIUiJIaloYCJDI9JEyEij\n",
+       "YGCGw+HcuD6KMX1aYOJ88v6hUKEIYFjE5wlNVKLc6wClqC9xfn3huAHi99Mq4+hpXTRFUBdBm3TK\n",
+       "EwwoAMRD1e5t1ut1TadTHR0dzS1wT8dhaF2pYFS5ZjQazaVfUPjIBe3iKUbPB2OxtbWl27dv6w/8\n",
+       "gT+g/f19PXjwQL1e70xkwI2+K6ToxbkCc7BZJIfOH9YwkQ1JarVac6mQqDjpD8DQAXEE3bVaTZPJ\n",
+       "REdHR0k5cx26RFLqh4M5riPszbPwHh3ESPNpCj5nrhnbRdKiSKkbX8YICHMg6voB+RoOh8rzfA4E\n",
+       "wFt0AtEv0hu0U61WU7rD9UdMcbrM+Xc+nlqtlu7f3NzU7u7uGcAcwWSUG5d3f6ZHKhzIuEwCPhmL\n",
+       "r1kAMSDV1yL30Sb3+bwAvIbDYeI7AHllZSVFBI+Pj1OUMc414+M5AG70E5FeB94Q8o7Me8rX136h\n",
+       "zC385mOmIhAQ/3ePP97LZBV50QiagxSurVaraTF4LtGjDPGniOJCLJfLajabqtfr6nQ6arfbSaEt\n",
+       "mhDa+N1EiWJbruQJfcdxRJ48LUUvtIgWjdX5EOf2w3iJboAuC0CJER9XiDGU6/ld8skoGa7t9/ua\n",
+       "TCZqt9tqtVqqVqvp++Pj47k5PTk5SXJHRMO9UffQAM1e41BUW8GzWC8ffPCBrl69qq985Sv65je/\n",
+       "qS9+8Yu6du3amVqTouhJBOcobE9HkVuPawpyw4DiJ5oT8+WNRkOtVkvNZjOtP4xATEm44pekfr8/\n",
+       "xxf66j/0GyPK355Wqtfrc+Fs5ou2I3h13qCfAJ0XRRF0S2droKIT6Y6MO4fMJTJ6cnIyx5PRaJTk\n",
+       "Ns9z1Wo1VavVVA+Bxw7Ydu/d142DEgyvdLYmg3sajUbS1Z1OR1/72tfORJv9N8bYdVcEJi5j8X4c\n",
+       "lLgmWP84EZVKRbVabU7HAehYKw5OfL7grxNy6vchn57mYd5YU643fC24fuHZ/PYoD+OmRgadt4gu\n",
+       "DTiRnhxNcIEr8qgQfPecIPfeWShENxyY8ByUJMaiyKAvMuT0gZAcqHR1dXXOS3CKSvV7Qb74ouDy\n",
+       "G8ULP6LHUzSuou/8WfwuimB5lCXSkwBKfK6H7L8XgO57QUVK3EGiyyB9RvnU6/Ukk0QDfBH3+30d\n",
+       "HR1Jmnl7ROaoueAHZTQej3VycpI8G+TAUyYYAQpLiZ5BrjBRYleuXNEbb7yhwWCg8Xisv/bX/ppe\n",
+       "eumlBCji+GL0zuc2epIuY+5U+Lp3xc+1tVot3cvYAVrwxQ09hp9n1mo1NZtNtdttVavVVJToBcIo\n",
+       "ZXQLfXej616wh83RHcwv97txdwPjYAj+XyRF+ZXmo9YOYD0S5pE3vHSIQmj/DJk/PDxUpVJJha0r\n",
+       "KyspCh2vJfpImzyfuXbADbmHPx6Ptbm5qevXr+u73/2uBoOByuWyfuInfkKvv/66arVaKqZ2AOS0\n",
+       "qPAWPvG/f+eRFo+sMKaYCeDaRqMxB95wUJBVl1F3ODzSQYGy9xlZc7DhEZMIPH1uvZ9Q/Jy16jJ9\n",
+       "Xjbgcmj0xxQXatGPC0VEred53M4IhMsRYRQoyNFqUYgyeuz8T9iR9ih2JIISDWpEzx+Wbx8m2gAh\n",
+       "dB4yBKzxA7nXzfUOVNwbiF535A3t+e8i4/Mk8jmMsnFRFCNCEaw5Hx0YYticp+PxWMfHxwlgoAjI\n",
+       "w2MsG41G8iLxJD0SwU+WzarsW62Wut2ums1m8uxj/5ljB/yMYXd3Vzdu3NDq6qqm06l+3+/7ffrS\n",
+       "l76kP/yH/3AyvihYV7juVHgaxPkUDbb3Cz56SDiCXZdRrqfO4Pj4OAGabrc7F3mZTCYaDAbp+na7\n",
+       "nfjAs6h5iLIe++QGhz4xx0Qvfe59/Hjj0qkXGz3QiyDkx42RpxGgqIsjsMNALtKdrg/6/X7y8svl\n",
+       "shqNhiSlQk6cR7xxUm8RyDkQwIgzDgBSs9nU66+/rmazmcDh7du39VM/9VP6B//gH+jg4GBOn/Mc\n",
+       "N7RRlqX52g8fK3PqNinqDbcJDvzr9XqSWeTy+PhY9Xo9OS+kU+A1axKnh/56dMbnBZ5Hfcb10e6y\n",
+       "7rmOsdMmc0Yxb1wjRXTx2tyoyOC7Mo+KXZoPAxcpQSeE2ZkXQYkrPE/DeH+ipxf/RuH4fQhYvV5P\n",
+       "4UwoCsV5qaOnoZhWcP7ymY+3aBwOQiJf+HxRGm1R+4yhCJjEuYsetvOgSBaiorsoikrFveEIpJx3\n",
+       "Ll947fCeYkwACUoRXmH0SP8MBgMdHR0lEIMiajabKpVKKXrHNkIiK6whgI506ulzP+sNw9lsNjUc\n",
+       "DnXt2jX93b/7d/XKK69I0lz0kv56CNejKbVaTVtbW8n4eMrDr0X5OeCBx3iSfMe1k8lE+/v7CZzA\n",
+       "s16vlyJHRJgcPFCYzNiJLnnks16vq9FozIH5CK4whi4XGAieG9cl0QSPeF103Un0+KOedPAa16Lv\n",
+       "DvE0BPPleoBnYXxHo5FWV1fT+gHgABJLpVKqraIWwo2mry/66s8C5Gxubia559nValWDwUA//dM/\n",
+       "ra985SsJ+EvzoCMWlPo8ukGnHw5MIqCJvPAx8X8cBymmPJ/V7xwfH5/RMURamQ9sITyNO9ikU0Aa\n",
+       "+xejOi4bbjt8jdLfCNDPc6ovBThZ5Pm78EYD6r+5H6YvMs5efOio168vOhOkqL9Q7J8XcxVFQ6bT\n",
+       "acqNR8Md23aiXy5kTkWGOSL4WFfiHmbkhQtZfJ5714uiSBHMoNyLUm6+oGO0oYiKnn3e9R8nORhx\n",
+       "BYnMRa8u5rQdyFAr0W63VavVknL3KAnPbLfbyQhIOmMUXQFhGDHkHoHiGnb9uDKeTqdqNBqpPfo8\n",
+       "Go3U6/W0ubmpt99+W61Wa86ro09FnjJnlOzv76tSqWhzc/OMTEYD7V5qlmUJ8G9sbKjT6SSgUq/X\n",
+       "UxTp+Pg4pbyYBzeQHnXy3RE+f+5les4fuVtZWVGz2Uw7l2K0AWNADYWDGp5Tq9XmtpNfBsANFUVF\n",
+       "3MD4mof4HzkrlUpzdU5EUtx4uZywAwuj1m63UzsrKytz9Ti+o0qa9+5p1+Ud+apUKrp//746nc6c\n",
+       "HOT5bEfb9va2vvCFL+jRo0eprw4YptPpXB0YbTqY3d/fPwOYuJdxR7vCWSYUZUun9SKDwUDSaYSE\n",
+       "owQAL6R6vYgbOYvR6Qj0pZksu/MUASF6ivn3eUcH+v3IuQcHiDYuoksBTqLCks4KaYxWuHHy3KIr\n",
+       "GhdOCIYUGcMYFTmPzgMo0fhyPYKRZZmazebcuPyaSD5ejHxUAEX3OWDwugKvbfB2HNxFYLMoolEU\n",
+       "OYlgpSjSEZ/NvAAaYxGzp+BcqT9tNOnjpKKIjisCohNey+QG04EMhnd9fV3PPfecms1m2l3gSlmS\n",
+       "Njc3U/rQFZJ0qjC8NsXlMXqm/C8p9fXo6CidFwK4IT+PPO/v7+vGjRtpvD5Gl2FoOBwmQ0y0J9Zl\n",
+       "uZGPvPXoHbtE1tfXkwfpEaY8z5Ox43mHh4dJ7tmVw3W+U8Lnj3Sbg8Q8zzUYDHR4eKjj4+ME4rhf\n",
+       "Oq0Bcg8dAMpvQKQb/uhwXSQVOVJRZ3vdR9ThjJu0d5ZlOjw8TPdHvT+ZTNTv97W2tpZ4Q4oIAOAy\n",
+       "7ODaQQggyHUIstNoNPT222+nKBZpP3azVKtVPffcc/qH//AfJqDkZxHRF8YuSe12O60j0qb9fv9M\n",
+       "3QWG3deqpARSGeNkMtHBwUEaG2uS59Xr9bnUFnLpEZEYNUXv0CfmM25dz/N8Li0J/yPQKZVKyXl2\n",
+       "HsHvPM/TDi2cjfMcykuxlTh6youiAItQuv/2XOQiQ+/PiwDFnxc/9+cjFBEknNd3aDweq9VqpZB7\n",
+       "BGcuvNLZA3lA7+4lnOdhwRdH0R49YTzON/f4fMz+nRve6FGd5/XxvSuzIl65QYpeWgRFi4Ddx03O\n",
+       "M5SOg0+POERvie9dSeEZkbPtdrsajUbJc3J+oPQxbu4teorAFQrrhXoKvBuPCGRZlkLFHp3Jslk6\n",
+       "BS8P5fn2229ra2tL29vbSbF5JCE6C71eT61WS5J0eHioer2eUkgOLvhxvhEJIbSPfAPQWCNey9Hv\n",
+       "95Nn6NsqUZxxB0FMM/nfbgBcVo+Pj1MEBcPoUSd4xzM7nU5h5Pey1JxALs940IsKTn2dMy95ns+d\n",
+       "aAovqC+J5+VwiikGkfM8oqfutRCSEljwg78g11flcjlt0XcQSRQGh246nepP/sk/qTfffFMvvvhi\n",
+       "enae53Nrjn4dHx+r0+mo3++nPnvE0vvsa8r1qzsB7DZy5wJHoVKpaDQapXoOn6OjoyO12+20zqXT\n",
+       "g0U9Aun1Iq6fIHdeAEWAJo/++RogyhN1huuW85zLSwFOnIqUuit3yBWFpwq4Lxo9v9YFwZVffM6T\n",
+       "DF5RiLMokhDvkWYTtrq6qoODg4UAyckP7EII/EROH09E5d4/DJLn6KXZoqYvfj/fedqHau/oFRUB\n",
+       "PO5xPkVA50K/qN8u3P7b5+EygJMiQMtPTMXE8Cq8ByRg2LmeuokYnYNvk8lkziA6CPVnS6cnO7oH\n",
+       "CDDBu/J+UXTna4o+4TmenJxobW1NR0dHc+uQ53hdAu3WajUdHx9rZ2cnpTvoCxFCV6wnJydzx5QD\n",
+       "Jg4PD1UqlbS5uamNjQ1tb29re3tb7XZbm5ub6vV6yvM8GRwMheublZUVHR0dpV1LhMVjvz36w7gG\n",
+       "g4FOTk4SKHLDjHcP8EC5D4fDpLv29vbS9bF439fkRZGvX+SKOS46RIwx+LqF7254vSDZD1uDV6TB\n",
+       "AL4eWfNn8T/8d17SZ98t5LpwY2NDt2/fTp+z7pBlIl6NRkM7Ozv65Cc/mebR0zKu54jisc7ol9se\n",
+       "6RR8Ai7oe1zL5XJZ169f187OTroPYNtoNObAFHwk6kPdzu7u7plIU0xrQh7tBGQcHx8nvZ/neYqa\n",
+       "wudSaVb/w5le6BOADOOg/9JpmqqILkVMPBqiGEkpMj5FBiB62R7Gk86erOftegoogpain6J+F0Ub\n",
+       "ivrL/z7xT2NcY2QDwfCtwPw4cPBxudJnESBYLihxvN6Gh+r9uwhWPITqCtsNl/MvzouneWKKzucp\n",
+       "zstFUwSnkT8+fucByszlNM9noVDSESghH6sbQNINGHg3ENLZd+owN767LK43PGPkhvbH47GazWba\n",
+       "9bO5uZmA67vvvqutra1U7yGdPeMDD5pUI9dw/WAwUK/XU6/XS/NLvQcRD9cX6+vrunPnjt58800d\n",
+       "Hh6q2WxqMpnom9/8prrdblKmWZalrcMYBJQuqRUHdRGQ0YdYF3TlypXULsXHgCfWnusowObR0dHc\n",
+       "WR/uibo3epHkMu2AQJqvDfJ+xj57ZCtuKeb6aAN8h5U7O74d1q93z57/+XHD6ilLP5wQfeXgcTKZ\n",
+       "qNvt6uTkRD/+4z+uN998c64WC4BGP5FrT+MQgXFdiw6nloo23HGRpCtXrkiS7t27Nwf0X3rpJT18\n",
+       "+DCBEQextE27h4eH6nQ6CRz6kfxuT5gX17kArXq9Pgfo3dbx7PF4nNqG7x5BcSB5nhMvXZLIiRsc\n",
+       "F7SIvIvuO2+AcUFFA+gTEJ/H/7GPi/qyyMDGBRqNa7PZfOJOI0fbCHdE4TzbkWz0zN3wubH0diMV\n",
+       "8YB7/Hf8PipUlFKMiPl8u6cDwRsfcxHw8nm8aCoCovQ31kV5usFlIypkf0cHCtT5RtqFe5l/B5wo\n",
+       "Wv52Im3k/UFp4r36rjiupf4Cj5aQOy8AIySOjNHfLJs/HKpUOi3GzfPTY+lRaO6ds8MGg0PE5N69\n",
+       "e+m9NJ7b39raSpEZ2vFIBn3Gw4N/bsyguIUUXrTb7eRxHx0dndlN4YoanlDzwjjgp8s5vLloQpch\n",
+       "y16X4PojAm7IQXnRSx7hd9FnyEmUF+msjXBHwKMmvu7oFwe78T9zyX3UdlSr1XS4IeCfefUUo9ed\n",
+       "IGMHBwcpAkKUkAgCab1Go5E+J/qAnK2urqYic+l0DY/HYz148EC3b9/W1tZWeh59j4AtHtvPvDnQ\n",
+       "AGRlWZbW2HA4TGkjb8f1yHg8TnVwOADMkRd7e9TkPLue5vg8gfy4KHbQ/4/hzUURhuhFx+tccBdF\n",
+       "Qnhe/Oxpn+nPiQYnEn1ESD1SE+/zz2IUo8gwI2x4iVRzg+i5xqM90Vv035Gi0vH+RpDmz4g7oRZd\n",
+       "6/fQDwxJ0RwXzfdFURE/XVlHhRBD/ShPL3aNZwsURbJ8J4qkFF2Q5j3aWESMFwWwoJ/MPZES8vIo\n",
+       "XyIRXH/t2rUULaCY9N1339X169eTMiLiUBTZ41n0kVA6hxiylfrk5CQZ/0ajoVqtphdffFH3799P\n",
+       "0RdP22BAut1uMvxRHqfT6VxxHuOMCr5UKqX6BwzldDrb2dHv9zUYDBLYcZ6j1DmfAmo0Gtra2tLa\n",
+       "2loqfC7ayXNZIoLS4vOMfC7ROXENu2MU1yv6ynnvxsyNrl/DHBU5lTyfmhB/NxLAh2jgyclJ2t2F\n",
+       "Qa3Vamq1WslQA8i/+MUvpq3FXmvh43HekKJxsAT5Rg523NC/Gzdu6N69e+p2u6ltr9fAKfCXdhIx\n",
+       "ZfwOIvjOgRXXMQYH9dimSqUyd64S68Vf2On3eaG46zzn1dPQpQAn0uJiUunsdkvprPFeBEyiwERD\n",
+       "sGjhn2f4i54bjQi/mUxftDHF4Tt3/PtFPPDaDIQBpemFwBg5ThR0PsfoBc9GeJ4kQDFKVPSZG+Ii\n",
+       "MBQ/cwXlB4Dxcx74uCzgBIpKyOc78pbxudx4sZyDVk7KRIE5yPW0j3v3TnweUziS5jwi+sj25aOj\n",
+       "ozSGRqOhR48eaW1tLX0/GAzU7/c1HA5TcepwOEx1H3meJ0VH2x7hIzrj/SHqc3h4mIw/r2nHC3zh\n",
+       "hRf09ttvz+W9pdPIEGum1+vNnc7qz8GAeWjcQ9YeZfLDpsitRxBNG6SnmBs/R8bn1SNO/n4TX4dF\n",
+       "9XMfJ8UIUgQj/p3rJwfi6Jaow1jrtOv6gdRIUbqAtiNQihEvl2lfSxSUAkyGw6FWVlbU7/cTKK9U\n",
+       "Kur1ehrW5XRPAAAgAElEQVSPx+r1eumVBq+++qq63W46D8fXGvYFYz4ej1MxNPO+v7+f1oNHuxuN\n",
+       "hqrVqj71qU/pq1/9amE0xNO/nU5Hq6ur6dkxosV683QKbbL9mWsdaHhKmDVA/z3KRxlB1Fk8qyha\n",
+       "7wXn58n1haV1okAXUdFigGHREDpKQ8DPi0Kc542cZ+hoPz6b53vo3Ptb9Ey/p16vz70h1vt/Hm9c\n",
+       "qFDOCBJ5QhdCVwJOrhAZf1EfXHl4P4rADuPz633eI5/d03I++5xGHsQIwkWTh43jWKFoRH0OpdN3\n",
+       "s6AckS88Pdp2hVAqlVJOG2XiYXjuQem6d+XrhjHQNxQb6Uc/7Gk4HKbi0U6noyzL5nYkdDodHRwc\n",
+       "6ObNm9rb20uFrMgDyhjgjOIjIjOdTtVqtdLaAHwBuJ9//nn903/6T1Wr1eY8U49AuTy5EvXP+fGX\n",
+       "ybmsOf88rVMqlZJBc17FCEwEO8iqp46IclHwzPh9Li+aXM9KxccgeK2TdPY9Ng5IvSjSPfJYc+V1\n",
+       "Dp4yiMDat6BHpwj+kTqhn1evXtU3v/nNuWuQt0ajkXa7IHsrKys6OTnRzZs39frrr+uVV15JkRn6\n",
+       "Op1O0y4j1szBwUGK6CFDnEjsa3djY0ONRkNf+9rXEqDhmuiolEolra+vJ7mODhw8ZltzUVrXHSPA\n",
+       "NLt7aMPllXucXz6n2B8+90hUUTDgPBt3YRIfjfzTXO/RjKIohBuxmLeN0ZUPS4v6+jRj8IXi9/iE\n",
+       "8WIyogaOOPkMpO1eeARgLkQUKjqAKIpQOZgoSrcUXRvBm0c9YjqjKAR83o/zbRH4KeLxZaMir1I6\n",
+       "+xoEUgzRCPGel0qlks4PcCXgQNRBCafLesG1NF+g5usjAr8YwcJzRYkxL+TrO52O7ty5o4cPHyrP\n",
+       "85RH530kAOROp6P19fX0MsxGo6F2u53qaYoMMAa81WppdXVVrVZL165d05UrV/Tmm2+mAw09ZeMA\n",
+       "gXngDAsUfVTyzg8Hbhi8RqORdkWUSqW5tJmnn2I6A6OGJ03qRzrd3ux9JoLiHvNlASZRZqJzIJ11\n",
+       "OpFXZDjLsgQ0pfnDHpkTj/ph+Fjzrv/8OR4diA4S17mMcF7K1atX1ev1Utv0tdFoaDAYzPWl2+3q\n",
+       "6OgoFVr3ej3dvn07FYYfHBxoOp0mkMvrHTjLh0gKMsazWCdbW1tqNpv67ne/m6KIyA9zgKyUSqVU\n",
+       "d4Je8JOdnbd8RlqLqInz0qMZfjpyBCE+V/AaZwQeuwMSoya0g+1a9K456YLTOk9jUIquiUo1eub8\n",
+       "/jDg57xnezvntRk9iaJ7QKEOLpikWq2WlCCH8OCV4TUTpuP+IuNNuC8KUVFfI2iKAMOvj8oJcqGN\n",
+       "URVXIg4gFwGM+BNTI0XXuyK7aCrqY5GiLIrOOeDm1FX47qAAxeRnl/hceoE1oVeUoSs6nsmPH2jm\n",
+       "USjeNQMoIv+OsalWq+p2u3rhhRdUKpW0sbGhnZ2d9OK8d999V+vr66rValpdXU31FZubm+r3+2q1\n",
+       "WmkcACrm0iv9pRlg63a7+sY3vpFSqRh1D0W7LACQ4AWASzpNxWRZloADaVIHj4T02WHDlmbfHRdT\n",
+       "S3k+/wK/6XSainIxen4Sp89P1GexgPkiyKMmEfxFUFK0vvmMuSCKwXceMYopyxiB4bn+WTTi/I1+\n",
+       "cKeOZ+HAxSjXeDxWu91Wo9HQ/v6+JOnb3/62Op2O8jxXq9XSa6+9ps3NTeV5nuqgqBva3NzU/v5+\n",
+       "KqZFpgDJABgOC+x0Oup0Ovr2t7+dapMAx0RcfJ3zd6PRSLuKWO8uOzzHSww43A2ibZ9H+EBqxiOG\n",
+       "zLPz1aOtFHxzjTvQ7lx5Oq+ILsVunSdRFHoY5p8tMr5PIldiRSmMqCTOa+e8PdtS8YL2v3npFH0p\n",
+       "Mr6er0YxuwDW6/VU2R1DsHEcUal4SK5IEUTQwT2LFBY/GBGMnxe+RYMZjbYXv0U+LgJoF00xxege\n",
+       "u6Qzf0MoOQeB7u2xqJl/VwoOYmiLfvgr5d17gSIYcY8ryzKtrq6mLZ3SbE6oLyHkvbm5qbt376Yc\n",
+       "/bVr15JnmOe5PvjgA5VKJd25c0dra2upePbGjRuSlA4k9L4hL+wIWl1dVa/X03e+8x01Go1k3ON6\n",
+       "YPx4iHyPUXT+MX4Al/MhplggT29xwixGJAKkWO/AgVxe8Bwjix758fV9keQA2yMXvvadf1FnezE+\n",
+       "12JIoxfuhhPiHsjr+5AzqfgEW08B+W6cSqWi3d3duV1S3s7x8bH6/b6uXLmivb09ffrTn9adO3e0\n",
+       "sbGhjY0Ndbtd3b17N0U5Op2OJKWibV7pgLNA9IgDBt999139/t//+/XZz35W+/v7+u3f/u3EJ/gT\n",
+       "X5ToRa0e4QOgwBPptP4E2cahwWnwaCvpFwcTlUpFzWZTo9Eo7YLz4tsIVvmbscbyBtfPHpVdRJca\n",
+       "nBRFTfiMgrgYOSny0p8mQnMePcnoUSAUc6VuIPCKHWzgIbohkzS32GiL7wg9YvC5H3544dV5/Xc+\n",
+       "OblR8ghNETigr35t/OF6H3NRBKVojtyrWhQ18fFdBnAC8HAA4t9FEBG/q1QqqegOmUIxFM2r16AA\n",
+       "5LyCH9Dsnkw0HG4kAAO0t7m5qcFgkDw8j+Ksra1pNBrp7t27Oj4+1tramvr9fgIopCIBXOyGODw8\n",
+       "TFEKP1SLtM10OlW/358z1Ovr63r48KEePXqU1hmnbjIOz31zDbUIKFaIHD98oZAYHjH+uFOK+YC3\n",
+       "7Fhg3IAi6RRYk+9n94M/w4Fn9OrRK7EG5qLoSXo0ppil02hFr9dLL3b03VpehySdvpLB6xe8wJLr\n",
+       "mBtPdfqOFAfu6Arko1KpqNvtqt/v6/DwMNV3wX/08mQy0bPPPqt33nlHq6urOjo60gsvvKA8z/Wt\n",
+       "b31La2trarfbOjg4OFM3+PDhw7TLplKpaGtrS++8845Go5E2Nzd1eHiYouXb29sp8uL6nK3ObvO8\n",
+       "Pge+OPjx9c24PApCfYx0un3fnV/kEh3AuT882wt9mV946uvOHQB3uD0yC6hbRJcanCwihI3iokWe\n",
+       "qBv9mI7wlIrn1NwzeBpC+Xvoscjbkk4LIYuMludVvU8uOIyB0DRj95Coj4G2FxnvaNz9f8YCuXFz\n",
+       "QiFEfkWQyPjPi4JA/v2i6FURELkM4CT2MRoW5tprRTxaVS7PzsygfgGj6vJQFBL1iCKGlTbxihyo\n",
+       "uIfrbfs5G6VSKR0QRr0EtVGHh4c6ODhIAOT555/XN77xDT377LOpaHB/fz+BFHLrtVotHRrFzh5q\n",
+       "Vw4PD9Vut7WysqJGo6Ht7e3k5XEYGzwplU6L7QAWrAd2YLhOGAwGc++U8vCyA0PuZx2hwKX5NzTT\n",
+       "LnKLkfIcOoaWMHcE4w4KXVY8MoFR8JqAi6AYCZTmXwUSx1aka46OjhL4RI+5nHvxtztzGFl4ESMH\n",
+       "ADiXe/rkfefv1dXVFGV79OiROp1OknPp1Dms1+v64IMP9JnPfEbb29spRdlsNnX16lU9evRI6+vr\n",
+       "aZs9RbBZlqW6r8lkovX1dR0fH6dtyScnJ1pdXU1Ag+gNssXuIYA1AIL0Dueg+FooOgbAX9QHT5Fx\n",
+       "1+MAZ7dBzO3h4WFyQpgj7oenXjsVo2vu9PiaZE6/byInRUJ9HlCITD7vWhjn3ngRFQGUon55uyhy\n",
+       "R6zSfL7NF07c7iXNv2mVe/k+9qMI6eJlRPBQ5HHFyAb984Iq+PW0Bv+8KJePB4V8XjrN+1TUtivv\n",
+       "8+bmIol5iukT6XT+ixQyvMGz9+hBBGkxouVpBBSj70yJIXNASpFCl07fXgzguXr1qo6Pj1WtVlOq\n",
+       "h/6/8847KYqwu7ubvCvAjG9FrtVqGo1GarVaSW7xVvf29hL4wqvc2tqae9mYe4+sK093+Qv7PDXE\n",
+       "OSnw19Ok8N4jXyhzNwSACnd2uJYQONd64aJ0NprrwJ4x+Prjt6erLoq8765/FgGROE7677s3Yo2T\n",
+       "A2v0hPMBHeVOoPPR0zX87YA9ptmm02kCDb1eL8k7RdqVSkWdTkcPHz5M87q6uqp2u63d3V11Op0E\n",
+       "1G/cuJEOajs4ONDq6mra+n7r1i29++672tzc1IMHD3Tt2rUE1Pf29jSdTlO6Tzp1+Oijgw1qWIim\n",
+       "AlRcTohGcr3bJS8XiM6wyxipyvF4drpspVKZO5TR5QDe0lfa9+exzqNNOS9defGxQp01wNCTIhiA\n",
+       "E5Ss52+9zSjggIkIIiLQKeqj9yca/hhxWNQHNyr+zDi5Hg3xNj3MyWdFudaisRQBDj7zQr0oeEUU\n",
+       "+xUpGk7a8blyBF7khfnnEbhdRvJ8bJEc+DUoEul02y5Kg8XuEQ4PmfpcIRNe2Q8xr34GgRuXIs/F\n",
+       "PVN2VjSbTe3s7Gh/f1/3799PzxsOh+nY9itXrqjX66XCbtIYk8ns0Db30Djbh1DxcDjU3t6eqtWq\n",
+       "Wq1War/T6Wh7e1sHBwdz50YAmtABnFLpB5nBN2n+3VSSUj4fDxmZo4+ef3c9g7Hzc3jgIUXsXO/f\n",
+       "AcB8pxRtY5zj8xyEXzZy4xQ/j/pWOtXVXgTru5HifQCMWEuCIXf95GvDvXr0ImCS38PhMJ32Wi6X\n",
+       "tbOzo+l0muYOmTg5OVGn00kRvt3dXfX7fR0cHGhvb0+S9OjRI1WrVT169EiS5opTKTy9ffu29vb2\n",
+       "NBgMUj/yfHY6OClPf5N4BL6sb+QV4EFxroMaZBMggby6foGfDjb52yMn9MXfZeUAMkZzXQY8MglF\n",
+       "/fYk+34pwEmkCFDOIwcmfgCSg4kiAxoXhLe36NmLDDDPY4G4MXBh8Jw4HlNUPrEYzp/D2Lyv8f/Y\n",
+       "x9h+EViifxyh7AdGuSFzKuLfIkErAkdR4S+KvEQD7wAlem2XIYISCyJ90cNL9ySl+UiQdDqvflKo\n",
+       "e5duOB2cuGFDMXtEzdeKA1/IQS73EOUYDAZqt9tqtVqpgM6jHVevXtXdu3clSbdv3067UDjOmjoa\n",
+       "Uku9Xi8p2UajoTyfFRSym8W9YbZuNhqNubfTxl1vR0dHc4WBPhf0gR+iKOy8cb5JSkaAOYxpMD+N\n",
+       "NxpVThb1GgCMalxXDnJIJ/h8XBZ6UhTVDU0EGtEgeRTNgbBfx7XS+duNixzEGHWGpxhVQFKj0dDG\n",
+       "xoYePnwoSXOGn9cMfPDBB6rVajo5OdGLL76ow8NDHR0d6eWXX04vI5Rm9U+DwWCu8J8i6StXruiz\n",
+       "n/1s6kuWZSlaA6hGlrFjTkTvqtVqAjiM88qVK3PRR8bqLw2Evw7MPTLFevHaNJ9P6s2k+WitrzNf\n",
+       "i1Hn4VC4bMQIYRFdCnCyqIMx/LToWlfsi4x+UXvuSS0y4ovaKopU+IIqMqxMiEcOpPmdOd6eLzL/\n",
+       "7cas6CeOORqyRUrPt7vF02adT867+Hlsu8iLiv1b1O8iKrqWti8DOFkkr274fQHHcKqHoFEcnlbw\n",
+       "NICfVuoy4VuJoyfk6TWPBKysrKQQsXQaYeFtqZ1OJ9WPdLvd1FdqSO7cuZN2K2xtben9999PefjB\n",
+       "YJC2xWdZltJBHELlbzt2Rea7awiTk9JpNptz4X/4wuFw7iHjMbfbbdXr9RTVIdKysrKidrs9B/BQ\n",
+       "xvFlmvAR4wWvYkqGfpOi8iiK9xfZyPN87mVsDv5p4yIp6k3vu4OE6JnH610HFTmCRW2zJqRTwCnN\n",
+       "p4ijTme9RaCEnCFT3/nOd1IhN8+aTCapDqrb7aZC8Pv37+vVV1/V6uqq+v1+OlW2VCqlc3sAJBw6\n",
+       "mGWZer1eems1ckTqhQgOaRppvobGdQNpo9FopI2NDQ2HQ7VarbndY35elsuZF867I89n/qwYBYlz\n",
+       "hmx7eifaTuagSFd71OQ8AH4pwMmTyJHYede4sY8gJTIvKm5vI3q1/I6LIAIRb889hggmYhTEF/J5\n",
+       "nrA/w+sQioyhG+yivjoCXiRI8RkeOnUqmhtXsJFvRfNS5GktIh9TUQTlIqlIzqTzAbhTTKn5fbEu\n",
+       "ATmJ9/s9ePIxhcN8RkXB/XhqpdKsKHZ7e3vOgyLsjbePDOF9Pffcc5pOZ8fGk6bJ8zwdPe/e73Q6\n",
+       "TVuDvX2MXqUyO2GTQ+m8OPLKlSvKsixtoffCWEDG0dFRqhXAgFQqlXRwGump+J4d38kAEflxeXXg\n",
+       "yLZn37HjxJxyvobPq7c5nZ4e3oVBuGiKssX/nqIq0p2u2xi/R7ljhCTyIepVCH7HKJRHyxyUAzzg\n",
+       "JXNAZAIZ5kRVj7CwY21nZyc9KxpsUjakVJDvWq2mwWCQZIzD3dBbRTIiKTmJjJs0zv7+fvru6tWr\n",
+       "iZ9ZNr8zjD4SEUfeiorp0QFuC3CiIe+v82uRQ1bkhOV5nmpYnkSXApwUoadFnvR5SEsqTm3Edvx7\n",
+       "L9RZ9OxoJKLRdSF1Y74IGPiE8TmTR1/8gK0IvBAcF6oIYOKzPXriIWrGGt9YCbmH5+Mo+ryIV/Fv\n",
+       "H8+T5ioqq6L+xe8vA7msMseLgGzRPX4tcuLjdOXioVWXhQhaUNQo4ghivXjW2yiVSsl7ROlzD8Ye\n",
+       "gEI0gpf0cWAaY+BE2GazmeozJKWzF3hXDRS3le7v76edESjcfr+feEf+HrDhp2y61ww/SCG50gWA\n",
+       "+dpwj9ZTOniQXmTJdUS2mJOi9cIzfY0y15VKRRsbG+mguyfpvd9r8r7HtCXfF+nJeK80X3Pl/9N2\n",
+       "kV7wKBTy66k9AHXR+gGYeJu8o4niV98R5dvwXS+zc4waqGq1qvX19bkaD3bvcJZJtVrV/v6+Op1O\n",
+       "Aiu+TgHHRFLyPE/gGzmiiJgxX7lyRcPhUF/60pfmgCG8lJTOEcKWeGG7g7bo7ERn0mtOWEcAuuho\n",
+       "+y43lwGf/yzLks5wGSmiCwcnH8br/bALNHo9LrQ+oT5hRdcXUZHxBzV71XiRV+T38Bl98fwqffX+\n",
+       "RiUXoxBxbA4EihY+/TlvW5eHAKMR9H5HIOb9L5o7n59oCJy/RR4Vvy8jOIHiwuez8/rrCtXBb0yh\n",
+       "OT+j8kcJujx67Yo/w71Pwtm0hRG4c+dOUkhEU0gBofQIZ5dKJa2trSnLZgdDDYfD1DYeI6+Op5Cv\n",
+       "2Wxqd3d37oh6SekMCt5zkmVZMtal0unbjxkz/ZhOp+n9IKVSKaVviiJF0ingx/D5IYa+3Rd+Mh6P\n",
+       "/PB3rBPyNe6yEL1KijABZOVyOdXFeI3QRZHLFmMpchijA1PkuCxyBF0PO7D3M0gcvKG3nDzS4k6d\n",
+       "ryFOXyYt6O9GIm3EGEktOgDpdrtzZ5jUajX1+33t7++r1WqlNGCn09G9e/e0tbWlLMu0trY2B/4l\n",
+       "pTeC8xlgws8NAUhTd8U9bMOPgAfQk2VZqteaTCYpYuF89jcN+7zl+emuONfDrjd4lqegXbdEZ5k5\n",
+       "KJfLC51hp0tzfP33wsBEIy2dLRr16yJA4XdRX6Kxj8bBQYNXR7tn5BEXvyYqniJDVCqdHiBEWM2/\n",
+       "X/S3C1Y0anzuJxEWgTTaK/L0igxljI5EAx2ByiLQ6cCEvoHOPRR7Xr8/bloUOYqRr6ioHTRI86k2\n",
+       "vH7kIPK/KLzqgAMjGz0d/9+VjR/Pnuezw9ZQiLQ9HA7V7/dTGoU0iTQzYEdHR5Jminw8Hs+ladg2\n",
+       "6UWN1Wo15fAhL2hlCzOeV6lUSufBcFBUls2KLdkBBLDa29tL70/B0CMrfhgbz4L/yFUsfKWduDuK\n",
+       "NVpkLLx2yAsQpdOCR+aEtFOlUlG73Van01G9Xv/IMvm9IDfuyFuR/Bat7Sfpdl8bXOu1V65HPOoX\n",
+       "t5Tz7FjP52ke+iQpvbRRUjqXBH3o6wfwwk+n09Hu7q6Ojo706NGjJNt7e3sJxE4mE927d09XrlzR\n",
+       "ysqK9vf3leenKRb4SD+JtOX57K3fvV5Ph4eHCYB5lGc0Gumf/JN/os985jMJELBWkTnebwVgpzYG\n",
+       "XnkkyfV41E3Rlsb5cZvnTqTrY/RLPN6gCFw6Xbw2N4pCXIS0z7sv/j4v0lIUaYhebRHQiUY3evpu\n",
+       "IIuMlAOUGIY8bzwOap401gjSokHz36DY6OW4AvfURAQoi7wm+OS1M5GXsV0foytC/4mRFF8IlwGc\n",
+       "RPDqHpF70UXki93nF1lxBemK1tslUoCxo8gZ3rvH7/1AOaLAULC+RRelN52ebuekMBTDHD3Ou3fv\n",
+       "JtBAf/M8T9EJPNJOp5N2s3CCLCFy6kl4IRk596Ojo3QYFm1LSvIszYzOs88+qyzLEniCV74LAsXp\n",
+       "9QKctBvnkX77tmAP/wOq8jyfe3cL0SWAJrUp/X5fR0dHiedHR0fpjIzhcKher5f4d1HkhsrXLt/F\n",
+       "a6Ri3eBrHfl0EEIRqXR6CizgDrklYse8xfXFfMR0mzt3AGIOOotbzev1+txamUwmCbAfHh5qdXU1\n",
+       "RUNoYzqd6t69e6lwVZoVhz948EAHBwc6OTlRv9/X5uZm2rGEgUYesmwWcXz22WcTSCJFc3BwoE6n\n",
+       "k05lJr1TpEuJinhqBx4gqw7GpLM1Iu4cYEdcl8NLnksU1a/jXn/FCvqMeV1EF/pWYmkx4o50nkKP\n",
+       "vx3QuDEsui+GZYuiL0UUowLRc4+G3o2KG1JfgDHCEMfmExk9F7+Pv/1/9yai0iiKgEReLeJ/EfCI\n",
+       "zygCIQCcGB2J6aE4nqL5eRr5+bjIPUzp7M6LOLZYpOq8imHVmOpxkMi9fI6njcyhXOmT98HPBYke\n",
+       "U6VS0bVr1yTNe5ooGIwrO2kwuJz/8Nxzz6WTZAHCKODhcKjNzU2VSqW0DZOICHUuKL3BYKBut5sA\n",
+       "RKVSSR4m/UbxEvLmnJSjo6O5l5sRHaI4t2gNxiJJjzK5zLJrCIIv7AJizjC8Hl0imuURKQAa65Ww\n",
+       "/WWQb9c5UX/zO8pW0f3cxz3+Sgs8fOTdjR9RN4xp3HYNOHbdCu+Yfz6v1WrpLcPUiEiaq81gLkul\n",
+       "UtruTirlgw8+SLUTgM/r16/r8PAw1Zdcu3YtHW0/mUxUr9e1vr6ufr8/dzgiO8okpWfxpmLfYlyr\n",
+       "1bSxsSHpNGoEeYGuO3ExKoWti2Auzi19dpvG36xlj8byDJ4D6HHZcH1FuvQ8upD9aYuMb1yA0fDG\n",
+       "6xalH/z+okXtBiJWJBdRNCoeQZFODQDfETKL4+R7vDhfZCixaMiLnk/bHlpDKIv4WMRXAAJ9cvDi\n",
+       "fY33FfGzaJxQkbLyMfqiWAS0fK74zq8tuv+iKBp3yD0QDBJKx+fNlTz3uyLzKIl7J3yHwqZYD+PN\n",
+       "Z76LBBnAw/cIGm12u11tb2/r5OQkpUD6/f5cmofnjkYjvf/++2lNkU75g3/wD2p/f3/OwPD2bXbh\n",
+       "YIQBLRsbG8nT9cgMb3ClhgVetlotHR4eJi9zf39ftVpNh4eHqfDWPUw3bBgkpyIdw9p1WaN4Ns4h\n",
+       "7aInACruzQO82OETI4vU0fhbmi+SXK4jSHH9BQCl/66vkGu2k2dZNlfPAb94hhtBapXYAeYGlvbp\n",
+       "CwXWGEkcROaKLbzMzWAwSP2gTiPPZ+mSVquVonW8WfuFF17Qm2++qfX1dR0eHiYwSpqTNN7e3t5c\n",
+       "SqNer6fzUTxq4zxCrr32qtvtqlQq6eHDh4nHjDcWjkunwIx5ijbOdZHLrvPRwZzrHX+NhUe/IlAl\n",
+       "AuvyQ1v0+bxdaBe2eb7IM47fu9ItQuvS6YFW8RpHjUXPZiHBLPdwInlUxA2FKwz64OHEeI10CoZ8\n",
+       "uyKLxT07N+DufcOP6Mm5Eigyju6NuJeNgXSvMfY5RmnO84yehiLQKmrHPVT66HxljNHYXjS5B+9j\n",
+       "9PmM3mD0KF0GIyCMBtJBjD9rOp2mN6CywyXLsrl3vyB7pVIphYclpZ0Gw+FQb731VjrnhFoTFC9p\n",
+       "FvrIAX7PPfecbty4oYcPH+q73/2uvvGNb+iFF15Iio0UEUdxYxjYNVGr1dTr9RLA4pnc67spqF3h\n",
+       "SG/kmJ0VhNg93eK89uPUnefS6XkTTu5VuqKNkUnalWay6UYL75/5Y3uzyzP1BYPBYO61EhdNLqsx\n",
+       "FUy6RTqVraiTJKVI2erq6lxaQZrXbfDVgTRRil6vlw4g8+gAOp2oR7/fTwDHU4PT6VQHBwfpfBHA\n",
+       "IYcOEokYj8d69OiR+v1+SuXs7++rXC7r5ZdfVrVa1c7Ojvb29tI7dNABFDNzjgkRkclkkl7dcHR0\n",
+       "lNJ9nETrB6iNx2Otr69rNBqldtyZZA34sf5Eo1yvu+53W+k2gLVEhDOmZwDTfk8En6wJd3QoYve0\n",
+       "nUdgF9GFp3UWkRtl/ywqZj73ayA3EB4Oh9kIMALPAiCkG9Mwblhi7p8xxdSJF855uCvLsrSjgDyz\n",
+       "g4w4hggOPGRZlBbx+1iwRX8jSB7OPs/QL4qCFM3tIkBzXnSrKELk98fP/EVul+EsCF/I/O/E+NwT\n",
+       "iTzykCgy6n8zh4R28UJGo1GqV2BHAsaP9rmPMPF0Ok1pD+YCw7KysqLNzU2trq4mxUXhINcAGlut\n",
+       "lmq1mj73uc/pzp07ajQaun//froXWWs0GnO7glBSKHUMFtuE9/f3kzLloDTGgVIHxHBOCYdmoeyb\n",
+       "zWby9FZXV+dkiJRWkW5xAwtoIAXB5zzT15MbS/hD2og30cbIDb9rtZra7bakWb3Myy+/rI2NjXTw\n",
+       "3UVRlNEIqF238L0Db9cvEWi5gwVI5TN457qY9CGAD6BLtAO96G+hRk9Vq9W5N14jy76uPBp25coV\n",
+       "7ezs6BOf+IR6vZ4+9alP6Rvf+IbK5bLW1tb06NGj5AgAbjgYDd1KnwAApFw9YkEfkG+Koj0dBZh/\n",
+       "//339dJLL6larardbieeSfORPPhCNJ+2PZLC/+604Fy5HAOK3BnBVtE2KV23y8g2c0f92t7eXgKB\n",
+       "i+hCwEmR4StS0i700QieB1SikfY2PVLgxsB/IkCJz/britIWHup0bwEEXyrNdh/s7OzMHbftk180\n",
+       "Fv9uOBymfCmvn48RlsijojMuvEgsArHz5s/nJF4bn+s/MaLDuJ1XPm/xejcEjCeebnqRxCmoHs2L\n",
+       "88l4ve7AIyA+fjd20nxBG4rB594L96ir8LeB4rW7lxu3MuLhoHT5XzpNgfC8RqOhW7duaTAYpBTS\n",
+       "1eWWozkAACAASURBVKtX9Zu/+Zvqdrv62Z/9Wb300ktzR33neZ68SJQ4qYuvf/3runPnTjpR8733\n",
+       "3tPDhw81Ho9THt69MdYBRbHOt263q2eeeSalTkhJURMC7+gDPCiKpLrhdZ3gesRPVfb14S91Y9cR\n",
+       "xZy+tvkfoEbhY6PR0Orq6vdIQj8a4fG6M+E61XWke/VcBy8dpAIipeID2CSllFej0Zh7942ne4g0\n",
+       "+Bt8MepZlqV0DGeZlEqlBFDoL/2iAFtSSufcvXtX/X5fu7u7euedd/TZz35Wt27d0pe//GVtbW2p\n",
+       "Uqmo1+uluhWijr5u7969m87pqVar6na76VBBZJNzd+hLlmUpagJYLpfLevDggZ555hkdHBzMOTl+\n",
+       "ngm8jRFo191FTiQ6hO9ZHz43RP5xOtx2YM9wcAFBDv6IYh0eHs6lnSNdCDiho0UEo2LBpBtXX9Qx\n",
+       "QhCNZszZAk58u5g0D1CKDHUEQtHg0PYiTzmO2Y1OPHHSjb0/n/v6/f6Zo+Y5TTIaOG/LxwDqBdg4\n",
+       "aCkCX9E7iv3yZ3h0Jj7XFVpM08WIUXxOnN8I/CjqvEgql8spZOvbvaXT00UdjBbx3fmEsXOD554l\n",
+       "baPA2FYLCBkMBnOyg4fjBaHsfHAlBnDJ8zwpUn+Tb7PZTICQN6s+88wzOj4+1tramq5evapKpaI/\n",
+       "8Sf+hO7evZuOtncAhVfHmiRK8oUvfEH7+/va2dnR5z//eVUqlXRGCooTuUWWnQe1Wk0HBwf64IMP\n",
+       "5p7JmFHk8JmxU0+wsbGR7uF737YO3x2Elkqzw+oATfSVehj6j+GUZjtCeHa5XE4Rrkpl9qZb6hWe\n",
+       "pMQ/DiqXyyklEQE36xT947sQ4RFGyg02c+Db0Nm2iwzD24ODg2TAJaVIGH3w57tBBbR4Kh1wQ92f\n",
+       "R7/pO8/Z29vT0dGRDg4OEgjKskw/+qM/qvfff183btxIYJ/TY0nBoOPff/99lUolbW1taW9vT51O\n",
+       "R3fv3lWz2UyHuBH9oOAW/cFYJpOJ2u22vvOd7+jWrVspWgg/vS4E3npND/0CQLPuPFKOrongm7Xg\n",
+       "soDt8KiuR75cLkgTMT8UEhMhXEQXAk4IycYiNGle2KHzAEI0kE/y2iH3knwhFYGEJ4ET/03bEaQw\n",
+       "8Xip9Xo9CSDG4LxnQyhl0LkrvachN/Iu2M5zT034b36IVng43EFdEahhHh10+vxyfYygFCnCaLzp\n",
+       "cwRKF0Eo71arNZdvl+ZPW4zRIOYQUMA9Mc2H8uBZbiT5vbKykiI4XBcVNsDItxP6czwiQ4rIdxmg\n",
+       "zIiYbG9vq9FoaHd3VxsbG7p586Y+9alP6Sd/8ifTibDuIcInjHie59rd3dWP/MiP6Bd+4Re0tram\n",
+       "a9eu6Xd+53e0ubmZ+pRlWdoZ5Ip7OBym39PpVGtra3rmmWcknb4tGDDgRM0BkSQKcldXV5PhKJdn\n",
+       "u5+Itvi8MSfw1aOu/q6ca9euJWBC6J25xnnylEi9Xk/bq10uLooA1aQMIQC3e+YYxaLUDrKPbHn0\n",
+       "z42c1+T5EfJ8X6vV0n3+GgCvUUHOmTeOcPc0CQabSICkVHi7v7+vq1ev6uWXX9YnPvGJFBlYW1vT\n",
+       "r/3ar+nVV1/V3t5ekhGijAAnANeDBw/02c9+Vmtra6rVatrZ2dHLL7+st99+O609BxJem+G6dTAY\n",
+       "6NGjR0nHsq05rmXpFGQRpcLeElFFPt258dQ4fISXHs3zdDIv9kTuvQaFfnhJA5FcrjvPobwQcOIo\n",
+       "KxqU6B1LZw20G8sisFAEKiCPlETA4u0vut/BkxtgJiRGBNxAsKjJ2+N9TqfTM9Xn3g/+RvlKpyAF\n",
+       "9Oy5VhS+K083lBEYOKjwsXsEJQKMeEZEETDxZ/vc+vkQcQG68Szqh6faIsi6DOQL0M8BkDRn2N3A\n",
+       "8517FxhMN3Iuq26s/H0ylcrsxXwU4BF6paYEGXRvKqYiMESsQQAwnwOMiT5m2exgtK9+9av63Oc+\n",
+       "p+PjY929e1df/vKXNZlM9MUvfnEOVBNRINxeLpeTR/no0SP96T/9p9XtdnXz5k3duHEjhYoxJuPx\n",
+       "OIElFDMeLcrw4OBAWZalaxzExh1S8JC/8zxPdThEi1DqRFuZL0+ZwTvfygyf+/1+2kkkzYNqDLSf\n",
+       "tYGecYV/0cRYom6J6535jbIMePA1TjvutXOd83YwGKjT6cztsPG0EO0iC64TAEHU1rEeAJPIFsaS\n",
+       "qCPv2PnMZz6j3/qt39KVK1fUarW0u7urvb09vfDCC3rrrbfmrqUAHb22vb2dzjr56le/qkqlop/5\n",
+       "mZ/R0dGR1tbW9OKLL6b0vOvP6NgAxpvNZqpbgvy0VeQG+eRz5o417rwAPCDXRDWZM2p84CvtSzNA\n",
+       "SHppUaTbeY+M0J9LV3PiCtoNHxQBhhNGmO+iAV3049e7IS66zhXBouhJBEVF0RLvc8zTNhoNtVot\n",
+       "tVotdbvduaLB6Cnz2z1ljB6nCMJX0DPXR4ASxwQfPB8Z+VQEFiOoWZQGi6AoAotFcxR5Ge+Jvy9D\n",
+       "1EQ6relAMXi6iTnxhepzIJ3m2B1g+ntouA5gPx7PTl/Nsiy9XXc4HKZ1Eg1CBK/wlIgeJ1jyLC/i\n",
+       "4zpOaC2VSmq1WukkyldeeUW3b9/WBx98oJWVFX3iE59Qv9/XG2+8kUC17yqg/5x+ef36dVUqs1NR\n",
+       "S6WSfvVXfzWNtdPpqFwu6+rVq+kcFC/0ZXcOfKUNxoScEPGjfsGP//bCVebJj5B3J4C6EbxtT7Oy\n",
+       "y6Zarerw8DDN6XQ6VafTmZs7wJ0bcgdx7oFfJLnuQH6YS4pRMXTwOkbjvBAUEOnvbIoRKOk05b2+\n",
+       "vp54yxxxj58RQl9dL0SnCXANf7nHU6ij0UiDwUBra2va29vTzZs3UwSGqMIHH3yQdtAwlna7nVJF\n",
+       "yMB4PNbW1pYODw/VaDT0G7/xG2l3jgMwaVaTxPuopNPzdlg/8HB/f1+lUiltmXYg69FInAoveGW8\n",
+       "RYdvOliWlNaVz/1wOFS3203rAWDNsxx8+NjclhGpOk+2L3y3ThGwgKKH7/fG6ARGgLbckHG/X7co\n",
+       "2uIKoogWgZUiY4txhmJOD6+YsXnY08eIssqy03QB+X5vw0ODvhDjGBkn18Y3c7o34REmH4vzMwLM\n",
+       "RQAu8nERuHAwtAgUxrm4TGkdV6DwzV8CB4+lU3lDyfMOGa9NGI/H6Z0apNR8Kx4RGVITyBkKwz0x\n",
+       "FJbXXOX56fHXblgIiXe7XW1sbKQtuVyLES6VZrUupH3YuXP//n0dHBzo4OBA6+vraRzULjhwqFQq\n",
+       "euWVV5Ky/vSnP62f+7mfU6vV0sbGhvI8V7/fT4daAeAwGC4X8IVDqwAxLtP+t9cjsEuIufGws+fd\n",
+       "Jc0ZNue3R2HW1tYkKdU9jMenL3gD7MV172tWUoqAXSRh8OKpuA4IkQkIAAKA8HXLbhTakE6PfyCF\n",
+       "AH/ZPs7ntOeyGx0cwAuf+9t6qe1gXBBRkzzPU1qUF0hmWZZASLPZ1P7+vo6Pj3X16lXdu3dP7XY7\n",
+       "pQi5ZjAY6ObNm2lXCjuujo+Ptbe3p+eeey69t4o1h/H3LcPIICkl5JJ0lNsfSXO7kTyq59GWmA7H\n",
+       "vnikgwgLgN7TRx6RxDlhTXqBPVE/LyFgbTxpt86FbG9wYXZwIRWnUvzaaIRcoRaFw3ieRwYcYXM/\n",
+       "//tvfy59g6JhdIpgxz8n1Otj9+vcmDivWGws6hhOxpsFyLinHBdq5J3zyHlJn/2ApNivyBvnl8+H\n",
+       "3+/9iLyP/HV+xnqUIn5fNLHoUSrwEgXjvJROFbLz0UPinF7JwWe+u4t7UAK+o4a8MvxFMaNg3Et0\n",
+       "kIhXiMFgyx+RChQfHv/Kyor29vbSoWoUndJfFBDvFsnzPBXt+vba8Xisr3/967p165aeffZZvfHG\n",
+       "G8qyTDdv3lSWzcLj/X4/RW6QdwoVY3gfJQ94gue9Xi+dGkuhKnPjRjKerYKihgA4eZ4nrxbD5DUX\n",
+       "w+Fwbr3DDzcCzE+RAr8sKcto1D0lEl8LIJ3qAebD66QAWhg9drBISsXZpBYoBKUWSNLc26aJaHGQ\n",
+       "mhcYs67QlR71idFExuf9Hw6HqV+sPa5fXV3VaDTS/fv3dePGjQRue71ekmfk5vOf/7x+53d+J9Vd\n",
+       "7e3tpcje7du35wrDXRfQDv2GH51OJ70UE74y3nq9rv39fTUajZQ29XXucsZ8+P2ARUC02848nx0M\n",
+       "B588Pcuac2AjaU7efX55zqUDJ1Jx9CMCk0UARTobSXGv2g1nBAfx+ecZRf8+euuxX0zsovbdCHmF\n",
+       "OH30RbIIWPE94TtHu5LmlKf3tQhgRT575Xusy0Gp+pgiqOK6+H302ov6tyiysug5Rfy/LOAEQxSV\n",
+       "AEbSgQFGKPadOcXb46Am0gl4kM5jP6eEfhBiBZAAPh3wkqN3BUnaiXQK52+gcDFGKJe1tbUEiCiQ\n",
+       "Iy+ObJ6cnKQCT/rhnrg0MwD3799PRmZzc1N3797Vzs6OxuOxut2uqtWqDg4OtLq6qnK5nA4rc8Pu\n",
+       "awaPF7DHCZ0ArY2NjdR3UqQu/36qLqeKOo8ARn7GA1ECgIqnIfzcCfhC5MYLS+kD25AvWr4BWZLm\n",
+       "TkL1vro8QQ5+Me7oE04lpn2ihsg9ckk7yBtpjlqtluQt1re4MfY5ybIsgWYKszHyDk7L5XIC21k2\n",
+       "ezdUo9FQu93Wo0ePEnBoNBpzu3OyLEt9Aij1ej09++yzc0C41+vpwYMHiTfoBUk6Ojqa2x02GAwk\n",
+       "aW69Az4AJA8fPtTW1lYC3f1+P8klER3uRW6RV0Anc0f0TzotjKUvjJE17E6yrz1kwKOTkfe+1oro\n",
+       "Qs9EdjAS0yKLrpfmd3cUGT43aNFTj8/2vxelIGK73nb0PB1sgOr9b1CpA5GYgor9W8QD90YweB4i\n",
+       "9nsieIpteqjWx8BzvLgzth+f5UayaK7iffH+2M84x0VANgKfiyIMkEcq3Fh5TQOeH+SREOl094F0\n",
+       "asTY/st2YPiLt040Y21tLSncKLcobZQnoXSPnHnf8/z0BE1PWUizmifSLSidWq2W8snT6TQpsPfe\n",
+       "e087OztzhhsvejKZHURYKpV0//59jcdj3blzR6PRSFtbW3OGnXfssDuC8UenBD5xjDnPrFar6vV6\n",
+       "Go1GOjw8TMWIDqaQdULn/M0c+xrx4kCiAQBGalGYQ+acwkP3XH2N8DlA4KJ360jzp0zHrefwnnlw\n",
+       "HnGvpCRrfuYIJxLHHYsQOg2Qxv1cB8h3HnkE2OucMLYAE4iCbdp0PU0dyLVr17Szs6Nut5te5Ac/\n",
+       "OOPEoy/0o9VqqVKpJDDFYYmeKgEcIKPwlIMHIecncwCYYf3BJwChn3nCGof38MKjX0QlkU/nr9eV\n",
+       "QMyfrweXDcCNg9jj4+O5iGehvD1BHn9PKIKBaJSlxSdr+j1+LwrfDRn/S2fPzeAzD+35c4oiDG4g\n",
+       "+RvwEfvroCgaHdC/gwIWNcLECYg+0TH1BYp1JRB544s0GqnIV69hoX0vmoogg7E7+CoCCQ58Yj+K\n",
+       "8u3R83LlF2VkEUi6KPK59ggBxh1eOpjwNKc0v0WY00TdCMRthxHEcr4JCoXIgAMTftNnvHdfMw5O\n",
+       "fYfZ6uqqarVa+syff+XKleTJ5Xmua9euqdvtqtvtpvfroGwPDg5S+ocQ+GQy0fr6evIYV1dX07t4\n",
+       "UNzscsPDKzr3xQFiuVyeAyikhg4ODtI5Fr1eb24tuC4BaHndBHNBBAFQ5uvK61WIlrILyIGOA/9o\n",
+       "EKTT9MdFEoALWfXjzeOaRVb523WXdFoQ6cXARKiQ66J6IIwi/PHICjKFfOX56TkckK/JPM/TTjHa\n",
+       "xKhHp7LZbKrVaqX05cnJiTY2NlSv1/XOO+/o4OAgGVvmql6vq1arpTOp/KwS1hkAGwBLZJLoKGDG\n",
+       "gbdH3ugv0bnt7e0E8IlMUFDtUSFPFSF/yLM7h0ShmG90lad8yuVy2okGj7PsNPrFmnSHgbXpEc8i\n",
+       "ulBwEv/2/xdFDRaBGSh62UUGyw2iT0Q02N5+UVTC23bDzGLkB6PvR1cz0b5rAgXNhDko8LSL99FB\n",
+       "wXnRhUXA5LxrvY9utCIoWwQknzR3RXNWNI5F1/P3eW1/3OSgkb4x33gpKOqihelz77wgCoK8EnGI\n",
+       "xpkdJsfHxyqXyyml4Uo78s5BET/SqTePEWZXzdramtrttgaDQTr1stlsajgcpkLB1dXV9C6Td955\n",
+       "R/V6Xa+99po2NjZ0fHysl19+WS+88IJu3ryZABTKinqZtbU1bW9va3t7O0Vt9vf39eDBA/V6PR0c\n",
+       "HCTDRpQGoOCHTVG854dw1ev19PqIRQ4RXiKesSt2+MghiPCN9eLgUTo9CI4CXgwZax4gidKPBdWx\n",
+       "GPcyECDBAbbXc3i0wmVKmt8KD5BzB9OBN/z0KAPy7R46upPdI/THHTRpPmKLbuVlf/TF3yjtu7B4\n",
+       "jw7plFarpXv37iUZYS2QbmQsgCtOSiZKd3x8nI679xNi4ZHLlO+KcvvlBh9ZdBnzKK2DRa4nzeOR\n",
+       "DYrvoxPqgJR1QDveLvPt10P020FsEV2aF//x2XlGtIhcwbqHXWQ0i+5zI78I+LAwPFIRDXtM6/Dj\n",
+       "nqJ7Q9wbIzlsDUNgYsTEyQGC9yn2M0Y9nD+umP160HgRD4uiFbFvMSJ13j1P+r9IJrzP8fOLpEWg\n",
+       "2MPYfO+7qvgM7wKljpLw8zWk+ZQPyh+P09MKcdcVis5DrgAn5ozryuVy2iq8s7Oj/f19SUpbeYkA\n",
+       "rK6u6uWXX9aDBw+U57m+/e1vp+3Mk8lEzz33nL71rW/p5ZdfTuHsb33rW1pbW9Pdu3fTZ+12O8nM\n",
+       "ycnJXFSFAkmeT7/39/fT+3IIbVPfE8PPFG8CArxQrwhoO7/9HkAfR537gWFRDkg9SUovaEPXRD2B\n",
+       "IWRsGIfLkNJxsI3xOj4+TsXNDtx8Z1qRc+fn5Xh9CfqRa2kHOSMFRH3QxsbGnE6lD/F5UX+4LiyV\n",
+       "Skn+RqOR1tfX1e/31ev1tLGxIUkJLB8cHGhra0uj0SgVnlJHxcF9zzzzjCaTiXZ3d1PfSKtwsNut\n",
+       "W7dS3+JWYKLngGG3BfDAZYe1y/hpy9Ml7oSjJwDLtVpt7kRXaRbZjzVyABN3ZCLYi5Et6dQWwV/X\n",
+       "MzGKHulCwEmRQfTw9CKDyWf+A4OYsCiQRdEE/o/P9O9if4uAUxH684XihY9uqPH2JM15Bb5gYjuQ\n",
+       "g5W4ldH/9ol/GtBXFEWJ+eQoSEVeuIM0R/j+3NjHJ4GQ2OcIxi4LMJGU5ts9NmkeMHq0y5WyKx3p\n",
+       "bK1Qls3v9qJd2nDgQl4ejw4P39v1Cn28H9rZ3NzU/v6+7ty5o2vXriVFCgC4c+eOXnvtNb3xxhv6\n",
+       "+te/rldffVVvvfWWhsOhXnvttXTcerVa1b1795Rlmd566y11Oh2trKzo4cOHqe+3bt2aiwAR/uYs\n",
+       "BWTp0aNHSZFSdHjt2rW0gwhjh1cbPTzAP8bCdzehsPFeAQWsvTzP0xZPzrHx+gqAJP1gHh1oYBhI\n",
+       "72AQfAt/rNnwGoSLJE9Jx7SXgwIP5QPo/PA66dQpg//u+aMPPUIlnUZpANfw0wG1A37klD4Actrt\n",
+       "dkqbS0rR7Dyfpe92d3e1tramlZUV3b9/X594fDLs7u6ubt26pXv37qX+MJ5Op6N3331X165dS7uK\n",
+       "SIOgo0kbAX7oE7JCGoa/PfqDrJHmQV49mg1fWfukGeEV7yfyKCrPJ2pCX4hKYadcN7FT1O0TfOC5\n",
+       "DjK53+uE4AfzvIgu9E1pRcAgGqhoEBd5/xFYnBchiIwtMqTenwhuojcQEaR7pY463bMo8tIgFh1K\n",
+       "kv45avYIhxs7FxYMkIeuiwxgHKPzOqJ0+kL78KDICHt7Hkk5b079Pgd0RQBx0TxdJPli8/mm6DJG\n",
+       "k1CazK90GhXxPLqf2OgF1Q54pfmXruGBsQXTyb105Mp3CLz33nvqdrtaW1tLioXajWvXrunrX/+6\n",
+       "7t+/r+eff17ValXf/va30yFqHgafTqfpUKkHDx6kMDYnG2dZpnfffVfT6TS93A3ZR8FLSt83Go20\n",
+       "qybLsuTV0kciLByyhUJtNptzfKNGgAgPfPSdUF7ULJ2+9dm3ZxKZAgj6mqTfkubOy3Dj6anbmH7L\n",
+       "sizVtJznYX4chOfrY4R30UsHYAFIWMsAOOkUxGC0OU9DOjVcRAzdiaHoU1KKRhAlc6NLv3zbcaVS\n",
+       "SeuQ97v4561WS71eT71eT61WK23ZBQwTMcGQE+kj8kI9EXUy1E71ej3V6/W5HU9e8Mzc03fO5aHe\n",
+       "BLlG1hyYOUCD74ABwEi9Xp97NxWggahJ3NLtYA9g5CDddY/rNPjEGqZ/bjOYF+532Yl0ITUnUTlL\n",
+       "OmPwIrhwigDGFT5tFRkyro0/tFlkjKMhh+neTwcWcYyek/VQM4uVPngVP9cXhZr5P47XozhusF0o\n",
+       "fPyxTedV5J/fuwhcxL45f523DnCKIh8YBTfWkeL8F8nIRZEfxOVy4SctSqdnYpASQB7gLwvcZYT2\n",
+       "uV86lVsvAIV31F5IszmlvgH+OQji2RTOkjNniyx9H41G2t3d1Y0bNxKwWFtbSwCFKAsRm1KppO3t\n",
+       "bZVKJV2/fl2f/OQn1e12tbOzk57Xbrd15cqVOcDCy+6Ojo4S4Njd3dX29nZS0rw4DIPjWzU50K7Z\n",
+       "bKZcvUc5ABrk5sfj2VuP3QtEscLnlZUVdbvdM/LN2sV4u3PghpJIAd/7WiUS5A6GR2MumhiLH2YG\n",
+       "QKGmjrEjL4BpN26+K8yjwPz2KLHrEk8JwBdkkigIsu3nbLhOxMAD8PkNCD46OlK73dbKyooePXqU\n",
+       "1uCdO3f0uc99LoEGTitmnLVaTc8//7wODg4SeNnY2NBkMntpo+tnJ4C8p6IAXgA/PwfH02XOH0+/\n",
+       "kM5FJr1wtshuoosB+YA/TwOTYiNqCT/9O58PACVRGfrru6YASF4oHenCwEn0umP0IhrEaIDc8Ppn\n",
+       "izzo+Bn/u1HwNrxNro+G3z93kBO/p6+gzAhsYuErCzrLTo+ALgIqtB9DrFLx7qTz+BB56IspRoqK\n",
+       "gF0R7+IcxmfEn8hnb8v5ViQTlwWgsBg9PE9/PUTqcy2djVJxvXvNXgNUJH8AEPeuJpNJUt4oC+4n\n",
+       "VEze/ejoKMmTK/XpdFYQ2+v10q6b0Wikg4ODVOD67rvv6sd+7Mf04MGDlP44OTlRq9XSzZs3kxJD\n",
+       "WX7605/WeDxO2yzpKx46L74bDodaWVnRO++8o2q1quvXr6ter8/VA0hKQKrVas05AM5vQs6dTkfN\n",
+       "ZlOdTidt4fQXo8EHxs468nSMFyCyG8o9YCcv3GQLqr8XBqPZ6/Xm0m+SUvsXXXdCesBTZkQ8Iuh2\n",
+       "z9r1CH+ztRxjS7QKcr0ToyHSKbhjcwHANBpUogkeZUCWHcADxh2McvDhgwcP9PnPf17vv/++Dg4O\n",
+       "1G63dXh4mPrCSbCs452dHbXb7QR0m81memkhMsbhboPBIB13j8zDH+ehR5yQazfqHu3AnkinQJDo\n",
+       "G/qDrcIANGpPkNF4pL5H4P3ZtOF1UpKSY4b9Qpd4DZFft4guNK0jnU2FFIGLaASh6HkX3Uu78Xlu\n",
+       "UBGAWLlf9LzIzCJjEfviUaHYTjTCDiaYXD6PQGPRGN17jtd7CqYIREWQFnOb7g1G3kSe+P9F4LKI\n",
+       "HLBG4AHfiiJiRc+9CPKwLZ4Zn7HwUai+w0M6BQyM3UFYBL1ed5Rl2ZynBSAiWkPBnhdnAmbyfBby\n",
+       "brfbSd4wCP4ulN3d3RRNaTQaWltbS4a8Uqnok5/8pN566y21Wi1JSodWYcw5sGpvby+FzzmYyj0t\n",
+       "DufK81mNx82bN5XnuW7dupVy6BToMt+8MdhfUIjMwg838AAePoOXeZ6nz1HWzIvXj8E/Qu7OS//e\n",
+       "gSNeMsWGjUZjzsCR3nD9wmfu/FwUMT4MbbPZ1O7uriaTSTK2AAB3Lly/OQhlfTA/1NV4vYIXyyIf\n",
+       "pCIAu34oIPd7xJaoG9GReGR6r9fT6upqAg8uO91uV9evX9frr7+uzc3NZCdqtVrauTYajdTpdNJn\n",
+       "vJm72+2m55DWAbCsrq6m+gwHah51lU51MM4qtSDOt2gjkElpJoOkdpFBP5rC01r+Ek2iL/5s/o58\n",
+       "BXSwpgAqpIt4jgceOFeFsSyiCwEnLrzRYC36P6YHitpzA+jILxpd/9tzZjCRRebXu6JyilEfnr3o\n",
+       "efS3yLh7H5g0XuoWx+5GKoaJ4zVuvIvGswgocE9MuUVFWRQtiW27kfW+0G78PLbp/Y5g9TKAEgjv\n",
+       "xFNjeBvR2OCFFoX7fSeOG74YLVpUf4JMssMExUfkhGvK5dk5BZwbwlwTfalUKnr48KFarVbanTEe\n",
+       "z47glmZzsL+/n5Q7yp/zINjNUiqVUuHq3t5eWmMocA8Ru2L0k1f5nDNcMGhZlun9999Xq9VKBZjO\n",
+       "D5S5F3PSFp4vY+33+3NKnDHCMzxT+MBz3HMtcq6YY+aD+XMPlvs468T10Hm5+Y+LSqVSihKQ2pBO\n",
+       "gbinsL1wm7nkb9IhXn+CjDPPyCkRphj1y/M8ySM7XphjjONoNErvInPZZn739vbSCwXRo6TmpFkU\n",
+       "5OHDhymK0mq1EmDHIGMvABt+/o6nVvwsF9fx6ADWsUeeiAQSyfNiY+e9R2C95kOayR4OhKSUNoLX\n",
+       "rmtcB/CZp+Qc0DBn8MzBpV+P49Hv97W+vq7hcJjkgfsXytvvQlY/MhUZzUUGx41ykQfu3skibz5G\n",
+       "QaIxxpC4AfVajUUGPeZNFxn6ImO96O84rvOASex/jLoUUeRFUUSDtoqKaV3oY3olzkWRko7GtQhY\n",
+       "LAJuPp+Lnn/RRLgfZeMH6rlXh7LNstnx6ngZzmPpFHy6N+TFlkQe/FwMvJ2joyNJp2emoCxJY0gz\n",
+       "Q7i6ulrYxng8Tgeh8fbswWCQ+thqtVQqlbSxsZE8RT/Hh5B5tVpNxtzXOZEXjLenMzD2nNyJwe73\n",
+       "+wl84IH3+31dv349bWv1gjyIiIgrc4CTg5XV1dW5VC/f4Z1Sy+Lbhz1a5lG/LMsSUIu7QtwbAAYo\n",
+       "7AAAIABJREFU9TVDv2q12tycX3RaR1J6VwtGJctmLyItSvGSAgKUObBmXaBjIt8isPHdOdPpNAFl\n",
+       "gAzRAuaQPhDF43/qjySlU5Rpv9frJf6vra2p2Wzq6tWreumll9KBa0RepNOXMZIW5D1NpJccgLis\n",
+       "ui3waI5Hnfg7y7IEgHxt+DX+tvIsy9K5Jg6GJaWieNqFJ8gx/eB+L4D2aIp0GjX09CU6wUEgn7Oe\n",
+       "ACrww0FWEV0IOIkph1j0xzXS2foEpyIjFkHGk6ICRQaU5/v9RZ5LBCb+u+hZ/rtoTP4MB0VFbSwC\n",
+       "You+d+8uRje87diWgzS+cwR/HiApAg1F9SI+bzyzaN6LAGgc40VTlmUp3OzFjMhVuTw7O8TD/75Q\n",
+       "4a0reRSqe0gocLb3ugI4OTlJkQ08FVIGeHL0BSPgZ09Mp6dvhh0Oh+r1ejo8PEwnq0pKCh3jDYjw\n",
+       "uhiveSqVZmdU+Iv7JKXj5ZEHV+IoP89Rk/LxWpBms6kHDx4k4w94ATSQHqDmhBx/v9/X3t5eeg7p\n",
+       "L3jqc0rI3Ne8A3XqCeCfRxFQ8hiTuEUZoObgCR3JQVoX/VZir3/gRFN4gU5gfbpxdeAhnaYekHe8\n",
+       "a9aHnx7Kc5EhUooeGaMfGGn6R7qNPjvIHwwG6dC+0WiktbW1BLqGw6Fu376d0ofb29va29ubS82S\n",
+       "LpGU2jk8PExABdDmYIQUiMsVET36jPz6+if1g0FHFj2tgxwS0YB/jBee7+/vp3uQPyKktEekEp3A\n",
+       "+Ogvcx3nm4JYIj6eegKQF4GR80D3pTi+XjoNh3tEYhEokeYjAEWGrCgKUtRGvM4X0aJoS4w+xHHF\n",
+       "Z/j9iwBVfG6MEjgIgBZFLuL951FRVKfoe55X1GYR8Ip9j58XtXFeXwAvtOd8izy7SMLwSUrhfwAL\n",
+       "yoI6DK+LQP75mwWPMpA0p2ylU6+Hdvmp1+vprAaPfG1sbMzVEPlbVv14aun0QCe2HKLUPRwb+e+h\n",
+       "atr0KA/RkcFgoP+fuTf5cezKrr0XyWB07IOMNlNKlUqlaqBCGTDgkQHP/Wd74omBMuxB+ZWsLlUZ\n",
+       "TbJvoyX5BoHf5uLJy5D8fX5iHiCRDDb3nnuavddeuzmj0ShiCGhYmzQPNuU6k8lkLVgcC+/4+Fi9\n",
+       "Xi/AHOsJhfHw8BCsD5/n83m1Wi1Jq+A8gMTh4WFcy8fRgwJhcxgzZ46k1fpnTSC4vaGwuAdji0JF\n",
+       "8bpS20bDWkZRUumUZ3Z2A4XEbzzmQFqPDWGePBbLlSX3llYuRMAbSg9Xxv7+viaTSawr7geQZ8+5\n",
+       "+yKXy2k0GgWo5ViGx8dH9Xq9iK9xcIjidlkM40OfYTyYe/rq9WBYX8TNYEjQl0qlEicMO5ijOdPE\n",
+       "enZw7q5JScGKsBYdLACaiQ1z4AngdtcboMVBu7tz0iMfGD93r/6UbtoKOEmVHILNFVT6vzf/zEHE\n",
+       "S1a7C9KUufHPGDjeS5VrFhuQ1c8s0OCbzNkC71f6e4/z8Pc2ARPGNwUE0odsVFZ/NzFVLmhdgW0C\n",
+       "kll9TYGEf+6BdOn4+fyk453O6bYbyg0hvlyujlfH/wvA4NmJd/DxcBfL4+PqoD8EI58DEvL5VaZO\n",
+       "ur4J8ET4Iyyl1YnGgBHmAtqcPZDLrU6mdsDkz+JWFPPrbgsEHsoYgMF16BtWIn0mkyIriJW6I5PJ\n",
+       "JFgVaT3Wi+d2qhoLnZRR3GCwF54i6+sL4Y4y9rWZpnpS2TRVxsRHOE3O3KEQuBfg7CXf/C/RGEP6\n",
+       "xenOxFGkDCeK2fc8gMtZPMBZypZIq/N8pBVjwTUA4tLq0EjWGSn0WP6S1jJg/CA87uuyB8DFeUvE\n",
+       "Tjkr4bEwLuNYn647iPl4enoKJZ3P54PBGY1Gajaba8xJsVjUeDyO1HWPXUFm8F1nVFOXzGg0CuAL\n",
+       "S+L9dgDHXJFiz5r273h8FK5ongXWE3DHMyM/YBg9g+ejc+uk9HyqeFzo+nd+juWdotms+6TC25Wg\n",
+       "o9OfUrbuisoCO+lr74d/tokleokNyAIP6TNvep32IwsAZvWH72aBrJeYlyyAuIld8fd8nPn9x8SS\n",
+       "bGoACJQ9EfNOiUsrIcx6g6VAWaYK390+KDvuh8UE2wCgwcqfTqcaj8eSVtYLlWNx43At7uunrqKo\n",
+       "AQkIO6wlL6DlwbfME8Cf/gBQDg8P43mxWnHV8Ky4XnZ2dkKJu3uK2BqEPS4S3DRQ8KwbwNTBwYHO\n",
+       "zs6iT41GI1KRPXiRceZvdzXQRwCdAzKeEQsZRQf4pM8IdxSBU91eQ2WbDSCWz+ejgFmlUolzZgAc\n",
+       "AGeAKM9DUTAHGYAamlvorG/XDcw54MAzZSaTSSh3H0t+h8xHqTp75syctIqTYL0UCs8p5Nzz4uJi\n",
+       "rd+AI1jAQqEQrkpnlqgVwrosFouq1Wo6PDzUzc1NxMiQir+3txdrm/GRVnVEvM/S+qnR9I/ibzA0\n",
+       "qcx3A9kZGElrzC1j6IfRMlfMJ0CJMeYaboAwD27kblxzP7Em/580t6T4m0WYBVyyXmf97da2X9ut\n",
+       "RGc/+JxreZ98caZKwH/n101bVt+z/mfTOKOQXncTm5EFsHzBpfdPQUDWOPr1oORcaTl4S3/j/c+6\n",
+       "V/rsWe87K/QSSEr7+jE0D34jvZU5JqVPWs9QQng4IHFr210HLvA44E9a1VdxYeDWFNk0KGqsI3eB\n",
+       "ACCcDuZ5SB10Xz5pv+wbshrI2pHW9xUgi+dgHOinsya4mebzuer1umazma6urqKuCoqIANbhcKhG\n",
+       "oxGZN57x4YwHio+YGRRrsViMs3xQgF5BF3CUji3P4LUhHLxx/g4AhDGWVhY62VKz2SysUUAM13zJ\n",
+       "wvwlGqnoxeLz4XWVSiVYN8AHqeq+flm7gBtpvQK2xwYxb6wJ1jzzARCEgcDlxfWp+gsL6LLQgZID\n",
+       "F4AA/fJ96fE+xCJRGBDwSlYagePISWqdpHFPMBeSwlio1+uxhufzeZTYf3h4UKvV0ng8XmMq0Rce\n",
+       "lMp93XXDXidwHVDsYM/dvO5q4TNpxZB7are0iqNZLBaxXx2YkFoMKHMXrj/PprY1HtyVT2o9Zylh\n",
+       "/74DD77vf6ef+e9fUmRusUOp+mLdxHRsUtIvsSe0NGjQX/8UqyJ9ePgfLXX90K8sl1ZW2/ScPgbp\n",
+       "PPjrLPCQ9uelMdoEUP3vrHWz7caGhgJFGd/e3oYVtFyuAj9Rlu4q8FN1CeREqSFkvWiTAxLKtkvr\n",
+       "gZy1Wk3SiuEYj8drNSZgbRB60opKRymQXSOtZyW5MkAhE/CKNQ2ocVcG/XFB+vS0OjYeS24wGGh3\n",
+       "d1etViviVvDR397eajweh5VJ312RueXo7M3BwYGq1apms1kcWsjY7e/v6+TkJCzCNEWZeBPWs48Z\n",
+       "jBWAhf6yPrwyJooO5Z5awwCUbYNvntULiaEMPZ7DlZuPcy6XC9cLay0N9JRWAaCMG4yMMykOaN+/\n",
+       "fx8W/t3dXTApLldh/dIUWGkVh+EuPNYpAJF/0+lUlUol+spawvVIv7yysq83gsu5N4cEEtcC4HPg\n",
+       "TnVlaRXcTuN77gp0Q8PZIq6Xy+XCJUW2mjN9ktYMEH7DNbPACf1iLJwB4/eMP3sDY+yjc+tIHyqW\n",
+       "1DJnUJ3BiE7nV/UbpOwsnU33c6o2tczT32UBJv+XAh4WCIvFFby0bv2Aov0zX1AODPzv9Dn4m2fk\n",
+       "/7SPaXsJMKXf82fyZ8U1gND3PvJbv5d/7myA9yUFWg4Ys4BIuo623dh40ipgFVbDA12d3i+Xy9rf\n",
+       "31+by+Pj47DolstluD588+MuYs+gAD3YjT2CBZPP51Wv13V0dBQAAEWLkIEpwY+MgnH/MusVgQ+1\n",
+       "ns/nVa1W4364JbgP15cUp7/mcrkopIaiRvkVCoW1k18RgFDi0PmM1d3dnabTaYAg1uVsNltLZ1ws\n",
+       "nv3xw+EwzlHBiiRYl3vjxqBc/v7+fgRJMjeHh4cxplDby+Vz0TJn0zxYlvcAZvQZZoBr4QbcZkMJ\n",
+       "+TMAHt0N6e95kDVriJRd1idMG2MPGzeZTLS7uxvuEq7hLAhrQVK4XtyFwP8AS1gZroU8Zn48Xfz2\n",
+       "9jbqmsxmM9VqtbXUc+K4iFdhbNx95UybtHKNEfwLG+Muv1wuF8xUuVwOoAWAQ9axjp1tdXkLs+PB\n",
+       "sdVqVeVyWTs7z4UAqTcEY4gx7nuE78KmetCyH/EAU4hr05kcgEoul4vxQ056OEfatl7nxBWLDzIL\n",
+       "MDpqljoC3N/Lsuidrvq5bAv394Hz97MYHQcHLH5vfn9pHemyQbJYEb+Hf5YFirKUcwr+NrFR3vz7\n",
+       "6YL3e6NgiAPgmfy7znLxfroYmesU+Ph3/yfM17YbFp+07uZCoUnPwg8LjEBMhGy/39d4PNZgMAih\n",
+       "4Osd4Uw2gLTKwimXy3F4Ht/xgl5YpLlcTuVyOeYR0OMBbLy+vb2NsuowBgCAu7u7ACLSSsiTPQE4\n",
+       "g+nxuV8sFlGFFUbm4eFBo9EoBGqpVFrz1x8eHgaQuLu703A4DEt+Op0GzY9gvr29DZDEmAAosBQZ\n",
+       "fxQMmR29Xk+dTmctVRsXHQoHyp/7UGSO/Y6bgrEB0BUKhVA8KHYUHn0jZkFaFcfaZgNMMxcAMBg8\n",
+       "YqpwdeTz+QiMfHp6iqw1AlJTme9xG4VCQdVqVaPRKFxqKHCPNeL+yOpyuaxerxfgA8sedxpGgbMJ\n",
+       "yCzXLbe3t6pWq5rP56G4vZIr5ev9AEoYQBgz9hAgBODOXALac7nnDCTWPrKCQwTZR1RcZX0yBqT8\n",
+       "+zixXhkv3F2ML+7PyWQSQJjvpG5pADYAmjFAntDHdE3QH5gy/macAEwvta27dbKsYbfU/bssBGi3\n",
+       "9OGcypM+BDvcw0FGqvAc0HgGBS0LRKT995YK5JRd2PS79Nop25PF6PhzpOzDTynudFz8dQowuB7B\n",
+       "USxA/34WQHGLKes+6ZgwB+n9/VofAyDxhoBwZi/1CzslDoBA8VWr1agrgvDyKH/Gm2BWFDtjlBVw\n",
+       "6+uGoFHcCE5jexGr/f19nZ6eRoE15oLiVADSyWSi+/v7KMmO5eXCFUYHVxcCjLEBVBCfsbe3p3K5\n",
+       "rOl0GuOay+U0mUw0nU5jrBqNRghTt9x5dgJsPY7FXWcI45OTE5XLZTUajbV6Jf1+X2/fvl1jiljb\n",
+       "gJ/FYhHKhJORSf3E2nTA5+MFsGdsCVgEOCGH2DPbbE7T0x83DB00My5ulN3c3MTYeFo9Lh9A9P39\n",
+       "ver1uqSVe8FlJmvIZQzVR4k3ccaGFFnkBkyA6wiYQgANwez0odFoBAjzAGfAF4CKPe2l3TnYEuVM\n",
+       "qq60yuzzNQyYwkB29xMy0d0n3Je1kuoI12G4VfL5vE5PT1UoFMJlCmjCYFoul5G2D2CiX2msi68F\n",
+       "mBK+6/FkkgJkuR7f1LbGnDiISAEASsfRrLsBeM8VtrtJUFjug+N6qQLmft7c3eA0bKoMAVH+LP4M\n",
+       "fi3+d0bIAYa7aJyByWIN/Hc/xX6kbM+m7/2UayR1LfEbp/bSACpvKTPibqAUoKRAzN1fad8/toai\n",
+       "4ZmcKl0un+MnOKnUhaxbk61WKwq1LRYLnZychC/b/b2cnYPF5cKA77A3EJy5XC6qspJNAljxfUGR\n",
+       "suFwqLu7O/V6vQA+ngqLsD4+Pg7XTaVSCeGFtcSaIKCS+UegTqfTYBdwZeAGQBATNwIA7PV6wYrQ\n",
+       "f7fUAGMp3Y8y4r3BYKDpdBq0+uPjYwQlUlr89PQ0ADiAApk0n8+jUirsC1k/HiiLBQ/zhKsJoJey\n",
+       "ZC7PXqK/f4kG8PSaPMzVdDoN0HF3d7fG2mF9o3QZw2KxGIXKYOVYr71eL76DbGEcWS809s5isYg+\n",
+       "EvPhwbIuS0i1Zd1QGNCDw2G9Go1GsIQYZO5mkhRsDXuJfYSRwHqAQZJWMVuwLtJKX7VarbWYNEnB\n",
+       "OPl9AFOpPoNdYtzYr+z9arWqYrGoZrMZaxBQ5wY/BgHHQtAH1qnHmrjLGjYQIJS6NJ0B26SbpC2e\n",
+       "SpxlAaOUvHgT38lyl9DYyExyymQgQPy+mxgUvx+fu4/Tr526irKek/tLilgE+pyOg/fNr+n9yrqX\n",
+       "K32u7ddxZiW9Jm0TUPPrZzFaKCyEC9Y4NN+mMWH8stxtbiWlACUFsh9bc8oWAY5lxGYlBsKBKwod\n",
+       "xYrg4RTg5XJVV8BZK0lrygAlTiwKa8796whXMnjq9XooGoQM4AnLE9cRgnt/f1/ValXdbleSAsRU\n",
+       "q9UIQPVAX2ciid3w4DhcAZ5pgNKn5gjrDvdMpVJZqyuBMiC+wKlqru3gjXXtpxBDxS+XSx0fH+v6\n",
+       "+loPDw+6ublRrVYLWVCr1aL+CjQ24wr48PTsnZ0d1et11ev1OC6AgnQeU+JAhDX0kuz7pRoMhwev\n",
+       "Qs9j/RIvAguCPCYeCLAuKbKscrmc+v3+mrzwefLMGg/8hlGRFPE5pMiXSiU9Pj6fms0+Ys0AXgCu\n",
+       "ZJAAgmezmQ4PDyOeazqdajqdxtENhUIhGC5nMwGze3t7qlara/sNgIZLw10szkYAEABCDkxdhzib\n",
+       "5EYAspPYD5ieQuH51Gba999/H2D7/PxcvV4vZA4l+B0wTSaTNTCVxtFwPhhMZ71eD3aFfzxjuv8+\n",
+       "upgTZyakbIDiAZ8pcHBgkQIMBCuTl7oUHGBkMSnuz/TvbYoNcWXqin3T+7RNrIYDiixmI4vBSa+Z\n",
+       "Ah3+dyYqfZaXmv+WMfK+k/aGEOZzFHN6H1feqeDl9yhhXwc/t7/bbAAQBxnu6kGhSiultb+/r/F4\n",
+       "HODAi6b1+32NRiO9e/cu3B77+/sqlUoqlUoR4IblDWjBx00DfKBAPcvm8fFRBwcHoVBQnG7lknp8\n",
+       "dHQUKY+9Xk/NZnOtqNZgMIh55zmxUB0AeCwI848/3dcO7g+Uy3w+j0BEYnWg4FH6WO8oRQQ/v6dh\n",
+       "CAGMJEVcC5b0xcWFrq6uNJvNIoBwNBqp3+8HRQ27Ij0rOQA7c+nF67g/YA/WivWAZSyt19vYNlOI\n",
+       "QnbGCEW1u7urbrcbVXV9/t2t4sYZsSleG0ZaGSkEJbMeYUX8pG1JEbd0c3Ojp6enCGz2M2ZYv9J6\n",
+       "bInHZtRqtcj6urm5UbPZjBRcaZV6D4MJs8iegpmgSu1wOIx1AehBUcPa4ErCJQgrg2JnvKgoDVAD\n",
+       "XDGOyBH3JuC+Amzn8/lYazs7O/ruu+90cnIS690BCACOvjN/fqAgWXXs+3w+r+FwGPf3lG3PCPI5\n",
+       "duMpq22NOZE+jBFJmQQam8KVslveuBec9ubzTQyJsyNpQyj4tdLrORuT9peWukq4JwLKn2mTANp0\n",
+       "ff9+FhDx36eAJgVNDubS8fA++7ylsSBewdSfy+fX5yAFW8wFffHfpi69tM+bwNo2WrlcjgwWBCpW\n",
+       "ITVBqtVqHByGICIAEKvR008nk0mACOIVZrNZWN1kq6AomS/PQOC7TvO6ewNlAfXLScNOJc/nc717\n",
+       "9y4sP5SR08YpcEW4QUUzTwhnLFisbk/XJWaAbB2EMBYqboT5/PmYgE6no93dXdVqtaDlPWbDrX5p\n",
+       "VcsBa3Q2m0WmD8BisVjo7OxMNzc3Go/HmkwmUdiKuBVpdUbJ0dFRBLrihnK27P7+XuPxOMbEA27z\n",
+       "+XyUMncrFRZxm42MFeYLZg3wAHMhrWf49fv9UNrI1FwuFwHH7IvUoPI4oVxudeAewB+5wO9gLwCc\n",
+       "zrK5CwGZAQsDGJ5MJrHOyMrZ399Xr9cLWcTcwEoUCoVw0wCG+/1+BP8yBnd3dyqVSnHfNHaHGCWC\n",
+       "tcfjcTzLdDoN8MPYALqlVXqwZ/NIq9g3+u01Sbj3zc2Nzs7OIi5rMBjEYYhkFHHtyWQiaZ39Bxgy\n",
+       "PzwDwNLdbABG1jLz89K63mpAbKrwXeFI2ZZ9VmwKD5wqxzR2IY0RSZU4wtzjPiR9oBxRygixFORk\n",
+       "KX36nuWuyuoL13CA48+Ttqy4l5TxyGJ2sgBJVtv0HQcXFAVDaKTMU3qvTa+zxiHrs/TvbVPfkkLp\n",
+       "k32C0sJyQVjhu8XKf3p6Uq1WC8DgAaWNRkONRkO1Wk2np6e6vb2NeBC+JymUJamtrHtAO1kBh4eH\n",
+       "axYwwIUMoZOTkwAT0qpSpGcx1Go11Wo1jUYjjUajuObh4eGaJQigwEoCIOVyuQA5WKhkIHlwYC6X\n",
+       "i3RTrx0DLV6tVmOdHxwchGJ5enqKwEqUQUohU8iOrAsKahGo6GD+/PxcNzc3Go1GETtALAJ7i+wm\n",
+       "B98wUICop6cnHR8fBw3u6a3Q+ihl4hq2nakjKZ4P0OVp68ViMdwizKOzErj0CBjO5/MR8H14eKjz\n",
+       "83NNp9M1Iw3glrKmKDbkNXEsFLNzxpU1hMXfbrcjQHW5XAazAQswHo91cHCgRqOh0Wik9+/f6/T0\n",
+       "NIAKDAAybjwe6/7+PornwVZ4TBSGHM8E2GRMYIAc5NIXd415PB/K3uOZGBPe9zLxHtvmOmg+n+vq\n",
+       "6kpnZ2cqFArhQvWibXyXeXSASdo06xTAB7hEHqfuOgczL63trZ1K7HQQEwhtlrIkWTEjbBYHBkwO\n",
+       "EdcpU8G1/T5ueWe5k3jtFC2WsQt4acW40Hjtz8HzZ73vv3+J/UhBRtqcbUktEu73c9kGB5D8nQJL\n",
+       "HzOC0diAKWhyEJHVjzRWI2W80r/T8dl2Y+0RzMfrnZ3VCcLSqjIqqbIAFT5zQeDFw66urlSpVFSp\n",
+       "VHRycrIWqMh18PljfeFSqtVqa64RFAXXAFAAQqDIEUSHh4dhKQ6HQw2Hwyi/PRqN4nuwOE7rOjD1\n",
+       "NYFARAjDmiCApQ8VEkBnPB7HAXQoluVyGSmSlFZ3xsT34HK5VKlU0uvXryO1m6BhSuvTp0KhoN/8\n",
+       "5jeh6FA2rH0swDStns9gip6enjQej6OOhfvykV8wUlyDGKJtNj+SgPmVFHEYruQlRYAz1jVrFEWJ\n",
+       "e0ZSBMd2Op3YB4xj6pZfLpeRDcTaxjWX6gyYM2KhDg8PA6C78sYIYMz7/b5KpZJms5n6/b5OT0/X\n",
+       "apB4n2DQlsvlWir009OTTk5OIn6ENe8l7l0/sOb9sE2MGUAY7zljwe+llWz2SqywQ9PpNLKGptNp\n",
+       "PPt8Ple73Q55UCqVwh3k3gPPusHFiosLtokMO/qF/CMTj/ly3fBS22q2jrQ5hsStCd+8zh64sGJA\n",
+       "2ESpAuVeaT/82t78b++jpLVJ4/O0H/Q9vc7PaZv68tK1NgEWf52CI373U/1iHF2Q5nKrACfG2AHF\n",
+       "JgDkc/ISg+ULOAU4WWzPz1nsv0RD8QNkR6NRFBtLgyQXi0UEVgJgGGOYCMD3zs6OLi8vI9sHUOjp\n",
+       "h55xgiA7OjqKuWE9ovQxCnZ3d3V0dKTXr1+rXC5rPB6v+ej7/b6k573Vbre1s7Oj8/NzdTqdtbLm\n",
+       "pEgi8D0DhfXgQm9n57meSLlcjvRqrC83LhCCWJtYyKVSKbJ6Dg8PI27HC6dJ625MX3P0rd1uB0DC\n",
+       "VYEl2Gq1dHBwoNPT03DbdDodzWazsG7Z62k8A6CCPQGzAGtCf1Do9I2AXgcD226kkgKAUYB+VAHx\n",
+       "DAQKo7gYG5iFYrEY7k+qrnrWUi6XWzt8zo0g5n00GkXMCnPpzDigfrlcnTkFqwhIgiEAGBDbQiA1\n",
+       "jNf+/n64YUulUrgfmWdcs/xP3weDQewvZ+QlrYFs1nmlUgmw5YYz7mAK0/Gs3lzHIX8BtGQHMgfz\n",
+       "+epgy8PDw3AX49bCFUvDYHD3HZ/73vKUZGnlsnE9gaHxc2T11mJO6Jy7AFIkyUQ6s+KfO5jxhydz\n",
+       "wT/jXm7lS1oTGlluDmcMvB9Ob6V0Gc/nz5HlcshiAbieAx1p/SyWrOukCt5f+3ilcTQ/p3mfUtCH\n",
+       "EHbFyn3cb5+2rDlM2Zm0r+n3eM5N8TLbaARDEvMgac26xipB2HKo3f7+viqVSlwHoYTi39vb09nZ\n",
+       "WYw97AjWDu9zz16vF2nGzEs+vwoKdX8+1DQCClcJ/mUvtFapVPT4+KirqytdXFysxRT0er3wOSN0\n",
+       "eVbALWwNbq7j42NNp9NgZaDQU0DjLGW1Wg3l7dT83t6erq6uQgG51c7+cZct4wXIub6+DkubAEZA\n",
+       "FgDm9vZWp6en4cbE/YJbGSXHa/fdM5/MAVYmJey5N/LKD7KDVdtWI/CTDJh0D6JQUdhkzCwWizWg\n",
+       "C8Phhsx3330Xqa0EiM/ncw2Hww9kDe6Vg4ODNbeJz7G7OZHbMIIwJMvlUu12W7VaLWKtSqWSbm5u\n",
+       "IpUXEPX9998rn8/r5uYmMlJg49hL6Jvlchnpx9VqNZQx96SPsO+s6WKxqOFwGN+TFOzrbDbT0dFR\n",
+       "6B0MH28pI0gNFhpxJRgmXjSPuJB2ux3zgsyg0Sf0H2AsZXl5VuQxe4igYFhJD5DfuOb+P67V/1+N\n",
+       "BQcac79Z6mNMFVjWQLjCZDBoKevCNfmuKzgHCDRX6lhxaR9o7mNN75VOBH3yZ0/vze83fe6MT/o6\n",
+       "67cOVLKYpU2NOUndYPx+U7wL92RcHISlr1PQkQXYXgJnP4cB+iUaAimfz8dBYAhjWhpLAH2bz+fX\n",
+       "SmLDfuBmKZfLkcHC+sP/DWhAaRYKBfV6vUj5Zey4N6CE115NFgHi6xsrVFpR0g8PDzo9PVW321Wh\n",
+       "8FzVc7FYqNlsqtfrrVVlhTFyhoHYA863IXMHYITVS1+xIEejUQQn4h4AFJJ5ICnqlbgxQmPt3t7e\n",
+       "xomwjUYjxj6XywVo8tIGrLVqtSpJUUOF+UW4M9Yu41BK7hZmrlAU7r4hm2PbwbDSqvbNcrmMmi68\n",
+       "v1gsAhACQgHCPD/zj9uAAFbcb+VyOcZ0Z2cnWDAHGpLWzrdhrtwo9MBZwDcgG6AprVwOgC32ArV1\n",
+       "Dg8PAxywrk9PT4M18/oixWIxjI6DgwNdXV1FcGylUlk7HZyS9awFCjE+Pj6GEVAqlTQej6P6MLFR\n",
+       "3BPdxfi7HkHewHLs7OyEsfD09BR1TmAqXRcDIKRn8OLxMakeBqQw/riT2Y/5fD4AkOtP1yWuf7Pa\n",
+       "VsCJA4Y0/iKNN/GWZWmnQpff8+AgO+lDhSmtKlT6595SpekDysR68/vR3ywl7Ao76/mDW9nQAAAg\n",
+       "AElEQVSyhGlWS4HIS6/T936uQvcxcICSRZungOKn7uNzlLJN/h02hLT5UMOPAZxQnAyKGSAhrao8\n",
+       "kh3CuKEI3bJZLBbhTsnn81GqHTcBcRUwBE6XkvYJPetl6z3+gmqs3P/29jZqTvh5KcvlKsaDfUYt\n",
+       "CIIBPdiVzB/SKgkKdcODfuMuwLqVVkrZ/dXcj8A9Mpomk0kAAmJ63KID3LDn+Iwx3tnZiQqgjAVB\n",
+       "xjwzAMGtQU+9Zh4BMhgngBqvTSEpsnlwH+zs7ETNGJ4XCh7Fj3LfVuM5sOR5ZndnIBsZAz89GmXK\n",
+       "39fX12q1WuGmGA6H4QYjaNtTZgnqdiYG5cfe9zAAmBdq07A+fC1Iz2sAlgewm8vlIq240Wio3W7H\n",
+       "+nj16lW4aLmuV8SFMcMooC/oOhgNwADsHzVvAOLsXZhBlxWSIijZdYUbPbCr7p4qFAoB1mnuQgJM\n",
+       "elFGD7xl7QM+PF4OGcG1GS+CpYl9ISiW/eHxMWnbGnPiSsxdIh7TkC46ab0UOJPg3+MafId/afxH\n",
+       "qjy9OTPAPwaU+6TX8gXi7h4HTy/52TYxKylT8VJLwY2zPlnX3XTvrOv6tZ1id6Di4+LzmMXw+Liw\n",
+       "CdwiSGOFENbp+POdnxqbX6rhz0WJkqWwWCzCGpJWligxG+VyOVJqpVXAN8IWoYxfnO9LK6HE2GC5\n",
+       "4hvHwuS+xFfk8/moh1IsFlWv16OYGvcDNCLEPQDQBae0yjRAKHY6HdXr9TUrj/7i2vDgOxQ+Cg6/\n",
+       "OKyNW+XdbjeEP2OJT97dtIwL93d5gJ+fIlRY1s5kMF5ck9+STutWJcoUNqtYLOr09FTSqlDY9fV1\n",
+       "KB7fl51ORycnJ3r//n0oJZgwYgO22fzcIU9vJh7D9yTfA5yyXjgc0vc88w/jSNYPFrhnllDozEug\n",
+       "S+sy9+DgIOrfOPPg4A45AggmPdzlGSxFu91Ws9nU/f19VCTu9/ux/wgy9/kk9orrs5ZQzM7wM8/o\n",
+       "s8PDw4jLce8CwJk16UYi/7OPdnaej0mgH5xFRWA8wB+d5CQB1+CejAX7kixDXJsYP8TveE0fjIub\n",
+       "m5voB/MNm/WSQbm1mJM0kAzBmga0+nedqXD6yq1oz/ZwMJA1CJtcEln9ZQLT6wKsfHO6/y9Vxs7S\n",
+       "pACB9/k7BThZQjYFSemz+r1dsfv7P/X8PAfPynUQHg5OfFxShmMTI8K4QkO6PzpVMOn4+3h/DA2B\n",
+       "RL8PDw8jU4D6ByhBT/ldLp991QhmSkZzPgxCAqsGBY+QcaGAi2M6na5Zk/QL0IQFisChgBrBf5LC\n",
+       "3eNCzFlHZ0G4TqFQCD85+xbQhKWFYeGl2zn0D+XnhaoQvrA3zWYzrgUgoEAdY+/rM907PAOZM3t7\n",
+       "e3GgG0CDOapUKlH0rlQqSVK4tegrcQfEAJFxc3Nzo3fv3uny8lI//PCD7u7uIn6kXq9rf38/Yg16\n",
+       "vV5kP/kYe6G2bTVYscPDw1iPBPUSVwPbRio8MTbEHLi7EtcCTCHl/lHiyHXmHjcRABW2AuCOwscF\n",
+       "huJ3Vsf1ibt42Fs8F7KN92azmWazmY6PjzWZTHR8fKzFYqGjo6NwMXqqdKPRiNTxQuH5ZOu0lgtu\n",
+       "U+Qj4AP3JQwe4yqt13hxQznLbYlrCMDMOiVdn/nwWE7XN8hlxphxhP3hc4+fkrQGwIjl2t/fjwwh\n",
+       "5hhm8iWDfavl63lA90PzubQKQnWB6ErLgUUWg+LXdbDD36mLZhMYSAEIgtIXd9YC8b44EEgVtH/u\n",
+       "/dv0efos9DEFKllAZxNweak5s+XWBe+l90xdPz4ffo6Fu+AQfq5Q/FousDb1+WNgT9iUtVotrH+o\n",
+       "byq7OvPQbDbDZQENzriSscN3AW8IGhiGXG6Vosj8UrgNgUCUPcFqHniZz+ejLsvt7a329/fVaDTW\n",
+       "4lVQADAjZAwBFIbDoer1etQBWS6XAX5ggLrd7loRKmf1KEzn4wi9vVwuI3DYs3CwwLCqOYBP0pqf\n",
+       "X1qtDYQna3WxWGgwGOjh4SFiGRDAFFAj/oXnqtfrcaowMqbVaukPf/hDBC7O53ONRqMoZNfv9/Xj\n",
+       "jz+q1+sF4CN1tlqt6vLyUu12W+12O/bCYvGcccG9ttkIcsXNJK0OoUTRu7zF/UOslLtBmKNcblWA\n",
+       "DyXs4BDXBNflmAHuhbvC5QigGpnlgdmcrkuwMy7AfH5VCwS20M+HAUjAamL9z2YzHRwcaDQaqVwu\n",
+       "xzEQMJMAq8lkEvdz45uGrIS94Tlx8VF1OmX9XD8CQFi/BOSyVyhS6CnTjDXzcn9/H6wpoI9qyQTz\n",
+       "M1/EscC0kGnkzOPXX3+t4+PjMLCYZ+YQlmfjmvvfX8Y/3Vwp0lk6zmAyeU5rO13kFfNSq8iBQKqQ\n",
+       "nf52sOEgCMrRQYffnwXCwBLkBrJ0xOl9cX9g2md/nQVMsoBP+ltpvUaGC+D0en6dn2pp35mTlJpO\n",
+       "WSF/HhdeCBL3xSL8fbwZYzYcBbLcr5zO6bYbFs9i8RxAh6Bwf720iiPBZUHxMqx1D8J0NwiBbbwP\n",
+       "Pcrrg4MD9fv9KOXOukSQME4cm+4WH4Dg5uZGR0dHITCxdAjgw3WSy+WimuXBwYG63a4ajUYISkkR\n",
+       "JHd/f69qtRr3WiwW4RqRtMYMMBbEFJDV4zEguLiq1WqwHygSrzXhbiesa99fy+VSn376aQAUhCvZ\n",
+       "Rygq+ozywJ2B0L2+vtY333wTypI6NFSTHY/HqtfroQhvb2+jGNhy+XyOz2AwiL4j8xaLVdDzNptn\n",
+       "iuzu7ur9+/d6/fp1GG2uZDgPyt1S0spVyZ5HNqXBtMgAXDPScyAse6VWq625/AgyzefzGgwG0Q9n\n",
+       "27kmv+V7xGF4GvRyuQyQvr+/H5VU2Uf0m7lvNpv65ptvdHZ2FnILlxJgmj3hx0fgDkLf4M6RVgdm\n",
+       "3t3dqVKprAHy1IPAPfkbdpT1RGaTFxcEgPs1AGH0zfuKTGNM8vnnEgMAGK5BWQCO6vj222+1s7MT\n",
+       "xewwYvw8q01ta+AEAYHQppOe1SB9GKvBd7MUubsA/DPpw5RghLazACmY4btu+TMJULgOQLyxgNOF\n",
+       "lLb02j/X+k/7mgVwnDFJFXcW+7SpIeRTMOL0Yso6pffgOg4IpXU3TfoZwjllqPwZfU4+BubEQdx0\n",
+       "Ol1LpcPSL5VKQeX70ewoIKwyBMZyuYyaJQh9ytYzPgjrQqGgZrOpfr8fcSfD4TBiVVB2i8UimA6s\n",
+       "KazHnZ0dtdvtAERuXcLUULW1Wq2q3+/Hcy6Xyyhc1W63I5ZAUliDCORGoxGBvr5e3Vr19TYYDNZ8\n",
+       "8oAtTrf13wAgAFyMVSrEp9Op/vM//zPWcqVSCQFcrVbX4gOQLWQKUTQLvzsgA1CFC8c/90qg/X5/\n",
+       "zbWHmyCXew5O9HohR0dHv+Qy/qBR7wPXijPZ/E1AqbSqiwIwATwiD1xBUmUX5gpggouDNfn27Vt9\n",
+       "8sknwUgShE2sA4odoMF9YDYWi+e6QtKq0qqn4cNQ5HK5AB7sj4eHhwBkvV4vlD/rtV6vazKZRMaR\n",
+       "pKggm+4h1h5AGcaPezvDgovPDdrlchnB48wB44keQ74gi3AxszfSEAWuSeVjjCxncdGHsKcE8MJ0\n",
+       "cZ3Dw8NgbnE940ZirwKAmLOsttWAWP4hUFKXgH/XrWRpZQXR3NJ2JZoFZJyN8Ws5SHJ2xBkZX1R+\n",
+       "H9+wriz9WVIAxLU8L9yfmetmgZYUfHn/0s9dIPgc/Fxlzqby1L0UlHDNtH/e99SNln7XY4vSefeG\n",
+       "svONxXxtu7E5sRIkhQBeLp/rD/C+pMhSgfkoFothJUrPzwjVSultDtiisBQsIsqQrB4UI6m9pGfu\n",
+       "7DwfKEh/cEEQr0EtDqhed2EiWGBwmEcCNu/u7lSr1TSdTsOHz/4ikJEGNcxa4jVgi7VArA0CDkuX\n",
+       "2AOvnYKlxxxIq5Nccem45f7rX/86XCmwP5Iik8iZPvYBLrFcblVTA0AjKZ6T4l+sZ34DU1AulyO7\n",
+       "C/DEHACAyOaAcdtWA2zwXF6/hXFgbGHZmFPfwz5/Hk9xc3MT68xrdHjGCsCW/tzc3IQ7j2wXzjbq\n",
+       "9XrhinIwj5uk1WqFiwLZ74wsCpcCf/P5XCcnJ5pOp+HeIgi03+8HKMNw9aBPZ5Wo0QIAd73D/mbO\n",
+       "PY6MuA/Gmf66/vH/CfJlHeFOJ5CY37u7ibgxZ2gB1Bg36AHXdQ5YyV4i/RkjHpDnpRKQd5vaVg/+\n",
+       "k7TGYEgfnh+TKuAUZCBkstwt/hsaQt1ZE0lrwpc+eLS4uywAH1ngwMFV+s+BA8/uzEUW48M903+p\n",
+       "Yua19yvr2ik78XObK/7U5w8CBiHzHbd6HYT6c6Vj5N9HKaSMTdY1/ydg6/9lIxiQvkNNo+xIU/Wx\n",
+       "4IhxgAbCAeEB1Y8w9dLwbpGjkIlfkVZr59WrVyHIpedTcQeDQWRPYPEhuNxnj0JmfAEY0M4ooMFg\n",
+       "EEqEg9QI5iRyH7CBhYg17a5arn14eBisCs1ZDBTcYrEIqh6GD6sv3eOpLPjb3/4Wlm+hUNDJyYlO\n",
+       "T0/XXEMoLa9+ydwR0OsAiz55bASKgecEgKHIYbMAhhR8k/STgYO/VAOM4PKAbfC96jE6HuMEGwFD\n",
+       "5W5a3FiAXSxyxo+6NScnJ2HgoGx7vd6aq4IUYAf3vq88VsUrGvuhlPShVqupWCyq2+1qMpkEW+Lp\n",
+       "uE9PTwFW6C9rgvWSMtej0WjNPcPzo/xd7klai0NzPUffkROMw+PjY5zf5HK10WisudbpFwwv1wYg\n",
+       "OcuTpm17kVMYzFwuF3PF34wV90X2URQuLSbnbSsr3gUzDwztJX1odTMZKUDwICGaT6Jfi/eyfiMp\n",
+       "QE0qBHyROFXIc2SBkPTa6Wf+/K6gsxiILDdJyrD4okmv5X3w/joI/Cmg4vEhWWAoBRwpCPN7ZzEu\n",
+       "m1xDKMgUsLq/OmVRtt08IA+h46mg1OgoFArxXTJNACJkCBC8R4Go8Xi8llGCUPB/HPXuAuz29laD\n",
+       "wUDNZnNN6J+ensZhf9KKISOmBLbHmZLJZBL0d+qnptrmaDQKQc6zko2FUmYuy+VyWN4oddYbIMuf\n",
+       "k/vCNAGaUAoE5iH8vVaGGxCsZ0+rpBLrYDBQv9/X3/72N3W73agMSjVcwKa0MjA8UwX3F8oB9mRT\n",
+       "qXeeCcbED3sjSJR1vq3GesbSJbaAZ+RzACdrfjAYxNohoBmXg/Q8fmRD+fWYM8aGeKD0b4JtuRaV\n",
+       "VtNqvMR0cE3WH3EsXJv5lBQxFa9fv1apVFK73dbR0VEcE8EzEPtCn2DDPBjbAdR8Pg/mhnXD/WFS\n",
+       "2X++dgEQACze93FCjwH8fC8wRj62MGEOGj1+krmAHQG4AaQxmthr3ieeiecjoBZmiHttaltx69AY\n",
+       "cG+pUnUl7r/jM/6xoJyF8e9nKX82FQuGzYdA8cUEgEoVa9p3LE/pw7LqHheR9TxZ72cpewc0KRjb\n",
+       "NIap8v+5TAP3YB7S+XDWIgWQWc/gzJNfw8fvJYDnAiprPLbdoO0BGghcfL7SszAjtY6qkCh4LDgE\n",
+       "G/+7heZjARvglpOn8y6Xy6g1sb+/H0IRv/KbN280HA7jaPhGoxFuFKhlD1ymmqunOXJPQIUH2tE3\n",
+       "gn3dtYL/G9DPZ25NEZ/j6bX8lvidbrerX/3qV3GGiI8Zc4Hy5PestcFgoLOzsygshsAmXgda2un3\n",
+       "fD4fisXpbJhExgwXM2wP90bOwB7QcOF5PBtjtG1W0BUmlq9b4SgaWDK+T0CzK02ChpkrAsYZA9ht\n",
+       "nhngjkuBAGXYht3d3Si9zlxdX1/rzZs30adGoxF9ocEMMl+4OCWtncHT6XTimXG/cShmt9uNzDyu\n",
+       "ybPhBsMgwB0mPQf4NpvNOCfKjT/pue7NmzdvdHl5Gc8qKfa2tJK9i8Ui9hDrhWBtWE836LzyMKUO\n",
+       "PFUckAcjybh7yARMaKFQCPY1NRB5loODA43H45hTCklS6mBT2+rBfw4ooJC8ucUOCOG7bHoXWKll\n",
+       "nrp9UuXHd3jPfdQe7Eo/HQihGFIA4nSZswqbmJWUIXpJyaaAImWMfKz8mt4v/5f2fdM9nZ3wBZgF\n",
+       "Jl5ik9L7cg3/XRZTxOc01oF/n+fednOLjIOwUOL4xVH8XscD4QDVj5Xn1pnXG6FWiVcYJVMGId1q\n",
+       "tXR2dqYvvvgisngIQAWE397ehvukVCpFJolbw2REAJxIBcflgv99Op0GGCDd2EGBzzkWrbQKlE4F\n",
+       "7c7OTpQSdzD79PQUQcOz2SyybebzeVSklVaH0mFY+BwBkM7PzzWfz9VoNFStVoMRYe2hXGB1UGQO\n",
+       "ynyPM1ekIrsrjWdjjnyto9Bhb7i/P/c2G2NQLpeDhSBo11kCl6EoYjJxcElStt6tddgHfo8iWy6X\n",
+       "UXUYxoCxef/+vXZ3d1Wv19VsNjUajdayyjgzygsDSgpWAVCFQQkDgKW/WKwOzzs4ONDt7a0eHh70\n",
+       "7t27tRg85h+w6mAY+QnTAKPEOVR3d3ehrImpYT/3+/0oYoisA0x5zA9Aw8FAr9dTq9VaY7Jo9I1Y\n",
+       "kuVyFe/F/f2ZMCrQA/SBE8HZS8h25kDSWlzNcrmM88OoZ/TS2t5aQKy31AJOYwiyPk/f5/8UkPji\n",
+       "y2IZ/LvO4nhglCvh9DXXkFYAIVXim57TNyefu9spvY//1lt6Dd5z4JAFRNJrp83dKVzPwRx/p/dK\n",
+       "+5mCEx83roew8Of0+ffxcJCYPvfH0BBO4/E4/NZsRKwSBCBWZblclrQKzgaceKbL3/3d36lQKMTJ\n",
+       "wFiOg8FA7969Uy6XCzAEa0J/qtVqWPC9Xk+S4vvSOgtJH1EmKFmyD3jP0yA5NJDnXi6Xa4oL1nF3\n",
+       "dzdAGMGf3N+BLPvAA3BdydH/YrEYVUW9FDfZSxgZKErW1Ww20/v379VoNLS7u6tutxtjx72RAfTD\n",
+       "gzs9+JN94EIcgQ8j4IwI+yVlx3iNhc71PSB5Ww0lg9vK00xZJ4yBpwVLK8DCOHrlUa7nAJaiXe12\n",
+       "W2dnZ3Fd9grjTAG7k5MTSYpA2vl8HooXF9xsNotAXGmVPdTr9VSv19eU8N7eXhypQAYOIGmxeC4R\n",
+       "gDHB86XMhJ+fA+PkMUyPj4969+5duMekldxwhp7vL5fLyIaCuQAguvwnEwc56UGurGncOP4ZrBHP\n",
+       "wDrH/ULfYLEmk4lyuZzq9bokRYwZMSVkatFv2CbG2NmzTW2rbh0pm/Z3xZYCj/S3qUDDqvNYBfdl\n",
+       "S+spwalyS+/rVlaqPCWtKckUWGSBCr8vCPalfvj9U3bFFTXP5a/93mlf0mfIYlByudxaRDv9SZmP\n",
+       "LPbCv+Nj6s+JcuFzR/9ZIMfvkQIlf55tNq/DQq0Q6FwKauF7JVASdwaCQ1oVYOPsmPv7e3399dex\n",
+       "VohFYbypXAp1DWDwuBO+32g0tFgs4tqSgrKFMmd9jUYj3d3dqdFoqNlsBuhxgMj/uVwu6pnAbGJl\n",
+       "QzFzP7IoHLR4MDBKnswfPzgQBc/4YdFKq7NbsJIZjzSwtlKpqNls6vr6ei0bqtFoSNIHz+fWqwMd\n",
+       "VzbuapzPnzOg6C8yCFACO+W+fGdWuB7MGHEV22qpocG6Zd0ABjHyYEMAgrh4pBWDxPyWSiV9++23\n",
+       "a0wV1jfX4iA9Ulr5h/wsFp9PEB4MBjo/P9fd3V24DTzFmPUFI0gcFUGgzLfvHwA3VYmZ0/l8HjFT\n",
+       "GBOAy1RnuBuL1PDZbBaMI24e2CMv+4+MA3ABInDBIPfo2w8//KA3b95Ef9jT7AN31bpBzjjB9HgG\n",
+       "EtemfguAnnpKzt4AUGFrYHy5Pr93eZDVPgpwIq3T+iwIpzRd4XsQTcouULnSYxMQPlkK0wWCtAqo\n",
+       "hbb2Q4/cbeD39UlOmQlXpOn73g8sTPrpz5QqbK6Z+il9nHjtQMw3tTMpm1gHru2shlf4pPk9eN/H\n",
+       "m/fSDYvicPeZsyNZTIz/7QDJx3SbDYsJ94yzSoABXB8Ezs3n80gfdmsd6hXQ/d///d8aDAZqtVpq\n",
+       "NptRhyNNnweoLJfLtawEhAyFne7v7yPlEEsWAYSVhr+43W6vgSlcq1j1rMeUGSBFkbgJUkM9JVLS\n",
+       "GlDy9Nzb29sI8JW0JgMeHh50f38fwg5g6AYDAtD3TqFQiGqZBE0Wi8W4n7vPABWAKBSD19IAnLBP\n",
+       "6Etawh2WCreeM4jufvJgSErE89ttNTKwYDZgTIrFYsQhsa4A03t7e6rX65HCzrNC6e/t7WkwGGg4\n",
+       "HEbJ/+FwGPVDzs7O1gKxl8tlKGPiue7v7zUcDlWpVDSfz6Omz+7ubqSvshZZ4wBRMn6YO+YZ5sVT\n",
+       "9InLcLa7UqloMBhEufpGo6HxeBylAxgH4mJwJ+HOgaFhrVGUkb3schX2ETewA2FJ8WzUYEEuLJfL\n",
+       "yMxjLyAvCE5ljJ2VonEvntuBXKvVCtACq4ZRgEyBFYKB4bkZ75faVmNOUJ4IFDa/B/c53cz7rlSz\n",
+       "2AwEv3+WZWUDYFLLm3t7rAnXcEvI3RJQnL6oUrZDygYa3I/fg0AdwKRjkQKllGHKAjZ+nbRPKUAB\n",
+       "lDhtjb/XY3183NLX6dz4M6VMjbNPqWsuZWf4Ox2Lj4E5QXFj2VD0qVwuB42NJSkpAmWxPBC8jLW0\n",
+       "cgl88cUX+qd/+ie1Wq1gndK9IX3oBhuNRiHU6AM1RlDOpVIp4iPYF36mC/2Bzsdq9APTEGTua5dW\n",
+       "h4nhk0epsdddQLJm+A1ABmUNa8Hz7OzsBC3PIWysVUBduo4AB3yOZT4cDvX+/fu1UgPOGhIvwFgB\n",
+       "anzNwtweHByo0Wjo4OAgik+5C8QZE54ZS5lsCL5Lhtc2G3NJGjGHWKLkAFUen4b8QGEBaN09BoAc\n",
+       "j8dxBhVxGJIiriOfz6+tH2dEAIK1Wi3S9QkAx/AkuBlQCeCRFO5HQClj77rBi6Q9PT2fA8XexV2H\n",
+       "OwcmVFpl72GU1Go1LZfLyE5yBtNZG9Y6ip97M+YeR8Jev729jfHl3nt7exqNRhqPx2sg3wEO93AZ\n",
+       "y+f8o+4R8+s6mO8CUJDN6RlgxKwhI16qcSJtsUJs2lLGBBTH9936z7qWC+RNqYcpc5AiT3/tSJL+\n",
+       "+b0duW5iJFIXS3rPTfd2Ab0JRDjY4O9NIAHFkY41/cpS6ghJrHi+70rQ3VFZ983yJzq4yOqj59an\n",
+       "v0+BKb97if35pRtMEBtVenYh5PP5UDKkCyOAKJzm7AfMQD6fjwqiHORVq9Ui8HN3dzdYF8YHC2s+\n",
+       "n8c9Eeqz2UyTyUTFYlEnJyfhIvI+cw8AELUKoO9hflACnp0EW7JcLsMq9cqQktasWd8THrTnmTaA\n",
+       "EFxhZCM4g0rQYy73XHmWYEiYDE9zxF/vIJIU6dPT03hO3zdY1MwPgtWtbGQN4BNFSd+k1UGDrFmY\n",
+       "KtaOK3Dq1my7AJukYCoAbACxQqGg0WgUh9tJK/eJB3HDbsGEuUzmkMTLy0uVy+VgJAj4Rg4/Pj5q\n",
+       "Op3GuLgrHUCDAmZvoNilZ6DDWVGsPW+AnkqlotFo9IHRw/ol7qPb7QazeHFxoU6nE+AXdoA1zLrz\n",
+       "1HjcvHwPVxagB3ewsyAOcL0YmqSQCa9evdLDw4Pq9bra7XbEvnm6PnKWv53h5zmdQaVfuVxOo9Fo\n",
+       "7dRo9qob6R4wXigU1tKikTUYKZvaVsCJU92eNphaOG5ZeCaKNwSbMzAgM2cJuD4I0ZV1lssFwcWE\n",
+       "uOXvAIrN4ddz5erfow8pU5P1PCnocIDm4+hj569TMObf8f5kuZ0khYJxdM3fWYDP++8AhPt5H5jP\n",
+       "lLZkE6bsioMwd/850k/B2bYaQhyFlcutykBjZeNOAajwTNCiKF18ygiVSqWidrsd6YcINeJVptOp\n",
+       "ZrNZBKzSB5QFJyNLz2xMr9eLVMJaraa3b9+GO9PvjRCjj3t7e2o0GppOp0FLQ1F7jQ4XuoCKUqmk\n",
+       "2Wy2JpTc0sSqRYCxdgigRQ5AkXNtBGGx+Hz+DrFSBEH6PgUEHB0dxfoiawmmxNlPAgelFbhBuDN/\n",
+       "KCHcMMQNADYoh88hfmQg4UbiRGbO7Lm7uwsFSbbXNhvZIex9mCDfo8wxQALlieJCkfEe4zgYDAIM\n",
+       "E4tD7AZjBAPpLnh3Ly8Wi3DjLJfLMBL6/X4Ec3M0AfEcAHECl5k31loa9+LPiFtHUuwT2BxcOs4i\n",
+       "ciwBOgXQ0ev14m8H0hgZPCfj5rE9DiQkRYD3bDbT6empJpOJptNpAJPHx0c1Go1YkzT2jfede7Af\n",
+       "ceEir5En7KXHx8co1AazA4BkXUsKA5R1/ZJrZ2tunSxLN3XneAqSlJ2OmwIaJpSFxaRDO0orq9QX\n",
+       "dupy4H4eCOclqlOAwYZyxsSpOleeKRhAADtLQz8dIHh/N7l1vG/+dxZQ8XFM54HFJWltTL0/L82D\n",
+       "P5/f0/vA/LoQwFqHkuQfY+kuH4998Ptss6X0rJdBZ85gQO7v76OgExYkjAc0KoAGCrjZbK4FVO7s\n",
+       "7Oj4+DjAy8nJSaSkLpfLOCCPOAbp2SKq1WqhdDudjmazmf7+7/9ep6enkhQH4S2XS02n0wAaktaE\n",
+       "v1dS3d3djZNoHZTzDDBApVIp1pfT/J76m84lhekQdMSI+FrwmhUEJKNAnZ6/vr4O5URm0/39fZwg\n",
+       "LSmYKAdAsEjul0cws1cbjUb4/AGDjFen09FkMlG73dYPP/ygm5ubADjL5VIXFxeRtYWSZPzciNtG\n",
+       "c+UKeOT8J0+zBsCg7JkbD2yVFPIXRUgaMHvk5OQk7tHv9yNuBVBEbAYMBNf04PJGo6GzszPd3NwE\n",
+       "68aa4eDFYrEYgbOk5zso9Mwfz4LjPY8hgSnkvB5irWAVJUXfWB/cI00GgC3K5/NrtXZYt6QYs/7f\n",
+       "vn2rcrkc6485c28DzJW0SntOQYfrHrwH6EsAp7vkDg8PI5vPjUx+A4PGXDJ+7KWX2tYDYqV1Bc/f\n",
+       "WEIo7ZRRSH/rn6EIXGm5j5fBk7QWDOQUs8eSLBaLUCAoTfzCLhxTHzLK260wru39T10h/vtNgbbp\n",
+       "ePnfLBJvzuakLEvaAIaS1hYb4CFlgXxRpyAl7TPvZ8WKOABjvLi/F6XyZ/Q5/RyrqEwAACAASURB\n",
+       "VBgaQtAtSmha1rIfxrdYLMKKZtNiSSEYeebpdKpKpRJVRaXnYk0eT1KpVPT69euwgNg7AAesQs73\n",
+       "GI1Gms/n+rd/+ze9fftWrVYrghiXy2UIc5QlWS+j0Sj6xj5YLpdRsRPhyjryIk6e5SEpmA2eQ9Ka\n",
+       "0PQ4BrKYqEnB7wFIAB9PkXT25/b2Vl9++WXsS2o11Gq1tb0H2EEoSwqZ4srX05v9cEGYK8YDpbBc\n",
+       "LoN+LxQKEWB4fHwcmUknJydRTp9x8WJt22oem8A8eXXjyWQSrB2yCzAD8GROPU4BGcxRCADL29tb\n",
+       "XV5eBpswHA51fn6uxeK55DqnHy8Wq9NwYV92dnYic6fT6UTgLWufjDkPfJVWp3sDtnDJYThIKxdi\n",
+       "pVKJWj7D4TDqlPhZP8g6YrSY93w+H8yaH8iH7PD1BIDCPeJndbn8ZU4oXEiWHP1y17yzI8SFuIz3\n",
+       "fcB3mR/YUUA54JQA8FKpFDVdYFZJX/bzqkgj39S25tZJUWKqJFOF5cI+pft5L8vNgXBDODoq5XO+\n",
+       "m2Y98D36SzomQt+VK6xJFhhx4f0SMKFPaVxH6pZxgJYq5tTl4d93MJU11twb4IVic6Tsvlq/L8/v\n",
+       "1/F54vv010FICnjS3/nYpCAn7f+2G9So+1NRjDQsQBgqgC3CCKBBsKHvFwJZOVTv4uJCP/74YwSG\n",
+       "cvZHLpeLIEPiXUhJxYLB572/v69araa//vWvenh40KtXr0Joe+wHghOhh9Chb25tMf9+ABlMEmX5\n",
+       "3f/te4rXuJeGw6GKxWKkXQLipBXoeXh4CKscEOWsp6Qo1MYhh7hqqtVqGB300ZkVSWtF87xuBGBP\n",
+       "UsTsMDfMe7PZDEEMMMNifnx8VLvdDmVJo6YMwv0l+vuXau6WSCl/FA8xNrBqzpowFx4gDMMA4ISJ\n",
+       "ePfunY6Pj1Uul2OvkJ1TLBZ1dXWlzz77TKPRKNyTBwcHsRboxzfffKOjoyPV6/VIG9/Z2QlwjkzL\n",
+       "5XJrqffSSuawTlHSkgJMeTgBVYNxpXjQKsqfZAqehzgUSeFWxJgAiDBuKHJiQZiTXq+nh4cH/frX\n",
+       "v5akSDtnj7L+eTaMHi80yrOXSqUIdmYfOiD36/ncSor94fLMA969xIe7tja1rYATj1lwt430YexE\n",
+       "2nm3/gEkDiiklQJDuXJNj/zmunzuAiOrD1zfB9s3nP9L4zKklaWfXjtlNHjt/lkfFwcojpb5LIsV\n",
+       "SZmctPlvEBhejMp/x/3cveJ98ziStK9Z/UrBafrddG2kQMTB6ccAUsiAkVYCjHXAmsEiQzg4XYq1\n",
+       "DgXNepNW51DxvAiDf/iHf9C3334bawX2hriP/f19tdvt6ANpxFg91WpVo9FIX331lSaTSQAXrBv3\n",
+       "pTsd3el01hQx9wYc3d3dqV6vhxXK3iKNFoYFS4zxk1br7Pr6WoeHh6FsfJ7z+dUhnyg7LGqAgLRa\n",
+       "Q/V6PWIXyAyhIit9H4/Hse889Zc9uVgsdHR0FGCDeg9Y7cgQD5hlvljr9C39HsD/+vr6A1Z229k6\n",
+       "WM7u7pBWGUbS8/4kGBnAiRuPViwW1+q/YDzO5/NYl1jyFBvM5XKqVCrB2BUKhWCWYMlIb0UGeTB1\n",
+       "v9/X0dFRlIt3txtgCAMAdpKibbTpdBrA9fHxUbVaLeaQe9J3QHy1Wv1g7SCzWS97e3sRsOqynmdi\n",
+       "LzqYcR3FPFxcXEQsDwHysFoewwJwwPDwSs0eX8j3MCJcD7ph7gwZsh/WB1bIwwHQ+ezdl0D31tw6\n",
+       "rpDTDroASj9LlZFPkvvOpJWljtU1m83WAAq/ow/4wX2g+S6L0pUv12BSPYDJla0H1aaKmXu5snfa\n",
+       "k+s7cHFQlLp2fJy8L6nV4wvRf8OiSZWA3yMrONnnk9+81LcUWPn7/tqBj49ZulY+BmAirTIVcONg\n",
+       "FXq2B0KWAD7qkMzn8zgojTWA0EIw4mKk5kSlUgnL0dNeiRNZLpex7huNRvyWWAq3YJ6enkKgeuEs\n",
+       "qmQCljjQz0915R9pls1mM6xhd025yxBAAUNBPQj6PR6Pw4XFfsAy5jfOWCLM3UXg1vpyudRgMNDe\n",
+       "3p7evn0bzwvVT1o1QngymQQIYYwKhULEijjgJAbC4wPcTZWyOPyPq8MpdbdWGZeX6O9fonlGEuyF\n",
+       "9GGdKQ/+Rp7ALnn59ul0GinxMFme9XF2dqbLy0t9//33+sMf/hDKejgcRqBzv9+PIE/mjXk+Pj5W\n",
+       "t9uNOiScVox75u7uTq1WS8ViUYPBIIwFAHValp/4kZ2d5xL+nU4n0oKR+yhk4pd4z90bkuJ+HiQK\n",
+       "kIIxlZ7XBuDfD8xzoxcWBObPARB9J86DfUUBRMYMdvPu7m6NDeL5keNe/t/dQ4AtXnvJiZSNcp3v\n",
+       "+iurbTWVOO1klqWPJe9AwMGHsyBck8b3sLj8unw3CwSlLhkUSpYSTJVmyoakuehZLhl3ofj104Da\n",
+       "9L5Zr1M2w/uVunTSZ0mf09Ex1/ZFL62Qu89VKoD9dTrXzrD42Gf1LWu8/Rk/hgYTQJYMdC4Cgvk8\n",
+       "ODgIIeEguFarhfJlrIlNAIBAmcOQcGw8wrFYLEYMBW4Ip7xxOcGsPD4+hvuHPmDdsAb9YDXYl93d\n",
+       "3cgIABgREMghfJ5iK2nt3g60F4tFZKwQZEs2gINo3D3ud6cRoOllvlP5AmAisBU3F+ABMElBNgfz\n",
+       "7rYBlHjsDPE4ABoUghtCKDNiUrBqeTaCLAF6Xjxrm40xdMXD2ABGYMjcmPO4E9YfqfXT6TSClmlk\n",
+       "rJ2cnOgvf/mLPv/8cw0Gg2BKAKgAQQAl65Vr4WKh4i8MJaAfxqJSqURtksViVT2ZfcFz5HK5qOpK\n",
+       "1hmZVqQvs748sJc96QatuwF9f8CYeI0jGCtn3Uh9z+VyUXeGmDIYP0AB8ge3J64xlynEaaUl7N2F\n",
+       "xdi7XGI9s6c9xsxjFmFX+NsNe97Pals9Wyd1M3hHnW2Q1svEuxXGIKZWPs2pJqevsHpSRQfVlDIa\n",
+       "Wb4xBxdZz8f9s1xOL4GiLFfGJjdJlmLeBBCymn+eplFm3YuNml7XmSaumwI2bz6H6Xh43IGDniwX\n",
+       "ltOKH0PL5XKxHrHEsFDoJzQtQsozCRC4WGBUfETIuNI7PDxUu93WZDKJVMydnR31+/2ImyAqHoVB\n",
+       "lL+7GZ+enuJ8mvl8rm63G25OrCVSKBlnCrsdHh6GYELwLpfPgbSUt2aOUP4If2mVxlgoPJcTh93x\n",
+       "eizOjiG8YUYADSgoQI1n6cCccP4HLMty+Vw9ExCHAmUPnJ2dhZLyNObJZLJWAyiXy8V3OIeF50yz\n",
+       "NVCeADpJEaPBWHgV3k8++WTNRbWt5q6c8XisVqsVGTweuIucxB2I0nTrmvFwA8hjrSSp2+3Ge5Ji\n",
+       "TCljj2vNg289jgWrHTnvQAP2Z39/P5g0ZwkADfSL9UWwa7vd1ps3b2J9AiDQHfyeNSCt5BX1der1\n",
+       "+hojAWjGYGDfkUmEnvOq5YwxDCZyBzlDBV5q5mAsAQAB+IBl1j2y3DPj+B5yDCOFNQ4D+fDwEMxq\n",
+       "vV5fS8VmDIil8RiezDX3v7qCf2ZzS4iFwyJPLX1HY5uYDoQKloyDFb+uRyg79eXg4yUg4e+7snUA\n",
+       "kirnVKikDI5b05uAht/H2Q+/p7NLKaBI3TJcK32mFHS4Ukj/9nv6WKRuIxfgjDnuN2ennFlKI8bT\n",
+       "Z0sb8/sSRfhLNVKDl8ulTk5OIsuF01yXy2X47vnuzs7Omi+ZeYWCXi6fT/OsVCpBhUvPTAHpgq1W\n",
+       "KwA8qcRck+A5XE4oU0/hhomBPgcIEKToaxZKGmHkYJ/r+lkkzmTQAEWwTAi6fr8fghlqXVrNMeuf\n",
+       "eANeu08fEMffDw8POj09VaHwXDDMLe9+v698Ph/z46npnmGD779YLIZ1fXt7G8IfC90L4j09PcX7\n",
+       "WL8on1wut+ayIn6BGBgYgX6//1HEnJB2TtExrG4HA5wbhQVfKpWCYcLiBhzAXuCCgSGDWfnxxx8l\n",
+       "KdJmHx8fY23A8mG4IMMBGswf4+3uBcAPgah+QCBz6udUSSt3hvScHUcc1Wg0itfu/gBosl48nmw+\n",
+       "n+v4+FjD4TCYjWKxGOX5YafcZeiBtZ5qn8vl1kAzz0Z/ACH8TlIYTV5iA0bFgSLvM/e+L/gtHgnk\n",
+       "N6wfhR/ZkzA+/AZg6s+V1baWrUNzP5srMGc0UvbBGRQmAgHPJk6DT5kIBitVtJI+uDdUHL9P2QpX\n",
+       "JA5EssCAN79+Cgb4P1W07qvz370EZvg7BRlZjetmATLeTzNm6AfzkMac8L9fE+WRgke/Xhrz4iDH\n",
+       "7+Eg7aee75dquGVgDs7OzjQYDGLzkx1DWiJBd8PhMKwNrJz5fB6xEIvFQtfX1yG0GQeEEkAH2tTn\n",
+       "xil4fPsIUKx1qF5OFiYWAncR/u9cbpWBg58bxcp9AFP0kd+j/H09Eo9AITKAAGm1uVwuMiuYX0/r\n",
+       "T/ekgxQ+29nZ0XA4XEt/JnaAOg24lKQVEKJ+htd7aLfba0fcf/bZZ+G3x0XEPFCKHaFOfBGuJ/r6\n",
+       "/v37CMSsVqs6OTmJQE9Sc7ddhI2A0Lu7Ox0dHenq6ipOA6YUe6/XizXi2Uh+2JzLBVwjHhyK24R1\n",
+       "hysBmc9rP4IgNVRTgwi5AVPO/AAYyQZCFmXJF+YKhvL+/j7AGcwKlXwdWFA75+HhIcA2fzebzTBS\n",
+       "YD9IPwYMejyKr/Xl8jl+6vj4OM7+kVbsN3FvPC9yGMOEa7ZarQBGHu/FuFLbhfguZ7WRE4AXdDUg\n",
+       "RVoFTLOnXFf81HlRW8vWcWsY5eSxFY5E0996EBwPDHp2IcX3+U0KBrLYAulDhsHZD+9TChLS5/D4\n",
+       "DPrpit37t6ltYosAW95PR/DOmKTXyWqwTk5p8n2eAYbKr+mAwp89C8il7EYW0PIYnZ8aDxr9+xjA\n",
+       "iWddoHio0FgoFNYOOcPtsru7GyWn/bTixWKhwWCgwWCwxl543Q6sb66PYPCsKwJEocQXi0UcVIZV\n",
+       "iiIlkI/vEMCIAQGzgtDEIkqLHfIdLESEmbRiwjAqsHrz+byazeaav19a+bxRIIC2LOPB3WduXVar\n",
+       "1TigjmsOh0PlcquTtzncLZ9/Ls3ebrdjfBHUrVYrFCjZQdyb+AOuT+EwytWnqdPSs1X+5Zdfxu+4\n",
+       "brFYjBRn5nybDWANqD4+Po60dWIwCKYkboffeQwKTGIu95ytxfrB8nYA7ewX4ABWgXF0d7K7X1D+\n",
+       "1CFxtxTzQHo7MWGNRkO1Wi3WA6DdS9ATCE7NFGJbeCae1dk3+uSGA/EruEyIP4PFRJbTd1fqPHOj\n",
+       "0VCn04lzoVibjB+yvFarBeuEnK9Wq5pMJlFN2TNnYDYkhSsOo/L+/j7cZ8wFz89cOpDyjCBneLje\n",
+       "S4zg1tw6CA4HEbyPUk8VkTMKDgRSl0oW6+DXTpWk/85dLH7frGfwZ+G1AwN3cfjvHJh4SxmTlPUg\n",
+       "0MrdIj5G/pzpNfxeKR3K93wD8x5CgeYpgmnfHYCkzFQWWPG+p39vcuP8FJjcdsMK6vf7kQmAlUDt\n",
+       "ABQNUf2eaocAl56FBMoxBbesK1wPUMa8pqIq/l1YBizIRqOhwWAQ1CruDoQUKdEUasOg8NRDt1K9\n",
+       "SN5yuYx6LIAeZ994Bn4rrRgkzisBDOEGgbImawn2hH75tTillt/c39+r2+2qXq+H6ymfz4cwR5mg\n",
+       "SFjz5XI5DocrFotqt9sajUaqVqs6Pj7WcrlUv9+P8a3X6yGkPXsCgMYc+pEDXKPT6YSV7ZYpv3uJ\n",
+       "/v4lGrJhPn8u0e9W/9nZ2RqT4NY+INeroXrqPGvC63Z4wTZnWlGIHjuIvHalDQsAkGJ9AmaQLaz1\n",
+       "5XJ16jPuEGS41xshoBe2P5fLrWWBejgBwGY6nWo+n+vk5ESTyUT1el2j0Shcgyh7AtUBTAAW2EMv\n",
+       "4Y98KBaLajaburm50fn5+VosCvoRo8VdNhgrBNYyVgAvWD1YRsbOSx8g173AG1lJqV5hTny8/bON\n",
+       "a+5/ae3+jxodoqOgY6yolyxhV7QsSqeM3JriO6BQvzcD48qXhY6wywIqm9gWWrpZfqo548H10zgO\n",
+       "n0T6lvbBn+ElZe1WRlb/WMg8twcN+xi7a8ybsyHuF5YUwWObruNrIWV9sp4hZcE+hkZAJ6yAB+7h\n",
+       "XsDC5JmdYfNCR9DXgA2CMKXneep2uwECcDMwf9DI+MYp3EQdBAI1UQR8lz0oKRgKFAtgqFAoRF8Q\n",
+       "SlDfMBewQghMF1wONsiawIXi1D1KbrlcrlXrRBhK64HVBEdKq+BAB0Kz2Swsxf39fTWbzRgPWCdc\n",
+       "KChEAlLv7u50cXERgpx4nEajoXa7HQATBU0ALm4Zd/WgINyNhBLhux7UuO1gWBrujJ2d52J+9Xo9\n",
+       "1mlqOLH33W2PnPOgaOYPNwQFxFiX7qLzOCNcQLjYiFnxfQRw93omHjuBjGbNHR4eajweS1oFLqOU\n",
+       "naFfLp/T3DE4nDWXVqwj7KjLtUKhEDVZWAMYAs4+k3EDK0l/KUro8nMymcSeRxawRpfLZRyWiAwA\n",
+       "JPJ99iUxQwTtHh0dBTsE4wXoB0jDliA3nG1nHQNy3BD9OXFUW8vWSRed/4/iSRVQqkxZKM6+AAy8\n",
+       "pdZ/VsuiiPl70yCm13Sr3wGT38Of2Tcw10uZFq7LMzhIyWImuM5LLp0UoPhnWKr+fK5I0+ulTEgK\n",
+       "rFDSCGTeS4sJcd2UMXFryIGLA6ZNjNk2Wq/XW2O2UEgwENStALQBRhCazmBxwBifQxmzdk5OTuJ7\n",
+       "7AEUHkKLM3Tm87mOjo6iCJWXxnYBjFsHwPT4+BjxGg8PD2q1WmsBh7gzyJaoVqtrdTnu7u7Cp43A\n",
+       "hb0gEM/ZFFxCs9kslJOnetJP4kQIGkawTyYT/f73v9cPP/wQSow9hZAvFouq1+u6vb1Vu93W7u7u\n",
+       "WqAxwl+Srq6uVCqVVKlUdHl5KUnhbjs5OdH+/r7evHkTIMfL/hPUiuJDmJN1wZziwoLmLxQKQcEP\n",
+       "h8Ngd7bZvMgce/Dp6UkHBwehHFlLgErWHd8FOOMKkRSH8rnimkwmobxgIJhfDmj0wxy5HjIFAIjC\n",
+       "BAzzPfoGWIf1GQwGIWM5+RtgTUwjawPXBsXaWF/EbC0Wi7Vqube3txEXwvOVSqUwWtwgZE17Zl6t\n",
+       "VtOf//xn/fM///NaNeFcLqfPP/9c7XZb5+fnMd6Hh4fqdrtxWOjnn38ewdWwg5QCIPYHME2to8vL\n",
+       "y2AYiT2BGfYwCcAdTAqynjk/ODiIU6YxNnZ2diLAelPb6qnEtCzl5/9L6wBhkzJKYxj4TpblneUa\n",
+       "8H6gbLNYFwdE6XX5rVuR3j+UtYMUaf3QQwdnfo20ZkrWOG1yl6TNGZI0IJVN71aFWxlZY+h/O8DC\n",
+       "opa0ViMhC0ikNF8KPpzt4l5pHMO2G8Iml8sFQ8IYYEGgJBG+7trweBEsLNaSp5i6tUN6LPPmcS8e\n",
+       "JDscDlWr1bS7uxspiqw7ikQVCoVw95AGncvlAuSkwITfE3w3GAzU7XbDUj47O9OrV6/07t27YHBQ\n",
+       "Fh7Y6lajjxFKJ5fLheLGmnSFzm+Ojo70/ffffwD6+R/XTrfbjbNvCoXneidYhC5r3rx5o/v7ew0G\n",
+       "A5VKJZVKJXU6HX366acaj8d6+/ZtxLQQi4DlS6Ay92csOWsHFgHQVq1WQ1HPZjP1ej198sknkYm1\n",
+       "zYZsfHp6CmXMmKcup8ViETVf/ERaruPGBkGfXAOWke+5oZTLrbJTkEUO5mEGiH9h70haS0vf2dmJ\n",
+       "tHKKs0mrUgmz2UzHx8fBOEgK5o+znUjxf3x8DPbSi6DBRsIgIJs4gwfXln8OaCI+DEAIW1KpVAJw\n",
+       "YESUSqUoFog7i2DqV69eqd/vRyl91iVz51lKrpPG47EKhULIClytHk8DaCNry3UlZQf8qAr2Li4j\n",
+       "YrGQJ1ltK+AkK+7DLe9UmWaxAKkyZBFmgQ7uKX2Ydst7/n8KnJxRcfbCrfn0d/7dtG+u+P2+WNz+\n",
+       "/RQEpQwFr3k+rIV0fLwx9vh1vQ+MlbNbXrUwZU1S3yKbzClePnMLPQU3jqi9j1lAMp3HFJBtsx0d\n",
+       "HWk8HmswGITCdqHg6w7Wg/GhUiYC9OnpKYSB09x7e3sRLzEcDiNldzQaqVKpRC0T9+mjCFG0lUpF\n",
+       "j4+PkQo7Ho8D1FSrVdVqNZ2dnQWFjyXocyophBBWEC4ghGe73dZf//pXXVxc6Msvv9RisYgDz7D+\n",
+       "oMBZgzANWHEECRP/QcMq9do73333nY6OjiQpDv1zS53rN5tN3d3d6eTkRJeXl5GBBLVNBsTNzU24\n",
+       "XCaTibrdbox3qVQKN950OtW3336r169fx5hTeK9QeM4MIYC02Wzq7OwshPZwOAxlDnja29vT73//\n",
+       "+yi2BVjbVgOIeDwMY+4ucNxjACxcKq68WNvuKuHvarUa7gP2ByySZ/nhpvHD8nBTEKcBIwIIhj3x\n",
+       "Gh2sO+aYf6RzM8/OuuDS8ZgmSQH6fbxINfbzcCRFsC5MJowa+8zl2tPTcyXj6+trDYfDqNHDs0uK\n",
+       "DC/W2/v37yOQ3c8kIr4M9olaJ9PpNOKoCJAfjUY6PT2NoorVajX0kdc9caa3VqutBcvyTMwba5tr\n",
+       "eJp22rYCTlKa3xVsVpDoJqXjIMVBhLQOaNx6BxQ4LeX+u9Rd4BZilgvBWQxX8B67wqbyw5Q84Dd1\n",
+       "J2UxNamLxccMheHtpdgT/I1c29kIZ48YQ4+udjDibhbQONd3PzrXg4KV1ovscQ2fM5/PFJBuYkg+\n",
+       "BnDS6/XCioaNcJ8rQph4Dg/ckxSvsQTxgTM+BLrOZjPV63XV63VdXV1FiifpuMRJEMSHAPQKrtIq\n",
+       "ELVSqQSAQshDaxP0JyliTVLgWq/Xw7pE4RwcHKher6vRaGg0Gumvf/1rHCdPaiL7CuAD2wPgefv2\n",
+       "7VrMCowSgcO+NnBd4c6iSBgKBcVVrVY1nU5DaOOeQaGR8nt/f683b95oNBrp6elJvV5PhUJB4/E4\n",
+       "qPJOpxNj9Omnn0atDVwzuHdS9x3P0u12Va1WQ8AjE+/v73V9fR2gctvZOoeHh+Gy5EgF9rC0OoDR\n",
+       "DSVpxQjjXkFJA3B2d3cjBkhSuPsA6ovFcw2fXq8XMUH5/HN1WeSpx+DBWHAGD/dmfPleuVxWr9cL\n",
+       "1xsxFcQtwQgQFE0GjPcdNwgKHtnJ3BLU2u12Y33xTNSD2dvbU6lUisJl8/l8rToxcrXT6ejo6CjO\n",
+       "H2ItMZ6j0UjHx8cBPur1ui4vL6O+j8e7nZ+fx2GhBOQ703V+fq52u629vT1dXV2pXq+vMVboacax\n",
+       "0+lEgC/PhPwH3HmsD2Powc1ZbWs8OANGJ1HYTtdlWcbOBmQpb67t10FIMNn8HmXBAvB/ruydyXBw\n",
+       "k7o6UpDiAMcngb+dqXF/XKqsX+qLK35Xzi8BEw7mInCLDIPFYhF0o48/izIN1iTYy5+dZ8ByT8EE\n",
+       "8+3X8YBQd004NZ8FTHh2B3cfQ/OUSihwrHFfC6PRKPzADr6wcDgYz4NLAYrlclmDwUC3t7c6OzvT\n",
+       "n/70p/ju7e1tlLQHbLhf/+npKep7IHQXi0WcRZLP59XpdMLXTroshZlarZZarZZ+85vf6Kuvvopq\n",
+       "maPRKGjx2Wymq6uroJwBDAAyZ/d4Pmjsg4MDNRqNteDfQqEQTIVboG4wTCYT/fjjj8rlnl0nw+Ew\n",
+       "hPrFxYUkRQbT4eGhzs7O9P79e3377bcBAPr9vqRVTZL/+I//CN95rVYLmfX5558H68VaPjg40Bdf\n",
+       "fKHf/e53uri40NnZWSjw2Wym9+/f6+bmRt9//30E4ZLRdX19HbVWcHdgFQMMt90I1iX+AorejRNn\n",
+       "irCK2cueKeUxKVTVxdKeTCY6OjqKa/7444+6uLiI2kFY5s4w4jJjHj1biv56HCCFEXERSgogT5+l\n",
+       "Z4aD+Ke7u7tIdYcRefv2rY6Pj6OEPXPHeOHKBATxOQBcUmTqeFq165G3b98ql8vFsRKMYaPR0HQ6\n",
+       "1XQ61W9+8xstFgtVKpUANhcXF+p2uyFDOYOo3++vudGkZ7kLY+W6kdL2zDdsO2wSbEmn09HT01P8\n",
+       "3gvmET/j7BGuXN/LadsKc8LgwhI4kyF9mJYqfRgMK61b/XyH67pVz2/9tVv6fi2aU/BZwa300/vN\n",
+       "e/4cXhjOn8mZGpRGCsx4rrRvrqjpVxovs8kd4jEv6XP461Tx81tnmWBIfEN5vrz7MRFU6cZzMEJD\n",
+       "WfvvU7eS9x8q8mMAJ/1+P9wSrVYrzt8gMBMhvVyul3KHlfJzeTz1bnd3V69fv9a7d+8ivbHRaCiX\n",
+       "y4XCI5J+MBjo4OBA+/v7kVUBhYy7wdcY1iL+9cfHR7169SrcL4+PjwEKYDRms1lkTsAOMIdQ3vyT\n",
+       "pC+++EK9Xi/YNAQejAZKBGZjMBiEUnMWkNoh9N3X+unpqf7xH/9Rf/nLX0Lh5fN59fv9YCg+/fTT\n",
+       "OHSRM1twnR0cHKhSqYTVPRqNwn1GX87Pz+OZ8MMfHByo1+uFUH779m3EC2EF39zcxLx9+eWX6nQ6\n",
+       "YZWyfofDYViv1BSBweGe22wAWlfw7H9iJCStzbu0XpSSNePZHZ6BtVgsQtFjYR8cHOj6+nqtjIHH\n",
+       "mQCaYFIA2/l8PpSfByJLK6UMI4ALSFply8xmM3U6ndiTXq2ZPsDUcQ8qJj88PB+0ScAswbLj8Vj3\n",
+       "9/dxDZhIDAFYT3ednZ2dqd1uq9lsBqNUqVTimAqP/bm8vNTR0VGwIrBOvIdby41swC9Gw2g0iirR\n",
+       "uNVcxzw9PQWI9/pInIPkxlej0Yj78Jywj8zXprYV5gRKlsnG7eJgwpVk6m7xQD4EEA/N4nVWw906\n",
+       "0oq54PceMCd9eGZOmu3D7/htGoeSgiC/b8rKpIrd36Nxf/8/C3hgtbiPL6s5WwGC5fspG7XJtYSw\n",
+       "Tt1NMCxc1+fTx8/nweeGuAqeFWoQJUSgJtaGuxheWui/VGs0Gnr9+nX4wweDQXzGeCKgsaAAIxRc\n",
+       "g+YGoBA09+c//zmCSbGAfvjhh0g1Zu5JYSRmBaq8UHiuQAvzUa1Wg+2YsFi22wAADWlJREFUTqca\n",
+       "jUbqdru6v7/X+/fv1ev14gyYq6srdbtd9fv9oLMlBQWOi4U5gCXis36/r6en5zN8EIaTySQoYGqb\n",
+       "0DzgD5eKp0f63mdtjUYj/eu//qsKhUIAEtYLFvl//dd/RezE8fFxKH+Yi1/96leRXYCV7qmo7XZb\n",
+       "3377rTqdjhaLhcbjsa6ursKydBcQMT29Xk9fffVVgM93796p0+moUHg+4RiL9/j4WL/97W/1pz/9\n",
+       "Sa1WS+fn58EGVCqVX2T9vtS80qikqGPjChUlCShg78IKAGSJayBWCtnBKdsYraenp+p2u3GWEi49\n",
+       "gi6x7mHscGfSl2q1GvLm4eEhWGHqhgDUAdcwPtwP0AMrub+/r5ubmxiTu7u7AAnIfACcMzruOiHm\n",
+       "A7fi3t6e3r9/v1b7xWXew8ODLi4uAtTQb+5RrVZDhpAJyDEKBIkTmFooFIIxRXeRNbS3t6fhcBhh\n",
+       "CG5ISCsjHYaT6rLz+TyK8h0eHmo2m0XW0mAwiGBz9AxxJx9lhVi3zKVVFTnpw+DUTZS9MyQpFeaM\n",
+       "Qvrbl6zrrDgNp23dFZMKRmdiUus+VfDeP6xjty5YmMTl8Gz8QwGkFfZApi81V+ZZAMb77KDQWR7G\n",
+       "OWVbUsBIywJLPm9cw/uV3tvfp39Z7227Ufthd3c3UoclBQPBZ71eT5LCYsrlclG6HgFaLpcjFTeX\n",
+       "y+n169fa39/XZDJRr9fTfD7X6elpVOiEyWKs/QTS09PTAAfEddzf36vZbIbVCM0Ne+FxApSX393d\n",
+       "jb+hwWFTiAOAHeJZvXR9pVKJyqCk0GJBISjJoEBIk/rrtD1z7UC92Wzqm2++Ub1ej4yG6+vrKKB1\n",
+       "eHioi4uLqJJLHIefZfMv//Iv+uMf/6hqtao//vGPury81O3trY6Pj6NIG4qFsZzP5xFrQ0zE7e2t\n",
+       "ptOp+v2+Wq2W3r17F0phMBjoV7/6lRaLhU5PTzWfz9VsNiPgcTabxanVpHW+lNXwSzRfCyhGGDkP\n",
+       "ZoeFICbCrWZiDPxMG2JYUK65XC6YM9Z6tVoNQCEpXIAEzRLbRRCzr5t+v69KpRKsBHFHFEr0gwM9\n",
+       "xdtdant7e3GIXj7/XFARUFutVgNk4eKCrUQu+fpZLJ6L+1G8j/TltL4Q7Goul9P79+/12WefRVow\n",
+       "z0BtFFL1cZUtl88l+bne3t6e3r59q4uLC7VaLV1eXkbRNggCmD5pVWPGSQJJwfDiqvHxA7jc3Nyo\n",
+       "XC6HO5tn8Zon6C3A5Ka2FXDi6ZNMgFvgmwJbXaG628PjFVIAkbZNjEIamOMLRVov+OYUV+qC2hTg\n",
+       "432C+oMuywJSLAQWT/rsLAgUkt8ndev46zQQlbHPcofR3K3Ftdi86fy5cnTwkZV1lILB1DUF4Evj\n",
+       "bAAsCIO03P42G1aUpFCKT09PoUQRsKenp8ECuD8WxqFWq+nm5kaVSkWlUilcEWSfIHyI2O92u0Hx\n",
+       "Ytl4ZkSn09Hr168j/gWLsl6vR+VTBLRXpnSL99NPP9XXX38dwj6fX52PQiT/eDyO4nD7+/uhyNk7\n",
+       "xNFMJhN9+umnEYfgRdY8HsOVEJYj5bax0HnObrerP/3pT+Fa29nZ0fn5eVTwvL+/V6fTUaVSWStY\n",
+       "12g01O/3Va/X9dVXX8UcYOFyCrFnrXlmBcIXt9rt7a3q9bomk4larVYoDdjixWKhXq8XVvWrV6/i\n",
+       "mVAsxMWwZrrd7tbWtLQ68wVGgrmRFFa2pKgxgkzCzSIp3BiAbsAL9UBcxrIfxuNxVEmGxaAWEwpU\n",
+       "UoBC9hQn5Z6cnES2WS73nCZLphouV0AzbjT6y29wb1IBuNls6ttvv430+sfHR71//z4y9WC8GCti\n",
+       "cAB0gKSHhwcdHx+H0k5dH7Crr1+/1mg00sHBQZSdpz4ILKYXg0MWIxOurq6igCCxIP1+X9VqVf1+\n",
+       "X+Vyee0cH68l49eFQYRZcQAHI8O65VTp4+Pj6AcpzsTLHR0d6fr6euOa2wo4cWbALWRJHwCTLPbB\n",
+       "FbLTjJ5RwwZJGQAHAmm8iLtiUjeBMwNpbItfA2Xgv3Ol72NAcJQzJ/6cAJAUfEirA57obxpM68+N\n",
+       "EseqRXk6IOTaXkCM/noKn7uN/He8lz5nyibx/fR5s+aZ7/m8YKGgHF0QfgzghAP4cK84W4JVKK3O\n",
+       "rECY4uJxVowsGQIEPf0WAUVWEAF6i8UiWA9Ja/e/ublRqVRaYzzI5EHxelYPcRdv374NVxCR/Chn\n",
+       "r34qPR98+Ic//EGFQkHff/+97u7u1G63w8J6fHzU8fFxpNyizKi4ijXtacOdTidSJXHpQcGfnJyE\n",
+       "P75YLOry8jLie7gmbq5erxeWLe6mYrEYfcFaB0y12+1gL/Grt9ttffLJJ/EsuVxOx8fHkp73xr//\n",
+       "+7/r/Px8jRFjHlAkJycnAXZms5n+z//5P9rZ2VG/31cu91wS/Xe/+52Gw6G+++47lUol/fa3v/1l\n",
+       "FvCGhiyBoveyALhy2ZMEyuKuIw3XwbKf3IzCZf0SQ/H4+BhxPShygCfxSpzwS00RScGAPDw8BPAh\n",
+       "rklaxUIAvKlSzN7EpdTv99VsNiN+YzKZRDGxcrkc1xyPx2q1WgHW5/PnQnC3t7dhfIzHYz09PUUK\n",
+       "OewDWTycrQTr9vDwoM8++0zfffedarVaGBMpkEHmjkajuAbjQNwIGTXHx8fBXj0+PqrdbqvVaqnd\n",
+       "bqter6vX6wUAy+efT+qm0ByB7GQuSc8kA/Ep6DBcRLjGOp1OABp0C0zv9fW1Wq3WxjW3tYBYaaUU\n",
+       "WQy8RkE58EibB2O6i4j//Tog9JRtcSYCpYqC5n3/nMWIsqehnL1YlIMgSWufp0yI99vZF5Svu3/8\n",
+       "u84s0CevnAiwQUg6COS5UpDg93A3C699Q3g/nd1wYOMti0lJmTKfo/Q56QuWhYM67/s2mxcNKxSe\n",
+       "T/2EhaCeAIGqOzs7QQ/DMmDJSSsQR0YN1gen1+Jnr1QqYT2NRiNJq7gurBssc9wXgChX3jAmrNX7\n",
+       "+3udnZ3p/Pw8zn85ODhQq9WKYmOklxLV//j4+H/bu5ue1LUoDMDvFTgqWFMkCEIwAXVi/P9Dh/4I\n",
+       "YqiJaEqjhWoVP1J7BifvctN7z/S6B+8zMTEqUNu9115rf+Dq6srKFWVZYjQabax+mM1m1kkxc1KW\n",
+       "pS2p5cTEVqtlO4hGUWSTFFlX5zVI09TmCARBgDzP7fnmCJAd1tfX10aa/uTkBNPpFLVazZZis2yR\n",
+       "5zm63S6GwyHiOLZM0/HxMZIkAQDEcYzVaoU8z1Gv13FxcWGBCBtvNso8ZPHm5gZ5nuPw8BDNZhOT\n",
+       "yQQAMB6PrTFfLpdI0xSDwQDb29v/2uPl/8bMFu9rlm3YTjCY5QRPPgPsiHivMhhlW8XSAveX4fPM\n",
+       "ElG9XsfDwwPOzs4se+Q+829vb9ah8vX5/2WwzeDFfYbSNMXe3h7CMMR8Psf+/j6SJLFdVtmOMRAr\n",
+       "isLue95HRVHYEngeHMh7lJu1MUhmWxzHMXZ2duz1mVXjnBVmjhgcu2VeTrTn5+Z9zdVILy8vNieH\n",
+       "g471em2bnTEw6HQ6GweSdrtdCzr4laWp5XKJwWBgc6QYJHFwxLYoTVPMZjOcn5/j+fnZ2nVm/5gt\n",
+       "zfPcdqvlBnJ/86OnSXHU687cdoMNtyNzsyBu4OGO8quBDP++W4JxV5fw9/l9t9ThcrMQ7ByrnTw7\n",
+       "SzewYWdZ7XyrWYIqN+gCNiebVssg7nt0rx8/t3sNq+UaVzWoqAZs7s9Ur4v7XqrZE14XBojV12dq\n",
+       "1v271WyTG6Qx6OJruV99CE6YIfj4+LDDvjhSAb63Pg/D0HYK5ZJDjhIB2Kiv2Wyi0+nYrq08iTQI\n",
+       "AmRZhjAMbT4IJ1YyK8Dab1EU9j5YvgG+Z9qz7MFOhUE4/2+slXMZJScsukuD2aE3Gg30+31kWWaT\n",
+       "dFmmKYoCcRyj3W7bKO3u7s5q54vFAo3Gn8PMtra2EEURgO8SJ1dAMbOSZRmSJMFyucTR0RHKskQc\n",
+       "xzZ/jW0FU+gsD7y/v2OxWNiW5aPRCFEU4devPycEJ0mCwWCAy8tLALBSQL1eR7vdxnQ6xWq1Qq/X\n",
+       "w+npqa3E6ff7uL6+xnA4tH0mmPpvtVq4vb3FZDJBu90GANuwi/uZcE5LrVazn+H15qFvP4VbnvOz\n",
+       "cGUR2wTOdeA2/rwX+Bn5DLME4GYBWBbic+IGLNyXZz6f22ohzm1ie8jyEjtkZlYY1PF7LK8wU8Hs\n",
+       "DOdBuFkaljFZVuPnYhmZmbMgCGxSLU8qZtnIDeA4x6LX61l5lgNR7jDLVUYciLJcyu9z1Q8DRban\n",
+       "HARwPhcACwhZvmIWpt/v2yq0x8dHHBwc4P7+Hru7u7ZUmXufrNdrdDodO/n46enJBjd85jnp+PPz\n",
+       "E+Px2PaKeX193TgLrNFoWNvIoNTdb+m//PO3DlJERETkJ/z8YSQiIiIiDgUnIiIi4hUFJyIiIuIV\n",
+       "BSciIiLiFQUnIiIi4hUFJyIiIuIVBSciIiLiFQUnIiIi4hUFJyIiIuIVBSciIiLiFQUnIiIi4hUF\n",
+       "JyIiIuIVBSciIiLiFQUnIiIi4hUFJyIiIuIVBSciIiLiFQUnIiIi4hUFJyIiIuIVBSciIiLiFQUn\n",
+       "IiIi4pXfPRZNtgyLF3IAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x112bc2190>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "ksize = net.params['conv'][0].data.shape[2:]\n",
+    "# make Gaussian blur\n",
+    "sigma = 1.\n",
+    "y, x = np.mgrid[-ksize[0]//2 + 1:ksize[0]//2 + 1, -ksize[1]//2 + 1:ksize[1]//2 + 1]\n",
+    "g = np.exp(-((x**2 + y**2)/(2.0*sigma**2)))\n",
+    "gaussian = (g / g.sum()).astype(np.float32)\n",
+    "net.params['conv'][0].data[0] = gaussian\n",
+    "# make Sobel operator for edge detection\n",
+    "net.params['conv'][0].data[1:] = 0.\n",
+    "sobel = np.array((-1, -2, -1, 0, 0, 0, 1, 2, 1), dtype=np.float32).reshape((3,3))\n",
+    "net.params['conv'][0].data[1, 0, 1:-1, 1:-1] = sobel  # horizontal\n",
+    "net.params['conv'][0].data[2, 0, 1:-1, 1:-1] = sobel.T  # vertical\n",
+    "show_filters(net)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "With net surgery, parameters can be transplanted across nets, regularized by custom per-parameter operations, and transformed according to your schemes."
+   ]
+  },
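+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "For instance, a custom per-parameter operation is just array math on `net.params`. As a minimal sketch (reusing the simple `net` above; the `1e-2` threshold is an arbitrary choice for illustration), the following prunes small filter weights and rescales each filter to unit L2 norm:\n",
+    "\n",
+    "```python\n",
+    "import numpy as np\n",
+    "\n",
+    "w = net.params['conv'][0].data  # filter bank: (output, input, height, width)\n",
+    "w[np.abs(w) < 1e-2] = 0  # prune small weights in place\n",
+    "norms = np.sqrt((w ** 2).sum(axis=(1, 2, 3), keepdims=True))\n",
+    "w /= np.maximum(norms, 1e-12)  # rescale each filter to unit L2 norm\n",
+    "```"
+   ]
+  },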
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Casting a Classifier into a Fully Convolutional Network\n",
+    "\n",
+    "Let's take the standard Caffe Reference ImageNet model \"CaffeNet\" and transform it into a fully convolutional net for efficient, dense inference on large inputs. This model generates a classification map that covers a given input size instead of a single classification. In particular a 8 $\\times$ 8 classification map on a 451 $\\times$ 451 input gives 64x the output in only 3x the time. The computation exploits a natural efficiency of convolutional network (convnet) structure by  [...]
+    "\n",
+    "To do so we translate the `InnerProduct` matrix multiplication layers of CaffeNet into `Convolutional` layers. This is the only change: the other layer types are agnostic to spatial size. Convolution is translation-invariant, activations are elementwise operations, and so on. The `fc6` inner product when carried out as convolution by `fc6-conv` turns into a 6 \\times 6 filter with stride 1 on `pool5`. Back in image space this gives a classification for each 227 $\\times$ 227 box wit [...]
+   ]
+  },
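+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a back-of-the-envelope check on those numbers: each output sees a 227 $\times$ 227 receptive field and the net's overall stride at `pool5` is 32 pixels (4 from `conv1` and 2 each from the three pooling layers), so a 451 $\times$ 451 input gives $(451 - 227) / 32 + 1 = 8$ classifications per dimension -- the 8 $\times$ 8 map, or 64 outputs, quoted above."
+   ]
+  },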
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "1,2c1\r\n",
+      "< # Fully convolutional network version of CaffeNet.\r\n",
+      "< name: \"CaffeNetConv\"\r\n",
+      "---\r\n",
+      "> name: \"CaffeNet\"\r\n",
+      "4c3\r\n",
+      "< input_dim: 1\r\n",
+      "---\r\n",
+      "> input_dim: 10\r\n",
+      "6,7c5,6\r\n",
+      "< input_dim: 451\r\n",
+      "< input_dim: 451\r\n",
+      "---\r\n",
+      "> input_dim: 227\r\n",
+      "> input_dim: 227\r\n",
+      "152,153c151,152\r\n",
+      "<   name: \"fc6-conv\"\r\n",
+      "<   type: \"Convolution\"\r\n",
+      "---\r\n",
+      ">   name: \"fc6\"\r\n",
+      ">   type: \"InnerProduct\"\r\n",
+      "155,156c154,155\r\n",
+      "<   top: \"fc6-conv\"\r\n",
+      "<   convolution_param {\r\n",
+      "---\r\n",
+      ">   top: \"fc6\"\r\n",
+      ">   inner_product_param {\r\n",
+      "158d156\r\n",
+      "<     kernel_size: 6\r\n",
+      "164,165c162,163\r\n",
+      "<   bottom: \"fc6-conv\"\r\n",
+      "<   top: \"fc6-conv\"\r\n",
+      "---\r\n",
+      ">   bottom: \"fc6\"\r\n",
+      ">   top: \"fc6\"\r\n",
+      "170,171c168,169\r\n",
+      "<   bottom: \"fc6-conv\"\r\n",
+      "<   top: \"fc6-conv\"\r\n",
+      "---\r\n",
+      ">   bottom: \"fc6\"\r\n",
+      ">   top: \"fc6\"\r\n",
+      "177,181c175,179\r\n",
+      "<   name: \"fc7-conv\"\r\n",
+      "<   type: \"Convolution\"\r\n",
+      "<   bottom: \"fc6-conv\"\r\n",
+      "<   top: \"fc7-conv\"\r\n",
+      "<   convolution_param {\r\n",
+      "---\r\n",
+      ">   name: \"fc7\"\r\n",
+      ">   type: \"InnerProduct\"\r\n",
+      ">   bottom: \"fc6\"\r\n",
+      ">   top: \"fc7\"\r\n",
+      ">   inner_product_param {\r\n",
+      "183d180\r\n",
+      "<     kernel_size: 1\r\n",
+      "189,190c186,187\r\n",
+      "<   bottom: \"fc7-conv\"\r\n",
+      "<   top: \"fc7-conv\"\r\n",
+      "---\r\n",
+      ">   bottom: \"fc7\"\r\n",
+      ">   top: \"fc7\"\r\n",
+      "195,196c192,193\r\n",
+      "<   bottom: \"fc7-conv\"\r\n",
+      "<   top: \"fc7-conv\"\r\n",
+      "---\r\n",
+      ">   bottom: \"fc7\"\r\n",
+      ">   top: \"fc7\"\r\n",
+      "202,206c199,203\r\n",
+      "<   name: \"fc8-conv\"\r\n",
+      "<   type: \"Convolution\"\r\n",
+      "<   bottom: \"fc7-conv\"\r\n",
+      "<   top: \"fc8-conv\"\r\n",
+      "<   convolution_param {\r\n",
+      "---\r\n",
+      ">   name: \"fc8\"\r\n",
+      ">   type: \"InnerProduct\"\r\n",
+      ">   bottom: \"fc7\"\r\n",
+      ">   top: \"fc8\"\r\n",
+      ">   inner_product_param {\r\n",
+      "208d204\r\n",
+      "<     kernel_size: 1\r\n",
+      "214c210\r\n",
+      "<   bottom: \"fc8-conv\"\r\n",
+      "---\r\n",
+      ">   bottom: \"fc8\"\r\n"
+     ]
+    }
+   ],
+   "source": [
+    "!diff net_surgery/bvlc_caffenet_full_conv.prototxt ../models/bvlc_reference_caffenet/deploy.prototxt"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The only differences needed in the architecture are to change the fully connected classifier inner product layers into convolutional layers with the right filter size -- 6 x 6, since the reference model classifiers take the 36 elements of `pool5` as input -- and stride 1 for dense classification. Note that the layers are renamed so that Caffe does not try to blindly load the old parameters when it maps layer names to the pretrained model."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "fc6 weights are (4096, 9216) dimensional and biases are (4096,) dimensional\n",
+      "fc7 weights are (4096, 4096) dimensional and biases are (4096,) dimensional\n",
+      "fc8 weights are (1000, 4096) dimensional and biases are (1000,) dimensional\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Make sure that caffe is on the python path:\n",
+    "caffe_root = '../'  # this file is expected to be in {caffe_root}/examples\n",
+    "import sys\n",
+    "sys.path.insert(0, caffe_root + 'python')\n",
+    "\n",
+    "import caffe\n",
+    "\n",
+    "# Load the original network and extract the fully connected layers' parameters.\n",
+    "net = caffe.Net('../models/bvlc_reference_caffenet/deploy.prototxt', \n",
+    "                '../models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel', \n",
+    "                caffe.TEST)\n",
+    "params = ['fc6', 'fc7', 'fc8']\n",
+    "# fc_params = {name: (weights, biases)}\n",
+    "fc_params = {pr: (net.params[pr][0].data, net.params[pr][1].data) for pr in params}\n",
+    "\n",
+    "for fc in params:\n",
+    "    print '{} weights are {} dimensional and biases are {} dimensional'.format(fc, fc_params[fc][0].shape, fc_params[fc][1].shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Consider the shapes of the inner product parameters. The weight dimensions are the output and input sizes while the bias dimension is the output size."
+   ]
+  },
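+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The 9216 inputs of `fc6` are exactly the flattened `pool5` blob: 256 channels over a 6 $\times$ 6 spatial grid. A quick check (a sketch; it assumes the CaffeNet `net` loaded above is still in scope):\n",
+    "\n",
+    "```python\n",
+    "print net.blobs['pool5'].data.shape  # (10, 256, 6, 6) for this 10-input deploy net\n",
+    "print 256 * 6 * 6  # 9216, matching the fc6 weight columns\n",
+    "```"
+   ]
+  },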
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "fc6-conv weights are (4096, 256, 6, 6) dimensional and biases are (4096,) dimensional\n",
+      "fc7-conv weights are (4096, 4096, 1, 1) dimensional and biases are (4096,) dimensional\n",
+      "fc8-conv weights are (1000, 4096, 1, 1) dimensional and biases are (1000,) dimensional\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Load the fully convolutional network to transplant the parameters.\n",
+    "net_full_conv = caffe.Net('net_surgery/bvlc_caffenet_full_conv.prototxt', \n",
+    "                          '../models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel',\n",
+    "                          caffe.TEST)\n",
+    "params_full_conv = ['fc6-conv', 'fc7-conv', 'fc8-conv']\n",
+    "# conv_params = {name: (weights, biases)}\n",
+    "conv_params = {pr: (net_full_conv.params[pr][0].data, net_full_conv.params[pr][1].data) for pr in params_full_conv}\n",
+    "\n",
+    "for conv in params_full_conv:\n",
+    "    print '{} weights are {} dimensional and biases are {} dimensional'.format(conv, conv_params[conv][0].shape, conv_params[conv][1].shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The convolution weights are arranged in output $\\times$ input $\\times$ height $\\times$ width dimensions. To map the inner product weights to convolution filters, we could roll the flat inner product vectors into channel $\\times$ height $\\times$ width filter matrices, but actually these are identical in memory (as row major arrays) so we can assign them directly.\n",
+    "\n",
+    "The biases are identical to those of the inner product.\n",
+    "\n",
+    "Let's transplant!"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "for pr, pr_conv in zip(params, params_full_conv):\n",
+    "    conv_params[pr_conv][0].flat = fc_params[pr][0].flat  # flat unrolls the arrays\n",
+    "    conv_params[pr_conv][1][...] = fc_params[pr][1]"
+   ]
+  },
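+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A quick sanity check on the transplant (a sketch, not part of the original recipe): since both parameter sets are row-major float32 arrays, unrolling the convolution weights back out should reproduce the inner product weights exactly.\n",
+    "\n",
+    "```python\n",
+    "import numpy as np\n",
+    "\n",
+    "for pr, pr_conv in zip(params, params_full_conv):\n",
+    "    w_fc, b_fc = fc_params[pr]\n",
+    "    w_conv, b_conv = conv_params[pr_conv]\n",
+    "    assert np.array_equal(w_conv.reshape(w_fc.shape), w_fc)  # weights match after unrolling\n",
+    "    assert np.array_equal(b_conv, b_fc)  # biases copied verbatim\n",
+    "print 'transplant verified'\n",
+    "```"
+   ]
+  },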
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Next, save the new model weights."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "net_full_conv.save('net_surgery/bvlc_caffenet_full_conv.caffemodel')"
+   ]
+  },
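+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The saved weights pair with the fully convolutional prototxt, so the surgery only has to be done once; afterwards the net can be reloaded directly (shown here only as a usage sketch):\n",
+    "\n",
+    "```python\n",
+    "net_full_conv = caffe.Net('net_surgery/bvlc_caffenet_full_conv.prototxt',\n",
+    "                          'net_surgery/bvlc_caffenet_full_conv.caffemodel',\n",
+    "                          caffe.TEST)\n",
+    "```"
+   ]
+  },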
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To conclude, let's make a classification map from the example cat image and visualize the confidence of \"tiger cat\" as a probability heatmap. This gives an 8-by-8 prediction on overlapping regions of the 451 $\\times$ 451 input."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[[282 282 281 281 281 281 277 282]\n",
+      " [281 283 283 281 281 281 281 282]\n",
+      " [283 283 283 283 283 283 287 282]\n",
+      " [283 283 283 281 283 283 283 259]\n",
+      " [283 283 283 283 283 283 283 259]\n",
+      " [283 283 283 283 283 283 259 259]\n",
+      " [283 283 283 283 259 259 259 277]\n",
+      " [335 335 283 259 263 263 263 277]]\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       "<matplotlib.image.AxesImage at 0x12379a690>"
+      ]
+     },
+     "execution_count": 11,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAAXMAAAC5CAYAAADavt/0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvWnMbWl23/Vbz7T3PtM73aHGrqrurrbdbbttuulYVhqT\n",
+       "gcgycQwIGVlYFiCQhSCRAUu2IyHExxAhWSIKcsRoRSJBfLAi5AgMIU4CiWxIYqc73XZPRVXXcOu+\n",
+       "953OtPczLT48b7dN1O6qxHVz7e7z+/See/Z99nnPfe561l7Df4mqcuDAgQMHfn9jnvQHOHDgwIED\n",
+       "v3sOxvzAgQMHvgE4GPMDBw4c+AbgYMwPHDhw4BuAgzE/cODAgW8ADsb8wIEDB74BeCzGXES+X0Q+\n",
+       "KyKfE5Gfehz3OHDgwIEDv4W813XmImKB3wD+KPA68KvAj6jqZ97TGx04cODAga/yODzzTwCfV9VX\n",
+       "VDUBfwn4ocdwnwMHDhw4cMvjMObPAq/9ttdfvv2zAwcOHDjwmHgcxvygD3DgwIED/5Rxj2HN14Hn\n",
+       "f9vr52ne+VcRkYPBP/BYUVV5Evc97O0Dj5vfaW8/DmP+fwMvi8iLwBvAvwb8yD960dPf889gMO3Z\n",
+       "wCjGG7QX1IM4gxGhqiJiEFMINdMZIe935H3ECRgpqCkoiqpiUgUUjGCtRUSotVJrJidlt1NSugEs\n",
+       "xhi2byaWzw7YXlgOM+YLz2J2zKyb4Z1jHEfOLy94eH4BCMMM7GBx3qA2Y5zFONCijGOmZAUpCFBq\n",
+       "pVah1szqeGC2MtRJmR5Zbq4i41bJSTHGMF2NPPOtc3zvEWvJ2aLVUKtBFFQV1YK9/X1s8IgD64Qa\n",
+       "KkpGfaWkSiqFGoWyVXStCLC6f8S954/50Hc/z73nnmV5Z4Hzls1mw9tvvcErX3yDh2894pW//oDh\n",
+       "2TnbhxNmctSaMQiOijWK85XQO5wFJ+0zxpTJxdAd9/izwN17c/ysYK0jJxi3mcu3R3YXyngZmTax\n",
+       "rest4iriwBnLyQt3+MC3PM9v/p3P8x3/wrcwTYnz1895+PoFXgQ1ghqotQKGUguhh5mtuOAIXQUs\n",
+       "IPy1v/Cbj2Fbv3t++Id/+Ou+/6lPfYpv//Zvf8d17t69+47X/Mqv/Aqf+MQn3vG6H/qhd05b/fzP\n",
+       "/zw/9mM/9o7XffCDH3zHa372Z3+Wn/iJn3jH6y4vL9/xmp/7uZ/jx3/8x9/xOoBf+IVf+Lrv//Iv\n",
+       "/zLf933f967W+qVf+qWv+/5rr73G888//3Wv+Qpf+tKXvu77m82GxWLxjus8ePDgd3zvPTfmqppF\n",
+       "5N8H/hfa/67/+mtVsli5jfCIIKIoFVUB45rxEgGpGGPbBzWCFWGxGtA+cLO+xqpSKxgxxJpbfEcV\n",
+       "LQKqON+MOlSMqfTDiNoeTQVnLGIi3nswlZgifoI9O5wK2VpSSogIq6M5OSeWqx58QZwSKVQBtUqt\n",
+       "GRcsWjIUoYpB1YKOrFaeo/tbqnSIeqaq7DaVkgzWGayR26/BtN8FEHGUmhEsIgDtUCsoYdGjoiCK\n",
+       "mgLGYL1CqLi5IkmpYyEboVgLO8fFo0v8mSNZwQwGO/dIFYyxqAW/Uvyo2F45fb6naGH3+oQ3gnMG\n",
+       "q5ngPc4YnBSMCtYKuRScM4g35FQwpVBToVbBSDtgfTB0vSW6hHUVqF/dA2IF7yzzbmC1OKLrB4IP\n",
+       "HB0ds9tu2Q0di9WcqoVaK6qZUoVCRXMmRcu2E7pswLcDepi/1zv6wIHfHzwOzxxV/avAX/161xhv\n",
+       "0aqogGIRCoJQVBERVBUrjpQzM2dxttI7Q60RkcIw88QxIhVyyph2XxADBapCpmCMQcXRzZSjowDG\n",
+       "sL0ZyVHZPkq4AM1/hf2UyGWHsY7ZfMD2hpNhgbVHiBiyTkSNxLpDCqhJZAtiLMQC1qElgSjOVUzX\n",
+       "0d2PDKce3SnXo7BPE0LFOoOxig2CiJC0YtVg0fawYhxFBWpBMAiF0DmcETKgCrV4XNnBzFLMhErF\n",
+       "DoI6wRmD6SCZyiCeo9MFpWbiOLE53xBLZXP5iPXVmpyu6GYV1yvhbmERlY7A9jziLVh1WGswGtFi\n",
+       "ECeots0zJaWaQph5LIZJMr3pySkDQh4rVjzGZ5SKmAzVoFWRJJjOYhfhq09n1js67yhDR7+cEbY7\n",
+       "8tgO+lIMIhWyImLbvimCCWCtoeulHc4HDnwT8liM+btCoBq5zcDehhlFUASp7bE6N4tFTIXBCkJB\n",
+       "gVoLaYrUknDWEmYdu92eqgpqUKNUMpotnRe6vtLNPMuzGasjTy7Ko4eRMgmut8SoWBVUKzllilZE\n",
+       "DH0fmM07jDFYa5ninov9I0wVSAYlItZhXKVWgzUV7S0qMD+Z03fK++4McASP5JLVdsXDPAGCCPhg\n",
+       "sFaZn4XmgVdBjKVYEOMgtsOIWvHBIEZQzYhx1KqIFDRZyiYjnUE8MGS8GIorlB3Y5OkXHgZhSpWr\n",
+       "yw16fsPF+Zr99hz8SLEJFzJ3PtIR5tDdUbwVclbMFqQCVERamEPEkWLBqqfWQqmC2J7l2YzF3GOs\n",
+       "IghxqoxjZLPJpCkiGIxxVNHbEFih7JWyKoTeYZ3wvo88hwSPLwXvDLNZz75mam0HbvPQlZQStRR8\n",
+       "aJ9TzMBsPsf7J7el3y337t17z9Z69tn3rlDsox/96Hu21vd8z/e8Z2t97GMfe8/WeuGFF96ztVar\n",
+       "1Xu2Vgjhd73GE9v5VhyqGWMMSRWVSikZqR1GDDVXrAqCRWtkjyEMilQl50wpFS0VrMEFx+n8lKuL\n",
+       "G1KKgFDVgShjivQnM/yicrSA49MeGxxh2OGs5fpiIt/sMcYg6qilkFLz0AsDIh3GCVUnqikYa6gx\n",
+       "MdWJSW4IdU5vA84pyVvUKt3Ms5jB0dnAP/ttH+F8+xZ3ujt86vwKTR3iMkEc1uptvFdQgWocWVyL\n",
+       "96sgXtCoCAqYduY5jxGYxh0yQSqKDmC6FmZxwaALcNViUYoU+sUMYxLjuOHBw8T64Y7t248oLtLP\n",
+       "Cgwb8gxOP9SRdc+wEqKFcGmQWDFVKFOkytDCYbVCgTFWijp0Jkjn6JxnOczBF/b7PdhE11kmImNp\n",
+       "YZWcKrXqba6k7YV9zhgrIMLT33KXlCcymbHuESKqGbRAVsiZFEdyrNTJQhb6IeC9IziPd/ax7lsR\n",
+       "+X7gZ2khxP9KVf/MP+4aB2P+j8fHP/7x92ytF1988T1b6+jo6D1b6/e1MVdVrLGU2sIrBsGIJedC\n",
+       "Ni3UQKmoFKRC2k5siyA1oTmSU8JQ29+UwtB3DM+c8uh6y2azQycBqxh1bDd7Fkcr7GDpF5750ZL5\n",
+       "6oT9/g3GGJmvjsmpUEplGiP7fcLulSxK6IQQAiKVsYys95fs40RmxPeCtS1ujRpMZ6imYoJhNu/5\n",
+       "nhe/lSN/n3/3j/8kf/vv/R+sv/Q/8WgO67qjWnDOIaI4X8g1YJylH3rEQsnajCgVQdBSMM5DqaRc\n",
+       "sDmAVNQKOSayGjCKhoLrCmIVMxN832F6pdrKzc3I9uaKR6+vsVHp+ogLFqrBaWXoF5g+sWZL0Up3\n",
+       "BuMuY64CBYuUiHeOXKTZ1ipkKRytThhCTymFKUaCNRhjWvzfecLSMZsiV+sbKpWqisuGTMXOPNYL\n",
+       "u7RnO22RfcZ7Ty4JzbcJcBFyqeRcQAw+BFQTtQpKwrkZw9BjTUuAPi5uu5v/HL+tu1lE/sqhu/nA\n",
+       "7wWemDGvNA8aFDGGioGiWNviqaRMAUQrVSoinu31FsqeNG6ASt8ZhsHhXQ9iCH3HU13P9dAxjiNX\n",
+       "m4laM2lSri5HnnrfDNcJR7NTzHJgfN5g5SHee6YpsdmM1EfCuLlknBzGCo92kYX1FAO7ccc+ZvZp\n",
+       "fxvr7rHWY51BxFGL0puAx7GczdnulT/+iR/k+guv873f8oeQ7tvYvvqn+Zz1rKcrTAsqgfUYFXxv\n",
+       "mS0ctbZvJ08FRKAAtwa+ilIqqFEwhhozUgyahWwyMhdAcN5iqYgWnAuMaQe7yvlbG/YPY4uBG6WU\n",
+       "DFV5+vh93Dk9wc48b9RXGMcrfDAkDFEyWg1ilUrCucA0ZWoRUipcXWyYnwzU6igUjPGoEYrpqBpx\n",
+       "RtFQMcHhbQCrFAOzIdAvAqG3LRGuEKfYnrxyJpYJlYL1jhQzIhXNFckgYvFBSDninGWxmDPvbfP6\n",
+       "Hx9f7W4GEJGvdDcfjPmBJ84TM+ZSFRVt//G1NrMmUGMBUUouOBXEpmaUbg1YTQI44n6iTpk+FOZz\n",
+       "xTlLk5nJdL1HELwTdrvEOI5sb/acvzVy5+4xetoRwqw97npHjLHFYM8rm+sNKSW2+4ldvsbkxEIE\n",
+       "cY4coRiwPuCD4vr2TGFsRZTbckhPqpljf8SJnvDrf+tvc3P+AHHCR77z4/zMT/7n/Cd/9mf43AWI\n",
+       "te3pxLYY8mI1ox8CKWa0Vvaqt0ldoaZMLrQKH+dQa6i1VQGBoeSC5hZeqVVRlRbr9xYjmYphTJG4\n",
+       "GTHFUHImRc+0zZzcPeXe0XM8+/Tz2M4iainxdd68fpNynJG1QWzFiLA8mmOXjgG4fG3PtANjDSlN\n",
+       "LIMHDCllKhWt5TYkBhaDteCXLfzVGUNYuHYoBsF6ZUwbyujw3lOrUmrLYeQYAUvXOSKZnEdUlJKF\n",
+       "2bBkPp8xny+YBUeO+XFu26/V3fwHHucNDxx4tzy5bFFJqLW0gjvBqFC0Ai0RaSg4cVhjMOa2TDE7\n",
+       "sq9EIi5ZJCZ2u5FlmjXj7xwI7XDQRNWKdY7QdaSUub6cGHeGFJUuwHy5YJ4n2BZC77m63IC0KpiS\n",
+       "JorCMBN8V2+NmSPngNaKOI/3ihiBWlvBnSglR0LXk/Ke7376O3n0a/8Q38+ZdwOf/X9+lR/86T9H\n",
+       "Sn+SMBvAteIbKaAWul7AZQLCGAUwGK1ULRQFi6BiKa2UvoV2xJPqRDUeam6lgcW0Wm7rEIFcEik7\n",
+       "0l7R6Kk6UqtBkyEnxVXD7OiI+WKF8cJyfsrRfMPN8ZZ6ueZmscPtPFaEcGSZny1IVTFhRn17i6On\n",
+       "kFEVxrwjiuCdx1iLxbVSSpcwTvChPYmIE1xooSkR15KhGTCFWpVaC8E5sJmorX+gZEOtFmPAZ08m\n",
+       "Ybww6wJWDcEHRB5rr9C7cvs/9alPffXne/fuvacx8gPfXMQYiTG+q2ufnDGvGcRSpMW9KxFTDUkz\n",
+       "zkJwCjoSPFgPBovthJwNLhrGKoxF2F5FLswNUi39ImOtxVRFSqHGQsotPCEus9vtePRwzbPPVGqs\n",
+       "LBdHpGMl5RHjA+Jyq2HWyt17K6Tbc3wi2LlSipDUNi/TWWzvGfoOY1vZ4LTPVBTjPFITi7JCrze4\n",
+       "rmPoO0BZnt7jH/7FP8Nf+rm/xvf/5B+iGKGKEqeKN4YuCMYaCo6cb24972a5BUvRSlVBChTbnmZU\n",
+       "AGORotSk6OjJU6F0CefbwSYVSorN221dSIgYUs7ECfpwxJ2zezz33HNsxjX78YybmxuWyy1H7zvi\n",
+       "jf3bXHxhTT/v6JceNxhWqyNWdyKLM8fmagTjSbrHpEoGYpxweJRCqe1gNcHgnGAcOO9wwbdyRdeS\n",
+       "P1VbYrVVK02UMaGlYD04K+ziCDjsVyp7KvSD5+zsmNVqhnO21dw/Pt6xuxl4Vw1BBw68G0II/7/k\n",
+       "6Ha7/R2vfXIx85RBLRXbaocVqihOlOAqnVeMhy4IzjZPVFSheqaxIyAYI0zrxPpmonLFfBNah2Jn\n",
+       "yLmSs1JiJmvGWoeYwlsPL3nmrUvs0wus3eI6h+06ttsbprhDidy/v8LayvJsRlgWJt3jgydmENdq\n",
+       "2YMPGGvpuo7gAr3fMu0mjKm4rBzNe24entP5GUYqRiymZG7qwPWX/y6f/MC38n+dfwESJN3R9wHj\n",
+       "KgZHLIUSa+tsNRZRqKqE3vPMM0/x1oMH7HZbEItoRqQlP41aarG4CSgeVaXeVv20sFZlfuTJ1jHe\n",
+       "RNKYGWqHdZ75YoUag/vq7zVwtFhRpHLn2US9UnS/RzvHsOjpVoG5n2GGgJt35F2iACZknLXkDLUU\n",
+       "6BRvelwQfMiUVMHS6v+rRcSgKNMmoqKEmUOAuB/JcUJSpuTW/Wo9FNOcADGVzlnOzo5YrVYMQw/W\n",
+       "tPUfH++qu/nAgSfBk6tmqRVyppaKWIsaizEF68BowQUIoTIbHM4ZQLHib2PT7tbDM0QXWd8kNtcT\n",
+       "NSvx0R7rhdC12rdaMniLDQklsr2GT3/uM5ycnPAwXbE4XqApsltvKRl8B8enCzqv+Hkmm20LWyCI\n",
+       "MRipVIFMoqPHuYCIYdZ3zIZmgJfO8cE7z5FfiQzHAzWNmOBQEeI0svn0b/Azf/K/5F/+T/8wMXhc\n",
+       "MayGHmOEmoUYEzlWqCDavFBjhHtP3eXO3WOeun/GF175Eg/evrh97hf0tktUkpJGMLsO6xJqCyIR\n",
+       "wdLPA3fvnRL3HW9/+Zy3395gZnB2dgcqpNIe6a5vLsh5x2oxZ6uR+0/fI75duX5zT14X7P3AbD6A\n",
+       "s9xddFxd7lhfbyklotVhRNBSsUDv5y1x6QLHqwDO4kQopXJ9ecN6MyKikJRxP5Gl4BDiLmOKorWg\n",
+       "WimSwRuMozVdUZkNMxbHgTBz+L4DI5S4f3x79l12Nx848CR4YsbclKZfYu1XGoUUNYAWnC94I/gA\n",
+       "3hWsLxjjMCimGuzCs3WAqzgbqEwM8xOCc+zGxMO3r4lrwFSMEexQEPFY40ET2/2eXVrjcOzHDbv1\n",
+       "FZvdGiOW2SIwzD3DUDBB2UZBEGpsIQ/jhJoqacpElwhTjwkZ2xmGWYcw8ezpKXfK81T/BqKCsZ5h\n",
+       "eUTcXGEqbEXJn/s1nrfHfCFu6X2rU7dGSEUZp4zxYKppnrVahlXgzumKWedZLgbG+BTnDy8p6jCS\n",
+       "2ncnSlWPTBUtLQlqb8uu++BYLu/w1J2nWPhjNk8/w+de/QKzU8/Z/adACtNuz8XFFW+9+YCb3Tmr\n",
+       "+QnOCRPK6Z0j1o8u2F5s2G6OCGPCL2HoAovjQNHIzdUepFXZ9J0juIDSId4RjG+JAeG2+UkJ907p\n",
+       "hh1XVxtKzZRcyfuIxsK0jzjN9J2wWPR0/RysMKbUmsW8cPfoDvefuks/C1jXEqulPtYE6Lvqbj5w\n",
+       "4Enw5GLmClag5Nza4GtFrFC0oMpt7bmCqcitfgt6G/P10ElL7hU1zBZ3GYYFs9kMb3pOTx/yud/8\n",
+       "InlXUTFgKsWBmQxVhPmsZz2+zZ3lHaa6Z8qRNE5oNfjOYAdzW/pnGXOhqCVmi6ug2u4bY2GT13jp\n",
+       "8LYDMZSS6OcdN48uKekGEY9oa/nZ3WywoWec9sxqYfvoDX7qR/40f+rP/yRuPuCsAwzGtQoe1zVv\n",
+       "u0aFquzHiWE2Y5gFQh94+pkTXntjzvmDK1CPaV8PSivnlNw6I0PXPm/nLad3Vzz37HOcnJxx/eCa\n",
+       "cGzJLuONJcXIozf3vPXm69xcPuR6d0GeCst+RTKVyWQyhjoaHrxyRb+a0816MGBdYJh59ntH3GVS\n",
+       "rhjJGBMwtWBtQKV10dr2r0qtlVIqi8VANZWHFxeUXKmxMO4m0r7SB0vvLN4tuH/nHvPFjKnsidM1\n",
+       "Yg1DF+iHHj/zGG/J45YxPz7P/MCB38s8QWNeqUURNdQ6UkWwxpEnpXihdIlqleybYFapE025BFDw\n",
+       "YglDILuMdXOOV6esliusCbjOk3LhzVffYooRcRatQk2GMLccHc8ZmchmR0wT1ELOlRgjxllKVSCz\n",
+       "3+8Yd4liLLUaHJ5SKjWCE89uiqxvdnjx+FCxYtnvM8/YpwkV/Lwj54oYYbfboGPg6adO2U8Tj87f\n",
+       "5qOf+D4++f5P8A+2X8B4xTlDHismFBxKqQ4yaDGUmLjYrHnq3nOEmWXcVJ5+7g7VavNoS6JGSBVy\n",
+       "ghRbW79qxYhBpeDVcud9d1nOZ7gQiDaxmgWeCTOuLrdsdxt2l1dcnp+zK2vWmw359FlInl2ZULGk\n",
+       "KbE/j1yf7+kXjuJ6fAC8UC2kW2EcY7rWH+CESsGZgSlljAjeW0CxpnWCzmaBUz3m4uqKNCXqBDaD\n",
+       "N47QzTk7vk8XFlgJHM1XzO4+w1jXWCksjga64FAtxBQp7zLzf+DANxpPLmb+FcEl6q0nbimxIAbi\n",
+       "pEyjwTrFdJXswRghaWsW4VbO1ztBcHReODqac+fuXazxWB/YryO73YbdbgQFpVI0kZJhOTvheNkS\n",
+       "k/v9yH5XIFd2+x2hs+x3GbHbZuCzRdVRi0XwxF1EFYrRVnanypgjPnYEP7Cwx9y/8zK8HolR6Pqe\n",
+       "WpX5fM7q7B67zQ29D1xfXvDF//2v8B/9O/8h/8Ff+GmSm1rFeJ5wvQHNpH37blTAiONLr7/Ciy+e\n",
+       "tQNGCt3ScsesMMaS4khVZbct7LeRXblpf/dWuKyUStd3+HnHYjkjRGHVH/Hhco9n4oydFP7f6S3+\n",
+       "uwe/RoqJEjPRRG52F/g6UERIktgXRafCl3/jTfpjxyCV+aqn1fc70uQg5qZKiZBrZZx2bPJESorm\n",
+       "QugDi1mgc5YpRay1zAdHSUt21xM6ZfKYmZKlFkE0INrhTEfvPEpL+voQyLliyagqcUrE8WDMD3xz\n",
+       "8gTb+cttw4vHUFsiVE0ro1OhDJmcwEYlmYoNIDiK5qaqiMWIQ8ShBoZFz/JkQQg9+5QZlgOL5RIx\n",
+       "TQCkCsQ0MQxzpMzw5gjRTE6P2F5PbG+2pP0ExuCcJcdIzg4VzzRB2kVEM3Uq9F0HHlRbq791nq4/\n",
+       "InSn9HpEX+6R6hprCqVmnBnY7jfkiytyGrnRkTurI/YpEt7e0psN2nWkPJJ0RKRJvpavaklWhoXQ\n",
+       "dYFXH77G2ckx3sHR2QI7twTnCf6p25K+wtXlhqttzzpdAptWd38r6bu9uGKohg+/scC9Hbm6fJXP\n",
+       "WIvUxLd910d55tWBq+rYlALGcb2/gTQy3Sj7apj2rZlre17ZXkdMB5WmsVNzi1dbY1GthG7AiGBn\n",
+       "c3KEm/WOqWQuby7YJcu9u3fxvaOMBecsvQvMfMdms4Fo2ZuR8zd3HM12eAkMDpIprXzRgTphuxnJ\n",
+       "3lJK4uZyw3b7eGPmBw78XuWJSswZo9SSbzW75VaeTykqxKSYMWG9xxulSquCEGmlaxhDTc1jx0ir\n",
+       "mNARYwPOg+scznlC6NpjvbOsd4aT1Sk1CXnblA/31y0uvd+OaCmkpOypWOfJsVKyJcVK3NIagpxn\n",
+       "s53wnSC9aRozt1oky+EI63oWXYeyYRoz2UaGobbSOWcY5ieQR9I0Erzh9S/9Xf7Ih/8o//MX/yZj\n",
+       "Sa2ccl+Jm8J4AxVhNnhc77C+cPXoEiMwW3gWbsbJ6Qm5ZAyW3g/YI8swX9DfePobyz5BqiNSK2Wa\n",
+       "0HXlfa9nysNXicXgwpycEpoyn/u1z/Kddz/Mr169QlVHKqklnL0g3lJMJlfQJKgUNtdruuMlk1Gc\n",
+       "GEquaLVNz9w6+sXAarG81dypLE9mnD+6wOwzxkGVieXpXcbNRNyOTXjMwTB07NNIRZm217z95pcx\n",
+       "FGJcEDphftzh+ogTxUhlt5vaQbXZsd2OT3JLA/DJT37yPVnnC1/4wnuyDsDV1dV7ttbp6el7ttZL\n",
+       "L730nq0FcH5+/p6t9fnPf/49W+vdNv68E/9Uh1P846AIYpp8bEvdtUYYK4U6VZKBFEpr0lFuq1cM\n",
+       "znr01mutmpn5OSJKrhPjuCWXiawTSsZbh/MerDDvZ3R9jxRP3CrjLnFznllfXZGnTEHREUgKXqA0\n",
+       "TzRHJU+QciXFiJiKDwOmKlorU2wyvcYFFrMT9g+ukN1ErULQgkigimNzc8Pp8phdjMyCBevxYc6/\n",
+       "9L1/jL/xpf+Tqwj7zch+W9lvlZIqNvQU07pAq1imXDi/OOcsz/FWWHQrrHXspoxzinGGfnDc6+8S\n",
+       "OsPDR5Gr7Z7OzrBm4NvfEI5zItkeCZ5geiiRuN9wtdtQUsGXQtKCLRZNlegjxRowrkn05kqYWYy2\n",
+       "A4JgMC6QKmhuE6I63xFmPf1sBhR6ZxDjWNzruLi8IseEw1NJdDOHFM/65gY0Y13Lk4TO4zrDZnfD\n",
+       "5z9/w7AKHB+vOEsL5suOOHX4rnW77raR7XpL3h8SoAe+OXly2iy3E3QUEClN8U8VY2yrXKlCqUJO\n",
+       "hXE0hKJkJ+AsWEUdhOAxpiCqVC3s91uSj1yvL7i6ecB+v8aZBWib5OO84/rmguViyWadMN6xudmS\n",
+       "UmEay22oQClScf52wMWopLGQpkzVinOG0Jk2vmwspD6iIuxD4PLqkiO3pE89qBKnDH1ls73Cd3PO\n",
+       "7jxNzCN+CGzHDfEiUqaRk8//fV7uPsBn3vpV4tZQYkRLAflK4tZTklBtRyqJdLNn1c+pkzJuI1PK\n",
+       "bGNElzNk2aY1GRQjlvt3n2Ffthz3Z/zznPGMOeImb+ntDGsD1jhqyfSzjpBG7vcrKg5RS9aKKRYJ\n",
+       "0kbT0UYhaYFuFrC9Ilaa+qMRco4YCTjvsbaNuCsmMswHOuPIonhZ4DpHLhNpn5AEg5vzKEWqKazO\n",
+       "ujb8winDzOKcIeVCmipKYbO7Ydg5xGaETC8dpSTimMil9RscOPDNyJPTMze302P+ke5r1TYEAVEk\n",
+       "V1JyZDI1O4xX1CneWyyeJJEQDFkTN/sr1C9gB/v9DTFuSbWVG1o3x0hrF5/yyG7a3Hq9le00UvaZ\n",
+       "XDK2WLBtis5+bI1NOSk55ta4UgpGWtGk1Saxm2NkihMOx0PewEzCxxbfyk2aSFOb/rM6OWPcrVmb\n",
+       "wNHZPY5PT7i5eJtxfc6wOCVvEz/6J/4N/vJ//LdYzRZsRpB+B7G1A4lCHgt5vyNNe2qKzBc9Z8tT\n",
+       "pu3Idp/4wmtv0s8sJ2dLju8cs1rMsGpIJfH86lleckd81+l3sN9eM3NzbOiwxmGswbm+jbQbE8/e\n",
+       "fz+7X29PNLHGplVTmvSsOJqSThVs57C9w3qPrQasRbUpH1oxIJUp75mSxUyGsPIEH4il0AffFBe9\n",
+       "hyxIqsyWA7OjnnE3MnOFMGvt+VpaYnPaT1AtIqU9Gflwqz1jMDhKuiGnCuV3rwt94MDvR55c0xC0\n",
+       "JhenaDK/FTen1Zhzq9mSUmkj4UqGDCUEiiqehNiOUkA1M047ylXCGU+piWEW6OaWvImkbDDVotag\n",
+       "OfHw4k0kKCqFcb8lj2PzRLPAVFBxjFMEdZTaDh2RipiKSGnDmo3BuExR2G0Labwgb5V5GuiOPUUC\n",
+       "xhbyuGOVply1AAAgAElEQVS9vqEAc2d48OABcZrwpuK6gQcP3mIcL/iu+8/y7afP8xuPXicEh3U9\n",
+       "mERMlZInxm3Gux5jlG5u2Kw3aLKMWtGqXL19wRS3bE6PePvNt3jppZeoBjotnIVjfvSpP8jN+gpj\n",
+       "O/phTjWewXvEAnmkjBlnHPMwYxEWFANjviFKoro23i0sCvtQKc7RHVtC72+rhG6nIZmAOIezFuc8\n",
+       "vQ3ETWWzPmc265nPFswGSzQDcdxjaiJqZNqO5DrhByFVpTcWOkvNLbzlS8XMeozO8KFQa8QYxVgl\n",
+       "pZFaLIXAfjexPyRAD3yT8gT1zNsUewu3nYvNm5PbSexVviIwJeTSNEaktvhozoWZOFansFh61NBa\n",
+       "4FPF2YgxjtVyzjQW1mlHzolCATVtRGiaiGmkilJVm2pfNkgBiqFqRZMlpYqUgjrFhtqGH0imUCjW\n",
+       "olKhNjGoaW9ZT1u6E9jvtjiEJC0RaNzA2ck9osILz99lt9+z3W+Qkig5M4Qz4sUF/94P/9v82f/x\n",
+       "z/PWuCbRBL2u1iNxLMy7gdO7S1x/TayVPCX2aU/d0xKgUiFXLh5ew5VjZpa4heUD/ogf/Y6Pc755\n",
+       "hNWmIFmNbx21Yql5QtQQuo4+TUzjjsWwIuuIStd0bWhPPyVZjPf4U0NYCNUIEtpAjrhLLOwCNUrw\n",
+       "nuVyRZHCfrvHmMDVw5HF3OG6wLIb2BmDlh1pyjgfqL7gvKOfGdRbSi5MO5jGxH4smGro+5bwns1a\n",
+       "jqSURAiekjN5LNRJmabHqs1y4MDvWZ5caaK06UJNgISW/9Q23LhSEdMqRbjtGCyqTRmQTCmWYSmE\n",
+       "LnB2eowIrDcbgNY09JVJ7ctAHTO7bSZVJaepraMVNS2xqgC21a6nUpDqMFqxpTKlBKXivcfYirG5\n",
+       "iTlZQ9GCcc0Q+t5TimEct9T1RC4VrYJiqKrEGCkF5ssVDx68zTSueekDH+ThW68yLBc8urrkbH3E\n",
+       "yx/7A7w4W9E7T5HCjd/R24EH0yXPP3uH0V5wcrpgo2t2NxO7eEnaBIpmck2I0MZBj4V0tSbfRP7N\n",
+       "H/pB/DZRaNrpVS2LoVWwlJKQmls5pjGIs0QnLO6ckKaRsq/sNyOaU5MyMIJxyvFywbB0mK4Zc2ol\n",
+       "BEeH42g2Z9YfkV1iTBPDbCCnSpoS66sNp/0puWT6oSOOE6Hz1FQRbZvAWEGrZbsfyUlJ+4mahBIr\n",
+       "voc+dDjf1Beda7NQ06RNhrh0VN09tj0rIv8N8C8Cb6vqdzy2Gx048E/AE+wAbYMVWvu5Ymit9vW2\n",
+       "J11ra5YRade12nIDVdsw5L4nhA4xhvmsw1jL9fUNIq32O+fMEAamPkGc0KmQNREzGCq+s1Aqwd0e\n",
+       "GlZw3hNLpaR2f8nSkn42I0bwg6UGcCHjvLk9ZhQjhiIViuXZe0/T9zMu315T04Q1luXxGWE+xwfP\n",
+       "6f07PHgj8ulf+/soGWeVxczz6PqGu3HLR+7e530pc391wt/44mf40FHHpybDVXzEC0/Nee6Z5/jS\n",
+       "zWehGHbbPXmXMaFiDISuY7ed6HTGEOFf/94/gouJbRwRcYT5jH6+JE0T3a06oqb23brOotuKdIan\n",
+       "Tp/mYrdGnLIeR7bpEoMFDG5Q3EwwvUFsIZsmNXzs55x2A7N+hnOtbFSppDhRSm3H9jRRpja4ousC\n",
+       "i8UKTbeKibf/7nlS4rQnx0TceqZtJe4y89Wcvhe8N2htyeFaKzXDtB9Z30zs9hV1jzXM8t8C/wXw\n",
+       "84/zJgcO/JPw5Dxz4Ldc8qbDUm6n4yja9K9v29Fb9NygtbaKkzYxFME1I6/mdpivp7MdNUPNipE2\n",
+       "JFiMQV2hxISKoVZIU8YHwXnB945aDLkUrIFYaxv8gEU7xYUm1sUg2FDb0OCQqVgY25AHSRC6wKo/\n",
+       "wTiDoBQMq6Mj+vkRPnRcXV2RcmS5WLG+Ouel97+fz3zmHxD6BVUr4+U5n/zgyzy43nDsLOPd55i8\n",
+       "5fn+iHPd0R05Xrj3ItXu6cY3GesxD9aP2I4TT89nJCwPs/JMWPCvfvx7+ehzL7G5ugYRrLGI75hi\n",
+       "ZN71eGcoKSICwXt2+zWd73CrJbN4hiLE3Y5gBrxsyVOk5IL3Htu3XEISQeIOj6VzrVSxVbSAcYW8\n",
+       "n4hxpNTMbr9BUYo1LM4G3MyyWh3h1FF2kNcRSyCnG0oGEd/Et8jYbqCbO5wXrCsYZ4BKViHHQi5Q\n",
+       "s6HoSJibx7dnVf/mrfztgQO/53iCHaBCkWbGkeahG2kmsCVAv3JhS4i26WlC1owgPHp4zb27M1Ly\n",
+       "BN/i2Z117VgoisNjS25ecynkkkm3E+G9WFJO2CxUZ5FesKJMouBashNpioPVCSYoYQnGQ7WK9K1Z\n",
+       "RUQoMVCjRbXwwnNPcbI8olRHzi28cnn+iGm/4cWXXiYMHc/de47f/PRnMc7x6pff4N5z72fpEq7v\n",
+       "CBjmyzPuZ4jjjo+9+AJZhJqU6xTx88CdxV3yzZYXTp+i3O/4crdg3I3MuxnGDfydz32eT9x5jj/8\n",
+       "ke/ien2Bn80Y1zuG4yXDMGs5gJSompGi5JrZx4xxQswJMBwv5xAz/dUaa8OtTjrtew0W60urKU9A\n",
+       "TiiF6DKp20MS1BamHLmZLoj5mjxW9mPk0eacXR15n3sW41vdv/eWzndtdqcYqIY8RfZjRFxgtupx\n",
+       "KjifcN424TUytdo2yDop+3FLjC05bb19Ulv6wIEnyhNsGmrW+ithleajl1uFRGmva21zP2tLfOWq\n",
+       "GNOmtU9j4fJyRzdzBO+wnSd4j1Ylp+bLN4ldQ+c7YlGoCe8szt5qbmtCq1BKgeox4XaEnTMw1jbw\n",
+       "ISi4NtjBWI/6iliwOGLKOOnIBp5/5g7P3L2HMxVvHNY7TBTG7Z79+pppv2V5csZ+v+N6uyaNW9DC\n",
+       "h7/zO8lXb5J2G/bba6xp5ZkpV+7fPSPXgpPASUkYKg7lu4+e59IPfO7BQ77t5CnsMkEYuN5HPvrc\n",
+       "ff6Vj/8x6rhjv5+oueB9U3XUWkGVoesoeaLU29r52++61NrCQeqxzuKCx7n2XVkxiCj2dphEG3zR\n",
+       "GpvUwtZFgilkK+T9xCau2cYdWjMlCzVX9nHk3D3i/v071FiZ9olh6OmHpgmfU6bWdnBrbUJc1nms\n",
+       "LfjgMU4x5nawhdYmX5BaPM55A0no5/7JbelbfvEXf/GrP7/88su8/PLLT/DTHPj9zHq9Zr1ev6tr\n",
+       "39GYf62kj4icAn8ZeAF4BfhhVb26fe9ngH+LNlP+T6nq//q1120iUKqKoUmiqmlGvNbaHtURklS0\n",
+       "tjn2apRSBaltcOZrr15TTaaqcrQIDNaTcxsibIwFVWoBsHhjW3ghBOb9gGplO67bMOdscNbQ+0CU\n",
+       "TMrAREtymtvhFs5SXMYYRcggDqk9McPgPR944f08e3KPo9WAicLTzz/LG69liusInWd5NGe73fDZ\n",
+       "T3+GP/jPfZLPfvrXoez4/Kf+Ht/1bS8x9D3T1Rpbt3TzGV/60is8/cwzxJjY18r65oJ7p6fMjzru\n",
+       "18SLd7+FYwZeu3yL02GJdp7dSeBZt2S1KFy9fYHmdghaaTNQVZWhm0PJ1FoYpw3iPON+YjZvQmHJ\n",
+       "RkJncZ2lm3uOFwsqRzzcRrwIVWDMCRsL+WZCRZiHjvW0xVeH2jVFCpFCLZmSWyVSKRWNwuZqy/n5\n",
+       "FcvlMfubLVKVaRyxJjT5g1RbD4Cx7bO7Shc8zoH3BmtNm06kELwjThljPP1QICzx3ZMX2vqBH/iB\n",
+       "J/0RDnyDsFwuWS6XX3391ltv/Y7XvhvP/GslfX4a+CVV/c9E5KduX/+0iHyYNkrrw7RJ5v+biHxI\n",
+       "Vb9mvZhIG+LcRpoF5LZ+W1QpVVFuJVwNZCquGrJmjG3VMDEWHj3csOh886xnM+IUqdU2ZcNSsFrp\n",
+       "HFQxGO/ou8DMdnQhUEUo5abVkIshm4JxhfncUVKmFIeIYkwF7ZCYKLYgxVN9RUWwtdANc56+c8Ti\n",
+       "ZEVyCc2KEVgsl3zxy79BHNccn54yLOZ86EMv85lPf5rTu3dZBDiee8acePD515ktPcuuxztHP1/x\n",
+       "aLOj7HdcXl2Sa221+VbZTRP9ouel9z3P2ekxr375FZ45u8ubF2v6O3fQ4nBujh33iHdt5Jz36JSI\n",
+       "cUO/mFOqwZqezc0GMZk4OVzX8+buglIStoP5vGOxmrMbO2bDEuYTeW8pgKYmk2aDQ6uScuEygYaM\n",
+       "CwlsxXolRaEUocQ2o9SUxKtffJXlbIVm4eZyhymV/X7TSlMtiBX6rsdgEWrLlUhoM1FvK55aDkVA\n",
+       "K2IyxQp9b7B29i629IED33i8ozH/HZI+fwL4vtuf/3vgr9MM+g8B/4Pq/8fem/7alt53Xp9nXtMe\n",
+       "znDHGm6Vy44dD22bzJ3QoiGgRrSEFAGiBShITV40gqCWQLxA4g2oheg3/AMRYpAQYQpE3YLutJIm\n",
+       "Ucc2bsdxbJeryi5XuaruPXc40x7W8Iy8WKcqgcTB7VT1DfH5Svecfc7ZWmvfc579W7/1e75DCcAb\n",
+       "QohvAj8OfO67HBslFFy5BM4jhjS/YfM8ly5FIoogZzH7Y4t5NCOFhJLpLz3bpaeIiCyF5P08Yomz\n",
+       "ShA5b/6pPHd1lTF0dYs2mgOuNs/SRPABKRMg5mLSFIJXjMWTsmDYZYzVIBRBClQ1Qgk4bchqQC8k\n",
+       "dinx20DnGqp1hRSW+4sHeN8z9QMpZU6ePGZhHT/0mY9y8eornG4vqfcFbRsuLy+IdpwVn3k2kFJW\n",
+       "opQhiEzKUIpivTzkO995g8bU1G3LUbfi8vIMKzTPfuhFHj16RAgR17aEUlDFotBoFLZZEoc9KUR2\n",
+       "+x3j2GPcvJGsjeM7FydcpHP2456UJ4wtNJ3l4kJSroqykJkoI/IqqENKhYyKZDy6KJAK0iyoKiVR\n",
+       "ooIiyTkggHHneevbb1JCwjpLDondbkMucY75A3IOV/NxiVGKECNFZKyes0198MiiiGU2IzPOIq5G\n",
+       "MB8UhBD/3dW6PxJCvAX8x6WU//IDO+E1rvGPgO93Zn6rlPKufddD4NbV47v8Pwv328wd+h/GleJT\n",
+       "5PKepH8OhZg7dinnxBySJpOuPL1n+qIoBZULMSfiJNid9xAkqR9QzqKFolCIeaCU2TNdKcHkA845\n",
+       "nDM4ZyiuZcyBizNPCoVUEqbJCJFRToMZEd5BP2+IhkEjlUeYgCgSbQvWZkyViER82HIwLaEKnJ5u\n",
+       "2G08U7+hZEm9PGbvt6yXjtZZXvvC51neWFIPYWa9LBpi2iJMxfbyjJAi9XLFc0c3uTzdUleOxxdn\n",
+       "+OQRRXBy/zv8xE/+ed566ztUVYXEUjvNNO4QpczjJj9QVR1SGrSpZmEWAtsuyDnhrCP4kTAF6OYx\n",
+       "0+uvv84TcYbSkiH2hOSRKBbNkt5GvJ9IWlNQGF0wWYEX5BzRUoKGGGa+UQ4RJoHMEpJg3puUlATb\n",
+       "3cDp+RlOzOOUadox+YEsQFkFZDRz159SIqUMRiLSVXZszsiSEcVQciKLjJHmXZrUB4JSynV48zX+\n",
+       "1OJPvAFaSilCiD/uLfRH/+yqKBcpoEhI5SqkYuYn5wRFFkRJiKAwenZINBrmSN/CVTonpw/3hF4z\n",
+       "dJK6yVSVRmlFFABpns0LgRSCcZpYdBlhJK2s2FU1Wk8MlxNjyFSiYGwmkZBaIMdCHt7lk8NYEq6S\n",
+       "CKnQKhBK5sBZtuMZrTa8sH6W7ZMnPHr7EbvdQImRYb9nHC557oW7xOjZ+ku0hKU54rXtJTZlbty4\n",
+       "Rxs35AyPHz9ktboBRTKkgLGWpq6onKGu15ydPeb41i36aeTy8pLdfsdifYCTkml7yeb8AmNaZDEI\n",
+       "ocg54v0c6pytJhZFu1wxTHsQZbbnRbC4ccT/9etfQnaKuqlJbkI7yRRHYvbUdYM3mT4ElCxYYaiu\n",
+       "2CMxR3IEIwTSFYScBVlON/gU0RSKkuQExhpa1xJ2E4GCUhDjSIgRpRS5JIxWiJzIcV5BIUZEkRSV\n",
+       "SSW9F7ghESDnBiDnjNIfXGd+jWv8acb3W8wfCiFul1JOhBB3gEdX338HeO4PPO/Zq+/9IVzcvwQE\n",
+       "FLCNw9QzCyFfRetoCSJpkogUIRBJgpSUlFBKEMu8sUbO5CEwpEjMjlICOYFzBVSmMBfeUmaVod/3\n",
+       "5NUaiUAJRV1pnLP0QhC2mZQCzVJT1EyflBSKnzfcpuxBaIIq2CSQosIiCVIQ/AUh3GHq9yzqJafm\n",
+       "gsfvvHo1909sNxu0vMu23yHCHi3hc5/7LT716U/Tn2/46tc+x4v3XiKHAWSiCIFtHNvLPTlM+Elg\n",
+       "rGHTb9DG0hysyWLutMdpovaBoC2XTx6jnGGKA9ZV7EaPoqBzJluJVhWCic3ZjrZpOL84o23XjKHH\n",
+       "VjXvvPaYndxjGkV3w1EtJbYzhCljZUPfjyQhkAqMTLSmgDBsp4EpztRSgwYlMaUiEmeVgM6YYphK\n",
+       "QAmFKpLoZ9FUjIVhjIQYMKaglEJETchpDmm+8rzPSaKkIqWrIPAiyEQuTkYu39wjr0Ktr3GNH0R8\n",
+       "v23M/wb8/NXjnwd+5Q98/18VQlghxIvAR4Av/FEHWN9dsH5mwfpOR7UwvCcjEgCzOAhRKFlCyoiU\n",
+       "ETmjEIgiUDNzkBLT7OwXBGGX6DeJ/Tax30f8qEiToHggCMKYSQlCeFf6XjBCY4RECkGYAn4r6C8z\n",
+       "434ihgIkcsxXFD9Ju9B0rcLZCqtmlktJgr3PmElze3GMVJH1esHy4AAfPEoqtDHsdjvG7TlKGHbb\n",
+       "CyiZ3/nyl7jYbzk4us3p2flMuxxmp8D95cBmvyflSLiS3z98+BCpJEVoHpycsO+3tG1DLpkYI916\n",
+       "xbjZIUshxJH1wRIpLdvNgEgVl/ffZvfoCTpH+n2Psy2jH7l98zaqa3j7ySXbx3OB3Dy8ZLOZ4+eq\n",
+       "yuJMzZ2jO0xjZAoeY0GIidoWKmswolyFhcwxfCpnKlFhdY0zDikztXVz3JuPiChIQTGOCT8mZBLI\n",
+       "olEoUinIAt5HYsxwdUEOKc0boRkEBorm5r0jXvrJYz75s8/zmX/u3ve5pK9xjf9/43uhJr676XP8\n",
+       "7qYP8J8BvyyE+KtcURMBSilfF0L8MvB1IAL/dpkNVv4QCrMl7cyJEAgpKHFmiIAkhXkzslx1Wu/R\n",
+       "FaMkqTILjXJB5pmTHFMg5ky+OnhJkGuwlfx9LruQRC9Jscz0RS2wyuKkpq4cVWXph8zUJwgCIQNa\n",
+       "SaQVlKJwnaRZGKSWaFWwtcPWGVVJ+rGwLz3TzRGRC34c0XWFULPXTE6eYRzQJbPdnXLz+Dbn5/c5\n",
+       "bhas1musBp8tIRZQmmHa89bbr7Jc36BzsDnbsj44QElF1bZMY8/l6YOZIonCth1KSk4fP2F5sGCI\n",
+       "iqpp8WOkaY/oFpo4bZHaQolMXrC93HFweMBmd0Fda07DiJAJZy3aitnAKhQudzuWbYtVULkKKzXD\n",
+       "lQFYrQTZeqSKqJwQelbnGglO1ShpyGiy0UgfmaZIjHHOek2C4DMhjSgBunZoqZFFXPncS2pniGmm\n",
+       "mxqnr9bBfA4pDELC5eNLKIK2m1fTNa7xg4jvhc3y3TZ9fva7PP9vAH/jezl5ARSKrGZGgpDlXXX/\n",
+       "ux+ujjl37EJIMrNIJCcBUaCKZsoTqkhKTKQBhpgRncUIyFIj1FWwMQnl5sCHMAVk0Egk1jiOVmvO\n",
+       "z3fsNxMhgKg0VTPz1YXLQMbUEmkKSmeMTTS1w7QZYWenxCF7tvs9cb+lqmcDrKquWK5qku8xorDb\n",
+       "7ehqxaOTt3nm7rOM2y3FR7CKrr3D5cWbHN24hVGWB/ff5vadhmG3IYSJRw+fcHzrBg9PHrBuF5w8\n",
+       "uM+tOy+CqRhGD2HCykLvM1XbzZz6gwMuTneMu0zse6TwpDCx3Q8cHd5ic3pGTCMhZ37rt/4+9cJh\n",
+       "xWwvW4qiRME4DQgdWeqbOCuxqmLfj2wfRepnBTZNGAu+JHLJWFvhsqYSmpwKgjmwW4iMUgqZIfae\n",
+       "GApj70EWmoUlF0ixoKydXTKlwFlFLQo+RKY4Z40qaWc2U8pIpTg4OkCpgJIOIZ6+Be4fFA39SfC9\n",
+       "ikW+F1xeXr5vx/rqV7/6vh3rwYMH79uxAL5L7/h94f18be/n7/+74Sm7JkISQC5IBEXNRlo5ZdJ7\n",
+       "z7xyVkSQYkZq5uqOuGKsxHkDFTXznaeMKQo/gbMFVQo6z/PVuaNWpBSJsZBTwlUGbRWCmtVyxfbx\n",
+       "I0IpSCdwtZtZFHJ2HBRzagYlC4SyyFqhHKAiVlgOaNnvdoybS/wUqauKVAmk8FSHDVJGCh6fJc40\n",
+       "7Pc7lssDtv2eo+M79OM5xi6onES4itW4ZQwJnyMZwebynOM7dzl9+3XknReoq4aq6ajbA/rpglQg\n",
+       "hjmQYz88wrgLDqnQRjDtAv1uRyIggsfHCFIwklgerFC24u3tlrqarYYhk0RBFIEQiTFJ9lwikmFV\n",
+       "WbYbw0UfaLYGrcBqjbVi5t7HecM5CIlM891XDPPdUCnzPkkIHu/nODxBQSgHZb5byzlfpU5FtOa9\n",
+       "jFW4unOhUMocISjQKKVxlZ03xMVTTUK8xjWeGp5iMS9XDoiaLBPEmc1SeFeuPV9h5zf1/LUQkPPs\n",
+       "2xJzpCDnwGHmhBtKQaPJvpBtJASDniI4QUETk0ApBcx2AClFpKjJUSGFxFpHu9TspoBUAqkEOUMO\n",
+       "iSwLJUpSgpwEky9IEa6YLR3TbsOrm9e4197BCY11iTZLVs0RMXi2u3NSkjRtg9UVi+WK1eKQNO1Y\n",
+       "dQd4HzHasrl8AtQYXbNarYlhD2XujlXV8ej+CbZtWR4fcn62plus6fsBowTb3ZbKtuz8iCqBdnmD\n",
+       "4LeMfWG76ZliIA8TMUds25BEoD06wFYas1ziG8ntW7fZbp8QQ8Yz56CmWMgq0HNOW61ZHiq6nSAV\n",
+       "TUgKj0Z5ha6vWEZ5LtyUERE1oghCzkzTNDNq/Py3N0Ix+h7X6j+g2oUY4xxFBzgnkCohKkGRYLMj\n",
+       "54hzljAmhFBYqxCyMHkwWnz3RXeNa/wZxlP1ZskIREmzmo9ZhSmuAiNkEVcFXF45IxaKuJL/C4ks\n",
+       "cu6aAZCg5tR4nxJCFUxSTFNAazlHx6mEdVeba1HgS8RPCVKPEHNIhkZQ1RU+J0hzN0mcuc0pCeIo\n",
+       "mcIsU2+ipe8zdVchlSAVy4aefugRSrGoairb4P0WazUxRfw0cbi6xfZyw2qxQOIJpdA1a5qjG0yX\n",
+       "D+mqjpBGbh8s2TKho2K/Gbi4uOCHf/izvPrK7/Ezf+FnoWRu377Ldrshjj0hekxdsx3OiT4gmwNO\n",
+       "Ly44vtHiqgZpPMfr27zzyjcxbU3O8+9Zi0xXN/zek0e8fPYKqlZ06nDumvc7tI5EaVAls/UTnfW0\n",
+       "jWK10jy+zEypEHJGIqnRlJKIJNIUScUTxkBOc/h2ygmjBKRy1X3PF26BIJSApKC0nT3rZcBqR73Q\n",
+       "OJtRyuGsgOyQai7YZQFd113F2imEkFcX62tc4wcPT5WUKxG/zxcWAqETvDtguRIQSSlARKTK75lw\n",
+       "5fxuEZ/zMaUq2KqgXJkDh7NA29n9cBgCk0/4KSPkHGYw+2vPkXSbzY5pyuz3PSFklNTMIxwog8Dv\n",
+       "M1MPaYRpFxg3iXGT2Z4H9heR3TYx7DJ5MHRuha0cdV1jlCbngFGGlNKcvtN1DH3kzu3niKHQtSs+\n",
+       "/vFP8eTshLt37xCLxocJU6/oug5KRsoOKQvt4oC2bbn7/Ieo2262GRAGbRwYg65W+HH+fzmzYL/b\n",
+       "YYwgxMg4bGk6hzECYQ1nTx5TSqLfXGKcY337Fq+cvYXXAeMa0IoiBcrZWYGbwShHi8LoiKsC3Vph\n",
+       "qkKRkXGMc6ZnyPPehZ+To6YxMewGNpc7dhcbxt1E9DMjpZQ0K0zNfKelVEFZiaslbatoF5b1UYWr\n",
+       "A+1CUNWSZiGpF4a2M7hKsFrVWCNQVaZZGA5u1DSr6878Gj+YeIoWuOVqUxKUnh0RBSBkIReBLAJK\n",
+       "Qcjy3px77tTnW/HEzBsXMqKMRpmMk5HoM04bjJvVJv02Y6Qia5hSpMoWskAKjRKGcexJk2CaBqZx\n",
+       "P2/CpitPkRwRk0AISBoUmZIEOQp8KGyfJLROVHWhVh2VrGi7Q+Swo5SErQyxj8QQqauaplkyek/T\n",
+       "NRwcHrNarUEann/pY3zx81/kxq0V/aMRy+FsAFYstZ3VnMM4sNlteeaZe5iq4uLsnXlerCP16hid\n",
+       "Rt7Z71BCIaXHOYdEEvxIKYKj2wc8euOMzfacupL4aaKylu2jE7p/9l/g7d/7W4isZnfFNPvKK52R\n",
+       "xuKTx6ERImGUQsV+7o5VISrJJMGRMb4QzEwdLUkSpjw7WJZCTAmnLSKBkYpiJabMNrZFZbquoesM\n",
+       "zgmsmQNGtJJoLRFkwJOCIqSJLOdgCqkL6Nm7HpGJJZJK+uMX3jWu8WcUT60zL2kenSCYrWZV+f2o\n",
+       "ilJAFIQUaAPGKKw1V9FizOpRId9TL7YLiTHgGo2SBu0SVS1wTkEupJBm462Yrtz3NClFlssF6/WK\n",
+       "i+0F+8stIYQ54m3MpK2g7AQxFHKQaAHrlcE5NY94JkH/uHD+zsT2NBIn+PSNjyLDQD9uKCKj5TwK\n",
+       "ss4ilcGHgBQgqiW2XXC53fHqN77C2aMTnnnuBX7tN36dJB26bohZUDWOnA1n5+fce+6HyHnmko/j\n",
+       "SCkNWcJ6cYPDG7e4uLikkokY92g1R6vttqfst2e4WpH9wMXZI6bdFpTDDzuQkmefvYdeHCGiYjvs\n",
+       "6YceHzzDNCKAPvmrsZPAFoMWiVTilUAnozQIImPwM5tllOAN3gtSkEihAEXjOhrTzN70uVBbQ20y\n",
+       "ba1YrWua2tBUDmcNRjlkkRQPaZJEn9BYnJ59XKYpMQxhfr1jTwgj/bBl3++ZfPjA1qwQ4jkhxK8L\n",
+       "Ib4mhPiqEOIXP7CTXeMa/4h4ap25YKYaCpmvnPDkrNgsCkS+UhkWFitDjorgM1rPAqGSr5SiQlF1\n",
+       "sFg5zi8DSkVMLUEIrIUwe8BSCoScqEpE6oCQEWNa1l1FWdVs+55HDx6iXCFPfvbqiwmUREmJFJn1\n",
+       "DUx/IjEAACAASURBVEO1iNSLBbtTwfZih99E6soSleT20YIcJcFPWG3Q0nF5ekoIsx9MVdd471HC\n",
+       "Mm4u+NK3v807b53wnXdOiD5x6+Zv89P/1M/y8PEFK6XZb3eM48Sqrjg8XKGM4M7t27SrI6QRhGI4\n",
+       "vzjhY596kTRN5BjRKnO8PCJFxeVuQ+ssGMO0n4hTZIw7Fsc1Qhpu3V3SHqzRFUDkzTff5uzynP3l\n",
+       "lhQ8VgtEbcllB0nT+5HaVYiS0cphrUBUEl3EbE8cExlDZv4sUiKjUUIgmSmFRRSUnC/IdWuRWWMr\n",
+       "w1QiOQXiIOiqg9m6OCZ8mC+8gYAKnl0vudwmzvc7gvdoZRBKIaREykzbVjT1B+pnHoC/Xkr5shCi\n",
+       "A/6hEOLvllJe/iBPeo1rfC94imMW5u4OgZBAmcMRZgikTNR1R7cQ5GTYbnfYpIk5gypYofCTpzt0\n",
+       "1F3mcueRahYJJS8QOmJEmSPjckEWiTEWZSYymkV3wHK1oGtramsZ04YxnqMryCiUEQTkHPzsBKtn\n",
+       "BetVy+5UkoZC9AqtLKvqiCwD/S7y+OItDo6eQ0vH2fkTQr+nbVtyKWTfUxvLlCUPnox8+2Tg62+c\n",
+       "07bHfOvh63ztrXOE+QKrO7dZbT3LznD74CaTv8QohTOWUEAbS1KKMSZM3WLqms2TR9y+fcyYC1ZX\n",
+       "7PuBQzWbku03l5RqwJmKo5tHqGy4f/8xybQsDhYEDDlcMEw9+7Md425EmZliWMjMHxM+Q4gjUneI\n",
+       "NMvztVZIK1CVnlOLUpij9kqAcrUnIgVSSYydPWKEsOQ8kUukriTWZsiFKQikVMQY0KJm2sPQB/wY\n",
+       "WC8DsfT03rDdeDabhLpSwQoKj8/OmKae5YGj69oPcM2WE+Dk6vFOCPEys7ncdTG/xlPH0xuzFK5S\n",
+       "ZWbDLBBz5idXc3KjaLqBeqFp1gphFaYGYwXWKqrKoI3ANhLpIqqefVRMJZCygNW42rI8NkiTkGSE\n",
+       "zQhd8HGLTxNNW7FYrXjmuTs88/xNbFeoDwT1KnPzw4I7H5HcfNFx9JxjeVNiXcLUBbfUVIeCG8/d\n",
+       "4PD2khs3jqGDumkZQmIIeza7S3q/53Jzzsmj13nw5re4/+bLlHHLzXuf4rTP3L73Mf76f/SfcPve\n",
+       "j9Pdfo5f/fUv0rUrXvrQR9mkQiqBNHlyhLqtaJoWt1igzIptSNx65gUePtmiukO6Wx/m4MZzuGaN\n",
+       "vOLhay1pGk1lFU1bce+5Y4a0o6oNt28foESgbiVEwcmjx+xPd4gouLm6zc31XfSVyCeL2SelFEOh\n",
+       "UFVzrqpUAqFn62JjJULKmbXi0+ycqCNCBJTKQMFaTSkTMQr6foeQI4lxvttKIIpEBYUOmrpUKCnw\n",
+       "fWLyA0VItNpjZCD0e/w+ESePypo7Bze5d/vD7Dbw5GL8x7J+r2yhPwt8/h/LCa9xjf8PPFVqokAi\n",
+       "ZHmPepjzTE8suWCMxtiAqebuUCuF0LNLohCSLMXMzpAzP7lpKnyMSFVQpuCqubjXqwJhzug0misB\n",
+       "S2C/mQAwTqOsZH1wyPngSN6j15p2UcjK0o8JbWpMNZB8wDQdevLoArqG5bomqUKYdsSYUEaSUsAo\n",
+       "zRA80zQRfWSzH1kdLnj2pU/xe2/3VOsFv/mbv8NfevCQ3335y+B33Di8RRozw+QRORNjxgfPrVvH\n",
+       "dE1HVTuKWZJT4BOf+AQP33yDy7MzXKtZr45xpsG0EVlusdme46eBAmilqLsFRle89JEPcf/Nd1Bq\n",
+       "9hpfLI44+84blNHTqArrLE4ZFlXDflqwLwOlgBEOLQuyFJQxs6DHaLQps5d8nv9uhVnipfWsDZBa\n",
+       "zX47JWCUReVCibOMH5XnvZGi8FPA4BmyQtkaZxtsHJl8IgSPkhMpRXLhalzlOOw6rFC01YpaV3Su\n",
+       "4c3zNz/4lTuPWP5H4N8rpez+3z9/7bXX3nt8eHjI0dHRB/6arvFnE8MwMAzD9/TcpzczlwXkPEN9\n",
+       "d7yilJo7dQWmEigrkMbPGZRWYFxFCuGKmjirNp0DY+fZ+uP9Hi38nFWJZLmQIDUpjchc4axGC5hC\n",
+       "ZH/6kHvP3+Ho5hFSypm6V0kaUyM6S7t0uGrJdjdQRMFWI2TLZAeyKzircI2gsZbJjmhqkvLENJL3\n",
+       "I+M04oxh22+RxbE4avnYJ3+U+uhZfvf/+NscH3c8PH3Ev/VX/xrKSv7mf/43+V//+/+Btx4+4p90\n",
+       "Fu80phhk44l+RNuK7Cz1Yk1Ie377N/4eb72x4fHZEyoyTRv5+J97iU9+8rPk0sD2DJEK6+UxiYBV\n",
+       "ljFusFXL8y89wzQFchFUqwVvvvk6n7r3MT73yjeoXUulDV1jaahJacl53qHyQMFBsbP2VoCWCSUN\n",
+       "oObov5JIRWC0QaqEzHP82xxMUagbGHeJEAOJkRAFlZ2pj8NlYBCJZ247pBVokVnUjogm5YGSHaJE\n",
+       "pIiUFJAYrIwcrI5Z1YfkAqvlIXVt+F1e++OW3p9s3QphgP8J+G9LKb/yRz3nOvPzGu8X6rqmruv3\n",
+       "vj4/P/+uz31qxfzmnYazs2FmtMgrKiIzZdHognURYzSuBiESbTu7E+q2wXuPlIrtpqC0x1qHJGOc\n",
+       "JHmJNRIhPNlUVG1GW02lW7S0KBmJvnD+YOLRozNu3tlDkWgtaOoOFyxK1KwXFaqai9boRwyWojMq\n",
+       "FZSdMJXCmoJwYCporzY493FCJY9VmnGaCDGhhESaBbZZsugafBg4eRgIQcwOgjny5/6Jz/LFz30R\n",
+       "GZ9giqTf7UnrI6qqRtYVi/Ux1eoIDygheO31ntf7yHMf+Qz7y0te/drLNO6SxfId1ouaqu7mqDgE\n",
+       "hsK+P2PZHSG1xo8TSmba5QpVN/yD3/ttvv3Wm9w9vMOTzQnGLbBO0RXD1BtU1FSqhZSwUlK7hoVN\n",
+       "KLnDpJn/n8WcBqWFQSlJUye0TIyDR1ChBagyYFRi8hFtLSEktMzkNLNicnL46ABBYh7xLGyDLy1C\n",
+       "9ahSqJSirgO1AKH37Mf73Dq6hZwcojbcEOsPbM2K2c/hl4Cvl1L+iw/sRNe4xveBpzYzv3XrJkdH\n",
+       "7Wy8VOBdvw1BYbGsuHFjQWUdUhaUKtRdwVYZ12TapWOx6FisLFW1QBsBQiB1IaPp1hbjLLJI6lqy\n",
+       "PnTYOqIriLFQpowymfOHPScnJ1xcPpqtZN2C2lmcW6JlgzKGwogU6mojz6BRNK1m2TUYbSjSY/Q8\n",
+       "6z+oOiolcUhyyYRhS1N1rA8WHB0uaaxCpolPfuyjPHv3eQ5aSQqeGCb+w1/8a3zrtW9AKkgjWC8X\n",
+       "ZBUIKWCqFm0NarVAS8eXP/cNXrnY8wv/zi/ya7/1Ff6Xv/V3+PjP/Bjf2GlyXlMwswpUaiBhbYPV\n",
+       "DTFGnFA4K+mahvXqgPMnZ7xx/1VuH92iqizdomb0Txj9ntrOkvulckgm6txhjKZpHXfvHPPx5z9C\n",
+       "69azJW3M1LqiUi3domHR1CxqTW1riKBFQcmAtRU3Dw5YOIcUiSL3ZCJV3dDWx+z3ge04h4MoEzE6\n",
+       "s+hWtHWLkAXlMstO0q4VdS2wVeJbD76CdJmcB6z7QNksPw3868BfFEL8ztW/v/RBnvAa1/he8dQ6\n",
+       "84ODlraqeb28wZPHe+awIomrLE0XqVuFM7M/SikDTbNgFwNVbfE+YlVmIRuqOqCNZRz6WRGpE21r\n",
+       "QQ5oA0p1WFdISgKBfjfMxQdNTIVxFyBFJr9DGD139bJGC4vOBsUJqMLCLRmypzBhbcWqPiCNFm0K\n",
+       "Wk9IZiHSOASKH8k+EjNUtkYZy+VmxyuvfImXdM1PfeJ5/tNf+mX+mb/407z88jdZHSz5zGc+y3/z\n",
+       "X//P/OU//y9DAKMN1gj82ROqm88QU0ZEjVOO3/jCV7hz6xb/0l/5BYb9jt3mlNe+9TovfviTiO55\n",
+       "LPcZ/Ejoe1arAzKZAvjQE0KLUgYpE+29j3H68AGNMjS3V5xf7NHNim1+CLKnMhlnDUJHEBVOKBZV\n",
+       "RdUaOtfhuiXLgwUPn5zx6OFjFtbQNWvapaaULSJJor8kjbPLYSoB5yQq16TSIOQeazNTibSdRcSG\n",
+       "9eqIfurZRU/jwNYCZQq2lrM2WCuObuhZ6eoKmoqkCg8u3uR4fUjK/Qe2Zkspv8VTVk1f4xrfDU+t\n",
+       "mK+7jugEQzhm8JFhIyhMFEZcbWma2TQpCD17ZauEGgQhDnAVAt3UlqYGZxTJO870lhw1tpKU7JAW\n",
+       "rBA0rmVkYgg7Yu4Jk6Xkkefv3sVlS0mJFKB2hpQTMvdQWpQaaCvJ6CFHkNIgSqZSFeuqJasKY0Aq\n",
+       "Schb9sOGKmnksEcrS9Jqnh2XQJ4mii5882tfoD6+w7//b/wc/9Wv/Cqf+sxHGS96fv1//z/5d//N\n",
+       "n6O1mu3mMbURKGvoDtYoaXBVja4b7n/1Tf7OF77IT/74j7C7eDgHQZ8/4qd+5Ee5fedDvP7OW7RH\n",
+       "O95+9XfQtePi4oIXP/RRLraPWS2P6Ydzbty4OxP9nUGWyIdv3uWN0xNuHt1i2hum6QwpQQqN1QGn\n",
+       "KiqnERmECiyWNVV7TJcs7XAIpmNRdaSpp7KOdpFIpaWkicsLR4iZaTdyeKuwWtVMynK8foExHRHV\n",
+       "G1zEDU1zQBxgvezohOPNh2+AtNh1oqo1rlbEHNCqxlIjxYqcJZpu9upJI6VMFK4VoNf4wcRTK+au\n",
+       "UmhVaDtD22mII/v9TEusW4F1nqoS9JeeuqooaqKIQko13o9EFTk8XKCNZdGtEUi2yx2ncY8xcwqN\n",
+       "tBlNhVIOa6EPp+gqoUzi8HjB+mBJSZkkRiqtMVoSQiamCR/3ODFH1GmTISiEsDiTsVpRm4osa5xd\n",
+       "kuVjfBJs44RTFmsdQz9g644cC75MTOPIZnvGxz9+g0ZmvvP2N/jX/vmfmROBkiD+01vGfWIYMxdn\n",
+       "D5iMQq0aVnWFFwXhE+LJCSdPzlCq5vDoGGsUOXh+7l/5Kzz/4rOk4lm4hhd/+Bn6J69z/8Hb+NJz\n",
+       "cvKAtm3Zbs9plwtSyXRVA+NE3j6hqRx1lXkUnqCMoC0dRe9o2jXrKy947QS6gJQDPlccryqKXmMu\n",
+       "BnwyVHJB9hZBxlWRlHp8XzAqIbMkhow2hcyeu3c+hUZRpprl4hYh9KR8TtPVZHYoA8uFYkobsgCh\n",
+       "PUp52qXEDxIpJ8gDUqxnewUpUEiK8NTNtTfLNX4w8dSKuRSRIDOucrSdRBQ9Gy4JiSgjyEyREm2a\n",
+       "OShBBZrO0G8nvI+MKVK3kuWhwlQVK9Fy6yiR8n3qNlFEjyxH1HpBSCMFxWKxIIYti8PCvZvPY51F\n",
+       "icK+97TNksSIEBNDv8HompB6IGKkJMmMVYbKgTWSLDLaKGrTUuxE3gdMcYyTRwtJRHNwsEThsCIi\n",
+       "lGLzZMfDh29xQyheOlgSxOzF/vHPfIr733mLh2+8yjuP3+LG4Q8hSqGuDP1+ZFVPhJSIMVE3lh9+\n",
+       "4S4Rz2c+8XE+98Uv8/jkm/zYj/4Fmrbjo/fuUh8sef6lD7MdtiipGIctWkvatkZri59G5PEhOQyk\n",
+       "YaRbrDj2NzE5cx5GRLBEt2LR3ODgqOLh+XfIGKbtFmMjRQpKtWC5WJAZuYFFlsDU7/FjAenRMlKU\n",
+       "pTKOysKoa0qZZm65vI8WL9A0HdJMNE3Ffjui3ewiqRIcLisutiOIRBEOsLRtwuiEsZnsJTkFsk8o\n",
+       "E1CiRjIRy/UU5Bo/mHhqxTykgXHqESJSN4I4FdrWkMuEMRKlZq5yZRVCFUoWWKfwg8SYRNGKnAMh\n",
+       "eEBQV45bt55n0R6R9RuUolCyQ8oWXyTjtEUZT91K6grW3RKpBrRr8MNAY5coJRn8E8Z0QZ1aNpsJ\n",
+       "1EAOFdY2IBJSFIwqyJKwTqKNQ6oDhBMcHd3B5UzY7GkXgVIEpq64ODshTz0Hh0fU7QH7aUsXaiqx\n",
+       "5+j4Ng9e/Qq77SUxDtSu8PD+m9y5c5NHb++puwXy/BSpDb4/w1R3+Q9+4S/zS7/89/ipn/4xXnjp\n",
+       "Fs8982E22wvW+5FnP/sc0/YNchg4PDjidLNnHD3tyiB1g5AapRXSLCmP38EAd82CannM65sTQiOJ\n",
+       "MiKaIw6WN7Gmw1UNb73zMlY7dttTUjKkpJmip20awjBwdGg5ixbSOSVOGD0QZY+rC3UrGQaL0gUh\n",
+       "EyGeYu2KVDzeP6btKhp7wDjM7KScPMZYVk1NCBfovKRxHdYcQThHuj2eQg4RISW5RLSKkA2iXFvg\n",
+       "XuMHE0+tmPfDJfvpEqE0TQM5OGoTMe4Q7UaUHRBa4mpNChasIDMhpcJYjSwWa+b8T8QcH1a5ioVb\n",
+       "44VhH75B626TkfTTSMieKQqqRqOpcN1jvNdsLs8hZ6ysQM92u0papBLEFJGAjxNKWaxpcZWgspoi\n",
+       "IkbPdxOowAP/Dba7S55Rd9AxEEPCWss0DmQMRSgenDxGKmi7Q4QynF5ecrHbk/2AwLDf7Nj5wsX2\n",
+       "lJtHNyhtjRQK70f8xSVn5zvuPjNi3B1+/l/8LG/eH/jK+Rq76fns7UN+/Cc+TT1e8tZrX+Xi4hKh\n",
+       "LYuVxWjP5eUZXVcjVYdUhrI+oGwfkaOnNYreVFTtAUZc0MojdmLLsruJUZqD9W3OH53Qp1NClvhh\n",
+       "iyeh/Z6SEipfMHiFVY5iWrb9OVZNKG1oOiDVTNMZMUpCiOzSOePwNs5VaFNom4qsEkYvyamg5BE+\n",
+       "PqatD+n9JUoJpFBo0WBUJqWAUgktWko2mDSQgqeuFsTx6aeAvl+xaovF4n05DvC++rzfu/f+hWb/\n",
+       "cbzp7wef//z7J8g9OTl534612/0hbdn7jqdWzKdpJDNCKlhjcJXE1g1GW5yJWAwiBipaQlGgIOsJ\n",
+       "4wIxaoyU1E0FAkLa44dvc2P1I1gcXX2TdH6BkjVGKQafUDoi0ohUEik86MTYP2Cc9kjRIXVE6Qpt\n",
+       "E00ryGJEFMPoBWlMGAWrZYUYJ+rKMYUzfJk3Wg09UgqcrdhPAzZHiHMYRZhGLnfnuKrm+eef48nj\n",
+       "J5ydnnJ+esYLH3qJySeOD28y7C9IRChwcHiTdnkXciAWRX95wXZzRt119OenrG7d4N6P/gziH/x9\n",
+       "XnrhkywOj1gfdGzOT/mHf/fXSP1Ad7AkxoQWgsWy5fRiR9M0UBIHd+6BnS1ynZLEXFg0HbeZndy3\n",
+       "YmQhF2zHM+4e3aPuOj790Z/hm298nvPdY0b/DtP2Q8j1ElMkIdaI7NFoKAfYzrDtXwMZMcrQLBLH\n",
+       "YcVuSJSUGUNAiz0gUVpBVjhnibkhZYHUiVY+h5Q9wt3AGojJMwWPDwMxBYzuEMkgRIWsA33uKWJE\n",
+       "q+ZpLelrXOOp4inGxgFItARkoW01hgqjJaZyUMIcMQYIYygloqVEqoy2ic41LJyhiMI4PqE2R/TT\n",
+       "Q3T9LEaC1oYx7FBKgInY4q6KhSfR4/PEYnkHRGCcLjEKsvQsmmNyeYiiYkwjMRqymhPhU8wY2ZHi\n",
+       "nqrSpHJJCTsQE3VpWVVrtFToEtluHxCGkc1FT4iBEuCtb72OVIp+v2PXB1586UPce+4OzimeZE/T\n",
+       "RoYycXjzGZrlISkEtJEkAmV3gZQaKT2rpmb/xivcPuqYpsDDb3+Zt78xYbWiajqenD9A95CpsG3N\n",
+       "o0ePoRTOTk6495GPIg4PYL8hl0KYBnLwVM6xkJqhtIxJMurCfveI/aLjWByij+7SD5/A1hO7+E38\n",
+       "7hTrHEVacmnwuzOEvzH7uKuGTj/L5fQWuXhKtrSdQrJiCAHvN5hWzZF0ITFNPUquKGJEihY/RW4c\n",
+       "tvT7fnZljJcgFVPOxBTIscK6A6RJJF9ANCB6NttHrJrnn9aSvsY1niqeHpvFVBgkQkVyziThca5G\n",
+       "a4GQhpQsOZer8GaNcgKtK5QYMarGGIvWFUVuKezxCcKkZl53gJgz07RFKYgx4eyCXKCEQsgj1rRo\n",
+       "IVgf3eTB42+QxSXQopTDWEHyPQWNFookKpRKTGPAmRqEAenJKRHjAEiCz0wmgBCU7On3I9N+zzBM\n",
+       "FG2onGNPprYGkSW6XaHrQ/ZJc/K45/atFwjuki/92ud48s7L3P+a4/kXP4lt1rQHN3HNEW2d0WrJ\n",
+       "fvOY6vmP8PhLLyOtpnhPUzlO3nkH6wTt8piqrtntdmhabhyt8DFSYqLplqRhjx52kKFtWvbbCxyZ\n",
+       "ThtyCEhZYaeJLGqenH6Zo8VtKnFIVTWsOSD3Nwn9Ewa7wgpPCJGkMpv+DTpzE6EUOSW0aAiMsyLU\n",
+       "dSTfs91PKNlhdY0UhlIKu/0pUnaQO1IvUUXiB4+1ljwZkoc+epKYQHugINQaUQy59Axjf0Vf7dlM\n",
+       "rzytJX2NazxVPLVibmyFwKK0x8c9U8ngPFJpyIWSAwKHFHr2OC+FgsVYgUgdlbEok0FmpH031WbL\n",
+       "dn+C0GsoE1OaEOM8AhFR0Jo1wlj2+y2qWAoRZQztQrPzG5wtc06ltCinKaMkaovKc2pRyp7oIyoV\n",
+       "hErEVCAn9vs9Y+6JSXNoVySVaBer2X9GOVIR9P2Wul1wvu+xpuPjn/40BzduoZ3m2Rc/RtVUxHce\n",
+       "8OwLH+I3f/Xr/MSnbvLmK7/Dzbsv8ODBt7j97A9ha8HBukFbSXV8zDOf/kne+fqXWa4XxGlg3bWE\n",
+       "DALJPkS6w5tM00RVLSn7LbazsyLTtpQnJ6QwMI4Di3pOvV9rwzhOLIqmq44I5ZSTOPLm219jUT1D\n",
+       "8IkYJbrUDLsdyk54uUNh2W335HLKxXTBWi9QFsomQp55+MpZmmUilBU5RtqmYZriXJCBaRwQosxe\n",
+       "6qLm7PRtlgcLfLwgy4hMs++LSA7jFIKANjUxjnPEHx5TSRAfXDjFNa7xpxlPrZhroUA6hACjIyp6\n",
+       "Yh5x1vB/s/dmMbel+XnX7x3XtMdvPGOdquqq6m7b7cR2xwkRgcTECkJOyBVESDgi3KBcwBUi5o6b\n",
+       "CHGDhJBAkBvCRUQgggARIQkOKDZOHMvtod3uqbq6qs45dYZv2NOa3pGLddzpeOiuNn36ROnvJy3p\n",
+       "095rr7Wl793vfvf/ff7PU5g1/TggpcCPHhjJIWNNSRBTlmfOCiE90giUVrghgRjw6YphUMDkihhi\n",
+       "QIaGsrCkGChMg5SOruupqoDImdXijIvLRySOyEkhlUJLQTKCEDKlVSAc47CnS46qrCjGTIgJIzI5\n",
+       "Q0gte/chjZCU5bThOT++xe3X1pTVjKZesR899157E10oVk2BNYphGHn0fEc3POfu65/gR/9wxVc/\n",
+       "/2UuNh9x+2TOL3zuIX/oj/4Ey9Ml9x+csd8dmN1/B/QdzK0z9Be/yuX1BUpkQjUjHHZ0uy1nD94i\n",
+       "Ks1sdkSKgUoVVJXBzSx22BOcZ+j7yRdcKXwIJJGZaUvwA5u+wyhHaSyb4Uvs2itwDcaClDU5dfTb\n",
+       "R0itydkyuAMGS1EKIpcUpsCWEt8KggdyR8oDdW3wftpoRgRillirGcYDSo6EtGCMmbKyDO4CKWcY\n",
+       "u0DIwK57is4KAaQ0fQF470k4hLBoaZHypmnohu9PXtlkPmm1BVJpfMws6xl92GNtQWVmCBlJYkNO\n",
+       "EH3EyBVGT5O9zAKtLTJDTAEjSpQKuHFgHA2FPeDHSM6J6CUhjbTRUxQrfDwgVWDsJH14xok9Z12/\n",
+       "xVAp9oc91lQIkREiYbRhkB6lzbQST5mu3eFDTyMEkmIKuxAD5JGkPXbVcFbdp7JP+Mqv/xJPP3zE\n",
+       "/bc/wdXmORcX1/za53+J+XzFj/7Yj3Hn5ITF/JSybnj79h3arsMWlp/8s3+WX/yf/grbq5Gf/NM/\n",
+       "zZAuePDgLuUnPoHe9iS1QjXn+Kv3ORjN9eUjaC+p56fY2QKODWk8sN/vCfMFhbUUtsBYTe4DY+zJ\n",
+       "WWKLhsPmCToHEJObocgSGWApJKmQHLLCFbBvd8goUdEiRcPQP0X5Eec9QgiCj+ScUaohFz1RHSiK\n",
+       "BYdeIoLB6IJ+vCZnhbYJYsCNLaYsEKkgiQ0xOIyeQ1K4w4iRFaYSVLYgREdt75Dlc0LydO6C2t4m\n",
+       "JkuKEaUTwUPCvrQxK4Qogf8HmITv8Ddzzj/z0m54ww3fAd92MhdC3Af+KnDGFNH53+Sc/wshxBHw\n",
+       "PwAPgK8D/0bOefPiNT8D/AUgAv9+zvnv/PbrWltOqgZtUMoSwogXCq3mCBSFbRidAzFQ2obSLrCV\n",
+       "RWtDLzNKgHcO5zVWzrBa4NWO4D2X189AQsiZ6DPeRZLShHg5BSkYCURyVhi1IKUDVbFks9uTUaRo\n",
+       "QB6IcTL+knIyq9qxJyUPWRJiREvD6LdUleWTb/4oh6uK4TpxefiI5BLLW29hxMiv/8qvcue1u/zw\n",
+       "Z/8YX/3yuxzfvs3y+Jwvfu2rzOdbiArz7ld47f6bDIcD61v3mX/iRxiurymPSm7P7xB85gv/+PPo\n",
+       "ouL2/RXzlLDVnHVd8bUgMM0JV0OPdD0iJPqyIRM5u3MPKwTbzSVv/MBnCYsCuekIpsENI94HrJrk\n",
+       "i0SQZUkpE30YISVquaILiZSvyFkT4hItK0JI+LTHjx3BKZLUWCGJLZw0DbbaIfXIub1FtzV4p5Fi\n",
+       "wRCuyDhiMHQuUylBYwXG1FOotwRtDFloRudRtiJFQ06JkJ4TfUTZRPCZgStSWlIWS8gQ8jCV5F4S\n",
+       "OedBCPEncs6dEEIDPyeE+BdfeLbccMMr5eOszH/X3EPg3wH+bs75PxNC/EfAXwL+khDiB4B/E/gB\n",
+       "4C7w94QQ7+Sc/ykBsNE1WfYoHZCiptAVQz+ihcZai4tTCSQ5gZQzlLYoJZgXjsyenASHgySEgmjt\n",
+       "FLQsDhwGz0g3lWxMSc6C0Qu6MWGDnNLfEbiQSVHw/PIRt8/eJsbMqjjDhRaXA1mG35LcIHJC5gIt\n",
+       "R2yRSHKPkhbouXPrExy2l1xuHlLK1zArhdsLApFyvuLyo8f8sZ/4U/z9n/27fOU3/wr/1r/3F/k/\n",
+       "/vb/ydc/eMgf+fHPcrQ+Yb8bWCzmfPj+B3gfCYcPmJ/dw9Qrnm9a5rMlF7sN83nNGMGNI6nbkIzg\n",
+       "7O3P8Cml8NtrfPLIGHj88AOunz/h+PiYdncB9YJbr91HCI/dedxuj3d7unZHMT8nFxJx2E6ek7zd\n",
+       "vQAAIABJREFU5TLTaIlCsY+SBsOzXKFGza694nhxgvCZZXGLznkQHp8Dw9DhMYg+UZY1s8UcoTK4\n",
+       "TFlUSGp8nxjdBdaC0ooYPCFMKVNaSdCZupgjo6XvAloUXF0/JlC/sAIYSTli5YxKL3HhCqUkRtdE\n",
+       "p9Amkbz7bn02fldyzr/l5GUBBVy91BvecMPH5NtO5r9H7uFd4M8A//KL0/474P9mmtD/deCv5Zw9\n",
+       "8HUhxFeBHwf+4TdfVyg9deslkFqirYbdZtqws5GUPCIWxNwjpcDayfcj6wIzOzDuHTFlhs4y2kxR\n",
+       "CJQUSFnRKEXvRrKMpMA0QYYwrbpFYvAZN2YUFVLC1e6rlHKB1gtSXLDvR6Tp0XYkIYnREoJHCk1T\n",
+       "zmlmx2R1xXL2Cb7y3q9wa7WmLE9JOeByz6I+Z6WPMcqQgufDhx/wyU99ioTi7/3vfxNtTvjxP/Qj\n",
+       "FHWFCyO9a9k93E6hGwHGYeRLv/yr3H3wGtfbLV9jpFAe5I5bd17n3JbE0KOSQSzX3HrwKZ49fkj3\n",
+       "0XsMuy3HpytkcCAz1xdbxDHce/0tRNkQ+x3F6Zr2/SuG/YZs95SuQpsSYabSlROCIA6IMCK9YJZW\n",
+       "XIw79ocPWJQRjKQ0pwgsMT7Emkv2bUsUidLO2G08hW44OQuE4JBIZDJU6pyZibj8HsZoZnWF1Bql\n",
+       "4otQkkBIG2pdMG9WkC1ZRvb7D7F1h9JQ2hlalwgpKNQRkBDSIZREOE9WL3cDVAghgV8GPgH8Vznn\n",
+       "L7zUG95ww8fkOzKy+G25h+c556cvnnoKnL/4+w7w8Jte9pBp8v9tF4uQBSGBQCOlxdo1resZxj05\n",
+       "Z2LypAAuOsZxQAqDRL/Qm0eUFvgUCWFyU8xkkBayxuoCrQRSTvFoOWcgkHJmGDyH/cgwDgQHfddB\n",
+       "rtBak5PGj5ExOCBATnifiBGktpTaorXj/OjTfPDwS2hZklPA+xalLZEdu/4JIY+89967yGxAzVDF\n",
+       "jM3zh1TljH/7p/8CX/rSl/jw/Uf86q9+nsNhh7WG4D2PP3rEe1/9ClppLp8+pS5LYhCUzYx6vkap\n",
+       "kovLZwTnCb4nuBEJNCfHLI5us5wtkEmzWCwJIVHYTFkVU85qIcjVnHa7wQ09ViuS84SxI7o9pIAP\n",
+       "PdJqpFAU0lKLgjLXpChw3nI47CFP4csxRkQyCNLkKS8zIbQMXaTvPH0/acj7bkRIhdElhZ6hVUPK\n",
+       "ibK0iBeB3inlKbuVDmOgrmbMZ3MKOaOQK2QyyJwJsUcpi1KgtCclT06Brj/QtdekOH4nQ/o7Juec\n",
+       "cs5/ELgH/EtCiD/+28/ZbrffOIbhe5NJesM/n3jv6bruG8e34mNvgL4osfwNptzD/RS6MpFzzmIy\n",
+       "JP+9+B3P/e2/8Y/RWhOi44f+wAPe+qEHHM3O+Wizx+VrVJoRosOFESk3k2OfnlPqhJEGp93UJGMk\n",
+       "/TBi/RaEpLKa0Wmk0JOcMY/kDELGqVsQQQwjQz+QgqEqAtZrBr/B6hJTCGZVw8ZJYk5oAzEkBjcy\n",
+       "+kQzW1JYy777MnVxzKE/sO87BjGwSmvq4pST0/tcPnrCanWEP0QWR8e0+2vWZ69xdOtN/tv/+r/k\n",
+       "zU9/kqHfU1iFIHFx+YRf+9zneeetTzObNVTHZwwu8oXPf4HXX7/LmAqOTwrKeUE9L2n7jkU949CN\n",
+       "HK4vif2eduy4/enP8vXP/X0unz2hLA0xGVar21T33sY/fRcdIuP2wGG3JfueujAIIZG6RAoQUuMO\n",
+       "W2wKpPFAlJneO2RskSgO+xYhNPPZKVLUdK0gGoXSieQCCINWJ8yK27huzzg+wQ9PmRcrpLAooYgh\n",
+       "oa3EVgW596TcoZQlpUnRNPgdi/Ub6FTRl9DtHdFP/0stAylfgTgmxsw4JD744nO+9htXeD/A98ho\n",
+       "K+e8FUL8LeCzTL9Kv8FyufyevIcb/vnHGIMx/yRw5VstDj7WZP5NuYf//TflHj4VQtzKOT8RQtwG\n",
+       "nr14/BFw/5tefu/FY/8UP/LHTyhtwW645uxIoVWJKWtkqhnHPVqBD4kYIj4dGA3oUWIKhbKT2kTJ\n",
+       "RF1ZxjgSc6DUkiwzIQaE1ggRyEmSsybFgegTxihgKtskn0FkNts9IQ0YUVDpYwprsGkGaYc2AaJC\n",
+       "kklhJGSHSxklMyiHlHBwGypW9OaSpV6xufqI+WLN5vkeIypySCzXpwQX2e+2/Kmf+ld5/Ojp1FFq\n",
+       "DNfXVyilefONexQ2cXz/NqWeGmrGYc0HH77P2z/0Q8SYaduW9WpFURQILSnLil5KHn30NXSCz//8\n",
+       "Fzm6dcKDz/wBDo8/4M7rn2R97x3i/orU3Gb30Zfo2y1aakKevmO990jrKbUlC6gXDd4bblcWediy\n",
+       "p+DarNhpGBMM454YHTlpDu0W6i2mEAiZII1UtWTXXrPUkpQzQnfs+w+p7F2G0CKkIOcEMlKWNVIp\n",
+       "cnIo6wheYK1ms33E0eIeKI9UAu8Eh25AhzQ1iZXPiAjwd3njU2fcecvS9x3Rl/y/f+vxxxnW3zFC\n",
+       "iBMg5Jw3QogK+EngP3kpN7vhhu+Qb7uM+Ra5h/8r8Odf/P3ngf/lmx7/c0IIK4R4A3gb+MXfcWOR\n",
+       "SXlkXs/ZtdeUxQJbNlhT4X0gREdKAYMhB0Hbt/Rji3Oe4ANudCidKMoMMuBSAikJYcT5EYRESkWl\n",
+       "T1iVpyyLezR6jnDT495lBBYtZ8zqOW07cLW/QFqPKkZyTqSoUaLCWk2IPaEV9NuA77e4cUSryV4g\n",
+       "5ohSls49x42ZUpa0mwvu37pPYdQUjSc0RydHhJj4+X/wCwz9gPM9T589QWtN3/domYlppCklSmdW\n",
+       "s5Lz20veeft1RrdhtZpTVxXDMBBDoNvvMU1JSgMnJ69ztFxy79YxYuyI3vH2v/AnObn3AHl2glo/\n",
+       "YNQK25zRXT0l9heUtpgyV41Ba0Xf98SU8D4CGtcPNFlxbCpKsUBZQ1ll+mFLjJG222ELMzVfSSiN\n",
+       "QqiWEK9QNpGZJIsgMXqkHR8x5gtC7IkxAwKR5qh0hJKzyTRLLdjvRoah42LzPiHvyThAEqMi9BI3\n",
+       "QnvwxCCJIdJ1B2II+HBgdPvf72fh43Ab+FkhxK8wlRr/t5zz//Uyb3jDDR+Xj7My/63cw18TQnzu\n",
+       "xWM/A/ynwF8XQvy7vJAmAuScvyCE+OvAF4AA/MWc8+8osyit0CqRsiTFGVkKRNYoKfCDJxOIWaG1\n",
+       "wDvFOIwIu0UcMnUaCM4hpUQYS+oGfB+I0pCJU61YSCp9wjBoNJGiGEk4xqRQuZic+NIMLSyFUlBU\n",
+       "9F1kv99SzgvK1uIjpJCRClLy3L/zGZx7ipVrfLpE5B4lA9YYUg6889of5vqjxwxSUKpznj57jMyW\n",
+       "YtZQ1w3ee954/RPcuXOH3W7HxeVHCCHphwM5C1IeSNEhRKAwmqOzBePYs17fxczmzJolTdMwX66w\n",
+       "ViK1QY4Hjt94i+7h18EesX38LovidcqmRCRN9dqnyD4RUo9NmaI5Zn1yj93VQ3wYsEWBVBpjLGVR\n",
+       "AoJ2mBqhlLGoJBiGkXFMLGbnDCmw2z5lHGeM7oAuHQKmPRAySieyPND2gr4FoQ9IpnKVG7a0+0zZ\n",
+       "SBCRGDusytOvr5hJckSlGTkYnHeMzmELEKJkGB1CVpP9cQikBLtuZF5lQh5Ad6QUgJeXAZpz/nXg\n",
+       "R1/aDW644f8HH0fN8q1yD//k7/Gavwz85W91XR8FRtWkmJFJoKUlpTi170dJ1BmtSmJMhCwwtmEc\n",
+       "WwQjIgnAMpmeSzKJofcUyuBCi1BAVohU0DQzunhAG0HEIgA/JKwVhAGMqDlaHLFvN+zDgXEYqSrJ\n",
+       "vKl5cjlS6QVKeRaLFc8uPmBZLzCqAhGQKVJlQZPfQuSKh1//ImVR0uUDJ0efYnj/Q7rkyFpyOAy0\n",
+       "3TVam2mjNSeEkBhjGEdHVTbILAk+EFNgcJHt7hoh4OTsLuvzO+jCUhaWopwTskMqCcUcFSJls0QI\n",
+       "kHc/jY6BZn0LkRLh+TPM3fvIrUfuWt794j9iVlrG0SGlQFlNTJkYE0pp+mEAIckhYLWh8x6k4qie\n",
+       "E1VHDImqathuL0gkQufJhSRZTRIj5IZhDMRhQ0wj67VB6QR6T0wZHzPKl6QskHKHVDsIkbEfCTpi\n",
+       "5JY8CsgWpStCmhRFQhqULEB6cpRkYLPtiXGPUhGpPMErkn9lfXA33PBKeWUjX8lE30WUUuQM3a4l\n",
+       "5kmjrNoZznV4HFYarKhRWiO1YQyB4VAgVaRsJDFGfMhsDx11UyBlJsaBoYdlWUDQHNqRohjQZURo\n",
+       "PzkwaoPQipQsVjZY5TA60nYtzRysLCgLi0grcr4mxB60ROsKhMSqY8a4Y2HvgoDbpz/A5bMvUY0C\n",
+       "PZzz5GsfoBDMmob5cokLidt3jhFC4pzj8vISRCanhLISKacvtz7uud5cYZSmrCqkzOjCIKXCaoOU\n",
+       "GiEFRhVkkUlRI2OPPX2APrTohSW1W0bXUTSnqNUZ8cmXSboits+4fes2Tz74GmVZkMgIIRBC4Jyb\n",
+       "4uGUREpJlBIdBad1QYh7LtOeGDw+HljOz/D9no+ef4QyHTpopMxIHRFY3KBI2SGEZr93LEQF4oDW\n",
+       "K/o2kGNFNVMIIn1/jRaGJAJWNuQ8kJMneMOsLhAoYnRY62jHjIoGqQpyigTv6YeBsupQUTG0Nbx6\n",
+       "O/MbbnglvLLJfF2dMaiETJHdMHC5efxC7uYRuaGwEKMDDbW05GRRRhNSC9EgxOSqKKRBK41AobTC\n",
+       "qojpIA6JcQwsmorlcsU4XGK1Ycg9wY+IrAhxJKXEZrOlampgD1Gw3e45PXqdxTwhXElQk1qjLmvQ\n",
+       "keAFpalQSDRLFtWazaPHzOwRLgXOTtd0Tzt8ijjnuXp+QTmfsd8HmmbGbrcDwOjJmyalhNISoS2L\n",
+       "ukDlTFNVxBxoFie4lOnHESEF4zgihMCUhjA60A1KJnSM0MzI4zNkveDw/ldAlVi3J519EvneP6Kf\n",
+       "rdj+xs8hRMI5hy4LfICZMQgFmclvxiqDVImcJV0/+bMsy5q4eQgiYeya9cyw2e65OrQUMaOUxccR\n",
+       "IcYpPEQpUgKRZkSnyXkkxoTVmr4PZJlRUiNFRKgSLRR10dB2F8TUUdfltDkqMzkOxNyTc2YYph6F\n",
+       "LJi+2ERAyEAaC3IoyPEmNu6G709e2cg3VCzLM1IWFMbw9OJ9Nt1DQkiUZsWqvMfZ8nWshnLmsFag\n",
+       "pEHJRJYJIRtELlFCgEjUsxqix5hIXRX4EBhdACL1TFLYAiGmlXxInpgg+8Dl1XNikBy6jtJamvkx\n",
+       "47BA65Kz4zcABzljjMKWBmMkQllyBmuOqMoTiuqYpDNu7DFxhFaR8tRyLjKEGOm7gRAmvXtKCaUU\n",
+       "QghyhqqqMMZQlhVVuaRZHVMuT1ge32W2OmN08YWroCDnTNd3DJ0jDgPD0JNGBykRxp7+omd78ZjZ\n",
+       "fE6VA5mEvPoqYz/CsGcXIkenayLTrwIkU5KRiyhtKZvZpJSRGh8CxhS4FNj2e2RODENPzhpyRCLJ\n",
+       "0aCocGNNiIaYFAiPzJ5aL4ljxeWzRGxL4pAolCClSPCJvsuEEDFGIk0gpQHve0LaYapIU88gKVKa\n",
+       "zLWasiZ4zaHPdEPG6oqyNCjRsFq8QVWeYMxNOMUN35+8ssk8pIiSlqJcIJKgqY/YH64RoqMplgQ/\n",
+       "OSMqIcniElVcMboDKQfQnkRk9AkpFLNyhpaKUhlEFtPmphQ4v32hYQZVSqQQGLmgtDWFVRSFJocB\n",
+       "ox1aOIy2lHbN8fIB1hxTmxOivCbEHT612EqSUQglp6SklJB6xtj1yOwQfiCOGe9alFSkMND3B2Ca\n",
+       "wK+urhiG4Ru18pTSJJFMmcJWlGWFtVOTUMyS7aFlu93SdR3jOLLZbBiGnvZwIAMuQh4O9LsDXTuw\n",
+       "v7okSYlqe8TiHN91sGhI0mKrms3D9zk9u8XF80uKogAh0FIiyCgpCCGSfCaGgDGG+foIIRUxw3p1\n",
+       "RIoZPzoOhwt2/SWyGDhaawor0Ri0rJEyIZPCyhnJWdxQk/yC611mGBVJRAqrUUpSFACBffchPl1z\n",
+       "vb+iHQekiRSFBDESYph+rWlFWUiq0hOiJ0dL9Bo/ZqrilKwMy9kZ8+ZG433D9yevMNA5YSXEkFGi\n",
+       "pC5KhmGJj5FDd01dNoQph4AQHda0KBswo8XTE7xGCUuIisJOdd6+T5gYGF0CBS62dH5DZRoQgbKa\n",
+       "kYaRM2NpD442RSyGPF5DWeOd5s6916iKkn13jRINpmzYXF1ibUIXkWE/MjqHEoFcW/r+mtLUKCvw\n",
+       "bqTKFnKe4tDCSBghScO8qBik5Orqapoo57OpaSoENpsN8/lyKp9YCw6kkDjnUWqkruopQzAm5Kwm\n",
+       "JdhuN9P7fPYRq7O7RNdhNDgMhYRn736e88UctetxfUuSitW8pB8dZWVJCYIPSKlJOVMYjdEKJTPa\n",
+       "GJIAkTNl02DjyOHxR+QccJ3j8dW7WGOADmU9Si2x0jBmBbmikJbsSlwvGcf+hV7esVppCmvRxiKV\n",
+       "QctIyokhXNN76A8GkRJN9U+Sg9qDYnSBWVUQRKQuJVJa2p3CSE30AyJXVHZFAAIv15vl4/Ddyo68\n",
+       "vLz8rlwHmCIDv0tUVfVdu9Z3Ky/1t3j27Nm3P+ljcnX13bPd8f7l++y/ssm8d8+oytUUnlwmcg6c\n",
+       "r25xfXhK7wLO75AWmrokp4IQHGVZ4bJjHCNZbVDpCC2rydcjS4bekVG4YSQEgy0io+9QWaKUIBIo\n",
+       "yxqEJ4VEjBnhBVEMCBSZxKyuqYoGFxwCQRFPWViBH97jEPeQNIfWI8UAtsB1T1iUK8qypFCO+eyc\n",
+       "HC2KCDIzq2dkW0DMSClRSpFSImeB95Hlcon3nqIwVFU9+dCYqWGoLiuWq9W0+WnsFLKBQAootEGk\n",
+       "wHy+YPADxvccNldk7wlaMjx5l8fjGefDJebeG/jYs/OZcb9DGzkpX6Sc3AjDNNhihLIsMVkhqwKQ\n",
+       "2LihEBJrS8YAgxRUakY7XFHbjNaGqtAoWeJdh5AVfkzoUCKyIOWOEBxKKFKIFHVJlJGisJA0WRVc\n",
+       "7yRJ9Hg3ZcEiAn5oGYYdh93UF7DZRNbrmihH6jqRhgofHIYjZNbI7InCk+RNOMUN35+8sjLLYXzG\n",
+       "OOzRsmG3vyCFgJGKUlpiHGiHDSEMSCzkFZIlQjmUDS9WkRKjBVooclIIKqIQ9H0meEdyCZUbok+0\n",
+       "7YFu3NENLSkFRFqDtiTtUAXsx4RLI7aS7LpLQgBlDNebZ6hUoZLh4Bb0PXTDQNt1jC7g0xYpB4a+\n",
+       "w2JYlifEMEL0jH2HmvrjsVYjlGa1WFKW9sUE7qiqAudGZrM56/UapRTWWubzBdYWlGVJ0zTUdUVV\n",
+       "F9RVRVkWrFZLpABNmgI3hIQYsELQP/sS3gea0/vUp8cENzJ+7TdwlxcUJnF8dkyKAiE0+UUNXghB\n",
+       "CB5lLdLUZKOIo2PY7RjGQFPWfOrsDT5z+oOs5BrnI5U9xsgGKzUpRVLwSCwhOqQY8akj5oEYw+SZ\n",
+       "IxI5Z3rXEn1Pzh2FVZAMUBKGGikMs6rCao2LnkN7jQsdPjpUnqPyOU31AJEMzSy/UL5IgpOM447E\n",
+       "FiHCqxrSN9zwSnllk/k4Kno3TE03RcXBP2fXXWLLBbWdUYiSIhlkNhjd4J1g6AJJHJjNFIWp6HxL\n",
+       "58ULP5CK3gl8zoxjnjTHTjPT53RuoO1Htt2OlAMxJlKqSAR8bolqai8Pfs+jx+/RjR3DuOfJ5W/S\n",
+       "5z2BPYUq0OqUnAuqas5idTJtAspIbSWFMIwxgBBYI9HCIIUE5fDBo01BcJGmaV6EOEx182EYODo6\n",
+       "pmkajLEsFguWyxVVVVLXNU3TsFqtSSlR1yVVWU1yRiUJfiSnkfawYf/8Qw6bJ1SzI1qXWaws/cMv\n",
+       "cbh4DBL8cI01K9yw4/btuxijKasaLyGQMbokJ8847nDDgMiZommo50sgc/A9lcicL1fMixmFKWjq\n",
+       "M+ryDm03MroRckFOCp9HhtjTdR1aSyKJbAZ6d8AnR+96nN8R04CSsLT3EWFFJStqs6As5wxDIsSA\n",
+       "i5Ex9tRVRX8AFVeEkMliT1Fpcta0B08/DlMqVbpZmd/w/ckrm8xVsad1zxlDj1GWMQ54RgbvULmh\n",
+       "eWHMFNpIHAUql/ggGHuB0Zp5vWa9vEM/PEVLS9PMUGJO8BCTQaaIlmekJCjNMX0/pQ5tdlf4MDJ0\n",
+       "U0BzlOGFkgJk1hRa4n2P8z3b/j2e7n6ZNj2bYtB0whaR5dpitGZWNQgGlBix2SKSIoSRvmvRlWS1\n",
+       "PqYsaqqyoa4MKU97BKenpy/KLZnTk3O0NpRFTVlWzOfLb9Q3F8vFi/r6nPPzWxhb4GPADQO+7ygK\n",
+       "i1aSHHo2F4/oN89xQ0cWOy6fXyK1RC5WbLdbZvMVfXdFTop9e6CsGqqqokAiU8LHEWKitnN0WaFM\n",
+       "Tc4SKTIzW/HayTGiUGy7A4dwjdSS9foep+u3+OF3fopV/YPkqKjNbYYw9QREIRl6wXDIGK1JMnHo\n",
+       "Pe3YMfiWbfsEFx05VKg0J0tDQuHGxNinySVTJJQSDGNHqZe4PkI4meIGbUYg6Hs4bEeyAyVeXgfo\n",
+       "DTf8s8wrq5lnRkJoaccrpJrc+vbdASUENs/RcobMEXe4IAwF8/UMkRy2qPFOcLw84nx9i97d52r3\n",
+       "LhlHXdZcXO9Qac6isIzdNaZYYJSG1BDdji73RPGYQzfifUKbnqqYMatWCD9iTCDHHiMrUk6I3EMS\n",
+       "JDWnEGuETrh0TcUSIzJd6PB2wd7v0C5wtLpNERua9ZJu3yL0gC0X9D5QlCXHx6fs9xuEUJyfn2N0\n",
+       "MXnRBKjrCq0nL5emaUCISUoZJsuA0hqCj+zGHjcc2F/uifsrTFNTmcyYEjkllmXF5uIZsh9Yv30L\n",
+       "u0k8fPgh50crDruIUXpS02SPKaYUH2KcJIOEacMYIERyCpRVxdX1BQs7Z1lVqEMmBkfXD9y5/w4P\n",
+       "7n6aW0fP+fDZEU+vv8zobtP555R1QRg0KVn6Q089t1RVjaXA9zt6OWnPiQ5rNGUx6d5DFORcQ9Yo\n",
+       "lXGuY8tTvEsIPMiI1gKpAqaIBDI+wHa/4fTo9Zc6boUQCvgl4GHO+U+/1JvdcMN3wCtbmUspGeKO\n",
+       "Xf+cIbRkCVFkrg9PEVlydnSfVXPOzgMUiFBx+/gtrD4mh4YQE01dsqxOqMolhTKUBVhtmBU1RsL2\n",
+       "4n367oBQkvOjO4hsEUrRDVcIlVGyIqcarStyBpQmpoGYO7QSvHn+rxBCJkbNOB6IyZHZItgi2ZHT\n",
+       "gIuO1reMwWNkxW57YPQju+sNUkJVlSQSVVVxdLSmbVv6vufBa69T2MlD/ezsDCkzy+W02SmEoGlm\n",
+       "CCG/0fo/jiPBeYQAlTPdboPbPsMdrtg9fUS/n3TgspC0uz3zWc2tB/cZdwd6N7CY1XRdO72fHEgC\n",
+       "2sOBoe8JIaCUQSoLCIauxfmBnNJkpxAipa04PjrBKkN0jhh7nl9+hFaak9NjfvwP/hgnxyuEcNiy\n",
+       "Qss5xhiqQjKvS2o9ZXWWuWaml6zMOTkavEsoqTlaLybjLy/oekhpgUgFRhXEOJLoyHJDCIHRO1JO\n",
+       "ZKboOm0SznuUiuy7r77sofsfMPkOfSvL5xtu+J7zCnXmCp8cXXjKvn9CSiO6iEjrqErN2eltzm+/\n",
+       "RlPPGGKLyBUiW+bNKdrWfPjofS62Fy9kiglkj9QH5tWc+bxCK4cQkcvr50gKZmbN3aN3kHGJUTV1\n",
+       "XVAXlugs2+sW7xJQkUXmcvtl6rLi3q03acwD3BgZxhGpYNmcU4gFOb8IZ7CSy27H9fgR+9hO8WdC\n",
+       "kFLi+fOP2G0vUVpRlPZFt+ckE8tZ4tzUEWqtJeWIkhol7YsgDUFV1YgM+90U1tH3Pe2hRYpAe/mM\n",
+       "w9UTri+fUBWGan6KqRt812NFRMXM5vo5dVHQ7w+MXUs/uBe5q5oQM9Mic1KyuOBRymJsQ1E12KpB\n",
+       "KEsQGo9EyZKtcwxa0HvPZnNJzh3vfvgP2e4umDUNb7/5gygaYtpTlwadNVYbqsrQlAu0kFhqGBQ2\n",
+       "zyjDAuEsZVmhSzBFQYqG0AUqPWNZ3mJdvc7p8g209qTUkeWWKA7EKMhoQkx470hEhFFYG1/amBVC\n",
+       "3AP+NeCvAOLbnH7DDd9TXt3KPBVIMgSP8y0xDUBCG48yEWs1R+sjbNNQNoE+bthteiQFOiuSMHzh\n",
+       "vZ/j0bNfRxAZ3BZjBbdunzOrTvAIhJLEsaWQhpQis3pJqWsK2ZCBnAM5KMYh411i9IkUM/vhCdpK\n",
+       "Kltze/1Zcqjou5HoMwRLyhVSzEEI5vOau6enrOx95nLNfL5CSoOxllt3H9AsTol5UtQAOOe4fese\n",
+       "MXqGoSfGSEqJ+WzJ84tnDEOPcw7vPdZO1rjTRN4xDCPGZLa7LfP1GmMNq6NbdKOnWi0RerLHvXr+\n",
+       "hN3+CbOyZL+5Yr1c4NxkKVyXc4SySD2FUsQQURKqwk5NVDmSsoAsUVpTVQvK1THVcsbx8hbnJ29y\n",
+       "tpiTGMgy8OjZe3zhvV/kqx/+Ko2cM1sek8JApQxVXaFziUwag6Exc642T+h9h46GW8V9TC7xIVGX\n",
+       "S1IUeN+Tg6bb7PBuwKiKs6O3OFt8CmUUQQS0hnpWMgyB4GEcPcpmlEoI81KH9H8O/IfcOMDc8M8g\n",
+       "r2wyt2JBoWqkfuEB0gZi8IgXpZbej8SUaeolUsOuvWR/uMYNnhQhB/BxRxi3HPpniNhwfvo282aF\n",
+       "1gKsRZeCwiouth/h04gLPUZrNAoCGKMxKiJyouv3CD19Rl10XGyeopSFLDlZPSDmERlhdAdkhhw1\n",
+       "Ri8orGRW1+ScsErT9h1d1yKN5b33v0ZZ1hRlgbGGrt9jjcaamrbbU1UVbdsSQkBKRYqJmDxt22KM\n",
+       "YbPZ0HUdUkq8c+TkuXj2mPKFCsb5ESkU0Xu6riUliZCK2XJJDtB1HSpB13fMFgs6NyC0JCTIMeBj\n",
+       "ImVB8CP7/Z7t9SXRj0iREClBUZCtJPQdYezQEpRUoDKZjPcOhOALv/k5ujFCCtw/fgMmjwlCAAAg\n",
+       "AElEQVQfBM18gS4MQQ7E7BAqIynRlWbMgSwypSk5qtfMyjVGlBhq/JBJXhHdyL67JsjJXXI5f5PK\n",
+       "HqPEFFEXQyAGOUXO5Txp08MUmPEyEEL8FPAs5/w5vs2qPMb4jSOlm3n/ht8/KSVCCN84vhWvbDIv\n",
+       "OKNWJ/ihwLtEcILdxpG84tn+Me144PHFh1S1IeWBftgypAPX2w1aLhiHKwxryNOm4P3br/Pm3c9w\n",
+       "5/wBBz/ikkcvC5p1pPVb9uMFl/sniBwxKWNSRCpB1j3WTi6MOW0IeaQ0xyQ5MMaEFJJVfU6t7vH0\n",
+       "4hH73QV99xxyhDinLs5IemAQB3LyxOgp64qL5x+hlODLX/48w34PUnJoO9548w3KqkApxXK5JMaI\n",
+       "tZayLNhsNpTl9Jx80S2qlJrCqGPEDyN9u8fvr7BWU9gGqSWg2D7/CNduqNcn+P2W1eoIVKZqLO04\n",
+       "0MznrFfHpBgxRpNzRiqJEBkpBCm+sBbICbIgEXHbDbnr0RK67RXD1RWLouD89C5alTjfMw4j7fCU\n",
+       "rz/+In1IvPPgR/jkG3+C0tzmnTf/CMdHdymakjZcYEtJlrAZ9qCgsIb1fMV6vkC8kJLmYOj2HiVr\n",
+       "Dl3P+4/eZUyaWXmbu+sfZWHuIbLm0Pb0w4YYHdqAVAIhFDnalzVk/yjwZ4QQ7wF/DfgJIcRf/d1O\n",
+       "VEp945Dyxvjrht8/Uk77Zr91fMtzv0fv6Xcg8oKU5qRYEcaSsZO4Fq6fJ/re8+HTX2fTfkjXbXHR\n",
+       "M6bI9vAMGQ1hsDTFLSpdEpJmuTilqdYcr844Xt5GSIWWDTJbslkgdeIwXLFpL2nbPZ6A0DVlOePs\n",
+       "+Daq9BgrCB6kKClNxdht2R0uqGcNOWvurD+DMceT85/UjONzNEfUxetkVaN1phdTLmmKmbFrOT+/\n",
+       "zdHREUJknj95zOn6lJyh7w809ZzNZoMQAmtL2nZ44XVuqKqpq1VrTYoRP47sN9dT4k7MjDkx7K5B\n",
+       "CEJyiHSgmS3xoedw/ZQoLd4HYsjknDg5OqE9DJRljU+w71qCd4RxAATtGDBGE7wnuoG+3xN9RGsL\n",
+       "pQapWR/dxY8d7fU1plyzWCwRXpBTIPrIP/j5/5nnmyeENPLpN36Yo9WbNPUxb73+Y5yf3GYxP6IN\n",
+       "HdrMaMqCp/0VwxjwOJI/4NNIZjIgWx8dc7L+AY7nn2Z/2PPkySNc1yJINMUxMpZEp8hJInUg4xDC\n",
+       "I03mJS3MyTn/xznn+znnN4A/B/xszvmnX87dbrjhO+eVTeZ9H6jKGcvZEa4zyGRxo2bop9b+Rxfv\n",
+       "cnn4kI37Iv24JYZEJiLNVI4pzRpBxdHqwbRSdyN953j67BKjJcfLOcezY1b1kpP1Gq0zMTo8PV3o\n",
+       "6XNHGjNGNTQzizYFMUhmsmFVntEeLgjjNSLvQUSsaThdf5IQSpANQp5OP+99Q6PvUpQ1STl2fosQ\n",
+       "nrOzW1xf7TBqxocffB1b2Bf68UzXdTx/foG1lqIoKcuSEDxVXXE4HF4oWDJVU7PZbtEvfFP80KJk\n",
+       "ZOhbYkr0Y0tOClOv6MeBLAKlEizPTqCyLFZr3BAZhumLIqeEVpr18Qn9GLne9+hy+lLLGWxdImyF\n",
+       "EAqpFFIr0uiJYSTGQGUthbBUpuTurXOOV0csdQVKYIj87C/8j2z2GxbVkgf3HuDbiCoM9fwUW2qs\n",
+       "Vfg0lVj+P/berMeWLD3Pe9YQc+whd04nzzlV1dVd1d2kSYomRdm0RIm0LAK+sOwbD/CNLnznP2D5\n",
+       "D8iA/4Bh+IoQDAGCIdoCfGGRht2WmzIpm02y1VPNdaY8Oe0p5jX6Yh8SbbPJ7ha7dEh0PkAiA7Ej\n",
+       "YgGZa3+x4ovve98xOJ75Lb0bGdxIP3b0pkfmCWen77I4WnF68phEL7B2y7r7mG1/hVeQZUdYZ0my\n",
+       "HCWzP1r9BgzR/Sub0vfVLPf8ueK1BfNmu2FRHHG8qFnU6SFdEqCsSiQpeZETfECIQKIUZaFZLo55\n",
+       "evcd9vYGGydmVY0Mkln9gCeXT/nN3/p1Pn7yB4TY4BOHSgSJTtBCUZcZVTlHqBwTPMZ2DM4zToZM\n",
+       "VBjXIEJKP/UgAlJFxvGavrsk9Fu0DAQjWS6+SFF8gfniEVd3z7m6eULfOZAFg5goypooEjyCo6MV\n",
+       "L68+wLsGHQUhHnLMiMB68xKlFMfHJwA4ZwivcqzTNGGNIREK4QNt16ATzeXVC8zYI7zH2wktC+Bw\n",
+       "c6vnC/Jiho+CYA11XjBNE3mZE5RgMB37tsM4R79vKaqSqihYbw/CX6vTc+rlQ4oqoSgrRJIyDSMx\n",
+       "gG17unFi01t2zQ3Nbk9dzTm7WKLKGViBo+Du5RW/9bX/jX3T4yaHLiRXm0uKWcby9A2klCRxROcD\n",
+       "Mh3pxw19GDDOYLqBfbsjyTXVckFRzdn3a+q0Zlas2LV79v01LvTEYHBGo/yco/IhuZzjncQNKWH6\n",
+       "7Kd0jPErMca//ZkPdM89PwSvrWlIRHGoFS9OWR3vGKeRFEmVZaR5RhgNOp2RqTmNNWilcM5S1hVW\n",
+       "3DHLF1g7IZXHO8vd7pZd+4KiUDw4epu+nUB6YtCHR3AhqHMN6mBZNowtWboHmxPjhHY5/djSj46J\n",
+       "PTqL3G4+QMkZaVKy3n1CsDWZyiiygkLVLGrPpy8+YDm/YFbknM6OwWbYqWe92XNUH7NaLhAyBQH7\n",
+       "7Ybt7R1BK/IsJ4RA1/as12tWqyVpluK9PbT5W0dnerIso2l2yDyh0AX77R3CTZSZRMoEaw+iVVpG\n",
+       "lExJT1YE29L3PUIryDQn84dcPf+Ui7MH3G02LOoZu2YHwPHxMUk83NWd61Ayw8mArkoylRKmgegD\n",
+       "U7PlqK5IhCJxEZDEPKAyQ+wlbrR4H/l/fv+rLGcnPFid0fsOaQzjDqpsxdHynIYNk9kz+IBKAplW\n",
+       "iGlARUeZKYJwKOkBQ4iW7f6W+VFCnip8dPg4YK1DxCVhkBgdQUB0GWMniPqzK028554/z7w+oa3m\n",
+       "Uy5fbPBeoXTN0XJBXZRk2QwhDlZhxhlCzICK29sBYxwET51qrBuZYsfN7gVtt4OQMHYGXOT8+A3+\n",
+       "xi/8p1zfTdysd3z64pbru0uyNOF4seRoccZ8CfMqkBc90zBhfMpgJ3rbcbNf0w8TIRE05pbWtHRN\n",
+       "w93mBfv9jvXtHW2/Zp7UpPqI6+srjA/EWBKUpxnuyNICIaGcrajqOev1M+zQsVguyZWmns0I4bDq\n",
+       "VkqgtCBJDp2f2+2aptvjnKOY1fjJsds1ZLnE+YAWka7rSbMEKROm0eC9R+clxlmi0CRFztiNTP1A\n",
+       "s2u5ePwuzTCxWC5p+gbcxGxeMnQdR0dHBKUOphtKo5MCbMRlCfLRQ9TqhHRW8/T2im7siCZhnCaU\n",
+       "FGRlgrOOwUXadkSh+T9++3/ho8v3sOOeenlMXq4QISf1RyiT46YZfR8YjMd4S5ACEwMqVRjTMdoN\n",
+       "Xkbm5YrT1RHjtGNwLT4qhmkCJSlUwjhZumFCB42fAlMvaLf32Y97fjx5bcF8OZ8jYosdJYv6TY6q\n",
+       "Yx6dnFLIyKp6RHAZkxmx1iCUwobAvm+xYcCMB02Otu242nxEPzbU5QnEAjM5ThZfwkwjP/H458jz\n",
+       "GdFJoq+ZjKd41R1al4KssszmnrKscdYc1BeVpe9HgsvxzpFmMDqDC45m13D38pLnN+9zc/chOY6F\n",
+       "TOiHgX17Q5KXJNkSKzPQEnTGy6sn7JuW46PHtO2Wq5fPXpUbHQSh0lSTpinDMBBC+CNPTji093dt\n",
+       "e6jA0XB9c8M09uw2a2JUmMljraWsC6TMAUGR5wQi+13H6vSMtKhxpmfYXpPVM4a+Y7FcYaPixcuX\n",
+       "aKG4Xd8gpEblKaosIJFEJRH7hrjdo7DUWc67bz1kVZ6S5Ss0pyg1YzZbsDjK2O1u6DvDsBuo0pSP\n",
+       "nn+D0U0EZ6nzHEIgBkUkw1tNDOWhQUpbRjfRu5EgPFpG+nHNvn+OkAPHq5w8tyRKAZYQxEFoTEpk\n",
+       "4okYopQQC4iWNFOva0rfc89r5bUF8/myRhYWT8c4OOpqgQQWs3MyPQeRs2tb2vGORa05PSkJwbDf\n",
+       "Duxaw3bXgZCMdmC3vyHBkec5u13k8urbNP0W53rKDOpFBlIiDDS7Dm9HcIoYPTLRPHr0eZROEHLC\n",
+       "jAdl83FscYODqEjTCRcldaKZZyAj7NuOm6tPCK4n4CFabtefIqSmPlpiR8ftzXNOTx7ivCFGQ1kV\n",
+       "VNWhq3NWzymrHOct1jqG4WALF0IgSTLGccQ7jzc9Wnpc21JkOYnQJGWNkgprJ4oyw3mJcQYfHZMZ\n",
+       "ETJlvlxyd3fHNHbMV8e4NMWPDcY6dtsdq+UR9WLJECLeetxk8SEQzQSyIEqNzBJie4e3jnR+jJgC\n",
+       "75wseTCboUWOcnPOTt/m4uKCxWmFUIqTk3OO6hPmecnLqytkSJkGhw4eFwa8CKRZTpVUlJkmkxWz\n",
+       "uaaelcToDl6ipuPm7mM6d4MuBIvlA+bLJVW+QCEJwaC1RWtHmhdIIUAY0pmmWLx+c4p77nkdvLac\n",
+       "edAJJ8fHbHZrJqeQ81NGEUiDJwP6bsILxTgZlsuRNy9KtoWk38EwdeQk4AR1fnpozEkjRVEwtpKv\n",
+       "/s5XODk9Ic0DuYbjxcFrcjf2iFbj3EheOYryiNn8DfI058uf/3n+72/+7wgSkgSS1CPkHJ1EYhhB\n",
+       "GOqy4qhSLDhiDBnNtGN0gUwFlPS83H1KNTuljBXTdMvF6UP6fiC4ESHmXD5/gk7uOLt4m8Ia1us1\n",
+       "x6sTpJQYHwjhUCIYfCDPS5x1ZDKhGbdgR2i3xCTHdD3l6SkhRIwxZLmmyA5130pqlFa0TU9RzhB4\n",
+       "ttstVZHhjSN4j3EjKnqkSJjVOfiR6AM6nRFxxOgRWkMxA5WjgiVYS7o8ZXf7gjJPEX1kMX+LZb5E\n",
+       "n0fW7Q0b3fPw/HNkWYUxI/10ySfPfp8qnyGFQ0pPXiQY16OTQGTE0VHlFd54Ji9QSYYPllzD1K0R\n",
+       "ZUKqZkghKbMVRIvxHVFZZtLShR2jCcyyCqSnrH507jz33PMXidcWzMuyJApHMw1MnWVRLbAY3OTp\n",
+       "GYgiJbrI1DmsbcnzOUdlTRUVPY7JWcZ+pC7PyKQkTQWPT89ZZoKPPvqQp09uWZ1Jjo9T5lVK9ClG\n",
+       "S+6aFi0EBkWWZ1xdP+PR2SNibJiVC5x0qKwhLwTBHRzmbQgIBU6MCErm1YqT5Iypu+Hj5x9Qleog\n",
+       "gKUmds0T8uqLFCczNs0e8OybAeg5OzshSyom05Nnj5nP5kgpKYoCszN4J5B5Ql2VCH9YZacJuLEl\n",
+       "15Ht0LEqSkQ1R7iA1uJQi+48MRUkukQpQdM05EVKkS/YbjcUuaZvenSao/ICaSSb3RaUIEZNVc+R\n",
+       "WhCFAJUjgiBIDTGCMvhmT4yeAsHRyUOePn+ODYLJBfCKMj/ltK7I3okIPyJiBVgQnslMdH2HcwN1\n",
+       "nWOMZxpHgoBgHKnrSMaEIDR5ek5eBpyfwI5EobnZr5n5llX2JsYPqNRRxhMMd4zGE6ykTAs0Oc55\n",
+       "Jvf6V+be/2hewn6/jr8fht/93d/9kV3ra1/72o/sWj9q/jBF+aPgoJH0F4c/Nc0ihMiFEL8thPg9\n",
+       "IcQ3hRD/1av9KyHEbwgh3hNC/BMhxPK7zvkvhRDvCyG+LYT41T/p2jmWRbpkkZ+TqYgWEm0CVZaT\n",
+       "ak2dJCREtBeYJkPFJYKEUsMs08wqDQqULHn7jX+LYBXzRc0bjx7xM1/+BdApfSuxRqFVitYpgoSi\n",
+       "1AQtMK7m5m5kvd/z5PLrrDfPmKeKsi6Y1Uck+qArbk1gGix53dGFls14h/WGKgsUKkEFRaZyJBKp\n",
+       "crr2hilOzFYrjO2w3iC1R0hD2+y5ur6hKmd47xBC8OzZM/q+J8ZIlhWM03RoHKpnByu6PCdJEvbN\n",
+       "lsXqjGGwzGc1xluUTCiLGQDjOEKMtN1AmmdkacXV1cGLsp8ceVURMHRty3a7xRpLu75jGvZ03ZrZ\n",
+       "8QVCa/w4wbIikiJ0CkWOms+YvORyf8XYtCyqORGFnw4t62MfWZ2+TZYKvFzjxXO8vKaoAlnZkRcZ\n",
+       "/eTZtZZ2GhkGh3cJUOCMABKM9QcvUr1gPjtF6Tn9CF0fMK6n6y4Zp5dIkZPqc2R4g5PFuyRJwTQp\n",
+       "pFTEKBibz6wD9J57/lzzp67MY4yjEOJXYoy9EEID/6cQ4q8Bfxv4jRjjfy2E+C+Avwv8XSHETwL/\n",
+       "MfCTwCPgN4UQX4wx/jGBijqVFIVGiYBQOV3bM/aGqAbqcs58ucRKA2FAiwJrcqIQdFNDVitEVKRZ\n",
+       "ZF7mrx7RPcq2LBYXFMmKo5cz+u4O0xbc+sis1GQhoRM7MpkTY4qZBM527PsPKFRBLgqy5BgRC6IP\n",
+       "jHZHkiSU5emh/nu+oe894/rbGB+QUYFSzKpjlHCMcodMZ+ztNekEZJJp11LVx7T9HiUyiqoAIt55\n",
+       "rm9eUlYliIiSMA4N1gUytSDTiiDiwZIuLciWxwTnyPOMvh9IEoUPHust+axiGAzt0FHkGmsCu3F/\n",
+       "0EQH+rZFRIn3UM9qtuOEEa+qX4xlVmeYsUU/fAgx4AeHPDkh2B758o5oRnRRcBHPuY0bapdT5S8Y\n",
+       "x47NYPHeEJVHxhTXdUzBQKzwk+Xs+BhnBXOTY6yFwVAFiQsBLw1JkjKODZtmIJEnnK48IVjG0TFF\n",
+       "Q1lm6DTBMpHJh5T6Td48/yLjquPZ5QecloEp3aBUgjGBTXcfzO/58eT7vgCNMfavNlNAARsOwfzX\n",
+       "Xu3/NeA/eLX97wP/IMZoY4yfAB8Af+V7XjcEmt0NUvlXQlp7nry84ur6jt61IC2pztBFTj/uGMcJ\n",
+       "pUo6M3G9tew7MD7j6dUHbJtLurbD+Y522tJ0az7/4ILTozOmaWLoe4TNyHRCCrgwYIynLo85rj5P\n",
+       "oY/QgIwOH9cEJtpmpGsNSuY4K8jTisenJywWc9LsmHby3PQ9vkhQWUImFMoNxNhxe/sMT8RHhUoK\n",
+       "RIzM6iXDuOX05BTrD23rf/g4XpU1RVng7EQqIwSDGRuyRJJISLTEBU/X9/hwWNG3bc8wjoQQGMeR\n",
+       "qsjJ05ymaVFKveoqdWw2G2azGS9fPMGMDS+evyArUqy1SJ2SFBlBCcahRQ4GLUDpSNzdEXY9ZAnI\n",
+       "BJTm5e6aXEmib6nSCZt8yN3uG+yGb9OuG/rbGZvbhLtrz83VjmiXTINAKk9WGnzoSKVCaU+SjCQq\n",
+       "oINj6EecL3l0+pc4Pf5ZrJc09payFhwtJEWSEEbBsjpnUZzy+Oxd7BBpui0yHQhMCOkpq5S6mv9L\n",
+       "fRHuuecvOt83Zy6EkMDvAl8A/psY4zeEEOcxxqtXh1wB56+2HwL/13ed/ozDCv2PMcaUu6tLdFFx\n",
+       "NF+x2XrWTaQb13htqKoMhEIqGAbBuvmEL7z1JZanb7De3OBjxn67JpUjlzffIShD0zuMeUK/V5RJ\n",
+       "wenROXfS46PFhAkVC2SAVEW6yZKmOXWeU+c1m923maIhkynB90ilaXaOOvEsVkt6e02RK+aLU2RV\n",
+       "0OwtfWwQRaB1G8ZxwqQeySU+KdmZW46KI+bzE6x1vHz+hEcP3+H2bsPnv/RlpBCslqfMF3M2myu8\n",
+       "DaQqIa0O/5I0FbS3axKpuGtuiVHjw0SiMsqypMhSnDE45w6ljq9MLfLskMIpq4y2bfHOcX19xYPz\n",
+       "x2x3WzKt2G+29F2LmSR1PUeKjKJcQrREWQAp+BF9dIp40RIXCxLjWa3Oubn5lM53qLSj3TynHywi\n",
+       "rsil5G4zsNl6lBJIJLc3WxI1J7F7rLuBtMBMEUGOk4YwOZpQEaXgeP6YL3zup3nn7bd4fPIlPl3+\n",
+       "c26uv4r1OaPNOT1+gxeXT/jcX/5FqiTnwdEF33yvJdAgdY91FhsFaX7fNHTPjyc/yMo8xBh/FngM\n",
+       "/HUhxK/8/z6P/Ok6Fd/zsxaHSSqCdDxYvs3p4jFSOaJXbO76g8Sra0icJlcnmKGgabYURcHDs3cI\n",
+       "qidN58zUnMAeokCKFC80+2HPaCNFekpVn5AWc6QusEHgRQ4cNEzHYcA6i5IFSXaMTkq0PCLVR0gZ\n",
+       "kUKTkDJsBdNWsl13ODEhdECmltlMkmYGY3uuh1u6YYsPlijvaJqXr6zZcsZxIC8ymrbj9PSU3d0d\n",
+       "eEeSwDB2zOo5hZaMQ0PEMKsy2v2WrlmjhEDGESUMaTrj9u6Ou/X1IWUhJfaV2cQ4jK90XQ5qfbtt\n",
+       "Q5ZkzGYVWgS6fkeWJOy6icEY0qwAIEmTQ0lkkkGUkBeQp8hiSdi0+EwQxwHcRDe1SJ1wefcBSufI\n",
+       "bs7TT3v6vTpIz8qJJDlU4pTlDJ0kdO3EZtvQdgHrDU71uNJyMa946/SCVVWQMSPENU9e/B5X1zco\n",
+       "Ldg3W4SNFDLyePkupaqoy5yvf/gVggocH81YzTMUCVoUDF3Hvtkz2e0PNPH/ZRFCfCKE+AMhxNeE\n",
+       "EL/zmQ52zz0/BD9wNUuMcSeE+J+BnweuhBAPYowvhRAXwPWrw54Db3zXaY9f7ftj/PZX3idP5php\n",
+       "QvzcHcw8i0WKaUELgQw5WeqR1vLg+JS6fIPN+D5FMaNUFzzfv0eaWGblCRJP1P5QZ97cEuIr9wCZ\n",
+       "kZIgk4rBjGQqp8ofsG+f4OkZzMTm+Q0XR4+JQVPmR2g5Y3KOurxgt/uAbX/HMj+nsxLb7/HG4pKR\n",
+       "yQrOT96mLAs++eQbZHJO144oFSlqQVJNOGHo7j4i0wUIwWKxZLtd8+47b/P82VPKxRLfN2zHnrpM\n",
+       "qQrN2DV80m45P14xJhGhInkxYxgnyjJBMgcChIDOMqQ61MtHAbPFgk+fPeHB8RExKpyPlNXs0B0a\n",
+       "QArBos6ZppTb62dUqaZrt9SzGUIKQhQIAlRzEHMkLxFjTkg1od9xtbtj6wekWiBjIFFLBvMpVbJA\n",
+       "o6nSgk57hBbkaUYljnBiYHI13bSHxHFaPODfeOttHpxVXO17Pr5uudze0N7dsW3uiGywoeE7z36L\n",
+       "i/mMk0wz2mvK/CFZ2fHs8kM+fPpXCGMDWmGd4cX7Lc8+2OKjJIofXQXIn0AEfjnGuP6sB7rnnh+G\n",
+       "71fNcvKHlSpCiAL4W8DXgH8M/J1Xh/0d4H98tf2Pgf9ECJEKId4G3gW+5+rlF//WY/7Nv/kFfvnf\n",
+       "/df5t3/lb6ISwdFpYH7sAEfwAtNIlCwp05TT5ZLz5RtcXz9BxsiXL/4aVSnQcjxUfOgCKVKiP6ww\n",
+       "+2mLCI5cLRFBIIMkmg68ZLV4kzSpQAxYE3FecrvbIMmp8xWRhlTUvPvWL5CXJbt4wzB07HeGrnXc\n",
+       "3t6wvr0Gd/Am/Zkv/CKzVBMmz+464EZPVD17d0OxWKGznOXynIjnc5/7HJeXTzk+eczN9TPssKPM\n",
+       "JG4aaZsdZZEhvKfvtiQU9H2HsZblckGWJehEIqUkOMN+e8vY7djt9ugkZ7SBs7MLbm5uSZKEbuwZ\n",
+       "pgmd5Fze3WJlYDSert1wcnKKzjO8DwTvkd4gMMTZMX6I+H6H0AUxSkQcUHnBO4/eAVmS6zc5W7zF\n",
+       "oj5Gq5zN/pb1bsPgLLM8ZV4IssSTSMGquuBk9pPM1BkPZMW7eck8AdN17LZ7rtZXdJs7Uhuodc7g\n",
+       "XrJv70hUznpcczvtUaVEyZbgJk7mS/7gW1/lvauPKOrHLOt3uHh8wi//jS/x13/p8/z8X/+eWb0f\n",
+       "NfeWcff8ueP7rcwvgF97lTeXwN+PMf6vQoivAf9QCPGfAZ8A/xFAjPGbQoh/yMHw1gH/efwTijXX\n",
+       "t3fkqWBZPuTN8y/z1sU1m83vUAWNSTN6Y5Fpgczm7NuWxWqFQCNlwmgMZpzI9QIXLXjHZAJJGqnS\n",
+       "ilHtDsYTsUfJcKjGIBDZovQxeV7xbvk237j8lCTLCTHBOkkzWhbVQJYIEBNSzDl/8A4vn3+LNDQE\n",
+       "k5HrE7we6doN337/D/jpn/h5vJqoU40fU0yIRJsd2sqdBAvWWGQiqOdHh5eTRUXT3PH4rTe5ub4k\n",
+       "2W/ph4HlrMANPVUmGdsWhEfJhE3ToJKSYEemcTwYiSaa2XwFMaJFJIgAeIa24eHjN+m6w3vrbt8g\n",
+       "lWJWz9he3SGSgLUT1mqScs7i+ORgHl3NEUISEMiqRHYW129QyxNoGsbNJVfNlixqiBB8JEkFqVJs\n",
+       "mitud56izJnVEF0ORU6DpCpXpFPGw+N3Kc0HNI3l9977mNFHnjYdbdgzLxIezBXFyjGanmlyCFfg\n",
+       "/cimmXh44kiTQIqlC7C7e87R6jHFbIkdJdpLGB1WGEad/wi+Fn8qkUOVlgf+2xjjf/dZD3jPPT8I\n",
+       "36808evAz32P/Wvg3/kTzvl7wN/7fgNLKozpkUWkqmqk0AiRkEiN1inL1SPa7mCF5pRk6vfYyZIW\n",
+       "OevdBkVOKhb0/hlh0kQELljybM5RlrENl0y2x7sUEonCIULG5O6oxZLj4zeo11v2ZkRIiZYLrm6v\n",
+       "WGQFeZGx3X/CYlahUTw8+gle3H2F4/KYqjyn8ze4cE2/XvPkybdI3/wCCIGSCcINmK4k2ADsae2C\n",
+       "VAryMqefRqarK4Zhx9HinKfPn3NxdsLL50/IspwYM0bTk1U11jpCPFSunByf4Z2hmtVM1mHGgSxJ\n",
+       "sMFTFYfyQmcswXu0VDTdIV++3x7yzirTpHmGlxE3TAipQEryPMObEXV8jkoSXAQlNVhBKOdIYcE6\n",
+       "xPKU9sn7XK1v+ejqBe9d3fFTb7/JZAfSzBOiw40BZx1tHyhyULoi1Ybe3lFlp7jeEoWgdY6rbYNT\n",
+       "KV5qFBIhICsiUgXGYWLoIt5rXJBECm5uPyJdnSEJeBPx1tA0t2hK2mHDXEISAqPwJHH5/aben5W/\n",
+       "GmO8FEKcAr8hhPh2jPGffvcB320V991aO/fc88MSY/yBm5demzZLlc/xNsULxba54Xb3IU1jCUlC\n",
+       "PT9HyJFymaA0SFHRj5a72yuaQRGRZIliWb+J7RVydDg30po78kzyuTc/hzeOl3cfsBu36DSSZJ4p\n",
+       "Svqp5aZ5RphyHh6/RSo6QvAcLc7x3nO9bdi2O7wf+PjJV4gIyuyUB4/+Kt2+Y08GwLgAACAASURB\n",
+       "VD6fU89OcF7gRsvtzR3vvfxd1lwyO4a60khyhm6k9dd4NmR5ydAb9u2OfduQZjXlvGQxW+CdYL/f\n",
+       "cXZ+jlSaNM2IMlLVBVmWMZvPqWdH5EWFj5osL6nmK6JOMCGyaSaskPT9oSM2Ks28WiJ1QqlSxv4l\n",
+       "0XRIIcmKGXW9ZL46IRz+sBSpRkaPHSZkPkeIFD/uQVhEWUFaEqcNpz/1l9l2e9778Ftcb/4Fv/f+\n",
+       "P+PFzXOsG9CFZX5UkeucQs9RaoYfJZ4G55+wMx/RuJ6trtlEReMlXipcsKQa0lLQKcmmH7m53HPz\n",
+       "4pZ+PeKninEU7Pd37JqnuCmQG42zA8ZseXb9TfrxhihaTDXilEGkn23XXozx8tXvG+DX+R6lt1LK\n",
+       "P/q5D+T3/FkQQvx/5tOfxmsL5tZOKCUxZuR685z17SVKZ+z9xF7dEvIt7fSSIAxCeYie3jQ4uyXK\n",
+       "HVY0RDyz+k0UjrJyWD/S2VsWy1MeHL8LQYD3jP2E9QFZjpikZxgaPrn9Dt34jDxN6c0tMfYs5iU3\n",
+       "m5dcXu3Y7HtGY/n2x7+DU5GsekR5+nl8IkhJGbtIN0RwGdqc4myGLAPJkaeoL5iJt7G+wbhD52cI\n",
+       "BiUiZVlytDqirudMbiTEkfl8jhSCKBSTjQyTp2l7usnSND3GdiitCTEyOUte1NTVMbPlGSqf0XYG\n",
+       "i8KQ0E8T682WYAxGRo5OHzJfniCkRGmF856xH5EonPGgcqTQxNEgvSAIgZq/gdjfICmI6Qwax/Th\n",
+       "N/j8xZfpgiV6uO7uWHdwfvzTvHn+s7z71pf53MOf5q2HP8VJUpGGwH4baJqGfnqGLhOsqNl2HX1v\n",
+       "2TcGJSuKvCZKQQSGfmS/G5gmRxAOISB4yWY/crW27NxEGw15polOYaYJfGBynt4LjBAUdfOZzVkh\n",
+       "RCmEmL3aroBfBb7+mQ14zz0/BK9Nm6Xb34Je4oLn8uqaRAiq8phdt8a5Pa25IQ0PcDHiwgiq5MHZ\n",
+       "Qzq/R8qa0R8c5o/rx+yixcdbEnLaYU3QE2dnp7xYf5NoI007sQgZoSrQWNKs4NnVhyyKHIRCSEWU\n",
+       "hovztzhdRD78+D10NqMfNqQ0fPDsn/Nw8TPM8oTz1UPWdzv8lBCMYb1tOD5Z4EIk5hYnJrJa4UKE\n",
+       "yeGkQyaRpuvIyxrnDE8+fcrzZ08oypy9t5yfXjCMIwCLRY2zIzKryPDoVEOIjKMjTRIW82O8kKis\n",
+       "RAiF8iO1THCmZ7IG4SQ2GkwSSZMUyHAcKlmcc2y214TBIpUnzObo4KjTlOrhW8QsQYSDJjyzC3x7\n",
+       "hZzNEQ8uuHz2MXfjHo9HJhUIR1GckicleaopkuSgFmksy6Xi08vvYPqRKTOkcSLGlmmsDgG7H8gL\n",
+       "zypPyKscMYVXDVCCVEMsJWmVESxEC7e7HTF2HHmJt54YKqSLpC5idh3XvcRJT3WmKOafaZ35OfDr\n",
+       "r1bbGvjvY4z/5LMc8J57flBeXzDfNaiyQK00m90LkqTgLH/MMN6B2pH5Gd1uR6GWZIs547hHhznz\n",
+       "VODdSFHUOK+RMXCz3pFXHi89sjJcrr8JPiNNI84YwjijjR7tIUmX6LKmzHu0kljTIvMBKRVpXlBl\n",
+       "mkePTnnx/A4TFcfVCWHa853nX+VULVlUF3iTMKkRmSis7Vmvn5EvxKEVv1zRTh/T2ZQkS6jKHi8m\n",
+       "nLVMg0IjmS1WEEbGYUBpuNtuOFbH5HlBURS4JEEIaPY7hM7QQpJlCmTCMAxkOifqhBhAKsdmu6dr\n",
+       "tpweLdjv7yi1QJqUXMdDA48U7Pd7lNaM/UhCQlHnxBgwbsJaR9OsWVRLYi3x+0tUqRH1Ejvu0VNA\n",
+       "FSXP3r+lSGruts8Bg0HQq5zhpWe1yEhUzbysEa5Hy4lEeRTxIBrmdoxG4iO4wWCVwDpL9JJxmOid\n",
+       "JTrI9ZIsTZjchHeC7WakaSPLVc44eMxgSFTOMo0sVc9tC9d3nsUDxTzXxGn6zOZsjPFj4Gc/swHu\n",
+       "uefPwGsL5jJX+Gi5fHFF8ILR3ZCqkixPUFlOkBKxiEyTZNe0SDGQaInpU4y3SNXhPQxmz6Z7SW0h\n",
+       "yTweyXr/AkVFxKCzAWdnOCkI3hC6FOIeERVWJExC0myfMqtz2mcdF/MHvHVxwWy+4l98+E2IkmV1\n",
+       "zt1H38SeV4ymYz9sODlOYCUJXaBaPsC5PXY/kBVz5nXF85efILsKIS45lSVlesQ07dmv18z7iTSN\n",
+       "r/w/A6uTE4QQbLc7Ts4fEmQgEpFphkcio8J6T1FVKOsRSUqaFnTdhLGR9eaWTMGTp09ZLmeoPDm8\n",
+       "1M0SovMgFEppnn70bWbzFUV6UGZM0gIlPEFCkhY4M+A//BbpW2/jmg26qtHaErodHz79FCN6erFl\n",
+       "NGuwBUJ1qGgYxh1958hLzW6dcJyXeBlIUkWzH5mJDBlGlJSUuqZPW6KI9F1HqhOCF/RNZOzhqJIU\n",
+       "IqW1EzEcGqCyLIPgGYYR02fE3DE7Elg7MtxGgrCoIiEEQzskr2tK33PPa+W1BfNsUaFsxvHyIVIn\n",
+       "fHp3SZ2uyLM51gV0luBsz8v1R+zGipPlkt44zHhoOE3agx9kWUhUIjDeEE2KlIK+ddR14PjoiG5r\n",
+       "UXnChCYKg1CRIlW0zQAiI00Ux/oxcVyy6TZ8uH+fx4++xF96tOL67iWZSkjSkvnxKU5Zrm4+oekH\n",
+       "klSQF55kVbAqj2m3kjAZZF/g9hWL5Cd5evcROpNUxUgJmHEgSQuII1pXWONJkgTnLG3bsjo6x3iQ\n",
+       "UeOdI0kzohAInaITgVQ5OpMImYFOSVOJcxv6fmTAM00TWV6gZMAwQqxYzWc0TcM4jATnKfKErJgh\n",
+       "M025PCZJNLOTBxTVgqg1Ion46yckjz9HHG4Q+QnS70gzxWbzlGV+Qls9Y7OFKi2QUTEMBePUIBXM\n",
+       "ZjUDAouBKPGjZkSRLmucCMRXkgn5IlKkmhg0UuekiWA/NhgNqZBEn+CDpSgLQgw4GzEe3ASzWWQU\n",
+       "hnbwtN4yP8opUnuQ17X3Lxzv+fHktQXzKp8TtSJPJciUaeeYnQuUmnO7u2NWBLqhJykkIU5M8pBA\n",
+       "dQLavWe4ukEKSVJ4gnSkmSChxDHy8rbhNAiWVc7ZaU2avkOZ5jy9+RZVrsBEprbHu8NqV0+QF59j\n",
+       "kT3ik91XsVNKmmoenp7TOYPSmrzO0IlARIEYBal6CO4SkQhQE3USKY4e8Oxmz9h3FNUZJ4sL/NCj\n",
+       "coHHk0rFOOzxdqSoj5mcZ3V8xtXVC07OLqgXC6ZhZFHXhBDph55ZmeF9RGnF6AM6Kw83AAsueIRS\n",
+       "qCTDjj1VuWQaGzJKkjRhbHvi7IgYLXjLxaO3kBKqIiHEwLC+Qh8tMWNDnuVMw0R1dAz9lrBfE6oF\n",
+       "/tnvo+ojpnZk1zQU+ozj5QWIHu8tWuasRM1m15AmKSLNWbctQhjKPCGRBcEJ3JiBaonRkCYjZZUi\n",
+       "ZSDPZgeRsCSjbSb6PpKqhHmSsZ9G8mTFbHnMMF3j7IidFFHCzky8uLaQ5eRzSFVg6MC/vil9zz2v\n",
+       "ldc48xURz25omcYNq6NzsrQAOZIlKW23J0lShDZEPOO4YV6cUFc5SvdkyQwzeoapxWGJviYKy0V9\n",
+       "wr/3q/8hZZ2zbl+g9cSj88+hkop/+tsTH3z4dQiODI3z0HYdp+kxQgyY2DBL5uTZChFmFNJy3TZU\n",
+       "RxlnJ1/AuGt6uyEmCRKN6JZ45WiTW1Yqo1ys2FjB9fNrvLyjKEqyrCC4HJVoRJIx9AMxBHwwyKD5\n",
+       "8OP3OTo6YZo83kW8dEzWgAg4H9h3E2VZkacpMYA1gUQrpqnHGMs0OVyQPH32jHfe/gLWWEY14sYd\n",
+       "3hiUCvS7O4iK5dHiUPoYI0RIyxxPgpQKR0a1OsJ3O+R8jjAD6ugtxMLzP/2jf8Rl9xH7bkNEYKUm\n",
+       "nV8TpiO0LBC6pDcVUmTYyePs4aYrBSRFQaITfLQMdiKfSVZHJ6hZgReQJhmPHr5LkZ5ihq9wfbkH\n",
+       "BdZbpmFgXmVoUtpO0HeRRKbEGGg7h8dzVDrmicCOEYLmIOx5zz0/fry2YD4OLcFL6kVNUhsYHCIK\n",
+       "QrDMqoKt39G6G5TKyFJNKj1ajZwefQklX9LpDjMWZL5i32wZpw7nc7TMOZudki8SHD3XN8+55IoQ\n",
+       "M54/v2W3NzAGTpOUbX+LX+bspxHlPmRIAqZp2d3dMq8CZ8u3+eDFP6M3AxcnX2S93zCGK2woyMqI\n",
+       "pj6o9U2CKwUP7J6z8xwhjpnaW6RUSGa4GBBKE0VE6xyRRG6fP+Ps4Vu0Tc/iWCOFZrPbkeY5xEhd\n",
+       "15jRUpYlfTdS1wusd2R5hnOBvu+RUjFNhs32hqIseHH5glkaGLc31HlONDs2MpKWGaujFSFahmHA\n",
+       "+5QqLxAyR+cKpWcM3R60JNf6IGxTzohmg5qfcHS24je/8g/YTDsm+5KuG3j7i6dQe+KYoIqCYpjh\n",
+       "nMMER9f1eCEhSqpZQpqkdMPEMPUs6opidYZSKWbaIrVkvniT6AQXp49p+/eZjCXRKUrk7Pe3CAp8\n",
+       "jEQvkamCCH3r0CqlLCRSCVIpSVXOYO6D+T0/nry+nLkQGCMZxxGtHbiRTbslyoTjkxllVjE1LfhA\n",
+       "qhV+mBjTjnldM7k5l+uPkDGHuEQpgSClLEs+efKCX/sf/j71yYzFbEE3XtMOVwzbwE2z4ez4hLLW\n",
+       "BDMxTRE/tYTB84VViZUJd3LBi6fvUc/+NR6cHjFPa6beIlUgxMDYTwgKjB1xWoHy6LxAGMHWWXKV\n",
+       "oArQusZMIy4acgWDH1HB402HGTxFOWOzvkZKjfee9d2afhz44pe+yNX1FUodFBG994QAZjKEEIgx\n",
+       "ZZoM1jqE8IzjwGRH7GSYFSVFpRk3e8rZDDHNURrqcsH6+gqpBDrRYDOkP2jgHB8/RvoBnVbQNoSj\n",
+       "GQRJdI5Ypahxy4sXVzRbw93aYqNhtThnHt7EpH+ALa9hqkmynCgNU9fTTi0yKqZB4k2FzSRD19K5\n",
+       "hizLGcaBIPa4cIN1PV9/r+PR6U8ymDvKKiUUCqJlMop2P6DlIWBnSUkiFV3n6ftAkjjSNMdrQ54X\n",
+       "jENP13zmQlvflx+V3diP0rbsL5oF2p8HfpQNX/8qmsdeWzCvS8GIYLe/5M1HX+Tl9TOsteyGPUoZ\n",
+       "lIpgJIMxSD8RRo0NA5vTK8y0P1Sz0DINPSos0CqSVTNknPGdjz9h/bsbfumXfpbFbEFvJGkpObI5\n",
+       "pRQEKaGqmMeUqMCnYDNFmqXkFnrXsGnWfP7sjAfLR3xw/R369inSQR5WOOd4cPQWWb3ibvOEIghs\n",
+       "qdhtb7kxd+RVTaYUk4QgDKMdGFNDGR3BDgQXCEJhjadeLJn65qA5M/Y8f/oMnWiuhETrlGEc0DrF\n",
+       "GEuMkcvLlxRFQd/3JEnCbrcjenB2YnF+Riot1CW5zvERYvRMU0OSaK5fvke9OMHrnGmQyLBkamdk\n",
+       "1RlN21AXCbqJiHqOWh4RXeTl+x/zye1ToisQtmdeFLx18RCFpGktWW6J7CjKithD8HtEEEy9xw4W\n",
+       "MUV8ZfHOgIRxWtO5lihbUqVIY2C7uWKePub49CHb7jssVgrweFcwNB1KWNT/y96bxdq2pfddv9HN\n",
+       "fq52d2effbpbdetW7zhuExuch7wQkfCCAi/IIrwhIBICxUSCNyLIAxCekECKIkTAAQnEQ4QAS7YV\n",
+       "B6fsclW5XL63bn/6vc/uVjP7ORoe1vZVxS7KVaaury3v38tZa+251tj7aKxvjPmN//f9SYijBBdr\n",
+       "mn6N1Jr5zIOxeATbqqbvDcjbnPktfzb5xGa+lrsy1TzSGFGymD7kyfNvkkk4e/mKOM3pWo+KNF3n\n",
+       "kMHjreH09AVFMUGPnijy9L5Fygl+6HH9gIoTjo7uECcZr148Y/GZEgZFiCLiaYWvR0ahiFxElpdo\n",
+       "ZamlZetG5DDg05RYONbtmm2zIc8SkGu8vGJvfh/nI6SSxElMFJUMzRWh3zCEhqbbUG02SF9xcnDI\n",
+       "B5ct1jmO70yQTmKDJE5KXHdN13dkxa7l7eAsUsVkacbjx0947bXXuLy8IssyDosjjImomwYpJV3f\n",
+       "07QtXduSpinee7y3xGmC946quWBSTmitZzlZcH3+kjgZieOY6XRvd2iZZGit6J1ExgUynSDbS7yX\n",
+       "yChCZCnepMhR8M6Tx1TXHf1YkeaaR4/uo5XgnQ/fwieBvX2Jjq8gaAIjfrCY3qONZhgFkTIYpZEi\n",
+       "gBKMotlVscaBNDIYEdMGR1HGeN9z5zhn2z8m1nPi1LLYjxnbgAgQa8U29OAscQqmzLGu2xlkaE2i\n",
+       "MqLydgd6y59NPrFgbmvP6CyXlxfMJp9ie33NLEtI04inlx4dYo4XE7JFRpSmrNcXbFcX9OECaQe8\n",
+       "0yjrsK1kCDUhCMbBIsWASmIO9vfRwWIbyb3l67y4fMFmK7hzcB/COeVsRRokr4ZX5Dk43WNUiQwZ\n",
+       "61XLph157/QJKpbcf/SQKHZY/wR0gfeSqh3oKsX66pLMSPJFQ9ELVBIxSxOIRuKoZXtu0MJQTEq8\n",
+       "Gnbqk0jincX5kTiJyIs5V+tLQlIymczxUnDx6pLDQ4WSO+OJKIoIITAMA03TYMfxJpA79K4F1a43\n",
+       "eZwggkArRTMMu26Tcmd+vVje3x0W2wGC5+69R6TTA0ZriZMEhUckBSGKEaPDVyPfeP9D3j37GgHY\n",
+       "ti2R0ZxfXRM2Dtopa1VTLmuMrAg+xriROIoYrSXKoCwycq24ajrQkthH9JuIPPV4FLKAg4MF2/YD\n",
+       "FrM7OB9TtxlPHr8kVxmFgSEonDTEdiQ1hotYoXRE3w708UBaahIj8aFhe/0H7GZvueXPBJ9YMD/f\n",
+       "rBEoQhh45+3fYHA1ZZ6jQoxRKVEUUajAJFfEWYGwirGGzeYSqQeIOlAxY7DU65pJPifJJPP5kg8+\n",
+       "fIfZYs6yXFDZDlNDcAqjInRUsFq9jTAVeZKRKo83ZyQ6Io6gHzSZueDCWd59ecZsep/5coKUjjKf\n",
+       "8d673yTLFBfX19S9Yy4OmZgpvfQcHEw4fWmwTtA6TVrmzPXAy/qUJJ+TxzHYgThJaOuaWEcYAb7f\n",
+       "oAOsry+I05LttSRgqesaQvioyU7f94ibsvy6rhmHkc1mg3WegKcoJsRipG7XLCZ32W6usMIzKyfE\n",
+       "MsZkKUU0wVrPdDrZ6cuzBRerFbFRZJMZQkeIfIIdPZcvHmPtiPMxl9eXfPq1Y7QSbFY1s3RG5Rqu\n",
+       "XjniLBBPK6Sx+MTRd9D0u+rTOAiGqkF5iyoEkSiJ9BR7vWa97bGzgSZao5TBDS9YzqfcmR+SDAWr\n",
+       "iwsmkwVxOUcEGNY1V3VFZiNWmy06DxSTgCJgm45upQnX2Sc1pW+55RPlEwvmPREMDUIN2LHBaMW6\n",
+       "bRiDoh8cYazpgqK96sjHDi0zBJrQlmzOO6K0YAwjyiiyuWZot0QqJdYpJycnfPjsfertFUmS4rzj\n",
+       "/smnKItHfPrR5zj7ypt86xvnbKbPmCxi+nSfQnikkaz9NbnU5GJgZVuuqnNG13N8cMLB8if40b/8\n",
+       "1/nNt/4xz1/8En3vIHakpDSjR+jA0f4hV+sVm8qhdcL+QtF3gWaoSbUiS3KaTYdWCmEHxOgQzlHE\n",
+       "GV5KZsuCdVWxnOyC7OXVFVEcU223GB2zrTcIIej7jr5v8bZDYCmzFO9GrHDEImK73SIFjM2WIdJo\n",
+       "bdBKE8UpSS4IdqeM6YaGavuK8vABQimCiQlBo4eWD86f8a0n34DIUM4Kyizi4uwVzgq61NA7Dy6i\n",
+       "q3vSvCJNYBOBkQHf7n4nOQRkUESRxEcabVKi0TEO0PU9UhWs64p267j3yGP9hpO9fe4dlnzptc+R\n",
+       "7e8RxZar6zXvv/8By3xGf14RpS2TzLBZrdk0I2OvEBvFUZp/rPP2xqzlvwO+wK63+d8IIfz6937X\n",
+       "Lbd8/Hxyp0ViiUgzgt0pWKIsxdcdVTOCU2wZuXxRka8MyzuBLOsZnN1VFQ4JXe8IkSDNPEIKetOh\n",
+       "shUmHpmqPRTvc3F+TeASrRLuH6Z84bN/kUkxQQZDbFK++a1rPv9wyvxwwYXdsrd/gpQCN1ZM8wc0\n",
+       "7bsY5RiGns11x+XsFX/hx36Oo8Ofp21qvvLbv8ZlX7F/0CC6CZfDisP5kjSOWdc9UR8T6RgXHALw\n",
+       "kaTfdMgAcZpSJBOq7ZoQLJPcMDrFxEiILLFqWZQRm9UFQkUYbShLydD3SClpmhqlBd5bhFAEtzOE\n",
+       "1q5jvXqF9zMODpaEsDs4Dd5igkcKQRZnuCjgvcWHiCyd0rYNyXyBKEtsvSFSMf/st3+L9foCgefB\n",
+       "/SWjdLz1wVPcWHJwoBgRJBk0a0WcefK8IzJgHBwtJ6jBE+yACIE4kXgMclSIrmEyj4n1jMtNy3gW\n",
+       "sw0V225FMcScXZxzkD6gHh3t5hVFHrNYHDGd3OXyckUnvsZR+jp91bA6FTRXklikpEaRlXPgzY9z\n",
+       "5v494B+HEP5VIYQGPt7V45Zbvk8+OZ15PTKbTBkGgZEtkckwxRQte5zzFHZkS8z1ao31DXuHChki\n",
+       "MIG+t+TaMjqFwzMqT1EaBllxsX4LM5xwZ+8BVfUm1+cOKTrefOd3+OyjHyGOYlAjD04WbF90uK5n\n",
+       "L2ie1JqklBzuP+D85W9QyJL97BHz6YRX2zOquufXf+tXMTrjU/de42D/AfeO3+X0+RUuDEzzY17V\n",
+       "G+p2S9M6pBSgDZebFolCxTWrBibeU5qESVEw9BvSLEcpSRxp1OgRviHXLZvrDXUfYW1g0DEyOLx1\n",
+       "1E0NgHUdwxjoRosioLVHigrnR4auYn5yF6MTetNCcNhxYLu5QCeGyOXM50uUTri8WpMmKVmkQCq8\n",
+       "d0Qy5v233iKWEbkWFKUhFoqXlxVWGJS0CBmjXEBqgTaWph2Rgp31nDZICUbHRE7hGfFBQW/wsSLf\n",
+       "P6BMY5pxZHN9RTbTHO4tKA80OsSEvsSbh5xf9Kzfu6ZcOh6+5jk8OOBYFZTZIZICN8R8+e7ILM3J\n",
+       "spw0itBa8d/+/V/5WOasEGIK/AshhJ8HCCFYYP2xDHbLLT8gn1gwH901Zxc1tmkZx5bFvkcoRd9Y\n",
+       "UDFhcJRE+CJhGAeqVUuexqRJiXcVOlfMEsPmvKUbBvq0J9jAQI1y53iVE5t4p5oRnqbZ8o33vsKd\n",
+       "zT26cU2u5xztLRBOUg3X7AXIo30e3P08q+ff5NXpKdl8n9kkpogKtr3FaM3/8av/M/iBz75+l8Xy\n",
+       "iGrbYN2IEXBv/0tIzinMyNg7rtsGoTxhbGmtRasAskQJtfPdlJKqqjg5ucfLF0/Zn0/JE4Uu75Cm\n",
+       "HW8/eYKvFd0AVgSqTU0cxyil6Lpd/jyEgIkVy9mURFouN69I4pz19QVGatJyinKWoW+Rg2Jzcckk\n",
+       "n6BEwAdPlme0TcMkWyCiCDlarp6+5Otvvs1X3/k1pguwoef0wwaXxszKJWVccLx/B+Ele8cHIGCw\n",
+       "I3cPDsijJXlc0DUj5+dPQAryLOd6dY0xMZPpnPlkSpxEhCCpNzXj0BGlGbOlwY0KERRxnFAkOSEE\n",
+       "xjDigiOLc/JZip0PdH1Pmkwo4xKRCMZhJDIR3n+sapZHwLkQ4u8DPwJ8FfibIYTm4xz0llu+Hz65\n",
+       "oiEtuK4rVpcbNAItL/FSIKTAiQhawRgkKpG7Ev4wYrRldCNFNqEoDYXRJMUrxkvBeuUZ+oEodozd\n",
+       "OaPo0FpRzlKGZkuWG56ePuZyfYr3DbUzGCkI3tLZiCjuOdy7y/HyhDeDJkci2kBbO9Iyw40tbW1Z\n",
+       "TnJOX17y/OV7nNx5gyw1hKFFRQOSOVKsSLzEjReUU9isKryUdDWEsOXOcon0Chc8bVMxnc45PX2J\n",
+       "CG4nvzMZWZmhVcz+4pCXlxu22w6VJHRtR6UkZTHBOcvOmhX2lksmWUpdXZIVGcQBjQU3oqXGO4cg\n",
+       "4MeRwa1ot1fIgyP60TFaTxRplIkQWoFQfPD+Bww+8PDgdbqwZRwh3u8wcYydG5blHpN8wmQyZ3+5\n",
+       "RAqFlIFJPiXKNLNyhnOWD58WGCNJopSu65FaMp9NcYMHLTBKM84GmqamLOYUkxQ3SpzvcM6SJDFp\n",
+       "WtB76NoaNw7Y0ZEkBUlaEMUxSRwxDgNd1zCOI0nysXqAanY2iv9OCOE3hBD/FfALwH/ynRf9/gKd\n",
+       "W7ehW/6o/CC2cZ9YMC8nBUEHGGc0m4aqtowOIh0whUYJSVAwX+YoExi3nrHZ0rmWPH+ADBO6MTBR\n",
+       "nvv7kt9+GuMHTWUbxlYy+i1FkjFbGMQcismcn/nJf4lf/covUW1iosgzyaeYwXJ+vWF6pMgzyfq6\n",
+       "obpqIfYs5lOmxZyRa2aFoSgSnp9/wKwoqFdXVPkpeRIxdoHN9hkqKskmJ4z+BVEUcbgouRIFT16t\n",
+       "maYFyuYIbxB6Zzgxn+6hlKDyjv3lkiwpiFJz09MlxgtJO1heXW4Jg0UbAwSGOEHpBILDGEWsY86v\n",
+       "TtFK0/cjRZrvrN+waO8x+ZQOCONAHCfgBmSwpFHGiIGhR2U5XhqE9aTTJYvBw8mXWOxN2V8ec3V2\n",
+       "Th8c221FHAnaukUZRb/dsNjbI0sKEIHM5MQmoR23PLp7l36wxFGEdRYpFV3X4qylzAqkUhitiZOY\n",
+       "JM1I0hTvLH3ryPKc4D29HRjGEaUEWid474mimDiO8N4x9CMhOJQSoMDzsVaAPgOehRB+4+b5/8Iu\n",
+       "mP9z3AbvW35Y/H4P2e/0l/39fGLBXDiJkoY80Wiv2fQdRkGaRUQxJJMUKQRD31Ove9za4hlRWlNV\n",
+       "H9A2SzITYbUg6A11K8hCwiJfcmU7Rueo/QDKURYj9x494M995kd5/OQxX391ho8MeZ7R25r19YiM\n",
+       "LJdXT3l6fsplf8VUzmjqDdYFAi33D+8xPzjk7Opd9g5Sxr6g70/JsteI51OabUPfr9nbO8EPNVoK\n",
+       "lDB42yOD5WBaIIaCWKfkWc7YbqmrLc717O3tI/yA1hFZkiOlJBBIkphJWVIWW07PL5iYBd57JuUc\n",
+       "k6YI71FS4IaOtukxsSMv5kgG0DHDMLLZXrIXG7K8JDYxaZqyd3hENDugNLqqXgAAIABJREFUbsB2\n",
+       "NYv5dOe4JAJh8BhtyCKNnM+Yl0sUgcOjA8pygrUWrTWXlxdsNhuur6/ZrLdIGYiimNlsijGGrtvl\n",
+       "9kPwdH1304ogYO2IlIJhGJBKkec56STFAzY4xn7XpTKKIrZNjRACYwxRFCGlxDlH37eMY78z/zYG\n",
+       "GSeUN56mf5hP4v8fQginQoinQojPhBDeZmdq/q2PbcBbbvkB+OSkiU2LDz0iRKADkZBIBVGqsENN\n",
+       "LyASiusXLU/O1izKgjSR2MSx3WzZrteU6YwsUmR7CcuDlvY6oOMJOgHVb3GjZLvq0Uby2r3PkZYl\n",
+       "1q1x9Aw+RUUGO/PEY4LzcH5V040XqIlnYCBIyXZ4Tj0E/AB1fcmkzNBKYNMYhMZ7hSdG+Ix2NWKO\n",
+       "NWV0jA4LosGwiJbMDzOKJGLQYISiqSuU79GpIY9nxEawWm85vjPF2hGlA2PvwI9kSUyWxIzjQF1X\n",
+       "xHFClCRMspzReYTvuT67ZH86RceKsamYz0pErlHDgJDQNQ3FdIbSknw+JZ8tCAiC7dE4PBavHaFt\n",
+       "qFYdfd/jXGBvecDl5RXjODCbTWjbljTNePDgAXFsuHv3mKbpuLq6QsrAOI689947lGWJMTuTCO8t\n",
+       "Su2aXymlKYoCpRQhBLTWH7UmCAi01gghGIeBYewRWiOUpu9avLW40YIQ6DhCCBiHjrapUPEu0GdJ\n",
+       "uqs0/Xj5d4H/QQgRAe8B/+bHPeAtt3w/fHLl/C4mZI6h7aiua5I0YbPu6CsPwhElFY0KJCIn8RGE\n",
+       "gEXhRosYwdmEy65iJRSHY8pkHtPpmihJKG3OZrVGKtAyR0jPJCl4+vQxZ6u3UNKzrTqkiTjZf8ho\n",
+       "HhO6EadGlBNk8ynr1TULjpDE5D5BhCnG7XE3SxnDiDGaJEqQ1hCrkiF1DPM1qkvIKOmTHonm5O6n\n",
+       "MFKzqTecnp4j8hgrBzbrLYvpguAFKs6wHqQSDENPbCRNvUFHMWm06/meZzmbzRopoWkauqYlTmKK\n",
+       "JGYymyBkYOx7cANVdcn+7ACpY4zRu3a6dmBSThgGi4pTvId1XRH7gLA9YXlEOLvm+fMP2VZ2Zyk3\n",
+       "9Fg3ghRUVY31jknf81ZbIaQgyxIImqapGUdLnme0bXvTPE1TliVZliGEYDabfHRgG0UxbdvuPEmv\n",
+       "r6mbmuADeVHu0jAhsNluyLKc4zvHRFm2W1TsSJqmmGAQQDt0aGVQIXD6/Bl1XZOm6cc6b0MI3wB+\n",
+       "4mMd5JZb/gh8cuYUE0UfT0nihhdPV8TakAjJer1FKcW0zBACdKI4OlnQD47KbtkrU6yGetuhdUQ/\n",
+       "9FxeDXghySe7isokiqhWHSLSlJFnkd7hg+ffQvmSzaonySXrq2sev3jJp+MHBKeI0wX72QE6naOV\n",
+       "wmaQmCkaRVRqZosDpNY01YbSRPjRs73eUBQp6TSiTDXDmNB3HYGAkBJCYL1a7W79hSDPU4wxeB+R\n",
+       "FXsE70mimA8+eJ+9xYzBWqrNNVrMMEpircf7QJJq3NjjrUNLSZZNSBNDCJ6m2rJdnXJnb5+h2yKD\n",
+       "RxDR91umkxnee4pyikQQlCbLM1ScEkTCcq6xtiWez/EhIRRHPL/8Oh9+8DYHBwc0TUuSpjtFSlRC\n",
+       "BFmcUDcViYkRGKSSHB0d0Q093nkmkwlVVX20M6/rXbql6zp0ZGibljzLODi8Q7WtkDqQpDFnZ2c0\n",
+       "XU2S5SAEry7Omc8dZVnQVBWnp6coE7G3t0ee7XLraZpgjN4tEIkhTmZ4/7EaOt9yy59YPjlpoteM\n",
+       "nWVSBr7wxWO++ZvXLBeG6MQgBgitZbpYomJBc9VSVS3SejJv8EWAkFCtepxXjMbRdYG95YRJNucL\n",
+       "91/jL7zxIxgdMQbHdJaTFYpVHfiR+3+JMjdEIiXRmuViyZce/DSpUZRlgbcwDD1j37EdOrDgvWMc\n",
+       "WqQVZHmMiWPOXp3jhdhJ4YQgBM/qJnCP40g39CymM7q+x0uBDND1A0JKTJTj+xHvKy6uLohis9tR\n",
+       "BsMwdPgAUkUoBKJ3aKmJtEFqxWy+JEtjlNoFscv6GUIE6rqmyDPKNCaMA8Eroiii7Rqq1RWzxR4E\n",
+       "jxKBkCWE4ph4mRHhIS6QQjCOF+zvL5B8mkAgBFAqQimFkz3aGuTUMInmJNpQ1zVSQZ6kaG3w3tP2\n",
+       "PQdHR8ymM9arFW3X0jYNp69e4b1nuVygIkVR5sxmM4ax5+LynEk7wboBqQKb7Zq22WCU4oV3hODJ\n",
+       "spjJfEaSaLquRmtF17YIIZjOFuRZyjCMNM34SU3pW275RPm+grkQQgG/ye4k/68KIRbALwIPgA+B\n",
+       "vx5CWN1c+x8BfwNwwL8XQvg/v9tnnj4/JSkduJyjoynn97YEJDayHGYZSmuurjckaYrsepLeIdOI\n",
+       "bqwxIiLNot0OeNshMku6N+WLd36Kh/deJ89ykjRmudhDCokgYN3IPGm4V2SMzoMKTIspxmiUVvRd\n",
+       "RxSlRJFmtI662mL6nZWa9W4XoLuGOI4Z2p0DfJ5nRNrQNA1JktD3PY+fPaUoCmazGU6A0IokTolM\n",
+       "TD7ZyYzGvscJy9jXuL7GOY8Umm11RZ4XWA/TIuN6swHAO49SgSTNkFHC+fkFgwuUeYq1I5O4wDmL\n",
+       "syNDL8ijhKA9bd+ilcEkKUoqotiQ5zNENsFve8RiD0kEAsDxwTvvcb2uqKoNAcd8vkeRpzjnGK3k\n",
+       "5dUlJyohzfTOws4Yuq6jLEvOzs85OztjsphxePcOIgSMMaR5iptNidOEEAJpmpJmMS+fP8eYCG0U\n",
+       "Sgr29hYIYLSWse85PrqDd4E03aWK1qsVl5fnDMNAmqSYKCKMFhNFAKw3G5zzH7c08ZZb/sTy/e7M\n",
+       "/ybwu0B58/wXgP8rhPB3hRB/6+b5LwghPg/8a8DngbvA/31z8v8H9DS5yphmESYzSBfx5T/3ab75\n",
+       "7feJrWLQA/eXE6Q0uLaBhaCYFFifkuQF+5M5B3uHoKBuO0bbY7Ti+PgBUaaw9DS9RW12u+qymCOF\n",
+       "YL5Y0LUR9XqFQODdgBeOLCmRaYaUOxmQVoLl3h7r1TWTyZTLyyuePnvGwcE+683mow6GAXZ53CTF\n",
+       "eY+JIk5O7pKmKX0/cPrqjHv3HjCbLHHWY51jGDp63+MdrDdrYhnIsgJjIpIIvB3QRjGOA1prQj8g\n",
+       "pWIcPUU2R+mEdvC8OnvJJi24e5gSXI2Rjmk+Z311wfwoo2rWWKPIpwV22C0+WhuivSVjMOjlQ2jX\n",
+       "kEYQJEIKDu/eJyjJi+cfcH15yTvvvs98PiEyMdO8IC4Krtan6MqQ58WuJsA62qYhS1O+9MUvMjpL\n",
+       "33Y8Oz0jyzKyPGXoB7RW5FmOHUf6qqHtO5x3hHqkrRukVvR9T5QkaKm4d3yX0Tq88yACy71DUJK6\n",
+       "rhidYzKbk98EcmkUOkkQUmAi80f+Mtxyy59m/tBgLoQ4Af4K8J8C//7Ny38N+Lmbx/8A+GV2Af1f\n",
+       "Af7HEMIIfCiEeBf4SeAPNCLSkSM3C4wZeXXZsr835Xh+yNOnL5AyRcklD/dTgotJ8gKpUoxJiOMc\n",
+       "KSBLExbLfUZruTg/53J1hvcjXXOjyxSCvqkRUjJJMtI8J4kURmUksWEYRrquwwXJYB0hQNeNWGfJ\n",
+       "85J2W7FabxiGgclkxny+ICtyiixjW1d457B2ZPSBbhh2O848w9qOYWjZbhuKYsJssk+apmzWK5QU\n",
+       "VNuKartmbAfSbMo8T4mUpm073OiJpMTZQB9GggsI4YhMYDqfkmYL3n7/A4be8elPvcb11SWr65r9\n",
+       "Ymet5+xInuV0o0OJgBtHvB9R2hClMVFkEHGJzA6gu2Tz7Ixock1y+ACCZnHygNnRMY+ffZtxcGhT\n",
+       "k6QZq+srvHc0Zy8QUhBHKbP5HOctWZJydvqcKErIsgylFHleEBvJ2Lc8OT9FCEFRFDTVFqUkl1eX\n",
+       "u/8vm6KFxHoHg2cyW5BlGVLKmwWx36VumgYhFVJJ7uwfEicp5XQCAvq+R0pDFGWIxMCto84tf0b5\n",
+       "fnbm/yXwHwKT73jtMIRwdvP4DDi8eXzMPx+4n7Hbof8BqnbLrJa0Q0+9rgg2MM3nzLKeNx58mcXy\n",
+       "mMLkaBPI0gLnHYP1RLFgGEYEAmsbvLMURYyODhmGgaqqiKKIpm2RwROs42i6wEhFLBVJkaFkhJQ9\n",
+       "WmuUUFR1Q13XlIvZRyL9YehRSjGfz7m6ukbJQLfZUHct4UbDP3YDWkukUNTbCk+g7Sqc9YBEaklR\n",
+       "lECg67qbQ0GBVBJrHWWeUbc1RAn7ywlNtUFqjx17MhXhlSQSEi0VUu7SGnXd0tQ9r16d0/ctyzKj\n",
+       "7TqyuMA6MErSdy2zyQSjY6wTxEaSpgV5URD2DnEvniOzKb/97pt85t5DksVdRGQI7HL1r9/7HMbv\n",
+       "cuJZYpjlE642aw4P74KA1fqKNElompq+H8nznNVqzfX1isPDA373d9/k4uoVh4f7LOZLDvYPePud\n",
+       "t/Desre3B+za+VbVhmk5JU1yRudo2w6lFEop6rreadDTFGUMWu4WB7yjbxuGrkMIQZZl9KLHjz10\n",
+       "Cik+Pp3598sPS+v+w7R6+2EWMv1JtqD7Yf6dvyep/WHwvYp9flif8z2DuRDiXwZehRC+JoT4S9/t\n",
+       "mhBCEOJ7inu/689iPSUIT191jG3LZTWw98ZD3vjUfU72HwCewe4KSJwfgYDSisjEpGmGMQbrLATB\n",
+       "Ym9GbCK6vmVbVVRVRdu2JElC0zScXrziUAlUbKDTWDd+JJNbbVcM48j1+optvaEsS4LddRqsqhrr\n",
+       "LH3f4pxnCBbLrn/2pChQQhBFGu/BOkccJbR9RpTEXFysmE53OXkfPEIIoigiigybjcWYmL7fkErD\n",
+       "wWLJdnNNHhdkuWazOmea3mfd1hgdE8c5ZZKw3taUeYmShrZtUSpQZDFFGpEWBU21QsWCSO8mtA8W\n",
+       "Sbgp1hnQOsaPCn14QnN2yvbqGndyD8JICAl4h1eC6cFdhm9/k65v6HrHvXsPme3t0dTVTk54BV3X\n",
+       "M5nOmRRTtFEcH9/FWov3ji996UtsNteIINk72MO6EaVjnn74gjQt+OxnPgNSsF6vubpaM19o5rPZ\n",
+       "TTfIBufcR8VCUkmG0aPSlCTftQno2oY0TRmGge22IoojlBLYMPJdMnq33PJngj9sZ/4Xgb8mhPgr\n",
+       "QAJMhBD/PXAmhDi6qYi7A7y6uf45cO873n9y89of4K03zzFhTRAje4cZh4tDxGg4PDxA6UDT9Bht\n",
+       "doHIO+xo0VGMiTTGmN0XHkGapeAtVTMwjiPL+YK+78myBKM01juuVyvatmU+n6O1IopipJREUcRq\n",
+       "syaWO2VI0zSEEDg7PUUbQ2DXP0VKuXuf1MTBgYDUaIQyNzt4g8ChleTk8JiqqSgmJUcH93DecnV5\n",
+       "QVEU9MOAtRYTJfRdQ55NWOQxbd/grcUav6uwlJp+6AnOIyO5e4/WbDdXrNdr2n6g71tSLfnSawvi\n",
+       "2NA3G1w/sh1G9mYl69WK+XROW23ZPzpCCo2e3AUjsFdX2LGhbRs22zVHbkAI8M4ihWDv5B6Hxyf0\n",
+       "T97j4mLNs2dP2JstOTw85Fu/8y1UgLHrmR3dwclA27bk+e7MwTlJluXkeYJzjuPjE/q+Z1LOmS2W\n",
+       "fPvtN6n7lk89/BTHR/fIsyl931JVFXmef1TpGUUR3ns2my3L/X3qbU0Sx6zXG7x3GBPTtjsrvV/6\n",
+       "lV/l//nK14kis8ux33LLn0G+ZzAPIfxt4G8DCCF+DvgPQgj/hhDi7wI/D/znN//+bzdv+d+BfyiE\n",
+       "+C/YpVdeB77y3T774NOCqU9Rg6QsZ+h8RplkDLZD9AJjFElyU8JNwNyUtjs3ArvS8DhOsdZSVRtM\n",
+       "HO1y4OOAHQfiOKYsS6LKoPLAMI40TY21ljzPGceRKIpYr3cHmmVZ4vG7cedzttstZbE7mJRS3BSj\n",
+       "+F2pfQj0/YjWu0VFKUldt4zjwPtPPqRtGtLpjKZtcN7dHKTO6LuWy8tLZtMJx5/6NCd3lmRJSpLG\n",
+       "vPzwd6hfvSA4h1QprbVExtD2PUkWI6RiGBu00URYlEw4mOeYKKJvamy9Zrm/oN62yChDS0XQGXmZ\n",
+       "MdjAfHlESA2SfUxZc/r0CT/+4z/N8+fvcv3qJcsHS6RWdOfPSA4eUs73WH/ztyjLKVkWc359wenV\n",
+       "OWkWk8S7BXWwu6KirmkZXvRoLXnw4DVWqxV932FMxDe+8Q329/eZLxe89vAhi1lJ2zYs5nOk3O2+\n",
+       "Q3AkSYLWmjiJGMZh12vFyhvJpkdLqKoNUsLV1Zo4jhnHESklP/vTP8mPfunznJ29ZBwtf++/+Qc/\n",
+       "hK/GLbf86eIH1Zn/XsrkPwP+kRDi3+JGmggQQvhdIcQ/Yqd8scC/Hf4/EmzeKlRuGHxHPXoOohyh\n",
+       "5U0Xv51Vmve74CmFYDopyPN8VyEZxx/toqWUZFm2U0aEQD8OEAJxkhBpw3ldk0cxeZrupIKw20Ha\n",
+       "AXxgMimJ4xjnPGWckqcpnkBZ3qEsJzvHHimYz+eMY0/f9ygh6PuR69UVfdviBfQ9JIkiSEmcpwTv\n",
+       "2Gw2pGnK1eUV42Cpmi1CeyIjOVjO6UcYfMf92T6f+/M/x+rijGfvvk19/ZyhrklMhI4MwQswMVGc\n",
+       "cxhHCAl9X7M3yzFCoGRMMpvTjxE626OcP0SlCbEC27e7Hbp1lMkE7wXj9orl/gEvnz+n7zrGvgM8\n",
+       "CEH94pL44AF5Pufw8ICqasiykg/e/xDrR47v3CMy6saLtEYaQ5oWLOZLhsGy3W4wRrFcnvDkyfvs\n",
+       "7+8OgN9+802qZsOnHj7ijdc/w7vvvcu5e4XREWmaMo4O7wNRrLH9iJUO7xxVtWVSTNCR2Uk4pSBJ\n",
+       "NRcX54QQKMuSx48/wLqdlHI2m/0Rvga33PKnn+87mIcQfgX4lZvHV+yaDH236/4O8Hf+sM+bRkv6\n",
+       "2uEAFadoJdDakKU5xmjyPP/odjsEdxPAa4wxCCEoyxJrHVIqIKEbetqhBx92QX8c2VYbFpMpzo6k\n",
+       "ecZiucR7z9XFKybFPrPJDJNESKVp+w7b7UrRFQJnR1ary5vbfsX5+cWuWCUyNFXNarXaybOV5MWL\n",
+       "F2TZhNUmMJ/PEUIhgG11hZL7OOd5/vQdeu8xUcqdgwNW6zX3Hz2kLGbEScLV9RnXFy/4whe+iDc/\n",
+       "xre+/lVcv2G0Oxmj84Khd+TzGC0U5WxCpiS1dUyLknxxwMHRPbTSPHjwgOlsTr3ZkhU5GQ7bbAlK\n",
+       "I6Vn3DY8u7hkbLe8OD3ns1+wBCTV2WPmJzMEAq1GPv3oU1xcX6CV4qd+/Mf55X/ya4AnzSaI4HDj\n",
+       "SNtb7t69Q1VVlOUUHwbmiz2ePXvO9eqag4NjlNKkWUpWJHzrrTe5c3KXOEq4eHVOkgV0pPDB8vTZ\n",
+       "c6pqy/2Hj9ibL+m6Yafy8Q6ld3cDhN87A3BY70hGy/0HjzDaEN8sxB8XQog3gP/pO156DfiPQwj/\n",
+       "9cc26C23fJ98YhWgn/2xL/PWm29y8eqcxSQCJZnNZ8wmC9IswVqLUgJjdk2VtNZovft1x3GnoGi7\n",
+       "jtm8oGs6ijxns92gI03fddi2ZX9vgQ/Qti2r7RVaBiwCpQ13Dg9JspwgoCinXJyfESJDnuc473He\n",
+       "c319zTiOhGBxbrfTbvqOqqmJpaYd+p1+fOjxUY80MVVVfXS3sK22SKE5v3jF4/c/ZLaccfd+wWK5\n",
+       "5Gh/ycHRCVEUoYBuLFFBcvrsW8wPHnF4chchH9FWWz547y2azlJ3DeNV2Jldi5x8f4/ZfMFy74DP\n",
+       "fvZz9P1AtVmhjMHEMXE+Mp0vCPMluh8gbABF0Boxej58+oTV9TUheMS44eLsBc1kwuHCsjef8Pid\n",
+       "bxJFu51zmsT8zM/8LF/72lcYBsukSImMYbNZEUWa/YN9zi+es16vefHiKRcXl6TJhNPTF4QQmM2n\n",
+       "rFZX7O0t+cVf/EU+97nPUzcNg+0Zx5Y0y5mUe9w5ekQ2KTi/uGB/b07wAR8Ce3tLhJC7Xj7dwP2D\n",
+       "uwitUEpjlMK5XXDPbrTnHwchhG8DPwogds3knwP/68c24C23/AB8YsHciIT7Jw8YnCWJCu6dnLB/\n",
+       "cIciT0jilL5vCSEghGBvbw/nHN77jwwIhmG4yV/vjJg3mw3jMCIMREoh0l2Ofb3dcnx8BwKM3vH2\n",
+       "229xeOcuNjgOjg6oqi2Xl6fYocOGQBwnrC6vGYaeKIro+xHnRoZhIEkShJLMZjO89WyaijzJef48\n",
+       "UJQTjImomhaA7XbL9fWaatvw/pP3efzhYxbbAz73+S+TJCnWw2Z1wWKxx3J/D+8HkgcPOX33q1w8\n",
+       "+4B0NkdEBclkQpxNcPYJzllC2MkcizwjSnIcAqUUTdPS1B15UbKta6wP3Dm5C8UEGRSYKYQFQXii\n",
+       "ZMrZ6Vd58t5zQPDhh08ZrefZ0ydMZiXj+hWbas1kMmfbbLHWsq0H7hwfUNdfpG0rlvNdOmN5cMjT\n",
+       "ly949PAR773f0zQN5+cXNE1HZFK+9vWv8tqj13n06FNcXp4Tgufk5GR3N5PmDG2HHQJluWQyKdHa\n",
+       "kJiIh/cf3LS7dUxn0xupqWMxWVLO2QV5AXmaU7c7BdM0L1B/fNLEvwy8F0J4+sc14C23fC8+sWA+\n",
+       "9JZ5sc/DezH7asLDk9fIEoUfPaYwBL8rygnwkVytKIqbsvmBEHrKsmAcBpqm2h16mgg3DvRdx2df\n",
+       "f51Xr15x994Jp6dnfOFzXyBNYg4Pj3j/6ZNd06e25dnzJwxDR6QSnp8+486dOyD4qMdKmiaMo8SY\n",
+       "Xdm+8IFuUyGjiOVswcuXp+R5SZblWLuT1HnnGUfLbDbDxBFf/tKX2V8cMPQ9680FVbXFaM1ZXe36\n",
+       "cd9IY/PZknXdc3j8iOXxXZyy9K1EqIIQHAJFuLljODm+R5rkfOb1N+i6gb4bQQiSJMVozdGdI7TQ\n",
+       "BASCGEQA4RFBErIpv/xrv8xgBwzw9d/+TZ6+eJc3PvNFZtMpV5sNv/rr/4Sf+6mfZT5fsFwu+Wdf\n",
+       "+adY+yH3773GaqW5OD/n3r37HN45Js4zvva1bzCZlMyme3St5+zsbcaxJ00zlNKcn58jiIgiRZaV\n",
+       "PLz3iP3DO4zBgXUYs7sDGMaeJElYr654/ORD9vYPWB68wfj8JderC9brNQd7B3jn0LFBxQGjDZGJ\n",
+       "qKqaovxes+6Hyr8O/MM/ttFuueUP4RML5i8uXjJJU3SSUCQT8jhitCPeOfpm53XprWX0DmstIgSU\n",
+       "UmitsdbuJIkBxmHEjY6x6/HjTgGyf7zEB0GaZ1SbDYvFHBNr1tWG7XrF/mRKU7d06Rbbj2hpEAKK\n",
+       "YkLTdDjnPiryURrqeiCKIiaTKVfWYaKYvh9o6pr5fI6UgaZtGQPIsJNLbuqGut5SypIiS3n48IS2\n",
+       "bZlOSuwYsDbgrEVpjQi7tgNRnjGZ3yEuFwgdU6Rz6vYKEWms9dRNzTCOxHFE19dE0e4g2HvP6EZM\n",
+       "pLF2pGsbgoNqdU4WxQjtAI24yScnScZP/tS/yNOn7xLC7m+7c3SfPM8Yx5FyUvDnv/zjqDjG+YH3\n",
+       "3n+bSTmnLAuc80zKCRLPwcEd3nzrTQ4PDpjPp1RVhdaag4M9NpsNIXju33/A4eEhxkTM5wvm8xlX\n",
+       "V1e7gi0l6Zue6XSCkprHTz5EysDh4RH1tmGzqfjMG18gjJ4sT7hcQd20mPX1rpgoaJq+pm07vPOc\n",
+       "nT3nnXevP/a5e9PL/K8Cf+u7/fw7Czt+v1PMLbf8IPyeqcv3wycWzN/+9jfBC+7dOeTktUPW22uk\n",
+       "UBwcHpCk8UfXlXEMN6oW5xwaj/SedrNh6DvGwZKkGU0IHB8fs1gsSJKED588Js0zlosl4zjStu2N\n",
+       "iYLeBeu25eLykmEYEEJ8lMbp+548zxFil7rZbDZkac44Oq6uzkiShDRN6Loe6yyR2vUUaeqGOM0p\n",
+       "JxlSG/bzhLLLuLy4wrmd0iZJEtp2p6ku8pw4NozDQB12Pp5KGu698WUcBhnHeLFbuJwXvDxd07Yd\n",
+       "m82WNE05OzslyzKMiYDAaEeUhDgyPHjwgGa9ZrG/T4hTBAHXrpBpgUAihOT47glDv0EIwfHxMWma\n",
+       "UZYTNpsNruk4WOxhreXs4gznHJHJePnyJcvlEuk922bg2++/xXSWE8SuU2GapkRRxDAM/MRP/OSN\n",
+       "5LMAwP2/7Z3Zj2T3Vcc/v7tW3dqrq7urt9k9nvEksRN7Ria2WSIICUJxhJBYJAiLeEICCSlA8g+A\n",
+       "eIEnXoAgCCgPBIjCIhRHSUQestnj8T6x25merbuqu6tru/v24+HWOBNrxp7p7ukyk/uRSn37Vtf5\n",
+       "favu6VO/+1vOSVJmZmYYj8dvXVtNU6nVqgxHQ5BQLlf4waU3uHptjWatjqpm1Y/eeO1FKqUaVaPI\n",
+       "wN1ird9jdXWVSqXM0tJSVii6WsU0ixyuz3MAfBx4Tkq5dasn72W1o5wfL97uS3F8+7KIUxwzB88P\n",
+       "Cd2EkT/g8rrggaMnUDUFTc0KHgBYho7j+QS+j23bk8nIFM/zME2TUqXMcDBCUxRM08DzXDqdTtY7\n",
+       "ktmbX19fR1UUkjgmSiN0zcQPfLzAzcbFw2xduhIq+J5LpWxQr2XZ91qtJkEQ0els0GrN0el22BkM\n",
+       "GQ2G1Go1fM8nmJRA6212iaVAVRWazSaOk62+yXZGplnKgEqFRE6WAY7HjC2L2WPH6ff7zLdnUY0i\n",
+       "BbPA2uUrtGZmiMKQ3nBAEPqkabamXVVVFDXLStjr92g2GhiaQZpAd3ObRrPF0HFozNZBMZBpgmJO\n",
+       "kiMCnmczHG9SNAuoujZ5ny1c18V1fUAyGF6jXq9nm52CkCROKBaL+L4PEp564km+9dy38dyISz+4\n",
+       "ysLCIsvLy3S7XaRM8XyfIImpNxpEYdb7H49Hk9wtJYbDIY7jYDsOxaKJqqoMB0Pac4u49piR41Aq\n",
+       "N1ldvUTgj9jRbWqNKqVKGdsb8/DDH8Qwsk1lqqoRxwlJKigUD2Sr+a8BXziIhnJy7pSpBfOlY3P4\n",
+       "ToqmlnBdhzBIOXnsBJ7r4LnZEIIEOusbuFEAKZP6kWBZFuXyHIaIn/ryAAAQTUlEQVSuU65U0DWd\n",
+       "8XiE49j4fsCZM+9nbI+JPJ9Ll9YoFEyEqpJEEbquE4URqqKSpNFkvbRLr9ejUCyxsrKM7/v4foTr\n",
+       "+Kz21qg1G5QLNaIoQjcMqrUGpaKF7TrMzc1hlUvEcYzr2lzvbJJKiRTQmmvhuh5RFDEaDLEsC9/3\n",
+       "uXr5KvVqBW80oF6vI1DZ2e5hlbI85cNxnygMGQ6HDIYDNrodwiDCskpomoppGszPzeH5HiN7QKFQ\n",
+       "QDcMSlaJjY1rFM0iw3gHoVVIhY8iVNIYUEARUCgUePPiRY4cPU5rdhaEpLvZpV6ro2lZAYuZ1jy+\n",
+       "Z7O1vUWaSsIoJApC0hSWVlY4/+xzNGdmqJebPProWfo7A8rlCorQuXZtHV3XsWoV7PH4ra33QmS3\n",
+       "jWEYvnX76Hs+l9cuE8Uxs60WjuNjmCZRLJip12lU62z3NpiprFCrFVD1mPZCm9HAwfFHFMwixWKB\n",
+       "UqlMoVjGtu176rdCiBLZ5Ofv3dOGcnLukqkF80pjHtWIsCIdmUKxoGGPxygCTNNgY+Mavp/lSGk0\n",
+       "myiKwLJKQLZb0DB0FKnx/R+8TqVQpl6vIaXk5MlTvPzSy5CGGKUiiBTP9yhYRWIhEanEKhWQEnTd\n",
+       "IgxjZmdn8TyXwIvpbW3TG/RpNRsULRPDNImDgEsb67TqdQ4fXpnUyAxoV+aoVMqoqqBYK2PrGkIq\n",
+       "9Ho95mdmiKMEvZQNDzWrVcaOjUAhSGLOv3A+u5uolFHVV+h0NihUSoDEsX0Mq8irL13kjUurPP/d\n",
+       "7+GHPsVSgYKqE0QRcaIQRgGL7WV6wz6WVcAPFSTgBQ7zK8skSoDqxUjNAD8CvYSUkIYjCkWLMPTY\n",
+       "3lzHKlZQBMQli1qtzE6vR63epFKpTm7zBK1mE7NYxB6NcG0PpZXt3kzigAsXzlOtNfCDrBTc4tIc\n",
+       "pmkgZTZm7DjO5M4ky+1yo2RcqZQVqMiGSvS3vlhnZmYYDseUrTpxGmPbPp3NZxFxyvr6Omfe9wHc\n",
+       "0YjWbJvR0GZrawtVVVnvXOfw4SP31G+llA7QuqeN5OTsAjGNDGhCCPnrv3WOOFAwU4tapY4hVI4c\n",
+       "OUKtVidNJGHkoSoG1Vo5mwxNU6IopNmsE4Yho5HNwvwsb169zqHFRUql4mQseQuhqoydIWkQYJUq\n",
+       "BH6QFaFQNQajHQwj2/ofBgnVWok0kdl4tOMxM9OiWquyfv06J06coN/rkQYuaAVG9gjT1FEUBd8P\n",
+       "aLWaWEUTz/MxTRPTsIiiGMdxuHR5jUYjS7RVq2WFmp2xS9Es4EYeg8GQi6+/ThBFPHD0JL7jc2hl\n",
+       "hSQRhNLHMAq88eqrXLl+BUjRFIVCvYIqwSwUUUVKpdygUW9hWAWKps7h5RUura1x9rGzLLTnGY6z\n",
+       "sflCscjqm5eYaTZYOn2WqL9Gb2MTNwhIZIKhF2m3l3DtEY5nUy5ZmJbFdrdLo9ZAKApB6ANwfX2d\n",
+       "2XYbkSRZeb96C103GI16uI6DlJI0TWk0GkjIvqAVhY2NDRzHfmvuoFyuUKtVcV3vrQRbpVKJIPCp\n",
+       "VLJ6od3ONjKN8UOfcq2GORmyGjsuIo7RdJPxeEynu87Zs2fZ6Q/Z2enxsV/+XaSUU5l1FELI/cq2\n",
+       "l2dNvHvu96yJcRzf1renFsyf+PAySlmnpJY4MnMEqQgeOnmaWrVKYTKGmuVfMTANC9e1ieIQyyqw\n",
+       "szMgjmMajSalkkUSRkRpjKpmvWBF0ehtbRGmAZVyGU3TaDab9HZ2svqecYTvRVzd6PK+UycRQuC6\n",
+       "Lv3hiAceOMlmr4euawgE1VoJXdMomgU2utvs9Dcpl0tUihalcpkkjvGikG63y3BnxPHjR1hcXKTT\n",
+       "6XL58hqVSiXTaZm8/NJFHjv7CK5jkwpBzaowdl2e+cY3WVhYYLjTx9R00BWGg/FkJ2uY5Q23ijQq\n",
+       "dWzbxvbGRJHCznaH2E9RNEGpVkcVKnPNeY6dPEx7rsX1a1ewSmVURScII0bjHk9/8pewhzaKIjBV\n",
+       "hUSCF0SYpskLL1/ksUfeTxC6jEYjFEXFdWxKeoEwjpiZbzMej5Fpwvxim+2tbdoLi+hatkN17dIl\n",
+       "PNvBdmy2elscOnSEGxkgVlff5PTpU5P0CUU2NjbY2Njg0Ucfpd/vZ/lywoTvnn+eB08cQdN0zFIx\n",
+       "e860SOIE1x4TBB6u5/HG62u05mY4d+4cxaLF6xe/j2oqVCs1zn7kk+/pYH5j/8S7cSf/m3dq607b\n",
+       "O2hdd8Ld2Hq3v7sbW+92HW+kG7kT3i2Y36mtdwrmUxtm6Yx8KkmAXjEpFgosrqxQq9ao12okMpsw\n",
+       "NAwDVdVw3DGappKkWR7whYV5rl/fwHWzWpB6sUBZs+j3tomiBEVRaM5WUYQOStY7XFtbo9lsUq1W\n",
+       "s4k4xeDl197kkfedIUli5ubmiJIU13WoVirU6w3SNCGIPApGdmfQmq2zuDRLr9djq9OlaFkYZoGR\n",
+       "69Cen0dTFBYWFhkOhzSaNZrND9LpdEiSBNt2SCe92UZzBtu22e5vUSqV6G4OePrjH8H3fQqWheeF\n",
+       "jIZOVtezXsUej+lv73DixFFc1yNMYjwvRIgH6A9GjMdjqtUqg+EYL7K5fOUS29tbWeKwYonZ2SZL\n",
+       "y4ewqgViL6A4ySgZeC6abrAwM4OMY5752jf40MMPEsdxVj/U89FUHS8MWFxZ5vKVKyDANExs28EL\n",
+       "PF568QLD4ZiHzpzGtm38MKHWbFGp1mkvLOB6DvZ4xJkzD7G1leVT2dxc5djRk/hewuW1q7Rm6nQ6\n",
+       "HYLQ55mv/y8njq5gj12UQZ9XXn2NuYUFDFNH07J0woZV4tyHH6fVbNDv91ldXWV5eQWhSpLkvV/Q\n",
+       "eVqBLre1v7buJpi/G/vRqZ5aMFeVENcXzFgpC4eWWWgtULQMUCRKIhCqQhgGFIsF0jRhMBiyON9m\n",
+       "Z9gnTWFubi7LkpjEpK5Lb5xlP1RFQprGCFEiCD2iKKVolTHMYpYbfVJ0IvBDojAk9G0GdpZKtVAo\n",
+       "ZDlhNB1JSCpjqrUqoZ+tnCloBltbXUzTZLY9T384pGgWUVMFTQiqlSqbm13m5udQFEkSZ9WLymUN\n",
+       "xwmYW2ijmwYCQalUojXTwHF9VF0nihKsUgXbGRL4KSoppXKRoqHT9z0KBR3f9wgDD1XTKBRUoiDk\n",
+       "A6dOopsFVE3FHu9g6EVs16WgFzh06DCNRoPu5iZJGHH+Wy9w6vQZlg6fIRluo2s6iq7juR6qIjB0\n",
+       "gziIaLVmJ2lrJVHgougmq2++iSJMlg4fpt/bZGfgoBsmiply4vQh4jjh2PFjrHc7HDp8mI3L6wxG\n",
+       "faLAp92eYzzOvpw0TePkyQfZ2dlmcWmWzc0tLjx/geZMnbE9RqYpa1ev8cgHHkamKYmiU6nUmGvN\n",
+       "E4cenc51Qt+FJMAPPYqWSV3WKFgGWzs7iPdAcYqcnGkwtWBeqht4TsLDDz7MkYUVUpEwGAwAMDQN\n",
+       "FEEcx9mSQUUllZLVtUsUzCz3hqZLhKJmOa8HfVqtVvZ6TUchJU2zhEzlSo0gCJBSEoYhCTqqVCmV\n",
+       "KkipsLJyiLLrkKYSzTBxHAdVNybFETRMM0HTtEl5MoV2u43vB1QqFUghDLJVHp4/5tDKCmPbpmCa\n",
+       "WUEGM8WyClne9GGfU6dOQypJNUGqSBzXBQG6pjE33ybwszXv1YrgytoVZtrzrF+9RhAEHD9+nCDM\n",
+       "inVomkarXmdpaZFr16/gOEMSoFQos7K08lZecMuyGI1GLC0s0tnc4MFTZ5hdWMbtbaGbBaIoQAlT\n",
+       "LKuIYugohk51ps5oOMLzXLa3t4l9h3KjxcJCm972Dooe4acRzXoVRSTYtouqgBemdNcuc3j5KDLV\n",
+       "WD60TLezwcrKUa5fvcLs3Dw/8XgTz/NIZczy8iGee/Y5Hjt3jv/8n//micefZH7hGEXrOaqlGrVq\n",
+       "i/F4xJFDx9jobtBolOl0xmi6he0FjIYOplWlVqtRa7Wp1aoI3UDbx3HOnJz/T0xtzPzAG835sWKa\n",
+       "Y+bTaDfnx4f31ARoTk5OTs7+kg8w5uTk5NwH5ME8Jycn5z7gwIO5EOJjQoiLQog3hBC3zDq3T+18\n",
+       "TgjRFUK8dNO5phDiGSHE60KIrwgh6jc995mJpotCiI/uo44VIcTXhRCvCCFeFkL8wTS0CCEKQojv\n",
+       "CCEuCCFeFUL82TR0TOyqQojnhRD/MS0N02K//P9W/r0HW7f00V3auqWf7VHfj/jLHuysCSFenNi6\n",
+       "ZW3iu7BVF0J8UQjx2uR9Pr5LOw9O9Nx4DHf9+ctJGa6DeAAqsAocgSyVNnD6HrX1FFlVmJduOvcX\n",
+       "wB9Pjv8E+PPJ8UMTLfpE2yqg7JOONvDI5LgMfB84PSUt1uSnBnwbeHJKOv4I+Gfgy9O6LtN47Kf/\n",
+       "38q/99tH99PP9tNf9mDnEtDcp2v5D8Dv3PQ+a/tgUwE2gJXdvP6ge+bngFUp5ZqUMiKrp/j0vWhI\n",
+       "SvlN4O3JrT9BdhGY/Pzk5Php4AtSykhKuUb2D3dun3R0pJQXJsc28BqwNCUt7uTQIAss/YPWIYRY\n",
+       "Bn4B+Ft+mMjxwD+LKbFv/n8b/94Vt/HRxT3Ye7uf7ezW1m38ZS/s2YYQogY8JaX8HICUMpZSDves\n",
+       "bI/Vqw46mC8BNwu9Njl3UMxLKbuT4y5wI/n14kTLPdUlhDhC1pv6zjS0CCEUIcSFSXtfl1K+MgUd\n",
+       "fwl8Grh5f/NUr8sBMm3/f1fe5qO7tfF2P3t1D5Ju5S+7RQJfFUI8K4TYS9bLo8CWEOLvhRDnhRB/\n",
+       "I4Sw9kHfnqpXHXQwf8+sg5TZfc076dlXrUKIMvCvwB9KKcc3P3dQWqSUqZTyEWAZ+EkhxM8cpA4h\n",
+       "xC8Cm1LK57lND+mgr8sB857WPvHRL5L56K5zCd/Cz356l3re1V/ukieklB8kKy7y+0KIp3ZpRwM+\n",
+       "BPy1lPJDgAP86V6EiR9Wr/qX3do46GB+HVi56fcVfrTnda/pCiHaAEKIBWDzNrqWJ+f2BSGEThbI\n",
+       "Py+l/NI0tQBMbgn/C3j0gHV8GPiEEOISWXGHjwghPn/AGqbJtP3/ttzko/90k4/uiZv87LFdmriV\n",
+       "v/zjHvRsTH5uAf/O7ofsrgHXpJTfm/z+RbLgvhfesXrVnXDQwfxZ4AEhxJHJN9GvAF8+wPa/DHxq\n",
+       "cvwp4Es3nf9VIYQhhDgKPADsabb7BkIIAfwd8KqU8q+mpUUI0bqxSkQIUQR+Dnj+IHVIKT8rpVyR\n",
+       "Uh4lu6X8mpTyNw5Sw5SZtv/fknfw0d3Yup2f3TW38Zff3KUuSwhRmRyXgI8Cu1oJJKXsAFeFECcn\n",
+       "p34WeGU3tm5i79Wr9mNm9y5nbD9ONlu+CnzmHrbzBWAdCMnGKX8baAJfBV4HvgLUb/r7z040XQR+\n",
+       "fh91PEk23neBzKmfBz520FqA9wPnJzpeBD49OX/gn8nE9k/xw9UsU9Ewjcd++f9N/h3c8O/99tH9\n",
+       "9LP99Jddvv7oRNMF4OW9xh7gYeB7wAvAv7GH1SxACdgGKnvRlG/nz8nJybkPyHeA5uTk5NwH5ME8\n",
+       "Jycn5z4gD+Y5OTk59wF5MM/Jycm5D8iDeU5OTs59QB7Mc3Jycu4D8mCek5OTcx+QB/OcnJyc+4D/\n",
+       "A43ph1xlbAoPAAAAAElFTkSuQmCC\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x12666be50>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline\n",
+    "\n",
+    "# load input and configure preprocessing\n",
+    "im = caffe.io.load_image('images/cat.jpg')\n",
+    "transformer = caffe.io.Transformer({'data': net_full_conv.blobs['data'].data.shape})\n",
+    "transformer.set_mean('data', np.load('../python/caffe/imagenet/ilsvrc_2012_mean.npy').mean(1).mean(1))\n",
+    "transformer.set_transpose('data', (2,0,1))\n",
+    "transformer.set_channel_swap('data', (2,1,0))\n",
+    "transformer.set_raw_scale('data', 255.0)\n",
+    "# make classification map by forward and print prediction indices at each location\n",
+    "out = net_full_conv.forward_all(data=np.asarray([transformer.preprocess('data', im)]))\n",
+    "print out['prob'][0].argmax(axis=0)\n",
+    "# show net input and confidence map (probability of the top prediction at each location)\n",
+    "plt.subplot(1, 2, 1)\n",
+    "plt.imshow(transformer.deprocess('data', net_full_conv.blobs['data'].data[0]))\n",
+    "plt.subplot(1, 2, 2)\n",
+    "plt.imshow(out['prob'][0,281])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The classifications include various cats -- 282 = tiger cat, 281 = tabby, 283 = persian -- and foxes and other mammals.\n",
+    "\n",
+    "In this way the fully connected layers can be extracted as dense features across an image (see `net_full_conv.blobs['fc6'].data` for instance), which is perhaps more useful than the classification map itself.\n",
+    "\n",
+    "Note that this model isn't totally appropriate for sliding-window detection since it was trained for whole-image classification. Nevertheless it can work just fine. Sliding-window training and finetuning can be done by defining a sliding-window ground truth and loss such that a loss map is made for every location and solving as usual. (This is an exercise for the reader.)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "*A thank you to Rowland Depp for first suggesting this trick.*"
+   ]
+  }
+ ],
+ "metadata": {
+  "description": "How to do net surgery and manually change model parameters for custom use.",
+  "example_name": "Editing model parameters",
+  "include_in_docs": true,
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "python2"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.9"
+  },
+  "priority": 5
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
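The notebook above mentions pulling dense features out of the fully-convolutional net. A minimal sketch of what that looks like in Python, assuming `net_full_conv` has been built and forwarded exactly as in the notebook cells (the blob name `fc6-conv` comes from the prototxt added below):

    import numpy as np

    # assumes net_full_conv from the notebook above, already forwarded on an image
    # grid of fc6 activations: one 4096-d descriptor per sliding-window location
    feat = net_full_conv.blobs['fc6-conv'].data[0]
    print(feat.shape)                 # e.g. (4096, 8, 8) for the 451x451 input

    # descriptor for the top-left window
    descriptor = feat[:, 0, 0]
    print(np.linalg.norm(descriptor))
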
diff --git a/examples/net_surgery/bvlc_caffenet_full_conv.prototxt b/examples/net_surgery/bvlc_caffenet_full_conv.prototxt
new file mode 100644
index 0000000..3c95197
--- /dev/null
+++ b/examples/net_surgery/bvlc_caffenet_full_conv.prototxt
@@ -0,0 +1,216 @@
+# Fully convolutional network version of CaffeNet.
+name: "CaffeNetConv"
+input: "data"
+input_dim: 1
+input_dim: 3
+input_dim: 451
+input_dim: 451
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  convolution_param {
+    num_output: 96
+    kernel_size: 11
+    stride: 4
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "conv1"
+  top: "conv1"
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "pool1"
+  top: "norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "norm1"
+  top: "conv2"
+  convolution_param {
+    num_output: 256
+    pad: 2
+    kernel_size: 5
+    group: 2
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "pool2"
+  top: "norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "norm2"
+  top: "conv3"
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "conv4"
+  type: "Convolution"
+  bottom: "conv3"
+  top: "conv4"
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    group: 2
+  }
+}
+layer {
+  name: "relu4"
+  type: "ReLU"
+  bottom: "conv4"
+  top: "conv4"
+}
+layer {
+  name: "conv5"
+  type: "Convolution"
+  bottom: "conv4"
+  top: "conv5"
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    group: 2
+  }
+}
+layer {
+  name: "relu5"
+  type: "ReLU"
+  bottom: "conv5"
+  top: "conv5"
+}
+layer {
+  name: "pool5"
+  type: "Pooling"
+  bottom: "conv5"
+  top: "pool5"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "fc6-conv"
+  type: "Convolution"
+  bottom: "pool5"
+  top: "fc6-conv"
+  convolution_param {
+    num_output: 4096
+    kernel_size: 6
+  }
+}
+layer {
+  name: "relu6"
+  type: "ReLU"
+  bottom: "fc6-conv"
+  top: "fc6-conv"
+}
+layer {
+  name: "drop6"
+  type: "Dropout"
+  bottom: "fc6-conv"
+  top: "fc6-conv"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc7-conv"
+  type: "Convolution"
+  bottom: "fc6-conv"
+  top: "fc7-conv"
+  convolution_param {
+    num_output: 4096
+    kernel_size: 1
+  }
+}
+layer {
+  name: "relu7"
+  type: "ReLU"
+  bottom: "fc7-conv"
+  top: "fc7-conv"
+}
+layer {
+  name: "drop7"
+  type: "Dropout"
+  bottom: "fc7-conv"
+  top: "fc7-conv"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc8-conv"
+  type: "Convolution"
+  bottom: "fc7-conv"
+  top: "fc8-conv"
+  convolution_param {
+    num_output: 1000
+    kernel_size: 1
+  }
+}
+layer {
+  name: "prob"
+  type: "Softmax"
+  bottom: "fc8-conv"
+  top: "prob"
+}
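This prototxt only declares the fully-convolutional architecture; its weights come from the net-surgery transplant performed in the notebook above. A condensed sketch of that transplant, with the model paths as assumptions (adjust them to your checkout; the reference CaffeNet weights are fetched by the model zoo scripts):

    import caffe

    # paths assumed; original CaffeNet with InnerProduct layers fc6/fc7/fc8
    net = caffe.Net('../models/bvlc_reference_caffenet/deploy.prototxt',
                    '../models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel',
                    caffe.TEST)
    # fully-convolutional counterpart defined by this prototxt; layers whose
    # names do not match anything in the caffemodel are left uninitialized
    net_full_conv = caffe.Net('net_surgery/bvlc_caffenet_full_conv.prototxt',
                              '../models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel',
                              caffe.TEST)

    for fc, conv in zip(['fc6', 'fc7', 'fc8'], ['fc6-conv', 'fc7-conv', 'fc8-conv']):
        # the inner-product weights unroll directly into the convolution kernels
        net_full_conv.params[conv][0].data.flat = net.params[fc][0].data.flat
        net_full_conv.params[conv][1].data[...] = net.params[fc][1].data
    net_full_conv.save('net_surgery/bvlc_caffenet_full_conv.caffemodel')
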
diff --git a/examples/net_surgery/conv.prototxt b/examples/net_surgery/conv.prototxt
new file mode 100644
index 0000000..9444c63
--- /dev/null
+++ b/examples/net_surgery/conv.prototxt
@@ -0,0 +1,26 @@
+# Simple single-layer network to showcase editing model parameters.
+name: "convolution"
+input: "data"
+input_dim: 1
+input_dim: 1
+input_dim: 100
+input_dim: 100
+layer {
+  name: "conv"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv"
+  convolution_param {
+    num_output: 3
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
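A quick pycaffe sketch of the "editing model parameters" this single-layer net is meant to showcase, assuming it is run from the examples/ directory as in the net_surgery notebook:

    import numpy as np
    import caffe

    caffe.set_mode_cpu()
    # assumes the working directory is examples/, so the relative path resolves
    net = caffe.Net('net_surgery/conv.prototxt', caffe.TEST)

    # weights are (3 output, 1 input, 5, 5); biases are (3,)
    print(net.params['conv'][0].data.shape)
    print(net.params['conv'][1].data.shape)

    # overwrite the Gaussian filler by hand, e.g. with a box filter
    net.params['conv'][0].data[...] = 1.0 / 25
    net.params['conv'][1].data[...] = 0.0

    # forward an arbitrary input through the edited net
    net.blobs['data'].data[...] = np.random.rand(1, 1, 100, 100)
    out = net.forward()
    print(out['conv'].shape)          # (1, 3, 96, 96)
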
diff --git a/examples/pycaffe/caffenet.py b/examples/pycaffe/caffenet.py
new file mode 100644
index 0000000..06c5a02
--- /dev/null
+++ b/examples/pycaffe/caffenet.py
@@ -0,0 +1,55 @@
+from __future__ import print_function
+from caffe import layers as L, params as P, to_proto
+from caffe.proto import caffe_pb2
+
+# helper functions for common structures
+
+def conv_relu(bottom, ks, nout, stride=1, pad=0, group=1):
+    conv = L.Convolution(bottom, kernel_size=ks, stride=stride,
+                                num_output=nout, pad=pad, group=group)
+    return conv, L.ReLU(conv, in_place=True)
+
+def fc_relu(bottom, nout):
+    fc = L.InnerProduct(bottom, num_output=nout)
+    return fc, L.ReLU(fc, in_place=True)
+
+def max_pool(bottom, ks, stride=1):
+    return L.Pooling(bottom, pool=P.Pooling.MAX, kernel_size=ks, stride=stride)
+
+def caffenet(lmdb, batch_size=256, include_acc=False):
+    data, label = L.Data(source=lmdb, backend=P.Data.LMDB, batch_size=batch_size, ntop=2,
+        transform_param=dict(crop_size=227, mean_value=[104, 117, 123], mirror=True))
+
+    # the net itself
+    conv1, relu1 = conv_relu(data, 11, 96, stride=4)
+    pool1 = max_pool(relu1, 3, stride=2)
+    norm1 = L.LRN(pool1, local_size=5, alpha=1e-4, beta=0.75)
+    conv2, relu2 = conv_relu(norm1, 5, 256, pad=2, group=2)
+    pool2 = max_pool(relu2, 3, stride=2)
+    norm2 = L.LRN(pool2, local_size=5, alpha=1e-4, beta=0.75)
+    conv3, relu3 = conv_relu(norm2, 3, 384, pad=1)
+    conv4, relu4 = conv_relu(relu3, 3, 384, pad=1, group=2)
+    conv5, relu5 = conv_relu(relu4, 3, 256, pad=1, group=2)
+    pool5 = max_pool(relu5, 3, stride=2)
+    fc6, relu6 = fc_relu(pool5, 4096)
+    drop6 = L.Dropout(relu6, in_place=True)
+    fc7, relu7 = fc_relu(drop6, 4096)
+    drop7 = L.Dropout(relu7, in_place=True)
+    fc8 = L.InnerProduct(drop7, num_output=1000)
+    loss = L.SoftmaxWithLoss(fc8, label)
+
+    if include_acc:
+        acc = L.Accuracy(fc8, label)
+        return to_proto(loss, acc)
+    else:
+        return to_proto(loss)
+
+def make_net():
+    with open('train.prototxt', 'w') as f:
+        print(caffenet('/path/to/caffe-train-lmdb'), file=f)
+
+    with open('test.prototxt', 'w') as f:
+        print(caffenet('/path/to/caffe-val-lmdb', batch_size=50, include_acc=True), file=f)
+
+if __name__ == '__main__':
+    make_net()
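One possible way to consume the prototxt files written by make_net(), assuming you also write a solver prototxt that points its train/test nets at them and that the LMDB placeholder paths above are replaced with real databases (the solver file itself is not part of this example):

    import caffe

    caffe.set_mode_cpu()
    solver = caffe.SGDSolver('solver.prototxt')   # hypothetical solver definition
    solver.step(100)                              # run 100 training iterations
    print(solver.net.outputs)                     # the auto-named loss top(s)
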
diff --git a/examples/pycaffe/layers/pyloss.py b/examples/pycaffe/layers/pyloss.py
new file mode 100644
index 0000000..6200e6b
--- /dev/null
+++ b/examples/pycaffe/layers/pyloss.py
@@ -0,0 +1,37 @@
+import caffe
+import numpy as np
+
+
+class EuclideanLossLayer(caffe.Layer):
+    """
+    Compute the Euclidean Loss in the same manner as the C++ EuclideanLossLayer
+    to demonstrate the class interface for developing layers in Python.
+    """
+
+    def setup(self, bottom, top):
+        # check input pair
+        if len(bottom) != 2:
+            raise Exception("Need two inputs to compute distance.")
+
+    def reshape(self, bottom, top):
+        # check input dimensions match
+        if bottom[0].count != bottom[1].count:
+            raise Exception("Inputs must have the same dimension.")
+        # difference is shape of inputs
+        self.diff = np.zeros_like(bottom[0].data, dtype=np.float32)
+        # loss output is scalar
+        top[0].reshape(1)
+
+    def forward(self, bottom, top):
+        self.diff[...] = bottom[0].data - bottom[1].data
+        top[0].data[...] = np.sum(self.diff**2) / bottom[0].num / 2.
+
+    def backward(self, top, propagate_down, bottom):
+        for i in range(2):
+            if not propagate_down[i]:
+                continue
+            if i == 0:
+                sign = 1
+            else:
+                sign = -1
+            bottom[i].diff[...] = sign * self.diff / bottom[i].num
diff --git a/examples/pycaffe/linreg.prototxt b/examples/pycaffe/linreg.prototxt
new file mode 100644
index 0000000..c0fb077
--- /dev/null
+++ b/examples/pycaffe/linreg.prototxt
@@ -0,0 +1,60 @@
+name: 'LinearRegressionExample'
+# define a simple network for linear regression on dummy data
+# that computes the loss with a PythonLayer.
+layer {
+  type: 'DummyData'
+  name: 'x'
+  top: 'x'
+  dummy_data_param {
+    shape: { dim: 10 dim: 3 dim: 2 }
+    data_filler: { type: 'gaussian' }
+  }
+}
+layer {
+  type: 'DummyData'
+  name: 'y'
+  top: 'y'
+  dummy_data_param {
+    shape: { dim: 10 dim: 3 dim: 2 }
+    data_filler: { type: 'gaussian' }
+  }
+}
+# include InnerProduct layers for parameters
+# so the net will need backward
+layer {
+  type: 'InnerProduct'
+  name: 'ipx'
+  top: 'ipx'
+  bottom: 'x'
+  inner_product_param {
+    num_output: 10
+    weight_filler { type: 'xavier' }
+  }
+}
+layer {
+  type: 'InnerProduct'
+  name: 'ipy'
+  top: 'ipy'
+  bottom: 'y'
+  inner_product_param {
+    num_output: 10
+    weight_filler { type: 'xavier' }
+  }
+}
+layer {
+  type: 'Python'
+  name: 'loss'
+  top: 'loss'
+  bottom: 'ipx'
+  bottom: 'ipy'
+  python_param {
+    # the module name -- usually the filename -- that needs to be in $PYTHONPATH
+    module: 'pyloss'
+    # the layer name -- the class name in the module
+    layer: 'EuclideanLossLayer'
+  }
+  # set loss weight so Caffe knows this is a loss layer.
+  # since PythonLayer inherits directly from Layer, this isn't automatically
+  # known to Caffe
+  loss_weight: 1
+}
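A minimal sketch of exercising this net, and with it the EuclideanLossLayer above, from pycaffe. It assumes Caffe was built with Python layer support (WITH_PYTHON_LAYER := 1 in Makefile.config) and that pyloss.py is importable, e.g. by running from examples/pycaffe with layers/ on PYTHONPATH:

    import caffe

    # assumes WITH_PYTHON_LAYER build and pyloss.py on PYTHONPATH
    caffe.set_mode_cpu()
    net = caffe.Net('linreg.prototxt', caffe.TEST)

    # DummyData fills x and y with Gaussian noise; one forward pass runs the
    # Python EuclideanLossLayer on the two InnerProduct outputs
    out = net.forward()
    print(out['loss'])
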
diff --git a/examples/siamese/convert_mnist_siamese_data.cpp b/examples/siamese/convert_mnist_siamese_data.cpp
new file mode 100644
index 0000000..71c56a0
--- /dev/null
+++ b/examples/siamese/convert_mnist_siamese_data.cpp
@@ -0,0 +1,123 @@
+//
+// This script converts the MNIST dataset to the leveldb format used
+// by caffe to train a siamese network.
+// Usage:
+//    convert_mnist_data input_image_file input_label_file output_db_file
+// The MNIST dataset could be downloaded at
+//    http://yann.lecun.com/exdb/mnist/
+#include <fstream>  // NOLINT(readability/streams)
+#include <string>
+
+#include "glog/logging.h"
+#include "google/protobuf/text_format.h"
+#include "leveldb/db.h"
+#include "stdint.h"
+
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/math_functions.hpp"
+
+uint32_t swap_endian(uint32_t val) {
+    val = ((val << 8) & 0xFF00FF00) | ((val >> 8) & 0xFF00FF);
+    return (val << 16) | (val >> 16);
+}
+
+void read_image(std::ifstream* image_file, std::ifstream* label_file,
+        uint32_t index, uint32_t rows, uint32_t cols,
+        char* pixels, char* label) {
+  image_file->seekg(index * rows * cols + 16);
+  image_file->read(pixels, rows * cols);
+  label_file->seekg(index + 8);
+  label_file->read(label, 1);
+}
+
+void convert_dataset(const char* image_filename, const char* label_filename,
+        const char* db_filename) {
+  // Open files
+  std::ifstream image_file(image_filename, std::ios::in | std::ios::binary);
+  std::ifstream label_file(label_filename, std::ios::in | std::ios::binary);
+  CHECK(image_file) << "Unable to open file " << image_filename;
+  CHECK(label_file) << "Unable to open file " << label_filename;
+  // Read the magic and the meta data
+  uint32_t magic;
+  uint32_t num_items;
+  uint32_t num_labels;
+  uint32_t rows;
+  uint32_t cols;
+
+  image_file.read(reinterpret_cast<char*>(&magic), 4);
+  magic = swap_endian(magic);
+  CHECK_EQ(magic, 2051) << "Incorrect image file magic.";
+  label_file.read(reinterpret_cast<char*>(&magic), 4);
+  magic = swap_endian(magic);
+  CHECK_EQ(magic, 2049) << "Incorrect label file magic.";
+  image_file.read(reinterpret_cast<char*>(&num_items), 4);
+  num_items = swap_endian(num_items);
+  label_file.read(reinterpret_cast<char*>(&num_labels), 4);
+  num_labels = swap_endian(num_labels);
+  CHECK_EQ(num_items, num_labels);
+  image_file.read(reinterpret_cast<char*>(&rows), 4);
+  rows = swap_endian(rows);
+  image_file.read(reinterpret_cast<char*>(&cols), 4);
+  cols = swap_endian(cols);
+
+  // Open leveldb
+  leveldb::DB* db;
+  leveldb::Options options;
+  options.create_if_missing = true;
+  options.error_if_exists = true;
+  leveldb::Status status = leveldb::DB::Open(
+      options, db_filename, &db);
+  CHECK(status.ok()) << "Failed to open leveldb " << db_filename
+      << ". Does it already exist?";
+
+  char label_i;
+  char label_j;
+  char* pixels = new char[2 * rows * cols];
+  const int kMaxKeyLength = 10;
+  char key[kMaxKeyLength];
+  std::string value;
+
+  caffe::Datum datum;
+  datum.set_channels(2);  // one channel for each image in the pair
+  datum.set_height(rows);
+  datum.set_width(cols);
+  LOG(INFO) << "A total of " << num_items << " items.";
+  LOG(INFO) << "Rows: " << rows << " Cols: " << cols;
+  for (int itemid = 0; itemid < num_items; ++itemid) {
+    int i = caffe::caffe_rng_rand() % num_items;  // pick a random pair
+    int j = caffe::caffe_rng_rand() % num_items;
+    read_image(&image_file, &label_file, i, rows, cols,
+        pixels, &label_i);
+    read_image(&image_file, &label_file, j, rows, cols,
+        pixels + (rows * cols), &label_j);
+    datum.set_data(pixels, 2*rows*cols);
+    if (label_i  == label_j) {
+      datum.set_label(1);
+    } else {
+      datum.set_label(0);
+    }
+    datum.SerializeToString(&value);
+    snprintf(key, kMaxKeyLength, "%08d", itemid);
+    db->Put(leveldb::WriteOptions(), std::string(key), value);
+  }
+
+  delete db;
+  delete[] pixels;
+}
+
+int main(int argc, char** argv) {
+  if (argc != 4) {
+    printf("This script converts the MNIST dataset to the leveldb format used\n"
+           "by caffe to train a siamese network.\n"
+           "Usage:\n"
+           "    convert_mnist_data input_image_file input_label_file "
+           "output_db_file\n"
+           "The MNIST dataset could be downloaded at\n"
+           "    http://yann.lecun.com/exdb/mnist/\n"
+           "You should gunzip them after downloading.\n");
+  } else {
+    google::InitGoogleLogging(argv[0]);
+    convert_dataset(argv[1], argv[2], argv[3]);
+  }
+  return 0;
+}
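The core of the conversion above is how each training pair is packed: two 28x28 digits are stacked as a single two-channel datum, labeled 1 for a genuine (same-digit) pair and 0 for an impostor pair. A rough numpy illustration of just that packing step (the C++ tool additionally writes the result into leveldb):

    import numpy as np

    def make_pair(images, labels, i, j):
        """Pack images[i] and images[j] the way convert_mnist_siamese_data.cpp does."""
        pair = np.stack([images[i], images[j]])      # shape (2, 28, 28): one image per channel
        label = int(labels[i] == labels[j])          # 1 = same class, 0 = different
        return pair, label

    # usage, with images of shape (N, 28, 28) and labels of shape (N,):
    # rng = np.random.RandomState(0)
    # i, j = rng.randint(len(images), size=2)
    # pair, label = make_pair(images, labels, i, j)
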
diff --git a/examples/siamese/create_mnist_siamese.sh b/examples/siamese/create_mnist_siamese.sh
new file mode 100755
index 0000000..43ad6b1
--- /dev/null
+++ b/examples/siamese/create_mnist_siamese.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env sh
+# This script converts the mnist data into leveldb format.
+
+EXAMPLES=./build/examples/siamese
+DATA=./data/mnist
+
+echo "Creating leveldb..."
+
+rm -rf ./examples/siamese/mnist_siamese_train_leveldb
+rm -rf ./examples/siamese/mnist_siamese_test_leveldb
+
+$EXAMPLES/convert_mnist_siamese_data.bin \
+    $DATA/train-images-idx3-ubyte \
+    $DATA/train-labels-idx1-ubyte \
+    ./examples/siamese/mnist_siamese_train_leveldb
+$EXAMPLES/convert_mnist_siamese_data.bin \
+    $DATA/t10k-images-idx3-ubyte \
+    $DATA/t10k-labels-idx1-ubyte \
+    ./examples/siamese/mnist_siamese_test_leveldb
+
+echo "Done."
diff --git a/examples/siamese/mnist_siamese.ipynb b/examples/siamese/mnist_siamese.ipynb
new file mode 100644
index 0000000..1a4e30e
--- /dev/null
+++ b/examples/siamese/mnist_siamese.ipynb
@@ -0,0 +1,1909 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Setup\n",
+    "\n",
+    "Import Caffe and the usual modules."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline\n",
+    "\n",
+    "# Make sure that caffe is on the python path:\n",
+    "caffe_root = '../../'  # this file is expected to be in {caffe_root}/examples/siamese\n",
+    "import sys\n",
+    "sys.path.insert(0, caffe_root + 'python')\n",
+    "\n",
+    "import caffe"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Load the trained net\n",
+    "\n",
+    "Load the model definition and weights and set to CPU mode TEST phase computation with input scaling."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "MODEL_FILE = 'mnist_siamese.prototxt'\n",
+    "# decrease if you want to preview during training\n",
+    "PRETRAINED_FILE = 'mnist_siamese_iter_50000.caffemodel' \n",
+    "caffe.set_mode_cpu()\n",
+    "net = caffe.Net(MODEL_FILE, PRETRAINED_FILE, caffe.TEST)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Load some MNIST test data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "TEST_DATA_FILE = '../../data/mnist/t10k-images-idx3-ubyte'\n",
+    "TEST_LABEL_FILE = '../../data/mnist/t10k-labels-idx1-ubyte'\n",
+    "n = 10000\n",
+    "\n",
+    "with open(TEST_DATA_FILE, 'rb') as f:\n",
+    "    f.read(16) # skip the header\n",
+    "    raw_data = np.fromstring(f.read(n * 28*28), dtype=np.uint8)\n",
+    "\n",
+    "with open(TEST_LABEL_FILE, 'rb') as f:\n",
+    "    f.read(8) # skip the header\n",
+    "    labels = np.fromstring(f.read(n), dtype=np.uint8)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Generate the Siamese features"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# reshape and preprocess\n",
+    "caffe_in = raw_data.reshape(n, 1, 28, 28) * 0.00390625 # manually scale data instead of using `caffe.io.Transformer`\n",
+    "out = net.forward_all(data=caffe_in)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Visualize the learned Siamese embedding"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": [
+       "iVBORw0KGgoAAAANSUhEUgAAA54AAAIXCAYAAAD0R4FDAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
+       "AAALEgAACxIB0t1+/AAAIABJREFUeJzsvXtwXOWZr/usvurWUktqGdmxaawEHEMuthGXITiIyMaJ\n",
+       "wbEMFmCTDMkkoyqSyTnZMwdqpmYyzEyS2ruKue2ZqSTHO/vYGQbhCxdjwI637ViWMEEEMJhgB4MB\n",
+       "gSRLsizJkiypuyX1+WP1Wlp971YvSd3y+1S5rF69Lt/6+lOrf/2+v/dVgsEggiAIgiAIgiAIgjBT\n",
+       "WOZ6AIIgCIIgCIIgCML8RoSnIAiCIAiCIAiCMKOI8BQEQRAEQRAEQRBmFBGegiAIgiAIgiAIwowi\n",
+       "wlMQBEEQBEEQBEGYUUR4CoIgCIIgCIIgCDNKRsJTUZQ8RVFaFUV5U1GUU4qi/HezBiYIgiAIgiAI\n",
+       "giDMD5RM+3gqilIQDAZHFEWxAS8B/08wGHzJlNEJgiAIgiAIgiAIOU/GqbbBYHAk9KMDsAJ9mZ5T\n",
+       "EARBEARBEARBmD9kLDwVRbEoivIm0A0cDQaDpzIfliAIgiAIgiAIgjBfMCPiORkMBlcAi4EvK4pS\n",
+       "k/GoBEEQBEEQBEEQhHmDzawTBYPBi4qivAhUA03adkVRMjORCoIgCIIgCIIgCFlNMBhUEj2fkfBU\n",
+       "FMUDjAeDwQFFUfKBtcDfxxhEJpcRhDC+9a1vsWPHjrkehjCPkDUlmImsJ8FsZE0JZiNrSjAbRUmo\n",
+       "OYHMI54LgV8pimJBTdt9PBgMHsnwnIIgCIIgCIIgCMI8IiPhGQwG3wZWmTQWQUiJq666aq6HIMwz\n",
+       "ZE0JZiLrSTAbWVOC2ciaEuaCjIsLCcJsU1NTM9dDEOYZsqYEM5H1JJiNrCnBbGRNCXOBCE9BEARB\n",
+       "EARBEARhRjGtqq0gCIIgCIIgCIIQTSrFd3KF6RaOVWa64qyiKEGpaisIgiAIgiAIwuWKoijzotNH\n",
+       "vPsIbU+oriXVVhAEQRAEQRAEQZhRRHgKOUdTU9NcD0GYZ8iaEsxE1pNgNrKmBLORNSXMBSI8BUEQ\n",
+       "BEEQBEEQhBlFPJ6CIAiCIAiCIAgziHg8JeIpCIIgCIIgCIJwWdPX18emTZsoKiriqquu4sknnzT9\n",
+       "GiI8hZxDfAmC2ciaEsxE1pNgNrKmBLORNSVE8v3vf5+8vDx6enp44okneOihhzh16pSp1xDhKQiC\n",
+       "IAiCIAiCcJly6dIlnnnmGX784x9TUFDAl770JTZu3Mjjjz9u6nXE4ykIgiAIgiAIgjCDJPV4NjTA\n",
+       "mTNQUACNjeB2p3eBDI4/ceIEt956K5cuXdK3/fM//zNNTU3s27cvpfsQj6cgCIIgCIIgCEK2c+YM\n",
+       "HDsGBw6oInIWjx8eHqa4uDhsm8vlYmhoKP1xJECEp5BziC9BMBtZU4KZyHoSzEbWlGA2sqaykIIC\n",
+       "9f/qati2bVaPLyoqYnBwMGzbxYsXcblc6Y8jASI8BUEQBEEQBEEQ5pLGRqivh0OH0k+zzfD4a665\n",
+       "hvHxcd5//31921tvvcXnPve59MeRAPF4CoIgCIIgCIIgzCDZ3sdzy5YtKIrCL3/5S9544w3uuusu\n",
+       "fvvb37J8+fKw/cTjKQiCIAiCIAiCIEyLn/3sZ4yOjrJgwQK+8Y1v8Itf/CJKdGaKCE8h5xBfgmA2\n",
+       "sqYEM5H1JJiNrCnBbGRNCZGUlpby7LPPMjw8zEcffcT9999v+jVEeAqCIAiCIAiCIAgzing8BUEQ\n",
+       "BEEQBEEQZpBs93iming8BUEQBEEQBEEQhKxFhKeQc4gvQTAbWVOCmch6EsxG1pRgNrKmhLlAhKcg\n",
+       "CIIgCIIgCIIwo4jHUxAEQRAEQRAEYQYRj6dEPAVBEARBEARBEIQZRoSnkHOIL0EwG1lTgpnIehLM\n",
+       "RtaUYDaypoS5QISnIAiCIAiCIAiCMKOIx1MQBEEQBEEQBGEGyWaP53/8x3+wY8cOfv/737Nlyxa2\n",
+       "b98ed99MPJ62zIcqCIIgCIIgCIIg5CKf+tSn+NGPfsTBgwcZHR2dsetIqq2Qc4gvQTAbWVOCmch6\n",
+       "EsxG1pRgNrKmBCObNm1i48aNlJeXz+h1RHgKgiAIgiAIgiDMKQ1ADbAeGJiD45nxVGDxeAqCIAiC\n",
+       "IAiCIMwgyT2eNcCx0M/1wO40r5Dp8fCjH/2I9vZ28XgKgiAIgiBoNDc3MDBwBputgNraRpxO91wP\n",
+       "SRAEIQMKQv9XA9vm4PiZj3hKqq2Qc4gvQTAbWVOCmch6mh0GBs7Q1XWM9vYDtLQ0zPVwZhRZU4LZ\n",
+       "yJrKRhpRI5WHgOl8kZbp8WrUciaRiKcgCIIgCDmHzaZ+u+/xVLN69fS+3RcEQcge3EwnPdaM4ycm\n",
+       "JggEAoyPjzMxMYHP58Nms2G1WjMYTzTi8RQEQRAEIefw+QZoaWlg9eptkmYrCELWk819PP/u7/6O\n",
+       "f/iHf4ja9rd/+7dR+2bi8RThKQiCIAiCIAiCMINks/BMh0yEp3g8hZxDfAmC2ciaEswkcj01Nzew\n",
+       "b18N+/evx+ebXon7TMmGMQjTR96jBLORNSXMBSI8BUEQBGEGyYYiONkwBkEQBOHyRlJtBUEQBGEG\n",
+       "2b9/Pe3tB/B4qrnzzkNz4kfMhjEIgiBczkiqrQhPQRAEQZhRsqEITjaMQRAE4XJGhKek2go5iPgS\n",
+       "BLORNSWYSeR6cjrdrFmze04FnxljEJ/o3CHvUYLZyJoS5gLp4ykIgiAIOUZzcwMDA2ew2QqorW2c\n",
+       "FVGr+UQBWloaWLMmk35zU8zFvQiCIAizj6TaCoIgCEKOsW9fjS4Cq6rqTROBiUjHJ5qOmJyLexEE\n",
+       "QZhtJNVWUm0FQRAEIeew2QoA8HiqWb16W9hzM5USW1vbSFVVfUrFidKpopvoXgRBEIT5gwhPIecQ\n",
+       "X4JgNrKmBDOZjfWUSATOVOuUdHyi6YjJdATt5Yq8RwlmI2tKmAtEeAqCIAhCDmCMZAJxRWA2RBDz\n",
+       "8ytwOj0pCclsKL4kCIJwOeP3+/nOd77DVVddRXFxMStXruTXv/616dcRj6cgCIJw2ZMLBW5S9UJm\n",
+       "Q+sU8W0KgiCEk80ez5GRER577DG+/e1vc+WVV/Liiy+yZcsW3n77bbxeb9i+mXg8paqtIAiCcNkz\n",
+       "UxVbzSTVSKYWQYyFWQI72XmyIeqqkQtfKgiCIMwlBQUFPProo/rjO++8k6VLl/LGG29ECc9MkFRb\n",
+       "IecQX4JgNrKmBDOF0kytJzO8kGb5P5OdJ5t8mzPleZ1N5D1KMBtZU9lHAw3UUMN61jNA+oXhMj3e\n",
+       "SHd3N2fOnOG6667L6DyRSMRTEARBuOyprW2c8/TUZLS2PsLISA9HjmxNO3KnRf36+98BUhPYiSKF\n",
+       "yYR6oqhrJuza9VlGRrqwWOzcffdruFzJv4nPpuirIAhCPM5whmOomTcNNLCb9N5DMz1eIxAI8MAD\n",
+       "D/Ctb32La665ZlrniId4PAVBEAQhy4gl+tLxTUYef/BgnX5sYeFiNm9+O6lwTXS9ufKRbt/uJhC4\n",
+       "CKj38cADnyQ9Jhs8r4IgCMk8nutZzwEOUE01hziEm/TerzI9HmBycpKtW7cyPDzMc889h9VqTfk+\n",
+       "xOMpCIIgCFlMvKhiLM9prMhdqscbj001/TVRpHCmIprJsFjsAFitBXz96y/F3U+bl6GhsxQWenE4\n",
+       "imdriIIgCNOikUYaaGAb26YlGjM9PhgM8p3vfIfz58+zf//+mKIzU8TjKeQc4ksQzEbWlADh7Up8\n",
+       "vun7Y9JZT/H8h4ODZ0M/WRkd7cHnG4jpm4x3fKRonI7nMpt8mhp33/0ahYWLuffeUwnTbLV5uXSp\n",
+       "nZ6e4znt7wR5jxLMR9ZU9uHGzW52T0s0mnH8Qw89xB/+8Af27duH0+mc1jmSIcJTEARBuGxIJC7n\n",
+       "ogiNJjDt9mJuuukxffvkpD/00wTnzh2jpaUhZr/LeFHJSNE4nV6Z2dhf0+Xy8sADnyT1dmrzYreX\n",
+       "AOLvFARBSERbWxvbtm3jrbfeorKyEpfLhcvl4sknnzT1OuLxFARBEC4bEvkW9+9fT3v7gbTSUdNF\n",
+       "SwEdHDxLMBhgdLQXmIgaz44dZfj9/YAqnrZu/SjmeDL1L2baaiRbW5Vo83LTTY/R2vqw+DsFQZhz\n",
+       "srmPZzpk4vEU4SkIgiBcNiQSl7NRhMYofI1YLA48nhtwOIqprW3k0KF6OjsPY7eXsGTJ1xgZOYfN\n",
+       "VsDg4HuMjp5PqaprKqIwlhBvbm6gre15JiZ8eDzXs3btnrjzkU7BI0EQhMsZEZ6SaivkIOJLEMxG\n",
+       "1tTlQyLfolmppYnW01QK6FSxG4fDTVnZyjAv4tq1e6iqqmfr1o84d65JTwEeHPyIQOAiPl8vu3Zd\n",
+       "E9eP2tzcwNmzu5OmDsdK1R0YOMPoaBd+fz+dnYdpaWngf/0vB9u2KWzbZuHcuZcSHp8uZnlr5zPy\n",
+       "HiWYjawpYS4Q4SkIgiBcNqQjLmdCEGnCd/Pmk3i9G/F669iy5UPy8soAVcBZrfns3r2c9vbDHDpU\n",
+       "z8TEmOEM4/pPk5P+uKJyYOCM3nYELHz88UF+9asKhobawu7twoW3cDrLcTiifaMAZWUrWL16G8Fg\n",
+       "ILQlyPPP36afIxAYJD+/krVrn5q2YJ8Lb60gCIIw+0iqrSAIgpDVzJWPcDbTSI8efZCPP96Px7OC\n",
+       "CxdOMjbWoz+nKHaD8FOxWBxMTvqx20vYvPktPeVWm6uenleYnPShKDYsljwmJoaBqd6XsVJ+tXv0\n",
+       "+QZoavo2EKSmZgdOp5tt2yyA+rd8w4YWFi681bT5mQ1vrSAIwlwjqbbSx1MQBEHIcmL1tJwNUk0j\n",
+       "zUQYa8f29Z3E7++no+MwimIP20cVnQqa8AO1yTdAIHCRZ56ppqLiBmprG8PmSj12XK+Qa7UWYLE4\n",
+       "2L7dzfj4SNg1HI5SrNZ89u2riXkfGzY08/zzt7FhwzEWLrw1rflJRm1t44x7awVBEIS5RyKeQs7R\n",
+       "1NRETU3NXA9DmEfImspuzI6IpSoUUy02ZIz8uVxL+eCDYlauXER+vprammpxH1BFnN1eyLlz0QWI\n",
+       "NCyWPK644ibOnTuGzVbE+LgazayqqsfvH6a9/YC+r93uprj40/T3/55Nm15l374vG1JwVRTFxt13\n",
+       "v87x4/+XPpaioqUUFV2ZcOyx5idbq9zmOvIeJZiNrKnZRyKe4vEUBEEQspxEBYESEc+jmaqnMFU/\n",
+       "qDHyV1CwiL6+t2hvP8DHHx9IubhPWdkKvN46Skuvpb//dNR+ZWVf4FOfWktBwSKuuqqOyclx8vMr\n",
+       "qai4PnSeIsbG+lm9+udhxwUCA1y48DqTkz5eeGGNIdJpwWp1AWpU9MUX12G1qpFWRbExPNyWdOxO\n",
+       "pxuHw83Bg3X6HItfUxAEQYiHRDwFQRCEeYkxmpifX8m9957G6XSbHkE1Rv6OHNmqn9vhcNPZeRib\n",
+       "rYgrrriZNWvUtiS7dn2WkZEuLBY7d911hBMnfqJHDR9/fCGjo11h53c4SrHZ8pmcHGdyMkAgMGzw\n",
+       "fNqwWCx6Oq3XW0db296oMRojo/HwejfS1fUyPt/5sGtv2fJBVERzaOgsExMBxsbC+5BqEdd05lai\n",
+       "pIIgXA5IxFOEpyAIgjBP0QSmhrF4jlmeQqNoys+v4OLFs1y48DplZV/Ebndx4cIJfL4LAOTlVeJw\n",
+       "uBgcfE8/vrBwMUuWfE0/R0/Pb/H7p19B1+vdSFvb88Ckvq2gYBFu97V0dh4O29fpLMfvv0gwOE5Z\n",
+       "2Qo2bDiqC2dQ27zcc8+bnDjxU318vb2vhxU+0tAEKpD23EovUEEQLgdEeIrwFHIQ8SUIZiNran7i\n",
+       "8w2we/dyRke7oiJwZkXZjKLJ6fTg8/Xy7ruwbJn6fH5+ZVgEU1FsBINTLVEWLryNiYlxenqOT/Mu\n",
+       "w1Er4E6iRSGt1gIqK2/hy1/+Jbt3f5aJiTHsdheVlas5f/41XUTabIWhCrg2PJ5V2GyF9Pf/ntHR\n",
+       "84yPXzKMObzIkXbNpUvvYWTk3LTmczaq2uZ6VFXeowSzkTU1+2S78PzGN77BkSNHuHTpEh6Ph+98\n",
+       "5zv89V//ddR+4vEUBEEQhAicTjf33ns6pj/ULC+i0d/p8awIe87hKGXhwhq9Sm1Z2QocjpKwfc6d\n",
+       "O0ZPz8sJrpDen2k1BVcVnYpiZ2JihI6Ow7S2Poz2OSEQGAJgcnKqRcv4+BiBwEV8vgt0dBzmllv+\n",
+       "ldHR8wQCF8OEcqTotNuLuf/+9xgZOTft+dQ8vKWl14b5Rc1EvKeCIAiJ+au/+is+/PBDBgcHOXDg\n",
+       "AP/+7//Or3/9a1OvIcJTyDnkGzrBbGRNzV/iFQjKtBWIVrgoGAxQVOTFanUCasXZZcvUyOaGDU2M\n",
+       "jJzT/ZiXLn1CWdnnKChYRH5+ZehMViLF3BQKxpRZu70ktC05ijLVLc1ud3HTTY8xOTmmb+vtfQOP\n",
+       "Ry1MVFa2ImJ+guzbdysWiyqYrdYCnE4PAOXlK1myZD1LlqynqMhLWdnnaWl5SN83cj7jFXgyor1G\n",
+       "Q0PJCxpNF7Nav8wV8h4lmI2sKSGS6667jry8PP2xzWZjwYIFpl5DhKcgCIJw2THdSrkaWgSto+Mw\n",
+       "gcAluruP09FxGLu9EFArxf6f/1NHX99J/Rif7wLnzh2jouIGios/Hdo6keAqU4JUUewsWfI1Uv2z\n",
+       "HQyO64I3EBjihRduD3ve41nF2rV7qKqqZ8OGo9x99+toolZRrIyP+5icDKAoDurqfsv9979HUdFS\n",
+       "rNYC+vpO4vNdxO8fpLv7OO3tB7DbizKOLM+kOMz09RYEQZhpGoAaYD0wnZyPTI8H+N73vkdhYSHX\n",
+       "XXcdf/M3f8OqVaumeabYiMdTyDnElyCYjawpIV2MvkSn001Hx2G9ku3Ro4f50peqsVqddHdHezeX\n",
+       "LFnPhQsnGRlpT+laDoebxYu/yocfPm2oZps6dnsJZWWf08eiKHYqKm7k/PlXCAaDKIqVu+9+DYej\n",
+       "hH37bqWg4FOcP9+qH+90VlBRUY3fPxjTi5rIm5mOfzPdok/NzQ20tT3PxIQPj+d61q7dM29FpbxH\n",
+       "CWYja2r2SebxrAG0Ds71QLpl1jI9XiMYDHLs2DE2b97M/v37ufHGG8Oez8TjaUv0pCAIgiBc7kRW\n",
+       "rh0aasNqteP1bqSmZgcwVckV4N1367jzzr0cObI15vk6OtQWK6mgKDbKy79IZ+dvpiU6QSEYnKSn\n",
+       "51VA9Z0WF18TJiCDwUmefnolDz54gQce+IT9+9eHHe/znae9/QB5eZVEUlCwKGHRptraRp55ZhVW\n",
+       "q5MjR7YmLOyjpdymysDAGb1wU2fnYVpaGnA43DldREgQhMuXgtD/1cB0cj4yPV5DURRqamqor6/n\n",
+       "ySefjBKemSART0EQBEEwoImnwcGzuFxe+vtP4ff3A1rVWFUAxmr9YRReq1f/nGefvZmxsa6oa4Rj\n",
+       "BSawWBx6P04zyMtbwPj4GOPjg/p1Fiy4md7e15ic9EWPwppPefkqbLZ8rFYH3d2t+P1qKxiHo5R7\n",
+       "7jnBM8/coPf5dDjcbNnyYZi4i9UaxbitqGgpRUVXRgnDVKrORu5jbP2itYM5eLBOWrMIgpCVJIt4\n",
+       "DqCmy24DpvOVWabHR/Ld736XyspKfvKTn4Rtl6q2giAIgmASmi9xZKSd7u7juugEdNEZz4do9DS2\n",
+       "tj7Mffed1gvzxGPDhiZcrqVYrXkJ90uXsbEeg+i0UF6+gp6e4zFFJ8DExCg9Pcfp7DxMd/crKIr6\n",
+       "+cFicVBScg0tLQ9RXv5FQBOib6ZUtMm4raBgUUzPZype0Mh9amsb8Xrr8Ho3smHDUZxOd84XERLm\n",
+       "K2a474T5jhs1PXa6ojGT48+fP8/OnTu5dOkSExMTHDx4kD179rBx48ZpjiY2IjyFnKOpqWmuhyDM\n",
+       "M2RNXd5EVl7VxIvdXhy1b1nZCrzeurh+RZutgHffnRI+TqebioobEl7/nXf+jcLCKwkEBhPulxmT\n",
+       "9PefSnlvv78Pn68Xi8VJeflKzp9v1YsIuVxLKS29lpaWh6Iq1cYq4mPc5nCocxopDCMFo/aaPPHE\n",
+       "EvbuvZX9+9dHVc51Ot2sW/cs69btjXmt+ZRmK+9Ruc4ZVPfdAVQROvfImhKMKIrCL37xCxYvXkx5\n",
+       "eTk/+tGPePzxx7nhhsR/v9JFPJ6CIAhCzpFKamaqaJE0QI+ktbQ0cNNNj7F793ImJkax211UVq7m\n",
+       "K195Iupakem1p08/SGmpl4MH6/Rte/fezOhoFzZbEePjw/qximJncPBjhobOTnv8qRLej1NDTfMF\n",
+       "sNkKGR+/hN3uMvT69DE01AaA0+lhdPQ8Pt8AQ0MfAvDMM6soLAxPnXU43Pq9a55YTVhqcxtZQChy\n",
+       "u/E1uXRJLcLk9dZRVLQUiyW+XzRdn6ggzA5mue8EYWbweDyz8mVERh5PRVGWAP8JLECt+74tGAz+\n",
+       "W8Q+4vEUBEEQTCWWl3C6JKq8unfvrXohHqfTQ0XFDdTWNtLa+oguNgOBQb1irOZh7Os7qafoVlXV\n",
+       "Y7Xm8/HHBwgEBuOmuloszrjPzSRWax6LF3+VW275V1pbH+ammx4LE8oVFdczOPghPt+AIXUXbLYi\n",
+       "JiZ8evqx11vHunXPhr02TmeF7gnNy6vkvvtOh81dvC8NtNfEbi8hELiovzbi4RRyE7Pdd0Iukszj\n",
+       "mSvMZVXbAPDfgsHgm4qiFAGvK4pyKBgMns7wvIIgCIIQFzO9fPn5FTidnpgCSEsNtdmK8Pl6dX/h\n",
+       "yEiPLoDy8yv1sVgsTn07qGJ1eLiTgYFTYV7RWMy86JyKbhqpqLiJmprtYdHCe+89zeOPVzI+Psy5\n",
+       "c8dwOsvDRCcQFrkF9MJIxtfG4XDT2XkYgLGxrqi5i1eJ1hh1bm19GKs1n927lzM21guoKc/i4RRy\n",
+       "B819l000AM8DPuB6YA8iioWZJiOPZzAY7AoGg2+Gfh4GTgOLzBiYIMRDfAmC2ciayj3M9PINDbXh\n",
+       "8/XS0XE4qrCNdp0rrrgZmBK6mrjS2qIoioOLF9/j4sU/8O67YLUWkpe3AL9/iJ6e44aquOr3vQ6H\n",
+       "O2nRIQ2bzWV4pKAKyPQxFt8x0tV1TL9vzVt55MhWrNb8qasqqV/T+NqsXbtHb8MSOXfaY2PRoJ07\n",
+       "r9HbuaxZsxuXy8uaNbsZGmpjdLRLTxd2ua6aVx7OZMh7lGA2TU2vAl1AP3CYbPGeCvMb0zyeiqJc\n",
+       "BawEWhPvKQiCIAiZYaaXL1H0VLuOzzcQ5kHMz69AUWyMjw/rkb9AQI34KYqViYlLTExcirpWMDiO\n",
+       "zVaA232d7pNMRFnZ9fT1nTCegVhRy1Tw+S7E3K4oNnp732T7dneowJGaQuVweEJjWMG6dXtpbX2Y\n",
+       "jo7f4PNdwGrNZ9Gi22lvP6Sn2p4//zt8voGo1+a++06HzV2kn9Mo4rWeoTt3XkNFRbUeATV6YMvK\n",
+       "vkBNzfZpzYEgCBpOw88rEe+pMBuY0sczlGbbBPwkGAzujXgu+OCDD3LVVVcB4Ha7WbFiBTU1NcDU\n",
+       "t3jyWB7LY3ksj+XxXDz+oz9aQUtLA5OTf4zDURT1vMXSyMDAGVpb3yE/v5JVq5YQCAzS3Kz6Opct\n",
+       "A4B331X//+IXFzA21kNX1zWMjHRSVTWsP2+x5HH11T4gqO8febz2+IMPigkEBuM+b9bjlSsXMTLS\n",
+       "zbvvToQ9399/EzZbHn/2Z2rV2KamJkZGuujvf5ivf/0lXn/9Q158cS1XXz2un2/Rotv4i79omtb8\n",
+       "/+53Z+ntfYNlyyzAZNj59u69lZYWdb7vuGMj69btzZr1I4/ny+O7gHZqahYBjTQ1vZll44v1+B+p\n",
+       "qRkGCmhq+h4Q/f4V//ELwP+gpqYC2J4j95vbj2+//fZ54/E8evQob775JgMDamXzjz76iF/96ldJ\n",
+       "PZ4ZC09FUezAC8CBYDD4rzGel+JCgiAIQk7S3NzA2bO7CQQuhm3Pz69kdLQLh6OU0tJr9eJCpaWf\n",
+       "Z3z8UiiaacHjqaav74Tuf0wVq1Ut6mP0i84csb2fDkcpixevY2TkXNxCQGqU9GJofzdbtnw47RRY\n",
+       "n2+AnTuv0YsRORylbNnyAU6nO2EBqHQwsxqyMF9oQG13chI17RSgnuzzZMaiBrVNC+TOmC9fpLhQ\n",
+       "hh5PRe0u/b+BU7FEpyDMBNq3SIJgFrKmhHgMDJzRhZXmz/R4qqmre4Wqqnq2bPmAr371BbzeOgoL\n",
+       "r2R0tIfXXvsALSW2t7cVu10VN1N9QRP+XQZgYmKYrq7mNEdr0ceYHrFTd/3+fj74YLfuv2xq+nbU\n",
+       "PgsWqD3eHA4399zzZlwhF9krNRZq2q2W/qdQXFzFkSNb8fkGTPP0Gv2kkX7ebEbeo2YSrcemJjpz\n",
+       "qeXJ9Nu0TG9NNaCK3fWolXoFIT0yEp7Al4BvALcrinIi9O+rJoxLEARBEOYEo0gaHHwPUEXnXXcd\n",
+       "1cWPVvTG6XTjdLpZt+5ZJif9jI11R51vwYJqqqrq2bz5JFVV9TgcpWHPxxeL6XwzbmHLlg/Iz1+Q\n",
+       "xjGxiBTFxsfh42lubiAQGAWsTE5O8Mwz1XrPz0hSFXyFhV79Wr29r+v7a77RTCOUZlZDFuYLmnhb\n",
+       "AdQBh8id6q6NqJHO2RqzJtIPIMWIhOlgiscz4QUk1VYQBEGYYcxMoQzvQ1luKMqjsHDhl7njjr0x\n",
+       "z//LXzqjUmq1VNzh4TYKC704HMV0d78clbprJP1+nqrodLm8YX1H06WsbAWjo12MjnZFPWe3u9i8\n",
+       "+W1OnPgpbW3P4/P1MTk5QWS0tLBwMQ888EnU8ammyk7171T9rZmm1kYSWSRKEOa+x6aW6luAKiTN\n",
+       "HoOZ51+PKjqryS2Bnh1Iqm3mEU9BEARBmBOMkcm+vlOmpVAao2Iez0rDM0HOnTsW9/zGViWKYiMv\n",
+       "bwElJdfQ3X2cS5fa6ek5Tnv7AaxWZ8zjVSw4naWk2jJFUWxcdVUdR48+yP7967HZ8pMfFPGn32Yr\n",
+       "ZMmS9ZSXf0Hvk2l8DiAQGKK19WEGBs4wOtoVEtjhotNqLeDrX38p5hXVXqkVOByJP6hqKbVadDhS\n",
+       "dKaSspsIsyKnwnxC67E5V2vCGEW8mvTTWJOlv5oZpZztCKswF7z33nvk5eXxzW9+0/Rzi/AUcg7x\n",
+       "ughmI2u1e9m8AAAgAElEQVQqNzGmbw4Oqu02pptCaRQ0q1f/XBc9a9bsQVEc+n6lpZ9n9eptMQVQ\n",
+       "RUU1AO+/n0cwOM7YWA+9vW8AasQQ1JYhbvdyLJZ44nMyFHFM1jJFFbb33/8+Y2MX9Hm4cOFE2Hhj\n",
+       "oSjhf/rHxy/R0XGE999v1Ptkqvs59HtSW530Y7Xaw46120vYsKGFwsLF3HvvKVwuL7FQe6Wep7Mz\n",
+       "vFdq5DxqwtCYymwkVz2amZJb71HiA0wP7QurIqCX9AWiUViuInruY/tAp7em5lqkC7PB97//fW68\n",
+       "8UbUUj7mYlofT0EQBEGYTYyRybVrn6K19eGUUygjU3M1QQPQ2vpwWB/KpUvv5sMPn8HhKOarX30e\n",
+       "p9Mdtn9LSwMOh5tAYJS8vEqCwT79WK3HpcXixOFw4PerIlEtCK+hkJ6fE0AVti+//H9H9MGM3avT\n",
+       "iCoup65ptRYwMTESYz8/Docbp9ODz9dLR8dhFMXOpz61FovFjsVip6ZmB06nmwce+CRhunM8b2Xk\n",
+       "PCbrzTpdj6ZUs51NNCEEqoCajUqrM52uaiaRY20MbesHDqMKxHxUAXkW8ALFxL8vTVh6gAuA1h94\n",
+       "OXDacP65SiUWcomdO3dSWlrKtddey/vvv2/6+SXiKeQcWk8kQTCLy3VNZZq2ONcYK52eOPFTRkZ6\n",
+       "9Cqoye4tMnKWSNCMjJwjGPTj8/XS2vowEC2ABgbO0NNznLGxLq65ZjLqej5fLxbLlNjUBGnoERZL\n",
+       "4ihlOMY/3Qr5+RVYLE7Gx0dTOtpmK+aee97EYsnH4SiLm57rcJRSU7ODioobwsbd3/8OX/vai+Tn\n",
+       "L+DgwTp9jo1zunPnNWFzH68qbbpCcrrVbXM9Uppb71HTr7Q6fXKp6M3zTI3128AjQE/oOa24UVto\n",
+       "n3bgOInvS0t/tQCDhu1doWNiRymj15REqueahgaoqYH162FgGi9BpscPDg7y6KOP8i//8i8z5kUV\n",
+       "4SkIwmVBQ3MzNfv2sX7/fgZ86RRumb9k84fxVNtvaOmYkfeiPf7v7eXcuPP/jXrdjYLHas3H7x/E\n",
+       "as1DUay6eI21ryaOIgWQcR+HowQARbGiJRaVl69k06ZXyM+vDJ3VmMJkxWo1ir/o9Can04PDUUpe\n",
+       "3gKuuOKPQtvK6e5+mfffbwwVI0qWnhu6mtVJUdGVLFhwI35/X8woqcNRyj33nMDpdFNb2xj2XHn5\n",
+       "CiC+eFcjr+fD1lWkt1J7fScnA3i9dSkLyel6NKWa7WwyFz7AuRC706EBVRBq+JkSzYcBO+qcafej\n",
+       "fVlVAjwWca7PAg6gAlW49kc8n+5c5JJ4n5+cOQPHjsGBA6qInO3jf/SjH/Hd736XRYsWzUiaLYjw\n",
+       "FHKQ3PK6CNnCmYEBjnV1caC9nYaWlrDnLtc1lc0fxtMVxUbRMzY25UXss32ak75S/XXXBE8wGMDr\n",
+       "3ciddx5iaKiNnp7jTEyMcf58a9Q1a2sbcbmWYrU6dVEaKYCMQrSi4j8oLFxMeXk1oHomh4ba2Lfv\n",
+       "1lC7kMjU2gm9yq0qQKMLC/l8vfj9/YyN9dDd/dvQtgHGxnrCfJnxcDrLDec6T1PTt8KKIRlRFDsV\n",
+       "FdfrAtrpdLNw4W2A6nHNy/Owb18N/f3vAFPrR5uDBQtu1rdbrfkxv0DQXt/OzsNYrfYZT301qw/o\n",
+       "XJFb71Fz4QPMlaI3ZyIe24ktmrX7WRV6fBG4nfCIZBcQQH2POUb4e8oiEs9FA01NK0jFCyrMHgWh\n",
+       "l6C6GrZN4yXI5Pg333yTI0eO8MMf/hBAIp6CIAiZUGBTI0/VHg/bVq+e49FkB9n8YTxSFCeLgKq+\n",
+       "vQrGx4fp7DyMzVZIVVU9i69QP7hpr7smeDo6DmO1OsKilXZ7cdg1NZxON+Pjo3R3q1Vpn3zy01Hj\n",
+       "MArRgoJKHnjgE/LyykL3UoTf38elS+309rYS6ee0Wl36ddUiRPGFpM1WBGipvKlFODdsaGFiIhCx\n",
+       "VdFf/8g+osFggI6O8CJAd9yxl6qqejyelXz00XN0dR3D5+ulsHCxvn60OVi7do++roaG2vQvEHbv\n",
+       "Xq7P2Wx/6SHVbLMZM1I8s7nojfH+jN7uzwE7mBKZF4Ey1C+mqlAjnGWhfatRxaQxImk81xeAL4V+\n",
+       "XgGsQU3bjTWnDaHrvhU617LQPrki3ucvjY1QXw+HDoF7Gi9BJscfO3aMjz76iCuvvJKFCxfyT//0\n",
+       "Tzz99NNUV1enP5AESB9PQRBynobmZs4MDFBgs9FYW4vbGV0xdMDno6GlhW2rV8d8XsguIvstGntr\n",
+       "VlXVxyxCE6tXZOTrHmsf7Vo33fRYVIEirShNd/fxqMhiVVU9Doc7btEa7bw+X79emEf1dlpRRaON\n",
+       "goIrmJjw4/cPUFl5C11dL0f4P9V9S0quxe2+hkBAFdbxsNmKGB8fDtvmci1laOjDsG2LFq1h7do9\n",
+       "UXOrEa9/ZuS+DkcZ99zzBi6XV5+roaGzFBZ6GR5uY3x8GL9/6oOv9tql0k9TCgLlItMp8lPDVDGi\n",
+       "emanGNFsUsPU/S1AFZG/B5agFg2qQPV0vkT4l0mLgHdQo56LgNcAX+iYk6F9bkEVmk+EHmtFhOoM\n",
+       "16xELTKkvRbG8WjMx3nPPrK5j+fo6ChDQ0OAGu38x3/8Rz766CN+8YtfUF5eHrZvJn08RXgKgpDz\n",
+       "1Ozbx7Eu1TdTX1XF7jVr5nhEgtnEEoyRpCJmUtnHSKTQsttdBAJD+jgOHqxLKIibmxvo6zvF4OBZ\n",
+       "Cgs/xYULrwNgsThYuPDLnDt3nMnJ5EWBvN6NrFu3F59vgJ07r8bn68VudxMIDGH8sLpkyXo++WR/\n",
+       "xNFWYkVHi4qupKhoKRaLnXPnjhEMBrBa81m06Ha+8pUnosS3zVbA5GQgSvhaLE6++c2usLmIRaLX\n",
+       "LhapfNkQOb54AtW4T35+BUNDbSJoZ4Qa0heR61Ejb9XkVrQtmcjWnn8FVTBqeFCLAPlDj50Rz2tY\n",
+       "UFusjBCdBVEJ3IEqWGNdX5tTDeNrEfncCuBojPELZpPNwjOSv//7v+fs2bP853/+Z9RzmQhPSbUV\n",
+       "co7c8roIs0GmabSyprKfVNKCW1sfCatsG4t0Uy61lNCyshV4vXVs3vx23KJCWsqocT0Zq90ODJwK\n",
+       "bbUyOemno+NwSqLTYrHT3/8O27e72bnzaiorvxxKK76EUVDa7S5uvfVnRBcnMorOqeeGhz/WfZYO\n",
+       "RwlWawF2exHd3b/l8OF6fQ6Nflu7vYiqqvowz+jkpI9du5brVXvt9pLQ/2rqcnn5St1Pm2zejSnV\n",
+       "2vmSpeOm4gc27vPxxweytqhWPHLnPWo6PsFcTfE0FuOpRE2LXctUaqtWvdYoKq2ovTr9hm2xfz/V\n",
+       "lPpBYqfedwFPEr8YUGNoTBD9WjQCG2lqugnYiIhOIRaPPvpoTNGZKSI8BUHIeRpra6mvquLQnXdK\n",
+       "Gu08JRXBmG5Boli+0chtmuAtL/8CPl8/LS0PhUVLkwliTZg6nR6Dl3IixjYj4cWFNm16jdHR8wQC\n",
+       "F/H5emlrexaf73xESi4EAkO0tj4c8oFGoyhqlDUWPl8vExOjjI2dx+/vD/N4GsV1Tc121qzZzd13\n",
+       "v47FMvW7NjbWpYvSzZvfCv1/kqqqeu666zesW7c3JbEfKXIjizrFIhW/qHGf8vIvJt1fmC7TEZHZ\n",
+       "7M+MJJZf04YqLrU+nNp7T6woZnSrpaltVuBOwr2bidB+/2NVvHUDrtDYPkT1jxqf2wv8D9TU30Re\n",
+       "0Jo4zwnC9JBUW0EQBGFekEo6LkylXfb1ncTvV1sQaKmc8dI7jdudzgoqKqqTpmka02xdLi/nz7cC\n",
+       "4HCoVWLHxnrp7j6u719evpKioisZH79ER8dhLBY7mza9Rnn5F/jVryrw+XrDzq+l/Uam/2qpuEYs\n",
+       "lnzuu+80DkcJO3deg893Puz5SG+oxeLA47kBh6OY1at/zvPP305BwSIcjmL9vn2+AXbtWs7YWJd+\n",
+       "7dbWRzLyZUa+hslSmSH9FGsgrXRr4XLls6iRxTHUdNQy4HWmem6WoqbJjjDVP9OKWn12D6oAj+/H\n",
+       "VlkGdAJDMZ6zMyUujdiIjoIuCe3rA64PXf8qpgTnYuCTGOeqIX5qdKLnhOmQS6m2iRCPpyAIOUMq\n",
+       "hYCE7CJXiryk6t+M9G0aRdMHHzyF399PeflKyso+r3sBNW+jUaAl8h1GXic/v5LR0S69P6bL5dVF\n",
+       "liY4a2p2hBU7Mt7H0FAbu3YtY3LSp+9/yy3/kxdeuJ28vAUMDbWxadMruFxehobaeO65W1AUK4HA\n",
+       "CH5/H1dccQvFxZ9maKiN/v538Pl6sVjs3Hnnb3jnnX9jbKxf927a7SWUlHw2VIFXjcwGgxNRIj3W\n",
+       "nCfzZSZbS5HnS/XLhFjkyroVspEG4P8jtcrRsTzUTtRIZjD0L955FFRBG91LN1pg5qOmMmuCNNYx\n",
+       "GvWoKbS9ofFVAx2AF7U4keYJTeSvzVXvbfYiwlNSbYUcJHe8LkIsEvXTnCuycU01NDdTs28f6/fv\n",
+       "Z8AXK2Vr9kg3hXWuSNW/Genb1CvgDpzRxVVR0ZVhrUD6+k7i9W7Ue1Rq/UJjpX9q68mY3llX9wpV\n",
+       "VfVs2fIBLpcXmErTjUxFjXUfJ078FIejBEWx43AU43CUcPTog/h8A5w/38rYWBetrQ8D4HJ5+cY3\n",
+       "OnC5qvD7LwBBuruP09b2ot4GxWJxct9977Fw4a16CxSvdyNebx1bt36kt4IBi95DVLuXyFYzxrEm\n",
+       "S3uNtZaM6c1A3P6o6QrHXFm3qZCN71HzD2Nq6SkSi04tHd5D7PRZH1M9NhOdJxjneIiOavpRxWYX\n",
+       "iUXnClRP52uokc5qoBVoB46jeULVNZUoNTpXvbdCNiPCUxCEWUX6aaZGNgn02e65ONNoYmbDhqOs\n",
+       "W/dsTNFUU7NDfwwwNtaD1eoItSCZ6heaSNDU1jbici3l0qVPePrplfh8/WHPp1PoaGDgDGNjPQSD\n",
+       "Ac6dO8b77z9JV9exuIJQTfM9GXYOi2XKOzo56dOFqjaWdev2sm7ds7S2PkIgMIii2NE+FCuKjSVL\n",
+       "1idNYfb7B8nLq2Tt2qcSel6N400kEDPpvznf1q0w0xiLBZ1Nsm9F6N9FIvvypk9/jG2xgkbJoq/F\n",
+       "qKL5C6i+zYeAt5nqBVoS+t9YbCiRvzaXvLdCriCptoIgzCrSTzM11u/fz4H2dqo9njkvmpRuC5Jc\n",
+       "ITIVE6a8f62tj9DXd4re3t8xOekPS/VMJ/0zMq03WXpuvPRQ7ZqRlJWtwO/vZ3x8lMnJABUV11NQ\n",
+       "sIiPPnqOQGCqoEh5+UruuONZdu1azuTkKIpix+NZhdNZFtVeJF5bFK2lS7wxx/LMRhJrLaXrzU01\n",
+       "dXa+rlshVRK1O9GeO8tU+mkA1ZPpCe0T7pMOJ57/ci7ZiFo0qIZwb+Y21Pt9DHg49Fh+H+YCSbUV\n",
+       "4SkIgpCViECfeeL5EZubGzh7drcu3AoLF7N589u6eElH0BgFY3n5Su666zcpC9W8vEruu++07vts\n",
+       "avo23d0vMzbWE+YLjRSKTqdHLy5kt5ewaNHt1NRsp7X1Ec6e3UUgMBh2TaezQi825HRWAMGo4kQA\n",
+       "Xm8d69Y9m3DMkH6/zul4c5MJeCPi9cxmkvXCzIQawgWY23CtQdS0UyN1qIKyM8ZzELuoT7bgQo1u\n",
+       "/hR4CjWKWgTcjFpoSNZ8NiDCU1JthRxEvC5CLDLxRJq5pszyZrqdTnavWSOic4YwpqKWla0IS8Uc\n",
+       "GDiji06HozRMdELy9E9tPWmpp07nApYsWZ9UdAIR6b1d7Nz5Gd37uG7ds9x337u4XEux2QqYmPBH\n",
+       "HVNevhKPZ4Vh7G/p6cTqfamiU2vjYrMVhQlRn++87gEFtXKudt6amu0Jx2zs19na+khUq5p4pOvN\n",
+       "TTd1dj54Pefv3z1jeqtZr43m1Xwn9FhLLTVe69XQcy7DPsWoFWtfi3FOK9kpOrXWK0Oo0cwzTKXu\n",
+       "DhPe3iWc+bumhGxGhKcgCPOCbPFEZss4hMQYCwmNjHSGPacJHK0C7XQjZAMDZ+jpOY7P14PdXpjS\n",
+       "eWprG0PeShWf7wLt7QfYufMaXYAWFl5Jd/dxfXswGGDJkvV4vXXcdddvWLNmj17IaP/+dWzf7uZX\n",
+       "v6pACX0P7XCUcvfdr+N0ehgfH2ZyMvwLEoejlPvuezfUi/NtvQBSvPFrntmyss/j8w1w5MhW+vtP\n",
+       "ZdxTNd510i00NJ+8nqnMU26hfWli9B1miiYwe1GL62jFcbRrFTGVJrsaNRp6LfBc6LhYXximUt12\n",
+       "JlmAGnE14gZuC/2szZ92j8UR2wUhO5BUW0EQ5gXZ4onMlnEIiYn0TBYVLaWo6EpstgJWr/45ra0P\n",
+       "Z+wNnG4rkBdfXEtHx2G9P2dkCxe/f5j29gMptXbZvt2tR28VxU5eXjl1da+EtXMxoig27r//fb3y\n",
+       "bjoYU2Gt1nwmJkax24vZvPlk0vNNN402FeaT13Mm52luGECNyJnpO4zXBkS7Vj9qJND4fA1TabnZ\n",
+       "SDmq8OwOPbYCbwBXEj5/2j2KnzMbyfZU25qaGlpbW7GFikAuXryY06dPR+0nHk9BEC57ssUTmS3j\n",
+       "EBLj8w2wa9dyxsa68HiqsVic9PSovi6zPtD7fAM888wqCgoWYbcXR/kLY3kPm5sb6O8/RW/vG5SU\n",
+       "XMvISAelpcs4d+6YLmBBLYKk9d50Oj1YLFYmJvxUVFzPmjV7aG19hIGBM3R3v0wwGECtkhkMuz/j\n",
+       "HIDqB928+a24IjFyvNo1tMdHjmzVhbaiWDl/vjXl+cykX+flhMyTRiJvaDIxG/l8A1O+yGxm6ndY\n",
+       "ZSmq8JwJf6wwE2S78Lz99tv55je/yZ/8yZ8k3E88nsJlhfgShFhk4onMZE1FejrFm5kbOJ1u7rvv\n",
+       "tJ666XCoqWlmpGNq68npdIelxUamnMbyHqpi8TgTE6P09b3O2FgXfX2/Jz+/kuLiz3DwYB1Hjmxl\n",
+       "9eptrF2rptS63csYHe3G7++no0Nt8aKdOxgMYLXmsXDhl6PuT5sDr3cjRUVeyso+R0vLQ3FTOCPH\n",
+       "G/k4PBW2LK35zKRfpxGzUlGzqY8uTK0ps+YpOzH20Uz22kV6Q43HQuI2IMY2IQ2hn7NddFoJF502\n",
+       "1F6eifyxiedTPksJsZhpYRyZMC4IgnDZ8Y9vvcXfDQ5SYLPRWFsbJRobmps5MzAQ8/nn29roGh0F\n",
+       "4NtNTTy7bt2sjj1byYVKolpRG1A/0E8nHVO7z8HBs7hcXuz2Ymy27+nPJ/IXDg6qvQLt9mJuuumx\n",
+       "iN6bFmASq7UQn09tFt/RcUSvPtvS0oDD4ebcuRbGxqYq0JaXr2T16m0cObJVv64xShp5f1r/TmMK\n",
+       "Z0tLQ8wIZeS97Nnz+bDxZzKfxmMj5zadNaSJ4UT3kQqaVxugoaWF3WvWTOs8ZhNrnsxhJqvLpoom\n",
+       "JrXxJLrPSG9oXZJjY7VPaQSeR+3Fma0UovbfXAK0GrYXMSUmS4nt40xnPoWsINNfQxN+jf/qr/6K\n",
+       "v/zLv2TZsmX89Kc/5bbbbkt+UBpIqq0gCFlJPLGXSATGe17bdnZwEK/LRbHdHnZszb59+ofM+qoq\n",
+       "dq9ZE3aewUCA492qt6YyP5/T996rH1u2Ywf9frW66JVFRfgnJvBNTHC9x8OetWsv28inUcg4nRVU\n",
+       "VFRnrQBNF6Mg+tfea/loLIgDP9/lf1PAaFhqqeYvtFrzw3plOp1unnvuVrq7p9J7R0Z6ovpn5uUt\n",
+       "YGysB4+nGofDTWfnYTyeakpLr43q1VlQsIj6+nf09iuJhF+kqDOmyRqjacb9Ir2vkeM3QxAZr+f3\n",
+       "D6ad/qylojqdHkpKluFwRKc4p8L892pHfkI1Crd65kakxPNmQvR4tW1aumyyY3cQ3XfTgxo1zObP\n",
+       "qG7gj4C3UNu8aCxArcBbCpxAFdORaHPiAZYxJbZz/z04V0maaltDZr+GGR7/6quvct111+FwOHjy\n",
+       "ySf5sz/7M958802qqqrC9pNUW0EQ5h3xqsMat6965pmodLhYx2nb2kdGON7dHXXOgpCRvtrjYdvq\n",
+       "1VHnOTs41W6ia3Q07NjrPWqz8UKrlUG/n67RUfr9fg53dl7WVW216JjNVoTPd35GWllkklaZybHG\n",
+       "FNOPfXbeYxnv8Hn+i29ERTa1CNXQUFtUWq3dHp7eq82Z3V6ib9+06VU9tVJLrb3zzkMMDbWFic7y\n",
+       "8pW66DReN57oPHt2d4I02aljjPfa2vpw2Dkjx28GxutpEeF0zq/dR0nJMnp6Yqc4p0JjbS31VVXz\n",
+       "VHRCdKpq8uqyxt+Zo0cfnIHquo2on5YjhWOs8WrpsjeHfn4VVWjFEp27iRadCmrV27kUnZH3GOvz\n",
+       "+gDqPY8ZtpWg3m898AGxRSdMzacVtS/pAeBb0x+uMPNkWuQ5w+NvvPFGCgsLsdvt/PEf/zFf+tKX\n",
+       "2L9//zQGEh8RnkLOIb6E+UmkpyqWGIRwkbiooCBKZGrPF9ls9Pt8YefSKHU4ws75PZst6kOm8Tqv\n",
+       "1NVRmZ8fczx71q7F43RyaWKCgVDkE2BFWVnYfmbMyVwxnXFoAmDBgpuBmWllkUl/xkyONaacXll5\n",
+       "AwCryor5G+8wd955iN/+9s2Ex2jzECn2tMebN79FX1U9P7/zEPe5vFSHxJ5RTE61fHGn3CPUeO/G\n",
+       "PqVaBDOWUE2UKjwTfkPj9TZteiXt82v3kalnd7a92sm+CDH/717kJ9REok8l7AuXj/fPQG/UR1Cj\n",
+       "eFuZSiON15NTows1VfYCcDLG2OOl0mZDlDPydS6JeKwJ0ULDz6XA14AHUft0JkIT537DtilxK5+l\n",
+       "spDkv4Yze/wsIMJTEISsIDJSGS/iYNxebFf7HRrFYGNtLR6nk+HxcQ53dOjnyrdaAbApCk0bNoSd\n",
+       "s8jhiPqQabyO1+Xi9L33xhyP2+nkhooKAFaWl7OksJBypxNPSKiaNSfLd+9OKPpmUqROpzepJgCM\n",
+       "UTqz02wz6c+YybFGwbX7jjupr6riyIZN1K2Ln9IZS6RFij3tscvl5ddrdnPY6Y5bNkQ735YtH/K1\n",
+       "r704rb6Wxj6l8YSPcdw/aD0Ztsa08ba2PmJa9Cs/vwKnswKHw43DURI3apuMXCvCk8kXIdMj8hOq\n",
+       "seBObIy/Mx7PCv1n875QioxqGrdF9uTU0HreFgAvhY5bCJQBawmPFEZ+5E2YETgHXIp4HDRsv4B6\n",
+       "/xtQ5ydRUaFIrg/9vxLYnvkwhZkj+a/hjB1/8eJFDh48yNjYGOPj4zzxxBO0tLTw1a9+dZqDiY14\n",
+       "PAVByAqm46mK17ok1rlu3buX4z09wJSPM1WS+UoHfD5WPf00iwoKODUwoHs+66uqcDsc+rEV+fm0\n",
+       "DQ3FPU+8OdGIHHeYD9Xvj7q/ZONOlWz1u2XSn9Hs3o6ZFlOKPH5TSHTGcqxlSqx7T6U/ZCwvdKrH\n",
+       "psr861OZGpm1SZmdwkDGdQOxi1VlRiyfZiLvJkAbcCuq6Pwp6qduY4RT80LagMnQv2zAguq97Elx\n",
+       "/xLgI8K9uA7gBuJ7N7V1YUctRrQ9xj7CbJLN7VR6e3tZv349f/jDH7BarSxfvpwf//jH1NbWRu2b\n",
+       "icdTqtoKgjCrxBNDjbW1afW/NJ4nkljnKnY4gOhU2VTGF6/CpXHfRQUFuvAzXqfu4EH9WKfFgm9S\n",
+       "/eCTSgXcxtpalu/eTdfoaMxxG8dVmZcXdX9mVeZM97WZLTKp8JnKsemIyUyrqUYe/98cbm4bOMNy\n",
+       "WwH5tY2Q5MN9OmONde+pRIDjpb9nEj2OxMxz5RLTraqsMjvVSyPXjXlfChgFUh3hAqmR+D05teM+\n",
+       "jyrMzhAuOhXgfOjncZPGahZfBl5JY/8bUe9fS5EuBa5B9W5C7NfduC48qCnMUlxIiI3H4+HVV1+d\n",
+       "8etIqq2Qc4gvIbeJl7aZyFMVK4001nm0/bYeORIlkhIVCzGuqcjzNjQ3c7KvD4Ayh4NjnZ2U7djB\n",
+       "2hde4FR/f1QBohVlZdR5vfp1jB/W8w0iOZXvPB9pbeXTxcVU5ufzVIwKuWE+1E2b4vpUPU4nncPD\n",
+       "007DNb422eI7nQ1STX80tkEpL1/J5OQfJ9w3VlpqpOAaHThDadcxulJMvcw0VTOV1NR4v0NmprXm\n",
+       "WoqsWSQqBgXJ/u5lWpFkrtEE0mFU8Wmcg8jcQWNvylPELpCkESQ7vJyxOA7Ee/+0od5fsWGbdm9a\n",
+       "ivQHqOnEMPW6R/bt1I4pQk1VDk/Nlc9SwlwgEU9BEGaMWNHDWFGTRC1QItuZLN+1i9P33ZewEi3A\n",
+       "oscfZ1VFhd465ZHWVnpGRth65EjCtNMwsXbpEofb2/XUWUVR6BlTPUOHOzv1gkMepxOvywWKwt51\n",
+       "69SfQxijhfWHDnG4s5MVZWXsqKlJOn/GHqE/fPnlqAhpZCQyMhJrt1rZ6PXSOzqqR2Mz7Uk4nSiq\n",
+       "WSm/s02q0beBgTP4/WoD+qKiK3E4ihLuGysyGhnxSjfyl2mkMJUIsHGNpXusmeMQIkkUFcwFEgln\n",
+       "YxpxBfAcU1HNyhjHLUctOFQNvBbjWjayI/oZWWW3HNXHaWyPshZVjK9AbQcDU0Icol/3yMi39nx/\n",
+       "6Dy5+sWEMJ8Qj6cgCDNGLE9YLF9mrP2M2yrz83UBBqrQW+HxUGizsaOmRj9PpCdSo76qip6RkZj+\n",
+       "tEi08XVeuqSLXVAFrtvh4HCn2kttRVkZe9et4/bnn+fC2BiD4+Nxz20UgpFjbmhu5vm2NrX3Z0UF\n",
+       "ewxRX2OP0I1eL3sTpOYm8nsO+/2meTSn4/eM5w1Mh0w9lNMhVR9oOv68VPdN14Nqhmc1kzmei9fH\n",
+       "bF/t5RRhnXuMgvLnwMPEFs41TImpCqZSZzWBZjzus8A51IJCdwH7yA6RCWqCoZUpwWlFFYKtwBdQ\n",
+       "xxo5BwOk94WC5octQm0zsyd0XLrnEWaKbPZ4poP08RQEISuJFZWMlVIba7+odiYhD2ORzUavz8fh\n",
+       "jg4cVmtUOq22n1bxVotcvtPfrx+vtVmJhTY+7fhyp5NyhwO3w8Evb7uNOq+XjV4vRzds4KcnTtDn\n",
+       "8ySAV+QAACAASURBVOmiM7JNi4YWJTSOWUtZfeqDD6Z6f3Z0cPXOnXoaq9YjNJUIaay+o9p8mtmT\n",
+       "cDrniucNTIfZr/qZPP1RI5300FT3TfXaxv0dDjcHD9ZNu7rsXLWnmS7Ga+7ceU3a9z0XY85+ItM1\n",
+       "ZwpjBduHiV+K0xgN/WLoZ2NU0HhcFzCI2j7kWbJHdK5FjWYaP48XAi6migVF3gukX6K0EdXLOYwa\n",
+       "4dTWdKalUgXBPCTVVsg5mpqaqEkhTVGYe2IVpdEic2eHhvAWFlLscFDicFDhdOIOFQAyHptvtfLg\n",
+       "0aN8rqyMm+12vU1KpIjRzvu58nJustn4n7fcwsOtrWGRSwvox3/xqadY6nJRYLPxPZuNu+64I+bY\n",
+       "O4eHOd7Tw+HOTh5ubQ1Ldz0zMMDFwFTK1BNf+UpMMZYsLVijMCSqNX/pnrVrUy7qY7zGU2vX8nBr\n",
+       "K9tWr+aR1ta4RZimQ7x0y0SYUZwom4vORKaHJnqPmslU0kwLHM1Vexoj6UQhtWvabEX4fOd1AZnq\n",
+       "fWfzmorErL97yed3dgoVpe5LNaaTamPKR43o+VBbhWiRPWNrlQDR6azTwYIqwl+I87xWNTeSItQ2\n",
+       "KM2oVXcJjVvrqTmIKg4row+dNm7UKrdaFeDEa1o+SwlzgaTaCjmHvFnmNsa0Sw2P00lvKAIZmYoZ\n",
+       "maa5bfXqmCImXjqnMTX0/YsXGQgJxXKnkwuha942NETTX/wFn921i66REcYmJii22xkPBrEoChd8\n",
+       "PpwWC5PBIEHgS5WV7L3jDrYeORKW2msFbq2sjPIyxkov1sa1oqyMRYWFOCyWMFGdbnQyXmsZM9Jc\n",
+       "s4FEqaTZljI5V+9RqabxxpuveHOcyvymkuqbynnSaaeiXXNsrJ/OzsNptyIxu6XOTGLWmko+v8na\n",
+       "l5hFvPTPVFrD1DAljkEttrObqdYqt4Yem9U6xYNanCcSK3AWeDBiPEa0scGUZ9MFDMXZJ5J0W+Wk\n",
+       "nlYrn6VmH0m1FeEpCMIsowmuErudi4GA6p10OuMKLm3/IpuNm6+4gkUFBTF7YUbut2fNGh5pbeVU\n",
+       "Xx9nBwd5ZdMmvtvczOGODlaWl9M9MkLn6CjFdjsnN2/G63Lh3r49LIKpoQBWRWHc8F5W5/VS7HDw\n",
+       "n++9p2+zMPVRR+vhqfs3PR72rF3LzXv30jUygs1i4aYFC8KipPHEoxnznaqYzcVCQJdr78dIjELq\n",
+       "B0533I+r6c5XuvvHE5jJztPc3MAHHzyF399PeflK7rrrN7Pmb71cSP7lhFl+wOn2Fq1hSsTFE2Sa\n",
+       "OAa1sutypnpZPkJ0L89MsKPeQ+T5lNC1J1E9mu8DHRH7lKJWn53ybDawijMsoIATNOLHnVTg15B8\n",
+       "PoRcQYSneDwFQZhlNI/gW5s3617BPWvWxPUNNtbW4nE69Wjgk++/H7MdS0V+PjZF0fdraGnhzMAA\n",
+       "x3t66Bob4+HWVv06v7nrLpYWq6XqBwMBlu3aRdmOHYyMx/YEBSFMdAI0nzvHzrNnw7ZpolNLqT0z\n",
+       "MDDl3+zspKGlha6RES4GAlwI+VS3Hjmi+01j+V8zbV+SriczXrubbCaXUiZnEqMv1Oigi3Qvpjtf\n",
+       "Q0PqOrfbS7jppseS7h/PO5nsupHVgdPxt6bjh72cSe4xTtS+JB3PZ6IVqBHr3Mkq3NagptCuBzai\n",
+       "OsaOh67zbcJ7edpRo4vTQUvbDRAtOhcBt6D6NvtR7zNWRHQQ+AxqJBbAzRmu5BitHMBPA4tJHlU2\n",
+       "o1XObPl2BSE5IjyFnEN6T+U2mrjyuly6yErUw9PtdHJDRYX+OBASgJq404TZ821tujjUivyE9dC0\n",
+       "Wqk7eJDhUJXYtiE11ckK+E6fpt/vJxAM4rRYuL68POl99Pn9+CfDU7lcdjvrlyzh2tJS6g4e1Asa\n",
+       "gVogaNvq1dgt6tuuBfBPTnKgvZ2rd+5kyX/9F7c+91yUwMxUCCaa21iYUQhotsm23o+pvEfN9EfB\n",
+       "RB9X052vwkIvAIHARVpbH066fzyBmey6xuNqanYkvc7lhFl/96JFerKVmIqAjEUqginWubU+lbEE\n",
+       "mbHf5+9Q/ZLGL+N+DbwV+tkOrCLc95kMLVCzErgt9LM1Yp+VwDuE99gsI7yQkXbNCVRxugxtbgtC\n",
+       "46jGwza8wFYSvwMkmg8jiV7H2K+hfJYS5gIRnoIgZDUNzc0MBgI4QoJtZXk5VxYW4rRY2HrkCPva\n",
+       "2jjW1aW3HSl1ODhxzz24nU4q8vPxhITt2YsXdQG36umnGQztPxFxva8tWcKCUH/OVLEp6geWodA4\n",
+       "24aGONbVRa/Px6KCAr0Krtvp5LW772ZxYSGrFy7Uj+31+WgfGeF4d/eUEH3iCW7du1cXr5p4ziT6\n",
+       "mQpmVsCdLcyKeM1mXGC6H+dTJdHH1XTny+FQP2SnGiGNJzCTXTfbvkC4PEi2EqcbcYu3Ao2/ZZpA\n",
+       "M547UQVWbSxFqG1VDgDG96gxpn5zi1FblfQBi1EjlLEwCkstq+VK1KimhymB+TnUCrS/CY2tMfR4\n",
+       "I2qqr/aXpIQp0arhQ5vbRhqpp55DLMOtR2qvJv67TqoVaRO9jmZETYXLhZ07d7J8+XKKior4zGc+\n",
+       "w0svvWTq+cXjKQhCVhHpMVy+e7few3NRQQHv1NdTd/CgXjDHrih6FNSCWjRoPBjkeo+H0YkJvaJt\n",
+       "ZV4eXWNjVHs8OC0Wvc+l2+HQxd/K8nJ+c9ddACzftYuusTF9XEU2G8MxUnEVpj6uACxwOvmCx8Ph\n",
+       "jg48TifLSkoodjioyM8P86YCNLS0cKi9nQG/Xz++0Grl0kS4HF5cWMjbmzeH3XesQkHZVmQn16hh\n",
+       "9txUs1XCxQw0D6XVms/QUFvY+pI1l+skW4lm94CsYeq3rA5VfCY7dwPwPDCKKjSXh85RDTyFWuG2\n",
+       "K3SuAKqYLEZNg60GrkUtAvQuagQyiCrGalArzx4L7T/I1DxobU5AFa5vJxijNodam5cS4AHgCKro\n",
+       "jDW3xp6bw6FtmbzrJHodpY9ntpDtHs9Dhw7xp3/6p+zevZsbb7yRc+fOEQwGWbRoUdh+4vEUBGHe\n",
+       "EJla6jOIsPFQaqvWp7LYbufGBQv05yeB8z6f7qk09rOsWbRI7ek5MsKr59Um5FbgKwsXsqykhDyr\n",
+       "FYfFwuefeoq7Dhzgc+XlrF+yhCWh6Gq8N8vIt94en49jnZ2sX7IEi6JwvKeHA+3tvBCKzGr3paXA\n",
+       "VhvSiAHyQqmuJaE+otUeD29v3ozb6UyaBit9CTNjNuMCqSbQZQNapHJoqC1qfcmay3WSrcRMekAm\n",
+       "83BuT/HcZ1CF5UXU1iWnUYXhIdT+l6dD97AqtP8EqujUPJRtqD7QXuBroe2ngBeBvaFjTxI+D8Zx\n",
+       "JhKdMDWHH4TG4w6du4v4c6sdc7PhOpm86yR6HaWPp5Aajz76KI8++ig33ngjAAsXLowSnZkiwlPI\n",
+       "OcSXML+JFFfXG4RZz9gYDS0teF1qwYjBQID3Ll5kQV4eoApRjRVlZbxSV6enjZ4bGaHX56NzZESP\n",
+       "kE4A+z7+mOMtLYxNTNB6/jztly6pfTs7Oii026lyufBNTjIYp/BQLALBIEc6OsI8oFq01ON00jk8\n",
+       "rKfL7omIWg4FAtR5vWHFl7SUV2Ma7COtrVFpt5pHbo/zh/zDpftZv38/Dx49OuPpufMFs8RgKu9R\n",
+       "ufhRMJZ383Iq7NTc3MC+fTXs378eny+zZOx0zzVzf/fMWomxRKYx/XMVU4WBNpLeb1mkP7MHNbr5\n",
+       "CLAQqEIVlu8a9lnJlGDUisAVAz8DPkEViDB1/17C5yGdd4N4c5hobrXn9qRxnemMIT7yWSr7aG5o\n",
+       "YF9NDfvXr8c3kP57TCbHT0xM8Prrr9PT08PVV1/NkiVL+MEPfsCYIfPLDER4CoKQVUR6DPesWUNl\n",
+       "yHOpidG24WF9/56xMW654grqq6o4uXkzdV6v7qnUChjdvHcvL4VSVItCwhbUiGdktVoNh8XCsc5O\n",
+       "PVU35j5K/IwS3+QkYwax6p+cxGGxMD45qUdBtchnucFL6Z+cxG618tMTJ+gZGYlb9dYYGb5m507W\n",
+       "79/P9at3UFVVj8+9mpbuXg60t3Pg449zrkrtXHGyuYFv7KvhZROERTLMFDGxMd+xGsuDORu+zJmf\n",
+       "q9QwRnd37rw6o/HMv0hxLI+hMWqopbQeRjUopLNWGlHFqpbdokUH/ws1qtgPdDK1ziuZ8mLClMgc\n",
+       "BD6L2ucz2e/FbH01lItfQQkzxcCZM3QdO0b7gQO0NKT/vpDJ8d3d3QQCAZ5++mleeukl3nzzTU6c\n",
+       "OMFPfvKTtMeRCBGeQs4hDY/nB/HahERWYXU7nZy+994wMeotLNT3L3U42F5Tg9vh4Oa9ezl27hyX\n",
+       "DL04G5qbef/iRb30Q5HdrotTlxYhXbYsanzjk5P0jI3pwjRSYlqBP6qsxB5HfHqcTr2Crba/f3KS\n",
+       "gdDYtCq3AK/ffTfO0L42ReHQJ5+w++xZXTBeHRKWxnnSIsNFNhvnfT4OtLfzg9aTrFmzmyK7GgGu\n",
+       "9nj4YqhC70xUqc201Uu2YZYYSOU9amDgDI91LeGH7ctZvftnMzB/5pcvilUcaDZamWSLSNOiuzZb\n",
+       "ET5fb0bjSTdSnP1/92IlqmtRw2tRi/xopOtxc6OmxL5LeHQwuueyyirChZyxAu0YpFTUZ/6T/Wvq\n",
+       "8sNWEHpfqK5m9bb0M0gyOT4/9AX/D37wA6644grKy8v58z//c/bv35/2OBIhwlMQhDkhnTYhRjHa\n",
+       "0NzMqVAKiRX4QkhYRfbM/OLTT1Ozbx9PffCBLjqtwCt1dTy7bh17162jwJCaG8lkxOMg6OLQqihM\n",
+       "AMfOnWPt4sUsLiykZcMGFhcWcrfXi8fp5ILPx1Ao4mlTFL0qr8ZVLpcurr0uF13f/CaeUGGkgUCA\n",
+       "iwbx3BsSlvcePqxvq8jPp8Lp1M9rFJbGqPGetWtnrEptLvb8TMRspo3abAV0s4D3WMbvRj0zMH/Z\n",
+       "Vckyk6il8XWxWvPnLPqpRXevuOJmfTzTXSfzr4JvrNRULZrXxlS7kRJgxzSvERkd1ASlhfB+nY4Y\n",
+       "Y6s0XB/Uoj69qAJ0OdHiM52MAemTKZhDbWMjVfX13HnoEE53+u8LmRxfWlrK4sWL075mukhVWyHn\n",
+       "aGpqkm/q5gHr9+/nQHs71R5PWqKoZt8+vbKrxtL/n723D2/ivNNGb1lf/rZsy8QhBgU3hKYfCU7c\n",
+       "0ha81tZOKSbUboKSJu1F0rO1djdtt/tuN+w53bNnu233fa/T9Lq63Z7Tbjh9NyRN/YKTNIEU3BQT\n",
+       "/FGSOk1DIF+NuyTQGjDGIGHjD9mY3/lj5hk9Gs1IM9JIlsxzc+nCmo9nnueZkTT33L/f/SstRXhu\n",
+       "TimpAsS7zTptNmxZuRI9IyOYW1iAx+3G9SUl+N2FC8A772iqnjxYmZaHhoYQmp1F7+nTcX0PDgyg\n",
+       "+/jxGNIIALVFRTg/O6vkljptNtx7ww0xLrcetxsrfvYzjExNAZBupdTk111QgNkvfSluHpjrrVli\n",
+       "uXv3+zE9PYqCAifuvPMVlJX5lHVqd+FjQ1+Ncy9N9RzmKph7a1PTjrTIgJHvqEgkjKbuH+G3M15l\n",
+       "/rTmOHXklpPl3r1+jI5KLqH19QG0thp37+TPy/PPd6TcjlWw6joxg/z+3VO7vvoSb24YJyGFzf4a\n",
+       "wJcghfE2IDbMFpA+B29ByvV8HsB3IIXn9nLbqB1l/TDucW1m29xBfl9T+Ylcd7X953/+Z/T09GDf\n",
+       "vn1wOBz4zGc+g09+8pP4l3/5l5jt0nG1dSRaKSAgIGA1GKFx2u1o9/mw0+83RViKOdfXi/Pzkro4\n",
+       "Oxtn/sN/JfJlWGZkl9zzkQguGAxvdNhs6ONyRsORCIKDg9jR1KSosMPhMI5duBBHOovtdvymowM3\n",
+       "7NqlLFtWVIQ9J04o2960ezfevuce+EpKFOKpJp0A8PJnPxs3D+mQvunpUczPXwQA7N27AZ///J+U\n",
+       "dUzNBKSyL1+YHlZu+AcHg2ht7UZXS0vMPKihJq+5TkxZ2Gi2jvWru78eM38spBSIznHqkNShXDkH\n",
+       "6ajJ/HnJBTMjq66TIIIYxjCKUYwudMGTAw8IMoMuZOYhiA+SURAgmfSojxGEFHJ+DBLRBCTS2Q3p\n",
+       "wcxNkHJEtaICzEQM5FZ0gYBAqvinf/onjI+P48Ybb0RhYSHuuece/OM//qOlxxCKp4CAQFbBK3Va\n",
+       "tSjVUN84f+3FF9Hzxz/iA5WVqHS7cW52VjEAsgMokOt68ornNYWFIADnZmeVZWpFlEeVy4Wpy5cR\n",
+       "4Vxp230+PLtxY1x/tg8NYec77yhqJoPDZsNlIqnkS02NUlP05qoqjE5PY0zlFBeor8dLZ88qxPOD\n",
+       "Hg9WV1Tg6zffjNv378dQR4cSVgwgjvxqzZWaZKjX79lVh0hkHHZ7Me6++60YxVOtZr548LMYGemB\n",
+       "19toODzQ7LnOZaRD4NjtbzGkW3C9mdu/v830HCdDrpwDq1TCxVAbMwU//OiXlbIAAujOE6Usf+BH\n",
+       "VIkE4mtcakUFsE+rE0AJpLDgZNdZutEFRr8hjG4nkKvIdcXTKITiKSAgkJPQullPVotSDbXyNjY9\n",
+       "jXORCPpHRxGor1dKqNhtNiwQYYEIdSUluMjlWJ7VsAP3ut04F4koyimP8NxcnOI4cOYM2vbvx8Tc\n",
+       "HA6PjSn9+cXJk3GkE4i65U7Mz6P39GksKyyEr7QUZQ4H3lIprWwu7ujpUYjnDRUVeGbjRgDAzF/8\n",
+       "Rcz2/Lzy749duKCEGwcHB+NIhnou/+edr2Dv3g04cM0j+Omhoyh2vKmcJ7Wa2dLSZfqG3+y5zmWo\n",
+       "584MgWM2P4B066hHL1KZ42TIlXNglUqYTVU60yiWlbJGNGLHoiplmSY0VrQf28bAwHYDYelMiVwL\n",
+       "4HpIdUP57VjOKA/+0xow2FetdszA6DeE0e0EBHIXwlxIIO8gak/lD7TMZ9TlUpJBfeN8fGICgFSz\n",
+       "8+F165T2/vzaa5XtXt+6NaaGpvqLrqG6Gr+9804E6utxdOtWOP7wB2WdQ8elNjQ3h56REYV0Mlfa\n",
+       "uYUFze3VGJudxclLl3B4bEwhpaUOB9pWrFDmotzlUsZQ4XLpOsby83pTdzfeunAB/aOjCumskOdG\n",
+       "DfXclZX58PnP/wknpi/HnSe1u3Aq7qVmz3UuwwyBU39HGQ3Ey4RD7FI6B0sNXehCAAEcwIGkYbaZ\n",
+       "/d1L7ICcfjkbKxyWY9sw5nTMDI8OAXgGxkjkYoTNGj2mke2MGx2JeymBxYAgngICAhmD1s26mtAk\n",
+       "A3Nv9cikzFcmuRdOzM/joaEhpb0nb78dq0pL4S4owH0HD+JDVVVKG7x6aYNEwD6xZw/6T5/Gjbt3\n",
+       "4zJHUi8TaeZXqnHswgXUPP44ktHOKpfaYVEKCQaAS5cvo/fUKVyUCWNXSwtWlZXBbbfjuZMnFTJ4\n",
+       "689/HkNCi7lapKMzMwqhLLFLLV+U50YN9dwx6JEqM+VStLY1e65zGekQODOl6K1GqueAkY3v7m/D\n",
+       "7ZFw3vh15krNTyPwwINudOdAbmdiQpN+ORsryFxsG8ZyfVOpkbkYn1ajxzSynfVllAQErITI8RQQ\n",
+       "EMgYtPIQjUIrfDRQX49LsvKoZarD57PVFhVhdGYG5U4nJubnUepw4JLKgCgTYLmdDO0+H4bOnsWo\n",
+       "HO5rB1DucsU48BYVFOCjy5bh+MQEJubnMcGF/jZUV6PY4VDyWGvcbtzi9eLY+fMYm51Fo9eLp26/\n",
+       "XXLbjUTQe+oUGr1eXJybw9jMDJwFBXjlzjvhKyvTdaHVO09m8gNzJZdQwBrwLrSv1Afw/7V254Vf\n",
+       "ZzruuVcvEucopp97bIXDcmwbkcj9GBzsQVPTLXC7n0yj3aUG5iCszmcVyAWIHE+heAoICGQQ6She\n",
+       "LJyUEbRGrxdFdjsm5uZQW1iIp26/Pa5dXrn7TUcHAvX1OLZ1KwL19fjYsmUx25Y6MpPi/ufLl6NI\n",
+       "Vh6dNhtGp6bwoepqtK1YAXdBARaAGNIJQKoJOjqKkenpGNIJSPMwJIf3ljgcOCeTy49fc42i8G7Y\n",
+       "swcDZ87gt2NjWFZYiBvKy/HuxAQuzs9jPBLBhr17AeirdnrnyUx4KdvW63bj9NSUIZVUIHfBFKWQ\n",
+       "txFPNO3IG7/OXHC9XSykrvYmVgbN1xxVh3smat9oaGhsG273SbS2noPb3Quh7PFYzPgKAYHkEIqn\n",
+       "QN5B1J66OsDUuYbqalyIRFBXUoK3QiGFtBXZ7bjV60W506kY4oQjEdz6859jeXExyp1O1BQVKbUy\n",
+       "f9zUhBt37cKc/H10+3XXoffUKcnZ1kAdTyP4QEUFGpctw7PvvRdX3qXa7cbFubkYNRSQyKmWOREg\n",
+       "helqGR1Vu914v8cTMx88vG43xmXSZwdw/N57lTBbI1BK3hQUoNTpxKMGSt4w1fT01JSizl6tyudS\n",
+       "+I5i7rE3N+3AV9weU1rVwEDQwnqk5pCK620+lDUxck3ljtrrR+ZrYAplL10she+pfINQPIWrrYCA\n",
+       "QI6Cd1XteP55JYyTgZUnAYA1u3fjnXvuwfahIVyYncV7k5MApLDUczIB+8jPfx5D4EocDlQ4nQir\n",
+       "FMZUUVtUhMMdHVj+xBNKrVAe5zn1rwBS3qmroADuggLM64QAT12+rJtvysYOQAknBqTQ3OrCQvSe\n",
+       "OgWnzaaE2ZoB7+AaqK/H9qGhpKVEmGratn8/gFiV1EgpksUkKwLx4N1jzdIXa+uRmkMqrrfDGFbK\n",
+       "mgQRzNuyJrmj9maqBibvbPtjAA/B+tqgySBKmggIpAMRaiuQdxBP6K4O8OGfLIyz2u1WnpbZuW3H\n",
+       "ZmcRHBzEcydPKqVRKpxO3CLXvSyVQ1SZ2thQXY1ylytKOtNUO20A3r77bmwfGtIknWowMrlw5YpS\n",
+       "8oX1k2FtVRWucE8UPXLZGK/bjQV5+YcrK9Hu8+HY1q1o9/nQ4fPhhTvuwJOtrQjU12Ns27aY2p9G\n",
+       "oQ6x1XIn1oNWOK+R/dM3MEkPVprSsO+ofDK6sRKJCJBxz03rEEQQfvjRhjaENY6aO2VN9GHkd898\n",
+       "SGymYCbc08y2vHHOhwD8CsBqACfT6axJLB3zHnEvJbAYEMRTQEAg58HIjMNmA6NpFTIRA4AyhwMP\n",
+       "r1uHCEf67HJZlA6fT8nvLLHbsaywEM986lM4KauiPHgyW66TA1pss6FtxYqYZZUuF+7o6cFT775r\n",
+       "eEwFQIwj7u3XXYc3AgF0+Hxo9/lwaMsWlHB9KHO54HW7cZkIYTm8tnj6GIILP8C3XvkNwpGIMv7t\n",
+       "Q0MYm57GfQcPKnmWZhxq1eTRTK6nVr6okf0XW62xjPgGg4DfD7S1ITz+VrTNbwWzy7YWEYkI0GLc\n",
+       "tjNFswc9CGoc1UxZk1xGJsrxpAYzbrJmtuXV0QIAFwGMA9iA7D3SyGa5lcV4TCMgkFmIHE+BvIPI\n",
+       "S8gv8GGWfM6lXshlonb+8/e/V8ia02ZDicOhqJZetxsEKaSVd7AN1NdjR1MTan/6U0TksikdPh9e\n",
+       "OXcOI9PTUmOqHM+bystxXWkpek+fjutHAYCm2lq8eu4cJg3W8DSClSUlmLtyBZGFBdxWU4PlxcXY\n",
+       "e/IkwnNzuLmqCmUOh1JDFAAKMYfr8Qd4cBG/wcch6a5A24oVmJqfj3OY5V1na9xuNNbUxJyDROGw\n",
+       "6bgTG90/ldw8K5G+c6cMvx99/f3wA9j/r7UY8Y7Ce74Rm799AO4Zj7k0tiWIxcjMa0MbetCDRjTm\n",
+       "Lbm8un739MJZeWfb1QDGMTBgRzjcCIdjGC0tIUhfL5n8kFnh0GsUfqSW/2oMV9c1lRvI5RzP0tJS\n",
+       "2Lg65jMzM3jwwQfx7//+73HbihxPAQGBnMVzJ09idGYGAFDtcuG8rNYFBwcV4xkjOYDD4XCMQjhP\n",
+       "hGmZXJY6HIqZTl1JCd5fUYHe06cVh9X7Dh7EHFers+/MmYR9/v3EBMpdLk3jnytAXL6pHZJ6yXI3\n",
+       "U8HU5ctKHmjvqVOocbsVZfOtCxdQLtcDXVtVhT9NTeF8BPg9PogyzICRTgB47fx53CLXMOUVRqY6\n",
+       "srBjFvbKzgGf18kvB6IqZqowsn8quXlWoqWlyxriWywrIo2NaPnSUxg89hCa9u2QSGe+WMNaAD3q\n",
+       "0IXs3bazPjixB+3oxE78W16SzqsPTBcHpLPIvhc83N+vANiAcPg6jI5KNYkHB4HW1kx/yPg+ZBrZ\n",
+       "VFcFrnZcunRJ+Xtqagq1tbW4++67LT+OUDwFBAQsg5pAbh8ailEplxUWKrUn+RxAIzUgmcstj2q3\n",
+       "Gxtqa/Hbc+dwenoa5U4njm3digqXC7c+/TTOz87GucuqcXNVFd4OhXSdZY2gyuXCBQ13WQZXQUEM\n",
+       "8dXcxmZTHHdtALR6U1dSgte3bsV9Bw+iZ2QEN7ou4rrq1Th0RlJCCwsK8Pt77kGFy6UojMwYyGm3\n",
+       "o8ThwNT8PHpPn445B+/fvRv/dfEiFgB8yOPBYHt7QmXTyIOCqxbhsBRuu2MH4JFJTjZFEg0shnGT\n",
+       "H/FaTbZtWbT6IKCP3DH4Mq6LRyMVGrB580q43TsTbp8vGAgGER5+C47i42jp+g3cHt9id0nAAuSy\n",
+       "4snjsccew7e//W3813/9l+Z6oXgKCAjkBNSq2dj0tEI6PS4XXv7sZ/HQ0FBcyKVWDqCa3DCXW75c\n",
+       "x/lIBIdHRxXToIn5edy4ezeG77kHK0tL8Z78BM9hs8WVMWGoKynBssJCzbBaI3DYbLipshKHz55F\n",
+       "id2OKY3wW5fNBp6Waimpc9x7rZ6udk3hmyVP4sWDP0OV629Q43ZjZfVN+ElzMx789a/x2vnzeLG9\n",
+       "XXGw1VIyA/X12On3x4W9jnLn6cLcXFIiqT7PHpdLEFEGjwfoVlGcbIokGlgMl1ktrUZPx7IKauJU\n",
+       "LBMnoRcZw2/Dz6FM/lwfHPwi2lqfWaSeGNfFLYtUyDGEh4cx2n8YADAYfAit6u8UgSWJdB/+WPXw\n",
+       "6LHHHsO2bdtS2jcZhLmQQN6hr69vsbsgoAM1gWTvK10uvHbXXfCVlcUZzwCxZjbbh4bg37sXMEr3\n",
+       "VgAAIABJREFUT737ruKEeuOuXbjv4EHsaGrCLz79aRTZJRsgO4DxSEQJSQWAuStXsGHv3phjr7/m\n",
+       "GmV9md0ec2xXQQEK/vCHpGMrQNRZlsdlIrw6Pg4boEk6AeCSarndFv9AkPWKN00qtNlwXXExql0u\n",
+       "FNFFjI0dxv8YqcYz7x3HuUgEvadP46GhIezbtAmnvvCFmLIpzEzozVAIQPScaJn/zMr9KwDQs2lT\n",
+       "0rlIx/X2akCufUcthnGTlldppgMH1QZR6j4EB4Lw7/WjbX8bwnnmMpyNa2rCIYX6v+cFnmhaTFXG\n",
+       "uOHQ4hoqZc78xyGH7HsbG9G0IzOf2Vz7nhJI3+TOCpO8kydPYmBgAPfff39K+yeDIJ4CAgKWQe2G\n",
+       "yt6/e++9CWtJ8mSIkZiQTCbVOYketxu3yiVCGJ1rqK6GUyZzxXY7fv2ZzyjHri4sxJHxcYk4ulyw\n",
+       "q4hnKBLBb8+di+vT8uJiLCssVN5fAXRrfi5cuaKpUlbJeZnqZc6C+K/eBUjq69GtW9G2YgWK7Hbc\n",
+       "4vVi+vJlnJ+bw7H55XgCX8AFx/swTRI5rXS5dF1i2TyORyKoKymJIfVqZ9u1ck7oFQDfOXJEsz0g\n",
+       "Smbnr1xBh8+XkuutQPZRVFQDt9ub1ZtzLepgpnBGKlATbHUfhsPD6B/tR89ID4KLULJnMZCslAyP\n",
+       "11puwyv1wMDmtfiRe2fGerR0nFqfQ9Sj+QFLW27p6kJ9IIDNBw7A7Vk6Sq5AYqT7kNCKh4w//elP\n",
+       "0dTUBJ8vM+HdIsdTQEBgUcBCaY9PTsJXUoKTly7BV1aGd8JhjEciWFtVhevLynBJzklkxPHVu+7C\n",
+       "Xw8OomdkBCV2O0qcTrz82c8CADbs3YsN11yDM9PTStjn9V1dSm1PPahDcddWVeHQli0AgJt278bo\n",
+       "7KyyzobYUigFAApU+zttNthtNly+cgV8hul1xcWYnJuLyTtlDrylDgc+ds01eFIm4HzeKwDcVl2J\n",
+       "/7P0GXzl3Cacmp6Bw2bD7+68U7dOJ8uJ5XM59XJptbbVgt7+6breZgq5k7O2uNi716+E2tbXBxbV\n",
+       "xCmTSOaM3La/DT0jPWj0NuLA5gPwXAXXgx9+9MsBzgEE0J0gwDmMMIIIYgd2ZNCEyY/sZ95mKru4\n",
+       "CkBI/rsDwGKFJgvkC5LleKbr7m6FO/yNN96Ib3zjG3jggQd0t0knx1MQTwEBgZQQDALDw5KJZ1dX\n",
+       "1Ecl6X4y4Tx24YKiaqrBTHQ8bjfCkQhqHn9cIXZs3epduxQnW54EqcnRk6ramg3V1Tg1NYUxjkzW\n",
+       "FBbiHPfeV1qK60tLcXxyEtcVFWFofFxZV2a3JyyjoudsawdQ6nQqJNhhs+FTdXX40YYNuGX347h4\n",
+       "RVIx7/TV4emNbQoZbKiuxsrSUuz0++Fxu7Fhzx4lx1XPiAnQJoN6BJPflpkRaeVrGiWouQKjhGup\n",
+       "E1TLSsXkOcKRMIKDQexo2nFVkE4gF0vJpFtQJxUS6UdmyO7tAHoBNAB4wWBfBK5m5Lq50IsvvohP\n",
+       "fepTOHv2LEpKSnS3E+ZCAlcVRO2p3MDwMNAv/5YHg/F+Knrgy6sAUk7jxfl5lDudmJifR6nDgfd7\n",
+       "PPjaiy/iVyMjiCwsKMVCWBitx+3GR2pqFBLEh3eysE+v243TnD04Q3VhIZ751Kfw0WeewdjsLNZW\n",
+       "VeHs0aPAihUAJHLI18EcmZqK2Z+RTlZChY2hwGZDaG5Ot5zKAqCQzgqnE0e3blXCj9/nGMOrc9fB\n",
+       "h/cwef4U/Hsv4w8XL6La7Ua1262QTgAol3NA2bjVynG5y6UQRjUpZQZNjIxqudMmKqui3j/XYTTs\n",
+       "yGrzHfYdlcj9NxnZtZIMLyUDlnS0K4/bg+48VXuN/e7Fz04XurKgYppBugV1UrGoylR28ZNYVLvq\n",
+       "NCHupQTUePzxx3HXXXclJJ3pQuR4CggIpASuXCHMeB9EOLXQXVCAgc98BoH6ehzbuhVetxuXLl9G\n",
+       "76lT6PnjHzE6M4PQ3BzmiVBot+Otu+/Gvx45ouQZ+kpL4S4owH0HDyo5i10tLVhVWoq5hQUcHhuL\n",
+       "O37vqVN4aGgI79xzDwL19Ti0ZQvGOCJ8aX4eFxOURgGAQrsdf37ddQAkc6L3V1QoNUWdNptiFGTX\n",
+       "2f/Ply+PyXn9B+9ruA2v4NtVAzi2UI/+0VGcnpnBedlAqObxx1H4k5/gY888g3kitHP5lYwojkxN\n",
+       "4fDYWEKDH4/bDY/LhY7nn0fb/v1468KFOFOgRPmaamMilvOpzhnNFbS0dKG+PpBU5cuU+U4i06Vk\n",
+       "JhBWmEQwLK4Bi7VgtKMH0i1/MqgzCpdShmE84mfHAw+60Z0C6czUTBk3DtJGKiQyU9nF6Y5FQCC3\n",
+       "8B//8R947LHHMnoMoXgK5B3EE7rcQFdXfLlCPfDKz83V1eg/cwYAELlyBd85ckRR1XgV0+NyKSVO\n",
+       "Gqqr8cIdd8Qpcl63WyGXK372M9htNjgLCvC+8vJoKRVIamOFy4Xw3BwavV4U2e3oeP55hWTZ1qwB\n",
+       "5LARp82GIocD8/PzUt1LjTqgQx0dWFlaiuDgIPpPn44JxeXLpGgF5DZ6vXhUdQ0/X/IVzLtfxRNF\n",
+       "dyBy6ULcPpeJcJkIQ7IJUm1RkbKOjYEpx8kMfvj5q5XNk7xuNwZOn0bVzp24uboa7T5fjMpqpC2m\n",
+       "juZSjU9GuJLBakWQfUclIvHJyO5iONFaiUxl1ZmlHWp9bAyZLemSKah/97QVcSuVvUwXv0n1CklF\n",
+       "MV3kekY5CnEvJbAYEIqngMASwGIoT6xcoZHcTl758bhcCnFS35DzrrhP3n472n0+dPh8CukEYm/m\n",
+       "XbJDrdNmw/Tly7g4P4/xSASvnT8PQFIjFyDVxQzPzaG2qAgHNm/GycnJGCXKKxMwO4Cbq6owkcSM\n",
+       "aMsvf4mburvROzKCC6r5ZuVQtNTO5cXFmrmRxydncCxSiV+dGoVLdry9uaoK7T4fHBqlV0ZnZhQF\n",
+       "jc3Z0a1bYxyF9a6J4xMTAKSQ3ec3b0agvh5rKipwdnYWobk59J85A5fdbogwahGrfCytkilFUO3y\n",
+       "zCOZGmtUreWRS+VCzCqTRmFWu1JTsePy+woAD1vYr+xBUiLD4ac0FHErlb3MFb+RnHZ3og39CJt2\n",
+       "hBUqo4BAPkMQT4G8g6g9FY9cv9nnCcpOvx9v33235g05H8rpcbvx7MaNeGbjxpht+Jv5VXK46jyR\n",
+       "kltpB/DyZz+LQH09PlJTE1PmpMBmiyn/wfJAJ994A4CkUL4qk1YboKl2AsDpqSklDJiZHhXb7VhW\n",
+       "WKiEDm+49lrpmNx+H6mp0SxpwvpT6nCg4Gwlqv/kw7KfbsHOdRtjapDyGJueRjgSwfahIYxNT+Ov\n",
+       "VbmXz508qVwTD/T1KUT0kkyqJ+bn0fqLX+DS3ByKHNHgl4bqasMlUbSIlSitEv2O0qqZypCM7KZC\n",
+       "hnOpXEimaItZ2qGmYj4AXxgI4i/2+vGz/W2I5Ek9z+jvnkTpHQ7JTTVWEbeSlGWu+M0whtGPee6h\n",
+       "REIvEoEMQdxLCSwGBPEUEEiAYBDw+4G2NiCcw/cnuX6zryYoiW7Ik4HflxntMNgAvHrXXfh/3nwT\n",
+       "Y9PTeIc7aYUFBXixvT2mP2sqKnB4bCyGYJLqfy3wdJQplNMLCxibncV3jhzBsfPnldqh7Eu21OHA\n",
+       "Dz7xCc2HBF0tLUp+62jFGZw/a0fvXjeCQeDZjRtjQmsZ+kdHERwc1H3owCuxg2fO4Kl330X/6KhS\n",
+       "i5Svj1rqdGqqy8mgdR4TqXwCEjIVoVAsh+c2ehuxY5HDczNds9Mo1FSsHMCy8DDWjPbDa0H+rFUY\n",
+       "GAhi714/9uuQ4e/he3I9zjcRBtDSshb19R0ZdCnOnLJYLD+WkB5KfBjAo5YfwzyWdvavgECuQJRT\n",
+       "ERBIAL8/6twaCBh3bs02crWOYqYRjkRwU3c3RmdmUOly4chdd8FXVhZTUqW2qAgFNhtebG+PMfQB\n",
+       "ouVB1lZV4Y1QKKYWp91mw4LGdxdzs61wOrG+tha/PnNGqcvptNlw7w034Gd/+INmfqdDru8ZuXIF\n",
+       "dgAbamvx7MaN2D40hKfefRehuTmUhasx+c074P3LIaxpCqO80IF3Ll7Eu5OTMW1VOJ04cd99uO/g\n",
+       "Qc0SJ5WPPqqQTB7q+qj5UhplKUGvHmq6SKVciFamXabyM5P3JYhhDKMYxehCV0ZcWMMAfrS/Dd4c\n",
+       "Ky+TrPRPbD3OOnTjdeRruKlUL/SL2AGCBzuRG+PwI/v1RQWuNuR6ORWjEHU8BQQyhLY2oKdHcm49\n",
+       "cMB4rUqB5EhmQmPUpMZMvcpE+wZ6e9F76hQKIJHOPRs34rO/+hUiV2ILpNx+3XXwuN3K8Woeewzj\n",
+       "kQhsAG71evHuxIRufVItOGSCy74l3QUFKL1Qg4WaEMLzc8o2PCkuANB07bV49lOfkuZK46HD7b/4\n",
+       "BXpPn0a5w4GJy5fj6oHqPawIDgzg5ZO/hX1hAh77AubLb0Wps9BSo6BUa8DmMsyYKuVSPVQ/4m+3\n",
+       "tZZlpy88uQqgO0NHNlNkPVskPFmt1dyrx7nUkG590Vhk4yGKQP5BEE9BPAXyENmsPRUOG3duFTCH\n",
+       "ZKrPtT/9qVLvs8PnwzMbNxpum5GqIrsdJycnY8gATxBqiopwcnISM2+9he4vfxk37NqlEDxXQQH+\n",
+       "7Npr4SoowMFTpxC5cgVlTide5+pvAsDJyUls2LsX1xUXK66zDOu83hjHW0AijXq1PrXQ6PXivclJ\n",
+       "nJdDMvn6oYnUMjYHD69bh4eGhgyr4fx5KcUELqE86bHMIt1IArWj51eHji26ky4/b82Tk+j7+td1\n",
+       "t82lCAWt221rb8GNIYggnsJTCCGEBjTgBbyQlZv1ZMTSj+yQ8GRk+Bd9v8Dj/sdzqB7nUgMrtmNN\n",
+       "Tc5sPURJB6KOZ/YhiKfI8RQQSAgzzq0C2tDLZ0uWl8rX+zT7Nc1yD9XutUCsEVPPH/+I/tFRvHzu\n",
+       "HB4aGoKdc5Cdu3IFvadOocTpRGNNDQBgcn4eDw0NxRxr4/79mJybw6sywWyorka1ywUAGBofh1Nu\n",
+       "0wbA43Jpkk69L2Lmgvu7O+9Esd2OMrtdIZ0Omw0Pr1uXdA58ZWWm8mkVoyNM4gqkcVS5XDg9NZVW\n",
+       "TiJ/HTgrpDbM1oBlUNe4zAVzLf56/vsPfzjhtunkOFsNrVxMftn2LDlmD2MYIUiGOSuxMmvkKpn7\n",
+       "rtUmSXqZhMnMpEpRmmI9TgFjsDanNZrH2ogdFrsCCwjkM9Imnjab7T9tNttZm832uhUdEhBIBvGE\n",
+       "LreQzChFjxQkM6G5zesFIOUkVrhcMccwas6iRW75ZbdUV0t/r1+PHU1NWCu/Z/C4XNjR1KSYGGmR\n",
+       "5NHpaVycn8c8EWwAqt1uNMh997rduLm6Gu6CArx21134+LJlAKIlVz7k8WB5cTE6fL64osoN1dV4\n",
+       "MxCAx+2Gr6wMH6mpwSRHxi8TxZFgK9DV0oI7fXXwuRYwDanMzNTlyzh89ix6RkbwRZNOiOxcMXOj\n",
+       "npERlP7lIAKB1MPX1TUu2Tm9wXEeW2d/uChOpfz1fIccAp0LSGaZonW7zS/LFqnnb9R3YmfGjhN/\n",
+       "XMjHjRJLfs5+DGtNklItM5Pq755UusSPNrQhLExzsoYudCGAQE6HRYt7KYHFgBWK56MAPm1BOwIC\n",
+       "AnmIZDemespmMtXnydtvR6C+Hoe2bIlRLmsefxyPvvOO8n71rl26BFSL3Kprha4qLYW7oAAffuop\n",
+       "/F5lXfyJa66R8jiLilDjdsMjK5k8nLKrbQEkZbb39GmUOp0I1NejwGbD78bHEblyBf/8yitKO2u9\n",
+       "XrT7fBhsb8epL3wB5yMRxSm3wunUdJdl88jqelrlYKwm8R63G09vbMPKZR9SjsOXW0mmPqvbY9cH\n",
+       "y3ttqK7Goy1NSiRBKg6v6hqXXS0tWO8+iS9f/jbCp/cuilNpLqmYPNKtp5kJx2yteqOJbtQz6Teq\n",
+       "VnyDkEg3m7NGAJcsOA4bw5vye+urY2pDKl3Sjx70IGhpRVUjiD1zi0WCkzkGZwIeeIRCLZB3GBkZ\n",
+       "wZYtW1BdXY1rr70WX/3qV7GwoGWVmDrSJp5ENAjI8TECAlmAqD2VW0h2Y5pqeQ3+Rp4dowCS0sfy\n",
+       "MO0AxuWSIFpKnBYZUNcKXVlaisODgxiZmsJFzgV2bVUVfvbJTwKQ8jjPRSLoPX06jly/cuedqCsp\n",
+       "wTK55Em504lCux1j09MxJU2Ia6f/zBm47Pa42peVLhc2rViBUCSC+w4ejCFibB7/63Ofs7RcCV/v\n",
+       "c83u3cox+fPWyKnPO5M8JecfRGzZ/d/htseuX1laGtNvvQcXiQipOizR43bjGzVHUIwZVV3DxUEu\n",
+       "fUelGyqayuc3GVHUqjea6EY9XfKcCEzd3S73+SkAF+V1dgDjFh2XjWEcQB3iFdRkc5bqNbW4IZ+x\n",
+       "Zy4bJFiL3KpD8wUk5NL3lEBu4G/+5m/g9Xpx5swZvPbaa+jv78ePfvQjS48hcjwFBATSQrIbUyuU\n",
+       "IHaMSlUbBVxOJq/E6ZEWreWM9LHw17VVVejw+XBoy5Y4YqhFrn1lZfjT5z+P95VLJjwT8/PoPXUK\n",
+       "/aOjCkEusdsxdfmyoo6q22Hje/fee3FmelohYjdxRNBszqZ6rHpzwufSjs3OKuSPHW/70BBmLl9G\n",
+       "bWEhnt24Melx2Vz58B7umn0Yf+3YhdrCQmXcjLiy/rwZCmnOidkQT7UKKiAh3XqaqXx+k+ZNquqN\n",
+       "apEutmwFgKPysrXInErI+syeolcCqJH/rgDwcJrt8w8AtAqhZIpcL27IZ+xjj2yQYC1yqw7NF+Ah\n",
+       "6pcKRPHmm2/innvugcvlwjXXXINPf/rTePPNN5PvaALqtKKM4IEHHsD1118PAPB4PFi7dq0SW86e\n",
+       "uIj34r2Z9wy50p9cfX/HD36AkUuXsLyhAV0tLXjtpZcycjzmdmpm/+DAAF4eHITbbsfz/+2/weN2\n",
+       "J9ze43Jh2cmTOH/xIrBmDRq9Xhz/7W+l2pcf/CB+8IlPKNsPT0xIDqPvvIOOt99WHEZfHhzE0QsX\n",
+       "gDVrEBwcxIMOBxbeeQc1N98Me0EB6J13UHD+PB79u7+L6U9XSwuCg4O4+Prr8H/ve3Hz2VVQgLdC\n",
+       "IeCdd/C+8nKsamxE76lTKHv3XUxdvoypG29E76lTWB8Oo9lux7P33x833u7WVvT19WHmrbeAqioA\n",
+       "wOjRo+g4d07pv5n5HQ6H0S9bxwZdLoxNT8e8Z8dbdeYMQnJu6w1nzmCb/F3N2nv58GEclc2V7t+x\n",
+       "A9+87TZ0FRRgOBzGzFtv4Z9uvVXJaezr68ODDgcmC0/hrtkfYGJ0BYqvvw9v33M7goOD2HblCl57\n",
+       "6aW4/tXdeisObN4cc30WOxzAO+/gxooK7Lj//qTjdbs9cDgexEsvvZYznz+t998DcMnvRzGAB/v6\n",
+       "UJqF43dnuP0uvx/DAGb6+vBPAIrl9Tf29WGbtEPs9i1dCA4Gse3KNrz20msY9vsl/8++PnQA6JPb\n",
+       "62ff9/L+JX19+IKB+VP35w4D4ymWj38DgA/6/dgJoKmvD6MALvr9eEg+Xqrz1QWgo68Pfw/Ak+D4\n",
+       "NwLYobHe7/encf67TffXmvcPApiG3/8sAA8e7HsQ05jGs/5n4YEnI8efwQzgl8jttr5t6EMfWlq6\n",
+       "MDgYxJUr23L++yGb76VlL8PvPyr/3QHgmznTv6X6PhGCA0EMh4dR7ChGV0uX4XrMVu2/ceNGdHV1\n",
+       "obm5GRcuXEBPTw++853vaG7b19eH1157DWE5RenEiROGjmFJORWbzXY9gOeIKM7KT5RTERBYPGSq\n",
+       "UL0VMNs3fvu6khK8vnUr7vjlL3H47Nm4NvTqJGot59tl0OsPv+2q0lKsLC1FscOBifl5pR9Omw2f\n",
+       "uOYaVLrdODczg8NjYwCkMNp37703qXIUjkRw0+7dGJ2djet/OrUi7zt4UHNOwpEIHujrgw3Ao35/\n",
+       "XJusHa97Cmsq9qPc5cTE/Jdw+Oy47lwZqZOYrJalVsmR4MAAnjt5EpGFBdxWU4MnczCnMhn8yE55\n",
+       "jiCsqz/Jt1UD4KSqXT+iY6oF8BsADwEoUm27XadPiUq6MNgB/DmAGQCH5WXq+WP9PIaocml0jrWK\n",
+       "aWSzrIy1xTz0YOVVkZsIy7mkouyMUSxG8aSrF8nKqfj3+tE/KpfhqQ+gu9XcL0S6+1+4cAGtra14\n",
+       "/fXXsbCwgAceeAD/+Z//GbedKKcicFXByFMjAQks7NE75cXp/7sJbW1SbdJcgFnTEn7717duhcft\n",
+       "1nWbVYf/srDO+StX0OHzxRAdpqwlcq5lOD45CUAKy11WVKSEgh6fmFC2mSdC/+gonHY7jly4oCzf\n",
+       "u3Ejtg8NJTXS8bjdePueezTDl82En6rnQC8k2uN249mNG/GMThgt229NxX4cHutFz0gPjk/8Xneu\n",
+       "3r97N67pegb3ntqM0Tl7XHt6/dOaB3WI53A4jNGZGYTm5tB76tSilU5JhkTfUVaX59CDlaGbfFv/\n",
+       "S6PdYm7bUUiksxsS6eS3fQJh5f1qXFEC+7qgXdKllmt3AUAvgOPyey+A04gNEFSHy5bDeIislruv\n",
+       "Vr9SgRFTnWTFPKz53ctktmxuQJj6GId0TVl1lQtYAXUaQjb3JyJs3LgRgUAA09PTGB8fx4ULF/AP\n",
+       "//APpvuRCGkTT5vN9r8AvAjgRpvN9iebzfbF9LslICBgBRTSsH8zDve60dMDBDN0vxEMShFxK74x\n",
+       "gA0/T+5S2tXSojjKqo109LZP5FDLq2I3dXejd2QEgQMHEI5EFAOd3tOnQUAMmelqaUHz8uVoW7Ei\n",
+       "zrlWnRfpKykBAFycn8fL584BkBTO64qL4SqIfp2W2O0IRSKwc08E733hhRgjnwcS3Ejq5dWZIese\n",
+       "eSw3dXejaudOBA4cUNRDM06yrC/lLkbMG/Gbji/pkkZWXmY8EsGGvXtNjzERijl33YbqastcVrOJ\n",
+       "bN3mpUtwg5CUzDYATm45s98qhUTwwogliV5I1KYKUi4j34c57pZjHAW4Sd5fi3RtB/A+1bFdkAio\n",
+       "TT72YWgTYHaUCfnYibLX+HGqt7GqsuPiOsvyyNxjD2scaxOdDYHMwNr6pQLpoaulC4H6AA5sPmA6\n",
+       "TDbd/cfHx/G73/0OX/nKV+B0OlFVVYUHHngA+/fvN92PRLAk1DbhAUSorYDAoqOtDejpARobU6+d\n",
+       "mAx+P9DfD+Dv9gJrjIXQZiIUWB06G6ivR+/IiFLOo8PnwzMbNxrqi3rZpbk5qQ6lw4FLly/HtbG8\n",
+       "uBiRhQWcl8mcDZLpUbHdjrfuvhsNTz+dtB+AfkitVvip2bnwuFzoPn5ccfA1Ou/hSBjBwSB2NO1I\n",
+       "+INW89hjGI9ElDH7ysqStm0UycKC9TAwEEQ4PAyHoxgtLV15bT5kNFgy1dBNrXDVlQDOQCKdHkjl\n",
+       "RdjVz0JZ2fFOIxoKC0gOrsxMx4tXcR63xhyvBhINqgHwKwARALchNqQWkEinG8Ckqr+VAKoBnIMU\n",
+       "jgsALM7ADomo8v1Uw4/Mhz63oQ096EEjGhe5rmPmAnr98KNfnskAAuhOaSb9yE4guoDA4iBZqO1i\n",
+       "gohQV1eHr33ta/j617+OyclJfPGLX0RJSQmeeOKJmG1FqK2AgEBCdHUBgUB6pDOZSnb8EwPA3+2F\n",
+       "faW2S6kWMlEjkFfF1lZVYUdTE26TzXEaqqvxKGeskKwv6mVMYf3YNdco+5VxIbpvBgL4aE2Nso4g\n",
+       "fcm+1NEBX1mZoX4A+iG1TMXseP75mPOgd2605mI4HFZIZ6XLZXjePW4Pulu7kz5FZeVlrCadUh8S\n",
+       "hwXrYSmVUzAaLJmqjqEOV22E5CzLlE47oqSzElHdjB3vJNfWhxDr4Po7vA/FGIND9qAuhUQYewDs\n",
+       "hxSmG0JsSG0DgHa5DTXp9AA4IrdxERLhnOL6toEbA+snr6ndD4lgA8Ycc1PV46xwlrWmFmXm1C1r\n",
+       "HGuzFYguICCghs1mw89//nM899xz8Hq9WL16NdxuN77//e9behxBPAXyDiLH0zw8HqC7Oz2lM1l+\n",
+       "oa8xDKwZxUJRBHUlJYbq/tUUFcWFt6aLrpYWdPh8aOdKojzZ2opAfT1euOMOzT7d8YMfYGJ+HrVF\n",
+       "RXjq9tt1Q3m3f9WNse+0Ajta0bbchw6fD5tXrIBXrgnK9mHlQwDgCoDvHDkCAEn7wZCIkGudB71z\n",
+       "ozUXfM3QI3fdZbk5DysvYzXpTAfZLqeQye+oTN+aHx8YAPbuRdn+/VgRicAN4B3umA3y35WQSJ+6\n",
+       "FuUE9/4GROtjtgGoQAXKsQyXIT0QvyRv9yFIxI+hDMAnIKmg1QB2Ikp8AWAZAB8kFbQBwLS8vBjA\n",
+       "y5C0sncBPItoWDNfp5PPV2UE+3qNsfghke4Ncv/fQmoZklbkHQ4Ovqz78MSaMNf0YE3ZFpFvmE2I\n",
+       "eykBNdatW4fBwUGEQiGcO3cOu3btQg33MN0KZKWcioCAQP4jmTpZXhhdb7TY/MnJSZyLRNB7+jSC\n",
+       "g4OWhNp63O64EFaWT8iDD2f90+Qk3pBdaR8aGlK2Ve83PCyHE8ONVbcUYWVjGMcuXIgxu+lubcXb\n",
+       "99wT40zL5otXLFkY7fahIQyHwzg+MQFfWRnKnU78uKkJDw0NaYbUJlJmSx0OhCIRhCMR6Vgac8FK\n",
+       "w+iF65pxzs0XsHIKiVx28wVdsDZYUh266wuHMTI6ikkAhYODOCxf/3UAPgBJiWTOtT5VO92IEs9K\n",
+       "AI8C6EA0ePJWSOqkGhcADEIiquchKZuD8ra98n6MpJZCIpf3c+0yPA/gZkQDNIMAxgDcB+B38t88\n",
+       "GJllobor5DGVy+Ngob4j8v8sjzUR6TcSCp2Kt6zdLn0OtR6esBxSqe1gimGuZnoZv46R6/TAFFkB\n",
+       "AYGlCpHjKSAgYAjJ8gvN5h8CyUtqWA1Gqo5PTmIiEsGEnKdZW1SE0ZmZpP3gc2Xd/8deHB6P5k/y\n",
+       "+wYHBvBWKITjExP4jRxmy6DOGx2bnjZczgXQnudwJILVu3ZhXA6zTSdfVi/vNhiUiHfnNT18AAAg\n",
+       "AElEQVRxsRS6nYk8YYHsw4/YrLpL3GfSs3kzet1updDCTZDCYQGJUD4D7ZxQJ4A/QCJxrFhDKaLk\n",
+       "kUcxJCXxXwE8BmBOXs7yM9cCKEFsvqcbUiQBr4Kytj4CiRz75HZZn1i+NSAppXcPBLEsPIw5RzEe\n",
+       "a+nCpOqBRK081gpIYbyNkNTSh5CY9PuRPEvRyDZqJCpRlJkc0kS9TLRucRBEEMMYRjGK0YUu4Wor\n",
+       "kJPI5RxPMxA5ngICArow42Cqub/sVnvfZ93Y0ajvQpqKS6le2ZNU+5oMLCR1ZGpKIZ2VLhd+09GR\n",
+       "sLSHUo7lr/aj/d4IDhyIKrxrq6riSrQMh8M4fPYsRmdm8NDQUExbasWSvTdSzgXQnmeP242PyOEw\n",
+       "6ebL6inbTO3NpDPyUkC++XKqQ3f5z+STbjcCkJROnnQCURKnzgmtBHAXJEWyDcCPIRFFLdIJSGZC\n",
+       "HwOwG1HSCURNgV5HfGhWBPGks0DuYz8khfIw16dSxN7stAKoCw9jzWg/PjzSg8/JoavMnKgBkqIb\n",
+       "AHAU0eBPH5JnSBoJhU4lXNrt9qC1tRtDQ9vjcj2tCXM108vcy8XMHedgAQGBRBDEUyDvIPISzMFM\n",
+       "7UfN/TNIONQkKt2+JgMjVRUyybMDcNvtuOMHP8CluTnd/Vi/ekdH4PrfBuHxRG/QD23ZgmdUNTr/\n",
+       "INf1rHA68fC6dTFtsf0+UFmJjuefxzwRfKWluMnjicsxNQOj5WkSkfvgwIBmrisgKZ2ApPbuyI17\n",
+       "zZzEMID+vr6crJSoJsUsJ7MWkprnAbDd7cZYayvuk889MwziSacTwDhiS600QHK//QCkkFeWC7kG\n",
+       "ElHUw4Lc9kSC9YMAEj5Ch6SAHtFYboNEehmRXQvgZwA+Luf9vudtxM+bdsDBbbMSUZJphGzyMJKl\n",
+       "mEomI/vd0zLKykztykS9zJ1cTJbf+ibeBKBtbpQLObC5CHEvJbAYEMRTQGCJI13n2GwSjuOTkm+l\n",
+       "FmEzAz1yxUjf0a1b4XW7pZvemRm8EQolJLtac5iINEcWpFvYi/PzcYon2+/k5KREZk+dwtT8PIbO\n",
+       "ndNUSOPGJivQbW1AmLuH8rjdWFlaisNjYwnHwvdz9a5dMXOkp9QGg8DEBFBbCzz1lAiz5aG+1qys\n",
+       "n2n1LbLaEXcYkjI4CimEVGsbIKpvARLN8CBaQ9MFYBWAU4iWUuHDW62IW7iCqMKqhwpVP/n+AlF3\n",
+       "3EOQjIZ+2NKFN+oD+H83H8B5t0dx6m2U2/IjtXNgxDc2HW/Z7BllJepl7tR+ZErnOMZRhzpN1Tdf\n",
+       "1VBBmAWWIgTxFMg7+BOUoRCIhzqc1fT+cimWD/z3AXQMZC4MFgB8JSUAtAmbGTz32yi5+uLB2HIk\n",
+       "3a2t8JWVKaGpFU4nsGZNQmJuZA55ctpQXa38rdcmv/1arzfp9gyJFGi+zaKnmzQJKm9ENB6JxJDU\n",
+       "RGG2hw8Do6PAQw9BgINape8CEPD7U9aCjJZLSQVqUqx+H0S0vEgVgAH5/2lI5kLV8rbjsvMt9u/H\n",
+       "85EILkAy7lFXtk01k6k0hX0uIlpKhUcIQCEkYjwA4IOQwnp73R78sLVbye30QCKmByApvJk6B6mC\n",
+       "/e61tHShvj6AzZsP5L1RFo9USRZfxuV1vK6p+lpT6iX9vppFpgmzuJcSWAwIcyEBgTxHtkxf9Exn\n",
+       "rIQRsyEjrqtV/7wfoetGgPe8aD+5Gc926ZshPbxuna6DrBnwpj8Akhotmd2egTc4Utdl5dvs2OiW\n",
+       "HXilBwfd3bHbhCIR9J46FTPXauMiNtdvHnVg/HgRSq+fxMdudeDJjUvD7dYKWG2Qxcx4mKGPlR9n\n",
+       "Fl7LzHHY+yJIZIs3CHIglkjy5jzYu1d6CgEA9fWAxd8FyxDvQJsMMf0zCVYahrn0ZvIcLDYGBoII\n",
+       "h4fhcBSjpaUrZ8irH37FmTeAgGGH3DDCCCKIHdihG2psZJts9NUsMmMaJbCYEOZCgngK5CH6+vrE\n",
+       "kzoOfj80CYbVyIYDrRFnXCME+Pb2CHqvGcTaN5twaJ87KRnPp2sqHJYeNuzYkfghQyKCCsTPtRah\n",
+       "5+caBCXRrsPniyvTcrVC65pN53pSk0OjYKGzxyGZ9MwDuA3AckikMlHpDj/iS5PwKIAU7qpg/35g\n",
+       "ZATweoHNmwELvwtskGp4HtZZH9eXNFEHycCInxc1ITdT9sQKaBUyseo7au9eP0ZHpbNdXx9Aa+vi\n",
+       "O9IC2SFZVjnfZosQahFmK9178+l3b6lAEE8RaisgkPfIVg5muiG7RmDEGddIzuqTj7kRCLcaIp35\n",
+       "Bo9HeriQbFwsRFqLdALGjJ3YXAOIcXex+mcz027GmUQqbs4J20Nq2XMsRHcEkloYglQDczeiYaMP\n",
+       "aOzHh9d+GJIDrRpxRK+lRVI6LSadgHRt6ZFOzb4YAH+jUw5JUQWksU4AWA2JYDKwc7BYIbeZDLfO\n",
+       "Xo6oOWTGmTcWRkJXjYTRZqOvgLZplJnwW5EjKpCLEIqngECew6gClktIJzw4lXqhAsmhpWiHIxHc\n",
+       "1N2N0ZkZlDmdmJyfx9qqKhzassXSuc9GGPdSBVPH3oTkNFuOqENsFaTcR+bWympv8vAjqnZ2ILY0\n",
+       "ihFUAbhgttMyqgGcT3HfVOCEVOrlT5CU4SkAk/K6Onk5j8UKuc3kcRPVAzUKI6pbLtbVNKJUZiuM\n",
+       "NlWYUVtzfSxXI3Jd8Xz77bfx5S9/Ga+++ipqamrw8MMPo6OjI247EWorICCQV8hUeLCR/M+rCWbm\n",
+       "Q4/QW50Lq4VMhnFrhS0m3SdPrqMgJGXuovy+DsCvAfwtJOVwHFH10APgPcSPX01yApCUUjuihFUP\n",
+       "tZAUSLP5mJmGOj+VwQ7JuIjNlwtSWHIxgLcQzfFk14wTQAmAnchunmeq4dbZghFCs5ikR4/0Gsn1\n",
+       "zPW8SjP5qrk+lqsRuUw8L1++jA984AN48MEH8bWvfQ19fX3YsmULjhw5gtWrV8dsK4inwFWFxcxL\n",
+       "yJcb0lSQ7tjM7J8s/zBVpKKcBYPAyy/3YflyvyXmTGbmIdG26ZyP4MAAnjt5EudmZhTykMtKohbp\n",
+       "teqz5kdU0QsAhm5/01Vgs/Ud5Ud0bJUA3kUsUWGkUm2ew4PPZ/wVogpkCRKXErHJLyvzLdOBHUAZ\n",
+       "JDJ5AMBnEBs+q0YFgF8AuBcSWefnxg/9a4aRmuP4B/hwO8rhQBekEi1mH3CYQS7l4xkhNItJetIh\n",
+       "vVYbES0G2DXqhBMlKMFO7NQcSy5dU1cLcpl4vvHGG/j4xz+OyclJZdnGjRuxbt06fOtb34rZVuR4\n",
+       "CghkCVp5cEsF/Nhu/f6gZikOo/snm5tk+YepIpWapcPDwNGj2uVJUsFzJ08q83DL008nzF1MNGda\n",
+       "64zmQg6HwxjlSGely5VSDddsQStP0qrPWip1NdOtfZstsLExYqn+KHVBIk7vAvhXaNem5PMZRyGZ\n",
+       "Es0jef1KQmZIZ8I7FhXs3N8FkPo8BuD/AnB9kvYvAvh3AJsAfAxSyPB1ADYA+I28TTmAh1X7sxy7\n",
+       "ERThMBwxNVHVeZmsJusKud1M1GZdDBjJccxkHuTAQBB79/qxf38bIpH4GU2nfIpWXmW+gV2jveiF\n",
+       "Cy7dsXwP3xM5oDmGdP0OrPZLuHLlCt5444202+HhSL6JgEBuIZNP6JLlHubLDalR8ON1/lV0bO4n\n",
+       "m5RQ2GDQWCismblhBjnpgil7kYUF3Ob14ifNzYbCQXk1zVnRAsBvmTlTZCEaoDg1P68oZ8HBwTjl\n",
+       "LNGcaa1jZEyvPfW+AOBxuXDkrrvyTp3XmxuzSmgXzIctdrW0pJVHbPV3lDpcmKlrTki1J3dCe2yM\n",
+       "VAJRYgQAtwJYKbdXA4l0vmlpj1PH5weCWBYexpyjGP+zpQszCfIQ+VBgngTbECXlgERK/wySey1T\n",
+       "dB2QSOXHIBFuQMptPc3tNyGvfxvR+WWkphxOTCD6QOM+eT3/gIOf8xH5fxYebRaZ+N1LJQwdiJKz\n",
+       "dLdJFeHwsOLMOzgYjHPm7UJXTqmWeqG/mcqDNUq8L/kvKcpwEEGRA5oDMPobn4n916xZg2XLluHh\n",
+       "hx/G3/7t3+LQoUMYGBjAJz/5SVN9SAaheAoIcBgelnIP9dQvI86uwSBMqYVmt7cS/HhLdkXHVu6U\n",
+       "xmaUjAWDwMT3W1B7qh5Pbcic660aTNkLzc2h9/RpPDQ0ZMhhlFfTSv9y0FL19baaGgBAQ3U1Gqqr\n",
+       "AeiT8a6WFqwqK4Pbbsd9Bw/GPKFUX2vBIHDsdxIZa6hMTO67WlrQ7vOhw+fDe/feC19ZWfoDyzL0\n",
+       "PmtmldBUXGKtdqpNB0FIxJJ3pmWEphdSaKne2Jji1gbgD/Iy5urK2ntc/nscUrhtJcypjkk7zzpg\n",
+       "8LttWXgYa0b78eGRHnxhsNPwodgcVAM4B0m1vQ4SOW+CZKr0UW77ywAeAsBrAuxxTTm3bBSxzrJM\n",
+       "yTuGDyGAqPkPU5d5MyBGfp1cu2oFdTFh1j03V1xSkznz5ppqqedEa8ah1gyMqs3pKMMCmUG64kY6\n",
+       "+zudTjz77LPYt28frr32Wnz/+9/H3Xffjbq6OtP9SAgiyuhLOoSAgHU4dOhQxtretIkIIGpsJAqF\n",
+       "UmujuVlqAyAKBKzf3gqsWUNUUUHkdMaOt7NT6k9rK1FHh/E5WIwxEBFt2reP8MgjhEceobVPPkmh\n",
+       "2VlT+zU+/TSFZmctvaZCs7MUOHCAQrOzMX/roXnPHmUMgQMH9LdrJkLRLKHzALXfa2yci4nO/n5q\n",
+       "3rOHNu3bZ/i8GIH63OUirLyemin2R7WDiDbJfzcSEfuIdsrbbuKW8fs6ub9dlPzHu8DANklfzdHv\n",
+       "BQSM7fOVfZvokUdA//vTjVQ0GzJ8LJc8xnKd9W3yvNSq5q6Vm5/b5PVHNbZLBSEiChDROq4fqX49\n",
+       "Gr2mtK4DPWhdR4nQTM0E+V8g5ZGkhk7qpFqqpUqqpE2zzbTvQAfNzsb3upM6qZmaaRNtolDKZ858\n",
+       "3xIdcxNtIhCokRpj1ustzxaeO/QcBSiwKMe+WpGMExm5Z8jk/mp8/OMfpx07dsQt1xuHvDwxL0y2\n",
+       "QbovQTwFzIARn02b9ElPJolnKCQRp1RJJ5F58mp2eyNzlAwVFdEbwsLCaDupEkgrCHsqCM3OUscv\n",
+       "f0ntv/xl0i9angidmJiI+XLe/G//lhGSpHX8uieeoPXPPKMcyyiRWqw5ThXJCHWqxNTqH9ZMwMrv\n",
+       "KEYOQEQfJokgMELDXwbN3HYBjX3NEMsGIjpBREXcstVEVJVgH82XfM2ikQghY/sUzYao80DAFOks\n",
+       "0VhWqnrvlOfjBDd3nUS0jOIJK1uvnmMz6O/vpD17mmnfvk30GXks6ZBYo9dUMxknuUbG2N/ZSXua\n",
+       "m2nfpk30mVBr1oiSmszxpDcR8V0McpzsmCEKaRI8veXZQibvpQS0keuc6NixYzQzM0NTU1P08MMP\n",
+       "U319Pc3NzcVtlw7xFK62AjmFTJXZyCbM1tU0u70Vc1RTA4yPS7mdb70F+GRLx1TdZtOtJZpOXU+j\n",
+       "SORUmo06kvwxGAL19djR1GQonzDf6rUmK5EiancaQxjAFyGZ+eyEflitVu3HMICbIIWLNgA4Bcl8\n",
+       "pxGSMc+QTltVANZBqs/JtukA8AqiuYqGO5+huiCrAcwCmIY0N6yWaDGkkik3IZpfyWMVovmtE4iW\n",
+       "m2EwUzszUY7k3r1+JQ+xrj6Ana3dWSmPYnUN0L1+P0blH5y6QAd2djtjciczlaeodqa9hEvoQQ8A\n",
+       "oAENeAEv5IybrihbImAUuexqCwDbt2/HT37yE8zPz+PP/uzP8MMf/hD19fVx26XjaivMhQRyCsVy\n",
+       "UoxVRi9WwwhBSmaco9WGGfJoxRy98gqwYQPw619HSScg9UeL3AwEgwgPD8NRXIyWri645ZV682GW\n",
+       "SLJcU7ZvJh44mDXyydTxK5xOXJyfV47F8gmTwSpDJjNIp6RJMoOepWbUlQqMmLt4IOUnJgMzUSqC\n",
+       "RBKZcdD75PXPQCohwnggb4YzD+Ao19YFSOSlVn7vALBvIIgr4WHAUQy0dAEq058KROtjxnQ+Q9fs\n",
+       "JIA1iCeX0/LrEwC8kHJXSwFcgjRWN7dPLbffhwHUQyL3Rkuj6Bk2dSE2D7GlaQfazA8xJfOfVMy0\n",
+       "EsEh/+B4GxvRsuNRtKlaZXmKUn9TM6jRIq9a+YcP4AHYYMOjeDSG3PH7/xg/xkN4KGvGQgMDQXwp\n",
+       "PIENjlp8qeUpeFSfi1SJeaL9MkX2BQS++93v4rvf/W5mD5JMEk33hRyXlQVyC0ZCXQ3nuqQZkqre\n",
+       "v7MzNkQ11VxGo+Gsev1PNkdWhOKqsae5mR4B6BGADnCd5seyalX0uOvXm5unbISRbnvhBfLu3Emt\n",
+       "v/hFXJjmc88/n/HwTRYiqg7zzWUYzT9NBfkQMpsqMhEWaRR8m16N9jtJCqG1ycvXUzTPkX9VkhSW\n",
+       "WsOW7WkmPALpdSCQ2RsHAy87Nwb1y8uNq4Niw2v5vMYT8vp2Sh62rEanPEcgKTR5vWqf2dkQHTgQ\n",
+       "0MxD5NtoJv18TL4fzYsUFjkbCtGBQIBmdb6YrchT1ApVNROGyu9fS7VZDV3ds6eZHnkE9MgjoP9x\n",
+       "YFVcrmeyMFy9/NBE+1kVTixCbbOPpcKJ9MYBA6G2QvEUyClYqeqkq6Kp9x8bAy7Kj/QrK1NXG5Mp\n",
+       "lkwtPHYMCIXi+59sjsyM26iixT/1buI6zY/F7Y4et7Y28RjV0FNarcTJyUmMRyLoPXUqzma81OVC\n",
+       "d4YLafPKZr6ElWZSlTSq9OYjvnf0KL45MZH0c5WsxqhRxYvfjjmoNsrb96rafwLADLfvYQBc0IOC\n",
+       "myGpmI2Q1E/ICh68jYCGk2i2saCxzAWgFZLyykJonQBehhRiex+AH0Nys2WKoJaabKT26zCk8iuA\n",
+       "pHTOqfZxuz1xZT602mCKabTMSvRsFmMPACcaAfx9wpYyB7fHg9YEPyJWlC7RUjfNlGMpVs4YMIpR\n",
+       "3IpbsRIrs6II8sr2k03uOPVXzzmWqZYv4SXMyVfPA3gAz+LZmDFpOc7mixutUGYFNJGMmab7whJh\n",
+       "9wL5h3RVNPX+7H1lJdGJE6n3S0ux5FVKXi1Mpf9mxm1U0dJ76s2PhT/uiRPpmzRZjXxwQ801LGVV\n",
+       "MpMw+rlKZu7STMYUUX67Dq5NdfudFP8jXUX6TrBs33YiqpwNSUqnCdMfvZfX5PZOInJTcqfdFfJc\n",
+       "uIkI/Z2SSrtvU0yfjehDRkx31I6wqZgR8W1sI6Z+vkQhqiAiUIjuT8vgKF9gVN3UUgc7qZPW03py\n",
+       "kUtRXtfTeksUQSPglW0t9VdvbGqzJBCogzqU9Wy/bbQtbsyLbUpkFEaU2cVwIV5MLBVOpDcOCFdb\n",
+       "gaWIRKGk/Lp0yY+aIOqFuFoR2sqHrNbWSv83NBC1t5tv04wzr5VkzApH4EziaiNRmQi5FjAGs58r\n",
+       "PYdfo+Uu1NutIaIKkgge/4ysmWJ/oGsonnR6KEoO11M0DJQd40OUfqmVExRbYiTRS8uxFiSF2fLr\n",
+       "Kik23JUPDXbIocGJ5tFMGRKi9F1v1W00c30P0C6d3prtZXaRaRKhRWT4ZXVURyHSJoCZ6iPf3gk6\n",
+       "oUsI1cdlfSyjMgKBGqhBcz9+fF7y5hVBMxKGfbWR06XCiQTxFFhSSHbDfMsth3TzB9X5k9m4+bai\n",
+       "huViqYWMjG37q1nNedKbv6VGapZirsti1Va1ApmqAZotJMoZ1qy3qaOQbiOJALZSYpqhJkGSXia9\n",
+       "3BRV09RKo149z+UUS+K88rIquS/JSKNe7iXkNowS1zqK5p8yglxBUk1Ovn9OksgsI8d2IknpfARU\n",
+       "8XQjHZ0NJSWJzVx7i/FxiT48mKcQ3U/q3krfUc20uL1MjEyVMmHEw0veOCJjRmXMRB+NtqfejvUx\n",
+       "EVnlx1dKpaYJWjLCls7vnhEyaESZtYqc5guWCicSxFNgSSHZDfNHP3pIN5RUHWaq15aVxKmuTmq/\n",
+       "oiL1ENxU1EIrSaHePJldnq/IBvHMNplKJ9Q8o301INpk0tQoG0h0PTVTPHXQU0i1ttWCekrVBJN/\n",
+       "z0hhMRHVkvYPdztFiZC6HiYS7Gflq5QkMrmN/oZq6AVqpm3UQRHlkmH9YyZIJI9dMUOaDdHyAwEK\n",
+       "ceY+iS49I+pyJvXGZAqqdE0Z1cAlZFspStVoKFk/tVRNBrNhp6yPXvLSelqf9twYHXOqc8PG10rJ\n",
+       "a6iqCRr/3k1uqqRKaqVWZX/+e8rstVJLtUrbfIiwWVhFTvMFS4UTCeIpsGTQ2SnlULJQU60b5lBI\n",
+       "clBdvz654yu7+fZ6Y7dXh7amQz7NOrjyY02H/FpJCvVIitnlVoLNT12d9rnOZWidW6vJVDJyyH8W\n",
+       "zBLJjBK/ZkrKpqwKAefHrafqZxta1EEvDNwozeCJYDtJRMxNRI90Er3STHRwE1FFSFILB0lSEk+Q\n",
+       "KjRVfn2AYnMW1Y635RR1ia3Q2N/Kl5eIKuklAlUoN9OM/LUSkY+IlpFEPpkqzBNmtVLczLXNu/yy\n",
+       "9tqTzLPW/gzZCYI1F+CbbaUo1dxDvX4mUjrT7WO6eaCsb63USh3Uodsvre06qZNqqTaOCKr30cvr\n",
+       "1COJalLN5o1XS/XGq3UOEpHRSqpUtm+ndtPzZwb5ktNqBJDKDi+Jl974SBBPgXwCT5raE3yXGSVX\n",
+       "7OZbTQ4ZcbJCtUuVhKWrGlpJCvUUV7PL04GarPHzk+ghQS6G/WqdW6vNjcyQQ7NEku/rthdesEb9\n",
+       "ZHfmTH5LwKasysflx13zjQM5odKboQ5Gt2VlPUCkaA8hInqjObpiVyCeMG2i+B/t5Rp94NcvI+lU\n",
+       "GlU97RQlgmZuHopj3u9SSAc3pDhll82VVhkZfrz8pdess60WEj0I4NtZRbmRiWm1UpQpBVWvn4mU\n",
+       "TjN91iJ56c5NqiG26mVa+ydrW289I2jLaJmyfjktV9RSEGgtrY0ZbyJyn6gfrM0SKtEkzwJXJwTx\n",
+       "FMg7GCFNhw4dMk2u1NuHQlETn3RVu1RJGOtTaSlRa6t1JkK5bvKjBzVZY/NTXp74IYEVYb9Wh9pq\n",
+       "XZ9Wmht19vdT5aOPEh55hNY++WTSNs2SXr6vlqmfzRT9ZaijrNyR8+Nu/cxsxlV6hmznDC8naVqd\n",
+       "JOVfKoRHZkpHGiXFk6mVRBJ5XE8SkXRSlOydoHj1jpFHkJRf2UzxP/YOjWXLKaqOnlCts9EsNdM2\n",
+       "aqMILdPYl4XMNtA8tdP9tI1m455b8GosPzY9gqhF5M0Er4ZIIpW86ZJWO+qanlYglWvKaqUoEwoq\n",
+       "c6WtpVo6EWOFZX2NUL7f6c5Nsr5pETrmUMuW8USQJ/VaYbXJ1vNQq5HbaBtVUzUto2Uxc3zo0KGE\n",
+       "5D7RGEMUihnHKoqvYZoqlpKZ0NUGQTwF8g4x4YE6StahQ4dMkyut7a3Mq0wFoZAUAsyTJr79bdsy\n",
+       "q+Rl+lhac5Vo/rQeDgQCUt5soocEVoT9Wk0UMkn+OzuJKr4VJYMdv/xl8v6kQXotU2rNpafpwkzY\n",
+       "MD/ubD6QySTx1Arp1AqZDRApTGtjKJ4INXPbekgKzT2qsS7AvS8hiey1EsWUKymYDSnmP4ykrqX4\n",
+       "0xxLTs8RaB+10/0UIkmpdXLr2yiWJPJ9Ys8tQnK/2XIWJJOuqpzoxpfvRy23H9+O3qXe2d9JzXua\n",
+       "adO+TTG5p0aQCwZomVBQK+Qwai0yawVxZn0G6TvHpoJkfdMidPyy5bSc2qldU11cSSuphmpilER+\n",
+       "fTu1Jzw2I6aM1Oo9MDh06FBScmnE+MjqEjZLyUzoaoMgngJ5DSuULKthdZ/UxkR8+2pSajX4Yzkc\n",
+       "1h9La64SzV8iYpDquqWI5mYifEUig5XfzXxNUsuUWjNsIAHy3XgoHXRSbF6lYk4kv2fr1IRHTYQ6\n",
+       "KRqey5ckYURKnSd5gmKdaJ1EMeVKquRyJSBJqewg7dMcDc+9rGzfQRFlPVMwtUirHpnTCjNOF4lu\n",
+       "fNXhyTz5ZNC71Gu5Oes4kCM/aiaQSQW1kipTbncNraEKqiA3uWkdrYvLjWyn9pg8zGwoalqETs/Y\n",
+       "qJM6FZWygRpiSFwN1VAd1ZGHPIbIs5aCbIRcbqNtVERFZCc7VVN1nPqsFbLMXw9WPpRYSmZCVxsE\n",
+       "8RTIa2TDwMYsrO6TXu5pY6MUfssfy+pcRj7Ul/WhslK77TVrJHLs9Rp37tWaq1w8p/mGTZuIUDRL\n",
+       "ldsP0Imz+VdqJF1YnSubVZhwoNEsu0LRH9dKbjkLAV1HEhFSf0TVRIjPz1TnSbL9a7hlAYoNtwWR\n",
+       "Uq6k8ulGWj4bilmnNu5hY1Arsw00HzMNfD/V++qRueX0JoGICmiKmmnO8G2qun3+fSttTXCjHp/f\n",
+       "apRCVspzhqcbqd2k4rkUwQhGJVXS5+hzKZNBXjXlCZtWW2qVdRWtSmj0YwTJzIDYNowQrqN1MQ82\n",
+       "1GqmVgkVfr3WMdl7PszWTFixOiS5juoSrs+EOp2JtgSyC0E8BfIaekqWkZAjK0iaVhtWq2t64aXq\n",
+       "v4ni1dB0CShzB/Z4KEZ15cHmgFdE6+o0m9Ns34rwZiuRKHx7sZAwdFSDfSz2HC42rMyVzRQObT6k\n",
+       "TTCbyTBb0dpUq4SI2aY7KT5nU4tIsWN5SSKMcbU5Z0PkPBCgdbMh8nDLeUKs7lOd/HeZfNxEl7DR\n",
+       "8Syj/QSKxGxrhN+r2+ffd1Ak4Y0vTz55FTnZMVtnQ4QDAVo7a/6WOhdCba0AT5j4GpbphFeyXMMC\n",
+       "KogjbImMeyqpMkZdNHJsLZKpV1qE35Y/Dtte7T7LHnSoS6gwoszWa4Uoq4lhIzXSalpNFVRBXvIq\n",
+       "Cibfp+cOPaf0lQ9JLqIi3XxbPoTX7DwZhcjxzF8I4imwqMiE22hnJ9EttxxK2GZnp0Si1CGd6v4k\n",
+       "6x/LKwSIOkzGcBkdu1ESwZeZKSmJH1uq4Mms1hj59QBRcXHqtUpzAXqhvmkV0k6z5mXC0NFmMi+r\n",
+       "5BhSmZ9s1zxNB1p9PXTLIe3zZiLPVbPsCulHKxttupmiXfNQVE1UEyl2LK380ZGHiMoAACAASURB\n",
+       "VOUk5VOq19nl9tnx1X0yYrpjwvyYiIgq6Sg3ngUKkf7HJlbVjG3fbAqy+lzoHTPRPmawqA/HEhAB\n",
+       "syRBTTD1XFXNtHuCTlAd1dFROhpD2LQUa15lPUEnEuaA8n1gxkBaiqLazIft5yKXsryQChUSyfrJ\n",
+       "k1E3uWPIHq+Qsu218j0d5IgZRwM1KLmjPDllCmYM8T4UDW8OUYjaqI2W0/I40snWd1AHraSVhuqf\n",
+       "pvMgQeR45i8E8RRYVGQiR9NIm+rcRUaU1PtqtcUTRp68Jirtkmo/9aBZA5Jrb9kySilcVasupjqc\n",
+       "Vw2myN58M9Hy5eZIZy6WOclEqG+6OYcJQ0ctMuRZTKQyP2b3yU4NRW1o9tWMraoOkm1qNBRVDS3V\n",
+       "lDncaoXpMpWSKZ5a7rFriaiaoj/8Xnk/deivVq4pPwaiWALnovhanGq00pxCOvWOw8Aru2rzonRI\n",
+       "YaJj5iPU5yURETBLEtT5e1omPKm0yyNRqCaf09hMUn3NNmrTrMXJ96GGapS/Wf9ZG1VURUwJ3Ebb\n",
+       "NEN/WY4mPx6e9IKiYb8ucpGNbMpyJzljyGAxFcfsx8ZaSqVUTuVKrquTnAQCFVOxEsrMO9GCQD7y\n",
+       "pfXggEj74YNenqaRBwp8qLEo1ZJfEMRTwDDSJQla+6dyk5+sH0ba1KvRqS5fokW6tAje2rXax0rk\n",
+       "Csv306xjrBZp5dv73OeIamrMl2BRq5eMUCdSXNMJ68zEg4d0kYkw1XRzDhOGjqZ7N5wDSGV+zO7T\n",
+       "TNEfHe1LLQE1TZO1avY1C+etmZKNWRtaXUvUlpbi2UHxZJU3JFJvz9pMpBJqGRsZGZ/WeEJEVEpn\n",
+       "qZyOkZdephMUJqJYIyKTzxKTIh8/qnqXfjPFzn0isxezRjBqUqi3v5F2UwnJTJQLqQbfB94p1kc+\n",
+       "Wk/rY9oopuK4ZexfBVVoqrAhCilht2pnWPU/plh2UqcSUsz+HZX9qBnR5P8xJZUnjDypraZq5W+9\n",
+       "kijJSrlokVE98q+neKvzY3mCLFTP/IEgngKGkS5J0NrfTBgpI2Zqsx01QiGi5uZDScNXtcpvhEJE\n",
+       "bne0/ba2+NItzEm2sVFS99T95/vKiClAVF0d22+WP7l+fTRE1ujcataA5OaSn+tVq4yTWtYuU3L1\n",
+       "yLtVSmU+GQmlE8aWDzmHhpEB6TCV+TG7T3K1qZl0aUyCVUawbXaWag4coNbZWeXY/PVk1ZSq27FS\n",
+       "YePb2qY6DlM8+Vc7xU8bI16tqm35nE+94yZqJ9XxVdARpd06OkxEiV1zGbKlnps9jjWhtrFHbSbt\n",
+       "S199bRlREFNVpfT2N2uIo2cmxKBFOJMRW74PvFKqVjS1SCMjdw5y0FE6SttoG3nJG6fgrabV5CAH\n",
+       "VVN1TK6o+l8zNccpxOyfi1xxKij710ZtRBRLolkb7zv0vhgiqVcShe9XG7XFnRczDx8SKd78MYWz\n",
+       "bX5CEE8Bw0iXJKSzP0+kEtVrZND7AeYJkxZpJIolgcuWRUknH1ZbV6d/bC3VUC/8Vb0tv05PLd22\n",
+       "TSKrtbXaYa18rmdDQzxRT0Qa+bqYeg8E9PJj9eY5lfzVXAzBXSrGHWmjmdIiYYuF5GpTApqWJoNr\n",
+       "pvgp468nrfWpQN1OojEnIzVsfR1JqmUrSWQypHEcXvF8pJPot81Ec5uItoa0yeoJig1p1TJCYghR\n",
+       "fG4pwzaSnHWThdrqwUsvE4iomN5QFE8jqmQzZecjYPY41nxHxR7VgojwjCKZoqnl/qqnjqkJG58L\n",
+       "aaYP6vzKNmqjEIWojuoIBCqjMmqjthjn2lW0Ks4MiLVrJ7uynFcs+fxQfj92HPZPrX6q/7Gc0/W0\n",
+       "ngqpkNbROmqlVmqndnru0HMxhFhLzeykTnKQI649fk7MPHwwqngLZ9v8hCCeAoaRbghiOvvzpJWR\n",
+       "IrPhqUTGVFsWXquX66lXTkTdV74EidNJtG5dPFlk27rdRHa7pIqy9bxxEa+WJqvdyfe1vT2e8GvN\n",
+       "gVZupxFizeZCTRQzoY4L5AgskNE6+zupeU8zbdq3iUI5UzIiwa10mnfZyaYs1SlNR+FspsSkhl+v\n",
+       "3k59HPa+gYhe53aMBKLTpj5eiIgc3LI6jfEw6E1/sjEkwwkKUx0dVkinUWjNs7UqqNTaJpkYZzcn\n",
+       "NHZ0uUIw9ZAsz1Pt/qquj8mDN99hobJGQnTVfVDnZTrIQV7y0m10WwyBZCGsaiXRTnZqpVbNsFpG\n",
+       "QhuoQXH8VZNBfj8b2RQFlyew7F8Jlegei82nlprJclfVbrwVVJFQpUwFIQrRKlpF62k91VGd4fMi\n",
+       "kJsQxFPAFLKlRKmJUGur5Kj6/7P39tFtnfed55cEQIgvIgG+GaYp03QiK87YLhmxcRLGBVpT9ZB2\n",
+       "Q9QTbhRvDtOzO+DO+GS3ezqxN+2cnHZ3JzOd05w5090507VmWuXNTCNbtWVFVhwqAWlVSezaieg0\n",
+       "Tc02Cd3IDi1LASVLFqm33/7x4Ln3dx889w24AEHpfnFwSAD3Pm/3Eryf+3vjffqBE9VNtrvbHrCm\n",
+       "pwUoSoshj8eMREQ7dq61vMRJX5/YJxol2rlTP1a5bXu7+bksRcItrxxg5batraIPdR5yrHKOY2MC\n",
+       "QCWoc1dhO8ur05rK9pNJfVKmwUFz7F5iX8uN0w21QarkSrR4dZ7+XFrUKnwMNDV37d9ZcFsyr0u6\n",
+       "g4g6SCTmWSZ/Fk5VXmG4XbOd2o/ltabhHJmxk8PFt3NkgmezzXwqnYNfGMxRjlL0DCVpkcYc6n3q\n",
+       "1pmPfZD+xndcoVWitQJ10BQdq/Hldb2jplVO7pa6six2rqJEVgsaB6cttMUWdnKUM8Cui7polEap\n",
+       "j/oMCyCPlYxTvATu+qiPClSgVmot+ayXeg04VD/roi4jk67MbCuTC6ngK/tZpMWShETy92ZqJgnJ\n",
+       "cj0lXPJ9pFsuXx8JprzWqpxrO7VrM+C6ycmKHBTQhtoYheAZypdqZYnSgZBal9IJTqTLkQQcDnES\n",
+       "ZnXzUN1IJyfFe6OjJoyqlkL+Pi83wvuQ2wwNEW3fLl5HoybEShjkpUhUy6vbU42b5fGl2ax1TVVX\n",
+       "YTXZkpNVV2e55seCz7urSw+XbueRrg+nRE1uCuKGSehqWyrfJU3SRASi8U+NEx4DjewfKbF4Vlom\n",
+       "pT6tqaUq53zqIPMfZz85g5cbdLnhxTQJwE2TSBTk2bKnaTjNxj2peW+i+F6l5UqsylGaxXB6+Xcl\n",
+       "Lm7zYp+FHHX7OI94wqMOepFQdJss7+K4PBt4kN9R1aiTWI02ndwtdfGdTmVUuHTwJsFMxmCqtTJ5\n",
+       "Eh75kG6ujdRIR+loSYxmL/XSNE1r3WFvpBstLqx8DLo+4xSnJmqidmov2UeCqlyTIRqiPuoz4JBb\n",
+       "Y2Xm4DSlCXlrO5PFv2AO/Ha1Vvm4kpT0lH3WLrGT7E/OLcxmu3kVgmcoX1KtadWyfKpJbrjbqkyW\n",
+       "o7OCSt1/f74EODmkcndYbjXk0NTQIPrnYOlmKZQxoXwO0aj5+cSEFWwleO3eTdTUZLWmTk+XwmUk\n",
+       "Yl0Xaf3UwTefu87lVo13la693JLpVRwUda7GKlw6jcWLi29PjzO4Ou1b7g2TEDxL5VbSpAQii9fT\n",
+       "hQ8VaOrQlPaCvtLSM+kD6U1hTS3nfOomAUQNB9KUPjROy2sFW/BKk/lPtpv8u4Dy/acc3ucgqiYd\n",
+       "ktJhlO69YG1taRqnQ8U+/sGjFXicUNyn9cCvOZ5HulI1PcQvbv6ygqQn/lciRzn6lfyvBAZ1Xlwl\n",
+       "/VqUy3W/LBdYdfGdWcqWuIryWEVuIdVlgOXj5/ORYMXhaIRGLEAnrZLSkikfavkS+eD9N1ADdVAH\n",
+       "baEtFkiVbekAVffopE5KUYp2027LPvL3IRoy1qOf+i3geQfdYXymAr9aa5UDorpuXs8RuYbcqrtI\n",
+       "i2E2202uEDxDeZIEA+m26ZZZtlKpSW54WRPed0+PsN7dcIMAJlk+RAXCoSErpHIrI39K+JKApz77\n",
+       "+pwthdzKJ8eeSJifxWJWILzrLrJk2AWIBgZKrbT8GY8TLS7qkwBxF2UJpXfcYXUB1kFzLCZeT06W\n",
+       "tuX35oLsx67+p7Qg83hXL2DIYdWttqjTvqHrbnByK2lSApEerqcrLT0zfsjemrrZtUxETQyse+am\n",
+       "tBf93LW1lfQA6SY7m5v6fpq1H7PpS3fYvaCVDmy8w8540VX1m1TwGMNZoAJN0icpS+s05nIepal0\n",
+       "rmas6yWapE/W1BrjDRQFHHiJk/OSMTRNzueWCozlZiH1A6y8z920m3qox6ih6VbeQ3Ufla9VeBqm\n",
+       "Yct8JHgu0iIN0iDdTXcbkKmrwzlKo0ZioBEasWSb3UpbCSRcX50y2Eq42027tS68bg872OU1O3ny\n",
+       "I7kOHdRB3dRNy7RsWWvuwtxCLcYa8DWV6+YkHmcrEzvZxdCGVs/NqRA8Q3mSCga1uJC3y0Db3+8M\n",
+       "h3x8w8MmTKkgpSsdooIuf27daoISB/GJiVKrKAcoO5fZjg4TlDlk8kRCdk/pdjw9LQBOQjd3r5XP\n",
+       "aNRaz1ONd9WNWXfM/Wp6WvTR12e9MaC7aeHlfOLg7DdRVTVqc4Yimv72v6KeL/wBjR38iPbivByI\n",
+       "rLT0TGGtQFNzemvqtSAJ1m37RwhrBeIX/RLKeC3KXtIDpJq11mtCH/m+tG52s77k064vvwCZZm3K\n",
+       "OaYWcoQDacKhcZp0PMaV2U/dzqPqW2z9yRsomhfwbiBX6sJaesRM9+K/ozH6qOF+qoMR2ZbqFuvF\n",
+       "msnnprNU2s2Rw5WsoamDYNmmCmO91EtZytIyLVOWsrSNtlEXddEYjRlWOL59IzVa3FwlfPI6nFto\n",
+       "i+Xz7bS95Jg0UAMdpaOONTtBoAQlSkq/RClqicm0e/BtZNKhDuqwwCa3uLZRm8XS2kiNlmRFco7d\n",
+       "1G1Zg17qpQmaoCxlPQGi7hxRz+0CFaiHelzPYT83Wpz2DxMZBasQPEN5kgoGlV7I+3Wt5ODDwYW7\n",
+       "mwIi4c7YGNFXv5ovGZ/anlPpkELB6iKrjkNtSyYS4k/pNlsoEDU22kMkB92hIevvW7aY20nQ5i6s\n",
+       "3OVUQqZTP+rYec1SmUjJ7pjbaccOAdHd3VYXXdXqLJ929VPrHQyr4mpbq4KAVZKbW+umqF/q9RgE\n",
+       "fKzKPZ8kEI0VoZODT5pKAXCZ3DPCqnDnRXz/ePHnMJklV/hy8XIrU5r97frVwV2SnXPZDXSl3kjI\n",
+       "1Gmapqkj3+FoAaosTi5N6hET7sXfJh7PqloNdTDsBKc62SX90W3PIcWp/iQvEaJmgVVBTq4Rt0Dq\n",
+       "4jl1D2nhbKZmCyzJ9VHrfcpHH/UZc0lSsiRuUyYDUh8TNKGN8XR6SIuwtt186fbqGsmkQj3Uo3VP\n",
+       "5sfJDuacIM8LjOrPWO83Wtz2D116g1MInqE8iYNBEIla/LpW6oCoq0s802lhdeSWwnQ6b2yfywnY\n",
+       "kVCmZlq1m48EwK1bS8fB4xjHxkSpFCfLJM9qC5ggOjJC9O53C3huahIutHKtp6etUN3dLdyFJeTy\n",
+       "DLfqUwXdri4zjlXOq61NzHvbNvH52Jg1aY9TLU8utb6pepz4c3jYe7v1pqqAZ5rKu+rfQPG/l7ED\n",
+       "G+fWWmkSIkNp8nYMvG7nUZWeTzrwkaA2RNaEQDrJbTuoFO68iEOhDm7TZC6Xrg6nl/Q5qnV1nIjS\n",
+       "RYvv8DXoSl2J0pQ2IMEN4JZpuYw4Of0RUwHALulMyVjJhC83gLCOwhk4OKTw39X9dGNQXWr5Y4AG\n",
+       "tO9voS22FsYRGimJ53QCOG5BnKAJCxyrbrcS8Pg+7dRO0zTtyeIpH13UZbS1lbZaYlJBKAHPOMUt\n",
+       "a5egBO2m3bYArbrX2sGcX3dqNZOvDlzlMZdj8+viXa5reChnheB5naoSeKzUBZPI2ZrG3Vh1yYMk\n",
+       "mHHL5+CgADcJXdLaqI4XEJDqZT6yn927hWWRu6uqMaLSisctjq2t5u+qW/CuXWLMo6PWz+Jxsw8e\n",
+       "98nHp1p8nZ6plFhDvk82ax27zuXW6diq544uIy+RtSxNY6NYQ79Ji+pVgUFPADUx7RTYGBXxv5fJ\n",
+       "j2+cW2ulSYgMeT0GVTxWfuRkePVjhZPb6qDRi9z6cgPTAhENkrCG2rn7SqXJvGCYvMZdqcuV34tk\n",
+       "/xfV+iOuWqOcsszq+raDU/tRuLevQkgHdVAjNRourHZjkBZS3cMteY+M2ZSPPurTutHaPSZowoCv\n",
+       "O+nOEjjWWVjjFLdAZoYyWiufXZIk1TUYhBLA1Y1TxEJP0gANUC/1auuDykeEIkZMqLruXiyYO2iH\n",
+       "Ja6UyD0+d4qmLHC6SIu+zjE/51oo/wrB8zpVJfDoN75TB7mFggleKvx6HZtdCQ91X9XyFo1a3ULt\n",
+       "5qMrxdLTY45XxprGYsKimUoJi2U2K+JKeQIcaTWV26sJhXTjdsvIq85Hvnay0Kpt8EQ9/Kkrp6Jb\n",
+       "D+mq3N9fCpU6V9tqluCpVY1ZogChp4r+euWO0WuN1f3dRJecaKHKqjQJkSGHY2CB9zfX6sK3Mk3m\n",
+       "P896NpJ7ObXTVHpBoJtTnTB/XcvvRfJGXlTbZUStVkZeDkZbaIt2DPI9p0y2TnCluuumKKUtkWIH\n",
+       "gKq1NUYxo80RGjFKnzg9uHuu20Pn+ttIjTRKozRBE7SNtlGCEhSnuJHwCCQAWMZMqvGlTo9+6rdd\n",
+       "d50FU4pbUmUbOkhV3+MAnaUshaofheB5naqS5EB+4/HsQFJ9X016Yzc2nUVUhbFbbskbbqNjY9ZY\n",
+       "RvlsahJWOZ5hlccrqu6zlpIun12g9v/zAOFThwjNayVgytu99VZzv8ZGAae5nD45UiRC9OCDYtzS\n",
+       "gtvWJqy0HNp57Ke0Yk5Oip/Ly6VQrx4zNVEPh+CODr1lUgVJt5I68pg4lXwJUkFY4p3EXSMDg54q\n",
+       "ynGMDmYzt3WUSaOOJ8gXAQVtga1F/GhgNxg08utqKw9ZNxE9liN6KU10kR+/HST8ZruJyqjXbtuf\n",
+       "032FSsNeJVC2kzNYVvH+zDUlr+dUPSRN8Rvnqe7j1aJaoILFKigtnnaSVs8EJShWfIBEmREeCykB\n",
+       "UwU3O/AqcWH18YhQRBu32U3dBlRL2L2b7naF50ZqpJ2009aKa4nVzcOSEMnro4EajLE1U7MFKFUr\n",
+       "ppObrXQJb6EWow27mwb8PQ7Fk0b1YH/nUajqKATP61S1TOYiLYPt7VagUeGXX/D299uPTXdhXChY\n",
+       "YzwTibzFCiohTn3ybLRqoh4VVmXG2JERotH95gUpZuZKwJRbILn7bSoloFOXBddprKmUdT343HTW\n",
+       "Sb5GsZjVTVin6Wmxfr299u6w8ngNDYmSLzy+VNZWVa3adsmbpIK0UlY70zK/qNsMSXMcx5gmW2h0\n",
+       "W0d5bh2S+3s0Q1UT4qqlat5g8AueaTIP2Xf4ix4SBNfO3uvXNlF2f3Yo4GUbJ1Xq7hvKKq/nlO5C\n",
+       "v9YX417jPMsBVBVCOPQN0mDJPHn2U1kGhGd3lRlxJQQ5xYKqjxjFaJEWPVsivTyaqIkiFHF0//UT\n",
+       "58kfUYoSj4m9LX9bSVkVdXu1LzU77gRZ45tUK6bOgimPSZrS1Ed9JZZQN8kbCLwuqe7c8xLfHIJq\n",
+       "sArBM1SJOAzwZDPlXsyrsZh2yWu8goPcTrW4qVZPCXdDQ86gJy2R6ns33mju19oqxj0wIPqMf1pc\n",
+       "kOIP9hsWTwmYXV1m7c7hYaLOTvG7jIHUuaByi6f8XE0cxK1Pcq5NTcIyK9fAa6kU9XjzBEHcasuP\n",
+       "Pb9ZwefQ1GQdqx9rY5BWys2QGddOtXQTJiJH30XtOjLT1keLrtmZIaJ1tww2vMtNYCVWFeQNhkou\n",
+       "XnhdziEqWjpBRG1k/idtKv5sIcPiWYlF0ot767M5onya6K/HiVY34d/d9SrdhX6tM3h6jfO0SwIk\n",
+       "S5o4/U3JvzkJjLrstmofur4SlLB8Zgd1W2mr4b56F91l1KEkElmHvbjx3k63G5ZT7iLcRm10E92k\n",
+       "rdnpFGPpNOZO6ix5f5EWiciE92ma1pZsAYFaqVW7bk41W3OUM/aXllCdBdPufPT6PerkSu43vrnW\n",
+       "fxvXukLwvMYUdMZZDjBBJBLS1XCU4+Yur06ySy40OmpNMCQ/y2ZNEFSf0WhpLCWHQPW9hobi781r\n",
+       "hNycxc1WPrnltbdXuNbyGEhdtlf5HB01gXx5WV96ZMcOMwsuz5Y7NWU9dk6lUqRLcTxutcjyOXML\n",
+       "sHrs5RySSatLMre+ejkXg7JS1hzcAlY5AF7RnP36LqbJ+MZezxLNDfqP79wMVuJqyrx4eYy66W99\n",
+       "wWCazH+YWSLz+I2RSYeLJCydy/r9/H59ezlFLlXSQagNU7nlKao9Bp34uHbTbouVz62WIweGOMVp\n",
+       "mZapn/oNWEtT2gJJEgxjFKOdtNN3vOdO2kljNEZt1Ebt1G6bEMfp0U/9xto8SA9aPnMaj5OFM0KR\n",
+       "kqyzcYqXWDKTlDSATgKeXRxnX/Eh2weZGWydYjb5OqiWULvjbgekTtZrJzD1G98cZrcNViF4XmMK\n",
+       "woKkSzwjy4b4gQopbkFRy5DoMs+qQGpnfbUDWgGfeQMsl5f1CXTk0y7Jj/rkrrR2z85OPeRGIgJI\n",
+       "l5fF2FU3XgNoYU1gpFqfcjnrfrIdCW7crXlx0Yz7VI8Rt3Cq41ePPYdCp/jaZNK+jqfduejXSml3\n",
+       "3lU7vpOoSuVUiioHwGsxZ0Oq+StNIXD4lLx4aaOXxNLl856Xztb66EKH1UrKIy2pL1Y7608AtVOD\n",
+       "LpWbW8hR+kCaxg+NVy2zbrnW8Uq+o+o1g6ddDc8kJS11Op0sWxxOucVTwouEJLs4TJ1lz+sjTnEL\n",
+       "3PJEPeqjkRrpFrrFiH90cnH1+miiJtt27GC1mZrNmNK8eE+1qk7SpGUtpTtyP/Vb4lHVGwJeIY5b\n",
+       "XPnfAt/fzXodlHWyXv82NqtC8LzGFIQFSU08wy1fEorsLJde2tZZ0uzGzS+u1f14vUtptRwelsCU\n",
+       "N7aVgCQBk1tDYzEzGQ+3/KnPeNw6ltZWe0up01PWueT7NjSYbXO4jcfFdmNjRNu3C1jkgAoQ3XST\n",
+       "sEr39YljwqGXx4WqwKZLtgSIJEb82KtQaBdfq8tQXI2YSzvYqnZ8J1F1wbMcN+FazNmQCjjXcppR\n",
+       "N1Ipk2TkxcsYXSQQ0W35vOfdp0nkDBrz16Utl1YKY2kSh7+jQHSsmsGZsqMKbnAE0IS1vQNpwmMg\n",
+       "PAaamqvOXZdyL56r+R1ViYKKkZPQIWFqjMYoS9mSNmV/YzRm1NFU64xKoORutNK9VloH26iNeqlX\n",
+       "a62MUMSSTMjJ4ihrcd5MN9PddHdJ6RW7h86t1u8jRjHP/ekerfnWklIuHdRhWctGatTGmyYpabFE\n",
+       "pihFHdRBvdTrOWZT/Vtwqs3Kz5GNtE7KucqbIyGwWhWC5zWmasS5cSul1apoXvT6sYCqF8xObrY6\n",
+       "66uM7ezvFz85xE1OijZ5TOfNN5tWuslJqyvs6GhpzCJA1NxcCqLqvioE8ufWraVxlg0NZkZbbnHs\n",
+       "7RXuqlu2mLGSvAao3bOx0d6FmMOZ2t/UlNU9VkKo6o7r5dhJ2QFpENZML/1v5vjOcrWhc/brqltN\n",
+       "FSlq4fdzdGB/mg4dGqe1MixREsYW0+RMKm6fu6icpauwS9/tuYFpze47BNCR1ya8wvj4oXHCY6CR\n",
+       "/SNVs3jWw8VzkAoqmYuEDrckQ7y/ARowwG+apmmURqmXektAkceaLtOyxY13kiZLMtruol1G7GiE\n",
+       "InSUjlIzNTtCHG/TS6mVXbSrpOSJ7M8NNu0+S1DCyFIr20lS0tbau4t2WWC9gzos2WVV4JTQnqSk\n",
+       "BS5VeFePm90xd/pb8JLddiPkNtfrXSF4hnKVvMBV3VV55lmvbn86yFT3zeXE5zJpjcy0qovt5E8O\n",
+       "I3KsQ0PW7WWtTZ45trvbhMTOTgGt6bR1XFu3ijHoINzumc2WWhYnJ52TC3mBWgmd/LVdPOrOnVYw\n",
+       "lzGYHOCcss7anQvqtkFY33TngQqi1yNghnJRmohAdOD30vTYY6DHHgPNlWGJKjZDh9xIxQPJ6CCm\n",
+       "2ol+gmwvTc5gWrP7DkpH5bi5eh1rmrzBfWGtQFNzU1WDTqL6uHgmKt9SqR4nGVfZTu2eLF1uoCph\n",
+       "JE7xklhK/rnqjilBUX1Id1g+bxnbKOMWVbfdSZo0YkaXaZlylNOWPOHxjzImsp3ataDHYbid2mmU\n",
+       "Rn0nE2qmZlqkRUsdS/mIUpSWaZluoBuM9/qoT5tAiEPsIi1SlrJGsiR+XkhraDM10wRNWBJF8WzB\n",
+       "PMZ0iIZKXGjtjjn/W9gs2WX5uSLPn1CmQvAM5VncXXVkxBpzqVoj7axWOriQYDQ0pLc+qjAr4xgl\n",
+       "nG3daq1zSUR08GC+JK6Uw58OIKUFlYMjt3BKd9JUyjpGaaWMxcz95fqoVtOJCefkQl6fst22Nqul\n",
+       "1OnJYzCle+wNN5juvF7Knehg0E9iKCc5ldepegxjUU61Ju+/P1+7BEZBB6ZdyypS1KF/O06PPQba\n",
+       "v3+kLIunhLFMgWjdiVQ8kEyaSiFGfc+PW2TQoOfWXr16UlfTzbVe5+xHQbvaluvyqx4nr2VQpCSo\n",
+       "cusaV4EKNEiDFqthP/VbXGylCy6HUB4Tyl1sG6mRuqhLmwhI1oAsUMFw2+2kThqlUQsAuSUPmqRJ\n",
+       "C/DJtviji7pogiboZrpZC7FOjwZqoDjFLVlpB2jAso1M5sMhM0tZ57HnRdvSZTRHOQtETtCEAd9S\n",
+       "TomJ+qhPC5perPzViN+shgpUoEma1LqBhwrBM5RP2ZXU6OwU4MFdOHWw4AQX2ayAGLWOZTQqXEol\n",
+       "HOksnmpf/B/w9LR1295eot27rRldOzsFhMnX0u1UQm4kYoW7m282614++KAA7rEx03o4Pa1P4HPz\n",
+       "zdbsu8mktV272Evd0642qe7Z11cKS2pSJd3xUuFPB4O8nWw2mHNLPVeCKOvjRbpakxK229rytQPh\n",
+       "NJWSy/WqHFHu0wuU/twBGj9QekNAUtTamwWam5sqCzpZM74uE+wscDqIHj3k7QAAIABJREFUUd+r\n",
+       "13g8ovrypOby4+bq995Nvc7Zj4I+p8p1+VWPk992dKCqWrs4hEQoQsu0bHlPlvUoUIHaqM2oNxml\n",
+       "KKUpTYu0WBL72ERNNE7jtkmLnFx9dXU6pWVStsNBbIImaJImSwC0h3psrY8gZzdaPm8islg9pbWT\n",
+       "iCwW06N0VDt2Dp58rmqCJ102WTWBkwRVuQ47aIcxhjvoDuM4qVZ+bjHldVT9nI+bxUp6PSkEz1Bl\n",
+       "S2c11JX/4HKCC25RtXtyWJTupmqGXFU6CypPVARYLZuAsIoS6SFXzaLL40mlFVdXN7S93bqfdFWW\n",
+       "1uLhYQGuXuAzEjH3c4sLvfNO/dpwF9xEQr+Nenx0LrUcgCfss6P7lt1NjmpCn67WpHr+1CSZz7Vg\n",
+       "fglKaaL075XeEKgHqZYdCaJjh8Zpcq1Aa4x+VgtWsAmN2v7lx801TaX3bsKLUH+yc3N0q5+pHiev\n",
+       "7pK6ups6i+IgDRpwFqUoLdKixT1WhUK1lIgENOn6CrK6uU7SpKOrswS1buo2Mrn2UE8JFMYoRgM0\n",
+       "YFhH5RyGadhYwz7qc6yLyduyi/lUH73UWwK6EmrHabwkvvVBerAkVvNd9C7L6wQlLC65shyNnAfv\n",
+       "SwJvK7VSL/XSIi1a1pMfjz7qsz3/dJZYWW7GqzaLlfR6UgieoRzllPBFjf30Gy8o2+AZUe3KfKiA\n",
+       "q3uqQCLHLuM3pWtuR4cVJoaHhUVQvr7rLnP80uKpWg7V9zmQ2MVwdnaaUBmJiO102WNVS2ZLS2lb\n",
+       "N91kurcuLpbG4N5xh4BAmWxJJ7l9ImG6yMr4Wul+qx5PXYwlT3DELZ5uyYL8fK4r7eKkcmtc6mpN\n",
+       "StgeGtKXpqmKrgXzi07l0NY40finijcE9u33Vwe0ynSnWnZKXEHTVEo/RTl8VKvhbwqpoOI5CRCV\n",
+       "3rtxvwgNV9xOfO14rKTfi3m7Y6C6cKqAYRe3mRWVbUsghceT2sV28mytchsJk17qQKqWPd2Dx2hO\n",
+       "0IS2NIx8SCBspEZby6Yue+xtdFvJ9iKD9pjxuo3aXMfKH1nKGvsnKFFiUZYPtV+ZpImveYpSlpsV\n",
+       "MlFTC7U4xvzKY65aTP3oWkvUdS0oBM9QjvJiaao04Qvvo7dXD209Pc5JeWS9Tql8Pm/ZXiYq4hbN\n",
+       "yUmigQERI8nb2rLFBGHuOsz7UPeRGWbHxqwQK2HXzhoZi5mlUCQkqZlqJyas69LWZh3X4CAZWXud\n",
+       "YFOFMbdyKV6ti9xia9eWHCMHQbdzS3XD9nOOBWkhlet08GC+soY2mcqFd0elyZ22VBWICh9fo6lD\n",
+       "c/6gs9z+fEi17JS4gjpYrt+fz9NjOaKX0kQXbTinysPfFFJBJU3e1kR378b9ItRr6/Upe1fbyoHa\n",
+       "LlYyKBDgbqEJSpS067WMBgcsCbbLtEx91Ee7aJelvAqfx27aTXGKW9xQdVDNb4RIiynfp5VaDRhT\n",
+       "Y0kbqIESlKCx4oNDle7RT/1a0OSPCEVojMYsfycyoQ2PNZT9eQHQERoxMgAn82Z2WhUE+WOYhi3J\n",
+       "h1RrKwfRbbSNGqiBOqjDsdyIPOYyYZGbpd2pjRA660cheIZyVC1qBaoZVgcG9Flao1EBoNJNVk3c\n",
+       "E4uZLrf33583XEnV7Vpbze2cYFYHwm1tzlZZoNRtV93faV9dW6OjYrzcNZaXs+HuuzrAk+JuzNKV\n",
+       "WEq1DutA0k5eMt3yMcpasBLQ29v1SYn8nnuVWEid2pL7u8VPVQXUypRTkiSvqop7cz+Jb/12Io9l\n",
+       "3CpTNV2WlWv53EKORp8apdQXU7R8tjg5B8v1wXye/jZNjpxT9vADNNw5NVUL11UVMio5pO4XoZvb\n",
+       "x93+OypNlQB1jnI0SqOUohQt03JFF/N2+3JQSVLSk8VRF3/pBsV8X/67as2TtSpV8e04FHLLX5ay\n",
+       "Rrvc6sgf0vq5TMvaDLQRilCi+ODv30V3GRDHXXNl1lme0Ib/fUqwlmP+Z/TPjBjXrbSVQKA76U4D\n",
+       "Ho155k0An6Zp6qEeSlHKaOcOuoPaqM2SXVhdyxEaMSC9gzrobrrb8rkb4OvcrYN2mw3d8GunEDxD\n",
+       "Oapa5Sv4RbrqzukGg5OTArDuvlufYEdNgmP3bG52r4M5MWGN19QBMf/8rrtM6NHFecZiRNu2lcKw\n",
+       "7iktofK1Gv8qY0TtAC+Vsh43vladnfbWx74+/y6lbqVP5Bj5vDlI68BGdcN2Go9aq3RyMjgrvFfo\n",
+       "0u1TVRh1IAJdkiS/qspNp1Fyvv5V51QpQFXgsszhfXptrXQYaSICUe4TOUr/cZqSe5P+M666cE7Z\n",
+       "wy+OrUzO8NxU0BeCumRNKmRU1wv9WvVxrwyoaxEnJwGNw5TXWo9c8nxRrWNu+6oJdiZoQruPLhFP\n",
+       "kpKOCYl0YCmz5eYoZ8l2207tJdbCFmqhJCWpl3ot4M8trmlKl8xL5xrLQVW1KHJglWNoozbDKqlr\n",
+       "L0tZC3T3UZ+xRsM0TDfTzTRKoxYrKV+PIRpyBXw1gZO0yAYJimEsaO0UgmcoV1Vy8Wy3r5P1TS03\n",
+       "woFJJhLigMVrXnZ0WEHHCRL5Mx4XsZLc4ifb0sVY2j1lzVEny6bdGPizrc1aN3RkRGTilfvyhEo6\n",
+       "wNNBkw6ypQVX164fOYEaL7fC4VBak2UJHlnOxo87rq5/Wau0EpUDXbp9qmI1lEqTLRHokiT5VVVu\n",
+       "OvktIKm+rqE4vPfMzZUOoziX9B+mDeD0mnHV0DQR9RDRGAXLOgEa7pyaCjp+qprlUjazKrfGVAbU\n",
+       "tYiTU2FKV0rFCQ6cst6q2VgHabBkPXm5FAlDuv4KVLBYOhuowYDBQRrUxocWqGCJJ+XWVBXmeqnX\n",
+       "YiUdpmHbcjRqjKm6JnbZanlyI905pQNMXvJElnqR5wNfD2n1tLMkt1EbpSltZPX1k2DKzkIdBCgG\n",
+       "dY6HllN3heAZylWVXDzb7cutXTIhjYTUsTErnMm4Re7CKsGVgyJAdPSoaOs3fzNvAGlbG9GuXaIN\n",
+       "HhspP+eJfiQ8yJqXuZyZPdfrs6fHCsPlPmVdTHnxr8v4K7Pocuux3E6FSDUL7siINe6Vj1l3nNXE\n",
+       "QxxInECNnwMSNmUG36kp5/I4XgFQPW6VSgddbq62un2q6qruQAS6JEl1IbfrX+mK20HCFXcDPR/H\n",
+       "/2MR3v94P33kzbXSYRTnMn5AxHUOPzlMk9+Y9Ayd+Xy+emAdoOHOqamg46f8lEvxKruSN26f1ZO8\n",
+       "XmRXq0RPLePknGp+6uBAVzNSVxfSLjkR70Odp6wnyhMVEZnJiiIUMepmEjkfJ9l/kpKWtnRw2Eu9\n",
+       "NEET2lqk3HrL4yZlXCfXNE1b2o1SlCZowhXcLLGcebNfuT67aTf1UI+tJbSbug3wkm0N0ZAFvu3O\n",
+       "Y96WUwbboG+GBHWOh5ZTd4XgGcpVlVw82+2rS0ijA5TOTtMKpsueq0JLf79oK5nMW96XcMsBZedO\n",
+       "676yFidPzuPkshuJlJY+0Vk6t261utfq3HXtnhzK1f10WXQl+HAglxAnwYjDrNyupcVshx8rDrXq\n",
+       "WnM4dbKOqTG8dnDGgdgui66dBb1aLuFc5VzUyXGtTVNgMXdm47T5vAJzRJQioiQR9ZFwvR0nyn2z\n",
+       "6Nb62UNUaF4zQczrHKuQjLQwtkZTuTkqNK/RlRTRJws28LVWoMHHB2n0qVFfAJPP5zd7SGHg8lMu\n",
+       "xaucrKibxcLq9SK7nmvDepXdXNU4UyldPKEav0lkBQuv62kHqMu0TP3Ub4zDzkrHrV+qO6uUTACk\n",
+       "1vPk/cnYSgl63FU1RSlLXCcXX5sYxSzrxqF6N+22WOm4C246X+rCy/uXVkv5Hk9eJMcst/Gy7l6P\n",
+       "Tb0mDQqz6LorBM9Qrqrkot5uX/n+9LQ+IYwOLHt7S2MPuWtpczPRrbfau7K2t4u+BgZEuxzOeNZZ\n",
+       "Hhtp57KrezY2Wi2IgJkJ160+KX9yy6N0Q+Zw2d5uhWM5RlnjNBo1LcpuNwuWlwWsLy/rjxXvl89h\n",
+       "aKi03XKhUAfEdqqq62o1lSbzG28zjTsoSTBMkva/QPrfspjU3Jx/EEuT4/qWZdmSUGjXLoPd9P4y\n",
+       "AaZGNw9yCzlKfSlFyb1JGjs4VtfWvaDlZEWthoWVKyi3u6AusjeDG6DdXDnsyBIqRMxiuADCAVD7\n",
+       "oXYaW9NnSpXz5zDkJAlnOrdfLg54TdSktQS6Wb84vKnQorbDrbsyVlQnbmXdTbspRSkjHpUn+OG1\n",
+       "Qb1Y6Xj/smaodDWWyZB02YW9nMf1CpRetdnHXwuF4HkdqxqJT3Rt2vWjJoTRlcxQwU9CIXfL3bVL\n",
+       "JMRZXnbPOCthUP4uE+nwGpgc+NRapX7cbrkLL3ct5gApLbtyTCMjJlwNDQkwT6XMz3nNTSk5RhV6\n",
+       "m5rKi9fkUq2V2awJvepx1UGhn/PB73g2OnOsL13vlq00lX7zbyVjTcY/W3Rr/cx+KrxrrXSN3Cya\n",
+       "6voq25dl2SqQsM7aHTc2p/HPVRdgvMgJrvn8a2LdCzKrboXusE5WVDcLayV95xZy1HGgg3AIhLX6\n",
+       "cLvbrG6AOcpZ4gg5bBkxhgfM8xtz+vnp5u8E405uv1x2pVzsXGT7qd82FlRN8qOzpMo42K20tcRa\n",
+       "yeWUtZdDrt/yOGqmXrk2vA9etiaEsFBcIXhex+Kg4FSGo9w2JXzYWan4+3YJYXSxjWrWWt6macXM\n",
+       "a2GQu8LyupyFgtVS2tVlXYvpafdMtPLz4WFrQqQtW0wglv0nkyJZkEy6s7hoQje3BqsgzRMxqQDH\n",
+       "3X6bm/Xr41fcWukGml6T61RitayFS62dKnJj24xusQ4yMr7+x0NUGFtzBwwJhkNEtI2IukiAyaTY\n",
+       "r/CRolvrhzTQSaS3aHK4WSaiQTJcd9XsuY6WLe7+y5P85IrtpEhf+kXOqY2oMF6gqUP+XEQt51MA\n",
+       "oOYE13L+eAw09MRQ9eE4TaXHq9ymNtAdtpK++b7JuWRNLr5131EcrCqpv1lLOSUK0pU56aZuAfiP\n",
+       "gbAfNLSmz5Sqy4qqxobabe/FSqeurwqSdkl7dHNWt+fxjhyI3ayVunjQLuqiu+luT+Vx8vl8ydjs\n",
+       "rLN8vexci0OFCsHzOpZdGQ5dVlIvUJrLmZY9HrtpZ6Xi8Za7d9v3weFzZEQAmewnEhFWQGnZW14W\n",
+       "VsydO/M0OSmArq9PWEWzWSuQybnK+cnkRRxOJZDrLJa6pyxxYrd9U5MA3HTaWiNUTbCki6lU3VtV\n",
+       "gJP1TQETouWaB2HddgNNr8l1amG1rIY1//7787Wt01lSJ7Ly2pxBjSe9X3GNdQOMaSLqJgF2HApT\n",
+       "JECrQFZwVNdXZzFOs3bUDLiKpdLRssX34/NQ21dVIJGRVreNB5C0QIJbXzopfTjBdWGtQNlvZH0l\n",
+       "P6pIQWbVrUbCIY8up5X0LfdN7k/S8tpyTRIZ6cCTw8skTXqGgY10y1Utk3aJeaSWaZn61vpo19wu\n",
+       "mlyzd6F1sgB2U3eJFVIHZE7r4uZmaUna4wGA5fZ8bNM0bWw7TMMW2JVtcYsqh9Q+6qMsZT1bconE\n",
+       "OaUeDx5vyy2uEjaDLnUS6tpSCJ7XsXidRGkpdMtK6rWkBbfM2dVjtLPs2dV0lFDD+1EhkqgUOvjr\n",
+       "rVutcKa2199vjTXVZVyVTw56gABgOUfdGNXEQ9yCqovllJAcjQpwVt2UJdxKIFVhV0Kwn2PoJC+g\n",
+       "6XTcnN7zIj8wGcR8a9Gmc4dkgZEganP6FV/zSwwYxz9nZnwtNK+5A0ba3NeAQj+gp7MYq3DDXy9r\n",
+       "ti+ZXLHPbtZvJ5nwO6a0r5MdYDnNxU87TlL68J2YpwoJmQwFaOGvSsIhjy6n5SaOkvvycfuxngYF\n",
+       "fQu5HH0unaRPjYM+VNBbAe3k1y21XDnVyrSzHPppy06yj67iQ8YmSpfZDuowSoNwleOuLMfVR33U\n",
+       "RV2UprSREEgFYF35EA6KquWSx6vqLKrywbPe+k2Ao27P++HjaaZmGqVRRytyqFAheF7n4hfTjY2i\n",
+       "3Ih6Ye+3pIUfeFXjPLnLLb/o1SUh4jGN3Bqo9sVfSxfYSERYQ/m4ZfkRnuSmv9+Ev2TSatFdXrbC\n",
+       "I3evVcu/qJ8DJuzzsfM15GCbNXMplMxxYMBqsQUEYPNYULdj6AXq/ABjNSyOfsCvGlbVmseXKjBi\n",
+       "V5uzGmstxdf8ZQmMbUSF8TWaOjRHhTfXvAGGCoW62Ek3+FJBSYUbJ9jRQVYzmf+FeologIja2XsD\n",
+       "ZFpp7eZn16dfkCy2Y2T39WLVrtSqmCbPcFxNRt0I+bnwDsrV14/1tBy40elAOk2PAfQYQIemsu47\n",
+       "8PFq1iiocXHp2iw3QYuf8emgTs5X1qmULq5cXhMO2Y2LAxt3fx6mYduER9zyKQG5kRpL5qrW2eQP\n",
+       "NS7Wz/qq2/NzQ433tINoVZsh0VWo6igEz+tcdllbufVwdFRY33RQyuW1pIadu2gsZoUl/hmHsMlJ\n",
+       "0c/u3QK2GhoEaHV3i/cEHOaNUizcmru4aGZx5ePm7XOo0Vk8uSVRth2JmCDc2ioAVk1Y1Nlp/t7R\n",
+       "oc/iyteQWzClRVRCBp8THyOPU/Va7kRda/XGQDlQE3R7RP7ArxqxoAcP5stus6x5K1BjV5uzmpZY\n",
+       "vuary2R1LZVusl7E56LGTkqqGSOirEObHBQnfE6EW1nl9VeEvddHVhBLUkmcqC95sPhp3SL9WLUr\n",
+       "tSr6ANc0lb8U9Si7JC66i+CgXH2dLLcq2JdbkkE9pw6Nj9NjAO0fGaE1n19cOjipRqmIctq0O17l\n",
+       "tCX34eAnrXgt1FIClxxUB2nQm8u24mLLkxDp3J91MZU6SAYJ92PVQrqbdlOMYsY2uhqfXqX7nuLn\n",
+       "Bo/3lMDrBNFSXm4ShHB6baom4AngnwP4ewD/AOD/0Hxek8mGKhWPn5SWR7vkMJVc3NqBgLywjUbJ\n",
+       "yACrfjYyYnV/dRqbaVXMWyyAHBZ1cotD5TUmZabZZFJAX1+fgHJ1LBMT1qy1sg6nhE439fUR4RML\n",
+       "1PjoAUrvP0TT/2rNYh2Wc3JbJy/ycmPAz3EPuj2i6sCkH1WSXKhWcOhpbXyYr0rW3K3EiBellf3V\n",
+       "13aKsu36HLbTzY+XcZGGn67i6xYSACznllReczDjbVdYm1V3PtlZtasiH+B6rSdldroIroarb2n/\n",
+       "1j+Bci1+6jm1VijQ3NSUb+hUxcuQ2NWM5NtxUHCDh3Lmane8ymlLdxPibrqb4hSnRVos2Z7DrddY\n",
+       "SdmHjIF0S/LES8d0UqexdqpF0y7mla9PH/VVBG1e/u+p8yvHfVenaljY60HXO1BXHTwBRAD8I4Bb\n",
+       "AMQAHAdwu7JNjaYbSienOoryolYHparKseo4WRv5Ra9T4hqd+6pfCLODGt3aqMDLE+2o4KnOT+c2\n",
+       "a6fRUSL8nmkB6fmDOQtgy3jS5WUzhnZsrLTWqU7qsXK7MeAXZCttb8cOcc51d3uD9JqpTJ/Darrp\n",
+       "+gbyNOlBz8vcCmRaD9vI2Q3VTk6xmbwtOZ5+EtZHCZ7NpM8yK/fpoNL5yXjN4WIfO0iUc2kgoqNs\n",
+       "blNkAuUYGVl3DaVZ29z6202B+KHaWbX9qBoXNZUaV+tdG130vd7B3isA6LarBjx4OV7l/h24jZeD\n",
+       "arnnjRsg8xhJPhavgFeOO3Ct5eUmwUb/XVZL1ypQe1UtwPODAL7BXn8GwGeUbWoy2VD+5QSlqoK2\n",
+       "jnkdm3Q1veMO6ziDiEnUvc8hU8ZSTk9by5lw91jd9p7X5VPCAtL1+f3UceOacROAW1jVONaentK4\n",
+       "2HITRvEEUEHEEXo9Jqplt26UJj20uWijrbUWVZoQp0DeXW51MMspxqlkCR+PfG5h2+na5vskbfok\n",
+       "ssKpen7xNtR14GsnYbbNYXsnVSlwkl/UDC4MBp5JNcjsrG5tldNXOcBRroUxCOUWcjR6IE2pQ+O0\n",
+       "vEE1YN1kV4/Si6trNeDBy/Eq9+Lez3irdd5Ii+hW2lrW2vnJWstVb5a4jfy7rKauVaD2qlqA50cB\n",
+       "/Df2+hMA/l9lm5pMNlT1ZFdKxYt0F+V+QFC3v3QP8dqOHYjp3i8UrPGa3d2lGWUjEWuNUO72K8HQ\n",
+       "ixV28uNrlD00R723rFksqSqs8wRJvB87uNTBvpPF2glUq5HcRlquW1ocQL3GGU/y+Xz9mya8yM58\n",
+       "5WZ55Ovs1eU2zbbpodJjxT8fJGs9TQl2EhKdQHmw+FpmqG0iors1/UnJ7aSbrZd1ILKunfxdzX7r\n",
+       "8bzM/0q+PGB1kcUV8MCokRhn8PFgIDTIuppubZXT12axJsiL/OSBZGDrWVGtYQepAODH1XWj4KFa\n",
+       "1shaqBzXVa5y5647rtU6p65n1cM5tpGqBXj+ixA8rz05gRsvpVKu/ICgTvLL0ms7bjGeaj1MCUZq\n",
+       "iRTVBVeulQRTvr1d+Red1ERDKmzL19y9WP4us/W6lTThayLrl8oxlZOxuBItLwtLp1N913Ktj+WC\n",
+       "cj6fv7Z9DnmtTTvAky6lu4koVnyvtXQfucYvSsCzswr2F99rJwGK/L9HlES22UVyB2WeCKi/uJ98\n",
+       "LV3bORAuFrfT3dTwe4zV7dM2c1WUf3++KjcxLK6ALDHO6FOjtoDjx7IYZF1Nt7bK6WuzWBOMi/xD\n",
+       "CGw97//P91e9VijR5ljjal3clxPHWkvxGpt+3Wx1xzUEz1BBywt4RlGZXgewjb3eBuCEutHv/M7v\n",
+       "4JZbbgEAJBIJDA0NIZPJAADm5+cBIHxdR69ffBFYXBSvs9l5XLgAABmMjAD/8l/OY37euv3nPw+c\n",
+       "O5dBSwvw8MPzaGsTn8/MAC++OI94HHjuuQwSCbE9b296WrQ3O5vBK68AwDze9S5gzx7n8c7MwNj+\n",
+       "3e+2bq+2DwBtbRns2QMcP262Nzsr5vfpTwOJRAZLS8DCgvi8vz+D97wHOHJEtL+6msGpU6X9vfji\n",
+       "PAoF0d/amv7zxUXx+cyMWB91PoODQKGQwdCQWN/jx4F9+6zz3bcvg9VVc7wf/nAG27cDp07N48gR\n",
+       "4PbbM/jxj835qfu3tIjXt902j6YmYGHBPL6f/rR+fQDgwgXxemQkg+ZmYGio9Hjqjo/b65//PINM\n",
+       "xlzvmZkM9u1j2xfHO3/bPDANZOCtfS/r7fj64Xng+Mb8/dn9vQBAZjYDLAHzP5oHUkBmWwaYBeaP\n",
+       "zwOfBzLnMkBLcfz/n/K6Dci8lgFOAfNH5oEskJkv9l88vpm24ueH54EOIHOp+Pn5eeAIkLktA4wA\n",
+       "81fmcf+3gP9wJYMLAA5F59HaUDw+I8D89DwwX5zfADB/Yh44C2S+X2wPxf4uZ4CTwPz/Ng/8EZB5\n",
+       "VJlfKgNkgfn/eR74v1n7fzgPfJydD9+ZB4aAzD9lgEKx/XeAzM9t1vv4PPAwkEl4PD7q9nK9RjLA\n",
+       "Hof9n8sAM8X1CPB8Oj5/HA/jYSQyCczeO4vsf8ni07d8Gv/18n8FANy2chumb5mG1Pz8PF489iIW\n",
+       "exYBANn/ksUf7fwj2/Yfjj6Md95+B09/8mkk4omKxsvHl4gnfH+uHd/8w3gH7+DpzNNIIIEH/vQB\n",
+       "nDh3An3DfZi9dxbHv3u8ovUN6nVLpgUA8K7ou5B6O4Wvf/LrFa/nucFzWFhYAADMNM1g39i+qoz/\n",
+       "YTyMv8/8PeKI4775+/BZfBYPZB6oyno9MP8ATuAE+jJ9mMUsjs97P377YD//2cwslrCEC/MXfI3/\n",
+       "xfkXsYhFIAPMYAYPzz+MF/EiFjPFv5/5LP4I9n8/1X7Nx/cIHsHD8w973n8Ws8jOZ/FpfBqJjPh7\n",
+       "k9tsxPHbLK8/j8/jXOYcWtCCh+cfRhva6mp8G/36+PHjWF1dBQAsLy/Dk9zI1OkJIArgJxDJhZoQ\n",
+       "Jheqa3m1BqkWsELBTHDjx1XT7n03i5xM0OPVPVS1wpYbc8fnPT0t5ptKCQtdoSD6UZP76NxgeXkU\n",
+       "Ly7K09PCdVa1XHodr594TjcLp9N+QVs/ZR3V9naNy22Z1sea1+MMUI7rm6bSb+ApzWd2mWSl9bGD\n",
+       "rJZAnUup6gbbyNrrptJxgIjiZO/Wyi2iTez3rWwfp/mp54Ic3xBZraHlWBj9unRXySpeqWe5U3bW\n",
+       "IK2YGyl1jXILOer4i47AXFmDVDUscrU8jrVyafbaj1+rY5AxoPVkAa6nsRBtHtf3SnQ9zDFIodqu\n",
+       "tqIPjAN4FSK77e9rPq/JZEO5y2/SGa+lMry6sjpJt61dn/l83ti+u1sAYn8/0Q03CNDzC3C6eftd\n",
+       "K7eSME4uyuUCHS+X4we01ONb7g2JSsVrlNrN26/rbLk3HerB5chxfSXE6WIinTLJyiyucj8OdFwc\n",
+       "qKRbboqs9TBBlrInV9X/BmoiomkSQNpAJmguklnqhO/jND+nscr9hsi5Tqid0uS8LnZyIcWS88ll\n",
+       "e2MYCwvUfeAAjR86FFjJlWqUDAkyCZFXpUm5v8JiRKN7orR8Vr3zUXtVc10OPnew6qVfpGoFOF77\n",
+       "8XvxH2QM6DRNUzd10xiNbQjsceguNy7UTpX+36s3EK6Groc5BqmagKdrByF41o0qAQenfe0son4g\n",
+       "wKmkitpnPp8vyXprF4NZrvyulZ/xV9qXW79+VckNiUrkZd7ViDHVqR7A03F9JWwtU6nFTbXC8ddp\n",
+       "sn4jDyv75sia9Ee3j58nLz2ia2eSSpMXDZKZ/dYu5tNOfK7lmA3LTSiVJkdgLTmfXLY3jMMHzBJL\n",
+       "U3NzPgZUW7klBqoGgJXcXylaAIO2eFYy9iCTM6kK4jvKq+WwVglSvPbj9+I/yPFvtMWrmv1Xek5d\n",
+       "D4l0roc5BqkQPENZVAk4uO0rLW/cVdar7KxaXsar1iJ1c2v1YkHL5axutuXK63pvdDmOoC2ZXq2U\n",
+       "fo6vtGwHmV3XrxznVY30v5UqRwIo+TdykpivIlktoSBhjZTutPIz1erZSPpv+1b2+6Cmb5CwSr6b\n",
+       "vW4hyv1POUr/XprGPzVOhY9r1q7Ex5L0gJlm7Xq9PlOh3av8AqvL9nIYY4dEiaWR/fsDs3hWQxL6\n",
+       "2v68jca+PlYCaNUAsJL7K2sFSn0pZet+Wi5AljN22Vf3F7rr2q2gjvvyAAAgAElEQVR5oyHKTm5A\n",
+       "vJEX/xtt8apV/0ElUaqnZEyhaq8QPEP5VrnXz2pmVrUdv+U8/MLL7t2irElvrwmLuja8WND8lBfR\n",
+       "9VGPDGKnHTtEjGVTE9HiYjBtBmml1Fm21ay8VRWDnPudXIMtaYPnaloKRjdWV8ulGguqPmVWWB7/\n",
+       "KZ8xm33k+2omWv7cWfpe+vfYhf4fTJWuGR+nXQwrkb+SMZXKL7B63L6wtkZTc3PeoXODvmwKawUD\n",
+       "svAYqPsL3RbAq1U8opMbMQfI1FzK80Wwl7GrF9e8r/6v9FdtzpVakmsNUV7HW69ATLTxFq9a9R/U\n",
+       "MajnYxmq+grBM5RvlQsNMsZxaEgfI+k3RtRpe517iG573Xt2JVT4dZuf8iJe+3XSRoIqtxT393vb\n",
+       "x6l+aipFFI1az4UgxI+Jl9hQv7J1OUqT8W224BRPaxngJT0YBakdJCyS3STKn6TJamGcIhPEtpIV\n",
+       "DGMkypvE2fYgUfYEJJIB9RHRDcU202zbDjKhspUsMZ8EEvGcaSqFVYfn+P9avND/zAgVmgti7BwW\n",
+       "1VqadoBpB3dpqv7xUFQz1+1a+aJrxK2eqoXQa1ypDkyCctM1XHH3g7Dm7SI4t5Cj0adHKfWllGPM\n",
+       "qHpxXQvQzufzWmus03qpgFxriPJqPd5oq6IfXUsWPf49FdQx2EzHMlTwCsEzlG+V63apuk2q7bjF\n",
+       "iPqJj9Rd1Om2172n9qW7bnNyAfUyLz9rmMtZ4a/G145GzdKWFu9uxXZu1Xwty3G5dpJbVt5K4d0W\n",
+       "FMaJcp9YoPQfHqCx/Ydo8uNrNjGYfICkB6NKxWFTwqSEPf6tK/uVILZc/KnW0lSfSSJKlL6f+0SO\n",
+       "0v+m6ArbXCiFTd1ThVqQ6aIrf24RPwvNBZrKTYm2ZQxqmu2XJfsYVjdxd2M1vrWKqhl4bmAaZwmX\n",
+       "Y18fc3S7dZIOTPSwkqPcQorSB5I0fshbH4W1AqXmUoQ194tgCW/JvcmyQKkaCZxU5fN5LeA6wZ0K\n",
+       "yLVOCuUVyDfaquhH1bLobQTQ8u+poI7BZjqWoYJXCJ6hfCuoeEO1Hb/tBrG9nxjCcpMIeenXq5tx\n",
+       "Mul9vkFZSZeXhaXTTywrd6vu7S0Fbrc420qlW/OqGX4KROk/9pnwxa8bpk7c4icz03Lgk7DZQtY4\n",
+       "zRgJC+E02397cRs7F9lGTbvsaXGFzU25f+tr2qBeInpQmUNv8WcrCVDtJDPBUVDwnmb93czW5Fq5\n",
+       "JtroAHEqdbv1E9OpAxM9rKQpfcBfIqHcQo5GD4xS6lCKltecv+A4vEnX4dGnRm0BTXdxXQuo0wGu\n",
+       "E9ypgFzN5Edex7vZVS2LXuiiGupaUAieoTa1JFz191cvsUwtrtvsoIjX+ezo8Ad/G+hhZ7hV6yzF\n",
+       "ulqntZCvGwg+Y/7Giwlf2v79fhr7iI3FM2ilyfwWVWtnthDRUTLjMKUraqvDPk5POyAtPsc/pbjC\n",
+       "Slj1Yvm0eyaLY9eNc5Lc4d3rMeS1RLk1N7yuC1TluprqwEQPK+M0fgjFPoY99SETD+ExUPYbzu4X\n",
+       "cvxDTwxR9htZGn1q1Deg1RrqpJzgTgXka6Wm60aqWha90EU11LWgEDxDbapEN6pU100JOUG6sflZ\n",
+       "n3LX0g6K+PyyWX/tO4FWtY95oVBe/VA3VTJuXzcQ0lQCIE7nVGFtjXr+YI7QvFZ90JdAJYFshEyw\n",
+       "vItE7KV6g0JC2phmnwDA0+IKK9/XWTXVZ5fN+xI6mRvsFfb5+i4P65Rm7dkdjxyJOFW1/6BdoDVy\n",
+       "+47aiDqY1ZQKP8HPr0CFtUmamst6bo+7zU5+Y1I7rtxCjlJfSlHHX3RQ7xd7jbhOP4DmlNE2yHUo\n",
+       "9/+epQ7k2vI1Z4GsZ/lxn90IF9V6KCMW6tpSCJ6hNtQy5iY30JBw1d5uhZxKvyx5v06JatTxlbuW\n",
+       "dlCkwqOf9p1AqxbHvFJLsVvG4VSqijdKNG6cJeeUYlHzHUrnN5Oq3J4nCOov7jtNRD0koHPUoU1u\n",
+       "JSyQqItZ9W94h2cXETVr3lsujjdtvn+RbfNCn4f18uKKy9o3ni3kvIYBye07aqOsYxWDkMe7Q3x+\n",
+       "asbbcsfhd5+xg2OGFdMuHlJ1sfWbHEltU81oG+RxLvf/XujCuXGq97UPwTNU0ArBM9RG5p5wlRsg\n",
+       "SbhZXg7WHVYFHC8ZbFMp08U0qLV0S8hUrur5mEs5ZRyu+o0SLzGYaTK/xabKAG1lf1cQ5durQKV+\n",
+       "5tRmjgTsRYioSbOf16edRdOre61drU+QAGIex9lGtFps9++aiVYlmNrNL03Copu1WUsp2b583knW\n",
+       "Mi/VOL883nDYKJdHJxCyAzvL+2PmnbrcZwdtQdAp463bOMoZu3UiRJQmKnykQFOH9PGQ3V/opt4v\n",
+       "9lJ0T9Roc/hJby68qhxjLFl/o0/bx4zaTiUAi+lmceGsZXKdEst3FfrOUY6SlCQQaJiG63rtQ4UK\n",
+       "SiF4hqqH3BO26u8nw6LpJ76xUnEwc4JaFYSy2equZbUSO9Wj7DIOB+XCW7G7caXJbdT90+QMPHL7\n",
+       "ISoFKhWgkpo2e0hYSNupgm9r0ma1tTy3FPuy+zzio68kWeB4rYFoldeS1a2Z2zpyFUjEi06QGTda\n",
+       "rYzDUh7Ht1FJV5wgyQ7sLLGSf9hr/IGm99vHQaoZb9X+dONwgyxP9TUXcpT+v1gGZuUYFNYKNPj4\n",
+       "ILX/ebvF0tn35b6yj4VjjKWmPz+WzyAsppsly2gtrYMllu8q9M3bnKTJQNoMFareFYLnJtBmjsGs\n",
+       "VOXWY6zUPcQrmFUrlnHTKsCT1e4YBAXNft2NS84pL1ZRJ+uWun8ReH7aTXS/LlGWU38FMl1Wo0S0\n",
+       "WOy7Eoum3dMtdjPoPtX++LHSQaJXcLQ7Nl6OayUqji9/Wz64Pvy6bTtIgpAuY6sd2FliJQ9OGH+g\n",
+       "fPvpb09rodEOynTvu0GWE+DpyqGk/lOKCm+WQi1PHITHQJ17Ox2tkeVaHXVjSu5N+or/5Gt88LmD\n",
+       "nvv2q3qoTVlLy6x6rlej71pbmss5hqGrbaigFYLnJlA9x2BWW+W6hNbyy3IzWA+rpRLO3ICTNeiE\n",
+       "Tka7yj/pss6pNOmBSaci8By+gSgPokMgyjuV98iRcElNkojt5HU7p5S+y4VI3fZbfe7j9PRi/eSu\n",
+       "u3eQOyR6BUe+Pj1UuxIqxfHlD+aDazNN3s8zL83ZAJ4d2OliJdXty3HhVVWO+7EO7nQxm3x8qS8K\n",
+       "C27HX3TQxLMTNPq0CaKpL6U8W4LtxiLnqcaPJvcmjeRFXtvla1zN/3v1EItYS8useq5Xo+9aW5rL\n",
+       "OYYheIYKWl7As0FsVz01NDRQtfvYzJqYAA4fBkZGgLk5IJHY6BHVTqurwMwMsGePOe+ZGWBpCWhp\n",
+       "AWZn62M96nFMQclpbpkMsLAgfp+aAvadq/3JWjKGfd72051blnaRwQJEw1OYwj54bJhrAsBhACMA\n",
+       "5gB4WI5XOoG7CuL3q11A4+niB1PF/ZcAtAA4C+CYTSMdAKIATtt8Xom6fLTbDOCC5v1WAE0ACprP\n",
+       "OgH80qa9LIA8gHMAGgH8VnEsLQBm4Wl9Dclj01ZsDxBrXMZh3nCVcZ7pNPP8DJZWl/Cjwo9wav0U\n",
+       "RrpHMHf/HBJx5wZX11cxc3QGe+7ZY9lWttcSbcEluoQjrx+xtDmDGSxhCa888woKK+JkmLp1Comm\n",
+       "hLHf7L2zRpt2/fD+Dr52EL9c/yVaIi24ePUi1q6s4SquGtsMdw3j9fOv4+TaSWMsj77wKJ786ZMo\n",
+       "XCxgqHMIT9/3NB554RGjn4lnJ3D4xGGjjalbp7BvzDxR5Odu65V5JoOFFfGd0hPvAYFwav0U2qJt\n",
+       "aIm24MXffhEDWwd8t+tX/Ljw9XXSBCZwGIcxghHMYQ6Jck+yUBum8BiGqgc1NDSAiBoctwnBc2Pl\n",
+       "doF8valc0Kim6nFMQclpbiU3RVD7kzWQGzMzMIGuCDCB/JNeLba9B95gYAa4+gTQuApcvhOI3gDg\n",
+       "CARQvBfAAQBnitumAKwo+0cBXGavGwA4fbW2A0gC6AbwsofxyT6uuLQLCDD8VQAv2HweA3CpuN1V\n",
+       "m224hgF8G2KsV4rv8flxaHwPxNqsARiCgFkVTOWxKcBc4wqgbUPl9zyzEQej/tZ+/PCjP6wIdnh7\n",
+       "kwOTaIo0Yc89e/DoC49iaXUJr0RfQeHeAvAtACeAtmgbPnDDB3Dh0gUcOynuqkjI8wJLN375Rqxc\n",
+       "UP8ohKINUdzUehP6W/rRHG1GW6wNezN78egLj2LfT/bhzCXxh5UdyOKp+56y7Lu6vorb992OlQsr\n",
+       "WgjkQPzoC4/i4GsHsX5lHTt7duKJsSeMbbc9vg0nzp9ABBFcKZ7ETY1NuHj1omWuunaDgk7AelzU\n",
+       "Pu20ilXMYAZ7sCcElk2q8BiGqgd5Ac9orQYTSq9E4toCmUrV0iJ+jowIvtFpfn4emUymrsa0WeU0\n",
+       "t9lZlTNrf7KWjsGjOGxy6+EMgH3ALGYt/6TLOqcS8GdBWxLQCQDRdwHYC+B9AOIADsKEziSA7wF4\n",
+       "P4CTEBBHsEKnnbWR63xxv9d8jPGy+yYABEy+aPNZFAI6ATEXaUHdCuBtzfYpCOhMQIDqFQjo7IGY\n",
+       "fweACIAMxPH8BcQxBUzwLR5XQ/LY6KBNcyMiaAX6HeXzPLODuJao+GMPysLG2/tC5gtGe0urSwb4\n",
+       "4CjQ2dyJsw1nce7yORx5/QhSW1LGfnvu2VOyz7bHtyHSEEGsMYaXHnzJsBKuX1m39M8BrzXailRz\n",
+       "ygK0ibiwrEroTDYlsTezt2QeiXgCP/4ffoyZozNojjQj+1wWLdEW9DT34LW3X7Os49LqkgG/R14/\n",
+       "gtSXU2iJtmBn907c1HwTTpw/YYxppHsEiaYEjrxxxDJXqUdfeBQn3zmJh771kCfLpO6c0h1rflzU\n",
+       "Pu2UQKI8r49QdaNyjmGtr6VChQLEv/lQoepGs7PC8lZPbsf1OKag5DQ3eVNkQ+Y8MwNkMkg8NIF9\n",
+       "e1b9j2EJwAKEi+JPiu+NQAAIzH/SNbkzPAMBTT9i49gL4FEIt9NjMN1SkwB+AGAAwKsQlr5mlAKh\n",
+       "A3Q+jxk8gwyevTKB9bdXncfW5XkWpeJWUf6fhI/1fRAutJMAfojSW52tAO5gr18CsAXAcQDbi++d\n",
+       "gbCayeO5pvTJjqtFM8W+zynv83NjRrOf2szzM8g8k8HEsxNYXXdZzzqQhLjDJw5j5qg5wdl7ZzF1\n",
+       "61Rgbp127UnwaY22one9F9vPbsdlEidFsimJ7/3290r247DUiEacuXQGp9ZPYcfXdhhrvrN7JwCg\n",
+       "PdaOba3bMNQ1ZPR55tIZHD993GhDAtdP3hZ//A1owK1tt+Khbz2kPYaJeAL7xvbhmye+aazdV//x\n",
+       "q8bvt++7Havrq8Y4AWHBXb+6jsLFAo68cQSvnRN3eIY6h5AdyGLu/jk8sesJ2zW3O06A93NO10bQ\n",
+       "xzlUqFChglToahuqItVr/ONGjKte12JTqlL/Zh4X9ySAR1Cxq6Krpczu8wxQDCcF+iEALKG83wRh\n",
+       "ERwG8ITSdg+AU96H+QwyWCk2fCumMOZ0F1x139VJ59LLXWgjxedFzb6TAJ4u/i6tkmcg5neO9Z0C\n",
+       "8GNY582PYQKmy+zNAJ6CWK8tEJbXAZQqA3N9uauuz5jJclwXN1J+YwdlLGYLWjCL2YpvxqyuryL1\n",
+       "5RTWrwoLZe+WXpxcO4lkUxL39d+HX7zzC8f4zu1/uR2n1s0TXq453yb7XNa0qgJoibTgu9nv4t/9\n",
+       "4N/h+KnjOHnhJNaurCHWGMO5y9Y7D3ZxpjPPz2Dvq3sNSFY1desUmiPNOPTaIUQaI7g9eTsWfiHG\n",
+       "oIsddZN6nKSLcku0BWcvncWxN63uyDpJ996OWAcWP7poiSENFSpUqFordLUNVXUtLZl8MDNTP27D\n",
+       "GzGuel2LcrThEO3Vv9luoLOwulgGcSykpQwode10+lwaSVTQke8nAdwG4TZ6RNP2SwA+DOAd2Cfm\n",
+       "keoComdbgEtAN0Zwj9YUyOTFtTai2Y7HbV4BZj4+g6XeJbRcbMHsn88icSEhrJl/yrbj7sRnlTZW\n",
+       "YM5bAnwMwmIpvSPl8cxCgPDZ4vN3YcItl1zfNgiL8irE2qvnhovKcV3cSM3eO+srdnAJS0airRnM\n",
+       "+HbXs3P3XL8owPPq1avIDmSxN7PXAowzR2e0APjSgy9hx9d2YP3qumXNpVUSsFoyCYR4JI6Opg7s\n",
+       "G9uHxN6E4V4r4TfaEMVluoxGNOKZ5WfQ1NCEt68Iv+/eL/Ui3ZfGhcsXLNB5V+ddWHlnBSfXTqIt\n",
+       "2obCWgFvXHkDpy8K3/Erp6+gd0svRnpG8PhvPI5HX3gUR39xFLd+9daS+M+SNcMMzt57FqmjKTx5\n",
+       "z5OGG69cm+ZIMwCgI9aBP7n7T2zXfqB1ACfOn8CZS2fwyAuP1P1NkVChQoUKXW1DVaSNiH+cn593\n",
+       "3WYjxnUtxYJKiD58WLBdzeXVv9luoBI2PQKzl3PKApC641v8/MdtwFRBJA4DIEBnClbonIGAphSE\n",
+       "a21n8X0OSVL3QcRGcuiM2YzxNHBvxyxu7ZzC/ZhDPAhXYg9wunTDEhZ2LODwnYcx84nicTgPYWmW\n",
+       "4uNXEw51AXgDwhr5dxAAfwRinglYj2eLsq/dvdVZiGRF52ACPeD73CjHddHT+RSAnp+ZwTOZDJ6d\n",
+       "mMB68YSTgOZlrDPPz+CVZ14BngWG1oewx+1GhUY6d0/pFgsApy6eQiwS08Yf6vb93A8+h46mDsQa\n",
+       "YmiNtRrtvOdr70FibwI9X+zBDVtuAABQ0RRfuFjAh5/5MAAg1mj944g1xvDygy8j2hDFVVzF+tV1\n",
+       "AzoBGBl5v3/q+wBE7Oium3Zh4bcW8OrHXkVPvEfEp75xBD85K4C3LdqG0xdP4+TaSbTGWi3xn4WL\n",
+       "BRx5/Qhmjs7YuswuYQnH4sew0rSCkedGMPHsBGKRmLE2dyXvAgADKAH9OdXe1G7s0xxp3lQu4aE2\n",
+       "XrX6ngoViisEz1AVqZ7iH4thgZiYAP7sz2o/rmqsBZ/Tag2vJTYcohMJzCT2IZNNOM+9lgPVAaTy\n",
+       "eb4H+OA54MkjjIN1oLMEEdu5AgFnKiTdBgFhq8VtzsCqXcWx3Fd8zeArfiqBsXP7vEGnCm3N7ruU\n",
+       "KAa0bCkCxekR7PlK8Ti0ohSipSKa947BNibXolkAvcXfh2FaRFUlIDLvOrVlJxmXOwEkLngHuVpr\n",
+       "dWkJKwsLOHH4MI6WcYdoaXVJlDo5Adxy9Jay3GxVmJx5fgYXrlxAU0OT5X1AQPzg1kHEI3E89K2H\n",
+       "DEhUEw2dXDuJS3QJC79YMIB05Z0VI/bzbwt/CwCINIgTqSXSgr/+yF8DAF568CXEG+MARFbZ93W+\n",
+       "D5954TPoaOqwnUNXvAvRBuEAdgVX8N03v4tbZm9B6sspIyvtUOcQvpcV8akS+BrRiJMXTmJ1fdWw\n",
+       "wgJAW6wNf3L3n1jAevtfbjegsKV496RttQ2nVk7h8InDaI22Gjc4Ord0lqyLTvymyGtvv2YbMxoq\n",
+       "VKhQ9aIwxjPUNaNrsezJRs2pHsr8eJp7PQyUqaT8y6PQx32qcYYfgACuyxDAdr64XQrCUsjjJ+8A\n",
+       "cBSlcaKq+iGAVZdJlisGkTm2ubitHeQPF+dxDNa4zwZgdcsqZj4xgz3P7UHijoRwG5bZbFMAfhPA\n",
+       "4zBLpXQW57gOsQZvFJ/txbn9Ozi7wrqVGOHuum0QcOrn9MhAHx9aZ3p2YgInDh9G98gI7p+bQ9zn\n",
+       "30AQtSTVsiBOZVtmnp8pKW8Si8QsbsG8ruZQ5xDyv5XHoy88asRfNjc2Y3zbOI6uHMVtidvws7d/\n",
+       "hu9MfscS3yjH9Ma5N4xMtxPbJnDk9SMGSBprsG0CL731Ek6unQQg3FuJCGcvn7Vs1xXvwvt73o/Z\n",
+       "e2fxwDceMGIwARGHyfuS73135bs48c4JNKLRqDea2pLC9z72PTwSfwTHnj2GN068gY7uDizev4iB\n",
+       "+IB2Tb1IdyzLqekZKlSoUOXKS4xnaPEMdc1ow610VdBGzWlDM9oW5WnuNRqoV8tzidVbzaAqLWmX\n",
+       "IBLixAE8BJHBVrq08uviFQhw4vp7CBhahel2OgTTXRcQcaM/hD7hj6pLEMmLfg576ERxLj+E+K/B\n",
+       "/3NQ0Sr43/Yh8U/F2M73K3P4GkzoBARM//PiPN4LM/PsWQjodHOFdXOXlevO3XX9yM2tuk507+ws\n",
+       "bp2aKgs6Z56fwdmLZ5HaksKTu54sG0pU115uAVVrherKm6jW5J7mHnTFu9C7pRdP3/e04cYq4y9/\n",
+       "/aZfx+n103hr/S0ce/MYzl48i1958lfQ80VR/gQwS5W8euZVAMI19uLVi/i1G3/NMvYHtj2A85fO\n",
+       "45frph/4aGoUTZGmknmeXj+NwycO47a/vA2vrr5qvD/cNYw99+wxrKD8PQnDV5lv+craCn59/6/j\n",
+       "5DMncf7qeWAAOHP/GTwSFy61M8/PIPtcFucuqumYnaVzCXfKnBsqVKhQG6EQPENtOtnFJdST229Q\n",
+       "uhbn5FW1nLtbrIvXmNcSDlYB5iBMIHodpnspF0FkuZX7vU/5/HJx/9sB/BkEvOVhlhkBgGcgYKsd\n",
+       "ztK5vOqULG4rkwJdcdhuD4R1N1V8bwQi+yxXG4TFcw9EnVFpXIoCsMulwtxfHQEZqBwc3dyqXVSr\n",
+       "2Kl4IoGxfft8QycgoOTYyWNYWVvBB576gOe4QKdSHyrMPvrCo5ZtJZQmm5L4wb/4QQnsvudr78Hj\n",
+       "//A4Tq+L+EkZ3yj3645348z6GfyoIGoTjXSPYO3ymuGC+6EDHwIAfOUfvoKFlQWcWj+FKKJGDdFX\n",
+       "Tr2CRJPZ5wsnX8DCyoIBta2RVly8ehHfeuBb2rk3ohFvrb+FU+un0NTQhIltE/j2A99GIp7A7L2z\n",
+       "mByYRHYga7zXHms32o01xIw2fn7+51hYWcCZN84AJ4Gt2Io/KZ74drDodk7pYnvLSYy12coHhSpf\n",
+       "YYxnqI1QCJ6hrhnVg5UuaF2Lc/Kqepp72ZbnWQCDMC2bvP6mtHB2wATEhuL7FyHgKV58fwJmXKPU\n",
+       "CoB3AXgOouYl/zZPQ2TCLcBZV2CfnEeqCSLm1KF2qDH2H8BMBvRjmPAmYy3vhEgkJGNZ+wAssjYu\n",
+       "w5qQCDCB80l4r79ZITj6TUBUkfwAdYDiNSlX1lYMyJHgse0r2/DhAx8uTYyjAaOZ52dw45dvxF/8\n",
+       "/V8YMPvIC49Ytn3fX70PZy+dRXOkGdGGKIb3D2PX13dZ2l55ZwVXinc1Yo0xS2zo1K1T2NGxA8dO\n",
+       "HsOp9VPob+3H3P1zlvM30hDBjV++EReumCfrFXaX5OS6KLMCCJfa9ybfC0CAIQCcv3IeR14/gt9/\n",
+       "8feNGFUubrm8SBfx49UfI/tcFhPPTgAAnr7vaTx131MG/M3eO4vueDfOXzmPS3TJaMNSsuUC8PbX\n",
+       "3sbvrv+u5bi4waIXQCwnMVZoJQ0VKlQ1FYJnqE2nTCaz0UMI5UMblSDJj9zOKd/WVwkTD0HAlbRs\n",
+       "xjXbnoGAxH4A0hNwBCKm8hgEoLVCuON2KvtegbAWnoIZFwoIq+QxuGekbURpjU6md1reAW0hEbN5\n",
+       "yaWt4zDrac5AWGSPQABgd/F5A8S8pC7AClvDELGmGZggJt1mJUR7sWJK+M2i5kAH+PyOUt2xayBp\n",
+       "mWxqLE0AJMHjxDsncOzNYyUAwsGoOdJsAOfKBRMak01J7Llnj2XbvpY+HHvzGC5cuYC31t8S2V/f\n",
+       "OILbv3a7AU4y2VAEEbz02y8ZcYrS9bQ5JrJfNaIRFy5fwL8++q/REhF9vLfjvbh49SJWLqxY5krK\n",
+       "CX71qoDHM5fO4Kdv/xTRhijOXzlv2eb7p76P4e5hy3vSeik11DmEvpY+R0hLxBP41Z5fLXm/OdKM\n",
+       "ni095htrQMNRQdCz985isG0Q8UaRgEmujTynJHA++dMnXQHRT4Zjqc1WPihU+QqvpUJthELwDBUq\n",
+       "VFW14aVZApCr9VW1WnGY4Flaf0Oz7wgElL0LZu3KOZhWUAlaCQB3F98zKjAXL6ob1oBDZ0yw9epC\n",
+       "Kw04sr1GGBaky42XEb0YRcPZhtI22TUzUBzrv4EJeEsQFtkCBHwegYDjIxButnblYG6GcL3lICYN\n",
+       "c8MAJuHdirkBQFeWNiCeVLrZXrx60bAcqjGajcXLg6HOIQuA8My0B187aAFOAEg0JQw3Wm5xk+Cm\n",
+       "AtzK2gre/dV3Y+LZCXzrgW+hv7UfP/n4T3BX111GMiIJWPNvzAMQVsPT66fxV8t/ZSQB2p7YjsK6\n",
+       "s4k/2hDFFTLH+sb5N6zWx6Le1/0+dMbFXZ7hrmFMDkzilY++gp64eeL/ePXHeOHkC8Y2KqRJQLx0\n",
+       "9RJ6twh3hc6mTsQaYnh/7/vxN7/9N8b7w93D2HuPSM+ciCdwc9vNOHayFPoB88ZA4aKYa9CAWI6V\n",
+       "NFSoUKG8KgTPUJtOYVzC5tJmSPpU8TnFIWc7gB8V3x8B8D2Ybp+/YPu0wwQpCVs8GY7OXbQHAlJH\n",
+       "UYTFIhTSL4G9OQFugD4GU01SJNUB4OViX6cB/BK4HL2M6NUomi4X3Q2TMGEzBtFPb/F9QFhdea1M\n",
+       "XmtzqPiU67EXwhUYEBl6e9lnX0ApiMl1+DaAp+Hd/XUDEwT5Op8qdQv2INUtk1u1fqPvNwx30dX1\n",
+       "VcM9VLqV3rL1FguAJOIJ3Nx6M469ecyAH0AA5cS2Cfzs4z8zkupIi9ujLzyKl0+9jFhDDHcm78S2\n",
+       "1m2W8Z2+KBL33HfoPvzwoz/EwNaBkgy4ACyQ2BJpMeCwPdaOP/3QnxrWTztdpssGJDei0QLMHTFR\n",
+       "biWCCL6z8h384NQPEG+M42dnf4bzl8+jo6kD8YjpsrB+dd0Yz9LqEm6ZvQVb/vsWfOCpD2Db49vw\n",
+       "tX/8GhZWFnDkjSP44A0fxNStU7g9ebtRJmb7X27H7cnbRUzo/d92jc2U55T8TAJxJYCoc9ctx0oa\n",
+       "anMqvJYKtREKwTNUqFBV1XWRIEle77ZBWPZOQbjOzkG4n15tJGsAACAASURBVMp4QbldEsI6+gKA\n",
+       "WyGyxQJWSNLFGX6z2PYCGFyeA+4olpQ5aTO+LRAlWwABmlH2WSuAu4p9PQogC0QuC/MmgYQl96cQ\n",
+       "FllAuNy+DNEXNzANsbHPinYwUGwfMC25CQBPQMDWUQgXYg5eKoiVG29ZA6ALRDWIJ1Xj9pzqP3L3\n",
+       "0JHuEezNlBZK5fAzsW0C2YEsXnvoNRwaP2QBFu4WKmtzHjt5DG9eeFM7zpW1Fdy+T7je8gy4XM2R\n",
+       "ZqSaU/jIwEcMt9qzl84i93zOErfqpq54l+W17OsKrmD10ipW1lawfnUdq5dWceT1I+j5Uk+JGy8g\n",
+       "gDXSEMGZS2ewfnUdL7z1Ak6cP2FYYiOI4Pyl89hzzx68du41Yz8JoAAMl2IJgX92z59Z6p3yGE55\n",
+       "7L79wLeNrL/lKoznDBUqVK0V1vEMFSpUIJqZEW61LS0CNq8pyJR1IdV6nFKyrqR0LZX1OdXttgNY\n",
+       "hrB2bocAT6l+CAB1WrdOWGEvfhW491PA4/9eLHgPBJhyNUMk+umAWftye3G7SHGsncV53Q7hIsvV\n",
+       "CGFl/SVEzOhWiHqfjTBddbdAWHPVsWdgrYcpS8z8BAJKXyv+bId+XUMFIqd6nbrP3OpIrq6v4n37\n",
+       "34e+lj60N7Xb1ojkNT25Yg0xI9mOTk0NTfhg6oMGmAFAAxoQa4zhgW0P4PT6abzyy1csFtcGNKCj\n",
+       "qQOrF70F9EYQwVVcLYkB9aNIQwTff/D7uPfr9+LUuvmHF22I4jJdNn4C+lqfANC7pRevfuxVZJ/L\n",
+       "Gms1desUTr5z0ngdb4yjJdqCnd078cSuJyx1USup0xlEHddQoUKFkvJSxzMEz1ChQgWiTEbEcgLC\n",
+       "wrlv34YOJ1hlYAUou7lJAJUxmaoSEMmEABGPuV78fQiiJIrbdd8uCLCVygJ4qvj7DIBXIJL8HAHw\n",
+       "H4q/fwfA5wB8BSKZTxTC4roKAdJyPFPF/dwy4U5AWDG/DeGaG4EA18sAdkJYMxNs28MwQTwLcx1V\n",
+       "Oa1rHer5mRmsLi0h2tKCe2dnyyppUitpQbJ4p2i1I4aZ/6UNe+7d6ws8bvzyjYYFMDuQxVP3PVWy\n",
+       "jQSboc4hvP7O63hr7S0Mdw2jPdaOhZUFNKDBFvzijXG0xdpwev205X0Oc3aKNkSRfyCPe79+Ly6S\n",
+       "l2K25Wvq1im8/NbL+OnbPy2ZT7IpicLFAhrRiGQ8iVhjTGs1HWwbxOrFVRQuFtAWbcMHej8ANABH\n",
+       "Xj+Ctmgbzl0+Z+lv35j4Q+Fgz9/3KrcbDKH0msEMlrCEFrRgFrNIhHfMQoUC4A08Q1fbUJtOYVxC\n",
+       "fWozxHLayfWc8hov6OY2KZPqtAB4EQLEbv4B0PoA8JBN2l+euOi/w6yJuRXAf2bbLUFYUNcB/D8A\n",
+       "DkHUCh0ofiYrTFwG8BaEy6yETjmvncXXt8NMVMRzwQxBWD9PQsRn9kFA51swrb3cY091d5Xr2K78\n",
+       "9BKHuUFlR+y0urSElYUFnDh8GEeVrFn19h2ljdsrZv1KPHME+74Q04KHU8mO9Svrxu928CjdQvO/\n",
+       "lcfSx5YsLqLd8W7LfhElI9b61XX8cv2Xlvca0egKnYCI5dz17C7c2XknGlzrBZWvtmgbCmsF/NO5\n",
+       "fwJQug7SIiuTIZ1eMyFajqs10orCesHY9tzlczjyhqg5mh3I4gM3FH3kXxWJnpojzcYxiUXEF0q5\n",
+       "CYbCeM7ytIQlLGABh3EYM3WducxZ9fY9Fer6UAieoUKFCkTXdCxnUPGCL0G41P4dRFzlUwAungCO\n",
+       "fRo4/Cngd/730n144qJHYMLh28XXEsh4QiP1GtQu9K0NAuTU2MvvAPgYTPiMQ9TzXAbwTHE8CxCu\n",
+       "ttxjskPpWwVxuY6vABh8Cbj9Y0DqeeDJM+7rapeldoOANFq809I9MoJ7NtudFsDTnSK1DieH0J3d\n",
+       "4kRsjbTi/OXzFjCVwPrQtx4yrGmJeAKJpgSyz2Xx0LcewlC3yDiVaEpgYtsEIg1W8GyLtpWAXKOP\n",
+       "S5a1K2t4+fTLIBCaI83oineVwK1Ue6wdu2/d7bntCCKINkQNSOSZcrk6Yh1GPdBGNOL9ve8HYJ3b\n",
+       "+SvnsXpp1RiH1Mn1k4hFYnhi7AlkB7IYvWEU+d/K45snvmkck6bGJl8ZaL3U/gzlrpbiF+oIRrCn\n",
+       "1pnLQoXa5ApdbUOFClXfulaDR2Xc6LGzwOXiBWf2ItDTZI2DfBUiHlO6q94J4ASEtfAVAJ+E6b7a\n",
+       "BFELlLu7AgLI/kcA34ewiHJ3WuniqsaxOrnFcm0BsAbhcvt9CKB2mq/Rfsafb7bqtivnl4E3N+iA\n",
+       "tb66iqMzM7hnz566crP1HPe3uir+tvbssfxN8f0v0SUcef0IRrpHEI/EcexNEZ+Yak7he9nvYeSv\n",
+       "RozYxsG2QVy4cgHr/z97bx8U13nne377HZoGGmhkhJBakkvWSyIZJBzJsRS1IyleEyd0XshcM3cs\n",
+       "u2rdU8luJffurrh3tu7O3Jqb3Joqp27NTO2uK9pkxEzingQpkWLZZhRhCSThGFu2XhxJMQ6KiRBC\n",
+       "vIgWIKBpoPePp5/T55w+p885/QIN+n1UlOjz8pznvAD97d/Ldy6MKKJCraVaami9tx5XR64KdaKd\n",
+       "dzsxFmENeRxmB3Y9sktS4+k0OzE5P8nWmxwIR+MRVy2SpeceWHUALftbJDWWRpBfG47NZMN2z3a8\n",
+       "N/SeIDJXOVcJ12ckPIIiWxHGImMoc5QhGo1ia9lW3Bi9gcHpQaHusqmrSXI/1//reiE66vf6UZ5f\n",
+       "Llkv3158/9NNzSUYIYQQQACHcZjSbAlCBNV4EgSx9FmuxaM+SIVdzSxwxqos+KrAPEB7AVxBPLLX\n",
+       "AGACTJC5Yt/z5fLLxIXfNcQbENWA1Wq6AaxEvLGQH0ygtsZe84ZCcmrBBCdvkpRM+InPtwHARB3Q\n",
+       "6gdcO4Bd24CjtuRRT7X6WTVB+pCSrriQi0O7xY7Dew6j8e1GtPa1Cts1rG/AxMwEWvtaYYYZZlNi\n",
+       "GqxcPF0bvYbh8DBcVheK7EWYmJkQur+uyFuBwel4W+byvHJE5iJCNJCLx+rSajgsDnQNdUGJUnsp\n",
+       "7s3cU1wnF6Al9hLcfP4m3A43Vr+2Gn0P+gxdKwB43P04yp3luDBwAdPz03BZXfjCyi/g/sz9BDFq\n",
+       "N9sxM89qTqsKqnDhqxdwqOuQpOmQ+Jq7HW7J/eDpySPhEaEWdGp2SthX3pTI4/DgifInBAFKzYQI\n",
+       "gsgmVONJLEuoLkGFQICJtDqVWsGlygIUjy7YMyVOC+X1ntVgQu+MlYmmnthy/qu7CMAFxL0++a2V\n",
+       "+1zuki2Xw1NVh8FqM/2Ii04g3ugIAKKIRz3rwbrt1iPuucnnfBqsxlN8XLXU1wR/ziDgeQqY2AG0\n",
+       "2aBZKqVWP7tQtikGUnoX/HeUaG5CGqBK3Z9WuqXYQ7LZ1yzUAAb3BVGRXyEZO7gvCKvJinnMSwTd\n",
+       "ttJtEo9JnrI7HB6G3WzHxOwE+if7BdFZbCvGFyu/KJnH0PQQ8iysoNlldQnj90/249bELdVLoSQ6\n",
+       "nWYnVuStSPD5HJ0ZxSP/8gieb3seY+Ex1TGTcSV0BW39bZienwbAajQvDl1EvjU/YdsnVzwJgF0/\n",
+       "7lfasr8FRfYiYbn4mgPx+5H3hzwMh4cxEh4RrmFbfxt6xnqEfQ/vOSxs77K6MBwelliliG10SHQS\n",
+       "9F6KWAxIeBLEciHWLAStrUyELheUikeXqsgW1ym6AKwD6xDLRV8AAH//yxNFxsBqOfl75howESj3\n",
+       "ueT1mWoCTCz8roHVl4q347Wj1QCaY+uOAzgBlvJ7AnHPzbOi/eXCT3yOm8FSgX1gtaCSebuBJz4T\n",
+       "n5PaZwpagq8JrNlRo8p6AyQVZWo1ppkYO11Ecwv+PLm40PJuVBMnbocbN751Q7KO120CgNPixIFV\n",
+       "B+D3+tHxlQ6Jx6RYzH5h5RcSjllkL0LrrVbJspqyGvgqfXCYHZianRKWD04PYnpuWtdlMcMME0yY\n",
+       "nJ/E4PSgYofbmegMfnHzF4IIlrPKuUry2mlOLJiWNzAanB5Ed6g7YS4wAWsK1sBhZv6cB88ehPMn\n",
+       "Tvym7zewmWz4yd6foKmrSfKc8PuxpWQLgNg1rPiC8P27X3tXck/49rwpkfgDCLVmQlT7SRDEQkGp\n",
+       "tgSxXKirY6KztnaZdvgRsVTTb3laqAssQjkFgGfjNYAJKJ6OagXrQMtTSL8X2/dxJNZwqiGuq3wV\n",
+       "TMCqWb1oWcHohZ8jx4N4aq88FVfPMX1IXsOptd4ASdNU00zpzWp9nYG58XRLj8ODje6NKLIp+3Dq\n",
+       "rRXtHe/F7td348JXL8Bb6FUcw2axocBagGJ7MXrGevDe4HuCj6fVZMUOzw5J6mx5Xjne/9r72HVi\n",
+       "l6L9iBVWzEK7u61RtKxaHGYHyvLK0D/Zn3ScbaXbUGgtTPDs5GOE59knTfLzqHRWYkPRBsXnRGx9\n",
+       "AkDTBsWIVQrVfhIEkQn0pNpaF2oyBEFkmWBQsVnIsmSpercEATwGZj/SBqAitpxH/Bpjr0sAtAP4\n",
+       "PuKirFe0XwDKDYHkt51HwgAmOpO9n3RrrBeT7LhBsEjnQOy83LE5K0U19RxTy8pGr9WNDsSRuYQ0\n",
+       "1SDSEuZJx04XA3ML7gsicD6A/gf9Qg1i4HxAEBvcn/S3X7qK33lGE9bL8RZ6cevP46mvYsE6FhkT\n",
+       "jtGwvgG9470JdY+z0Vl8OPwhAMBismAuOoeh6SEc6joksWyR7JNh0WmGOSFdWIlCWyEGJhOFsJx1\n",
+       "heswM5cYXXVZXZLorfw8qsuqcXXkKgDW4faVna8I65q6mjA4OYjGtxvRebcTDyIPcOKPJ3Dx6xex\n",
+       "rSyxoxePbuohlWdTdxMrgiAIEZRqSyw5qC5BBbebRf6Wu+gEMu7dsmDPlBtMICH2/7uQpqnytNWb\n",
+       "YN1hxTWNSgJLK/1TTZSla0GS7LhuADcQPy+tFGAttGo4y2NfGXjsk9bAqdWYKqD0PKVaXxc4dw6+\n",
+       "119H3VtvIRRW6eRqYG5ckHDrDrnY4P6k0Tujiuu1ONl7Ukjl/eT+J5IxuMApc5RJ9olEI6gqqMLT\n",
+       "K5+WbM8tW7LNPOZ1bTccHtbc1mayodnXjOC+IErtpZJ1E7MTmEPceoVbrQBAHvIwMz+DVQUstXcs\n",
+       "MoZDXYeE9e+df0+4rmORMcxhDpFoBDtP7ExIlRW/Pnj2oGYabSrPplbKNpFZAgjABx/qUIdQhnyj\n",
+       "6L0UsRhQxJMgiKUHF9nZJADgJFj95Q7oT2/VQh6dEp9GsgigUlSLC0sPgH4wISmOQKpFwsSRUB49\n",
+       "1WITmCCeRbzxUQ2Uo4zy80jnVmlFRZUiwRyDVjy6okRaUeZ0xlagOxRCxwCLsgXOn0fL/v2Gx1CC\n",
+       "Rz7lqZjcn/Q/fViNE8+uxU/2HTEklMVRytryWhTYCnB4z2E0dTVhLDKGivwKeF1ejAyNCNtZYMFE\n",
+       "ZAKRaAT13no0+5rhdrhRWVAJj8ODueicYCHisrowMTuRcFwxWimz2cICCy5+/aLQxddsSvxs3wIL\n",
+       "iu3FsJqsmJqbwswsi4xOYxptt9sSGjhxHBaHsPzKyBVEohGYYEKXvwvf7fyukCq74ecbJNer3FGO\n",
+       "ofAQAPXIdSrPZlYj+EQC3ehGR+yXdgABtCyUbxRBZBiKeBJLDp/Pt9hTIBaTdKN1Cig+U91g6aKj\n",
+       "iIuaTGAgOqW5H48GbgSrFZVHINWOlUp66gBYg6AoIAR9/gRpUx899ybT9y/ZuWSj4ZaOJkOZ/B3l\n",
+       "tLLPh2s9Hhzes8fw/mqNY9QazewLBrG+oQHfevMsfll33HAK5Y5yFqWsKavBa198DS37W9DU1YSW\n",
+       "nhZ03u3EwNQArt5j6aRWkxXFtmLMYQ6hmZDg28mP2Tvei+HwMEZnRuEwO7C6YDXyLdJusfJmP2ZI\n",
+       "bV1K7aUosZdItrEqfOZugSWhSVDCNiYLDqw6gJ3lOxPWmWBCz/M92Fa2TdLFVz7mPOZxb+YeBsPx\n",
+       "Jknm2FuxWk8t3vW/KzQT8p/yC/dt085N8Dg8cDvcOPPcGeRZ8nD5G5exrWxbQidbLjprPbV4vOxx\n",
+       "4ftMCkS9UVJqXJQZhG7VqMXhdGsKYtB7KWIxoOZCBEEsLXzIWDOZpIib5Ij9LnMRo41vUmkkVI54\n",
+       "kyBA2TfUB+17o2cbI8jORVJ79qMI3K+3Zbbh1gL7hobCYQTOn8fhPXvgdjgM778QjWPE1/zVPa/i\n",
+       "UNch5Fvy0Tvey2o9Z8aERjsl9hI8VvQYuoZZM6GK/ApJA6EyRxk+V/45BPcFE7xDxdE7Tt3qOpzu\n",
+       "Oy00K+LbiCOjJpgQRfx9yFv/01v4Hx/9D7TdbjN8rmL/Uo4JJkEEBs4FcOzmMYzOjMICiyS11hz7\n",
+       "x2s7rbDCbDLj7efexj9e+0dJ9HnlT1cK16XeW49QOKR6H3kjodHwKNput6G6tBprC9fiiO8Iuz86\n",
+       "mwxlA2pclBlCCCGAAA7jMNw5+4eIeNghH09iWUJ1CQ85GWwmw1F8poJg9h9+5LboBIx7WaYSdb0I\n",
+       "5v95AOyaKPmGiu9NPvT5eRpBKVoqOxdJ7dlfujJaCwxA17XO5O8ot8OBlv37UxKdANAzznwei23F\n",
+       "kmY1mSJwLoCWnhbhmh/qOoSW/S3oHe8VlnGvyRJ7CS594xJK81jtI4/wrchjBrEuqwsj4RG09rVi\n",
+       "+6+2Y2xmDHazXdiWR+84BZYCXB65LLx22Vxoe65NYicCQCI6ASbEju4/mlBrqoXL6sJoeBSv7nkV\n",
+       "DesbsKN0hzD+9y99HwB7/njEUZ5qO495mEzx92SzmMVMdAY/vPrDhOizOGX5VN8pXOy8CIBFkuWR\n",
+       "Sx69Prr/KBrWN+DsV87i+DPHBcsbpcj2QkEpuZnBDTda0JJR0UnvpYjFgGo8CYJYWqTZXVQ3bjDv\n",
+       "yoXEaP1gKvWGKdYowgvgtui1UtRUfG/8UK4jTef+6ahNlbzR3XcEqMvwQ5Ks5pRf2ykAp5CR55N3\n",
+       "mbU6ndgXDMJhUEB7C7zoe9CH+5H7gijUQq1jqdLy7lA37kfuA2DpqqPTowiFQ4LYLLIV4dSXT+H7\n",
+       "l74vRN3k9aUf/9nH2P7L7bgXvgcAqC6tRoGtQOiAazfbcX30OiwmC2wmG56qeApXR67i3sw9PJh8\n",
+       "IMx7IjKBr576KsJzYdybvqd6fo8WPYrvvfM9zEf1NRUqsBQgYolgYmYCbbfbsPZf16Iiv0LoUFtT\n",
+       "VoPLw5fhPuLG5OwkAPb89T3ow8DUgBBxLbIVYWvp1oTOvkopvjvKdwgR2em5afAGuLcf3E7YlpNq\n",
+       "HXE2UaslJgji4YRSbQmCIFIlVRGnhg/G0lCNbp/qPqmQjZRUHWMa8S/MOD6kdW2VhN3rPh8GYp61\n",
+       "6xsasN9gUy3u21nrqcWWki1C+msyCwy19Eil5Xx8cUOfhvUNEruWdYXrsKZgTdLjisf2e/0Iz4XR\n",
+       "2teq2EyoqqAKW0u2orWvFcW2YkH4AqxT7Ew00cpEjlLabqrUe+vR3t8umcfeir2YnpuW+JMC7Nyi\n",
+       "iOKdu+9gaHoIDrMDDosD4bkwqsuqUeooRXBfEACw+RebMTA9AJvJJqQSA5SyShBEbkKptgRBENlE\n",
+       "R6MZQxhNQ00lbTULqcoJBACMgfmUHkPmItM60lwXNbUwzWurZFHBu8x6amuxJwXPWnETGHH6azIL\n",
+       "DLX0SKXlfHzfSp9kndiupdJZqXlc8dhHfEeEcXetYCmzVhNL0HJanNj9yG6hQ+6+yn1Cs6CtJVuF\n",
+       "cZJRYi9JSNtNlc+6P4tmXzNsZptkecdAh+BPajOxdTazDXce3MHM3Aze/9r7aFjfAIfFgbHIGMLz\n",
+       "YXQNdQnXyO1w48af3UDD+gZs92yXzJ1SVgmCWKqQ8CSWHFSXQGSalJ+pTIs4o7WaRrfX2idTHWe7\n",
+       "wbrsDgA4pLGtEZRqU7PQ5ThlYte2/W/aUxLbSsKOd5n98unThtNsAakQ11tvp9axNLgvCJfVhe5Q\n",
+       "Nzb8fAN6x3vj9YUHjkr2EY+h5Bkq9yWUH1M+7gdf/wBVBVW4/q3ruDN5R+iQe37gvNCsZ33RetSu\n",
+       "YEa5ltg/JbaVbcOP9/5YELNizAbfFl0PXcf6f12Pje6NcJildbjcn3R7GROOkfkIuoa7JLWwvIZV\n",
+       "3NmWXyN+DW4/uA18zMR3+1fa0/5QhTrNEgC9lyIWB0q1JZYc7e3t1Aac0I8OL8eUn6lUusPmMj4k\n",
+       "pIp+5Qeb8Mf5AeTBhjf/8iIeWeHVHmchO7/6sDCpwwZI9XnKdppwJsZ3H3ELKaVVBVW49ee3Ujqu\n",
+       "Dz50nOsAQkCFtQI39t2QzClZbas4fdhtd6Otvw21nlqc/vJpAMBjP39MkkZbZCvCWGQMFpMFc1HW\n",
+       "Zdbj8OD+zH1JCmuRtQhOm1PSZVeMFVZB5Crh9/rxzt13MDg9CIDVqj6YfYCbYzcl3W1L7CW4+fxN\n",
+       "NHU14cQfT2A4PIzPrfgc7jy4g9UFq1FkL5KkJO/+9W50nusENmYmzZY6zRIAvZciMo+eVFsSngRB\n",
+       "LG98PublCLAOpwZr5B4qFATjZ/+bG9ceYULjC3er0PF/aQuNjAvyZLW0C2xv8rBT/s/lGA4Pw2lx\n",
+       "4vq3rsNb6FVtRpSM1edWo6+nj3nDIlEA8drWn/57YOyzHqza+oQwtljIAol2IVyY1pTVYI1rDfIt\n",
+       "+Wi52YL5mAGt0+LE5BxrAmSCCSX2EhTYCrDGtQbXRq8hNJMYBXRanHhixRPouNORYM8CANtKt6Hj\n",
+       "K+z3zOrXVmNqdgpuhxszczMYnx0XtjPBhO2l27HCuQJjkTFJoyFx3an4eoiFtpZvph4yPR5BEARA\n",
+       "wpMgCAKoqwNaWzPr5bhcURCMtf+tHB88MoxHB53oDFzXF/HMND6oRzWXWNQ53S61mSQVwdg73ovd\n",
+       "r+/Gha9egLeQPQvyCJrb7lYdlx/zyr0rgsDjEUDxdm/V1aGvtRX/8DcuXK+cEMbWE52TR1jF8xMj\n",
+       "blwkbo6khAUWFNmL8OQjT6JrsAsj4RGYYRbEbL23HivyV6A71I3Ou53CWEoilSP2MK0pq0GZo0wS\n",
+       "vXU73AicC+D6vevoGevBu197V7jm6bCoDbgIgli2UHMhYllCdQnLiIWozwsGNb0cl/UzZeQaK9RQ\n",
+       "vvmXF/GFu1UZFZ2Ga8yS1dKm4kmaZZI9T6Hubgx0dKCvtRXnA5noSJU6Ss2MtPAWenHrz29JBJC8\n",
+       "djTZuCd7T6JjoEMiOi9941KCAOK1rVXbWXMh7qEpfl7UniN5gymlhkNOixMWE6sBLbAWCEKx2FYM\n",
+       "v9ePYluxZPs5zGF0ZhRX710V6k0dlnhN5/DUMF7vfR0dAx3CWE6LE+e+ck6o4xRTXVqNd/3vot5b\n",
+       "D7/XjzPPnUmokwXYPeoc7MTAlQEc6ooXTBv5GZJvu9jenkRusKz/7hE5C/l4EgSxeIh9GbdfBNb8\n",
+       "H0lrMVPC7V6c9FodtaULgg7vy2SprI+s8OpLrzUypZgwAViapGYUa6G8WxeAdLvUZhK9zYa0kHs1\n",
+       "Jhs3PBcWvq90VuJawzVFAeRwu9Hyv7rxYLQfNpMNE7PMQ1P8vIifo+2/2o6p2SmE58LY4dmByoJK\n",
+       "wTrm1T2v4onjT2BomqWxVpdWo8BagM5BluZaYC3Ag9kHggj2Fnpx4M0Dgo+mmP4H/dj4i42oLqvG\n",
+       "nQd3hOWdg50SP06H2SGkIu+r3IfWvlbJOKMzozh49mBCVLhlf4skEm2zsI64RbYi9E/0o+6tOgT3\n",
+       "BQ39DOnZNpXoN0EQhFEo1ZYgiMVDXJ/neA7ofJMtXw61mLlSW6qnBtKHBW3Q8zDXmIVDIZwPBLDn\n",
+       "8GHNNFuxGPhfTpZj7kYvrE4nfvkfy9Ezpe3HqTXmq3texaGuQxlPueSpnPmW/ATfUC7oaspqcOa5\n",
+       "M0mPK0+RlT8v4ufIYXFI6iXlvqKH9xzGS+0vIYoomn3N2Hp0K/om+1BkK8L5r57H9y99XzLfV/e8\n",
+       "isd+/pguT1CA2arcenBLaLzk9/px/JnjwvXgnpwAS6t1WpyC8F3nWoc1rrjPqf+UXzjvem897BY7\n",
+       "+if6he0b1jdgYmZC8WdISUDq+XmjhkMEQaQL1XgSBJHbiOvzGpdZLebq1UBfH1BUBFy9CnhFaarJ\n",
+       "muVkGn6N8wH0qhwz0w16NM7vYasxSzWaJBYDT/WW44X/ziJ2/8/feXC1ZBiAMZEQOBdAS0+LII6y\n",
+       "LTCUxIyRey9vEtTsa5bsIx6r8e1GIapYYCnAg7kHAJTrR4FYp9i7cSHXsr8lYb6v7HwFW45uQWQu\n",
+       "Iul+y9lctBmjkVFYTVZ4XV78/v7vMRIeURTVoXAIL7a/CBNMOOI7Isy31lMLh9mhKSpX/2y1IJSv\n",
+       "fvMqiu3FitfRyDUXP5eRaARtt9seyg+DCILIDFTjSSxLqC5hGSGuz9NRi5ktsvJMcaE5NgYckplZ\n",
+       "8vTXVjCRlk34Ne5NcsxU/ECToXF+y73GTP48adVSqtXriVNWv/u7xwGwFN2KzdXCciMpst2hbkF0\n",
+       "lthLDO2bivejUsptsnsvPwb39txauhWhcAiNbzeq1nIG9wXh9/pR761HsZ3VZybzveTeouLaUfl8\n",
+       "vYVePOF5QlF0AsDGko248xd38GjRo+gc7MRIeARVBVWKkVy3w40Tz5zA8WeOY9eJXegc6ITdbMdP\n",
+       "9v4ERXapz6mSj6r7U/b/WGQMh7oOqV5HI9dc/FwWWAsUvVuJ5Qu9lyIWAxKeBEHkBrwWc6lHOjlF\n",
+       "7M0kamsBeS1fsmY52WIhG/QsxvnlMFq1lGrCVCxA6v/5KNY3NODLp0/jF88kNqExMg+1hj7JSKUR\n",
+       "kZKAMnIMLph6x3s1j+12uHH8meM48cwJrCtaBwCYjc7i+5e+rzo3j8MjqR1Vmq9SYyKA1Yke8R2R\n",
+       "bFPrqcVH3/xI81wHJgcwNjuGmfkZfPnfvpxwXCWhqLce18g1F4/Z7GvW/DAolQ8fCIIgxFCqLUEs\n",
+       "dXKliQ0hJRRi9+bw4cR7YtQCJBP3eCFtR5aYxUm2EOlmbQAAIABJREFU0UovTaXmNZX03XRSnNOp\n",
+       "yw0ggG50wwknggjCrfJQqB3D6LH1bq9nu1A4hJfaX8KD2Qf46N5H2Fq6FU6rU5L2K76uTV1NmlYy\n",
+       "79x9B5FoROKFqoXeYxjB6PNAdaAEQSSDajwJ4mEgV5rY5DpLWaDTPRbIJR/MTJGKIFxoEaCnTlBN\n",
+       "BPngQ0ese1UDGtCi0r1K7RjJmhUZGUc+32w0V0p2X8Tr8ix5+P23fp+SL+diCcCHuSkYQRDaUI0n\n",
+       "sSyhugQZMXsGxZROIk53NxNvra1MhIrI+WdqOd9jg16uRn0wzwUCeN3nw1t1dQiHFiY90OjzlErN\n",
+       "a6asUPSip05QLQ3WGcu9rkUtDifJvVY7hpGU22TjyOd7qOtQwnbidNKDZw9mpK5Vad2df3/HkOgU\n",
+       "P1MLfe85RlOnidwm5//uEcsS8vEkiKVOMKie0vmwI45y2pgfnm7xZrTzbDYjqnrvsWwOgSZ37gd5\n",
+       "9fiMijDqg8mFKgCcDwSwfwGjxdn0RpR7Zy4kSj6TyURQEEEEEMBhHFZNs00WyebHuzZ6TfNYWuit\n",
+       "twWAckc5hsKsk7Auv1kkvy+ZumeLde+5oCcIgkgVSrUlCCKz5FJKqzhF1e9n4lOvQPfBmLdlLqTD\n",
+       "yubgG2xZ9ClpYtDKxYgPJgC8VVeHvtZWeGpr8eXTpxc0NTeXa+LSEcXi8+I+k+mKoNd9PuEDgvUN\n",
+       "DZIPCMTHqyqo0tXARw0j9bZuuxtt/Zm3GMnmBxIEQRCLhZ5UW4p4EgSRWXhKK8BE6GKqHXGK6pEj\n",
+       "xkSw0c6suZAOK5uDs1FlSgEAJwGEAewAcBRAExbOW1RMEIYaETncbkNRy33BoCGhyslELelipUTq\n",
+       "QRzZ0xvN48i7oaoJJyMCK1kkW3w8w42NzgVwsvckwnNh7PDswNEDR5OeqziaCCArkcV0rj1BEMRS\n",
+       "hiKexJKjvb0dPp9vsadBqFFXx+ooa2sXxZNTQrLOsiIUnymjnVl1HiuryOagOiUf4tFcgEV0B2Es\n",
+       "wrvMSRaB04I/T+l0kVUjU9GydBrF6D0vIxHfZJHsdK6jeA565pEJ+D3qGe+Bt8CLInsRyveVo9fR\n",
+       "CyeciLwVQVtfGzwODza6N6LIVqR5L+nvHpFp6JkiMg1FPAmCWHhyqeaUe4OmtC+Mia90jpUpZHNQ\n",
+       "nZLYmrAaTFzHoqPkvckwWkuqRDZq4lKNlsktTdKpE9R7XkoRX7VIcrJIdjrXUezDWV1avSCRZ/E9\n",
+       "6nvQBwAoP1+Oof2sXrR+Xz0azjeg/0E/Ou92AqDIJ0EQDwcU8SQIgnjYCAF4CUAUQDOYyF6i3pvZ\n",
+       "slcxWkuaCfScS6qRSr2WJplEKVKZTiQ51Tm81P4SoogmTQtOF3EkOhKNoO12G2xmGyLzERTbilH9\n",
+       "zWp0FHagFrU4jdNww032JARBLCvIx5MgiMyRS02DlhpGO+QSulloIZNN9JxLqmmndahDK1olwidb\n",
+       "JEsHXsxmT3pINZVZqeHSn8b+hK7hLgCAf70ftv02SWffbKRiEwRBLBbk40ksS8h7apFI4oO5JAkE\n",
+       "WBfYujq0v/FGWvtDyx+SW4a0gonQ5YxBX850yURKbDLEvo56vRyN/o7ix/jbfdcwmZ/8XJJ5VCbz\n",
+       "LA0iiAY0ZF10Asm9PfcFg1jf0JCTohPQ50uqhLzhUsv+FpTmlQrLjuw5gha0SK69Ef9W+rtHZBp6\n",
+       "pojFgIQnQRD6yIWurZlELKR/+MP09tcS4kY75C4gRvSzLhZYZGdbyKQqRFI5xgePDOP4X1WlfC7c\n",
+       "s7SvtRXnZc+kG+4E4ZMtknXz5bWciyk6AwjABx/qUIeQ7NORVDsRB/cF0bC+QZIyq7SMIAjiYYaE\n",
+       "J7HkoC5si0QwyMwgF7tTbaYQCWnfiRNp7a8pxINgnWJ1+FQuNBkPZC+wyM62kElFiBj9HSU+xq+b\n",
+       "Pkr5XLId/dVLrguubnSjAx1oRSsCsk9HUp17U1cTBicH0fh2oxAZNxLR1IL+7hGZhp4pYjGgGk+C\n",
+       "IB5O0rU/yaB9SiYa5KRagptx9xuNJkXZagaUjHSOuRB1eJk6hpGGSJmyZVmKZKPe1Yh1DEEQxHKE\n",
+       "ajyJZQnVJRC6SZZH6nazL78f7Tt3Gs8z5V4lGRBOqimSBvJgU41cZjyQzW1oVMZKlg6aLdI5ZipR\n",
+       "K6O/ozIVGTMS/V2IFOJcJRv1rqmm6OqF/u4RmYaeKWIxIOFJEMTyYNM5wH0ZKH8f6L3PlmmpMb7+\n",
+       "vfcWtWGSaoqkATWZagluBvWzLvSmg6bS1CfdYz5MZFso5TJ6612NPIO5nl5MEASRC1CqLUEQywP3\n",
+       "ZeB+Nfu+6h3g1ue180i11i+QhYxqiqSBPNgMZv5mFb3poJlMXVwMT85ch6w8tKH0WYIgCP2QjydB\n",
+       "EA8P5e8Dw08AzmvA9SrAW6ytxrTW+3ws4giwfFS3e2G9TJeKmswCdW/VobWvFbWeWooiEYsCPYME\n",
+       "QRD6oRpPYllCdQmEIhcfY5HOr/7fwMF6Fi0EkueRxvJM2y9fVl7P81c9HqC/Hzh2bGG9TBc6DzaH\n",
+       "WMqpi/Q7anmQS88gPVNEpqFnilgMUhaeJpOpwWQyXTOZTHMmk2l7JidFEMRDRKaMJL3FLL32zg1t\n",
+       "caj3mLzzzsaNQGcnMDrKli8XL9McJp2GO+cCAbzu8+GtujqEM2JOSjyMZNIOhSAIgkgj1dZkMm0C\n",
+       "MA/gRwD+92g0+qHKdpRqSxBEHHndpN8fT2etqABu3EgvwqenLlKeQtuiUbvFx6ypAdasAZqbtee4\n",
+       "QPWhRBxum3Lv6lXMxD4kWN/QgP1a93eRWAxrmUUnAKAbzO81iJzztSUIgiBSQ0+qrTXVwaPR6O/5\n",
+       "QQiCWMIstEDinVr5sXk6KwAMDLBl6QiFYFC7LtJoC1i1MZNdO/l55qj4WYqoCTZum8LJ9S624vme\n",
+       "DwQUBfKy89vsBsBvUQDMeocgCIJ4KKAaT2LJQXUJKqSaspqqAWSy4x88qD4XuegLBlmkU7wsHfTU\n",
+       "RQaDwLp1gMMBNDai/Y03Uhsz2bVL1d+E0ETNl5PbppRWV8Pr9+PLp08vShRR7+8oPTYvy85vk3/O\n",
+       "VAuAfix0Q3/3iExDzxSxGCSNeJpMptMAKhRW/Z/RaPSk3oO8+OKLWLt2LQDA7XajuroaPp8PQPzB\n",
+       "p9f0Wu/ry5cv59R8cuZ1dzfaY9ETXyzCpmv/qSn4AKC2Fu0vvAC0t6d2/JMn0T4wwF6XlQEjI2gH\n",
+       "AL8fvth27e3twHe+A5/LBRw+LDT18X3pS0BrK9rn54ELF+B77rnsX681a4TrhclJ4LnnjI83NcVe\n",
+       "x8Rle3s78MMfwjcxAdhsaH/kEWB6Gr7GRiAYjJ9vLjwvS/g1F2wDjz2GtS+8AI71O9/B+OQkDp44\n",
+       "AYfbveDz+4fnnsNEXx8sDgeimzbhnStXYHE48B9PnVKcj575Tl2fAkqZ3+YL8y+gPdWfz1x5/R3A\n",
+       "5/IBh4H2yzkwH3pNrx/S15fp7xG9TvP15cuXEYoFFz799FPoIW07FZPJdBZU40kQi48Bz0cJmbLs\n",
+       "KC2NN99ZsQIYHIzPpakpeTqvz2es5jITpHq9xChdO/G5eDzA8DD7fqHO6yEgV305X/f5hNRZR3k5\n",
+       "wkNDANKrMyW/TYIgCGIpsJB2KlToSRCLDe/AalREpWrZEQgAK1cywXngALBtG1teXQ289550Llrp\n",
+       "vIuRlprq9RLDr11TU/xa/O53bF1tLbsW/HtKt9WNVldah9uN/S0tOSU6AWnqbNnjjwvfp1NnSp1V\n",
+       "CYIgiOVCOl1tvwbgHwF4ANwHcCkajT6rsB1FPImM0i5KNSMWGHEznbExZjHC8fsBm005cqoVXcxU\n",
+       "1DVF0nqmeOOg+/fjy6qqgI8+iq9fpPNaqogjh7nclVYOj8TOv/AC9u7enZNRWWJpQn/3iExDzxSR\n",
+       "abLd1fY4gOOp7k8QxBJg0ybg5k0gGgWeegqYnY2LzQpR+XdNDXDkiLq40uo0yyOHegkEgJMngXAY\n",
+       "2LEDOHpUeVx511mtlF+dh5YM0d0tFZ01NcCZM/Gxl4hoyiUSmu4EkLYFx0JYl/BIbHt7u/D9YqN1\n",
+       "3g+lpQtBEASxKKRd46l5AIp4EsTSxe2WiiqbDYhEWArppk3Ab34DWK0stdbrTdxfrNLKy4He3tRF\n",
+       "X7Joq1r9pLx2dHBQu5ZUw14moRx1IhbNdbuBz38eeO21hYtuZkCQsXGk53yuqWlRxUhCDacPcQuO\n",
+       "BqRkwZHrUdRkAvBcIIDekycxFw7Ds2MHDhw9qvueaJ13rl8XgiAIYmmQ1YgnQRDLlE2bmJ+mzQaY\n",
+       "RWXgBQXAgwfs+7VrgTt3gHv32Otdu4AbN9TtRuRs3qy8vRrydFZ5tFVWQye8ib92DfsAOHiNZWMj\n",
+       "20Ct5lJ+HAX/zcRyVI1objbJlCeizHM0NDio6S+pRTqRNIfbDbvbjVN+P9vfFoQD7rQsOPRYlywm\n",
+       "Sp6e/Breu3oVM7HGXf1tbYbuidZ5q61fdv6hBEEQxKKTqeZCBLFg8JbORJYYGGDCa3iY+VxWVgKr\n",
+       "V7PIJhBPq+UKjO+TrGmQ0jGMeIaK01lLSoB33wXq61ldqTitNYbg8zg8jPMOB3DsGNtGpaGQ8EzJ\n",
+       "j6PwRj1hiFSbM2WCTHkiytR0JkSamtdmSvu7AizSeRopR3X3BYNY39CwIN6een5HyRsoKV1zfg24\n",
+       "6ASYR6mRe6J13mrrl51/6BKH/u4RmYaeKWIxoIgnQTwsJEshFa/jAtPpZALP65Xml65ZExdxmzcz\n",
+       "EcnDf/JjBIPAI48AMzNsX7MZmJ833uWVC6OSEvb1+OMsInvxoqLgE97EA9gTDgOHDsXFoVKk6Ic/\n",
+       "BP7rfwWuXYsf59IlxbGNlqNK0EjjNUwQLNJ5GMqCTG8qrqwGd18wmHZjnHTFq2T/I4dTTyOOsRg1\n",
+       "l8mivvIIp9I159egrKYGzpUrYbbZ4GtuNhw9TnbeauudVnbsWk8tDu/JvQgxQRAEsQSJRqNZ/WKH\n",
+       "IIglzssvR6N790ajzz4bjY6OLvz+mWDv3miUtQmKRhsapOsqKuLrDhyIRquqotFPP42vf/ZZtq62\n",
+       "Vjr/0VE21ugoO8fi4sRjfPppNFpZGY3W1bHv+fZKbNzIxvB4pMfnx3nhhWjUYokfo6pKcZjp0dHo\n",
+       "6YqK6LTSnJWOI742VVXZu0fJ7kFWjheN/zZegMOJmR4djZ5uaIhOp3gt090/F/j13r3RHwHRHwHR\n",
+       "07L7/eazz0Z/BER/WVureo4LfQ06Xn45+uu9e6NvPvtsdODup9GG0w3R0emle/0JgiCIhSOm+ZLq\n",
+       "QmouRBB6SOgoYzByku7+mSCZpUlpKcDT+errgRMnpPvqsTsRn2NJCeuGqxWZkUcA166Np7pWVQG3\n",
+       "bqkfw2IBenpYRFYpksjnnP9ToNchjfqJmyZVVQFbt8avzZYt6k2Q9EQsk22jZSuTaeoAtIKl4hpN\n",
+       "U00zOqsW7XuYuqi+VVeHvtZWeGprE1JZExoo5QDUaIggCIJIFT3NhajGk1hyLEpdQmJHmYXd3wiB\n",
+       "ABNodXVMfHFU6hsBMEsSgHWrLS5O3F9cx6g2fk9P/PstW/TN6+RJJiRbW4GXXmLpswC7XhcuJO4j\n",
+       "PkZBQfx73hyntTVeO8rn3OtgDXhawVJPgYTjtH/nO/Fr09ubOFay48hJtk2ye5ANgki9NlLPuSZB\n",
+       "rcYz3drPdJDXVWYL/jtKrX7yXCCAU34/ZiYmMjrPdM9PnN5syc9fkGtF6IPq8YhMQ88UsRhQjSdB\n",
+       "6EHLhzLb+2uhZjUi7sra1MTsRBobEyNYR4/GooP5wK9/HY8GirvP8mNcvRqPjm7YADzxBBvP6wX6\n",
+       "+tjyzk7gsceY0ObHOnmS1YMCwIsvsqhqOByfwzvvAG+/DXz5y8Du3axT7vAw8w7l5yI+xtgY2+7W\n",
+       "reTCXqkBz8WLwO7dOLd7N0IHD+L61BSePHWKiYOkY+n4ACHZNmkViKaAG6l3uk3zwxK1Gs/F7C6r\n",
+       "1Dk2E8ijuBy1+kmteYjX/3zDBpQ/8QTyy8sx3tsriRTLj5vu+YnrTE/5/Vm5VgRBEMTDC6XaEsRy\n",
+       "QJyCWlERb/gjjqzJ033d7sRUSvE2HL6t2GZETkMD8NvfxkWh1RoXjGVlTNDydQBw4ABLq5WP6fEw\n",
+       "ISv36eSpu1u3xsfJywN+/3smRg8eZJG5xx9nIlosqkNgkc5LTwBDn8SbEnm9yqmFydKKldbJU1L5\n",
+       "ssWwV8kketKrk6CWSprNFFOtNN5kqa/pYDRFVWsefL3V5cJsLCrq8HgQHh6WHEN+3JmJiYydX7au\n",
+       "FUEQBLE8oVRbglguqKW3csTRqXffVU7nFG+Tn89EnzyVkm/DO9vyaNfJk3GBaLFIj81tR7ze+DIu\n",
+       "OgFgZEQqOgHWPVZsXQKwjrfDw2w+4pRaiwVob2fnIj7GF78Yf93bCwwNAW1tiWmhPOo39EncJmb3\n",
+       "bnaaPPrmcmHP6Ci7tsnsUZTWyVNSNexVFirdMxm65pCmTQyP9skFi9ryTKCVxptfXg6zw4GRK1cQ\n",
+       "XLcObxw4IJx/OvdFLYrLx3xt9Wqc2L1bGFuvxckju3YJ43qqqxOOwY/r8Hgw0d+P+UgEXr8/I0JR\n",
+       "j/1MLjzLBEEQxNKBhCex5Hgo6xK06u3EtYNer7JgEG/T2yv1q8zPB1auZFHLFSuADz4A1q1jPp6N\n",
+       "jUw8cubm4t8XF8dtR3p79Z1Lfj5Lq+Uit6aGRUXn59lru52dAxe/c3PA/v1MdOfns2W1tcBrr8XH\n",
+       "FAvm06dZRFX+Rnh6Ov691wtwAVBeDtfEBBxKolUPBlNSF7PGMZfmkA200njHe3sxHw4jGokgEgqh\n",
+       "v61NOP+k1yQAwAfWrElBX8lFGv8dxcd80NeHwc5OYWwuvruamhSFG1+//+hRYVzx91wI8uMWb9yI\n",
+       "wc5O9Le1wWKzZUTU6/mAYLk+R7nIQ/l3j8gq9EwRiwEJT4JYCmiJGz3RKfE2fDy7HZicBP7lX1h6\n",
+       "bijE6kD/6q+YX2dnJxO78nR5sxlwudjy2lomOsXRSCXsdpYGvHIlS4k9c4bNpayMiU++jcMBdHXF\n",
+       "o6ZmM4tmtrYykVtRARw7Jj3XYJCl6c7OsnNQEpGxiBEAdl5cANTWwp7s2mphsGHQYtY4pjqHpRLZ\n",
+       "0orS8fPm2AoLsfOVVyTrFK9JNxIbVIlQE2l8TFtxseLYWsJNPK7SMRxuN+xuN0LXrwNgfp/yuWfz\n",
+       "3uXCs0wQBEEsHajGkyAyRZr2E0nHtNlYF9fmZmlt4cmTrEHPjh3x2kaleciXfe97wC9+wYSaOILJ\n",
+       "sdlYNHN4mAm6SESaFnvlCvCFL0iXlZTEmw6p0dDAmgpFItLlK1YATz7Jjieu7RRjscTnqmRJw61K\n",
+       "ACYyz55VtjKRr1erZczG/URu2GgYncNSsNlIVt/J11lsNpjtdtz97W8xE3tW8yoq8Gc3bgCA+jVJ\n",
+       "0ZaGX+edr7yCrkOHEsbORB2l+N546+vxjMgK6VwggJ6WFkRiP6da986o1U0uPMsEQRBEbqCnxpOE\n",
+       "J0Fkimx4dSYbU94IaN06FqUUd53l+8jHOX8+3mE2GZWVbFwuBk0m4PJlYNs21txH3JVWiVWrWAQ1\n",
+       "EmFRzTNngPJyaQ0op6EBmJhg4pA3JyoqYo2GLBb2/eiougdmKMQsWaJRJtB37WLnyJsJFRdL12u9\n",
+       "Uc4F79UcIZcbzXCxdO/qVUFMygWWWhMejqaY5g2qDsO4LU0SMiHckt0b8XnDYkHl00/jwNGjqsda\n",
+       "Ch8wEARBELkJNRciliU5W5eQDa9OPdYeABN1lZVMKHHR6fEA/f0s0sd9K/k4WoKR87nPMcHHx/v8\n",
+       "54H//J+ZyBOnrirhcrHj8OjmypVM7D31FHv9mc+wSKd4XsEgE7o7drCU2vPnmVCdm2PnVVWlntLq\n",
+       "dgPHj7OIqtvNRKe4mZB8PSA0bWrfuTOxJnQhvVdzHD2NZhYLnq7KRadS2qfcnzIyNgaT3a66fQK8\n",
+       "QZXo1M8FAvjpypVoLi3Fm6ImRYD+31GZaLSU7N5IUovn5iQ1rUrkYursUknzzjY5+3ePWLLQM0Us\n",
+       "BiQ8CSJTGKz1S3vMYBCorwf8fhZJ5AKxpoYt37gxXqPpcknH2bFD/ZguV3wcHnHMz2ciko+3eTNQ\n",
+       "WJh87hMT0qZEnBMn2FwuXAA+/pgJzT/9CVi/nonRkRFW4zkwwIRvzEICRUVsH73Xlottp5PtpwRv\n",
+       "2vTee4k1odm4n6mi1dU4y8d0AFnrRJsuXCyVVlerdnQVi7Px3l7c7exEdGYGBVVVKYvpUHc3pgYG\n",
+       "MDM6itttbfjl9u2CQJqJWaAsBGLxKhdp+4JBOMrLhW3tJSVJBWUufsBADYwIgiCWD5RqSxALQZbq\n",
+       "BYVxe3pYWuuVK6xxT2kpizS2tbGI3ZYtrAGQ08kiiP/2b6xhj/xnc/VqlhobDrOmPkC826wSNhtb\n",
+       "z2svS0uBe/fY99XVzHtzbIy9NplYumttLYvO8vnIPTs54ppOjpGU195eFum8cIE1PlK6B7zuUy19\n",
+       "N1dYjLTfJZJqLE9XPRcIoPfkScyFw/Ds2JGQWpqptGE+DsCa+licTgzGnuNkaao8NXispweFXi9s\n",
+       "RUXYFwyiq6nJUH2lEkqpsnye9pISfOPSJRRqNQHLMXI5zZsgCIKIQzWeBJErZPpNPBdR4npOJXh9\n",
+       "43e/Gz++xxOPIsrhtZVGsNuBmRkm2jZuZOI3P599TUzEhaeY8nImfAGWUiuvNzWZWPMicQ1raSmL\n",
+       "tBYVGRPvSteK3wO1xkK5xmII5KUiymVI6hqRKAL11lUqNdoRL9vz6qt453vfA0wm+I4cwduNjboE\n",
+       "knx+fI6Tg4OK9ZVGGv6IRVrJli0Y7+2FxWaDtaAAvubmJSnatO6X0YZIBEEQRHYg4UksS9rb2+Hz\n",
+       "+RZ7GsaimJl+Ey9vLKQUHeRUVbH/+/qYaKupie9rVGh+5jMsPVa8j9vN0m7v31cWmXK2bWPCt7+f\n",
+       "RUDPnQP+5m+AN99kUVqLBfjwQ9Yo6cUX2TKbTdrx1oh4l18rhXvQ/txz8E1MZD4inSkWQyAvsihP\n",
+       "VVCII5Gl1dX4ytmzhsRIsmZFyZrvcIFkyc/HO1euoKayUlGwRiMR3G5rg62oCJGxMUGoqglXpWOq\n",
+       "XRuxSDvl9z8UjYIeloZIOfN3j1g20DNFZBo9wtO6UJMhiGUHrw8E2Bv0ZG94gkFjb+LVRC1ffu0a\n",
+       "e22N/Qi7XEwoOJ1MqPGGPvn5LNV00yb2emyMRSi9XuDWLePRzU8+SdwnFFKuOzSZElN5AVbTWVjI\n",
+       "hOf9+8AzzwA3brDvt2wBtm5lDYzKy+Pn1NwMNDay/fU0+xFfP17rWV0NrF0LHDmSeA/6+liklu+b\n",
+       "7TevRlOvuQfrQqJxzGxHmnhtH8BsTvQKin3BINpj3YvVonzJ5i4+LgDYioo0vT7F6b2IRnEvFELf\n",
+       "lSv45fbtcK1ZIxGx3vp6rG9okFisdDU1ITI2hvyKChw4dkwiVkdjP+viY6pdG17vmWyuelhKUcRc\n",
+       "bIhEEARBqBCNRrP6xQ5BEMuQZ5+NRoFotLY2Gh0dTVz/8svR6N69bDul9cnYu5eNDUSjDQ3xsUpK\n",
+       "4svlX3Z7/PuKimi0sjIa/fRTNp7JFF9nNkejFov6OJn4qqqKRgsLE5d7PNHoU09Fow6HdHlDQ+J5\n",
+       "l5dL14+Oxv/Xus7icerrlfczci8zjfz+LkF+vXdv9EdA9EdA9HQWzuHNZ5+N/giI/rK2Njpt8J50\n",
+       "vPxy9F8qKqJHSkqiJ/fvT9g/2dz5cX9kNidsMz06Gj3d0JB0PP71E5cr+k/FxZJlaueiNB/xsp9V\n",
+       "VUn203Nt1Oaqh2zf20ySznkSBEEQmSOm+ZLqQop4EkSqaEUx5RFRt1t/lKunh/1vsbBmP/39yg14\n",
+       "OG43iwTyZkI8lXTTJlY/KY48JmsWlAm4X+eGDcD4OFu2cyezU/ntbxPPw2pl5+nzxSO5tbVs/vx8\n",
+       "+DUWR72Uajd5tFJshaLHs9NoRDpdloFVS7YjTfuCwZRrMXnHWQCChYg4Ypps7vuCQfx8wwaEY3XQ\n",
+       "JosF06OjCIdCQkRRfswx/vPKMZsxK+psW1ZTA9eaNaoRWKX5iJd9+fRpSfOhPa++KkRL1a6NOPpp\n",
+       "lGSR3VQjofJ9M9FMCUjvPAmCIIgFRkuZpvsFingSGebs2bOLOwG9kUweReNRPnG0ct06NkZVFVsn\n",
+       "H+upp5SjmTU1iVFEkyka3bEjGt2/n0X3xOPYbOlFLs1m9XVWq/Jyj4fN4dNPpVHYhgb1iK04ullV\n",
+       "xfZXi3ByxFFDebRydJRdY6Vrq0DCM5VOtFoPWue2BMiFSFPHyy9Looo8OidELYHo0erqhDlqzV3Y\n",
+       "32JRjPyJI4L/XFER/dXOncLr/89uj/5vJpPw+qeVlZrXSGk+8mXZikJ2vPxy9Nd790bffPZZ4Vh6\n",
+       "IrtKc1AaS23fpRRVzQUW/e8eseygZ4rINNAR8SQfT4IwCo9ktrYmej+K4T6Q3E+TR+W4nUhHB6st\n",
+       "5N6Y4rG4JydnZoY1CTpzJl7XyYlGgQ8+YNHBq1eZr+fq1cxKhNd6pkqy6KhafejwMIt2fvvbrDMt\n",
+       "EI/sKfmHFhay2k6+3Ucfsagj//L7lf0redSwupptI24Y1NTEbF2Urq0WPGqq5x6nCo/eLnLtnNz3\n",
+       "0Qhi/8jFItTdjcj9+wCkHpX7gkF4/X546+sTmgudCwRwyu9P6rXJ/SxXPf00gMTI37gowjk9MICJ\n",
+       "3l5hDmU1NZIMg+INGzTPw+F2w+5245TfL9wL+fXNVoRZySdT7d5qzSGZ56Z8X6rNJAiCeAjRUqbp\n",
+       "foEinsRyQ289II+aiaOGxcUsMrl/P3vNay1raqLRF16IR9k+/ZRFL/Py4tvt3cu2KS6W7iv+2rEj\n",
+       "vQhnJr7E8yovj0b9/vh1euGFaLSsLDFa6vcrRwArKuLb1NdL1yWLGoqjoSUlxiKL6ey7hFCLFi4l\n",
+       "eGTySElJdIzXM2sgjrQ1ezyK0TmOOPInjuZCKrKxAAAgAElEQVSJI5z82Hw7cbRVHBXVinpqRQCz\n",
+       "FWE2UkurN1KsNJZ831yImBMEQRCZAzoinmSnQhBG0WszIbfxEOP3s26z3E+zvp6NK/f6fOQRVuPJ\n",
+       "sdniUUwlKxTuqcntVZLZrKSLy8W65g4Nsbns3s3sUTo6pNFJsfWJ0jXZto1FLXt7E+tfS0vjkWK/\n",
+       "Hzh+XN/cuH1NSQlw6RLr4quXdPZdQohtKOwlJXj+5s2Uo5eZ6IJqdIxzgQBGr1/HWE8P/O++i0KN\n",
+       "+yTuEhseHobV5RLqMPMrKvCtGzeSHlN8vfIrKjA1MAB7SQmqnnkGk3fuwOp0Ir+8HPd7ejD0/vuI\n",
+       "zsxI9tey+hB7cCbzAc0Ecj9SrXrRhRqLIAiCWLrosVOhVFtiydHe3r64E9CbJslTQS0W6fKaGmbp\n",
+       "8cQT7DVvgCNuOJOfz0TavXvSfbnoNJmUU13n59k6LjazJTpNJpY2+/77TFgODbH02lAIMIt+rRQU\n",
+       "MOHIhai8CQvA7FV6e5VTW/Pz2f+FhcDf/33ivoEAu07yVFye5nzzpi7hKDxTgQCznKmoWNaiE4in\n",
+       "PtpLSvCNS5fSEgrJUiy14Om+N48dSxgjWSpwqLsbdzs7MTUwgK5Dh3TPMTw8jIKqKjyya5ewbmpg\n",
+       "QHPe/HpZXS64N26Et74ez9+8ick7d4R5/+Ff/xWDnZ34/cwMnJWVMDscAKSWLGrw9F7eSCjVFGgl\n",
+       "5NdRfL9+vmEDpvmHOykgHqvr0KFFT79eriz63z1i2UHPFLEYkPAkiGzBxc+HH7JIJGfNGiZa+Xpe\n",
+       "myh+zYWYWh2lWhbB7Kz6OjW4f6URolE2v8ceA155Jd6xt6ODiWWnkwnuBw9Y7WkgEBd1YqxW4B/+\n",
+       "Qb3LKxfO4+PA976XOA+1etvYhwPnjL6B7+5mdaEDA4AOMbOU4ULn+Zs3NaOFWqRTr8eFC/e5FI/R\n",
+       "e/KkIGraX3oprWOKt//mRx9h/9GjyK+oUBxDSfDuCwbh8HgwOzGBOx0dGHjnHbzd2Agz94kFEI19\n",
+       "MFT82GNouHYN5bW1AIDI2JimOBbXVeoR8mqiXGm5fDx+LficeeffVKBaTYIgCEIvJDyJJYfP51vs\n",
+       "KeiDR0a3bQP27WPLeHRTvJ5HB8Sv+RvDmhqWbqqF1crScI1gsQD/7t8BzzxjbD8xMzMsxTYQYFYp\n",
+       "AItObt0aF40lJSxy2dKSKDxnZ5nAk4tw8fgck0L2hoYtid5InPBMLQObE72k2hxITZTxaJ3R8bhw\n",
+       "Ka2uhtfvl4wxFw7HN5R9oKJ1TPk85ds73G5868YNxTHUGu5Y8/KEbcJDQ+hrbYXN5YJJ9LPnrKzE\n",
+       "f+rqgsPtxnis6ZCtqAiwWJJ+CMLn+7PVqzES+zCotLpaVcypPdtKy+XicF8wKIhureMoXUsx6dx7\n",
+       "Qj9L5u8esWSgZ4pYDEh4EsRCoCasxIjTRouLgfJyoKwM2L6drd+4Mb5tYaF03y99KS6a9GC1srTX\n",
+       "O3dYdM8oXAQ6nUzw/tM/xUXi+DiL2ALxOsneXiDWfVQ4PpDo0Sm/NrwLLk9PlqNxXQ1HY/Tcp4cc\n",
+       "I11QAe3OuVy4fOXsWTxz/LhkDE/s/pdWV8NeXCwZR0s4y+fZ1dSEycFBvN3YKMzDSPfWc4EAwvIP\n",
+       "TiwWRCYmUPH5zwNgfp0N164J4/FIcmRsDLfb2pJ+CMLnO9nXh0hsfoVr10rm9otNm3DE7cY/l5eD\n",
+       "fwwjf7aV5q4mutU6/2pdSzG50N2YIAiCWBpQcyFiydHe3r60P6kLBFhKp7yRzsqVcRFYVgaMjLDv\n",
+       "6+uZTUplJYscfvIJS2HljYkA1njnvfeA/n59c9i5k0VSOzqAyUlj83e7gS9+EXjjDeDJJ5mwFL8h\n",
+       "t1qZcB4bAz7/eeDECaCxkaXDihsiVVXFrVPU0NvISYVwKITzgYBms5OMPFNq93UByUSTHy2MNsER\n",
+       "N+XRarAjR3z/Tvn9muOIz38+lkLK56lnf6Xj8vMTn4ccl9eLyIMHsNjtcK1bh99HIti5aRN6T57E\n",
+       "zOgoSqurke/x4LZoPvLrxq+rragIkbExxe2OuN2CfYyzshIVTz2FPYcPo6upKasNfhay8RGhzJL/\n",
+       "u0fkHPRMEZlGT3Mha7KVBEGkiZIY4XWJAItmrlnD1k9Px/fjzT6sVuBv/xb47nfj+4g72wIsZfbU\n",
+       "KUCclqhFV1d8fD3w7rglJezr+PF4nac8xXd2Ni6aOzqAF19k5x4IsPNqa2ORTj1RRR4JTREejVkQ\n",
+       "xPeVe4EuMDwyBQDnA4G0z11JyO4LBnWJeY44AmfJz8frPp9uYSy+f3qi1+Lz9/r9WN/QIMxTvr/8\n",
+       "3MTibV8wmHDtrCoZBVaXCzP372MmFqWc7O/HEIBP3ntP2KZw7Vr4jhwRrpv8WOLruvOVVwTh2NXU\n",
+       "hN6TJzEXDqN8xw6YYo3KLE4n6t95R4iois/7V088IdSWZgqj95wgCIIglKCIJ0Gkgt7oltg+pKGB\n",
+       "bXfsGBNgSnYoAEujjUYBbnBvtwNFRdIIZyawWlkHWpntAwAgL48dl0cyy8pYLWdzM7B2rTRt9sAB\n",
+       "do5K4wCsM+zatSy1d9Uqlnb77rvGO8bmQEQxKdyGRa+ozgKZjkylE63kGI1aJhvnl9u3w1lZCXtR\n",
+       "kaJwVTp/LjDvf/IJ5sNhWBwOFK5bh9Hr14WGRo7yckRnZ4XXJquV+Y2Zzfj6xYso27YN4VAIv9i8\n",
+       "GdOxrAT3Zz+LqTt3EOYfsqhhsaDy6adRUFmJ8d5eWJ1ODH/wAaZjNkkurxeutWsFOxa+zb5gUHK9\n",
+       "AGB1XR3uXb2Kr164IGkIxc9bbBGT6v0iCIIgiFTQE/Ek4UkQRuHRLC6+xD6VcrgY8XhY1HB4WJ/F\n",
+       "icnExCf/P1uYzcyCRUxBAUuhjUSknpvl5Uz4bdgQF8EWC/PztFji1i9btjB7laGh+DqxUAWSXzM1\n",
+       "5CI+195Up5kWrIaR9NlwKIRfxcSZTUWcGUGPkFWbn9Jy8XglW7ZIRJaeeWoJYaUU2Z84nZibmlId\n",
+       "UyzWABYRHf7wQ+HnwpKXhw1/8RcIdXfDbLPBYrfDbLPB19yMtxsb0dfaitLqajy4dStBhJosFkRj\n",
+       "P++O8nKEh4bYcptN6IDrKCsT9hNv4ygvR2RsDPOxTIbSbdvwlY4OxevEz3t6dFSSXkzRSYIgCGKh\n",
+       "IB9PYlmyKN5TgQCrwSwtlYpOi0XqUynfh3tCPvoocPductHpcsW/56JzxYr4cZKxebOx8+FjykWn\n",
+       "2Ry3QCkokK4bGmLndPEiqze129n53L/PRGdFBas17exkArW8nEVt+bUqLmb/p9oxNosdZzPyTOn1\n",
+       "dzWIWmMXpaY9DrcbBWvW4G5np2YnX62mP4C+jqVGuquKxxvv7ZWs1zMftXRbvu/bjY0J6aBzski8\n",
+       "ragIAJjHpsWC2ViKO++qW7Jli+TnwrNjB+5dv46Bjg70t7XBVlCAZ06cENJjC9etg62gQNJ1+Q9O\n",
+       "J1bX1WHl008L8y17/HHh+0dizYhKq6vhqalJ2MbqciE8NCSITgAoXLdOiOAq3ff9LS0oiHmH3v/D\n",
+       "H3C6oUFYr+fayklln6XCUjw38lwkMg09U8RiQMKTIPTQ3c0a/4yOSkXn3Fzcp1JpH+4JKar3SsBu\n",
+       "Z+mqzz0nXR6NxlNxuWC125VF6I0bxs7HYmHpu0C8ztPlkgrRU6ek+xQVMcHn9QK3bycK0127WO2n\n",
+       "282+HA62vLCQRX6vXEmvY+xD2nFWTWypCT69nXz1WM3o6Viqdryxnh4ATOjtfOWVhPHk++mZj5oQ\n",
+       "1uq6CgDmvDysrqvDN69exfqGBiY85+aA2VlY8vKErrrcAoVzt7MTY598IpkrFy7Htm7F1MgI7nZ2\n",
+       "Ijw8DJPdjrwVK+D7yU9QsGoVZqemkFdRgQPHjuHA0aMoXLcOD27dwr0rV5C3YgXcmzYhIttmfUMD\n",
+       "VuzaJZmDvaQEvuZmnAsE8HFzs6q36XhvL+bDYURCIfS3taFl82aEQyHdtkJiUtlnqbCcz40gCCKX\n",
+       "IeFJLDkWpQubuLHI1q2s02wsmpEQgeO2KNeuxZclS5edmWEi7s4d6fKaGvYlJhLRl6qrhLgJ0Nxc\n",
+       "vIGRy8UaBonSDYVtOEVFTDz6/ez/UChudQIwr1K53QmvQRsfZ+fn9TLBuHkzixwfOBBPT+U2Msmi\n",
+       "D1mKKAK57WemJrbUBJ9eX0XDVjMa8yvZsgUtmzejubQUbxw4gIJVqwAwK5GuQ4c0z0vPfMTCVRy1\n",
+       "ssSebaV9v/7BByioqsKf/f73ePbNN1Ho9WJ/SwssdjsAlg5b+vjjgs2KUhOhsscfl8yVC5cHfX2Y\n",
+       "FXV0js7MYHpwEJaf/xyh7m4MdnZiemAAXYcOCdHoqbt3MRMKYXpwELfffluyDbd8MQFwxLIdzHY7\n",
+       "ih97DG83NmL0+nUhRZcdUPp7RT73qYEBnA8EUrrXmXo+cpGleG65/DuKWJrQM0UsBlTjSRB6CIWA\n",
+       "l15ib/Sam5n4UavpE9cickwmZi3yySdArKmIhKoqZoUijjiaTCwyqdSAKBWS1Ys6HKwrrrzhkdnM\n",
+       "ROLFiyyiye1e/H4mNF98kY175EiiIFRqtiO/NuXl7HhcBOdi7WaOotcqJp39jdSXyu1G8isqMDUw\n",
+       "oLvekM/Hkp+vWvspns/M2BgGOzsBAN76eljs9qT7yml7/nl8+qtfwZKfL1iU8C645wMB/OnUKUFU\n",
+       "euvrkb9iBULd3Rjv6cHM+LiwjxJevx/DFy/iQV8fYLFg5e7d+NKJE0JNKMDSaedmZhCdmYGtuBjf\n",
+       "vHIFZw8elHTlvd3WJqk/zVuxQmhK5P7sZ1F//rzkHMOhENpfegl333kH04ODcHg8KN64Edb8fNhc\n",
+       "LviOHNH9rKT7fOUyy/ncCIIgFgtqLkQsS3Lee4oLLpcrMYro97N1cusTLvz0UlwMrF4N/O536c/X\n",
+       "bGbNhPr6WK3m+HjiNuvWAX/8Y/y11cpE5NGj6hFIJWHOrw3AoqAPHsS3V+sGuwDdbHP+mVokjHS1\n",
+       "5Y2DAFa7+MyJEyn5SSY7pnhdXkUFpmXCVu98zwUC6GlpkYhHuUB+48AB9Le1wVJQAEdxMaYGBxHV\n",
+       "8SFQ6bZtKPrBDzD5d38nCGM+nz2HD6P9xRcxcOFCQiOi9Q0NmJmYkDRzCq5dK5nj6ro6mO12IBqF\n",
+       "r7k5QZT3njyJ8L17sOTnwxzr3jscs06iLrdLG/odRWQaeqaITEM+ngRhlEyIHLlnJaekhKWsyhv6\n",
+       "FBayqKER4enzAR98YHxuQLyTLY+Azs+zWtSyMmXRWVLCmgmJhefsLDu3DRuAJ55QvlZKHpzBYDxy\n",
+       "zJsYVVczuxWlqCmQE/6YDytGUhL3BYOs5lAkivQKHXEk05wkbVY8nwPHjqHr0CFY8vNxyu9nkcjY\n",
+       "Bz1WlwufvvEGjhQXw2y34+sXL+LSD34giZZyQWcrLkbl008L0UA+F4vNBntpKWbu3cOk+AOSGLai\n",
+       "IkTGxmCy2WArKGAdb/PyMDkwgIvPP49NsVReACirqREE+DMnTkhEOgDAYkF4dBRf+PGPcfLpp2F2\n",
+       "OPB2YyPMIp9dW3Exwvfvw15UhPzycpzy+yWR3d6TJzEVy0iYjzVUMpnNqteSIAiCIBYaingShBgt\n",
+       "yw49wpRvY7MBV6+y1NqSEuDSJVbfqGTtoGRrokZNDXDmDGtGJIqoaKJlzbJiBZurON3W7QYuXwa+\n",
+       "/e14pJIjjlimkiKr134kB/wxH1YynY6rtq0kkrliBR558smEiB4AnD14EH9qbUXZ44/jwNGjCVFO\n",
+       "NQqqqjA9MiLYqpgdDsyHwzBZrfj6Bx+gbNs2YdufrlwpCDie2spFJsAEoMlqRelnPwtHSQmmh4Zw\n",
+       "N/ZzaLJaJVFRS34+rE4nympqhPny69qyeTOmBgYklivrGxowOTgonI/JaoXJbMbKvXsRmZwUIqgO\n",
+       "jwfhmKURj2Q2l5YKPqQAizq7N23C7bY2eKqrsV90fIIgCILINGSnQhBG0bLs4NG31lblTrYAcPIk\n",
+       "26atjY2zbh3ztvz2t1kjoXT53e9YZ13elZajZbnCRSfvNiumpoZ13m1oAP7wB9Y8ye9nUU6vl4ls\n",
+       "bu1iszEhnZfHXqdqb6K3WZBWN1u9zYkIw+jpamukQ6hWJ14ArDmP3a54zL7f/AbhoSH0t7Wh/cUX\n",
+       "AQDjse65ptjzb5P/XAB40Ncn8fLkNiXR2Vlc/Ou/lmwb5n60iDcVWl1XB0dZGfJWrEDxpk2YGRnB\n",
+       "QEcHbre14e4777CNzWaJ6LQVFqJ02zaER0bQ39YmOV+H241v3biB9Q0NEsuVPYcPS65FdHYW8zMz\n",
+       "cLjdsMfOy1NbC091tWQfACiPNfsy2Wywud3I93iYt+jwMG7Ljp8NlqJFCUEQBLGwkPAklhwZ8Z5S\n",
+       "Eyvl5eyLv+mVb6fHS1KcMtvWBty6xSKTra0sssnhvp1ud/JIpJxIBHjsMfZ/XR378npZ859kIq6m\n",
+       "hglKufCsq2Odeg8eZDWpxcXAiRNxaxQ+x48/ZgLwc59jacQjI6wpUrajkFoCVc+HARqQn1nqGEnH\n",
+       "TdaJ15KfD4BFFGGx4KcrV6K5tBQ/W7UKJ3bvxlt1dYLnJgDBN7Mg1j05OjeHgqoqwS7FKvbFTcLA\n",
+       "hQv42erV+PXu3Xht9WohTRUAzDYb9re0YPLOHYRHRjA9OIiJmN2Kp7YWc+Fw/GdXlLHwMYDI+Dju\n",
+       "XbkiLLv1m9/gzQMHEA6F8ItNm/AvK1bgj8ePY25qCt76eqG+dF8wiLyKivg1KyhAeHQUe159Veis\n",
+       "uz9muyKuSXVWVsJRXg6r04lIKITbbW2CpY3V5UJ4dFRTEKYjHsmiJLvQ7ygi09AzRSwGJDyJhxM1\n",
+       "sdLbCwwNxb055dvp8ZLkNiMFBSzCyaMgJSWsO2xVFbBzZ7zx0NSUMeFpNrNx29rYMVatYqK4s5P9\n",
+       "z43sucjlgvPMGSYoRbVnOH8eePNNdt5a4o0LQB5Rqq0FPvoo+6mvWhFNPR8GEFlDr31Lsm0dbjfK\n",
+       "tm8HAETu38fttjZMDQxgZnQUk/39GOzsRF9rq2CBUlZTA3tREV73+TD4298K40zevYtjjz+O6dFR\n",
+       "WJQi+3LMZoRHRjDZ14e7nZ2sC62IW7/5DX62apUkqln86KOCUNT6uXV/5jPC9/y82l98EZMDA4hG\n",
+       "IojOzuJuZ6ckwutwu7H6S1+CKXausw8e4HZbG7oOHYLd7cYpvx9vNzYK6c9cLPaePInw0JBQu2p1\n",
+       "ueDeuBGOsjLMTkzoinqmIx6XokUJQRAEsbBQjSfxcKJWNyhf3thovL6Q1y52djKLFIClwX74IfO7\n",
+       "lB/nD38wliJaUsIijnxOmzfHbU4A4K232PHffBP4/vcTayh7e4Hdu4ELF4Af/ICJ62vXgOFhfZ1l\n",
+       "X30VOHRIuzbTKGr1s1p1t3prRYkFwUjNpxjecMdTWwuH243b4sZcgGA5wjvl8hpJNRxlZZidnITZ\n",
+       "bsfc1JQkkmkpLMScqJGWvDZTC0teHsp27EDo+nVWV2mxKPrrmmw2qe8mgILVqzF5545wPJPVCmtB\n",
+       "AeYjEZisVljsdhQ9+iiGYt1oAcBeUoLnb97EKb8/oWuvvMbV5nLBZLMJ9Z6Ttgo4IwMoqanFV88k\n",
+       "/3BAfA/0fJAghixKCIIgHm7IToUg1FASK4EAcP060NMDvPsuE2Xi17GUPmFbuUjiy3p62La8FpPj\n",
+       "9bLurVy8Pf00a85z7x6Liqq8eVWkspKJRbebpc6Ka0erqlh6rx7Eoq6qSj2CqSX+MoHaMai5kCp6\n",
+       "RF6qQjDVfY1YsIgRCxcAaH/pJfS+8YaQMbC6rg7PvvmmsL28mY4SjrKyuG2JqGkW9xi1FBRgTqFj\n",
+       "rRgtUWp1OmEpKEB4aEjzHIF4Y6PkB403AjPZbPBs3w5HaSnmIxH0t7UJwrCrqQk3jx1LuA7cambE\n",
+       "VYsfThzDN3AIE/WHETyhz0uVxCNBEARhFGouRCxLMlKXoFQ32N3NopQDAyyiJ38tRilVly/r62P7\n",
+       "yQ3mx8fj+xw6BKxZw7rI8je1zzyj3PhHic99Lj73WG2cgOjNuSbiNNVkabPZSmcVp9HGbDQSjqEn\n",
+       "vTlNlmqti57UyHTSJ1PZVy3lUqt+UNzIyOF245njx7Eq5jFXVlODL772mmR7TyylvWjzZpjz82Er\n",
+       "KYGJP0MxTOKGW7GfM09tLfzvvov1DQ2oePJJzfOp2L0bXr9f+VwLC1GydWuC6PwYTLACLOXVHEub\n",
+       "NVmtmJdFQBWJRuGsrIS1oADRSARDXV1CqvH6hgaUbNmCU36/ougsq6nB12Ln9+6u07gHLy7VtuD/\n",
+       "bU7ebfh1nw9vNzYK9jTUJCi3WKq/o4jchZ4pYjEgH09i+WLUk1Murhobpa/FY167xl57PCyddvXq\n",
+       "eM2mEtXVbFve6VY8Pl//2mvA+vXafp5mM0u1fewxJlwnJ6Xr//qvWS2nEvJrwj1HtdJU9W5nBO7J\n",
+       "yQV6fT0TmPJjKPmBLkNSiS7qqatLp/YulX33BYOKUTMuYgHgfCCgGAnl12CspweFXi+s+fnw1tcr\n",
+       "WqscOHoUv9q+HfmlpZjo6UGEC7BYtNBRVobCdesQDoUQnZlBWU0NXGvWwNfcjK6mJtw5fx6zU1OK\n",
+       "6bBiBi5cQP6KFYp2RLPj47h39ariftHZWZjtdsyKfi8ki5yu9Plw97e/xXw4LEQ0g2vXSra529UF\n",
+       "a34+ImNjgr0LwDrorti1C9aCAsGP1O5242C/HzsrnPifjwXhFnmUyp8x8b0RW7Wo3SeCIAiCSAWK\n",
+       "eBJLDl8sCqIJtzVpbQVi1gtJkUfWlCJtPKo5PMxSUzduZNHNvj7lOk2Xi0Xztm1jTYQqKoBjx+Lj\n",
+       "+/1McJ09y5bxxkQAi2TyRkDV1ay2E2DdMzs6WG3o/fusu60YU5IsB3mkVq+lid7tdHIuEMDrLS14\n",
+       "6/59hAF2bs3NGT2GEczB4KJbQaQSXdTT2MdI8x+1fXmETc/14aJHvr0eEcuvAW/2c7utTdVaxeF2\n",
+       "o2DNGtzt7JTUbyIaRUFVFdybNmGoqwvRmRlYnU5YnU7MxbYLdXdjamAAkfv3EY1EYLLZhAilnOjs\n",
+       "LCb7+1UbCc2Ju+Da7XCUl2MjWKSTd9a1FRezDVQsjyxOJ8xWK/7s44+Fe9XV1CQRl6b8fMzEGiEJ\n",
+       "y2PjRcbHhSixWEwOd3bAM9CKq4cCkuurZmejZtVCLD66/+4RhE7omSIWAxKexPJFHDlMJsY4bjf7\n",
+       "8vuZWOTL+Gu5ncpHH8U7vPL/a2rYtqWl7PXEBOs829ubmLbrdjPLkhUr4sf48Y/Z93Y7E6ozM6ye\n",
+       "8+xZZpciPh/xG+Gysvjxi4rUu8DmSAfYUHc3Bu7fRx+A8zYbcOnSotZu5oIVRCrRRT0+m3q20dp3\n",
+       "vLfX0PVRup77gkEUrlsHi8OBtxsbFQUsvwbci9Ph8aC/owPNpaV4I2ZFwjkXCMQ72ooEXWl1Nb75\n",
+       "0UfCGJ7aWpTV1OBurDPuzyorMXL5srC9yWoVOsymhKgue35mBiaTSegkO3PvHqxOJ9ybNiG/ogJ5\n",
+       "/OdUPsTkJG63teGd//AfsL+lBV1NTehpaZH8jDsKCyXXxl5SgpW7d7Pr5nJhWmaXovQ8cXsVW1ER\n",
+       "dr7yirCt+MMJJasWgiAIgsgEJDyJJYfuugRe+1hYCPz93+vbRx4R1LJT4a+vXmX/nznDaiy5wCsu\n",
+       "Bl55Jf7a5WJpsuI33eJjHDrExGhBQXw9r+cMBuMNjsSi0+Vix+XHF1ujbN8uFaELUC+pB+FNcUkJ\n",
+       "9nzyibRxUwYw6kd4fWqKzWcRozzpRCazjVFRrLS9OEKpJmD5NeBenCazGdODg5gZHUW/zA4k1N0d\n",
+       "j3TOzcFst2N1XR2+cvas4IfpWrcOZocDoY8/Fvabm5oSLEdgMglzNYJadBQApgcHcZU3NAIwGw5j\n",
+       "qKsLUwMDmB4cTD5G7Oc61N0dnyOYsCzZvBkurxfuzZuRX1GBb1y6hC+dOAFHeTlmJyYSro/S81QY\n",
+       "+zmLjI2hS1S3Lq+vTfWDCiJ7UD0ekWnomSIWAxKexPJl3Tr2//h4YnMgNd5/n/1vtQL/5b9II4T/\n",
+       "f3v3HhzVeeZ5/PdKfdENqYUkLMsYGceY4AQb2fgaKGvWJo4xDp148SSe3eCdyqomrtp1qiZ4s5PL\n",
+       "TtXEtalJpWaSmirXpioLGSfEBmKIMSYuZK7GNg4bcBJDjA22bAxCCCSEuLRuZ/84fY5Ot7p1aZ1W\n",
+       "q8X3U0WZVp8+5+3Tr4Ueve/zPMXF9mqkN5fzqafsPMtFi+xcz8ceswM8J5A6d86+9tq1Uk2N/drm\n",
+       "ZmnOnNSrqM4P9c6W24YGafVq+++RiF0VN1l3t902xdmm6g1yz57NbGttlrk/FB87prDPQac09hXM\n",
+       "W7/3vZwHfZP5B/7kIGakwD5dED1SAOvcg2n19bp/3bqEQjzBioqE1yQHjAM9PTr9hz8knKts1iy1\n",
+       "7d3r5iwOYVmD21a9uyIKhv+ncUyro2kqVYeSPufK+fPVuGaNJM/Kb0WFVFCgvu5undy1S93Hj7tB\n",
+       "7Kb4DoiahQsl2avDF06ccD+T5Pm0u6lJHYcOSbILELGNFgAw0WingsllrAWBhpNJG46KCsn5QdRp\n",
+       "L+IU1YlGB9t91NZKhw8nfs2xYoUdDCZf2xlPWdlg8BoMSvfcYz+/Zs3gGJPbvXiLGrW326+74w57\n",
+       "+27y++vstAsPeSttLlwo3XSTvRrqx72d5MbTjxAj86Nlymg+k5eWLNGJ5mbJGLdCbe3nPqfPx4tn\n",
+       "PTd3rmKeVUTJbj9SXFOjstmz1b5//8itS5IU19YqMneuTib/f+2nggLNuPtuheO5nwWhkFsUSJJ2\n",
+       "rFypo88/r8LiYvUOs2I/bfZsldTVqevoUQ309bkBdn00qgc2bkw49tmrr3b7nia3pgEAYLxop4L8\n",
+       "k6pNSaa820qfeip93qOXU8ynpER67bXEFULvCktrq11YyGnf4OR4OquWqba0Ol/z5mr29trvNxRK\n",
+       "XcnVCTrXrUssatTWJr30Uupts5GIPffB4sIAACAASURBVA7JXjFdvtw+xrsFN/neetuaTIEWCpls\n",
+       "Wx3r9tx8Nt736qzIhaur1e1ZZRvJWFd1S+vqFK6pkQoKZPX1yerr08ldu7SnqUnhSERfefddlc2a\n",
+       "lbBt1ert1cUTJ9S2d2/KoNMEg27Rn1Rm3HmnHdiOJi98rJyV1IEBte3dq/Y//EHn3ntPJ3bs0HNz\n",
+       "5uh8S4sk6XxLiwZisZRBp/NeqxcuVO+FCzq1d68utbaqx3PsqddfV6yzM+Fz7otvJ5fktncZyZX0\n",
+       "/wQAIPtY8cTkMopVyp07d469Gltj4+DK5IoV6dtztLTY22Zfe21o3mFnp10IyFtFNhCwCwlt22Zv\n",
+       "d03VbiR5FVeS5s2zg1fJDg63b098nfc1XV32yqZkV389diz9sc5KZvKKqTT8vR3t/Zmidu7cqa5/\n",
+       "/MeMVvHy0XArlqNp6+KsXHbHA7zk84ylNUy6Y3c3NenounUJuY6SvSW1uqFB51taFCgpUW9Xl045\n",
+       "/384Cgrs6s/Ofx2FhTIFBWnbpwQjEVV+5jPqbmnRxZMn026TTSdQXq6+ri69K2muJBMKSZalUHm5\n",
+       "Ztx5p/p7euwVXA8TCLhbdwMlJaq+/XZ1vPPO0O3BhYUyxmjGnXeqqLpajWvW6NfXX+/28fS2QZHs\n",
+       "1dDLZ8+6969oxgxdbmtTVUODlm3fPqrgP9OVbfgvo3/3gGEwp+A3VjyRf7JV/Ga01Vzr66WPPx4a\n",
+       "dDY12dtq45UlXX199uqjN8cyWaoWJocP2yuR0ejQoDP5NfFKlKqsTF39NdUqcXIuZ1OTHcB627lk\n",
+       "cn+msPH0u8w3w73X0eTHOiuXIU/lWO95xpJjm+5Yb4GdYHm5CouLFaqsVMlVV+nc0aPua5xKrdMX\n",
+       "LNC1S5cqVFXlBptOFdnpN99sf72/P2XQaYJBFc2YoYJgUG179+ri8eNu0BmKRNxKsl4F4fCQr/Wd\n",
+       "P5/w2OrpkdXbq9iZMwqWlmrJ+vUKJ1W2teLXKSwpUaC0VK27dtkBZHzFtaCkxF7l7O+X1denU3v3\n",
+       "ui1mquO54NMXLNCX9+9XcW2tJPvzKKmrc+9fqLJSX3rrLV2/YsWog04p9TxhFRQAkCkCT0wuoyh+\n",
+       "k9Fv6JyA9qabEtujjJYT3J09a2+vdbbYSnaPzeEClVRBXSRir552dAwWJHI0NdlVciV7NbW+3g4Y\n",
+       "DxxIXf11NEHjkSND27l4TZJqt7nS2Ng4qavK+m249zqWADzTIkKjud75eEBpAgF9cc8e1dxxh3o6\n",
+       "OvRJc7O7yjp9wQJF33xT169YoYd37NCDW7YoGK9mHayo0EPNzfZzu3YpEP+6kyvqLSBk9fbqclub\n",
+       "Yt686Pi1p99yi6obGoaMO5hqu258d8/cFO+z4bvfVTgSUc0ddwx5TWFRkR49dEgD3qJF8XMNXLyY\n",
+       "UMzIWxhoSbz1ycM7dmhafb0ePXzY/Ty649t2TSCgh3fudAs2jWVup/p8J0ProSsRK1PwG3MKuUDg\n",
+       "iSuDE9AOl+c4HG+l2N5e+09dnb1quWPH8MFauqAuXT7rkSN2QCrZqx779qUNGHc3NenFri69XFur\n",
+       "WKqVzOTxpwtOJ0m121yazFVl/Zbuve5ualJvV5eKa2u1ZMOGEe9FuvOk69mZarUsXfBaGv8li9XX\n",
+       "pwM/+EHKticXT5xQqKJCoUhEr0SjennpUhVfc40kqffcOR34wQ/c8V08ccI+X3+/CouKFHT6YsYD\n",
+       "SG/epyksVKiyUlZfn1p37VIoEhmywhnztEwZjd899JB7fwuKitxczekLFug/nTypA08/LSctxRlL\n",
+       "MF58SIWFUiCggnBYJhRy72ny/Q9HIgpFIlo3b54uOO83fv9SGWn1MtXneyXtDAAA+Ct9MzJgkso4\n",
+       "L8G7ktjQMLYtpWvX2q/v6LDboYylUq4T1HnH4VSolYYGg94gMRIZvF6K8XYeOaLW+OrPnlWr0udg\n",
+       "OeNPlYMKcl3iOo8ccfMl9w03n1JIztUsnTXLzQ/c09Sk+9etc1fLvF9zgptkqbbxPj9vni47udGy\n",
+       "e2b+e02NpMEWJ0We7aaLf/Yzd1zeXM/+y5fVf/myJNnVZSMRxeKrqZIdnDqBZvXChWpcs0a/W7Zs\n",
+       "aC5pGk6Op1fJNdeo49ChIeeYdt11Ckci9tbiePAXKC7WNffdp3t+8hO9sHChm7s50Nen9n37Eu6f\n",
+       "974X19Tow9/+NiEvNlRZOSRAdF5z9o9/dHNEnfOl4r3G4mee0b5Vq9zKxGPJ50Xm+B4FvzGnkAsE\n",
+       "nrhyeFcSZ80aWwDmBI+pivZIY2sD46x0SnaF2uQA1hskOudOEzAG4tsRqysqtPhHPxp5/JOFn21z\n",
+       "4JvxrGYlB5WpzjXS+Xc3Nall82b1x2Kquvlm1Uejaly9WvueekqdR46o6rOfVcGtt+r0/v263Nbm\n",
+       "Vrt1FRaq4lOf0lV33qlQRYVeiUbV9sYbGujpGfY9hyIRdZ84oYJQSAM9PQpXV2tafb2MpPIbbtAr\n",
+       "0ag633039QmSCxilcXrfPhV6tvta/f12UBvv0+td0b18+rQ+2rJFH//udxrwFDgKlJWpr7tbgbIy\n",
+       "xTo6FOvsTLjv4ZqahKAzWFGhRw4cGBIMel8jjfx5e49P/oVEql8mAACQClVtceXIpK/naHmrwobD\n",
+       "dkB1223S+vVDr+PjOGKLFmnP3r1aLCk8nmq0Ex0IXuFVdCcrb59NJ9gb7UpWcu9USQk9O3c3Nanj\n",
+       "0CF1HT2q6JtvalpSvnKqKrZOJdXk6qrtBw+q6/333TzI5ODv+hUrdLGtLSG4SiUYiWggFlO/p9VI\n",
+       "6cyZKquvd1cmvVVnTTA4WJyooEChigrJGPWcPatQZaWqbr45ff/PggIFy8rUG+8TXDpzpv7jn/7k\n",
+       "3tdYZ6fWzZvn9tpMJVRZqd7ubncM4epq9Z4/b7eNKSxUqLxcPR0dCkUiuuqee/QffvWrlJ+b81lV\n",
+       "NTSobNYsNa5ZM+znm/zZeufGQG+vTjQ30zMXAK5wVLXFlWe4fpRr10qzZ9uBYXJBn/FyVisCASkW\n",
+       "G9ySmyqP1MdCPuHyct0vKTzaarTp7o+f/VNHgyq6WTHeiqPenL6xFpFJztVMzg90tvFeam3VvhT5\n",
+       "yt4qtlJiEZ3kvqFdx44NBp2SwtOnu3+fvmCBCouLddbZVl+Q+p+5wqIiTf/MZxKCTkn64muvuf00\n",
+       "vSuqocpK1d177+CBAwPq6ehQz9mzCpSUKHLTTTLBYPoemQMDbtAZLC/XF197LSFIC0cievTwYYWr\n",
+       "q1O+vKCkRD0dHW7QGSgrU6y9fbBXaX+/ejo6VFJXp69+8IEe3LLFDfjT5dUu275dD2zaNGKwmPzZ\n",
+       "eudGsKzsiinKBQAYHwJP5J2dO3emf3K4ACoSsbfY7t3rT4DlDeKeecYOJr2VLisqsl/IZ6xBbLr7\n",
+       "M9GB4CSrojvsnMojflYcHeu225GKM410Puf5YEWFrl26NKHtR9f778sEAurp7LQr2nq2n1bcdJO+\n",
+       "vH+/yurrFaqqUlF1tbqOHh3sb+kJSh2FJSV69C9/Sdkm5fUnnxxcjY2vooYqK/XIgQO6f/16t2WJ\n",
+       "iVe2DpaXq3L+fLXt3asTzc0a6O1Vmk25dpEgSb1dXUOC791NTVo3b5560vzCIOQUQSotlQoK1BcP\n",
+       "mANJ1XVrbr894TNINSfGUkhrd1OTXolG1dPd7X7N+1k2rl59xRTlyqWp8j0KkwdzCrlA4ImpJVUA\n",
+       "5Q0QnTYofgRYmzcPBnFPPmkHkwsX2s8VFtptVrJtrEFsugBzogNBquhmhZ8VR/1uLzPS+ZznH/vw\n",
+       "Q3e1znGprU1WX5+7+ljozYc8dUp7vvENlcycqZ4zZ3SiuVltb70labC/pVNwqHL+fJXU1enRQ4c0\n",
+       "rb7eLoI0c2biQIxxA9LC0lIVzZihRw4c0LT6endV8voVK1R9662S7CDSWSFNDgK9XwtFIiqOF0IK\n",
+       "VlRIhYXuSuSOlSt1dN06XWptdd9jcW2tTHy11gQCWvKb3yhcXa2+CxfsgDgefAfLyhSeMcN9v41r\n",
+       "1iRef5xzIlXgeiW1HgIA+IccT0wNTo5iMCiVlkpr1gwGNd58wuXLpVDIn+qu06cPFiuKRqWNG+3t\n",
+       "q3PmSPEqlJMufzFdcSRMCd4czYkOCLJZ3fQXNTWKtbersKREdY2NGujp0SfNzW6xHUl26yHLSsj3\n",
+       "rI9G9cDGjdqxcqU+2rpVVbfcoiXr1yeMzZs/agIBfeX997X/+9/Xe88+627nrV++XA9s2pTwPjve\n",
+       "eUex9nZ7a6wxQ3qASlLRjBn60ltvuVVgty5b5vYg9eaOhmtqEl5f1dCgZdu361f19erz5IRWzp+v\n",
+       "41u3uq8tLClR/Re/qAsff5wyd3Z3U5POxvNqvxR/bqyfU3J+J4EmACAVcjxx5XC2kDY324Gl94cj\n",
+       "7yrfmjX+rbTddpv934YGKV6ZUpGIdPvtg9cb6wrDcDmqfmClcUrLZS9Sv7b5pspJ/PL+/SqdOVOP\n",
+       "HjqkB7ds0f3r16ts9myZ+NbVwtLSwZzPeNBZvXChQuXlerGxUe/98peKnT6tE83N+vWcOQn5r95q\n",
+       "slZfn15/8kl7BdPzC9OBeF6lUwCpddcuxdrbVRAKyerrSxl0StJVd9+tA08/rYttbXr1scd05sCB\n",
+       "hGtJ9kpo1S23SBq6zbgwni9aWFKiqxYtUk9Xl4pqa7Vsxw73flw8edLNnX3h9tsT7lvnkSNq27tX\n",
+       "lz15tePN3QUAIFMEnsg7KfMShstRzNY20vXr7fNu3z60HUqm15voIj+QRK7LcEZbsCiTLZ3ec+9Y\n",
+       "uVIvNjbq2IYNQwKjafX1+puPP3ZX7F6JRtXT2ekWIwqWlrrnrJw/X/XRqB7atk3nW1rs1UxPxdue\n",
+       "9nY9W1en3y5apJeXLtXiZ56xV0vjBnp7E4JRSTr7xz+6Y3MLIBUWaqCnJyEnM1hRoaIZM/SupOC0\n",
+       "abrnJz9JCPRStXW56p57VFpXZ/cNNUb9nmO8AffFkyfdIPKdn/7UvR/OWANlZYqdPq3jW7dq3bx5\n",
+       "inV2ZtTSJlkuf5mBQXyPgt+YU8gF+nhiavD2vkz+ASlbPSzTnXc816PaKyaZ0fZpvG/t2jFv803o\n",
+       "QVldrZizRV3pA6NUPSiXbNig1598UjJGjatXu9d3giynb6Zj4NIlt13KvlWrFKqocAPIglBIjatX\n",
+       "6xc1NVJ8VfLC8eO6cPy4+3pv65SqhgZdbm9X78WLqm5oUO/581Jbm3rPn9e+VasSAr3zH3yg2Jkz\n",
+       "g9uCJX28dav794FYTCeam/X83Ln663ffdQNuSeqK9+wNlpfrTk/PXue+X+7o0InmZknSpdZW7Wlq\n",
+       "SvmZZPI5AQDgB3I8gVxK7p/pfM3vHMyJ7tOJKSObOX7ec4cjEX3S3Dykt2RyTuKG+fN14fhxBadN\n",
+       "U+3ixWl7VUqDOa8N3/2utj74oPp6etTjCW5DlZX66rFjal6xwr32su3bte+pp3Rs/fohFWZDkYiu\n",
+       "vvdet4CPE8C9Eo26wXBxba0utbYqXF0tU1CggZ4eFYRC+lK84NGLixbpC1u26KX77ksItJM5PUwd\n",
+       "v120yA2Wk59z3qvTB9Tbb7Nl82b1x2Kqvu22IfmtAAD4ZTQ5ngSeQC55Cx9lsxDRRF0HEy6bRX2k\n",
+       "7BYs8p5bUsrreIv/XL9ihbpPnHAL9JTNnq2yWbNG/d5jnZ16ft48XW5ttStPO/82FRTomr/6K3dL\n",
+       "qfeajlAkokcOHkwo3iPZ9//Yhg3q6eiQCQQUKClRYVGRps2erdP79rnHeYNF72tchYVupVonAPa+\n",
+       "n9H8AiD5s0p+H6kCVgAA/EBxIUxJ485LyHYBn7EYaWutX2NlC++w8jnXxc/enamMJ8dvpPxQ77nT\n",
+       "XSc5JzEUb3VSvXChSurqRvXedzc16dmrr9avr79elXPnKlxVZQd5AwP2n74+ffLqq0OuWdXQoGuX\n",
+       "LlV9NKqvfvCBDjz99JD303nkiBtAWn196u3q0tttbeqOt1iR7DYn3m3D3tdI0jVLluirR4+qfvly\n",
+       "1UejQ4JOyd4iO232bBWGw3r1sccU6+wccn+T76E3V7WwtFSxjo5h83QxeeXz9yhMTswp5AKBJ648\n",
+       "k6mAz0iFiPwa60T36byCjLb4Trb42bvTT94KsOMJipOrqnofe4PQ4d5755EjutTaqp6ODp3ctUsF\n",
+       "Tj9fr4GBIX0ql23frge3bNEDGzcqHIkkBPnP3XijXl661D2X0/tTkspvuEHRN99UWX29QlVVKqqu\n",
+       "du/Ji42Nat2zJ+HS4UhE0+rr9cCmTe61vMe/vHSpJKl01iyd2rvXvZ8j/dLhvrVrVR+NKlxVpf4L\n",
+       "F/RJc3NWfjkBAMBosNUWV56lS+1AbuHCkQOxXOdGjmWsyInkraATvZUxl707h+O9L04uZfL4xrtN\n",
+       "eLTv3dmmKtmrmJ/fuFH7Vq3S+Y8+GtwOW1CgqxcvVll9vY6tX6/+S5dkAgFd9bnP6YFNmxSORNzz\n",
+       "ePuHmkBA4enT9dC2bdr//e8nFDjy3oPi2lpZlqXLp04ljM0Eg/paW1vK8SfPrZ7ubvf6M+66S5J0\n",
+       "orl5xPxbenECALKNHE8glc7O0RfwyXVu5FjGipzgh/rUnPsSqqzUIwcODMmNlMYftI82cI11dmrn\n",
+       "448PqXob6+zUr2+4QT1nzrjHhquq7MqzHsW1tXr08GFJGlJB1nvMzM9/XudbWhQoKVFxTY1aNm9O\n",
+       "2FJrgkFZ8Z6gjof37NHVixalHHfy3JKk52680e0bWh+NqjAYHDHwnqy/nAAATB3keGJKGndegtPu\n",
+       "ZDQ/gOU6N3IsY0XGxjOnkreCwlZcU6NwTY2qb7tNoYqKlMckbxMe67bl0ea3hiMRFc+YobY339Sz\n",
+       "V12l1ZGIXlqyRJI04447Eo41hYVDXn+ptVXPz5snSbp/3TotWb9exbW1Q475aOtWte7apVe3btVH\n",
+       "W7cmBJ3B8nIVTZ8ev8jgv8vv/PSnacedPLfCkYhqFi6UZN+zxtWrR5V/Sy/O/Ec+HvzGnEIuZBx4\n",
+       "GmN+ZIw5bIx52xjzgjEm9U8WQD4jNxIj4If61M63tCh2+rRODJNXmBxYjbVQ0ljyW508z4GeHvWe\n",
+       "O+eO6761a1VQVGSfb9o0PbRtm0qvvVYmGJQCg62uL8d7Y0r2Z/7o4cOqX75cRTNmuGOouuUWSVLF\n",
+       "jTeq0MkjLbD/me3t6tKltjb7a/FdQN5xpwq6U80tftEBAMhXGW+1NcYskfSqZVkDxpgfSpJlWd9O\n",
+       "cRxbbQHgCpPJFuSxvmbHypX66OWXVb1ggUrq6txtrqm23XrzPCUpNH26KufNU7C8XKd//3u3p2Z9\n",
+       "NKpYR4e7BbggHNZALJZ2TOlawmxdtsxt+5JKSV2dVrzzjnu+Z6++WpdaWyXZ231r7rgjK+1xAADI\n",
+       "htFstQ0M9+RwLMva5nm4T9IjmZ4LADC13Ld27ajyCr15moufeUb7Vq1yXzNSDuf5lhbF2tv1SXOz\n",
+       "wjU1bu7jnqYm3b9u3ZBzv/7Nb2qgp0cFwaAut7frlBMYera+DvT0JKyklt9wg33+NO/BWZV0OH93\n",
+       "Ku4Wlpaq/8KFhNdMX7BAVTffrFeiUfe99cdi7vOxM2fcVV/6bgIApgq/cjz/VtLLPp0LGNao8hIm\n",
+       "U69OTHrkuvhvtFuQvdtr961alfCakbbenj96VJIUrKjQ9JtukpS4fbVl82b39a9/85t6YONGuz3K\n",
+       "pk1u+5NwdXVC4FkQDCZsZ7148qQb3I62FcnOnTvdc9TefXfCc6UzZ+rhHTt0vqUl4b1V33abJCkw\n",
+       "TIuYXLfuSWeyjmsq4XsU/MacQi4Mu+JpjNkmqTbFU/9gWdbm+DHfkdRjWdbadOd5/PHHdd1110mS\n",
+       "IpGIFixYoMbGRkmDE5/HPB7t44MHD458fLz/5U5JikbVGP/6ZBg/jyffY8dkGc+V9PjQpUuaLjvQ\n",
+       "Gvja17Rz5073+UOXLum0pM/Fg7Dk138Qiajj+HHNPXdOoUhE5++9V9d961tu4PpOd7d6Jc2VdHL3\n",
+       "bv3wzjt16/e+p88vW6b71q7Vv0WjajtzRjPi22yPlpbquq9/3Q2aveMLlJXp90ePauCll1T04ovq\n",
+       "PHJEhy5dcs/nfX+SHXgHnnhCA93dKvrzn3W5tVWtN96ou378Y/u5khK9KzsfdGU8wPy3aFTz/u7v\n",
+       "FHrhBS3+2c/0xsGDCe93z1tv6ezbb2uu7FXdwBNP5Pzzk6Su+C8I3pV0OBrV3/P9lsc8nvSPDyZ9\n",
+       "f8n1eHicf48PHjyozvgvGz/88EONxrjaqRhjHpf0XyXdZ1nW5TTHkOOJiUf/y7HJdb9SXLGGa/Ux\n",
+       "UhuQkXJCX1qyRCeamxO2u16/YoVC8UJGgZISDfT26kRzs0KVlZr5wAO6ePJkwtbeWGennpszx80B\n",
+       "LZs9WxeOH3fbotQvX64HNm3S85/+tC62tqogGNSX9+9PaB+T6n1k0uJksrbumazjAgBMnKz28TTG\n",
+       "fEHSjyXda1lW+zDHEXhi4tH/cmwaG3PbrxTwGEt/zuGCN+f5WEeHPmluVqCsTDPuukv9ly65+Z3e\n",
+       "XpivRKMp+4p6A6uCcDihaFB9NKoHNm7U6khEvefOSbK30/7Nxx/7ek9G835zZbKOCwAwcbIdeL4n\n",
+       "KSTpbPxLb1iW9USK4wg84audnq148MkVvkLMnJpcXmxsTBkAjiRdwBrr7NRzN97oFh8qrq3VpdbW\n",
+       "ISt06VbuvIHVq4895lbHnX7zzXp41y6FIxH9oqZGsfZ2FZaU6Oqf/1xLv/KVMY9zrMeM5/zIL3yP\n",
+       "gt+YU/DbaALPgkxPblnWHMuy6i3Laoj/GRJ0AsgT9CvFJDKW/pxe6YoRhSMR1Sxc6J4z+uabKXth\n",
+       "puuR6S2UdN/atapfvlz10agbdO5uatK0T31KBeGwom+8oZLaVKURRh7nWI8Zz/kBAJho48rxHNUF\n",
+       "WPEEAIxBpls3h8s1zOZ20LGu0CaPc99TTw1ZoRxP3iQ5lwCAiZbVFU8AALJhtK1YkqVbsRzPOVNJ\n",
+       "bh8y1hXa5HGmWqEc7r2MpLimRuHqagJOAMCkQuCJvOOUdAb8wpyaGvwMLoeTHCgmB4kjzafkcaYK\n",
+       "XMfzXs63tIy59ygmN75HwW/MKeQCgScAAGOQHCiON+Adz+rmaMYHAMBkQI4nAABjMNnbh0z28QEA\n",
+       "pp6stlMZwyAIPAEAAABgiqK4EKYk8hLgN+YU/MR8gt+YU/Abcwq5QOAJAAAAAMgqttoCAAAAADLG\n",
+       "VlsAAAAAQM4ReCLvkJcAvzGn4CfmE/zGnILfmFPIBQJPAAAAAEBWkeMJAAAAAMgYOZ4AAAAAgJwj\n",
+       "8ETeIS8BfmNOwU/MJ/iNOQW/MaeQCwSeAAAAAICsIscTAAAAAJAxcjwBAAAAADlH4Im8Q14C/Mac\n",
+       "gp+YT/Abcwp+Y04hFwg8AQAAAABZRY4nAAAAACBj5HgCAAAAAHKOwBN5h7wE+I05BT8xn+A35hT8\n",
+       "xpxCLhB4AgAAAACyihxPAAAAAEDGyPEEAAAAAOQcgSfyDnkJ8BtzCn5iPsFvzCn4jTmFXCDwBAAA\n",
+       "AABkFTmeAAAAAICMkeMJAAAAAMg5Ak/kHfIS4DfmFPzEfILfmFPwG3MKuUDgCQAAAADIKnI8AQAA\n",
+       "AAAZI8cTAAAAAJBzBJ7IO+QlwG/MKfiJ+QS/MafgN+YUcoHAEwAAAACQVeR4AgAAAAAyRo4nAAAA\n",
+       "ACDnCDyRd8hLgN+YU/AT8wl+Y07Bb8wp5AKBJwAAAAAgq8jxBAAAAABkjBxPAAAAAEDOEXgi75CX\n",
+       "AL8xp+An5hP8xpyC35hTyAUCTwAAAABAVpHjCQAAAADIGDmeAAAAAICcI/BE3iEvAX5jTsFPzCf4\n",
+       "jTkFvzGnkAsEngAAAACArCLHEwAAAACQMXI8AQAAAAA5R+CJvENeAvzGnIKfmE/wG3MKfmNOIRcI\n",
+       "PAEAAAAAWUWOJwAAAAAgY+R4AgAAAAByjsATeYe8BPiNOQU/MZ/gN+YU/MacQi4QeAIAAAAAsooc\n",
+       "TwAAAABAxsjxBAAAAADkHIEn8g55CfAbcwp+Yj7Bb8wp+I05hVwg8AQAAAAAZBU5ngAAAACAjJHj\n",
+       "CQAAAADIOQJP5B3yEuA35hT8xHyC35hT8BtzCrlA4AkAAAAAyCpyPAEAAAAAGSPHEwAAAACQcwSe\n",
+       "yDvkJcBvzCn4ifkEvzGn4DfmFHKBwBMAAAAAkFXkeAIAAAAAMkaOJwAAAAAg5wg8kXfIS4DfmFPw\n",
+       "E/MJfmNOwW/MKeQCgScAAAAAIKvI8QQAAAAAZIwcTwAAAABAzmUceBpj/skY87Yx5qAx5lVjzLV+\n",
+       "DgxIh7wE+I05BT8xn+A35hT8xpxCLoxnxfOfLcu6xbKsBZI2SfpfPo0JGNbBgwdzPQRMMcwp+In5\n",
+       "BL8xp+A35hRyIePA07Ks856HZZLaxz8cYGSdnZ25HgKmGOYU/MR8gt+YU/Abcwq5EBjPi40xT0v6\n",
+       "z5IuSrrLlxEBAAAAAKaUYVc8jTHbjDF/SvHnYUmyLOs7lmXNkrRG0r9MwHgBffjhh7keAqYY5hT8\n",
+       "xHyC35hT8BtzCrngSzsVY8wsSS9blvXZFM/RSwUAAAAAprCR2qlkvNXWGDPHsqz34g+XSzqQyQAA\n",
+       "AAAAAFNbxiuexpgNkuZK6pd0VNI3LMtq83FsAAAAAIApwJettgAAAAAApDOePp6jZoz5J2PM28aY\n",
+       "g8aYV40x107EdTE1GWN+ZIw5HJ9TLxhjKnI9JuQ3Y8wKY8w7xph+Y8ytuR4P8pcx5gvGmL8YY94z\n",
+       "xvyPXI8H+c0Y83+NMaeMMX/K9VgwNRhjrjXG7Ij/m/dnY8x/z/WYkL+MMUXGmH3xGO+QMeZ/D3v8\n",
+       "RKx4GmOmOX0/jTH/TdItlmV9PesXxpRkjFki6VXLsgaMMT+UJMuyvp3jYSGPGWM+LWlA0v+R9PeW\n",
+       "Zf0hx0NCHjLGFEp6V9L9kj6R9HtJX7Us63BOB4a8ZYxZLKlb0r9bljU/1+NB/jPG1EqqtSzroDGm\n",
+       "TNL/kxTl+xQyZYwpsSzrojEm47bo4wAAAphJREFUIOk1Sd+yLOu1VMdOyIqnE3TGlUlqn4jrYmqy\n",
+       "LGubZVkD8Yf7JM3M5XiQ/yzL+otlWUdyPQ7kvTskvW9Z1oeWZfVKek528T0gI5Zl7ZHUketxYOqw\n",
+       "LKvVsqyD8b93SzosqS63o0I+syzrYvyvIUmFks6mO3ZCAk9JMsY8bYz5SNJKST+cqOtiyvtbSS/n\n",
+       "ehAAIOkaSR97Hh+Pfw0AJh1jzHWSGmT/Eh/IiDGmwBhzUNIpSTssyzqU7tiM26mkuOg2SbUpnvoH\n",
+       "y7I2W5b1HUnfMcZ8W9K/SPovfl0bU89I8yl+zHck9ViWtXZCB4e8NJo5BYwT1foA5IX4NtsNkp6M\n",
+       "r3wCGYnvQlwQr7nyijGm0bKsnamO9S3wtCxrySgPXStWqDCCkeaTMeZxSUsl3TchA0LeG8P3KCBT\n",
+       "n0jyFs+7VvaqJwBMGsaYoKTfSPqlZVmbcj0eTA2WZZ0zxmyRtFDSzlTHTFRV2zmeh8slHZiI62Jq\n",
+       "MsZ8QdIqScsty7qc6/FgyjG5HgDy1n5Jc4wx1xljQpL+WtKLOR4TALiMMUbSzyUdsizrX3M9HuQ3\n",
+       "Y0y1MSYS/3uxpCUaJs6bqKq2GyTNldQv6aikb1iW1Zb1C2NKMsa8JzuB2UlefsOyrCdyOCTkOWPM\n",
+       "lyT9VFK1pHOSDliW9WBuR4V8ZIx5UNK/yi6w8HPLsoYtLQ8Mxxjza0n3SqqS1Cbp+5Zlrc7tqJDP\n",
+       "jDGLJO2W9EcNpgf8T8uyfpe7USFfGWPmS/qF7MXMAknPWpb1o7THT0TgCQAAAAC4ck1YVVsAAAAA\n",
+       "wJWJwBMAAAAAkFUEngAAAACArCLwBAAAAABkFYEnAAAAACCrCDwBAAAAAFlF4AkAAAAAyCoCTwAA\n",
+       "AABAVv1/lzHCzGUnjVoAAAAASUVORK5CYII=\n"
+      ],
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7fadf4552a90>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "feat = out['feat']\n",
+    "f = plt.figure(figsize=(16,9))\n",
+    "c = ['#ff0000', '#ffff00', '#00ff00', '#00ffff', '#0000ff', \n",
+    "     '#ff00ff', '#990000', '#999900', '#009900', '#009999']\n",
+    "for i in range(10):\n",
+    "    plt.plot(feat[labels==i,0].flatten(), feat[labels==i,1].flatten(), '.', c=c[i])\n",
+    "plt.legend(['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'])\n",
+    "plt.grid()\n",
+    "plt.show()"
+   ]
+  }
+ ],
+ "metadata": {
+  "description": "Extracting features and plotting the Siamese network embedding.",
+  "example_name": "Siamese network embedding",
+  "include_in_docs": true,
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "python2"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.9"
+  },
+  "priority": 7
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/examples/siamese/mnist_siamese.prototxt b/examples/siamese/mnist_siamese.prototxt
new file mode 100644
index 0000000..0e903f8
--- /dev/null
+++ b/examples/siamese/mnist_siamese.prototxt
@@ -0,0 +1,113 @@
+name: "mnist_siamese"
+input: "data"
+input_dim: 10000
+input_dim: 1
+input_dim: 28
+input_dim: 28
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 20
+    kernel_size: 5
+    stride: 1
+  }
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 2
+    stride: 2
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "pool1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 50
+    kernel_size: 5
+    stride: 1
+  }
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 2
+    stride: 2
+  }
+}
+layer {
+  name: "ip1"
+  type: "InnerProduct"
+  bottom: "pool2"
+  top: "ip1"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 500
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "ip1"
+  top: "ip1"
+}
+layer {
+  name: "ip2"
+  type: "InnerProduct"
+  bottom: "ip1"
+  top: "ip2"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 10
+  }
+}
+layer {
+  name: "feat"
+  type: "InnerProduct"
+  bottom: "ip2"
+  top: "feat"
+  param {
+    lr_mult: 1
+  }
+  param {
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 2
+  }
+}
diff --git a/examples/siamese/mnist_siamese_solver.prototxt b/examples/siamese/mnist_siamese_solver.prototxt
new file mode 100644
index 0000000..d4d994d
--- /dev/null
+++ b/examples/siamese/mnist_siamese_solver.prototxt
@@ -0,0 +1,25 @@
+# The train/test net protocol buffer definition
+net: "examples/siamese/mnist_siamese_train_test.prototxt"
+# test_iter specifies how many forward passes the test should carry out.
+# In the case of MNIST, we have test batch size 100 and 100 test iterations,
+# covering the full 10,000 testing images.
+test_iter: 100
+# Carry out testing every 500 training iterations.
+test_interval: 500
+# The base learning rate, momentum and the weight decay of the network.
+base_lr: 0.01
+momentum: 0.9
+weight_decay: 0.0000
+# The learning rate policy
+lr_policy: "inv"
+gamma: 0.0001
+power: 0.75
+# Display every 100 iterations
+display: 100
+# The maximum number of iterations
+max_iter: 50000
+# snapshot intermediate results
+snapshot: 5000
+snapshot_prefix: "examples/siamese/mnist_siamese"
+# solver mode: CPU or GPU
+solver_mode: GPU
diff --git a/examples/siamese/mnist_siamese_train_test.prototxt b/examples/siamese/mnist_siamese_train_test.prototxt
new file mode 100644
index 0000000..8ff864f
--- /dev/null
+++ b/examples/siamese/mnist_siamese_train_test.prototxt
@@ -0,0 +1,349 @@
+name: "mnist_siamese_train_test"
+layer {
+  name: "pair_data"
+  type: "Data"
+  top: "pair_data"
+  top: "sim"
+  include {
+    phase: TRAIN
+  }
+  transform_param {
+    scale: 0.00390625
+  }
+  data_param {
+    source: "examples/siamese/mnist_siamese_train_leveldb"
+    batch_size: 64
+  }
+}
+layer {
+  name: "pair_data"
+  type: "Data"
+  top: "pair_data"
+  top: "sim"
+  include {
+    phase: TEST
+  }
+  transform_param {
+    scale: 0.00390625
+  }
+  data_param {
+    source: "examples/siamese/mnist_siamese_test_leveldb"
+    batch_size: 100
+  }
+}
+layer {
+  name: "slice_pair"
+  type: "Slice"
+  bottom: "pair_data"
+  top: "data"
+  top: "data_p"
+  slice_param {
+    slice_dim: 1
+    slice_point: 1
+  }
+}
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    name: "conv1_w"
+    lr_mult: 1
+  }
+  param {
+    name: "conv1_b"
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 20
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 2
+    stride: 2
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "pool1"
+  top: "conv2"
+  param {
+    name: "conv2_w"
+    lr_mult: 1
+  }
+  param {
+    name: "conv2_b"
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 50
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 2
+    stride: 2
+  }
+}
+layer {
+  name: "ip1"
+  type: "InnerProduct"
+  bottom: "pool2"
+  top: "ip1"
+  param {
+    name: "ip1_w"
+    lr_mult: 1
+  }
+  param {
+    name: "ip1_b"
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 500
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "ip1"
+  top: "ip1"
+}
+layer {
+  name: "ip2"
+  type: "InnerProduct"
+  bottom: "ip1"
+  top: "ip2"
+  param {
+    name: "ip2_w"
+    lr_mult: 1
+  }
+  param {
+    name: "ip2_b"
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 10
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "feat"
+  type: "InnerProduct"
+  bottom: "ip2"
+  top: "feat"
+  param {
+    name: "feat_w"
+    lr_mult: 1
+  }
+  param {
+    name: "feat_b"
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 2
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "conv1_p"
+  type: "Convolution"
+  bottom: "data_p"
+  top: "conv1_p"
+  param {
+    name: "conv1_w"
+    lr_mult: 1
+  }
+  param {
+    name: "conv1_b"
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 20
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "pool1_p"
+  type: "Pooling"
+  bottom: "conv1_p"
+  top: "pool1_p"
+  pooling_param {
+    pool: MAX
+    kernel_size: 2
+    stride: 2
+  }
+}
+layer {
+  name: "conv2_p"
+  type: "Convolution"
+  bottom: "pool1_p"
+  top: "conv2_p"
+  param {
+    name: "conv2_w"
+    lr_mult: 1
+  }
+  param {
+    name: "conv2_b"
+    lr_mult: 2
+  }
+  convolution_param {
+    num_output: 50
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "pool2_p"
+  type: "Pooling"
+  bottom: "conv2_p"
+  top: "pool2_p"
+  pooling_param {
+    pool: MAX
+    kernel_size: 2
+    stride: 2
+  }
+}
+layer {
+  name: "ip1_p"
+  type: "InnerProduct"
+  bottom: "pool2_p"
+  top: "ip1_p"
+  param {
+    name: "ip1_w"
+    lr_mult: 1
+  }
+  param {
+    name: "ip1_b"
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 500
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "relu1_p"
+  type: "ReLU"
+  bottom: "ip1_p"
+  top: "ip1_p"
+}
+layer {
+  name: "ip2_p"
+  type: "InnerProduct"
+  bottom: "ip1_p"
+  top: "ip2_p"
+  param {
+    name: "ip2_w"
+    lr_mult: 1
+  }
+  param {
+    name: "ip2_b"
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 10
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "feat_p"
+  type: "InnerProduct"
+  bottom: "ip2_p"
+  top: "feat_p"
+  param {
+    name: "feat_w"
+    lr_mult: 1
+  }
+  param {
+    name: "feat_b"
+    lr_mult: 2
+  }
+  inner_product_param {
+    num_output: 2
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+}
+layer {
+  name: "loss"
+  type: "ContrastiveLoss"
+  bottom: "feat"
+  bottom: "feat_p"
+  bottom: "sim"
+  top: "loss"
+  contrastive_loss_param {
+    margin: 1
+  }
+}
diff --git a/examples/siamese/readme.md b/examples/siamese/readme.md
new file mode 100644
index 0000000..83db8c9
--- /dev/null
+++ b/examples/siamese/readme.md
@@ -0,0 +1,187 @@
+---
+title: Siamese Network Tutorial
+description: Train and test a siamese network on MNIST data.
+category: example
+include_in_docs: true
+layout: default
+priority: 100
+---
+
+# Siamese Network Training with Caffe
+This example shows how you can use weight sharing and a contrastive loss
+function to learn a model using a siamese network in Caffe.
+
+We will assume that you have successfully compiled Caffe. If not, please refer
+to the [Installation page](../../installation.html). This example builds on the
+[MNIST tutorial](mnist.html), so it is a good idea to read that before
+continuing.
+
+*The guide specifies all paths and assumes all commands are executed from the
+root caffe directory*
+
+## Prepare Datasets
+
+You will first need to download and convert the data from the MNIST
+website. To do this, simply run the following commands:
+
+    ./data/mnist/get_mnist.sh
+    ./examples/siamese/create_mnist_siamese.sh
+
+After running the scripts, there should be two datasets:
+`./examples/siamese/mnist_siamese_train_leveldb` and
+`./examples/siamese/mnist_siamese_test_leveldb`.
+
+## The Model
+First, we will define the model that we want to train using the siamese network.
+We will use the convolutional net defined in
+`./examples/siamese/mnist_siamese.prototxt`. This model is almost
+exactly the same as the [LeNet model](mnist.html); the only difference is that
+we have replaced the top layers that produced probabilities over the 10 digit
+classes with a linear "feature" layer that produces a 2-dimensional vector.
+
+    layer {
+      name: "feat"
+      type: "InnerProduct"
+      bottom: "ip2"
+      top: "feat"
+      param {
+        name: "feat_w"
+        lr_mult: 1
+      }
+      param {
+        name: "feat_b"
+        lr_mult: 2
+      }
+      inner_product_param {
+        num_output: 2
+      }
+    }
+
+## Define the Siamese Network
+
+In this section we will define the siamese network used for training. The
+resulting network is defined in
+`./examples/siamese/mnist_siamese_train_test.prototxt`.
+
+### Reading in the Pair Data
+
+We start with a data layer that reads from the LevelDB database we created
+earlier. Each entry in this database contains the image data for a pair of
+images (`pair_data`) and a binary label saying if they belong to the same class
+or different classes (`sim`).
+
+    layer {
+      name: "pair_data"
+      type: "Data"
+      top: "pair_data"
+      top: "sim"
+      include { phase: TRAIN }
+      transform_param {
+        scale: 0.00390625
+      }
+      data_param {
+        source: "examples/siamese/mnist_siamese_train_leveldb"
+        batch_size: 64
+      }
+    }
+
+In order to pack a pair of images into the same blob in the database, we pack
+one image per channel. We want to be able to work with these two images
+separately, so we add a slice layer after the data layer. This takes the
+`pair_data` and slices it along the channel dimension so that we have a single
+image in `data` and its paired image in `data_p`.
+
+    layer {
+      name: "slice_pair"
+      type: "Slice"
+      bottom: "pair_data"
+      top: "data"
+      top: "data_p"
+      slice_param {
+        slice_dim: 1
+        slice_point: 1
+      }
+    }
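+
+For reference, the conversion tool invoked by `create_mnist_siamese.sh` packs
+each pair in exactly this per-channel layout. The Python sketch below is only
+an illustration of that packing (the real converter is a compiled C++ tool),
+and `make_pair_datum`, `img_a`, and `img_b` are hypothetical names:
+
+    import numpy as np
+    from caffe.proto import caffe_pb2
+
+    def make_pair_datum(img_a, img_b, same_class):
+        # Pack two 28x28 uint8 MNIST images into one 2-channel Datum,
+        # which is the layout the slice layer above splits back apart.
+        datum = caffe_pb2.Datum()
+        datum.channels, datum.height, datum.width = 2, 28, 28
+        datum.data = np.array([img_a, img_b], dtype=np.uint8).tobytes()
+        datum.label = int(same_class)  # sim: 1 = same digit, 0 = different
+        return datum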
+
+### Building the First Side of the Siamese Net
+
+Now we can specify the first side of the siamese net. This side operates on
+`data` and produces `feat`. Starting from the net in
+`./examples/siamese/mnist_siamese.prototxt`, we add default weight fillers and
+then name the parameters of the convolutional and inner product layers. Naming
+the parameters allows Caffe to share them between the layers on both sides of
+the siamese net. In the definition this looks like:
+
+    ...
+    param { name: "conv1_w" ...  }
+    param { name: "conv1_b" ...  }
+    ...
+    param { name: "conv2_w" ...  }
+    param { name: "conv2_b" ...  }
+    ...
+    param { name: "ip1_w" ...  }
+    param { name: "ip1_b" ...  }
+    ...
+    param { name: "ip2_w" ...  }
+    param { name: "ip2_b" ...  }
+    ...
+
+### Building the Second Side of the Siamese Net
+
+Now we need to create the second path that operates on `data_p` and produces
+`feat_p`. This path is exactly the same as the first, so we can simply copy and
+paste it and then change the name of each layer, input, and output by appending
+`_p` to differentiate the "paired" layers from the originals.
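+
+A quick way to convince yourself that the parameter sharing works is to load
+the finished train/test definition in Python and compare a layer with its `_p`
+twin. This is only a sanity-check sketch; it assumes the LeNet-style layer
+names (`conv1` and its copy `conv1_p`), that the LevelDBs from the data
+preparation step exist, and that pycaffe is on your `PYTHONPATH`:
+
+    import numpy as np
+    import caffe
+
+    net = caffe.Net('examples/siamese/mnist_siamese_train_test.prototxt',
+                    caffe.TRAIN)
+    # Shared parameter names mean both branches point at the same weights.
+    assert np.array_equal(net.params['conv1'][0].data,
+                          net.params['conv1_p'][0].data)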
+
+### Adding the Contrastive Loss Function
+
+To train the network we will optimize a contrastive loss function proposed in
+Raia Hadsell, Sumit Chopra, and Yann LeCun, "Dimensionality Reduction by
+Learning an Invariant Mapping". This loss function encourages matching pairs to
+be close together in feature space while pushing non-matching pairs apart. It
+is implemented by the `ContrastiveLoss` layer:
+
+    layer {
+        name: "loss"
+        type: "ContrastiveLoss"
+        contrastive_loss_param {
+            margin: 1.0
+        }
+        bottom: "feat"
+        bottom: "feat_p"
+        bottom: "sim"
+        top: "loss"
+    }
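+
+In NumPy terms, the loss defined above is roughly the following; this is only a
+sketch of the default formulation, assuming `sim` is 1 for matching pairs and 0
+otherwise, with margin 1 as set in `contrastive_loss_param`:
+
+    import numpy as np
+
+    def contrastive_loss(feat, feat_p, sim, margin=1.0):
+        # Euclidean distance between the two feature vectors of each pair.
+        d = np.linalg.norm(feat - feat_p, axis=1)
+        # Matching pairs are pulled together; non-matching pairs are pushed
+        # at least `margin` apart.
+        per_pair = sim * d ** 2 + (1 - sim) * np.maximum(margin - d, 0) ** 2
+        return per_pair.mean() / 2.0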
+
+## Define the Solver
+
+Nothing special needs to be done to the solver besides pointing it at the
+correct model file. The solver is defined in
+`./examples/siamese/mnist_siamese_solver.prototxt`.
+
+## Training and Testing the Model
+
+Training the model is straightforward once you have written the network
+definition protobuf and solver protobuf files. Simply run
+`./examples/siamese/train_mnist_siamese.sh`:
+
+    ./examples/siamese/train_mnist_siamese.sh
+
+## Plotting the Results
+
+First, we can draw the model and the siamese network by running the following
+commands, which render the DAGs defined in the .prototxt files:
+
+    ./python/draw_net.py \
+        ./examples/siamese/mnist_siamese.prototxt \
+        ./examples/siamese/mnist_siamese.png
+
+    ./python/draw_net.py \
+        ./examples/siamese/mnist_siamese_train_test.prototxt \
+        ./examples/siamese/mnist_siamese_train_test.png
+
+Second, we can load the learned model and plot the features using the IPython
+notebook:
+
+    ipython notebook ./examples/siamese/mnist_siamese.ipynb
+
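+If you prefer a plain script over the notebook, the feature extraction it
+performs looks roughly like the sketch below. The snapshot filename depends on
+the snapshot settings in the solver, so treat
+`mnist_siamese_iter_50000.caffemodel` as a placeholder, and `images` is assumed
+to be an N x 1 x 28 x 28 float array of MNIST digits scaled by 1/256 (matching
+the `scale: 0.00390625` transform above):
+
+    import caffe
+
+    net = caffe.Net('examples/siamese/mnist_siamese.prototxt',
+                    'examples/siamese/mnist_siamese_iter_50000.caffemodel',
+                    caffe.TEST)
+    # `images`: N x 1 x 28 x 28 float array of scaled MNIST digits (assumed).
+    out = net.forward_all(data=images)
+    feat = out['feat']  # N x 2 array: one 2-D point per input digit
+    # e.g. plt.scatter(feat[:, 0], feat[:, 1], c=labels) gives the embedding plot
+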
diff --git a/examples/siamese/train_mnist_siamese.sh b/examples/siamese/train_mnist_siamese.sh
new file mode 100755
index 0000000..84a30a8
--- /dev/null
+++ b/examples/siamese/train_mnist_siamese.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env sh
+
+TOOLS=./build/tools
+
+$TOOLS/caffe train --solver=examples/siamese/mnist_siamese_solver.prototxt
diff --git a/examples/web_demo/app.py b/examples/web_demo/app.py
new file mode 100644
index 0000000..09411f3
--- /dev/null
+++ b/examples/web_demo/app.py
@@ -0,0 +1,227 @@
+import os
+import time
+import cPickle
+import datetime
+import logging
+import flask
+import werkzeug
+import optparse
+import tornado.wsgi
+import tornado.httpserver
+import numpy as np
+import pandas as pd
+from PIL import Image
+import cStringIO as StringIO
+import urllib
+import exifutil
+
+import caffe
+
+REPO_DIRNAME = os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + '/../..')
+UPLOAD_FOLDER = '/tmp/caffe_demos_uploads'
+ALLOWED_IMAGE_EXTENSIONS = set(['png', 'bmp', 'jpg', 'jpe', 'jpeg', 'gif'])
+
+# Obtain the flask app object
+app = flask.Flask(__name__)
+
+
+@app.route('/')
+def index():
+    return flask.render_template('index.html', has_result=False)
+
+
+@app.route('/classify_url', methods=['GET'])
+def classify_url():
+    imageurl = flask.request.args.get('imageurl', '')
+    try:
+        string_buffer = StringIO.StringIO(
+            urllib.urlopen(imageurl).read())
+        image = caffe.io.load_image(string_buffer)
+
+    except Exception as err:
+        # For any exception we encounter in reading the image, we will just
+        # not continue.
+        logging.info('URL Image open error: %s', err)
+        return flask.render_template(
+            'index.html', has_result=True,
+            result=(False, 'Cannot open image from URL.')
+        )
+
+    logging.info('Image: %s', imageurl)
+    result = app.clf.classify_image(image)
+    return flask.render_template(
+        'index.html', has_result=True, result=result, imagesrc=imageurl)
+
+
+@app.route('/classify_upload', methods=['POST'])
+def classify_upload():
+    try:
+        # We will save the file to disk for possible data collection.
+        imagefile = flask.request.files['imagefile']
+        filename_ = str(datetime.datetime.now()).replace(' ', '_') + \
+            werkzeug.secure_filename(imagefile.filename)
+        filename = os.path.join(UPLOAD_FOLDER, filename_)
+        imagefile.save(filename)
+        logging.info('Saving to %s.', filename)
+        image = exifutil.open_oriented_im(filename)
+
+    except Exception as err:
+        logging.info('Uploaded image open error: %s', err)
+        return flask.render_template(
+            'index.html', has_result=True,
+            result=(False, 'Cannot open uploaded image.')
+        )
+
+    result = app.clf.classify_image(image)
+    return flask.render_template(
+        'index.html', has_result=True, result=result,
+        imagesrc=embed_image_html(image)
+    )
+
+
+def embed_image_html(image):
+    """Creates an image embedded in HTML base64 format."""
+    image_pil = Image.fromarray((255 * image).astype('uint8'))
+    image_pil = image_pil.resize((256, 256))
+    string_buf = StringIO.StringIO()
+    image_pil.save(string_buf, format='png')
+    data = string_buf.getvalue().encode('base64').replace('\n', '')
+    return 'data:image/png;base64,' + data
+
+
+def allowed_file(filename):
+    return (
+        '.' in filename and
+        filename.rsplit('.', 1)[1] in ALLOWED_IMAGE_EXTENSIONS
+    )
+
+
+class ImagenetClassifier(object):
+    default_args = {
+        'model_def_file': (
+            '{}/models/bvlc_reference_caffenet/deploy.prototxt'.format(REPO_DIRNAME)),
+        'pretrained_model_file': (
+            '{}/models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel'.format(REPO_DIRNAME)),
+        'mean_file': (
+            '{}/python/caffe/imagenet/ilsvrc_2012_mean.npy'.format(REPO_DIRNAME)),
+        'class_labels_file': (
+            '{}/data/ilsvrc12/synset_words.txt'.format(REPO_DIRNAME)),
+        'bet_file': (
+            '{}/data/ilsvrc12/imagenet.bet.pickle'.format(REPO_DIRNAME)),
+    }
+    for key, val in default_args.iteritems():
+        if not os.path.exists(val):
+            raise Exception(
+                "File for {} is missing. Should be at: {}".format(key, val))
+    default_args['image_dim'] = 256
+    default_args['raw_scale'] = 255.
+
+    def __init__(self, model_def_file, pretrained_model_file, mean_file,
+                 raw_scale, class_labels_file, bet_file, image_dim, gpu_mode):
+        logging.info('Loading net and associated files...')
+        if gpu_mode:
+            caffe.set_mode_gpu()
+        else:
+            caffe.set_mode_cpu()
+        self.net = caffe.Classifier(
+            model_def_file, pretrained_model_file,
+            image_dims=(image_dim, image_dim), raw_scale=raw_scale,
+            mean=np.load(mean_file).mean(1).mean(1), channel_swap=(2, 1, 0)
+        )
+
+        with open(class_labels_file) as f:
+            labels_df = pd.DataFrame([
+                {
+                    'synset_id': l.strip().split(' ')[0],
+                    'name': ' '.join(l.strip().split(' ')[1:]).split(',')[0]
+                }
+                for l in f.readlines()
+            ])
+        self.labels = labels_df.sort('synset_id')['name'].values
+
+        self.bet = cPickle.load(open(bet_file))
+        # A bias to prefer children nodes in single-chain paths
+        # I am setting the value to 0.1 as a quick, simple model.
+        # We could use better psychological models here...
+        self.bet['infogain'] -= np.array(self.bet['preferences']) * 0.1
+
+    def classify_image(self, image):
+        try:
+            starttime = time.time()
+            scores = self.net.predict([image], oversample=True).flatten()
+            endtime = time.time()
+
+            indices = (-scores).argsort()[:5]
+            predictions = self.labels[indices]
+
+            # In addition to the prediction text, we also return the scores,
+            # which the page uses for the progress bar visualization.
+            meta = [
+                (p, '%.5f' % scores[i])
+                for i, p in zip(indices, predictions)
+            ]
+            logging.info('result: %s', str(meta))
+
+            # Compute expected information gain
+            expected_infogain = np.dot(
+                self.bet['probmat'], scores[self.bet['idmapping']])
+            expected_infogain *= self.bet['infogain']
+
+            # sort the scores
+            infogain_sort = expected_infogain.argsort()[::-1]
+            bet_result = [(self.bet['words'][v], '%.5f' % expected_infogain[v])
+                          for v in infogain_sort[:5]]
+            logging.info('bet result: %s', str(bet_result))
+
+            return (True, meta, bet_result, '%.3f' % (endtime - starttime))
+
+        except Exception as err:
+            logging.info('Classification error: %s', err)
+            return (False, 'Something went wrong when classifying the '
+                           'image. Maybe try another one?')
+
+
+def start_tornado(app, port=5000):
+    http_server = tornado.httpserver.HTTPServer(
+        tornado.wsgi.WSGIContainer(app))
+    http_server.listen(port)
+    print("Tornado server starting on port {}".format(port))
+    tornado.ioloop.IOLoop.instance().start()
+
+
+def start_from_terminal(app):
+    """
+    Parse command line options and start the server.
+    """
+    parser = optparse.OptionParser()
+    parser.add_option(
+        '-d', '--debug',
+        help="enable debug mode",
+        action="store_true", default=False)
+    parser.add_option(
+        '-p', '--port',
+        help="which port to serve content on",
+        type='int', default=5000)
+    parser.add_option(
+        '-g', '--gpu',
+        help="use gpu mode",
+        action='store_true', default=False)
+
+    opts, args = parser.parse_args()
+    ImagenetClassifier.default_args.update({'gpu_mode': opts.gpu})
+
+    # Initialize classifier + warm start by forward for allocation
+    app.clf = ImagenetClassifier(**ImagenetClassifier.default_args)
+    app.clf.net.forward()
+
+    if opts.debug:
+        app.run(debug=True, host='0.0.0.0', port=opts.port)
+    else:
+        start_tornado(app, opts.port)
+
+
+if __name__ == '__main__':
+    logging.getLogger().setLevel(logging.INFO)
+    if not os.path.exists(UPLOAD_FOLDER):
+        os.makedirs(UPLOAD_FOLDER)
+    start_from_terminal(app)
diff --git a/examples/web_demo/exifutil.py b/examples/web_demo/exifutil.py
new file mode 100644
index 0000000..01918b2
--- /dev/null
+++ b/examples/web_demo/exifutil.py
@@ -0,0 +1,39 @@
+"""
+This script handles the skimage exif problem.
+"""
+
+from PIL import Image
+import numpy as np
+
+ORIENTATIONS = {   # used in apply_orientation
+    2: (Image.FLIP_LEFT_RIGHT,),
+    3: (Image.ROTATE_180,),
+    4: (Image.FLIP_TOP_BOTTOM,),
+    5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
+    6: (Image.ROTATE_270,),
+    7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
+    8: (Image.ROTATE_90,)
+}
+
+
+def open_oriented_im(im_path):
+    im = Image.open(im_path)
+    if hasattr(im, '_getexif'):
+        exif = im._getexif()
+        if exif is not None and 274 in exif:
+            orientation = exif[274]
+            im = apply_orientation(im, orientation)
+    img = np.asarray(im).astype(np.float32) / 255.
+    if img.ndim == 2:
+        img = img[:, :, np.newaxis]
+        img = np.tile(img, (1, 1, 3))
+    elif img.shape[2] == 4:
+        img = img[:, :, :3]
+    return img
+
+
+def apply_orientation(im, orientation):
+    if orientation in ORIENTATIONS:
+        for method in ORIENTATIONS[orientation]:
+            im = im.transpose(method)
+    return im
diff --git a/examples/web_demo/readme.md b/examples/web_demo/readme.md
new file mode 100644
index 0000000..fe74b9e
--- /dev/null
+++ b/examples/web_demo/readme.md
@@ -0,0 +1,41 @@
+---
+title: Web demo
+description: Image classification demo running as a Flask web server.
+category: example
+include_in_docs: true
+priority: 10
+---
+
+# Web Demo
+
+## Requirements
+
+The demo server requires Python with some dependencies.
+To make sure you have the dependencies, please run `pip install -r examples/web_demo/requirements.txt`. Also make sure that you've compiled the Python Caffe interface and that it is on your `PYTHONPATH` (see [installation instructions](/installation.html)).
+
+Make sure that you have obtained the Reference CaffeNet Model and the ImageNet Auxiliary Data:
+
+    ./scripts/download_model_binary.py models/bvlc_reference_caffenet
+    ./data/ilsvrc12/get_ilsvrc_aux.sh
+
+NOTE: if you run into trouble, try re-downloading the auxiliary files.
+
+## Run
+
+Running `python examples/web_demo/app.py` will bring up the demo server, accessible at `http://0.0.0.0:5000`.
+You can enable debug mode of the web server, or switch to a different port:
+
+    % python examples/web_demo/app.py -h
+    Usage: app.py [options]
+
+    Options:
+      -h, --help            show this help message and exit
+      -d, --debug           enable debug mode
+      -p PORT, --port=PORT  which port to serve content on
+
+## How are the "maximally accurate" results generated?
+
+In a nutshell: ImageNet predictions are made at the leaf nodes, but the hierarchical organization of the ImageNet synsets allows leaf nodes to be united under more general parent nodes, with 'entity' at the very top.
+To give "maximally accurate" results, we "back off" from maximally specific predictions to maintain a high accuracy.
+The `bet_file` that is loaded in the demo provides the graph structure and names of all relevant ImageNet nodes as well as measures of information gain between them.
+Please see the "Hedging your bets" paper from [CVPR 2012](http://www.image-net.org/projects/hedging/) for further information.
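+
+Concretely, the "back off" step mirrors what `classify_image` in `app.py` does
+with the contents of the `bet_file` (its `probmat`, `idmapping`, `infogain`,
+and `words` entries); a minimal sketch, with `hedged_predictions` as a
+hypothetical helper name:
+
+    import numpy as np
+
+    def hedged_predictions(scores, bet, top_k=5):
+        # Propagate leaf scores up the hierarchy and weight by information gain.
+        expected_infogain = np.dot(bet['probmat'], scores[bet['idmapping']])
+        expected_infogain *= bet['infogain']
+        order = expected_infogain.argsort()[::-1][:top_k]
+        return [(bet['words'][i], expected_infogain[i]) for i in order]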
diff --git a/examples/web_demo/requirements.txt b/examples/web_demo/requirements.txt
new file mode 100644
index 0000000..8fb1d2c
--- /dev/null
+++ b/examples/web_demo/requirements.txt
@@ -0,0 +1,6 @@
+werkzeug
+flask
+tornado
+numpy
+pandas
+pillow
diff --git a/examples/web_demo/templates/index.html b/examples/web_demo/templates/index.html
new file mode 100644
index 0000000..8789334
--- /dev/null
+++ b/examples/web_demo/templates/index.html
@@ -0,0 +1,138 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <meta name="description" content="Caffe demos">
+    <meta name="author" content="BVLC (http://bvlc.eecs.berkeley.edu/)">
+
+    <title>Caffe Demos</title>
+
+    <link href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet">
+
+    <script type="text/javascript" src="//code.jquery.com/jquery-2.1.1.js"></script>
+    <script src="//netdna.bootstrapcdn.com/bootstrap/3.1.1/js/bootstrap.min.js"></script>
+
+    <!-- Script to instantly classify an image once it is uploaded. -->
+    <script type="text/javascript">
+      $(document).ready(
+        function(){
+          $('#classifyfile').attr('disabled',true);
+          $('#imagefile').change(
+            function(){
+              if ($(this).val()){
+                $('#formupload').submit();
+              }
+            }
+          );
+        }
+      );
+    </script>
+
+    <style>
+    body {
+      font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
+      line-height:1.5em;
+      color: #232323;
+      -webkit-font-smoothing: antialiased;
+    }
+
+    h1, h2, h3 {
+      font-family: Times, serif;
+      line-height:1.5em;
+      border-bottom: 1px solid #ccc;
+    }
+    </style>
+  </head>
+
+  <body>
+    <!-- Begin page content -->
+    <div class="container">
+      <div class="page-header">
+        <h1><a href="/">Caffe Demos</a></h1>
+        <p>
+          The <a href="http://caffe.berkeleyvision.org">Caffe</a> neural network library makes implementing state-of-the-art computer vision systems easy.
+        </p>
+      </div>
+
+      <div>
+        <h2>Classification</h2>
+        <a href="/classify_url?imageurl=http%3A%2F%2Fi.telegraph.co.uk%2Fmultimedia%2Farchive%2F02351%2Fcross-eyed-cat_2351472k.jpg">Click for a Quick Example</a>
+      </div>
+
+      {% if has_result %}
+      {% if not result[0] %}
+      <!-- we have error in the result. -->
+      <div class="alert alert-danger">{{ result[1] }} Did you provide a valid URL or a valid image file? </div>
+      {% else %}
+      <div class="media">
+        <a class="pull-left" href="#"><img class="media-object" width="192" height="192" src={{ imagesrc }}></a>
+        <div class="media-body">
+          <div class="bs-example bs-example-tabs">
+            <ul id="myTab" class="nav nav-tabs">
+              <li class="active"><a href="#infopred" data-toggle="tab">Maximally accurate</a></li>
+              <li><a href="#flatpred" data-toggle="tab">Maximally specific</a></li>
+            </ul>
+            <div id="myTabContent" class="tab-content">
+              <div class="tab-pane fade in active" id="infopred">
+                <ul class="list-group">
+                  {% for single_pred in result[2] %}
+                  <li class="list-group-item">
+                  <span class="badge">{{ single_pred[1] }}</span>
+                  <h4 class="list-group-item-heading">
+                    <a href="https://www.google.com/#q={{ single_pred[0] }}" target="_blank">{{ single_pred[0] }}</a>
+                  </h4>
+                  </li>
+                  {% endfor %}
+                </ul>
+              </div>
+              <div class="tab-pane fade" id="flatpred">
+                <ul class="list-group">
+                  {% for single_pred in result[1] %}
+                  <li class="list-group-item">
+                  <span class="badge">{{ single_pred[1] }}</span>
+                  <h4 class="list-group-item-heading">
+                    <a href="https://www.google.com/#q={{ single_pred[0] }}" target="_blank">{{ single_pred[0] }}</a>
+                  </h4>
+                  </li>
+                  {% endfor %}
+                </ul>
+              </div>
+            </div>
+          </div>
+
+        </div>
+      </div>
+      <p> CNN took {{ result[3] }} seconds. </p>
+      {% endif %}
+      <hr>
+      {% endif %}
+
+      <form role="form" action="classify_url" method="get">
+        <div class="form-group">
+          <div class="input-group">
+            <input type="text" class="form-control" name="imageurl" id="imageurl" placeholder="Provide an image URL">
+            <span class="input-group-btn">
+              <input class="btn btn-primary" value="Classify URL" type="submit" id="classifyurl"></input>
+            </span>
+          </div><!-- /input-group -->
+        </div>
+      </form>
+
+      <form id="formupload" class="form-inline" role="form" action="classify_upload" method="post" enctype="multipart/form-data">
+        <div class="form-group">
+          <label for="imagefile">Or upload an image:</label>
+          <input type="file" name="imagefile" id="imagefile">
+        </div>
+        <!--<input type="submit" class="btn btn-primary" value="Classify File" id="classifyfile"></input>-->
+      </form>
+    </div>
+
+    <hr>
+    <div id="footer">
+      <div class="container">
+        <p>© BVLC 2014</p>
+      </div>
+   </div>
+ </body>
+</html>
diff --git a/include/caffe/blob.hpp b/include/caffe/blob.hpp
new file mode 100644
index 0000000..472cc18
--- /dev/null
+++ b/include/caffe/blob.hpp
@@ -0,0 +1,280 @@
+#ifndef CAFFE_BLOB_HPP_
+#define CAFFE_BLOB_HPP_
+
+#include <algorithm>
+#include <string>
+#include <vector>
+
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/syncedmem.hpp"
+#include "caffe/util/math_functions.hpp"
+
+const int kMaxBlobAxes = INT_MAX;
+
+namespace caffe {
+
+/**
+ * @brief A wrapper around SyncedMemory holders serving as the basic
+ *        computational unit through which Layer%s, Net%s, and Solver%s
+ *        interact.
+ *
+ * TODO(dox): more thorough description.
+ */
+template <typename Dtype>
+class Blob {
+ public:
+  Blob()
+       : data_(), diff_(), count_(0), capacity_(0) {}
+
+  /// @brief Deprecated; use <code>Blob(const vector<int>& shape)</code>.
+  explicit Blob(const int num, const int channels, const int height,
+      const int width);
+  explicit Blob(const vector<int>& shape);
+
+  /// @brief Deprecated; use <code>Reshape(const vector<int>& shape)</code>.
+  void Reshape(const int num, const int channels, const int height,
+      const int width);
+  /**
+   * @brief Change the dimensions of the blob, allocating new memory if
+   *        necessary.
+   *
+   * This function can be called both to create an initial allocation
+   * of memory, and to adjust the dimensions of a top blob during Layer::Reshape
+   * or Layer::Forward. When changing the size of blob, memory will only be
+   * reallocated if sufficient memory does not already exist, and excess memory
+   * will never be freed.
+   *
+   * Note that reshaping an input blob and immediately calling Net::Backward is
+   * an error; either Net::Forward or Net::Reshape need to be called to
+   * propagate the new input shape to higher layers.
+   */
+  void Reshape(const vector<int>& shape);
+  void Reshape(const BlobShape& shape);
+  void ReshapeLike(const Blob& other);
+  inline string shape_string() const {
+    ostringstream stream;
+    for (int i = 0; i < shape_.size(); ++i) {
+      stream << shape_[i] << " ";
+    }
+    stream << "(" << count_ << ")";
+    return stream.str();
+  }
+  inline const vector<int>& shape() const { return shape_; }
+  /**
+   * @brief Returns the dimension of the index-th axis (or the negative index-th
+   *        axis from the end, if index is negative).
+   *
+   * @param index the axis index, which may be negative as it will be
+   *        "canonicalized" using CanonicalAxisIndex.
+   *        Dies on out of range index.
+   */
+  inline int shape(int index) const {
+    return shape_[CanonicalAxisIndex(index)];
+  }
+  inline int num_axes() const { return shape_.size(); }
+  inline int count() const { return count_; }
+
+  /**
+   * @brief Compute the volume of a slice; i.e., the product of dimensions
+   *        among a range of axes.
+   *
+   * @param start_axis The first axis to include in the slice.
+   *
+   * @param end_axis The first axis to exclude from the slice.
+   */
+  inline int count(int start_axis, int end_axis) const {
+    CHECK_LE(start_axis, end_axis);
+    CHECK_GE(start_axis, 0);
+    CHECK_GE(end_axis, 0);
+    CHECK_LE(start_axis, num_axes());
+    CHECK_LE(end_axis, num_axes());
+    int count = 1;
+    for (int i = start_axis; i < end_axis; ++i) {
+      count *= shape(i);
+    }
+    return count;
+  }
+  /**
+   * @brief Compute the volume of a slice spanning from a particular first
+   *        axis to the final axis.
+   *
+   * @param start_axis The first axis to include in the slice.
+   */
+  inline int count(int start_axis) const {
+    return count(start_axis, num_axes());
+  }
+
+  /**
+   * @brief Returns the 'canonical' version of a (usually) user-specified axis,
+   *        allowing for negative indexing (e.g., -1 for the last axis).
+   *
+   * @param index the axis index.
+   *        If 0 <= index < num_axes(), return index.
+   *        If -num_axes <= index <= -1, return (num_axes() - (-index)),
+   *        e.g., the last axis index (num_axes() - 1) if index == -1,
+   *        the second to last if index == -2, etc.
+   *        Dies on out of range index.
+   */
+  inline int CanonicalAxisIndex(int axis_index) const {
+    CHECK_GE(axis_index, -num_axes())
+        << "axis " << axis_index << " out of range for " << num_axes()
+        << "-D Blob with shape " << shape_string();
+    CHECK_LT(axis_index, num_axes())
+        << "axis " << axis_index << " out of range for " << num_axes()
+        << "-D Blob with shape " << shape_string();
+    if (axis_index < 0) {
+      return axis_index + num_axes();
+    }
+    return axis_index;
+  }
+
+  /// @brief Deprecated legacy shape accessor num: use shape(0) instead.
+  inline int num() const { return LegacyShape(0); }
+  /// @brief Deprecated legacy shape accessor channels: use shape(1) instead.
+  inline int channels() const { return LegacyShape(1); }
+  /// @brief Deprecated legacy shape accessor height: use shape(2) instead.
+  inline int height() const { return LegacyShape(2); }
+  /// @brief Deprecated legacy shape accessor width: use shape(3) instead.
+  inline int width() const { return LegacyShape(3); }
+  inline int LegacyShape(int index) const {
+    CHECK_LE(num_axes(), 4)
+        << "Cannot use legacy accessors on Blobs with > 4 axes.";
+    CHECK_LT(index, 4);
+    CHECK_GE(index, -4);
+    if (index >= num_axes() || index < -num_axes()) {
+      // Axis is out of range, but still in [0, 3] (or [-4, -1] for reverse
+      // indexing) -- this special case simulates the one-padding used to fill
+      // extraneous axes of legacy blobs.
+      return 1;
+    }
+    return shape(index);
+  }
+
+  inline int offset(const int n, const int c = 0, const int h = 0,
+      const int w = 0) const {
+    CHECK_GE(n, 0);
+    CHECK_LE(n, num());
+    CHECK_GE(channels(), 0);
+    CHECK_LE(c, channels());
+    CHECK_GE(height(), 0);
+    CHECK_LE(h, height());
+    CHECK_GE(width(), 0);
+    CHECK_LE(w, width());
+    return ((n * channels() + c) * height() + h) * width() + w;
+  }
+
+  inline int offset(const vector<int>& indices) const {
+    CHECK_LE(indices.size(), num_axes());
+    int offset = 0;
+    for (int i = 0; i < num_axes(); ++i) {
+      offset *= shape(i);
+      if (indices.size() > i) {
+        CHECK_GE(indices[i], 0);
+        CHECK_LT(indices[i], shape(i));
+        offset += indices[i];
+      }
+    }
+    return offset;
+  }
+  /**
+   * @brief Copy from a source Blob.
+   *
+   * @param source the Blob to copy from
+   * @param copy_diff if false, copy the data; if true, copy the diff
+   * @param reshape if false, require this Blob to be pre-shaped to the shape
+   *        of other (and die otherwise); if true, Reshape this Blob to other's
+   *        shape if necessary
+   */
+  void CopyFrom(const Blob<Dtype>& source, bool copy_diff = false,
+      bool reshape = false);
+
+  inline Dtype data_at(const int n, const int c, const int h,
+      const int w) const {
+    return cpu_data()[offset(n, c, h, w)];
+  }
+
+  inline Dtype diff_at(const int n, const int c, const int h,
+      const int w) const {
+    return cpu_diff()[offset(n, c, h, w)];
+  }
+
+  inline Dtype data_at(const vector<int>& index) const {
+    return cpu_data()[offset(index)];
+  }
+
+  inline Dtype diff_at(const vector<int>& index) const {
+    return cpu_diff()[offset(index)];
+  }
+
+  inline const shared_ptr<SyncedMemory>& data() const {
+    CHECK(data_);
+    return data_;
+  }
+
+  inline const shared_ptr<SyncedMemory>& diff() const {
+    CHECK(diff_);
+    return diff_;
+  }
+
+  const Dtype* cpu_data() const;
+  void set_cpu_data(Dtype* data);
+  const Dtype* gpu_data() const;
+  const Dtype* cpu_diff() const;
+  const Dtype* gpu_diff() const;
+  Dtype* mutable_cpu_data();
+  Dtype* mutable_gpu_data();
+  Dtype* mutable_cpu_diff();
+  Dtype* mutable_gpu_diff();
+  void Update();
+  void FromProto(const BlobProto& proto, bool reshape = true);
+  void ToProto(BlobProto* proto, bool write_diff = false) const;
+
+  /// @brief Compute the sum of absolute values (L1 norm) of the data.
+  Dtype asum_data() const;
+  /// @brief Compute the sum of absolute values (L1 norm) of the diff.
+  Dtype asum_diff() const;
+  /// @brief Compute the sum of squares (L2 norm squared) of the data.
+  Dtype sumsq_data() const;
+  /// @brief Compute the sum of squares (L2 norm squared) of the diff.
+  Dtype sumsq_diff() const;
+
+  /// @brief Scale the blob data by a constant factor.
+  void scale_data(Dtype scale_factor);
+  /// @brief Scale the blob diff by a constant factor.
+  void scale_diff(Dtype scale_factor);
+
+  /**
+   * @brief Set the data_ shared_ptr to point to the SyncedMemory holding the
+   *        data_ of Blob other -- useful in Layer%s which simply perform a copy
+   *        in their Forward pass.
+   *
+   * This deallocates the SyncedMemory holding this Blob's data_, as
+   * shared_ptr calls its destructor when reset with the "=" operator.
+   */
+  void ShareData(const Blob& other);
+  /**
+   * @brief Set the diff_ shared_ptr to point to the SyncedMemory holding the
+   *        diff_ of Blob other -- useful in Layer%s which simply perform a copy
+   *        in their Forward pass.
+   *
+   * This deallocates the SyncedMemory holding this Blob's diff_, as
+   * shared_ptr calls its destructor when reset with the "=" operator.
+   */
+  void ShareDiff(const Blob& other);
+
+  bool ShapeEquals(const BlobProto& other);
+
+ protected:
+  shared_ptr<SyncedMemory> data_;
+  shared_ptr<SyncedMemory> diff_;
+  vector<int> shape_;
+  int count_;
+  int capacity_;
+
+  DISABLE_COPY_AND_ASSIGN(Blob);
+};  // class Blob
+
+}  // namespace caffe
+
+#endif  // CAFFE_BLOB_HPP_
diff --git a/include/caffe/caffe.hpp b/include/caffe/caffe.hpp
new file mode 100644
index 0000000..3c829f2
--- /dev/null
+++ b/include/caffe/caffe.hpp
@@ -0,0 +1,19 @@
+// caffe.hpp is the header file that you need to include in your code. It wraps
+// all the internal caffe header files into one for simpler inclusion.
+
+#ifndef CAFFE_CAFFE_HPP_
+#define CAFFE_CAFFE_HPP_
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/layer_factory.hpp"
+#include "caffe/net.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/solver.hpp"
+#include "caffe/util/benchmark.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/vision_layers.hpp"
+
+#endif  // CAFFE_CAFFE_HPP_
diff --git a/include/caffe/common.hpp b/include/caffe/common.hpp
new file mode 100644
index 0000000..5f86bc2
--- /dev/null
+++ b/include/caffe/common.hpp
@@ -0,0 +1,172 @@
+#ifndef CAFFE_COMMON_HPP_
+#define CAFFE_COMMON_HPP_
+
+#include <boost/shared_ptr.hpp>
+#include <gflags/gflags.h>
+#include <glog/logging.h>
+
+#include <climits>
+#include <cmath>
+#include <fstream>  // NOLINT(readability/streams)
+#include <iostream>  // NOLINT(readability/streams)
+#include <map>
+#include <set>
+#include <sstream>
+#include <string>
+#include <utility>  // pair
+#include <vector>
+
+#include "caffe/util/device_alternate.hpp"
+
+// gflags 2.1 issue: namespace google was changed to gflags without warning.
+// Luckily we will be able to use GFLAGS_GFLAGS_H_ to detect if it is version
+// 2.1. If yes, we will add a temporary solution to redirect the namespace.
+// TODO(Yangqing): Once gflags solves the problem in a more elegant way, let's
+// remove the following hack.
+#ifndef GFLAGS_GFLAGS_H_
+namespace gflags = google;
+#endif  // GFLAGS_GFLAGS_H_
+
+// Disable the copy and assignment operator for a class.
+#define DISABLE_COPY_AND_ASSIGN(classname) \
+private:\
+  classname(const classname&);\
+  classname& operator=(const classname&)
+
+// Instantiate a class with float and double specifications.
+#define INSTANTIATE_CLASS(classname) \
+  char gInstantiationGuard##classname; \
+  template class classname<float>; \
+  template class classname<double>
+
+#define INSTANTIATE_LAYER_GPU_FORWARD(classname) \
+  template void classname<float>::Forward_gpu( \
+      const std::vector<Blob<float>*>& bottom, \
+      const std::vector<Blob<float>*>& top); \
+  template void classname<double>::Forward_gpu( \
+      const std::vector<Blob<double>*>& bottom, \
+      const std::vector<Blob<double>*>& top);
+
+#define INSTANTIATE_LAYER_GPU_BACKWARD(classname) \
+  template void classname<float>::Backward_gpu( \
+      const std::vector<Blob<float>*>& top, \
+      const std::vector<bool>& propagate_down, \
+      const std::vector<Blob<float>*>& bottom); \
+  template void classname<double>::Backward_gpu( \
+      const std::vector<Blob<double>*>& top, \
+      const std::vector<bool>& propagate_down, \
+      const std::vector<Blob<double>*>& bottom)
+
+#define INSTANTIATE_LAYER_GPU_FUNCS(classname) \
+  INSTANTIATE_LAYER_GPU_FORWARD(classname); \
+  INSTANTIATE_LAYER_GPU_BACKWARD(classname)
+
+// A simple macro to mark code that is not implemented, so that when the code
+// is executed we will see a fatal log.
+#define NOT_IMPLEMENTED LOG(FATAL) << "Not Implemented Yet"
+
+// See PR #1236
+namespace cv { class Mat; }
+
+namespace caffe {
+
+// We will use the boost shared_ptr instead of the new C++11 one mainly
+// because cuda does not work (at least now) well with C++11 features.
+using boost::shared_ptr;
+
+// Common functions and classes from std that caffe often uses.
+using std::fstream;
+using std::ios;
+using std::isnan;
+using std::isinf;
+using std::iterator;
+using std::make_pair;
+using std::map;
+using std::ostringstream;
+using std::pair;
+using std::set;
+using std::string;
+using std::stringstream;
+using std::vector;
+
+// A global initialization function that you should call in your main function.
+// Currently it initializes google flags and google logging.
+void GlobalInit(int* pargc, char*** pargv);
+
+// A singleton class to hold common caffe stuff, such as the handler that
+// caffe is going to use for cublas, curand, etc.
+class Caffe {
+ public:
+  ~Caffe();
+  inline static Caffe& Get() {
+    if (!singleton_.get()) {
+      singleton_.reset(new Caffe());
+    }
+    return *singleton_;
+  }
+  enum Brew { CPU, GPU };
+
+  // This random number generator facade hides boost and CUDA rng
+  // implementation from one another (for cross-platform compatibility).
+  class RNG {
+   public:
+    RNG();
+    explicit RNG(unsigned int seed);
+    explicit RNG(const RNG&);
+    RNG& operator=(const RNG&);
+    void* generator();
+   private:
+    class Generator;
+    shared_ptr<Generator> generator_;
+  };
+
+  // Getters for boost rng, curand, and cublas handles
+  inline static RNG& rng_stream() {
+    if (!Get().random_generator_) {
+      Get().random_generator_.reset(new RNG());
+    }
+    return *(Get().random_generator_);
+  }
+#ifndef CPU_ONLY
+  inline static cublasHandle_t cublas_handle() { return Get().cublas_handle_; }
+  inline static curandGenerator_t curand_generator() {
+    return Get().curand_generator_;
+  }
+#endif
+
+  // Returns the mode: running on CPU or GPU.
+  inline static Brew mode() { return Get().mode_; }
+  // The setters for the variables
+  // Sets the mode. It is recommended that you don't change the mode halfway
+  // through the program, since that may cause pinned memory allocated earlier
+  // to be freed in a non-pinned way, which may cause problems; I haven't
+  // verified it personally, but it is better to note it here in the header file.
+  inline static void set_mode(Brew mode) { Get().mode_ = mode; }
+  // Sets the random seed of both boost and curand
+  static void set_random_seed(const unsigned int seed);
+  // Sets the device. Since we have cublas and curand stuff, set device also
+  // requires us to reset those values.
+  static void SetDevice(const int device_id);
+  // Prints the current GPU status.
+  static void DeviceQuery();
+
+ protected:
+#ifndef CPU_ONLY
+  cublasHandle_t cublas_handle_;
+  curandGenerator_t curand_generator_;
+#endif
+  shared_ptr<RNG> random_generator_;
+
+  Brew mode_;
+  static shared_ptr<Caffe> singleton_;
+
+ private:
+  // The private constructor to avoid duplicate instantiation.
+  Caffe();
+
+  DISABLE_COPY_AND_ASSIGN(Caffe);
+};
+
+}  // namespace caffe
+
+#endif  // CAFFE_COMMON_HPP_
diff --git a/include/caffe/common_layers.hpp b/include/caffe/common_layers.hpp
new file mode 100644
index 0000000..d2c0ce6
--- /dev/null
+++ b/include/caffe/common_layers.hpp
@@ -0,0 +1,611 @@
+#ifndef CAFFE_COMMON_LAYERS_HPP_
+#define CAFFE_COMMON_LAYERS_HPP_
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/data_layers.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/loss_layers.hpp"
+#include "caffe/neuron_layers.hpp"
+#include "caffe/proto/caffe.pb.h"
+
+namespace caffe {
+
+/**
+ * @brief Compute the index of the @f$ K @f$ max values for each datum across
+ *        all dimensions @f$ (C \times H \times W) @f$.
+ *
+ * Intended for use after a classification layer to produce a prediction.
+ * If parameter out_max_val is set to true, output is a vector of pairs
+ * (max_ind, max_val) for each image.
+ *
+ * NOTE: does not implement Backwards operation.
+ */
+template <typename Dtype>
+class ArgMaxLayer : public Layer<Dtype> {
+ public:
+  /**
+   * @param param provides ArgMaxParameter argmax_param,
+   *     with ArgMaxLayer options:
+   *   - top_k (\b optional uint, default 1).
+   *     the number @f$ K @f$ of maximal items to output.
+   *   - out_max_val (\b optional bool, default false).
+   *     if set, output a vector of pairs (max_ind, max_val) for each image.
+   */
+  explicit ArgMaxLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "ArgMax"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (N \times 1 \times K \times 1) @f$ or, if out_max_val
+   *      @f$ (N \times 2 \times K \times 1) @f$
+   *      the computed outputs @f$
+   *       y_n = \arg\max\limits_i x_{ni}
+   *      @f$ (for @f$ K = 1 @f$).
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  /// @brief Not implemented (non-differentiable function)
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+    NOT_IMPLEMENTED;
+  }
+  bool out_max_val_;
+  size_t top_k_;
+};
+
+/**
+ * @brief Takes at least two Blob%s and concatenates them along either the num
+ *        or channel dimension, outputting the result.
+ */
+template <typename Dtype>
+class ConcatLayer : public Layer<Dtype> {
+ public:
+  explicit ConcatLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Concat"; }
+  virtual inline int MinBottomBlobs() const { return 2; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 2+)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x_1 @f$
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x_2 @f$
+   *   -# ...
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x_K @f$
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (KN \times C \times H \times W) @f$ if axis == 0, or
+   *      @f$ (N \times KC \times H \times W) @f$ if axis == 1:
+   *      the concatenated output @f$
+   *        y = [\begin{array}{cccc} x_1 & x_2 & ... & x_K \end{array}]
+   *      @f$
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the error gradient w.r.t. the concatenated inputs.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *        respect to the outputs
+   *   -# @f$ (KN \times C \times H \times W) @f$ if axis == 0, or
+   *      @f$ (N \times KC \times H \times W) @f$ if axis == 1:
+   *      containing error gradients @f$ \frac{\partial E}{\partial y} @f$
+   *      with respect to concatenated outputs @f$ y @f$
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length K), into which the top gradient
+   *        @f$ \frac{\partial E}{\partial y} @f$ is deconcatenated back to the
+   *        inputs @f$
+   *        \left[ \begin{array}{cccc}
+   *          \frac{\partial E}{\partial x_1} &
+   *          \frac{\partial E}{\partial x_2} &
+   *          ... &
+   *          \frac{\partial E}{\partial x_K}
+   *        \end{array} \right] =
+   *        \frac{\partial E}{\partial y}
+   *        @f$
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  int count_;
+  int num_concats_;
+  int concat_input_size_;
+  int concat_axis_;
+};
+
+/**
+ * @brief Compute elementwise operations, such as product and sum,
+ *        along multiple input Blobs.
+ *
+ * TODO(dox): thorough documentation for Forward, Backward, and proto params.
+ */
+template <typename Dtype>
+class EltwiseLayer : public Layer<Dtype> {
+ public:
+  explicit EltwiseLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Eltwise"; }
+  virtual inline int MinBottomBlobs() const { return 2; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  EltwiseParameter_EltwiseOp op_;
+  vector<Dtype> coeffs_;
+  Blob<int> max_idx_;
+
+  bool stable_prod_grad_;
+};
+
+/**
+ * @brief Takes two+ Blobs, interprets the last Blob as a selector, and
+ *  filters the remaining Blobs according to the selector data (0 means that
+ *  the corresponding item is filtered out, non-zero means that it is kept).
+ */
+template <typename Dtype>
+class FilterLayer : public Layer<Dtype> {
+ public:
+  explicit FilterLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Filter"; }
+  virtual inline int MinBottomBlobs() const { return 2; }
+  virtual inline int MinTopBlobs() const { return 1; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 2+)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs to be filtered @f$ x_1 @f$
+   *   -# ...
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs to be filtered @f$ x_K @f$
+   *   -# @f$ (N \times 1 \times 1 \times 1) @f$
+   *      the selector blob
+   * @param top output Blob vector (length 1+)
+   *   -# @f$ (S \times C \times H \times W) @f$
+   *        the filtered output @f$ x_1 @f$,
+   *        where S is the number of items that haven't been filtered
+   *   -# ...
+   *   -# @f$ (S \times C \times H \times W) @f$
+   *        the filtered output @f$ x_K @f$,
+   *        where S is the number of items that haven't been filtered
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the error gradient w.r.t. the forwarded inputs.
+   *
+   * @param top output Blob vector (length 1+), providing the error gradient with
+   *        respect to the outputs
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 2+), into which the top error
+   *        gradient is copied
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  bool first_reshape_;
+  vector<int> indices_to_forward_;
+};
+
+/**
+ * @brief Reshapes the input Blob into flat vectors.
+ *
+ * Note: because this layer does not change the input values -- merely the
+ * dimensions -- it can simply copy the input. The copy happens "virtually"
+ * (thus taking effectively 0 real time) by setting, in Forward, the data
+ * pointer of the top Blob to that of the bottom Blob (see Blob::ShareData),
+ * and in Backward, the diff pointer of the bottom Blob to that of the top Blob
+ * (see Blob::ShareDiff).
+ */
+template <typename Dtype>
+class FlattenLayer : public Layer<Dtype> {
+ public:
+  explicit FlattenLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Flatten"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (N \times CHW \times 1 \times 1) @f$
+   *      the outputs -- i.e., the (virtually) copied, flattened inputs
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the error gradient w.r.t. the flattened inputs.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *        respect to the outputs
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 1), into which the top error
+   *        gradient is (virtually) copied
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+};
+
+/**
+ * @brief Also known as a "fully-connected" layer, computes an inner product
+ *        with a set of learned weights, and (optionally) adds biases.
+ *
+ * TODO(dox): thorough documentation for Forward, Backward, and proto params.
+ */
+template <typename Dtype>
+class InnerProductLayer : public Layer<Dtype> {
+ public:
+  explicit InnerProductLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "InnerProduct"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  int M_;
+  int K_;
+  int N_;
+  bool bias_term_;
+  Blob<Dtype> bias_multiplier_;
+};
+
+/**
+ * @brief Normalizes the input to have 0-mean and/or unit (1) variance.
+ *
+ * TODO(dox): thorough documentation for Forward, Backward, and proto params.
+ */
+template <typename Dtype>
+class MVNLayer : public Layer<Dtype> {
+ public:
+  explicit MVNLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "MVN"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+     const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  Blob<Dtype> mean_, variance_, temp_;
+
+  /// sum_multiplier is used to carry out sum using BLAS
+  Blob<Dtype> sum_multiplier_;
+  Dtype eps_;
+};
+
+/*
+ * @brief Reshapes the input Blob into an arbitrary-sized output Blob.
+ *
+ * Note: similarly to FlattenLayer, this layer does not change the input values
+ * (see FlattenLayer, Blob::ShareData and Blob::ShareDiff).
+ */
+template <typename Dtype>
+class ReshapeLayer : public Layer<Dtype> {
+ public:
+  explicit ReshapeLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Reshape"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {}
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {}
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {}
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {}
+
+  /// @brief vector of axes indices whose dimensions we'll copy from the bottom
+  vector<int> copy_axes_;
+  /// @brief the index of the axis whose dimension we infer, or -1 if none
+  int inferred_axis_;
+  /// @brief the product of the "constant" output dimensions
+  int constant_count_;
+};
+
+/**
+ * @brief Compute "reductions" -- operations that return a scalar output Blob
+ *        for an input Blob of arbitrary size, such as the sum, absolute sum,
+ *        and sum of squares.
+ *
+ * TODO(dox): thorough documentation for Forward, Backward, and proto params.
+ */
+template <typename Dtype>
+class ReductionLayer : public Layer<Dtype> {
+ public:
+  explicit ReductionLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Reduction"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  /// @brief the reduction operation performed by the layer
+  ReductionParameter_ReductionOp op_;
+  /// @brief a scalar coefficient applied to all outputs
+  Dtype coeff_;
+  /// @brief the index of the first input axis to reduce
+  int axis_;
+  /// @brief the number of reductions performed
+  int num_;
+  /// @brief the input size of each reduction
+  int dim_;
+  /// @brief a helper Blob used for summation (op_ == SUM)
+  Blob<Dtype> sum_multiplier_;
+};
+
+/**
+ * @brief Ignores bottom blobs while producing no top blobs. (This is useful
+ *        to suppress outputs during testing.)
+ */
+template <typename Dtype>
+class SilenceLayer : public Layer<Dtype> {
+ public:
+  explicit SilenceLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {}
+
+  virtual inline const char* type() const { return "Silence"; }
+  virtual inline int MinBottomBlobs() const { return 1; }
+  virtual inline int ExactNumTopBlobs() const { return 0; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {}
+  // We can't define Forward_gpu here, since STUB_GPU will provide
+  // its own definition for CPU_ONLY mode.
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+};
+
+/**
+ * @brief Computes the softmax function.
+ *
+ * TODO(dox): thorough documentation for Forward, Backward, and proto params.
+ */
+template <typename Dtype>
+class SoftmaxLayer : public Layer<Dtype> {
+ public:
+  explicit SoftmaxLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Softmax"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  int outer_num_;
+  int inner_num_;
+  int softmax_axis_;
+  /// sum_multiplier_ is used to carry out summation using BLAS
+  Blob<Dtype> sum_multiplier_;
+  /// scale is an intermediate Blob to hold temporary results.
+  Blob<Dtype> scale_;
+};
+
+#ifdef USE_CUDNN
+/**
+ * @brief cuDNN implementation of SoftmaxLayer.
+ *        Falls back to SoftmaxLayer in CPU mode.
+ */
+template <typename Dtype>
+class CuDNNSoftmaxLayer : public SoftmaxLayer<Dtype> {
+ public:
+  explicit CuDNNSoftmaxLayer(const LayerParameter& param)
+      : SoftmaxLayer<Dtype>(param), handles_setup_(false) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual ~CuDNNSoftmaxLayer();
+
+ protected:
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  bool handles_setup_;
+  cudnnHandle_t             handle_;
+  cudnnTensorDescriptor_t bottom_desc_;
+  cudnnTensorDescriptor_t top_desc_;
+};
+#endif
+
+/**
+ * @brief Creates a "split" path in the network by copying the bottom Blob
+ *        into multiple top Blob%s to be used by multiple consuming layers.
+ *
+ * TODO(dox): thorough documentation for Forward, Backward, and proto params.
+ */
+template <typename Dtype>
+class SplitLayer : public Layer<Dtype> {
+ public:
+  explicit SplitLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Split"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int MinTopBlobs() const { return 1; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  int count_;
+};
+
+/**
+ * @brief Takes a Blob and slices it along a given dimension (e.g. the num or
+ *        channel axis), outputting multiple sliced Blob results.
+ *
+ * TODO(dox): thorough documentation for Forward, Backward, and proto params.
+ */
+template <typename Dtype>
+class SliceLayer : public Layer<Dtype> {
+ public:
+  explicit SliceLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Slice"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int MinTopBlobs() const { return 2; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  int count_;
+  int num_slices_;
+  int slice_size_;
+  int slice_axis_;
+  vector<int> slice_point_;
+};
+
+}  // namespace caffe
+
+#endif  // CAFFE_COMMON_LAYERS_HPP_
diff --git a/include/caffe/data_layers.hpp b/include/caffe/data_layers.hpp
new file mode 100644
index 0000000..3958cb7
--- /dev/null
+++ b/include/caffe/data_layers.hpp
@@ -0,0 +1,327 @@
+#ifndef CAFFE_DATA_LAYERS_HPP_
+#define CAFFE_DATA_LAYERS_HPP_
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "boost/scoped_ptr.hpp"
+#include "hdf5.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/data_transformer.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/internal_thread.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/db.hpp"
+
+namespace caffe {
+
+/**
+ * @brief Provides base for data layers that feed blobs to the Net.
+ *
+ * TODO(dox): thorough documentation for Forward and proto params.
+ */
+template <typename Dtype>
+class BaseDataLayer : public Layer<Dtype> {
+ public:
+  explicit BaseDataLayer(const LayerParameter& param);
+  // LayerSetUp: implements common data layer setup functionality, and calls
+  // DataLayerSetUp to do special data layer setup for individual layer types.
+  // This method may not be overridden except by the BasePrefetchingDataLayer.
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {}
+  // Data layers have no bottoms, so reshaping is trivial.
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {}
+
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {}
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {}
+
+ protected:
+  TransformationParameter transform_param_;
+  shared_ptr<DataTransformer<Dtype> > data_transformer_;
+  bool output_labels_;
+};
+
+template <typename Dtype>
+class BasePrefetchingDataLayer :
+    public BaseDataLayer<Dtype>, public InternalThread {
+ public:
+  explicit BasePrefetchingDataLayer(const LayerParameter& param)
+      : BaseDataLayer<Dtype>(param) {}
+  // LayerSetUp: implements common data layer setup functionality, and calls
+  // DataLayerSetUp to do special data layer setup for individual layer types.
+  // This method may not be overridden.
+  void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual void CreatePrefetchThread();
+  virtual void JoinPrefetchThread();
+  // The thread's function
+  virtual void InternalThreadEntry() {}
+
+ protected:
+  Blob<Dtype> prefetch_data_;
+  Blob<Dtype> prefetch_label_;
+  Blob<Dtype> transformed_data_;
+};
+
+template <typename Dtype>
+class DataLayer : public BasePrefetchingDataLayer<Dtype> {
+ public:
+  explicit DataLayer(const LayerParameter& param)
+      : BasePrefetchingDataLayer<Dtype>(param) {}
+  virtual ~DataLayer();
+  virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Data"; }
+  virtual inline int ExactNumBottomBlobs() const { return 0; }
+  virtual inline int MinTopBlobs() const { return 1; }
+  virtual inline int MaxTopBlobs() const { return 2; }
+
+ protected:
+  virtual void InternalThreadEntry();
+
+  shared_ptr<db::DB> db_;
+  shared_ptr<db::Cursor> cursor_;
+};
+
+/**
+ * @brief Provides data generated by a Filler to the Net.
+ *
+ * TODO(dox): thorough documentation for Forward and proto params.
+ */
+template <typename Dtype>
+class DummyDataLayer : public Layer<Dtype> {
+ public:
+  explicit DummyDataLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  // Data layers have no bottoms, so reshaping is trivial.
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {}
+
+  virtual inline const char* type() const { return "DummyData"; }
+  virtual inline int ExactNumBottomBlobs() const { return 0; }
+  virtual inline int MinTopBlobs() const { return 1; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {}
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {}
+
+  vector<shared_ptr<Filler<Dtype> > > fillers_;
+  vector<bool> refill_;
+};
+
+/**
+ * @brief Provides data to the Net from HDF5 files.
+ *
+ * TODO(dox): thorough documentation for Forward and proto params.
+ */
+template <typename Dtype>
+class HDF5DataLayer : public Layer<Dtype> {
+ public:
+  explicit HDF5DataLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual ~HDF5DataLayer();
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  // Data layers have no bottoms, so reshaping is trivial.
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {}
+
+  virtual inline const char* type() const { return "HDF5Data"; }
+  virtual inline int ExactNumBottomBlobs() const { return 0; }
+  virtual inline int MinTopBlobs() const { return 1; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {}
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {}
+  virtual void LoadHDF5FileData(const char* filename);
+
+  std::vector<std::string> hdf_filenames_;
+  unsigned int num_files_;
+  unsigned int current_file_;
+  hsize_t current_row_;
+  std::vector<shared_ptr<Blob<Dtype> > > hdf_blobs_;
+  std::vector<unsigned int> data_permutation_;
+  std::vector<unsigned int> file_permutation_;
+};
+
+/**
+ * @brief Writes blobs to disk as HDF5 files.
+ *
+ * TODO(dox): thorough documentation for Forward and proto params.
+ */
+template <typename Dtype>
+class HDF5OutputLayer : public Layer<Dtype> {
+ public:
+  explicit HDF5OutputLayer(const LayerParameter& param)
+      : Layer<Dtype>(param), file_opened_(false) {}
+  virtual ~HDF5OutputLayer();
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  // Data layers have no bottoms, so reshaping is trivial.
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {}
+
+  virtual inline const char* type() const { return "HDF5Output"; }
+  // TODO: no limit on the number of blobs
+  virtual inline int ExactNumBottomBlobs() const { return 2; }
+  virtual inline int ExactNumTopBlobs() const { return 0; }
+
+  inline std::string file_name() const { return file_name_; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void SaveBlobs();
+
+  bool file_opened_;
+  std::string file_name_;
+  hid_t file_id_;
+  Blob<Dtype> data_blob_;
+  Blob<Dtype> label_blob_;
+};
+
+/**
+ * @brief Provides data to the Net from image files.
+ *
+ * TODO(dox): thorough documentation for Forward and proto params.
+ */
+template <typename Dtype>
+class ImageDataLayer : public BasePrefetchingDataLayer<Dtype> {
+ public:
+  explicit ImageDataLayer(const LayerParameter& param)
+      : BasePrefetchingDataLayer<Dtype>(param) {}
+  virtual ~ImageDataLayer();
+  virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "ImageData"; }
+  virtual inline int ExactNumBottomBlobs() const { return 0; }
+  virtual inline int ExactNumTopBlobs() const { return 2; }
+
+ protected:
+  shared_ptr<Caffe::RNG> prefetch_rng_;
+  virtual void ShuffleImages();
+  virtual void InternalThreadEntry();
+
+  vector<std::pair<std::string, int> > lines_;
+  int lines_id_;
+};
+
+/**
+ * @brief Provides data to the Net from memory.
+ *
+ * TODO(dox): thorough documentation for Forward and proto params.
+ */
+template <typename Dtype>
+class MemoryDataLayer : public BaseDataLayer<Dtype> {
+ public:
+  explicit MemoryDataLayer(const LayerParameter& param)
+      : BaseDataLayer<Dtype>(param), has_new_data_(false) {}
+  virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "MemoryData"; }
+  virtual inline int ExactNumBottomBlobs() const { return 0; }
+  virtual inline int ExactNumTopBlobs() const { return 2; }
+
+  virtual void AddDatumVector(const vector<Datum>& datum_vector);
+  virtual void AddMatVector(const vector<cv::Mat>& mat_vector,
+      const vector<int>& labels);
+
+  // Reset should accept const pointers, but can't, because the memory
+  //  will be given to Blob, which is mutable
+  void Reset(Dtype* data, Dtype* label, int n);
+  void set_batch_size(int new_size);
+
+  int batch_size() { return batch_size_; }
+  int channels() { return channels_; }
+  int height() { return height_; }
+  int width() { return width_; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  int batch_size_, channels_, height_, width_, size_;
+  Dtype* data_;
+  Dtype* labels_;
+  int n_;
+  size_t pos_;
+  Blob<Dtype> added_data_;
+  Blob<Dtype> added_label_;
+  bool has_new_data_;
+};
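+
+/*
+ * A minimal usage sketch for MemoryDataLayer (illustration only): the pointer
+ * `md_layer` and the buffers `input` and `labels` are hypothetical names, and
+ * the buffers must stay alive while the network consumes them.
+ * \code
+ * MemoryDataLayer<float>* md_layer = ...;  // e.g. fetched from a Net
+ * const int n = md_layer->batch_size();
+ * std::vector<float> input(n * md_layer->channels() *
+ *                          md_layer->height() * md_layer->width());
+ * std::vector<float> labels(n);
+ * md_layer->Reset(&input[0], &labels[0], n);  // the layer keeps raw pointers
+ * \endcode
+ */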
+
+/**
+ * @brief Provides data to the Net from windows of image files, specified
+ *        by a window data file.
+ *
+ * TODO(dox): thorough documentation for Forward and proto params.
+ */
+template <typename Dtype>
+class WindowDataLayer : public BasePrefetchingDataLayer<Dtype> {
+ public:
+  explicit WindowDataLayer(const LayerParameter& param)
+      : BasePrefetchingDataLayer<Dtype>(param) {}
+  virtual ~WindowDataLayer();
+  virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "WindowData"; }
+  virtual inline int ExactNumBottomBlobs() const { return 0; }
+  virtual inline int ExactNumTopBlobs() const { return 2; }
+
+ protected:
+  virtual unsigned int PrefetchRand();
+  virtual void InternalThreadEntry();
+
+  shared_ptr<Caffe::RNG> prefetch_rng_;
+  vector<std::pair<std::string, vector<int> > > image_database_;
+  enum WindowField { IMAGE_INDEX, LABEL, OVERLAP, X1, Y1, X2, Y2, NUM };
+  vector<vector<float> > fg_windows_;
+  vector<vector<float> > bg_windows_;
+  Blob<Dtype> data_mean_;
+  vector<Dtype> mean_values_;
+  bool has_mean_file_;
+  bool has_mean_values_;
+  bool cache_images_;
+  vector<std::pair<std::string, Datum > > image_database_cache_;
+};
+
+}  // namespace caffe
+
+#endif  // CAFFE_DATA_LAYERS_HPP_
diff --git a/include/caffe/data_transformer.hpp b/include/caffe/data_transformer.hpp
new file mode 100644
index 0000000..0ad68c8
--- /dev/null
+++ b/include/caffe/data_transformer.hpp
@@ -0,0 +1,151 @@
+#ifndef CAFFE_DATA_TRANSFORMER_HPP
+#define CAFFE_DATA_TRANSFORMER_HPP
+
+#include <vector>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+
+namespace caffe {
+
+/**
+ * @brief Applies common transformations to the input data, such as
+ * scaling, mirroring, and subtracting the image mean.
+ */
+template <typename Dtype>
+class DataTransformer {
+ public:
+  explicit DataTransformer(const TransformationParameter& param, Phase phase);
+  virtual ~DataTransformer() {}
+
+  /**
+   * @brief Initialize the random number generator if needed by the
+   *    transformation.
+   */
+  void InitRand();
+
+  /**
+   * @brief Applies the transformation defined in the data layer's
+   * transform_param block to the data.
+   *
+   * @param datum
+   *    Datum containing the data to be transformed.
+   * @param transformed_blob
+   *    This is the destination blob. It can be part of the top blob's data if
+   *    set_cpu_data() is used. See data_layer.cpp for an example.
+   */
+  void Transform(const Datum& datum, Blob<Dtype>* transformed_blob);
+
+  /**
+   * @brief Applies the transformation defined in the data layer's
+   * transform_param block to a vector of Datum.
+   *
+   * @param datum_vector
+   *    A vector of Datum containing the data to be transformed.
+   * @param transformed_blob
+   *    This is the destination blob. It can be part of the top blob's data if
+   *    set_cpu_data() is used. See memory_layer.cpp for an example.
+   */
+  void Transform(const vector<Datum> & datum_vector,
+                Blob<Dtype>* transformed_blob);
+
+  /**
+   * @brief Applies the transformation defined in the data layer's
+   * transform_param block to a vector of Mat.
+   *
+   * @param mat_vector
+   *    A vector of Mat containing the data to be transformed.
+   * @param transformed_blob
+   *    This is the destination blob. It can be part of the top blob's data if
+   *    set_cpu_data() is used. See memory_layer.cpp for an example.
+   */
+  void Transform(const vector<cv::Mat> & mat_vector,
+                Blob<Dtype>* transformed_blob);
+
+  /**
+   * @brief Applies the transformation defined in the data layer's
+   * transform_param block to a cv::Mat
+   *
+   * @param cv_img
+   *    cv::Mat containing the data to be transformed.
+   * @param transformed_blob
+   *    This is the destination blob. It can be part of the top blob's data if
+   *    set_cpu_data() is used. See image_data_layer.cpp for an example.
+   */
+  void Transform(const cv::Mat& cv_img, Blob<Dtype>* transformed_blob);
+
+  /**
+   * @brief Applies the same transformation defined in the data layer's
+   * transform_param block to all the num images in an input_blob.
+   *
+   * @param input_blob
+   *    A Blob containing the data to be transformed. It applies the same
+   *    transformation to all the num images in the blob.
+   * @param transformed_blob
+   *    This is the destination blob; it will contain as many images as the
+   *    input blob. It can be part of the top blob's data.
+   */
+  void Transform(Blob<Dtype>* input_blob, Blob<Dtype>* transformed_blob);
+
+  /**
+   * @brief Infers the shape the transformed_blob will have when
+   *    the transformation is applied to the data.
+   *
+   * @param datum
+   *    Datum containing the data to be transformed.
+   */
+  vector<int> InferBlobShape(const Datum& datum);
+  /**
+   * @brief Infers the shape the transformed_blob will have when
+   *    the transformation is applied to the data.
+   *    It uses the first element to infer the shape of the blob.
+   *
+   * @param datum_vector
+   *    A vector of Datum containing the data to be transformed.
+   */
+  vector<int> InferBlobShape(const vector<Datum> & datum_vector);
+  /**
+   * @brief Infers the shape the transformed_blob will have when
+   *    the transformation is applied to the data.
+   *    It uses the first element to infer the shape of the blob.
+   *
+   * @param mat_vector
+   *    A vector of Mat containing the data to be transformed.
+   */
+  vector<int> InferBlobShape(const vector<cv::Mat> & mat_vector);
+  /**
+   * @brief Infers the shape the transformed_blob will have when
+   *    the transformation is applied to the data.
+   *
+   * @param cv_img
+   *    cv::Mat containing the data to be transformed.
+   */
+  vector<int> InferBlobShape(const cv::Mat& cv_img);
+
+ protected:
+  /**
+   * @brief Generates a random integer from Uniform({0, 1, ..., n-1}).
+   *
+   * @param n
+   *    The upper bound (exclusive) of the random number.
+   * @return
+   *    A uniformly random integer value from {0, 1, ..., n-1}.
+   */
+  virtual int Rand(int n);
+
+  void Transform(const Datum& datum, Dtype* transformed_data);
+  // Transformation parameters
+  TransformationParameter param_;
+
+
+  shared_ptr<Caffe::RNG> rng_;
+  Phase phase_;
+  Blob<Dtype> data_mean_;
+  vector<Dtype> mean_values_;
+};
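+
+/*
+ * A short usage sketch (illustration only), assuming a populated Datum `datum`
+ * and a TransformationParameter `param` are available:
+ * \code
+ * DataTransformer<float> transformer(param, TEST);
+ * transformer.InitRand();
+ * Blob<float> transformed;
+ * transformed.Reshape(transformer.InferBlobShape(datum));
+ * transformer.Transform(datum, &transformed);
+ * \endcode
+ */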
+
+}  // namespace caffe
+
+#endif  // CAFFE_DATA_TRANSFORMER_HPP_
+
diff --git a/include/caffe/filler.hpp b/include/caffe/filler.hpp
new file mode 100644
index 0000000..888f4a4
--- /dev/null
+++ b/include/caffe/filler.hpp
@@ -0,0 +1,296 @@
+// Fillers are random number generators that fill a blob using the specified
+// algorithm. The expectation is that they will only be used at initialization
+// time and will not involve any GPUs.
+
+#ifndef CAFFE_FILLER_HPP
+#define CAFFE_FILLER_HPP
+
+#include <string>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/syncedmem.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+/// @brief Fills a Blob with constant or randomly-generated data.
+template <typename Dtype>
+class Filler {
+ public:
+  explicit Filler(const FillerParameter& param) : filler_param_(param) {}
+  virtual ~Filler() {}
+  virtual void Fill(Blob<Dtype>* blob) = 0;
+ protected:
+  FillerParameter filler_param_;
+};  // class Filler
+
+
+/// @brief Fills a Blob with a constant value @f$ x = c @f$, where @f$ c @f$ is
+///        given by the FillerParameter's value field (default 0).
+template <typename Dtype>
+class ConstantFiller : public Filler<Dtype> {
+ public:
+  explicit ConstantFiller(const FillerParameter& param)
+      : Filler<Dtype>(param) {}
+  virtual void Fill(Blob<Dtype>* blob) {
+    Dtype* data = blob->mutable_cpu_data();
+    const int count = blob->count();
+    const Dtype value = this->filler_param_.value();
+    CHECK(count);
+    for (int i = 0; i < count; ++i) {
+      data[i] = value;
+    }
+    CHECK_EQ(this->filler_param_.sparse(), -1)
+         << "Sparsity not supported by this Filler.";
+  }
+};
+
+/// @brief Fills a Blob with uniformly distributed values @f$ x\sim U(a, b) @f$.
+template <typename Dtype>
+class UniformFiller : public Filler<Dtype> {
+ public:
+  explicit UniformFiller(const FillerParameter& param)
+      : Filler<Dtype>(param) {}
+  virtual void Fill(Blob<Dtype>* blob) {
+    CHECK(blob->count());
+    caffe_rng_uniform<Dtype>(blob->count(), Dtype(this->filler_param_.min()),
+        Dtype(this->filler_param_.max()), blob->mutable_cpu_data());
+    CHECK_EQ(this->filler_param_.sparse(), -1)
+         << "Sparsity not supported by this Filler.";
+  }
+};
+
+/// @brief Fills a Blob with Gaussian-distributed values
+///        @f$ x \sim N(\mu, \sigma^2) @f$.
+template <typename Dtype>
+class GaussianFiller : public Filler<Dtype> {
+ public:
+  explicit GaussianFiller(const FillerParameter& param)
+      : Filler<Dtype>(param) {}
+  virtual void Fill(Blob<Dtype>* blob) {
+    Dtype* data = blob->mutable_cpu_data();
+    CHECK(blob->count());
+    caffe_rng_gaussian<Dtype>(blob->count(), Dtype(this->filler_param_.mean()),
+        Dtype(this->filler_param_.std()), blob->mutable_cpu_data());
+    int sparse = this->filler_param_.sparse();
+    CHECK_GE(sparse, -1);
+    if (sparse >= 0) {
+      // Sparse initialization is implemented for "weight" blobs; i.e. matrices.
+      // These have num == channels == 1; width is number of inputs; height is
+      // number of outputs.  The 'sparse' variable specifies the mean number
+      // of non-zero input weights for a given output.
+      CHECK_GE(blob->num_axes(), 1);
+      const int num_outputs = blob->shape(0);
+      Dtype non_zero_probability = Dtype(sparse) / Dtype(num_outputs);
+      rand_vec_.reset(new SyncedMemory(blob->count() * sizeof(int)));
+      int* mask = reinterpret_cast<int*>(rand_vec_->mutable_cpu_data());
+      caffe_rng_bernoulli(blob->count(), non_zero_probability, mask);
+      for (int i = 0; i < blob->count(); ++i) {
+        data[i] *= mask[i];
+      }
+    }
+  }
+
+ protected:
+  shared_ptr<SyncedMemory> rand_vec_;
+};
+
+/** @brief Fills a Blob with values @f$ x \in [0, 1] @f$
+ *         such that @f$ \forall i \sum_j x_{ij} = 1 @f$.
+ */
+template <typename Dtype>
+class PositiveUnitballFiller : public Filler<Dtype> {
+ public:
+  explicit PositiveUnitballFiller(const FillerParameter& param)
+      : Filler<Dtype>(param) {}
+  virtual void Fill(Blob<Dtype>* blob) {
+    Dtype* data = blob->mutable_cpu_data();
+    DCHECK(blob->count());
+    caffe_rng_uniform<Dtype>(blob->count(), 0, 1, blob->mutable_cpu_data());
+    // We expect the filler to not be called very frequently, so we will
+    // just use a simple implementation
+    int dim = blob->count() / blob->num();
+    CHECK(dim);
+    for (int i = 0; i < blob->num(); ++i) {
+      Dtype sum = 0;
+      for (int j = 0; j < dim; ++j) {
+        sum += data[i * dim + j];
+      }
+      for (int j = 0; j < dim; ++j) {
+        data[i * dim + j] /= sum;
+      }
+    }
+    CHECK_EQ(this->filler_param_.sparse(), -1)
+         << "Sparsity not supported by this Filler.";
+  }
+};
+
+/**
+ * @brief Fills a Blob with values @f$ x \sim U(-a, +a) @f$ where @f$ a @f$ is
+ *        set inversely proportional to the number of incoming nodes, outgoing
+ *        nodes, or their average.
+ *
+ * A Filler based on the paper [Bengio and Glorot 2010]: Understanding
+ * the difficulty of training deep feedforward neural networks.
+ *
+ * It fills the incoming matrix by randomly sampling uniform data from [-scale,
+ * scale] where scale = sqrt(3 / n) where n is the fan_in, fan_out, or their
+ * average, depending on the variance_norm option. You should make sure the
+ * input blob has shape (num, a, b, c) where a * b * c = fan_in and num * b * c
+ * = fan_out. Note that this is currently not the case for inner product layers.
+ *
+ * TODO(dox): make notation in above comment consistent with rest & use LaTeX.
+ */
+template <typename Dtype>
+class XavierFiller : public Filler<Dtype> {
+ public:
+  explicit XavierFiller(const FillerParameter& param)
+      : Filler<Dtype>(param) {}
+  virtual void Fill(Blob<Dtype>* blob) {
+    CHECK(blob->count());
+    int fan_in = blob->count() / blob->num();
+    int fan_out = blob->count() / blob->channels();
+    Dtype n = fan_in;  // default to fan_in
+    if (this->filler_param_.variance_norm() ==
+        FillerParameter_VarianceNorm_AVERAGE) {
+      n = (fan_in + fan_out) / Dtype(2);
+    } else if (this->filler_param_.variance_norm() ==
+        FillerParameter_VarianceNorm_FAN_OUT) {
+      n = fan_out;
+    }
+    Dtype scale = sqrt(Dtype(3) / n);
+    caffe_rng_uniform<Dtype>(blob->count(), -scale, scale,
+        blob->mutable_cpu_data());
+    CHECK_EQ(this->filler_param_.sparse(), -1)
+         << "Sparsity not supported by this Filler.";
+  }
+};
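+
+/*
+ * Worked example of the scale above (illustration only): for a convolution
+ * weight blob of shape (num=64, channels=3, h=3, w=3), fan_in = 3 * 3 * 3 = 27,
+ * so with the default FAN_IN normalization a = sqrt(3 / 27) = 1/3 and the
+ * weights are drawn from U(-1/3, +1/3).
+ */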
+
+/**
+ * @brief Fills a Blob with values @f$ x \sim N(0, \sigma^2) @f$ where
+ *        @f$ \sigma^2 @f$ is set inversely proportional to the number of incoming
+ *        nodes, outgoing nodes, or their average.
+ *
+ * A Filler based on the paper [He, Zhang, Ren and Sun 2015], which
+ * specifically accounts for ReLU nonlinearities.
+ *
+ * Aside: for another perspective on the scaling factor, see the derivation of
+ * [Saxe, McClelland, and Ganguli 2013 (v3)].
+ *
+ * It fills the incoming matrix by randomly sampling Gaussian data with std =
+ * sqrt(2 / n) where n is the fan_in, fan_out, or their average, depending on
+ * the variance_norm option. You should make sure the input blob has shape (num,
+ * a, b, c) where a * b * c = fan_in and num * b * c = fan_out. Note that this
+ * is currently not the case for inner product layers.
+ */
+template <typename Dtype>
+class MSRAFiller : public Filler<Dtype> {
+ public:
+  explicit MSRAFiller(const FillerParameter& param)
+      : Filler<Dtype>(param) {}
+  virtual void Fill(Blob<Dtype>* blob) {
+    CHECK(blob->count());
+    int fan_in = blob->count() / blob->num();
+    int fan_out = blob->count() / blob->channels();
+    Dtype n = fan_in;  // default to fan_in
+    if (this->filler_param_.variance_norm() ==
+        FillerParameter_VarianceNorm_AVERAGE) {
+      n = (fan_in + fan_out) / Dtype(2);
+    } else if (this->filler_param_.variance_norm() ==
+        FillerParameter_VarianceNorm_FAN_OUT) {
+      n = fan_out;
+    }
+    Dtype std = sqrt(Dtype(2) / n);
+    caffe_rng_gaussian<Dtype>(blob->count(), Dtype(0), std,
+        blob->mutable_cpu_data());
+    CHECK_EQ(this->filler_param_.sparse(), -1)
+         << "Sparsity not supported by this Filler.";
+  }
+};
+
+/*!
+@brief Fills a Blob with coefficients for bilinear interpolation.
+
+A common use case is with the DeconvolutionLayer acting as upsampling.
+You can upsample a feature map with shape of (B, C, H, W) by any integer factor
+using the following proto.
+\code
+layer {
+  name: "upsample", type: "Deconvolution"
+  bottom: "{{bottom_name}}" top: "{{top_name}}"
+  convolution_param {
+    kernel_size: {{2 * factor - factor % 2}} stride: {{factor}}
+    num_output: {{C}} group: {{C}}
+    pad: {{ceil((factor - 1) / 2.)}}
+    weight_filler: { type: "bilinear" } bias_term: false
+  }
+  param { lr_mult: 0 decay_mult: 0 }
+}
+\endcode
+Please use this by replacing `{{}}` with your values. By specifying
+`num_output: {{C}} group: {{C}}`, it behaves as
+channel-wise convolution. The filter shape of this deconvolution layer will be
+(C, 1, K, K) where K is `kernel_size`, and this filler will set a (K, K)
+interpolation kernel for every channel of the filter identically. The resulting
+shape of the top feature map will be (B, C, factor * H, factor * W).
+Note that the learning rate and the
+weight decay are set to 0 in order to keep the bilinear interpolation
+coefficients unchanged during training. If you apply this to an image, this
+operation is equivalent to the following call in Python with scikit-image.
+\code{.py}
+out = skimage.transform.rescale(img, factor, mode='constant', cval=0)
+\endcode
+ */
+template <typename Dtype>
+class BilinearFiller : public Filler<Dtype> {
+ public:
+  explicit BilinearFiller(const FillerParameter& param)
+      : Filler<Dtype>(param) {}
+  virtual void Fill(Blob<Dtype>* blob) {
+    CHECK_EQ(blob->num_axes(), 4) << "Blob must be 4 dim.";
+    CHECK_EQ(blob->width(), blob->height()) << "Filter must be square";
+    Dtype* data = blob->mutable_cpu_data();
+    int f = ceil(blob->width() / 2.);
+    float c = (2 * f - 1 - f % 2) / (2. * f);
+    for (int i = 0; i < blob->count(); ++i) {
+      float x = i % blob->width();
+      float y = (i / blob->width()) % blob->height();
+      data[i] = (1 - fabs(x / f - c)) * (1 - fabs(y / f - c));
+    }
+    CHECK_EQ(this->filler_param_.sparse(), -1)
+         << "Sparsity not supported by this Filler.";
+  }
+};
+
+/**
+ * @brief Get a specific filler from the specification given in FillerParameter.
+ *
+ * Ideally this would be replaced by a factory pattern, but we will leave it
+ * this way for now.
+ */
+template <typename Dtype>
+Filler<Dtype>* GetFiller(const FillerParameter& param) {
+  const std::string& type = param.type();
+  if (type == "constant") {
+    return new ConstantFiller<Dtype>(param);
+  } else if (type == "gaussian") {
+    return new GaussianFiller<Dtype>(param);
+  } else if (type == "positive_unitball") {
+    return new PositiveUnitballFiller<Dtype>(param);
+  } else if (type == "uniform") {
+    return new UniformFiller<Dtype>(param);
+  } else if (type == "xavier") {
+    return new XavierFiller<Dtype>(param);
+  } else if (type == "msra") {
+    return new MSRAFiller<Dtype>(param);
+  } else if (type == "bilinear") {
+    return new BilinearFiller<Dtype>(param);
+  } else {
+    CHECK(false) << "Unknown filler name: " << param.type();
+  }
+  return (Filler<Dtype>*)(NULL);
+}
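+
+/*
+ * A minimal sketch of GetFiller in use (illustration only; the blob shape is an
+ * arbitrary example):
+ * \code
+ * FillerParameter filler_param;
+ * filler_param.set_type("xavier");
+ * shared_ptr<Filler<float> > filler(GetFiller<float>(filler_param));
+ * Blob<float> weights(64, 3, 3, 3);  // (num, channels, height, width)
+ * filler->Fill(&weights);
+ * \endcode
+ */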
+
+}  // namespace caffe
+
+#endif  // CAFFE_FILLER_HPP_
diff --git a/include/caffe/internal_thread.hpp b/include/caffe/internal_thread.hpp
new file mode 100644
index 0000000..815ca54
--- /dev/null
+++ b/include/caffe/internal_thread.hpp
@@ -0,0 +1,42 @@
+#ifndef CAFFE_INTERNAL_THREAD_HPP_
+#define CAFFE_INTERNAL_THREAD_HPP_
+
+#include "caffe/common.hpp"
+
+/**
+ Forward declare boost::thread instead of including boost/thread.hpp
+ to avoid boost/NVCC issues (#1009, #1010) on OSX.
+ */
+namespace boost { class thread; }
+
+namespace caffe {
+
+/**
+ * A virtual class encapsulating boost::thread for use as a base class.
+ * A child class acquires the ability to run a single internal thread
+ * by reimplementing the virtual function InternalThreadEntry.
+ */
+class InternalThread {
+ public:
+  InternalThread() : thread_() {}
+  virtual ~InternalThread();
+
+  /** Returns true if the thread was successfully started. */
+  bool StartInternalThread();
+
+  /** Will not return until the internal thread has exited. */
+  bool WaitForInternalThreadToExit();
+
+  bool is_started() const;
+
+ protected:
+  /* Implement this method in your subclass
+      with the code you want your thread to run. */
+  virtual void InternalThreadEntry() {}
+
+  shared_ptr<boost::thread> thread_;
+};
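+
+/*
+ * A minimal subclass sketch (illustration only; the class name PrefetchWorker
+ * is hypothetical):
+ * \code
+ * class PrefetchWorker : public InternalThread {
+ *  protected:
+ *   virtual void InternalThreadEntry() {
+ *     // Work placed here runs on the internal boost::thread.
+ *   }
+ * };
+ *
+ * PrefetchWorker worker;
+ * CHECK(worker.StartInternalThread());
+ * // ... other work on the main thread ...
+ * worker.WaitForInternalThreadToExit();
+ * \endcode
+ */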
+
+}  // namespace caffe
+
+#endif  // CAFFE_INTERNAL_THREAD_HPP_
diff --git a/include/caffe/layer.hpp b/include/caffe/layer.hpp
new file mode 100644
index 0000000..8f924a7
--- /dev/null
+++ b/include/caffe/layer.hpp
@@ -0,0 +1,470 @@
+#ifndef CAFFE_LAYER_H_
+#define CAFFE_LAYER_H_
+
+#include <algorithm>
+#include <string>
+#include <vector>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/layer_factory.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/device_alternate.hpp"
+
+namespace caffe {
+
+/**
+ * @brief An interface for the units of computation which can be composed into a
+ *        Net.
+ *
+ * Layer%s must implement a Forward function, in which they take their input
+ * (bottom) Blob%s (if any) and compute their output Blob%s (if any).
+ * They may also implement a Backward function, in which they compute the error
+ * gradients with respect to their input Blob%s, given the error gradients
+ * with respect to their output Blob%s.
+ */
+template <typename Dtype>
+class Layer {
+ public:
+  /**
+   * You should not implement your own constructor. Any setup code should go
+   * in SetUp(), where the dimensions of the bottom blobs are provided to the
+   * layer.
+   */
+  explicit Layer(const LayerParameter& param)
+    : layer_param_(param) {
+      // Set phase and copy blobs (if there are any).
+      phase_ = param.phase();
+      if (layer_param_.blobs_size() > 0) {
+        blobs_.resize(layer_param_.blobs_size());
+        for (int i = 0; i < layer_param_.blobs_size(); ++i) {
+          blobs_[i].reset(new Blob<Dtype>());
+          blobs_[i]->FromProto(layer_param_.blobs(i));
+        }
+      }
+    }
+  virtual ~Layer() {}
+
+  /**
+   * @brief Implements common layer setup functionality.
+   *
+   * @param bottom the preshaped input blobs
+   * @param top
+   *     the allocated but unshaped output blobs, to be shaped by Reshape
+   *
+   * Checks that the number of bottom and top blobs is correct.
+   * Calls LayerSetUp to do special layer setup for individual layer types,
+   * followed by Reshape to set up sizes of top blobs and internal buffers.
+   * Sets up the loss weight multiplier blobs for any non-zero loss weights.
+   * This method may not be overridden.
+   */
+  void SetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+    CheckBlobCounts(bottom, top);
+    LayerSetUp(bottom, top);
+    Reshape(bottom, top);
+    SetLossWeights(top);
+  }
+
+  /**
+   * @brief Does layer-specific setup: your layer should implement this function
+   *        as well as Reshape.
+   *
+   * @param bottom
+   *     the preshaped input blobs, whose data fields store the input data for
+   *     this layer
+   * @param top
+   *     the allocated but unshaped output blobs
+   *
+   * This method should do one-time layer specific setup. This includes reading
+   * and processing relevant parameters from the <code>layer_param_</code>.
+   * Setting up the shapes of top blobs and internal buffers should be done in
+   * <code>Reshape</code>, which will be called before the forward pass to
+   * adjust the top blob sizes.
+   */
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {}
+
+  /**
+   * @brief Adjust the shapes of top blobs and internal buffers to accommodate
+   *        the shapes of the bottom blobs.
+   *
+   * @param bottom the input blobs, with the requested input shapes
+   * @param top the top blobs, which should be reshaped as needed
+   *
+   * This method should reshape top blobs as needed according to the shapes
+   * of the bottom (input) blobs, as well as reshaping any internal buffers
+   * and making any other necessary adjustments so that the layer can
+   * accommodate the bottom blobs.
+   */
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) = 0;
+
+  /**
+   * @brief Given the bottom blobs, compute the top blobs and the loss.
+   *
+   * @param bottom
+   *     the input blobs, whose data fields store the input data for this layer
+   * @param top
+   *     the preshaped output blobs, whose data fields will store this layer's
+   *     outputs
+   * \return The total loss from the layer.
+   *
+   * The Forward wrapper calls the relevant device wrapper function
+   * (Forward_cpu or Forward_gpu) to compute the top blob values given the
+   * bottom blobs.  If the layer has any non-zero loss_weights, the wrapper
+   * then computes and returns the loss.
+   *
+   * Your layer should implement Forward_cpu and (optionally) Forward_gpu.
+   */
+  inline Dtype Forward(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Given the top blob error gradients, compute the bottom blob error
+   *        gradients.
+   *
+   * @param top
+   *     the output blobs, whose diff fields store the gradient of the error
+   *     with respect to themselves
+   * @param propagate_down
+   *     a vector with equal length to bottom, with each index indicating
+   *     whether to propagate the error gradients down to the bottom blob at
+   *     the corresponding index
+   * @param bottom
+   *     the input blobs, whose diff fields will store the gradient of the error
+   *     with respect to themselves after Backward is run
+   *
+   * The Backward wrapper calls the relevant device wrapper function
+   * (Backward_cpu or Backward_gpu) to compute the bottom blob diffs given the
+   * top blob diffs.
+   *
+   * Your layer should implement Backward_cpu and (optionally) Backward_gpu.
+   */
+  inline void Backward(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down,
+      const vector<Blob<Dtype>*>& bottom);
+
+  /**
+   * @brief Returns the vector of learnable parameter blobs.
+   */
+  vector<shared_ptr<Blob<Dtype> > >& blobs() {
+    return blobs_;
+  }
+
+  /**
+   * @brief Returns the layer parameter.
+   */
+  const LayerParameter& layer_param() const { return layer_param_; }
+
+  /**
+   * @brief Writes the layer parameter to a protocol buffer
+   */
+  virtual void ToProto(LayerParameter* param, bool write_diff = false);
+
+  /**
+   * @brief Returns the scalar loss associated with a top blob at a given index.
+   */
+  inline Dtype loss(const int top_index) const {
+    return (loss_.size() > top_index) ? loss_[top_index] : Dtype(0);
+  }
+
+  /**
+   * @brief Sets the loss associated with a top blob at a given index.
+   */
+  inline void set_loss(const int top_index, const Dtype value) {
+    if (loss_.size() <= top_index) {
+      loss_.resize(top_index + 1, Dtype(0));
+    }
+    loss_[top_index] = value;
+  }
+
+  /**
+   * @brief Returns the layer type.
+   */
+  virtual inline const char* type() const { return ""; }
+
+  /**
+   * @brief Returns the exact number of bottom blobs required by the layer,
+   *        or -1 if no exact number is required.
+   *
+   * This method should be overridden to return a non-negative value if your
+   * layer expects some exact number of bottom blobs.
+   */
+  virtual inline int ExactNumBottomBlobs() const { return -1; }
+  /**
+   * @brief Returns the minimum number of bottom blobs required by the layer,
+   *        or -1 if no minimum number is required.
+   *
+   * This method should be overridden to return a non-negative value if your
+   * layer expects some minimum number of bottom blobs.
+   */
+  virtual inline int MinBottomBlobs() const { return -1; }
+  /**
+   * @brief Returns the maximum number of bottom blobs required by the layer,
+   *        or -1 if no maximum number is required.
+   *
+   * This method should be overridden to return a non-negative value if your
+   * layer expects some maximum number of bottom blobs.
+   */
+  virtual inline int MaxBottomBlobs() const { return -1; }
+  /**
+   * @brief Returns the exact number of top blobs required by the layer,
+   *        or -1 if no exact number is required.
+   *
+   * This method should be overridden to return a non-negative value if your
+   * layer expects some exact number of top blobs.
+   */
+  virtual inline int ExactNumTopBlobs() const { return -1; }
+  /**
+   * @brief Returns the minimum number of top blobs required by the layer,
+   *        or -1 if no minimum number is required.
+   *
+   * This method should be overridden to return a non-negative value if your
+   * layer expects some minimum number of top blobs.
+   */
+  virtual inline int MinTopBlobs() const { return -1; }
+  /**
+   * @brief Returns the maximum number of top blobs required by the layer,
+   *        or -1 if no maximum number is required.
+   *
+   * This method should be overridden to return a non-negative value if your
+   * layer expects some maximum number of top blobs.
+   */
+  virtual inline int MaxTopBlobs() const { return -1; }
+  /**
+   * @brief Returns true if the layer requires an equal number of bottom and
+   *        top blobs.
+   *
+   * This method should be overridden to return true if your layer expects an
+   * equal number of bottom and top blobs.
+   */
+  virtual inline bool EqualNumBottomTopBlobs() const { return false; }
+
+  /**
+   * @brief Return whether "anonymous" top blobs are created automatically
+   *        by the layer.
+   *
+   * If this method returns true, Net::Init will create enough "anonymous" top
+   * blobs to fulfill the requirement specified by ExactNumTopBlobs() or
+   * MinTopBlobs().
+   */
+  virtual inline bool AutoTopBlobs() const { return false; }
+
+  /**
+   * @brief Return whether to allow force_backward for a given bottom blob
+   *        index.
+   *
+   * If AllowForceBackward(i) == false, we will ignore the force_backward
+   * setting and backpropagate to blob i only if it needs gradient information
+   * (as is done when force_backward == false).
+   */
+  virtual inline bool AllowForceBackward(const int bottom_index) const {
+    return true;
+  }
+
+  /**
+   * @brief Specifies whether the layer should compute gradients w.r.t. a
+   *        parameter at a particular index given by param_id.
+   *
+   * You can safely ignore false values and always compute gradients
+   * for all parameters, but possibly with wasteful computation.
+   */
+  inline bool param_propagate_down(const int param_id) {
+    return (param_propagate_down_.size() > param_id) ?
+        param_propagate_down_[param_id] : false;
+  }
+  /**
+   * @brief Sets whether the layer should compute gradients w.r.t. a
+   *        parameter at a particular index given by param_id.
+   */
+  inline void set_param_propagate_down(const int param_id, const bool value) {
+    if (param_propagate_down_.size() <= param_id) {
+      param_propagate_down_.resize(param_id + 1, true);
+    }
+    param_propagate_down_[param_id] = value;
+  }
+
+
+ protected:
+  /** The protobuf that stores the layer parameters */
+  LayerParameter layer_param_;
+  /** The phase: TRAIN or TEST */
+  Phase phase_;
+  /** The vector that stores the learnable parameters as a set of blobs. */
+  vector<shared_ptr<Blob<Dtype> > > blobs_;
+  /** Vector indicating whether to compute the diff of each param blob. */
+  vector<bool> param_propagate_down_;
+
+  /** The vector that indicates whether each top blob has a non-zero weight in
+   *  the objective function. */
+  vector<Dtype> loss_;
+
+  /** @brief Using the CPU device, compute the layer output. */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) = 0;
+  /**
+   * @brief Using the GPU device, compute the layer output.
+   *        Fall back to Forward_cpu() if unavailable.
+   */
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+    // LOG(WARNING) << "Using CPU code as backup.";
+    return Forward_cpu(bottom, top);
+  }
+
+  /**
+   * @brief Using the CPU device, compute the gradients for any parameters and
+   *        for the bottom blobs if propagate_down is true.
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down,
+      const vector<Blob<Dtype>*>& bottom) = 0;
+  /**
+   * @brief Using the GPU device, compute the gradients for any parameters and
+   *        for the bottom blobs if propagate_down is true.
+   *        Fall back to Backward_cpu() if unavailable.
+   */
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down,
+      const vector<Blob<Dtype>*>& bottom) {
+    // LOG(WARNING) << "Using CPU code as backup.";
+    Backward_cpu(top, propagate_down, bottom);
+  }
+
+  /**
+   * Called by the parent Layer's SetUp to check that the number of bottom
+   * and top Blobs provided as input match the expected numbers specified by
+   * the {ExactNum,Min,Max}{Bottom,Top}Blobs() functions.
+   */
+  virtual void CheckBlobCounts(const vector<Blob<Dtype>*>& bottom,
+                               const vector<Blob<Dtype>*>& top) {
+    if (ExactNumBottomBlobs() >= 0) {
+      CHECK_EQ(ExactNumBottomBlobs(), bottom.size())
+          << type() << " Layer takes " << ExactNumBottomBlobs()
+          << " bottom blob(s) as input.";
+    }
+    if (MinBottomBlobs() >= 0) {
+      CHECK_LE(MinBottomBlobs(), bottom.size())
+          << type() << " Layer takes at least " << MinBottomBlobs()
+          << " bottom blob(s) as input.";
+    }
+    if (MaxBottomBlobs() >= 0) {
+      CHECK_GE(MaxBottomBlobs(), bottom.size())
+          << type() << " Layer takes at most " << MaxBottomBlobs()
+          << " bottom blob(s) as input.";
+    }
+    if (ExactNumTopBlobs() >= 0) {
+      CHECK_EQ(ExactNumTopBlobs(), top.size())
+          << type() << " Layer produces " << ExactNumTopBlobs()
+          << " top blob(s) as output.";
+    }
+    if (MinTopBlobs() >= 0) {
+      CHECK_LE(MinTopBlobs(), top.size())
+          << type() << " Layer produces at least " << MinTopBlobs()
+          << " top blob(s) as output.";
+    }
+    if (MaxTopBlobs() >= 0) {
+      CHECK_GE(MaxTopBlobs(), top.size())
+          << type() << " Layer produces at most " << MaxTopBlobs()
+          << " top blob(s) as output.";
+    }
+    if (EqualNumBottomTopBlobs()) {
+      CHECK_EQ(bottom.size(), top.size())
+          << type() << " Layer produces one top blob as output for each "
+          << "bottom blob input.";
+    }
+  }
+
+  /**
+   * Called by SetUp to initialize the weights associated with any top blobs in
+   * the loss function. Store non-zero loss weights in the diff blob.
+   */
+  inline void SetLossWeights(const vector<Blob<Dtype>*>& top) {
+    const int num_loss_weights = layer_param_.loss_weight_size();
+    if (num_loss_weights) {
+      CHECK_EQ(top.size(), num_loss_weights) << "loss_weight must be "
+          "unspecified or specified once per top blob.";
+      for (int top_id = 0; top_id < top.size(); ++top_id) {
+        const Dtype loss_weight = layer_param_.loss_weight(top_id);
+        if (loss_weight == Dtype(0)) { continue; }
+        this->set_loss(top_id, loss_weight);
+        const int count = top[top_id]->count();
+        Dtype* loss_multiplier = top[top_id]->mutable_cpu_diff();
+        caffe_set(count, loss_weight, loss_multiplier);
+      }
+    }
+  }
+
+  DISABLE_COPY_AND_ASSIGN(Layer);
+};  // class Layer
+
+// Forward and backward wrappers. You should implement the cpu and
+// gpu specific implementations instead, and should not change these
+// functions.
+template <typename Dtype>
+inline Dtype Layer<Dtype>::Forward(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  Dtype loss = 0;
+  Reshape(bottom, top);
+  switch (Caffe::mode()) {
+  case Caffe::CPU:
+    Forward_cpu(bottom, top);
+    for (int top_id = 0; top_id < top.size(); ++top_id) {
+      if (!this->loss(top_id)) { continue; }
+      const int count = top[top_id]->count();
+      const Dtype* data = top[top_id]->cpu_data();
+      const Dtype* loss_weights = top[top_id]->cpu_diff();
+      loss += caffe_cpu_dot(count, data, loss_weights);
+    }
+    break;
+  case Caffe::GPU:
+    Forward_gpu(bottom, top);
+#ifndef CPU_ONLY
+    for (int top_id = 0; top_id < top.size(); ++top_id) {
+      if (!this->loss(top_id)) { continue; }
+      const int count = top[top_id]->count();
+      const Dtype* data = top[top_id]->gpu_data();
+      const Dtype* loss_weights = top[top_id]->gpu_diff();
+      Dtype blob_loss = 0;
+      caffe_gpu_dot(count, data, loss_weights, &blob_loss);
+      loss += blob_loss;
+    }
+#endif
+    break;
+  default:
+    LOG(FATAL) << "Unknown caffe mode.";
+  }
+  return loss;
+}
+
+template <typename Dtype>
+inline void Layer<Dtype>::Backward(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  switch (Caffe::mode()) {
+  case Caffe::CPU:
+    Backward_cpu(top, propagate_down, bottom);
+    break;
+  case Caffe::GPU:
+    Backward_gpu(top, propagate_down, bottom);
+    break;
+  default:
+    LOG(FATAL) << "Unknown caffe mode.";
+  }
+}
+
+// Serialize LayerParameter to protocol buffer
+template <typename Dtype>
+void Layer<Dtype>::ToProto(LayerParameter* param, bool write_diff) {
+  param->Clear();
+  param->CopyFrom(layer_param_);
+  param->clear_blobs();
+  for (int i = 0; i < blobs_.size(); ++i) {
+    blobs_[i]->ToProto(param->add_blobs(), write_diff);
+  }
+}
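+
+/*
+ * A compact sketch of the calling sequence described above (illustration only),
+ * assuming `layer`, `bottom` and `top` have been created elsewhere:
+ * \code
+ * layer->SetUp(bottom, top);  // CheckBlobCounts, LayerSetUp, Reshape,
+ *                             // SetLossWeights
+ * float loss = layer->Forward(bottom, top);
+ * vector<bool> propagate_down(bottom.size(), true);
+ * layer->Backward(top, propagate_down, bottom);
+ * \endcode
+ */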
+
+}  // namespace caffe
+
+#endif  // CAFFE_LAYER_H_
diff --git a/include/caffe/layer_factory.hpp b/include/caffe/layer_factory.hpp
new file mode 100644
index 0000000..2fcd938
--- /dev/null
+++ b/include/caffe/layer_factory.hpp
@@ -0,0 +1,127 @@
+/**
+ * @brief A layer factory that allows one to register layers.
+ * At runtime, registered layers can be created by passing a LayerParameter
+ * protobuf to the CreateLayer function:
+ *
+ *     LayerRegistry<Dtype>::CreateLayer(param);
+ *
+ * There are two ways to register a layer. Assuming that we have a layer like:
+ *
+ *   template <typename Dtype>
+ *   class MyAwesomeLayer : public Layer<Dtype> {
+ *     // your implementations
+ *   };
+ *
+ * and its type is its C++ class name, but without the "Layer" at the end
+ * ("MyAwesomeLayer" -> "MyAwesome").
+ *
+ * If the layer is going to be created simply by its constructor, in your C++
+ * file, add the following line:
+ *
+ *    REGISTER_LAYER_CLASS(MyAwesome);
+ *
+ * Or, if the layer is going to be created by another creator function, in the
+ * format of:
+ *
+ *    template <typename Dtype>
+ *    shared_ptr<Layer<Dtype> > GetMyAwesomeLayer(const LayerParameter& param) {
+ *      // your implementation
+ *    }
+ *
+ * (for example, when your layer has multiple backends, see GetConvolutionLayer
+ * for a use case), then you can register the creator function instead, like
+ *
+ *    REGISTER_LAYER_CREATOR(MyAwesome, GetMyAwesomeLayer)
+ *
+ * Note that each layer type should only be registered once.
+ */
+
+#ifndef CAFFE_LAYER_FACTORY_H_
+#define CAFFE_LAYER_FACTORY_H_
+
+#include <map>
+#include <string>
+
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+
+namespace caffe {
+
+template <typename Dtype>
+class Layer;
+
+template <typename Dtype>
+class LayerRegistry {
+ public:
+  typedef shared_ptr<Layer<Dtype> > (*Creator)(const LayerParameter&);
+  typedef std::map<string, Creator> CreatorRegistry;
+
+  static CreatorRegistry& Registry() {
+    static CreatorRegistry* g_registry_ = new CreatorRegistry();
+    return *g_registry_;
+  }
+
+  // Adds a creator.
+  static void AddCreator(const string& type, Creator creator) {
+    CreatorRegistry& registry = Registry();
+    CHECK_EQ(registry.count(type), 0)
+        << "Layer type " << type << " already registered.";
+    registry[type] = creator;
+  }
+
+  // Get a layer using a LayerParameter.
+  static shared_ptr<Layer<Dtype> > CreateLayer(const LayerParameter& param) {
+    LOG(INFO) << "Creating layer " << param.name();
+    const string& type = param.type();
+    CreatorRegistry& registry = Registry();
+    CHECK_EQ(registry.count(type), 1) << "Unknown layer type: " << type
+        << " (known types: " << LayerTypeList() << ")";
+    return registry[type](param);
+  }
+
+ private:
+  // Layer registry should never be instantiated - everything is done with its
+  // static variables.
+  LayerRegistry() {}
+
+  static string LayerTypeList() {
+    CreatorRegistry& registry = Registry();
+    string layer_types;
+    for (typename CreatorRegistry::iterator iter = registry.begin();
+         iter != registry.end(); ++iter) {
+      if (iter != registry.begin()) {
+        layer_types += ", ";
+      }
+      layer_types += iter->first;
+    }
+    return layer_types;
+  }
+};
+
+
+template <typename Dtype>
+class LayerRegisterer {
+ public:
+  LayerRegisterer(const string& type,
+                  shared_ptr<Layer<Dtype> > (*creator)(const LayerParameter&)) {
+    // LOG(INFO) << "Registering layer type: " << type;
+    LayerRegistry<Dtype>::AddCreator(type, creator);
+  }
+};
+
+
+#define REGISTER_LAYER_CREATOR(type, creator)                                  \
+  static LayerRegisterer<float> g_creator_f_##type(#type, creator<float>);     \
+  static LayerRegisterer<double> g_creator_d_##type(#type, creator<double>)    \
+
+#define REGISTER_LAYER_CLASS(type)                                             \
+  template <typename Dtype>                                                    \
+  shared_ptr<Layer<Dtype> > Creator_##type##Layer(const LayerParameter& param) \
+  {                                                                            \
+    return shared_ptr<Layer<Dtype> >(new type##Layer<Dtype>(param));           \
+  }                                                                            \
+  REGISTER_LAYER_CREATOR(type, Creator_##type##Layer)
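+
+/*
+ * A short end-to-end sketch (illustration only): registering a hypothetical
+ * MyAwesomeLayer and creating an instance through the registry.
+ * \code
+ * // In my_awesome_layer.cpp:
+ * REGISTER_LAYER_CLASS(MyAwesome);
+ *
+ * // At runtime:
+ * LayerParameter param;
+ * param.set_type("MyAwesome");
+ * shared_ptr<Layer<float> > layer = LayerRegistry<float>::CreateLayer(param);
+ * \endcode
+ */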
+
+}  // namespace caffe
+
+#endif  // CAFFE_LAYER_FACTORY_H_
diff --git a/include/caffe/loss_layers.hpp b/include/caffe/loss_layers.hpp
new file mode 100644
index 0000000..86c3424
--- /dev/null
+++ b/include/caffe/loss_layers.hpp
@@ -0,0 +1,768 @@
+#ifndef CAFFE_LOSS_LAYERS_HPP_
+#define CAFFE_LOSS_LAYERS_HPP_
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/neuron_layers.hpp"
+#include "caffe/proto/caffe.pb.h"
+
+namespace caffe {
+
+const float kLOG_THRESHOLD = 1e-20;
+
+/**
+ * @brief Computes the classification accuracy for a one-of-many
+ *        classification task.
+ */
+template <typename Dtype>
+class AccuracyLayer : public Layer<Dtype> {
+ public:
+  /**
+   * @param param provides AccuracyParameter accuracy_param,
+   *     with AccuracyLayer options:
+   *   - top_k (\b optional, default 1).
+   *     Sets the maximum rank @f$ k @f$ at which a prediction is considered
+   *     correct.  For example, if @f$ k = 5 @f$, a prediction is counted
+   *     correct if the correct label is among the top 5 predicted labels.
+   */
+  explicit AccuracyLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Accuracy"; }
+  virtual inline int ExactNumBottomBlobs() const { return 2; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 2)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the predictions @f$ x @f$, a Blob with values in
+   *      @f$ [-\infty, +\infty] @f$ indicating the predicted score for each of
+   *      the @f$ K = CHW @f$ classes. Each @f$ x_n @f$ is mapped to a predicted
+   *      label @f$ \hat{l}_n @f$ given by its maximal index:
+   *      @f$ \hat{l}_n = \arg\max\limits_k x_{nk} @f$
+   *   -# @f$ (N \times 1 \times 1 \times 1) @f$
+   *      the labels @f$ l @f$, an integer-valued Blob with values
+   *      @f$ l_n \in [0, 1, 2, ..., K - 1] @f$
+   *      indicating the correct class label among the @f$ K @f$ classes
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+   *      the computed accuracy: @f$
+   *        \frac{1}{N} \sum\limits_{n=1}^N \delta\{ \hat{l}_n = l_n \}
+   *      @f$, where @f$
+   *      \delta\{\mathrm{condition}\} = \left\{
+   *         \begin{array}{lr}
+   *            1 & \mbox{if condition} \\
+   *            0 & \mbox{otherwise}
+   *         \end{array} \right.
+   *      @f$
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+
+  /// @brief Not implemented -- AccuracyLayer cannot be used as a loss.
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+    for (int i = 0; i < propagate_down.size(); ++i) {
+      if (propagate_down[i]) { NOT_IMPLEMENTED; }
+    }
+  }
+
+  int label_axis_, outer_num_, inner_num_;
+
+  int top_k_;
+
+  /// Whether to ignore instances with a certain label.
+  bool has_ignore_label_;
+  /// The label indicating that an instance should be ignored.
+  int ignore_label_;
+};
+
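
To make the top_k option above concrete, here is the accuracy definition restated as a small standalone function (plain C++, not Caffe code; counting ties in favour of the prediction is an assumption of this sketch):

    #include <cstddef>
    #include <vector>

    // Fraction of samples whose true label is among the top_k highest scores.
    double TopKAccuracy(const std::vector<std::vector<float> >& scores,
                        const std::vector<int>& labels, int top_k) {
      int correct = 0;
      for (std::size_t n = 0; n < scores.size(); ++n) {
        const float true_score = scores[n][labels[n]];
        int higher = 0;  // classes scored strictly above the true class
        for (std::size_t k = 0; k < scores[n].size(); ++k) {
          if (scores[n][k] > true_score) ++higher;
        }
        if (higher < top_k) ++correct;
      }
      return static_cast<double>(correct) / scores.size();
    }
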
+/**
+ * @brief An interface for Layer%s that take two Blob%s as input -- usually
+ *        (1) predictions and (2) ground-truth labels -- and output a
+ *        singleton Blob representing the loss.
+ *
+ * LossLayers are typically only capable of backpropagating to their first input
+ * -- the predictions.
+ */
+template <typename Dtype>
+class LossLayer : public Layer<Dtype> {
+ public:
+  explicit LossLayer(const LayerParameter& param)
+     : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(
+      const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(
+      const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top);
+
+  virtual inline int ExactNumBottomBlobs() const { return 2; }
+
+  /**
+   * @brief For convenience and backwards compatibility, instruct the Net to
+   *        automatically allocate a single top Blob for LossLayers, into which
+   *        they output their singleton loss, (even if the user didn't specify
+   *        one in the prototxt, etc.).
+   */
+  virtual inline bool AutoTopBlobs() const { return true; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+  /**
+   * We usually cannot backpropagate to the labels; ignore force_backward for
+   * these inputs.
+   */
+  virtual inline bool AllowForceBackward(const int bottom_index) const {
+    return bottom_index != 1;
+  }
+};
+
+/**
+ * @brief Computes the contrastive loss @f$
+ *          E = \frac{1}{2N} \sum\limits_{n=1}^N \left(y\right) d +
+ *              \left(1-y\right) \max \left(margin-d, 0\right)
+ *          @f$ where @f$
+ *          d = \left| \left| a_n - b_n \right| \right|_2^2 @f$. This can be
+ *          used to train siamese networks.
+ *
+ * @param bottom input Blob vector (length 3)
+ *   -# @f$ (N \times C \times 1 \times 1) @f$
+ *      the features @f$ a \in [-\infty, +\infty]@f$
+ *   -# @f$ (N \times C \times 1 \times 1) @f$
+ *      the features @f$ b \in [-\infty, +\infty]@f$
+ *   -# @f$ (N \times 1 \times 1 \times 1) @f$
+ *      the binary similarity @f$ s \in [0, 1]@f$
+ * @param top output Blob vector (length 1)
+ *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+ *      the computed contrastive loss: @f$ E =
+ *          \frac{1}{2N} \sum\limits_{n=1}^N \left(y\right) d +
+ *          \left(1-y\right) \max \left(margin-d, 0\right)
+ *          @f$ where @f$
+ *          d = \left| \left| a_n - b_n \right| \right|_2^2 @f$.
+ * This can be used to train siamese networks.
+ */
+template <typename Dtype>
+class ContrastiveLossLayer : public LossLayer<Dtype> {
+ public:
+  explicit ContrastiveLossLayer(const LayerParameter& param)
+      : LossLayer<Dtype>(param), diff_() {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline int ExactNumBottomBlobs() const { return 3; }
+  virtual inline const char* type() const { return "ContrastiveLoss"; }
+  /**
+   * Unlike most loss layers, in the ContrastiveLossLayer we can backpropagate
+   * to the first two inputs.
+   */
+  virtual inline bool AllowForceBackward(const int bottom_index) const {
+    return bottom_index != 2;
+  }
+
+ protected:
+  /// @copydoc ContrastiveLossLayer
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the Contrastive error gradient w.r.t. the inputs.
+   *
+   * Computes the gradients with respect to the two input vectors (bottom[0] and
+   * bottom[1]), but not the similarity label (bottom[2]).
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+   *      This Blob's diff will simply contain the loss_weight* @f$ \lambda @f$,
+   *      as @f$ \lambda @f$ is the coefficient of this layer's output
+   *      @f$\ell_i@f$ in the overall Net loss
+   *      @f$ E = \lambda_i \ell_i + \mbox{other loss terms}@f$; hence
+   *      @f$ \frac{\partial E}{\partial \ell_i} = \lambda_i @f$.
+   *      (*Assuming that this top Blob is not used as a bottom (input) by any
+   *      other layer of the Net.)
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 2)
+   *   -# @f$ (N \times C \times 1 \times 1) @f$
+   *      the features @f$a@f$; Backward fills their diff with
+   *      gradients if propagate_down[0]
+   *   -# @f$ (N \times C \times 1 \times 1) @f$
+   *      the features @f$b@f$; Backward fills their diff with gradients if
+   *      propagate_down[1]
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  Blob<Dtype> diff_;  // cached for backward pass
+  Blob<Dtype> dist_sq_;  // cached for backward pass
+  Blob<Dtype> diff_sq_;  // tmp storage for gpu forward pass
+  Blob<Dtype> summer_vec_;  // tmp storage for gpu forward pass
+};
+
+/**
+ * @brief Computes the Euclidean (L2) loss @f$
+ *          E = \frac{1}{2N} \sum\limits_{n=1}^N \left| \left| \hat{y}_n - y_n
+ *        \right| \right|_2^2 @f$ for real-valued regression tasks.
+ *
+ * @param bottom input Blob vector (length 2)
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the predictions @f$ \hat{y} \in [-\infty, +\infty]@f$
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the targets @f$ y \in [-\infty, +\infty]@f$
+ * @param top output Blob vector (length 1)
+ *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+ *      the computed Euclidean loss: @f$ E =
+ *          \frac{1}{2N} \sum\limits_{n=1}^N \left| \left| \hat{y}_n - y_n
+ *        \right| \right|_2^2 @f$
+ *
+ * This can be used for least-squares regression tasks.  An InnerProductLayer
+ * input to a EuclideanLossLayer exactly formulates a linear least squares
+ * regression problem. With non-zero weight decay the problem becomes one of
+ * ridge regression -- see src/caffe/test/test_sgd_solver.cpp for a concrete
+ * example wherein we check that the gradients computed for a Net with exactly
+ * this structure match hand-computed gradient formulas for ridge regression.
+ *
+ * (Note: Caffe, and SGD in general, is certainly \b not the best way to solve
+ * linear least squares problems! We use it only as an instructive example.)
+ */
+template <typename Dtype>
+class EuclideanLossLayer : public LossLayer<Dtype> {
+ public:
+  explicit EuclideanLossLayer(const LayerParameter& param)
+      : LossLayer<Dtype>(param), diff_() {}
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "EuclideanLoss"; }
+  /**
+   * Unlike most loss layers, in the EuclideanLossLayer we can backpropagate
+   * to both inputs -- override to return true and always allow force_backward.
+   */
+  virtual inline bool AllowForceBackward(const int bottom_index) const {
+    return true;
+  }
+
+ protected:
+  /// @copydoc EuclideanLossLayer
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the Euclidean error gradient w.r.t. the inputs.
+   *
+   * Unlike other children of LossLayer, EuclideanLossLayer \b can compute
+   * gradients with respect to the label inputs bottom[1] (though it will only
+   * do so if propagate_down[1] is set, e.g. when the labels are produced by
+   * learnable parameters or when force_backward is set). In fact, this layer
+   * is "commutative" -- the result is the same regardless of the order of the
+   * two bottoms.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+   *      This Blob's diff will simply contain the loss_weight* @f$ \lambda @f$,
+   *      as @f$ \lambda @f$ is the coefficient of this layer's output
+   *      @f$\ell_i@f$ in the overall Net loss
+   *      @f$ E = \lambda_i \ell_i + \mbox{other loss terms}@f$; hence
+   *      @f$ \frac{\partial E}{\partial \ell_i} = \lambda_i @f$.
+   *      (*Assuming that this top Blob is not used as a bottom (input) by any
+   *      other layer of the Net.)
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 2)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the predictions @f$\hat{y}@f$; Backward fills their diff with
+   *      gradients @f$
+   *        \frac{\partial E}{\partial \hat{y}} =
+   *            \frac{1}{n} \sum\limits_{n=1}^N (\hat{y}_n - y_n)
+   *      @f$ if propagate_down[0]
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the targets @f$y@f$; Backward fills their diff with gradients
+   *      @f$ \frac{\partial E}{\partial y} =
+   *          \frac{1}{n} \sum\limits_{n=1}^N (y_n - \hat{y}_n)
+   *      @f$ if propagate_down[1]
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  Blob<Dtype> diff_;
+};
+
+/**
+ * @brief Computes the hinge loss for a one-of-many classification task.
+ *
+ * @param bottom input Blob vector (length 2)
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the predictions @f$ t @f$, a Blob with values in
+ *      @f$ [-\infty, +\infty] @f$ indicating the predicted score for each of
+ *      the @f$ K = CHW @f$ classes. In an SVM, @f$ t @f$ is the result of
+ *      taking the inner product @f$ X^T W @f$ of the D-dimensional features
+ *      @f$ X \in \mathcal{R}^{D \times N} @f$ and the learned hyperplane
+ *      parameters @f$ W \in \mathcal{R}^{D \times K} @f$, so a Net with just
+ *      an InnerProductLayer (with num_output = @f$ K @f$) providing predictions
+ *      to a HingeLossLayer and no other learnable parameters or losses is
+ *      equivalent to an SVM.
+ *   -# @f$ (N \times 1 \times 1 \times 1) @f$
+ *      the labels @f$ l @f$, an integer-valued Blob with values
+ *      @f$ l_n \in [0, 1, 2, ..., K - 1] @f$
+ *      indicating the correct class label among the @f$ K @f$ classes
+ * @param top output Blob vector (length 1)
+ *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+ *      the computed hinge loss: @f$ E =
+ *        \frac{1}{N} \sum\limits_{n=1}^N \sum\limits_{k=1}^K
+ *        [\max(0, 1 - \delta\{l_n = k\} t_{nk})] ^ p
+ *      @f$, for the @f$ L^p @f$ norm
+ *      (defaults to @f$ p = 1 @f$, the L1 norm; L2 norm, as in L2-SVM,
+ *      is also available), and @f$
+ *      \delta\{\mathrm{condition}\} = \left\{
+ *         \begin{array}{lr}
+ *            1 & \mbox{if condition} \\
+ *           -1 & \mbox{otherwise}
+ *         \end{array} \right.
+ *      @f$
+ *
+ * In an SVM, @f$ t \in \mathcal{R}^{N \times K} @f$ is the result of taking
+ * the inner product @f$ X^T W @f$ of the features
+ * @f$ X \in \mathcal{R}^{D \times N} @f$
+ * and the learned hyperplane parameters
+ * @f$ W \in \mathcal{R}^{D \times K} @f$. So, a Net with just an
+ * InnerProductLayer (with num_output = @f$ K @f$) providing predictions to a
+ * HingeLossLayer is equivalent to an SVM (assuming it has no other learned
+ * parameters outside the InnerProductLayer and no other losses outside the
+ * HingeLossLayer).
+ */
+template <typename Dtype>
+class HingeLossLayer : public LossLayer<Dtype> {
+ public:
+  explicit HingeLossLayer(const LayerParameter& param)
+      : LossLayer<Dtype>(param) {}
+
+  virtual inline const char* type() const { return "HingeLoss"; }
+
+ protected:
+  /// @copydoc HingeLossLayer
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the hinge loss error gradient w.r.t. the predictions.
+   *
+   * Gradients cannot be computed with respect to the label inputs (bottom[1]),
+   * so this method ignores bottom[1] and requires !propagate_down[1], crashing
+   * if propagate_down[1] is set.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+   *      This Blob's diff will simply contain the loss_weight* @f$ \lambda @f$,
+   *      as @f$ \lambda @f$ is the coefficient of this layer's output
+   *      @f$\ell_i@f$ in the overall Net loss
+   *      @f$ E = \lambda_i \ell_i + \mbox{other loss terms}@f$; hence
+   *      @f$ \frac{\partial E}{\partial \ell_i} = \lambda_i @f$.
+   *      (*Assuming that this top Blob is not used as a bottom (input) by any
+   *      other layer of the Net.)
+   * @param propagate_down see Layer::Backward.
+   *      propagate_down[1] must be false as we can't compute gradients with
+   *      respect to the labels.
+   * @param bottom input Blob vector (length 2)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the predictions @f$t@f$; Backward computes diff
+   *      @f$ \frac{\partial E}{\partial t} @f$
+   *   -# @f$ (N \times 1 \times 1 \times 1) @f$
+   *      the labels -- ignored as we can't compute their error gradients
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+};
+
+/**
+ * @brief A generalization of MultinomialLogisticLossLayer that takes an
+ *        "information gain" (infogain) matrix specifying the "value" of all label
+ *        pairs.
+ *
+ * Equivalent to the MultinomialLogisticLossLayer if the infogain matrix is the
+ * identity.
+ *
+ * @param bottom input Blob vector (length 2-3)
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the predictions @f$ \hat{p} @f$, a Blob with values in
+ *      @f$ [0, 1] @f$ indicating the predicted probability of each of the
+ *      @f$ K = CHW @f$ classes.  Each prediction vector @f$ \hat{p}_n @f$
+ *      should sum to 1 as in a probability distribution: @f$
+ *      \forall n \sum\limits_{k=1}^K \hat{p}_{nk} = 1 @f$.
+ *   -# @f$ (N \times 1 \times 1 \times 1) @f$
+ *      the labels @f$ l @f$, an integer-valued Blob with values
+ *      @f$ l_n \in [0, 1, 2, ..., K - 1] @f$
+ *      indicating the correct class label among the @f$ K @f$ classes
+ *   -# @f$ (1 \times 1 \times K \times K) @f$
+ *      (\b optional) the infogain matrix @f$ H @f$.  This must be provided as
+ *      the third bottom blob input if not provided as the infogain_mat in the
+ *      InfogainLossParameter. If @f$ H = I @f$, this layer is equivalent to the
+ *      MultinomialLogisticLossLayer.
+ * @param top output Blob vector (length 1)
+ *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+ *      the computed infogain multinomial logistic loss: @f$ E =
+ *        \frac{-1}{N} \sum\limits_{n=1}^N H_{l_n} \log(\hat{p}_n) =
+ *        \frac{-1}{N} \sum\limits_{n=1}^N \sum\limits_{k=1}^{K} H_{l_n,k}
+ *        \log(\hat{p}_{n,k})
+ *      @f$, where @f$ H_{l_n} @f$ denotes row @f$l_n@f$ of @f$H@f$.
+ */
+template <typename Dtype>
+class InfogainLossLayer : public LossLayer<Dtype> {
+ public:
+  explicit InfogainLossLayer(const LayerParameter& param)
+      : LossLayer<Dtype>(param), infogain_() {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  // InfogainLossLayer takes 2-3 bottom Blobs; if there are 3 the third should
+  // be the infogain matrix.  (Otherwise the infogain matrix is loaded from a
+  // file specified by LayerParameter.)
+  virtual inline int ExactNumBottomBlobs() const { return -1; }
+  virtual inline int MinBottomBlobs() const { return 2; }
+  virtual inline int MaxBottomBlobs() const { return 3; }
+
+  virtual inline const char* type() const { return "InfogainLoss"; }
+
+ protected:
+  /// @copydoc InfogainLossLayer
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the infogain loss error gradient w.r.t. the predictions.
+   *
+   * Gradients cannot be computed with respect to the label inputs (bottom[1]),
+   * so this method ignores bottom[1] and requires !propagate_down[1], crashing
+   * if propagate_down[1] is set. (The same applies to the infogain matrix, if
+   * provided as bottom[2] rather than in the layer_param.)
+   *
+   * @param top output Blob vector (length 1), providing the error gradient
+   *      with respect to the outputs
+   *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+   *      This Blob's diff will simply contain the loss_weight* @f$ \lambda @f$,
+   *      as @f$ \lambda @f$ is the coefficient of this layer's output
+   *      @f$\ell_i@f$ in the overall Net loss
+   *      @f$ E = \lambda_i \ell_i + \mbox{other loss terms}@f$; hence
+   *      @f$ \frac{\partial E}{\partial \ell_i} = \lambda_i @f$.
+   *      (*Assuming that this top Blob is not used as a bottom (input) by any
+   *      other layer of the Net.)
+   * @param propagate_down see Layer::Backward.
+   *      propagate_down[1] must be false as we can't compute gradients with
+   *      respect to the labels (similarly for propagate_down[2] and the
+   *      infogain matrix, if provided as bottom[2])
+   * @param bottom input Blob vector (length 2-3)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the predictions @f$ \hat{p} @f$; Backward computes diff
+   *      @f$ \frac{\partial E}{\partial \hat{p}} @f$
+   *   -# @f$ (N \times 1 \times 1 \times 1) @f$
+   *      the labels -- ignored as we can't compute their error gradients
+   *   -# @f$ (1 \times 1 \times K \times K) @f$
+   *      (\b optional) the information gain matrix -- ignored as its error
+   *      gradient computation is not implemented.
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  Blob<Dtype> infogain_;
+};
+
+/**
+ * @brief Computes the multinomial logistic loss for a one-of-many
+ *        classification task, directly taking a predicted probability
+ *        distribution as input.
+ *
+ * When predictions are not already a probability distribution, you should
+ * instead use the SoftmaxWithLossLayer, which maps predictions to a
+ * distribution using the SoftmaxLayer, before computing the multinomial
+ * logistic loss. The SoftmaxWithLossLayer should be preferred over separate
+ * SoftmaxLayer + MultinomialLogisticLossLayer
+ * as its gradient computation is more numerically stable.
+ *
+ * @param bottom input Blob vector (length 2)
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the predictions @f$ \hat{p} @f$, a Blob with values in
+ *      @f$ [0, 1] @f$ indicating the predicted probability of each of the
+ *      @f$ K = CHW @f$ classes.  Each prediction vector @f$ \hat{p}_n @f$
+ *      should sum to 1 as in a probability distribution: @f$
+ *      \forall n \sum\limits_{k=1}^K \hat{p}_{nk} = 1 @f$.
+ *   -# @f$ (N \times 1 \times 1 \times 1) @f$
+ *      the labels @f$ l @f$, an integer-valued Blob with values
+ *      @f$ l_n \in [0, 1, 2, ..., K - 1] @f$
+ *      indicating the correct class label among the @f$ K @f$ classes
+ * @param top output Blob vector (length 1)
+ *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+ *      the computed multinomial logistic loss: @f$ E =
+ *        \frac{-1}{N} \sum\limits_{n=1}^N \log(\hat{p}_{n,l_n})
+ *      @f$
+ */
+template <typename Dtype>
+class MultinomialLogisticLossLayer : public LossLayer<Dtype> {
+ public:
+  explicit MultinomialLogisticLossLayer(const LayerParameter& param)
+      : LossLayer<Dtype>(param) {}
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "MultinomialLogisticLoss"; }
+
+ protected:
+  /// @copydoc MultinomialLogisticLossLayer
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the multinomial logistic loss error gradient w.r.t. the
+   *        predictions.
+   *
+   * Gradients cannot be computed with respect to the label inputs (bottom[1]),
+   * so this method ignores bottom[1] and requires !propagate_down[1], crashing
+   * if propagate_down[1] is set.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+   *      This Blob's diff will simply contain the loss_weight* @f$ \lambda @f$,
+   *      as @f$ \lambda @f$ is the coefficient of this layer's output
+   *      @f$\ell_i@f$ in the overall Net loss
+   *      @f$ E = \lambda_i \ell_i + \mbox{other loss terms}@f$; hence
+   *      @f$ \frac{\partial E}{\partial \ell_i} = \lambda_i @f$.
+   *      (*Assuming that this top Blob is not used as a bottom (input) by any
+   *      other layer of the Net.)
+   * @param propagate_down see Layer::Backward.
+   *      propagate_down[1] must be false as we can't compute gradients with
+   *      respect to the labels.
+   * @param bottom input Blob vector (length 2)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the predictions @f$ \hat{p} @f$; Backward computes diff
+   *      @f$ \frac{\partial E}{\partial \hat{p}} @f$
+   *   -# @f$ (N \times 1 \times 1 \times 1) @f$
+   *      the labels -- ignored as we can't compute their error gradients
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+};
+
+/**
+ * @brief Computes the cross-entropy (logistic) loss @f$
+ *          E = \frac{-1}{n} \sum\limits_{n=1}^N \left[
+ *                  p_n \log \hat{p}_n +
+ *                  (1 - p_n) \log(1 - \hat{p}_n)
+ *              \right]
+ *        @f$, often used for predicting targets interpreted as probabilities.
+ *
+ * This layer is implemented rather than separate
+ * SigmoidLayer + CrossEntropyLayer
+ * as its gradient computation is more numerically stable.
+ * At test time, this layer can be replaced simply by a SigmoidLayer.
+ *
+ * @param bottom input Blob vector (length 2)
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the scores @f$ x \in [-\infty, +\infty]@f$,
+ *      which this layer maps to probability predictions
+ *      @f$ \hat{p}_n = \sigma(x_n) \in [0, 1] @f$
+ *      using the sigmoid function @f$ \sigma(.) @f$ (see SigmoidLayer).
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the targets @f$ y \in [0, 1] @f$
+ * @param top output Blob vector (length 1)
+ *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+ *      the computed cross-entropy loss: @f$
+ *          E = \frac{-1}{n} \sum\limits_{n=1}^N \left[
+ *                  p_n \log \hat{p}_n + (1 - p_n) \log(1 - \hat{p}_n)
+ *              \right]
+ *      @f$
+ */
+template <typename Dtype>
+class SigmoidCrossEntropyLossLayer : public LossLayer<Dtype> {
+ public:
+  explicit SigmoidCrossEntropyLossLayer(const LayerParameter& param)
+      : LossLayer<Dtype>(param),
+          sigmoid_layer_(new SigmoidLayer<Dtype>(param)),
+          sigmoid_output_(new Blob<Dtype>()) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "SigmoidCrossEntropyLoss"; }
+
+ protected:
+  /// @copydoc SigmoidCrossEntropyLossLayer
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the sigmoid cross-entropy loss error gradient w.r.t. the
+   *        predictions.
+   *
+   * Gradients cannot be computed with respect to the target inputs (bottom[1]),
+   * so this method ignores bottom[1] and requires !propagate_down[1], crashing
+   * if propagate_down[1] is set.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+   *      This Blob's diff will simply contain the loss_weight* @f$ \lambda @f$,
+   *      as @f$ \lambda @f$ is the coefficient of this layer's output
+   *      @f$\ell_i@f$ in the overall Net loss
+   *      @f$ E = \lambda_i \ell_i + \mbox{other loss terms}@f$; hence
+   *      @f$ \frac{\partial E}{\partial \ell_i} = \lambda_i @f$.
+   *      (*Assuming that this top Blob is not used as a bottom (input) by any
+   *      other layer of the Net.)
+   * @param propagate_down see Layer::Backward.
+   *      propagate_down[1] must be false as gradient computation with respect
+   *      to the targets is not implemented.
+   * @param bottom input Blob vector (length 2)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the predictions @f$x@f$; Backward computes diff
+   *      @f$ \frac{\partial E}{\partial x} =
+   *          \frac{1}{n} \sum\limits_{n=1}^N (\hat{p}_n - p_n)
+   *      @f$
+   *   -# @f$ (N \times 1 \times 1 \times 1) @f$
+   *      the targets -- ignored as we can't compute their error gradients
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  /// The internal SigmoidLayer used to map predictions to probabilities.
+  shared_ptr<SigmoidLayer<Dtype> > sigmoid_layer_;
+  /// sigmoid_output stores the output of the SigmoidLayer.
+  shared_ptr<Blob<Dtype> > sigmoid_output_;
+  /// bottom vector holder to call the underlying SigmoidLayer::Forward
+  vector<Blob<Dtype>*> sigmoid_bottom_vec_;
+  /// top vector holder to call the underlying SigmoidLayer::Forward
+  vector<Blob<Dtype>*> sigmoid_top_vec_;
+};
+
+// Forward declare SoftmaxLayer for use in SoftmaxWithLossLayer.
+template <typename Dtype> class SoftmaxLayer;
+
+/**
+ * @brief Computes the multinomial logistic loss for a one-of-many
+ *        classification task, passing real-valued predictions through a
+ *        softmax to get a probability distribution over classes.
+ *
+ * This layer should be preferred over separate
+ * SoftmaxLayer + MultinomialLogisticLossLayer
+ * as its gradient computation is more numerically stable.
+ * At test time, this layer can be replaced simply by a SoftmaxLayer.
+ *
+ * @param bottom input Blob vector (length 2)
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the predictions @f$ x @f$, a Blob with values in
+ *      @f$ [-\infty, +\infty] @f$ indicating the predicted score for each of
+ *      the @f$ K = CHW @f$ classes. This layer maps these scores to a
+ *      probability distribution over classes using the softmax function
+ *      @f$ \hat{p}_{nk} = \exp(x_{nk}) /
+ *      \left[\sum_{k'} \exp(x_{nk'})\right] @f$ (see SoftmaxLayer).
+ *   -# @f$ (N \times 1 \times 1 \times 1) @f$
+ *      the labels @f$ l @f$, an integer-valued Blob with values
+ *      @f$ l_n \in [0, 1, 2, ..., K - 1] @f$
+ *      indicating the correct class label among the @f$ K @f$ classes
+ * @param top output Blob vector (length 1)
+ *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+ *      the computed cross-entropy classification loss: @f$ E =
+ *        \frac{-1}{N} \sum\limits_{n=1}^N \log(\hat{p}_{n,l_n})
+ *      @f$, for softmax output class probabilities @f$ \hat{p} @f$
+ */
+template <typename Dtype>
+class SoftmaxWithLossLayer : public LossLayer<Dtype> {
+ public:
+   /**
+    * @param param provides LossParameter loss_param, with options:
+    *  - ignore_label (optional)
+    *    Specify a label value that should be ignored when computing the loss.
+    *  - normalize (optional, default true)
+    *    If true, the loss is normalized by the number of (nonignored) labels
+    *    present; otherwise the loss is simply summed over spatial locations.
+    */
+  explicit SoftmaxWithLossLayer(const LayerParameter& param)
+      : LossLayer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "SoftmaxWithLoss"; }
+  virtual inline int ExactNumTopBlobs() const { return -1; }
+  virtual inline int MinTopBlobs() const { return 1; }
+  virtual inline int MaxTopBlobs() const { return 2; }
+
+ protected:
+  /// @copydoc SoftmaxWithLossLayer
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  /**
+   * @brief Computes the softmax loss error gradient w.r.t. the predictions.
+   *
+   * Gradients cannot be computed with respect to the label inputs (bottom[1]),
+   * so this method ignores bottom[1] and requires !propagate_down[1], crashing
+   * if propagate_down[1] is set.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
+   *      This Blob's diff will simply contain the loss_weight* @f$ \lambda @f$,
+   *      as @f$ \lambda @f$ is the coefficient of this layer's output
+   *      @f$\ell_i@f$ in the overall Net loss
+   *      @f$ E = \lambda_i \ell_i + \mbox{other loss terms}@f$; hence
+   *      @f$ \frac{\partial E}{\partial \ell_i} = \lambda_i @f$.
+   *      (*Assuming that this top Blob is not used as a bottom (input) by any
+   *      other layer of the Net.)
+   * @param propagate_down see Layer::Backward.
+   *      propagate_down[1] must be false as we can't compute gradients with
+   *      respect to the labels.
+   * @param bottom input Blob vector (length 2)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the predictions @f$ x @f$; Backward computes diff
+   *      @f$ \frac{\partial E}{\partial x} @f$
+   *   -# @f$ (N \times 1 \times 1 \times 1) @f$
+   *      the labels -- ignored as we can't compute their error gradients
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+
+  /// The internal SoftmaxLayer used to map predictions to a distribution.
+  shared_ptr<Layer<Dtype> > softmax_layer_;
+  /// prob stores the output probability predictions from the SoftmaxLayer.
+  Blob<Dtype> prob_;
+  /// bottom vector holder used in call to the underlying SoftmaxLayer::Forward
+  vector<Blob<Dtype>*> softmax_bottom_vec_;
+  /// top vector holder used in call to the underlying SoftmaxLayer::Forward
+  vector<Blob<Dtype>*> softmax_top_vec_;
+  /// Whether to ignore instances with a certain label.
+  bool has_ignore_label_;
+  /// The label indicating that an instance should be ignored.
+  int ignore_label_;
+  /// Whether to normalize the loss by the total number of values present
+  /// (otherwise just by the batch size).
+  bool normalize_;
+
+  int softmax_axis_, outer_num_, inner_num_;
+};
+
+}  // namespace caffe
+
+#endif  // CAFFE_LOSS_LAYERS_HPP_
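
Since several of the loss layers above are documented in terms of the multinomial logistic loss E = -1/N * sum_n log(p_hat_{n,l_n}), here is a tiny self-contained numerical sketch (standard C++, independent of Caffe) of the fused softmax-plus-loss computation, including the max-subtraction that is the reason the fused layer is more numerically stable than SoftmaxLayer followed by MultinomialLogisticLossLayer:

    #include <algorithm>
    #include <cmath>
    #include <cstdio>
    #include <vector>

    // scores: N x K scores in row-major order; labels: N entries in [0, K-1].
    double SoftmaxWithLoss(const std::vector<double>& scores,
                           const std::vector<int>& labels, int N, int K) {
      double loss = 0.0;
      for (int n = 0; n < N; ++n) {
        // Subtract the per-sample max before exponentiating for stability.
        double max_score = scores[n * K];
        for (int k = 1; k < K; ++k)
          max_score = std::max(max_score, scores[n * K + k]);
        double denom = 0.0;
        for (int k = 0; k < K; ++k)
          denom += std::exp(scores[n * K + k] - max_score);
        // log(p_hat_{n,l_n}) = x_{n,l_n} - max - log(sum_k exp(x_{n,k} - max))
        loss -= scores[n * K + labels[n]] - max_score - std::log(denom);
      }
      return loss / N;  // E = -1/N * sum_n log(p_hat_{n, l_n})
    }

    int main() {
      const double s[] = { 2.0, 0.5, -1.0,    // sample 0, true class 0
                           0.1, 0.2,  3.0 };  // sample 1, true class 2
      const std::vector<double> scores(s, s + 6);
      const int l[] = { 0, 2 };
      const std::vector<int> labels(l, l + 2);
      std::printf("loss = %f\n", SoftmaxWithLoss(scores, labels, 2, 3));
      return 0;
    }
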
diff --git a/include/caffe/net.hpp b/include/caffe/net.hpp
new file mode 100644
index 0000000..5665df1
--- /dev/null
+++ b/include/caffe/net.hpp
@@ -0,0 +1,268 @@
+#ifndef CAFFE_NET_HPP_
+#define CAFFE_NET_HPP_
+
+#include <map>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/proto/caffe.pb.h"
+
+namespace caffe {
+
+/**
+ * @brief Connects Layer%s together into a directed acyclic graph (DAG)
+ *        specified by a NetParameter.
+ *
+ * TODO(dox): more thorough description.
+ */
+template <typename Dtype>
+class Net {
+ public:
+  explicit Net(const NetParameter& param);
+  explicit Net(const string& param_file, Phase phase);
+  virtual ~Net() {}
+
+  /// @brief Initialize a network with a NetParameter.
+  void Init(const NetParameter& param);
+
+  /**
+   * @brief Run Forward with the input Blob%s already fed separately.
+   *
+   * You can get the input blobs using input_blobs().
+   */
+  const vector<Blob<Dtype>*>& ForwardPrefilled(Dtype* loss = NULL);
+
+  /**
+   * The From and To variants of Forward and Backward operate on the
+   * (topological) ordering by which the net is specified. For general DAG
+   * networks, note that (1) computing from one layer to another might entail
+   * extra computation on unrelated branches, and (2) computation starting in
+   * the middle may be incorrect if all of the layers of a fan-in are not
+   * included.
+   */
+  Dtype ForwardFromTo(int start, int end);
+  Dtype ForwardFrom(int start);
+  Dtype ForwardTo(int end);
+  /// @brief Run forward using a set of bottom blobs, and return the result.
+  const vector<Blob<Dtype>*>& Forward(const vector<Blob<Dtype>* > & bottom,
+      Dtype* loss = NULL);
+  /**
+   * @brief Run forward using a serialized BlobProtoVector and return the
+   *        result as a serialized BlobProtoVector
+   */
+  string Forward(const string& input_blob_protos, Dtype* loss = NULL);
+
+  /**
+   * Backward takes no input and produces no output, since it solely computes
+   * the gradient w.r.t. the parameters; the data has already been provided
+   * during the forward pass.
+   */
+  void Backward();
+  void BackwardFromTo(int start, int end);
+  void BackwardFrom(int start);
+  void BackwardTo(int end);
+
+  /**
+   * @brief Reshape all layers from bottom to top.
+   *
+   * This is useful to propagate changes to layer sizes without running
+   * a forward pass, e.g. to compute output feature size.
+   */
+  void Reshape();
+
+  Dtype ForwardBackward(const vector<Blob<Dtype>* > & bottom) {
+    Dtype loss;
+    Forward(bottom, &loss);
+    Backward();
+    return loss;
+  }
+
+  /// @brief Updates the network weights based on the diff values computed.
+  void Update();
+
+  /**
+   * @brief For an already initialized net, implicitly copies (i.e., using no
+   *        additional memory) the pre-trained layers from another Net.
+   */
+  void ShareTrainedLayersWith(const Net* other);
+  // For an already initialized net, CopyTrainedLayersFrom() copies the already
+  // trained layers from another net parameter instance.
+  /**
+   * @brief For an already initialized net, copies the pre-trained layers from
+   *        another Net.
+   */
+  void CopyTrainedLayersFrom(const NetParameter& param);
+  void CopyTrainedLayersFrom(const string trained_filename);
+  /// @brief Writes the net to a proto.
+  void ToProto(NetParameter* param, bool write_diff = false) const;
+
+  /// @brief returns the network name.
+  inline const string& name() const { return name_; }
+  /// @brief returns the layer names
+  inline const vector<string>& layer_names() const { return layer_names_; }
+  /// @brief returns the blob names
+  inline const vector<string>& blob_names() const { return blob_names_; }
+  /// @brief returns the blobs
+  inline const vector<shared_ptr<Blob<Dtype> > >& blobs() const {
+    return blobs_;
+  }
+  /// @brief returns the layers
+  inline const vector<shared_ptr<Layer<Dtype> > >& layers() const {
+    return layers_;
+  }
+  /// @brief returns the phase: TRAIN or TEST
+  inline Phase phase() const { return phase_; }
+  /**
+   * @brief returns the bottom vecs for each layer -- usually you won't
+   *        need this unless you do per-layer checks such as gradients.
+   */
+  inline const vector<vector<Blob<Dtype>*> >& bottom_vecs() const {
+    return bottom_vecs_;
+  }
+  /**
+   * @brief returns the top vecs for each layer -- usually you won't
+   *        need this unless you do per-layer checks such as gradients.
+   */
+  inline const vector<vector<Blob<Dtype>*> >& top_vecs() const {
+    return top_vecs_;
+  }
+  inline const vector<vector<bool> >& bottom_need_backward() const {
+    return bottom_need_backward_;
+  }
+  inline const vector<Dtype>& blob_loss_weights() const {
+    return blob_loss_weights_;
+  }
+  inline const vector<bool>& layer_need_backward() const {
+    return layer_need_backward_;
+  }
+  /// @brief returns the parameters
+  inline const vector<shared_ptr<Blob<Dtype> > >& params() const {
+    return params_;
+  }
+  /// @brief returns the parameter learning rate multipliers
+  inline const vector<float>& params_lr() const { return params_lr_; }
+  inline const vector<float>& params_weight_decay() const {
+    return params_weight_decay_;
+  }
+  const map<string, int>& param_names_index() const {
+    return param_names_index_;
+  }
+  inline const vector<int>& param_owners() const { return param_owners_; }
+  /// @brief Input and output blob numbers
+  inline int num_inputs() const { return net_input_blobs_.size(); }
+  inline int num_outputs() const { return net_output_blobs_.size(); }
+  inline const vector<Blob<Dtype>*>& input_blobs() const {
+    return net_input_blobs_;
+  }
+  inline const vector<Blob<Dtype>*>& output_blobs() const {
+    return net_output_blobs_;
+  }
+  inline const vector<int>& input_blob_indices() const {
+    return net_input_blob_indices_;
+  }
+  inline const vector<int>& output_blob_indices() const {
+    return net_output_blob_indices_;
+  }
+  bool has_blob(const string& blob_name) const;
+  const shared_ptr<Blob<Dtype> > blob_by_name(const string& blob_name) const;
+  bool has_layer(const string& layer_name) const;
+  const shared_ptr<Layer<Dtype> > layer_by_name(const string& layer_name) const;
+
+  void set_debug_info(const bool value) { debug_info_ = value; }
+
+  // Helpers for Init.
+  /**
+   * @brief Remove layers that the user specified should be excluded given the current
+   *        phase, level, and stage.
+   */
+  static void FilterNet(const NetParameter& param,
+      NetParameter* param_filtered);
+  /// @brief return whether NetState state meets NetStateRule rule
+  static bool StateMeetsRule(const NetState& state, const NetStateRule& rule,
+      const string& layer_name);
+
+ protected:
+  // Helpers for Init.
+  /// @brief Append a new input or top blob to the net.
+  void AppendTop(const NetParameter& param, const int layer_id,
+                 const int top_id, set<string>* available_blobs,
+                 map<string, int>* blob_name_to_idx);
+  /// @brief Append a new bottom blob to the net.
+  int AppendBottom(const NetParameter& param, const int layer_id,
+                   const int bottom_id, set<string>* available_blobs,
+                   map<string, int>* blob_name_to_idx);
+  /// @brief Append a new parameter blob to the net.
+  void AppendParam(const NetParameter& param, const int layer_id,
+                   const int param_id);
+
+  /// @brief Helper for displaying debug info in Forward about input Blobs.
+  void InputDebugInfo(const int layer_id);
+  /// @brief Helper for displaying debug info in Forward.
+  void ForwardDebugInfo(const int layer_id);
+  /// @brief Helper for displaying debug info in Backward.
+  void BackwardDebugInfo(const int layer_id);
+  /// @brief Helper for displaying debug info in Update.
+  void UpdateDebugInfo(const int param_id);
+
+  /// @brief Get misc parameters, e.g. the LR multiplier and weight decay.
+  void GetLearningRateAndWeightDecay();
+
+  /// @brief The network name
+  string name_;
+  /// @brief The phase: TRAIN or TEST
+  Phase phase_;
+  /// @brief Individual layers in the net
+  vector<shared_ptr<Layer<Dtype> > > layers_;
+  vector<string> layer_names_;
+  map<string, int> layer_names_index_;
+  vector<bool> layer_need_backward_;
+  /// @brief the blobs storing intermediate results between the layers.
+  vector<shared_ptr<Blob<Dtype> > > blobs_;
+  vector<string> blob_names_;
+  map<string, int> blob_names_index_;
+  vector<bool> blob_need_backward_;
+  /// bottom_vecs stores the vectors containing the input for each layer.
+  /// They don't actually host the blobs (blobs_ does), so we simply store
+  /// pointers.
+  vector<vector<Blob<Dtype>*> > bottom_vecs_;
+  vector<vector<int> > bottom_id_vecs_;
+  vector<vector<bool> > bottom_need_backward_;
+  /// top_vecs stores the vectors containing the output for each layer
+  vector<vector<Blob<Dtype>*> > top_vecs_;
+  vector<vector<int> > top_id_vecs_;
+  /// Vector of weight in the loss (or objective) function of each net blob,
+  /// indexed by blob_id.
+  vector<Dtype> blob_loss_weights_;
+  vector<vector<int> > param_id_vecs_;
+  vector<int> param_owners_;
+  vector<string> param_display_names_;
+  vector<pair<int, int> > param_layer_indices_;
+  map<string, int> param_names_index_;
+  /// blob indices for the input and the output of the net
+  vector<int> net_input_blob_indices_;
+  vector<int> net_output_blob_indices_;
+  vector<Blob<Dtype>*> net_input_blobs_;
+  vector<Blob<Dtype>*> net_output_blobs_;
+  /// The parameters in the network.
+  vector<shared_ptr<Blob<Dtype> > > params_;
+  /// the learning rate multipliers
+  vector<float> params_lr_;
+  /// the weight decay multipliers
+  vector<float> params_weight_decay_;
+  /// The bytes of memory used by this net
+  size_t memory_used_;
+  /// Whether to compute and display debug info for the net.
+  bool debug_info_;
+
+  DISABLE_COPY_AND_ASSIGN(Net);
+};
+
+
+}  // namespace caffe
+
+#endif  // CAFFE_NET_HPP_
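
For orientation, a minimal usage sketch of the interface declared above (the prototxt and caffemodel paths and the blob name "prob" are placeholders, not fixed by this header):

    #include <vector>
    #include "caffe/net.hpp"

    void RunOnce() {
      // Build the net from a prototxt in TEST phase and load trained weights.
      caffe::Net<float> net("path/to/deploy.prototxt", caffe::TEST);
      net.CopyTrainedLayersFrom("path/to/weights.caffemodel");

      // Fill net.input_blobs()[i]->mutable_cpu_data() here, then run the DAG.
      float loss = 0.0f;
      const std::vector<caffe::Blob<float>*>& out = net.ForwardPrefilled(&loss);

      // Any intermediate blob can be inspected by name after the pass.
      if (net.has_blob("prob")) {
        const boost::shared_ptr<caffe::Blob<float> > prob =
            net.blob_by_name("prob");
        // prob->cpu_data() now holds that layer's output values.
      }
      (void) out;
    }
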
diff --git a/include/caffe/neuron_layers.hpp b/include/caffe/neuron_layers.hpp
new file mode 100644
index 0000000..c2e0774
--- /dev/null
+++ b/include/caffe/neuron_layers.hpp
@@ -0,0 +1,809 @@
+#ifndef CAFFE_NEURON_LAYERS_HPP_
+#define CAFFE_NEURON_LAYERS_HPP_
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/proto/caffe.pb.h"
+
+#define HDF5_DATA_DATASET_NAME "data"
+#define HDF5_DATA_LABEL_NAME "label"
+
+namespace caffe {
+
+/**
+ * @brief An interface for layers that take one blob as input (@f$ x @f$)
+ *        and produce one equally-sized blob as output (@f$ y @f$), where
+ *        each element of the output depends only on the corresponding input
+ *        element.
+ */
+template <typename Dtype>
+class NeuronLayer : public Layer<Dtype> {
+ public:
+  explicit NeuronLayer(const LayerParameter& param)
+     : Layer<Dtype>(param) {}
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+};
+
+/**
+ * @brief Computes @f$ y = |x| @f$
+ *
+ * @param bottom input Blob vector (length 1)
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the inputs @f$ x @f$
+ * @param top output Blob vector (length 1)
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the computed outputs @f$ y = |x| @f$
+ */
+template <typename Dtype>
+class AbsValLayer : public NeuronLayer<Dtype> {
+ public:
+  explicit AbsValLayer(const LayerParameter& param)
+      : NeuronLayer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "AbsVal"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  /// @copydoc AbsValLayer
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the error gradient w.r.t. the absolute value inputs.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      containing error gradients @f$ \frac{\partial E}{\partial y} @f$
+   *      with respect to computed outputs @f$ y @f$
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 2)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$; Backward fills their diff with
+   *      gradients @f$
+   *        \frac{\partial E}{\partial x} =
+   *            \mathrm{sign}(x) \frac{\partial E}{\partial y}
+   *      @f$ if propagate_down[0]
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+};
+
+/**
+ * @brief Computes @f$ y = x + \log(1 + \exp(-x)) @f$ if @f$ x > 0 @f$;
+ *        @f$ y = \log(1 + \exp(x)) @f$ otherwise.
+ *
+ * @param bottom input Blob vector (length 1)
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the inputs @f$ x @f$
+ * @param top output Blob vector (length 1)
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the computed outputs @f$
+ *      y = \left\{
+ *         \begin{array}{ll}
+ *            x + \log(1 + \exp(-x)) & \mbox{if } x > 0 \\
+ *            \log(1 + \exp(x)) & \mbox{otherwise}
+ *         \end{array} \right.
+ *      @f$
+ */
+template <typename Dtype>
+class BNLLLayer : public NeuronLayer<Dtype> {
+ public:
+  explicit BNLLLayer(const LayerParameter& param)
+      : NeuronLayer<Dtype>(param) {}
+
+  virtual inline const char* type() const { return "BNLL"; }
+
+ protected:
+  /// @copydoc BNLLLayer
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the error gradient w.r.t. the BNLL inputs.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      containing error gradients @f$ \frac{\partial E}{\partial y} @f$
+   *      with respect to computed outputs @f$ y @f$
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 2)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$; Backward fills their diff with
+   *      gradients @f$
+   *        \frac{\partial E}{\partial x}
+   *      @f$ if propagate_down[0]
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+};
+
+/**
+ * @brief During training only, sets a random portion of @f$x@f$ to 0, adjusting
+ *        the rest of the vector magnitude accordingly.
+ *
+ * @param bottom input Blob vector (length 1)
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the inputs @f$ x @f$
+ * @param top output Blob vector (length 1)
+ *   -# @f$ (N \times C \times H \times W) @f$
+ *      the computed outputs @f$ y @f$ (see the Forward docs below for the
+ *      train-time and test-time definitions)
+ */
+template <typename Dtype>
+class DropoutLayer : public NeuronLayer<Dtype> {
+ public:
+  /**
+   * @param param provides DropoutParameter dropout_param,
+   *     with DropoutLayer options:
+   *   - dropout_ratio (\b optional, default 0.5).
+   *     Sets the probability @f$ p @f$ that any given unit is dropped.
+   */
+  explicit DropoutLayer(const LayerParameter& param)
+      : NeuronLayer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Dropout"; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the computed outputs. At training time, we have @f$
+   *      y_{\mbox{train}} = \left\{
+   *         \begin{array}{ll}
+   *            \frac{x}{1 - p} & \mbox{if } u > p \\
+   *            0 & \mbox{otherwise}
+   *         \end{array} \right.
+   *      @f$, where @f$ u \sim U(0, 1)@f$ is generated independently for each
+   *      input at each iteration. At test time, we simply have
+   *      @f$ y_{\mbox{test}} = \mathbb{E}[y_{\mbox{train}}] = x @f$.
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  /// the random mask values; dividing by UINT_MAX gives @f$ u \sim U(0,1) @f$
+  Blob<unsigned int> rand_vec_;
+  /// the probability @f$ p @f$ of dropping any input
+  Dtype threshold_;
+  /// the scale for undropped inputs at train time @f$ 1 / (1 - p) @f$
+  Dtype scale_;
+  unsigned int uint_thres_;
+};
+
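
To make the train/test behaviour documented for DropoutLayer concrete, here is a small standalone sketch of inverted dropout (plain C++, not the Caffe implementation; std::rand is used only for brevity):

    #include <cstddef>
    #include <cstdlib>
    #include <vector>

    // Train-time pass: drop each unit with probability p and scale the
    // survivors by 1/(1-p) so that E[y_train] = x; the test-time pass is
    // then simply y = x.
    void DropoutForwardTrain(const std::vector<float>& x, float p,
                             std::vector<float>* y, std::vector<bool>* mask) {
      const float scale = 1.0f / (1.0f - p);
      y->resize(x.size());
      mask->resize(x.size());
      for (std::size_t i = 0; i < x.size(); ++i) {
        const float u = std::rand() / static_cast<float>(RAND_MAX);  // u ~ U(0,1)
        (*mask)[i] = (u > p);             // keep the unit with probability 1-p
        (*y)[i] = (*mask)[i] ? x[i] * scale : 0.0f;
      }
    }
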
+/**
+ * @brief Computes @f$ y = \gamma ^ {\alpha x + \beta} @f$,
+ *        as specified by the scale @f$ \alpha @f$, shift @f$ \beta @f$,
+ *        and base @f$ \gamma @f$.
+ */
+template <typename Dtype>
+class ExpLayer : public NeuronLayer<Dtype> {
+ public:
+  /**
+   * @param param provides ExpParameter exp_param,
+   *     with ExpLayer options:
+   *   - scale (\b optional, default 1) the scale @f$ \alpha @f$
+   *   - shift (\b optional, default 0) the shift @f$ \beta @f$
+   *   - base (\b optional, default -1 for a value of @f$ e \approx 2.718 @f$)
+   *         the base @f$ \gamma @f$
+   */
+  explicit ExpLayer(const LayerParameter& param)
+      : NeuronLayer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Exp"; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the computed outputs @f$
+   *        y = \gamma ^ {\alpha x + \beta}
+   *      @f$
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the error gradient w.r.t. the exp inputs.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      containing error gradients @f$ \frac{\partial E}{\partial y} @f$
+   *      with respect to computed outputs @f$ y @f$
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$; Backward fills their diff with
+   *      gradients @f$
+   *        \frac{\partial E}{\partial x} =
+   *            \frac{\partial E}{\partial y} y \alpha \log_e(\gamma)
+   *      @f$ if propagate_down[0]
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  Dtype inner_scale_, outer_scale_;
+};
+
+/**
+ * @brief Computes @f$ y = log_{\gamma}(\alpha x + \beta) @f$,
+ *        as specified by the scale @f$ \alpha @f$, shift @f$ \beta @f$,
+ *        and base @f$ \gamma @f$.
+ */
+template <typename Dtype>
+class LogLayer : public NeuronLayer<Dtype> {
+ public:
+  /**
+   * @param param provides LogParameter log_param,
+   *     with LogLayer options:
+   *   - scale (\b optional, default 1) the scale @f$ \alpha @f$
+   *   - shift (\b optional, default 0) the shift @f$ \beta @f$
+   *   - base (\b optional, default -1 for a value of @f$ e \approx 2.718 @f$)
+   *         the base @f$ \gamma @f$
+   */
+  explicit LogLayer(const LayerParameter& param)
+      : NeuronLayer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Log"; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the computed outputs @f$
+   *        y = log_{\gamma}(\alpha x + \beta)
+   *      @f$
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the error gradient w.r.t. the log inputs.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      containing error gradients @f$ \frac{\partial E}{\partial y} @f$
+   *      with respect to computed outputs @f$ y @f$
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$; Backward fills their diff with
+   *      gradients @f$
+   *        \frac{\partial E}{\partial x} =
+   *            \frac{\partial E}{\partial y}
+   *            \frac{\alpha}{(\alpha x + \beta) \log_e(\gamma)}
+   *      @f$ if propagate_down[0]
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  Dtype base_scale_;
+  Dtype input_scale_, input_shift_;
+  Dtype backward_num_scale_;
+};
+
+/**
+ * @brief Computes @f$ y = (\alpha x + \beta) ^ \gamma @f$,
+ *        as specified by the scale @f$ \alpha @f$, shift @f$ \beta @f$,
+ *        and power @f$ \gamma @f$.
+ */
+template <typename Dtype>
+class PowerLayer : public NeuronLayer<Dtype> {
+ public:
+  /**
+   * @param param provides PowerParameter power_param,
+   *     with PowerLayer options:
+   *   - scale (\b optional, default 1) the scale @f$ \alpha @f$
+   *   - shift (\b optional, default 0) the shift @f$ \beta @f$
+   *   - power (\b optional, default 1) the power @f$ \gamma @f$
+   */
+  explicit PowerLayer(const LayerParameter& param)
+      : NeuronLayer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Power"; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the computed outputs @f$
+   *        y = (\alpha x + \beta) ^ \gamma
+   *      @f$
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the error gradient w.r.t. the power inputs.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      containing error gradients @f$ \frac{\partial E}{\partial y} @f$
+   *      with respect to computed outputs @f$ y @f$
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$; Backward fills their diff with
+   *      gradients @f$
+   *        \frac{\partial E}{\partial x} =
+   *            \frac{\partial E}{\partial y}
+   *            \alpha \gamma (\alpha x + \beta) ^ {\gamma - 1} =
+   *            \frac{\partial E}{\partial y}
+   *            \frac{\alpha \gamma y}{\alpha x + \beta}
+   *      @f$ if propagate_down[0]
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  /// @brief @f$ \gamma @f$ from layer_param_.power_param()
+  Dtype power_;
+  /// @brief @f$ \alpha @f$ from layer_param_.power_param()
+  Dtype scale_;
+  /// @brief @f$ \beta @f$ from layer_param_.power_param()
+  Dtype shift_;
+  /// @brief Result of @f$ \alpha \gamma @f$
+  Dtype diff_scale_;
+};
+
+/**
+ * @brief Rectified Linear Unit non-linearity @f$ y = \max(0, x) @f$.
+ *        The simple max is fast to compute, and the function does not saturate.
+ */
+template <typename Dtype>
+class ReLULayer : public NeuronLayer<Dtype> {
+ public:
+  /**
+   * @param param provides ReLUParameter relu_param,
+   *     with ReLULayer options:
+   *   - negative_slope (\b optional, default 0).
+   *     the value @f$ \nu @f$ by which negative values are multiplied.
+   */
+  explicit ReLULayer(const LayerParameter& param)
+      : NeuronLayer<Dtype>(param) {}
+
+  virtual inline const char* type() const { return "ReLU"; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the computed outputs @f$
+   *        y = \max(0, x)
+   *      @f$ by default.  If a non-zero negative_slope @f$ \nu @f$ is provided,
+   *      the computed outputs are @f$ y = \max(0, x) + \nu \min(0, x) @f$.
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the error gradient w.r.t. the ReLU inputs.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      containing error gradients @f$ \frac{\partial E}{\partial y} @f$
+   *      with respect to computed outputs @f$ y @f$
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$; Backward fills their diff with
+   *      gradients @f$
+   *        \frac{\partial E}{\partial x} = \left\{
+   *        \begin{array}{lr}
+   *            0 & \mathrm{if} \; x \le 0 \\
+   *            \frac{\partial E}{\partial y} & \mathrm{if} \; x > 0
+   *        \end{array} \right.
+   *      @f$ if propagate_down[0], by default.
+   *      If a non-zero negative_slope @f$ \nu @f$ is provided,
+   *      the computed gradients are @f$
+   *        \frac{\partial E}{\partial x} = \left\{
+   *        \begin{array}{lr}
+   *            \nu \frac{\partial E}{\partial y} & \mathrm{if} \; x \le 0 \\
+   *            \frac{\partial E}{\partial y} & \mathrm{if} \; x > 0
+   *        \end{array} \right.
+   *      @f$.
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+};
+
+#ifdef USE_CUDNN
+/**
+ * @brief CuDNN acceleration of ReLULayer.
+ */
+template <typename Dtype>
+class CuDNNReLULayer : public ReLULayer<Dtype> {
+ public:
+  explicit CuDNNReLULayer(const LayerParameter& param)
+      : ReLULayer<Dtype>(param), handles_setup_(false) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual ~CuDNNReLULayer();
+
+ protected:
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  bool handles_setup_;
+  cudnnHandle_t             handle_;
+  cudnnTensorDescriptor_t bottom_desc_;
+  cudnnTensorDescriptor_t top_desc_;
+};
+#endif
+
+/**
+ * @brief Sigmoid function non-linearity @f$
+ *         y = (1 + \exp(-x))^{-1}
+ *     @f$, a classic choice in neural networks.
+ *
+ * Note that the gradient vanishes as the values move away from 0.
+ * The ReLULayer is often a better choice for this reason.
+ */
+template <typename Dtype>
+class SigmoidLayer : public NeuronLayer<Dtype> {
+ public:
+  explicit SigmoidLayer(const LayerParameter& param)
+      : NeuronLayer<Dtype>(param) {}
+
+  virtual inline const char* type() const { return "Sigmoid"; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the computed outputs @f$
+   *        y = (1 + \exp(-x))^{-1}
+   *      @f$
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the error gradient w.r.t. the sigmoid inputs.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      containing error gradients @f$ \frac{\partial E}{\partial y} @f$
+   *      with respect to computed outputs @f$ y @f$
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$; Backward fills their diff with
+   *      gradients @f$
+   *        \frac{\partial E}{\partial x}
+   *            = \frac{\partial E}{\partial y} y (1 - y)
+   *      @f$ if propagate_down[0]
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+};
+
+#ifdef USE_CUDNN
+/**
+ * @brief CuDNN acceleration of SigmoidLayer.
+ */
+template <typename Dtype>
+class CuDNNSigmoidLayer : public SigmoidLayer<Dtype> {
+ public:
+  explicit CuDNNSigmoidLayer(const LayerParameter& param)
+      : SigmoidLayer<Dtype>(param), handles_setup_(false) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual ~CuDNNSigmoidLayer();
+
+ protected:
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  bool handles_setup_;
+  cudnnHandle_t             handle_;
+  cudnnTensorDescriptor_t bottom_desc_;
+  cudnnTensorDescriptor_t top_desc_;
+};
+#endif
+
+/**
+ * @brief TanH hyperbolic tangent non-linearity @f$
+ *         y = \frac{\exp(2x) - 1}{\exp(2x) + 1}
+ *     @f$, popular in auto-encoders.
+ *
+ * Note that the gradient vanishes as the values move away from 0.
+ * The ReLULayer is often a better choice for this reason.
+ */
+template <typename Dtype>
+class TanHLayer : public NeuronLayer<Dtype> {
+ public:
+  explicit TanHLayer(const LayerParameter& param)
+      : NeuronLayer<Dtype>(param) {}
+
+  virtual inline const char* type() const { return "TanH"; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the computed outputs @f$
+   *        y = \frac{\exp(2x) - 1}{\exp(2x) + 1}
+   *      @f$
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the error gradient w.r.t. the TanH inputs.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      containing error gradients @f$ \frac{\partial E}{\partial y} @f$
+   *      with respect to computed outputs @f$ y @f$
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$; Backward fills their diff with
+   *      gradients @f$
+   *        \frac{\partial E}{\partial x}
+   *            = \frac{\partial E}{\partial y}
+   *              \left(1 - \left[\frac{\exp(2x) - 1}{\exp(2x) + 1} \right]^2 \right)
+   *            = \frac{\partial E}{\partial y} (1 - y^2)
+   *      @f$ if propagate_down[0]
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+};
+
+#ifdef USE_CUDNN
+/**
+ * @brief CuDNN acceleration of TanHLayer.
+ */
+template <typename Dtype>
+class CuDNNTanHLayer : public TanHLayer<Dtype> {
+ public:
+  explicit CuDNNTanHLayer(const LayerParameter& param)
+      : TanHLayer<Dtype>(param), handles_setup_(false) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual ~CuDNNTanHLayer();
+
+ protected:
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  bool handles_setup_;
+  cudnnHandle_t             handle_;
+  cudnnTensorDescriptor_t bottom_desc_;
+  cudnnTensorDescriptor_t top_desc_;
+};
+#endif
+
+/**
+ * @brief Tests whether the input exceeds a threshold: outputs 1 for inputs
+ *        above threshold; 0 otherwise.
+ */
+template <typename Dtype>
+class ThresholdLayer : public NeuronLayer<Dtype> {
+ public:
+  /**
+   * @param param provides ThresholdParameter threshold_param,
+   *     with ThresholdLayer options:
+   *   - threshold (\b optional, default 0).
+   *     the threshold value @f$ t @f$ to which the input values are compared.
+   */
+  explicit ThresholdLayer(const LayerParameter& param)
+      : NeuronLayer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Threshold"; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the inputs @f$ x @f$
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (N \times C \times H \times W) @f$
+   *      the computed outputs @f$
+   *       y = \left\{
+   *       \begin{array}{lr}
+   *         0 & \mathrm{if} \; x \le t \\
+   *         1 & \mathrm{if} \; x > t
+   *       \end{array} \right.
+   *      @f$
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  /// @brief Not implemented (non-differentiable function)
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+    NOT_IMPLEMENTED;
+  }
+
+  Dtype threshold_;
+};
+
+/**
+ * @brief Parameterized Rectified Linear Unit non-linearity @f$
+ *        y_i = \max(0, x_i) + a_i \min(0, x_i)
+   *        @f$. The differences from ReLULayer are 1) negative slopes are
+   *        learnable through backprop and 2) negative slopes can vary across
+   *        channels. The number of axes of the input blob should be greater
+   *        than or equal to 2. The 1st axis (0-based) is treated as channels.
+ */
+template <typename Dtype>
+class PReLULayer : public NeuronLayer<Dtype> {
+ public:
+  /**
+   * @param param provides PReLUParameter prelu_param,
+   *     with PReLULayer options:
+   *   - filler (\b optional, FillerParameter,
+   *     default {'type': 'constant', 'value': 0.25}).
+   *     the filler used to initialize the negative slopes.
+   *   - channel_shared (\b optional, default false).
+   *     whether the negative slopes are shared across channels.
+   */
+  explicit PReLULayer(const LayerParameter& param)
+      : NeuronLayer<Dtype>(param) {}
+
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "PReLU"; }
+
+ protected:
+  /**
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times ...) @f$
+   *      the inputs @f$ x @f$
+   * @param top output Blob vector (length 1)
+   *   -# @f$ (N \times C \times ...) @f$
+   *      the computed outputs for each channel @f$i@f$ @f$
+   *        y_i = \max(0, x_i) + a_i \min(0, x_i)
+   *      @f$.
+   */
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  /**
+   * @brief Computes the error gradient w.r.t. the PReLU inputs.
+   *
+   * @param top output Blob vector (length 1), providing the error gradient with
+   *      respect to the outputs
+   *   -# @f$ (N \times C \times ...) @f$
+   *      containing error gradients @f$ \frac{\partial E}{\partial y} @f$
+   *      with respect to computed outputs @f$ y @f$
+   * @param propagate_down see Layer::Backward.
+   * @param bottom input Blob vector (length 1)
+   *   -# @f$ (N \times C \times ...) @f$
+   *      the inputs @f$ x @f$; for each channel @f$i@f$, Backward fills their
+   *      diff with gradients @f$
+   *        \frac{\partial E}{\partial x_i} = \left\{
+   *        \begin{array}{lr}
+   *            a_i \frac{\partial E}{\partial y_i} & \mathrm{if} \; x_i \le 0 \\
+   *            \frac{\partial E}{\partial y_i} & \mathrm{if} \; x_i > 0
+   *        \end{array} \right.
+   *      @f$.
+   *      If param_propagate_down_[0] is true, it fills the diff with gradients
+   *      @f$
+   *        \frac{\partial E}{\partial a_i} = \left\{
+   *        \begin{array}{lr}
+   *            \sum_{x_i} x_i \frac{\partial E}{\partial y_i} & \mathrm{if} \; x_i \le 0 \\
+   *            0 & \mathrm{if} \; x_i > 0
+   *        \end{array} \right.
+   *      @f$.
+   */
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  bool channel_shared_;
+  Blob<Dtype> multiplier_;  // dot multiplier for backward computation of params
+  Blob<Dtype> backward_buff_;  // temporary buffer for backward computation
+  Blob<Dtype> bottom_memory_;  // memory for in-place computation
+};
+
+}  // namespace caffe
+
+#endif  // CAFFE_NEURON_LAYERS_HPP_
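
For illustration, the ReLU forward/backward formulas documented in the header above can be sketched on plain arrays, independent of the Caffe Blob/Layer API; negative_slope here plays the role of the nu parameter in the ReLULayer documentation.

// Illustrative sketch only: the documented ReLU math on raw arrays,
// not the Caffe implementation.
#include <algorithm>
#include <cstdio>

void relu_forward(const float* x, float* y, int n, float negative_slope) {
  for (int i = 0; i < n; ++i) {
    // y = max(0, x) + nu * min(0, x)
    y[i] = std::max(0.0f, x[i]) + negative_slope * std::min(0.0f, x[i]);
  }
}

void relu_backward(const float* x, const float* dy, float* dx, int n,
                   float negative_slope) {
  for (int i = 0; i < n; ++i) {
    // dE/dx = dE/dy for x > 0, nu * dE/dy otherwise
    dx[i] = (x[i] > 0.0f) ? dy[i] : negative_slope * dy[i];
  }
}

int main() {
  const float x[4] = {-2.0f, -0.5f, 0.5f, 2.0f};
  const float dy[4] = {1.0f, 1.0f, 1.0f, 1.0f};
  float y[4], dx[4];
  relu_forward(x, y, 4, 0.1f);
  relu_backward(x, dy, dx, 4, 0.1f);
  for (int i = 0; i < 4; ++i)
    std::printf("%g -> %g (grad %g)\n", x[i], y[i], dx[i]);
  return 0;
}
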
diff --git a/include/caffe/python_layer.hpp b/include/caffe/python_layer.hpp
new file mode 100644
index 0000000..19cf18c
--- /dev/null
+++ b/include/caffe/python_layer.hpp
@@ -0,0 +1,67 @@
+#ifndef CAFFE_PYTHON_LAYER_HPP_
+#define CAFFE_PYTHON_LAYER_HPP_
+
+#include <boost/python.hpp>
+#include <vector>
+
+#include "caffe/layer.hpp"
+
+namespace bp = boost::python;
+
+namespace caffe {
+
+template <typename Dtype>
+class PythonLayer : public Layer<Dtype> {
+ public:
+  PythonLayer(PyObject* self, const LayerParameter& param)
+      : Layer<Dtype>(param), self_(bp::handle<>(bp::borrowed(self))) { }
+
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+    try {
+      self_.attr("setup")(bottom, top);
+    } catch (bp::error_already_set) {
+      PyErr_Print();
+      throw;
+    }
+  }
+
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+    try {
+      self_.attr("reshape")(bottom, top);
+    } catch (bp::error_already_set) {
+      PyErr_Print();
+      throw;
+    }
+  }
+
+  virtual inline const char* type() const { return "Python"; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+    try {
+      self_.attr("forward")(bottom, top);
+    } catch (bp::error_already_set) {
+      PyErr_Print();
+      throw;
+    }
+  }
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+    try {
+      self_.attr("backward")(top, propagate_down, bottom);
+    } catch (bp::error_already_set) {
+      PyErr_Print();
+      throw;
+    }
+  }
+
+ private:
+  bp::object self_;
+};
+
+}  // namespace caffe
+
+#endif
diff --git a/include/caffe/solver.hpp b/include/caffe/solver.hpp
new file mode 100644
index 0000000..c2ced48
--- /dev/null
+++ b/include/caffe/solver.hpp
@@ -0,0 +1,150 @@
+#ifndef CAFFE_OPTIMIZATION_SOLVER_HPP_
+#define CAFFE_OPTIMIZATION_SOLVER_HPP_
+
+#include <string>
+#include <vector>
+
+#include "caffe/net.hpp"
+
+namespace caffe {
+
+/**
+ * @brief An interface for classes that perform optimization on Net%s.
+ *
+ * Requires implementation of ApplyUpdate to compute a parameter update
+ * given the current state of the Net parameters.
+ */
+template <typename Dtype>
+class Solver {
+ public:
+  explicit Solver(const SolverParameter& param);
+  explicit Solver(const string& param_file);
+  void Init(const SolverParameter& param);
+  void InitTrainNet();
+  void InitTestNets();
+  // The main entry point of the solver. By default, iter will be zero. Pass
+  // in a non-zero iter number to resume training from a pre-trained net.
+  virtual void Solve(const char* resume_file = NULL);
+  inline void Solve(const string resume_file) { Solve(resume_file.c_str()); }
+  void Step(int iters);
+  // The Restore function implements how one should restore the solver to a
+  // previously snapshotted state. You should implement the RestoreSolverState()
+  // function that restores the state from a SolverState protocol buffer.
+  void Restore(const char* resume_file);
+  virtual ~Solver() {}
+  inline shared_ptr<Net<Dtype> > net() { return net_; }
+  inline const vector<shared_ptr<Net<Dtype> > >& test_nets() {
+    return test_nets_;
+  }
+  int iter() { return iter_; }
+
+ protected:
+  // Make and apply the update value for the current iteration.
+  virtual void ApplyUpdate() = 0;
+  // The Solver::Snapshot function implements the basic snapshotting utility
+  // that stores the learned net. You should implement the SnapshotSolverState()
+  // function that produces a SolverState protocol buffer that needs to be
+  // written to disk together with the learned net.
+  void Snapshot();
+  // The test routine
+  void TestAll();
+  void Test(const int test_net_id = 0);
+  virtual void SnapshotSolverState(SolverState* state) = 0;
+  virtual void RestoreSolverState(const SolverState& state) = 0;
+  void DisplayOutputBlobs(const int net_id);
+
+  SolverParameter param_;
+  int iter_;
+  int current_step_;
+  shared_ptr<Net<Dtype> > net_;
+  vector<shared_ptr<Net<Dtype> > > test_nets_;
+
+  DISABLE_COPY_AND_ASSIGN(Solver);
+};
+
+
+/**
+ * @brief Optimizes the parameters of a Net using
+ *        stochastic gradient descent (SGD) with momentum.
+ */
+template <typename Dtype>
+class SGDSolver : public Solver<Dtype> {
+ public:
+  explicit SGDSolver(const SolverParameter& param)
+      : Solver<Dtype>(param) { PreSolve(); }
+  explicit SGDSolver(const string& param_file)
+      : Solver<Dtype>(param_file) { PreSolve(); }
+
+  const vector<shared_ptr<Blob<Dtype> > >& history() { return history_; }
+
+ protected:
+  void PreSolve();
+  Dtype GetLearningRate();
+  virtual void ApplyUpdate();
+  virtual void Normalize(int param_id);
+  virtual void Regularize(int param_id);
+  virtual void ComputeUpdateValue(int param_id, Dtype rate);
+  virtual void ClipGradients();
+  virtual void SnapshotSolverState(SolverState * state);
+  virtual void RestoreSolverState(const SolverState& state);
+  // history maintains the historical momentum data.
+  // update maintains update related data and is not needed in snapshots.
+  // temp maintains other information that might be needed in computation
+  //   of gradients/updates and is not needed in snapshots
+  vector<shared_ptr<Blob<Dtype> > > history_, update_, temp_;
+
+  DISABLE_COPY_AND_ASSIGN(SGDSolver);
+};
+
+template <typename Dtype>
+class NesterovSolver : public SGDSolver<Dtype> {
+ public:
+  explicit NesterovSolver(const SolverParameter& param)
+      : SGDSolver<Dtype>(param) {}
+  explicit NesterovSolver(const string& param_file)
+      : SGDSolver<Dtype>(param_file) {}
+
+ protected:
+  virtual void ComputeUpdateValue(int param_id, Dtype rate);
+
+  DISABLE_COPY_AND_ASSIGN(NesterovSolver);
+};
+
+template <typename Dtype>
+class AdaGradSolver : public SGDSolver<Dtype> {
+ public:
+  explicit AdaGradSolver(const SolverParameter& param)
+      : SGDSolver<Dtype>(param) { constructor_sanity_check(); }
+  explicit AdaGradSolver(const string& param_file)
+      : SGDSolver<Dtype>(param_file) { constructor_sanity_check(); }
+
+ protected:
+  virtual void ComputeUpdateValue(int param_id, Dtype rate);
+  void constructor_sanity_check() {
+    CHECK_EQ(0, this->param_.momentum())
+        << "Momentum cannot be used with AdaGrad.";
+  }
+
+  DISABLE_COPY_AND_ASSIGN(AdaGradSolver);
+};
+
+template <typename Dtype>
+Solver<Dtype>* GetSolver(const SolverParameter& param) {
+  SolverParameter_SolverType type = param.solver_type();
+
+  switch (type) {
+  case SolverParameter_SolverType_SGD:
+      return new SGDSolver<Dtype>(param);
+  case SolverParameter_SolverType_NESTEROV:
+      return new NesterovSolver<Dtype>(param);
+  case SolverParameter_SolverType_ADAGRAD:
+      return new AdaGradSolver<Dtype>(param);
+  default:
+      LOG(FATAL) << "Unknown SolverType: " << type;
+  }
+  return (Solver<Dtype>*) NULL;
+}
+
+}  // namespace caffe
+
+#endif  // CAFFE_OPTIMIZATION_SOLVER_HPP_
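
A minimal sketch of how the GetSolver factory declared above might be used, assuming a SolverParameter has already been populated elsewhere (for example, parsed from a solver prototxt); error handling and snapshot/restore are omitted.

// Sketch only: select and run a solver via the factory in solver.hpp.
#include "caffe/solver.hpp"

void train_with_solver(const caffe::SolverParameter& solver_param) {
  // GetSolver returns an SGDSolver, NesterovSolver, or AdaGradSolver
  // depending on solver_param.solver_type().
  caffe::Solver<float>* solver = caffe::GetSolver<float>(solver_param);
  solver->Solve();   // run optimization from iteration 0
  delete solver;
}

The factory keeps caller code independent of the concrete solver class; adding a new solver type only requires extending the switch in GetSolver.
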
diff --git a/include/caffe/syncedmem.hpp b/include/caffe/syncedmem.hpp
new file mode 100644
index 0000000..1b726de
--- /dev/null
+++ b/include/caffe/syncedmem.hpp
@@ -0,0 +1,73 @@
+#ifndef CAFFE_SYNCEDMEM_HPP_
+#define CAFFE_SYNCEDMEM_HPP_
+
+#include <cstdlib>
+
+#include "caffe/common.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+// Theoretically, CaffeMallocHost and CaffeFreeHost should simply call the
+// cudaMallocHost and cudaFree functions in order to create pinned memory.
+// However, those functions rely on the existence of a CUDA GPU (it is not
+// clear why that should be required, since allocating memory should not
+// touch the GPU, but it raises an error as of CUDA 5.0) and would cause
+// problems when running on a machine without a GPU. Thus, we simply define
+// these two functions for safety, and to allow a future change if the
+// problem of calling CUDA functions disappears in a later version.
+//
+// In practice, although we are allocating unpinned memory here, as long as
+// it is accessed regularly the pages almost always stay in physical memory
+// (assuming enough RAM is installed), so this does not appear to create a
+// memory bottleneck.
+
+inline void CaffeMallocHost(void** ptr, size_t size) {
+  *ptr = malloc(size);
+  CHECK(*ptr) << "host allocation of size " << size << " failed";
+}
+
+inline void CaffeFreeHost(void* ptr) {
+  free(ptr);
+}
+
+
+/**
+ * @brief Manages memory allocation and synchronization between the host (CPU)
+ *        and device (GPU).
+ *
+ * TODO(dox): more thorough description.
+ */
+class SyncedMemory {
+ public:
+  SyncedMemory()
+      : cpu_ptr_(NULL), gpu_ptr_(NULL), size_(0), head_(UNINITIALIZED),
+        own_cpu_data_(false) {}
+  explicit SyncedMemory(size_t size)
+      : cpu_ptr_(NULL), gpu_ptr_(NULL), size_(size), head_(UNINITIALIZED),
+        own_cpu_data_(false) {}
+  ~SyncedMemory();
+  const void* cpu_data();
+  void set_cpu_data(void* data);
+  const void* gpu_data();
+  void* mutable_cpu_data();
+  void* mutable_gpu_data();
+  enum SyncedHead { UNINITIALIZED, HEAD_AT_CPU, HEAD_AT_GPU, SYNCED };
+  SyncedHead head() { return head_; }
+  size_t size() { return size_; }
+
+ private:
+  void to_cpu();
+  void to_gpu();
+  void* cpu_ptr_;
+  void* gpu_ptr_;
+  size_t size_;
+  SyncedHead head_;
+  bool own_cpu_data_;
+
+  DISABLE_COPY_AND_ASSIGN(SyncedMemory);
+};  // class SyncedMemory
+
+}  // namespace caffe
+
+#endif  // CAFFE_SYNCEDMEM_HPP_
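
A sketch of the SyncedMemory state machine declared above, assuming a GPU-enabled (non-CPU_ONLY) build: head() tracks where the freshest copy lives, and the accessors move data lazily.

// Sketch only: illustrating head-state transitions of SyncedMemory.
#include "caffe/syncedmem.hpp"

void synced_memory_demo() {
  caffe::SyncedMemory mem(16 * sizeof(float));        // head: UNINITIALIZED
  float* cpu = static_cast<float*>(mem.mutable_cpu_data());  // head: HEAD_AT_CPU
  cpu[0] = 1.0f;
  const void* gpu = mem.gpu_data();   // copies host -> device; head: SYNCED
  (void)gpu;
  mem.mutable_cpu_data();             // marks CPU copy as freshest: HEAD_AT_CPU
}
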
diff --git a/include/caffe/test/test_caffe_main.hpp b/include/caffe/test/test_caffe_main.hpp
new file mode 100644
index 0000000..fc15609
--- /dev/null
+++ b/include/caffe/test/test_caffe_main.hpp
@@ -0,0 +1,78 @@
+// The main caffe test code. Your test cpp code should include this hpp
+// to allow a main function to be compiled into the binary.
+#ifndef CAFFE_TEST_TEST_CAFFE_MAIN_HPP_
+#define CAFFE_TEST_TEST_CAFFE_MAIN_HPP_
+
+#include <glog/logging.h>
+#include <gtest/gtest.h>
+
+#include <cstdio>
+#include <cstdlib>
+
+#include "caffe/common.hpp"
+
+using std::cout;
+using std::endl;
+
+#ifdef CMAKE_BUILD
+  #include "caffe_config.h"
+#else
+  #define CUDA_TEST_DEVICE -1
+  #define CMAKE_SOURCE_DIR "src/"
+  #define EXAMPLES_SOURCE_DIR "examples/"
+  #define CMAKE_EXT ""
+#endif
+
+int main(int argc, char** argv);
+
+namespace caffe {
+
+template <typename TypeParam>
+class MultiDeviceTest : public ::testing::Test {
+ public:
+  typedef typename TypeParam::Dtype Dtype;
+ protected:
+  MultiDeviceTest() {
+    Caffe::set_mode(TypeParam::device);
+  }
+  virtual ~MultiDeviceTest() {}
+};
+
+typedef ::testing::Types<float, double> TestDtypes;
+
+template <typename TypeParam>
+struct CPUDevice {
+  typedef TypeParam Dtype;
+  static const Caffe::Brew device = Caffe::CPU;
+};
+
+template <typename Dtype>
+class CPUDeviceTest : public MultiDeviceTest<CPUDevice<Dtype> > {
+};
+
+#ifdef CPU_ONLY
+
+typedef ::testing::Types<CPUDevice<float>,
+                         CPUDevice<double> > TestDtypesAndDevices;
+
+#else
+
+template <typename TypeParam>
+struct GPUDevice {
+  typedef TypeParam Dtype;
+  static const Caffe::Brew device = Caffe::GPU;
+};
+
+template <typename Dtype>
+class GPUDeviceTest : public MultiDeviceTest<GPUDevice<Dtype> > {
+};
+
+typedef ::testing::Types<CPUDevice<float>, CPUDevice<double>,
+                         GPUDevice<float>, GPUDevice<double> >
+                         TestDtypesAndDevices;
+
+#endif
+
+}  // namespace caffe
+
+#endif  // CAFFE_TEST_TEST_CAFFE_MAIN_HPP_
diff --git a/include/caffe/test/test_gradient_check_util.hpp b/include/caffe/test/test_gradient_check_util.hpp
new file mode 100644
index 0000000..cc5dcba
--- /dev/null
+++ b/include/caffe/test/test_gradient_check_util.hpp
@@ -0,0 +1,260 @@
+#ifndef CAFFE_TEST_GRADIENT_CHECK_UTIL_H_
+#define CAFFE_TEST_GRADIENT_CHECK_UTIL_H_
+
+#include <glog/logging.h>
+#include <gtest/gtest.h>
+
+#include <algorithm>
+#include <cmath>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/net.hpp"
+
+namespace caffe {
+
+// The gradient checker adds a L2 normalization loss function on top of the
+// top blobs, and checks the gradient.
+template <typename Dtype>
+class GradientChecker {
+ public:
+  // kink and kink_range specify an ignored nonsmooth region of the form
+  // kink - kink_range <= |feature value| <= kink + kink_range,
+  // which accounts for all nonsmoothness in use by caffe
+  GradientChecker(const Dtype stepsize, const Dtype threshold,
+      const unsigned int seed = 1701, const Dtype kink = 0.,
+      const Dtype kink_range = -1)
+      : stepsize_(stepsize), threshold_(threshold), seed_(seed),
+        kink_(kink), kink_range_(kink_range) {}
+  // Checks the gradient of a layer, with provided bottom layers and top
+  // layers.
+  // Note that after the gradient check, we do not guarantee that the data
+  // stored in the layer parameters and the blobs are unchanged.
+  void CheckGradient(Layer<Dtype>* layer, const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top, int check_bottom = -1) {
+      layer->SetUp(bottom, top);
+      CheckGradientSingle(layer, bottom, top, check_bottom, -1, -1);
+  }
+  void CheckGradientExhaustive(Layer<Dtype>* layer,
+      const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top,
+      int check_bottom = -1);
+
+  // CheckGradientEltwise can be used to test layers that perform element-wise
+  // computation only (e.g., neuron layers) -- where (d y_i) / (d x_j) = 0 when
+  // i != j.
+  void CheckGradientEltwise(Layer<Dtype>* layer,
+      const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top);
+
+  void CheckGradientSingle(Layer<Dtype>* layer,
+      const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top,
+      int check_bottom, int top_id, int top_data_id, bool element_wise = false);
+
+  // Checks the gradient of a network. This network should not have any data
+  // layers or loss layers, since the function does not explicitly deal with
+  // such cases yet. All input blobs and parameter blobs are going to be
+  // checked, layer by layer, to keep numerical problems from accumulating.
+  void CheckGradientNet(const Net<Dtype>& net,
+      const vector<Blob<Dtype>*>& input);
+
+ protected:
+  Dtype GetObjAndGradient(const Layer<Dtype>& layer,
+      const vector<Blob<Dtype>*>& top, int top_id = -1, int top_data_id = -1);
+  Dtype stepsize_;
+  Dtype threshold_;
+  unsigned int seed_;
+  Dtype kink_;
+  Dtype kink_range_;
+};
+
+
+template <typename Dtype>
+void GradientChecker<Dtype>::CheckGradientSingle(Layer<Dtype>* layer,
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top,
+    int check_bottom, int top_id, int top_data_id, bool element_wise) {
+  if (element_wise) {
+    CHECK_EQ(0, layer->blobs().size());
+    CHECK_LE(0, top_id);
+    CHECK_LE(0, top_data_id);
+    const int top_count = top[top_id]->count();
+    for (int blob_id = 0; blob_id < bottom.size(); ++blob_id) {
+      CHECK_EQ(top_count, bottom[blob_id]->count());
+    }
+  }
+  // First, figure out what blobs we need to check against, and zero init
+  // parameter blobs.
+  vector<Blob<Dtype>*> blobs_to_check;
+  vector<bool> propagate_down(bottom.size(), check_bottom < 0);
+  for (int i = 0; i < layer->blobs().size(); ++i) {
+    Blob<Dtype>* blob = layer->blobs()[i].get();
+    caffe_set(blob->count(), static_cast<Dtype>(0), blob->mutable_cpu_diff());
+    blobs_to_check.push_back(blob);
+  }
+  if (check_bottom < 0) {
+    for (int i = 0; i < bottom.size(); ++i) {
+      blobs_to_check.push_back(bottom[i]);
+    }
+  } else {
+    CHECK_LT(check_bottom, bottom.size());
+    blobs_to_check.push_back(bottom[check_bottom]);
+    propagate_down[check_bottom] = true;
+  }
+  // Compute the gradient analytically using Backward
+  Caffe::set_random_seed(seed_);
+  // Ignore the loss from the layer (it's just the weighted sum of the losses
+  // from the top blobs, whose gradients we may want to test individually).
+  layer->Forward(bottom, top);
+  // Get additional loss from the objective
+  GetObjAndGradient(*layer, top, top_id, top_data_id);
+  layer->Backward(top, propagate_down, bottom);
+  // Store computed gradients for all checked blobs
+  vector<shared_ptr<Blob<Dtype> > >
+      computed_gradient_blobs(blobs_to_check.size());
+  for (int blob_id = 0; blob_id < blobs_to_check.size(); ++blob_id) {
+    Blob<Dtype>* current_blob = blobs_to_check[blob_id];
+    computed_gradient_blobs[blob_id].reset(new Blob<Dtype>());
+    computed_gradient_blobs[blob_id]->ReshapeLike(*current_blob);
+    const int count = blobs_to_check[blob_id]->count();
+    const Dtype* diff = blobs_to_check[blob_id]->cpu_diff();
+    Dtype* computed_gradients =
+        computed_gradient_blobs[blob_id]->mutable_cpu_data();
+    caffe_copy(count, diff, computed_gradients);
+  }
+  // Compute derivative of top w.r.t. each bottom and parameter input using
+  // finite differencing.
+  // LOG(ERROR) << "Checking " << blobs_to_check.size() << " blobs.";
+  for (int blob_id = 0; blob_id < blobs_to_check.size(); ++blob_id) {
+    Blob<Dtype>* current_blob = blobs_to_check[blob_id];
+    const Dtype* computed_gradients =
+        computed_gradient_blobs[blob_id]->cpu_data();
+    // LOG(ERROR) << "Blob " << blob_id << ": checking "
+    //     << current_blob->count() << " parameters.";
+    for (int feat_id = 0; feat_id < current_blob->count(); ++feat_id) {
+      // For an element-wise layer, we only need to do finite differencing to
+      // compute the derivative of top[top_id][top_data_id] w.r.t.
+      // bottom[blob_id][i] only for i == top_data_id.  For any other
+      // i != top_data_id, we know the derivative is 0 by definition, and simply
+      // check that that's true.
+      Dtype estimated_gradient = 0;
+      Dtype positive_objective = 0;
+      Dtype negative_objective = 0;
+      if (!element_wise || (feat_id == top_data_id)) {
+        // Do finite differencing.
+        // Compute loss with stepsize_ added to input.
+        current_blob->mutable_cpu_data()[feat_id] += stepsize_;
+        Caffe::set_random_seed(seed_);
+        layer->Forward(bottom, top);
+        positive_objective =
+            GetObjAndGradient(*layer, top, top_id, top_data_id);
+        // Compute loss with stepsize_ subtracted from input.
+        current_blob->mutable_cpu_data()[feat_id] -= stepsize_ * 2;
+        Caffe::set_random_seed(seed_);
+        layer->Forward(bottom, top);
+        negative_objective =
+            GetObjAndGradient(*layer, top, top_id, top_data_id);
+        // Recover original input value.
+        current_blob->mutable_cpu_data()[feat_id] += stepsize_;
+        estimated_gradient = (positive_objective - negative_objective) /
+            stepsize_ / 2.;
+      }
+      Dtype computed_gradient = computed_gradients[feat_id];
+      Dtype feature = current_blob->cpu_data()[feat_id];
+      // LOG(ERROR) << "debug: " << current_blob->cpu_data()[feat_id] << " "
+      //     << current_blob->cpu_diff()[feat_id];
+      if (kink_ - kink_range_ > fabs(feature)
+          || fabs(feature) > kink_ + kink_range_) {
+        // We check relative accuracy, but for too small values, we threshold
+        // the scale factor by 1.
+        Dtype scale = std::max(
+            std::max(fabs(computed_gradient), fabs(estimated_gradient)), 1.);
+        EXPECT_NEAR(computed_gradient, estimated_gradient, threshold_ * scale)
+          << "debug: (top_id, top_data_id, blob_id, feat_id)="
+          << top_id << "," << top_data_id << "," << blob_id << "," << feat_id
+          << "; feat = " << feature
+          << "; objective+ = " << positive_objective
+          << "; objective- = " << negative_objective;
+      }
+      // LOG(ERROR) << "Feature: " << current_blob->cpu_data()[feat_id];
+      // LOG(ERROR) << "computed gradient: " << computed_gradient
+      //    << " estimated_gradient: " << estimated_gradient;
+    }
+  }
+}
+
+template <typename Dtype>
+void GradientChecker<Dtype>::CheckGradientExhaustive(Layer<Dtype>* layer,
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top,
+    int check_bottom) {
+  layer->SetUp(bottom, top);
+  CHECK_GT(top.size(), 0) << "Exhaustive mode requires at least one top blob.";
+  // LOG(ERROR) << "Exhaustive Mode.";
+  for (int i = 0; i < top.size(); ++i) {
+    // LOG(ERROR) << "Exhaustive: blob " << i << " size " << top[i]->count();
+    for (int j = 0; j < top[i]->count(); ++j) {
+      // LOG(ERROR) << "Exhaustive: blob " << i << " data " << j;
+      CheckGradientSingle(layer, bottom, top, check_bottom, i, j);
+    }
+  }
+}
+
+template <typename Dtype>
+void GradientChecker<Dtype>::CheckGradientEltwise(Layer<Dtype>* layer,
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  layer->SetUp(bottom, top);
+  CHECK_GT(top.size(), 0) << "Eltwise mode requires at least one top blob.";
+  const int check_bottom = -1;
+  const bool element_wise = true;
+  for (int i = 0; i < top.size(); ++i) {
+    for (int j = 0; j < top[i]->count(); ++j) {
+      CheckGradientSingle(layer, bottom, top, check_bottom, i, j, element_wise);
+    }
+  }
+}
+
+template <typename Dtype>
+void GradientChecker<Dtype>::CheckGradientNet(
+    const Net<Dtype>& net, const vector<Blob<Dtype>*>& input) {
+  const vector<shared_ptr<Layer<Dtype> > >& layers = net.layers();
+  vector<vector<Blob<Dtype>*> >& bottom_vecs = net.bottom_vecs();
+  vector<vector<Blob<Dtype>*> >& top_vecs = net.top_vecs();
+  for (int i = 0; i < layers.size(); ++i) {
+    net.Forward(input);
+    LOG(ERROR) << "Checking gradient for " << layers[i]->layer_param().name();
+    CheckGradientExhaustive(*(layers[i].get()), bottom_vecs[i], top_vecs[i]);
+  }
+}
+
+template <typename Dtype>
+Dtype GradientChecker<Dtype>::GetObjAndGradient(const Layer<Dtype>& layer,
+    const vector<Blob<Dtype>*>& top, int top_id, int top_data_id) {
+  Dtype loss = 0;
+  if (top_id < 0) {
+    // the loss will be half of the sum of squares of all outputs
+    for (int i = 0; i < top.size(); ++i) {
+      Blob<Dtype>* top_blob = top[i];
+      const Dtype* top_blob_data = top_blob->cpu_data();
+      Dtype* top_blob_diff = top_blob->mutable_cpu_diff();
+      int count = top_blob->count();
+      for (int j = 0; j < count; ++j) {
+        loss += top_blob_data[j] * top_blob_data[j];
+      }
+      // set the diff: simply the data.
+      caffe_copy(top_blob->count(), top_blob_data, top_blob_diff);
+    }
+    loss /= 2.;
+  } else {
+    // the loss will be the top_data_id-th element in the top_id-th blob.
+    for (int i = 0; i < top.size(); ++i) {
+      Blob<Dtype>* top_blob = top[i];
+      Dtype* top_blob_diff = top_blob->mutable_cpu_diff();
+      caffe_set(top_blob->count(), Dtype(0), top_blob_diff);
+    }
+    const Dtype loss_weight = 2;
+    loss = top[top_id]->cpu_data()[top_data_id] * loss_weight;
+    top[top_id]->mutable_cpu_diff()[top_data_id] = loss_weight;
+  }
+  return loss;
+}
+
+}  // namespace caffe
+
+#endif  // CAFFE_TEST_GRADIENT_CHECK_UTIL_H_
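
The core idea of GradientChecker is central differencing: perturb one value by plus/minus stepsize_, recompute the objective, and compare the resulting slope with the analytically computed gradient under the same relative-accuracy test as CheckGradientSingle. A standalone numeric sketch of that comparison, using tanh as the example function rather than a Caffe layer:

// Sketch only: central-difference gradient estimate vs. analytic derivative.
#include <algorithm>
#include <cmath>
#include <cstdio>

int main() {
  const double stepsize = 1e-3;
  const double x = 0.7;
  // Example function y = tanh(x); analytic dy/dx = 1 - tanh(x)^2.
  const double analytic = 1.0 - std::tanh(x) * std::tanh(x);
  const double estimated =
      (std::tanh(x + stepsize) - std::tanh(x - stepsize)) / (2.0 * stepsize);
  // Relative-accuracy check, thresholding the scale factor by 1 as above.
  const double scale = std::max(
      std::max(std::fabs(analytic), std::fabs(estimated)), 1.0);
  std::printf("analytic=%g estimated=%g rel_err=%g\n",
              analytic, estimated, std::fabs(analytic - estimated) / scale);
  return 0;
}
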
diff --git a/include/caffe/util/benchmark.hpp b/include/caffe/util/benchmark.hpp
new file mode 100644
index 0000000..d635827
--- /dev/null
+++ b/include/caffe/util/benchmark.hpp
@@ -0,0 +1,52 @@
+#ifndef CAFFE_UTIL_BENCHMARK_H_
+#define CAFFE_UTIL_BENCHMARK_H_
+
+#include <boost/date_time/posix_time/posix_time.hpp>
+
+#include "caffe/util/device_alternate.hpp"
+
+namespace caffe {
+
+class Timer {
+ public:
+  Timer();
+  virtual ~Timer();
+  virtual void Start();
+  virtual void Stop();
+  virtual float MilliSeconds();
+  virtual float MicroSeconds();
+  virtual float Seconds();
+
+  inline bool initted() { return initted_; }
+  inline bool running() { return running_; }
+  inline bool has_run_at_least_once() { return has_run_at_least_once_; }
+
+ protected:
+  void Init();
+
+  bool initted_;
+  bool running_;
+  bool has_run_at_least_once_;
+#ifndef CPU_ONLY
+  cudaEvent_t start_gpu_;
+  cudaEvent_t stop_gpu_;
+#endif
+  boost::posix_time::ptime start_cpu_;
+  boost::posix_time::ptime stop_cpu_;
+  float elapsed_milliseconds_;
+  float elapsed_microseconds_;
+};
+
+class CPUTimer : public Timer {
+ public:
+  explicit CPUTimer();
+  virtual ~CPUTimer() {}
+  virtual void Start();
+  virtual void Stop();
+  virtual float MilliSeconds();
+  virtual float MicroSeconds();
+};
+
+}  // namespace caffe
+
+#endif   // CAFFE_UTIL_BENCHMARK_H_
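
A minimal usage sketch of the timer interface declared above; CPUTimer is chosen so the example does not depend on CUDA events, and the workload is arbitrary.

// Sketch only: timing a code region with CPUTimer.
#include <cstdio>
#include "caffe/util/benchmark.hpp"

float time_some_work() {
  caffe::CPUTimer timer;
  timer.Start();
  double acc = 0;
  for (int i = 0; i < 1000000; ++i) { acc += i * 0.5; }  // arbitrary workload
  timer.Stop();
  std::printf("acc=%g\n", acc);   // keep the work from being optimized away
  return timer.MilliSeconds();    // elapsed wall-clock time in milliseconds
}
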
diff --git a/include/caffe/util/cudnn.hpp b/include/caffe/util/cudnn.hpp
new file mode 100644
index 0000000..b531dd5
--- /dev/null
+++ b/include/caffe/util/cudnn.hpp
@@ -0,0 +1,132 @@
+#ifndef CAFFE_UTIL_CUDNN_H_
+#define CAFFE_UTIL_CUDNN_H_
+#ifdef USE_CUDNN
+
+#include <cudnn.h>
+
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+
+#define CUDNN_CHECK(condition) \
+  do { \
+    cudnnStatus_t status = condition; \
+    CHECK_EQ(status, CUDNN_STATUS_SUCCESS) << " "\
+      << cudnnGetErrorString(status); \
+  } while (0)
+
+inline const char* cudnnGetErrorString(cudnnStatus_t status) {
+  switch (status) {
+    case CUDNN_STATUS_SUCCESS:
+      return "CUDNN_STATUS_SUCCESS";
+    case CUDNN_STATUS_NOT_INITIALIZED:
+      return "CUDNN_STATUS_NOT_INITIALIZED";
+    case CUDNN_STATUS_ALLOC_FAILED:
+      return "CUDNN_STATUS_ALLOC_FAILED";
+    case CUDNN_STATUS_BAD_PARAM:
+      return "CUDNN_STATUS_BAD_PARAM";
+    case CUDNN_STATUS_INTERNAL_ERROR:
+      return "CUDNN_STATUS_INTERNAL_ERROR";
+    case CUDNN_STATUS_INVALID_VALUE:
+      return "CUDNN_STATUS_INVALID_VALUE";
+    case CUDNN_STATUS_ARCH_MISMATCH:
+      return "CUDNN_STATUS_ARCH_MISMATCH";
+    case CUDNN_STATUS_MAPPING_ERROR:
+      return "CUDNN_STATUS_MAPPING_ERROR";
+    case CUDNN_STATUS_EXECUTION_FAILED:
+      return "CUDNN_STATUS_EXECUTION_FAILED";
+    case CUDNN_STATUS_NOT_SUPPORTED:
+      return "CUDNN_STATUS_NOT_SUPPORTED";
+    case CUDNN_STATUS_LICENSE_ERROR:
+      return "CUDNN_STATUS_LICENSE_ERROR";
+  }
+  return "Unknown cudnn status";
+}
+
+namespace caffe {
+
+namespace cudnn {
+
+template <typename Dtype> class dataType;
+template<> class dataType<float>  {
+ public:
+  static const cudnnDataType_t type = CUDNN_DATA_FLOAT;
+  static float oneval, zeroval;
+  static const void *one, *zero;
+};
+template<> class dataType<double> {
+ public:
+  static const cudnnDataType_t type = CUDNN_DATA_DOUBLE;
+  static double oneval, zeroval;
+  static const void *one, *zero;
+};
+
+template <typename Dtype>
+inline void createTensor4dDesc(cudnnTensorDescriptor_t* desc) {
+  CUDNN_CHECK(cudnnCreateTensorDescriptor(desc));
+}
+
+template <typename Dtype>
+inline void setTensor4dDesc(cudnnTensorDescriptor_t* desc,
+    int n, int c, int h, int w,
+    int stride_n, int stride_c, int stride_h, int stride_w) {
+  CUDNN_CHECK(cudnnSetTensor4dDescriptorEx(*desc, dataType<Dtype>::type,
+        n, c, h, w, stride_n, stride_c, stride_h, stride_w));
+}
+
+template <typename Dtype>
+inline void setTensor4dDesc(cudnnTensorDescriptor_t* desc,
+    int n, int c, int h, int w) {
+  const int stride_w = 1;
+  const int stride_h = w * stride_w;
+  const int stride_c = h * stride_h;
+  const int stride_n = c * stride_c;
+  setTensor4dDesc<Dtype>(desc, n, c, h, w,
+                         stride_n, stride_c, stride_h, stride_w);
+}
+
+template <typename Dtype>
+inline void createFilterDesc(cudnnFilterDescriptor_t* desc,
+    int n, int c, int h, int w) {
+  CUDNN_CHECK(cudnnCreateFilterDescriptor(desc));
+  CUDNN_CHECK(cudnnSetFilter4dDescriptor(*desc, dataType<Dtype>::type,
+      n, c, h, w));
+}
+
+template <typename Dtype>
+inline void createConvolutionDesc(cudnnConvolutionDescriptor_t* conv) {
+  CUDNN_CHECK(cudnnCreateConvolutionDescriptor(conv));
+}
+
+template <typename Dtype>
+inline void setConvolutionDesc(cudnnConvolutionDescriptor_t* conv,
+    cudnnTensorDescriptor_t bottom, cudnnFilterDescriptor_t filter,
+    int pad_h, int pad_w, int stride_h, int stride_w) {
+  CUDNN_CHECK(cudnnSetConvolution2dDescriptor(*conv,
+      pad_h, pad_w, stride_h, stride_w, 1, 1, CUDNN_CROSS_CORRELATION));
+}
+
+template <typename Dtype>
+inline void createPoolingDesc(cudnnPoolingDescriptor_t* pool_desc,
+    PoolingParameter_PoolMethod poolmethod, cudnnPoolingMode_t* mode,
+    int h, int w, int pad_h, int pad_w, int stride_h, int stride_w) {
+  switch (poolmethod) {
+  case PoolingParameter_PoolMethod_MAX:
+    *mode = CUDNN_POOLING_MAX;
+    break;
+  case PoolingParameter_PoolMethod_AVE:
+    *mode = CUDNN_POOLING_AVERAGE_COUNT_INCLUDE_PADDING;
+    break;
+  default:
+    LOG(FATAL) << "Unknown pooling method.";
+  }
+  CUDNN_CHECK(cudnnCreatePoolingDescriptor(pool_desc));
+  CUDNN_CHECK(cudnnSetPooling2dDescriptor(*pool_desc, *mode, h, w,
+        pad_h, pad_w, stride_h, stride_w));
+}
+
+}  // namespace cudnn
+
+}  // namespace caffe
+
+#endif  // USE_CUDNN
+#endif  // CAFFE_UTIL_CUDNN_H_
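
The four-argument setTensor4dDesc overload above derives packed NCHW strides (stride_w = 1, stride_h = W, stride_c = H*W, stride_n = C*H*W). A small sketch of that arithmetic and the resulting linear offset of an element; the dimensions are chosen only for illustration.

// Sketch only: packed NCHW stride arithmetic as in setTensor4dDesc.
#include <cstdio>

int main() {
  const int c = 3, h = 4, w = 5;
  const int stride_w = 1;
  const int stride_h = w * stride_w;   // 5
  const int stride_c = h * stride_h;   // 20
  const int stride_n = c * stride_c;   // 60
  // Linear offset of element (n_i, c_i, h_i, w_i) in a packed NCHW tensor.
  const int n_i = 1, c_i = 2, h_i = 3, w_i = 4;
  std::printf("offset = %d\n",
      n_i * stride_n + c_i * stride_c + h_i * stride_h + w_i * stride_w);
  return 0;
}
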
diff --git a/include/caffe/util/db.hpp b/include/caffe/util/db.hpp
new file mode 100644
index 0000000..59ec3d3
--- /dev/null
+++ b/include/caffe/util/db.hpp
@@ -0,0 +1,54 @@
+#ifndef CAFFE_UTIL_DB_HPP
+#define CAFFE_UTIL_DB_HPP
+
+#include <string>
+
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+
+namespace caffe { namespace db {
+
+enum Mode { READ, WRITE, NEW };
+
+class Cursor {
+ public:
+  Cursor() { }
+  virtual ~Cursor() { }
+  virtual void SeekToFirst() = 0;
+  virtual void Next() = 0;
+  virtual string key() = 0;
+  virtual string value() = 0;
+  virtual bool valid() = 0;
+
+  DISABLE_COPY_AND_ASSIGN(Cursor);
+};
+
+class Transaction {
+ public:
+  Transaction() { }
+  virtual ~Transaction() { }
+  virtual void Put(const string& key, const string& value) = 0;
+  virtual void Commit() = 0;
+
+  DISABLE_COPY_AND_ASSIGN(Transaction);
+};
+
+class DB {
+ public:
+  DB() { }
+  virtual ~DB() { }
+  virtual void Open(const string& source, Mode mode) = 0;
+  virtual void Close() = 0;
+  virtual Cursor* NewCursor() = 0;
+  virtual Transaction* NewTransaction() = 0;
+
+  DISABLE_COPY_AND_ASSIGN(DB);
+};
+
+DB* GetDB(DataParameter::DB backend);
+DB* GetDB(const string& backend);
+
+}  // namespace db
+}  // namespace caffe
+
+#endif  // CAFFE_UTIL_DB_HPP
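
A minimal sketch of reading a dataset through the abstract DB/Cursor interface above; the "lmdb" backend string and the path are illustrative assumptions, and error handling is omitted.

// Sketch only: count entries in a database via the db.hpp interface.
#include <cstdio>
#include <string>
#include "caffe/util/db.hpp"

void count_entries(const std::string& path) {
  caffe::db::DB* db = caffe::db::GetDB("lmdb");
  db->Open(path, caffe::db::READ);
  caffe::db::Cursor* cursor = db->NewCursor();  // positioned at the first key
  int count = 0;
  for (; cursor->valid(); cursor->Next()) {
    ++count;  // cursor->key() / cursor->value() hold the serialized record
  }
  std::printf("entries: %d\n", count);
  delete cursor;
  db->Close();
  delete db;
}
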
diff --git a/include/caffe/util/db_leveldb.hpp b/include/caffe/util/db_leveldb.hpp
new file mode 100644
index 0000000..1062355
--- /dev/null
+++ b/include/caffe/util/db_leveldb.hpp
@@ -0,0 +1,73 @@
+#ifndef CAFFE_UTIL_DB_LEVELDB_HPP
+#define CAFFE_UTIL_DB_LEVELDB_HPP
+
+#include <string>
+
+#include "leveldb/db.h"
+#include "leveldb/write_batch.h"
+
+#include "caffe/util/db.hpp"
+
+namespace caffe { namespace db {
+
+class LevelDBCursor : public Cursor {
+ public:
+  explicit LevelDBCursor(leveldb::Iterator* iter)
+    : iter_(iter) { SeekToFirst(); }
+  ~LevelDBCursor() { delete iter_; }
+  virtual void SeekToFirst() { iter_->SeekToFirst(); }
+  virtual void Next() { iter_->Next(); }
+  virtual string key() { return iter_->key().ToString(); }
+  virtual string value() { return iter_->value().ToString(); }
+  virtual bool valid() { return iter_->Valid(); }
+
+ private:
+  leveldb::Iterator* iter_;
+};
+
+class LevelDBTransaction : public Transaction {
+ public:
+  explicit LevelDBTransaction(leveldb::DB* db) : db_(db) { CHECK_NOTNULL(db_); }
+  virtual void Put(const string& key, const string& value) {
+    batch_.Put(key, value);
+  }
+  virtual void Commit() {
+    leveldb::Status status = db_->Write(leveldb::WriteOptions(), &batch_);
+    CHECK(status.ok()) << "Failed to write batch to leveldb "
+                       << std::endl << status.ToString();
+  }
+
+ private:
+  leveldb::DB* db_;
+  leveldb::WriteBatch batch_;
+
+  DISABLE_COPY_AND_ASSIGN(LevelDBTransaction);
+};
+
+class LevelDB : public DB {
+ public:
+  LevelDB() : db_(NULL) { }
+  virtual ~LevelDB() { Close(); }
+  virtual void Open(const string& source, Mode mode);
+  virtual void Close() {
+    if (db_ != NULL) {
+      delete db_;
+      db_ = NULL;
+    }
+  }
+  virtual LevelDBCursor* NewCursor() {
+    return new LevelDBCursor(db_->NewIterator(leveldb::ReadOptions()));
+  }
+  virtual LevelDBTransaction* NewTransaction() {
+    return new LevelDBTransaction(db_);
+  }
+
+ private:
+  leveldb::DB* db_;
+};
+
+
+}  // namespace db
+}  // namespace caffe
+
+#endif  // CAFFE_UTIL_DB_LEVELDB_HPP
diff --git a/include/caffe/util/db_lmdb.hpp b/include/caffe/util/db_lmdb.hpp
new file mode 100644
index 0000000..cc7c90a
--- /dev/null
+++ b/include/caffe/util/db_lmdb.hpp
@@ -0,0 +1,91 @@
+#ifndef CAFFE_UTIL_DB_LMDB_HPP
+#define CAFFE_UTIL_DB_LMDB_HPP
+
+#include <string>
+
+#include "lmdb.h"
+
+#include "caffe/util/db.hpp"
+
+namespace caffe { namespace db {
+
+inline void MDB_CHECK(int mdb_status) {
+  CHECK_EQ(mdb_status, MDB_SUCCESS) << mdb_strerror(mdb_status);
+}
+
+class LMDBCursor : public Cursor {
+ public:
+  explicit LMDBCursor(MDB_txn* mdb_txn, MDB_cursor* mdb_cursor)
+    : mdb_txn_(mdb_txn), mdb_cursor_(mdb_cursor), valid_(false) {
+    SeekToFirst();
+  }
+  virtual ~LMDBCursor() {
+    mdb_cursor_close(mdb_cursor_);
+    mdb_txn_abort(mdb_txn_);
+  }
+  virtual void SeekToFirst() { Seek(MDB_FIRST); }
+  virtual void Next() { Seek(MDB_NEXT); }
+  virtual string key() {
+    return string(static_cast<const char*>(mdb_key_.mv_data), mdb_key_.mv_size);
+  }
+  virtual string value() {
+    return string(static_cast<const char*>(mdb_value_.mv_data),
+        mdb_value_.mv_size);
+  }
+  virtual bool valid() { return valid_; }
+
+ private:
+  void Seek(MDB_cursor_op op) {
+    int mdb_status = mdb_cursor_get(mdb_cursor_, &mdb_key_, &mdb_value_, op);
+    if (mdb_status == MDB_NOTFOUND) {
+      valid_ = false;
+    } else {
+      MDB_CHECK(mdb_status);
+      valid_ = true;
+    }
+  }
+
+  MDB_txn* mdb_txn_;
+  MDB_cursor* mdb_cursor_;
+  MDB_val mdb_key_, mdb_value_;
+  bool valid_;
+};
+
+class LMDBTransaction : public Transaction {
+ public:
+  explicit LMDBTransaction(MDB_dbi* mdb_dbi, MDB_txn* mdb_txn)
+    : mdb_dbi_(mdb_dbi), mdb_txn_(mdb_txn) { }
+  virtual void Put(const string& key, const string& value);
+  virtual void Commit() { MDB_CHECK(mdb_txn_commit(mdb_txn_)); }
+
+ private:
+  MDB_dbi* mdb_dbi_;
+  MDB_txn* mdb_txn_;
+
+  DISABLE_COPY_AND_ASSIGN(LMDBTransaction);
+};
+
+class LMDB : public DB {
+ public:
+  LMDB() : mdb_env_(NULL) { }
+  virtual ~LMDB() { Close(); }
+  virtual void Open(const string& source, Mode mode);
+  virtual void Close() {
+    if (mdb_env_ != NULL) {
+      mdb_dbi_close(mdb_env_, mdb_dbi_);
+      mdb_env_close(mdb_env_);
+      mdb_env_ = NULL;
+    }
+  }
+  virtual LMDBCursor* NewCursor();
+  virtual LMDBTransaction* NewTransaction();
+
+ private:
+  MDB_env* mdb_env_;
+  MDB_dbi mdb_dbi_;
+};
+
+}  // namespace db
+}  // namespace caffe
+
+#endif  // CAFFE_UTIL_DB_LMDB_HPP
diff --git a/include/caffe/util/device_alternate.hpp b/include/caffe/util/device_alternate.hpp
new file mode 100644
index 0000000..6ea595d
--- /dev/null
+++ b/include/caffe/util/device_alternate.hpp
@@ -0,0 +1,102 @@
+#ifndef CAFFE_UTIL_DEVICE_ALTERNATE_H_
+#define CAFFE_UTIL_DEVICE_ALTERNATE_H_
+
+#ifdef CPU_ONLY  // CPU-only Caffe.
+
+#include <vector>
+
+// Stub out GPU calls as unavailable.
+
+#define NO_GPU LOG(FATAL) << "Cannot use GPU in CPU-only Caffe: check mode."
+
+#define STUB_GPU(classname) \
+template <typename Dtype> \
+void classname<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom, \
+    const vector<Blob<Dtype>*>& top) { NO_GPU; } \
+template <typename Dtype> \
+void classname<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top, \
+    const vector<bool>& propagate_down, \
+    const vector<Blob<Dtype>*>& bottom) { NO_GPU; } \
+
+#define STUB_GPU_FORWARD(classname, funcname) \
+template <typename Dtype> \
+void classname<Dtype>::funcname##_##gpu(const vector<Blob<Dtype>*>& bottom, \
+    const vector<Blob<Dtype>*>& top) { NO_GPU; } \
+
+#define STUB_GPU_BACKWARD(classname, funcname) \
+template <typename Dtype> \
+void classname<Dtype>::funcname##_##gpu(const vector<Blob<Dtype>*>& top, \
+    const vector<bool>& propagate_down, \
+    const vector<Blob<Dtype>*>& bottom) { NO_GPU; } \
+
+#else  // Normal GPU + CPU Caffe.
+
+#include <cublas_v2.h>
+#include <cuda.h>
+#include <cuda_runtime.h>
+#include <curand.h>
+#include <driver_types.h>  // cuda driver types
+#ifdef USE_CUDNN  // cuDNN acceleration library.
+#include "caffe/util/cudnn.hpp"
+#endif
+
+//
+// CUDA macros
+//
+
+// CUDA: various checks for different function calls.
+#define CUDA_CHECK(condition) \
+  /* Code block avoids redefinition of cudaError_t error */ \
+  do { \
+    cudaError_t error = condition; \
+    CHECK_EQ(error, cudaSuccess) << " " << cudaGetErrorString(error); \
+  } while (0)
+
+#define CUBLAS_CHECK(condition) \
+  do { \
+    cublasStatus_t status = condition; \
+    CHECK_EQ(status, CUBLAS_STATUS_SUCCESS) << " " \
+      << caffe::cublasGetErrorString(status); \
+  } while (0)
+
+#define CURAND_CHECK(condition) \
+  do { \
+    curandStatus_t status = condition; \
+    CHECK_EQ(status, CURAND_STATUS_SUCCESS) << " " \
+      << caffe::curandGetErrorString(status); \
+  } while (0)
+
+// CUDA: grid stride looping
+#define CUDA_KERNEL_LOOP(i, n) \
+  for (int i = blockIdx.x * blockDim.x + threadIdx.x; \
+       i < (n); \
+       i += blockDim.x * gridDim.x)
+
+// CUDA: check for error after kernel execution and exit loudly if there is one.
+#define CUDA_POST_KERNEL_CHECK CUDA_CHECK(cudaPeekAtLastError())
+
+namespace caffe {
+
+// CUDA: library error reporting.
+const char* cublasGetErrorString(cublasStatus_t error);
+const char* curandGetErrorString(curandStatus_t error);
+
+// CUDA: thread number configuration.
+// Use 1024 threads per block, which requires cuda sm_2x or above,
+// or fall back to attempt compatibility (best of luck to you).
+#if __CUDA_ARCH__ >= 200
+    const int CAFFE_CUDA_NUM_THREADS = 1024;
+#else
+    const int CAFFE_CUDA_NUM_THREADS = 512;
+#endif
+
+// CUDA: number of blocks for threads.
+inline int CAFFE_GET_BLOCKS(const int N) {
+  return (N + CAFFE_CUDA_NUM_THREADS - 1) / CAFFE_CUDA_NUM_THREADS;
+}
+
+}  // namespace caffe
+
+#endif  // CPU_ONLY
+
+#endif  // CAFFE_UTIL_DEVICE_ALTERNATE_H_
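
The CUDA_KERNEL_LOOP / CAFFE_GET_BLOCKS pair above is what Caffe kernel
launches are built from. A sketch of the pattern (hypothetical names, compiled
with nvcc in a non-CPU_ONLY build; the library's own caffe_gpu_scal already
covers this particular operation):

    #include <glog/logging.h>  // CUDA_CHECK expands to glog's CHECK_EQ

    #include "caffe/util/device_alternate.hpp"

    namespace caffe {

    // Grid-stride kernel: each thread visits i, i + blockDim*gridDim, ...
    // so any launch configuration covers all n elements.
    template <typename Dtype>
    __global__ void scale_kernel(const int n, const Dtype alpha, Dtype* y) {
      CUDA_KERNEL_LOOP(index, n) {
        y[index] *= alpha;
      }
    }

    template <typename Dtype>
    void gpu_scale_inplace(const int n, const Dtype alpha, Dtype* y) {
      // NOLINT_NEXT_LINE(whitespace/operators)
      scale_kernel<Dtype><<<CAFFE_GET_BLOCKS(n), CAFFE_CUDA_NUM_THREADS>>>(
          n, alpha, y);
      CUDA_POST_KERNEL_CHECK;
    }

    }  // namespace caffe
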
diff --git a/include/caffe/util/im2col.hpp b/include/caffe/util/im2col.hpp
new file mode 100644
index 0000000..0051e2f
--- /dev/null
+++ b/include/caffe/util/im2col.hpp
@@ -0,0 +1,32 @@
+#ifndef CAFFE_UTIL_IM2COL_HPP_
+#define CAFFE_UTIL_IM2COL_HPP_
+
+namespace caffe {
+
+template <typename Dtype>
+void im2col_cpu(const Dtype* data_im, const int channels,
+    const int height, const int width, const int kernel_h, const int kernel_w,
+    const int pad_h, const int pad_w, const int stride_h,
+    const int stride_w, Dtype* data_col);
+
+template <typename Dtype>
+void col2im_cpu(const Dtype* data_col, const int channels,
+    const int height, const int width, const int patch_h, const int patch_w,
+    const int pad_h, const int pad_w, const int stride_h,
+    const int stride_w, Dtype* data_im);
+
+template <typename Dtype>
+void im2col_gpu(const Dtype* data_im, const int channels,
+    const int height, const int width, const int kernel_h, const int kernel_w,
+    const int pad_h, const int pad_w, const int stride_h,
+    const int stride_w, Dtype* data_col);
+
+template <typename Dtype>
+void col2im_gpu(const Dtype* data_col, const int channels,
+    const int height, const int width, const int patch_h, const int patch_w,
+    const int pad_h, const int pad_w, const int stride_h,
+    const int stride_w, Dtype* data_im);
+
+}  // namespace caffe
+
+#endif  // CAFFE_UTIL_IM2COL_HPP_
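
These declarations fix the column-buffer layout used throughout the convolution
code: (channels * kernel_h * kernel_w) rows by (height_out * width_out)
columns. A self-contained sketch (hypothetical helper; the output-size
arithmetic is the standard (h + 2*pad - kernel)/stride + 1 formula):

    #include <vector>

    #include "caffe/util/im2col.hpp"

    void Im2colExample() {
      const int channels = 1, height = 5, width = 5;
      const int kernel_h = 3, kernel_w = 3, pad_h = 0, pad_w = 0;
      const int stride_h = 1, stride_w = 1;
      const int height_out = (height + 2 * pad_h - kernel_h) / stride_h + 1;  // 3
      const int width_out  = (width  + 2 * pad_w - kernel_w) / stride_w + 1;  // 3
      std::vector<float> image(channels * height * width, 1.0f);
      std::vector<float> columns(channels * kernel_h * kernel_w *
                                 height_out * width_out);
      // Fill `columns` so each column holds one unrolled 3x3 patch, ready for
      // a GEMM against a (num_output x channels*kernel_h*kernel_w) filter
      // matrix.
      caffe::im2col_cpu(image.data(), channels, height, width,
                        kernel_h, kernel_w, pad_h, pad_w,
                        stride_h, stride_w, columns.data());
    }
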
diff --git a/include/caffe/util/insert_splits.hpp b/include/caffe/util/insert_splits.hpp
new file mode 100644
index 0000000..446abb8
--- /dev/null
+++ b/include/caffe/util/insert_splits.hpp
@@ -0,0 +1,26 @@
+#ifndef CAFFE_UTIL_INSERT_SPLITS_HPP_
+#define CAFFE_UTIL_INSERT_SPLITS_HPP_
+
+#include <string>
+
+#include "caffe/proto/caffe.pb.h"
+
+namespace caffe {
+
+// Copy NetParameters with SplitLayers added to replace any shared bottom
+// blobs with unique bottom blobs provided by the SplitLayer.
+void InsertSplits(const NetParameter& param, NetParameter* param_split);
+
+void ConfigureSplitLayer(const string& layer_name, const string& blob_name,
+    const int blob_idx, const int split_count, const float loss_weight,
+    LayerParameter* split_layer_param);
+
+string SplitLayerName(const string& layer_name, const string& blob_name,
+    const int blob_idx);
+
+string SplitBlobName(const string& layer_name, const string& blob_name,
+    const int blob_idx, const int split_idx);
+
+}  // namespace caffe
+
+#endif  // CAFFE_UTIL_INSERT_SPLITS_HPP_
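
A thin usage sketch (hypothetical wrapper name); InsertSplits leaves the input
untouched and writes the rewritten net, with Split layers interposed wherever
one top blob feeds several layers, into its second argument:

    #include "caffe/proto/caffe.pb.h"
    #include "caffe/util/insert_splits.hpp"

    void AddSplitLayers(const caffe::NetParameter& net,
                        caffe::NetParameter* net_with_splits) {
      // Shared bottom blobs are replaced by unique outputs of inserted
      // SplitLayers, per the comment on InsertSplits above.
      caffe::InsertSplits(net, net_with_splits);
    }
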
diff --git a/include/caffe/util/io.hpp b/include/caffe/util/io.hpp
new file mode 100644
index 0000000..3a62c3c
--- /dev/null
+++ b/include/caffe/util/io.hpp
@@ -0,0 +1,159 @@
+#ifndef CAFFE_UTIL_IO_H_
+#define CAFFE_UTIL_IO_H_
+
+#include <unistd.h>
+#include <string>
+
+#include "google/protobuf/message.h"
+#include "hdf5.h"
+#include "hdf5_hl.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+
+#define HDF5_NUM_DIMS 4
+
+namespace caffe {
+
+using ::google::protobuf::Message;
+
+inline void MakeTempFilename(string* temp_filename) {
+  temp_filename->clear();
+  *temp_filename = "/tmp/caffe_test.XXXXXX";
+  char* temp_filename_cstr = new char[temp_filename->size() + 1];
+  // NOLINT_NEXT_LINE(runtime/printf)
+  strcpy(temp_filename_cstr, temp_filename->c_str());
+  int fd = mkstemp(temp_filename_cstr);
+  CHECK_GE(fd, 0) << "Failed to open a temporary file at: " << *temp_filename;
+  close(fd);
+  *temp_filename = temp_filename_cstr;
+  delete[] temp_filename_cstr;
+}
+
+inline void MakeTempDir(string* temp_dirname) {
+  temp_dirname->clear();
+  *temp_dirname = "/tmp/caffe_test.XXXXXX";
+  char* temp_dirname_cstr = new char[temp_dirname->size() + 1];
+  // NOLINT_NEXT_LINE(runtime/printf)
+  strcpy(temp_dirname_cstr, temp_dirname->c_str());
+  char* mkdtemp_result = mkdtemp(temp_dirname_cstr);
+  CHECK(mkdtemp_result != NULL)
+      << "Failed to create a temporary directory at: " << *temp_dirname;
+  *temp_dirname = temp_dirname_cstr;
+  delete[] temp_dirname_cstr;
+}
+
+bool ReadProtoFromTextFile(const char* filename, Message* proto);
+
+inline bool ReadProtoFromTextFile(const string& filename, Message* proto) {
+  return ReadProtoFromTextFile(filename.c_str(), proto);
+}
+
+inline void ReadProtoFromTextFileOrDie(const char* filename, Message* proto) {
+  CHECK(ReadProtoFromTextFile(filename, proto));
+}
+
+inline void ReadProtoFromTextFileOrDie(const string& filename, Message* proto) {
+  ReadProtoFromTextFileOrDie(filename.c_str(), proto);
+}
+
+void WriteProtoToTextFile(const Message& proto, const char* filename);
+inline void WriteProtoToTextFile(const Message& proto, const string& filename) {
+  WriteProtoToTextFile(proto, filename.c_str());
+}
+
+bool ReadProtoFromBinaryFile(const char* filename, Message* proto);
+
+inline bool ReadProtoFromBinaryFile(const string& filename, Message* proto) {
+  return ReadProtoFromBinaryFile(filename.c_str(), proto);
+}
+
+inline void ReadProtoFromBinaryFileOrDie(const char* filename, Message* proto) {
+  CHECK(ReadProtoFromBinaryFile(filename, proto));
+}
+
+inline void ReadProtoFromBinaryFileOrDie(const string& filename,
+                                         Message* proto) {
+  ReadProtoFromBinaryFileOrDie(filename.c_str(), proto);
+}
+
+
+void WriteProtoToBinaryFile(const Message& proto, const char* filename);
+inline void WriteProtoToBinaryFile(
+    const Message& proto, const string& filename) {
+  WriteProtoToBinaryFile(proto, filename.c_str());
+}
+
+bool ReadFileToDatum(const string& filename, const int label, Datum* datum);
+
+inline bool ReadFileToDatum(const string& filename, Datum* datum) {
+  return ReadFileToDatum(filename, -1, datum);
+}
+
+bool ReadImageToDatum(const string& filename, const int label,
+    const int height, const int width, const bool is_color,
+    const std::string & encoding, Datum* datum);
+
+inline bool ReadImageToDatum(const string& filename, const int label,
+    const int height, const int width, const bool is_color, Datum* datum) {
+  return ReadImageToDatum(filename, label, height, width, is_color,
+                          "", datum);
+}
+
+inline bool ReadImageToDatum(const string& filename, const int label,
+    const int height, const int width, Datum* datum) {
+  return ReadImageToDatum(filename, label, height, width, true, datum);
+}
+
+inline bool ReadImageToDatum(const string& filename, const int label,
+    const bool is_color, Datum* datum) {
+  return ReadImageToDatum(filename, label, 0, 0, is_color, datum);
+}
+
+inline bool ReadImageToDatum(const string& filename, const int label,
+    Datum* datum) {
+  return ReadImageToDatum(filename, label, 0, 0, true, datum);
+}
+
+inline bool ReadImageToDatum(const string& filename, const int label,
+    const std::string & encoding, Datum* datum) {
+  return ReadImageToDatum(filename, label, 0, 0, true, encoding, datum);
+}
+
+bool DecodeDatumNative(Datum* datum);
+bool DecodeDatum(Datum* datum, bool is_color);
+
+cv::Mat ReadImageToCVMat(const string& filename,
+    const int height, const int width, const bool is_color);
+
+cv::Mat ReadImageToCVMat(const string& filename,
+    const int height, const int width);
+
+cv::Mat ReadImageToCVMat(const string& filename,
+    const bool is_color);
+
+cv::Mat ReadImageToCVMat(const string& filename);
+
+cv::Mat DecodeDatumToCVMatNative(const Datum& datum);
+cv::Mat DecodeDatumToCVMat(const Datum& datum, bool is_color);
+
+void CVMatToDatum(const cv::Mat& cv_img, Datum* datum);
+
+template <typename Dtype>
+void hdf5_load_nd_dataset_helper(
+    hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
+    Blob<Dtype>* blob);
+
+template <typename Dtype>
+void hdf5_load_nd_dataset(
+    hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
+    Blob<Dtype>* blob);
+
+template <typename Dtype>
+void hdf5_save_nd_dataset(
+    const hid_t file_id, const string& dataset_name, const Blob<Dtype>& blob);
+
+}  // namespace caffe
+
+#endif   // CAFFE_UTIL_IO_H_
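
The proto helpers above are the usual way model definitions and weights move
between text and binary form. A small sketch (hypothetical function name)
converting a prototxt into its binary encoding:

    #include <string>

    #include "caffe/proto/caffe.pb.h"
    #include "caffe/util/io.hpp"

    void ConvertPrototxtToBinary(const std::string& text_file,
                                 const std::string& binary_file) {
      caffe::NetParameter param;
      // Dies (via CHECK) if the file is missing or does not parse.
      caffe::ReadProtoFromTextFileOrDie(text_file, &param);
      caffe::WriteProtoToBinaryFile(param, binary_file);
    }
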
diff --git a/include/caffe/util/math_functions.hpp b/include/caffe/util/math_functions.hpp
new file mode 100644
index 0000000..2cacd8e
--- /dev/null
+++ b/include/caffe/util/math_functions.hpp
@@ -0,0 +1,280 @@
+#ifndef CAFFE_UTIL_MATH_FUNCTIONS_H_
+#define CAFFE_UTIL_MATH_FUNCTIONS_H_
+
+#include <stdint.h>
+#include <cmath>  // for std::fabs and std::signbit
+
+#include "glog/logging.h"
+
+#include "caffe/common.hpp"
+#include "caffe/util/device_alternate.hpp"
+#include "caffe/util/mkl_alternate.hpp"
+
+namespace caffe {
+
+// Caffe gemm provides a simpler interface to the gemm functions, with the
+// limitation that the data has to be contiguous in memory.
+template <typename Dtype>
+void caffe_cpu_gemm(const CBLAS_TRANSPOSE TransA,
+    const CBLAS_TRANSPOSE TransB, const int M, const int N, const int K,
+    const Dtype alpha, const Dtype* A, const Dtype* B, const Dtype beta,
+    Dtype* C);
+
+template <typename Dtype>
+void caffe_cpu_gemv(const CBLAS_TRANSPOSE TransA, const int M, const int N,
+    const Dtype alpha, const Dtype* A, const Dtype* x, const Dtype beta,
+    Dtype* y);
+
+template <typename Dtype>
+void caffe_axpy(const int N, const Dtype alpha, const Dtype* X,
+    Dtype* Y);
+
+template <typename Dtype>
+void caffe_cpu_axpby(const int N, const Dtype alpha, const Dtype* X,
+    const Dtype beta, Dtype* Y);
+
+template <typename Dtype>
+void caffe_copy(const int N, const Dtype *X, Dtype *Y);
+
+template <typename Dtype>
+void caffe_set(const int N, const Dtype alpha, Dtype *X);
+
+inline void caffe_memset(const size_t N, const int alpha, void* X) {
+  memset(X, alpha, N);  // NOLINT(caffe/alt_fn)
+}
+
+template <typename Dtype>
+void caffe_add_scalar(const int N, const Dtype alpha, Dtype *X);
+
+template <typename Dtype>
+void caffe_scal(const int N, const Dtype alpha, Dtype *X);
+
+template <typename Dtype>
+void caffe_sqr(const int N, const Dtype* a, Dtype* y);
+
+template <typename Dtype>
+void caffe_add(const int N, const Dtype* a, const Dtype* b, Dtype* y);
+
+template <typename Dtype>
+void caffe_sub(const int N, const Dtype* a, const Dtype* b, Dtype* y);
+
+template <typename Dtype>
+void caffe_mul(const int N, const Dtype* a, const Dtype* b, Dtype* y);
+
+template <typename Dtype>
+void caffe_div(const int N, const Dtype* a, const Dtype* b, Dtype* y);
+
+template <typename Dtype>
+void caffe_powx(const int n, const Dtype* a, const Dtype b, Dtype* y);
+
+unsigned int caffe_rng_rand();
+
+template <typename Dtype>
+Dtype caffe_nextafter(const Dtype b);
+
+template <typename Dtype>
+void caffe_rng_uniform(const int n, const Dtype a, const Dtype b, Dtype* r);
+
+template <typename Dtype>
+void caffe_rng_gaussian(const int n, const Dtype mu, const Dtype sigma,
+                        Dtype* r);
+
+template <typename Dtype>
+void caffe_rng_bernoulli(const int n, const Dtype p, int* r);
+
+template <typename Dtype>
+void caffe_rng_bernoulli(const int n, const Dtype p, unsigned int* r);
+
+template <typename Dtype>
+void caffe_exp(const int n, const Dtype* a, Dtype* y);
+
+template <typename Dtype>
+void caffe_log(const int n, const Dtype* a, Dtype* y);
+
+template <typename Dtype>
+void caffe_abs(const int n, const Dtype* a, Dtype* y);
+
+template <typename Dtype>
+Dtype caffe_cpu_dot(const int n, const Dtype* x, const Dtype* y);
+
+template <typename Dtype>
+Dtype caffe_cpu_strided_dot(const int n, const Dtype* x, const int incx,
+    const Dtype* y, const int incy);
+
+template <typename Dtype>
+int caffe_cpu_hamming_distance(const int n, const Dtype* x, const Dtype* y);
+
+// Returns the sum of the absolute values of the elements of vector x
+template <typename Dtype>
+Dtype caffe_cpu_asum(const int n, const Dtype* x);
+
+// the branchless, type-safe version from
+// http://stackoverflow.com/questions/1903954/is-there-a-standard-sign-function-signum-sgn-in-c-c
+template<typename Dtype>
+inline int8_t caffe_sign(Dtype val) {
+  return (Dtype(0) < val) - (val < Dtype(0));
+}
+
+// The following two macros are modifications of DEFINE_VSL_UNARY_FUNC
+//   in include/caffe/util/mkl_alternate.hpp authored by @Rowland Depp.
+// Please refer to commit 7e8ef25c7 of the boost-eigen branch.
+// Git cherry-picking that commit caused a conflict that was hard to resolve,
+//   and copying that whole file here would be inconvenient for code review,
+//   so the macros are pasted here temporarily.
+#define DEFINE_CAFFE_CPU_UNARY_FUNC(name, operation) \
+  template<typename Dtype> \
+  void caffe_cpu_##name(const int n, const Dtype* x, Dtype* y) { \
+    CHECK_GT(n, 0); CHECK(x); CHECK(y); \
+    for (int i = 0; i < n; ++i) { \
+      operation; \
+    } \
+  }
+
+// output is 1 for the positives, 0 for zero, and -1 for the negatives
+DEFINE_CAFFE_CPU_UNARY_FUNC(sign, y[i] = caffe_sign<Dtype>(x[i]));
+
+// This returns a nonzero value if the input has its sign bit set.
+// The name sgnbit is meant to avoid conflicts with std::signbit in the macro.
+// The extra parens are needed because CUDA < 6.5 defines signbit as a macro,
+// and we don't want that to expand here when CUDA headers are also included.
+DEFINE_CAFFE_CPU_UNARY_FUNC(sgnbit, \
+    y[i] = static_cast<bool>((std::signbit)(x[i])));
+
+DEFINE_CAFFE_CPU_UNARY_FUNC(fabs, y[i] = std::fabs(x[i]));
+
+template <typename Dtype>
+void caffe_cpu_scale(const int n, const Dtype alpha, const Dtype *x, Dtype* y);
+
+#ifndef CPU_ONLY  // GPU
+
+// Caffe gpu gemm provides an interface that is almost the same as the cpu
+// gemm function - following the C convention and calling the Fortran-order
+// gpu code under the hood.
+template <typename Dtype>
+void caffe_gpu_gemm(const CBLAS_TRANSPOSE TransA,
+    const CBLAS_TRANSPOSE TransB, const int M, const int N, const int K,
+    const Dtype alpha, const Dtype* A, const Dtype* B, const Dtype beta,
+    Dtype* C);
+
+template <typename Dtype>
+void caffe_gpu_gemv(const CBLAS_TRANSPOSE TransA, const int M, const int N,
+    const Dtype alpha, const Dtype* A, const Dtype* x, const Dtype beta,
+    Dtype* y);
+
+template <typename Dtype>
+void caffe_gpu_axpy(const int N, const Dtype alpha, const Dtype* X,
+    Dtype* Y);
+
+template <typename Dtype>
+void caffe_gpu_axpby(const int N, const Dtype alpha, const Dtype* X,
+    const Dtype beta, Dtype* Y);
+
+void caffe_gpu_memcpy(const size_t N, const void *X, void *Y);
+
+template <typename Dtype>
+void caffe_gpu_set(const int N, const Dtype alpha, Dtype *X);
+
+inline void caffe_gpu_memset(const size_t N, const int alpha, void* X) {
+#ifndef CPU_ONLY
+  CUDA_CHECK(cudaMemset(X, alpha, N));  // NOLINT(caffe/alt_fn)
+#else
+  NO_GPU;
+#endif
+}
+
+template <typename Dtype>
+void caffe_gpu_add_scalar(const int N, const Dtype alpha, Dtype *X);
+
+template <typename Dtype>
+void caffe_gpu_scal(const int N, const Dtype alpha, Dtype *X);
+
+template <typename Dtype>
+void caffe_gpu_add(const int N, const Dtype* a, const Dtype* b, Dtype* y);
+
+template <typename Dtype>
+void caffe_gpu_sub(const int N, const Dtype* a, const Dtype* b, Dtype* y);
+
+template <typename Dtype>
+void caffe_gpu_mul(const int N, const Dtype* a, const Dtype* b, Dtype* y);
+
+template <typename Dtype>
+void caffe_gpu_div(const int N, const Dtype* a, const Dtype* b, Dtype* y);
+
+template <typename Dtype>
+void caffe_gpu_abs(const int n, const Dtype* a, Dtype* y);
+
+template <typename Dtype>
+void caffe_gpu_exp(const int n, const Dtype* a, Dtype* y);
+
+template <typename Dtype>
+void caffe_gpu_log(const int n, const Dtype* a, Dtype* y);
+
+template <typename Dtype>
+void caffe_gpu_powx(const int n, const Dtype* a, const Dtype b, Dtype* y);
+
+// caffe_gpu_rng_uniform with two arguments generates integers in the range
+// [0, UINT_MAX].
+void caffe_gpu_rng_uniform(const int n, unsigned int* r);
+
+// caffe_gpu_rng_uniform with four arguments generates floats in the range
+// (a, b] (strictly greater than a, less than or equal to b) due to the
+// specification of curandGenerateUniform.  With a = 0, b = 1, just calls
+// curandGenerateUniform; with other limits will shift and scale the outputs
+// appropriately after calling curandGenerateUniform.
+template <typename Dtype>
+void caffe_gpu_rng_uniform(const int n, const Dtype a, const Dtype b, Dtype* r);
+
+template <typename Dtype>
+void caffe_gpu_rng_gaussian(const int n, const Dtype mu, const Dtype sigma,
+                            Dtype* r);
+
+template <typename Dtype>
+void caffe_gpu_rng_bernoulli(const int n, const Dtype p, int* r);
+
+template <typename Dtype>
+void caffe_gpu_dot(const int n, const Dtype* x, const Dtype* y, Dtype* out);
+
+template <typename Dtype>
+uint32_t caffe_gpu_hamming_distance(const int n, const Dtype* x,
+                                    const Dtype* y);
+
+template <typename Dtype>
+void caffe_gpu_asum(const int n, const Dtype* x, Dtype* y);
+
+template<typename Dtype>
+void caffe_gpu_sign(const int n, const Dtype* x, Dtype* y);
+
+template<typename Dtype>
+void caffe_gpu_sgnbit(const int n, const Dtype* x, Dtype* y);
+
+template <typename Dtype>
+void caffe_gpu_fabs(const int n, const Dtype* x, Dtype* y);
+
+template <typename Dtype>
+void caffe_gpu_scale(const int n, const Dtype alpha, const Dtype *x, Dtype* y);
+
+#define DEFINE_AND_INSTANTIATE_GPU_UNARY_FUNC(name, operation) \
+template<typename Dtype> \
+__global__ void name##_kernel(const int n, const Dtype* x, Dtype* y) { \
+  CUDA_KERNEL_LOOP(index, n) { \
+    operation; \
+  } \
+} \
+template <> \
+void caffe_gpu_##name<float>(const int n, const float* x, float* y) { \
+  /* NOLINT_NEXT_LINE(whitespace/operators) */ \
+  name##_kernel<float><<<CAFFE_GET_BLOCKS(n), CAFFE_CUDA_NUM_THREADS>>>( \
+      n, x, y); \
+} \
+template <> \
+void caffe_gpu_##name<double>(const int n, const double* x, double* y) { \
+  /* NOLINT_NEXT_LINE(whitespace/operators) */ \
+  name##_kernel<double><<<CAFFE_GET_BLOCKS(n), CAFFE_CUDA_NUM_THREADS>>>( \
+      n, x, y); \
+}
+
+#endif  // !CPU_ONLY
+
+}  // namespace caffe
+
+#endif  // CAFFE_UTIL_MATH_FUNCTIONS_H_
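
A quick sketch of the caffe_cpu_gemm contract described at the top of the file
(contiguous data; the row-major convention used throughout Caffe is assumed
here; the function name is made up):

    #include <vector>

    #include "caffe/util/math_functions.hpp"

    // C(2x2) = 1.0 * A(2x3) * B(3x2) + 0.0 * C, all row-major and contiguous.
    void GemmExample() {
      const int M = 2, N = 2, K = 3;
      std::vector<float> A(M * K, 1.0f), B(K * N, 2.0f), C(M * N, 0.0f);
      caffe::caffe_cpu_gemm<float>(CblasNoTrans, CblasNoTrans, M, N, K,
                                   1.0f, A.data(), B.data(), 0.0f, C.data());
      // Every entry of C is now 1*2 + 1*2 + 1*2 = 6.
    }
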
diff --git a/include/caffe/util/mkl_alternate.hpp b/include/caffe/util/mkl_alternate.hpp
new file mode 100644
index 0000000..3355b66
--- /dev/null
+++ b/include/caffe/util/mkl_alternate.hpp
@@ -0,0 +1,97 @@
+#ifndef CAFFE_UTIL_MKL_ALTERNATE_H_
+#define CAFFE_UTIL_MKL_ALTERNATE_H_
+
+#ifdef USE_MKL
+
+#include <mkl.h>
+
+#else  // Without MKL, fall back to CBLAS plus the substitutes defined below.
+
+extern "C" {
+#include <cblas.h>
+}
+#include <math.h>
+
+// Functions that caffe uses but are not present if MKL is not linked.
+
+// A simple way to define the vsl unary functions. The operation should
+// be in the form e.g. y[i] = sqrt(a[i])
+#define DEFINE_VSL_UNARY_FUNC(name, operation) \
+  template<typename Dtype> \
+  void v##name(const int n, const Dtype* a, Dtype* y) { \
+    CHECK_GT(n, 0); CHECK(a); CHECK(y); \
+    for (int i = 0; i < n; ++i) { operation; } \
+  } \
+  inline void vs##name( \
+    const int n, const float* a, float* y) { \
+    v##name<float>(n, a, y); \
+  } \
+  inline void vd##name( \
+      const int n, const double* a, double* y) { \
+    v##name<double>(n, a, y); \
+  }
+
+DEFINE_VSL_UNARY_FUNC(Sqr, y[i] = a[i] * a[i]);
+DEFINE_VSL_UNARY_FUNC(Exp, y[i] = exp(a[i]));
+DEFINE_VSL_UNARY_FUNC(Ln, y[i] = log(a[i]));
+DEFINE_VSL_UNARY_FUNC(Abs, y[i] = fabs(a[i]));
+
+// A simple way to define the vsl unary functions with singular parameter b.
+// The operation should be in the form e.g. y[i] = pow(a[i], b)
+#define DEFINE_VSL_UNARY_FUNC_WITH_PARAM(name, operation) \
+  template<typename Dtype> \
+  void v##name(const int n, const Dtype* a, const Dtype b, Dtype* y) { \
+    CHECK_GT(n, 0); CHECK(a); CHECK(y); \
+    for (int i = 0; i < n; ++i) { operation; } \
+  } \
+  inline void vs##name( \
+    const int n, const float* a, const float b, float* y) { \
+    v##name<float>(n, a, b, y); \
+  } \
+  inline void vd##name( \
+      const int n, const double* a, const double b, double* y) { \
+    v##name<double>(n, a, b, y); \
+  }
+
+DEFINE_VSL_UNARY_FUNC_WITH_PARAM(Powx, y[i] = pow(a[i], b));
+
+// A simple way to define the vsl binary functions. The operation should
+// be in the form e.g. y[i] = a[i] + b[i]
+#define DEFINE_VSL_BINARY_FUNC(name, operation) \
+  template<typename Dtype> \
+  void v##name(const int n, const Dtype* a, const Dtype* b, Dtype* y) { \
+    CHECK_GT(n, 0); CHECK(a); CHECK(b); CHECK(y); \
+    for (int i = 0; i < n; ++i) { operation; } \
+  } \
+  inline void vs##name( \
+    const int n, const float* a, const float* b, float* y) { \
+    v##name<float>(n, a, b, y); \
+  } \
+  inline void vd##name( \
+      const int n, const double* a, const double* b, double* y) { \
+    v##name<double>(n, a, b, y); \
+  }
+
+DEFINE_VSL_BINARY_FUNC(Add, y[i] = a[i] + b[i]);
+DEFINE_VSL_BINARY_FUNC(Sub, y[i] = a[i] - b[i]);
+DEFINE_VSL_BINARY_FUNC(Mul, y[i] = a[i] * b[i]);
+DEFINE_VSL_BINARY_FUNC(Div, y[i] = a[i] / b[i]);
+
+// MKL also provides an axpby function that is not present in standard BLAS.
+// We mimic it here with a two-step (admittedly inefficient) scal-then-axpy
+// fallback so callers see the same cblas_?axpby signature either way.
+inline void cblas_saxpby(const int N, const float alpha, const float* X,
+                         const int incX, const float beta, float* Y,
+                         const int incY) {
+  cblas_sscal(N, beta, Y, incY);
+  cblas_saxpy(N, alpha, X, incX, Y, incY);
+}
+inline void cblas_daxpby(const int N, const double alpha, const double* X,
+                         const int incX, const double beta, double* Y,
+                         const int incY) {
+  cblas_dscal(N, beta, Y, incY);
+  cblas_daxpy(N, alpha, X, incX, Y, incY);
+}
+
+#endif  // USE_MKL
+#endif  // CAFFE_UTIL_MKL_ALTERNATE_H_
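
Whether or not MKL is linked, callers see the same vs*/vd* and cblas_?axpby
entry points, which is the point of this header. A sketch (hypothetical
function name; glog is included explicitly because the fallback macros use
CHECK/CHECK_GT):

    #include <glog/logging.h>

    #include "caffe/util/mkl_alternate.hpp"

    void VslFallbackDemo() {
      const int n = 3;
      const float a[3] = {1.f, 2.f, 3.f};
      float y[3];
      vsSqr(n, a, y);                          // y = {1, 4, 9}
      // axpby mimicked as scal followed by axpy: y = 0.5*a + 2.0*y
      cblas_saxpby(n, 0.5f, a, 1, 2.f, y, 1);
    }
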
diff --git a/include/caffe/util/rng.hpp b/include/caffe/util/rng.hpp
new file mode 100644
index 0000000..8f1cf0d
--- /dev/null
+++ b/include/caffe/util/rng.hpp
@@ -0,0 +1,43 @@
+#ifndef CAFFE_RNG_CPP_HPP_
+#define CAFFE_RNG_CPP_HPP_
+
+#include <algorithm>
+#include <iterator>
+
+#include "boost/random/mersenne_twister.hpp"
+#include "boost/random/uniform_int.hpp"
+
+#include "caffe/common.hpp"
+
+namespace caffe {
+
+typedef boost::mt19937 rng_t;
+
+inline rng_t* caffe_rng() {
+  return static_cast<caffe::rng_t*>(Caffe::rng_stream().generator());
+}
+
+// Fisher–Yates algorithm
+template <class RandomAccessIterator, class RandomGenerator>
+inline void shuffle(RandomAccessIterator begin, RandomAccessIterator end,
+                    RandomGenerator* gen) {
+  typedef typename std::iterator_traits<RandomAccessIterator>::difference_type
+      difference_type;
+  typedef typename boost::uniform_int<difference_type> dist_type;
+
+  difference_type length = std::distance(begin, end);
+  if (length <= 0) return;
+
+  for (difference_type i = length - 1; i > 0; --i) {
+    dist_type dist(0, i);
+    std::iter_swap(begin + i, begin + dist(*gen));
+  }
+}
+
+template <class RandomAccessIterator>
+inline void shuffle(RandomAccessIterator begin, RandomAccessIterator end) {
+  shuffle(begin, end, caffe_rng());
+}
+}  // namespace caffe
+
+#endif  // CAFFE_RNG_HPP_
diff --git a/include/caffe/util/upgrade_proto.hpp b/include/caffe/util/upgrade_proto.hpp
new file mode 100644
index 0000000..c1f21a0
--- /dev/null
+++ b/include/caffe/util/upgrade_proto.hpp
@@ -0,0 +1,64 @@
+#ifndef CAFFE_UTIL_UPGRADE_PROTO_H_
+#define CAFFE_UTIL_UPGRADE_PROTO_H_
+
+#include <string>
+
+#include "caffe/proto/caffe.pb.h"
+
+namespace caffe {
+
+// Return true iff the net is not the current version.
+bool NetNeedsUpgrade(const NetParameter& net_param);
+
+// Return true iff any layer contains parameters specified using
+// deprecated V0LayerParameter.
+bool NetNeedsV0ToV1Upgrade(const NetParameter& net_param);
+
+// Perform all necessary transformations to upgrade a V0NetParameter into a
+// NetParameter (including upgrading padding layers and LayerParameters).
+bool UpgradeV0Net(const NetParameter& v0_net_param, NetParameter* net_param);
+
+// Upgrade NetParameter with padding layers to pad-aware conv layers.
+// For any padding layer, remove it and put its pad parameter in any layers
+// taking its top blob as input.
+// It is an error if any such layer is not a convolution layer.
+void UpgradeV0PaddingLayers(const NetParameter& param,
+                            NetParameter* param_upgraded_pad);
+
+// Upgrade a single V0LayerConnection to the V1LayerParameter format.
+bool UpgradeV0LayerParameter(const V1LayerParameter& v0_layer_connection,
+                             V1LayerParameter* layer_param);
+
+V1LayerParameter_LayerType UpgradeV0LayerType(const string& type);
+
+// Return true iff any layer contains deprecated data transformation parameters.
+bool NetNeedsDataUpgrade(const NetParameter& net_param);
+
+// Perform all necessary transformations to upgrade old transformation fields
+// into a TransformationParameter.
+void UpgradeNetDataTransformation(NetParameter* net_param);
+
+// Return true iff the Net contains any layers specified as V1LayerParameters.
+bool NetNeedsV1ToV2Upgrade(const NetParameter& net_param);
+
+// Perform all necessary transformations to upgrade a NetParameter with
+// deprecated V1LayerParameters.
+bool UpgradeV1Net(const NetParameter& v1_net_param, NetParameter* net_param);
+
+bool UpgradeV1LayerParameter(const V1LayerParameter& v1_layer_param,
+                             LayerParameter* layer_param);
+
+const char* UpgradeV1LayerType(const V1LayerParameter_LayerType type);
+
+// Check for deprecations and upgrade the NetParameter as needed.
+bool UpgradeNetAsNeeded(const string& param_file, NetParameter* param);
+
+// Read parameters from a file into a NetParameter proto message.
+void ReadNetParamsFromTextFileOrDie(const string& param_file,
+                                    NetParameter* param);
+void ReadNetParamsFromBinaryFileOrDie(const string& param_file,
+                                      NetParameter* param);
+
+}  // namespace caffe
+
+#endif   // CAFFE_UTIL_UPGRADE_PROTO_H_
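
A sketch of the intended flow for old model definitions, using only the
functions declared above plus the plain reader from util/io.hpp (the wrapper
name is made up; the bundled ReadNetParamsFromTextFileOrDie presumably rolls
these steps together):

    #include <glog/logging.h>
    #include <string>

    #include "caffe/proto/caffe.pb.h"
    #include "caffe/util/io.hpp"
    #include "caffe/util/upgrade_proto.hpp"

    caffe::NetParameter LoadAndUpgradeNet(const std::string& param_file) {
      caffe::NetParameter param;
      caffe::ReadProtoFromTextFileOrDie(param_file, &param);
      // Check for deprecations and rewrite V0/V1 fields into the current
      // schema; UpgradeNetAsNeeded is a no-op for up-to-date nets.
      CHECK(caffe::UpgradeNetAsNeeded(param_file, &param))
          << "Failed to upgrade deprecated fields in " << param_file;
      return param;
    }
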
diff --git a/include/caffe/vision_layers.hpp b/include/caffe/vision_layers.hpp
new file mode 100644
index 0000000..a6bd86a
--- /dev/null
+++ b/include/caffe/vision_layers.hpp
@@ -0,0 +1,524 @@
+#ifndef CAFFE_VISION_LAYERS_HPP_
+#define CAFFE_VISION_LAYERS_HPP_
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/common_layers.hpp"
+#include "caffe/data_layers.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/loss_layers.hpp"
+#include "caffe/neuron_layers.hpp"
+#include "caffe/proto/caffe.pb.h"
+
+namespace caffe {
+
+/**
+ * @brief Abstract base class that factors out the BLAS code common to
+ *        ConvolutionLayer and DeconvolutionLayer.
+ */
+template <typename Dtype>
+class BaseConvolutionLayer : public Layer<Dtype> {
+ public:
+  explicit BaseConvolutionLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline int MinBottomBlobs() const { return 1; }
+  virtual inline int MinTopBlobs() const { return 1; }
+  virtual inline bool EqualNumBottomTopBlobs() const { return true; }
+
+ protected:
+  // Helper functions that abstract away the column buffer and gemm arguments.
+  // The last argument in forward_cpu_gemm is so that we can skip the im2col if
+  // we just called weight_cpu_gemm with the same input.
+  void forward_cpu_gemm(const Dtype* input, const Dtype* weights,
+      Dtype* output, bool skip_im2col = false);
+  void forward_cpu_bias(Dtype* output, const Dtype* bias);
+  void backward_cpu_gemm(const Dtype* input, const Dtype* weights,
+      Dtype* output);
+  void weight_cpu_gemm(const Dtype* input, const Dtype* output, Dtype*
+      weights);
+  void backward_cpu_bias(Dtype* bias, const Dtype* input);
+
+#ifndef CPU_ONLY
+  void forward_gpu_gemm(const Dtype* col_input, const Dtype* weights,
+      Dtype* output, bool skip_im2col = false);
+  void forward_gpu_bias(Dtype* output, const Dtype* bias);
+  void backward_gpu_gemm(const Dtype* input, const Dtype* weights,
+      Dtype* col_output);
+  void weight_gpu_gemm(const Dtype* col_input, const Dtype* output, Dtype*
+      weights);
+  void backward_gpu_bias(Dtype* bias, const Dtype* input);
+#endif
+
+  // reverse_dimensions should return true iff we are implementing deconv, so
+  // that conv helpers know which dimensions are which.
+  virtual bool reverse_dimensions() = 0;
+  // Compute height_out_ and width_out_ from other parameters.
+  virtual void compute_output_shape() = 0;
+
+  int kernel_h_, kernel_w_;
+  int stride_h_, stride_w_;
+  int num_;
+  int channels_;
+  int pad_h_, pad_w_;
+  int height_, width_;
+  int group_;
+  int num_output_;
+  int height_out_, width_out_;
+  bool bias_term_;
+  bool is_1x1_;
+
+ private:
+  // wrap im2col/col2im so we don't have to remember the (long) argument lists
+  inline void conv_im2col_cpu(const Dtype* data, Dtype* col_buff) {
+    im2col_cpu(data, conv_in_channels_, conv_in_height_, conv_in_width_,
+        kernel_h_, kernel_w_, pad_h_, pad_w_, stride_h_, stride_w_, col_buff);
+  }
+  inline void conv_col2im_cpu(const Dtype* col_buff, Dtype* data) {
+    col2im_cpu(col_buff, conv_in_channels_, conv_in_height_, conv_in_width_,
+        kernel_h_, kernel_w_, pad_h_, pad_w_, stride_h_, stride_w_, data);
+  }
+#ifndef CPU_ONLY
+  inline void conv_im2col_gpu(const Dtype* data, Dtype* col_buff) {
+    im2col_gpu(data, conv_in_channels_, conv_in_height_, conv_in_width_,
+        kernel_h_, kernel_w_, pad_h_, pad_w_, stride_h_, stride_w_, col_buff);
+  }
+  inline void conv_col2im_gpu(const Dtype* col_buff, Dtype* data) {
+    col2im_gpu(col_buff, conv_in_channels_, conv_in_height_, conv_in_width_,
+        kernel_h_, kernel_w_, pad_h_, pad_w_, stride_h_, stride_w_, data);
+  }
+#endif
+
+  int conv_out_channels_;
+  int conv_in_channels_;
+  int conv_out_spatial_dim_;
+  int conv_in_height_;
+  int conv_in_width_;
+  int kernel_dim_;
+  int weight_offset_;
+  int col_offset_;
+  int output_offset_;
+
+  Blob<Dtype> col_buffer_;
+  Blob<Dtype> bias_multiplier_;
+};
+
+/**
+ * @brief Convolves the input image with a bank of learned filters,
+ *        and (optionally) adds biases.
+ *
+ *   Caffe convolves by reduction to matrix multiplication. This achieves
+ *   high-throughput and generality of input and filter dimensions but comes at
+ *   the cost of memory for matrices. This makes use of efficiency in BLAS.
+ *
+ *   The input is "im2col" transformed to a channel K' x H x W data matrix
+ *   for multiplication with the N x K' x H x W filter matrix to yield a
+ *   N' x H x W output matrix that is then "col2im" restored. K' is the
+ *   input channel * kernel height * kernel width dimension of the unrolled
+ *   inputs so that the im2col matrix has a column for each input region to
+ *   be filtered. col2im restores the output spatial structure by rolling up
+ *   the output channel N' columns of the output matrix.
+ */
+template <typename Dtype>
+class ConvolutionLayer : public BaseConvolutionLayer<Dtype> {
+ public:
+  /**
+   * @param param provides ConvolutionParameter convolution_param,
+   *    with ConvolutionLayer options:
+   *  - num_output. The number of filters.
+   *  - kernel_size / kernel_h / kernel_w. The filter dimensions, given by
+   *  kernel_size for square filters or kernel_h and kernel_w for rectangular
+   *  filters.
+   *  - stride / stride_h / stride_w (\b optional, default 1). The filter
+   *  stride, given by stride_size for equal dimensions or stride_h and stride_w
+   *  for different strides. By default the convolution is dense with stride 1.
+   *  - pad / pad_h / pad_w (\b optional, default 0). The zero-padding for
+   *  convolution, given by pad for equal dimensions or pad_h and pad_w for
+   *  different padding. Input padding is computed implicitly instead of
+   *  actually padding.
+   *  - group (\b optional, default 1). The number of filter groups. Group
+   *  convolution is a method for reducing parameterization by selectively
+   *  connecting input and output channels; both counts must be divisible by
+   *  the number of groups. For group @f$ \geq 1 @f$, the
+   *  convolutional filters' input and output channels are separated s.t. each
+   *  group takes 1 / group of the input channels and makes 1 / group of the
+   *  output channels. Concretely 4 input channels, 8 output channels, and
+   *  2 groups separate input channels 1-2 and output channels 1-4 into the
+   *  first group and input channels 3-4 and output channels 5-8 into the second
+   *  group.
+   *  - bias_term (\b optional, default true). Whether to have a bias.
+   *  - engine: convolution has CAFFE (matrix multiplication) and CUDNN (library
+   *    kernels + stream parallelism) engines.
+   */
+  explicit ConvolutionLayer(const LayerParameter& param)
+      : BaseConvolutionLayer<Dtype>(param) {}
+
+  virtual inline const char* type() const { return "Convolution"; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual inline bool reverse_dimensions() { return false; }
+  virtual void compute_output_shape();
+};
+
+/**
+ * @brief Convolve the input with a bank of learned filters, and (optionally)
+ *        add biases, treating filters and convolution parameters in the
+ *        opposite sense as ConvolutionLayer.
+ *
+ *   ConvolutionLayer computes each output value by dotting an input window with
+ *   a filter; DeconvolutionLayer multiplies each input value by a filter
+ *   elementwise, and sums over the resulting output windows. In other words,
+ *   DeconvolutionLayer is ConvolutionLayer with the forward and backward passes
+ *   reversed. DeconvolutionLayer reuses ConvolutionParameter for its
+ *   parameters, but they take the opposite sense as in ConvolutionLayer (so
+ *   padding is removed from the output rather than added to the input, and
+ *   stride results in upsampling rather than downsampling).
+ */
+template <typename Dtype>
+class DeconvolutionLayer : public BaseConvolutionLayer<Dtype> {
+ public:
+  explicit DeconvolutionLayer(const LayerParameter& param)
+      : BaseConvolutionLayer<Dtype>(param) {}
+
+  virtual inline const char* type() const { return "Deconvolution"; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual inline bool reverse_dimensions() { return true; }
+  virtual void compute_output_shape();
+};
+
+#ifdef USE_CUDNN
+/*
+ * @brief cuDNN implementation of ConvolutionLayer.
+ *        Fallback to ConvolutionLayer for CPU mode.
+ *
+ * cuDNN accelerates convolution through forward kernels for filtering and bias
+ * plus backward kernels for the gradient w.r.t. the filters, biases, and
+ * inputs. Caffe + cuDNN further speeds up the computation through forward
+ * parallelism across groups and backward parallelism across gradients.
+ *
+ * The CUDNN engine does not have memory overhead for matrix buffers. For many
+ * input and filter regimes the CUDNN engine is faster than the CAFFE engine,
+ * but for fully-convolutional models and large inputs the CAFFE engine can be
+ * faster as long as it fits in memory.
+*/
+template <typename Dtype>
+class CuDNNConvolutionLayer : public ConvolutionLayer<Dtype> {
+ public:
+  explicit CuDNNConvolutionLayer(const LayerParameter& param)
+      : ConvolutionLayer<Dtype>(param), handles_setup_(false) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual ~CuDNNConvolutionLayer();
+
+ protected:
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  bool handles_setup_;
+  cudnnHandle_t* handle_;
+  cudaStream_t*  stream_;
+  vector<cudnnTensorDescriptor_t> bottom_descs_, top_descs_;
+  cudnnTensorDescriptor_t    bias_desc_;
+  cudnnFilterDescriptor_t      filter_desc_;
+  vector<cudnnConvolutionDescriptor_t> conv_descs_;
+  int bottom_offset_, top_offset_, weight_offset_, bias_offset_;
+  size_t workspaceSizeInBytes;
+  void *workspace;
+};
+#endif
+
+/**
+ * @brief A helper for image operations that rearranges image regions into
+ *        column vectors.  Used by ConvolutionLayer to perform convolution
+ *        by matrix multiplication.
+ *
+ * TODO(dox): thorough documentation for Forward, Backward, and proto params.
+ */
+template <typename Dtype>
+class Im2colLayer : public Layer<Dtype> {
+ public:
+  explicit Im2colLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Im2col"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  int kernel_h_, kernel_w_;
+  int stride_h_, stride_w_;
+  int channels_;
+  int height_, width_;
+  int pad_h_, pad_w_;
+};
+
+// Forward declare PoolingLayer and SplitLayer for use in LRNLayer.
+template <typename Dtype> class PoolingLayer;
+template <typename Dtype> class SplitLayer;
+
+/**
+ * @brief Normalize the input in a local region across or within feature maps.
+ *
+ * TODO(dox): thorough documentation for Forward, Backward, and proto params.
+ */
+template <typename Dtype>
+class LRNLayer : public Layer<Dtype> {
+ public:
+  explicit LRNLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "LRN"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  virtual void CrossChannelForward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void CrossChannelForward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void WithinChannelForward(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void CrossChannelBackward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void CrossChannelBackward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void WithinChannelBackward(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  int size_;
+  int pre_pad_;
+  Dtype alpha_;
+  Dtype beta_;
+  Dtype k_;
+  int num_;
+  int channels_;
+  int height_;
+  int width_;
+
+  // Fields used for normalization ACROSS_CHANNELS
+  // scale_ stores the intermediate summing results
+  Blob<Dtype> scale_;
+
+  // Fields used for normalization WITHIN_CHANNEL
+  shared_ptr<SplitLayer<Dtype> > split_layer_;
+  vector<Blob<Dtype>*> split_top_vec_;
+  shared_ptr<PowerLayer<Dtype> > square_layer_;
+  Blob<Dtype> square_input_;
+  Blob<Dtype> square_output_;
+  vector<Blob<Dtype>*> square_bottom_vec_;
+  vector<Blob<Dtype>*> square_top_vec_;
+  shared_ptr<PoolingLayer<Dtype> > pool_layer_;
+  Blob<Dtype> pool_output_;
+  vector<Blob<Dtype>*> pool_top_vec_;
+  shared_ptr<PowerLayer<Dtype> > power_layer_;
+  Blob<Dtype> power_output_;
+  vector<Blob<Dtype>*> power_top_vec_;
+  shared_ptr<EltwiseLayer<Dtype> > product_layer_;
+  Blob<Dtype> product_input_;
+  vector<Blob<Dtype>*> product_bottom_vec_;
+};
+
+
+/**
+ * @brief Pools the input image by taking the max, average, etc. within regions.
+ *
+ * TODO(dox): thorough documentation for Forward, Backward, and proto params.
+ */
+template <typename Dtype>
+class PoolingLayer : public Layer<Dtype> {
+ public:
+  explicit PoolingLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "Pooling"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int MinTopBlobs() const { return 1; }
+  // MAX POOL layers can output an extra top blob for the mask;
+  // others can only output the pooled inputs.
+  virtual inline int MaxTopBlobs() const {
+    return (this->layer_param_.pooling_param().pool() ==
+            PoolingParameter_PoolMethod_MAX) ? 2 : 1;
+  }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  int kernel_h_, kernel_w_;
+  int stride_h_, stride_w_;
+  int pad_h_, pad_w_;
+  int channels_;
+  int height_, width_;
+  int pooled_height_, pooled_width_;
+  bool global_pooling_;
+  Blob<Dtype> rand_idx_;
+  Blob<int> max_idx_;
+};
+
+#ifdef USE_CUDNN
+/*
+ * @brief cuDNN implementation of PoolingLayer.
+ *        Fallback to PoolingLayer for CPU mode.
+*/
+template <typename Dtype>
+class CuDNNPoolingLayer : public PoolingLayer<Dtype> {
+ public:
+  explicit CuDNNPoolingLayer(const LayerParameter& param)
+      : PoolingLayer<Dtype>(param), handles_setup_(false) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual ~CuDNNPoolingLayer();
+  // Currently, cuDNN does not support the extra top blob.
+  virtual inline int MinTopBlobs() const { return -1; }
+  virtual inline int ExactNumTopBlobs() const { return 1; }
+
+ protected:
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+
+  bool handles_setup_;
+  cudnnHandle_t             handle_;
+  cudnnTensorDescriptor_t bottom_desc_, top_desc_;
+  cudnnPoolingDescriptor_t  pooling_desc_;
+  cudnnPoolingMode_t        mode_;
+};
+#endif
+
+/**
+ * @brief Does spatial pyramid pooling on the input image
+ *        by taking the max, average, etc. within regions
+ *        so that the result vector of different sized
+ *        images are of the same size.
+ */
+template <typename Dtype>
+class SPPLayer : public Layer<Dtype> {
+ public:
+  explicit SPPLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+
+  virtual inline const char* type() const { return "SPP"; }
+  virtual inline int ExactNumBottomBlobs() const { return 1; }
+  virtual inline int MinTopBlobs() const { return 1; }
+  // MAX POOL layers can output an extra top blob for the mask;
+  // others can only output the pooled inputs.
+  virtual inline int MaxTopBlobs() const {
+    return (this->layer_param_.pooling_param().pool() ==
+            PoolingParameter_PoolMethod_MAX) ? 2 : 1;
+  }
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
+  // calculates the kernel and stride dimensions for the pooling layer,
+  // returns a correctly configured LayerParameter for a PoolingLayer
+  virtual LayerParameter GetPoolingParam(const int pyramid_level,
+      const int bottom_h, const int bottom_w, const SPPParameter spp_param);
+
+  int pyramid_height_;
+  int bottom_h_, bottom_w_;
+  int channels_;
+  int kernel_h_, kernel_w_;
+  int pad_h_, pad_w_;
+
+  /// the internal Split layer that feeds the pooling layers
+  shared_ptr<SplitLayer<Dtype> > split_layer_;
+  /// top vector holder used in call to the underlying SplitLayer::Forward
+  vector<Blob<Dtype>*> split_top_vec_;
+  /// bottom vector holder used in call to the underlying PoolingLayer::Forward
+  vector<vector<Blob<Dtype>*>*> pooling_bottom_vecs_;
+  /// the internal Pooling layers of different kernel sizes
+  vector<shared_ptr<PoolingLayer<Dtype> > > pooling_layers_;
+  /// top vector holders used in call to the underlying PoolingLayer::Forward
+  vector<vector<Blob<Dtype>*>*> pooling_top_vecs_;
+  /// pooling_outputs stores the outputs of the PoolingLayers
+  vector<Blob<Dtype>*> pooling_outputs_;
+  /// the internal Flatten layers that the Pooling layers feed into
+  vector<FlattenLayer<Dtype>*> flatten_layers_;
+  /// top vector holders used in call to the underlying FlattenLayer::Forward
+  vector<vector<Blob<Dtype>*>*> flatten_top_vecs_;
+  /// flatten_outputs stores the outputs of the FlattenLayers
+  vector<Blob<Dtype>*> flatten_outputs_;
+  /// bottom vector holder used in call to the underlying ConcatLayer::Forward
+  vector<Blob<Dtype>*> concat_bottom_vec_;
+  /// the internal Concat layers that the Flatten layers feed into
+  shared_ptr<ConcatLayer<Dtype> > concat_layer_;
+};
+
+}  // namespace caffe
+
+#endif  // CAFFE_VISION_LAYERS_HPP_
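
The ConvolutionLayer comment above reduces convolution to im2col plus one GEMM.
A hand-rolled sketch of that reduction for a single image, one group, and no
bias (hypothetical function; the layer's real path is
BaseConvolutionLayer::forward_cpu_gemm, which also handles groups and bias):

    #include <vector>

    #include "caffe/util/im2col.hpp"
    #include "caffe/util/math_functions.hpp"

    // weights: num_output x (channels * kernel * kernel), row-major
    // image:   channels x height x width
    // output:  num_output x (height_out * width_out)
    void NaiveConvForward(const float* image, const float* weights,
                          int channels, int height, int width,
                          int num_output, int kernel, int pad, int stride,
                          float* output) {
      const int height_out = (height + 2 * pad - kernel) / stride + 1;
      const int width_out  = (width  + 2 * pad - kernel) / stride + 1;
      const int K = channels * kernel * kernel;   // "K'" in the comment above
      const int N = height_out * width_out;
      std::vector<float> col(K * N);
      caffe::im2col_cpu(image, channels, height, width, kernel, kernel,
                        pad, pad, stride, stride, col.data());
      // (num_output x K) * (K x N) -> (num_output x N), already in the
      // output blob's channel-major spatial layout.
      caffe::caffe_cpu_gemm<float>(CblasNoTrans, CblasNoTrans,
                                   num_output, N, K,
                                   1.f, weights, col.data(), 0.f, output);
    }
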
diff --git a/matlab/+caffe/+test/test_net.m b/matlab/+caffe/+test/test_net.m
new file mode 100644
index 0000000..3dabe84
--- /dev/null
+++ b/matlab/+caffe/+test/test_net.m
@@ -0,0 +1,96 @@
+classdef test_net < matlab.unittest.TestCase
+  
+  properties
+    num_output
+    model_file
+    net
+  end
+  
+  methods (Static)
+    function model_file = simple_net_file(num_output)
+      model_file = tempname();
+      fid = fopen(model_file, 'w');
+      fprintf(fid, [ ...
+        'name: "testnet" force_backward: true\n' ...
+        'layer { type: "DummyData" name: "data" top: "data" top: "label"\n' ...
+        'dummy_data_param { num: 5 channels: 2 height: 3 width: 4\n' ...
+        '    num: 5 channels: 1 height: 1 width: 1\n' ...
+        '    data_filler { type: "gaussian" std: 1 }\n' ...
+        '    data_filler { type: "constant" } } }\n' ...
+        'layer { type: "Convolution" name: "conv" bottom: "data" top: "conv"\n' ...
+        '  convolution_param { num_output: 11 kernel_size: 2 pad: 3\n' ...
+        '    weight_filler { type: "gaussian" std: 1 }\n' ...
+        '    bias_filler { type: "constant" value: 2 } }\n' ...
+        '    param { decay_mult: 1 } param { decay_mult: 0 }\n' ...
+        '    }\n' ...
+        'layer { type: "InnerProduct" name: "ip" bottom: "conv" top: "ip"\n' ...
+        '  inner_product_param { num_output: ' num2str(num_output) ...
+        '    weight_filler { type: "gaussian" std: 2.5 }\n' ...
+        '    bias_filler { type: "constant" value: -3 } } }\n' ...
+        'layer { type: "SoftmaxWithLoss" name: "loss" bottom: "ip" bottom: "label"\n' ...
+        '  top: "loss" }' ]);
+      fclose(fid);
+    end
+  end
+  methods
+    function self = test_net()
+      self.num_output = 13;
+      self.model_file = caffe.test.test_net.simple_net_file(self.num_output);
+      self.net = caffe.Net(self.model_file, 'train');
+      % also make sure get_net runs
+      caffe.get_net(self.model_file, 'train');
+      
+      % fill in valid labels
+      self.net.blobs('label').set_data(randi( ...
+        self.num_output - 1, self.net.blobs('label').shape));
+      
+      delete(self.model_file);
+    end
+  end
+  methods (Test)
+    function self = test_blob(self)
+      self.net.blobs('data').set_data(10 * ones(self.net.blobs('data').shape));
+      self.verifyEqual(self.net.blobs('data').get_data(), ...
+        10 * ones(self.net.blobs('data').shape, 'single'));
+      self.net.blobs('data').set_diff(-2 * ones(self.net.blobs('data').shape));
+      self.verifyEqual(self.net.blobs('data').get_diff(), ...
+        -2 * ones(self.net.blobs('data').shape, 'single'));
+      original_shape = self.net.blobs('data').shape;
+      self.net.blobs('data').reshape([6 5 4 3 2 1]);
+      self.verifyEqual(self.net.blobs('data').shape, [6 5 4 3 2 1]);
+      self.net.blobs('data').reshape(original_shape);
+      self.net.reshape();
+    end
+    function self = test_layer(self)
+      self.verifyEqual(self.net.params('conv', 1).shape, [2 2 2 11]);
+      self.verifyEqual(self.net.layers('conv').params(2).shape, 11);
+      self.verifyEqual(self.net.layers('conv').type(), 'Convolution');
+    end
+    function test_forward_backward(self)
+      self.net.forward_prefilled();
+      self.net.backward_prefilled();
+    end
+    function test_inputs_outputs(self)
+      self.verifyEqual(self.net.inputs, cell(0, 1))
+      self.verifyEqual(self.net.outputs, {'loss'});
+    end
+    function test_save_and_read(self)
+      weights_file = tempname();
+      self.net.save(weights_file);
+      model_file2 = caffe.test.test_net.simple_net_file(self.num_output);
+      net2 = caffe.Net(model_file2, 'train');
+      net2.copy_from(weights_file);
+      net3 = caffe.Net(model_file2, weights_file, 'train');
+      delete(model_file2);
+      delete(weights_file);
+      for l = 1:length(self.net.layer_vec)
+        for i = 1:length(self.net.layer_vec(l).params)
+          self.verifyEqual(self.net.layer_vec(l).params(i).get_data(), ...
+            net2.layer_vec(l).params(i).get_data());
+          self.verifyEqual(self.net.layer_vec(l).params(i).get_data(), ...
+            net3.layer_vec(l).params(i).get_data());
+        end
+      end
+    end
+  end
+end
diff --git a/matlab/+caffe/+test/test_solver.m b/matlab/+caffe/+test/test_solver.m
new file mode 100644
index 0000000..739258b
--- /dev/null
+++ b/matlab/+caffe/+test/test_solver.m
@@ -0,0 +1,45 @@
+classdef test_solver < matlab.unittest.TestCase
+  
+  properties
+    num_output
+    solver
+  end
+  
+  methods
+    function self = test_solver()
+      self.num_output = 13;
+      model_file = caffe.test.test_net.simple_net_file(self.num_output);
+      solver_file = tempname();
+      
+      fid = fopen(solver_file, 'w');
+      fprintf(fid, [ ...
+        'net: "'  model_file  '"\n' ...
+        'test_iter: 10 test_interval: 10 base_lr: 0.01 momentum: 0.9\n' ...
+        'weight_decay: 0.0005 lr_policy: "inv" gamma: 0.0001 power: 0.75\n' ...
+        'display: 100 max_iter: 100 snapshot_after_train: false\n' ]);
+      fclose(fid);
+      
+      self.solver = caffe.Solver(solver_file);
+      % also make sure get_solver runs
+      caffe.get_solver(solver_file);
+      caffe.set_mode_cpu();
+      % fill in valid labels
+      self.solver.net.blobs('label').set_data(randi( ...
+        self.num_output - 1, self.solver.net.blobs('label').shape));
+      self.solver.test_nets(1).blobs('label').set_data(randi( ...
+        self.num_output - 1, self.solver.test_nets(1).blobs('label').shape));
+      
+      delete(solver_file);
+      delete(model_file);
+    end
+  end
+  methods (Test)
+    function test_solve(self)
+      self.verifyEqual(self.solver.iter(), 0)
+      self.solver.step(30);
+      self.verifyEqual(self.solver.iter(), 30)
+      self.solver.solve()
+      self.verifyEqual(self.solver.iter(), 100)
+    end
+  end
+end
diff --git a/matlab/+caffe/Blob.m b/matlab/+caffe/Blob.m
new file mode 100644
index 0000000..e39f7ee
--- /dev/null
+++ b/matlab/+caffe/Blob.m
@@ -0,0 +1,78 @@
+classdef Blob < handle
+  % Wrapper class of caffe::Blob in matlab
+  
+  properties (Access = private)
+    hBlob_self
+  end
+  
+  methods
+    function self = Blob(hBlob_blob)
+      CHECK(is_valid_handle(hBlob_blob), 'invalid Blob handle');
+      
+      % setup self handle
+      self.hBlob_self = hBlob_blob;
+    end
+    function shape = shape(self)
+      shape = caffe_('blob_get_shape', self.hBlob_self);
+    end
+    function reshape(self, shape)
+      shape = self.check_and_preprocess_shape(shape);
+      caffe_('blob_reshape', self.hBlob_self, shape);
+    end
+    function data = get_data(self)
+      data = caffe_('blob_get_data', self.hBlob_self);
+    end
+    function set_data(self, data)
+      data = self.check_and_preprocess_data(data);
+      caffe_('blob_set_data', self.hBlob_self, data);
+    end
+    function diff = get_diff(self)
+      diff = caffe_('blob_get_diff', self.hBlob_self);
+    end
+    function set_diff(self, diff)
+      diff = self.check_and_preprocess_data(diff);
+      caffe_('blob_set_diff', self.hBlob_self, diff);
+    end
+  end
+  
+  methods (Access = private)
+    function shape = check_and_preprocess_shape(~, shape)
+      CHECK(isempty(shape) || (isnumeric(shape) && isrow(shape)), ...
+        'shape must be an integer row vector');
+      shape = double(shape);
+    end
+    function data = check_and_preprocess_data(self, data)
+      CHECK(isnumeric(data), 'data or diff must be numeric types');
+      self.check_data_size_matches(data);
+      if ~isa(data, 'single')
+        data = single(data);
+      end
+    end
+    function check_data_size_matches(self, data)
+      % check whether size of data matches shape of this blob
+      % note: matlab arrays always have at least 2 dimensions. To compare
+      % shape between size of data and shape of this blob, extend shape of
+      % this blob to have at least 2 dimensions
+      self_shape_extended = self.shape;
+      if isempty(self_shape_extended)
+        % target blob is a scalar (0 dim)
+        self_shape_extended = [1, 1];
+      elseif isscalar(self_shape_extended)
+        % target blob is a vector (1 dim)
+        self_shape_extended = [self_shape_extended, 1];
+      end
+      % Also, matlab cannot have a trailing dimension of 1 for ndim > 2, so you
+      % cannot create a 20 x 10 x 1 x 1 array in matlab; it becomes 20 x 10.
+      % Extend matlab arrays with trailing dimensions of 1 during the shape match
+      data_size_extended = ...
+        [size(data), ones(1, length(self_shape_extended) - ndims(data))];
+      is_matched = ...
+        (length(self_shape_extended) == length(data_size_extended)) ...
+        && all(self_shape_extended == data_size_extended);
+      CHECK(is_matched, ...
+        sprintf('%s, input data/diff size: [ %s] vs target blob shape: [ %s]', ...
+        'input data/diff size does not match target blob shape', ...
+        sprintf('%d ', data_size_extended), sprintf('%d ', self_shape_extended)));
+    end
+  end
+end
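For reference, a minimal sketch of how the shape checks above behave in practice; the model file name is a placeholder and a 4-D input blob named 'data' is assumed:

    net  = caffe.Net('deploy.prototxt', 'test');   % placeholder deploy prototxt
    blob = net.blobs('data');
    sz   = blob.shape();                    % row vector, width is the fastest axis
    blob.set_data(rand(sz, 'single'));      % sizes match, so the size check passes
    blob.reshape([sz(1) sz(2) sz(3) 1]);    % same spatial/channel dims, single image
    net.reshape();                          % propagate the new shape through the net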
diff --git a/matlab/+caffe/Layer.m b/matlab/+caffe/Layer.m
new file mode 100644
index 0000000..4c20231
--- /dev/null
+++ b/matlab/+caffe/Layer.m
@@ -0,0 +1,32 @@
+classdef Layer < handle
+  % Wrapper class of caffe::Layer in matlab
+  
+  properties (Access = private)
+    hLayer_self
+    attributes
+    % attributes fields:
+    %     hBlob_blobs
+  end
+  properties (SetAccess = private)
+    params
+  end
+  
+  methods
+    function self = Layer(hLayer_layer)
+      CHECK(is_valid_handle(hLayer_layer), 'invalid Layer handle');
+      
+      % setup self handle and attributes
+      self.hLayer_self = hLayer_layer;
+      self.attributes = caffe_('layer_get_attr', self.hLayer_self);
+      
+      % setup weights
+      self.params = caffe.Blob.empty();
+      for n = 1:length(self.attributes.hBlob_blobs)
+        self.params(n) = caffe.Blob(self.attributes.hBlob_blobs(n));
+      end
+    end
+    function layer_type = type(self)
+      layer_type = caffe_('layer_get_type', self.hLayer_self);
+    end
+  end
+end
diff --git a/matlab/+caffe/Net.m b/matlab/+caffe/Net.m
new file mode 100644
index 0000000..e6295bb
--- /dev/null
+++ b/matlab/+caffe/Net.m
@@ -0,0 +1,133 @@
+classdef Net < handle
+  % Wrapper class of caffe::Net in matlab
+  
+  properties (Access = private)
+    hNet_self
+    attributes
+    % attribute fields
+    %     hLayer_layers
+    %     hBlob_blobs
+    %     input_blob_indices
+    %     output_blob_indices
+    %     layer_names
+    %     blob_names
+  end
+  properties (SetAccess = private)
+    layer_vec
+    blob_vec
+    inputs
+    outputs
+    name2layer_index
+    name2blob_index
+    layer_names
+    blob_names
+  end
+  
+  methods
+    function self = Net(varargin)
+      % decide whether to construct a net from model_file or handle
+      if ~(nargin == 1 && isstruct(varargin{1}))
+        % construct a net from model_file
+        self = caffe.get_net(varargin{:});
+        return
+      end
+      % construct a net from handle
+      hNet_net = varargin{1};
+      CHECK(is_valid_handle(hNet_net), 'invalid Net handle');
+      
+      % setup self handle and attributes
+      self.hNet_self = hNet_net;
+      self.attributes = caffe_('net_get_attr', self.hNet_self);
+      
+      % setup layer_vec
+      self.layer_vec = caffe.Layer.empty();
+      for n = 1:length(self.attributes.hLayer_layers)
+        self.layer_vec(n) = caffe.Layer(self.attributes.hLayer_layers(n));
+      end
+      
+      % setup blob_vec
+      self.blob_vec = caffe.Blob.empty();
+      for n = 1:length(self.attributes.hBlob_blobs)
+        self.blob_vec(n) = caffe.Blob(self.attributes.hBlob_blobs(n));
+      end
+      
+      % setup input and output blob and their names
+      % note: add 1 to indices as matlab is 1-indexed while C++ is 0-indexed
+      self.inputs = ...
+        self.attributes.blob_names(self.attributes.input_blob_indices + 1);
+      self.outputs = ...
+        self.attributes.blob_names(self.attributes.output_blob_indices + 1);
+      
+      % create map objects to map from name to layers and blobs
+      self.name2layer_index = containers.Map(self.attributes.layer_names, ...
+        1:length(self.attributes.layer_names));
+      self.name2blob_index = containers.Map(self.attributes.blob_names, ...
+        1:length(self.attributes.blob_names));
+      
+      % expose layer_names and blob_names for public read access
+      self.layer_names = self.attributes.layer_names;
+      self.blob_names = self.attributes.blob_names;
+    end
+    function layer = layers(self, layer_name)
+      CHECK(ischar(layer_name), 'layer_name must be a string');
+      layer = self.layer_vec(self.name2layer_index(layer_name));
+    end
+    function blob = blobs(self, blob_name)
+      CHECK(ischar(blob_name), 'blob_name must be a string');
+      blob = self.blob_vec(self.name2blob_index(blob_name));
+    end
+    function blob = params(self, layer_name, blob_index)
+      CHECK(ischar(layer_name), 'layer_name must be a string');
+      CHECK(isscalar(blob_index), 'blob_index must be a scalar');
+      blob = self.layer_vec(self.name2layer_index(layer_name)).params(blob_index);
+    end
+    function forward_prefilled(self)
+      caffe_('net_forward', self.hNet_self);
+    end
+    function backward_prefilled(self)
+      caffe_('net_backward', self.hNet_self);
+    end
+    function res = forward(self, input_data)
+      CHECK(iscell(input_data), 'input_data must be a cell array');
+      CHECK(length(input_data) == length(self.inputs), ...
+        'input data cell length must match input blob number');
+      % copy data to input blobs
+      for n = 1:length(self.inputs)
+        self.blobs(self.inputs{n}).set_data(input_data{n});
+      end
+      self.forward_prefilled();
+      % retrieve data from output blobs
+      res = cell(length(self.outputs), 1);
+      for n = 1:length(self.outputs)
+        res{n} = self.blobs(self.outputs{n}).get_data();
+      end
+    end
+    function res = backward(self, output_diff)
+      CHECK(iscell(output_diff), 'output_diff must be a cell array');
+      CHECK(length(output_diff) == length(self.outputs), ...
+        'output diff cell length must match output blob number');
+      % copy diff to output blobs
+      for n = 1:length(self.outputs)
+        self.blobs(self.outputs{n}).set_diff(output_diff{n});
+      end
+      self.backward_prefilled();
+      % retrieve diff from input blobs
+      res = cell(length(self.inputs), 1);
+      for n = 1:length(self.inputs)
+        res{n} = self.blobs(self.inputs{n}).get_diff();
+      end
+    end
+    function copy_from(self, weights_file)
+      CHECK(ischar(weights_file), 'weights_file must be a string');
+      CHECK_FILE_EXIST(weights_file);
+      caffe_('net_copy_from', self.hNet_self, weights_file);
+    end
+    function reshape(self)
+      caffe_('net_reshape', self.hNet_self);
+    end
+    function save(self, weights_file)
+      CHECK(ischar(weights_file), 'weights_file must be a string');
+      caffe_('net_save', self.hNet_self, weights_file);
+    end
+  end
+end
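A short usage sketch of the forward/backward convention above; file names are placeholders, one input and one output blob are assumed, and the net must be configured so that backward actually produces gradients:

    net = caffe.Net('deploy.prototxt', 'weights.caffemodel', 'test');  % placeholders
    in  = {rand(net.blobs(net.inputs{1}).shape(), 'single')};  % one cell per input blob
    out = net.forward(in);                                     % one cell per output blob
    grad    = {ones(size(out{1}), 'single')};                  % gradient w.r.t. the output
    in_diff = net.backward(grad);                              % diffs w.r.t. the input blobs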
diff --git a/matlab/+caffe/Solver.m b/matlab/+caffe/Solver.m
new file mode 100644
index 0000000..f8bdc4e
--- /dev/null
+++ b/matlab/+caffe/Solver.m
@@ -0,0 +1,56 @@
+classdef Solver < handle
+  % Wrapper class of caffe::SGDSolver in matlab
+  
+  properties (Access = private)
+    hSolver_self
+    attributes
+    % attribute fields
+    %     hNet_net
+    %     hNet_test_nets
+  end
+  properties (SetAccess = private)
+    net
+    test_nets
+  end
+  
+  methods
+    function self = Solver(varargin)
+      % decide whether to construct a solver from solver_file or handle
+      if ~(nargin == 1 && isstruct(varargin{1}))
+        % construct a solver from solver_file
+        self = caffe.get_solver(varargin{:});
+        return
+      end
+      % construct a solver from handle
+      hSolver_solver = varargin{1};
+      CHECK(is_valid_handle(hSolver_solver), 'invalid Solver handle');
+      
+      % setup self handle and attributes
+      self.hSolver_self = hSolver_solver;
+      self.attributes = caffe_('solver_get_attr', self.hSolver_self);
+      
+      % setup net and test_nets
+      self.net = caffe.Net(self.attributes.hNet_net);
+      self.test_nets = caffe.Net.empty();
+      for n = 1:length(self.attributes.hNet_test_nets)
+        self.test_nets(n) = caffe.Net(self.attributes.hNet_test_nets(n));
+      end
+    end
+    function iter = iter(self)
+      iter = caffe_('solver_get_iter', self.hSolver_self);
+    end
+    function restore(self, snapshot_filename)
+      CHECK(ischar(snapshot_filename), 'snapshot_filename must be a string');
+      CHECK_FILE_EXIST(snapshot_filename);
+      caffe_('solver_restore', self.hSolver_self, snapshot_filename);
+    end
+    function solve(self)
+      caffe_('solver_solve', self.hSolver_self);
+    end
+    function step(self, iters)
+      CHECK(isscalar(iters) && iters > 0, 'iters must be a positive integer');
+      iters = double(iters);
+      caffe_('solver_step', self.hSolver_self, iters);
+    end
+  end
+end
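A minimal sketch of driving the solver from matlab; the solver prototxt path and the layer name are placeholders:

    solver = caffe.Solver('models/mymodel/solver.prototxt');
    solver.step(100);                               % run 100 iterations of SGD
    fprintf('iter = %d\n', solver.iter());
    w = solver.net.params('conv1', 1).get_data();   % weights of a layer named 'conv1'
    solver.solve();                                 % continue training until max_iter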
diff --git a/matlab/+caffe/get_net.m b/matlab/+caffe/get_net.m
new file mode 100644
index 0000000..4b5683e
--- /dev/null
+++ b/matlab/+caffe/get_net.m
@@ -0,0 +1,37 @@
+function net = get_net(varargin)
+% net = get_net(model_file, phase_name) or
+% net = get_net(model_file, weights_file, phase_name)
+%   Construct a net from model_file, and load weights from weights_file
+%   phase_name can only be 'train' or 'test'
+
+CHECK(nargin == 2 || nargin == 3, ['usage: ' ...
+  'net = get_net(model_file, phase_name) or ' ...
+  'net = get_net(model_file, weights_file, phase_name)']);
+if nargin == 3
+  model_file = varargin{1};
+  weights_file = varargin{2};
+  phase_name = varargin{3};
+elseif nargin == 2
+  model_file = varargin{1};
+  phase_name = varargin{2};
+end
+
+CHECK(ischar(model_file), 'model_file must be a string');
+CHECK(ischar(phase_name), 'phase_name must be a string');
+CHECK_FILE_EXIST(model_file);
+CHECK(strcmp(phase_name, 'train') || strcmp(phase_name, 'test'), ...
+  sprintf('phase_name can only be %strain%s or %stest%s', ...
+  char(39), char(39), char(39), char(39)));
+
+% construct caffe net from model_file
+hNet = caffe_('get_net', model_file, phase_name);
+net = caffe.Net(hNet);
+
+% load weights from weights_file
+if nargin == 3
+  CHECK(ischar(weights_file), 'weights_file must be a string');
+  CHECK_FILE_EXIST(weights_file);
+  net.copy_from(weights_file);
+end
+
+end
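Both calling forms, with placeholder file names:

    net = caffe.get_net('deploy.prototxt', 'test');                        % untrained net
    net = caffe.get_net('deploy.prototxt', 'weights.caffemodel', 'test');  % weights loaded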
diff --git a/matlab/+caffe/get_solver.m b/matlab/+caffe/get_solver.m
new file mode 100644
index 0000000..74d576e
--- /dev/null
+++ b/matlab/+caffe/get_solver.m
@@ -0,0 +1,10 @@
+function solver = get_solver(solver_file)
+% solver = get_solver(solver_file)
+%   Construct a Solver object from solver_file
+
+CHECK(ischar(solver_file), 'solver_file must be a string');
+CHECK_FILE_EXIST(solver_file);
+pSolver = caffe_('get_solver', solver_file);
+solver = caffe.Solver(pSolver);
+
+end
diff --git a/matlab/+caffe/imagenet/ilsvrc_2012_mean.mat b/matlab/+caffe/imagenet/ilsvrc_2012_mean.mat
new file mode 100644
index 0000000..21df3d3
Binary files /dev/null and b/matlab/+caffe/imagenet/ilsvrc_2012_mean.mat differ
diff --git a/matlab/+caffe/io.m b/matlab/+caffe/io.m
new file mode 100644
index 0000000..af8369d
--- /dev/null
+++ b/matlab/+caffe/io.m
@@ -0,0 +1,33 @@
+classdef io
+  % a class for input and output functions
+  
+  methods (Static)
+    function im_data = load_image(im_file)
+      % im_data = load_image(im_file)
+      %   load an image from disk into Caffe-supported data format
+      %   switch channels from RGB to BGR, make width the fastest dimension
+      %   and convert to single
+      %   returns im_data in W x H x C. For colored images, C = 3 in BGR
+      %   channels, and for grayscale images, C = 1
+      CHECK(ischar(im_file), 'im_file must be a string');
+      CHECK_FILE_EXIST(im_file);
+      im_data = imread(im_file);
+      % permute channels from RGB to BGR for colored images
+      if size(im_data, 3) == 3
+        im_data = im_data(:, :, [3, 2, 1]);
+      end
+      % flip width and height to make width the fastest dimension
+      im_data = permute(im_data, [2, 1, 3]);
+      % convert from uint8 to single
+      im_data = single(im_data);
+    end
+    function mean_data = read_mean(mean_proto_file)
+      % mean_data = read_mean(mean_proto_file)
+      %   read image mean data from binaryproto file
+      %   returns mean_data in W x H x C with BGR channels
+      CHECK(ischar(mean_proto_file), 'mean_proto_file must be a string');
+      CHECK_FILE_EXIST(mean_proto_file);
+      mean_data = caffe_('read_mean', mean_proto_file);
+    end
+  end
+end
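A sketch of chaining the two helpers above; the paths are the usual caffe example files, and read_mean returns the mean in the same W x H x C / BGR layout that load_image produces:

    im_data   = caffe.io.load_image('examples/images/cat.jpg');
    mean_data = caffe.io.read_mean('data/ilsvrc12/imagenet_mean.binaryproto');
    im_data   = imresize(im_data, [size(mean_data, 1), size(mean_data, 2)], 'bilinear');
    im_data   = im_data - mean_data;   % subtract the dataset mean before a forward pass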
diff --git a/matlab/+caffe/private/CHECK.m b/matlab/+caffe/private/CHECK.m
new file mode 100644
index 0000000..2170654
--- /dev/null
+++ b/matlab/+caffe/private/CHECK.m
@@ -0,0 +1,7 @@
+function CHECK(expr, error_msg)
+
+if ~expr
+  error(error_msg);
+end
+
+end
diff --git a/matlab/+caffe/private/CHECK_FILE_EXIST.m b/matlab/+caffe/private/CHECK_FILE_EXIST.m
new file mode 100644
index 0000000..8c80fb8
--- /dev/null
+++ b/matlab/+caffe/private/CHECK_FILE_EXIST.m
@@ -0,0 +1,7 @@
+function CHECK_FILE_EXIST(filename)
+
+if exist(filename, 'file') == 0
+  error('%s does not exist', filename);
+end
+
+end
diff --git a/matlab/+caffe/private/caffe_.cpp b/matlab/+caffe/private/caffe_.cpp
new file mode 100644
index 0000000..4e0ebc1
--- /dev/null
+++ b/matlab/+caffe/private/caffe_.cpp
@@ -0,0 +1,546 @@
+//
+// caffe_.cpp provides wrappers of the caffe::Solver class, caffe::Net class,
+// caffe::Layer class and caffe::Blob class and some caffe::Caffe functions,
+// so that one could easily use Caffe from matlab.
+// Note that for matlab, we will simply use float as the data type.
+
+// Internally, data is stored with dimensions reversed from Caffe's:
+// e.g., if the Caffe blob axes are (num, channels, height, width),
+// the matcaffe data is stored as (width, height, channels, num)
+// where width is the fastest dimension.
+
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "mex.h"
+
+#include "caffe/caffe.hpp"
+
+#define MEX_ARGS int nlhs, mxArray **plhs, int nrhs, const mxArray **prhs
+
+using namespace caffe;  // NOLINT(build/namespaces)
+
+// Do CHECK and throw a Mex error if check fails
+inline void mxCHECK(bool expr, const char* msg) {
+  if (!expr) {
+    mexErrMsgTxt(msg);
+  }
+}
+inline void mxERROR(const char* msg) { mexErrMsgTxt(msg); }
+
+// Check if a file exists and can be opened
+void mxCHECK_FILE_EXIST(const char* file) {
+  std::ifstream f(file);
+  if (!f.good()) {
+    f.close();
+    std::string msg("Could not open file ");
+    msg += file;
+    mxERROR(msg.c_str());
+  }
+  f.close();
+}
+
+// The pointers to caffe::Solver and caffe::Net instances
+static vector<shared_ptr<Solver<float> > > solvers_;
+static vector<shared_ptr<Net<float> > > nets_;
+// init_key is generated at startup and every time you call reset
+static double init_key = static_cast<double>(caffe_rng_rand());
+
+/** -----------------------------------------------------------------
+ ** data conversion functions
+ **/
+// Enum indicates which blob memory to use
+enum WhichMemory { DATA, DIFF };
+
+// Copy matlab array to Blob data or diff
+static void mx_mat_to_blob(const mxArray* mx_mat, Blob<float>* blob,
+    WhichMemory data_or_diff) {
+  mxCHECK(blob->count() == mxGetNumberOfElements(mx_mat),
+      "number of elements in target blob doesn't match that in input mxArray");
+  const float* mat_mem_ptr = reinterpret_cast<const float*>(mxGetData(mx_mat));
+  float* blob_mem_ptr = NULL;
+  switch (Caffe::mode()) {
+  case Caffe::CPU:
+    blob_mem_ptr = (data_or_diff == DATA ?
+        blob->mutable_cpu_data() : blob->mutable_cpu_diff());
+    break;
+  case Caffe::GPU:
+    blob_mem_ptr = (data_or_diff == DATA ?
+        blob->mutable_gpu_data() : blob->mutable_gpu_diff());
+    break;
+  default:
+    mxERROR("Unknown Caffe mode");
+  }
+  caffe_copy(blob->count(), mat_mem_ptr, blob_mem_ptr);
+}
+
+// Copy Blob data or diff to matlab array
+static mxArray* blob_to_mx_mat(const Blob<float>* blob,
+    WhichMemory data_or_diff) {
+  const int num_axes = blob->num_axes();
+  vector<mwSize> dims(num_axes);
+  for (int blob_axis = 0, mat_axis = num_axes - 1; blob_axis < num_axes;
+       ++blob_axis, --mat_axis) {
+    dims[mat_axis] = static_cast<mwSize>(blob->shape(blob_axis));
+  }
+  // matlab array needs to have at least one dimension, convert scalar to 1-dim
+  if (num_axes == 0) {
+    dims.push_back(1);
+  }
+  mxArray* mx_mat =
+      mxCreateNumericArray(dims.size(), dims.data(), mxSINGLE_CLASS, mxREAL);
+  float* mat_mem_ptr = reinterpret_cast<float*>(mxGetData(mx_mat));
+  const float* blob_mem_ptr = NULL;
+  switch (Caffe::mode()) {
+  case Caffe::CPU:
+    blob_mem_ptr = (data_or_diff == DATA ? blob->cpu_data() : blob->cpu_diff());
+    break;
+  case Caffe::GPU:
+    blob_mem_ptr = (data_or_diff == DATA ? blob->gpu_data() : blob->gpu_diff());
+    break;
+  default:
+    mxERROR("Unknown Caffe mode");
+  }
+  caffe_copy(blob->count(), blob_mem_ptr, mat_mem_ptr);
+  return mx_mat;
+}
+
+// Convert vector<int> to a matlab column vector
+static mxArray* int_vec_to_mx_vec(const vector<int>& int_vec) {
+  mxArray* mx_vec = mxCreateDoubleMatrix(int_vec.size(), 1, mxREAL);
+  double* vec_mem_ptr = mxGetPr(mx_vec);
+  for (int i = 0; i < int_vec.size(); i++) {
+    vec_mem_ptr[i] = static_cast<double>(int_vec[i]);
+  }
+  return mx_vec;
+}
+
+// Convert vector<string> to matlab cell vector of strings
+static mxArray* str_vec_to_mx_strcell(const vector<std::string>& str_vec) {
+  mxArray* mx_strcell = mxCreateCellMatrix(str_vec.size(), 1);
+  for (int i = 0; i < str_vec.size(); i++) {
+    mxSetCell(mx_strcell, i, mxCreateString(str_vec[i].c_str()));
+  }
+  return mx_strcell;
+}
+
+/** -----------------------------------------------------------------
+ ** handle and pointer conversion functions
+ ** a handle is a struct array with the following fields
+ **   (uint64) ptr      : the pointer to the C++ object
+ **   (double) init_key : caffe initialization key
+ **/
+// Convert a handle in matlab to a pointer in C++. Check if init_key matches
+template <typename T>
+static T* handle_to_ptr(const mxArray* mx_handle) {
+  mxArray* mx_ptr = mxGetField(mx_handle, 0, "ptr");
+  mxArray* mx_init_key = mxGetField(mx_handle, 0, "init_key");
+  mxCHECK(mxIsUint64(mx_ptr), "pointer type must be uint64");
+  mxCHECK(mxGetScalar(mx_init_key) == init_key,
+      "Could not convert handle to pointer due to invalid init_key. "
+      "The object might have been cleared.");
+  return reinterpret_cast<T*>(*reinterpret_cast<uint64_t*>(mxGetData(mx_ptr)));
+}
+
+// Create a handle struct vector, without setting up each handle in it
+template <typename T>
+static mxArray* create_handle_vec(int ptr_num) {
+  const int handle_field_num = 2;
+  const char* handle_fields[handle_field_num] = { "ptr", "init_key" };
+  return mxCreateStructMatrix(ptr_num, 1, handle_field_num, handle_fields);
+}
+
+// Set up a handle in a handle struct vector by its index
+template <typename T>
+static void setup_handle(const T* ptr, int index, mxArray* mx_handle_vec) {
+  mxArray* mx_ptr = mxCreateNumericMatrix(1, 1, mxUINT64_CLASS, mxREAL);
+  *reinterpret_cast<uint64_t*>(mxGetData(mx_ptr)) =
+      reinterpret_cast<uint64_t>(ptr);
+  mxSetField(mx_handle_vec, index, "ptr", mx_ptr);
+  mxSetField(mx_handle_vec, index, "init_key", mxCreateDoubleScalar(init_key));
+}
+
+// Convert a pointer in C++ to a handle in matlab
+template <typename T>
+static mxArray* ptr_to_handle(const T* ptr) {
+  mxArray* mx_handle = create_handle_vec<T>(1);
+  setup_handle(ptr, 0, mx_handle);
+  return mx_handle;
+}
+
+// Convert a vector of shared_ptr in C++ to handle struct vector
+template <typename T>
+static mxArray* ptr_vec_to_handle_vec(const vector<shared_ptr<T> >& ptr_vec) {
+  mxArray* mx_handle_vec = create_handle_vec<T>(ptr_vec.size());
+  for (int i = 0; i < ptr_vec.size(); i++) {
+    setup_handle(ptr_vec[i].get(), i, mx_handle_vec);
+  }
+  return mx_handle_vec;
+}
+
+/** -----------------------------------------------------------------
+ ** matlab command functions: caffe_(api_command, arg1, arg2, ...)
+ **/
+// Usage: caffe_('get_solver', solver_file);
+static void get_solver(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsChar(prhs[0]),
+      "Usage: caffe_('get_solver', solver_file)");
+  char* solver_file = mxArrayToString(prhs[0]);
+  mxCHECK_FILE_EXIST(solver_file);
+  shared_ptr<Solver<float> > solver(new caffe::SGDSolver<float>(solver_file));
+  solvers_.push_back(solver);
+  plhs[0] = ptr_to_handle<Solver<float> >(solver.get());
+  mxFree(solver_file);
+}
+
+// Usage: caffe_('solver_get_attr', hSolver)
+static void solver_get_attr(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsStruct(prhs[0]),
+      "Usage: caffe_('solver_get_attr', hSolver)");
+  Solver<float>* solver = handle_to_ptr<Solver<float> >(prhs[0]);
+  const int solver_attr_num = 2;
+  const char* solver_attrs[solver_attr_num] = { "hNet_net", "hNet_test_nets" };
+  mxArray* mx_solver_attr = mxCreateStructMatrix(1, 1, solver_attr_num,
+      solver_attrs);
+  mxSetField(mx_solver_attr, 0, "hNet_net",
+      ptr_to_handle<Net<float> >(solver->net().get()));
+  mxSetField(mx_solver_attr, 0, "hNet_test_nets",
+      ptr_vec_to_handle_vec<Net<float> >(solver->test_nets()));
+  plhs[0] = mx_solver_attr;
+}
+
+// Usage: caffe_('solver_get_iter', hSolver)
+static void solver_get_iter(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsStruct(prhs[0]),
+      "Usage: caffe_('solver_get_iter', hSolver)");
+  Solver<float>* solver = handle_to_ptr<Solver<float> >(prhs[0]);
+  plhs[0] = mxCreateDoubleScalar(solver->iter());
+}
+
+// Usage: caffe_('solver_restore', hSolver, snapshot_file)
+static void solver_restore(MEX_ARGS) {
+  mxCHECK(nrhs == 2 && mxIsStruct(prhs[0]) && mxIsChar(prhs[1]),
+      "Usage: caffe_('solver_restore', hSolver, snapshot_file)");
+  Solver<float>* solver = handle_to_ptr<Solver<float> >(prhs[0]);
+  char* snapshot_file = mxArrayToString(prhs[1]);
+  mxCHECK_FILE_EXIST(snapshot_file);
+  solver->Restore(snapshot_file);
+  mxFree(snapshot_file);
+}
+
+// Usage: caffe_('solver_solve', hSolver)
+static void solver_solve(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsStruct(prhs[0]),
+      "Usage: caffe_('solver_solve', hSolver)");
+  Solver<float>* solver = handle_to_ptr<Solver<float> >(prhs[0]);
+  solver->Solve();
+}
+
+// Usage: caffe_('solver_step', hSolver, iters)
+static void solver_step(MEX_ARGS) {
+  mxCHECK(nrhs == 2 && mxIsStruct(prhs[0]) && mxIsDouble(prhs[1]),
+      "Usage: caffe_('solver_step', hSolver, iters)");
+  Solver<float>* solver = handle_to_ptr<Solver<float> >(prhs[0]);
+  int iters = mxGetScalar(prhs[1]);
+  solver->Step(iters);
+}
+
+// Usage: caffe_('get_net', model_file, phase_name)
+static void get_net(MEX_ARGS) {
+  mxCHECK(nrhs == 2 && mxIsChar(prhs[0]) && mxIsChar(prhs[1]),
+      "Usage: caffe_('get_net', model_file, phase_name)");
+  char* model_file = mxArrayToString(prhs[0]);
+  char* phase_name = mxArrayToString(prhs[1]);
+  mxCHECK_FILE_EXIST(model_file);
+  Phase phase;
+  if (strcmp(phase_name, "train") == 0) {
+      phase = TRAIN;
+  } else if (strcmp(phase_name, "test") == 0) {
+      phase = TEST;
+  } else {
+    mxERROR("Unknown phase");
+  }
+  shared_ptr<Net<float> > net(new caffe::Net<float>(model_file, phase));
+  nets_.push_back(net);
+  plhs[0] = ptr_to_handle<Net<float> >(net.get());
+  mxFree(model_file);
+  mxFree(phase_name);
+}
+
+// Usage: caffe_('net_get_attr', hNet)
+static void net_get_attr(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsStruct(prhs[0]),
+      "Usage: caffe_('net_get_attr', hNet)");
+  Net<float>* net = handle_to_ptr<Net<float> >(prhs[0]);
+  const int net_attr_num = 6;
+  const char* net_attrs[net_attr_num] = { "hLayer_layers", "hBlob_blobs",
+      "input_blob_indices", "output_blob_indices", "layer_names", "blob_names"};
+  mxArray* mx_net_attr = mxCreateStructMatrix(1, 1, net_attr_num,
+      net_attrs);
+  mxSetField(mx_net_attr, 0, "hLayer_layers",
+      ptr_vec_to_handle_vec<Layer<float> >(net->layers()));
+  mxSetField(mx_net_attr, 0, "hBlob_blobs",
+      ptr_vec_to_handle_vec<Blob<float> >(net->blobs()));
+  mxSetField(mx_net_attr, 0, "input_blob_indices",
+      int_vec_to_mx_vec(net->input_blob_indices()));
+  mxSetField(mx_net_attr, 0, "output_blob_indices",
+      int_vec_to_mx_vec(net->output_blob_indices()));
+  mxSetField(mx_net_attr, 0, "layer_names",
+      str_vec_to_mx_strcell(net->layer_names()));
+  mxSetField(mx_net_attr, 0, "blob_names",
+      str_vec_to_mx_strcell(net->blob_names()));
+  plhs[0] = mx_net_attr;
+}
+
+// Usage: caffe_('net_forward', hNet)
+static void net_forward(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsStruct(prhs[0]),
+      "Usage: caffe_('net_forward', hNet)");
+  Net<float>* net = handle_to_ptr<Net<float> >(prhs[0]);
+  net->ForwardPrefilled();
+}
+
+// Usage: caffe_('net_backward', hNet)
+static void net_backward(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsStruct(prhs[0]),
+      "Usage: caffe_('net_backward', hNet)");
+  Net<float>* net = handle_to_ptr<Net<float> >(prhs[0]);
+  net->Backward();
+}
+
+// Usage: caffe_('net_copy_from', hNet, weights_file)
+static void net_copy_from(MEX_ARGS) {
+  mxCHECK(nrhs == 2 && mxIsStruct(prhs[0]) && mxIsChar(prhs[1]),
+      "Usage: caffe_('net_copy_from', hNet, weights_file)");
+  Net<float>* net = handle_to_ptr<Net<float> >(prhs[0]);
+  char* weights_file = mxArrayToString(prhs[1]);
+  mxCHECK_FILE_EXIST(weights_file);
+  net->CopyTrainedLayersFrom(weights_file);
+  mxFree(weights_file);
+}
+
+// Usage: caffe_('net_reshape', hNet)
+static void net_reshape(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsStruct(prhs[0]),
+      "Usage: caffe_('net_reshape', hNet)");
+  Net<float>* net = handle_to_ptr<Net<float> >(prhs[0]);
+  net->Reshape();
+}
+
+// Usage: caffe_('net_save', hNet, save_file)
+static void net_save(MEX_ARGS) {
+  mxCHECK(nrhs == 2 && mxIsStruct(prhs[0]) && mxIsChar(prhs[1]),
+      "Usage: caffe_('net_save', hNet, save_file)");
+  Net<float>* net = handle_to_ptr<Net<float> >(prhs[0]);
+  char* weights_file = mxArrayToString(prhs[1]);
+  NetParameter net_param;
+  net->ToProto(&net_param, false);
+  WriteProtoToBinaryFile(net_param, weights_file);
+  mxFree(weights_file);
+}
+
+// Usage: caffe_('layer_get_attr', hLayer)
+static void layer_get_attr(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsStruct(prhs[0]),
+      "Usage: caffe_('layer_get_attr', hLayer)");
+  Layer<float>* layer = handle_to_ptr<Layer<float> >(prhs[0]);
+  const int layer_attr_num = 1;
+  const char* layer_attrs[layer_attr_num] = { "hBlob_blobs" };
+  mxArray* mx_layer_attr = mxCreateStructMatrix(1, 1, layer_attr_num,
+      layer_attrs);
+  mxSetField(mx_layer_attr, 0, "hBlob_blobs",
+      ptr_vec_to_handle_vec<Blob<float> >(layer->blobs()));
+  plhs[0] = mx_layer_attr;
+}
+
+// Usage: caffe_('layer_get_type', hLayer)
+static void layer_get_type(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsStruct(prhs[0]),
+      "Usage: caffe_('layer_get_type', hLayer)");
+  Layer<float>* layer = handle_to_ptr<Layer<float> >(prhs[0]);
+  plhs[0] = mxCreateString(layer->type());
+}
+
+// Usage: caffe_('blob_get_shape', hBlob)
+static void blob_get_shape(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsStruct(prhs[0]),
+      "Usage: caffe_('blob_get_shape', hBlob)");
+  Blob<float>* blob = handle_to_ptr<Blob<float> >(prhs[0]);
+  const int num_axes = blob->num_axes();
+  mxArray* mx_shape = mxCreateDoubleMatrix(1, num_axes, mxREAL);
+  double* shape_mem_mtr = mxGetPr(mx_shape);
+  for (int blob_axis = 0, mat_axis = num_axes - 1; blob_axis < num_axes;
+       ++blob_axis, --mat_axis) {
+    shape_mem_mtr[mat_axis] = static_cast<double>(blob->shape(blob_axis));
+  }
+  plhs[0] = mx_shape;
+}
+
+// Usage: caffe_('blob_reshape', hBlob, new_shape)
+static void blob_reshape(MEX_ARGS) {
+  mxCHECK(nrhs == 2 && mxIsStruct(prhs[0]) && mxIsDouble(prhs[1]),
+      "Usage: caffe_('blob_reshape', hBlob, new_shape)");
+  Blob<float>* blob = handle_to_ptr<Blob<float> >(prhs[0]);
+  const mxArray* mx_shape = prhs[1];
+  double* shape_mem_mtr = mxGetPr(mx_shape);
+  const int num_axes = mxGetNumberOfElements(mx_shape);
+  vector<int> blob_shape(num_axes);
+  for (int blob_axis = 0, mat_axis = num_axes - 1; blob_axis < num_axes;
+       ++blob_axis, --mat_axis) {
+    blob_shape[blob_axis] = static_cast<int>(shape_mem_mtr[mat_axis]);
+  }
+  blob->Reshape(blob_shape);
+}
+
+// Usage: caffe_('blob_get_data', hBlob)
+static void blob_get_data(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsStruct(prhs[0]),
+      "Usage: caffe_('blob_get_data', hBlob)");
+  Blob<float>* blob = handle_to_ptr<Blob<float> >(prhs[0]);
+  plhs[0] = blob_to_mx_mat(blob, DATA);
+}
+
+// Usage: caffe_('blob_set_data', hBlob, new_data)
+static void blob_set_data(MEX_ARGS) {
+  mxCHECK(nrhs == 2 && mxIsStruct(prhs[0]) && mxIsSingle(prhs[1]),
+      "Usage: caffe_('blob_set_data', hBlob, new_data)");
+  Blob<float>* blob = handle_to_ptr<Blob<float> >(prhs[0]);
+  mx_mat_to_blob(prhs[1], blob, DATA);
+}
+
+// Usage: caffe_('blob_get_diff', hBlob)
+static void blob_get_diff(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsStruct(prhs[0]),
+      "Usage: caffe_('blob_get_diff', hBlob)");
+  Blob<float>* blob = handle_to_ptr<Blob<float> >(prhs[0]);
+  plhs[0] = blob_to_mx_mat(blob, DIFF);
+}
+
+// Usage: caffe_('blob_set_diff', hBlob, new_diff)
+static void blob_set_diff(MEX_ARGS) {
+  mxCHECK(nrhs == 2 && mxIsStruct(prhs[0]) && mxIsSingle(prhs[1]),
+      "Usage: caffe_('blob_set_diff', hBlob, new_diff)");
+  Blob<float>* blob = handle_to_ptr<Blob<float> >(prhs[0]);
+  mx_mat_to_blob(prhs[1], blob, DIFF);
+}
+
+// Usage: caffe_('set_mode_cpu')
+static void set_mode_cpu(MEX_ARGS) {
+  mxCHECK(nrhs == 0, "Usage: caffe_('set_mode_cpu')");
+  Caffe::set_mode(Caffe::CPU);
+}
+
+// Usage: caffe_('set_mode_gpu')
+static void set_mode_gpu(MEX_ARGS) {
+  mxCHECK(nrhs == 0, "Usage: caffe_('set_mode_gpu')");
+  Caffe::set_mode(Caffe::GPU);
+}
+
+// Usage: caffe_('set_device', device_id)
+static void set_device(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsDouble(prhs[0]),
+      "Usage: caffe_('set_device', device_id)");
+  int device_id = static_cast<int>(mxGetScalar(prhs[0]));
+  Caffe::SetDevice(device_id);
+}
+
+// Usage: caffe_('get_init_key')
+static void get_init_key(MEX_ARGS) {
+  mxCHECK(nrhs == 0, "Usage: caffe_('get_init_key')");
+  plhs[0] = mxCreateDoubleScalar(init_key);
+}
+
+// Usage: caffe_('reset')
+static void reset(MEX_ARGS) {
+  mxCHECK(nrhs == 0, "Usage: caffe_('reset')");
+  // Clear solvers and stand-alone nets
+  mexPrintf("Cleared %d solvers and %d stand-alone nets\n",
+      solvers_.size(), nets_.size());
+  solvers_.clear();
+  nets_.clear();
+  // Generate a new init_key so that handles created before become invalid
+  init_key = static_cast<double>(caffe_rng_rand());
+}
+
+// Usage: caffe_('read_mean', mean_proto_file)
+static void read_mean(MEX_ARGS) {
+  mxCHECK(nrhs == 1 && mxIsChar(prhs[0]),
+      "Usage: caffe_('read_mean', mean_proto_file)");
+  char* mean_proto_file = mxArrayToString(prhs[0]);
+  mxCHECK_FILE_EXIST(mean_proto_file);
+  Blob<float> data_mean;
+  BlobProto blob_proto;
+  bool result = ReadProtoFromBinaryFile(mean_proto_file, &blob_proto);
+  mxCHECK(result, "Could not read your mean file");
+  data_mean.FromProto(blob_proto);
+  plhs[0] = blob_to_mx_mat(&data_mean, DATA);
+  mxFree(mean_proto_file);
+}
+
+/** -----------------------------------------------------------------
+ ** Available commands.
+ **/
+struct handler_registry {
+  string cmd;
+  void (*func)(MEX_ARGS);
+};
+
+static handler_registry handlers[] = {
+  // Public API functions
+  { "get_solver",         get_solver      },
+  { "solver_get_attr",    solver_get_attr },
+  { "solver_get_iter",    solver_get_iter },
+  { "solver_restore",     solver_restore  },
+  { "solver_solve",       solver_solve    },
+  { "solver_step",        solver_step     },
+  { "get_net",            get_net         },
+  { "net_get_attr",       net_get_attr    },
+  { "net_forward",        net_forward     },
+  { "net_backward",       net_backward    },
+  { "net_copy_from",      net_copy_from   },
+  { "net_reshape",        net_reshape     },
+  { "net_save",           net_save        },
+  { "layer_get_attr",     layer_get_attr  },
+  { "layer_get_type",     layer_get_type  },
+  { "blob_get_shape",     blob_get_shape  },
+  { "blob_reshape",       blob_reshape    },
+  { "blob_get_data",      blob_get_data   },
+  { "blob_set_data",      blob_set_data   },
+  { "blob_get_diff",      blob_get_diff   },
+  { "blob_set_diff",      blob_set_diff   },
+  { "set_mode_cpu",       set_mode_cpu    },
+  { "set_mode_gpu",       set_mode_gpu    },
+  { "set_device",         set_device      },
+  { "get_init_key",       get_init_key    },
+  { "reset",              reset           },
+  { "read_mean",          read_mean       },
+  // The end.
+  { "END",                NULL            },
+};
+
+/** -----------------------------------------------------------------
+ ** matlab entry point.
+ **/
+// Usage: caffe_(api_command, arg1, arg2, ...)
+void mexFunction(MEX_ARGS) {
+  mexLock();  // Avoid clearing the mex file.
+  mxCHECK(nrhs > 0, "Usage: caffe_(api_command, arg1, arg2, ...)");
+  // Handle input command
+  char* cmd = mxArrayToString(prhs[0]);
+  bool dispatched = false;
+  // Dispatch to cmd handler
+  for (int i = 0; handlers[i].func != NULL; i++) {
+    if (handlers[i].cmd.compare(cmd) == 0) {
+      handlers[i].func(nlhs, plhs, nrhs-1, prhs+1);
+      dispatched = true;
+      break;
+    }
+  }
+  if (!dispatched) {
+    ostringstream error_msg;
+    error_msg << "Unknown command '" << cmd << "'";
+    mxERROR(error_msg.str().c_str());
+  }
+  mxFree(cmd);
+}
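To make the layout note at the top of caffe_.cpp concrete, a small matlab-side sketch (the numbers are only an example):

    % a Caffe blob with axes (num=10, channels=3, height=227, width=227) is seen
    % from matlab as a [227 227 3 10] single array, width being the fastest axis
    batch = zeros(227, 227, 3, 10, 'single');   % [width height channels num]
    % this is the shape blob_set_data expects and blob_get_data returns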
diff --git a/matlab/+caffe/private/is_valid_handle.m b/matlab/+caffe/private/is_valid_handle.m
new file mode 100644
index 0000000..a0648ec
--- /dev/null
+++ b/matlab/+caffe/private/is_valid_handle.m
@@ -0,0 +1,27 @@
+function valid = is_valid_handle(hObj)
+% valid = is_valid_handle(hObj) or is_valid_handle('get_new_init_key')
+%   Check if a handle is valid (has the right data type and init_key matches)
+%   Use is_valid_handle('get_new_init_key') to fetch a new init_key from C++
+
+% a handle is a struct array with the following fields
+%   (uint64) ptr      : the pointer to the C++ object
+%   (double) init_key : caffe initialization key
+
+persistent init_key;
+if isempty(init_key)
+  init_key = caffe_('get_init_key');
+end
+
+% is_valid_handle('get_new_init_key') to get new init_key from C++;
+if ischar(hObj) && strcmp(hObj, 'get_new_init_key')
+  init_key = caffe_('get_init_key');
+  return
+else
+  % check whether data types are correct and init_key matches
+  valid = isstruct(hObj) ...
+    && isscalar(hObj.ptr) && isa(hObj.ptr, 'uint64') ...
+    && isscalar(hObj.init_key) && isa(hObj.init_key, 'double') ...
+    && hObj.init_key == init_key;
+end
+
+end
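A sketch of how the init_key mechanism rejects stale handles; the solver file is a placeholder:

    solver = caffe.Solver('solver.prototxt');   % placeholder solver definition
    caffe.reset_all();       % clears the C++ objects and generates a new init_key
    % any later call through 'solver', e.g. solver.iter(), now fails the init_key
    % check in caffe_.cpp ("Could not convert handle to pointer due to invalid
    % init_key. The object might have been cleared.")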
diff --git a/matlab/+caffe/reset_all.m b/matlab/+caffe/reset_all.m
new file mode 100644
index 0000000..a8b33de
--- /dev/null
+++ b/matlab/+caffe/reset_all.m
@@ -0,0 +1,8 @@
+function reset_all()
+% reset_all()
+%   clear all solvers and stand-alone nets and reset Caffe to initial status
+
+caffe_('reset');
+is_valid_handle('get_new_init_key');
+
+end
diff --git a/matlab/+caffe/run_tests.m b/matlab/+caffe/run_tests.m
new file mode 100644
index 0000000..9389685
--- /dev/null
+++ b/matlab/+caffe/run_tests.m
@@ -0,0 +1,19 @@
+function results = run_tests()
+% results = run_tests()
+%   run all tests in this caffe matlab wrapper package
+
+% use CPU for testing
+caffe.set_mode_cpu();
+
+% reset caffe before testing
+caffe.reset_all();
+
+% put all test cases here
+results = [...
+  run(caffe.test.test_net) ...
+  run(caffe.test.test_solver) ];
+
+% reset caffe after testing
+caffe.reset_all();
+
+end
diff --git a/matlab/+caffe/set_device.m b/matlab/+caffe/set_device.m
new file mode 100644
index 0000000..f94068c
--- /dev/null
+++ b/matlab/+caffe/set_device.m
@@ -0,0 +1,11 @@
+function set_device(device_id)
+% set_device(device_id)
+%   set Caffe's GPU device ID
+
+CHECK(isscalar(device_id) && device_id >= 0, ...
+  'device_id must be a non-negative integer');
+device_id = double(device_id);
+
+caffe_('set_device', device_id);
+
+end
diff --git a/matlab/+caffe/set_mode_cpu.m b/matlab/+caffe/set_mode_cpu.m
new file mode 100644
index 0000000..a87e0e2
--- /dev/null
+++ b/matlab/+caffe/set_mode_cpu.m
@@ -0,0 +1,7 @@
+function set_mode_cpu()
+% set_mode_cpu()
+%   set Caffe to CPU mode
+
+caffe_('set_mode_cpu');
+
+end
diff --git a/matlab/+caffe/set_mode_gpu.m b/matlab/+caffe/set_mode_gpu.m
new file mode 100644
index 0000000..78e5f67
--- /dev/null
+++ b/matlab/+caffe/set_mode_gpu.m
@@ -0,0 +1,7 @@
+function set_mode_gpu()
+% set_mode_gpu()
+%   set Caffe to GPU mode
+
+caffe_('set_mode_gpu');
+
+end
diff --git a/matlab/CMakeLists.txt b/matlab/CMakeLists.txt
new file mode 100644
index 0000000..4b0d549
--- /dev/null
+++ b/matlab/CMakeLists.txt
@@ -0,0 +1,72 @@
+# Builds the Matlab (or Octave) interface. For Matlab, caffe must be
+# compiled as a shared library. Octave can link against a static or a shared caffe library.
+# To install Octave run: sudo apt-get install liboctave-dev
+
+if(NOT BUILD_matlab)
+  return()
+endif()
+
+if(HAVE_MATLAB AND Octave_compiler)
+  set(build_using ${Matlab_build_mex_using})
+elseif(HAVE_MATLAB AND NOT Octave_compiler)
+  set(build_using "Matlab")
+elseif(NOT HAVE_MATLAB AND Octave_compiler)
+  set(build_using "Octave")
+else()
+  return()
+endif()
+
+if(NOT BUILD_SHARED_LIBS AND build_using MATCHES Matlab)
+  message(FATAL_ERROR "Matlab MEX interface (with default mex options file) can only be built if caffe is compiled as shared library. Please enable 'BUILD_SHARED_LIBS' in CMake. Aternativelly you can switch to Octave compiler.")
+endif()
+
+# helper function to set the proper mex file extension
+function(caffe_fetch_and_set_proper_mexext mexfile_variable)
+  execute_process(COMMAND ${Matlab_mexext} OUTPUT_STRIP_TRAILING_WHITESPACE RESULT_VARIABLE res OUTPUT_VARIABLE ext)
+  if(res MATCHES 0)
+    get_filename_component(folder  ${${mexfile_variable}} PATH)
+    get_filename_component(name_we ${${mexfile_variable}} NAME_WE)
+    set(${mexfile_variable} ${folder}/${name_we}.${ext} PARENT_SCOPE)
+  endif()
+endfunction()
+
+# global settings
+file(GLOB Matlab_srcs +caffe/private/caffe_.cpp)
+set(Matlab_caffe_mex ${PROJECT_SOURCE_DIR}/matlab/+caffe/private/caffe_.mex)
+
+caffe_get_current_cflags(cflags)
+caffe_parse_linker_libs(Caffe_LINKER_LIBS folders libflags macos_frameworks)
+set(folders $<TARGET_LINKER_FILE_DIR:caffe> ${folders})
+
+# prepare linker flag lists
+string(REPLACE ";" ";-L" link_folders "-L${folders}")
+string(REPLACE ";" ":"  rpath_folders   "${folders}")
+
+if(build_using MATCHES "Matlab")
+  set(libflags -lcaffe${CAffe_POSTFIX} ${libflags}) # Matlab R2014a complains about -Wl,--whole-archive
+
+  caffe_fetch_and_set_proper_mexext(Matlab_caffe_mex)
+  add_custom_command(OUTPUT ${Matlab_caffe_mex} COMMAND ${Matlab_mex}
+      ARGS -output ${Matlab_caffe_mex} ${Matlab_srcs} ${cflags} ${link_folders} ${libflags}
+      DEPENDS caffe COMMENT "Building Matlab interface: ${Matlab_caffe_mex}" VERBATIM)
+  add_custom_target(matlab ALL DEPENDS ${Matlab_caffe_mex} SOURCES ${Matlab_srcs})
+
+elseif(build_using MATCHES "Octave")
+
+  if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
+    set(libflags -Wl,-force_load,$<TARGET_LINKER_FILE:caffe> ${libflags})
+  elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
+    set(libflags -Wl,--whole-archive -lcaffe${CAffe_POSTFIX} -Wl,--no-whole-archive ${libflags})
+  endif()
+
+  add_custom_command(OUTPUT ${Matlab_caffe_mex} COMMAND ${Octave_compiler}
+      ARGS --mex -o ${Matlab_caffe_mex} ${Matlab_srcs} ${cflags} ${link_folders} ${libflags} -Wl,-rpath,${rpath_folders}
+      DEPENDS caffe COMMENT "Building Octave interface: ${Matlab_caffe_mex}" VERBATIM)
+
+  add_custom_target(octave ALL DEPENDS ${Matlab_caffe_mex} SOURCES ${Matlab_srcs})
+endif()
+
+# ---[ Install
+file(GLOB mfiles caffe/*.m)
+install(FILES ${mfiles} ${Matlab_caffe_mex} DESTINATION matlab)
+
diff --git a/matlab/demo/classification_demo.m b/matlab/demo/classification_demo.m
new file mode 100644
index 0000000..2b60332
--- /dev/null
+++ b/matlab/demo/classification_demo.m
@@ -0,0 +1,147 @@
+function [scores, maxlabel] = classification_demo(im, use_gpu)
+% [scores, maxlabel] = classification_demo(im, use_gpu)
+%
+% Image classification demo using BVLC CaffeNet.
+%
+% IMPORTANT: before you run this demo, you should download BVLC CaffeNet
+% from Model Zoo (http://caffe.berkeleyvision.org/model_zoo.html)
+%
+% ****************************************************************************
+% For detailed documentation and usage on Caffe's Matlab interface, please
+% refer to Caffe Interface Tutorial at
+% http://caffe.berkeleyvision.org/tutorial/interfaces.html#matlab
+% ****************************************************************************
+%
+% input
+%   im       color image as uint8 HxWx3
+%   use_gpu  1 to use the GPU, 0 to use the CPU
+%
+% output
+%   scores   1000-dimensional ILSVRC score vector
+%   maxlabel the label of the highest score
+%
+% You may need to do the following before you start matlab:
+%  $ export LD_LIBRARY_PATH=/opt/intel/mkl/lib/intel64:/usr/local/cuda-5.5/lib64
+%  $ export LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libstdc++.so.6
+% Or the equivalent based on where things are installed on your system
+%
+% Usage:
+%  im = imread('../../examples/images/cat.jpg');
+%  scores = classification_demo(im, 1);
+%  [score, class] = max(scores);
+% Five things to be aware of:
+%   caffe uses row-major order
+%   matlab uses column-major order
+%   caffe uses BGR color channel order
+%   matlab uses RGB color channel order
+%   images need to have the data mean subtracted
+
+% Data coming in from matlab needs to be in the order
+%   [width, height, channels, images]
+% where width is the fastest dimension.
+% Here is the rough matlab for putting image data into the correct
+% format in W x H x C with BGR channels:
+%   % permute channels from RGB to BGR
+%   im_data = im(:, :, [3, 2, 1]);
+%   % flip width and height to make width the fastest dimension
+%   im_data = permute(im_data, [2, 1, 3]);
+%   % convert from uint8 to single
+%   im_data = single(im_data);
+%   % reshape to a fixed size (e.g., 227x227).
+%   im_data = imresize(im_data, [IMAGE_DIM IMAGE_DIM], 'bilinear');
+%   % subtract mean_data (already in W x H x C with BGR channels)
+%   im_data = im_data - mean_data;
+
+% If you have multiple images, cat them with cat(4, ...)
+
+% Add caffe/matlab to your Matlab search PATH in order to use matcaffe
+if exist('../+caffe', 'dir')
+  addpath('..');
+else
+  error('Please run this demo from caffe/matlab/demo');
+end
+
+% Set caffe mode
+if exist('use_gpu', 'var') && use_gpu
+  caffe.set_mode_gpu();
+  gpu_id = 0;  % we will use the first gpu in this demo
+  caffe.set_device(gpu_id);
+else
+  caffe.set_mode_cpu();
+end
+
+% Initialize the network using BVLC CaffeNet for image classification
+% Weights (parameter) file needs to be downloaded from Model Zoo.
+model_dir = '../../models/bvlc_reference_caffenet/';
+net_model = [model_dir 'deploy.prototxt'];
+net_weights = [model_dir 'bvlc_reference_caffenet.caffemodel'];
+phase = 'test'; % run with phase test (so that dropout isn't applied)
+if ~exist(net_weights, 'file')
+  error('Please download CaffeNet from Model Zoo before you run this demo');
+end
+
+% Initialize a network
+net = caffe.Net(net_model, net_weights, phase);
+
+if nargin < 1
+  % For demo purposes we will use the cat image
+  fprintf('using caffe/examples/images/cat.jpg as input image\n');
+  im = imread('../../examples/images/cat.jpg');
+end
+
+% prepare oversampled input
+% input_data is Height x Width x Channel x Num
+tic;
+input_data = {prepare_image(im)};
+toc;
+
+% do forward pass to get scores
+% scores are now Channels x Num, where Channels == 1000
+tic;
+% The net forward function. It takes in a cell array of N-D arrays
+% (where N == 4 here) containing data of input blob(s) and outputs a cell
+% array containing data from output blob(s)
+scores = net.forward(input_data);
+toc;
+
+scores = scores{1};
+scores = mean(scores, 2);  % take average scores over 10 crops
+
+[~, maxlabel] = max(scores);
+
+% call caffe.reset_all() to reset caffe
+caffe.reset_all();
+
+% ------------------------------------------------------------------------
+function crops_data = prepare_image(im)
+% ------------------------------------------------------------------------
+% caffe/matlab/+caffe/imagenet/ilsvrc_2012_mean.mat contains mean_data that
+% is already in W x H x C with BGR channels
+d = load('../+caffe/imagenet/ilsvrc_2012_mean.mat');
+mean_data = d.mean_data;
+IMAGE_DIM = 256;
+CROPPED_DIM = 227;
+
+% Convert an image returned by Matlab's imread to im_data in caffe's data
+% format: W x H x C with BGR channels
+im_data = im(:, :, [3, 2, 1]);  % permute channels from RGB to BGR
+im_data = permute(im_data, [2, 1, 3]);  % flip width and height
+im_data = single(im_data);  % convert from uint8 to single
+im_data = imresize(im_data, [IMAGE_DIM IMAGE_DIM], 'bilinear');  % resize im_data
+im_data = im_data - mean_data;  % subtract mean_data (already in W x H x C, BGR)
+
+% oversample (4 corners, center, and their x-axis flips)
+crops_data = zeros(CROPPED_DIM, CROPPED_DIM, 3, 10, 'single');
+indices = [0 IMAGE_DIM-CROPPED_DIM] + 1;
+n = 1;
+for i = indices
+  for j = indices
+    crops_data(:, :, :, n) = im_data(i:i+CROPPED_DIM-1, j:j+CROPPED_DIM-1, :);
+    crops_data(:, :, :, n+5) = crops_data(end:-1:1, :, :, n);
+    n = n + 1;
+  end
+end
+center = floor(indices(2) / 2) + 1;
+crops_data(:,:,:,5) = ...
+  im_data(center:center+CROPPED_DIM-1,center:center+CROPPED_DIM-1,:);
+crops_data(:,:,:,10) = crops_data(end:-1:1, :, :, 5);
diff --git a/matlab/hdf5creation/.gitignore b/matlab/hdf5creation/.gitignore
new file mode 100644
index 0000000..e2333dd
--- /dev/null
+++ b/matlab/hdf5creation/.gitignore
@@ -0,0 +1,2 @@
+*.h5
+list.txt
diff --git a/matlab/hdf5creation/demo.m b/matlab/hdf5creation/demo.m
new file mode 100644
index 0000000..4f9f7b5
--- /dev/null
+++ b/matlab/hdf5creation/demo.m
@@ -0,0 +1,64 @@
+%% WRITING TO HDF5
+filename='trial.h5';
+
+num_total_samples=10000;
+% to simulate data being read from disk / generated etc.
+data_disk=rand(5,5,1,num_total_samples); 
+label_disk=rand(10,num_total_samples); 
+
+chunksz=100;
+created_flag=false;
+totalct=0;
+for batchno=1:num_total_samples/chunksz
+  fprintf('batch no. %d\n', batchno);
+  last_read=(batchno-1)*chunksz;
+
+  % to simulate maximum data to be held in memory before dumping to hdf5 file 
+  batchdata=data_disk(:,:,1,last_read+1:last_read+chunksz); 
+  batchlabs=label_disk(:,last_read+1:last_read+chunksz);
+
+  % store to hdf5
+  startloc=struct('dat',[1,1,1,totalct+1], 'lab', [1,totalct+1]);
+  curr_dat_sz=store2hdf5(filename, batchdata, batchlabs, ~created_flag, startloc, chunksz); 
+  created_flag=true;% flag set so that file is created only once
+  totalct=curr_dat_sz(end);% updated dataset size (#samples)
+end
+
+% display structure of the stored HDF5 file
+h5disp(filename);
+
+%% READING FROM HDF5
+
+% Read data and labels for samples #1000 to 1999
+data_rd=h5read(filename, '/data', [1 1 1 1000], [5, 5, 1, 1000]);
+label_rd=h5read(filename, '/label', [1 1000], [10, 1000]);
+fprintf('Testing ...\n');
+try 
+  assert(isequal(data_rd, single(data_disk(:,:,:,1000:1999))), 'Data do not match');
+  assert(isequal(label_rd, single(label_disk(:,1000:1999))), 'Labels do not match');
+
+  fprintf('Success!\n');
+catch err
+  fprintf('Test failed ...\n');
+  getReport(err)
+end
+
+%delete(filename);
+
+% CREATE list.txt containing filename, to be used as source for HDF5_DATA_LAYER
+FILE=fopen('list.txt', 'w');
+fprintf(FILE, '%s', filename);
+fclose(FILE);
+fprintf('HDF5 filename listed in %s \n', 'list.txt');
+
+% NOTE: In net definition prototxt, use list.txt as input to HDF5_DATA as: 
+% layer {
+%   name: "data"
+%   type: "HDF5Data"
+%   top: "data"
+%   top: "labelvec"
+%   hdf5_data_param {
+%     source: "/path/to/list.txt"
+%     batch_size: 64
+%   }
+% }
diff --git a/matlab/hdf5creation/store2hdf5.m b/matlab/hdf5creation/store2hdf5.m
new file mode 100644
index 0000000..0a0016d
--- /dev/null
+++ b/matlab/hdf5creation/store2hdf5.m
@@ -0,0 +1,59 @@
+function [curr_dat_sz, curr_lab_sz] = store2hdf5(filename, data, labels, create, startloc, chunksz)  
+  % *data* is a W*H*C*N matrix of images; it should be normalized (e.g. to lie between 0 and 1) beforehand
+  % *labels* is a D*N matrix of labels (D labels per sample)
+  % *create* [0/1] specifies whether to create the file anew or to append to a previously created file;
+  % appending is useful for storing information in batches when a dataset is too big to be held in memory (default: 1)
+  % *startloc* is the point at which to start writing data. By default,
+  % if create=1 (create mode), startloc.dat=[1 1 1 1] and startloc.lab=[1 1];
+  % if create=0 (append mode), startloc.dat=[1 1 1 K+1] and startloc.lab=[1 K+1], where K is the number of samples already stored in the HDF5 file
+  % *chunksz* (used only in create mode) specifies the number of samples stored per chunk (see the HDF5 documentation on chunking) when creating HDF5 files with unbounded maximum size; in short, larger chunk sizes allow faster read/write operations
+
+  % verify that format is right
+  dat_dims=size(data);
+  lab_dims=size(labels);
+  num_samples=dat_dims(end);
+
+  assert(lab_dims(end)==num_samples, 'Number of samples should be matched between data and labels');
+
+  if ~exist('create','var')
+    create=true;
+  end
+
+  
+  if create
+    %fprintf('Creating dataset with %d samples\n', num_samples);
+    if ~exist('chunksz', 'var')
+      chunksz=1000;
+    end
+    if exist(filename, 'file')
+      fprintf('Warning: replacing existing file %s \n', filename);
+      delete(filename);
+    end      
+    h5create(filename, '/data', [dat_dims(1:end-1) Inf], 'Datatype', 'single', 'ChunkSize', [dat_dims(1:end-1) chunksz]); % width, height, channels, number 
+    h5create(filename, '/label', [lab_dims(1:end-1) Inf], 'Datatype', 'single', 'ChunkSize', [lab_dims(1:end-1) chunksz]); % label dimension, number
+    if ~exist('startloc','var') 
+      startloc.dat=[ones(1,length(dat_dims)-1), 1];
+      startloc.lab=[ones(1,length(lab_dims)-1), 1];
+    end 
+  else  % append mode
+    if ~exist('startloc','var')
+      info=h5info(filename);
+      prev_dat_sz=info.Datasets(1).Dataspace.Size;
+      prev_lab_sz=info.Datasets(2).Dataspace.Size;
+      assert(prev_dat_sz(1:end-1)==dat_dims(1:end-1), 'Data dimensions must match existing dimensions in dataset');
+      assert(prev_lab_sz(1:end-1)==lab_dims(1:end-1), 'Label dimensions must match existing dimensions in dataset');
+      startloc.dat=[ones(1,length(dat_dims)-1), prev_dat_sz(end)+1];
+      startloc.lab=[ones(1,length(lab_dims)-1), prev_lab_sz(end)+1];
+    end
+  end
+
+  if ~isempty(data)
+    h5write(filename, '/data', single(data), startloc.dat, size(data));
+    h5write(filename, '/label', single(labels), startloc.lab, size(labels));  
+  end
+
+  if nargout
+    info=h5info(filename);
+    curr_dat_sz=info.Datasets(1).Dataspace.Size;
+    curr_lab_sz=info.Datasets(2).Dataspace.Size;
+  end
+end
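A minimal sketch of the create/append pattern documented above, mirroring demo.m; the file name is a placeholder:

    chunksz = 50;
    data   = single(rand(5, 5, 1, 200));   % W x H x C x N
    labels = single(rand(10, 200));        % D x N
    startloc = struct('dat', [1 1 1 1], 'lab', [1 1]);
    store2hdf5('toy.h5', data(:,:,:,1:100), labels(:,1:100), true, startloc, chunksz);
    startloc = struct('dat', [1 1 1 101], 'lab', [1 101]);
    store2hdf5('toy.h5', data(:,:,:,101:200), labels(:,101:200), false, startloc);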
diff --git a/models/bvlc_alexnet/deploy.prototxt b/models/bvlc_alexnet/deploy.prototxt
new file mode 100644
index 0000000..ced055b
--- /dev/null
+++ b/models/bvlc_alexnet/deploy.prototxt
@@ -0,0 +1,276 @@
+name: "AlexNet"
+input: "data"
+input_dim: 10
+input_dim: 3
+input_dim: 227
+input_dim: 227
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    kernel_size: 11
+    stride: 4
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "conv1"
+  top: "conv1"
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "conv1"
+  top: "norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "norm1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "pool1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 2
+    kernel_size: 5
+    group: 2
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "conv2"
+  top: "norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "norm2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "pool2"
+  top: "conv3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "conv4"
+  type: "Convolution"
+  bottom: "conv3"
+  top: "conv4"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    group: 2
+  }
+}
+layer {
+  name: "relu4"
+  type: "ReLU"
+  bottom: "conv4"
+  top: "conv4"
+}
+layer {
+  name: "conv5"
+  type: "Convolution"
+  bottom: "conv4"
+  top: "conv5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    group: 2
+  }
+}
+layer {
+  name: "relu5"
+  type: "ReLU"
+  bottom: "conv5"
+  top: "conv5"
+}
+layer {
+  name: "pool5"
+  type: "Pooling"
+  bottom: "conv5"
+  top: "pool5"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "fc6"
+  type: "InnerProduct"
+  bottom: "pool5"
+  top: "fc6"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+  }
+}
+layer {
+  name: "relu6"
+  type: "ReLU"
+  bottom: "fc6"
+  top: "fc6"
+}
+layer {
+  name: "drop6"
+  type: "Dropout"
+  bottom: "fc6"
+  top: "fc6"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc7"
+  type: "InnerProduct"
+  bottom: "fc6"
+  top: "fc7"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+  }
+}
+layer {
+  name: "relu7"
+  type: "ReLU"
+  bottom: "fc7"
+  top: "fc7"
+}
+layer {
+  name: "drop7"
+  type: "Dropout"
+  bottom: "fc7"
+  top: "fc7"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc8"
+  type: "InnerProduct"
+  bottom: "fc7"
+  top: "fc8"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 1000
+  }
+}
+layer {
+  name: "prob"
+  type: "Softmax"
+  bottom: "fc8"
+  top: "prob"
+}
diff --git a/models/bvlc_alexnet/readme.md b/models/bvlc_alexnet/readme.md
new file mode 100644
index 0000000..008d690
--- /dev/null
+++ b/models/bvlc_alexnet/readme.md
@@ -0,0 +1,25 @@
+---
+name: BVLC AlexNet Model
+caffemodel: bvlc_alexnet.caffemodel
+caffemodel_url: http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel
+license: unrestricted
+sha1: 9116a64c0fbe4459d18f4bb6b56d647b63920377
+caffe_commit: 709dc15af4a06bebda027c1eb2b3f3e3375d5077
+---
+
+This model is a replication of the model described in the [AlexNet](http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks) publication.
+
+Differences:
+- not training with the relighting data-augmentation;
+- initializing non-zero biases to 0.1 instead of 1 (found necessary for training, as initialization to 1 gave flat loss).
+
+The bundled model is the iteration 360,000 snapshot.
+The best validation performance during training was iteration 358,000 with validation accuracy 57.258% and loss 1.83948.
+This model obtains a top-1 accuracy of 57.1% and a top-5 accuracy of 80.2% on the validation set, using just the center crop.
+(Averaging predictions over 10 crops, i.e. the 4 corners plus the center, each with its mirror, should give slightly higher accuracy.)
+
+This model was trained by Evan Shelhamer @shelhamer.
+
+## License
+
+This model is released for unrestricted use.
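
To reproduce the center-crop evaluation quoted above, the deploy network can be loaded through pycaffe. The following is a minimal sketch, assuming the bvlc_alexnet.caffemodel has been downloaded into models/bvlc_alexnet/ and that the input image path and the per-channel mean values are illustrative placeholders:

```python
import numpy as np
import caffe

caffe.set_mode_cpu()

# Load the deploy network with the pretrained weights (paths are assumptions).
net = caffe.Net('models/bvlc_alexnet/deploy.prototxt',
                'models/bvlc_alexnet/bvlc_alexnet.caffemodel',
                caffe.TEST)

# Preprocess inputs the same way as train_val.prototxt: BGR, mean-subtracted, 227x227 crop.
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2, 0, 1))              # HxWxC -> CxHxW
transformer.set_mean('data', np.array([104, 117, 123]))   # approximate ImageNet per-channel mean
transformer.set_raw_scale('data', 255)                    # caffe.io loads images in [0, 1]
transformer.set_channel_swap('data', (2, 1, 0))           # RGB -> BGR

image = caffe.io.load_image('cat.jpg')                    # illustrative input image
net.blobs['data'].reshape(1, 3, 227, 227)
net.blobs['data'].data[...] = transformer.preprocess('data', image)

# Forward pass; 'prob' is the Softmax output defined at the end of deploy.prototxt.
prob = net.forward()['prob'][0]
print('predicted class:', prob.argmax())
```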
diff --git a/models/bvlc_alexnet/solver.prototxt b/models/bvlc_alexnet/solver.prototxt
new file mode 100644
index 0000000..129265e
--- /dev/null
+++ b/models/bvlc_alexnet/solver.prototxt
@@ -0,0 +1,14 @@
+net: "models/bvlc_alexnet/train_val.prototxt"
+test_iter: 1000
+test_interval: 1000
+base_lr: 0.01
+lr_policy: "step"
+gamma: 0.1
+stepsize: 100000
+display: 20
+max_iter: 450000
+momentum: 0.9
+weight_decay: 0.0005
+snapshot: 10000
+snapshot_prefix: "models/bvlc_alexnet/caffe_alexnet_train"
+solver_mode: GPU
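
This solver is normally driven with the caffe tool, e.g. `./build/tools/caffe train --solver=models/bvlc_alexnet/solver.prototxt`, but the same configuration can also be stepped from Python. A rough sketch, assuming pycaffe is built and the ImageNet LMDBs referenced by train_val.prototxt exist:

```python
import caffe

caffe.set_mode_gpu()   # solver.prototxt requests solver_mode: GPU

# Load the SGD solver defined above; it instantiates train_val.prototxt internally.
solver = caffe.SGDSolver('models/bvlc_alexnet/solver.prototxt')

# Either run the full schedule (max_iter: 450000) ...
# solver.solve()

# ... or step manually and inspect the training loss as you go.
for _ in range(20):
    solver.step(1)                         # one forward/backward pass plus a weight update
    print(float(solver.net.blobs['loss'].data))
```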
diff --git a/models/bvlc_alexnet/train_val.prototxt b/models/bvlc_alexnet/train_val.prototxt
new file mode 100644
index 0000000..588b4ea
--- /dev/null
+++ b/models/bvlc_alexnet/train_val.prototxt
@@ -0,0 +1,384 @@
+name: "AlexNet"
+layer {
+  name: "data"
+  type: "Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TRAIN
+  }
+  transform_param {
+    mirror: true
+    crop_size: 227
+    mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
+  }
+  data_param {
+    source: "examples/imagenet/ilsvrc12_train_lmdb"
+    batch_size: 256
+    backend: LMDB
+  }
+}
+layer {
+  name: "data"
+  type: "Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TEST
+  }
+  transform_param {
+    mirror: false
+    crop_size: 227
+    mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
+  }
+  data_param {
+    source: "examples/imagenet/ilsvrc12_val_lmdb"
+    batch_size: 50
+    backend: LMDB
+  }
+}
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    kernel_size: 11
+    stride: 4
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "conv1"
+  top: "conv1"
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "conv1"
+  top: "norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "norm1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "pool1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 2
+    kernel_size: 5
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.1
+    }
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "conv2"
+  top: "norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "norm2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "pool2"
+  top: "conv3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "conv4"
+  type: "Convolution"
+  bottom: "conv3"
+  top: "conv4"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.1
+    }
+  }
+}
+layer {
+  name: "relu4"
+  type: "ReLU"
+  bottom: "conv4"
+  top: "conv4"
+}
+layer {
+  name: "conv5"
+  type: "Convolution"
+  bottom: "conv4"
+  top: "conv5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.1
+    }
+  }
+}
+layer {
+  name: "relu5"
+  type: "ReLU"
+  bottom: "conv5"
+  top: "conv5"
+}
+layer {
+  name: "pool5"
+  type: "Pooling"
+  bottom: "conv5"
+  top: "pool5"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "fc6"
+  type: "InnerProduct"
+  bottom: "pool5"
+  top: "fc6"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+    weight_filler {
+      type: "gaussian"
+      std: 0.005
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.1
+    }
+  }
+}
+layer {
+  name: "relu6"
+  type: "ReLU"
+  bottom: "fc6"
+  top: "fc6"
+}
+layer {
+  name: "drop6"
+  type: "Dropout"
+  bottom: "fc6"
+  top: "fc6"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc7"
+  type: "InnerProduct"
+  bottom: "fc6"
+  top: "fc7"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+    weight_filler {
+      type: "gaussian"
+      std: 0.005
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.1
+    }
+  }
+}
+layer {
+  name: "relu7"
+  type: "ReLU"
+  bottom: "fc7"
+  top: "fc7"
+}
+layer {
+  name: "drop7"
+  type: "Dropout"
+  bottom: "fc7"
+  top: "fc7"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc8"
+  type: "InnerProduct"
+  bottom: "fc7"
+  top: "fc8"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 1000
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "accuracy"
+  type: "Accuracy"
+  bottom: "fc8"
+  bottom: "label"
+  top: "accuracy"
+  include {
+    phase: TEST
+  }
+}
+layer {
+  name: "loss"
+  type: "SoftmaxWithLoss"
+  bottom: "fc8"
+  bottom: "label"
+  top: "loss"
+}
diff --git a/models/bvlc_googlenet/deploy.prototxt b/models/bvlc_googlenet/deploy.prototxt
new file mode 100644
index 0000000..4648bf2
--- /dev/null
+++ b/models/bvlc_googlenet/deploy.prototxt
@@ -0,0 +1,2156 @@
+name: "GoogleNet"
+input: "data"
+input_dim: 10
+input_dim: 3
+input_dim: 224
+input_dim: 224
+layer {
+  name: "conv1/7x7_s2"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1/7x7_s2"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    pad: 3
+    kernel_size: 7
+    stride: 2
+    weight_filler {
+      type: "xavier"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "conv1/relu_7x7"
+  type: "ReLU"
+  bottom: "conv1/7x7_s2"
+  top: "conv1/7x7_s2"
+}
+layer {
+  name: "pool1/3x3_s2"
+  type: "Pooling"
+  bottom: "conv1/7x7_s2"
+  top: "pool1/3x3_s2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "pool1/norm1"
+  type: "LRN"
+  bottom: "pool1/3x3_s2"
+  top: "pool1/norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv2/3x3_reduce"
+  type: "Convolution"
+  bottom: "pool1/norm1"
+  top: "conv2/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "conv2/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "conv2/3x3_reduce"
+  top: "conv2/3x3_reduce"
+}
+layer {
+  name: "conv2/3x3"
+  type: "Convolution"
+  bottom: "conv2/3x3_reduce"
+  top: "conv2/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 192
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "conv2/relu_3x3"
+  type: "ReLU"
+  bottom: "conv2/3x3"
+  top: "conv2/3x3"
+}
+layer {
+  name: "conv2/norm2"
+  type: "LRN"
+  bottom: "conv2/3x3"
+  top: "conv2/norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "pool2/3x3_s2"
+  type: "Pooling"
+  bottom: "conv2/norm2"
+  top: "pool2/3x3_s2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "inception_3a/1x1"
+  type: "Convolution"
+  bottom: "pool2/3x3_s2"
+  top: "inception_3a/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3a/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_3a/1x1"
+  top: "inception_3a/1x1"
+}
+layer {
+  name: "inception_3a/3x3_reduce"
+  type: "Convolution"
+  bottom: "pool2/3x3_s2"
+  top: "inception_3a/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.09
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3a/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_3a/3x3_reduce"
+  top: "inception_3a/3x3_reduce"
+}
+layer {
+  name: "inception_3a/3x3"
+  type: "Convolution"
+  bottom: "inception_3a/3x3_reduce"
+  top: "inception_3a/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3a/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_3a/3x3"
+  top: "inception_3a/3x3"
+}
+layer {
+  name: "inception_3a/5x5_reduce"
+  type: "Convolution"
+  bottom: "pool2/3x3_s2"
+  top: "inception_3a/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 16
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.2
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3a/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_3a/5x5_reduce"
+  top: "inception_3a/5x5_reduce"
+}
+layer {
+  name: "inception_3a/5x5"
+  type: "Convolution"
+  bottom: "inception_3a/5x5_reduce"
+  top: "inception_3a/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 32
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3a/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_3a/5x5"
+  top: "inception_3a/5x5"
+}
+layer {
+  name: "inception_3a/pool"
+  type: "Pooling"
+  bottom: "pool2/3x3_s2"
+  top: "inception_3a/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_3a/pool_proj"
+  type: "Convolution"
+  bottom: "inception_3a/pool"
+  top: "inception_3a/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 32
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3a/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_3a/pool_proj"
+  top: "inception_3a/pool_proj"
+}
+layer {
+  name: "inception_3a/output"
+  type: "Concat"
+  bottom: "inception_3a/1x1"
+  bottom: "inception_3a/3x3"
+  bottom: "inception_3a/5x5"
+  bottom: "inception_3a/pool_proj"
+  top: "inception_3a/output"
+}
+layer {
+  name: "inception_3b/1x1"
+  type: "Convolution"
+  bottom: "inception_3a/output"
+  top: "inception_3b/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3b/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_3b/1x1"
+  top: "inception_3b/1x1"
+}
+layer {
+  name: "inception_3b/3x3_reduce"
+  type: "Convolution"
+  bottom: "inception_3a/output"
+  top: "inception_3b/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.09
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3b/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_3b/3x3_reduce"
+  top: "inception_3b/3x3_reduce"
+}
+layer {
+  name: "inception_3b/3x3"
+  type: "Convolution"
+  bottom: "inception_3b/3x3_reduce"
+  top: "inception_3b/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 192
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3b/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_3b/3x3"
+  top: "inception_3b/3x3"
+}
+layer {
+  name: "inception_3b/5x5_reduce"
+  type: "Convolution"
+  bottom: "inception_3a/output"
+  top: "inception_3b/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 32
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.2
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3b/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_3b/5x5_reduce"
+  top: "inception_3b/5x5_reduce"
+}
+layer {
+  name: "inception_3b/5x5"
+  type: "Convolution"
+  bottom: "inception_3b/5x5_reduce"
+  top: "inception_3b/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3b/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_3b/5x5"
+  top: "inception_3b/5x5"
+}
+layer {
+  name: "inception_3b/pool"
+  type: "Pooling"
+  bottom: "inception_3a/output"
+  top: "inception_3b/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_3b/pool_proj"
+  type: "Convolution"
+  bottom: "inception_3b/pool"
+  top: "inception_3b/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3b/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_3b/pool_proj"
+  top: "inception_3b/pool_proj"
+}
+layer {
+  name: "inception_3b/output"
+  type: "Concat"
+  bottom: "inception_3b/1x1"
+  bottom: "inception_3b/3x3"
+  bottom: "inception_3b/5x5"
+  bottom: "inception_3b/pool_proj"
+  top: "inception_3b/output"
+}
+layer {
+  name: "pool3/3x3_s2"
+  type: "Pooling"
+  bottom: "inception_3b/output"
+  top: "pool3/3x3_s2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "inception_4a/1x1"
+  type: "Convolution"
+  bottom: "pool3/3x3_s2"
+  top: "inception_4a/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 192
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4a/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_4a/1x1"
+  top: "inception_4a/1x1"
+}
+layer {
+  name: "inception_4a/3x3_reduce"
+  type: "Convolution"
+  bottom: "pool3/3x3_s2"
+  top: "inception_4a/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.09
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4a/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_4a/3x3_reduce"
+  top: "inception_4a/3x3_reduce"
+}
+layer {
+  name: "inception_4a/3x3"
+  type: "Convolution"
+  bottom: "inception_4a/3x3_reduce"
+  top: "inception_4a/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 208
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4a/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_4a/3x3"
+  top: "inception_4a/3x3"
+}
+layer {
+  name: "inception_4a/5x5_reduce"
+  type: "Convolution"
+  bottom: "pool3/3x3_s2"
+  top: "inception_4a/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 16
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.2
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4a/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_4a/5x5_reduce"
+  top: "inception_4a/5x5_reduce"
+}
+layer {
+  name: "inception_4a/5x5"
+  type: "Convolution"
+  bottom: "inception_4a/5x5_reduce"
+  top: "inception_4a/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 48
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4a/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_4a/5x5"
+  top: "inception_4a/5x5"
+}
+layer {
+  name: "inception_4a/pool"
+  type: "Pooling"
+  bottom: "pool3/3x3_s2"
+  top: "inception_4a/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_4a/pool_proj"
+  type: "Convolution"
+  bottom: "inception_4a/pool"
+  top: "inception_4a/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4a/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_4a/pool_proj"
+  top: "inception_4a/pool_proj"
+}
+layer {
+  name: "inception_4a/output"
+  type: "Concat"
+  bottom: "inception_4a/1x1"
+  bottom: "inception_4a/3x3"
+  bottom: "inception_4a/5x5"
+  bottom: "inception_4a/pool_proj"
+  top: "inception_4a/output"
+}
+layer {
+  name: "inception_4b/1x1"
+  type: "Convolution"
+  bottom: "inception_4a/output"
+  top: "inception_4b/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 160
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4b/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_4b/1x1"
+  top: "inception_4b/1x1"
+}
+layer {
+  name: "inception_4b/3x3_reduce"
+  type: "Convolution"
+  bottom: "inception_4a/output"
+  top: "inception_4b/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 112
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.09
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4b/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_4b/3x3_reduce"
+  top: "inception_4b/3x3_reduce"
+}
+layer {
+  name: "inception_4b/3x3"
+  type: "Convolution"
+  bottom: "inception_4b/3x3_reduce"
+  top: "inception_4b/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 224
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4b/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_4b/3x3"
+  top: "inception_4b/3x3"
+}
+layer {
+  name: "inception_4b/5x5_reduce"
+  type: "Convolution"
+  bottom: "inception_4a/output"
+  top: "inception_4b/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 24
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.2
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4b/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_4b/5x5_reduce"
+  top: "inception_4b/5x5_reduce"
+}
+layer {
+  name: "inception_4b/5x5"
+  type: "Convolution"
+  bottom: "inception_4b/5x5_reduce"
+  top: "inception_4b/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4b/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_4b/5x5"
+  top: "inception_4b/5x5"
+}
+layer {
+  name: "inception_4b/pool"
+  type: "Pooling"
+  bottom: "inception_4a/output"
+  top: "inception_4b/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_4b/pool_proj"
+  type: "Convolution"
+  bottom: "inception_4b/pool"
+  top: "inception_4b/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4b/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_4b/pool_proj"
+  top: "inception_4b/pool_proj"
+}
+layer {
+  name: "inception_4b/output"
+  type: "Concat"
+  bottom: "inception_4b/1x1"
+  bottom: "inception_4b/3x3"
+  bottom: "inception_4b/5x5"
+  bottom: "inception_4b/pool_proj"
+  top: "inception_4b/output"
+}
+layer {
+  name: "inception_4c/1x1"
+  type: "Convolution"
+  bottom: "inception_4b/output"
+  top: "inception_4c/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4c/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_4c/1x1"
+  top: "inception_4c/1x1"
+}
+layer {
+  name: "inception_4c/3x3_reduce"
+  type: "Convolution"
+  bottom: "inception_4b/output"
+  top: "inception_4c/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.09
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4c/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_4c/3x3_reduce"
+  top: "inception_4c/3x3_reduce"
+}
+layer {
+  name: "inception_4c/3x3"
+  type: "Convolution"
+  bottom: "inception_4c/3x3_reduce"
+  top: "inception_4c/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4c/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_4c/3x3"
+  top: "inception_4c/3x3"
+}
+layer {
+  name: "inception_4c/5x5_reduce"
+  type: "Convolution"
+  bottom: "inception_4b/output"
+  top: "inception_4c/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 24
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.2
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4c/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_4c/5x5_reduce"
+  top: "inception_4c/5x5_reduce"
+}
+layer {
+  name: "inception_4c/5x5"
+  type: "Convolution"
+  bottom: "inception_4c/5x5_reduce"
+  top: "inception_4c/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4c/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_4c/5x5"
+  top: "inception_4c/5x5"
+}
+layer {
+  name: "inception_4c/pool"
+  type: "Pooling"
+  bottom: "inception_4b/output"
+  top: "inception_4c/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_4c/pool_proj"
+  type: "Convolution"
+  bottom: "inception_4c/pool"
+  top: "inception_4c/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4c/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_4c/pool_proj"
+  top: "inception_4c/pool_proj"
+}
+layer {
+  name: "inception_4c/output"
+  type: "Concat"
+  bottom: "inception_4c/1x1"
+  bottom: "inception_4c/3x3"
+  bottom: "inception_4c/5x5"
+  bottom: "inception_4c/pool_proj"
+  top: "inception_4c/output"
+}
+layer {
+  name: "inception_4d/1x1"
+  type: "Convolution"
+  bottom: "inception_4c/output"
+  top: "inception_4d/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 112
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4d/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_4d/1x1"
+  top: "inception_4d/1x1"
+}
+layer {
+  name: "inception_4d/3x3_reduce"
+  type: "Convolution"
+  bottom: "inception_4c/output"
+  top: "inception_4d/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 144
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.09
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4d/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_4d/3x3_reduce"
+  top: "inception_4d/3x3_reduce"
+}
+layer {
+  name: "inception_4d/3x3"
+  type: "Convolution"
+  bottom: "inception_4d/3x3_reduce"
+  top: "inception_4d/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 288
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4d/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_4d/3x3"
+  top: "inception_4d/3x3"
+}
+layer {
+  name: "inception_4d/5x5_reduce"
+  type: "Convolution"
+  bottom: "inception_4c/output"
+  top: "inception_4d/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 32
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.2
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4d/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_4d/5x5_reduce"
+  top: "inception_4d/5x5_reduce"
+}
+layer {
+  name: "inception_4d/5x5"
+  type: "Convolution"
+  bottom: "inception_4d/5x5_reduce"
+  top: "inception_4d/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4d/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_4d/5x5"
+  top: "inception_4d/5x5"
+}
+layer {
+  name: "inception_4d/pool"
+  type: "Pooling"
+  bottom: "inception_4c/output"
+  top: "inception_4d/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_4d/pool_proj"
+  type: "Convolution"
+  bottom: "inception_4d/pool"
+  top: "inception_4d/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4d/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_4d/pool_proj"
+  top: "inception_4d/pool_proj"
+}
+layer {
+  name: "inception_4d/output"
+  type: "Concat"
+  bottom: "inception_4d/1x1"
+  bottom: "inception_4d/3x3"
+  bottom: "inception_4d/5x5"
+  bottom: "inception_4d/pool_proj"
+  top: "inception_4d/output"
+}
+layer {
+  name: "inception_4e/1x1"
+  type: "Convolution"
+  bottom: "inception_4d/output"
+  top: "inception_4e/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4e/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_4e/1x1"
+  top: "inception_4e/1x1"
+}
+layer {
+  name: "inception_4e/3x3_reduce"
+  type: "Convolution"
+  bottom: "inception_4d/output"
+  top: "inception_4e/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 160
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.09
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4e/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_4e/3x3_reduce"
+  top: "inception_4e/3x3_reduce"
+}
+layer {
+  name: "inception_4e/3x3"
+  type: "Convolution"
+  bottom: "inception_4e/3x3_reduce"
+  top: "inception_4e/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 320
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4e/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_4e/3x3"
+  top: "inception_4e/3x3"
+}
+layer {
+  name: "inception_4e/5x5_reduce"
+  type: "Convolution"
+  bottom: "inception_4d/output"
+  top: "inception_4e/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 32
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.2
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4e/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_4e/5x5_reduce"
+  top: "inception_4e/5x5_reduce"
+}
+layer {
+  name: "inception_4e/5x5"
+  type: "Convolution"
+  bottom: "inception_4e/5x5_reduce"
+  top: "inception_4e/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4e/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_4e/5x5"
+  top: "inception_4e/5x5"
+}
+layer {
+  name: "inception_4e/pool"
+  type: "Pooling"
+  bottom: "inception_4d/output"
+  top: "inception_4e/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_4e/pool_proj"
+  type: "Convolution"
+  bottom: "inception_4e/pool"
+  top: "inception_4e/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4e/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_4e/pool_proj"
+  top: "inception_4e/pool_proj"
+}
+layer {
+  name: "inception_4e/output"
+  type: "Concat"
+  bottom: "inception_4e/1x1"
+  bottom: "inception_4e/3x3"
+  bottom: "inception_4e/5x5"
+  bottom: "inception_4e/pool_proj"
+  top: "inception_4e/output"
+}
+layer {
+  name: "pool4/3x3_s2"
+  type: "Pooling"
+  bottom: "inception_4e/output"
+  top: "pool4/3x3_s2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "inception_5a/1x1"
+  type: "Convolution"
+  bottom: "pool4/3x3_s2"
+  top: "inception_5a/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5a/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_5a/1x1"
+  top: "inception_5a/1x1"
+}
+layer {
+  name: "inception_5a/3x3_reduce"
+  type: "Convolution"
+  bottom: "pool4/3x3_s2"
+  top: "inception_5a/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 160
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.09
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5a/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_5a/3x3_reduce"
+  top: "inception_5a/3x3_reduce"
+}
+layer {
+  name: "inception_5a/3x3"
+  type: "Convolution"
+  bottom: "inception_5a/3x3_reduce"
+  top: "inception_5a/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 320
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5a/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_5a/3x3"
+  top: "inception_5a/3x3"
+}
+layer {
+  name: "inception_5a/5x5_reduce"
+  type: "Convolution"
+  bottom: "pool4/3x3_s2"
+  top: "inception_5a/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 32
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.2
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5a/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_5a/5x5_reduce"
+  top: "inception_5a/5x5_reduce"
+}
+layer {
+  name: "inception_5a/5x5"
+  type: "Convolution"
+  bottom: "inception_5a/5x5_reduce"
+  top: "inception_5a/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5a/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_5a/5x5"
+  top: "inception_5a/5x5"
+}
+layer {
+  name: "inception_5a/pool"
+  type: "Pooling"
+  bottom: "pool4/3x3_s2"
+  top: "inception_5a/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_5a/pool_proj"
+  type: "Convolution"
+  bottom: "inception_5a/pool"
+  top: "inception_5a/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5a/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_5a/pool_proj"
+  top: "inception_5a/pool_proj"
+}
+layer {
+  name: "inception_5a/output"
+  type: "Concat"
+  bottom: "inception_5a/1x1"
+  bottom: "inception_5a/3x3"
+  bottom: "inception_5a/5x5"
+  bottom: "inception_5a/pool_proj"
+  top: "inception_5a/output"
+}
+layer {
+  name: "inception_5b/1x1"
+  type: "Convolution"
+  bottom: "inception_5a/output"
+  top: "inception_5b/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5b/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_5b/1x1"
+  top: "inception_5b/1x1"
+}
+layer {
+  name: "inception_5b/3x3_reduce"
+  type: "Convolution"
+  bottom: "inception_5a/output"
+  top: "inception_5b/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 192
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.09
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5b/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_5b/3x3_reduce"
+  top: "inception_5b/3x3_reduce"
+}
+layer {
+  name: "inception_5b/3x3"
+  type: "Convolution"
+  bottom: "inception_5b/3x3_reduce"
+  top: "inception_5b/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5b/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_5b/3x3"
+  top: "inception_5b/3x3"
+}
+layer {
+  name: "inception_5b/5x5_reduce"
+  type: "Convolution"
+  bottom: "inception_5a/output"
+  top: "inception_5b/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 48
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.2
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5b/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_5b/5x5_reduce"
+  top: "inception_5b/5x5_reduce"
+}
+layer {
+  name: "inception_5b/5x5"
+  type: "Convolution"
+  bottom: "inception_5b/5x5_reduce"
+  top: "inception_5b/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+      std: 0.03
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5b/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_5b/5x5"
+  top: "inception_5b/5x5"
+}
+layer {
+  name: "inception_5b/pool"
+  type: "Pooling"
+  bottom: "inception_5a/output"
+  top: "inception_5b/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_5b/pool_proj"
+  type: "Convolution"
+  bottom: "inception_5b/pool"
+  top: "inception_5b/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+      std: 0.1
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5b/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_5b/pool_proj"
+  top: "inception_5b/pool_proj"
+}
+layer {
+  name: "inception_5b/output"
+  type: "Concat"
+  bottom: "inception_5b/1x1"
+  bottom: "inception_5b/3x3"
+  bottom: "inception_5b/5x5"
+  bottom: "inception_5b/pool_proj"
+  top: "inception_5b/output"
+}
+layer {
+  name: "pool5/7x7_s1"
+  type: "Pooling"
+  bottom: "inception_5b/output"
+  top: "pool5/7x7_s1"
+  pooling_param {
+    pool: AVE
+    kernel_size: 7
+    stride: 1
+  }
+}
+layer {
+  name: "pool5/drop_7x7_s1"
+  type: "Dropout"
+  bottom: "pool5/7x7_s1"
+  top: "pool5/7x7_s1"
+  dropout_param {
+    dropout_ratio: 0.4
+  }
+}
+layer {
+  name: "loss3/classifier"
+  type: "InnerProduct"
+  bottom: "pool5/7x7_s1"
+  top: "loss3/classifier"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 1000
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "prob"
+  type: "Softmax"
+  bottom: "loss3/classifier"
+  top: "prob"
+}
diff --git a/models/bvlc_googlenet/quick_solver.prototxt b/models/bvlc_googlenet/quick_solver.prototxt
new file mode 100644
index 0000000..5d2f7ee
--- /dev/null
+++ b/models/bvlc_googlenet/quick_solver.prototxt
@@ -0,0 +1,15 @@
+net: "models/bvlc_googlenet/train_val.prototxt"
+test_iter: 1000
+test_interval: 4000
+test_initialization: false
+display: 40
+average_loss: 40
+base_lr: 0.01
+lr_policy: "poly"
+power: 0.5
+max_iter: 2400000
+momentum: 0.9
+weight_decay: 0.0002
+snapshot: 40000
+snapshot_prefix: "models/bvlc_googlenet/bvlc_googlenet_quick"
+solver_mode: GPU
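
quick_solver.prototxt switches lr_policy from "step" to "poly"; in Caffe the "poly" policy decays the learning rate as base_lr * (1 - iter/max_iter)^power, so with power: 0.5 the rate falls smoothly from 0.01 to 0 over the 2,400,000 iterations. A small sketch of the implied schedule:

```python
# Learning-rate schedule implied by quick_solver.prototxt ("poly" policy).
base_lr, power, max_iter = 0.01, 0.5, 2400000

def poly_lr(it):
    # Caffe's "poly" policy: base_lr * (1 - iter/max_iter)^power
    return base_lr * (1.0 - float(it) / max_iter) ** power

for it in (0, 600000, 1200000, 1800000, 2400000):
    print(it, poly_lr(it))
```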
diff --git a/models/bvlc_googlenet/readme.md b/models/bvlc_googlenet/readme.md
new file mode 100644
index 0000000..061b6d7
--- /dev/null
+++ b/models/bvlc_googlenet/readme.md
@@ -0,0 +1,32 @@
+---
+name: BVLC GoogleNet Model
+caffemodel: bvlc_googlenet.caffemodel
+caffemodel_url: http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel
+license: unrestricted
+sha1: 405fc5acd08a3bb12de8ee5e23a96bec22f08204
+caffe_commit: bc614d1bd91896e3faceaf40b23b72dab47d44f5
+---
+
+This model is a replication of the model described in the [GoogleNet](http://arxiv.org/abs/1409.4842) publication. We would like to thank Christian Szegedy for all his help in replicating the GoogleNet model.
+
+Differences:
+- not training with the relighting data-augmentation;
+- not training with the scale or aspect-ratio data-augmentation;
+- uses "xavier" to initialize the weights instead of "gaussian";
+- quick_solver.prototxt uses a different learning rate decay policy than the original solver.prototxt, that allows a much faster training (60 epochs vs 250 epochs);
+
+The bundled model is the iteration 2,400,000 snapshot (60 epochs) obtained with quick_solver.prototxt.
+
+This bundled model obtains a top-1 accuracy of 68.7% (31.3% error) and a top-5 accuracy of 88.9% (11.1% error) on the validation set, using just the center crop.
+(Averaging the predictions over 10 crops, i.e. (4 corners + 1 center) * 2 mirrors, should give slightly higher accuracy.)
+
+Timings for bvlc_googlenet with cuDNN using batch_size:128 on a K40c:
+ - Average Forward pass: 562.841 ms.
+ - Average Backward pass: 1123.84 ms.
+ - Average Forward-Backward: 1688.8 ms.
+
+This model was trained by Sergio Guadarrama @sguada.
+
+## License
+
+This model is released for unrestricted use.
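
Both readmes quote center-crop numbers and note that averaging over 10 crops ((4 corners + 1 center) * 2 mirrors) improves them; pycaffe's caffe.io.oversample produces exactly those crops. A minimal sketch, assuming the bvlc_googlenet deploy network and weights have been downloaded and the input image path is an illustrative placeholder:

```python
import numpy as np
import caffe

caffe.set_mode_cpu()
net = caffe.Net('models/bvlc_googlenet/deploy.prototxt',
                'models/bvlc_googlenet/bvlc_googlenet.caffemodel',
                caffe.TEST)

# Resize the input to 256x256, then take the 10 standard crops at 224x224.
image = caffe.io.load_image('cat.jpg')                      # illustrative input image
resized = caffe.io.resize_image(image, (256, 256))
crops = caffe.io.oversample([resized], (224, 224))          # 4 corners + center, each mirrored

# Preprocess each crop (BGR, mean-subtracted) and average the 10 softmax outputs.
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2, 0, 1))
transformer.set_mean('data', np.array([104, 117, 123]))     # mean_value used in train_val.prototxt
transformer.set_raw_scale('data', 255)
transformer.set_channel_swap('data', (2, 1, 0))

net.blobs['data'].reshape(10, 3, 224, 224)
net.blobs['data'].data[...] = [transformer.preprocess('data', c) for c in crops]
prob = net.forward()['prob'].mean(axis=0)
print('predicted class:', prob.argmax())
```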
diff --git a/models/bvlc_googlenet/solver.prototxt b/models/bvlc_googlenet/solver.prototxt
new file mode 100644
index 0000000..d7d1788
--- /dev/null
+++ b/models/bvlc_googlenet/solver.prototxt
@@ -0,0 +1,16 @@
+net: "models/bvlc_googlenet/train_val.prototxt"
+test_iter: 1000
+test_interval: 4000
+test_initialization: false
+display: 40
+average_loss: 40
+base_lr: 0.01
+lr_policy: "step"
+stepsize: 320000
+gamma: 0.96
+max_iter: 10000000
+momentum: 0.9
+weight_decay: 0.0002
+snapshot: 40000
+snapshot_prefix: "models/bvlc_googlenet/bvlc_googlenet"
+solver_mode: GPU
diff --git a/models/bvlc_googlenet/train_val.prototxt b/models/bvlc_googlenet/train_val.prototxt
new file mode 100644
index 0000000..5dee3ab
--- /dev/null
+++ b/models/bvlc_googlenet/train_val.prototxt
@@ -0,0 +1,2433 @@
+name: "GoogleNet"
+layer {
+  name: "data"
+  type: "Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TRAIN
+  }
+  transform_param {
+    mirror: true
+    crop_size: 224
+    mean_value: 104
+    mean_value: 117
+    mean_value: 123
+  }
+  data_param {
+    source: "examples/imagenet/ilsvrc12_train_lmdb"
+    batch_size: 32
+    backend: LMDB
+  }
+}
+layer {
+  name: "data"
+  type: "Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TEST
+  }
+  transform_param {
+    mirror: false
+    crop_size: 224
+    mean_value: 104
+    mean_value: 117
+    mean_value: 123
+  }
+  data_param {
+    source: "examples/imagenet/ilsvrc12_val_lmdb"
+    batch_size: 50
+    backend: LMDB
+  }
+}
+layer {
+  name: "conv1/7x7_s2"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1/7x7_s2"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    pad: 3
+    kernel_size: 7
+    stride: 2
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "conv1/relu_7x7"
+  type: "ReLU"
+  bottom: "conv1/7x7_s2"
+  top: "conv1/7x7_s2"
+}
+layer {
+  name: "pool1/3x3_s2"
+  type: "Pooling"
+  bottom: "conv1/7x7_s2"
+  top: "pool1/3x3_s2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "pool1/norm1"
+  type: "LRN"
+  bottom: "pool1/3x3_s2"
+  top: "pool1/norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv2/3x3_reduce"
+  type: "Convolution"
+  bottom: "pool1/norm1"
+  top: "conv2/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "conv2/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "conv2/3x3_reduce"
+  top: "conv2/3x3_reduce"
+}
+layer {
+  name: "conv2/3x3"
+  type: "Convolution"
+  bottom: "conv2/3x3_reduce"
+  top: "conv2/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 192
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "conv2/relu_3x3"
+  type: "ReLU"
+  bottom: "conv2/3x3"
+  top: "conv2/3x3"
+}
+layer {
+  name: "conv2/norm2"
+  type: "LRN"
+  bottom: "conv2/3x3"
+  top: "conv2/norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "pool2/3x3_s2"
+  type: "Pooling"
+  bottom: "conv2/norm2"
+  top: "pool2/3x3_s2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "inception_3a/1x1"
+  type: "Convolution"
+  bottom: "pool2/3x3_s2"
+  top: "inception_3a/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3a/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_3a/1x1"
+  top: "inception_3a/1x1"
+}
+layer {
+  name: "inception_3a/3x3_reduce"
+  type: "Convolution"
+  bottom: "pool2/3x3_s2"
+  top: "inception_3a/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3a/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_3a/3x3_reduce"
+  top: "inception_3a/3x3_reduce"
+}
+layer {
+  name: "inception_3a/3x3"
+  type: "Convolution"
+  bottom: "inception_3a/3x3_reduce"
+  top: "inception_3a/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3a/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_3a/3x3"
+  top: "inception_3a/3x3"
+}
+layer {
+  name: "inception_3a/5x5_reduce"
+  type: "Convolution"
+  bottom: "pool2/3x3_s2"
+  top: "inception_3a/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 16
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3a/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_3a/5x5_reduce"
+  top: "inception_3a/5x5_reduce"
+}
+layer {
+  name: "inception_3a/5x5"
+  type: "Convolution"
+  bottom: "inception_3a/5x5_reduce"
+  top: "inception_3a/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 32
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3a/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_3a/5x5"
+  top: "inception_3a/5x5"
+}
+layer {
+  name: "inception_3a/pool"
+  type: "Pooling"
+  bottom: "pool2/3x3_s2"
+  top: "inception_3a/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_3a/pool_proj"
+  type: "Convolution"
+  bottom: "inception_3a/pool"
+  top: "inception_3a/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 32
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3a/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_3a/pool_proj"
+  top: "inception_3a/pool_proj"
+}
+layer {
+  name: "inception_3a/output"
+  type: "Concat"
+  bottom: "inception_3a/1x1"
+  bottom: "inception_3a/3x3"
+  bottom: "inception_3a/5x5"
+  bottom: "inception_3a/pool_proj"
+  top: "inception_3a/output"
+}
+layer {
+  name: "inception_3b/1x1"
+  type: "Convolution"
+  bottom: "inception_3a/output"
+  top: "inception_3b/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3b/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_3b/1x1"
+  top: "inception_3b/1x1"
+}
+layer {
+  name: "inception_3b/3x3_reduce"
+  type: "Convolution"
+  bottom: "inception_3a/output"
+  top: "inception_3b/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3b/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_3b/3x3_reduce"
+  top: "inception_3b/3x3_reduce"
+}
+layer {
+  name: "inception_3b/3x3"
+  type: "Convolution"
+  bottom: "inception_3b/3x3_reduce"
+  top: "inception_3b/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 192
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3b/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_3b/3x3"
+  top: "inception_3b/3x3"
+}
+layer {
+  name: "inception_3b/5x5_reduce"
+  type: "Convolution"
+  bottom: "inception_3a/output"
+  top: "inception_3b/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 32
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3b/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_3b/5x5_reduce"
+  top: "inception_3b/5x5_reduce"
+}
+layer {
+  name: "inception_3b/5x5"
+  type: "Convolution"
+  bottom: "inception_3b/5x5_reduce"
+  top: "inception_3b/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3b/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_3b/5x5"
+  top: "inception_3b/5x5"
+}
+layer {
+  name: "inception_3b/pool"
+  type: "Pooling"
+  bottom: "inception_3a/output"
+  top: "inception_3b/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_3b/pool_proj"
+  type: "Convolution"
+  bottom: "inception_3b/pool"
+  top: "inception_3b/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_3b/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_3b/pool_proj"
+  top: "inception_3b/pool_proj"
+}
+layer {
+  name: "inception_3b/output"
+  type: "Concat"
+  bottom: "inception_3b/1x1"
+  bottom: "inception_3b/3x3"
+  bottom: "inception_3b/5x5"
+  bottom: "inception_3b/pool_proj"
+  top: "inception_3b/output"
+}
+layer {
+  name: "pool3/3x3_s2"
+  type: "Pooling"
+  bottom: "inception_3b/output"
+  top: "pool3/3x3_s2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "inception_4a/1x1"
+  type: "Convolution"
+  bottom: "pool3/3x3_s2"
+  top: "inception_4a/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 192
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4a/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_4a/1x1"
+  top: "inception_4a/1x1"
+}
+layer {
+  name: "inception_4a/3x3_reduce"
+  type: "Convolution"
+  bottom: "pool3/3x3_s2"
+  top: "inception_4a/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4a/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_4a/3x3_reduce"
+  top: "inception_4a/3x3_reduce"
+}
+layer {
+  name: "inception_4a/3x3"
+  type: "Convolution"
+  bottom: "inception_4a/3x3_reduce"
+  top: "inception_4a/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 208
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4a/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_4a/3x3"
+  top: "inception_4a/3x3"
+}
+layer {
+  name: "inception_4a/5x5_reduce"
+  type: "Convolution"
+  bottom: "pool3/3x3_s2"
+  top: "inception_4a/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 16
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4a/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_4a/5x5_reduce"
+  top: "inception_4a/5x5_reduce"
+}
+layer {
+  name: "inception_4a/5x5"
+  type: "Convolution"
+  bottom: "inception_4a/5x5_reduce"
+  top: "inception_4a/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 48
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4a/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_4a/5x5"
+  top: "inception_4a/5x5"
+}
+layer {
+  name: "inception_4a/pool"
+  type: "Pooling"
+  bottom: "pool3/3x3_s2"
+  top: "inception_4a/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_4a/pool_proj"
+  type: "Convolution"
+  bottom: "inception_4a/pool"
+  top: "inception_4a/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4a/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_4a/pool_proj"
+  top: "inception_4a/pool_proj"
+}
+layer {
+  name: "inception_4a/output"
+  type: "Concat"
+  bottom: "inception_4a/1x1"
+  bottom: "inception_4a/3x3"
+  bottom: "inception_4a/5x5"
+  bottom: "inception_4a/pool_proj"
+  top: "inception_4a/output"
+}
+layer {
+  name: "loss1/ave_pool"
+  type: "Pooling"
+  bottom: "inception_4a/output"
+  top: "loss1/ave_pool"
+  pooling_param {
+    pool: AVE
+    kernel_size: 5
+    stride: 3
+  }
+}
+layer {
+  name: "loss1/conv"
+  type: "Convolution"
+  bottom: "loss1/ave_pool"
+  top: "loss1/conv"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "loss1/relu_conv"
+  type: "ReLU"
+  bottom: "loss1/conv"
+  top: "loss1/conv"
+}
+layer {
+  name: "loss1/fc"
+  type: "InnerProduct"
+  bottom: "loss1/conv"
+  top: "loss1/fc"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 1024
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "loss1/relu_fc"
+  type: "ReLU"
+  bottom: "loss1/fc"
+  top: "loss1/fc"
+}
+layer {
+  name: "loss1/drop_fc"
+  type: "Dropout"
+  bottom: "loss1/fc"
+  top: "loss1/fc"
+  dropout_param {
+    dropout_ratio: 0.7
+  }
+}
+layer {
+  name: "loss1/classifier"
+  type: "InnerProduct"
+  bottom: "loss1/fc"
+  top: "loss1/classifier"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 1000
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "loss1/loss"
+  type: "SoftmaxWithLoss"
+  bottom: "loss1/classifier"
+  bottom: "label"
+  top: "loss1/loss1"
+  loss_weight: 0.3
+}
+layer {
+  name: "loss1/top-1"
+  type: "Accuracy"
+  bottom: "loss1/classifier"
+  bottom: "label"
+  top: "loss1/top-1"
+  include {
+    phase: TEST
+  }
+}
+layer {
+  name: "loss1/top-5"
+  type: "Accuracy"
+  bottom: "loss1/classifier"
+  bottom: "label"
+  top: "loss1/top-5"
+  include {
+    phase: TEST
+  }
+  accuracy_param {
+    top_k: 5
+  }
+}
+layer {
+  name: "inception_4b/1x1"
+  type: "Convolution"
+  bottom: "inception_4a/output"
+  top: "inception_4b/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 160
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4b/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_4b/1x1"
+  top: "inception_4b/1x1"
+}
+layer {
+  name: "inception_4b/3x3_reduce"
+  type: "Convolution"
+  bottom: "inception_4a/output"
+  top: "inception_4b/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 112
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4b/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_4b/3x3_reduce"
+  top: "inception_4b/3x3_reduce"
+}
+layer {
+  name: "inception_4b/3x3"
+  type: "Convolution"
+  bottom: "inception_4b/3x3_reduce"
+  top: "inception_4b/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 224
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4b/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_4b/3x3"
+  top: "inception_4b/3x3"
+}
+layer {
+  name: "inception_4b/5x5_reduce"
+  type: "Convolution"
+  bottom: "inception_4a/output"
+  top: "inception_4b/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 24
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4b/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_4b/5x5_reduce"
+  top: "inception_4b/5x5_reduce"
+}
+layer {
+  name: "inception_4b/5x5"
+  type: "Convolution"
+  bottom: "inception_4b/5x5_reduce"
+  top: "inception_4b/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4b/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_4b/5x5"
+  top: "inception_4b/5x5"
+}
+layer {
+  name: "inception_4b/pool"
+  type: "Pooling"
+  bottom: "inception_4a/output"
+  top: "inception_4b/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_4b/pool_proj"
+  type: "Convolution"
+  bottom: "inception_4b/pool"
+  top: "inception_4b/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4b/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_4b/pool_proj"
+  top: "inception_4b/pool_proj"
+}
+layer {
+  name: "inception_4b/output"
+  type: "Concat"
+  bottom: "inception_4b/1x1"
+  bottom: "inception_4b/3x3"
+  bottom: "inception_4b/5x5"
+  bottom: "inception_4b/pool_proj"
+  top: "inception_4b/output"
+}
+layer {
+  name: "inception_4c/1x1"
+  type: "Convolution"
+  bottom: "inception_4b/output"
+  top: "inception_4c/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4c/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_4c/1x1"
+  top: "inception_4c/1x1"
+}
+layer {
+  name: "inception_4c/3x3_reduce"
+  type: "Convolution"
+  bottom: "inception_4b/output"
+  top: "inception_4c/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4c/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_4c/3x3_reduce"
+  top: "inception_4c/3x3_reduce"
+}
+layer {
+  name: "inception_4c/3x3"
+  type: "Convolution"
+  bottom: "inception_4c/3x3_reduce"
+  top: "inception_4c/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4c/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_4c/3x3"
+  top: "inception_4c/3x3"
+}
+layer {
+  name: "inception_4c/5x5_reduce"
+  type: "Convolution"
+  bottom: "inception_4b/output"
+  top: "inception_4c/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 24
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4c/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_4c/5x5_reduce"
+  top: "inception_4c/5x5_reduce"
+}
+layer {
+  name: "inception_4c/5x5"
+  type: "Convolution"
+  bottom: "inception_4c/5x5_reduce"
+  top: "inception_4c/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4c/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_4c/5x5"
+  top: "inception_4c/5x5"
+}
+layer {
+  name: "inception_4c/pool"
+  type: "Pooling"
+  bottom: "inception_4b/output"
+  top: "inception_4c/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_4c/pool_proj"
+  type: "Convolution"
+  bottom: "inception_4c/pool"
+  top: "inception_4c/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4c/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_4c/pool_proj"
+  top: "inception_4c/pool_proj"
+}
+layer {
+  name: "inception_4c/output"
+  type: "Concat"
+  bottom: "inception_4c/1x1"
+  bottom: "inception_4c/3x3"
+  bottom: "inception_4c/5x5"
+  bottom: "inception_4c/pool_proj"
+  top: "inception_4c/output"
+}
+layer {
+  name: "inception_4d/1x1"
+  type: "Convolution"
+  bottom: "inception_4c/output"
+  top: "inception_4d/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 112
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4d/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_4d/1x1"
+  top: "inception_4d/1x1"
+}
+layer {
+  name: "inception_4d/3x3_reduce"
+  type: "Convolution"
+  bottom: "inception_4c/output"
+  top: "inception_4d/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 144
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4d/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_4d/3x3_reduce"
+  top: "inception_4d/3x3_reduce"
+}
+layer {
+  name: "inception_4d/3x3"
+  type: "Convolution"
+  bottom: "inception_4d/3x3_reduce"
+  top: "inception_4d/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 288
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4d/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_4d/3x3"
+  top: "inception_4d/3x3"
+}
+layer {
+  name: "inception_4d/5x5_reduce"
+  type: "Convolution"
+  bottom: "inception_4c/output"
+  top: "inception_4d/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 32
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4d/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_4d/5x5_reduce"
+  top: "inception_4d/5x5_reduce"
+}
+layer {
+  name: "inception_4d/5x5"
+  type: "Convolution"
+  bottom: "inception_4d/5x5_reduce"
+  top: "inception_4d/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4d/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_4d/5x5"
+  top: "inception_4d/5x5"
+}
+layer {
+  name: "inception_4d/pool"
+  type: "Pooling"
+  bottom: "inception_4c/output"
+  top: "inception_4d/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_4d/pool_proj"
+  type: "Convolution"
+  bottom: "inception_4d/pool"
+  top: "inception_4d/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 64
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4d/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_4d/pool_proj"
+  top: "inception_4d/pool_proj"
+}
+layer {
+  name: "inception_4d/output"
+  type: "Concat"
+  bottom: "inception_4d/1x1"
+  bottom: "inception_4d/3x3"
+  bottom: "inception_4d/5x5"
+  bottom: "inception_4d/pool_proj"
+  top: "inception_4d/output"
+}
+layer {
+  name: "loss2/ave_pool"
+  type: "Pooling"
+  bottom: "inception_4d/output"
+  top: "loss2/ave_pool"
+  pooling_param {
+    pool: AVE
+    kernel_size: 5
+    stride: 3
+  }
+}
+layer {
+  name: "loss2/conv"
+  type: "Convolution"
+  bottom: "loss2/ave_pool"
+  top: "loss2/conv"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "loss2/relu_conv"
+  type: "ReLU"
+  bottom: "loss2/conv"
+  top: "loss2/conv"
+}
+layer {
+  name: "loss2/fc"
+  type: "InnerProduct"
+  bottom: "loss2/conv"
+  top: "loss2/fc"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 1024
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "loss2/relu_fc"
+  type: "ReLU"
+  bottom: "loss2/fc"
+  top: "loss2/fc"
+}
+layer {
+  name: "loss2/drop_fc"
+  type: "Dropout"
+  bottom: "loss2/fc"
+  top: "loss2/fc"
+  dropout_param {
+    dropout_ratio: 0.7
+  }
+}
+layer {
+  name: "loss2/classifier"
+  type: "InnerProduct"
+  bottom: "loss2/fc"
+  top: "loss2/classifier"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 1000
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "loss2/loss"
+  type: "SoftmaxWithLoss"
+  bottom: "loss2/classifier"
+  bottom: "label"
+  top: "loss2/loss1"
+  loss_weight: 0.3
+}
+layer {
+  name: "loss2/top-1"
+  type: "Accuracy"
+  bottom: "loss2/classifier"
+  bottom: "label"
+  top: "loss2/top-1"
+  include {
+    phase: TEST
+  }
+}
+layer {
+  name: "loss2/top-5"
+  type: "Accuracy"
+  bottom: "loss2/classifier"
+  bottom: "label"
+  top: "loss2/top-5"
+  include {
+    phase: TEST
+  }
+  accuracy_param {
+    top_k: 5
+  }
+}
+layer {
+  name: "inception_4e/1x1"
+  type: "Convolution"
+  bottom: "inception_4d/output"
+  top: "inception_4e/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4e/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_4e/1x1"
+  top: "inception_4e/1x1"
+}
+layer {
+  name: "inception_4e/3x3_reduce"
+  type: "Convolution"
+  bottom: "inception_4d/output"
+  top: "inception_4e/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 160
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4e/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_4e/3x3_reduce"
+  top: "inception_4e/3x3_reduce"
+}
+layer {
+  name: "inception_4e/3x3"
+  type: "Convolution"
+  bottom: "inception_4e/3x3_reduce"
+  top: "inception_4e/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 320
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4e/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_4e/3x3"
+  top: "inception_4e/3x3"
+}
+layer {
+  name: "inception_4e/5x5_reduce"
+  type: "Convolution"
+  bottom: "inception_4d/output"
+  top: "inception_4e/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 32
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4e/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_4e/5x5_reduce"
+  top: "inception_4e/5x5_reduce"
+}
+layer {
+  name: "inception_4e/5x5"
+  type: "Convolution"
+  bottom: "inception_4e/5x5_reduce"
+  top: "inception_4e/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4e/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_4e/5x5"
+  top: "inception_4e/5x5"
+}
+layer {
+  name: "inception_4e/pool"
+  type: "Pooling"
+  bottom: "inception_4d/output"
+  top: "inception_4e/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_4e/pool_proj"
+  type: "Convolution"
+  bottom: "inception_4e/pool"
+  top: "inception_4e/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_4e/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_4e/pool_proj"
+  top: "inception_4e/pool_proj"
+}
+layer {
+  name: "inception_4e/output"
+  type: "Concat"
+  bottom: "inception_4e/1x1"
+  bottom: "inception_4e/3x3"
+  bottom: "inception_4e/5x5"
+  bottom: "inception_4e/pool_proj"
+  top: "inception_4e/output"
+}
+layer {
+  name: "pool4/3x3_s2"
+  type: "Pooling"
+  bottom: "inception_4e/output"
+  top: "pool4/3x3_s2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "inception_5a/1x1"
+  type: "Convolution"
+  bottom: "pool4/3x3_s2"
+  top: "inception_5a/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5a/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_5a/1x1"
+  top: "inception_5a/1x1"
+}
+layer {
+  name: "inception_5a/3x3_reduce"
+  type: "Convolution"
+  bottom: "pool4/3x3_s2"
+  top: "inception_5a/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 160
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5a/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_5a/3x3_reduce"
+  top: "inception_5a/3x3_reduce"
+}
+layer {
+  name: "inception_5a/3x3"
+  type: "Convolution"
+  bottom: "inception_5a/3x3_reduce"
+  top: "inception_5a/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 320
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5a/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_5a/3x3"
+  top: "inception_5a/3x3"
+}
+layer {
+  name: "inception_5a/5x5_reduce"
+  type: "Convolution"
+  bottom: "pool4/3x3_s2"
+  top: "inception_5a/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 32
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5a/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_5a/5x5_reduce"
+  top: "inception_5a/5x5_reduce"
+}
+layer {
+  name: "inception_5a/5x5"
+  type: "Convolution"
+  bottom: "inception_5a/5x5_reduce"
+  top: "inception_5a/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5a/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_5a/5x5"
+  top: "inception_5a/5x5"
+}
+layer {
+  name: "inception_5a/pool"
+  type: "Pooling"
+  bottom: "pool4/3x3_s2"
+  top: "inception_5a/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_5a/pool_proj"
+  type: "Convolution"
+  bottom: "inception_5a/pool"
+  top: "inception_5a/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5a/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_5a/pool_proj"
+  top: "inception_5a/pool_proj"
+}
+layer {
+  name: "inception_5a/output"
+  type: "Concat"
+  bottom: "inception_5a/1x1"
+  bottom: "inception_5a/3x3"
+  bottom: "inception_5a/5x5"
+  bottom: "inception_5a/pool_proj"
+  top: "inception_5a/output"
+}
+layer {
+  name: "inception_5b/1x1"
+  type: "Convolution"
+  bottom: "inception_5a/output"
+  top: "inception_5b/1x1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5b/relu_1x1"
+  type: "ReLU"
+  bottom: "inception_5b/1x1"
+  top: "inception_5b/1x1"
+}
+layer {
+  name: "inception_5b/3x3_reduce"
+  type: "Convolution"
+  bottom: "inception_5a/output"
+  top: "inception_5b/3x3_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 192
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5b/relu_3x3_reduce"
+  type: "ReLU"
+  bottom: "inception_5b/3x3_reduce"
+  top: "inception_5b/3x3_reduce"
+}
+layer {
+  name: "inception_5b/3x3"
+  type: "Convolution"
+  bottom: "inception_5b/3x3_reduce"
+  top: "inception_5b/3x3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5b/relu_3x3"
+  type: "ReLU"
+  bottom: "inception_5b/3x3"
+  top: "inception_5b/3x3"
+}
+layer {
+  name: "inception_5b/5x5_reduce"
+  type: "Convolution"
+  bottom: "inception_5a/output"
+  top: "inception_5b/5x5_reduce"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 48
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5b/relu_5x5_reduce"
+  type: "ReLU"
+  bottom: "inception_5b/5x5_reduce"
+  top: "inception_5b/5x5_reduce"
+}
+layer {
+  name: "inception_5b/5x5"
+  type: "Convolution"
+  bottom: "inception_5b/5x5_reduce"
+  top: "inception_5b/5x5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    pad: 2
+    kernel_size: 5
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5b/relu_5x5"
+  type: "ReLU"
+  bottom: "inception_5b/5x5"
+  top: "inception_5b/5x5"
+}
+layer {
+  name: "inception_5b/pool"
+  type: "Pooling"
+  bottom: "inception_5a/output"
+  top: "inception_5b/pool"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 1
+    pad: 1
+  }
+}
+layer {
+  name: "inception_5b/pool_proj"
+  type: "Convolution"
+  bottom: "inception_5b/pool"
+  top: "inception_5b/pool_proj"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 128
+    kernel_size: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.2
+    }
+  }
+}
+layer {
+  name: "inception_5b/relu_pool_proj"
+  type: "ReLU"
+  bottom: "inception_5b/pool_proj"
+  top: "inception_5b/pool_proj"
+}
+layer {
+  name: "inception_5b/output"
+  type: "Concat"
+  bottom: "inception_5b/1x1"
+  bottom: "inception_5b/3x3"
+  bottom: "inception_5b/5x5"
+  bottom: "inception_5b/pool_proj"
+  top: "inception_5b/output"
+}
+layer {
+  name: "pool5/7x7_s1"
+  type: "Pooling"
+  bottom: "inception_5b/output"
+  top: "pool5/7x7_s1"
+  pooling_param {
+    pool: AVE
+    kernel_size: 7
+    stride: 1
+  }
+}
+layer {
+  name: "pool5/drop_7x7_s1"
+  type: "Dropout"
+  bottom: "pool5/7x7_s1"
+  top: "pool5/7x7_s1"
+  dropout_param {
+    dropout_ratio: 0.4
+  }
+}
+layer {
+  name: "loss3/classifier"
+  type: "InnerProduct"
+  bottom: "pool5/7x7_s1"
+  top: "loss3/classifier"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 1000
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "loss3/loss3"
+  type: "SoftmaxWithLoss"
+  bottom: "loss3/classifier"
+  bottom: "label"
+  top: "loss3/loss3"
+  loss_weight: 1
+}
+layer {
+  name: "loss3/top-1"
+  type: "Accuracy"
+  bottom: "loss3/classifier"
+  bottom: "label"
+  top: "loss3/top-1"
+  include {
+    phase: TEST
+  }
+}
+layer {
+  name: "loss3/top-5"
+  type: "Accuracy"
+  bottom: "loss3/classifier"
+  bottom: "label"
+  top: "loss3/top-5"
+  include {
+    phase: TEST
+  }
+  accuracy_param {
+    top_k: 5
+  }
+}
diff --git a/models/bvlc_reference_caffenet/deploy.prototxt b/models/bvlc_reference_caffenet/deploy.prototxt
new file mode 100644
index 0000000..29ccf14
--- /dev/null
+++ b/models/bvlc_reference_caffenet/deploy.prototxt
@@ -0,0 +1,212 @@
+name: "CaffeNet"
+input: "data"
+input_dim: 10
+input_dim: 3
+input_dim: 227
+input_dim: 227
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  convolution_param {
+    num_output: 96
+    kernel_size: 11
+    stride: 4
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "conv1"
+  top: "conv1"
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "pool1"
+  top: "norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "norm1"
+  top: "conv2"
+  convolution_param {
+    num_output: 256
+    pad: 2
+    kernel_size: 5
+    group: 2
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "pool2"
+  top: "norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "norm2"
+  top: "conv3"
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "conv4"
+  type: "Convolution"
+  bottom: "conv3"
+  top: "conv4"
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    group: 2
+  }
+}
+layer {
+  name: "relu4"
+  type: "ReLU"
+  bottom: "conv4"
+  top: "conv4"
+}
+layer {
+  name: "conv5"
+  type: "Convolution"
+  bottom: "conv4"
+  top: "conv5"
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    group: 2
+  }
+}
+layer {
+  name: "relu5"
+  type: "ReLU"
+  bottom: "conv5"
+  top: "conv5"
+}
+layer {
+  name: "pool5"
+  type: "Pooling"
+  bottom: "conv5"
+  top: "pool5"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "fc6"
+  type: "InnerProduct"
+  bottom: "pool5"
+  top: "fc6"
+  inner_product_param {
+    num_output: 4096
+  }
+}
+layer {
+  name: "relu6"
+  type: "ReLU"
+  bottom: "fc6"
+  top: "fc6"
+}
+layer {
+  name: "drop6"
+  type: "Dropout"
+  bottom: "fc6"
+  top: "fc6"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc7"
+  type: "InnerProduct"
+  bottom: "fc6"
+  top: "fc7"
+  inner_product_param {
+    num_output: 4096
+  }
+}
+layer {
+  name: "relu7"
+  type: "ReLU"
+  bottom: "fc7"
+  top: "fc7"
+}
+layer {
+  name: "drop7"
+  type: "Dropout"
+  bottom: "fc7"
+  top: "fc7"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc8"
+  type: "InnerProduct"
+  bottom: "fc7"
+  top: "fc8"
+  inner_product_param {
+    num_output: 1000
+  }
+}
+layer {
+  name: "prob"
+  type: "Softmax"
+  bottom: "fc8"
+  top: "prob"
+}
diff --git a/models/bvlc_reference_caffenet/readme.md b/models/bvlc_reference_caffenet/readme.md
new file mode 100644
index 0000000..671e47a
--- /dev/null
+++ b/models/bvlc_reference_caffenet/readme.md
@@ -0,0 +1,25 @@
+---
+name: BVLC CaffeNet Model
+caffemodel: bvlc_reference_caffenet.caffemodel
+caffemodel_url: http://dl.caffe.berkeleyvision.org/bvlc_reference_caffenet.caffemodel
+license: unrestricted
+sha1: 4c8d77deb20ea792f84eb5e6d0a11ca0a8660a46
+caffe_commit: 709dc15af4a06bebda027c1eb2b3f3e3375d5077
+---
+
+This model is the result of following the Caffe [ImageNet model training instructions](http://caffe.berkeleyvision.org/gathered/examples/imagenet.html).
+It is a replication of the model described in the [AlexNet](http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks) publication with some differences:
+
+- not training with the relighting data-augmentation;
+- the order of pooling and normalization layers is switched (in CaffeNet, pooling is done before normalization).
+
+This model is a snapshot of iteration 310,000.
+The best validation performance during training was iteration 313,000 with validation accuracy 57.412% and loss 1.82328.
+This model obtains a top-1 accuracy of 57.4% and a top-5 accuracy of 80.4% on the validation set, using just the center crop.
+(Averaging the predictions over 10 crops, that is (4 corners + 1 center) * 2 mirrors, should obtain a bit higher accuracy still.)
+
+This model was trained by Jeff Donahue @jeffdonahue
+
+## License
+
+This model is released for unrestricted use.
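A minimal pycaffe sketch of the 10-crop averaged prediction mentioned above. The prototxt and mean paths are assumptions based on this repository's layout, and the `.caffemodel` weights must first be downloaded from the `caffemodel_url` above; `caffe.Classifier` with `oversample=True` averages over the 4 corner crops plus the center crop, each mirrored.

    import numpy as np
    import caffe

    model_def = 'models/bvlc_reference_caffenet/deploy.prototxt'
    weights   = 'models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel'

    # Channel-wise BGR mean derived from the bundled ImageNet mean image.
    mean = np.load('python/caffe/imagenet/ilsvrc_2012_mean.npy').mean(1).mean(2)

    net = caffe.Classifier(model_def, weights,
                           mean=mean,
                           channel_swap=(2, 1, 0),  # RGB -> BGR
                           raw_scale=255,           # [0, 1] -> [0, 255]
                           image_dims=(256, 256))   # resize here, crop to 227 inside

    img = caffe.io.load_image('examples/images/cat.jpg')
    probs = net.predict([img], oversample=True)     # averaged over the 10 crops
    print(probs[0].argmax())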
diff --git a/models/bvlc_reference_caffenet/solver.prototxt b/models/bvlc_reference_caffenet/solver.prototxt
new file mode 100644
index 0000000..af1315b
--- /dev/null
+++ b/models/bvlc_reference_caffenet/solver.prototxt
@@ -0,0 +1,14 @@
+net: "models/bvlc_reference_caffenet/train_val.prototxt"
+test_iter: 1000
+test_interval: 1000
+base_lr: 0.01
+lr_policy: "step"
+gamma: 0.1
+stepsize: 100000
+display: 20
+max_iter: 450000
+momentum: 0.9
+weight_decay: 0.0005
+snapshot: 10000
+snapshot_prefix: "models/bvlc_reference_caffenet/caffenet_train"
+solver_mode: GPU
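For reference, the `step` policy above decays the learning rate by `gamma` every `stepsize` iterations; the small sketch below reproduces Caffe's documented formula for this schedule (it is illustrative, not project code).

    base_lr, gamma, stepsize = 0.01, 0.1, 100000

    def step_lr(it):
        # Caffe "step" policy: base_lr * gamma ^ floor(iter / stepsize)
        return base_lr * gamma ** (it // stepsize)

    for it in (0, 100000, 200000, 300000, 400000):
        print(it, step_lr(it))  # -> 0.01, 0.001, 1e-4, 1e-5, 1e-6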
diff --git a/models/bvlc_reference_caffenet/train_val.prototxt b/models/bvlc_reference_caffenet/train_val.prototxt
new file mode 100644
index 0000000..c79472e
--- /dev/null
+++ b/models/bvlc_reference_caffenet/train_val.prototxt
@@ -0,0 +1,400 @@
+name: "CaffeNet"
+layer {
+  name: "data"
+  type: "Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TRAIN
+  }
+  transform_param {
+    mirror: true
+    crop_size: 227
+    mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
+  }
+# mean pixel / channel-wise mean instead of mean image
+#  transform_param {
+#    crop_size: 227
+#    mean_value: 104
+#    mean_value: 117
+#    mean_value: 123
+#    mirror: true
+#  }
+  data_param {
+    source: "examples/imagenet/ilsvrc12_train_lmdb"
+    batch_size: 256
+    backend: LMDB
+  }
+}
+layer {
+  name: "data"
+  type: "Data"
+  top: "data"
+  top: "label"
+  include {
+    phase: TEST
+  }
+  transform_param {
+    mirror: false
+    crop_size: 227
+    mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
+  }
+# mean pixel / channel-wise mean instead of mean image
+#  transform_param {
+#    crop_size: 227
+#    mean_value: 104
+#    mean_value: 117
+#    mean_value: 123
+#    mirror: true
+#  }
+  data_param {
+    source: "examples/imagenet/ilsvrc12_val_lmdb"
+    batch_size: 50
+    backend: LMDB
+  }
+}
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    kernel_size: 11
+    stride: 4
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "conv1"
+  top: "conv1"
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "pool1"
+  top: "norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "norm1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 2
+    kernel_size: 5
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "pool2"
+  top: "norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "norm2"
+  top: "conv3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "conv4"
+  type: "Convolution"
+  bottom: "conv3"
+  top: "conv4"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu4"
+  type: "ReLU"
+  bottom: "conv4"
+  top: "conv4"
+}
+layer {
+  name: "conv5"
+  type: "Convolution"
+  bottom: "conv4"
+  top: "conv5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu5"
+  type: "ReLU"
+  bottom: "conv5"
+  top: "conv5"
+}
+layer {
+  name: "pool5"
+  type: "Pooling"
+  bottom: "conv5"
+  top: "pool5"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "fc6"
+  type: "InnerProduct"
+  bottom: "pool5"
+  top: "fc6"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+    weight_filler {
+      type: "gaussian"
+      std: 0.005
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu6"
+  type: "ReLU"
+  bottom: "fc6"
+  top: "fc6"
+}
+layer {
+  name: "drop6"
+  type: "Dropout"
+  bottom: "fc6"
+  top: "fc6"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc7"
+  type: "InnerProduct"
+  bottom: "fc6"
+  top: "fc7"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+    weight_filler {
+      type: "gaussian"
+      std: 0.005
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu7"
+  type: "ReLU"
+  bottom: "fc7"
+  top: "fc7"
+}
+layer {
+  name: "drop7"
+  type: "Dropout"
+  bottom: "fc7"
+  top: "fc7"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc8"
+  type: "InnerProduct"
+  bottom: "fc7"
+  top: "fc8"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 1000
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "accuracy"
+  type: "Accuracy"
+  bottom: "fc8"
+  bottom: "label"
+  top: "accuracy"
+  include {
+    phase: TEST
+  }
+}
+layer {
+  name: "loss"
+  type: "SoftmaxWithLoss"
+  bottom: "fc8"
+  bottom: "label"
+  top: "loss"
+}
diff --git a/models/bvlc_reference_rcnn_ilsvrc13/deploy.prototxt b/models/bvlc_reference_rcnn_ilsvrc13/deploy.prototxt
new file mode 100644
index 0000000..ea9cf98
--- /dev/null
+++ b/models/bvlc_reference_rcnn_ilsvrc13/deploy.prototxt
@@ -0,0 +1,207 @@
+name: "R-CNN-ilsvrc13"
+input: "data"
+input_dim: 10
+input_dim: 3
+input_dim: 227
+input_dim: 227
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  convolution_param {
+    num_output: 96
+    kernel_size: 11
+    stride: 4
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "conv1"
+  top: "conv1"
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "pool1"
+  top: "norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "norm1"
+  top: "conv2"
+  convolution_param {
+    num_output: 256
+    pad: 2
+    kernel_size: 5
+    group: 2
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "pool2"
+  top: "norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "norm2"
+  top: "conv3"
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "conv4"
+  type: "Convolution"
+  bottom: "conv3"
+  top: "conv4"
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    group: 2
+  }
+}
+layer {
+  name: "relu4"
+  type: "ReLU"
+  bottom: "conv4"
+  top: "conv4"
+}
+layer {
+  name: "conv5"
+  type: "Convolution"
+  bottom: "conv4"
+  top: "conv5"
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    group: 2
+  }
+}
+layer {
+  name: "relu5"
+  type: "ReLU"
+  bottom: "conv5"
+  top: "conv5"
+}
+layer {
+  name: "pool5"
+  type: "Pooling"
+  bottom: "conv5"
+  top: "pool5"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "fc6"
+  type: "InnerProduct"
+  bottom: "pool5"
+  top: "fc6"
+  inner_product_param {
+    num_output: 4096
+  }
+}
+layer {
+  name: "relu6"
+  type: "ReLU"
+  bottom: "fc6"
+  top: "fc6"
+}
+layer {
+  name: "drop6"
+  type: "Dropout"
+  bottom: "fc6"
+  top: "fc6"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc7"
+  type: "InnerProduct"
+  bottom: "fc6"
+  top: "fc7"
+  inner_product_param {
+    num_output: 4096
+  }
+}
+layer {
+  name: "relu7"
+  type: "ReLU"
+  bottom: "fc7"
+  top: "fc7"
+}
+layer {
+  name: "drop7"
+  type: "Dropout"
+  bottom: "fc7"
+  top: "fc7"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+# R-CNN classification layer made from R-CNN ILSVRC13 SVMs.
+layer {
+  name: "fc-rcnn"
+  type: "InnerProduct"
+  bottom: "fc7"
+  top: "fc-rcnn"
+  inner_product_param {
+    num_output: 200
+  }
+}
diff --git a/models/bvlc_reference_rcnn_ilsvrc13/readme.md b/models/bvlc_reference_rcnn_ilsvrc13/readme.md
new file mode 100644
index 0000000..9a11a24
--- /dev/null
+++ b/models/bvlc_reference_rcnn_ilsvrc13/readme.md
@@ -0,0 +1,20 @@
+---
+name: BVLC Reference RCNN ILSVRC13 Model
+caffemodel: bvlc_reference_rcnn_ilsvrc13.caffemodel
+caffemodel_url: http://dl.caffe.berkeleyvision.org/bvlc_reference_rcnn_ilsvrc13.caffemodel
+license: unrestricted
+sha1: bdd8abb885819cba5e2fe1eb36235f2319477e64
+caffe_commit: a7e397abbda52c0b90323c23ab95bdeabee90a98
+---
+
+The pure Caffe instantiation of the [R-CNN](https://github.com/rbgirshick/rcnn) model for ILSVRC13 detection.
+This model was made by transplanting the R-CNN SVM classifiers into an `fc-rcnn` classification layer, provided here as an off-the-shelf Caffe detector.
+Try the [detection example](http://nbviewer.ipython.org/github/BVLC/caffe/blob/master/examples/detection.ipynb) to see it in action.
+
+*N.B. For research purposes, make use of the official R-CNN package and not this example.*
+
+This model was trained by Ross Girshick @rbgirshick
+
+## License
+
+This model is released for unrestricted use.
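A small pycaffe sketch of scoring a single window with this deploy net. The paths are assumptions based on this patch's layout, the weights come from the `caffemodel_url` above, and region proposals plus window cropping (e.g. selective search) are out of scope here.

    import numpy as np
    import caffe

    net = caffe.Net('models/bvlc_reference_rcnn_ilsvrc13/deploy.prototxt',
                    'models/bvlc_reference_rcnn_ilsvrc13/bvlc_reference_rcnn_ilsvrc13.caffemodel',
                    caffe.TEST)
    net.blobs['data'].reshape(1, 3, 227, 227)       # score one window at a time

    t = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
    t.set_transpose('data', (2, 0, 1))              # HxWxC -> CxHxW
    t.set_mean('data', np.load('python/caffe/imagenet/ilsvrc_2012_mean.npy').mean(1).mean(2))
    t.set_raw_scale('data', 255)                    # [0, 1] -> [0, 255]
    t.set_channel_swap('data', (2, 1, 0))           # RGB -> BGR

    window = caffe.io.load_image('examples/images/fish-bike.jpg')  # stand-in for a cropped proposal
    net.blobs['data'].data[0] = t.preprocess('data', window)
    scores = net.forward()['fc-rcnn'][0]            # 200 ILSVRC13 detection classes
    print(scores.argmax(), scores.max())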
diff --git a/models/finetune_flickr_style/deploy.prototxt b/models/finetune_flickr_style/deploy.prototxt
new file mode 100644
index 0000000..4a924f7
--- /dev/null
+++ b/models/finetune_flickr_style/deploy.prototxt
@@ -0,0 +1,342 @@
+name: "FlickrStyleCaffeNet"
+input: "data"
+input_dim: 10
+input_dim: 3
+input_dim: 227
+input_dim: 227
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    kernel_size: 11
+    stride: 4
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "conv1"
+  top: "conv1"
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "pool1"
+  top: "norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "norm1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 2
+    kernel_size: 5
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "pool2"
+  top: "norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "norm2"
+  top: "conv3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "conv4"
+  type: "Convolution"
+  bottom: "conv3"
+  top: "conv4"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu4"
+  type: "ReLU"
+  bottom: "conv4"
+  top: "conv4"
+}
+layer {
+  name: "conv5"
+  type: "Convolution"
+  bottom: "conv4"
+  top: "conv5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu5"
+  type: "ReLU"
+  bottom: "conv5"
+  top: "conv5"
+}
+layer {
+  name: "pool5"
+  type: "Pooling"
+  bottom: "conv5"
+  top: "pool5"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "fc6"
+  type: "InnerProduct"
+  bottom: "pool5"
+  top: "fc6"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+    weight_filler {
+      type: "gaussian"
+      std: 0.005
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu6"
+  type: "ReLU"
+  bottom: "fc6"
+  top: "fc6"
+}
+layer {
+  name: "drop6"
+  type: "Dropout"
+  bottom: "fc6"
+  top: "fc6"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc7"
+  type: "InnerProduct"
+  bottom: "fc6"
+  top: "fc7"
+  # Note that lr_mult can be set to 0 to disable any fine-tuning of this, and any other, layer
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+    weight_filler {
+      type: "gaussian"
+      std: 0.005
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu7"
+  type: "ReLU"
+  bottom: "fc7"
+  top: "fc7"
+}
+layer {
+  name: "drop7"
+  type: "Dropout"
+  bottom: "fc7"
+  top: "fc7"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc8_flickr"
+  type: "InnerProduct"
+  bottom: "fc7"
+  top: "fc8_flickr"
+  # lr_mult is set higher than for the other layers, because this layer starts from random initialization while the others are already trained
+  param {
+    lr_mult: 10
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 20
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 20
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "prob"
+  type: "Softmax"
+  bottom: "fc8_flickr"
+  top: "prob"
+}
diff --git a/models/finetune_flickr_style/readme.md b/models/finetune_flickr_style/readme.md
new file mode 100644
index 0000000..aac7f7c
--- /dev/null
+++ b/models/finetune_flickr_style/readme.md
@@ -0,0 +1,24 @@
+---
+name: Finetuning CaffeNet on Flickr Style
+caffemodel: finetune_flickr_style.caffemodel
+caffemodel_url: http://dl.caffe.berkeleyvision.org/finetune_flickr_style.caffemodel
+license: non-commercial
+sha1: b61b5cef7d771b53b0c488e78d35ccadc073e9cf
+caffe_commit: 737ea5e936821b5c69f9c3952d72693ae5843370
+gist_id: 034c6ac3865563b69e60
+---
+
+This model is trained exactly as described in `docs/finetune_flickr_style/readme.md`, using all 80000 images.
+The final performance:
+
+    I1017 07:36:17.370688 31333 solver.cpp:228] Iteration 100000, loss = 0.757952
+    I1017 07:36:17.370730 31333 solver.cpp:247] Iteration 100000, Testing net (#0)
+    I1017 07:36:34.248730 31333 solver.cpp:298]     Test net output #0: accuracy = 0.3916
+
+This model was trained by Sergey Karayev @sergeyk
+
+## License
+
+The Flickr Style dataset contains only URLs to images.
+Some of the images may be under copyright.
+Training a category-recognition model for research/non-commercial use may constitute fair use of this data, but the result should not be used for commercial purposes.
diff --git a/models/finetune_flickr_style/solver.prototxt b/models/finetune_flickr_style/solver.prototxt
new file mode 100644
index 0000000..5e189bc
--- /dev/null
+++ b/models/finetune_flickr_style/solver.prototxt
@@ -0,0 +1,17 @@
+net: "models/finetune_flickr_style/train_val.prototxt"
+test_iter: 100
+test_interval: 1000
+# lr for fine-tuning should be lower than when starting from scratch
+base_lr: 0.001
+lr_policy: "step"
+gamma: 0.1
+# stepsize should also be lower, as we're closer to being done
+stepsize: 20000
+display: 20
+max_iter: 100000
+momentum: 0.9
+weight_decay: 0.0005
+snapshot: 10000
+snapshot_prefix: "models/finetune_flickr_style/finetune_flickr_style"
+# uncomment the following to default to CPU mode solving
+# solver_mode: CPU
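+#
+# A rough usage sketch (not part of the solver itself): fine-tuning is
+# typically launched with the caffe tool, seeding the net with pretrained
+# CaffeNet weights, e.g.
+#
+#   ./build/tools/caffe train \
+#     -solver models/finetune_flickr_style/solver.prototxt \
+#     -weights models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel
+#
+# (the -weights path is an assumption; point it at your downloaded model)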
diff --git a/models/finetune_flickr_style/train_val.prototxt b/models/finetune_flickr_style/train_val.prototxt
new file mode 100644
index 0000000..848a426
--- /dev/null
+++ b/models/finetune_flickr_style/train_val.prototxt
@@ -0,0 +1,388 @@
+name: "FlickrStyleCaffeNet"
+layer {
+  name: "data"
+  type: "ImageData"
+  top: "data"
+  top: "label"
+  include {
+    phase: TRAIN
+  }
+  transform_param {
+    mirror: true
+    crop_size: 227
+    mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
+  }
+  image_data_param {
+    source: "data/flickr_style/train.txt"
+    batch_size: 50
+    new_height: 256
+    new_width: 256
+  }
+}
+layer {
+  name: "data"
+  type: "ImageData"
+  top: "data"
+  top: "label"
+  include {
+    phase: TEST
+  }
+  transform_param {
+    mirror: false
+    crop_size: 227
+    mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
+  }
+  image_data_param {
+    source: "data/flickr_style/test.txt"
+    batch_size: 50
+    new_height: 256
+    new_width: 256
+  }
+}
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    kernel_size: 11
+    stride: 4
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "conv1"
+  top: "conv1"
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "conv1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "pool1"
+  top: "norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "norm1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 2
+    kernel_size: 5
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "conv2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "pool2"
+  top: "norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "norm2"
+  top: "conv3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "conv4"
+  type: "Convolution"
+  bottom: "conv3"
+  top: "conv4"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu4"
+  type: "ReLU"
+  bottom: "conv4"
+  top: "conv4"
+}
+layer {
+  name: "conv5"
+  type: "Convolution"
+  bottom: "conv4"
+  top: "conv5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    group: 2
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu5"
+  type: "ReLU"
+  bottom: "conv5"
+  top: "conv5"
+}
+layer {
+  name: "pool5"
+  type: "Pooling"
+  bottom: "conv5"
+  top: "pool5"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "fc6"
+  type: "InnerProduct"
+  bottom: "pool5"
+  top: "fc6"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+    weight_filler {
+      type: "gaussian"
+      std: 0.005
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu6"
+  type: "ReLU"
+  bottom: "fc6"
+  top: "fc6"
+}
+layer {
+  name: "drop6"
+  type: "Dropout"
+  bottom: "fc6"
+  top: "fc6"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc7"
+  type: "InnerProduct"
+  bottom: "fc6"
+  top: "fc7"
+  # Note that lr_mult can be set to 0 to disable any fine-tuning of this, and any other, layer
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+    weight_filler {
+      type: "gaussian"
+      std: 0.005
+    }
+    bias_filler {
+      type: "constant"
+      value: 1
+    }
+  }
+}
+layer {
+  name: "relu7"
+  type: "ReLU"
+  bottom: "fc7"
+  top: "fc7"
+}
+layer {
+  name: "drop7"
+  type: "Dropout"
+  bottom: "fc7"
+  top: "fc7"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc8_flickr"
+  type: "InnerProduct"
+  bottom: "fc7"
+  top: "fc8_flickr"
+  # lr_mult is set higher than for the other layers because this layer starts from random initialization while the others are already trained
+  param {
+    lr_mult: 10
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 20
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 20
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
+layer {
+  name: "loss"
+  type: "SoftmaxWithLoss"
+  bottom: "fc8_flickr"
+  bottom: "label"
+  top: "loss"
+}
+layer {
+  name: "accuracy"
+  type: "Accuracy"
+  bottom: "fc8_flickr"
+  bottom: "label"
+  top: "accuracy"
+  include {
+    phase: TEST
+  }
+}
diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt
new file mode 100644
index 0000000..df0401d
--- /dev/null
+++ b/python/CMakeLists.txt
@@ -0,0 +1,34 @@
+if(NOT HAVE_PYTHON)
+  message(STATUS "Python interface is disabled or not all required dependecies found. Building without it...")
+  return()
+endif()
+
+include_directories(${PYTHON_INCLUDE_DIRS} ${NUMPY_INCLUDE_DIR} ${Boost_INCLUDE_DIRS})
+file(GLOB_RECURSE python_srcs ${PROJECT_SOURCE_DIR}/python/*.cpp)
+
+add_library(pycaffe SHARED ${python_srcs})
+target_link_libraries(pycaffe ${Caffe_LINK} ${PYTHON_LIBRARIES} ${Boost_LIBRARIES})
+set_target_properties(pycaffe PROPERTIES PREFIX "" OUTPUT_NAME "_caffe")
+caffe_default_properties(pycaffe)
+
+if(UNIX OR APPLE)
+    set(__linkname "${PROJECT_SOURCE_DIR}/python/caffe/_caffe.so")
+    add_custom_command(TARGET pycaffe POST_BUILD
+                       COMMAND ln -sf $<TARGET_LINKER_FILE:pycaffe> "${__linkname}"
+                       COMMAND ${CMAKE_COMMAND} -E make_directory ${PROJECT_SOURCE_DIR}/python/caffe/proto
+                       COMMAND touch ${PROJECT_SOURCE_DIR}/python/caffe/proto/__init__.py
+                       COMMAND cp ${proto_gen_folder}/*.py ${PROJECT_SOURCE_DIR}/python/caffe/proto/
+                       COMMENT "Creating symlink ${__linkname} -> ${PROJECT_BINARY_DIR}/lib/_caffe${CAffe_POSTFIX}.so")
+endif()
+
+# ---[ Install
+file(GLOB files1 *.py requirements.txt)
+install(FILES ${files1} DESTINATION python)
+
+file(GLOB files2 caffe/*.py)
+install(FILES  ${files2} DESTINATION python/caffe)
+install(TARGETS pycaffe  DESTINATION python/caffe)
+install(DIRECTORY caffe/imagenet caffe/proto caffe/test DESTINATION python/caffe)
+
+
+
diff --git a/python/caffe/__init__.py b/python/caffe/__init__.py
new file mode 100644
index 0000000..1b2da51
--- /dev/null
+++ b/python/caffe/__init__.py
@@ -0,0 +1,7 @@
+from .pycaffe import Net, SGDSolver
+from ._caffe import set_mode_cpu, set_mode_gpu, set_device, Layer, get_solver
+from .proto.caffe_pb2 import TRAIN, TEST
+from .classifier import Classifier
+from .detector import Detector
+from . import io
+from .net_spec import layers, params, NetSpec, to_proto
diff --git a/python/caffe/_caffe.cpp b/python/caffe/_caffe.cpp
new file mode 100644
index 0000000..dff7f62
--- /dev/null
+++ b/python/caffe/_caffe.cpp
@@ -0,0 +1,301 @@
+#include <Python.h>  // NOLINT(build/include_alpha)
+
+// Produce deprecation warnings (needs to come before arrayobject.h inclusion).
+#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
+
+#include <boost/make_shared.hpp>
+#include <boost/python.hpp>
+#include <boost/python/raw_function.hpp>
+#include <boost/python/suite/indexing/vector_indexing_suite.hpp>
+#include <numpy/arrayobject.h>
+
+// these need to be included after boost on OS X
+#include <string>  // NOLINT(build/include_order)
+#include <vector>  // NOLINT(build/include_order)
+#include <fstream>  // NOLINT
+
+#include "caffe/caffe.hpp"
+#include "caffe/python_layer.hpp"
+
+// Temporary solution for numpy < 1.7 versions: old macro, no promises.
+// You're strongly advised to upgrade to >= 1.7.
+#ifndef NPY_ARRAY_C_CONTIGUOUS
+#define NPY_ARRAY_C_CONTIGUOUS NPY_C_CONTIGUOUS
+#define PyArray_SetBaseObject(arr, x) (PyArray_BASE(arr) = (x))
+#endif
+
+namespace bp = boost::python;
+
+namespace caffe {
+
+// For Python, for now, we'll just always use float as the type.
+typedef float Dtype;
+const int NPY_DTYPE = NPY_FLOAT32;
+
+// Selecting mode.
+void set_mode_cpu() { Caffe::set_mode(Caffe::CPU); }
+void set_mode_gpu() { Caffe::set_mode(Caffe::GPU); }
+
+// For convenience, check that input files can be opened, and raise an
+// exception that boost will send to Python if not (caffe could still crash
+// later if the input files are disturbed before they are actually used, but
+// this saves frustration in most cases).
+static void CheckFile(const string& filename) {
+    std::ifstream f(filename.c_str());
+    if (!f.good()) {
+      f.close();
+      throw std::runtime_error("Could not open file " + filename);
+    }
+    f.close();
+}
+
+void CheckContiguousArray(PyArrayObject* arr, string name,
+    int channels, int height, int width) {
+  if (!(PyArray_FLAGS(arr) & NPY_ARRAY_C_CONTIGUOUS)) {
+    throw std::runtime_error(name + " must be C contiguous");
+  }
+  if (PyArray_NDIM(arr) != 4) {
+    throw std::runtime_error(name + " must be 4-d");
+  }
+  if (PyArray_TYPE(arr) != NPY_FLOAT32) {
+    throw std::runtime_error(name + " must be float32");
+  }
+  if (PyArray_DIMS(arr)[1] != channels) {
+    throw std::runtime_error(name + " has wrong number of channels");
+  }
+  if (PyArray_DIMS(arr)[2] != height) {
+    throw std::runtime_error(name + " has wrong height");
+  }
+  if (PyArray_DIMS(arr)[3] != width) {
+    throw std::runtime_error(name + " has wrong width");
+  }
+}
+
+// Net constructor for passing phase as int
+shared_ptr<Net<Dtype> > Net_Init(
+    string param_file, int phase) {
+  CheckFile(param_file);
+
+  shared_ptr<Net<Dtype> > net(new Net<Dtype>(param_file,
+      static_cast<Phase>(phase)));
+  return net;
+}
+
+// Net construct-and-load convenience constructor
+shared_ptr<Net<Dtype> > Net_Init_Load(
+    string param_file, string pretrained_param_file, int phase) {
+  CheckFile(param_file);
+  CheckFile(pretrained_param_file);
+
+  shared_ptr<Net<Dtype> > net(new Net<Dtype>(param_file,
+      static_cast<Phase>(phase)));
+  net->CopyTrainedLayersFrom(pretrained_param_file);
+  return net;
+}
+
+void Net_Save(const Net<Dtype>& net, string filename) {
+  NetParameter net_param;
+  net.ToProto(&net_param, false);
+  WriteProtoToBinaryFile(net_param, filename.c_str());
+}
+
+void Net_SetInputArrays(Net<Dtype>* net, bp::object data_obj,
+    bp::object labels_obj) {
+  // check that this network has an input MemoryDataLayer
+  shared_ptr<MemoryDataLayer<Dtype> > md_layer =
+    boost::dynamic_pointer_cast<MemoryDataLayer<Dtype> >(net->layers()[0]);
+  if (!md_layer) {
+    throw std::runtime_error("set_input_arrays may only be called if the"
+        " first layer is a MemoryDataLayer");
+  }
+
+  // check that we were passed appropriately-sized contiguous memory
+  PyArrayObject* data_arr =
+      reinterpret_cast<PyArrayObject*>(data_obj.ptr());
+  PyArrayObject* labels_arr =
+      reinterpret_cast<PyArrayObject*>(labels_obj.ptr());
+  CheckContiguousArray(data_arr, "data array", md_layer->channels(),
+      md_layer->height(), md_layer->width());
+  CheckContiguousArray(labels_arr, "labels array", 1, 1, 1);
+  if (PyArray_DIMS(data_arr)[0] != PyArray_DIMS(labels_arr)[0]) {
+    throw std::runtime_error("data and labels must have the same first"
+        " dimension");
+  }
+  if (PyArray_DIMS(data_arr)[0] % md_layer->batch_size() != 0) {
+    throw std::runtime_error("first dimensions of input arrays must be a"
+        " multiple of batch size");
+  }
+
+  md_layer->Reset(static_cast<Dtype*>(PyArray_DATA(data_arr)),
+      static_cast<Dtype*>(PyArray_DATA(labels_arr)),
+      PyArray_DIMS(data_arr)[0]);
+}
+
+Solver<Dtype>* GetSolverFromFile(const string& filename) {
+  SolverParameter param;
+  ReadProtoFromTextFileOrDie(filename, &param);
+  return GetSolver<Dtype>(param);
+}
+
+struct NdarrayConverterGenerator {
+  template <typename T> struct apply;
+};
+
+template <>
+struct NdarrayConverterGenerator::apply<Dtype*> {
+  struct type {
+    PyObject* operator() (Dtype* data) const {
+      // Just store the data pointer, and add the shape information in postcall.
+      return PyArray_SimpleNewFromData(0, NULL, NPY_DTYPE, data);
+    }
+    const PyTypeObject* get_pytype() {
+      return &PyArray_Type;
+    }
+  };
+};
+
+struct NdarrayCallPolicies : public bp::default_call_policies {
+  typedef NdarrayConverterGenerator result_converter;
+  PyObject* postcall(PyObject* pyargs, PyObject* result) {
+    bp::object pyblob = bp::extract<bp::tuple>(pyargs)()[0];
+    shared_ptr<Blob<Dtype> > blob =
+      bp::extract<shared_ptr<Blob<Dtype> > >(pyblob);
+    // Free the temporary pointer-holding array, and construct a new one with
+    // the shape information from the blob.
+    void* data = PyArray_DATA(reinterpret_cast<PyArrayObject*>(result));
+    Py_DECREF(result);
+    const int num_axes = blob->num_axes();
+    vector<npy_intp> dims(blob->shape().begin(), blob->shape().end());
+    PyObject *arr_obj = PyArray_SimpleNewFromData(num_axes, dims.data(),
+                                                  NPY_FLOAT32, data);
+    // SetBaseObject steals a ref, so we need to INCREF.
+    Py_INCREF(pyblob.ptr());
+    PyArray_SetBaseObject(reinterpret_cast<PyArrayObject*>(arr_obj),
+        pyblob.ptr());
+    return arr_obj;
+  }
+};
+
+bp::object Blob_Reshape(bp::tuple args, bp::dict kwargs) {
+  if (bp::len(kwargs) > 0) {
+    throw std::runtime_error("Blob.reshape takes no kwargs");
+  }
+  Blob<Dtype>* self = bp::extract<Blob<Dtype>*>(args[0]);
+  vector<int> shape(bp::len(args) - 1);
+  for (int i = 1; i < bp::len(args); ++i) {
+    shape[i - 1] = bp::extract<int>(args[i]);
+  }
+  self->Reshape(shape);
+  // We need to explicitly return None to use bp::raw_function.
+  return bp::object();
+}
+
+BOOST_PYTHON_MEMBER_FUNCTION_OVERLOADS(SolveOverloads, Solve, 0, 1);
+
+BOOST_PYTHON_MODULE(_caffe) {
+  // below, we prepend an underscore to methods that will be replaced
+  // in Python
+  // Caffe utility functions
+  bp::def("set_mode_cpu", &set_mode_cpu);
+  bp::def("set_mode_gpu", &set_mode_gpu);
+  bp::def("set_device", &Caffe::SetDevice);
+
+  bp::class_<Net<Dtype>, shared_ptr<Net<Dtype> >, boost::noncopyable >("Net",
+    bp::no_init)
+    .def("__init__", bp::make_constructor(&Net_Init))
+    .def("__init__", bp::make_constructor(&Net_Init_Load))
+    .def("_forward", &Net<Dtype>::ForwardFromTo)
+    .def("_backward", &Net<Dtype>::BackwardFromTo)
+    .def("reshape", &Net<Dtype>::Reshape)
+    // The cast is to select a particular overload.
+    .def("copy_from", static_cast<void (Net<Dtype>::*)(const string)>(
+        &Net<Dtype>::CopyTrainedLayersFrom))
+    .def("share_with", &Net<Dtype>::ShareTrainedLayersWith)
+    .add_property("_blobs", bp::make_function(&Net<Dtype>::blobs,
+        bp::return_internal_reference<>()))
+    .add_property("layers", bp::make_function(&Net<Dtype>::layers,
+        bp::return_internal_reference<>()))
+    .add_property("_blob_names", bp::make_function(&Net<Dtype>::blob_names,
+        bp::return_value_policy<bp::copy_const_reference>()))
+    .add_property("_layer_names", bp::make_function(&Net<Dtype>::layer_names,
+        bp::return_value_policy<bp::copy_const_reference>()))
+    .add_property("_inputs", bp::make_function(&Net<Dtype>::input_blob_indices,
+        bp::return_value_policy<bp::copy_const_reference>()))
+    .add_property("_outputs",
+        bp::make_function(&Net<Dtype>::output_blob_indices,
+        bp::return_value_policy<bp::copy_const_reference>()))
+    .def("_set_input_arrays", &Net_SetInputArrays,
+        bp::with_custodian_and_ward<1, 2, bp::with_custodian_and_ward<1, 3> >())
+    .def("save", &Net_Save);
+
+  bp::class_<Blob<Dtype>, shared_ptr<Blob<Dtype> >, boost::noncopyable>(
+    "Blob", bp::no_init)
+    .add_property("num",      &Blob<Dtype>::num)
+    .add_property("channels", &Blob<Dtype>::channels)
+    .add_property("height",   &Blob<Dtype>::height)
+    .add_property("width",    &Blob<Dtype>::width)
+    .add_property("count",    static_cast<int (Blob<Dtype>::*)() const>(
+        &Blob<Dtype>::count))
+    .def("reshape",           bp::raw_function(&Blob_Reshape))
+    .add_property("data",     bp::make_function(&Blob<Dtype>::mutable_cpu_data,
+          NdarrayCallPolicies()))
+    .add_property("diff",     bp::make_function(&Blob<Dtype>::mutable_cpu_diff,
+          NdarrayCallPolicies()));
+
+  bp::class_<Layer<Dtype>, shared_ptr<PythonLayer<Dtype> >,
+    boost::noncopyable>("Layer", bp::init<const LayerParameter&>())
+    .add_property("blobs", bp::make_function(&Layer<Dtype>::blobs,
+          bp::return_internal_reference<>()))
+    .def("setup", &Layer<Dtype>::LayerSetUp)
+    .def("reshape", &Layer<Dtype>::Reshape)
+    .add_property("type", bp::make_function(&Layer<Dtype>::type));
+  bp::register_ptr_to_python<shared_ptr<Layer<Dtype> > >();
+
+  bp::class_<LayerParameter>("LayerParameter", bp::no_init);
+
+  bp::class_<Solver<Dtype>, shared_ptr<Solver<Dtype> >, boost::noncopyable>(
+    "Solver", bp::no_init)
+    .add_property("net", &Solver<Dtype>::net)
+    .add_property("test_nets", bp::make_function(&Solver<Dtype>::test_nets,
+          bp::return_internal_reference<>()))
+    .add_property("iter", &Solver<Dtype>::iter)
+    .def("solve", static_cast<void (Solver<Dtype>::*)(const char*)>(
+          &Solver<Dtype>::Solve), SolveOverloads())
+    .def("step", &Solver<Dtype>::Step)
+    .def("restore", &Solver<Dtype>::Restore);
+
+  bp::class_<SGDSolver<Dtype>, bp::bases<Solver<Dtype> >,
+    shared_ptr<SGDSolver<Dtype> >, boost::noncopyable>(
+        "SGDSolver", bp::init<string>());
+  bp::class_<NesterovSolver<Dtype>, bp::bases<Solver<Dtype> >,
+    shared_ptr<NesterovSolver<Dtype> >, boost::noncopyable>(
+        "NesterovSolver", bp::init<string>());
+  bp::class_<AdaGradSolver<Dtype>, bp::bases<Solver<Dtype> >,
+    shared_ptr<AdaGradSolver<Dtype> >, boost::noncopyable>(
+        "AdaGradSolver", bp::init<string>());
+
+  bp::def("get_solver", &GetSolverFromFile,
+      bp::return_value_policy<bp::manage_new_object>());
+
+  // vector wrappers for all the vector types we use
+  bp::class_<vector<shared_ptr<Blob<Dtype> > > >("BlobVec")
+    .def(bp::vector_indexing_suite<vector<shared_ptr<Blob<Dtype> > >, true>());
+  bp::class_<vector<Blob<Dtype>*> >("RawBlobVec")
+    .def(bp::vector_indexing_suite<vector<Blob<Dtype>*>, true>());
+  bp::class_<vector<shared_ptr<Layer<Dtype> > > >("LayerVec")
+    .def(bp::vector_indexing_suite<vector<shared_ptr<Layer<Dtype> > >, true>());
+  bp::class_<vector<string> >("StringVec")
+    .def(bp::vector_indexing_suite<vector<string> >());
+  bp::class_<vector<int> >("IntVec")
+    .def(bp::vector_indexing_suite<vector<int> >());
+  bp::class_<vector<shared_ptr<Net<Dtype> > > >("NetVec")
+    .def(bp::vector_indexing_suite<vector<shared_ptr<Net<Dtype> > >, true>());
+  bp::class_<vector<bool> >("BoolVec")
+    .def(bp::vector_indexing_suite<vector<bool> >());
+
+  // boost python expects a void (missing) return value, while import_array
+  // returns NULL for python3. import_array1() forces a void return value.
+  import_array1();
+}
+
+}  // namespace caffe
diff --git a/python/caffe/classifier.py b/python/caffe/classifier.py
new file mode 100644
index 0000000..537193d
--- /dev/null
+++ b/python/caffe/classifier.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+"""
+Classifier is an image classifier specialization of Net.
+"""
+
+import numpy as np
+
+import caffe
+
+
+class Classifier(caffe.Net):
+    """
+    Classifier extends Net for image class prediction
+    by scaling, center cropping, or oversampling.
+
+    Parameters
+    ----------
+    image_dims : dimensions to scale input for cropping/sampling.
+        Default is to scale to net input size for whole-image crop.
+    mean, input_scale, raw_scale, channel_swap: params for
+        preprocessing options.
+    """
+    def __init__(self, model_file, pretrained_file, image_dims=None,
+                 mean=None, input_scale=None, raw_scale=None,
+                 channel_swap=None):
+        caffe.Net.__init__(self, model_file, pretrained_file, caffe.TEST)
+
+        # configure pre-processing
+        in_ = self.inputs[0]
+        self.transformer = caffe.io.Transformer(
+            {in_: self.blobs[in_].data.shape})
+        self.transformer.set_transpose(in_, (2, 0, 1))
+        if mean is not None:
+            self.transformer.set_mean(in_, mean)
+        if input_scale is not None:
+            self.transformer.set_input_scale(in_, input_scale)
+        if raw_scale is not None:
+            self.transformer.set_raw_scale(in_, raw_scale)
+        if channel_swap is not None:
+            self.transformer.set_channel_swap(in_, channel_swap)
+
+        self.crop_dims = np.array(self.blobs[in_].data.shape[2:])
+        if not image_dims:
+            image_dims = self.crop_dims
+        self.image_dims = image_dims
+
+    def predict(self, inputs, oversample=True):
+        """
+        Predict classification probabilities of inputs.
+
+        Parameters
+        ----------
+        inputs : iterable of (H x W x K) input ndarrays.
+        oversample : boolean
+            average predictions across center, corners, and mirrors
+            when True (default). Center-only prediction when False.
+
+        Returns
+        -------
+        predictions: (N x C) ndarray of class probabilities for N images and C
+            classes.
+        """
+        # Scale to standardize input dimensions.
+        input_ = np.zeros((len(inputs),
+                           self.image_dims[0],
+                           self.image_dims[1],
+                           inputs[0].shape[2]),
+                          dtype=np.float32)
+        for ix, in_ in enumerate(inputs):
+            input_[ix] = caffe.io.resize_image(in_, self.image_dims)
+
+        if oversample:
+            # Generate center, corner, and mirrored crops.
+            input_ = caffe.io.oversample(input_, self.crop_dims)
+        else:
+            # Take center crop.
+            center = np.array(self.image_dims) / 2.0
+            crop = np.tile(center, (1, 2))[0] + np.concatenate([
+                -self.crop_dims / 2.0,
+                self.crop_dims / 2.0
+            ])
+            input_ = input_[:, crop[0]:crop[2], crop[1]:crop[3], :]
+
+        # Classify
+        caffe_in = np.zeros(np.array(input_.shape)[[0, 3, 1, 2]],
+                            dtype=np.float32)
+        for ix, in_ in enumerate(input_):
+            caffe_in[ix] = self.transformer.preprocess(self.inputs[0], in_)
+        out = self.forward_all(**{self.inputs[0]: caffe_in})
+        predictions = out[self.outputs[0]]
+
+        # For oversampling, average predictions across crops.
+        if oversample:
+            predictions = predictions.reshape((len(predictions) / 10, 10, -1))
+            predictions = predictions.mean(1)
+
+        return predictions
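+
+# A rough usage sketch (file names are assumptions, not part of this module):
+#
+#   import numpy as np
+#   import caffe
+#
+#   clf = caffe.Classifier('deploy.prototxt', 'weights.caffemodel',
+#                          image_dims=(256, 256), raw_scale=255,
+#                          channel_swap=(2, 1, 0),
+#                          mean=np.load('mean.npy').mean(1).mean(1))
+#   probs = clf.predict([caffe.io.load_image('cat.jpg')], oversample=True)
+#   print(probs[0].argmax())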
diff --git a/python/caffe/detector.py b/python/caffe/detector.py
new file mode 100644
index 0000000..75cd3b1
--- /dev/null
+++ b/python/caffe/detector.py
@@ -0,0 +1,216 @@
+#!/usr/bin/env python
+"""
+Do windowed detection by classifying a number of images/crops at once,
+optionally using the selective search window proposal method.
+
+This implementation follows ideas in
+    Ross Girshick, Jeff Donahue, Trevor Darrell, Jitendra Malik.
+    Rich feature hierarchies for accurate object detection and semantic
+    segmentation.
+    http://arxiv.org/abs/1311.2524
+
+The selective_search_ijcv_with_python code required for the selective search
+proposal mode is available at
+    https://github.com/sergeyk/selective_search_ijcv_with_python
+"""
+import numpy as np
+import os
+
+import caffe
+
+
+class Detector(caffe.Net):
+    """
+    Detector extends Net for windowed detection by a list of crops or
+    selective search proposals.
+
+    Parameters
+    ----------
+    mean, input_scale, raw_scale, channel_swap : params for preprocessing
+        options.
+    context_pad : amount of surrounding context to take s.t. a `context_pad`
+        sized border of pixels in the network input image is context, as in
+        R-CNN feature extraction.
+    """
+    def __init__(self, model_file, pretrained_file, mean=None,
+                 input_scale=None, raw_scale=None, channel_swap=None,
+                 context_pad=None):
+        caffe.Net.__init__(self, model_file, pretrained_file, caffe.TEST)
+
+        # configure pre-processing
+        in_ = self.inputs[0]
+        self.transformer = caffe.io.Transformer(
+            {in_: self.blobs[in_].data.shape})
+        self.transformer.set_transpose(in_, (2, 0, 1))
+        if mean is not None:
+            self.transformer.set_mean(in_, mean)
+        if input_scale is not None:
+            self.transformer.set_input_scale(in_, input_scale)
+        if raw_scale is not None:
+            self.transformer.set_raw_scale(in_, raw_scale)
+        if channel_swap is not None:
+            self.transformer.set_channel_swap(in_, channel_swap)
+
+        self.configure_crop(context_pad)
+
+    def detect_windows(self, images_windows):
+        """
+        Do windowed detection over given images and windows. Windows are
+        extracted then warped to the input dimensions of the net.
+
+        Parameters
+        ----------
+        images_windows: (image filename, window list) iterable.
+            Context padding is controlled by the `context_pad` value given to
+            the constructor.
+
+        Returns
+        -------
+        detections: list of {filename: image filename, window: crop coordinates,
+            prediction: prediction vector} dicts.
+        """
+        # Extract windows.
+        window_inputs = []
+        for image_fname, windows in images_windows:
+            image = caffe.io.load_image(image_fname).astype(np.float32)
+            for window in windows:
+                window_inputs.append(self.crop(image, window))
+
+        # Run through the net (warping windows to input dimensions).
+        in_ = self.inputs[0]
+        caffe_in = np.zeros((len(window_inputs), window_inputs[0].shape[2])
+                            + self.blobs[in_].data.shape[2:],
+                            dtype=np.float32)
+        for ix, window_in in enumerate(window_inputs):
+            caffe_in[ix] = self.transformer.preprocess(in_, window_in)
+        out = self.forward_all(**{in_: caffe_in})
+        predictions = out[self.outputs[0]].squeeze(axis=(2, 3))
+
+        # Package predictions with images and windows.
+        detections = []
+        ix = 0
+        for image_fname, windows in images_windows:
+            for window in windows:
+                detections.append({
+                    'window': window,
+                    'prediction': predictions[ix],
+                    'filename': image_fname
+                })
+                ix += 1
+        return detections
+
+    def detect_selective_search(self, image_fnames):
+        """
+        Do windowed detection over Selective Search proposals by extracting
+        the crop and warping to the input dimensions of the net.
+
+        Parameters
+        ----------
+        image_fnames: list
+
+        Returns
+        -------
+        detections: list of {filename: image filename, window: crop coordinates,
+            prediction: prediction vector} dicts.
+        """
+        import selective_search_ijcv_with_python as selective_search
+        # Make absolute paths so MATLAB can find the files.
+        image_fnames = [os.path.abspath(f) for f in image_fnames]
+        windows_list = selective_search.get_windows(
+            image_fnames,
+            cmd='selective_search_rcnn'
+        )
+        # Run windowed detection on the selective search list.
+        return self.detect_windows(zip(image_fnames, windows_list))
+
+    def crop(self, im, window):
+        """
+        Crop a window from the image for detection. Include surrounding context
+        according to the `context_pad` configuration.
+
+        Parameters
+        ----------
+        im: H x W x K image ndarray to crop.
+        window: bounding box coordinates as ymin, xmin, ymax, xmax.
+
+        Returns
+        -------
+        crop: cropped window.
+        """
+        # Crop window from the image.
+        crop = im[window[0]:window[2], window[1]:window[3]]
+
+        if self.context_pad:
+            box = window.copy()
+            crop_size = self.blobs[self.inputs[0]].width  # assumes square
+            scale = crop_size / (1. * crop_size - self.context_pad * 2)
+            # Crop a box + surrounding context.
+            half_h = (box[2] - box[0] + 1) / 2.
+            half_w = (box[3] - box[1] + 1) / 2.
+            center = (box[0] + half_h, box[1] + half_w)
+            scaled_dims = scale * np.array((-half_h, -half_w, half_h, half_w))
+            box = np.round(np.tile(center, 2) + scaled_dims)
+            full_h = box[2] - box[0] + 1
+            full_w = box[3] - box[1] + 1
+            scale_h = crop_size / full_h
+            scale_w = crop_size / full_w
+            pad_y = round(max(0, -box[0]) * scale_h)  # amount out-of-bounds
+            pad_x = round(max(0, -box[1]) * scale_w)
+
+            # Clip box to image dimensions.
+            im_h, im_w = im.shape[:2]
+            box = np.clip(box, 0., [im_h, im_w, im_h, im_w])
+            clip_h = box[2] - box[0] + 1
+            clip_w = box[3] - box[1] + 1
+            assert(clip_h > 0 and clip_w > 0)
+            crop_h = round(clip_h * scale_h)
+            crop_w = round(clip_w * scale_w)
+            if pad_y + crop_h > crop_size:
+                crop_h = crop_size - pad_y
+            if pad_x + crop_w > crop_size:
+                crop_w = crop_size - pad_x
+
+            # collect with context padding and place in input
+            # with mean padding
+            context_crop = im[box[0]:box[2], box[1]:box[3]]
+            context_crop = caffe.io.resize_image(context_crop, (crop_h, crop_w))
+            crop = np.ones(self.crop_dims, dtype=np.float32) * self.crop_mean
+            crop[pad_y:(pad_y + crop_h), pad_x:(pad_x + crop_w)] = context_crop
+
+        return crop
+
+    def configure_crop(self, context_pad):
+        """
+        Configure crop dimensions and amount of context for cropping.
+        If context is included, make the special input mean for context padding.
+
+        Parameters
+        ----------
+        context_pad : amount of context for cropping.
+        """
+        # crop dimensions
+        in_ = self.inputs[0]
+        tpose = self.transformer.transpose[in_]
+        inv_tpose = [tpose[t] for t in tpose]
+        self.crop_dims = np.array(self.blobs[in_].data.shape[1:])[inv_tpose]
+        #.transpose(inv_tpose)
+        # context padding
+        self.context_pad = context_pad
+        if self.context_pad:
+            in_ = self.inputs[0]
+            transpose = self.transformer.transpose.get(in_)
+            channel_order = self.transformer.channel_swap.get(in_)
+            raw_scale = self.transformer.raw_scale.get(in_)
+            # Padding context crops needs the mean in unprocessed input space.
+            mean = self.transformer.mean.get(in_)
+            if mean is not None:
+                inv_transpose = [transpose[t] for t in transpose]
+                crop_mean = mean.copy().transpose(inv_transpose)
+                if channel_order is not None:
+                    channel_order_inverse = [channel_order.index(i)
+                                             for i in range(crop_mean.shape[2])]
+                    crop_mean = crop_mean[:, :, channel_order_inverse]
+                if raw_scale is not None:
+                    crop_mean /= raw_scale
+                self.crop_mean = crop_mean
+            else:
+                self.crop_mean = np.zeros(self.crop_dims, dtype=np.float32)
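+
+# A rough usage sketch (file names are assumptions, not part of this module):
+#
+#   import numpy as np
+#   import caffe
+#
+#   det = caffe.Detector('deploy.prototxt', 'weights.caffemodel',
+#                        raw_scale=255, channel_swap=(2, 1, 0),
+#                        context_pad=16)
+#   windows = np.array([[0, 0, 227, 227]])  # ymin, xmin, ymax, xmax
+#   detections = det.detect_windows([('cat.jpg', windows)])
+#   print(detections[0]['prediction'].shape)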
diff --git a/python/caffe/draw.py b/python/caffe/draw.py
new file mode 100644
index 0000000..324929d
--- /dev/null
+++ b/python/caffe/draw.py
@@ -0,0 +1,213 @@
+"""
+Caffe network visualization: draw the NetParameter protobuffer.
+
+
+.. note::
+
+    This requires pydot>=1.0.2, which is not included in requirements.txt since
+    it requires graphviz and other prerequisites outside the scope of
+    Caffe.
+"""
+
+from caffe.proto import caffe_pb2
+import pydot
+
+# Internal layer and blob styles.
+LAYER_STYLE_DEFAULT = {'shape': 'record',
+                       'fillcolor': '#6495ED',
+                       'style': 'filled'}
+NEURON_LAYER_STYLE = {'shape': 'record',
+                      'fillcolor': '#90EE90',
+                      'style': 'filled'}
+BLOB_STYLE = {'shape': 'octagon',
+              'fillcolor': '#E0E0E0',
+              'style': 'filled'}
+
+
+def get_pooling_types_dict():
+    """Get dictionary mapping pooling type number to type name
+    """
+    desc = caffe_pb2.PoolingParameter.PoolMethod.DESCRIPTOR
+    d = {}
+    for k, v in desc.values_by_name.items():
+        d[v.number] = k
+    return d
+
+
+def get_edge_label(layer):
+    """Define edge label based on layer type.
+    """
+
+    if layer.type == 'Data':
+        edge_label = 'Batch ' + str(layer.data_param.batch_size)
+    elif layer.type == 'Convolution':
+        edge_label = str(layer.convolution_param.num_output)
+    elif layer.type == 'InnerProduct':
+        edge_label = str(layer.inner_product_param.num_output)
+    else:
+        edge_label = '""'
+
+    return edge_label
+
+
+def get_layer_label(layer, rankdir):
+    """Define node label based on layer type.
+
+    Parameters
+    ----------
+    layer : caffe_pb2.LayerParameter
+    rankdir : {'LR', 'TB', 'BT'}
+        Direction of graph layout.
+
+    Returns
+    -------
+    string :
+        A label for the current layer
+    """
+
+    if rankdir in ('TB', 'BT'):
+        # If graph orientation is vertical, horizontal space is free and
+        # vertical space is not; separate words with spaces
+        separator = ' '
+    else:
+        # If graph orientation is horizontal, vertical space is free and
+        # horizontal space is not; separate words with newlines
+        separator = '\\n'
+
+    if layer.type == 'Convolution':
+        # Outer double quotes needed or else colon characters don't parse
+        # properly
+        node_label = '"%s%s(%s)%skernel size: %d%sstride: %d%spad: %d"' %\
+                     (layer.name,
+                      separator,
+                      layer.type,
+                      separator,
+                      layer.convolution_param.kernel_size,
+                      separator,
+                      layer.convolution_param.stride,
+                      separator,
+                      layer.convolution_param.pad)
+    elif layer.type == 'Pooling':
+        pooling_types_dict = get_pooling_types_dict()
+        node_label = '"%s%s(%s %s)%skernel size: %d%sstride: %d%spad: %d"' %\
+                     (layer.name,
+                      separator,
+                      pooling_types_dict[layer.pooling_param.pool],
+                      layer.type,
+                      separator,
+                      layer.pooling_param.kernel_size,
+                      separator,
+                      layer.pooling_param.stride,
+                      separator,
+                      layer.pooling_param.pad)
+    else:
+        node_label = '"%s%s(%s)"' % (layer.name, separator, layer.type)
+    return node_label
+
+
+def choose_color_by_layertype(layertype):
+    """Define colors for nodes based on the layer type.
+    """
+    color = '#6495ED'  # Default
+    if layertype == 'Convolution':
+        color = '#FF5050'
+    elif layertype == 'Pooling':
+        color = '#FF9900'
+    elif layertype == 'InnerProduct':
+        color = '#CC33FF'
+    return color
+
+
+def get_pydot_graph(caffe_net, rankdir, label_edges=True):
+    """Create a data structure which represents the `caffe_net`.
+
+    Parameters
+    ----------
+    caffe_net : object
+    rankdir : {'LR', 'TB', 'BT'}
+        Direction of graph layout.
+    label_edges : boolean, optional
+        Label the edges (default is True).
+
+    Returns
+    -------
+    pydot graph object
+    """
+    pydot_graph = pydot.Dot(caffe_net.name,
+                            graph_type='digraph',
+                            rankdir=rankdir)
+    pydot_nodes = {}
+    pydot_edges = []
+    for layer in caffe_net.layer:
+        node_label = get_layer_label(layer, rankdir)
+        node_name = "%s_%s" % (layer.name, layer.type)
+        if (len(layer.bottom) == 1 and len(layer.top) == 1 and
+           layer.bottom[0] == layer.top[0]):
+            # We have an in-place neuron layer.
+            pydot_nodes[node_name] = pydot.Node(node_label,
+                                                **NEURON_LAYER_STYLE)
+        else:
+            layer_style = LAYER_STYLE_DEFAULT
+            layer_style['fillcolor'] = choose_color_by_layertype(layer.type)
+            pydot_nodes[node_name] = pydot.Node(node_label, **layer_style)
+        for bottom_blob in layer.bottom:
+            pydot_nodes[bottom_blob + '_blob'] = pydot.Node('%s' % bottom_blob,
+                                                            **BLOB_STYLE)
+            edge_label = '""'
+            pydot_edges.append({'src': bottom_blob + '_blob',
+                                'dst': node_name,
+                                'label': edge_label})
+        for top_blob in layer.top:
+            pydot_nodes[top_blob + '_blob'] = pydot.Node('%s' % (top_blob))
+            if label_edges:
+                edge_label = get_edge_label(layer)
+            else:
+                edge_label = '""'
+            pydot_edges.append({'src': node_name,
+                                'dst': top_blob + '_blob',
+                                'label': edge_label})
+    # Now, add the nodes and edges to the graph.
+    for node in pydot_nodes.values():
+        pydot_graph.add_node(node)
+    for edge in pydot_edges:
+        pydot_graph.add_edge(
+            pydot.Edge(pydot_nodes[edge['src']],
+                       pydot_nodes[edge['dst']],
+                       label=edge['label']))
+    return pydot_graph
+
+
+def draw_net(caffe_net, rankdir, ext='png'):
+    """Draws a caffe net and returns the image string encoded using the given
+    extension.
+
+    Parameters
+    ----------
+    caffe_net : a caffe.proto.caffe_pb2.NetParameter protocol buffer.
+    ext : string, optional
+        The image extension (the default is 'png').
+
+    Returns
+    -------
+    string :
+        Image of the net, encoded using the given extension.
+    """
+    return get_pydot_graph(caffe_net, rankdir).create(format=ext)
+
+
+def draw_net_to_file(caffe_net, filename, rankdir='LR'):
+    """Draws a caffe net, and saves it to file using the format given as the
+    file extension. Use '.raw' to output raw text that you can manually feed
+    to graphviz to draw graphs.
+
+    Parameters
+    ----------
+    caffe_net : a caffe.proto.caffe_pb2.NetParameter protocol buffer.
+    filename : string
+        The path to a file where the networks visualization will be stored.
+    rankdir : {'LR', 'TB', 'BT'}
+        Direction of graph layout.
+    """
+    ext = filename[filename.rfind('.')+1:]
+    with open(filename, 'wb') as fid:
+        fid.write(draw_net(caffe_net, rankdir, ext))
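+
+# A rough usage sketch (file names are assumptions, not part of this module):
+#
+#   from google.protobuf import text_format
+#   from caffe.proto import caffe_pb2
+#   from caffe import draw
+#
+#   net = caffe_pb2.NetParameter()
+#   text_format.Merge(open('train_val.prototxt').read(), net)
+#   draw.draw_net_to_file(net, 'net.png', rankdir='LR')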
diff --git a/python/caffe/imagenet/ilsvrc_2012_mean.npy b/python/caffe/imagenet/ilsvrc_2012_mean.npy
new file mode 100644
index 0000000..666082c
Binary files /dev/null and b/python/caffe/imagenet/ilsvrc_2012_mean.npy differ
diff --git a/python/caffe/io.py b/python/caffe/io.py
new file mode 100644
index 0000000..fc96266
--- /dev/null
+++ b/python/caffe/io.py
@@ -0,0 +1,379 @@
+import numpy as np
+import skimage.io
+from scipy.ndimage import zoom
+from skimage.transform import resize
+
+try:
+    # Python3 will most likely not be able to load protobuf
+    from caffe.proto import caffe_pb2
+except:
+    import sys
+    if sys.version_info >= (3, 0):
+        print("Failed to include caffe_pb2, things might go wrong!")
+    else:
+        raise
+
+
+## proto / datum / ndarray conversion
+def blobproto_to_array(blob, return_diff=False):
+    """
+    Convert a blob proto to an array. By default, we will just return the data,
+    unless return_diff is True, in which case we will return the diff.
+    """
+    if return_diff:
+        return np.array(blob.diff).reshape(
+            blob.num, blob.channels, blob.height, blob.width)
+    else:
+        return np.array(blob.data).reshape(
+            blob.num, blob.channels, blob.height, blob.width)
+
+
+def array_to_blobproto(arr, diff=None):
+    """Converts a 4-dimensional array to blob proto. If diff is given, also
+    convert the diff. You need to make sure that arr and diff have the same
+    shape; this function does not perform a sanity check.
+    """
+    if arr.ndim != 4:
+        raise ValueError('Incorrect array shape.')
+    blob = caffe_pb2.BlobProto()
+    blob.num, blob.channels, blob.height, blob.width = arr.shape
+    blob.data.extend(arr.astype(float).flat)
+    if diff is not None:
+        blob.diff.extend(diff.astype(float).flat)
+    return blob
+
+
+def arraylist_to_blobprotovecor_str(arraylist):
+    """Converts a list of arrays to a serialized blobprotovec, which could be
+    then passed to a network for processing.
+    """
+    vec = caffe_pb2.BlobProtoVector()
+    vec.blobs.extend([array_to_blobproto(arr) for arr in arraylist])
+    return vec.SerializeToString()
+
+
+def blobprotovector_str_to_arraylist(str):
+    """Converts a serialized blobprotovec to a list of arrays.
+    """
+    vec = caffe_pb2.BlobProtoVector()
+    vec.ParseFromString(str)
+    return [blobproto_to_array(blob) for blob in vec.blobs]
+
+
+def array_to_datum(arr, label=0):
+    """Converts a 3-dimensional array to datum. If the array has dtype uint8,
+    the output data will be encoded as a string. Otherwise, the output data
+    will be stored in float format.
+    """
+    if arr.ndim != 3:
+        raise ValueError('Incorrect array shape.')
+    datum = caffe_pb2.Datum()
+    datum.channels, datum.height, datum.width = arr.shape
+    if arr.dtype == np.uint8:
+        datum.data = arr.tostring()
+    else:
+        datum.float_data.extend(arr.flat)
+    datum.label = label
+    return datum
+
+
+def datum_to_array(datum):
+    """Converts a datum to an array. Note that the label is not returned,
+    as one can easily get it by calling datum.label.
+    """
+    if len(datum.data):
+        return np.fromstring(datum.data, dtype=np.uint8).reshape(
+            datum.channels, datum.height, datum.width)
+    else:
+        return np.array(datum.float_data).astype(float).reshape(
+            datum.channels, datum.height, datum.width)
+
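+# A rough round-trip sketch for the conversions above (illustrative only):
+#
+#   arr = np.random.rand(3, 4, 5).astype(np.float32)
+#   datum = array_to_datum(arr, label=1)
+#   arr_back = datum_to_array(datum)   # float data comes back as float64
+#   assert arr_back.shape == (3, 4, 5)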
+
+## Pre-processing
+
+class Transformer:
+    """
+    Transform input for feeding into a Net.
+
+    Note: this is mostly for illustrative purposes and it is likely better
+    to define your own input preprocessing routine for your needs.
+
+    Parameters
+    ----------
+    inputs : dict of {input blob name: input blob shape} for which to prepare data
+    """
+    def __init__(self, inputs):
+        self.inputs = inputs
+        self.transpose = {}
+        self.channel_swap = {}
+        self.raw_scale = {}
+        self.mean = {}
+        self.input_scale = {}
+
+    def __check_input(self, in_):
+        if in_ not in self.inputs:
+            raise Exception('{} is not one of the net inputs: {}'.format(
+                in_, self.inputs))
+
+    def preprocess(self, in_, data):
+        """
+        Format input for Caffe:
+        - convert to single
+        - resize to input dimensions (preserving number of channels)
+        - transpose dimensions to K x H x W
+        - reorder channels (for instance color to BGR)
+        - scale raw input (e.g. from [0, 1] to [0, 255] for ImageNet models)
+        - subtract mean
+        - scale feature
+
+        Parameters
+        ----------
+        in_ : name of input blob to preprocess for
+        data : (H' x W' x K) ndarray
+
+        Returns
+        -------
+        caffe_in : (K x H x W) ndarray for input to a Net
+        """
+        self.__check_input(in_)
+        caffe_in = data.astype(np.float32, copy=False)
+        transpose = self.transpose.get(in_)
+        channel_swap = self.channel_swap.get(in_)
+        raw_scale = self.raw_scale.get(in_)
+        mean = self.mean.get(in_)
+        input_scale = self.input_scale.get(in_)
+        in_dims = self.inputs[in_][2:]
+        if caffe_in.shape[:2] != in_dims:
+            caffe_in = resize_image(caffe_in, in_dims)
+        if transpose is not None:
+            caffe_in = caffe_in.transpose(transpose)
+        if channel_swap is not None:
+            caffe_in = caffe_in[channel_swap, :, :]
+        if raw_scale is not None:
+            caffe_in *= raw_scale
+        if mean is not None:
+            caffe_in -= mean
+        if input_scale is not None:
+            caffe_in *= input_scale
+        return caffe_in
+
+    def deprocess(self, in_, data):
+        """
+        Invert Caffe formatting; see preprocess().
+        """
+        self.__check_input(in_)
+        decaf_in = data.copy().squeeze()
+        transpose = self.transpose.get(in_)
+        channel_swap = self.channel_swap.get(in_)
+        raw_scale = self.raw_scale.get(in_)
+        mean = self.mean.get(in_)
+        input_scale = self.input_scale.get(in_)
+        if input_scale is not None:
+            decaf_in /= input_scale
+        if mean is not None:
+            decaf_in += mean
+        if raw_scale is not None:
+            decaf_in /= raw_scale
+        if channel_swap is not None:
+            decaf_in = decaf_in[channel_swap, :, :]
+        if transpose is not None:
+            decaf_in = decaf_in.transpose([transpose[t] for t in transpose])
+        return decaf_in
+
+    def set_transpose(self, in_, order):
+        """
+        Set the order in which to transpose the input dimensions, e.g.
+        (2, 0, 1) to convert an H x W x K image into the K x H x W layout
+        that Caffe uses internally.
+
+        Parameters
+        ----------
+        in_ : which input to assign this transpose order
+        order : the order to transpose the dimensions
+        """
+        self.__check_input(in_)
+        if len(order) != len(self.inputs[in_]) - 1:
+            raise Exception('Transpose order needs to have the same number of '
+                            'dimensions as the input.')
+        self.transpose[in_] = order
+
+    def set_channel_swap(self, in_, order):
+        """
+        Set the input channel order for e.g. RGB to BGR conversion
+        as needed for the reference ImageNet model.
+        N.B. this assumes the channels are the first dimension AFTER transpose.
+
+        Parameters
+        ----------
+        in_ : which input to assign this channel order
+        order : the order to take the channels.
+            (2,1,0) maps RGB to BGR for example.
+        """
+        self.__check_input(in_)
+        if len(order) != self.inputs[in_][1]:
+            raise Exception('Channel swap needs to have the same number of '
+                            'dimensions as the input channels.')
+        self.channel_swap[in_] = order
+
+    def set_raw_scale(self, in_, scale):
+        """
+        Set the scale of raw features s.t. the input blob = input * scale.
+        While Python represents images in [0, 1], certain Caffe models
+        like CaffeNet and AlexNet represent images in [0, 255] so the raw_scale
+        of these models must be 255.
+
+        Parameters
+        ----------
+        in_ : which input to assign this scale factor
+        scale : scale coefficient
+        """
+        self.__check_input(in_)
+        self.raw_scale[in_] = scale
+
+    def set_mean(self, in_, mean):
+        """
+        Set the mean to subtract for centering the data.
+
+        Parameters
+        ----------
+        in_ : which input to assign this mean.
+        mean : mean ndarray (input dimensional or broadcastable)
+        """
+        self.__check_input(in_)
+        ms = mean.shape
+        if mean.ndim == 1:
+            # broadcast channels
+            if ms[0] != self.inputs[in_][1]:
+                raise ValueError('Mean channels incompatible with input.')
+            mean = mean[:, np.newaxis, np.newaxis]
+        else:
+            # elementwise mean
+            if len(ms) == 2:
+                ms = (1,) + ms
+            if len(ms) != 3:
+                raise ValueError('Mean shape invalid')
+            if ms != self.inputs[in_][1:]:
+                raise ValueError('Mean shape incompatible with input shape.')
+        self.mean[in_] = mean
+
+    def set_input_scale(self, in_, scale):
+        """
+        Set the scale of preprocessed inputs s.t. the blob = blob * scale.
+        N.B. input_scale is done AFTER mean subtraction and other preprocessing
+        while raw_scale is done BEFORE.
+
+        Parameters
+        ----------
+        in_ : which input to assign this scale factor
+        scale : scale coefficient
+        """
+        self.__check_input(in_)
+        self.input_scale[in_] = scale
+
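+# A rough usage sketch of Transformer (shapes and values are illustrative
+# assumptions, not part of this module):
+#
+#   t = Transformer({'data': (1, 3, 227, 227)})
+#   t.set_transpose('data', (2, 0, 1))     # H x W x K -> K x H x W
+#   t.set_channel_swap('data', (2, 1, 0))  # RGB -> BGR
+#   t.set_raw_scale('data', 255.0)         # [0, 1] -> [0, 255]
+#   t.set_mean('data', np.array([104.0, 117.0, 123.0]))
+#   blob_data = t.preprocess('data', load_image('cat.jpg'))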
+
+## Image IO
+
+def load_image(filename, color=True):
+    """
+    Load an image converting from grayscale or alpha as needed.
+
+    Parameters
+    ----------
+    filename : string
+    color : boolean
+        flag for color format. True (default) loads as RGB while False
+        loads as intensity (if image is already grayscale).
+
+    Returns
+    -------
+    image : an image with type np.float32 in range [0, 1]
+        of size (H x W x 3) in RGB or
+        of size (H x W x 1) in grayscale.
+    """
+    img = skimage.img_as_float(skimage.io.imread(filename)).astype(np.float32)
+    if img.ndim == 2:
+        img = img[:, :, np.newaxis]
+        if color:
+            img = np.tile(img, (1, 1, 3))
+    elif img.shape[2] == 4:
+        img = img[:, :, :3]
+    return img
+
+
+def resize_image(im, new_dims, interp_order=1):
+    """
+    Resize an image array with interpolation.
+
+    Parameters
+    ----------
+    im : (H x W x K) ndarray
+    new_dims : (height, width) tuple of new dimensions.
+    interp_order : interpolation order, default is linear.
+
+    Returns
+    -------
+    im : resized ndarray with shape (new_dims[0], new_dims[1], K)
+    """
+    if im.shape[-1] == 1 or im.shape[-1] == 3:
+        im_min, im_max = im.min(), im.max()
+        if im_max > im_min:
+            # skimage is fast but only understands {1,3} channel images
+            # in [0, 1].
+            im_std = (im - im_min) / (im_max - im_min)
+            resized_std = resize(im_std, new_dims, order=interp_order)
+            resized_im = resized_std * (im_max - im_min) + im_min
+        else:
+            # the image is a constant -- avoid divide by 0
+            ret = np.empty((new_dims[0], new_dims[1], im.shape[-1]),
+                           dtype=np.float32)
+            ret.fill(im_min)
+            return ret
+    else:
+        # ndimage interpolates anything but more slowly.
+        scale = tuple(np.array(new_dims) / np.array(im.shape[:2]))
+        resized_im = zoom(im, scale + (1,), order=interp_order)
+    return resized_im.astype(np.float32)
+
+
+def oversample(images, crop_dims):
+    """
+    Crop images into the four corners, center, and their mirrored versions.
+
+    Parameters
+    ----------
+    images : iterable of (H x W x K) ndarrays
+    crop_dims : (height, width) tuple for the crops.
+
+    Returns
+    -------
+    crops : (10*N x H x W x K) ndarray of crops for number of inputs N.
+    """
+    # Dimensions and center.
+    im_shape = np.array(images[0].shape)
+    crop_dims = np.array(crop_dims)
+    im_center = im_shape[:2] / 2.0
+
+    # Make crop coordinates
+    h_indices = (0, im_shape[0] - crop_dims[0])
+    w_indices = (0, im_shape[1] - crop_dims[1])
+    crops_ix = np.empty((5, 4), dtype=int)
+    curr = 0
+    for i in h_indices:
+        for j in w_indices:
+            crops_ix[curr] = (i, j, i + crop_dims[0], j + crop_dims[1])
+            curr += 1
+    crops_ix[4] = np.tile(im_center, (1, 2)) + np.concatenate([
+        -crop_dims / 2.0,
+         crop_dims / 2.0
+    ])
+    crops_ix = np.tile(crops_ix, (2, 1))
+
+    # Extract crops
+    crops = np.empty((10 * len(images), crop_dims[0], crop_dims[1],
+                      im_shape[-1]), dtype=np.float32)
+    ix = 0
+    for im in images:
+        for crop in crops_ix:
+            crops[ix] = im[crop[0]:crop[2], crop[1]:crop[3], :]
+            ix += 1
+        crops[ix-5:ix] = crops[ix-5:ix, :, ::-1, :]  # flip for mirrors
+    return crops
diff --git a/python/caffe/net_spec.py b/python/caffe/net_spec.py
new file mode 100644
index 0000000..1b4814a
--- /dev/null
+++ b/python/caffe/net_spec.py
@@ -0,0 +1,204 @@
+"""Python net specification.
+
+This module provides a way to write nets directly in Python, using a natural,
+functional style. See examples/python_nets/caffenet.py for an example.
+
+Currently this works as a thin wrapper around the Python protobuf interface,
+with layers and parameters automatically generated for the "layers" and
+"params" pseudo-modules, which are actually objects using __getattr__ magic
+to generate protobuf messages.
+
+Note that when using to_proto or Top.to_proto, names of intermediate blobs will
+be automatically generated. To explicitly specify blob names, use the NetSpec
+class -- assign to its attributes directly to name layers, and call
+NetSpec.to_proto to serialize all assigned layers.
+
+This interface is expected to continue to evolve as Caffe gains new capabilities
+for specifying nets. In particular, the automatically generated layer names
+are not guaranteed to be forward-compatible.
+"""
+
+from collections import OrderedDict
+
+from .proto import caffe_pb2
+from google import protobuf
+import six
+
+
+def param_name_dict():
+    """Find out the correspondence between layer names and parameter names."""
+
+    layer = caffe_pb2.LayerParameter()
+    # get all parameter names (typically underscore case) and corresponding
+    # type names (typically camel case), which contain the layer names
+    # (note that not all parameters correspond to layers, but we'll ignore that)
+    param_names = [s for s in dir(layer) if s.endswith('_param')]
+    param_type_names = [type(getattr(layer, s)).__name__ for s in param_names]
+    # strip the final '_param' or 'Parameter'
+    param_names = [s[:-len('_param')] for s in param_names]
+    param_type_names = [s[:-len('Parameter')] for s in param_type_names]
+    return dict(zip(param_type_names, param_names))
+
+
+def to_proto(*tops):
+    """Generate a NetParameter that contains all layers needed to compute
+    all arguments."""
+
+    if not isinstance(tops, tuple):
+        tops = (tops,)
+    layers = OrderedDict()
+    autonames = {}
+    for top in tops:
+        top.fn._to_proto(layers, {}, autonames)
+    net = caffe_pb2.NetParameter()
+    net.layer.extend(layers.values())
+    return net
+
+
+def assign_proto(proto, name, val):
+    """Assign a Python object to a protobuf message, based on the Python
+    type (in recursive fashion). Lists become repeated fields/messages, dicts
+    become messages, and other types are assigned directly."""
+
+    if isinstance(val, list):
+        if isinstance(val[0], dict):
+            for item in val:
+                proto_item = getattr(proto, name).add()
+                for k, v in six.iteritems(item):
+                    assign_proto(proto_item, k, v)
+        else:
+            getattr(proto, name).extend(val)
+    elif isinstance(val, dict):
+        for k, v in six.iteritems(val):
+            assign_proto(getattr(proto, name), k, v)
+    else:
+        setattr(proto, name, val)
+
+
+class Top(object):
+    """A Top specifies a single output blob (which could be one of several
+    produced by a layer)."""
+
+    def __init__(self, fn, n):
+        self.fn = fn
+        self.n = n
+
+    def to_proto(self):
+        """Generate a NetParameter that contains all layers needed to compute
+        this top."""
+
+        return to_proto(self)
+
+
+class Function(object):
+    """A Function specifies a layer, its parameters, and its inputs (which
+    are Tops from other layers)."""
+
+    def __init__(self, type_name, inputs, params):
+        self.type_name = type_name
+        self.inputs = inputs
+        self.params = params
+        self.ntop = self.params.get('ntop', 1)
+        # use del to make sure kwargs are not double-processed as layer params
+        if 'ntop' in self.params:
+            del self.params['ntop']
+        self.in_place = self.params.get('in_place', False)
+        if 'in_place' in self.params:
+            del self.params['in_place']
+        self.tops = tuple(Top(self, n) for n in range(self.ntop))
+
+    def _get_name(self, top, names, autonames):
+        if top not in names:
+            n = autonames.setdefault(top.fn.type_name, 1)
+            autonames[top.fn.type_name] += 1
+            names[top] = top.fn.type_name + str(n)
+        return names[top]
+
+    def _to_proto(self, layers, names, autonames):
+        if self in layers:
+            return
+        bottom_names = []
+        for inp in self.inputs:
+            inp.fn._to_proto(layers, names, autonames)
+            bottom_names.append(layers[inp.fn].top[inp.n])
+        layer = caffe_pb2.LayerParameter()
+        layer.type = self.type_name
+        layer.bottom.extend(bottom_names)
+
+        if self.in_place:
+            layer.top.extend(layer.bottom)
+        else:
+            for top in self.tops:
+                layer.top.append(self._get_name(top, names, autonames))
+        layer.name = self._get_name(self.tops[0], names, autonames)
+
+        for k, v in six.iteritems(self.params):
+            # special case to handle generic *params
+            if k.endswith('param'):
+                assign_proto(layer, k, v)
+            else:
+                try:
+                    assign_proto(getattr(layer,
+                        _param_names[self.type_name] + '_param'), k, v)
+                except (AttributeError, KeyError):
+                    assign_proto(layer, k, v)
+
+        layers[self] = layer
+
+
+class NetSpec(object):
+    """A NetSpec contains a set of Tops (assigned directly as attributes).
+    Calling NetSpec.to_proto generates a NetParameter containing all of the
+    layers needed to produce all of the assigned Tops, using the assigned
+    names."""
+
+    def __init__(self):
+        super(NetSpec, self).__setattr__('tops', OrderedDict())
+
+    def __setattr__(self, name, value):
+        self.tops[name] = value
+
+    def __getattr__(self, name):
+        return self.tops[name]
+
+    def to_proto(self):
+        names = {v: k for k, v in six.iteritems(self.tops)}
+        autonames = {}
+        layers = OrderedDict()
+        for name, top in six.iteritems(self.tops):
+            top.fn._to_proto(layers, names, autonames)
+        net = caffe_pb2.NetParameter()
+        net.layer.extend(layers.values())
+        return net
+
+
+class Layers(object):
+    """A Layers object is a pseudo-module which generates functions that specify
+    layers; e.g., Layers().Convolution(bottom, kernel_size=3) will produce a Top
+    specifying a 3x3 convolution applied to bottom."""
+
+    def __getattr__(self, name):
+        def layer_fn(*args, **kwargs):
+            fn = Function(name, args, kwargs)
+            if fn.ntop == 1:
+                return fn.tops[0]
+            else:
+                return fn.tops
+        return layer_fn
+
+
+class Parameters(object):
+    """A Parameters object is a pseudo-module which generates constants used
+    in layer parameters; e.g., Parameters().Pooling.MAX is the value used
+    to specify max pooling."""
+
+    def __getattr__(self, name):
+        class Param:
+            def __getattr__(self, param_name):
+                return getattr(getattr(caffe_pb2, name + 'Parameter'), param_name)
+        return Param()
+
+
+_param_names = param_name_dict()
+layers = Layers()
+params = Parameters()
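
A minimal sketch of how this interface is typically used, in the same style as the net_spec tests later in this commit (DummyData, InnerProduct and SoftmaxWithLoss are standard Caffe layer types):

    import caffe
    from caffe import layers as L, params as P

    n = caffe.NetSpec()
    n.data, n.label = L.DummyData(shape=[dict(dim=[32, 1, 28, 28]),
                                         dict(dim=[32, 1, 1, 1])], ntop=2)
    n.ip = L.InnerProduct(n.data, num_output=10,
                          weight_filler=dict(type='xavier'))
    n.loss = L.SoftmaxWithLoss(n.ip, n.label)
    print(n.to_proto())  # prints the assigned layers as NetParameter text
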
diff --git a/python/caffe/pycaffe.py b/python/caffe/pycaffe.py
new file mode 100644
index 0000000..e8a676a
--- /dev/null
+++ b/python/caffe/pycaffe.py
@@ -0,0 +1,281 @@
+"""
+Wrap the internal caffe C++ module (_caffe.so) with a clean, Pythonic
+interface.
+"""
+
+from collections import OrderedDict
+try:
+    from itertools import izip_longest
+except ImportError:
+    from itertools import zip_longest as izip_longest
+import numpy as np
+
+from ._caffe import Net, SGDSolver
+import caffe.io
+
+# We directly update methods from Net here (rather than using composition or
+# inheritance) so that nets created by caffe (e.g., by SGDSolver) will
+# automatically have the improved interface.
+
+
+@property
+def _Net_blobs(self):
+    """
+    An OrderedDict (bottom to top, i.e., input to output) of network
+    blobs indexed by name
+    """
+    return OrderedDict(zip(self._blob_names, self._blobs))
+
+
+@property
+def _Net_params(self):
+    """
+    An OrderedDict (bottom to top, i.e., input to output) of network
+    parameters indexed by name; each is a list of multiple blobs (e.g.,
+    weights and biases)
+    """
+    return OrderedDict([(name, lr.blobs)
+                        for name, lr in zip(self._layer_names, self.layers)
+                        if len(lr.blobs) > 0])
+
+
+@property
+def _Net_inputs(self):
+    return [list(self.blobs.keys())[i] for i in self._inputs]
+
+
+@property
+def _Net_outputs(self):
+    return [list(self.blobs.keys())[i] for i in self._outputs]
+
+
+def _Net_forward(self, blobs=None, start=None, end=None, **kwargs):
+    """
+    Forward pass: prepare inputs and run the net forward.
+
+    Parameters
+    ----------
+    blobs : list of blobs to return in addition to output blobs.
+    kwargs : Keys are input blob names and values are blob ndarrays.
+             For formatting inputs for Caffe, see Net.preprocess().
+             If None, input is taken from data layers.
+    start : optional name of layer at which to begin the forward pass
+    end : optional name of layer at which to finish the forward pass
+          (inclusive)
+
+    Returns
+    -------
+    outs : {blob name: blob ndarray} dict.
+    """
+    if blobs is None:
+        blobs = []
+
+    if start is not None:
+        start_ind = list(self._layer_names).index(start)
+    else:
+        start_ind = 0
+
+    if end is not None:
+        end_ind = list(self._layer_names).index(end)
+        outputs = set([end] + blobs)
+    else:
+        end_ind = len(self.layers) - 1
+        outputs = set(self.outputs + blobs)
+
+    if kwargs:
+        if set(kwargs.keys()) != set(self.inputs):
+            raise Exception('Input blob arguments do not match net inputs.')
+        # Set input according to defined shapes and make arrays single and
+        # C-contiguous as Caffe expects.
+        for in_, blob in kwargs.iteritems():
+            if blob.shape[0] != self.blobs[in_].num:
+                raise Exception('Input is not batch sized')
+            self.blobs[in_].data[...] = blob
+
+    self._forward(start_ind, end_ind)
+
+    # Unpack blobs to extract
+    return {out: self.blobs[out].data for out in outputs}
+
+
+def _Net_backward(self, diffs=None, start=None, end=None, **kwargs):
+    """
+    Backward pass: prepare diffs and run the net backward.
+
+    Parameters
+    ----------
+    diffs : list of diffs to return in addition to bottom diffs.
+    kwargs : Keys are output blob names and values are diff ndarrays.
+            If None, top diffs are taken from forward loss.
+    start : optional name of layer at which to begin the backward pass
+    end : optional name of layer at which to finish the backward pass
+        (inclusive)
+
+    Returns
+    -------
+    outs: {blob name: diff ndarray} dict.
+    """
+    if diffs is None:
+        diffs = []
+
+    if start is not None:
+        start_ind = list(self._layer_names).index(start)
+    else:
+        start_ind = len(self.layers) - 1
+
+    if end is not None:
+        end_ind = list(self._layer_names).index(end)
+        outputs = set([end] + diffs)
+    else:
+        end_ind = 0
+        outputs = set(self.inputs + diffs)
+
+    if kwargs:
+        if set(kwargs.keys()) != set(self.outputs):
+            raise Exception('Top diff arguments do not match net outputs.')
+        # Set top diffs according to defined shapes and make arrays single and
+        # C-contiguous as Caffe expects.
+        for top, diff in kwargs.iteritems():
+            if diff.ndim != 4:
+                raise Exception('{} diff is not 4-d'.format(top))
+            if diff.shape[0] != self.blobs[top].num:
+                raise Exception('Diff is not batch sized')
+            self.blobs[top].diff[...] = diff
+
+    self._backward(start_ind, end_ind)
+
+    # Unpack diffs to extract
+    return {out: self.blobs[out].diff for out in outputs}
+
+
+def _Net_forward_all(self, blobs=None, **kwargs):
+    """
+    Run net forward in batches.
+
+    Parameters
+    ----------
+    blobs : list of blobs to extract as in forward()
+    kwargs : Keys are input blob names and values are blob ndarrays.
+             Refer to forward().
+
+    Returns
+    -------
+    all_outs : {blob name: list of blobs} dict.
+    """
+    # Collect outputs from batches
+    all_outs = {out: [] for out in set(self.outputs + (blobs or []))}
+    for batch in self._batch(kwargs):
+        outs = self.forward(blobs=blobs, **batch)
+        for out, out_blob in outs.iteritems():
+            all_outs[out].extend(out_blob.copy())
+    # Package in ndarray.
+    for out in all_outs:
+        all_outs[out] = np.asarray(all_outs[out])
+    # Discard padding.
+    pad = len(all_outs.itervalues().next()) - len(kwargs.itervalues().next())
+    if pad:
+        for out in all_outs:
+            all_outs[out] = all_outs[out][:-pad]
+    return all_outs
+
+
+def _Net_forward_backward_all(self, blobs=None, diffs=None, **kwargs):
+    """
+    Run net forward + backward in batches.
+
+    Parameters
+    ----------
+    blobs: list of blobs to extract as in forward()
+    diffs: list of diffs to extract as in backward()
+    kwargs: Keys are input (for forward) and output (for backward) blob names
+            and values are ndarrays. Refer to forward() and backward().
+            Prefilled variants are called for lack of input or output blobs.
+
+    Returns
+    -------
+    all_blobs: {blob name: blob ndarray} dict.
+    all_diffs: {blob name: diff ndarray} dict.
+    """
+    # Batch blobs and diffs.
+    all_outs = {out: [] for out in set(self.outputs + (blobs or []))}
+    all_diffs = {diff: [] for diff in set(self.inputs + (diffs or []))}
+    forward_batches = self._batch({in_: kwargs[in_]
+                                   for in_ in self.inputs if in_ in kwargs})
+    backward_batches = self._batch({out: kwargs[out]
+                                    for out in self.outputs if out in kwargs})
+    # Collect outputs from batches (and heed lack of forward/backward batches).
+    for fb, bb in izip_longest(forward_batches, backward_batches, fillvalue={}):
+        batch_blobs = self.forward(blobs=blobs, **fb)
+        batch_diffs = self.backward(diffs=diffs, **bb)
+        for out, out_blobs in batch_blobs.iteritems():
+            all_outs[out].extend(out_blobs)
+        for diff, out_diffs in batch_diffs.iteritems():
+            all_diffs[diff].extend(out_diffs)
+    # Package in ndarray.
+    for out, diff in zip(all_outs, all_diffs):
+        all_outs[out] = np.asarray(all_outs[out])
+        all_diffs[diff] = np.asarray(all_diffs[diff])
+    # Discard padding at the end and package in ndarray.
+    pad = len(all_outs.itervalues().next()) - len(kwargs.itervalues().next())
+    if pad:
+        for out, diff in zip(all_outs, all_diffs):
+            all_outs[out] = all_outs[out][:-pad]
+            all_diffs[diff] = all_diffs[diff][:-pad]
+    return all_outs, all_diffs
+
+
+def _Net_set_input_arrays(self, data, labels):
+    """
+    Set input arrays of the in-memory MemoryDataLayer.
+    (Note: this is only for networks declared with the memory data layer.)
+    """
+    if labels.ndim == 1:
+        labels = np.ascontiguousarray(labels[:, np.newaxis, np.newaxis,
+                                             np.newaxis])
+    return self._set_input_arrays(data, labels)
+
+
+def _Net_batch(self, blobs):
+    """
+    Batch blob lists according to net's batch size.
+
+    Parameters
+    ----------
+    blobs: Keys are blob names and values are lists of blobs (of any length).
+           Naturally, all the lists should have the same length.
+
+    Yields
+    ------
+    batch: {blob name: list of blobs} dict for a single batch.
+    """
+    num = len(blobs.itervalues().next())
+    batch_size = self.blobs.itervalues().next().num
+    remainder = num % batch_size
+    num_batches = num / batch_size
+
+    # Yield full batches.
+    for b in range(num_batches):
+        i = b * batch_size
+        yield {name: blobs[name][i:i + batch_size] for name in blobs}
+
+    # Yield last padded batch, if any.
+    if remainder > 0:
+        padded_batch = {}
+        for name in blobs:
+            padding = np.zeros((batch_size - remainder,)
+                               + blobs[name].shape[1:])
+            padded_batch[name] = np.concatenate([blobs[name][-remainder:],
+                                                 padding])
+        yield padded_batch
+
+# Attach methods to Net.
+Net.blobs = _Net_blobs
+Net.params = _Net_params
+Net.forward = _Net_forward
+Net.backward = _Net_backward
+Net.forward_all = _Net_forward_all
+Net.forward_backward_all = _Net_forward_backward_all
+Net.set_input_arrays = _Net_set_input_arrays
+Net._batch = _Net_batch
+Net.inputs = _Net_inputs
+Net.outputs = _Net_outputs
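
Taken together, these attached methods give Net a dictionary-style interface over blobs and parameters. A short sketch of typical use, with placeholder model and weights file names:

    import numpy as np
    import caffe

    caffe.set_mode_cpu()
    # 'deploy.prototxt' and 'weights.caffemodel' are placeholders here.
    net = caffe.Net('deploy.prototxt', 'weights.caffemodel', caffe.TEST)

    # Fill the first input blob and run a forward pass; the result is a
    # {blob name: ndarray} dict keyed by the output blob names.
    in_ = net.inputs[0]
    net.blobs[in_].data[...] = np.random.rand(*net.blobs[in_].data.shape)
    out = net.forward()
    print(list(out.keys()))
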
diff --git a/python/caffe/test/test_net.py b/python/caffe/test/test_net.py
new file mode 100644
index 0000000..cc36747
--- /dev/null
+++ b/python/caffe/test/test_net.py
@@ -0,0 +1,80 @@
+import unittest
+import tempfile
+import os
+import numpy as np
+
+import caffe
+
+
+def simple_net_file(num_output):
+    """Make a simple net prototxt, based on test_net.cpp, returning the name
+    of the (temporary) file."""
+
+    f = tempfile.NamedTemporaryFile(delete=False)
+    f.write("""name: 'testnet' force_backward: true
+    layer { type: 'DummyData' name: 'data' top: 'data' top: 'label'
+      dummy_data_param { num: 5 channels: 2 height: 3 width: 4
+        num: 5 channels: 1 height: 1 width: 1
+        data_filler { type: 'gaussian' std: 1 }
+        data_filler { type: 'constant' } } }
+    layer { type: 'Convolution' name: 'conv' bottom: 'data' top: 'conv'
+      convolution_param { num_output: 11 kernel_size: 2 pad: 3
+        weight_filler { type: 'gaussian' std: 1 }
+        bias_filler { type: 'constant' value: 2 } }
+        param { decay_mult: 1 } param { decay_mult: 0 }
+        }
+    layer { type: 'InnerProduct' name: 'ip' bottom: 'conv' top: 'ip'
+      inner_product_param { num_output: """ + str(num_output) + """
+        weight_filler { type: 'gaussian' std: 2.5 }
+        bias_filler { type: 'constant' value: -3 } } }
+    layer { type: 'SoftmaxWithLoss' name: 'loss' bottom: 'ip' bottom: 'label'
+      top: 'loss' }""")
+    f.close()
+    return f.name
+
+
+class TestNet(unittest.TestCase):
+    def setUp(self):
+        self.num_output = 13
+        net_file = simple_net_file(self.num_output)
+        self.net = caffe.Net(net_file, caffe.TRAIN)
+        # fill in valid labels
+        self.net.blobs['label'].data[...] = \
+                np.random.randint(self.num_output,
+                    size=self.net.blobs['label'].data.shape)
+        os.remove(net_file)
+
+    def test_memory(self):
+        """Check that holding onto blob data beyond the life of a Net is OK"""
+
+        params = sum(map(list, self.net.params.itervalues()), [])
+        blobs = self.net.blobs.values()
+        del self.net
+
+        # now sum everything (forcing all memory to be read)
+        total = 0
+        for p in params:
+            total += p.data.sum() + p.diff.sum()
+        for bl in blobs:
+            total += bl.data.sum() + bl.diff.sum()
+
+    def test_forward_backward(self):
+        self.net.forward()
+        self.net.backward()
+
+    def test_inputs_outputs(self):
+        self.assertEqual(self.net.inputs, [])
+        self.assertEqual(self.net.outputs, ['loss'])
+
+    def test_save_and_read(self):
+        f = tempfile.NamedTemporaryFile(delete=False)
+        f.close()
+        self.net.save(f.name)
+        net_file = simple_net_file(self.num_output)
+        net2 = caffe.Net(net_file, f.name, caffe.TRAIN)
+        os.remove(net_file)
+        os.remove(f.name)
+        for name in self.net.params:
+            for i in range(len(self.net.params[name])):
+                self.assertEqual(abs(self.net.params[name][i].data
+                    - net2.params[name][i].data).sum(), 0)
diff --git a/python/caffe/test/test_net_spec.py b/python/caffe/test/test_net_spec.py
new file mode 100644
index 0000000..65b73b9
--- /dev/null
+++ b/python/caffe/test/test_net_spec.py
@@ -0,0 +1,67 @@
+import unittest
+import tempfile
+import caffe
+from caffe import layers as L
+from caffe import params as P
+
+def lenet(batch_size):
+    n = caffe.NetSpec()
+    n.data, n.label = L.DummyData(shape=[dict(dim=[batch_size, 1, 28, 28]),
+                                         dict(dim=[batch_size, 1, 1, 1])],
+                                  transform_param=dict(scale=1./255), ntop=2)
+    n.conv1 = L.Convolution(n.data, kernel_size=5, num_output=20,
+        weight_filler=dict(type='xavier'))
+    n.pool1 = L.Pooling(n.conv1, kernel_size=2, stride=2, pool=P.Pooling.MAX)
+    n.conv2 = L.Convolution(n.pool1, kernel_size=5, num_output=50,
+        weight_filler=dict(type='xavier'))
+    n.pool2 = L.Pooling(n.conv2, kernel_size=2, stride=2, pool=P.Pooling.MAX)
+    n.ip1 = L.InnerProduct(n.pool2, num_output=500,
+        weight_filler=dict(type='xavier'))
+    n.relu1 = L.ReLU(n.ip1, in_place=True)
+    n.ip2 = L.InnerProduct(n.relu1, num_output=10,
+        weight_filler=dict(type='xavier'))
+    n.loss = L.SoftmaxWithLoss(n.ip2, n.label)
+    return n.to_proto()
+
+def anon_lenet(batch_size):
+    data, label = L.DummyData(shape=[dict(dim=[batch_size, 1, 28, 28]),
+                                     dict(dim=[batch_size, 1, 1, 1])],
+                              transform_param=dict(scale=1./255), ntop=2)
+    conv1 = L.Convolution(data, kernel_size=5, num_output=20,
+        weight_filler=dict(type='xavier'))
+    pool1 = L.Pooling(conv1, kernel_size=2, stride=2, pool=P.Pooling.MAX)
+    conv2 = L.Convolution(pool1, kernel_size=5, num_output=50,
+        weight_filler=dict(type='xavier'))
+    pool2 = L.Pooling(conv2, kernel_size=2, stride=2, pool=P.Pooling.MAX)
+    ip1 = L.InnerProduct(pool2, num_output=500,
+        weight_filler=dict(type='xavier'))
+    relu1 = L.ReLU(ip1, in_place=True)
+    ip2 = L.InnerProduct(relu1, num_output=10,
+        weight_filler=dict(type='xavier'))
+    loss = L.SoftmaxWithLoss(ip2, label)
+    return loss.to_proto()
+
+class TestNetSpec(unittest.TestCase):
+    def load_net(self, net_proto):
+        f = tempfile.NamedTemporaryFile(delete=False)
+        f.write(str(net_proto))
+        f.close()
+        return caffe.Net(f.name, caffe.TEST)
+
+    def test_lenet(self):
+        """Construct and build the Caffe version of LeNet."""
+
+        net_proto = lenet(50)
+        # check that relu is in-place
+        self.assertEqual(net_proto.layer[6].bottom,
+                net_proto.layer[6].top)
+        net = self.load_net(net_proto)
+        # check that all layers are present
+        self.assertEqual(len(net.layers), 9)
+
+        # now check the version with automatically-generated layer names
+        net_proto = anon_lenet(50)
+        self.assertEqual(net_proto.layer[6].bottom,
+                net_proto.layer[6].top)
+        net = self.load_net(net_proto)
+        self.assertEqual(len(net.layers), 9)
diff --git a/python/caffe/test/test_python_layer.py b/python/caffe/test/test_python_layer.py
new file mode 100644
index 0000000..6fba491
--- /dev/null
+++ b/python/caffe/test/test_python_layer.py
@@ -0,0 +1,63 @@
+import unittest
+import tempfile
+import os
+
+import caffe
+
+
+class SimpleLayer(caffe.Layer):
+    """A layer that just multiplies by ten"""
+
+    def setup(self, bottom, top):
+        pass
+
+    def reshape(self, bottom, top):
+        top[0].reshape(*bottom[0].data.shape)
+
+    def forward(self, bottom, top):
+        top[0].data[...] = 10 * bottom[0].data
+
+    def backward(self, top, propagate_down, bottom):
+        bottom[0].diff[...] = 10 * top[0].diff
+
+
+def python_net_file():
+    with tempfile.NamedTemporaryFile(delete=False) as f:
+        f.write("""name: 'pythonnet' force_backward: true
+        input: 'data' input_shape { dim: 10 dim: 9 dim: 8 }
+        layer { type: 'Python' name: 'one' bottom: 'data' top: 'one'
+          python_param { module: 'test_python_layer' layer: 'SimpleLayer' } }
+        layer { type: 'Python' name: 'two' bottom: 'one' top: 'two'
+          python_param { module: 'test_python_layer' layer: 'SimpleLayer' } }
+        layer { type: 'Python' name: 'three' bottom: 'two' top: 'three'
+          python_param { module: 'test_python_layer' layer: 'SimpleLayer' } }""")
+        return f.name
+
+
+class TestPythonLayer(unittest.TestCase):
+    def setUp(self):
+        net_file = python_net_file()
+        self.net = caffe.Net(net_file, caffe.TRAIN)
+        os.remove(net_file)
+
+    def test_forward(self):
+        x = 8
+        self.net.blobs['data'].data[...] = x
+        self.net.forward()
+        for y in self.net.blobs['three'].data.flat:
+            self.assertEqual(y, 10**3 * x)
+
+    def test_backward(self):
+        x = 7
+        self.net.blobs['three'].diff[...] = x
+        self.net.backward()
+        for y in self.net.blobs['data'].diff.flat:
+            self.assertEqual(y, 10**3 * x)
+
+    def test_reshape(self):
+        s = 4
+        self.net.blobs['data'].reshape(s, s, s, s)
+        self.net.forward()
+        for blob in self.net.blobs.itervalues():
+            for d in blob.data.shape:
+                self.assertEqual(s, d)
diff --git a/python/caffe/test/test_solver.py b/python/caffe/test/test_solver.py
new file mode 100644
index 0000000..09b974d
--- /dev/null
+++ b/python/caffe/test/test_solver.py
@@ -0,0 +1,52 @@
+import unittest
+import tempfile
+import os
+import numpy as np
+
+import caffe
+from test_net import simple_net_file
+
+
+class TestSolver(unittest.TestCase):
+    def setUp(self):
+        self.num_output = 13
+        net_f = simple_net_file(self.num_output)
+        f = tempfile.NamedTemporaryFile(delete=False)
+        f.write("""net: '""" + net_f + """'
+        test_iter: 10 test_interval: 10 base_lr: 0.01 momentum: 0.9
+        weight_decay: 0.0005 lr_policy: 'inv' gamma: 0.0001 power: 0.75
+        display: 100 max_iter: 100 snapshot_after_train: false""")
+        f.close()
+        self.solver = caffe.SGDSolver(f.name)
+        # also make sure get_solver runs
+        caffe.get_solver(f.name)
+        caffe.set_mode_cpu()
+        # fill in valid labels
+        self.solver.net.blobs['label'].data[...] = \
+                np.random.randint(self.num_output,
+                    size=self.solver.net.blobs['label'].data.shape)
+        self.solver.test_nets[0].blobs['label'].data[...] = \
+                np.random.randint(self.num_output,
+                    size=self.solver.test_nets[0].blobs['label'].data.shape)
+        os.remove(f.name)
+        os.remove(net_f)
+
+    def test_solve(self):
+        self.assertEqual(self.solver.iter, 0)
+        self.solver.solve()
+        self.assertEqual(self.solver.iter, 100)
+
+    def test_net_memory(self):
+        """Check that nets survive after the solver is destroyed."""
+
+        nets = [self.solver.net] + list(self.solver.test_nets)
+        self.assertEqual(len(nets), 2)
+        del self.solver
+
+        total = 0
+        for net in nets:
+            for ps in net.params.itervalues():
+                for p in ps:
+                    total += p.data.sum() + p.diff.sum()
+            for bl in net.blobs.itervalues():
+                total += bl.data.sum() + bl.diff.sum()
diff --git a/python/classify.py b/python/classify.py
new file mode 100755
index 0000000..4544c51
--- /dev/null
+++ b/python/classify.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+"""
+classify.py is an out-of-the-box image classifier callable from the command line.
+
+By default it configures and runs the Caffe reference ImageNet model.
+"""
+import numpy as np
+import os
+import sys
+import argparse
+import glob
+import time
+
+import caffe
+
+
+def main(argv):
+    pycaffe_dir = os.path.dirname(__file__)
+
+    parser = argparse.ArgumentParser()
+    # Required arguments: input and output files.
+    parser.add_argument(
+        "input_file",
+        help="Input image, directory, or npy."
+    )
+    parser.add_argument(
+        "output_file",
+        help="Output npy filename."
+    )
+    # Optional arguments.
+    parser.add_argument(
+        "--model_def",
+        default=os.path.join(pycaffe_dir,
+                "../models/bvlc_reference_caffenet/deploy.prototxt"),
+        help="Model definition file."
+    )
+    parser.add_argument(
+        "--pretrained_model",
+        default=os.path.join(pycaffe_dir,
+                "../models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel"),
+        help="Trained model weights file."
+    )
+    parser.add_argument(
+        "--gpu",
+        action='store_true',
+        help="Switch for gpu computation."
+    )
+    parser.add_argument(
+        "--center_only",
+        action='store_true',
+        help="Switch for prediction from center crop alone instead of " +
+             "averaging predictions across crops (default)."
+    )
+    parser.add_argument(
+        "--images_dim",
+        default='256,256',
+        help="Canonical 'height,width' dimensions of input images."
+    )
+    parser.add_argument(
+        "--mean_file",
+        default=os.path.join(pycaffe_dir,
+                             'caffe/imagenet/ilsvrc_2012_mean.npy'),
+        help="Data set image mean of [Channels x Height x Width] dimensions " +
+             "(numpy array). Set to '' for no mean subtraction."
+    )
+    parser.add_argument(
+        "--input_scale",
+        type=float,
+        help="Multiply input features by this scale to finish preprocessing."
+    )
+    parser.add_argument(
+        "--raw_scale",
+        type=float,
+        default=255.0,
+        help="Multiply raw input by this scale before preprocessing."
+    )
+    parser.add_argument(
+        "--channel_swap",
+        default='2,1,0',
+        help="Order to permute input channels. The default converts " +
+             "RGB -> BGR since BGR is the Caffe default by way of OpenCV."
+    )
+    parser.add_argument(
+        "--ext",
+        default='jpg',
+        help="Image file extension to take as input when a directory " +
+             "is given as the input file."
+    )
+    args = parser.parse_args()
+
+    image_dims = [int(s) for s in args.images_dim.split(',')]
+
+    mean, channel_swap = None, None
+    if args.mean_file:
+        mean = np.load(args.mean_file)
+    if args.channel_swap:
+        channel_swap = [int(s) for s in args.channel_swap.split(',')]
+
+    if args.gpu:
+        caffe.set_mode_gpu()
+        print("GPU mode")
+    else:
+        caffe.set_mode_cpu()
+        print("CPU mode")
+
+    # Make classifier.
+    classifier = caffe.Classifier(args.model_def, args.pretrained_model,
+            image_dims=image_dims, mean=mean,
+            input_scale=args.input_scale, raw_scale=args.raw_scale,
+            channel_swap=channel_swap)
+
+    # Load numpy array (.npy), directory glob (*.jpg), or image file.
+    args.input_file = os.path.expanduser(args.input_file)
+    if args.input_file.endswith('npy'):
+        print("Loading file: %s" % args.input_file)
+        inputs = np.load(args.input_file)
+    elif os.path.isdir(args.input_file):
+        print("Loading folder: %s" % args.input_file)
+        inputs = [caffe.io.load_image(im_f)
+                 for im_f in glob.glob(args.input_file + '/*.' + args.ext)]
+    else:
+        print("Loading file: %s" % args.input_file)
+        inputs = [caffe.io.load_image(args.input_file)]
+
+    print("Classifying %d inputs." % len(inputs))
+
+    # Classify.
+    start = time.time()
+    predictions = classifier.predict(inputs, not args.center_only)
+    print("Done in %.2f s." % (time.time() - start))
+
+    # Save
+    print("Saving results into %s" % args.output_file)
+    np.save(args.output_file, predictions)
+
+
+if __name__ == '__main__':
+    main(sys.argv)
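
A typical invocation from the repository root looks like the following (the image path and output name are illustrative); predictions are saved with np.save as one score vector per input:

    python python/classify.py --center_only \
        examples/images/cat.jpg cat_scores.npy
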
diff --git a/python/detect.py b/python/detect.py
new file mode 100755
index 0000000..691098f
--- /dev/null
+++ b/python/detect.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+"""
+detect.py is an out-of-the-box windowed detector
+callable from the command line.
+
+By default it configures and runs the Caffe reference ImageNet model.
+Note that this model was trained for image classification and not detection,
+and finetuning for detection can be expected to improve results.
+
+The selective_search_ijcv_with_python code required for the selective search
+proposal mode is available at
+    https://github.com/sergeyk/selective_search_ijcv_with_python
+
+TODO:
+- batch up image filenames as well: don't want to load all of them into memory
+- come up with a batching scheme that preserves order / keeps a unique ID
+"""
+import numpy as np
+import pandas as pd
+import os
+import argparse
+import time
+
+import caffe
+
+CROP_MODES = ['list', 'selective_search']
+COORD_COLS = ['ymin', 'xmin', 'ymax', 'xmax']
+
+
+def main(argv):
+    pycaffe_dir = os.path.dirname(__file__)
+
+    parser = argparse.ArgumentParser()
+    # Required arguments: input and output.
+    parser.add_argument(
+        "input_file",
+        help="Input txt/csv filename. If .txt, must be list of filenames.\
+        If .csv, must be comma-separated file with header\
+        'filename, xmin, ymin, xmax, ymax'"
+    )
+    parser.add_argument(
+        "output_file",
+        help="Output h5/csv filename. Format depends on extension."
+    )
+    # Optional arguments.
+    parser.add_argument(
+        "--model_def",
+        default=os.path.join(pycaffe_dir,
+                "../models/bvlc_reference_caffenet/deploy.prototxt"),
+        help="Model definition file."
+    )
+    parser.add_argument(
+        "--pretrained_model",
+        default=os.path.join(pycaffe_dir,
+                "../models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel"),
+        help="Trained model weights file."
+    )
+    parser.add_argument(
+        "--crop_mode",
+        default="selective_search",
+        choices=CROP_MODES,
+        help="How to generate windows for detection."
+    )
+    parser.add_argument(
+        "--gpu",
+        action='store_true',
+        help="Switch for gpu computation."
+    )
+    parser.add_argument(
+        "--mean_file",
+        default=os.path.join(pycaffe_dir,
+                             'caffe/imagenet/ilsvrc_2012_mean.npy'),
+        help="Data set image mean of H x W x K dimensions (numpy array). " +
+             "Set to '' for no mean subtraction."
+    )
+    parser.add_argument(
+        "--input_scale",
+        type=float,
+        help="Multiply input features by this scale to finish preprocessing."
+    )
+    parser.add_argument(
+        "--raw_scale",
+        type=float,
+        default=255.0,
+        help="Multiply raw input by this scale before preprocessing."
+    )
+    parser.add_argument(
+        "--channel_swap",
+        default='2,1,0',
+        help="Order to permute input channels. The default converts " +
+             "RGB -> BGR since BGR is the Caffe default by way of OpenCV."
+
+    )
+    parser.add_argument(
+        "--context_pad",
+        type=int,
+        default='16',
+        help="Amount of surrounding context to collect in input window."
+    )
+    args = parser.parse_args()
+
+    mean, channel_swap = None, None
+    if args.mean_file:
+        mean = np.load(args.mean_file)
+        if mean.shape[1:] != (1, 1):
+            mean = mean.mean(1).mean(1)
+    if args.channel_swap:
+        channel_swap = [int(s) for s in args.channel_swap.split(',')]
+
+    if args.gpu:
+        caffe.set_mode_gpu()
+        print("GPU mode")
+    else:
+        caffe.set_mode_cpu()
+        print("CPU mode")
+
+    # Make detector.
+    detector = caffe.Detector(args.model_def, args.pretrained_model, mean=mean,
+            input_scale=args.input_scale, raw_scale=args.raw_scale,
+            channel_swap=channel_swap,
+            context_pad=args.context_pad)
+
+    # Load input.
+    t = time.time()
+    print("Loading input...")
+    if args.input_file.lower().endswith('txt'):
+        with open(args.input_file) as f:
+            inputs = [_.strip() for _ in f.readlines()]
+    elif args.input_file.lower().endswith('csv'):
+        inputs = pd.read_csv(args.input_file, sep=',', dtype={'filename': str})
+        inputs.set_index('filename', inplace=True)
+    else:
+        raise Exception("Unknown input file type: not in txt or csv.")
+
+    # Detect.
+    if args.crop_mode == 'list':
+        # Unpack sequence of (image filename, windows).
+        images_windows = [
+            (ix, inputs.iloc[np.where(inputs.index == ix)][COORD_COLS].values)
+            for ix in inputs.index.unique()
+        ]
+        detections = detector.detect_windows(images_windows)
+    else:
+        detections = detector.detect_selective_search(inputs)
+    print("Processed {} windows in {:.3f} s.".format(len(detections),
+                                                     time.time() - t))
+
+    # Collect into dataframe with labeled fields.
+    df = pd.DataFrame(detections)
+    df.set_index('filename', inplace=True)
+    df[COORD_COLS] = pd.DataFrame(
+        data=np.vstack(df['window']), index=df.index, columns=COORD_COLS)
+    del(df['window'])
+
+    # Save results.
+    t = time.time()
+    if args.output_file.lower().endswith('csv'):
+        # csv
+        # Enumerate the class probabilities (one column per class score).
+        num_output = np.vstack(df['feat']).shape[1]
+        class_cols = ['class{}'.format(x) for x in range(num_output)]
+        df[class_cols] = pd.DataFrame(
+            data=np.vstack(df['feat']), index=df.index, columns=class_cols)
+        df.to_csv(args.output_file, cols=COORD_COLS + class_cols)
+    else:
+        # h5
+        df.to_hdf(args.output_file, 'df', mode='w')
+    print("Saved to {} in {:.3f} s.".format(args.output_file,
+                                            time.time() - t))
+
+
+if __name__ == "__main__":
+    import sys
+    main(sys.argv)
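
A typical invocation, here using selective search mode with a plain text list of image filenames (paths are illustrative; 'list' mode instead expects the CSV format described above):

    echo examples/images/fish-bike.jpg > images.txt
    python python/detect.py --crop_mode=selective_search \
        images.txt detections.h5
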
diff --git a/python/draw_net.py b/python/draw_net.py
new file mode 100755
index 0000000..ec76a74
--- /dev/null
+++ b/python/draw_net.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+"""
+Draw a graph of the net architecture.
+"""
+from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
+from google.protobuf import text_format
+
+import caffe
+import caffe.draw
+from caffe.proto import caffe_pb2
+
+
+def parse_args():
+    """Parse input arguments
+    """
+
+    parser = ArgumentParser(description=__doc__,
+                            formatter_class=ArgumentDefaultsHelpFormatter)
+
+    parser.add_argument('input_net_proto_file',
+                        help='Input network prototxt file')
+    parser.add_argument('output_image_file',
+                        help='Output image file')
+    parser.add_argument('--rankdir',
+                        help=('One of TB (top-bottom, i.e., vertical), '
+                              'RL (right-left, i.e., horizontal), or another '
+                              'valid dot option; see '
+                              'http://www.graphviz.org/doc/info/'
+                              'attrs.html#k:rankdir'),
+                        default='LR')
+
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    args = parse_args()
+    net = caffe_pb2.NetParameter()
+    text_format.Merge(open(args.input_net_proto_file).read(), net)
+    print('Drawing net to %s' % args.output_image_file)
+    caffe.draw.draw_net_to_file(net, args.output_image_file, args.rankdir)
+
+
+if __name__ == '__main__':
+    main()
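
For example (the prototxt path is illustrative):

    python python/draw_net.py \
        models/bvlc_reference_caffenet/train_val.prototxt caffenet.png --rankdir TB

The default --rankdir is LR (left to right); TB draws the graph top to bottom instead.
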
diff --git a/python/requirements.txt b/python/requirements.txt
new file mode 100644
index 0000000..e7d89e6
--- /dev/null
+++ b/python/requirements.txt
@@ -0,0 +1,17 @@
+Cython>=0.19.2
+numpy>=1.7.1
+scipy>=0.13.2
+scikit-image>=0.9.3
+matplotlib>=1.3.1
+ipython>=3.0.0
+h5py>=2.2.0
+leveldb>=0.191
+networkx>=1.8.1
+nose>=1.3.0
+pandas>=0.12.0
+python-dateutil>=1.4,<2
+protobuf>=2.5.0
+python-gflags>=2.0
+pyyaml>=3.10
+Pillow>=2.3.0
+six>=1.1.0
\ No newline at end of file
diff --git a/scripts/build_docs.sh b/scripts/build_docs.sh
new file mode 100755
index 0000000..0e28bd7
--- /dev/null
+++ b/scripts/build_docs.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+# Build documentation for display in web browser.
+
+PORT=${1:-4000}
+
+echo "usage: build_docs.sh [port]"
+
+# Find the docs dir, no matter where the script is called
+ROOT_DIR="$( cd "$(dirname "$0")"/.. ; pwd -P )"
+cd $ROOT_DIR
+
+# Gather docs.
+scripts/gather_examples.sh
+
+# Generate developer docs.
+make docs
+
+# Display docs using web server.
+cd docs
+jekyll serve -w -s . -d _site --port=$PORT
diff --git a/scripts/copy_notebook.py b/scripts/copy_notebook.py
new file mode 100755
index 0000000..e4c6385
--- /dev/null
+++ b/scripts/copy_notebook.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+"""
+Takes as arguments:
+1. the path to a JSON file (such as an IPython notebook).
+2. the path to output file
+
+If 'metadata' dict in the JSON file contains 'include_in_docs': true,
+then copies the file to output file, appending the 'metadata' property
+as YAML front-matter, adding the field 'category' with value 'notebook'.
+"""
+import os
+import sys
+import json
+
+filename = sys.argv[1]
+output_filename = sys.argv[2]
+content = json.load(open(filename))
+
+if 'include_in_docs' in content['metadata'] and content['metadata']['include_in_docs']:
+    yaml_frontmatter = ['---']
+    for key, val in content['metadata'].iteritems():
+        if key == 'example_name':
+            key = 'title'
+            if val == '':
+                val = os.path.basename(filename)
+        yaml_frontmatter.append('{}: {}'.format(key, val))
+    yaml_frontmatter += ['category: notebook']
+    yaml_frontmatter += ['original_path: ' + filename]
+
+    with open(output_filename, 'w') as fo:
+        fo.write('\n'.join(yaml_frontmatter + ['---']) + '\n')
+        fo.write(open(filename).read())
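
For example, to publish a notebook into the docs tree (both paths are illustrative):

    python scripts/copy_notebook.py examples/classification.ipynb docs/_posts/classification.md
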
diff --git a/scripts/cpp_lint.py b/scripts/cpp_lint.py
new file mode 100755
index 0000000..f750489
--- /dev/null
+++ b/scripts/cpp_lint.py
@@ -0,0 +1,4868 @@
+#!/usr/bin/python2
+#
+# Copyright (c) 2009 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#    * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#    * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#    * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Does google-lint on c++ files.
+
+The goal of this script is to identify places in the code that *may*
+be in non-compliance with google style.  It does not attempt to fix
+up these problems -- the point is to educate.  It also does not
+attempt to find all problems, or to ensure that everything it does
+find is legitimately a problem.
+
+In particular, we can get very confused by /* and // inside strings!
+We do a small hack, which is to ignore //'s with "'s after them on the
+same line, but it is far from perfect (in either direction).
+"""
+
+import codecs
+import copy
+import getopt
+import math  # for log
+import os
+import re
+import sre_compile
+import string
+import sys
+import unicodedata
+
+
+_USAGE = """
+Syntax: cpp_lint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...]
+                   [--counting=total|toplevel|detailed] [--root=subdir]
+                   [--linelength=digits]
+        <file> [file] ...
+
+  The style guidelines this tries to follow are those in
+    http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml
+
+  Every problem is given a confidence score from 1-5, with 5 meaning we are
+  certain of the problem, and 1 meaning it could be a legitimate construct.
+  This will miss some errors, and is not a substitute for a code review.
+
+  To suppress false-positive errors of a certain category, add a
+  'NOLINT(category)' comment to the line.  NOLINT or NOLINT(*)
+  suppresses errors of all categories on that line.
+
+  The files passed in will be linted; at least one file must be provided.
+  Default linted extensions are .cc, .cpp, .cu, .cuh and .h.  Change the
+  extensions with the --extensions flag.
+
+  Flags:
+
+    output=vs7
+      By default, the output is formatted to ease emacs parsing.  Visual Studio
+      compatible output (vs7) may also be used.  Other formats are unsupported.
+
+    verbose=#
+      Specify a number 0-5 to restrict errors to certain verbosity levels.
+
+    filter=-x,+y,...
+      Specify a comma-separated list of category-filters to apply: only
+      error messages whose category names pass the filters will be printed.
+      (Category names are printed with the message and look like
+      "[whitespace/indent]".)  Filters are evaluated left to right.
+      "-FOO" and "FOO" means "do not print categories that start with FOO".
+      "+FOO" means "do print categories that start with FOO".
+
+      Examples: --filter=-whitespace,+whitespace/braces
+                --filter=whitespace,runtime/printf,+runtime/printf_format
+                --filter=-,+build/include_what_you_use
+
+      To see a list of all the categories used in cpplint, pass no arg:
+         --filter=
+
+    counting=total|toplevel|detailed
+      The total number of errors found is always printed. If
+      'toplevel' is provided, then the count of errors in each of
+      the top-level categories like 'build' and 'whitespace' will
+      also be printed. If 'detailed' is provided, then a count
+      is provided for each category like 'build/class'.
+
+    root=subdir
+      The root directory used for deriving header guard CPP variable.
+      By default, the header guard CPP variable is calculated as the relative
+      path to the directory that contains .git, .hg, or .svn.  When this flag
+      is specified, the relative path is calculated from the specified
+      directory. If the specified directory does not exist, this flag is
+      ignored.
+
+      Examples:
+        Assuming that src/.git exists, the header guard CPP variables for
+        src/chrome/browser/ui/browser.h are:
+
+        No flag => CHROME_BROWSER_UI_BROWSER_H_
+        --root=chrome => BROWSER_UI_BROWSER_H_
+        --root=chrome/browser => UI_BROWSER_H_
+
+    linelength=digits
+      This is the allowed line length for the project. The default value is
+      80 characters.
+
+      Examples:
+        --linelength=120
+
+    extensions=extension,extension,...
+      The allowed file extensions that cpplint will check
+
+      Examples:
+        --extensions=hpp,cpp
+"""
+
+# We categorize each error message we print.  Here are the categories.
+# We want an explicit list so we can list them all in cpplint --filter=.
+# If you add a new error message with a new category, add it to the list
+# here!  cpplint_unittest.py should tell you if you forget to do this.
+_ERROR_CATEGORIES = [
+  'build/class',
+  'build/deprecated',
+  'build/endif_comment',
+  'build/explicit_make_pair',
+  'build/forward_decl',
+  'build/header_guard',
+  'build/include',
+  'build/include_alpha',
+  'build/include_dir',
+  'build/include_order',
+  'build/include_what_you_use',
+  'build/namespaces',
+  'build/printf_format',
+  'build/storage_class',
+  'caffe/alt_fn',
+  'caffe/data_layer_setup',
+  'caffe/random_fn',
+  'legal/copyright',
+  'readability/alt_tokens',
+  'readability/braces',
+  'readability/casting',
+  'readability/check',
+  'readability/constructors',
+  'readability/fn_size',
+  'readability/function',
+  'readability/multiline_comment',
+  'readability/multiline_string',
+  'readability/namespace',
+  'readability/nolint',
+  'readability/nul',
+  'readability/streams',
+  'readability/todo',
+  'readability/utf8',
+  'runtime/arrays',
+  'runtime/casting',
+  'runtime/explicit',
+  'runtime/int',
+  'runtime/init',
+  'runtime/invalid_increment',
+  'runtime/member_string_references',
+  'runtime/memset',
+  'runtime/operator',
+  'runtime/printf',
+  'runtime/printf_format',
+  'runtime/references',
+  'runtime/string',
+  'runtime/threadsafe_fn',
+  'runtime/vlog',
+  'whitespace/blank_line',
+  'whitespace/braces',
+  'whitespace/comma',
+  'whitespace/comments',
+  'whitespace/empty_conditional_body',
+  'whitespace/empty_loop_body',
+  'whitespace/end_of_line',
+  'whitespace/ending_newline',
+  'whitespace/forcolon',
+  'whitespace/indent',
+  'whitespace/line_length',
+  'whitespace/newline',
+  'whitespace/operators',
+  'whitespace/parens',
+  'whitespace/semicolon',
+  'whitespace/tab',
+  'whitespace/todo'
+  ]
+
+# The default state of the category filter. This is overridden by the --filter=
+# flag. By default all errors are on, so only add here categories that should be
+# off by default (i.e., categories that must be enabled by the --filter= flags).
+# All entries here should start with a '-' or '+', as in the --filter= flag.
+_DEFAULT_FILTERS = [
+  '-build/include_dir',
+  '-readability/todo',
+  ]
+
+# We used to check for high-bit characters, but after much discussion we
+# decided those were OK, as long as they were in UTF-8 and didn't represent
+# hard-coded international strings, which belong in a separate i18n file.
+
+
+# C++ headers
+_CPP_HEADERS = frozenset([
+    # Legacy
+    'algobase.h',
+    'algo.h',
+    'alloc.h',
+    'builtinbuf.h',
+    'bvector.h',
+    'complex.h',
+    'defalloc.h',
+    'deque.h',
+    'editbuf.h',
+    'fstream.h',
+    'function.h',
+    'hash_map',
+    'hash_map.h',
+    'hash_set',
+    'hash_set.h',
+    'hashtable.h',
+    'heap.h',
+    'indstream.h',
+    'iomanip.h',
+    'iostream.h',
+    'istream.h',
+    'iterator.h',
+    'list.h',
+    'map.h',
+    'multimap.h',
+    'multiset.h',
+    'ostream.h',
+    'pair.h',
+    'parsestream.h',
+    'pfstream.h',
+    'procbuf.h',
+    'pthread_alloc',
+    'pthread_alloc.h',
+    'rope',
+    'rope.h',
+    'ropeimpl.h',
+    'set.h',
+    'slist',
+    'slist.h',
+    'stack.h',
+    'stdiostream.h',
+    'stl_alloc.h',
+    'stl_relops.h',
+    'streambuf.h',
+    'stream.h',
+    'strfile.h',
+    'strstream.h',
+    'tempbuf.h',
+    'tree.h',
+    'type_traits.h',
+    'vector.h',
+    # 17.6.1.2 C++ library headers
+    'algorithm',
+    'array',
+    'atomic',
+    'bitset',
+    'chrono',
+    'codecvt',
+    'complex',
+    'condition_variable',
+    'deque',
+    'exception',
+    'forward_list',
+    'fstream',
+    'functional',
+    'future',
+    'initializer_list',
+    'iomanip',
+    'ios',
+    'iosfwd',
+    'iostream',
+    'istream',
+    'iterator',
+    'limits',
+    'list',
+    'locale',
+    'map',
+    'memory',
+    'mutex',
+    'new',
+    'numeric',
+    'ostream',
+    'queue',
+    'random',
+    'ratio',
+    'regex',
+    'set',
+    'sstream',
+    'stack',
+    'stdexcept',
+    'streambuf',
+    'string',
+    'strstream',
+    'system_error',
+    'thread',
+    'tuple',
+    'typeindex',
+    'typeinfo',
+    'type_traits',
+    'unordered_map',
+    'unordered_set',
+    'utility',
+    'valarray',
+    'vector',
+    # 17.6.1.2 C++ headers for C library facilities
+    'cassert',
+    'ccomplex',
+    'cctype',
+    'cerrno',
+    'cfenv',
+    'cfloat',
+    'cinttypes',
+    'ciso646',
+    'climits',
+    'clocale',
+    'cmath',
+    'csetjmp',
+    'csignal',
+    'cstdalign',
+    'cstdarg',
+    'cstdbool',
+    'cstddef',
+    'cstdint',
+    'cstdio',
+    'cstdlib',
+    'cstring',
+    'ctgmath',
+    'ctime',
+    'cuchar',
+    'cwchar',
+    'cwctype',
+    ])
+
+# Assertion macros.  These are defined in base/logging.h and
+# testing/base/gunit.h.  Note that the _M versions need to come first
+# for substring matching to work.
+_CHECK_MACROS = [
+    'DCHECK', 'CHECK',
+    'EXPECT_TRUE_M', 'EXPECT_TRUE',
+    'ASSERT_TRUE_M', 'ASSERT_TRUE',
+    'EXPECT_FALSE_M', 'EXPECT_FALSE',
+    'ASSERT_FALSE_M', 'ASSERT_FALSE',
+    ]
+
+# Replacement macros for CHECK/DCHECK/EXPECT_TRUE/EXPECT_FALSE
+_CHECK_REPLACEMENT = dict([(m, {}) for m in _CHECK_MACROS])
+
+for op, replacement in [('==', 'EQ'), ('!=', 'NE'),
+                        ('>=', 'GE'), ('>', 'GT'),
+                        ('<=', 'LE'), ('<', 'LT')]:
+  _CHECK_REPLACEMENT['DCHECK'][op] = 'DCHECK_%s' % replacement
+  _CHECK_REPLACEMENT['CHECK'][op] = 'CHECK_%s' % replacement
+  _CHECK_REPLACEMENT['EXPECT_TRUE'][op] = 'EXPECT_%s' % replacement
+  _CHECK_REPLACEMENT['ASSERT_TRUE'][op] = 'ASSERT_%s' % replacement
+  _CHECK_REPLACEMENT['EXPECT_TRUE_M'][op] = 'EXPECT_%s_M' % replacement
+  _CHECK_REPLACEMENT['ASSERT_TRUE_M'][op] = 'ASSERT_%s_M' % replacement
+
+for op, inv_replacement in [('==', 'NE'), ('!=', 'EQ'),
+                            ('>=', 'LT'), ('>', 'LE'),
+                            ('<=', 'GT'), ('<', 'GE')]:
+  _CHECK_REPLACEMENT['EXPECT_FALSE'][op] = 'EXPECT_%s' % inv_replacement
+  _CHECK_REPLACEMENT['ASSERT_FALSE'][op] = 'ASSERT_%s' % inv_replacement
+  _CHECK_REPLACEMENT['EXPECT_FALSE_M'][op] = 'EXPECT_%s_M' % inv_replacement
+  _CHECK_REPLACEMENT['ASSERT_FALSE_M'][op] = 'ASSERT_%s_M' % inv_replacement
+
+# Alternative tokens and their replacements.  For full list, see section 2.5
+# Alternative tokens [lex.digraph] in the C++ standard.
+#
+# Digraphs (such as '%:') are not included here since it's a mess to
+# match those on a word boundary.
+_ALT_TOKEN_REPLACEMENT = {
+    'and': '&&',
+    'bitor': '|',
+    'or': '||',
+    'xor': '^',
+    'compl': '~',
+    'bitand': '&',
+    'and_eq': '&=',
+    'or_eq': '|=',
+    'xor_eq': '^=',
+    'not': '!',
+    'not_eq': '!='
+    }
+
+# Compile regular expression that matches all the above keywords.  The "[ =()]"
+# bit is meant to avoid matching these keywords outside of boolean expressions.
+#
+# False positives include C-style multi-line comments and multi-line strings
+# but those have always been troublesome for cpplint.
+_ALT_TOKEN_REPLACEMENT_PATTERN = re.compile(
+    r'[ =()](' + ('|'.join(_ALT_TOKEN_REPLACEMENT.keys())) + r')(?=[ (]|$)')
+
+
+# These constants define types of headers for use with
+# _IncludeState.CheckNextIncludeOrder().
+_C_SYS_HEADER = 1
+_CPP_SYS_HEADER = 2
+_LIKELY_MY_HEADER = 3
+_POSSIBLE_MY_HEADER = 4
+_OTHER_HEADER = 5
+
+# These constants define the current inline assembly state
+_NO_ASM = 0       # Outside of inline assembly block
+_INSIDE_ASM = 1   # Inside inline assembly block
+_END_ASM = 2      # Last line of inline assembly block
+_BLOCK_ASM = 3    # The whole block is an inline assembly block
+
+# Match start of assembly blocks
+_MATCH_ASM = re.compile(r'^\s*(?:asm|_asm|__asm|__asm__)'
+                        r'(?:\s+(volatile|__volatile__))?'
+                        r'\s*[{(]')
+
+
+_regexp_compile_cache = {}
+
+# Finds occurrences of NOLINT[_NEXT_LINE] or NOLINT[_NEXT_LINE](...).
+_RE_SUPPRESSION = re.compile(r'\bNOLINT(_NEXT_LINE)?\b(\([^)]*\))?')
+
+# {str, set(int)}: a map from error categories to sets of linenumbers
+# on which those errors are expected and should be suppressed.
+_error_suppressions = {}
+
+# Finds Copyright.
+_RE_COPYRIGHT = re.compile(r'Copyright')
+
+# The root directory used for deriving header guard CPP variable.
+# This is set by --root flag.
+_root = None
+
+# The allowed line length of files.
+# This is set by --linelength flag.
+_line_length = 80
+
+# The allowed extensions for file names
+# This is set by --extensions flag.
+_valid_extensions = set(['cc', 'h', 'cpp', 'hpp', 'cu', 'cuh'])
+
+def ParseNolintSuppressions(filename, raw_line, linenum, error):
+  """Updates the global list of error-suppressions.
+
+  Parses any NOLINT comments on the current line, updating the global
+  error_suppressions store.  Reports an error if the NOLINT comment
+  was malformed.
+
+  Args:
+    filename: str, the name of the input file.
+    raw_line: str, the line of input text, with comments.
+    linenum: int, the number of the current line.
+    error: function, an error handler.
+  """
+  # FIXME(adonovan): "NOLINT(" is misparsed as NOLINT(*).
+  matched = _RE_SUPPRESSION.search(raw_line)
+  if matched:
+    if matched.group(1) == '_NEXT_LINE':
+      linenum += 1
+    category = matched.group(2)
+    if category in (None, '(*)'):  # => "suppress all"
+      _error_suppressions.setdefault(None, set()).add(linenum)
+    else:
+      if category.startswith('(') and category.endswith(')'):
+        category = category[1:-1]
+        if category in _ERROR_CATEGORIES:
+          _error_suppressions.setdefault(category, set()).add(linenum)
+        else:
+          error(filename, linenum, 'readability/nolint', 5,
+                'Unknown NOLINT error category: %s' % category)
+
+
+def ResetNolintSuppressions():
+  "Resets the set of NOLINT suppressions to empty."
+  _error_suppressions.clear()
+
+
+def IsErrorSuppressedByNolint(category, linenum):
+  """Returns true if the specified error category is suppressed on this line.
+
+  Consults the global error_suppressions map populated by
+  ParseNolintSuppressions/ResetNolintSuppressions.
+
+  Args:
+    category: str, the category of the error.
+    linenum: int, the current line number.
+  Returns:
+    bool, True iff the error should be suppressed due to a NOLINT comment.
+  """
+  return (linenum in _error_suppressions.get(category, set()) or
+          linenum in _error_suppressions.get(None, set()))
+
+def Match(pattern, s):
+  """Matches the string with the pattern, caching the compiled regexp."""
+  # The regexp compilation caching is inlined in both Match and Search for
+  # performance reasons; factoring it out into a separate function turns out
+  # to be noticeably expensive.
+  if pattern not in _regexp_compile_cache:
+    _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
+  return _regexp_compile_cache[pattern].match(s)
+
+
+def ReplaceAll(pattern, rep, s):
+  """Replaces instances of pattern in a string with a replacement.
+
+  The compiled regex is kept in a cache shared by Match and Search.
+
+  Args:
+    pattern: regex pattern
+    rep: replacement text
+    s: search string
+
+  Returns:
+    string with replacements made (or original string if no replacements)
+  """
+  if pattern not in _regexp_compile_cache:
+    _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
+  return _regexp_compile_cache[pattern].sub(rep, s)
+
+
+def Search(pattern, s):
+  """Searches the string for the pattern, caching the compiled regexp."""
+  if pattern not in _regexp_compile_cache:
+    _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
+  return _regexp_compile_cache[pattern].search(s)
+
+
+class _IncludeState(dict):
+  """Tracks line numbers for includes, and the order in which includes appear.
+
+  As a dict, an _IncludeState object serves as a mapping between include
+  filename and line number on which that file was included.
+
+  Call CheckNextIncludeOrder() once for each header in the file, passing
+  in the type constants defined above.  For a call in an illegal order it
+  returns a non-empty error message describing the problem.
+
+  """
+  # self._section will move monotonically through this set. If it ever
+  # needs to move backwards, CheckNextIncludeOrder will raise an error.
+  _INITIAL_SECTION = 0
+  _MY_H_SECTION = 1
+  _C_SECTION = 2
+  _CPP_SECTION = 3
+  _OTHER_H_SECTION = 4
+
+  _TYPE_NAMES = {
+      _C_SYS_HEADER: 'C system header',
+      _CPP_SYS_HEADER: 'C++ system header',
+      _LIKELY_MY_HEADER: 'header this file implements',
+      _POSSIBLE_MY_HEADER: 'header this file may implement',
+      _OTHER_HEADER: 'other header',
+      }
+  _SECTION_NAMES = {
+      _INITIAL_SECTION: "... nothing. (This can't be an error.)",
+      _MY_H_SECTION: 'a header this file implements',
+      _C_SECTION: 'C system header',
+      _CPP_SECTION: 'C++ system header',
+      _OTHER_H_SECTION: 'other header',
+      }
+
+  def __init__(self):
+    dict.__init__(self)
+    self.ResetSection()
+
+  def ResetSection(self):
+    # The name of the current section.
+    self._section = self._INITIAL_SECTION
+    # The path of last found header.
+    self._last_header = ''
+
+  def SetLastHeader(self, header_path):
+    self._last_header = header_path
+
+  def CanonicalizeAlphabeticalOrder(self, header_path):
+    """Returns a path canonicalized for alphabetical comparison.
+
+    - replaces "-" with "_" so they both compare the same.
+    - removes '-inl' since we don't require them to be after the main header.
+    - lowercase everything, just in case.
+
+    Args:
+      header_path: Path to be canonicalized.
+
+    Returns:
+      Canonicalized path.
+    """
+    return header_path.replace('-inl.h', '.h').replace('-', '_').lower()
+
+  def IsInAlphabeticalOrder(self, clean_lines, linenum, header_path):
+    """Check if a header is in alphabetical order with the previous header.
+
+    Args:
+      clean_lines: A CleansedLines instance containing the file.
+      linenum: The number of the line to check.
+      header_path: Canonicalized header to be checked.
+
+    Returns:
+      Returns true if the header is in alphabetical order.
+    """
+    # If previous section is different from current section, _last_header will
+    # be reset to empty string, so it's always less than current header.
+    #
+    # If previous line was a blank line, assume that the headers are
+    # intentionally sorted the way they are.
+    if (self._last_header > header_path and
+        not Match(r'^\s*$', clean_lines.elided[linenum - 1])):
+      return False
+    return True
+
+  def CheckNextIncludeOrder(self, header_type):
+    """Returns a non-empty error message if the next header is out of order.
+
+    This function also updates the internal state to be ready to check
+    the next include.
+
+    Args:
+      header_type: One of the _XXX_HEADER constants defined above.
+
+    Returns:
+      The empty string if the header is in the right order, or an
+      error message describing what's wrong.
+
+    """
+    error_message = ('Found %s after %s' %
+                     (self._TYPE_NAMES[header_type],
+                      self._SECTION_NAMES[self._section]))
+
+    last_section = self._section
+
+    if header_type == _C_SYS_HEADER:
+      if self._section <= self._C_SECTION:
+        self._section = self._C_SECTION
+      else:
+        self._last_header = ''
+        return error_message
+    elif header_type == _CPP_SYS_HEADER:
+      if self._section <= self._CPP_SECTION:
+        self._section = self._CPP_SECTION
+      else:
+        self._last_header = ''
+        return error_message
+    elif header_type == _LIKELY_MY_HEADER:
+      if self._section <= self._MY_H_SECTION:
+        self._section = self._MY_H_SECTION
+      else:
+        self._section = self._OTHER_H_SECTION
+    elif header_type == _POSSIBLE_MY_HEADER:
+      if self._section <= self._MY_H_SECTION:
+        self._section = self._MY_H_SECTION
+      else:
+        # This will always be the fallback because we're not sure
+        # enough that the header is associated with this file.
+        self._section = self._OTHER_H_SECTION
+    else:
+      assert header_type == _OTHER_HEADER
+      self._section = self._OTHER_H_SECTION
+
+    if last_section != self._section:
+      self._last_header = ''
+
+    return ''
+
+
+class _CppLintState(object):
+  """Maintains module-wide state.."""
+
+  def __init__(self):
+    self.verbose_level = 1  # global setting.
+    self.error_count = 0    # global count of reported errors
+    # filters to apply when emitting error messages
+    self.filters = _DEFAULT_FILTERS[:]
+    self.counting = 'total'  # In what way are we counting errors?
+    self.errors_by_category = {}  # string to int dict storing error counts
+
+    # output format:
+    # "emacs" - format that emacs can parse (default)
+    # "vs7" - format that Microsoft Visual Studio 7 can parse
+    self.output_format = 'emacs'
+
+  def SetOutputFormat(self, output_format):
+    """Sets the output format for errors."""
+    self.output_format = output_format
+
+  def SetVerboseLevel(self, level):
+    """Sets the module's verbosity, and returns the previous setting."""
+    last_verbose_level = self.verbose_level
+    self.verbose_level = level
+    return last_verbose_level
+
+  def SetCountingStyle(self, counting_style):
+    """Sets the module's counting options."""
+    self.counting = counting_style
+
+  def SetFilters(self, filters):
+    """Sets the error-message filters.
+
+    These filters are applied when deciding whether to emit a given
+    error message.
+
+    Args:
+      filters: A string of comma-separated filters (e.g. "+whitespace/indent").
+               Each filter should start with + or -; else we die.
+
+    Raises:
+      ValueError: The comma-separated filters did not all start with '+' or '-'.
+                  E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter"
+    """
+    # Default filters always have less priority than the flag ones.
+    self.filters = _DEFAULT_FILTERS[:]
+    for filt in filters.split(','):
+      clean_filt = filt.strip()
+      if clean_filt:
+        self.filters.append(clean_filt)
+    for filt in self.filters:
+      if not (filt.startswith('+') or filt.startswith('-')):
+        raise ValueError('Every filter in --filters must start with + or -'
+                         ' (%s does not)' % filt)
+
+  def ResetErrorCounts(self):
+    """Sets the module's error statistic back to zero."""
+    self.error_count = 0
+    self.errors_by_category = {}
+
+  def IncrementErrorCount(self, category):
+    """Bumps the module's error statistic."""
+    self.error_count += 1
+    if self.counting in ('toplevel', 'detailed'):
+      if self.counting != 'detailed':
+        category = category.split('/')[0]
+      if category not in self.errors_by_category:
+        self.errors_by_category[category] = 0
+      self.errors_by_category[category] += 1
+
+  def PrintErrorCounts(self):
+    """Print a summary of errors by category, and the total."""
+    for category, count in self.errors_by_category.iteritems():
+      sys.stderr.write('Category \'%s\' errors found: %d\n' %
+                       (category, count))
+    sys.stderr.write('Total errors found: %d\n' % self.error_count)
+
+_cpplint_state = _CppLintState()
+
+
+def _OutputFormat():
+  """Gets the module's output format."""
+  return _cpplint_state.output_format
+
+
+def _SetOutputFormat(output_format):
+  """Sets the module's output format."""
+  _cpplint_state.SetOutputFormat(output_format)
+
+
+def _VerboseLevel():
+  """Returns the module's verbosity setting."""
+  return _cpplint_state.verbose_level
+
+
+def _SetVerboseLevel(level):
+  """Sets the module's verbosity, and returns the previous setting."""
+  return _cpplint_state.SetVerboseLevel(level)
+
+
+def _SetCountingStyle(level):
+  """Sets the module's counting options."""
+  _cpplint_state.SetCountingStyle(level)
+
+
+def _Filters():
+  """Returns the module's list of output filters, as a list."""
+  return _cpplint_state.filters
+
+
+def _SetFilters(filters):
+  """Sets the module's error-message filters.
+
+  These filters are applied when deciding whether to emit a given
+  error message.
+
+  Args:
+    filters: A string of comma-separated filters (e.g. "whitespace/indent").
+             Each filter should start with + or -; else we die.
+  """
+  _cpplint_state.SetFilters(filters)
+
+
+class _FunctionState(object):
+  """Tracks current function name and the number of lines in its body."""
+
+  _NORMAL_TRIGGER = 250  # for --v=0, 500 for --v=1, etc.
+  _TEST_TRIGGER = 400    # about 50% more than _NORMAL_TRIGGER.
+
+  def __init__(self):
+    self.in_a_function = False
+    self.lines_in_function = 0
+    self.current_function = ''
+
+  def Begin(self, function_name):
+    """Start analyzing function body.
+
+    Args:
+      function_name: The name of the function being tracked.
+    """
+    self.in_a_function = True
+    self.lines_in_function = 0
+    self.current_function = function_name
+
+  def Count(self):
+    """Count line in current function body."""
+    if self.in_a_function:
+      self.lines_in_function += 1
+
+  def Check(self, error, filename, linenum):
+    """Report if too many lines in function body.
+
+    Args:
+      error: The function to call with any errors found.
+      filename: The name of the current file.
+      linenum: The number of the line to check.
+    """
+    if Match(r'T(EST|est)', self.current_function):
+      base_trigger = self._TEST_TRIGGER
+    else:
+      base_trigger = self._NORMAL_TRIGGER
+    trigger = base_trigger * 2**_VerboseLevel()
+
+    if self.lines_in_function > trigger:
+      error_level = int(math.log(self.lines_in_function / base_trigger, 2))
+      # e.g. with base_trigger 250: 500 => 1, 1000 => 2, 2000 => 3, ...
+      if error_level > 5:
+        error_level = 5
+      error(filename, linenum, 'readability/fn_size', error_level,
+            'Small and focused functions are preferred:'
+            ' %s has %d non-comment lines'
+            ' (error triggered by exceeding %d lines).'  % (
+                self.current_function, self.lines_in_function, trigger))
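+    # Worked example (illustrative): at --v=0 a non-test function with 1000
+    # non-comment lines has trigger = 250 * 2**0 = 250 and
+    # error_level = int(log2(1000 / 250)) = 2.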
+
+  def End(self):
+    """Stop analyzing function body."""
+    self.in_a_function = False
+
+
+class _IncludeError(Exception):
+  """Indicates a problem with the include order in a file."""
+  pass
+
+
+class FileInfo:
+  """Provides utility functions for filenames.
+
+  FileInfo provides easy access to the components of a file's path
+  relative to the project root.
+  """
+
+  def __init__(self, filename):
+    self._filename = filename
+
+  def FullName(self):
+    """Make Windows paths like Unix."""
+    return os.path.abspath(self._filename).replace('\\', '/')
+
+  def RepositoryName(self):
+    """FullName after removing the local path to the repository.
+
+    If we have a real absolute path name here we can try to do something smart:
+    detecting the root of the checkout and truncating /path/to/checkout from
+    the name so that we get header guards that don't include things like
+    "C:\Documents and Settings\..." or "/home/username/..." in them and thus
+    people on different computers who have checked the source out to different
+    locations won't see bogus errors.
+    """
+    fullname = self.FullName()
+
+    if os.path.exists(fullname):
+      project_dir = os.path.dirname(fullname)
+
+      if os.path.exists(os.path.join(project_dir, ".svn")):
+        # If there's a .svn file in the current directory, we recursively look
+        # up the directory tree for the top of the SVN checkout
+        root_dir = project_dir
+        one_up_dir = os.path.dirname(root_dir)
+        while os.path.exists(os.path.join(one_up_dir, ".svn")):
+          root_dir = os.path.dirname(root_dir)
+          one_up_dir = os.path.dirname(one_up_dir)
+
+        prefix = os.path.commonprefix([root_dir, project_dir])
+        return fullname[len(prefix) + 1:]
+
+      # Not SVN <= 1.6? Try to find a git, hg, or svn top level directory by
+      # searching up from the current path.
+      root_dir = os.path.dirname(fullname)
+      while (root_dir != os.path.dirname(root_dir) and
+             not os.path.exists(os.path.join(root_dir, ".git")) and
+             not os.path.exists(os.path.join(root_dir, ".hg")) and
+             not os.path.exists(os.path.join(root_dir, ".svn"))):
+        root_dir = os.path.dirname(root_dir)
+
+      if (os.path.exists(os.path.join(root_dir, ".git")) or
+          os.path.exists(os.path.join(root_dir, ".hg")) or
+          os.path.exists(os.path.join(root_dir, ".svn"))):
+        prefix = os.path.commonprefix([root_dir, project_dir])
+        return fullname[len(prefix) + 1:]
+
+    # Don't know what to do; header guard warnings may be wrong...
+    return fullname
+
+  def Split(self):
+    """Splits the file into the directory, basename, and extension.
+
+    For 'chrome/browser/browser.cc', Split() would
+    return ('chrome/browser', 'browser', '.cc')
+
+    Returns:
+      A tuple of (directory, basename, extension).
+    """
+
+    googlename = self.RepositoryName()
+    project, rest = os.path.split(googlename)
+    return (project,) + os.path.splitext(rest)
+
+  def BaseName(self):
+    """File base name - text after the final slash, before the final period."""
+    return self.Split()[1]
+
+  def Extension(self):
+    """File extension - text following the final period."""
+    return self.Split()[2]
+
+  def NoExtension(self):
+    """File has no source file extension."""
+    return '/'.join(self.Split()[0:2])
+
+  def IsSource(self):
+    """File has a source file extension."""
+    return self.Extension()[1:] in ('c', 'cc', 'cpp', 'cxx')
+
+
+def _ShouldPrintError(category, confidence, linenum):
+  """If confidence >= verbose, category passes filter and is not suppressed."""
+
+  # There are three ways we might decide not to print an error message:
+  # a "NOLINT(category)" comment appears in the source,
+  # the verbosity level isn't high enough, or the filters filter it out.
+  if IsErrorSuppressedByNolint(category, linenum):
+    return False
+  if confidence < _cpplint_state.verbose_level:
+    return False
+
+  is_filtered = False
+  for one_filter in _Filters():
+    if one_filter.startswith('-'):
+      if category.startswith(one_filter[1:]):
+        is_filtered = True
+    elif one_filter.startswith('+'):
+      if category.startswith(one_filter[1:]):
+        is_filtered = False
+    else:
+      assert False  # should have been checked for in SetFilter.
+  if is_filtered:
+    return False
+
+  return True
+
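+# Example (illustrative): with filters ['-whitespace', '+whitespace/braces'],
+# an error in category 'whitespace/braces' is printed (the later '+' filter
+# wins), while one in 'whitespace/indent' stays filtered out.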
+
+def Error(filename, linenum, category, confidence, message):
+  """Logs the fact we've found a lint error.
+
+  We log where the error was found, and also our confidence in the error,
+  that is, how certain we are this is a legitimate style regression, and
+  not a misidentification or a use that's sometimes justified.
+
+  False positives can be suppressed by the use of
+  "NOLINT(category)" comments on the offending line.  These are
+  parsed into _error_suppressions.
+
+  Args:
+    filename: The name of the file containing the error.
+    linenum: The number of the line containing the error.
+    category: A string used to describe the "category" this bug
+      falls under: "whitespace", say, or "runtime".  Categories
+      may have a hierarchy separated by slashes: "whitespace/indent".
+    confidence: A number from 1-5 representing a confidence score for
+      the error, with 5 meaning that we are certain of the problem,
+      and 1 meaning that it could be a legitimate construct.
+    message: The error message.
+  """
+  if _ShouldPrintError(category, confidence, linenum):
+    _cpplint_state.IncrementErrorCount(category)
+    if _cpplint_state.output_format == 'vs7':
+      sys.stderr.write('%s(%s):  %s  [%s] [%d]\n' % (
+          filename, linenum, message, category, confidence))
+    elif _cpplint_state.output_format == 'eclipse':
+      sys.stderr.write('%s:%s: warning: %s  [%s] [%d]\n' % (
+          filename, linenum, message, category, confidence))
+    else:
+      sys.stderr.write('%s:%s:  %s  [%s] [%d]\n' % (
+          filename, linenum, message, category, confidence))
+
+
+# Matches standard C++ escape sequences per 2.13.2.3 of the C++ standard.
+_RE_PATTERN_CLEANSE_LINE_ESCAPES = re.compile(
+    r'\\([abfnrtv?"\\\']|\d+|x[0-9a-fA-F]+)')
+# Matches strings.  Escape codes should already be removed by ESCAPES.
+_RE_PATTERN_CLEANSE_LINE_DOUBLE_QUOTES = re.compile(r'"[^"]*"')
+# Matches characters.  Escape codes should already be removed by ESCAPES.
+_RE_PATTERN_CLEANSE_LINE_SINGLE_QUOTES = re.compile(r"'.'")
+# Matches multi-line C++ comments.
+# This RE is a little bit more complicated than one might expect, because we
+# have to take care of how spaces are removed, so we can handle comments
+# inside statements better.
+# The current rule is: we only clear spaces from both sides when we're at the
+# end of the line.  Otherwise, we try to remove spaces from the right side;
+# if this doesn't work, we try the left side, but only if there's a
+# non-word character on the right.
+_RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile(
+    r"""(\s*/\*.*\*/\s*$|
+            /\*.*\*/\s+|
+         \s+/\*.*\*/(?=\W)|
+            /\*.*\*/)""", re.VERBOSE)
+
+
+def IsCppString(line):
+  """Does line terminate so, that the next symbol is in string constant.
+
+  This function does not consider single-line nor multi-line comments.
+
+  Args:
+    line: is a partial line of code starting from the 0..n.
+
+  Returns:
+    True, if next character appended to 'line' is inside a
+    string constant.
+  """
+
+  line = line.replace(r'\\', 'XX')  # after this, \\" does not match to \"
+  return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1
+
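+# Example (illustrative): IsCppString('std::string s = "abc') returns True,
+# while IsCppString('std::string s = "abc";') returns False.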
+
+def CleanseRawStrings(raw_lines):
+  """Removes C++11 raw strings from lines.
+
+    Before:
+      static const char kData[] = R"(
+          multi-line string
+          )";
+
+    After:
+      static const char kData[] = ""
+          (replaced by blank line)
+          "";
+
+  Args:
+    raw_lines: list of raw lines.
+
+  Returns:
+    list of lines with C++11 raw strings replaced by empty strings.
+  """
+
+  delimiter = None
+  lines_without_raw_strings = []
+  for line in raw_lines:
+    if delimiter:
+      # Inside a raw string, look for the end
+      end = line.find(delimiter)
+      if end >= 0:
+        # Found the end of the string, match leading space for this
+        # line and resume copying the original lines, and also insert
+        # a "" on the last line.
+        leading_space = Match(r'^(\s*)\S', line)
+        line = leading_space.group(1) + '""' + line[end + len(delimiter):]
+        delimiter = None
+      else:
+        # Haven't found the end yet, append a blank line.
+        line = ''
+
+    else:
+      # Look for beginning of a raw string.
+      # See 2.14.15 [lex.string] for syntax.
+      matched = Match(r'^(.*)\b(?:R|u8R|uR|UR|LR)"([^\s\\()]*)\((.*)$', line)
+      if matched:
+        delimiter = ')' + matched.group(2) + '"'
+
+        end = matched.group(3).find(delimiter)
+        if end >= 0:
+          # Raw string ended on same line
+          line = (matched.group(1) + '""' +
+                  matched.group(3)[end + len(delimiter):])
+          delimiter = None
+        else:
+          # Start of a multi-line raw string
+          line = matched.group(1) + '""'
+
+    lines_without_raw_strings.append(line)
+
+  # TODO(unknown): if delimiter is not None here, we might want to
+  # emit a warning for unterminated string.
+  return lines_without_raw_strings
+
+
+def FindNextMultiLineCommentStart(lines, lineix):
+  """Find the beginning marker for a multiline comment."""
+  while lineix < len(lines):
+    if lines[lineix].strip().startswith('/*'):
+      # Only return this marker if the comment goes beyond this line
+      if lines[lineix].strip().find('*/', 2) < 0:
+        return lineix
+    lineix += 1
+  return len(lines)
+
+
+def FindNextMultiLineCommentEnd(lines, lineix):
+  """We are inside a comment, find the end marker."""
+  while lineix < len(lines):
+    if lines[lineix].strip().endswith('*/'):
+      return lineix
+    lineix += 1
+  return len(lines)
+
+
+def RemoveMultiLineCommentsFromRange(lines, begin, end):
+  """Clears a range of lines for multi-line comments."""
+  # Having // dummy comments makes the lines non-empty, so we will not get
+  # unnecessary blank line warnings later in the code.
+  for i in range(begin, end):
+    lines[i] = '// dummy'
+
+
+def RemoveMultiLineComments(filename, lines, error):
+  """Removes multiline (c-style) comments from lines."""
+  lineix = 0
+  while lineix < len(lines):
+    lineix_begin = FindNextMultiLineCommentStart(lines, lineix)
+    if lineix_begin >= len(lines):
+      return
+    lineix_end = FindNextMultiLineCommentEnd(lines, lineix_begin)
+    if lineix_end >= len(lines):
+      error(filename, lineix_begin + 1, 'readability/multiline_comment', 5,
+            'Could not find end of multi-line comment')
+      return
+    RemoveMultiLineCommentsFromRange(lines, lineix_begin, lineix_end + 1)
+    lineix = lineix_end + 1
+
+
+def CleanseComments(line):
+  """Removes //-comments and single-line C-style /* */ comments.
+
+  Args:
+    line: A line of C++ source.
+
+  Returns:
+    The line with single-line comments removed.
+  """
+  commentpos = line.find('//')
+  if commentpos != -1 and not IsCppString(line[:commentpos]):
+    line = line[:commentpos].rstrip()
+  # get rid of /* ... */
+  return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line)
+
+
+class CleansedLines(object):
+  """Holds 3 copies of all lines with different preprocessing applied to them.
+
+  1) elided member contains lines without strings and comments,
+  2) lines member contains lines without comments, and
+  3) raw_lines member contains all the lines without processing.
+  All these three members are of <type 'list'>, and of the same length.
+  """
+
+  def __init__(self, lines):
+    self.elided = []
+    self.lines = []
+    self.raw_lines = lines
+    self.num_lines = len(lines)
+    self.lines_without_raw_strings = CleanseRawStrings(lines)
+    for linenum in range(len(self.lines_without_raw_strings)):
+      self.lines.append(CleanseComments(
+          self.lines_without_raw_strings[linenum]))
+      elided = self._CollapseStrings(self.lines_without_raw_strings[linenum])
+      self.elided.append(CleanseComments(elided))
+
+  def NumLines(self):
+    """Returns the number of lines represented."""
+    return self.num_lines
+
+  @staticmethod
+  def _CollapseStrings(elided):
+    """Collapses strings and chars on a line to simple "" or '' blocks.
+
+    We nix strings first so we're not fooled by text like '"http://"'
+
+    Args:
+      elided: The line being processed.
+
+    Returns:
+      The line with collapsed strings.
+    """
+    if not _RE_PATTERN_INCLUDE.match(elided):
+      # Remove escaped characters first to make quote/single quote collapsing
+      # basic.  Things that look like escaped characters shouldn't occur
+      # outside of strings and chars.
+      elided = _RE_PATTERN_CLEANSE_LINE_ESCAPES.sub('', elided)
+      elided = _RE_PATTERN_CLEANSE_LINE_SINGLE_QUOTES.sub("''", elided)
+      elided = _RE_PATTERN_CLEANSE_LINE_DOUBLE_QUOTES.sub('""', elided)
+    return elided
+
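+# Example (illustrative): CleansedLines._CollapseStrings('printf("%s", "hi");')
+# returns 'printf("", "");'.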
+
+def FindEndOfExpressionInLine(line, startpos, depth, startchar, endchar):
+  """Find the position just after the matching endchar.
+
+  Args:
+    line: a CleansedLines line.
+    startpos: start searching at this position.
+    depth: nesting level at startpos.
+    startchar: expression opening character.
+    endchar: expression closing character.
+
+  Returns:
+    On finding matching endchar: (index just after matching endchar, 0)
+    Otherwise: (-1, new depth at end of this line)
+  """
+  for i in xrange(startpos, len(line)):
+    if line[i] == startchar:
+      depth += 1
+    elif line[i] == endchar:
+      depth -= 1
+      if depth == 0:
+        return (i + 1, 0)
+  return (-1, depth)
+
+
+def CloseExpression(clean_lines, linenum, pos):
+  """If input points to ( or { or [ or <, finds the position that closes it.
+
+  If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the
+  linenum/pos that correspond to the closing of the expression.
+
+  Args:
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    pos: A position on the line.
+
+  Returns:
+    A tuple (line, linenum, pos) pointer *past* the closing brace, or
+    (line, len(lines), -1) if we never find a close.  Note we ignore
+    strings and comments when matching; and the line we return is the
+    'cleansed' line at linenum.
+  """
+
+  line = clean_lines.elided[linenum]
+  startchar = line[pos]
+  if startchar not in '({[<':
+    return (line, clean_lines.NumLines(), -1)
+  if startchar == '(': endchar = ')'
+  if startchar == '[': endchar = ']'
+  if startchar == '{': endchar = '}'
+  if startchar == '<': endchar = '>'
+
+  # Check first line
+  (end_pos, num_open) = FindEndOfExpressionInLine(
+      line, pos, 0, startchar, endchar)
+  if end_pos > -1:
+    return (line, linenum, end_pos)
+
+  # Continue scanning forward
+  while linenum < clean_lines.NumLines() - 1:
+    linenum += 1
+    line = clean_lines.elided[linenum]
+    (end_pos, num_open) = FindEndOfExpressionInLine(
+        line, 0, num_open, startchar, endchar)
+    if end_pos > -1:
+      return (line, linenum, end_pos)
+
+  # Did not find endchar before end of file, give up
+  return (line, clean_lines.NumLines(), -1)
+
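+# Example (hypothetical line): if clean_lines.elided[linenum] is
+# 'if (foo(bar)) {' and pos points at the first '(', CloseExpression returns
+# that same line, the same linenum, and the position just past the matching
+# ')' (i.e. the index of the space before '{').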
+
+def FindStartOfExpressionInLine(line, endpos, depth, startchar, endchar):
+  """Find position at the matching startchar.
+
+  This is almost the reverse of FindEndOfExpressionInLine, but note
+  that the input position and returned position differ by 1.
+
+  Args:
+    line: a CleansedLines line.
+    endpos: start searching at this position.
+    depth: nesting level at endpos.
+    startchar: expression opening character.
+    endchar: expression closing character.
+
+  Returns:
+    On finding matching startchar: (index at matching startchar, 0)
+    Otherwise: (-1, new depth at beginning of this line)
+  """
+  for i in xrange(endpos, -1, -1):
+    if line[i] == endchar:
+      depth += 1
+    elif line[i] == startchar:
+      depth -= 1
+      if depth == 0:
+        return (i, 0)
+  return (-1, depth)
+
+
+def ReverseCloseExpression(clean_lines, linenum, pos):
+  """If input points to ) or } or ] or >, finds the position that opens it.
+
+  If lines[linenum][pos] points to a ')' or '}' or ']' or '>', finds the
+  linenum/pos that correspond to the opening of the expression.
+
+  Args:
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    pos: A position on the line.
+
+  Returns:
+    A tuple (line, linenum, pos) pointer *at* the opening brace, or
+    (line, 0, -1) if we never find the matching opening brace.  Note
+    we ignore strings and comments when matching; and the line we
+    return is the 'cleansed' line at linenum.
+  """
+  line = clean_lines.elided[linenum]
+  endchar = line[pos]
+  if endchar not in ')}]>':
+    return (line, 0, -1)
+  if endchar == ')': startchar = '('
+  if endchar == ']': startchar = '['
+  if endchar == '}': startchar = '{'
+  if endchar == '>': startchar = '<'
+
+  # Check last line
+  (start_pos, num_open) = FindStartOfExpressionInLine(
+      line, pos, 0, startchar, endchar)
+  if start_pos > -1:
+    return (line, linenum, start_pos)
+
+  # Continue scanning backward
+  while linenum > 0:
+    linenum -= 1
+    line = clean_lines.elided[linenum]
+    (start_pos, num_open) = FindStartOfExpressionInLine(
+        line, len(line) - 1, num_open, startchar, endchar)
+    if start_pos > -1:
+      return (line, linenum, start_pos)
+
+  # Did not find startchar before beginning of file, give up
+  return (line, 0, -1)
+
+
+def CheckForCopyright(filename, lines, error):
+  """Logs an error if a Copyright message appears at the top of the file."""
+
+  # We'll check up to line 10. Don't forget there's a
+  # dummy line at the front.
+  for line in xrange(1, min(len(lines), 11)):
+    if _RE_COPYRIGHT.search(lines[line]):
+      error(filename, 0, 'legal/copyright', 5,
+            'Copyright message found.  '
+            'You should not include a copyright line.')
+
+
+def GetHeaderGuardCPPVariable(filename):
+  """Returns the CPP variable that should be used as a header guard.
+
+  Args:
+    filename: The name of a C++ header file.
+
+  Returns:
+    The CPP variable that should be used as a header guard in the
+    named file.
+
+  """
+
+  # Restores original filename in case that cpplint is invoked from Emacs's
+  # flymake.
+  filename = re.sub(r'_flymake\.h$', '.h', filename)
+  filename = re.sub(r'/\.flymake/([^/]*)$', r'/\1', filename)
+
+  fileinfo = FileInfo(filename)
+  file_path_from_root = fileinfo.RepositoryName()
+  if _root:
+    file_path_from_root = re.sub('^' + _root + os.sep, '', file_path_from_root)
+  return re.sub(r'[-./\s]', '_', file_path_from_root).upper() + '_'
+
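+# Example (hypothetical path): with _root unset, a header at
+# 'include/caffe/blob.hpp' relative to the repository root yields the guard
+# variable 'INCLUDE_CAFFE_BLOB_HPP_'.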
+
+def CheckForHeaderGuard(filename, lines, error):
+  """Checks that the file contains a header guard.
+
+  Logs an error if no #ifndef header guard is present.  For headers that
+  do have a guard, checks that the full pathname is used in the guard name.
+
+  Args:
+    filename: The name of the C++ header file.
+    lines: An array of strings, each representing a line of the file.
+    error: The function to call with any errors found.
+  """
+
+  cppvar = GetHeaderGuardCPPVariable(filename)
+
+  ifndef = None
+  ifndef_linenum = 0
+  define = None
+  endif = None
+  endif_linenum = 0
+  for linenum, line in enumerate(lines):
+    linesplit = line.split()
+    if len(linesplit) >= 2:
+      # find the first occurrence of #ifndef and #define, save arg
+      if not ifndef and linesplit[0] == '#ifndef':
+        # set ifndef to the header guard presented on the #ifndef line.
+        ifndef = linesplit[1]
+        ifndef_linenum = linenum
+      if not define and linesplit[0] == '#define':
+        define = linesplit[1]
+    # find the last occurrence of #endif, save entire line
+    if line.startswith('#endif'):
+      endif = line
+      endif_linenum = linenum
+
+  if not ifndef:
+    error(filename, 0, 'build/header_guard', 5,
+          'No #ifndef header guard found, suggested CPP variable is: %s' %
+          cppvar)
+    return
+
+  if not define:
+    error(filename, 0, 'build/header_guard', 5,
+          'No #define header guard found, suggested CPP variable is: %s' %
+          cppvar)
+    return
+
+  # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__
+  # for backward compatibility.
+  if ifndef != cppvar:
+    error_level = 0
+    if ifndef != cppvar + '_':
+      error_level = 5
+
+    ParseNolintSuppressions(filename, lines[ifndef_linenum], ifndef_linenum,
+                            error)
+    error(filename, ifndef_linenum, 'build/header_guard', error_level,
+          '#ifndef header guard has wrong style, please use: %s' % cppvar)
+
+  if define != ifndef:
+    error(filename, 0, 'build/header_guard', 5,
+          '#ifndef and #define don\'t match, suggested CPP variable is: %s' %
+          cppvar)
+    return
+
+  if endif != ('#endif  // %s' % cppvar):
+    error_level = 0
+    if endif != ('#endif  // %s' % (cppvar + '_')):
+      error_level = 5
+
+    ParseNolintSuppressions(filename, lines[endif_linenum], endif_linenum,
+                            error)
+    error(filename, endif_linenum, 'build/header_guard', error_level,
+          '#endif line should be "#endif  // %s"' % cppvar)
+
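+# For a hypothetical header whose guard variable is CAFFE_FOO_HPP_, the
+# accepted pattern is:
+#   #ifndef CAFFE_FOO_HPP_
+#   #define CAFFE_FOO_HPP_
+#   ...
+#   #endif  // CAFFE_FOO_HPP_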
+
+def CheckForBadCharacters(filename, lines, error):
+  """Logs an error for each line containing bad characters.
+
+  Two kinds of bad characters:
+
+  1. Unicode replacement characters: These indicate that either the file
+  contained invalid UTF-8 (likely) or Unicode replacement characters (which
+  it shouldn't).  Note that it's possible for this to throw off line
+  numbering if the invalid UTF-8 occurred adjacent to a newline.
+
+  2. NUL bytes.  These are problematic for some tools.
+
+  Args:
+    filename: The name of the current file.
+    lines: An array of strings, each representing a line of the file.
+    error: The function to call with any errors found.
+  """
+  for linenum, line in enumerate(lines):
+    if u'\ufffd' in line:
+      error(filename, linenum, 'readability/utf8', 5,
+            'Line contains invalid UTF-8 (or Unicode replacement character).')
+    if '\0' in line:
+      error(filename, linenum, 'readability/nul', 5, 'Line contains NUL byte.')
+
+
+def CheckForNewlineAtEOF(filename, lines, error):
+  """Logs an error if there is no newline char at the end of the file.
+
+  Args:
+    filename: The name of the current file.
+    lines: An array of strings, each representing a line of the file.
+    error: The function to call with any errors found.
+  """
+
+  # The array lines() was created by adding two newlines to the
+  # original file (go figure), then splitting on \n.
+  # To verify that the file ends in \n, we just have to make sure the
+  # second-to-last element of lines() exists and is empty.
+  if len(lines) < 3 or lines[-2]:
+    error(filename, len(lines) - 2, 'whitespace/ending_newline', 5,
+          'Could not find a newline character at the end of the file.')
+
+
+def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error):
+  """Logs an error if we see /* ... */ or "..." that extend past one line.
+
+  /* ... */ comments are legit inside macros, for one line.
+  Otherwise, we prefer // comments, so it's ok to warn about the
+  other.  Likewise, it's ok for strings to extend across multiple
+  lines, as long as a line continuation character (backslash)
+  terminates each line. Although not currently prohibited by the C++
+  style guide, it's ugly and unnecessary. We don't do well with either
+  in this lint program, so we warn about both.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+  line = clean_lines.elided[linenum]
+
+  # Remove all \\ (escaped backslashes) from the line. They are OK, and the
+  # second (escaped) slash may trigger later \" detection erroneously.
+  line = line.replace('\\\\', '')
+
+  if line.count('/*') > line.count('*/'):
+    error(filename, linenum, 'readability/multiline_comment', 5,
+          'Complex multi-line /*...*/-style comment found. '
+          'Lint may give bogus warnings.  '
+          'Consider replacing these with //-style comments, '
+          'with #if 0...#endif, '
+          'or with more clearly structured multi-line comments.')
+
+  if (line.count('"') - line.count('\\"')) % 2:
+    error(filename, linenum, 'readability/multiline_string', 5,
+          'Multi-line string ("...") found.  This lint script doesn\'t '
+          'do well with such strings, and may give bogus warnings.  '
+          'Use C++11 raw strings or concatenation instead.')
+
+
+caffe_alt_function_list = (
+    ('memset', ['caffe_set', 'caffe_memset']),
+    ('cudaMemset', ['caffe_gpu_set', 'caffe_gpu_memset']),
+    ('memcpy', ['caffe_copy', 'caffe_memcpy']),
+    ('cudaMemcpy', ['caffe_copy', 'caffe_gpu_memcpy']),
+    )
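+# For example (illustrative): a call like 'memset(data, 0, count)' would be
+# flagged by CheckCaffeAlternatives below, which suggests caffe_set(...) or
+# caffe_memset(...) instead.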
+
+
+def CheckCaffeAlternatives(filename, clean_lines, linenum, error):
+  """Checks for C(++) functions for which a Caffe substitute should be used.
+
+  For certain native C functions (memset, memcpy), there is a Caffe alternative
+  which should be used instead.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+  line = clean_lines.elided[linenum]
+  for function, alts in caffe_alt_function_list:
+    ix = line.find(function + '(')
+    if ix >= 0 and (ix == 0 or (not line[ix - 1].isalnum() and
+                                line[ix - 1] not in ('_', '.', '>'))):
+      disp_alts = ['%s(...)' % alt for alt in alts]
+      error(filename, linenum, 'caffe/alt_fn', 2,
+            'Use Caffe function %s instead of %s(...).' %
+                (' or '.join(disp_alts), function))
+
+
+def CheckCaffeDataLayerSetUp(filename, clean_lines, linenum, error):
+  """Except the base classes, Caffe DataLayer should define DataLayerSetUp
+     instead of LayerSetUp.
+     
+  The base DataLayers define common SetUp steps, the subclasses should
+  not override them.
+  
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+  line = clean_lines.elided[linenum]
+  ix = line.find('DataLayer<Dtype>::LayerSetUp')
+  if ix >= 0 and (
+       line.find('void DataLayer<Dtype>::LayerSetUp') != -1 or
+       line.find('void ImageDataLayer<Dtype>::LayerSetUp') != -1 or
+       line.find('void MemoryDataLayer<Dtype>::LayerSetUp') != -1 or
+       line.find('void WindowDataLayer<Dtype>::LayerSetUp') != -1):
+      error(filename, linenum, 'caffe/data_layer_setup', 2,
+            'Except for the base classes, Caffe DataLayers should define'
+            + ' DataLayerSetUp instead of LayerSetUp. The base DataLayers'
+            + ' define common SetUp steps; the subclasses should'
+            + ' not override them.')
+  ix = line.find('DataLayer<Dtype>::DataLayerSetUp')
+  if ix >= 0 and (
+       line.find('void Base') == -1 and
+       line.find('void DataLayer<Dtype>::DataLayerSetUp') == -1 and
+       line.find('void ImageDataLayer<Dtype>::DataLayerSetUp') == -1 and
+       line.find('void MemoryDataLayer<Dtype>::DataLayerSetUp') == -1 and
+       line.find('void WindowDataLayer<Dtype>::DataLayerSetUp') == -1):
+      error(filename, linenum, 'caffe/data_layer_setup', 2,
+            'Except for the base classes, Caffe DataLayers should define'
+            + ' DataLayerSetUp instead of LayerSetUp. The base DataLayers'
+            + ' define common SetUp steps; the subclasses should'
+            + ' not override them.')
+
+
+c_random_function_list = (
+    'rand(',
+    'rand_r(',
+    'random(',
+    )
+
+def CheckCaffeRandom(filename, clean_lines, linenum, error):
+  """Checks for calls to C random functions (rand, rand_r, random, ...).
+
+  Caffe code should (almost) always use the caffe_rng_* functions rather
+  than these, as the internal state of these C functions is independent of the
+  native Caffe RNG system which should produce deterministic results for a
+  fixed Caffe seed set using Caffe::set_random_seed(...).
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+  line = clean_lines.elided[linenum]
+  for function in c_random_function_list:
+    ix = line.find(function)
+    # Comparisons made explicit for clarity -- pylint: disable=g-explicit-bool-comparison
+    if ix >= 0 and (ix == 0 or (not line[ix - 1].isalnum() and
+                                line[ix - 1] not in ('_', '.', '>'))):
+      error(filename, linenum, 'caffe/random_fn', 2,
+            'Use caffe_rng_rand() (or other caffe_rng_* function) instead of '
+            + function +
+            ') to ensure results are deterministic for a fixed Caffe seed.')
+
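+# For example (illustrative): 'int r = rand();' is flagged by
+# CheckCaffeRandom above, while a call to caffe_rng_rand() is not.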
+
+threading_list = (
+    ('asctime(', 'asctime_r('),
+    ('ctime(', 'ctime_r('),
+    ('getgrgid(', 'getgrgid_r('),
+    ('getgrnam(', 'getgrnam_r('),
+    ('getlogin(', 'getlogin_r('),
+    ('getpwnam(', 'getpwnam_r('),
+    ('getpwuid(', 'getpwuid_r('),
+    ('gmtime(', 'gmtime_r('),
+    ('localtime(', 'localtime_r('),
+    ('strtok(', 'strtok_r('),
+    ('ttyname(', 'ttyname_r('),
+    )
+
+
+def CheckPosixThreading(filename, clean_lines, linenum, error):
+  """Checks for calls to thread-unsafe functions.
+
+  Much code was originally written without consideration of
+  multi-threading.  Also, engineers rely on their old experience; they
+  learned POSIX before threading extensions were added.  These checks
+  guide engineers toward the thread-safe variants (when using POSIX
+  directly).
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+  line = clean_lines.elided[linenum]
+  for single_thread_function, multithread_safe_function in threading_list:
+    ix = line.find(single_thread_function)
+    # Comparisons made explicit for clarity -- pylint: disable=g-explicit-bool-comparison
+    if ix >= 0 and (ix == 0 or (not line[ix - 1].isalnum() and
+                                line[ix - 1] not in ('_', '.', '>'))):
+      error(filename, linenum, 'runtime/threadsafe_fn', 2,
+            'Consider using ' + multithread_safe_function +
+            '...) instead of ' + single_thread_function +
+            '...) for improved thread safety.')
+
+
+def CheckVlogArguments(filename, clean_lines, linenum, error):
+  """Checks that VLOG() is only used for defining a logging level.
+
+  For example, VLOG(2) is correct. VLOG(INFO), VLOG(WARNING), VLOG(ERROR), and
+  VLOG(FATAL) are not.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+  line = clean_lines.elided[linenum]
+  if Search(r'\bVLOG\((INFO|ERROR|WARNING|DFATAL|FATAL)\)', line):
+    error(filename, linenum, 'runtime/vlog', 5,
+          'VLOG() should be used with numeric verbosity level.  '
+          'Use LOG() if you want symbolic severity levels.')
+
+
+# Matches invalid increment: *count++, which moves pointer instead of
+# incrementing a value.
+_RE_PATTERN_INVALID_INCREMENT = re.compile(
+    r'^\s*\*\w+(\+\+|--);')
+
+
+def CheckInvalidIncrement(filename, clean_lines, linenum, error):
+  """Checks for invalid increment *count++.
+
+  For example following function:
+  void increment_counter(int* count) {
+    *count++;
+  }
+  is invalid, because it effectively does count++, moving pointer, and should
+  be replaced with ++*count, (*count)++ or *count += 1.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+  line = clean_lines.elided[linenum]
+  if _RE_PATTERN_INVALID_INCREMENT.match(line):
+    error(filename, linenum, 'runtime/invalid_increment', 5,
+          'Changing pointer instead of value (or unused value of operator*).')
+
+
+class _BlockInfo(object):
+  """Stores information about a generic block of code."""
+
+  def __init__(self, seen_open_brace):
+    self.seen_open_brace = seen_open_brace
+    self.open_parentheses = 0
+    self.inline_asm = _NO_ASM
+
+  def CheckBegin(self, filename, clean_lines, linenum, error):
+    """Run checks that applies to text up to the opening brace.
+
+    This is mostly for checking the text after the class identifier
+    and the "{", usually where the base class is specified.  For other
+    blocks, there isn't much to check, so we always pass.
+
+    Args:
+      filename: The name of the current file.
+      clean_lines: A CleansedLines instance containing the file.
+      linenum: The number of the line to check.
+      error: The function to call with any errors found.
+    """
+    pass
+
+  def CheckEnd(self, filename, clean_lines, linenum, error):
+    """Run checks that applies to text after the closing brace.
+
+    This is mostly used for checking end of namespace comments.
+
+    Args:
+      filename: The name of the current file.
+      clean_lines: A CleansedLines instance containing the file.
+      linenum: The number of the line to check.
+      error: The function to call with any errors found.
+    """
+    pass
+
+
+class _ClassInfo(_BlockInfo):
+  """Stores information about a class."""
+
+  def __init__(self, name, class_or_struct, clean_lines, linenum):
+    _BlockInfo.__init__(self, False)
+    self.name = name
+    self.starting_linenum = linenum
+    self.is_derived = False
+    if class_or_struct == 'struct':
+      self.access = 'public'
+      self.is_struct = True
+    else:
+      self.access = 'private'
+      self.is_struct = False
+
+    # Remember initial indentation level for this class.  Using raw_lines here
+    # instead of elided to account for leading comments.
+    initial_indent = Match(r'^( *)\S', clean_lines.raw_lines[linenum])
+    if initial_indent:
+      self.class_indent = len(initial_indent.group(1))
+    else:
+      self.class_indent = 0
+
+    # Try to find the end of the class.  This will be confused by things like:
+    #   class A {
+    #   } *x = { ...
+    #
+    # But it's still good enough for CheckSectionSpacing.
+    self.last_line = 0
+    depth = 0
+    for i in range(linenum, clean_lines.NumLines()):
+      line = clean_lines.elided[i]
+      depth += line.count('{') - line.count('}')
+      if not depth:
+        self.last_line = i
+        break
+
+  def CheckBegin(self, filename, clean_lines, linenum, error):
+    # Look for a bare ':'
+    if Search('(^|[^:]):($|[^:])', clean_lines.elided[linenum]):
+      self.is_derived = True
+
+  def CheckEnd(self, filename, clean_lines, linenum, error):
+    # Check that closing brace is aligned with beginning of the class.
+    # Only do this if the closing brace is indented by only whitespaces.
+    # This means we will not check single-line class definitions.
+    indent = Match(r'^( *)\}', clean_lines.elided[linenum])
+    if indent and len(indent.group(1)) != self.class_indent:
+      if self.is_struct:
+        parent = 'struct ' + self.name
+      else:
+        parent = 'class ' + self.name
+      error(filename, linenum, 'whitespace/indent', 3,
+            'Closing brace should be aligned with beginning of %s' % parent)
+
+
+class _NamespaceInfo(_BlockInfo):
+  """Stores information about a namespace."""
+
+  def __init__(self, name, linenum):
+    _BlockInfo.__init__(self, False)
+    self.name = name or ''
+    self.starting_linenum = linenum
+
+  def CheckEnd(self, filename, clean_lines, linenum, error):
+    """Check end of namespace comments."""
+    line = clean_lines.raw_lines[linenum]
+
+    # Check how many lines are enclosed in this namespace.  Don't issue
+    # warning for missing namespace comments if there aren't enough
+    # lines.  However, do apply checks if there is already an end of
+    # namespace comment and it's incorrect.
+    #
+    # TODO(unknown): We always want to check end of namespace comments
+    # if a namespace is large, but sometimes we also want to apply the
+    # check if a short namespace contained nontrivial things (something
+    # other than forward declarations).  There is currently no logic on
+    # deciding what these nontrivial things are, so this check is
+    # triggered by namespace size only, which works most of the time.
+    if (linenum - self.starting_linenum < 10
+        and not Match(r'};*\s*(//|/\*).*\bnamespace\b', line)):
+      return
+
+    # Look for matching comment at end of namespace.
+    #
+    # Note that we accept C style "/* */" comments for terminating
+    # namespaces, so that code that terminates namespaces inside
+    # preprocessor macros can be cpplint clean.
+    #
+    # We also accept stuff like "// end of namespace <name>." with the
+    # period at the end.
+    #
+    # Besides these, we don't accept anything else, otherwise we might
+    # get false negatives when existing comment is a substring of the
+    # expected namespace.
+    if self.name:
+      # Named namespace
+      if not Match((r'};*\s*(//|/\*).*\bnamespace\s+' + re.escape(self.name) +
+                    r'[\*/\.\\\s]*$'),
+                   line):
+        error(filename, linenum, 'readability/namespace', 5,
+              'Namespace should be terminated with "// namespace %s"' %
+              self.name)
+    else:
+      # Anonymous namespace
+      if not Match(r'};*\s*(//|/\*).*\bnamespace[\*/\.\\\s]*$', line):
+        error(filename, linenum, 'readability/namespace', 5,
+              'Namespace should be terminated with "// namespace"')
+
+
+class _PreprocessorInfo(object):
+  """Stores checkpoints of nesting stacks when #if/#else is seen."""
+
+  def __init__(self, stack_before_if):
+    # The entire nesting stack before #if
+    self.stack_before_if = stack_before_if
+
+    # The entire nesting stack up to #else
+    self.stack_before_else = []
+
+    # Whether we have already seen #else or #elif
+    self.seen_else = False
+
+
+class _NestingState(object):
+  """Holds states related to parsing braces."""
+
+  def __init__(self):
+    # Stack for tracking all braces.  An object is pushed whenever we
+    # see a "{", and popped when we see a "}".  Only 3 types of
+    # objects are possible:
+    # - _ClassInfo: a class or struct.
+    # - _NamespaceInfo: a namespace.
+    # - _BlockInfo: some other type of block.
+    self.stack = []
+
+    # Stack of _PreprocessorInfo objects.
+    self.pp_stack = []
+
+  def SeenOpenBrace(self):
+    """Check if we have seen the opening brace for the innermost block.
+
+    Returns:
+      True if we have seen the opening brace, False if the innermost
+      block is still expecting an opening brace.
+    """
+    return (not self.stack) or self.stack[-1].seen_open_brace
+
+  def InNamespaceBody(self):
+    """Check if we are currently one level inside a namespace body.
+
+    Returns:
+      True if top of the stack is a namespace block, False otherwise.
+    """
+    return self.stack and isinstance(self.stack[-1], _NamespaceInfo)
+
+  def UpdatePreprocessor(self, line):
+    """Update preprocessor stack.
+
+    We need to handle preprocessors due to classes like this:
+      #ifdef SWIG
+      struct ResultDetailsPageElementExtensionPoint {
+      #else
+      struct ResultDetailsPageElementExtensionPoint : public Extension {
+      #endif
+
+    We make the following assumptions (good enough for most files):
+    - Preprocessor condition evaluates to true from #if up to first
+      #else/#elif/#endif.
+
+    - Preprocessor condition evaluates to false from #else/#elif up
+      to #endif.  We still perform lint checks on these lines, but
+      these do not affect nesting stack.
+
+    Args:
+      line: current line to check.
+    """
+    if Match(r'^\s*#\s*(if|ifdef|ifndef)\b', line):
+      # Beginning of #if block, save the nesting stack here.  The saved
+      # stack will allow us to restore the parsing state in the #else case.
+      self.pp_stack.append(_PreprocessorInfo(copy.deepcopy(self.stack)))
+    elif Match(r'^\s*#\s*(else|elif)\b', line):
+      # Beginning of #else block
+      if self.pp_stack:
+        if not self.pp_stack[-1].seen_else:
+          # This is the first #else or #elif block.  Remember the
+          # whole nesting stack up to this point.  This is what we
+          # keep after the #endif.
+          self.pp_stack[-1].seen_else = True
+          self.pp_stack[-1].stack_before_else = copy.deepcopy(self.stack)
+
+        # Restore the stack to how it was before the #if
+        self.stack = copy.deepcopy(self.pp_stack[-1].stack_before_if)
+      else:
+        # TODO(unknown): unexpected #else, issue warning?
+        pass
+    elif Match(r'^\s*#\s*endif\b', line):
+      # End of #if or #else blocks.
+      if self.pp_stack:
+        # If we saw an #else, we will need to restore the nesting
+        # stack to its former state before the #else, otherwise we
+        # will just continue from where we left off.
+        if self.pp_stack[-1].seen_else:
+          # Here we can just use a shallow copy since we are the last
+          # reference to it.
+          self.stack = self.pp_stack[-1].stack_before_else
+        # Drop the corresponding #if
+        self.pp_stack.pop()
+      else:
+        # TODO(unknown): unexpected #endif, issue warning?
+        pass
+
+  def Update(self, filename, clean_lines, linenum, error):
+    """Update nesting state with current line.
+
+    Args:
+      filename: The name of the current file.
+      clean_lines: A CleansedLines instance containing the file.
+      linenum: The number of the line to check.
+      error: The function to call with any errors found.
+    """
+    line = clean_lines.elided[linenum]
+
+    # Update pp_stack first
+    self.UpdatePreprocessor(line)
+
+    # Count parentheses.  This is to avoid adding struct arguments to
+    # the nesting stack.
+    if self.stack:
+      inner_block = self.stack[-1]
+      depth_change = line.count('(') - line.count(')')
+      inner_block.open_parentheses += depth_change
+
+      # Also check if we are starting or ending an inline assembly block.
+      if inner_block.inline_asm in (_NO_ASM, _END_ASM):
+        if (depth_change != 0 and
+            inner_block.open_parentheses == 1 and
+            _MATCH_ASM.match(line)):
+          # Enter assembly block
+          inner_block.inline_asm = _INSIDE_ASM
+        else:
+          # Not entering assembly block.  If previous line was _END_ASM,
+          # we will now shift to _NO_ASM state.
+          inner_block.inline_asm = _NO_ASM
+      elif (inner_block.inline_asm == _INSIDE_ASM and
+            inner_block.open_parentheses == 0):
+        # Exit assembly block
+        inner_block.inline_asm = _END_ASM
+
+    # Consume namespace declaration at the beginning of the line.  Do
+    # this in a loop so that we catch same line declarations like this:
+    #   namespace proto2 { namespace bridge { class MessageSet; } }
+    while True:
+      # Match start of namespace.  The "\b\s*" below catches namespace
+      # declarations even if they aren't followed by whitespace; this
+      # is so that we don't confuse our namespace checker.  The
+      # missing spaces will be flagged by CheckSpacing.
+      namespace_decl_match = Match(r'^\s*namespace\b\s*([:\w]+)?(.*)$', line)
+      if not namespace_decl_match:
+        break
+
+      new_namespace = _NamespaceInfo(namespace_decl_match.group(1), linenum)
+      self.stack.append(new_namespace)
+
+      line = namespace_decl_match.group(2)
+      if line.find('{') != -1:
+        new_namespace.seen_open_brace = True
+        line = line[line.find('{') + 1:]
+
+    # Look for a class declaration in whatever is left of the line
+    # after parsing namespaces.  The regexp accounts for decorated classes
+    # such as in:
+    #   class LOCKABLE API Object {
+    #   };
+    #
+    # Templates with class arguments may confuse the parser, for example:
+    #   template <class T
+    #             class Comparator = less<T>,
+    #             class Vector = vector<T> >
+    #   class HeapQueue {
+    #
+    # Because this parser has no nesting state about templates, by the
+    # time it saw "class Comparator", it may think that it's a new class.
+    # Nested templates have a similar problem:
+    #   template <
+    #       typename ExportedType,
+    #       typename TupleType,
+    #       template <typename, typename> class ImplTemplate>
+    #
+    # To avoid these cases, we ignore classes that are followed by '=' or '>'
+    class_decl_match = Match(
+        r'\s*(template\s*<[\w\s<>,:]*>\s*)?'
+        r'(class|struct)\s+([A-Z_]+\s+)*(\w+(?:::\w+)*)'
+        r'(([^=>]|<[^<>]*>|<[^<>]*<[^<>]*>\s*>)*)$', line)
+    if (class_decl_match and
+        (not self.stack or self.stack[-1].open_parentheses == 0)):
+      self.stack.append(_ClassInfo(
+          class_decl_match.group(4), class_decl_match.group(2),
+          clean_lines, linenum))
+      line = class_decl_match.group(5)
+
+    # If we have not yet seen the opening brace for the innermost block,
+    # run checks here.
+    if not self.SeenOpenBrace():
+      self.stack[-1].CheckBegin(filename, clean_lines, linenum, error)
+
+    # Update access control if we are inside a class/struct
+    if self.stack and isinstance(self.stack[-1], _ClassInfo):
+      classinfo = self.stack[-1]
+      access_match = Match(
+          r'^(.*)\b(public|private|protected|signals)(\s+(?:slots\s*)?)?'
+          r':(?:[^:]|$)',
+          line)
+      if access_match:
+        classinfo.access = access_match.group(2)
+
+        # Check that access keywords are indented +1 space.  Skip this
+        # check if the keywords are not preceded by whitespaces.
+        indent = access_match.group(1)
+        if (len(indent) != classinfo.class_indent + 1 and
+            Match(r'^\s*$', indent)):
+          if classinfo.is_struct:
+            parent = 'struct ' + classinfo.name
+          else:
+            parent = 'class ' + classinfo.name
+          slots = ''
+          if access_match.group(3):
+            slots = access_match.group(3)
+          error(filename, linenum, 'whitespace/indent', 3,
+                '%s%s: should be indented +1 space inside %s' % (
+                    access_match.group(2), slots, parent))
+
+    # Consume braces or semicolons from what's left of the line
+    while True:
+      # Match first brace, semicolon, or closed parenthesis.
+      matched = Match(r'^[^{;)}]*([{;)}])(.*)$', line)
+      if not matched:
+        break
+
+      token = matched.group(1)
+      if token == '{':
+        # If namespace or class hasn't seen an opening brace yet, mark
+        # namespace/class head as complete.  Push a new block onto the
+        # stack otherwise.
+        if not self.SeenOpenBrace():
+          self.stack[-1].seen_open_brace = True
+        else:
+          self.stack.append(_BlockInfo(True))
+          if _MATCH_ASM.match(line):
+            self.stack[-1].inline_asm = _BLOCK_ASM
+      elif token == ';' or token == ')':
+        # If we haven't seen an opening brace yet, but we already saw
+        # a semicolon, this is probably a forward declaration.  Pop
+        # the stack for these.
+        #
+        # Similarly, if we haven't seen an opening brace yet, but we
+        # already saw a closing parenthesis, then these are probably
+        # function arguments with extra "class" or "struct" keywords.
+        # Also pop the stack for these.
+        if not self.SeenOpenBrace():
+          self.stack.pop()
+      else:  # token == '}'
+        # Perform end of block checks and pop the stack.
+        if self.stack:
+          self.stack[-1].CheckEnd(filename, clean_lines, linenum, error)
+          self.stack.pop()
+      line = matched.group(2)
+
+  def InnermostClass(self):
+    """Get class info on the top of the stack.
+
+    Returns:
+      A _ClassInfo object if we are inside a class, or None otherwise.
+    """
+    for i in range(len(self.stack), 0, -1):
+      classinfo = self.stack[i - 1]
+      if isinstance(classinfo, _ClassInfo):
+        return classinfo
+    return None
+
+  def CheckCompletedBlocks(self, filename, error):
+    """Checks that all classes and namespaces have been completely parsed.
+
+    Call this when all lines in a file have been processed.
+    Args:
+      filename: The name of the current file.
+      error: The function to call with any errors found.
+    """
+    # Note: This test can result in false positives if #ifdef constructs
+    # get in the way of brace matching. See the testBuildClass test in
+    # cpplint_unittest.py for an example of this.
+    for obj in self.stack:
+      if isinstance(obj, _ClassInfo):
+        error(filename, obj.starting_linenum, 'build/class', 5,
+              'Failed to find complete declaration of class %s' %
+              obj.name)
+      elif isinstance(obj, _NamespaceInfo):
+        error(filename, obj.starting_linenum, 'build/namespaces', 5,
+              'Failed to find complete declaration of namespace %s' %
+              obj.name)
+
+
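+# Illustrative sketch (hypothetical helper, and a simplification): shows how the
+# namespace regexp used in _NestingState.Update() above peels one declaration at
+# a time off a line such as "namespace a { namespace b { class C; } }",
+# collecting ['a', 'b'].
+def _ExampleConsumeNamespaces(line):
+  names = []
+  while True:
+    namespace_decl_match = Match(r'^\s*namespace\b\s*([:\w]+)?(.*)$', line)
+    if not namespace_decl_match:
+      break
+    names.append(namespace_decl_match.group(1))
+    line = namespace_decl_match.group(2)
+    if line.find('{') != -1:
+      line = line[line.find('{') + 1:]
+  return names
+
+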
+def CheckForNonStandardConstructs(filename, clean_lines, linenum,
+                                  nesting_state, error):
+  r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.
+
+  Complain about several constructs which gcc-2 accepts, but which are
+  not standard C++.  Warning about these in lint is one way to ease the
+  transition to new compilers.
+  - put storage class first (e.g. "static const" instead of "const static").
+  - "%lld" instead of %qd" in printf-type functions.
+  - "%1$d" is non-standard in printf-type functions.
+  - "\%" is an undefined character escape sequence.
+  - text after #endif is not allowed.
+  - invalid inner-style forward declaration.
+  - >? and <? operators, and their >?= and <?= cousins.
+
+  Additionally, check for constructor/destructor style violations and reference
+  members, as it is very convenient to do so while checking for
+  gcc-2 compliance.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    nesting_state: A _NestingState instance which maintains information about
+                   the current stack of nested blocks being parsed.
+    error: A callable to which errors are reported, which takes 4 arguments:
+           filename, line number, error level, and message
+  """
+
+  # Remove comments from the line, but leave in strings for now.
+  line = clean_lines.lines[linenum]
+
+  if Search(r'printf\s*\(.*".*%[-+ ]?\d*q', line):
+    error(filename, linenum, 'runtime/printf_format', 3,
+          '%q in format strings is deprecated.  Use %ll instead.')
+
+  if Search(r'printf\s*\(.*".*%\d+\$', line):
+    error(filename, linenum, 'runtime/printf_format', 2,
+          '%N$ formats are unconventional.  Try rewriting to avoid them.')
+
+  # Remove escaped backslashes before looking for undefined escapes.
+  line = line.replace('\\\\', '')
+
+  if Search(r'("|\').*\\(%|\[|\(|{)', line):
+    error(filename, linenum, 'build/printf_format', 3,
+          '%, [, (, and { are undefined character escapes.  Unescape them.')
+
+  # For the rest, work with both comments and strings removed.
+  line = clean_lines.elided[linenum]
+
+  if Search(r'\b(const|volatile|void|char|short|int|long'
+            r'|float|double|signed|unsigned'
+            r'|schar|u?int8|u?int16|u?int32|u?int64)'
+            r'\s+(register|static|extern|typedef)\b',
+            line):
+    error(filename, linenum, 'build/storage_class', 5,
+          'Storage class (static, extern, typedef, etc) should be first.')
+
+  if Match(r'\s*#\s*endif\s*[^/\s]+', line):
+    error(filename, linenum, 'build/endif_comment', 5,
+          'Uncommented text after #endif is non-standard.  Use a comment.')
+
+  if Match(r'\s*class\s+(\w+\s*::\s*)+\w+\s*;', line):
+    error(filename, linenum, 'build/forward_decl', 5,
+          'Inner-style forward declarations are invalid.  Remove this line.')
+
+  if Search(r'(\w+|[+-]?\d+(\.\d*)?)\s*(<|>)\?=?\s*(\w+|[+-]?\d+)(\.\d*)?',
+            line):
+    error(filename, linenum, 'build/deprecated', 3,
+          '>? and <? (max and min) operators are non-standard and deprecated.')
+
+  if Search(r'^\s*const\s*string\s*&\s*\w+\s*;', line):
+    # TODO(unknown): Could it be expanded safely to arbitrary references,
+    # without triggering too many false positives? The first
+    # attempt triggered 5 warnings for mostly benign code in the regtest, hence
+    # the restriction.
+    # Here's the original regexp, for reference:
+    # type_name = r'\w+((\s*::\s*\w+)|(\s*<\s*\w+?\s*>))?'
+    # r'\s*const\s*' + type_name + '\s*&\s*\w+\s*;'
+    error(filename, linenum, 'runtime/member_string_references', 2,
+          'const string& members are dangerous. It is much better to use '
+          'alternatives, such as pointers or simple constants.')
+
+  # Everything else in this function operates on class declarations.
+  # Return early if the top of the nesting stack is not a class, or if
+  # the class head is not completed yet.
+  classinfo = nesting_state.InnermostClass()
+  if not classinfo or not classinfo.seen_open_brace:
+    return
+
+  # The class may have been declared with namespace or classname qualifiers.
+  # The constructor and destructor will not have those qualifiers.
+  base_classname = classinfo.name.split('::')[-1]
+
+  # Look for single-argument constructors that aren't marked explicit.
+  # Technically a valid construct, but against style.
+  args = Match(r'\s+(?:inline\s+)?%s\s*\(([^,()]+)\)'
+               % re.escape(base_classname),
+               line)
+  if (args and
+      args.group(1) != 'void' and
+      not Match(r'(const\s+)?%s(\s+const)?\s*(?:<\w+>\s*)?&'
+                % re.escape(base_classname), args.group(1).strip())):
+    error(filename, linenum, 'runtime/explicit', 5,
+          'Single-argument constructors should be marked explicit.')
+
+
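+# Illustrative sketch (hypothetical helper): the storage-class test above flags
+# declarations where the storage class trails the type, e.g.
+# "const static int kFoo = 1;", but accepts "static const int kFoo = 1;".
+def _ExampleStorageClassAfterType(line):
+  return bool(Search(r'\b(const|volatile|void|char|short|int|long'
+                     r'|float|double|signed|unsigned'
+                     r'|schar|u?int8|u?int16|u?int32|u?int64)'
+                     r'\s+(register|static|extern|typedef)\b',
+                     line))
+
+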
+def CheckSpacingForFunctionCall(filename, line, linenum, error):
+  """Checks for the correctness of various spacing around function calls.
+
+  Args:
+    filename: The name of the current file.
+    line: The text of the line to check.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+
+  # Since function calls often occur inside if/for/while/switch
+  # expressions - which have their own, more liberal conventions - we
+  # first see if we should be looking inside such an expression for a
+  # function call, to which we can apply more strict standards.
+  fncall = line    # if there's no control flow construct, look at whole line
+  for pattern in (r'\bif\s*\((.*)\)\s*{',
+                  r'\bfor\s*\((.*)\)\s*{',
+                  r'\bwhile\s*\((.*)\)\s*[{;]',
+                  r'\bswitch\s*\((.*)\)\s*{'):
+    match = Search(pattern, line)
+    if match:
+      fncall = match.group(1)    # look inside the parens for function calls
+      break
+
+  # Except in if/for/while/switch, there should never be space
+  # immediately inside parens (e.g. "f( 3, 4 )").  We make an exception
+  # for nested parens ( (a+b) + c ).  Likewise, there should never be
+  # a space before a ( when it's a function argument.  I assume it's a
+  # function argument when the char before the whitespace is legal in
+  # a function name (alnum + _) and we're not starting a macro.  Also ignore
+  # pointers and references to arrays and functions because they're too
+  # tricky: we use a very simple way to recognize these:
+  # " (something)(maybe-something)" or
+  # " (something)(maybe-something," or
+  # " (something)[something]"
+  # Note that we assume the contents of [] to be short enough that
+  # they'll never need to wrap.
+  if (  # Ignore control structures.
+      not Search(r'\b(if|for|while|switch|return|new|delete|catch|sizeof)\b',
+                 fncall) and
+      # Ignore pointers/references to functions.
+      not Search(r' \([^)]+\)\([^)]*(\)|,$)', fncall) and
+      # Ignore pointers/references to arrays.
+      not Search(r' \([^)]+\)\[[^\]]+\]', fncall)):
+    if Search(r'\w\s*\(\s(?!\s*\\$)', fncall):      # a ( used for a fn call
+      error(filename, linenum, 'whitespace/parens', 4,
+            'Extra space after ( in function call')
+    elif Search(r'\(\s+(?!(\s*\\)|\()', fncall):
+      error(filename, linenum, 'whitespace/parens', 2,
+            'Extra space after (')
+    if (Search(r'\w\s+\(', fncall) and
+        not Search(r'#\s*define|typedef', fncall) and
+        not Search(r'\w\s+\((\w+::)*\*\w+\)\(', fncall)):
+      error(filename, linenum, 'whitespace/parens', 4,
+            'Extra space before ( in function call')
+    # If the ) is followed only by a newline or a { + newline, assume it's
+    # part of a control statement (if/while/etc), and don't complain
+    if Search(r'[^)]\s+\)\s*[^{\s]', fncall):
+      # If the closing parenthesis is preceded by only whitespaces,
+      # try to give a more descriptive error message.
+      if Search(r'^\s+\)', fncall):
+        error(filename, linenum, 'whitespace/parens', 2,
+              'Closing ) should be moved to the previous line')
+      else:
+        error(filename, linenum, 'whitespace/parens', 2,
+              'Extra space before )')
+
+
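+# Illustrative sketch (hypothetical helper): given a line with a control-flow
+# construct, the check above only inspects the text inside its parentheses, so
+# for "if (f( 3, 4 )) {" the spacing rules are applied to "f( 3, 4 )" rather
+# than to the more liberal "if (...)" part.
+def _ExampleExtractFunctionCall(line):
+  for pattern in (r'\bif\s*\((.*)\)\s*{',
+                  r'\bfor\s*\((.*)\)\s*{',
+                  r'\bwhile\s*\((.*)\)\s*[{;]',
+                  r'\bswitch\s*\((.*)\)\s*{'):
+    match = Search(pattern, line)
+    if match:
+      return match.group(1)
+  return line
+
+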
+def IsBlankLine(line):
+  """Returns true if the given line is blank.
+
+  We consider a line to be blank if the line is empty or consists of
+  only white spaces.
+
+  Args:
+    line: A line of a string.
+
+  Returns:
+    True, if the given line is blank.
+  """
+  return not line or line.isspace()
+
+
+def CheckForFunctionLengths(filename, clean_lines, linenum,
+                            function_state, error):
+  """Reports for long function bodies.
+
+  For an overview why this is done, see:
+  http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Write_Short_Functions
+
+  Uses a simplistic algorithm assuming other style guidelines
+  (especially spacing) are followed.
+  Only checks unindented functions, so class members are unchecked.
+  Trivial bodies are unchecked, so constructors with huge initializer lists
+  may be missed.
+  Blank/comment lines are not counted so as to avoid encouraging the removal
+  of vertical space and comments just to get through a lint check.
+  NOLINT *on the last line of a function* disables this check.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    function_state: Current function name and lines in body so far.
+    error: The function to call with any errors found.
+  """
+  lines = clean_lines.lines
+  line = lines[linenum]
+  raw = clean_lines.raw_lines
+  raw_line = raw[linenum]
+  joined_line = ''
+
+  starting_func = False
+  regexp = r'(\w(\w|::|\*|\&|\s)*)\('  # decls * & space::name( ...
+  match_result = Match(regexp, line)
+  if match_result:
+    # If the name is all caps and underscores, figure it's a macro and
+    # ignore it, unless it's TEST or TEST_F.
+    function_name = match_result.group(1).split()[-1]
+    if function_name == 'TEST' or function_name == 'TEST_F' or (
+        not Match(r'[A-Z_]+$', function_name)):
+      starting_func = True
+
+  if starting_func:
+    body_found = False
+    for start_linenum in xrange(linenum, clean_lines.NumLines()):
+      start_line = lines[start_linenum]
+      joined_line += ' ' + start_line.lstrip()
+      if Search(r'(;|})', start_line):  # Declarations and trivial functions
+        body_found = True
+        break                              # ... ignore
+      elif Search(r'{', start_line):
+        body_found = True
+        function = Search(r'((\w|:)*)\(', line).group(1)
+        if Match(r'TEST', function):    # Handle TEST... macros
+          parameter_regexp = Search(r'(\(.*\))', joined_line)
+          if parameter_regexp:             # Ignore bad syntax
+            function += parameter_regexp.group(1)
+        else:
+          function += '()'
+        function_state.Begin(function)
+        break
+    if not body_found:
+      # No body for the function (or evidence of a non-function) was found.
+      error(filename, linenum, 'readability/fn_size', 5,
+            'Lint failed to find start of function body.')
+  elif Match(r'^\}\s*$', line):  # function end
+    function_state.Check(error, filename, linenum)
+    function_state.End()
+  elif not Match(r'^\s*$', line):
+    function_state.Count()  # Count non-blank/non-comment lines.
+
+
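+# Illustrative sketch (hypothetical helper): the declaration regexp used in
+# CheckForFunctionLengths above keys off the token just before '(' --
+# "void MyClass::Frobnicate(" is treated as a function start, while an all-caps
+# name such as "MY_MACRO(" is assumed to be a macro and skipped, with TEST and
+# TEST_F as the only exceptions.
+def _ExampleLooksLikeFunctionStart(line):
+  match_result = Match(r'(\w(\w|::|\*|\&|\s)*)\(', line)
+  if not match_result:
+    return False
+  function_name = match_result.group(1).split()[-1]
+  return (function_name == 'TEST' or function_name == 'TEST_F' or
+          not Match(r'[A-Z_]+$', function_name))
+
+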
+_RE_PATTERN_TODO = re.compile(r'^//(\s*)TODO(\(.+?\))?:?(\s|$)?')
+
+
+def CheckComment(comment, filename, linenum, error):
+  """Checks for common mistakes in TODO comments.
+
+  Args:
+    comment: The text of the comment from the line in question.
+    filename: The name of the current file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+  match = _RE_PATTERN_TODO.match(comment)
+  if match:
+    # One whitespace is correct; zero whitespace is handled elsewhere.
+    leading_whitespace = match.group(1)
+    if len(leading_whitespace) > 1:
+      error(filename, linenum, 'whitespace/todo', 2,
+            'Too many spaces before TODO')
+
+    username = match.group(2)
+    if not username:
+      error(filename, linenum, 'readability/todo', 2,
+            'Missing username in TODO; it should look like '
+            '"// TODO(my_username): Stuff."')
+
+    middle_whitespace = match.group(3)
+    # Comparisons made explicit for correctness -- pylint: disable=g-explicit-bool-comparison
+    if middle_whitespace != ' ' and middle_whitespace != '':
+      error(filename, linenum, 'whitespace/todo', 2,
+            'TODO(my_username) should be followed by a space')
+
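+
+# Illustrative sketch (hypothetical helper): how _RE_PATTERN_TODO above splits a
+# TODO comment.  For "//  TODO(alice) fix this" it yields two spaces of leading
+# whitespace (too many), the username "(alice)", and a single space after the
+# username, which is what CheckComment wants to see.
+def _ExampleTodoGroups(comment):
+  match = _RE_PATTERN_TODO.match(comment)
+  if not match:
+    return None
+  return (match.group(1), match.group(2), match.group(3))
+
+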
+def CheckAccess(filename, clean_lines, linenum, nesting_state, error):
+  """Checks for improper use of DISALLOW* macros.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    nesting_state: A _NestingState instance which maintains information about
+                   the current stack of nested blocks being parsed.
+    error: The function to call with any errors found.
+  """
+  line = clean_lines.elided[linenum]  # get rid of comments and strings
+
+  matched = Match((r'\s*(DISALLOW_COPY_AND_ASSIGN|'
+                   r'DISALLOW_EVIL_CONSTRUCTORS|'
+                   r'DISALLOW_IMPLICIT_CONSTRUCTORS)'), line)
+  if not matched:
+    return
+  if nesting_state.stack and isinstance(nesting_state.stack[-1], _ClassInfo):
+    if nesting_state.stack[-1].access != 'private':
+      error(filename, linenum, 'readability/constructors', 3,
+            '%s must be in the private: section' % matched.group(1))
+
+  else:
+    # Found DISALLOW* macro outside a class declaration, or perhaps it
+    # was used inside a function when it should have been part of the
+    # class declaration.  We could issue a warning here, but it
+    # probably resulted in a compiler error already.
+    pass
+
+
+def FindNextMatchingAngleBracket(clean_lines, linenum, init_suffix):
+  """Find the corresponding > to close a template.
+
+  Args:
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: Current line number.
+    init_suffix: Remainder of the current line after the initial <.
+
+  Returns:
+    True if a matching bracket exists.
+  """
+  line = init_suffix
+  nesting_stack = ['<']
+  while True:
+    # Find the next operator that can tell us whether < is used as an
+    # opening bracket or as a less-than operator.  We only want to
+    # warn on the latter case.
+    #
+    # We could also check all other operators and terminate the search
+    # early, e.g. if we got something like this "a<b+c", the "<" is
+    # most likely a less-than operator, but then we will get false
+    # positives for default arguments and other template expressions.
+    match = Search(r'^[^<>(),;\[\]]*([<>(),;\[\]])(.*)$', line)
+    if match:
+      # Found an operator, update nesting stack
+      operator = match.group(1)
+      line = match.group(2)
+
+      if nesting_stack[-1] == '<':
+        # Expecting closing angle bracket
+        if operator in ('<', '(', '['):
+          nesting_stack.append(operator)
+        elif operator == '>':
+          nesting_stack.pop()
+          if not nesting_stack:
+            # Found matching angle bracket
+            return True
+        elif operator == ',':
+          # Got a comma after a bracket, this is most likely a template
+          # argument.  We have not seen a closing angle bracket yet, but
+          # it's probably a few lines later if we look for it, so just
+          # return early here.
+          return True
+        else:
+          # Got some other operator.
+          return False
+
+      else:
+        # Expecting closing parenthesis or closing bracket
+        if operator in ('<', '(', '['):
+          nesting_stack.append(operator)
+        elif operator in (')', ']'):
+          # We don't bother checking for matching () or [].  If we got
+          # something like (] or [), it would have been a syntax error.
+          nesting_stack.pop()
+
+    else:
+      # Scan the next line
+      linenum += 1
+      if linenum >= len(clean_lines.elided):
+        break
+      line = clean_lines.elided[linenum]
+
+  # Exhausted all remaining lines and still no matching angle bracket.
+  # Most likely the input was incomplete, otherwise we should have
+  # seen a semicolon and returned early.
+  return True
+
+
+def FindPreviousMatchingAngleBracket(clean_lines, linenum, init_prefix):
+  """Find the corresponding < that started a template.
+
+  Args:
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: Current line number.
+    init_prefix: Part of the current line before the initial >.
+
+  Returns:
+    True if a matching bracket exists.
+  """
+  line = init_prefix
+  nesting_stack = ['>']
+  while True:
+    # Find the previous operator
+    match = Search(r'^(.*)([<>(),;\[\]])[^<>(),;\[\]]*$', line)
+    if match:
+      # Found an operator, update nesting stack
+      operator = match.group(2)
+      line = match.group(1)
+
+      if nesting_stack[-1] == '>':
+        # Expecting opening angle bracket
+        if operator in ('>', ')', ']'):
+          nesting_stack.append(operator)
+        elif operator == '<':
+          nesting_stack.pop()
+          if not nesting_stack:
+            # Found matching angle bracket
+            return True
+        elif operator == ',':
+          # Got a comma before a bracket, this is most likely a
+          # template argument.  The opening angle bracket is probably
+          # there if we look for it, so just return early here.
+          return True
+        else:
+          # Got some other operator.
+          return False
+
+      else:
+        # Expecting opening parenthesis or opening bracket
+        if operator in ('>', ')', ']'):
+          nesting_stack.append(operator)
+        elif operator in ('(', '['):
+          nesting_stack.pop()
+
+    else:
+      # Scan the previous line
+      linenum -= 1
+      if linenum < 0:
+        break
+      line = clean_lines.elided[linenum]
+
+  # Exhausted all earlier lines and still no matching angle bracket.
+  return False
+
+
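+# Illustrative sketch (hypothetical helper, single line only): the first
+# delimiter found after a '<' usually decides how the two matching functions
+# above treat it.  For the text following '<', "int> x" closes a template,
+# "b, c)" looks like a template argument list, and "b; d = e;" is read as a
+# plain less-than comparison.
+def _ExampleClassifyAngleBracket(suffix):
+  match = Search(r'^[^<>(),;\[\]]*([<>(),;\[\]])', suffix)
+  if not match:
+    return 'unknown'      # the real code keeps scanning the following lines
+  if match.group(1) in ('>', ','):
+    return 'template'     # matching '>' found, or a comma between arguments
+  if match.group(1) in ('<', '(', '['):
+    return 'unknown'      # nested bracket; the real code pushes it and goes on
+  return 'comparison'     # ')', ']' or ';' here means an ordinary less-than
+
+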
+def CheckSpacing(filename, clean_lines, linenum, nesting_state, error):
+  """Checks for the correctness of various spacing issues in the code.
+
+  Things we check for: spaces around operators, spaces after
+  if/for/while/switch, no spaces around parens in function calls, two
+  spaces between code and comment, don't start a block with a blank
+  line, don't end a function with a blank line, don't add a blank line
+  after public/protected/private, don't have too many blank lines in a row.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    nesting_state: A _NestingState instance which maintains information about
+                   the current stack of nested blocks being parsed.
+    error: The function to call with any errors found.
+  """
+
+  # Don't use "elided" lines here, otherwise we can't check commented lines.
+  # Don't want to use "raw" either, because we don't want to check inside C++11
+  # raw strings,
+  raw = clean_lines.lines_without_raw_strings
+  line = raw[linenum]
+
+  # Before nixing comments, check if the line is blank for no good
+  # reason.  This includes the first line after a block is opened, and
+  # blank lines at the end of a function (i.e., right before a line like '}').
+  #
+  # Skip all the blank line checks if we are immediately inside a
+  # namespace body.  In other words, don't issue blank line warnings
+  # for this block:
+  #   namespace {
+  #
+  #   }
+  #
+  # A warning about missing end of namespace comments will be issued instead.
+  if IsBlankLine(line) and not nesting_state.InNamespaceBody():
+    elided = clean_lines.elided
+    prev_line = elided[linenum - 1]
+    prevbrace = prev_line.rfind('{')
+    # TODO(unknown): Don't complain if line before blank line, and line after,
+    #                both start with alnums and are indented the same amount.
+    #                This ignores whitespace at the start of a namespace block
+    #                because those are not usually indented.
+    if prevbrace != -1 and prev_line[prevbrace:].find('}') == -1:
+      # OK, we have a blank line at the start of a code block.  Before we
+      # complain, we check if it is an exception to the rule: The previous
+      # non-empty line has the parameters of a function header that are indented
+      # 4 spaces (because they did not fit in an 80 column line when placed on
+      # the same line as the function name).  We also check for the case where
+      # the previous line is indented 6 spaces, which may happen when the
+      # initializers of a constructor do not fit into an 80 column line.
+      exception = False
+      if Match(r' {6}\w', prev_line):  # Initializer list?
+        # We are looking for the opening column of initializer list, which
+        # should be indented 4 spaces to cause 6 space indentation afterwards.
+        search_position = linenum-2
+        while (search_position >= 0
+               and Match(r' {6}\w', elided[search_position])):
+          search_position -= 1
+        exception = (search_position >= 0
+                     and elided[search_position][:5] == '    :')
+      else:
+        # Search for the function arguments or an initializer list.  We use a
+        # simple heuristic here: if the line is indented 4 spaces and we have a
+        # closing paren, without the opening paren, followed by an opening brace
+        # or colon (for initializer lists), we assume that it is the last line of
+        # a function header.  If we have a colon indented 4 spaces, it is an
+        # initializer list.
+        exception = (Match(r' {4}\w[^\(]*\)\s*(const\s*)?(\{\s*$|:)',
+                           prev_line)
+                     or Match(r' {4}:', prev_line))
+
+      if not exception:
+        error(filename, linenum, 'whitespace/blank_line', 2,
+              'Redundant blank line at the start of a code block '
+              'should be deleted.')
+    # Ignore blank lines at the end of a block in a long if-else
+    # chain, like this:
+    #   if (condition1) {
+    #     // Something followed by a blank line
+    #
+    #   } else if (condition2) {
+    #     // Something else
+    #   }
+    if linenum + 1 < clean_lines.NumLines():
+      next_line = raw[linenum + 1]
+      if (next_line
+          and Match(r'\s*}', next_line)
+          and next_line.find('} else ') == -1):
+        error(filename, linenum, 'whitespace/blank_line', 3,
+              'Redundant blank line at the end of a code block '
+              'should be deleted.')
+
+    matched = Match(r'\s*(public|protected|private):', prev_line)
+    if matched:
+      error(filename, linenum, 'whitespace/blank_line', 3,
+            'Do not leave a blank line after "%s:"' % matched.group(1))
+
+  # Next, we complain if there's a comment too near the text
+  commentpos = line.find('//')
+  if commentpos != -1:
+    # Check if the // may be in quotes.  If so, ignore it
+    # Comparisons made explicit for clarity -- pylint: disable=g-explicit-bool-comparison
+    if (line.count('"', 0, commentpos) -
+        line.count('\\"', 0, commentpos)) % 2 == 0:   # not in quotes
+      # Allow one space for new scopes, two spaces otherwise:
+      if (not Match(r'^\s*{ //', line) and
+          ((commentpos >= 1 and
+            line[commentpos-1] not in string.whitespace) or
+           (commentpos >= 2 and
+            line[commentpos-2] not in string.whitespace))):
+        error(filename, linenum, 'whitespace/comments', 2,
+              'At least two spaces is best between code and comments')
+      # There should always be a space between the // and the comment
+      commentend = commentpos + 2
+      if commentend < len(line) and not line[commentend] == ' ':
+        # but some lines are exceptions -- e.g. if they're big
+        # comment delimiters like:
+        # //----------------------------------------------------------
+        # or are an empty C++ style Doxygen comment, like:
+        # ///
+        # or C++ style Doxygen comments placed after the variable:
+        # ///<  Header comment
+        # //!<  Header comment
+        # or they begin with multiple slashes followed by a space:
+        # //////// Header comment
+        match = (Search(r'[=/-]{4,}\s*$', line[commentend:]) or
+                 Search(r'^/$', line[commentend:]) or
+                 Search(r'^!< ', line[commentend:]) or
+                 Search(r'^/< ', line[commentend:]) or
+                 Search(r'^/+ ', line[commentend:]))
+        if not match:
+          error(filename, linenum, 'whitespace/comments', 4,
+                'Should have a space between // and comment')
+      CheckComment(line[commentpos:], filename, linenum, error)
+
+  line = clean_lines.elided[linenum]  # get rid of comments and strings
+
+  # Don't try to do spacing checks for operator methods
+  line = re.sub(r'operator(==|!=|<|<<|<=|>=|>>|>)\(', 'operator\(', line)
+
+  # We allow no-spaces around = within an if: "if ( (a=Foo()) == 0 )".
+  # Otherwise not.  Note we only check for non-spaces on *both* sides;
+  # sometimes people put non-spaces on one side when aligning ='s among
+  # many lines (not that this is behavior that I approve of...)
+  if Search(r'[\w.]=[\w.]', line) and not Search(r'\b(if|while) ', line):
+    error(filename, linenum, 'whitespace/operators', 4,
+          'Missing spaces around =')
+
+  # It's ok not to have spaces around binary operators like + - * /, but if
+  # there's too little whitespace, we get concerned.  It's hard to tell,
+  # though, so we punt on this one for now.  TODO.
+
+  # You should always have whitespace around binary operators.
+  #
+  # Check <= and >= first to avoid false positives with < and >, then
+  # check non-include lines for spacing around < and >.
+  match = Search(r'[^<>=!\s](==|!=|<=|>=)[^<>=!\s]', line)
+  if match:
+    error(filename, linenum, 'whitespace/operators', 3,
+          'Missing spaces around %s' % match.group(1))
+  # We allow no-spaces around << when used like this: 10<<20, but
+  # not otherwise (particularly, not when used as streams)
+  # Also ignore using ns::operator<<;
+  match = Search(r'(operator|\S)(?:L|UL|ULL|l|ul|ull)?<<(\S)', line)
+  if (match and
+      not (match.group(1).isdigit() and match.group(2).isdigit()) and
+      not (match.group(1) == 'operator' and match.group(2) == ';')):
+    error(filename, linenum, 'whitespace/operators', 3,
+          'Missing spaces around <<')
+  elif not Match(r'#.*include', line):
+    # Avoid false positives on ->
+    reduced_line = line.replace('->', '')
+
+    # Look for < that is not surrounded by spaces.  This is only
+    # triggered if both sides are missing spaces, even though
+    # technically we should flag it if at least one side is missing a
+    # space.  This is done to avoid some false positives with shifts.
+    match = Search(r'[^\s<]<([^\s=<].*)', reduced_line)
+    if (match and
+        not FindNextMatchingAngleBracket(clean_lines, linenum, match.group(1))):
+      error(filename, linenum, 'whitespace/operators', 3,
+            'Missing spaces around <')
+
+    # Look for > that is not surrounded by spaces.  Similar to the
+    # above, we only trigger if both sides are missing spaces to avoid
+    # false positives with shifts.
+    match = Search(r'^(.*[^\s>])>[^\s=>]', reduced_line)
+    if (match and
+        not FindPreviousMatchingAngleBracket(clean_lines, linenum,
+                                             match.group(1))):
+      error(filename, linenum, 'whitespace/operators', 3,
+            'Missing spaces around >')
+
+  # We allow no-spaces around >> for almost anything.  This is because
+  # C++11 allows ">>" to close nested templates, which accounts for
+  # most cases when ">>" is not followed by a space.
+  #
+  # We still warn on ">>" followed by alpha character, because that is
+  # likely due to ">>" being used for right shifts, e.g.:
+  #   value >> alpha
+  #
+  # When ">>" is used to close templates, the alphanumeric letter that
+  # follows would be part of an identifier, and there should still be
+  # a space separating the template type and the identifier.
+  #   type<type<type>> alpha
+  match = Search(r'>>[a-zA-Z_]', line)
+  if match:
+    error(filename, linenum, 'whitespace/operators', 3,
+          'Missing spaces around >>')
+
+  # There shouldn't be space around unary operators
+  match = Search(r'(!\s|~\s|[\s]--[\s;]|[\s]\+\+[\s;])', line)
+  if match:
+    error(filename, linenum, 'whitespace/operators', 4,
+          'Extra space for operator %s' % match.group(1))
+
+  # A pet peeve of mine: no spaces after an if, while, switch, or for
+  match = Search(r' (if\(|for\(|while\(|switch\()', line)
+  if match:
+    error(filename, linenum, 'whitespace/parens', 5,
+          'Missing space before ( in %s' % match.group(1))
+
+  # For if/for/while/switch, the left and right parens should be
+  # consistent about how many spaces are inside the parens, and
+  # there should either be zero or one spaces inside the parens.
+  # We don't want: "if ( foo)" or "if ( foo   )".
+  # Exception: "for ( ; foo; bar)" and "for (foo; bar; )" are allowed.
+  match = Search(r'\b(if|for|while|switch)\s*'
+                 r'\(([ ]*)(.).*[^ ]+([ ]*)\)\s*{\s*$',
+                 line)
+  if match:
+    if len(match.group(2)) != len(match.group(4)):
+      if not (match.group(3) == ';' and
+              len(match.group(2)) == 1 + len(match.group(4)) or
+              not match.group(2) and Search(r'\bfor\s*\(.*; \)', line)):
+        error(filename, linenum, 'whitespace/parens', 5,
+              'Mismatching spaces inside () in %s' % match.group(1))
+    if len(match.group(2)) not in [0, 1]:
+      error(filename, linenum, 'whitespace/parens', 5,
+            'Should have zero or one spaces inside ( and ) in %s' %
+            match.group(1))
+
+  # You should always have a space after a comma (either as fn arg or operator)
+  #
+  # This does not apply when the non-space character following the
+  # comma is another comma, since the only time when that happens is
+  # for empty macro arguments.
+  #
+  # We run this check in two passes: first pass on elided lines to
+  # verify that lines contain missing whitespaces, second pass on raw
+  # lines to confirm that those missing whitespaces are not due to
+  # elided comments.
+  if Search(r',[^,\s]', line) and Search(r',[^,\s]', raw[linenum]):
+    error(filename, linenum, 'whitespace/comma', 3,
+          'Missing space after ,')
+
+  # You should always have a space after a semicolon
+  # except for few corner cases
+  # TODO(unknown): clarify if 'if (1) { return 1;}' requires one more
+  # space after ;
+  if Search(r';[^\s};\\)/]', line):
+    error(filename, linenum, 'whitespace/semicolon', 3,
+          'Missing space after ;')
+
+  # Next we will look for issues with function calls.
+  CheckSpacingForFunctionCall(filename, line, linenum, error)
+
+  # Except after an opening paren, or after another opening brace (in case of
+  # an initializer list, for instance), you should have spaces before your
+  # braces. And since you should never have braces at the beginning of a line,
+  # this is an easy test.
+  match = Match(r'^(.*[^ ({]){', line)
+  if match:
+    # Try a bit harder to check for brace initialization.  This
+    # happens in one of the following forms:
+    #   Constructor() : initializer_list_{} { ... }
+    #   Constructor{}.MemberFunction()
+    #   Type variable{};
+    #   FunctionCall(type{}, ...);
+    #   LastArgument(..., type{});
+    #   LOG(INFO) << type{} << " ...";
+    #   map_of_type[{...}] = ...;
+    #
+    # We check for the character following the closing brace, and
+    # silence the warning if it's one of those listed above, i.e.
+    # "{.;,)<]".
+    #
+    # To account for nested initializer list, we allow any number of
+    # closing braces up to "{;,)<".  We can't simply silence the
+    # warning on first sight of closing brace, because that would
+    # cause false negatives for things that are not initializer lists.
+    #   Silence this:         But not this:
+    #     Outer{                if (...) {
+    #       Inner{...}            if (...){  // Missing space before {
+    #     };                    }
+    #
+    # There is a false negative with this approach if people inserted
+    # spurious semicolons, e.g. "if (cond){};", but we will catch the
+    # spurious semicolon with a separate check.
+    (endline, endlinenum, endpos) = CloseExpression(
+        clean_lines, linenum, len(match.group(1)))
+    trailing_text = ''
+    if endpos > -1:
+      trailing_text = endline[endpos:]
+    for offset in xrange(endlinenum + 1,
+                         min(endlinenum + 3, clean_lines.NumLines() - 1)):
+      trailing_text += clean_lines.elided[offset]
+    if not Match(r'^[\s}]*[{.;,)<\]]', trailing_text):
+      error(filename, linenum, 'whitespace/braces', 5,
+            'Missing space before {')
+
+  # Make sure '} else {' has spaces.
+  if Search(r'}else', line):
+    error(filename, linenum, 'whitespace/braces', 5,
+          'Missing space before else')
+
+  # You shouldn't have spaces before your brackets, except maybe after
+  # 'delete []' or 'new char * []'.
+  if Search(r'\w\s+\[', line) and not Search(r'delete\s+\[', line):
+    error(filename, linenum, 'whitespace/braces', 5,
+          'Extra space before [')
+
+  # You shouldn't have a space before a semicolon at the end of the line.
+  # There's a special case for "for" since the style guide allows space before
+  # the semicolon there.
+  if Search(r':\s*;\s*$', line):
+    error(filename, linenum, 'whitespace/semicolon', 5,
+          'Semicolon defining empty statement. Use {} instead.')
+  elif Search(r'^\s*;\s*$', line):
+    error(filename, linenum, 'whitespace/semicolon', 5,
+          'Line contains only semicolon. If this should be an empty statement, '
+          'use {} instead.')
+  elif (Search(r'\s+;\s*$', line) and
+        not Search(r'\bfor\b', line)):
+    error(filename, linenum, 'whitespace/semicolon', 5,
+          'Extra space before last semicolon. If this should be an empty '
+          'statement, use {} instead.')
+
+  # In range-based for, we want spaces before and after the colon, but
+  # not around "::" tokens that might appear.
+  if (Search('for *\(.*[^:]:[^: ]', line) or
+      Search('for *\(.*[^: ]:[^:]', line)):
+    error(filename, linenum, 'whitespace/forcolon', 2,
+          'Missing space around colon in range-based for loop')
+
+
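+# Illustrative sketch (hypothetical helper): the '<<' rule in CheckSpacing above
+# tolerates numeric shifts such as "1<<20" but reports stream-style uses such as
+# 'cout<<"hi"', because only the digit<<digit case is exempted.
+def _ExampleMissingSpacesAroundShift(line):
+  match = Search(r'(operator|\S)(?:L|UL|ULL|l|ul|ull)?<<(\S)', line)
+  return bool(match and
+              not (match.group(1).isdigit() and match.group(2).isdigit()) and
+              not (match.group(1) == 'operator' and match.group(2) == ';'))
+
+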
+def CheckSectionSpacing(filename, clean_lines, class_info, linenum, error):
+  """Checks for additional blank line issues related to sections.
+
+  Currently the only thing checked here is blank line before protected/private.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    class_info: A _ClassInfo object.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+  # Skip checks if the class is small, where small means 25 lines or less.
+  # 25 lines seems like a good cutoff since that's the usual height of
+  # terminals, and any class that can't fit in one screen can't really
+  # be considered "small".
+  #
+  # Also skip checks if we are on the first line.  This accounts for
+  # classes that look like
+  #   class Foo { public: ... };
+  #
+  # If we didn't find the end of the class, last_line would be zero,
+  # and the check will be skipped by the first condition.
+  if (class_info.last_line - class_info.starting_linenum <= 24 or
+      linenum <= class_info.starting_linenum):
+    return
+
+  matched = Match(r'\s*(public|protected|private):', clean_lines.lines[linenum])
+  if matched:
+    # Issue warning if the line before public/protected/private was
+    # not a blank line, but don't do this if the previous line contains
+    # "class" or "struct".  This can happen two ways:
+    #  - We are at the beginning of the class.
+    #  - We are forward-declaring an inner class that is semantically
+    #    private, but needed to be public for implementation reasons.
+    # Also ignores cases where the previous line ends with a backslash, as is
+    # common when defining classes in C macros.
+    prev_line = clean_lines.lines[linenum - 1]
+    if (not IsBlankLine(prev_line) and
+        not Search(r'\b(class|struct)\b', prev_line) and
+        not Search(r'\\$', prev_line)):
+      # Try a bit harder to find the beginning of the class.  This is to
+      # account for multi-line base-specifier lists, e.g.:
+      #   class Derived
+      #       : public Base {
+      end_class_head = class_info.starting_linenum
+      for i in range(class_info.starting_linenum, linenum):
+        if Search(r'\{\s*$', clean_lines.lines[i]):
+          end_class_head = i
+          break
+      if end_class_head < linenum - 1:
+        error(filename, linenum, 'whitespace/blank_line', 3,
+              '"%s:" should be preceded by a blank line' % matched.group(1))
+
+
+def GetPreviousNonBlankLine(clean_lines, linenum):
+  """Return the most recent non-blank line and its line number.
+
+  Args:
+    clean_lines: A CleansedLines instance containing the file contents.
+    linenum: The number of the line to check.
+
+  Returns:
+    A tuple with two elements.  The first element is the contents of the last
+    non-blank line before the current line, or the empty string if this is the
+    first non-blank line.  The second is the line number of that line, or -1
+    if this is the first non-blank line.
+  """
+
+  prevlinenum = linenum - 1
+  while prevlinenum >= 0:
+    prevline = clean_lines.elided[prevlinenum]
+    if not IsBlankLine(prevline):     # if not a blank line...
+      return (prevline, prevlinenum)
+    prevlinenum -= 1
+  return ('', -1)
+
+
+def CheckBraces(filename, clean_lines, linenum, error):
+  """Looks for misplaced braces (e.g. at the end of line).
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+
+  line = clean_lines.elided[linenum]        # get rid of comments and strings
+
+  if Match(r'\s*{\s*$', line):
+    # We allow an open brace to start a line in the case where someone is using
+    # braces in a block to explicitly create a new scope, which is commonly used
+    # to control the lifetime of stack-allocated variables.  Braces are also
+    # used for brace initializers inside function calls.  We don't detect this
+    # perfectly: we just don't complain if the last non-whitespace character on
+    # the previous non-blank line is ',', ';', ':', '(', '{', or '}', or if the
+    # previous line starts a preprocessor block.
+    prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
+    if (not Search(r'[,;:}{(]\s*$', prevline) and
+        not Match(r'\s*#', prevline)):
+      error(filename, linenum, 'whitespace/braces', 4,
+            '{ should almost always be at the end of the previous line')
+
+  # An else clause should be on the same line as the preceding closing brace.
+  if Match(r'\s*else\s*', line):
+    prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
+    if Match(r'\s*}\s*$', prevline):
+      error(filename, linenum, 'whitespace/newline', 4,
+            'An else should appear on the same line as the preceding }')
+
+  # If braces come on one side of an else, they should be on both.
+  # However, we have to worry about "else if" that spans multiple lines!
+  if Search(r'}\s*else[^{]*$', line) or Match(r'[^}]*else\s*{', line):
+    if Search(r'}\s*else if([^{]*)$', line):       # could be multi-line if
+      # find the ( after the if
+      pos = line.find('else if')
+      pos = line.find('(', pos)
+      if pos > 0:
+        (endline, _, endpos) = CloseExpression(clean_lines, linenum, pos)
+        if endline[endpos:].find('{') == -1:    # must be brace after if
+          error(filename, linenum, 'readability/braces', 5,
+                'If an else has a brace on one side, it should have it on both')
+    else:            # common case: else not followed by a multi-line if
+      error(filename, linenum, 'readability/braces', 5,
+            'If an else has a brace on one side, it should have it on both')
+
+  # Likewise, the body of an else should never be on the same line as the else
+  if Search(r'\belse [^\s{]', line) and not Search(r'\belse if\b', line):
+    error(filename, linenum, 'whitespace/newline', 4,
+          'Else clause should never be on same line as else (use 2 lines)')
+
+  # In the same way, a do/while should never be on one line
+  if Match(r'\s*do [^\s{]', line):
+    error(filename, linenum, 'whitespace/newline', 4,
+          'do/while clauses should not be on a single line')
+
+  # Block bodies should not be followed by a semicolon.  Due to C++11
+  # brace initialization, there are more places where semicolons are
+  # required than not, so we use a whitelist approach to check these
+  # rather than a blacklist.  These are the places where "};" should
+  # be replaced by just "}":
+  # 1. Some flavor of block following closing parenthesis:
+  #    for (;;) {};
+  #    while (...) {};
+  #    switch (...) {};
+  #    Function(...) {};
+  #    if (...) {};
+  #    if (...) else if (...) {};
+  #
+  # 2. else block:
+  #    if (...) else {};
+  #
+  # 3. const member function:
+  #    Function(...) const {};
+  #
+  # 4. Block following some statement:
+  #    x = 42;
+  #    {};
+  #
+  # 5. Block at the beginning of a function:
+  #    Function(...) {
+  #      {};
+  #    }
+  #
+  #    Note that naively checking for the preceding "{" will also match
+  #    braces inside multi-dimensional arrays, but this is fine since
+  #    that expression will not contain semicolons.
+  #
+  # 6. Block following another block:
+  #    while (true) {}
+  #    {};
+  #
+  # 7. End of namespaces:
+  #    namespace {};
+  #
+  #    These semicolons seem far more common than other kinds of
+  #    redundant semicolons, possibly due to people converting classes
+  #    to namespaces.  For now we do not warn for this case.
+  #
+  # Try matching case 1 first.
+  match = Match(r'^(.*\)\s*)\{', line)
+  if match:
+    # Matched closing parenthesis (case 1).  Check the token before the
+    # matching opening parenthesis, and don't warn if it looks like a
+    # macro.  This avoids these false positives:
+    #  - macro that defines a base class
+    #  - multi-line macro that defines a base class
+    #  - macro that defines the whole class-head
+    #
+    # But we still issue warnings for macros that we know are safe to
+    # warn, specifically:
+    #  - TEST, TEST_F, TEST_P, MATCHER, MATCHER_P
+    #  - TYPED_TEST
+    #  - INTERFACE_DEF
+    #  - EXCLUSIVE_LOCKS_REQUIRED, SHARED_LOCKS_REQUIRED, LOCKS_EXCLUDED:
+    #
+    # We implement a whitelist of safe macros instead of a blacklist of
+    # unsafe macros, even though the latter appears less frequently in
+    # google code and would have been easier to implement.  This is because
+    # the downside for getting the whitelist wrong means some extra
+    # semicolons, while the downside for getting the blacklist wrong
+    # would result in compile errors.
+    #
+    # In addition to macros, we also don't want to warn on compound
+    # literals.
+    closing_brace_pos = match.group(1).rfind(')')
+    opening_parenthesis = ReverseCloseExpression(
+        clean_lines, linenum, closing_brace_pos)
+    if opening_parenthesis[2] > -1:
+      line_prefix = opening_parenthesis[0][0:opening_parenthesis[2]]
+      macro = Search(r'\b([A-Z_]+)\s*$', line_prefix)
+      if ((macro and
+           macro.group(1) not in (
+               'TEST', 'TEST_F', 'MATCHER', 'MATCHER_P', 'TYPED_TEST',
+               'EXCLUSIVE_LOCKS_REQUIRED', 'SHARED_LOCKS_REQUIRED',
+               'LOCKS_EXCLUDED', 'INTERFACE_DEF')) or
+          Search(r'\s+=\s*$', line_prefix)):
+        match = None
+
+  else:
+    # Try matching cases 2-3.
+    match = Match(r'^(.*(?:else|\)\s*const)\s*)\{', line)
+    if not match:
+      # Try matching cases 4-6.  These are always matched on separate lines.
+      #
+      # Note that we can't simply concatenate the previous line to the
+      # current line and do a single match, otherwise we may output
+      # duplicate warnings for the blank line case:
+      #   if (cond) {
+      #     // blank line
+      #   }
+      prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
+      if prevline and Search(r'[;{}]\s*$', prevline):
+        match = Match(r'^(\s*)\{', line)
+
+  # Check matching closing brace
+  if match:
+    (endline, endlinenum, endpos) = CloseExpression(
+        clean_lines, linenum, len(match.group(1)))
+    if endpos > -1 and Match(r'^\s*;', endline[endpos:]):
+      # Current {} pair is eligible for semicolon check, and we have found
+      # the redundant semicolon, output warning here.
+      #
+      # Note: because we are scanning forward for opening braces, and
+      # outputting warnings for the matching closing brace, if there are
+      # nested blocks with trailing semicolons, we will get the error
+      # messages in reversed order.
+      error(filename, endlinenum, 'readability/braces', 4,
+            "You don't need a ; after a }")
+
+
+def CheckEmptyBlockBody(filename, clean_lines, linenum, error):
+  """Look for empty loop/conditional body with only a single semicolon.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+
+  # Search for loop keywords at the beginning of the line.  Because only
+  # whitespace is allowed before the keywords, this will also ignore most
+  # do-while-loops, since those lines should start with a closing brace.
+  #
+  # We also check "if" blocks here, since an empty conditional block
+  # is likely an error.
+  line = clean_lines.elided[linenum]
+  matched = Match(r'\s*(for|while|if)\s*\(', line)
+  if matched:
+    # Find the end of the conditional expression
+    (end_line, end_linenum, end_pos) = CloseExpression(
+        clean_lines, linenum, line.find('('))
+
+    # Output warning if what follows the condition expression is a semicolon.
+    # No warning for all other cases, including whitespace or newline, since we
+    # have a separate check for semicolons preceded by whitespace.
+    if end_pos >= 0 and Match(r';', end_line[end_pos:]):
+      if matched.group(1) == 'if':
+        error(filename, end_linenum, 'whitespace/empty_conditional_body', 5,
+              'Empty conditional bodies should use {}')
+      else:
+        error(filename, end_linenum, 'whitespace/empty_loop_body', 5,
+              'Empty loop bodies should use {} or continue')
+
+
+def CheckCheck(filename, clean_lines, linenum, error):
+  """Checks the use of CHECK and EXPECT macros.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+
+  # Decide the set of replacement macros that should be suggested
+  lines = clean_lines.elided
+  check_macro = None
+  start_pos = -1
+  for macro in _CHECK_MACROS:
+    i = lines[linenum].find(macro)
+    if i >= 0:
+      check_macro = macro
+
+      # Find opening parenthesis.  Do a regular expression match here
+      # to make sure that we are matching the expected CHECK macro, as
+      # opposed to some other macro that happens to contain the CHECK
+      # substring.
+      matched = Match(r'^(.*\b' + check_macro + r'\s*)\(', lines[linenum])
+      if not matched:
+        continue
+      start_pos = len(matched.group(1))
+      break
+  if not check_macro or start_pos < 0:
+    # Don't waste time here if line doesn't contain 'CHECK' or 'EXPECT'
+    return
+
+  # Find end of the boolean expression by matching parentheses
+  (last_line, end_line, end_pos) = CloseExpression(
+      clean_lines, linenum, start_pos)
+  if end_pos < 0:
+    return
+  if linenum == end_line:
+    expression = lines[linenum][start_pos + 1:end_pos - 1]
+  else:
+    expression = lines[linenum][start_pos + 1:]
+    for i in xrange(linenum + 1, end_line):
+      expression += lines[i]
+    expression += last_line[0:end_pos - 1]
+
+  # Parse expression so that we can take parentheses into account.
+  # This avoids false positives for inputs like "CHECK((a < 4) == b)",
+  # which is not replaceable by CHECK_LE.
+  lhs = ''
+  rhs = ''
+  operator = None
+  while expression:
+    matched = Match(r'^\s*(<<|<<=|>>|>>=|->\*|->|&&|\|\||'
+                    r'==|!=|>=|>|<=|<|\()(.*)$', expression)
+    if matched:
+      token = matched.group(1)
+      if token == '(':
+        # Parenthesized operand
+        expression = matched.group(2)
+        (end, _) = FindEndOfExpressionInLine(expression, 0, 1, '(', ')')
+        if end < 0:
+          return  # Unmatched parenthesis
+        lhs += '(' + expression[0:end]
+        expression = expression[end:]
+      elif token in ('&&', '||'):
+        # Logical and/or operators.  This means the expression
+        # contains more than one term, for example:
+        #   CHECK(42 < a && a < b);
+        #
+        # These are not replaceable with CHECK_LE, so bail out early.
+        return
+      elif token in ('<<', '<<=', '>>', '>>=', '->*', '->'):
+        # Non-relational operator
+        lhs += token
+        expression = matched.group(2)
+      else:
+        # Relational operator
+        operator = token
+        rhs = matched.group(2)
+        break
+    else:
+      # Unparenthesized operand.  Instead of appending to lhs one character
+      # at a time, we do another regular expression match to consume several
+      # characters at once if possible.  Trivial benchmark shows that this
+      # is more efficient when the operands are longer than a single
+      # character, which is generally the case.
+      matched = Match(r'^([^-=!<>()&|]+)(.*)$', expression)
+      if not matched:
+        matched = Match(r'^(\s*\S)(.*)$', expression)
+        if not matched:
+          break
+      lhs += matched.group(1)
+      expression = matched.group(2)
+
+  # Only apply checks if we got all parts of the boolean expression
+  if not (lhs and operator and rhs):
+    return
+
+  # Check that rhs does not contain logical operators.  We already know
+  # that lhs is fine since the loop above parses out && and ||.
+  if rhs.find('&&') > -1 or rhs.find('||') > -1:
+    return
+
+  # At least one of the operands must be a constant literal.  This is
+  # to avoid suggesting replacements for unprintable things like
+  # CHECK(variable != iterator)
+  #
+  # The following pattern matches decimal, hex integers, strings, and
+  # characters (in that order).
+  lhs = lhs.strip()
+  rhs = rhs.strip()
+  match_constant = r'^([-+]?(\d+|0[xX][0-9a-fA-F]+)[lLuU]{0,3}|".*"|\'.*\')$'
+  if Match(match_constant, lhs) or Match(match_constant, rhs):
+    # Note: since we know both lhs and rhs, we can provide a more
+    # descriptive error message like:
+    #   Consider using CHECK_EQ(x, 42) instead of CHECK(x == 42)
+    # Instead of:
+    #   Consider using CHECK_EQ instead of CHECK(a == b)
+    #
+    # We are still keeping the less descriptive message because if lhs
+    # or rhs gets long, the error message might become unreadable.
+    error(filename, linenum, 'readability/check', 2,
+          'Consider using %s instead of %s(a %s b)' % (
+              _CHECK_REPLACEMENT[check_macro][operator],
+              check_macro, operator))
+
+
+def CheckAltTokens(filename, clean_lines, linenum, error):
+  """Check alternative keywords being used in boolean expressions.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+  line = clean_lines.elided[linenum]
+
+  # Avoid preprocessor lines
+  if Match(r'^\s*#', line):
+    return
+
+  # Last ditch effort to avoid multi-line comments.  This will not help
+  # if the comment started before the current line or ended after the
+  # current line, but it catches most of the false positives.  At least,
+  # it provides a way to work around this warning for people who use
+  # multi-line comments in preprocessor macros.
+  #
+  # TODO(unknown): remove this once cpplint has better support for
+  # multi-line comments.
+  if line.find('/*') >= 0 or line.find('*/') >= 0:
+    return
+
+  for match in _ALT_TOKEN_REPLACEMENT_PATTERN.finditer(line):
+    error(filename, linenum, 'readability/alt_tokens', 2,
+          'Use operator %s instead of %s' % (
+              _ALT_TOKEN_REPLACEMENT[match.group(1)], match.group(1)))
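+# Illustrative example: assuming the _ALT_TOKEN_REPLACEMENT table defined
+# earlier maps the C++ alternative token 'and' to '&&', a condition such as
+# "if (a and b)" would be reported with 'Use operator && instead of and'.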
+
+
+def GetLineWidth(line):
+  """Determines the width of the line in column positions.
+
+  Args:
+    line: A string, which may be a Unicode string.
+
+  Returns:
+    The width of the line in column positions, accounting for Unicode
+    combining characters and wide characters.
+  """
+  if isinstance(line, unicode):
+    width = 0
+    for uc in unicodedata.normalize('NFC', line):
+      if unicodedata.east_asian_width(uc) in ('W', 'F'):
+        width += 2
+      elif not unicodedata.combining(uc):
+        width += 1
+    return width
+  else:
+    return len(line)
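+# Illustrative example: GetLineWidth('int x;') returns 6, while a unicode line
+# containing two full-width CJK characters returns 4, because wide ('W'/'F')
+# characters count as two columns and combining characters count as zero.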
+
+
+def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state,
+               error):
+  """Checks rules from the 'C++ style rules' section of cppguide.html.
+
+  Most of these rules are hard to test (naming, comment style), but we
+  do what we can.  In particular we check for 2-space indents, line lengths,
+  tab usage, spaces inside code, etc.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    file_extension: The extension (without the dot) of the filename.
+    nesting_state: A _NestingState instance which maintains information about
+                   the current stack of nested blocks being parsed.
+    error: The function to call with any errors found.
+  """
+
+  # Don't use "elided" lines here; otherwise we can't check commented lines.
+  # Don't want to use "raw" either, because we don't want to check inside C++11
+  # raw strings.
+  raw_lines = clean_lines.lines_without_raw_strings
+  line = raw_lines[linenum]
+
+  if line.find('\t') != -1:
+    error(filename, linenum, 'whitespace/tab', 1,
+          'Tab found; better to use spaces')
+
+  # One or three spaces at the beginning of the line are weird; it's
+  # hard to reconcile that with 2-space indents.
+  # NOTE: here are the conditions Rob Pike used for his tests.  Mine aren't
+  # as sophisticated, but it may be worth becoming so:  RLENGTH==initial_spaces
+  # if(RLENGTH > 20) complain = 0;
+  # if(match($0, " +(error|private|public|protected):")) complain = 0;
+  # if(match(prev, "&& *$")) complain = 0;
+  # if(match(prev, "\\|\\| *$")) complain = 0;
+  # if(match(prev, "[\",=><] *$")) complain = 0;
+  # if(match($0, " <<")) complain = 0;
+  # if(match(prev, " +for \\(")) complain = 0;
+  # if(prevodd && match(prevprev, " +for \\(")) complain = 0;
+  initial_spaces = 0
+  cleansed_line = clean_lines.elided[linenum]
+  while initial_spaces < len(line) and line[initial_spaces] == ' ':
+    initial_spaces += 1
+  if line and line[-1].isspace():
+    error(filename, linenum, 'whitespace/end_of_line', 4,
+          'Line ends in whitespace.  Consider deleting these extra spaces.')
+  # There are certain situations in which we allow one space, notably for
+  # section labels.
+  elif ((initial_spaces == 1 or initial_spaces == 3) and
+        not Match(r'\s*\w+\s*:\s*$', cleansed_line)):
+    error(filename, linenum, 'whitespace/indent', 3,
+          'Weird number of spaces at line-start.  '
+          'Are you using a 2-space indent?')
+
+  # Check if the line is a header guard.
+  is_header_guard = False
+  if file_extension == 'h':
+    cppvar = GetHeaderGuardCPPVariable(filename)
+    if (line.startswith('#ifndef %s' % cppvar) or
+        line.startswith('#define %s' % cppvar) or
+        line.startswith('#endif  // %s' % cppvar)):
+      is_header_guard = True
+  # #include lines and header guards can be long, since there's no clean way to
+  # split them.
+  #
+  # URLs can be long too.  It's possible to split these, but it makes them
+  # harder to cut&paste.
+  #
+  # The "$Id:...$" comment may also get very long without it being the
+  # developer's fault.
+  if (not line.startswith('#include') and not is_header_guard and
+      not Match(r'^\s*//.*http(s?)://\S*$', line) and
+      not Match(r'^// \$Id:.*#[0-9]+ \$$', line)):
+    line_width = GetLineWidth(line)
+    extended_length = int((_line_length * 1.25))
+    if line_width > extended_length:
+      error(filename, linenum, 'whitespace/line_length', 4,
+            'Lines should very rarely be longer than %i characters' %
+            extended_length)
+    elif line_width > _line_length:
+      error(filename, linenum, 'whitespace/line_length', 2,
+            'Lines should be <= %i characters long' % _line_length)
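+  # For example, with the default _line_length of 80 (an assumption here; it
+  # can be changed with --linelength), a 90-column line triggers the level-2
+  # warning and a 110-column line triggers the level-4 warning above.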
+
+  if (cleansed_line.count(';') > 1 and
+      # for loops are allowed two ;'s (and may run over two lines).
+      cleansed_line.find('for') == -1 and
+      (GetPreviousNonBlankLine(clean_lines, linenum)[0].find('for') == -1 or
+       GetPreviousNonBlankLine(clean_lines, linenum)[0].find(';') != -1) and
+      # It's ok to have many commands in a switch case that fits in 1 line
+      not ((cleansed_line.find('case ') != -1 or
+            cleansed_line.find('default:') != -1) and
+           cleansed_line.find('break;') != -1)):
+    error(filename, linenum, 'whitespace/newline', 0,
+          'More than one command on the same line')
+
+  # Some more style checks
+  CheckBraces(filename, clean_lines, linenum, error)
+  CheckEmptyBlockBody(filename, clean_lines, linenum, error)
+  CheckAccess(filename, clean_lines, linenum, nesting_state, error)
+  CheckSpacing(filename, clean_lines, linenum, nesting_state, error)
+  CheckCheck(filename, clean_lines, linenum, error)
+  CheckAltTokens(filename, clean_lines, linenum, error)
+  classinfo = nesting_state.InnermostClass()
+  if classinfo:
+    CheckSectionSpacing(filename, clean_lines, classinfo, linenum, error)
+
+
+_RE_PATTERN_INCLUDE_NEW_STYLE = re.compile(r'#include +"[^/]+\.h"')
+_RE_PATTERN_INCLUDE = re.compile(r'^\s*#\s*include\s*([<"])([^>"]*)[>"].*$')
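+# Illustrative examples: _RE_PATTERN_INCLUDE matches '#include <vector>' with
+# group(1) == '<' and group(2) == 'vector'; _RE_PATTERN_INCLUDE_NEW_STYLE
+# matches '#include "bar.h"' but not the directory-qualified
+# '#include "foo/bar.h"'.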
+# Matches the first component of a filename delimited by -s and _s. That is:
+#  _RE_FIRST_COMPONENT.match('foo').group(0) == 'foo'
+#  _RE_FIRST_COMPONENT.match('foo.cc').group(0) == 'foo'
+#  _RE_FIRST_COMPONENT.match('foo-bar_baz.cc').group(0) == 'foo'
+#  _RE_FIRST_COMPONENT.match('foo_bar-baz.cc').group(0) == 'foo'
+_RE_FIRST_COMPONENT = re.compile(r'^[^-_.]+')
+
+
+def _DropCommonSuffixes(filename):
+  """Drops common suffixes like _test.cc or -inl.h from filename.
+
+  For example:
+    >>> _DropCommonSuffixes('foo/foo-inl.h')
+    'foo/foo'
+    >>> _DropCommonSuffixes('foo/bar/foo.cc')
+    'foo/bar/foo'
+    >>> _DropCommonSuffixes('foo/foo_internal.h')
+    'foo/foo'
+    >>> _DropCommonSuffixes('foo/foo_unusualinternal.h')
+    'foo/foo_unusualinternal'
+
+  Args:
+    filename: The input filename.
+
+  Returns:
+    The filename with the common suffix removed.
+  """
+  for suffix in ('test.cc', 'regtest.cc', 'unittest.cc',
+                 'inl.h', 'impl.h', 'internal.h'):
+    if (filename.endswith(suffix) and len(filename) > len(suffix) and
+        filename[-len(suffix) - 1] in ('-', '_')):
+      return filename[:-len(suffix) - 1]
+  return os.path.splitext(filename)[0]
+
+
+def _IsTestFilename(filename):
+  """Determines if the given filename has a suffix that identifies it as a test.
+
+  Args:
+    filename: The input filename.
+
+  Returns:
+    True if 'filename' looks like a test, False otherwise.
+  """
+  if (filename.endswith('_test.cc') or
+      filename.endswith('_unittest.cc') or
+      filename.endswith('_regtest.cc')):
+    return True
+  else:
+    return False
+
+
+def _ClassifyInclude(fileinfo, include, is_system):
+  """Figures out what kind of header 'include' is.
+
+  Args:
+    fileinfo: The current file cpplint is running over. A FileInfo instance.
+    include: The path to a #included file.
+    is_system: True if the #include used <> rather than "".
+
+  Returns:
+    One of the _XXX_HEADER constants.
+
+  For example:
+    >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'stdio.h', True)
+    _C_SYS_HEADER
+    >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True)
+    _CPP_SYS_HEADER
+    >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False)
+    _LIKELY_MY_HEADER
+    >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'),
+    ...                  'bar/foo_other_ext.h', False)
+    _POSSIBLE_MY_HEADER
+    >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/bar.h', False)
+    _OTHER_HEADER
+  """
+  # This is a list of all standard c++ header files, except
+  # those already checked for above.
+  is_cpp_h = include in _CPP_HEADERS
+
+  if is_system:
+    if is_cpp_h:
+      return _CPP_SYS_HEADER
+    else:
+      return _C_SYS_HEADER
+
+  # If the target file and the include we're checking share a
+  # basename when we drop common extensions, and the include
+  # lives in the same directory, then it's likely owned by the target file.
+  target_dir, target_base = (
+      os.path.split(_DropCommonSuffixes(fileinfo.RepositoryName())))
+  include_dir, include_base = os.path.split(_DropCommonSuffixes(include))
+  if target_base == include_base and (
+      include_dir == target_dir or
+      include_dir == os.path.normpath(target_dir + '/../public')):
+    return _LIKELY_MY_HEADER
+
+  # If the target and include share some initial basename
+  # component, it's possible the target is implementing the
+  # include, so it's allowed to be first, but we'll never
+  # complain if it's not there.
+  target_first_component = _RE_FIRST_COMPONENT.match(target_base)
+  include_first_component = _RE_FIRST_COMPONENT.match(include_base)
+  if (target_first_component and include_first_component and
+      target_first_component.group(0) ==
+      include_first_component.group(0)):
+    return _POSSIBLE_MY_HEADER
+
+  return _OTHER_HEADER
+
+
+def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
+  """Check rules that are applicable to #include lines.
+
+  Strings on #include lines are NOT removed from the elided line, to make
+  certain tasks easier. However, to prevent false positives, checks
+  applicable to #include lines in CheckLanguage must be put here.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    include_state: An _IncludeState instance in which the headers are inserted.
+    error: The function to call with any errors found.
+  """
+  fileinfo = FileInfo(filename)
+
+  line = clean_lines.lines[linenum]
+
+  # "include" should use the new style "foo/bar.h" instead of just "bar.h"
+  if _RE_PATTERN_INCLUDE_NEW_STYLE.search(line):
+    error(filename, linenum, 'build/include_dir', 4,
+          'Include the directory when naming .h files')
+
+  # We shouldn't include a file more than once.  Actually, there are a
+  # handful of instances where doing so is okay, but in general it's
+  # not.
+  match = _RE_PATTERN_INCLUDE.search(line)
+  if match:
+    include = match.group(2)
+    is_system = (match.group(1) == '<')
+    if include in include_state:
+      error(filename, linenum, 'build/include', 4,
+            '"%s" already included at %s:%s' %
+            (include, filename, include_state[include]))
+    else:
+      include_state[include] = linenum
+
+      # We want to ensure that headers appear in the right order:
+      # 1) for foo.cc, foo.h  (preferred location)
+      # 2) c system files
+      # 3) cpp system files
+      # 4) for foo.cc, foo.h  (deprecated location)
+      # 5) other google headers
+      #
+      # We classify each include statement as one of those 5 types
+      # using a number of techniques. The include_state object keeps
+      # track of the highest type seen, and complains if we see a
+      # lower type after that.
+      error_message = include_state.CheckNextIncludeOrder(
+          _ClassifyInclude(fileinfo, include, is_system))
+      if error_message:
+        error(filename, linenum, 'build/include_order', 4,
+              '%s. Should be: %s.h, c system, c++ system, other.' %
+              (error_message, fileinfo.BaseName()))
+      canonical_include = include_state.CanonicalizeAlphabeticalOrder(include)
+      if not include_state.IsInAlphabeticalOrder(
+          clean_lines, linenum, canonical_include):
+        error(filename, linenum, 'build/include_alpha', 4,
+              'Include "%s" not in alphabetical order' % include)
+      include_state.SetLastHeader(canonical_include)
+
+  # Look for any of the stream classes that are part of standard C++.
+  match = _RE_PATTERN_INCLUDE.match(line)
+  if match:
+    include = match.group(2)
+    if Match(r'(f|ind|io|i|o|parse|pf|stdio|str|)?stream$', include):
+      # Many unit tests use cout, so we exempt them.
+      if not _IsTestFilename(filename):
+        error(filename, linenum, 'readability/streams', 3,
+              'Streams are highly discouraged.')
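+# Illustrative example for the streams check above: '#include <iostream>' in
+# foo.cc would be flagged with 'Streams are highly discouraged.', while the
+# same include in foo_test.cc would be exempt.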
+
+
+def _GetTextInside(text, start_pattern):
+  r"""Retrieves all the text between matching open and close parentheses.
+
+  Given a string of lines and a regular expression string, retrieve all the text
+  following the expression and between opening punctuation symbols like
+  (, [, or {, and the matching close-punctuation symbol. This properly handles
+  nested occurrences of the punctuation, so for text like
+    printf(a(), b(c()));
+  a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
+  start_pattern must match a string ending with an opening punctuation symbol.
+
+  Args:
+    text: The text to extract from. Its comments and strings must be elided.
+           It can be a single line or span multiple lines.
+    start_pattern: The regexp string indicating where to start extracting
+                   the text.
+  Returns:
+    The extracted text.
+    None if either the opening string or ending punctuation could not be found.
+  """
+  # TODO(sugawarayu): Audit cpplint.py to see what places could be profitably
+  # rewritten to use _GetTextInside (and currently use inferior regexp
+  # matching).
+
+  # Map opening punctuation symbols to the matching close-punctuation.
+  matching_punctuation = {'(': ')', '{': '}', '[': ']'}
+  closing_punctuation = set(matching_punctuation.itervalues())
+
+  # Find the position to start extracting text.
+  match = re.search(start_pattern, text, re.M)
+  if not match:  # start_pattern not found in text.
+    return None
+  start_position = match.end(0)
+
+  assert start_position > 0, (
+      'start_pattern must end with an opening punctuation.')
+  assert text[start_position - 1] in matching_punctuation, (
+      'start_pattern must end with an opening punctuation.')
+  # Stack of closing punctuations we expect to have in text after position.
+  punctuation_stack = [matching_punctuation[text[start_position - 1]]]
+  position = start_position
+  while punctuation_stack and position < len(text):
+    if text[position] == punctuation_stack[-1]:
+      punctuation_stack.pop()
+    elif text[position] in closing_punctuation:
+      # A closing punctuation without matching opening punctuations.
+      return None
+    elif text[position] in matching_punctuation:
+      punctuation_stack.append(matching_punctuation[text[position]])
+    position += 1
+  if punctuation_stack:
+    # Opening punctuations left without matching close-punctuations.
+    return None
+  # punctuations match.
+  return text[start_position:position - 1]
+
+
+# Patterns for matching call-by-reference parameters.
+#
+# Supports nested templates up to 2 levels deep using this messy pattern:
+#   < (?: < (?: < [^<>]*
+#               >
+#           |   [^<>] )*
+#         >
+#     |   [^<>] )*
+#   >
+_RE_PATTERN_IDENT = r'[_a-zA-Z]\w*'  # =~ [[:alpha:]][[:alnum:]]*
+_RE_PATTERN_TYPE = (
+    r'(?:const\s+)?(?:typename\s+|class\s+|struct\s+|union\s+|enum\s+)?'
+    r'(?:\w|'
+    r'\s*<(?:<(?:<[^<>]*>|[^<>])*>|[^<>])*>|'
+    r'::)+')
+# A call-by-reference parameter ends with '& identifier'.
+_RE_PATTERN_REF_PARAM = re.compile(
+    r'(' + _RE_PATTERN_TYPE + r'(?:\s*(?:\bconst\b|[*]))*\s*'
+    r'&\s*' + _RE_PATTERN_IDENT + r')\s*(?:=[^,()]+)?[,)]')
+# A call-by-const-reference parameter either ends with 'const& identifier'
+# or looks like 'const type& identifier' when 'type' is atomic.
+_RE_PATTERN_CONST_REF_PARAM = (
+    r'(?:.*\s*\bconst\s*&\s*' + _RE_PATTERN_IDENT +
+    r'|const\s+' + _RE_PATTERN_TYPE + r'\s*&\s*' + _RE_PATTERN_IDENT + r')')
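+# Illustrative example: in a declaration such as "void Update(string &name)",
+# _RE_PATTERN_REF_PARAM captures "string &name"; the const variant
+# "const string &name" is also matched by _RE_PATTERN_CONST_REF_PARAM and is
+# therefore not reported by CheckForNonConstReference below.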
+
+
+def CheckLanguage(filename, clean_lines, linenum, file_extension,
+                  include_state, nesting_state, error):
+  """Checks rules from the 'C++ language rules' section of cppguide.html.
+
+  Some of these rules are hard to test (function overloading, using
+  uint32 inappropriately), but we do the best we can.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    file_extension: The extension (without the dot) of the filename.
+    include_state: An _IncludeState instance in which the headers are inserted.
+    nesting_state: A _NestingState instance which maintains information about
+                   the current stack of nested blocks being parsed.
+    error: The function to call with any errors found.
+  """
+  # If the line is empty or consists of entirely a comment, no need to
+  # check it.
+  line = clean_lines.elided[linenum]
+  if not line:
+    return
+
+  match = _RE_PATTERN_INCLUDE.search(line)
+  if match:
+    CheckIncludeLine(filename, clean_lines, linenum, include_state, error)
+    return
+
+  # Reset include state across preprocessor directives.  This is meant
+  # to silence warnings for conditional includes.
+  if Match(r'^\s*#\s*(?:ifdef|elif|else|endif)\b', line):
+    include_state.ResetSection()
+
+  # Make Windows paths like Unix.
+  fullname = os.path.abspath(filename).replace('\\', '/')
+
+  # TODO(unknown): figure out if they're using default arguments in fn proto.
+
+  # Check to see if they're using a conversion function cast.
+  # I just try to capture the most common basic types, though there are more.
+  # Parameterless conversion functions, such as bool(), are allowed as they are
+  # probably a member operator declaration or default constructor.
+  match = Search(
+      r'(\bnew\s+)?\b'  # Grab 'new' operator, if it's there
+      r'(int|float|double|bool|char|int32|uint32|int64|uint64)'
+      r'(\([^)].*)', line)
+  if match:
+    matched_new = match.group(1)
+    matched_type = match.group(2)
+    matched_funcptr = match.group(3)
+
+    # gMock methods are defined using some variant of MOCK_METHODx(name, type)
+    # where type may be float(), int(string), etc.  Without context they are
+    # virtually indistinguishable from int(x) casts. Likewise, gMock's
+    # MockCallback takes a template parameter of the form return_type(arg_type),
+    # which looks much like the cast we're trying to detect.
+    #
+    # std::function<> wrapper has a similar problem.
+    #
+    # Return types for function pointers also look like casts if they
+    # don't have an extra space.
+    if (matched_new is None and  # If new operator, then this isn't a cast
+        not (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or
+             Search(r'\bMockCallback<.*>', line) or
+             Search(r'\bstd::function<.*>', line)) and
+        not (matched_funcptr and
+             Match(r'\((?:[^() ]+::\s*\*\s*)?[^() ]+\)\s*\(',
+                   matched_funcptr))):
+      # Try a bit harder to catch gmock lines: the only place where
+      # something looks like an old-style cast is where we declare the
+      # return type of the mocked method, and the only time when we
+      # are missing context is if MOCK_METHOD was split across
+      # multiple lines.  The missing MOCK_METHOD is usually one or two
+      # lines back, so scan back one or two lines.
+      #
+      # It's not possible for gmock macros to appear in the first 2
+      # lines, since the class head + section name takes up 2 lines.
+      if (linenum < 2 or
+          not (Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\((?:\S+,)?\s*$',
+                     clean_lines.elided[linenum - 1]) or
+               Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\(\s*$',
+                     clean_lines.elided[linenum - 2]))):
+        error(filename, linenum, 'readability/casting', 4,
+              'Using deprecated casting style.  '
+              'Use static_cast<%s>(...) instead' %
+              matched_type)
+
+  CheckCStyleCast(filename, linenum, line, clean_lines.raw_lines[linenum],
+                  'static_cast',
+                  r'\((int|float|double|bool|char|u?int(16|32|64))\)', error)
+
+  # This doesn't catch all cases. Consider (const char * const)"hello".
+  #
+  # (char *) "foo" should always be a const_cast (reinterpret_cast won't
+  # compile).
+  if CheckCStyleCast(filename, linenum, line, clean_lines.raw_lines[linenum],
+                     'const_cast', r'\((char\s?\*+\s?)\)\s*"', error):
+    pass
+  else:
+    # Check pointer casts for other than string constants
+    CheckCStyleCast(filename, linenum, line, clean_lines.raw_lines[linenum],
+                    'reinterpret_cast', r'\((\w+\s?\*+\s?)\)', error)
+
+  # In addition, we look for people taking the address of a cast.  This
+  # is dangerous -- casts can assign to temporaries, so the pointer doesn't
+  # point where you think.
+  match = Search(
+      r'(?:&\(([^)]+)\)[\w(])|'
+      r'(?:&(static|dynamic|down|reinterpret)_cast\b)', line)
+  if match and match.group(1) != '*':
+    error(filename, linenum, 'runtime/casting', 4,
+          ('Are you taking an address of a cast?  '
+           'This is dangerous: could be a temp var.  '
+           'Take the address before doing the cast, rather than after'))
+
+  # Create an extended_line, which is the concatenation of the current and
+  # next lines, for more effective checking of code that may span more than one
+  # line.
+  if linenum + 1 < clean_lines.NumLines():
+    extended_line = line + clean_lines.elided[linenum + 1]
+  else:
+    extended_line = line
+
+  # Check for people declaring static/global STL strings at the top level.
+  # This is dangerous because the C++ language does not guarantee that
+  # globals with constructors are initialized before the first access.
+  match = Match(
+      r'((?:|static +)(?:|const +))string +([a-zA-Z0-9_:]+)\b(.*)',
+      line)
+  # Make sure it's not a function.
+  # Function template specialization looks like: "string foo<Type>(...".
+  # Class template definitions look like: "string Foo<Type>::Method(...".
+  #
+  # Also ignore things that look like operators.  These are matched separately
+  # because operator names cross non-word boundaries.  If we change the pattern
+  # above, we would decrease the accuracy of matching identifiers.
+  if (match and
+      not Search(r'\boperator\W', line) and
+      not Match(r'\s*(<.*>)?(::[a-zA-Z0-9_]+)?\s*\(([^"]|$)', match.group(3))):
+    error(filename, linenum, 'runtime/string', 4,
+          'For a static/global string constant, use a C style string instead: '
+          '"%schar %s[]".' %
+          (match.group(1), match.group(2)))
+
+  if Search(r'\b([A-Za-z0-9_]*_)\(\1\)', line):
+    error(filename, linenum, 'runtime/init', 4,
+          'You seem to be initializing a member variable with itself.')
+
+  if file_extension == 'h':
+    # TODO(unknown): check that 1-arg constructors are explicit.
+    #                How to tell it's a constructor?
+    #                (handled in CheckForNonStandardConstructs for now)
+    # TODO(unknown): check that classes have DISALLOW_EVIL_CONSTRUCTORS
+    #                (level 1 error)
+    pass
+
+  # Check if people are using the verboten C basic types.  The only exception
+  # we regularly allow is "unsigned short port" for port.
+  if Search(r'\bshort port\b', line):
+    if not Search(r'\bunsigned short port\b', line):
+      error(filename, linenum, 'runtime/int', 4,
+            'Use "unsigned short" for ports, not "short"')
+  else:
+    match = Search(r'\b(short|long(?! +double)|long long)\b', line)
+    if match:
+      error(filename, linenum, 'runtime/int', 4,
+            'Use int16/int64/etc, rather than the C type %s' % match.group(1))
+
+  # When snprintf is used, the second argument shouldn't be a literal.
+  match = Search(r'snprintf\s*\(([^,]*),\s*([0-9]*)\s*,', line)
+  if match and match.group(2) != '0':
+    # If 2nd arg is zero, snprintf is used to calculate size.
+    error(filename, linenum, 'runtime/printf', 3,
+          'If you can, use sizeof(%s) instead of %s as the 2nd arg '
+          'to snprintf.' % (match.group(1), match.group(2)))
+
+  # Check if some verboten C functions are being used.
+  if Search(r'\bsprintf\b', line):
+    error(filename, linenum, 'runtime/printf', 5,
+          'Never use sprintf.  Use snprintf instead.')
+  match = Search(r'\b(strcpy|strcat)\b', line)
+  if match:
+    error(filename, linenum, 'runtime/printf', 4,
+          'Almost always, snprintf is better than %s' % match.group(1))
+
+  # Check if some verboten operator overloading is going on
+  # TODO(unknown): catch out-of-line unary operator&:
+  #   class X {};
+  #   int operator&(const X& x) { return 42; }  // unary operator&
+  # The trick is it's hard to tell apart from binary operator&:
+  #   class Y { int operator&(const Y& x) { return 23; } }; // binary operator&
+  if Search(r'\boperator\s*&\s*\(\s*\)', line):
+    error(filename, linenum, 'runtime/operator', 4,
+          'Unary operator& is dangerous.  Do not use it.')
+
+  # Check for suspicious usage of "if" like
+  # } if (a == b) {
+  if Search(r'\}\s*if\s*\(', line):
+    error(filename, linenum, 'readability/braces', 4,
+          'Did you mean "else if"? If not, start a new line for "if".')
+
+  # Check for potential format string bugs like printf(foo).
+  # We constrain the pattern not to pick things like DocidForPrintf(foo).
+  # Not perfect but it can catch printf(foo.c_str()) and printf(foo->c_str())
+  # TODO(sugawarayu): Catch the following case. Need to change the calling
+  # convention of the whole function to process multiple line to handle it.
+  #   printf(
+  #       boy_this_is_a_really_long_variable_that_cannot_fit_on_the_prev_line);
+  printf_args = _GetTextInside(line, r'(?i)\b(string)?printf\s*\(')
+  if printf_args:
+    match = Match(r'([\w.\->()]+)$', printf_args)
+    if match and match.group(1) != '__VA_ARGS__':
+      function_name = re.search(r'\b((?:string)?printf)\s*\(',
+                                line, re.I).group(1)
+      error(filename, linenum, 'runtime/printf', 4,
+            'Potential format string bug. Do %s("%%s", %s) instead.'
+            % (function_name, match.group(1)))
+
+  # Check for potential memset bugs like memset(buf, sizeof(buf), 0).
+  match = Search(r'memset\s*\(([^,]*),\s*([^,]*),\s*0\s*\)', line)
+  if match and not Match(r"^''|-?[0-9]+|0x[0-9A-Fa-f]$", match.group(2)):
+    error(filename, linenum, 'runtime/memset', 4,
+          'Did you mean "memset(%s, 0, %s)"?'
+          % (match.group(1), match.group(2)))
+
+  if Search(r'\busing namespace\b', line):
+    error(filename, linenum, 'build/namespaces', 5,
+          'Do not use namespace using-directives.  '
+          'Use using-declarations instead.')
+
+  # Detect variable-length arrays.
+  match = Match(r'\s*(.+::)?(\w+) [a-z]\w*\[(.+)];', line)
+  if (match and match.group(2) != 'return' and match.group(2) != 'delete' and
+      match.group(3).find(']') == -1):
+    # Split the size using space and arithmetic operators as delimiters.
+    # If any of the resulting tokens are not compile time constants then
+    # report the error.
+    tokens = re.split(r'\s|\+|\-|\*|\/|<<|>>]', match.group(3))
+    is_const = True
+    skip_next = False
+    for tok in tokens:
+      if skip_next:
+        skip_next = False
+        continue
+
+      if Search(r'sizeof\(.+\)', tok): continue
+      if Search(r'arraysize\(\w+\)', tok): continue
+
+      tok = tok.lstrip('(')
+      tok = tok.rstrip(')')
+      if not tok: continue
+      if Match(r'\d+', tok): continue
+      if Match(r'0[xX][0-9a-fA-F]+', tok): continue
+      if Match(r'k[A-Z0-9]\w*', tok): continue
+      if Match(r'(.+::)?k[A-Z0-9]\w*', tok): continue
+      if Match(r'(.+::)?[A-Z][A-Z0-9_]*', tok): continue
+      # A catch all for tricky sizeof cases, including 'sizeof expression',
+      # 'sizeof(*type)', 'sizeof(const type)', 'sizeof(struct StructName)'
+      # requires skipping the next token because we split on ' ' and '*'.
+      if tok.startswith('sizeof'):
+        skip_next = True
+        continue
+      is_const = False
+      break
+    if not is_const:
+      error(filename, linenum, 'runtime/arrays', 1,
+            'Do not use variable-length arrays.  Use an appropriately named '
+            "('k' followed by CamelCase) compile-time constant for the size.")
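+  # For example, "int values[entries];" (where 'entries' is a runtime
+  # variable) would be flagged above, while "int values[10];" or
+  # "int values[kMaxEntries];" would not, since digits and 'k'-prefixed
+  # names are treated as compile-time constants.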
+
+  # If DISALLOW_EVIL_CONSTRUCTORS, DISALLOW_COPY_AND_ASSIGN, or
+  # DISALLOW_IMPLICIT_CONSTRUCTORS is present, then it should be the last thing
+  # in the class declaration.
+  match = Match(
+      (r'\s*'
+       r'(DISALLOW_(EVIL_CONSTRUCTORS|COPY_AND_ASSIGN|IMPLICIT_CONSTRUCTORS))'
+       r'\(.*\);$'),
+      line)
+  if match and linenum + 1 < clean_lines.NumLines():
+    next_line = clean_lines.elided[linenum + 1]
+    # We allow some, but not all, declarations of variables to be present
+    # in the statement that defines the class.  The [\w\*,\s]* fragment of
+    # the regular expression below allows users to declare instances of
+    # the class or pointers to instances, but not less common types such
+    # as function pointers or arrays.  It's a tradeoff between allowing
+    # reasonable code and avoiding trying to parse more C++ using regexps.
+    if not Search(r'^\s*}[\w\*,\s]*;', next_line):
+      error(filename, linenum, 'readability/constructors', 3,
+            match.group(1) + ' should be the last thing in the class')
+
+  # Check for use of unnamed namespaces in header files.  Registration
+  # macros are typically OK, so we allow use of "namespace {" on lines
+  # that end with backslashes.
+  if (file_extension == 'h'
+      and Search(r'\bnamespace\s*{', line)
+      and line[-1] != '\\'):
+    error(filename, linenum, 'build/namespaces', 4,
+          'Do not use unnamed namespaces in header files.  See '
+          'http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces'
+          ' for more information.')
+
+def CheckForNonConstReference(filename, clean_lines, linenum,
+                              nesting_state, error):
+  """Check for non-const references.
+
+  Separate from CheckLanguage since it scans backwards from current
+  line, instead of scanning forward.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    nesting_state: A _NestingState instance which maintains information about
+                   the current stack of nested blocks being parsed.
+    error: The function to call with any errors found.
+  """
+  # Do nothing if there is no '&' on current line.
+  line = clean_lines.elided[linenum]
+  if '&' not in line:
+    return
+
+  # Long type names may be broken across multiple lines, usually in one
+  # of these forms:
+  #   LongType
+  #       ::LongTypeContinued &identifier
+  #   LongType::
+  #       LongTypeContinued &identifier
+  #   LongType<
+  #       ...>::LongTypeContinued &identifier
+  #
+  # If we detected a type split across two lines, join the previous
+  # line to current line so that we can match const references
+  # accordingly.
+  #
+  # Note that this only scans back one line, since scanning back
+  # arbitrary number of lines would be expensive.  If you have a type
+  # that spans more than 2 lines, please use a typedef.
+  if linenum > 1:
+    previous = None
+    if Match(r'\s*::(?:[\w<>]|::)+\s*&\s*\S', line):
+      # previous_line\n + ::current_line
+      previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+[\w<>])\s*$',
+                        clean_lines.elided[linenum - 1])
+    elif Match(r'\s*[a-zA-Z_]([\w<>]|::)+\s*&\s*\S', line):
+      # previous_line::\n + current_line
+      previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+::)\s*$',
+                        clean_lines.elided[linenum - 1])
+    if previous:
+      line = previous.group(1) + line.lstrip()
+    else:
+      # Check for templated parameter that is split across multiple lines
+      endpos = line.rfind('>')
+      if endpos > -1:
+        (_, startline, startpos) = ReverseCloseExpression(
+            clean_lines, linenum, endpos)
+        if startpos > -1 and startline < linenum:
+          # Found the matching < on an earlier line, collect all
+          # pieces up to current line.
+          line = ''
+          for i in xrange(startline, linenum + 1):
+            line += clean_lines.elided[i].strip()
+
+  # Check for non-const references in function parameters.  A single '&' may
+  # found in the following places:
+  #   inside expression: binary & for bitwise AND
+  #   inside expression: unary & for taking the address of something
+  #   inside declarators: reference parameter
+  # We will exclude the first two cases by checking that we are not inside a
+  # function body, including one that was just introduced by a trailing '{'.
+  # TODO(unknown): Doesn't account for preprocessor directives.
+  # TODO(unknown): Doesn't account for 'catch(Exception& e)' [rare].
+  check_params = False
+  if not nesting_state.stack:
+    check_params = True  # top level
+  elif (isinstance(nesting_state.stack[-1], _ClassInfo) or
+        isinstance(nesting_state.stack[-1], _NamespaceInfo)):
+    check_params = True  # within class or namespace
+  elif Match(r'.*{\s*$', line):
+    if (len(nesting_state.stack) == 1 or
+        isinstance(nesting_state.stack[-2], _ClassInfo) or
+        isinstance(nesting_state.stack[-2], _NamespaceInfo)):
+      check_params = True  # just opened global/class/namespace block
+  # We allow non-const references in a few standard places, like functions
+  # called "swap()" or iostream operators like "<<" or ">>".  Do not check
+  # those function parameters.
+  #
+  # We also accept & in static_assert, which looks like a function but
+  # it's actually a declaration expression.
+  whitelisted_functions = (r'(?:[sS]wap(?:<\w:+>)?|'
+                           r'operator\s*[<>][<>]|'
+                           r'static_assert|COMPILE_ASSERT'
+                           r')\s*\(')
+  if Search(whitelisted_functions, line):
+    check_params = False
+  elif not Search(r'\S+\([^)]*$', line):
+    # Don't see a whitelisted function on this line.  Actually we
+    # didn't see any function name on this line, so this is likely a
+    # multi-line parameter list.  Try a bit harder to catch this case.
+    for i in xrange(2):
+      if (linenum > i and
+          Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])):
+        check_params = False
+        break
+
+  if check_params:
+    decls = ReplaceAll(r'{[^}]*}', ' ', line)  # exclude function body
+    for parameter in re.findall(_RE_PATTERN_REF_PARAM, decls):
+      if not Match(_RE_PATTERN_CONST_REF_PARAM, parameter):
+        error(filename, linenum, 'runtime/references', 2,
+              'Is this a non-const reference? '
+              'If so, make const or use a pointer: ' +
+              ReplaceAll(' *<', '<', parameter))
+
+
+def CheckCStyleCast(filename, linenum, line, raw_line, cast_type, pattern,
+                    error):
+  """Checks for a C-style cast by looking for the pattern.
+
+  Args:
+    filename: The name of the current file.
+    linenum: The number of the line to check.
+    line: The line of code to check.
+    raw_line: The raw line of code to check, with comments.
+    cast_type: The string for the C++ cast to recommend.  This is either
+      reinterpret_cast, static_cast, or const_cast, depending.
+    pattern: The regular expression used to find C-style casts.
+    error: The function to call with any errors found.
+
+  Returns:
+    True if an error was emitted.
+    False otherwise.
+  """
+  match = Search(pattern, line)
+  if not match:
+    return False
+
+  # Exclude lines with sizeof, since sizeof looks like a cast.
+  sizeof_match = Match(r'.*sizeof\s*$', line[0:match.start(1) - 1])
+  if sizeof_match:
+    return False
+
+  # operator++(int) and operator--(int)
+  if (line[0:match.start(1) - 1].endswith(' operator++') or
+      line[0:match.start(1) - 1].endswith(' operator--')):
+    return False
+
+  # A single unnamed argument for a function tends to look like an
+  # old-style cast.  If we see one, don't issue warnings for deprecated
+  # casts; instead, issue warnings for unnamed arguments where
+  # appropriate.
+  #
+  # These are things that we want warnings for, since the style guide
+  # explicitly requires all parameters to be named:
+  #   Function(int);
+  #   Function(int) {
+  #   ConstMember(int) const;
+  #   ConstMember(int) const {
+  #   ExceptionMember(int) throw (...);
+  #   ExceptionMember(int) throw (...) {
+  #   PureVirtual(int) = 0;
+  #
+  # These are functions of some sort, where the compiler would be fine
+  # if they had named parameters, but people often omit those
+  # identifiers to reduce clutter:
+  #   (FunctionPointer)(int);
+  #   (FunctionPointer)(int) = value;
+  #   Function((function_pointer_arg)(int))
+  #   <TemplateArgument(int)>;
+  #   <(FunctionPointerTemplateArgument)(int)>;
+  remainder = line[match.end(0):]
+  if Match(r'^\s*(?:;|const\b|throw\b|=|>|\{|\))', remainder):
+    # Looks like an unnamed parameter.
+
+    # Don't warn on any kind of template arguments.
+    if Match(r'^\s*>', remainder):
+      return False
+
+    # Don't warn on assignments to function pointers, but keep warnings for
+    # unnamed parameters to pure virtual functions.  Note that this pattern
+    # will also pass on assignments of "0" to function pointers, but the
+    # preferred values for those would be "nullptr" or "NULL".
+    matched_zero = Match(r'^\s*=\s*(\S+)\s*;', remainder)
+    if matched_zero and matched_zero.group(1) != '0':
+      return False
+
+    # Don't warn on function pointer declarations.  For this we need
+    # to check what came before the "(type)" string.
+    if Match(r'.*\)\s*$', line[0:match.start(0)]):
+      return False
+
+    # Don't warn if the parameter is named with block comments, e.g.:
+    #  Function(int /*unused_param*/);
+    if '/*' in raw_line:
+      return False
+
+    # Passed all filters, issue warning here.
+    error(filename, linenum, 'readability/function', 3,
+          'All parameters should be named in a function')
+    return True
+
+  # At this point, all that should be left is actual casts.
+  error(filename, linenum, 'readability/casting', 4,
+        'Using C-style cast.  Use %s<%s>(...) instead' %
+        (cast_type, match.group(1)))
+
+  return True
+
+
+_HEADERS_CONTAINING_TEMPLATES = (
+    ('<deque>', ('deque',)),
+    ('<functional>', ('unary_function', 'binary_function',
+                      'plus', 'minus', 'multiplies', 'divides', 'modulus',
+                      'negate',
+                      'equal_to', 'not_equal_to', 'greater', 'less',
+                      'greater_equal', 'less_equal',
+                      'logical_and', 'logical_or', 'logical_not',
+                      'unary_negate', 'not1', 'binary_negate', 'not2',
+                      'bind1st', 'bind2nd',
+                      'pointer_to_unary_function',
+                      'pointer_to_binary_function',
+                      'ptr_fun',
+                      'mem_fun_t', 'mem_fun', 'mem_fun1_t', 'mem_fun1_ref_t',
+                      'mem_fun_ref_t',
+                      'const_mem_fun_t', 'const_mem_fun1_t',
+                      'const_mem_fun_ref_t', 'const_mem_fun1_ref_t',
+                      'mem_fun_ref',
+                     )),
+    ('<limits>', ('numeric_limits',)),
+    ('<list>', ('list',)),
+    ('<map>', ('map', 'multimap',)),
+    ('<memory>', ('allocator',)),
+    ('<queue>', ('queue', 'priority_queue',)),
+    ('<set>', ('set', 'multiset',)),
+    ('<stack>', ('stack',)),
+    ('<string>', ('char_traits', 'basic_string',)),
+    ('<utility>', ('pair',)),
+    ('<vector>', ('vector',)),
+
+    # gcc extensions.
+    # Note: std::hash is their hash, ::hash is our hash
+    ('<hash_map>', ('hash_map', 'hash_multimap',)),
+    ('<hash_set>', ('hash_set', 'hash_multiset',)),
+    ('<slist>', ('slist',)),
+    )
+
+_RE_PATTERN_STRING = re.compile(r'\bstring\b')
+
+_re_pattern_algorithm_header = []
+for _template in ('copy', 'max', 'min', 'min_element', 'sort', 'swap',
+                  'transform'):
+  # Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or
+  # type::max().
+  _re_pattern_algorithm_header.append(
+      (re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'),
+       _template,
+       '<algorithm>'))
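+# Illustrative example: a line containing 'std::min(a, b)' would record
+# <algorithm> as a required header, while 'foo.min(a, b)' or 'Type::max()'
+# would not match the pattern above.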
+
+_re_pattern_templates = []
+for _header, _templates in _HEADERS_CONTAINING_TEMPLATES:
+  for _template in _templates:
+    _re_pattern_templates.append(
+        (re.compile(r'(\<|\b)' + _template + r'\s*\<'),
+         _template + '<>',
+         _header))
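+# Illustrative example: 'vector<int> v;' matches the 'vector<>' pattern and
+# records <vector> as a required header; a plain 'vector' without '<' does not.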
+
+
+def FilesBelongToSameModule(filename_cc, filename_h):
+  """Check if these two filenames belong to the same module.
+
+  The concept of a 'module' here is as follows:
+  foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
+  same 'module' if they are in the same directory.
+  some/path/public/xyzzy and some/path/internal/xyzzy are also considered
+  to belong to the same module here.
+
+  If the filename_cc contains a longer path than the filename_h, for example,
+  '/absolute/path/to/base/sysinfo.cc', and this file would include
+  'base/sysinfo.h', this function also produces the prefix needed to open the
+  header. This is used by the caller of this function to more robustly open the
+  header file. We don't have access to the real include paths in this context,
+  so we need this guesswork here.
+
+  Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
+  according to this implementation. Because of this, this function gives
+  some false positives. This should be sufficiently rare in practice.
+
+  Args:
+    filename_cc: The path of the .cc file.
+    filename_h: The path of the header file.
+
+  Returns:
+    Tuple with a bool and a string:
+    bool: True if filename_cc and filename_h belong to the same module.
+    string: the additional prefix needed to open the header file.
+  """
+
+  if not filename_cc.endswith('.cc'):
+    return (False, '')
+  filename_cc = filename_cc[:-len('.cc')]
+  if filename_cc.endswith('_unittest'):
+    filename_cc = filename_cc[:-len('_unittest')]
+  elif filename_cc.endswith('_test'):
+    filename_cc = filename_cc[:-len('_test')]
+  filename_cc = filename_cc.replace('/public/', '/')
+  filename_cc = filename_cc.replace('/internal/', '/')
+
+  if not filename_h.endswith('.h'):
+    return (False, '')
+  filename_h = filename_h[:-len('.h')]
+  if filename_h.endswith('-inl'):
+    filename_h = filename_h[:-len('-inl')]
+  filename_h = filename_h.replace('/public/', '/')
+  filename_h = filename_h.replace('/internal/', '/')
+
+  files_belong_to_same_module = filename_cc.endswith(filename_h)
+  common_path = ''
+  if files_belong_to_same_module:
+    common_path = filename_cc[:-len(filename_h)]
+  return files_belong_to_same_module, common_path
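+# Illustrative examples:
+#   FilesBelongToSameModule('a/b/foo_test.cc', 'a/b/foo.h')
+#       => (True, '')
+#   FilesBelongToSameModule('/abs/path/to/base/sysinfo.cc', 'base/sysinfo.h')
+#       => (True, '/abs/path/to/')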
+
+
+def UpdateIncludeState(filename, include_state, io=codecs):
+  """Fill up the include_state with new includes found from the file.
+
+  Args:
+    filename: the name of the header to read.
+    include_state: an _IncludeState instance in which the headers are inserted.
+    io: The io factory to use to read the file. Provided for testability.
+
+  Returns:
+    True if a header was successfully added. False otherwise.
+  """
+  headerfile = None
+  try:
+    headerfile = io.open(filename, 'r', 'utf8', 'replace')
+  except IOError:
+    return False
+  linenum = 0
+  for line in headerfile:
+    linenum += 1
+    clean_line = CleanseComments(line)
+    match = _RE_PATTERN_INCLUDE.search(clean_line)
+    if match:
+      include = match.group(2)
+      # The value formatting is cute, but not really used right now.
+      # What matters here is that the key is in include_state.
+      include_state.setdefault(include, '%s:%d' % (filename, linenum))
+  return True
+
+
+def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
+                              io=codecs):
+  """Reports missing STL includes.
+
+  This function will output warnings to make sure you are including the headers
+  necessary for the STL containers and functions that you use. We only give one
+  reason to include a header. For example, if you use both equal_to<> and
+  less<> in a .h file, only one (the latter in the file) of these will be
+  reported as a reason to include <functional>.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    include_state: An _IncludeState instance.
+    error: The function to call with any errors found.
+    io: The IO factory to use to read the header file. Provided for unittest
+        injection.
+  """
+  required = {}  # A map of header name to line number and the template entity.
+                 # Example of required: { '<functional>': (1219, 'less<>') }
+
+  for linenum in xrange(clean_lines.NumLines()):
+    line = clean_lines.elided[linenum]
+    if not line or line[0] == '#':
+      continue
+
+    # String is special -- it is a non-templatized type in STL.
+    matched = _RE_PATTERN_STRING.search(line)
+    if matched:
+      # Don't warn about strings in non-STL namespaces:
+      # (We check only the first match per line; good enough.)
+      prefix = line[:matched.start()]
+      if prefix.endswith('std::') or not prefix.endswith('::'):
+        required['<string>'] = (linenum, 'string')
+
+    for pattern, template, header in _re_pattern_algorithm_header:
+      if pattern.search(line):
+        required[header] = (linenum, template)
+
+    # The following check is just a speed-up; no semantics are changed.
+    if '<' not in line:  # Reduces the CPU time usage by skipping lines.
+      continue
+
+    for pattern, template, header in _re_pattern_templates:
+      if pattern.search(line):
+        required[header] = (linenum, template)
+
+  # The policy is that if you #include something in foo.h you don't need to
+  # include it again in foo.cc. Here, we will look at possible includes.
+  # Let's copy the include_state so it is only messed up within this function.
+  include_state = include_state.copy()
+
+  # Did we find the header for this file (if any) and successfully load it?
+  header_found = False
+
+  # Use the absolute path so that matching works properly.
+  abs_filename = FileInfo(filename).FullName()
+
+  # For Emacs's flymake.
+  # If cpplint is invoked from Emacs's flymake, a temporary file is generated
+  # by flymake and that file name might end with '_flymake.cc'. In that case,
+  # restore original file name here so that the corresponding header file can be
+  # found.
+  # e.g. If the file name is 'foo_flymake.cc', we should search for 'foo.h'
+  # instead of 'foo_flymake.h'
+  abs_filename = re.sub(r'_flymake\.cc$', '.cc', abs_filename)
+
+  # include_state is modified during iteration, so we iterate over a copy of
+  # the keys.
+  header_keys = include_state.keys()
+  for header in header_keys:
+    (same_module, common_path) = FilesBelongToSameModule(abs_filename, header)
+    fullpath = common_path + header
+    if same_module and UpdateIncludeState(fullpath, include_state, io):
+      header_found = True
+
+  # If we can't find the header file for a .cc, assume it's because we don't
+  # know where to look. In that case we'll give up as we're not sure they
+  # didn't include it in the .h file.
+  # TODO(unknown): Do a better job of finding .h files so we are confident that
+  # not having the .h file means there isn't one.
+  if filename.endswith('.cc') and not header_found:
+    return
+
+  # All the lines have been processed, report the errors found.
+  for required_header_unstripped in required:
+    template = required[required_header_unstripped][1]
+    if required_header_unstripped.strip('<>"') not in include_state:
+      error(filename, required[required_header_unstripped][0],
+            'build/include_what_you_use', 4,
+            'Add #include ' + required_header_unstripped + ' for ' + template)
+
+
+_RE_PATTERN_EXPLICIT_MAKEPAIR = re.compile(r'\bmake_pair\s*<')
+
+
+def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error):
+  """Check that make_pair's template arguments are deduced.
+
+  G++ 4.6 in C++0x mode fails badly if make_pair's template arguments are
+  specified explicitly, and such use isn't intended in any case.
+
+  Args:
+    filename: The name of the current file.
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+    error: The function to call with any errors found.
+  """
+  line = clean_lines.elided[linenum]
+  match = _RE_PATTERN_EXPLICIT_MAKEPAIR.search(line)
+  if match:
+    error(filename, linenum, 'build/explicit_make_pair',
+          4,  # 4 = high confidence
+          'For C++11-compatibility, omit template arguments from make_pair'
+          ' OR use pair directly OR if appropriate, construct a pair directly')
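+# Illustrative example: 'make_pair<int, int>(1, 2)' would be flagged by the
+# check above, while 'make_pair(1, 2)' (template arguments deduced) would not.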
+
+
+def ProcessLine(filename, file_extension, clean_lines, line,
+                include_state, function_state, nesting_state, error,
+                extra_check_functions=[]):
+  """Processes a single line in the file.
+
+  Args:
+    filename: Filename of the file that is being processed.
+    file_extension: The extension (dot not included) of the file.
+    clean_lines: An array of strings, each representing a line of the file,
+                 with comments stripped.
+    line: Number of line being processed.
+    include_state: An _IncludeState instance in which the headers are inserted.
+    function_state: A _FunctionState instance which counts function lines, etc.
+    nesting_state: A _NestingState instance which maintains information about
+                   the current stack of nested blocks being parsed.
+    error: A callable to which errors are reported, which takes 4 arguments:
+           filename, line number, error level, and message
+    extra_check_functions: An array of additional check functions that will be
+                           run on each source line. Each function takes 4
+                           arguments: filename, clean_lines, line, error
+  """
+  raw_lines = clean_lines.raw_lines
+  ParseNolintSuppressions(filename, raw_lines[line], line, error)
+  nesting_state.Update(filename, clean_lines, line, error)
+  if nesting_state.stack and nesting_state.stack[-1].inline_asm != _NO_ASM:
+    return
+  CheckForFunctionLengths(filename, clean_lines, line, function_state, error)
+  CheckForMultilineCommentsAndStrings(filename, clean_lines, line, error)
+  CheckStyle(filename, clean_lines, line, file_extension, nesting_state, error)
+  CheckLanguage(filename, clean_lines, line, file_extension, include_state,
+                nesting_state, error)
+  CheckForNonConstReference(filename, clean_lines, line, nesting_state, error)
+  CheckForNonStandardConstructs(filename, clean_lines, line,
+                                nesting_state, error)
+  CheckVlogArguments(filename, clean_lines, line, error)
+  CheckCaffeAlternatives(filename, clean_lines, line, error)
+  CheckCaffeDataLayerSetUp(filename, clean_lines, line, error)
+  CheckCaffeRandom(filename, clean_lines, line, error)
+  CheckPosixThreading(filename, clean_lines, line, error)
+  CheckInvalidIncrement(filename, clean_lines, line, error)
+  CheckMakePairUsesDeduction(filename, clean_lines, line, error)
+  for check_fn in extra_check_functions:
+    check_fn(filename, clean_lines, line, error)
+
+def ProcessFileData(filename, file_extension, lines, error,
+                    extra_check_functions=[]):
+  """Performs lint checks and reports any errors to the given error function.
+
+  Args:
+    filename: Filename of the file that is being processed.
+    file_extension: The extension (dot not included) of the file.
+    lines: An array of strings, each representing a line of the file, with the
+           last element being empty if the file is terminated with a newline.
+    error: A callable to which errors are reported, which takes 4 arguments:
+           filename, line number, error level, and message
+    extra_check_functions: An array of additional check functions that will be
+                           run on each source line. Each function takes 4
+                           arguments: filename, clean_lines, line, error
+  """
+  lines = (['// marker so line numbers and indices both start at 1'] + lines +
+           ['// marker so line numbers end in a known way'])
+
+  include_state = _IncludeState()
+  function_state = _FunctionState()
+  nesting_state = _NestingState()
+
+  ResetNolintSuppressions()
+
+  CheckForCopyright(filename, lines, error)
+
+  if file_extension == 'h':
+    CheckForHeaderGuard(filename, lines, error)
+
+  RemoveMultiLineComments(filename, lines, error)
+  clean_lines = CleansedLines(lines)
+  for line in xrange(clean_lines.NumLines()):
+    ProcessLine(filename, file_extension, clean_lines, line,
+                include_state, function_state, nesting_state, error,
+                extra_check_functions)
+  nesting_state.CheckCompletedBlocks(filename, error)
+
+  CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error)
+
+  # We check here rather than inside ProcessLine so that we see raw
+  # lines rather than "cleaned" lines.
+  CheckForBadCharacters(filename, lines, error)
+
+  CheckForNewlineAtEOF(filename, lines, error)
+
+def ProcessFile(filename, vlevel, extra_check_functions=[]):
+  """Does google-lint on a single file.
+
+  Args:
+    filename: The name of the file to parse.
+
+    vlevel: The level of errors to report.  Every error of confidence
+    >= verbose_level will be reported.  0 is a good default.
+
+    extra_check_functions: An array of additional check functions that will be
+                           run on each source line. Each function takes 4
+                           arguments: filename, clean_lines, line, error
+  """
+
+  _SetVerboseLevel(vlevel)
+
+  try:
+    # Support the UNIX convention of using "-" for stdin.  Note that
+    # we are not opening the file with universal newline support
+    # (which codecs doesn't support anyway), so the resulting lines do
+    # contain trailing '\r' characters if we are reading a file that
+    # has CRLF endings.
+    # If after the split a trailing '\r' is present, it is removed
+    # below. If it is not expected to be present (i.e. os.linesep !=
+    # '\r\n' as in Windows), a warning is issued below if this file
+    # is processed.
+
+    if filename == '-':
+      lines = codecs.StreamReaderWriter(sys.stdin,
+                                        codecs.getreader('utf8'),
+                                        codecs.getwriter('utf8'),
+                                        'replace').read().split('\n')
+    else:
+      lines = codecs.open(filename, 'r', 'utf8', 'replace').read().split('\n')
+
+    carriage_return_found = False
+    # Remove trailing '\r'.
+    for linenum in range(len(lines)):
+      if lines[linenum].endswith('\r'):
+        lines[linenum] = lines[linenum].rstrip('\r')
+        carriage_return_found = True
+
+  except IOError:
+    sys.stderr.write(
+        "Skipping input '%s': Can't open for reading\n" % filename)
+    return
+
+  # Note, if no dot is found, this will give the entire filename as the ext.
+  file_extension = filename[filename.rfind('.') + 1:]
+
+  # When reading from stdin, the extension is unknown, so no cpplint tests
+  # should rely on the extension.
+  if filename != '-' and file_extension not in _valid_extensions:
+    sys.stderr.write('Ignoring %s; not a valid file name '
+                     '(%s)\n' % (filename, ', '.join(_valid_extensions)))
+  else:
+    ProcessFileData(filename, file_extension, lines, Error,
+                    extra_check_functions)
+    if carriage_return_found and os.linesep != '\r\n':
+      # Use 0 for linenum since outputting only one error for potentially
+      # several lines.
+      Error(filename, 0, 'whitespace/newline', 1,
+            'One or more unexpected \\r (^M) found; '
+            'better to use only a \\n')
+
+  sys.stderr.write('Done processing %s\n' % filename)
+
+
+def PrintUsage(message):
+  """Prints a brief usage string and exits, optionally with an error message.
+
+  Args:
+    message: The optional error message.
+  """
+  sys.stderr.write(_USAGE)
+  if message:
+    sys.exit('\nFATAL ERROR: ' + message)
+  else:
+    sys.exit(1)
+
+
+def PrintCategories():
+  """Prints a list of all the error-categories used by error messages.
+
+  These are the categories used to filter messages via --filter.
+  """
+  sys.stderr.write(''.join('  %s\n' % cat for cat in _ERROR_CATEGORIES))
+  sys.exit(0)
+
+
+def ParseArguments(args):
+  """Parses the command line arguments.
+
+  This may set the output format and verbosity level as side-effects.
+
+  Args:
+    args: The command line arguments.
+
+  Returns:
+    The list of filenames to lint.
+  """
+  try:
+    (opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=',
+                                                 'counting=',
+                                                 'filter=',
+                                                 'root=',
+                                                 'linelength=',
+                                                 'extensions='])
+  except getopt.GetoptError:
+    PrintUsage('Invalid arguments.')
+
+  verbosity = _VerboseLevel()
+  output_format = _OutputFormat()
+  filters = ''
+  counting_style = ''
+
+  for (opt, val) in opts:
+    if opt == '--help':
+      PrintUsage(None)
+    elif opt == '--output':
+      if val not in ('emacs', 'vs7', 'eclipse'):
+        PrintUsage('The only allowed output formats are emacs, vs7 and eclipse.')
+      output_format = val
+    elif opt == '--verbose':
+      verbosity = int(val)
+    elif opt == '--filter':
+      filters = val
+      if not filters:
+        PrintCategories()
+    elif opt == '--counting':
+      if val not in ('total', 'toplevel', 'detailed'):
+        PrintUsage('Valid counting options are total, toplevel, and detailed')
+      counting_style = val
+    elif opt == '--root':
+      global _root
+      _root = val
+    elif opt == '--linelength':
+      global _line_length
+      try:
+          _line_length = int(val)
+      except ValueError:
+          PrintUsage('Line length must be digits.')
+    elif opt == '--extensions':
+      global _valid_extensions
+      try:
+          _valid_extensions = set(val.split(','))
+      except ValueError:
+          PrintUsage('Extensions must be a comma-separated list.')
+
+  if not filenames:
+    PrintUsage('No files were specified.')
+
+  _SetOutputFormat(output_format)
+  _SetVerboseLevel(verbosity)
+  _SetFilters(filters)
+  _SetCountingStyle(counting_style)
+
+  return filenames
+
+
+def main():
+  filenames = ParseArguments(sys.argv[1:])
+
+  # Change stderr to write with replacement characters so we don't die
+  # if we try to print something containing non-ASCII characters.
+  sys.stderr = codecs.StreamReaderWriter(sys.stderr,
+                                         codecs.getreader('utf8'),
+                                         codecs.getwriter('utf8'),
+                                         'replace')
+
+  _cpplint_state.ResetErrorCounts()
+  for filename in filenames:
+    ProcessFile(filename, _cpplint_state.verbose_level)
+  _cpplint_state.PrintErrorCounts()
+
+  sys.exit(_cpplint_state.error_count > 0)
+
+
+if __name__ == '__main__':
+  main()
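ProcessFile and ProcessFileData above accept an extra_check_functions list, so callers can plug project-specific checks in alongside the Caffe-specific ones (CheckCaffeDataLayerSetUp, CheckCaffeRandom, and so on). A minimal sketch of such a hook, assuming the (filename, clean_lines, linenum, error) signature documented in ProcessFileData; the check name and message below are hypothetical and not part of cpplint:

    # Hypothetical extra check: flag TODO comments that do not name an owner.
    import re

    def CheckTodoHasOwner(filename, clean_lines, linenum, error):
        line = clean_lines.raw_lines[linenum]  # original line, comments intact
        if re.search(r'\bTODO\b(?!\()', line):
            error(filename, linenum, 'readability/todo', 2,
                  'TODO comments should name an owner, e.g. "TODO(username)"')

    # ProcessFile('src/caffe/blob.cpp', 1,
    #             extra_check_functions=[CheckTodoHasOwner])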
diff --git a/scripts/deploy_docs.sh b/scripts/deploy_docs.sh
new file mode 100755
index 0000000..fdf97f7
--- /dev/null
+++ b/scripts/deploy_docs.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+# Publish documentation to the gh-pages site.
+
+# The remote for pushing the docs (defaults to origin).
+# This is where you will submit the PR to BVLC:gh-pages from.
+REMOTE=${1:-origin}
+
+echo "Generating docs and pushing to $REMOTE:gh-pages..."
+echo "To build and view docs when not on master, simply do 'jekyll serve -s docs'."
+echo
+
+REMOTE_URL=`git config --get remote.${REMOTE}.url`
+BRANCH=`git rev-parse --abbrev-ref HEAD`
+MSG=`git log --oneline -1`
+
+if [[ $BRANCH = 'master' ]]; then
+    # Find the docs dir, no matter where the script is called
+    DIR="$( cd "$(dirname "$0")" ; pwd -P )"
+    DOCS_SITE_DIR=$DIR/../docs/_site
+
+    # Make sure that docs/_site tracks remote:gh-pages.
+    # If not, then we make a new repo and check out just that branch.
+    mkdir -p $DOCS_SITE_DIR
+    cd $DOCS_SITE_DIR
+    SITE_REMOTE_URL=`git config --get remote.${REMOTE}.url`
+    SITE_BRANCH=`git rev-parse --abbrev-ref HEAD`
+
+    echo $SITE_REMOTE_URL
+    echo $SITE_BRANCH
+    echo `pwd`
+
+    if [[ ( $SITE_REMOTE_URL = $REMOTE_URL ) && ( $SITE_BRANCH = 'gh-pages' ) ]]; then
+        echo "Confirmed that docs/_site has same remote as main repo, and is on gh-pages."
+    else
+        echo "Checking out $REMOTE:gh-pages into docs/_site (will take a little time)."
+        git init .
+        git remote add -t gh-pages -f $REMOTE $REMOTE_URL
+        git checkout gh-pages
+    fi
+
+    echo "Building the site into docs/_site, and committing the changes."
+    jekyll build -s .. -d .
+    git add --all .
+    git commit -m "$MSG"
+    git push $REMOTE gh-pages
+
+    echo "All done!"
+    cd ../..
+else echo "You must run this deployment script from the 'master' branch."
+fi
diff --git a/scripts/download_model_binary.py b/scripts/download_model_binary.py
new file mode 100755
index 0000000..48e9015
--- /dev/null
+++ b/scripts/download_model_binary.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+import os
+import sys
+import time
+import yaml
+import urllib
+import hashlib
+import argparse
+
+required_keys = ['caffemodel', 'caffemodel_url', 'sha1']
+
+
+def reporthook(count, block_size, total_size):
+    """
+    From http://blog.moleculea.com/2012/10/04/urlretrieve-progres-indicator/
+    """
+    global start_time
+    if count == 0:
+        start_time = time.time()
+        return
+    duration = time.time() - start_time
+    progress_size = int(count * block_size)
+    speed = int(progress_size / (1024 * duration))
+    percent = int(count * block_size * 100 / total_size)
+    sys.stdout.write("\r...%d%%, %d MB, %d KB/s, %d seconds passed" %
+                    (percent, progress_size / (1024 * 1024), speed, duration))
+    sys.stdout.flush()
+
+
+def parse_readme_frontmatter(dirname):
+    readme_filename = os.path.join(dirname, 'readme.md')
+    with open(readme_filename) as f:
+        lines = [line.strip() for line in f.readlines()]
+    top = lines.index('---')
+    bottom = lines.index('---', top + 1)
+    frontmatter = yaml.load('\n'.join(lines[top + 1:bottom]))
+    assert all(key in frontmatter for key in required_keys)
+    return dirname, frontmatter
+
+
+def valid_dirname(dirname):
+    try:
+        return parse_readme_frontmatter(dirname)
+    except Exception as e:
+        print('ERROR: {}'.format(e))
+        raise argparse.ArgumentTypeError(
+            'Must be valid Caffe model directory with a correct readme.md')
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description='Download trained model binary.')
+    parser.add_argument('dirname', type=valid_dirname)
+    args = parser.parse_args()
+
+    # A tiny hack: the dirname validator also returns readme YAML frontmatter.
+    dirname = args.dirname[0]
+    frontmatter = args.dirname[1]
+    model_filename = os.path.join(dirname, frontmatter['caffemodel'])
+
+    # Closure-d function for checking SHA1.
+    def model_checks_out(filename=model_filename, sha1=frontmatter['sha1']):
+        with open(filename, 'r') as f:
+            return hashlib.sha1(f.read()).hexdigest() == sha1
+
+    # Check if model exists.
+    if os.path.exists(model_filename) and model_checks_out():
+        print("Model already exists.")
+        sys.exit(0)
+
+    # Download and verify model.
+    urllib.urlretrieve(
+        frontmatter['caffemodel_url'], model_filename, reporthook)
+    if not model_checks_out():
+        print('ERROR: model did not download correctly! Run this again.')
+        sys.exit(1)
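The script above trusts a download only if the SHA1 of the .caffemodel matches the sha1 field in the readme front matter. A standalone sketch of that check, reading the file in chunks so large models need not fit in memory (the path and expected digest below are placeholders):

    import hashlib

    def sha1_of_file(path, chunk_size=1 << 20):
        """Return the hex SHA1 digest of a file, streamed in 1 MB chunks."""
        digest = hashlib.sha1()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(chunk_size), b''):
                digest.update(chunk)
        return digest.hexdigest()

    # expected = frontmatter['sha1']                 # value parsed from readme.md
    # ok = (sha1_of_file('mymodel.caffemodel') == expected)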
diff --git a/scripts/download_model_from_gist.sh b/scripts/download_model_from_gist.sh
new file mode 100755
index 0000000..a1dccf7
--- /dev/null
+++ b/scripts/download_model_from_gist.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env sh
+
+GIST=$1
+DIRNAME=${2:-./models}
+
+if [ -z $GIST ]; then
+  echo "usage: download_model_from_gist.sh <gist_id> <dirname>"
+  exit
+fi
+
+GIST_DIR=$(echo $GIST | tr '/' '-')
+MODEL_DIR="$DIRNAME/$GIST_DIR"
+
+if [ -d $MODEL_DIR ]; then
+    echo "$MODEL_DIR already exists! Please make sure you're not overwriting anything important!"
+    exit
+fi
+
+echo "Downloading Caffe model info to $MODEL_DIR ..."
+mkdir -p $MODEL_DIR
+wget https://gist.github.com/$GIST/download -O $MODEL_DIR/gist.tar.gz
+tar xzf $MODEL_DIR/gist.tar.gz --directory=$MODEL_DIR --strip-components=1
+rm $MODEL_DIR/gist.tar.gz
+echo "Done"
diff --git a/scripts/gather_examples.sh b/scripts/gather_examples.sh
new file mode 100755
index 0000000..3fc7260
--- /dev/null
+++ b/scripts/gather_examples.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+# Assemble documentation for the project into one directory via symbolic links.
+
+# Find the docs dir, no matter where the script is called
+ROOT_DIR="$( cd "$(dirname "$0")"/.. ; pwd -P )"
+cd $ROOT_DIR
+
+# Gather docs from examples/**/readme.md
+GATHERED_DIR=docs/gathered
+rm -r $GATHERED_DIR
+mkdir $GATHERED_DIR
+for README_FILENAME in $(find examples -iname "readme.md"); do
+    # Only use file if it is to be included in docs.
+    if grep -Fxq "include_in_docs: true" $README_FILENAME; then
+        # Make link to readme.md in docs/gathered/.
+        # Since everything is called readme.md, rename it by its dirname.
+        README_DIRNAME=`dirname $README_FILENAME`
+        DOCS_FILENAME=$GATHERED_DIR/$README_DIRNAME.md
+        mkdir -p `dirname $DOCS_FILENAME`
+        ln -s $ROOT_DIR/$README_FILENAME $DOCS_FILENAME
+    fi
+done
+
+# Gather docs from examples/*.ipynb and add YAML front-matter.
+for NOTEBOOK_FILENAME in $(find examples -depth -iname "*.ipynb"); do
+    DOCS_FILENAME=$GATHERED_DIR/$NOTEBOOK_FILENAME
+    mkdir -p `dirname $DOCS_FILENAME`
+    python scripts/copy_notebook.py $NOTEBOOK_FILENAME $DOCS_FILENAME
+done
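Every readme.md gets the same file name, so the script renames each gathered copy after its directory (examples/mnist/readme.md becomes docs/gathered/examples/mnist.md). A short Python sketch of that path mapping:

    import os

    def gathered_doc_path(readme_path, gathered_dir='docs/gathered'):
        """examples/mnist/readme.md -> docs/gathered/examples/mnist.md"""
        return os.path.join(gathered_dir, os.path.dirname(readme_path) + '.md')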
diff --git a/scripts/travis/travis_build_and_test.sh b/scripts/travis/travis_build_and_test.sh
new file mode 100755
index 0000000..8ff63f3
--- /dev/null
+++ b/scripts/travis/travis_build_and_test.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+# Script called by Travis to do a CPU-only build of and test Caffe.
+
+set -e
+MAKE="make --jobs=$NUM_THREADS --keep-going"
+
+if $WITH_CMAKE; then
+  mkdir build
+  cd build
+  cmake -DBUILD_python=ON -DCMAKE_BUILD_TYPE=Release -DCPU_ONLY=ON ..
+  $MAKE
+  if ! $WITH_CUDA; then
+    $MAKE runtest
+    $MAKE lint
+  fi
+  $MAKE clean
+  cd -
+else
+  if ! $WITH_CUDA; then
+    export CPU_ONLY=1
+  fi
+  $MAKE all test pycaffe warn lint || true
+  if ! $WITH_CUDA; then
+    $MAKE runtest
+  fi
+  $MAKE all
+  $MAKE test
+  $MAKE pycaffe
+  $MAKE pytest
+  $MAKE warn
+  if ! $WITH_CUDA; then
+    $MAKE lint
+  fi
+fi
diff --git a/scripts/travis/travis_install.sh b/scripts/travis/travis_install.sh
new file mode 100755
index 0000000..b6e6f6c
--- /dev/null
+++ b/scripts/travis/travis_install.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+# This script must be run with sudo.
+
+set -e
+
+MAKE="make --jobs=$NUM_THREADS"
+
+# Install apt packages where the Ubuntu 12.04 default and ppa works for Caffe
+
+# This ppa is for gflags and glog
+add-apt-repository -y ppa:tuleu/precise-backports
+apt-get -y update
+apt-get install \
+    wget git curl \
+    python-dev python-numpy \
+    libleveldb-dev libsnappy-dev libopencv-dev \
+    libboost-dev libboost-system-dev libboost-python-dev libboost-thread-dev \
+    libprotobuf-dev protobuf-compiler \
+    libatlas-dev libatlas-base-dev \
+    libhdf5-serial-dev libgflags-dev libgoogle-glog-dev \
+    bc
+
+# Add a special apt-repository to install CMake 2.8.9 for CMake Caffe build,
+# if needed.  By default, Aptitude in Ubuntu 12.04 installs CMake 2.8.7, but
+# Caffe requires a minimum CMake version of 2.8.8.
+if $WITH_CMAKE; then
+  add-apt-repository -y ppa:ubuntu-sdk-team/ppa
+  apt-get -y update
+  apt-get -y install cmake
+fi
+
+# Install CUDA, if needed
+if $WITH_CUDA; then
+  CUDA_URL=http://developer.download.nvidia.com/compute/cuda/repos/ubuntu1204/x86_64/cuda-repo-ubuntu1204_6.5-14_amd64.deb
+  CUDA_FILE=/tmp/cuda_install.deb
+  curl $CUDA_URL -o $CUDA_FILE
+  dpkg -i $CUDA_FILE
+  rm -f $CUDA_FILE
+  apt-get -y update
+  # Install the minimal CUDA subpackages required to test Caffe build.
+  # For a full CUDA installation, add 'cuda' to the list of packages.
+  apt-get -y install cuda-core-6-5 cuda-cublas-6-5 cuda-cublas-dev-6-5 cuda-cudart-6-5 cuda-cudart-dev-6-5 cuda-curand-6-5 cuda-curand-dev-6-5
+  # Create CUDA symlink at /usr/local/cuda
+  # (This would normally be created by the CUDA installer, but we create it
+  # manually since we did a partial installation.)
+  ln -s /usr/local/cuda-6.5 /usr/local/cuda
+fi
+
+# Install LMDB
+LMDB_URL=https://github.com/LMDB/lmdb/archive/LMDB_0.9.14.tar.gz
+LMDB_FILE=/tmp/lmdb.tar.gz
+pushd .
+wget $LMDB_URL -O $LMDB_FILE
+tar -C /tmp -xzvf $LMDB_FILE
+cd /tmp/lmdb*/libraries/liblmdb/
+$MAKE
+$MAKE install
+popd
+rm -f $LMDB_FILE
+
+# Install the Python runtime dependencies via miniconda (this is much faster
+# than using pip for everything).
+wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh
+chmod +x miniconda.sh
+./miniconda.sh -b
+export PATH=/home/travis/miniconda/bin:$PATH
+conda update --yes conda
+conda install --yes numpy scipy matplotlib scikit-image pip
+pip install protobuf
diff --git a/scripts/travis/travis_setup_makefile_config.sh b/scripts/travis/travis_setup_makefile_config.sh
new file mode 100755
index 0000000..ba32626
--- /dev/null
+++ b/scripts/travis/travis_setup_makefile_config.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+set -e
+
+mv Makefile.config.example Makefile.config
+
+if $WITH_CUDA; then
+  # Only generate compute_50.
+  GENCODE="-gencode arch=compute_50,code=sm_50"
+  GENCODE="$GENCODE -gencode arch=compute_50,code=compute_50"
+  echo "CUDA_ARCH := $GENCODE" >> Makefile.config
+fi
+
+cat << 'EOF' >> Makefile.config
+ANACONDA_HOME := $(HOME)/miniconda
+PYTHON_INCLUDE := $(ANACONDA_HOME)/include \
+		$(ANACONDA_HOME)/include/python2.7 \
+		$(ANACONDA_HOME)/lib/python2.7/site-packages/numpy/core/include
+PYTHON_LIB := $(ANACONDA_HOME)/lib
+INCLUDE_DIRS := $(PYTHON_INCLUDE) /usr/local/include
+LIBRARY_DIRS := $(PYTHON_LIB) /usr/local/lib /usr/lib
+WITH_PYTHON_LAYER := 1
+EOF
diff --git a/scripts/upload_model_to_gist.sh b/scripts/upload_model_to_gist.sh
new file mode 100755
index 0000000..3c4fd64
--- /dev/null
+++ b/scripts/upload_model_to_gist.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+
+# Check for valid directory
+DIRNAME=$1
+if [ ! -f $DIRNAME/readme.md ]; then
+    echo "usage: upload_model_to_gist.sh <dirname>"
+    echo "  <dirname>/readme.md must exist"
+fi
+cd $DIRNAME
+FILES=`find . -maxdepth 1 -type f ! -name "*.caffemodel*" | xargs echo`
+
+# Check for gist tool.
+gist -v >/dev/null 2>&1 || { echo >&2 "I require 'gist' but it's not installed. Do 'gem install gist'."; exit 1; }
+
+NAME=`sed -n 's/^name:[[:space:]]*//p' readme.md`
+if [ -z "$NAME" ]; then
+    echo "  <dirname>/readme.md must contain name field in the front-matter."
+fi
+
+GIST=`sed -n 's/^gist_id:[[:space:]]*//p' readme.md`
+if [ -z "$GIST" ]; then
+    echo "Uploading new Gist"
+    gist -p -d "$NAME" $FILES
+else
+    echo "Updating existing Gist, id $GIST"
+    gist -u $GIST -d "$NAME" $FILES
+fi
+
+RESULT=$?
+if [ $RESULT -eq 0 ]; then
+    echo "You've uploaded your model!"
+    echo "Don't forget to add the gist_id field to your <dirname>/readme.md now!"
+    echo "Run the command again after you do that, to make sure the Gist id propagates."
+    echo ""
+    echo "And do share your model over at https://github.com/BVLC/caffe/wiki/Model-Zoo"
+else
+    echo "Something went wrong!"
+fi
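The name and gist_id fields are pulled out of the readme front matter with sed. A rough Python equivalent of that extraction, assuming the same 'key: value' layout (the readme path in the usage comment is a placeholder):

    import re

    def frontmatter_field(text, key):
        """Return the value of a top-level 'key: value' line, or None."""
        match = re.search(r'^%s:[ \t]*(.*)$' % re.escape(key), text, re.MULTILINE)
        return match.group(1).strip() if match else None

    # with open('models/my_model/readme.md') as f:
    #     text = f.read()
    # name = frontmatter_field(text, 'name')
    # gist = frontmatter_field(text, 'gist_id')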
diff --git a/src/caffe/CMakeLists.txt b/src/caffe/CMakeLists.txt
new file mode 100644
index 0000000..40e6c11
--- /dev/null
+++ b/src/caffe/CMakeLists.txt
@@ -0,0 +1,36 @@
+# generate protobuf sources
+file(GLOB proto_files proto/*.proto)
+caffe_protobuf_generate_cpp_py(${proto_gen_folder} proto_srcs proto_hdrs proto_python ${proto_files})
+
+# include the generated python files to force their generation
+add_library(proto STATIC ${proto_hdrs} ${proto_srcs} ${proto_python})
+set(Caffe_LINKER_LIBS proto ${Caffe_LINKER_LIBS}) # note, crucial to prepend!
+caffe_default_properties(proto)
+
+# --[ Caffe library
+
+# creates 'test_srcs', 'srcs', 'test_cuda', 'cuda' lists
+caffe_pickup_caffe_sources(${PROJECT_SOURCE_DIR})
+
+if(HAVE_CUDA)
+  caffe_cuda_compile(cuda_objs ${cuda})
+  list(APPEND srcs ${cuda_objs} ${cuda})
+endif()
+
+add_library(caffe ${srcs})
+target_link_libraries(caffe proto ${Caffe_LINKER_LIBS})
+caffe_default_properties(caffe)
+
+# ---[ Tests
+add_subdirectory(test)
+
+# ---[ Install
+install(DIRECTORY ${Caffe_INCLUDE_DIR}/caffe DESTINATION include)
+install(FILES ${proto_hdrs} DESTINATION include/caffe/proto)
+install(TARGETS caffe proto EXPORT CaffeTargets DESTINATION lib)
+
+file(WRITE ${PROJECT_BINARY_DIR}/__init__.py)
+list(APPEND proto_python ${PROJECT_BINARY_DIR}/__init__.py)
+install(PROGRAMS ${proto_python} DESTINATION python/caffe/proto)
+
+
diff --git a/src/caffe/blob.cpp b/src/caffe/blob.cpp
new file mode 100644
index 0000000..94fdcc3
--- /dev/null
+++ b/src/caffe/blob.cpp
@@ -0,0 +1,495 @@
+#include <climits>
+#include <vector>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/syncedmem.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void Blob<Dtype>::Reshape(const int num, const int channels, const int height,
+    const int width) {
+  vector<int> shape(4);
+  shape[0] = num;
+  shape[1] = channels;
+  shape[2] = height;
+  shape[3] = width;
+  Reshape(shape);
+}
+
+template <typename Dtype>
+void Blob<Dtype>::Reshape(const vector<int>& shape) {
+  CHECK_LE(shape.size(), kMaxBlobAxes);
+  count_ = 1;
+  shape_.resize(shape.size());
+  for (int i = 0; i < shape.size(); ++i) {
+    CHECK_GE(shape[i], 0);
+    CHECK_LE(shape[i], INT_MAX / count_) << "blob size exceeds INT_MAX";
+    count_ *= shape[i];
+    shape_[i] = shape[i];
+  }
+  if (count_ > capacity_) {
+    capacity_ = count_;
+    data_.reset(new SyncedMemory(capacity_ * sizeof(Dtype)));
+    diff_.reset(new SyncedMemory(capacity_ * sizeof(Dtype)));
+  }
+}
+
+template <typename Dtype>
+void Blob<Dtype>::Reshape(const BlobShape& shape) {
+  CHECK_LE(shape.dim_size(), kMaxBlobAxes);
+  vector<int> shape_vec(shape.dim_size());
+  for (int i = 0; i < shape.dim_size(); ++i) {
+    shape_vec[i] = shape.dim(i);
+  }
+  Reshape(shape_vec);
+}
+
+template <typename Dtype>
+void Blob<Dtype>::ReshapeLike(const Blob<Dtype>& other) {
+  Reshape(other.shape());
+}
+
+template <typename Dtype>
+Blob<Dtype>::Blob(const int num, const int channels, const int height,
+    const int width)
+  // capacity_ must be initialized before calling Reshape
+  : capacity_(0) {
+  Reshape(num, channels, height, width);
+}
+
+template <typename Dtype>
+Blob<Dtype>::Blob(const vector<int>& shape)
+  // capacity_ must be initialized before calling Reshape
+  : capacity_(0) {
+  Reshape(shape);
+}
+
+template <typename Dtype>
+const Dtype* Blob<Dtype>::cpu_data() const {
+  CHECK(data_);
+  return (const Dtype*)data_->cpu_data();
+}
+
+template <typename Dtype>
+void Blob<Dtype>::set_cpu_data(Dtype* data) {
+  CHECK(data);
+  data_->set_cpu_data(data);
+}
+
+template <typename Dtype>
+const Dtype* Blob<Dtype>::gpu_data() const {
+  CHECK(data_);
+  return (const Dtype*)data_->gpu_data();
+}
+
+template <typename Dtype>
+const Dtype* Blob<Dtype>::cpu_diff() const {
+  CHECK(diff_);
+  return (const Dtype*)diff_->cpu_data();
+}
+
+template <typename Dtype>
+const Dtype* Blob<Dtype>::gpu_diff() const {
+  CHECK(diff_);
+  return (const Dtype*)diff_->gpu_data();
+}
+
+template <typename Dtype>
+Dtype* Blob<Dtype>::mutable_cpu_data() {
+  CHECK(data_);
+  return static_cast<Dtype*>(data_->mutable_cpu_data());
+}
+
+template <typename Dtype>
+Dtype* Blob<Dtype>::mutable_gpu_data() {
+  CHECK(data_);
+  return static_cast<Dtype*>(data_->mutable_gpu_data());
+}
+
+template <typename Dtype>
+Dtype* Blob<Dtype>::mutable_cpu_diff() {
+  CHECK(diff_);
+  return static_cast<Dtype*>(diff_->mutable_cpu_data());
+}
+
+template <typename Dtype>
+Dtype* Blob<Dtype>::mutable_gpu_diff() {
+  CHECK(diff_);
+  return static_cast<Dtype*>(diff_->mutable_gpu_data());
+}
+
+template <typename Dtype>
+void Blob<Dtype>::ShareData(const Blob& other) {
+  CHECK_EQ(count_, other.count());
+  data_ = other.data();
+}
+
+template <typename Dtype>
+void Blob<Dtype>::ShareDiff(const Blob& other) {
+  CHECK_EQ(count_, other.count());
+  diff_ = other.diff();
+}
+
+// The "update" method is used for parameter blobs in a Net, which are stored
+// as Blob<float> or Blob<double> -- hence we do not define it for
+// Blob<int> or Blob<unsigned int>.
+template <> void Blob<unsigned int>::Update() { NOT_IMPLEMENTED; }
+template <> void Blob<int>::Update() { NOT_IMPLEMENTED; }
+
+template <typename Dtype>
+void Blob<Dtype>::Update() {
+  // We will perform update based on where the data is located.
+  switch (data_->head()) {
+  case SyncedMemory::HEAD_AT_CPU:
+    // perform computation on CPU
+    caffe_axpy<Dtype>(count_, Dtype(-1),
+        static_cast<const Dtype*>(diff_->cpu_data()),
+        static_cast<Dtype*>(data_->mutable_cpu_data()));
+    break;
+  case SyncedMemory::HEAD_AT_GPU:
+  case SyncedMemory::SYNCED:
+#ifndef CPU_ONLY
+    // perform computation on GPU
+    caffe_gpu_axpy<Dtype>(count_, Dtype(-1),
+        static_cast<const Dtype*>(diff_->gpu_data()),
+        static_cast<Dtype*>(data_->mutable_gpu_data()));
+#else
+    NO_GPU;
+#endif
+    break;
+  default:
+    LOG(FATAL) << "Syncedmem not initialized.";
+  }
+}
+
+template <> unsigned int Blob<unsigned int>::asum_data() const {
+  NOT_IMPLEMENTED;
+  return 0;
+}
+
+template <> int Blob<int>::asum_data() const {
+  NOT_IMPLEMENTED;
+  return 0;
+}
+
+template <typename Dtype>
+Dtype Blob<Dtype>::asum_data() const {
+  if (!data_) { return 0; }
+  switch (data_->head()) {
+  case SyncedMemory::HEAD_AT_CPU:
+    return caffe_cpu_asum(count_, cpu_data());
+  case SyncedMemory::HEAD_AT_GPU:
+  case SyncedMemory::SYNCED:
+#ifndef CPU_ONLY
+  {
+    Dtype asum;
+    caffe_gpu_asum(count_, gpu_data(), &asum);
+    return asum;
+  }
+#else
+    NO_GPU;
+#endif
+  case SyncedMemory::UNINITIALIZED:
+    return 0;
+  default:
+    LOG(FATAL) << "Unknown SyncedMemory head state: " << data_->head();
+  }
+  return 0;
+}
+
+template <> unsigned int Blob<unsigned int>::asum_diff() const {
+  NOT_IMPLEMENTED;
+  return 0;
+}
+
+template <> int Blob<int>::asum_diff() const {
+  NOT_IMPLEMENTED;
+  return 0;
+}
+
+template <typename Dtype>
+Dtype Blob<Dtype>::asum_diff() const {
+  if (!diff_) { return 0; }
+  switch (diff_->head()) {
+  case SyncedMemory::HEAD_AT_CPU:
+    return caffe_cpu_asum(count_, cpu_diff());
+  case SyncedMemory::HEAD_AT_GPU:
+  case SyncedMemory::SYNCED:
+#ifndef CPU_ONLY
+  {
+    Dtype asum;
+    caffe_gpu_asum(count_, gpu_diff(), &asum);
+    return asum;
+  }
+#else
+    NO_GPU;
+#endif
+  case SyncedMemory::UNINITIALIZED:
+    return 0;
+  default:
+    LOG(FATAL) << "Unknown SyncedMemory head state: " << diff_->head();
+  }
+  return 0;
+}
+
+template <> unsigned int Blob<unsigned int>::sumsq_data() const {
+  NOT_IMPLEMENTED;
+  return 0;
+}
+
+template <> int Blob<int>::sumsq_data() const {
+  NOT_IMPLEMENTED;
+  return 0;
+}
+
+template <typename Dtype>
+Dtype Blob<Dtype>::sumsq_data() const {
+  Dtype sumsq;
+  const Dtype* data;
+  if (!data_) { return 0; }
+  switch (data_->head()) {
+  case SyncedMemory::HEAD_AT_CPU:
+    data = cpu_data();
+    sumsq = caffe_cpu_dot(count_, data, data);
+    break;
+  case SyncedMemory::HEAD_AT_GPU:
+  case SyncedMemory::SYNCED:
+#ifndef CPU_ONLY
+    data = gpu_data();
+    caffe_gpu_dot(count_, data, data, &sumsq);
+#else
+    NO_GPU;
+#endif
+    break;
+  case SyncedMemory::UNINITIALIZED:
+    return 0;
+  default:
+    LOG(FATAL) << "Unknown SyncedMemory head state: " << data_->head();
+  }
+  return sumsq;
+}
+
+template <> unsigned int Blob<unsigned int>::sumsq_diff() const {
+  NOT_IMPLEMENTED;
+  return 0;
+}
+
+template <> int Blob<int>::sumsq_diff() const {
+  NOT_IMPLEMENTED;
+  return 0;
+}
+
+template <typename Dtype>
+Dtype Blob<Dtype>::sumsq_diff() const {
+  Dtype sumsq;
+  const Dtype* diff;
+  if (!diff_) { return 0; }
+  switch (diff_->head()) {
+  case SyncedMemory::HEAD_AT_CPU:
+    diff = cpu_diff();
+    sumsq = caffe_cpu_dot(count_, diff, diff);
+    break;
+  case SyncedMemory::HEAD_AT_GPU:
+  case SyncedMemory::SYNCED:
+#ifndef CPU_ONLY
+    diff = gpu_diff();
+    caffe_gpu_dot(count_, diff, diff, &sumsq);
+    break;
+#else
+    NO_GPU;
+#endif
+  case SyncedMemory::UNINITIALIZED:
+    return 0;
+  default:
+    LOG(FATAL) << "Unknown SyncedMemory head state: " << data_->head();
+  }
+  return sumsq;
+}
+
+template <> void Blob<unsigned int>::scale_data(unsigned int scale_factor) {
+  NOT_IMPLEMENTED;
+}
+
+template <> void Blob<int>::scale_data(int scale_factor) {
+  NOT_IMPLEMENTED;
+}
+
+template <typename Dtype>
+void Blob<Dtype>::scale_data(Dtype scale_factor) {
+  Dtype* data;
+  if (!data_) { return; }
+  switch (data_->head()) {
+  case SyncedMemory::HEAD_AT_CPU:
+    data = mutable_cpu_data();
+    caffe_scal(count_, scale_factor, data);
+    return;
+  case SyncedMemory::HEAD_AT_GPU:
+  case SyncedMemory::SYNCED:
+#ifndef CPU_ONLY
+    data = mutable_gpu_data();
+    caffe_gpu_scal(count_, scale_factor, data);
+    return;
+#else
+    NO_GPU;
+#endif
+  case SyncedMemory::UNINITIALIZED:
+    return;
+  default:
+    LOG(FATAL) << "Unknown SyncedMemory head state: " << data_->head();
+  }
+}
+
+template <> void Blob<unsigned int>::scale_diff(unsigned int scale_factor) {
+  NOT_IMPLEMENTED;
+}
+
+template <> void Blob<int>::scale_diff(int scale_factor) {
+  NOT_IMPLEMENTED;
+}
+
+template <typename Dtype>
+void Blob<Dtype>::scale_diff(Dtype scale_factor) {
+  Dtype* diff;
+  if (!diff_) { return; }
+  switch (diff_->head()) {
+  case SyncedMemory::HEAD_AT_CPU:
+    diff = mutable_cpu_diff();
+    caffe_scal(count_, scale_factor, diff);
+    return;
+  case SyncedMemory::HEAD_AT_GPU:
+  case SyncedMemory::SYNCED:
+#ifndef CPU_ONLY
+    diff = mutable_gpu_diff();
+    caffe_gpu_scal(count_, scale_factor, diff);
+    return;
+#else
+    NO_GPU;
+#endif
+  case SyncedMemory::UNINITIALIZED:
+    return;
+  default:
+    LOG(FATAL) << "Unknown SyncedMemory head state: " << diff_->head();
+  }
+}
+
+template <typename Dtype>
+bool Blob<Dtype>::ShapeEquals(const BlobProto& other) {
+  if (other.has_num() || other.has_channels() ||
+      other.has_height() || other.has_width()) {
+    // Using deprecated 4D Blob dimensions --
+    // shape is (num, channels, height, width).
+    // Note: we do not use the normal Blob::num(), Blob::channels(), etc.
+    // methods as these index from the beginning of the blob shape, where legacy
+    // parameter blobs were indexed from the end of the blob shape (e.g., bias
+    // Blob shape (1 x 1 x 1 x N), IP layer weight Blob shape (1 x 1 x M x N)).
+    return shape_.size() <= 4 &&
+           LegacyShape(-4) == other.num() &&
+           LegacyShape(-3) == other.channels() &&
+           LegacyShape(-2) == other.height() &&
+           LegacyShape(-1) == other.width();
+  }
+  vector<int> other_shape(other.shape().dim_size());
+  for (int i = 0; i < other.shape().dim_size(); ++i) {
+    other_shape[i] = other.shape().dim(i);
+  }
+  return shape_ == other_shape;
+}
+
+template <typename Dtype>
+void Blob<Dtype>::CopyFrom(const Blob& source, bool copy_diff, bool reshape) {
+  if (source.count() != count_ || source.shape() != shape_) {
+    if (reshape) {
+      ReshapeLike(source);
+    } else {
+      LOG(FATAL) << "Trying to copy blobs of different sizes.";
+    }
+  }
+  switch (Caffe::mode()) {
+  case Caffe::GPU:
+    if (copy_diff) {
+      caffe_copy(count_, source.gpu_diff(),
+          static_cast<Dtype*>(diff_->mutable_gpu_data()));
+    } else {
+      caffe_copy(count_, source.gpu_data(),
+          static_cast<Dtype*>(data_->mutable_gpu_data()));
+    }
+    break;
+  case Caffe::CPU:
+    if (copy_diff) {
+      caffe_copy(count_, source.cpu_diff(),
+          static_cast<Dtype*>(diff_->mutable_cpu_data()));
+    } else {
+      caffe_copy(count_, source.cpu_data(),
+          static_cast<Dtype*>(data_->mutable_cpu_data()));
+    }
+    break;
+  default:
+    LOG(FATAL) << "Unknown caffe mode.";
+  }
+}
+
+template <typename Dtype>
+void Blob<Dtype>::FromProto(const BlobProto& proto, bool reshape) {
+  if (reshape) {
+    vector<int> shape;
+    if (proto.has_num() || proto.has_channels() ||
+        proto.has_height() || proto.has_width()) {
+      // Using deprecated 4D Blob dimensions --
+      // shape is (num, channels, height, width).
+      shape.resize(4);
+      shape[0] = proto.num();
+      shape[1] = proto.channels();
+      shape[2] = proto.height();
+      shape[3] = proto.width();
+    } else {
+      shape.resize(proto.shape().dim_size());
+      for (int i = 0; i < proto.shape().dim_size(); ++i) {
+        shape[i] = proto.shape().dim(i);
+      }
+    }
+    Reshape(shape);
+  } else {
+    CHECK(ShapeEquals(proto)) << "shape mismatch (reshape not set)";
+  }
+  // copy data
+  Dtype* data_vec = mutable_cpu_data();
+  for (int i = 0; i < count_; ++i) {
+    data_vec[i] = proto.data(i);
+  }
+  if (proto.diff_size() > 0) {
+    Dtype* diff_vec = mutable_cpu_diff();
+    for (int i = 0; i < count_; ++i) {
+      diff_vec[i] = proto.diff(i);
+    }
+  }
+}
+
+template <typename Dtype>
+void Blob<Dtype>::ToProto(BlobProto* proto, bool write_diff) const {
+  proto->clear_shape();
+  for (int i = 0; i < shape_.size(); ++i) {
+    proto->mutable_shape()->add_dim(shape_[i]);
+  }
+  proto->clear_data();
+  proto->clear_diff();
+  const Dtype* data_vec = cpu_data();
+  for (int i = 0; i < count_; ++i) {
+    proto->add_data(data_vec[i]);
+  }
+  if (write_diff) {
+    const Dtype* diff_vec = cpu_diff();
+    for (int i = 0; i < count_; ++i) {
+      proto->add_diff(diff_vec[i]);
+    }
+  }
+}
+
+INSTANTIATE_CLASS(Blob);
+template class Blob<int>;
+template class Blob<unsigned int>;
+
+}  // namespace caffe
+
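Blob<Dtype>::Update() above applies the gradient step data := data - diff through an axpy call with alpha = -1, on whichever device currently holds the data. A minimal Python sketch of the same arithmetic on plain lists (no Caffe API involved):

    def axpy(alpha, x, y):
        """y := alpha * x + y element-wise, mirroring caffe_axpy."""
        return [alpha * xi + yi for xi, yi in zip(x, y)]

    data = [0.5, -1.0, 2.0]
    diff = [0.1, -0.2, 0.4]          # gradients from the backward pass
    data = axpy(-1.0, diff, data)    # parameter update: data -= diff
    # data is now [0.4, -0.8, 1.6]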
diff --git a/src/caffe/common.cpp b/src/caffe/common.cpp
new file mode 100644
index 0000000..af96cac
--- /dev/null
+++ b/src/caffe/common.cpp
@@ -0,0 +1,271 @@
+#include <glog/logging.h>
+#include <cstdio>
+#include <ctime>
+
+#include "caffe/common.hpp"
+#include "caffe/util/rng.hpp"
+
+namespace caffe {
+
+shared_ptr<Caffe> Caffe::singleton_;
+
+// random seeding
+int64_t cluster_seedgen(void) {
+  int64_t s, seed, pid;
+  FILE* f = fopen("/dev/urandom", "rb");
+  if (f && fread(&seed, 1, sizeof(seed), f) == sizeof(seed)) {
+    fclose(f);
+    return seed;
+  }
+
+  LOG(INFO) << "System entropy source not available, "
+              "using fallback algorithm to generate seed instead.";
+  if (f)
+    fclose(f);
+
+  pid = getpid();
+  s = time(NULL);
+  seed = abs(((s * 181) * ((pid - 83) * 359)) % 104729);
+  return seed;
+}
+
+
+void GlobalInit(int* pargc, char*** pargv) {
+  // Google flags.
+  ::gflags::ParseCommandLineFlags(pargc, pargv, true);
+  // Google logging.
+  ::google::InitGoogleLogging(*(pargv)[0]);
+  // Provide a backtrace on segfault.
+  ::google::InstallFailureSignalHandler();
+}
+
+#ifdef CPU_ONLY  // CPU-only Caffe.
+
+Caffe::Caffe()
+    : random_generator_(), mode_(Caffe::CPU) { }
+
+Caffe::~Caffe() { }
+
+void Caffe::set_random_seed(const unsigned int seed) {
+  // RNG seed
+  Get().random_generator_.reset(new RNG(seed));
+}
+
+void Caffe::SetDevice(const int device_id) {
+  NO_GPU;
+}
+
+void Caffe::DeviceQuery() {
+  NO_GPU;
+}
+
+
+class Caffe::RNG::Generator {
+ public:
+  Generator() : rng_(new caffe::rng_t(cluster_seedgen())) {}
+  explicit Generator(unsigned int seed) : rng_(new caffe::rng_t(seed)) {}
+  caffe::rng_t* rng() { return rng_.get(); }
+ private:
+  shared_ptr<caffe::rng_t> rng_;
+};
+
+Caffe::RNG::RNG() : generator_(new Generator()) { }
+
+Caffe::RNG::RNG(unsigned int seed) : generator_(new Generator(seed)) { }
+
+Caffe::RNG& Caffe::RNG::operator=(const RNG& other) {
+  generator_ = other.generator_;
+  return *this;
+}
+
+void* Caffe::RNG::generator() {
+  return static_cast<void*>(generator_->rng());
+}
+
+#else  // Normal GPU + CPU Caffe.
+
+Caffe::Caffe()
+    : cublas_handle_(NULL), curand_generator_(NULL), random_generator_(),
+    mode_(Caffe::CPU) {
+  // Try to create a cublas handler, and report an error if failed (but we will
+  // keep the program running as one might just want to run CPU code).
+  if (cublasCreate(&cublas_handle_) != CUBLAS_STATUS_SUCCESS) {
+    LOG(ERROR) << "Cannot create Cublas handle. Cublas won't be available.";
+  }
+  // Try to create a curand handler.
+  if (curandCreateGenerator(&curand_generator_, CURAND_RNG_PSEUDO_DEFAULT)
+      != CURAND_STATUS_SUCCESS ||
+      curandSetPseudoRandomGeneratorSeed(curand_generator_, cluster_seedgen())
+      != CURAND_STATUS_SUCCESS) {
+    LOG(ERROR) << "Cannot create Curand generator. Curand won't be available.";
+  }
+}
+
+Caffe::~Caffe() {
+  if (cublas_handle_) CUBLAS_CHECK(cublasDestroy(cublas_handle_));
+  if (curand_generator_) {
+    CURAND_CHECK(curandDestroyGenerator(curand_generator_));
+  }
+}
+
+void Caffe::set_random_seed(const unsigned int seed) {
+  // Curand seed
+  static bool g_curand_availability_logged = false;
+  if (Get().curand_generator_) {
+    CURAND_CHECK(curandSetPseudoRandomGeneratorSeed(curand_generator(),
+        seed));
+    CURAND_CHECK(curandSetGeneratorOffset(curand_generator(), 0));
+  } else {
+    if (!g_curand_availability_logged) {
+        LOG(ERROR) <<
+            "Curand not available. Skipping setting the curand seed.";
+        g_curand_availability_logged = true;
+    }
+  }
+  // RNG seed
+  Get().random_generator_.reset(new RNG(seed));
+}
+
+void Caffe::SetDevice(const int device_id) {
+  int current_device;
+  CUDA_CHECK(cudaGetDevice(&current_device));
+  if (current_device == device_id) {
+    return;
+  }
+  // The call to cudaSetDevice must come before any calls to Get, which
+  // may perform initialization using the GPU.
+  CUDA_CHECK(cudaSetDevice(device_id));
+  if (Get().cublas_handle_) CUBLAS_CHECK(cublasDestroy(Get().cublas_handle_));
+  if (Get().curand_generator_) {
+    CURAND_CHECK(curandDestroyGenerator(Get().curand_generator_));
+  }
+  CUBLAS_CHECK(cublasCreate(&Get().cublas_handle_));
+  CURAND_CHECK(curandCreateGenerator(&Get().curand_generator_,
+      CURAND_RNG_PSEUDO_DEFAULT));
+  CURAND_CHECK(curandSetPseudoRandomGeneratorSeed(Get().curand_generator_,
+      cluster_seedgen()));
+}
+
+void Caffe::DeviceQuery() {
+  cudaDeviceProp prop;
+  int device;
+  if (cudaSuccess != cudaGetDevice(&device)) {
+    printf("No cuda device present.\n");
+    return;
+  }
+  CUDA_CHECK(cudaGetDeviceProperties(&prop, device));
+  LOG(INFO) << "Device id:                     " << device;
+  LOG(INFO) << "Major revision number:         " << prop.major;
+  LOG(INFO) << "Minor revision number:         " << prop.minor;
+  LOG(INFO) << "Name:                          " << prop.name;
+  LOG(INFO) << "Total global memory:           " << prop.totalGlobalMem;
+  LOG(INFO) << "Total shared memory per block: " << prop.sharedMemPerBlock;
+  LOG(INFO) << "Total registers per block:     " << prop.regsPerBlock;
+  LOG(INFO) << "Warp size:                     " << prop.warpSize;
+  LOG(INFO) << "Maximum memory pitch:          " << prop.memPitch;
+  LOG(INFO) << "Maximum threads per block:     " << prop.maxThreadsPerBlock;
+  LOG(INFO) << "Maximum dimension of block:    "
+      << prop.maxThreadsDim[0] << ", " << prop.maxThreadsDim[1] << ", "
+      << prop.maxThreadsDim[2];
+  LOG(INFO) << "Maximum dimension of grid:     "
+      << prop.maxGridSize[0] << ", " << prop.maxGridSize[1] << ", "
+      << prop.maxGridSize[2];
+  LOG(INFO) << "Clock rate:                    " << prop.clockRate;
+  LOG(INFO) << "Total constant memory:         " << prop.totalConstMem;
+  LOG(INFO) << "Texture alignment:             " << prop.textureAlignment;
+  LOG(INFO) << "Concurrent copy and execution: "
+      << (prop.deviceOverlap ? "Yes" : "No");
+  LOG(INFO) << "Number of multiprocessors:     " << prop.multiProcessorCount;
+  LOG(INFO) << "Kernel execution timeout:      "
+      << (prop.kernelExecTimeoutEnabled ? "Yes" : "No");
+  return;
+}
+
+
+class Caffe::RNG::Generator {
+ public:
+  Generator() : rng_(new caffe::rng_t(cluster_seedgen())) {}
+  explicit Generator(unsigned int seed) : rng_(new caffe::rng_t(seed)) {}
+  caffe::rng_t* rng() { return rng_.get(); }
+ private:
+  shared_ptr<caffe::rng_t> rng_;
+};
+
+Caffe::RNG::RNG() : generator_(new Generator()) { }
+
+Caffe::RNG::RNG(unsigned int seed) : generator_(new Generator(seed)) { }
+
+Caffe::RNG& Caffe::RNG::operator=(const RNG& other) {
+  generator_ = other.generator_;
+  return *this;
+}
+
+void* Caffe::RNG::generator() {
+  return static_cast<void*>(generator_->rng());
+}
+
+const char* cublasGetErrorString(cublasStatus_t error) {
+  switch (error) {
+  case CUBLAS_STATUS_SUCCESS:
+    return "CUBLAS_STATUS_SUCCESS";
+  case CUBLAS_STATUS_NOT_INITIALIZED:
+    return "CUBLAS_STATUS_NOT_INITIALIZED";
+  case CUBLAS_STATUS_ALLOC_FAILED:
+    return "CUBLAS_STATUS_ALLOC_FAILED";
+  case CUBLAS_STATUS_INVALID_VALUE:
+    return "CUBLAS_STATUS_INVALID_VALUE";
+  case CUBLAS_STATUS_ARCH_MISMATCH:
+    return "CUBLAS_STATUS_ARCH_MISMATCH";
+  case CUBLAS_STATUS_MAPPING_ERROR:
+    return "CUBLAS_STATUS_MAPPING_ERROR";
+  case CUBLAS_STATUS_EXECUTION_FAILED:
+    return "CUBLAS_STATUS_EXECUTION_FAILED";
+  case CUBLAS_STATUS_INTERNAL_ERROR:
+    return "CUBLAS_STATUS_INTERNAL_ERROR";
+#if CUDA_VERSION >= 6000
+  case CUBLAS_STATUS_NOT_SUPPORTED:
+    return "CUBLAS_STATUS_NOT_SUPPORTED";
+#endif
+#if CUDA_VERSION >= 6050
+  case CUBLAS_STATUS_LICENSE_ERROR:
+    return "CUBLAS_STATUS_LICENSE_ERROR";
+#endif
+  }
+  return "Unknown cublas status";
+}
+
+const char* curandGetErrorString(curandStatus_t error) {
+  switch (error) {
+  case CURAND_STATUS_SUCCESS:
+    return "CURAND_STATUS_SUCCESS";
+  case CURAND_STATUS_VERSION_MISMATCH:
+    return "CURAND_STATUS_VERSION_MISMATCH";
+  case CURAND_STATUS_NOT_INITIALIZED:
+    return "CURAND_STATUS_NOT_INITIALIZED";
+  case CURAND_STATUS_ALLOCATION_FAILED:
+    return "CURAND_STATUS_ALLOCATION_FAILED";
+  case CURAND_STATUS_TYPE_ERROR:
+    return "CURAND_STATUS_TYPE_ERROR";
+  case CURAND_STATUS_OUT_OF_RANGE:
+    return "CURAND_STATUS_OUT_OF_RANGE";
+  case CURAND_STATUS_LENGTH_NOT_MULTIPLE:
+    return "CURAND_STATUS_LENGTH_NOT_MULTIPLE";
+  case CURAND_STATUS_DOUBLE_PRECISION_REQUIRED:
+    return "CURAND_STATUS_DOUBLE_PRECISION_REQUIRED";
+  case CURAND_STATUS_LAUNCH_FAILURE:
+    return "CURAND_STATUS_LAUNCH_FAILURE";
+  case CURAND_STATUS_PREEXISTING_FAILURE:
+    return "CURAND_STATUS_PREEXISTING_FAILURE";
+  case CURAND_STATUS_INITIALIZATION_FAILED:
+    return "CURAND_STATUS_INITIALIZATION_FAILED";
+  case CURAND_STATUS_ARCH_MISMATCH:
+    return "CURAND_STATUS_ARCH_MISMATCH";
+  case CURAND_STATUS_INTERNAL_ERROR:
+    return "CURAND_STATUS_INTERNAL_ERROR";
+  }
+  return "Unknown curand status";
+}
+
+#endif  // CPU_ONLY
+
+}  // namespace caffe
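When /dev/urandom cannot be read, cluster_seedgen() above falls back to mixing the current time and the process id, reduced modulo the prime 104729. A small Python sketch of that fallback arithmetic:

    import os
    import time

    def fallback_seed():
        """Mirror the fallback branch of cluster_seedgen(): mix time and pid."""
        s = int(time.time())
        pid = os.getpid()
        return abs(((s * 181) * ((pid - 83) * 359)) % 104729)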
diff --git a/src/caffe/data_transformer.cpp b/src/caffe/data_transformer.cpp
new file mode 100644
index 0000000..2263392
--- /dev/null
+++ b/src/caffe/data_transformer.cpp
@@ -0,0 +1,529 @@
+#include <opencv2/core/core.hpp>
+
+#include <string>
+#include <vector>
+
+#include "caffe/data_transformer.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/util/rng.hpp"
+
+namespace caffe {
+
+template<typename Dtype>
+DataTransformer<Dtype>::DataTransformer(const TransformationParameter& param,
+    Phase phase)
+    : param_(param), phase_(phase) {
+  // check if we want to use mean_file
+  if (param_.has_mean_file()) {
+    CHECK_EQ(param_.mean_value_size(), 0) <<
+      "Cannot specify mean_file and mean_value at the same time";
+    const string& mean_file = param.mean_file();
+    LOG(INFO) << "Loading mean file from: " << mean_file;
+    BlobProto blob_proto;
+    ReadProtoFromBinaryFileOrDie(mean_file.c_str(), &blob_proto);
+    data_mean_.FromProto(blob_proto);
+  }
+  // check if we want to use mean_value
+  if (param_.mean_value_size() > 0) {
+    CHECK(param_.has_mean_file() == false) <<
+      "Cannot specify mean_file and mean_value at the same time";
+    for (int c = 0; c < param_.mean_value_size(); ++c) {
+      mean_values_.push_back(param_.mean_value(c));
+    }
+  }
+}
+
+template<typename Dtype>
+void DataTransformer<Dtype>::Transform(const Datum& datum,
+                                       Dtype* transformed_data) {
+  const string& data = datum.data();
+  const int datum_channels = datum.channels();
+  const int datum_height = datum.height();
+  const int datum_width = datum.width();
+
+  const int crop_size = param_.crop_size();
+  const Dtype scale = param_.scale();
+  const bool do_mirror = param_.mirror() && Rand(2);
+  const bool has_mean_file = param_.has_mean_file();
+  const bool has_uint8 = data.size() > 0;
+  const bool has_mean_values = mean_values_.size() > 0;
+
+  CHECK_GT(datum_channels, 0);
+  CHECK_GE(datum_height, crop_size);
+  CHECK_GE(datum_width, crop_size);
+
+  Dtype* mean = NULL;
+  if (has_mean_file) {
+    CHECK_EQ(datum_channels, data_mean_.channels());
+    CHECK_EQ(datum_height, data_mean_.height());
+    CHECK_EQ(datum_width, data_mean_.width());
+    mean = data_mean_.mutable_cpu_data();
+  }
+  if (has_mean_values) {
+    CHECK(mean_values_.size() == 1 || mean_values_.size() == datum_channels) <<
+     "Specify either 1 mean_value or as many as channels: " << datum_channels;
+    if (datum_channels > 1 && mean_values_.size() == 1) {
+      // Replicate the mean_value for simplicity
+      for (int c = 1; c < datum_channels; ++c) {
+        mean_values_.push_back(mean_values_[0]);
+      }
+    }
+  }
+
+  int height = datum_height;
+  int width = datum_width;
+
+  int h_off = 0;
+  int w_off = 0;
+  if (crop_size) {
+    height = crop_size;
+    width = crop_size;
+    // We only do random crop when we do training.
+    if (phase_ == TRAIN) {
+      h_off = Rand(datum_height - crop_size + 1);
+      w_off = Rand(datum_width - crop_size + 1);
+    } else {
+      h_off = (datum_height - crop_size) / 2;
+      w_off = (datum_width - crop_size) / 2;
+    }
+  }
+
+  Dtype datum_element;
+  int top_index, data_index;
+  for (int c = 0; c < datum_channels; ++c) {
+    for (int h = 0; h < height; ++h) {
+      for (int w = 0; w < width; ++w) {
+        data_index = (c * datum_height + h_off + h) * datum_width + w_off + w;
+        if (do_mirror) {
+          top_index = (c * height + h) * width + (width - 1 - w);
+        } else {
+          top_index = (c * height + h) * width + w;
+        }
+        if (has_uint8) {
+          datum_element =
+            static_cast<Dtype>(static_cast<uint8_t>(data[data_index]));
+        } else {
+          datum_element = datum.float_data(data_index);
+        }
+        if (has_mean_file) {
+          transformed_data[top_index] =
+            (datum_element - mean[data_index]) * scale;
+        } else {
+          if (has_mean_values) {
+            transformed_data[top_index] =
+              (datum_element - mean_values_[c]) * scale;
+          } else {
+            transformed_data[top_index] = datum_element * scale;
+          }
+        }
+      }
+    }
+  }
+}
+
+template<typename Dtype>
+void DataTransformer<Dtype>::Transform(const Datum& datum,
+                                       Blob<Dtype>* transformed_blob) {
+  // If the datum is encoded, decode and transform the cv::image.
+  if (datum.encoded()) {
+    CHECK(!(param_.force_color() && param_.force_gray()))
+        << "cannot set both force_color and force_gray";
+    cv::Mat cv_img;
+    if (param_.force_color() || param_.force_gray()) {
+      // If force_color is set, decode in color; otherwise decode in gray.
+      cv_img = DecodeDatumToCVMat(datum, param_.force_color());
+    } else {
+      cv_img = DecodeDatumToCVMatNative(datum);
+    }
+    // Transform the cv::image into blob.
+    return Transform(cv_img, transformed_blob);
+  } else {
+    if (param_.force_color() || param_.force_gray()) {
+      LOG(ERROR) << "force_color and force_gray only for encoded datum";
+    }
+  }
+
+  const int crop_size = param_.crop_size();
+  const int datum_channels = datum.channels();
+  const int datum_height = datum.height();
+  const int datum_width = datum.width();
+
+  // Check dimensions.
+  const int channels = transformed_blob->channels();
+  const int height = transformed_blob->height();
+  const int width = transformed_blob->width();
+  const int num = transformed_blob->num();
+
+  CHECK_EQ(channels, datum_channels);
+  CHECK_LE(height, datum_height);
+  CHECK_LE(width, datum_width);
+  CHECK_GE(num, 1);
+
+  if (crop_size) {
+    CHECK_EQ(crop_size, height);
+    CHECK_EQ(crop_size, width);
+  } else {
+    CHECK_EQ(datum_height, height);
+    CHECK_EQ(datum_width, width);
+  }
+
+  Dtype* transformed_data = transformed_blob->mutable_cpu_data();
+  Transform(datum, transformed_data);
+}
+
+template<typename Dtype>
+void DataTransformer<Dtype>::Transform(const vector<Datum> & datum_vector,
+                                       Blob<Dtype>* transformed_blob) {
+  const int datum_num = datum_vector.size();
+  const int num = transformed_blob->num();
+  const int channels = transformed_blob->channels();
+  const int height = transformed_blob->height();
+  const int width = transformed_blob->width();
+
+  CHECK_GT(datum_num, 0) << "There is no datum to add";
+  CHECK_LE(datum_num, num) <<
+    "The size of datum_vector must be no greater than transformed_blob->num()";
+  Blob<Dtype> uni_blob(1, channels, height, width);
+  for (int item_id = 0; item_id < datum_num; ++item_id) {
+    int offset = transformed_blob->offset(item_id);
+    uni_blob.set_cpu_data(transformed_blob->mutable_cpu_data() + offset);
+    Transform(datum_vector[item_id], &uni_blob);
+  }
+}
+
+template<typename Dtype>
+void DataTransformer<Dtype>::Transform(const vector<cv::Mat> & mat_vector,
+                                       Blob<Dtype>* transformed_blob) {
+  const int mat_num = mat_vector.size();
+  const int num = transformed_blob->num();
+  const int channels = transformed_blob->channels();
+  const int height = transformed_blob->height();
+  const int width = transformed_blob->width();
+
+  CHECK_GT(mat_num, 0) << "There is no MAT to add";
+  CHECK_EQ(mat_num, num) <<
+    "The size of mat_vector must be equals to transformed_blob->num()";
+  Blob<Dtype> uni_blob(1, channels, height, width);
+  for (int item_id = 0; item_id < mat_num; ++item_id) {
+    int offset = transformed_blob->offset(item_id);
+    uni_blob.set_cpu_data(transformed_blob->mutable_cpu_data() + offset);
+    Transform(mat_vector[item_id], &uni_blob);
+  }
+}
+
+template<typename Dtype>
+void DataTransformer<Dtype>::Transform(const cv::Mat& cv_img,
+                                       Blob<Dtype>* transformed_blob) {
+  const int crop_size = param_.crop_size();
+  const int img_channels = cv_img.channels();
+  const int img_height = cv_img.rows;
+  const int img_width = cv_img.cols;
+
+  // Check dimensions.
+  const int channels = transformed_blob->channels();
+  const int height = transformed_blob->height();
+  const int width = transformed_blob->width();
+  const int num = transformed_blob->num();
+
+  CHECK_EQ(channels, img_channels);
+  CHECK_LE(height, img_height);
+  CHECK_LE(width, img_width);
+  CHECK_GE(num, 1);
+
+  CHECK(cv_img.depth() == CV_8U) << "Image data type must be unsigned byte";
+
+  const Dtype scale = param_.scale();
+  const bool do_mirror = param_.mirror() && Rand(2);
+  const bool has_mean_file = param_.has_mean_file();
+  const bool has_mean_values = mean_values_.size() > 0;
+
+  CHECK_GT(img_channels, 0);
+  CHECK_GE(img_height, crop_size);
+  CHECK_GE(img_width, crop_size);
+
+  Dtype* mean = NULL;
+  if (has_mean_file) {
+    CHECK_EQ(img_channels, data_mean_.channels());
+    CHECK_EQ(img_height, data_mean_.height());
+    CHECK_EQ(img_width, data_mean_.width());
+    mean = data_mean_.mutable_cpu_data();
+  }
+  if (has_mean_values) {
+    CHECK(mean_values_.size() == 1 || mean_values_.size() == img_channels) <<
+     "Specify either 1 mean_value or as many as channels: " << img_channels;
+    if (img_channels > 1 && mean_values_.size() == 1) {
+      // Replicate the mean_value for simplicity
+      for (int c = 1; c < img_channels; ++c) {
+        mean_values_.push_back(mean_values_[0]);
+      }
+    }
+  }
+
+  int h_off = 0;
+  int w_off = 0;
+  cv::Mat cv_cropped_img = cv_img;
+  if (crop_size) {
+    CHECK_EQ(crop_size, height);
+    CHECK_EQ(crop_size, width);
+    // We only do random crop when we do training.
+    if (phase_ == TRAIN) {
+      h_off = Rand(img_height - crop_size + 1);
+      w_off = Rand(img_width - crop_size + 1);
+    } else {
+      h_off = (img_height - crop_size) / 2;
+      w_off = (img_width - crop_size) / 2;
+    }
+    cv::Rect roi(w_off, h_off, crop_size, crop_size);
+    cv_cropped_img = cv_img(roi);
+  } else {
+    CHECK_EQ(img_height, height);
+    CHECK_EQ(img_width, width);
+  }
+
+  CHECK(cv_cropped_img.data);
+
+  Dtype* transformed_data = transformed_blob->mutable_cpu_data();
+  int top_index;
+  for (int h = 0; h < height; ++h) {
+    const uchar* ptr = cv_cropped_img.ptr<uchar>(h);
+    int img_index = 0;
+    for (int w = 0; w < width; ++w) {
+      for (int c = 0; c < img_channels; ++c) {
+        if (do_mirror) {
+          top_index = (c * height + h) * width + (width - 1 - w);
+        } else {
+          top_index = (c * height + h) * width + w;
+        }
+        // int top_index = (c * height + h) * width + w;
+        Dtype pixel = static_cast<Dtype>(ptr[img_index++]);
+        if (has_mean_file) {
+          int mean_index = (c * img_height + h_off + h) * img_width + w_off + w;
+          transformed_data[top_index] =
+            (pixel - mean[mean_index]) * scale;
+        } else {
+          if (has_mean_values) {
+            transformed_data[top_index] =
+              (pixel - mean_values_[c]) * scale;
+          } else {
+            transformed_data[top_index] = pixel * scale;
+          }
+        }
+      }
+    }
+  }
+}
+
+template<typename Dtype>
+void DataTransformer<Dtype>::Transform(Blob<Dtype>* input_blob,
+                                       Blob<Dtype>* transformed_blob) {
+  const int crop_size = param_.crop_size();
+  const int input_num = input_blob->num();
+  const int input_channels = input_blob->channels();
+  const int input_height = input_blob->height();
+  const int input_width = input_blob->width();
+
+  if (transformed_blob->count() == 0) {
+    // Initialize transformed_blob with the right shape.
+    if (crop_size) {
+      transformed_blob->Reshape(input_num, input_channels,
+                                crop_size, crop_size);
+    } else {
+      transformed_blob->Reshape(input_num, input_channels,
+                                input_height, input_width);
+    }
+  }
+
+  const int num = transformed_blob->num();
+  const int channels = transformed_blob->channels();
+  const int height = transformed_blob->height();
+  const int width = transformed_blob->width();
+  const int size = transformed_blob->count();
+
+  CHECK_LE(input_num, num);
+  CHECK_EQ(input_channels, channels);
+  CHECK_GE(input_height, height);
+  CHECK_GE(input_width, width);
+
+
+  const Dtype scale = param_.scale();
+  const bool do_mirror = param_.mirror() && Rand(2);
+  const bool has_mean_file = param_.has_mean_file();
+  const bool has_mean_values = mean_values_.size() > 0;
+
+  int h_off = 0;
+  int w_off = 0;
+  if (crop_size) {
+    CHECK_EQ(crop_size, height);
+    CHECK_EQ(crop_size, width);
+    // We only do random crop when we do training.
+    if (phase_ == TRAIN) {
+      h_off = Rand(input_height - crop_size + 1);
+      w_off = Rand(input_width - crop_size + 1);
+    } else {
+      h_off = (input_height - crop_size) / 2;
+      w_off = (input_width - crop_size) / 2;
+    }
+  } else {
+    CHECK_EQ(input_height, height);
+    CHECK_EQ(input_width, width);
+  }
+
+  Dtype* input_data = input_blob->mutable_cpu_data();
+  if (has_mean_file) {
+    CHECK_EQ(input_channels, data_mean_.channels());
+    CHECK_EQ(input_height, data_mean_.height());
+    CHECK_EQ(input_width, data_mean_.width());
+    for (int n = 0; n < input_num; ++n) {
+      int offset = input_blob->offset(n);
+      caffe_sub(data_mean_.count(), input_data + offset,
+            data_mean_.cpu_data(), input_data + offset);
+    }
+  }
+
+  if (has_mean_values) {
+    CHECK(mean_values_.size() == 1 || mean_values_.size() == input_channels) <<
+     "Specify either 1 mean_value or as many as channels: " << input_channels;
+    if (mean_values_.size() == 1) {
+      caffe_add_scalar(input_blob->count(), -(mean_values_[0]), input_data);
+    } else {
+      for (int n = 0; n < input_num; ++n) {
+        for (int c = 0; c < input_channels; ++c) {
+          int offset = input_blob->offset(n, c);
+          caffe_add_scalar(input_height * input_width, -(mean_values_[c]),
+            input_data + offset);
+        }
+      }
+    }
+  }
+
+  Dtype* transformed_data = transformed_blob->mutable_cpu_data();
+
+  for (int n = 0; n < input_num; ++n) {
+    int top_index_n = n * channels;
+    int data_index_n = n * channels;
+    for (int c = 0; c < channels; ++c) {
+      int top_index_c = (top_index_n + c) * height;
+      int data_index_c = (data_index_n + c) * input_height + h_off;
+      for (int h = 0; h < height; ++h) {
+        int top_index_h = (top_index_c + h) * width;
+        int data_index_h = (data_index_c + h) * input_width + w_off;
+        if (do_mirror) {
+          int top_index_w = top_index_h + width - 1;
+          for (int w = 0; w < width; ++w) {
+            transformed_data[top_index_w-w] = input_data[data_index_h + w];
+          }
+        } else {
+          for (int w = 0; w < width; ++w) {
+            transformed_data[top_index_h + w] = input_data[data_index_h + w];
+          }
+        }
+      }
+    }
+  }
+  if (scale != Dtype(1)) {
+    DLOG(INFO) << "Scale: " << scale;
+    caffe_scal(size, scale, transformed_data);
+  }
+}
+
+template<typename Dtype>
+vector<int> DataTransformer<Dtype>::InferBlobShape(const Datum& datum) {
+  if (datum.encoded()) {
+    CHECK(!(param_.force_color() && param_.force_gray()))
+        << "cannot set both force_color and force_gray";
+    cv::Mat cv_img;
+    if (param_.force_color() || param_.force_gray()) {
+      // If force_color is set, decode in color; otherwise decode in gray.
+      cv_img = DecodeDatumToCVMat(datum, param_.force_color());
+    } else {
+      cv_img = DecodeDatumToCVMatNative(datum);
+    }
+    // Infer the blob shape from the decoded cv::Mat.
+    return InferBlobShape(cv_img);
+  }
+
+  const int crop_size = param_.crop_size();
+  const int datum_channels = datum.channels();
+  const int datum_height = datum.height();
+  const int datum_width = datum.width();
+  // Check dimensions.
+  CHECK_GT(datum_channels, 0);
+  CHECK_GE(datum_height, crop_size);
+  CHECK_GE(datum_width, crop_size);
+  // Build BlobShape.
+  vector<int> shape(4);
+  shape[0] = 1;
+  shape[1] = datum_channels;
+  shape[2] = (crop_size)? crop_size: datum_height;
+  shape[3] = (crop_size)? crop_size: datum_width;
+  return shape;
+}
+
+template<typename Dtype>
+vector<int> DataTransformer<Dtype>::InferBlobShape(
+    const vector<Datum> & datum_vector) {
+  const int num = datum_vector.size();
+  CHECK_GT(num, 0) << "There is no datum in the vector";
+  // Use first datum in the vector to InferBlobShape.
+  vector<int> shape = InferBlobShape(datum_vector[0]);
+  // Adjust num to the size of the vector.
+  shape[0] = num;
+  return shape;
+}
+
+template<typename Dtype>
+vector<int> DataTransformer<Dtype>::InferBlobShape(const cv::Mat& cv_img) {
+  const int crop_size = param_.crop_size();
+  const int img_channels = cv_img.channels();
+  const int img_height = cv_img.rows;
+  const int img_width = cv_img.cols;
+  // Check dimensions.
+  CHECK_GT(img_channels, 0);
+  CHECK_GE(img_height, crop_size);
+  CHECK_GE(img_width, crop_size);
+  // Build BlobShape.
+  vector<int> shape(4);
+  shape[0] = 1;
+  shape[1] = img_channels;
+  shape[2] = (crop_size)? crop_size: img_height;
+  shape[3] = (crop_size)? crop_size: img_width;
+  return shape;
+}
+
+template<typename Dtype>
+vector<int> DataTransformer<Dtype>::InferBlobShape(
+    const vector<cv::Mat> & mat_vector) {
+  const int num = mat_vector.size();
+  CHECK_GT(num, 0) << "There is no cv_img in the vector";
+  // Use first cv_img in the vector to InferBlobShape.
+  vector<int> shape = InferBlobShape(mat_vector[0]);
+  // Adjust num to the size of the vector.
+  shape[0] = num;
+  return shape;
+}
+
+template <typename Dtype>
+void DataTransformer<Dtype>::InitRand() {
+  const bool needs_rand = param_.mirror() ||
+      (phase_ == TRAIN && param_.crop_size());
+  if (needs_rand) {
+    const unsigned int rng_seed = caffe_rng_rand();
+    rng_.reset(new Caffe::RNG(rng_seed));
+  } else {
+    rng_.reset();
+  }
+}
+
+template <typename Dtype>
+int DataTransformer<Dtype>::Rand(int n) {
+  CHECK(rng_);
+  CHECK_GT(n, 0);
+  caffe::rng_t* rng =
+      static_cast<caffe::rng_t*>(rng_->generator());
+  return ((*rng)() % n);
+}
+
+INSTANTIATE_CLASS(DataTransformer);
+
+}  // namespace caffe
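A rough sketch of how a caller is expected to drive the transformer above, assuming the
DataTransformer and Blob interfaces declared in caffe/data_transformer.hpp and
caffe/blob.hpp; the Datum is assumed to have been read from a database already, and the
parameter values are only illustrative:

#include <vector>
#include "caffe/blob.hpp"
#include "caffe/data_transformer.hpp"
#include "caffe/proto/caffe.pb.h"

void TransformOne(const caffe::Datum& datum) {
  caffe::TransformationParameter param;   // e.g. crop_size, mirror, mean_value
  caffe::DataTransformer<float> transformer(param, caffe::TEST);
  transformer.InitRand();                 // seeds the RNG used for mirroring/cropping
  // Shape the destination blob first: 1 x channels x (crop|height) x (crop|width).
  std::vector<int> shape = transformer.InferBlobShape(datum);
  caffe::Blob<float> top;
  top.Reshape(shape);
  // Mean subtraction, cropping, mirroring and scaling happen here.
  transformer.Transform(datum, &top);
}
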
diff --git a/src/caffe/internal_thread.cpp b/src/caffe/internal_thread.cpp
new file mode 100644
index 0000000..c2d19d4
--- /dev/null
+++ b/src/caffe/internal_thread.cpp
@@ -0,0 +1,40 @@
+#include <boost/thread.hpp>
+#include "caffe/internal_thread.hpp"
+
+namespace caffe {
+
+InternalThread::~InternalThread() {
+  WaitForInternalThreadToExit();
+}
+
+bool InternalThread::is_started() const {
+  return thread_.get() != NULL && thread_->joinable();
+}
+
+
+bool InternalThread::StartInternalThread() {
+  if (!WaitForInternalThreadToExit()) {
+    return false;
+  }
+  try {
+    thread_.reset(
+        new boost::thread(&InternalThread::InternalThreadEntry, this));
+  } catch (...) {
+    return false;
+  }
+  return true;
+}
+
+/** Will not return until the internal thread has exited. */
+bool InternalThread::WaitForInternalThreadToExit() {
+  if (is_started()) {
+    try {
+      thread_->join();
+    } catch (...) {
+      return false;
+    }
+  }
+  return true;
+}
+
+}  // namespace caffe
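InternalThread is meant to be subclassed: the derived class supplies the thread body by
overriding the protected virtual InternalThreadEntry() hook declared in
caffe/internal_thread.hpp, while the base class owns the boost::thread shown above. A
minimal sketch of that pattern (the worker name and body are only illustrative):

#include "caffe/internal_thread.hpp"

class ExampleWorker : public caffe::InternalThread {
 protected:
  // Runs on the internal thread between StartInternalThread() and
  // WaitForInternalThreadToExit() / destruction.
  virtual void InternalThreadEntry() {
    // ... e.g. prefetch and decode a batch of data ...
  }
};

// ExampleWorker worker;
// worker.StartInternalThread();          // returns false if the thread could not start
// ...
// worker.WaitForInternalThreadToExit();  // also invoked by ~InternalThread()
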
diff --git a/src/caffe/layer_factory.cpp b/src/caffe/layer_factory.cpp
new file mode 100644
index 0000000..d6a1cac
--- /dev/null
+++ b/src/caffe/layer_factory.cpp
@@ -0,0 +1,179 @@
+#include <string>
+
+#include "caffe/layer.hpp"
+#include "caffe/layer_factory.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/vision_layers.hpp"
+
+#ifdef WITH_PYTHON_LAYER
+#include "caffe/python_layer.hpp"
+#endif
+
+namespace caffe {
+
+// Get convolution layer according to engine.
+template <typename Dtype>
+shared_ptr<Layer<Dtype> > GetConvolutionLayer(
+    const LayerParameter& param) {
+  ConvolutionParameter_Engine engine = param.convolution_param().engine();
+  if (engine == ConvolutionParameter_Engine_DEFAULT) {
+    engine = ConvolutionParameter_Engine_CAFFE;
+#ifdef USE_CUDNN
+    engine = ConvolutionParameter_Engine_CUDNN;
+#endif
+  }
+  if (engine == ConvolutionParameter_Engine_CAFFE) {
+    return shared_ptr<Layer<Dtype> >(new ConvolutionLayer<Dtype>(param));
+#ifdef USE_CUDNN
+  } else if (engine == ConvolutionParameter_Engine_CUDNN) {
+    return shared_ptr<Layer<Dtype> >(new CuDNNConvolutionLayer<Dtype>(param));
+#endif
+  } else {
+    LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
+  }
+}
+
+REGISTER_LAYER_CREATOR(Convolution, GetConvolutionLayer);
+
+// Get pooling layer according to engine.
+template <typename Dtype>
+shared_ptr<Layer<Dtype> > GetPoolingLayer(const LayerParameter& param) {
+  PoolingParameter_Engine engine = param.pooling_param().engine();
+  if (engine == PoolingParameter_Engine_DEFAULT) {
+    engine = PoolingParameter_Engine_CAFFE;
+#ifdef USE_CUDNN
+    engine = PoolingParameter_Engine_CUDNN;
+#endif
+  }
+  if (engine == PoolingParameter_Engine_CAFFE) {
+    return shared_ptr<Layer<Dtype> >(new PoolingLayer<Dtype>(param));
+#ifdef USE_CUDNN
+  } else if (engine == PoolingParameter_Engine_CUDNN) {
+    PoolingParameter p_param = param.pooling_param();
+    if (p_param.pad() || p_param.pad_h() || p_param.pad_w() ||
+        param.top_size() > 1) {
+      LOG(INFO) << "CUDNN does not support padding or multiple tops. "
+                << "Using Caffe's own pooling layer.";
+      return shared_ptr<Layer<Dtype> >(new PoolingLayer<Dtype>(param));
+    }
+    return shared_ptr<Layer<Dtype> >(new CuDNNPoolingLayer<Dtype>(param));
+#endif
+  } else {
+    LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
+  }
+}
+
+REGISTER_LAYER_CREATOR(Pooling, GetPoolingLayer);
+
+// Get relu layer according to engine.
+template <typename Dtype>
+shared_ptr<Layer<Dtype> > GetReLULayer(const LayerParameter& param) {
+  ReLUParameter_Engine engine = param.relu_param().engine();
+  if (engine == ReLUParameter_Engine_DEFAULT) {
+    engine = ReLUParameter_Engine_CAFFE;
+#ifdef USE_CUDNN
+    engine = ReLUParameter_Engine_CUDNN;
+#endif
+  }
+  if (engine == ReLUParameter_Engine_CAFFE) {
+    return shared_ptr<Layer<Dtype> >(new ReLULayer<Dtype>(param));
+#ifdef USE_CUDNN
+  } else if (engine == ReLUParameter_Engine_CUDNN) {
+    return shared_ptr<Layer<Dtype> >(new CuDNNReLULayer<Dtype>(param));
+#endif
+  } else {
+    LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
+  }
+}
+
+REGISTER_LAYER_CREATOR(ReLU, GetReLULayer);
+
+// Get sigmoid layer according to engine.
+template <typename Dtype>
+shared_ptr<Layer<Dtype> > GetSigmoidLayer(const LayerParameter& param) {
+  SigmoidParameter_Engine engine = param.sigmoid_param().engine();
+  if (engine == SigmoidParameter_Engine_DEFAULT) {
+    engine = SigmoidParameter_Engine_CAFFE;
+#ifdef USE_CUDNN
+    engine = SigmoidParameter_Engine_CUDNN;
+#endif
+  }
+  if (engine == SigmoidParameter_Engine_CAFFE) {
+    return shared_ptr<Layer<Dtype> >(new SigmoidLayer<Dtype>(param));
+#ifdef USE_CUDNN
+  } else if (engine == SigmoidParameter_Engine_CUDNN) {
+    return shared_ptr<Layer<Dtype> >(new CuDNNSigmoidLayer<Dtype>(param));
+#endif
+  } else {
+    LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
+  }
+}
+
+REGISTER_LAYER_CREATOR(Sigmoid, GetSigmoidLayer);
+
+// Get softmax layer according to engine.
+template <typename Dtype>
+shared_ptr<Layer<Dtype> > GetSoftmaxLayer(const LayerParameter& param) {
+  SoftmaxParameter_Engine engine = param.softmax_param().engine();
+  if (engine == SoftmaxParameter_Engine_DEFAULT) {
+    engine = SoftmaxParameter_Engine_CAFFE;
+#ifdef USE_CUDNN
+    engine = SoftmaxParameter_Engine_CUDNN;
+#endif
+  }
+  if (engine == SoftmaxParameter_Engine_CAFFE) {
+    return shared_ptr<Layer<Dtype> >(new SoftmaxLayer<Dtype>(param));
+#ifdef USE_CUDNN
+  } else if (engine == SoftmaxParameter_Engine_CUDNN) {
+    return shared_ptr<Layer<Dtype> >(new CuDNNSoftmaxLayer<Dtype>(param));
+#endif
+  } else {
+    LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
+  }
+}
+
+REGISTER_LAYER_CREATOR(Softmax, GetSoftmaxLayer);
+
+// Get tanh layer according to engine.
+template <typename Dtype>
+shared_ptr<Layer<Dtype> > GetTanHLayer(const LayerParameter& param) {
+  TanHParameter_Engine engine = param.tanh_param().engine();
+  if (engine == TanHParameter_Engine_DEFAULT) {
+    engine = TanHParameter_Engine_CAFFE;
+#ifdef USE_CUDNN
+    engine = TanHParameter_Engine_CUDNN;
+#endif
+  }
+  if (engine == TanHParameter_Engine_CAFFE) {
+    return shared_ptr<Layer<Dtype> >(new TanHLayer<Dtype>(param));
+#ifdef USE_CUDNN
+  } else if (engine == TanHParameter_Engine_CUDNN) {
+    return shared_ptr<Layer<Dtype> >(new CuDNNTanHLayer<Dtype>(param));
+#endif
+  } else {
+    LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
+  }
+}
+
+REGISTER_LAYER_CREATOR(TanH, GetTanHLayer);
+
+#ifdef WITH_PYTHON_LAYER
+template <typename Dtype>
+shared_ptr<Layer<Dtype> > GetPythonLayer(const LayerParameter& param) {
+  Py_Initialize();
+  try {
+    bp::object module = bp::import(param.python_param().module().c_str());
+    bp::object layer = module.attr(param.python_param().layer().c_str())(param);
+    return bp::extract<shared_ptr<PythonLayer<Dtype> > >(layer)();
+  } catch (bp::error_already_set) {
+    PyErr_Print();
+    throw;
+  }
+}
+
+REGISTER_LAYER_CREATOR(Python, GetPythonLayer);
+#endif
+
+// Layers that use their constructor as their default creator should be
+// registered in their corresponding cpp files. Do not register them here.
+}  // namespace caffe
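Each REGISTER_LAYER_CREATOR call above adds a creator function to a registry keyed by the
layer's type string, which is how a net turns a LayerParameter into a concrete Layer
without hard-coding engine choices. A rough sketch of that lookup, assuming the
LayerRegistry interface declared next to the macros in caffe/layer_factory.hpp; the
parameter values are only illustrative:

#include <boost/shared_ptr.hpp>
#include "caffe/layer.hpp"
#include "caffe/layer_factory.hpp"
#include "caffe/proto/caffe.pb.h"

void BuildConvLayer() {
  caffe::LayerParameter param;
  param.set_name("conv1");
  param.set_type("Convolution");  // dispatched through GetConvolutionLayer above
  param.mutable_convolution_param()->set_num_output(16);
  param.mutable_convolution_param()->set_kernel_size(3);
  boost::shared_ptr<caffe::Layer<float> > layer =
      caffe::LayerRegistry<float>::CreateLayer(param);
  // layer->SetUp(bottom, top) etc. would follow in a real net.
}
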
diff --git a/src/caffe/layers/absval_layer.cpp b/src/caffe/layers/absval_layer.cpp
new file mode 100644
index 0000000..5ce28c9
--- /dev/null
+++ b/src/caffe/layers/absval_layer.cpp
@@ -0,0 +1,45 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/neuron_layers.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void AbsValLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  NeuronLayer<Dtype>::LayerSetUp(bottom, top);
+  CHECK_NE(top[0], bottom[0]) << this->type() << " Layer does not "
+    "allow in-place computation.";
+}
+
+template <typename Dtype>
+void AbsValLayer<Dtype>::Forward_cpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  const int count = top[0]->count();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  caffe_abs(count, bottom[0]->cpu_data(), top_data);
+}
+
+template <typename Dtype>
+void AbsValLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const int count = top[0]->count();
+  const Dtype* top_diff = top[0]->cpu_diff();
+  if (propagate_down[0]) {
+    const Dtype* bottom_data = bottom[0]->cpu_data();
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    caffe_cpu_sign(count, bottom_data, bottom_diff);
+    caffe_mul(count, bottom_diff, top_diff, bottom_diff);
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(AbsValLayer);
+#endif
+
+INSTANTIATE_CLASS(AbsValLayer);
+REGISTER_LAYER_CLASS(AbsVal);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/absval_layer.cu b/src/caffe/layers/absval_layer.cu
new file mode 100644
index 0000000..91f3c77
--- /dev/null
+++ b/src/caffe/layers/absval_layer.cu
@@ -0,0 +1,34 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void AbsValLayer<Dtype>::Forward_gpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  const int count = top[0]->count();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  caffe_gpu_abs(count, bottom[0]->gpu_data(), top_data);
+}
+
+template <typename Dtype>
+void AbsValLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const int count = top[0]->count();
+  const Dtype* top_data = top[0]->gpu_data();
+  const Dtype* top_diff = top[0]->gpu_diff();
+  if (propagate_down[0]) {
+    const Dtype* bottom_data = bottom[0]->gpu_data();
+    Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+    caffe_gpu_sign(count, bottom_data, bottom_diff);
+    caffe_gpu_mul(count, bottom_diff, top_diff, bottom_diff);
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(AbsValLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/accuracy_layer.cpp b/src/caffe/layers/accuracy_layer.cpp
new file mode 100644
index 0000000..90aad67
--- /dev/null
+++ b/src/caffe/layers/accuracy_layer.cpp
@@ -0,0 +1,91 @@
+#include <algorithm>
+#include <functional>
+#include <utility>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void AccuracyLayer<Dtype>::LayerSetUp(
+  const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  top_k_ = this->layer_param_.accuracy_param().top_k();
+
+  has_ignore_label_ =
+    this->layer_param_.accuracy_param().has_ignore_label();
+  if (has_ignore_label_) {
+    ignore_label_ = this->layer_param_.accuracy_param().ignore_label();
+  }
+}
+
+template <typename Dtype>
+void AccuracyLayer<Dtype>::Reshape(
+  const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  CHECK_LE(top_k_, bottom[0]->count() / bottom[1]->count())
+      << "top_k must be less than or equal to the number of classes.";
+  label_axis_ =
+      bottom[0]->CanonicalAxisIndex(this->layer_param_.accuracy_param().axis());
+  outer_num_ = bottom[0]->count(0, label_axis_);
+  inner_num_ = bottom[0]->count(label_axis_ + 1);
+  CHECK_EQ(outer_num_ * inner_num_, bottom[1]->count())
+      << "Number of labels must match number of predictions; "
+      << "e.g., if label axis == 1 and prediction shape is (N, C, H, W), "
+      << "label count (number of labels) must be N*H*W, "
+      << "with integer values in {0, 1, ..., C-1}.";
+  vector<int> top_shape(0);  // Accuracy is a scalar; 0 axes.
+  top[0]->Reshape(top_shape);
+}
+
+template <typename Dtype>
+void AccuracyLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  Dtype accuracy = 0;
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  const Dtype* bottom_label = bottom[1]->cpu_data();
+  const int dim = bottom[0]->count() / outer_num_;
+  const int num_labels = bottom[0]->shape(label_axis_);
+  vector<Dtype> maxval(top_k_+1);
+  vector<int> max_id(top_k_+1);
+  int count = 0;
+  for (int i = 0; i < outer_num_; ++i) {
+    for (int j = 0; j < inner_num_; ++j) {
+      const int label_value =
+          static_cast<int>(bottom_label[i * inner_num_ + j]);
+      if (has_ignore_label_ && label_value == ignore_label_) {
+        continue;
+      }
+      DCHECK_GE(label_value, 0);
+      DCHECK_LT(label_value, num_labels);
+      // Top-k accuracy
+      std::vector<std::pair<Dtype, int> > bottom_data_vector;
+      for (int k = 0; k < num_labels; ++k) {
+        bottom_data_vector.push_back(std::make_pair(
+            bottom_data[i * dim + k * inner_num_ + j], k));
+      }
+      std::partial_sort(
+          bottom_data_vector.begin(), bottom_data_vector.begin() + top_k_,
+          bottom_data_vector.end(), std::greater<std::pair<Dtype, int> >());
+      // check if true label is in top k predictions
+      for (int k = 0; k < top_k_; k++) {
+        if (bottom_data_vector[k].second == label_value) {
+          ++accuracy;
+          break;
+        }
+      }
+      ++count;
+    }
+  }
+
+  // LOG(INFO) << "Accuracy: " << accuracy;
+  top[0]->mutable_cpu_data()[0] = accuracy / count;
+  // Accuracy layer should not be used as a loss function.
+}
+
+INSTANTIATE_CLASS(AccuracyLayer);
+REGISTER_LAYER_CLASS(Accuracy);
+
+}  // namespace caffe
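To make the top-k bookkeeping above concrete with small illustrative numbers: for one
sample with num_labels = 3, scores (0.1, 0.5, 0.4), true label 2 and top_k_ = 2, the
partial sort produces the (score, class) pairs (0.5, 1) and (0.4, 2); class 2 appears
among the top two, so the sample counts as a hit, and the layer finally reports
hits / count over all non-ignored samples.
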
diff --git a/src/caffe/layers/argmax_layer.cpp b/src/caffe/layers/argmax_layer.cpp
new file mode 100644
index 0000000..c4040cd
--- /dev/null
+++ b/src/caffe/layers/argmax_layer.cpp
@@ -0,0 +1,63 @@
+#include <algorithm>
+#include <functional>
+#include <utility>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void ArgMaxLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  out_max_val_ = this->layer_param_.argmax_param().out_max_val();
+  top_k_ = this->layer_param_.argmax_param().top_k();
+  CHECK_GE(top_k_, 1) << "top_k must not be less than 1.";
+  CHECK_LE(top_k_, bottom[0]->count() / bottom[0]->num())
+      << "top_k must be less than or equal to the number of classes.";
+}
+
+template <typename Dtype>
+void ArgMaxLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  if (out_max_val_) {
+    // Produces max_ind and max_val
+    top[0]->Reshape(bottom[0]->num(), 2, top_k_, 1);
+  } else {
+    // Produces only max_ind
+    top[0]->Reshape(bottom[0]->num(), 1, top_k_, 1);
+  }
+}
+
+template <typename Dtype>
+void ArgMaxLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  int num = bottom[0]->num();
+  int dim = bottom[0]->count() / bottom[0]->num();
+  for (int i = 0; i < num; ++i) {
+    std::vector<std::pair<Dtype, int> > bottom_data_vector;
+    for (int j = 0; j < dim; ++j) {
+      bottom_data_vector.push_back(
+          std::make_pair(bottom_data[i * dim + j], j));
+    }
+    std::partial_sort(
+        bottom_data_vector.begin(), bottom_data_vector.begin() + top_k_,
+        bottom_data_vector.end(), std::greater<std::pair<Dtype, int> >());
+    for (int j = 0; j < top_k_; ++j) {
+      top_data[top[0]->offset(i, 0, j)] = bottom_data_vector[j].second;
+    }
+    if (out_max_val_) {
+      for (int j = 0; j < top_k_; ++j) {
+        top_data[top[0]->offset(i, 1, j)] = bottom_data_vector[j].first;
+      }
+    }
+  }
+}
+
+INSTANTIATE_CLASS(ArgMaxLayer);
+REGISTER_LAYER_CLASS(ArgMax);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/base_conv_layer.cpp b/src/caffe/layers/base_conv_layer.cpp
new file mode 100644
index 0000000..ccb3adc
--- /dev/null
+++ b/src/caffe/layers/base_conv_layer.cpp
@@ -0,0 +1,298 @@
+#include <vector>
+
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/im2col.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void BaseConvolutionLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  CHECK_EQ(4, bottom[0]->num_axes()) << "Input must have 4 axes, "
+      << "corresponding to (num, channels, height, width)";
+  // Configure the kernel size, padding, stride, and inputs.
+  ConvolutionParameter conv_param = this->layer_param_.convolution_param();
+  CHECK(!conv_param.has_kernel_size() !=
+      !(conv_param.has_kernel_h() && conv_param.has_kernel_w()))
+      << "Filter size is kernel_size OR kernel_h and kernel_w; not both";
+  CHECK(conv_param.has_kernel_size() ||
+      (conv_param.has_kernel_h() && conv_param.has_kernel_w()))
+      << "For non-square filters both kernel_h and kernel_w are required.";
+  CHECK((!conv_param.has_pad() && conv_param.has_pad_h()
+      && conv_param.has_pad_w())
+      || (!conv_param.has_pad_h() && !conv_param.has_pad_w()))
+      << "pad is pad OR pad_h and pad_w are required.";
+  CHECK((!conv_param.has_stride() && conv_param.has_stride_h()
+      && conv_param.has_stride_w())
+      || (!conv_param.has_stride_h() && !conv_param.has_stride_w()))
+      << "Stride is stride OR stride_h and stride_w are required.";
+  if (conv_param.has_kernel_size()) {
+    kernel_h_ = kernel_w_ = conv_param.kernel_size();
+  } else {
+    kernel_h_ = conv_param.kernel_h();
+    kernel_w_ = conv_param.kernel_w();
+  }
+  CHECK_GT(kernel_h_, 0) << "Filter dimensions cannot be zero.";
+  CHECK_GT(kernel_w_, 0) << "Filter dimensions cannot be zero.";
+  if (!conv_param.has_pad_h()) {
+    pad_h_ = pad_w_ = conv_param.pad();
+  } else {
+    pad_h_ = conv_param.pad_h();
+    pad_w_ = conv_param.pad_w();
+  }
+  if (!conv_param.has_stride_h()) {
+    stride_h_ = stride_w_ = conv_param.stride();
+  } else {
+    stride_h_ = conv_param.stride_h();
+    stride_w_ = conv_param.stride_w();
+  }
+  // Special case: im2col is the identity for 1x1 convolution with stride 1
+  // and no padding, so flag for skipping the buffer and transformation.
+  is_1x1_ = kernel_w_ == 1 && kernel_h_ == 1
+      && stride_h_ == 1 && stride_w_ == 1 && pad_h_ == 0 && pad_w_ == 0;
+  // Configure output channels and groups.
+  channels_ = bottom[0]->channels();
+  num_output_ = this->layer_param_.convolution_param().num_output();
+  CHECK_GT(num_output_, 0);
+  group_ = this->layer_param_.convolution_param().group();
+  CHECK_EQ(channels_ % group_, 0);
+  CHECK_EQ(num_output_ % group_, 0)
+      << "Number of output should be multiples of group.";
+  if (reverse_dimensions()) {
+    conv_out_channels_ = channels_;
+    conv_in_channels_ = num_output_;
+  } else {
+    conv_out_channels_ = num_output_;
+    conv_in_channels_ = channels_;
+  }
+  // Handle the parameters: weights and biases.
+  // - blobs_[0] holds the filter weights
+  // - blobs_[1] holds the biases (optional)
+  bias_term_ = this->layer_param_.convolution_param().bias_term();
+  if (this->blobs_.size() > 0) {
+    LOG(INFO) << "Skipping parameter initialization";
+  } else {
+    if (bias_term_) {
+      this->blobs_.resize(2);
+    } else {
+      this->blobs_.resize(1);
+    }
+    // Initialize and fill the weights:
+    // output channels x input channels per-group x kernel height x kernel width
+    this->blobs_[0].reset(new Blob<Dtype>(
+        conv_out_channels_, conv_in_channels_ / group_, kernel_h_, kernel_w_));
+    shared_ptr<Filler<Dtype> > weight_filler(GetFiller<Dtype>(
+        this->layer_param_.convolution_param().weight_filler()));
+    weight_filler->Fill(this->blobs_[0].get());
+    // If necessary, initialize and fill the biases.
+    if (bias_term_) {
+      vector<int> bias_shape(1, num_output_);
+      this->blobs_[1].reset(new Blob<Dtype>(bias_shape));
+      shared_ptr<Filler<Dtype> > bias_filler(GetFiller<Dtype>(
+          this->layer_param_.convolution_param().bias_filler()));
+      bias_filler->Fill(this->blobs_[1].get());
+    }
+  }
+  // Propagate gradients to the parameters (as directed by backward pass).
+  this->param_propagate_down_.resize(this->blobs_.size(), true);
+}
+
+template <typename Dtype>
+void BaseConvolutionLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  CHECK_EQ(4, bottom[0]->num_axes()) << "Input must have 4 axes, "
+      << "corresponding to (num, channels, height, width)";
+  num_ = bottom[0]->num();
+  height_ = bottom[0]->height();
+  width_ = bottom[0]->width();
+  CHECK_EQ(bottom[0]->channels(), channels_) << "Input size incompatible with"
+    " convolution kernel.";
+  // TODO: generalize to handle inputs of different shapes.
+  for (int bottom_id = 1; bottom_id < bottom.size(); ++bottom_id) {
+    CHECK_EQ(num_, bottom[bottom_id]->num()) << "Inputs must have same num.";
+    CHECK_EQ(channels_, bottom[bottom_id]->channels())
+        << "Inputs must have same channels.";
+    CHECK_EQ(height_, bottom[bottom_id]->height())
+        << "Inputs must have same height.";
+    CHECK_EQ(width_, bottom[bottom_id]->width())
+        << "Inputs must have same width.";
+  }
+  // Shape the tops.
+  compute_output_shape();
+  for (int top_id = 0; top_id < top.size(); ++top_id) {
+    top[top_id]->Reshape(num_, num_output_, height_out_, width_out_);
+  }
+  if (reverse_dimensions()) {
+    conv_in_height_ = height_out_;
+    conv_in_width_ = width_out_;
+    conv_out_spatial_dim_ = height_ * width_;
+  } else {
+    conv_in_height_ = height_;
+    conv_in_width_ = width_;
+    conv_out_spatial_dim_ = height_out_ * width_out_;
+  }
+  kernel_dim_ = conv_in_channels_ * kernel_h_ * kernel_w_;
+  weight_offset_ = conv_out_channels_ * kernel_dim_ / group_ / group_;
+  col_offset_ = kernel_dim_ * conv_out_spatial_dim_ / group_;
+  output_offset_ = conv_out_channels_ * conv_out_spatial_dim_ / group_;
+  // The im2col result buffer will only hold one image at a time to avoid
+  // overly large memory usage. In the special case of 1x1 convolution
+  // it goes lazily unused to save memory.
+  if (reverse_dimensions()) {
+    col_buffer_.Reshape(1, kernel_dim_, height_, width_);
+  } else {
+    col_buffer_.Reshape(1, kernel_dim_, height_out_, width_out_);
+  }
+  // Set up the all ones "bias multiplier" for adding biases by BLAS
+  if (bias_term_) {
+    vector<int> bias_multiplier_shape(1, height_out_ * width_out_);
+    bias_multiplier_.Reshape(bias_multiplier_shape);
+    caffe_set(bias_multiplier_.count(), Dtype(1),
+        bias_multiplier_.mutable_cpu_data());
+  }
+}
+
+template <typename Dtype>
+void BaseConvolutionLayer<Dtype>::forward_cpu_gemm(const Dtype* input,
+    const Dtype* weights, Dtype* output, bool skip_im2col) {
+  const Dtype* col_buff = input;
+  if (!is_1x1_) {
+    if (!skip_im2col) {
+      conv_im2col_cpu(input, col_buffer_.mutable_cpu_data());
+    }
+    col_buff = col_buffer_.cpu_data();
+  }
+  for (int g = 0; g < group_; ++g) {
+    caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, conv_out_channels_ /
+        group_, conv_out_spatial_dim_, kernel_dim_ / group_,
+        (Dtype)1., weights + weight_offset_ * g, col_buff + col_offset_ * g,
+        (Dtype)0., output + output_offset_ * g);
+  }
+}
+
+template <typename Dtype>
+void BaseConvolutionLayer<Dtype>::forward_cpu_bias(Dtype* output,
+    const Dtype* bias) {
+  caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num_output_,
+      height_out_ * width_out_, 1, (Dtype)1., bias, bias_multiplier_.cpu_data(),
+      (Dtype)1., output);
+}
+
+template <typename Dtype>
+void BaseConvolutionLayer<Dtype>::backward_cpu_gemm(const Dtype* output,
+    const Dtype* weights, Dtype* input) {
+  Dtype* col_buff = col_buffer_.mutable_cpu_data();
+  if (is_1x1_) {
+    col_buff = input;
+  }
+  for (int g = 0; g < group_; ++g) {
+    caffe_cpu_gemm<Dtype>(CblasTrans, CblasNoTrans, kernel_dim_ / group_,
+        conv_out_spatial_dim_, conv_out_channels_ / group_,
+        (Dtype)1., weights + weight_offset_ * g, output + output_offset_ * g,
+        (Dtype)0., col_buff + col_offset_ * g);
+  }
+  if (!is_1x1_) {
+    conv_col2im_cpu(col_buff, input);
+  }
+}
+
+template <typename Dtype>
+void BaseConvolutionLayer<Dtype>::weight_cpu_gemm(const Dtype* input,
+    const Dtype* output, Dtype* weights) {
+  const Dtype* col_buff = input;
+  if (!is_1x1_) {
+    conv_im2col_cpu(input, col_buffer_.mutable_cpu_data());
+    col_buff = col_buffer_.cpu_data();
+  }
+  for (int g = 0; g < group_; ++g) {
+    caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasTrans, conv_out_channels_ / group_,
+        kernel_dim_ / group_, conv_out_spatial_dim_,
+        (Dtype)1., output + output_offset_ * g, col_buff + col_offset_ * g,
+        (Dtype)1., weights + weight_offset_ * g);
+  }
+}
+
+template <typename Dtype>
+void BaseConvolutionLayer<Dtype>::backward_cpu_bias(Dtype* bias,
+    const Dtype* input) {
+  caffe_cpu_gemv<Dtype>(CblasNoTrans, num_output_, height_out_ * width_out_, 1.,
+      input, bias_multiplier_.cpu_data(), 1., bias);
+}
+
+#ifndef CPU_ONLY
+
+template <typename Dtype>
+void BaseConvolutionLayer<Dtype>::forward_gpu_gemm(const Dtype* input,
+    const Dtype* weights, Dtype* output, bool skip_im2col) {
+  const Dtype* col_buff = input;
+  if (!is_1x1_) {
+    if (!skip_im2col) {
+      conv_im2col_gpu(input, col_buffer_.mutable_gpu_data());
+    }
+    col_buff = col_buffer_.gpu_data();
+  }
+  for (int g = 0; g < group_; ++g) {
+    caffe_gpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, conv_out_channels_ /
+        group_, conv_out_spatial_dim_, kernel_dim_ / group_,
+        (Dtype)1., weights + weight_offset_ * g, col_buff + col_offset_ * g,
+        (Dtype)0., output + output_offset_ * g);
+  }
+}
+
+template <typename Dtype>
+void BaseConvolutionLayer<Dtype>::forward_gpu_bias(Dtype* output,
+    const Dtype* bias) {
+  caffe_gpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num_output_,
+      height_out_ * width_out_, 1, (Dtype)1., bias, bias_multiplier_.gpu_data(),
+      (Dtype)1., output);
+}
+
+template <typename Dtype>
+void BaseConvolutionLayer<Dtype>::backward_gpu_gemm(const Dtype* output,
+    const Dtype* weights, Dtype* input) {
+  Dtype* col_buff = col_buffer_.mutable_gpu_data();
+  if (is_1x1_) {
+    col_buff = input;
+  }
+  for (int g = 0; g < group_; ++g) {
+    caffe_gpu_gemm<Dtype>(CblasTrans, CblasNoTrans, kernel_dim_ / group_,
+        conv_out_spatial_dim_, conv_out_channels_ / group_,
+        (Dtype)1., weights + weight_offset_ * g, output + output_offset_ * g,
+        (Dtype)0., col_buff + col_offset_ * g);
+  }
+  if (!is_1x1_) {
+    conv_col2im_gpu(col_buff, input);
+  }
+}
+
+template <typename Dtype>
+void BaseConvolutionLayer<Dtype>::weight_gpu_gemm(const Dtype* input,
+    const Dtype* output, Dtype* weights) {
+  const Dtype* col_buff = input;
+  if (!is_1x1_) {
+    conv_im2col_gpu(input, col_buffer_.mutable_gpu_data());
+    col_buff = col_buffer_.gpu_data();
+  }
+  for (int g = 0; g < group_; ++g) {
+    caffe_gpu_gemm<Dtype>(CblasNoTrans, CblasTrans, conv_out_channels_ / group_,
+        kernel_dim_ / group_, conv_out_spatial_dim_,
+        (Dtype)1., output + output_offset_ * g, col_buff + col_offset_ * g,
+        (Dtype)1., weights + weight_offset_ * g);
+  }
+}
+
+template <typename Dtype>
+void BaseConvolutionLayer<Dtype>::backward_gpu_bias(Dtype* bias,
+    const Dtype* input) {
+  caffe_gpu_gemv<Dtype>(CblasNoTrans, num_output_, height_out_ * width_out_, 1.,
+      input, bias_multiplier_.gpu_data(), 1., bias);
+}
+
+#endif  // !CPU_ONLY
+
+INSTANTIATE_CLASS(BaseConvolutionLayer);
+
+}  // namespace caffe
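To make the per-group GEMM shapes above concrete (the numbers are purely illustrative):
with conv_in_channels_ = 3, a 3x3 kernel, stride 1, pad 1, group_ = 1 and a 32x32 input,
kernel_dim_ = 3 * 3 * 3 = 27 and the padded input keeps its 32x32 spatial size, so
conv_out_spatial_dim_ = 1024. For num_output_ = 64, forward_cpu_gemm multiplies the 64x27
weight matrix by the 27x1024 im2col buffer to produce a 64x1024 output slice, and
forward_cpu_bias then adds the 64 biases across all 1024 positions via the all-ones bias
multiplier.
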
diff --git a/src/caffe/layers/base_data_layer.cpp b/src/caffe/layers/base_data_layer.cpp
new file mode 100644
index 0000000..26a1118
--- /dev/null
+++ b/src/caffe/layers/base_data_layer.cpp
@@ -0,0 +1,89 @@
+#include <string>
+#include <vector>
+
+#include "caffe/data_layers.hpp"
+#include "caffe/util/io.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+BaseDataLayer<Dtype>::BaseDataLayer(const LayerParameter& param)
+    : Layer<Dtype>(param),
+      transform_param_(param.transform_param()) {
+}
+
+template <typename Dtype>
+void BaseDataLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  if (top.size() == 1) {
+    output_labels_ = false;
+  } else {
+    output_labels_ = true;
+  }
+  data_transformer_.reset(
+      new DataTransformer<Dtype>(transform_param_, this->phase_));
+  data_transformer_->InitRand();
+  // The subclasses should setup the size of bottom and top
+  DataLayerSetUp(bottom, top);
+}
+
+template <typename Dtype>
+void BasePrefetchingDataLayer<Dtype>::LayerSetUp(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  BaseDataLayer<Dtype>::LayerSetUp(bottom, top);
+  // Now, start the prefetch thread. Before calling prefetch, we make two
+  // cpu_data calls so that the prefetch thread does not accidentally make
+  // simultaneous cudaMalloc calls when the main thread is running. In some
+  // GPUs this seems to cause failures if we do not do so.
+  this->prefetch_data_.mutable_cpu_data();
+  if (this->output_labels_) {
+    this->prefetch_label_.mutable_cpu_data();
+  }
+  DLOG(INFO) << "Initializing prefetch";
+  this->CreatePrefetchThread();
+  DLOG(INFO) << "Prefetch initialized.";
+}
+
+template <typename Dtype>
+void BasePrefetchingDataLayer<Dtype>::CreatePrefetchThread() {
+  this->data_transformer_->InitRand();
+  CHECK(StartInternalThread()) << "Thread execution failed";
+}
+
+template <typename Dtype>
+void BasePrefetchingDataLayer<Dtype>::JoinPrefetchThread() {
+  CHECK(WaitForInternalThreadToExit()) << "Thread joining failed";
+}
+
+template <typename Dtype>
+void BasePrefetchingDataLayer<Dtype>::Forward_cpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  // First, join the thread
+  JoinPrefetchThread();
+  DLOG(INFO) << "Thread joined";
+  // Reshape to loaded data.
+  top[0]->ReshapeLike(prefetch_data_);
+  // Copy the data
+  caffe_copy(prefetch_data_.count(), prefetch_data_.cpu_data(),
+             top[0]->mutable_cpu_data());
+  DLOG(INFO) << "Prefetch copied";
+  if (this->output_labels_) {
+    // Reshape to loaded labels.
+    top[1]->ReshapeLike(prefetch_label_);
+    // Copy the labels.
+    caffe_copy(prefetch_label_.count(), prefetch_label_.cpu_data(),
+               top[1]->mutable_cpu_data());
+  }
+  // Start a new prefetch thread
+  DLOG(INFO) << "CreatePrefetchThread";
+  CreatePrefetchThread();
+}
+
+#ifdef CPU_ONLY
+STUB_GPU_FORWARD(BasePrefetchingDataLayer, Forward);
+#endif
+
+INSTANTIATE_CLASS(BaseDataLayer);
+INSTANTIATE_CLASS(BasePrefetchingDataLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/base_data_layer.cu b/src/caffe/layers/base_data_layer.cu
new file mode 100644
index 0000000..9335a5b
--- /dev/null
+++ b/src/caffe/layers/base_data_layer.cu
@@ -0,0 +1,30 @@
+#include <vector>
+
+#include "caffe/data_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void BasePrefetchingDataLayer<Dtype>::Forward_gpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  // First, join the thread
+  JoinPrefetchThread();
+  // Reshape to loaded data.
+  top[0]->ReshapeLike(this->prefetch_data_);
+  // Copy the data
+  caffe_copy(prefetch_data_.count(), prefetch_data_.cpu_data(),
+      top[0]->mutable_gpu_data());
+  if (this->output_labels_) {
+    // Reshape to loaded labels.
+    top[1]->ReshapeLike(prefetch_label_);
+    // Copy the labels.
+    caffe_copy(prefetch_label_.count(), prefetch_label_.cpu_data(),
+        top[1]->mutable_gpu_data());
+  }
+  // Start a new prefetch thread
+  CreatePrefetchThread();
+}
+
+INSTANTIATE_LAYER_GPU_FORWARD(BasePrefetchingDataLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/bnll_layer.cpp b/src/caffe/layers/bnll_layer.cpp
new file mode 100644
index 0000000..9ba0ea9
--- /dev/null
+++ b/src/caffe/layers/bnll_layer.cpp
@@ -0,0 +1,48 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+const float kBNLL_THRESHOLD = 50.;
+
+template <typename Dtype>
+void BNLLLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  const int count = bottom[0]->count();
+  for (int i = 0; i < count; ++i) {
+    top_data[i] = bottom_data[i] > 0 ?
+        bottom_data[i] + log(1. + exp(-bottom_data[i])) :
+        log(1. + exp(bottom_data[i]));
+  }
+}
+
+template <typename Dtype>
+void BNLLLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    const Dtype* bottom_data = bottom[0]->cpu_data();
+    const Dtype* top_diff = top[0]->cpu_diff();
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    const int count = bottom[0]->count();
+    Dtype expval;
+    for (int i = 0; i < count; ++i) {
+      expval = exp(std::min(bottom_data[i], Dtype(kBNLL_THRESHOLD)));
+      bottom_diff[i] = top_diff[i] * expval / (expval + 1.);
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(BNLLLayer);
+#endif
+
+INSTANTIATE_CLASS(BNLLLayer);
+REGISTER_LAYER_CLASS(BNLL);
+
+}  // namespace caffe
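The two branches of the forward pass above compute the same softplus value; the split is
purely for numerical stability, since for x > 0

    log(1 + exp(x)) = x + log(1 + exp(-x)),

and evaluating the right-hand side avoids overflowing exp() for large positive inputs.
kBNLL_THRESHOLD caps the exponent in the backward pass for the same reason, where the
gradient expval / (expval + 1) is simply the logistic sigmoid of the input.
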
diff --git a/src/caffe/layers/bnll_layer.cu b/src/caffe/layers/bnll_layer.cu
new file mode 100644
index 0000000..d963d06
--- /dev/null
+++ b/src/caffe/layers/bnll_layer.cu
@@ -0,0 +1,60 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+const float kBNLL_THRESHOLD = 50.;
+
+template <typename Dtype>
+__global__ void BNLLForward(const int n, const Dtype* in, Dtype* out) {
+  CUDA_KERNEL_LOOP(index, n) {
+    out[index] = in[index] > 0 ?
+        in[index] + log(1. + exp(-in[index])) :
+        log(1. + exp(in[index]));
+  }
+}
+
+template <typename Dtype>
+void BNLLLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  const int count = bottom[0]->count();
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  BNLLForward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+      count, bottom_data, top_data);
+  CUDA_POST_KERNEL_CHECK;
+}
+
+template <typename Dtype>
+__global__ void BNLLBackward(const int n, const Dtype* in_diff,
+    const Dtype* in_data, Dtype* out_diff) {
+  CUDA_KERNEL_LOOP(index, n) {
+    Dtype expval = exp(min(in_data[index], Dtype(kBNLL_THRESHOLD)));
+    out_diff[index] = in_diff[index] * expval / (expval + 1.);
+  }
+}
+
+template <typename Dtype>
+void BNLLLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    const Dtype* bottom_data = bottom[0]->gpu_data();
+    const Dtype* top_diff = top[0]->gpu_diff();
+    Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+    const int count = bottom[0]->count();
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    BNLLBackward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+        count, top_diff, bottom_data, bottom_diff);
+    CUDA_POST_KERNEL_CHECK;
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(BNLLLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/concat_layer.cpp b/src/caffe/layers/concat_layer.cpp
new file mode 100644
index 0000000..1cac8fc
--- /dev/null
+++ b/src/caffe/layers/concat_layer.cpp
@@ -0,0 +1,98 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void ConcatLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const ConcatParameter& concat_param = this->layer_param_.concat_param();
+  CHECK(!(concat_param.has_axis() && concat_param.has_concat_dim()))
+      << "Either axis or concat_dim should be specified; not both.";
+}
+
+template <typename Dtype>
+void ConcatLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const int num_axes = bottom[0]->num_axes();
+  const ConcatParameter& concat_param = this->layer_param_.concat_param();
+  if (concat_param.has_concat_dim()) {
+    concat_axis_ = static_cast<int>(concat_param.concat_dim());
+    // Don't allow negative indexing for concat_dim, a uint32 -- almost
+    // certainly unintended.
+    CHECK_GE(concat_axis_, 0) << "casting concat_dim from uint32 to int32 "
+        << "produced negative result; concat_dim must satisfy "
+        << "0 <= concat_dim < " << kMaxBlobAxes;
+    CHECK_LT(concat_axis_, num_axes) << "concat_dim out of range.";
+  } else {
+    concat_axis_ = bottom[0]->CanonicalAxisIndex(concat_param.axis());
+  }
+  // Initialize with the first blob.
+  vector<int> top_shape = bottom[0]->shape();
+  num_concats_ = bottom[0]->count(0, concat_axis_);
+  concat_input_size_ = bottom[0]->count(concat_axis_ + 1);
+  int bottom_count_sum = bottom[0]->count();
+  for (int i = 1; i < bottom.size(); ++i) {
+    CHECK_EQ(num_axes, bottom[i]->num_axes())
+        << "All inputs must have the same #axes.";
+    for (int j = 0; j < num_axes; ++j) {
+      if (j == concat_axis_) { continue; }
+      CHECK_EQ(top_shape[j], bottom[i]->shape(j))
+          << "All inputs must have the same shape, except at concat_axis.";
+    }
+    bottom_count_sum += bottom[i]->count();
+    top_shape[concat_axis_] += bottom[i]->shape(concat_axis_);
+  }
+  top[0]->Reshape(top_shape);
+  CHECK_EQ(bottom_count_sum, top[0]->count());
+}
+
+template <typename Dtype>
+void ConcatLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  int offset_concat_axis = 0;
+  const int top_concat_axis = top[0]->shape(concat_axis_);
+  for (int i = 0; i < bottom.size(); ++i) {
+    const Dtype* bottom_data = bottom[i]->cpu_data();
+    const int bottom_concat_axis = bottom[i]->shape(concat_axis_);
+    for (int n = 0; n < num_concats_; ++n) {
+      caffe_copy(bottom_concat_axis * concat_input_size_,
+          bottom_data + n * bottom_concat_axis * concat_input_size_,
+          top_data + (n * top_concat_axis + offset_concat_axis)
+              * concat_input_size_);
+    }
+    offset_concat_axis += bottom_concat_axis;
+  }
+}
+
+template <typename Dtype>
+void ConcatLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* top_diff = top[0]->cpu_diff();
+  int offset_concat_axis = 0;
+  const int top_concat_axis = top[0]->shape(concat_axis_);
+  for (int i = 0; i < bottom.size(); ++i) {
+    if (!propagate_down[i]) { continue; }
+    Dtype* bottom_diff = bottom[i]->mutable_cpu_diff();
+    const int bottom_concat_axis = bottom[i]->shape(concat_axis_);
+    for (int n = 0; n < num_concats_; ++n) {
+      caffe_copy(bottom_concat_axis * concat_input_size_, top_diff +
+          (n * top_concat_axis + offset_concat_axis) * concat_input_size_,
+          bottom_diff + n * bottom_concat_axis * concat_input_size_);
+    }
+    offset_concat_axis += bottom_concat_axis;
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(ConcatLayer);
+#endif
+
+INSTANTIATE_CLASS(ConcatLayer);
+REGISTER_LAYER_CLASS(Concat);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/concat_layer.cu b/src/caffe/layers/concat_layer.cu
new file mode 100644
index 0000000..8f2e85d
--- /dev/null
+++ b/src/caffe/layers/concat_layer.cu
@@ -0,0 +1,71 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+__global__ void Concat(const int nthreads, const Dtype* in_data,
+    const bool forward, const int num_concats, const int concat_size,
+    const int top_concat_axis, const int bottom_concat_axis,
+    const int offset_concat_axis, Dtype* out_data) {
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    const int total_concat_size = concat_size * bottom_concat_axis;
+    const int concat_num = index / total_concat_size;
+    const int concat_index = index % total_concat_size;
+    const int top_index = concat_index +
+        (concat_num * top_concat_axis + offset_concat_axis) * concat_size;
+    if (forward) {
+      out_data[top_index] = in_data[index];
+    } else {
+      out_data[index] = in_data[top_index];
+    }
+  }
+}
+
+template <typename Dtype>
+void ConcatLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  int offset_concat_axis = 0;
+  const int top_concat_axis = top[0]->shape(concat_axis_);
+  const bool kForward = true;
+  for (int i = 0; i < bottom.size(); ++i) {
+    const Dtype* bottom_data = bottom[i]->gpu_data();
+    const int bottom_concat_axis = bottom[i]->shape(concat_axis_);
+    const int bottom_concat_size = bottom_concat_axis * concat_input_size_;
+    const int nthreads = bottom_concat_size * num_concats_;
+    Concat<Dtype>  // NOLINT_NEXT_LINE(whitespace/operators)
+        <<<CAFFE_GET_BLOCKS(nthreads), CAFFE_CUDA_NUM_THREADS>>>(
+        nthreads, bottom_data, kForward, num_concats_, concat_input_size_,
+        top_concat_axis, bottom_concat_axis, offset_concat_axis, top_data);
+    offset_concat_axis += bottom_concat_axis;
+  }
+}
+
+template <typename Dtype>
+void ConcatLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* top_diff = top[0]->gpu_diff();
+  int offset_concat_axis = 0;
+  const int top_concat_axis = top[0]->shape(concat_axis_);
+  const bool kForward = false;
+  for (int i = 0; i < bottom.size(); ++i) {
+    if (!propagate_down[i]) { continue; }
+    Dtype* bottom_diff = bottom[i]->mutable_gpu_diff();
+    const int bottom_concat_axis = bottom[i]->shape(concat_axis_);
+    const int bottom_concat_size = bottom_concat_axis * concat_input_size_;
+    const int nthreads = bottom_concat_size * num_concats_;
+    Concat<Dtype>  // NOLINT_NEXT_LINE(whitespace/operators)
+        <<<CAFFE_GET_BLOCKS(nthreads), CAFFE_CUDA_NUM_THREADS>>>(
+        nthreads, top_diff, kForward, num_concats_, concat_input_size_,
+        top_concat_axis, bottom_concat_axis, offset_concat_axis, bottom_diff);
+    offset_concat_axis += bottom_concat_axis;
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(ConcatLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/contrastive_loss_layer.cpp b/src/caffe/layers/contrastive_loss_layer.cpp
new file mode 100644
index 0000000..25e1678
--- /dev/null
+++ b/src/caffe/layers/contrastive_loss_layer.cpp
@@ -0,0 +1,121 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/loss_layers.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void ContrastiveLossLayer<Dtype>::LayerSetUp(
+  const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  LossLayer<Dtype>::LayerSetUp(bottom, top);
+  CHECK_EQ(bottom[0]->channels(), bottom[1]->channels());
+  CHECK_EQ(bottom[0]->height(), 1);
+  CHECK_EQ(bottom[0]->width(), 1);
+  CHECK_EQ(bottom[1]->height(), 1);
+  CHECK_EQ(bottom[1]->width(), 1);
+  CHECK_EQ(bottom[2]->channels(), 1);
+  CHECK_EQ(bottom[2]->height(), 1);
+  CHECK_EQ(bottom[2]->width(), 1);
+  diff_.Reshape(bottom[0]->num(), bottom[0]->channels(), 1, 1);
+  diff_sq_.Reshape(bottom[0]->num(), bottom[0]->channels(), 1, 1);
+  dist_sq_.Reshape(bottom[0]->num(), 1, 1, 1);
+  // vector of ones used to sum along channels
+  summer_vec_.Reshape(bottom[0]->channels(), 1, 1, 1);
+  for (int i = 0; i < bottom[0]->channels(); ++i)
+    summer_vec_.mutable_cpu_data()[i] = Dtype(1);
+}
+
+template <typename Dtype>
+void ContrastiveLossLayer<Dtype>::Forward_cpu(
+    const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  int count = bottom[0]->count();
+  caffe_sub(
+      count,
+      bottom[0]->cpu_data(),  // a
+      bottom[1]->cpu_data(),  // b
+      diff_.mutable_cpu_data());  // a_i-b_i
+  const int channels = bottom[0]->channels();
+  Dtype margin = this->layer_param_.contrastive_loss_param().margin();
+  bool legacy_version =
+      this->layer_param_.contrastive_loss_param().legacy_version();
+  Dtype loss(0.0);
+  for (int i = 0; i < bottom[0]->num(); ++i) {
+    dist_sq_.mutable_cpu_data()[i] = caffe_cpu_dot(channels,
+        diff_.cpu_data() + (i*channels), diff_.cpu_data() + (i*channels));
+    if (static_cast<int>(bottom[2]->cpu_data()[i])) {  // similar pairs
+      loss += dist_sq_.cpu_data()[i];
+    } else {  // dissimilar pairs
+      if (legacy_version) {
+        loss += std::max(margin - dist_sq_.cpu_data()[i], Dtype(0.0));
+      } else {
+        Dtype dist = std::max(margin - sqrt(dist_sq_.cpu_data()[i]), 0.0);
+        loss += dist*dist;
+      }
+    }
+  }
+  loss = loss / static_cast<Dtype>(bottom[0]->num()) / Dtype(2);
+  top[0]->mutable_cpu_data()[0] = loss;
+}
+
+template <typename Dtype>
+void ContrastiveLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  Dtype margin = this->layer_param_.contrastive_loss_param().margin();
+  bool legacy_version =
+      this->layer_param_.contrastive_loss_param().legacy_version();
+  for (int i = 0; i < 2; ++i) {
+    if (propagate_down[i]) {
+      const Dtype sign = (i == 0) ? 1 : -1;
+      const Dtype alpha = sign * top[0]->cpu_diff()[0] /
+          static_cast<Dtype>(bottom[i]->num());
+      int num = bottom[i]->num();
+      int channels = bottom[i]->channels();
+      for (int j = 0; j < num; ++j) {
+        Dtype* bout = bottom[i]->mutable_cpu_diff();
+        if (static_cast<int>(bottom[2]->cpu_data()[j])) {  // similar pairs
+          caffe_cpu_axpby(
+              channels,
+              alpha,
+              diff_.cpu_data() + (j*channels),
+              Dtype(0.0),
+              bout + (j*channels));
+        } else {  // dissimilar pairs
+          Dtype mdist(0.0);
+          Dtype beta(0.0);
+          if (legacy_version) {
+            mdist = margin - dist_sq_.cpu_data()[j];
+            beta = -alpha;
+          } else {
+            Dtype dist = sqrt(dist_sq_.cpu_data()[j]);
+            mdist = margin - dist;
+            beta = -alpha * mdist / (dist + Dtype(1e-4));
+          }
+          if (mdist > Dtype(0.0)) {
+            caffe_cpu_axpby(
+                channels,
+                beta,
+                diff_.cpu_data() + (j*channels),
+                Dtype(0.0),
+                bout + (j*channels));
+          } else {
+            caffe_set(channels, Dtype(0), bout + (j*channels));
+          }
+        }
+      }
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(ContrastiveLossLayer);
+#endif
+
+INSTANTIATE_CLASS(ContrastiveLossLayer);
+REGISTER_LAYER_CLASS(ContrastiveLoss);
+
+}  // namespace caffe
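In the non-legacy path, the forward pass above evaluates the contrastive loss of Hadsell
et al. (2006): writing d_n = ||a_n - b_n||_2 for the n-th pair and y_n in {0, 1} for the
pair label,

    L = 1/(2N) * sum_n [ y_n * d_n^2 + (1 - y_n) * max(margin - d_n, 0)^2 ],

while the legacy_version branch penalizes dissimilar pairs with max(margin - d_n^2, 0)
instead, i.e. the hinge is applied to the squared distance and is not itself squared.
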
diff --git a/src/caffe/layers/contrastive_loss_layer.cu b/src/caffe/layers/contrastive_loss_layer.cu
new file mode 100644
index 0000000..9312393
--- /dev/null
+++ b/src/caffe/layers/contrastive_loss_layer.cu
@@ -0,0 +1,111 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void ContrastiveLossLayer<Dtype>::Forward_gpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  const int count = bottom[0]->count();
+  caffe_gpu_sub(
+      count,
+      bottom[0]->gpu_data(),  // a
+      bottom[1]->gpu_data(),  // b
+      diff_.mutable_gpu_data());  // a_i-b_i
+  caffe_gpu_powx(
+      count,
+      diff_.mutable_gpu_data(),  // a_i-b_i
+      Dtype(2),
+      diff_sq_.mutable_gpu_data());  // (a_i-b_i)^2
+  caffe_gpu_gemv(
+      CblasNoTrans,
+      bottom[0]->num(),
+      bottom[0]->channels(),
+      Dtype(1.0),
+      diff_sq_.gpu_data(),  // (a_i-b_i)^2
+      summer_vec_.gpu_data(),
+      Dtype(0.0),
+      dist_sq_.mutable_gpu_data());  // \Sum (a_i-b_i)^2
+  Dtype margin = this->layer_param_.contrastive_loss_param().margin();
+  bool legacy_version =
+      this->layer_param_.contrastive_loss_param().legacy_version();
+  Dtype loss(0.0);
+  for (int i = 0; i < bottom[0]->num(); ++i) {
+    if (static_cast<int>(bottom[2]->cpu_data()[i])) {  // similar pairs
+      loss += dist_sq_.cpu_data()[i];
+    } else {  // dissimilar pairs
+      if (legacy_version) {
+        loss += std::max(margin - dist_sq_.cpu_data()[i], Dtype(0.0));
+      } else {
+        Dtype dist = std::max(margin - sqrt(dist_sq_.cpu_data()[i]),
+                              Dtype(0.0));
+        loss += dist*dist;
+      }
+    }
+  }
+  loss = loss / static_cast<Dtype>(bottom[0]->num()) / Dtype(2);
+  top[0]->mutable_cpu_data()[0] = loss;
+}
+
+template <typename Dtype>
+__global__ void CLLBackward(const int count, const int channels,
+    const Dtype margin, const bool legacy_version, const Dtype alpha,
+    const Dtype* y, const Dtype* diff, const Dtype* dist_sq,
+    Dtype *bottom_diff) {
+  CUDA_KERNEL_LOOP(i, count) {
+    int n = i / channels;  // the num index, to access y and dist_sq
+    if (static_cast<int>(y[n])) {  // similar pairs
+      bottom_diff[i] = alpha * diff[i];
+    } else {  // dissimilar pairs
+      Dtype mdist(0.0);
+      Dtype beta(0.0);
+      if (legacy_version) {
+        mdist = (margin - dist_sq[n]);
+        beta = -alpha;
+      } else {
+        Dtype dist = sqrt(dist_sq[n]);
+        mdist = (margin - dist);
+        beta = -alpha * mdist / (dist + Dtype(1e-4)) * diff[i];
+      }
+      if (mdist > 0.0) {
+        bottom_diff[i] = beta;
+      } else {
+        bottom_diff[i] = 0;
+      }
+    }
+  }
+}
+
+template <typename Dtype>
+void ContrastiveLossLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  for (int i = 0; i < 2; ++i) {
+    if (propagate_down[i]) {
+      const int count = bottom[0]->count();
+      const int channels = bottom[0]->channels();
+      Dtype margin = this->layer_param_.contrastive_loss_param().margin();
+      const bool legacy_version =
+          this->layer_param_.contrastive_loss_param().legacy_version();
+      const Dtype sign = (i == 0) ? 1 : -1;
+      const Dtype alpha = sign * top[0]->cpu_diff()[0] /
+          static_cast<Dtype>(bottom[0]->num());
+      // NOLINT_NEXT_LINE(whitespace/operators)
+      CLLBackward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+          count, channels, margin, legacy_version, alpha,
+          bottom[2]->gpu_data(),  // pair similarity 0 or 1
+          diff_.gpu_data(),  // the cached eltwise difference between a and b
+          dist_sq_.gpu_data(),  // the cached square distance between a and b
+          bottom[i]->mutable_gpu_diff());
+      CUDA_POST_KERNEL_CHECK;
+    }
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(ContrastiveLossLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/conv_layer.cpp b/src/caffe/layers/conv_layer.cpp
new file mode 100644
index 0000000..928ef5e
--- /dev/null
+++ b/src/caffe/layers/conv_layer.cpp
@@ -0,0 +1,76 @@
+#include <vector>
+
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/im2col.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void ConvolutionLayer<Dtype>::compute_output_shape() {
+  this->height_out_ = (this->height_ + 2 * this->pad_h_ - this->kernel_h_)
+      / this->stride_h_ + 1;
+  this->width_out_ = (this->width_ + 2 * this->pad_w_ - this->kernel_w_)
+      / this->stride_w_ + 1;
+}
+
+template <typename Dtype>
+void ConvolutionLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const Dtype* weight = this->blobs_[0]->cpu_data();
+  for (int i = 0; i < bottom.size(); ++i) {
+    const Dtype* bottom_data = bottom[i]->cpu_data();
+    Dtype* top_data = top[i]->mutable_cpu_data();
+    for (int n = 0; n < this->num_; ++n) {
+      this->forward_cpu_gemm(bottom_data + bottom[i]->offset(n), weight,
+          top_data + top[i]->offset(n));
+      if (this->bias_term_) {
+        const Dtype* bias = this->blobs_[1]->cpu_data();
+        this->forward_cpu_bias(top_data + top[i]->offset(n), bias);
+      }
+    }
+  }
+}
+
+template <typename Dtype>
+void ConvolutionLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* weight = this->blobs_[0]->cpu_data();
+  Dtype* weight_diff = this->blobs_[0]->mutable_cpu_diff();
+  for (int i = 0; i < top.size(); ++i) {
+    const Dtype* top_diff = top[i]->cpu_diff();
+    const Dtype* bottom_data = bottom[i]->cpu_data();
+    Dtype* bottom_diff = bottom[i]->mutable_cpu_diff();
+    // Bias gradient, if necessary.
+    if (this->bias_term_ && this->param_propagate_down_[1]) {
+      Dtype* bias_diff = this->blobs_[1]->mutable_cpu_diff();
+      for (int n = 0; n < this->num_; ++n) {
+        this->backward_cpu_bias(bias_diff, top_diff + top[i]->offset(n));
+      }
+    }
+    if (this->param_propagate_down_[0] || propagate_down[i]) {
+      for (int n = 0; n < this->num_; ++n) {
+        // gradient w.r.t. weight. Note that we will accumulate diffs.
+        if (this->param_propagate_down_[0]) {
+          this->weight_cpu_gemm(bottom_data + bottom[i]->offset(n),
+              top_diff + top[i]->offset(n), weight_diff);
+        }
+        // gradient w.r.t. bottom data, if necessary.
+        if (propagate_down[i]) {
+          this->backward_cpu_gemm(top_diff + top[i]->offset(n), weight,
+              bottom_diff + bottom[i]->offset(n));
+        }
+      }
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(ConvolutionLayer);
+#endif
+
+INSTANTIATE_CLASS(ConvolutionLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/conv_layer.cu b/src/caffe/layers/conv_layer.cu
new file mode 100644
index 0000000..b8a98ff
--- /dev/null
+++ b/src/caffe/layers/conv_layer.cu
@@ -0,0 +1,64 @@
+#include <vector>
+
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/im2col.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void ConvolutionLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const Dtype* weight = this->blobs_[0]->gpu_data();
+  for (int i = 0; i < bottom.size(); ++i) {
+    const Dtype* bottom_data = bottom[i]->gpu_data();
+    Dtype* top_data = top[i]->mutable_gpu_data();
+    for (int n = 0; n < this->num_; ++n) {
+      this->forward_gpu_gemm(bottom_data + bottom[i]->offset(n), weight,
+          top_data + top[i]->offset(n));
+      if (this->bias_term_) {
+        const Dtype* bias = this->blobs_[1]->gpu_data();
+        this->forward_gpu_bias(top_data + top[i]->offset(n), bias);
+      }
+    }
+  }
+}
+
+template <typename Dtype>
+void ConvolutionLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* weight = this->blobs_[0]->gpu_data();
+  Dtype* weight_diff = this->blobs_[0]->mutable_gpu_diff();
+  for (int i = 0; i < top.size(); ++i) {
+    const Dtype* top_diff = top[i]->gpu_diff();
+    // Bias gradient, if necessary.
+    if (this->bias_term_ && this->param_propagate_down_[1]) {
+      Dtype* bias_diff = this->blobs_[1]->mutable_gpu_diff();
+      for (int n = 0; n < this->num_; ++n) {
+        this->backward_gpu_bias(bias_diff, top_diff + top[i]->offset(n));
+      }
+    }
+    if (this->param_propagate_down_[0] || propagate_down[i]) {
+      const Dtype* bottom_data = bottom[i]->gpu_data();
+      Dtype* bottom_diff = bottom[i]->mutable_gpu_diff();
+      for (int n = 0; n < this->num_; ++n) {
+        // gradient w.r.t. weight. Note that we will accumulate diffs.
+        if (this->param_propagate_down_[0]) {
+          this->weight_gpu_gemm(bottom_data + bottom[i]->offset(n),
+              top_diff + top[i]->offset(n), weight_diff);
+        }
+        // gradient w.r.t. bottom data, if necessary.
+        if (propagate_down[i]) {
+          this->backward_gpu_gemm(top_diff + top[i]->offset(n), weight,
+              bottom_diff + bottom[i]->offset(n));
+        }
+      }
+    }
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(ConvolutionLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/cudnn_conv_layer.cpp b/src/caffe/layers/cudnn_conv_layer.cpp
new file mode 100644
index 0000000..104d2b9
--- /dev/null
+++ b/src/caffe/layers/cudnn_conv_layer.cpp
@@ -0,0 +1,130 @@
+#ifdef USE_CUDNN
+#include <vector>
+
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/im2col.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+// Set to three for the benefit of the backward pass, which can use separate
+// streams to compute the gradients w.r.t. bias, filter weights, and bottom
+// data for each group independently.
+#define CUDNN_STREAMS_PER_GROUP 3
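+// With this layout, the backward pass uses handle_[0 * group_ + g] for the
+// bias gradient, handle_[1 * group_ + g] for the filter gradient, and
+// handle_[2 * group_ + g] for the bottom-data gradient of group g (see
+// Backward_gpu in cudnn_conv_layer.cu).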
+
+/**
+ * TODO(dox) explain cuDNN interface
+ */
+template <typename Dtype>
+void CuDNNConvolutionLayer<Dtype>::LayerSetUp(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  ConvolutionLayer<Dtype>::LayerSetUp(bottom, top);
+  // Initialize CUDA streams and cuDNN.
+  stream_         = new cudaStream_t[this->group_ * CUDNN_STREAMS_PER_GROUP];
+  handle_         = new cudnnHandle_t[this->group_ * CUDNN_STREAMS_PER_GROUP];
+  workspaceSizeInBytes = 0;
+  workspace = NULL;
+
+  for (int g = 0; g < this->group_ * CUDNN_STREAMS_PER_GROUP; g++) {
+    CUDA_CHECK(cudaStreamCreate(&stream_[g]));
+    CUDNN_CHECK(cudnnCreate(&handle_[g]));
+    CUDNN_CHECK(cudnnSetStream(handle_[g], stream_[g]));
+  }
+
+  // Set the indexing parameters.
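+  // weight_offset_ counts the filter weights of one group, so adding
+  // weight_offset_ * g to the weight pointer selects group g's filters;
+  // bias_offset_ plays the same role for the bias.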
+  weight_offset_ = (this->num_output_ / this->group_)
+      * (this->channels_ / this->group_) * this->kernel_h_ * this->kernel_w_;
+  bias_offset_ = (this->num_output_ / this->group_);
+
+  // Create filter descriptor.
+  cudnn::createFilterDesc<Dtype>(&filter_desc_,
+      this->num_output_ / this->group_, this->channels_ / this->group_,
+      this->kernel_h_, this->kernel_w_);
+
+  // Create tensor descriptor(s) for data and corresponding convolution(s).
+  for (int i = 0; i < bottom.size(); i++) {
+    cudnnTensorDescriptor_t bottom_desc;
+    cudnn::createTensor4dDesc<Dtype>(&bottom_desc);
+    bottom_descs_.push_back(bottom_desc);
+    cudnnTensorDescriptor_t top_desc;
+    cudnn::createTensor4dDesc<Dtype>(&top_desc);
+    top_descs_.push_back(top_desc);
+    cudnnConvolutionDescriptor_t conv_desc;
+    cudnn::createConvolutionDesc<Dtype>(&conv_desc);
+    conv_descs_.push_back(conv_desc);
+  }
+
+  // Tensor descriptor for bias.
+  if (this->bias_term_) {
+    cudnn::createTensor4dDesc<Dtype>(&bias_desc_);
+  }
+
+  handles_setup_ = true;
+}
+
+template <typename Dtype>
+void CuDNNConvolutionLayer<Dtype>::Reshape(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  ConvolutionLayer<Dtype>::Reshape(bottom, top);
+  bottom_offset_ = (this->channels_ / this->group_)
+      * this->height_ * this->width_;
+  top_offset_ = (this->num_output_ / this->group_)
+      * this->height_out_ * this->width_out_;
+
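+  // The descriptors below describe one group's slice of each blob: the
+  // shapes use channels_ / group_ (or num_output_ / group_), while the
+  // strides are those of the full N x C x H x W blob, so offsetting the
+  // data pointer by bottom_offset_ * g or top_offset_ * g addresses group g.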
+  for (int i = 0; i < bottom.size(); i++) {
+    cudnn::setTensor4dDesc<Dtype>(&bottom_descs_[i],
+        this->num_,
+        this->channels_ / this->group_,
+        this->height_, this->width_,
+        this->channels_ * this->height_ * this->width_,
+        this->height_ * this->width_,
+        this->width_, 1);
+    cudnn::setTensor4dDesc<Dtype>(&top_descs_[i],
+        this->num_,
+        this->num_output_ / this->group_,
+        this->height_out_, this->width_out_,
+        this->num_output_ * this->height_out_ * this->width_out_,
+        this->height_out_ * this->width_out_,
+        this->width_out_, 1);
+    cudnn::setConvolutionDesc<Dtype>(&conv_descs_[i], bottom_descs_[i],
+        filter_desc_, this->pad_h_, this->pad_w_,
+        this->stride_h_, this->stride_w_);
+  }
+
+  // Tensor descriptor for bias.
+  if (this->bias_term_) {
+    cudnn::setTensor4dDesc<Dtype>(&bias_desc_,
+        1, this->num_output_ / this->group_, 1, 1);
+  }
+}
+
+template <typename Dtype>
+CuDNNConvolutionLayer<Dtype>::~CuDNNConvolutionLayer() {
+  // Check that handles have been set up before destroying.
+  if (!handles_setup_) { return; }
+
+  for (int i = 0; i < bottom_descs_.size(); i++) {
+    cudnnDestroyTensorDescriptor(bottom_descs_[i]);
+    cudnnDestroyTensorDescriptor(top_descs_[i]);
+    cudnnDestroyConvolutionDescriptor(conv_descs_[i]);
+  }
+  if (this->bias_term_) {
+    cudnnDestroyTensorDescriptor(bias_desc_);
+  }
+  cudnnDestroyFilterDescriptor(filter_desc_);
+
+  for (int g = 0; g < this->group_ * CUDNN_STREAMS_PER_GROUP; g++) {
+    cudaStreamDestroy(stream_[g]);
+    cudnnDestroy(handle_[g]);
+  }
+
+  delete [] stream_;
+  delete [] handle_;
+}
+
+INSTANTIATE_CLASS(CuDNNConvolutionLayer);
+
+}   // namespace caffe
+#endif
diff --git a/src/caffe/layers/cudnn_conv_layer.cu b/src/caffe/layers/cudnn_conv_layer.cu
new file mode 100644
index 0000000..b4e802e
--- /dev/null
+++ b/src/caffe/layers/cudnn_conv_layer.cu
@@ -0,0 +1,160 @@
+#ifdef USE_CUDNN
+#include <vector>
+
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/im2col.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+__global__ void sync_conv_groups() { }
+
+template <typename Dtype>
+void CuDNNConvolutionLayer<Dtype>::Forward_gpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  for (int i = 0; i < bottom.size(); ++i) {
+    const Dtype* bottom_data = bottom[i]->gpu_data();
+    Dtype* top_data = top[i]->mutable_gpu_data();
+    const Dtype* weight = this->blobs_[0]->gpu_data();
+
+    size_t workspace_limit_bytes = this->kernel_h_ *
+                                   this->kernel_w_ *
+                                   this->channels_ *
+                                   sizeof(int) + 1;
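+    // A small, fixed workspace limit: one int per filter element across all
+    // input channels. For a hypothetical 3x3 kernel over 256 channels with
+    // a 4-byte int this allows 3 * 3 * 256 * 4 + 1 = 9217 bytes.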
+
+    // Forward through cuDNN in parallel over groups.
+    for (int g = 0; g < this->group_; g++) {
+      cudnnConvolutionFwdAlgo_t algo;
+
+      // pick the convolution algorithm
+      // TODO(shelhamer) this should be done during reshape
+      // TODO(shelhamer) the choice of automatic or manual algorithm picking
+      // should be exposed in proto
+      CUDNN_CHECK(cudnnGetConvolutionForwardAlgorithm(handle_[g],
+        bottom_descs_[i],
+        filter_desc_,
+        conv_descs_[i],
+        top_descs_[i],
+        CUDNN_CONVOLUTION_FWD_SPECIFY_WORKSPACE_LIMIT,
+        workspace_limit_bytes,  // memoryLimitInBytes,
+        &algo));
+
+      // get minimum size of the workspace needed for the desired algorithm
+      size_t workspaceSizeInBytes_temp = 0;
+
+      CUDNN_CHECK(cudnnGetConvolutionForwardWorkspaceSize(handle_[g],
+        bottom_descs_[i],
+        filter_desc_,
+        conv_descs_[i],
+        top_descs_[i],
+        algo,
+        &workspaceSizeInBytes_temp));
+
+      if (workspaceSizeInBytes_temp > workspaceSizeInBytes) {
+        workspaceSizeInBytes = workspaceSizeInBytes_temp;
+        // free the existing workspace and allocate a new (larger) one
+        cudaFree(this->workspace);
+        cudaError_t err = cudaMalloc(&(this->workspace), workspaceSizeInBytes);
+        if (err != cudaSuccess) {
+          // fall back to the implicit GEMM algorithm, which needs no workspace
+          algo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_GEMM;
+          workspace = NULL;
+          workspaceSizeInBytes = 0;
+        }
+      }
+
+      // Filters.
+      CUDNN_CHECK(cudnnConvolutionForward(handle_[g],
+            cudnn::dataType<Dtype>::one,
+            bottom_descs_[i], bottom_data + bottom_offset_ * g,
+            filter_desc_, weight + weight_offset_ * g,
+            conv_descs_[i],
+            algo, workspace, workspaceSizeInBytes,
+            cudnn::dataType<Dtype>::zero,
+            top_descs_[i], top_data + top_offset_ * g));
+
+      // Bias.
+      if (this->bias_term_) {
+        const Dtype* bias_data = this->blobs_[1]->gpu_data();
+        CUDNN_CHECK(cudnnAddTensor(handle_[g], CUDNN_ADD_SAME_C,
+              cudnn::dataType<Dtype>::one,
+              bias_desc_, bias_data + bias_offset_ * g,
+              cudnn::dataType<Dtype>::one,
+              top_descs_[i], top_data + top_offset_ * g));
+      }
+    }
+
+    // Synchronize the work across groups, each of which went into its own
+    // stream, by launching an empty kernel into the default (null) stream.
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    sync_conv_groups<<<1, 1>>>();
+  }
+}
+
+template <typename Dtype>
+void CuDNNConvolutionLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* weight = NULL;
+  Dtype* weight_diff = NULL;
+  if (this->param_propagate_down_[0]) {
+    weight = this->blobs_[0]->gpu_data();
+    weight_diff = this->blobs_[0]->mutable_gpu_diff();
+  }
+  Dtype* bias_diff = NULL;
+  if (this->bias_term_ && this->param_propagate_down_[1]) {
+    bias_diff = this->blobs_[1]->mutable_gpu_diff();
+  }
+  for (int i = 0; i < top.size(); ++i) {
+    const Dtype* top_diff = top[i]->gpu_diff();
+    // Backward through cuDNN in parallel over groups and gradients.
+    for (int g = 0; g < this->group_; g++) {
+      // Gradient w.r.t. bias.
+      if (this->bias_term_ && this->param_propagate_down_[1]) {
+        CUDNN_CHECK(cudnnConvolutionBackwardBias(handle_[0*this->group_ + g],
+              cudnn::dataType<Dtype>::one,
+              top_descs_[i],  top_diff + top_offset_ * g,
+              cudnn::dataType<Dtype>::one,
+              bias_desc_, bias_diff + bias_offset_ * g));
+      }
+
+      // Gradient w.r.t. weights.
+      if (this->param_propagate_down_[0]) {
+        const Dtype* bottom_data = bottom[i]->gpu_data();
+        CUDNN_CHECK(cudnnConvolutionBackwardFilter(handle_[1*this->group_ + g],
+              cudnn::dataType<Dtype>::one,
+              bottom_descs_[i], bottom_data + bottom_offset_ * g,
+              top_descs_[i],    top_diff + top_offset_ * g,
+              conv_descs_[i],
+              cudnn::dataType<Dtype>::one,
+              filter_desc_, weight_diff + weight_offset_ * g));
+      }
+
+      // Gradient w.r.t. bottom data.
+      if (propagate_down[i]) {
+        if (weight == NULL) {
+          weight = this->blobs_[0]->gpu_data();
+        }
+        Dtype* bottom_diff = bottom[i]->mutable_gpu_diff();
+        CUDNN_CHECK(cudnnConvolutionBackwardData(handle_[2*this->group_ + g],
+              cudnn::dataType<Dtype>::one,
+              filter_desc_, weight + weight_offset_ * g,
+              top_descs_[i], top_diff + top_offset_ * g,
+              conv_descs_[i],
+              cudnn::dataType<Dtype>::zero,
+              bottom_descs_[i], bottom_diff + bottom_offset_ * g));
+      }
+    }
+
+    // Synchronize the work across groups, each of which went into its own
+    // stream, by launching an empty kernel into the default (null) stream.
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    sync_conv_groups<<<1, 1>>>();
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(CuDNNConvolutionLayer);
+
+}  // namespace caffe
+#endif
diff --git a/src/caffe/layers/cudnn_pooling_layer.cpp b/src/caffe/layers/cudnn_pooling_layer.cpp
new file mode 100644
index 0000000..c92c4e4
--- /dev/null
+++ b/src/caffe/layers/cudnn_pooling_layer.cpp
@@ -0,0 +1,50 @@
+#ifdef USE_CUDNN
+#include <vector>
+
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/im2col.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void CuDNNPoolingLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  PoolingLayer<Dtype>::LayerSetUp(bottom, top);
+  CUDNN_CHECK(cudnnCreate(&handle_));
+  cudnn::createTensor4dDesc<Dtype>(&bottom_desc_);
+  cudnn::createTensor4dDesc<Dtype>(&top_desc_);
+  cudnn::createPoolingDesc<Dtype>(&pooling_desc_,
+      this->layer_param_.pooling_param().pool(), &mode_,
+      this->kernel_h_, this->kernel_w_, this->pad_h_, this->pad_w_,
+      this->stride_h_, this->stride_w_);
+  handles_setup_ = true;
+}
+
+template <typename Dtype>
+void CuDNNPoolingLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  PoolingLayer<Dtype>::Reshape(bottom, top);
+  cudnn::setTensor4dDesc<Dtype>(&bottom_desc_, bottom[0]->num(),
+      this->channels_, this->height_, this->width_);
+  cudnn::setTensor4dDesc<Dtype>(&top_desc_, bottom[0]->num(),
+      this->channels_, this->pooled_height_, this->pooled_width_);
+}
+
+template <typename Dtype>
+CuDNNPoolingLayer<Dtype>::~CuDNNPoolingLayer() {
+  // Check that handles have been set up before destroying.
+  if (!handles_setup_) { return; }
+
+  cudnnDestroyTensorDescriptor(bottom_desc_);
+  cudnnDestroyTensorDescriptor(top_desc_);
+  cudnnDestroyPoolingDescriptor(pooling_desc_);
+  cudnnDestroy(handle_);
+}
+
+INSTANTIATE_CLASS(CuDNNPoolingLayer);
+
+}   // namespace caffe
+#endif
diff --git a/src/caffe/layers/cudnn_pooling_layer.cu b/src/caffe/layers/cudnn_pooling_layer.cu
new file mode 100644
index 0000000..a952b85
--- /dev/null
+++ b/src/caffe/layers/cudnn_pooling_layer.cu
@@ -0,0 +1,45 @@
+#ifdef USE_CUDNN
+#include <vector>
+
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/im2col.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void CuDNNPoolingLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  CUDNN_CHECK(cudnnPoolingForward(handle_, pooling_desc_,
+        cudnn::dataType<Dtype>::one,
+        bottom_desc_, bottom_data,
+        cudnn::dataType<Dtype>::zero,
+        top_desc_, top_data));
+}
+
+template <typename Dtype>
+void CuDNNPoolingLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) {
+    return;
+  }
+  const Dtype* top_diff = top[0]->gpu_diff();
+  const Dtype* top_data = top[0]->gpu_data();
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+  CUDNN_CHECK(cudnnPoolingBackward(handle_, pooling_desc_,
+        cudnn::dataType<Dtype>::one,
+        top_desc_, top_data, top_desc_, top_diff,
+        bottom_desc_, bottom_data,
+        cudnn::dataType<Dtype>::zero,
+        bottom_desc_, bottom_diff));
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(CuDNNPoolingLayer);
+
+}  // namespace caffe
+#endif
diff --git a/src/caffe/layers/cudnn_relu_layer.cpp b/src/caffe/layers/cudnn_relu_layer.cpp
new file mode 100644
index 0000000..759d839
--- /dev/null
+++ b/src/caffe/layers/cudnn_relu_layer.cpp
@@ -0,0 +1,46 @@
+#ifdef USE_CUDNN
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void CuDNNReLULayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  ReLULayer<Dtype>::LayerSetUp(bottom, top);
+  // initialize cuDNN
+  CUDNN_CHECK(cudnnCreate(&handle_));
+  cudnn::createTensor4dDesc<Dtype>(&bottom_desc_);
+  cudnn::createTensor4dDesc<Dtype>(&top_desc_);
+  handles_setup_ = true;
+}
+
+template <typename Dtype>
+void CuDNNReLULayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  ReLULayer<Dtype>::Reshape(bottom, top);
+  const int N = bottom[0]->num();
+  const int K = bottom[0]->channels();
+  const int H = bottom[0]->height();
+  const int W = bottom[0]->width();
+  cudnn::setTensor4dDesc<Dtype>(&bottom_desc_, N, K, H, W);
+  cudnn::setTensor4dDesc<Dtype>(&top_desc_, N, K, H, W);
+}
+
+template <typename Dtype>
+CuDNNReLULayer<Dtype>::~CuDNNReLULayer() {
+  // Check that handles have been set up before destroying.
+  if (!handles_setup_) { return; }
+
+  cudnnDestroyTensorDescriptor(this->bottom_desc_);
+  cudnnDestroyTensorDescriptor(this->top_desc_);
+  cudnnDestroy(this->handle_);
+}
+
+INSTANTIATE_CLASS(CuDNNReLULayer);
+
+}  // namespace caffe
+#endif
diff --git a/src/caffe/layers/cudnn_relu_layer.cu b/src/caffe/layers/cudnn_relu_layer.cu
new file mode 100644
index 0000000..21d1485
--- /dev/null
+++ b/src/caffe/layers/cudnn_relu_layer.cu
@@ -0,0 +1,57 @@
+#ifdef USE_CUDNN
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void CuDNNReLULayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  // Fall back to standard Caffe for leaky ReLU.
+  if (ReLULayer<Dtype>::layer_param_.relu_param().negative_slope() != 0) {
+    return ReLULayer<Dtype>::Forward_gpu(bottom, top);
+  }
+
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  CUDNN_CHECK(cudnnActivationForward(this->handle_,
+        CUDNN_ACTIVATION_RELU,
+        cudnn::dataType<Dtype>::one,
+        this->bottom_desc_, bottom_data,
+        cudnn::dataType<Dtype>::zero,
+        this->top_desc_, top_data));
+}
+
+template <typename Dtype>
+void CuDNNReLULayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) {
+    return;
+  }
+
+  // Fall back to standard Caffe for leaky ReLU.
+  if (ReLULayer<Dtype>::layer_param_.relu_param().negative_slope() != 0) {
+    return ReLULayer<Dtype>::Backward_gpu(top, propagate_down, bottom);
+  }
+
+  const Dtype* top_data = top[0]->gpu_data();
+  const Dtype* top_diff = top[0]->gpu_diff();
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+  CUDNN_CHECK(cudnnActivationBackward(this->handle_,
+        CUDNN_ACTIVATION_RELU,
+        cudnn::dataType<Dtype>::one,
+        this->top_desc_, top_data, this->top_desc_, top_diff,
+        this->bottom_desc_, bottom_data,
+        cudnn::dataType<Dtype>::zero,
+        this->bottom_desc_, bottom_diff));
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(CuDNNReLULayer);
+
+}  // namespace caffe
+#endif
diff --git a/src/caffe/layers/cudnn_sigmoid_layer.cpp b/src/caffe/layers/cudnn_sigmoid_layer.cpp
new file mode 100644
index 0000000..3263787
--- /dev/null
+++ b/src/caffe/layers/cudnn_sigmoid_layer.cpp
@@ -0,0 +1,46 @@
+#ifdef USE_CUDNN
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void CuDNNSigmoidLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  SigmoidLayer<Dtype>::LayerSetUp(bottom, top);
+  // initialize cuDNN
+  CUDNN_CHECK(cudnnCreate(&handle_));
+  cudnn::createTensor4dDesc<Dtype>(&bottom_desc_);
+  cudnn::createTensor4dDesc<Dtype>(&top_desc_);
+  handles_setup_ = true;
+}
+
+template <typename Dtype>
+void CuDNNSigmoidLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  SigmoidLayer<Dtype>::Reshape(bottom, top);
+  const int N = bottom[0]->num();
+  const int K = bottom[0]->channels();
+  const int H = bottom[0]->height();
+  const int W = bottom[0]->width();
+  cudnn::setTensor4dDesc<Dtype>(&bottom_desc_, N, K, H, W);
+  cudnn::setTensor4dDesc<Dtype>(&top_desc_, N, K, H, W);
+}
+
+template <typename Dtype>
+CuDNNSigmoidLayer<Dtype>::~CuDNNSigmoidLayer() {
+  // Check that handles have been set up before destroying.
+  if (!handles_setup_) { return; }
+
+  cudnnDestroyTensorDescriptor(this->bottom_desc_);
+  cudnnDestroyTensorDescriptor(this->top_desc_);
+  cudnnDestroy(this->handle_);
+}
+
+INSTANTIATE_CLASS(CuDNNSigmoidLayer);
+
+}  // namespace caffe
+#endif
diff --git a/src/caffe/layers/cudnn_sigmoid_layer.cu b/src/caffe/layers/cudnn_sigmoid_layer.cu
new file mode 100644
index 0000000..7a06cf7
--- /dev/null
+++ b/src/caffe/layers/cudnn_sigmoid_layer.cu
@@ -0,0 +1,47 @@
+#ifdef USE_CUDNN
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void CuDNNSigmoidLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  CUDNN_CHECK(cudnnActivationForward(this->handle_,
+        CUDNN_ACTIVATION_SIGMOID,
+        cudnn::dataType<Dtype>::one,
+        this->bottom_desc_, bottom_data,
+        cudnn::dataType<Dtype>::zero,
+        this->top_desc_, top_data));
+}
+
+template <typename Dtype>
+void CuDNNSigmoidLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) {
+    return;
+  }
+
+  const Dtype* top_data = top[0]->gpu_data();
+  const Dtype* top_diff = top[0]->gpu_diff();
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+  CUDNN_CHECK(cudnnActivationBackward(this->handle_,
+        CUDNN_ACTIVATION_SIGMOID,
+        cudnn::dataType<Dtype>::one,
+        this->top_desc_, top_data, this->top_desc_, top_diff,
+        this->bottom_desc_, bottom_data,
+        cudnn::dataType<Dtype>::zero,
+        this->bottom_desc_, bottom_diff));
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(CuDNNSigmoidLayer);
+
+}  // namespace caffe
+#endif
diff --git a/src/caffe/layers/cudnn_softmax_layer.cpp b/src/caffe/layers/cudnn_softmax_layer.cpp
new file mode 100644
index 0000000..77a3225
--- /dev/null
+++ b/src/caffe/layers/cudnn_softmax_layer.cpp
@@ -0,0 +1,50 @@
+#ifdef USE_CUDNN
+#include <algorithm>
+#include <cfloat>
+#include <vector>
+
+#include "thrust/device_vector.h"
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void CuDNNSoftmaxLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  SoftmaxLayer<Dtype>::LayerSetUp(bottom, top);
+  // Initialize CUDNN.
+  CUDNN_CHECK(cudnnCreate(&handle_));
+  cudnn::createTensor4dDesc<Dtype>(&bottom_desc_);
+  cudnn::createTensor4dDesc<Dtype>(&top_desc_);
+  handles_setup_ = true;
+}
+
+template <typename Dtype>
+void CuDNNSoftmaxLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  SoftmaxLayer<Dtype>::Reshape(bottom, top);
+  int N = this->outer_num_;
+  int K = bottom[0]->shape(this->softmax_axis_);
+  int H = this->inner_num_;
+  int W = 1;
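+  // Map the softmax input onto a 4-D cuDNN tensor: outer_num_ acts as the
+  // batch dimension, the softmax axis as channels, and inner_num_ as the
+  // spatial extent, so CUDNN_SOFTMAX_MODE_CHANNEL normalizes along the
+  // softmax axis.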
+  cudnn::setTensor4dDesc<Dtype>(&bottom_desc_, N, K, H, W);
+  cudnn::setTensor4dDesc<Dtype>(&top_desc_, N, K, H, W);
+}
+
+template <typename Dtype>
+CuDNNSoftmaxLayer<Dtype>::~CuDNNSoftmaxLayer() {
+  // Check that handles have been set up before destroying.
+  if (!handles_setup_) { return; }
+
+  cudnnDestroyTensorDescriptor(bottom_desc_);
+  cudnnDestroyTensorDescriptor(top_desc_);
+  cudnnDestroy(handle_);
+}
+
+INSTANTIATE_CLASS(CuDNNSoftmaxLayer);
+
+}  // namespace caffe
+#endif
diff --git a/src/caffe/layers/cudnn_softmax_layer.cu b/src/caffe/layers/cudnn_softmax_layer.cu
new file mode 100644
index 0000000..a9e2fce
--- /dev/null
+++ b/src/caffe/layers/cudnn_softmax_layer.cu
@@ -0,0 +1,48 @@
+#ifdef USE_CUDNN
+#include <algorithm>
+#include <cfloat>
+#include <vector>
+
+#include "thrust/device_vector.h"
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void CuDNNSoftmaxLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  CUDNN_CHECK(cudnnSoftmaxForward(handle_, CUDNN_SOFTMAX_ACCURATE,
+        CUDNN_SOFTMAX_MODE_CHANNEL,
+        cudnn::dataType<Dtype>::one,
+        bottom_desc_, bottom_data,
+        cudnn::dataType<Dtype>::zero,
+        top_desc_, top_data));
+}
+
+template <typename Dtype>
+void CuDNNSoftmaxLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    const Dtype* top_data = top[0]->gpu_data();
+    const Dtype* top_diff = top[0]->gpu_diff();
+    const Dtype* bottom_data = bottom[0]->gpu_data();
+    Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+
+    CUDNN_CHECK(cudnnSoftmaxBackward(handle_, CUDNN_SOFTMAX_ACCURATE,
+          CUDNN_SOFTMAX_MODE_CHANNEL,
+          cudnn::dataType<Dtype>::one,
+          top_desc_, top_data, top_desc_, top_diff,
+          cudnn::dataType<Dtype>::zero,
+          bottom_desc_, bottom_diff));
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(CuDNNSoftmaxLayer);
+
+}  // namespace caffe
+#endif
diff --git a/src/caffe/layers/cudnn_tanh_layer.cpp b/src/caffe/layers/cudnn_tanh_layer.cpp
new file mode 100644
index 0000000..376faad
--- /dev/null
+++ b/src/caffe/layers/cudnn_tanh_layer.cpp
@@ -0,0 +1,46 @@
+#ifdef USE_CUDNN
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void CuDNNTanHLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  TanHLayer<Dtype>::LayerSetUp(bottom, top);
+  // initialize cuDNN
+  CUDNN_CHECK(cudnnCreate(&handle_));
+  cudnn::createTensor4dDesc<Dtype>(&bottom_desc_);
+  cudnn::createTensor4dDesc<Dtype>(&top_desc_);
+  handles_setup_ = true;
+}
+
+template <typename Dtype>
+void CuDNNTanHLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  TanHLayer<Dtype>::Reshape(bottom, top);
+  const int N = bottom[0]->num();
+  const int K = bottom[0]->channels();
+  const int H = bottom[0]->height();
+  const int W = bottom[0]->width();
+  cudnn::setTensor4dDesc<Dtype>(&bottom_desc_, N, K, H, W);
+  cudnn::setTensor4dDesc<Dtype>(&top_desc_, N, K, H, W);
+}
+
+template <typename Dtype>
+CuDNNTanHLayer<Dtype>::~CuDNNTanHLayer() {
+  // Check that handles have been set up before destroying.
+  if (!handles_setup_) { return; }
+
+  cudnnDestroyTensorDescriptor(this->bottom_desc_);
+  cudnnDestroyTensorDescriptor(this->top_desc_);
+  cudnnDestroy(this->handle_);
+}
+
+INSTANTIATE_CLASS(CuDNNTanHLayer);
+
+}  // namespace caffe
+#endif
diff --git a/src/caffe/layers/cudnn_tanh_layer.cu b/src/caffe/layers/cudnn_tanh_layer.cu
new file mode 100644
index 0000000..d287f6f
--- /dev/null
+++ b/src/caffe/layers/cudnn_tanh_layer.cu
@@ -0,0 +1,48 @@
+#ifdef USE_CUDNN
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void CuDNNTanHLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  CUDNN_CHECK(cudnnActivationForward(this->handle_,
+        CUDNN_ACTIVATION_TANH,
+        cudnn::dataType<Dtype>::one,
+        this->bottom_desc_, bottom_data,
+        cudnn::dataType<Dtype>::zero,
+        this->top_desc_, top_data));
+}
+
+template <typename Dtype>
+void CuDNNTanHLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) {
+    return;
+  }
+
+  const Dtype* top_data = top[0]->gpu_data();
+  const Dtype* top_diff = top[0]->gpu_diff();
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+
+  CUDNN_CHECK(cudnnActivationBackward(this->handle_,
+        CUDNN_ACTIVATION_TANH,
+        cudnn::dataType<Dtype>::one,
+        this->top_desc_, top_data, this->top_desc_, top_diff,
+        this->bottom_desc_, bottom_data,
+        cudnn::dataType<Dtype>::zero,
+        this->bottom_desc_, bottom_diff));
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(CuDNNTanHLayer);
+
+}  // namespace caffe
+#endif
diff --git a/src/caffe/layers/data_layer.cpp b/src/caffe/layers/data_layer.cpp
new file mode 100644
index 0000000..161a75e
--- /dev/null
+++ b/src/caffe/layers/data_layer.cpp
@@ -0,0 +1,126 @@
+#include <opencv2/core/core.hpp>
+
+#include <stdint.h>
+
+#include <string>
+#include <vector>
+
+#include "caffe/common.hpp"
+#include "caffe/data_layers.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/benchmark.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/util/rng.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+DataLayer<Dtype>::~DataLayer<Dtype>() {
+  this->JoinPrefetchThread();
+}
+
+template <typename Dtype>
+void DataLayer<Dtype>::DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  // Initialize DB
+  db_.reset(db::GetDB(this->layer_param_.data_param().backend()));
+  db_->Open(this->layer_param_.data_param().source(), db::READ);
+  cursor_.reset(db_->NewCursor());
+
+  // Check if we should randomly skip a few data points
+  if (this->layer_param_.data_param().rand_skip()) {
+    unsigned int skip = caffe_rng_rand() %
+                        this->layer_param_.data_param().rand_skip();
+    LOG(INFO) << "Skipping first " << skip << " data points.";
+    while (skip-- > 0) {
+      cursor_->Next();
+    }
+  }
+  // Read a data point to initialize the prefetch and top blobs.
+  Datum datum;
+  datum.ParseFromString(cursor_->value());
+  // Use data_transformer to infer the expected blob shape from datum.
+  vector<int> top_shape = this->data_transformer_->InferBlobShape(datum);
+  this->transformed_data_.Reshape(top_shape);
+  // Reshape top[0] and prefetch_data according to the batch_size.
+  top_shape[0] = this->layer_param_.data_param().batch_size();
+  this->prefetch_data_.Reshape(top_shape);
+  top[0]->ReshapeLike(this->prefetch_data_);
+
+  LOG(INFO) << "output data size: " << top[0]->num() << ","
+      << top[0]->channels() << "," << top[0]->height() << ","
+      << top[0]->width();
+  // label
+  if (this->output_labels_) {
+    vector<int> label_shape(1, this->layer_param_.data_param().batch_size());
+    top[1]->Reshape(label_shape);
+    this->prefetch_label_.Reshape(label_shape);
+  }
+}
+
+// This function is run by the prefetch thread to load the next batch of data.
+template <typename Dtype>
+void DataLayer<Dtype>::InternalThreadEntry() {
+  CPUTimer batch_timer;
+  batch_timer.Start();
+  double read_time = 0;
+  double trans_time = 0;
+  CPUTimer timer;
+  CHECK(this->prefetch_data_.count());
+  CHECK(this->transformed_data_.count());
+
+  // Reshape according to the first datum of each batch; on single-input
+  // batches this allows inputs of varying dimension.
+  const int batch_size = this->layer_param_.data_param().batch_size();
+  Datum datum;
+  datum.ParseFromString(cursor_->value());
+  // Use data_transformer to infer the expected blob shape from datum.
+  vector<int> top_shape = this->data_transformer_->InferBlobShape(datum);
+  this->transformed_data_.Reshape(top_shape);
+  // Reshape prefetch_data according to the batch_size.
+  top_shape[0] = batch_size;
+  this->prefetch_data_.Reshape(top_shape);
+
+  Dtype* top_data = this->prefetch_data_.mutable_cpu_data();
+  Dtype* top_label = NULL;  // suppress warnings about uninitialized variables
+
+  if (this->output_labels_) {
+    top_label = this->prefetch_label_.mutable_cpu_data();
+  }
+  timer.Start();
+  for (int item_id = 0; item_id < batch_size; ++item_id) {
+    // get a datum
+    Datum datum;
+    datum.ParseFromString(cursor_->value());
+    read_time += timer.MicroSeconds();
+    timer.Start();
+    // Apply data transformations (mirror, scale, crop...)
+    int offset = this->prefetch_data_.offset(item_id);
+    this->transformed_data_.set_cpu_data(top_data + offset);
+    this->data_transformer_->Transform(datum, &(this->transformed_data_));
+    // Copy label.
+    if (this->output_labels_) {
+      top_label[item_id] = datum.label();
+    }
+    trans_time += timer.MicroSeconds();
+    timer.Start();
+    // go to the next item.
+    cursor_->Next();
+    if (!cursor_->valid()) {
+      DLOG(INFO) << "Restarting data prefetching from start.";
+      cursor_->SeekToFirst();
+    }
+  }
+  timer.Stop();
+  batch_timer.Stop();
+  DLOG(INFO) << "Prefetch batch: " << batch_timer.MilliSeconds() << " ms.";
+  DLOG(INFO) << "     Read time: " << read_time / 1000 << " ms.";
+  DLOG(INFO) << "Transform time: " << trans_time / 1000 << " ms.";
+}
+
+INSTANTIATE_CLASS(DataLayer);
+REGISTER_LAYER_CLASS(Data);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/deconv_layer.cpp b/src/caffe/layers/deconv_layer.cpp
new file mode 100644
index 0000000..a461296
--- /dev/null
+++ b/src/caffe/layers/deconv_layer.cpp
@@ -0,0 +1,79 @@
+#include <vector>
+
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/im2col.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void DeconvolutionLayer<Dtype>::compute_output_shape() {
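+  // Deconvolution inverts the convolution output-size formula:
+  //   out = stride * (in - 1) + kernel - 2 * pad.
+  // For example (hypothetical numbers), a 55x55 input with an 11x11 kernel,
+  // pad 0 and stride 4 maps back to 4 * (55 - 1) + 11 = 227.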
+  this->height_out_ = this->stride_h_ * (this->height_ - 1) + this->kernel_h_
+      - 2 * this->pad_h_;
+  this->width_out_ = this->stride_w_ * (this->width_ - 1) + this->kernel_w_
+      - 2 * this->pad_w_;
+}
+
+template <typename Dtype>
+void DeconvolutionLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const Dtype* weight = this->blobs_[0]->cpu_data();
+  for (int i = 0; i < bottom.size(); ++i) {
+    const Dtype* bottom_data = bottom[i]->cpu_data();
+    Dtype* top_data = top[i]->mutable_cpu_data();
+    for (int n = 0; n < this->num_; ++n) {
+      this->backward_cpu_gemm(bottom_data + bottom[i]->offset(n), weight,
+          top_data + top[i]->offset(n));
+      if (this->bias_term_) {
+        const Dtype* bias = this->blobs_[1]->cpu_data();
+        this->forward_cpu_bias(top_data + top[i]->offset(n), bias);
+      }
+    }
+  }
+}
+
+template <typename Dtype>
+void DeconvolutionLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* weight = this->blobs_[0]->cpu_data();
+  Dtype* weight_diff = this->blobs_[0]->mutable_cpu_diff();
+  for (int i = 0; i < top.size(); ++i) {
+    const Dtype* top_diff = top[i]->cpu_diff();
+    const Dtype* bottom_data = bottom[i]->cpu_data();
+    Dtype* bottom_diff = bottom[i]->mutable_cpu_diff();
+    // Bias gradient, if necessary.
+    if (this->bias_term_ && this->param_propagate_down_[1]) {
+      Dtype* bias_diff = this->blobs_[1]->mutable_cpu_diff();
+      for (int n = 0; n < this->num_; ++n) {
+        this->backward_cpu_bias(bias_diff, top_diff + top[i]->offset(n));
+      }
+    }
+    if (this->param_propagate_down_[0] || propagate_down[i]) {
+      for (int n = 0; n < this->num_; ++n) {
+        // Gradient w.r.t. weight. Note that we will accumulate diffs.
+        if (this->param_propagate_down_[0]) {
+          this->weight_cpu_gemm(top_diff + top[i]->offset(n),
+              bottom_data + bottom[i]->offset(n), weight_diff);
+        }
+        // Gradient w.r.t. bottom data, if necessary, reusing the column buffer
+        // we might have just computed above.
+        if (propagate_down[i]) {
+          this->forward_cpu_gemm(top_diff + top[i]->offset(n), weight,
+              bottom_diff + bottom[i]->offset(n),
+              this->param_propagate_down_[0]);
+        }
+      }
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(DeconvolutionLayer);
+#endif
+
+INSTANTIATE_CLASS(DeconvolutionLayer);
+REGISTER_LAYER_CLASS(Deconvolution);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/deconv_layer.cu b/src/caffe/layers/deconv_layer.cu
new file mode 100644
index 0000000..39bc4de
--- /dev/null
+++ b/src/caffe/layers/deconv_layer.cu
@@ -0,0 +1,64 @@
+#include <vector>
+
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/im2col.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void DeconvolutionLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const Dtype* weight = this->blobs_[0]->gpu_data();
+  for (int i = 0; i < bottom.size(); ++i) {
+    const Dtype* bottom_data = bottom[i]->gpu_data();
+    Dtype* top_data = top[i]->mutable_gpu_data();
+    for (int n = 0; n < this->num_; ++n) {
+      this->backward_gpu_gemm(bottom_data + bottom[i]->offset(n), weight,
+          top_data + top[i]->offset(n));
+      if (this->bias_term_) {
+        const Dtype* bias = this->blobs_[1]->gpu_data();
+        this->forward_gpu_bias(top_data + top[i]->offset(n), bias);
+      }
+    }
+  }
+}
+
+template <typename Dtype>
+void DeconvolutionLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* weight = this->blobs_[0]->gpu_data();
+  Dtype* weight_diff = this->blobs_[0]->mutable_gpu_diff();
+  for (int i = 0; i < top.size(); ++i) {
+    const Dtype* top_diff = top[i]->gpu_diff();
+    const Dtype* bottom_data = bottom[i]->gpu_data();
+    Dtype* bottom_diff = bottom[i]->mutable_gpu_diff();
+    // Bias gradient, if necessary.
+    if (this->bias_term_ && this->param_propagate_down_[1]) {
+      Dtype* bias_diff = this->blobs_[1]->mutable_gpu_diff();
+      for (int n = 0; n < this->num_; ++n) {
+        this->backward_gpu_bias(bias_diff, top_diff + top[i]->offset(n));
+      }
+    }
+    if (this->param_propagate_down_[0] || propagate_down[i]) {
+      for (int n = 0; n < this->num_; ++n) {
+        // gradient w.r.t. weight. Note that we will accumulate diffs.
+        if (this->param_propagate_down_[0]) {
+          this->weight_gpu_gemm(top_diff + top[i]->offset(n),
+              bottom_data + bottom[i]->offset(n), weight_diff);
+        }
+        // gradient w.r.t. bottom data, if necessary.
+        if (propagate_down[i]) {
+          this->forward_gpu_gemm(top_diff + top[i]->offset(n), weight,
+              bottom_diff + bottom[i]->offset(n));
+        }
+      }
+    }
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(DeconvolutionLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/dropout_layer.cpp b/src/caffe/layers/dropout_layer.cpp
new file mode 100644
index 0000000..ec1256f
--- /dev/null
+++ b/src/caffe/layers/dropout_layer.cpp
@@ -0,0 +1,78 @@
+// TODO(sergeyk): the effect should not depend on the phase; the copy in the
+// TEST phase is a wasted memcpy.
+
+#include <vector>
+
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/syncedmem.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void DropoutLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  NeuronLayer<Dtype>::LayerSetUp(bottom, top);
+  threshold_ = this->layer_param_.dropout_param().dropout_ratio();
+  DCHECK(threshold_ > 0.);
+  DCHECK(threshold_ < 1.);
+  scale_ = 1. / (1. - threshold_);
+  uint_thres_ = static_cast<unsigned int>(UINT_MAX * threshold_);
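+  // Inverted dropout: with dropout_ratio p, each unit is kept with
+  // probability 1 - p and scaled by 1 / (1 - p) at training time, so the
+  // expected activation is unchanged and no rescaling is needed at test
+  // time. E.g. a typical p = 0.5 gives scale_ = 2 and uint_thres_ =
+  // UINT_MAX / 2.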
+}
+
+template <typename Dtype>
+void DropoutLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  NeuronLayer<Dtype>::Reshape(bottom, top);
+  // Set up the cache for random number generation
+  rand_vec_.Reshape(bottom[0]->num(), bottom[0]->channels(),
+      bottom[0]->height(), bottom[0]->width());
+}
+
+template <typename Dtype>
+void DropoutLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  unsigned int* mask = rand_vec_.mutable_cpu_data();
+  const int count = bottom[0]->count();
+  if (this->phase_ == TRAIN) {
+    // Create random numbers
+    caffe_rng_bernoulli(count, 1. - threshold_, mask);
+    for (int i = 0; i < count; ++i) {
+      top_data[i] = bottom_data[i] * mask[i] * scale_;
+    }
+  } else {
+    caffe_copy(bottom[0]->count(), bottom_data, top_data);
+  }
+}
+
+template <typename Dtype>
+void DropoutLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    const Dtype* top_diff = top[0]->cpu_diff();
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    if (this->phase_ == TRAIN) {
+      const unsigned int* mask = rand_vec_.cpu_data();
+      const int count = bottom[0]->count();
+      for (int i = 0; i < count; ++i) {
+        bottom_diff[i] = top_diff[i] * mask[i] * scale_;
+      }
+    } else {
+      caffe_copy(top[0]->count(), top_diff, bottom_diff);
+    }
+  }
+}
+
+
+#ifdef CPU_ONLY
+STUB_GPU(DropoutLayer);
+#endif
+
+INSTANTIATE_CLASS(DropoutLayer);
+REGISTER_LAYER_CLASS(Dropout);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/dropout_layer.cu b/src/caffe/layers/dropout_layer.cu
new file mode 100644
index 0000000..f9ea04f
--- /dev/null
+++ b/src/caffe/layers/dropout_layer.cu
@@ -0,0 +1,77 @@
+#include <algorithm>
+#include <limits>
+#include <vector>
+
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/syncedmem.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+
+template <typename Dtype>
+__global__ void DropoutForward(const int n, const Dtype* in,
+    const unsigned int* mask, const unsigned int threshold, const float scale,
+    Dtype* out) {
+  CUDA_KERNEL_LOOP(index, n) {
+    out[index] = in[index] * (mask[index] > threshold) * scale;
+  }
+}
+
+template <typename Dtype>
+void DropoutLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  const int count = bottom[0]->count();
+  if (this->phase_ == TRAIN) {
+    unsigned int* mask =
+        static_cast<unsigned int*>(rand_vec_.mutable_gpu_data());
+    caffe_gpu_rng_uniform(count, mask);
+    // apply the threshold: an element is kept iff its random mask value
+    // exceeds uint_thres_
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    DropoutForward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+        count, bottom_data, mask, uint_thres_, scale_, top_data);
+    CUDA_POST_KERNEL_CHECK;
+  } else {
+    caffe_copy(count, bottom_data, top_data);
+  }
+}
+
+template <typename Dtype>
+__global__ void DropoutBackward(const int n, const Dtype* in_diff,
+    const unsigned int* mask, const unsigned int threshold, const float scale,
+    Dtype* out_diff) {
+  CUDA_KERNEL_LOOP(index, n) {
+    out_diff[index] = in_diff[index] * scale * (mask[index] > threshold);
+  }
+}
+
+template <typename Dtype>
+void DropoutLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    const Dtype* top_diff = top[0]->gpu_diff();
+    Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+    if (this->phase_ == TRAIN) {
+      const unsigned int* mask =
+          static_cast<const unsigned int*>(rand_vec_.gpu_data());
+      const int count = bottom[0]->count();
+      // NOLINT_NEXT_LINE(whitespace/operators)
+      DropoutBackward<Dtype><<<CAFFE_GET_BLOCKS(count),
+        CAFFE_CUDA_NUM_THREADS>>>(
+          count, top_diff, mask, uint_thres_, scale_, bottom_diff);
+      CUDA_POST_KERNEL_CHECK;
+    } else {
+      caffe_copy(top[0]->count(), top_diff, bottom_diff);
+    }
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(DropoutLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/dummy_data_layer.cpp b/src/caffe/layers/dummy_data_layer.cpp
new file mode 100644
index 0000000..6b0d617
--- /dev/null
+++ b/src/caffe/layers/dummy_data_layer.cpp
@@ -0,0 +1,115 @@
+#include <vector>
+
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void DummyDataLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const int num_top = top.size();
+  const DummyDataParameter& param = this->layer_param_.dummy_data_param();
+  const int num_data_filler = param.data_filler_size();
+  CHECK(num_data_filler == 0 || num_data_filler == 1 ||
+        num_data_filler == num_top)
+      << "Number of data fillers must be 0, 1 or equal to the number of tops: "
+      << num_top << "; you specified " << num_data_filler << " data fillers.";
+
+  const bool legacy_dims = param.num_size() || param.channels_size() ||
+                           param.height_size() || param.width_size();
+  if (legacy_dims) {
+    CHECK_EQ(0, param.shape_size())
+        << "Both shape and legacy fields were specified";
+    // Using deprecated 4D output dim specifiers.
+    CHECK(param.num_size() == 1 || param.num_size() == num_top)
+        << "Must specify 'num' once, or once per top blob "
+        << "(" << num_top << "); specified " << param.num_size() << ".";
+    CHECK(param.channels_size() == 1 || param.channels_size() == num_top)
+        << "Must specify 'channels' once, or once per top blob "
+        << "(" << num_top << "); specified " << param.channels_size() << ".";
+    CHECK(param.height_size() == 1 || param.height_size() == num_top)
+        << "Must specify 'height' once, or once per top blob "
+        << "(" << num_top << "); specified " << param.height_size() << ".";
+    CHECK(param.width_size() == 1 || param.width_size() == num_top)
+        << "Must specify 'width' once, or once per top blob "
+        << "(" << num_top << "); specified " << param.width_size() << ".";
+  } else {
+    CHECK(param.shape_size() == 1 || param.shape_size() == num_top)
+        << "Must specify 'shape' once, or once per top blob "
+        << "(" << num_top << "); specified " << param.shape_size() << ".";
+  }
+  // refill_[i] tells Forward whether or not to actually refill top Blob i.
+  // If refill_[i] is false, Forward does nothing for Blob i. We use this to
+  // avoid wastefully refilling "constant" Blobs in every forward pass.
+  // We first fill refill_ in with the INVERSE of its final values.
+  // The first time we run Forward from the LayerSetUp method, we'll fill only
+  // Blobs for which refill_ is normally false.  These Blobs will never be
+  // filled again.
+  refill_.clear();
+  fillers_.clear();
+  if (num_data_filler <= 1) {
+    FillerParameter filler_param;
+    if (num_data_filler == 0) {
+      filler_param.set_type("constant");
+      filler_param.set_value(0);
+    } else {
+      filler_param.CopyFrom(param.data_filler(0));
+    }
+    // Refill on each iteration iff not using a constant filler,
+    // but use the inverse of this rule for the first run.
+    refill_.resize(1);
+    refill_[0] = (strcmp(filler_param.type().c_str(), "constant") == 0);
+    fillers_.resize(1);
+    fillers_[0].reset(GetFiller<Dtype>(filler_param));
+  } else {
+    refill_.resize(num_top);
+    fillers_.resize(num_top);
+    for (int i = 0; i < num_top; ++i) {
+      fillers_[i].reset(GetFiller<Dtype>(param.data_filler(i)));
+      // Refill on each iteration iff not using a constant filler,
+      // but use the inverse of this rule for the first run.
+      refill_[i] =
+          (strcmp(param.data_filler(i).type().c_str(), "constant") == 0);
+    }
+  }
+  for (int i = 0; i < num_top; ++i) {
+    if (legacy_dims) {
+      const int num = (param.num_size() == 1) ? param.num(0) : param.num(i);
+      const int channels =
+          (param.channels_size() == 1) ? param.channels(0) : param.channels(i);
+      const int height =
+          (param.height_size() == 1) ? param.height(0) : param.height(i);
+      const int width =
+          (param.width_size() == 1) ? param.width(0) : param.width(i);
+      top[i]->Reshape(num, channels, height, width);
+    } else {
+      const int shape_index = (param.shape_size() == 1) ? 0 : i;
+      top[i]->Reshape(param.shape(shape_index));
+    }
+  }
+  // Run Forward once, with refill_ inverted, to fill the constant Blobs.
+  this->Forward(bottom, top);
+  // Invert the inverted refill_ values to refill the desired (non-constant)
+  // Blobs in every usual forward pass.
+  for (int i = 0; i < refill_.size(); ++i) {
+    refill_[i] = !refill_[i];
+  }
+}
+
+template <typename Dtype>
+void DummyDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  for (int i = 0; i < top.size(); ++i) {
+    const int filler_id = (fillers_.size() > 1) ? i : 0;
+    if (refill_[filler_id]) {
+      fillers_[filler_id]->Fill(top[i]);
+    }
+  }
+}
+
+INSTANTIATE_CLASS(DummyDataLayer);
+REGISTER_LAYER_CLASS(DummyData);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/eltwise_layer.cpp b/src/caffe/layers/eltwise_layer.cpp
new file mode 100644
index 0000000..a807007
--- /dev/null
+++ b/src/caffe/layers/eltwise_layer.cpp
@@ -0,0 +1,161 @@
+#include <cfloat>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void EltwiseLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  CHECK(this->layer_param().eltwise_param().coeff_size() == 0
+      || this->layer_param().eltwise_param().coeff_size() == bottom.size()) <<
+      "Eltwise Layer takes one coefficient per bottom blob.";
+  CHECK(!(this->layer_param().eltwise_param().operation()
+      == EltwiseParameter_EltwiseOp_PROD
+      && this->layer_param().eltwise_param().coeff_size())) <<
+      "Eltwise layer only takes coefficients for summation.";
+  op_ = this->layer_param_.eltwise_param().operation();
+  // Blob-wise coefficients for the elementwise operation.
+  coeffs_ = vector<Dtype>(bottom.size(), 1);
+  if (this->layer_param().eltwise_param().coeff_size()) {
+    for (int i = 0; i < bottom.size(); ++i) {
+      coeffs_[i] = this->layer_param().eltwise_param().coeff(i);
+    }
+  }
+  stable_prod_grad_ = this->layer_param_.eltwise_param().stable_prod_grad();
+}
+
+template <typename Dtype>
+void EltwiseLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  for (int i = 1; i < bottom.size(); ++i) {
+    CHECK(bottom[i]->shape() == bottom[0]->shape());
+  }
+  top[0]->ReshapeLike(*bottom[0]);
+  // If max operation, we will initialize the vector index part.
+  if (this->layer_param_.eltwise_param().operation() ==
+      EltwiseParameter_EltwiseOp_MAX && top.size() == 1) {
+    max_idx_.Reshape(bottom[0]->shape());
+  }
+}
+
+template <typename Dtype>
+void EltwiseLayer<Dtype>::Forward_cpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  int* mask = NULL;
+  const Dtype* bottom_data_a = NULL;
+  const Dtype* bottom_data_b = NULL;
+  const int count = top[0]->count();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  switch (op_) {
+  case EltwiseParameter_EltwiseOp_PROD:
+    caffe_mul(count, bottom[0]->cpu_data(), bottom[1]->cpu_data(), top_data);
+    for (int i = 2; i < bottom.size(); ++i) {
+      caffe_mul(count, top_data, bottom[i]->cpu_data(), top_data);
+    }
+    break;
+  case EltwiseParameter_EltwiseOp_SUM:
+    caffe_set(count, Dtype(0), top_data);
+    // TODO(shelhamer) does BLAS optimize to sum for coeff = 1?
+    for (int i = 0; i < bottom.size(); ++i) {
+      caffe_axpy(count, coeffs_[i], bottom[i]->cpu_data(), top_data);
+    }
+    break;
+  case EltwiseParameter_EltwiseOp_MAX:
+    // Initialize the running max and the index mask.
+    mask = max_idx_.mutable_cpu_data();
+    caffe_set(count, -1, mask);
+    caffe_set(count, Dtype(-FLT_MAX), top_data);
+    // bottom 0 & 1
+    bottom_data_a = bottom[0]->cpu_data();
+    bottom_data_b = bottom[1]->cpu_data();
+    for (int idx = 0; idx < count; ++idx) {
+      if (bottom_data_a[idx] > bottom_data_b[idx]) {
+        top_data[idx] = bottom_data_a[idx];  // maxval
+        mask[idx] = 0;  // maxid
+      } else {
+        top_data[idx] = bottom_data_b[idx];  // maxval
+        mask[idx] = 1;  // maxid
+      }
+    }
+    // bottom 2++
+    for (int blob_idx = 2; blob_idx < bottom.size(); ++blob_idx) {
+      bottom_data_b = bottom[blob_idx]->cpu_data();
+      for (int idx = 0; idx < count; ++idx) {
+        if (bottom_data_b[idx] > top_data[idx]) {
+          top_data[idx] = bottom_data_b[idx];  // maxval
+          mask[idx] = blob_idx;  // maxid
+        }
+      }
+    }
+    break;
+  default:
+    LOG(FATAL) << "Unknown elementwise operation.";
+  }
+}
+
+template <typename Dtype>
+void EltwiseLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const int* mask = NULL;
+  const int count = top[0]->count();
+  const Dtype* top_data = top[0]->cpu_data();
+  const Dtype* top_diff = top[0]->cpu_diff();
+  for (int i = 0; i < bottom.size(); ++i) {
+    if (propagate_down[i]) {
+      const Dtype* bottom_data = bottom[i]->cpu_data();
+      Dtype* bottom_diff = bottom[i]->mutable_cpu_diff();
+      switch (op_) {
+      case EltwiseParameter_EltwiseOp_PROD:
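+        // Product gradient: d(prod_j x_j) / d x_i = prod_{j != i} x_j. The
+        // stable path recomputes this product directly; the fast path
+        // divides top_data by bottom_data, which is undefined wherever
+        // bottom_data is zero.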
+        if (stable_prod_grad_) {
+          bool initialized = false;
+          for (int j = 0; j < bottom.size(); ++j) {
+            if (i == j) { continue; }
+            if (!initialized) {
+              caffe_copy(count, bottom[j]->cpu_data(), bottom_diff);
+              initialized = true;
+            } else {
+              caffe_mul(count, bottom[j]->cpu_data(), bottom_diff,
+                        bottom_diff);
+            }
+          }
+        } else {
+          caffe_div(count, top_data, bottom_data, bottom_diff);
+        }
+        caffe_mul(count, bottom_diff, top_diff, bottom_diff);
+        break;
+      case EltwiseParameter_EltwiseOp_SUM:
+        if (coeffs_[i] == Dtype(1)) {
+          caffe_copy(count, top_diff, bottom_diff);
+        } else {
+          caffe_cpu_scale(count, coeffs_[i], top_diff, bottom_diff);
+        }
+        break;
+      case EltwiseParameter_EltwiseOp_MAX:
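+        // Route the gradient only to the bottom blob that produced the
+        // maximum, as recorded in max_idx_ during the forward pass.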
+        mask = max_idx_.cpu_data();
+        for (int index = 0; index < count; ++index) {
+          Dtype gradient = 0;
+          if (mask[index] == i) {
+            gradient += top_diff[index];
+          }
+          bottom_diff[index] = gradient;
+        }
+        break;
+      default:
+        LOG(FATAL) << "Unknown elementwise operation.";
+      }
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(EltwiseLayer);
+#endif
+
+INSTANTIATE_CLASS(EltwiseLayer);
+REGISTER_LAYER_CLASS(Eltwise);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/eltwise_layer.cu b/src/caffe/layers/eltwise_layer.cu
new file mode 100644
index 0000000..2247870
--- /dev/null
+++ b/src/caffe/layers/eltwise_layer.cu
@@ -0,0 +1,135 @@
+#include <cfloat>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+__global__ void MaxForward(const int nthreads, const Dtype* bottom_data_a,
+    const Dtype* bottom_data_b, const int blob_idx, Dtype* top_data,
+    int* mask) {
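+  // For blob_idx == 0, bottom_data_a is bottom[0]; in later invocations it
+  // aliases top_data, which already holds the running maximum, so a win by
+  // bottom_data_a needs no update in that case.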
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    Dtype maxval = -FLT_MAX;
+    int maxidx = -1;
+    if (bottom_data_a[index] > bottom_data_b[index]) {
+      // only update for very first bottom_data blob (blob_idx == 0)
+      if (blob_idx == 0) {
+        maxval = bottom_data_a[index];
+        top_data[index] = maxval;
+        maxidx = blob_idx;
+        mask[index] = maxidx;
+      }
+    } else {
+      maxval = bottom_data_b[index];
+      top_data[index] = maxval;
+      maxidx = blob_idx + 1;
+      mask[index] = maxidx;
+    }
+  }
+}
+
+template <typename Dtype>
+void EltwiseLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  int* mask = NULL;
+  const int count = top[0]->count();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  switch (op_) {
+  case EltwiseParameter_EltwiseOp_PROD:
+    caffe_gpu_mul(count, bottom[0]->gpu_data(), bottom[1]->gpu_data(),
+        top_data);
+    for (int i = 2; i < bottom.size(); ++i) {
+      caffe_gpu_mul(count, top_data, bottom[i]->gpu_data(), top_data);
+    }
+    break;
+  case EltwiseParameter_EltwiseOp_SUM:
+    caffe_gpu_set(count, Dtype(0.), top_data);
+    // TODO(shelhamer) does cuBLAS optimize to sum for coeff = 1?
+    for (int i = 0; i < bottom.size(); ++i) {
+      caffe_gpu_axpy(count, coeffs_[i], bottom[i]->gpu_data(), top_data);
+    }
+    break;
+  case EltwiseParameter_EltwiseOp_MAX:
+    mask = max_idx_.mutable_gpu_data();
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    MaxForward<Dtype> <<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+        count, bottom[0]->gpu_data(), bottom[1]->gpu_data(), 0, top_data, mask);
+    for (int i = 2; i < bottom.size(); ++i) {
+      // NOLINT_NEXT_LINE(whitespace/operators)
+      MaxForward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+          count, top_data, bottom[i]->gpu_data(), i-1, top_data, mask);
+    }
+    break;
+  default:
+    LOG(FATAL) << "Unknown elementwise operation.";
+  }
+}
+
+template <typename Dtype>
+__global__ void MaxBackward(const int nthreads, const Dtype* top_diff,
+    const int blob_idx, const int* mask, Dtype* bottom_diff) {
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    Dtype gradient = 0;
+    if (mask[index] == blob_idx) {
+      gradient += top_diff[index];
+    }
+    bottom_diff[index] = gradient;
+  }
+}
+
+template <typename Dtype>
+void EltwiseLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const int* mask = NULL;
+  const int count = top[0]->count();
+  const Dtype* top_data = top[0]->gpu_data();
+  const Dtype* top_diff = top[0]->gpu_diff();
+  for (int i = 0; i < bottom.size(); ++i) {
+    if (propagate_down[i]) {
+      const Dtype* bottom_data = bottom[i]->gpu_data();
+      Dtype* bottom_diff = bottom[i]->mutable_gpu_diff();
+      switch (op_) {
+      case EltwiseParameter_EltwiseOp_PROD:
+        if (stable_prod_grad_) {
+          bool initialized = false;
+          for (int j = 0; j < bottom.size(); ++j) {
+            if (i == j) { continue; }
+            if (!initialized) {
+              caffe_copy(count, bottom[j]->gpu_data(), bottom_diff);
+              initialized = true;
+            } else {
+              caffe_gpu_mul(count, bottom[j]->gpu_data(), bottom_diff,
+                            bottom_diff);
+            }
+          }
+        } else {
+          caffe_gpu_div(count, top_data, bottom_data, bottom_diff);
+        }
+        caffe_gpu_mul(count, bottom_diff, top_diff, bottom_diff);
+        break;
+      case EltwiseParameter_EltwiseOp_SUM:
+        if (coeffs_[i] == Dtype(1.)) {
+          caffe_copy(count, top_diff, bottom_diff);
+        } else {
+          caffe_gpu_scale(count, coeffs_[i], top_diff, bottom_diff);
+        }
+        break;
+      case EltwiseParameter_EltwiseOp_MAX:
+        mask = max_idx_.gpu_data();
+        MaxBackward<Dtype>  // NOLINT_NEXT_LINE(whitespace/operators)
+            <<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+            count, top_diff, i, mask, bottom_diff);
+        break;
+      default:
+        LOG(FATAL) << "Unknown elementwise operation.";
+      }
+    }
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(EltwiseLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/euclidean_loss_layer.cpp b/src/caffe/layers/euclidean_loss_layer.cpp
new file mode 100644
index 0000000..80efa31
--- /dev/null
+++ b/src/caffe/layers/euclidean_loss_layer.cpp
@@ -0,0 +1,57 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void EuclideanLossLayer<Dtype>::Reshape(
+  const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  LossLayer<Dtype>::Reshape(bottom, top);
+  CHECK_EQ(bottom[0]->count(1), bottom[1]->count(1))
+      << "Inputs must have the same dimension.";
+  diff_.ReshapeLike(*bottom[0]);
+}
+
+template <typename Dtype>
+void EuclideanLossLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
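+  // E = 1/(2N) * sum_i (a_i - b_i)^2, where N is the batch size (num).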
+  int count = bottom[0]->count();
+  caffe_sub(
+      count,
+      bottom[0]->cpu_data(),
+      bottom[1]->cpu_data(),
+      diff_.mutable_cpu_data());
+  Dtype dot = caffe_cpu_dot(count, diff_.cpu_data(), diff_.cpu_data());
+  Dtype loss = dot / bottom[0]->num() / Dtype(2);
+  top[0]->mutable_cpu_data()[0] = loss;
+}
+
+template <typename Dtype>
+void EuclideanLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
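+  // dE/da = (a - b) / N and dE/db = -(a - b) / N, scaled by the loss weight
+  // in top[0]->cpu_diff()[0]; sign selects the +/- for the two bottoms.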
+  for (int i = 0; i < 2; ++i) {
+    if (propagate_down[i]) {
+      const Dtype sign = (i == 0) ? 1 : -1;
+      const Dtype alpha = sign * top[0]->cpu_diff()[0] / bottom[i]->num();
+      caffe_cpu_axpby(
+          bottom[i]->count(),              // count
+          alpha,                              // alpha
+          diff_.cpu_data(),                   // a
+          Dtype(0),                           // beta
+          bottom[i]->mutable_cpu_diff());  // b
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(EuclideanLossLayer);
+#endif
+
+INSTANTIATE_CLASS(EuclideanLossLayer);
+REGISTER_LAYER_CLASS(EuclideanLoss);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/euclidean_loss_layer.cu b/src/caffe/layers/euclidean_loss_layer.cu
new file mode 100644
index 0000000..5b1de3a
--- /dev/null
+++ b/src/caffe/layers/euclidean_loss_layer.cu
@@ -0,0 +1,44 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void EuclideanLossLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  int count = bottom[0]->count();
+  caffe_gpu_sub(
+      count,
+      bottom[0]->gpu_data(),
+      bottom[1]->gpu_data(),
+      diff_.mutable_gpu_data());
+  Dtype dot;
+  caffe_gpu_dot(count, diff_.gpu_data(), diff_.gpu_data(), &dot);
+  Dtype loss = dot / bottom[0]->num() / Dtype(2);
+  top[0]->mutable_cpu_data()[0] = loss;
+}
+
+template <typename Dtype>
+void EuclideanLossLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  for (int i = 0; i < 2; ++i) {
+    if (propagate_down[i]) {
+      const Dtype sign = (i == 0) ? 1 : -1;
+      const Dtype alpha = sign * top[0]->cpu_diff()[0] / bottom[i]->num();
+      caffe_gpu_axpby(
+          bottom[i]->count(),              // count
+          alpha,                              // alpha
+          diff_.gpu_data(),                   // a
+          Dtype(0),                           // beta
+          bottom[i]->mutable_gpu_diff());  // b
+    }
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(EuclideanLossLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/exp_layer.cpp b/src/caffe/layers/exp_layer.cpp
new file mode 100644
index 0000000..c7e7c60
--- /dev/null
+++ b/src/caffe/layers/exp_layer.cpp
@@ -0,0 +1,69 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void ExpLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  NeuronLayer<Dtype>::LayerSetUp(bottom, top);
+  const Dtype base = this->layer_param_.exp_param().base();
+  if (base != Dtype(-1)) {
+    CHECK_GT(base, 0) << "base must be strictly positive.";
+  }
+  // If base == -1, interpret the base as e and set log_base = 1 exactly.
+  // Otherwise, calculate its log explicitly.
+  const Dtype log_base = (base == Dtype(-1)) ? Dtype(1) : log(base);
+  CHECK(!isnan(log_base))
+      << "NaN result: log(base) = log(" << base << ") = " << log_base;
+  CHECK(!isinf(log_base))
+      << "Inf result: log(base) = log(" << base << ") = " << log_base;
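+  // The forward pass computes y = base^(shift + scale * x)
+  //                             = outer_scale_ * exp(inner_scale_ * x),
+  // with inner_scale_ = log(base) * scale and outer_scale_ = base^shift.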
+  const Dtype input_scale = this->layer_param_.exp_param().scale();
+  const Dtype input_shift = this->layer_param_.exp_param().shift();
+  inner_scale_ = log_base * input_scale;
+  outer_scale_ = (input_shift == Dtype(0)) ? Dtype(1) :
+      ((base != Dtype(-1)) ? pow(base, input_shift) : exp(input_shift));
+}
+
+template <typename Dtype>
+void ExpLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const int count = bottom[0]->count();
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  if (inner_scale_ == Dtype(1)) {
+    caffe_exp(count, bottom_data, top_data);
+  } else {
+    caffe_cpu_scale(count, inner_scale_, bottom_data, top_data);
+    caffe_exp(count, top_data, top_data);
+  }
+  if (outer_scale_ != Dtype(1)) {
+    caffe_scal(count, outer_scale_, top_data);
+  }
+}
+
+template <typename Dtype>
+void ExpLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) { return; }
+  const int count = bottom[0]->count();
+  const Dtype* top_data = top[0]->cpu_data();
+  const Dtype* top_diff = top[0]->cpu_diff();
+  Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
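+  // dy/dx = log(base) * scale * y = inner_scale_ * top_data, so multiply the
+  // top diff by the top data and rescale.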
+  caffe_mul(count, top_data, top_diff, bottom_diff);
+  if (inner_scale_ != Dtype(1)) {
+    caffe_scal(count, inner_scale_, bottom_diff);
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(ExpLayer);
+#endif
+
+INSTANTIATE_CLASS(ExpLayer);
+REGISTER_LAYER_CLASS(Exp);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/exp_layer.cu b/src/caffe/layers/exp_layer.cu
new file mode 100644
index 0000000..2d75d8d
--- /dev/null
+++ b/src/caffe/layers/exp_layer.cu
@@ -0,0 +1,44 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void ExpLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const int count = bottom[0]->count();
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  if (inner_scale_ == Dtype(1)) {
+    caffe_gpu_exp(count, bottom_data, top_data);
+  } else {
+    caffe_gpu_scale(count, inner_scale_, bottom_data, top_data);
+    caffe_gpu_exp(count, top_data, top_data);
+  }
+  if (outer_scale_ != Dtype(1)) {
+    caffe_gpu_scal(count, outer_scale_, top_data);
+  }
+}
+
+template <typename Dtype>
+void ExpLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) { return; }
+  const int count = bottom[0]->count();
+  const Dtype* top_data = top[0]->gpu_data();
+  const Dtype* top_diff = top[0]->gpu_diff();
+  Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+  caffe_gpu_mul(count, top_data, top_diff, bottom_diff);
+  if (inner_scale_ != Dtype(1)) {
+    caffe_gpu_scal(count, inner_scale_, bottom_diff);
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(ExpLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/filter_layer.cpp b/src/caffe/layers/filter_layer.cpp
new file mode 100644
index 0000000..be1db32
--- /dev/null
+++ b/src/caffe/layers/filter_layer.cpp
@@ -0,0 +1,127 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void FilterLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  CHECK_EQ(top.size(), bottom.size() - 1);
+  first_reshape_ = true;
+}
+
+template <typename Dtype>
+void FilterLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  // bottom[0...k-1] are the blobs to filter
+  // bottom[last] is the "selector_blob"
+  int selector_index = bottom.size() - 1;
+  for (int i = 1; i < bottom[selector_index]->num_axes(); ++i) {
+    CHECK_EQ(bottom[selector_index]->shape(i), 1)
+        << "Selector blob dimensions must be singletons (1), except the first";
+  }
+  for (int i = 0; i < bottom.size() - 1; ++i) {
+    CHECK_EQ(bottom[selector_index]->shape(0), bottom[i]->shape(0)) <<
+        "Each bottom should have the same 0th dimension as the selector blob";
+  }
+
+  const Dtype* bottom_data_selector = bottom[selector_index]->cpu_data();
+  indices_to_forward_.clear();
+
+  // Look for non-zero elements in the selector blob. Items of every other
+  // bottom that share an index with a non-zero selector entry will be
+  // forwarded.
+  for (int item_id = 0; item_id < bottom[selector_index]->shape(0); ++item_id) {
+    // we don't need an offset because item size == 1
+    const Dtype* tmp_data_selector = bottom_data_selector + item_id;
+    if (*tmp_data_selector) {
+      indices_to_forward_.push_back(item_id);
+    }
+  }
+  // only filtered items will be forwarded
+  int new_tops_num = indices_to_forward_.size();
+  // init
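+  // On the very first Reshape the selector data has not been filled yet, so
+  // the tops are sized to the full batch as a placeholder.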
+  if (first_reshape_) {
+    new_tops_num = bottom[0]->shape(0);
+    first_reshape_ = false;
+  }
+  for (int t = 0; t < top.size(); ++t) {
+    int num_axes = bottom[t]->num_axes();
+    vector<int> shape_top(num_axes);
+    shape_top[0] = new_tops_num;
+    for (int ts = 1; ts < num_axes; ++ts)
+      shape_top[ts] = bottom[t]->shape(ts);
+    top[t]->Reshape(shape_top);
+  }
+}
+
+template <typename Dtype>
+void FilterLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  int new_tops_num = indices_to_forward_.size();
+  // forward all filtered items for all bottoms but the Selector (bottom[last])
+  for (int t = 0; t < top.size(); ++t) {
+    const Dtype* bottom_data = bottom[t]->cpu_data();
+    Dtype* top_data = top[t]->mutable_cpu_data();
+    int dim = bottom[t]->count() / bottom[t]->shape(0);
+    for (int n = 0; n < new_tops_num; ++n) {
+      int data_offset_top = n * dim;
+      int data_offset_bottom = indices_to_forward_[n] * bottom[t]->count(1);
+      caffe_copy(dim, bottom_data + data_offset_bottom,
+          top_data + data_offset_top);
+    }
+  }
+}
+
+template <typename Dtype>
+void FilterLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[bottom.size() - 1]) {
+    LOG(FATAL) << this->type()
+               << " Layer cannot backpropagate to filter index inputs.";
+  }
+  for (int i = 0; i < top.size(); i++) {
+    // bottom[last] is the selector and never needs backpropagation
+    // so we can iterate over top vector because top.size() == bottom.size() -1
+    if (propagate_down[i]) {
+      const int dim = top[i]->count() / top[i]->shape(0);
+      int next_to_backward_offset = 0;
+      int batch_offset = 0;
+      int data_offset_bottom = 0;
+      int data_offset_top = 0;
+      for (int n = 0; n < bottom[i]->shape(0); n++) {
+        data_offset_bottom = n * dim;
+        if (next_to_backward_offset >= indices_to_forward_.size()) {
+          // we have already visited all items that were forwarded, so
+          // just zero out the remaining ones
+          caffe_set(dim, Dtype(0),
+              bottom[i]->mutable_cpu_diff() + data_offset_bottom);
+        } else {
+          batch_offset = indices_to_forward_[next_to_backward_offset];
+          if (n != batch_offset) {  // this data was not forwarded
+            caffe_set(dim, Dtype(0),
+                bottom[i]->mutable_cpu_diff() + data_offset_bottom);
+          } else {  // this data was forwarded
+            data_offset_top = next_to_backward_offset * dim;
+            next_to_backward_offset++;  // point to next forwarded item index
+            caffe_copy(dim, top[i]->mutable_cpu_diff() + data_offset_top,
+                bottom[i]->mutable_cpu_diff() + data_offset_bottom);
+          }
+        }
+      }
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(FilterLayer);
+#endif
+
+INSTANTIATE_CLASS(FilterLayer);
+REGISTER_LAYER_CLASS(Filter);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/filter_layer.cu b/src/caffe/layers/filter_layer.cu
new file mode 100644
index 0000000..cf929ee
--- /dev/null
+++ b/src/caffe/layers/filter_layer.cu
@@ -0,0 +1,70 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void FilterLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  int new_tops_num = indices_to_forward_.size();
+  // forward all filtered items for all bottoms but the Selector (bottom[last])
+  for (int t = 0; t < top.size(); ++t) {
+    const Dtype* bottom_data = bottom[t]->gpu_data();
+    Dtype* top_data = top[t]->mutable_gpu_data();
+    int dim = bottom[t]->count() / bottom[t]->shape(0);
+    for (int n = 0; n < new_tops_num; ++n) {
+      int data_offset_top = n * dim;
+      int data_offset_bottom = indices_to_forward_[n] * dim;
+      caffe_copy(dim, bottom_data + data_offset_bottom,
+          top_data + data_offset_top);
+    }
+  }
+}
+
+template <typename Dtype>
+void FilterLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[bottom.size() - 1]) {
+    LOG(FATAL) << this->type()
+               << " Layer cannot backpropagate to filter index inputs.";
+  }
+  for (int i = 0; i < top.size(); ++i) {
+    // bottom[last] is the selector and never needs backpropagation
+    // so we can iterate over top vector because top.size() == bottom.size() -1
+    if (propagate_down[i]) {
+      const int dim = top[i]->count() / top[i]->shape(0);
+      int next_to_backward_offset = 0;
+      int batch_offset = 0;
+      int data_offset_bottom = 0;
+      int data_offset_top = 0;
+      for (int n = 0; n < bottom[i]->shape(0); ++n) {
+        if (next_to_backward_offset >= indices_to_forward_.size()) {
+          // we have already visited all items that were forwarded, so
+          // just zero out the remaining ones
+          data_offset_bottom = n * dim;
+          caffe_gpu_set(dim, Dtype(0),
+              bottom[i]->mutable_gpu_diff() + data_offset_bottom);
+        } else {
+          batch_offset = indices_to_forward_[next_to_backward_offset];
+          data_offset_bottom = n * dim;
+          if (n != batch_offset) {  // this data was not forwarded
+            caffe_gpu_set(dim, Dtype(0),
+                bottom[i]->mutable_gpu_diff() + data_offset_bottom);
+          } else {  // this data was forwarded
+            data_offset_top = next_to_backward_offset * dim;
+            ++next_to_backward_offset;  // point to next forwarded item index
+            caffe_copy(dim, top[i]->mutable_gpu_diff() + data_offset_top,
+                bottom[i]->mutable_gpu_diff() + data_offset_bottom);
+          }
+        }
+      }
+    }
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(FilterLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/flatten_layer.cpp b/src/caffe/layers/flatten_layer.cpp
new file mode 100644
index 0000000..f7e5c9c
--- /dev/null
+++ b/src/caffe/layers/flatten_layer.cpp
@@ -0,0 +1,44 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void FlattenLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
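+  // Collapse axes [start_axis, end_axis] into a single dimension; axes
+  // outside that range keep their original extents.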
+  const int start_axis = bottom[0]->CanonicalAxisIndex(
+      this->layer_param_.flatten_param().axis());
+  const int end_axis = bottom[0]->CanonicalAxisIndex(
+      this->layer_param_.flatten_param().end_axis());
+  vector<int> top_shape;
+  for (int i = 0; i < start_axis; ++i) {
+    top_shape.push_back(bottom[0]->shape(i));
+  }
+  const int flattened_dim = bottom[0]->count(start_axis, end_axis + 1);
+  top_shape.push_back(flattened_dim);
+  for (int i = end_axis + 1; i < bottom[0]->num_axes(); ++i) {
+    top_shape.push_back(bottom[0]->shape(i));
+  }
+  top[0]->Reshape(top_shape);
+  CHECK_EQ(top[0]->count(), bottom[0]->count());
+}
+
+template <typename Dtype>
+void FlattenLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  top[0]->ShareData(*bottom[0]);
+}
+
+template <typename Dtype>
+void FlattenLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  bottom[0]->ShareDiff(*top[0]);
+}
+
+INSTANTIATE_CLASS(FlattenLayer);
+REGISTER_LAYER_CLASS(Flatten);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/hdf5_data_layer.cpp b/src/caffe/layers/hdf5_data_layer.cpp
new file mode 100644
index 0000000..8a782f7
--- /dev/null
+++ b/src/caffe/layers/hdf5_data_layer.cpp
@@ -0,0 +1,167 @@
+/*
+TODO:
+- load file in a separate thread ("prefetch")
+- can be smarter about the memcpy call instead of doing it row-by-row
+  :: use util functions caffe_copy, and Blob->offset()
+  :: don't forget to update hdf5_data_layer.cu accordingly
+- add ability to shuffle filenames if flag is set
+*/
+#include <climits>
+#include <fstream>  // NOLINT(readability/streams)
+#include <string>
+#include <vector>
+
+#include "hdf5.h"
+#include "hdf5_hl.h"
+#include "stdint.h"
+
+#include "caffe/data_layers.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+HDF5DataLayer<Dtype>::~HDF5DataLayer<Dtype>() { }
+
+// Load data and label from HDF5 filename into the class property blobs.
+template <typename Dtype>
+void HDF5DataLayer<Dtype>::LoadHDF5FileData(const char* filename) {
+  DLOG(INFO) << "Loading HDF5 file: " << filename;
+  hid_t file_id = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
+  if (file_id < 0) {
+    LOG(FATAL) << "Failed opening HDF5 file: " << filename;
+  }
+
+  int top_size = this->layer_param_.top_size();
+  hdf_blobs_.resize(top_size);
+
+  const int MIN_DATA_DIM = 1;
+  const int MAX_DATA_DIM = INT_MAX;
+
+  for (int i = 0; i < top_size; ++i) {
+    hdf_blobs_[i] = shared_ptr<Blob<Dtype> >(new Blob<Dtype>());
+    hdf5_load_nd_dataset(file_id, this->layer_param_.top(i).c_str(),
+        MIN_DATA_DIM, MAX_DATA_DIM, hdf_blobs_[i].get());
+  }
+
+  herr_t status = H5Fclose(file_id);
+  CHECK_GE(status, 0) << "Failed to close HDF5 file: " << filename;
+
+  // MinTopBlobs==1 guarantees at least one top blob
+  CHECK_GE(hdf_blobs_[0]->num_axes(), 1) << "Input must have at least 1 axis.";
+  const int num = hdf_blobs_[0]->shape(0);
+  for (int i = 1; i < top_size; ++i) {
+    CHECK_EQ(hdf_blobs_[i]->shape(0), num);
+  }
+  // Default to identity permutation.
+  data_permutation_.clear();
+  data_permutation_.resize(hdf_blobs_[0]->shape(0));
+  for (int i = 0; i < hdf_blobs_[0]->shape(0); i++)
+    data_permutation_[i] = i;
+
+  // Shuffle if needed.
+  if (this->layer_param_.hdf5_data_param().shuffle()) {
+    std::random_shuffle(data_permutation_.begin(), data_permutation_.end());
+    DLOG(INFO) << "Successfully loaded " << hdf_blobs_[0]->shape(0)
+               << " rows (shuffled)";
+  } else {
+    DLOG(INFO) << "Successfully loaded " << hdf_blobs_[0]->shape(0) << " rows";
+  }
+}
+
+template <typename Dtype>
+void HDF5DataLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  // Refuse transformation parameters since HDF5 is totally generic.
+  CHECK(!this->layer_param_.has_transform_param()) <<
+      this->type() << " does not transform data.";
+  // Read the source to parse the filenames.
+  const string& source = this->layer_param_.hdf5_data_param().source();
+  LOG(INFO) << "Loading list of HDF5 filenames from: " << source;
+  hdf_filenames_.clear();
+  std::ifstream source_file(source.c_str());
+  if (source_file.is_open()) {
+    std::string line;
+    while (source_file >> line) {
+      hdf_filenames_.push_back(line);
+    }
+  } else {
+    LOG(FATAL) << "Failed to open source file: " << source;
+  }
+  source_file.close();
+  num_files_ = hdf_filenames_.size();
+  current_file_ = 0;
+  LOG(INFO) << "Number of HDF5 files: " << num_files_;
+  CHECK_GE(num_files_, 1) << "Must have at least 1 HDF5 filename listed in "
+    << source;
+
+  file_permutation_.clear();
+  file_permutation_.resize(num_files_);
+  // Default to identity permutation.
+  for (int i = 0; i < num_files_; i++) {
+    file_permutation_[i] = i;
+  }
+
+  // Shuffle if needed.
+  if (this->layer_param_.hdf5_data_param().shuffle()) {
+    std::random_shuffle(file_permutation_.begin(), file_permutation_.end());
+  }
+
+  // Load the first HDF5 file and initialize the line counter.
+  LoadHDF5FileData(hdf_filenames_[file_permutation_[current_file_]].c_str());
+  current_row_ = 0;
+
+  // Reshape blobs.
+  const int batch_size = this->layer_param_.hdf5_data_param().batch_size();
+  const int top_size = this->layer_param_.top_size();
+  vector<int> top_shape;
+  for (int i = 0; i < top_size; ++i) {
+    top_shape.resize(hdf_blobs_[i]->num_axes());
+    top_shape[0] = batch_size;
+    for (int j = 1; j < top_shape.size(); ++j) {
+      top_shape[j] = hdf_blobs_[i]->shape(j);
+    }
+    top[i]->Reshape(top_shape);
+  }
+}
+
+template <typename Dtype>
+void HDF5DataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const int batch_size = this->layer_param_.hdf5_data_param().batch_size();
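+  // Fill the batch row by row, walking the (possibly shuffled) rows of the
+  // current file and advancing to the next file when the rows run out.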
+  for (int i = 0; i < batch_size; ++i, ++current_row_) {
+    if (current_row_ == hdf_blobs_[0]->shape(0)) {
+      if (num_files_ > 1) {
+        ++current_file_;
+        if (current_file_ == num_files_) {
+          current_file_ = 0;
+          if (this->layer_param_.hdf5_data_param().shuffle()) {
+            std::random_shuffle(file_permutation_.begin(),
+                                file_permutation_.end());
+          }
+          DLOG(INFO) << "Looping around to first file.";
+        }
+        LoadHDF5FileData(
+            hdf_filenames_[file_permutation_[current_file_]].c_str());
+      }
+      current_row_ = 0;
+      if (this->layer_param_.hdf5_data_param().shuffle())
+        std::random_shuffle(data_permutation_.begin(), data_permutation_.end());
+    }
+    for (int j = 0; j < this->layer_param_.top_size(); ++j) {
+      int data_dim = top[j]->count() / top[j]->shape(0);
+      caffe_copy(data_dim,
+          &hdf_blobs_[j]->cpu_data()[data_permutation_[current_row_]
+            * data_dim], &top[j]->mutable_cpu_data()[i * data_dim]);
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU_FORWARD(HDF5DataLayer, Forward);
+#endif
+
+INSTANTIATE_CLASS(HDF5DataLayer);
+REGISTER_LAYER_CLASS(HDF5Data);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/hdf5_data_layer.cu b/src/caffe/layers/hdf5_data_layer.cu
new file mode 100644
index 0000000..5e3e4ce
--- /dev/null
+++ b/src/caffe/layers/hdf5_data_layer.cu
@@ -0,0 +1,53 @@
+/*
+TODO:
+- only load parts of the file, in accordance with a prototxt param "max_mem"
+*/
+
+#include <stdint.h>
+#include <string>
+#include <vector>
+
+#include "hdf5.h"
+#include "hdf5_hl.h"
+
+#include "caffe/data_layers.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void HDF5DataLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const int batch_size = this->layer_param_.hdf5_data_param().batch_size();
+  for (int i = 0; i < batch_size; ++i, ++current_row_) {
+    if (current_row_ == hdf_blobs_[0]->shape(0)) {
+      if (num_files_ > 1) {
+        current_file_ += 1;
+        if (current_file_ == num_files_) {
+          current_file_ = 0;
+          if (this->layer_param_.hdf5_data_param().shuffle()) {
+            std::random_shuffle(file_permutation_.begin(),
+                                file_permutation_.end());
+          }
+          DLOG(INFO) << "Looping around to first file.";
+        }
+        LoadHDF5FileData(
+            hdf_filenames_[file_permutation_[current_file_]].c_str());
+      }
+      current_row_ = 0;
+      if (this->layer_param_.hdf5_data_param().shuffle())
+        std::random_shuffle(data_permutation_.begin(), data_permutation_.end());
+    }
+    for (int j = 0; j < this->layer_param_.top_size(); ++j) {
+      int data_dim = top[j]->count() / top[j]->shape(0);
+      caffe_copy(data_dim,
+          &hdf_blobs_[j]->cpu_data()[data_permutation_[current_row_]
+            * data_dim], &top[j]->mutable_gpu_data()[i * data_dim]);
+    }
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(HDF5DataLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/hdf5_output_layer.cpp b/src/caffe/layers/hdf5_output_layer.cpp
new file mode 100644
index 0000000..f63375c
--- /dev/null
+++ b/src/caffe/layers/hdf5_output_layer.cpp
@@ -0,0 +1,77 @@
+#include <vector>
+
+#include "hdf5.h"
+#include "hdf5_hl.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void HDF5OutputLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  file_name_ = this->layer_param_.hdf5_output_param().file_name();
+  file_id_ = H5Fcreate(file_name_.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT,
+                       H5P_DEFAULT);
+  CHECK_GE(file_id_, 0) << "Failed to open HDF5 file " << file_name_;
+  file_opened_ = true;
+}
+
+template <typename Dtype>
+HDF5OutputLayer<Dtype>::~HDF5OutputLayer<Dtype>() {
+  if (file_opened_) {
+    herr_t status = H5Fclose(file_id_);
+    CHECK_GE(status, 0) << "Failed to close HDF5 file " << file_name_;
+  }
+}
+
+template <typename Dtype>
+void HDF5OutputLayer<Dtype>::SaveBlobs() {
+  // TODO: no limit on the number of blobs
+  LOG(INFO) << "Saving HDF5 file " << file_name_;
+  CHECK_EQ(data_blob_.num(), label_blob_.num()) <<
+      "data blob and label blob must have the same batch size";
+  hdf5_save_nd_dataset(file_id_, HDF5_DATA_DATASET_NAME, data_blob_);
+  hdf5_save_nd_dataset(file_id_, HDF5_DATA_LABEL_NAME, label_blob_);
+  LOG(INFO) << "Successfully saved " << data_blob_.num() << " rows";
+}
+
+template <typename Dtype>
+void HDF5OutputLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  CHECK_GE(bottom.size(), 2);
+  CHECK_EQ(bottom[0]->num(), bottom[1]->num());
+  data_blob_.Reshape(bottom[0]->num(), bottom[0]->channels(),
+                     bottom[0]->height(), bottom[0]->width());
+  label_blob_.Reshape(bottom[1]->num(), bottom[1]->channels(),
+                     bottom[1]->height(), bottom[1]->width());
+  const int data_datum_dim = bottom[0]->count() / bottom[0]->num();
+  const int label_datum_dim = bottom[1]->count() / bottom[1]->num();
+
+  for (int i = 0; i < bottom[0]->num(); ++i) {
+    caffe_copy(data_datum_dim, &bottom[0]->cpu_data()[i * data_datum_dim],
+        &data_blob_.mutable_cpu_data()[i * data_datum_dim]);
+    caffe_copy(label_datum_dim, &bottom[1]->cpu_data()[i * label_datum_dim],
+        &label_blob_.mutable_cpu_data()[i * label_datum_dim]);
+  }
+  SaveBlobs();
+}
+
+template <typename Dtype>
+void HDF5OutputLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  return;
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(HDF5OutputLayer);
+#endif
+
+INSTANTIATE_CLASS(HDF5OutputLayer);
+REGISTER_LAYER_CLASS(HDF5Output);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/hdf5_output_layer.cu b/src/caffe/layers/hdf5_output_layer.cu
new file mode 100644
index 0000000..ae497c3
--- /dev/null
+++ b/src/caffe/layers/hdf5_output_layer.cu
@@ -0,0 +1,43 @@
+#include <vector>
+
+#include "hdf5.h"
+#include "hdf5_hl.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void HDF5OutputLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  CHECK_GE(bottom.size(), 2);
+  CHECK_EQ(bottom[0]->num(), bottom[1]->num());
+  data_blob_.Reshape(bottom[0]->num(), bottom[0]->channels(),
+                     bottom[0]->height(), bottom[0]->width());
+  label_blob_.Reshape(bottom[1]->num(), bottom[1]->channels(),
+                     bottom[1]->height(), bottom[1]->width());
+  const int data_datum_dim = bottom[0]->count() / bottom[0]->num();
+  const int label_datum_dim = bottom[1]->count() / bottom[1]->num();
+
+  for (int i = 0; i < bottom[0]->num(); ++i) {
+    caffe_copy(data_datum_dim, &bottom[0]->gpu_data()[i * data_datum_dim],
+        &data_blob_.mutable_cpu_data()[i * data_datum_dim]);
+    caffe_copy(label_datum_dim, &bottom[1]->gpu_data()[i * label_datum_dim],
+        &label_blob_.mutable_cpu_data()[i * label_datum_dim]);
+  }
+  SaveBlobs();
+}
+
+template <typename Dtype>
+void HDF5OutputLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  return;
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(HDF5OutputLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/hinge_loss_layer.cpp b/src/caffe/layers/hinge_loss_layer.cpp
new file mode 100644
index 0000000..a2fb2a1
--- /dev/null
+++ b/src/caffe/layers/hinge_loss_layer.cpp
@@ -0,0 +1,82 @@
+#include <algorithm>
+#include <cfloat>
+#include <cmath>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void HingeLossLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+  const Dtype* label = bottom[1]->cpu_data();
+  int num = bottom[0]->num();
+  int count = bottom[0]->count();
+  int dim = count / num;
+
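+  // For sample n and class k the margin is max(0, 1 + s * t_nk), where s is
+  // -1 for the true class and +1 otherwise; the L1 norm sums the margins,
+  // the L2 norm their squares, and both are averaged over the batch.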
+  caffe_copy(count, bottom_data, bottom_diff);
+  for (int i = 0; i < num; ++i) {
+    bottom_diff[i * dim + static_cast<int>(label[i])] *= -1;
+  }
+  for (int i = 0; i < num; ++i) {
+    for (int j = 0; j < dim; ++j) {
+      bottom_diff[i * dim + j] = std::max(
+        Dtype(0), 1 + bottom_diff[i * dim + j]);
+    }
+  }
+  Dtype* loss = top[0]->mutable_cpu_data();
+  switch (this->layer_param_.hinge_loss_param().norm()) {
+  case HingeLossParameter_Norm_L1:
+    loss[0] = caffe_cpu_asum(count, bottom_diff) / num;
+    break;
+  case HingeLossParameter_Norm_L2:
+    loss[0] = caffe_cpu_dot(count, bottom_diff, bottom_diff) / num;
+    break;
+  default:
+    LOG(FATAL) << "Unknown Norm";
+  }
+}
+
+template <typename Dtype>
+void HingeLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[1]) {
+    LOG(FATAL) << this->type()
+               << " Layer cannot backpropagate to label inputs.";
+  }
+  if (propagate_down[0]) {
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    const Dtype* label = bottom[1]->cpu_data();
+    int num = bottom[0]->num();
+    int count = bottom[0]->count();
+    int dim = count / num;
+
+    for (int i = 0; i < num; ++i) {
+      bottom_diff[i * dim + static_cast<int>(label[i])] *= -1;
+    }
+
+    const Dtype loss_weight = top[0]->cpu_diff()[0];
+    switch (this->layer_param_.hinge_loss_param().norm()) {
+    case HingeLossParameter_Norm_L1:
+      caffe_cpu_sign(count, bottom_diff, bottom_diff);
+      caffe_scal(count, loss_weight / num, bottom_diff);
+      break;
+    case HingeLossParameter_Norm_L2:
+      caffe_scal(count, loss_weight * 2 / num, bottom_diff);
+      break;
+    default:
+      LOG(FATAL) << "Unknown Norm";
+    }
+  }
+}
+
+INSTANTIATE_CLASS(HingeLossLayer);
+REGISTER_LAYER_CLASS(HingeLoss);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/im2col_layer.cpp b/src/caffe/layers/im2col_layer.cpp
new file mode 100644
index 0000000..1c80271
--- /dev/null
+++ b/src/caffe/layers/im2col_layer.cpp
@@ -0,0 +1,95 @@
+#include <vector>
+
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/im2col.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void Im2colLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  ConvolutionParameter conv_param = this->layer_param_.convolution_param();
+  CHECK(!conv_param.has_kernel_size() !=
+      !(conv_param.has_kernel_h() && conv_param.has_kernel_w()))
+      << "Filter size is kernel_size OR kernel_h and kernel_w; not both";
+  CHECK(conv_param.has_kernel_size() ||
+      (conv_param.has_kernel_h() && conv_param.has_kernel_w()))
+      << "For non-square filters both kernel_h and kernel_w are required.";
+  CHECK((!conv_param.has_pad() && conv_param.has_pad_h()
+      && conv_param.has_pad_w())
+      || (!conv_param.has_pad_h() && !conv_param.has_pad_w()))
+      << "Padding is pad OR pad_h and pad_w; not both.";
+  CHECK((!conv_param.has_stride() && conv_param.has_stride_h()
+      && conv_param.has_stride_w())
+      || (!conv_param.has_stride_h() && !conv_param.has_stride_w()))
+      << "Stride is stride OR stride_h and stride_w; not both.";
+  if (conv_param.has_kernel_size()) {
+    kernel_h_ = kernel_w_ = conv_param.kernel_size();
+  } else {
+    kernel_h_ = conv_param.kernel_h();
+    kernel_w_ = conv_param.kernel_w();
+  }
+  CHECK_GT(kernel_h_, 0) << "Filter dimensions cannot be zero.";
+  CHECK_GT(kernel_w_, 0) << "Filter dimensions cannot be zero.";
+  if (!conv_param.has_pad_h()) {
+    pad_h_ = pad_w_ = conv_param.pad();
+  } else {
+    pad_h_ = conv_param.pad_h();
+    pad_w_ = conv_param.pad_w();
+  }
+  if (!conv_param.has_stride_h()) {
+    stride_h_ = stride_w_ = conv_param.stride();
+  } else {
+    stride_h_ = conv_param.stride_h();
+    stride_w_ = conv_param.stride_w();
+  }
+}
+
+template <typename Dtype>
+void Im2colLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  CHECK_EQ(4, bottom[0]->num_axes()) << "Input must have 4 axes, "
+      << "corresponding to (num, channels, height, width)";
+  channels_ = bottom[0]->channels();
+  height_ = bottom[0]->height();
+  width_ = bottom[0]->width();
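+  // Output spatial extent per axis: (input + 2 * pad - kernel) / stride + 1.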
+  top[0]->Reshape(
+      bottom[0]->num(), channels_ * kernel_h_ * kernel_w_,
+      (height_ + 2 * pad_h_ - kernel_h_) / stride_h_ + 1,
+      (width_ + 2 * pad_w_ - kernel_w_) / stride_w_ + 1);
+}
+
+template <typename Dtype>
+void Im2colLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  for (int n = 0; n < bottom[0]->num(); ++n) {
+    im2col_cpu(bottom_data + bottom[0]->offset(n), channels_, height_,
+        width_, kernel_h_, kernel_w_, pad_h_, pad_w_,
+        stride_h_, stride_w_, top_data + top[0]->offset(n));
+  }
+}
+
+template <typename Dtype>
+void Im2colLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* top_diff = top[0]->cpu_diff();
+  Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+  for (int n = 0; n < top[0]->num(); ++n) {
+    col2im_cpu(top_diff + top[0]->offset(n), channels_, height_, width_,
+        kernel_h_, kernel_w_, pad_h_, pad_w_,
+        stride_h_, stride_w_, bottom_diff + bottom[0]->offset(n));
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(Im2colLayer);
+#endif
+
+INSTANTIATE_CLASS(Im2colLayer);
+REGISTER_LAYER_CLASS(Im2col);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/im2col_layer.cu b/src/caffe/layers/im2col_layer.cu
new file mode 100644
index 0000000..9c338b1
--- /dev/null
+++ b/src/caffe/layers/im2col_layer.cu
@@ -0,0 +1,37 @@
+#include <vector>
+
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/im2col.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void Im2colLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  for (int n = 0; n < bottom[0]->num(); ++n) {
+    im2col_gpu(bottom_data + bottom[0]->offset(n), channels_, height_,
+        width_, kernel_h_, kernel_w_, pad_h_, pad_w_,
+        stride_h_, stride_w_, top_data + top[0]->offset(n));
+  }
+}
+
+template <typename Dtype>
+void Im2colLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* top_diff = top[0]->gpu_diff();
+  Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+  for (int n = 0; n < top[0]->num(); ++n) {
+    col2im_gpu(top_diff + top[0]->offset(n), channels_, height_, width_,
+        kernel_h_, kernel_w_, pad_h_, pad_w_,
+        stride_h_, stride_w_, bottom_diff + bottom[0]->offset(n));
+  }
+}
+
+
+INSTANTIATE_LAYER_GPU_FUNCS(Im2colLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/image_data_layer.cpp b/src/caffe/layers/image_data_layer.cpp
new file mode 100644
index 0000000..18c035c
--- /dev/null
+++ b/src/caffe/layers/image_data_layer.cpp
@@ -0,0 +1,159 @@
+#include <opencv2/core/core.hpp>
+
+#include <fstream>  // NOLINT(readability/streams)
+#include <iostream>  // NOLINT(readability/streams)
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "caffe/data_layers.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/benchmark.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/util/rng.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+ImageDataLayer<Dtype>::~ImageDataLayer<Dtype>() {
+  this->JoinPrefetchThread();
+}
+
+template <typename Dtype>
+void ImageDataLayer<Dtype>::DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const int new_height = this->layer_param_.image_data_param().new_height();
+  const int new_width  = this->layer_param_.image_data_param().new_width();
+  const bool is_color  = this->layer_param_.image_data_param().is_color();
+  string root_folder = this->layer_param_.image_data_param().root_folder();
+
+  CHECK((new_height == 0 && new_width == 0) ||
+      (new_height > 0 && new_width > 0)) << "Current implementation requires "
+      "new_height and new_width to be set at the same time.";
+  // Read the file with filenames and labels
+  const string& source = this->layer_param_.image_data_param().source();
+  LOG(INFO) << "Opening file " << source;
+  std::ifstream infile(source.c_str());
+  string filename;
+  int label;
+  while (infile >> filename >> label) {
+    lines_.push_back(std::make_pair(filename, label));
+  }
+
+  if (this->layer_param_.image_data_param().shuffle()) {
+    // randomly shuffle data
+    LOG(INFO) << "Shuffling data";
+    const unsigned int prefetch_rng_seed = caffe_rng_rand();
+    prefetch_rng_.reset(new Caffe::RNG(prefetch_rng_seed));
+    ShuffleImages();
+  }
+  LOG(INFO) << "A total of " << lines_.size() << " images.";
+
+  lines_id_ = 0;
+  // Check if we would need to randomly skip a few data points
+  if (this->layer_param_.image_data_param().rand_skip()) {
+    unsigned int skip = caffe_rng_rand() %
+        this->layer_param_.image_data_param().rand_skip();
+    LOG(INFO) << "Skipping first " << skip << " data points.";
+    CHECK_GT(lines_.size(), skip) << "Not enough points to skip";
+    lines_id_ = skip;
+  }
+  // Read an image, and use it to initialize the top blob.
+  cv::Mat cv_img = ReadImageToCVMat(root_folder + lines_[lines_id_].first,
+                                    new_height, new_width, is_color);
+  // Use data_transformer to infer the expected blob shape from a cv_image.
+  vector<int> top_shape = this->data_transformer_->InferBlobShape(cv_img);
+  this->transformed_data_.Reshape(top_shape);
+  // Reshape prefetch_data and top[0] according to the batch_size.
+  const int batch_size = this->layer_param_.image_data_param().batch_size();
+  top_shape[0] = batch_size;
+  this->prefetch_data_.Reshape(top_shape);
+  top[0]->ReshapeLike(this->prefetch_data_);
+
+  LOG(INFO) << "output data size: " << top[0]->num() << ","
+      << top[0]->channels() << "," << top[0]->height() << ","
+      << top[0]->width();
+  // label
+  vector<int> label_shape(1, batch_size);
+  top[1]->Reshape(label_shape);
+  this->prefetch_label_.Reshape(label_shape);
+}
+
+template <typename Dtype>
+void ImageDataLayer<Dtype>::ShuffleImages() {
+  caffe::rng_t* prefetch_rng =
+      static_cast<caffe::rng_t*>(prefetch_rng_->generator());
+  shuffle(lines_.begin(), lines_.end(), prefetch_rng);
+}
+
+// This function is used to create a thread that prefetches the data.
+template <typename Dtype>
+void ImageDataLayer<Dtype>::InternalThreadEntry() {
+  CPUTimer batch_timer;
+  batch_timer.Start();
+  double read_time = 0;
+  double trans_time = 0;
+  CPUTimer timer;
+  CHECK(this->prefetch_data_.count());
+  CHECK(this->transformed_data_.count());
+  ImageDataParameter image_data_param = this->layer_param_.image_data_param();
+  const int batch_size = image_data_param.batch_size();
+  const int new_height = image_data_param.new_height();
+  const int new_width = image_data_param.new_width();
+  const bool is_color = image_data_param.is_color();
+  string root_folder = image_data_param.root_folder();
+
+  // Reshape according to the first image of each batch; with single-image
+  // batches this allows inputs of varying dimension.
+  cv::Mat cv_img = ReadImageToCVMat(root_folder + lines_[lines_id_].first,
+      new_height, new_width, is_color);
+  // Use data_transformer to infer the expected blob shape from a cv_img.
+  vector<int> top_shape = this->data_transformer_->InferBlobShape(cv_img);
+  this->transformed_data_.Reshape(top_shape);
+  // Reshape prefetch_data according to the batch_size.
+  top_shape[0] = batch_size;
+  this->prefetch_data_.Reshape(top_shape);
+
+  Dtype* prefetch_data = this->prefetch_data_.mutable_cpu_data();
+  Dtype* prefetch_label = this->prefetch_label_.mutable_cpu_data();
+
+  // datum scales
+  const int lines_size = lines_.size();
+  for (int item_id = 0; item_id < batch_size; ++item_id) {
+    // get a blob
+    timer.Start();
+    CHECK_GT(lines_size, lines_id_);
+    cv::Mat cv_img = ReadImageToCVMat(root_folder + lines_[lines_id_].first,
+        new_height, new_width, is_color);
+    CHECK(cv_img.data) << "Could not load " << lines_[lines_id_].first;
+    read_time += timer.MicroSeconds();
+    timer.Start();
+    // Apply transformations (mirror, crop...) to the image
+    int offset = this->prefetch_data_.offset(item_id);
+    this->transformed_data_.set_cpu_data(prefetch_data + offset);
+    this->data_transformer_->Transform(cv_img, &(this->transformed_data_));
+    trans_time += timer.MicroSeconds();
+
+    prefetch_label[item_id] = lines_[lines_id_].second;
+    // go to the next iter
+    lines_id_++;
+    if (lines_id_ >= lines_size) {
+      // We have reached the end. Restart from the first.
+      DLOG(INFO) << "Restarting data prefetching from start.";
+      lines_id_ = 0;
+      if (this->layer_param_.image_data_param().shuffle()) {
+        ShuffleImages();
+      }
+    }
+  }
+  batch_timer.Stop();
+  DLOG(INFO) << "Prefetch batch: " << batch_timer.MilliSeconds() << " ms.";
+  DLOG(INFO) << "     Read time: " << read_time / 1000 << " ms.";
+  DLOG(INFO) << "Transform time: " << trans_time / 1000 << " ms.";
+}
+
+INSTANTIATE_CLASS(ImageDataLayer);
+REGISTER_LAYER_CLASS(ImageData);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/infogain_loss_layer.cpp b/src/caffe/layers/infogain_loss_layer.cpp
new file mode 100644
index 0000000..a1e0b40
--- /dev/null
+++ b/src/caffe/layers/infogain_loss_layer.cpp
@@ -0,0 +1,110 @@
+#include <algorithm>
+#include <cfloat>
+#include <cmath>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void InfogainLossLayer<Dtype>::LayerSetUp(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  LossLayer<Dtype>::LayerSetUp(bottom, top);
+  if (bottom.size() < 3) {
+    CHECK(this->layer_param_.infogain_loss_param().has_source())
+        << "Infogain matrix source must be specified.";
+    BlobProto blob_proto;
+    ReadProtoFromBinaryFile(
+      this->layer_param_.infogain_loss_param().source(), &blob_proto);
+    infogain_.FromProto(blob_proto);
+  }
+}
+
+template <typename Dtype>
+void InfogainLossLayer<Dtype>::Reshape(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  LossLayer<Dtype>::Reshape(bottom, top);
+  Blob<Dtype>* infogain = NULL;
+  if (bottom.size() < 3) {
+    infogain = &infogain_;
+  } else {
+    infogain = bottom[2];
+  }
+  CHECK_EQ(bottom[1]->channels(), 1);
+  CHECK_EQ(bottom[1]->height(), 1);
+  CHECK_EQ(bottom[1]->width(), 1);
+  const int num = bottom[0]->num();
+  const int dim = bottom[0]->count() / num;
+  CHECK_EQ(infogain->num(), 1);
+  CHECK_EQ(infogain->channels(), 1);
+  CHECK_EQ(infogain->height(), dim);
+  CHECK_EQ(infogain->width(), dim);
+}
+
+
+template <typename Dtype>
+void InfogainLossLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
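+  // E = -1/N * sum_n sum_j H(label_n, j) * log(max(p_nj, kLOG_THRESHOLD));
+  // with an identity H this reduces to the multinomial logistic loss.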
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  const Dtype* bottom_label = bottom[1]->cpu_data();
+  const Dtype* infogain_mat = NULL;
+  if (bottom.size() < 3) {
+    infogain_mat = infogain_.cpu_data();
+  } else {
+    infogain_mat = bottom[2]->cpu_data();
+  }
+  int num = bottom[0]->num();
+  int dim = bottom[0]->count() / bottom[0]->num();
+  Dtype loss = 0;
+  for (int i = 0; i < num; ++i) {
+    int label = static_cast<int>(bottom_label[i]);
+    for (int j = 0; j < dim; ++j) {
+      Dtype prob = std::max(bottom_data[i * dim + j], Dtype(kLOG_THRESHOLD));
+      loss -= infogain_mat[label * dim + j] * log(prob);
+    }
+  }
+  top[0]->mutable_cpu_data()[0] = loss / num;
+}
+
+template <typename Dtype>
+void InfogainLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[1]) {
+    LOG(FATAL) << this->type()
+               << " Layer cannot backpropagate to label inputs.";
+  }
+  if (propagate_down.size() > 2 && propagate_down[2]) {
+    LOG(FATAL) << this->type()
+               << " Layer cannot backpropagate to infogain inputs.";
+  }
+  if (propagate_down[0]) {
+    const Dtype* bottom_data = bottom[0]->cpu_data();
+    const Dtype* bottom_label = bottom[1]->cpu_data();
+    const Dtype* infogain_mat = NULL;
+    if (bottom.size() < 3) {
+      infogain_mat = infogain_.cpu_data();
+    } else {
+      infogain_mat = bottom[2]->cpu_data();
+    }
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    int num = bottom[0]->num();
+    int dim = bottom[0]->count() / bottom[0]->num();
+    const Dtype scale = - top[0]->cpu_diff()[0] / num;
+    for (int i = 0; i < num; ++i) {
+      const int label = static_cast<int>(bottom_label[i]);
+      for (int j = 0; j < dim; ++j) {
+        Dtype prob = std::max(bottom_data[i * dim + j], Dtype(kLOG_THRESHOLD));
+        bottom_diff[i * dim + j] = scale * infogain_mat[label * dim + j] / prob;
+      }
+    }
+  }
+}
+
+INSTANTIATE_CLASS(InfogainLossLayer);
+REGISTER_LAYER_CLASS(InfogainLoss);
+}  // namespace caffe
diff --git a/src/caffe/layers/inner_product_layer.cpp b/src/caffe/layers/inner_product_layer.cpp
new file mode 100644
index 0000000..83c3235
--- /dev/null
+++ b/src/caffe/layers/inner_product_layer.cpp
@@ -0,0 +1,129 @@
+#include <vector>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void InnerProductLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const int num_output = this->layer_param_.inner_product_param().num_output();
+  bias_term_ = this->layer_param_.inner_product_param().bias_term();
+  N_ = num_output;
+  const int axis = bottom[0]->CanonicalAxisIndex(
+      this->layer_param_.inner_product_param().axis());
+  // Dimensions starting from "axis" are "flattened" into a single
+  // length K_ vector. For example, if bottom[0]'s shape is (N, C, H, W),
+  // and axis == 1, N inner products with dimension CHW are performed.
+  K_ = bottom[0]->count(axis);
+  // Check if we need to set up the weights
+  if (this->blobs_.size() > 0) {
+    LOG(INFO) << "Skipping parameter initialization";
+  } else {
+    if (bias_term_) {
+      this->blobs_.resize(2);
+    } else {
+      this->blobs_.resize(1);
+    }
+    // Initialize the weight
+    vector<int> weight_shape(2);
+    weight_shape[0] = N_;
+    weight_shape[1] = K_;
+    this->blobs_[0].reset(new Blob<Dtype>(weight_shape));
+    // fill the weights
+    shared_ptr<Filler<Dtype> > weight_filler(GetFiller<Dtype>(
+        this->layer_param_.inner_product_param().weight_filler()));
+    weight_filler->Fill(this->blobs_[0].get());
+    // If necessary, initialize and fill the bias term
+    if (bias_term_) {
+      vector<int> bias_shape(1, N_);
+      this->blobs_[1].reset(new Blob<Dtype>(bias_shape));
+      shared_ptr<Filler<Dtype> > bias_filler(GetFiller<Dtype>(
+          this->layer_param_.inner_product_param().bias_filler()));
+      bias_filler->Fill(this->blobs_[1].get());
+    }
+  }  // parameter initialization
+  this->param_propagate_down_.resize(this->blobs_.size(), true);
+}
+
+template <typename Dtype>
+void InnerProductLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  // Figure out the dimensions
+  const int axis = bottom[0]->CanonicalAxisIndex(
+      this->layer_param_.inner_product_param().axis());
+  const int new_K = bottom[0]->count(axis);
+  CHECK_EQ(K_, new_K)
+      << "Input size incompatible with inner product parameters.";
+  // The first "axis" dimensions are independent inner products; the total
+  // number of these is M_, the product over these dimensions.
+  M_ = bottom[0]->count(0, axis);
+  // The top shape will be the bottom shape with the flattened axes dropped,
+  // and replaced by a single axis with dimension num_output (N_).
+  vector<int> top_shape = bottom[0]->shape();
+  top_shape.resize(axis + 1);
+  top_shape[axis] = N_;
+  top[0]->Reshape(top_shape);
+  // Set up the bias multiplier
+  if (bias_term_) {
+    vector<int> bias_shape(1, M_);
+    bias_multiplier_.Reshape(bias_shape);
+    caffe_set(M_, Dtype(1), bias_multiplier_.mutable_cpu_data());
+  }
+}
+
+template <typename Dtype>
+void InnerProductLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  const Dtype* weight = this->blobs_[0]->cpu_data();
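+  // top (M_ x N_) = bottom (M_ x K_) * weight^T (K_ x N_); the weight blob
+  // is stored as an N_ x K_ matrix, hence CblasTrans on the second operand.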
+  caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasTrans, M_, N_, K_, (Dtype)1.,
+      bottom_data, weight, (Dtype)0., top_data);
+  if (bias_term_) {
+    caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, M_, N_, 1, (Dtype)1.,
+        bias_multiplier_.cpu_data(),
+        this->blobs_[1]->cpu_data(), (Dtype)1., top_data);
+  }
+}
+
+template <typename Dtype>
+void InnerProductLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (this->param_propagate_down_[0]) {
+    const Dtype* top_diff = top[0]->cpu_diff();
+    const Dtype* bottom_data = bottom[0]->cpu_data();
+    // Gradient with respect to weight
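+    // dW (N_ x K_) = top_diff^T (N_ x M_) * bottom (M_ x K_), accumulated
+    // into the existing diff (beta = 1).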
+    caffe_cpu_gemm<Dtype>(CblasTrans, CblasNoTrans, N_, K_, M_, (Dtype)1.,
+        top_diff, bottom_data, (Dtype)1., this->blobs_[0]->mutable_cpu_diff());
+  }
+  if (bias_term_ && this->param_propagate_down_[1]) {
+    const Dtype* top_diff = top[0]->cpu_diff();
+    // Gradient with respect to bias
+    caffe_cpu_gemv<Dtype>(CblasTrans, M_, N_, (Dtype)1., top_diff,
+        bias_multiplier_.cpu_data(), (Dtype)1.,
+        this->blobs_[1]->mutable_cpu_diff());
+  }
+  if (propagate_down[0]) {
+    const Dtype* top_diff = top[0]->cpu_diff();
+    // Gradient with respect to bottom data
+    caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, M_, K_, N_, (Dtype)1.,
+        top_diff, this->blobs_[0]->cpu_data(), (Dtype)0.,
+        bottom[0]->mutable_cpu_diff());
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(InnerProductLayer);
+#endif
+
+INSTANTIATE_CLASS(InnerProductLayer);
+REGISTER_LAYER_CLASS(InnerProduct);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/inner_product_layer.cu b/src/caffe/layers/inner_product_layer.cu
new file mode 100644
index 0000000..dd90cac
--- /dev/null
+++ b/src/caffe/layers/inner_product_layer.cu
@@ -0,0 +1,56 @@
+#include <vector>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void InnerProductLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  const Dtype* weight = this->blobs_[0]->gpu_data();
+  caffe_gpu_gemm<Dtype>(CblasNoTrans, CblasTrans, M_, N_, K_, (Dtype)1.,
+      bottom_data, weight, (Dtype)0., top_data);
+  if (bias_term_) {
+    caffe_gpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, M_, N_, 1, (Dtype)1.,
+        bias_multiplier_.gpu_data(),
+        this->blobs_[1]->gpu_data(), (Dtype)1., top_data);
+  }
+}
+
+template <typename Dtype>
+void InnerProductLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (this->param_propagate_down_[0]) {
+    const Dtype* top_diff = top[0]->gpu_diff();
+    const Dtype* bottom_data = bottom[0]->gpu_data();
+    // Gradient with respect to weight
+    caffe_gpu_gemm<Dtype>(CblasTrans, CblasNoTrans, N_, K_, M_, (Dtype)1.,
+        top_diff, bottom_data, (Dtype)1., this->blobs_[0]->mutable_gpu_diff());
+  }
+  if (bias_term_ && this->param_propagate_down_[1]) {
+    const Dtype* top_diff = top[0]->gpu_diff();
+    // Gradient with respect to bias
+    caffe_gpu_gemv<Dtype>(CblasTrans, M_, N_, (Dtype)1., top_diff,
+        bias_multiplier_.gpu_data(), (Dtype)1.,
+        this->blobs_[1]->mutable_gpu_diff());
+  }
+  if (propagate_down[0]) {
+    const Dtype* top_diff = top[0]->gpu_diff();
+    // Gradient with respect to bottom data
+    caffe_gpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, M_, K_, N_, (Dtype)1.,
+        top_diff, this->blobs_[0]->gpu_data(), (Dtype)0.,
+        bottom[0]->mutable_gpu_diff());
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(InnerProductLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/log_layer.cpp b/src/caffe/layers/log_layer.cpp
new file mode 100644
index 0000000..55a227f
--- /dev/null
+++ b/src/caffe/layers/log_layer.cpp
@@ -0,0 +1,87 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/neuron_layers.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void LogLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  NeuronLayer<Dtype>::LayerSetUp(bottom, top);
+  const Dtype base = this->layer_param_.log_param().base();
+  if (base != Dtype(-1)) {
+    CHECK_GT(base, 0) << "base must be strictly positive.";
+  }
+  // If base == -1, interpret the base as e and set log_base = 1 exactly.
+  // Otherwise, calculate its log explicitly.
+  const Dtype log_base = (base == Dtype(-1)) ? Dtype(1) : log(base);
+  CHECK(!isnan(log_base))
+      << "NaN result: log(base) = log(" << base << ") = " << log_base;
+  CHECK(!isinf(log_base))
+      << "Inf result: log(base) = log(" << base << ") = " << log_base;
+  base_scale_ = Dtype(1) / log_base;
+  CHECK(!isnan(base_scale_))
+      << "NaN result: 1/log(base) = 1/log(" << base << ") = " << base_scale_;
+  CHECK(!isinf(base_scale_))
+      << "Inf result: 1/log(base) = 1/log(" << base << ") = " << base_scale_;
+  input_scale_ = this->layer_param_.log_param().scale();
+  input_shift_ = this->layer_param_.log_param().shift();
+  backward_num_scale_ = input_scale_ / log_base;
+}
+
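+// Forward computes y = log(input_scale_ * x + input_shift_) / log(base)
+// (natural log when base == -1). Backward uses
+// dy/dx = backward_num_scale_ / (input_scale_ * x + input_shift_),
+// where backward_num_scale_ = input_scale_ / log(base).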
+template <typename Dtype>
+void LogLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const int count = bottom[0]->count();
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  if (input_scale_ == Dtype(1) && input_shift_ == Dtype(0)) {
+    caffe_log(count, bottom_data, top_data);
+  } else {
+    caffe_copy(count, bottom_data, top_data);
+    if (input_scale_ != Dtype(1)) {
+      caffe_scal(count, input_scale_, top_data);
+    }
+    if (input_shift_ != Dtype(0)) {
+      caffe_add_scalar(count, input_shift_, top_data);
+    }
+    caffe_log(count, top_data, top_data);
+  }
+  if (base_scale_ != Dtype(1)) {
+    caffe_scal(count, base_scale_, top_data);
+  }
+}
+
+template <typename Dtype>
+void LogLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) { return; }
+  const int count = bottom[0]->count();
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  const Dtype* top_diff = top[0]->cpu_diff();
+  Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+  caffe_copy(count, bottom_data, bottom_diff);
+  if (input_scale_ != Dtype(1)) {
+    caffe_scal(count, input_scale_, bottom_diff);
+  }
+  if (input_shift_ != Dtype(0)) {
+    caffe_add_scalar(count, input_shift_, bottom_diff);
+  }
+  caffe_powx(count, bottom_diff, Dtype(-1), bottom_diff);
+  if (backward_num_scale_ != Dtype(1)) {
+    caffe_scal(count, backward_num_scale_, bottom_diff);
+  }
+  caffe_mul(count, top_diff, bottom_diff, bottom_diff);
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(LogLayer);
+#endif
+
+INSTANTIATE_CLASS(LogLayer);
+REGISTER_LAYER_CLASS(Log);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/log_layer.cu b/src/caffe/layers/log_layer.cu
new file mode 100644
index 0000000..847c86c
--- /dev/null
+++ b/src/caffe/layers/log_layer.cu
@@ -0,0 +1,57 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/neuron_layers.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void LogLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const int count = bottom[0]->count();
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  if (input_scale_ == Dtype(1) && input_shift_ == Dtype(0)) {
+    caffe_gpu_log(count, bottom_data, top_data);
+  } else {
+    caffe_copy(count, bottom_data, top_data);
+    if (input_scale_ != Dtype(1)) {
+      caffe_gpu_scal(count, input_scale_, top_data);
+    }
+    if (input_shift_ != Dtype(0)) {
+      caffe_gpu_add_scalar(count, input_shift_, top_data);
+    }
+    caffe_gpu_log(count, top_data, top_data);
+  }
+  if (base_scale_ != Dtype(1)) {
+    caffe_gpu_scal(count, base_scale_, top_data);
+  }
+}
+
+template <typename Dtype>
+void LogLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) { return; }
+  const int count = bottom[0]->count();
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  const Dtype* top_diff = top[0]->gpu_diff();
+  Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+  caffe_copy(count, bottom_data, bottom_diff);
+  if (input_scale_ != Dtype(1)) {
+    caffe_gpu_scal(count, input_scale_, bottom_diff);
+  }
+  if (input_shift_ != Dtype(0)) {
+    caffe_gpu_add_scalar(count, input_shift_, bottom_diff);
+  }
+  caffe_gpu_powx(count, bottom_diff, Dtype(-1), bottom_diff);
+  if (backward_num_scale_ != Dtype(1)) {
+    caffe_gpu_scal(count, backward_num_scale_, bottom_diff);
+  }
+  caffe_gpu_mul(count, top_diff, bottom_diff, bottom_diff);
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(LogLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/loss_layer.cpp b/src/caffe/layers/loss_layer.cpp
new file mode 100644
index 0000000..3496a5c
--- /dev/null
+++ b/src/caffe/layers/loss_layer.cpp
@@ -0,0 +1,33 @@
+#include <algorithm>
+#include <cfloat>
+#include <cmath>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void LossLayer<Dtype>::LayerSetUp(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  // LossLayers have a non-zero (1) loss by default.
+  if (this->layer_param_.loss_weight_size() == 0) {
+    this->layer_param_.add_loss_weight(Dtype(1));
+  }
+}
+
+template <typename Dtype>
+void LossLayer<Dtype>::Reshape(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  CHECK_EQ(bottom[0]->num(), bottom[1]->num())
+      << "The data and label should have the same number.";
+  vector<int> loss_shape(0);  // Loss layers output a scalar; 0 axes.
+  top[0]->Reshape(loss_shape);
+}
+
+INSTANTIATE_CLASS(LossLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/lrn_layer.cpp b/src/caffe/layers/lrn_layer.cpp
new file mode 100644
index 0000000..36c1ace
--- /dev/null
+++ b/src/caffe/layers/lrn_layer.cpp
@@ -0,0 +1,259 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void LRNLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  size_ = this->layer_param_.lrn_param().local_size();
+  CHECK_EQ(size_ % 2, 1) << "LRN only supports odd values for local_size";
+  pre_pad_ = (size_ - 1) / 2;
+  alpha_ = this->layer_param_.lrn_param().alpha();
+  beta_ = this->layer_param_.lrn_param().beta();
+  k_ = this->layer_param_.lrn_param().k();
+  if (this->layer_param_.lrn_param().norm_region() ==
+      LRNParameter_NormRegion_WITHIN_CHANNEL) {
+    // Set up split_layer_ to use inputs in the numerator and denominator.
+    split_top_vec_.clear();
+    split_top_vec_.push_back(&product_input_);
+    split_top_vec_.push_back(&square_input_);
+    LayerParameter split_param;
+    split_layer_.reset(new SplitLayer<Dtype>(split_param));
+    split_layer_->SetUp(bottom, split_top_vec_);
+    // Set up square_layer_ to square the inputs.
+    square_bottom_vec_.clear();
+    square_top_vec_.clear();
+    square_bottom_vec_.push_back(&square_input_);
+    square_top_vec_.push_back(&square_output_);
+    LayerParameter square_param;
+    square_param.mutable_power_param()->set_power(Dtype(2));
+    square_layer_.reset(new PowerLayer<Dtype>(square_param));
+    square_layer_->SetUp(square_bottom_vec_, square_top_vec_);
+    // Set up pool_layer_ to sum over square neighborhoods of the input.
+    pool_top_vec_.clear();
+    pool_top_vec_.push_back(&pool_output_);
+    LayerParameter pool_param;
+    pool_param.mutable_pooling_param()->set_pool(
+        PoolingParameter_PoolMethod_AVE);
+    pool_param.mutable_pooling_param()->set_pad(pre_pad_);
+    pool_param.mutable_pooling_param()->set_kernel_size(size_);
+    pool_layer_.reset(new PoolingLayer<Dtype>(pool_param));
+    pool_layer_->SetUp(square_top_vec_, pool_top_vec_);
+    // Set up power_layer_ to compute (1 + alpha_/N^2 s)^-beta_, where s is
+    // the sum of a squared neighborhood (N^2 times the averaged output of
+    // pool_layer_).
+    power_top_vec_.clear();
+    power_top_vec_.push_back(&power_output_);
+    LayerParameter power_param;
+    power_param.mutable_power_param()->set_power(-beta_);
+    power_param.mutable_power_param()->set_scale(alpha_);
+    power_param.mutable_power_param()->set_shift(Dtype(1));
+    power_layer_.reset(new PowerLayer<Dtype>(power_param));
+    power_layer_->SetUp(pool_top_vec_, power_top_vec_);
+    // Set up a product_layer_ to compute outputs by multiplying inputs by the
+    // inverse denominator computed by the power layer.
+    product_bottom_vec_.clear();
+    product_bottom_vec_.push_back(&product_input_);
+    product_bottom_vec_.push_back(&power_output_);
+    LayerParameter product_param;
+    EltwiseParameter* eltwise_param = product_param.mutable_eltwise_param();
+    eltwise_param->set_operation(EltwiseParameter_EltwiseOp_PROD);
+    product_layer_.reset(new EltwiseLayer<Dtype>(product_param));
+    product_layer_->SetUp(product_bottom_vec_, top);
+  }
+}
+
+template <typename Dtype>
+void LRNLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  CHECK_EQ(4, bottom[0]->num_axes()) << "Input must have 4 axes, "
+      << "corresponding to (num, channels, height, width)";
+  num_ = bottom[0]->num();
+  channels_ = bottom[0]->channels();
+  height_ = bottom[0]->height();
+  width_ = bottom[0]->width();
+  switch (this->layer_param_.lrn_param().norm_region()) {
+  case LRNParameter_NormRegion_ACROSS_CHANNELS:
+    top[0]->Reshape(num_, channels_, height_, width_);
+    scale_.Reshape(num_, channels_, height_, width_);
+    break;
+  case LRNParameter_NormRegion_WITHIN_CHANNEL:
+    split_layer_->Reshape(bottom, split_top_vec_);
+    square_layer_->Reshape(square_bottom_vec_, square_top_vec_);
+    pool_layer_->Reshape(square_top_vec_, pool_top_vec_);
+    power_layer_->Reshape(pool_top_vec_, power_top_vec_);
+    product_layer_->Reshape(product_bottom_vec_, top);
+    break;
+  }
+}
+
+template <typename Dtype>
+void LRNLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  switch (this->layer_param_.lrn_param().norm_region()) {
+  case LRNParameter_NormRegion_ACROSS_CHANNELS:
+    CrossChannelForward_cpu(bottom, top);
+    break;
+  case LRNParameter_NormRegion_WITHIN_CHANNEL:
+    WithinChannelForward(bottom, top);
+    break;
+  default:
+    LOG(FATAL) << "Unknown normalization region.";
+  }
+}
+
+template <typename Dtype>
+void LRNLayer<Dtype>::CrossChannelForward_cpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  Dtype* scale_data = scale_.mutable_cpu_data();
+  // start with the constant value
+  for (int i = 0; i < scale_.count(); ++i) {
+    scale_data[i] = k_;
+  }
+  Blob<Dtype> padded_square(1, channels_ + size_ - 1, height_, width_);
+  Dtype* padded_square_data = padded_square.mutable_cpu_data();
+  caffe_set(padded_square.count(), Dtype(0), padded_square_data);
+  Dtype alpha_over_size = alpha_ / size_;
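+  // scale_[n, c, h, w] accumulates k_ + (alpha_ / size_) * (sum of squared
+  // inputs over the size_-channel window centered on c); the window sum is
+  // maintained incrementally by adding the new head channel and subtracting
+  // the tail channel.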
+  // go through the images
+  for (int n = 0; n < num_; ++n) {
+    // compute the padded square
+    caffe_sqr(channels_ * height_ * width_,
+        bottom_data + bottom[0]->offset(n),
+        padded_square_data + padded_square.offset(0, pre_pad_));
+    // Create the first channel scale
+    for (int c = 0; c < size_; ++c) {
+      caffe_axpy<Dtype>(height_ * width_, alpha_over_size,
+          padded_square_data + padded_square.offset(0, c),
+          scale_data + scale_.offset(n, 0));
+    }
+    for (int c = 1; c < channels_; ++c) {
+      // copy previous scale
+      caffe_copy<Dtype>(height_ * width_,
+          scale_data + scale_.offset(n, c - 1),
+          scale_data + scale_.offset(n, c));
+      // add head
+      caffe_axpy<Dtype>(height_ * width_, alpha_over_size,
+          padded_square_data + padded_square.offset(0, c + size_ - 1),
+          scale_data + scale_.offset(n, c));
+      // subtract tail
+      caffe_axpy<Dtype>(height_ * width_, -alpha_over_size,
+          padded_square_data + padded_square.offset(0, c - 1),
+          scale_data + scale_.offset(n, c));
+    }
+  }
+
+  // In the end, compute output
+  caffe_powx<Dtype>(scale_.count(), scale_data, -beta_, top_data);
+  caffe_mul<Dtype>(scale_.count(), top_data, bottom_data, top_data);
+}
+
+template <typename Dtype>
+void LRNLayer<Dtype>::WithinChannelForward(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  split_layer_->Forward(bottom, split_top_vec_);
+  square_layer_->Forward(square_bottom_vec_, square_top_vec_);
+  pool_layer_->Forward(square_top_vec_, pool_top_vec_);
+  power_layer_->Forward(pool_top_vec_, power_top_vec_);
+  product_layer_->Forward(product_bottom_vec_, top);
+}
+
+template <typename Dtype>
+void LRNLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  switch (this->layer_param_.lrn_param().norm_region()) {
+  case LRNParameter_NormRegion_ACROSS_CHANNELS:
+    CrossChannelBackward_cpu(top, propagate_down, bottom);
+    break;
+  case LRNParameter_NormRegion_WITHIN_CHANNEL:
+    WithinChannelBackward(top, propagate_down, bottom);
+    break;
+  default:
+    LOG(FATAL) << "Unknown normalization region.";
+  }
+}
+
+template <typename Dtype>
+void LRNLayer<Dtype>::CrossChannelBackward_cpu(
+    const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* top_diff = top[0]->cpu_diff();
+  const Dtype* top_data = top[0]->cpu_data();
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  const Dtype* scale_data = scale_.cpu_data();
+  Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+  Blob<Dtype> padded_ratio(1, channels_ + size_ - 1, height_, width_);
+  Blob<Dtype> accum_ratio(1, 1, height_, width_);
+  Dtype* padded_ratio_data = padded_ratio.mutable_cpu_data();
+  Dtype* accum_ratio_data = accum_ratio.mutable_cpu_data();
+  // We hack a little bit by using the diff() to store an additional result
+  Dtype* accum_ratio_times_bottom = accum_ratio.mutable_cpu_diff();
+  caffe_set(padded_ratio.count(), Dtype(0), padded_ratio_data);
+  Dtype cache_ratio_value = 2. * alpha_ * beta_ / size_;
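+  // bottom_diff = top_diff * scale^(-beta_)
+  //     - (2 * alpha_ * beta_ / size_) * bottom_data
+  //       * sum over the local window of (top_diff * top_data / scale).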
+
+  caffe_powx<Dtype>(scale_.count(), scale_data, -beta_, bottom_diff);
+  caffe_mul<Dtype>(scale_.count(), top_diff, bottom_diff, bottom_diff);
+
+  // go through individual data
+  int inverse_pre_pad = size_ - (size_ + 1) / 2;
+  for (int n = 0; n < num_; ++n) {
+    int block_offset = scale_.offset(n);
+    // first, compute diff_i * y_i / s_i
+    caffe_mul<Dtype>(channels_ * height_ * width_,
+        top_diff + block_offset, top_data + block_offset,
+        padded_ratio_data + padded_ratio.offset(0, inverse_pre_pad));
+    caffe_div<Dtype>(channels_ * height_ * width_,
+        padded_ratio_data + padded_ratio.offset(0, inverse_pre_pad),
+        scale_data + block_offset,
+        padded_ratio_data + padded_ratio.offset(0, inverse_pre_pad));
+    // Now, compute the accumulated ratios and the bottom diff
+    caffe_set(accum_ratio.count(), Dtype(0), accum_ratio_data);
+    for (int c = 0; c < size_ - 1; ++c) {
+      caffe_axpy<Dtype>(height_ * width_, 1.,
+          padded_ratio_data + padded_ratio.offset(0, c), accum_ratio_data);
+    }
+    for (int c = 0; c < channels_; ++c) {
+      caffe_axpy<Dtype>(height_ * width_, 1.,
+          padded_ratio_data + padded_ratio.offset(0, c + size_ - 1),
+          accum_ratio_data);
+      // compute bottom diff
+      caffe_mul<Dtype>(height_ * width_,
+          bottom_data + top[0]->offset(n, c),
+          accum_ratio_data, accum_ratio_times_bottom);
+      caffe_axpy<Dtype>(height_ * width_, -cache_ratio_value,
+          accum_ratio_times_bottom, bottom_diff + top[0]->offset(n, c));
+      caffe_axpy<Dtype>(height_ * width_, -1.,
+          padded_ratio_data + padded_ratio.offset(0, c), accum_ratio_data);
+    }
+  }
+}
+
+template <typename Dtype>
+void LRNLayer<Dtype>::WithinChannelBackward(
+    const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    vector<bool> product_propagate_down(2, true);
+    product_layer_->Backward(top, product_propagate_down, product_bottom_vec_);
+    power_layer_->Backward(power_top_vec_, propagate_down, pool_top_vec_);
+    pool_layer_->Backward(pool_top_vec_, propagate_down, square_top_vec_);
+    square_layer_->Backward(square_top_vec_, propagate_down,
+                            square_bottom_vec_);
+    split_layer_->Backward(split_top_vec_, propagate_down, bottom);
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(LRNLayer);
+STUB_GPU_FORWARD(LRNLayer, CrossChannelForward);
+STUB_GPU_BACKWARD(LRNLayer, CrossChannelBackward);
+#endif
+
+INSTANTIATE_CLASS(LRNLayer);
+REGISTER_LAYER_CLASS(LRN);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/lrn_layer.cu b/src/caffe/layers/lrn_layer.cu
new file mode 100644
index 0000000..001b3c3
--- /dev/null
+++ b/src/caffe/layers/lrn_layer.cu
@@ -0,0 +1,203 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+__global__ void LRNFillScale(const int nthreads, const Dtype* const in,
+    const int num, const int channels, const int height,
+    const int width, const int size, const Dtype alpha_over_size,
+    const Dtype k, Dtype* const scale) {
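+  // One thread handles one (n, h, w) location, sliding a size-wide window
+  // along the channel axis while keeping a running sum of squares.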
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    // find out the local offset
+    const int w = index % width;
+    const int h = (index / width) % height;
+    const int n = index / width / height;
+    const int offset = (n * channels * height + h) * width + w;
+    const int step = height * width;
+    const Dtype* const in_off = in + offset;
+    Dtype* const scale_off = scale + offset;
+    int head = 0;
+    const int pre_pad = (size - 1) / 2;
+    const int post_pad = size - pre_pad - 1;
+    Dtype accum_scale = 0;
+    // fill the scale at [n, :, h, w]
+    // accumulate values
+    while (head < post_pad && head < channels) {
+      accum_scale += in_off[head * step] * in_off[head * step];
+      ++head;
+    }
+    // both add and subtract
+    while (head < channels) {
+      accum_scale += in_off[head * step] * in_off[head * step];
+      if (head - size >= 0) {
+        accum_scale -= in_off[(head - size) * step]
+                       * in_off[(head - size) * step];
+      }
+      scale_off[(head - post_pad) * step] = k + accum_scale * alpha_over_size;
+      ++head;
+    }
+    // subtract only
+    while (head < channels + post_pad) {
+      if (head - size >= 0) {
+        accum_scale -= in_off[(head - size) * step]
+                       * in_off[(head - size) * step];
+      }
+      scale_off[(head - post_pad) * step] = k + accum_scale * alpha_over_size;
+      ++head;
+    }
+  }
+}
+
+
+template <typename Dtype>
+void LRNLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  switch (this->layer_param_.lrn_param().norm_region()) {
+  case LRNParameter_NormRegion_ACROSS_CHANNELS:
+    CrossChannelForward_gpu(bottom, top);
+    break;
+  case LRNParameter_NormRegion_WITHIN_CHANNEL:
+    WithinChannelForward(bottom, top);
+    break;
+  default:
+    LOG(FATAL) << "Unknown normalization region.";
+  }
+}
+
+// TODO: check if it would be faster to just put it into the previous kernel.
+template <typename Dtype>
+__global__ void LRNComputeOutput(const int nthreads, const Dtype* const in,
+    const Dtype* const scale, const Dtype negative_beta, Dtype* const out) {
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    out[index] = in[index] * pow(scale[index], negative_beta);
+  }
+}
+
+template <typename Dtype>
+void LRNLayer<Dtype>::CrossChannelForward_gpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  // First, compute scale
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  Dtype* scale_data = scale_.mutable_gpu_data();
+  // We launch one CUDA thread for each (n, h, w) pixel location, and have
+  // each thread go through all the channels.
+  int n_threads = num_ * height_ * width_;
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  LRNFillScale<<<CAFFE_GET_BLOCKS(n_threads), CAFFE_CUDA_NUM_THREADS>>>(
+      n_threads, bottom_data, num_, channels_, height_, width_, size_,
+      alpha_ / size_, k_, scale_data);
+  CUDA_POST_KERNEL_CHECK;
+  n_threads = bottom[0]->count();
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  LRNComputeOutput<<<CAFFE_GET_BLOCKS(n_threads), CAFFE_CUDA_NUM_THREADS>>>(
+      n_threads, bottom_data, scale_data, -beta_, top_data);
+  CUDA_POST_KERNEL_CHECK;
+}
+template void LRNLayer<float>::CrossChannelForward_gpu(
+    const vector<Blob<float>*>& bottom, const vector<Blob<float>*>& top);
+template void LRNLayer<double>::CrossChannelForward_gpu(
+    const vector<Blob<double>*>& bottom, const vector<Blob<double>*>& top);
+
+
+template <typename Dtype>
+void LRNLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  switch (this->layer_param_.lrn_param().norm_region()) {
+  case LRNParameter_NormRegion_ACROSS_CHANNELS:
+    CrossChannelBackward_gpu(top, propagate_down, bottom);
+    break;
+  case LRNParameter_NormRegion_WITHIN_CHANNEL:
+    WithinChannelBackward(top, propagate_down, bottom);
+    break;
+  default:
+    LOG(FATAL) << "Unknown normalization region.";
+  }
+}
+
+template <typename Dtype>
+__global__ void LRNComputeDiff(const int nthreads,
+    const Dtype* const bottom_data, const Dtype* const top_data,
+    const Dtype* const scale, const Dtype* const top_diff,
+    const int num, const int channels, const int height,
+    const int width, const int size, const Dtype negative_beta,
+    const Dtype cache_ratio, Dtype* const bottom_diff) {
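+  // One thread per (n, h, w) location; accum_ratio keeps the windowed sum
+  // of top_diff * top_data / scale used in the LRN gradient.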
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    // find out the local offset
+    const int w = index % width;
+    const int h = (index / width) % height;
+    const int n = index / width / height;
+    const int offset = (n * channels * height + h) * width + w;
+    const int step = height * width;
+    const Dtype* const bottom_off = bottom_data + offset;
+    const Dtype* const top_off = top_data + offset;
+    const Dtype* const scale_off = scale + offset;
+    const Dtype* const top_diff_off = top_diff + offset;
+    Dtype* const bottom_diff_off = bottom_diff + offset;
+    int head = 0;
+    const int pre_pad = size - (size + 1) / 2;
+    const int post_pad = size - pre_pad - 1;
+    Dtype accum_ratio = 0;
+    // accumulate values
+    while (head < post_pad && head < channels) {
+      accum_ratio += top_diff_off[head * step] * top_off[head * step] /
+          scale_off[head * step];
+      ++head;
+    }
+    // both add and subtract
+    while (head < channels) {
+      accum_ratio += top_diff_off[head * step] * top_off[head * step] /
+          scale_off[head * step];
+      if (head - size >= 0) {
+        accum_ratio -= top_diff_off[(head - size) * step] *
+            top_off[(head - size) * step] / scale_off[(head - size) * step];
+      }
+      bottom_diff_off[(head - post_pad) * step] =
+          top_diff_off[(head - post_pad) * step]
+            * pow(scale_off[(head - post_pad) * step], negative_beta)
+          - cache_ratio * bottom_off[(head - post_pad) * step] * accum_ratio;
+      ++head;
+    }
+    // subtract only
+    while (head < channels + post_pad) {
+      if (head - size >= 0) {
+        accum_ratio -= top_diff_off[(head - size) * step] *
+            top_off[(head - size) * step] / scale_off[(head - size) * step];
+      }
+      bottom_diff_off[(head - post_pad) * step] =
+          top_diff_off[(head - post_pad) * step]
+            * pow(scale_off[(head - post_pad) * step], negative_beta)
+          - cache_ratio * bottom_off[(head - post_pad) * step] * accum_ratio;
+      ++head;
+    }
+  }
+}
+
+template <typename Dtype>
+void LRNLayer<Dtype>::CrossChannelBackward_gpu(
+    const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  int n_threads = num_ * height_ * width_;
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  LRNComputeDiff<<<CAFFE_GET_BLOCKS(n_threads), CAFFE_CUDA_NUM_THREADS>>>(
+      n_threads, bottom[0]->gpu_data(), top[0]->gpu_data(),
+      scale_.gpu_data(), top[0]->gpu_diff(), num_, channels_, height_, width_,
+      size_, -beta_, Dtype(2. * alpha_ * beta_ / size_),
+      bottom[0]->mutable_gpu_diff());
+}
+template void LRNLayer<float>::CrossChannelBackward_gpu(
+    const vector<Blob<float>*>& top, const vector<bool>& propagate_down,
+    const vector<Blob<float>*>& bottom);
+template void LRNLayer<double>::CrossChannelBackward_gpu(
+    const vector<Blob<double>*>& top, const vector<bool>& propagate_down,
+    const vector<Blob<double>*>& bottom);
+
+
+
+INSTANTIATE_LAYER_GPU_FUNCS(LRNLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/memory_data_layer.cpp b/src/caffe/layers/memory_data_layer.cpp
new file mode 100644
index 0000000..42de419
--- /dev/null
+++ b/src/caffe/layers/memory_data_layer.cpp
@@ -0,0 +1,121 @@
+#include <opencv2/core/core.hpp>
+
+#include <vector>
+
+#include "caffe/data_layers.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void MemoryDataLayer<Dtype>::DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
+     const vector<Blob<Dtype>*>& top) {
+  batch_size_ = this->layer_param_.memory_data_param().batch_size();
+  channels_ = this->layer_param_.memory_data_param().channels();
+  height_ = this->layer_param_.memory_data_param().height();
+  width_ = this->layer_param_.memory_data_param().width();
+  size_ = channels_ * height_ * width_;
+  CHECK_GT(batch_size_ * size_, 0) <<
+      "batch_size, channels, height, and width must be specified and"
+      " positive in memory_data_param";
+  vector<int> label_shape(1, batch_size_);
+  top[0]->Reshape(batch_size_, channels_, height_, width_);
+  top[1]->Reshape(label_shape);
+  added_data_.Reshape(batch_size_, channels_, height_, width_);
+  added_label_.Reshape(label_shape);
+  data_ = NULL;
+  labels_ = NULL;
+  added_data_.cpu_data();
+  added_label_.cpu_data();
+}
+
+template <typename Dtype>
+void MemoryDataLayer<Dtype>::AddDatumVector(const vector<Datum>& datum_vector) {
+  CHECK(!has_new_data_) <<
+      "Can't add data until current data has been consumed.";
+  size_t num = datum_vector.size();
+  CHECK_GT(num, 0) << "There is no datum to add.";
+  CHECK_EQ(num % batch_size_, 0) <<
+      "The added data must be a multiple of the batch size.";
+  added_data_.Reshape(num, channels_, height_, width_);
+  added_label_.Reshape(num, 1, 1, 1);
+  // Apply data transformations (mirror, scale, crop...)
+  this->data_transformer_->Transform(datum_vector, &added_data_);
+  // Copy Labels
+  Dtype* top_label = added_label_.mutable_cpu_data();
+  for (int item_id = 0; item_id < num; ++item_id) {
+    top_label[item_id] = datum_vector[item_id].label();
+  }
+  // num is a multiple of batch_size_ (checked above)
+  Dtype* top_data = added_data_.mutable_cpu_data();
+  Reset(top_data, top_label, num);
+  has_new_data_ = true;
+}
+
+template <typename Dtype>
+void MemoryDataLayer<Dtype>::AddMatVector(const vector<cv::Mat>& mat_vector,
+    const vector<int>& labels) {
+  size_t num = mat_vector.size();
+  CHECK(!has_new_data_) <<
+      "Can't add mat until current data has been consumed.";
+  CHECK_GT(num, 0) << "There is no mat to add";
+  CHECK_EQ(num % batch_size_, 0) <<
+      "The added data must be a multiple of the batch size.";
+  added_data_.Reshape(num, channels_, height_, width_);
+  added_label_.Reshape(num, 1, 1, 1);
+  // Apply data transformations (mirror, scale, crop...)
+  this->data_transformer_->Transform(mat_vector, &added_data_);
+  // Copy Labels
+  Dtype* top_label = added_label_.mutable_cpu_data();
+  for (int item_id = 0; item_id < num; ++item_id) {
+    top_label[item_id] = labels[item_id];
+  }
+  // num is a multiple of batch_size_ (checked above)
+  Dtype* top_data = added_data_.mutable_cpu_data();
+  Reset(top_data, top_label, num);
+  has_new_data_ = true;
+}
+
+template <typename Dtype>
+void MemoryDataLayer<Dtype>::Reset(Dtype* data, Dtype* labels, int n) {
+  CHECK(data);
+  CHECK(labels);
+  CHECK_EQ(n % batch_size_, 0) << "n must be a multiple of batch size";
+  // Warn if transformation parameters are set, since a memory array is meant
+  // to be generic and no transformations are done with Reset().
+  if (this->layer_param_.has_transform_param()) {
+    LOG(WARNING) << this->type() << " does not transform array data on Reset()";
+  }
+  data_ = data;
+  labels_ = labels;
+  n_ = n;
+  pos_ = 0;
+}
+
+template <typename Dtype>
+void MemoryDataLayer<Dtype>::set_batch_size(int new_size) {
+  CHECK(!has_new_data_) <<
+      "Can't change batch_size until current data has been consumed.";
+  batch_size_ = new_size;
+  added_data_.Reshape(batch_size_, channels_, height_, width_);
+  added_label_.Reshape(batch_size_, 1, 1, 1);
+}
+
+template <typename Dtype>
+void MemoryDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  CHECK(data_) << "MemoryDataLayer needs to be initialized by calling Reset";
+  top[0]->Reshape(batch_size_, channels_, height_, width_);
+  top[1]->Reshape(batch_size_, 1, 1, 1);
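+  // Point the top blobs directly at the user-provided arrays (no copy).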
+  top[0]->set_cpu_data(data_ + pos_ * size_);
+  top[1]->set_cpu_data(labels_ + pos_);
+  pos_ = (pos_ + batch_size_) % n_;
+  if (pos_ == 0)
+    has_new_data_ = false;
+}
+
+INSTANTIATE_CLASS(MemoryDataLayer);
+REGISTER_LAYER_CLASS(MemoryData);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/multinomial_logistic_loss_layer.cpp b/src/caffe/layers/multinomial_logistic_loss_layer.cpp
new file mode 100644
index 0000000..4267a59
--- /dev/null
+++ b/src/caffe/layers/multinomial_logistic_loss_layer.cpp
@@ -0,0 +1,67 @@
+#include <algorithm>
+#include <cfloat>
+#include <cmath>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void MultinomialLogisticLossLayer<Dtype>::Reshape(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  LossLayer<Dtype>::Reshape(bottom, top);
+  CHECK_EQ(bottom[1]->channels(), 1);
+  CHECK_EQ(bottom[1]->height(), 1);
+  CHECK_EQ(bottom[1]->width(), 1);
+}
+
+template <typename Dtype>
+void MultinomialLogisticLossLayer<Dtype>::Forward_cpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  const Dtype* bottom_label = bottom[1]->cpu_data();
+  int num = bottom[0]->num();
+  int dim = bottom[0]->count() / bottom[0]->num();
+  Dtype loss = 0;
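+  // loss = -(1 / num) * sum_i log(max(prob_i[label_i], kLOG_THRESHOLD))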
+  for (int i = 0; i < num; ++i) {
+    int label = static_cast<int>(bottom_label[i]);
+    Dtype prob = std::max(
+        bottom_data[i * dim + label], Dtype(kLOG_THRESHOLD));
+    loss -= log(prob);
+  }
+  top[0]->mutable_cpu_data()[0] = loss / num;
+}
+
+template <typename Dtype>
+void MultinomialLogisticLossLayer<Dtype>::Backward_cpu(
+    const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[1]) {
+    LOG(FATAL) << this->type()
+               << " Layer cannot backpropagate to label inputs.";
+  }
+  if (propagate_down[0]) {
+    const Dtype* bottom_data = bottom[0]->cpu_data();
+    const Dtype* bottom_label = bottom[1]->cpu_data();
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    int num = bottom[0]->num();
+    int dim = bottom[0]->count() / bottom[0]->num();
+    caffe_set(bottom[0]->count(), Dtype(0), bottom_diff);
+    const Dtype scale = - top[0]->cpu_diff()[0] / num;
+    for (int i = 0; i < num; ++i) {
+      int label = static_cast<int>(bottom_label[i]);
+      Dtype prob = std::max(
+          bottom_data[i * dim + label], Dtype(kLOG_THRESHOLD));
+      bottom_diff[i * dim + label] = scale / prob;
+    }
+  }
+}
+
+INSTANTIATE_CLASS(MultinomialLogisticLossLayer);
+REGISTER_LAYER_CLASS(MultinomialLogisticLoss);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/mvn_layer.cpp b/src/caffe/layers/mvn_layer.cpp
new file mode 100644
index 0000000..3e79bdd
--- /dev/null
+++ b/src/caffe/layers/mvn_layer.cpp
@@ -0,0 +1,145 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/common_layers.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void MVNLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  top[0]->Reshape(bottom[0]->num(), bottom[0]->channels(),
+      bottom[0]->height(), bottom[0]->width());
+  mean_.Reshape(bottom[0]->num(), bottom[0]->channels(),
+      1, 1);
+  variance_.Reshape(bottom[0]->num(), bottom[0]->channels(),
+      1, 1);
+  temp_.Reshape(bottom[0]->num(), bottom[0]->channels(),
+      bottom[0]->height(), bottom[0]->width());
+  sum_multiplier_.Reshape(1, 1,
+      bottom[0]->height(), bottom[0]->width());
+  Dtype* multiplier_data = sum_multiplier_.mutable_cpu_data();
+  caffe_set(sum_multiplier_.count(), Dtype(1), multiplier_data);
+  eps_ = this->layer_param_.mvn_param().eps();
+}
+
+template <typename Dtype>
+void MVNLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  int num;
+  if (this->layer_param_.mvn_param().across_channels())
+    num = bottom[0]->num();
+  else
+    num = bottom[0]->num() * bottom[0]->channels();
+
+  int dim = bottom[0]->count() / num;
+
+  if (this->layer_param_.mvn_param().normalize_variance()) {
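+    // Output is (x - mean) / (sqrt(var) + eps_), computed independently for
+    // each of the num rows of length dim.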
+    // put the squares of bottom into temp_
+    caffe_powx(bottom[0]->count(), bottom_data, Dtype(2),
+        temp_.mutable_cpu_data());
+
+    // computes variance using var(X) = E(X^2) - (EX)^2
+    caffe_cpu_gemv<Dtype>(CblasNoTrans, num, dim, 1. / dim, bottom_data,
+        sum_multiplier_.cpu_data(), 0., mean_.mutable_cpu_data());  // EX
+    caffe_cpu_gemv<Dtype>(CblasNoTrans, num, dim, 1. / dim, temp_.cpu_data(),
+        sum_multiplier_.cpu_data(), 0.,
+        variance_.mutable_cpu_data());  // E(X^2)
+    caffe_powx(mean_.count(), mean_.cpu_data(), Dtype(2),
+        temp_.mutable_cpu_data());  // (EX)^2
+    caffe_sub(mean_.count(), variance_.cpu_data(), temp_.cpu_data(),
+        variance_.mutable_cpu_data());  // variance
+
+    // do mean and variance normalization
+    // subtract mean
+    caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num, dim, 1, -1.,
+            mean_.cpu_data(), sum_multiplier_.cpu_data(), 0.,
+            temp_.mutable_cpu_data());
+
+    caffe_add(temp_.count(), bottom_data, temp_.cpu_data(), top_data);
+
+    // normalize variance
+    caffe_powx(variance_.count(), variance_.cpu_data(), Dtype(0.5),
+          variance_.mutable_cpu_data());
+
+    caffe_add_scalar(variance_.count(), eps_, variance_.mutable_cpu_data());
+
+    caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num, dim, 1, 1.,
+          variance_.cpu_data(), sum_multiplier_.cpu_data(), 0.,
+          temp_.mutable_cpu_data());
+
+    caffe_div(temp_.count(), top_data, temp_.cpu_data(), top_data);
+  } else {
+    caffe_cpu_gemv<Dtype>(CblasNoTrans, num, dim, 1. / dim, bottom_data,
+            sum_multiplier_.cpu_data(), 0., mean_.mutable_cpu_data());  // EX
+
+    // subtract mean
+    caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num, dim, 1, -1.,
+            mean_.cpu_data(), sum_multiplier_.cpu_data(), 0.,
+            temp_.mutable_cpu_data());
+
+    caffe_add(temp_.count(), bottom_data, temp_.cpu_data(), top_data);
+  }
+}
+
+template <typename Dtype>
+void MVNLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* top_diff = top[0]->cpu_diff();
+  const Dtype* top_data = top[0]->cpu_data();
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+
+  int num;
+  if (this->layer_param_.mvn_param().across_channels())
+    num = bottom[0]->num();
+  else
+    num = bottom[0]->num() * bottom[0]->channels();
+
+  int dim = bottom[0]->count() / num;
+
+  if (this->layer_param_.mvn_param().normalize_variance()) {
+    caffe_mul(temp_.count(), top_data, top_diff, bottom_diff);
+    caffe_cpu_gemv<Dtype>(CblasNoTrans, num, dim, 1., bottom_diff,
+          sum_multiplier_.cpu_data(), 0., mean_.mutable_cpu_data());
+    caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num, dim, 1, 1.,
+          mean_.cpu_data(), sum_multiplier_.cpu_data(), 0.,
+          bottom_diff);
+    caffe_mul(temp_.count(), top_data, bottom_diff, bottom_diff);
+
+    caffe_cpu_gemv<Dtype>(CblasNoTrans, num, dim, 1., top_diff,
+            sum_multiplier_.cpu_data(), 0., mean_.mutable_cpu_data());
+    caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num, dim, 1, 1.,
+            mean_.cpu_data(), sum_multiplier_.cpu_data(), 1.,
+            bottom_diff);
+
+    caffe_cpu_axpby(temp_.count(), Dtype(1), top_diff, Dtype(-1. / dim),
+        bottom_diff);
+
+    // put the squares of bottom into temp_
+    caffe_powx(temp_.count(), bottom_data, Dtype(2),
+        temp_.mutable_cpu_data());
+    caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num, dim, 1, 1.,
+        variance_.cpu_data(), sum_multiplier_.cpu_data(), 0.,
+        temp_.mutable_cpu_data());
+
+    caffe_div(temp_.count(), bottom_diff, temp_.cpu_data(), bottom_diff);
+  } else {
+    caffe_copy(temp_.count(), top_diff, bottom_diff);
+  }
+}
+
+
+#ifdef CPU_ONLY
+STUB_GPU(MVNLayer);
+#endif
+
+INSTANTIATE_CLASS(MVNLayer);
+REGISTER_LAYER_CLASS(MVN);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/mvn_layer.cu b/src/caffe/layers/mvn_layer.cu
new file mode 100644
index 0000000..3888a0c
--- /dev/null
+++ b/src/caffe/layers/mvn_layer.cu
@@ -0,0 +1,124 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/common_layers.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void MVNLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  int num;
+  if (this->layer_param_.mvn_param().across_channels())
+    num = bottom[0]->num();
+  else
+    num = bottom[0]->num() * bottom[0]->channels();
+
+  int dim = bottom[0]->count() / num;
+
+  if (this->layer_param_.mvn_param().normalize_variance()) {
+    // put the squares of bottom into temp_
+    caffe_gpu_powx(bottom[0]->count(), bottom_data, Dtype(2),
+        temp_.mutable_gpu_data());
+
+    // computes variance using var(X) = E(X^2) - (EX)^2
+    caffe_gpu_gemv<Dtype>(CblasNoTrans, num, dim, 1. / dim, bottom_data,
+        sum_multiplier_.gpu_data(), 0., mean_.mutable_gpu_data());  // EX
+    caffe_gpu_gemv<Dtype>(CblasNoTrans, num, dim, 1. / dim, temp_.gpu_data(),
+        sum_multiplier_.gpu_data(), 0.,
+        variance_.mutable_gpu_data());  // E(X^2)
+    caffe_gpu_powx(mean_.count(), mean_.gpu_data(), Dtype(2),
+        temp_.mutable_gpu_data());  // (EX)^2
+    caffe_gpu_sub(mean_.count(), variance_.gpu_data(), temp_.gpu_data(),
+        variance_.mutable_gpu_data());  // variance
+
+    // do mean and variance normalization
+    // subtract mean
+    caffe_gpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num, dim, 1, -1.,
+            mean_.gpu_data(), sum_multiplier_.gpu_data(), 0.,
+            temp_.mutable_gpu_data());
+
+    caffe_gpu_add(temp_.count(), bottom_data, temp_.gpu_data(), top_data);
+
+    // normalize variance
+    caffe_gpu_powx(variance_.count(), variance_.gpu_data(), Dtype(0.5),
+          variance_.mutable_gpu_data());
+
+    caffe_gpu_add_scalar(variance_.count(), eps_, variance_.mutable_gpu_data());
+
+    caffe_gpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num, dim, 1, 1.,
+          variance_.gpu_data(), sum_multiplier_.gpu_data(), 0.,
+          temp_.mutable_gpu_data());
+
+    caffe_gpu_div(temp_.count(), top_data, temp_.gpu_data(), top_data);
+  } else {
+    caffe_gpu_gemv<Dtype>(CblasNoTrans, num, dim, 1. / dim, bottom_data,
+            sum_multiplier_.gpu_data(), 0., mean_.mutable_gpu_data());  // EX
+
+    // subtract mean
+    caffe_gpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num, dim, 1, -1.,
+            mean_.gpu_data(), sum_multiplier_.gpu_data(), 0.,
+            temp_.mutable_gpu_data());
+
+    caffe_gpu_add(temp_.count(), bottom_data, temp_.gpu_data(), top_data);
+  }
+}
+
+template <typename Dtype>
+void MVNLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* top_diff = top[0]->gpu_diff();
+  const Dtype* top_data = top[0]->gpu_data();
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+
+  int num;
+  if (this->layer_param_.mvn_param().across_channels())
+    num = bottom[0]->num();
+  else
+    num = bottom[0]->num() * bottom[0]->channels();
+
+  int dim = bottom[0]->count() / num;
+
+  if (this->layer_param_.mvn_param().normalize_variance()) {
+    caffe_gpu_mul(temp_.count(), top_data, top_diff, bottom_diff);
+    caffe_gpu_gemv<Dtype>(CblasNoTrans, num, dim, 1., bottom_diff,
+          sum_multiplier_.gpu_data(), 0., mean_.mutable_gpu_data());
+    caffe_gpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num, dim, 1, 1.,
+          mean_.gpu_data(), sum_multiplier_.gpu_data(), 0.,
+          bottom_diff);
+    caffe_gpu_mul(temp_.count(), top_data, bottom_diff, bottom_diff);
+
+    caffe_gpu_gemv<Dtype>(CblasNoTrans, num, dim, 1., top_diff,
+            sum_multiplier_.gpu_data(), 0., mean_.mutable_gpu_data());
+    caffe_gpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num, dim, 1, 1.,
+            mean_.gpu_data(), sum_multiplier_.gpu_data(), 1.,
+            bottom_diff);
+
+    caffe_gpu_axpby(temp_.count(), Dtype(1), top_diff, Dtype(-1. / dim),
+        bottom_diff);
+
+    // put the squares of bottom into temp_
+    caffe_gpu_powx(temp_.count(), bottom_data, Dtype(2),
+        temp_.mutable_gpu_data());
+
+    caffe_gpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, num, dim, 1, 1.,
+        variance_.gpu_data(), sum_multiplier_.gpu_data(), 0.,
+        temp_.mutable_gpu_data());
+
+    caffe_gpu_div(temp_.count(), bottom_diff, temp_.gpu_data(), bottom_diff);
+  } else {
+    caffe_copy(temp_.count(), top_diff, bottom_diff);
+  }
+}
+
+
+INSTANTIATE_LAYER_GPU_FUNCS(MVNLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/neuron_layer.cpp b/src/caffe/layers/neuron_layer.cpp
new file mode 100644
index 0000000..ba67b43
--- /dev/null
+++ b/src/caffe/layers/neuron_layer.cpp
@@ -0,0 +1,16 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void NeuronLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  top[0]->ReshapeLike(*bottom[0]);
+}
+
+INSTANTIATE_CLASS(NeuronLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/pooling_layer.cpp b/src/caffe/layers/pooling_layer.cpp
new file mode 100644
index 0000000..c8d4149
--- /dev/null
+++ b/src/caffe/layers/pooling_layer.cpp
@@ -0,0 +1,319 @@
+#include <algorithm>
+#include <cfloat>
+#include <vector>
+
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/syncedmem.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+using std::min;
+using std::max;
+
+template <typename Dtype>
+void PoolingLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  PoolingParameter pool_param = this->layer_param_.pooling_param();
+  if (pool_param.global_pooling()) {
+    CHECK(!(pool_param.has_kernel_size() ||
+      pool_param.has_kernel_h() || pool_param.has_kernel_w()))
+      << "With Global_pooling: true Filter size cannot specified";
+  } else {
+    CHECK(!pool_param.has_kernel_size() !=
+      !(pool_param.has_kernel_h() && pool_param.has_kernel_w()))
+      << "Filter size is kernel_size OR kernel_h and kernel_w; not both";
+    CHECK(pool_param.has_kernel_size() ||
+      (pool_param.has_kernel_h() && pool_param.has_kernel_w()))
+      << "For non-square filters both kernel_h and kernel_w are required.";
+  }
+  CHECK((!pool_param.has_pad() && pool_param.has_pad_h()
+      && pool_param.has_pad_w())
+      || (!pool_param.has_pad_h() && !pool_param.has_pad_w()))
+      << "pad is pad OR pad_h and pad_w are required.";
+  CHECK((!pool_param.has_stride() && pool_param.has_stride_h()
+      && pool_param.has_stride_w())
+      || (!pool_param.has_stride_h() && !pool_param.has_stride_w()))
+      << "Stride is stride OR stride_h and stride_w are required.";
+  global_pooling_ = pool_param.global_pooling();
+  if (global_pooling_) {
+    kernel_h_ = bottom[0]->height();
+    kernel_w_ = bottom[0]->width();
+  } else {
+    if (pool_param.has_kernel_size()) {
+      kernel_h_ = kernel_w_ = pool_param.kernel_size();
+    } else {
+      kernel_h_ = pool_param.kernel_h();
+      kernel_w_ = pool_param.kernel_w();
+    }
+  }
+  CHECK_GT(kernel_h_, 0) << "Filter dimensions cannot be zero.";
+  CHECK_GT(kernel_w_, 0) << "Filter dimensions cannot be zero.";
+  if (!pool_param.has_pad_h()) {
+    pad_h_ = pad_w_ = pool_param.pad();
+  } else {
+    pad_h_ = pool_param.pad_h();
+    pad_w_ = pool_param.pad_w();
+  }
+  if (!pool_param.has_stride_h()) {
+    stride_h_ = stride_w_ = pool_param.stride();
+  } else {
+    stride_h_ = pool_param.stride_h();
+    stride_w_ = pool_param.stride_w();
+  }
+  if (global_pooling_) {
+    CHECK(pad_h_ == 0 && pad_w_ == 0 && stride_h_ == 1 && stride_w_ == 1)
+      << "With Global_pooling: true; only pad = 0 and stride = 1";
+  }
+  if (pad_h_ != 0 || pad_w_ != 0) {
+    CHECK(this->layer_param_.pooling_param().pool()
+        == PoolingParameter_PoolMethod_AVE
+        || this->layer_param_.pooling_param().pool()
+        == PoolingParameter_PoolMethod_MAX)
+        << "Padding implemented only for average and max pooling.";
+    CHECK_LT(pad_h_, kernel_h_);
+    CHECK_LT(pad_w_, kernel_w_);
+  }
+}
+
+template <typename Dtype>
+void PoolingLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  CHECK_EQ(4, bottom[0]->num_axes()) << "Input must have 4 axes, "
+      << "corresponding to (num, channels, height, width)";
+  channels_ = bottom[0]->channels();
+  height_ = bottom[0]->height();
+  width_ = bottom[0]->width();
+  if (global_pooling_) {
+    kernel_h_ = bottom[0]->height();
+    kernel_w_ = bottom[0]->width();
+  }
+  pooled_height_ = static_cast<int>(ceil(static_cast<float>(
+      height_ + 2 * pad_h_ - kernel_h_) / stride_h_)) + 1;
+  pooled_width_ = static_cast<int>(ceil(static_cast<float>(
+      width_ + 2 * pad_w_ - kernel_w_) / stride_w_)) + 1;
+  if (pad_h_ || pad_w_) {
+    // If we have padding, ensure that the last pooling window starts strictly
+    // inside the image (instead of at the padding); otherwise clip it.
+    if ((pooled_height_ - 1) * stride_h_ >= height_ + pad_h_) {
+      --pooled_height_;
+    }
+    if ((pooled_width_ - 1) * stride_w_ >= width_ + pad_w_) {
+      --pooled_width_;
+    }
+    CHECK_LT((pooled_height_ - 1) * stride_h_, height_ + pad_h_);
+    CHECK_LT((pooled_width_ - 1) * stride_w_, width_ + pad_w_);
+  }
+  top[0]->Reshape(bottom[0]->num(), channels_, pooled_height_,
+      pooled_width_);
+  if (top.size() > 1) {
+    top[1]->ReshapeLike(*top[0]);
+  }
+  // If max pooling, we will initialize the vector index part.
+  if (this->layer_param_.pooling_param().pool() ==
+      PoolingParameter_PoolMethod_MAX && top.size() == 1) {
+    max_idx_.Reshape(bottom[0]->num(), channels_, pooled_height_,
+        pooled_width_);
+  }
+  // If stochastic pooling, we will initialize the random index part.
+  if (this->layer_param_.pooling_param().pool() ==
+      PoolingParameter_PoolMethod_STOCHASTIC) {
+    rand_idx_.Reshape(bottom[0]->num(), channels_, pooled_height_,
+      pooled_width_);
+  }
+}
+
+// TODO(Yangqing): Is there a faster way to do pooling in the channel-first
+// case?
+template <typename Dtype>
+void PoolingLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  const int top_count = top[0]->count();
+  // We'll output the mask to top[1] if it's of size >1.
+  const bool use_top_mask = top.size() > 1;
+  int* mask = NULL;  // suppress warnings about uninitialized variables
+  Dtype* top_mask = NULL;
+  // Different pooling methods. We explicitly do the switch outside the for
+  // loop to save time, although this results in more code.
+  switch (this->layer_param_.pooling_param().pool()) {
+  case PoolingParameter_PoolMethod_MAX:
+    // Initialize
+    if (use_top_mask) {
+      top_mask = top[1]->mutable_cpu_data();
+      caffe_set(top_count, Dtype(-1), top_mask);
+    } else {
+      mask = max_idx_.mutable_cpu_data();
+      caffe_set(top_count, -1, mask);
+    }
+    caffe_set(top_count, Dtype(-FLT_MAX), top_data);
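+    // For each output location, record the max value and the flattened index
+    // (h * width_ + w) of the winning input for use in Backward.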
+    // The main loop
+    for (int n = 0; n < bottom[0]->num(); ++n) {
+      for (int c = 0; c < channels_; ++c) {
+        for (int ph = 0; ph < pooled_height_; ++ph) {
+          for (int pw = 0; pw < pooled_width_; ++pw) {
+            int hstart = ph * stride_h_ - pad_h_;
+            int wstart = pw * stride_w_ - pad_w_;
+            int hend = min(hstart + kernel_h_, height_);
+            int wend = min(wstart + kernel_w_, width_);
+            hstart = max(hstart, 0);
+            wstart = max(wstart, 0);
+            const int pool_index = ph * pooled_width_ + pw;
+            for (int h = hstart; h < hend; ++h) {
+              for (int w = wstart; w < wend; ++w) {
+                const int index = h * width_ + w;
+                if (bottom_data[index] > top_data[pool_index]) {
+                  top_data[pool_index] = bottom_data[index];
+                  if (use_top_mask) {
+                    top_mask[pool_index] = static_cast<Dtype>(index);
+                  } else {
+                    mask[pool_index] = index;
+                  }
+                }
+              }
+            }
+          }
+        }
+        // compute offset
+        bottom_data += bottom[0]->offset(0, 1);
+        top_data += top[0]->offset(0, 1);
+        if (use_top_mask) {
+          top_mask += top[0]->offset(0, 1);
+        } else {
+          mask += top[0]->offset(0, 1);
+        }
+      }
+    }
+    break;
+  case PoolingParameter_PoolMethod_AVE:
+    for (int i = 0; i < top_count; ++i) {
+      top_data[i] = 0;
+    }
+    // The main loop
+    for (int n = 0; n < bottom[0]->num(); ++n) {
+      for (int c = 0; c < channels_; ++c) {
+        for (int ph = 0; ph < pooled_height_; ++ph) {
+          for (int pw = 0; pw < pooled_width_; ++pw) {
+            int hstart = ph * stride_h_ - pad_h_;
+            int wstart = pw * stride_w_ - pad_w_;
+            int hend = min(hstart + kernel_h_, height_ + pad_h_);
+            int wend = min(wstart + kernel_w_, width_ + pad_w_);
+            int pool_size = (hend - hstart) * (wend - wstart);
+            hstart = max(hstart, 0);
+            wstart = max(wstart, 0);
+            hend = min(hend, height_);
+            wend = min(wend, width_);
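+            // Note (editorial): pool_size was computed before clipping the
+            // window to the image, so the average divides by the full
+            // (possibly padded) window size, matching the GPU kernel below.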
+            for (int h = hstart; h < hend; ++h) {
+              for (int w = wstart; w < wend; ++w) {
+                top_data[ph * pooled_width_ + pw] +=
+                    bottom_data[h * width_ + w];
+              }
+            }
+            top_data[ph * pooled_width_ + pw] /= pool_size;
+          }
+        }
+        // compute offset
+        bottom_data += bottom[0]->offset(0, 1);
+        top_data += top[0]->offset(0, 1);
+      }
+    }
+    break;
+  case PoolingParameter_PoolMethod_STOCHASTIC:
+    NOT_IMPLEMENTED;
+    break;
+  default:
+    LOG(FATAL) << "Unknown pooling method.";
+  }
+}
+
+template <typename Dtype>
+void PoolingLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) {
+    return;
+  }
+  const Dtype* top_diff = top[0]->cpu_diff();
+  Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+  // Different pooling methods. We explicitly do the switch outside the for
+  // loop to save time, although this results in more code.
+  caffe_set(bottom[0]->count(), Dtype(0), bottom_diff);
+  // We'll output the mask to top[1] if it's of size >1.
+  const bool use_top_mask = top.size() > 1;
+  const int* mask = NULL;  // suppress warnings about uninitialized variables
+  const Dtype* top_mask = NULL;
+  switch (this->layer_param_.pooling_param().pool()) {
+  case PoolingParameter_PoolMethod_MAX:
+    // The main loop
+    if (use_top_mask) {
+      top_mask = top[1]->cpu_data();
+    } else {
+      mask = max_idx_.cpu_data();
+    }
+    for (int n = 0; n < top[0]->num(); ++n) {
+      for (int c = 0; c < channels_; ++c) {
+        for (int ph = 0; ph < pooled_height_; ++ph) {
+          for (int pw = 0; pw < pooled_width_; ++pw) {
+            const int index = ph * pooled_width_ + pw;
+            const int bottom_index =
+                use_top_mask ? top_mask[index] : mask[index];
+            bottom_diff[bottom_index] += top_diff[index];
+          }
+        }
+        bottom_diff += bottom[0]->offset(0, 1);
+        top_diff += top[0]->offset(0, 1);
+        if (use_top_mask) {
+          top_mask += top[0]->offset(0, 1);
+        } else {
+          mask += top[0]->offset(0, 1);
+        }
+      }
+    }
+    break;
+  case PoolingParameter_PoolMethod_AVE:
+    // The main loop
+    for (int n = 0; n < top[0]->num(); ++n) {
+      for (int c = 0; c < channels_; ++c) {
+        for (int ph = 0; ph < pooled_height_; ++ph) {
+          for (int pw = 0; pw < pooled_width_; ++pw) {
+            int hstart = ph * stride_h_ - pad_h_;
+            int wstart = pw * stride_w_ - pad_w_;
+            int hend = min(hstart + kernel_h_, height_ + pad_h_);
+            int wend = min(wstart + kernel_w_, width_ + pad_w_);
+            int pool_size = (hend - hstart) * (wend - wstart);
+            hstart = max(hstart, 0);
+            wstart = max(wstart, 0);
+            hend = min(hend, height_);
+            wend = min(wend, width_);
+            for (int h = hstart; h < hend; ++h) {
+              for (int w = wstart; w < wend; ++w) {
+                bottom_diff[h * width_ + w] +=
+                  top_diff[ph * pooled_width_ + pw] / pool_size;
+              }
+            }
+          }
+        }
+        // offset
+        bottom_diff += bottom[0]->offset(0, 1);
+        top_diff += top[0]->offset(0, 1);
+      }
+    }
+    break;
+  case PoolingParameter_PoolMethod_STOCHASTIC:
+    NOT_IMPLEMENTED;
+    break;
+  default:
+    LOG(FATAL) << "Unknown pooling method.";
+  }
+}
+
+
+#ifdef CPU_ONLY
+STUB_GPU(PoolingLayer);
+#endif
+
+INSTANTIATE_CLASS(PoolingLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/pooling_layer.cu b/src/caffe/layers/pooling_layer.cu
new file mode 100644
index 0000000..ca4b13f
--- /dev/null
+++ b/src/caffe/layers/pooling_layer.cu
@@ -0,0 +1,387 @@
+#include <algorithm>
+#include <cfloat>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+__global__ void MaxPoolForward(const int nthreads,
+    const Dtype* const bottom_data, const int num, const int channels,
+    const int height, const int width, const int pooled_height,
+    const int pooled_width, const int kernel_h, const int kernel_w,
+    const int stride_h, const int stride_w, const int pad_h, const int pad_w,
+    Dtype* const top_data, int* mask, Dtype* top_mask) {
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    const int pw = index % pooled_width;
+    const int ph = (index / pooled_width) % pooled_height;
+    const int c = (index / pooled_width / pooled_height) % channels;
+    const int n = index / pooled_width / pooled_height / channels;
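+    // (Editorial note) The linear thread index decodes as
+    // index = ((n * channels + c) * pooled_height + ph) * pooled_width + pw,
+    // i.e. one thread per output element.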
+    int hstart = ph * stride_h - pad_h;
+    int wstart = pw * stride_w - pad_w;
+    const int hend = min(hstart + kernel_h, height);
+    const int wend = min(wstart + kernel_w, width);
+    hstart = max(hstart, 0);
+    wstart = max(wstart, 0);
+    Dtype maxval = -FLT_MAX;
+    int maxidx = -1;
+    const Dtype* const bottom_slice =
+        bottom_data + (n * channels + c) * height * width;
+    for (int h = hstart; h < hend; ++h) {
+      for (int w = wstart; w < wend; ++w) {
+        if (bottom_slice[h * width + w] > maxval) {
+          maxidx = h * width + w;
+          maxval = bottom_slice[maxidx];
+        }
+      }
+    }
+    top_data[index] = maxval;
+    if (mask) {
+      mask[index] = maxidx;
+    } else {
+      top_mask[index] = maxidx;
+    }
+  }
+}
+
+template <typename Dtype>
+__global__ void AvePoolForward(const int nthreads,
+    const Dtype* const bottom_data, const int num, const int channels,
+    const int height, const int width, const int pooled_height,
+    const int pooled_width, const int kernel_h, const int kernel_w,
+    const int stride_h, const int stride_w, const int pad_h, const int pad_w,
+    Dtype* const top_data) {
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    const int pw = index % pooled_width;
+    const int ph = (index / pooled_width) % pooled_height;
+    const int c = (index / pooled_width / pooled_height) % channels;
+    const int n = index / pooled_width / pooled_height / channels;
+    int hstart = ph * stride_h - pad_h;
+    int wstart = pw * stride_w - pad_w;
+    int hend = min(hstart + kernel_h, height + pad_h);
+    int wend = min(wstart + kernel_w, width + pad_w);
+    const int pool_size = (hend - hstart) * (wend - wstart);
+    hstart = max(hstart, 0);
+    wstart = max(wstart, 0);
+    hend = min(hend, height);
+    wend = min(wend, width);
+    Dtype aveval = 0;
+    const Dtype* const bottom_slice =
+        bottom_data + (n * channels + c) * height * width;
+    for (int h = hstart; h < hend; ++h) {
+      for (int w = wstart; w < wend; ++w) {
+        aveval += bottom_slice[h * width + w];
+      }
+    }
+    top_data[index] = aveval / pool_size;
+  }
+}
+
+template <typename Dtype>
+__global__ void StoPoolForwardTrain(const int nthreads,
+    const Dtype* const bottom_data,
+    const int num, const int channels, const int height,
+    const int width, const int pooled_height, const int pooled_width,
+    const int kernel_h, const int kernel_w, const int stride_h,
+    const int stride_w, Dtype* const rand_idx, Dtype* const top_data) {
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    const int pw = index % pooled_width;
+    const int ph = (index / pooled_width) % pooled_height;
+    const int c = (index / pooled_width / pooled_height) % channels;
+    const int n = index / pooled_width / pooled_height / channels;
+    const int hstart = ph * stride_h;
+    const int hend = min(hstart + kernel_h, height);
+    const int wstart = pw * stride_w;
+    const int wend = min(wstart + kernel_w, width);
+    Dtype cumsum = 0.;
+    const Dtype* const bottom_slice =
+        bottom_data + (n * channels + c) * height * width;
+    // First pass: get sum
+    for (int h = hstart; h < hend; ++h) {
+      for (int w = wstart; w < wend; ++w) {
+        cumsum += bottom_slice[h * width + w];
+      }
+    }
+    const float thres = rand_idx[index] * cumsum;
+    // Second pass: get value, and set index.
+    cumsum = 0;
+    for (int h = hstart; h < hend; ++h) {
+      for (int w = wstart; w < wend; ++w) {
+        cumsum += bottom_slice[h * width + w];
+        if (cumsum >= thres) {
+          rand_idx[index] = ((n * channels + c) * height + h) * width + w;
+          top_data[index] = bottom_slice[h * width + w];
+          return;
+        }
+      }
+    }
+  }
+}
+
+
+template <typename Dtype>
+__global__ void StoPoolForwardTest(const int nthreads,
+    const Dtype* const bottom_data,
+    const int num, const int channels, const int height,
+    const int width, const int pooled_height, const int pooled_width,
+    const int kernel_h, const int kernel_w, const int stride_h,
+    const int stride_w, Dtype* const top_data) {
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    const int pw = index % pooled_width;
+    const int ph = (index / pooled_width) % pooled_height;
+    const int c = (index / pooled_width / pooled_height) % channels;
+    const int n = index / pooled_width / pooled_height / channels;
+    const int hstart = ph * stride_h;
+    const int hend = min(hstart + kernel_h, height);
+    const int wstart = pw * stride_w;
+    const int wend = min(wstart + kernel_w, width);
+    // We start cumsum at FLT_MIN rather than 0 to avoid divide-by-zero.
+    Dtype cumsum = FLT_MIN;
+    Dtype cumvalues = 0.;
+    const Dtype* const bottom_slice =
+        bottom_data + (n * channels + c) * height * width;
+    // First pass: get sum
+    for (int h = hstart; h < hend; ++h) {
+      for (int w = wstart; w < wend; ++w) {
+        cumsum += bottom_slice[h * width + w];
+        cumvalues += bottom_slice[h * width + w] * bottom_slice[h * width + w];
+      }
+    }
+    top_data[index] = cumvalues / cumsum;
+  }
+}
+
+
+template <typename Dtype>
+void PoolingLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  int count = top[0]->count();
+  // We'll output the mask to top[1] if it's of size >1.
+  const bool use_top_mask = top.size() > 1;
+  int* mask = NULL;
+  Dtype* top_mask = NULL;
+  switch (this->layer_param_.pooling_param().pool()) {
+  case PoolingParameter_PoolMethod_MAX:
+    if (use_top_mask) {
+      top_mask = top[1]->mutable_gpu_data();
+    } else {
+      mask = max_idx_.mutable_gpu_data();
+    }
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    MaxPoolForward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+        count, bottom_data, bottom[0]->num(), channels_,
+        height_, width_, pooled_height_, pooled_width_, kernel_h_,
+        kernel_w_, stride_h_, stride_w_, pad_h_, pad_w_, top_data,
+        mask, top_mask);
+    break;
+  case PoolingParameter_PoolMethod_AVE:
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    AvePoolForward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+        count, bottom_data, bottom[0]->num(), channels_,
+        height_, width_, pooled_height_, pooled_width_, kernel_h_,
+        kernel_w_, stride_h_, stride_w_, pad_h_, pad_w_, top_data);
+    break;
+  case PoolingParameter_PoolMethod_STOCHASTIC:
+    if (this->phase_ == TRAIN) {
+      // We need to create the random index as well.
+      caffe_gpu_rng_uniform(count, Dtype(0), Dtype(1),
+                            rand_idx_.mutable_gpu_data());
+      // NOLINT_NEXT_LINE(whitespace/operators)
+      StoPoolForwardTrain<Dtype><<<CAFFE_GET_BLOCKS(count),
+                                   CAFFE_CUDA_NUM_THREADS>>>(
+          count, bottom_data, bottom[0]->num(), channels_,
+          height_, width_, pooled_height_, pooled_width_, kernel_h_,
+          kernel_w_, stride_h_, stride_w_,
+          rand_idx_.mutable_gpu_data(), top_data);
+    } else {
+      // NOLINT_NEXT_LINE(whitespace/operators)
+      StoPoolForwardTest<Dtype><<<CAFFE_GET_BLOCKS(count),
+                                  CAFFE_CUDA_NUM_THREADS>>>(
+          count, bottom_data, bottom[0]->num(), channels_,
+          height_, width_, pooled_height_, pooled_width_, kernel_h_,
+          kernel_w_, stride_h_, stride_w_, top_data);
+    }
+    break;
+  default:
+    LOG(FATAL) << "Unknown pooling method.";
+  }
+  CUDA_POST_KERNEL_CHECK;
+}
+
+
+template <typename Dtype>
+__global__ void MaxPoolBackward(const int nthreads, const Dtype* const top_diff,
+    const int* const mask, const Dtype* const top_mask, const int num,
+    const int channels, const int height, const int width,
+    const int pooled_height, const int pooled_width, const int kernel_h,
+    const int kernel_w, const int stride_h, const int stride_w, const int pad_h,
+    const int pad_w, Dtype* const bottom_diff) {
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    // find out the local offset (w, h, c, n) from the linear index
+    const int w = index % width;
+    const int h = (index / width) % height;
+    const int c = (index / width / height) % channels;
+    const int n = index / width / height / channels;
+    const int phstart =
+         (h + pad_h < kernel_h) ? 0 : (h + pad_h - kernel_h) / stride_h + 1;
+    const int phend = min((h + pad_h) / stride_h + 1, pooled_height);
+    const int pwstart =
+         (w + pad_w < kernel_w) ? 0 : (w + pad_w - kernel_w) / stride_w + 1;
+    const int pwend = min((w + pad_w) / stride_w + 1, pooled_width);
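+    // (Editorial note) [phstart, phend) x [pwstart, pwend) covers every
+    // pooled window whose receptive field could contain element (h, w).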
+    Dtype gradient = 0;
+    const int offset = (n * channels + c) * pooled_height * pooled_width;
+    const Dtype* const top_diff_slice = top_diff + offset;
+    if (mask) {
+      const int* const mask_slice = mask + offset;
+      for (int ph = phstart; ph < phend; ++ph) {
+        for (int pw = pwstart; pw < pwend; ++pw) {
+          if (mask_slice[ph * pooled_width + pw] == h * width + w) {
+            gradient += top_diff_slice[ph * pooled_width + pw];
+          }
+        }
+      }
+    } else {
+      const Dtype* const top_mask_slice = top_mask + offset;
+      for (int ph = phstart; ph < phend; ++ph) {
+        for (int pw = pwstart; pw < pwend; ++pw) {
+          if (top_mask_slice[ph * pooled_width + pw] == h * width + w) {
+            gradient += top_diff_slice[ph * pooled_width + pw];
+          }
+        }
+      }
+    }
+    bottom_diff[index] = gradient;
+  }
+}
+
+template <typename Dtype>
+__global__ void AvePoolBackward(const int nthreads, const Dtype* const top_diff,
+    const int num, const int channels, const int height,
+    const int width, const int pooled_height, const int pooled_width,
+    const int kernel_h, const int kernel_w, const int stride_h,
+    const int stride_w, const int pad_h, const int pad_w,
+    Dtype* const bottom_diff) {
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    // find out the local offset (w, h, c, n), in padded coordinates, from
+    // the linear index
+    const int w = index % width + pad_w;
+    const int h = (index / width) % height + pad_h;
+    const int c = (index / width / height) % channels;
+    const int n = index / width / height / channels;
+    const int phstart = (h < kernel_h) ? 0 : (h - kernel_h) / stride_h + 1;
+    const int phend = min(h / stride_h + 1, pooled_height);
+    const int pwstart = (w < kernel_w) ? 0 : (w - kernel_w) / stride_w + 1;
+    const int pwend = min(w / stride_w + 1, pooled_width);
+    Dtype gradient = 0;
+    const Dtype* const top_diff_slice =
+        top_diff + (n * channels + c) * pooled_height * pooled_width;
+    for (int ph = phstart; ph < phend; ++ph) {
+      for (int pw = pwstart; pw < pwend; ++pw) {
+        // figure out the pooling size
+        int hstart = ph * stride_h - pad_h;
+        int wstart = pw * stride_w - pad_w;
+        int hend = min(hstart + kernel_h, height + pad_h);
+        int wend = min(wstart + kernel_w, width + pad_w);
+        int pool_size = (hend - hstart) * (wend - wstart);
+        gradient += top_diff_slice[ph * pooled_width + pw] / pool_size;
+      }
+    }
+    bottom_diff[index] = gradient;
+  }
+}
+
+
+template <typename Dtype>
+__global__ void StoPoolBackward(const int nthreads,
+    const Dtype* const rand_idx, const Dtype* const top_diff,
+    const int num, const int channels, const int height,
+    const int width, const int pooled_height, const int pooled_width,
+    const int kernel_h, const int kernel_w, const int stride_h,
+    const int stride_w, Dtype* const bottom_diff) {
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    // find out the local offset (w, h, c, n) from the linear index
+    const int w = index % width;
+    const int h = (index / width) % height;
+    const int c = (index / width / height) % channels;
+    const int n = index / width / height / channels;
+    const int phstart = (h < kernel_h) ? 0 : (h - kernel_h) / stride_h + 1;
+    const int phend = min(h / stride_h + 1, pooled_height);
+    const int pwstart = (w < kernel_w) ? 0 : (w - kernel_w) / stride_w + 1;
+    const int pwend = min(w / stride_w + 1, pooled_width);
+    Dtype gradient = 0;
+    const Dtype* const rand_idx_slice =
+        rand_idx + (n * channels + c) * pooled_height * pooled_width;
+    const Dtype* const top_diff_slice =
+        top_diff + (n * channels + c) * pooled_height * pooled_width;
+    for (int ph = phstart; ph < phend; ++ph) {
+      for (int pw = pwstart; pw < pwend; ++pw) {
+        gradient += top_diff_slice[ph * pooled_width + pw] *
+            (index == static_cast<int>(rand_idx_slice[ph * pooled_width + pw]));
+      }
+    }
+    bottom_diff[index] = gradient;
+  }
+}
+
+
+template <typename Dtype>
+void PoolingLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) {
+    return;
+  }
+  const Dtype* top_diff = top[0]->gpu_diff();
+  Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+  const int count = bottom[0]->count();
+  caffe_gpu_set(count, Dtype(0.), bottom_diff);
+  // We'll output the mask to top[1] if it's of size >1.
+  const bool use_top_mask = top.size() > 1;
+  const int* mask = NULL;
+  const Dtype* top_mask = NULL;
+  switch (this->layer_param_.pooling_param().pool()) {
+  case PoolingParameter_PoolMethod_MAX:
+    if (use_top_mask) {
+      top_mask = top[1]->gpu_data();
+    } else {
+      mask = max_idx_.gpu_data();
+    }
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    MaxPoolBackward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+        count, top_diff, mask, top_mask, top[0]->num(), channels_,
+        height_, width_, pooled_height_, pooled_width_,
+        kernel_h_, kernel_w_, stride_h_, stride_w_, pad_h_, pad_w_,
+        bottom_diff);
+    break;
+  case PoolingParameter_PoolMethod_AVE:
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    AvePoolBackward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+        count, top_diff, top[0]->num(), channels_,
+        height_, width_, pooled_height_, pooled_width_, kernel_h_,
+        kernel_w_, stride_h_, stride_w_, pad_h_, pad_w_, bottom_diff);
+    break;
+  case PoolingParameter_PoolMethod_STOCHASTIC:
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    StoPoolBackward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+        count, rand_idx_.gpu_data(), top_diff,
+        top[0]->num(), channels_, height_, width_, pooled_height_,
+        pooled_width_, kernel_h_, kernel_w_, stride_h_, stride_w_,
+        bottom_diff);
+    break;
+  default:
+    LOG(FATAL) << "Unknown pooling method.";
+  }
+  CUDA_POST_KERNEL_CHECK;
+}
+
+
+INSTANTIATE_LAYER_GPU_FUNCS(PoolingLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/power_layer.cpp b/src/caffe/layers/power_layer.cpp
new file mode 100644
index 0000000..4fe34c4
--- /dev/null
+++ b/src/caffe/layers/power_layer.cpp
@@ -0,0 +1,104 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void PowerLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  NeuronLayer<Dtype>::LayerSetUp(bottom, top);
+  power_ = this->layer_param_.power_param().power();
+  scale_ = this->layer_param_.power_param().scale();
+  shift_ = this->layer_param_.power_param().shift();
+  diff_scale_ = power_  * scale_;
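+  // (Editorial note) diff_scale_ = power * scale is the constant factor of
+  // the gradient dy/dx = power * scale * (shift + scale * x)^(power - 1),
+  // cached here for the backward pass.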
+}
+
+// Compute y = (shift + scale * x)^power
+template <typename Dtype>
+void PowerLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  const int count = bottom[0]->count();
+  // Special case where we can ignore the input: scale or power is 0.
+  if (diff_scale_ == Dtype(0)) {
+    Dtype value = (power_ == 0) ? Dtype(1) : pow(shift_, power_);
+    caffe_set(count, value, top_data);
+    return;
+  }
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  caffe_copy(count, bottom_data, top_data);
+  if (scale_ != Dtype(1)) {
+    caffe_scal(count, scale_, top_data);
+  }
+  if (shift_ != Dtype(0)) {
+    caffe_add_scalar(count, shift_, top_data);
+  }
+  if (power_ != Dtype(1)) {
+    caffe_powx(count, top_data, power_, top_data);
+  }
+}
+
+template <typename Dtype>
+void PowerLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    const int count = bottom[0]->count();
+    const Dtype* top_diff = top[0]->cpu_diff();
+    if (diff_scale_ == Dtype(0) || power_ == Dtype(1)) {
+      caffe_set(count, diff_scale_, bottom_diff);
+    } else {
+      const Dtype* bottom_data = bottom[0]->cpu_data();
+      // Compute dy/dx = scale * power * (shift + scale * x)^(power - 1)
+      //               = diff_scale * y / (shift + scale * x)
+      if (power_ == Dtype(2)) {
+        // Special case for y = (shift + scale * x)^2
+        //     -> dy/dx = 2 * scale * (shift + scale * x)
+        //              = diff_scale * shift + diff_scale * scale * x
+        caffe_cpu_axpby(count, diff_scale_ * scale_, bottom_data,
+            Dtype(0), bottom_diff);
+        if (shift_ != Dtype(0)) {
+          caffe_add_scalar(count, diff_scale_ * shift_, bottom_diff);
+        }
+      } else if (shift_ == Dtype(0)) {
+        // Special case for y = (scale * x)^power
+        //     -> dy/dx = scale * power * (scale * x)^(power - 1)
+        //              = scale * power * (scale * x)^power * (scale * x)^(-1)
+        //              = power * y / x
+        const Dtype* top_data = top[0]->cpu_data();
+        caffe_div(count, top_data, bottom_data, bottom_diff);
+        caffe_scal(count, power_, bottom_diff);
+      } else {
+        caffe_copy(count, bottom_data, bottom_diff);
+        if (scale_ != Dtype(1)) {
+          caffe_scal(count, scale_, bottom_diff);
+        }
+        if (shift_ != Dtype(0)) {
+          caffe_add_scalar(count, shift_, bottom_diff);
+        }
+        const Dtype* top_data = top[0]->cpu_data();
+        caffe_div<Dtype>(count, top_data, bottom_diff, bottom_diff);
+        if (diff_scale_ != Dtype(1)) {
+          caffe_scal(count, diff_scale_, bottom_diff);
+        }
+      }
+    }
+    if (diff_scale_ != Dtype(0)) {
+      caffe_mul(count, top_diff, bottom_diff, bottom_diff);
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(PowerLayer);
+#endif
+
+INSTANTIATE_CLASS(PowerLayer);
+REGISTER_LAYER_CLASS(Power);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/power_layer.cu b/src/caffe/layers/power_layer.cu
new file mode 100644
index 0000000..90d9440
--- /dev/null
+++ b/src/caffe/layers/power_layer.cu
@@ -0,0 +1,87 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void PowerLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  const int count = bottom[0]->count();
+  // Special case where we can ignore the input: scale or power is 0.
+  if (diff_scale_ == Dtype(0)) {
+    Dtype value = (power_ == 0) ? Dtype(1) : pow(shift_, power_);
+    caffe_gpu_set(count, value, top_data);
+    return;
+  }
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  caffe_copy(count, bottom_data, top_data);
+  if (scale_ != Dtype(1)) {
+    caffe_gpu_scal(count, scale_, top_data);
+  }
+  if (shift_ != Dtype(0)) {
+    caffe_gpu_add_scalar(count, shift_, top_data);
+  }
+  if (power_ != Dtype(1)) {
+    caffe_gpu_powx(count, top_data, power_, top_data);
+  }
+}
+
+template <typename Dtype>
+void PowerLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+    const int count = bottom[0]->count();
+    const Dtype* top_diff = top[0]->gpu_diff();
+    if (diff_scale_ == Dtype(0) || power_ == Dtype(1)) {
+      caffe_gpu_set(count, diff_scale_, bottom_diff);
+    } else {
+      const Dtype* bottom_data = bottom[0]->gpu_data();
+      // Compute dy/dx = scale * power * (shift + scale * x)^(power - 1)
+      //               = diff_scale * y / (shift + scale * x)
+      if (power_ == Dtype(2)) {
+        // Special case for y = (shift + scale * x)^2
+        //     -> dy/dx = 2 * scale * (shift + scale * x)
+        //              = diff_scale * shift + diff_scale * scale * x
+        caffe_gpu_axpby(count, diff_scale_ * scale_, bottom_data,
+            Dtype(0), bottom_diff);
+        if (shift_ != Dtype(0)) {
+          caffe_gpu_add_scalar(count, diff_scale_ * shift_, bottom_diff);
+        }
+      } else if (shift_ == Dtype(0)) {
+        // Special case for y = (scale * x)^power
+        //     -> dy/dx = scale * power * (scale * x)^(power - 1)
+        //              = scale * power * (scale * x)^power * (scale * x)^(-1)
+        //              = power * y / x
+        const Dtype* top_data = top[0]->gpu_data();
+        caffe_gpu_div(count, top_data, bottom_data, bottom_diff);
+        caffe_gpu_scal(count, power_, bottom_diff);
+      } else {
+        caffe_copy(count, bottom_data, bottom_diff);
+        if (scale_ != Dtype(1)) {
+          caffe_gpu_scal(count, scale_, bottom_diff);
+        }
+        if (shift_ != Dtype(0)) {
+          caffe_gpu_add_scalar(count, shift_, bottom_diff);
+        }
+        const Dtype* top_data = top[0]->gpu_data();
+        caffe_gpu_div<Dtype>(count, top_data, bottom_diff, bottom_diff);
+        if (diff_scale_ != Dtype(1)) {
+          caffe_gpu_scal(count, diff_scale_, bottom_diff);
+        }
+      }
+    }
+    caffe_gpu_mul(count, top_diff, bottom_diff, bottom_diff);
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(PowerLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/prelu_layer.cpp b/src/caffe/layers/prelu_layer.cpp
new file mode 100644
index 0000000..8183175
--- /dev/null
+++ b/src/caffe/layers/prelu_layer.cpp
@@ -0,0 +1,140 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void PReLULayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  CHECK_GE(bottom[0]->num_axes(), 2)
+      << "Number of axes of bottom blob must be >=2.";
+  PReLUParameter prelu_param = this->layer_param().prelu_param();
+  int channels = bottom[0]->channels();
+  channel_shared_ = prelu_param.channel_shared();
+  if (this->blobs_.size() > 0) {
+    LOG(INFO) << "Skipping parameter initialization";
+  } else {
+    this->blobs_.resize(1);
+    if (channel_shared_) {
+      this->blobs_[0].reset(new Blob<Dtype>(vector<int>(0)));
+    } else {
+      this->blobs_[0].reset(new Blob<Dtype>(vector<int>(1, channels)));
+    }
+    shared_ptr<Filler<Dtype> > filler;
+    if (prelu_param.has_filler()) {
+      filler.reset(GetFiller<Dtype>(prelu_param.filler()));
+    } else {
+      FillerParameter filler_param;
+      filler_param.set_type("constant");
+      filler_param.set_value(0.25);
+      filler.reset(GetFiller<Dtype>(filler_param));
+    }
+    filler->Fill(this->blobs_[0].get());
+  }
+  if (channel_shared_) {
+    CHECK_EQ(this->blobs_[0]->count(), 1)
+        << "Negative slope size is inconsistent with prototxt config";
+  } else {
+    CHECK_EQ(this->blobs_[0]->count(), channels)
+        << "Negative slope size is inconsistent with prototxt config";
+  }
+
+  // Propagate gradients to the parameters (as directed by backward pass).
+  this->param_propagate_down_.resize(this->blobs_.size(), true);
+  multiplier_.Reshape(vector<int>(1, bottom[0]->count(1)));
+  backward_buff_.Reshape(vector<int>(1, bottom[0]->count(1)));
+  caffe_set(multiplier_.count(), Dtype(1), multiplier_.mutable_cpu_data());
+}
+
+template <typename Dtype>
+void PReLULayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  CHECK_GE(bottom[0]->num_axes(), 2)
+      << "Number of axes of bottom blob must be >=2.";
+  top[0]->ReshapeLike(*bottom[0]);
+  if (bottom[0] == top[0]) {
+    // For in-place computation
+    bottom_memory_.ReshapeLike(*bottom[0]);
+  }
+}
+
+template <typename Dtype>
+void PReLULayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  const int count = bottom[0]->count();
+  const int dim = bottom[0]->count(2);
+  const int channels = bottom[0]->channels();
+  const Dtype* slope_data = this->blobs_[0]->cpu_data();
+
+  // For in-place computation
+  if (bottom[0] == top[0]) {
+    caffe_copy(count, bottom_data, bottom_memory_.mutable_cpu_data());
+  }
+
+  // If channel_shared_, the channel index in the following computation is
+  // always zero.
+  const int div_factor = channel_shared_ ? channels : 1;
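+  // (Editorial note) Below, top equals bottom for positive inputs and
+  // slope[c] * bottom otherwise, i.e. the PReLU function
+  // f(x) = max(0, x) + a_c * min(0, x).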
+  for (int i = 0; i < count; ++i) {
+    int c = (i / dim) % channels / div_factor;
+    top_data[i] = std::max(bottom_data[i], Dtype(0))
+        + slope_data[c] * std::min(bottom_data[i], Dtype(0));
+  }
+}
+
+template <typename Dtype>
+void PReLULayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  const Dtype* slope_data = this->blobs_[0]->cpu_data();
+  const Dtype* top_diff = top[0]->cpu_diff();
+  const int count = bottom[0]->count();
+  const int dim = bottom[0]->count(2);
+  const int channels = bottom[0]->channels();
+
+  // For in-place computation
+  if (top[0] == bottom[0]) {
+    bottom_data = bottom_memory_.cpu_data();
+  }
+
+  // If channel_shared_, the channel index in the following computation is
+  // always zero.
+  const int div_factor = channel_shared_ ? channels : 1;
+
+  // Propagate to the parameter.
+  // Since writing bottom diff would affect top diff if the top and bottom
+  // blobs are identical (in-place computation), we compute the parameter
+  // backward pass first to keep top_diff unchanged.
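+  // (Editorial note) The slope gradient accumulated below is, per channel c,
+  // the sum of top_diff[i] * bottom_data[i] over the non-positive inputs.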
+  if (this->param_propagate_down_[0]) {
+    Dtype* slope_diff = this->blobs_[0]->mutable_cpu_diff();
+    for (int i = 0; i < count; ++i) {
+      int c = (i / dim) % channels / div_factor;
+      slope_diff[c] += top_diff[i] * bottom_data[i] * (bottom_data[i] <= 0);
+    }
+  }
+  // Propagate to bottom
+  if (propagate_down[0]) {
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    for (int i = 0; i < count; ++i) {
+      int c = (i / dim) % channels / div_factor;
+      bottom_diff[i] = top_diff[i] * ((bottom_data[i] > 0)
+          + slope_data[c] * (bottom_data[i] <= 0));
+    }
+  }
+}
+
+
+#ifdef CPU_ONLY
+STUB_GPU(PReLULayer);
+#endif
+
+INSTANTIATE_CLASS(PReLULayer);
+REGISTER_LAYER_CLASS(PReLU);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/prelu_layer.cu b/src/caffe/layers/prelu_layer.cu
new file mode 100644
index 0000000..e1f2004
--- /dev/null
+++ b/src/caffe/layers/prelu_layer.cu
@@ -0,0 +1,128 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+// CUDA kernel for forward
+template <typename Dtype>
+__global__ void PReLUForward(const int n, const int channels, const int dim,
+    const Dtype* in, Dtype* out, const Dtype* slope_data,
+    const int div_factor) {
+  CUDA_KERNEL_LOOP(index, n) {
+    int c = (index / dim) % channels / div_factor;
+    out[index] = in[index] > 0 ? in[index] : in[index] * slope_data[c];
+  }
+}
+
+// CUDA kernel for bottom backward
+template <typename Dtype>
+__global__ void PReLUBackward(const int n, const int channels, const int dim,
+    const Dtype* in_diff, const Dtype* in_data, Dtype* out_diff,
+    const Dtype* slope_data, const int div_factor) {
+  CUDA_KERNEL_LOOP(index, n) {
+    int c = (index / dim) % channels / div_factor;
+    out_diff[index] = in_diff[index] * ((in_data[index] > 0)
+        + (in_data[index] <= 0) * slope_data[c]);
+  }
+}
+
+// CUDA kernel for element-wise parameter backward
+template <typename Dtype>
+__global__ void PReLUParamBackward(const int n, const Dtype* in_diff,
+    const Dtype* in_data, Dtype* out_diff) {
+  CUDA_KERNEL_LOOP(index, n) {
+    out_diff[index] = in_diff[index] * in_data[index] * (in_data[index] <= 0);
+  }
+}
+
+template <typename Dtype>
+void PReLULayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  const int count = bottom[0]->count();
+  const int dim = bottom[0]->count(2);
+  const int channels = bottom[0]->channels();
+  const Dtype* slope_data = this->blobs_[0]->gpu_data();
+  const int div_factor = channel_shared_ ? channels : 1;
+
+  // For in-place computation
+  if (top[0] == bottom[0]) {
+    caffe_copy(count, bottom_data, bottom_memory_.mutable_gpu_data());
+  }
+
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  PReLUForward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+      count, channels, dim, bottom_data, top_data, slope_data, div_factor);
+  CUDA_POST_KERNEL_CHECK;
+}
+
+template <typename Dtype>
+void PReLULayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  const Dtype* top_diff = top[0]->gpu_diff();
+  const int count = bottom[0]->count();
+  const int dim = bottom[0]->count(2);
+  const int channels = bottom[0]->channels();
+
+  // For in-place computation
+  if (top[0] == bottom[0]) {
+    bottom_data = bottom_memory_.gpu_data();
+  }
+
+  // Propagate to the parameter.
+  // Since writing bottom diff would affect top diff if the top and bottom
+  // blobs are identical (in-place computation), we compute the parameter
+  // backward pass first to keep top_diff unchanged.
+  if (this->param_propagate_down_[0]) {
+    Dtype* slope_diff = this->blobs_[0]->mutable_gpu_diff();
+    int cdim = channels * dim;
+    Dtype dsum = 0.;
+    for (int n = 0; n < bottom[0]->num(); ++n) {
+      // compute element-wise diff
+      // NOLINT_NEXT_LINE(whitespace/operators)
+      PReLUParamBackward<Dtype><<<CAFFE_GET_BLOCKS(cdim),
+          CAFFE_CUDA_NUM_THREADS>>>(
+          cdim, top_diff + top[0]->offset(n),
+          bottom_data + bottom[0]->offset(n),
+          backward_buff_.mutable_gpu_diff());
+      CUDA_POST_KERNEL_CHECK;
+      if (channel_shared_) {
+        Dtype d;
+        caffe_gpu_dot<Dtype>(channels * dim, backward_buff_.gpu_diff(),
+            multiplier_.gpu_data(), &d);
+        dsum += d;
+      } else {
+        caffe_gpu_gemv<Dtype>(CblasNoTrans, channels, dim, 1.,
+            backward_buff_.gpu_diff(), multiplier_.gpu_data(), 1.,
+            slope_diff);
+      }
+    }
+    if (channel_shared_) {
+      caffe_gpu_add_scalar(this->blobs_[0]->count(), Dtype(dsum), slope_diff);
+    }
+  }
+  // Propagate to bottom
+  if (propagate_down[0]) {
+    Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+    const Dtype* slope_data = this->blobs_[0]->gpu_data();
+    int div_factor = channel_shared_ ? channels : 1;
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    PReLUBackward<Dtype><<<CAFFE_GET_BLOCKS(count),
+        CAFFE_CUDA_NUM_THREADS>>>(
+        count, channels, dim, top_diff, bottom_data, bottom_diff, slope_data,
+        div_factor);
+    CUDA_POST_KERNEL_CHECK;
+  }
+}
+
+
+INSTANTIATE_LAYER_GPU_FUNCS(PReLULayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/reduction_layer.cpp b/src/caffe/layers/reduction_layer.cpp
new file mode 100644
index 0000000..8ae6329
--- /dev/null
+++ b/src/caffe/layers/reduction_layer.cpp
@@ -0,0 +1,132 @@
+#include <algorithm>
+#include <cfloat>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void ReductionLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  op_ = this->layer_param_.reduction_param().operation();
+}
+
+template <typename Dtype>
+void ReductionLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  axis_ = bottom[0]->CanonicalAxisIndex(
+      this->layer_param_.reduction_param().axis());
+  // In the output, we'll keep all axes up to the reduction axis, but
+  // throw away any after that.
+  // Note: currently reducing along non-tail axes is not supported; otherwise,
+  // we'd need to also copy any axes following an "end_axis".
+  vector<int> top_shape(bottom[0]->shape().begin(),
+                        bottom[0]->shape().begin() + axis_);
+  top[0]->Reshape(top_shape);
+  num_ = bottom[0]->count(0, axis_);
+  dim_ = bottom[0]->count(axis_);
+  CHECK_EQ(num_, top[0]->count());
+  if (op_ == ReductionParameter_ReductionOp_SUM ||
+      op_ == ReductionParameter_ReductionOp_MEAN) {
+    vector<int> sum_mult_shape(1, dim_);
+    sum_multiplier_.Reshape(sum_mult_shape);
+    caffe_set(dim_, Dtype(1), sum_multiplier_.mutable_cpu_data());
+  }
+  coeff_ = this->layer_param().reduction_param().coeff();
+  if (op_ == ReductionParameter_ReductionOp_MEAN) {
+    coeff_ /= dim_;
+  }
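+  // (Editorial note) MEAN is thus implemented as SUM with the extra 1 / dim_
+  // factor folded into coeff_.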
+}
+
+template <typename Dtype>
+void ReductionLayer<Dtype>::Forward_cpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  const Dtype* mult_data = NULL;
+  if (sum_multiplier_.count() > 0) {
+    mult_data = sum_multiplier_.cpu_data();
+  }
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  for (int i = 0; i < num_; ++i) {
+    switch (op_) {
+    case ReductionParameter_ReductionOp_SUM:
+    case ReductionParameter_ReductionOp_MEAN:
+      *top_data = caffe_cpu_dot(dim_, mult_data, bottom_data);
+      break;
+    case ReductionParameter_ReductionOp_ASUM:
+      *top_data = caffe_cpu_asum(dim_, bottom_data);
+      break;
+    case ReductionParameter_ReductionOp_SUMSQ:
+      *top_data = caffe_cpu_dot(dim_, bottom_data, bottom_data);
+      break;
+    default:
+      LOG(FATAL) << "Unknown reduction op: "
+          << ReductionParameter_ReductionOp_Name(op_);
+    }
+    bottom_data += dim_;
+    ++top_data;
+  }
+  if (coeff_ != Dtype(1)) {
+    // Reset the top_data pointer.
+    top_data = top[0]->mutable_cpu_data();
+    caffe_scal(num_, coeff_, top_data);
+  }
+}
+
+template <typename Dtype>
+void ReductionLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) { return; }
+  // Get bottom_data, if needed.
+  const Dtype* bottom_data = NULL;
+  switch (op_) {
+  // Operations that don't need bottom_data
+  case ReductionParameter_ReductionOp_SUM:
+  case ReductionParameter_ReductionOp_MEAN:
+    break;
+  // Operations that need bottom_data
+  case ReductionParameter_ReductionOp_ASUM:
+  case ReductionParameter_ReductionOp_SUMSQ:
+    bottom_data = bottom[0]->cpu_data();
+    break;
+  default:
+    LOG(FATAL) << "Unknown reduction op: "
+        << ReductionParameter_ReductionOp_Name(op_);
+  }
+  const Dtype* top_diff = top[0]->cpu_diff();
+  Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+  for (int i = 0; i < num_; ++i) {
+    const Dtype bottom_coeff = (*top_diff) * coeff_;
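+    // (Editorial note) The per-element gradients are d(sum)/dx = 1,
+    // d(asum)/dx = sign(x) and d(sumsq)/dx = 2 * x, each scaled below by
+    // bottom_coeff.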
+    switch (op_) {
+    case ReductionParameter_ReductionOp_SUM:
+    case ReductionParameter_ReductionOp_MEAN:
+      caffe_set(dim_, bottom_coeff, bottom_diff);
+      break;
+    case ReductionParameter_ReductionOp_ASUM:
+      caffe_cpu_sign(dim_, bottom_data, bottom_diff);
+      caffe_scal(dim_, bottom_coeff, bottom_diff);
+      break;
+    case ReductionParameter_ReductionOp_SUMSQ:
+      caffe_cpu_scale(dim_, 2 * bottom_coeff, bottom_data, bottom_diff);
+      break;
+    default:
+      LOG(FATAL) << "Unknown reduction op: "
+          << ReductionParameter_ReductionOp_Name(op_);
+    }
+    bottom_data += dim_;
+    bottom_diff += dim_;
+    ++top_diff;
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(ReductionLayer);
+#endif
+
+INSTANTIATE_CLASS(ReductionLayer);
+REGISTER_LAYER_CLASS(Reduction);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/reduction_layer.cu b/src/caffe/layers/reduction_layer.cu
new file mode 100644
index 0000000..2dbd3bc
--- /dev/null
+++ b/src/caffe/layers/reduction_layer.cu
@@ -0,0 +1,93 @@
+#include <cfloat>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void ReductionLayer<Dtype>::Forward_gpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  const Dtype* mult_data = NULL;
+  if (sum_multiplier_.count() > 0) {
+    mult_data = sum_multiplier_.gpu_data();
+  }
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  for (int i = 0; i < num_; ++i) {
+    switch (op_) {
+    case ReductionParameter_ReductionOp_SUM:
+    case ReductionParameter_ReductionOp_MEAN:
+      caffe_gpu_dot(dim_, mult_data, bottom_data, top_data);
+      break;
+    case ReductionParameter_ReductionOp_ASUM:
+      caffe_gpu_asum(dim_, bottom_data, top_data);
+      break;
+    case ReductionParameter_ReductionOp_SUMSQ:
+      caffe_gpu_dot(dim_, bottom_data, bottom_data, top_data);
+      break;
+    default:
+      LOG(FATAL) << "Unknown reduction op: "
+          << ReductionParameter_ReductionOp_Name(op_);
+    }
+    bottom_data += dim_;
+    ++top_data;
+  }
+  if (coeff_ != Dtype(1)) {
+    // Reset the top_data pointer.
+    top_data = top[0]->mutable_gpu_data();
+    caffe_gpu_scal(num_, coeff_, top_data);
+  }
+}
+
+template <typename Dtype>
+void ReductionLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) { return; }
+  // Get bottom_data, if needed.
+  const Dtype* bottom_data = NULL;
+  switch (op_) {
+  // Operations that don't need bottom_data
+  case ReductionParameter_ReductionOp_SUM:
+  case ReductionParameter_ReductionOp_MEAN:
+    break;
+  // Operations that need bottom_data
+  case ReductionParameter_ReductionOp_ASUM:
+  case ReductionParameter_ReductionOp_SUMSQ:
+    bottom_data = bottom[0]->gpu_data();
+    break;
+  default:
+    LOG(FATAL) << "Unknown reduction op: "
+        << ReductionParameter_ReductionOp_Name(op_);
+  }
+  const Dtype* top_diff = top[0]->cpu_diff();
+  Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+  for (int i = 0; i < num_; ++i) {
+    const Dtype bottom_coeff = (*top_diff) * coeff_;
+    switch (op_) {
+    case ReductionParameter_ReductionOp_SUM:
+    case ReductionParameter_ReductionOp_MEAN:
+      caffe_gpu_set(dim_, bottom_coeff, bottom_diff);
+      break;
+    case ReductionParameter_ReductionOp_ASUM:
+      caffe_gpu_sign(dim_, bottom_data, bottom_diff);
+      caffe_gpu_scal(dim_, bottom_coeff, bottom_diff);
+      break;
+    case ReductionParameter_ReductionOp_SUMSQ:
+      caffe_gpu_scale(dim_, 2 * bottom_coeff, bottom_data, bottom_diff);
+      break;
+    default:
+      LOG(FATAL) << "Unknown reduction op: "
+          << ReductionParameter_ReductionOp_Name(op_);
+    }
+    bottom_data += dim_;
+    bottom_diff += dim_;
+    ++top_diff;
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(ReductionLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/relu_layer.cpp b/src/caffe/layers/relu_layer.cpp
new file mode 100644
index 0000000..cc00319
--- /dev/null
+++ b/src/caffe/layers/relu_layer.cpp
@@ -0,0 +1,46 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void ReLULayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  const int count = bottom[0]->count();
+  Dtype negative_slope = this->layer_param_.relu_param().negative_slope();
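+  // (Editorial note) With negative_slope == 0 this is the standard ReLU,
+  // max(0, x); a non-zero slope gives the "leaky" ReLU variant.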
+  for (int i = 0; i < count; ++i) {
+    top_data[i] = std::max(bottom_data[i], Dtype(0))
+        + negative_slope * std::min(bottom_data[i], Dtype(0));
+  }
+}
+
+template <typename Dtype>
+void ReLULayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    const Dtype* bottom_data = bottom[0]->cpu_data();
+    const Dtype* top_diff = top[0]->cpu_diff();
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    const int count = bottom[0]->count();
+    Dtype negative_slope = this->layer_param_.relu_param().negative_slope();
+    for (int i = 0; i < count; ++i) {
+      bottom_diff[i] = top_diff[i] * ((bottom_data[i] > 0)
+          + negative_slope * (bottom_data[i] <= 0));
+    }
+  }
+}
+
+
+#ifdef CPU_ONLY
+STUB_GPU(ReLULayer);
+#endif
+
+INSTANTIATE_CLASS(ReLULayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/relu_layer.cu b/src/caffe/layers/relu_layer.cu
new file mode 100644
index 0000000..b8924c8
--- /dev/null
+++ b/src/caffe/layers/relu_layer.cu
@@ -0,0 +1,65 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+__global__ void ReLUForward(const int n, const Dtype* in, Dtype* out,
+    Dtype negative_slope) {
+  CUDA_KERNEL_LOOP(index, n) {
+    out[index] = in[index] > 0 ? in[index] : in[index] * negative_slope;
+  }
+}
+
+template <typename Dtype>
+void ReLULayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  const int count = bottom[0]->count();
+  Dtype negative_slope = this->layer_param_.relu_param().negative_slope();
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  ReLUForward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+      count, bottom_data, top_data, negative_slope);
+  CUDA_POST_KERNEL_CHECK;
+  // << " count: " << count << " bottom_data: "
+  //     << (unsigned long)bottom_data
+  //     << " top_data: " << (unsigned long)top_data
+  //     << " blocks: " << CAFFE_GET_BLOCKS(count)
+  //     << " threads: " << CAFFE_CUDA_NUM_THREADS;
+}
+
+template <typename Dtype>
+__global__ void ReLUBackward(const int n, const Dtype* in_diff,
+    const Dtype* in_data, Dtype* out_diff, Dtype negative_slope) {
+  CUDA_KERNEL_LOOP(index, n) {
+    out_diff[index] = in_diff[index] * ((in_data[index] > 0)
+        + (in_data[index] <= 0) * negative_slope);
+  }
+}
+
+template <typename Dtype>
+void ReLULayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    const Dtype* bottom_data = bottom[0]->gpu_data();
+    const Dtype* top_diff = top[0]->gpu_diff();
+    Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+    const int count = bottom[0]->count();
+    Dtype negative_slope = this->layer_param_.relu_param().negative_slope();
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    ReLUBackward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+        count, top_diff, bottom_data, bottom_diff, negative_slope);
+    CUDA_POST_KERNEL_CHECK;
+  }
+}
+
+
+INSTANTIATE_LAYER_GPU_FUNCS(ReLULayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/reshape_layer.cpp b/src/caffe/layers/reshape_layer.cpp
new file mode 100644
index 0000000..ffe970f
--- /dev/null
+++ b/src/caffe/layers/reshape_layer.cpp
@@ -0,0 +1,95 @@
+#include <vector>
+
+#include "caffe/common_layers.hpp"
+#include "caffe/layer.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void ReshapeLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  inferred_axis_ = -1;
+  copy_axes_.clear();
+  const BlobShape& top_blob_shape = this->layer_param_.reshape_param().shape();
+  const int top_num_axes = top_blob_shape.dim_size();
+  constant_count_ = 1;
+  for (int i = 0; i < top_num_axes; ++i) {
+    const int top_dim = top_blob_shape.dim(i);
+    if (top_dim == 0) {
+      copy_axes_.push_back(i);
+    } else if (top_dim == -1) {
+      CHECK_EQ(inferred_axis_, -1) << "new shape contains multiple "
+          << "-1 dims; at most a single (1) value of -1 may be specified";
+      inferred_axis_ = i;
+    } else {
+      constant_count_ *= top_dim;
+    }
+  }
+}
+
+template <typename Dtype>
+void ReshapeLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const int input_start_axis = this->layer_param_.reshape_param().axis();
+  const int start_axis = (input_start_axis >= 0) ? input_start_axis :
+      bottom[0]->num_axes() + input_start_axis + 1;
+  CHECK_GE(start_axis, 0) << "axis " << input_start_axis << " out of range";
+  CHECK_LE(start_axis, bottom[0]->num_axes()) << "axis " << input_start_axis
+      << " out of range for " << bottom[0]->num_axes() << "-D input blob";
+  const int num_axes = this->layer_param_.reshape_param().num_axes();
+  CHECK_GE(num_axes, -1) << "num_axes must be >= 0, or -1 for all";
+  const int end_axis =
+      (num_axes == -1) ? bottom[0]->num_axes() : (start_axis + num_axes);
+  CHECK_LE(end_axis, bottom[0]->num_axes())
+      << "end_axis = axis + num_axes is out of range";
+  const int num_axes_replaced = end_axis - start_axis;
+  const int num_axes_retained = bottom[0]->num_axes() - num_axes_replaced;
+  const BlobShape& top_blob_shape = this->layer_param_.reshape_param().shape();
+  const int num_new_axes = top_blob_shape.dim_size();
+  vector<int> top_shape(num_axes_retained + num_new_axes);
+  int top_shape_index = 0;
+  for (int i = 0; i < start_axis; ++i) {
+    top_shape[top_shape_index++] = bottom[0]->shape(i);
+  }
+  for (int i = 0; i < num_new_axes; ++i) {
+    top_shape[top_shape_index++] = top_blob_shape.dim(i);
+  }
+  for (int i = end_axis; i < bottom[0]->num_axes(); ++i) {
+    top_shape[top_shape_index++] = bottom[0]->shape(i);
+  }
+  CHECK_EQ(top_shape_index, top_shape.size());
+  for (int i = 0; i < copy_axes_.size(); ++i) {
+    const int copy_axis_index = copy_axes_[i];
+    CHECK_GT(bottom[0]->num_axes(), start_axis + copy_axis_index)
+        << "new shape contains a 0, but there was no corresponding bottom axis "
+        << "to copy";
+    top_shape[start_axis + copy_axis_index] =
+        bottom[0]->shape(start_axis + copy_axis_index);
+  }
+  if (inferred_axis_ >= 0) {
+    // A -1 dim was specified; infer the correct dimension by computing the
+    // product of the other dimensions.
+    int explicit_count = constant_count_;
+    explicit_count *= bottom[0]->count(0, start_axis);
+    explicit_count *= bottom[0]->count(end_axis);
+    for (int i = 0; i < copy_axes_.size(); ++i) {
+      const int copy_axis_index = copy_axes_[i];
+      explicit_count *= top_shape[start_axis + copy_axis_index];
+    }
+    CHECK_EQ(0, bottom[0]->count() % explicit_count) << "bottom count ("
+        << bottom[0]->count() << ") must be divisible by the product of "
+        << "the specified dimensions (" << explicit_count << ")";
+    const int inferred_dim = bottom[0]->count() / explicit_count;
+    top_shape[start_axis + inferred_axis_] = inferred_dim;
+  }
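+  // Illustrative example (editorial): reshaping a (2, 3, 4) blob with
+  // shape { dim: 0 dim: -1 } keeps axis 0 and infers the trailing dimension
+  // as 24 / 2 = 12, producing a (2, 12) output.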
+  top[0]->Reshape(top_shape);
+  CHECK_EQ(top[0]->count(), bottom[0]->count())
+      << "output count must match input count";
+  top[0]->ShareData(*bottom[0]);
+  top[0]->ShareDiff(*bottom[0]);
+}
+
+INSTANTIATE_CLASS(ReshapeLayer);
+REGISTER_LAYER_CLASS(Reshape);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cpp b/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cpp
new file mode 100644
index 0000000..cc236fe
--- /dev/null
+++ b/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cpp
@@ -0,0 +1,80 @@
+#include <algorithm>
+#include <cfloat>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void SigmoidCrossEntropyLossLayer<Dtype>::LayerSetUp(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  LossLayer<Dtype>::LayerSetUp(bottom, top);
+  sigmoid_bottom_vec_.clear();
+  sigmoid_bottom_vec_.push_back(bottom[0]);
+  sigmoid_top_vec_.clear();
+  sigmoid_top_vec_.push_back(sigmoid_output_.get());
+  sigmoid_layer_->SetUp(sigmoid_bottom_vec_, sigmoid_top_vec_);
+}
+
+template <typename Dtype>
+void SigmoidCrossEntropyLossLayer<Dtype>::Reshape(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  LossLayer<Dtype>::Reshape(bottom, top);
+  CHECK_EQ(bottom[0]->count(), bottom[1]->count()) <<
+      "SIGMOID_CROSS_ENTROPY_LOSS layer inputs must have the same count.";
+  sigmoid_layer_->Reshape(sigmoid_bottom_vec_, sigmoid_top_vec_);
+}
+
+template <typename Dtype>
+void SigmoidCrossEntropyLossLayer<Dtype>::Forward_cpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  // The forward pass computes the sigmoid outputs.
+  sigmoid_bottom_vec_[0] = bottom[0];
+  sigmoid_layer_->Forward(sigmoid_bottom_vec_, sigmoid_top_vec_);
+  // Compute the loss (negative log likelihood)
+  const int count = bottom[0]->count();
+  const int num = bottom[0]->num();
+  // Stable version of loss computation from input data
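+  // (Editorial note) The expression below is the overflow-safe form of
+  // -[t * log(sigmoid(x)) + (1 - t) * log(1 - sigmoid(x))]: the argument of
+  // exp() is always -|x|, so it cannot overflow for large inputs.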
+  const Dtype* input_data = bottom[0]->cpu_data();
+  const Dtype* target = bottom[1]->cpu_data();
+  Dtype loss = 0;
+  for (int i = 0; i < count; ++i) {
+    loss -= input_data[i] * (target[i] - (input_data[i] >= 0)) -
+        log(1 + exp(input_data[i] - 2 * input_data[i] * (input_data[i] >= 0)));
+  }
+  top[0]->mutable_cpu_data()[0] = loss / num;
+}
+
+template <typename Dtype>
+void SigmoidCrossEntropyLossLayer<Dtype>::Backward_cpu(
+    const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[1]) {
+    LOG(FATAL) << this->type()
+               << " Layer cannot backpropagate to label inputs.";
+  }
+  if (propagate_down[0]) {
+    // First, compute the diff
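+    // For the cross-entropy loss above, d(loss)/dx = sigmoid(x) - target,
+    // so the diff is simply the sigmoid output minus the target.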
+    const int count = bottom[0]->count();
+    const int num = bottom[0]->num();
+    const Dtype* sigmoid_output_data = sigmoid_output_->cpu_data();
+    const Dtype* target = bottom[1]->cpu_data();
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    caffe_sub(count, sigmoid_output_data, target, bottom_diff);
+    // Scale down gradient
+    const Dtype loss_weight = top[0]->cpu_diff()[0];
+    caffe_scal(count, loss_weight / num, bottom_diff);
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU_BACKWARD(SigmoidCrossEntropyLossLayer, Backward);
+#endif
+
+INSTANTIATE_CLASS(SigmoidCrossEntropyLossLayer);
+REGISTER_LAYER_CLASS(SigmoidCrossEntropyLoss);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cu b/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cu
new file mode 100644
index 0000000..547fa80
--- /dev/null
+++ b/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cu
@@ -0,0 +1,37 @@
+#include <algorithm>
+#include <cfloat>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void SigmoidCrossEntropyLossLayer<Dtype>::Backward_gpu(
+    const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[1]) {
+    LOG(FATAL) << this->type()
+               << " Layer cannot backpropagate to label inputs.";
+  }
+  if (propagate_down[0]) {
+    // First, compute the diff
+    const int count = bottom[0]->count();
+    const int num = bottom[0]->num();
+    const Dtype* sigmoid_output_data = sigmoid_output_->gpu_data();
+    const Dtype* target = bottom[1]->gpu_data();
+    Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+    caffe_copy(count, sigmoid_output_data, bottom_diff);
+    caffe_gpu_axpy(count, Dtype(-1), target, bottom_diff);
+    // Scale down gradient
+    const Dtype loss_weight = top[0]->cpu_diff()[0];
+    caffe_gpu_scal(count, loss_weight / num, bottom_diff);
+  }
+}
+
+INSTANTIATE_LAYER_GPU_BACKWARD(SigmoidCrossEntropyLossLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/sigmoid_layer.cpp b/src/caffe/layers/sigmoid_layer.cpp
new file mode 100644
index 0000000..48c3849
--- /dev/null
+++ b/src/caffe/layers/sigmoid_layer.cpp
@@ -0,0 +1,49 @@
+#include <algorithm>
+#include <cmath>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+inline Dtype sigmoid(Dtype x) {
+  return 1. / (1. + exp(-x));
+}
+
+template <typename Dtype>
+void SigmoidLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  const int count = bottom[0]->count();
+  for (int i = 0; i < count; ++i) {
+    top_data[i] = sigmoid(bottom_data[i]);
+  }
+}
+
+template <typename Dtype>
+void SigmoidLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    const Dtype* top_data = top[0]->cpu_data();
+    const Dtype* top_diff = top[0]->cpu_diff();
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    const int count = bottom[0]->count();
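+    // d(sigmoid)/dx = sigmoid(x) * (1 - sigmoid(x)); top_data already holds
+    // sigmoid(x) from the forward pass, so no exp() is needed here.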
+    for (int i = 0; i < count; ++i) {
+      const Dtype sigmoid_x = top_data[i];
+      bottom_diff[i] = top_diff[i] * sigmoid_x * (1. - sigmoid_x);
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(SigmoidLayer);
+#endif
+
+INSTANTIATE_CLASS(SigmoidLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/sigmoid_layer.cu b/src/caffe/layers/sigmoid_layer.cu
new file mode 100644
index 0000000..e1af065
--- /dev/null
+++ b/src/caffe/layers/sigmoid_layer.cu
@@ -0,0 +1,62 @@
+#include <algorithm>
+#include <cmath>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+__global__ void SigmoidForward(const int n, const Dtype* in, Dtype* out) {
+  CUDA_KERNEL_LOOP(index, n) {
+    out[index] = 1. / (1. + exp(-in[index]));
+  }
+}
+
+template <typename Dtype>
+void SigmoidLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  const int count = bottom[0]->count();
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  SigmoidForward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+      count, bottom_data, top_data);
+  CUDA_POST_KERNEL_CHECK;
+  // << " count: " << count << " bottom_data: "
+  //     << (unsigned long)bottom_data
+  //     << " top_data: " << (unsigned long)top_data
+  //     << " blocks: " << CAFFE_GET_BLOCKS(count)
+  //     << " threads: " << CAFFE_CUDA_NUM_THREADS;
+}
+
+template <typename Dtype>
+__global__ void SigmoidBackward(const int n, const Dtype* in_diff,
+    const Dtype* out_data, Dtype* out_diff) {
+  CUDA_KERNEL_LOOP(index, n) {
+    const Dtype sigmoid_x = out_data[index];
+    out_diff[index] = in_diff[index] * sigmoid_x * (1 - sigmoid_x);
+  }
+}
+
+template <typename Dtype>
+void SigmoidLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    const Dtype* top_data = top[0]->gpu_data();
+    const Dtype* top_diff = top[0]->gpu_diff();
+    Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+    const int count = bottom[0]->count();
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    SigmoidBackward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+        count, top_diff, top_data, bottom_diff);
+    CUDA_POST_KERNEL_CHECK;
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(SigmoidLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/silence_layer.cpp b/src/caffe/layers/silence_layer.cpp
new file mode 100644
index 0000000..4abf9ef
--- /dev/null
+++ b/src/caffe/layers/silence_layer.cpp
@@ -0,0 +1,27 @@
+#include <vector>
+
+#include "caffe/common_layers.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void SilenceLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  for (int i = 0; i < bottom.size(); ++i) {
+    if (propagate_down[i]) {
+      // Silenced bottoms receive no gradient, so zero the diff (not the data).
+      caffe_set(bottom[i]->count(), Dtype(0),
+                bottom[i]->mutable_cpu_diff());
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(SilenceLayer);
+#endif
+
+INSTANTIATE_CLASS(SilenceLayer);
+REGISTER_LAYER_CLASS(Silence);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/silence_layer.cu b/src/caffe/layers/silence_layer.cu
new file mode 100644
index 0000000..8d044ee
--- /dev/null
+++ b/src/caffe/layers/silence_layer.cu
@@ -0,0 +1,28 @@
+#include <vector>
+
+#include "caffe/common_layers.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void SilenceLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  // Do nothing.
+}
+
+template <typename Dtype>
+void SilenceLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  for (int i = 0; i < bottom.size(); ++i) {
+    if (propagate_down[i]) {
+      // Silenced bottoms receive no gradient, so zero the diff (not the data).
+      caffe_gpu_set(bottom[i]->count(), Dtype(0),
+                    bottom[i]->mutable_gpu_diff());
+    }
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(SilenceLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/slice_layer.cpp b/src/caffe/layers/slice_layer.cpp
new file mode 100644
index 0000000..e4418c9
--- /dev/null
+++ b/src/caffe/layers/slice_layer.cpp
@@ -0,0 +1,120 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void SliceLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const SliceParameter& slice_param = this->layer_param_.slice_param();
+  CHECK(!(slice_param.has_axis() && slice_param.has_slice_dim()))
+      << "Either axis or slice_dim should be specified; not both.";
+  slice_point_.clear();
+  std::copy(slice_param.slice_point().begin(),
+      slice_param.slice_point().end(),
+      std::back_inserter(slice_point_));
+}
+
+template <typename Dtype>
+void SliceLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  const int num_axes = bottom[0]->num_axes();
+  const SliceParameter& slice_param = this->layer_param_.slice_param();
+  if (slice_param.has_slice_dim()) {
+    slice_axis_ = static_cast<int>(slice_param.slice_dim());
+    // Don't allow negative indexing for slice_dim, a uint32 -- almost
+    // certainly unintended.
+    CHECK_GE(slice_axis_, 0) << "casting slice_dim from uint32 to int32 "
+        << "produced negative result; slice_dim must satisfy "
+        << "0 <= slice_dim < " << kMaxBlobAxes;
+    CHECK_LT(slice_axis_, num_axes) << "slice_dim out of range.";
+  } else {
+    slice_axis_ = bottom[0]->CanonicalAxisIndex(slice_param.axis());
+  }
+  vector<int> top_shape = bottom[0]->shape();
+  const int bottom_slice_axis = bottom[0]->shape(slice_axis_);
+  num_slices_ = bottom[0]->count(0, slice_axis_);
+  slice_size_ = bottom[0]->count(slice_axis_ + 1);
+  int count = 0;
+  if (slice_point_.size() != 0) {
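+    // Illustrative example: a bottom of shape (N, 12, H, W) sliced on axis 1
+    // with slice_point {3, 8} yields three tops with 3, 5, and 4 channels.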
+    CHECK_EQ(slice_point_.size(), top.size() - 1);
+    CHECK_LE(top.size(), bottom_slice_axis);
+    int prev = 0;
+    vector<int> slices;
+    for (int i = 0; i < slice_point_.size(); ++i) {
+      CHECK_GT(slice_point_[i], prev);
+      slices.push_back(slice_point_[i] - prev);
+      prev = slice_point_[i];
+    }
+    slices.push_back(bottom_slice_axis - prev);
+    for (int i = 0; i < top.size(); ++i) {
+      top_shape[slice_axis_] = slices[i];
+      top[i]->Reshape(top_shape);
+      count += top[i]->count();
+    }
+  } else {
+    CHECK_EQ(bottom_slice_axis % top.size(), 0)
+        << "Number of top blobs (" << top.size() << ") should evenly "
+        << "divide input slice axis (" << bottom_slice_axis << ")";
+    top_shape[slice_axis_] = bottom_slice_axis / top.size();
+    for (int i = 0; i < top.size(); ++i) {
+      top[i]->Reshape(top_shape);
+      count += top[i]->count();
+    }
+  }
+  CHECK_EQ(count, bottom[0]->count());
+}
+
+template <typename Dtype>
+void SliceLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  int offset_slice_axis = 0;
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  const int bottom_slice_axis = bottom[0]->shape(slice_axis_);
+  for (int i = 0; i < top.size(); ++i) {
+    Dtype* top_data = top[i]->mutable_cpu_data();
+    const int top_slice_axis = top[i]->shape(slice_axis_);
+    for (int n = 0; n < num_slices_; ++n) {
+      const int top_offset = n * top_slice_axis * slice_size_;
+      const int bottom_offset =
+          (n * bottom_slice_axis + offset_slice_axis) * slice_size_;
+      caffe_copy(top_slice_axis * slice_size_,
+          bottom_data + bottom_offset, top_data + top_offset);
+    }
+    offset_slice_axis += top_slice_axis;
+  }
+}
+
+template <typename Dtype>
+void SliceLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) { return; }
+  int offset_slice_axis = 0;
+  Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+  const int bottom_slice_axis = bottom[0]->shape(slice_axis_);
+  for (int i = 0; i < top.size(); ++i) {
+    const Dtype* top_diff = top[i]->cpu_diff();
+    const int top_slice_axis = top[i]->shape(slice_axis_);
+    for (int n = 0; n < num_slices_; ++n) {
+      const int top_offset = n * top_slice_axis * slice_size_;
+      const int bottom_offset =
+          (n * bottom_slice_axis + offset_slice_axis) * slice_size_;
+      caffe_copy(top_slice_axis * slice_size_,
+          top_diff + top_offset, bottom_diff + bottom_offset);
+    }
+    offset_slice_axis += top_slice_axis;
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(SliceLayer);
+#endif
+
+INSTANTIATE_CLASS(SliceLayer);
+REGISTER_LAYER_CLASS(Slice);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/slice_layer.cu b/src/caffe/layers/slice_layer.cu
new file mode 100644
index 0000000..796841d
--- /dev/null
+++ b/src/caffe/layers/slice_layer.cu
@@ -0,0 +1,71 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+__global__ void Slice(const int nthreads, const Dtype* in_data,
+    const bool forward, const int num_slices, const int slice_size,
+    const int bottom_slice_axis, const int top_slice_axis,
+    const int offset_slice_axis, Dtype* out_data) {
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    const int total_slice_size = slice_size * top_slice_axis;
+    const int slice_num = index / total_slice_size;
+    const int slice_index = index % total_slice_size;
+    const int bottom_index = slice_index +
+        (slice_num * bottom_slice_axis + offset_slice_axis) * slice_size;
+    if (forward) {
+      out_data[index] = in_data[bottom_index];
+    } else {
+      out_data[bottom_index] = in_data[index];
+    }
+  }
+}
+
+template <typename Dtype>
+void SliceLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  int offset_slice_axis = 0;
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  const int bottom_slice_axis = bottom[0]->shape(slice_axis_);
+  const bool kForward = true;
+  for (int i = 0; i < top.size(); ++i) {
+    Dtype* top_data = top[i]->mutable_gpu_data();
+    const int top_slice_axis = top[i]->shape(slice_axis_);
+    const int top_slice_size = top_slice_axis * slice_size_;
+    const int nthreads = top_slice_size * num_slices_;
+    Slice<Dtype>  // NOLINT_NEXT_LINE(whitespace/operators)
+        <<<CAFFE_GET_BLOCKS(nthreads), CAFFE_CUDA_NUM_THREADS>>>(
+        nthreads, bottom_data, kForward, num_slices_, slice_size_,
+        bottom_slice_axis, top_slice_axis, offset_slice_axis, top_data);
+    offset_slice_axis += top_slice_axis;
+  }
+}
+
+template <typename Dtype>
+void SliceLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) { return; }
+  int offset_slice_axis = 0;
+  Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+  const int bottom_slice_axis = bottom[0]->shape(slice_axis_);
+  const bool kForward = false;
+  for (int i = 0; i < top.size(); ++i) {
+    const Dtype* top_diff = top[i]->gpu_diff();
+    const int top_slice_axis = top[i]->shape(slice_axis_);
+    const int top_slice_size = top_slice_axis * slice_size_;
+    const int nthreads = top_slice_size * num_slices_;
+    Slice<Dtype>  // NOLINT_NEXT_LINE(whitespace/operators)
+        <<<CAFFE_GET_BLOCKS(nthreads), CAFFE_CUDA_NUM_THREADS>>>(
+        nthreads, top_diff, kForward, num_slices_, slice_size_,
+        bottom_slice_axis, top_slice_axis, offset_slice_axis, bottom_diff);
+    offset_slice_axis += top_slice_axis;
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(SliceLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/softmax_layer.cpp b/src/caffe/layers/softmax_layer.cpp
new file mode 100644
index 0000000..04712c9
--- /dev/null
+++ b/src/caffe/layers/softmax_layer.cpp
@@ -0,0 +1,96 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void SoftmaxLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  softmax_axis_ =
+      bottom[0]->CanonicalAxisIndex(this->layer_param_.softmax_param().axis());
+  top[0]->ReshapeLike(*bottom[0]);
+  vector<int> mult_dims(1, bottom[0]->shape(softmax_axis_));
+  sum_multiplier_.Reshape(mult_dims);
+  Dtype* multiplier_data = sum_multiplier_.mutable_cpu_data();
+  caffe_set(sum_multiplier_.count(), Dtype(1), multiplier_data);
+  outer_num_ = bottom[0]->count(0, softmax_axis_);
+  inner_num_ = bottom[0]->count(softmax_axis_ + 1);
+  vector<int> scale_dims = bottom[0]->shape();
+  scale_dims[softmax_axis_] = 1;
+  scale_.Reshape(scale_dims);
+}
+
+template <typename Dtype>
+void SoftmaxLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  Dtype* scale_data = scale_.mutable_cpu_data();
+  int channels = bottom[0]->shape(softmax_axis_);
+  int dim = bottom[0]->count() / outer_num_;
+  caffe_copy(bottom[0]->count(), bottom_data, top_data);
+  // We need to subtract the max to avoid numerical issues, compute the exp,
+  // and then normalize.
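+  // Subtracting the per-position channel max is safe because softmax is
+  // shift-invariant:
+  //   exp(x_c - m) / sum_j exp(x_j - m) = exp(x_c) / sum_j exp(x_j),
+  // and it keeps every exp() argument non-positive.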
+  for (int i = 0; i < outer_num_; ++i) {
+    // initialize scale_data to the first plane
+    caffe_copy(inner_num_, bottom_data + i * dim, scale_data);
+    for (int j = 0; j < channels; j++) {
+      for (int k = 0; k < inner_num_; k++) {
+        scale_data[k] = std::max(scale_data[k],
+            bottom_data[i * dim + j * inner_num_ + k]);
+      }
+    }
+    // subtraction
+    caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, channels, inner_num_,
+        1, -1., sum_multiplier_.cpu_data(), scale_data, 1., top_data);
+    // exponentiation
+    caffe_exp<Dtype>(dim, top_data, top_data);
+    // sum after exp
+    caffe_cpu_gemv<Dtype>(CblasTrans, channels, inner_num_, 1.,
+        top_data, sum_multiplier_.cpu_data(), 0., scale_data);
+    // division
+    for (int j = 0; j < channels; j++) {
+      caffe_div(inner_num_, top_data, scale_data, top_data);
+      top_data += inner_num_;
+    }
+  }
+}
+
+template <typename Dtype>
+void SoftmaxLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* top_diff = top[0]->cpu_diff();
+  const Dtype* top_data = top[0]->cpu_data();
+  Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+  Dtype* scale_data = scale_.mutable_cpu_data();
+  int channels = top[0]->shape(softmax_axis_);
+  int dim = top[0]->count() / outer_num_;
+  caffe_copy(top[0]->count(), top_diff, bottom_diff);
+  for (int i = 0; i < outer_num_; ++i) {
+    // compute dot(top_diff, top_data) and subtract them from the bottom diff
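+    // This implements the softmax Jacobian-vector product:
+    //   dL/dx_c = y_c * (dL/dy_c - sum_j dL/dy_j * y_j),
+    // where y = softmax(x); the final caffe_mul below supplies the
+    // y_c factor.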
+    for (int k = 0; k < inner_num_; ++k) {
+      scale_data[k] = caffe_cpu_strided_dot<Dtype>(channels,
+          bottom_diff + i * dim + k, inner_num_,
+          top_data + i * dim + k, inner_num_);
+    }
+    // subtraction
+    caffe_cpu_gemm<Dtype>(CblasNoTrans, CblasNoTrans, channels, inner_num_, 1,
+        -1., sum_multiplier_.cpu_data(), scale_data, 1., bottom_diff + i * dim);
+  }
+  // elementwise multiplication
+  caffe_mul(top[0]->count(), bottom_diff, top_data, bottom_diff);
+}
+
+
+#ifdef CPU_ONLY
+STUB_GPU(SoftmaxLayer);
+#endif
+
+INSTANTIATE_CLASS(SoftmaxLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/softmax_layer.cu b/src/caffe/layers/softmax_layer.cu
new file mode 100644
index 0000000..1f9c3a4
--- /dev/null
+++ b/src/caffe/layers/softmax_layer.cu
@@ -0,0 +1,149 @@
+#include <algorithm>
+#include <cfloat>
+#include <vector>
+
+#include "thrust/device_vector.h"
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+__global__ void kernel_channel_max(const int num, const int channels,
+    const int spatial_dim, const Dtype* data, Dtype* out) {
+  CUDA_KERNEL_LOOP(index, num * spatial_dim) {
+    int n = index / spatial_dim;
+    int s = index % spatial_dim;
+    Dtype maxval = -FLT_MAX;
+    for (int c = 0; c < channels; ++c) {
+      maxval = max(data[(n * channels + c) * spatial_dim + s], maxval);
+    }
+    out[index] = maxval;
+  }
+}
+
+template <typename Dtype>
+__global__ void kernel_channel_subtract(const int count,
+    const int num, const int channels,
+    const int spatial_dim, const Dtype* channel_max, Dtype* data) {
+  CUDA_KERNEL_LOOP(index, count) {
+    int n = index / channels / spatial_dim;
+    int s = index % spatial_dim;
+    data[index] -= channel_max[n * spatial_dim + s];
+  }
+}
+
+template <typename Dtype>
+__global__ void kernel_exp(const int count, const Dtype* data, Dtype* out) {
+  CUDA_KERNEL_LOOP(index, count) {
+    out[index] = exp(data[index]);
+  }
+}
+
+template <typename Dtype>
+__global__ void kernel_channel_sum(const int num, const int channels,
+    const int spatial_dim, const Dtype* data, Dtype* channel_sum) {
+  CUDA_KERNEL_LOOP(index, num * spatial_dim) {
+    int n = index / spatial_dim;
+    int s = index % spatial_dim;
+    Dtype sum = 0;
+    for (int c = 0; c < channels; ++c) {
+      sum += data[(n * channels + c) * spatial_dim + s];
+    }
+    channel_sum[index] = sum;
+  }
+}
+
+template <typename Dtype>
+__global__ void kernel_channel_div(const int count,
+    const int num, const int channels,
+    const int spatial_dim, const Dtype* channel_sum, Dtype* data) {
+  CUDA_KERNEL_LOOP(index, count) {
+    int n = index / channels / spatial_dim;
+    int s = index % spatial_dim;
+    data[index] /= channel_sum[n * spatial_dim + s];
+  }
+}
+
+template <typename Dtype>
+__global__ void kernel_channel_dot(const int num, const int channels,
+    const int spatial_dim, const Dtype* data_1, const Dtype* data_2,
+    Dtype* channel_dot) {
+  CUDA_KERNEL_LOOP(index, num * spatial_dim) {
+    int n = index / spatial_dim;
+    int s = index % spatial_dim;
+    Dtype dot = 0;
+    for (int c = 0; c < channels; ++c) {
+      dot += (data_1[(n * channels + c) * spatial_dim + s]
+          * data_2[(n * channels + c) * spatial_dim + s]);
+    }
+    channel_dot[index] = dot;
+  }
+}
+
+template <typename Dtype>
+void SoftmaxLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  Dtype* scale_data = scale_.mutable_gpu_data();
+  int count = bottom[0]->count();
+  int channels = top[0]->shape(softmax_axis_);
+  caffe_copy(count, bottom_data, top_data);
+  // We need to subtract the max to avoid numerical issues, compute the exp,
+  // and then normalize.
+  // compute max
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  kernel_channel_max<Dtype><<<CAFFE_GET_BLOCKS(outer_num_ * inner_num_),
+      CAFFE_CUDA_NUM_THREADS>>>(outer_num_, channels, inner_num_, top_data,
+      scale_data);
+  // subtract
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  kernel_channel_subtract<Dtype><<<CAFFE_GET_BLOCKS(count),
+      CAFFE_CUDA_NUM_THREADS>>>(count, outer_num_, channels, inner_num_,
+      scale_data, top_data);
+  // exponentiate
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  kernel_exp<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+      count, top_data, top_data);
+  // sum after exp
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  kernel_channel_sum<Dtype><<<CAFFE_GET_BLOCKS(outer_num_ * inner_num_),
+      CAFFE_CUDA_NUM_THREADS>>>(outer_num_, channels, inner_num_, top_data,
+      scale_data);
+  // divide
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  kernel_channel_div<Dtype><<<CAFFE_GET_BLOCKS(count),
+      CAFFE_CUDA_NUM_THREADS>>>(count, outer_num_, channels, inner_num_,
+      scale_data, top_data);
+}
+
+template <typename Dtype>
+void SoftmaxLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  const Dtype* top_diff = top[0]->gpu_diff();
+  const Dtype* top_data = top[0]->gpu_data();
+  Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+  Dtype* scale_data = scale_.mutable_gpu_data();
+  int count = top[0]->count();
+  int channels = top[0]->shape(softmax_axis_);
+  caffe_copy(count, top_diff, bottom_diff);
+  // Compute inner1d(top_diff, top_data) and subtract them from the bottom diff.
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  kernel_channel_dot<Dtype><<<CAFFE_GET_BLOCKS(outer_num_ * inner_num_),
+      CAFFE_CUDA_NUM_THREADS>>>(outer_num_, channels, inner_num_,
+      top_diff, top_data, scale_data);
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  kernel_channel_subtract<Dtype><<<CAFFE_GET_BLOCKS(count),
+      CAFFE_CUDA_NUM_THREADS>>>(count, outer_num_, channels, inner_num_,
+      scale_data, bottom_diff);
+  // elementwise multiplication
+  caffe_gpu_mul<Dtype>(top[0]->count(), bottom_diff, top_data, bottom_diff);
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(SoftmaxLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/softmax_loss_layer.cpp b/src/caffe/layers/softmax_loss_layer.cpp
new file mode 100644
index 0000000..ba312f6
--- /dev/null
+++ b/src/caffe/layers/softmax_loss_layer.cpp
@@ -0,0 +1,130 @@
+#include <algorithm>
+#include <cfloat>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/layer_factory.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void SoftmaxWithLossLayer<Dtype>::LayerSetUp(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  LossLayer<Dtype>::LayerSetUp(bottom, top);
+  LayerParameter softmax_param(this->layer_param_);
+  softmax_param.set_type("Softmax");
+  softmax_layer_ = LayerRegistry<Dtype>::CreateLayer(softmax_param);
+  softmax_bottom_vec_.clear();
+  softmax_bottom_vec_.push_back(bottom[0]);
+  softmax_top_vec_.clear();
+  softmax_top_vec_.push_back(&prob_);
+  softmax_layer_->SetUp(softmax_bottom_vec_, softmax_top_vec_);
+
+  has_ignore_label_ =
+    this->layer_param_.loss_param().has_ignore_label();
+  if (has_ignore_label_) {
+    ignore_label_ = this->layer_param_.loss_param().ignore_label();
+  }
+  normalize_ = this->layer_param_.loss_param().normalize();
+}
+
+template <typename Dtype>
+void SoftmaxWithLossLayer<Dtype>::Reshape(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  LossLayer<Dtype>::Reshape(bottom, top);
+  softmax_layer_->Reshape(softmax_bottom_vec_, softmax_top_vec_);
+  softmax_axis_ =
+      bottom[0]->CanonicalAxisIndex(this->layer_param_.softmax_param().axis());
+  outer_num_ = bottom[0]->count(0, softmax_axis_);
+  inner_num_ = bottom[0]->count(softmax_axis_ + 1);
+  CHECK_EQ(outer_num_ * inner_num_, bottom[1]->count())
+      << "Number of labels must match number of predictions; "
+      << "e.g., if softmax axis == 1 and prediction shape is (N, C, H, W), "
+      << "label count (number of labels) must be N*H*W, "
+      << "with integer values in {0, 1, ..., C-1}.";
+  if (top.size() >= 2) {
+    // softmax output
+    top[1]->ReshapeLike(*bottom[0]);
+  }
+}
+
+template <typename Dtype>
+void SoftmaxWithLossLayer<Dtype>::Forward_cpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  // The forward pass computes the softmax prob values.
+  softmax_layer_->Forward(softmax_bottom_vec_, softmax_top_vec_);
+  const Dtype* prob_data = prob_.cpu_data();
+  const Dtype* label = bottom[1]->cpu_data();
+  int dim = prob_.count() / outer_num_;
+  int count = 0;
+  Dtype loss = 0;
+  for (int i = 0; i < outer_num_; ++i) {
+    for (int j = 0; j < inner_num_; j++) {
+      const int label_value = static_cast<int>(label[i * inner_num_ + j]);
+      if (has_ignore_label_ && label_value == ignore_label_) {
+        continue;
+      }
+      DCHECK_GE(label_value, 0);
+      DCHECK_LT(label_value, prob_.shape(softmax_axis_));
+      loss -= log(std::max(prob_data[i * dim + label_value * inner_num_ + j],
+                           Dtype(FLT_MIN)));
+      ++count;
+    }
+  }
+  if (normalize_) {
+    top[0]->mutable_cpu_data()[0] = loss / count;
+  } else {
+    top[0]->mutable_cpu_data()[0] = loss / outer_num_;
+  }
+  if (top.size() == 2) {
+    top[1]->ShareData(prob_);
+  }
+}
+
+template <typename Dtype>
+void SoftmaxWithLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[1]) {
+    LOG(FATAL) << this->type()
+               << " Layer cannot backpropagate to label inputs.";
+  }
+  if (propagate_down[0]) {
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    const Dtype* prob_data = prob_.cpu_data();
+    caffe_copy(prob_.count(), prob_data, bottom_diff);
+    const Dtype* label = bottom[1]->cpu_data();
+    int dim = prob_.count() / outer_num_;
+    int count = 0;
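+    // For softmax followed by multinomial logistic loss, the gradient w.r.t.
+    // the logits is prob - one_hot(label): copy the probabilities, then
+    // subtract 1 at each ground-truth class below.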
+    for (int i = 0; i < outer_num_; ++i) {
+      for (int j = 0; j < inner_num_; ++j) {
+        const int label_value = static_cast<int>(label[i * inner_num_ + j]);
+        if (has_ignore_label_ && label_value == ignore_label_) {
+          for (int c = 0; c < bottom[0]->shape(softmax_axis_); ++c) {
+            bottom_diff[i * dim + c * inner_num_ + j] = 0;
+          }
+        } else {
+          bottom_diff[i * dim + label_value * inner_num_ + j] -= 1;
+          ++count;
+        }
+      }
+    }
+    // Scale gradient
+    const Dtype loss_weight = top[0]->cpu_diff()[0];
+    if (normalize_) {
+      caffe_scal(prob_.count(), loss_weight / count, bottom_diff);
+    } else {
+      caffe_scal(prob_.count(), loss_weight / outer_num_, bottom_diff);
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(SoftmaxWithLossLayer);
+#endif
+
+INSTANTIATE_CLASS(SoftmaxWithLossLayer);
+REGISTER_LAYER_CLASS(SoftmaxWithLoss);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/softmax_loss_layer.cu b/src/caffe/layers/softmax_loss_layer.cu
new file mode 100644
index 0000000..7e0f3da
--- /dev/null
+++ b/src/caffe/layers/softmax_loss_layer.cu
@@ -0,0 +1,125 @@
+#include <algorithm>
+#include <cfloat>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+__global__ void SoftmaxLossForwardGPU(const int nthreads,
+          const Dtype* prob_data, const Dtype* label, Dtype* loss,
+          const int num, const int dim, const int spatial_dim,
+          const bool has_ignore_label_, const int ignore_label_,
+          Dtype* counts) {
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    const int n = index / spatial_dim;
+    const int s = index % spatial_dim;
+    const int label_value = static_cast<int>(label[n * spatial_dim + s]);
+    if (has_ignore_label_ && label_value == ignore_label_) {
+      loss[index] = 0;
+      counts[index] = 0;
+    } else {
+      loss[index] = -log(max(prob_data[n * dim + label_value * spatial_dim + s],
+                      Dtype(FLT_MIN)));
+      counts[index] = 1;
+    }
+  }
+}
+
+template <typename Dtype>
+void SoftmaxWithLossLayer<Dtype>::Forward_gpu(
+    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
+  softmax_layer_->Forward(softmax_bottom_vec_, softmax_top_vec_);
+  const Dtype* prob_data = prob_.gpu_data();
+  const Dtype* label = bottom[1]->gpu_data();
+  const int dim = prob_.count() / outer_num_;
+  const int nthreads = outer_num_ * inner_num_;
+  // Since this memory is not used for anything until it is overwritten
+  // on the backward pass, we use it here to avoid having to allocate new GPU
+  // memory to accumulate intermediate results in the kernel.
+  Dtype* loss_data = bottom[0]->mutable_gpu_diff();
+  // Similarly, this memory is never used elsewhere, and thus we can use it
+  // to avoid having to allocate additional GPU memory.
+  Dtype* counts = prob_.mutable_gpu_diff();
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  SoftmaxLossForwardGPU<Dtype><<<CAFFE_GET_BLOCKS(nthreads),
+      CAFFE_CUDA_NUM_THREADS>>>(nthreads, prob_data, label, loss_data,
+      outer_num_, dim, inner_num_, has_ignore_label_, ignore_label_, counts);
+  Dtype loss;
+  caffe_gpu_asum(nthreads, loss_data, &loss);
+  if (normalize_) {
+    Dtype count;
+    caffe_gpu_asum(nthreads, counts, &count);
+    loss /= count;
+  } else {
+    loss /= outer_num_;
+  }
+  top[0]->mutable_cpu_data()[0] = loss;
+  if (top.size() == 2) {
+    top[1]->ShareData(prob_);
+  }
+}
+
+template <typename Dtype>
+__global__ void SoftmaxLossBackwardGPU(const int nthreads, const Dtype* top,
+          const Dtype* label, Dtype* bottom_diff, const int num, const int dim,
+          const int spatial_dim, const bool has_ignore_label_,
+          const int ignore_label_, Dtype* counts) {
+  const int channels = dim / spatial_dim;
+
+  CUDA_KERNEL_LOOP(index, nthreads) {
+    const int n = index / spatial_dim;
+    const int s = index % spatial_dim;
+    const int label_value = static_cast<int>(label[n * spatial_dim + s]);
+
+    if (has_ignore_label_ && label_value == ignore_label_) {
+      for (int c = 0; c < channels; ++c) {
+        bottom_diff[n * dim + c * spatial_dim + s] = 0;
+      }
+      counts[index] = 0;
+    } else {
+      bottom_diff[n * dim + label_value * spatial_dim + s] -= 1;
+      counts[index] = 1;
+    }
+  }
+}
+
+template <typename Dtype>
+void SoftmaxWithLossLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[1]) {
+    LOG(FATAL) << this->type()
+               << " Layer cannot backpropagate to label inputs.";
+  }
+  if (propagate_down[0]) {
+    Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+    const Dtype* prob_data = prob_.gpu_data();
+    const Dtype* top_data = top[0]->gpu_data();
+    caffe_gpu_memcpy(prob_.count() * sizeof(Dtype), prob_data, bottom_diff);
+    const Dtype* label = bottom[1]->gpu_data();
+    const int dim = prob_.count() / outer_num_;
+    const int nthreads = outer_num_ * inner_num_;
+    // Since this memory is never used for anything else,
+    // we use it to avoid allocating new GPU memory.
+    Dtype* counts = prob_.mutable_gpu_diff();
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    SoftmaxLossBackwardGPU<Dtype><<<CAFFE_GET_BLOCKS(nthreads),
+        CAFFE_CUDA_NUM_THREADS>>>(nthreads, top_data, label, bottom_diff,
+        outer_num_, dim, inner_num_, has_ignore_label_, ignore_label_, counts);
+    const Dtype loss_weight = top[0]->cpu_diff()[0];
+    if (normalize_) {
+      Dtype count;
+      caffe_gpu_asum(nthreads, counts, &count);
+      caffe_gpu_scal(prob_.count(), loss_weight / count, bottom_diff);
+    } else {
+      caffe_gpu_scal(prob_.count(), loss_weight / outer_num_, bottom_diff);
+    }
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(SoftmaxWithLossLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/split_layer.cpp b/src/caffe/layers/split_layer.cpp
new file mode 100644
index 0000000..272cb59
--- /dev/null
+++ b/src/caffe/layers/split_layer.cpp
@@ -0,0 +1,60 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void SplitLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  count_ = bottom[0]->count();
+  for (int i = 0; i < top.size(); ++i) {
+    // Do not allow in-place computation in the SplitLayer.  Instead, share data
+    // by reference in the forward pass, and keep separate diff allocations in
+    // the backward pass.  (Technically, it should be possible to share the diff
+    // blob of the first split output with the input, but this seems to cause
+    // some strange effects in practice...)
+    CHECK_NE(top[i], bottom[0]) << this->type() << " Layer does not "
+        "allow in-place computation.";
+    top[i]->ReshapeLike(*bottom[0]);
+    CHECK_EQ(count_, top[i]->count());
+  }
+}
+
+template <typename Dtype>
+void SplitLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  for (int i = 0; i < top.size(); ++i) {
+    top[i]->ShareData(*bottom[0]);
+  }
+}
+
+template <typename Dtype>
+void SplitLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) { return; }
+  if (top.size() == 1) {
+    caffe_copy(count_, top[0]->cpu_diff(), bottom[0]->mutable_cpu_diff());
+    return;
+  }
+  caffe_add(count_, top[0]->cpu_diff(), top[1]->cpu_diff(),
+            bottom[0]->mutable_cpu_diff());
+  // Add remaining top blob diffs.
+  for (int i = 2; i < top.size(); ++i) {
+    const Dtype* top_diff = top[i]->cpu_diff();
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    caffe_axpy(count_, Dtype(1.), top_diff, bottom_diff);
+  }
+}
+
+
+#ifdef CPU_ONLY
+STUB_GPU(SplitLayer);
+#endif
+
+INSTANTIATE_CLASS(SplitLayer);
+REGISTER_LAYER_CLASS(Split);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/split_layer.cu b/src/caffe/layers/split_layer.cu
new file mode 100644
index 0000000..a4f5df2
--- /dev/null
+++ b/src/caffe/layers/split_layer.cu
@@ -0,0 +1,38 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void SplitLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  for (int i = 0; i < top.size(); ++i) {
+    top[i]->ShareData(*bottom[0]);
+  }
+}
+
+template <typename Dtype>
+void SplitLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) { return; }
+  if (top.size() == 1) {
+    caffe_copy(count_, top[0]->gpu_diff(), bottom[0]->mutable_gpu_diff());
+    return;
+  }
+  caffe_gpu_add(count_, top[0]->gpu_diff(), top[1]->gpu_diff(),
+                bottom[0]->mutable_gpu_diff());
+  // Add remaining top blob diffs.
+  for (int i = 2; i < top.size(); ++i) {
+    const Dtype* top_diff = top[i]->gpu_diff();
+    Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+    caffe_gpu_axpy(count_, Dtype(1.), top_diff, bottom_diff);
+  }
+}
+
+
+INSTANTIATE_LAYER_GPU_FUNCS(SplitLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/spp_layer.cpp b/src/caffe/layers/spp_layer.cpp
new file mode 100644
index 0000000..795dd71
--- /dev/null
+++ b/src/caffe/layers/spp_layer.cpp
@@ -0,0 +1,193 @@
+#include <algorithm>
+#include <cfloat>
+#include <vector>
+
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/syncedmem.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+using std::min;
+using std::max;
+
+template <typename Dtype>
+LayerParameter SPPLayer<Dtype>::GetPoolingParam(const int pyramid_level,
+      const int bottom_h, const int bottom_w, const SPPParameter spp_param) {
+  LayerParameter pooling_param;
+  int num_bins = pow(2, pyramid_level);
+
+  // find padding and kernel size so that the pooling is
+  // performed across the entire image
+  int kernel_h = ceil(bottom_h / static_cast<double>(num_bins));
+  // remainder_h is the min number of pixels that need to be padded before
+  // entire image height is pooled over with the chosen kernel dimension
+  int remainder_h = kernel_h * num_bins - bottom_h;
+  // the pooling layer pads pad_h pixels on the top and pad_h on the bottom
+  // of the image (2 * pad_h in total).
+  int pad_h = (remainder_h + 1) / 2;
+
+  // similar logic for width
+  int kernel_w = ceil(bottom_w / static_cast<double>(num_bins));
+  int remainder_w = kernel_w * num_bins - bottom_w;
+  int pad_w = (remainder_w + 1) / 2;
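+  // Illustrative example: bottom_h = 13 at pyramid_level = 2 gives
+  // num_bins = 4, kernel_h = ceil(13 / 4) = 4, remainder_h = 16 - 13 = 3,
+  // and pad_h = 2, so four stride-4 windows of height 4 cover the
+  // padded input height.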
+
+  pooling_param.mutable_pooling_param()->set_pad_h(pad_h);
+  pooling_param.mutable_pooling_param()->set_pad_w(pad_w);
+  pooling_param.mutable_pooling_param()->set_kernel_h(kernel_h);
+  pooling_param.mutable_pooling_param()->set_kernel_w(kernel_w);
+  pooling_param.mutable_pooling_param()->set_stride_h(kernel_h);
+  pooling_param.mutable_pooling_param()->set_stride_w(kernel_w);
+
+  switch (spp_param.pool()) {
+  case SPPParameter_PoolMethod_MAX:
+    pooling_param.mutable_pooling_param()->set_pool(
+        PoolingParameter_PoolMethod_MAX);
+    break;
+  case SPPParameter_PoolMethod_AVE:
+    pooling_param.mutable_pooling_param()->set_pool(
+        PoolingParameter_PoolMethod_AVE);
+    break;
+  case SPPParameter_PoolMethod_STOCHASTIC:
+    pooling_param.mutable_pooling_param()->set_pool(
+        PoolingParameter_PoolMethod_STOCHASTIC);
+    break;
+  default:
+    LOG(FATAL) << "Unknown pooling method.";
+  }
+
+  return pooling_param;
+}
+
+template <typename Dtype>
+void SPPLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  SPPParameter spp_param = this->layer_param_.spp_param();
+
+  bottom_h_ = bottom[0]->height();
+  bottom_w_ = bottom[0]->width();
+  CHECK_GT(bottom_h_, 0) << "Input dimensions cannot be zero.";
+  CHECK_GT(bottom_w_, 0) << "Input dimensions cannot be zero.";
+
+  pyramid_height_ = spp_param.pyramid_height();
+  split_top_vec_.clear();
+  pooling_bottom_vecs_.clear();
+  pooling_layers_.clear();
+  pooling_top_vecs_.clear();
+  pooling_outputs_.clear();
+  flatten_layers_.clear();
+  flatten_top_vecs_.clear();
+  flatten_outputs_.clear();
+  concat_bottom_vec_.clear();
+
+  // split layer output holders setup
+  for (int i = 0; i < pyramid_height_; i++) {
+    split_top_vec_.push_back(new Blob<Dtype>());
+  }
+
+  // split layer setup
+  LayerParameter split_param;
+  split_layer_.reset(new SplitLayer<Dtype>(split_param));
+  split_layer_->SetUp(bottom, split_top_vec_);
+
+  for (int i = 0; i < pyramid_height_; i++) {
+    // pooling layer input holders setup
+    pooling_bottom_vecs_.push_back(new vector<Blob<Dtype>*>);
+    pooling_bottom_vecs_[i]->push_back(split_top_vec_[i]);
+
+    // pooling layer output holders setup
+    pooling_outputs_.push_back(new Blob<Dtype>());
+    pooling_top_vecs_.push_back(new vector<Blob<Dtype>*>);
+    pooling_top_vecs_[i]->push_back(pooling_outputs_[i]);
+
+    // pooling layer setup
+    LayerParameter pooling_param = GetPoolingParam(
+        i, bottom_h_, bottom_w_, spp_param);
+
+    pooling_layers_.push_back(shared_ptr<PoolingLayer<Dtype> > (
+        new PoolingLayer<Dtype>(pooling_param)));
+    pooling_layers_[i]->SetUp(*pooling_bottom_vecs_[i], *pooling_top_vecs_[i]);
+
+    // flatten layer output holders setup
+    flatten_outputs_.push_back(new Blob<Dtype>());
+    flatten_top_vecs_.push_back(new vector<Blob<Dtype>*>);
+    flatten_top_vecs_[i]->push_back(flatten_outputs_[i]);
+
+    // flatten layer setup
+    LayerParameter flatten_param;
+    flatten_layers_.push_back(new FlattenLayer<Dtype>(flatten_param));
+    flatten_layers_[i]->SetUp(*pooling_top_vecs_[i], *flatten_top_vecs_[i]);
+
+    // concat layer input holders setup
+    concat_bottom_vec_.push_back(flatten_outputs_[i]);
+  }
+
+  // concat layer setup
+  LayerParameter concat_param;
+  concat_layer_.reset(new ConcatLayer<Dtype>(concat_param));
+  concat_layer_->SetUp(concat_bottom_vec_, top);
+}
+
+template <typename Dtype>
+void SPPLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  CHECK_EQ(4, bottom[0]->num_axes()) << "Input must have 4 axes, "
+      << "corresponding to (num, channels, height, width)";
+  channels_ = bottom[0]->channels();
+  bottom_h_ = bottom[0]->height();
+  bottom_w_ = bottom[0]->width();
+  SPPParameter spp_param = this->layer_param_.spp_param();
+  split_layer_->Reshape(bottom, split_top_vec_);
+  for (int i = 0; i < pyramid_height_; i++) {
+    LayerParameter pooling_param = GetPoolingParam(
+        i, bottom_h_, bottom_w_, spp_param);
+
+    pooling_layers_[i].reset(
+        new PoolingLayer<Dtype>(pooling_param));
+    pooling_layers_[i]->SetUp(
+        *pooling_bottom_vecs_[i], *pooling_top_vecs_[i]);
+    pooling_layers_[i]->Reshape(
+        *pooling_bottom_vecs_[i], *pooling_top_vecs_[i]);
+    flatten_layers_[i]->Reshape(
+        *pooling_top_vecs_[i], *flatten_top_vecs_[i]);
+  }
+  concat_layer_->Reshape(concat_bottom_vec_, top);
+}
+
+template <typename Dtype>
+void SPPLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  split_layer_->Forward(bottom, split_top_vec_);
+  for (int i = 0; i < pyramid_height_; i++) {
+    pooling_layers_[i]->Forward(
+        *pooling_bottom_vecs_[i], *pooling_top_vecs_[i]);
+    flatten_layers_[i]->Forward(
+        *pooling_top_vecs_[i], *flatten_top_vecs_[i]);
+  }
+  concat_layer_->Forward(concat_bottom_vec_, top);
+}
+
+template <typename Dtype>
+void SPPLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
+  if (!propagate_down[0]) {
+    return;
+  }
+  vector<bool> concat_propagate_down(pyramid_height_, true);
+  concat_layer_->Backward(top, concat_propagate_down, concat_bottom_vec_);
+  for (int i = 0; i < pyramid_height_; i++) {
+    flatten_layers_[i]->Backward(
+        *flatten_top_vecs_[i], propagate_down, *pooling_top_vecs_[i]);
+    pooling_layers_[i]->Backward(
+        *pooling_top_vecs_[i], propagate_down, *pooling_bottom_vecs_[i]);
+  }
+  split_layer_->Backward(split_top_vec_, propagate_down, bottom);
+}
+
+
+INSTANTIATE_CLASS(SPPLayer);
+REGISTER_LAYER_CLASS(SPP);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/tanh_layer.cpp b/src/caffe/layers/tanh_layer.cpp
new file mode 100644
index 0000000..ee5ed77
--- /dev/null
+++ b/src/caffe/layers/tanh_layer.cpp
@@ -0,0 +1,46 @@
+// TanH neuron activation function layer.
+// Adapted from ReLU layer code written by Yangqing Jia
+
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void TanHLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  const int count = bottom[0]->count();
+  for (int i = 0; i < count; ++i) {
+    top_data[i] = tanh(bottom_data[i]);
+  }
+}
+
+template <typename Dtype>
+void TanHLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    const Dtype* top_data = top[0]->cpu_data();
+    const Dtype* top_diff = top[0]->cpu_diff();
+    Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
+    const int count = bottom[0]->count();
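+    // d(tanh)/dx = 1 - tanh(x)^2; top_data already holds tanh(x) from the
+    // forward pass.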
+    Dtype tanhx;
+    for (int i = 0; i < count; ++i) {
+      tanhx = top_data[i];
+      bottom_diff[i] = top_diff[i] * (1 - tanhx * tanhx);
+    }
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU(TanHLayer);
+#endif
+
+INSTANTIATE_CLASS(TanHLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/tanh_layer.cu b/src/caffe/layers/tanh_layer.cu
new file mode 100644
index 0000000..ccd6e63
--- /dev/null
+++ b/src/caffe/layers/tanh_layer.cu
@@ -0,0 +1,59 @@
+// TanH neuron activation function layer.
+// Adapted from ReLU layer code written by Yangqing Jia
+
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+__global__ void TanHForward(const int n, const Dtype* in, Dtype* out) {
+  CUDA_KERNEL_LOOP(index, n) {
+    out[index] = tanh(in[index]);
+  }
+}
+
+template <typename Dtype>
+void TanHLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  const int count = bottom[0]->count();
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  TanHForward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+      count, bottom_data, top_data);
+  CUDA_POST_KERNEL_CHECK;
+}
+
+template <typename Dtype>
+__global__ void TanHBackward(const int n, const Dtype* in_diff,
+    const Dtype* out_data, Dtype* out_diff) {
+  CUDA_KERNEL_LOOP(index, n) {
+    Dtype tanhx = out_data[index];
+    out_diff[index] = in_diff[index] * (1 - tanhx * tanhx);
+  }
+}
+
+template <typename Dtype>
+void TanHLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+    const vector<bool>& propagate_down,
+    const vector<Blob<Dtype>*>& bottom) {
+  if (propagate_down[0]) {
+    const Dtype* top_data = top[0]->gpu_data();
+    const Dtype* top_diff = top[0]->gpu_diff();
+    Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+    const int count = bottom[0]->count();
+    // NOLINT_NEXT_LINE(whitespace/operators)
+    TanHBackward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+        count, top_diff, top_data, bottom_diff);
+    CUDA_POST_KERNEL_CHECK;
+  }
+}
+
+INSTANTIATE_LAYER_GPU_FUNCS(TanHLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/threshold_layer.cpp b/src/caffe/layers/threshold_layer.cpp
new file mode 100644
index 0000000..2365e7b
--- /dev/null
+++ b/src/caffe/layers/threshold_layer.cpp
@@ -0,0 +1,34 @@
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+
+namespace caffe {
+
+template <typename Dtype>
+void ThresholdLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  NeuronLayer<Dtype>::LayerSetUp(bottom, top);
+  threshold_ = this->layer_param_.threshold_param().threshold();
+}
+
+template <typename Dtype>
+void ThresholdLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = top[0]->mutable_cpu_data();
+  const int count = bottom[0]->count();
+  for (int i = 0; i < count; ++i) {
+    top_data[i] = (bottom_data[i] > threshold_) ? Dtype(1) : Dtype(0);
+  }
+}
+
+#ifdef CPU_ONLY
+STUB_GPU_FORWARD(ThresholdLayer, Forward);
+#endif
+
+INSTANTIATE_CLASS(ThresholdLayer);
+REGISTER_LAYER_CLASS(Threshold);
+
+}  // namespace caffe
diff --git a/src/caffe/layers/threshold_layer.cu b/src/caffe/layers/threshold_layer.cu
new file mode 100644
index 0000000..bfa7f15
--- /dev/null
+++ b/src/caffe/layers/threshold_layer.cu
@@ -0,0 +1,33 @@
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+__global__ void ThresholdForward(const int n, const Dtype threshold,
+    const Dtype* in, Dtype* out) {
+  CUDA_KERNEL_LOOP(index, n) {
+    out[index] = in[index] > threshold ? 1 : 0;
+  }
+}
+
+template <typename Dtype>
+void ThresholdLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+    const vector<Blob<Dtype>*>& top) {
+  const Dtype* bottom_data = bottom[0]->gpu_data();
+  Dtype* top_data = top[0]->mutable_gpu_data();
+  const int count = bottom[0]->count();
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  ThresholdForward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
+      count, threshold_, bottom_data, top_data);
+  CUDA_POST_KERNEL_CHECK;
+}
+
+
+INSTANTIATE_LAYER_GPU_FORWARD(ThresholdLayer);
+
+
+}  // namespace caffe
diff --git a/src/caffe/layers/window_data_layer.cpp b/src/caffe/layers/window_data_layer.cpp
new file mode 100644
index 0000000..c127d56
--- /dev/null
+++ b/src/caffe/layers/window_data_layer.cpp
@@ -0,0 +1,466 @@
+#include <opencv2/highgui/highgui_c.h>
+#include <stdint.h>
+
+#include <algorithm>
+#include <map>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "opencv2/core/core.hpp"
+#include "opencv2/highgui/highgui.hpp"
+#include "opencv2/imgproc/imgproc.hpp"
+
+#include "caffe/common.hpp"
+#include "caffe/data_layers.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/benchmark.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/util/rng.hpp"
+
+// caffe.proto > LayerParameter > WindowDataParameter
+//   'source' field specifies the window_file
+//   'crop_size' indicates the desired warped size
+
+namespace caffe {
+
+template <typename Dtype>
+WindowDataLayer<Dtype>::~WindowDataLayer<Dtype>() {
+  this->JoinPrefetchThread();
+}
+
+template <typename Dtype>
+void WindowDataLayer<Dtype>::DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
+      const vector<Blob<Dtype>*>& top) {
+  // LayerSetUp runs through the window_file and creates two structures
+  // that hold windows: one for foreground (object) windows and one
+  // for background (non-object) windows. We use an overlap threshold
+  // to decide which is which.
+
+  // window_file format
+  // repeated:
+  //    # image_index
+  //    img_path (abs path)
+  //    channels
+  //    height
+  //    width
+  //    num_windows
+  //    class_index overlap x1 y1 x2 y2
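+  //
+  // e.g. (hypothetical values; one window entry per line after num_windows):
+  //    # 0
+  //    /path/to/images/000001.jpg
+  //    3
+  //    375
+  //    500
+  //    2
+  //    7 0.80 48 240 195 371
+  //    0 0.05 8 12 498 352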
+
+  LOG(INFO) << "Window data layer:" << std::endl
+      << "  foreground (object) overlap threshold: "
+      << this->layer_param_.window_data_param().fg_threshold() << std::endl
+      << "  background (non-object) overlap threshold: "
+      << this->layer_param_.window_data_param().bg_threshold() << std::endl
+      << "  foreground sampling fraction: "
+      << this->layer_param_.window_data_param().fg_fraction() << std::endl
+      << "  cache_images: "
+      << this->layer_param_.window_data_param().cache_images() << std::endl
+      << "  root_folder: "
+      << this->layer_param_.window_data_param().root_folder();
+
+  cache_images_ = this->layer_param_.window_data_param().cache_images();
+  string root_folder = this->layer_param_.window_data_param().root_folder();
+
+  const bool prefetch_needs_rand =
+      this->transform_param_.mirror() ||
+      this->transform_param_.crop_size();
+  if (prefetch_needs_rand) {
+    const unsigned int prefetch_rng_seed = caffe_rng_rand();
+    prefetch_rng_.reset(new Caffe::RNG(prefetch_rng_seed));
+  } else {
+    prefetch_rng_.reset();
+  }
+
+  std::ifstream infile(this->layer_param_.window_data_param().source().c_str());
+  CHECK(infile.good()) << "Failed to open window file "
+      << this->layer_param_.window_data_param().source() << std::endl;
+
+  map<int, int> label_hist;
+  label_hist.insert(std::make_pair(0, 0));
+
+  string hashtag;
+  int image_index, channels;
+  if (!(infile >> hashtag >> image_index)) {
+    LOG(FATAL) << "Window file is empty";
+  }
+  do {
+    CHECK_EQ(hashtag, "#");
+    // read image path
+    string image_path;
+    infile >> image_path;
+    image_path = root_folder + image_path;
+    // read image dimensions
+    vector<int> image_size(3);
+    infile >> image_size[0] >> image_size[1] >> image_size[2];
+    channels = image_size[0];
+    image_database_.push_back(std::make_pair(image_path, image_size));
+
+    if (cache_images_) {
+      Datum datum;
+      if (!ReadFileToDatum(image_path, &datum)) {
+        LOG(ERROR) << "Could not open or find file " << image_path;
+        return;
+      }
+      image_database_cache_.push_back(std::make_pair(image_path, datum));
+    }
+    // read each box
+    int num_windows;
+    infile >> num_windows;
+    const float fg_threshold =
+        this->layer_param_.window_data_param().fg_threshold();
+    const float bg_threshold =
+        this->layer_param_.window_data_param().bg_threshold();
+    for (int i = 0; i < num_windows; ++i) {
+      int label, x1, y1, x2, y2;
+      float overlap;
+      infile >> label >> overlap >> x1 >> y1 >> x2 >> y2;
+
+      vector<float> window(WindowDataLayer::NUM);
+      window[WindowDataLayer::IMAGE_INDEX] = image_index;
+      window[WindowDataLayer::LABEL] = label;
+      window[WindowDataLayer::OVERLAP] = overlap;
+      window[WindowDataLayer::X1] = x1;
+      window[WindowDataLayer::Y1] = y1;
+      window[WindowDataLayer::X2] = x2;
+      window[WindowDataLayer::Y2] = y2;
+
+      // add window to foreground list or background list
+      if (overlap >= fg_threshold) {
+        int label = window[WindowDataLayer::LABEL];
+        CHECK_GT(label, 0);
+        fg_windows_.push_back(window);
+        label_hist.insert(std::make_pair(label, 0));
+        label_hist[label]++;
+      } else if (overlap < bg_threshold) {
+        // background window, force label and overlap to 0
+        window[WindowDataLayer::LABEL] = 0;
+        window[WindowDataLayer::OVERLAP] = 0;
+        bg_windows_.push_back(window);
+        label_hist[0]++;
+      }
+    }
+
+    if (image_index % 100 == 0) {
+      LOG(INFO) << "num: " << image_index << " "
+          << image_path << " "
+          << image_size[0] << " "
+          << image_size[1] << " "
+          << image_size[2] << " "
+          << "windows to process: " << num_windows;
+    }
+  } while (infile >> hashtag >> image_index);
+
+  LOG(INFO) << "Number of images: " << image_index+1;
+
+  for (map<int, int>::iterator it = label_hist.begin();
+      it != label_hist.end(); ++it) {
+    LOG(INFO) << "class " << it->first << " has " << label_hist[it->first]
+              << " samples";
+  }
+
+  LOG(INFO) << "Amount of context padding: "
+      << this->layer_param_.window_data_param().context_pad();
+
+  LOG(INFO) << "Crop mode: "
+      << this->layer_param_.window_data_param().crop_mode();
+
+  // image
+  const int crop_size = this->transform_param_.crop_size();
+  CHECK_GT(crop_size, 0);
+  const int batch_size = this->layer_param_.window_data_param().batch_size();
+  top[0]->Reshape(batch_size, channels, crop_size, crop_size);
+  this->prefetch_data_.Reshape(batch_size, channels, crop_size, crop_size);
+
+  LOG(INFO) << "output data size: " << top[0]->num() << ","
+      << top[0]->channels() << "," << top[0]->height() << ","
+      << top[0]->width();
+  // label
+  vector<int> label_shape(1, batch_size);
+  top[1]->Reshape(label_shape);
+  this->prefetch_label_.Reshape(label_shape);
+
+  // data mean
+  has_mean_file_ = this->transform_param_.has_mean_file();
+  has_mean_values_ = this->transform_param_.mean_value_size() > 0;
+  if (has_mean_file_) {
+    const string& mean_file =
+          this->transform_param_.mean_file();
+    LOG(INFO) << "Loading mean file from: " << mean_file;
+    BlobProto blob_proto;
+    ReadProtoFromBinaryFileOrDie(mean_file.c_str(), &blob_proto);
+    data_mean_.FromProto(blob_proto);
+  }
+  if (has_mean_values_) {
+    CHECK(has_mean_file_ == false) <<
+      "Cannot specify mean_file and mean_value at the same time";
+    for (int c = 0; c < this->transform_param_.mean_value_size(); ++c) {
+      mean_values_.push_back(this->transform_param_.mean_value(c));
+    }
+    CHECK(mean_values_.size() == 1 || mean_values_.size() == channels) <<
+     "Specify either 1 mean_value or as many as channels: " << channels;
+    if (channels > 1 && mean_values_.size() == 1) {
+      // Replicate the mean_value for simplicity
+      for (int c = 1; c < channels; ++c) {
+        mean_values_.push_back(mean_values_[0]);
+      }
+    }
+  }
+}
+
+template <typename Dtype>
+unsigned int WindowDataLayer<Dtype>::PrefetchRand() {
+  CHECK(prefetch_rng_);
+  caffe::rng_t* prefetch_rng =
+      static_cast<caffe::rng_t*>(prefetch_rng_->generator());
+  return (*prefetch_rng)();
+}
+
+// Thread fetching the data
+template <typename Dtype>
+void WindowDataLayer<Dtype>::InternalThreadEntry() {
+  // At each iteration, sample N windows where N*p are foreground (object)
+  // windows and N*(1-p) are background (non-object) windows
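+  // (e.g., with batch_size N = 128 and fg_fraction p = 0.25 -- illustrative
+  //  values -- each batch holds 32 foreground and 96 background windows)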
+  CPUTimer batch_timer;
+  batch_timer.Start();
+  double read_time = 0;
+  double trans_time = 0;
+  CPUTimer timer;
+  Dtype* top_data = this->prefetch_data_.mutable_cpu_data();
+  Dtype* top_label = this->prefetch_label_.mutable_cpu_data();
+  const Dtype scale = this->layer_param_.window_data_param().scale();
+  const int batch_size = this->layer_param_.window_data_param().batch_size();
+  const int context_pad = this->layer_param_.window_data_param().context_pad();
+  const int crop_size = this->transform_param_.crop_size();
+  const bool mirror = this->transform_param_.mirror();
+  const float fg_fraction =
+      this->layer_param_.window_data_param().fg_fraction();
+  Dtype* mean = NULL;
+  int mean_off = 0;
+  int mean_width = 0;
+  int mean_height = 0;
+  if (this->has_mean_file_) {
+    mean = this->data_mean_.mutable_cpu_data();
+    mean_off = (this->data_mean_.width() - crop_size) / 2;
+    mean_width = this->data_mean_.width();
+    mean_height = this->data_mean_.height();
+  }
+  cv::Size cv_crop_size(crop_size, crop_size);
+  const string& crop_mode = this->layer_param_.window_data_param().crop_mode();
+
+  bool use_square = (crop_mode == "square");
+
+  // zero out batch
+  caffe_set(this->prefetch_data_.count(), Dtype(0), top_data);
+
+  const int num_fg = static_cast<int>(static_cast<float>(batch_size)
+      * fg_fraction);
+  const int num_samples[2] = { batch_size - num_fg, num_fg };
+
+  int item_id = 0;
+  // sample from bg set then fg set
+  for (int is_fg = 0; is_fg < 2; ++is_fg) {
+    for (int dummy = 0; dummy < num_samples[is_fg]; ++dummy) {
+      // sample a window
+      timer.Start();
+      const unsigned int rand_index = PrefetchRand();
+      vector<float> window = (is_fg) ?
+          fg_windows_[rand_index % fg_windows_.size()] :
+          bg_windows_[rand_index % bg_windows_.size()];
+
+      bool do_mirror = mirror && PrefetchRand() % 2;
+
+      // load the image containing the window
+      pair<std::string, vector<int> > image =
+          image_database_[window[WindowDataLayer<Dtype>::IMAGE_INDEX]];
+
+      cv::Mat cv_img;
+      if (this->cache_images_) {
+        pair<std::string, Datum> image_cached =
+          image_database_cache_[window[WindowDataLayer<Dtype>::IMAGE_INDEX]];
+        cv_img = DecodeDatumToCVMat(image_cached.second, true);
+      } else {
+        cv_img = cv::imread(image.first, CV_LOAD_IMAGE_COLOR);
+        if (!cv_img.data) {
+          LOG(ERROR) << "Could not open or find file " << image.first;
+          return;
+        }
+      }
+      read_time += timer.MicroSeconds();
+      timer.Start();
+      const int channels = cv_img.channels();
+
+      // crop window out of image and warp it
+      int x1 = window[WindowDataLayer<Dtype>::X1];
+      int y1 = window[WindowDataLayer<Dtype>::Y1];
+      int x2 = window[WindowDataLayer<Dtype>::X2];
+      int y2 = window[WindowDataLayer<Dtype>::Y2];
+
+      int pad_w = 0;
+      int pad_h = 0;
+      if (context_pad > 0 || use_square) {
+        // scale factor by which to expand the original region
+        // such that after warping the expanded region to crop_size x crop_size
+        // there's exactly context_pad amount of padding on each side
+        Dtype context_scale = static_cast<Dtype>(crop_size) /
+            static_cast<Dtype>(crop_size - 2*context_pad);
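+        // (e.g., with crop_size = 227 and context_pad = 16 -- illustrative
+        //  values -- context_scale = 227 / (227 - 32) = 227 / 195 ~ 1.164)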
+
+        // compute the expanded region
+        Dtype half_height = static_cast<Dtype>(y2-y1+1)/2.0;
+        Dtype half_width = static_cast<Dtype>(x2-x1+1)/2.0;
+        Dtype center_x = static_cast<Dtype>(x1) + half_width;
+        Dtype center_y = static_cast<Dtype>(y1) + half_height;
+        if (use_square) {
+          if (half_height > half_width) {
+            half_width = half_height;
+          } else {
+            half_height = half_width;
+          }
+        }
+        x1 = static_cast<int>(round(center_x - half_width*context_scale));
+        x2 = static_cast<int>(round(center_x + half_width*context_scale));
+        y1 = static_cast<int>(round(center_y - half_height*context_scale));
+        y2 = static_cast<int>(round(center_y + half_height*context_scale));
+
+        // the expanded region may go outside of the image
+        // so we compute the clipped (expanded) region and keep track of
+        // the extent beyond the image
+        int unclipped_height = y2-y1+1;
+        int unclipped_width = x2-x1+1;
+        int pad_x1 = std::max(0, -x1);
+        int pad_y1 = std::max(0, -y1);
+        int pad_x2 = std::max(0, x2 - cv_img.cols + 1);
+        int pad_y2 = std::max(0, y2 - cv_img.rows + 1);
+        // clip bounds
+        x1 = x1 + pad_x1;
+        x2 = x2 - pad_x2;
+        y1 = y1 + pad_y1;
+        y2 = y2 - pad_y2;
+        CHECK_GT(x1, -1);
+        CHECK_GT(y1, -1);
+        CHECK_LT(x2, cv_img.cols);
+        CHECK_LT(y2, cv_img.rows);
+
+        int clipped_height = y2-y1+1;
+        int clipped_width = x2-x1+1;
+
+        // scale factors that would be used to warp the unclipped
+        // expanded region
+        Dtype scale_x =
+            static_cast<Dtype>(crop_size)/static_cast<Dtype>(unclipped_width);
+        Dtype scale_y =
+            static_cast<Dtype>(crop_size)/static_cast<Dtype>(unclipped_height);
+
+        // size to warp the clipped expanded region to
+        cv_crop_size.width =
+            static_cast<int>(round(static_cast<Dtype>(clipped_width)*scale_x));
+        cv_crop_size.height =
+            static_cast<int>(round(static_cast<Dtype>(clipped_height)*scale_y));
+        pad_x1 = static_cast<int>(round(static_cast<Dtype>(pad_x1)*scale_x));
+        pad_x2 = static_cast<int>(round(static_cast<Dtype>(pad_x2)*scale_x));
+        pad_y1 = static_cast<int>(round(static_cast<Dtype>(pad_y1)*scale_y));
+        pad_y2 = static_cast<int>(round(static_cast<Dtype>(pad_y2)*scale_y));
+
+        pad_h = pad_y1;
+        // if we're mirroring, we mirror the padding too (to be pedantic)
+        if (do_mirror) {
+          pad_w = pad_x2;
+        } else {
+          pad_w = pad_x1;
+        }
+
+        // ensure that the warped, clipped region plus the padding fits in the
+        // crop_size x crop_size image (it might not due to rounding)
+        if (pad_h + cv_crop_size.height > crop_size) {
+          cv_crop_size.height = crop_size - pad_h;
+        }
+        if (pad_w + cv_crop_size.width > crop_size) {
+          cv_crop_size.width = crop_size - pad_w;
+        }
+      }
+
+      cv::Rect roi(x1, y1, x2-x1+1, y2-y1+1);
+      cv::Mat cv_cropped_img = cv_img(roi);
+      cv::resize(cv_cropped_img, cv_cropped_img,
+          cv_crop_size, 0, 0, cv::INTER_LINEAR);
+
+      // horizontal flip at random
+      if (do_mirror) {
+        cv::flip(cv_cropped_img, cv_cropped_img, 1);
+      }
+
+      // copy the warped window into top_data
+      for (int h = 0; h < cv_cropped_img.rows; ++h) {
+        const uchar* ptr = cv_cropped_img.ptr<uchar>(h);
+        int img_index = 0;
+        for (int w = 0; w < cv_cropped_img.cols; ++w) {
+          for (int c = 0; c < channels; ++c) {
+            int top_index = ((item_id * channels + c) * crop_size + h + pad_h)
+                     * crop_size + w + pad_w;
+            // int top_index = (c * height + h) * width + w;
+            Dtype pixel = static_cast<Dtype>(ptr[img_index++]);
+            if (this->has_mean_file_) {
+              int mean_index = (c * mean_height + h + mean_off + pad_h)
+                           * mean_width + w + mean_off + pad_w;
+              top_data[top_index] = (pixel - mean[mean_index]) * scale;
+            } else {
+              if (this->has_mean_values_) {
+                top_data[top_index] = (pixel - this->mean_values_[c]) * scale;
+              } else {
+                top_data[top_index] = pixel * scale;
+              }
+            }
+          }
+        }
+      }
+      trans_time += timer.MicroSeconds();
+      // get window label
+      top_label[item_id] = window[WindowDataLayer<Dtype>::LABEL];
+
+      #if 0
+      // useful debugging code for dumping transformed windows to disk
+      string file_id;
+      std::stringstream ss;
+      ss << PrefetchRand();
+      ss >> file_id;
+      std::ofstream inf((string("dump/") + file_id +
+          string("_info.txt")).c_str(), std::ofstream::out);
+      inf << image.first << std::endl
+          << window[WindowDataLayer<Dtype>::X1]+1 << std::endl
+          << window[WindowDataLayer<Dtype>::Y1]+1 << std::endl
+          << window[WindowDataLayer<Dtype>::X2]+1 << std::endl
+          << window[WindowDataLayer<Dtype>::Y2]+1 << std::endl
+          << do_mirror << std::endl
+          << top_label[item_id] << std::endl
+          << is_fg << std::endl;
+      inf.close();
+      std::ofstream top_data_file((string("dump/") + file_id +
+          string("_data.txt")).c_str(),
+          std::ofstream::out | std::ofstream::binary);
+      for (int c = 0; c < channels; ++c) {
+        for (int h = 0; h < crop_size; ++h) {
+          for (int w = 0; w < crop_size; ++w) {
+            top_data_file.write(reinterpret_cast<char*>(
+                &top_data[((item_id * channels + c) * crop_size + h)
+                          * crop_size + w]),
+                sizeof(Dtype));
+          }
+        }
+      }
+      top_data_file.close();
+      #endif
+
+      item_id++;
+    }
+  }
+  batch_timer.Stop();
+  DLOG(INFO) << "Prefetch batch: " << batch_timer.MilliSeconds() << " ms.";
+  DLOG(INFO) << "     Read time: " << read_time / 1000 << " ms.";
+  DLOG(INFO) << "Transform time: " << trans_time / 1000 << " ms.";
+}
+
+INSTANTIATE_CLASS(WindowDataLayer);
+REGISTER_LAYER_CLASS(WindowData);
+
+}  // namespace caffe
diff --git a/src/caffe/net.cpp b/src/caffe/net.cpp
new file mode 100644
index 0000000..a18ee63
--- /dev/null
+++ b/src/caffe/net.cpp
@@ -0,0 +1,852 @@
+#include <algorithm>
+#include <map>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/net.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/insert_splits.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/util/upgrade_proto.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+Net<Dtype>::Net(const NetParameter& param) {
+  Init(param);
+}
+
+template <typename Dtype>
+Net<Dtype>::Net(const string& param_file, Phase phase) {
+  NetParameter param;
+  ReadNetParamsFromTextFileOrDie(param_file, &param);
+  param.mutable_state()->set_phase(phase);
+  Init(param);
+}
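+
+// A minimal usage sketch (file names are illustrative, not part of this code):
+//   Net<float> net("deploy.prototxt", TEST);
+//   net.CopyTrainedLayersFrom("weights.caffemodel");
+//   float loss;
+//   const vector<Blob<float>*>& out = net.ForwardPrefilled(&loss);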
+
+template <typename Dtype>
+void Net<Dtype>::Init(const NetParameter& in_param) {
+  // Set phase from the state.
+  phase_ = in_param.state().phase();
+  // Filter layers based on their include/exclude rules and
+  // the current NetState.
+  NetParameter filtered_param;
+  FilterNet(in_param, &filtered_param);
+  LOG(INFO) << "Initializing net from parameters: " << std::endl
+            << filtered_param.DebugString();
+  // Create a copy of filtered_param with splits added where necessary.
+  NetParameter param;
+  InsertSplits(filtered_param, &param);
+  // Basically, build all the layers and set up their connections.
+  name_ = param.name();
+  map<string, int> blob_name_to_idx;
+  set<string> available_blobs;
+  CHECK(param.input_dim_size() == 0 || param.input_shape_size() == 0)
+      << "Must specify either input_shape OR deprecated input_dim, not both.";
+  if (param.input_dim_size() > 0) {
+    // Deprecated 4D dimensions.
+    CHECK_EQ(param.input_size() * 4, param.input_dim_size())
+        << "Incorrect input blob dimension specifications.";
+  } else {
+    CHECK_EQ(param.input_size(), param.input_shape_size())
+        << "Exactly one input_shape must be specified per input.";
+  }
+  memory_used_ = 0;
+  // set the input blobs
+  for (int input_id = 0; input_id < param.input_size(); ++input_id) {
+    const int layer_id = -1;  // inputs have fake layer ID -1
+    AppendTop(param, layer_id, input_id, &available_blobs, &blob_name_to_idx);
+  }
+  DLOG(INFO) << "Memory required for data: " << memory_used_ * sizeof(Dtype);
+  // For each layer, set up its input and output
+  bottom_vecs_.resize(param.layer_size());
+  top_vecs_.resize(param.layer_size());
+  bottom_id_vecs_.resize(param.layer_size());
+  param_id_vecs_.resize(param.layer_size());
+  top_id_vecs_.resize(param.layer_size());
+  bottom_need_backward_.resize(param.layer_size());
+  for (int layer_id = 0; layer_id < param.layer_size(); ++layer_id) {
+    // Inherit phase from net if unset.
+    if (!param.layer(layer_id).has_phase()) {
+      param.mutable_layer(layer_id)->set_phase(phase_);
+    }
+    // Setup layer.
+    const LayerParameter& layer_param = param.layer(layer_id);
+    if (layer_param.propagate_down_size() > 0) {
+      CHECK_EQ(layer_param.propagate_down_size(),
+          layer_param.bottom_size())
+          << "propagate_down param must be specified "
+          << "either 0 or bottom_size times ";
+    }
+    layers_.push_back(LayerRegistry<Dtype>::CreateLayer(layer_param));
+    layer_names_.push_back(layer_param.name());
+    LOG(INFO) << "Creating Layer " << layer_param.name();
+    bool need_backward = false;
+
+    // Figure out this layer's input and output
+    for (int bottom_id = 0; bottom_id < layer_param.bottom_size();
+         ++bottom_id) {
+      const int blob_id = AppendBottom(param, layer_id, bottom_id,
+                                       &available_blobs, &blob_name_to_idx);
+      // If a blob needs backward, this layer should provide it.
+      need_backward |= blob_need_backward_[blob_id];
+    }
+    int num_top = layer_param.top_size();
+    for (int top_id = 0; top_id < num_top; ++top_id) {
+      AppendTop(param, layer_id, top_id, &available_blobs, &blob_name_to_idx);
+    }
+    // If the layer declares AutoTopBlobs() == true and the LayerParameter
+    // specifies fewer top blobs than required (as given by
+    // ExactNumTopBlobs() or MinTopBlobs()), allocate the remainder here.
+    Layer<Dtype>* layer = layers_[layer_id].get();
+    if (layer->AutoTopBlobs()) {
+      const int needed_num_top =
+          std::max(layer->MinTopBlobs(), layer->ExactNumTopBlobs());
+      for (; num_top < needed_num_top; ++num_top) {
+        // Add "anonymous" top blobs -- do not modify available_blobs or
+        // blob_name_to_idx as we don't want these blobs to be usable as input
+        // to other layers.
+        AppendTop(param, layer_id, num_top, NULL, NULL);
+      }
+    }
+    // After this layer is connected, set it up.
+    LOG(INFO) << "Setting up " << layer_names_[layer_id];
+    layers_[layer_id]->SetUp(bottom_vecs_[layer_id], top_vecs_[layer_id]);
+    for (int top_id = 0; top_id < top_vecs_[layer_id].size(); ++top_id) {
+      if (blob_loss_weights_.size() <= top_id_vecs_[layer_id][top_id]) {
+        blob_loss_weights_.resize(top_id_vecs_[layer_id][top_id] + 1, Dtype(0));
+      }
+      blob_loss_weights_[top_id_vecs_[layer_id][top_id]] = layer->loss(top_id);
+      LOG(INFO) << "Top shape: " << top_vecs_[layer_id][top_id]->shape_string();
+      if (layer->loss(top_id)) {
+        LOG(INFO) << "    with loss weight " << layer->loss(top_id);
+      }
+      memory_used_ += top_vecs_[layer_id][top_id]->count();
+    }
+    DLOG(INFO) << "Memory required for data: " << memory_used_ * sizeof(Dtype);
+    const int param_size = layer_param.param_size();
+    const int num_param_blobs = layers_[layer_id]->blobs().size();
+    CHECK_LE(param_size, num_param_blobs)
+        << "Too many params specified for layer " << layer_param.name();
+    ParamSpec default_param_spec;
+    for (int param_id = 0; param_id < num_param_blobs; ++param_id) {
+      const ParamSpec* param_spec = (param_id < param_size) ?
+          &layer_param.param(param_id) : &default_param_spec;
+      const bool param_need_backward = param_spec->lr_mult() > 0;
+      need_backward |= param_need_backward;
+      layers_[layer_id]->set_param_propagate_down(param_id,
+                                                  param_need_backward);
+    }
+    for (int param_id = 0; param_id < num_param_blobs; ++param_id) {
+      AppendParam(param, layer_id, param_id);
+    }
+    // Finally, set the backward flag
+    layer_need_backward_.push_back(need_backward);
+    if (need_backward) {
+      for (int top_id = 0; top_id < top_id_vecs_[layer_id].size(); ++top_id) {
+        blob_need_backward_[top_id_vecs_[layer_id][top_id]] = true;
+      }
+    }
+  }
+  // Go through the net backwards to determine which blobs contribute to the
+  // loss.  We can skip backward computation for blobs that don't contribute
+  // to the loss.
+  // Also check whether none of a layer's bottom blobs need backward
+  // computation (possible because of the propagate_down param), so that
+  // backward computation can be skipped for the entire layer.
+  set<string> blobs_under_loss;
+  set<string> blobs_skip_backp;
+  for (int layer_id = layers_.size() - 1; layer_id >= 0; --layer_id) {
+    bool layer_contributes_loss = false;
+    bool layer_skip_propagate_down = true;
+    for (int top_id = 0; top_id < top_vecs_[layer_id].size(); ++top_id) {
+      const string& blob_name = blob_names_[top_id_vecs_[layer_id][top_id]];
+      if (layers_[layer_id]->loss(top_id) ||
+          (blobs_under_loss.find(blob_name) != blobs_under_loss.end())) {
+        layer_contributes_loss = true;
+      }
+      if (blobs_skip_backp.find(blob_name) == blobs_skip_backp.end()) {
+        layer_skip_propagate_down = false;
+      }
+      if (layer_contributes_loss && !layer_skip_propagate_down)
+        break;
+    }
+    // If this layer can skip backward computation, none of its bottom blobs
+    // need backpropagation either.
+    if (layer_need_backward_[layer_id] && layer_skip_propagate_down) {
+      layer_need_backward_[layer_id] = false;
+      for (int bottom_id = 0; bottom_id < bottom_vecs_[layer_id].size();
+               ++bottom_id) {
+        bottom_need_backward_[layer_id][bottom_id] = false;
+      }
+    }
+    if (!layer_contributes_loss) { layer_need_backward_[layer_id] = false; }
+    if (layer_need_backward_[layer_id]) {
+      LOG(INFO) << layer_names_[layer_id] << " needs backward computation.";
+    } else {
+      LOG(INFO) << layer_names_[layer_id]
+                << " does not need backward computation.";
+    }
+    for (int bottom_id = 0; bottom_id < bottom_vecs_[layer_id].size();
+         ++bottom_id) {
+      if (layer_contributes_loss) {
+        const string& blob_name =
+            blob_names_[bottom_id_vecs_[layer_id][bottom_id]];
+        blobs_under_loss.insert(blob_name);
+      } else {
+        bottom_need_backward_[layer_id][bottom_id] = false;
+      }
+      if (!bottom_need_backward_[layer_id][bottom_id]) {
+        const string& blob_name =
+                   blob_names_[bottom_id_vecs_[layer_id][bottom_id]];
+        blobs_skip_backp.insert(blob_name);
+      }
+    }
+  }
+  // Handle force_backward if needed.
+  if (param.force_backward()) {
+    for (int layer_id = 0; layer_id < layers_.size(); ++layer_id) {
+      layer_need_backward_[layer_id] = true;
+      for (int bottom_id = 0;
+           bottom_id < bottom_need_backward_[layer_id].size(); ++bottom_id) {
+        bottom_need_backward_[layer_id][bottom_id] =
+            bottom_need_backward_[layer_id][bottom_id] ||
+            layers_[layer_id]->AllowForceBackward(bottom_id);
+        blob_need_backward_[bottom_id_vecs_[layer_id][bottom_id]] =
+            blob_need_backward_[bottom_id_vecs_[layer_id][bottom_id]] ||
+            bottom_need_backward_[layer_id][bottom_id];
+      }
+      for (int param_id = 0; param_id < layers_[layer_id]->blobs().size();
+           ++param_id) {
+        layers_[layer_id]->set_param_propagate_down(param_id, true);
+      }
+    }
+  }
+  // In the end, all remaining blobs are considered output blobs.
+  for (set<string>::iterator it = available_blobs.begin();
+      it != available_blobs.end(); ++it) {
+    LOG(INFO) << "This network produces output " << *it;
+    net_output_blobs_.push_back(blobs_[blob_name_to_idx[*it]].get());
+    net_output_blob_indices_.push_back(blob_name_to_idx[*it]);
+  }
+  for (size_t blob_id = 0; blob_id < blob_names_.size(); ++blob_id) {
+    blob_names_index_[blob_names_[blob_id]] = blob_id;
+  }
+  for (size_t layer_id = 0; layer_id < layer_names_.size(); ++layer_id) {
+    layer_names_index_[layer_names_[layer_id]] = layer_id;
+  }
+  GetLearningRateAndWeightDecay();
+  debug_info_ = param.debug_info();
+  LOG(INFO) << "Network initialization done.";
+  LOG(INFO) << "Memory required for data: " << memory_used_ * sizeof(Dtype);
+}
+
+template <typename Dtype>
+void Net<Dtype>::FilterNet(const NetParameter& param,
+    NetParameter* param_filtered) {
+  NetState net_state(param.state());
+  param_filtered->CopyFrom(param);
+  param_filtered->clear_layer();
+  for (int i = 0; i < param.layer_size(); ++i) {
+    const LayerParameter& layer_param = param.layer(i);
+    const string& layer_name = layer_param.name();
+    CHECK(layer_param.include_size() == 0 || layer_param.exclude_size() == 0)
+          << "Specify either include rules or exclude rules; not both.";
+    // If no include rules are specified, the layer is included by default and
+    // only excluded if it meets one of the exclude rules.
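+    // (e.g., a hypothetical layer declared as
+    //    layer { name: "data" type: "Data" include { phase: TRAIN } }
+    //  is kept only while the net is in the TRAIN phase)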
+    bool layer_included = (layer_param.include_size() == 0);
+    for (int j = 0; layer_included && j < layer_param.exclude_size(); ++j) {
+      if (StateMeetsRule(net_state, layer_param.exclude(j), layer_name)) {
+        layer_included = false;
+      }
+    }
+    for (int j = 0; !layer_included && j < layer_param.include_size(); ++j) {
+      if (StateMeetsRule(net_state, layer_param.include(j), layer_name)) {
+        layer_included = true;
+      }
+    }
+    if (layer_included) {
+      param_filtered->add_layer()->CopyFrom(layer_param);
+    }
+  }
+}
+
+template <typename Dtype>
+bool Net<Dtype>::StateMeetsRule(const NetState& state,
+    const NetStateRule& rule, const string& layer_name) {
+  // Check whether the rule is broken due to phase.
+  if (rule.has_phase()) {
+      if (rule.phase() != state.phase()) {
+        LOG(INFO) << "The NetState phase (" << state.phase()
+          << ") differed from the phase (" << rule.phase()
+          << ") specified by a rule in layer " << layer_name;
+        return false;
+      }
+  }
+  // Check whether the rule is broken due to min level.
+  if (rule.has_min_level()) {
+    if (state.level() < rule.min_level()) {
+      LOG(INFO) << "The NetState level (" << state.level()
+          << ") is above the min_level (" << rule.min_level()
+          << ") specified by a rule in layer " << layer_name;
+      return false;
+    }
+  }
+  // Check whether the rule is broken due to max level.
+  if (rule.has_max_level()) {
+    if (state.level() > rule.max_level()) {
+      LOG(INFO) << "The NetState level (" << state.level()
+          << ") is above the max_level (" << rule.max_level()
+          << ") specified by a rule in layer " << layer_name;
+      return false;
+    }
+  }
+  // Check whether the rule is broken due to stage. The NetState must
+  // contain ALL of the rule's stages to meet it.
+  for (int i = 0; i < rule.stage_size(); ++i) {
+    // Check that the NetState contains the rule's ith stage.
+    bool has_stage = false;
+    for (int j = 0; !has_stage && j < state.stage_size(); ++j) {
+      if (rule.stage(i) == state.stage(j)) { has_stage = true; }
+    }
+    if (!has_stage) {
+      LOG(INFO) << "The NetState did not contain stage '" << rule.stage(i)
+                << "' specified by a rule in layer " << layer_name;
+      return false;
+    }
+  }
+  // Check whether the rule is broken due to not_stage. The NetState must
+  // contain NONE of the rule's not_stages to meet it.
+  for (int i = 0; i < rule.not_stage_size(); ++i) {
+    // Check whether the NetState contains the rule's ith not_stage.
+    bool has_stage = false;
+    for (int j = 0; !has_stage && j < state.stage_size(); ++j) {
+      if (rule.not_stage(i) == state.stage(j)) { has_stage = true; }
+    }
+    if (has_stage) {
+      LOG(INFO) << "The NetState contained a not_stage '" << rule.not_stage(i)
+                << "' specified by a rule in layer " << layer_name;
+      return false;
+    }
+  }
+  return true;
+}
+
+// Helper for Net::Init: add a new input or top blob to the net.  (Inputs have
+// layer_id == -1, tops have layer_id >= 0.)
+template <typename Dtype>
+void Net<Dtype>::AppendTop(const NetParameter& param, const int layer_id,
+                           const int top_id, set<string>* available_blobs,
+                           map<string, int>* blob_name_to_idx) {
+  shared_ptr<LayerParameter> layer_param((layer_id >= 0) ?
+    (new LayerParameter(param.layer(layer_id))) : NULL);
+  const string& blob_name = layer_param ?
+      (layer_param->top_size() > top_id ?
+          layer_param->top(top_id) : "(automatic)") : param.input(top_id);
+  // Check if we are doing in-place computation
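+  // (e.g., a layer written with bottom: "conv1" and top: "conv1" -- an
+  //  illustrative name -- reuses the existing blob instead of allocating one)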
+  if (blob_name_to_idx && layer_param && layer_param->bottom_size() > top_id &&
+      blob_name == layer_param->bottom(top_id)) {
+    // In-place computation
+    LOG(INFO) << layer_param->name() << " -> " << blob_name << " (in-place)";
+    top_vecs_[layer_id].push_back(blobs_[(*blob_name_to_idx)[blob_name]].get());
+    top_id_vecs_[layer_id].push_back((*blob_name_to_idx)[blob_name]);
+  } else if (blob_name_to_idx &&
+             blob_name_to_idx->find(blob_name) != blob_name_to_idx->end()) {
+    // If we are not doing in-place computation but have duplicated blobs,
+    // raise an error.
+    LOG(FATAL) << "Duplicate blobs produced by multiple sources.";
+  } else {
+    // Normal output.
+    if (layer_param) {
+      LOG(INFO) << layer_param->name() << " -> " << blob_name;
+    } else {
+      LOG(INFO) << "Input " << top_id << " -> " << blob_name;
+    }
+    shared_ptr<Blob<Dtype> > blob_pointer(new Blob<Dtype>());
+    const int blob_id = blobs_.size();
+    blobs_.push_back(blob_pointer);
+    blob_names_.push_back(blob_name);
+    blob_need_backward_.push_back(false);
+    if (blob_name_to_idx) { (*blob_name_to_idx)[blob_name] = blob_id; }
+    if (layer_id == -1) {
+      // Set the (explicitly specified) dimensions of the input blob.
+      if (param.input_dim_size() > 0) {
+        blob_pointer->Reshape(param.input_dim(top_id * 4),
+                              param.input_dim(top_id * 4 + 1),
+                              param.input_dim(top_id * 4 + 2),
+                              param.input_dim(top_id * 4 + 3));
+      } else {
+        blob_pointer->Reshape(param.input_shape(top_id));
+      }
+      net_input_blob_indices_.push_back(blob_id);
+      net_input_blobs_.push_back(blob_pointer.get());
+    } else {
+      top_id_vecs_[layer_id].push_back(blob_id);
+      top_vecs_[layer_id].push_back(blob_pointer.get());
+    }
+  }
+  if (available_blobs) { available_blobs->insert(blob_name); }
+}
+
+// Helper for Net::Init: add a new bottom blob to the net.
+template <typename Dtype>
+int Net<Dtype>::AppendBottom(const NetParameter& param, const int layer_id,
+    const int bottom_id, set<string>* available_blobs,
+    map<string, int>* blob_name_to_idx) {
+  const LayerParameter& layer_param = param.layer(layer_id);
+  const string& blob_name = layer_param.bottom(bottom_id);
+  if (available_blobs->find(blob_name) == available_blobs->end()) {
+    LOG(FATAL) << "Unknown blob input " << blob_name
+               << " (at index " << bottom_id << ") to layer " << layer_id;
+  }
+  const int blob_id = (*blob_name_to_idx)[blob_name];
+  LOG(INFO) << layer_names_[layer_id] << " <- " << blob_name;
+  bottom_vecs_[layer_id].push_back(blobs_[blob_id].get());
+  bottom_id_vecs_[layer_id].push_back(blob_id);
+  available_blobs->erase(blob_name);
+  bool propagate_down = true;
+  // Check if the backpropagation on bottom_id should be skipped
+  if (layer_param.propagate_down_size() > 0)
+    propagate_down = layer_param.propagate_down(bottom_id);
+  const bool need_backward = blob_need_backward_[blob_id] &&
+                          propagate_down;
+  bottom_need_backward_[layer_id].push_back(need_backward);
+  return blob_id;
+}
+
+template <typename Dtype>
+void Net<Dtype>::AppendParam(const NetParameter& param, const int layer_id,
+                             const int param_id) {
+  const LayerParameter& layer_param = layers_[layer_id]->layer_param();
+  const int param_size = layer_param.param_size();
+  string param_name =
+      (param_size > param_id) ? layer_param.param(param_id).name() : "";
+  if (param_name.size()) {
+    param_display_names_.push_back(param_name);
+  } else {
+    ostringstream param_display_name;
+    param_display_name << param_id;
+    param_display_names_.push_back(param_display_name.str());
+  }
+  const int net_param_id = params_.size();
+  params_.push_back(layers_[layer_id]->blobs()[param_id]);
+  param_id_vecs_[layer_id].push_back(net_param_id);
+  param_layer_indices_.push_back(make_pair(layer_id, param_id));
+  if (!param_size || !param_name.size() || (param_name.size() &&
+      param_names_index_.find(param_name) == param_names_index_.end())) {
+    // This layer "owns" this parameter blob -- it is either anonymous
+    // (i.e., not given a param_name) or explicitly given a name that we
+    // haven't already seen.
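+    // (e.g., if two layers both declare param { name: "shared_weights" } --
+    //  an illustrative name -- the first one becomes the owner here and the
+    //  later ones take the sharing branch below)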
+    param_owners_.push_back(-1);
+    if (param_name.size()) {
+      param_names_index_[param_name] = net_param_id;
+    }
+  } else {
+    // Named param blob with name we've seen before: share params
+    const int owner_net_param_id = param_names_index_[param_name];
+    param_owners_.push_back(owner_net_param_id);
+    const pair<int, int>& owner_index =
+        param_layer_indices_[owner_net_param_id];
+    const int owner_layer_id = owner_index.first;
+    const int owner_param_id = owner_index.second;
+    LOG(INFO) << "Sharing parameters '" << param_name << "' owned by "
+              << "layer '" << layer_names_[owner_layer_id] << "', param "
+              << "index " << owner_param_id;
+    Blob<Dtype>* this_blob = layers_[layer_id]->blobs()[param_id].get();
+    Blob<Dtype>* owner_blob =
+        layers_[owner_layer_id]->blobs()[owner_param_id].get();
+    const int param_size = layer_param.param_size();
+    if (param_size > param_id && (layer_param.param(param_id).share_mode() ==
+                                  ParamSpec_DimCheckMode_PERMISSIVE)) {
+      // Permissive dimension checking -- only check counts are the same.
+      CHECK_EQ(this_blob->count(), owner_blob->count())
+          << "Shared parameter blobs must have the same count.";
+    } else {
+      // Strict dimension checking -- all dims must be the same.
+      CHECK(this_blob->shape() == owner_blob->shape());
+    }
+    layers_[layer_id]->blobs()[param_id]->ShareData(
+        *layers_[owner_layer_id]->blobs()[owner_param_id]);
+  }
+}
+
+template <typename Dtype>
+void Net<Dtype>::GetLearningRateAndWeightDecay() {
+  LOG(INFO) << "Collecting Learning Rate and Weight Decay.";
+  ParamSpec default_param_spec;
+  for (int i = 0; i < layers_.size(); ++i) {
+    vector<shared_ptr<Blob<Dtype> > >& layer_blobs = layers_[i]->blobs();
+    for (int j = 0; j < layer_blobs.size(); ++j) {
+      const ParamSpec* param_spec =
+          (layers_[i]->layer_param().param_size() > j) ?
+          &layers_[i]->layer_param().param(j) : &default_param_spec;
+      params_lr_.push_back(param_spec->lr_mult());
+      params_weight_decay_.push_back(param_spec->decay_mult());
+    }
+  }
+}
+
+template <typename Dtype>
+Dtype Net<Dtype>::ForwardFromTo(int start, int end) {
+  CHECK_GE(start, 0);
+  CHECK_LT(end, layers_.size());
+  Dtype loss = 0;
+  if (debug_info_) {
+    for (int i = 0; i < net_input_blobs_.size(); ++i) {
+      InputDebugInfo(i);
+    }
+  }
+  for (int i = start; i <= end; ++i) {
+    // LOG(ERROR) << "Forwarding " << layer_names_[i];
+    Dtype layer_loss = layers_[i]->Forward(bottom_vecs_[i], top_vecs_[i]);
+    loss += layer_loss;
+    if (debug_info_) { ForwardDebugInfo(i); }
+  }
+  return loss;
+}
+
+template <typename Dtype>
+Dtype Net<Dtype>::ForwardFrom(int start) {
+  return ForwardFromTo(start, layers_.size() - 1);
+}
+
+template <typename Dtype>
+Dtype Net<Dtype>::ForwardTo(int end) {
+  return ForwardFromTo(0, end);
+}
+
+template <typename Dtype>
+const vector<Blob<Dtype>*>& Net<Dtype>::ForwardPrefilled(Dtype* loss) {
+  if (loss != NULL) {
+    *loss = ForwardFromTo(0, layers_.size() - 1);
+  } else {
+    ForwardFromTo(0, layers_.size() - 1);
+  }
+  return net_output_blobs_;
+}
+
+template <typename Dtype>
+const vector<Blob<Dtype>*>& Net<Dtype>::Forward(
+    const vector<Blob<Dtype>*> & bottom, Dtype* loss) {
+  // Copy bottom to internal bottom
+  for (int i = 0; i < bottom.size(); ++i) {
+    net_input_blobs_[i]->CopyFrom(*bottom[i]);
+  }
+  return ForwardPrefilled(loss);
+}
+
+template <typename Dtype>
+string Net<Dtype>::Forward(const string& input_blob_protos, Dtype* loss) {
+  BlobProtoVector blob_proto_vec;
+  if (net_input_blobs_.size()) {
+    blob_proto_vec.ParseFromString(input_blob_protos);
+    CHECK_EQ(blob_proto_vec.blobs_size(), net_input_blobs_.size())
+        << "Incorrect input size.";
+    for (int i = 0; i < blob_proto_vec.blobs_size(); ++i) {
+      net_input_blobs_[i]->FromProto(blob_proto_vec.blobs(i));
+    }
+  }
+  ForwardPrefilled(loss);
+  blob_proto_vec.Clear();
+  for (int i = 0; i < net_output_blobs_.size(); ++i) {
+    net_output_blobs_[i]->ToProto(blob_proto_vec.add_blobs());
+  }
+  string output;
+  blob_proto_vec.SerializeToString(&output);
+  return output;
+}
+
+template <typename Dtype>
+void Net<Dtype>::BackwardFromTo(int start, int end) {
+  CHECK_GE(end, 0);
+  CHECK_LT(start, layers_.size());
+  for (int i = start; i >= end; --i) {
+    if (layer_need_backward_[i]) {
+      layers_[i]->Backward(
+          top_vecs_[i], bottom_need_backward_[i], bottom_vecs_[i]);
+      if (debug_info_) { BackwardDebugInfo(i); }
+    }
+  }
+}
+
+template <typename Dtype>
+void Net<Dtype>::InputDebugInfo(const int input_id) {
+  const Blob<Dtype>& blob = *net_input_blobs_[input_id];
+  const string& blob_name = blob_names_[net_input_blob_indices_[input_id]];
+  const Dtype data_abs_val_mean = blob.asum_data() / blob.count();
+  LOG(INFO) << "    [Forward] "
+     << "Input " << blob_name << " data: " << data_abs_val_mean;
+}
+
+template <typename Dtype>
+void Net<Dtype>::ForwardDebugInfo(const int layer_id) {
+  for (int top_id = 0; top_id < top_vecs_[layer_id].size(); ++top_id) {
+    const Blob<Dtype>& blob = *top_vecs_[layer_id][top_id];
+    const string& blob_name = blob_names_[top_id_vecs_[layer_id][top_id]];
+    const Dtype data_abs_val_mean = blob.asum_data() / blob.count();
+    LOG(INFO) << "    [Forward] "
+       << "Layer " << layer_names_[layer_id] << ", top blob " << blob_name
+       << " data: " << data_abs_val_mean;
+  }
+  for (int param_id = 0; param_id < layers_[layer_id]->blobs().size();
+       ++param_id) {
+    const Blob<Dtype>& blob = *layers_[layer_id]->blobs()[param_id];
+    const int net_param_id = param_id_vecs_[layer_id][param_id];
+    const string& blob_name = param_display_names_[net_param_id];
+    const Dtype data_abs_val_mean = blob.asum_data() / blob.count();
+    LOG(INFO) << "    [Forward] "
+       << "Layer " << layer_names_[layer_id] << ", param blob " << blob_name
+       << " data: " << data_abs_val_mean;
+  }
+}
+
+template <typename Dtype>
+void Net<Dtype>::BackwardDebugInfo(const int layer_id) {
+  const vector<Blob<Dtype>*>& bottom_vec = bottom_vecs_[layer_id];
+  for (int bottom_id = 0; bottom_id < bottom_vec.size(); ++bottom_id) {
+    if (!bottom_need_backward_[layer_id][bottom_id]) { continue; }
+    const Blob<Dtype>& blob = *bottom_vec[bottom_id];
+    const string& blob_name = blob_names_[bottom_id_vecs_[layer_id][bottom_id]];
+    const Dtype diff_abs_val_mean = blob.asum_diff() / blob.count();
+    LOG(INFO) << "    [Backward] "
+        << "Layer " << layer_names_[layer_id] << ", bottom blob " << blob_name
+        << " diff: " << diff_abs_val_mean;
+  }
+  for (int param_id = 0; param_id < layers_[layer_id]->blobs().size();
+       ++param_id) {
+    if (!layers_[layer_id]->param_propagate_down(param_id)) { continue; }
+    const Blob<Dtype>& blob = *layers_[layer_id]->blobs()[param_id];
+    const Dtype diff_abs_val_mean = blob.asum_diff() / blob.count();
+    LOG(INFO) << "    [Backward] "
+        << "Layer " << layer_names_[layer_id] << ", param blob " << param_id
+        << " diff: " << diff_abs_val_mean;
+  }
+}
+
+template <typename Dtype>
+void Net<Dtype>::UpdateDebugInfo(const int param_id) {
+  const Blob<Dtype>& blob = *params_[param_id];
+  const int param_owner = param_owners_[param_id];
+  const string& layer_name = layer_names_[param_layer_indices_[param_id].first];
+  const string& param_display_name = param_display_names_[param_id];
+  const Dtype diff_abs_val_mean = blob.asum_diff() / blob.count();
+  if (param_owner < 0) {
+    const Dtype data_abs_val_mean = blob.asum_data() / blob.count();
+    LOG(INFO) << "    [Update] Layer " << layer_name
+        << ", param " << param_display_name
+        << " data: " << data_abs_val_mean << "; diff: " << diff_abs_val_mean;
+  } else {
+    const string& owner_layer_name =
+        layer_names_[param_layer_indices_[param_owner].first];
+    LOG(INFO) << "    [Update] Layer " << layer_name
+        << ", param blob " << param_display_name
+        << " (owned by layer " << owner_layer_name << ", "
+        << "param " << param_display_names_[param_owners_[param_id]] << ")"
+        << " diff: " << diff_abs_val_mean;
+  }
+}
+
+template <typename Dtype>
+void Net<Dtype>::ShareTrainedLayersWith(const Net* other) {
+  int num_source_layers = other->layers().size();
+  for (int i = 0; i < num_source_layers; ++i) {
+    Layer<Dtype>* source_layer = other->layers()[i].get();
+    const string& source_layer_name = other->layer_names()[i];
+    int target_layer_id = 0;
+    while (target_layer_id != layer_names_.size() &&
+        layer_names_[target_layer_id] != source_layer_name) {
+      ++target_layer_id;
+    }
+    if (target_layer_id == layer_names_.size()) {
+      DLOG(INFO) << "Ignoring source layer " << source_layer_name;
+      continue;
+    }
+    DLOG(INFO) << "Copying source layer " << source_layer_name;
+    vector<shared_ptr<Blob<Dtype> > >& target_blobs =
+        layers_[target_layer_id]->blobs();
+    CHECK_EQ(target_blobs.size(), source_layer->blobs().size())
+        << "Incompatible number of blobs for layer " << source_layer_name;
+    for (int j = 0; j < target_blobs.size(); ++j) {
+      Blob<Dtype>* source_blob = source_layer->blobs()[j].get();
+      CHECK(target_blobs[j]->shape() == source_blob->shape());
+      target_blobs[j]->ShareData(*source_blob);
+    }
+  }
+}
+
+template <typename Dtype>
+void Net<Dtype>::BackwardFrom(int start) {
+  BackwardFromTo(start, 0);
+}
+
+template <typename Dtype>
+void Net<Dtype>::BackwardTo(int end) {
+  BackwardFromTo(layers_.size() - 1, end);
+}
+
+template <typename Dtype>
+void Net<Dtype>::Backward() {
+  BackwardFromTo(layers_.size() - 1, 0);
+  if (debug_info_) {
+    Dtype asum_data = 0, asum_diff = 0, sumsq_data = 0, sumsq_diff = 0;
+    for (int i = 0; i < params_.size(); ++i) {
+      if (param_owners_[i] >= 0) { continue; }
+      asum_data += params_[i]->asum_data();
+      asum_diff += params_[i]->asum_diff();
+      sumsq_data += params_[i]->sumsq_data();
+      sumsq_diff += params_[i]->sumsq_diff();
+    }
+    const Dtype l2norm_data = std::sqrt(sumsq_data);
+    const Dtype l2norm_diff = std::sqrt(sumsq_diff);
+    LOG(ERROR) << "    [Backward] All net params (data, diff): "
+        << "L1 norm = (" << asum_data << ", " << asum_diff << "); "
+        << "L2 norm = (" << l2norm_data << ", " << l2norm_diff << ")";
+  }
+}
+
+template <typename Dtype>
+void Net<Dtype>::Reshape() {
+  for (int i = 0; i < layers_.size(); ++i) {
+    layers_[i]->Reshape(bottom_vecs_[i], top_vecs_[i]);
+  }
+}
+
+template <typename Dtype>
+void Net<Dtype>::CopyTrainedLayersFrom(const NetParameter& param) {
+  int num_source_layers = param.layer_size();
+  for (int i = 0; i < num_source_layers; ++i) {
+    const LayerParameter& source_layer = param.layer(i);
+    const string& source_layer_name = source_layer.name();
+    int target_layer_id = 0;
+    while (target_layer_id != layer_names_.size() &&
+        layer_names_[target_layer_id] != source_layer_name) {
+      ++target_layer_id;
+    }
+    if (target_layer_id == layer_names_.size()) {
+      DLOG(INFO) << "Ignoring source layer " << source_layer_name;
+      continue;
+    }
+    DLOG(INFO) << "Copying source layer " << source_layer_name;
+    vector<shared_ptr<Blob<Dtype> > >& target_blobs =
+        layers_[target_layer_id]->blobs();
+    CHECK_EQ(target_blobs.size(), source_layer.blobs_size())
+        << "Incompatible number of blobs for layer " << source_layer_name;
+    for (int j = 0; j < target_blobs.size(); ++j) {
+      const bool kReshape = false;
+      target_blobs[j]->FromProto(source_layer.blobs(j), kReshape);
+    }
+  }
+}
+
+template <typename Dtype>
+void Net<Dtype>::CopyTrainedLayersFrom(const string trained_filename) {
+  NetParameter param;
+  ReadNetParamsFromBinaryFileOrDie(trained_filename, &param);
+  CopyTrainedLayersFrom(param);
+}
+
+template <typename Dtype>
+void Net<Dtype>::ToProto(NetParameter* param, bool write_diff) const {
+  param->Clear();
+  param->set_name(name_);
+  // Add bottom and top
+  for (int i = 0; i < net_input_blob_indices_.size(); ++i) {
+    param->add_input(blob_names_[net_input_blob_indices_[i]]);
+  }
+  DLOG(INFO) << "Serializing " << layers_.size() << " layers";
+  for (int i = 0; i < layers_.size(); ++i) {
+    LayerParameter* layer_param = param->add_layer();
+    for (int j = 0; j < bottom_id_vecs_[i].size(); ++j) {
+      layer_param->add_bottom(blob_names_[bottom_id_vecs_[i][j]]);
+    }
+    for (int j = 0; j < top_id_vecs_[i].size(); ++j) {
+      layer_param->add_top(blob_names_[top_id_vecs_[i][j]]);
+    }
+    layers_[i]->ToProto(layer_param, write_diff);
+  }
+}
+
+template <typename Dtype>
+void Net<Dtype>::Update() {
+  // First, accumulate the diffs of any shared parameters into their owner's
+  // diff. (Assumes that the learning rate, weight decay, etc. have already been
+  // accounted for in the current diff.)
+  for (int i = 0; i < params_.size(); ++i) {
+    if (param_owners_[i] < 0) { continue; }
+    if (debug_info_) { UpdateDebugInfo(i); }
+    const int count = params_[i]->count();
+    const Dtype* this_diff;
+    Dtype* owner_diff;
+    switch (Caffe::mode()) {
+    case Caffe::CPU:
+      this_diff = params_[i]->cpu_diff();
+      owner_diff = params_[param_owners_[i]]->mutable_cpu_diff();
+      caffe_add(count, this_diff, owner_diff, owner_diff);
+      break;
+    case Caffe::GPU:
+#ifndef CPU_ONLY
+      this_diff = params_[i]->gpu_diff();
+      owner_diff = params_[param_owners_[i]]->mutable_gpu_diff();
+      caffe_gpu_add(count, this_diff, owner_diff, owner_diff);
+#else
+      NO_GPU;
+#endif
+      break;
+    default:
+      LOG(FATAL) << "Unknown caffe mode: " << Caffe::mode();
+    }
+  }
+  // Now, update the owned parameters.
+  for (int i = 0; i < params_.size(); ++i) {
+    if (param_owners_[i] >= 0) { continue; }
+    if (debug_info_) { UpdateDebugInfo(i); }
+    params_[i]->Update();
+  }
+}
+
+template <typename Dtype>
+bool Net<Dtype>::has_blob(const string& blob_name) const {
+  return blob_names_index_.find(blob_name) != blob_names_index_.end();
+}
+
+template <typename Dtype>
+const shared_ptr<Blob<Dtype> > Net<Dtype>::blob_by_name(
+    const string& blob_name) const {
+  shared_ptr<Blob<Dtype> > blob_ptr;
+  if (has_blob(blob_name)) {
+    blob_ptr = blobs_[blob_names_index_.find(blob_name)->second];
+  } else {
+    blob_ptr.reset((Blob<Dtype>*)(NULL));
+    LOG(WARNING) << "Unknown blob name " << blob_name;
+  }
+  return blob_ptr;
+}
+
+template <typename Dtype>
+bool Net<Dtype>::has_layer(const string& layer_name) const {
+  return layer_names_index_.find(layer_name) != layer_names_index_.end();
+}
+
+template <typename Dtype>
+const shared_ptr<Layer<Dtype> > Net<Dtype>::layer_by_name(
+    const string& layer_name) const {
+  shared_ptr<Layer<Dtype> > layer_ptr;
+  if (has_layer(layer_name)) {
+    layer_ptr = layers_[layer_names_index_.find(layer_name)->second];
+  } else {
+    layer_ptr.reset((Layer<Dtype>*)(NULL));
+    LOG(WARNING) << "Unknown layer name " << layer_name;
+  }
+  return layer_ptr;
+}
+
+INSTANTIATE_CLASS(Net);
+
+}  // namespace caffe
diff --git a/src/caffe/proto/caffe.proto b/src/caffe/proto/caffe.proto
new file mode 100644
index 0000000..81a8c69
--- /dev/null
+++ b/src/caffe/proto/caffe.proto
@@ -0,0 +1,1109 @@
+syntax = "proto2";
+
+package caffe;
+
+// Specifies the shape (dimensions) of a Blob.
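+// (e.g., a 4-D blob with num 10, channels 3, height 227 and width 227 --
+//  illustrative values -- is written as dim: 10 dim: 3 dim: 227 dim: 227)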
+message BlobShape {
+  repeated int64 dim = 1 [packed = true];
+}
+
+message BlobProto {
+  optional BlobShape shape = 7;
+  repeated float data = 5 [packed = true];
+  repeated float diff = 6 [packed = true];
+
+  // 4D dimensions -- deprecated.  Use "shape" instead.
+  optional int32 num = 1 [default = 0];
+  optional int32 channels = 2 [default = 0];
+  optional int32 height = 3 [default = 0];
+  optional int32 width = 4 [default = 0];
+}
+
+// The BlobProtoVector is simply a way to pass multiple blobproto instances
+// around.
+message BlobProtoVector {
+  repeated BlobProto blobs = 1;
+}
+
+message Datum {
+  optional int32 channels = 1;
+  optional int32 height = 2;
+  optional int32 width = 3;
+  // the actual image data, in bytes
+  optional bytes data = 4;
+  optional int32 label = 5;
+  // Optionally, the datum could also hold float data.
+  repeated float float_data = 6;
+  // If true, data contains an encoded image that needs to be decoded.
+  optional bool encoded = 7 [default = false];
+}
+
+message FillerParameter {
+  // The filler type.
+  optional string type = 1 [default = 'constant'];
+  optional float value = 2 [default = 0]; // the value in constant filler
+  optional float min = 3 [default = 0]; // the min value in uniform filler
+  optional float max = 4 [default = 1]; // the max value in uniform filler
+  optional float mean = 5 [default = 0]; // the mean value in Gaussian filler
+  optional float std = 6 [default = 1]; // the std value in Gaussian filler
+  // The expected number of non-zero output weights for a given input in
+  // Gaussian filler -- the default -1 means don't perform sparsification.
+  optional int32 sparse = 7 [default = -1];
+  // Normalize the filler variance by fan_in, fan_out, or their average.
+  // Applies to 'xavier' and 'msra' fillers.
+  enum VarianceNorm {
+    FAN_IN = 0;
+    FAN_OUT = 1;
+    AVERAGE = 2;
+  }
+  optional VarianceNorm variance_norm = 8 [default = FAN_IN];
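+  // (an illustrative setting: type: "gaussian" mean: 0 std: 0.01 -- the
+  //  enclosing field name, e.g. weight_filler, depends on the layer)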
+}
+
+message NetParameter {
+  optional string name = 1; // consider giving the network a name
+  // The input blobs to the network.
+  repeated string input = 3;
+  // The shape of the input blobs.
+  repeated BlobShape input_shape = 8;
+
+  // 4D input dimensions -- deprecated.  Use "shape" instead.
+  // If specified, for each input blob there should be four
+  // values specifying the num, channels, height and width of the input blob.
+  // Thus, there should be a total of (4 * #input) numbers.
+  repeated int32 input_dim = 4;
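+  // For illustration (hypothetical blob name), a single 4-D input could be
+  // given either as
+  //   input: "data"  input_shape { dim: 10 dim: 3 dim: 227 dim: 227 }
+  // or, in the deprecated form,
+  //   input: "data"  input_dim: 10 input_dim: 3 input_dim: 227 input_dim: 227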
+
+  // Whether the network will force every layer to carry out the backward
+  // operation. If set to false, whether to carry out backward is determined
+  // automatically according to the net structure and learning rates.
+  optional bool force_backward = 5 [default = false];
+  // The current "state" of the network, including the phase, level, and stage.
+  // Some layers may be included/excluded depending on this state and the states
+  // specified in the layers' include and exclude fields.
+  optional NetState state = 6;
+
+  // Print debugging information about results while running Net::Forward,
+  // Net::Backward, and Net::Update.
+  optional bool debug_info = 7 [default = false];
+
+  // The layers that make up the net.  Each of their configurations, including
+  // connectivity and behavior, is specified as a LayerParameter.
+  repeated LayerParameter layer = 100;  // ID 100 so layers are printed last.
+
+  // DEPRECATED: use 'layer' instead.
+  repeated V1LayerParameter layers = 2;
+}
+
+// NOTE
+// Update the next available ID when you add a new SolverParameter field.
+//
+// SolverParameter next available ID: 37 (last added: iter_size)
+message SolverParameter {
+  //////////////////////////////////////////////////////////////////////////////
+  // Specifying the train and test networks
+  //
+  // Exactly one train net must be specified using one of the following fields:
+  //     train_net_param, train_net, net_param, net
+  // One or more test nets may be specified using any of the following fields:
+  //     test_net_param, test_net, net_param, net
+  // If more than one test net field is specified (e.g., both net and
+  // test_net are specified), they will be evaluated in the field order given
+  // above: (1) test_net_param, (2) test_net, (3) net_param/net.
+  // A test_iter must be specified for each test_net.
+  // A test_level and/or a test_stage may also be specified for each test_net.
+  //////////////////////////////////////////////////////////////////////////////
+
+  // Proto filename for the train net, possibly combined with one or more
+  // test nets.
+  optional string net = 24;
+  // Inline train net param, possibly combined with one or more test nets.
+  optional NetParameter net_param = 25;
+
+  optional string train_net = 1; // Proto filename for the train net.
+  repeated string test_net = 2; // Proto filenames for the test nets.
+  optional NetParameter train_net_param = 21; // Inline train net params.
+  repeated NetParameter test_net_param = 22; // Inline test net params.
+
+  // The states for the train/test nets. Must be unspecified or
+  // specified once per net.
+  //
+  // By default, all states will have solver = true;
+  // train_state will have phase = TRAIN,
+  // and all test_state's will have phase = TEST.
+  // Other defaults are set according to the NetState defaults.
+  optional NetState train_state = 26;
+  repeated NetState test_state = 27;
+
+  // The number of iterations for each test net.
+  repeated int32 test_iter = 3;
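+  // (e.g., test_iter: 100 with a test-net batch size of 50 -- illustrative
+  //  values -- makes the solver evaluate 100 * 50 = 5000 test instances per
+  //  test pass)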
+
+  // The number of iterations between two testing phases.
+  optional int32 test_interval = 4 [default = 0];
+  optional bool test_compute_loss = 19 [default = false];
+  // If true, run an initial test pass before the first iteration,
+  // ensuring memory availability and printing the starting value of the loss.
+  optional bool test_initialization = 32 [default = true];
+  optional float base_lr = 5; // The base learning rate
+  // the number of iterations between displaying info. If display = 0, no info
+  // will be displayed.
+  optional int32 display = 6;
+  // Display the loss averaged over the last average_loss iterations
+  optional int32 average_loss = 33 [default = 1];
+  optional int32 max_iter = 7; // the maximum number of iterations
+  // accumulate gradients over `iter_size` x `batch_size` instances
+  optional int32 iter_size = 36 [default = 1];
+  optional string lr_policy = 8; // The learning rate decay policy.
+  optional float gamma = 9; // The parameter to compute the learning rate.
+  optional float power = 10; // The parameter to compute the learning rate.
+  optional float momentum = 11; // The momentum value.
+  optional float weight_decay = 12; // The weight decay.
+  // regularization types supported: L1 and L2
+  // controlled by weight_decay
+  optional string regularization_type = 29 [default = "L2"];
+  // the stepsize for learning rate policy "step"
+  optional int32 stepsize = 13;
+  // the stepsize for learning rate policy "multistep"
+  repeated int32 stepvalue = 34;
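+  // (e.g., lr_policy: "step" with gamma: 0.1 and stepsize: 100000 --
+  //  illustrative values -- multiplies the learning rate by 0.1 every
+  //  100000 iterations, as implemented by the solver rather than this proto)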
+
+  // Set clip_gradients to >= 0 to clip parameter gradients to that L2 norm,
+  // whenever their actual L2 norm is larger.
+  optional float clip_gradients = 35 [default = -1];
+
+  optional int32 snapshot = 14 [default = 0]; // The snapshot interval
+  optional string snapshot_prefix = 15; // The prefix for the snapshot.
+  // Whether to snapshot the diff in the results or not. Snapshotting the diff
+  // helps debugging but makes the final protocol buffer much larger.
+  optional bool snapshot_diff = 16 [default = false];
+  // the mode the solver will use: 0 for CPU and 1 for GPU. GPU is the default.
+  enum SolverMode {
+    CPU = 0;
+    GPU = 1;
+  }
+  optional SolverMode solver_mode = 17 [default = GPU];
+  // the device_id that will be used in GPU mode. device_id = 0 is the default.
+  optional int32 device_id = 18 [default = 0];
+  // If non-negative, the seed with which the Solver will initialize the Caffe
+  // random number generator -- useful for reproducible results. Otherwise,
+  // (and by default) initialize using a seed derived from the system clock.
+  optional int64 random_seed = 20 [default = -1];
+
+  // Solver type
+  enum SolverType {
+    SGD = 0;
+    NESTEROV = 1;
+    ADAGRAD = 2;
+  }
+  optional SolverType solver_type = 30 [default = SGD];
+  // numerical stability for AdaGrad
+  optional float delta = 31 [default = 1e-8];
+
+  // If true, print information about the state of the net that may help with
+  // debugging learning problems.
+  optional bool debug_info = 23 [default = false];
+
+  // If false, don't save a snapshot after training finishes.
+  optional bool snapshot_after_train = 28 [default = true];
+}
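+
+// A minimal sketch of a SolverParameter in text format, using only fields
+// defined above (the "net" field is defined earlier in this file; the path
+// and numeric values are placeholders, not settings shipped here):
+//
+//   net: "examples/train_val.prototxt"
+//   test_iter: 100
+//   test_interval: 500
+//   base_lr: 0.01
+//   lr_policy: "step"
+//   gamma: 0.1
+//   stepsize: 10000
+//   momentum: 0.9
+//   weight_decay: 0.0005
+//   display: 100
+//   max_iter: 45000
+//   snapshot: 5000
+//   snapshot_prefix: "examples/train"
+//   solver_mode: GPU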
+
+// A message that stores the solver snapshots
+message SolverState {
+  optional int32 iter = 1; // The current iteration
+  optional string learned_net = 2; // The file that stores the learned net.
+  repeated BlobProto history = 3; // The history for sgd solvers
+  optional int32 current_step = 4 [default = 0]; // The current step for learning rate
+}
+
+enum Phase {
+   TRAIN = 0;
+   TEST = 1;
+}
+
+message NetState {
+  optional Phase phase = 1 [default = TEST];
+  optional int32 level = 2 [default = 0];
+  repeated string stage = 3;
+}
+
+message NetStateRule {
+  // Set phase to require the NetState have a particular phase (TRAIN or TEST)
+  // to meet this rule.
+  optional Phase phase = 1;
+
+  // Set the minimum and/or maximum levels in which the layer should be used.
+  // Leave undefined to meet the rule regardless of level.
+  optional int32 min_level = 2;
+  optional int32 max_level = 3;
+
+  // Customizable sets of stages to include or exclude.
+  // The net must have ALL of the specified stages and NONE of the specified
+  // "not_stage"s to meet the rule.
+  // (Use multiple NetStateRules to specify conjunctions of stages.)
+  repeated string stage = 4;
+  repeated string not_stage = 5;
+}
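+
+// A minimal sketch of NetStateRules as they are typically attached to a layer
+// via the "include" field of LayerParameter below (the stage name is a
+// placeholder):
+//
+//   include { phase: TRAIN }
+//   include { phase: TEST  stage: "deploy" }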
+
+// Specifies training parameters (multipliers on global learning constants,
+// and the name and other settings used for weight sharing).
+message ParamSpec {
+  // The names of the parameter blobs -- useful for sharing parameters among
+  // layers, but never required otherwise.  To share a parameter between two
+  // layers, give it a (non-empty) name.
+  optional string name = 1;
+
+  // Whether to require shared weights to have the same shape, or just the same
+  // count -- defaults to STRICT if unspecified.
+  optional DimCheckMode share_mode = 2;
+  enum DimCheckMode {
+    // STRICT (default) requires that num, channels, height, width each match.
+    STRICT = 0;
+    // PERMISSIVE requires only the count (num*channels*height*width) to match.
+    PERMISSIVE = 1;
+  }
+
+  // The multiplier on the global learning rate for this parameter.
+  optional float lr_mult = 3 [default = 1.0];
+
+  // The multiplier on the global weight decay for this parameter.
+  optional float decay_mult = 4 [default = 1.0];
+}
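+
+// A minimal sketch of ParamSpec usage inside a layer definition (text
+// format), e.g. the common pattern of a faster-learning bias with no weight
+// decay; the multiplier values are illustrative only:
+//
+//   param { lr_mult: 1  decay_mult: 1 }   # weights
+//   param { lr_mult: 2  decay_mult: 0 }   # bias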
+
+// NOTE
+// Update the next available ID when you add a new LayerParameter field.
+//
+// LayerParameter next available layer-specific ID: 137 (last added: reduction_param)
+message LayerParameter {
+  optional string name = 1; // the layer name
+  optional string type = 2; // the layer type
+  repeated string bottom = 3; // the name of each bottom blob
+  repeated string top = 4; // the name of each top blob
+
+  // The train / test phase for computation.
+  optional Phase phase = 10;
+
+  // The amount of weight to assign each top blob in the objective.
+  // Each layer assigns a default value, usually of either 0 or 1,
+  // to each top blob.
+  repeated float loss_weight = 5;
+
+  // Specifies training parameters (multipliers on global learning constants,
+  // and the name and other settings used for weight sharing).
+  repeated ParamSpec param = 6;
+
+  // The blobs containing the numeric parameters of the layer.
+  repeated BlobProto blobs = 7;
+  
+  // Specifies on which bottoms the backpropagation should be skipped.
+  // The size must be either 0 or equal to the number of bottoms.
+  repeated bool propagate_down = 11;
+
+  // Rules controlling whether and when a layer is included in the network,
+  // based on the current NetState.  You may specify a non-zero number of rules
+  // to include OR exclude, but not both.  If no include or exclude rules are
+  // specified, the layer is always included.  If the current NetState meets
+  // ANY (i.e., one or more) of the specified rules, the layer is
+  // included/excluded.
+  repeated NetStateRule include = 8;
+  repeated NetStateRule exclude = 9;
+
+  // Parameters for data pre-processing.
+  optional TransformationParameter transform_param = 100;
+
+  // Parameters shared by loss layers.
+  optional LossParameter loss_param = 101;
+
+  // Layer type-specific parameters.
+  //
+  // Note: certain layers may have more than one computational engine
+  // for their implementation. These layers include an Engine type and
+  // engine parameter for selecting the implementation.
+  // The default for the engine is set by the ENGINE switch at compile-time.
+  optional AccuracyParameter accuracy_param = 102;
+  optional ArgMaxParameter argmax_param = 103;
+  optional ConcatParameter concat_param = 104;
+  optional ContrastiveLossParameter contrastive_loss_param = 105;
+  optional ConvolutionParameter convolution_param = 106;
+  optional DataParameter data_param = 107;
+  optional DropoutParameter dropout_param = 108;
+  optional DummyDataParameter dummy_data_param = 109;
+  optional EltwiseParameter eltwise_param = 110;
+  optional ExpParameter exp_param = 111;
+  optional FlattenParameter flatten_param = 135;
+  optional HDF5DataParameter hdf5_data_param = 112;
+  optional HDF5OutputParameter hdf5_output_param = 113;
+  optional HingeLossParameter hinge_loss_param = 114;
+  optional ImageDataParameter image_data_param = 115;
+  optional InfogainLossParameter infogain_loss_param = 116;
+  optional InnerProductParameter inner_product_param = 117;
+  optional LogParameter log_param = 134;
+  optional LRNParameter lrn_param = 118;
+  optional MemoryDataParameter memory_data_param = 119;
+  optional MVNParameter mvn_param = 120;
+  optional PoolingParameter pooling_param = 121;
+  optional PowerParameter power_param = 122;
+  optional PReLUParameter prelu_param = 131;
+  optional PythonParameter python_param = 130;
+  optional ReductionParameter reduction_param = 136;
+  optional ReLUParameter relu_param = 123;
+  optional ReshapeParameter reshape_param = 133;
+  optional SigmoidParameter sigmoid_param = 124;
+  optional SoftmaxParameter softmax_param = 125;
+  optional SPPParameter spp_param = 132;
+  optional SliceParameter slice_param = 126;
+  optional TanHParameter tanh_param = 127;
+  optional ThresholdParameter threshold_param = 128;
+  optional WindowDataParameter window_data_param = 129;
+}
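+
+// A minimal sketch of a complete LayerParameter in text format, combining the
+// generic fields above with one of the type-specific parameters below (the
+// layer/blob names and sizes are illustrative only):
+//
+//   layer {
+//     name: "fc1"
+//     type: "InnerProduct"
+//     bottom: "data"
+//     top: "fc1"
+//     param { lr_mult: 1 }
+//     inner_product_param { num_output: 500 }
+//     include { phase: TRAIN }
+//   }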
+
+// Message that stores parameters used to apply transformation
+// to the data layer's data
+message TransformationParameter {
+  // For data pre-processing, we can do simple scaling and subtracting the
+  // data mean, if provided. Note that the mean subtraction is always carried
+  // out before scaling.
+  optional float scale = 1 [default = 1];
+  // Specify if we want to randomly mirror data.
+  optional bool mirror = 2 [default = false];
+  // Specify if we would like to randomly crop an image.
+  optional uint32 crop_size = 3 [default = 0];
+  // mean_file and mean_value cannot be specified at the same time
+  optional string mean_file = 4;
+  // if specified, can be given once (the value is subtracted from all the
+  // channels) or repeated as many times as there are channels (each value is
+  // subtracted from the corresponding channel)
+  repeated float mean_value = 5;
+  // Force the decoded image to have 3 color channels.
+  optional bool force_color = 6 [default = false];
+  // Force the decoded image to have 1 color channel.
+  optional bool force_gray = 7 [default = false];
+}
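+
+// A minimal sketch of a transform_param block (text format); the scale
+// 0.00390625 = 1/256 and the mean file path are illustrative only:
+//
+//   transform_param {
+//     scale: 0.00390625
+//     mirror: true
+//     crop_size: 227
+//     mean_file: "data/mean.binaryproto"
+//   }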
+
+// Message that stores parameters shared by loss layers
+message LossParameter {
+  // If specified, ignore instances with the given label.
+  optional int32 ignore_label = 1;
+  // If true, normalize each batch across all instances (including spatial
+  // dimensions, but not ignored instances); else, divide by batch size only.
+  optional bool normalize = 2 [default = true];
+}
+
+// Messages that store parameters used by individual layer types follow, in
+// alphabetical order.
+
+message AccuracyParameter {
+  // When computing accuracy, count as correct by comparing the true label to
+  // the top k scoring classes.  By default, only compare to the top scoring
+  // class (i.e. argmax).
+  optional uint32 top_k = 1 [default = 1];
+
+  // The "label" axis of the prediction blob, whose argmax corresponds to the
+  // predicted label -- may be negative to index from the end (e.g., -1 for the
+  // last axis).  For example, if axis == 1 and the predictions are
+  // (N x C x H x W), the label blob is expected to contain N*H*W ground truth
+  // labels with integer values in {0, 1, ..., C-1}.
+  optional int32 axis = 2 [default = 1];
+
+  // If specified, ignore instances with the given label.
+  optional int32 ignore_label = 3;
+}
+
+message ArgMaxParameter {
+  // If true, produce pairs (argmax, maxval)
+  optional bool out_max_val = 1 [default = false];
+  optional uint32 top_k = 2 [default = 1];
+}
+
+message ConcatParameter {
+  // The axis along which to concatenate -- may be negative to index from the
+  // end (e.g., -1 for the last axis).  Other axes must have the
+  // same dimension for all the bottom blobs.
+  // By default, ConcatLayer concatenates blobs along the "channels" axis (1).
+  optional int32 axis = 2 [default = 1];
+
+  // DEPRECATED: alias for "axis" -- does not support negative indexing.
+  optional uint32 concat_dim = 1 [default = 1];
+}
+
+message ContrastiveLossParameter {
+  // margin for dissimilar pair
+  optional float margin = 1 [default = 1.0];
+  // The first implementation of this cost did not exactly match the cost of
+  // Hadsell et al 2006 -- using (margin - d^2) instead of (margin - d)^2.
+  // legacy_version = false (the default) uses (margin - d)^2 as proposed in the
+  // Hadsell paper. New models should probably use this version.
+  // legacy_version = true uses (margin - d^2). This is kept to support /
+  // reproduce existing models and results
+  optional bool legacy_version = 2 [default = false]; 
+}
+
+message ConvolutionParameter {
+  optional uint32 num_output = 1; // The number of outputs for the layer
+  optional bool bias_term = 2 [default = true]; // whether to have bias terms
+  // Pad, kernel size, and stride are all given as a single value for equal
+  // dimensions in height and width or as Y, X pairs.
+  optional uint32 pad = 3 [default = 0]; // The padding size (equal in Y, X)
+  optional uint32 pad_h = 9 [default = 0]; // The padding height
+  optional uint32 pad_w = 10 [default = 0]; // The padding width
+  optional uint32 kernel_size = 4; // The kernel size (square)
+  optional uint32 kernel_h = 11; // The kernel height
+  optional uint32 kernel_w = 12; // The kernel width
+  optional uint32 group = 5 [default = 1]; // The group size for group conv
+  optional uint32 stride = 6 [default = 1]; // The stride (equal in Y, X)
+  optional uint32 stride_h = 13; // The stride height
+  optional uint32 stride_w = 14; // The stride width
+  optional FillerParameter weight_filler = 7; // The filler for the weight
+  optional FillerParameter bias_filler = 8; // The filler for the bias
+  enum Engine {
+    DEFAULT = 0;
+    CAFFE = 1;
+    CUDNN = 2;
+  }
+  optional Engine engine = 15 [default = DEFAULT];
+}
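+
+// A minimal sketch of a convolution_param block (text format); the filler
+// type strings are assumed to come from FillerParameter defined earlier in
+// this file, and the sizes are illustrative only:
+//
+//   convolution_param {
+//     num_output: 96
+//     kernel_size: 11
+//     stride: 4
+//     weight_filler { type: "gaussian" std: 0.01 }
+//     bias_filler { type: "constant" value: 0 }
+//   }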
+
+message DataParameter {
+  enum DB {
+    LEVELDB = 0;
+    LMDB = 1;
+  }
+  // Specify the data source.
+  optional string source = 1;
+  // Specify the batch size.
+  optional uint32 batch_size = 4;
+  // The rand_skip variable tells the data layer to skip a few data points
+  // so that asynchronous sgd clients do not all start at the same point. The
+  // skip point is set to rand_skip * rand(0,1). Note that rand_skip should not
+  // be larger than the number of keys in the database.
+  optional uint32 rand_skip = 7 [default = 0];
+  optional DB backend = 8 [default = LEVELDB];
+  // DEPRECATED. See TransformationParameter. For data pre-processing, we can do
+  // simple scaling and subtracting the data mean, if provided. Note that the
+  // mean subtraction is always carried out before scaling.
+  optional float scale = 2 [default = 1];
+  optional string mean_file = 3;
+  // DEPRECATED. See TransformationParameter. Specify if we would like to randomly
+  // crop an image.
+  optional uint32 crop_size = 5 [default = 0];
+  // DEPRECATED. See TransformationParameter. Specify if we want to randomly mirror
+  // data.
+  optional bool mirror = 6 [default = false];
+  // Force the encoded image to have 3 color channels
+  optional bool force_encoded_color = 9 [default = false];
+}
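+
+// A minimal sketch of a data_param block (text format); the source path is a
+// placeholder:
+//
+//   data_param {
+//     source: "examples/train_lmdb"
+//     batch_size: 64
+//     backend: LMDB
+//   }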
+
+message DropoutParameter {
+  optional float dropout_ratio = 1 [default = 0.5]; // dropout ratio
+}
+
+// DummyDataLayer fills any number of arbitrarily shaped blobs with random
+// (or constant) data generated by "Fillers" (see "message FillerParameter").
+message DummyDataParameter {
+  // This layer produces N >= 1 top blobs.  DummyDataParameter must specify 1 or N
+  // shape fields, and 0, 1 or N data_fillers.
+  //
+  // If 0 data_fillers are specified, ConstantFiller with a value of 0 is used.
+  // If 1 data_filler is specified, it is applied to all top blobs.  If N are
+  // specified, the ith is applied to the ith top blob.
+  repeated FillerParameter data_filler = 1;
+  repeated BlobShape shape = 6;
+
+  // 4D dimensions -- deprecated.  Use "shape" instead.
+  repeated uint32 num = 2;
+  repeated uint32 channels = 3;
+  repeated uint32 height = 4;
+  repeated uint32 width = 5;
+}
+
+message EltwiseParameter {
+  enum EltwiseOp {
+    PROD = 0;
+    SUM = 1;
+    MAX = 2;
+  }
+  optional EltwiseOp operation = 1 [default = SUM]; // element-wise operation
+  repeated float coeff = 2; // blob-wise coefficient for SUM operation
+
+  // Whether to use an asymptotically slower (for >2 inputs) but stabler method
+  // of computing the gradient for the PROD operation. (No effect for SUM op.)
+  optional bool stable_prod_grad = 3 [default = true];
+}
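+
+// A minimal sketch of an eltwise_param block (text format) computing a
+// weighted sum of two bottoms, bottom[0] - bottom[1]:
+//
+//   eltwise_param {
+//     operation: SUM
+//     coeff: 1
+//     coeff: -1
+//   }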
+
+message ExpParameter {
+  // ExpLayer computes outputs y = base ^ (shift + scale * x), for base > 0.
+  // Or if base is set to the default (-1), base is set to e,
+  // so y = exp(shift + scale * x).
+  optional float base = 1 [default = -1.0];
+  optional float scale = 2 [default = 1.0];
+  optional float shift = 3 [default = 0.0];
+}
+
+/// Message that stores parameters used by FlattenLayer
+message FlattenParameter {
+  // The first axis to flatten: all preceding axes are retained in the output.
+  // May be negative to index from the end (e.g., -1 for the last axis).
+  optional int32 axis = 1 [default = 1];
+
+  // The last axis to flatten: all following axes are retained in the output.
+  // May be negative to index from the end (e.g., the default -1 for the last
+  // axis).
+  optional int32 end_axis = 2 [default = -1];
+}
+
+// Message that stores parameters used by HDF5DataLayer
+message HDF5DataParameter {
+  // Specify the data source.
+  optional string source = 1;
+  // Specify the batch size.
+  optional uint32 batch_size = 2;
+
+  // Specify whether to shuffle the data.
+  // If shuffle == true, the ordering of the HDF5 files is shuffled,
+  // and the ordering of data within any given HDF5 file is shuffled,
+  // but data between different files are not interleaved; all of a file's
+  // data are output (in a random order) before moving onto another file.
+  optional bool shuffle = 3 [default = false];
+}
+
+message HDF5OutputParameter {
+  optional string file_name = 1;
+}
+
+message HingeLossParameter {
+  enum Norm {
+    L1 = 1;
+    L2 = 2;
+  }
+  // Specify the Norm to use L1 or L2
+  optional Norm norm = 1 [default = L1];
+}
+
+message ImageDataParameter {
+  // Specify the data source.
+  optional string source = 1;
+  // Specify the batch size.
+  optional uint32 batch_size = 4;
+  // The rand_skip variable tells the data layer to skip a few data points
+  // so that asynchronous sgd clients do not all start at the same point. The
+  // skip point is set to rand_skip * rand(0,1). Note that rand_skip should not
+  // be larger than the number of keys in the database.
+  optional uint32 rand_skip = 7 [default = 0];
+  // Whether or not ImageLayer should shuffle the list of files at every epoch.
+  optional bool shuffle = 8 [default = false];
+  // It will also resize images if new_height or new_width are not zero.
+  optional uint32 new_height = 9 [default = 0];
+  optional uint32 new_width = 10 [default = 0];
+  // Specify if the images are color or gray
+  optional bool is_color = 11 [default = true];
+  // DEPRECATED. See TransformationParameter. For data pre-processing, we can do
+  // simple scaling and subtracting the data mean, if provided. Note that the
+  // mean subtraction is always carried out before scaling.
+  optional float scale = 2 [default = 1];
+  optional string mean_file = 3;
+  // DEPRECATED. See TransformationParameter. Specify if we would like to randomly
+  // crop an image.
+  optional uint32 crop_size = 5 [default = 0];
+  // DEPRECATED. See TransformationParameter. Specify if we want to randomly mirror
+  // data.
+  optional bool mirror = 6 [default = false];
+  optional string root_folder = 12 [default = ""];
+}
+
+message InfogainLossParameter {
+  // Specify the infogain matrix source.
+  optional string source = 1;
+}
+
+message InnerProductParameter {
+  optional uint32 num_output = 1; // The number of outputs for the layer
+  optional bool bias_term = 2 [default = true]; // whether to have bias terms
+  optional FillerParameter weight_filler = 3; // The filler for the weight
+  optional FillerParameter bias_filler = 4; // The filler for the bias
+
+  // The first axis to be lumped into a single inner product computation;
+  // all preceding axes are retained in the output.
+  // May be negative to index from the end (e.g., -1 for the last axis).
+  optional int32 axis = 5 [default = 1];
+}
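+
+// A minimal sketch of an inner_product_param block (text format); the
+// "xavier" and "constant" filler types are assumed to come from
+// FillerParameter defined earlier in this file, and the output size is
+// illustrative only:
+//
+//   inner_product_param {
+//     num_output: 1000
+//     weight_filler { type: "xavier" }
+//     bias_filler { type: "constant" value: 0 }
+//   }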
+
+// Message that stores parameters used by LogLayer
+message LogParameter {
+  // LogLayer computes outputs y = log_base(shift + scale * x), for base > 0.
+  // Or if base is set to the default (-1), base is set to e,
+  // so y = ln(shift + scale * x) = log_e(shift + scale * x)
+  optional float base = 1 [default = -1.0];
+  optional float scale = 2 [default = 1.0];
+  optional float shift = 3 [default = 0.0];
+}
+
+// Message that stores parameters used by LRNLayer
+message LRNParameter {
+  optional uint32 local_size = 1 [default = 5];
+  optional float alpha = 2 [default = 1.];
+  optional float beta = 3 [default = 0.75];
+  enum NormRegion {
+    ACROSS_CHANNELS = 0;
+    WITHIN_CHANNEL = 1;
+  }
+  optional NormRegion norm_region = 4 [default = ACROSS_CHANNELS];
+  optional float k = 5 [default = 1.];
+}
+
+message MemoryDataParameter {
+  optional uint32 batch_size = 1;
+  optional uint32 channels = 2;
+  optional uint32 height = 3;
+  optional uint32 width = 4;
+}
+
+message MVNParameter {
+  // This parameter can be set to false to normalize mean only
+  optional bool normalize_variance = 1 [default = true];
+
+  // This parameter can be set to true to perform DNN-like MVN
+  optional bool across_channels = 2 [default = false];
+
+  // Epsilon to avoid dividing by zero while normalizing variance
+  optional float eps = 3 [default = 1e-9];
+}
+
+message PoolingParameter {
+  enum PoolMethod {
+    MAX = 0;
+    AVE = 1;
+    STOCHASTIC = 2;
+  }
+  optional PoolMethod pool = 1 [default = MAX]; // The pooling method
+  // Pad, kernel size, and stride are all given as a single value for equal
+  // dimensions in height and width or as Y, X pairs.
+  optional uint32 pad = 4 [default = 0]; // The padding size (equal in Y, X)
+  optional uint32 pad_h = 9 [default = 0]; // The padding height
+  optional uint32 pad_w = 10 [default = 0]; // The padding width
+  optional uint32 kernel_size = 2; // The kernel size (square)
+  optional uint32 kernel_h = 5; // The kernel height
+  optional uint32 kernel_w = 6; // The kernel width
+  optional uint32 stride = 3 [default = 1]; // The stride (equal in Y, X)
+  optional uint32 stride_h = 7; // The stride height
+  optional uint32 stride_w = 8; // The stride width
+  enum Engine {
+    DEFAULT = 0;
+    CAFFE = 1;
+    CUDNN = 2;
+  }
+  optional Engine engine = 11 [default = DEFAULT];
+  // If global_pooling is set, pool over the entire extent of the bottom by
+  // setting kernel_h = bottom->height and kernel_w = bottom->width
+  optional bool global_pooling = 12 [default = false];
+}
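+
+// A minimal sketch of a pooling_param block (text format) for the common
+// 2x2 max pooling with stride 2:
+//
+//   pooling_param {
+//     pool: MAX
+//     kernel_size: 2
+//     stride: 2
+//   }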
+
+message PowerParameter {
+  // PowerLayer computes outputs y = (shift + scale * x) ^ power.
+  optional float power = 1 [default = 1.0];
+  optional float scale = 2 [default = 1.0];
+  optional float shift = 3 [default = 0.0];
+}
+
+message PythonParameter {
+  optional string module = 1;
+  optional string layer = 2;
+}
+
+// Message that stores parameters used by ReductionLayer
+message ReductionParameter {
+  enum ReductionOp {
+    SUM = 1;
+    ASUM = 2;
+    SUMSQ = 3;
+    MEAN = 4;
+  }
+
+  optional ReductionOp operation = 1 [default = SUM]; // reduction operation
+
+  // The first axis to reduce to a scalar -- may be negative to index from the
+  // end (e.g., -1 for the last axis).
+  // (Currently, only reduction along ALL "tail" axes is supported; reduction
+  // of axis M through N, where N < num_axes - 1, is unsupported.)
+  // Suppose we have an n-axis bottom Blob with shape:
+  //     (d0, d1, d2, ..., d(m-1), dm, d(m+1), ..., d(n-1)).
+  // If axis == m, the output Blob will have shape
+  //     (d0, d1, d2, ..., d(m-1)),
+  // and the ReductionOp operation is performed (d0 * d1 * d2 * ... * d(m-1))
+  // times, each including (dm * d(m+1) * ... * d(n-1)) individual data.
+  // If axis == 0 (the default), the output Blob always has the empty shape
+  // (count 1), performing reduction across the entire input --
+  // often useful for creating new loss functions.
+  optional int32 axis = 2 [default = 0];
+
+  optional float coeff = 3 [default = 1.0]; // coefficient for output
+}
+
+// Message that stores parameters used by ReLULayer
+message ReLUParameter {
+  // Allow non-zero slope for negative inputs to speed up optimization
+  // Described in:
+  // Maas, A. L., Hannun, A. Y., & Ng, A. Y. (2013). Rectifier nonlinearities
+  // improve neural network acoustic models. In ICML Workshop on Deep Learning
+  // for Audio, Speech, and Language Processing.
+  optional float negative_slope = 1 [default = 0];
+  enum Engine {
+    DEFAULT = 0;
+    CAFFE = 1;
+    CUDNN = 2;
+  }
+  optional Engine engine = 2 [default = DEFAULT];
+}
+
+message ReshapeParameter {
+  // Specify the output dimensions. If some of the dimensions are set to 0,
+  // the corresponding dimension from the bottom layer is used (unchanged).
+  // Exactly one dimension may be set to -1, in which case its value is
+  // inferred from the count of the bottom blob and the remaining dimensions.
+  // For example, suppose we want to reshape a 2D blob "input" with shape 2 x 8:
+  //
+  //   layer {
+  //     type: "Reshape" bottom: "input" top: "output"
+  //     reshape_param { ... }
+  //   }
+  //
+  // If "input" is 2D with shape 2 x 8, then the following reshape_param
+  // specifications are all equivalent, producing a 3D blob "output" with shape
+  // 2 x 2 x 4:
+  //
+  //   reshape_param { shape { dim:  2  dim: 2  dim:  4 } }
+  //   reshape_param { shape { dim:  0  dim: 2  dim:  4 } }
+  //   reshape_param { shape { dim:  0  dim: 2  dim: -1 } }
+  //   reshape_param { shape { dim: -1  dim: 0  dim:  2 } }
+  //
+  optional BlobShape shape = 1;
+
+  // axis and num_axes control the portion of the bottom blob's shape that are
+  // replaced by (included in) the reshape. By default (axis == 0 and
+  // num_axes == -1), the entire bottom blob shape is included in the reshape,
+  // and hence the shape field must specify the entire output shape.
+  //
+  // axis may be non-zero to retain some portion of the beginning of the input
+  // shape (and may be negative to index from the end; e.g., -1 to begin the
+  // reshape after the last axis, including nothing in the reshape,
+  // -2 to include only the last axis, etc.).
+  //
+  // For example, suppose "input" is a 2D blob with shape 2 x 8.
+  // Then the following ReshapeLayer specifications are all equivalent,
+  // producing a blob "output" with shape 2 x 2 x 4:
+  //
+  //   reshape_param { shape { dim: 2  dim: 2  dim: 4 } }
+  //   reshape_param { shape { dim: 2  dim: 4 } axis:  1 }
+  //   reshape_param { shape { dim: 2  dim: 4 } axis: -3 }
+  //
+  // num_axes specifies the extent of the reshape.
+  // If num_axes >= 0 (and axis >= 0), the reshape will be performed only on
+  // input axes in the range [axis, axis+num_axes].
+  // num_axes may also be -1, the default, to include all remaining axes
+  // (starting from axis).
+  //
+  // For example, suppose "input" is a 2D blob with shape 2 x 8.
+  // Then the following ReshapeLayer specifications are equivalent,
+  // producing a blob "output" with shape 1 x 2 x 8.
+  //
+  //   reshape_param { shape { dim:  1  dim: 2  dim:  8 } }
+  //   reshape_param { shape { dim:  1  dim: 2  }  num_axes: 1 }
+  //   reshape_param { shape { dim:  1  }  num_axes: 0 }
+  //
+  // On the other hand, these would produce output blob shape 2 x 1 x 8:
+  //
+  //   reshape_param { shape { dim: 2  dim: 1  dim: 8  }  }
+  //   reshape_param { shape { dim: 1 }  axis: 1  num_axes: 0 }
+  //
+  optional int32 axis = 2 [default = 0];
+  optional int32 num_axes = 3 [default = -1];
+}
+
+message SigmoidParameter {
+  enum Engine {
+    DEFAULT = 0;
+    CAFFE = 1;
+    CUDNN = 2;
+  }
+  optional Engine engine = 1 [default = DEFAULT];
+}
+
+message SliceParameter {
+  // The axis along which to slice -- may be negative to index from the end
+  // (e.g., -1 for the last axis).
+  // By default, SliceLayer slices blobs along the "channels" axis (1).
+  optional int32 axis = 3 [default = 1];
+  repeated uint32 slice_point = 2;
+
+  // DEPRECATED: alias for "axis" -- does not support negative indexing.
+  optional uint32 slice_dim = 1 [default = 1];
+}
+
+// Message that stores parameters used by SoftmaxLayer, SoftmaxWithLossLayer
+message SoftmaxParameter {
+  enum Engine {
+    DEFAULT = 0;
+    CAFFE = 1;
+    CUDNN = 2;
+  }
+  optional Engine engine = 1 [default = DEFAULT];
+
+  // The axis along which to perform the softmax -- may be negative to index
+  // from the end (e.g., -1 for the last axis).
+  // Any other axes will be evaluated as independent softmaxes.
+  optional int32 axis = 2 [default = 1];
+}
+
+message TanHParameter {
+  enum Engine {
+    DEFAULT = 0;
+    CAFFE = 1;
+    CUDNN = 2;
+  }
+  optional Engine engine = 1 [default = DEFAULT];
+}
+
+message ThresholdParameter {
+  optional float threshold = 1 [default = 0]; // Strictly positive values
+}
+
+message WindowDataParameter {
+  // Specify the data source.
+  optional string source = 1;
+  // For data pre-processing, we can do simple scaling and subtracting the
+  // data mean, if provided. Note that the mean subtraction is always carried
+  // out before scaling.
+  optional float scale = 2 [default = 1];
+  optional string mean_file = 3;
+  // Specify the batch size.
+  optional uint32 batch_size = 4;
+  // Specify if we would like to randomly crop an image.
+  optional uint32 crop_size = 5 [default = 0];
+  // Specify if we want to randomly mirror data.
+  optional bool mirror = 6 [default = false];
+  // Foreground (object) overlap threshold
+  optional float fg_threshold = 7 [default = 0.5];
+  // Background (non-object) overlap threshold
+  optional float bg_threshold = 8 [default = 0.5];
+  // Fraction of batch that should be foreground objects
+  optional float fg_fraction = 9 [default = 0.25];
+  // Amount of contextual padding to add around a window
+  // (used only by the window_data_layer)
+  optional uint32 context_pad = 10 [default = 0];
+  // Mode for cropping out a detection window
+  // warp: cropped window is warped to a fixed size and aspect ratio
+  // square: the tightest square around the window is cropped
+  optional string crop_mode = 11 [default = "warp"];
+  // cache_images: will load all images in memory for faster access
+  optional bool cache_images = 12 [default = false];
+  // append root_folder to locate images
+  optional string root_folder = 13 [default = ""];
+}
+
+message SPPParameter {
+  enum PoolMethod {
+    MAX = 0;
+    AVE = 1;
+    STOCHASTIC = 2;
+  }
+  optional uint32 pyramid_height = 1;
+  optional PoolMethod pool = 2 [default = MAX]; // The pooling method
+  enum Engine {
+    DEFAULT = 0;
+    CAFFE = 1;
+    CUDNN = 2;
+  }
+  optional Engine engine = 6 [default = DEFAULT];
+}
+
+// DEPRECATED: use LayerParameter.
+message V1LayerParameter {
+  repeated string bottom = 2;
+  repeated string top = 3;
+  optional string name = 4;
+  repeated NetStateRule include = 32;
+  repeated NetStateRule exclude = 33;
+  enum LayerType {
+    NONE = 0;
+    ABSVAL = 35;
+    ACCURACY = 1;
+    ARGMAX = 30;
+    BNLL = 2;
+    CONCAT = 3;
+    CONTRASTIVE_LOSS = 37;
+    CONVOLUTION = 4;
+    DATA = 5;
+    DECONVOLUTION = 39;
+    DROPOUT = 6;
+    DUMMY_DATA = 32;
+    EUCLIDEAN_LOSS = 7;
+    ELTWISE = 25;
+    EXP = 38;
+    FLATTEN = 8;
+    HDF5_DATA = 9;
+    HDF5_OUTPUT = 10;
+    HINGE_LOSS = 28;
+    IM2COL = 11;
+    IMAGE_DATA = 12;
+    INFOGAIN_LOSS = 13;
+    INNER_PRODUCT = 14;
+    LRN = 15;
+    MEMORY_DATA = 29;
+    MULTINOMIAL_LOGISTIC_LOSS = 16;
+    MVN = 34;
+    POOLING = 17;
+    POWER = 26;
+    RELU = 18;
+    SIGMOID = 19;
+    SIGMOID_CROSS_ENTROPY_LOSS = 27;
+    SILENCE = 36;
+    SOFTMAX = 20;
+    SOFTMAX_LOSS = 21;
+    SPLIT = 22;
+    SLICE = 33;
+    TANH = 23;
+    WINDOW_DATA = 24;
+    THRESHOLD = 31;
+  }
+  optional LayerType type = 5;
+  repeated BlobProto blobs = 6;
+  repeated string param = 1001;
+  repeated DimCheckMode blob_share_mode = 1002;
+  enum DimCheckMode {
+    STRICT = 0;
+    PERMISSIVE = 1;
+  }
+  repeated float blobs_lr = 7;
+  repeated float weight_decay = 8;
+  repeated float loss_weight = 35;
+  optional AccuracyParameter accuracy_param = 27;
+  optional ArgMaxParameter argmax_param = 23;
+  optional ConcatParameter concat_param = 9;
+  optional ContrastiveLossParameter contrastive_loss_param = 40;
+  optional ConvolutionParameter convolution_param = 10;
+  optional DataParameter data_param = 11;
+  optional DropoutParameter dropout_param = 12;
+  optional DummyDataParameter dummy_data_param = 26;
+  optional EltwiseParameter eltwise_param = 24;
+  optional ExpParameter exp_param = 41;
+  optional HDF5DataParameter hdf5_data_param = 13;
+  optional HDF5OutputParameter hdf5_output_param = 14;
+  optional HingeLossParameter hinge_loss_param = 29;
+  optional ImageDataParameter image_data_param = 15;
+  optional InfogainLossParameter infogain_loss_param = 16;
+  optional InnerProductParameter inner_product_param = 17;
+  optional LRNParameter lrn_param = 18;
+  optional MemoryDataParameter memory_data_param = 22;
+  optional MVNParameter mvn_param = 34;
+  optional PoolingParameter pooling_param = 19;
+  optional PowerParameter power_param = 21;
+  optional ReLUParameter relu_param = 30;
+  optional SigmoidParameter sigmoid_param = 38;
+  optional SoftmaxParameter softmax_param = 39;
+  optional SliceParameter slice_param = 31;
+  optional TanHParameter tanh_param = 37;
+  optional ThresholdParameter threshold_param = 25;
+  optional WindowDataParameter window_data_param = 20;
+  optional TransformationParameter transform_param = 36;
+  optional LossParameter loss_param = 42;
+  optional V0LayerParameter layer = 1;
+}
+
+// DEPRECATED: V0LayerParameter is the old way of specifying layer parameters
+// in Caffe.  We keep this message type around for legacy support.
+message V0LayerParameter {
+  optional string name = 1; // the layer name
+  optional string type = 2; // the string to specify the layer type
+
+  // Parameters to specify layers with inner products.
+  optional uint32 num_output = 3; // The number of outputs for the layer
+  optional bool biasterm = 4 [default = true]; // whether to have bias terms
+  optional FillerParameter weight_filler = 5; // The filler for the weight
+  optional FillerParameter bias_filler = 6; // The filler for the bias
+
+  optional uint32 pad = 7 [default = 0]; // The padding size
+  optional uint32 kernelsize = 8; // The kernel size
+  optional uint32 group = 9 [default = 1]; // The group size for group conv
+  optional uint32 stride = 10 [default = 1]; // The stride
+  enum PoolMethod {
+    MAX = 0;
+    AVE = 1;
+    STOCHASTIC = 2;
+  }
+  optional PoolMethod pool = 11 [default = MAX]; // The pooling method
+  optional float dropout_ratio = 12 [default = 0.5]; // dropout ratio
+
+  optional uint32 local_size = 13 [default = 5]; // for local response norm
+  optional float alpha = 14 [default = 1.]; // for local response norm
+  optional float beta = 15 [default = 0.75]; // for local response norm
+  optional float k = 22 [default = 1.];
+
+  // For data layers, specify the data source
+  optional string source = 16;
+  // For data pre-processing, we can do simple scaling and subtracting the
+  // data mean, if provided. Note that the mean subtraction is always carried
+  // out before scaling.
+  optional float scale = 17 [default = 1];
+  optional string meanfile = 18;
+  // For data layers, specify the batch size.
+  optional uint32 batchsize = 19;
+  // For data layers, specify if we would like to randomly crop an image.
+  optional uint32 cropsize = 20 [default = 0];
+  // For data layers, specify if we want to randomly mirror data.
+  optional bool mirror = 21 [default = false];
+
+  // The blobs containing the numeric parameters of the layer
+  repeated BlobProto blobs = 50;
+  // The ratio that is multiplied on the global learning rate. If you want to
+  // set the learning ratio for one blob, you need to set it for all blobs.
+  repeated float blobs_lr = 51;
+  // The weight decay that is multiplied on the global weight decay.
+  repeated float weight_decay = 52;
+
+  // The rand_skip variable tells the data layer to skip a few data points
+  // so that asynchronous sgd clients do not all start at the same point. The
+  // skip point is set to rand_skip * rand(0,1). Note that rand_skip should not
+  // be larger than the number of keys in the database.
+  optional uint32 rand_skip = 53 [default = 0];
+
+  // Fields related to detection (det_*)
+  // foreground (object) overlap threshold
+  optional float det_fg_threshold = 54 [default = 0.5];
+  // background (non-object) overlap threshold
+  optional float det_bg_threshold = 55 [default = 0.5];
+  // Fraction of batch that should be foreground objects
+  optional float det_fg_fraction = 56 [default = 0.25];
+
+  // optional bool OBSOLETE_can_clobber = 57 [default = true];
+
+  // Amount of contextual padding to add around a window
+  // (used only by the window_data_layer)
+  optional uint32 det_context_pad = 58 [default = 0];
+
+  // Mode for cropping out a detection window
+  // warp: cropped window is warped to a fixed size and aspect ratio
+  // square: the tightest square around the window is cropped
+  optional string det_crop_mode = 59 [default = "warp"];
+
+  // For ReshapeLayer, one needs to specify the new dimensions.
+  optional int32 new_num = 60 [default = 0];
+  optional int32 new_channels = 61 [default = 0];
+  optional int32 new_height = 62 [default = 0];
+  optional int32 new_width = 63 [default = 0];
+
+  // Whether or not ImageLayer should shuffle the list of files at every epoch.
+  // It will also resize images if new_height or new_width are not zero.
+  optional bool shuffle_images = 64 [default = false];
+
+  // For ConcatLayer, one needs to specify the dimension for concatenation, and
+  // the other dimensions must be the same for all the bottom blobs.
+  // By default it will concatenate blobs along the channels dimension.
+  optional uint32 concat_dim = 65 [default = 1];
+
+  optional HDF5OutputParameter hdf5_output_param = 1001;
+}
+
+message PReLUParameter {
+  // Parametric ReLU described in K. He et al, Delving Deep into Rectifiers:
+  // Surpassing Human-Level Performance on ImageNet Classification, 2015.
+
+  // Initial value of a_i. Default is a_i=0.25 for all i.
+  optional FillerParameter filler = 1;
+  // Whether or not slope parameters are shared across channels.
+  optional bool channel_shared = 2 [default = false];
+}
diff --git a/src/caffe/solver.cpp b/src/caffe/solver.cpp
new file mode 100644
index 0000000..aabe0ed
--- /dev/null
+++ b/src/caffe/solver.cpp
@@ -0,0 +1,783 @@
+#include <cstdio>
+
+#include <algorithm>
+#include <string>
+#include <vector>
+
+#include "caffe/net.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/solver.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/util/upgrade_proto.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+Solver<Dtype>::Solver(const SolverParameter& param)
+    : net_() {
+  Init(param);
+}
+
+template <typename Dtype>
+Solver<Dtype>::Solver(const string& param_file)
+    : net_() {
+  SolverParameter param;
+  ReadProtoFromTextFileOrDie(param_file, &param);
+  Init(param);
+}
+
+template <typename Dtype>
+void Solver<Dtype>::Init(const SolverParameter& param) {
+  LOG(INFO) << "Initializing solver from parameters: " << std::endl
+            << param.DebugString();
+  param_ = param;
+  CHECK_GE(param_.average_loss(), 1) << "average_loss should be at least 1.";
+  if (param_.random_seed() >= 0) {
+    Caffe::set_random_seed(param_.random_seed());
+  }
+  // Scaffolding code
+  InitTrainNet();
+  InitTestNets();
+  LOG(INFO) << "Solver scaffolding done.";
+  iter_ = 0;
+  current_step_ = 0;
+}
+
+template <typename Dtype>
+void Solver<Dtype>::InitTrainNet() {
+  const int num_train_nets = param_.has_net() + param_.has_net_param() +
+      param_.has_train_net() + param_.has_train_net_param();
+  const string& field_names = "net, net_param, train_net, train_net_param";
+  CHECK_GE(num_train_nets, 1) << "SolverParameter must specify a train net "
+      << "using one of these fields: " << field_names;
+  CHECK_LE(num_train_nets, 1) << "SolverParameter must not contain more than "
+      << "one of these fields specifying a train_net: " << field_names;
+  NetParameter net_param;
+  if (param_.has_train_net_param()) {
+    LOG(INFO) << "Creating training net specified in train_net_param.";
+    net_param.CopyFrom(param_.train_net_param());
+  } else if (param_.has_train_net()) {
+    LOG(INFO) << "Creating training net from train_net file: "
+              << param_.train_net();
+    ReadNetParamsFromTextFileOrDie(param_.train_net(), &net_param);
+  }
+  if (param_.has_net_param()) {
+    LOG(INFO) << "Creating training net specified in net_param.";
+    net_param.CopyFrom(param_.net_param());
+  }
+  if (param_.has_net()) {
+    LOG(INFO) << "Creating training net from net file: " << param_.net();
+    ReadNetParamsFromTextFileOrDie(param_.net(), &net_param);
+  }
+  // Set the correct NetState.  We start with the solver defaults (lowest
+  // precedence); then, merge in any NetState specified by the net_param itself;
+  // finally, merge in any NetState specified by the train_state (highest
+  // precedence).
+  NetState net_state;
+  net_state.set_phase(TRAIN);
+  net_state.MergeFrom(net_param.state());
+  net_state.MergeFrom(param_.train_state());
+  net_param.mutable_state()->CopyFrom(net_state);
+  net_.reset(new Net<Dtype>(net_param));
+}
+
+template <typename Dtype>
+void Solver<Dtype>::InitTestNets() {
+  const bool has_net_param = param_.has_net_param();
+  const bool has_net_file = param_.has_net();
+  const int num_generic_nets = has_net_param + has_net_file;
+  CHECK_LE(num_generic_nets, 1)
+      << "Both net_param and net_file may not be specified.";
+  const int num_test_net_params = param_.test_net_param_size();
+  const int num_test_net_files = param_.test_net_size();
+  const int num_test_nets = num_test_net_params + num_test_net_files;
+  if (num_generic_nets) {
+      CHECK_GE(param_.test_iter_size(), num_test_nets)
+          << "test_iter must be specified for each test network.";
+  } else {
+      CHECK_EQ(param_.test_iter_size(), num_test_nets)
+          << "test_iter must be specified for each test network.";
+  }
+  // If we have a generic net (specified by net or net_param, rather than
+  // test_net or test_net_param), we may have an unlimited number of actual
+  // test networks -- the actual number is given by the number of remaining
+  // test_iters after any test nets specified by test_net_param and/or test_net
+  // are evaluated.
+  const int num_generic_net_instances = param_.test_iter_size() - num_test_nets;
+  const int num_test_net_instances = num_test_nets + num_generic_net_instances;
+  if (param_.test_state_size()) {
+    CHECK_EQ(param_.test_state_size(), num_test_net_instances)
+        << "test_state must be unspecified or specified once per test net.";
+  }
+  if (num_test_net_instances) {
+    CHECK_GT(param_.test_interval(), 0);
+  }
+  int test_net_id = 0;
+  vector<string> sources(num_test_net_instances);
+  vector<NetParameter> net_params(num_test_net_instances);
+  for (int i = 0; i < num_test_net_params; ++i, ++test_net_id) {
+      sources[test_net_id] = "test_net_param";
+      net_params[test_net_id].CopyFrom(param_.test_net_param(i));
+  }
+  for (int i = 0; i < num_test_net_files; ++i, ++test_net_id) {
+      sources[test_net_id] = "test_net file: " + param_.test_net(i);
+      ReadNetParamsFromTextFileOrDie(param_.test_net(i),
+          &net_params[test_net_id]);
+  }
+  const int remaining_test_nets = param_.test_iter_size() - test_net_id;
+  if (has_net_param) {
+    for (int i = 0; i < remaining_test_nets; ++i, ++test_net_id) {
+      sources[test_net_id] = "net_param";
+      net_params[test_net_id].CopyFrom(param_.net_param());
+    }
+  }
+  if (has_net_file) {
+    for (int i = 0; i < remaining_test_nets; ++i, ++test_net_id) {
+      sources[test_net_id] = "net file: " + param_.net();
+      ReadNetParamsFromTextFileOrDie(param_.net(), &net_params[test_net_id]);
+    }
+  }
+  test_nets_.resize(num_test_net_instances);
+  for (int i = 0; i < num_test_net_instances; ++i) {
+    // Set the correct NetState.  We start with the solver defaults (lowest
+    // precedence); then, merge in any NetState specified by the net_param
+    // itself; finally, merge in any NetState specified by the test_state
+    // (highest precedence).
+    NetState net_state;
+    net_state.set_phase(TEST);
+    net_state.MergeFrom(net_params[i].state());
+    if (param_.test_state_size()) {
+      net_state.MergeFrom(param_.test_state(i));
+    }
+    net_params[i].mutable_state()->CopyFrom(net_state);
+    LOG(INFO)
+        << "Creating test net (#" << i << ") specified by " << sources[i];
+    test_nets_[i].reset(new Net<Dtype>(net_params[i]));
+    test_nets_[i]->set_debug_info(param_.debug_info());
+  }
+}
+
+template <typename Dtype>
+void Solver<Dtype>::Step(int iters) {
+  vector<Blob<Dtype>*> bottom_vec;
+  const int start_iter = iter_;
+  const int stop_iter = iter_ + iters;
+  int average_loss = this->param_.average_loss();
+  vector<Dtype> losses;
+  Dtype smoothed_loss = 0;
+
+  while (iter_ < stop_iter) {
+    // zero-init the params
+    for (int i = 0; i < net_->params().size(); ++i) {
+      shared_ptr<Blob<Dtype> > blob = net_->params()[i];
+      switch (Caffe::mode()) {
+      case Caffe::CPU:
+        caffe_set(blob->count(), static_cast<Dtype>(0),
+            blob->mutable_cpu_diff());
+        break;
+      case Caffe::GPU:
+#ifndef CPU_ONLY
+        caffe_gpu_set(blob->count(), static_cast<Dtype>(0),
+            blob->mutable_gpu_diff());
+#else
+        NO_GPU;
+#endif
+        break;
+      }
+    }
+
+    if (param_.test_interval() && iter_ % param_.test_interval() == 0
+        && (iter_ > 0 || param_.test_initialization())) {
+      TestAll();
+    }
+
+    const bool display = param_.display() && iter_ % param_.display() == 0;
+    net_->set_debug_info(display && param_.debug_info());
+    // accumulate the loss and gradient
+    Dtype loss = 0;
+    for (int i = 0; i < param_.iter_size(); ++i) {
+      loss += net_->ForwardBackward(bottom_vec);
+    }
+    loss /= param_.iter_size();
+    // average the loss across iterations for smoothed reporting
+    if (losses.size() < average_loss) {
+      losses.push_back(loss);
+      int size = losses.size();
+      smoothed_loss = (smoothed_loss * (size - 1) + loss) / size;
+    } else {
+      int idx = (iter_ - start_iter) % average_loss;
+      smoothed_loss += (loss - losses[idx]) / average_loss;
+      losses[idx] = loss;
+    }
+    if (display) {
+      LOG(INFO) << "Iteration " << iter_ << ", loss = " << smoothed_loss;
+      const vector<Blob<Dtype>*>& result = net_->output_blobs();
+      int score_index = 0;
+      for (int j = 0; j < result.size(); ++j) {
+        const Dtype* result_vec = result[j]->cpu_data();
+        const string& output_name =
+            net_->blob_names()[net_->output_blob_indices()[j]];
+        const Dtype loss_weight =
+            net_->blob_loss_weights()[net_->output_blob_indices()[j]];
+        for (int k = 0; k < result[j]->count(); ++k) {
+          ostringstream loss_msg_stream;
+          if (loss_weight) {
+            loss_msg_stream << " (* " << loss_weight
+                            << " = " << loss_weight * result_vec[k] << " loss)";
+          }
+          LOG(INFO) << "    Train net output #"
+              << score_index++ << ": " << output_name << " = "
+              << result_vec[k] << loss_msg_stream.str();
+        }
+      }
+    }
+    ApplyUpdate();
+
+    // Increment the internal iter_ counter -- its value should always indicate
+    // the number of times the weights have been updated.
+    ++iter_;
+
+    // Save a snapshot if needed.
+    if (param_.snapshot() && iter_ % param_.snapshot() == 0) {
+      Snapshot();
+    }
+  }
+}
+
+template <typename Dtype>
+void Solver<Dtype>::Solve(const char* resume_file) {
+  LOG(INFO) << "Solving " << net_->name();
+  LOG(INFO) << "Learning Rate Policy: " << param_.lr_policy();
+
+  if (resume_file) {
+    LOG(INFO) << "Restoring previous solver status from " << resume_file;
+    Restore(resume_file);
+  }
+
+  // For a network that is trained by the solver, no bottom or top vecs
+  // should be given, and we will just provide dummy vecs.
+  Step(param_.max_iter() - iter_);
+  // If we haven't already, save a snapshot after optimization, unless
+  // overridden by setting snapshot_after_train := false
+  if (param_.snapshot_after_train()
+      && (!param_.snapshot() || iter_ % param_.snapshot() != 0)) {
+    Snapshot();
+  }
+  // After the optimization is done, run an additional train and test pass to
+  // display the train and test loss/outputs if appropriate (based on the
+  // display and test_interval settings, respectively).  Unlike in the rest of
+  // training, for the train net we only run a forward pass as we've already
+  // updated the parameters "max_iter" times -- this final pass is only done to
+  // display the loss, which is computed in the forward pass.
+  if (param_.display() && iter_ % param_.display() == 0) {
+    Dtype loss;
+    net_->ForwardPrefilled(&loss);
+    LOG(INFO) << "Iteration " << iter_ << ", loss = " << loss;
+  }
+  if (param_.test_interval() && iter_ % param_.test_interval() == 0) {
+    TestAll();
+  }
+  LOG(INFO) << "Optimization Done.";
+}
+
+
+template <typename Dtype>
+void Solver<Dtype>::TestAll() {
+  for (int test_net_id = 0; test_net_id < test_nets_.size(); ++test_net_id) {
+    Test(test_net_id);
+  }
+}
+
+template <typename Dtype>
+void Solver<Dtype>::Test(const int test_net_id) {
+  LOG(INFO) << "Iteration " << iter_
+            << ", Testing net (#" << test_net_id << ")";
+  CHECK_NOTNULL(test_nets_[test_net_id].get())->
+      ShareTrainedLayersWith(net_.get());
+  vector<Dtype> test_score;
+  vector<int> test_score_output_id;
+  vector<Blob<Dtype>*> bottom_vec;
+  const shared_ptr<Net<Dtype> >& test_net = test_nets_[test_net_id];
+  Dtype loss = 0;
+  for (int i = 0; i < param_.test_iter(test_net_id); ++i) {
+    Dtype iter_loss;
+    const vector<Blob<Dtype>*>& result =
+        test_net->Forward(bottom_vec, &iter_loss);
+    if (param_.test_compute_loss()) {
+      loss += iter_loss;
+    }
+    if (i == 0) {
+      for (int j = 0; j < result.size(); ++j) {
+        const Dtype* result_vec = result[j]->cpu_data();
+        for (int k = 0; k < result[j]->count(); ++k) {
+          test_score.push_back(result_vec[k]);
+          test_score_output_id.push_back(j);
+        }
+      }
+    } else {
+      int idx = 0;
+      for (int j = 0; j < result.size(); ++j) {
+        const Dtype* result_vec = result[j]->cpu_data();
+        for (int k = 0; k < result[j]->count(); ++k) {
+          test_score[idx++] += result_vec[k];
+        }
+      }
+    }
+  }
+  if (param_.test_compute_loss()) {
+    loss /= param_.test_iter(test_net_id);
+    LOG(INFO) << "Test loss: " << loss;
+  }
+  for (int i = 0; i < test_score.size(); ++i) {
+    const int output_blob_index =
+        test_net->output_blob_indices()[test_score_output_id[i]];
+    const string& output_name = test_net->blob_names()[output_blob_index];
+    const Dtype loss_weight = test_net->blob_loss_weights()[output_blob_index];
+    ostringstream loss_msg_stream;
+    const Dtype mean_score = test_score[i] / param_.test_iter(test_net_id);
+    if (loss_weight) {
+      loss_msg_stream << " (* " << loss_weight
+                      << " = " << loss_weight * mean_score << " loss)";
+    }
+    LOG(INFO) << "    Test net output #" << i << ": " << output_name << " = "
+        << mean_score << loss_msg_stream.str();
+  }
+}
+
+
+template <typename Dtype>
+void Solver<Dtype>::Snapshot() {
+  NetParameter net_param;
+  // For intermediate results, we will also dump the gradient values.
+  net_->ToProto(&net_param, param_.snapshot_diff());
+  string filename(param_.snapshot_prefix());
+  string model_filename, snapshot_filename;
+  const int kBufferSize = 20;
+  char iter_str_buffer[kBufferSize];
+  snprintf(iter_str_buffer, kBufferSize, "_iter_%d", iter_);
+  filename += iter_str_buffer;
+  model_filename = filename + ".caffemodel";
+  LOG(INFO) << "Snapshotting to " << model_filename;
+  WriteProtoToBinaryFile(net_param, model_filename.c_str());
+  SolverState state;
+  SnapshotSolverState(&state);
+  state.set_iter(iter_);
+  state.set_learned_net(model_filename);
+  state.set_current_step(current_step_);
+  snapshot_filename = filename + ".solverstate";
+  LOG(INFO) << "Snapshotting solver state to " << snapshot_filename;
+  WriteProtoToBinaryFile(state, snapshot_filename.c_str());
+}
+
+template <typename Dtype>
+void Solver<Dtype>::Restore(const char* state_file) {
+  SolverState state;
+  NetParameter net_param;
+  ReadProtoFromBinaryFile(state_file, &state);
+  if (state.has_learned_net()) {
+    ReadNetParamsFromBinaryFileOrDie(state.learned_net().c_str(), &net_param);
+    net_->CopyTrainedLayersFrom(net_param);
+  }
+  iter_ = state.iter();
+  current_step_ = state.current_step();
+  RestoreSolverState(state);
+}
+
+
+// Return the current learning rate. The currently implemented learning rate
+// policies are as follows:
+//    - fixed: always return base_lr.
+//    - step: return base_lr * gamma ^ (floor(iter / step))
+//    - exp: return base_lr * gamma ^ iter
+//    - inv: return base_lr * (1 + gamma * iter) ^ (- power)
+//    - multistep: similar to step but it allows non uniform steps defined by
+//      stepvalue
+//    - poly: the effective learning rate follows a polynomial decay, reaching
+//      zero at max_iter. return base_lr * (1 - iter/max_iter) ^ power
+//    - sigmoid: the effective learning rate follows a sigmoid decay
+//      return base_lr * (1 / (1 + exp(-gamma * (iter - stepsize))))
+//
+// where base_lr, max_iter, gamma, step, stepvalue and power are defined
+// in the solver parameter protocol buffer, and iter is the current iteration.
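+//
+// A worked example (values are illustrative only): with base_lr = 0.01,
+// gamma = 0.1, stepsize = 10000 and lr_policy = "step", the rate at
+// iteration 25000 is 0.01 * 0.1 ^ floor(25000 / 10000) = 0.01 * 0.01 = 1e-4.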
+template <typename Dtype>
+Dtype SGDSolver<Dtype>::GetLearningRate() {
+  Dtype rate;
+  const string& lr_policy = this->param_.lr_policy();
+  if (lr_policy == "fixed") {
+    rate = this->param_.base_lr();
+  } else if (lr_policy == "step") {
+    this->current_step_ = this->iter_ / this->param_.stepsize();
+    rate = this->param_.base_lr() *
+        pow(this->param_.gamma(), this->current_step_);
+  } else if (lr_policy == "exp") {
+    rate = this->param_.base_lr() * pow(this->param_.gamma(), this->iter_);
+  } else if (lr_policy == "inv") {
+    rate = this->param_.base_lr() *
+        pow(Dtype(1) + this->param_.gamma() * this->iter_,
+            - this->param_.power());
+  } else if (lr_policy == "multistep") {
+    if (this->current_step_ < this->param_.stepvalue_size() &&
+          this->iter_ >= this->param_.stepvalue(this->current_step_)) {
+      this->current_step_++;
+      LOG(INFO) << "MultiStep Status: Iteration " <<
+      this->iter_ << ", step = " << this->current_step_;
+    }
+    rate = this->param_.base_lr() *
+        pow(this->param_.gamma(), this->current_step_);
+  } else if (lr_policy == "poly") {
+    rate = this->param_.base_lr() * pow(Dtype(1.) -
+        (Dtype(this->iter_) / Dtype(this->param_.max_iter())),
+        this->param_.power());
+  } else if (lr_policy == "sigmoid") {
+    rate = this->param_.base_lr() * (Dtype(1.) /
+        (Dtype(1.) + exp(-this->param_.gamma() * (Dtype(this->iter_) -
+          Dtype(this->param_.stepsize())))));
+  } else {
+    LOG(FATAL) << "Unknown learning rate policy: " << lr_policy;
+  }
+  return rate;
+}
+
+template <typename Dtype>
+void SGDSolver<Dtype>::PreSolve() {
+  // Initialize the history
+  const vector<shared_ptr<Blob<Dtype> > >& net_params = this->net_->params();
+  history_.clear();
+  update_.clear();
+  temp_.clear();
+  for (int i = 0; i < net_params.size(); ++i) {
+    const vector<int>& shape = net_params[i]->shape();
+    history_.push_back(shared_ptr<Blob<Dtype> >(new Blob<Dtype>(shape)));
+    update_.push_back(shared_ptr<Blob<Dtype> >(new Blob<Dtype>(shape)));
+    temp_.push_back(shared_ptr<Blob<Dtype> >(new Blob<Dtype>(shape)));
+  }
+}
+
+template <typename Dtype>
+void SGDSolver<Dtype>::ClipGradients() {
+  const Dtype clip_gradients = this->param_.clip_gradients();
+  if (clip_gradients < 0) { return; }
+  const vector<shared_ptr<Blob<Dtype> > >& net_params = this->net_->params();
+  Dtype sumsq_diff = 0;
+  for (int i = 0; i < net_params.size(); ++i) {
+    if (this->net_->param_owners()[i] < 0) {
+      sumsq_diff += net_params[i]->sumsq_diff();
+    }
+  }
+  const Dtype l2norm_diff = std::sqrt(sumsq_diff);
+  if (l2norm_diff > clip_gradients) {
+    Dtype scale_factor = clip_gradients / l2norm_diff;
+    LOG(INFO) << "Gradient clipping: scaling down gradients (L2 norm "
+        << l2norm_diff << " > " << clip_gradients << ") "
+        << "by scale factor " << scale_factor;
+    for (int i = 0; i < net_params.size(); ++i) {
+      if (this->net_->param_owners()[i] < 0) {
+        net_params[i]->scale_diff(scale_factor);
+      }
+    }
+  }
+}
+
+template <typename Dtype>
+void SGDSolver<Dtype>::ApplyUpdate() {
+  Dtype rate = GetLearningRate();
+  if (this->param_.display() && this->iter_ % this->param_.display() == 0) {
+    LOG(INFO) << "Iteration " << this->iter_ << ", lr = " << rate;
+  }
+  ClipGradients();
+  for (int param_id = 0; param_id < this->net_->params().size(); ++param_id) {
+    Normalize(param_id);
+    Regularize(param_id);
+    ComputeUpdateValue(param_id, rate);
+  }
+  this->net_->Update();
+}
+
+template <typename Dtype>
+void SGDSolver<Dtype>::Normalize(int param_id) {
+  if (this->param_.iter_size() == 1) { return; }
+  // Scale gradient to counterbalance accumulation.
+  const vector<shared_ptr<Blob<Dtype> > >& net_params = this->net_->params();
+  const Dtype accum_normalization = Dtype(1.) / this->param_.iter_size();
+  switch (Caffe::mode()) {
+  case Caffe::CPU: {
+    caffe_scal(net_params[param_id]->count(), accum_normalization,
+        net_params[param_id]->mutable_cpu_diff());
+    break;
+  }
+  case Caffe::GPU: {
+#ifndef CPU_ONLY
+    caffe_gpu_scal(net_params[param_id]->count(), accum_normalization,
+        net_params[param_id]->mutable_gpu_diff());
+#else
+    NO_GPU;
+#endif
+    break;
+  }
+  default:
+    LOG(FATAL) << "Unknown caffe mode: " << Caffe::mode();
+  }
+}
+
+template <typename Dtype>
+void SGDSolver<Dtype>::Regularize(int param_id) {
+  const vector<shared_ptr<Blob<Dtype> > >& net_params = this->net_->params();
+  const vector<float>& net_params_weight_decay =
+      this->net_->params_weight_decay();
+  Dtype weight_decay = this->param_.weight_decay();
+  string regularization_type = this->param_.regularization_type();
+  Dtype local_decay = weight_decay * net_params_weight_decay[param_id];
+  switch (Caffe::mode()) {
+  case Caffe::CPU: {
+    if (local_decay) {
+      if (regularization_type == "L2") {
+        // add weight decay
+        caffe_axpy(net_params[param_id]->count(),
+            local_decay,
+            net_params[param_id]->cpu_data(),
+            net_params[param_id]->mutable_cpu_diff());
+      } else if (regularization_type == "L1") {
+        caffe_cpu_sign(net_params[param_id]->count(),
+            net_params[param_id]->cpu_data(),
+            temp_[param_id]->mutable_cpu_data());
+        caffe_axpy(net_params[param_id]->count(),
+            local_decay,
+            temp_[param_id]->cpu_data(),
+            net_params[param_id]->mutable_cpu_diff());
+      } else {
+        LOG(FATAL) << "Unknown regularization type: " << regularization_type;
+      }
+    }
+    break;
+  }
+  case Caffe::GPU: {
+#ifndef CPU_ONLY
+    if (local_decay) {
+      if (regularization_type == "L2") {
+        // add weight decay
+        caffe_gpu_axpy(net_params[param_id]->count(),
+            local_decay,
+            net_params[param_id]->gpu_data(),
+            net_params[param_id]->mutable_gpu_diff());
+      } else if (regularization_type == "L1") {
+        caffe_gpu_sign(net_params[param_id]->count(),
+            net_params[param_id]->gpu_data(),
+            temp_[param_id]->mutable_gpu_data());
+        caffe_gpu_axpy(net_params[param_id]->count(),
+            local_decay,
+            temp_[param_id]->gpu_data(),
+            net_params[param_id]->mutable_gpu_diff());
+      } else {
+        LOG(FATAL) << "Unknown regularization type: " << regularization_type;
+      }
+    }
+#else
+    NO_GPU;
+#endif
+    break;
+  }
+  default:
+    LOG(FATAL) << "Unknown caffe mode: " << Caffe::mode();
+  }
+}
+
+template <typename Dtype>
+void SGDSolver<Dtype>::ComputeUpdateValue(int param_id, Dtype rate) {
+  const vector<shared_ptr<Blob<Dtype> > >& net_params = this->net_->params();
+  const vector<float>& net_params_lr = this->net_->params_lr();
+  Dtype momentum = this->param_.momentum();
+  Dtype local_rate = rate * net_params_lr[param_id];
+  // Compute the update to history, then copy it to the parameter diff.
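+  // history = momentum * history + local_rate * diff, then diff = history,
+  // so the subsequent Net::Update() applies W <- W - history.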
+  switch (Caffe::mode()) {
+  case Caffe::CPU: {
+    caffe_cpu_axpby(net_params[param_id]->count(), local_rate,
+              net_params[param_id]->cpu_diff(), momentum,
+              history_[param_id]->mutable_cpu_data());
+    caffe_copy(net_params[param_id]->count(),
+        history_[param_id]->cpu_data(),
+        net_params[param_id]->mutable_cpu_diff());
+    break;
+  }
+  case Caffe::GPU: {
+#ifndef CPU_ONLY
+    caffe_gpu_axpby(net_params[param_id]->count(), local_rate,
+              net_params[param_id]->gpu_diff(), momentum,
+              history_[param_id]->mutable_gpu_data());
+    caffe_copy(net_params[param_id]->count(),
+        history_[param_id]->gpu_data(),
+        net_params[param_id]->mutable_gpu_diff());
+#else
+    NO_GPU;
+#endif
+    break;
+  }
+  default:
+    LOG(FATAL) << "Unknown caffe mode: " << Caffe::mode();
+  }
+}
+
+template <typename Dtype>
+void SGDSolver<Dtype>::SnapshotSolverState(SolverState* state) {
+  state->clear_history();
+  for (int i = 0; i < history_.size(); ++i) {
+    // Add history
+    BlobProto* history_blob = state->add_history();
+    history_[i]->ToProto(history_blob);
+  }
+}
+
+template <typename Dtype>
+void SGDSolver<Dtype>::RestoreSolverState(const SolverState& state) {
+  CHECK_EQ(state.history_size(), history_.size())
+      << "Incorrect length of history blobs.";
+  LOG(INFO) << "SGDSolver: restoring history";
+  for (int i = 0; i < history_.size(); ++i) {
+    history_[i]->FromProto(state.history(i));
+  }
+}
+
+template <typename Dtype>
+void NesterovSolver<Dtype>::ComputeUpdateValue(int param_id, Dtype rate) {
+  const vector<shared_ptr<Blob<Dtype> > >& net_params = this->net_->params();
+  const vector<float>& net_params_lr = this->net_->params_lr();
+  Dtype momentum = this->param_.momentum();
+  Dtype local_rate = rate * net_params_lr[param_id];
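+  // Nesterov momentum: with v_t the stored history,
+  //   v_{t+1} = momentum * v_t + local_rate * grad
+  //   diff    = (1 + momentum) * v_{t+1} - momentum * v_t
+  // i.e. step back by the old momentum term, then over-step with the new one.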
+  switch (Caffe::mode()) {
+  case Caffe::CPU: {
+    // save history momentum for stepping back
+    caffe_copy(net_params[param_id]->count(),
+        this->history_[param_id]->cpu_data(),
+        this->update_[param_id]->mutable_cpu_data());
+
+    // update history
+    caffe_cpu_axpby(net_params[param_id]->count(), local_rate,
+              net_params[param_id]->cpu_diff(), momentum,
+              this->history_[param_id]->mutable_cpu_data());
+
+    // compute update: step back then over step
+    caffe_cpu_axpby(net_params[param_id]->count(), Dtype(1) + momentum,
+        this->history_[param_id]->cpu_data(), -momentum,
+        this->update_[param_id]->mutable_cpu_data());
+
+    // copy
+    caffe_copy(net_params[param_id]->count(),
+        this->update_[param_id]->cpu_data(),
+        net_params[param_id]->mutable_cpu_diff());
+    break;
+  }
+  case Caffe::GPU: {
+#ifndef CPU_ONLY
+    // save history momentum for stepping back
+    caffe_copy(net_params[param_id]->count(),
+        this->history_[param_id]->gpu_data(),
+        this->update_[param_id]->mutable_gpu_data());
+
+    // update history
+    caffe_gpu_axpby(net_params[param_id]->count(), local_rate,
+              net_params[param_id]->gpu_diff(), momentum,
+              this->history_[param_id]->mutable_gpu_data());
+
+    // compute update: step back then over step
+    caffe_gpu_axpby(net_params[param_id]->count(), Dtype(1) + momentum,
+        this->history_[param_id]->gpu_data(), -momentum,
+        this->update_[param_id]->mutable_gpu_data());
+
+    // copy
+    caffe_copy(net_params[param_id]->count(),
+        this->update_[param_id]->gpu_data(),
+        net_params[param_id]->mutable_gpu_diff());
+#else
+    NO_GPU;
+#endif
+    break;
+  }
+  default:
+    LOG(FATAL) << "Unknown caffe mode: " << Caffe::mode();
+  }
+}
+
+template <typename Dtype>
+void AdaGradSolver<Dtype>::ComputeUpdateValue(int param_id, Dtype rate) {
+  const vector<shared_ptr<Blob<Dtype> > >& net_params = this->net_->params();
+  const vector<float>& net_params_lr = this->net_->params_lr();
+  Dtype delta = this->param_.delta();
+  Dtype local_rate = rate * net_params_lr[param_id];
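+  // AdaGrad: history accumulates the squared gradient,
+  //   h_{t+1} = h_t + grad .* grad
+  //   diff    = local_rate * grad ./ (sqrt(h_{t+1}) + delta)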
+  switch (Caffe::mode()) {
+  case Caffe::CPU: {
+    // compute square of gradient in update
+    caffe_powx(net_params[param_id]->count(),
+        net_params[param_id]->cpu_diff(), Dtype(2),
+        this->update_[param_id]->mutable_cpu_data());
+
+    // update history
+    caffe_add(net_params[param_id]->count(),
+        this->update_[param_id]->cpu_data(),
+        this->history_[param_id]->cpu_data(),
+        this->history_[param_id]->mutable_cpu_data());
+
+    // prepare update
+    caffe_powx(net_params[param_id]->count(),
+              this->history_[param_id]->cpu_data(), Dtype(0.5),
+              this->update_[param_id]->mutable_cpu_data());
+
+    caffe_add_scalar(net_params[param_id]->count(),
+              delta, this->update_[param_id]->mutable_cpu_data());
+
+    caffe_div(net_params[param_id]->count(),
+              net_params[param_id]->cpu_diff(),
+              this->update_[param_id]->cpu_data(),
+              this->update_[param_id]->mutable_cpu_data());
+
+    // scale and copy
+    caffe_cpu_axpby(net_params[param_id]->count(), local_rate,
+        this->update_[param_id]->cpu_data(), Dtype(0),
+        net_params[param_id]->mutable_cpu_diff());
+    break;
+  }
+  case Caffe::GPU: {
+#ifndef CPU_ONLY
+    // compute square of gradient in update
+    caffe_gpu_powx(net_params[param_id]->count(),
+        net_params[param_id]->gpu_diff(), Dtype(2),
+        this->update_[param_id]->mutable_gpu_data());
+
+    // update history
+    caffe_gpu_add(net_params[param_id]->count(),
+        this->update_[param_id]->gpu_data(),
+        this->history_[param_id]->gpu_data(),
+        this->history_[param_id]->mutable_gpu_data());
+
+    // prepare update
+    caffe_gpu_powx(net_params[param_id]->count(),
+              this->history_[param_id]->gpu_data(), Dtype(0.5),
+              this->update_[param_id]->mutable_gpu_data());
+
+    caffe_gpu_add_scalar(net_params[param_id]->count(),
+              delta, this->update_[param_id]->mutable_gpu_data());
+
+    caffe_gpu_div(net_params[param_id]->count(),
+              net_params[param_id]->gpu_diff(),
+              this->update_[param_id]->gpu_data(),
+              this->update_[param_id]->mutable_gpu_data());
+
+    // scale and copy
+    caffe_gpu_axpby(net_params[param_id]->count(), local_rate,
+        this->update_[param_id]->gpu_data(), Dtype(0),
+        net_params[param_id]->mutable_gpu_diff());
+#else
+    NO_GPU;
+#endif
+    break;
+  }
+  default:
+    LOG(FATAL) << "Unknown caffe mode: " << Caffe::mode();
+  }
+}
+
+INSTANTIATE_CLASS(Solver);
+INSTANTIATE_CLASS(SGDSolver);
+INSTANTIATE_CLASS(NesterovSolver);
+INSTANTIATE_CLASS(AdaGradSolver);
+
+}  // namespace caffe
diff --git a/src/caffe/syncedmem.cpp b/src/caffe/syncedmem.cpp
new file mode 100644
index 0000000..7617ccf
--- /dev/null
+++ b/src/caffe/syncedmem.cpp
@@ -0,0 +1,113 @@
+#include <cstring>
+
+#include "caffe/common.hpp"
+#include "caffe/syncedmem.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+SyncedMemory::~SyncedMemory() {
+  if (cpu_ptr_ && own_cpu_data_) {
+    CaffeFreeHost(cpu_ptr_);
+  }
+
+#ifndef CPU_ONLY
+  if (gpu_ptr_) {
+    CUDA_CHECK(cudaFree(gpu_ptr_));
+  }
+#endif  // CPU_ONLY
+}
+
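+// Lazily make the CPU copy valid. head_ records which side (CPU or GPU)
+// holds the freshest data, so allocation and device-to-host copies happen
+// only when actually needed.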
+inline void SyncedMemory::to_cpu() {
+  switch (head_) {
+  case UNINITIALIZED:
+    CaffeMallocHost(&cpu_ptr_, size_);
+    caffe_memset(size_, 0, cpu_ptr_);
+    head_ = HEAD_AT_CPU;
+    own_cpu_data_ = true;
+    break;
+  case HEAD_AT_GPU:
+#ifndef CPU_ONLY
+    if (cpu_ptr_ == NULL) {
+      CaffeMallocHost(&cpu_ptr_, size_);
+      own_cpu_data_ = true;
+    }
+    caffe_gpu_memcpy(size_, gpu_ptr_, cpu_ptr_);
+    head_ = SYNCED;
+#else
+    NO_GPU;
+#endif
+    break;
+  case HEAD_AT_CPU:
+  case SYNCED:
+    break;
+  }
+}
+
+inline void SyncedMemory::to_gpu() {
+#ifndef CPU_ONLY
+  switch (head_) {
+  case UNINITIALIZED:
+    CUDA_CHECK(cudaMalloc(&gpu_ptr_, size_));
+    caffe_gpu_memset(size_, 0, gpu_ptr_);
+    head_ = HEAD_AT_GPU;
+    break;
+  case HEAD_AT_CPU:
+    if (gpu_ptr_ == NULL) {
+      CUDA_CHECK(cudaMalloc(&gpu_ptr_, size_));
+    }
+    caffe_gpu_memcpy(size_, cpu_ptr_, gpu_ptr_);
+    head_ = SYNCED;
+    break;
+  case HEAD_AT_GPU:
+  case SYNCED:
+    break;
+  }
+#else
+  NO_GPU;
+#endif
+}
+
+const void* SyncedMemory::cpu_data() {
+  to_cpu();
+  return (const void*)cpu_ptr_;
+}
+
+void SyncedMemory::set_cpu_data(void* data) {
+  CHECK(data);
+  if (own_cpu_data_) {
+    CaffeFreeHost(cpu_ptr_);
+  }
+  cpu_ptr_ = data;
+  head_ = HEAD_AT_CPU;
+  own_cpu_data_ = false;
+}
+
+const void* SyncedMemory::gpu_data() {
+#ifndef CPU_ONLY
+  to_gpu();
+  return (const void*)gpu_ptr_;
+#else
+  NO_GPU;
+#endif
+}
+
+void* SyncedMemory::mutable_cpu_data() {
+  to_cpu();
+  head_ = HEAD_AT_CPU;
+  return cpu_ptr_;
+}
+
+void* SyncedMemory::mutable_gpu_data() {
+#ifndef CPU_ONLY
+  to_gpu();
+  head_ = HEAD_AT_GPU;
+  return gpu_ptr_;
+#else
+  NO_GPU;
+#endif
+}
+
+
+}  // namespace caffe
+
diff --git a/src/caffe/test/CMakeLists.txt b/src/caffe/test/CMakeLists.txt
new file mode 100644
index 0000000..35a803f
--- /dev/null
+++ b/src/caffe/test/CMakeLists.txt
@@ -0,0 +1,36 @@
+# This option allows including only selected test files in the build and excluding all others.
+# Usage example:
+#  cmake -DBUILD_only_tests="common,net,blob,im2col_kernel"
+set(BUILD_only_tests "" CACHE STRING "Blank or comma-separated list of test files to build, without the 'test_' prefix and extension")
+caffe_leave_only_selected_tests(test_srcs ${BUILD_only_tests})
+caffe_leave_only_selected_tests(test_cuda ${BUILD_only_tests})
+
+# For the 'make runtest' target we don't need to embed test data paths into
+# the source files, because the test target is executed in the source
+# directory. That's why the lines below are commented out. TODO: remove them.
+
+# definition needed to include CMake generated files
+#add_definitions(-DCMAKE_BUILD)
+
+# generates test_data/sample_data_list.txt.gen.cmake
+#caffe_configure_testdatafile(test_data/sample_data_list.txt)
+
+set(the_target test.testbin)
+set(test_args --gtest_shuffle)
+
+if(HAVE_CUDA)
+  caffe_cuda_compile(test_cuda_objs ${test_cuda})
+  list(APPEND test_srcs ${test_cuda_objs} ${test_cuda})
+else()
+  list(APPEND test_args --gtest_filter="-*GPU*")
+endif()
+
+# ---[ Adding test target
+add_executable(${the_target} EXCLUDE_FROM_ALL ${test_srcs})
+target_link_libraries(${the_target} gtest ${Caffe_LINK})
+caffe_default_properties(${the_target})
+caffe_set_runtime_directory(${the_target} "${PROJECT_BINARY_DIR}/test")
+
+# ---[ Adding runtest
+add_custom_target(runtest COMMAND ${the_target} ${test_args}
+                          WORKING_DIRECTORY ${PROJECT_SOURCE_DIR})
diff --git a/src/caffe/test/test_accuracy_layer.cpp b/src/caffe/test/test_accuracy_layer.cpp
new file mode 100644
index 0000000..c14b67c
--- /dev/null
+++ b/src/caffe/test/test_accuracy_layer.cpp
@@ -0,0 +1,231 @@
+#include <cfloat>
+#include <cmath>
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/util/rng.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+class AccuracyLayerTest : public CPUDeviceTest<Dtype> {
+ protected:
+  AccuracyLayerTest()
+      : blob_bottom_data_(new Blob<Dtype>()),
+        blob_bottom_label_(new Blob<Dtype>()),
+        blob_top_(new Blob<Dtype>()),
+        top_k_(3) {
+    vector<int> shape(2);
+    shape[0] = 100;
+    shape[1] = 10;
+    blob_bottom_data_->Reshape(shape);
+    shape.resize(1);
+    blob_bottom_label_->Reshape(shape);
+    FillBottoms();
+
+    blob_bottom_vec_.push_back(blob_bottom_data_);
+    blob_bottom_vec_.push_back(blob_bottom_label_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+
+  virtual void FillBottoms() {
+    // fill the probability values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_data_);
+
+    const unsigned int prefetch_rng_seed = caffe_rng_rand();
+    shared_ptr<Caffe::RNG> rng(new Caffe::RNG(prefetch_rng_seed));
+    caffe::rng_t* prefetch_rng =
+          static_cast<caffe::rng_t*>(rng->generator());
+    Dtype* label_data = blob_bottom_label_->mutable_cpu_data();
+    for (int i = 0; i < blob_bottom_label_->count(); ++i) {
+      label_data[i] = (*prefetch_rng)() % 10;
+    }
+  }
+
+  virtual ~AccuracyLayerTest() {
+    delete blob_bottom_data_;
+    delete blob_bottom_label_;
+    delete blob_top_;
+  }
+  Blob<Dtype>* const blob_bottom_data_;
+  Blob<Dtype>* const blob_bottom_label_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+  int top_k_;
+};
+
+TYPED_TEST_CASE(AccuracyLayerTest, TestDtypes);
+
+TYPED_TEST(AccuracyLayerTest, TestSetup) {
+  LayerParameter layer_param;
+  AccuracyLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 1);
+  EXPECT_EQ(this->blob_top_->channels(), 1);
+  EXPECT_EQ(this->blob_top_->height(), 1);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+}
+
+TYPED_TEST(AccuracyLayerTest, TestSetupTopK) {
+  LayerParameter layer_param;
+  AccuracyParameter* accuracy_param =
+      layer_param.mutable_accuracy_param();
+  accuracy_param->set_top_k(5);
+  AccuracyLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 1);
+  EXPECT_EQ(this->blob_top_->channels(), 1);
+  EXPECT_EQ(this->blob_top_->height(), 1);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+}
+
+TYPED_TEST(AccuracyLayerTest, TestForwardCPU) {
+  LayerParameter layer_param;
+  AccuracyLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  TypeParam max_value;
+  int max_id;
+  int num_correct_labels = 0;
+  for (int i = 0; i < 100; ++i) {
+    max_value = -FLT_MAX;
+    max_id = 0;
+    for (int j = 0; j < 10; ++j) {
+      if (this->blob_bottom_data_->data_at(i, j, 0, 0) > max_value) {
+        max_value = this->blob_bottom_data_->data_at(i, j, 0, 0);
+        max_id = j;
+      }
+    }
+    if (max_id == this->blob_bottom_label_->data_at(i, 0, 0, 0)) {
+      ++num_correct_labels;
+    }
+  }
+  EXPECT_NEAR(this->blob_top_->data_at(0, 0, 0, 0),
+              num_correct_labels / 100.0, 1e-4);
+}
+
+TYPED_TEST(AccuracyLayerTest, TestForwardWithSpatialAxes) {
+  this->blob_bottom_data_->Reshape(2, 10, 4, 5);
+  vector<int> label_shape(3);
+  label_shape[0] = 2; label_shape[1] = 4; label_shape[2] = 5;
+  this->blob_bottom_label_->Reshape(label_shape);
+  this->FillBottoms();
+  LayerParameter layer_param;
+  layer_param.mutable_accuracy_param()->set_axis(1);
+  AccuracyLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  TypeParam max_value;
+  const int num_labels = this->blob_bottom_label_->count();
+  int max_id;
+  int num_correct_labels = 0;
+  vector<int> label_offset(3);
+  for (int n = 0; n < this->blob_bottom_data_->num(); ++n) {
+    for (int h = 0; h < this->blob_bottom_data_->height(); ++h) {
+      for (int w = 0; w < this->blob_bottom_data_->width(); ++w) {
+        max_value = -FLT_MAX;
+        max_id = 0;
+        for (int c = 0; c < this->blob_bottom_data_->channels(); ++c) {
+          const TypeParam pred_value =
+              this->blob_bottom_data_->data_at(n, c, h, w);
+          if (pred_value > max_value) {
+            max_value = pred_value;
+            max_id = c;
+          }
+        }
+        label_offset[0] = n; label_offset[1] = h; label_offset[2] = w;
+        const int correct_label =
+            static_cast<int>(this->blob_bottom_label_->data_at(label_offset));
+        if (max_id == correct_label) {
+          ++num_correct_labels;
+        }
+      }
+    }
+  }
+  EXPECT_NEAR(this->blob_top_->data_at(0, 0, 0, 0),
+              num_correct_labels / TypeParam(num_labels), 1e-4);
+}
+
+TYPED_TEST(AccuracyLayerTest, TestForwardIgnoreLabel) {
+  LayerParameter layer_param;
+  const TypeParam kIgnoreLabelValue = -1;
+  layer_param.mutable_accuracy_param()->set_ignore_label(kIgnoreLabelValue);
+  AccuracyLayer<TypeParam> layer(layer_param);
+  // Manually set some labels to the ignore label value (-1).
+  this->blob_bottom_label_->mutable_cpu_data()[2] = kIgnoreLabelValue;
+  this->blob_bottom_label_->mutable_cpu_data()[5] = kIgnoreLabelValue;
+  this->blob_bottom_label_->mutable_cpu_data()[32] = kIgnoreLabelValue;
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  TypeParam max_value;
+  int max_id;
+  int num_correct_labels = 0;
+  int count = 0;
+  for (int i = 0; i < 100; ++i) {
+    if (kIgnoreLabelValue == this->blob_bottom_label_->data_at(i, 0, 0, 0)) {
+      continue;
+    }
+    ++count;
+    max_value = -FLT_MAX;
+    max_id = 0;
+    for (int j = 0; j < 10; ++j) {
+      if (this->blob_bottom_data_->data_at(i, j, 0, 0) > max_value) {
+        max_value = this->blob_bottom_data_->data_at(i, j, 0, 0);
+        max_id = j;
+      }
+    }
+    if (max_id == this->blob_bottom_label_->data_at(i, 0, 0, 0)) {
+      ++num_correct_labels;
+    }
+  }
+  EXPECT_EQ(count, 97);  // We set 3 out of 100 labels to kIgnoreLabelValue.
+  EXPECT_NEAR(this->blob_top_->data_at(0, 0, 0, 0),
+              num_correct_labels / TypeParam(count), 1e-4);
+}
+
+TYPED_TEST(AccuracyLayerTest, TestForwardCPUTopK) {
+  LayerParameter layer_param;
+  AccuracyParameter* accuracy_param = layer_param.mutable_accuracy_param();
+  accuracy_param->set_top_k(this->top_k_);
+  AccuracyLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  TypeParam current_value;
+  int current_rank;
+  int num_correct_labels = 0;
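+  // Reference top-k accuracy: a sample is correct when fewer than top_k_
+  // scores are strictly greater than the score at its true label, i.e. the
+  // label lies within the top k predictions.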
+  for (int i = 0; i < 100; ++i) {
+    for (int j = 0; j < 10; ++j) {
+      current_value = this->blob_bottom_data_->data_at(i, j, 0, 0);
+      current_rank = 0;
+      for (int k = 0; k < 10; ++k) {
+        if (this->blob_bottom_data_->data_at(i, k, 0, 0) > current_value) {
+          ++current_rank;
+        }
+      }
+      if (current_rank < this->top_k_ &&
+          j == this->blob_bottom_label_->data_at(i, 0, 0, 0)) {
+        ++num_correct_labels;
+      }
+    }
+  }
+
+  EXPECT_NEAR(this->blob_top_->data_at(0, 0, 0, 0),
+              num_correct_labels / 100.0, 1e-4);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_argmax_layer.cpp b/src/caffe/test/test_argmax_layer.cpp
new file mode 100644
index 0000000..895c3d3
--- /dev/null
+++ b/src/caffe/test/test_argmax_layer.cpp
@@ -0,0 +1,168 @@
+#include <utility>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+class ArgMaxLayerTest : public CPUDeviceTest<Dtype> {
+ protected:
+  ArgMaxLayerTest()
+      : blob_bottom_(new Blob<Dtype>(10, 20, 1, 1)),
+        blob_top_(new Blob<Dtype>()),
+        top_k_(5) {
+    Caffe::set_random_seed(1701);
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~ArgMaxLayerTest() { delete blob_bottom_; delete blob_top_; }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+  size_t top_k_;
+};
+
+TYPED_TEST_CASE(ArgMaxLayerTest, TestDtypes);
+
+TYPED_TEST(ArgMaxLayerTest, TestSetup) {
+  LayerParameter layer_param;
+  ArgMaxLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_->num());
+  EXPECT_EQ(this->blob_top_->channels(), 1);
+}
+
+TYPED_TEST(ArgMaxLayerTest, TestSetupMaxVal) {
+  LayerParameter layer_param;
+  ArgMaxParameter* argmax_param = layer_param.mutable_argmax_param();
+  argmax_param->set_out_max_val(true);
+  ArgMaxLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_->num());
+  EXPECT_EQ(this->blob_top_->channels(), 2);
+}
+
+TYPED_TEST(ArgMaxLayerTest, TestCPU) {
+  LayerParameter layer_param;
+  ArgMaxLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
+  const TypeParam* top_data = this->blob_top_->cpu_data();
+  int max_ind;
+  TypeParam max_val;
+  int num = this->blob_bottom_->num();
+  int dim = this->blob_bottom_->count() / num;
+  for (int i = 0; i < num; ++i) {
+    EXPECT_GE(top_data[i], 0);
+    EXPECT_LE(top_data[i], dim);
+    max_ind = top_data[i];
+    max_val = bottom_data[i * dim + max_ind];
+    for (int j = 0; j < dim; ++j) {
+      EXPECT_LE(bottom_data[i * dim + j], max_val);
+    }
+  }
+}
+
+TYPED_TEST(ArgMaxLayerTest, TestCPUMaxVal) {
+  LayerParameter layer_param;
+  ArgMaxParameter* argmax_param = layer_param.mutable_argmax_param();
+  argmax_param->set_out_max_val(true);
+  ArgMaxLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
+  const TypeParam* top_data = this->blob_top_->cpu_data();
+  int max_ind;
+  TypeParam max_val;
+  int num = this->blob_bottom_->num();
+  int dim = this->blob_bottom_->count() / num;
+  for (int i = 0; i < num; ++i) {
+    EXPECT_GE(top_data[i], 0);
+    EXPECT_LE(top_data[i], dim);
+    max_ind = top_data[i * 2];
+    max_val = top_data[i * 2 + 1];
+    EXPECT_EQ(bottom_data[i * dim + max_ind], max_val);
+    for (int j = 0; j < dim; ++j) {
+      EXPECT_LE(bottom_data[i * dim + j], max_val);
+    }
+  }
+}
+
+TYPED_TEST(ArgMaxLayerTest, TestCPUTopK) {
+  LayerParameter layer_param;
+  ArgMaxParameter* argmax_param = layer_param.mutable_argmax_param();
+  argmax_param->set_top_k(this->top_k_);
+  ArgMaxLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  int max_ind;
+  TypeParam max_val;
+  int num = this->blob_bottom_->num();
+  int dim = this->blob_bottom_->count() / num;
+  for (int i = 0; i < num; ++i) {
+    EXPECT_GE(this->blob_top_->data_at(i, 0, 0, 0), 0);
+    EXPECT_LE(this->blob_top_->data_at(i, 0, 0, 0), dim);
+    for (int j = 0; j < this->top_k_; ++j) {
+      max_ind = this->blob_top_->data_at(i, 0, j, 0);
+      max_val = this->blob_bottom_->data_at(i, max_ind, 0, 0);
+      int count = 0;
+      for (int k = 0; k < dim; ++k) {
+        if (this->blob_bottom_->data_at(i, k, 0, 0) > max_val) {
+          ++count;
+        }
+      }
+      EXPECT_EQ(j, count);
+    }
+  }
+}
+
+TYPED_TEST(ArgMaxLayerTest, TestCPUMaxValTopK) {
+  LayerParameter layer_param;
+  ArgMaxParameter* argmax_param = layer_param.mutable_argmax_param();
+  argmax_param->set_out_max_val(true);
+  argmax_param->set_top_k(this->top_k_);
+  ArgMaxLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  int max_ind;
+  TypeParam max_val;
+  int num = this->blob_bottom_->num();
+  int dim = this->blob_bottom_->count() / num;
+  for (int i = 0; i < num; ++i) {
+    EXPECT_GE(this->blob_top_->data_at(i, 0, 0, 0), 0);
+    EXPECT_LE(this->blob_top_->data_at(i, 0, 0, 0), dim);
+    for (int j = 0; j < this->top_k_; ++j) {
+      max_ind = this->blob_top_->data_at(i, 0, j, 0);
+      max_val = this->blob_top_->data_at(i, 1, j, 0);
+      EXPECT_EQ(this->blob_bottom_->data_at(i, max_ind, 0, 0), max_val);
+      int count = 0;
+      for (int k = 0; k < dim; ++k) {
+        if (this->blob_bottom_->data_at(i, k, 0, 0) > max_val) {
+          ++count;
+        }
+      }
+      EXPECT_EQ(j, count);
+    }
+  }
+}
+
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_benchmark.cpp b/src/caffe/test/test_benchmark.cpp
new file mode 100644
index 0000000..43aaa63
--- /dev/null
+++ b/src/caffe/test/test_benchmark.cpp
@@ -0,0 +1,90 @@
+#include <unistd.h>  // for usleep
+
+#include "gtest/gtest.h"
+
+#include "caffe/common.hpp"
+#include "caffe/util/benchmark.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+const float kMillisecondsThreshold = 30;
+
+template <typename TypeParam>
+class BenchmarkTest : public MultiDeviceTest<TypeParam> {};
+
+TYPED_TEST_CASE(BenchmarkTest, TestDtypesAndDevices);
+
+TYPED_TEST(BenchmarkTest, TestTimerConstructor) {
+  Timer timer;
+  EXPECT_TRUE(timer.initted());
+  EXPECT_FALSE(timer.running());
+  EXPECT_FALSE(timer.has_run_at_least_once());
+}
+
+TYPED_TEST(BenchmarkTest, TestTimerStart) {
+  Timer timer;
+  timer.Start();
+  EXPECT_TRUE(timer.initted());
+  EXPECT_TRUE(timer.running());
+  EXPECT_TRUE(timer.has_run_at_least_once());
+  timer.Start();
+  EXPECT_TRUE(timer.initted());
+  EXPECT_TRUE(timer.running());
+  EXPECT_TRUE(timer.has_run_at_least_once());
+  timer.Stop();
+  timer.Start();
+  EXPECT_TRUE(timer.initted());
+  EXPECT_TRUE(timer.running());
+  EXPECT_TRUE(timer.has_run_at_least_once());
+}
+
+TYPED_TEST(BenchmarkTest, TestTimerStop) {
+  Timer timer;
+  timer.Stop();
+  EXPECT_TRUE(timer.initted());
+  EXPECT_FALSE(timer.running());
+  EXPECT_FALSE(timer.has_run_at_least_once());
+  timer.Start();
+  timer.Stop();
+  EXPECT_TRUE(timer.initted());
+  EXPECT_FALSE(timer.running());
+  EXPECT_TRUE(timer.has_run_at_least_once());
+  timer.Stop();
+  EXPECT_TRUE(timer.initted());
+  EXPECT_FALSE(timer.running());
+  EXPECT_TRUE(timer.has_run_at_least_once());
+}
+
+TYPED_TEST(BenchmarkTest, TestTimerMilliSeconds) {
+  Timer timer;
+  EXPECT_EQ(timer.MilliSeconds(), 0);
+  EXPECT_TRUE(timer.initted());
+  EXPECT_FALSE(timer.running());
+  EXPECT_FALSE(timer.has_run_at_least_once());
+  timer.Start();
+  usleep(300 * 1000);
+  EXPECT_GE(timer.MilliSeconds(), 300 - kMillisecondsThreshold);
+  EXPECT_LE(timer.MilliSeconds(), 300 + kMillisecondsThreshold);
+  EXPECT_TRUE(timer.initted());
+  EXPECT_FALSE(timer.running());
+  EXPECT_TRUE(timer.has_run_at_least_once());
+}
+
+TYPED_TEST(BenchmarkTest, TestTimerSeconds) {
+  Timer timer;
+  EXPECT_EQ(timer.Seconds(), 0);
+  EXPECT_TRUE(timer.initted());
+  EXPECT_FALSE(timer.running());
+  EXPECT_FALSE(timer.has_run_at_least_once());
+  timer.Start();
+  usleep(300 * 1000);
+  EXPECT_GE(timer.Seconds(), 0.3 - kMillisecondsThreshold / 1000.);
+  EXPECT_LE(timer.Seconds(), 0.3 + kMillisecondsThreshold / 1000.);
+  EXPECT_TRUE(timer.initted());
+  EXPECT_FALSE(timer.running());
+  EXPECT_TRUE(timer.has_run_at_least_once());
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_blob.cpp b/src/caffe/test/test_blob.cpp
new file mode 100644
index 0000000..7da6423
--- /dev/null
+++ b/src/caffe/test/test_blob.cpp
@@ -0,0 +1,294 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+class BlobSimpleTest : public ::testing::Test {
+ protected:
+  BlobSimpleTest()
+      : blob_(new Blob<Dtype>()),
+        blob_preshaped_(new Blob<Dtype>(2, 3, 4, 5)) {}
+  virtual ~BlobSimpleTest() { delete blob_; delete blob_preshaped_; }
+  Blob<Dtype>* const blob_;
+  Blob<Dtype>* const blob_preshaped_;
+};
+
+TYPED_TEST_CASE(BlobSimpleTest, TestDtypes);
+
+TYPED_TEST(BlobSimpleTest, TestInitialization) {
+  EXPECT_TRUE(this->blob_);
+  EXPECT_TRUE(this->blob_preshaped_);
+  EXPECT_EQ(this->blob_preshaped_->num(), 2);
+  EXPECT_EQ(this->blob_preshaped_->channels(), 3);
+  EXPECT_EQ(this->blob_preshaped_->height(), 4);
+  EXPECT_EQ(this->blob_preshaped_->width(), 5);
+  EXPECT_EQ(this->blob_preshaped_->count(), 120);
+  EXPECT_EQ(this->blob_->num_axes(), 0);
+  EXPECT_EQ(this->blob_->count(), 0);
+}
+
+TYPED_TEST(BlobSimpleTest, TestPointersCPUGPU) {
+  EXPECT_TRUE(this->blob_preshaped_->gpu_data());
+  EXPECT_TRUE(this->blob_preshaped_->cpu_data());
+  EXPECT_TRUE(this->blob_preshaped_->mutable_gpu_data());
+  EXPECT_TRUE(this->blob_preshaped_->mutable_cpu_data());
+}
+
+TYPED_TEST(BlobSimpleTest, TestReshape) {
+  this->blob_->Reshape(2, 3, 4, 5);
+  EXPECT_EQ(this->blob_->num(), 2);
+  EXPECT_EQ(this->blob_->channels(), 3);
+  EXPECT_EQ(this->blob_->height(), 4);
+  EXPECT_EQ(this->blob_->width(), 5);
+  EXPECT_EQ(this->blob_->count(), 120);
+}
+
+TYPED_TEST(BlobSimpleTest, TestLegacyBlobProtoShapeEquals) {
+  BlobProto blob_proto;
+
+  // Reshape to (3 x 2).
+  vector<int> shape(2);
+  shape[0] = 3;
+  shape[1] = 2;
+  this->blob_->Reshape(shape);
+
+  // (3 x 2) blob == (1 x 1 x 3 x 2) legacy blob
+  blob_proto.set_num(1);
+  blob_proto.set_channels(1);
+  blob_proto.set_height(3);
+  blob_proto.set_width(2);
+  EXPECT_TRUE(this->blob_->ShapeEquals(blob_proto));
+
+  // (3 x 2) blob != (0 x 1 x 3 x 2) legacy blob
+  blob_proto.set_num(0);
+  blob_proto.set_channels(1);
+  blob_proto.set_height(3);
+  blob_proto.set_width(2);
+  EXPECT_FALSE(this->blob_->ShapeEquals(blob_proto));
+
+  // (3 x 2) blob != (3 x 1 x 3 x 2) legacy blob
+  blob_proto.set_num(3);
+  blob_proto.set_channels(1);
+  blob_proto.set_height(3);
+  blob_proto.set_width(2);
+  EXPECT_FALSE(this->blob_->ShapeEquals(blob_proto));
+
+  // Reshape to (1 x 3 x 2).
+  shape.insert(shape.begin(), 1);
+  this->blob_->Reshape(shape);
+
+  // (1 x 3 x 2) blob == (1 x 1 x 3 x 2) legacy blob
+  blob_proto.set_num(1);
+  blob_proto.set_channels(1);
+  blob_proto.set_height(3);
+  blob_proto.set_width(2);
+  EXPECT_TRUE(this->blob_->ShapeEquals(blob_proto));
+
+  // Reshape to (2 x 3 x 2).
+  shape[0] = 2;
+  this->blob_->Reshape(shape);
+
+  // (2 x 3 x 2) blob != (1 x 1 x 3 x 2) legacy blob
+  blob_proto.set_num(1);
+  blob_proto.set_channels(1);
+  blob_proto.set_height(3);
+  blob_proto.set_width(2);
+  EXPECT_FALSE(this->blob_->ShapeEquals(blob_proto));
+}
+
+template <typename TypeParam>
+class BlobMathTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+ protected:
+  BlobMathTest()
+      : blob_(new Blob<Dtype>(2, 3, 4, 5)),
+        epsilon_(1e-6) {}
+
+  virtual ~BlobMathTest() { delete blob_; }
+  Blob<Dtype>* const blob_;
+  Dtype epsilon_;
+};
+
+TYPED_TEST_CASE(BlobMathTest, TestDtypesAndDevices);
+
+TYPED_TEST(BlobMathTest, TestSumOfSquares) {
+  typedef typename TypeParam::Dtype Dtype;
+
+  // Uninitialized Blob should have sum of squares == 0.
+  EXPECT_EQ(0, this->blob_->sumsq_data());
+  EXPECT_EQ(0, this->blob_->sumsq_diff());
+  FillerParameter filler_param;
+  filler_param.set_min(-3);
+  filler_param.set_max(3);
+  UniformFiller<Dtype> filler(filler_param);
+  filler.Fill(this->blob_);
+  Dtype expected_sumsq = 0;
+  const Dtype* data = this->blob_->cpu_data();
+  for (int i = 0; i < this->blob_->count(); ++i) {
+    expected_sumsq += data[i] * data[i];
+  }
+  // Do a mutable access on the current device,
+  // so that the sumsq computation is done on that device.
+  // (Otherwise, this would only check the CPU sumsq implementation.)
+  switch (TypeParam::device) {
+  case Caffe::CPU:
+    this->blob_->mutable_cpu_data();
+    break;
+  case Caffe::GPU:
+    this->blob_->mutable_gpu_data();
+    break;
+  default:
+    LOG(FATAL) << "Unknown device: " << TypeParam::device;
+  }
+  EXPECT_NEAR(expected_sumsq, this->blob_->sumsq_data(),
+              this->epsilon_ * expected_sumsq);
+  EXPECT_EQ(0, this->blob_->sumsq_diff());
+
+  // Check sumsq_diff too.
+  const Dtype kDiffScaleFactor = 7;
+  caffe_cpu_scale(this->blob_->count(), kDiffScaleFactor, data,
+                  this->blob_->mutable_cpu_diff());
+  switch (TypeParam::device) {
+  case Caffe::CPU:
+    this->blob_->mutable_cpu_diff();
+    break;
+  case Caffe::GPU:
+    this->blob_->mutable_gpu_diff();
+    break;
+  default:
+    LOG(FATAL) << "Unknown device: " << TypeParam::device;
+  }
+  EXPECT_NEAR(expected_sumsq, this->blob_->sumsq_data(),
+              this->epsilon_ * expected_sumsq);
+  const Dtype expected_sumsq_diff =
+      expected_sumsq * kDiffScaleFactor * kDiffScaleFactor;
+  EXPECT_NEAR(expected_sumsq_diff, this->blob_->sumsq_diff(),
+              this->epsilon_ * expected_sumsq_diff);
+}
+
+TYPED_TEST(BlobMathTest, TestAsum) {
+  typedef typename TypeParam::Dtype Dtype;
+
+  // Uninitialized Blob should have asum == 0.
+  EXPECT_EQ(0, this->blob_->asum_data());
+  EXPECT_EQ(0, this->blob_->asum_diff());
+  FillerParameter filler_param;
+  filler_param.set_min(-3);
+  filler_param.set_max(3);
+  UniformFiller<Dtype> filler(filler_param);
+  filler.Fill(this->blob_);
+  Dtype expected_asum = 0;
+  const Dtype* data = this->blob_->cpu_data();
+  for (int i = 0; i < this->blob_->count(); ++i) {
+    expected_asum += std::fabs(data[i]);
+  }
+  // Do a mutable access on the current device,
+  // so that the asum computation is done on that device.
+  // (Otherwise, this would only check the CPU asum implementation.)
+  switch (TypeParam::device) {
+  case Caffe::CPU:
+    this->blob_->mutable_cpu_data();
+    break;
+  case Caffe::GPU:
+    this->blob_->mutable_gpu_data();
+    break;
+  default:
+    LOG(FATAL) << "Unknown device: " << TypeParam::device;
+  }
+  EXPECT_NEAR(expected_asum, this->blob_->asum_data(),
+              this->epsilon_ * expected_asum);
+  EXPECT_EQ(0, this->blob_->asum_diff());
+
+  // Check asum_diff too.
+  const Dtype kDiffScaleFactor = 7;
+  caffe_cpu_scale(this->blob_->count(), kDiffScaleFactor, data,
+                  this->blob_->mutable_cpu_diff());
+  switch (TypeParam::device) {
+  case Caffe::CPU:
+    this->blob_->mutable_cpu_diff();
+    break;
+  case Caffe::GPU:
+    this->blob_->mutable_gpu_diff();
+    break;
+  default:
+    LOG(FATAL) << "Unknown device: " << TypeParam::device;
+  }
+  EXPECT_NEAR(expected_asum, this->blob_->asum_data(),
+              this->epsilon_ * expected_asum);
+  const Dtype expected_diff_asum = expected_asum * kDiffScaleFactor;
+  EXPECT_NEAR(expected_diff_asum, this->blob_->asum_diff(),
+              this->epsilon_ * expected_diff_asum);
+}
+
+TYPED_TEST(BlobMathTest, TestScaleData) {
+  typedef typename TypeParam::Dtype Dtype;
+
+  EXPECT_EQ(0, this->blob_->asum_data());
+  EXPECT_EQ(0, this->blob_->asum_diff());
+  FillerParameter filler_param;
+  filler_param.set_min(-3);
+  filler_param.set_max(3);
+  UniformFiller<Dtype> filler(filler_param);
+  filler.Fill(this->blob_);
+  const Dtype asum_before_scale = this->blob_->asum_data();
+  // Do a mutable access on the current device,
+  // so that the asum computation is done on that device.
+  // (Otherwise, this would only check the CPU asum implementation.)
+  switch (TypeParam::device) {
+  case Caffe::CPU:
+    this->blob_->mutable_cpu_data();
+    break;
+  case Caffe::GPU:
+    this->blob_->mutable_gpu_data();
+    break;
+  default:
+    LOG(FATAL) << "Unknown device: " << TypeParam::device;
+  }
+  const Dtype kDataScaleFactor = 3;
+  this->blob_->scale_data(kDataScaleFactor);
+  EXPECT_NEAR(asum_before_scale * kDataScaleFactor, this->blob_->asum_data(),
+              this->epsilon_ * asum_before_scale * kDataScaleFactor);
+  EXPECT_EQ(0, this->blob_->asum_diff());
+
+  // Check scale_diff too.
+  const Dtype kDataToDiffScaleFactor = 7;
+  const Dtype* data = this->blob_->cpu_data();
+  caffe_cpu_scale(this->blob_->count(), kDataToDiffScaleFactor, data,
+                  this->blob_->mutable_cpu_diff());
+  const Dtype expected_asum_before_scale = asum_before_scale * kDataScaleFactor;
+  EXPECT_NEAR(expected_asum_before_scale, this->blob_->asum_data(),
+              this->epsilon_ * expected_asum_before_scale);
+  const Dtype expected_diff_asum_before_scale =
+      asum_before_scale * kDataScaleFactor * kDataToDiffScaleFactor;
+  EXPECT_NEAR(expected_diff_asum_before_scale, this->blob_->asum_diff(),
+              this->epsilon_ * expected_diff_asum_before_scale);
+  switch (TypeParam::device) {
+  case Caffe::CPU:
+    this->blob_->mutable_cpu_diff();
+    break;
+  case Caffe::GPU:
+    this->blob_->mutable_gpu_diff();
+    break;
+  default:
+    LOG(FATAL) << "Unknown device: " << TypeParam::device;
+  }
+  const Dtype kDiffScaleFactor = 3;
+  this->blob_->scale_diff(kDiffScaleFactor);
+  EXPECT_NEAR(asum_before_scale * kDataScaleFactor, this->blob_->asum_data(),
+              this->epsilon_ * asum_before_scale * kDataScaleFactor);
+  const Dtype expected_diff_asum =
+      expected_diff_asum_before_scale * kDiffScaleFactor;
+  EXPECT_NEAR(expected_diff_asum, this->blob_->asum_diff(),
+              this->epsilon_ * expected_diff_asum);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_caffe_main.cpp b/src/caffe/test/test_caffe_main.cpp
new file mode 100644
index 0000000..c8caf5a
--- /dev/null
+++ b/src/caffe/test/test_caffe_main.cpp
@@ -0,0 +1,40 @@
+// The main caffe test code. Your test cpp code should include
+// caffe/test/test_caffe_main.hpp so that a main function is compiled into
+// the test binary.
+
+#include "caffe/caffe.hpp"
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+#ifndef CPU_ONLY
+  cudaDeviceProp CAFFE_TEST_CUDA_PROP;
+#endif
+}
+
+#ifndef CPU_ONLY
+using caffe::CAFFE_TEST_CUDA_PROP;
+#endif
+
+int main(int argc, char** argv) {
+  ::testing::InitGoogleTest(&argc, argv);
+  caffe::GlobalInit(&argc, &argv);
+#ifndef CPU_ONLY
+  // Before starting testing, let's first print out some CUDA device info.
+  int device;
+  cudaGetDeviceCount(&device);
+  cout << "Cuda number of devices: " << device << endl;
+  if (argc > 1) {
+    // Use the given device
+    device = atoi(argv[1]);
+    cudaSetDevice(device);
+    cout << "Setting to use device " << device << endl;
+  } else if (CUDA_TEST_DEVICE >= 0) {
+    // Use the device assigned in the build configuration, but with a lower priority.
+    device = CUDA_TEST_DEVICE;
+  }
+  cudaGetDevice(&device);
+  cout << "Current device id: " << device << endl;
+  cudaGetDeviceProperties(&CAFFE_TEST_CUDA_PROP, device);
+#endif
+  // invoke the test.
+  return RUN_ALL_TESTS();
+}
diff --git a/src/caffe/test/test_common.cpp b/src/caffe/test/test_common.cpp
new file mode 100644
index 0000000..b3a61b0
--- /dev/null
+++ b/src/caffe/test/test_common.cpp
@@ -0,0 +1,66 @@
+#include <cstring>
+
+#include "gtest/gtest.h"
+
+#include "caffe/common.hpp"
+#include "caffe/syncedmem.hpp"
+#include "caffe/util/math_functions.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+class CommonTest : public ::testing::Test {};
+
+#ifndef CPU_ONLY  // GPU Caffe singleton test.
+
+TEST_F(CommonTest, TestCublasHandlerGPU) {
+  int cuda_device_id;
+  CUDA_CHECK(cudaGetDevice(&cuda_device_id));
+  EXPECT_TRUE(Caffe::cublas_handle());
+}
+
+#endif
+
+TEST_F(CommonTest, TestBrewMode) {
+  Caffe::set_mode(Caffe::CPU);
+  EXPECT_EQ(Caffe::mode(), Caffe::CPU);
+  Caffe::set_mode(Caffe::GPU);
+  EXPECT_EQ(Caffe::mode(), Caffe::GPU);
+}
+
+TEST_F(CommonTest, TestRandSeedCPU) {
+  SyncedMemory data_a(10 * sizeof(int));
+  SyncedMemory data_b(10 * sizeof(int));
+  Caffe::set_random_seed(1701);
+  caffe_rng_bernoulli(10, 0.5, static_cast<int*>(data_a.mutable_cpu_data()));
+
+  Caffe::set_random_seed(1701);
+  caffe_rng_bernoulli(10, 0.5, static_cast<int*>(data_b.mutable_cpu_data()));
+
+  for (int i = 0; i < 10; ++i) {
+    EXPECT_EQ(static_cast<const int*>(data_a.cpu_data())[i],
+        static_cast<const int*>(data_b.cpu_data())[i]);
+  }
+}
+
+#ifndef CPU_ONLY  // GPU Caffe singleton test.
+
+TEST_F(CommonTest, TestRandSeedGPU) {
+  SyncedMemory data_a(10 * sizeof(unsigned int));
+  SyncedMemory data_b(10 * sizeof(unsigned int));
+  Caffe::set_random_seed(1701);
+  CURAND_CHECK(curandGenerate(Caffe::curand_generator(),
+        static_cast<unsigned int*>(data_a.mutable_gpu_data()), 10));
+  Caffe::set_random_seed(1701);
+  CURAND_CHECK(curandGenerate(Caffe::curand_generator(),
+        static_cast<unsigned int*>(data_b.mutable_gpu_data()), 10));
+  for (int i = 0; i < 10; ++i) {
+    EXPECT_EQ(((const unsigned int*)(data_a.cpu_data()))[i],
+        ((const unsigned int*)(data_b.cpu_data()))[i]);
+  }
+}
+
+#endif
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_concat_layer.cpp b/src/caffe/test/test_concat_layer.cpp
new file mode 100644
index 0000000..662a50f
--- /dev/null
+++ b/src/caffe/test/test_concat_layer.cpp
@@ -0,0 +1,176 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class ConcatLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  ConcatLayerTest()
+      : blob_bottom_0_(new Blob<Dtype>(2, 3, 6, 5)),
+        blob_bottom_1_(new Blob<Dtype>(2, 5, 6, 5)),
+        blob_bottom_2_(new Blob<Dtype>(5, 3, 6, 5)),
+        blob_top_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    // fill the values
+    shared_ptr<ConstantFiller<Dtype> > filler;
+    FillerParameter filler_param;
+    filler_param.set_value(1.);
+    filler.reset(new ConstantFiller<Dtype>(filler_param));
+    filler->Fill(this->blob_bottom_0_);
+    filler_param.set_value(2.);
+    filler.reset(new ConstantFiller<Dtype>(filler_param));
+    filler->Fill(this->blob_bottom_1_);
+    filler_param.set_value(3.);
+    filler.reset(new ConstantFiller<Dtype>(filler_param));
+    filler->Fill(this->blob_bottom_2_);
+    blob_bottom_vec_0_.push_back(blob_bottom_0_);
+    blob_bottom_vec_0_.push_back(blob_bottom_1_);
+    blob_bottom_vec_1_.push_back(blob_bottom_0_);
+    blob_bottom_vec_1_.push_back(blob_bottom_2_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+
+  virtual ~ConcatLayerTest() {
+    delete blob_bottom_0_; delete blob_bottom_1_;
+    delete blob_bottom_2_; delete blob_top_;
+  }
+
+  Blob<Dtype>* const blob_bottom_0_;
+  Blob<Dtype>* const blob_bottom_1_;
+  Blob<Dtype>* const blob_bottom_2_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_0_, blob_bottom_vec_1_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(ConcatLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(ConcatLayerTest, TestSetupNum) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_concat_param()->set_axis(0);
+  ConcatLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_1_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(),
+      this->blob_bottom_0_->num() + this->blob_bottom_2_->num());
+  EXPECT_EQ(this->blob_top_->channels(), this->blob_bottom_0_->channels());
+  EXPECT_EQ(this->blob_top_->height(), this->blob_bottom_0_->height());
+  EXPECT_EQ(this->blob_top_->width(), this->blob_bottom_0_->width());
+}
+
+TYPED_TEST(ConcatLayerTest, TestSetupChannels) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConcatLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_0_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_0_->num());
+  EXPECT_EQ(this->blob_top_->channels(),
+      this->blob_bottom_0_->channels() + this->blob_bottom_1_->channels());
+  EXPECT_EQ(this->blob_top_->height(), this->blob_bottom_0_->height());
+  EXPECT_EQ(this->blob_top_->width(), this->blob_bottom_0_->width());
+}
+
+TYPED_TEST(ConcatLayerTest, TestSetupChannelsNegativeIndexing) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConcatLayer<Dtype> layer(layer_param);
+  // "channels" index is the third one from the end -- test negative indexing
+  // by setting axis to -3 and checking that we get the same results as above in
+  // TestSetupChannels.
+  layer_param.mutable_concat_param()->set_axis(-3);
+  layer.SetUp(this->blob_bottom_vec_0_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_0_->num());
+  EXPECT_EQ(this->blob_top_->channels(),
+      this->blob_bottom_0_->channels() + this->blob_bottom_1_->channels());
+  EXPECT_EQ(this->blob_top_->height(), this->blob_bottom_0_->height());
+  EXPECT_EQ(this->blob_top_->width(), this->blob_bottom_0_->width());
+}
+
+TYPED_TEST(ConcatLayerTest, TestForwardNum) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_concat_param()->set_axis(0);
+  ConcatLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_1_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_1_, this->blob_top_vec_);
+  for (int n = 0; n < this->blob_bottom_vec_1_[0]->num(); ++n) {
+    for (int c = 0; c < this->blob_top_->channels(); ++c) {
+      for (int h = 0; h < this->blob_top_->height(); ++h) {
+        for (int w = 0; w < this->blob_top_->width(); ++w) {
+          EXPECT_EQ(this->blob_top_->data_at(n, c, h, w),
+              this->blob_bottom_vec_1_[0]->data_at(n, c, h, w));
+        }
+      }
+    }
+  }
+  for (int n = 0; n < this->blob_bottom_vec_1_[1]->num(); ++n) {
+    for (int c = 0; c < this->blob_top_->channels(); ++c) {
+      for (int h = 0; h < this->blob_top_->height(); ++h) {
+        for (int w = 0; w < this->blob_top_->width(); ++w) {
+          EXPECT_EQ(this->blob_top_->data_at(n + 2, c, h, w),
+              this->blob_bottom_vec_1_[1]->data_at(n, c, h, w));
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST(ConcatLayerTest, TestForwardChannels) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConcatLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_0_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_0_, this->blob_top_vec_);
+  for (int n = 0; n < this->blob_top_->num(); ++n) {
+    for (int c = 0; c < this->blob_bottom_0_->channels(); ++c) {
+      for (int h = 0; h < this->blob_top_->height(); ++h) {
+        for (int w = 0; w < this->blob_top_->width(); ++w) {
+          EXPECT_EQ(this->blob_top_->data_at(n, c, h, w),
+              this->blob_bottom_vec_0_[0]->data_at(n, c, h, w));
+        }
+      }
+    }
+    for (int c = 0; c < this->blob_bottom_1_->channels(); ++c) {
+      for (int h = 0; h < this->blob_top_->height(); ++h) {
+        for (int w = 0; w < this->blob_top_->width(); ++w) {
+          EXPECT_EQ(this->blob_top_->data_at(n, c + 3, h, w),
+              this->blob_bottom_vec_0_[1]->data_at(n, c, h, w));
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST(ConcatLayerTest, TestGradientNum) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_concat_param()->set_axis(0);
+  ConcatLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2);
+  checker.CheckGradient(&layer, this->blob_bottom_vec_1_,
+    this->blob_top_vec_);
+}
+
+TYPED_TEST(ConcatLayerTest, TestGradientChannels) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConcatLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2);
+  checker.CheckGradient(&layer, this->blob_bottom_vec_0_,
+    this->blob_top_vec_);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_contrastive_loss_layer.cpp b/src/caffe/test/test_contrastive_loss_layer.cpp
new file mode 100644
index 0000000..1e9447c
--- /dev/null
+++ b/src/caffe/test/test_contrastive_loss_layer.cpp
@@ -0,0 +1,146 @@
+#include <algorithm>
+#include <cmath>
+#include <cstdlib>
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class ContrastiveLossLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  ContrastiveLossLayerTest()
+      : blob_bottom_data_i_(new Blob<Dtype>(512, 2, 1, 1)),
+        blob_bottom_data_j_(new Blob<Dtype>(512, 2, 1, 1)),
+        blob_bottom_y_(new Blob<Dtype>(512, 1, 1, 1)),
+        blob_top_loss_(new Blob<Dtype>()) {
+    // fill the values
+    FillerParameter filler_param;
+    filler_param.set_min(-1.0);
+    filler_param.set_max(1.0);  // distances~=1.0 to test both sides of margin
+    UniformFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_data_i_);
+    blob_bottom_vec_.push_back(blob_bottom_data_i_);
+    filler.Fill(this->blob_bottom_data_j_);
+    blob_bottom_vec_.push_back(blob_bottom_data_j_);
+    for (int i = 0; i < blob_bottom_y_->count(); ++i) {
+      blob_bottom_y_->mutable_cpu_data()[i] = caffe_rng_rand() % 2;  // 0 or 1
+    }
+    blob_bottom_vec_.push_back(blob_bottom_y_);
+    blob_top_vec_.push_back(blob_top_loss_);
+  }
+  virtual ~ContrastiveLossLayerTest() {
+    delete blob_bottom_data_i_;
+    delete blob_bottom_data_j_;
+    delete blob_bottom_y_;
+    delete blob_top_loss_;
+  }
+
+  Blob<Dtype>* const blob_bottom_data_i_;
+  Blob<Dtype>* const blob_bottom_data_j_;
+  Blob<Dtype>* const blob_bottom_y_;
+  Blob<Dtype>* const blob_top_loss_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(ContrastiveLossLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(ContrastiveLossLayerTest, TestForward) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ContrastiveLossLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // manually compute to compare
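+  // Contrastive loss: (1 / 2N) * sum_i [ y_i * d_i^2
+  //     + (1 - y_i) * max(margin - d_i, 0)^2 ],
+  // where d_i is the Euclidean distance between the i-th pair.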
+  const Dtype margin = layer_param.contrastive_loss_param().margin();
+  const int num = this->blob_bottom_data_i_->num();
+  const int channels = this->blob_bottom_data_i_->channels();
+  Dtype loss(0);
+  for (int i = 0; i < num; ++i) {
+    Dtype dist_sq(0);
+    for (int j = 0; j < channels; ++j) {
+      Dtype diff = this->blob_bottom_data_i_->cpu_data()[i*channels+j] -
+          this->blob_bottom_data_j_->cpu_data()[i*channels+j];
+      dist_sq += diff*diff;
+    }
+    if (this->blob_bottom_y_->cpu_data()[i]) {  // similar pairs
+      loss += dist_sq;
+    } else {
+      Dtype dist = std::max(margin - sqrt(dist_sq), 0.0);
+      loss += dist*dist;
+    }
+  }
+  loss /= static_cast<Dtype>(num) * Dtype(2);
+  EXPECT_NEAR(this->blob_top_loss_->cpu_data()[0], loss, 1e-6);
+}
+
+TYPED_TEST(ContrastiveLossLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ContrastiveLossLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
+  // check the gradient for the first two bottom layers
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 0);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 1);
+}
+
+TYPED_TEST(ContrastiveLossLayerTest, TestForwardLegacy) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_contrastive_loss_param()->set_legacy_version(true);
+  ContrastiveLossLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // manually compute to compare
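+  // Legacy variant: the dissimilar-pair term is max(margin - d_i^2, 0),
+  // i.e. the margin is applied to the squared distance and not squared again.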
+  const Dtype margin = layer_param.contrastive_loss_param().margin();
+  const int num = this->blob_bottom_data_i_->num();
+  const int channels = this->blob_bottom_data_i_->channels();
+  Dtype loss(0);
+  for (int i = 0; i < num; ++i) {
+    Dtype dist_sq(0);
+    for (int j = 0; j < channels; ++j) {
+      Dtype diff = this->blob_bottom_data_i_->cpu_data()[i*channels+j] -
+          this->blob_bottom_data_j_->cpu_data()[i*channels+j];
+      dist_sq += diff*diff;
+    }
+    if (this->blob_bottom_y_->cpu_data()[i]) {  // similar pairs
+      loss += dist_sq;
+    } else {
+      loss += std::max(margin - dist_sq, Dtype(0.0));
+    }
+  }
+  loss /= static_cast<Dtype>(num) * Dtype(2);
+  EXPECT_NEAR(this->blob_top_loss_->cpu_data()[0], loss, 1e-6);
+}
+
+TYPED_TEST(ContrastiveLossLayerTest, TestGradientLegacy) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_contrastive_loss_param()->set_legacy_version(true);
+  ContrastiveLossLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
+  // check the gradient for the first two bottom layers
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 0);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 1);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_convolution_layer.cpp b/src/caffe/test/test_convolution_layer.cpp
new file mode 100644
index 0000000..67d41ff
--- /dev/null
+++ b/src/caffe/test/test_convolution_layer.cpp
@@ -0,0 +1,699 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+// Reference convolution for checking results:
+// accumulate through explicit loops over input, output, and filters.
+template <typename Dtype>
+void caffe_conv(const Blob<Dtype>* in, ConvolutionParameter* conv_param,
+    const vector<shared_ptr<Blob<Dtype> > >& weights,
+    Blob<Dtype>* out) {
+  // Kernel size, stride, and pad
+  int kernel_h, kernel_w;
+  if (conv_param->has_kernel_size()) {
+    kernel_h = kernel_w = conv_param->kernel_size();
+  } else {
+    kernel_h = conv_param->kernel_h();
+    kernel_w = conv_param->kernel_w();
+  }
+  int pad_h, pad_w;
+  if (!conv_param->has_pad_h()) {
+    pad_h = pad_w = conv_param->pad();
+  } else {
+    pad_h = conv_param->pad_h();
+    pad_w = conv_param->pad_w();
+  }
+  int stride_h, stride_w;
+  if (!conv_param->has_stride_h()) {
+    stride_h = stride_w = conv_param->stride();
+  } else {
+    stride_h = conv_param->stride_h();
+    stride_w = conv_param->stride_w();
+  }
+  // Groups
+  int groups = conv_param->group();
+  int o_g = out->channels() / groups;
+  int k_g = in->channels() / groups;
+  int o_head, k_head;
+  // Convolution
+  const Dtype* in_data = in->cpu_data();
+  const Dtype* weight_data = weights[0]->cpu_data();
+  Dtype* out_data = out->mutable_cpu_data();
+  for (int n = 0; n < out->num(); n++) {
+    for (int g = 0; g < groups; g++) {
+      o_head = o_g * g;
+      k_head = k_g * g;
+      for (int o = 0; o < o_g; o++) {
+        for (int k = 0; k < k_g; k++) {
+          for (int y = 0; y < out->height(); y++) {
+            for (int x = 0; x < out->width(); x++) {
+              for (int p = 0; p < kernel_h; p++) {
+                for (int q = 0; q < kernel_w; q++) {
+                  int in_y = y * stride_h - pad_h + p;
+                  int in_x = x * stride_w - pad_w + q;
+                  if (in_y >= 0 && in_y < in->height()
+                    && in_x >= 0 && in_x < in->width()) {
+                    out_data[out->offset(n, o + o_head, y, x)] +=
+                        in_data[in->offset(n, k + k_head, in_y, in_x)]
+                        * weight_data[weights[0]->offset(o + o_head, k, p, q)];
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+  // Bias
+  if (conv_param->bias_term()) {
+    const Dtype* bias_data = weights[1]->cpu_data();
+    for (int n = 0; n < out->num(); n++) {
+      for (int o = 0; o < out->channels(); o++) {
+        for (int y = 0; y < out->height(); y++) {
+          for (int x = 0; x < out->width(); x++) {
+            out_data[out->offset(n, o, y, x)] += bias_data[o];
+          }
+        }
+      }
+    }
+  }
+}
+
+template void caffe_conv(const Blob<float>* in,
+    ConvolutionParameter* conv_param,
+    const vector<shared_ptr<Blob<float> > >& weights,
+    Blob<float>* out);
+template void caffe_conv(const Blob<double>* in,
+    ConvolutionParameter* conv_param,
+    const vector<shared_ptr<Blob<double> > >& weights,
+    Blob<double>* out);
+
+template <typename TypeParam>
+class ConvolutionLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  ConvolutionLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 6, 4)),
+        blob_bottom_2_(new Blob<Dtype>(2, 3, 6, 4)),
+        blob_top_(new Blob<Dtype>()),
+        blob_top_2_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    // fill the values
+    FillerParameter filler_param;
+    filler_param.set_value(1.);
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    filler.Fill(this->blob_bottom_2_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+
+  virtual ~ConvolutionLayerTest() {
+    delete blob_bottom_;
+    delete blob_bottom_2_;
+    delete blob_top_;
+    delete blob_top_2_;
+  }
+
+  virtual Blob<Dtype>* MakeReferenceTop(Blob<Dtype>* top) {
+    this->ref_blob_top_.reset(new Blob<Dtype>());
+    this->ref_blob_top_->ReshapeLike(*top);
+    return this->ref_blob_top_.get();
+  }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_bottom_2_;
+  Blob<Dtype>* const blob_top_;
+  Blob<Dtype>* const blob_top_2_;
+  shared_ptr<Blob<Dtype> > ref_blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(ConvolutionLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(ConvolutionLayerTest, TestSetup) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(4);
+  this->blob_bottom_vec_.push_back(this->blob_bottom_2_);
+  this->blob_top_vec_.push_back(this->blob_top_2_);
+  shared_ptr<Layer<Dtype> > layer(
+      new ConvolutionLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 4);
+  EXPECT_EQ(this->blob_top_->height(), 2);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+  EXPECT_EQ(this->blob_top_2_->num(), 2);
+  EXPECT_EQ(this->blob_top_2_->channels(), 4);
+  EXPECT_EQ(this->blob_top_2_->height(), 2);
+  EXPECT_EQ(this->blob_top_2_->width(), 1);
+  // setting group should not change the shape
+  convolution_param->set_num_output(3);
+  convolution_param->set_group(3);
+  layer.reset(new ConvolutionLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 3);
+  EXPECT_EQ(this->blob_top_->height(), 2);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+  EXPECT_EQ(this->blob_top_2_->num(), 2);
+  EXPECT_EQ(this->blob_top_2_->channels(), 3);
+  EXPECT_EQ(this->blob_top_2_->height(), 2);
+  EXPECT_EQ(this->blob_top_2_->width(), 1);
+}
+
+TYPED_TEST(ConvolutionLayerTest, TestSimpleConvolution) {
+  typedef typename TypeParam::Dtype Dtype;
+  this->blob_bottom_vec_.push_back(this->blob_bottom_2_);
+  this->blob_top_vec_.push_back(this->blob_top_2_);
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(4);
+  convolution_param->mutable_weight_filler()->set_type("gaussian");
+  convolution_param->mutable_bias_filler()->set_type("constant");
+  convolution_param->mutable_bias_filler()->set_value(0.1);
+  shared_ptr<Layer<Dtype> > layer(
+      new ConvolutionLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Check against reference convolution.
+  const Dtype* top_data;
+  const Dtype* ref_top_data;
+  caffe_conv(this->blob_bottom_, convolution_param, layer->blobs(),
+      this->MakeReferenceTop(this->blob_top_));
+  top_data = this->blob_top_->cpu_data();
+  ref_top_data = this->ref_blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    EXPECT_NEAR(top_data[i], ref_top_data[i], 1e-4);
+  }
+  caffe_conv(this->blob_bottom_2_, convolution_param, layer->blobs(),
+      this->MakeReferenceTop(this->blob_top_2_));
+  top_data = this->blob_top_2_->cpu_data();
+  ref_top_data = this->ref_blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_top_2_->count(); ++i) {
+    EXPECT_NEAR(top_data[i], ref_top_data[i], 1e-4);
+  }
+}
+
+TYPED_TEST(ConvolutionLayerTest, Test1x1Convolution) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(1);
+  convolution_param->set_stride(1);
+  convolution_param->set_num_output(4);
+  convolution_param->mutable_weight_filler()->set_type("gaussian");
+  convolution_param->mutable_bias_filler()->set_type("constant");
+  convolution_param->mutable_bias_filler()->set_value(0.1);
+  shared_ptr<Layer<Dtype> > layer(
+      new ConvolutionLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Check against reference convolution.
+  const Dtype* top_data;
+  const Dtype* ref_top_data;
+  caffe_conv(this->blob_bottom_, convolution_param, layer->blobs(),
+      this->MakeReferenceTop(this->blob_top_));
+  top_data = this->blob_top_->cpu_data();
+  ref_top_data = this->ref_blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    EXPECT_NEAR(top_data[i], ref_top_data[i], 1e-4);
+  }
+}
+
+TYPED_TEST(ConvolutionLayerTest, TestSimpleConvolutionGroup) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(3);
+  convolution_param->set_group(3);
+  convolution_param->mutable_weight_filler()->set_type("gaussian");
+  convolution_param->mutable_bias_filler()->set_type("constant");
+  convolution_param->mutable_bias_filler()->set_value(0.1);
+  shared_ptr<Layer<Dtype> > layer(
+      new ConvolutionLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Check against reference convolution.
+  const Dtype* top_data;
+  const Dtype* ref_top_data;
+  caffe_conv(this->blob_bottom_, convolution_param, layer->blobs(),
+      this->MakeReferenceTop(this->blob_top_));
+  top_data = this->blob_top_->cpu_data();
+  ref_top_data = this->ref_blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    EXPECT_NEAR(top_data[i], ref_top_data[i], 1e-4);
+  }
+}
+
+TYPED_TEST(ConvolutionLayerTest, TestSobelConvolution) {
+  // Test separable convolution by computing the Sobel operator
+  // as a single filter, then comparing the result to the same
+  // operator computed as the convolution of two rectangular filters.
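+  // The Sobel G_x kernel [-1 0 1; -2 0 2; -1 0 1] factors into the column
+  // filter [1 2 1]^T followed by the row filter [-1 0 1], which is what the
+  // separable pass below computes.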
+  typedef typename TypeParam::Dtype Dtype;
+  // Fill bottoms with identical Gaussian noise.
+  shared_ptr<GaussianFiller<Dtype> > filler;
+  FillerParameter filler_param;
+  filler_param.set_value(1.);
+  filler.reset(new GaussianFiller<Dtype>(filler_param));
+  filler->Fill(this->blob_bottom_);
+  this->blob_bottom_2_->CopyFrom(*this->blob_bottom_);
+  // Compute Sobel G_x operator as 3 x 3 convolution.
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(1);
+  convolution_param->set_bias_term(false);
+  shared_ptr<Layer<Dtype> > layer(
+      new ConvolutionLayer<Dtype>(layer_param));
+  layer->blobs().resize(1);
+  layer->blobs()[0].reset(new Blob<Dtype>(1, 3, 3, 3));
+  Dtype* weights = layer->blobs()[0]->mutable_cpu_data();
+  for (int c = 0; c < 3; ++c) {
+    int i = c * 9;  // 3 x 3 filter
+    weights[i +  0] = -1;
+    weights[i +  1] =  0;
+    weights[i +  2] =  1;
+    weights[i +  3] = -2;
+    weights[i +  4] =  0;
+    weights[i +  5] =  2;
+    weights[i +  6] = -1;
+    weights[i +  7] =  0;
+    weights[i +  8] =  1;
+  }
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Compute Sobel G_x operator as separable 3 x 1 and 1 x 3 convolutions.
+  // (1) the [1 2 1] column filter
+  vector<Blob<Dtype>*> sep_blob_bottom_vec;
+  vector<Blob<Dtype>*> sep_blob_top_vec;
+  shared_ptr<Blob<Dtype> > blob_sep(new Blob<Dtype>());
+  sep_blob_bottom_vec.push_back(this->blob_bottom_2_);
+  sep_blob_top_vec.push_back(this->blob_top_2_);
+  convolution_param->clear_kernel_size();
+  convolution_param->clear_stride();
+  convolution_param->set_kernel_h(3);
+  convolution_param->set_kernel_w(1);
+  convolution_param->set_stride_h(2);
+  convolution_param->set_stride_w(1);
+  convolution_param->set_num_output(1);
+  convolution_param->set_bias_term(false);
+  layer.reset(new ConvolutionLayer<Dtype>(layer_param));
+  layer->blobs().resize(1);
+  layer->blobs()[0].reset(new Blob<Dtype>(1, 3, 3, 1));
+  Dtype* weights_1 = layer->blobs()[0]->mutable_cpu_data();
+  for (int c = 0; c < 3; ++c) {
+    int i = c * 3;  // 3 x 1 filter
+    weights_1[i +  0] = 1;
+    weights_1[i +  1] = 2;
+    weights_1[i +  2] = 1;
+  }
+  layer->SetUp(sep_blob_bottom_vec, sep_blob_top_vec);
+  layer->Forward(sep_blob_bottom_vec, sep_blob_top_vec);
+  // (2) the [-1 0 1] row filter
+  blob_sep->CopyFrom(*this->blob_top_2_, false, true);
+  sep_blob_bottom_vec.clear();
+  sep_blob_bottom_vec.push_back(blob_sep.get());
+  convolution_param->set_kernel_h(1);
+  convolution_param->set_kernel_w(3);
+  convolution_param->set_stride_h(1);
+  convolution_param->set_stride_w(2);
+  convolution_param->set_num_output(1);
+  convolution_param->set_bias_term(false);
+  layer.reset(new ConvolutionLayer<Dtype>(layer_param));
+  layer->blobs().resize(1);
+  layer->blobs()[0].reset(new Blob<Dtype>(1, 3, 1, 3));
+  Dtype* weights_2 = layer->blobs()[0]->mutable_cpu_data();
+  for (int c = 0; c < 3; ++c) {
+    int i = c * 3;  // 1 x 3 filter
+    weights_2[i +  0] = -1;
+    weights_2[i +  1] =  0;
+    weights_2[i +  2] =  1;
+  }
+  layer->SetUp(sep_blob_bottom_vec, sep_blob_top_vec);
+  layer->Forward(sep_blob_bottom_vec, sep_blob_top_vec);
+  // Test equivalence of full and separable filters.
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  const Dtype* sep_top_data = this->blob_top_2_->cpu_data();
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    EXPECT_NEAR(top_data[i], sep_top_data[i], 1e-4);
+  }
+}
+
+TYPED_TEST(ConvolutionLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  this->blob_bottom_vec_.push_back(this->blob_bottom_2_);
+  this->blob_top_vec_.push_back(this->blob_top_2_);
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(2);
+  convolution_param->mutable_weight_filler()->set_type("gaussian");
+  convolution_param->mutable_bias_filler()->set_type("gaussian");
+  ConvolutionLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(ConvolutionLayerTest, Test1x1Gradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  this->blob_bottom_vec_.push_back(this->blob_bottom_2_);
+  this->blob_top_vec_.push_back(this->blob_top_2_);
+  convolution_param->set_kernel_size(1);
+  convolution_param->set_stride(1);
+  convolution_param->set_num_output(2);
+  convolution_param->mutable_weight_filler()->set_type("gaussian");
+  convolution_param->mutable_bias_filler()->set_type("gaussian");
+  ConvolutionLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(ConvolutionLayerTest, TestGradientGroup) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(3);
+  convolution_param->set_group(3);
+  convolution_param->mutable_weight_filler()->set_type("gaussian");
+  convolution_param->mutable_bias_filler()->set_type("gaussian");
+  ConvolutionLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+#ifdef USE_CUDNN
+
+template <typename Dtype>
+class CuDNNConvolutionLayerTest : public GPUDeviceTest<Dtype> {
+ protected:
+  CuDNNConvolutionLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 6, 4)),
+        blob_bottom_2_(new Blob<Dtype>(2, 3, 6, 4)),
+        blob_top_(new Blob<Dtype>()),
+        blob_top_2_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    // fill the values
+    FillerParameter filler_param;
+    filler_param.set_value(1.);
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    filler.Fill(this->blob_bottom_2_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+
+  virtual ~CuDNNConvolutionLayerTest() {
+    delete blob_bottom_;
+    delete blob_bottom_2_;
+    delete blob_top_;
+    delete blob_top_2_;
+  }
+
+  virtual Blob<Dtype>* MakeReferenceTop(Blob<Dtype>* top) {
+    this->ref_blob_top_.reset(new Blob<Dtype>());
+    this->ref_blob_top_->ReshapeLike(*top);
+    return this->ref_blob_top_.get();
+  }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_bottom_2_;
+  Blob<Dtype>* const blob_top_;
+  Blob<Dtype>* const blob_top_2_;
+  shared_ptr<Blob<Dtype> > ref_blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(CuDNNConvolutionLayerTest, TestDtypes);
+
+TYPED_TEST(CuDNNConvolutionLayerTest, TestSetupCuDNN) {
+  this->blob_bottom_vec_.push_back(this->blob_bottom_2_);
+  this->blob_top_vec_.push_back(this->blob_top_2_);
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(4);
+  shared_ptr<Layer<TypeParam> > layer(
+      new CuDNNConvolutionLayer<TypeParam>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 4);
+  EXPECT_EQ(this->blob_top_->height(), 2);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+  EXPECT_EQ(this->blob_top_2_->num(), 2);
+  EXPECT_EQ(this->blob_top_2_->channels(), 4);
+  EXPECT_EQ(this->blob_top_2_->height(), 2);
+  EXPECT_EQ(this->blob_top_2_->width(), 1);
+  // setting group should not change the shape
+  convolution_param->set_num_output(3);
+  convolution_param->set_group(3);
+  layer.reset(new CuDNNConvolutionLayer<TypeParam>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 3);
+  EXPECT_EQ(this->blob_top_->height(), 2);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+  EXPECT_EQ(this->blob_top_2_->num(), 2);
+  EXPECT_EQ(this->blob_top_2_->channels(), 3);
+  EXPECT_EQ(this->blob_top_2_->height(), 2);
+  EXPECT_EQ(this->blob_top_2_->width(), 1);
+}
+
+TYPED_TEST(CuDNNConvolutionLayerTest, TestSimpleConvolutionCuDNN) {
+  this->blob_bottom_vec_.push_back(this->blob_bottom_2_);
+  this->blob_top_vec_.push_back(this->blob_top_2_);
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(4);
+  convolution_param->mutable_weight_filler()->set_type("gaussian");
+  convolution_param->mutable_bias_filler()->set_type("constant");
+  convolution_param->mutable_bias_filler()->set_value(0.1);
+  shared_ptr<Layer<TypeParam> > layer(
+      new CuDNNConvolutionLayer<TypeParam>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Check against reference convolution.
+  const TypeParam* top_data;
+  const TypeParam* ref_top_data;
+  caffe_conv(this->blob_bottom_, convolution_param, layer->blobs(),
+      this->MakeReferenceTop(this->blob_top_));
+  top_data = this->blob_top_->cpu_data();
+  ref_top_data = this->ref_blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    EXPECT_NEAR(top_data[i], ref_top_data[i], 1e-4);
+  }
+  caffe_conv(this->blob_bottom_2_, convolution_param, layer->blobs(),
+      this->MakeReferenceTop(this->blob_top_2_));
+  top_data = this->blob_top_2_->cpu_data();
+  ref_top_data = this->ref_blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_top_2_->count(); ++i) {
+    EXPECT_NEAR(top_data[i], ref_top_data[i], 1e-4);
+  }
+}
+
+TYPED_TEST(CuDNNConvolutionLayerTest, TestSimpleConvolutionGroupCuDNN) {
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(3);
+  convolution_param->set_group(3);
+  convolution_param->mutable_weight_filler()->set_type("gaussian");
+  convolution_param->mutable_bias_filler()->set_type("constant");
+  convolution_param->mutable_bias_filler()->set_value(0.1);
+  shared_ptr<Layer<TypeParam> > layer(
+      new CuDNNConvolutionLayer<TypeParam>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Check against reference convolution.
+  const TypeParam* top_data;
+  const TypeParam* ref_top_data;
+  caffe_conv(this->blob_bottom_, convolution_param, layer->blobs(),
+      this->MakeReferenceTop(this->blob_top_));
+  top_data = this->blob_top_->cpu_data();
+  ref_top_data = this->ref_blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    EXPECT_NEAR(top_data[i], ref_top_data[i], 1e-4);
+  }
+}
+
+TYPED_TEST(CuDNNConvolutionLayerTest, TestSobelConvolutionCuDNN) {
+  // Test separable convolution by computing the Sobel operator
+  // as a single filter, then comparing the result to the same
+  // operator computed as the convolution of two rectangular filters.
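+  // (Same G_x = [1 2 1]^T * [-1 0 1] factorization as in the CPU test above,
+  // exercised here through the cuDNN convolution layer.)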
+
+  // Fill bottoms with identical Gaussian noise.
+  shared_ptr<GaussianFiller<TypeParam> > filler;
+  FillerParameter filler_param;
+  filler_param.set_value(1.);
+  filler.reset(new GaussianFiller<TypeParam>(filler_param));
+  filler->Fill(this->blob_bottom_);
+  this->blob_bottom_2_->CopyFrom(*this->blob_bottom_);
+  // Compute Sobel G_x operator as 3 x 3 convolution.
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(1);
+  convolution_param->set_bias_term(false);
+  shared_ptr<Layer<TypeParam> > layer(
+      new CuDNNConvolutionLayer<TypeParam>(layer_param));
+  layer->blobs().resize(1);
+  layer->blobs()[0].reset(new Blob<TypeParam>(1, 3, 3, 3));
+  TypeParam* weights = layer->blobs()[0]->mutable_cpu_data();
+  for (int c = 0; c < 3; ++c) {
+    int i = c * 9;  // 3 x 3 filter
+    weights[i +  0] = -1;
+    weights[i +  1] =  0;
+    weights[i +  2] =  1;
+    weights[i +  3] = -2;
+    weights[i +  4] =  0;
+    weights[i +  5] =  2;
+    weights[i +  6] = -1;
+    weights[i +  7] =  0;
+    weights[i +  8] =  1;
+  }
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Compute Sobel G_x operator as separable 3 x 1 and 1 x 3 convolutions.
+  // (1) the [1 2 1] column filter
+  vector<Blob<TypeParam>*> sep_blob_bottom_vec;
+  vector<Blob<TypeParam>*> sep_blob_top_vec;
+  shared_ptr<Blob<TypeParam> > blob_sep(new Blob<TypeParam>());
+  sep_blob_bottom_vec.push_back(this->blob_bottom_2_);
+  sep_blob_top_vec.push_back(this->blob_top_2_);
+  convolution_param->clear_kernel_size();
+  convolution_param->clear_stride();
+  convolution_param->set_kernel_h(3);
+  convolution_param->set_kernel_w(1);
+  convolution_param->set_stride_h(2);
+  convolution_param->set_stride_w(1);
+  convolution_param->set_num_output(1);
+  convolution_param->set_bias_term(false);
+  layer.reset(new CuDNNConvolutionLayer<TypeParam>(layer_param));
+  layer->blobs().resize(1);
+  layer->blobs()[0].reset(new Blob<TypeParam>(1, 3, 3, 1));
+  TypeParam* weights_1 = layer->blobs()[0]->mutable_cpu_data();
+  for (int c = 0; c < 3; ++c) {
+    int i = c * 3;  // 3 x 1 filter
+    weights_1[i +  0] = 1;
+    weights_1[i +  1] = 2;
+    weights_1[i +  2] = 1;
+  }
+  layer->SetUp(sep_blob_bottom_vec, sep_blob_top_vec);
+  layer->Forward(sep_blob_bottom_vec, sep_blob_top_vec);
+  // (2) the [-1 0 1] row filter
+  blob_sep->CopyFrom(*this->blob_top_2_, false, true);
+  sep_blob_bottom_vec.clear();
+  sep_blob_bottom_vec.push_back(blob_sep.get());
+  convolution_param->set_kernel_h(1);
+  convolution_param->set_kernel_w(3);
+  convolution_param->set_stride_h(1);
+  convolution_param->set_stride_w(2);
+  convolution_param->set_num_output(1);
+  convolution_param->set_bias_term(false);
+  layer.reset(new CuDNNConvolutionLayer<TypeParam>(layer_param));
+  layer->blobs().resize(1);
+  layer->blobs()[0].reset(new Blob<TypeParam>(1, 3, 1, 3));
+  TypeParam* weights_2 = layer->blobs()[0]->mutable_cpu_data();
+  for (int c = 0; c < 3; ++c) {
+    int i = c * 3;  // 1 x 3 filter
+    weights_2[i +  0] = -1;
+    weights_2[i +  1] =  0;
+    weights_2[i +  2] =  1;
+  }
+  layer->SetUp(sep_blob_bottom_vec, sep_blob_top_vec);
+  layer->Forward(sep_blob_bottom_vec, sep_blob_top_vec);
+  // Test equivalence of full and separable filters.
+  const TypeParam* top_data = this->blob_top_->cpu_data();
+  const TypeParam* sep_top_data = this->blob_top_2_->cpu_data();
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    EXPECT_NEAR(top_data[i], sep_top_data[i], 1e-4);
+  }
+}
+
+TYPED_TEST(CuDNNConvolutionLayerTest, TestGradientCuDNN) {
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  this->blob_bottom_vec_.push_back(this->blob_bottom_2_);
+  this->blob_top_vec_.push_back(this->blob_top_2_);
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(2);
+  convolution_param->mutable_weight_filler()->set_type("gaussian");
+  convolution_param->mutable_bias_filler()->set_type("gaussian");
+  CuDNNConvolutionLayer<TypeParam> layer(layer_param);
+  GradientChecker<TypeParam> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(CuDNNConvolutionLayerTest, TestGradientGroupCuDNN) {
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(3);
+  convolution_param->set_group(3);
+  convolution_param->mutable_weight_filler()->set_type("gaussian");
+  convolution_param->mutable_bias_filler()->set_type("gaussian");
+  CuDNNConvolutionLayer<TypeParam> layer(layer_param);
+  GradientChecker<TypeParam> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+#endif
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_data/generate_sample_data.py b/src/caffe/test/test_data/generate_sample_data.py
new file mode 100644
index 0000000..ab55726
--- /dev/null
+++ b/src/caffe/test/test_data/generate_sample_data.py
@@ -0,0 +1,53 @@
+"""
+Generate data used in the HDF5DataLayer test.
+"""
+import os
+import numpy as np
+import h5py
+
+script_dir = os.path.dirname(os.path.abspath(__file__))
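+# Output files are written next to this script, so it can be run from any directory.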
+
+num_cols = 8
+num_rows = 10
+height = 6
+width = 5
+total_size = num_cols * num_rows * height * width
+
+data = np.arange(total_size)
+data = data.reshape(num_rows, num_cols, height, width)
+data = data.astype('float32')
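+# The HDF5DataLayer batches over the first axis, so num_rows is the sample
+# (num) dimension and each sample is a num_cols x height x width blob.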
+
+# We had a bug where data was copied into label, but the tests weren't
+# catching it, so let's make label 1-indexed.
+label = 1 + np.arange(num_rows)[:, np.newaxis]
+label = label.astype('float32')
+
+# We add an extra label2 dataset to test the HDF5 layer's ability
+# to handle an arbitrary number of output ("top") Blobs.
+label2 = label + 1
+
+print(data)
+print(label)
+
+with h5py.File(script_dir + '/sample_data.h5', 'w') as f:
+    f['data'] = data
+    f['label'] = label
+    f['label2'] = label2
+
+with h5py.File(script_dir + '/sample_data_2_gzip.h5', 'w') as f:
+    f.create_dataset(
+        'data', data=data + total_size,
+        compression='gzip', compression_opts=1
+    )
+    f.create_dataset(
+        'label', data=label,
+        compression='gzip', compression_opts=1
+    )
+    f.create_dataset(
+        'label2', data=label2,
+        compression='gzip', compression_opts=1
+    )
+
+with open(script_dir + '/sample_data_list.txt', 'w') as f:
+    f.write(script_dir + '/sample_data.h5\n')
+    f.write(script_dir + '/sample_data_2_gzip.h5\n')
diff --git a/src/caffe/test/test_data/sample_data.h5 b/src/caffe/test/test_data/sample_data.h5
new file mode 100644
index 0000000..236e66b
Binary files /dev/null and b/src/caffe/test/test_data/sample_data.h5 differ
diff --git a/src/caffe/test/test_data/sample_data_2_gzip.h5 b/src/caffe/test/test_data/sample_data_2_gzip.h5
new file mode 100644
index 0000000..a138e03
Binary files /dev/null and b/src/caffe/test/test_data/sample_data_2_gzip.h5 differ
diff --git a/src/caffe/test/test_data/sample_data_list.txt b/src/caffe/test/test_data/sample_data_list.txt
new file mode 100644
index 0000000..cdf343f
--- /dev/null
+++ b/src/caffe/test/test_data/sample_data_list.txt
@@ -0,0 +1,2 @@
+src/caffe/test/test_data/sample_data.h5
+src/caffe/test/test_data/sample_data_2_gzip.h5
diff --git a/src/caffe/test/test_data_layer.cpp b/src/caffe/test/test_data_layer.cpp
new file mode 100644
index 0000000..afe2a40
--- /dev/null
+++ b/src/caffe/test/test_data_layer.cpp
@@ -0,0 +1,427 @@
+#include <string>
+#include <vector>
+
+#include "boost/scoped_ptr.hpp"
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/data_layers.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/db.hpp"
+#include "caffe/util/io.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+using boost::scoped_ptr;
+
+template <typename TypeParam>
+class DataLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  DataLayerTest()
+      : backend_(DataParameter_DB_LEVELDB),
+        blob_top_data_(new Blob<Dtype>()),
+        blob_top_label_(new Blob<Dtype>()),
+        seed_(1701) {}
+  virtual void SetUp() {
+    filename_.reset(new string());
+    MakeTempDir(filename_.get());
+    *filename_ += "/db";
+    blob_top_vec_.push_back(blob_top_data_);
+    blob_top_vec_.push_back(blob_top_label_);
+  }
+
+  // Fill the DB with data: if unique_pixels, each pixel is unique but
+  // all images are the same; else each image is unique but all pixels within
+  // an image are the same.
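+  // For example, with unique_pixels == false, image i is a 2 x 3 x 4 datum
+  // filled with the constant value i and labelled i, which TestRead relies on.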
+  void Fill(const bool unique_pixels, DataParameter_DB backend) {
+    backend_ = backend;
+    LOG(INFO) << "Using temporary dataset " << *filename_;
+    scoped_ptr<db::DB> db(db::GetDB(backend));
+    db->Open(*filename_, db::NEW);
+    scoped_ptr<db::Transaction> txn(db->NewTransaction());
+    for (int i = 0; i < 5; ++i) {
+      Datum datum;
+      datum.set_label(i);
+      datum.set_channels(2);
+      datum.set_height(3);
+      datum.set_width(4);
+      std::string* data = datum.mutable_data();
+      for (int j = 0; j < 24; ++j) {
+        int pixel = unique_pixels ? j : i;
+        data->push_back(static_cast<uint8_t>(pixel));
+      }
+      stringstream ss;
+      ss << i;
+      string out;
+      CHECK(datum.SerializeToString(&out));
+      txn->Put(ss.str(), out);
+    }
+    txn->Commit();
+    db->Close();
+  }
+
+  void TestRead() {
+    const Dtype scale = 3;
+    LayerParameter param;
+    param.set_phase(TRAIN);
+    DataParameter* data_param = param.mutable_data_param();
+    data_param->set_batch_size(5);
+    data_param->set_source(filename_->c_str());
+    data_param->set_backend(backend_);
+
+    TransformationParameter* transform_param =
+        param.mutable_transform_param();
+    transform_param->set_scale(scale);
+
+    DataLayer<Dtype> layer(param);
+    layer.SetUp(blob_bottom_vec_, blob_top_vec_);
+    EXPECT_EQ(blob_top_data_->num(), 5);
+    EXPECT_EQ(blob_top_data_->channels(), 2);
+    EXPECT_EQ(blob_top_data_->height(), 3);
+    EXPECT_EQ(blob_top_data_->width(), 4);
+    EXPECT_EQ(blob_top_label_->num(), 5);
+    EXPECT_EQ(blob_top_label_->channels(), 1);
+    EXPECT_EQ(blob_top_label_->height(), 1);
+    EXPECT_EQ(blob_top_label_->width(), 1);
+
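+    // Each image i was filled with the constant value i (see Fill above), so
+    // after scaling every element of image i should equal scale * i.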
+    for (int iter = 0; iter < 100; ++iter) {
+      layer.Forward(blob_bottom_vec_, blob_top_vec_);
+      for (int i = 0; i < 5; ++i) {
+        EXPECT_EQ(i, blob_top_label_->cpu_data()[i]);
+      }
+      for (int i = 0; i < 5; ++i) {
+        for (int j = 0; j < 24; ++j) {
+          EXPECT_EQ(scale * i, blob_top_data_->cpu_data()[i * 24 + j])
+              << "debug: iter " << iter << " i " << i << " j " << j;
+        }
+      }
+    }
+  }
+
+  void TestReshape(DataParameter_DB backend) {
+    const int num_inputs = 5;
+    // Save data of varying shapes.
+    LOG(INFO) << "Using temporary dataset " << *filename_;
+    scoped_ptr<db::DB> db(db::GetDB(backend));
+    db->Open(*filename_, db::NEW);
+    scoped_ptr<db::Transaction> txn(db->NewTransaction());
+    for (int i = 0; i < num_inputs; ++i) {
+      Datum datum;
+      datum.set_label(i);
+      datum.set_channels(2);
+      datum.set_height(i % 2 + 1);
+      datum.set_width(i % 4 + 1);
+      std::string* data = datum.mutable_data();
+      const int data_size = datum.channels() * datum.height() * datum.width();
+      for (int j = 0; j < data_size; ++j) {
+        data->push_back(static_cast<uint8_t>(j));
+      }
+      stringstream ss;
+      ss << i;
+      string out;
+      CHECK(datum.SerializeToString(&out));
+      txn->Put(ss.str(), out);
+    }
+    txn->Commit();
+    db->Close();
+
+    // Load and check data of various shapes.
+    LayerParameter param;
+    param.set_phase(TEST);
+    DataParameter* data_param = param.mutable_data_param();
+    data_param->set_batch_size(1);
+    data_param->set_source(filename_->c_str());
+    data_param->set_backend(backend);
+
+    DataLayer<Dtype> layer(param);
+    layer.SetUp(blob_bottom_vec_, blob_top_vec_);
+    EXPECT_EQ(blob_top_data_->num(), 1);
+    EXPECT_EQ(blob_top_data_->channels(), 2);
+    EXPECT_EQ(blob_top_label_->num(), 1);
+    EXPECT_EQ(blob_top_label_->channels(), 1);
+    EXPECT_EQ(blob_top_label_->height(), 1);
+    EXPECT_EQ(blob_top_label_->width(), 1);
+
+    for (int iter = 0; iter < num_inputs; ++iter) {
+      layer.Forward(blob_bottom_vec_, blob_top_vec_);
+      EXPECT_EQ(blob_top_data_->height(), iter % 2 + 1);
+      EXPECT_EQ(blob_top_data_->width(), iter % 4 + 1);
+      EXPECT_EQ(iter, blob_top_label_->cpu_data()[0]);
+      const int channels = blob_top_data_->channels();
+      const int height = blob_top_data_->height();
+      const int width = blob_top_data_->width();
+      for (int c = 0; c < channels; ++c) {
+        for (int h = 0; h < height; ++h) {
+          for (int w = 0; w < width; ++w) {
+            const int idx = (c * height + h) * width + w;
+            EXPECT_EQ(idx, static_cast<int>(blob_top_data_->cpu_data()[idx]))
+                << "debug: iter " << iter << " c " << c
+                << " h " << h << " w " << w;
+          }
+        }
+      }
+    }
+  }
+
+  void TestReadCrop(Phase phase) {
+    const Dtype scale = 3;
+    LayerParameter param;
+    param.set_phase(phase);
+    Caffe::set_random_seed(1701);
+
+    DataParameter* data_param = param.mutable_data_param();
+    data_param->set_batch_size(5);
+    data_param->set_source(filename_->c_str());
+    data_param->set_backend(backend_);
+
+    TransformationParameter* transform_param =
+        param.mutable_transform_param();
+    transform_param->set_scale(scale);
+    transform_param->set_crop_size(1);
+
+    DataLayer<Dtype> layer(param);
+    layer.SetUp(blob_bottom_vec_, blob_top_vec_);
+    EXPECT_EQ(blob_top_data_->num(), 5);
+    EXPECT_EQ(blob_top_data_->channels(), 2);
+    EXPECT_EQ(blob_top_data_->height(), 1);
+    EXPECT_EQ(blob_top_data_->width(), 1);
+    EXPECT_EQ(blob_top_label_->num(), 5);
+    EXPECT_EQ(blob_top_label_->channels(), 1);
+    EXPECT_EQ(blob_top_label_->height(), 1);
+    EXPECT_EQ(blob_top_label_->width(), 1);
+
+    for (int iter = 0; iter < 2; ++iter) {
+      layer.Forward(blob_bottom_vec_, blob_top_vec_);
+      for (int i = 0; i < 5; ++i) {
+        EXPECT_EQ(i, blob_top_label_->cpu_data()[i]);
+      }
+      int num_with_center_value = 0;
+      for (int i = 0; i < 5; ++i) {
+        for (int j = 0; j < 2; ++j) {
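+          // Pixel values are 0..23 laid out as 2 x 3 x 4; the center of the
+          // 3 x 4 plane is index 5 in channel 0 and 17 in channel 1.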
+          const Dtype center_value = scale * (j ? 17 : 5);
+          num_with_center_value +=
+              (center_value == blob_top_data_->cpu_data()[i * 2 + j]);
+          // At TEST time, check that we always get the center value.
+          if (phase == caffe::TEST) {
+            EXPECT_EQ(center_value, this->blob_top_data_->cpu_data()[i * 2 + j])
+                << "debug: iter " << iter << " i " << i << " j " << j;
+          }
+        }
+      }
+      // At TRAIN time, check that we did not get the center crop all 10 times.
+      // (In a correct implementation this check fails only if every random
+      // crop happens to land on the center, which is vanishingly unlikely;
+      // we call set_random_seed above so the test is deterministic regardless.)
+      if (phase == caffe::TRAIN) {
+        EXPECT_LT(num_with_center_value, 10);
+      }
+    }
+  }
+
+  void TestReadCropTrainSequenceSeeded() {
+    LayerParameter param;
+    param.set_phase(TRAIN);
+    DataParameter* data_param = param.mutable_data_param();
+    data_param->set_batch_size(5);
+    data_param->set_source(filename_->c_str());
+    data_param->set_backend(backend_);
+
+    TransformationParameter* transform_param =
+        param.mutable_transform_param();
+    transform_param->set_crop_size(1);
+    transform_param->set_mirror(true);
+
+    // Get crop sequence with Caffe seed 1701.
+    Caffe::set_random_seed(seed_);
+    vector<vector<Dtype> > crop_sequence;
+    {
+      DataLayer<Dtype> layer1(param);
+      layer1.SetUp(blob_bottom_vec_, blob_top_vec_);
+      for (int iter = 0; iter < 2; ++iter) {
+        layer1.Forward(blob_bottom_vec_, blob_top_vec_);
+        for (int i = 0; i < 5; ++i) {
+          EXPECT_EQ(i, blob_top_label_->cpu_data()[i]);
+        }
+        vector<Dtype> iter_crop_sequence;
+        for (int i = 0; i < 5; ++i) {
+          for (int j = 0; j < 2; ++j) {
+            iter_crop_sequence.push_back(
+                blob_top_data_->cpu_data()[i * 2 + j]);
+          }
+        }
+        crop_sequence.push_back(iter_crop_sequence);
+      }
+    }  // destroy 1st data layer and unlock the db
+
+    // Get crop sequence after reseeding Caffe with 1701.
+    // Check that the sequence is the same as the original.
+    Caffe::set_random_seed(seed_);
+    DataLayer<Dtype> layer2(param);
+    layer2.SetUp(blob_bottom_vec_, blob_top_vec_);
+    for (int iter = 0; iter < 2; ++iter) {
+      layer2.Forward(blob_bottom_vec_, blob_top_vec_);
+      for (int i = 0; i < 5; ++i) {
+        EXPECT_EQ(i, blob_top_label_->cpu_data()[i]);
+      }
+      for (int i = 0; i < 5; ++i) {
+        for (int j = 0; j < 2; ++j) {
+          EXPECT_EQ(crop_sequence[iter][i * 2 + j],
+                    blob_top_data_->cpu_data()[i * 2 + j])
+              << "debug: iter " << iter << " i " << i << " j " << j;
+        }
+      }
+    }
+  }
+
+  void TestReadCropTrainSequenceUnseeded() {
+    LayerParameter param;
+    param.set_phase(TRAIN);
+    DataParameter* data_param = param.mutable_data_param();
+    data_param->set_batch_size(5);
+    data_param->set_source(filename_->c_str());
+    data_param->set_backend(backend_);
+
+    TransformationParameter* transform_param =
+        param.mutable_transform_param();
+    transform_param->set_crop_size(1);
+    transform_param->set_mirror(true);
+
+    // Get crop sequence with Caffe seed 1701, srand seed 1701.
+    Caffe::set_random_seed(seed_);
+    srand(seed_);
+    vector<vector<Dtype> > crop_sequence;
+    {
+      DataLayer<Dtype> layer1(param);
+      layer1.SetUp(blob_bottom_vec_, blob_top_vec_);
+      for (int iter = 0; iter < 2; ++iter) {
+        layer1.Forward(blob_bottom_vec_, blob_top_vec_);
+        for (int i = 0; i < 5; ++i) {
+          EXPECT_EQ(i, blob_top_label_->cpu_data()[i]);
+        }
+        vector<Dtype> iter_crop_sequence;
+        for (int i = 0; i < 5; ++i) {
+          for (int j = 0; j < 2; ++j) {
+            iter_crop_sequence.push_back(
+                blob_top_data_->cpu_data()[i * 2 + j]);
+          }
+        }
+        crop_sequence.push_back(iter_crop_sequence);
+      }
+    }  // destroy 1st data layer and unlock the db
+
+    // Get crop sequence continuing from previous Caffe RNG state; reseed
+    // srand with 1701. Check that the sequence differs from the original.
+    srand(seed_);
+    DataLayer<Dtype> layer2(param);
+    layer2.SetUp(blob_bottom_vec_, blob_top_vec_);
+    for (int iter = 0; iter < 2; ++iter) {
+      layer2.Forward(blob_bottom_vec_, blob_top_vec_);
+      for (int i = 0; i < 5; ++i) {
+        EXPECT_EQ(i, blob_top_label_->cpu_data()[i]);
+      }
+      int num_sequence_matches = 0;
+      for (int i = 0; i < 5; ++i) {
+        for (int j = 0; j < 2; ++j) {
+          num_sequence_matches += (crop_sequence[iter][i * 2 + j] ==
+                                   blob_top_data_->cpu_data()[i * 2 + j]);
+        }
+      }
+      EXPECT_LT(num_sequence_matches, 10);
+    }
+  }
+
+  virtual ~DataLayerTest() { delete blob_top_data_; delete blob_top_label_; }
+
+  DataParameter_DB backend_;
+  shared_ptr<string> filename_;
+  Blob<Dtype>* const blob_top_data_;
+  Blob<Dtype>* const blob_top_label_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+  int seed_;
+};
+
+TYPED_TEST_CASE(DataLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(DataLayerTest, TestReadLevelDB) {
+  const bool unique_pixels = false;  // all pixels the same; images different
+  this->Fill(unique_pixels, DataParameter_DB_LEVELDB);
+  this->TestRead();
+}
+
+TYPED_TEST(DataLayerTest, TestReshapeLevelDB) {
+  this->TestReshape(DataParameter_DB_LEVELDB);
+}
+
+TYPED_TEST(DataLayerTest, TestReadCropTrainLevelDB) {
+  const bool unique_pixels = true;  // all images the same; pixels different
+  this->Fill(unique_pixels, DataParameter_DB_LEVELDB);
+  this->TestReadCrop(TRAIN);
+}
+
+// Test that the sequence of random crops is consistent when using
+// Caffe::set_random_seed.
+TYPED_TEST(DataLayerTest, TestReadCropTrainSequenceSeededLevelDB) {
+  const bool unique_pixels = true;  // all images the same; pixels different
+  this->Fill(unique_pixels, DataParameter_DB_LEVELDB);
+  this->TestReadCropTrainSequenceSeeded();
+}
+
+// Test that the sequence of random crops differs across iterations when
+// Caffe::set_random_seed isn't called (and seeds from srand are ignored).
+TYPED_TEST(DataLayerTest, TestReadCropTrainSequenceUnseededLevelDB) {
+  const bool unique_pixels = true;  // all images the same; pixels different
+  this->Fill(unique_pixels, DataParameter_DB_LEVELDB);
+  this->TestReadCropTrainSequenceUnseeded();
+}
+
+TYPED_TEST(DataLayerTest, TestReadCropTestLevelDB) {
+  const bool unique_pixels = true;  // all images the same; pixels different
+  this->Fill(unique_pixels, DataParameter_DB_LEVELDB);
+  this->TestReadCrop(TEST);
+}
+
+TYPED_TEST(DataLayerTest, TestReadLMDB) {
+  const bool unique_pixels = false;  // all pixels the same; images different
+  this->Fill(unique_pixels, DataParameter_DB_LMDB);
+  this->TestRead();
+}
+
+TYPED_TEST(DataLayerTest, TestReshapeLMDB) {
+  this->TestReshape(DataParameter_DB_LMDB);
+}
+
+TYPED_TEST(DataLayerTest, TestReadCropTrainLMDB) {
+  const bool unique_pixels = true;  // all images the same; pixels different
+  this->Fill(unique_pixels, DataParameter_DB_LMDB);
+  this->TestReadCrop(TRAIN);
+}
+
+// Test that the sequence of random crops is consistent when using
+// Caffe::set_random_seed.
+TYPED_TEST(DataLayerTest, TestReadCropTrainSequenceSeededLMDB) {
+  const bool unique_pixels = true;  // all images the same; pixels different
+  this->Fill(unique_pixels, DataParameter_DB_LMDB);
+  this->TestReadCropTrainSequenceSeeded();
+}
+
+// Test that the sequence of random crops differs across iterations when
+// Caffe::set_random_seed isn't called (and seeds from srand are ignored).
+TYPED_TEST(DataLayerTest, TestReadCropTrainSequenceUnseededLMDB) {
+  const bool unique_pixels = true;  // all images the same; pixels different
+  this->Fill(unique_pixels, DataParameter_DB_LMDB);
+  this->TestReadCropTrainSequenceUnseeded();
+}
+
+TYPED_TEST(DataLayerTest, TestReadCropTestLMDB) {
+  const bool unique_pixels = true;  // all images the same; pixels different
+  this->Fill(unique_pixels, DataParameter_DB_LMDB);
+  this->TestReadCrop(TEST);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_data_transformer.cpp b/src/caffe/test/test_data_transformer.cpp
new file mode 100644
index 0000000..16570e2
--- /dev/null
+++ b/src/caffe/test/test_data_transformer.cpp
@@ -0,0 +1,355 @@
+#include <string>
+#include <vector>
+
+#include "gtest/gtest.h"
+#include "leveldb/db.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/data_transformer.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/io.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+void FillDatum(const int label, const int channels, const int height,
+  const int width, const bool unique_pixels, Datum * datum) {
+  datum->set_label(label);
+  datum->set_channels(channels);
+  datum->set_height(height);
+  datum->set_width(width);
+  int size = channels * height * width;
+  std::string* data = datum->mutable_data();
+  for (int j = 0; j < size; ++j) {
+    int pixel = unique_pixels ? j : label;
+    data->push_back(static_cast<uint8_t>(pixel));
+  }
+}
+
+template <typename Dtype>
+class DataTransformTest : public ::testing::Test {
+ protected:
+  DataTransformTest()
+      : seed_(1701),
+      num_iter_(10) {}
+
+  int NumSequenceMatches(const TransformationParameter transform_param,
+      const Datum& datum, Phase phase) {
+    // Get crop sequence with Caffe seed 1701.
+    DataTransformer<Dtype>* transformer =
+        new DataTransformer<Dtype>(transform_param, phase);
+    const int crop_size = transform_param.crop_size();
+    Caffe::set_random_seed(seed_);
+    transformer->InitRand();
+    Blob<Dtype>* blob =
+        new Blob<Dtype>(1, datum.channels(), datum.height(), datum.width());
+    if (transform_param.crop_size() > 0) {
+      blob->Reshape(1, datum.channels(), crop_size, crop_size);
+    }
+
+    vector<vector<Dtype> > crop_sequence;
+    for (int iter = 0; iter < this->num_iter_; ++iter) {
+      vector<Dtype> iter_crop_sequence;
+      transformer->Transform(datum, blob);
+      for (int j = 0; j < blob->count(); ++j) {
+        iter_crop_sequence.push_back(blob->cpu_data()[j]);
+      }
+      crop_sequence.push_back(iter_crop_sequence);
+    }
+    // Transform again and count how many values match the recorded sequence.
+    int num_sequence_matches = 0;
+    for (int iter = 0; iter < this->num_iter_; ++iter) {
+      vector<Dtype> iter_crop_sequence = crop_sequence[iter];
+      transformer->Transform(datum, blob);
+      for (int j = 0; j < blob->count(); ++j) {
+        num_sequence_matches +=
+            (crop_sequence[iter][j] == blob->cpu_data()[j]);
+      }
+    }
+    return num_sequence_matches;
+  }
+
+  virtual ~DataTransformTest() { }
+
+  int seed_;
+  int num_iter_;
+};
+
+TYPED_TEST_CASE(DataTransformTest, TestDtypes);
+
+TYPED_TEST(DataTransformTest, TestEmptyTransform) {
+  TransformationParameter transform_param;
+  const bool unique_pixels = false;  // all pixels the same, equal to the label
+  const int label = 0;
+  const int channels = 3;
+  const int height = 4;
+  const int width = 5;
+
+  Datum datum;
+  FillDatum(label, channels, height, width, unique_pixels, &datum);
+  Blob<TypeParam>* blob = new Blob<TypeParam>(1, channels, height, width);
+  DataTransformer<TypeParam>* transformer =
+      new DataTransformer<TypeParam>(transform_param, TEST);
+  transformer->InitRand();
+  transformer->Transform(datum, blob);
+  EXPECT_EQ(blob->num(), 1);
+  EXPECT_EQ(blob->channels(), datum.channels());
+  EXPECT_EQ(blob->height(), datum.height());
+  EXPECT_EQ(blob->width(), datum.width());
+  for (int j = 0; j < blob->count(); ++j) {
+    EXPECT_EQ(blob->cpu_data()[j], label);
+  }
+}
+
+TYPED_TEST(DataTransformTest, TestEmptyTransformUniquePixels) {
+  TransformationParameter transform_param;
+  const bool unique_pixels = true;  // pixels are consecutive ints [0,size]
+  const int label = 0;
+  const int channels = 3;
+  const int height = 4;
+  const int width = 5;
+
+  Datum datum;
+  FillDatum(label, channels, height, width, unique_pixels, &datum);
+  Blob<TypeParam>* blob = new Blob<TypeParam>(1, 3, 4, 5);
+  DataTransformer<TypeParam>* transformer =
+      new DataTransformer<TypeParam>(transform_param, TEST);
+  transformer->InitRand();
+  transformer->Transform(datum, blob);
+  EXPECT_EQ(blob->num(), 1);
+  EXPECT_EQ(blob->channels(), datum.channels());
+  EXPECT_EQ(blob->height(), datum.height());
+  EXPECT_EQ(blob->width(), datum.width());
+  for (int j = 0; j < blob->count(); ++j) {
+    EXPECT_EQ(blob->cpu_data()[j], j);
+  }
+}
+
+TYPED_TEST(DataTransformTest, TestCropSize) {
+  TransformationParameter transform_param;
+  const bool unique_pixels = false;  // all pixels the same, equal to the label
+  const int label = 0;
+  const int channels = 3;
+  const int height = 4;
+  const int width = 5;
+  const int crop_size = 2;
+
+  transform_param.set_crop_size(crop_size);
+  Datum datum;
+  FillDatum(label, channels, height, width, unique_pixels, &datum);
+  DataTransformer<TypeParam>* transformer =
+      new DataTransformer<TypeParam>(transform_param, TEST);
+  transformer->InitRand();
+  Blob<TypeParam>* blob =
+      new Blob<TypeParam>(1, channels, crop_size, crop_size);
+  for (int iter = 0; iter < this->num_iter_; ++iter) {
+    transformer->Transform(datum, blob);
+    EXPECT_EQ(blob->num(), 1);
+    EXPECT_EQ(blob->channels(), datum.channels());
+    EXPECT_EQ(blob->height(), crop_size);
+    EXPECT_EQ(blob->width(), crop_size);
+    for (int j = 0; j < blob->count(); ++j) {
+      EXPECT_EQ(blob->cpu_data()[j], label);
+    }
+  }
+}
+
+TYPED_TEST(DataTransformTest, TestCropTrain) {
+  TransformationParameter transform_param;
+  const bool unique_pixels = true;  // pixels are consecutive ints [0,size]
+  const int label = 0;
+  const int channels = 3;
+  const int height = 4;
+  const int width = 5;
+  const int crop_size = 2;
+  const int size = channels * crop_size * crop_size;
+
+  transform_param.set_crop_size(crop_size);
+  Datum datum;
+  FillDatum(label, channels, height, width, unique_pixels, &datum);
+  int num_matches = this->NumSequenceMatches(transform_param, datum, TRAIN);
+  EXPECT_LT(num_matches, size * this->num_iter_);
+}
+
+TYPED_TEST(DataTransformTest, TestCropTest) {
+  TransformationParameter transform_param;
+  const bool unique_pixels = true;  // pixels are consecutive ints [0,size]
+  const int label = 0;
+  const int channels = 3;
+  const int height = 4;
+  const int width = 5;
+  const int crop_size = 2;
+  const int size = channels * crop_size * crop_size;
+
+  transform_param.set_crop_size(crop_size);
+  Datum datum;
+  FillDatum(label, channels, height, width, unique_pixels, &datum);
+  int num_matches = this->NumSequenceMatches(transform_param, datum, TEST);
+  EXPECT_EQ(num_matches, size * this->num_iter_);
+}
+
+TYPED_TEST(DataTransformTest, TestMirrorTrain) {
+  TransformationParameter transform_param;
+  const bool unique_pixels = true;  // pixels are consecutive ints [0,size]
+  const int label = 0;
+  const int channels = 3;
+  const int height = 4;
+  const int width = 5;
+  const int size = channels * height * width;
+
+  transform_param.set_mirror(true);
+  Datum datum;
+  FillDatum(label, channels, height, width, unique_pixels, &datum);
+  int num_matches = this->NumSequenceMatches(transform_param, datum, TRAIN);
+  EXPECT_LT(num_matches, size * this->num_iter_);
+}
+
+TYPED_TEST(DataTransformTest, TestMirrorTest) {
+  TransformationParameter transform_param;
+  const bool unique_pixels = true;  // pixels are consecutive ints [0,size]
+  const int label = 0;
+  const int channels = 3;
+  const int height = 4;
+  const int width = 5;
+  const int size = channels * height * width;
+
+  transform_param.set_mirror(true);
+  Datum datum;
+  FillDatum(label, channels, height, width, unique_pixels, &datum);
+  int num_matches = this->NumSequenceMatches(transform_param, datum, TEST);
+  EXPECT_LT(num_matches, size * this->num_iter_);
+}
+
+TYPED_TEST(DataTransformTest, TestCropMirrorTrain) {
+  TransformationParameter transform_param;
+  const bool unique_pixels = true;  // pixels are consecutive ints [0,size]
+  const int label = 0;
+  const int channels = 3;
+  const int height = 4;
+  const int width = 5;
+  const int crop_size = 2;
+
+  Datum datum;
+  FillDatum(label, channels, height, width, unique_pixels, &datum);
+  transform_param.set_crop_size(crop_size);
+  int num_matches_crop = this->NumSequenceMatches(
+      transform_param, datum, TRAIN);
+
+  transform_param.set_mirror(true);
+  int num_matches_crop_mirror =
+      this->NumSequenceMatches(transform_param, datum, TRAIN);
+  // With crop and mirror together we expect at most as many matches as with crop alone
+  EXPECT_LE(num_matches_crop_mirror, num_matches_crop);
+}
+
+TYPED_TEST(DataTransformTest, TestCropMirrorTest) {
+  TransformationParameter transform_param;
+  const bool unique_pixels = true;  // pixels are consecutive ints [0,size]
+  const int label = 0;
+  const int channels = 3;
+  const int height = 4;
+  const int width = 5;
+  const int crop_size = 2;
+
+  Datum datum;
+  FillDatum(label, channels, height, width, unique_pixels, &datum);
+  transform_param.set_crop_size(crop_size);
+  int num_matches_crop = this->NumSequenceMatches(transform_param, datum, TEST);
+
+  transform_param.set_mirror(true);
+  int num_matches_crop_mirror =
+      this->NumSequenceMatches(transform_param, datum, TEST);
+  // With crop and mirror together we expect fewer matches than with crop alone
+  EXPECT_LT(num_matches_crop_mirror, num_matches_crop);
+}
+
+
+TYPED_TEST(DataTransformTest, TestMeanValue) {
+  TransformationParameter transform_param;
+  const bool unique_pixels = false;  // pixels are equal to label
+  const int label = 0;
+  const int channels = 3;
+  const int height = 4;
+  const int width = 5;
+  const int mean_value = 2;
+
+  transform_param.add_mean_value(mean_value);
+  Datum datum;
+  FillDatum(label, channels, height, width, unique_pixels, &datum);
+  Blob<TypeParam>* blob = new Blob<TypeParam>(1, channels, height, width);
+  DataTransformer<TypeParam>* transformer =
+      new DataTransformer<TypeParam>(transform_param, TEST);
+  transformer->InitRand();
+  transformer->Transform(datum, blob);
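+  // Every pixel equals the label (0), so after mean subtraction each value
+  // should be label - mean_value = -2.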
+  for (int j = 0; j < blob->count(); ++j) {
+    EXPECT_EQ(blob->cpu_data()[j], label - mean_value);
+  }
+}
+
+TYPED_TEST(DataTransformTest, TestMeanValues) {
+  TransformationParameter transform_param;
+  const bool unique_pixels = false;  // pixels are equal to label
+  const int label = 0;
+  const int channels = 3;
+  const int height = 4;
+  const int width = 5;
+
+  transform_param.add_mean_value(0);
+  transform_param.add_mean_value(1);
+  transform_param.add_mean_value(2);
+  Datum datum;
+  FillDatum(label, channels, height, width, unique_pixels, &datum);
+  Blob<TypeParam>* blob = new Blob<TypeParam>(1, channels, height, width);
+  DataTransformer<TypeParam>* transformer =
+      new DataTransformer<TypeParam>(transform_param, TEST);
+  transformer->InitRand();
+  transformer->Transform(datum, blob);
+  for (int c = 0; c < channels; ++c) {
+    for (int j = 0; j < height * width; ++j) {
+      EXPECT_EQ(blob->cpu_data()[blob->offset(0, c) + j], label - c);
+    }
+  }
+}
+
+TYPED_TEST(DataTransformTest, TestMeanFile) {
+  TransformationParameter transform_param;
+  const bool unique_pixels = true;  // pixels are consecutive ints [0,size]
+  const int label = 0;
+  const int channels = 3;
+  const int height = 4;
+  const int width = 5;
+  const int size = channels * height * width;
+
+  // Create a mean file
+  string* mean_file = new string();
+  MakeTempFilename(mean_file);
+  BlobProto blob_mean;
+  blob_mean.set_num(1);
+  blob_mean.set_channels(channels);
+  blob_mean.set_height(height);
+  blob_mean.set_width(width);
+
+  for (int j = 0; j < size; ++j) {
+      blob_mean.add_data(j);
+  }
+
+  LOG(INFO) << "Using temporary mean_file " << *mean_file;
+  WriteProtoToBinaryFile(blob_mean, *mean_file);
+
+  transform_param.set_mean_file(*mean_file);
+  Datum datum;
+  FillDatum(label, channels, height, width, unique_pixels, &datum);
+  Blob<TypeParam>* blob = new Blob<TypeParam>(1, channels, height, width);
+  DataTransformer<TypeParam>* transformer =
+      new DataTransformer<TypeParam>(transform_param, TEST);
+  transformer->InitRand();
+  transformer->Transform(datum, blob);
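+  // The datum pixels are the consecutive ints 0..size-1 and the mean file
+  // stores the same values, so after mean subtraction every entry is 0.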
+  for (int j = 0; j < blob->count(); ++j) {
+      EXPECT_EQ(blob->cpu_data()[j], 0);
+  }
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_db.cpp b/src/caffe/test/test_db.cpp
new file mode 100644
index 0000000..5b2ac23
--- /dev/null
+++ b/src/caffe/test/test_db.cpp
@@ -0,0 +1,134 @@
+#include <string>
+
+#include "boost/scoped_ptr.hpp"
+#include "gtest/gtest.h"
+
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/db.hpp"
+#include "caffe/util/io.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+using boost::scoped_ptr;
+
+template <typename TypeParam>
+class DBTest : public ::testing::Test {
+ protected:
+  DBTest()
+      : backend_(TypeParam::backend),
+      root_images_(string(EXAMPLES_SOURCE_DIR) + string("images/")) {}
+
+  virtual void SetUp() {
+    MakeTempDir(&source_);
+    source_ += "/db";
+    string keys[] = {"cat.jpg", "fish-bike.jpg"};
+    LOG(INFO) << "Using temporary db " << source_;
+    scoped_ptr<db::DB> db(db::GetDB(TypeParam::backend));
+    db->Open(this->source_, db::NEW);
+    scoped_ptr<db::Transaction> txn(db->NewTransaction());
+    for (int i = 0; i < 2; ++i) {
+      Datum datum;
+      ReadImageToDatum(root_images_ + keys[i], i, &datum);
+      string out;
+      CHECK(datum.SerializeToString(&out));
+      txn->Put(keys[i], out);
+    }
+    txn->Commit();
+  }
+
+  virtual ~DBTest() { }
+
+  DataParameter_DB backend_;
+  string source_;
+  string root_images_;
+};
+
+struct TypeLevelDB {
+  static DataParameter_DB backend;
+};
+DataParameter_DB TypeLevelDB::backend = DataParameter_DB_LEVELDB;
+
+struct TypeLMDB {
+  static DataParameter_DB backend;
+};
+DataParameter_DB TypeLMDB::backend = DataParameter_DB_LMDB;
+
+typedef ::testing::Types<TypeLevelDB, TypeLMDB> TestTypes;
+
+TYPED_TEST_CASE(DBTest, TestTypes);
+
+TYPED_TEST(DBTest, TestGetDB) {
+  scoped_ptr<db::DB> db(db::GetDB(TypeParam::backend));
+}
+
+TYPED_TEST(DBTest, TestNext) {
+  scoped_ptr<db::DB> db(db::GetDB(TypeParam::backend));
+  db->Open(this->source_, db::READ);
+  scoped_ptr<db::Cursor> cursor(db->NewCursor());
+  EXPECT_TRUE(cursor->valid());
+  cursor->Next();
+  EXPECT_TRUE(cursor->valid());
+  cursor->Next();
+  EXPECT_FALSE(cursor->valid());
+}
+
+TYPED_TEST(DBTest, TestSeekToFirst) {
+  scoped_ptr<db::DB> db(db::GetDB(TypeParam::backend));
+  db->Open(this->source_, db::READ);
+  scoped_ptr<db::Cursor> cursor(db->NewCursor());
+  cursor->Next();
+  cursor->SeekToFirst();
+  EXPECT_TRUE(cursor->valid());
+  string key = cursor->key();
+  Datum datum;
+  datum.ParseFromString(cursor->value());
+  EXPECT_EQ(key, "cat.jpg");
+  EXPECT_EQ(datum.channels(), 3);
+  EXPECT_EQ(datum.height(), 360);
+  EXPECT_EQ(datum.width(), 480);
+}
+
+TYPED_TEST(DBTest, TestKeyValue) {
+  scoped_ptr<db::DB> db(db::GetDB(TypeParam::backend));
+  db->Open(this->source_, db::READ);
+  scoped_ptr<db::Cursor> cursor(db->NewCursor());
+  EXPECT_TRUE(cursor->valid());
+  string key = cursor->key();
+  Datum datum;
+  datum.ParseFromString(cursor->value());
+  EXPECT_EQ(key, "cat.jpg");
+  EXPECT_EQ(datum.channels(), 3);
+  EXPECT_EQ(datum.height(), 360);
+  EXPECT_EQ(datum.width(), 480);
+  cursor->Next();
+  EXPECT_TRUE(cursor->valid());
+  key = cursor->key();
+  datum.ParseFromString(cursor->value());
+  EXPECT_EQ(key, "fish-bike.jpg");
+  EXPECT_EQ(datum.channels(), 3);
+  EXPECT_EQ(datum.height(), 323);
+  EXPECT_EQ(datum.width(), 481);
+  cursor->Next();
+  EXPECT_FALSE(cursor->valid());
+}
+
+TYPED_TEST(DBTest, TestWrite) {
+  scoped_ptr<db::DB> db(db::GetDB(TypeParam::backend));
+  db->Open(this->source_, db::WRITE);
+  scoped_ptr<db::Transaction> txn(db->NewTransaction());
+  Datum datum;
+  ReadFileToDatum(this->root_images_ + "cat.jpg", 0, &datum);
+  string out;
+  CHECK(datum.SerializeToString(&out));
+  txn->Put("cat.jpg", out);
+  ReadFileToDatum(this->root_images_ + "fish-bike.jpg", 1, &datum);
+  CHECK(datum.SerializeToString(&out));
+  txn->Put("fish-bike.jpg", out);
+  txn->Commit();
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_deconvolution_layer.cpp b/src/caffe/test/test_deconvolution_layer.cpp
new file mode 100644
index 0000000..fc63d5e
--- /dev/null
+++ b/src/caffe/test/test_deconvolution_layer.cpp
@@ -0,0 +1,158 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+// Since ConvolutionLayerTest checks the shared conv/deconv code in detail,
+// we'll just do a simple forward test and a gradient check.
+template <typename TypeParam>
+class DeconvolutionLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  DeconvolutionLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 6, 4)),
+        blob_bottom_2_(new Blob<Dtype>(2, 3, 6, 4)),
+        blob_top_(new Blob<Dtype>()),
+        blob_top_2_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    // fill the values
+    FillerParameter filler_param;
+    filler_param.set_value(1.);
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    filler.Fill(this->blob_bottom_2_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+
+  virtual ~DeconvolutionLayerTest() {
+    delete blob_bottom_;
+    delete blob_bottom_2_;
+    delete blob_top_;
+    delete blob_top_2_;
+  }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_bottom_2_;
+  Blob<Dtype>* const blob_top_;
+  Blob<Dtype>* const blob_top_2_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(DeconvolutionLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(DeconvolutionLayerTest, TestSetup) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(4);
+  this->blob_bottom_vec_.push_back(this->blob_bottom_2_);
+  this->blob_top_vec_.push_back(this->blob_top_2_);
+  shared_ptr<Layer<Dtype> > layer(
+      new DeconvolutionLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
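+  // With no padding the deconvolution output size is (input - 1) * stride +
+  // kernel, so the 6x4 bottoms with kernel 3 and stride 2 give 13x9 tops.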
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 4);
+  EXPECT_EQ(this->blob_top_->height(), 13);
+  EXPECT_EQ(this->blob_top_->width(), 9);
+  EXPECT_EQ(this->blob_top_2_->num(), 2);
+  EXPECT_EQ(this->blob_top_2_->channels(), 4);
+  EXPECT_EQ(this->blob_top_2_->height(), 13);
+  EXPECT_EQ(this->blob_top_2_->width(), 9);
+  // setting group should not change the shape
+  convolution_param->set_num_output(3);
+  convolution_param->set_group(3);
+  layer.reset(new DeconvolutionLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 3);
+  EXPECT_EQ(this->blob_top_->height(), 13);
+  EXPECT_EQ(this->blob_top_->width(), 9);
+  EXPECT_EQ(this->blob_top_2_->num(), 2);
+  EXPECT_EQ(this->blob_top_2_->channels(), 3);
+  EXPECT_EQ(this->blob_top_2_->height(), 13);
+  EXPECT_EQ(this->blob_top_2_->width(), 9);
+}
+
+TYPED_TEST(DeconvolutionLayerTest, TestSimpleDeconvolution) {
+  typedef typename TypeParam::Dtype Dtype;
+  this->blob_bottom_vec_.push_back(this->blob_bottom_2_);
+  this->blob_top_vec_.push_back(this->blob_top_2_);
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  convolution_param->set_num_output(4);
+  convolution_param->mutable_weight_filler()->set_type("constant");
+  convolution_param->mutable_weight_filler()->set_value(1);
+  convolution_param->mutable_bias_filler()->set_type("constant");
+  convolution_param->mutable_bias_filler()->set_value(0.1);
+  shared_ptr<Layer<Dtype> > layer(
+      new DeconvolutionLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  // constant-fill the bottom blobs
+  FillerParameter filler_param;
+  filler_param.set_value(1.);
+  ConstantFiller<Dtype> filler(filler_param);
+  filler.Fill(this->blob_bottom_);
+  filler.Fill(this->blob_bottom_2_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // simply check that accumulation works with overlapping filters
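+  // With 3 input channels, constant weights of 1 and bias 0.1, an output
+  // pixel covered by a single kernel placement sums to 3 * 1 + 0.1 = 3.1.
+  // Where adjacent placements overlap along one axis it gains one extra
+  // placement (+3); where they overlap along both axes it gains three (+9).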
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  for (int n = 0; n < this->blob_top_->num(); ++n) {
+    for (int c = 0; c < this->blob_top_->channels(); ++c) {
+      for (int h = 0; h < this->blob_top_->height(); ++h) {
+        for (int w = 0; w < this->blob_top_->width(); ++w) {
+          Dtype expected = 3.1;
+          bool h_overlap = h % 2 == 0 && h > 0
+            && h < this->blob_top_->height() - 1;
+          bool w_overlap = w % 2 == 0 && w > 0
+            && w < this->blob_top_->width() - 1;
+          if (h_overlap && w_overlap) {
+            expected += 9;
+          } else if (h_overlap || w_overlap) {
+            expected += 3;
+          }
+          EXPECT_NEAR(top_data[this->blob_top_->offset(n, c, h, w)],
+              expected, 1e-4);
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST(DeconvolutionLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  this->blob_bottom_vec_.push_back(this->blob_bottom_2_);
+  this->blob_top_vec_.push_back(this->blob_top_2_);
+  convolution_param->set_kernel_size(2);
+  convolution_param->set_stride(1);
+  convolution_param->set_num_output(1);
+  convolution_param->mutable_weight_filler()->set_type("gaussian");
+  convolution_param->mutable_bias_filler()->set_type("gaussian");
+  DeconvolutionLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_dummy_data_layer.cpp b/src/caffe/test/test_dummy_data_layer.cpp
new file mode 100644
index 0000000..c9ed38d
--- /dev/null
+++ b/src/caffe/test/test_dummy_data_layer.cpp
@@ -0,0 +1,193 @@
+#include <string>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+class DummyDataLayerTest : public CPUDeviceTest<Dtype> {
+ protected:
+  DummyDataLayerTest()
+      : blob_top_a_(new Blob<Dtype>()),
+        blob_top_b_(new Blob<Dtype>()),
+        blob_top_c_(new Blob<Dtype>()) {}
+
+  virtual void SetUp() {
+    blob_bottom_vec_.clear();
+    blob_top_vec_.clear();
+    blob_top_vec_.push_back(blob_top_a_);
+    blob_top_vec_.push_back(blob_top_b_);
+    blob_top_vec_.push_back(blob_top_c_);
+  }
+
+  virtual ~DummyDataLayerTest() {
+    delete blob_top_a_;
+    delete blob_top_b_;
+    delete blob_top_c_;
+  }
+
+  Blob<Dtype>* const blob_top_a_;
+  Blob<Dtype>* const blob_top_b_;
+  Blob<Dtype>* const blob_top_c_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(DummyDataLayerTest, TestDtypes);
+
+TYPED_TEST(DummyDataLayerTest, TestOneTopConstant) {
+  LayerParameter param;
+  DummyDataParameter* dummy_data_param = param.mutable_dummy_data_param();
+  dummy_data_param->add_num(5);
+  dummy_data_param->add_channels(3);
+  dummy_data_param->add_height(2);
+  dummy_data_param->add_width(4);
+  this->blob_top_vec_.resize(1);
+  DummyDataLayer<TypeParam> layer(param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_a_->num(), 5);
+  EXPECT_EQ(this->blob_top_a_->channels(), 3);
+  EXPECT_EQ(this->blob_top_a_->height(), 2);
+  EXPECT_EQ(this->blob_top_a_->width(), 4);
+  EXPECT_EQ(this->blob_top_b_->count(), 0);
+  EXPECT_EQ(this->blob_top_c_->count(), 0);
+  for (int i = 0; i < this->blob_top_vec_.size(); ++i) {
+    for (int j = 0; j < this->blob_top_vec_[i]->count(); ++j) {
+      EXPECT_EQ(0, this->blob_top_vec_[i]->cpu_data()[j]);
+    }
+  }
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_top_vec_.size(); ++i) {
+    for (int j = 0; j < this->blob_top_vec_[i]->count(); ++j) {
+      EXPECT_EQ(0, this->blob_top_vec_[i]->cpu_data()[j]);
+    }
+  }
+}
+
+TYPED_TEST(DummyDataLayerTest, TestTwoTopConstant) {
+  LayerParameter param;
+  DummyDataParameter* dummy_data_param = param.mutable_dummy_data_param();
+  dummy_data_param->add_num(5);
+  dummy_data_param->add_channels(3);
+  dummy_data_param->add_height(2);
+  dummy_data_param->add_width(4);
+  dummy_data_param->add_num(5);
+  // Don't explicitly set the number of channels or the width for the 2nd top
+  // blob; they should default to the first blob's channels and width (as we
+  // check later).
+  dummy_data_param->add_height(1);
+  FillerParameter* data_filler_param = dummy_data_param->add_data_filler();
+  data_filler_param->set_value(7);
+  this->blob_top_vec_.resize(2);
+  DummyDataLayer<TypeParam> layer(param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_a_->num(), 5);
+  EXPECT_EQ(this->blob_top_a_->channels(), 3);
+  EXPECT_EQ(this->blob_top_a_->height(), 2);
+  EXPECT_EQ(this->blob_top_a_->width(), 4);
+  EXPECT_EQ(this->blob_top_b_->num(), 5);
+  EXPECT_EQ(this->blob_top_b_->channels(), 3);
+  EXPECT_EQ(this->blob_top_b_->height(), 1);
+  EXPECT_EQ(this->blob_top_b_->width(), 4);
+  EXPECT_EQ(this->blob_top_c_->count(), 0);
+  for (int i = 0; i < this->blob_top_vec_.size(); ++i) {
+    for (int j = 0; j < this->blob_top_vec_[i]->count(); ++j) {
+      EXPECT_EQ(7, this->blob_top_vec_[i]->cpu_data()[j]);
+    }
+  }
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_top_vec_.size(); ++i) {
+    for (int j = 0; j < this->blob_top_vec_[i]->count(); ++j) {
+      EXPECT_EQ(7, this->blob_top_vec_[i]->cpu_data()[j]);
+    }
+  }
+}
+
+TYPED_TEST(DummyDataLayerTest, TestThreeTopConstantGaussianConstant) {
+  LayerParameter param;
+  DummyDataParameter* dummy_data_param = param.mutable_dummy_data_param();
+  dummy_data_param->add_num(5);
+  dummy_data_param->add_channels(3);
+  dummy_data_param->add_height(2);
+  dummy_data_param->add_width(4);
+  FillerParameter* data_filler_param_a = dummy_data_param->add_data_filler();
+  data_filler_param_a->set_value(7);
+  FillerParameter* data_filler_param_b = dummy_data_param->add_data_filler();
+  data_filler_param_b->set_type("gaussian");
+  TypeParam gaussian_mean = 3.0;
+  TypeParam gaussian_std = 0.01;
+  data_filler_param_b->set_mean(gaussian_mean);
+  data_filler_param_b->set_std(gaussian_std);
+  FillerParameter* data_filler_param_c = dummy_data_param->add_data_filler();
+  data_filler_param_c->set_value(9);
+  DummyDataLayer<TypeParam> layer(param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_a_->num(), 5);
+  EXPECT_EQ(this->blob_top_a_->channels(), 3);
+  EXPECT_EQ(this->blob_top_a_->height(), 2);
+  EXPECT_EQ(this->blob_top_a_->width(), 4);
+  EXPECT_EQ(this->blob_top_b_->num(), 5);
+  EXPECT_EQ(this->blob_top_b_->channels(), 3);
+  EXPECT_EQ(this->blob_top_b_->height(), 2);
+  EXPECT_EQ(this->blob_top_b_->width(), 4);
+  EXPECT_EQ(this->blob_top_c_->num(), 5);
+  EXPECT_EQ(this->blob_top_c_->channels(), 3);
+  EXPECT_EQ(this->blob_top_c_->height(), 2);
+  EXPECT_EQ(this->blob_top_c_->width(), 4);
+  for (int i = 0; i < this->blob_top_a_->count(); ++i) {
+    EXPECT_EQ(7, this->blob_top_a_->cpu_data()[i]);
+  }
+  // Blob b uses a Gaussian filler, so SetUp should not have initialized it.
+  // Blob b's data should therefore be the default Blob data value: 0.
+  for (int i = 0; i < this->blob_top_b_->count(); ++i) {
+    EXPECT_EQ(0, this->blob_top_b_->cpu_data()[i]);
+  }
+  for (int i = 0; i < this->blob_top_c_->count(); ++i) {
+    EXPECT_EQ(9, this->blob_top_c_->cpu_data()[i]);
+  }
+
+  // Do a Forward pass to fill in Blob b with Gaussian data.
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_top_a_->count(); ++i) {
+    EXPECT_EQ(7, this->blob_top_a_->cpu_data()[i]);
+  }
+  // Check that the Gaussian-filled data lies within 10 standard deviations
+  // of the mean. Record the first and last samples to check that they're
+  // different after the next Forward pass.
+  for (int i = 0; i < this->blob_top_b_->count(); ++i) {
+    EXPECT_NEAR(gaussian_mean, this->blob_top_b_->cpu_data()[i],
+                gaussian_std * 10);
+  }
+  const TypeParam first_gaussian_sample = this->blob_top_b_->cpu_data()[0];
+  const TypeParam last_gaussian_sample =
+      this->blob_top_b_->cpu_data()[this->blob_top_b_->count() - 1];
+  for (int i = 0; i < this->blob_top_c_->count(); ++i) {
+    EXPECT_EQ(9, this->blob_top_c_->cpu_data()[i]);
+  }
+
+  // Do another Forward pass to fill in Blob b with Gaussian data again,
+  // checking that we get different values.
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_top_a_->count(); ++i) {
+    EXPECT_EQ(7, this->blob_top_a_->cpu_data()[i]);
+  }
+  for (int i = 0; i < this->blob_top_b_->count(); ++i) {
+    EXPECT_NEAR(gaussian_mean, this->blob_top_b_->cpu_data()[i],
+                gaussian_std * 10);
+  }
+  EXPECT_NE(first_gaussian_sample, this->blob_top_b_->cpu_data()[0]);
+  EXPECT_NE(last_gaussian_sample,
+      this->blob_top_b_->cpu_data()[this->blob_top_b_->count() - 1]);
+  for (int i = 0; i < this->blob_top_c_->count(); ++i) {
+    EXPECT_EQ(9, this->blob_top_c_->cpu_data()[i]);
+  }
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_eltwise_layer.cpp b/src/caffe/test/test_eltwise_layer.cpp
new file mode 100644
index 0000000..be0c134
--- /dev/null
+++ b/src/caffe/test/test_eltwise_layer.cpp
@@ -0,0 +1,209 @@
+#include <algorithm>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class EltwiseLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  EltwiseLayerTest()
+      : blob_bottom_a_(new Blob<Dtype>(2, 3, 4, 5)),
+        blob_bottom_b_(new Blob<Dtype>(2, 3, 4, 5)),
+        blob_bottom_c_(new Blob<Dtype>(2, 3, 4, 5)),
+        blob_top_(new Blob<Dtype>()) {
+    // fill the values
+    Caffe::set_random_seed(1701);
+    FillerParameter filler_param;
+    UniformFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_a_);
+    filler.Fill(this->blob_bottom_b_);
+    filler.Fill(this->blob_bottom_c_);
+    blob_bottom_vec_.push_back(blob_bottom_a_);
+    blob_bottom_vec_.push_back(blob_bottom_b_);
+    blob_bottom_vec_.push_back(blob_bottom_c_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~EltwiseLayerTest() {
+    delete blob_bottom_a_;
+    delete blob_bottom_b_;
+    delete blob_bottom_c_;
+    delete blob_top_;
+  }
+  Blob<Dtype>* const blob_bottom_a_;
+  Blob<Dtype>* const blob_bottom_b_;
+  Blob<Dtype>* const blob_bottom_c_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(EltwiseLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(EltwiseLayerTest, TestSetUp) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  EltwiseParameter* eltwise_param = layer_param.mutable_eltwise_param();
+  eltwise_param->set_operation(EltwiseParameter_EltwiseOp_PROD);
+  shared_ptr<EltwiseLayer<Dtype> > layer(
+      new EltwiseLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 3);
+  EXPECT_EQ(this->blob_top_->height(), 4);
+  EXPECT_EQ(this->blob_top_->width(), 5);
+}
+
+TYPED_TEST(EltwiseLayerTest, TestProd) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  EltwiseParameter* eltwise_param = layer_param.mutable_eltwise_param();
+  eltwise_param->set_operation(EltwiseParameter_EltwiseOp_PROD);
+  shared_ptr<EltwiseLayer<Dtype> > layer(
+      new EltwiseLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  const Dtype* data = this->blob_top_->cpu_data();
+  const int count = this->blob_top_->count();
+  const Dtype* in_data_a = this->blob_bottom_a_->cpu_data();
+  const Dtype* in_data_b = this->blob_bottom_b_->cpu_data();
+  const Dtype* in_data_c = this->blob_bottom_c_->cpu_data();
+  for (int i = 0; i < count; ++i) {
+    EXPECT_EQ(data[i], in_data_a[i] * in_data_b[i] * in_data_c[i]);
+  }
+}
+
+TYPED_TEST(EltwiseLayerTest, TestSum) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  EltwiseParameter* eltwise_param = layer_param.mutable_eltwise_param();
+  eltwise_param->set_operation(EltwiseParameter_EltwiseOp_SUM);
+  shared_ptr<EltwiseLayer<Dtype> > layer(
+      new EltwiseLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  const Dtype* data = this->blob_top_->cpu_data();
+  const int count = this->blob_top_->count();
+  const Dtype* in_data_a = this->blob_bottom_a_->cpu_data();
+  const Dtype* in_data_b = this->blob_bottom_b_->cpu_data();
+  const Dtype* in_data_c = this->blob_bottom_c_->cpu_data();
+  for (int i = 0; i < count; ++i) {
+    EXPECT_EQ(data[i], in_data_a[i] + in_data_b[i] + in_data_c[i]);
+  }
+}
+
+TYPED_TEST(EltwiseLayerTest, TestSumCoeff) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  EltwiseParameter* eltwise_param = layer_param.mutable_eltwise_param();
+  eltwise_param->set_operation(EltwiseParameter_EltwiseOp_SUM);
+  eltwise_param->add_coeff(1);
+  eltwise_param->add_coeff(-0.5);
+  eltwise_param->add_coeff(2);
+  shared_ptr<EltwiseLayer<Dtype> > layer(
+      new EltwiseLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  const Dtype* data = this->blob_top_->cpu_data();
+  const int count = this->blob_top_->count();
+  const Dtype* in_data_a = this->blob_bottom_a_->cpu_data();
+  const Dtype* in_data_b = this->blob_bottom_b_->cpu_data();
+  const Dtype* in_data_c = this->blob_bottom_c_->cpu_data();
+  for (int i = 0; i < count; ++i) {
+    EXPECT_NEAR(data[i], in_data_a[i] - 0.5*in_data_b[i] + 2*in_data_c[i],
+        1e-4);
+  }
+}
+
+TYPED_TEST(EltwiseLayerTest, TestStableProdGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  EltwiseParameter* eltwise_param = layer_param.mutable_eltwise_param();
+  eltwise_param->set_operation(EltwiseParameter_EltwiseOp_PROD);
+  eltwise_param->set_stable_prod_grad(true);
+  EltwiseLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(EltwiseLayerTest, TestUnstableProdGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  EltwiseParameter* eltwise_param = layer_param.mutable_eltwise_param();
+  eltwise_param->set_operation(EltwiseParameter_EltwiseOp_PROD);
+  eltwise_param->set_stable_prod_grad(false);
+  EltwiseLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(EltwiseLayerTest, TestSumGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  EltwiseParameter* eltwise_param = layer_param.mutable_eltwise_param();
+  eltwise_param->set_operation(EltwiseParameter_EltwiseOp_SUM);
+  EltwiseLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(EltwiseLayerTest, TestSumCoeffGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  EltwiseParameter* eltwise_param = layer_param.mutable_eltwise_param();
+  eltwise_param->set_operation(EltwiseParameter_EltwiseOp_SUM);
+  eltwise_param->add_coeff(1);
+  eltwise_param->add_coeff(-0.5);
+  eltwise_param->add_coeff(2);
+  EltwiseLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(EltwiseLayerTest, TestMax) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  EltwiseParameter* eltwise_param = layer_param.mutable_eltwise_param();
+  eltwise_param->set_operation(EltwiseParameter_EltwiseOp_MAX);
+  shared_ptr<EltwiseLayer<Dtype> > layer(
+      new EltwiseLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  const Dtype* data = this->blob_top_->cpu_data();
+  const int count = this->blob_top_->count();
+  const Dtype* in_data_a = this->blob_bottom_a_->cpu_data();
+  const Dtype* in_data_b = this->blob_bottom_b_->cpu_data();
+  const Dtype* in_data_c = this->blob_bottom_c_->cpu_data();
+  for (int i = 0; i < count; ++i) {
+    EXPECT_EQ(data[i],
+              std::max(in_data_a[i], std::max(in_data_b[i], in_data_c[i])));
+  }
+}
+
+TYPED_TEST(EltwiseLayerTest, TestMaxGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  EltwiseParameter* eltwise_param = layer_param.mutable_eltwise_param();
+  eltwise_param->set_operation(EltwiseParameter_EltwiseOp_MAX);
+  EltwiseLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-4, 1e-3);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_euclidean_loss_layer.cpp b/src/caffe/test/test_euclidean_loss_layer.cpp
new file mode 100644
index 0000000..1949742
--- /dev/null
+++ b/src/caffe/test/test_euclidean_loss_layer.cpp
@@ -0,0 +1,91 @@
+#include <cmath>
+#include <cstdlib>
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class EuclideanLossLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  EuclideanLossLayerTest()
+      : blob_bottom_data_(new Blob<Dtype>(10, 5, 1, 1)),
+        blob_bottom_label_(new Blob<Dtype>(10, 5, 1, 1)),
+        blob_top_loss_(new Blob<Dtype>()) {
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_data_);
+    blob_bottom_vec_.push_back(blob_bottom_data_);
+    filler.Fill(this->blob_bottom_label_);
+    blob_bottom_vec_.push_back(blob_bottom_label_);
+    blob_top_vec_.push_back(blob_top_loss_);
+  }
+  virtual ~EuclideanLossLayerTest() {
+    delete blob_bottom_data_;
+    delete blob_bottom_label_;
+    delete blob_top_loss_;
+  }
+
+  void TestForward() {
+    // Get the loss without a specified objective weight -- should be
+    // equivalent to explicitly specifying a weight of 1.
+    LayerParameter layer_param;
+    EuclideanLossLayer<Dtype> layer_weight_1(layer_param);
+    layer_weight_1.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+    const Dtype loss_weight_1 =
+        layer_weight_1.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+
+    // Get the loss again with a different objective weight; check that it is
+    // scaled appropriately.
+    const Dtype kLossWeight = 3.7;
+    layer_param.add_loss_weight(kLossWeight);
+    EuclideanLossLayer<Dtype> layer_weight_2(layer_param);
+    layer_weight_2.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+    const Dtype loss_weight_2 =
+        layer_weight_2.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    const Dtype kErrorMargin = 1e-5;
+    EXPECT_NEAR(loss_weight_1 * kLossWeight, loss_weight_2, kErrorMargin);
+    // Make sure the loss is non-trivial.
+    const Dtype kNonTrivialAbsThresh = 1e-1;
+    EXPECT_GE(fabs(loss_weight_1), kNonTrivialAbsThresh);
+  }
+
+  Blob<Dtype>* const blob_bottom_data_;
+  Blob<Dtype>* const blob_bottom_label_;
+  Blob<Dtype>* const blob_top_loss_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(EuclideanLossLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(EuclideanLossLayerTest, TestForward) {
+  this->TestForward();
+}
+
+TYPED_TEST(EuclideanLossLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  const Dtype kLossWeight = 3.7;
+  layer_param.add_loss_weight(kLossWeight);
+  EuclideanLossLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_filler.cpp b/src/caffe/test/test_filler.cpp
new file mode 100644
index 0000000..728b8dc
--- /dev/null
+++ b/src/caffe/test/test_filler.cpp
@@ -0,0 +1,243 @@
+#include <cstring>
+
+#include "gtest/gtest.h"
+
+#include "caffe/filler.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+class ConstantFillerTest : public ::testing::Test {
+ protected:
+  ConstantFillerTest()
+      : blob_(new Blob<Dtype>(2, 3, 4, 5)),
+        filler_param_() {
+    filler_param_.set_value(10.);
+    filler_.reset(new ConstantFiller<Dtype>(filler_param_));
+    filler_->Fill(blob_);
+  }
+  virtual ~ConstantFillerTest() { delete blob_; }
+  Blob<Dtype>* const blob_;
+  FillerParameter filler_param_;
+  shared_ptr<ConstantFiller<Dtype> > filler_;
+};
+
+TYPED_TEST_CASE(ConstantFillerTest, TestDtypes);
+
+TYPED_TEST(ConstantFillerTest, TestFill) {
+  EXPECT_TRUE(this->blob_);
+  const int count = this->blob_->count();
+  const TypeParam* data = this->blob_->cpu_data();
+  for (int i = 0; i < count; ++i) {
+    EXPECT_EQ(data[i], this->filler_param_.value());
+  }
+}
+
+
+template <typename Dtype>
+class UniformFillerTest : public ::testing::Test {
+ protected:
+  UniformFillerTest()
+      : blob_(new Blob<Dtype>(2, 3, 4, 5)),
+        filler_param_() {
+    filler_param_.set_min(1.);
+    filler_param_.set_max(2.);
+    filler_.reset(new UniformFiller<Dtype>(filler_param_));
+    filler_->Fill(blob_);
+  }
+  virtual ~UniformFillerTest() { delete blob_; }
+  Blob<Dtype>* const blob_;
+  FillerParameter filler_param_;
+  shared_ptr<UniformFiller<Dtype> > filler_;
+};
+
+TYPED_TEST_CASE(UniformFillerTest, TestDtypes);
+
+TYPED_TEST(UniformFillerTest, TestFill) {
+  EXPECT_TRUE(this->blob_);
+  const int count = this->blob_->count();
+  const TypeParam* data = this->blob_->cpu_data();
+  for (int i = 0; i < count; ++i) {
+    EXPECT_GE(data[i], this->filler_param_.min());
+    EXPECT_LE(data[i], this->filler_param_.max());
+  }
+}
+
+template <typename Dtype>
+class PositiveUnitballFillerTest : public ::testing::Test {
+ protected:
+  PositiveUnitballFillerTest()
+      : blob_(new Blob<Dtype>(2, 3, 4, 5)),
+        filler_param_() {
+    filler_.reset(new PositiveUnitballFiller<Dtype>(filler_param_));
+    filler_->Fill(blob_);
+  }
+  virtual ~PositiveUnitballFillerTest() { delete blob_; }
+  Blob<Dtype>* const blob_;
+  FillerParameter filler_param_;
+  shared_ptr<PositiveUnitballFiller<Dtype> > filler_;
+};
+
+TYPED_TEST_CASE(PositiveUnitballFillerTest, TestDtypes);
+
+TYPED_TEST(PositiveUnitballFillerTest, TestFill) {
+  EXPECT_TRUE(this->blob_);
+  const int num = this->blob_->num();
+  const int count = this->blob_->count();
+  const int dim = count / num;
+  const TypeParam* data = this->blob_->cpu_data();
+  for (int i = 0; i < count; ++i) {
+    EXPECT_GE(data[i], 0);
+    EXPECT_LE(data[i], 1);
+  }
+  for (int i = 0; i < num; ++i) {
+    TypeParam sum = 0;
+    for (int j = 0; j < dim; ++j) {
+      sum += data[i * dim + j];
+    }
+    EXPECT_GE(sum, 0.999);
+    EXPECT_LE(sum, 1.001);
+  }
+}
+
+template <typename Dtype>
+class GaussianFillerTest : public ::testing::Test {
+ protected:
+  GaussianFillerTest()
+      : blob_(new Blob<Dtype>(2, 3, 4, 5)),
+        filler_param_() {
+    filler_param_.set_mean(10.);
+    filler_param_.set_std(0.1);
+    filler_.reset(new GaussianFiller<Dtype>(filler_param_));
+    filler_->Fill(blob_);
+  }
+  virtual ~GaussianFillerTest() { delete blob_; }
+  Blob<Dtype>* const blob_;
+  FillerParameter filler_param_;
+  shared_ptr<GaussianFiller<Dtype> > filler_;
+};
+
+TYPED_TEST_CASE(GaussianFillerTest, TestDtypes);
+
+TYPED_TEST(GaussianFillerTest, TestFill) {
+  EXPECT_TRUE(this->blob_);
+  const int count = this->blob_->count();
+  const TypeParam* data = this->blob_->cpu_data();
+  TypeParam mean = 0.;
+  TypeParam var = 0.;
+  for (int i = 0; i < count; ++i) {
+    mean += data[i];
+    var += (data[i] - this->filler_param_.mean()) *
+        (data[i] - this->filler_param_.mean());
+  }
+  mean /= count;
+  var /= count;
+  // Very loose test: only check that the sample mean lies within 5 standard
+  // deviations of the target mean and that the sample variance is within a
+  // factor of 5 of the target variance.
+  EXPECT_GE(mean, this->filler_param_.mean() - this->filler_param_.std() * 5);
+  EXPECT_LE(mean, this->filler_param_.mean() + this->filler_param_.std() * 5);
+  TypeParam target_var = this->filler_param_.std() * this->filler_param_.std();
+  EXPECT_GE(var, target_var / 5.);
+  EXPECT_LE(var, target_var * 5.);
+}
+
+template <typename Dtype>
+class XavierFillerTest : public ::testing::Test {
+ protected:
+  XavierFillerTest()
+      : blob_(new Blob<Dtype>(1000, 2, 4, 5)),
+        filler_param_() {
+  }
+  virtual void test_params(FillerParameter_VarianceNorm variance_norm,
+      Dtype n) {
+    this->filler_param_.set_variance_norm(variance_norm);
+    this->filler_.reset(new XavierFiller<Dtype>(this->filler_param_));
+    this->filler_->Fill(blob_);
+    EXPECT_TRUE(this->blob_);
+    const int count = this->blob_->count();
+    const Dtype* data = this->blob_->cpu_data();
+    Dtype mean = 0.;
+    Dtype ex2 = 0.;
+    for (int i = 0; i < count; ++i) {
+      mean += data[i];
+      ex2 += data[i] * data[i];
+    }
+    mean /= count;
+    ex2 /= count;
+    Dtype std = sqrt(ex2 - mean*mean);
+    Dtype target_std = sqrt(2.0 / n);
+    EXPECT_NEAR(mean, 0.0, 0.1);
+    EXPECT_NEAR(std, target_std, 0.1);
+  }
+  virtual ~XavierFillerTest() { delete blob_; }
+  Blob<Dtype>* const blob_;
+  FillerParameter filler_param_;
+  shared_ptr<XavierFiller<Dtype> > filler_;
+};
+
+TYPED_TEST_CASE(XavierFillerTest, TestDtypes);
+
+TYPED_TEST(XavierFillerTest, TestFillFanIn) {
+  TypeParam n = 2*4*5;
+  this->test_params(FillerParameter_VarianceNorm_FAN_IN, n);
+}
+TYPED_TEST(XavierFillerTest, TestFillFanOut) {
+  TypeParam n = 1000*4*5;
+  this->test_params(FillerParameter_VarianceNorm_FAN_OUT, n);
+}
+TYPED_TEST(XavierFillerTest, TestFillAverage) {
+  TypeParam n = (2*4*5 + 1000*4*5) / 2.0;
+  this->test_params(FillerParameter_VarianceNorm_AVERAGE, n);
+}
+
+template <typename Dtype>
+class MSRAFillerTest : public ::testing::Test {
+ protected:
+  MSRAFillerTest()
+      : blob_(new Blob<Dtype>(1000, 2, 4, 5)),
+        filler_param_() {
+  }
+  virtual void test_params(FillerParameter_VarianceNorm variance_norm,
+      Dtype n) {
+    this->filler_param_.set_variance_norm(variance_norm);
+    this->filler_.reset(new MSRAFiller<Dtype>(this->filler_param_));
+    this->filler_->Fill(blob_);
+    EXPECT_TRUE(this->blob_);
+    const int count = this->blob_->count();
+    const Dtype* data = this->blob_->cpu_data();
+    Dtype mean = 0.;
+    Dtype ex2 = 0.;
+    for (int i = 0; i < count; ++i) {
+      mean += data[i];
+      ex2 += data[i] * data[i];
+    }
+    mean /= count;
+    ex2 /= count;
+    Dtype std = sqrt(ex2 - mean*mean);
+    Dtype target_std = sqrt(2.0 / n);
+    EXPECT_NEAR(mean, 0.0, 0.1);
+    EXPECT_NEAR(std, target_std, 0.1);
+  }
+  virtual ~MSRAFillerTest() { delete blob_; }
+  Blob<Dtype>* const blob_;
+  FillerParameter filler_param_;
+  shared_ptr<MSRAFiller<Dtype> > filler_;
+};
+
+TYPED_TEST_CASE(MSRAFillerTest, TestDtypes);
+
+TYPED_TEST(MSRAFillerTest, TestFillFanIn) {
+  TypeParam n = 2*4*5;
+  this->test_params(FillerParameter_VarianceNorm_FAN_IN, n);
+}
+TYPED_TEST(MSRAFillerTest, TestFillFanOut) {
+  TypeParam n = 1000*4*5;
+  this->test_params(FillerParameter_VarianceNorm_FAN_OUT, n);
+}
+TYPED_TEST(MSRAFillerTest, TestFillAverage) {
+  TypeParam n = (2*4*5 + 1000*4*5) / 2.0;
+  this->test_params(FillerParameter_VarianceNorm_AVERAGE, n);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_filter_layer.cpp b/src/caffe/test/test_filter_layer.cpp
new file mode 100644
index 0000000..c641b6e
--- /dev/null
+++ b/src/caffe/test/test_filter_layer.cpp
@@ -0,0 +1,128 @@
+#include <cstring>
+#include <limits>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class FilterLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  FilterLayerTest()
+      : blob_bottom_data_(new Blob<Dtype>(4, 3, 6, 4)),
+        blob_bottom_labels_(new Blob<Dtype>(4, 1, 1, 1)),
+        blob_bottom_selector_(new Blob<Dtype>(4, 1, 1, 1)),
+        blob_top_data_(new Blob<Dtype>()),
+        blob_top_labels_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    // fill the values
+    Caffe::set_random_seed(1890);
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    // fill the selector blob
+    Dtype* bottom_data_selector_ = blob_bottom_selector_->mutable_cpu_data();
+    bottom_data_selector_[0] = 0;
+    bottom_data_selector_[1] = 1;
+    bottom_data_selector_[2] = 1;
+    bottom_data_selector_[3] = 0;
+    // fill the other bottom blobs
+    filler.Fill(blob_bottom_data_);
+    for (int i = 0; i < blob_bottom_labels_->count(); ++i) {
+      blob_bottom_labels_->mutable_cpu_data()[i] = caffe_rng_rand() % 5;
+    }
+    blob_bottom_vec_.push_back(blob_bottom_data_);
+    blob_bottom_vec_.push_back(blob_bottom_labels_);
+    blob_bottom_vec_.push_back(blob_bottom_selector_);
+    blob_top_vec_.push_back(blob_top_data_);
+    blob_top_vec_.push_back(blob_top_labels_);
+  }
+  virtual ~FilterLayerTest() {
+    delete blob_bottom_data_;
+    delete blob_bottom_labels_;
+    delete blob_bottom_selector_;
+    delete blob_top_data_;
+    delete blob_top_labels_;
+  }
+  Blob<Dtype>* const blob_bottom_data_;
+  Blob<Dtype>* const blob_bottom_labels_;
+  Blob<Dtype>* const blob_bottom_selector_;
+  // blobs for the top of FilterLayer
+  Blob<Dtype>* const blob_top_data_;
+  Blob<Dtype>* const blob_top_labels_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(FilterLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(FilterLayerTest, TestReshape) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  FilterLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Reshape(this->blob_bottom_vec_, this->blob_top_vec_);
+  // In this test the first and last items should have been filtered out,
+  // so we expect just 2 remaining items.
+  EXPECT_EQ(this->blob_top_data_->shape(0), 2);
+  EXPECT_EQ(this->blob_top_labels_->shape(0), 2);
+  EXPECT_GT(this->blob_bottom_data_->shape(0),
+      this->blob_top_data_->shape(0));
+  EXPECT_GT(this->blob_bottom_labels_->shape(0),
+      this->blob_top_labels_->shape(0));
+  for (int i = 1; i < this->blob_bottom_labels_->num_axes(); i++) {
+    EXPECT_EQ(this->blob_bottom_labels_->shape(i),
+        this->blob_top_labels_->shape(i));
+  }
+}
+
+TYPED_TEST(FilterLayerTest, TestForward) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  FilterLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Reshape(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_labels_->data_at(0, 0, 0, 0),
+      this->blob_bottom_labels_->data_at(1, 0, 0, 0));
+  EXPECT_EQ(this->blob_top_labels_->data_at(1, 0, 0, 0),
+      this->blob_bottom_labels_->data_at(2, 0, 0, 0));
+
+  int dim = this->blob_top_data_->count() /
+      this->blob_top_data_->shape(0);
+  const Dtype* top_data = this->blob_top_data_->cpu_data();
+  const Dtype* bottom_data = this->blob_bottom_data_->cpu_data();
+  // selector is 0 1 1 0, so we need to compare bottom(1,c,h,w)
+  // with top(0,c,h,w) and bottom(2,c,h,w) with top(1,c,h,w)
+  bottom_data += dim;  // bottom(1,c,h,w)
+  for (size_t n = 0; n < dim; n++)
+    EXPECT_EQ(top_data[n], bottom_data[n]);
+
+  bottom_data += dim;  // bottom(2,c,h,w)
+  top_data += dim;  // top(1,c,h,w)
+  for (size_t n = 0; n < dim; n++)
+    EXPECT_EQ(top_data[n], bottom_data[n]);
+}
+
+TYPED_TEST(FilterLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  FilterLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  // check only input 0 (data) because labels and selector
+  // don't need backpropagation
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 0);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_flatten_layer.cpp b/src/caffe/test/test_flatten_layer.cpp
new file mode 100644
index 0000000..7b6757c
--- /dev/null
+++ b/src/caffe/test/test_flatten_layer.cpp
@@ -0,0 +1,109 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class FlattenLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+ protected:
+  FlattenLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 6, 5)),
+        blob_top_(new Blob<Dtype>()) {
+    Caffe::set_random_seed(1701);
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~FlattenLayerTest() { delete blob_bottom_; delete blob_top_; }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(FlattenLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(FlattenLayerTest, TestSetup) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  FlattenLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  ASSERT_EQ(this->blob_top_->num_axes(), 2);
+  EXPECT_EQ(this->blob_top_->shape(0), 2);
+  EXPECT_EQ(this->blob_top_->shape(1), 3 * 6 * 5);
+}
+
+TYPED_TEST(FlattenLayerTest, TestSetupWithAxis) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_flatten_param()->set_axis(2);
+  FlattenLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  ASSERT_EQ(this->blob_top_->num_axes(), 3);
+  EXPECT_EQ(this->blob_top_->shape(0), 2);
+  EXPECT_EQ(this->blob_top_->shape(1), 3);
+  EXPECT_EQ(this->blob_top_->shape(2), 6 * 5);
+}
+
+TYPED_TEST(FlattenLayerTest, TestSetupWithEndAxis) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_flatten_param()->set_end_axis(-2);
+  FlattenLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  ASSERT_EQ(this->blob_top_->num_axes(), 3);
+  EXPECT_EQ(this->blob_top_->shape(0), 2);
+  EXPECT_EQ(this->blob_top_->shape(1), 3 * 6);
+  EXPECT_EQ(this->blob_top_->shape(2), 5);
+}
+
+TYPED_TEST(FlattenLayerTest, TestSetupWithStartAndEndAxis) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_flatten_param()->set_axis(0);
+  layer_param.mutable_flatten_param()->set_end_axis(-2);
+  FlattenLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  ASSERT_EQ(this->blob_top_->num_axes(), 2);
+  EXPECT_EQ(this->blob_top_->shape(0), 2 * 3 * 6);
+  EXPECT_EQ(this->blob_top_->shape(1), 5);
+}
+
+TYPED_TEST(FlattenLayerTest, TestForward) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  FlattenLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
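+  // Flattening maps bottom index (c, h, w) to top index c * 30 + h * 5 + w,
+  // so invert that mapping when looking up the corresponding bottom value.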
+  for (int c = 0; c < 3 * 6 * 5; ++c) {
+    EXPECT_EQ(this->blob_top_->data_at(0, c, 0, 0),
+        this->blob_bottom_->data_at(0, c / (6 * 5), (c / 5) % 6, c % 5));
+    EXPECT_EQ(this->blob_top_->data_at(1, c, 0, 0),
+        this->blob_bottom_->data_at(1, c / (6 * 5), (c / 5) % 6, c % 5));
+  }
+}
+
+TYPED_TEST(FlattenLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  FlattenLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_gradient_based_solver.cpp b/src/caffe/test/test_gradient_based_solver.cpp
new file mode 100644
index 0000000..c9135d6
--- /dev/null
+++ b/src/caffe/test/test_gradient_based_solver.cpp
@@ -0,0 +1,561 @@
+#include <algorithm>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "google/protobuf/text_format.h"
+
+#include "gtest/gtest.h"
+
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/solver.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+using std::ostringstream;
+
+namespace caffe {
+
+template <typename TypeParam>
+class GradientBasedSolverTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  GradientBasedSolverTest() :
+      seed_(1701), num_(4), channels_(3), height_(10), width_(10) {}
+
+  shared_ptr<SGDSolver<Dtype> > solver_;
+  int seed_;
+  int num_, channels_, height_, width_;
+  Dtype delta_;  // Stability constant for AdaGrad.
+
+  virtual SolverParameter_SolverType solver_type() = 0;
+  virtual void InitSolver(const SolverParameter& param) = 0;
+
+  virtual void InitSolverFromProtoString(const string& proto) {
+    SolverParameter param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(proto, &param));
+    // Disable saving a final snapshot so the tests don't pollute the user's
+    // working directory with useless snapshots.
+    param.set_snapshot_after_train(false);
+    // Set the solver_mode according to current Caffe::mode.
+    switch (Caffe::mode()) {
+      case Caffe::CPU:
+        param.set_solver_mode(SolverParameter_SolverMode_CPU);
+        break;
+      case Caffe::GPU:
+        param.set_solver_mode(SolverParameter_SolverMode_GPU);
+        break;
+      default:
+        LOG(FATAL) << "Unknown Caffe mode: " << Caffe::mode();
+    }
+    InitSolver(param);
+    delta_ = (solver_type() == SolverParameter_SolverType_ADAGRAD) ?
+         param.delta() : 0;
+  }
+
+  void RunLeastSquaresSolver(const Dtype learning_rate,
+      const Dtype weight_decay, const Dtype momentum, const int num_iters,
+      const int iter_size = 1) {
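+    // Build a small linear-regression net from a text proto: a DummyData
+    // layer that produces constant 'data' and Gaussian 'targets', a single-
+    // output InnerProduct layer, and a EuclideanLoss on top, trained with a
+    // fixed learning rate for num_iters iterations.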
+    ostringstream proto;
+    proto <<
+       "max_iter: " << num_iters << " "
+       "base_lr: " << learning_rate << " "
+       "lr_policy: 'fixed' "
+       "iter_size: " << iter_size << " "
+       "net_param { "
+       "  name: 'TestNetwork' "
+       "  layer { "
+       "    name: 'data' "
+       "    type: 'DummyData' "
+       "    dummy_data_param { "
+       "      num: " << num_ / iter_size << " "
+       "      channels: " << channels_ << " "
+       "      height: " << height_ << " "
+       "      width: " << width_ << " "
+       "      channels: 1 "
+       "      height: 1 "
+       "      width: 1 "
+       "      data_filler { "
+       "        type: 'constant' "
+       "        value: 1.0 "
+       "      } "
+       "      data_filler { "
+       "        type: 'gaussian' "
+       "        std: 1.0 "
+       "      } "
+       "    } "
+       "    top: 'data' "
+       "    top: 'targets' "
+       "  } "
+       "  layer { "
+       "    name: 'innerprod' "
+       "    type: 'InnerProduct' "
+       "    inner_product_param { "
+       "      num_output: 1 "
+       "      weight_filler { "
+       "        type: 'gaussian' "
+       "        std: 1.0 "
+       "      } "
+       "      bias_filler { "
+       "        type: 'gaussian' "
+       "        std: 1.0 "
+       "      } "
+       "    } "
+       "    bottom: 'data' "
+       "    top: 'innerprod' "
+       "  } "
+       "  layer { "
+       "    name: 'loss' "
+       "    type: 'EuclideanLoss' "
+       "    bottom: 'innerprod' "
+       "    bottom: 'targets' "
+       "  } "
+       "} ";
+    if (weight_decay != 0) {
+      proto << "weight_decay: " << weight_decay << " ";
+    }
+    if (momentum != 0) {
+      proto << "momentum: " << momentum << " ";
+    }
+    Caffe::set_random_seed(this->seed_);
+    this->InitSolverFromProtoString(proto.str());
+    this->solver_->Solve();
+  }
+
+  // Compute an update value given the current state of the train net,
+  // using the analytical formula for the least squares gradient.
+  // updated_params will store the updated weight and bias results,
+  // using the blobs' diffs to hold the update values themselves.
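+  // With the bias folded in as the (D+1)-th weight (x_{k,D} == 1), the
+  // gradient of E = (1/(2N)) ||X w - y||^2 + (weight_decay/2) ||w||^2 is
+  //   dE/dw_i = (1/N) * sum_k x_{k,i} * (x_k . w - y_k) + weight_decay * w_i,
+  // which the nested loops below evaluate through X^T * X * w - X^T * y.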
+  void ComputeLeastSquaresUpdate(const Dtype learning_rate,
+      const Dtype weight_decay, const Dtype momentum,
+      vector<shared_ptr<Blob<Dtype> > >* updated_params) {
+    const int N = num_;
+    const int D = channels_ * height_ * width_;
+
+    // Run a forward pass, and manually compute the update values from the
+    // result.
+    Net<Dtype>& net = *this->solver_->net();
+    vector<Blob<Dtype>*> empty_bottom_vec;
+    net.Forward(empty_bottom_vec);
+    ASSERT_TRUE(net.has_blob("data"));
+    const Blob<Dtype>& data = *net.blob_by_name("data");
+    ASSERT_TRUE(net.has_blob("targets"));
+    const Blob<Dtype>& targets = *net.blob_by_name("targets");
+    ASSERT_TRUE(net.has_layer("innerprod"));
+    const vector<shared_ptr<Blob<Dtype> > >& param_blobs =
+        net.layer_by_name("innerprod")->blobs();
+    const int num_param_blobs = 2;
+    ASSERT_EQ(num_param_blobs, param_blobs.size());
+    const Blob<Dtype>& weights = *param_blobs[0];
+    const Blob<Dtype>& bias = *param_blobs[1];
+    ASSERT_EQ(D * N, data.count());
+    ASSERT_EQ(N, targets.count());
+    ASSERT_EQ(D, weights.count());
+    ASSERT_EQ(1, bias.count());
+
+    updated_params->clear();
+    updated_params->resize(num_param_blobs);
+    for (int i = 0; i < num_param_blobs; ++i) {
+      (*updated_params)[i].reset(new Blob<Dtype>());
+    }
+    Blob<Dtype>& updated_weights = *(*updated_params)[0];
+    updated_weights.ReshapeLike(weights);
+    Blob<Dtype>& updated_bias = *(*updated_params)[1];
+    updated_bias.ReshapeLike(bias);
+
+    for (int i = 0; i <= D; ++i) {
+      // Compute the derivative with respect to the ith weight (i.e., the ith
+      // element of the gradient).
+      Dtype grad = 0;
+      for (int j = 0; j <= D; ++j) {
+        // Compute element (i, j) of X^T * X.
+        Dtype element = 0;
+        for (int k = 0; k < N; ++k) {
+          // (i, k) in X^T (== (k, i) in X) times (k, j) in X.
+          const Dtype element_i = (i == D) ? 1 : data.cpu_data()[k * D + i];
+          const Dtype element_j = (j == D) ? 1 : data.cpu_data()[k * D + j];
+          element += element_i * element_j;
+        }
+        if (j == D) {
+          grad += element * bias.cpu_data()[0];
+        } else {
+          grad += element * weights.cpu_data()[j];
+        }
+      }
+      for (int k = 0; k < N; ++k) {
+        const Dtype element_i = (i == D) ? 1 : data.cpu_data()[k * D + i];
+        grad -= element_i * targets.cpu_data()[k];
+      }
+      // Scale the gradient over the N samples.
+      grad /= N;
+      // Add the weight decay to the gradient.
+      grad += weight_decay *
+          ((i == D) ? bias.cpu_data()[0] : weights.cpu_data()[i]);
+      // Finally, compute update.
+      const vector<shared_ptr<Blob<Dtype> > >& history = solver_->history();
+      ASSERT_EQ(2, history.size());  // 1 blob for weights, 1 for bias
+      Dtype update_value = learning_rate * grad;
+      const Dtype history_value = (i == D) ?
+            history[1]->cpu_data()[0] : history[0]->cpu_data()[i];
+      const Dtype temp = momentum * history_value;
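+      // Reference update rules below, writing mu for momentum and h for the
+      // stored history value:
+      //   SGD:      v = mu * h + lr * grad
+      //   Nesterov: v = (1 + mu) * (mu * h + lr * grad) - mu * h
+      //   AdaGrad:  v = lr * grad / (sqrt(h + grad^2) + delta)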
+      switch (solver_type()) {
+      case SolverParameter_SolverType_SGD:
+        update_value += temp;
+        break;
+      case SolverParameter_SolverType_NESTEROV:
+        update_value += temp;
+        // step back then over-step
+        update_value = (1 + momentum) * update_value - temp;
+        break;
+      case SolverParameter_SolverType_ADAGRAD:
+        update_value /= std::sqrt(history_value + grad * grad) + delta_;
+        break;
+      default:
+        LOG(FATAL) << "Unknown solver type: " << solver_type();
+      }
+      if (i == D) {
+        updated_bias.mutable_cpu_diff()[0] = update_value;
+        updated_bias.mutable_cpu_data()[0] = bias.cpu_data()[0] - update_value;
+      } else {
+        updated_weights.mutable_cpu_diff()[i] = update_value;
+        updated_weights.mutable_cpu_data()[i] =
+            weights.cpu_data()[i] - update_value;
+      }
+    }
+  }
+
+  void CheckLeastSquaresUpdate(
+      const vector<shared_ptr<Blob<Dtype> > >& updated_params) {
+    const int D = channels_ * height_ * width_;
+
+    const Blob<Dtype>& updated_weights = *updated_params[0];
+    const Blob<Dtype>& updated_bias = *updated_params[1];
+
+    Net<Dtype>& net = *this->solver_->net();
+    ASSERT_TRUE(net.has_layer("innerprod"));
+    const vector<shared_ptr<Blob<Dtype> > >& param_blobs =
+        net.layer_by_name("innerprod")->blobs();
+    ASSERT_EQ(2, param_blobs.size());
+    const Blob<Dtype>& solver_updated_weights = *param_blobs[0];
+    ASSERT_EQ(D, solver_updated_weights.count());
+    const double kPrecision = 1e-2;
+    const double kMinPrecision = 1e-7;
+    for (int i = 0; i < D; ++i) {
+      const Dtype expected_updated_weight = updated_weights.cpu_data()[i];
+      const Dtype solver_updated_weight = solver_updated_weights.cpu_data()[i];
+      const Dtype error_margin = std::max(kMinPrecision, kPrecision *
+          std::min(fabs(expected_updated_weight), fabs(solver_updated_weight)));
+      EXPECT_NEAR(expected_updated_weight, solver_updated_weight, error_margin);
+    }
+    const Blob<Dtype>& solver_updated_bias_blob = *param_blobs[1];
+    ASSERT_EQ(1, solver_updated_bias_blob.count());
+    const Dtype expected_updated_bias = updated_bias.cpu_data()[0];
+    const Dtype solver_updated_bias = solver_updated_bias_blob.cpu_data()[0];
+    const Dtype error_margin = std::max(kMinPrecision, kPrecision *
+          std::min(fabs(expected_updated_bias), fabs(solver_updated_bias)));
+    EXPECT_NEAR(expected_updated_bias, solver_updated_bias, error_margin);
+
+    // Check the solver's history -- should contain the previous update value.
+    if (solver_type() == SolverParameter_SolverType_SGD) {
+      const vector<shared_ptr<Blob<Dtype> > >& history = solver_->history();
+      ASSERT_EQ(2, history.size());
+      for (int i = 0; i < D; ++i) {
+        const Dtype expected_history = updated_weights.cpu_diff()[i];
+        const Dtype solver_history = history[0]->cpu_data()[i];
+        const Dtype error_margin_hist = std::max(kMinPrecision, kPrecision *
+            std::min(fabs(expected_history), fabs(solver_history)));
+        EXPECT_NEAR(expected_history, solver_history, error_margin_hist);
+      }
+      const Dtype expected_history = updated_bias.cpu_diff()[0];
+      const Dtype solver_history = history[1]->cpu_data()[0];
+      const Dtype error_margin_hist = std::max(kMinPrecision, kPrecision *
+          std::min(fabs(expected_history), fabs(solver_history)));
+      EXPECT_NEAR(expected_history, solver_history, error_margin_hist);
+    }
+  }
+
+  void CheckAccumulation(const Dtype kLearningRate, const Dtype kWeightDecay,
+      const Dtype kMomentum, const int kNumIters, const int kIterSize) {
+    const double kPrecision = 1e-2;
+    const double kMinPrecision = 1e-7;
+    // Solve without accumulation and save parameters.
+    this->RunLeastSquaresSolver(kLearningRate, kWeightDecay, kMomentum,
+        kNumIters);
+    // Save parameters for comparison.
+    Net<Dtype>& net = *this->solver_->net();
+    const vector<shared_ptr<Blob<Dtype> > >& param_blobs =
+        net.layer_by_name("innerprod")->blobs();
+    vector<shared_ptr<Blob<Dtype> > > noaccum_params(param_blobs.size());
+    for (int i = 0; i < param_blobs.size(); ++i) {
+      noaccum_params[i].reset(new Blob<Dtype>());
+      noaccum_params[i]->CopyFrom(*param_blobs[i], false, true);
+    }
+    // Solve by equivalent accumulation of gradients over divided batches.
+    this->RunLeastSquaresSolver(kLearningRate, kWeightDecay, kMomentum,
+        kNumIters, kIterSize);
+    Net<Dtype>& net_accum = *this->solver_->net();
+    const vector<shared_ptr<Blob<Dtype> > >& accum_params =
+        net_accum.layer_by_name("innerprod")->blobs();
+    // Compare accumulated parameters against no accumulation standard.
+    const int D = this->channels_ * this->height_ * this->width_;
+    for (int i = 0; i < D; ++i) {
+      const Dtype expected_param = noaccum_params[0]->cpu_data()[i];
+      const Dtype accum_param = accum_params[0]->cpu_data()[i];
+      const Dtype error_margin = std::max(kMinPrecision, kPrecision *
+          std::min(fabs(expected_param), fabs(accum_param)));
+      EXPECT_NEAR(expected_param, accum_param, error_margin);
+    }
+    ASSERT_EQ(1, accum_params[1]->count());
+    const Dtype expected_bias = noaccum_params[1]->cpu_data()[0];
+    const Dtype accum_bias = accum_params[1]->cpu_data()[0];
+    const Dtype error_margin = std::max(kMinPrecision, kPrecision *
+        std::min(fabs(expected_bias), fabs(accum_bias)));
+    EXPECT_NEAR(expected_bias, accum_bias, error_margin);
+  }
+
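As a toy illustration of why CheckAccumulation can expect matching parameters, the sketch below averages the same per-sample gradients once as a single batch and once accumulated over iter_size sub-batches; the gradient values are made up.

    #include <cstdio>

    int main() {
      // Per-sample gradients for a batch of 4 (illustrative values only).
      const double g[4] = {0.2, -0.4, 0.6, 0.8};
      const int batch = 4, iter_size = 2, sub_batch = batch / iter_size;

      // One large batch: average all per-sample gradients at once.
      double big = 0;
      for (int i = 0; i < batch; ++i) big += g[i];
      big /= batch;

      // Accumulation: sum gradients over iter_size sub-batches, then divide
      // by the total sample count before the single parameter update.
      double accum = 0;
      for (int s = 0; s < iter_size; ++s)
        for (int i = 0; i < sub_batch; ++i)
          accum += g[s * sub_batch + i];
      accum /= batch;

      std::printf("big=%f accum=%f\n", big, accum);  // the two agree
      return 0;
    }
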
+  // Test that the correct update is computed for a regularized least squares
+  // problem:
+  //
+  //            E = (1/(2n)) || X w - y ||^2 + (lambda / 2) || w ||^2
+  //   \nabla_w E = (1/n) (X^T X w - X^T y) + lambda * w
+  //
+  // X \in R^{n x (d+1)} (each example is a row, (d+1)th element is always 1)
+  // w \in R^{(d+1) x 1} ((d+1)th element is the bias)
+  // y \in R^{n x 1}
+  // lambda is weight_decay
+  //
+  // TestLeastSquaresUpdate works "inductively", assuming that the solver
+  // correctly updates the net K (= iter_to_check) times, then given the history
+  // from the Kth update, we compute the (K+1)th update and check that it
+  // matches the solver's (K+1)th update.
+  void TestLeastSquaresUpdate(const Dtype learning_rate = 1.0,
+      const Dtype weight_decay = 0.0, const Dtype momentum = 0.0,
+      const int iter_to_check = 0) {
+    // Initialize the solver and run K (= iter_to_check) solver iterations.
+    RunLeastSquaresSolver(learning_rate, weight_decay, momentum, iter_to_check);
+
+    // Compute the (K+1)th update using the analytic least squares gradient.
+    vector<shared_ptr<Blob<Dtype> > > updated_params;
+    ComputeLeastSquaresUpdate(learning_rate, weight_decay, momentum,
+                              &updated_params);
+
+    // Reinitialize the solver and run K+1 solver iterations.
+    RunLeastSquaresSolver(learning_rate, weight_decay, momentum,
+                          iter_to_check + 1);
+
+    // Check that the solver's solution matches ours.
+    CheckLeastSquaresUpdate(updated_params);
+  }
+};
+
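The gradient formula in the comment above can be checked by hand on a tiny problem. The following standalone sketch evaluates \nabla_w E = (1/n) (X^T X w - X^T y) + lambda * w for two samples and one feature plus bias; all values are made up for illustration.

    #include <cstdio>

    int main() {
      // n = 2 samples, d = 2 columns (one feature plus the constant bias).
      const int n = 2, d = 2;
      const double X[2][2] = {{1.0, 1.0},   // each row: [feature, 1]
                              {2.0, 1.0}};
      const double y[2] = {2.0, 3.0};
      const double w[2] = {0.5, 0.1};       // [weight, bias]
      const double lambda = 0.01;           // weight decay

      double grad[2] = {0.0, 0.0};
      for (int j = 0; j < d; ++j) {
        for (int k = 0; k < n; ++k) {
          double pred = 0.0;
          for (int i = 0; i < d; ++i) pred += X[k][i] * w[i];
          grad[j] += X[k][j] * (pred - y[k]);  // row k of X^T (X w - y)
        }
        grad[j] = grad[j] / n + lambda * w[j];
      }
      std::printf("grad = [%f, %f]\n", grad[0], grad[1]);
      return 0;
    }
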
+
+template <typename TypeParam>
+class SGDSolverTest : public GradientBasedSolverTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  virtual void InitSolver(const SolverParameter& param) {
+    this->solver_.reset(new SGDSolver<Dtype>(param));
+  }
+
+  virtual SolverParameter_SolverType solver_type() {
+    return SolverParameter_SolverType_SGD;
+  }
+};
+
+TYPED_TEST_CASE(SGDSolverTest, TestDtypesAndDevices);
+
+TYPED_TEST(SGDSolverTest, TestLeastSquaresUpdate) {
+  this->TestLeastSquaresUpdate();
+}
+
+TYPED_TEST(SGDSolverTest, TestLeastSquaresUpdateLROneTenth) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 0.1;
+  this->TestLeastSquaresUpdate(kLearningRate);
+}
+
+TYPED_TEST(SGDSolverTest, TestLeastSquaresUpdateWithWeightDecay) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 1.0;
+  const Dtype kWeightDecay = 0.5;
+  this->TestLeastSquaresUpdate(kLearningRate, kWeightDecay);
+}
+
+TYPED_TEST(SGDSolverTest, TestLeastSquaresUpdateWithMomentum) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 1.0;
+  const Dtype kWeightDecay = 0.0;
+  const Dtype kMomentum = 0.5;
+  const int kNumIters = 1;
+  for (int i = 0; i <= kNumIters; ++i) {
+    this->TestLeastSquaresUpdate(kLearningRate, kWeightDecay, kMomentum, i);
+  }
+}
+
+TYPED_TEST(SGDSolverTest, TestLeastSquaresUpdateWithMomentumMultiIter) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 1.0;
+  const Dtype kWeightDecay = 0.0;
+  const Dtype kMomentum = 0.5;
+  const int kNumIters = 4;
+  for (int i = 0; i <= kNumIters; ++i) {
+    this->TestLeastSquaresUpdate(kLearningRate, kWeightDecay, kMomentum, i);
+  }
+}
+
+TYPED_TEST(SGDSolverTest, TestLeastSquaresUpdateWithEverything) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 0.01;
+  const Dtype kWeightDecay = 0.1;
+  const Dtype kMomentum = 0.9;
+  const int kNumIters = 4;
+  for (int i = 0; i <= kNumIters; ++i) {
+    this->TestLeastSquaresUpdate(kLearningRate, kWeightDecay, kMomentum, i);
+  }
+}
+
+TYPED_TEST(SGDSolverTest, TestLeastSquaresUpdateWithEverythingAccum) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 0.01;
+  const Dtype kWeightDecay = 0.1;
+  const Dtype kMomentum = 0.9;
+  const int kNumIters = 4;
+  const int kIterSize = 2;
+  this->CheckAccumulation(kLearningRate, kWeightDecay, kMomentum, kNumIters,
+      kIterSize);
+}
+
+template <typename TypeParam>
+class AdaGradSolverTest : public GradientBasedSolverTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  virtual void InitSolver(const SolverParameter& param) {
+    this->solver_.reset(new AdaGradSolver<Dtype>(param));
+  }
+  virtual SolverParameter_SolverType solver_type() {
+    return SolverParameter_SolverType_ADAGRAD;
+  }
+};
+
+TYPED_TEST_CASE(AdaGradSolverTest, TestDtypesAndDevices);
+
+TYPED_TEST(AdaGradSolverTest, TestAdaGradLeastSquaresUpdate) {
+  this->TestLeastSquaresUpdate();
+}
+
+TYPED_TEST(AdaGradSolverTest, TestAdaGradLeastSquaresUpdateLROneTenth) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 0.1;
+  this->TestLeastSquaresUpdate(kLearningRate);
+}
+
+TYPED_TEST(AdaGradSolverTest, TestAdaGradLeastSquaresUpdateWithWeightDecay) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 1.0;
+  const Dtype kWeightDecay = 0.5;
+  this->TestLeastSquaresUpdate(kLearningRate, kWeightDecay);
+}
+
+TYPED_TEST(AdaGradSolverTest, TestAdaGradLeastSquaresUpdateWithEverything) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 0.01;
+  const Dtype kWeightDecay = 0.1;
+  const Dtype kMomentum = 0.0;
+  const int kNumIters = 4;
+  for (int i = 0; i <= kNumIters; ++i) {
+    this->TestLeastSquaresUpdate(kLearningRate, kWeightDecay, kMomentum, i);
+  }
+}
+
+TYPED_TEST(AdaGradSolverTest, TestLeastSquaresUpdateWithEverythingAccum) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 0.01;
+  const Dtype kWeightDecay = 0.1;
+  const Dtype kMomentum = 0.0;
+  const int kNumIters = 4;
+  const int kIterSize = 2;
+  this->CheckAccumulation(kLearningRate, kWeightDecay, kMomentum, kNumIters,
+      kIterSize);
+}
+
+template <typename TypeParam>
+class NesterovSolverTest : public GradientBasedSolverTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  virtual void InitSolver(const SolverParameter& param) {
+    this->solver_.reset(new NesterovSolver<Dtype>(param));
+  }
+  virtual SolverParameter_SolverType solver_type() {
+    return SolverParameter_SolverType_NESTEROV;
+  }
+};
+
+TYPED_TEST_CASE(NesterovSolverTest, TestDtypesAndDevices);
+
+TYPED_TEST(NesterovSolverTest, TestNesterovLeastSquaresUpdate) {
+  this->TestLeastSquaresUpdate();
+}
+
+TYPED_TEST(NesterovSolverTest, TestNesterovLeastSquaresUpdateLROneTenth) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 0.1;
+  this->TestLeastSquaresUpdate(kLearningRate);
+}
+
+TYPED_TEST(NesterovSolverTest, TestNesterovLeastSquaresUpdateWithWeightDecay) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 1.0;
+  const Dtype kWeightDecay = 0.5;
+  this->TestLeastSquaresUpdate(kLearningRate, kWeightDecay);
+}
+
+TYPED_TEST(NesterovSolverTest, TestNesterovLeastSquaresUpdateWithMomentum) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 1.0;
+  const Dtype kWeightDecay = 0.0;
+  const Dtype kMomentum = 0.5;
+  const int kNumIters = 1;
+  for (int i = 0; i <= kNumIters; ++i) {
+    this->TestLeastSquaresUpdate(kLearningRate, kWeightDecay, kMomentum, i);
+  }
+}
+
+TYPED_TEST(NesterovSolverTest, TestLeastSquaresUpdateWithMomentumMultiIter) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 1.0;
+  const Dtype kWeightDecay = 0.0;
+  const Dtype kMomentum = 0.5;
+  const int kNumIters = 4;
+  for (int i = 0; i <= kNumIters; ++i) {
+    this->TestLeastSquaresUpdate(kLearningRate, kWeightDecay, kMomentum, i);
+  }
+}
+
+TYPED_TEST(NesterovSolverTest, TestNesterovLeastSquaresUpdateWithEverything) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 0.01;
+  const Dtype kWeightDecay = 0.1;
+  const Dtype kMomentum = 0.9;
+  const int kNumIters = 4;
+  for (int i = 0; i <= kNumIters; ++i) {
+    this->TestLeastSquaresUpdate(kLearningRate, kWeightDecay, kMomentum, i);
+  }
+}
+
+TYPED_TEST(NesterovSolverTest, TestLeastSquaresUpdateWithEverythingAccum) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kLearningRate = 0.01;
+  const Dtype kWeightDecay = 0.1;
+  const Dtype kMomentum = 0.9;
+  const int kNumIters = 4;
+  const int kIterSize = 2;
+  this->CheckAccumulation(kLearningRate, kWeightDecay, kMomentum, kNumIters,
+      kIterSize);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_hdf5_output_layer.cpp b/src/caffe/test/test_hdf5_output_layer.cpp
new file mode 100644
index 0000000..a23034f
--- /dev/null
+++ b/src/caffe/test/test_hdf5_output_layer.cpp
@@ -0,0 +1,120 @@
+#include <string>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/io.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template<typename TypeParam>
+class HDF5OutputLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  HDF5OutputLayerTest()
+      : input_file_name_(
+        CMAKE_SOURCE_DIR "caffe/test/test_data/sample_data.h5"),
+        blob_data_(new Blob<Dtype>()),
+        blob_label_(new Blob<Dtype>()),
+        num_(5),
+        channels_(8),
+        height_(5),
+        width_(5) {
+    MakeTempFilename(&output_file_name_);
+  }
+
+  virtual ~HDF5OutputLayerTest() {
+    delete blob_data_;
+    delete blob_label_;
+  }
+
+  void CheckBlobEqual(const Blob<Dtype>& b1, const Blob<Dtype>& b2);
+
+  string output_file_name_;
+  string input_file_name_;
+  Blob<Dtype>* const blob_data_;
+  Blob<Dtype>* const blob_label_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+  int num_;
+  int channels_;
+  int height_;
+  int width_;
+};
+
+template<typename TypeParam>
+void HDF5OutputLayerTest<TypeParam>::CheckBlobEqual(const Blob<Dtype>& b1,
+                                                    const Blob<Dtype>& b2) {
+  EXPECT_EQ(b1.num(), b2.num());
+  EXPECT_EQ(b1.channels(), b2.channels());
+  EXPECT_EQ(b1.height(), b2.height());
+  EXPECT_EQ(b1.width(), b2.width());
+  for (int n = 0; n < b1.num(); ++n) {
+    for (int c = 0; c < b1.channels(); ++c) {
+      for (int h = 0; h < b1.height(); ++h) {
+        for (int w = 0; w < b1.width(); ++w) {
+          EXPECT_EQ(b1.data_at(n, c, h, w), b2.data_at(n, c, h, w));
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST_CASE(HDF5OutputLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(HDF5OutputLayerTest, TestForward) {
+  typedef typename TypeParam::Dtype Dtype;
+  LOG(INFO) << "Loading HDF5 file " << this->input_file_name_;
+  hid_t file_id = H5Fopen(this->input_file_name_.c_str(), H5F_ACC_RDONLY,
+                          H5P_DEFAULT);
+  ASSERT_GE(file_id, 0) << "Failed to open HDF5 file " <<
+      this->input_file_name_;
+  hdf5_load_nd_dataset(file_id, HDF5_DATA_DATASET_NAME, 0, 4,
+                       this->blob_data_);
+  hdf5_load_nd_dataset(file_id, HDF5_DATA_LABEL_NAME, 0, 4,
+                       this->blob_label_);
+  herr_t status = H5Fclose(file_id);
+  EXPECT_GE(status, 0) << "Failed to close HDF5 file " <<
+      this->input_file_name_;
+  this->blob_bottom_vec_.push_back(this->blob_data_);
+  this->blob_bottom_vec_.push_back(this->blob_label_);
+
+  LayerParameter param;
+  param.mutable_hdf5_output_param()->set_file_name(this->output_file_name_);
+  // This code block ensures that the layer is deconstructed and
+  //   the output hdf5 file is closed.
+  {
+    HDF5OutputLayer<Dtype> layer(param);
+    layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+    EXPECT_EQ(layer.file_name(), this->output_file_name_);
+    layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  }
+  file_id = H5Fopen(this->output_file_name_.c_str(), H5F_ACC_RDONLY,
+                    H5P_DEFAULT);
+  ASSERT_GE(file_id, 0) << "Failed to open HDF5 file " <<
+      this->output_file_name_;
+
+  Blob<Dtype>* blob_data = new Blob<Dtype>();
+  hdf5_load_nd_dataset(file_id, HDF5_DATA_DATASET_NAME, 0, 4,
+                       blob_data);
+  this->CheckBlobEqual(*(this->blob_data_), *blob_data);
+
+  Blob<Dtype>* blob_label = new Blob<Dtype>();
+  hdf5_load_nd_dataset(file_id, HDF5_DATA_LABEL_NAME, 0, 4,
+                       blob_label);
+  this->CheckBlobEqual(*(this->blob_label_), *blob_label);
+
+  status = H5Fclose(file_id);
+  EXPECT_GE(status, 0) << "Failed to close HDF5 file " <<
+      this->output_file_name_;
+}
+
+}  // namespace caffe
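The scoped block in TestForward relies on the layer destructor running so the output HDF5 file is flushed and closed before it is reopened for verification. Below is a generic standalone sketch of the same scoping idiom, using std::ofstream instead of the HDF5 layer and a hypothetical file name.

    #include <cstdio>
    #include <fstream>
    #include <string>

    int main() {
      const std::string path = "scoped_output.txt";  // illustrative temp file
      {
        std::ofstream out(path.c_str());
        out << "written inside the scoped block\n";
      }  // the ofstream destructor closes the file here

      // Only after the block ends is it safe to read the file back.
      std::ifstream in(path.c_str());
      std::string line;
      std::getline(in, line);
      std::printf("read back: %s\n", line.c_str());
      return 0;
    }
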
diff --git a/src/caffe/test/test_hdf5data_layer.cpp b/src/caffe/test/test_hdf5data_layer.cpp
new file mode 100644
index 0000000..c9b027f
--- /dev/null
+++ b/src/caffe/test/test_hdf5data_layer.cpp
@@ -0,0 +1,135 @@
+#include <string>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class HDF5DataLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  HDF5DataLayerTest()
+      : filename(NULL),
+        blob_top_data_(new Blob<Dtype>()),
+        blob_top_label_(new Blob<Dtype>()),
+        blob_top_label2_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    blob_top_vec_.push_back(blob_top_data_);
+    blob_top_vec_.push_back(blob_top_label_);
+    blob_top_vec_.push_back(blob_top_label2_);
+
+    // Check out generate_sample_data.py in the same directory.
+    filename = new string(
+    CMAKE_SOURCE_DIR "caffe/test/test_data/sample_data_list.txt" CMAKE_EXT);
+    LOG(INFO)<< "Using sample HDF5 data file " << filename;
+  }
+
+  virtual ~HDF5DataLayerTest() {
+    delete blob_top_data_;
+    delete blob_top_label_;
+    delete blob_top_label2_;
+    delete filename;
+  }
+
+  string* filename;
+  Blob<Dtype>* const blob_top_data_;
+  Blob<Dtype>* const blob_top_label_;
+  Blob<Dtype>* const blob_top_label2_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(HDF5DataLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(HDF5DataLayerTest, TestRead) {
+  typedef typename TypeParam::Dtype Dtype;
+  // Create LayerParameter with the known parameters.
+  // Each sample data file has 10 rows of 8 x 6 x 5 values stored in
+  // row-major order (see generate_sample_data.py and the offsets below).
+  LayerParameter param;
+  param.add_top("data");
+  param.add_top("label");
+  param.add_top("label2");
+
+  HDF5DataParameter* hdf5_data_param = param.mutable_hdf5_data_param();
+  int batch_size = 5;
+  hdf5_data_param->set_batch_size(batch_size);
+  hdf5_data_param->set_source(*(this->filename));
+  int num_cols = 8;
+  int height = 6;
+  int width = 5;
+
+  // Test that the layer setup got the correct parameters.
+  HDF5DataLayer<Dtype> layer(param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_data_->num(), batch_size);
+  EXPECT_EQ(this->blob_top_data_->channels(), num_cols);
+  EXPECT_EQ(this->blob_top_data_->height(), height);
+  EXPECT_EQ(this->blob_top_data_->width(), width);
+
+  EXPECT_EQ(this->blob_top_label_->num_axes(), 2);
+  EXPECT_EQ(this->blob_top_label_->shape(0), batch_size);
+  EXPECT_EQ(this->blob_top_label_->shape(1), 1);
+
+  EXPECT_EQ(this->blob_top_label2_->num_axes(), 2);
+  EXPECT_EQ(this->blob_top_label2_->shape(0), batch_size);
+  EXPECT_EQ(this->blob_top_label2_->shape(1), 1);
+
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  // Run 10 forward passes (batches of 5 rows), alternating between the two
+  // sample files every 2 batches.
+  const int data_size = num_cols * height * width;
+  for (int iter = 0; iter < 10; ++iter) {
+    layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+
+    // On even iterations, we're reading the first half of the data.
+    // On odd iterations, we're reading the second half of the data.
+    // NB: label is 1-indexed
+    int label_offset = 1 + ((iter % 2 == 0) ? 0 : batch_size);
+    int label2_offset = 1 + label_offset;
+    int data_offset = (iter % 2 == 0) ? 0 : batch_size * data_size;
+
+    // Every two iterations we are reading the second file,
+    // which has the same labels, but data is offset by total data size,
+    // which is 2400 (see generate_sample_data).
+    int file_offset = (iter % 4 < 2) ? 0 : 2400;
+
+    for (int i = 0; i < batch_size; ++i) {
+      EXPECT_EQ(
+        label_offset + i,
+        this->blob_top_label_->cpu_data()[i]);
+      EXPECT_EQ(
+        label2_offset + i,
+        this->blob_top_label2_->cpu_data()[i]);
+    }
+    for (int i = 0; i < batch_size; ++i) {
+      for (int j = 0; j < num_cols; ++j) {
+        for (int h = 0; h < height; ++h) {
+          for (int w = 0; w < width; ++w) {
+            int idx = (
+              i * num_cols * height * width +
+              j * height * width +
+              h * width + w);
+            EXPECT_EQ(
+              file_offset + data_offset + idx,
+              this->blob_top_data_->cpu_data()[idx])
+              << "debug: i " << i << " j " << j
+              << " iter " << iter;
+          }
+        }
+      }
+    }
+  }
+}
+
+}  // namespace caffe
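Worked values of the offset arithmetic in TestRead, using the constants from the test itself (batch_size = 5, data_size = 8 * 6 * 5 = 240, second-file offset 2400):

    #include <cstdio>

    int main() {
      const int batch_size = 5;
      const int data_size = 8 * 6 * 5;  // num_cols * height * width = 240

      for (int iter = 0; iter < 4; ++iter) {
        const int label_offset = 1 + ((iter % 2 == 0) ? 0 : batch_size);
        const int label2_offset = 1 + label_offset;
        const int data_offset = (iter % 2 == 0) ? 0 : batch_size * data_size;
        const int file_offset = (iter % 4 < 2) ? 0 : 2400;
        std::printf("iter %d: label_offset=%d label2_offset=%d "
                    "data_offset=%d file_offset=%d\n",
                    iter, label_offset, label2_offset,
                    data_offset, file_offset);
      }
      // Prints: iter 0 -> 1, 2, 0, 0;  iter 1 -> 6, 7, 1200, 0;
      //         iter 2 -> 1, 2, 0, 2400;  iter 3 -> 6, 7, 1200, 2400.
      return 0;
    }
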
diff --git a/src/caffe/test/test_hinge_loss_layer.cpp b/src/caffe/test/test_hinge_loss_layer.cpp
new file mode 100644
index 0000000..b6a9902
--- /dev/null
+++ b/src/caffe/test/test_hinge_loss_layer.cpp
@@ -0,0 +1,76 @@
+#include <cmath>
+#include <cstdlib>
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class HingeLossLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  HingeLossLayerTest()
+      : blob_bottom_data_(new Blob<Dtype>(10, 5, 1, 1)),
+        blob_bottom_label_(new Blob<Dtype>(10, 1, 1, 1)),
+        blob_top_loss_(new Blob<Dtype>()) {
+    // fill the values
+    Caffe::set_random_seed(1701);
+    FillerParameter filler_param;
+    filler_param.set_std(10);
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_data_);
+    blob_bottom_vec_.push_back(blob_bottom_data_);
+    for (int i = 0; i < blob_bottom_label_->count(); ++i) {
+      blob_bottom_label_->mutable_cpu_data()[i] = caffe_rng_rand() % 5;
+    }
+    blob_bottom_vec_.push_back(blob_bottom_label_);
+    blob_top_vec_.push_back(blob_top_loss_);
+  }
+  virtual ~HingeLossLayerTest() {
+    delete blob_bottom_data_;
+    delete blob_bottom_label_;
+    delete blob_top_loss_;
+  }
+  Blob<Dtype>* const blob_bottom_data_;
+  Blob<Dtype>* const blob_bottom_label_;
+  Blob<Dtype>* const blob_top_loss_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(HingeLossLayerTest, TestDtypesAndDevices);
+
+
+TYPED_TEST(HingeLossLayerTest, TestGradientL1) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  HingeLossLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 2e-3, 1701, 1, 0.01);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 0);
+}
+
+TYPED_TEST(HingeLossLayerTest, TestGradientL2) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  // Set norm to L2
+  HingeLossParameter* hinge_loss_param = layer_param.mutable_hinge_loss_param();
+  hinge_loss_param->set_norm(HingeLossParameter_Norm_L2);
+  HingeLossLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 0);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_im2col_kernel.cu b/src/caffe/test/test_im2col_kernel.cu
new file mode 100644
index 0000000..0017ac2
--- /dev/null
+++ b/src/caffe/test/test_im2col_kernel.cu
@@ -0,0 +1,125 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/util/im2col.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+// Forward declare kernel functions
+template <typename Dtype>
+__global__ void im2col_gpu_kernel(const int n, const Dtype* data_im,
+    const int height, const int width, const int kernel_h, const int kernel_w,
+    const int pad_h, const int pad_w,
+    const int stride_h, const int stride_w,
+    const int height_col, const int width_col,
+    Dtype* data_col);
+
+extern cudaDeviceProp CAFFE_TEST_CUDA_PROP;
+
+template <typename Dtype>
+class Im2colKernelTest : public GPUDeviceTest<Dtype> {
+ protected:
+  Im2colKernelTest()
+        // Big bottom blob so the im2col kernel launches > 1024 threads.
+      : blob_bottom_(new Blob<Dtype>(5, 500, 10, 10)),
+        blob_top_(new Blob<Dtype>()),
+        blob_top_cpu_(new Blob<Dtype>()) {
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+
+    height_ = blob_bottom_->height();
+    width_ = blob_bottom_->width();
+    channels_ = blob_bottom_->channels();
+    pad_ = 0;
+    stride_ = 2;
+    kernel_size_ = 3;
+    height_col_ = (height_ + 2 * pad_ - kernel_size_) / stride_ + 1;
+    width_col_ = (width_ + 2 * pad_ - kernel_size_) / stride_ + 1;
+  }
+
+  virtual ~Im2colKernelTest() {
+      delete blob_bottom_;
+      delete blob_top_;
+      delete blob_top_cpu_;
+  }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  Blob<Dtype>* const blob_top_cpu_;
+  int height_;
+  int width_;
+  int channels_;
+  int pad_;
+  int stride_;
+  int kernel_size_;
+  int height_col_;
+  int width_col_;
+};
+
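The height_col_/width_col_ expressions in the constructor follow the usual im2col output-size formula; plugging in the test's settings (10 x 10 input, pad 0, kernel 3, stride 2) gives 4 x 4 output columns:

    #include <cstdio>

    int main() {
      const int height = 10, width = 10, pad = 0, kernel = 3, stride = 2;
      const int height_col = (height + 2 * pad - kernel) / stride + 1;  // 4
      const int width_col  = (width  + 2 * pad - kernel) / stride + 1;  // 4
      std::printf("height_col=%d width_col=%d\n", height_col, width_col);
      return 0;
    }
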
+TYPED_TEST_CASE(Im2colKernelTest, TestDtypes);
+
+TYPED_TEST(Im2colKernelTest, TestGPU) {
+  // Reshape the blobs to correct size for im2col output
+  this->blob_top_->Reshape(this->blob_bottom_->num(),
+          this->channels_ * this->kernel_size_ * this->kernel_size_,
+          this->height_col_,
+          this->width_col_);
+
+  this->blob_top_cpu_->Reshape(this->blob_bottom_->num(),
+          this->channels_ * this->kernel_size_ * this->kernel_size_,
+          this->height_col_,
+          this->width_col_);
+
+  const TypeParam* bottom_data = this->blob_bottom_->gpu_data();
+  TypeParam* top_data = this->blob_top_->mutable_gpu_data();
+  TypeParam* cpu_data = this->blob_top_cpu_->mutable_cpu_data();
+
+  // CPU Version
+  for (int n = 0; n < this->blob_bottom_->num(); ++n) {
+    im2col_cpu(this->blob_bottom_->cpu_data() + this->blob_bottom_->offset(n),
+      this->channels_, this->height_, this->width_,
+      this->kernel_size_, this->kernel_size_, this->pad_, this->pad_,
+      this->stride_, this->stride_,
+      cpu_data + this->blob_top_cpu_->offset(n));
+  }
+
+  // GPU version
+  int num_kernels = this->channels_ * this->height_col_ * this->width_col_;
+  int default_grid_dim = CAFFE_GET_BLOCKS(num_kernels);
+
+  // Launch with different grid sizes
+  for (int grid_div = 2; grid_div <= 8; grid_div++) {
+    for (int n = 0; n < this->blob_bottom_->num(); ++n) {
+      int grid_dim = default_grid_dim/grid_div;
+      // NOLINT_NEXT_LINE(whitespace/operators)
+      im2col_gpu_kernel<TypeParam><<<grid_dim, CAFFE_CUDA_NUM_THREADS>>>(
+        num_kernels, bottom_data + this->blob_bottom_->offset(n),
+        this->height_, this->width_, this->kernel_size_, this->kernel_size_,
+        this->pad_, this->pad_, this->stride_, this->stride_,
+        this->height_col_, this->width_col_,
+        top_data + this->blob_top_->offset(n));
+      CUDA_POST_KERNEL_CHECK;
+    }
+
+    // Compare results against CPU version
+    for (int i = 0; i < this->blob_top_->count(); ++i) {
+      TypeParam cpuval = cpu_data[i];
+      TypeParam gpuval = this->blob_top_->cpu_data()[i];
+      EXPECT_EQ(cpuval, gpuval);
+      if (cpuval != gpuval) {
+        break;
+      }
+    }
+  }
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_im2col_layer.cpp b/src/caffe/test/test_im2col_layer.cpp
new file mode 100644
index 0000000..f50abe1
--- /dev/null
+++ b/src/caffe/test/test_im2col_layer.cpp
@@ -0,0 +1,118 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class Im2colLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+ protected:
+  Im2colLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 6, 5)),
+        blob_top_(new Blob<Dtype>()) {
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~Im2colLayerTest() { delete blob_bottom_; delete blob_top_; }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(Im2colLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(Im2colLayerTest, TestSetup) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  Im2colLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 27);
+  EXPECT_EQ(this->blob_top_->height(), 2);
+  EXPECT_EQ(this->blob_top_->width(), 2);
+}
+
+TYPED_TEST(Im2colLayerTest, TestForward) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  Im2colLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // We are lazy and will only check the top left block
+  for (int c = 0; c < 27; ++c) {
+    EXPECT_EQ(this->blob_bottom_->data_at(0, (c / 9), (c / 3) % 3, c % 3),
+        this->blob_top_->data_at(0, c, 0, 0));
+  }
+}
+
+TYPED_TEST(Im2colLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_size(3);
+  convolution_param->set_stride(2);
+  Im2colLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+
+TYPED_TEST(Im2colLayerTest, TestRect) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_h(5);
+  convolution_param->set_kernel_w(3);
+  convolution_param->set_stride(2);
+  Im2colLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // We are lazy and will only check the top left block
+  for (int c = 0; c < 45; ++c) {
+    EXPECT_EQ(this->blob_top_->data_at(0, c, 0, 0),
+        this->blob_bottom_->data_at(0, (c / 15), (c / 3) % 5, c % 3));
+  }
+}
+
+
+TYPED_TEST(Im2colLayerTest, TestRectGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ConvolutionParameter* convolution_param =
+      layer_param.mutable_convolution_param();
+  convolution_param->set_kernel_h(5);
+  convolution_param->set_kernel_w(3);
+  convolution_param->set_stride(2);
+  Im2colLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+}  // namespace caffe
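The data_at() indices in TestForward and TestRect decode the flattened im2col channel as (input channel, kernel row, kernel column). A small standalone sketch of that decoding for the 3 x 3 kernel case used in TestForward:

    #include <cstdio>

    // For a k_h x k_w kernel, im2col column c maps to input channel
    // c / (k_h * k_w), kernel row (c / k_w) % k_h and kernel column c % k_w.
    int main() {
      const int k_h = 3, k_w = 3;          // square kernel from TestForward
      for (int c = 0; c < 27; c += 13) {   // spot-check a few columns
        std::printf("c=%2d -> channel=%d kh=%d kw=%d\n",
                    c, c / (k_h * k_w), (c / k_w) % k_h, c % k_w);
      }
      return 0;
    }
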
diff --git a/src/caffe/test/test_image_data_layer.cpp b/src/caffe/test/test_image_data_layer.cpp
new file mode 100644
index 0000000..931a5eb
--- /dev/null
+++ b/src/caffe/test/test_image_data_layer.cpp
@@ -0,0 +1,179 @@
+#include <map>
+#include <string>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/io.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class ImageDataLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  ImageDataLayerTest()
+      : seed_(1701),
+        blob_top_data_(new Blob<Dtype>()),
+        blob_top_label_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    blob_top_vec_.push_back(blob_top_data_);
+    blob_top_vec_.push_back(blob_top_label_);
+    Caffe::set_random_seed(seed_);
+    // Create test input file.
+    MakeTempFilename(&filename_);
+    std::ofstream outfile(filename_.c_str(), std::ofstream::out);
+    LOG(INFO) << "Using temporary file " << filename_;
+    for (int i = 0; i < 5; ++i) {
+      outfile << EXAMPLES_SOURCE_DIR "images/cat.jpg " << i << std::endl;
+    }
+    outfile.close();
+    // Create test input file for images of distinct sizes.
+    MakeTempFilename(&filename_reshape_);
+    std::ofstream reshapefile(filename_reshape_.c_str(), std::ofstream::out);
+    LOG(INFO) << "Using temporary file " << filename_reshape_;
+    reshapefile << EXAMPLES_SOURCE_DIR "images/cat.jpg " << 0;
+    reshapefile << EXAMPLES_SOURCE_DIR "images/fish-bike.jpg " << 1;
+    reshapefile.close();
+  }
+
+  virtual ~ImageDataLayerTest() {
+    delete blob_top_data_;
+    delete blob_top_label_;
+  }
+
+  int seed_;
+  string filename_;
+  string filename_reshape_;
+  Blob<Dtype>* const blob_top_data_;
+  Blob<Dtype>* const blob_top_label_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(ImageDataLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(ImageDataLayerTest, TestRead) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter param;
+  ImageDataParameter* image_data_param = param.mutable_image_data_param();
+  image_data_param->set_batch_size(5);
+  image_data_param->set_source(this->filename_.c_str());
+  image_data_param->set_shuffle(false);
+  ImageDataLayer<Dtype> layer(param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_data_->num(), 5);
+  EXPECT_EQ(this->blob_top_data_->channels(), 3);
+  EXPECT_EQ(this->blob_top_data_->height(), 360);
+  EXPECT_EQ(this->blob_top_data_->width(), 480);
+  EXPECT_EQ(this->blob_top_label_->num(), 5);
+  EXPECT_EQ(this->blob_top_label_->channels(), 1);
+  EXPECT_EQ(this->blob_top_label_->height(), 1);
+  EXPECT_EQ(this->blob_top_label_->width(), 1);
+  // Go through the data twice
+  for (int iter = 0; iter < 2; ++iter) {
+    layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    for (int i = 0; i < 5; ++i) {
+      EXPECT_EQ(i, this->blob_top_label_->cpu_data()[i]);
+    }
+  }
+}
+
+TYPED_TEST(ImageDataLayerTest, TestResize) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter param;
+  ImageDataParameter* image_data_param = param.mutable_image_data_param();
+  image_data_param->set_batch_size(5);
+  image_data_param->set_source(this->filename_.c_str());
+  image_data_param->set_new_height(256);
+  image_data_param->set_new_width(256);
+  image_data_param->set_shuffle(false);
+  ImageDataLayer<Dtype> layer(param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_data_->num(), 5);
+  EXPECT_EQ(this->blob_top_data_->channels(), 3);
+  EXPECT_EQ(this->blob_top_data_->height(), 256);
+  EXPECT_EQ(this->blob_top_data_->width(), 256);
+  EXPECT_EQ(this->blob_top_label_->num(), 5);
+  EXPECT_EQ(this->blob_top_label_->channels(), 1);
+  EXPECT_EQ(this->blob_top_label_->height(), 1);
+  EXPECT_EQ(this->blob_top_label_->width(), 1);
+  // Go through the data twice
+  for (int iter = 0; iter < 2; ++iter) {
+    layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    for (int i = 0; i < 5; ++i) {
+      EXPECT_EQ(i, this->blob_top_label_->cpu_data()[i]);
+    }
+  }
+}
+
+TYPED_TEST(ImageDataLayerTest, TestReshape) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter param;
+  ImageDataParameter* image_data_param = param.mutable_image_data_param();
+  image_data_param->set_batch_size(1);
+  image_data_param->set_source(this->filename_reshape_.c_str());
+  image_data_param->set_shuffle(false);
+  ImageDataLayer<Dtype> layer(param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_label_->num(), 1);
+  EXPECT_EQ(this->blob_top_label_->channels(), 1);
+  EXPECT_EQ(this->blob_top_label_->height(), 1);
+  EXPECT_EQ(this->blob_top_label_->width(), 1);
+  // cat.jpg
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_data_->num(), 1);
+  EXPECT_EQ(this->blob_top_data_->channels(), 3);
+  EXPECT_EQ(this->blob_top_data_->height(), 360);
+  EXPECT_EQ(this->blob_top_data_->width(), 480);
+  // fish-bike.jpg
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_data_->num(), 1);
+  EXPECT_EQ(this->blob_top_data_->channels(), 3);
+  EXPECT_EQ(this->blob_top_data_->height(), 323);
+  EXPECT_EQ(this->blob_top_data_->width(), 481);
+}
+
+TYPED_TEST(ImageDataLayerTest, TestShuffle) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter param;
+  ImageDataParameter* image_data_param = param.mutable_image_data_param();
+  image_data_param->set_batch_size(5);
+  image_data_param->set_source(this->filename_.c_str());
+  image_data_param->set_shuffle(true);
+  ImageDataLayer<Dtype> layer(param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_data_->num(), 5);
+  EXPECT_EQ(this->blob_top_data_->channels(), 3);
+  EXPECT_EQ(this->blob_top_data_->height(), 360);
+  EXPECT_EQ(this->blob_top_data_->width(), 480);
+  EXPECT_EQ(this->blob_top_label_->num(), 5);
+  EXPECT_EQ(this->blob_top_label_->channels(), 1);
+  EXPECT_EQ(this->blob_top_label_->height(), 1);
+  EXPECT_EQ(this->blob_top_label_->width(), 1);
+  // Go through the data twice
+  for (int iter = 0; iter < 2; ++iter) {
+    layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    map<Dtype, int> values_to_indices;
+    int num_in_order = 0;
+    for (int i = 0; i < 5; ++i) {
+      Dtype value = this->blob_top_label_->cpu_data()[i];
+      // Check that the value has not been seen already (no duplicates).
+      EXPECT_EQ(values_to_indices.find(value), values_to_indices.end());
+      values_to_indices[value] = i;
+      num_in_order += (value == Dtype(i));
+    }
+    EXPECT_EQ(5, values_to_indices.size());
+    EXPECT_GT(5, num_in_order);
+  }
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_infogain_loss_layer.cpp b/src/caffe/test/test_infogain_loss_layer.cpp
new file mode 100644
index 0000000..7ec2f80
--- /dev/null
+++ b/src/caffe/test/test_infogain_loss_layer.cpp
@@ -0,0 +1,70 @@
+#include <cmath>
+#include <cstdlib>
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/loss_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class InfogainLossLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  InfogainLossLayerTest()
+      : blob_bottom_data_(new Blob<Dtype>(10, 5, 1, 1)),
+        blob_bottom_label_(new Blob<Dtype>(10, 1, 1, 1)),
+        blob_bottom_infogain_(new Blob<Dtype>(1, 1, 5, 5)),
+        blob_top_loss_(new Blob<Dtype>()) {
+    Caffe::set_random_seed(1701);
+    FillerParameter filler_param;
+    PositiveUnitballFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_data_);
+    blob_bottom_vec_.push_back(blob_bottom_data_);
+    for (int i = 0; i < blob_bottom_label_->count(); ++i) {
+      blob_bottom_label_->mutable_cpu_data()[i] = caffe_rng_rand() % 5;
+    }
+    blob_bottom_vec_.push_back(blob_bottom_label_);
+    filler_param.set_min(0.1);
+    filler_param.set_max(2.0);
+    UniformFiller<Dtype> infogain_filler(filler_param);
+    infogain_filler.Fill(this->blob_bottom_infogain_);
+    blob_bottom_vec_.push_back(blob_bottom_infogain_);
+    blob_top_vec_.push_back(blob_top_loss_);
+  }
+  virtual ~InfogainLossLayerTest() {
+    delete blob_bottom_data_;
+    delete blob_bottom_label_;
+    delete blob_bottom_infogain_;
+    delete blob_top_loss_;
+  }
+  Blob<Dtype>* const blob_bottom_data_;
+  Blob<Dtype>* const blob_bottom_label_;
+  Blob<Dtype>* const blob_bottom_infogain_;
+  Blob<Dtype>* const blob_top_loss_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(InfogainLossLayerTest, TestDtypesAndDevices);
+
+
+TYPED_TEST(InfogainLossLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  InfogainLossLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-4, 2e-2, 1701, 1, 0.01);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 0);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_inner_product_layer.cpp b/src/caffe/test/test_inner_product_layer.cpp
new file mode 100644
index 0000000..c03df17
--- /dev/null
+++ b/src/caffe/test/test_inner_product_layer.cpp
@@ -0,0 +1,113 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+#ifndef CPU_ONLY
+extern cudaDeviceProp CAFFE_TEST_CUDA_PROP;
+#endif
+
+template <typename TypeParam>
+class InnerProductLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+ protected:
+  InnerProductLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 4, 5)),
+        blob_top_(new Blob<Dtype>()) {
+    // fill the values
+    FillerParameter filler_param;
+    UniformFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~InnerProductLayerTest() { delete blob_bottom_; delete blob_top_; }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(InnerProductLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(InnerProductLayerTest, TestSetUp) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  InnerProductParameter* inner_product_param =
+      layer_param.mutable_inner_product_param();
+  inner_product_param->set_num_output(10);
+  shared_ptr<InnerProductLayer<Dtype> > layer(
+      new InnerProductLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->height(), 1);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+  EXPECT_EQ(this->blob_top_->channels(), 10);
+}
+
+TYPED_TEST(InnerProductLayerTest, TestForward) {
+  typedef typename TypeParam::Dtype Dtype;
+  bool IS_VALID_CUDA = false;
+#ifndef CPU_ONLY
+  IS_VALID_CUDA = CAFFE_TEST_CUDA_PROP.major >= 2;
+#endif
+  if (Caffe::mode() == Caffe::CPU ||
+      sizeof(Dtype) == 4 || IS_VALID_CUDA) {
+    LayerParameter layer_param;
+    InnerProductParameter* inner_product_param =
+        layer_param.mutable_inner_product_param();
+    inner_product_param->set_num_output(10);
+    inner_product_param->mutable_weight_filler()->set_type("uniform");
+    inner_product_param->mutable_bias_filler()->set_type("uniform");
+    inner_product_param->mutable_bias_filler()->set_min(1);
+    inner_product_param->mutable_bias_filler()->set_max(2);
+    shared_ptr<InnerProductLayer<Dtype> > layer(
+        new InnerProductLayer<Dtype>(layer_param));
+    layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+    layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    const Dtype* data = this->blob_top_->cpu_data();
+    const int count = this->blob_top_->count();
+    for (int i = 0; i < count; ++i) {
+      EXPECT_GE(data[i], 1.);
+    }
+  } else {
+    LOG(ERROR) << "Skipping test due to old architecture.";
+  }
+}
+
+TYPED_TEST(InnerProductLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  bool IS_VALID_CUDA = false;
+#ifndef CPU_ONLY
+  IS_VALID_CUDA = CAFFE_TEST_CUDA_PROP.major >= 2;
+#endif
+  if (Caffe::mode() == Caffe::CPU ||
+      sizeof(Dtype) == 4 || IS_VALID_CUDA) {
+    LayerParameter layer_param;
+    InnerProductParameter* inner_product_param =
+        layer_param.mutable_inner_product_param();
+    inner_product_param->set_num_output(10);
+    inner_product_param->mutable_weight_filler()->set_type("gaussian");
+    inner_product_param->mutable_bias_filler()->set_type("gaussian");
+    inner_product_param->mutable_bias_filler()->set_min(1);
+    inner_product_param->mutable_bias_filler()->set_max(2);
+    InnerProductLayer<Dtype> layer(layer_param);
+    GradientChecker<Dtype> checker(1e-2, 1e-3);
+    checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+        this->blob_top_vec_);
+  } else {
+    LOG(ERROR) << "Skipping test due to old architecture.";
+  }
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_internal_thread.cpp b/src/caffe/test/test_internal_thread.cpp
new file mode 100644
index 0000000..31882b6
--- /dev/null
+++ b/src/caffe/test/test_internal_thread.cpp
@@ -0,0 +1,23 @@
+#include "glog/logging.h"
+#include "gtest/gtest.h"
+
+#include "caffe/internal_thread.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+
+class InternalThreadTest : public ::testing::Test {};
+
+TEST_F(InternalThreadTest, TestStartAndExit) {
+  InternalThread thread;
+  EXPECT_FALSE(thread.is_started());
+  EXPECT_TRUE(thread.StartInternalThread());
+  EXPECT_TRUE(thread.is_started());
+  EXPECT_TRUE(thread.WaitForInternalThreadToExit());
+  EXPECT_FALSE(thread.is_started());
+}
+
+}  // namespace caffe
+
diff --git a/src/caffe/test/test_io.cpp b/src/caffe/test/test_io.cpp
new file mode 100644
index 0000000..4ab9631
--- /dev/null
+++ b/src/caffe/test/test_io.cpp
@@ -0,0 +1,422 @@
+#include <opencv2/core/core.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/highgui/highgui_c.h>
+#include <opencv2/imgproc/imgproc.hpp>
+
+#include <string>
+
+#include "gtest/gtest.h"
+
+#include "caffe/common.hpp"
+#include "caffe/util/io.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+class IOTest : public ::testing::Test {};
+
+bool ReadImageToDatumReference(const string& filename, const int label,
+    const int height, const int width, const bool is_color, Datum* datum) {
+  cv::Mat cv_img;
+  int cv_read_flag = (is_color ? CV_LOAD_IMAGE_COLOR :
+    CV_LOAD_IMAGE_GRAYSCALE);
+
+  cv::Mat cv_img_origin = cv::imread(filename, cv_read_flag);
+  if (!cv_img_origin.data) {
+    LOG(ERROR) << "Could not open or find file " << filename;
+    return false;
+  }
+  if (height > 0 && width > 0) {
+    cv::resize(cv_img_origin, cv_img, cv::Size(width, height));
+  } else {
+    cv_img = cv_img_origin;
+  }
+
+  int num_channels = (is_color ? 3 : 1);
+  datum->set_channels(num_channels);
+  datum->set_height(cv_img.rows);
+  datum->set_width(cv_img.cols);
+  datum->set_label(label);
+  datum->clear_data();
+  datum->clear_float_data();
+  string* datum_string = datum->mutable_data();
+  if (is_color) {
+    for (int c = 0; c < num_channels; ++c) {
+      for (int h = 0; h < cv_img.rows; ++h) {
+        for (int w = 0; w < cv_img.cols; ++w) {
+          datum_string->push_back(
+            static_cast<char>(cv_img.at<cv::Vec3b>(h, w)[c]));
+        }
+      }
+    }
+  } else {  // Faster than repeatedly testing is_color for each pixel w/i loop
+    for (int h = 0; h < cv_img.rows; ++h) {
+      for (int w = 0; w < cv_img.cols; ++w) {
+        datum_string->push_back(
+          static_cast<char>(cv_img.at<uchar>(h, w)));
+      }
+    }
+  }
+  return true;
+}
+
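The reference encoder above writes pixels channel by channel, then row by row, so a pixel lands at flat index (c * rows + h) * cols + w; the content tests further down iterate in the same order. A small sketch using the 360 x 480 cat.jpg dimensions reported by the tests:

    #include <cstdio>

    int main() {
      const int rows = 360, cols = 480;  // cat.jpg height and width
      const int c = 1, h = 2, w = 3;     // arbitrary illustrative pixel
      const int index = (c * rows + h) * cols + w;
      std::printf("flat index of (c=%d, h=%d, w=%d) = %d\n", c, h, w, index);
      return 0;
    }
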
+TEST_F(IOTest, TestReadImageToDatum) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  ReadImageToDatum(filename, 0, &datum);
+  EXPECT_EQ(datum.channels(), 3);
+  EXPECT_EQ(datum.height(), 360);
+  EXPECT_EQ(datum.width(), 480);
+}
+
+TEST_F(IOTest, TestReadImageToDatumReference) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum, datum_ref;
+  ReadImageToDatum(filename, 0, 0, 0, true, &datum);
+  ReadImageToDatumReference(filename, 0, 0, 0, true, &datum_ref);
+  EXPECT_EQ(datum.channels(), datum_ref.channels());
+  EXPECT_EQ(datum.height(), datum_ref.height());
+  EXPECT_EQ(datum.width(), datum_ref.width());
+  EXPECT_EQ(datum.data().size(), datum_ref.data().size());
+
+  const string& data = datum.data();
+  const string& data_ref = datum_ref.data();
+
+  for (int i = 0; i < datum.data().size(); ++i) {
+    EXPECT_TRUE(data[i] == data_ref[i]);
+  }
+}
+
+
+TEST_F(IOTest, TestReadImageToDatumReferenceResized) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum, datum_ref;
+  ReadImageToDatum(filename, 0, 100, 200, true, &datum);
+  ReadImageToDatumReference(filename, 0, 100, 200, true, &datum_ref);
+  EXPECT_EQ(datum.channels(), datum_ref.channels());
+  EXPECT_EQ(datum.height(), datum_ref.height());
+  EXPECT_EQ(datum.width(), datum_ref.width());
+  EXPECT_EQ(datum.data().size(), datum_ref.data().size());
+
+  const string& data = datum.data();
+  const string& data_ref = datum_ref.data();
+
+  for (int i = 0; i < datum.data().size(); ++i) {
+    EXPECT_TRUE(data[i] == data_ref[i]);
+  }
+}
+
+TEST_F(IOTest, TestReadImageToDatumContent) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  ReadImageToDatum(filename, 0, &datum);
+  cv::Mat cv_img = ReadImageToCVMat(filename);
+  EXPECT_EQ(datum.channels(), cv_img.channels());
+  EXPECT_EQ(datum.height(), cv_img.rows);
+  EXPECT_EQ(datum.width(), cv_img.cols);
+
+  const string& data = datum.data();
+  int index = 0;
+  for (int c = 0; c < datum.channels(); ++c) {
+    for (int h = 0; h < datum.height(); ++h) {
+      for (int w = 0; w < datum.width(); ++w) {
+        EXPECT_TRUE(data[index++] ==
+          static_cast<char>(cv_img.at<cv::Vec3b>(h, w)[c]));
+      }
+    }
+  }
+}
+
+TEST_F(IOTest, TestReadImageToDatumContentGray) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  const bool is_color = false;
+  ReadImageToDatum(filename, 0, is_color, &datum);
+  cv::Mat cv_img = ReadImageToCVMat(filename, is_color);
+  EXPECT_EQ(datum.channels(), cv_img.channels());
+  EXPECT_EQ(datum.height(), cv_img.rows);
+  EXPECT_EQ(datum.width(), cv_img.cols);
+
+  const string& data = datum.data();
+  int index = 0;
+  for (int h = 0; h < datum.height(); ++h) {
+    for (int w = 0; w < datum.width(); ++w) {
+      EXPECT_TRUE(data[index++] == static_cast<char>(cv_img.at<uchar>(h, w)));
+    }
+  }
+}
+
+TEST_F(IOTest, TestReadImageToDatumResized) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  ReadImageToDatum(filename, 0, 100, 200, &datum);
+  EXPECT_EQ(datum.channels(), 3);
+  EXPECT_EQ(datum.height(), 100);
+  EXPECT_EQ(datum.width(), 200);
+}
+
+
+TEST_F(IOTest, TestReadImageToDatumResizedSquare) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  ReadImageToDatum(filename, 0, 256, 256, &datum);
+  EXPECT_EQ(datum.channels(), 3);
+  EXPECT_EQ(datum.height(), 256);
+  EXPECT_EQ(datum.width(), 256);
+}
+
+TEST_F(IOTest, TestReadImageToDatumGray) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  const bool is_color = false;
+  ReadImageToDatum(filename, 0, is_color, &datum);
+  EXPECT_EQ(datum.channels(), 1);
+  EXPECT_EQ(datum.height(), 360);
+  EXPECT_EQ(datum.width(), 480);
+}
+
+TEST_F(IOTest, TestReadImageToDatumResizedGray) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  const bool is_color = false;
+  ReadImageToDatum(filename, 0, 256, 256, is_color, &datum);
+  EXPECT_EQ(datum.channels(), 1);
+  EXPECT_EQ(datum.height(), 256);
+  EXPECT_EQ(datum.width(), 256);
+}
+
+TEST_F(IOTest, TestReadImageToCVMat) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  cv::Mat cv_img = ReadImageToCVMat(filename);
+  EXPECT_EQ(cv_img.channels(), 3);
+  EXPECT_EQ(cv_img.rows, 360);
+  EXPECT_EQ(cv_img.cols, 480);
+}
+
+TEST_F(IOTest, TestReadImageToCVMatResized) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  cv::Mat cv_img = ReadImageToCVMat(filename, 100, 200);
+  EXPECT_EQ(cv_img.channels(), 3);
+  EXPECT_EQ(cv_img.rows, 100);
+  EXPECT_EQ(cv_img.cols, 200);
+}
+
+TEST_F(IOTest, TestReadImageToCVMatResizedSquare) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  cv::Mat cv_img = ReadImageToCVMat(filename, 256, 256);
+  EXPECT_EQ(cv_img.channels(), 3);
+  EXPECT_EQ(cv_img.rows, 256);
+  EXPECT_EQ(cv_img.cols, 256);
+}
+
+TEST_F(IOTest, TestReadImageToCVMatGray) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  const bool is_color = false;
+  cv::Mat cv_img = ReadImageToCVMat(filename, is_color);
+  EXPECT_EQ(cv_img.channels(), 1);
+  EXPECT_EQ(cv_img.rows, 360);
+  EXPECT_EQ(cv_img.cols, 480);
+}
+
+TEST_F(IOTest, TestReadImageToCVMatResizedGray) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  const bool is_color = false;
+  cv::Mat cv_img = ReadImageToCVMat(filename, 256, 256, is_color);
+  EXPECT_EQ(cv_img.channels(), 1);
+  EXPECT_EQ(cv_img.rows, 256);
+  EXPECT_EQ(cv_img.cols, 256);
+}
+
+TEST_F(IOTest, TestCVMatToDatum) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  cv::Mat cv_img = ReadImageToCVMat(filename);
+  Datum datum;
+  CVMatToDatum(cv_img, &datum);
+  EXPECT_EQ(datum.channels(), 3);
+  EXPECT_EQ(datum.height(), 360);
+  EXPECT_EQ(datum.width(), 480);
+}
+
+TEST_F(IOTest, TestCVMatToDatumContent) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  cv::Mat cv_img = ReadImageToCVMat(filename);
+  Datum datum;
+  CVMatToDatum(cv_img, &datum);
+  Datum datum_ref;
+  ReadImageToDatum(filename, 0, &datum_ref);
+  EXPECT_EQ(datum.channels(), datum_ref.channels());
+  EXPECT_EQ(datum.height(), datum_ref.height());
+  EXPECT_EQ(datum.width(), datum_ref.width());
+  EXPECT_EQ(datum.data().size(), datum_ref.data().size());
+
+  const string& data = datum.data();
+  const string& data_ref = datum_ref.data();
+  for (int i = 0; i < datum.data().size(); ++i) {
+    EXPECT_TRUE(data[i] == data_ref[i]);
+  }
+}
+
+TEST_F(IOTest, TestCVMatToDatumReference) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  cv::Mat cv_img = ReadImageToCVMat(filename);
+  Datum datum;
+  CVMatToDatum(cv_img, &datum);
+  Datum datum_ref;
+  ReadImageToDatumReference(filename, 0, 0, 0, true, &datum_ref);
+  EXPECT_EQ(datum.channels(), datum_ref.channels());
+  EXPECT_EQ(datum.height(), datum_ref.height());
+  EXPECT_EQ(datum.width(), datum_ref.width());
+  EXPECT_EQ(datum.data().size(), datum_ref.data().size());
+
+  const string& data = datum.data();
+  const string& data_ref = datum_ref.data();
+  for (int i = 0; i < datum.data().size(); ++i) {
+    EXPECT_TRUE(data[i] == data_ref[i]);
+  }
+}
+
+TEST_F(IOTest, TestReadFileToDatum) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  EXPECT_TRUE(ReadFileToDatum(filename, &datum));
+  EXPECT_TRUE(datum.encoded());
+  EXPECT_EQ(datum.label(), -1);
+  EXPECT_EQ(datum.data().size(), 140391);
+}
+
+TEST_F(IOTest, TestDecodeDatum) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  EXPECT_TRUE(ReadFileToDatum(filename, &datum));
+  EXPECT_TRUE(DecodeDatum(&datum, true));
+  EXPECT_FALSE(DecodeDatum(&datum, true));
+  Datum datum_ref;
+  ReadImageToDatumReference(filename, 0, 0, 0, true, &datum_ref);
+  EXPECT_EQ(datum.channels(), datum_ref.channels());
+  EXPECT_EQ(datum.height(), datum_ref.height());
+  EXPECT_EQ(datum.width(), datum_ref.width());
+  EXPECT_EQ(datum.data().size(), datum_ref.data().size());
+
+  const string& data = datum.data();
+  const string& data_ref = datum_ref.data();
+  for (int i = 0; i < datum.data().size(); ++i) {
+    EXPECT_TRUE(data[i] == data_ref[i]);
+  }
+}
+
+TEST_F(IOTest, TestDecodeDatumToCVMat) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  EXPECT_TRUE(ReadFileToDatum(filename, &datum));
+  cv::Mat cv_img = DecodeDatumToCVMat(datum, true);
+  EXPECT_EQ(cv_img.channels(), 3);
+  EXPECT_EQ(cv_img.rows, 360);
+  EXPECT_EQ(cv_img.cols, 480);
+  cv_img = DecodeDatumToCVMat(datum, false);
+  EXPECT_EQ(cv_img.channels(), 1);
+  EXPECT_EQ(cv_img.rows, 360);
+  EXPECT_EQ(cv_img.cols, 480);
+}
+
+TEST_F(IOTest, TestDecodeDatumToCVMatContent) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  EXPECT_TRUE(ReadImageToDatum(filename, 0, std::string("jpg"), &datum));
+  cv::Mat cv_img = DecodeDatumToCVMat(datum, true);
+  cv::Mat cv_img_ref = ReadImageToCVMat(filename);
+  EXPECT_EQ(cv_img_ref.channels(), cv_img.channels());
+  EXPECT_EQ(cv_img_ref.rows, cv_img.rows);
+  EXPECT_EQ(cv_img_ref.cols, cv_img.cols);
+
+  for (int c = 0; c < datum.channels(); ++c) {
+    for (int h = 0; h < datum.height(); ++h) {
+      for (int w = 0; w < datum.width(); ++w) {
+        EXPECT_TRUE(cv_img.at<cv::Vec3b>(h, w)[c] ==
+                    cv_img_ref.at<cv::Vec3b>(h, w)[c]);
+      }
+    }
+  }
+}
+
+TEST_F(IOTest, TestDecodeDatumNative) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  EXPECT_TRUE(ReadFileToDatum(filename, &datum));
+  EXPECT_TRUE(DecodeDatumNative(&datum));
+  EXPECT_FALSE(DecodeDatumNative(&datum));
+  Datum datum_ref;
+  ReadImageToDatumReference(filename, 0, 0, 0, true, &datum_ref);
+  EXPECT_EQ(datum.channels(), datum_ref.channels());
+  EXPECT_EQ(datum.height(), datum_ref.height());
+  EXPECT_EQ(datum.width(), datum_ref.width());
+  EXPECT_EQ(datum.data().size(), datum_ref.data().size());
+
+  const string& data = datum.data();
+  const string& data_ref = datum_ref.data();
+  for (int i = 0; i < datum.data().size(); ++i) {
+    EXPECT_TRUE(data[i] == data_ref[i]);
+  }
+}
+
+TEST_F(IOTest, TestDecodeDatumToCVMatNative) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  EXPECT_TRUE(ReadFileToDatum(filename, &datum));
+  cv::Mat cv_img = DecodeDatumToCVMatNative(datum);
+  EXPECT_EQ(cv_img.channels(), 3);
+  EXPECT_EQ(cv_img.rows, 360);
+  EXPECT_EQ(cv_img.cols, 480);
+}
+
+TEST_F(IOTest, TestDecodeDatumNativeGray) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat_gray.jpg";
+  Datum datum;
+  EXPECT_TRUE(ReadFileToDatum(filename, &datum));
+  EXPECT_TRUE(DecodeDatumNative(&datum));
+  EXPECT_FALSE(DecodeDatumNative(&datum));
+  Datum datum_ref;
+  ReadImageToDatumReference(filename, 0, 0, 0, false, &datum_ref);
+  EXPECT_EQ(datum.channels(), datum_ref.channels());
+  EXPECT_EQ(datum.height(), datum_ref.height());
+  EXPECT_EQ(datum.width(), datum_ref.width());
+  EXPECT_EQ(datum.data().size(), datum_ref.data().size());
+
+  const string& data = datum.data();
+  const string& data_ref = datum_ref.data();
+  for (int i = 0; i < datum.data().size(); ++i) {
+    EXPECT_TRUE(data[i] == data_ref[i]);
+  }
+}
+
+TEST_F(IOTest, TestDecodeDatumToCVMatNativeGray) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat_gray.jpg";
+  Datum datum;
+  EXPECT_TRUE(ReadFileToDatum(filename, &datum));
+  cv::Mat cv_img = DecodeDatumToCVMatNative(datum);
+  EXPECT_EQ(cv_img.channels(), 1);
+  EXPECT_EQ(cv_img.rows, 360);
+  EXPECT_EQ(cv_img.cols, 480);
+}
+
+TEST_F(IOTest, TestDecodeDatumToCVMatContentNative) {
+  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
+  Datum datum;
+  EXPECT_TRUE(ReadImageToDatum(filename, 0, std::string("jpg"), &datum));
+  cv::Mat cv_img = DecodeDatumToCVMatNative(datum);
+  cv::Mat cv_img_ref = ReadImageToCVMat(filename);
+  EXPECT_EQ(cv_img_ref.channels(), cv_img.channels());
+  EXPECT_EQ(cv_img_ref.rows, cv_img.rows);
+  EXPECT_EQ(cv_img_ref.cols, cv_img.cols);
+
+  for (int c = 0; c < datum.channels(); ++c) {
+    for (int h = 0; h < datum.height(); ++h) {
+      for (int w = 0; w < datum.width(); ++w) {
+        EXPECT_TRUE(cv_img.at<cv::Vec3b>(h, w)[c] ==
+                    cv_img_ref.at<cv::Vec3b>(h, w)[c]);
+      }
+    }
+  }
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_layer_factory.cpp b/src/caffe/test/test_layer_factory.cpp
new file mode 100644
index 0000000..efb1b37
--- /dev/null
+++ b/src/caffe/test/test_layer_factory.cpp
@@ -0,0 +1,35 @@
+#include <map>
+#include <string>
+
+#include "gtest/gtest.h"
+
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/layer_factory.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class LayerFactoryTest : public MultiDeviceTest<TypeParam> {};
+
+TYPED_TEST_CASE(LayerFactoryTest, TestDtypesAndDevices);
+
+TYPED_TEST(LayerFactoryTest, TestCreateLayer) {
+  typedef typename TypeParam::Dtype Dtype;
+  typename LayerRegistry<Dtype>::CreatorRegistry& registry =
+      LayerRegistry<Dtype>::Registry();
+  shared_ptr<Layer<Dtype> > layer;
+  LayerParameter layer_param;
+  for (typename LayerRegistry<Dtype>::CreatorRegistry::iterator iter =
+       registry.begin(); iter != registry.end(); ++iter) {
+    // Special case: PythonLayer is checked by pytest
+    if (iter->first == "Python") { continue; }
+    layer_param.set_type(iter->first);
+    layer = LayerRegistry<Dtype>::CreateLayer(layer_param);
+    EXPECT_EQ(iter->first, layer->type());
+  }
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_lrn_layer.cpp b/src/caffe/test/test_lrn_layer.cpp
new file mode 100644
index 0000000..c4e2f8e
--- /dev/null
+++ b/src/caffe/test/test_lrn_layer.cpp
@@ -0,0 +1,250 @@
+#include <algorithm>
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+using std::min;
+using std::max;
+
+namespace caffe {
+
+template <typename TypeParam>
+class LRNLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  LRNLayerTest()
+      : epsilon_(Dtype(1e-5)),
+        blob_bottom_(new Blob<Dtype>()),
+        blob_top_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    Caffe::set_random_seed(1701);
+    blob_bottom_->Reshape(2, 7, 3, 3);
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~LRNLayerTest() { delete blob_bottom_; delete blob_top_; }
+  void ReferenceLRNForward(const Blob<Dtype>& blob_bottom,
+      const LayerParameter& layer_param, Blob<Dtype>* blob_top);
+
+  Dtype epsilon_;
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+template <typename TypeParam>
+void LRNLayerTest<TypeParam>::ReferenceLRNForward(
+    const Blob<Dtype>& blob_bottom, const LayerParameter& layer_param,
+    Blob<Dtype>* blob_top) {
+  typedef typename TypeParam::Dtype Dtype;
+  blob_top->Reshape(blob_bottom.num(), blob_bottom.channels(),
+      blob_bottom.height(), blob_bottom.width());
+  Dtype* top_data = blob_top->mutable_cpu_data();
+  LRNParameter lrn_param = layer_param.lrn_param();
+  Dtype alpha = lrn_param.alpha();
+  Dtype beta = lrn_param.beta();
+  int size = lrn_param.local_size();
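+  // The reference computes scale = 1 + (alpha / region size) * sum of squared
+  // inputs over the local region, then divides each input by scale^beta.
+  // Across-channels uses `size` neighboring channels; within-channel uses a
+  // size x size spatial window.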
+  switch (lrn_param.norm_region()) {
+  case LRNParameter_NormRegion_ACROSS_CHANNELS:
+    for (int n = 0; n < blob_bottom.num(); ++n) {
+      for (int c = 0; c < blob_bottom.channels(); ++c) {
+        for (int h = 0; h < blob_bottom.height(); ++h) {
+          for (int w = 0; w < blob_bottom.width(); ++w) {
+            int c_start = c - (size - 1) / 2;
+            int c_end = min(c_start + size, blob_bottom.channels());
+            c_start = max(c_start, 0);
+            Dtype scale = 1.;
+            for (int i = c_start; i < c_end; ++i) {
+              Dtype value = blob_bottom.data_at(n, i, h, w);
+              scale += value * value * alpha / size;
+            }
+            *(top_data + blob_top->offset(n, c, h, w)) =
+              blob_bottom.data_at(n, c, h, w) / pow(scale, beta);
+          }
+        }
+      }
+    }
+    break;
+  case LRNParameter_NormRegion_WITHIN_CHANNEL:
+    for (int n = 0; n < blob_bottom.num(); ++n) {
+      for (int c = 0; c < blob_bottom.channels(); ++c) {
+        for (int h = 0; h < blob_bottom.height(); ++h) {
+          int h_start = h - (size - 1) / 2;
+          int h_end = min(h_start + size, blob_bottom.height());
+          h_start = max(h_start, 0);
+          for (int w = 0; w < blob_bottom.width(); ++w) {
+            Dtype scale = 1.;
+            int w_start = w - (size - 1) / 2;
+            int w_end = min(w_start + size, blob_bottom.width());
+            w_start = max(w_start, 0);
+            for (int nh = h_start; nh < h_end; ++nh) {
+              for (int nw = w_start; nw < w_end; ++nw) {
+                Dtype value = blob_bottom.data_at(n, c, nh, nw);
+                scale += value * value * alpha / (size * size);
+              }
+            }
+            *(top_data + blob_top->offset(n, c, h, w)) =
+              blob_bottom.data_at(n, c, h, w) / pow(scale, beta);
+          }
+        }
+      }
+    }
+    break;
+  default:
+    LOG(FATAL) << "Unknown normalization region.";
+  }
+}
+
+TYPED_TEST_CASE(LRNLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(LRNLayerTest, TestSetupAcrossChannels) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  LRNLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 7);
+  EXPECT_EQ(this->blob_top_->height(), 3);
+  EXPECT_EQ(this->blob_top_->width(), 3);
+}
+
+TYPED_TEST(LRNLayerTest, TestForwardAcrossChannels) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  LRNLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  Blob<Dtype> top_reference;
+  this->ReferenceLRNForward(*(this->blob_bottom_), layer_param,
+      &top_reference);
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_NEAR(this->blob_top_->cpu_data()[i], top_reference.cpu_data()[i],
+                this->epsilon_);
+  }
+}
+
+TYPED_TEST(LRNLayerTest, TestForwardAcrossChannelsLargeRegion) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_lrn_param()->set_local_size(15);
+  LRNLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  Blob<Dtype> top_reference;
+  this->ReferenceLRNForward(*(this->blob_bottom_), layer_param,
+      &top_reference);
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_NEAR(this->blob_top_->cpu_data()[i], top_reference.cpu_data()[i],
+                this->epsilon_);
+  }
+}
+
+TYPED_TEST(LRNLayerTest, TestGradientAcrossChannels) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  LRNLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    this->blob_top_->mutable_cpu_diff()[i] = 1.;
+  }
+  vector<bool> propagate_down(this->blob_bottom_vec_.size(), true);
+  layer.Backward(this->blob_top_vec_, propagate_down,
+                 this->blob_bottom_vec_);
+  // for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+  //   std::cout << "CPU diff " << this->blob_bottom_->cpu_diff()[i]
+  //       << std::endl;
+  // }
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(LRNLayerTest, TestGradientAcrossChannelsLargeRegion) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_lrn_param()->set_local_size(15);
+  LRNLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    this->blob_top_->mutable_cpu_diff()[i] = 1.;
+  }
+  vector<bool> propagate_down(this->blob_bottom_vec_.size(), true);
+  layer.Backward(this->blob_top_vec_, propagate_down,
+                 this->blob_bottom_vec_);
+  // for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+  //   std::cout << "CPU diff " << this->blob_bottom_->cpu_diff()[i]
+  //       << std::endl;
+  // }
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(LRNLayerTest, TestSetupWithinChannel) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_lrn_param()->set_norm_region(
+      LRNParameter_NormRegion_WITHIN_CHANNEL);
+  layer_param.mutable_lrn_param()->set_local_size(3);
+  LRNLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 7);
+  EXPECT_EQ(this->blob_top_->height(), 3);
+  EXPECT_EQ(this->blob_top_->width(), 3);
+}
+
+TYPED_TEST(LRNLayerTest, TestForwardWithinChannel) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_lrn_param()->set_norm_region(
+      LRNParameter_NormRegion_WITHIN_CHANNEL);
+  layer_param.mutable_lrn_param()->set_local_size(3);
+  LRNLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  Blob<Dtype> top_reference;
+  this->ReferenceLRNForward(*(this->blob_bottom_), layer_param,
+      &top_reference);
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_NEAR(this->blob_top_->cpu_data()[i], top_reference.cpu_data()[i],
+                this->epsilon_);
+  }
+}
+
+TYPED_TEST(LRNLayerTest, TestGradientWithinChannel) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_lrn_param()->set_norm_region(
+      LRNParameter_NormRegion_WITHIN_CHANNEL);
+  layer_param.mutable_lrn_param()->set_local_size(3);
+  LRNLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    this->blob_top_->mutable_cpu_diff()[i] = 1.;
+  }
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_math_functions.cpp b/src/caffe/test/test_math_functions.cpp
new file mode 100644
index 0000000..a095b54
--- /dev/null
+++ b/src/caffe/test/test_math_functions.cpp
@@ -0,0 +1,246 @@
+#include <stdint.h>  // for uint32_t & uint64_t
+#include <time.h>
+#include <climits>
+#include <cmath>  // for std::fabs
+#include <cstdlib>  // for rand_r
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/util/math_functions.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class MathFunctionsTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  MathFunctionsTest()
+      : blob_bottom_(new Blob<Dtype>()),
+        blob_top_(new Blob<Dtype>()) {
+  }
+
+  virtual void SetUp() {
+    Caffe::set_random_seed(1701);
+    this->blob_bottom_->Reshape(11, 17, 19, 23);
+    this->blob_top_->Reshape(11, 17, 19, 23);
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    filler.Fill(this->blob_top_);
+  }
+
+  virtual ~MathFunctionsTest() {
+    delete blob_bottom_;
+    delete blob_top_;
+  }
+
+  // http://en.wikipedia.org/wiki/Hamming_distance
+  int ReferenceHammingDistance(const int n, const Dtype* x, const Dtype* y) {
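+    // The casts below truncate each value to an integer before the XOR, so
+    // the distance is computed over the integer parts of the inputs.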
+    int dist = 0;
+    uint64_t val;
+    for (int i = 0; i < n; ++i) {
+      if (sizeof(Dtype) == 8) {
+        val = static_cast<uint64_t>(x[i]) ^ static_cast<uint64_t>(y[i]);
+      } else if (sizeof(Dtype) == 4) {
+        val = static_cast<uint32_t>(x[i]) ^ static_cast<uint32_t>(y[i]);
+      } else {
+        LOG(FATAL) << "Unrecognized Dtype size: " << sizeof(Dtype);
+      }
+      // Count the number of set bits
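+      // val &= val - 1 clears the lowest set bit (Kernighan's bit count).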
+      while (val) {
+        ++dist;
+        val &= val - 1;
+      }
+    }
+    return dist;
+  }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+};
+
+template <typename Dtype>
+class CPUMathFunctionsTest
+  : public MathFunctionsTest<CPUDevice<Dtype> > {
+};
+
+TYPED_TEST_CASE(CPUMathFunctionsTest, TestDtypes);
+
+TYPED_TEST(CPUMathFunctionsTest, TestNothing) {
+  // The first test case of a test suite takes the longest time
+  //   due to the set up overhead.
+}
+
+TYPED_TEST(CPUMathFunctionsTest, TestHammingDistance) {
+  int n = this->blob_bottom_->count();
+  const TypeParam* x = this->blob_bottom_->cpu_data();
+  const TypeParam* y = this->blob_top_->cpu_data();
+  EXPECT_EQ(this->ReferenceHammingDistance(n, x, y),
+            caffe_cpu_hamming_distance<TypeParam>(n, x, y));
+}
+
+TYPED_TEST(CPUMathFunctionsTest, TestAsum) {
+  int n = this->blob_bottom_->count();
+  const TypeParam* x = this->blob_bottom_->cpu_data();
+  TypeParam std_asum = 0;
+  for (int i = 0; i < n; ++i) {
+    std_asum += std::fabs(x[i]);
+  }
+  TypeParam cpu_asum = caffe_cpu_asum<TypeParam>(n, x);
+  EXPECT_LT((cpu_asum - std_asum) / std_asum, 1e-2);
+}
+
+TYPED_TEST(CPUMathFunctionsTest, TestSign) {
+  int n = this->blob_bottom_->count();
+  const TypeParam* x = this->blob_bottom_->cpu_data();
+  caffe_cpu_sign<TypeParam>(n, x, this->blob_bottom_->mutable_cpu_diff());
+  const TypeParam* signs = this->blob_bottom_->cpu_diff();
+  for (int i = 0; i < n; ++i) {
+    EXPECT_EQ(signs[i], x[i] > 0 ? 1 : (x[i] < 0 ? -1 : 0));
+  }
+}
+
+TYPED_TEST(CPUMathFunctionsTest, TestSgnbit) {
+  int n = this->blob_bottom_->count();
+  const TypeParam* x = this->blob_bottom_->cpu_data();
+  caffe_cpu_sgnbit<TypeParam>(n, x, this->blob_bottom_->mutable_cpu_diff());
+  const TypeParam* signbits = this->blob_bottom_->cpu_diff();
+  for (int i = 0; i < n; ++i) {
+    EXPECT_EQ(signbits[i], x[i] < 0 ? 1 : 0);
+  }
+}
+
+TYPED_TEST(CPUMathFunctionsTest, TestFabs) {
+  int n = this->blob_bottom_->count();
+  const TypeParam* x = this->blob_bottom_->cpu_data();
+  caffe_abs<TypeParam>(n, x, this->blob_bottom_->mutable_cpu_diff());
+  const TypeParam* abs_val = this->blob_bottom_->cpu_diff();
+  for (int i = 0; i < n; ++i) {
+    EXPECT_EQ(abs_val[i], x[i] > 0 ? x[i] : -x[i]);
+  }
+}
+
+TYPED_TEST(CPUMathFunctionsTest, TestScale) {
+  int n = this->blob_bottom_->count();
+  TypeParam alpha = this->blob_bottom_->cpu_diff()[caffe_rng_rand() %
+                                                   this->blob_bottom_->count()];
+  caffe_cpu_scale<TypeParam>(n, alpha, this->blob_bottom_->cpu_data(),
+                             this->blob_bottom_->mutable_cpu_diff());
+  const TypeParam* scaled = this->blob_bottom_->cpu_diff();
+  const TypeParam* x = this->blob_bottom_->cpu_data();
+  for (int i = 0; i < n; ++i) {
+    EXPECT_EQ(scaled[i], x[i] * alpha);
+  }
+}
+
+TYPED_TEST(CPUMathFunctionsTest, TestCopy) {
+  const int n = this->blob_bottom_->count();
+  const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
+  TypeParam* top_data = this->blob_top_->mutable_cpu_data();
+  caffe_copy(n, bottom_data, top_data);
+  for (int i = 0; i < n; ++i) {
+    EXPECT_EQ(bottom_data[i], top_data[i]);
+  }
+}
+
+#ifndef CPU_ONLY
+
+template <typename Dtype>
+class GPUMathFunctionsTest : public MathFunctionsTest<GPUDevice<Dtype> > {
+};
+
+TYPED_TEST_CASE(GPUMathFunctionsTest, TestDtypes);
+
+// TODO: Fix caffe_gpu_hamming_distance and re-enable this test.
+TYPED_TEST(GPUMathFunctionsTest, DISABLED_TestHammingDistance) {
+  int n = this->blob_bottom_->count();
+  const TypeParam* x = this->blob_bottom_->cpu_data();
+  const TypeParam* y = this->blob_top_->cpu_data();
+  int reference_distance = this->ReferenceHammingDistance(n, x, y);
+  x = this->blob_bottom_->gpu_data();
+  y = this->blob_top_->gpu_data();
+  int computed_distance = caffe_gpu_hamming_distance<TypeParam>(n, x, y);
+  EXPECT_EQ(reference_distance, computed_distance);
+}
+
+TYPED_TEST(GPUMathFunctionsTest, TestAsum) {
+  int n = this->blob_bottom_->count();
+  const TypeParam* x = this->blob_bottom_->cpu_data();
+  TypeParam std_asum = 0;
+  for (int i = 0; i < n; ++i) {
+    std_asum += std::fabs(x[i]);
+  }
+  TypeParam gpu_asum;
+  caffe_gpu_asum<TypeParam>(n, this->blob_bottom_->gpu_data(), &gpu_asum);
+  EXPECT_LT((gpu_asum - std_asum) / std_asum, 1e-2);
+}
+
+TYPED_TEST(GPUMathFunctionsTest, TestSign) {
+  int n = this->blob_bottom_->count();
+  caffe_gpu_sign<TypeParam>(n, this->blob_bottom_->gpu_data(),
+                            this->blob_bottom_->mutable_gpu_diff());
+  const TypeParam* signs = this->blob_bottom_->cpu_diff();
+  const TypeParam* x = this->blob_bottom_->cpu_data();
+  for (int i = 0; i < n; ++i) {
+    EXPECT_EQ(signs[i], x[i] > 0 ? 1 : (x[i] < 0 ? -1 : 0));
+  }
+}
+
+TYPED_TEST(GPUMathFunctionsTest, TestSgnbit) {
+  int n = this->blob_bottom_->count();
+  caffe_gpu_sgnbit<TypeParam>(n, this->blob_bottom_->gpu_data(),
+                            this->blob_bottom_->mutable_gpu_diff());
+  const TypeParam* signbits = this->blob_bottom_->cpu_diff();
+  const TypeParam* x = this->blob_bottom_->cpu_data();
+  for (int i = 0; i < n; ++i) {
+    EXPECT_EQ(signbits[i], x[i] < 0 ? 1 : 0);
+  }
+}
+
+TYPED_TEST(GPUMathFunctionsTest, TestFabs) {
+  int n = this->blob_bottom_->count();
+  caffe_gpu_abs<TypeParam>(n, this->blob_bottom_->gpu_data(),
+                            this->blob_bottom_->mutable_gpu_diff());
+  const TypeParam* abs_val = this->blob_bottom_->cpu_diff();
+  const TypeParam* x = this->blob_bottom_->cpu_data();
+  for (int i = 0; i < n; ++i) {
+    EXPECT_EQ(abs_val[i], x[i] > 0 ? x[i] : -x[i]);
+  }
+}
+
+TYPED_TEST(GPUMathFunctionsTest, TestScale) {
+  int n = this->blob_bottom_->count();
+  TypeParam alpha = this->blob_bottom_->cpu_diff()[caffe_rng_rand() %
+                                                   this->blob_bottom_->count()];
+  caffe_gpu_scale<TypeParam>(n, alpha, this->blob_bottom_->gpu_data(),
+                             this->blob_bottom_->mutable_gpu_diff());
+  const TypeParam* scaled = this->blob_bottom_->cpu_diff();
+  const TypeParam* x = this->blob_bottom_->cpu_data();
+  for (int i = 0; i < n; ++i) {
+    EXPECT_EQ(scaled[i], x[i] * alpha);
+  }
+}
+
+TYPED_TEST(GPUMathFunctionsTest, TestCopy) {
+  const int n = this->blob_bottom_->count();
+  const TypeParam* bottom_data = this->blob_bottom_->gpu_data();
+  TypeParam* top_data = this->blob_top_->mutable_gpu_data();
+  caffe_copy(n, bottom_data, top_data);
+  bottom_data = this->blob_bottom_->cpu_data();
+  top_data = this->blob_top_->mutable_cpu_data();
+  for (int i = 0; i < n; ++i) {
+    EXPECT_EQ(bottom_data[i], top_data[i]);
+  }
+}
+
+#endif
+
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_maxpool_dropout_layers.cpp b/src/caffe/test/test_maxpool_dropout_layers.cpp
new file mode 100644
index 0000000..611d979
--- /dev/null
+++ b/src/caffe/test/test_maxpool_dropout_layers.cpp
@@ -0,0 +1,127 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class MaxPoolingDropoutTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+ protected:
+  MaxPoolingDropoutTest()
+      : blob_bottom_(new Blob<Dtype>()),
+        blob_top_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    Caffe::set_random_seed(1703);
+    blob_bottom_->Reshape(2, 3, 6, 5);
+    // fill the values
+    FillerParameter filler_param;
+    filler_param.set_value(1.);
+    ConstantFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~MaxPoolingDropoutTest() { delete blob_bottom_; delete blob_top_; }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(MaxPoolingDropoutTest, TestDtypesAndDevices);
+
+TYPED_TEST(MaxPoolingDropoutTest, TestSetup) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  PoolingLayer<Dtype> max_layer(layer_param);
+  max_layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  DropoutLayer<Dtype> dropout_layer(layer_param);
+  dropout_layer.SetUp(this->blob_top_vec_, this->blob_top_vec_);
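+  // 6x5 bottom with kernel 3, stride 2, no padding:
+  // ceil((6 - 3) / 2) + 1 = 3 pooled rows and ceil((5 - 3) / 2) + 1 = 2 cols.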
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_->num());
+  EXPECT_EQ(this->blob_top_->channels(), this->blob_bottom_->channels());
+  EXPECT_EQ(this->blob_top_->height(), 3);
+  EXPECT_EQ(this->blob_top_->width(), 2);
+}
+
+
+TYPED_TEST(MaxPoolingDropoutTest, TestForward) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  PoolingLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  Dtype sum = 0.;
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    sum += top_data[i];
+  }
+  EXPECT_EQ(sum, this->blob_top_->count());
+  // Dropout in-place
+  DropoutLayer<Dtype> dropout_layer(layer_param);
+  dropout_layer.SetUp(this->blob_top_vec_, this->blob_top_vec_);
+  dropout_layer.Forward(this->blob_top_vec_, this->blob_top_vec_);
+  sum = 0.;
+  Dtype scale = 1. / (1. - layer_param.dropout_param().dropout_ratio());
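+  // The pooled outputs are all 1, and dropout scales each kept unit by
+  // 1 / (1 - ratio), so the in-place result sums to something in
+  // [0, count * scale].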
+  top_data = this->blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    sum += top_data[i];
+  }
+  EXPECT_GE(sum, 0);
+  EXPECT_LE(sum, this->blob_top_->count()*scale);
+}
+
+TYPED_TEST(MaxPoolingDropoutTest, TestBackward) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.set_phase(TRAIN);
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  PoolingLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    this->blob_top_->mutable_cpu_diff()[i] = 1.;
+  }
+  vector<bool> propagate_down(this->blob_bottom_vec_.size(), true);
+  layer.Backward(this->blob_top_vec_, propagate_down,
+                 this->blob_bottom_vec_);
+  const Dtype* bottom_diff = this->blob_bottom_->cpu_diff();
+  Dtype sum = 0.;
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    sum += bottom_diff[i];
+  }
+  EXPECT_EQ(sum, this->blob_top_->count());
+  // Dropout in-place
+  DropoutLayer<Dtype> dropout_layer(layer_param);
+  dropout_layer.SetUp(this->blob_top_vec_, this->blob_top_vec_);
+  dropout_layer.Forward(this->blob_top_vec_, this->blob_top_vec_);
+  dropout_layer.Backward(this->blob_top_vec_, propagate_down,
+                         this->blob_top_vec_);
+  layer.Backward(this->blob_top_vec_, propagate_down,
+                 this->blob_bottom_vec_);
+  Dtype sum_with_dropout = 0.;
+  bottom_diff = this->blob_bottom_->cpu_diff();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    sum_with_dropout += bottom_diff[i];
+  }
+  EXPECT_GE(sum_with_dropout, sum);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_memory_data_layer.cpp b/src/caffe/test/test_memory_data_layer.cpp
new file mode 100644
index 0000000..a79033f
--- /dev/null
+++ b/src/caffe/test/test_memory_data_layer.cpp
@@ -0,0 +1,296 @@
+#include <opencv2/core/core.hpp>
+
+#include <string>
+#include <vector>
+
+#include "caffe/data_layers.hpp"
+#include "caffe/filler.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class MemoryDataLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  MemoryDataLayerTest()
+    : data_(new Blob<Dtype>()),
+      labels_(new Blob<Dtype>()),
+      data_blob_(new Blob<Dtype>()),
+      label_blob_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    batch_size_ = 8;
+    batches_ = 12;
+    channels_ = 4;
+    height_ = 7;
+    width_ = 11;
+    blob_top_vec_.push_back(data_blob_);
+    blob_top_vec_.push_back(label_blob_);
+    // pick random input data
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    data_->Reshape(batches_ * batch_size_, channels_, height_, width_);
+    labels_->Reshape(batches_ * batch_size_, 1, 1, 1);
+    filler.Fill(this->data_);
+    filler.Fill(this->labels_);
+  }
+
+  virtual ~MemoryDataLayerTest() {
+    delete data_blob_;
+    delete label_blob_;
+    delete data_;
+    delete labels_;
+  }
+  int batch_size_;
+  int batches_;
+  int channels_;
+  int height_;
+  int width_;
+  // we don't really need blobs for the input data, but it makes it
+  //  easier to call Filler
+  Blob<Dtype>* const data_;
+  Blob<Dtype>* const labels_;
+  // blobs for the top of MemoryDataLayer
+  Blob<Dtype>* const data_blob_;
+  Blob<Dtype>* const label_blob_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(MemoryDataLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(MemoryDataLayerTest, TestSetup) {
+  typedef typename TypeParam::Dtype Dtype;
+
+  LayerParameter layer_param;
+  MemoryDataParameter* md_param = layer_param.mutable_memory_data_param();
+  md_param->set_batch_size(this->batch_size_);
+  md_param->set_channels(this->channels_);
+  md_param->set_height(this->height_);
+  md_param->set_width(this->width_);
+  shared_ptr<Layer<Dtype> > layer(
+      new MemoryDataLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->data_blob_->num(), this->batch_size_);
+  EXPECT_EQ(this->data_blob_->channels(), this->channels_);
+  EXPECT_EQ(this->data_blob_->height(), this->height_);
+  EXPECT_EQ(this->data_blob_->width(), this->width_);
+  EXPECT_EQ(this->label_blob_->num(), this->batch_size_);
+  EXPECT_EQ(this->label_blob_->channels(), 1);
+  EXPECT_EQ(this->label_blob_->height(), 1);
+  EXPECT_EQ(this->label_blob_->width(), 1);
+}
+
+// run through a few batches and check that the right data appears
+TYPED_TEST(MemoryDataLayerTest, TestForward) {
+  typedef typename TypeParam::Dtype Dtype;
+
+  LayerParameter layer_param;
+  MemoryDataParameter* md_param = layer_param.mutable_memory_data_param();
+  md_param->set_batch_size(this->batch_size_);
+  md_param->set_channels(this->channels_);
+  md_param->set_height(this->height_);
+  md_param->set_width(this->width_);
+  shared_ptr<MemoryDataLayer<Dtype> > layer(
+      new MemoryDataLayer<Dtype>(layer_param));
+  layer->DataLayerSetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Reset(this->data_->mutable_cpu_data(),
+      this->labels_->mutable_cpu_data(), this->data_->num());
+  for (int i = 0; i < this->batches_ * 6; ++i) {
+    int batch_num = i % this->batches_;
+    layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
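+    // data_->offset(1) is the size of one item (C * H * W), so the index
+    // below starts at the first element of batch `batch_num` in the input.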
+    for (int j = 0; j < this->data_blob_->count(); ++j) {
+      EXPECT_EQ(this->data_blob_->cpu_data()[j],
+          this->data_->cpu_data()[
+              this->data_->offset(1) * this->batch_size_ * batch_num + j]);
+    }
+    for (int j = 0; j < this->label_blob_->count(); ++j) {
+      EXPECT_EQ(this->label_blob_->cpu_data()[j],
+          this->labels_->cpu_data()[this->batch_size_ * batch_num + j]);
+    }
+  }
+}
+
+TYPED_TEST(MemoryDataLayerTest, AddDatumVectorDefaultTransform) {
+  typedef typename TypeParam::Dtype Dtype;
+
+  LayerParameter param;
+  MemoryDataParameter* memory_data_param = param.mutable_memory_data_param();
+  memory_data_param->set_batch_size(this->batch_size_);
+  memory_data_param->set_channels(this->channels_);
+  memory_data_param->set_height(this->height_);
+  memory_data_param->set_width(this->width_);
+  MemoryDataLayer<Dtype> layer(param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  // We add batch_size*num_iter items, then for each iteration
+  // we forward batch_size elements
+  int num_iter = 5;
+  vector<Datum> datum_vector(this->batch_size_ * num_iter);
+  const size_t count = this->channels_ * this->height_ * this->width_;
+  size_t pixel_index = 0;
+  for (int i = 0; i < this->batch_size_ * num_iter; ++i) {
+    datum_vector[i].set_channels(this->channels_);
+    datum_vector[i].set_height(this->height_);
+    datum_vector[i].set_width(this->width_);
+    datum_vector[i].set_label(i);
+    vector<char> pixels(count);
+    for (int j = 0; j < count; ++j) {
+      pixels[j] = pixel_index++ % 256;
+    }
+    datum_vector[i].set_data(&(pixels[0]), count);
+  }
+  layer.AddDatumVector(datum_vector);
+
+  int data_index;
+  // Go through the data 5 times
+  for (int iter = 0; iter < num_iter; ++iter) {
+    int offset = this->batch_size_ * iter;
+    layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    const Dtype* data = this->data_blob_->cpu_data();
+    size_t index = 0;
+    for (int i = 0; i < this->batch_size_; ++i) {
+      const string& data_string = datum_vector[offset + i].data();
+      EXPECT_EQ(offset + i, this->label_blob_->cpu_data()[i]);
+      for (int c = 0; c < this->channels_; ++c) {
+        for (int h = 0; h < this->height_; ++h) {
+          for (int w = 0; w < this->width_; ++w) {
+            data_index = (c * this->height_ + h) * this->width_ + w;
+            EXPECT_EQ(static_cast<Dtype>(
+                static_cast<uint8_t>(data_string[data_index])),
+                      data[index++]);
+          }
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST(MemoryDataLayerTest, AddMatVectorDefaultTransform) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter param;
+  MemoryDataParameter* memory_data_param = param.mutable_memory_data_param();
+  memory_data_param->set_batch_size(this->batch_size_);
+  memory_data_param->set_channels(this->channels_);
+  memory_data_param->set_height(this->height_);
+  memory_data_param->set_width(this->width_);
+  MemoryDataLayer<Dtype> layer(param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  // We add batch_size*num_iter items, then for each iteration
+  // we forward batch_size elements
+  int num_iter = 5;
+  vector<cv::Mat> mat_vector(this->batch_size_ * num_iter);
+  vector<int> label_vector(this->batch_size_ * num_iter);
+  for (int i = 0; i < this->batch_size_*num_iter; ++i) {
+    mat_vector[i] = cv::Mat(this->height_, this->width_, CV_8UC4);
+    label_vector[i] = i;
+    cv::randu(mat_vector[i], cv::Scalar::all(0), cv::Scalar::all(255));
+  }
+  layer.AddMatVector(mat_vector, label_vector);
+
+  int data_index;
+  const size_t count = this->channels_ * this->height_ * this->width_;
+  for (int iter = 0; iter < num_iter; ++iter) {
+    int offset = this->batch_size_ * iter;
+    layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    const Dtype* data = this->data_blob_->cpu_data();
+    for (int i = 0; i < this->batch_size_; ++i) {
+      EXPECT_EQ(offset + i, this->label_blob_->cpu_data()[i]);
+      for (int h = 0; h < this->height_; ++h) {
+        const unsigned char* ptr_mat = mat_vector[offset + i].ptr<uchar>(h);
+        int index = 0;
+        for (int w = 0; w < this->width_; ++w) {
+          for (int c = 0; c < this->channels_; ++c) {
+            data_index = (i*count) + (c * this->height_ + h) * this->width_ + w;
+            Dtype pixel = static_cast<Dtype>(ptr_mat[index++]);
+            EXPECT_EQ(static_cast<int>(pixel),
+                      data[data_index]);
+          }
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST(MemoryDataLayerTest, TestSetBatchSize) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter param;
+  MemoryDataParameter* memory_data_param = param.mutable_memory_data_param();
+  memory_data_param->set_batch_size(this->batch_size_);
+  memory_data_param->set_channels(this->channels_);
+  memory_data_param->set_height(this->height_);
+  memory_data_param->set_width(this->width_);
+  MemoryDataLayer<Dtype> layer(param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  // first add data as usual
+  int num_iter = 5;
+  vector<cv::Mat> mat_vector(this->batch_size_ * num_iter);
+  vector<int> label_vector(this->batch_size_ * num_iter);
+  for (int i = 0; i < this->batch_size_*num_iter; ++i) {
+    mat_vector[i] = cv::Mat(this->height_, this->width_, CV_8UC4);
+    label_vector[i] = i;
+    cv::randu(mat_vector[i], cv::Scalar::all(0), cv::Scalar::all(255));
+  }
+  layer.AddMatVector(mat_vector, label_vector);
+  // then consume the data
+  int data_index;
+  const size_t count = this->channels_ * this->height_ * this->width_;
+  for (int iter = 0; iter < num_iter; ++iter) {
+    int offset = this->batch_size_ * iter;
+    layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    const Dtype* data = this->data_blob_->cpu_data();
+    for (int i = 0; i < this->batch_size_; ++i) {
+      EXPECT_EQ(offset + i, this->label_blob_->cpu_data()[i]);
+      for (int h = 0; h < this->height_; ++h) {
+        const unsigned char* ptr_mat = mat_vector[offset + i].ptr<uchar>(h);
+        int index = 0;
+        for (int w = 0; w < this->width_; ++w) {
+          for (int c = 0; c < this->channels_; ++c) {
+            data_index = (i*count) + (c * this->height_ + h) * this->width_ + w;
+            Dtype pixel = static_cast<Dtype>(ptr_mat[index++]);
+            EXPECT_EQ(static_cast<int>(pixel), data[data_index]);
+          }
+        }
+      }
+    }
+  }
+  // and then add new data with different batch_size
+  int new_batch_size = 16;
+  layer.set_batch_size(new_batch_size);
+  mat_vector.clear();
+  mat_vector.resize(new_batch_size * num_iter);
+  label_vector.clear();
+  label_vector.resize(new_batch_size * num_iter);
+  for (int i = 0; i < new_batch_size*num_iter; ++i) {
+    mat_vector[i] = cv::Mat(this->height_, this->width_, CV_8UC4);
+    label_vector[i] = i;
+    cv::randu(mat_vector[i], cv::Scalar::all(0), cv::Scalar::all(255));
+  }
+  layer.AddMatVector(mat_vector, label_vector);
+
+  // finally consume new data and check if everything is fine
+  for (int iter = 0; iter < num_iter; ++iter) {
+    int offset = new_batch_size * iter;
+    layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    EXPECT_EQ(new_batch_size, this->blob_top_vec_[0]->num());
+    EXPECT_EQ(new_batch_size, this->blob_top_vec_[1]->num());
+    const Dtype* data = this->data_blob_->cpu_data();
+    for (int i = 0; i < new_batch_size; ++i) {
+      EXPECT_EQ(offset + i, this->label_blob_->cpu_data()[i]);
+      for (int h = 0; h < this->height_; ++h) {
+        const unsigned char* ptr_mat = mat_vector[offset + i].ptr<uchar>(h);
+        int index = 0;
+        for (int w = 0; w < this->width_; ++w) {
+          for (int c = 0; c < this->channels_; ++c) {
+            data_index = (i*count) + (c * this->height_ + h) * this->width_ + w;
+            Dtype pixel = static_cast<Dtype>(ptr_mat[index++]);
+            EXPECT_EQ(static_cast<int>(pixel), data[data_index]);
+          }
+        }
+      }
+    }
+  }
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_multinomial_logistic_loss_layer.cpp b/src/caffe/test/test_multinomial_logistic_loss_layer.cpp
new file mode 100644
index 0000000..b2db984
--- /dev/null
+++ b/src/caffe/test/test_multinomial_logistic_loss_layer.cpp
@@ -0,0 +1,61 @@
+#include <cmath>
+#include <cstdlib>
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+class MultinomialLogisticLossLayerTest : public CPUDeviceTest<Dtype> {
+ protected:
+  MultinomialLogisticLossLayerTest()
+      : blob_bottom_data_(new Blob<Dtype>(10, 5, 1, 1)),
+        blob_bottom_label_(new Blob<Dtype>(10, 1, 1, 1)),
+        blob_top_loss_(new Blob<Dtype>()) {
+    Caffe::set_random_seed(1701);
+    // fill the values
+    FillerParameter filler_param;
+    PositiveUnitballFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_data_);
+    blob_bottom_vec_.push_back(blob_bottom_data_);
+    for (int i = 0; i < blob_bottom_label_->count(); ++i) {
+      blob_bottom_label_->mutable_cpu_data()[i] = caffe_rng_rand() % 5;
+    }
+    blob_bottom_vec_.push_back(blob_bottom_label_);
+    blob_top_vec_.push_back(blob_top_loss_);
+  }
+  virtual ~MultinomialLogisticLossLayerTest() {
+    delete blob_bottom_data_;
+    delete blob_bottom_label_;
+    delete blob_top_loss_;
+  }
+  Blob<Dtype>* const blob_bottom_data_;
+  Blob<Dtype>* const blob_bottom_label_;
+  Blob<Dtype>* const blob_top_loss_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(MultinomialLogisticLossLayerTest, TestDtypes);
+
+
+TYPED_TEST(MultinomialLogisticLossLayerTest, TestGradientCPU) {
+  LayerParameter layer_param;
+  MultinomialLogisticLossLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  GradientChecker<TypeParam> checker(1e-2, 2*1e-2, 1701, 0, 0.05);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 0);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_mvn_layer.cpp b/src/caffe/test/test_mvn_layer.cpp
new file mode 100644
index 0000000..933b432
--- /dev/null
+++ b/src/caffe/test/test_mvn_layer.cpp
@@ -0,0 +1,169 @@
+#include <cmath>
+#include <cstring>
+#include <vector>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/common_layers.hpp"
+#include "caffe/filler.hpp"
+#include "gtest/gtest.h"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class MVNLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+ protected:
+  MVNLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 4, 5)),
+        blob_top_(new Blob<Dtype>()) {
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~MVNLayerTest() { delete blob_bottom_; delete blob_top_; }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(MVNLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(MVNLayerTest, TestForward) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  MVNLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Test mean
+  int num = this->blob_bottom_->num();
+  int channels = this->blob_bottom_->channels();
+  int height = this->blob_bottom_->height();
+  int width = this->blob_bottom_->width();
+
+  for (int i = 0; i < num; ++i) {
+    for (int j = 0; j < channels; ++j) {
+      Dtype sum = 0, var = 0;
+      for (int k = 0; k < height; ++k) {
+        for (int l = 0; l < width; ++l) {
+          Dtype data = this->blob_top_->data_at(i, j, k, l);
+          sum += data;
+          var += data * data;
+        }
+      }
+      sum /= height * width;
+      var /= height * width;
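+      // The mean is ~0 after normalization, so the mean of squares
+      // approximates the variance and should be ~1.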
+
+      const Dtype kErrorBound = 0.001;
+      // expect zero mean
+      EXPECT_NEAR(0, sum, kErrorBound);
+      // expect unit variance
+      EXPECT_NEAR(1, var, kErrorBound);
+    }
+  }
+}
+
+TYPED_TEST(MVNLayerTest, TestForwardMeanOnly) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  CHECK(google::protobuf::TextFormat::ParseFromString(
+      "mvn_param{normalize_variance: false}", &layer_param));
+  MVNLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Test mean
+  int num = this->blob_bottom_->num();
+  int channels = this->blob_bottom_->channels();
+  int height = this->blob_bottom_->height();
+  int width = this->blob_bottom_->width();
+
+  for (int i = 0; i < num; ++i) {
+    for (int j = 0; j < channels; ++j) {
+      Dtype sum = 0, var = 0;
+      for (int k = 0; k < height; ++k) {
+        for (int l = 0; l < width; ++l) {
+          Dtype data = this->blob_top_->data_at(i, j, k, l);
+          sum += data;
+          var += data * data;
+        }
+      }
+      sum /= height * width;
+
+      const Dtype kErrorBound = 0.001;
+      // expect zero mean
+      EXPECT_NEAR(0, sum, kErrorBound);
+    }
+  }
+}
+
+TYPED_TEST(MVNLayerTest, TestForwardAcrossChannels) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  CHECK(google::protobuf::TextFormat::ParseFromString(
+      "mvn_param{across_channels: true}", &layer_param));
+  MVNLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Test mean
+  int num = this->blob_bottom_->num();
+  int channels = this->blob_bottom_->channels();
+  int height = this->blob_bottom_->height();
+  int width = this->blob_bottom_->width();
+
+  for (int i = 0; i < num; ++i) {
+    Dtype sum = 0, var = 0;
+    for (int j = 0; j < channels; ++j) {
+      for (int k = 0; k < height; ++k) {
+        for (int l = 0; l < width; ++l) {
+          Dtype data = this->blob_top_->data_at(i, j, k, l);
+          sum += data;
+          var += data * data;
+        }
+      }
+    }
+    sum /= height * width * channels;
+    var /= height * width * channels;
+
+    const Dtype kErrorBound = 0.001;
+    // expect zero mean
+    EXPECT_NEAR(0, sum, kErrorBound);
+    // expect unit variance
+    EXPECT_NEAR(1, var, kErrorBound);
+  }
+}
+
+TYPED_TEST(MVNLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  MVNLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(MVNLayerTest, TestGradientMeanOnly) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  CHECK(google::protobuf::TextFormat::ParseFromString(
+      "mvn_param{normalize_variance: false}", &layer_param));
+  MVNLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(MVNLayerTest, TestGradientAcrossChannels) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  CHECK(google::protobuf::TextFormat::ParseFromString(
+      "mvn_param{across_channels: true}", &layer_param));
+  MVNLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_net.cpp b/src/caffe/test/test_net.cpp
new file mode 100644
index 0000000..56959f4
--- /dev/null
+++ b/src/caffe/test/test_net.cpp
@@ -0,0 +1,2375 @@
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "google/protobuf/text_format.h"
+
+#include "gtest/gtest.h"
+
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/net.hpp"
+#include "caffe/util/math_functions.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class NetTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  NetTest() : seed_(1701) {}
+
+  virtual void InitNetFromProtoString(const string& proto) {
+    NetParameter param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(proto, &param));
+    net_.reset(new Net<Dtype>(param));
+  }
+
+  virtual void CopyNetBlobs(const bool copy_diff,
+      vector<shared_ptr<Blob<Dtype> > >* blobs_copy) {
+    CHECK(net_);
+    const vector<shared_ptr<Blob<Dtype> > >& net_blobs = net_->blobs();
+    blobs_copy->clear();
+    blobs_copy->resize(net_blobs.size());
+    const bool kReshape = true;
+    for (int i = 0; i < net_blobs.size(); ++i) {
+      (*blobs_copy)[i].reset(new Blob<Dtype>());
+      (*blobs_copy)[i]->CopyFrom(*net_blobs[i], copy_diff, kReshape);
+    }
+  }
+
+  virtual void CopyNetParams(const bool copy_diff,
+      vector<shared_ptr<Blob<Dtype> > >* params_copy) {
+    CHECK(net_);
+    const vector<shared_ptr<Blob<Dtype> > >& net_params = net_->params();
+    params_copy->clear();
+    params_copy->resize(net_params.size());
+    const bool kReshape = true;
+    for (int i = 0; i < net_params.size(); ++i) {
+      (*params_copy)[i].reset(new Blob<Dtype>());
+      (*params_copy)[i]->CopyFrom(*net_params[i], copy_diff, kReshape);
+    }
+  }
+
+  virtual void InitTinyNet(const bool force_backward = false,
+                           const bool accuracy_layer = false) {
+    string proto =
+        "name: 'TinyTestNetwork' "
+        "layer { "
+        "  name: 'data' "
+        "  type: 'DummyData' "
+        "  dummy_data_param { "
+        "    shape { "
+        "      dim: 5 "
+        "      dim: 2 "
+        "      dim: 3 "
+        "      dim: 4 "
+        "    } "
+        "    data_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "    shape { "
+        "      dim: 5 "
+        "    } "
+        "    data_filler { "
+        "      type: 'constant' "
+        "      value: 0 "
+        "    } "
+        "  } "
+        "  top: 'data' "
+        "  top: 'label' "
+        "} "
+        "layer { "
+        "  name: 'innerproduct' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 1000 "
+        "    weight_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "    bias_filler { "
+        "      type: 'constant' "
+        "      value: 0 "
+        "    } "
+        "  } "
+        "  param { "
+        "    lr_mult: 1 "
+        "    decay_mult: 1 "
+        "  } "
+        "  param { "
+        "    lr_mult: 2 "
+        "    decay_mult: 0 "
+        "  } "
+        "  bottom: 'data' "
+        "  top: 'innerproduct' "
+        "} "
+        "layer { "
+        "  name: 'loss' "
+        "  type: 'SoftmaxWithLoss' "
+        "  bottom: 'innerproduct' "
+        "  bottom: 'label' "
+        "  top: 'top_loss' "
+        "} ";
+    if (accuracy_layer) {
+      proto +=
+          "layer { "
+          "  name: 'loss' "
+          "  type: 'Accuracy' "
+          "  bottom: 'innerproduct' "
+          "  bottom: 'label' "
+          "  top: 'accuracy' "
+          "} ";
+    }
+    if (force_backward) {
+      proto += "force_backward: true ";
+    }
+    InitNetFromProtoString(proto);
+  }
+
+  virtual void InitTinyNetEuclidean(const bool force_backward = false) {
+    string proto =
+        "name: 'TinyTestEuclidLossNetwork' "
+        "layer { "
+        "  name: 'data' "
+        "  type: 'DummyData' "
+        "  dummy_data_param { "
+        "    num: 5 "
+        "    channels: 2 "
+        "    height: 3 "
+        "    width: 4 "
+        "    num: 5 "
+        "    channels: 1 "
+        "    height: 1 "
+        "    width: 1 "
+        "    data_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "  } "
+        "  top: 'data' "
+        "  top: 'label' "
+        "} "
+        "layer { "
+        "  name: 'innerproduct' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 1 "
+        "    weight_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "    bias_filler { "
+        "      type: 'constant' "
+        "      value: 0 "
+        "    } "
+        "  } "
+        "  param { "
+        "    lr_mult: 1 "
+        "    decay_mult: 1 "
+        "  } "
+        "  param { "
+        "    lr_mult: 2 "
+        "    decay_mult: 0 "
+        "  } "
+        "  bottom: 'data' "
+        "  top: 'innerproduct' "
+        "} "
+        "layer { "
+        "  name: 'loss' "
+        "  type: 'EuclideanLoss' "
+        "  bottom: 'innerproduct' "
+        "  bottom: 'label' "
+        "} ";
+    if (force_backward) {
+      proto += "force_backward: true ";
+    }
+    InitNetFromProtoString(proto);
+  }
+
+  virtual void InitTrickyNet(Dtype* loss_weight = NULL) {
+    ostringstream loss_weight_stream;
+    if (loss_weight) {
+      loss_weight_stream << "  loss_weight: " << *loss_weight << " ";
+    }
+    const string& proto =
+        "name: 'TrickyTestNetwork' "
+        "layer { "
+        "  name: 'data' "
+        "  type: 'DummyData' "
+        "  dummy_data_param { "
+        "    num: 5 "
+        "    channels: 2 "
+        "    height: 3 "
+        "    width: 4 "
+        "    num: 5 "
+        "    channels: 1 "
+        "    height: 1 "
+        "    width: 1 "
+        "    data_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "  } "
+        "  top: 'data' "
+        "  top: 'label' "
+        "} "
+        "layer { "
+        "  name: 'innerproduct' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 1000 "
+        "    weight_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "    bias_filler { "
+        "      type: 'constant' "
+        "      value: 0 "
+        "    } "
+        "  } "
+        "  param { "
+        "    lr_mult: 1 "
+        "    decay_mult: 1 "
+        "  } "
+        "  param { "
+        "    lr_mult: 2 "
+        "    decay_mult: 0 "
+        "  } "
+        "  bottom: 'data' "
+        "  top: 'transformed_data' "
+        "} "
+        "layer { "
+        "  name: 'innerproduct' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 1 "
+        "    weight_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "    bias_filler { "
+        "      type: 'constant' "
+        "      value: 0 "
+        "    } "
+        "  } "
+        "  param { "
+        "    lr_mult: 1 "
+        "    decay_mult: 1 "
+        "  } "
+        "  param { "
+        "    lr_mult: 2 "
+        "    decay_mult: 0 "
+        "  } "
+        "  bottom: 'label' "
+        "  top: 'transformed_label' "
+        "} "
+        "layer { "
+        "  name: 'loss' "
+        "  type: 'SoftmaxWithLoss' " +
+        loss_weight_stream.str() +
+        "  bottom: 'transformed_data' "
+        "  bottom: 'transformed_label' "
+        "} ";
+    InitNetFromProtoString(proto);
+  }
+
+  // loss_weight is the loss weight for the 'EuclideanLoss' layer output.
+  // midnet_loss_weight is the loss weight for the first 'InnerProduct' layer
+  // output.  Should both default to 0.0 if unspecified (i.e., if NULL is
+  // passed to this function).
+  virtual void InitUnsharedWeightsNet(const Dtype* loss_weight = NULL,
+      const Dtype* midnet_loss_weight = NULL,
+      const bool force_backward = false, const bool bias_term = false,
+      const Dtype blobs_lr_w1 = 1, const Dtype blobs_lr_b1 = 2,
+      const Dtype blobs_lr_w2 = 1, const Dtype blobs_lr_b2 = 2) {
+    string bias_str = bias_term ? "true " : "false ";
+    ostringstream proto;
+    proto << "name: 'UnsharedWeightsNetwork' ";
+    if (force_backward) {
+      proto << "force_backward: true ";
+    }
+    proto <<
+        "layer { "
+        "  name: 'data' "
+        "  type: 'DummyData' "
+        "  dummy_data_param { "
+        "    num: 5 "
+        "    channels: 2 "
+        "    height: 3 "
+        "    width: 4 "
+        "    data_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "  } "
+        "  top: 'data' "
+        "} "
+        "layer { "
+        "  name: 'innerproduct1' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 10 "
+        "    bias_term: " << bias_str <<
+        "    weight_filler { "
+        "      type: 'gaussian' "
+        "      std: 10 "
+        "    } "
+        "  } "
+        "  param { "
+        "    name: 'unsharedweights1' "
+        "    lr_mult: " << blobs_lr_w1 <<
+        "  } ";
+    if (bias_term) {
+      proto << "  param { lr_mult: " << blobs_lr_b1 << " } ";
+    }
+    proto <<
+        "  bottom: 'data' "
+        "  top: 'innerproduct1' ";
+    if (midnet_loss_weight) {
+      proto << "  loss_weight: " << *midnet_loss_weight << " ";
+    }
+    proto <<
+        "} "
+        "layer { "
+        "  name: 'innerproduct2' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 10 "
+        "    bias_term: " << bias_str <<
+        "    weight_filler { "
+        "      type: 'gaussian' "
+        "      std: 10 "
+        "    } "
+        "  } "
+        "  param { "
+        "    name: 'unsharedweights2' "
+        "    lr_mult: " << blobs_lr_w2 <<
+        "  } ";
+    if (bias_term) {
+      proto << "  param { lr_mult: " << blobs_lr_b2 << " } ";
+    }
+    proto <<
+        "  bottom: 'data' "
+        "  top: 'innerproduct2' "
+        "} "
+        "layer { "
+        "  name: 'loss' "
+        "  type: 'EuclideanLoss' ";
+    if (loss_weight) {
+      proto << "  loss_weight: " << *loss_weight << " ";
+    }
+    proto <<
+        "  bottom: 'innerproduct1' "
+        "  bottom: 'innerproduct2' "
+        "} ";
+    InitNetFromProtoString(proto.str());
+  }
+
+  virtual void InitSharedWeightsNet() {
+    const string& proto =
+        "name: 'SharedWeightsNetwork' "
+        "layer { "
+        "  name: 'data' "
+        "  type: 'DummyData' "
+        "  dummy_data_param { "
+        "    num: 5 "
+        "    channels: 2 "
+        "    height: 3 "
+        "    width: 4 "
+        "    data_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "  } "
+        "  top: 'data' "
+        "} "
+        "layer { "
+        "  name: 'innerproduct1' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 10 "
+        "    bias_term: false "
+        "    weight_filler { "
+        "      type: 'gaussian' "
+        "      std: 10 "
+        "    } "
+        "  } "
+        "  param { name: 'sharedweights' } "
+        "  bottom: 'data' "
+        "  top: 'innerproduct1' "
+        "} "
+        "layer { "
+        "  name: 'innerproduct2' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 10 "
+        "    bias_term: false "
+        "    weight_filler { "
+        "      type: 'gaussian' "
+        "      std: 10 "
+        "    } "
+        "  } "
+        "  param { name: 'sharedweights' } "
+        "  bottom: 'data' "
+        "  top: 'innerproduct2' "
+        "} "
+        "layer { "
+        "  name: 'loss' "
+        "  type: 'EuclideanLoss' "
+        "  bottom: 'innerproduct1' "
+        "  bottom: 'innerproduct2' "
+        "} ";
+    InitNetFromProtoString(proto);
+  }
+
+  virtual void InitDiffDataUnsharedWeightsNet() {
+    const string& proto =
+        "name: 'DiffDataUnsharedWeightsNetwork' "
+        "layer { "
+        "  name: 'data' "
+        "  type: 'DummyData' "
+        "  dummy_data_param { "
+        "    num: 10 "
+        "    channels: 10 "
+        "    height: 1 "
+        "    width: 1 "
+        "    num: 10 "
+        "    channels: 10 "
+        "    height: 1 "
+        "    width: 1 "
+        "    data_filler { "
+        "      type: 'gaussian' "
+        "      std: 10 "
+        "    } "
+        "  } "
+        "  top: 'data1' "
+        "  top: 'data2' "
+        "} "
+        "layer { "
+        "  name: 'innerproduct1' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 10 "
+        "    bias_term: false "
+        "    weight_filler { "
+        "      type: 'constant' "
+        "      value: 0.5 "
+        "    } "
+        "  } "
+        "  param { name: 'unsharedweights1' } "
+        "  bottom: 'data1' "
+        "  top: 'innerproduct1' "
+        "} "
+        "layer { "
+        "  name: 'innerproduct2' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 10 "
+        "    bias_term: false "
+        "    weight_filler { "
+        "      type: 'constant' "
+        "      value: 0.5 "
+        "    } "
+        "  } "
+        "  param { name: 'unsharedweights2' } "
+        "  bottom: 'innerproduct1' "
+        "  top: 'innerproduct2' "
+        "} "
+        "layer { "
+        "  name: 'loss' "
+        "  type: 'EuclideanLoss' "
+        "  bottom: 'data2' "
+        "  bottom: 'innerproduct2' "
+        "} ";
+    InitNetFromProtoString(proto);
+  }
+
+  virtual void InitDiffDataSharedWeightsNet() {
+    const string& proto =
+        "name: 'DiffDataSharedWeightsNetwork' "
+        "layer { "
+        "  name: 'data' "
+        "  type: 'DummyData' "
+        "  dummy_data_param { "
+        "    num: 10 "
+        "    channels: 10 "
+        "    height: 1 "
+        "    width: 1 "
+        "    num: 10 "
+        "    channels: 10 "
+        "    height: 1 "
+        "    width: 1 "
+        "    data_filler { "
+        "      type: 'gaussian' "
+        "      std: 10 "
+        "    } "
+        "  } "
+        "  top: 'data1' "
+        "  top: 'data2' "
+        "} "
+        "layer { "
+        "  name: 'innerproduct1' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 10 "
+        "    bias_term: false "
+        "    weight_filler { "
+        "      type: 'constant' "
+        "      value: 0.5 "
+        "    } "
+        "  } "
+        "  param { name: 'sharedweights' } "
+        "  bottom: 'data1' "
+        "  top: 'innerproduct1' "
+        "} "
+        "layer { "
+        "  name: 'innerproduct2' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 10 "
+        "    bias_term: false "
+        "    weight_filler { "
+        "      type: 'constant' "
+        "      value: 0.5 "
+        "    } "
+        "  } "
+        "  param { name: 'sharedweights' } "
+        "  bottom: 'innerproduct1' "
+        "  top: 'innerproduct2' "
+        "} "
+        "layer { "
+        "  name: 'loss' "
+        "  type: 'EuclideanLoss' "
+        "  bottom: 'data2' "
+        "  bottom: 'innerproduct2' "
+        "} ";
+    InitNetFromProtoString(proto);
+  }
+
+  virtual void InitReshapableNet() {
+    const string& proto =
+        "name: 'ReshapableNetwork' "
+        "input: 'data' "
+        "input_dim: 1 "
+        "input_dim: 3 "
+        "input_dim: 100 "
+        "input_dim: 100 "
+        "layer { "
+        "  name: 'conv1' "
+        "  type: 'Convolution' "
+        "  bottom: 'data' "
+        "  top: 'conv1' "
+        "  convolution_param { "
+        "    num_output: 5 "
+        "    kernel_size: 3 "
+        "    stride: 2 "
+        "    weight_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "    bias_filler { "
+        "      type: 'constant' "
+        "      value: 0.2 "
+        "    } "
+        "  } "
+        "} "
+        "layer { "
+        "  name: 'relu1' "
+        "  type: 'ReLU' "
+        "  bottom: 'conv1' "
+        "  top: 'conv1' "
+        "} "
+        "layer { "
+        "  name: 'pool1' "
+        "  type: 'Pooling' "
+        "  bottom: 'conv1' "
+        "  top: 'pool1' "
+        "  pooling_param { "
+        "    pool: MAX "
+        "    kernel_size: 2 "
+        "    stride: 2 "
+        "  } "
+        "} "
+        "layer { "
+        "  name: 'norm1' "
+        "  type: 'LRN' "
+        "  bottom: 'pool1' "
+        "  top: 'norm1' "
+        "  lrn_param { "
+        "    local_size: 3 "
+        "  } "
+        "} "
+        "layer { "
+        "  name: 'softmax' "
+        "  type: 'Softmax' "
+        "  bottom: 'norm1' "
+        "  top: 'softmax' "
+        "} ";
+    InitNetFromProtoString(proto);
+  }
+
+  virtual void InitSkipPropNet(bool test_skip_true) {
+    string proto =
+      "name: 'SkipPropTestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'DummyData' "
+      "  dummy_data_param { "
+      "    shape { "
+      "      dim: 5 "
+      "      dim: 2 "
+      "      dim: 3 "
+      "      dim: 4 "
+      "    } "
+      "    data_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    shape { "
+      "      dim: 5 "
+      "    } "
+      "    data_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'silence' "
+      "  bottom: 'label' "
+      "  type: 'Silence' "
+      "} "
+      "layer { "
+      "  name: 'innerproduct' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'innerproduct' "
+      "} "
+      "layer { "
+      "  name: 'ip_fake_labels' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'fake_labels' "
+      "} "
+      "layer { "
+      "  name: 'argmax' "
+      "  bottom: 'fake_labels' "
+      "  top: 'label_argmax' "
+      "  type: 'ArgMax' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  bottom: 'innerproduct' "
+      "  bottom: 'label_argmax' ";
+    if (test_skip_true)
+      proto += "  propagate_down: true "
+               "  propagate_down: false ";
+    else
+      proto += "  propagate_down: true "
+               "  propagate_down: true ";
+    proto +=
+      "  top: 'cross_entropy_loss' "
+      "  type: 'SigmoidCrossEntropyLoss' "
+      "  loss_weight: 0.1 "
+      "} ";
+    InitNetFromProtoString(proto);
+  }
+
+  int seed_;
+  shared_ptr<Net<Dtype> > net_;
+};
+
+TYPED_TEST_CASE(NetTest, TestDtypesAndDevices);
+
+TYPED_TEST(NetTest, TestHasBlob) {
+  this->InitTinyNet();
+  EXPECT_TRUE(this->net_->has_blob("data"));
+  EXPECT_TRUE(this->net_->has_blob("label"));
+  EXPECT_TRUE(this->net_->has_blob("innerproduct"));
+  EXPECT_FALSE(this->net_->has_blob("loss"));
+  EXPECT_TRUE(this->net_->has_blob("top_loss"));
+}
+
+TYPED_TEST(NetTest, TestGetBlob) {
+  this->InitTinyNet();
+  EXPECT_EQ(this->net_->blob_by_name("data"), this->net_->blobs()[0]);
+  EXPECT_EQ(this->net_->blob_by_name("label"), this->net_->blobs()[1]);
+  EXPECT_EQ(this->net_->blob_by_name("innerproduct"), this->net_->blobs()[2]);
+  EXPECT_FALSE(this->net_->blob_by_name("loss"));
+  EXPECT_EQ(this->net_->blob_by_name("top_loss"), this->net_->blobs()[3]);
+}
+
+TYPED_TEST(NetTest, TestHasLayer) {
+  this->InitTinyNet();
+  EXPECT_TRUE(this->net_->has_layer("data"));
+  EXPECT_TRUE(this->net_->has_layer("innerproduct"));
+  EXPECT_TRUE(this->net_->has_layer("loss"));
+  EXPECT_FALSE(this->net_->has_layer("label"));
+}
+
+TYPED_TEST(NetTest, TestGetLayerByName) {
+  this->InitTinyNet();
+  EXPECT_EQ(this->net_->layer_by_name("data"), this->net_->layers()[0]);
+  EXPECT_EQ(this->net_->layer_by_name("innerproduct"), this->net_->layers()[1]);
+  EXPECT_EQ(this->net_->layer_by_name("loss"), this->net_->layers()[2]);
+  EXPECT_FALSE(this->net_->layer_by_name("label"));
+}
+
+TYPED_TEST(NetTest, TestBottomNeedBackward) {
+  this->InitTinyNet();
+  const vector<vector<bool> >& bottom_need_backward =
+      this->net_->bottom_need_backward();
+  EXPECT_EQ(3, bottom_need_backward.size());
+  EXPECT_EQ(0, bottom_need_backward[0].size());
+  EXPECT_EQ(1, bottom_need_backward[1].size());
+  EXPECT_EQ(false, bottom_need_backward[1][0]);
+  EXPECT_EQ(2, bottom_need_backward[2].size());
+  EXPECT_EQ(true, bottom_need_backward[2][0]);
+  EXPECT_EQ(false, bottom_need_backward[2][1]);
+}
+
+TYPED_TEST(NetTest, TestBottomNeedBackwardForce) {
+  const bool force_backward = true;
+  this->InitTinyNet(force_backward);
+  const vector<vector<bool> >& bottom_need_backward =
+      this->net_->bottom_need_backward();
+  EXPECT_EQ(3, bottom_need_backward.size());
+  EXPECT_EQ(0, bottom_need_backward[0].size());
+  EXPECT_EQ(1, bottom_need_backward[1].size());
+  EXPECT_EQ(true, bottom_need_backward[1][0]);
+  EXPECT_EQ(2, bottom_need_backward[2].size());
+  EXPECT_EQ(true, bottom_need_backward[2][0]);
+  EXPECT_EQ(false, bottom_need_backward[2][1]);
+}
+
+TYPED_TEST(NetTest, TestBottomNeedBackwardEuclideanForce) {
+  const bool force_backward = true;
+  this->InitTinyNetEuclidean(force_backward);
+  const vector<vector<bool> >& bottom_need_backward =
+      this->net_->bottom_need_backward();
+  EXPECT_EQ(3, bottom_need_backward.size());
+  EXPECT_EQ(0, bottom_need_backward[0].size());
+  EXPECT_EQ(1, bottom_need_backward[1].size());
+  EXPECT_EQ(true, bottom_need_backward[1][0]);
+  EXPECT_EQ(2, bottom_need_backward[2].size());
+  EXPECT_EQ(true, bottom_need_backward[2][0]);
+  EXPECT_EQ(true, bottom_need_backward[2][1]);
+}
+
+TYPED_TEST(NetTest, TestBottomNeedBackwardTricky) {
+  this->InitTrickyNet();
+  const vector<vector<bool> >& bottom_need_backward =
+      this->net_->bottom_need_backward();
+  EXPECT_EQ(4, bottom_need_backward.size());
+  EXPECT_EQ(0, bottom_need_backward[0].size());
+  EXPECT_EQ(1, bottom_need_backward[1].size());
+  EXPECT_EQ(false, bottom_need_backward[1][0]);
+  EXPECT_EQ(1, bottom_need_backward[2].size());
+  EXPECT_EQ(false, bottom_need_backward[2][0]);
+  EXPECT_EQ(2, bottom_need_backward[3].size());
+  EXPECT_EQ(true, bottom_need_backward[3][0]);
+  // The label input to the SoftmaxLossLayer should say it "needs backward"
+  // since it has weights under it, even though we expect this to cause a crash
+  // at training/test time.
+  EXPECT_EQ(true, bottom_need_backward[3][1]);
+}
+
+TYPED_TEST(NetTest, TestLossWeight) {
+  typedef typename TypeParam::Dtype Dtype;
+  // First, compute the loss and gradients with no loss_weight specified.
+  // In this case, the loss weight for the 'EuclideanLoss' layer should default
+  // to 1.
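+  // In sketch form, the checks below verify that scaling the only loss
+  // scales everything linearly:
+  //   loss(w) = w * loss(1),   d loss(w) / d x = w * d loss(1) / d x
+  // for every blob and parameter x.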
+  vector<Blob<Dtype>*> bottom;
+  Caffe::set_random_seed(this->seed_);
+  const bool kForceBackward = true;
+  this->InitUnsharedWeightsNet(NULL, NULL, kForceBackward);
+  const Dtype loss = this->net_->ForwardBackward(bottom);
+  const bool kCopyDiff = true;
+  vector<shared_ptr<Blob<Dtype> > > blob_grads;
+  this->CopyNetBlobs(kCopyDiff, &blob_grads);
+  vector<shared_ptr<Blob<Dtype> > > param_grads;
+  this->CopyNetParams(kCopyDiff, &param_grads);
+  // Check that the loss is non-trivial, otherwise the test doesn't prove much.
+  const Dtype kMinLossAbsValue = 1e-2;
+  ASSERT_GE(fabs(loss), kMinLossAbsValue);
+  const Dtype kErrorMargin = 1e-4;
+  const int kNumLossWeights = 6;
+  Dtype kLossWeights[kNumLossWeights] = {2, 0, 1, -1, -2.5, 3.7};
+  for (int i = 0; i < kNumLossWeights; ++i) {
+    Caffe::set_random_seed(this->seed_);
+    this->InitUnsharedWeightsNet(&kLossWeights[i], NULL, kForceBackward);
+    const Dtype weighted_loss = this->net_->ForwardBackward(bottom);
+    const Dtype error_margin = kErrorMargin * fabs(kLossWeights[i]);
+    EXPECT_NEAR(loss * kLossWeights[i], weighted_loss, error_margin)
+        << "loss weight = " << kLossWeights[i];
+    const vector<shared_ptr<Blob<Dtype> > >& weighted_blobs =
+        this->net_->blobs();
+    ASSERT_EQ(blob_grads.size(), weighted_blobs.size());
+    for (int j = 0; j < blob_grads.size(); ++j) {
+      ASSERT_EQ(blob_grads[j]->count(), weighted_blobs[j]->count());
+      for (int k = 0; k < blob_grads[j]->count(); ++k) {
+        EXPECT_NEAR(blob_grads[j]->cpu_diff()[k] * kLossWeights[i],
+                    weighted_blobs[j]->cpu_diff()[k], error_margin);
+      }
+    }
+    const vector<shared_ptr<Blob<Dtype> > >& weighted_params =
+        this->net_->params();
+    ASSERT_EQ(param_grads.size(), weighted_params.size());
+    for (int j = 0; j < param_grads.size(); ++j) {
+      ASSERT_EQ(param_grads[j]->count(), weighted_params[j]->count());
+      for (int k = 0; k < param_grads[j]->count(); ++k) {
+        EXPECT_NEAR(param_grads[j]->cpu_diff()[k] * kLossWeights[i],
+                    weighted_params[j]->cpu_diff()[k], error_margin);
+      }
+    }
+  }
+}
+
+TYPED_TEST(NetTest, TestLossWeightMidNet) {
+  typedef typename TypeParam::Dtype Dtype;
+  vector<Blob<Dtype>*> bottom;
+  Caffe::set_random_seed(this->seed_);
+  const bool kForceBackward = true;
+  Dtype loss_weight = 0;
+  Dtype midnet_loss_weight = 1;
+  this->InitUnsharedWeightsNet(&loss_weight, &midnet_loss_weight,
+                               kForceBackward);
+  const Dtype loss = this->net_->ForwardBackward(bottom);
+  const bool kCopyDiff = true;
+  const bool kReshape = true;
+  Blob<Dtype> data_grad;
+  data_grad.CopyFrom(*this->net_->blob_by_name("data"), kCopyDiff, kReshape);
+  // Check that the loss is non-trivial, otherwise the test doesn't prove much.
+  const Dtype kMinLossAbsValue = 1e-2;
+  ASSERT_GE(fabs(loss), kMinLossAbsValue);
+  const Dtype kErrorMargin = 1e-4;
+  const int kNumLossWeights = 6;
+  Dtype kLossWeights[kNumLossWeights] = {2, 0, 1, -1, -2.5, 3.7};
+  for (int i = 0; i < kNumLossWeights; ++i) {
+    Caffe::set_random_seed(this->seed_);
+    this->InitUnsharedWeightsNet(&loss_weight, &kLossWeights[i],
+                                 kForceBackward);
+    const Dtype weighted_loss = this->net_->ForwardBackward(bottom);
+    const Dtype error_margin = kErrorMargin * fabs(kLossWeights[i]);
+    EXPECT_NEAR(loss * kLossWeights[i], weighted_loss, error_margin)
+        << "loss weight = " << kLossWeights[i];
+    const shared_ptr<Blob<Dtype> >& weighted_blob =
+        this->net_->blob_by_name("data");
+    ASSERT_EQ(data_grad.count(), weighted_blob->count());
+    for (int j = 0; j < data_grad.count(); ++j) {
+      EXPECT_NEAR(data_grad.cpu_diff()[j] * kLossWeights[i],
+                  weighted_blob->cpu_diff()[j], error_margin);
+    }
+  }
+}
+
+TYPED_TEST(NetTest, TestComboLossWeight) {
+  typedef typename TypeParam::Dtype Dtype;
+  vector<Blob<Dtype>*> bottom;
+  Dtype loss_weight;
+  Dtype midnet_loss_weight;
+  const bool kForceBackward = true;
+  const Dtype kErrorMargin = 1e-4;
+
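+  // Sketch of the linearity argument used throughout this test: each loss
+  // top contributes a gradient proportional to its loss_weight, so with one
+  // weight w varied and the other held fixed,
+  //   grad(w) = grad_fixed + w * grad_unit,
+  // hence grad(3) - grad(1) == 2 * (grad(2) - grad(1)), and likewise for the
+  // losses themselves.
+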
+  // Get the loss and gradients with 'EuclideanLoss' weight 1,
+  // 'InnerProduct' weight 1.
+  loss_weight = 1;
+  midnet_loss_weight = 1;
+  Caffe::set_random_seed(this->seed_);
+  this->InitUnsharedWeightsNet(&loss_weight, &midnet_loss_weight,
+                               kForceBackward);
+  const Dtype loss = this->net_->ForwardBackward(bottom);
+  const bool kCopyDiff = true;
+  vector<shared_ptr<Blob<Dtype> > > blob_grads;
+  this->CopyNetBlobs(kCopyDiff, &blob_grads);
+  vector<shared_ptr<Blob<Dtype> > > param_grads;
+  this->CopyNetParams(kCopyDiff, &param_grads);
+
+  loss_weight = 2;
+  midnet_loss_weight = 1;
+  Caffe::set_random_seed(this->seed_);
+  this->InitUnsharedWeightsNet(&loss_weight, &midnet_loss_weight,
+                               kForceBackward);
+  const Dtype loss_main_2 = this->net_->ForwardBackward(bottom);
+  vector<shared_ptr<Blob<Dtype> > > blob_grads_loss_2;
+  this->CopyNetBlobs(kCopyDiff, &blob_grads_loss_2);
+  vector<shared_ptr<Blob<Dtype> > > param_grads_loss_2;
+  this->CopyNetParams(kCopyDiff, &param_grads_loss_2);
+
+  loss_weight = 3;
+  midnet_loss_weight = 1;
+  Caffe::set_random_seed(this->seed_);
+  this->InitUnsharedWeightsNet(&loss_weight, &midnet_loss_weight,
+                               kForceBackward);
+  const Dtype loss_main_3 = this->net_->ForwardBackward(bottom);
+  const vector<shared_ptr<Blob<Dtype> > >& blob_grads_loss_3 =
+      this->net_->blobs();
+  ASSERT_EQ(blob_grads.size(), blob_grads_loss_3.size());
+  ASSERT_EQ(blob_grads_loss_2.size(), blob_grads_loss_3.size());
+  for (int j = 0; j < blob_grads.size(); ++j) {
+    const string& blob_name = this->net_->blob_names()[j];
+    bool grad_should_change = true;
+    if (blob_name == "innerproduct1_innerproduct1_0_split_0") {
+      grad_should_change = false;
+    }
+    ASSERT_EQ(blob_grads[j]->count(), blob_grads_loss_3[j]->count());
+    ASSERT_EQ(blob_grads_loss_2[j]->count(), blob_grads_loss_3[j]->count());
+    for (int k = 0; k < blob_grads[j]->count(); ++k) {
+      const Dtype grad_diff_2 = blob_grads_loss_2[j]->cpu_diff()[k] -
+                                    blob_grads[j]->cpu_diff()[k];
+      const Dtype grad_diff_3 = blob_grads_loss_3[j]->cpu_diff()[k] -
+                                    blob_grads[j]->cpu_diff()[k];
+      if (grad_should_change) {
+        // Test non-triviality.
+        const Dtype kMinGradDiffAbsValue = 1e-4;
+        EXPECT_GT(fabs(grad_diff_2), kMinGradDiffAbsValue) << blob_name;
+        EXPECT_NEAR(2 * grad_diff_2, grad_diff_3, kErrorMargin) << blob_name;
+      } else {
+        EXPECT_EQ(0, grad_diff_2) << blob_name;
+        EXPECT_EQ(0, grad_diff_3) << blob_name;
+      }
+    }
+  }
+
+  loss_weight = 1;
+  midnet_loss_weight = 2;
+  Caffe::set_random_seed(this->seed_);
+  this->InitUnsharedWeightsNet(&loss_weight, &midnet_loss_weight,
+                               kForceBackward);
+  const Dtype loss_midnet_2 = this->net_->ForwardBackward(bottom);
+  this->CopyNetBlobs(kCopyDiff, &blob_grads_loss_2);
+  this->CopyNetParams(kCopyDiff, &param_grads_loss_2);
+
+  loss_weight = 1;
+  midnet_loss_weight = 3;
+  Caffe::set_random_seed(this->seed_);
+  this->InitUnsharedWeightsNet(&loss_weight, &midnet_loss_weight,
+                               kForceBackward);
+  const Dtype loss_midnet_3 = this->net_->ForwardBackward(bottom);
+  const vector<shared_ptr<Blob<Dtype> > >& blob_grads_midnet_loss_3 =
+      this->net_->blobs();
+  ASSERT_EQ(blob_grads.size(), blob_grads_midnet_loss_3.size());
+  ASSERT_EQ(blob_grads_loss_2.size(), blob_grads_midnet_loss_3.size());
+  const vector<string>& blob_names = this->net_->blob_names();
+  for (int j = 0; j < blob_grads.size(); ++j) {
+    const string& blob_name = blob_names[j];
+    bool grad_should_change = false;
+    if (blob_name == "innerproduct1" ||
+        blob_name == "innerproduct1_innerproduct1_0_split_0" ||
+        blob_name == "data_data_0_split_0" || blob_name == "data") {
+      grad_should_change = true;
+    }
+    ASSERT_EQ(blob_grads[j]->count(), blob_grads_midnet_loss_3[j]->count());
+    ASSERT_EQ(blob_grads[j]->count(), blob_grads_loss_2[j]->count());
+    for (int k = 0; k < blob_grads[j]->count(); ++k) {
+      const Dtype grad_diff_2 = blob_grads_loss_2[j]->cpu_diff()[k] -
+                                    blob_grads[j]->cpu_diff()[k];
+      const Dtype grad_diff_3 = blob_grads_midnet_loss_3[j]->cpu_diff()[k] -
+                                    blob_grads[j]->cpu_diff()[k];
+      if (grad_should_change) {
+        // Test non-triviality.
+        const Dtype kMinGradDiffAbsValue = 1e-4;
+        EXPECT_GT(fabs(grad_diff_2), kMinGradDiffAbsValue) << blob_name;
+        EXPECT_NEAR(2 * grad_diff_2, grad_diff_3, kErrorMargin) << blob_name;
+      } else {
+        EXPECT_EQ(0, grad_diff_2) << blob_name;
+        EXPECT_EQ(0, grad_diff_3) << blob_name;
+      }
+    }
+  }
+
+  const Dtype kMinLossDiffAbsValue = 1e-4;
+
+  Dtype loss_diff_2 = loss_main_2 - loss;
+  // Test non-triviality.
+  EXPECT_GT(fabs(loss_diff_2), kMinLossDiffAbsValue);
+  Dtype loss_diff_3 = loss_main_3 - loss;
+  EXPECT_NEAR(2 * loss_diff_2, loss_diff_3, kErrorMargin);
+
+  loss_diff_2 = loss_midnet_2 - loss;
+  // Test non-triviality.
+  EXPECT_GT(fabs(loss_diff_2), kMinLossDiffAbsValue);
+  loss_diff_3 = loss_midnet_3 - loss;
+  EXPECT_NEAR(2 * loss_diff_2, loss_diff_3, kErrorMargin);
+}
+
+TYPED_TEST(NetTest, TestBackwardWithAccuracyLayer) {
+  typedef typename TypeParam::Dtype Dtype;
+  const bool kForceBackward = false;
+  const bool kAccuracyLayer = true;
+  this->InitTinyNet(kForceBackward, kAccuracyLayer);
+  EXPECT_TRUE(this->net_->has_blob("accuracy"));
+  vector<Blob<Dtype>*> bottom;
+  // Test that we can do Backward even though we have an 'Accuracy' layer.
+  this->net_->ForwardBackward(bottom);
+}
+
+TYPED_TEST(NetTest, TestUnsharedWeightsDataNet) {
+  typedef typename TypeParam::Dtype Dtype;
+  this->InitUnsharedWeightsNet();
+  vector<Blob<Dtype>*> bottom;
+  Dtype loss;
+  this->net_->Forward(bottom, &loss);
+  EXPECT_GT(loss, 0);
+}
+
+TYPED_TEST(NetTest, TestSharedWeightsDataNet) {
+  typedef typename TypeParam::Dtype Dtype;
+  this->InitSharedWeightsNet();
+  vector<Blob<Dtype>*> bottom;
+  Dtype loss;
+  this->net_->Forward(bottom, &loss);
+  EXPECT_FLOAT_EQ(loss, 0);
+}
+
+TYPED_TEST(NetTest, TestUnsharedWeightsDiffNet) {
+  typedef typename TypeParam::Dtype Dtype;
+  this->InitUnsharedWeightsNet();
+  vector<Blob<Dtype>*> bottom;
+  Net<Dtype>* net = this->net_.get();
+  net->Forward(bottom);
+  net->Backward();
+  Layer<Dtype>* ip1_layer = net->layer_by_name("innerproduct1").get();
+  Layer<Dtype>* ip2_layer = net->layer_by_name("innerproduct2").get();
+  const int count = ip1_layer->blobs()[0]->count();
+  const Dtype* grad1 = ip1_layer->blobs()[0]->cpu_diff();
+  const Dtype* grad2 = ip2_layer->blobs()[0]->cpu_diff();
+  for (int i = 0; i < count; ++i) {
+    EXPECT_GT(fabs(grad1[i]), 0);
+    EXPECT_FLOAT_EQ(-1 * grad1[i], grad2[i]);
+  }
+}
+
+TYPED_TEST(NetTest, TestSharedWeightsDiffNet) {
+  typedef typename TypeParam::Dtype Dtype;
+  this->InitSharedWeightsNet();
+  vector<Blob<Dtype>*> bottom;
+  Net<Dtype>* net = this->net_.get();
+  Dtype loss;
+  net->Forward(bottom, &loss);
+  net->Backward();
+  EXPECT_FLOAT_EQ(loss, 0);
+  Layer<Dtype>* ip1_layer = net->layer_by_name("innerproduct1").get();
+  Layer<Dtype>* ip2_layer = net->layer_by_name("innerproduct2").get();
+  const int count = ip1_layer->blobs()[0]->count();
+  const Dtype* grad1 = ip1_layer->blobs()[0]->cpu_diff();
+  const Dtype* grad2 = ip2_layer->blobs()[0]->cpu_diff();
+  for (int i = 0; i < count; ++i) {
+    EXPECT_FLOAT_EQ(0, grad1[i]);
+    EXPECT_FLOAT_EQ(0, grad2[i]);
+  }
+}
+
+TYPED_TEST(NetTest, TestSharedWeightsUpdate) {
+  typedef typename TypeParam::Dtype Dtype;
+  Caffe::set_random_seed(this->seed_);
+  this->InitDiffDataSharedWeightsNet();
+  vector<Blob<Dtype>*> bottom;
+  EXPECT_EQ(this->net_->layer_names()[1], "innerproduct1");
+  EXPECT_EQ(this->net_->layer_names()[2], "innerproduct2");
+  Blob<Dtype>* ip1_weights = this->net_->layers()[1]->blobs()[0].get();
+  Blob<Dtype>* ip2_weights = this->net_->layers()[2]->blobs()[0].get();
+  // Check that data blobs of shared weights share the same location in memory.
+  EXPECT_EQ(ip1_weights->cpu_data(), ip2_weights->cpu_data());
+  // Check that diff blobs of shared weights are at different locations in
+  // memory.  (The diffs should be accumulated at update time.)
+  EXPECT_NE(ip1_weights->cpu_diff(), ip2_weights->cpu_diff());
+  this->net_->Forward(bottom);
+  this->net_->Backward();
+  // Compute the expected update as the data minus the two diffs.
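+  // In equation form, the axpy calls below compute the expected
+  // post-Update() value
+  //   W_new = W_old - (dW_ip1 + dW_ip2),
+  // i.e. the two layers' diffs are summed because they refer to one shared
+  // parameter blob.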
+  Blob<Dtype> shared_params;
+  const bool reshape = true;
+  const bool copy_diff = false;
+  shared_params.CopyFrom(*ip1_weights, copy_diff, reshape);
+  shared_params.CopyFrom(*ip1_weights, !copy_diff, reshape);
+  const int count = ip1_weights->count();
+  // Make sure the diffs are non-trivial.
+  for (int i = 0; i < count; ++i) {
+    EXPECT_NE(0, ip1_weights->cpu_diff()[i]);
+    EXPECT_NE(0, ip2_weights->cpu_diff()[i]);
+    EXPECT_NE(ip1_weights->cpu_diff()[i], ip2_weights->cpu_diff()[i]);
+  }
+  caffe_axpy(count, Dtype(1), ip2_weights->cpu_diff(),
+             shared_params.mutable_cpu_diff());
+  caffe_axpy(count, Dtype(-1), shared_params.cpu_diff(),
+             shared_params.mutable_cpu_data());
+  const Dtype* expected_updated_params = shared_params.cpu_data();
+  this->net_->Update();
+  const Dtype* actual_updated_params = ip1_weights->cpu_data();
+  for (int i = 0; i < count; ++i) {
+    EXPECT_EQ(expected_updated_params[i], actual_updated_params[i]);
+  }
+  // Check that data blobs of shared weights STILL point to the same memory
+  // location, i.e. that the sharing survives the parameter update.
+  EXPECT_EQ(ip1_weights->cpu_data(), ip2_weights->cpu_data());
+
+  Caffe::set_random_seed(this->seed_);
+  this->InitDiffDataUnsharedWeightsNet();
+  EXPECT_EQ(this->net_->layer_names()[1], "innerproduct1");
+  EXPECT_EQ(this->net_->layer_names()[2], "innerproduct2");
+  ip1_weights = this->net_->layers()[1]->blobs()[0].get();
+  ip2_weights = this->net_->layers()[2]->blobs()[0].get();
+  // Check that data and diff blobs of unshared weights are at different
+  // locations in memory.
+  EXPECT_NE(ip1_weights->cpu_data(), ip2_weights->cpu_data());
+  EXPECT_NE(ip1_weights->cpu_diff(), ip2_weights->cpu_diff());
+  this->net_->Forward(bottom);
+  this->net_->Backward();
+  // Compute the expected update.
+  Blob<Dtype> unshared_params1;
+  unshared_params1.CopyFrom(*ip1_weights, copy_diff, reshape);
+  unshared_params1.CopyFrom(*ip1_weights, !copy_diff, reshape);
+  Blob<Dtype> unshared_params2;
+  unshared_params2.CopyFrom(*ip2_weights, copy_diff, reshape);
+  unshared_params2.CopyFrom(*ip2_weights, !copy_diff, reshape);
+  // Make sure the diffs are non-trivial and sum to the diff in the shared net.
+  for (int i = 0; i < count; ++i) {
+    EXPECT_NE(0, ip1_weights->cpu_diff()[i]);
+    EXPECT_NE(0, ip2_weights->cpu_diff()[i]);
+    EXPECT_NE(ip1_weights->cpu_diff()[i], ip2_weights->cpu_diff()[i]);
+    EXPECT_EQ(ip1_weights->cpu_diff()[i] + ip2_weights->cpu_diff()[i],
+              shared_params.cpu_diff()[i]);
+  }
+  caffe_axpy(count, Dtype(-1), ip1_weights->cpu_diff(),
+             unshared_params1.mutable_cpu_data());
+  caffe_axpy(count, Dtype(-1), ip2_weights->cpu_diff(),
+             unshared_params2.mutable_cpu_data());
+  const Dtype* expected_updated_params1 = unshared_params1.cpu_data();
+  const Dtype* expected_updated_params2 = unshared_params2.cpu_data();
+  this->net_->Update();
+  const Dtype* actual_updated_params1 = ip1_weights->cpu_data();
+  const Dtype* actual_updated_params2 = ip2_weights->cpu_data();
+  for (int i = 0; i < count; ++i) {
+    EXPECT_EQ(expected_updated_params1[i], actual_updated_params1[i]);
+    EXPECT_EQ(expected_updated_params2[i], actual_updated_params2[i]);
+    EXPECT_NE(actual_updated_params1[i], actual_updated_params2[i]);
+    EXPECT_NE(expected_updated_params, expected_updated_params1);
+  }
+}
+
+TYPED_TEST(NetTest, TestSharedWeightsResume) {
+  typedef typename TypeParam::Dtype Dtype;
+
+  // Create a net with weight sharing; Update it once.
+  Caffe::set_random_seed(this->seed_);
+  this->InitDiffDataSharedWeightsNet();
+  vector<Blob<Dtype>*> bottom;
+  EXPECT_EQ(this->net_->layer_names()[1], "innerproduct1");
+  EXPECT_EQ(this->net_->layer_names()[2], "innerproduct2");
+  Blob<Dtype>* ip1_weights = this->net_->layers()[1]->blobs()[0].get();
+  Blob<Dtype>* ip2_weights = this->net_->layers()[2]->blobs()[0].get();
+  // Check that data blobs of shared weights share the same location in memory.
+  EXPECT_EQ(ip1_weights->cpu_data(), ip2_weights->cpu_data());
+  // Check that diff blobs of shared weights are at different locations in
+  // memory.  (The diffs should be accumulated at update time.)
+  EXPECT_NE(ip1_weights->cpu_diff(), ip2_weights->cpu_diff());
+  this->net_->ForwardBackward(bottom);
+  this->net_->Update();
+  Blob<Dtype> shared_params;
+  const bool kReshape = true;
+  const bool kCopyDiff = false;
+  shared_params.CopyFrom(*ip1_weights, kCopyDiff, kReshape);
+  const int count = ip1_weights->count();
+
+  // Write the net to a NetParameter, as in Solver::Snapshot.
+  NetParameter net_param;
+  this->net_->ToProto(&net_param);
+
+  // Reinitialize the net and copy parameters from net_param, as in
+  // Solver::Restore.
+  Caffe::set_random_seed(this->seed_);
+  this->InitDiffDataSharedWeightsNet();
+  this->net_->CopyTrainedLayersFrom(net_param);
+  ip1_weights = this->net_->layers()[1]->blobs()[0].get();
+  ip2_weights = this->net_->layers()[2]->blobs()[0].get();
+  ASSERT_FALSE(NULL == ip1_weights);
+  ASSERT_FALSE(NULL == ip2_weights);
+  EXPECT_NE(ip1_weights, ip2_weights);
+  // Check that data blobs of shared weights share the same location in memory.
+  EXPECT_EQ(ip1_weights->cpu_data(), ip2_weights->cpu_data());
+  for (int i = 0; i < count; ++i) {
+    EXPECT_FLOAT_EQ(shared_params.cpu_data()[i], ip1_weights->cpu_data()[i]);
+  }
+  // Check that diff blobs of shared weights are at different locations in
+  // memory.  (The diffs should be accumulated at update time.)
+  EXPECT_NE(ip1_weights->cpu_diff(), ip2_weights->cpu_diff());
+}
+
+TYPED_TEST(NetTest, TestParamPropagateDown) {
+  typedef typename TypeParam::Dtype Dtype;
+  vector<Blob<Dtype>*> bottom;
+  const bool kBiasTerm = true, kForceBackward = false;
+  const Dtype* kLossWeight1 = NULL;
+  const Dtype* kLossWeight2 = NULL;
+
+  // Run the net with all params learned; check that gradients are non-zero.
+  Caffe::set_random_seed(this->seed_);
+  Dtype blobs_lr_w1 = 1, blobs_lr_w2 = 1, blobs_lr_b1 = 2, blobs_lr_b2 = 2;
+  this->InitUnsharedWeightsNet(kLossWeight1, kLossWeight2, kForceBackward,
+      kBiasTerm, blobs_lr_w1, blobs_lr_w2, blobs_lr_b1, blobs_lr_b2);
+  this->net_->Forward(bottom);
+  this->net_->Backward();
+  const vector<shared_ptr<Blob<Dtype> > >& params = this->net_->params();
+  const int num_params = params.size();
+  ASSERT_EQ(4, num_params);
+  const Dtype kNonZeroTestMin = 1e-3;
+  vector<Dtype> param_asums(params.size());
+  for (int i = 0; i < num_params; ++i) {
+    const Dtype param_asum =
+       caffe_cpu_asum(params[i]->count(), params[i]->cpu_diff());
+    param_asums[i] = param_asum;
+    EXPECT_GT(param_asum, kNonZeroTestMin);
+  }
+
+  // Change the learning rates to different non-zero values; should see same
+  // gradients.
+  Caffe::set_random_seed(this->seed_);
+  blobs_lr_w1 *= 2, blobs_lr_w2 *= 2, blobs_lr_b1 *= 2, blobs_lr_b2 *= 2;
+  this->InitUnsharedWeightsNet(kLossWeight1, kLossWeight2, kForceBackward,
+      kBiasTerm, blobs_lr_w1, blobs_lr_w2, blobs_lr_b1, blobs_lr_b2);
+  this->net_->Forward(bottom);
+  this->net_->Backward();
+  const vector<shared_ptr<Blob<Dtype> > >& params2 = this->net_->params();
+  ASSERT_EQ(num_params, params2.size());
+  for (int i = 0; i < num_params; ++i) {
+    const Dtype param_asum =
+       caffe_cpu_asum(params2[i]->count(), params2[i]->cpu_diff());
+    EXPECT_FLOAT_EQ(param_asum, param_asums[i]);
+  }
+
+  // Change a subset of the learning rates to zero; check that we see zero
+  // gradients for those.
+  Caffe::set_random_seed(this->seed_);
+  blobs_lr_w1 = 1, blobs_lr_w2 = 0, blobs_lr_b1 = 0, blobs_lr_b2 = 1;
+  this->InitUnsharedWeightsNet(kLossWeight1, kLossWeight2, kForceBackward,
+      kBiasTerm, blobs_lr_w1, blobs_lr_w2, blobs_lr_b1, blobs_lr_b2);
+  this->net_->Forward(bottom);
+  this->net_->Backward();
+  const vector<shared_ptr<Blob<Dtype> > >& params3 = this->net_->params();
+  ASSERT_EQ(num_params, params3.size());
+  for (int i = 0; i < num_params; ++i) {
+    const Dtype param_asum =
+       caffe_cpu_asum(params3[i]->count(), params3[i]->cpu_diff());
+    if (i == 1 || i == 2) {
+      EXPECT_FLOAT_EQ(0, param_asum);
+    } else {
+      EXPECT_FLOAT_EQ(param_asum, param_asums[i]);
+    }
+  }
+
+  // Change the opposite subset of the learning rates to zero.
+  Caffe::set_random_seed(this->seed_);
+  blobs_lr_w1 = 0, blobs_lr_w2 = 1, blobs_lr_b1 = 1, blobs_lr_b2 = 0;
+  this->InitUnsharedWeightsNet(kLossWeight1, kLossWeight2, kForceBackward,
+      kBiasTerm, blobs_lr_w1, blobs_lr_w2, blobs_lr_b1, blobs_lr_b2);
+  this->net_->Forward(bottom);
+  this->net_->Backward();
+  const vector<shared_ptr<Blob<Dtype> > >& params4 = this->net_->params();
+  ASSERT_EQ(num_params, params4.size());
+  for (int i = 0; i < num_params; ++i) {
+    const Dtype param_asum =
+       caffe_cpu_asum(params4[i]->count(), params4[i]->cpu_diff());
+    if (i == 0 || i == 3) {
+      EXPECT_FLOAT_EQ(0, param_asum);
+    } else {
+      EXPECT_FLOAT_EQ(param_asum, param_asums[i]);
+    }
+  }
+}
+
+TYPED_TEST(NetTest, TestFromTo) {
+  typedef typename TypeParam::Dtype Dtype;
+  this->InitTinyNet();
+
+  // Run Forward and Backward, recording the data diff and loss.
+  Blob<Dtype> data;
+  data.ReshapeLike(*this->net_->blob_by_name("data"));
+  this->net_->ForwardPrefilled();
+  this->net_->Backward();
+  data.CopyFrom(*this->net_->blob_by_name("data"), true, true);
+  const Dtype *loss_ptr = this->net_->output_blobs()[0]->cpu_data();
+  Dtype loss = *loss_ptr;
+
+  // Check that combining partial Forwards gives the same loss.
+  for (int i = 1; i < this->net_->layers().size(); ++i) {
+    // Note that we skip layer zero to keep the same data.
+    this->net_->ForwardFromTo(1, 1);
+    if (i < this->net_->layers().size() - 1) {
+      this->net_->ForwardFrom(i + 1);
+    }
+    EXPECT_EQ(loss, *loss_ptr);
+  }
+
+  // Check that combining partial Backwards gives the same data diff.
+  for (int i = 1; i < this->net_->layers().size(); ++i) {
+    this->net_->BackwardTo(i);
+    this->net_->BackwardFrom(i - 1);
+    for (int j = 0; j < data.count(); ++j) {
+      EXPECT_EQ(data.cpu_diff()[j],
+          this->net_->blob_by_name("data")->cpu_diff()[j]);
+    }
+  }
+}
+
+class FilterNetTest : public ::testing::Test {
+ protected:
+  void RunFilterNetTest(
+      const string& input_param_string, const string& filtered_param_string) {
+    NetParameter input_param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(
+        input_param_string, &input_param));
+    NetParameter expected_filtered_param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(
+        filtered_param_string, &expected_filtered_param));
+    NetParameter actual_filtered_param;
+    Net<float>::FilterNet(input_param, &actual_filtered_param);
+    EXPECT_EQ(expected_filtered_param.DebugString(),
+        actual_filtered_param.DebugString());
+    // Also test idempotence.
+    NetParameter double_filtered_param;
+    Net<float>::FilterNet(actual_filtered_param, &double_filtered_param);
+    EXPECT_EQ(actual_filtered_param.DebugString(),
+       double_filtered_param.DebugString());
+  }
+};
+
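+// Sketch of the rule semantics the tests below exercise (inferred from their
+// expected outputs; illustrative only, not a quote of the Net implementation):
+// a layer with no include/exclude rules is always kept; with include rules it
+// is kept iff at least one rule matches the NetState; with exclude rules it
+// is dropped iff at least one rule matches.  A rule matches when its phase
+// (if any) equals the state's phase, the state's level lies within
+// [min_level, max_level], every listed stage is present in the state, and no
+// listed not_stage is present.
+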
+TEST_F(FilterNetTest, TestNoFilter) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunFilterNetTest(input_proto, input_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
+  const string& input_proto =
+      "name: 'LeNet' "
+      "layer { "
+      "  name: 'mnist' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "  data_param { "
+      "    source: 'mnist-train-leveldb' "
+      "    batch_size: 64 "
+      "  } "
+      "  transform_param { "
+      "    scale: 0.00390625 "
+      "  } "
+      "  include: { phase: TRAIN } "
+      "} "
+      "layer { "
+      "  name: 'mnist' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "  data_param { "
+      "    source: 'mnist-test-leveldb' "
+      "    batch_size: 100 "
+      "  } "
+      "  transform_param { "
+      "    scale: 0.00390625 "
+      "  } "
+      "  include: { phase: TEST } "
+      "} "
+      "layer { "
+      "  name: 'conv1' "
+      "  type: 'Convolution' "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "  param { "
+      "    lr_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "  } "
+      "  convolution_param { "
+      "    num_output: 20 "
+      "    kernel_size: 5 "
+      "    stride: 1 "
+      "    weight_filler { "
+      "      type: 'xavier' "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "    } "
+      "  } "
+      "} "
+      "layer { "
+      "  name: 'ip1' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'conv1' "
+      "  top: 'ip1' "
+      "  param { "
+      "    lr_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "  } "
+      "  inner_product_param { "
+      "    num_output: 10 "
+      "    weight_filler { "
+      "      type: 'xavier' "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "    } "
+      "  } "
+      "} "
+      "layer { "
+      "  name: 'accuracy' "
+      "  type: 'Accuracy' "
+      "  bottom: 'ip1' "
+      "  bottom: 'label' "
+      "  top: 'accuracy' "
+      "  include: { phase: TEST } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'ip2' "
+      "  bottom: 'label' "
+      "  top: 'loss' "
+      "} ";
+  const string input_proto_train = "state: { phase: TRAIN } " + input_proto;
+  const string input_proto_test = "state: { phase: TEST } " + input_proto;
+  const string output_proto_train =
+      "name: 'LeNet' "
+      "layer { "
+      "  name: 'mnist' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "  data_param { "
+      "    source: 'mnist-train-leveldb' "
+      "    batch_size: 64 "
+      "  } "
+      "  transform_param { "
+      "    scale: 0.00390625 "
+      "  } "
+      "  include: { phase: TRAIN } "
+      "} "
+      "layer { "
+      "  name: 'conv1' "
+      "  type: 'Convolution' "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "  param { "
+      "    lr_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "  } "
+      "  convolution_param { "
+      "    num_output: 20 "
+      "    kernel_size: 5 "
+      "    stride: 1 "
+      "    weight_filler { "
+      "      type: 'xavier' "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "    } "
+      "  } "
+      "} "
+      "layer { "
+      "  name: 'ip1' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'conv1' "
+      "  top: 'ip1' "
+      "  param { "
+      "    lr_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "  } "
+      "  inner_product_param { "
+      "    num_output: 10 "
+      "    weight_filler { "
+      "      type: 'xavier' "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "    } "
+      "  } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'ip2' "
+      "  bottom: 'label' "
+      "  top: 'loss' "
+      "} ";
+  const string& output_proto_test =
+      "name: 'LeNet' "
+      "layer { "
+      "  name: 'mnist' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "  data_param { "
+      "    source: 'mnist-test-leveldb' "
+      "    batch_size: 100 "
+      "  } "
+      "  transform_param { "
+      "    scale: 0.00390625 "
+      "  } "
+      "  include: { phase: TEST } "
+      "} "
+      "layer { "
+      "  name: 'conv1' "
+      "  type: 'Convolution' "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "  param { "
+      "    lr_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "  } "
+      "  convolution_param { "
+      "    num_output: 20 "
+      "    kernel_size: 5 "
+      "    stride: 1 "
+      "    weight_filler { "
+      "      type: 'xavier' "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "    } "
+      "  } "
+      "} "
+      "layer { "
+      "  name: 'ip1' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'conv1' "
+      "  top: 'ip1' "
+      "  param { "
+      "    lr_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "  } "
+      "  inner_product_param { "
+      "    num_output: 10 "
+      "    weight_filler { "
+      "      type: 'xavier' "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "    } "
+      "  } "
+      "} "
+      "layer { "
+      "  name: 'accuracy' "
+      "  type: 'Accuracy' "
+      "  bottom: 'ip1' "
+      "  bottom: 'label' "
+      "  top: 'accuracy' "
+      "  include: { phase: TEST } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'ip2' "
+      "  bottom: 'label' "
+      "  top: 'loss' "
+      "} ";
+  const string output_proto_train_explicit =
+      output_proto_train + " state: { phase: TRAIN } ";
+  const string output_proto_test_explicit =
+      output_proto_test + " state: { phase: TEST } ";
+  this->RunFilterNetTest(input_proto_train, output_proto_train_explicit);
+  this->RunFilterNetTest(input_proto_test, output_proto_test_explicit);
+}
+
+TEST_F(FilterNetTest, TestFilterOutByStage) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "  include: { stage: 'mystage' } "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  const string& output_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunFilterNetTest(input_proto, output_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterOutByStage2) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { stage: 'mystage' } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  const string& output_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunFilterNetTest(input_proto, output_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterInByStage) {
+  const string& input_proto =
+      "state: { stage: 'mystage' } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { stage: 'mystage' } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunFilterNetTest(input_proto, input_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterInByStage2) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  exclude: { stage: 'mystage' } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunFilterNetTest(input_proto, input_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterOutByMultipleStage) {
+  const string& input_proto =
+      "state: { stage: 'mystage' } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { stage: 'mystage' stage: 'myotherstage' } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "  include: { stage: 'mystage' } "
+      "} ";
+  const string& output_proto =
+      "state: { stage: 'mystage' } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "  include: { stage: 'mystage' } "
+      "} ";
+  this->RunFilterNetTest(input_proto, output_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterInByMultipleStage) {
+  const string& input_proto =
+      "state: { stage: 'mystage' } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { stage: 'myotherstage' } "
+      "  include: { stage: 'mystage' } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "  include: { stage: 'mystage' } "
+      "} ";
+  this->RunFilterNetTest(input_proto, input_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterInByMultipleStage2) {
+  const string& input_proto =
+      "state: { stage: 'mystage' stage: 'myotherstage' } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { stage: 'mystage' stage: 'myotherstage' } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "  include: { stage: 'mystage' } "
+      "} ";
+  this->RunFilterNetTest(input_proto, input_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterInByNotStage) {
+  const string& input_proto =
+      "state: { stage: 'mystage' } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { not_stage: 'myotherstage' } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "  include: { not_stage: 'myotherstage' } "
+      "} ";
+  this->RunFilterNetTest(input_proto, input_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterOutByNotStage) {
+  const string& input_proto =
+      "state: { stage: 'mystage' } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { not_stage: 'mystage' } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "  include: { not_stage: 'mystage' } "
+      "} ";
+  const string& output_proto =
+      "state: { stage: 'mystage' } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} ";
+  this->RunFilterNetTest(input_proto, output_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterOutByMinLevel) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { min_level: 3 } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  const string& output_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunFilterNetTest(input_proto, output_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterOutByMaxLevel) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { max_level: -3 } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  const string& output_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunFilterNetTest(input_proto, output_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterInByMinLevel) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { min_level: 0 } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunFilterNetTest(input_proto, input_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterInByMinLevel2) {
+  const string& input_proto =
+      "state: { level: 7 } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { min_level: 3 } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunFilterNetTest(input_proto, input_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterInByMaxLevel) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { max_level: 0 } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunFilterNetTest(input_proto, input_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterInByMaxLevel2) {
+  const string& input_proto =
+      "state: { level: -7 } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { max_level: -3 } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunFilterNetTest(input_proto, input_proto);
+}
+
+TEST_F(FilterNetTest, TestFilterInOutByIncludeMultiRule) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { min_level: 2  phase: TRAIN } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "  include: { min_level: 2  phase: TEST } "
+      "} ";
+  const string& input_proto_train =
+      "state: { level: 4  phase: TRAIN } " + input_proto;
+  const string& input_proto_test =
+      "state: { level: 4  phase: TEST } " + input_proto;
+  const string& output_proto_train =
+      "state: { level: 4  phase: TRAIN } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { min_level: 2  phase: TRAIN } "
+      "} ";
+  const string& output_proto_test =
+      "state: { level: 4  phase: TEST } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "  include: { min_level: 2  phase: TEST } "
+      "} ";
+  this->RunFilterNetTest(input_proto_train, output_proto_train);
+  this->RunFilterNetTest(input_proto_test, output_proto_test);
+}
+
+TEST_F(FilterNetTest, TestFilterInByIncludeMultiRule) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  include: { min_level: 2  phase: TRAIN } "
+      "  include: { phase: TEST } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "  include: { min_level: 2  phase: TEST } "
+      "  include: { phase: TRAIN } "
+      "} ";
+  const string& input_proto_train =
+      "state: { level: 2  phase: TRAIN } " + input_proto;
+  const string& input_proto_test =
+      "state: { level: 2  phase: TEST } " + input_proto;
+  this->RunFilterNetTest(input_proto_train, input_proto_train);
+  this->RunFilterNetTest(input_proto_test, input_proto_test);
+}
+
+TEST_F(FilterNetTest, TestFilterInOutByExcludeMultiRule) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  exclude: { min_level: 2  phase: TRAIN } "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "  exclude: { min_level: 2  phase: TEST } "
+      "} ";
+  const string& input_proto_train =
+      "state: { level: 4  phase: TRAIN } " + input_proto;
+  const string& input_proto_test =
+      "state: { level: 4  phase: TEST } " + input_proto;
+  const string& output_proto_train =
+      "state: { level: 4  phase: TRAIN } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "  exclude: { min_level: 2  phase: TEST } "
+      "} ";
+  const string& output_proto_test =
+      "state: { level: 4  phase: TEST } "
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "  exclude: { min_level: 2  phase: TRAIN } "
+      "} ";
+  this->RunFilterNetTest(input_proto_train, output_proto_train);
+  this->RunFilterNetTest(input_proto_test, output_proto_test);
+}
+
+TYPED_TEST(NetTest, TestReshape) {
+  typedef typename TypeParam::Dtype Dtype;
+  // We set up bottom blobs of two different sizes, switch between
+  // them, and check that forward and backward both run and the results
+  // are the same.
+  Caffe::set_random_seed(this->seed_);
+  Caffe::set_mode(Caffe::CPU);
+  FillerParameter filler_param;
+  filler_param.set_std(1);
+  GaussianFiller<Dtype> filler(filler_param);
+  Blob<Dtype> blob1(4, 3, 9, 11);
+  Blob<Dtype> blob2(2, 3, 12, 10);
+  filler.Fill(&blob1);
+  filler.Fill(&blob2);
+
+  this->InitReshapableNet();
+  Blob<Dtype>* input_blob = this->net_->input_blobs()[0];
+  Blob<Dtype>* output_blob = this->net_->output_blobs()[0];
+  input_blob->Reshape(blob1.num(), blob1.channels(), blob1.height(),
+      blob1.width());
+  caffe_copy(blob1.count(), blob1.cpu_data(), input_blob->mutable_cpu_data());
+  this->net_->ForwardPrefilled();
+  // call backward just to make sure it runs
+  this->net_->Backward();
+  Blob<Dtype> output1(output_blob->num(), output_blob->channels(),
+      output_blob->height(), output_blob->width());
+  caffe_copy(output1.count(), output_blob->cpu_data(),
+      output1.mutable_cpu_data());
+
+  input_blob->Reshape(blob2.num(), blob2.channels(), blob2.height(),
+      blob2.width());
+  caffe_copy(blob2.count(), blob2.cpu_data(), input_blob->mutable_cpu_data());
+  this->net_->ForwardPrefilled();
+  this->net_->Backward();
+  Blob<Dtype> output2(output_blob->num(), output_blob->channels(),
+      output_blob->height(), output_blob->width());
+  caffe_copy(output2.count(), output_blob->cpu_data(),
+      output2.mutable_cpu_data());
+
+  input_blob->Reshape(blob1.num(), blob1.channels(), blob1.height(),
+      blob1.width());
+  caffe_copy(blob1.count(), blob1.cpu_data(), input_blob->mutable_cpu_data());
+  this->net_->ForwardPrefilled();
+  this->net_->Backward();
+  for (int i = 0; i < output1.count(); ++i) {
+    CHECK_EQ(*(output1.cpu_data() + i), *(output_blob->cpu_data() + i));
+  }
+
+  input_blob->Reshape(blob2.num(), blob2.channels(), blob2.height(),
+      blob2.width());
+  caffe_copy(blob2.count(), blob2.cpu_data(), input_blob->mutable_cpu_data());
+  this->net_->ForwardPrefilled();
+  this->net_->Backward();
+  for (int i = 0; i < output2.count(); ++i) {
+    CHECK_EQ(*(output2.cpu_data() + i), *(output_blob->cpu_data() + i));
+  }
+}
+
+TYPED_TEST(NetTest, TestSkipPropagateDown) {
+  // check bottom_need_backward if propagate_down is true
+  this->InitSkipPropNet(false);
+  vector<bool> vec_layer_need_backward = this->net_->layer_need_backward();
+  for (int layer_id = 0; layer_id < this->net_->layers().size(); ++layer_id) {
+    string layer_name = this->net_->layer_names()[layer_id];
+    if (layer_name == "loss") {
+      // access the bottom_need_backward entry corresponding to the label blob
+      bool need_back = this->net_->bottom_need_backward()[layer_id][1];
+      // if propagate_down is true, the loss layer will try to
+      // backpropagate on labels
+      EXPECT_TRUE(need_back) << "bottom_need_backward should be True";
+    }
+    // layer_need_backward should be True except for data and silence layers
+    if (layer_name.find("data") != std::string::npos ||
+          layer_name == "silence") {
+      EXPECT_FALSE(vec_layer_need_backward[layer_id])
+          << "layer_need_backward for " << layer_name << " should be False";
+    } else {
+      EXPECT_TRUE(vec_layer_need_backward[layer_id])
+          << "layer_need_backward for " << layer_name << " should be True";
+    }
+  }
+  // check bottom_need_backward if propagate_down is false
+  this->InitSkipPropNet(true);
+  vec_layer_need_backward.clear();
+  vec_layer_need_backward = this->net_->layer_need_backward();
+  for (int layer_id = 0; layer_id < this->net_->layers().size(); ++layer_id) {
+    string layer_name = this->net_->layer_names()[layer_id];
+    if (layer_name == "loss") {
+      // access the bottom_need_backward entry corresponding to the label blob
+      bool need_back = this->net_->bottom_need_backward()[layer_id][1];
+      // if propagate_down is false, the loss layer will not try to
+      // backpropagate on labels
+      EXPECT_FALSE(need_back) << "bottom_need_backward should be False";
+    }
+    // layer_need_backward should be False except for innerproduct and
+    // loss layers
+    if (layer_name == "innerproduct" || layer_name == "loss") {
+      EXPECT_TRUE(vec_layer_need_backward[layer_id])
+          << "layer_need_backward for " << layer_name << " should be True";
+    } else {
+      EXPECT_FALSE(vec_layer_need_backward[layer_id])
+          << "layer_need_backward for " << layer_name << " should be False";
+    }
+  }
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_neuron_layer.cpp b/src/caffe/test/test_neuron_layer.cpp
new file mode 100644
index 0000000..c6e4d27
--- /dev/null
+++ b/src/caffe/test/test_neuron_layer.cpp
@@ -0,0 +1,842 @@
+#include <algorithm>
+#include <cstring>
+#include <vector>
+
+#include "google/protobuf/text_format.h"
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class NeuronLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  NeuronLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 4, 5)),
+        blob_top_(new Blob<Dtype>()) {
+    Caffe::set_random_seed(1701);
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~NeuronLayerTest() { delete blob_bottom_; delete blob_top_; }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+
+  void TestDropoutForward(const float dropout_ratio) {
+    LayerParameter layer_param;
+    // Fill in the given dropout_ratio, unless it's 0.5, in which case we don't
+    // set it explicitly to test that 0.5 is the default.
+    if (dropout_ratio != 0.5) {
+      layer_param.mutable_dropout_param()->set_dropout_ratio(dropout_ratio);
+    }
+    DropoutLayer<Dtype> layer(layer_param);
+    layer_param.set_phase(TRAIN);
+    layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+    layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    // Now, check values
+    const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+    const Dtype* top_data = this->blob_top_->cpu_data();
+    float scale = 1. / (1. - layer_param.dropout_param().dropout_ratio());
+    const int count = this->blob_bottom_->count();
+    // Initialize num_kept to count the number of inputs NOT dropped out.
+    int num_kept = 0;
+    for (int i = 0; i < count; ++i) {
+      if (top_data[i] != 0) {
+        ++num_kept;
+        EXPECT_EQ(top_data[i], bottom_data[i] * scale);
+      }
+    }
+    const Dtype std_error = sqrt(dropout_ratio * (1 - dropout_ratio) / count);
+    // Fail if the empirical dropout ratio is more than 1.96 * std_error away
+    // from the requested dropout_ratio -- i.e. the test only fails when we are
+    // ~95% confident that the layer is not obeying the given dropout_ratio.
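+    // Worked example (illustrative numbers, not part of the test): with
+    // dropout_ratio = 0.5 and count = 2*3*4*5 = 120, std_error =
+    // sqrt(0.25 / 120) ~= 0.0456, so the empirical ratio may deviate from
+    // 0.5 by at most ~0.089 before the test fails.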
+    const Dtype empirical_dropout_ratio = 1 - num_kept / Dtype(count);
+    EXPECT_NEAR(empirical_dropout_ratio, dropout_ratio, 1.96 * std_error);
+  }
+
+  void TestExpForward(const float base, const float scale, const float shift) {
+    LayerParameter layer_param;
+    layer_param.mutable_exp_param()->set_base(base);
+    layer_param.mutable_exp_param()->set_scale(scale);
+    layer_param.mutable_exp_param()->set_shift(shift);
+    ExpLayer<Dtype> layer(layer_param);
+    layer.SetUp(blob_bottom_vec_, blob_top_vec_);
+    layer.Forward(blob_bottom_vec_, blob_top_vec_);
+    const Dtype kDelta = 2e-4;
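+    // Example with assumed inputs: for base = 2, scale = 3, shift = 1 and a
+    // bottom value of 0.5, the expected top value is pow(2, 1 + 3 * 0.5)
+    // = 2^2.5 ~= 5.657.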
+    const Dtype* bottom_data = blob_bottom_->cpu_data();
+    const Dtype* top_data = blob_top_->cpu_data();
+    for (int i = 0; i < blob_bottom_->count(); ++i) {
+      const Dtype bottom_val = bottom_data[i];
+      const Dtype top_val = top_data[i];
+      if (base == -1) {
+        EXPECT_NEAR(top_val, exp(shift + scale * bottom_val), kDelta);
+      } else {
+        EXPECT_NEAR(top_val, pow(base, shift + scale * bottom_val), kDelta);
+      }
+    }
+  }
+
+  void TestExpGradient(const float base, const float scale, const float shift) {
+    LayerParameter layer_param;
+    layer_param.mutable_exp_param()->set_base(base);
+    layer_param.mutable_exp_param()->set_scale(scale);
+    layer_param.mutable_exp_param()->set_shift(shift);
+    ExpLayer<Dtype> layer(layer_param);
+    GradientChecker<Dtype> checker(1e-2, 1e-3);
+    checker.CheckGradientEltwise(&layer, blob_bottom_vec_, blob_top_vec_);
+  }
+
+  void TestPReLU(PReLULayer<Dtype> *layer) {
+    layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    // Now, check values
+    const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+    const Dtype* top_data = this->blob_top_->cpu_data();
+    const Dtype* slope_data = layer->blobs()[0]->cpu_data();
+    int hw = this->blob_bottom_->height() * this->blob_bottom_->width();
+    int channels = this->blob_bottom_->channels();
+    bool channel_shared = layer->layer_param().prelu_param().channel_shared();
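+    // Illustrative case (assumed values): for bottom_data[i] = -2 and
+    // slope_data[c] = 0.25, the expected PReLU output is
+    // max(-2, 0) + 0.25 * min(-2, 0) = -0.5.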
+    for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+      int c = channel_shared ? 0 : (i / hw) % channels;
+      EXPECT_EQ(top_data[i],
+          std::max(bottom_data[i], (Dtype)(0))
+          + slope_data[c] * std::min(bottom_data[i], (Dtype)(0)));
+    }
+  }
+
+  void LogBottomInit() {
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    Dtype* bottom_data = this->blob_bottom_->mutable_cpu_data();
+    caffe_exp(this->blob_bottom_->count(), bottom_data, bottom_data);
+  }
+
+  void TestLogForward(const float base, const float scale, const float shift) {
+    LogBottomInit();
+    LayerParameter layer_param;
+    layer_param.mutable_log_param()->set_base(base);
+    layer_param.mutable_log_param()->set_scale(scale);
+    layer_param.mutable_log_param()->set_shift(shift);
+    LogLayer<Dtype> layer(layer_param);
+    layer.SetUp(blob_bottom_vec_, blob_top_vec_);
+    layer.Forward(blob_bottom_vec_, blob_top_vec_);
+    const Dtype kDelta = 2e-4;
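+    // Example with assumed inputs: for base = 2, scale = 3, shift = 1 and a
+    // bottom value of 1, the expected top value is log(1 + 3 * 1) / log(2) = 2.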
+    const Dtype* bottom_data = blob_bottom_->cpu_data();
+    const Dtype* top_data = blob_top_->cpu_data();
+    for (int i = 0; i < blob_bottom_->count(); ++i) {
+      const Dtype bottom_val = bottom_data[i];
+      const Dtype top_val = top_data[i];
+      if (base == -1) {
+        EXPECT_NEAR(top_val, log(shift + scale * bottom_val), kDelta);
+      } else {
+        EXPECT_NEAR(top_val, log(shift + scale * bottom_val) / log(base),
+                    kDelta);
+      }
+    }
+  }
+
+  void TestLogGradient(const float base, const float scale, const float shift) {
+    LogBottomInit();
+    LayerParameter layer_param;
+    layer_param.mutable_log_param()->set_base(base);
+    layer_param.mutable_log_param()->set_scale(scale);
+    layer_param.mutable_log_param()->set_shift(shift);
+    LogLayer<Dtype> layer(layer_param);
+    GradientChecker<Dtype> checker(1e-2, 1e-2);
+    checker.CheckGradientEltwise(&layer, blob_bottom_vec_, blob_top_vec_);
+  }
+};
+
+TYPED_TEST_CASE(NeuronLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(NeuronLayerTest, TestAbsVal) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  AbsValLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+  const Dtype* top_data    = this->blob_top_->cpu_data();
+  const int count = this->blob_bottom_->count();
+  for (int i = 0; i < count; ++i) {
+    EXPECT_EQ(top_data[i], fabs(bottom_data[i]));
+  }
+}
+
+TYPED_TEST(NeuronLayerTest, TestAbsGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  AbsValLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestReLU) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ReLULayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_GE(top_data[i], 0.);
+    EXPECT_TRUE(top_data[i] == 0 || top_data[i] == bottom_data[i]);
+  }
+}
+
+TYPED_TEST(NeuronLayerTest, TestReLUGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ReLULayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestReLUWithNegativeSlope) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  CHECK(google::protobuf::TextFormat::ParseFromString(
+      "relu_param { negative_slope: 0.01 }", &layer_param));
+  ReLULayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    if (top_data[i] >= 0) {
+      EXPECT_FLOAT_EQ(top_data[i], bottom_data[i]);
+    } else {
+      EXPECT_FLOAT_EQ(top_data[i], bottom_data[i] * 0.01);
+    }
+  }
+}
+
+TYPED_TEST(NeuronLayerTest, TestReLUGradientWithNegativeSlope) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  CHECK(google::protobuf::TextFormat::ParseFromString(
+      "relu_param { negative_slope: 0.01 }", &layer_param));
+  ReLULayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestSigmoid) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  SigmoidLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
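+  // (Illustrative: sigmoid(0) = 0.5, and the output approaches 0 or 1 for
+  // large negative or positive inputs, hence the range checks below.)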
+  const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_FLOAT_EQ(top_data[i], 1. / (1 + exp(-bottom_data[i])));
+    // check that we squashed the value between 0 and 1
+    EXPECT_GE(top_data[i], 0.);
+    EXPECT_LE(top_data[i], 1.);
+  }
+}
+
+TYPED_TEST(NeuronLayerTest, TestSigmoidGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  SigmoidLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestTanH) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  TanHLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Test exact values
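+  // (Illustrative: tanh(0) = 0 and tanh(1) = (e^2 - 1) / (e^2 + 1) ~= 0.7616,
+  // the same formula the loop below checks against.)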
+  for (int i = 0; i < this->blob_bottom_->num(); ++i) {
+    for (int j = 0; j < this->blob_bottom_->channels(); ++j) {
+      for (int k = 0; k < this->blob_bottom_->height(); ++k) {
+        for (int l = 0; l < this->blob_bottom_->width(); ++l) {
+          EXPECT_GE(this->blob_top_->data_at(i, j, k, l) + 1e-4,
+             (exp(2*this->blob_bottom_->data_at(i, j, k, l)) - 1) /
+             (exp(2*this->blob_bottom_->data_at(i, j, k, l)) + 1));
+          EXPECT_LE(this->blob_top_->data_at(i, j, k, l) - 1e-4,
+             (exp(2*this->blob_bottom_->data_at(i, j, k, l)) - 1) /
+             (exp(2*this->blob_bottom_->data_at(i, j, k, l)) + 1));
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST(NeuronLayerTest, TestTanHGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  TanHLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestExpLayer) {
+  typedef typename TypeParam::Dtype Dtype;
+  // Test default base of "-1" -- should actually set base := e.
+  const Dtype kBase = -1;
+  const Dtype kScale = 1;
+  const Dtype kShift = 0;
+  this->TestExpForward(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestExpGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  // Test default base of "-1" -- should actually set base := e.
+  const Dtype kBase = -1;
+  const Dtype kScale = 1;
+  const Dtype kShift = 0;
+  this->TestExpGradient(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestExpLayerBase2) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 1;
+  const Dtype kShift = 0;
+  this->TestExpForward(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestExpGradientBase2) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 1;
+  const Dtype kShift = 0;
+  this->TestExpGradient(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestExpLayerBase2Shift1) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 1;
+  const Dtype kShift = 1;
+  this->TestExpForward(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestExpGradientBase2Shift1) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 1;
+  const Dtype kShift = 1;
+  this->TestExpGradient(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestExpLayerBase2Scale3) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 3;
+  const Dtype kShift = 0;
+  this->TestExpForward(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestExpGradientBase2Scale3) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 3;
+  const Dtype kShift = 0;
+  this->TestExpGradient(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestExpLayerBase2Shift1Scale3) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 3;
+  const Dtype kShift = 1;
+  this->TestExpForward(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestExpGradientBase2Shift1Scale3) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 3;
+  const Dtype kShift = 1;
+  this->TestExpGradient(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestLogLayer) {
+  typedef typename TypeParam::Dtype Dtype;
+  // Test default base of "-1" -- should actually set base := e.
+  const Dtype kBase = -1;
+  const Dtype kScale = 1;
+  const Dtype kShift = 0;
+  this->TestLogForward(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestLogGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  // Test default base of "-1" -- should actually set base := e.
+  const Dtype kBase = -1;
+  const Dtype kScale = 1;
+  const Dtype kShift = 0;
+  this->TestLogGradient(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestLogLayerBase2) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 1;
+  const Dtype kShift = 0;
+  this->TestLogForward(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestLogGradientBase2) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 1;
+  const Dtype kShift = 0;
+  this->TestLogGradient(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestLogLayerBase2Shift1) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 1;
+  const Dtype kShift = 1;
+  this->TestLogForward(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestLogGradientBase2Shift1) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 1;
+  const Dtype kShift = 1;
+  this->TestLogGradient(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestLogLayerBase2Scale3) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 3;
+  const Dtype kShift = 0;
+  this->TestLogForward(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestLogGradientBase2Scale3) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 3;
+  const Dtype kShift = 0;
+  this->TestLogGradient(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestLogLayerBase2Shift1Scale3) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 3;
+  const Dtype kShift = 1;
+  this->TestLogForward(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestLogGradientBase2Shift1Scale3) {
+  typedef typename TypeParam::Dtype Dtype;
+  const Dtype kBase = 2;
+  const Dtype kScale = 3;
+  const Dtype kShift = 1;
+  this->TestLogGradient(kBase, kScale, kShift);
+}
+
+TYPED_TEST(NeuronLayerTest, TestDropoutHalf) {
+  const float kDropoutRatio = 0.5;
+  this->TestDropoutForward(kDropoutRatio);
+}
+
+TYPED_TEST(NeuronLayerTest, TestDropoutThreeQuarters) {
+  const float kDropoutRatio = 0.75;
+  this->TestDropoutForward(kDropoutRatio);
+}
+
+TYPED_TEST(NeuronLayerTest, TestDropoutTestPhase) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.set_phase(TEST);
+  DropoutLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    if (top_data[i] != 0) {
+      EXPECT_EQ(top_data[i], bottom_data[i]);
+    }
+  }
+}
+
+TYPED_TEST(NeuronLayerTest, TestDropoutGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.set_phase(TRAIN);
+  DropoutLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestDropoutGradientTest) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.set_phase(TEST);
+  DropoutLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestBNLL) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  BNLLLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
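+  // BNLL computes y = log(1 + exp(x)), so every output is nonnegative and at
+  // least as large as the corresponding input, which is what the two checks
+  // below assert.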
+  const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_GE(top_data[i], 0.);
+    EXPECT_GE(top_data[i], bottom_data[i]);
+  }
+}
+
+TYPED_TEST(NeuronLayerTest, TestBNLLGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  BNLLLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestPReLUParam) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  PReLULayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  const Dtype* slopes = layer.blobs()[0]->cpu_data();
+  int count = layer.blobs()[0]->count();
+  for (int i = 0; i < count; ++i, ++slopes) {
+    EXPECT_EQ(*slopes, 0.25);
+  }
+}
+
+TYPED_TEST(NeuronLayerTest, TestPReLUForward) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  PReLULayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  FillerParameter filler_param;
+  GaussianFiller<Dtype> filler(filler_param);
+  filler.Fill(layer.blobs()[0].get());
+  this->TestPReLU(&layer);
+}
+
+TYPED_TEST(NeuronLayerTest, TestPReLUForwardChannelShared) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_prelu_param()->set_channel_shared(true);
+  PReLULayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  this->TestPReLU(&layer);
+}
+
+TYPED_TEST(NeuronLayerTest, TestPReLUGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  PReLULayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  FillerParameter filler_param;
+  GaussianFiller<Dtype> filler(filler_param);
+  filler.Fill(layer.blobs()[0].get());
+  GradientChecker<Dtype> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestPReLUGradientChannelShared) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_prelu_param()->set_channel_shared(true);
+  PReLULayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  GradientChecker<Dtype> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestPReLUConsistencyReLU) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter prelu_layer_param;
+  LayerParameter relu_layer_param;
+  relu_layer_param.mutable_relu_param()->set_negative_slope(0.25);
+  PReLULayer<Dtype> prelu(prelu_layer_param);
+  ReLULayer<Dtype> relu(relu_layer_param);
+  // Set up blobs
+  vector<Blob<Dtype>*> blob_bottom_vec_2;
+  vector<Blob<Dtype>*> blob_top_vec_2;
+  shared_ptr<Blob<Dtype> > blob_bottom_2(new Blob<Dtype>());
+  shared_ptr<Blob<Dtype> > blob_top_2(new Blob<Dtype>());
+  blob_bottom_vec_2.push_back(blob_bottom_2.get());
+  blob_top_vec_2.push_back(blob_top_2.get());
+  blob_bottom_2->CopyFrom(*this->blob_bottom_, false, true);
+  // SetUp layers
+  prelu.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  relu.SetUp(blob_bottom_vec_2, blob_top_vec_2);
+  // Check forward
+  prelu.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  relu.Forward(this->blob_bottom_vec_, blob_top_vec_2);
+  for (int s = 0; s < blob_top_2->count(); ++s) {
+    EXPECT_EQ(this->blob_top_->cpu_data()[s], blob_top_2->cpu_data()[s]);
+  }
+  // Check backward
+  shared_ptr<Blob<Dtype> > tmp_blob(new Blob<Dtype>());
+  tmp_blob->ReshapeLike(*blob_top_2.get());
+  FillerParameter filler_param;
+  GaussianFiller<Dtype> filler(filler_param);
+  filler.Fill(tmp_blob.get());
+  caffe_copy(blob_top_2->count(), tmp_blob->cpu_data(),
+      this->blob_top_->mutable_cpu_diff());
+  caffe_copy(blob_top_2->count(), tmp_blob->cpu_data(),
+      blob_top_2->mutable_cpu_diff());
+  vector<bool> propagate_down;
+  propagate_down.push_back(true);
+  prelu.Backward(this->blob_top_vec_, propagate_down, this->blob_bottom_vec_);
+  relu.Backward(blob_top_vec_2, propagate_down, blob_bottom_vec_2);
+  for (int s = 0; s < blob_bottom_2->count(); ++s) {
+    EXPECT_EQ(this->blob_bottom_->cpu_diff()[s], blob_bottom_2->cpu_diff()[s]);
+  }
+}
+
+TYPED_TEST(NeuronLayerTest, TestPReLUInPlace) {
+  typedef typename TypeParam::Dtype Dtype;
+  // Set layer parameters
+  LayerParameter ip_layer_param;
+  LayerParameter prelu_layer_param;
+  InnerProductParameter *ip_param =
+      ip_layer_param.mutable_inner_product_param();
+  ip_param->mutable_weight_filler()->set_type("gaussian");
+  ip_param->set_num_output(3);
+  InnerProductLayer<Dtype> ip(ip_layer_param);
+  PReLULayer<Dtype> prelu(prelu_layer_param);
+  InnerProductLayer<Dtype> ip2(ip_layer_param);
+  PReLULayer<Dtype> prelu2(prelu_layer_param);
+  // Set up blobs
+  vector<Blob<Dtype>*> blob_bottom_vec_2;
+  vector<Blob<Dtype>*> blob_middle_vec_2;
+  vector<Blob<Dtype>*> blob_top_vec_2;
+  shared_ptr<Blob<Dtype> > blob_bottom_2(new Blob<Dtype>());
+  shared_ptr<Blob<Dtype> > blob_middle_2(new Blob<Dtype>());
+  shared_ptr<Blob<Dtype> > blob_top_2(new Blob<Dtype>());
+  blob_bottom_vec_2.push_back(blob_bottom_2.get());
+  blob_middle_vec_2.push_back(blob_middle_2.get());
+  blob_top_vec_2.push_back(blob_top_2.get());
+  blob_bottom_2->CopyFrom(*this->blob_bottom_, false, true);
+  // SetUp layers
+  ip.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  prelu.SetUp(this->blob_top_vec_, this->blob_top_vec_);
+  ip2.SetUp(blob_bottom_vec_2, blob_middle_vec_2);
+  prelu2.SetUp(blob_middle_vec_2, blob_top_vec_2);
+  caffe_copy(ip2.blobs()[0]->count(), ip.blobs()[0]->cpu_data(),
+      ip2.blobs()[0]->mutable_cpu_data());
+  // Forward in-place
+  ip.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  prelu.Forward(this->blob_top_vec_, this->blob_top_vec_);
+  // Forward non-in-place
+  ip2.Forward(blob_bottom_vec_2, blob_middle_vec_2);
+  prelu2.Forward(blob_middle_vec_2, blob_top_vec_2);
+  // Check numbers
+  for (int s = 0; s < blob_top_2->count(); ++s) {
+    EXPECT_EQ(this->blob_top_->cpu_data()[s], blob_top_2->cpu_data()[s]);
+  }
+  // Fill top diff with random numbers
+  shared_ptr<Blob<Dtype> > tmp_blob(new Blob<Dtype>());
+  tmp_blob->ReshapeLike(*blob_top_2.get());
+  FillerParameter filler_param;
+  GaussianFiller<Dtype> filler(filler_param);
+  filler.Fill(tmp_blob.get());
+  caffe_copy(blob_top_2->count(), tmp_blob->cpu_data(),
+      this->blob_top_->mutable_cpu_diff());
+  caffe_copy(blob_top_2->count(), tmp_blob->cpu_data(),
+      blob_top_2->mutable_cpu_diff());
+  // Backward in-place
+  vector<bool> propagate_down;
+  propagate_down.push_back(true);
+  prelu.Backward(this->blob_top_vec_, propagate_down, this->blob_top_vec_);
+  ip.Backward(this->blob_top_vec_, propagate_down, this->blob_bottom_vec_);
+  // Backward non-in-place
+  prelu2.Backward(blob_top_vec_2, propagate_down, blob_middle_vec_2);
+  ip2.Backward(blob_middle_vec_2, propagate_down, blob_bottom_vec_2);
+  // Check numbers
+  for (int s = 0; s < blob_bottom_2->count(); ++s) {
+    EXPECT_EQ(this->blob_bottom_->cpu_diff()[s], blob_bottom_2->cpu_diff()[s]);
+  }
+  for (int s = 0; s < ip.blobs()[0]->count(); ++s) {
+    EXPECT_EQ(ip.blobs()[0]->cpu_diff()[s], ip2.blobs()[0]->cpu_diff()[s]);
+  }
+  for (int s = 0; s < ip.blobs()[1]->count(); ++s) {
+    EXPECT_EQ(ip.blobs()[1]->cpu_diff()[s], ip2.blobs()[1]->cpu_diff()[s]);
+  }
+  for (int s = 0; s < prelu.blobs()[0]->count(); ++s) {
+    EXPECT_EQ(prelu.blobs()[0]->cpu_diff()[s],
+        prelu2.blobs()[0]->cpu_diff()[s]);
+  }
+}
+
+#ifdef USE_CUDNN
+template <typename Dtype>
+class CuDNNNeuronLayerTest : public GPUDeviceTest<Dtype> {
+ protected:
+  CuDNNNeuronLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 4, 5)),
+        blob_top_(new Blob<Dtype>()) {
+    Caffe::set_random_seed(1701);
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~CuDNNNeuronLayerTest() { delete blob_bottom_; delete blob_top_; }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(CuDNNNeuronLayerTest, TestDtypes);
+
+TYPED_TEST(CuDNNNeuronLayerTest, TestReLUCuDNN) {
+  LayerParameter layer_param;
+  CuDNNReLULayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
+  const TypeParam* top_data = this->blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_GE(top_data[i], 0.);
+    EXPECT_TRUE(top_data[i] == 0 || top_data[i] == bottom_data[i]);
+  }
+}
+
+TYPED_TEST(CuDNNNeuronLayerTest, TestReLUGradientCuDNN) {
+  LayerParameter layer_param;
+  CuDNNReLULayer<TypeParam> layer(layer_param);
+  GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(CuDNNNeuronLayerTest, TestReLUWithNegativeSlopeCuDNN) {
+  LayerParameter layer_param;
+  CHECK(google::protobuf::TextFormat::ParseFromString(
+      "relu_param { negative_slope: 0.01 }", &layer_param));
+  CuDNNReLULayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
+  const TypeParam* top_data = this->blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    if (top_data[i] >= 0) {
+      EXPECT_FLOAT_EQ(top_data[i], bottom_data[i]);
+    } else {
+      EXPECT_FLOAT_EQ(top_data[i], bottom_data[i] * 0.01);
+    }
+  }
+}
+
+TYPED_TEST(CuDNNNeuronLayerTest, TestReLUGradientWithNegativeSlopeCuDNN) {
+  LayerParameter layer_param;
+  CHECK(google::protobuf::TextFormat::ParseFromString(
+      "relu_param { negative_slope: 0.01 }", &layer_param));
+  CuDNNReLULayer<TypeParam> layer(layer_param);
+  GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(CuDNNNeuronLayerTest, TestSigmoidCuDNN) {
+  LayerParameter layer_param;
+  CuDNNSigmoidLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
+  const TypeParam* top_data = this->blob_top_->cpu_data();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_FLOAT_EQ(top_data[i], 1. / (1 + exp(-bottom_data[i])));
+    // check that we squashed the value between 0 and 1
+    EXPECT_GE(top_data[i], 0.);
+    EXPECT_LE(top_data[i], 1.);
+  }
+}
+
+TYPED_TEST(CuDNNNeuronLayerTest, TestSigmoidGradientCuDNN) {
+  LayerParameter layer_param;
+  CuDNNSigmoidLayer<TypeParam> layer(layer_param);
+  GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(CuDNNNeuronLayerTest, TestTanHCuDNN) {
+  LayerParameter layer_param;
+  CuDNNTanHLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Test exact values
+  for (int i = 0; i < this->blob_bottom_->num(); ++i) {
+    for (int j = 0; j < this->blob_bottom_->channels(); ++j) {
+      for (int k = 0; k < this->blob_bottom_->height(); ++k) {
+        for (int l = 0; l < this->blob_bottom_->width(); ++l) {
+          EXPECT_GE(this->blob_top_->data_at(i, j, k, l) + 1e-4,
+             (exp(2*this->blob_bottom_->data_at(i, j, k, l)) - 1) /
+             (exp(2*this->blob_bottom_->data_at(i, j, k, l)) + 1));
+          EXPECT_LE(this->blob_top_->data_at(i, j, k, l) - 1e-4,
+             (exp(2*this->blob_bottom_->data_at(i, j, k, l)) - 1) /
+             (exp(2*this->blob_bottom_->data_at(i, j, k, l)) + 1));
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST(CuDNNNeuronLayerTest, TestTanHGradientCuDNN) {
+  LayerParameter layer_param;
+  CuDNNTanHLayer<TypeParam> layer(layer_param);
+  GradientChecker<TypeParam> checker(1e-2, 1e-3);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+#endif
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_platform.cpp b/src/caffe/test/test_platform.cpp
new file mode 100644
index 0000000..f3513e0
--- /dev/null
+++ b/src/caffe/test/test_platform.cpp
@@ -0,0 +1,57 @@
+#ifndef CPU_ONLY
+
+#include <cstdio>
+#include <cstdlib>
+
+#include "glog/logging.h"
+#include "gtest/gtest.h"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+extern cudaDeviceProp CAFFE_TEST_CUDA_PROP;
+
+class PlatformTest : public ::testing::Test {};
+
+TEST_F(PlatformTest, TestInitialization) {
+  printf("Major revision number:         %d\n",  CAFFE_TEST_CUDA_PROP.major);
+  printf("Minor revision number:         %d\n",  CAFFE_TEST_CUDA_PROP.minor);
+  printf("Name:                          %s\n",  CAFFE_TEST_CUDA_PROP.name);
+  printf("Total global memory:           %lu\n",
+         CAFFE_TEST_CUDA_PROP.totalGlobalMem);
+  printf("Total shared memory per block: %lu\n",
+         CAFFE_TEST_CUDA_PROP.sharedMemPerBlock);
+  printf("Total registers per block:     %d\n",
+         CAFFE_TEST_CUDA_PROP.regsPerBlock);
+  printf("Warp size:                     %d\n",
+         CAFFE_TEST_CUDA_PROP.warpSize);
+  printf("Maximum memory pitch:          %lu\n",
+         CAFFE_TEST_CUDA_PROP.memPitch);
+  printf("Maximum threads per block:     %d\n",
+         CAFFE_TEST_CUDA_PROP.maxThreadsPerBlock);
+  for (int i = 0; i < 3; ++i)
+    printf("Maximum dimension %d of block:  %d\n", i,
+           CAFFE_TEST_CUDA_PROP.maxThreadsDim[i]);
+  for (int i = 0; i < 3; ++i)
+    printf("Maximum dimension %d of grid:   %d\n", i,
+           CAFFE_TEST_CUDA_PROP.maxGridSize[i]);
+  printf("Clock rate:                    %d\n", CAFFE_TEST_CUDA_PROP.clockRate);
+  printf("Total constant memory:         %lu\n",
+         CAFFE_TEST_CUDA_PROP.totalConstMem);
+  printf("Texture alignment:             %lu\n",
+         CAFFE_TEST_CUDA_PROP.textureAlignment);
+  printf("Concurrent copy and execution: %s\n",
+         (CAFFE_TEST_CUDA_PROP.deviceOverlap ? "Yes" : "No"));
+  printf("Number of multiprocessors:     %d\n",
+         CAFFE_TEST_CUDA_PROP.multiProcessorCount);
+  printf("Kernel execution timeout:      %s\n",
+         (CAFFE_TEST_CUDA_PROP.kernelExecTimeoutEnabled ? "Yes" : "No"));
+  printf("Unified virtual addressing:    %s\n",
+         (CAFFE_TEST_CUDA_PROP.unifiedAddressing ? "Yes" : "No"));
+  EXPECT_TRUE(true);
+}
+
+}  // namespace caffe
+
+#endif  // CPU_ONLY
diff --git a/src/caffe/test/test_pooling_layer.cpp b/src/caffe/test/test_pooling_layer.cpp
new file mode 100644
index 0000000..69f2d5c
--- /dev/null
+++ b/src/caffe/test/test_pooling_layer.cpp
@@ -0,0 +1,1182 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class PoolingLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  PoolingLayerTest()
+      : blob_bottom_(new Blob<Dtype>()),
+        blob_top_(new Blob<Dtype>()),
+        blob_top_mask_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    Caffe::set_random_seed(1701);
+    blob_bottom_->Reshape(2, 3, 6, 5);
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~PoolingLayerTest() {
+    delete blob_bottom_;
+    delete blob_top_;
+    delete blob_top_mask_;
+  }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  Blob<Dtype>* const blob_top_mask_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+  // Test for 2x 2 square pooling layer
+  void TestForwardSquare() {
+    LayerParameter layer_param;
+    PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+    pooling_param->set_kernel_size(2);
+    pooling_param->set_pool(PoolingParameter_PoolMethod_MAX);
+    const int num = 2;
+    const int channels = 2;
+    blob_bottom_->Reshape(num, channels, 3, 5);
+    // Input: 2x 2 channels of:
+    //     [1 2 5 2 3]
+    //     [9 4 1 4 8]
+    //     [1 2 5 2 3]
+    for (int i = 0; i < 15 * num * channels; i += 15) {
+      blob_bottom_->mutable_cpu_data()[i +  0] = 1;
+      blob_bottom_->mutable_cpu_data()[i +  1] = 2;
+      blob_bottom_->mutable_cpu_data()[i +  2] = 5;
+      blob_bottom_->mutable_cpu_data()[i +  3] = 2;
+      blob_bottom_->mutable_cpu_data()[i +  4] = 3;
+      blob_bottom_->mutable_cpu_data()[i +  5] = 9;
+      blob_bottom_->mutable_cpu_data()[i +  6] = 4;
+      blob_bottom_->mutable_cpu_data()[i +  7] = 1;
+      blob_bottom_->mutable_cpu_data()[i +  8] = 4;
+      blob_bottom_->mutable_cpu_data()[i +  9] = 8;
+      blob_bottom_->mutable_cpu_data()[i + 10] = 1;
+      blob_bottom_->mutable_cpu_data()[i + 11] = 2;
+      blob_bottom_->mutable_cpu_data()[i + 12] = 5;
+      blob_bottom_->mutable_cpu_data()[i + 13] = 2;
+      blob_bottom_->mutable_cpu_data()[i + 14] = 3;
+    }
+    PoolingLayer<Dtype> layer(layer_param);
+    layer.SetUp(blob_bottom_vec_, blob_top_vec_);
+    EXPECT_EQ(blob_top_->num(), num);
+    EXPECT_EQ(blob_top_->channels(), channels);
+    EXPECT_EQ(blob_top_->height(), 2);
+    EXPECT_EQ(blob_top_->width(), 4);
+    if (blob_top_vec_.size() > 1) {
+      EXPECT_EQ(blob_top_mask_->num(), num);
+      EXPECT_EQ(blob_top_mask_->channels(), channels);
+      EXPECT_EQ(blob_top_mask_->height(), 2);
+      EXPECT_EQ(blob_top_mask_->width(), 4);
+    }
+    layer.Forward(blob_bottom_vec_, blob_top_vec_);
+    // Expected output: 2x 2 channels of:
+    //     [9 5 5 8]
+    //     [9 5 5 8]
+    for (int i = 0; i < 8 * num * channels; i += 8) {
+      EXPECT_EQ(blob_top_->cpu_data()[i + 0], 9);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 1], 5);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 2], 5);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 3], 8);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 4], 9);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 5], 5);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 6], 5);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 7], 8);
+    }
+    if (blob_top_vec_.size() > 1) {
+      // Expected mask output: 2x 2 channels of:
+      //     [5  2  2 9]
+      //     [5 12 12 9]
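+      // (These are 0-based flat offsets into each 3x5 channel, e.g. the max
+      // value 9 at row 1, col 0 has offset 1 * 5 + 0 = 5.)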
+      for (int i = 0; i < 8 * num * channels; i += 8) {
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 0],  5);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 1],  2);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 2],  2);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 3],  9);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 4],  5);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 5], 12);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 6], 12);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 7],  9);
+      }
+    }
+  }
+  // Test for 3x 2 rectangular pooling layer with kernel_h > kernel_w
+  void TestForwardRectHigh() {
+    LayerParameter layer_param;
+    PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+    pooling_param->set_kernel_h(3);
+    pooling_param->set_kernel_w(2);
+    pooling_param->set_pool(PoolingParameter_PoolMethod_MAX);
+    const int num = 2;
+    const int channels = 2;
+    blob_bottom_->Reshape(num, channels, 6, 6);
+    // Input: 2x 2 channels of:
+    // [35     1     6    26    19    24]
+    // [ 3    32     7    21    23    25]
+    // [31     9     2    22    27    20]
+    // [ 8    28    33    17    10    15]
+    // [30     5    34    12    14    16]
+    // [ 4    36    29    13    18    11]
+    // (this is generated by magic(6) in MATLAB)
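+    // (magic(6) is MATLAB's 6x6 magic square: every row, column and diagonal
+    // sums to 111.)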
+    for (int i = 0; i < 36 * num * channels; i += 36) {
+      blob_bottom_->mutable_cpu_data()[i +  0] = 35;
+      blob_bottom_->mutable_cpu_data()[i +  1] = 1;
+      blob_bottom_->mutable_cpu_data()[i +  2] = 6;
+      blob_bottom_->mutable_cpu_data()[i +  3] = 26;
+      blob_bottom_->mutable_cpu_data()[i +  4] = 19;
+      blob_bottom_->mutable_cpu_data()[i +  5] = 24;
+      blob_bottom_->mutable_cpu_data()[i +  6] = 3;
+      blob_bottom_->mutable_cpu_data()[i +  7] = 32;
+      blob_bottom_->mutable_cpu_data()[i +  8] = 7;
+      blob_bottom_->mutable_cpu_data()[i +  9] = 21;
+      blob_bottom_->mutable_cpu_data()[i + 10] = 23;
+      blob_bottom_->mutable_cpu_data()[i + 11] = 25;
+      blob_bottom_->mutable_cpu_data()[i + 12] = 31;
+      blob_bottom_->mutable_cpu_data()[i + 13] = 9;
+      blob_bottom_->mutable_cpu_data()[i + 14] = 2;
+      blob_bottom_->mutable_cpu_data()[i + 15] = 22;
+      blob_bottom_->mutable_cpu_data()[i + 16] = 27;
+      blob_bottom_->mutable_cpu_data()[i + 17] = 20;
+      blob_bottom_->mutable_cpu_data()[i + 18] = 8;
+      blob_bottom_->mutable_cpu_data()[i + 19] = 28;
+      blob_bottom_->mutable_cpu_data()[i + 20] = 33;
+      blob_bottom_->mutable_cpu_data()[i + 21] = 17;
+      blob_bottom_->mutable_cpu_data()[i + 22] = 10;
+      blob_bottom_->mutable_cpu_data()[i + 23] = 15;
+      blob_bottom_->mutable_cpu_data()[i + 24] = 30;
+      blob_bottom_->mutable_cpu_data()[i + 25] = 5;
+      blob_bottom_->mutable_cpu_data()[i + 26] = 34;
+      blob_bottom_->mutable_cpu_data()[i + 27] = 12;
+      blob_bottom_->mutable_cpu_data()[i + 28] = 14;
+      blob_bottom_->mutable_cpu_data()[i + 29] = 16;
+      blob_bottom_->mutable_cpu_data()[i + 30] = 4;
+      blob_bottom_->mutable_cpu_data()[i + 31] = 36;
+      blob_bottom_->mutable_cpu_data()[i + 32] = 29;
+      blob_bottom_->mutable_cpu_data()[i + 33] = 13;
+      blob_bottom_->mutable_cpu_data()[i + 34] = 18;
+      blob_bottom_->mutable_cpu_data()[i + 35] = 11;
+    }
+    PoolingLayer<Dtype> layer(layer_param);
+    layer.SetUp(blob_bottom_vec_, blob_top_vec_);
+    EXPECT_EQ(blob_top_->num(), num);
+    EXPECT_EQ(blob_top_->channels(), channels);
+    EXPECT_EQ(blob_top_->height(), 4);
+    EXPECT_EQ(blob_top_->width(), 5);
+    if (blob_top_vec_.size() > 1) {
+      EXPECT_EQ(blob_top_mask_->num(), num);
+      EXPECT_EQ(blob_top_mask_->channels(), channels);
+      EXPECT_EQ(blob_top_mask_->height(), 4);
+      EXPECT_EQ(blob_top_mask_->width(), 5);
+    }
+    layer.Forward(blob_bottom_vec_, blob_top_vec_);
+    // Expected output: 2x 2 channels of:
+    // [35    32    26    27    27]
+    // [32    33    33    27    27]
+    // [31    34    34    27    27]
+    // [36    36    34    18    18]
+    for (int i = 0; i < 20 * num * channels; i += 20) {
+      EXPECT_EQ(blob_top_->cpu_data()[i +  0], 35);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  1], 32);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  2], 26);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  3], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  4], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  5], 32);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  6], 33);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  7], 33);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  8], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  9], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 10], 31);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 11], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 12], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 13], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 14], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 15], 36);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 16], 36);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 17], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 18], 18);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 19], 18);
+    }
+    if (blob_top_vec_.size() > 1) {
+      // Expected mask output: 2x 2 channels of (0-based offsets into each
+      // 6x6 channel; one less than the 1-based MATLAB positions):
+      //     [ 0     7     3    16    16]
+      //     [ 7    20    20    16    16]
+      //     [12    26    26    16    16]
+      //     [31    31    26    34    34]
+      for (int i = 0; i < 20 * num * channels; i += 20) {
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  0],  0);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  1],  7);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  2],  3);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  3], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  4], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  5],  7);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  6], 20);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  7], 20);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  8], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  9], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 10], 12);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 11], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 12], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 13], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 14], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 15], 31);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 16], 31);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 17], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 18], 34);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 19], 34);
+      }
+    }
+  }
+  // Test for rectangular pooling layer with kernel_w > kernel_h
+  void TestForwardRectWide() {
+    LayerParameter layer_param;
+    PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+    pooling_param->set_kernel_h(2);
+    pooling_param->set_kernel_w(3);
+    pooling_param->set_pool(PoolingParameter_PoolMethod_MAX);
+    const int num = 2;
+    const int channels = 2;
+    blob_bottom_->Reshape(num, channels, 6, 6);
+    // Input: 2x 2 channels of:
+    // [35     1     6    26    19    24]
+    // [ 3    32     7    21    23    25]
+    // [31     9     2    22    27    20]
+    // [ 8    28    33    17    10    15]
+    // [30     5    34    12    14    16]
+    // [ 4    36    29    13    18    11]
+    // (this is generated by magic(6) in MATLAB)
+    for (int i = 0; i < 36 * num * channels; i += 36) {
+      blob_bottom_->mutable_cpu_data()[i +  0] = 35;
+      blob_bottom_->mutable_cpu_data()[i +  1] = 1;
+      blob_bottom_->mutable_cpu_data()[i +  2] = 6;
+      blob_bottom_->mutable_cpu_data()[i +  3] = 26;
+      blob_bottom_->mutable_cpu_data()[i +  4] = 19;
+      blob_bottom_->mutable_cpu_data()[i +  5] = 24;
+      blob_bottom_->mutable_cpu_data()[i +  6] = 3;
+      blob_bottom_->mutable_cpu_data()[i +  7] = 32;
+      blob_bottom_->mutable_cpu_data()[i +  8] = 7;
+      blob_bottom_->mutable_cpu_data()[i +  9] = 21;
+      blob_bottom_->mutable_cpu_data()[i + 10] = 23;
+      blob_bottom_->mutable_cpu_data()[i + 11] = 25;
+      blob_bottom_->mutable_cpu_data()[i + 12] = 31;
+      blob_bottom_->mutable_cpu_data()[i + 13] = 9;
+      blob_bottom_->mutable_cpu_data()[i + 14] = 2;
+      blob_bottom_->mutable_cpu_data()[i + 15] = 22;
+      blob_bottom_->mutable_cpu_data()[i + 16] = 27;
+      blob_bottom_->mutable_cpu_data()[i + 17] = 20;
+      blob_bottom_->mutable_cpu_data()[i + 18] = 8;
+      blob_bottom_->mutable_cpu_data()[i + 19] = 28;
+      blob_bottom_->mutable_cpu_data()[i + 20] = 33;
+      blob_bottom_->mutable_cpu_data()[i + 21] = 17;
+      blob_bottom_->mutable_cpu_data()[i + 22] = 10;
+      blob_bottom_->mutable_cpu_data()[i + 23] = 15;
+      blob_bottom_->mutable_cpu_data()[i + 24] = 30;
+      blob_bottom_->mutable_cpu_data()[i + 25] = 5;
+      blob_bottom_->mutable_cpu_data()[i + 26] = 34;
+      blob_bottom_->mutable_cpu_data()[i + 27] = 12;
+      blob_bottom_->mutable_cpu_data()[i + 28] = 14;
+      blob_bottom_->mutable_cpu_data()[i + 29] = 16;
+      blob_bottom_->mutable_cpu_data()[i + 30] = 4;
+      blob_bottom_->mutable_cpu_data()[i + 31] = 36;
+      blob_bottom_->mutable_cpu_data()[i + 32] = 29;
+      blob_bottom_->mutable_cpu_data()[i + 33] = 13;
+      blob_bottom_->mutable_cpu_data()[i + 34] = 18;
+      blob_bottom_->mutable_cpu_data()[i + 35] = 11;
+    }
+    PoolingLayer<Dtype> layer(layer_param);
+    layer.SetUp(blob_bottom_vec_, blob_top_vec_);
+    EXPECT_EQ(blob_top_->num(), num);
+    EXPECT_EQ(blob_top_->channels(), channels);
+    EXPECT_EQ(blob_top_->height(), 5);
+    EXPECT_EQ(blob_top_->width(), 4);
+    if (blob_top_vec_.size() > 1) {
+      EXPECT_EQ(blob_top_mask_->num(), num);
+      EXPECT_EQ(blob_top_mask_->channels(), channels);
+      EXPECT_EQ(blob_top_mask_->height(), 5);
+      EXPECT_EQ(blob_top_mask_->width(), 4);
+    }
+    layer.Forward(blob_bottom_vec_, blob_top_vec_);
+    // Expected output: 2x 2 channels of:
+    // [35    32    26    26]
+    // [32    32    27    27]
+    // [33    33    33    27]
+    // [34    34    34    17]
+    // [36    36    34    18]
+    for (int i = 0; i < 20 * num * channels; i += 20) {
+      EXPECT_EQ(blob_top_->cpu_data()[i +  0], 35);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  1], 32);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  2], 26);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  3], 26);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  4], 32);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  5], 32);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  6], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  7], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  8], 33);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  9], 33);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 10], 33);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 11], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 12], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 13], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 14], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 15], 17);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 16], 36);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 17], 36);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 18], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 19], 18);
+    }
+    if (blob_top_vec_.size() > 1) {
+      // Expected mask output: 2x 2 channels of (1-based indices; the
+      // checks below use 0-based indices):
+      // [ 1     8     4     4]
+      // [ 8     8    17    17]
+      // [21    21    21    17]
+      // [27    27    27    22]
+      // [32    32    27    35]
+      for (int i = 0; i < 20 * num * channels; i += 20) {
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  0],  0);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  1],  7);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  2],  3);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  3],  3);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  4],  7);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  5],  7);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  6], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  7], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  8], 20);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  9], 20);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 10], 20);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 11], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 12], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 13], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 14], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 15], 21);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 16], 31);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 17], 31);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 18], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 19], 34);
+      }
+    }
+  }
+};
+
+TYPED_TEST_CASE(PoolingLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(PoolingLayerTest, TestSetup) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  PoolingLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
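+  // With the 6x5 bottom from the fixture, kernel 3, and stride 2, the pooled
+  // output is ceil((6 - 3) / 2) + 1 = 3 high and ceil((5 - 3) / 2) + 1 = 2
+  // wide.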
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_->num());
+  EXPECT_EQ(this->blob_top_->channels(), this->blob_bottom_->channels());
+  EXPECT_EQ(this->blob_top_->height(), 3);
+  EXPECT_EQ(this->blob_top_->width(), 2);
+}
+
+TYPED_TEST(PoolingLayerTest, TestSetupPadded) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  pooling_param->set_pad(1);
+  pooling_param->set_pool(PoolingParameter_PoolMethod_AVE);
+  PoolingLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
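+  // Padding by 1 enlarges the pooled output to ceil((6 + 2 - 3) / 2) + 1 = 4
+  // high and ceil((5 + 2 - 3) / 2) + 1 = 3 wide.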
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_->num());
+  EXPECT_EQ(this->blob_top_->channels(), this->blob_bottom_->channels());
+  EXPECT_EQ(this->blob_top_->height(), 4);
+  EXPECT_EQ(this->blob_top_->width(), 3);
+}
+
+TYPED_TEST(PoolingLayerTest, TestSetupGlobalPooling) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_global_pooling(true);
+  pooling_param->set_pool(PoolingParameter_PoolMethod_AVE);
+  PoolingLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
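+  // Global pooling collapses each feature map to a single value, so the
+  // pooled output is 1 x 1 regardless of the bottom's spatial size.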
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_->num());
+  EXPECT_EQ(this->blob_top_->channels(), this->blob_bottom_->channels());
+  EXPECT_EQ(this->blob_top_->height(), 1);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+}
+
+/*
+TYPED_TEST(PoolingLayerTest, PrintBackward) {
+  LayerParameter layer_param;
+  layer_param.set_kernelsize(3);
+  layer_param.set_stride(2);
+  layer_param.set_pool(LayerParameter_PoolMethod_MAX);
+  PoolingLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    cout << "bottom data " << i << " " << this->blob_bottom_->cpu_data()[i] << endl;
+  }
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    cout << "top data " << i << " " << this->blob_top_->cpu_data()[i] << endl;
+  }
+
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    this->blob_top_->mutable_cpu_diff()[i] = i;
+  }
+  layer.Backward(this->blob_top_vec_, true, this->blob_bottom_vec_);
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    cout << "bottom diff " << i << " " << this->blob_bottom_->cpu_diff()[i] << endl;
+  }
+}
+*/
+
+TYPED_TEST(PoolingLayerTest, TestForwardMax) {
+  this->TestForwardSquare();
+  this->TestForwardRectHigh();
+  this->TestForwardRectWide();
+}
+
+TYPED_TEST(PoolingLayerTest, TestForwardMaxTopMask) {
+  this->blob_top_vec_.push_back(this->blob_top_mask_);
+  this->TestForwardSquare();
+  this->TestForwardRectHigh();
+  this->TestForwardRectWide();
+}
+
+TYPED_TEST(PoolingLayerTest, TestGradientMax) {
+  typedef typename TypeParam::Dtype Dtype;
+  for (int kernel_h = 3; kernel_h <= 4; kernel_h++) {
+    for (int kernel_w = 3; kernel_w <= 4; kernel_w++) {
+      LayerParameter layer_param;
+      PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+      pooling_param->set_kernel_h(kernel_h);
+      pooling_param->set_kernel_w(kernel_w);
+      pooling_param->set_stride(2);
+      pooling_param->set_pad(1);
+      pooling_param->set_pool(PoolingParameter_PoolMethod_MAX);
+      PoolingLayer<Dtype> layer(layer_param);
+      GradientChecker<Dtype> checker(1e-4, 1e-2);
+      checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+          this->blob_top_vec_);
+    }
+  }
+}
+
+TYPED_TEST(PoolingLayerTest, TestForwardMaxPadded) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  pooling_param->set_pad(2);
+  pooling_param->set_pool(PoolingParameter_PoolMethod_MAX);
+  this->blob_bottom_->Reshape(1, 1, 3, 3);
+  // Input:
+  //     [ 1 2 4 ]
+  //     [ 2 3 2 ]
+  //     [ 4 2 1 ]
+  this->blob_bottom_->mutable_cpu_data()[0] = 1;
+  this->blob_bottom_->mutable_cpu_data()[1] = 2;
+  this->blob_bottom_->mutable_cpu_data()[2] = 4;
+  this->blob_bottom_->mutable_cpu_data()[3] = 2;
+  this->blob_bottom_->mutable_cpu_data()[4] = 3;
+  this->blob_bottom_->mutable_cpu_data()[5] = 2;
+  this->blob_bottom_->mutable_cpu_data()[6] = 4;
+  this->blob_bottom_->mutable_cpu_data()[7] = 2;
+  this->blob_bottom_->mutable_cpu_data()[8] = 1;
+  PoolingLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
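+  // With a 3x3 bottom, kernel 3, stride 2, and pad 2, the pooled output is
+  // ceil((3 + 2 * 2 - 3) / 2) + 1 = 3 in each spatial dimension.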
+  EXPECT_EQ(this->blob_top_->num(), 1);
+  EXPECT_EQ(this->blob_top_->channels(), 1);
+  EXPECT_EQ(this->blob_top_->height(), 3);
+  EXPECT_EQ(this->blob_top_->width(), 3);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  Dtype epsilon = 1e-8;
+  // Output:
+  //     [ 1 4 4 ]
+  //     [ 4 4 4 ]
+  //     [ 4 4 1 ]
+  EXPECT_NEAR(this->blob_top_->cpu_data()[0], 1, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[1], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[2], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[3], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[4], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[5], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[6], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[7], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[8], 1, epsilon);
+}
+
+TYPED_TEST(PoolingLayerTest, TestGradientMaxTopMask) {
+  typedef typename TypeParam::Dtype Dtype;
+  for (int kernel_h = 3; kernel_h <= 4; kernel_h++) {
+    for (int kernel_w = 3; kernel_w <= 4; kernel_w++) {
+      LayerParameter layer_param;
+      PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+      pooling_param->set_kernel_h(kernel_h);
+      pooling_param->set_kernel_w(kernel_w);
+      pooling_param->set_stride(2);
+      pooling_param->set_pool(PoolingParameter_PoolMethod_MAX);
+      this->blob_top_vec_.push_back(this->blob_top_mask_);
+      PoolingLayer<Dtype> layer(layer_param);
+      GradientChecker<Dtype> checker(1e-4, 1e-2);
+      checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+          this->blob_top_vec_);
+      this->blob_top_vec_.pop_back();
+    }
+  }
+}
+
+TYPED_TEST(PoolingLayerTest, TestForwardAve) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(1);
+  pooling_param->set_pad(1);
+  pooling_param->set_pool(PoolingParameter_PoolMethod_AVE);
+  this->blob_bottom_->Reshape(1, 1, 3, 3);
+  FillerParameter filler_param;
+  filler_param.set_value(Dtype(2));
+  ConstantFiller<Dtype> filler(filler_param);
+  filler.Fill(this->blob_bottom_);
+  PoolingLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 1);
+  EXPECT_EQ(this->blob_top_->channels(), 1);
+  EXPECT_EQ(this->blob_top_->height(), 3);
+  EXPECT_EQ(this->blob_top_->width(), 3);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  Dtype epsilon = 1e-5;
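+  // The divisor is the full 3x3 kernel area even where the window overlaps
+  // the zero padding, so on a constant input of 2 the corners average
+  // 2 * 4 / 9 = 8 / 9, the edges 2 * 6 / 9 = 4 / 3, and the center 2.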
+  EXPECT_NEAR(this->blob_top_->cpu_data()[0], 8.0 / 9, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[1], 4.0 / 3, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[2], 8.0 / 9, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[3], 4.0 / 3, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[4], 2.0    , epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[5], 4.0 / 3, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[6], 8.0 / 9, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[7], 4.0 / 3, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[8], 8.0 / 9, epsilon);
+}
+
+TYPED_TEST(PoolingLayerTest, TestGradientAve) {
+  typedef typename TypeParam::Dtype Dtype;
+  for (int kernel_h = 3; kernel_h <= 4; kernel_h++) {
+    for (int kernel_w = 3; kernel_w <= 4; kernel_w++) {
+      LayerParameter layer_param;
+      PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+      pooling_param->set_kernel_h(kernel_h);
+      pooling_param->set_kernel_w(kernel_w);
+      pooling_param->set_stride(2);
+      pooling_param->set_pool(PoolingParameter_PoolMethod_AVE);
+      PoolingLayer<Dtype> layer(layer_param);
+      GradientChecker<Dtype> checker(1e-2, 1e-2);
+      checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+          this->blob_top_vec_);
+    }
+  }
+}
+
+TYPED_TEST(PoolingLayerTest, TestGradientAvePadded) {
+  typedef typename TypeParam::Dtype Dtype;
+  for (int kernel_h = 3; kernel_h <= 4; kernel_h++) {
+    for (int kernel_w = 3; kernel_w <= 4; kernel_w++) {
+      LayerParameter layer_param;
+      PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+      pooling_param->set_kernel_h(kernel_h);
+      pooling_param->set_kernel_w(kernel_w);
+      pooling_param->set_stride(2);
+      pooling_param->set_pad(2);
+      pooling_param->set_pool(PoolingParameter_PoolMethod_AVE);
+      PoolingLayer<Dtype> layer(layer_param);
+      GradientChecker<Dtype> checker(1e-2, 1e-2);
+      checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+          this->blob_top_vec_);
+    }
+  }
+}
+
+#ifdef USE_CUDNN
+template <typename Dtype>
+class CuDNNPoolingLayerTest : public GPUDeviceTest<Dtype> {
+ protected:
+  CuDNNPoolingLayerTest()
+      : blob_bottom_(new Blob<Dtype>()),
+        blob_top_(new Blob<Dtype>()),
+        blob_top_mask_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    Caffe::set_random_seed(1701);
+    blob_bottom_->Reshape(2, 3, 6, 5);
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~CuDNNPoolingLayerTest() {
+    delete blob_bottom_;
+    delete blob_top_;
+    delete blob_top_mask_;
+  }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  Blob<Dtype>* const blob_top_mask_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+  // Test for 2x 2 square pooling layer
+  void TestForwardSquare() {
+    LayerParameter layer_param;
+    PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+    pooling_param->set_kernel_size(2);
+    pooling_param->set_pool(PoolingParameter_PoolMethod_MAX);
+    const int num = 2;
+    const int channels = 2;
+    blob_bottom_->Reshape(num, channels, 3, 5);
+    // Input: 2x 2 channels of:
+    //     [1 2 5 2 3]
+    //     [9 4 1 4 8]
+    //     [1 2 5 2 3]
+    for (int i = 0; i < 15 * num * channels; i += 15) {
+      blob_bottom_->mutable_cpu_data()[i +  0] = 1;
+      blob_bottom_->mutable_cpu_data()[i +  1] = 2;
+      blob_bottom_->mutable_cpu_data()[i +  2] = 5;
+      blob_bottom_->mutable_cpu_data()[i +  3] = 2;
+      blob_bottom_->mutable_cpu_data()[i +  4] = 3;
+      blob_bottom_->mutable_cpu_data()[i +  5] = 9;
+      blob_bottom_->mutable_cpu_data()[i +  6] = 4;
+      blob_bottom_->mutable_cpu_data()[i +  7] = 1;
+      blob_bottom_->mutable_cpu_data()[i +  8] = 4;
+      blob_bottom_->mutable_cpu_data()[i +  9] = 8;
+      blob_bottom_->mutable_cpu_data()[i + 10] = 1;
+      blob_bottom_->mutable_cpu_data()[i + 11] = 2;
+      blob_bottom_->mutable_cpu_data()[i + 12] = 5;
+      blob_bottom_->mutable_cpu_data()[i + 13] = 2;
+      blob_bottom_->mutable_cpu_data()[i + 14] = 3;
+    }
+    CuDNNPoolingLayer<Dtype> layer(layer_param);
+    layer.SetUp(blob_bottom_vec_, blob_top_vec_);
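+    // With a 2x2 kernel and the default stride of 1, the 3x5 bottom pools to
+    // a 2x4 top: (3 - 2) + 1 = 2 and (5 - 2) + 1 = 4.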
+    EXPECT_EQ(blob_top_->num(), num);
+    EXPECT_EQ(blob_top_->channels(), channels);
+    EXPECT_EQ(blob_top_->height(), 2);
+    EXPECT_EQ(blob_top_->width(), 4);
+    if (blob_top_vec_.size() > 1) {
+      EXPECT_EQ(blob_top_mask_->num(), num);
+      EXPECT_EQ(blob_top_mask_->channels(), channels);
+      EXPECT_EQ(blob_top_mask_->height(), 2);
+      EXPECT_EQ(blob_top_mask_->width(), 4);
+    }
+    layer.Forward(blob_bottom_vec_, blob_top_vec_);
+    // Expected output: 2x 2 channels of:
+    //     [9 5 5 8]
+    //     [9 5 5 8]
+    for (int i = 0; i < 8 * num * channels; i += 8) {
+      EXPECT_EQ(blob_top_->cpu_data()[i + 0], 9);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 1], 5);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 2], 5);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 3], 8);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 4], 9);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 5], 5);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 6], 5);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 7], 8);
+    }
+    if (blob_top_vec_.size() > 1) {
+      // Expected mask output: 2x 2 channels of:
+      //     [5  2  2 9]
+      //     [5 12 12 9]
+      for (int i = 0; i < 8 * num * channels; i += 8) {
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 0],  5);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 1],  2);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 2],  2);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 3],  9);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 4],  5);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 5], 12);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 6], 12);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 7],  9);
+      }
+    }
+  }
+  // Test for 3x 2 rectangular pooling layer with kernel_h > kernel_w
+  void TestForwardRectHigh() {
+    LayerParameter layer_param;
+    PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+    pooling_param->set_kernel_h(3);
+    pooling_param->set_kernel_w(2);
+    pooling_param->set_pool(PoolingParameter_PoolMethod_MAX);
+    const int num = 2;
+    const int channels = 2;
+    blob_bottom_->Reshape(num, channels, 6, 6);
+    // Input: 2x 2 channels of:
+    // [35     1     6    26    19    24]
+    // [ 3    32     7    21    23    25]
+    // [31     9     2    22    27    20]
+    // [ 8    28    33    17    10    15]
+    // [30     5    34    12    14    16]
+    // [ 4    36    29    13    18    11]
+    // (this is generated by magic(6) in MATLAB)
+    for (int i = 0; i < 36 * num * channels; i += 36) {
+      blob_bottom_->mutable_cpu_data()[i +  0] = 35;
+      blob_bottom_->mutable_cpu_data()[i +  1] = 1;
+      blob_bottom_->mutable_cpu_data()[i +  2] = 6;
+      blob_bottom_->mutable_cpu_data()[i +  3] = 26;
+      blob_bottom_->mutable_cpu_data()[i +  4] = 19;
+      blob_bottom_->mutable_cpu_data()[i +  5] = 24;
+      blob_bottom_->mutable_cpu_data()[i +  6] = 3;
+      blob_bottom_->mutable_cpu_data()[i +  7] = 32;
+      blob_bottom_->mutable_cpu_data()[i +  8] = 7;
+      blob_bottom_->mutable_cpu_data()[i +  9] = 21;
+      blob_bottom_->mutable_cpu_data()[i + 10] = 23;
+      blob_bottom_->mutable_cpu_data()[i + 11] = 25;
+      blob_bottom_->mutable_cpu_data()[i + 12] = 31;
+      blob_bottom_->mutable_cpu_data()[i + 13] = 9;
+      blob_bottom_->mutable_cpu_data()[i + 14] = 2;
+      blob_bottom_->mutable_cpu_data()[i + 15] = 22;
+      blob_bottom_->mutable_cpu_data()[i + 16] = 27;
+      blob_bottom_->mutable_cpu_data()[i + 17] = 20;
+      blob_bottom_->mutable_cpu_data()[i + 18] = 8;
+      blob_bottom_->mutable_cpu_data()[i + 19] = 28;
+      blob_bottom_->mutable_cpu_data()[i + 20] = 33;
+      blob_bottom_->mutable_cpu_data()[i + 21] = 17;
+      blob_bottom_->mutable_cpu_data()[i + 22] = 10;
+      blob_bottom_->mutable_cpu_data()[i + 23] = 15;
+      blob_bottom_->mutable_cpu_data()[i + 24] = 30;
+      blob_bottom_->mutable_cpu_data()[i + 25] = 5;
+      blob_bottom_->mutable_cpu_data()[i + 26] = 34;
+      blob_bottom_->mutable_cpu_data()[i + 27] = 12;
+      blob_bottom_->mutable_cpu_data()[i + 28] = 14;
+      blob_bottom_->mutable_cpu_data()[i + 29] = 16;
+      blob_bottom_->mutable_cpu_data()[i + 30] = 4;
+      blob_bottom_->mutable_cpu_data()[i + 31] = 36;
+      blob_bottom_->mutable_cpu_data()[i + 32] = 29;
+      blob_bottom_->mutable_cpu_data()[i + 33] = 13;
+      blob_bottom_->mutable_cpu_data()[i + 34] = 18;
+      blob_bottom_->mutable_cpu_data()[i + 35] = 11;
+    }
+    CuDNNPoolingLayer<Dtype> layer(layer_param);
+    layer.SetUp(blob_bottom_vec_, blob_top_vec_);
+    EXPECT_EQ(blob_top_->num(), num);
+    EXPECT_EQ(blob_top_->channels(), channels);
+    EXPECT_EQ(blob_top_->height(), 4);
+    EXPECT_EQ(blob_top_->width(), 5);
+    if (blob_top_vec_.size() > 1) {
+      EXPECT_EQ(blob_top_mask_->num(), num);
+      EXPECT_EQ(blob_top_mask_->channels(), channels);
+      EXPECT_EQ(blob_top_mask_->height(), 4);
+      EXPECT_EQ(blob_top_mask_->width(), 5);
+    }
+    layer.Forward(blob_bottom_vec_, blob_top_vec_);
+    // Expected output: 2x 2 channels of:
+    // [35    32    26    27    27]
+    // [32    33    33    27    27]
+    // [31    34    34    27    27]
+    // [36    36    34    18    18]
+    for (int i = 0; i < 20 * num * channels; i += 20) {
+      EXPECT_EQ(blob_top_->cpu_data()[i +  0], 35);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  1], 32);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  2], 26);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  3], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  4], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  5], 32);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  6], 33);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  7], 33);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  8], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  9], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 10], 31);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 11], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 12], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 13], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 14], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 15], 36);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 16], 36);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 17], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 18], 18);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 19], 18);
+    }
+    if (blob_top_vec_.size() > 1) {
+      // Expected mask output: 2x 2 channels of (1-based indices; the
+      // checks below use 0-based indices):
+      // [ 1     8     4    17    17]
+      // [ 8    21    21    17    17]
+      // [13    27    27    17    17]
+      // [32    32    27    35    35]
+      for (int i = 0; i < 20 * num * channels; i += 20) {
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  0],  0);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  1],  7);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  2],  3);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  3], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  4], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  5],  7);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  6], 20);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  7], 20);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  8], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  9], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 10], 12);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 11], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 12], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 13], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 14], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 15], 31);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 16], 31);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 17], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 18], 34);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 19], 34);
+      }
+    }
+  }
+  // Test for rectangular pooling layer with kernel_w > kernel_h
+  void TestForwardRectWide() {
+    LayerParameter layer_param;
+    PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+    pooling_param->set_kernel_h(2);
+    pooling_param->set_kernel_w(3);
+    pooling_param->set_pool(PoolingParameter_PoolMethod_MAX);
+    const int num = 2;
+    const int channels = 2;
+    blob_bottom_->Reshape(num, channels, 6, 6);
+    // Input: 2x 2 channels of:
+    // [35     1     6    26    19    24]
+    // [ 3    32     7    21    23    25]
+    // [31     9     2    22    27    20]
+    // [ 8    28    33    17    10    15]
+    // [30     5    34    12    14    16]
+    // [ 4    36    29    13    18    11]
+    // (this is generated by magic(6) in MATLAB)
+    for (int i = 0; i < 36 * num * channels; i += 36) {
+      blob_bottom_->mutable_cpu_data()[i +  0] = 35;
+      blob_bottom_->mutable_cpu_data()[i +  1] = 1;
+      blob_bottom_->mutable_cpu_data()[i +  2] = 6;
+      blob_bottom_->mutable_cpu_data()[i +  3] = 26;
+      blob_bottom_->mutable_cpu_data()[i +  4] = 19;
+      blob_bottom_->mutable_cpu_data()[i +  5] = 24;
+      blob_bottom_->mutable_cpu_data()[i +  6] = 3;
+      blob_bottom_->mutable_cpu_data()[i +  7] = 32;
+      blob_bottom_->mutable_cpu_data()[i +  8] = 7;
+      blob_bottom_->mutable_cpu_data()[i +  9] = 21;
+      blob_bottom_->mutable_cpu_data()[i + 10] = 23;
+      blob_bottom_->mutable_cpu_data()[i + 11] = 25;
+      blob_bottom_->mutable_cpu_data()[i + 12] = 31;
+      blob_bottom_->mutable_cpu_data()[i + 13] = 9;
+      blob_bottom_->mutable_cpu_data()[i + 14] = 2;
+      blob_bottom_->mutable_cpu_data()[i + 15] = 22;
+      blob_bottom_->mutable_cpu_data()[i + 16] = 27;
+      blob_bottom_->mutable_cpu_data()[i + 17] = 20;
+      blob_bottom_->mutable_cpu_data()[i + 18] = 8;
+      blob_bottom_->mutable_cpu_data()[i + 19] = 28;
+      blob_bottom_->mutable_cpu_data()[i + 20] = 33;
+      blob_bottom_->mutable_cpu_data()[i + 21] = 17;
+      blob_bottom_->mutable_cpu_data()[i + 22] = 10;
+      blob_bottom_->mutable_cpu_data()[i + 23] = 15;
+      blob_bottom_->mutable_cpu_data()[i + 24] = 30;
+      blob_bottom_->mutable_cpu_data()[i + 25] = 5;
+      blob_bottom_->mutable_cpu_data()[i + 26] = 34;
+      blob_bottom_->mutable_cpu_data()[i + 27] = 12;
+      blob_bottom_->mutable_cpu_data()[i + 28] = 14;
+      blob_bottom_->mutable_cpu_data()[i + 29] = 16;
+      blob_bottom_->mutable_cpu_data()[i + 30] = 4;
+      blob_bottom_->mutable_cpu_data()[i + 31] = 36;
+      blob_bottom_->mutable_cpu_data()[i + 32] = 29;
+      blob_bottom_->mutable_cpu_data()[i + 33] = 13;
+      blob_bottom_->mutable_cpu_data()[i + 34] = 18;
+      blob_bottom_->mutable_cpu_data()[i + 35] = 11;
+    }
+    CuDNNPoolingLayer<Dtype> layer(layer_param);
+    layer.SetUp(blob_bottom_vec_, blob_top_vec_);
+    EXPECT_EQ(blob_top_->num(), num);
+    EXPECT_EQ(blob_top_->channels(), channels);
+    EXPECT_EQ(blob_top_->height(), 5);
+    EXPECT_EQ(blob_top_->width(), 4);
+    if (blob_top_vec_.size() > 1) {
+      EXPECT_EQ(blob_top_mask_->num(), num);
+      EXPECT_EQ(blob_top_mask_->channels(), channels);
+      EXPECT_EQ(blob_top_mask_->height(), 5);
+      EXPECT_EQ(blob_top_mask_->width(), 4);
+    }
+    layer.Forward(blob_bottom_vec_, blob_top_vec_);
+    // Expected output: 2x 2 channels of:
+    // [35    32    26    26]
+    // [32    32    27    27]
+    // [33    33    33    27]
+    // [34    34    34    17]
+    // [36    36    34    18]
+    for (int i = 0; i < 20 * num * channels; i += 20) {
+      EXPECT_EQ(blob_top_->cpu_data()[i +  0], 35);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  1], 32);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  2], 26);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  3], 26);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  4], 32);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  5], 32);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  6], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  7], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  8], 33);
+      EXPECT_EQ(blob_top_->cpu_data()[i +  9], 33);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 10], 33);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 11], 27);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 12], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 13], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 14], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 15], 17);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 16], 36);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 17], 36);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 18], 34);
+      EXPECT_EQ(blob_top_->cpu_data()[i + 19], 18);
+    }
+    if (blob_top_vec_.size() > 1) {
+      // Expected mask output: 2x 2 channels of (1-based indices; the
+      // checks below use 0-based indices):
+      // [ 1     8     4     4]
+      // [ 8     8    17    17]
+      // [21    21    21    17]
+      // [27    27    27    22]
+      // [32    32    27    35]
+      for (int i = 0; i < 20 * num * channels; i += 20) {
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  0],  0);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  1],  7);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  2],  3);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  3],  3);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  4],  7);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  5],  7);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  6], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  7], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  8], 20);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i +  9], 20);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 10], 20);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 11], 16);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 12], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 13], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 14], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 15], 21);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 16], 31);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 17], 31);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 18], 26);
+        EXPECT_EQ(blob_top_mask_->cpu_data()[i + 19], 34);
+      }
+    }
+  }
+};
+
+TYPED_TEST_CASE(CuDNNPoolingLayerTest, TestDtypes);
+
+TYPED_TEST(CuDNNPoolingLayerTest, TestSetupCuDNN) {
+  LayerParameter layer_param;
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  CuDNNPoolingLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_->num());
+  EXPECT_EQ(this->blob_top_->channels(), this->blob_bottom_->channels());
+  EXPECT_EQ(this->blob_top_->height(), 3);
+  EXPECT_EQ(this->blob_top_->width(), 2);
+}
+
+TYPED_TEST(CuDNNPoolingLayerTest, TestSetupPaddedCuDNN) {
+  LayerParameter layer_param;
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  pooling_param->set_pad(1);
+  pooling_param->set_pool(PoolingParameter_PoolMethod_AVE);
+  CuDNNPoolingLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_->num());
+  EXPECT_EQ(this->blob_top_->channels(), this->blob_bottom_->channels());
+  EXPECT_EQ(this->blob_top_->height(), 4);
+  EXPECT_EQ(this->blob_top_->width(), 3);
+}
+
+/*
+TYPED_TEST(CuDNNPoolingLayerTest, PrintBackwardCuDNN) {
+  LayerParameter layer_param;
+  layer_param.set_kernelsize(3);
+  layer_param.set_stride(2);
+  layer_param.set_pool(LayerParameter_PoolMethod_MAX);
+  CuDNNPoolingLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    cout << "bottom data " << i << " " << this->blob_bottom_->cpu_data()[i] << endl;
+  }
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    cout << "top data " << i << " " << this->blob_top_->cpu_data()[i] << endl;
+  }
+
+  for (int i = 0; i < this->blob_top_->count(); ++i) {
+    this->blob_top_->mutable_cpu_diff()[i] = i;
+  }
+  layer.Backward(this->blob_top_vec_, true, this->blob_bottom_vec_);
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    cout << "bottom diff " << i << " " << this->blob_bottom_->cpu_diff()[i] << endl;
+  }
+}
+*/
+
+TYPED_TEST(CuDNNPoolingLayerTest, TestForwardMaxCuDNN) {
+  this->TestForwardSquare();
+  this->TestForwardRectHigh();
+  this->TestForwardRectWide();
+}
+
+// Currently, cuDNN does not support a top mask, so we comment out this test
+// and the corresponding backward test.
+/*
+TYPED_TEST(CuDNNPoolingLayerTest, TestForwardMaxTopMaskCuDNN) {
+  this->blob_top_vec_.push_back(this->blob_top_mask_);
+  this->TestForwardSquare();
+  this->TestForwardRectHigh();
+  this->TestForwardRectWide();
+}
+*/
+
+TYPED_TEST(CuDNNPoolingLayerTest, TestGradientMaxCuDNN) {
+  for (int kernel_h = 3; kernel_h <= 4; kernel_h++) {
+    for (int kernel_w = 3; kernel_w <= 4; kernel_w++) {
+      LayerParameter layer_param;
+      PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+      pooling_param->set_kernel_h(kernel_h);
+      pooling_param->set_kernel_w(kernel_w);
+      pooling_param->set_stride(2);
+      // Currently, cuDNN pooling does not support padding.
+      pooling_param->set_pad(0);
+      pooling_param->set_pool(PoolingParameter_PoolMethod_MAX);
+      CuDNNPoolingLayer<TypeParam> layer(layer_param);
+      GradientChecker<TypeParam> checker(1e-4, 1e-2);
+      checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+          this->blob_top_vec_);
+    }
+  }
+}
+
+TYPED_TEST(CuDNNPoolingLayerTest, TestForwardMaxPaddedCuDNN) {
+  LayerParameter layer_param;
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  pooling_param->set_pad(2);
+  pooling_param->set_pool(PoolingParameter_PoolMethod_MAX);
+  this->blob_bottom_->Reshape(1, 1, 3, 3);
+  // Input:
+  //     [ 1 2 4 ]
+  //     [ 2 3 2 ]
+  //     [ 4 2 1 ]
+  this->blob_bottom_->mutable_cpu_data()[0] = 1;
+  this->blob_bottom_->mutable_cpu_data()[1] = 2;
+  this->blob_bottom_->mutable_cpu_data()[2] = 4;
+  this->blob_bottom_->mutable_cpu_data()[3] = 2;
+  this->blob_bottom_->mutable_cpu_data()[4] = 3;
+  this->blob_bottom_->mutable_cpu_data()[5] = 2;
+  this->blob_bottom_->mutable_cpu_data()[6] = 4;
+  this->blob_bottom_->mutable_cpu_data()[7] = 2;
+  this->blob_bottom_->mutable_cpu_data()[8] = 1;
+  CuDNNPoolingLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 1);
+  EXPECT_EQ(this->blob_top_->channels(), 1);
+  EXPECT_EQ(this->blob_top_->height(), 3);
+  EXPECT_EQ(this->blob_top_->width(), 3);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  TypeParam epsilon = 1e-8;
+  // Output:
+  //     [ 1 4 4 ]
+  //     [ 4 4 4 ]
+  //     [ 4 4 1 ]
+  EXPECT_NEAR(this->blob_top_->cpu_data()[0], 1, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[1], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[2], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[3], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[4], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[5], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[6], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[7], 4, epsilon);
+  EXPECT_NEAR(this->blob_top_->cpu_data()[8], 1, epsilon);
+}
+
+/*
+TYPED_TEST(CuDNNPoolingLayerTest, TestGradientMaxTopMaskCuDNN) {
+  for (int kernel_h = 3; kernel_h <= 4; kernel_h++) {
+    for (int kernel_w = 3; kernel_w <= 4; kernel_w++) {
+      LayerParameter layer_param;
+      PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+      pooling_param->set_kernel_h(kernel_h);
+      pooling_param->set_kernel_w(kernel_w);
+      pooling_param->set_stride(2);
+      pooling_param->set_pool(PoolingParameter_PoolMethod_MAX);
+      this->blob_top_vec_.push_back(this->blob_top_mask_);
+      CuDNNPoolingLayer<TypeParam> layer(layer_param);
+      GradientChecker<TypeParam> checker(1e-4, 1e-2);
+      checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+          this->blob_top_vec_);
+      this->blob_top_vec_.pop_back();
+    }
+  }
+}
+*/
+
+TYPED_TEST(CuDNNPoolingLayerTest, TestForwardAveCuDNN) {
+  LayerParameter layer_param;
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(1);
+  // Currently, cuDNN pooling does not support padding, so we use
+  // a simplified version of this test.
+  pooling_param->set_pad(0);
+  pooling_param->set_pool(PoolingParameter_PoolMethod_AVE);
+  this->blob_bottom_->Reshape(1, 1, 3, 3);
+  FillerParameter filler_param;
+  filler_param.set_value(TypeParam(2));
+  ConstantFiller<TypeParam> filler(filler_param);
+  filler.Fill(this->blob_bottom_);
+  CuDNNPoolingLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 1);
+  EXPECT_EQ(this->blob_top_->channels(), 1);
+  EXPECT_EQ(this->blob_top_->height(), 1);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  TypeParam epsilon = 1e-5;
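+  // Without padding the single 3x3 window covers all nine constant values
+  // of 2, so the average-pooled output is exactly 2.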
+  EXPECT_NEAR(this->blob_top_->cpu_data()[0], 2.0, epsilon);
+}
+
+TYPED_TEST(CuDNNPoolingLayerTest, TestGradientAveCuDNN) {
+  for (int kernel_h = 3; kernel_h <= 4; kernel_h++) {
+    for (int kernel_w = 3; kernel_w <= 4; kernel_w++) {
+      LayerParameter layer_param;
+      PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+      pooling_param->set_kernel_h(kernel_h);
+      pooling_param->set_kernel_w(kernel_w);
+      pooling_param->set_stride(2);
+      pooling_param->set_pool(PoolingParameter_PoolMethod_AVE);
+      CuDNNPoolingLayer<TypeParam> layer(layer_param);
+      GradientChecker<TypeParam> checker(1e-2, 1e-2);
+      checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+          this->blob_top_vec_);
+    }
+  }
+}
+
+TYPED_TEST(CuDNNPoolingLayerTest, TestGradientAvePaddedCuDNN) {
+  for (int kernel_h = 3; kernel_h <= 4; kernel_h++) {
+    for (int kernel_w = 3; kernel_w <= 4; kernel_w++) {
+      LayerParameter layer_param;
+      PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+      pooling_param->set_kernel_h(kernel_h);
+      pooling_param->set_kernel_w(kernel_w);
+      pooling_param->set_stride(2);
+      pooling_param->set_pad(2);
+      pooling_param->set_pool(PoolingParameter_PoolMethod_AVE);
+      CuDNNPoolingLayer<TypeParam> layer(layer_param);
+      GradientChecker<TypeParam> checker(1e-2, 1e-2);
+      checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+          this->blob_top_vec_);
+    }
+  }
+}
+
+#endif
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_power_layer.cpp b/src/caffe/test/test_power_layer.cpp
new file mode 100644
index 0000000..76c9e85
--- /dev/null
+++ b/src/caffe/test/test_power_layer.cpp
@@ -0,0 +1,170 @@
+#include <algorithm>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class PowerLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  PowerLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 4, 5)),
+        blob_top_(new Blob<Dtype>()) {
+    Caffe::set_random_seed(1701);
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~PowerLayerTest() { delete blob_bottom_; delete blob_top_; }
+
+  void TestForward(Dtype power, Dtype scale, Dtype shift) {
+    LayerParameter layer_param;
+    layer_param.mutable_power_param()->set_power(power);
+    layer_param.mutable_power_param()->set_scale(scale);
+    layer_param.mutable_power_param()->set_shift(shift);
+    PowerLayer<Dtype> layer(layer_param);
+    layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+    layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    // Now, check values
+    const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+    const Dtype* top_data = this->blob_top_->cpu_data();
+    const Dtype min_precision = 1e-5;
+    for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+      Dtype expected_value = pow(shift + scale * bottom_data[i], power);
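+      // pow() is defined for negative bases only when the exponent is an
+      // integer, so powers 0, 1, and 2 must never produce NaN; for other
+      // exponents a negative base yields NaN, which the layer should match.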
+      if (power == Dtype(0) || power == Dtype(1) || power == Dtype(2)) {
+        EXPECT_FALSE(isnan(top_data[i]));
+      }
+      if (isnan(expected_value)) {
+        EXPECT_TRUE(isnan(top_data[i]));
+      } else {
+        Dtype precision = std::max(
+          Dtype(std::abs(expected_value * Dtype(1e-4))), min_precision);
+        EXPECT_NEAR(expected_value, top_data[i], precision);
+      }
+    }
+  }
+
+  void TestBackward(Dtype power, Dtype scale, Dtype shift) {
+    LayerParameter layer_param;
+    layer_param.mutable_power_param()->set_power(power);
+    layer_param.mutable_power_param()->set_scale(scale);
+    layer_param.mutable_power_param()->set_shift(shift);
+    PowerLayer<Dtype> layer(layer_param);
+    if (power != Dtype(0) && power != Dtype(1) && power != Dtype(2)) {
+      // Avoid NaNs by forcing (shift + scale * x) >= 0
+      Dtype* bottom_data = this->blob_bottom_->mutable_cpu_data();
+      Dtype min_value = -shift / scale;
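+      // min_value is the x at which shift + scale * x == 0 (assuming
+      // scale > 0); values below it are reflected about min_value so the
+      // base of the power stays non-negative.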
+      for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+        if (bottom_data[i] < min_value) {
+          bottom_data[i] = min_value + (min_value - bottom_data[i]);
+        }
+      }
+    }
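+    // The last three GradientChecker arguments (seed, kink, kink_range)
+    // make the checker skip bottom values within 0.01 of zero, where the
+    // power function can be non-differentiable (e.g. when shift == 0).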
+    GradientChecker<Dtype> checker(1e-3, 1e-2, 1701, 0., 0.01);
+    checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+        this->blob_top_vec_);
+  }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(PowerLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(PowerLayerTest, TestPower) {
+  typedef typename TypeParam::Dtype Dtype;
+  Dtype power = 0.37;
+  Dtype scale = 0.83;
+  Dtype shift = -2.4;
+  this->TestForward(power, scale, shift);
+}
+
+TYPED_TEST(PowerLayerTest, TestPowerGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  Dtype power = 0.37;
+  Dtype scale = 0.83;
+  Dtype shift = -2.4;
+  this->TestBackward(power, scale, shift);
+}
+
+TYPED_TEST(PowerLayerTest, TestPowerGradientShiftZero) {
+  typedef typename TypeParam::Dtype Dtype;
+  Dtype power = 0.37;
+  Dtype scale = 0.83;
+  Dtype shift = 0.0;
+  this->TestBackward(power, scale, shift);
+}
+
+TYPED_TEST(PowerLayerTest, TestPowerZero) {
+  typedef typename TypeParam::Dtype Dtype;
+  Dtype power = 0.0;
+  Dtype scale = 0.83;
+  Dtype shift = -2.4;
+  this->TestForward(power, scale, shift);
+}
+
+TYPED_TEST(PowerLayerTest, TestPowerZeroGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  Dtype power = 0.0;
+  Dtype scale = 0.83;
+  Dtype shift = -2.4;
+  this->TestBackward(power, scale, shift);
+}
+
+TYPED_TEST(PowerLayerTest, TestPowerOne) {
+  typedef typename TypeParam::Dtype Dtype;
+  Dtype power = 1.0;
+  Dtype scale = 0.83;
+  Dtype shift = -2.4;
+  this->TestForward(power, scale, shift);
+}
+
+TYPED_TEST(PowerLayerTest, TestPowerOneGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  Dtype power = 1.0;
+  Dtype scale = 0.83;
+  Dtype shift = -2.4;
+  this->TestBackward(power, scale, shift);
+}
+
+TYPED_TEST(PowerLayerTest, TestPowerTwo) {
+  typedef typename TypeParam::Dtype Dtype;
+  Dtype power = 2.0;
+  Dtype scale = 0.34;
+  Dtype shift = -2.4;
+  this->TestForward(power, scale, shift);
+}
+
+TYPED_TEST(PowerLayerTest, TestPowerTwoGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  Dtype power = 2.0;
+  Dtype scale = 0.83;
+  Dtype shift = -2.4;
+  this->TestBackward(power, scale, shift);
+}
+
+TYPED_TEST(PowerLayerTest, TestPowerTwoScaleHalfGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  Dtype power = 2.0;
+  Dtype scale = 0.5;
+  Dtype shift = -2.4;
+  this->TestBackward(power, scale, shift);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_protobuf.cpp b/src/caffe/test/test_protobuf.cpp
new file mode 100644
index 0000000..01de461
--- /dev/null
+++ b/src/caffe/test/test_protobuf.cpp
@@ -0,0 +1,29 @@
+// This is simply a script that tries serializing a protocol buffer in text
+// format. Nothing special here and no actual code is being tested.
+#include <string>
+
+#include "google/protobuf/text_format.h"
+#include "gtest/gtest.h"
+
+#include "caffe/proto/caffe.pb.h"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+class ProtoTest : public ::testing::Test {};
+
+TEST_F(ProtoTest, TestSerialization) {
+  LayerParameter param;
+  param.set_name("test");
+  param.set_type("Test");
+  std::cout << "Printing in binary format." << std::endl;
+  std::cout << param.SerializeAsString() << std::endl;
+  std::cout << "Printing in text format." << std::endl;
+  std::string str;
+  google::protobuf::TextFormat::PrintToString(param, &str);
+  std::cout << str << std::endl;
+  EXPECT_TRUE(true);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_random_number_generator.cpp b/src/caffe/test/test_random_number_generator.cpp
new file mode 100644
index 0000000..98424c0
--- /dev/null
+++ b/src/caffe/test/test_random_number_generator.cpp
@@ -0,0 +1,521 @@
+#include <cmath>
+#include <cstring>
+
+#include "gtest/gtest.h"
+
+#include "caffe/common.hpp"
+#include "caffe/syncedmem.hpp"
+#include "caffe/util/math_functions.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+class RandomNumberGeneratorTest : public ::testing::Test {
+ protected:
+  RandomNumberGeneratorTest()
+     : mean_bound_multiplier_(3.8),  // ~99.99% confidence of passing.
+       sample_size_(10000),
+       seed_(1701),
+       data_(new SyncedMemory(sample_size_ * sizeof(Dtype))),
+       data_2_(new SyncedMemory(sample_size_ * sizeof(Dtype))),
+       int_data_(new SyncedMemory(sample_size_ * sizeof(int))),
+       int_data_2_(new SyncedMemory(sample_size_ * sizeof(int))) {}
+
+  virtual void SetUp() {
+    Caffe::set_random_seed(this->seed_);
+  }
+
+  Dtype sample_mean(const Dtype* const seqs, const int sample_size) {
+    Dtype sum = 0;
+    for (int i = 0; i < sample_size; ++i) {
+      sum += seqs[i];
+    }
+    return sum / sample_size;
+  }
+
+  Dtype sample_mean(const Dtype* const seqs) {
+    return sample_mean(seqs, sample_size_);
+  }
+
+  Dtype sample_mean(const int* const seqs, const int sample_size) {
+    Dtype sum = 0;
+    for (int i = 0; i < sample_size; ++i) {
+      sum += Dtype(seqs[i]);
+    }
+    return sum / sample_size;
+  }
+
+  Dtype sample_mean(const int* const seqs) {
+    return sample_mean(seqs, sample_size_);
+  }
+
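+  // Allowed deviation of the sample mean from the true mean: 3.8 standard
+  // errors of the mean, i.e. mean_bound_multiplier_ * std / sqrt(n).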
+  Dtype mean_bound(const Dtype std, const int sample_size) {
+    return mean_bound_multiplier_ * std / sqrt(static_cast<Dtype>(sample_size));
+  }
+
+  Dtype mean_bound(const Dtype std) {
+    return mean_bound(std, sample_size_);
+  }
+
+  void RngGaussianFill(const Dtype mu, const Dtype sigma, void* cpu_data) {
+    Dtype* rng_data = static_cast<Dtype*>(cpu_data);
+    caffe_rng_gaussian(sample_size_, mu, sigma, rng_data);
+  }
+
+  void RngGaussianChecks(const Dtype mu, const Dtype sigma,
+                         const void* cpu_data, const Dtype sparse_p = 0) {
+    const Dtype* rng_data = static_cast<const Dtype*>(cpu_data);
+    const Dtype true_mean = mu;
+    const Dtype true_std = sigma;
+    // Check that sample mean roughly matches true mean.
+    const Dtype bound = this->mean_bound(true_std);
+    const Dtype sample_mean = this->sample_mean(
+        static_cast<const Dtype*>(cpu_data));
+    EXPECT_NEAR(sample_mean, true_mean, bound);
+    // Check that roughly half the samples are above the true mean.
+    int num_above_mean = 0;
+    int num_below_mean = 0;
+    int num_mean = 0;
+    int num_nan = 0;
+    for (int i = 0; i < sample_size_; ++i) {
+      if (rng_data[i] > true_mean) {
+        ++num_above_mean;
+      } else if (rng_data[i] < true_mean) {
+        ++num_below_mean;
+      } else if (rng_data[i] == true_mean) {
+        ++num_mean;
+      } else {
+        ++num_nan;
+      }
+    }
+    EXPECT_EQ(0, num_nan);
+    if (sparse_p == Dtype(0)) {
+      EXPECT_EQ(0, num_mean);
+    }
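+    // A fraction sparse_p of the samples is zeroed out (equal to the mean in
+    // these tests), so a sample lies strictly above the mean with probability
+    // (1 - sparse_p) * 0.5; compare the observed fraction to that Bernoulli
+    // mean.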
+    const Dtype sample_p_above_mean =
+        static_cast<Dtype>(num_above_mean) / sample_size_;
+    const Dtype bernoulli_p = (1 - sparse_p) * 0.5;
+    const Dtype bernoulli_std = sqrt(bernoulli_p * (1 - bernoulli_p));
+    const Dtype bernoulli_bound = this->mean_bound(bernoulli_std);
+    EXPECT_NEAR(bernoulli_p, sample_p_above_mean, bernoulli_bound);
+  }
+
+  void RngUniformFill(const Dtype lower, const Dtype upper, void* cpu_data) {
+    CHECK_GE(upper, lower);
+    Dtype* rng_data = static_cast<Dtype*>(cpu_data);
+    caffe_rng_uniform(sample_size_, lower, upper, rng_data);
+  }
+
+  void RngUniformChecks(const Dtype lower, const Dtype upper,
+                        const void* cpu_data, const Dtype sparse_p = 0) {
+    const Dtype* rng_data = static_cast<const Dtype*>(cpu_data);
+    const Dtype true_mean = (lower + upper) / 2;
+    const Dtype true_std = (upper - lower) / sqrt(12);
+    // Check that sample mean roughly matches true mean.
+    const Dtype bound = this->mean_bound(true_std);
+    const Dtype sample_mean = this->sample_mean(rng_data);
+    EXPECT_NEAR(sample_mean, true_mean, bound);
+    // Check that roughly half the samples are above the true mean, and none are
+    // above upper or below lower.
+    int num_above_mean = 0;
+    int num_below_mean = 0;
+    int num_mean = 0;
+    int num_nan = 0;
+    int num_above_upper = 0;
+    int num_below_lower = 0;
+    for (int i = 0; i < sample_size_; ++i) {
+      if (rng_data[i] > true_mean) {
+        ++num_above_mean;
+      } else if (rng_data[i] < true_mean) {
+        ++num_below_mean;
+      } else if (rng_data[i] == true_mean) {
+        ++num_mean;
+      } else {
+        ++num_nan;
+      }
+      if (rng_data[i] > upper) {
+        ++num_above_upper;
+      } else if (rng_data[i] < lower) {
+        ++num_below_lower;
+      }
+    }
+    EXPECT_EQ(0, num_nan);
+    EXPECT_EQ(0, num_above_upper);
+    EXPECT_EQ(0, num_below_lower);
+    if (sparse_p == Dtype(0)) {
+      EXPECT_EQ(0, num_mean);
+    }
+    const Dtype sample_p_above_mean =
+        static_cast<Dtype>(num_above_mean) / sample_size_;
+    const Dtype bernoulli_p = (1 - sparse_p) * 0.5;
+    const Dtype bernoulli_std = sqrt(bernoulli_p * (1 - bernoulli_p));
+    const Dtype bernoulli_bound = this->mean_bound(bernoulli_std);
+    EXPECT_NEAR(bernoulli_p, sample_p_above_mean, bernoulli_bound);
+  }
+
+  void RngBernoulliFill(const Dtype p, void* cpu_data) {
+    int* rng_data = static_cast<int*>(cpu_data);
+    caffe_rng_bernoulli(sample_size_, p, rng_data);
+  }
+
+  void RngBernoulliChecks(const Dtype p, const void* cpu_data) {
+    const int* rng_data = static_cast<const int*>(cpu_data);
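+    // A Bernoulli(p) sample has mean p and standard deviation
+    // sqrt(p * (1 - p)).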
+    const Dtype true_mean = p;
+    const Dtype true_std = sqrt(p * (1 - p));
+    const Dtype bound = this->mean_bound(true_std);
+    const Dtype sample_mean = this->sample_mean(rng_data);
+    EXPECT_NEAR(sample_mean, true_mean, bound);
+  }
+
+#ifndef CPU_ONLY
+
+  void RngGaussianFillGPU(const Dtype mu, const Dtype sigma, void* gpu_data) {
+    Dtype* rng_data = static_cast<Dtype*>(gpu_data);
+    caffe_gpu_rng_gaussian(sample_size_, mu, sigma, rng_data);
+  }
+
+  void RngUniformFillGPU(const Dtype lower, const Dtype upper, void* gpu_data) {
+    CHECK_GE(upper, lower);
+    Dtype* rng_data = static_cast<Dtype*>(gpu_data);
+    caffe_gpu_rng_uniform(sample_size_, lower, upper, rng_data);
+  }
+
+  // Fills with uniform integers in [0, UINT_MAX] using 2 argument form of
+  // caffe_gpu_rng_uniform.
+  void RngUniformIntFillGPU(void* gpu_data) {
+    unsigned int* rng_data = static_cast<unsigned int*>(gpu_data);
+    caffe_gpu_rng_uniform(sample_size_, rng_data);
+  }
+
+#endif
+
+  int num_above_mean;
+  int num_below_mean;
+
+  Dtype mean_bound_multiplier_;
+
+  size_t sample_size_;
+  uint32_t seed_;
+
+  shared_ptr<SyncedMemory> data_;
+  shared_ptr<SyncedMemory> data_2_;
+  shared_ptr<SyncedMemory> int_data_;
+  shared_ptr<SyncedMemory> int_data_2_;
+};
+
+TYPED_TEST_CASE(RandomNumberGeneratorTest, TestDtypes);
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngGaussian) {
+  const TypeParam mu = 0;
+  const TypeParam sigma = 1;
+  void* gaussian_data = this->data_->mutable_cpu_data();
+  this->RngGaussianFill(mu, sigma, gaussian_data);
+  this->RngGaussianChecks(mu, sigma, gaussian_data);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngGaussian2) {
+  const TypeParam mu = -2;
+  const TypeParam sigma = 3;
+  void* gaussian_data = this->data_->mutable_cpu_data();
+  this->RngGaussianFill(mu, sigma, gaussian_data);
+  this->RngGaussianChecks(mu, sigma, gaussian_data);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngUniform) {
+  const TypeParam lower = 0;
+  const TypeParam upper = 1;
+  void* uniform_data = this->data_->mutable_cpu_data();
+  this->RngUniformFill(lower, upper, uniform_data);
+  this->RngUniformChecks(lower, upper, uniform_data);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngUniform2) {
+  const TypeParam lower = -7.3;
+  const TypeParam upper = -2.3;
+  void* uniform_data = this->data_->mutable_cpu_data();
+  this->RngUniformFill(lower, upper, uniform_data);
+  this->RngUniformChecks(lower, upper, uniform_data);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngBernoulli) {
+  const TypeParam p = 0.3;
+  void* bernoulli_data = this->int_data_->mutable_cpu_data();
+  this->RngBernoulliFill(p, bernoulli_data);
+  this->RngBernoulliChecks(p, bernoulli_data);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngBernoulli2) {
+  const TypeParam p = 0.9;
+  void* bernoulli_data = this->int_data_->mutable_cpu_data();
+  this->RngBernoulliFill(p, bernoulli_data);
+  this->RngBernoulliChecks(p, bernoulli_data);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngGaussianTimesGaussian) {
+  const TypeParam mu = 0;
+  const TypeParam sigma = 1;
+
+  // Sample from 0 mean Gaussian.
+  TypeParam* gaussian_data_1 =
+      static_cast<TypeParam*>(this->data_->mutable_cpu_data());
+  this->RngGaussianFill(mu, sigma, gaussian_data_1);
+
+  // Sample from 0 mean Gaussian again.
+  TypeParam* gaussian_data_2 =
+      static_cast<TypeParam*>(this->data_2_->mutable_cpu_data());
+  this->RngGaussianFill(mu, sigma, gaussian_data_2);
+
+  // Multiply Gaussians.
+  for (int i = 0; i < this->sample_size_; ++i) {
+    gaussian_data_1[i] *= gaussian_data_2[i];
+  }
+
+  // Check that result does not violate checked properties of Gaussian
+  // (though it is not actually a Gaussian).
+  TypeParam mu_product = pow(mu, 2);
+  TypeParam sigma_product = sqrt(pow(sigma, 2) / 2);
+  this->RngGaussianChecks(mu_product, sigma_product, gaussian_data_1);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngUniformTimesUniform) {
+  // Sample from Uniform on [-2, 2].
+  const TypeParam lower_1 = -2;
+  const TypeParam upper_1 = -lower_1;
+  TypeParam* uniform_data_1 =
+      static_cast<TypeParam*>(this->data_->mutable_cpu_data());
+  this->RngUniformFill(lower_1, upper_1, uniform_data_1);
+
+  // Sample from Uniform on [-3, 3].
+  const TypeParam lower_2 = -3;
+  const TypeParam upper_2 = -lower_2;
+  TypeParam* uniform_data_2 =
+      static_cast<TypeParam*>(this->data_2_->mutable_cpu_data());
+  this->RngUniformFill(lower_2, upper_2, uniform_data_2);
+
+  // Multiply Uniforms.
+  for (int i = 0; i < this->sample_size_; ++i) {
+    uniform_data_1[i] *= uniform_data_2[i];
+  }
+
+  // Check that result does not violate checked properties of Uniform on [-6, 6]
+  // (though it is not actually uniformly distributed).
+  const TypeParam lower_prod = lower_1 * upper_2;
+  const TypeParam upper_prod = -lower_prod;
+  this->RngUniformChecks(lower_prod, upper_prod, uniform_data_1);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngGaussianTimesBernoulli) {
+  // Sample from 0 mean Gaussian.
+  const TypeParam mu = 0;
+  const TypeParam sigma = 1;
+  TypeParam* gaussian_data =
+      static_cast<TypeParam*>(this->data_->mutable_cpu_data());
+  this->RngGaussianFill(mu, sigma, gaussian_data);
+
+  // Sample from Bernoulli with p = 0.3.
+  const TypeParam bernoulli_p = 0.3;
+  int* bernoulli_data =
+      static_cast<int*>(this->int_data_->mutable_cpu_data());
+  this->RngBernoulliFill(bernoulli_p, bernoulli_data);
+
+  // Multiply Gaussian by Bernoulli.
+  for (int i = 0; i < this->sample_size_; ++i) {
+    gaussian_data[i] *= bernoulli_data[i];
+  }
+
+  // Check that result does not violate checked properties of sparsified
+  // Gaussian (though it is not actually a Gaussian).
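+  // The extra argument is presumably the expected sparsity: multiplying by a
+  // Bernoulli(0.3) mask zeroes out roughly 70% of the samples.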
+  this->RngGaussianChecks(mu, sigma, gaussian_data, 1 - bernoulli_p);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngUniformTimesBernoulli) {
+  // Sample from Uniform on [-1, 1].
+  const TypeParam lower = -1;
+  const TypeParam upper = 1;
+  TypeParam* uniform_data =
+      static_cast<TypeParam*>(this->data_->mutable_cpu_data());
+  this->RngUniformFill(lower, upper, uniform_data);
+
+  // Sample from Bernoulli with p = 0.3.
+  const TypeParam bernoulli_p = 0.3;
+  int* bernoulli_data =
+      static_cast<int*>(this->int_data_->mutable_cpu_data());
+  this->RngBernoulliFill(bernoulli_p, bernoulli_data);
+
+  // Multiply Uniform by Bernoulli.
+  for (int i = 0; i < this->sample_size_; ++i) {
+    uniform_data[i] *= bernoulli_data[i];
+  }
+
+  // Check that result does not violate checked properties of sparsified
+  // Uniform on [-1, 1] (though it is not actually uniformly distributed).
+  this->RngUniformChecks(lower, upper, uniform_data, 1 - bernoulli_p);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngBernoulliTimesBernoulli) {
+  // Sample from Bernoulli with p = 0.5.
+  const TypeParam p_a = 0.5;
+  int* bernoulli_data_a =
+      static_cast<int*>(this->int_data_->mutable_cpu_data());
+  this->RngBernoulliFill(p_a, bernoulli_data_a);
+
+  // Sample from Bernoulli with p = 0.3.
+  const TypeParam p_b = 0.3;
+  int* bernoulli_data_b =
+      static_cast<int*>(this->int_data_2_->mutable_cpu_data());
+  this->RngBernoulliFill(p_b, bernoulli_data_b);
+
+  // Multiply Bernoullis.
+  for (int i = 0; i < this->sample_size_; ++i) {
+    bernoulli_data_a[i] *= bernoulli_data_b[i];
+  }
+  int num_ones = 0;
+  for (int i = 0; i < this->sample_size_; ++i) {
+    if (bernoulli_data_a[i] != TypeParam(0)) {
+      EXPECT_EQ(TypeParam(1), bernoulli_data_a[i]);
+      ++num_ones;
+    }
+  }
+
+  // Check that resulting product has roughly p_a * p_b ones.
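+  // (With p_a = 0.5 and p_b = 0.3, the product of two independent Bernoullis
+  // is itself Bernoulli with p = 0.15, so true_std = sqrt(0.15 * 0.85), about
+  // 0.357, and the sample mean should fall within mean_bound of 0.15.)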
+  const TypeParam sample_p = this->sample_mean(bernoulli_data_a);
+  const TypeParam true_mean = p_a * p_b;
+  const TypeParam true_std = sqrt(true_mean * (1 - true_mean));
+  const TypeParam bound = this->mean_bound(true_std);
+  EXPECT_NEAR(true_mean, sample_p, bound);
+}
+
+#ifndef CPU_ONLY
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngGaussianGPU) {
+  const TypeParam mu = 0;
+  const TypeParam sigma = 1;
+  void* gaussian_gpu_data = this->data_->mutable_gpu_data();
+  this->RngGaussianFillGPU(mu, sigma, gaussian_gpu_data);
+  const void* gaussian_data = this->data_->cpu_data();
+  this->RngGaussianChecks(mu, sigma, gaussian_data);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngGaussian2GPU) {
+  const TypeParam mu = -2;
+  const TypeParam sigma = 3;
+  void* gaussian_gpu_data = this->data_->mutable_gpu_data();
+  this->RngGaussianFillGPU(mu, sigma, gaussian_gpu_data);
+  const void* gaussian_data = this->data_->cpu_data();
+  this->RngGaussianChecks(mu, sigma, gaussian_data);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngUniformGPU) {
+  const TypeParam lower = 0;
+  const TypeParam upper = 1;
+  void* uniform_gpu_data = this->data_->mutable_gpu_data();
+  this->RngUniformFillGPU(lower, upper, uniform_gpu_data);
+  const void* uniform_data = this->data_->cpu_data();
+  this->RngUniformChecks(lower, upper, uniform_data);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngUniform2GPU) {
+  const TypeParam lower = -7.3;
+  const TypeParam upper = -2.3;
+  void* uniform_gpu_data = this->data_->mutable_gpu_data();
+  this->RngUniformFillGPU(lower, upper, uniform_gpu_data);
+  const void* uniform_data = this->data_->cpu_data();
+  this->RngUniformChecks(lower, upper, uniform_data);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngUniformIntGPU) {
+  unsigned int* uniform_uint_gpu_data =
+      static_cast<unsigned int*>(this->int_data_->mutable_gpu_data());
+  this->RngUniformIntFillGPU(uniform_uint_gpu_data);
+  const unsigned int* uniform_uint_data =
+      static_cast<const unsigned int*>(this->int_data_->cpu_data());
+  TypeParam* uniform_data =
+      static_cast<TypeParam*>(this->data_->mutable_cpu_data());
+  for (int i = 0; i < this->sample_size_; ++i) {
+    uniform_data[i] = static_cast<TypeParam>(uniform_uint_data[i]);
+  }
+  const TypeParam lower = 0;
+  const TypeParam upper = UINT_MAX;
+  this->RngUniformChecks(lower, upper, uniform_data);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngGaussianTimesGaussianGPU) {
+  const TypeParam mu = 0;
+  const TypeParam sigma = 1;
+
+  // Sample from 0 mean Gaussian.
+  TypeParam* gaussian_gpu_data_1 =
+      static_cast<TypeParam*>(this->data_->mutable_gpu_data());
+  this->RngGaussianFillGPU(mu, sigma, gaussian_gpu_data_1);
+
+  // Sample from 0 mean Gaussian again.
+  TypeParam* gaussian_gpu_data_2 =
+      static_cast<TypeParam*>(this->data_2_->mutable_gpu_data());
+  this->RngGaussianFillGPU(mu, sigma, gaussian_gpu_data_2);
+
+  // Multiply Gaussians.
+  TypeParam* gaussian_data_1 =
+      static_cast<TypeParam*>(this->data_->mutable_cpu_data());
+  const TypeParam* gaussian_data_2 =
+      static_cast<const TypeParam*>(this->data_2_->cpu_data());
+  for (int i = 0; i < this->sample_size_; ++i) {
+    gaussian_data_1[i] *= gaussian_data_2[i];
+  }
+
+  // Check that result does not violate checked properties of Gaussian
+  // (though it is not actually a Gaussian).
+  TypeParam mu_product = pow(mu, 2);
+  TypeParam sigma_product = sqrt(pow(sigma, 2) / 2);
+  this->RngGaussianChecks(mu_product, sigma_product, gaussian_data_1);
+}
+
+
+TYPED_TEST(RandomNumberGeneratorTest, TestRngUniformTimesUniformGPU) {
+  // Sample from Uniform on [-2, 2].
+  const TypeParam lower_1 = -2;
+  const TypeParam upper_1 = -lower_1;
+  TypeParam* uniform_gpu_data_1 =
+      static_cast<TypeParam*>(this->data_->mutable_gpu_data());
+  this->RngUniformFillGPU(lower_1, upper_1, uniform_gpu_data_1);
+
+  // Sample from Uniform on [-3, 3].
+  const TypeParam lower_2 = -3;
+  const TypeParam upper_2 = -lower_2;
+  TypeParam* uniform_gpu_data_2 =
+      static_cast<TypeParam*>(this->data_2_->mutable_gpu_data());
+  this->RngUniformFillGPU(lower_2, upper_2, uniform_gpu_data_2);
+
+  // Multiply Uniforms.
+  TypeParam* uniform_data_1 =
+      static_cast<TypeParam*>(this->data_->mutable_cpu_data());
+  const TypeParam* uniform_data_2 =
+      static_cast<const TypeParam*>(this->data_2_->cpu_data());
+  for (int i = 0; i < this->sample_size_; ++i) {
+    uniform_data_1[i] *= uniform_data_2[i];
+  }
+
+  // Check that result does not violate checked properties of Uniform on [-6, 6]
+  // (though it is not actually uniformly distributed).
+  const TypeParam lower_prod = lower_1 * upper_2;
+  const TypeParam upper_prod = -lower_prod;
+  this->RngUniformChecks(lower_prod, upper_prod, uniform_data_1);
+}
+
+#endif
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_reduction_layer.cpp b/src/caffe/test/test_reduction_layer.cpp
new file mode 100644
index 0000000..f568a18
--- /dev/null
+++ b/src/caffe/test/test_reduction_layer.cpp
@@ -0,0 +1,297 @@
+#include <algorithm>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class ReductionLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  ReductionLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 4, 5)),
+        blob_top_(new Blob<Dtype>()) {
+    // fill the values
+    Caffe::set_random_seed(1701);
+    FillerParameter filler_param;
+    UniformFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~ReductionLayerTest() {
+    delete blob_bottom_;
+    delete blob_top_;
+  }
+
+  void TestForward(ReductionParameter_ReductionOp op,
+                   float coeff = 1, int axis = 0) {
+    LayerParameter layer_param;
+    ReductionParameter* reduction_param = layer_param.mutable_reduction_param();
+    reduction_param->set_operation(op);
+    if (coeff != 1.0) { reduction_param->set_coeff(coeff); }
+    if (axis != 0) { reduction_param->set_axis(axis); }
+    shared_ptr<ReductionLayer<Dtype> > layer(
+        new ReductionLayer<Dtype>(layer_param));
+    layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+    layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    const Dtype* in_data = this->blob_bottom_->cpu_data();
+    const int num = this->blob_bottom_->count(0, axis);
+    const int dim = this->blob_bottom_->count(axis);
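+    // For the 2x3x4x5 bottom blob used in these tests, axis = 0 gives
+    // num = 1 and dim = 120 (one global reduction), while axis = 1 gives
+    // num = 2 and dim = 3 * 4 * 5 = 60 (one reduction per sample).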
+    for (int n = 0; n < num; ++n) {
+      Dtype expected_result = 0;
+      for (int d = 0; d < dim; ++d) {
+        switch (op) {
+          case ReductionParameter_ReductionOp_SUM:
+            expected_result += *in_data;
+            break;
+          case ReductionParameter_ReductionOp_MEAN:
+            expected_result += *in_data / dim;
+            break;
+          case ReductionParameter_ReductionOp_ASUM:
+            expected_result += fabs(*in_data);
+            break;
+          case ReductionParameter_ReductionOp_SUMSQ:
+            expected_result += (*in_data) * (*in_data);
+            break;
+          default:
+            LOG(FATAL) << "Unknown reduction op: "
+                << ReductionParameter_ReductionOp_Name(op);
+        }
+        ++in_data;
+      }
+      expected_result *= coeff;
+      const Dtype computed_result = this->blob_top_->cpu_data()[n];
+      EXPECT_FLOAT_EQ(expected_result, computed_result)
+          << "Incorrect result computed with op "
+          << ReductionParameter_ReductionOp_Name(op) << ", coeff " << coeff;
+    }
+  }
+
+  void TestGradient(ReductionParameter_ReductionOp op,
+                    float coeff = 1, int axis = 0) {
+    typedef typename TypeParam::Dtype Dtype;
+    LayerParameter layer_param;
+    ReductionParameter* reduction_param = layer_param.mutable_reduction_param();
+    reduction_param->set_operation(op);
+    reduction_param->set_coeff(coeff);
+    reduction_param->set_axis(axis);
+    ReductionLayer<Dtype> layer(layer_param);
+    GradientChecker<Dtype> checker(1e-2, 2e-3);
+    checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+        this->blob_top_vec_);
+  }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(ReductionLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(ReductionLayerTest, TestSetUp) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  shared_ptr<ReductionLayer<Dtype> > layer(
+      new ReductionLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  ASSERT_EQ(this->blob_top_->num_axes(), 0);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSetUpWithAxis1) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_reduction_param()->set_axis(1);
+  shared_ptr<ReductionLayer<Dtype> > layer(
+      new ReductionLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  ASSERT_EQ(this->blob_top_->num_axes(), 1);
+  EXPECT_EQ(this->blob_top_->shape(0), 2);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSetUpWithAxis2) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_reduction_param()->set_axis(2);
+  shared_ptr<ReductionLayer<Dtype> > layer(
+      new ReductionLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  ASSERT_EQ(this->blob_top_->num_axes(), 2);
+  EXPECT_EQ(this->blob_top_->shape(0), 2);
+  EXPECT_EQ(this->blob_top_->shape(1), 3);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSum) {
+  const ReductionParameter_ReductionOp kOp = ReductionParameter_ReductionOp_SUM;
+  this->TestForward(kOp);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSumCoeff) {
+  const ReductionParameter_ReductionOp kOp = ReductionParameter_ReductionOp_SUM;
+  const float kCoeff = 2.3;
+  this->TestForward(kOp, kCoeff);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSumCoeffAxis1) {
+  const ReductionParameter_ReductionOp kOp = ReductionParameter_ReductionOp_SUM;
+  const float kCoeff = 2.3;
+  const int kAxis = 1;
+  this->TestForward(kOp, kCoeff, kAxis);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSumGradient) {
+  const ReductionParameter_ReductionOp kOp = ReductionParameter_ReductionOp_SUM;
+  this->TestGradient(kOp);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSumCoeffGradient) {
+  const ReductionParameter_ReductionOp kOp = ReductionParameter_ReductionOp_SUM;
+  const float kCoeff = 2.3;
+  this->TestGradient(kOp, kCoeff);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSumCoeffAxis1Gradient) {
+  const ReductionParameter_ReductionOp kOp = ReductionParameter_ReductionOp_SUM;
+  const float kCoeff = 2.3;
+  const int kAxis = 1;
+  this->TestGradient(kOp, kCoeff, kAxis);
+}
+
+TYPED_TEST(ReductionLayerTest, TestMean) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_MEAN;
+  this->TestForward(kOp);
+}
+
+TYPED_TEST(ReductionLayerTest, TestMeanCoeff) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_MEAN;
+  const float kCoeff = 2.3;
+  this->TestForward(kOp, kCoeff);
+}
+
+TYPED_TEST(ReductionLayerTest, TestMeanCoeffAxis1) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_MEAN;
+  const float kCoeff = 2.3;
+  const int kAxis = 1;
+  this->TestForward(kOp, kCoeff, kAxis);
+}
+
+TYPED_TEST(ReductionLayerTest, TestMeanGradient) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_MEAN;
+  this->TestGradient(kOp);
+}
+
+TYPED_TEST(ReductionLayerTest, TestMeanCoeffGradient) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_MEAN;
+  const float kCoeff = 2.3;
+  this->TestGradient(kOp, kCoeff);
+}
+
+TYPED_TEST(ReductionLayerTest, TestMeanCoeffGradientAxis1) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_MEAN;
+  const float kCoeff = 2.3;
+  const int kAxis = 1;
+  this->TestGradient(kOp, kCoeff, kAxis);
+}
+
+TYPED_TEST(ReductionLayerTest, TestAbsSum) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_ASUM;
+  this->TestForward(kOp);
+}
+
+TYPED_TEST(ReductionLayerTest, TestAbsSumCoeff) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_ASUM;
+  const float kCoeff = 2.3;
+  this->TestForward(kOp, kCoeff);
+}
+
+TYPED_TEST(ReductionLayerTest, TestAbsSumCoeffAxis1) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_ASUM;
+  const float kCoeff = 2.3;
+  const int kAxis = 1;
+  this->TestForward(kOp, kCoeff, kAxis);
+}
+
+TYPED_TEST(ReductionLayerTest, TestAbsSumGradient) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_ASUM;
+  this->TestGradient(kOp);
+}
+
+TYPED_TEST(ReductionLayerTest, TestAbsSumCoeffGradient) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_ASUM;
+  const float kCoeff = 2.3;
+  this->TestGradient(kOp, kCoeff);
+}
+
+TYPED_TEST(ReductionLayerTest, TestAbsSumCoeffAxis1Gradient) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_ASUM;
+  const float kCoeff = 2.3;
+  const int kAxis = 1;
+  this->TestGradient(kOp, kCoeff, kAxis);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSumOfSquares) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_SUMSQ;
+  this->TestForward(kOp);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSumOfSquaresCoeff) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_SUMSQ;
+  const float kCoeff = 2.3;
+  this->TestForward(kOp, kCoeff);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSumOfSquaresCoeffAxis1) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_SUMSQ;
+  const float kCoeff = 2.3;
+  const int kAxis = 1;
+  this->TestForward(kOp, kCoeff, kAxis);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSumOfSquaresGradient) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_SUMSQ;
+  this->TestGradient(kOp);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSumOfSquaresCoeffGradient) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_SUMSQ;
+  const float kCoeff = 2.3;
+  this->TestGradient(kOp, kCoeff);
+}
+
+TYPED_TEST(ReductionLayerTest, TestSumOfSquaresCoeffAxis1Gradient) {
+  const ReductionParameter_ReductionOp kOp =
+      ReductionParameter_ReductionOp_SUMSQ;
+  const float kCoeff = 2.3;
+  const int kAxis = 1;
+  this->TestGradient(kOp, kCoeff, kAxis);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_reshape_layer.cpp b/src/caffe/test/test_reshape_layer.cpp
new file mode 100644
index 0000000..9d08ec6
--- /dev/null
+++ b/src/caffe/test/test_reshape_layer.cpp
@@ -0,0 +1,280 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/common_layers.hpp"
+#include "caffe/filler.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class ReshapeLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+ protected:
+  ReshapeLayerTest()
+    : blob_bottom_(new Blob<Dtype>(2, 3, 6, 5)),
+      blob_top_(new Blob<Dtype>()) {
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+
+  virtual ~ReshapeLayerTest() { delete blob_bottom_; delete blob_top_; }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(ReshapeLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(ReshapeLayerTest, TestFlattenOutputSizes) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  BlobShape* blob_shape = layer_param.mutable_reshape_param()->mutable_shape();
+  blob_shape->add_dim(0);
+  blob_shape->add_dim(-1);
+  blob_shape->add_dim(1);
+  blob_shape->add_dim(1);
+
+  ReshapeLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 3 * 6 * 5);
+  EXPECT_EQ(this->blob_top_->height(), 1);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+}
+
+TYPED_TEST(ReshapeLayerTest, TestFlattenValues) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  BlobShape* blob_shape = layer_param.mutable_reshape_param()->mutable_shape();
+  blob_shape->add_dim(0);
+  blob_shape->add_dim(-1);
+  blob_shape->add_dim(1);
+  blob_shape->add_dim(1);
+  ReshapeLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
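+  // The flattened channel index c maps back to the original (channel, height,
+  // width) coordinates of the 3x6x5 block in row-major order:
+  // channel = c / (6 * 5), height = (c / 5) % 6, width = c % 5.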
+  for (int c = 0; c < 3 * 6 * 5; ++c) {
+    EXPECT_EQ(this->blob_top_->data_at(0, c, 0, 0),
+        this->blob_bottom_->data_at(0, c / (6 * 5), (c / 5) % 6, c % 5));
+    EXPECT_EQ(this->blob_top_->data_at(1, c, 0, 0),
+        this->blob_bottom_->data_at(1, c / (6 * 5), (c / 5) % 6, c % 5));
+  }
+}
+
+// Test whether setting output dimensions to 0 either explicitly or implicitly
+// copies the respective dimension of the input layer.
+TYPED_TEST(ReshapeLayerTest, TestCopyDimensions) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  BlobShape* blob_shape = layer_param.mutable_reshape_param()->mutable_shape();
+  blob_shape->add_dim(0);
+  blob_shape->add_dim(0);
+  blob_shape->add_dim(0);
+  blob_shape->add_dim(0);
+  ReshapeLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 3);
+  EXPECT_EQ(this->blob_top_->height(), 6);
+  EXPECT_EQ(this->blob_top_->width(), 5);
+}
+
+// When a dimension is set to -1, we should infer its value from the other
+// dimensions (including those that get copied from below).
+TYPED_TEST(ReshapeLayerTest, TestInferenceOfUnspecified) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  BlobShape* blob_shape = layer_param.mutable_reshape_param()->mutable_shape();
+  blob_shape->add_dim(0);
+  blob_shape->add_dim(3);
+  blob_shape->add_dim(10);
+  blob_shape->add_dim(-1);
+
+  // Count is 180, thus the inferred width should be 180 / (2*3*10) = 3.
+
+  ReshapeLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 3);
+  EXPECT_EQ(this->blob_top_->height(), 10);
+  EXPECT_EQ(this->blob_top_->width(), 3);
+}
+
+TYPED_TEST(ReshapeLayerTest, TestInferenceOfUnspecifiedWithStartAxis) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_reshape_param()->set_axis(1);
+  BlobShape* blob_shape = layer_param.mutable_reshape_param()->mutable_shape();
+  blob_shape->add_dim(3);
+  blob_shape->add_dim(10);
+  blob_shape->add_dim(-1);
+
+  ReshapeLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  ASSERT_EQ(this->blob_top_->num_axes(), 4);
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 3);
+  EXPECT_EQ(this->blob_top_->height(), 10);
+  EXPECT_EQ(this->blob_top_->width(), 3);
+}
+
+TYPED_TEST(ReshapeLayerTest, TestInsertSingletonAxesStart) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_reshape_param()->set_axis(0);
+  layer_param.mutable_reshape_param()->set_num_axes(0);
+  BlobShape* blob_shape = layer_param.mutable_reshape_param()->mutable_shape();
+  blob_shape->add_dim(1);
+  blob_shape->add_dim(1);
+  blob_shape->add_dim(1);
+
+  ReshapeLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  ASSERT_EQ(this->blob_top_->num_axes(), 7);
+  EXPECT_EQ(this->blob_top_->shape(0), 1);
+  EXPECT_EQ(this->blob_top_->shape(1), 1);
+  EXPECT_EQ(this->blob_top_->shape(2), 1);
+  EXPECT_EQ(this->blob_top_->shape(3), 2);
+  EXPECT_EQ(this->blob_top_->shape(4), 3);
+  EXPECT_EQ(this->blob_top_->shape(5), 6);
+  EXPECT_EQ(this->blob_top_->shape(6), 5);
+}
+
+TYPED_TEST(ReshapeLayerTest, TestInsertSingletonAxesMiddle) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_reshape_param()->set_axis(2);
+  layer_param.mutable_reshape_param()->set_num_axes(0);
+  BlobShape* blob_shape = layer_param.mutable_reshape_param()->mutable_shape();
+  blob_shape->add_dim(1);
+  blob_shape->add_dim(1);
+  blob_shape->add_dim(1);
+
+  ReshapeLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  ASSERT_EQ(this->blob_top_->num_axes(), 7);
+  EXPECT_EQ(this->blob_top_->shape(0), 2);
+  EXPECT_EQ(this->blob_top_->shape(1), 3);
+  EXPECT_EQ(this->blob_top_->shape(2), 1);
+  EXPECT_EQ(this->blob_top_->shape(3), 1);
+  EXPECT_EQ(this->blob_top_->shape(4), 1);
+  EXPECT_EQ(this->blob_top_->shape(5), 6);
+  EXPECT_EQ(this->blob_top_->shape(6), 5);
+}
+
+TYPED_TEST(ReshapeLayerTest, TestInsertSingletonAxesEnd) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_reshape_param()->set_axis(-1);
+  layer_param.mutable_reshape_param()->set_num_axes(0);
+  BlobShape* blob_shape = layer_param.mutable_reshape_param()->mutable_shape();
+  blob_shape->add_dim(1);
+  blob_shape->add_dim(1);
+  blob_shape->add_dim(1);
+
+  ReshapeLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  ASSERT_EQ(this->blob_top_->num_axes(), 7);
+  EXPECT_EQ(this->blob_top_->shape(0), 2);
+  EXPECT_EQ(this->blob_top_->shape(1), 3);
+  EXPECT_EQ(this->blob_top_->shape(2), 6);
+  EXPECT_EQ(this->blob_top_->shape(3), 5);
+  EXPECT_EQ(this->blob_top_->shape(4), 1);
+  EXPECT_EQ(this->blob_top_->shape(5), 1);
+  EXPECT_EQ(this->blob_top_->shape(6), 1);
+}
+
+TYPED_TEST(ReshapeLayerTest, TestFlattenMiddle) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_reshape_param()->set_axis(1);
+  layer_param.mutable_reshape_param()->set_num_axes(2);
+  BlobShape* blob_shape = layer_param.mutable_reshape_param()->mutable_shape();
+  blob_shape->add_dim(-1);
+
+  ReshapeLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  ASSERT_EQ(this->blob_top_->num_axes(), 3);
+  EXPECT_EQ(this->blob_top_->shape(0), 2);
+  EXPECT_EQ(this->blob_top_->shape(1), 3 * 6);
+  EXPECT_EQ(this->blob_top_->shape(2), 5);
+}
+
+TYPED_TEST(ReshapeLayerTest, TestForward) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  BlobShape* shape = layer_param.mutable_reshape_param()->mutable_shape();
+  shape->add_dim(6);
+  shape->add_dim(2);
+  shape->add_dim(3);
+  shape->add_dim(5);
+  ReshapeLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_EQ(this->blob_top_->cpu_data()[i],
+              this->blob_bottom_->cpu_data()[i]);
+  }
+}
+
+TYPED_TEST(ReshapeLayerTest, TestForwardAfterReshape) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  BlobShape* shape = layer_param.mutable_reshape_param()->mutable_shape();
+  shape->add_dim(6);
+  shape->add_dim(2);
+  shape->add_dim(3);
+  shape->add_dim(5);
+  ReshapeLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // We know the above produced the correct result from TestForward.
+  // Reshape the bottom and call layer.Reshape, then try again.
+  vector<int> new_bottom_shape(1, 2 * 3 * 6 * 5);
+  this->blob_bottom_->Reshape(new_bottom_shape);
+  layer.Reshape(this->blob_bottom_vec_, this->blob_top_vec_);
+  FillerParameter filler_param;
+  GaussianFiller<Dtype> filler(filler_param);
+  filler.Fill(this->blob_bottom_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_EQ(this->blob_top_->cpu_data()[i],
+              this->blob_bottom_->cpu_data()[i]);
+  }
+}
+
+TYPED_TEST(ReshapeLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  BlobShape* shape = layer_param.mutable_reshape_param()->mutable_shape();
+  shape->add_dim(6);
+  shape->add_dim(2);
+  shape->add_dim(3);
+  shape->add_dim(5);
+  ReshapeLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_sigmoid_cross_entropy_loss_layer.cpp b/src/caffe/test/test_sigmoid_cross_entropy_loss_layer.cpp
new file mode 100644
index 0000000..e5737e4
--- /dev/null
+++ b/src/caffe/test/test_sigmoid_cross_entropy_loss_layer.cpp
@@ -0,0 +1,122 @@
+#include <cmath>
+#include <cstdlib>
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class SigmoidCrossEntropyLossLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  SigmoidCrossEntropyLossLayerTest()
+      : blob_bottom_data_(new Blob<Dtype>(10, 5, 1, 1)),
+        blob_bottom_targets_(new Blob<Dtype>(10, 5, 1, 1)),
+        blob_top_loss_(new Blob<Dtype>()) {
+    // Fill the data vector
+    FillerParameter data_filler_param;
+    data_filler_param.set_std(1);
+    GaussianFiller<Dtype> data_filler(data_filler_param);
+    data_filler.Fill(blob_bottom_data_);
+    blob_bottom_vec_.push_back(blob_bottom_data_);
+    // Fill the targets vector
+    FillerParameter targets_filler_param;
+    targets_filler_param.set_min(0);
+    targets_filler_param.set_max(1);
+    UniformFiller<Dtype> targets_filler(targets_filler_param);
+    targets_filler.Fill(blob_bottom_targets_);
+    blob_bottom_vec_.push_back(blob_bottom_targets_);
+    blob_top_vec_.push_back(blob_top_loss_);
+  }
+  virtual ~SigmoidCrossEntropyLossLayerTest() {
+    delete blob_bottom_data_;
+    delete blob_bottom_targets_;
+    delete blob_top_loss_;
+  }
+
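+  // Reference implementation of the sigmoid cross-entropy loss: each element
+  // contributes -[t * log(sigmoid(x)) + (1 - t) * log(1 - sigmoid(x))], and
+  // the sum is divided by num. The (target == 0) and (target == 1) terms add
+  // 1 inside a log whose coefficient is exactly zero in those cases, so they
+  // guard against log(0) without affecting the value of the loss.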
+  Dtype SigmoidCrossEntropyLossReference(const int count, const int num,
+                                         const Dtype* input,
+                                         const Dtype* target) {
+    Dtype loss = 0;
+    for (int i = 0; i < count; ++i) {
+      const Dtype prediction = 1 / (1 + exp(-input[i]));
+      EXPECT_LE(prediction, 1);
+      EXPECT_GE(prediction, 0);
+      EXPECT_LE(target[i], 1);
+      EXPECT_GE(target[i], 0);
+      loss -= target[i] * log(prediction + (target[i] == Dtype(0)));
+      loss -= (1 - target[i]) * log(1 - prediction + (target[i] == Dtype(1)));
+    }
+    return loss / num;
+  }
+
+  void TestForward() {
+    LayerParameter layer_param;
+    const Dtype kLossWeight = 3.7;
+    layer_param.add_loss_weight(kLossWeight);
+    FillerParameter data_filler_param;
+    data_filler_param.set_std(1);
+    GaussianFiller<Dtype> data_filler(data_filler_param);
+    FillerParameter targets_filler_param;
+    targets_filler_param.set_min(0.0);
+    targets_filler_param.set_max(1.0);
+    UniformFiller<Dtype> targets_filler(targets_filler_param);
+    Dtype eps = 2e-2;
+    for (int i = 0; i < 100; ++i) {
+      // Fill the data vector
+      data_filler.Fill(this->blob_bottom_data_);
+      // Fill the targets vector
+      targets_filler.Fill(this->blob_bottom_targets_);
+      SigmoidCrossEntropyLossLayer<Dtype> layer(layer_param);
+      layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+      Dtype layer_loss =
+          layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+      const int count = this->blob_bottom_data_->count();
+      const int num = this->blob_bottom_data_->num();
+      const Dtype* blob_bottom_data = this->blob_bottom_data_->cpu_data();
+      const Dtype* blob_bottom_targets =
+          this->blob_bottom_targets_->cpu_data();
+      Dtype reference_loss = kLossWeight * SigmoidCrossEntropyLossReference(
+          count, num, blob_bottom_data, blob_bottom_targets);
+      EXPECT_NEAR(reference_loss, layer_loss, eps) << "debug: trial #" << i;
+    }
+  }
+
+  Blob<Dtype>* const blob_bottom_data_;
+  Blob<Dtype>* const blob_bottom_targets_;
+  Blob<Dtype>* const blob_top_loss_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(SigmoidCrossEntropyLossLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(SigmoidCrossEntropyLossLayerTest, TestSigmoidCrossEntropyLoss) {
+  this->TestForward();
+}
+
+TYPED_TEST(SigmoidCrossEntropyLossLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  const Dtype kLossWeight = 3.7;
+  layer_param.add_loss_weight(kLossWeight);
+  SigmoidCrossEntropyLossLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 0);
+}
+
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_slice_layer.cpp b/src/caffe/test/test_slice_layer.cpp
new file mode 100644
index 0000000..ccd0364
--- /dev/null
+++ b/src/caffe/test/test_slice_layer.cpp
@@ -0,0 +1,189 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class SliceLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  SliceLayerTest()
+      : blob_bottom_(new Blob<Dtype>(6, 12, 2, 3)),
+        blob_top_0_(new Blob<Dtype>()),
+        blob_top_1_(new Blob<Dtype>()),
+        blob_top_2_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    // fill the values
+    Caffe::set_random_seed(1701);
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_top_vec_0_.push_back(blob_top_0_);
+    blob_top_vec_0_.push_back(blob_top_1_);
+    blob_top_vec_1_.push_back(blob_top_0_);
+    blob_top_vec_1_.push_back(blob_top_1_);
+    blob_top_vec_1_.push_back(blob_top_2_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+  }
+
+  virtual void ReduceBottomBlobSize() {
+    blob_bottom_->Reshape(4, 5, 2, 2);
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+  }
+
+  virtual ~SliceLayerTest() {
+    delete blob_top_0_; delete blob_top_1_;
+    delete blob_top_2_; delete blob_bottom_;
+  }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_0_;
+  Blob<Dtype>* const blob_top_1_;
+  Blob<Dtype>* const blob_top_2_;
+  vector<Blob<Dtype>*> blob_top_vec_0_, blob_top_vec_1_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+};
+
+TYPED_TEST_CASE(SliceLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(SliceLayerTest, TestSetupNum) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_slice_param()->set_axis(0);
+  SliceLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_1_);
+  EXPECT_EQ(this->blob_bottom_->num(), 3 * this->blob_top_0_->num());
+  EXPECT_EQ(this->blob_top_0_->num(), this->blob_top_1_->num());
+  EXPECT_EQ(this->blob_top_0_->num(), this->blob_top_2_->num());
+  EXPECT_EQ(this->blob_bottom_->channels(), this->blob_top_0_->channels());
+  EXPECT_EQ(this->blob_bottom_->height(), this->blob_top_0_->height());
+  EXPECT_EQ(this->blob_bottom_->width(), this->blob_top_0_->width());
+}
+
+TYPED_TEST(SliceLayerTest, TestSetupChannels) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_slice_param()->add_slice_point(3);
+  SliceLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_0_);
+  EXPECT_EQ(this->blob_top_0_->num(), this->blob_bottom_->num());
+  EXPECT_EQ(this->blob_top_0_->channels(), 3);
+  EXPECT_EQ(this->blob_top_1_->channels(), 9);
+  EXPECT_EQ(this->blob_bottom_->channels(),
+    this->blob_top_0_->channels() + this->blob_top_1_->channels());
+  EXPECT_EQ(this->blob_bottom_->height(), this->blob_top_0_->height());
+  EXPECT_EQ(this->blob_bottom_->width(), this->blob_top_0_->width());
+}
+
+TYPED_TEST(SliceLayerTest, TestSliceAcrossNum) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_slice_param()->set_axis(0);
+  SliceLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_0_);
+  const int top_num = this->blob_bottom_->num() / 2;
+  ASSERT_EQ(top_num, this->blob_top_0_->num());
+  ASSERT_EQ(top_num, this->blob_top_1_->num());
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_0_);
+  for (int n = 0; n < top_num; ++n) {
+    for (int c = 0; c < this->blob_top_0_->channels(); ++c) {
+      for (int h = 0; h < this->blob_bottom_->height(); ++h) {
+        for (int w = 0; w < this->blob_bottom_->width(); ++w) {
+          EXPECT_EQ(this->blob_bottom_->data_at(n, c, h, w),
+                    this->blob_top_0_->data_at(n, c, h, w));
+        }
+      }
+    }
+    for (int c = 0; c < this->blob_top_1_->channels(); ++c) {
+      for (int h = 0; h < this->blob_bottom_->height(); ++h) {
+        for (int w = 0; w < this->blob_bottom_->width(); ++w) {
+          EXPECT_EQ(this->blob_bottom_->data_at(n + 3, c, h, w),
+                    this->blob_top_1_->data_at(n, c, h, w));
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST(SliceLayerTest, TestSliceAcrossChannels) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  // Slice at 2, 8: should produce output blobs with #channels 2, 6, 4.
+  const int kSlicePoint0 = 2;
+  const int kSlicePoint1 = 8;
+  layer_param.mutable_slice_param()->add_slice_point(kSlicePoint0);
+  layer_param.mutable_slice_param()->add_slice_point(kSlicePoint1);
+  SliceLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_1_);
+  ASSERT_EQ(kSlicePoint0, this->blob_top_0_->channels());
+  ASSERT_EQ(kSlicePoint1 - kSlicePoint0, this->blob_top_1_->channels());
+  ASSERT_EQ(this->blob_bottom_->channels() - kSlicePoint1,
+            this->blob_top_2_->channels());
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_1_);
+  for (int n = 0; n < this->blob_bottom_->num(); ++n) {
+    for (int c = 0; c < this->blob_top_0_->channels(); ++c) {
+      for (int h = 0; h < this->blob_bottom_->height(); ++h) {
+        for (int w = 0; w < this->blob_bottom_->width(); ++w) {
+          EXPECT_EQ(this->blob_bottom_->data_at(n, c, h, w),
+              this->blob_top_0_->data_at(n, c, h, w));
+        }
+      }
+    }
+    for (int c = 0; c < this->blob_top_1_->channels(); ++c) {
+      for (int h = 0; h < this->blob_bottom_->height(); ++h) {
+        for (int w = 0; w < this->blob_bottom_->width(); ++w) {
+          EXPECT_EQ(this->blob_bottom_->data_at(n, c + kSlicePoint0, h, w),
+              this->blob_top_1_->data_at(n, c, h, w));
+        }
+      }
+    }
+    for (int c = 0; c < this->blob_top_2_->channels(); ++c) {
+      for (int h = 0; h < this->blob_bottom_->height(); ++h) {
+        for (int w = 0; w < this->blob_bottom_->width(); ++w) {
+          EXPECT_EQ(this->blob_bottom_->data_at(n, c + kSlicePoint1, h, w),
+              this->blob_top_2_->data_at(n, c, h, w));
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST(SliceLayerTest, TestGradientAcrossNum) {
+  typedef typename TypeParam::Dtype Dtype;
+  // Gradient checks are slow; reduce blob size.
+  this->ReduceBottomBlobSize();
+  LayerParameter layer_param;
+  layer_param.mutable_slice_param()->set_axis(0);
+  SliceLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+    this->blob_top_vec_0_);
+}
+
+TYPED_TEST(SliceLayerTest, TestGradientAcrossChannels) {
+  typedef typename TypeParam::Dtype Dtype;
+  // Gradient checks are slow; reduce blob size.
+  this->ReduceBottomBlobSize();
+  LayerParameter layer_param;
+  const int kSlicePoint = 4;
+  layer_param.mutable_slice_param()->add_slice_point(kSlicePoint);
+  SliceLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+    this->blob_top_vec_0_);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_softmax_layer.cpp b/src/caffe/test/test_softmax_layer.cpp
new file mode 100644
index 0000000..996da4b
--- /dev/null
+++ b/src/caffe/test/test_softmax_layer.cpp
@@ -0,0 +1,149 @@
+#include <cmath>
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class SoftmaxLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+ protected:
+  SoftmaxLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 10, 2, 3)),
+        blob_top_(new Blob<Dtype>()) {
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~SoftmaxLayerTest() { delete blob_bottom_; delete blob_top_; }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(SoftmaxLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(SoftmaxLayerTest, TestForward) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  SoftmaxLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Test sum
+  for (int i = 0; i < this->blob_bottom_->num(); ++i) {
+    for (int k = 0; k < this->blob_bottom_->height(); ++k) {
+      for (int l = 0; l < this->blob_bottom_->width(); ++l) {
+        Dtype sum = 0;
+        for (int j = 0; j < this->blob_top_->channels(); ++j) {
+          sum += this->blob_top_->data_at(i, j, k, l);
+        }
+        EXPECT_GE(sum, 0.999);
+        EXPECT_LE(sum, 1.001);
+        // Test exact values
+        Dtype scale = 0;
+        for (int j = 0; j < this->blob_bottom_->channels(); ++j) {
+          scale += exp(this->blob_bottom_->data_at(i, j, k, l));
+        }
+        for (int j = 0; j < this->blob_bottom_->channels(); ++j) {
+          EXPECT_GE(this->blob_top_->data_at(i, j, k, l) + 1e-4,
+              exp(this->blob_bottom_->data_at(i, j, k, l)) / scale)
+              << "debug: " << i << " " << j;
+          EXPECT_LE(this->blob_top_->data_at(i, j, k, l) - 1e-4,
+              exp(this->blob_bottom_->data_at(i, j, k, l)) / scale)
+              << "debug: " << i << " " << j;
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST(SoftmaxLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  SoftmaxLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+#ifdef USE_CUDNN
+template <typename Dtype>
+class CuDNNSoftmaxLayerTest : public GPUDeviceTest<Dtype> {
+ protected:
+  CuDNNSoftmaxLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 10, 2, 3)),
+        blob_top_(new Blob<Dtype>()) {
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~CuDNNSoftmaxLayerTest() { delete blob_bottom_; delete blob_top_; }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(CuDNNSoftmaxLayerTest, TestDtypes);
+
+TYPED_TEST(CuDNNSoftmaxLayerTest, TestForwardCuDNN) {
+  LayerParameter layer_param;
+  CuDNNSoftmaxLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Test sum
+  for (int i = 0; i < this->blob_bottom_->num(); ++i) {
+    for (int k = 0; k < this->blob_bottom_->height(); ++k) {
+      for (int l = 0; l < this->blob_bottom_->width(); ++l) {
+        TypeParam sum = 0;
+        for (int j = 0; j < this->blob_top_->channels(); ++j) {
+          sum += this->blob_top_->data_at(i, j, k, l);
+        }
+        EXPECT_GE(sum, 0.999);
+        EXPECT_LE(sum, 1.001);
+        // Test exact values
+        TypeParam scale = 0;
+        for (int j = 0; j < this->blob_bottom_->channels(); ++j) {
+          scale += exp(this->blob_bottom_->data_at(i, j, k, l));
+        }
+        for (int j = 0; j < this->blob_bottom_->channels(); ++j) {
+          EXPECT_GE(this->blob_top_->data_at(i, j, k, l) + 1e-4,
+              exp(this->blob_bottom_->data_at(i, j, k, l)) / scale)
+              << "debug: " << i << " " << j;
+          EXPECT_LE(this->blob_top_->data_at(i, j, k, l) - 1e-4,
+              exp(this->blob_bottom_->data_at(i, j, k, l)) / scale)
+              << "debug: " << i << " " << j;
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST(CuDNNSoftmaxLayerTest, TestGradientCuDNN) {
+  LayerParameter layer_param;
+  CuDNNSoftmaxLayer<TypeParam> layer(layer_param);
+  GradientChecker<TypeParam> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+#endif
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_softmax_with_loss_layer.cpp b/src/caffe/test/test_softmax_with_loss_layer.cpp
new file mode 100644
index 0000000..1498d5c
--- /dev/null
+++ b/src/caffe/test/test_softmax_with_loss_layer.cpp
@@ -0,0 +1,110 @@
+#include <cmath>
+#include <cstdlib>
+#include <cstring>
+#include <vector>
+
+#include "boost/scoped_ptr.hpp"
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+using boost::scoped_ptr;
+
+namespace caffe {
+
+template <typename TypeParam>
+class SoftmaxWithLossLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  SoftmaxWithLossLayerTest()
+      : blob_bottom_data_(new Blob<Dtype>(10, 5, 2, 3)),
+        blob_bottom_label_(new Blob<Dtype>(10, 1, 2, 3)),
+        blob_top_loss_(new Blob<Dtype>()) {
+    // fill the values
+    FillerParameter filler_param;
+    filler_param.set_std(10);
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_data_);
+    blob_bottom_vec_.push_back(blob_bottom_data_);
+    for (int i = 0; i < blob_bottom_label_->count(); ++i) {
+      blob_bottom_label_->mutable_cpu_data()[i] = caffe_rng_rand() % 5;
+    }
+    blob_bottom_vec_.push_back(blob_bottom_label_);
+    blob_top_vec_.push_back(blob_top_loss_);
+  }
+  virtual ~SoftmaxWithLossLayerTest() {
+    delete blob_bottom_data_;
+    delete blob_bottom_label_;
+    delete blob_top_loss_;
+  }
+  Blob<Dtype>* const blob_bottom_data_;
+  Blob<Dtype>* const blob_bottom_label_;
+  Blob<Dtype>* const blob_top_loss_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(SoftmaxWithLossLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(SoftmaxWithLossLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.add_loss_weight(3);
+  SoftmaxWithLossLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 0);
+}
+
+TYPED_TEST(SoftmaxWithLossLayerTest, TestForwardIgnoreLabel) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_loss_param()->set_normalize(false);
+  // First, compute the loss with all labels
+  scoped_ptr<SoftmaxWithLossLayer<Dtype> > layer(
+      new SoftmaxWithLossLayer<Dtype>(layer_param));
+  layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  Dtype full_loss = this->blob_top_loss_->cpu_data()[0];
+  // Now, accumulate the loss, ignoring each label in {0, ..., 4} in turn.
+  Dtype accum_loss = 0;
+  for (int label = 0; label < 5; ++label) {
+    layer_param.mutable_loss_param()->set_ignore_label(label);
+    layer.reset(new SoftmaxWithLossLayer<Dtype>(layer_param));
+    layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+    layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    accum_loss += this->blob_top_loss_->cpu_data()[0];
+  }
+  // Each sample's label is ignored in exactly one of the five passes, so the
+  // accumulated loss should be 4 times the loss with no label ignored.
+  EXPECT_NEAR(4 * full_loss, accum_loss, 1e-4);
+}
+
+TYPED_TEST(SoftmaxWithLossLayerTest, TestGradientIgnoreLabel) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  // labels are in {0, ..., 4}, so we'll ignore about a fifth of them
+  layer_param.mutable_loss_param()->set_ignore_label(0);
+  SoftmaxWithLossLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 0);
+}
+
+TYPED_TEST(SoftmaxWithLossLayerTest, TestGradientUnnormalized) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_loss_param()->set_normalize(false);
+  SoftmaxWithLossLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_, 0);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_solver.cpp b/src/caffe/test/test_solver.cpp
new file mode 100644
index 0000000..ceabc9c
--- /dev/null
+++ b/src/caffe/test/test_solver.cpp
@@ -0,0 +1,108 @@
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "google/protobuf/text_format.h"
+#include "gtest/gtest.h"
+
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/solver.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+using std::ostringstream;
+
+namespace caffe {
+
+template <typename TypeParam>
+class SolverTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  virtual void InitSolverFromProtoString(const string& proto) {
+    SolverParameter param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(proto, &param));
+    // Set the solver_mode according to current Caffe::mode.
+    switch (Caffe::mode()) {
+      case Caffe::CPU:
+        param.set_solver_mode(SolverParameter_SolverMode_CPU);
+        break;
+      case Caffe::GPU:
+        param.set_solver_mode(SolverParameter_SolverMode_GPU);
+        break;
+      default:
+        LOG(FATAL) << "Unknown Caffe mode: " << Caffe::mode();
+    }
+    solver_.reset(new SGDSolver<Dtype>(param));
+  }
+
+  shared_ptr<Solver<Dtype> > solver_;
+};
+
+TYPED_TEST_CASE(SolverTest, TestDtypesAndDevices);
+
+TYPED_TEST(SolverTest, TestInitTrainTestNets) {
+  const string& proto =
+     "test_interval: 10 "
+     "test_iter: 10 "
+     "test_state: { stage: 'with-softmax' }"
+     "test_iter: 10 "
+     "test_state: {}"
+     "net_param { "
+     "  name: 'TestNetwork' "
+     "  layer { "
+     "    name: 'data' "
+     "    type: 'DummyData' "
+     "    dummy_data_param { "
+     "      shape { "
+     "        dim: 5 "
+     "        dim: 2 "
+     "        dim: 3 "
+     "        dim: 4 "
+     "      } "
+     "      shape { "
+     "        dim: 5 "
+     "      } "
+     "    } "
+     "    top: 'data' "
+     "    top: 'label' "
+     "  } "
+     "  layer { "
+     "    name: 'innerprod' "
+     "    type: 'InnerProduct' "
+     "    inner_product_param { "
+     "      num_output: 10 "
+     "    } "
+     "    bottom: 'data' "
+     "    top: 'innerprod' "
+     "  } "
+     "  layer { "
+     "    name: 'accuracy' "
+     "    type: 'Accuracy' "
+     "    bottom: 'innerprod' "
+     "    bottom: 'label' "
+     "    top: 'accuracy' "
+     "    exclude: { phase: TRAIN } "
+     "  } "
+     "  layer { "
+     "    name: 'loss' "
+     "    type: 'SoftmaxWithLoss' "
+     "    bottom: 'innerprod' "
+     "    bottom: 'label' "
+     "    include: { phase: TRAIN } "
+     "    include: { phase: TEST stage: 'with-softmax' } "
+     "  } "
+     "} ";
+  this->InitSolverFromProtoString(proto);
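+  // With the include/exclude rules above, the expectation is: the train net
+  // keeps 'loss' (included in TRAIN) and drops 'accuracy' (excluded in TRAIN);
+  // the first test net matches test_state 'with-softmax', so it keeps both;
+  // the second test net has an empty test_state, so it keeps only 'accuracy'.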
+  ASSERT_TRUE(this->solver_->net() != NULL);
+  EXPECT_TRUE(this->solver_->net()->has_layer("loss"));
+  EXPECT_FALSE(this->solver_->net()->has_layer("accuracy"));
+  ASSERT_EQ(2, this->solver_->test_nets().size());
+  EXPECT_TRUE(this->solver_->test_nets()[0]->has_layer("loss"));
+  EXPECT_TRUE(this->solver_->test_nets()[0]->has_layer("accuracy"));
+  EXPECT_FALSE(this->solver_->test_nets()[1]->has_layer("loss"));
+  EXPECT_TRUE(this->solver_->test_nets()[1]->has_layer("accuracy"));
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_split_layer.cpp b/src/caffe/test/test_split_layer.cpp
new file mode 100644
index 0000000..be5204b
--- /dev/null
+++ b/src/caffe/test/test_split_layer.cpp
@@ -0,0 +1,1045 @@
+#include <cstring>
+#include <string>
+#include <vector>
+
+#include "google/protobuf/text_format.h"
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/insert_splits.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class SplitLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  SplitLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 6, 5)),
+        blob_top_a_(new Blob<Dtype>()),
+        blob_top_b_(new Blob<Dtype>()) {
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_a_);
+    blob_top_vec_.push_back(blob_top_b_);
+  }
+  virtual ~SplitLayerTest() {
+    delete blob_bottom_;
+    delete blob_top_a_;
+    delete blob_top_b_;
+  }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_a_;
+  Blob<Dtype>* const blob_top_b_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(SplitLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(SplitLayerTest, TestSetup) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  SplitLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_a_->num(), 2);
+  EXPECT_EQ(this->blob_top_a_->channels(), 3);
+  EXPECT_EQ(this->blob_top_a_->height(), 6);
+  EXPECT_EQ(this->blob_top_a_->width(), 5);
+  EXPECT_EQ(this->blob_top_b_->num(), 2);
+  EXPECT_EQ(this->blob_top_b_->channels(), 3);
+  EXPECT_EQ(this->blob_top_b_->height(), 6);
+  EXPECT_EQ(this->blob_top_b_->width(), 5);
+}
+
+TYPED_TEST(SplitLayerTest, Test) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  SplitLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    Dtype bottom_value = this->blob_bottom_->cpu_data()[i];
+    EXPECT_EQ(bottom_value, this->blob_top_a_->cpu_data()[i]);
+    EXPECT_EQ(bottom_value, this->blob_top_b_->cpu_data()[i]);
+  }
+}
+
+TYPED_TEST(SplitLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  SplitLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-2, 1e-2);
+  checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+
+class SplitLayerInsertionTest : public ::testing::Test {
+ protected:
+  void RunInsertionTest(
+      const string& input_param_string, const string& output_param_string) {
+    // Test that InsertSplits called on the proto specified by
+    // input_param_string results in the proto specified by
+    // output_param_string.
+    NetParameter input_param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(
+        input_param_string, &input_param));
+    NetParameter expected_output_param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(
+        output_param_string, &expected_output_param));
+    NetParameter actual_output_param;
+    InsertSplits(input_param, &actual_output_param);
+    EXPECT_EQ(expected_output_param.DebugString(),
+        actual_output_param.DebugString());
+    // Also test idempotence.
+    NetParameter double_split_insert_param;
+    InsertSplits(actual_output_param, &double_split_insert_param);
+    EXPECT_EQ(actual_output_param.DebugString(),
+       double_split_insert_param.DebugString());
+  }
+};
+
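+// The tests below check InsertSplits on small NetParameter protos: first cases
+// where no split is needed, then cases where a blob consumed by several layers
+// gets a Split layer named '<blob>_<producing layer>_<top index>_split' with
+// tops carrying an extra '_<copy index>' suffix (e.g. 'data_data_0_split_0').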
+TEST_F(SplitLayerInsertionTest, TestNoInsertion1) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunInsertionTest(input_proto, input_proto);
+}
+
+TEST_F(SplitLayerInsertionTest, TestNoInsertion2) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'data_split' "
+      "  type: 'Split' "
+      "  bottom: 'data' "
+      "  top: 'data_split_0' "
+      "  top: 'data_split_1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod1' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data_split_0' "
+      "  top: 'innerprod1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod2' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data_split_1' "
+      "  top: 'innerprod2' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod1' "
+      "  bottom: 'innerprod2' "
+      "} ";
+  this->RunInsertionTest(input_proto, input_proto);
+}
+
+TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
+  const string& input_proto =
+      "name: 'CaffeNet' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  data_param { "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    batch_size: 256 "
+      "  } "
+      "  transform_param { "
+      "    crop_size: 227 "
+      "    mirror: true "
+      "    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'conv1' "
+      "  type: 'Convolution' "
+      "  convolution_param { "
+      "    num_output: 96 "
+      "    kernel_size: 11 "
+      "    stride: 4 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "} "
+      "layer { "
+      "  name: 'relu1' "
+      "  type: 'ReLU' "
+      "  bottom: 'conv1' "
+      "  top: 'conv1' "
+      "} "
+      "layer { "
+      "  name: 'pool1' "
+      "  type: 'Pooling' "
+      "  pooling_param { "
+      "    pool: MAX "
+      "    kernel_size: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'pool1' "
+      "} "
+      "layer { "
+      "  name: 'norm1' "
+      "  type: 'LRN' "
+      "  lrn_param { "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool1' "
+      "  top: 'norm1' "
+      "} "
+      "layer { "
+      "  name: 'conv2' "
+      "  type: 'Convolution' "
+      "  convolution_param { "
+      "    num_output: 256 "
+      "    group: 2 "
+      "    kernel_size: 5 "
+      "    pad: 2 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'norm1' "
+      "  top: 'conv2' "
+      "} "
+      "layer { "
+      "  name: 'relu2' "
+      "  type: 'ReLU' "
+      "  bottom: 'conv2' "
+      "  top: 'conv2' "
+      "} "
+      "layer { "
+      "  name: 'pool2' "
+      "  type: 'Pooling' "
+      "  pooling_param { "
+      "    pool: MAX "
+      "    kernel_size: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv2' "
+      "  top: 'pool2' "
+      "} "
+      "layer { "
+      "  name: 'norm2' "
+      "  type: 'LRN' "
+      "  lrn_param { "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool2' "
+      "  top: 'norm2' "
+      "} "
+      "layer { "
+      "  name: 'conv3' "
+      "  type: 'Convolution' "
+      "  convolution_param { "
+      "    num_output: 384 "
+      "    kernel_size: 3 "
+      "    pad: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'norm2' "
+      "  top: 'conv3' "
+      "} "
+      "layer { "
+      "  name: 'relu3' "
+      "  type: 'ReLU' "
+      "  bottom: 'conv3' "
+      "  top: 'conv3' "
+      "} "
+      "layer { "
+      "  name: 'conv4' "
+      "  type: 'Convolution' "
+      "  convolution_param { "
+      "    num_output: 384 "
+      "    group: 2 "
+      "    kernel_size: 3 "
+      "    pad: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'conv3' "
+      "  top: 'conv4' "
+      "} "
+      "layer { "
+      "  name: 'relu4' "
+      "  type: 'ReLU' "
+      "  bottom: 'conv4' "
+      "  top: 'conv4' "
+      "} "
+      "layer { "
+      "  name: 'conv5' "
+      "  type: 'Convolution' "
+      "  convolution_param { "
+      "    num_output: 256 "
+      "    group: 2 "
+      "    kernel_size: 3 "
+      "    pad: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'conv4' "
+      "  top: 'conv5' "
+      "} "
+      "layer { "
+      "  name: 'relu5' "
+      "  type: 'ReLU' "
+      "  bottom: 'conv5' "
+      "  top: 'conv5' "
+      "} "
+      "layer { "
+      "  name: 'pool5' "
+      "  type: 'Pooling' "
+      "  pooling_param { "
+      "    kernel_size: 3 "
+      "    pool: MAX "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv5' "
+      "  top: 'pool5' "
+      "} "
+      "layer { "
+      "  name: 'fc6' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 4096 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'pool5' "
+      "  top: 'fc6' "
+      "} "
+      "layer { "
+      "  name: 'relu6' "
+      "  type: 'ReLU' "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layer { "
+      "  name: 'drop6' "
+      "  type: 'Dropout' "
+      "  dropout_param { "
+      "    dropout_ratio: 0.5 "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layer { "
+      "  name: 'fc7' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 4096 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc7' "
+      "} "
+      "layer { "
+      "  name: 'relu7' "
+      "  type: 'ReLU' "
+      "  bottom: 'fc7' "
+      "  top: 'fc7' "
+      "} "
+      "layer { "
+      "  name: 'drop7' "
+      "  type: 'Dropout' "
+      "  dropout_param { "
+      "    dropout_ratio: 0.5 "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc7' "
+      "} "
+      "layer { "
+      "  name: 'fc8' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc8' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunInsertionTest(input_proto, input_proto);
+}
+
+TEST_F(SplitLayerInsertionTest, TestNoInsertionWithInPlace) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod' "
+      "} "
+      "layer { "
+      "  name: 'relu' "
+      "  type: 'ReLU' "
+      "  bottom: 'innerprod' "
+      "  top: 'innerprod' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'innerprod' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunInsertionTest(input_proto, input_proto);
+}
+
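+// A blob that both carries a loss_weight and feeds another layer is split; in
+// the expected proto below the inserted Split layer takes over the loss_weight
+// (2.5 on its first top, 0 on the second) and the original layer drops it.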
+TEST_F(SplitLayerInsertionTest, TestLossInsertion) {
+  const string& input_proto =
+      "name: 'UnsharedWeightsNetwork' "
+      "force_backward: true "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'DummyData' "
+      "  dummy_data_param { "
+      "    num: 5 "
+      "    channels: 2 "
+      "    height: 3 "
+      "    width: 4 "
+      "    data_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "  } "
+      "  top: 'data' "
+      "} "
+      "layer { "
+      "  name: 'innerproduct1' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 10 "
+      "    bias_term: false "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 10 "
+      "    } "
+      "  } "
+      "  param { name: 'unsharedweights1' } "
+      "  bottom: 'data' "
+      "  top: 'innerproduct1' "
+      "  loss_weight: 2.5 "
+      "} "
+      "layer { "
+      "  name: 'innerproduct2' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 10 "
+      "    bias_term: false "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 10 "
+      "    } "
+      "  } "
+      "  param { name: 'unsharedweights2' } "
+      "  bottom: 'data' "
+      "  top: 'innerproduct2' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerproduct1' "
+      "  bottom: 'innerproduct2' "
+      "} ";
+  const string& expected_output_proto =
+      "name: 'UnsharedWeightsNetwork' "
+      "force_backward: true "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'DummyData' "
+      "  dummy_data_param { "
+      "    num: 5 "
+      "    channels: 2 "
+      "    height: 3 "
+      "    width: 4 "
+      "    data_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "  } "
+      "  top: 'data' "
+      "} "
+      "layer { "
+      "  name: 'data_data_0_split' "
+      "  type: 'Split' "
+      "  bottom: 'data' "
+      "  top: 'data_data_0_split_0' "
+      "  top: 'data_data_0_split_1' "
+      "} "
+      "layer { "
+      "  name: 'innerproduct1' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 10 "
+      "    bias_term: false "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 10 "
+      "    } "
+      "  } "
+      "  param { name: 'unsharedweights1' } "
+      "  bottom: 'data_data_0_split_0' "
+      "  top: 'innerproduct1' "
+      "} "
+      "layer { "
+      "  name: 'innerproduct1_innerproduct1_0_split' "
+      "  type: 'Split' "
+      "  bottom: 'innerproduct1' "
+      "  top: 'innerproduct1_innerproduct1_0_split_0' "
+      "  top: 'innerproduct1_innerproduct1_0_split_1' "
+      "  loss_weight: 2.5 "
+      "  loss_weight: 0 "
+      "} "
+      "layer { "
+      "  name: 'innerproduct2' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 10 "
+      "    bias_term: false "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 10 "
+      "    } "
+      "  } "
+      "  param { name: 'unsharedweights2' } "
+      "  bottom: 'data_data_0_split_1' "
+      "  top: 'innerproduct2' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerproduct1_innerproduct1_0_split_1' "
+      "  bottom: 'innerproduct2' "
+      "} ";
+  this->RunInsertionTest(input_proto, expected_output_proto);
+}
+
+TEST_F(SplitLayerInsertionTest, TestInsertion) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod1' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod2' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod2' "
+      "} "
+      "layer { "
+      "  name: 'innerprod3' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod3' "
+      "} "
+      "layer { "
+      "  name: 'loss1' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod1' "
+      "  bottom: 'innerprod2' "
+      "} "
+      "layer { "
+      "  name: 'loss2' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod2' "
+      "  bottom: 'innerprod3' "
+      "} ";
+  const string& expected_output_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'data_data_0_split' "
+      "  type: 'Split' "
+      "  bottom: 'data' "
+      "  top: 'data_data_0_split_0' "
+      "  top: 'data_data_0_split_1' "
+      "  top: 'data_data_0_split_2' "
+      "} "
+      "layer { "
+      "  name: 'innerprod1' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data_data_0_split_0' "
+      "  top: 'innerprod1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod2' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data_data_0_split_1' "
+      "  top: 'innerprod2' "
+      "} "
+      "layer { "
+      "  name: 'innerprod2_innerprod2_0_split' "
+      "  type: 'Split' "
+      "  bottom: 'innerprod2' "
+      "  top: 'innerprod2_innerprod2_0_split_0' "
+      "  top: 'innerprod2_innerprod2_0_split_1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod3' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data_data_0_split_2' "
+      "  top: 'innerprod3' "
+      "} "
+      "layer { "
+      "  name: 'loss1' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod1' "
+      "  bottom: 'innerprod2_innerprod2_0_split_0' "
+      "} "
+      "layer { "
+      "  name: 'loss2' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod2_innerprod2_0_split_1' "
+      "  bottom: 'innerprod3' "
+      "} ";
+  this->RunInsertionTest(input_proto, expected_output_proto);
+}
+
+TEST_F(SplitLayerInsertionTest, TestInsertionTwoTop) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod1' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod2' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'label' "
+      "  top: 'innerprod2' "
+      "} "
+      "layer { "
+      "  name: 'innerprod3' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod3' "
+      "} "
+      "layer { "
+      "  name: 'innerprod4' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'label' "
+      "  top: 'innerprod4' "
+      "} "
+      "layer { "
+      "  name: 'loss1' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod1' "
+      "  bottom: 'innerprod3' "
+      "} "
+      "layer { "
+      "  name: 'loss2' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod2' "
+      "  bottom: 'innerprod4' "
+      "} ";
+  const string& expected_output_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'data_data_0_split' "
+      "  type: 'Split' "
+      "  bottom: 'data' "
+      "  top: 'data_data_0_split_0' "
+      "  top: 'data_data_0_split_1' "
+      "} "
+      "layer { "
+      "  name: 'label_data_1_split' "
+      "  type: 'Split' "
+      "  bottom: 'label' "
+      "  top: 'label_data_1_split_0' "
+      "  top: 'label_data_1_split_1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod1' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data_data_0_split_0' "
+      "  top: 'innerprod1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod2' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'label_data_1_split_0' "
+      "  top: 'innerprod2' "
+      "} "
+      "layer { "
+      "  name: 'innerprod3' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data_data_0_split_1' "
+      "  top: 'innerprod3' "
+      "} "
+      "layer { "
+      "  name: 'innerprod4' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'label_data_1_split_1' "
+      "  top: 'innerprod4' "
+      "} "
+      "layer { "
+      "  name: 'loss1' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod1' "
+      "  bottom: 'innerprod3' "
+      "} "
+      "layer { "
+      "  name: 'loss2' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod2' "
+      "  bottom: 'innerprod4' "
+      "} ";
+  this->RunInsertionTest(input_proto, expected_output_proto);
+}
+
+TEST_F(SplitLayerInsertionTest, TestInputInsertion) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "input: 'data' "
+      "input_dim: 10 "
+      "input_dim: 3 "
+      "input_dim: 227 "
+      "input_dim: 227 "
+      "layer { "
+      "  name: 'innerprod1' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod2' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod2' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod1' "
+      "  bottom: 'innerprod2' "
+      "} ";
+  const string& expected_output_proto =
+      "name: 'TestNetwork' "
+      "input: 'data' "
+      "input_dim: 10 "
+      "input_dim: 3 "
+      "input_dim: 227 "
+      "input_dim: 227 "
+      "layer { "
+      "  name: 'data_input_0_split' "
+      "  type: 'Split' "
+      "  bottom: 'data' "
+      "  top: 'data_input_0_split_0' "
+      "  top: 'data_input_0_split_1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod1' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data_input_0_split_0' "
+      "  top: 'innerprod1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod2' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data_input_0_split_1' "
+      "  top: 'innerprod2' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod1' "
+      "  bottom: 'innerprod2' "
+      "} ";
+  this->RunInsertionTest(input_proto, expected_output_proto);
+}
+
+TEST_F(SplitLayerInsertionTest, TestWithInPlace) {
+  const string& input_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'innerprod1' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data' "
+      "  top: 'innerprod1' "
+      "} "
+      "layer { "
+      "  name: 'relu1' "
+      "  type: 'ReLU' "
+      "  bottom: 'innerprod1' "
+      "  top: 'innerprod1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod2' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'innerprod1' "
+      "  top: 'innerprod2' "
+      "} "
+      "layer { "
+      "  name: 'loss1' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod1' "
+      "  bottom: 'label' "
+      "} "
+      "layer { "
+      "  name: 'loss2' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod2' "
+      "  bottom: 'data' "
+      "} ";
+  const string& expected_output_proto =
+      "name: 'TestNetwork' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'data_data_0_split' "
+      "  type: 'Split' "
+      "  bottom: 'data' "
+      "  top: 'data_data_0_split_0' "
+      "  top: 'data_data_0_split_1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod1' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'data_data_0_split_0' "
+      "  top: 'innerprod1' "
+      "} "
+      "layer { "
+      "  name: 'relu1' "
+      "  type: 'ReLU' "
+      "  bottom: 'innerprod1' "
+      "  top: 'innerprod1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod1_relu1_0_split' "
+      "  type: 'Split' "
+      "  bottom: 'innerprod1' "
+      "  top: 'innerprod1_relu1_0_split_0' "
+      "  top: 'innerprod1_relu1_0_split_1' "
+      "} "
+      "layer { "
+      "  name: 'innerprod2' "
+      "  type: 'InnerProduct' "
+      "  bottom: 'innerprod1_relu1_0_split_0' "
+      "  top: 'innerprod2' "
+      "} "
+      "layer { "
+      "  name: 'loss1' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod1_relu1_0_split_1' "
+      "  bottom: 'label' "
+      "} "
+      "layer { "
+      "  name: 'loss2' "
+      "  type: 'EuclideanLoss' "
+      "  bottom: 'innerprod2' "
+      "  bottom: 'data_data_0_split_1' "
+      "} ";
+  this->RunInsertionTest(input_proto, expected_output_proto);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_spp_layer.cpp b/src/caffe/test/test_spp_layer.cpp
new file mode 100644
index 0000000..b2585f1
--- /dev/null
+++ b/src/caffe/test/test_spp_layer.cpp
@@ -0,0 +1,131 @@
+#include <algorithm>
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class SPPLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  SPPLayerTest()
+      : blob_bottom_(new Blob<Dtype>()),
+        blob_bottom_2_(new Blob<Dtype>()),
+        blob_bottom_3_(new Blob<Dtype>()),
+        blob_top_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    Caffe::set_random_seed(1701);
+    blob_bottom_->Reshape(2, 3, 9, 8);
+    blob_bottom_2_->Reshape(4, 3, 1024, 765);
+    blob_bottom_3_->Reshape(10, 3, 7, 7);
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_bottom_vec_2_.push_back(blob_bottom_2_);
+    blob_bottom_vec_3_.push_back(blob_bottom_3_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~SPPLayerTest() {
+    delete blob_bottom_; delete blob_bottom_2_;
+    delete blob_bottom_3_; delete blob_top_;
+  }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_bottom_2_;
+  Blob<Dtype>* const blob_bottom_3_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_bottom_vec_2_;
+  vector<Blob<Dtype>*> blob_bottom_vec_3_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(SPPLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(SPPLayerTest, TestSetup) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_spp_param()->set_pyramid_height(3);
+  SPPLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  // expected number of pool results is geometric sum
+  // (1 - r ** n)/(1 - r) where r = 4 and n = pyramid_height
+  // (1 - 4 ** 3)/(1 - 4) = 21
+  // multiply bottom num_channels * expected_pool_results
+  // to get expected num_channels (3 * 21 = 63)
+  EXPECT_EQ(this->blob_top_->num(), 2);
+  EXPECT_EQ(this->blob_top_->channels(), 63);
+  EXPECT_EQ(this->blob_top_->height(), 1);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+}
+
+TYPED_TEST(SPPLayerTest, TestEqualOutputDims) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_spp_param()->set_pyramid_height(5);
+  SPPLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_2_, this->blob_top_vec_);
+  // expected number of pool results is geometric sum
+  // (1 - r ** n)/(1 - r) where r = 4 and n = pyramid_height
+  // (1 - 4 ** 5)/(1 - 4) = 341
+  // multiply bottom num_channels * expected_pool_results
+  // to get expected num_channels (3 * 341 = 1023)
+  EXPECT_EQ(this->blob_top_->num(), 4);
+  EXPECT_EQ(this->blob_top_->channels(), 1023);
+  EXPECT_EQ(this->blob_top_->height(), 1);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+}
+
+TYPED_TEST(SPPLayerTest, TestEqualOutputDims2) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_spp_param()->set_pyramid_height(3);
+  SPPLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_3_, this->blob_top_vec_);
+  // expected number of pool results is geometric sum
+  // (1 - r ** n)/(1 - r) where r = 4 and n = pyramid_height
+  // (1 - 4 ** 3)/(1 - 4) = 21
+  // multiply bottom num_channels * expected_pool_results
+  // to get expected num_channels (3 * 21 = 63)
+  EXPECT_EQ(this->blob_top_->num(), 10);
+  EXPECT_EQ(this->blob_top_->channels(), 63);
+  EXPECT_EQ(this->blob_top_->height(), 1);
+  EXPECT_EQ(this->blob_top_->width(), 1);
+}
+
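+// Smoke test: run Forward and a full Backward pass once for a 3-level pyramid
+// to make sure both execute; gradient correctness is covered by TestGradient.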
+TYPED_TEST(SPPLayerTest, TestForwardBackward) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  layer_param.mutable_spp_param()->set_pyramid_height(3);
+  SPPLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  vector<bool> propagate_down(this->blob_bottom_vec_.size(), true);
+  layer.Backward(this->blob_top_vec_, propagate_down,
+                 this->blob_bottom_vec_);
+}
+
+TYPED_TEST(SPPLayerTest, TestGradient) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  SPPParameter* spp_param = layer_param.mutable_spp_param();
+  spp_param->set_pyramid_height(3);
+  SPPLayer<Dtype> layer(layer_param);
+  GradientChecker<Dtype> checker(1e-4, 1e-2);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_stochastic_pooling.cpp b/src/caffe/test/test_stochastic_pooling.cpp
new file mode 100644
index 0000000..f84464c
--- /dev/null
+++ b/src/caffe/test/test_stochastic_pooling.cpp
@@ -0,0 +1,176 @@
+#include <algorithm>
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+using std::min;
+
+namespace caffe {
+
+template <typename TypeParam>
+class StochasticPoolingLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  StochasticPoolingLayerTest()
+      : blob_bottom_(new Blob<Dtype>()),
+        blob_top_(new Blob<Dtype>()) {}
+  virtual void SetUp() {
+    Caffe::set_random_seed(1701);
+    blob_bottom_->Reshape(2, 3, 6, 5);
+    // fill the values
+    FillerParameter filler_param;
+    filler_param.set_min(0.1);
+    filler_param.set_max(1.);
+    UniformFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+
+  virtual ~StochasticPoolingLayerTest() {
+    delete blob_bottom_; delete blob_top_;
+  }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+template <typename Dtype>
+class CPUStochasticPoolingLayerTest
+  : public StochasticPoolingLayerTest<CPUDevice<Dtype> > {
+};
+
+TYPED_TEST_CASE(CPUStochasticPoolingLayerTest, TestDtypes);
+
+TYPED_TEST(CPUStochasticPoolingLayerTest, TestSetup) {
+  LayerParameter layer_param;
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  PoolingLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
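+  // With a 6x5 bottom, kernel 3 and stride 2 the pooled output is expected to
+  // be ceil((6 - 3) / 2) + 1 = 3 high and ceil((5 - 3) / 2) + 1 = 2 wide.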
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_->num());
+  EXPECT_EQ(this->blob_top_->channels(), this->blob_bottom_->channels());
+  EXPECT_EQ(this->blob_top_->height(), 3);
+  EXPECT_EQ(this->blob_top_->width(), 2);
+}
+
+#ifndef CPU_ONLY
+
+template <typename Dtype>
+class GPUStochasticPoolingLayerTest
+  : public StochasticPoolingLayerTest<GPUDevice<Dtype> > {
+};
+
+TYPED_TEST_CASE(GPUStochasticPoolingLayerTest, TestDtypes);
+
+TYPED_TEST(GPUStochasticPoolingLayerTest, TestStochastic) {
+  LayerParameter layer_param;
+  layer_param.set_phase(TRAIN);
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  pooling_param->set_pool(PoolingParameter_PoolMethod_STOCHASTIC);
+  PoolingLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  // Check if the output is correct - it should do random sampling
+  const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
+  const TypeParam* top_data = this->blob_top_->cpu_data();
+  TypeParam total = 0;
+  for (int n = 0; n < this->blob_top_->num(); ++n) {
+    for (int c = 0; c < this->blob_top_->channels(); ++c) {
+      for (int ph = 0; ph < this->blob_top_->height(); ++ph) {
+        for (int pw = 0; pw < this->blob_top_->width(); ++pw) {
+          TypeParam pooled = top_data[this->blob_top_->offset(n, c, ph, pw)];
+          total += pooled;
+          int hstart = ph * 2;
+          int hend = min(hstart + 3, this->blob_bottom_->height());
+          int wstart = pw * 2;
+          int wend = min(wstart + 3, this->blob_bottom_->width());
+          bool has_equal = false;
+          for (int h = hstart; h < hend; ++h) {
+            for (int w = wstart; w < wend; ++w) {
+              has_equal |= (pooled == bottom_data[this->blob_bottom_->
+                  offset(n, c, h, w)]);
+            }
+          }
+          EXPECT_TRUE(has_equal);
+        }
+      }
+    }
+  }
+  // When we are doing stochastic pooling, the average output should be higher
+  // than the plain data average, since larger values are sampled with higher
+  // probability.
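+  // (The uniform filler above draws from [0.1, 1.0], whose mean is 0.55.)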
+  EXPECT_GE(total / this->blob_top_->count(), 0.55);
+}
+
+TYPED_TEST(GPUStochasticPoolingLayerTest, TestStochasticTestPhase) {
+  LayerParameter layer_param;
+  layer_param.set_phase(TEST);
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  pooling_param->set_pool(PoolingParameter_PoolMethod_STOCHASTIC);
+  PoolingLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+
+  // Check if the output is correct - in TEST phase there is no random
+  // sampling, so each pooled value should not exceed the maximum of its
+  // pooling region, which is what the loop below verifies.
+  const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
+  const TypeParam* top_data = this->blob_top_->cpu_data();
+  for (int n = 0; n < this->blob_top_->num(); ++n) {
+    for (int c = 0; c < this->blob_top_->channels(); ++c) {
+      for (int ph = 0; ph < this->blob_top_->height(); ++ph) {
+        for (int pw = 0; pw < this->blob_top_->width(); ++pw) {
+          TypeParam pooled = top_data[this->blob_top_->offset(n, c, ph, pw)];
+          int hstart = ph * 2;
+          int hend = min(hstart + 3, this->blob_bottom_->height());
+          int wstart = pw * 2;
+          int wend = min(wstart + 3, this->blob_bottom_->width());
+          bool smaller_than_max = false;
+          for (int h = hstart; h < hend; ++h) {
+            for (int w = wstart; w < wend; ++w) {
+              smaller_than_max |= (pooled <= bottom_data[this->blob_bottom_->
+                  offset(n, c, h, w)]);
+            }
+          }
+          EXPECT_TRUE(smaller_than_max);
+        }
+      }
+    }
+  }
+}
+
+TYPED_TEST(GPUStochasticPoolingLayerTest, TestGradient) {
+  LayerParameter layer_param;
+  layer_param.set_phase(TRAIN);
+  PoolingParameter* pooling_param = layer_param.mutable_pooling_param();
+  pooling_param->set_kernel_size(3);
+  pooling_param->set_stride(2);
+  pooling_param->set_pool(PoolingParameter_PoolMethod_STOCHASTIC);
+  PoolingLayer<TypeParam> layer(layer_param);
+  GradientChecker<TypeParam> checker(1e-4, 1e-2);
+  // it is too expensive to call curand multiple times, so we don't do an
+  // exhaustive gradient check.
+  checker.CheckGradient(&layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+#endif
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_syncedmem.cpp b/src/caffe/test/test_syncedmem.cpp
new file mode 100644
index 0000000..b946233
--- /dev/null
+++ b/src/caffe/test/test_syncedmem.cpp
@@ -0,0 +1,126 @@
+#include <cstring>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/common.hpp"
+#include "caffe/syncedmem.hpp"
+#include "caffe/util/device_alternate.hpp"
+#include "caffe/util/math_functions.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+class SyncedMemoryTest : public ::testing::Test {};
+
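+// These tests exercise the SyncedMemory head() states as observed below:
+// UNINITIALIZED before any access, HEAD_AT_CPU / HEAD_AT_GPU after a mutable
+// access on that device, and SYNCED once the other copy has been updated.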
+TEST_F(SyncedMemoryTest, TestInitialization) {
+  SyncedMemory mem(10);
+  EXPECT_EQ(mem.head(), SyncedMemory::UNINITIALIZED);
+  EXPECT_EQ(mem.size(), 10);
+  SyncedMemory* p_mem = new SyncedMemory(10 * sizeof(float));
+  EXPECT_EQ(p_mem->size(), 10 * sizeof(float));
+  delete p_mem;
+}
+
+#ifndef CPU_ONLY  // GPU test
+
+TEST_F(SyncedMemoryTest, TestAllocationCPUGPU) {
+  SyncedMemory mem(10);
+  EXPECT_TRUE(mem.cpu_data());
+  EXPECT_TRUE(mem.gpu_data());
+  EXPECT_TRUE(mem.mutable_cpu_data());
+  EXPECT_TRUE(mem.mutable_gpu_data());
+}
+
+#endif
+
+TEST_F(SyncedMemoryTest, TestAllocationCPU) {
+  SyncedMemory mem(10);
+  EXPECT_TRUE(mem.cpu_data());
+  EXPECT_TRUE(mem.mutable_cpu_data());
+}
+
+#ifndef CPU_ONLY  // GPU test
+
+TEST_F(SyncedMemoryTest, TestAllocationGPU) {
+  SyncedMemory mem(10);
+  EXPECT_TRUE(mem.gpu_data());
+  EXPECT_TRUE(mem.mutable_gpu_data());
+}
+
+#endif
+
+TEST_F(SyncedMemoryTest, TestCPUWrite) {
+  SyncedMemory mem(10);
+  void* cpu_data = mem.mutable_cpu_data();
+  EXPECT_EQ(mem.head(), SyncedMemory::HEAD_AT_CPU);
+  caffe_memset(mem.size(), 1, cpu_data);
+  for (int i = 0; i < mem.size(); ++i) {
+    EXPECT_EQ((static_cast<char*>(cpu_data))[i], 1);
+  }
+  // do another round
+  cpu_data = mem.mutable_cpu_data();
+  EXPECT_EQ(mem.head(), SyncedMemory::HEAD_AT_CPU);
+  caffe_memset(mem.size(), 2, cpu_data);
+  for (int i = 0; i < mem.size(); ++i) {
+    EXPECT_EQ((static_cast<char*>(cpu_data))[i], 2);
+  }
+}
+
+#ifndef CPU_ONLY  // GPU test
+
+TEST_F(SyncedMemoryTest, TestGPURead) {
+  SyncedMemory mem(10);
+  void* cpu_data = mem.mutable_cpu_data();
+  EXPECT_EQ(mem.head(), SyncedMemory::HEAD_AT_CPU);
+  caffe_memset(mem.size(), 1, cpu_data);
+  const void* gpu_data = mem.gpu_data();
+  EXPECT_EQ(mem.head(), SyncedMemory::SYNCED);
+  // check if values are the same
+  char* recovered_value = new char[10];
+  caffe_gpu_memcpy(10, gpu_data, recovered_value);
+  for (int i = 0; i < mem.size(); ++i) {
+    EXPECT_EQ((static_cast<char*>(recovered_value))[i], 1);
+  }
+  // do another round
+  cpu_data = mem.mutable_cpu_data();
+  EXPECT_EQ(mem.head(), SyncedMemory::HEAD_AT_CPU);
+  caffe_memset(mem.size(), 2, cpu_data);
+  for (int i = 0; i < mem.size(); ++i) {
+    EXPECT_EQ((static_cast<char*>(cpu_data))[i], 2);
+  }
+  gpu_data = mem.gpu_data();
+  EXPECT_EQ(mem.head(), SyncedMemory::SYNCED);
+  // check if values are the same
+  caffe_gpu_memcpy(10, gpu_data, recovered_value);
+  for (int i = 0; i < mem.size(); ++i) {
+    EXPECT_EQ((static_cast<char*>(recovered_value))[i], 2);
+  }
+  delete[] recovered_value;
+}
+
+TEST_F(SyncedMemoryTest, TestGPUWrite) {
+  SyncedMemory mem(10);
+  void* gpu_data = mem.mutable_gpu_data();
+  EXPECT_EQ(mem.head(), SyncedMemory::HEAD_AT_GPU);
+  caffe_gpu_memset(mem.size(), 1, gpu_data);
+  const void* cpu_data = mem.cpu_data();
+  for (int i = 0; i < mem.size(); ++i) {
+    EXPECT_EQ((static_cast<const char*>(cpu_data))[i], 1);
+  }
+  EXPECT_EQ(mem.head(), SyncedMemory::SYNCED);
+
+  gpu_data = mem.mutable_gpu_data();
+  EXPECT_EQ(mem.head(), SyncedMemory::HEAD_AT_GPU);
+  caffe_gpu_memset(mem.size(), 2, gpu_data);
+  cpu_data = mem.cpu_data();
+  for (int i = 0; i < mem.size(); ++i) {
+    EXPECT_EQ((static_cast<const char*>(cpu_data))[i], 2);
+  }
+  EXPECT_EQ(mem.head(), SyncedMemory::SYNCED);
+}
+
+#endif
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_tanh_layer.cpp b/src/caffe/test/test_tanh_layer.cpp
new file mode 100644
index 0000000..5dc9283
--- /dev/null
+++ b/src/caffe/test/test_tanh_layer.cpp
@@ -0,0 +1,101 @@
+#include <algorithm>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/common_layers.hpp"
+#include "caffe/filler.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+namespace caffe {
+
+double tanh_naive(double x) {
+  if (x < -40) {
+    // avoid negative overflow
+    return -1;
+  } else if (x > 40) {
+    // avoid positive overflow
+    return 1;
+  } else {
+    // exact expression for tanh, which is unstable for large x
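+    //   tanh(x) = (exp(2 * x) - 1) / (exp(2 * x) + 1)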
+    double exp2x = exp(2 * x);
+    return (exp2x - 1.0) / (exp2x + 1.0);
+  }
+}
+
+template <typename TypeParam>
+class TanHLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+
+ protected:
+  TanHLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 4, 5)),
+        blob_top_(new Blob<Dtype>()) {
+    Caffe::set_random_seed(1701);
+    FillerParameter filler_param;
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~TanHLayerTest() { delete blob_bottom_; delete blob_top_; }
+
+  void TestForward(Dtype filler_std) {
+    FillerParameter filler_param;
+    filler_param.set_std(filler_std);
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+
+    LayerParameter layer_param;
+    TanHLayer<Dtype> layer(layer_param);
+    layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+    layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+    // Now, check values
+    const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+    const Dtype* top_data = this->blob_top_->cpu_data();
+    const Dtype min_precision = 1e-5;
+    for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+      Dtype expected_value = tanh_naive(bottom_data[i]);
+      Dtype precision = std::max(
+        Dtype(std::abs(expected_value * Dtype(1e-4))), min_precision);
+      EXPECT_NEAR(expected_value, top_data[i], precision);
+    }
+  }
+
+  void TestBackward(Dtype filler_std) {
+    FillerParameter filler_param;
+    filler_param.set_std(filler_std);
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+
+    LayerParameter layer_param;
+    TanHLayer<Dtype> layer(layer_param);
+    GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
+    checker.CheckGradientEltwise(&layer, this->blob_bottom_vec_,
+        this->blob_top_vec_);
+  }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(TanHLayerTest, TestDtypesAndDevices);
+
+TYPED_TEST(TanHLayerTest, TestTanH) {
+  this->TestForward(1.0);
+}
+
+TYPED_TEST(TanHLayerTest, TestTanHOverflow) {
+  // this will fail if tanh overflow is not properly handled
+  this->TestForward(10000.0);
+}
+
+TYPED_TEST(TanHLayerTest, TestTanHGradient) {
+  this->TestBackward(1.0);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_threshold_layer.cpp b/src/caffe/test/test_threshold_layer.cpp
new file mode 100644
index 0000000..05ce821
--- /dev/null
+++ b/src/caffe/test/test_threshold_layer.cpp
@@ -0,0 +1,98 @@
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+template <typename TypeParam>
+class ThresholdLayerTest : public MultiDeviceTest<TypeParam> {
+  typedef typename TypeParam::Dtype Dtype;
+ protected:
+  ThresholdLayerTest()
+      : blob_bottom_(new Blob<Dtype>(2, 3, 6, 5)),
+        blob_top_(new Blob<Dtype>()) {
+    Caffe::set_random_seed(1701);
+    // fill the values
+    FillerParameter filler_param;
+    GaussianFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  }
+  virtual ~ThresholdLayerTest() { delete blob_bottom_; delete blob_top_; }
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+TYPED_TEST_CASE(ThresholdLayerTest, TestDtypesAndDevices);
+
+
+TYPED_TEST(ThresholdLayerTest, TestSetup) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ThresholdLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_->num());
+  EXPECT_EQ(this->blob_top_->channels(), this->blob_bottom_->channels());
+  EXPECT_EQ(this->blob_top_->height(), this->blob_bottom_->height());
+  EXPECT_EQ(this->blob_top_->width(), this->blob_bottom_->width());
+}
+
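+// ThresholdLayer produces a binary output: 1 where the input is above the
+// threshold (unset here, so the proto default applies) and 0 otherwise; the
+// checks below verify both directions against the bottom data.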
+TYPED_TEST(ThresholdLayerTest, Test) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ThresholdLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  const Dtype threshold_ = layer_param.threshold_param().threshold();
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_GE(top_data[i], 0.);
+    EXPECT_LE(top_data[i], 1.);
+    if (top_data[i] == 0) {
+      EXPECT_LE(bottom_data[i], threshold_);
+    }
+    if (top_data[i] == 1) {
+      EXPECT_GT(bottom_data[i], threshold_);
+    }
+  }
+}
+
+TYPED_TEST(ThresholdLayerTest, Test2) {
+  typedef typename TypeParam::Dtype Dtype;
+  LayerParameter layer_param;
+  ThresholdParameter* threshold_param =
+    layer_param.mutable_threshold_param();
+  threshold_param->set_threshold(0.5);
+  ThresholdLayer<Dtype> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+  layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+  // Now, check values
+  const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+  const Dtype* top_data = this->blob_top_->cpu_data();
+  const Dtype threshold_ = layer_param.threshold_param().threshold();
+  EXPECT_FLOAT_EQ(threshold_, 0.5);
+  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
+    EXPECT_GE(top_data[i], 0.);
+    EXPECT_LE(top_data[i], 1.);
+    if (top_data[i] == 0) {
+      EXPECT_LE(bottom_data[i], threshold_);
+    }
+    if (top_data[i] == 1) {
+      EXPECT_GT(bottom_data[i], threshold_);
+    }
+  }
+}
+
+}  // namespace caffe
diff --git a/src/caffe/test/test_upgrade_proto.cpp b/src/caffe/test/test_upgrade_proto.cpp
new file mode 100644
index 0000000..eec6276
--- /dev/null
+++ b/src/caffe/test/test_upgrade_proto.cpp
@@ -0,0 +1,2909 @@
+#include <cstring>
+#include <string>
+#include <vector>
+
+#include "google/protobuf/text_format.h"
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/util/upgrade_proto.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+class PaddingLayerUpgradeTest : public ::testing::Test {
+ protected:
+  void RunPaddingUpgradeTest(
+      const string& input_param_string, const string& output_param_string) {
+    // Test that UpgradeV0PaddingLayers called on the proto specified by
+    // input_param_string results in the proto specified by
+    // output_param_string.
+    NetParameter input_param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(
+        input_param_string, &input_param));
+    NetParameter expected_output_param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(
+        output_param_string, &expected_output_param));
+    NetParameter actual_output_param;
+    UpgradeV0PaddingLayers(input_param, &actual_output_param);
+    EXPECT_EQ(expected_output_param.DebugString(),
+        actual_output_param.DebugString());
+    // Also test idempotence.
+    NetParameter double_pad_upgrade_param;
+    UpgradeV0PaddingLayers(actual_output_param, &double_pad_upgrade_param);
+    EXPECT_EQ(actual_output_param.DebugString(),
+       double_pad_upgrade_param.DebugString());
+  }
+};
+
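+// The upgrade is expected to drop each standalone V0 'padding' layer, fold its
+// pad value into the consuming 'conv' layer, and rewire that layer's bottom to
+// the padding layer's original input, as the input/expected pairs below show.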
+TEST_F(PaddingLayerUpgradeTest, TestSimple) {
+  const string& input_proto =
+      "name: 'CaffeNet' "
+      "layers { "
+      "  layer { "
+      "    name: 'data' "
+      "    type: 'data' "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "    batchsize: 256 "
+      "    cropsize: 227 "
+      "    mirror: true "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pad1' "
+      "    type: 'padding' "
+      "    pad: 2 "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'pad1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv1' "
+      "    type: 'conv' "
+      "    num_output: 96 "
+      "    kernelsize: 11 "
+      "    stride: 4 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pad1' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc8' "
+      "    type: 'innerproduct' "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'fc8' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'loss' "
+      "    type: 'softmax_loss' "
+      "  } "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  const string& expected_output_proto =
+      "name: 'CaffeNet' "
+      "layers { "
+      "  layer { "
+      "    name: 'data' "
+      "    type: 'data' "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "    batchsize: 256 "
+      "    cropsize: 227 "
+      "    mirror: true "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv1' "
+      "    type: 'conv' "
+      "    num_output: 96 "
+      "    kernelsize: 11 "
+      "    stride: 4 "
+      "    pad: 2 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc8' "
+      "    type: 'innerproduct' "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'fc8' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'loss' "
+      "    type: 'softmax_loss' "
+      "  } "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunPaddingUpgradeTest(input_proto, expected_output_proto);
+}
+
+TEST_F(PaddingLayerUpgradeTest, TestTwoTops) {
+  const string& input_proto =
+      "name: 'CaffeNet' "
+      "layers { "
+      "  layer { "
+      "    name: 'data' "
+      "    type: 'data' "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "    batchsize: 256 "
+      "    cropsize: 227 "
+      "    mirror: true "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pad1' "
+      "    type: 'padding' "
+      "    pad: 2 "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'pad1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv1' "
+      "    type: 'conv' "
+      "    num_output: 96 "
+      "    kernelsize: 11 "
+      "    stride: 4 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pad1' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc8' "
+      "    type: 'innerproduct' "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'fc8' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv2' "
+      "    type: 'conv' "
+      "    num_output: 96 "
+      "    kernelsize: 11 "
+      "    stride: 4 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pad1' "
+      "  top: 'conv2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'loss' "
+      "    type: 'softmax_loss' "
+      "  } "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  const string& expected_output_proto =
+      "name: 'CaffeNet' "
+      "layers { "
+      "  layer { "
+      "    name: 'data' "
+      "    type: 'data' "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "    batchsize: 256 "
+      "    cropsize: 227 "
+      "    mirror: true "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv1' "
+      "    type: 'conv' "
+      "    num_output: 96 "
+      "    kernelsize: 11 "
+      "    stride: 4 "
+      "    pad: 2 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc8' "
+      "    type: 'innerproduct' "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'fc8' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv2' "
+      "    type: 'conv' "
+      "    num_output: 96 "
+      "    kernelsize: 11 "
+      "    stride: 4 "
+      "    pad: 2 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'conv2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'loss' "
+      "    type: 'softmax_loss' "
+      "  } "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunPaddingUpgradeTest(input_proto, expected_output_proto);
+}
+
+TEST_F(PaddingLayerUpgradeTest, TestImageNet) {
+  const string& input_proto =
+      "name: 'CaffeNet' "
+      "layers { "
+      "  layer { "
+      "    name: 'data' "
+      "    type: 'data' "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "    batchsize: 256 "
+      "    cropsize: 227 "
+      "    mirror: true "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv1' "
+      "    type: 'conv' "
+      "    num_output: 96 "
+      "    kernelsize: 11 "
+      "    stride: 4 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu1' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pool1' "
+      "    type: 'pool' "
+      "    pool: MAX "
+      "    kernelsize: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'pool1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'norm1' "
+      "    type: 'lrn' "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool1' "
+      "  top: 'norm1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pad2' "
+      "    type: 'padding' "
+      "    pad: 2 "
+      "  } "
+      "  bottom: 'norm1' "
+      "  top: 'pad2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv2' "
+      "    type: 'conv' "
+      "    num_output: 256 "
+      "    group: 2 "
+      "    kernelsize: 5 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pad2' "
+      "  top: 'conv2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu2' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv2' "
+      "  top: 'conv2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pool2' "
+      "    type: 'pool' "
+      "    pool: MAX "
+      "    kernelsize: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv2' "
+      "  top: 'pool2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'norm2' "
+      "    type: 'lrn' "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool2' "
+      "  top: 'norm2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pad3' "
+      "    type: 'padding' "
+      "    pad: 1 "
+      "  } "
+      "  bottom: 'norm2' "
+      "  top: 'pad3' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv3' "
+      "    type: 'conv' "
+      "    num_output: 384 "
+      "    kernelsize: 3 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pad3' "
+      "  top: 'conv3' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu3' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv3' "
+      "  top: 'conv3' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pad4' "
+      "    type: 'padding' "
+      "    pad: 1 "
+      "  } "
+      "  bottom: 'conv3' "
+      "  top: 'pad4' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv4' "
+      "    type: 'conv' "
+      "    num_output: 384 "
+      "    group: 2 "
+      "    kernelsize: 3 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pad4' "
+      "  top: 'conv4' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu4' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv4' "
+      "  top: 'conv4' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pad5' "
+      "    type: 'padding' "
+      "    pad: 1 "
+      "  } "
+      "  bottom: 'conv4' "
+      "  top: 'pad5' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv5' "
+      "    type: 'conv' "
+      "    num_output: 256 "
+      "    group: 2 "
+      "    kernelsize: 3 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pad5' "
+      "  top: 'conv5' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu5' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv5' "
+      "  top: 'conv5' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pool5' "
+      "    type: 'pool' "
+      "    kernelsize: 3 "
+      "    pool: MAX "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv5' "
+      "  top: 'pool5' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc6' "
+      "    type: 'innerproduct' "
+      "    num_output: 4096 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pool5' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu6' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'drop6' "
+      "    type: 'dropout' "
+      "    dropout_ratio: 0.5 "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc7' "
+      "    type: 'innerproduct' "
+      "    num_output: 4096 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc7' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu7' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc7' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'drop7' "
+      "    type: 'dropout' "
+      "    dropout_ratio: 0.5 "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc7' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc8' "
+      "    type: 'innerproduct' "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc8' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'loss' "
+      "    type: 'softmax_loss' "
+      "  } "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  const string& expected_output_proto =
+      "name: 'CaffeNet' "
+      "layers { "
+      "  layer { "
+      "    name: 'data' "
+      "    type: 'data' "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "    batchsize: 256 "
+      "    cropsize: 227 "
+      "    mirror: true "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv1' "
+      "    type: 'conv' "
+      "    num_output: 96 "
+      "    kernelsize: 11 "
+      "    stride: 4 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu1' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pool1' "
+      "    type: 'pool' "
+      "    pool: MAX "
+      "    kernelsize: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'pool1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'norm1' "
+      "    type: 'lrn' "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool1' "
+      "  top: 'norm1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv2' "
+      "    type: 'conv' "
+      "    num_output: 256 "
+      "    group: 2 "
+      "    kernelsize: 5 "
+      "    pad: 2 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'norm1' "
+      "  top: 'conv2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu2' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv2' "
+      "  top: 'conv2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pool2' "
+      "    type: 'pool' "
+      "    pool: MAX "
+      "    kernelsize: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv2' "
+      "  top: 'pool2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'norm2' "
+      "    type: 'lrn' "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool2' "
+      "  top: 'norm2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv3' "
+      "    type: 'conv' "
+      "    num_output: 384 "
+      "    kernelsize: 3 "
+      "    pad: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'norm2' "
+      "  top: 'conv3' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu3' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv3' "
+      "  top: 'conv3' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv4' "
+      "    type: 'conv' "
+      "    num_output: 384 "
+      "    group: 2 "
+      "    kernelsize: 3 "
+      "    pad: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'conv3' "
+      "  top: 'conv4' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu4' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv4' "
+      "  top: 'conv4' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv5' "
+      "    type: 'conv' "
+      "    num_output: 256 "
+      "    group: 2 "
+      "    kernelsize: 3 "
+      "    pad: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'conv4' "
+      "  top: 'conv5' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu5' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv5' "
+      "  top: 'conv5' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pool5' "
+      "    type: 'pool' "
+      "    kernelsize: 3 "
+      "    pool: MAX "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv5' "
+      "  top: 'pool5' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc6' "
+      "    type: 'innerproduct' "
+      "    num_output: 4096 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pool5' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu6' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'drop6' "
+      "    type: 'dropout' "
+      "    dropout_ratio: 0.5 "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc7' "
+      "    type: 'innerproduct' "
+      "    num_output: 4096 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc7' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu7' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc7' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'drop7' "
+      "    type: 'dropout' "
+      "    dropout_ratio: 0.5 "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc7' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc8' "
+      "    type: 'innerproduct' "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc8' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'loss' "
+      "    type: 'softmax_loss' "
+      "  } "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunPaddingUpgradeTest(input_proto, expected_output_proto);
+}
+
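+// Every fixture in this suite encodes the same rewrite: the padding upgrade
+// exercised by RunPaddingUpgradeTest folds a standalone 'padding' layer into
+// the convolution that consumes it, copying its 'pad' value into the conv
+// layer and rewiring the conv's bottom to the padding layer's own bottom
+// (TestTwoTops above shows the fold applied to every consumer).  Condensed
+// from the fixtures, with fillers and lr/decay fields omitted:
+//
+//   layers { layer { name: 'pad1' type: 'padding' pad: 2 }
+//            bottom: 'data' top: 'pad1' }
+//   layers { layer { name: 'conv1' type: 'conv' kernelsize: 11 stride: 4 }
+//            bottom: 'pad1' top: 'conv1' }
+//
+// becomes
+//
+//   layers { layer { name: 'conv1' type: 'conv' kernelsize: 11 stride: 4
+//            pad: 2 } bottom: 'data' top: 'conv1' }
+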
+class NetUpgradeTest : public ::testing::Test {
+ protected:
+  void RunV0UpgradeTest(
+      const string& input_param_string, const string& output_param_string) {
+    // Test that UpgradeV0Net called on the NetParameter proto specified by
+    // input_param_string results in the NetParameter proto specified by
+    // output_param_string.
+    NetParameter input_param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(
+        input_param_string, &input_param));
+    NetParameter expected_output_param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(
+        output_param_string, &expected_output_param));
+    NetParameter actual_output_param;
+    UpgradeV0Net(input_param, &actual_output_param);
+    EXPECT_EQ(expected_output_param.DebugString(),
+        actual_output_param.DebugString());
+  }
+
+  void RunV1UpgradeTest(
+      const string& input_param_string, const string& output_param_string) {
+      // Test that UpgradeV1Net called on the NetParameter proto specified by
+    // input_param_string results in the NetParameter proto specified by
+    // output_param_string.
+    NetParameter input_param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(
+        input_param_string, &input_param));
+    NetParameter expected_output_param;
+    CHECK(google::protobuf::TextFormat::ParseFromString(
+        output_param_string, &expected_output_param));
+    NetParameter actual_output_param;
+    UpgradeV1Net(input_param, &actual_output_param);
+    EXPECT_EQ(expected_output_param.DebugString(),
+        actual_output_param.DebugString());
+  }
+};
+
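+// Illustrative sketch (not part of the Caffe sources): both helpers above
+// follow one pattern -- parse the two text protos, run the upgrade, and
+// compare DebugString() output.  The same calls can be driven by hand; the
+// tiny V0 net below is made up for the example, while the functions and
+// headers are the ones this test file already uses:
+//
+//   caffe::NetParameter v0_net, v1_net;
+//   const char* kTinyV0 =
+//       "name: 'tiny' "
+//       "layers { "
+//       "  layer { name: 'fc' type: 'innerproduct' num_output: 10 } "
+//       "  bottom: 'data' top: 'fc' "
+//       "} ";
+//   CHECK(google::protobuf::TextFormat::ParseFromString(kTinyV0, &v0_net));
+//   caffe::UpgradeV0Net(v0_net, &v1_net);
+//   LOG(INFO) << v1_net.DebugString();  // the upgraded V1 definition
+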
+TEST_F(NetUpgradeTest, TestSimple) {
+  const string& v0_proto =
+      "name: 'CaffeNet' "
+      "layers { "
+      "  layer { "
+      "    name: 'data' "
+      "    type: 'data' "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "    batchsize: 256 "
+      "    cropsize: 227 "
+      "    mirror: true "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pad1' "
+      "    type: 'padding' "
+      "    pad: 2 "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'pad1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv1' "
+      "    type: 'conv' "
+      "    num_output: 96 "
+      "    kernelsize: 11 "
+      "    stride: 4 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pad1' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc8' "
+      "    type: 'innerproduct' "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'fc8' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'loss' "
+      "    type: 'softmax_loss' "
+      "  } "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  const string& expected_v1_proto =
+      "name: 'CaffeNet' "
+      "layers { "
+      "  name: 'data' "
+      "  type: DATA "
+      "  data_param { "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    batch_size: 256 "
+      "  } "
+      "  transform_param { "
+      "    crop_size: 227 "
+      "    mirror: true "
+      "    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layers { "
+      "  name: 'conv1' "
+      "  type: CONVOLUTION "
+      "  convolution_param { "
+      "    num_output: 96 "
+      "    kernel_size: 11 "
+      "    stride: 4 "
+      "    pad: 2 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "  } "
+      "  blobs_lr: 1. "
+      "  blobs_lr: 2. "
+      "  weight_decay: 1. "
+      "  weight_decay: 0. "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  name: 'fc8' "
+      "  type: INNER_PRODUCT "
+      "  inner_product_param { "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "  } "
+      "  blobs_lr: 1. "
+      "  blobs_lr: 2. "
+      "  weight_decay: 1. "
+      "  weight_decay: 0. "
+      "  bottom: 'conv1' "
+      "  top: 'fc8' "
+      "} "
+      "layers { "
+      "  name: 'loss' "
+      "  type: SOFTMAX_LOSS "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunV0UpgradeTest(v0_proto, expected_v1_proto);
+
+  const string& expected_v2_proto =
+      "name: 'CaffeNet' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  data_param { "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    batch_size: 256 "
+      "  } "
+      "  transform_param { "
+      "    crop_size: 227 "
+      "    mirror: true "
+      "    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'conv1' "
+      "  type: 'Convolution' "
+      "  convolution_param { "
+      "    num_output: 96 "
+      "    kernel_size: 11 "
+      "    stride: 4 "
+      "    pad: 2 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "} "
+      "layer { "
+      "  name: 'fc8' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'fc8' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunV1UpgradeTest(expected_v1_proto, expected_v2_proto);
+}
+
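+// Illustrative note: TestSimple above chains the two upgrade stages end to
+// end.  A minimal sketch of that path, assuming 'v0_text' holds a V0
+// definition like the ones in this file:
+//
+//   caffe::NetParameter v0, v1, v2;
+//   CHECK(google::protobuf::TextFormat::ParseFromString(v0_text, &v0));
+//   caffe::UpgradeV0Net(v0, &v1);  // 'layers { layer {...} }' -> V1 enum types
+//   caffe::UpgradeV1Net(v1, &v2);  // V1 enum types -> 'layer' with string types
+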
+// Test any layer or parameter upgrades not covered by other tests.
+TEST_F(NetUpgradeTest, TestAllParams) {
+  const string& input_proto =
+      "name: 'CaffeNet' "
+      "input: 'input_data' "
+      "input_dim: 64 "
+      "input_dim: 3 "
+      "input_dim: 32 "
+      "input_dim: 32 "
+      "layers { "
+      "  layer { "
+      "    name: 'data' "
+      "    type: 'data' "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "    batchsize: 256 "
+      "    cropsize: 227 "
+      "    mirror: true "
+      "    scale: 0.25 "
+      "    rand_skip: 73 "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'images' "
+      "    type: 'images' "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-images' "
+      "    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "    batchsize: 256 "
+      "    cropsize: 227 "
+      "    mirror: true "
+      "    scale: 0.25 "
+      "    rand_skip: 73 "
+      "    shuffle_images: true "
+      "    new_height: 40 "
+      "    new_width: 30 "
+      "  } "
+      "  top: 'images_data' "
+      "  top: 'images_label' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'window_data' "
+      "    type: 'window_data' "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "    batchsize: 256 "
+      "    cropsize: 227 "
+      "    mirror: true "
+      "    det_fg_threshold: 0.25 "
+      "    det_bg_threshold: 0.75 "
+      "    det_fg_fraction: 0.5 "
+      "    det_context_pad: 16 "
+      "    det_crop_mode: 'square' "
+      "  } "
+      "  top: 'window_data' "
+      "  top: 'window_label' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'hdf5data' "
+      "    type: 'hdf5_data' "
+      "    source: '/my/hdf5/data' "
+      "    batchsize: 256 "
+      "  } "
+      "  top: 'hdf5data' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv1' "
+      "    type: 'conv' "
+      "    num_output: 96 "
+      "    biasterm: false "
+      "    pad: 4 "
+      "    kernelsize: 11 "
+      "    stride: 4 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 3. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pool1ave' "
+      "    type: 'pool' "
+      "    pool: AVE "
+      "    kernelsize: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'pool1ave' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pool1stoch' "
+      "    type: 'pool' "
+      "    pool: STOCHASTIC "
+      "    kernelsize: 4 "
+      "    stride: 5 "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'pool1stoch' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'concat' "
+      "    type: 'concat' "
+      "    concat_dim: 2 "
+      "  } "
+      "  bottom: 'pool1ave' "
+      "  bottom: 'pool1stoch' "
+      "  top: 'pool1concat' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'norm1' "
+      "    type: 'lrn' "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool1concat' "
+      "  top: 'norm1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc6' "
+      "    type: 'innerproduct' "
+      "    num_output: 4096 "
+      "    biasterm: false "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'norm1' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu6' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'drop6' "
+      "    type: 'dropout' "
+      "    dropout_ratio: 0.2 "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'loss' "
+      "    type: 'infogain_loss' "
+      "    source: '/my/infogain/matrix' "
+      "  } "
+      "  bottom: 'fc6' "
+      "  bottom: 'label' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'accuracy' "
+      "    type: 'accuracy' "
+      "  } "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'bnll' "
+      "    type: 'bnll' "
+      "  } "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'euclidean_loss' "
+      "    type: 'euclidean_loss' "
+      "  } "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'flatten' "
+      "    type: 'flatten' "
+      "  } "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'hdf5_output' "
+      "    type: 'hdf5_output' "
+      "    hdf5_output_param { "
+      "      file_name: '/my/hdf5/output/file' "
+      "    } "
+      "  } "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'im2col' "
+      "    type: 'im2col' "
+      "  } "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'images' "
+      "    type: 'images' "
+      "  } "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'multinomial_logistic_loss' "
+      "    type: 'multinomial_logistic_loss' "
+      "  } "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'sigmoid' "
+      "    type: 'sigmoid' "
+      "  } "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'softmax' "
+      "    type: 'softmax' "
+      "  } "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'split' "
+      "    type: 'split' "
+      "  } "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'tanh' "
+      "    type: 'tanh' "
+      "  } "
+      "} ";
+  const string& expected_output_proto =
+      "name: 'CaffeNet' "
+      "input: 'input_data' "
+      "input_dim: 64 "
+      "input_dim: 3 "
+      "input_dim: 32 "
+      "input_dim: 32 "
+      "layers { "
+      "  name: 'data' "
+      "  type: DATA "
+      "  data_param { "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    batch_size: 256 "
+      "    rand_skip: 73 "
+      "  } "
+      "  transform_param { "
+      "    crop_size: 227 "
+      "    mirror: true "
+      "    scale: 0.25 "
+      "    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layers { "
+      "  name: 'images' "
+      "  type: IMAGE_DATA "
+      "  image_data_param { "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-images' "
+      "    batch_size: 256 "
+      "    rand_skip: 73 "
+      "    shuffle: true "
+      "    new_height: 40 "
+      "    new_width: 30 "
+      "  } "
+      "  transform_param {"
+      "    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "    crop_size: 227 "
+      "    mirror: true "
+      "    scale: 0.25 "
+      "  } "
+      "  top: 'images_data' "
+      "  top: 'images_label' "
+      "} "
+      "layers { "
+      "  name: 'window_data' "
+      "  type: WINDOW_DATA "
+      "  window_data_param { "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    batch_size: 256 "
+      "    fg_threshold: 0.25 "
+      "    bg_threshold: 0.75 "
+      "    fg_fraction: 0.5 "
+      "    context_pad: 16 "
+      "    crop_mode: 'square' "
+      "  } "
+      "  transform_param { "
+      "    mirror: true "
+      "    crop_size: 227 "
+      "    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "  }"
+      "  top: 'window_data' "
+      "  top: 'window_label' "
+      "} "
+      "layers { "
+      "  name: 'hdf5data' "
+      "  type: HDF5_DATA "
+      "  hdf5_data_param { "
+      "    source: '/my/hdf5/data' "
+      "    batch_size: 256 "
+      "  } "
+      "  top: 'hdf5data' "
+      "} "
+      "layers { "
+      "  name: 'conv1' "
+      "  type: CONVOLUTION "
+      "  convolution_param { "
+      "    num_output: 96 "
+      "    bias_term: false "
+      "    pad: 4 "
+      "    kernel_size: 11 "
+      "    stride: 4 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 3. "
+      "    } "
+      "  } "
+      "  blobs_lr: 1. "
+      "  blobs_lr: 2. "
+      "  weight_decay: 1. "
+      "  weight_decay: 0. "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  name: 'pool1ave' "
+      "  type: POOLING "
+      "  pooling_param { "
+      "    pool: AVE "
+      "    kernel_size: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'pool1ave' "
+      "} "
+      "layers { "
+      "  name: 'pool1stoch' "
+      "  type: POOLING "
+      "  pooling_param { "
+      "    pool: STOCHASTIC "
+      "    kernel_size: 4 "
+      "    stride: 5 "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'pool1stoch' "
+      "} "
+      "layers { "
+      "  name: 'concat' "
+      "  type: CONCAT "
+      "  concat_param { "
+      "    concat_dim: 2 "
+      "  } "
+      "  bottom: 'pool1ave' "
+      "  bottom: 'pool1stoch' "
+      "  top: 'pool1concat' "
+      "} "
+      "layers { "
+      "  name: 'norm1' "
+      "  type: LRN "
+      "  lrn_param { "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool1concat' "
+      "  top: 'norm1' "
+      "} "
+      "layers { "
+      "  name: 'fc6' "
+      "  type: INNER_PRODUCT "
+      "  inner_product_param { "
+      "    num_output: 4096 "
+      "    bias_term: false "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  blobs_lr: 1. "
+      "  blobs_lr: 2. "
+      "  weight_decay: 1. "
+      "  weight_decay: 0. "
+      "  bottom: 'norm1' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  name: 'relu6' "
+      "  type: RELU "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  name: 'drop6' "
+      "  type: DROPOUT "
+      "  dropout_param { "
+      "    dropout_ratio: 0.2 "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  name: 'loss' "
+      "  type: INFOGAIN_LOSS "
+      "  infogain_loss_param { "
+      "    source: '/my/infogain/matrix' "
+      "  } "
+      "  bottom: 'fc6' "
+      "  bottom: 'label' "
+      "} "
+      "layers { "
+      "  name: 'accuracy' "
+      "  type: ACCURACY "
+      "} "
+      "layers { "
+      "  name: 'bnll' "
+      "  type: BNLL "
+      "} "
+      "layers { "
+      "  name: 'euclidean_loss' "
+      "  type: EUCLIDEAN_LOSS "
+      "} "
+      "layers { "
+      "  name: 'flatten' "
+      "  type: FLATTEN "
+      "} "
+      "layers { "
+      "  name: 'hdf5_output' "
+      "  type: HDF5_OUTPUT "
+      "  hdf5_output_param { "
+      "    file_name: '/my/hdf5/output/file' "
+      "  } "
+      "} "
+      "layers { "
+      "  name: 'im2col' "
+      "  type: IM2COL "
+      "} "
+      "layers { "
+      "  name: 'images' "
+      "  type: IMAGE_DATA "
+      "} "
+      "layers { "
+      "  name: 'multinomial_logistic_loss' "
+      "  type: MULTINOMIAL_LOGISTIC_LOSS "
+      "} "
+      "layers { "
+      "  name: 'sigmoid' "
+      "  type: SIGMOID "
+      "} "
+      "layers { "
+      "  name: 'softmax' "
+      "  type: SOFTMAX "
+      "} "
+      "layers { "
+      "  name: 'split' "
+      "  type: SPLIT "
+      "} "
+      "layers { "
+      "  name: 'tanh' "
+      "  type: TANH "
+      "} ";
+  this->RunV0UpgradeTest(input_proto, expected_output_proto);
+}
+
+TEST_F(NetUpgradeTest, TestImageNet) {
+  const string& v0_proto =
+      "name: 'CaffeNet' "
+      "layers { "
+      "  layer { "
+      "    name: 'data' "
+      "    type: 'data' "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "    batchsize: 256 "
+      "    cropsize: 227 "
+      "    mirror: true "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv1' "
+      "    type: 'conv' "
+      "    num_output: 96 "
+      "    kernelsize: 11 "
+      "    stride: 4 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu1' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pool1' "
+      "    type: 'pool' "
+      "    pool: MAX "
+      "    kernelsize: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'pool1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'norm1' "
+      "    type: 'lrn' "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool1' "
+      "  top: 'norm1' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pad2' "
+      "    type: 'padding' "
+      "    pad: 2 "
+      "  } "
+      "  bottom: 'norm1' "
+      "  top: 'pad2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv2' "
+      "    type: 'conv' "
+      "    num_output: 256 "
+      "    group: 2 "
+      "    kernelsize: 5 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pad2' "
+      "  top: 'conv2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu2' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv2' "
+      "  top: 'conv2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pool2' "
+      "    type: 'pool' "
+      "    pool: MAX "
+      "    kernelsize: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv2' "
+      "  top: 'pool2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'norm2' "
+      "    type: 'lrn' "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool2' "
+      "  top: 'norm2' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pad3' "
+      "    type: 'padding' "
+      "    pad: 1 "
+      "  } "
+      "  bottom: 'norm2' "
+      "  top: 'pad3' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv3' "
+      "    type: 'conv' "
+      "    num_output: 384 "
+      "    kernelsize: 3 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pad3' "
+      "  top: 'conv3' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu3' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv3' "
+      "  top: 'conv3' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pad4' "
+      "    type: 'padding' "
+      "    pad: 1 "
+      "  } "
+      "  bottom: 'conv3' "
+      "  top: 'pad4' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv4' "
+      "    type: 'conv' "
+      "    num_output: 384 "
+      "    group: 2 "
+      "    kernelsize: 3 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pad4' "
+      "  top: 'conv4' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu4' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv4' "
+      "  top: 'conv4' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pad5' "
+      "    type: 'padding' "
+      "    pad: 1 "
+      "  } "
+      "  bottom: 'conv4' "
+      "  top: 'pad5' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'conv5' "
+      "    type: 'conv' "
+      "    num_output: 256 "
+      "    group: 2 "
+      "    kernelsize: 3 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pad5' "
+      "  top: 'conv5' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu5' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'conv5' "
+      "  top: 'conv5' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'pool5' "
+      "    type: 'pool' "
+      "    kernelsize: 3 "
+      "    pool: MAX "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv5' "
+      "  top: 'pool5' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc6' "
+      "    type: 'innerproduct' "
+      "    num_output: 4096 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'pool5' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu6' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'drop6' "
+      "    type: 'dropout' "
+      "    dropout_ratio: 0.5 "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc7' "
+      "    type: 'innerproduct' "
+      "    num_output: 4096 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc7' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'relu7' "
+      "    type: 'relu' "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc7' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'drop7' "
+      "    type: 'dropout' "
+      "    dropout_ratio: 0.5 "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc7' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'fc8' "
+      "    type: 'innerproduct' "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "    blobs_lr: 1. "
+      "    blobs_lr: 2. "
+      "    weight_decay: 1. "
+      "    weight_decay: 0. "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc8' "
+      "} "
+      "layers { "
+      "  layer { "
+      "    name: 'loss' "
+      "    type: 'softmax_loss' "
+      "  } "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  const string& expected_v1_proto =
+      "name: 'CaffeNet' "
+      "layers { "
+      "  name: 'data' "
+      "  type: DATA "
+      "  data_param { "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    batch_size: 256 "
+      "  } "
+      "  transform_param { "
+      "    crop_size: 227 "
+      "    mirror: true "
+      "    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layers { "
+      "  name: 'conv1' "
+      "  type: CONVOLUTION "
+      "  convolution_param { "
+      "    num_output: 96 "
+      "    kernel_size: 11 "
+      "    stride: 4 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "  } "
+      "  blobs_lr: 1. "
+      "  blobs_lr: 2. "
+      "  weight_decay: 1. "
+      "  weight_decay: 0. "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  name: 'relu1' "
+      "  type: RELU "
+      "  bottom: 'conv1' "
+      "  top: 'conv1' "
+      "} "
+      "layers { "
+      "  name: 'pool1' "
+      "  type: POOLING "
+      "  pooling_param { "
+      "    pool: MAX "
+      "    kernel_size: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'pool1' "
+      "} "
+      "layers { "
+      "  name: 'norm1' "
+      "  type: LRN "
+      "  lrn_param { "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool1' "
+      "  top: 'norm1' "
+      "} "
+      "layers { "
+      "  name: 'conv2' "
+      "  type: CONVOLUTION "
+      "  convolution_param { "
+      "    num_output: 256 "
+      "    group: 2 "
+      "    kernel_size: 5 "
+      "    pad: 2 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  blobs_lr: 1. "
+      "  blobs_lr: 2. "
+      "  weight_decay: 1. "
+      "  weight_decay: 0. "
+      "  bottom: 'norm1' "
+      "  top: 'conv2' "
+      "} "
+      "layers { "
+      "  name: 'relu2' "
+      "  type: RELU "
+      "  bottom: 'conv2' "
+      "  top: 'conv2' "
+      "} "
+      "layers { "
+      "  name: 'pool2' "
+      "  type: POOLING "
+      "  pooling_param { "
+      "    pool: MAX "
+      "    kernel_size: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv2' "
+      "  top: 'pool2' "
+      "} "
+      "layers { "
+      "  name: 'norm2' "
+      "  type: LRN "
+      "  lrn_param { "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool2' "
+      "  top: 'norm2' "
+      "} "
+      "layers { "
+      "  name: 'conv3' "
+      "  type: CONVOLUTION "
+      "  convolution_param { "
+      "    num_output: 384 "
+      "    kernel_size: 3 "
+      "    pad: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "  } "
+      "  blobs_lr: 1. "
+      "  blobs_lr: 2. "
+      "  weight_decay: 1. "
+      "  weight_decay: 0. "
+      "  bottom: 'norm2' "
+      "  top: 'conv3' "
+      "} "
+      "layers { "
+      "  name: 'relu3' "
+      "  type: RELU "
+      "  bottom: 'conv3' "
+      "  top: 'conv3' "
+      "} "
+      "layers { "
+      "  name: 'conv4' "
+      "  type: CONVOLUTION "
+      "  convolution_param { "
+      "    num_output: 384 "
+      "    group: 2 "
+      "    kernel_size: 3 "
+      "    pad: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  blobs_lr: 1. "
+      "  blobs_lr: 2. "
+      "  weight_decay: 1. "
+      "  weight_decay: 0. "
+      "  bottom: 'conv3' "
+      "  top: 'conv4' "
+      "} "
+      "layers { "
+      "  name: 'relu4' "
+      "  type: RELU "
+      "  bottom: 'conv4' "
+      "  top: 'conv4' "
+      "} "
+      "layers { "
+      "  name: 'conv5' "
+      "  type: CONVOLUTION "
+      "  convolution_param { "
+      "    num_output: 256 "
+      "    group: 2 "
+      "    kernel_size: 3 "
+      "    pad: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  blobs_lr: 1. "
+      "  blobs_lr: 2. "
+      "  weight_decay: 1. "
+      "  weight_decay: 0. "
+      "  bottom: 'conv4' "
+      "  top: 'conv5' "
+      "} "
+      "layers { "
+      "  name: 'relu5' "
+      "  type: RELU "
+      "  bottom: 'conv5' "
+      "  top: 'conv5' "
+      "} "
+      "layers { "
+      "  name: 'pool5' "
+      "  type: POOLING "
+      "  pooling_param { "
+      "    kernel_size: 3 "
+      "    pool: MAX "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv5' "
+      "  top: 'pool5' "
+      "} "
+      "layers { "
+      "  name: 'fc6' "
+      "  type: INNER_PRODUCT "
+      "  inner_product_param { "
+      "    num_output: 4096 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  blobs_lr: 1. "
+      "  blobs_lr: 2. "
+      "  weight_decay: 1. "
+      "  weight_decay: 0. "
+      "  bottom: 'pool5' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  name: 'relu6' "
+      "  type: RELU "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  name: 'drop6' "
+      "  type: DROPOUT "
+      "  dropout_param { "
+      "    dropout_ratio: 0.5 "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layers { "
+      "  name: 'fc7' "
+      "  type: INNER_PRODUCT "
+      "  inner_product_param { "
+      "    num_output: 4096 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  blobs_lr: 1. "
+      "  blobs_lr: 2. "
+      "  weight_decay: 1. "
+      "  weight_decay: 0. "
+      "  bottom: 'fc6' "
+      "  top: 'fc7' "
+      "} "
+      "layers { "
+      "  name: 'relu7' "
+      "  type: RELU "
+      "  bottom: 'fc7' "
+      "  top: 'fc7' "
+      "} "
+      "layers { "
+      "  name: 'drop7' "
+      "  type: DROPOUT "
+      "  dropout_param { "
+      "    dropout_ratio: 0.5 "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc7' "
+      "} "
+      "layers { "
+      "  name: 'fc8' "
+      "  type: INNER_PRODUCT "
+      "  inner_product_param { "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "  } "
+      "  blobs_lr: 1. "
+      "  blobs_lr: 2. "
+      "  weight_decay: 1. "
+      "  weight_decay: 0. "
+      "  bottom: 'fc7' "
+      "  top: 'fc8' "
+      "} "
+      "layers { "
+      "  name: 'loss' "
+      "  type: SOFTMAX_LOSS "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunV0UpgradeTest(v0_proto, expected_v1_proto);
+
+  const string& expected_v2_proto =
+      "name: 'CaffeNet' "
+      "layer { "
+      "  name: 'data' "
+      "  type: 'Data' "
+      "  data_param { "
+      "    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
+      "    batch_size: 256 "
+      "  } "
+      "  transform_param { "
+      "    crop_size: 227 "
+      "    mirror: true "
+      "    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
+      "  } "
+      "  top: 'data' "
+      "  top: 'label' "
+      "} "
+      "layer { "
+      "  name: 'conv1' "
+      "  type: 'Convolution' "
+      "  convolution_param { "
+      "    num_output: 96 "
+      "    kernel_size: 11 "
+      "    stride: 4 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'data' "
+      "  top: 'conv1' "
+      "} "
+      "layer { "
+      "  name: 'relu1' "
+      "  type: 'ReLU' "
+      "  bottom: 'conv1' "
+      "  top: 'conv1' "
+      "} "
+      "layer { "
+      "  name: 'pool1' "
+      "  type: 'Pooling' "
+      "  pooling_param { "
+      "    pool: MAX "
+      "    kernel_size: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv1' "
+      "  top: 'pool1' "
+      "} "
+      "layer { "
+      "  name: 'norm1' "
+      "  type: 'LRN' "
+      "  lrn_param { "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool1' "
+      "  top: 'norm1' "
+      "} "
+      "layer { "
+      "  name: 'conv2' "
+      "  type: 'Convolution' "
+      "  convolution_param { "
+      "    num_output: 256 "
+      "    group: 2 "
+      "    kernel_size: 5 "
+      "    pad: 2 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'norm1' "
+      "  top: 'conv2' "
+      "} "
+      "layer { "
+      "  name: 'relu2' "
+      "  type: 'ReLU' "
+      "  bottom: 'conv2' "
+      "  top: 'conv2' "
+      "} "
+      "layer { "
+      "  name: 'pool2' "
+      "  type: 'Pooling' "
+      "  pooling_param { "
+      "    pool: MAX "
+      "    kernel_size: 3 "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv2' "
+      "  top: 'pool2' "
+      "} "
+      "layer { "
+      "  name: 'norm2' "
+      "  type: 'LRN' "
+      "  lrn_param { "
+      "    local_size: 5 "
+      "    alpha: 0.0001 "
+      "    beta: 0.75 "
+      "  } "
+      "  bottom: 'pool2' "
+      "  top: 'norm2' "
+      "} "
+      "layer { "
+      "  name: 'conv3' "
+      "  type: 'Convolution' "
+      "  convolution_param { "
+      "    num_output: 384 "
+      "    kernel_size: 3 "
+      "    pad: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'norm2' "
+      "  top: 'conv3' "
+      "} "
+      "layer { "
+      "  name: 'relu3' "
+      "  type: 'ReLU' "
+      "  bottom: 'conv3' "
+      "  top: 'conv3' "
+      "} "
+      "layer { "
+      "  name: 'conv4' "
+      "  type: 'Convolution' "
+      "  convolution_param { "
+      "    num_output: 384 "
+      "    group: 2 "
+      "    kernel_size: 3 "
+      "    pad: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'conv3' "
+      "  top: 'conv4' "
+      "} "
+      "layer { "
+      "  name: 'relu4' "
+      "  type: 'ReLU' "
+      "  bottom: 'conv4' "
+      "  top: 'conv4' "
+      "} "
+      "layer { "
+      "  name: 'conv5' "
+      "  type: 'Convolution' "
+      "  convolution_param { "
+      "    num_output: 256 "
+      "    group: 2 "
+      "    kernel_size: 3 "
+      "    pad: 1 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'conv4' "
+      "  top: 'conv5' "
+      "} "
+      "layer { "
+      "  name: 'relu5' "
+      "  type: 'ReLU' "
+      "  bottom: 'conv5' "
+      "  top: 'conv5' "
+      "} "
+      "layer { "
+      "  name: 'pool5' "
+      "  type: 'Pooling' "
+      "  pooling_param { "
+      "    kernel_size: 3 "
+      "    pool: MAX "
+      "    stride: 2 "
+      "  } "
+      "  bottom: 'conv5' "
+      "  top: 'pool5' "
+      "} "
+      "layer { "
+      "  name: 'fc6' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 4096 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'pool5' "
+      "  top: 'fc6' "
+      "} "
+      "layer { "
+      "  name: 'relu6' "
+      "  type: 'ReLU' "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layer { "
+      "  name: 'drop6' "
+      "  type: 'Dropout' "
+      "  dropout_param { "
+      "    dropout_ratio: 0.5 "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc6' "
+      "} "
+      "layer { "
+      "  name: 'fc7' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 4096 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.005 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 1. "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'fc6' "
+      "  top: 'fc7' "
+      "} "
+      "layer { "
+      "  name: 'relu7' "
+      "  type: 'ReLU' "
+      "  bottom: 'fc7' "
+      "  top: 'fc7' "
+      "} "
+      "layer { "
+      "  name: 'drop7' "
+      "  type: 'Dropout' "
+      "  dropout_param { "
+      "    dropout_ratio: 0.5 "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc7' "
+      "} "
+      "layer { "
+      "  name: 'fc8' "
+      "  type: 'InnerProduct' "
+      "  inner_product_param { "
+      "    num_output: 1000 "
+      "    weight_filler { "
+      "      type: 'gaussian' "
+      "      std: 0.01 "
+      "    } "
+      "    bias_filler { "
+      "      type: 'constant' "
+      "      value: 0 "
+      "    } "
+      "  } "
+      "  param { "
+      "    lr_mult: 1 "
+      "    decay_mult: 1 "
+      "  } "
+      "  param { "
+      "    lr_mult: 2 "
+      "    decay_mult: 0 "
+      "  } "
+      "  bottom: 'fc7' "
+      "  top: 'fc8' "
+      "} "
+      "layer { "
+      "  name: 'loss' "
+      "  type: 'SoftmaxWithLoss' "
+      "  bottom: 'fc8' "
+      "  bottom: 'label' "
+      "} ";
+  this->RunV1UpgradeTest(expected_v1_proto, expected_v2_proto);
+}  // NOLINT(readability/fn_size)
+
+TEST_F(NetUpgradeTest, TestUpgradeV1LayerType) {
+  LayerParameter layer_param;
+  shared_ptr<Layer<float> > layer;
+  for (int i = 0; i < V1LayerParameter_LayerType_LayerType_ARRAYSIZE; ++i) {
+    ASSERT_TRUE(V1LayerParameter_LayerType_IsValid(i));
+    V1LayerParameter_LayerType v1_type = V1LayerParameter_LayerType(i);
+    string v2_layer_type(UpgradeV1LayerType(v1_type));
+    if (v2_layer_type == "") {
+      EXPECT_EQ(V1LayerParameter_LayerType_NONE, v1_type);
+      continue;  // Empty string isn't actually a valid layer type.
+    }
+    layer_param.set_type(v2_layer_type);
+    layer = LayerRegistry<float>::CreateLayer(layer_param);
+    EXPECT_EQ(v2_layer_type, layer->type());
+  }
+}
+
+}  // NOLINT(readability/fn_size)  // namespace caffe
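
For reference, the two expected nets above exercise the same V1-to-V2 mapping for
every layer: enum layer types become string types, "layers" becomes "layer", and the
per-blob blobs_lr/weight_decay scalars become repeated param messages. Condensed
from the strings above for one inner-product layer:

    # V1
    layers {
      type: INNER_PRODUCT
      blobs_lr: 1.
      blobs_lr: 2.
      weight_decay: 1.
      weight_decay: 0.
    }
    # V2
    layer {
      type: 'InnerProduct'
      param { lr_mult: 1 decay_mult: 1 }
      param { lr_mult: 2 decay_mult: 0 }
    }
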
diff --git a/src/caffe/test/test_util_blas.cpp b/src/caffe/test/test_util_blas.cpp
new file mode 100644
index 0000000..8770f30
--- /dev/null
+++ b/src/caffe/test/test_util_blas.cpp
@@ -0,0 +1,134 @@
+#ifndef CPU_ONLY  // CPU-GPU test
+
+#include <cstring>
+
+#include "gtest/gtest.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/util/device_alternate.hpp"
+#include "caffe/util/math_functions.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+extern cudaDeviceProp CAFFE_TEST_CUDA_PROP;
+
+template <typename TypeParam>
+class GemmTest : public ::testing::Test {};
+
+TYPED_TEST_CASE(GemmTest, TestDtypes);
+
+TYPED_TEST(GemmTest, TestGemmCPUGPU) {
+  Blob<TypeParam> A(1, 1, 2, 3);
+  Blob<TypeParam> B(1, 1, 3, 4);
+  Blob<TypeParam> C(1, 1, 2, 4);
+  TypeParam data[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12};
+  TypeParam A_reshape_data[6] = {1, 4, 2, 5, 3, 6};
+  TypeParam B_reshape_data[12] = {1, 5, 9, 2, 6, 10, 3, 7, 11, 4, 8, 12};
+  TypeParam result[8] = {38, 44, 50, 56, 83, 98, 113, 128};
+  caffe_copy(6, data, A.mutable_cpu_data());
+  caffe_copy(12, data, B.mutable_cpu_data());
+
+  if (sizeof(TypeParam) == 4 || CAFFE_TEST_CUDA_PROP.major >= 2) {
+    // [1, 2, 3; 4 5 6] * [1, 2, 3, 4; 5, 6, 7, 8; 9, 10, 11, 12];
+    caffe_cpu_gemm<TypeParam>(CblasNoTrans, CblasNoTrans, 2, 4, 3, 1.,
+        A.cpu_data(), B.cpu_data(), 0., C.mutable_cpu_data());
+    for (int i = 0; i < 8; ++i) {
+      EXPECT_EQ(C.cpu_data()[i], result[i]);
+    }
+    caffe_gpu_gemm<TypeParam>(CblasNoTrans, CblasNoTrans, 2, 4, 3, 1.,
+        A.gpu_data(), B.gpu_data(), 0., C.mutable_gpu_data());
+    for (int i = 0; i < 8; ++i) {
+      EXPECT_EQ(C.cpu_data()[i], result[i]);
+    }
+
+    // Test when we have a transposed A
+    A.Reshape(1, 1, 3, 2);
+    caffe_copy(6, A_reshape_data, A.mutable_cpu_data());
+    caffe_cpu_gemm<TypeParam>(CblasTrans, CblasNoTrans, 2, 4, 3, 1.,
+        A.cpu_data(), B.cpu_data(), 0., C.mutable_cpu_data());
+    for (int i = 0; i < 8; ++i) {
+      EXPECT_EQ(C.cpu_data()[i], result[i]);
+    }
+    caffe_gpu_gemm<TypeParam>(CblasTrans, CblasNoTrans, 2, 4, 3, 1.,
+        A.gpu_data(), B.gpu_data(), 0., C.mutable_gpu_data());
+    for (int i = 0; i < 8; ++i) {
+      EXPECT_EQ(C.cpu_data()[i], result[i]);
+    }
+
+    // Test when we have a transposed A and a transposed B too
+    B.Reshape(1, 1, 4, 3);
+    caffe_copy(12, B_reshape_data, B.mutable_cpu_data());
+    caffe_cpu_gemm<TypeParam>(CblasTrans, CblasTrans, 2, 4, 3, 1.,
+        A.cpu_data(), B.cpu_data(), 0., C.mutable_cpu_data());
+    for (int i = 0; i < 8; ++i) {
+      EXPECT_EQ(C.cpu_data()[i], result[i]);
+    }
+    caffe_gpu_gemm<TypeParam>(CblasTrans, CblasTrans, 2, 4, 3, 1.,
+        A.gpu_data(), B.gpu_data(), 0., C.mutable_gpu_data());
+    for (int i = 0; i < 8; ++i) {
+      EXPECT_EQ(C.cpu_data()[i], result[i]);
+    }
+
+    // Test when we have a transposed B
+    A.Reshape(1, 1, 2, 3);
+    caffe_copy(6, data, A.mutable_cpu_data());
+    caffe_cpu_gemm<TypeParam>(CblasNoTrans, CblasTrans, 2, 4, 3, 1.,
+        A.cpu_data(), B.cpu_data(), 0., C.mutable_cpu_data());
+    for (int i = 0; i < 8; ++i) {
+      EXPECT_EQ(C.cpu_data()[i], result[i]);
+    }
+    caffe_gpu_gemm<TypeParam>(CblasNoTrans, CblasTrans, 2, 4, 3, 1.,
+        A.gpu_data(), B.gpu_data(), 0., C.mutable_gpu_data());
+    for (int i = 0; i < 8; ++i) {
+      EXPECT_EQ(C.cpu_data()[i], result[i]);
+    }
+  } else {
+    LOG(ERROR) << "Skipping test due to old architecture.";
+  }
+}
+
+
+TYPED_TEST(GemmTest, TestGemvCPUGPU) {
+  Blob<TypeParam> A(1, 1, 2, 3);
+  Blob<TypeParam> x(1, 1, 1, 3);
+  Blob<TypeParam> y(1, 1, 1, 2);
+  TypeParam data[6] = {1, 2, 3, 4, 5, 6};
+  TypeParam result_2[2] = {14, 32};
+  TypeParam result_3[3] = {9, 12, 15};
+  caffe_copy(6, data, A.mutable_cpu_data());
+  caffe_copy(3, data, x.mutable_cpu_data());
+
+  if (sizeof(TypeParam) == 4 || CAFFE_TEST_CUDA_PROP.major >= 2) {
+    caffe_cpu_gemv<TypeParam>(CblasNoTrans, 2, 3, 1., A.cpu_data(),
+        x.cpu_data(), 0., y.mutable_cpu_data());
+    for (int i = 0; i < 2; ++i) {
+      EXPECT_EQ(y.cpu_data()[i], result_2[i]);
+    }
+    caffe_gpu_gemv<TypeParam>(CblasNoTrans, 2, 3, 1., A.gpu_data(),
+        x.gpu_data(), 0., y.mutable_gpu_data());
+    for (int i = 0; i < 2; ++i) {
+      EXPECT_EQ(y.cpu_data()[i], result_2[i]);
+    }
+
+    // Test transpose case
+    caffe_copy(2, data, y.mutable_cpu_data());
+    caffe_cpu_gemv<TypeParam>(CblasTrans, 2, 3, 1., A.cpu_data(),
+        y.cpu_data(), 0., x.mutable_cpu_data());
+    for (int i = 0; i < 3; ++i) {
+      EXPECT_EQ(x.cpu_data()[i], result_3[i]);
+    }
+    caffe_gpu_gemv<TypeParam>(CblasTrans, 2, 3, 1., A.gpu_data(),
+        y.gpu_data(), 0., x.mutable_gpu_data());
+    for (int i = 0; i < 3; ++i) {
+      EXPECT_EQ(x.cpu_data()[i], result_3[i]);
+    }
+  } else {
+    LOG(ERROR) << "Skipping test due to old architecture.";
+  }
+}
+
+}  // namespace caffe
+
+#endif  // CPU_ONLY
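
The expected values in TestGemmCPUGPU can be checked by hand; a minimal sketch of
the first (no-transpose) case, using only the caffe_cpu_gemm signature exercised
above and assuming caffe/util/math_functions.hpp is included:

    void GemmByHand() {
      // A is 2x3, B is 3x4, row-major; C = 1.0 * A * B + 0.0 * C.
      // C[0][0] = 1*1 + 2*5 + 3*9  = 38
      // C[1][3] = 4*4 + 5*8 + 6*12 = 128   (matches result[] above)
      float A[6]  = {1, 2, 3, 4, 5, 6};
      float B[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12};
      float C[8];
      caffe::caffe_cpu_gemm<float>(CblasNoTrans, CblasNoTrans, 2, 4, 3,
                                   1.f, A, B, 0.f, C);
    }
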
diff --git a/src/caffe/util/benchmark.cpp b/src/caffe/util/benchmark.cpp
new file mode 100644
index 0000000..1d269c3
--- /dev/null
+++ b/src/caffe/util/benchmark.cpp
@@ -0,0 +1,168 @@
+#include <boost/date_time/posix_time/posix_time.hpp>
+
+#include "caffe/common.hpp"
+#include "caffe/util/benchmark.hpp"
+
+namespace caffe {
+
+Timer::Timer()
+    : initted_(false),
+      running_(false),
+      has_run_at_least_once_(false) {
+  Init();
+}
+
+Timer::~Timer() {
+  if (Caffe::mode() == Caffe::GPU) {
+#ifndef CPU_ONLY
+    CUDA_CHECK(cudaEventDestroy(start_gpu_));
+    CUDA_CHECK(cudaEventDestroy(stop_gpu_));
+#else
+    NO_GPU;
+#endif
+  }
+}
+
+void Timer::Start() {
+  if (!running()) {
+    if (Caffe::mode() == Caffe::GPU) {
+#ifndef CPU_ONLY
+      CUDA_CHECK(cudaEventRecord(start_gpu_, 0));
+#else
+      NO_GPU;
+#endif
+    } else {
+      start_cpu_ = boost::posix_time::microsec_clock::local_time();
+    }
+    running_ = true;
+    has_run_at_least_once_ = true;
+  }
+}
+
+void Timer::Stop() {
+  if (running()) {
+    if (Caffe::mode() == Caffe::GPU) {
+#ifndef CPU_ONLY
+      CUDA_CHECK(cudaEventRecord(stop_gpu_, 0));
+      CUDA_CHECK(cudaEventSynchronize(stop_gpu_));
+#else
+      NO_GPU;
+#endif
+    } else {
+      stop_cpu_ = boost::posix_time::microsec_clock::local_time();
+    }
+    running_ = false;
+  }
+}
+
+
+float Timer::MicroSeconds() {
+  if (!has_run_at_least_once()) {
+    LOG(WARNING) << "Timer has never been run before reading time.";
+    return 0;
+  }
+  if (running()) {
+    Stop();
+  }
+  if (Caffe::mode() == Caffe::GPU) {
+#ifndef CPU_ONLY
+    CUDA_CHECK(cudaEventElapsedTime(&elapsed_milliseconds_, start_gpu_,
+                                    stop_gpu_));
+    // CUDA events only measure milliseconds, so convert.
+    elapsed_microseconds_ = elapsed_milliseconds_ * 1000;
+#else
+    NO_GPU;
+#endif
+  } else {
+    elapsed_microseconds_ = (stop_cpu_ - start_cpu_).total_microseconds();
+  }
+  return elapsed_microseconds_;
+}
+
+float Timer::MilliSeconds() {
+  if (!has_run_at_least_once()) {
+    LOG(WARNING) << "Timer has never been run before reading time.";
+    return 0;
+  }
+  if (running()) {
+    Stop();
+  }
+  if (Caffe::mode() == Caffe::GPU) {
+#ifndef CPU_ONLY
+    CUDA_CHECK(cudaEventElapsedTime(&elapsed_milliseconds_, start_gpu_,
+                                    stop_gpu_));
+#else
+    NO_GPU;
+#endif
+  } else {
+    elapsed_milliseconds_ = (stop_cpu_ - start_cpu_).total_milliseconds();
+  }
+  return elapsed_milliseconds_;
+}
+
+float Timer::Seconds() {
+  return MilliSeconds() / 1000.;
+}
+
+void Timer::Init() {
+  if (!initted()) {
+    if (Caffe::mode() == Caffe::GPU) {
+#ifndef CPU_ONLY
+      CUDA_CHECK(cudaEventCreate(&start_gpu_));
+      CUDA_CHECK(cudaEventCreate(&stop_gpu_));
+#else
+      NO_GPU;
+#endif
+    }
+    initted_ = true;
+  }
+}
+
+CPUTimer::CPUTimer() {
+  this->initted_ = true;
+  this->running_ = false;
+  this->has_run_at_least_once_ = false;
+}
+
+void CPUTimer::Start() {
+  if (!running()) {
+    this->start_cpu_ = boost::posix_time::microsec_clock::local_time();
+    this->running_ = true;
+    this->has_run_at_least_once_ = true;
+  }
+}
+
+void CPUTimer::Stop() {
+  if (running()) {
+    this->stop_cpu_ = boost::posix_time::microsec_clock::local_time();
+    this->running_ = false;
+  }
+}
+
+float CPUTimer::MilliSeconds() {
+  if (!has_run_at_least_once()) {
+    LOG(WARNING) << "Timer has never been run before reading time.";
+    return 0;
+  }
+  if (running()) {
+    Stop();
+  }
+  this->elapsed_milliseconds_ = (this->stop_cpu_ -
+                                this->start_cpu_).total_milliseconds();
+  return this->elapsed_milliseconds_;
+}
+
+float CPUTimer::MicroSeconds() {
+  if (!has_run_at_least_once()) {
+    LOG(WARNING) << "Timer has never been run before reading time.";
+    return 0;
+  }
+  if (running()) {
+    Stop();
+  }
+  this->elapsed_microseconds_ = (this->stop_cpu_ -
+                                this->start_cpu_).total_microseconds();
+  return this->elapsed_microseconds_;
+}
+
+}  // namespace caffe
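
A minimal usage sketch for the timers above (assuming the usual glog/caffe
includes); the GPU path works the same way but brackets the work with CUDA events:

    void TimeSomething() {
      caffe::CPUTimer timer;
      timer.Start();                 // records start_cpu_
      // ... work to be measured ...
      timer.Stop();                  // records stop_cpu_
      LOG(INFO) << "Elapsed: " << timer.MilliSeconds() << " ms";
    }
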
diff --git a/src/caffe/util/cudnn.cpp b/src/caffe/util/cudnn.cpp
new file mode 100644
index 0000000..1772f00
--- /dev/null
+++ b/src/caffe/util/cudnn.cpp
@@ -0,0 +1,23 @@
+#ifdef USE_CUDNN
+#include "caffe/util/cudnn.hpp"
+
+namespace caffe {
+namespace cudnn {
+
+float dataType<float>::oneval = 1.0;
+float dataType<float>::zeroval = 0.0;
+const void* dataType<float>::one =
+    static_cast<void *>(&dataType<float>::oneval);
+const void* dataType<float>::zero =
+    static_cast<void *>(&dataType<float>::zeroval);
+
+double dataType<double>::oneval = 1.0;
+double dataType<double>::zeroval = 0.0;
+const void* dataType<double>::one =
+    static_cast<void *>(&dataType<double>::oneval);
+const void* dataType<double>::zero =
+    static_cast<void *>(&dataType<double>::zeroval);
+
+}  // namespace cudnn
+}  // namespace caffe
+#endif
diff --git a/src/caffe/util/db.cpp b/src/caffe/util/db.cpp
new file mode 100644
index 0000000..f55420e
--- /dev/null
+++ b/src/caffe/util/db.cpp
@@ -0,0 +1,31 @@
+#include "caffe/util/db.hpp"
+#include "caffe/util/db_leveldb.hpp"
+#include "caffe/util/db_lmdb.hpp"
+
+#include <string>
+
+namespace caffe { namespace db {
+
+DB* GetDB(DataParameter::DB backend) {
+  switch (backend) {
+  case DataParameter_DB_LEVELDB:
+    return new LevelDB();
+  case DataParameter_DB_LMDB:
+    return new LMDB();
+  default:
+    LOG(FATAL) << "Unknown database backend";
+  }
+}
+
+DB* GetDB(const string& backend) {
+  if (backend == "leveldb") {
+    return new LevelDB();
+  } else if (backend == "lmdb") {
+    return new LMDB();
+  } else {
+    LOG(FATAL) << "Unknown database backend";
+  }
+}
+
+}  // namespace db
+}  // namespace caffe
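
A minimal write-path sketch for this wrapper, assuming the Mode enum (READ, WRITE,
NEW) and the Transaction::Put/Commit interface declared in caffe/util/db.hpp:

    void WriteOneEntry(const std::string& path) {
      caffe::db::DB* db = caffe::db::GetDB("lmdb");    // or "leveldb"
      db->Open(path, caffe::db::NEW);                  // LMDB::Open also mkdirs path
      caffe::db::Transaction* txn = db->NewTransaction();
      txn->Put("key-0000", "serialized Datum bytes");
      txn->Commit();                                   // assumed from db.hpp
      delete txn;
      delete db;
    }
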
diff --git a/src/caffe/util/db_leveldb.cpp b/src/caffe/util/db_leveldb.cpp
new file mode 100644
index 0000000..06c4662
--- /dev/null
+++ b/src/caffe/util/db_leveldb.cpp
@@ -0,0 +1,21 @@
+#include "caffe/util/db_leveldb.hpp"
+
+#include <string>
+
+namespace caffe { namespace db {
+
+void LevelDB::Open(const string& source, Mode mode) {
+  leveldb::Options options;
+  options.block_size = 65536;
+  options.write_buffer_size = 268435456;
+  options.max_open_files = 100;
+  options.error_if_exists = mode == NEW;
+  options.create_if_missing = mode != READ;
+  leveldb::Status status = leveldb::DB::Open(options, source, &db_);
+  CHECK(status.ok()) << "Failed to open leveldb " << source
+                     << std::endl << status.ToString();
+  LOG(INFO) << "Opened leveldb " << source;
+}
+
+}  // namespace db
+}  // namespace caffe
diff --git a/src/caffe/util/db_lmdb.cpp b/src/caffe/util/db_lmdb.cpp
new file mode 100644
index 0000000..a054b79
--- /dev/null
+++ b/src/caffe/util/db_lmdb.cpp
@@ -0,0 +1,51 @@
+#include "caffe/util/db_lmdb.hpp"
+
+#include <sys/stat.h>
+
+#include <string>
+
+namespace caffe { namespace db {
+
+const size_t LMDB_MAP_SIZE = 1099511627776;  // 1 TB
+
+void LMDB::Open(const string& source, Mode mode) {
+  MDB_CHECK(mdb_env_create(&mdb_env_));
+  MDB_CHECK(mdb_env_set_mapsize(mdb_env_, LMDB_MAP_SIZE));
+  if (mode == NEW) {
+    CHECK_EQ(mkdir(source.c_str(), 0744), 0)
+        << "mkdir " << source << " failed";
+  }
+  int flags = 0;
+  if (mode == READ) {
+    flags = MDB_RDONLY | MDB_NOTLS;
+  }
+  MDB_CHECK(mdb_env_open(mdb_env_, source.c_str(), flags, 0664));
+  LOG(INFO) << "Opened lmdb " << source;
+}
+
+LMDBCursor* LMDB::NewCursor() {
+  MDB_txn* mdb_txn;
+  MDB_cursor* mdb_cursor;
+  MDB_CHECK(mdb_txn_begin(mdb_env_, NULL, MDB_RDONLY, &mdb_txn));
+  MDB_CHECK(mdb_dbi_open(mdb_txn, NULL, 0, &mdb_dbi_));
+  MDB_CHECK(mdb_cursor_open(mdb_txn, mdb_dbi_, &mdb_cursor));
+  return new LMDBCursor(mdb_txn, mdb_cursor);
+}
+
+LMDBTransaction* LMDB::NewTransaction() {
+  MDB_txn* mdb_txn;
+  MDB_CHECK(mdb_txn_begin(mdb_env_, NULL, 0, &mdb_txn));
+  MDB_CHECK(mdb_dbi_open(mdb_txn, NULL, 0, &mdb_dbi_));
+  return new LMDBTransaction(&mdb_dbi_, mdb_txn);
+}
+
+void LMDBTransaction::Put(const string& key, const string& value) {
+  MDB_val mdb_key, mdb_value;
+  mdb_key.mv_data = const_cast<char*>(key.data());
+  mdb_key.mv_size = key.size();
+  mdb_value.mv_data = const_cast<char*>(value.data());
+  mdb_value.mv_size = value.size();
+  MDB_CHECK(mdb_put(mdb_txn_, *mdb_dbi_, &mdb_key, &mdb_value, 0));
+}
+
+}  // namespace db
+}  // namespace caffe
diff --git a/src/caffe/util/im2col.cpp b/src/caffe/util/im2col.cpp
new file mode 100644
index 0000000..c48f31f
--- /dev/null
+++ b/src/caffe/util/im2col.cpp
@@ -0,0 +1,83 @@
+#include <cmath>
+#include <cstdlib>
+#include <cstring>
+
+#include "caffe/util/im2col.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void im2col_cpu(const Dtype* data_im, const int channels,
+    const int height, const int width, const int kernel_h, const int kernel_w,
+    const int pad_h, const int pad_w,
+    const int stride_h, const int stride_w,
+    Dtype* data_col) {
+  int height_col = (height + 2 * pad_h - kernel_h) / stride_h + 1;
+  int width_col = (width + 2 * pad_w - kernel_w) / stride_w + 1;
+  int channels_col = channels * kernel_h * kernel_w;
+  for (int c = 0; c < channels_col; ++c) {
+    int w_offset = c % kernel_w;
+    int h_offset = (c / kernel_w) % kernel_h;
+    int c_im = c / kernel_h / kernel_w;
+    for (int h = 0; h < height_col; ++h) {
+      for (int w = 0; w < width_col; ++w) {
+        int h_pad = h * stride_h - pad_h + h_offset;
+        int w_pad = w * stride_w - pad_w + w_offset;
+        if (h_pad >= 0 && h_pad < height && w_pad >= 0 && w_pad < width)
+          data_col[(c * height_col + h) * width_col + w] =
+            data_im[(c_im * height + h_pad) * width + w_pad];
+        else
+          data_col[(c * height_col + h) * width_col + w] = 0;
+      }
+    }
+  }
+}
+
+// Explicit instantiation
+template void im2col_cpu<float>(const float* data_im, const int channels,
+    const int height, const int width, const int kernel_h, const int kernel_w,
+    const int pad_h, const int pad_w, const int stride_h,
+    const int stride_w, float* data_col);
+template void im2col_cpu<double>(const double* data_im, const int channels,
+    const int height, const int width, const int kernel_h, const int kernel_w,
+    const int pad_h, const int pad_w, const int stride_h,
+    const int stride_w, double* data_col);
+
+template <typename Dtype>
+void col2im_cpu(const Dtype* data_col, const int channels,
+    const int height, const int width, const int patch_h, const int patch_w,
+    const int pad_h, const int pad_w,
+    const int stride_h, const int stride_w,
+    Dtype* data_im) {
+  caffe_set(height * width * channels, Dtype(0), data_im);
+  int height_col = (height + 2 * pad_h - patch_h) / stride_h + 1;
+  int width_col = (width + 2 * pad_w - patch_w) / stride_w + 1;
+  int channels_col = channels * patch_h * patch_w;
+  for (int c = 0; c < channels_col; ++c) {
+    int w_offset = c % patch_w;
+    int h_offset = (c / patch_w) % patch_h;
+    int c_im = c / patch_h / patch_w;
+    for (int h = 0; h < height_col; ++h) {
+      for (int w = 0; w < width_col; ++w) {
+        int h_pad = h * stride_h - pad_h + h_offset;
+        int w_pad = w * stride_w - pad_w + w_offset;
+        if (h_pad >= 0 && h_pad < height && w_pad >= 0 && w_pad < width)
+          data_im[(c_im * height + h_pad) * width + w_pad] +=
+              data_col[(c * height_col + h) * width_col + w];
+      }
+    }
+  }
+}
+
+// Explicit instantiation
+template void col2im_cpu<float>(const float* data_col, const int channels,
+    const int height, const int width, const int patch_h, const int patch_w,
+    const int pad_h, const int pad_w, const int stride_h,
+    const int stride_w, float* data_im);
+template void col2im_cpu<double>(const double* data_col, const int channels,
+    const int height, const int width, const int patch_h, const int patch_w,
+    const int pad_h, const int pad_w, const int stride_h,
+    const int stride_w, double* data_im);
+
+}  // namespace caffe
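
The column-buffer geometry above follows directly from the loop bounds:
height_col = (height + 2 * pad_h - kernel_h) / stride_h + 1, and likewise for
width_col. For the conv1 layer of the test net earlier in this patch (227x227
crop, kernel 11, stride 4, no padding) that is (227 - 11) / 4 + 1 = 55. A tiny
worked call, assuming caffe/util/im2col.hpp:

    void Im2colToy() {
      // 1-channel 3x3 image, 2x2 kernel, stride 1, no padding:
      // height_col = width_col = (3 - 2) / 1 + 1 = 2, channels_col = 1 * 2 * 2 = 4,
      // so data_col holds 4 * 2 * 2 = 16 values (one row per kernel offset).
      float image[9] = {1, 2, 3,
                        4, 5, 6,
                        7, 8, 9};
      float columns[16];
      caffe::im2col_cpu<float>(image, 1, 3, 3,   // channels, height, width
                               2, 2, 0, 0,       // kernel_h/w, pad_h/w
                               1, 1, columns);   // stride_h/w
      // First row of columns (kernel offset (0,0)) is {1, 2, 4, 5}.
    }
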
diff --git a/src/caffe/util/im2col.cu b/src/caffe/util/im2col.cu
new file mode 100644
index 0000000..c90f93e
--- /dev/null
+++ b/src/caffe/util/im2col.cu
@@ -0,0 +1,144 @@
+#include <algorithm>
+#include <cmath>
+#include <cstdlib>
+#include <cstring>
+
+#include "caffe/common.hpp"
+#include "caffe/util/im2col.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+__global__ void im2col_gpu_kernel(const int n, const Dtype* data_im,
+    const int height, const int width, const int kernel_h, const int kernel_w,
+    const int pad_h, const int pad_w,
+    const int stride_h, const int stride_w,
+    const int height_col, const int width_col,
+    Dtype* data_col) {
+  CUDA_KERNEL_LOOP(index, n) {
+    int w_out = index % width_col;
+    int h_index = index / width_col;
+    int h_out = h_index % height_col;
+    int channel_in = h_index / height_col;
+    int channel_out = channel_in * kernel_h * kernel_w;
+    int h_in = h_out * stride_h - pad_h;
+    int w_in = w_out * stride_w - pad_w;
+    Dtype* data_col_ptr = data_col;
+    data_col_ptr += (channel_out * height_col + h_out) * width_col + w_out;
+    const Dtype* data_im_ptr = data_im;
+    data_im_ptr += (channel_in * height + h_in) * width + w_in;
+    for (int i = 0; i < kernel_h; ++i) {
+      for (int j = 0; j < kernel_w; ++j) {
+        int h = h_in + i;
+        int w = w_in + j;
+        *data_col_ptr = (h >= 0 && w >= 0 && h < height && w < width) ?
+            data_im_ptr[i * width + j] : 0;
+        data_col_ptr += height_col * width_col;
+      }
+    }
+  }
+}
+
+template <typename Dtype>
+void im2col_gpu(const Dtype* data_im, const int channels,
+    const int height, const int width, const int kernel_h, const int kernel_w,
+    const int pad_h, const int pad_w,
+    const int stride_h, const int stride_w,
+    Dtype* data_col) {
+  // We launch one kernel with channels * height_col * width_col threads; each
+  // thread copies one kernel_h x kernel_w window of a single input channel
+  // into its column of data_col.
+  int height_col = (height + 2 * pad_h - kernel_h) / stride_h + 1;
+  int width_col = (width + 2 * pad_w - kernel_w) / stride_w + 1;
+  int num_kernels = channels * height_col * width_col;
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  im2col_gpu_kernel<Dtype><<<CAFFE_GET_BLOCKS(num_kernels),
+                             CAFFE_CUDA_NUM_THREADS>>>(
+      num_kernels, data_im, height, width, kernel_h, kernel_w, pad_h,
+      pad_w, stride_h, stride_w, height_col,
+      width_col, data_col);
+  CUDA_POST_KERNEL_CHECK;
+}
+
+
+// Explicit instantiation
+template void im2col_gpu<float>(const float* data_im, const int channels,
+    const int height, const int width, const int kernel_h, const int kernel_w,
+    const int pad_h, const int pad_w, const int stride_h, const int stride_w,
+    float* data_col);
+template void im2col_gpu<double>(const double* data_im, const int channels,
+    const int height, const int width, const int kernel_h, const int kernel_w,
+    const int pad_h, const int pad_w, const int stride_h, const int stride_w,
+    double* data_col);
+
+template <typename Dtype>
+__global__ void col2im_gpu_kernel(const int n, const Dtype* data_col,
+    const int height, const int width, const int channels,
+    const int patch_h, const int patch_w,
+    const int pad_h, const int pad_w,
+    const int stride_h, const int stride_w,
+    const int height_col, const int width_col,
+    Dtype* data_im) {
+  CUDA_KERNEL_LOOP(index, n) {
+    Dtype val = 0;
+    int w = index % width + pad_w;
+    int h = (index / width) % height + pad_h;
+    int c = index / (width * height);
+    // compute the start and end of the output
+    int w_col_start = (w < patch_w) ? 0 : (w - patch_w) / stride_w + 1;
+    int w_col_end = min(w / stride_w + 1, width_col);
+    int h_col_start = (h < patch_h) ? 0 : (h - patch_h) / stride_h + 1;
+    int h_col_end = min(h / stride_h + 1, height_col);
+    /*
+    for (int h_col = h_col_start; h_col < h_col_end; ++h_col) {
+      for (int w_col = w_col_start; w_col < w_col_end; ++w_col) {
+        // the col location: [c * width * height + h_out, w_out]
+        int c_col = c * patch_h * patch_w + (h - h_col * stride_h) * patch_w
+            + (w - w_col * stride_w);
+        val += data_col[(c_col * height_col + h_col) * width_col + w_col];
+      }
+    }
+    */
+    // equivalent implementation
+    int offset =
+        (c * patch_h * patch_w + h * patch_w + w) * height_col * width_col;
+    int coeff_h_col = (1 - stride_h * patch_w * height_col) * width_col;
+    int coeff_w_col = (1 - stride_w * height_col * width_col);
+    for (int h_col = h_col_start; h_col < h_col_end; ++h_col) {
+      for (int w_col = w_col_start; w_col < w_col_end; ++w_col) {
+        val += data_col[offset + h_col * coeff_h_col + w_col * coeff_w_col];
+      }
+    }
+    data_im[index] = val;
+  }
+}
+
+template <typename Dtype>
+void col2im_gpu(const Dtype* data_col, const int channels,
+    const int height, const int width, const int patch_h, const int patch_w,
+    const int pad_h, const int pad_w, const int stride_h,
+    const int stride_w, Dtype* data_im) {
+  int height_col = (height + 2 * pad_h - patch_h) / stride_h + 1;
+  int width_col = (width + 2 * pad_w - patch_w) / stride_w + 1;
+  int num_kernels = channels * height * width;
+  // To avoid atomic operations, we launch one thread per bottom (image)
+  // element and let each thread add up the data_col values that map to it.
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  col2im_gpu_kernel<Dtype><<<CAFFE_GET_BLOCKS(num_kernels),
+                             CAFFE_CUDA_NUM_THREADS>>>(
+      num_kernels, data_col, height, width, channels, patch_h, patch_w,
+      pad_h, pad_w, stride_h, stride_w,
+      height_col, width_col, data_im);
+  CUDA_POST_KERNEL_CHECK;
+}
+
+// Explicit instantiation
+template void col2im_gpu<float>(const float* data_col, const int channels,
+    const int height, const int width, const int patch_h, const int patch_w,
+    const int pad_h, const int pad_w, const int stride_h,
+    const int stride_w, float* data_im);
+template void col2im_gpu<double>(const double* data_col, const int channels,
+    const int height, const int width, const int patch_h, const int patch_w,
+    const int pad_h, const int pad_w, const int stride_h,
+    const int stride_w, double* data_im);
+
+}  // namespace caffe
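
The "equivalent implementation" inside col2im_gpu_kernel is the commented-out
reference index with the loop variables factored out: substituting
c_col = c * patch_h * patch_w + (h - h_col * stride_h) * patch_w + (w - w_col * stride_w)
into (c_col * height_col + h_col) * width_col + w_col and collecting the h_col and
w_col terms gives offset + h_col * coeff_h_col + w_col * coeff_w_col, with
offset = (c * patch_h * patch_w + h * patch_w + w) * height_col * width_col,
coeff_h_col = (1 - stride_h * patch_w * height_col) * width_col, and
coeff_w_col = 1 - stride_w * height_col * width_col. These are exactly the
constants computed in the kernel, so both loops read the same data_col entries.
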
diff --git a/src/caffe/util/insert_splits.cpp b/src/caffe/util/insert_splits.cpp
new file mode 100644
index 0000000..416f80a
--- /dev/null
+++ b/src/caffe/util/insert_splits.cpp
@@ -0,0 +1,144 @@
+#include <algorithm>
+#include <map>
+#include <sstream>
+#include <string>
+#include <utility>
+
+#include "caffe/common.hpp"
+#include "caffe/util/insert_splits.hpp"
+
+namespace caffe {
+
+void InsertSplits(const NetParameter& param, NetParameter* param_split) {
+  // Initialize by copying from the input NetParameter.
+  param_split->CopyFrom(param);
+  param_split->clear_layer();
+  map<string, pair<int, int> > blob_name_to_last_top_idx;
+  map<pair<int, int>, pair<int, int> > bottom_idx_to_source_top_idx;
+  map<pair<int, int>, int> top_idx_to_bottom_count;
+  map<pair<int, int>, float> top_idx_to_loss_weight;
+  map<pair<int, int>, int> top_idx_to_bottom_split_idx;
+  map<int, string> layer_idx_to_layer_name;
+  layer_idx_to_layer_name[-1] = "input";
+  // Determine the number of times each blob is used as an input (bottom) blob.
+  for (int i = 0; i < param.input_size(); ++i) {
+    const string& blob_name = param.input(i);
+    blob_name_to_last_top_idx[blob_name] = make_pair(-1, i);
+  }
+  for (int i = 0; i < param.layer_size(); ++i) {
+    const LayerParameter& layer_param = param.layer(i);
+    layer_idx_to_layer_name[i] = layer_param.name();
+    for (int j = 0; j < layer_param.bottom_size(); ++j) {
+      const string& blob_name = layer_param.bottom(j);
+      if (blob_name_to_last_top_idx.find(blob_name) ==
+          blob_name_to_last_top_idx.end()) {
+        LOG(FATAL) << "Unknown bottom blob '" << blob_name << "' (layer '"
+                   << layer_param.name() << "', bottom index " << j << ")";
+      }
+      const pair<int, int>& bottom_idx = make_pair(i, j);
+      const pair<int, int>& top_idx = blob_name_to_last_top_idx[blob_name];
+      bottom_idx_to_source_top_idx[bottom_idx] = top_idx;
+      ++top_idx_to_bottom_count[top_idx];
+    }
+    for (int j = 0; j < layer_param.top_size(); ++j) {
+      const string& blob_name = layer_param.top(j);
+      blob_name_to_last_top_idx[blob_name] = make_pair(i, j);
+    }
+    // A use of a top blob as a loss should be handled similarly to the use of
+    // a top blob as an input (bottom) blob to another layer.
+    const int last_loss =
+        std::min(layer_param.loss_weight_size(), layer_param.top_size());
+    for (int j = 0; j < last_loss; ++j) {
+      const string& blob_name = layer_param.top(j);
+      const pair<int, int>& top_idx = blob_name_to_last_top_idx[blob_name];
+      top_idx_to_loss_weight[top_idx] = layer_param.loss_weight(j);
+      if (top_idx_to_loss_weight[top_idx]) {
+        ++top_idx_to_bottom_count[top_idx];
+      }
+    }
+  }
+  // Create split layer for any input blobs used by other layer as bottom
+  // blobs more than once.
+  for (int i = 0; i < param.input_size(); ++i) {
+    const int split_count = top_idx_to_bottom_count[make_pair(-1, i)];
+    if (split_count > 1) {
+      const string& layer_name = layer_idx_to_layer_name[-1];
+      const string& blob_name = param.input(i);
+      LayerParameter* split_layer_param = param_split->add_layer();
+      const float kZeroLossWeight = 0;
+      ConfigureSplitLayer(layer_name, blob_name, i, split_count,
+          kZeroLossWeight, split_layer_param);
+    }
+  }
+  for (int i = 0; i < param.layer_size(); ++i) {
+    LayerParameter* layer_param = param_split->add_layer();
+    layer_param->CopyFrom(param.layer(i));
+    // Replace any shared bottom blobs with split layer outputs.
+    for (int j = 0; j < layer_param->bottom_size(); ++j) {
+      const pair<int, int>& top_idx =
+          bottom_idx_to_source_top_idx[make_pair(i, j)];
+      const int split_count = top_idx_to_bottom_count[top_idx];
+      if (split_count > 1) {
+        const string& layer_name = layer_idx_to_layer_name[top_idx.first];
+        const string& blob_name = layer_param->bottom(j);
+        layer_param->set_bottom(j, SplitBlobName(layer_name,
+            blob_name, top_idx.second, top_idx_to_bottom_split_idx[top_idx]++));
+      }
+    }
+    // Create split layer for any top blobs used by other layer as bottom
+    // blobs more than once.
+    for (int j = 0; j < layer_param->top_size(); ++j) {
+      const pair<int, int>& top_idx = make_pair(i, j);
+      const int split_count = top_idx_to_bottom_count[top_idx];
+      if (split_count > 1) {
+        const string& layer_name = layer_idx_to_layer_name[i];
+        const string& blob_name = layer_param->top(j);
+        LayerParameter* split_layer_param = param_split->add_layer();
+        const float loss_weight = top_idx_to_loss_weight[top_idx];
+        ConfigureSplitLayer(layer_name, blob_name, j, split_count,
+            loss_weight, split_layer_param);
+        if (loss_weight) {
+          layer_param->clear_loss_weight();
+          top_idx_to_bottom_split_idx[top_idx]++;
+        }
+      }
+    }
+  }
+}
+
+void ConfigureSplitLayer(const string& layer_name, const string& blob_name,
+    const int blob_idx, const int split_count, const float loss_weight,
+    LayerParameter* split_layer_param) {
+  split_layer_param->Clear();
+  split_layer_param->add_bottom(blob_name);
+  split_layer_param->set_name(SplitLayerName(layer_name, blob_name, blob_idx));
+  split_layer_param->set_type("Split");
+  for (int k = 0; k < split_count; ++k) {
+    split_layer_param->add_top(
+        SplitBlobName(layer_name, blob_name, blob_idx, k));
+    if (loss_weight) {
+      if (k == 0) {
+        split_layer_param->add_loss_weight(loss_weight);
+      } else {
+        split_layer_param->add_loss_weight(0);
+      }
+    }
+  }
+}
+
+string SplitLayerName(const string& layer_name, const string& blob_name,
+    const int blob_idx) {
+  ostringstream split_layer_name;
+  split_layer_name << blob_name << "_" << layer_name << "_" << blob_idx
+      << "_split";
+  return split_layer_name.str();
+}
+
+string SplitBlobName(const string& layer_name, const string& blob_name,
+    const int blob_idx, const int split_idx) {
+  ostringstream split_blob_name;
+  split_blob_name << blob_name << "_" << layer_name << "_" << blob_idx
+      << "_split_" << split_idx;
+  return split_blob_name.str();
+}
+
+}  // namespace caffe
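
Concretely, if a top blob 'fc7' produced by a layer named 'fc7' (top index 0) were
consumed by two later layers, ConfigureSplitLayer would add the layer below, and the
two consumers' bottoms would be rewritten to its tops, following SplitLayerName and
SplitBlobName above:

    layer {
      name: 'fc7_fc7_0_split'
      type: 'Split'
      bottom: 'fc7'
      top: 'fc7_fc7_0_split_0'
      top: 'fc7_fc7_0_split_1'
    }
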
diff --git a/src/caffe/util/io.cpp b/src/caffe/util/io.cpp
new file mode 100644
index 0000000..77ef7f2
--- /dev/null
+++ b/src/caffe/util/io.cpp
@@ -0,0 +1,306 @@
+#include <fcntl.h>
+#include <google/protobuf/io/coded_stream.h>
+#include <google/protobuf/io/zero_copy_stream_impl.h>
+#include <google/protobuf/text_format.h>
+#include <opencv2/core/core.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/highgui/highgui_c.h>
+#include <opencv2/imgproc/imgproc.hpp>
+#include <stdint.h>
+
+#include <algorithm>
+#include <fstream>  // NOLINT(readability/streams)
+#include <string>
+#include <vector>
+
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/io.hpp"
+
+const int kProtoReadBytesLimit = INT_MAX;  // Max size of 2 GB minus 1 byte.
+
+namespace caffe {
+
+using google::protobuf::io::FileInputStream;
+using google::protobuf::io::FileOutputStream;
+using google::protobuf::io::ZeroCopyInputStream;
+using google::protobuf::io::CodedInputStream;
+using google::protobuf::io::ZeroCopyOutputStream;
+using google::protobuf::io::CodedOutputStream;
+using google::protobuf::Message;
+
+bool ReadProtoFromTextFile(const char* filename, Message* proto) {
+  int fd = open(filename, O_RDONLY);
+  CHECK_NE(fd, -1) << "File not found: " << filename;
+  FileInputStream* input = new FileInputStream(fd);
+  bool success = google::protobuf::TextFormat::Parse(input, proto);
+  delete input;
+  close(fd);
+  return success;
+}
+
+void WriteProtoToTextFile(const Message& proto, const char* filename) {
+  int fd = open(filename, O_WRONLY | O_CREAT | O_TRUNC, 0644);
+  FileOutputStream* output = new FileOutputStream(fd);
+  CHECK(google::protobuf::TextFormat::Print(proto, output));
+  delete output;
+  close(fd);
+}
+
+bool ReadProtoFromBinaryFile(const char* filename, Message* proto) {
+  int fd = open(filename, O_RDONLY);
+  CHECK_NE(fd, -1) << "File not found: " << filename;
+  ZeroCopyInputStream* raw_input = new FileInputStream(fd);
+  CodedInputStream* coded_input = new CodedInputStream(raw_input);
+  coded_input->SetTotalBytesLimit(kProtoReadBytesLimit, 536870912);
+
+  bool success = proto->ParseFromCodedStream(coded_input);
+
+  delete coded_input;
+  delete raw_input;
+  close(fd);
+  return success;
+}
+
+void WriteProtoToBinaryFile(const Message& proto, const char* filename) {
+  fstream output(filename, ios::out | ios::trunc | ios::binary);
+  CHECK(proto.SerializeToOstream(&output));
+}
+
+cv::Mat ReadImageToCVMat(const string& filename,
+    const int height, const int width, const bool is_color) {
+  cv::Mat cv_img;
+  int cv_read_flag = (is_color ? CV_LOAD_IMAGE_COLOR :
+    CV_LOAD_IMAGE_GRAYSCALE);
+  cv::Mat cv_img_origin = cv::imread(filename, cv_read_flag);
+  if (!cv_img_origin.data) {
+    LOG(ERROR) << "Could not open or find file " << filename;
+    return cv_img_origin;
+  }
+  if (height > 0 && width > 0) {
+    cv::resize(cv_img_origin, cv_img, cv::Size(width, height));
+  } else {
+    cv_img = cv_img_origin;
+  }
+  return cv_img;
+}
+
+cv::Mat ReadImageToCVMat(const string& filename,
+    const int height, const int width) {
+  return ReadImageToCVMat(filename, height, width, true);
+}
+
+cv::Mat ReadImageToCVMat(const string& filename,
+    const bool is_color) {
+  return ReadImageToCVMat(filename, 0, 0, is_color);
+}
+
+cv::Mat ReadImageToCVMat(const string& filename) {
+  return ReadImageToCVMat(filename, 0, 0, true);
+}
+// Do the file extension and encoding match?
+static bool matchExt(const std::string & fn,
+                     std::string en) {
+  size_t p = fn.rfind('.');
+  std::string ext = p != fn.npos ? fn.substr(p) : fn;
+  std::transform(ext.begin(), ext.end(), ext.begin(), ::tolower);
+  std::transform(en.begin(), en.end(), en.begin(), ::tolower);
+  if ( ext == en )
+    return true;
+  if ( en == "jpg" && ext == "jpeg" )
+    return true;
+  return false;
+}
+bool ReadImageToDatum(const string& filename, const int label,
+    const int height, const int width, const bool is_color,
+    const std::string & encoding, Datum* datum) {
+  cv::Mat cv_img = ReadImageToCVMat(filename, height, width, is_color);
+  if (cv_img.data) {
+    if (encoding.size()) {
+      if ( (cv_img.channels() == 3) == is_color && !height && !width &&
+          matchExt(filename, encoding) )
+        return ReadFileToDatum(filename, label, datum);
+      std::vector<uchar> buf;
+      cv::imencode("."+encoding, cv_img, buf);
+      datum->set_data(std::string(reinterpret_cast<char*>(&buf[0]),
+                      buf.size()));
+      datum->set_label(label);
+      datum->set_encoded(true);
+      return true;
+    }
+    CVMatToDatum(cv_img, datum);
+    datum->set_label(label);
+    return true;
+  } else {
+    return false;
+  }
+}
+
+bool ReadFileToDatum(const string& filename, const int label,
+    Datum* datum) {
+  std::streampos size;
+
+  fstream file(filename.c_str(), ios::in|ios::binary|ios::ate);
+  if (file.is_open()) {
+    size = file.tellg();
+    std::string buffer(size, ' ');
+    file.seekg(0, ios::beg);
+    file.read(&buffer[0], size);
+    file.close();
+    datum->set_data(buffer);
+    datum->set_label(label);
+    datum->set_encoded(true);
+    return true;
+  } else {
+    return false;
+  }
+}
+
+cv::Mat DecodeDatumToCVMatNative(const Datum& datum) {
+  cv::Mat cv_img;
+  CHECK(datum.encoded()) << "Datum not encoded";
+  const string& data = datum.data();
+  std::vector<char> vec_data(data.c_str(), data.c_str() + data.size());
+  cv_img = cv::imdecode(vec_data, -1);
+  if (!cv_img.data) {
+    LOG(ERROR) << "Could not decode datum ";
+  }
+  return cv_img;
+}
+cv::Mat DecodeDatumToCVMat(const Datum& datum, bool is_color) {
+  cv::Mat cv_img;
+  CHECK(datum.encoded()) << "Datum not encoded";
+  const string& data = datum.data();
+  std::vector<char> vec_data(data.c_str(), data.c_str() + data.size());
+  int cv_read_flag = (is_color ? CV_LOAD_IMAGE_COLOR :
+    CV_LOAD_IMAGE_GRAYSCALE);
+  cv_img = cv::imdecode(vec_data, cv_read_flag);
+  if (!cv_img.data) {
+    LOG(ERROR) << "Could not decode datum ";
+  }
+  return cv_img;
+}
+
+// If the Datum is encoded, it will be decoded using DecodeDatumToCVMat and
+// CVMatToDatum; if it is not encoded, nothing is done.
+bool DecodeDatumNative(Datum* datum) {
+  if (datum->encoded()) {
+    cv::Mat cv_img = DecodeDatumToCVMatNative((*datum));
+    CVMatToDatum(cv_img, datum);
+    return true;
+  } else {
+    return false;
+  }
+}
+bool DecodeDatum(Datum* datum, bool is_color) {
+  if (datum->encoded()) {
+    cv::Mat cv_img = DecodeDatumToCVMat((*datum), is_color);
+    CVMatToDatum(cv_img, datum);
+    return true;
+  } else {
+    return false;
+  }
+}
+
+void CVMatToDatum(const cv::Mat& cv_img, Datum* datum) {
+  CHECK(cv_img.depth() == CV_8U) << "Image data type must be unsigned byte";
+  datum->set_channels(cv_img.channels());
+  datum->set_height(cv_img.rows);
+  datum->set_width(cv_img.cols);
+  datum->clear_data();
+  datum->clear_float_data();
+  datum->set_encoded(false);
+  int datum_channels = datum->channels();
+  int datum_height = datum->height();
+  int datum_width = datum->width();
+  int datum_size = datum_channels * datum_height * datum_width;
+  std::string buffer(datum_size, ' ');
+  for (int h = 0; h < datum_height; ++h) {
+    const uchar* ptr = cv_img.ptr<uchar>(h);
+    int img_index = 0;
+    for (int w = 0; w < datum_width; ++w) {
+      for (int c = 0; c < datum_channels; ++c) {
+        int datum_index = (c * datum_height + h) * datum_width + w;
+        buffer[datum_index] = static_cast<char>(ptr[img_index++]);
+      }
+    }
+  }
+  datum->set_data(buffer);
+}
+
+// Verifies format of data stored in HDF5 file and reshapes blob accordingly.
+template <typename Dtype>
+void hdf5_load_nd_dataset_helper(
+    hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
+    Blob<Dtype>* blob) {
+  // Verify that the dataset exists.
+  CHECK(H5LTfind_dataset(file_id, dataset_name_))
+      << "Failed to find HDF5 dataset " << dataset_name_;
+  // Verify that the number of dimensions is in the accepted range.
+  herr_t status;
+  int ndims;
+  status = H5LTget_dataset_ndims(file_id, dataset_name_, &ndims);
+  CHECK_GE(status, 0) << "Failed to get dataset ndims for " << dataset_name_;
+  CHECK_GE(ndims, min_dim);
+  CHECK_LE(ndims, max_dim);
+
+  // Verify that the data format is what we expect: float or double.
+  std::vector<hsize_t> dims(ndims);
+  H5T_class_t class_;
+  status = H5LTget_dataset_info(
+      file_id, dataset_name_, dims.data(), &class_, NULL);
+  CHECK_GE(status, 0) << "Failed to get dataset info for " << dataset_name_;
+  CHECK_EQ(class_, H5T_FLOAT) << "Expected float or double data";
+
+  vector<int> blob_dims(dims.size());
+  for (int i = 0; i < dims.size(); ++i) {
+    blob_dims[i] = dims[i];
+  }
+  blob->Reshape(blob_dims);
+}
+
+template <>
+void hdf5_load_nd_dataset<float>(hid_t file_id, const char* dataset_name_,
+        int min_dim, int max_dim, Blob<float>* blob) {
+  hdf5_load_nd_dataset_helper(file_id, dataset_name_, min_dim, max_dim, blob);
+  herr_t status = H5LTread_dataset_float(
+    file_id, dataset_name_, blob->mutable_cpu_data());
+  CHECK_GE(status, 0) << "Failed to read float dataset " << dataset_name_;
+}
+
+template <>
+void hdf5_load_nd_dataset<double>(hid_t file_id, const char* dataset_name_,
+        int min_dim, int max_dim, Blob<double>* blob) {
+  hdf5_load_nd_dataset_helper(file_id, dataset_name_, min_dim, max_dim, blob);
+  herr_t status = H5LTread_dataset_double(
+    file_id, dataset_name_, blob->mutable_cpu_data());
+  CHECK_GE(status, 0) << "Failed to read double dataset " << dataset_name_;
+}
+
+template <>
+void hdf5_save_nd_dataset<float>(
+    const hid_t file_id, const string& dataset_name, const Blob<float>& blob) {
+  hsize_t dims[HDF5_NUM_DIMS];
+  dims[0] = blob.num();
+  dims[1] = blob.channels();
+  dims[2] = blob.height();
+  dims[3] = blob.width();
+  herr_t status = H5LTmake_dataset_float(
+      file_id, dataset_name.c_str(), HDF5_NUM_DIMS, dims, blob.cpu_data());
+  CHECK_GE(status, 0) << "Failed to make float dataset " << dataset_name;
+}
+
+template <>
+void hdf5_save_nd_dataset<double>(
+    const hid_t file_id, const string& dataset_name, const Blob<double>& blob) {
+  hsize_t dims[HDF5_NUM_DIMS];
+  dims[0] = blob.num();
+  dims[1] = blob.channels();
+  dims[2] = blob.height();
+  dims[3] = blob.width();
+  herr_t status = H5LTmake_dataset_double(
+      file_id, dataset_name.c_str(), HDF5_NUM_DIMS, dims, blob.cpu_data());
+  CHECK_GE(status, 0) << "Failed to make double dataset " << dataset_name;
+}
+
+}  // namespace caffe
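
A minimal read-path sketch for the helpers above (the file names are placeholders,
and the usual caffe includes are assumed):

    void LoadExamples() {
      caffe::NetParameter net_param;
      CHECK(caffe::ReadProtoFromTextFile("train_val.prototxt", &net_param));

      caffe::Datum datum;
      // Resize to 256x256, keep color, and store the result JPEG-encoded
      // (sets encoded = true on the Datum).
      caffe::ReadImageToDatum("cat.jpg", 0 /* label */, 256, 256,
                              true /* is_color */, "jpg", &datum);
    }
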
diff --git a/src/caffe/util/math_functions.cpp b/src/caffe/util/math_functions.cpp
new file mode 100644
index 0000000..0aab6b1
--- /dev/null
+++ b/src/caffe/util/math_functions.cpp
@@ -0,0 +1,397 @@
+#include <boost/math/special_functions/next.hpp>
+#include <boost/random.hpp>
+
+#include <limits>
+
+#include "caffe/common.hpp"
+#include "caffe/util/math_functions.hpp"
+#include "caffe/util/rng.hpp"
+
+namespace caffe {
+
+template<>
+void caffe_cpu_gemm<float>(const CBLAS_TRANSPOSE TransA,
+    const CBLAS_TRANSPOSE TransB, const int M, const int N, const int K,
+    const float alpha, const float* A, const float* B, const float beta,
+    float* C) {
+  int lda = (TransA == CblasNoTrans) ? K : M;
+  int ldb = (TransB == CblasNoTrans) ? N : K;
+  cblas_sgemm(CblasRowMajor, TransA, TransB, M, N, K, alpha, A, lda, B,
+      ldb, beta, C, N);
+}
+
+template<>
+void caffe_cpu_gemm<double>(const CBLAS_TRANSPOSE TransA,
+    const CBLAS_TRANSPOSE TransB, const int M, const int N, const int K,
+    const double alpha, const double* A, const double* B, const double beta,
+    double* C) {
+  int lda = (TransA == CblasNoTrans) ? K : M;
+  int ldb = (TransB == CblasNoTrans) ? N : K;
+  cblas_dgemm(CblasRowMajor, TransA, TransB, M, N, K, alpha, A, lda, B,
+      ldb, beta, C, N);
+}
+
+template <>
+void caffe_cpu_gemv<float>(const CBLAS_TRANSPOSE TransA, const int M,
+    const int N, const float alpha, const float* A, const float* x,
+    const float beta, float* y) {
+  cblas_sgemv(CblasRowMajor, TransA, M, N, alpha, A, N, x, 1, beta, y, 1);
+}
+
+template <>
+void caffe_cpu_gemv<double>(const CBLAS_TRANSPOSE TransA, const int M,
+    const int N, const double alpha, const double* A, const double* x,
+    const double beta, double* y) {
+  cblas_dgemv(CblasRowMajor, TransA, M, N, alpha, A, N, x, 1, beta, y, 1);
+}
+
+template <>
+void caffe_axpy<float>(const int N, const float alpha, const float* X,
+    float* Y) { cblas_saxpy(N, alpha, X, 1, Y, 1); }
+
+template <>
+void caffe_axpy<double>(const int N, const double alpha, const double* X,
+    double* Y) { cblas_daxpy(N, alpha, X, 1, Y, 1); }
+
+template <typename Dtype>
+void caffe_set(const int N, const Dtype alpha, Dtype* Y) {
+  if (alpha == 0) {
+    memset(Y, 0, sizeof(Dtype) * N);  // NOLINT(caffe/alt_fn)
+    return;
+  }
+  for (int i = 0; i < N; ++i) {
+    Y[i] = alpha;
+  }
+}
+
+template void caffe_set<int>(const int N, const int alpha, int* Y);
+template void caffe_set<float>(const int N, const float alpha, float* Y);
+template void caffe_set<double>(const int N, const double alpha, double* Y);
+
+template <>
+void caffe_add_scalar(const int N, const float alpha, float* Y) {
+  for (int i = 0; i < N; ++i) {
+    Y[i] += alpha;
+  }
+}
+
+template <>
+void caffe_add_scalar(const int N, const double alpha, double* Y) {
+  for (int i = 0; i < N; ++i) {
+    Y[i] += alpha;
+  }
+}
+
+template <typename Dtype>
+void caffe_copy(const int N, const Dtype* X, Dtype* Y) {
+  if (X != Y) {
+    if (Caffe::mode() == Caffe::GPU) {
+#ifndef CPU_ONLY
+      // NOLINT_NEXT_LINE(caffe/alt_fn)
+      CUDA_CHECK(cudaMemcpy(Y, X, sizeof(Dtype) * N, cudaMemcpyDefault));
+#else
+      NO_GPU;
+#endif
+    } else {
+      memcpy(Y, X, sizeof(Dtype) * N);  // NOLINT(caffe/alt_fn)
+    }
+  }
+}
+
+template void caffe_copy<int>(const int N, const int* X, int* Y);
+template void caffe_copy<unsigned int>(const int N, const unsigned int* X,
+    unsigned int* Y);
+template void caffe_copy<float>(const int N, const float* X, float* Y);
+template void caffe_copy<double>(const int N, const double* X, double* Y);
+
+template <>
+void caffe_scal<float>(const int N, const float alpha, float *X) {
+  cblas_sscal(N, alpha, X, 1);
+}
+
+template <>
+void caffe_scal<double>(const int N, const double alpha, double *X) {
+  cblas_dscal(N, alpha, X, 1);
+}
+
+template <>
+void caffe_cpu_axpby<float>(const int N, const float alpha, const float* X,
+                            const float beta, float* Y) {
+  cblas_saxpby(N, alpha, X, 1, beta, Y, 1);
+}
+
+template <>
+void caffe_cpu_axpby<double>(const int N, const double alpha, const double* X,
+                             const double beta, double* Y) {
+  cblas_daxpby(N, alpha, X, 1, beta, Y, 1);
+}
+
+template <>
+void caffe_add<float>(const int n, const float* a, const float* b,
+    float* y) {
+  vsAdd(n, a, b, y);
+}
+
+template <>
+void caffe_add<double>(const int n, const double* a, const double* b,
+    double* y) {
+  vdAdd(n, a, b, y);
+}
+
+template <>
+void caffe_sub<float>(const int n, const float* a, const float* b,
+    float* y) {
+  vsSub(n, a, b, y);
+}
+
+template <>
+void caffe_sub<double>(const int n, const double* a, const double* b,
+    double* y) {
+  vdSub(n, a, b, y);
+}
+
+template <>
+void caffe_mul<float>(const int n, const float* a, const float* b,
+    float* y) {
+  vsMul(n, a, b, y);
+}
+
+template <>
+void caffe_mul<double>(const int n, const double* a, const double* b,
+    double* y) {
+  vdMul(n, a, b, y);
+}
+
+template <>
+void caffe_div<float>(const int n, const float* a, const float* b,
+    float* y) {
+  vsDiv(n, a, b, y);
+}
+
+template <>
+void caffe_div<double>(const int n, const double* a, const double* b,
+    double* y) {
+  vdDiv(n, a, b, y);
+}
+
+template <>
+void caffe_powx<float>(const int n, const float* a, const float b,
+    float* y) {
+  vsPowx(n, a, b, y);
+}
+
+template <>
+void caffe_powx<double>(const int n, const double* a, const double b,
+    double* y) {
+  vdPowx(n, a, b, y);
+}
+
+template <>
+void caffe_sqr<float>(const int n, const float* a, float* y) {
+  vsSqr(n, a, y);
+}
+
+template <>
+void caffe_sqr<double>(const int n, const double* a, double* y) {
+  vdSqr(n, a, y);
+}
+
+template <>
+void caffe_exp<float>(const int n, const float* a, float* y) {
+  vsExp(n, a, y);
+}
+
+template <>
+void caffe_exp<double>(const int n, const double* a, double* y) {
+  vdExp(n, a, y);
+}
+
+template <>
+void caffe_log<float>(const int n, const float* a, float* y) {
+  vsLn(n, a, y);
+}
+
+template <>
+void caffe_log<double>(const int n, const double* a, double* y) {
+  vdLn(n, a, y);
+}
+
+template <>
+void caffe_abs<float>(const int n, const float* a, float* y) {
+    vsAbs(n, a, y);
+}
+
+template <>
+void caffe_abs<double>(const int n, const double* a, double* y) {
+    vdAbs(n, a, y);
+}
+
+unsigned int caffe_rng_rand() {
+  return (*caffe_rng())();
+}
+
+template <typename Dtype>
+Dtype caffe_nextafter(const Dtype b) {
+  return boost::math::nextafter<Dtype>(
+      b, std::numeric_limits<Dtype>::max());
+}
+
+template
+float caffe_nextafter(const float b);
+
+template
+double caffe_nextafter(const double b);
+
+template <typename Dtype>
+void caffe_rng_uniform(const int n, const Dtype a, const Dtype b, Dtype* r) {
+  CHECK_GE(n, 0);
+  CHECK(r);
+  CHECK_LE(a, b);
+  boost::uniform_real<Dtype> random_distribution(a, caffe_nextafter<Dtype>(b));
+  boost::variate_generator<caffe::rng_t*, boost::uniform_real<Dtype> >
+      variate_generator(caffe_rng(), random_distribution);
+  for (int i = 0; i < n; ++i) {
+    r[i] = variate_generator();
+  }
+}
+
+template
+void caffe_rng_uniform<float>(const int n, const float a, const float b,
+                              float* r);
+
+template
+void caffe_rng_uniform<double>(const int n, const double a, const double b,
+                               double* r);
+
+template <typename Dtype>
+void caffe_rng_gaussian(const int n, const Dtype a,
+                        const Dtype sigma, Dtype* r) {
+  CHECK_GE(n, 0);
+  CHECK(r);
+  CHECK_GT(sigma, 0);
+  boost::normal_distribution<Dtype> random_distribution(a, sigma);
+  boost::variate_generator<caffe::rng_t*, boost::normal_distribution<Dtype> >
+      variate_generator(caffe_rng(), random_distribution);
+  for (int i = 0; i < n; ++i) {
+    r[i] = variate_generator();
+  }
+}
+
+template
+void caffe_rng_gaussian<float>(const int n, const float mu,
+                               const float sigma, float* r);
+
+template
+void caffe_rng_gaussian<double>(const int n, const double mu,
+                                const double sigma, double* r);
+
+template <typename Dtype>
+void caffe_rng_bernoulli(const int n, const Dtype p, int* r) {
+  CHECK_GE(n, 0);
+  CHECK(r);
+  CHECK_GE(p, 0);
+  CHECK_LE(p, 1);
+  boost::bernoulli_distribution<Dtype> random_distribution(p);
+  boost::variate_generator<caffe::rng_t*, boost::bernoulli_distribution<Dtype> >
+      variate_generator(caffe_rng(), random_distribution);
+  for (int i = 0; i < n; ++i) {
+    r[i] = variate_generator();
+  }
+}
+
+template
+void caffe_rng_bernoulli<double>(const int n, const double p, int* r);
+
+template
+void caffe_rng_bernoulli<float>(const int n, const float p, int* r);
+
+template <typename Dtype>
+void caffe_rng_bernoulli(const int n, const Dtype p, unsigned int* r) {
+  CHECK_GE(n, 0);
+  CHECK(r);
+  CHECK_GE(p, 0);
+  CHECK_LE(p, 1);
+  boost::bernoulli_distribution<Dtype> random_distribution(p);
+  boost::variate_generator<caffe::rng_t*, boost::bernoulli_distribution<Dtype> >
+      variate_generator(caffe_rng(), random_distribution);
+  for (int i = 0; i < n; ++i) {
+    r[i] = static_cast<unsigned int>(variate_generator());
+  }
+}
+
+template
+void caffe_rng_bernoulli<double>(const int n, const double p, unsigned int* r);
+
+template
+void caffe_rng_bernoulli<float>(const int n, const float p, unsigned int* r);
+
+template <>
+float caffe_cpu_strided_dot<float>(const int n, const float* x, const int incx,
+    const float* y, const int incy) {
+  return cblas_sdot(n, x, incx, y, incy);
+}
+
+template <>
+double caffe_cpu_strided_dot<double>(const int n, const double* x,
+    const int incx, const double* y, const int incy) {
+  return cblas_ddot(n, x, incx, y, incy);
+}
+
+template <typename Dtype>
+Dtype caffe_cpu_dot(const int n, const Dtype* x, const Dtype* y) {
+  return caffe_cpu_strided_dot(n, x, 1, y, 1);
+}
+
+template
+float caffe_cpu_dot<float>(const int n, const float* x, const float* y);
+
+template
+double caffe_cpu_dot<double>(const int n, const double* x, const double* y);
+
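+// The Hamming distance below converts each value to an integer (truncating
+// toward zero), XORs the two operands and counts the differing bits with
+// GCC's __builtin_popcount / __builtin_popcountl built-ins.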
+template <>
+int caffe_cpu_hamming_distance<float>(const int n, const float* x,
+                                  const float* y) {
+  int dist = 0;
+  for (int i = 0; i < n; ++i) {
+    dist += __builtin_popcount(static_cast<uint32_t>(x[i]) ^
+                               static_cast<uint32_t>(y[i]));
+  }
+  return dist;
+}
+
+template <>
+int caffe_cpu_hamming_distance<double>(const int n, const double* x,
+                                   const double* y) {
+  int dist = 0;
+  for (int i = 0; i < n; ++i) {
+    dist += __builtin_popcountl(static_cast<uint64_t>(x[i]) ^
+                                static_cast<uint64_t>(y[i]));
+  }
+  return dist;
+}
+
+template <>
+float caffe_cpu_asum<float>(const int n, const float* x) {
+  return cblas_sasum(n, x, 1);
+}
+
+template <>
+double caffe_cpu_asum<double>(const int n, const double* x) {
+  return cblas_dasum(n, x, 1);
+}
+
+template <>
+void caffe_cpu_scale<float>(const int n, const float alpha, const float *x,
+                            float* y) {
+  cblas_scopy(n, x, 1, y, 1);
+  cblas_sscal(n, alpha, y, 1);
+}
+
+template <>
+void caffe_cpu_scale<double>(const int n, const double alpha, const double *x,
+                             double* y) {
+  cblas_dcopy(n, x, 1, y, 1);
+  cblas_dscal(n, alpha, y, 1);
+}
+
+}  // namespace caffe
diff --git a/src/caffe/util/math_functions.cu b/src/caffe/util/math_functions.cu
new file mode 100644
index 0000000..2631a07
--- /dev/null
+++ b/src/caffe/util/math_functions.cu
@@ -0,0 +1,465 @@
+#include <math_functions.h>  // CUDA's, not caffe's, for fabs, signbit
+#include <thrust/device_vector.h>
+#include <thrust/functional.h>  // thrust::plus
+#include <thrust/reduce.h>
+
+#include <cmath>
+#include <cstdlib>
+#include <cstring>
+
+#include "caffe/common.hpp"
+#include "caffe/util/math_functions.hpp"
+
+namespace caffe {
+
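+// cuBLAS expects column-major (Fortran-order) matrices while caffe stores
+// row-major data, so the gemm wrappers below compute C^T = B^T * A^T by
+// swapping the A/B operands and the M/N dimensions, yielding a row-major C
+// without any explicit transposition.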
+template <>
+void caffe_gpu_gemm<float>(const CBLAS_TRANSPOSE TransA,
+    const CBLAS_TRANSPOSE TransB, const int M, const int N, const int K,
+    const float alpha, const float* A, const float* B, const float beta,
+    float* C) {
+  // Note that cublas follows fortran order.
+  int lda = (TransA == CblasNoTrans) ? K : M;
+  int ldb = (TransB == CblasNoTrans) ? N : K;
+  cublasOperation_t cuTransA =
+      (TransA == CblasNoTrans) ? CUBLAS_OP_N : CUBLAS_OP_T;
+  cublasOperation_t cuTransB =
+      (TransB == CblasNoTrans) ? CUBLAS_OP_N : CUBLAS_OP_T;
+  CUBLAS_CHECK(cublasSgemm(Caffe::cublas_handle(), cuTransB, cuTransA,
+      N, M, K, &alpha, B, ldb, A, lda, &beta, C, N));
+}
+
+template <>
+void caffe_gpu_gemm<double>(const CBLAS_TRANSPOSE TransA,
+    const CBLAS_TRANSPOSE TransB, const int M, const int N, const int K,
+    const double alpha, const double* A, const double* B, const double beta,
+    double* C) {
+  // Note that cublas follows fortran order.
+  int lda = (TransA == CblasNoTrans) ? K : M;
+  int ldb = (TransB == CblasNoTrans) ? N : K;
+  cublasOperation_t cuTransA =
+      (TransA == CblasNoTrans) ? CUBLAS_OP_N : CUBLAS_OP_T;
+  cublasOperation_t cuTransB =
+      (TransB == CblasNoTrans) ? CUBLAS_OP_N : CUBLAS_OP_T;
+  CUBLAS_CHECK(cublasDgemm(Caffe::cublas_handle(), cuTransB, cuTransA,
+      N, M, K, &alpha, B, ldb, A, lda, &beta, C, N));
+}
+
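+// For gemv the transpose flag is inverted: a row-major M x N matrix read as
+// column-major data is its transpose, so CblasNoTrans maps to CUBLAS_OP_T
+// (and vice versa) with the dimensions passed as N, M.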
+template <>
+void caffe_gpu_gemv<float>(const CBLAS_TRANSPOSE TransA, const int M,
+    const int N, const float alpha, const float* A, const float* x,
+    const float beta, float* y) {
+  cublasOperation_t cuTransA =
+      (TransA == CblasNoTrans) ? CUBLAS_OP_T : CUBLAS_OP_N;
+  CUBLAS_CHECK(cublasSgemv(Caffe::cublas_handle(), cuTransA, N, M, &alpha,
+      A, N, x, 1, &beta, y, 1));
+}
+
+template <>
+void caffe_gpu_gemv<double>(const CBLAS_TRANSPOSE TransA, const int M,
+    const int N, const double alpha, const double* A, const double* x,
+    const double beta, double* y) {
+  cublasOperation_t cuTransA =
+      (TransA == CblasNoTrans) ? CUBLAS_OP_T : CUBLAS_OP_N;
+  CUBLAS_CHECK(cublasDgemv(Caffe::cublas_handle(), cuTransA, N, M, &alpha,
+      A, N, x, 1, &beta, y, 1));
+}
+
+template <>
+void caffe_gpu_axpy<float>(const int N, const float alpha, const float* X,
+    float* Y) {
+  CUBLAS_CHECK(cublasSaxpy(Caffe::cublas_handle(), N, &alpha, X, 1, Y, 1));
+}
+
+template <>
+void caffe_gpu_axpy<double>(const int N, const double alpha, const double* X,
+    double* Y) {
+  CUBLAS_CHECK(cublasDaxpy(Caffe::cublas_handle(), N, &alpha, X, 1, Y, 1));
+}
+
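+// caffe_gpu_memcpy relies on cudaMemcpyDefault, which infers the transfer
+// direction from the pointer addresses under unified virtual addressing.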
+void caffe_gpu_memcpy(const size_t N, const void* X, void* Y) {
+  if (X != Y) {
+    CUDA_CHECK(cudaMemcpy(Y, X, N, cudaMemcpyDefault));  // NOLINT(caffe/alt_fn)
+  }
+}
+
+template <>
+void caffe_gpu_scal<float>(const int N, const float alpha, float *X) {
+  CUBLAS_CHECK(cublasSscal(Caffe::cublas_handle(), N, &alpha, X, 1));
+}
+
+template <>
+void caffe_gpu_scal<double>(const int N, const double alpha, double *X) {
+  CUBLAS_CHECK(cublasDscal(Caffe::cublas_handle(), N, &alpha, X, 1));
+}
+
+template <>
+void caffe_gpu_axpby<float>(const int N, const float alpha, const float* X,
+    const float beta, float* Y) {
+  caffe_gpu_scal<float>(N, beta, Y);
+  caffe_gpu_axpy<float>(N, alpha, X, Y);
+}
+
+template <>
+void caffe_gpu_axpby<double>(const int N, const double alpha, const double* X,
+    const double beta, double* Y) {
+  caffe_gpu_scal<double>(N, beta, Y);
+  caffe_gpu_axpy<double>(N, alpha, X, Y);
+}
+
+template <>
+void caffe_gpu_dot<float>(const int n, const float* x, const float* y,
+    float* out) {
+  CUBLAS_CHECK(cublasSdot(Caffe::cublas_handle(), n, x, 1, y, 1, out));
+}
+
+template <>
+void caffe_gpu_dot<double>(const int n, const double* x, const double* y,
+    double * out) {
+  CUBLAS_CHECK(cublasDdot(Caffe::cublas_handle(), n, x, 1, y, 1, out));
+}
+
+template <>
+void caffe_gpu_asum<float>(const int n, const float* x, float* y) {
+  CUBLAS_CHECK(cublasSasum(Caffe::cublas_handle(), n, x, 1, y));
+}
+
+template <>
+void caffe_gpu_asum<double>(const int n, const double* x, double* y) {
+  CUBLAS_CHECK(cublasDasum(Caffe::cublas_handle(), n, x, 1, y));
+}
+
+template <>
+void caffe_gpu_scale<float>(const int n, const float alpha, const float *x,
+                            float* y) {
+  CUBLAS_CHECK(cublasScopy(Caffe::cublas_handle(), n, x, 1, y, 1));
+  CUBLAS_CHECK(cublasSscal(Caffe::cublas_handle(), n, &alpha, y, 1));
+}
+
+template <>
+void caffe_gpu_scale<double>(const int n, const double alpha, const double *x,
+                             double* y) {
+  CUBLAS_CHECK(cublasDcopy(Caffe::cublas_handle(), n, x, 1, y, 1));
+  CUBLAS_CHECK(cublasDscal(Caffe::cublas_handle(), n, &alpha, y, 1));
+}
+
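+// caffe_gpu_set clears the buffer with cudaMemset when alpha == 0 (the
+// all-zero byte pattern represents 0 for integers and IEEE-754 floats alike);
+// otherwise it falls back to the element-wise kernel below.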
+template <typename Dtype>
+__global__ void set_kernel(const int n, const Dtype alpha, Dtype* y) {
+  CUDA_KERNEL_LOOP(index, n) {
+    y[index] = alpha;
+  }
+}
+
+template <typename Dtype>
+void caffe_gpu_set(const int N, const Dtype alpha, Dtype* Y) {
+  if (alpha == 0) {
+    CUDA_CHECK(cudaMemset(Y, 0, sizeof(Dtype) * N));  // NOLINT(caffe/alt_fn)
+    return;
+  }
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  set_kernel<Dtype><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, alpha, Y);
+}
+
+template void caffe_gpu_set<int>(const int N, const int alpha, int* Y);
+template void caffe_gpu_set<float>(const int N, const float alpha, float* Y);
+template void caffe_gpu_set<double>(const int N, const double alpha, double* Y);
+
+template <typename Dtype>
+__global__ void add_scalar_kernel(const int n, const Dtype alpha, Dtype* y) {
+  CUDA_KERNEL_LOOP(index, n) {
+    y[index] += alpha;
+  }
+}
+
+template <>
+void caffe_gpu_add_scalar(const int N, const float alpha, float* Y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  add_scalar_kernel<float><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, alpha, Y);
+}
+
+template <>
+void caffe_gpu_add_scalar(const int N, const double alpha, double* Y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  add_scalar_kernel<double><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, alpha, Y);
+}
+
+template <typename Dtype>
+__global__ void add_kernel(const int n, const Dtype* a,
+    const Dtype* b, Dtype* y) {
+  CUDA_KERNEL_LOOP(index, n) {
+    y[index] = a[index] + b[index];
+  }
+}
+
+template <>
+void caffe_gpu_add<float>(const int N, const float* a, const float* b,
+    float* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  add_kernel<float><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, b, y);
+}
+
+template <>
+void caffe_gpu_add<double>(const int N, const double* a, const double* b,
+    double* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  add_kernel<double><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, b, y);
+}
+
+template <typename Dtype>
+__global__ void sub_kernel(const int n, const Dtype* a,
+    const Dtype* b, Dtype* y) {
+  CUDA_KERNEL_LOOP(index, n) {
+    y[index] = a[index] - b[index];
+  }
+}
+
+template <>
+void caffe_gpu_sub<float>(const int N, const float* a, const float* b,
+    float* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  sub_kernel<float><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, b, y);
+}
+
+template <>
+void caffe_gpu_sub<double>(const int N, const double* a, const double* b,
+    double* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  sub_kernel<double><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, b, y);
+}
+
+template <typename Dtype>
+__global__ void mul_kernel(const int n, const Dtype* a,
+    const Dtype* b, Dtype* y) {
+  CUDA_KERNEL_LOOP(index, n) {
+    y[index] = a[index] * b[index];
+  }
+}
+
+template <>
+void caffe_gpu_mul<float>(const int N, const float* a,
+    const float* b, float* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  mul_kernel<float><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, b, y);
+}
+
+template <>
+void caffe_gpu_mul<double>(const int N, const double* a,
+    const double* b, double* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  mul_kernel<double><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, b, y);
+}
+
+template <typename Dtype>
+__global__ void div_kernel(const int n, const Dtype* a,
+    const Dtype* b, Dtype* y) {
+  CUDA_KERNEL_LOOP(index, n) {
+    y[index] = a[index] / b[index];
+  }
+}
+
+template <>
+void caffe_gpu_div<float>(const int N, const float* a,
+    const float* b, float* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  div_kernel<float><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, b, y);
+}
+
+template <>
+void caffe_gpu_div<double>(const int N, const double* a,
+    const double* b, double* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  div_kernel<double><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, b, y);
+}
+
+template <typename Dtype>
+__global__ void abs_kernel(const int n, const Dtype* a, Dtype* y) {
+  CUDA_KERNEL_LOOP(index, n) {
+    y[index] = abs(a[index]);
+  }
+}
+
+template <>
+void caffe_gpu_abs<float>(const int N, const float* a, float* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  abs_kernel<float><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, y);
+}
+
+template <>
+void caffe_gpu_abs<double>(const int N, const double* a, double* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  abs_kernel<double><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, y);
+}
+
+
+template <typename Dtype>
+__global__ void exp_kernel(const int n, const Dtype* a, Dtype* y) {
+  CUDA_KERNEL_LOOP(index, n) {
+    y[index] = exp(a[index]);
+  }
+}
+
+template <>
+void caffe_gpu_exp<float>(const int N, const float* a, float* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  exp_kernel<float><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, y);
+}
+
+template <>
+void caffe_gpu_exp<double>(const int N, const double* a, double* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  exp_kernel<double><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, y);
+}
+
+template <typename Dtype>
+__global__ void log_kernel(const int n, const Dtype* a, Dtype* y) {
+  CUDA_KERNEL_LOOP(index, n) {
+    y[index] = log(a[index]);
+  }
+}
+
+template <>
+void caffe_gpu_log<float>(const int N, const float* a, float* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  log_kernel<float><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, y);
+}
+
+template <>
+void caffe_gpu_log<double>(const int N, const double* a, double* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  log_kernel<double><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, y);
+}
+
+template <typename Dtype>
+__global__ void powx_kernel(const int n, const Dtype* a,
+    const Dtype alpha, Dtype* y) {
+  CUDA_KERNEL_LOOP(index, n) {
+    y[index] = pow(a[index], alpha);
+  }
+}
+
+template <>
+void caffe_gpu_powx<float>(const int N, const float* a,
+    const float alpha, float* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  powx_kernel<float><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, alpha, y);
+}
+
+template <>
+void caffe_gpu_powx<double>(const int N, const double* a,
+    const double alpha, double* y) {
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  powx_kernel<double><<<CAFFE_GET_BLOCKS(N), CAFFE_CUDA_NUM_THREADS>>>(
+      N, a, alpha, y);
+}
+
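+// sign(x) is computed branch-free as (0 < x) - (x < 0), which evaluates to
+// -1, 0 or +1; sgnbit forwards to the device signbit() function.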
+DEFINE_AND_INSTANTIATE_GPU_UNARY_FUNC(sign, y[index] = (Dtype(0) < x[index])
+                                      - (x[index] < Dtype(0)));
+DEFINE_AND_INSTANTIATE_GPU_UNARY_FUNC(sgnbit, y[index] = signbit(x[index]));
+
+__global__ void popc_kernel(const int n, const float* a,
+    const float* b, uint8_t* y) {
+  CUDA_KERNEL_LOOP(index, n) {
+    y[index] = __popc(static_cast<uint32_t>(a[index]) ^
+                      static_cast<uint32_t>(b[index]));
+  }
+}
+
+__global__ void popcll_kernel(const int n, const double* a,
+    const double* b, uint8_t* y) {
+  CUDA_KERNEL_LOOP(index, n) {
+    y[index] = __popcll(static_cast<uint64_t>(a[index]) ^
+                        static_cast<uint64_t>(b[index]));
+  }
+}
+
+template <>
+uint32_t caffe_gpu_hamming_distance<float>(const int n, const float* x,
+                                  const float* y) {
+  // TODO: Fix caffe_gpu_hamming_distance (see failing unit test
+  // TestHammingDistanceGPU in test_math_functions.cpp).
+  NOT_IMPLEMENTED;
+  thrust::device_vector<uint8_t> popcounts(n);
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  popc_kernel<<<CAFFE_GET_BLOCKS(n), CAFFE_CUDA_NUM_THREADS>>>(
+      n, x, y, thrust::raw_pointer_cast(popcounts.data()));
+  return thrust::reduce(popcounts.begin(), popcounts.end(),
+                        (uint32_t) 0, thrust::plus<uint32_t>());
+}
+
+template <>
+uint32_t caffe_gpu_hamming_distance<double>(const int n, const double* x,
+                                   const double* y) {
+  // TODO: Fix caffe_gpu_hamming_distance (see failing unit test
+  // TestHammingDistanceGPU in test_math_functions.cpp).
+  NOT_IMPLEMENTED;
+  thrust::device_vector<uint8_t> popcounts(n);
+  // NOLINT_NEXT_LINE(whitespace/operators)
+  popcll_kernel<<<CAFFE_GET_BLOCKS(n), CAFFE_CUDA_NUM_THREADS>>>(
+      n, x, y, thrust::raw_pointer_cast(popcounts.data()));
+  return thrust::reduce(popcounts.begin(), popcounts.end(),
+                        /* NOLINT_NEXT_LINE(build/include_what_you_use) */
+                        (uint32_t) 0, thrust::plus<uint32_t>());
+}
+
+void caffe_gpu_rng_uniform(const int n, unsigned int* r) {
+  CURAND_CHECK(curandGenerate(Caffe::curand_generator(), r, n));
+}
+
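+// curandGenerateUniform draws from (0, 1]; the wrappers below rescale the
+// samples by (b - a) and shift them by a to obtain values in (a, b].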
+template <>
+void caffe_gpu_rng_uniform<float>(const int n, const float a, const float b,
+                                  float* r) {
+  CURAND_CHECK(curandGenerateUniform(Caffe::curand_generator(), r, n));
+  const float range = b - a;
+  if (range != static_cast<float>(1)) {
+    caffe_gpu_scal(n, range, r);
+  }
+  if (a != static_cast<float>(0)) {
+    caffe_gpu_add_scalar(n, a, r);
+  }
+}
+
+template <>
+void caffe_gpu_rng_uniform<double>(const int n, const double a, const double b,
+                                   double* r) {
+  CURAND_CHECK(curandGenerateUniformDouble(Caffe::curand_generator(), r, n));
+  const double range = b - a;
+  if (range != static_cast<double>(1)) {
+    caffe_gpu_scal(n, range, r);
+  }
+  if (a != static_cast<double>(0)) {
+    caffe_gpu_add_scalar(n, a, r);
+  }
+}
+
+template <>
+void caffe_gpu_rng_gaussian(const int n, const float mu, const float sigma,
+                            float* r) {
+  CURAND_CHECK(
+      curandGenerateNormal(Caffe::curand_generator(), r, n, mu, sigma));
+}
+
+template <>
+void caffe_gpu_rng_gaussian(const int n, const double mu, const double sigma,
+                            double* r) {
+  CURAND_CHECK(
+      curandGenerateNormalDouble(Caffe::curand_generator(), r, n, mu, sigma));
+}
+
+}  // namespace caffe
diff --git a/src/caffe/util/upgrade_proto.cpp b/src/caffe/util/upgrade_proto.cpp
new file mode 100644
index 0000000..38a0602
--- /dev/null
+++ b/src/caffe/util/upgrade_proto.cpp
@@ -0,0 +1,940 @@
+#include <google/protobuf/io/coded_stream.h>
+#include <google/protobuf/io/zero_copy_stream_impl.h>
+#include <google/protobuf/text_format.h>
+
+#include <map>
+#include <string>
+
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/io.hpp"
+#include "caffe/util/upgrade_proto.hpp"
+
+namespace caffe {
+
+bool NetNeedsUpgrade(const NetParameter& net_param) {
+  return NetNeedsV0ToV1Upgrade(net_param) || NetNeedsV1ToV2Upgrade(net_param);
+}
+
+bool NetNeedsV0ToV1Upgrade(const NetParameter& net_param) {
+  for (int i = 0; i < net_param.layers_size(); ++i) {
+    if (net_param.layers(i).has_layer()) {
+      return true;
+    }
+  }
+  return false;
+}
+
+bool NetNeedsV1ToV2Upgrade(const NetParameter& net_param) {
+  return net_param.layers_size() > 0;
+}
+
+bool UpgradeV0Net(const NetParameter& v0_net_param_padding_layers,
+                  NetParameter* net_param) {
+  // First upgrade padding layers to padded conv layers.
+  NetParameter v0_net_param;
+  UpgradeV0PaddingLayers(v0_net_param_padding_layers, &v0_net_param);
+  // Now upgrade layer parameters.
+  bool is_fully_compatible = true;
+  net_param->Clear();
+  if (v0_net_param.has_name()) {
+    net_param->set_name(v0_net_param.name());
+  }
+  for (int i = 0; i < v0_net_param.layers_size(); ++i) {
+    is_fully_compatible &= UpgradeV0LayerParameter(v0_net_param.layers(i),
+                                                   net_param->add_layers());
+  }
+  for (int i = 0; i < v0_net_param.input_size(); ++i) {
+    net_param->add_input(v0_net_param.input(i));
+  }
+  for (int i = 0; i < v0_net_param.input_dim_size(); ++i) {
+    net_param->add_input_dim(v0_net_param.input_dim(i));
+  }
+  if (v0_net_param.has_force_backward()) {
+    net_param->set_force_backward(v0_net_param.force_backward());
+  }
+  return is_fully_compatible;
+}
+
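+// UpgradeV0PaddingLayers drops every "padding" layer and folds its pad value
+// into the conv/pool layer that consumed its output, rewiring that layer's
+// bottom to the padding layer's own input blob.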
+void UpgradeV0PaddingLayers(const NetParameter& param,
+                            NetParameter* param_upgraded_pad) {
+  // Copy everything other than the layers from the original param.
+  param_upgraded_pad->Clear();
+  param_upgraded_pad->CopyFrom(param);
+  param_upgraded_pad->clear_layers();
+  // Figure out which layer each bottom blob comes from.
+  map<string, int> blob_name_to_last_top_idx;
+  for (int i = 0; i < param.input_size(); ++i) {
+    const string& blob_name = param.input(i);
+    blob_name_to_last_top_idx[blob_name] = -1;
+  }
+  for (int i = 0; i < param.layers_size(); ++i) {
+    const V1LayerParameter& layer_connection = param.layers(i);
+    const V0LayerParameter& layer_param = layer_connection.layer();
+    // Add the layer to the new net, unless it's a padding layer.
+    if (layer_param.type() != "padding") {
+      param_upgraded_pad->add_layers()->CopyFrom(layer_connection);
+    }
+    for (int j = 0; j < layer_connection.bottom_size(); ++j) {
+      const string& blob_name = layer_connection.bottom(j);
+      if (blob_name_to_last_top_idx.find(blob_name) ==
+          blob_name_to_last_top_idx.end()) {
+        LOG(FATAL) << "Unknown blob input " << blob_name << " to layer " << j;
+      }
+      const int top_idx = blob_name_to_last_top_idx[blob_name];
+      if (top_idx == -1) {
+        continue;
+      }
+      const V1LayerParameter& source_layer = param.layers(top_idx);
+      if (source_layer.layer().type() == "padding") {
+        // This layer has a padding layer as input -- check that it is a conv
+        // layer or a pooling layer and takes only one input.  Also check that
+        // the padding layer input has only one input and one output.  Other
+        // cases have undefined behavior in Caffe.
+        CHECK((layer_param.type() == "conv") || (layer_param.type() == "pool"))
+            << "Padding layer input to "
+            "non-convolutional / non-pooling layer type "
+            << layer_param.type();
+        CHECK_EQ(layer_connection.bottom_size(), 1)
+            << "Conv Layer takes a single blob as input.";
+        CHECK_EQ(source_layer.bottom_size(), 1)
+            << "Padding Layer takes a single blob as input.";
+        CHECK_EQ(source_layer.top_size(), 1)
+            << "Padding Layer produces a single blob as output.";
+        int layer_index = param_upgraded_pad->layers_size() - 1;
+        param_upgraded_pad->mutable_layers(layer_index)->mutable_layer()
+            ->set_pad(source_layer.layer().pad());
+        param_upgraded_pad->mutable_layers(layer_index)
+            ->set_bottom(j, source_layer.bottom(0));
+      }
+    }
+    for (int j = 0; j < layer_connection.top_size(); ++j) {
+      const string& blob_name = layer_connection.top(j);
+      blob_name_to_last_top_idx[blob_name] = i;
+    }
+  }
+}
+
+bool UpgradeV0LayerParameter(const V1LayerParameter& v0_layer_connection,
+                             V1LayerParameter* layer_param) {
+  bool is_fully_compatible = true;
+  layer_param->Clear();
+  for (int i = 0; i < v0_layer_connection.bottom_size(); ++i) {
+    layer_param->add_bottom(v0_layer_connection.bottom(i));
+  }
+  for (int i = 0; i < v0_layer_connection.top_size(); ++i) {
+    layer_param->add_top(v0_layer_connection.top(i));
+  }
+  if (v0_layer_connection.has_layer()) {
+    const V0LayerParameter& v0_layer_param = v0_layer_connection.layer();
+    if (v0_layer_param.has_name()) {
+      layer_param->set_name(v0_layer_param.name());
+    }
+    const string& type = v0_layer_param.type();
+    if (v0_layer_param.has_type()) {
+      layer_param->set_type(UpgradeV0LayerType(type));
+    }
+    for (int i = 0; i < v0_layer_param.blobs_size(); ++i) {
+      layer_param->add_blobs()->CopyFrom(v0_layer_param.blobs(i));
+    }
+    for (int i = 0; i < v0_layer_param.blobs_lr_size(); ++i) {
+      layer_param->add_blobs_lr(v0_layer_param.blobs_lr(i));
+    }
+    for (int i = 0; i < v0_layer_param.weight_decay_size(); ++i) {
+      layer_param->add_weight_decay(v0_layer_param.weight_decay(i));
+    }
+    if (v0_layer_param.has_num_output()) {
+      if (type == "conv") {
+        layer_param->mutable_convolution_param()->set_num_output(
+            v0_layer_param.num_output());
+      } else if (type == "innerproduct") {
+        layer_param->mutable_inner_product_param()->set_num_output(
+            v0_layer_param.num_output());
+      } else {
+        LOG(ERROR) << "Unknown parameter num_output for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_biasterm()) {
+      if (type == "conv") {
+        layer_param->mutable_convolution_param()->set_bias_term(
+            v0_layer_param.biasterm());
+      } else if (type == "innerproduct") {
+        layer_param->mutable_inner_product_param()->set_bias_term(
+            v0_layer_param.biasterm());
+      } else {
+        LOG(ERROR) << "Unknown parameter biasterm for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_weight_filler()) {
+      if (type == "conv") {
+        layer_param->mutable_convolution_param()->
+            mutable_weight_filler()->CopyFrom(v0_layer_param.weight_filler());
+      } else if (type == "innerproduct") {
+        layer_param->mutable_inner_product_param()->
+            mutable_weight_filler()->CopyFrom(v0_layer_param.weight_filler());
+      } else {
+        LOG(ERROR) << "Unknown parameter weight_filler for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_bias_filler()) {
+      if (type == "conv") {
+        layer_param->mutable_convolution_param()->
+            mutable_bias_filler()->CopyFrom(v0_layer_param.bias_filler());
+      } else if (type == "innerproduct") {
+        layer_param->mutable_inner_product_param()->
+            mutable_bias_filler()->CopyFrom(v0_layer_param.bias_filler());
+      } else {
+        LOG(ERROR) << "Unknown parameter bias_filler for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_pad()) {
+      if (type == "conv") {
+        layer_param->mutable_convolution_param()->set_pad(v0_layer_param.pad());
+      } else if (type == "pool") {
+        layer_param->mutable_pooling_param()->set_pad(v0_layer_param.pad());
+      } else {
+        LOG(ERROR) << "Unknown parameter pad for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_kernelsize()) {
+      if (type == "conv") {
+        layer_param->mutable_convolution_param()->set_kernel_size(
+            v0_layer_param.kernelsize());
+      } else if (type == "pool") {
+        layer_param->mutable_pooling_param()->set_kernel_size(
+            v0_layer_param.kernelsize());
+      } else {
+        LOG(ERROR) << "Unknown parameter kernelsize for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_group()) {
+      if (type == "conv") {
+        layer_param->mutable_convolution_param()->set_group(
+            v0_layer_param.group());
+      } else {
+        LOG(ERROR) << "Unknown parameter group for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_stride()) {
+      if (type == "conv") {
+        layer_param->mutable_convolution_param()->set_stride(
+            v0_layer_param.stride());
+      } else if (type == "pool") {
+        layer_param->mutable_pooling_param()->set_stride(
+            v0_layer_param.stride());
+      } else {
+        LOG(ERROR) << "Unknown parameter stride for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_pool()) {
+      if (type == "pool") {
+        V0LayerParameter_PoolMethod pool = v0_layer_param.pool();
+        switch (pool) {
+        case V0LayerParameter_PoolMethod_MAX:
+          layer_param->mutable_pooling_param()->set_pool(
+              PoolingParameter_PoolMethod_MAX);
+          break;
+        case V0LayerParameter_PoolMethod_AVE:
+          layer_param->mutable_pooling_param()->set_pool(
+              PoolingParameter_PoolMethod_AVE);
+          break;
+        case V0LayerParameter_PoolMethod_STOCHASTIC:
+          layer_param->mutable_pooling_param()->set_pool(
+              PoolingParameter_PoolMethod_STOCHASTIC);
+          break;
+        default:
+          LOG(ERROR) << "Unknown pool method " << pool;
+          is_fully_compatible = false;
+        }
+      } else {
+        LOG(ERROR) << "Unknown parameter pool for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_dropout_ratio()) {
+      if (type == "dropout") {
+        layer_param->mutable_dropout_param()->set_dropout_ratio(
+            v0_layer_param.dropout_ratio());
+      } else {
+        LOG(ERROR) << "Unknown parameter dropout_ratio for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_local_size()) {
+      if (type == "lrn") {
+        layer_param->mutable_lrn_param()->set_local_size(
+            v0_layer_param.local_size());
+      } else {
+        LOG(ERROR) << "Unknown parameter local_size for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_alpha()) {
+      if (type == "lrn") {
+        layer_param->mutable_lrn_param()->set_alpha(v0_layer_param.alpha());
+      } else {
+        LOG(ERROR) << "Unknown parameter alpha for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_beta()) {
+      if (type == "lrn") {
+        layer_param->mutable_lrn_param()->set_beta(v0_layer_param.beta());
+      } else {
+        LOG(ERROR) << "Unknown parameter beta for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_k()) {
+      if (type == "lrn") {
+        layer_param->mutable_lrn_param()->set_k(v0_layer_param.k());
+      } else {
+        LOG(ERROR) << "Unknown parameter k for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_source()) {
+      if (type == "data") {
+        layer_param->mutable_data_param()->set_source(v0_layer_param.source());
+      } else if (type == "hdf5_data") {
+        layer_param->mutable_hdf5_data_param()->set_source(
+            v0_layer_param.source());
+      } else if (type == "images") {
+        layer_param->mutable_image_data_param()->set_source(
+            v0_layer_param.source());
+      } else if (type == "window_data") {
+        layer_param->mutable_window_data_param()->set_source(
+            v0_layer_param.source());
+      } else if (type == "infogain_loss") {
+        layer_param->mutable_infogain_loss_param()->set_source(
+            v0_layer_param.source());
+      } else {
+        LOG(ERROR) << "Unknown parameter source for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_scale()) {
+      layer_param->mutable_transform_param()->
+          set_scale(v0_layer_param.scale());
+    }
+    if (v0_layer_param.has_meanfile()) {
+      layer_param->mutable_transform_param()->
+          set_mean_file(v0_layer_param.meanfile());
+    }
+    if (v0_layer_param.has_batchsize()) {
+      if (type == "data") {
+        layer_param->mutable_data_param()->set_batch_size(
+            v0_layer_param.batchsize());
+      } else if (type == "hdf5_data") {
+        layer_param->mutable_hdf5_data_param()->set_batch_size(
+            v0_layer_param.batchsize());
+      } else if (type == "images") {
+        layer_param->mutable_image_data_param()->set_batch_size(
+            v0_layer_param.batchsize());
+      } else if (type == "window_data") {
+        layer_param->mutable_window_data_param()->set_batch_size(
+            v0_layer_param.batchsize());
+      } else {
+        LOG(ERROR) << "Unknown parameter batchsize for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_cropsize()) {
+      layer_param->mutable_transform_param()->
+          set_crop_size(v0_layer_param.cropsize());
+    }
+    if (v0_layer_param.has_mirror()) {
+      layer_param->mutable_transform_param()->
+          set_mirror(v0_layer_param.mirror());
+    }
+    if (v0_layer_param.has_rand_skip()) {
+      if (type == "data") {
+        layer_param->mutable_data_param()->set_rand_skip(
+            v0_layer_param.rand_skip());
+      } else if (type == "images") {
+        layer_param->mutable_image_data_param()->set_rand_skip(
+            v0_layer_param.rand_skip());
+      } else {
+        LOG(ERROR) << "Unknown parameter rand_skip for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_shuffle_images()) {
+      if (type == "images") {
+        layer_param->mutable_image_data_param()->set_shuffle(
+            v0_layer_param.shuffle_images());
+      } else {
+        LOG(ERROR) << "Unknown parameter shuffle for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_new_height()) {
+      if (type == "images") {
+        layer_param->mutable_image_data_param()->set_new_height(
+            v0_layer_param.new_height());
+      } else {
+        LOG(ERROR) << "Unknown parameter new_height for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_new_width()) {
+      if (type == "images") {
+        layer_param->mutable_image_data_param()->set_new_width(
+            v0_layer_param.new_width());
+      } else {
+        LOG(ERROR) << "Unknown parameter new_width for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_concat_dim()) {
+      if (type == "concat") {
+        layer_param->mutable_concat_param()->set_concat_dim(
+            v0_layer_param.concat_dim());
+      } else {
+        LOG(ERROR) << "Unknown parameter concat_dim for layer type " << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_det_fg_threshold()) {
+      if (type == "window_data") {
+        layer_param->mutable_window_data_param()->set_fg_threshold(
+            v0_layer_param.det_fg_threshold());
+      } else {
+        LOG(ERROR) << "Unknown parameter det_fg_threshold for layer type "
+                   << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_det_bg_threshold()) {
+      if (type == "window_data") {
+        layer_param->mutable_window_data_param()->set_bg_threshold(
+            v0_layer_param.det_bg_threshold());
+      } else {
+        LOG(ERROR) << "Unknown parameter det_bg_threshold for layer type "
+                   << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_det_fg_fraction()) {
+      if (type == "window_data") {
+        layer_param->mutable_window_data_param()->set_fg_fraction(
+            v0_layer_param.det_fg_fraction());
+      } else {
+        LOG(ERROR) << "Unknown parameter det_fg_fraction for layer type "
+                   << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_det_context_pad()) {
+      if (type == "window_data") {
+        layer_param->mutable_window_data_param()->set_context_pad(
+            v0_layer_param.det_context_pad());
+      } else {
+        LOG(ERROR) << "Unknown parameter det_context_pad for layer type "
+                   << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_det_crop_mode()) {
+      if (type == "window_data") {
+        layer_param->mutable_window_data_param()->set_crop_mode(
+            v0_layer_param.det_crop_mode());
+      } else {
+        LOG(ERROR) << "Unknown parameter det_crop_mode for layer type "
+                   << type;
+        is_fully_compatible = false;
+      }
+    }
+    if (v0_layer_param.has_hdf5_output_param()) {
+      if (type == "hdf5_output") {
+        layer_param->mutable_hdf5_output_param()->CopyFrom(
+            v0_layer_param.hdf5_output_param());
+      } else {
+        LOG(ERROR) << "Unknown parameter hdf5_output_param for layer type "
+                   << type;
+        is_fully_compatible = false;
+      }
+    }
+  }
+  return is_fully_compatible;
+}
+
+V1LayerParameter_LayerType UpgradeV0LayerType(const string& type) {
+  if (type == "accuracy") {
+    return V1LayerParameter_LayerType_ACCURACY;
+  } else if (type == "bnll") {
+    return V1LayerParameter_LayerType_BNLL;
+  } else if (type == "concat") {
+    return V1LayerParameter_LayerType_CONCAT;
+  } else if (type == "conv") {
+    return V1LayerParameter_LayerType_CONVOLUTION;
+  } else if (type == "data") {
+    return V1LayerParameter_LayerType_DATA;
+  } else if (type == "dropout") {
+    return V1LayerParameter_LayerType_DROPOUT;
+  } else if (type == "euclidean_loss") {
+    return V1LayerParameter_LayerType_EUCLIDEAN_LOSS;
+  } else if (type == "flatten") {
+    return V1LayerParameter_LayerType_FLATTEN;
+  } else if (type == "hdf5_data") {
+    return V1LayerParameter_LayerType_HDF5_DATA;
+  } else if (type == "hdf5_output") {
+    return V1LayerParameter_LayerType_HDF5_OUTPUT;
+  } else if (type == "im2col") {
+    return V1LayerParameter_LayerType_IM2COL;
+  } else if (type == "images") {
+    return V1LayerParameter_LayerType_IMAGE_DATA;
+  } else if (type == "infogain_loss") {
+    return V1LayerParameter_LayerType_INFOGAIN_LOSS;
+  } else if (type == "innerproduct") {
+    return V1LayerParameter_LayerType_INNER_PRODUCT;
+  } else if (type == "lrn") {
+    return V1LayerParameter_LayerType_LRN;
+  } else if (type == "multinomial_logistic_loss") {
+    return V1LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS;
+  } else if (type == "pool") {
+    return V1LayerParameter_LayerType_POOLING;
+  } else if (type == "relu") {
+    return V1LayerParameter_LayerType_RELU;
+  } else if (type == "sigmoid") {
+    return V1LayerParameter_LayerType_SIGMOID;
+  } else if (type == "softmax") {
+    return V1LayerParameter_LayerType_SOFTMAX;
+  } else if (type == "softmax_loss") {
+    return V1LayerParameter_LayerType_SOFTMAX_LOSS;
+  } else if (type == "split") {
+    return V1LayerParameter_LayerType_SPLIT;
+  } else if (type == "tanh") {
+    return V1LayerParameter_LayerType_TANH;
+  } else if (type == "window_data") {
+    return V1LayerParameter_LayerType_WINDOW_DATA;
+  } else {
+    LOG(FATAL) << "Unknown layer name: " << type;
+    return V1LayerParameter_LayerType_NONE;
+  }
+}
+
+bool NetNeedsDataUpgrade(const NetParameter& net_param) {
+  for (int i = 0; i < net_param.layers_size(); ++i) {
+    if (net_param.layers(i).type() == V1LayerParameter_LayerType_DATA) {
+      DataParameter layer_param = net_param.layers(i).data_param();
+      if (layer_param.has_scale()) { return true; }
+      if (layer_param.has_mean_file()) { return true; }
+      if (layer_param.has_crop_size()) { return true; }
+      if (layer_param.has_mirror()) { return true; }
+    }
+    if (net_param.layers(i).type() == V1LayerParameter_LayerType_IMAGE_DATA) {
+      ImageDataParameter layer_param = net_param.layers(i).image_data_param();
+      if (layer_param.has_scale()) { return true; }
+      if (layer_param.has_mean_file()) { return true; }
+      if (layer_param.has_crop_size()) { return true; }
+      if (layer_param.has_mirror()) { return true; }
+    }
+    if (net_param.layers(i).type() == V1LayerParameter_LayerType_WINDOW_DATA) {
+      WindowDataParameter layer_param = net_param.layers(i).window_data_param();
+      if (layer_param.has_scale()) { return true; }
+      if (layer_param.has_mean_file()) { return true; }
+      if (layer_param.has_crop_size()) { return true; }
+      if (layer_param.has_mirror()) { return true; }
+    }
+  }
+  return false;
+}
+
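+// The macro below moves the scale / mean_file / crop_size / mirror settings
+// of a data-style layer into its TransformationParameter and clears the old
+// per-layer fields.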
+#define CONVERT_LAYER_TRANSFORM_PARAM(TYPE, Name, param_name) \
+  do { \
+    if (net_param->layers(i).type() == V1LayerParameter_LayerType_##TYPE) { \
+      Name##Parameter* layer_param = \
+          net_param->mutable_layers(i)->mutable_##param_name##_param(); \
+      TransformationParameter* transform_param = \
+          net_param->mutable_layers(i)->mutable_transform_param(); \
+      if (layer_param->has_scale()) { \
+        transform_param->set_scale(layer_param->scale()); \
+        layer_param->clear_scale(); \
+      } \
+      if (layer_param->has_mean_file()) { \
+        transform_param->set_mean_file(layer_param->mean_file()); \
+        layer_param->clear_mean_file(); \
+      } \
+      if (layer_param->has_crop_size()) { \
+        transform_param->set_crop_size(layer_param->crop_size()); \
+        layer_param->clear_crop_size(); \
+      } \
+      if (layer_param->has_mirror()) { \
+        transform_param->set_mirror(layer_param->mirror()); \
+        layer_param->clear_mirror(); \
+      } \
+    } \
+  } while (0)
+
+void UpgradeNetDataTransformation(NetParameter* net_param) {
+  for (int i = 0; i < net_param->layers_size(); ++i) {
+    CONVERT_LAYER_TRANSFORM_PARAM(DATA, Data, data);
+    CONVERT_LAYER_TRANSFORM_PARAM(IMAGE_DATA, ImageData, image_data);
+    CONVERT_LAYER_TRANSFORM_PARAM(WINDOW_DATA, WindowData, window_data);
+  }
+}
+
+bool UpgradeNetAsNeeded(const string& param_file, NetParameter* param) {
+  bool success = true;
+  if (NetNeedsV0ToV1Upgrade(*param)) {
+    // NetParameter was specified using the old style (V0LayerParameter); try to
+    // upgrade it.
+    LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
+               << "V0LayerParameter: " << param_file;
+    NetParameter original_param(*param);
+    if (!UpgradeV0Net(original_param, param)) {
+      success = false;
+      LOG(ERROR) << "Warning: had one or more problems upgrading "
+          << "V0NetParameter to NetParameter (see above); continuing anyway.";
+    } else {
+      LOG(INFO) << "Successfully upgraded file specified using deprecated "
+                << "V0LayerParameter";
+    }
+    LOG(ERROR) << "Note that future Caffe releases will not support "
+        << "V0NetParameter; use ./build/tools/upgrade_net_proto_text for "
+        << "prototxt and ./build/tools/upgrade_net_proto_binary for model "
+        << "weights upgrade this and any other net protos to the new format.";
+  }
+  // NetParameter uses old style data transformation fields; try to upgrade it.
+  if (NetNeedsDataUpgrade(*param)) {
+    LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
+               << "transformation parameters: " << param_file;
+    UpgradeNetDataTransformation(param);
+    LOG(INFO) << "Successfully upgraded file specified using deprecated "
+              << "data transformation parameters.";
+    LOG(ERROR) << "Note that future Caffe releases will only support "
+               << "transform_param messages for transformation fields.";
+  }
+  if (NetNeedsV1ToV2Upgrade(*param)) {
+    LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
+               << "V1LayerParameter: " << param_file;
+    NetParameter original_param(*param);
+    if (!UpgradeV1Net(original_param, param)) {
+      success = false;
+      LOG(ERROR) << "Warning: had one or more problems upgrading "
+          << "V1LayerParameter (see above); continuing anyway.";
+    } else {
+      LOG(INFO) << "Successfully upgraded file specified using deprecated "
+                << "V1LayerParameter";
+    }
+  }
+  return success;
+}
+
+bool UpgradeV1Net(const NetParameter& v1_net_param, NetParameter* net_param) {
+  bool is_fully_compatible = true;
+  if (v1_net_param.layer_size() > 0) {
+    LOG(ERROR) << "Input NetParameter to be upgraded already specifies 'layer' "
+               << "fields; these will be ignored for the upgrade.";
+    is_fully_compatible = false;
+  }
+  net_param->CopyFrom(v1_net_param);
+  net_param->clear_layers();
+  net_param->clear_layer();
+  for (int i = 0; i < v1_net_param.layers_size(); ++i) {
+    if (!UpgradeV1LayerParameter(v1_net_param.layers(i),
+                                 net_param->add_layer())) {
+      LOG(ERROR) << "Upgrade of input layer " << i << " failed.";
+      is_fully_compatible = false;
+    }
+  }
+  return is_fully_compatible;
+}
+
+bool UpgradeV1LayerParameter(const V1LayerParameter& v1_layer_param,
+                             LayerParameter* layer_param) {
+  layer_param->Clear();
+  bool is_fully_compatible = true;
+  for (int i = 0; i < v1_layer_param.bottom_size(); ++i) {
+    layer_param->add_bottom(v1_layer_param.bottom(i));
+  }
+  for (int i = 0; i < v1_layer_param.top_size(); ++i) {
+    layer_param->add_top(v1_layer_param.top(i));
+  }
+  if (v1_layer_param.has_name()) {
+    layer_param->set_name(v1_layer_param.name());
+  }
+  for (int i = 0; i < v1_layer_param.include_size(); ++i) {
+    layer_param->add_include()->CopyFrom(v1_layer_param.include(i));
+  }
+  for (int i = 0; i < v1_layer_param.exclude_size(); ++i) {
+    layer_param->add_exclude()->CopyFrom(v1_layer_param.exclude(i));
+  }
+  if (v1_layer_param.has_type()) {
+    layer_param->set_type(UpgradeV1LayerType(v1_layer_param.type()));
+  }
+  for (int i = 0; i < v1_layer_param.blobs_size(); ++i) {
+    layer_param->add_blobs()->CopyFrom(v1_layer_param.blobs(i));
+  }
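+  // The repeated ParamSpec field is grown on demand so that entry i exists
+  // before its name, share_mode, lr_mult or decay_mult is copied from the
+  // corresponding V1 repeated fields below.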
+  for (int i = 0; i < v1_layer_param.param_size(); ++i) {
+    while (layer_param->param_size() <= i) { layer_param->add_param(); }
+    layer_param->mutable_param(i)->set_name(v1_layer_param.param(i));
+  }
+  ParamSpec_DimCheckMode mode;
+  for (int i = 0; i < v1_layer_param.blob_share_mode_size(); ++i) {
+    while (layer_param->param_size() <= i) { layer_param->add_param(); }
+    switch (v1_layer_param.blob_share_mode(i)) {
+    case V1LayerParameter_DimCheckMode_STRICT:
+      mode = ParamSpec_DimCheckMode_STRICT;
+      break;
+    case V1LayerParameter_DimCheckMode_PERMISSIVE:
+      mode = ParamSpec_DimCheckMode_PERMISSIVE;
+      break;
+    default:
+      LOG(FATAL) << "Unknown blob_share_mode: "
+                 << v1_layer_param.blob_share_mode(i);
+      break;
+    }
+    layer_param->mutable_param(i)->set_share_mode(mode);
+  }
+  for (int i = 0; i < v1_layer_param.blobs_lr_size(); ++i) {
+    while (layer_param->param_size() <= i) { layer_param->add_param(); }
+    layer_param->mutable_param(i)->set_lr_mult(v1_layer_param.blobs_lr(i));
+  }
+  for (int i = 0; i < v1_layer_param.weight_decay_size(); ++i) {
+    while (layer_param->param_size() <= i) { layer_param->add_param(); }
+    layer_param->mutable_param(i)->set_decay_mult(
+        v1_layer_param.weight_decay(i));
+  }
+  for (int i = 0; i < v1_layer_param.loss_weight_size(); ++i) {
+    layer_param->add_loss_weight(v1_layer_param.loss_weight(i));
+  }
+  if (v1_layer_param.has_accuracy_param()) {
+    layer_param->mutable_accuracy_param()->CopyFrom(
+        v1_layer_param.accuracy_param());
+  }
+  if (v1_layer_param.has_argmax_param()) {
+    layer_param->mutable_argmax_param()->CopyFrom(
+        v1_layer_param.argmax_param());
+  }
+  if (v1_layer_param.has_concat_param()) {
+    layer_param->mutable_concat_param()->CopyFrom(
+        v1_layer_param.concat_param());
+  }
+  if (v1_layer_param.has_contrastive_loss_param()) {
+    layer_param->mutable_contrastive_loss_param()->CopyFrom(
+        v1_layer_param.contrastive_loss_param());
+  }
+  if (v1_layer_param.has_convolution_param()) {
+    layer_param->mutable_convolution_param()->CopyFrom(
+        v1_layer_param.convolution_param());
+  }
+  if (v1_layer_param.has_data_param()) {
+    layer_param->mutable_data_param()->CopyFrom(
+        v1_layer_param.data_param());
+  }
+  if (v1_layer_param.has_dropout_param()) {
+    layer_param->mutable_dropout_param()->CopyFrom(
+        v1_layer_param.dropout_param());
+  }
+  if (v1_layer_param.has_dummy_data_param()) {
+    layer_param->mutable_dummy_data_param()->CopyFrom(
+        v1_layer_param.dummy_data_param());
+  }
+  if (v1_layer_param.has_eltwise_param()) {
+    layer_param->mutable_eltwise_param()->CopyFrom(
+        v1_layer_param.eltwise_param());
+  }
+  if (v1_layer_param.has_exp_param()) {
+    layer_param->mutable_exp_param()->CopyFrom(
+        v1_layer_param.exp_param());
+  }
+  if (v1_layer_param.has_hdf5_data_param()) {
+    layer_param->mutable_hdf5_data_param()->CopyFrom(
+        v1_layer_param.hdf5_data_param());
+  }
+  if (v1_layer_param.has_hdf5_output_param()) {
+    layer_param->mutable_hdf5_output_param()->CopyFrom(
+        v1_layer_param.hdf5_output_param());
+  }
+  if (v1_layer_param.has_hinge_loss_param()) {
+    layer_param->mutable_hinge_loss_param()->CopyFrom(
+        v1_layer_param.hinge_loss_param());
+  }
+  if (v1_layer_param.has_image_data_param()) {
+    layer_param->mutable_image_data_param()->CopyFrom(
+        v1_layer_param.image_data_param());
+  }
+  if (v1_layer_param.has_infogain_loss_param()) {
+    layer_param->mutable_infogain_loss_param()->CopyFrom(
+        v1_layer_param.infogain_loss_param());
+  }
+  if (v1_layer_param.has_inner_product_param()) {
+    layer_param->mutable_inner_product_param()->CopyFrom(
+        v1_layer_param.inner_product_param());
+  }
+  if (v1_layer_param.has_lrn_param()) {
+    layer_param->mutable_lrn_param()->CopyFrom(
+        v1_layer_param.lrn_param());
+  }
+  if (v1_layer_param.has_memory_data_param()) {
+    layer_param->mutable_memory_data_param()->CopyFrom(
+        v1_layer_param.memory_data_param());
+  }
+  if (v1_layer_param.has_mvn_param()) {
+    layer_param->mutable_mvn_param()->CopyFrom(
+        v1_layer_param.mvn_param());
+  }
+  if (v1_layer_param.has_pooling_param()) {
+    layer_param->mutable_pooling_param()->CopyFrom(
+        v1_layer_param.pooling_param());
+  }
+  if (v1_layer_param.has_power_param()) {
+    layer_param->mutable_power_param()->CopyFrom(
+        v1_layer_param.power_param());
+  }
+  if (v1_layer_param.has_relu_param()) {
+    layer_param->mutable_relu_param()->CopyFrom(
+        v1_layer_param.relu_param());
+  }
+  if (v1_layer_param.has_sigmoid_param()) {
+    layer_param->mutable_sigmoid_param()->CopyFrom(
+        v1_layer_param.sigmoid_param());
+  }
+  if (v1_layer_param.has_softmax_param()) {
+    layer_param->mutable_softmax_param()->CopyFrom(
+        v1_layer_param.softmax_param());
+  }
+  if (v1_layer_param.has_slice_param()) {
+    layer_param->mutable_slice_param()->CopyFrom(
+        v1_layer_param.slice_param());
+  }
+  if (v1_layer_param.has_tanh_param()) {
+    layer_param->mutable_tanh_param()->CopyFrom(
+        v1_layer_param.tanh_param());
+  }
+  if (v1_layer_param.has_threshold_param()) {
+    layer_param->mutable_threshold_param()->CopyFrom(
+        v1_layer_param.threshold_param());
+  }
+  if (v1_layer_param.has_window_data_param()) {
+    layer_param->mutable_window_data_param()->CopyFrom(
+        v1_layer_param.window_data_param());
+  }
+  if (v1_layer_param.has_transform_param()) {
+    layer_param->mutable_transform_param()->CopyFrom(
+        v1_layer_param.transform_param());
+  }
+  if (v1_layer_param.has_loss_param()) {
+    layer_param->mutable_loss_param()->CopyFrom(
+        v1_layer_param.loss_param());
+  }
+  if (v1_layer_param.has_layer()) {
+    LOG(ERROR) << "Input NetParameter has V0 layer -- ignoring.";
+    is_fully_compatible = false;
+  }
+  return is_fully_compatible;
+}
+
+const char* UpgradeV1LayerType(const V1LayerParameter_LayerType type) {
+  switch (type) {
+  case V1LayerParameter_LayerType_NONE:
+    return "";
+  case V1LayerParameter_LayerType_ABSVAL:
+    return "AbsVal";
+  case V1LayerParameter_LayerType_ACCURACY:
+    return "Accuracy";
+  case V1LayerParameter_LayerType_ARGMAX:
+    return "ArgMax";
+  case V1LayerParameter_LayerType_BNLL:
+    return "BNLL";
+  case V1LayerParameter_LayerType_CONCAT:
+    return "Concat";
+  case V1LayerParameter_LayerType_CONTRASTIVE_LOSS:
+    return "ContrastiveLoss";
+  case V1LayerParameter_LayerType_CONVOLUTION:
+    return "Convolution";
+  case V1LayerParameter_LayerType_DECONVOLUTION:
+    return "Deconvolution";
+  case V1LayerParameter_LayerType_DATA:
+    return "Data";
+  case V1LayerParameter_LayerType_DROPOUT:
+    return "Dropout";
+  case V1LayerParameter_LayerType_DUMMY_DATA:
+    return "DummyData";
+  case V1LayerParameter_LayerType_EUCLIDEAN_LOSS:
+    return "EuclideanLoss";
+  case V1LayerParameter_LayerType_ELTWISE:
+    return "Eltwise";
+  case V1LayerParameter_LayerType_EXP:
+    return "Exp";
+  case V1LayerParameter_LayerType_FLATTEN:
+    return "Flatten";
+  case V1LayerParameter_LayerType_HDF5_DATA:
+    return "HDF5Data";
+  case V1LayerParameter_LayerType_HDF5_OUTPUT:
+    return "HDF5Output";
+  case V1LayerParameter_LayerType_HINGE_LOSS:
+    return "HingeLoss";
+  case V1LayerParameter_LayerType_IM2COL:
+    return "Im2col";
+  case V1LayerParameter_LayerType_IMAGE_DATA:
+    return "ImageData";
+  case V1LayerParameter_LayerType_INFOGAIN_LOSS:
+    return "InfogainLoss";
+  case V1LayerParameter_LayerType_INNER_PRODUCT:
+    return "InnerProduct";
+  case V1LayerParameter_LayerType_LRN:
+    return "LRN";
+  case V1LayerParameter_LayerType_MEMORY_DATA:
+    return "MemoryData";
+  case V1LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS:
+    return "MultinomialLogisticLoss";
+  case V1LayerParameter_LayerType_MVN:
+    return "MVN";
+  case V1LayerParameter_LayerType_POOLING:
+    return "Pooling";
+  case V1LayerParameter_LayerType_POWER:
+    return "Power";
+  case V1LayerParameter_LayerType_RELU:
+    return "ReLU";
+  case V1LayerParameter_LayerType_SIGMOID:
+    return "Sigmoid";
+  case V1LayerParameter_LayerType_SIGMOID_CROSS_ENTROPY_LOSS:
+    return "SigmoidCrossEntropyLoss";
+  case V1LayerParameter_LayerType_SILENCE:
+    return "Silence";
+  case V1LayerParameter_LayerType_SOFTMAX:
+    return "Softmax";
+  case V1LayerParameter_LayerType_SOFTMAX_LOSS:
+    return "SoftmaxWithLoss";
+  case V1LayerParameter_LayerType_SPLIT:
+    return "Split";
+  case V1LayerParameter_LayerType_SLICE:
+    return "Slice";
+  case V1LayerParameter_LayerType_TANH:
+    return "TanH";
+  case V1LayerParameter_LayerType_WINDOW_DATA:
+    return "WindowData";
+  case V1LayerParameter_LayerType_THRESHOLD:
+    return "Threshold";
+  default:
+    LOG(FATAL) << "Unknown V1LayerParameter layer type: " << type;
+    return "";
+  }
+}
+
+void ReadNetParamsFromTextFileOrDie(const string& param_file,
+                                    NetParameter* param) {
+  CHECK(ReadProtoFromTextFile(param_file, param))
+      << "Failed to parse NetParameter file: " << param_file;
+  UpgradeNetAsNeeded(param_file, param);
+}
+
+void ReadNetParamsFromBinaryFileOrDie(const string& param_file,
+                                      NetParameter* param) {
+  CHECK(ReadProtoFromBinaryFile(param_file, param))
+      << "Failed to parse NetParameter file: " << param_file;
+  UpgradeNetAsNeeded(param_file, param);
+}
+
+}  // namespace caffe
diff --git a/src/gtest/CMakeLists.txt b/src/gtest/CMakeLists.txt
new file mode 100644
index 0000000..ef7ff7e
--- /dev/null
+++ b/src/gtest/CMakeLists.txt
@@ -0,0 +1,5 @@
+add_library(gtest STATIC EXCLUDE_FROM_ALL gtest.h gtest-all.cpp)
+caffe_default_properties(gtest)
+
+#add_library(gtest_main gtest_main.cc)
+#target_link_libraries(gtest_main gtest)
diff --git a/src/gtest/gtest-all.cpp b/src/gtest/gtest-all.cpp
new file mode 100644
index 0000000..9261974
--- /dev/null
+++ b/src/gtest/gtest-all.cpp
@@ -0,0 +1,9117 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: mheule at google.com (Markus Heule)
+//
+// Google C++ Testing Framework (Google Test)
+//
+// Sometimes it's desirable to build Google Test by compiling a single file.
+// This file serves this purpose.
+
+// This line ensures that gtest.h can be compiled on its own, even
+// when it's fused.
+#include "gtest/gtest.h"
+
+// The following lines pull in the real gtest *.cc files.
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan at google.com (Zhanyong Wan)
+//
+// The Google C++ Testing Framework (Google Test)
+
+// Copyright 2007, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan at google.com (Zhanyong Wan)
+//
+// Utilities for testing Google Test itself and code that uses Google Test
+// (e.g. frameworks built on top of Google Test).
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_SPI_H_
+#define GTEST_INCLUDE_GTEST_GTEST_SPI_H_
+
+
+namespace testing {
+
+// This helper class can be used to mock out Google Test failure reporting
+// so that we can test Google Test or code that builds on Google Test.
+//
+// An object of this class appends a TestPartResult object to the
+// TestPartResultArray object given in the constructor whenever a Google Test
+// failure is reported. It can either intercept only failures that are
+// generated in the same thread that created this object or it can intercept
+// all generated failures. The scope of this mock object can be controlled with
+// the second argument to the two-argument constructor.
+class GTEST_API_ ScopedFakeTestPartResultReporter
+    : public TestPartResultReporterInterface {
+ public:
+  // The two possible mocking modes of this object.
+  enum InterceptMode {
+    INTERCEPT_ONLY_CURRENT_THREAD,  // Intercepts only thread local failures.
+    INTERCEPT_ALL_THREADS           // Intercepts all failures.
+  };
+
+  // The c'tor sets this object as the test part result reporter used
+  // by Google Test.  The 'result' parameter specifies where to report the
+  // results. This reporter will only catch failures generated in the current
+  // thread. DEPRECATED
+  explicit ScopedFakeTestPartResultReporter(TestPartResultArray* result);
+
+  // Same as above, but you can choose the interception scope of this object.
+  ScopedFakeTestPartResultReporter(InterceptMode intercept_mode,
+                                   TestPartResultArray* result);
+
+  // The d'tor restores the previous test part result reporter.
+  virtual ~ScopedFakeTestPartResultReporter();
+
+  // Appends the TestPartResult object to the TestPartResultArray
+  // received in the constructor.
+  //
+  // This method is from the TestPartResultReporterInterface
+  // interface.
+  virtual void ReportTestPartResult(const TestPartResult& result);
+ private:
+  void Init();
+
+  const InterceptMode intercept_mode_;
+  TestPartResultReporterInterface* old_reporter_;
+  TestPartResultArray* const result_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(ScopedFakeTestPartResultReporter);
+};
+
+namespace internal {
+
+// A helper class for implementing EXPECT_FATAL_FAILURE() and
+// EXPECT_NONFATAL_FAILURE().  Its destructor verifies that the given
+// TestPartResultArray contains exactly one failure that has the given
+// type and contains the given substring.  If that's not the case, a
+// non-fatal failure will be generated.
+class GTEST_API_ SingleFailureChecker {
+ public:
+  // The constructor remembers the arguments.
+  SingleFailureChecker(const TestPartResultArray* results,
+                       TestPartResult::Type type,
+                       const string& substr);
+  ~SingleFailureChecker();
+ private:
+  const TestPartResultArray* const results_;
+  const TestPartResult::Type type_;
+  const string substr_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(SingleFailureChecker);
+};
+
+}  // namespace internal
+
+}  // namespace testing
+
+// A set of macros for testing Google Test assertions or code that's expected
+// to generate Google Test fatal failures.  It verifies that the given
+// statement will cause exactly one fatal Google Test failure with 'substr'
+// being part of the failure message.
+//
+// There are two different versions of this macro. EXPECT_FATAL_FAILURE only
+// affects and considers failures generated in the current thread and
+// EXPECT_FATAL_FAILURE_ON_ALL_THREADS does the same but for all threads.
+//
+// The verification of the assertion is done correctly even when the statement
+// throws an exception or aborts the current function.
+//
+// Known restrictions:
+//   - 'statement' cannot reference local non-static variables or
+//     non-static members of the current object.
+//   - 'statement' cannot return a value.
+//   - You cannot stream a failure message to this macro.
+//
+// Note that even though the implementations of the following two
+// macros are much alike, we cannot refactor them to use a common
+// helper macro, due to some peculiarity in how the preprocessor
+// works.  The AcceptsMacroThatExpandsToUnprotectedComma test in
+// gtest_unittest.cc will fail to compile if we do that.
+#define EXPECT_FATAL_FAILURE(statement, substr) \
+  do { \
+    class GTestExpectFatalFailureHelper {\
+     public:\
+      static void Execute() { statement; }\
+    };\
+    ::testing::TestPartResultArray gtest_failures;\
+    ::testing::internal::SingleFailureChecker gtest_checker(\
+        &gtest_failures, ::testing::TestPartResult::kFatalFailure, (substr));\
+    {\
+      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(\
+          ::testing::ScopedFakeTestPartResultReporter:: \
+          INTERCEPT_ONLY_CURRENT_THREAD, &gtest_failures);\
+      GTestExpectFatalFailureHelper::Execute();\
+    }\
+  } while (::testing::internal::AlwaysFalse())
+
+#define EXPECT_FATAL_FAILURE_ON_ALL_THREADS(statement, substr) \
+  do { \
+    class GTestExpectFatalFailureHelper {\
+     public:\
+      static void Execute() { statement; }\
+    };\
+    ::testing::TestPartResultArray gtest_failures;\
+    ::testing::internal::SingleFailureChecker gtest_checker(\
+        &gtest_failures, ::testing::TestPartResult::kFatalFailure, (substr));\
+    {\
+      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(\
+          ::testing::ScopedFakeTestPartResultReporter:: \
+          INTERCEPT_ALL_THREADS, &gtest_failures);\
+      GTestExpectFatalFailureHelper::Execute();\
+    }\
+  } while (::testing::internal::AlwaysFalse())
+
+// A macro for testing Google Test assertions or code that's expected to
+// generate Google Test non-fatal failures.  It asserts that the given
+// statement will cause exactly one non-fatal Google Test failure with 'substr'
+// being part of the failure message.
+//
+// There are two different versions of this macro. EXPECT_NONFATAL_FAILURE only
+// affects and considers failures generated in the current thread and
+// EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS does the same but for all threads.
+//
+// 'statement' is allowed to reference local variables and members of
+// the current object.
+//
+// The verification of the assertion is done correctly even when the statement
+// throws an exception or aborts the current function.
+//
+// Known restrictions:
+//   - You cannot stream a failure message to this macro.
+//
+// Note that even though the implementations of the following two
+// macros are much alike, we cannot refactor them to use a common
+// helper macro, due to some peculiarity in how the preprocessor
+// works.  If we do that, the code won't compile when the user gives
+// EXPECT_NONFATAL_FAILURE() a statement that contains a macro that
+// expands to code containing an unprotected comma.  The
+// AcceptsMacroThatExpandsToUnprotectedComma test in gtest_unittest.cc
+// catches that.
+//
+// For the same reason, we have to write
+//   if (::testing::internal::AlwaysTrue()) { statement; }
+// instead of
+//   GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement)
+// to avoid an MSVC warning on unreachable code.
+#define EXPECT_NONFATAL_FAILURE(statement, substr) \
+  do {\
+    ::testing::TestPartResultArray gtest_failures;\
+    ::testing::internal::SingleFailureChecker gtest_checker(\
+        &gtest_failures, ::testing::TestPartResult::kNonFatalFailure, \
+        (substr));\
+    {\
+      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(\
+          ::testing::ScopedFakeTestPartResultReporter:: \
+          INTERCEPT_ONLY_CURRENT_THREAD, &gtest_failures);\
+      if (::testing::internal::AlwaysTrue()) { statement; }\
+    }\
+  } while (::testing::internal::AlwaysFalse())
+
+#define EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(statement, substr) \
+  do {\
+    ::testing::TestPartResultArray gtest_failures;\
+    ::testing::internal::SingleFailureChecker gtest_checker(\
+        &gtest_failures, ::testing::TestPartResult::kNonFatalFailure, \
+        (substr));\
+    {\
+      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(\
+          ::testing::ScopedFakeTestPartResultReporter::INTERCEPT_ALL_THREADS,\
+          &gtest_failures);\
+      if (::testing::internal::AlwaysTrue()) { statement; }\
+    }\
+  } while (::testing::internal::AlwaysFalse())
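+
+// Illustrative usage sketch (not part of the original sources): the macros
+// above are intended for tests of Google Test itself; the test name below is
+// hypothetical.
+TEST(FailureInterceptionExample, CatchesExpectedFailures) {
+  // Each macro passes only if its statement produces exactly one failure of
+  // the expected kind whose message contains the given substring.
+  EXPECT_NONFATAL_FAILURE(ADD_FAILURE() << "expected non-fatal failure",
+                          "expected non-fatal failure");
+  EXPECT_FATAL_FAILURE(FAIL() << "expected fatal failure",
+                       "expected fatal failure");
+}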
+
+#endif  // GTEST_INCLUDE_GTEST_GTEST_SPI_H_
+
+#include <ctype.h>
+#include <math.h>
+#include <stdarg.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <wchar.h>
+#include <wctype.h>
+
+#include <algorithm>
+#include <ostream>  // NOLINT
+#include <sstream>
+#include <vector>
+
+#if GTEST_OS_LINUX
+
+// TODO(kenton at google.com): Use autoconf to detect availability of
+// gettimeofday().
+# define GTEST_HAS_GETTIMEOFDAY_ 1
+
+# include <fcntl.h>  // NOLINT
+# include <limits.h>  // NOLINT
+# include <sched.h>  // NOLINT
+// Declares vsnprintf().  This header is not available on Windows.
+# include <strings.h>  // NOLINT
+# include <sys/mman.h>  // NOLINT
+# include <sys/time.h>  // NOLINT
+# include <unistd.h>  // NOLINT
+# include <string>
+
+#elif GTEST_OS_SYMBIAN
+# define GTEST_HAS_GETTIMEOFDAY_ 1
+# include <sys/time.h>  // NOLINT
+
+#elif GTEST_OS_ZOS
+# define GTEST_HAS_GETTIMEOFDAY_ 1
+# include <sys/time.h>  // NOLINT
+
+// On z/OS we additionally need strings.h for strcasecmp.
+# include <strings.h>  // NOLINT
+
+#elif GTEST_OS_WINDOWS_MOBILE  // We are on Windows CE.
+
+# include <windows.h>  // NOLINT
+
+#elif GTEST_OS_WINDOWS  // We are on Windows proper.
+
+# include <io.h>  // NOLINT
+# include <sys/timeb.h>  // NOLINT
+# include <sys/types.h>  // NOLINT
+# include <sys/stat.h>  // NOLINT
+
+# if GTEST_OS_WINDOWS_MINGW
+// MinGW has gettimeofday() but not _ftime64().
+// TODO(kenton at google.com): Use autoconf to detect availability of
+//   gettimeofday().
+// TODO(kenton at google.com): There are other ways to get the time on
+//   Windows, like GetTickCount() or GetSystemTimeAsFileTime().  MinGW
+//   supports these.  Consider using them instead.
+#  define GTEST_HAS_GETTIMEOFDAY_ 1
+#  include <sys/time.h>  // NOLINT
+# endif  // GTEST_OS_WINDOWS_MINGW
+
+// cpplint thinks that the header is already included, so we want to
+// silence it.
+# include <windows.h>  // NOLINT
+
+#else
+
+// Assume other platforms have gettimeofday().
+// TODO(kenton at google.com): Use autoconf to detect availability of
+//   gettimeofday().
+# define GTEST_HAS_GETTIMEOFDAY_ 1
+
+// cpplint thinks that the header is already included, so we want to
+// silence it.
+# include <sys/time.h>  // NOLINT
+# include <unistd.h>  // NOLINT
+
+#endif  // GTEST_OS_LINUX
+
+#if GTEST_HAS_EXCEPTIONS
+# include <stdexcept>
+#endif
+
+#if GTEST_CAN_STREAM_RESULTS_
+# include <arpa/inet.h>  // NOLINT
+# include <netdb.h>  // NOLINT
+#endif
+
+// Indicates that this translation unit is part of Google Test's
+// implementation.  It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error.  This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Utility functions and classes used by the Google C++ testing framework.
+//
+// Author: wan at google.com (Zhanyong Wan)
+//
+// This file contains purely Google Test's internal implementation.  Please
+// DO NOT #INCLUDE IT IN A USER PROGRAM.
+
+#ifndef GTEST_SRC_GTEST_INTERNAL_INL_H_
+#define GTEST_SRC_GTEST_INTERNAL_INL_H_
+
+// GTEST_IMPLEMENTATION_ is defined to 1 iff the current translation unit is
+// part of Google Test's implementation; otherwise it's undefined.
+#if !GTEST_IMPLEMENTATION_
+// A user is trying to include this from his code - just say no.
+# error "gtest-internal-inl.h is part of Google Test's internal implementation."
+# error "It must not be included except by Google Test itself."
+#endif  // GTEST_IMPLEMENTATION_
+
+#ifndef _WIN32_WCE
+# include <errno.h>
+#endif  // !_WIN32_WCE
+#include <stddef.h>
+#include <stdlib.h>  // For strtoll/_strtoul64/malloc/free.
+#include <string.h>  // For memmove.
+
+#include <algorithm>
+#include <string>
+#include <vector>
+
+
+#if GTEST_OS_WINDOWS
+# include <windows.h>  // NOLINT
+#endif  // GTEST_OS_WINDOWS
+
+
+namespace testing {
+
+// Declares the flags.
+//
+// We don't want the users to modify this flag in the code, but want
+// Google Test's own unit tests to be able to access it. Therefore we
+// declare it here as opposed to in gtest.h.
+GTEST_DECLARE_bool_(death_test_use_fork);
+
+namespace internal {
+
+// The value of GetTestTypeId() as seen from within the Google Test
+// library.  This is solely for testing GetTestTypeId().
+GTEST_API_ extern const TypeId kTestTypeIdInGoogleTest;
+
+// Names of the flags (needed for parsing Google Test flags).
+const char kAlsoRunDisabledTestsFlag[] = "also_run_disabled_tests";
+const char kBreakOnFailureFlag[] = "break_on_failure";
+const char kCatchExceptionsFlag[] = "catch_exceptions";
+const char kColorFlag[] = "color";
+const char kFilterFlag[] = "filter";
+const char kListTestsFlag[] = "list_tests";
+const char kOutputFlag[] = "output";
+const char kPrintTimeFlag[] = "print_time";
+const char kRandomSeedFlag[] = "random_seed";
+const char kRepeatFlag[] = "repeat";
+const char kShuffleFlag[] = "shuffle";
+const char kStackTraceDepthFlag[] = "stack_trace_depth";
+const char kStreamResultToFlag[] = "stream_result_to";
+const char kThrowOnFailureFlag[] = "throw_on_failure";
+
+// A valid random seed must be in [1, kMaxRandomSeed].
+const int kMaxRandomSeed = 99999;
+
+// g_help_flag is true iff the --help flag or an equivalent form is
+// specified on the command line.
+GTEST_API_ extern bool g_help_flag;
+
+// Returns the current time in milliseconds.
+GTEST_API_ TimeInMillis GetTimeInMillis();
+
+// Returns true iff Google Test should use colors in the output.
+GTEST_API_ bool ShouldUseColor(bool stdout_is_tty);
+
+// Formats the given time in milliseconds as seconds.
+GTEST_API_ std::string FormatTimeInMillisAsSeconds(TimeInMillis ms);
+
+// Parses a string for an Int32 flag, in the form of "--flag=value".
+//
+// On success, stores the value of the flag in *value, and returns
+// true.  On failure, returns false without changing *value.
+GTEST_API_ bool ParseInt32Flag(
+    const char* str, const char* flag, Int32* value);
+
+// Returns a random seed in range [1, kMaxRandomSeed] based on the
+// given --gtest_random_seed flag value.
+inline int GetRandomSeedFromFlag(Int32 random_seed_flag) {
+  const unsigned int raw_seed = (random_seed_flag == 0) ?
+      static_cast<unsigned int>(GetTimeInMillis()) :
+      static_cast<unsigned int>(random_seed_flag);
+
+  // Normalizes the actual seed to range [1, kMaxRandomSeed] such that
+  // it's easy to type.
+  const int normalized_seed =
+      static_cast<int>((raw_seed - 1U) %
+                       static_cast<unsigned int>(kMaxRandomSeed)) + 1;
+  return normalized_seed;
+}
+
+// Returns the first valid random seed after 'seed'.  The behavior is
+// undefined if 'seed' is invalid.  The seed after kMaxRandomSeed is
+// considered to be 1.
+inline int GetNextRandomSeed(int seed) {
+  GTEST_CHECK_(1 <= seed && seed <= kMaxRandomSeed)
+      << "Invalid random seed " << seed << " - must be in [1, "
+      << kMaxRandomSeed << "].";
+  const int next_seed = seed + 1;
+  return (next_seed > kMaxRandomSeed) ? 1 : next_seed;
+}
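+
+// Worked example (illustrative, following the definitions above):
+// GetRandomSeedFromFlag(1) == 1, GetRandomSeedFromFlag(100000) normalizes to
+// 1, and GetRandomSeedFromFlag(0) derives the seed from the current time;
+// GetNextRandomSeed(kMaxRandomSeed) wraps around to 1.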
+
+// This class saves the values of all Google Test flags in its c'tor, and
+// restores them in its d'tor.
+class GTestFlagSaver {
+ public:
+  // The c'tor.
+  GTestFlagSaver() {
+    also_run_disabled_tests_ = GTEST_FLAG(also_run_disabled_tests);
+    break_on_failure_ = GTEST_FLAG(break_on_failure);
+    catch_exceptions_ = GTEST_FLAG(catch_exceptions);
+    color_ = GTEST_FLAG(color);
+    death_test_style_ = GTEST_FLAG(death_test_style);
+    death_test_use_fork_ = GTEST_FLAG(death_test_use_fork);
+    filter_ = GTEST_FLAG(filter);
+    internal_run_death_test_ = GTEST_FLAG(internal_run_death_test);
+    list_tests_ = GTEST_FLAG(list_tests);
+    output_ = GTEST_FLAG(output);
+    print_time_ = GTEST_FLAG(print_time);
+    random_seed_ = GTEST_FLAG(random_seed);
+    repeat_ = GTEST_FLAG(repeat);
+    shuffle_ = GTEST_FLAG(shuffle);
+    stack_trace_depth_ = GTEST_FLAG(stack_trace_depth);
+    stream_result_to_ = GTEST_FLAG(stream_result_to);
+    throw_on_failure_ = GTEST_FLAG(throw_on_failure);
+  }
+
+  // The d'tor is not virtual.  DO NOT INHERIT FROM THIS CLASS.
+  ~GTestFlagSaver() {
+    GTEST_FLAG(also_run_disabled_tests) = also_run_disabled_tests_;
+    GTEST_FLAG(break_on_failure) = break_on_failure_;
+    GTEST_FLAG(catch_exceptions) = catch_exceptions_;
+    GTEST_FLAG(color) = color_;
+    GTEST_FLAG(death_test_style) = death_test_style_;
+    GTEST_FLAG(death_test_use_fork) = death_test_use_fork_;
+    GTEST_FLAG(filter) = filter_;
+    GTEST_FLAG(internal_run_death_test) = internal_run_death_test_;
+    GTEST_FLAG(list_tests) = list_tests_;
+    GTEST_FLAG(output) = output_;
+    GTEST_FLAG(print_time) = print_time_;
+    GTEST_FLAG(random_seed) = random_seed_;
+    GTEST_FLAG(repeat) = repeat_;
+    GTEST_FLAG(shuffle) = shuffle_;
+    GTEST_FLAG(stack_trace_depth) = stack_trace_depth_;
+    GTEST_FLAG(stream_result_to) = stream_result_to_;
+    GTEST_FLAG(throw_on_failure) = throw_on_failure_;
+  }
+ private:
+  // Fields for saving the original values of flags.
+  bool also_run_disabled_tests_;
+  bool break_on_failure_;
+  bool catch_exceptions_;
+  String color_;
+  String death_test_style_;
+  bool death_test_use_fork_;
+  String filter_;
+  String internal_run_death_test_;
+  bool list_tests_;
+  String output_;
+  bool print_time_;
+  internal::Int32 random_seed_;
+  internal::Int32 repeat_;
+  bool shuffle_;
+  internal::Int32 stack_trace_depth_;
+  String stream_result_to_;
+  bool throw_on_failure_;
+} GTEST_ATTRIBUTE_UNUSED_;
+
+// Converts a Unicode code point to a narrow string in UTF-8 encoding.
+// code_point parameter is of type UInt32 because wchar_t may not be
+// wide enough to contain a code point.
+// The output buffer str must contain at least 32 characters.
+// The function returns the address of the output buffer.
+// If the code_point is not a valid Unicode code point
+// (i.e. outside of Unicode range U+0 to U+10FFFF) it will be output
+// as '(Invalid Unicode 0xXXXXXXXX)'.
+GTEST_API_ char* CodePointToUtf8(UInt32 code_point, char* str);
+
+// Converts a wide string to a narrow string in UTF-8 encoding.
+// The wide string is assumed to have the following encoding:
+//   UTF-16 if sizeof(wchar_t) == 2 (on Windows, Cygwin, Symbian OS)
+//   UTF-32 if sizeof(wchar_t) == 4 (on Linux)
+// Parameter str points to a null-terminated wide string.
+// Parameter num_chars may additionally limit the number
+// of wchar_t characters processed. -1 is used when the entire string
+// should be processed.
+// If the string contains code points that are not valid Unicode code points
+// (i.e. outside of Unicode range U+0 to U+10FFFF) they will be output
+// as '(Invalid Unicode 0xXXXXXXXX)'. If the string is in UTF-16 encoding
+// and contains invalid UTF-16 surrogate pairs, values in those pairs will be
+// encoded as individual Unicode characters from the Basic Multilingual Plane.
+GTEST_API_ String WideStringToUtf8(const wchar_t* str, int num_chars);
+
+// Reads the GTEST_SHARD_STATUS_FILE environment variable, and creates the file
+// if the variable is present. If a file already exists at this location, this
+// function will write over it. If the variable is present, but the file cannot
+// be created, prints an error and exits.
+void WriteToShardStatusFileIfNeeded();
+
+// Checks whether sharding is enabled by examining the relevant
+// environment variable values. If the variables are present,
+// but inconsistent (e.g., shard_index >= total_shards), prints
+// an error and exits. If in_subprocess_for_death_test, sharding is
+// disabled because it must only be applied to the original test
+// process. Otherwise, we could filter out death tests we intended to execute.
+GTEST_API_ bool ShouldShard(const char* total_shards_str,
+                            const char* shard_index_str,
+                            bool in_subprocess_for_death_test);
+
+// Parses the environment variable var as an Int32. If it is unset,
+// returns default_val. If it is not an Int32, prints an error and
+// aborts.
+GTEST_API_ Int32 Int32FromEnvOrDie(const char* env_var, Int32 default_val);
+
+// Given the total number of shards, the shard index, and the test id,
+// returns true iff the test should be run on this shard. The test id is
+// some arbitrary but unique non-negative integer assigned to each test
+// method. Assumes that 0 <= shard_index < total_shards.
+GTEST_API_ bool ShouldRunTestOnShard(
+    int total_shards, int shard_index, int test_id);
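+
+// For example (illustrative): with GTEST_TOTAL_SHARDS=3, each of the three
+// shard processes runs only the tests whose ids are assigned to its
+// GTEST_SHARD_INDEX, so together the shards run every test exactly once.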
+
+// STL container utilities.
+
+// Returns the number of elements in the given container that satisfy
+// the given predicate.
+template <class Container, typename Predicate>
+inline int CountIf(const Container& c, Predicate predicate) {
+  // Implemented as an explicit loop since std::count_if() in libCstd on
+  // Solaris has a non-standard signature.
+  int count = 0;
+  for (typename Container::const_iterator it = c.begin(); it != c.end(); ++it) {
+    if (predicate(*it))
+      ++count;
+  }
+  return count;
+}
+
+// Applies a function/functor to each element in the container.
+template <class Container, typename Functor>
+void ForEach(const Container& c, Functor functor) {
+  std::for_each(c.begin(), c.end(), functor);
+}
+
+// Returns the i-th element of the vector, or default_value if i is not
+// in range [0, v.size()).
+template <typename E>
+inline E GetElementOr(const std::vector<E>& v, int i, E default_value) {
+  return (i < 0 || i >= static_cast<int>(v.size())) ? default_value : v[i];
+}
+
+// Performs an in-place shuffle of a range of the vector's elements.
+// 'begin' and 'end' are element indices as an STL-style range;
+// i.e. [begin, end) are shuffled, where 'end' == size() means to
+// shuffle to the end of the vector.
+template <typename E>
+void ShuffleRange(internal::Random* random, int begin, int end,
+                  std::vector<E>* v) {
+  const int size = static_cast<int>(v->size());
+  GTEST_CHECK_(0 <= begin && begin <= size)
+      << "Invalid shuffle range start " << begin << ": must be in range [0, "
+      << size << "].";
+  GTEST_CHECK_(begin <= end && end <= size)
+      << "Invalid shuffle range finish " << end << ": must be in range ["
+      << begin << ", " << size << "].";
+
+  // Fisher-Yates shuffle, from
+  // http://en.wikipedia.org/wiki/Fisher-Yates_shuffle
+  for (int range_width = end - begin; range_width >= 2; range_width--) {
+    const int last_in_range = begin + range_width - 1;
+    const int selected = begin + random->Generate(range_width);
+    std::swap((*v)[selected], (*v)[last_in_range]);
+  }
+}
+
+// Performs an in-place shuffle of the vector's elements.
+template <typename E>
+inline void Shuffle(internal::Random* random, std::vector<E>* v) {
+  ShuffleRange(random, 0, static_cast<int>(v->size()), v);
+}
+
+// A function for deleting an object.  Handy for being used as a
+// functor.
+template <typename T>
+static void Delete(T* x) {
+  delete x;
+}
+
+// A predicate that checks the key of a TestProperty against a known key.
+//
+// TestPropertyKeyIs is copyable.
+class TestPropertyKeyIs {
+ public:
+  // Constructor.
+  //
+  // TestPropertyKeyIs has NO default constructor.
+  explicit TestPropertyKeyIs(const char* key)
+      : key_(key) {}
+
+  // Returns true iff the key of the test property matches key_.
+  bool operator()(const TestProperty& test_property) const {
+    return String(test_property.key()).Compare(key_) == 0;
+  }
+
+ private:
+  String key_;
+};
+
+// Class UnitTestOptions.
+//
+// This class contains functions for processing options the user
+// specifies when running the tests.  It has only static members.
+//
+// In most cases, the user can specify an option using either an
+// environment variable or a command line flag.  E.g. you can set the
+// test filter using either GTEST_FILTER or --gtest_filter.  If both
+// the variable and the flag are present, the latter overrides the
+// former.
+class GTEST_API_ UnitTestOptions {
+ public:
+  // Functions for processing the gtest_output flag.
+
+  // Returns the output format, or "" for normal printed output.
+  static String GetOutputFormat();
+
+  // Returns the absolute path of the requested output file, or the
+  // default (test_detail.xml in the original working directory) if
+  // none was explicitly specified.
+  static String GetAbsolutePathToOutputFile();
+
+  // Functions for processing the gtest_filter flag.
+
+  // Returns true iff the wildcard pattern matches the string.  The
+  // first ':' or '\0' character in pattern marks the end of it.
+  //
+  // This recursive algorithm isn't very efficient, but is clear and
+  // works well enough for matching test names, which are short.
+  static bool PatternMatchesString(const char *pattern, const char *str);
+
+  // Returns true iff the user-specified filter matches the test case
+  // name and the test name.
+  static bool FilterMatchesTest(const String &test_case_name,
+                                const String &test_name);
+
+#if GTEST_OS_WINDOWS
+  // Function for supporting the gtest_catch_exceptions flag.
+
+  // Returns EXCEPTION_EXECUTE_HANDLER if Google Test should handle the
+  // given SEH exception, or EXCEPTION_CONTINUE_SEARCH otherwise.
+  // This function is useful as an __except condition.
+  static int GTestShouldProcessSEH(DWORD exception_code);
+#endif  // GTEST_OS_WINDOWS
+
+  // Returns true if "name" matches the ':' separated list of glob-style
+  // filters in "filter".
+  static bool MatchesFilter(const String& name, const char* filter);
+};
+
+// Returns the current application's name, removing directory path if that
+// is present.  Used by UnitTestOptions::GetOutputFile.
+GTEST_API_ FilePath GetCurrentExecutableName();
+
+// The role interface for getting the OS stack trace as a string.
+class OsStackTraceGetterInterface {
+ public:
+  OsStackTraceGetterInterface() {}
+  virtual ~OsStackTraceGetterInterface() {}
+
+  // Returns the current OS stack trace as a String.  Parameters:
+  //
+  //   max_depth  - the maximum number of stack frames to be included
+  //                in the trace.
+  //   skip_count - the number of top frames to be skipped; doesn't count
+  //                against max_depth.
+  virtual String CurrentStackTrace(int max_depth, int skip_count) = 0;
+
+  // UponLeavingGTest() should be called immediately before Google Test calls
+  // user code. It saves some information about the current stack that
+  // CurrentStackTrace() will use to find and hide Google Test stack frames.
+  virtual void UponLeavingGTest() = 0;
+
+ private:
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(OsStackTraceGetterInterface);
+};
+
+// A working implementation of the OsStackTraceGetterInterface interface.
+class OsStackTraceGetter : public OsStackTraceGetterInterface {
+ public:
+  OsStackTraceGetter() : caller_frame_(NULL) {}
+  virtual String CurrentStackTrace(int max_depth, int skip_count);
+  virtual void UponLeavingGTest();
+
+  // This string is inserted in place of stack frames that are part of
+  // Google Test's implementation.
+  static const char* const kElidedFramesMarker;
+
+ private:
+  Mutex mutex_;  // protects all internal state
+
+  // We save the stack frame below the frame that calls user code.
+  // We do this because the address of the frame immediately below
+  // the user code changes between the call to UponLeavingGTest()
+  // and any calls to CurrentStackTrace() from within the user code.
+  void* caller_frame_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(OsStackTraceGetter);
+};
+
+// Information about a Google Test trace point.
+struct TraceInfo {
+  const char* file;
+  int line;
+  String message;
+};
+
+// This is the default global test part result reporter used in UnitTestImpl.
+// This class should only be used by UnitTestImpl.
+class DefaultGlobalTestPartResultReporter
+  : public TestPartResultReporterInterface {
+ public:
+  explicit DefaultGlobalTestPartResultReporter(UnitTestImpl* unit_test);
+  // Implements the TestPartResultReporterInterface. Reports the test part
+  // result in the current test.
+  virtual void ReportTestPartResult(const TestPartResult& result);
+
+ private:
+  UnitTestImpl* const unit_test_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultGlobalTestPartResultReporter);
+};
+
+// This is the default per thread test part result reporter used in
+// UnitTestImpl. This class should only be used by UnitTestImpl.
+class DefaultPerThreadTestPartResultReporter
+    : public TestPartResultReporterInterface {
+ public:
+  explicit DefaultPerThreadTestPartResultReporter(UnitTestImpl* unit_test);
+  // Implements the TestPartResultReporterInterface. The implementation just
+  // delegates to the current global test part result reporter of *unit_test_.
+  virtual void ReportTestPartResult(const TestPartResult& result);
+
+ private:
+  UnitTestImpl* const unit_test_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultPerThreadTestPartResultReporter);
+};
+
+// The private implementation of the UnitTest class.  We don't protect
+// the methods under a mutex, as this class is not accessible by a
+// user and the UnitTest class that delegates work to this class does
+// proper locking.
+class GTEST_API_ UnitTestImpl {
+ public:
+  explicit UnitTestImpl(UnitTest* parent);
+  virtual ~UnitTestImpl();
+
+  // There are two different ways to register your own TestPartResultReporter.
+  // You can register your own reporter to listen either only for test
+  // results from the current thread or for results from all threads.
+  // By default, each per-thread test result reporter just passes a new
+  // TestPartResult to the global test result reporter, which registers the
+  // test part result for the currently running test.
+
+  // Returns the global test part result reporter.
+  TestPartResultReporterInterface* GetGlobalTestPartResultReporter();
+
+  // Sets the global test part result reporter.
+  void SetGlobalTestPartResultReporter(
+      TestPartResultReporterInterface* reporter);
+
+  // Returns the test part result reporter for the current thread.
+  TestPartResultReporterInterface* GetTestPartResultReporterForCurrentThread();
+
+  // Sets the test part result reporter for the current thread.
+  void SetTestPartResultReporterForCurrentThread(
+      TestPartResultReporterInterface* reporter);
+
+  // Gets the number of successful test cases.
+  int successful_test_case_count() const;
+
+  // Gets the number of failed test cases.
+  int failed_test_case_count() const;
+
+  // Gets the number of all test cases.
+  int total_test_case_count() const;
+
+  // Gets the number of all test cases that contain at least one test
+  // that should run.
+  int test_case_to_run_count() const;
+
+  // Gets the number of successful tests.
+  int successful_test_count() const;
+
+  // Gets the number of failed tests.
+  int failed_test_count() const;
+
+  // Gets the number of disabled tests.
+  int disabled_test_count() const;
+
+  // Gets the number of all tests.
+  int total_test_count() const;
+
+  // Gets the number of tests that should run.
+  int test_to_run_count() const;
+
+  // Gets the elapsed time, in milliseconds.
+  TimeInMillis elapsed_time() const { return elapsed_time_; }
+
+  // Returns true iff the unit test passed (i.e. all test cases passed).
+  bool Passed() const { return !Failed(); }
+
+  // Returns true iff the unit test failed (i.e. some test case failed
+  // or something outside of all tests failed).
+  bool Failed() const {
+    return failed_test_case_count() > 0 || ad_hoc_test_result()->Failed();
+  }
+
+  // Gets the i-th test case among all the test cases. i can range from 0 to
+  // total_test_case_count() - 1. If i is not in that range, returns NULL.
+  const TestCase* GetTestCase(int i) const {
+    const int index = GetElementOr(test_case_indices_, i, -1);
+    return index < 0 ? NULL : test_cases_[index];
+  }
+
+  // Gets the i-th test case among all the test cases. i can range from 0 to
+  // total_test_case_count() - 1. If i is not in that range, returns NULL.
+  TestCase* GetMutableTestCase(int i) {
+    const int index = GetElementOr(test_case_indices_, i, -1);
+    return index < 0 ? NULL : test_cases_[index];
+  }
+
+  // Provides access to the event listener list.
+  TestEventListeners* listeners() { return &listeners_; }
+
+  // Returns the TestResult for the test that's currently running, or
+  // the TestResult for the ad hoc test if no test is running.
+  TestResult* current_test_result();
+
+  // Returns the TestResult for the ad hoc test.
+  const TestResult* ad_hoc_test_result() const { return &ad_hoc_test_result_; }
+
+  // Sets the OS stack trace getter.
+  //
+  // Does nothing if the input and the current OS stack trace getter
+  // are the same; otherwise, deletes the old getter and makes the
+  // input the current getter.
+  void set_os_stack_trace_getter(OsStackTraceGetterInterface* getter);
+
+  // Returns the current OS stack trace getter if it is not NULL;
+  // otherwise, creates an OsStackTraceGetter, makes it the current
+  // getter, and returns it.
+  OsStackTraceGetterInterface* os_stack_trace_getter();
+
+  // Returns the current OS stack trace as a String.
+  //
+  // The maximum number of stack frames to be included is specified by
+  // the gtest_stack_trace_depth flag.  The skip_count parameter
+  // specifies the number of top frames to be skipped, which doesn't
+  // count against the number of frames to be included.
+  //
+  // For example, if Foo() calls Bar(), which in turn calls
+  // CurrentOsStackTraceExceptTop(1), Foo() will be included in the
+  // trace but Bar() and CurrentOsStackTraceExceptTop() won't.
+  String CurrentOsStackTraceExceptTop(int skip_count);
+
+  // Finds and returns a TestCase with the given name.  If one doesn't
+  // exist, creates one and returns it.
+  //
+  // Arguments:
+  //
+  //   test_case_name: name of the test case
+  //   type_param:     the name of the test's type parameter, or NULL if
+  //                   this is not a typed or a type-parameterized test.
+  //   set_up_tc:      pointer to the function that sets up the test case
+  //   tear_down_tc:   pointer to the function that tears down the test case
+  TestCase* GetTestCase(const char* test_case_name,
+                        const char* type_param,
+                        Test::SetUpTestCaseFunc set_up_tc,
+                        Test::TearDownTestCaseFunc tear_down_tc);
+
+  // Adds a TestInfo to the unit test.
+  //
+  // Arguments:
+  //
+  //   set_up_tc:    pointer to the function that sets up the test case
+  //   tear_down_tc: pointer to the function that tears down the test case
+  //   test_info:    the TestInfo object
+  void AddTestInfo(Test::SetUpTestCaseFunc set_up_tc,
+                   Test::TearDownTestCaseFunc tear_down_tc,
+                   TestInfo* test_info) {
+    // In order to support thread-safe death tests, we need to
+    // remember the original working directory when the test program
+    // was first invoked.  We cannot do this in RUN_ALL_TESTS(), as
+    // the user may have changed the current directory before calling
+    // RUN_ALL_TESTS().  Therefore we capture the current directory in
+    // AddTestInfo(), which is called to register a TEST or TEST_F
+    // before main() is reached.
+    if (original_working_dir_.IsEmpty()) {
+      original_working_dir_.Set(FilePath::GetCurrentDir());
+      GTEST_CHECK_(!original_working_dir_.IsEmpty())
+          << "Failed to get the current working directory.";
+    }
+
+    GetTestCase(test_info->test_case_name(),
+                test_info->type_param(),
+                set_up_tc,
+                tear_down_tc)->AddTestInfo(test_info);
+  }
+
+#if GTEST_HAS_PARAM_TEST
+  // Returns ParameterizedTestCaseRegistry object used to keep track of
+  // value-parameterized tests and instantiate and register them.
+  internal::ParameterizedTestCaseRegistry& parameterized_test_registry() {
+    return parameterized_test_registry_;
+  }
+#endif  // GTEST_HAS_PARAM_TEST
+
+  // Sets the TestCase object for the test that's currently running.
+  void set_current_test_case(TestCase* a_current_test_case) {
+    current_test_case_ = a_current_test_case;
+  }
+
+  // Sets the TestInfo object for the test that's currently running.  If
+  // current_test_info is NULL, the assertion results will be stored in
+  // ad_hoc_test_result_.
+  void set_current_test_info(TestInfo* a_current_test_info) {
+    current_test_info_ = a_current_test_info;
+  }
+
+  // Registers all parameterized tests defined using TEST_P and
+  // INSTANTIATE_TEST_CASE_P, creating regular tests for each test/parameter
+  // combination. This method can be called more than once; it has guards
+  // protecting from registering the tests more than once.  If
+  // value-parameterized tests are disabled, RegisterParameterizedTests is
+  // present but does nothing.
+  void RegisterParameterizedTests();
+
+  // Runs all tests in this UnitTest object, prints the result, and
+  // returns true if all tests are successful.  If any exception is
+  // thrown during a test, this test is considered to be failed, but
+  // the rest of the tests will still be run.
+  bool RunAllTests();
+
+  // Clears the results of all tests, except the ad hoc tests.
+  void ClearNonAdHocTestResult() {
+    ForEach(test_cases_, TestCase::ClearTestCaseResult);
+  }
+
+  // Clears the results of ad-hoc test assertions.
+  void ClearAdHocTestResult() {
+    ad_hoc_test_result_.Clear();
+  }
+
+  enum ReactionToSharding {
+    HONOR_SHARDING_PROTOCOL,
+    IGNORE_SHARDING_PROTOCOL
+  };
+
+  // Matches the full name of each test against the user-specified
+  // filter to decide whether the test should run, then records the
+  // result in each TestCase and TestInfo object.
+  // If shard_tests == HONOR_SHARDING_PROTOCOL, further filters tests
+  // based on sharding variables in the environment.
+  // Returns the number of tests that should run.
+  int FilterTests(ReactionToSharding shard_tests);
+
+  // Prints the names of the tests matching the user-specified filter flag.
+  void ListTestsMatchingFilter();
+
+  const TestCase* current_test_case() const { return current_test_case_; }
+  TestInfo* current_test_info() { return current_test_info_; }
+  const TestInfo* current_test_info() const { return current_test_info_; }
+
+  // Returns the vector of environments that need to be set-up/torn-down
+  // before/after the tests are run.
+  std::vector<Environment*>& environments() { return environments_; }
+
+  // Getters for the per-thread Google Test trace stack.
+  std::vector<TraceInfo>& gtest_trace_stack() {
+    return *(gtest_trace_stack_.pointer());
+  }
+  const std::vector<TraceInfo>& gtest_trace_stack() const {
+    return gtest_trace_stack_.get();
+  }
+
+#if GTEST_HAS_DEATH_TEST
+  void InitDeathTestSubprocessControlInfo() {
+    internal_run_death_test_flag_.reset(ParseInternalRunDeathTestFlag());
+  }
+  // Returns a pointer to the parsed --gtest_internal_run_death_test
+  // flag, or NULL if that flag was not specified.
+  // This information is useful only in a death test child process.
+  // Must not be called before a call to InitGoogleTest.
+  const InternalRunDeathTestFlag* internal_run_death_test_flag() const {
+    return internal_run_death_test_flag_.get();
+  }
+
+  // Returns a pointer to the current death test factory.
+  internal::DeathTestFactory* death_test_factory() {
+    return death_test_factory_.get();
+  }
+
+  void SuppressTestEventsIfInSubprocess();
+
+  friend class ReplaceDeathTestFactory;
+#endif  // GTEST_HAS_DEATH_TEST
+
+  // Initializes the event listener performing XML output as specified by
+  // UnitTestOptions. Must not be called before InitGoogleTest.
+  void ConfigureXmlOutput();
+
+#if GTEST_CAN_STREAM_RESULTS_
+  // Initializes the event listener for streaming test results to a socket.
+  // Must not be called before InitGoogleTest.
+  void ConfigureStreamingOutput();
+#endif
+
+  // Performs initialization dependent upon flag values obtained in
+  // ParseGoogleTestFlagsOnly.  Is called from InitGoogleTest after the call to
+  // ParseGoogleTestFlagsOnly.  In case a user neglects to call InitGoogleTest
+  // this function is also called from RunAllTests.  Since this function can be
+  // called more than once, it has to be idempotent.
+  void PostFlagParsingInit();
+
+  // Gets the random seed used at the start of the current test iteration.
+  int random_seed() const { return random_seed_; }
+
+  // Gets the random number generator.
+  internal::Random* random() { return &random_; }
+
+  // Shuffles all test cases, and the tests within each test case,
+  // making sure that death tests are still run first.
+  void ShuffleTests();
+
+  // Restores the test cases and tests to their order before the first shuffle.
+  void UnshuffleTests();
+
+  // Returns the value of GTEST_FLAG(catch_exceptions) at the moment
+  // UnitTest::Run() starts.
+  bool catch_exceptions() const { return catch_exceptions_; }
+
+ private:
+  friend class ::testing::UnitTest;
+
+  // Used by UnitTest::Run() to capture the state of
+  // GTEST_FLAG(catch_exceptions) at the moment it starts.
+  void set_catch_exceptions(bool value) { catch_exceptions_ = value; }
+
+  // The UnitTest object that owns this implementation object.
+  UnitTest* const parent_;
+
+  // The working directory when the first TEST() or TEST_F() was
+  // executed.
+  internal::FilePath original_working_dir_;
+
+  // The default test part result reporters.
+  DefaultGlobalTestPartResultReporter default_global_test_part_result_reporter_;
+  DefaultPerThreadTestPartResultReporter
+      default_per_thread_test_part_result_reporter_;
+
+  // Points to (but doesn't own) the global test part result reporter.
+  TestPartResultReporterInterface* global_test_part_result_repoter_;
+
+  // Protects read and write access to global_test_part_result_reporter_.
+  internal::Mutex global_test_part_result_reporter_mutex_;
+
+  // Points to (but doesn't own) the per-thread test part result reporter.
+  internal::ThreadLocal<TestPartResultReporterInterface*>
+      per_thread_test_part_result_reporter_;
+
+  // The vector of environments that need to be set-up/torn-down
+  // before/after the tests are run.
+  std::vector<Environment*> environments_;
+
+  // The vector of TestCases in their original order.  It owns the
+  // elements in the vector.
+  std::vector<TestCase*> test_cases_;
+
+  // Provides a level of indirection for the test case list to allow
+  // easy shuffling and restoring the test case order.  The i-th
+  // element of this vector is the index of the i-th test case in the
+  // shuffled order.
+  std::vector<int> test_case_indices_;
+
+#if GTEST_HAS_PARAM_TEST
+  // ParameterizedTestRegistry object used to register value-parameterized
+  // tests.
+  internal::ParameterizedTestCaseRegistry parameterized_test_registry_;
+
+  // Indicates whether RegisterParameterizedTests() has been called already.
+  bool parameterized_tests_registered_;
+#endif  // GTEST_HAS_PARAM_TEST
+
+  // Index of the last death test case registered.  Initially -1.
+  int last_death_test_case_;
+
+  // This points to the TestCase for the currently running test.  It
+  // changes as Google Test goes through one test case after another.
+  // When no test is running, this is set to NULL and Google Test
+  // stores assertion results in ad_hoc_test_result_.  Initially NULL.
+  TestCase* current_test_case_;
+
+  // This points to the TestInfo for the currently running test.  It
+  // changes as Google Test goes through one test after another.  When
+  // no test is running, this is set to NULL and Google Test stores
+  // assertion results in ad_hoc_test_result_.  Initially NULL.
+  TestInfo* current_test_info_;
+
+  // Normally, a user only writes assertions inside a TEST or TEST_F,
+  // or inside a function called by a TEST or TEST_F.  Since Google
+  // Test keeps track of which test is currently running, it can
+  // associate such an assertion with the test it belongs to.
+  //
+  // If an assertion is encountered when no TEST or TEST_F is running,
+  // Google Test attributes the assertion result to an imaginary "ad hoc"
+  // test, and records the result in ad_hoc_test_result_.
+  TestResult ad_hoc_test_result_;
+
+  // The list of event listeners that can be used to track events inside
+  // Google Test.
+  TestEventListeners listeners_;
+
+  // The OS stack trace getter.  Will be deleted when the UnitTest
+  // object is destructed.  By default, an OsStackTraceGetter is used,
+  // but the user can set this field to use a custom getter if that is
+  // desired.
+  OsStackTraceGetterInterface* os_stack_trace_getter_;
+
+  // True iff PostFlagParsingInit() has been called.
+  bool post_flag_parse_init_performed_;
+
+  // The random number seed used at the beginning of the test run.
+  int random_seed_;
+
+  // Our random number generator.
+  internal::Random random_;
+
+  // How long the test took to run, in milliseconds.
+  TimeInMillis elapsed_time_;
+
+#if GTEST_HAS_DEATH_TEST
+  // The decomposed components of the gtest_internal_run_death_test flag,
+  // parsed when RUN_ALL_TESTS is called.
+  internal::scoped_ptr<InternalRunDeathTestFlag> internal_run_death_test_flag_;
+  internal::scoped_ptr<internal::DeathTestFactory> death_test_factory_;
+#endif  // GTEST_HAS_DEATH_TEST
+
+  // A per-thread stack of traces created by the SCOPED_TRACE() macro.
+  internal::ThreadLocal<std::vector<TraceInfo> > gtest_trace_stack_;
+
+  // The value of GTEST_FLAG(catch_exceptions) at the moment RunAllTests()
+  // starts.
+  bool catch_exceptions_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(UnitTestImpl);
+};  // class UnitTestImpl
+
+// Convenience function for accessing the global UnitTest
+// implementation object.
+inline UnitTestImpl* GetUnitTestImpl() {
+  return UnitTest::GetInstance()->impl();
+}
+
+#if GTEST_USES_SIMPLE_RE
+
+// Internal helper functions for implementing the simple regular
+// expression matcher.
+GTEST_API_ bool IsInSet(char ch, const char* str);
+GTEST_API_ bool IsAsciiDigit(char ch);
+GTEST_API_ bool IsAsciiPunct(char ch);
+GTEST_API_ bool IsRepeat(char ch);
+GTEST_API_ bool IsAsciiWhiteSpace(char ch);
+GTEST_API_ bool IsAsciiWordChar(char ch);
+GTEST_API_ bool IsValidEscape(char ch);
+GTEST_API_ bool AtomMatchesChar(bool escaped, char pattern, char ch);
+GTEST_API_ bool ValidateRegex(const char* regex);
+GTEST_API_ bool MatchRegexAtHead(const char* regex, const char* str);
+GTEST_API_ bool MatchRepetitionAndRegexAtHead(
+    bool escaped, char ch, char repeat, const char* regex, const char* str);
+GTEST_API_ bool MatchRegexAnywhere(const char* regex, const char* str);
+
+#endif  // GTEST_USES_SIMPLE_RE
+
+// Parses the command line for Google Test flags, without initializing
+// other parts of Google Test.
+GTEST_API_ void ParseGoogleTestFlagsOnly(int* argc, char** argv);
+GTEST_API_ void ParseGoogleTestFlagsOnly(int* argc, wchar_t** argv);
+
+#if GTEST_HAS_DEATH_TEST
+
+// Returns the message describing the last system error, regardless of the
+// platform.
+GTEST_API_ String GetLastErrnoDescription();
+
+# if GTEST_OS_WINDOWS
+// Provides leak-safe Windows kernel handle ownership.
+class AutoHandle {
+ public:
+  AutoHandle() : handle_(INVALID_HANDLE_VALUE) {}
+  explicit AutoHandle(HANDLE handle) : handle_(handle) {}
+
+  ~AutoHandle() { Reset(); }
+
+  HANDLE Get() const { return handle_; }
+  void Reset() { Reset(INVALID_HANDLE_VALUE); }
+  void Reset(HANDLE handle) {
+    if (handle != handle_) {
+      if (handle_ != INVALID_HANDLE_VALUE)
+        ::CloseHandle(handle_);
+      handle_ = handle;
+    }
+  }
+
+ private:
+  HANDLE handle_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(AutoHandle);
+};
+# endif  // GTEST_OS_WINDOWS
+
+// Attempts to parse a string into a positive integer pointed to by the
+// number parameter.  Returns true if that is possible.
+// GTEST_HAS_DEATH_TEST implies that we have ::std::string, so we can use
+// it here.
+template <typename Integer>
+bool ParseNaturalNumber(const ::std::string& str, Integer* number) {
+  // Fail fast if the given string does not begin with a digit;
+  // this bypasses strtoXXX's "optional leading whitespace and plus
+  // or minus sign" semantics, which are undesirable here.
+  if (str.empty() || !IsDigit(str[0])) {
+    return false;
+  }
+  errno = 0;
+
+  char* end;
+  // BiggestConvertible is the largest integer type that system-provided
+  // string-to-number conversion routines can return.
+
+# if GTEST_OS_WINDOWS && !defined(__GNUC__)
+
+  // MSVC and C++ Builder define __int64 instead of the standard long long.
+  typedef unsigned __int64 BiggestConvertible;
+  const BiggestConvertible parsed = _strtoui64(str.c_str(), &end, 10);
+
+# else
+
+  typedef unsigned long long BiggestConvertible;  // NOLINT
+  const BiggestConvertible parsed = strtoull(str.c_str(), &end, 10);
+
+# endif  // GTEST_OS_WINDOWS && !defined(__GNUC__)
+
+  const bool parse_success = *end == '\0' && errno == 0;
+
+  // TODO(vladl at google.com): Convert this to compile time assertion when it is
+  // available.
+  GTEST_CHECK_(sizeof(Integer) <= sizeof(parsed));
+
+  const Integer result = static_cast<Integer>(parsed);
+  if (parse_success && static_cast<BiggestConvertible>(result) == parsed) {
+    *number = result;
+    return true;
+  }
+  return false;
+}
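+
+// For example (illustrative, per the parsing rules above):
+// ParseNaturalNumber(std::string("123"), &n) succeeds with n == 123, while
+// "12x", "-5", and " 7" are all rejected.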
+#endif  // GTEST_HAS_DEATH_TEST
+
+// TestResult contains some private methods that should be hidden from the
+// Google Test user but are required for testing. This class allows our tests
+// to access them.
+//
+// This class is supplied only for the purpose of testing Google Test's own
+// constructs. Do not use it in user tests, either directly or indirectly.
+class TestResultAccessor {
+ public:
+  static void RecordProperty(TestResult* test_result,
+                             const TestProperty& property) {
+    test_result->RecordProperty(property);
+  }
+
+  static void ClearTestPartResults(TestResult* test_result) {
+    test_result->ClearTestPartResults();
+  }
+
+  static const std::vector<testing::TestPartResult>& test_part_results(
+      const TestResult& test_result) {
+    return test_result.test_part_results();
+  }
+};
+
+}  // namespace internal
+}  // namespace testing
+
+#endif  // GTEST_SRC_GTEST_INTERNAL_INL_H_
+#undef GTEST_IMPLEMENTATION_
+
+#if GTEST_OS_WINDOWS
+# define vsnprintf _vsnprintf
+#endif  // GTEST_OS_WINDOWS
+
+namespace testing {
+
+using internal::CountIf;
+using internal::ForEach;
+using internal::GetElementOr;
+using internal::Shuffle;
+
+// Constants.
+
+// A test whose test case name or test name matches this filter is
+// disabled and not run.
+static const char kDisableTestFilter[] = "DISABLED_*:*/DISABLED_*";
+
+// A test case whose name matches this filter is considered a death
+// test case and will be run before test cases whose name doesn't
+// match this filter.
+static const char kDeathTestCaseFilter[] = "*DeathTest:*DeathTest/*";
+
+// A test filter that matches everything.
+static const char kUniversalFilter[] = "*";
+
+// The default output file for XML output.
+static const char kDefaultOutputFile[] = "test_detail.xml";
+
+// The environment variable name for the test shard index.
+static const char kTestShardIndex[] = "GTEST_SHARD_INDEX";
+// The environment variable name for the total number of test shards.
+static const char kTestTotalShards[] = "GTEST_TOTAL_SHARDS";
+// The environment variable name for the test shard status file.
+static const char kTestShardStatusFile[] = "GTEST_SHARD_STATUS_FILE";
+
+namespace internal {
+
+// The text used in failure messages to indicate the start of the
+// stack trace.
+const char kStackTraceMarker[] = "\nStack trace:\n";
+
+// g_help_flag is true iff the --help flag or an equivalent form is
+// specified on the command line.
+bool g_help_flag = false;
+
+}  // namespace internal
+
+GTEST_DEFINE_bool_(
+    also_run_disabled_tests,
+    internal::BoolFromGTestEnv("also_run_disabled_tests", false),
+    "Run disabled tests too, in addition to the tests normally being run.");
+
+GTEST_DEFINE_bool_(
+    break_on_failure,
+    internal::BoolFromGTestEnv("break_on_failure", false),
+    "True iff a failed assertion should be a debugger break-point.");
+
+GTEST_DEFINE_bool_(
+    catch_exceptions,
+    internal::BoolFromGTestEnv("catch_exceptions", true),
+    "True iff " GTEST_NAME_
+    " should catch exceptions and treat them as test failures.");
+
+GTEST_DEFINE_string_(
+    color,
+    internal::StringFromGTestEnv("color", "auto"),
+    "Whether to use colors in the output.  Valid values: yes, no, "
+    "and auto.  'auto' means to use colors if the output is "
+    "being sent to a terminal and the TERM environment variable "
+    "is set to xterm, xterm-color, xterm-256color, linux or cygwin.");
+
+GTEST_DEFINE_string_(
+    filter,
+    internal::StringFromGTestEnv("filter", kUniversalFilter),
+    "A colon-separated list of glob (not regex) patterns "
+    "for filtering the tests to run, optionally followed by a "
+    "'-' and a : separated list of negative patterns (tests to "
+    "exclude).  A test is run if it matches one of the positive "
+    "patterns and does not match any of the negative patterns.");
+
+GTEST_DEFINE_bool_(list_tests, false,
+                   "List all tests without running them.");
+
+GTEST_DEFINE_string_(
+    output,
+    internal::StringFromGTestEnv("output", ""),
+    "A format (currently must be \"xml\"), optionally followed "
+    "by a colon and an output file name or directory. A directory "
+    "is indicated by a trailing pathname separator. "
+    "Examples: \"xml:filename.xml\", \"xml::directoryname/\". "
+    "If a directory is specified, output files will be created "
+    "within that directory, with file-names based on the test "
+    "executable's name and, if necessary, made unique by adding "
+    "digits.");
+
+GTEST_DEFINE_bool_(
+    print_time,
+    internal::BoolFromGTestEnv("print_time", true),
+    "True iff " GTEST_NAME_
+    " should display elapsed time in text output.");
+
+GTEST_DEFINE_int32_(
+    random_seed,
+    internal::Int32FromGTestEnv("random_seed", 0),
+    "Random number seed to use when shuffling test orders.  Must be in range "
+    "[1, 99999], or 0 to use a seed based on the current time.");
+
+GTEST_DEFINE_int32_(
+    repeat,
+    internal::Int32FromGTestEnv("repeat", 1),
+    "How many times to repeat each test.  Specify a negative number "
+    "for repeating forever.  Useful for shaking out flaky tests.");
+
+GTEST_DEFINE_bool_(
+    show_internal_stack_frames, false,
+    "True iff " GTEST_NAME_ " should include internal stack frames when "
+    "printing test failure stack traces.");
+
+GTEST_DEFINE_bool_(
+    shuffle,
+    internal::BoolFromGTestEnv("shuffle", false),
+    "True iff " GTEST_NAME_
+    " should randomize tests' order on every run.");
+
+GTEST_DEFINE_int32_(
+    stack_trace_depth,
+    internal::Int32FromGTestEnv("stack_trace_depth", kMaxStackTraceDepth),
+    "The maximum number of stack frames to print when an "
+    "assertion fails.  The valid range is 0 through 100, inclusive.");
+
+GTEST_DEFINE_string_(
+    stream_result_to,
+    internal::StringFromGTestEnv("stream_result_to", ""),
+    "This flag specifies the host name and the port number on which to stream "
+    "test results. Example: \"localhost:555\". The flag is effective only on "
+    "Linux.");
+
+GTEST_DEFINE_bool_(
+    throw_on_failure,
+    internal::BoolFromGTestEnv("throw_on_failure", false),
+    "When this flag is specified, a failed assertion will throw an exception "
+    "if exceptions are enabled or exit the program with a non-zero code "
+    "otherwise.");
+
+namespace internal {
+
+// Generates a random number from [0, range), using a Linear
+// Congruential Generator (LCG).  Crashes if 'range' is 0 or greater
+// than kMaxRange.
+UInt32 Random::Generate(UInt32 range) {
+  // These constants are the same as are used in glibc's rand(3).
+  state_ = (1103515245U*state_ + 12345U) % kMaxRange;
+
+  GTEST_CHECK_(range > 0)
+      << "Cannot generate a number in the range [0, 0).";
+  GTEST_CHECK_(range <= kMaxRange)
+      << "Generation of a number in [0, " << range << ") was requested, "
+      << "but this can only generate numbers in [0, " << kMaxRange << ").";
+
+  // Converting via modulus introduces a bit of downward bias, but
+  // it's simple, and a linear congruential generator isn't too good
+  // to begin with.
+  return state_ % range;
+}
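+
+// A minimal usage sketch (the seed is arbitrary); this is roughly how the
+// test-shuffling code employs the generator:
+//
+//   Random random(42);                       // seeded from --gtest_random_seed
+//   const UInt32 roll = random.Generate(6);  // a value in [0, 6)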
+
+// GTestIsInitialized() returns true iff the user has initialized
+// Google Test.  Useful for catching the user mistake of not initializing
+// Google Test before calling RUN_ALL_TESTS().
+//
+// A user must call testing::InitGoogleTest() to initialize Google
+// Test.  g_init_gtest_count is set to the number of times
+// InitGoogleTest() has been called.  We don't protect this variable
+// under a mutex as it is only accessed in the main thread.
+int g_init_gtest_count = 0;
+static bool GTestIsInitialized() { return g_init_gtest_count != 0; }
+
+// Iterates over a vector of TestCases, keeping a running sum of the
+// results of calling a given int-returning method on each.
+// Returns the sum.
+static int SumOverTestCaseList(const std::vector<TestCase*>& case_list,
+                               int (TestCase::*method)() const) {
+  int sum = 0;
+  for (size_t i = 0; i < case_list.size(); i++) {
+    sum += (case_list[i]->*method)();
+  }
+  return sum;
+}
+
+// Returns true iff the test case passed.
+static bool TestCasePassed(const TestCase* test_case) {
+  return test_case->should_run() && test_case->Passed();
+}
+
+// Returns true iff the test case failed.
+static bool TestCaseFailed(const TestCase* test_case) {
+  return test_case->should_run() && test_case->Failed();
+}
+
+// Returns true iff test_case contains at least one test that should
+// run.
+static bool ShouldRunTestCase(const TestCase* test_case) {
+  return test_case->should_run();
+}
+
+// AssertHelper constructor.
+AssertHelper::AssertHelper(TestPartResult::Type type,
+                           const char* file,
+                           int line,
+                           const char* message)
+    : data_(new AssertHelperData(type, file, line, message)) {
+}
+
+AssertHelper::~AssertHelper() {
+  delete data_;
+}
+
+// Message assignment, for assertion streaming support.
+void AssertHelper::operator=(const Message& message) const {
+  UnitTest::GetInstance()->
+    AddTestPartResult(data_->type, data_->file, data_->line,
+                      AppendUserMessage(data_->message, message),
+                      UnitTest::GetInstance()->impl()
+                      ->CurrentOsStackTraceExceptTop(1)
+                      // Skips the stack frame for this function itself.
+                      );  // NOLINT
+}
+
+// Mutex for linked pointers.
+GTEST_DEFINE_STATIC_MUTEX_(g_linked_ptr_mutex);
+
+// Application pathname obtained in InitGoogleTest.
+String g_executable_path;
+
+// Returns the current application's name, removing the directory path if
+// one is present.
+FilePath GetCurrentExecutableName() {
+  FilePath result;
+
+#if GTEST_OS_WINDOWS
+  result.Set(FilePath(g_executable_path).RemoveExtension("exe"));
+#else
+  result.Set(FilePath(g_executable_path));
+#endif  // GTEST_OS_WINDOWS
+
+  return result.RemoveDirectoryName();
+}
+
+// Functions for processing the gtest_output flag.
+
+// Returns the output format, or "" for normal printed output.
+String UnitTestOptions::GetOutputFormat() {
+  const char* const gtest_output_flag = GTEST_FLAG(output).c_str();
+  if (gtest_output_flag == NULL) return String("");
+
+  const char* const colon = strchr(gtest_output_flag, ':');
+  return (colon == NULL) ?
+      String(gtest_output_flag) :
+      String(gtest_output_flag, colon - gtest_output_flag);
+}
+
+// Returns the name of the requested output file, or the default if none
+// was explicitly specified.
+String UnitTestOptions::GetAbsolutePathToOutputFile() {
+  const char* const gtest_output_flag = GTEST_FLAG(output).c_str();
+  if (gtest_output_flag == NULL)
+    return String("");
+
+  const char* const colon = strchr(gtest_output_flag, ':');
+  if (colon == NULL)
+    return String(internal::FilePath::ConcatPaths(
+               internal::FilePath(
+                   UnitTest::GetInstance()->original_working_dir()),
+               internal::FilePath(kDefaultOutputFile)).ToString() );
+
+  internal::FilePath output_name(colon + 1);
+  if (!output_name.IsAbsolutePath())
+    // TODO(wan@google.com): on Windows \some\path is not an absolute
+    // path (as its meaning depends on the current drive), yet the
+    // following logic for turning it into an absolute path is wrong.
+    // Fix it.
+    output_name = internal::FilePath::ConcatPaths(
+        internal::FilePath(UnitTest::GetInstance()->original_working_dir()),
+        internal::FilePath(colon + 1));
+
+  if (!output_name.IsDirectory())
+    return output_name.ToString();
+
+  internal::FilePath result(internal::FilePath::GenerateUniqueFileName(
+      output_name, internal::GetCurrentExecutableName(),
+      GetOutputFormat().c_str()));
+  return result.ToString();
+}
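+
+// Illustrative values of --gtest_output and the file they resolve to
+// (paths hypothetical):
+//
+//   --gtest_output=xml              -> test_detail.xml in the original
+//                                      working directory
+//   --gtest_output=xml:report.xml   -> report.xml (relative paths are resolved
+//                                      against the original working directory)
+//   --gtest_output=xml:reports/     -> a uniquely named .xml file inside
+//                                      reports/, derived from the executable name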
+
+// Returns true iff the wildcard pattern matches the string.  The
+// first ':' or '\0' character in pattern marks the end of it.
+//
+// This recursive algorithm isn't very efficient, but is clear and
+// works well enough for matching test names, which are short.
+bool UnitTestOptions::PatternMatchesString(const char *pattern,
+                                           const char *str) {
+  switch (*pattern) {
+    case '\0':
+    case ':':  // Either ':' or '\0' marks the end of the pattern.
+      return *str == '\0';
+    case '?':  // Matches any single character.
+      return *str != '\0' && PatternMatchesString(pattern + 1, str + 1);
+    case '*':  // Matches any string (possibly empty) of characters.
+      return (*str != '\0' && PatternMatchesString(pattern, str + 1)) ||
+          PatternMatchesString(pattern + 1, str);
+    default:  // Non-special character.  Matches itself.
+      return *pattern == *str &&
+          PatternMatchesString(pattern + 1, str + 1);
+  }
+}
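+
+// A few illustrative matches (inputs hypothetical):
+//
+//   PatternMatchesString("Foo*", "FooTest.Bar")   // true
+//   PatternMatchesString("*.Bar", "FooTest.Bar")  // true
+//   PatternMatchesString("Foo?", "Fooo")          // true  ('?' matches one char)
+//   PatternMatchesString("Foo?", "Foo")           // false ('?' needs a char)
+//   PatternMatchesString("Foo*:Bar*", "BarTest")  // false (only the part up to
+//                                                 //        ':' is considered)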
+
+bool UnitTestOptions::MatchesFilter(const String& name, const char* filter) {
+  const char *cur_pattern = filter;
+  for (;;) {
+    if (PatternMatchesString(cur_pattern, name.c_str())) {
+      return true;
+    }
+
+    // Finds the next pattern in the filter.
+    cur_pattern = strchr(cur_pattern, ':');
+
+    // Returns if no more pattern can be found.
+    if (cur_pattern == NULL) {
+      return false;
+    }
+
+    // Skips the pattern separator (the ':' character).
+    cur_pattern++;
+  }
+}
+
+// TODO(keithray): move String function implementations to gtest-string.cc.
+
+// Returns true iff the user-specified filter matches the test case
+// name and the test name.
+bool UnitTestOptions::FilterMatchesTest(const String &test_case_name,
+                                        const String &test_name) {
+  const String& full_name = String::Format("%s.%s",
+                                           test_case_name.c_str(),
+                                           test_name.c_str());
+
+  // Split --gtest_filter at '-', if there is one, to separate into
+  // positive filter and negative filter portions
+  const char* const p = GTEST_FLAG(filter).c_str();
+  const char* const dash = strchr(p, '-');
+  String positive;
+  String negative;
+  if (dash == NULL) {
+    positive = GTEST_FLAG(filter).c_str();  // Whole string is a positive filter
+    negative = String("");
+  } else {
+    positive = String(p, dash - p);  // Everything up to the dash
+    negative = String(dash+1);       // Everything after the dash
+    if (positive.empty()) {
+      // Treat '-test1' as the same as '*-test1'
+      positive = kUniversalFilter;
+    }
+  }
+
+  // A filter is a colon-separated list of patterns.  It matches a
+  // test if any pattern in it matches the test.
+  return (MatchesFilter(full_name, positive.c_str()) &&
+          !MatchesFilter(full_name, negative.c_str()));
+}
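+
+// Illustrative --gtest_filter values (test names hypothetical):
+//
+//   --gtest_filter=*                       runs everything (the default)
+//   --gtest_filter=FooTest.*               runs every test of test case FooTest
+//   --gtest_filter=*Null*:*Constructor*    runs tests whose full name contains
+//                                          "Null" or "Constructor"
+//   --gtest_filter=-*DeathTest.*           runs everything except death tests
+//   --gtest_filter=FooTest.*-FooTest.Bar   runs FooTest.* except FooTest.Bar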
+
+#if GTEST_HAS_SEH
+// Returns EXCEPTION_EXECUTE_HANDLER if Google Test should handle the
+// given SEH exception, or EXCEPTION_CONTINUE_SEARCH otherwise.
+// This function is useful as an __except condition.
+int UnitTestOptions::GTestShouldProcessSEH(DWORD exception_code) {
+  // Google Test should handle a SEH exception if:
+  //   1. the user wants it to, AND
+  //   2. this is not a breakpoint exception, AND
+  //   3. this is not a C++ exception (VC++ implements them via SEH,
+  //      apparently).
+  //
+  // SEH exception code for C++ exceptions.
+  // (see http://support.microsoft.com/kb/185294 for more information).
+  const DWORD kCxxExceptionCode = 0xe06d7363;
+
+  bool should_handle = true;
+
+  if (!GTEST_FLAG(catch_exceptions))
+    should_handle = false;
+  else if (exception_code == EXCEPTION_BREAKPOINT)
+    should_handle = false;
+  else if (exception_code == kCxxExceptionCode)
+    should_handle = false;
+
+  return should_handle ? EXCEPTION_EXECUTE_HANDLER : EXCEPTION_CONTINUE_SEARCH;
+}
+#endif  // GTEST_HAS_SEH
+
+}  // namespace internal
+
+// The c'tor sets this object as the test part result reporter used by
+// Google Test.  The 'result' parameter specifies where to report the
+// results. Intercepts only failures from the current thread.
+ScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter(
+    TestPartResultArray* result)
+    : intercept_mode_(INTERCEPT_ONLY_CURRENT_THREAD),
+      result_(result) {
+  Init();
+}
+
+// The c'tor sets this object as the test part result reporter used by
+// Google Test.  The 'result' parameter specifies where to report the
+// results.
+ScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter(
+    InterceptMode intercept_mode, TestPartResultArray* result)
+    : intercept_mode_(intercept_mode),
+      result_(result) {
+  Init();
+}
+
+void ScopedFakeTestPartResultReporter::Init() {
+  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+  if (intercept_mode_ == INTERCEPT_ALL_THREADS) {
+    old_reporter_ = impl->GetGlobalTestPartResultReporter();
+    impl->SetGlobalTestPartResultReporter(this);
+  } else {
+    old_reporter_ = impl->GetTestPartResultReporterForCurrentThread();
+    impl->SetTestPartResultReporterForCurrentThread(this);
+  }
+}
+
+// The d'tor restores the test part result reporter used by Google Test
+// before.
+ScopedFakeTestPartResultReporter::~ScopedFakeTestPartResultReporter() {
+  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+  if (intercept_mode_ == INTERCEPT_ALL_THREADS) {
+    impl->SetGlobalTestPartResultReporter(old_reporter_);
+  } else {
+    impl->SetTestPartResultReporterForCurrentThread(old_reporter_);
+  }
+}
+
+// Increments the test part result count and remembers the result.
+// This method is from the TestPartResultReporterInterface interface.
+void ScopedFakeTestPartResultReporter::ReportTestPartResult(
+    const TestPartResult& result) {
+  result_->Append(result);
+}
+
+namespace internal {
+
+// Returns the type ID of ::testing::Test.  We should always call this
+// instead of GetTypeId< ::testing::Test>() to get the type ID of
+// testing::Test.  This is to work around a suspected linker bug when
+// using Google Test as a framework on Mac OS X.  The bug causes
+// GetTypeId< ::testing::Test>() to return different values depending
+// on whether the call is from the Google Test framework itself or
+// from user test code.  GetTestTypeId() is guaranteed to always
+// return the same value, as it always calls GetTypeId<>() from
+// gtest.cc, which is within the Google Test framework.
+TypeId GetTestTypeId() {
+  return GetTypeId<Test>();
+}
+
+// The value of GetTestTypeId() as seen from within the Google Test
+// library.  This is solely for testing GetTestTypeId().
+extern const TypeId kTestTypeIdInGoogleTest = GetTestTypeId();
+
+// This predicate-formatter checks that 'results' contains a test part
+// failure of the given type and that the failure message contains the
+// given substring.
+AssertionResult HasOneFailure(const char* /* results_expr */,
+                              const char* /* type_expr */,
+                              const char* /* substr_expr */,
+                              const TestPartResultArray& results,
+                              TestPartResult::Type type,
+                              const string& substr) {
+  const String expected(type == TestPartResult::kFatalFailure ?
+                        "1 fatal failure" :
+                        "1 non-fatal failure");
+  Message msg;
+  if (results.size() != 1) {
+    msg << "Expected: " << expected << "\n"
+        << "  Actual: " << results.size() << " failures";
+    for (int i = 0; i < results.size(); i++) {
+      msg << "\n" << results.GetTestPartResult(i);
+    }
+    return AssertionFailure() << msg;
+  }
+
+  const TestPartResult& r = results.GetTestPartResult(0);
+  if (r.type() != type) {
+    return AssertionFailure() << "Expected: " << expected << "\n"
+                              << "  Actual:\n"
+                              << r;
+  }
+
+  if (strstr(r.message(), substr.c_str()) == NULL) {
+    return AssertionFailure() << "Expected: " << expected << " containing \""
+                              << substr << "\"\n"
+                              << "  Actual:\n"
+                              << r;
+  }
+
+  return AssertionSuccess();
+}
+
+// The constructor of SingleFailureChecker remembers where to look up
+// test part results, what type of failure we expect, and what
+// substring the failure message should contain.
+SingleFailureChecker::SingleFailureChecker(
+    const TestPartResultArray* results,
+    TestPartResult::Type type,
+    const string& substr)
+    : results_(results),
+      type_(type),
+      substr_(substr) {}
+
+// The destructor of SingleFailureChecker verifies that the given
+// TestPartResultArray contains exactly one failure that has the given
+// type and contains the given substring.  If that's not the case, a
+// non-fatal failure will be generated.
+SingleFailureChecker::~SingleFailureChecker() {
+  EXPECT_PRED_FORMAT3(HasOneFailure, *results_, type_, substr_);
+}
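+
+// A sketch of how these helpers cooperate (test body hypothetical); this is
+// roughly what the EXPECT_NONFATAL_FAILURE macro in gtest-spi.h does:
+//
+//   TestPartResultArray results;
+//   SingleFailureChecker checker(
+//       &results, TestPartResult::kNonFatalFailure, "Value of:");
+//   {
+//     ScopedFakeTestPartResultReporter reporter(
+//         ScopedFakeTestPartResultReporter::INTERCEPT_ONLY_CURRENT_THREAD,
+//         &results);
+//     EXPECT_EQ(1, 2);  // Produces exactly one non-fatal failure, intercepted
+//   }                   // by the reporter instead of failing the test.
+//   // When checker goes out of scope it verifies the single recorded failure.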
+
+DefaultGlobalTestPartResultReporter::DefaultGlobalTestPartResultReporter(
+    UnitTestImpl* unit_test) : unit_test_(unit_test) {}
+
+void DefaultGlobalTestPartResultReporter::ReportTestPartResult(
+    const TestPartResult& result) {
+  unit_test_->current_test_result()->AddTestPartResult(result);
+  unit_test_->listeners()->repeater()->OnTestPartResult(result);
+}
+
+DefaultPerThreadTestPartResultReporter::DefaultPerThreadTestPartResultReporter(
+    UnitTestImpl* unit_test) : unit_test_(unit_test) {}
+
+void DefaultPerThreadTestPartResultReporter::ReportTestPartResult(
+    const TestPartResult& result) {
+  unit_test_->GetGlobalTestPartResultReporter()->ReportTestPartResult(result);
+}
+
+// Returns the global test part result reporter.
+TestPartResultReporterInterface*
+UnitTestImpl::GetGlobalTestPartResultReporter() {
+  internal::MutexLock lock(&global_test_part_result_reporter_mutex_);
+  return global_test_part_result_repoter_;
+}
+
+// Sets the global test part result reporter.
+void UnitTestImpl::SetGlobalTestPartResultReporter(
+    TestPartResultReporterInterface* reporter) {
+  internal::MutexLock lock(&global_test_part_result_reporter_mutex_);
+  global_test_part_result_repoter_ = reporter;
+}
+
+// Returns the test part result reporter for the current thread.
+TestPartResultReporterInterface*
+UnitTestImpl::GetTestPartResultReporterForCurrentThread() {
+  return per_thread_test_part_result_reporter_.get();
+}
+
+// Sets the test part result reporter for the current thread.
+void UnitTestImpl::SetTestPartResultReporterForCurrentThread(
+    TestPartResultReporterInterface* reporter) {
+  per_thread_test_part_result_reporter_.set(reporter);
+}
+
+// Gets the number of successful test cases.
+int UnitTestImpl::successful_test_case_count() const {
+  return CountIf(test_cases_, TestCasePassed);
+}
+
+// Gets the number of failed test cases.
+int UnitTestImpl::failed_test_case_count() const {
+  return CountIf(test_cases_, TestCaseFailed);
+}
+
+// Gets the number of all test cases.
+int UnitTestImpl::total_test_case_count() const {
+  return static_cast<int>(test_cases_.size());
+}
+
+// Gets the number of all test cases that contain at least one test
+// that should run.
+int UnitTestImpl::test_case_to_run_count() const {
+  return CountIf(test_cases_, ShouldRunTestCase);
+}
+
+// Gets the number of successful tests.
+int UnitTestImpl::successful_test_count() const {
+  return SumOverTestCaseList(test_cases_, &TestCase::successful_test_count);
+}
+
+// Gets the number of failed tests.
+int UnitTestImpl::failed_test_count() const {
+  return SumOverTestCaseList(test_cases_, &TestCase::failed_test_count);
+}
+
+// Gets the number of disabled tests.
+int UnitTestImpl::disabled_test_count() const {
+  return SumOverTestCaseList(test_cases_, &TestCase::disabled_test_count);
+}
+
+// Gets the number of all tests.
+int UnitTestImpl::total_test_count() const {
+  return SumOverTestCaseList(test_cases_, &TestCase::total_test_count);
+}
+
+// Gets the number of tests that should run.
+int UnitTestImpl::test_to_run_count() const {
+  return SumOverTestCaseList(test_cases_, &TestCase::test_to_run_count);
+}
+
+// Returns the current OS stack trace as a String.
+//
+// The maximum number of stack frames to be included is specified by
+// the gtest_stack_trace_depth flag.  The skip_count parameter
+// specifies the number of top frames to be skipped, which doesn't
+// count against the number of frames to be included.
+//
+// For example, if Foo() calls Bar(), which in turn calls
+// CurrentOsStackTraceExceptTop(1), Foo() will be included in the
+// trace but Bar() and CurrentOsStackTraceExceptTop() won't.
+String UnitTestImpl::CurrentOsStackTraceExceptTop(int skip_count) {
+  (void)skip_count;
+  return String("");
+}
+
+// Returns the current time in milliseconds.
+TimeInMillis GetTimeInMillis() {
+#if GTEST_OS_WINDOWS_MOBILE || defined(__BORLANDC__)
+  // Difference between 1970-01-01 and 1601-01-01 in milliseconds.
+  // http://analogous.blogspot.com/2005/04/epoch.html
+  const TimeInMillis kJavaEpochToWinFileTimeDelta =
+    static_cast<TimeInMillis>(116444736UL) * 100000UL;
+  const DWORD kTenthMicrosInMilliSecond = 10000;
+
+  SYSTEMTIME now_systime;
+  FILETIME now_filetime;
+  ULARGE_INTEGER now_int64;
+  // TODO(kenton@google.com): Shouldn't this just use
+  //   GetSystemTimeAsFileTime()?
+  GetSystemTime(&now_systime);
+  if (SystemTimeToFileTime(&now_systime, &now_filetime)) {
+    now_int64.LowPart = now_filetime.dwLowDateTime;
+    now_int64.HighPart = now_filetime.dwHighDateTime;
+    now_int64.QuadPart = (now_int64.QuadPart / kTenthMicrosInMilliSecond) -
+      kJavaEpochToWinFileTimeDelta;
+    return now_int64.QuadPart;
+  }
+  return 0;
+#elif GTEST_OS_WINDOWS && !GTEST_HAS_GETTIMEOFDAY_
+  __timeb64 now;
+
+# ifdef _MSC_VER
+
+  // MSVC 8 deprecates _ftime64(), so we want to suppress warning 4996
+  // (deprecated function) there.
+  // TODO(kenton@google.com): Use GetTickCount()?  Or use
+  //   SystemTimeToFileTime()
+#  pragma warning(push)          // Saves the current warning state.
+#  pragma warning(disable:4996)  // Temporarily disables warning 4996.
+  _ftime64(&now);
+#  pragma warning(pop)           // Restores the warning state.
+# else
+
+  _ftime64(&now);
+
+# endif  // _MSC_VER
+
+  return static_cast<TimeInMillis>(now.time) * 1000 + now.millitm;
+#elif GTEST_HAS_GETTIMEOFDAY_
+  struct timeval now;
+  gettimeofday(&now, NULL);
+  return static_cast<TimeInMillis>(now.tv_sec) * 1000 + now.tv_usec / 1000;
+#else
+# error "Don't know how to get the current time on your system."
+#endif
+}
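+
+// A worked example for the first Windows branch above: FILETIME counts
+// 100-nanosecond ticks since 1601-01-01, so dividing by
+// kTenthMicrosInMilliSecond (10000) gives milliseconds since 1601, and
+// subtracting kJavaEpochToWinFileTimeDelta (116444736 * 100000 =
+// 11,644,473,600,000 ms, the 369 years between 1601 and 1970) gives
+// milliseconds since the Unix epoch, matching the gettimeofday() branch.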
+
+// Utilities
+
+// class String
+
+// Returns the input enclosed in double quotes if it's not NULL;
+// otherwise returns "(null)".  For example, "\"Hello\"" is returned
+// for input "Hello".
+//
+// This is useful for printing a C string in the syntax of a literal.
+//
+// Known issue: escape sequences are not handled yet.
+String String::ShowCStringQuoted(const char* c_str) {
+  return c_str ? String::Format("\"%s\"", c_str) : String("(null)");
+}
+
+// Copies at most length characters from str into a newly-allocated
+// piece of memory of size length+1.  The memory is allocated with new[].
+// A terminating null byte is written to the memory, and a pointer to it
+// is returned.  If str is NULL, NULL is returned.
+static char* CloneString(const char* str, size_t length) {
+  if (str == NULL) {
+    return NULL;
+  } else {
+    char* const clone = new char[length + 1];
+    posix::StrNCpy(clone, str, length);
+    clone[length] = '\0';
+    return clone;
+  }
+}
+
+// Clones a 0-terminated C string, allocating memory using new.  The
+// caller is responsible for deleting[] the return value.  Returns the
+// cloned string, or NULL if the input is NULL.
+const char * String::CloneCString(const char* c_str) {
+  return (c_str == NULL) ?
+                    NULL : CloneString(c_str, strlen(c_str));
+}
+
+#if GTEST_OS_WINDOWS_MOBILE
+// Creates a UTF-16 wide string from the given ANSI string, allocating
+// memory using new. The caller is responsible for deleting the return
+// value using delete[]. Returns the wide string, or NULL if the
+// input is NULL.
+LPCWSTR String::AnsiToUtf16(const char* ansi) {
+  if (!ansi) return NULL;
+  const int length = strlen(ansi);
+  const int unicode_length =
+      MultiByteToWideChar(CP_ACP, 0, ansi, length,
+                          NULL, 0);
+  WCHAR* unicode = new WCHAR[unicode_length + 1];
+  MultiByteToWideChar(CP_ACP, 0, ansi, length,
+                      unicode, unicode_length);
+  unicode[unicode_length] = 0;
+  return unicode;
+}
+
+// Creates an ANSI string from the given wide string, allocating
+// memory using new. The caller is responsible for deleting the return
+// value using delete[]. Returns the ANSI string, or NULL if the
+// input is NULL.
+const char* String::Utf16ToAnsi(LPCWSTR utf16_str)  {
+  if (!utf16_str) return NULL;
+  const int ansi_length =
+      WideCharToMultiByte(CP_ACP, 0, utf16_str, -1,
+                          NULL, 0, NULL, NULL);
+  char* ansi = new char[ansi_length + 1];
+  WideCharToMultiByte(CP_ACP, 0, utf16_str, -1,
+                      ansi, ansi_length, NULL, NULL);
+  ansi[ansi_length] = 0;
+  return ansi;
+}
+
+#endif  // GTEST_OS_WINDOWS_MOBILE
+
+// Compares two C strings.  Returns true iff they have the same content.
+//
+// Unlike strcmp(), this function can handle NULL argument(s).  A NULL
+// C string is considered different to any non-NULL C string,
+// including the empty string.
+bool String::CStringEquals(const char * lhs, const char * rhs) {
+  if ( lhs == NULL ) return rhs == NULL;
+
+  if ( rhs == NULL ) return false;
+
+  return strcmp(lhs, rhs) == 0;
+}
+
+#if GTEST_HAS_STD_WSTRING || GTEST_HAS_GLOBAL_WSTRING
+
+// Converts an array of wide chars to a narrow string using the UTF-8
+// encoding, and streams the result to the given Message object.
+static void StreamWideCharsToMessage(const wchar_t* wstr, size_t length,
+                                     Message* msg) {
+  // TODO(wan): consider allowing a testing::String object to
+  // contain '\0'.  This will make it behave more like std::string,
+  // and will allow ToUtf8String() to return the correct encoding
+  // for '\0' so that we can get rid of the conditional here (and in
+  // several other places).
+  for (size_t i = 0; i != length; ) {  // NOLINT
+    if (wstr[i] != L'\0') {
+      *msg << WideStringToUtf8(wstr + i, static_cast<int>(length - i));
+      while (i != length && wstr[i] != L'\0')
+        i++;
+    } else {
+      *msg << '\0';
+      i++;
+    }
+  }
+}
+
+#endif  // GTEST_HAS_STD_WSTRING || GTEST_HAS_GLOBAL_WSTRING
+
+}  // namespace internal
+
+#if GTEST_HAS_STD_WSTRING
+// Converts the given wide string to a narrow string using the UTF-8
+// encoding, and streams the result to this Message object.
+Message& Message::operator <<(const ::std::wstring& wstr) {
+  internal::StreamWideCharsToMessage(wstr.c_str(), wstr.length(), this);
+  return *this;
+}
+#endif  // GTEST_HAS_STD_WSTRING
+
+#if GTEST_HAS_GLOBAL_WSTRING
+// Converts the given wide string to a narrow string using the UTF-8
+// encoding, and streams the result to this Message object.
+Message& Message::operator <<(const ::wstring& wstr) {
+  internal::StreamWideCharsToMessage(wstr.c_str(), wstr.length(), this);
+  return *this;
+}
+#endif  // GTEST_HAS_GLOBAL_WSTRING
+
+// AssertionResult constructors.
+// Used in EXPECT_TRUE/FALSE(assertion_result).
+AssertionResult::AssertionResult(const AssertionResult& other)
+    : success_(other.success_),
+      message_(other.message_.get() != NULL ?
+               new ::std::string(*other.message_) :
+               static_cast< ::std::string*>(NULL)) {
+}
+
+// Returns the assertion's negation. Used with EXPECT/ASSERT_FALSE.
+AssertionResult AssertionResult::operator!() const {
+  AssertionResult negation(!success_);
+  if (message_.get() != NULL)
+    negation << *message_;
+  return negation;
+}
+
+// Makes a successful assertion result.
+AssertionResult AssertionSuccess() {
+  return AssertionResult(true);
+}
+
+// Makes a failed assertion result.
+AssertionResult AssertionFailure() {
+  return AssertionResult(false);
+}
+
+// Makes a failed assertion result with the given failure message.
+// Deprecated; use AssertionFailure() << message.
+AssertionResult AssertionFailure(const Message& message) {
+  return AssertionFailure() << message;
+}
+
+namespace internal {
+
+// Constructs and returns the message for an equality assertion
+// (e.g. ASSERT_EQ, EXPECT_STREQ, etc) failure.
+//
+// The first four parameters are the expressions used in the assertion
+// and their values, as strings.  For example, for ASSERT_EQ(foo, bar)
+// where foo is 5 and bar is 6, we have:
+//
+//   expected_expression: "foo"
+//   actual_expression:   "bar"
+//   expected_value:      "5"
+//   actual_value:        "6"
+//
+// The ignoring_case parameter is true iff the assertion is a
+// *_STRCASEEQ*.  When it's true, the string " (ignoring case)" will
+// be inserted into the message.
+AssertionResult EqFailure(const char* expected_expression,
+                          const char* actual_expression,
+                          const String& expected_value,
+                          const String& actual_value,
+                          bool ignoring_case) {
+  Message msg;
+  msg << "Value of: " << actual_expression;
+  if (actual_value != actual_expression) {
+    msg << "\n  Actual: " << actual_value;
+  }
+
+  msg << "\nExpected: " << expected_expression;
+  if (ignoring_case) {
+    msg << " (ignoring case)";
+  }
+  if (expected_value != expected_expression) {
+    msg << "\nWhich is: " << expected_value;
+  }
+
+  return AssertionFailure() << msg;
+}
+
+// Constructs a failure message for Boolean assertions such as EXPECT_TRUE.
+String GetBoolAssertionFailureMessage(const AssertionResult& assertion_result,
+                                      const char* expression_text,
+                                      const char* actual_predicate_value,
+                                      const char* expected_predicate_value) {
+  const char* actual_message = assertion_result.message();
+  Message msg;
+  msg << "Value of: " << expression_text
+      << "\n  Actual: " << actual_predicate_value;
+  if (actual_message[0] != '\0')
+    msg << " (" << actual_message << ")";
+  msg << "\nExpected: " << expected_predicate_value;
+  return msg.GetString();
+}
+
+// Helper function for implementing ASSERT_NEAR.
+AssertionResult DoubleNearPredFormat(const char* expr1,
+                                     const char* expr2,
+                                     const char* abs_error_expr,
+                                     double val1,
+                                     double val2,
+                                     double abs_error) {
+  const double diff = fabs(val1 - val2);
+  if (diff <= abs_error) return AssertionSuccess();
+
+  // TODO(wan): do not print the value of an expression if it's
+  // already a literal.
+  return AssertionFailure()
+      << "The difference between " << expr1 << " and " << expr2
+      << " is " << diff << ", which exceeds " << abs_error_expr << ", where\n"
+      << expr1 << " evaluates to " << val1 << ",\n"
+      << expr2 << " evaluates to " << val2 << ", and\n"
+      << abs_error_expr << " evaluates to " << abs_error << ".";
+}
+
+
+// Helper template for implementing FloatLE() and DoubleLE().
+template <typename RawType>
+AssertionResult FloatingPointLE(const char* expr1,
+                                const char* expr2,
+                                RawType val1,
+                                RawType val2) {
+  // Returns success if val1 is less than val2,
+  if (val1 < val2) {
+    return AssertionSuccess();
+  }
+
+  // or if val1 is almost equal to val2.
+  const FloatingPoint<RawType> lhs(val1), rhs(val2);
+  if (lhs.AlmostEquals(rhs)) {
+    return AssertionSuccess();
+  }
+
+  // Note that the above two checks will both fail if either val1 or
+  // val2 is NaN, as the IEEE floating-point standard requires that
+  // any predicate involving a NaN must return false.
+
+  ::std::stringstream val1_ss;
+  val1_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2)
+          << val1;
+
+  ::std::stringstream val2_ss;
+  val2_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2)
+          << val2;
+
+  return AssertionFailure()
+      << "Expected: (" << expr1 << ") <= (" << expr2 << ")\n"
+      << "  Actual: " << StringStreamToString(&val1_ss) << " vs "
+      << StringStreamToString(&val2_ss);
+}
+
+}  // namespace internal
+
+// Asserts that val1 is less than, or almost equal to, val2.  Fails
+// otherwise.  In particular, it fails if either val1 or val2 is NaN.
+AssertionResult FloatLE(const char* expr1, const char* expr2,
+                        float val1, float val2) {
+  return internal::FloatingPointLE<float>(expr1, expr2, val1, val2);
+}
+
+// Asserts that val1 is less than, or almost equal to, val2.  Fails
+// otherwise.  In particular, it fails if either val1 or val2 is NaN.
+AssertionResult DoubleLE(const char* expr1, const char* expr2,
+                         double val1, double val2) {
+  return internal::FloatingPointLE<double>(expr1, expr2, val1, val2);
+}
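+
+// These are designed for the predicate-format macros; for example
+// (values hypothetical):
+//
+//   EXPECT_PRED_FORMAT2(::testing::DoubleLE, 1.0, 2.0);  // passes: 1.0 < 2.0
+//   EXPECT_PRED_FORMAT2(::testing::FloatLE, 2.0f, 1.0f); // fails with a message
+//                                                        // showing both values
+//
+// Values that differ only by a few ULPs are treated as "almost equal" and
+// therefore also pass.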
+
+namespace internal {
+
+// The helper function for {ASSERT|EXPECT}_EQ with int or enum
+// arguments.
+AssertionResult CmpHelperEQ(const char* expected_expression,
+                            const char* actual_expression,
+                            BiggestInt expected,
+                            BiggestInt actual) {
+  if (expected == actual) {
+    return AssertionSuccess();
+  }
+
+  return EqFailure(expected_expression,
+                   actual_expression,
+                   FormatForComparisonFailureMessage(expected, actual),
+                   FormatForComparisonFailureMessage(actual, expected),
+                   false);
+}
+
+// A macro for implementing the helper functions needed to implement
+// ASSERT_?? and EXPECT_?? with integer or enum arguments.  It is here
+// just to avoid copy-and-paste of similar code.
+#define GTEST_IMPL_CMP_HELPER_(op_name, op)\
+AssertionResult CmpHelper##op_name(const char* expr1, const char* expr2, \
+                                   BiggestInt val1, BiggestInt val2) {\
+  if (val1 op val2) {\
+    return AssertionSuccess();\
+  } else {\
+    return AssertionFailure() \
+        << "Expected: (" << expr1 << ") " #op " (" << expr2\
+        << "), actual: " << FormatForComparisonFailureMessage(val1, val2)\
+        << " vs " << FormatForComparisonFailureMessage(val2, val1);\
+  }\
+}
+
+// Implements the helper function for {ASSERT|EXPECT}_NE with int or
+// enum arguments.
+GTEST_IMPL_CMP_HELPER_(NE, !=)
+// Implements the helper function for {ASSERT|EXPECT}_LE with int or
+// enum arguments.
+GTEST_IMPL_CMP_HELPER_(LE, <=)
+// Implements the helper function for {ASSERT|EXPECT}_LT with int or
+// enum arguments.
+GTEST_IMPL_CMP_HELPER_(LT, < )
+// Implements the helper function for {ASSERT|EXPECT}_GE with int or
+// enum arguments.
+GTEST_IMPL_CMP_HELPER_(GE, >=)
+// Implements the helper function for {ASSERT|EXPECT}_GT with int or
+// enum arguments.
+GTEST_IMPL_CMP_HELPER_(GT, > )
+
+#undef GTEST_IMPL_CMP_HELPER_
+
+// The helper function for {ASSERT|EXPECT}_STREQ.
+AssertionResult CmpHelperSTREQ(const char* expected_expression,
+                               const char* actual_expression,
+                               const char* expected,
+                               const char* actual) {
+  if (String::CStringEquals(expected, actual)) {
+    return AssertionSuccess();
+  }
+
+  return EqFailure(expected_expression,
+                   actual_expression,
+                   String::ShowCStringQuoted(expected),
+                   String::ShowCStringQuoted(actual),
+                   false);
+}
+
+// The helper function for {ASSERT|EXPECT}_STRCASEEQ.
+AssertionResult CmpHelperSTRCASEEQ(const char* expected_expression,
+                                   const char* actual_expression,
+                                   const char* expected,
+                                   const char* actual) {
+  if (String::CaseInsensitiveCStringEquals(expected, actual)) {
+    return AssertionSuccess();
+  }
+
+  return EqFailure(expected_expression,
+                   actual_expression,
+                   String::ShowCStringQuoted(expected),
+                   String::ShowCStringQuoted(actual),
+                   true);
+}
+
+// The helper function for {ASSERT|EXPECT}_STRNE.
+AssertionResult CmpHelperSTRNE(const char* s1_expression,
+                               const char* s2_expression,
+                               const char* s1,
+                               const char* s2) {
+  if (!String::CStringEquals(s1, s2)) {
+    return AssertionSuccess();
+  } else {
+    return AssertionFailure() << "Expected: (" << s1_expression << ") != ("
+                              << s2_expression << "), actual: \""
+                              << s1 << "\" vs \"" << s2 << "\"";
+  }
+}
+
+// The helper function for {ASSERT|EXPECT}_STRCASENE.
+AssertionResult CmpHelperSTRCASENE(const char* s1_expression,
+                                   const char* s2_expression,
+                                   const char* s1,
+                                   const char* s2) {
+  if (!String::CaseInsensitiveCStringEquals(s1, s2)) {
+    return AssertionSuccess();
+  } else {
+    return AssertionFailure()
+        << "Expected: (" << s1_expression << ") != ("
+        << s2_expression << ") (ignoring case), actual: \""
+        << s1 << "\" vs \"" << s2 << "\"";
+  }
+}
+
+}  // namespace internal
+
+namespace {
+
+// Helper functions for implementing IsSubString() and IsNotSubstring().
+
+// This group of overloaded functions returns true iff needle is a
+// substring of haystack.  NULL is considered a substring of itself
+// only.
+
+bool IsSubstringPred(const char* needle, const char* haystack) {
+  if (needle == NULL || haystack == NULL)
+    return needle == haystack;
+
+  return strstr(haystack, needle) != NULL;
+}
+
+bool IsSubstringPred(const wchar_t* needle, const wchar_t* haystack) {
+  if (needle == NULL || haystack == NULL)
+    return needle == haystack;
+
+  return wcsstr(haystack, needle) != NULL;
+}
+
+// StringType here can be either ::std::string or ::std::wstring.
+template <typename StringType>
+bool IsSubstringPred(const StringType& needle,
+                     const StringType& haystack) {
+  return haystack.find(needle) != StringType::npos;
+}
+
+// This function implements either IsSubstring() or IsNotSubstring(),
+// depending on the value of the expected_to_be_substring parameter.
+// StringType here can be const char*, const wchar_t*, ::std::string,
+// or ::std::wstring.
+template <typename StringType>
+AssertionResult IsSubstringImpl(
+    bool expected_to_be_substring,
+    const char* needle_expr, const char* haystack_expr,
+    const StringType& needle, const StringType& haystack) {
+  if (IsSubstringPred(needle, haystack) == expected_to_be_substring)
+    return AssertionSuccess();
+
+  const bool is_wide_string = sizeof(needle[0]) > 1;
+  const char* const begin_string_quote = is_wide_string ? "L\"" : "\"";
+  return AssertionFailure()
+      << "Value of: " << needle_expr << "\n"
+      << "  Actual: " << begin_string_quote << needle << "\"\n"
+      << "Expected: " << (expected_to_be_substring ? "" : "not ")
+      << "a substring of " << haystack_expr << "\n"
+      << "Which is: " << begin_string_quote << haystack << "\"";
+}
+
+}  // namespace
+
+// IsSubstring() and IsNotSubstring() check whether needle is a
+// substring of haystack (NULL is considered a substring of itself
+// only), and return an appropriate error message when they fail.
+
+AssertionResult IsSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const char* needle, const char* haystack) {
+  return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);
+}
+
+AssertionResult IsSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const wchar_t* needle, const wchar_t* haystack) {
+  return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);
+}
+
+AssertionResult IsNotSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const char* needle, const char* haystack) {
+  return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);
+}
+
+AssertionResult IsNotSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const wchar_t* needle, const wchar_t* haystack) {
+  return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);
+}
+
+AssertionResult IsSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const ::std::string& needle, const ::std::string& haystack) {
+  return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);
+}
+
+AssertionResult IsNotSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const ::std::string& needle, const ::std::string& haystack) {
+  return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);
+}
+
+#if GTEST_HAS_STD_WSTRING
+AssertionResult IsSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const ::std::wstring& needle, const ::std::wstring& haystack) {
+  return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);
+}
+
+AssertionResult IsNotSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const ::std::wstring& needle, const ::std::wstring& haystack) {
+  return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);
+}
+#endif  // GTEST_HAS_STD_WSTRING
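+
+// These are intended for the two-argument predicate-format macros, e.g.
+// (strings hypothetical):
+//
+//   EXPECT_PRED_FORMAT2(::testing::IsSubstring, "needle", "needle in haystack");
+//   EXPECT_PRED_FORMAT2(::testing::IsNotSubstring, "needle", "clean haystack");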
+
+namespace internal {
+
+#if GTEST_OS_WINDOWS
+
+namespace {
+
+// Helper function for the IsHRESULT{Success,Failure} predicates.
+AssertionResult HRESULTFailureHelper(const char* expr,
+                                     const char* expected,
+                                     long hr) {  // NOLINT
+# if GTEST_OS_WINDOWS_MOBILE
+
+  // Windows CE doesn't support FormatMessage.
+  const char error_text[] = "";
+
+# else
+
+  // Looks up the human-readable system message for the HRESULT code.
+  // Since we're not passing any params to FormatMessage, we don't
+  // want inserts expanded.
+  const DWORD kFlags = FORMAT_MESSAGE_FROM_SYSTEM |
+                       FORMAT_MESSAGE_IGNORE_INSERTS;
+  const DWORD kBufSize = 4096;  // String::Format can't exceed this length.
+  // Gets the system's human readable message string for this HRESULT.
+  char error_text[kBufSize] = { '\0' };
+  DWORD message_length = ::FormatMessageA(kFlags,
+                                          0,  // no source, we're asking system
+                                          hr,  // the error
+                                          0,  // no line width restrictions
+                                          error_text,  // output buffer
+                                          kBufSize,  // buf size
+                                          NULL);  // no arguments for inserts
+  // Trims trailing white space (FormatMessage leaves a trailing CR-LF).
+  for (; message_length && IsSpace(error_text[message_length - 1]);
+          --message_length) {
+    error_text[message_length - 1] = '\0';
+  }
+
+# endif  // GTEST_OS_WINDOWS_MOBILE
+
+  const String error_hex(String::Format("0x%08X ", hr));
+  return ::testing::AssertionFailure()
+      << "Expected: " << expr << " " << expected << ".\n"
+      << "  Actual: " << error_hex << error_text << "\n";
+}
+
+}  // namespace
+
+AssertionResult IsHRESULTSuccess(const char* expr, long hr) {  // NOLINT
+  if (SUCCEEDED(hr)) {
+    return AssertionSuccess();
+  }
+  return HRESULTFailureHelper(expr, "succeeds", hr);
+}
+
+AssertionResult IsHRESULTFailure(const char* expr, long hr) {  // NOLINT
+  if (FAILED(hr)) {
+    return AssertionSuccess();
+  }
+  return HRESULTFailureHelper(expr, "fails", hr);
+}
+
+#endif  // GTEST_OS_WINDOWS
+
+// Utility functions for encoding Unicode text (wide strings) in
+// UTF-8.
+
+// A Unicode code-point can have up to 21 bits, and is encoded in UTF-8
+// like this:
+//
+// Code-point length   Encoding
+//   0 -  7 bits       0xxxxxxx
+//   8 - 11 bits       110xxxxx 10xxxxxx
+//  12 - 16 bits       1110xxxx 10xxxxxx 10xxxxxx
+//  17 - 21 bits       11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
+
+// The maximum code-point a one-byte UTF-8 sequence can represent.
+const UInt32 kMaxCodePoint1 = (static_cast<UInt32>(1) <<  7) - 1;
+
+// The maximum code-point a two-byte UTF-8 sequence can represent.
+const UInt32 kMaxCodePoint2 = (static_cast<UInt32>(1) << (5 + 6)) - 1;
+
+// The maximum code-point a three-byte UTF-8 sequence can represent.
+const UInt32 kMaxCodePoint3 = (static_cast<UInt32>(1) << (4 + 2*6)) - 1;
+
+// The maximum code-point a four-byte UTF-8 sequence can represent.
+const UInt32 kMaxCodePoint4 = (static_cast<UInt32>(1) << (3 + 3*6)) - 1;
+
+// Chops off the n lowest bits from a bit pattern.  Returns the n
+// lowest bits.  As a side effect, the original bit pattern will be
+// shifted to the right by n bits.
+inline UInt32 ChopLowBits(UInt32* bits, int n) {
+  const UInt32 low_bits = *bits & ((static_cast<UInt32>(1) << n) - 1);
+  *bits >>= n;
+  return low_bits;
+}
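+
+// For example (values illustrative): with *bits == 0xE9 (1110 1001),
+// ChopLowBits(&bits, 6) returns 0x29 (the low bits 10 1001) and leaves
+// *bits == 0x3 (the remaining high bits 11).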
+
+// Converts a Unicode code point to a narrow string in UTF-8 encoding.
+// code_point parameter is of type UInt32 because wchar_t may not be
+// wide enough to contain a code point.
+// The output buffer str must contain at least 32 characters.
+// The function returns the address of the output buffer.
+// If the code_point is not a valid Unicode code point
+// (i.e. outside of Unicode range U+0 to U+10FFFF) it will be output
+// as '(Invalid Unicode 0xXXXXXXXX)'.
+char* CodePointToUtf8(UInt32 code_point, char* str) {
+  if (code_point <= kMaxCodePoint1) {
+    str[1] = '\0';
+    str[0] = static_cast<char>(code_point);                          // 0xxxxxxx
+  } else if (code_point <= kMaxCodePoint2) {
+    str[2] = '\0';
+    str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
+    str[0] = static_cast<char>(0xC0 | code_point);                   // 110xxxxx
+  } else if (code_point <= kMaxCodePoint3) {
+    str[3] = '\0';
+    str[2] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
+    str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
+    str[0] = static_cast<char>(0xE0 | code_point);                   // 1110xxxx
+  } else if (code_point <= kMaxCodePoint4) {
+    str[4] = '\0';
+    str[3] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
+    str[2] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
+    str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
+    str[0] = static_cast<char>(0xF0 | code_point);                   // 11110xxx
+  } else {
+    // The longest string String::Format can produce when invoked
+    // with these parameters is 28 characters long (not including
+    // the terminating nul character). We are asking for a 32-character
+    // buffer just in case. This is also enough for strncpy to
+    // null-terminate the destination string.
+    posix::StrNCpy(
+        str, String::Format("(Invalid Unicode 0x%X)", code_point).c_str(), 32);
+    str[31] = '\0';  // Makes sure no change in the format to strncpy leaves
+                     // the result unterminated.
+  }
+  return str;
+}
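+
+// Worked examples (code points illustrative):
+//
+//   char buf[32];
+//   CodePointToUtf8(0x45, buf);     // "E"                  (1 byte)
+//   CodePointToUtf8(0xE9, buf);     // "\xC3\xA9"           (2 bytes)
+//   CodePointToUtf8(0x20AC, buf);   // "\xE2\x82\xAC"       (3 bytes)
+//   CodePointToUtf8(0x1F600, buf);  // "\xF0\x9F\x98\x80"   (4 bytes)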
+
+// The following two functions only make sense if the system
+// uses UTF-16 for wide string encoding. All supported systems
+// with a 16-bit wchar_t (Windows, Cygwin, Symbian OS) do use UTF-16.
+
+// Determines if the arguments constitute UTF-16 surrogate pair
+// and thus should be combined into a single Unicode code point
+// using CreateCodePointFromUtf16SurrogatePair.
+inline bool IsUtf16SurrogatePair(wchar_t first, wchar_t second) {
+  return sizeof(wchar_t) == 2 &&
+      (first & 0xFC00) == 0xD800 && (second & 0xFC00) == 0xDC00;
+}
+
+// Creates a Unicode code point from UTF16 surrogate pair.
+inline UInt32 CreateCodePointFromUtf16SurrogatePair(wchar_t first,
+                                                    wchar_t second) {
+  const UInt32 mask = (1 << 10) - 1;
+  return (sizeof(wchar_t) == 2) ?
+      (((first & mask) << 10) | (second & mask)) + 0x10000 :
+      // This function should not be called when the condition is
+      // false, but we provide a sensible default in case it is.
+      static_cast<UInt32>(first);
+}
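+
+// Worked example (values illustrative): the UTF-16 surrogate pair
+// 0xD83D 0xDE00 encodes U+1F600.  With mask == 0x3FF:
+//   (0xD83D & mask) == 0x03D and (0xDE00 & mask) == 0x200, so
+//   ((0x03D << 10) | 0x200) + 0x10000 == 0xF600 + 0x10000 == 0x1F600.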
+
+// Converts a wide string to a narrow string in UTF-8 encoding.
+// The wide string is assumed to have the following encoding:
+//   UTF-16 if sizeof(wchar_t) == 2 (on Windows, Cygwin, Symbian OS)
+//   UTF-32 if sizeof(wchar_t) == 4 (on Linux)
+// Parameter str points to a null-terminated wide string.
+// Parameter num_chars may additionally limit the number
+// of wchar_t characters processed. -1 is used when the entire string
+// should be processed.
+// If the string contains code points that are not valid Unicode code points
+// (i.e. outside of Unicode range U+0 to U+10FFFF) they will be output
+// as '(Invalid Unicode 0xXXXXXXXX)'. If the string is in UTF-16 encoding
+// and contains invalid UTF-16 surrogate pairs, values in those pairs
+// will be encoded as individual Unicode characters from the Basic
+// Multilingual Plane.
+String WideStringToUtf8(const wchar_t* str, int num_chars) {
+  if (num_chars == -1)
+    num_chars = static_cast<int>(wcslen(str));
+
+  ::std::stringstream stream;
+  for (int i = 0; i < num_chars; ++i) {
+    UInt32 unicode_code_point;
+
+    if (str[i] == L'\0') {
+      break;
+    } else if (i + 1 < num_chars && IsUtf16SurrogatePair(str[i], str[i + 1])) {
+      unicode_code_point = CreateCodePointFromUtf16SurrogatePair(str[i],
+                                                                 str[i + 1]);
+      i++;
+    } else {
+      unicode_code_point = static_cast<UInt32>(str[i]);
+    }
+
+    char buffer[32];  // CodePointToUtf8 requires a buffer this big.
+    stream << CodePointToUtf8(unicode_code_point, buffer);
+  }
+  return StringStreamToString(&stream);
+}
+
+// Converts a wide C string to a String using the UTF-8 encoding.
+// NULL will be converted to "(null)".
+String String::ShowWideCString(const wchar_t * wide_c_str) {
+  if (wide_c_str == NULL) return String("(null)");
+
+  return String(internal::WideStringToUtf8(wide_c_str, -1).c_str());
+}
+
+// Similar to ShowWideCString(), except that this function encloses
+// the converted string in double quotes.
+String String::ShowWideCStringQuoted(const wchar_t* wide_c_str) {
+  if (wide_c_str == NULL) return String("(null)");
+
+  return String::Format("L\"%s\"",
+                        String::ShowWideCString(wide_c_str).c_str());
+}
+
+// Compares two wide C strings.  Returns true iff they have the same
+// content.
+//
+// Unlike wcscmp(), this function can handle NULL argument(s).  A NULL
+// C string is considered different to any non-NULL C string,
+// including the empty string.
+bool String::WideCStringEquals(const wchar_t * lhs, const wchar_t * rhs) {
+  if (lhs == NULL) return rhs == NULL;
+
+  if (rhs == NULL) return false;
+
+  return wcscmp(lhs, rhs) == 0;
+}
+
+// Helper function for *_STREQ on wide strings.
+AssertionResult CmpHelperSTREQ(const char* expected_expression,
+                               const char* actual_expression,
+                               const wchar_t* expected,
+                               const wchar_t* actual) {
+  if (String::WideCStringEquals(expected, actual)) {
+    return AssertionSuccess();
+  }
+
+  return EqFailure(expected_expression,
+                   actual_expression,
+                   String::ShowWideCStringQuoted(expected),
+                   String::ShowWideCStringQuoted(actual),
+                   false);
+}
+
+// Helper function for *_STRNE on wide strings.
+AssertionResult CmpHelperSTRNE(const char* s1_expression,
+                               const char* s2_expression,
+                               const wchar_t* s1,
+                               const wchar_t* s2) {
+  if (!String::WideCStringEquals(s1, s2)) {
+    return AssertionSuccess();
+  }
+
+  return AssertionFailure() << "Expected: (" << s1_expression << ") != ("
+                            << s2_expression << "), actual: "
+                            << String::ShowWideCStringQuoted(s1)
+                            << " vs " << String::ShowWideCStringQuoted(s2);
+}
+
+// Compares two C strings, ignoring case.  Returns true iff they have
+// the same content.
+//
+// Unlike strcasecmp(), this function can handle NULL argument(s).  A
+// NULL C string is considered different to any non-NULL C string,
+// including the empty string.
+bool String::CaseInsensitiveCStringEquals(const char * lhs, const char * rhs) {
+  if (lhs == NULL)
+    return rhs == NULL;
+  if (rhs == NULL)
+    return false;
+  return posix::StrCaseCmp(lhs, rhs) == 0;
+}
+
+// Compares two wide C strings, ignoring case.  Returns true iff they
+// have the same content.
+//
+// Unlike wcscasecmp(), this function can handle NULL argument(s).
+// A NULL C string is considered different to any non-NULL wide C string,
+// including the empty string.
+// NB: The implementations on different platforms slightly differ.
+// On Windows, this method uses _wcsicmp, which compares according to the
+// LC_CTYPE environment variable.  On GNU platforms this method uses
+// wcscasecmp, which compares according to the LC_CTYPE category of the
+// current locale.  On Mac OS X, it uses towlower, which also uses the
+// LC_CTYPE category of the current locale.
+bool String::CaseInsensitiveWideCStringEquals(const wchar_t* lhs,
+                                              const wchar_t* rhs) {
+  if (lhs == NULL) return rhs == NULL;
+
+  if (rhs == NULL) return false;
+
+#if GTEST_OS_WINDOWS
+  return _wcsicmp(lhs, rhs) == 0;
+#elif GTEST_OS_LINUX && !GTEST_OS_LINUX_ANDROID
+  return wcscasecmp(lhs, rhs) == 0;
+#else
+  // Android, Mac OS X and Cygwin don't define wcscasecmp.
+  // Other unknown OSes may not define it either.
+  wint_t left, right;
+  do {
+    left = towlower(*lhs++);
+    right = towlower(*rhs++);
+  } while (left && left == right);
+  return left == right;
+#endif  // OS selector
+}
+
+// Compares this with another String.
+// Returns < 0 if this is less than rhs, 0 if this is equal to rhs, or > 0
+// if this is greater than rhs.
+int String::Compare(const String & rhs) const {
+  const char* const lhs_c_str = c_str();
+  const char* const rhs_c_str = rhs.c_str();
+
+  if (lhs_c_str == NULL) {
+    return rhs_c_str == NULL ? 0 : -1;  // NULL < anything except NULL
+  } else if (rhs_c_str == NULL) {
+    return 1;
+  }
+
+  const size_t shorter_str_len =
+      length() <= rhs.length() ? length() : rhs.length();
+  for (size_t i = 0; i != shorter_str_len; i++) {
+    if (lhs_c_str[i] < rhs_c_str[i]) {
+      return -1;
+    } else if (lhs_c_str[i] > rhs_c_str[i]) {
+      return 1;
+    }
+  }
+  return (length() < rhs.length()) ? -1 :
+      (length() > rhs.length()) ? 1 : 0;
+}
+
+// Returns true iff this String ends with the given suffix.  *Any*
+// String is considered to end with a NULL or empty suffix.
+bool String::EndsWith(const char* suffix) const {
+  if (suffix == NULL || CStringEquals(suffix, "")) return true;
+
+  if (c_str() == NULL) return false;
+
+  const size_t this_len = strlen(c_str());
+  const size_t suffix_len = strlen(suffix);
+  return (this_len >= suffix_len) &&
+         CStringEquals(c_str() + this_len - suffix_len, suffix);
+}
+
+// Returns true iff this String ends with the given suffix, ignoring case.
+// Any String is considered to end with a NULL or empty suffix.
+bool String::EndsWithCaseInsensitive(const char* suffix) const {
+  if (suffix == NULL || CStringEquals(suffix, "")) return true;
+
+  if (c_str() == NULL) return false;
+
+  const size_t this_len = strlen(c_str());
+  const size_t suffix_len = strlen(suffix);
+  return (this_len >= suffix_len) &&
+         CaseInsensitiveCStringEquals(c_str() + this_len - suffix_len, suffix);
+}
+
+// Formats a list of arguments to a String, using the same format
+// spec string as for printf.
+//
+// We do not use the StringPrintf class as it is not universally
+// available.
+//
+// The result is limited to 4096 characters (including the trailing 0).
+// If 4096 characters are not enough to format the input, or if
+// there's an error, "<formatting error or buffer exceeded>" is
+// returned.
+String String::Format(const char * format, ...) {
+  va_list args;
+  va_start(args, format);
+
+  char buffer[4096];
+  const int kBufferSize = sizeof(buffer)/sizeof(buffer[0]);
+
+  // MSVC 8 deprecates vsnprintf(), so we want to suppress warning
+  // 4996 (deprecated function) there.
+#ifdef _MSC_VER  // We are using MSVC.
+# pragma warning(push)          // Saves the current warning state.
+# pragma warning(disable:4996)  // Temporarily disables warning 4996.
+
+  const int size = vsnprintf(buffer, kBufferSize, format, args);
+
+# pragma warning(pop)           // Restores the warning state.
+#else  // We are not using MSVC.
+  const int size = vsnprintf(buffer, kBufferSize, format, args);
+#endif  // _MSC_VER
+  va_end(args);
+
+  // vsnprintf()'s behavior is not portable.  When the buffer is not
+  // big enough, it returns a negative value in MSVC, and returns the
+  // needed buffer size on Linux.  When there is an output error, it
+  // always returns a negative value.  For simplicity, we lump the two
+  // error cases together.
+  if (size < 0 || size >= kBufferSize) {
+    return String("<formatting error or buffer exceeded>");
+  } else {
+    return String(buffer, size);
+  }
+}
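+
+// Usage sketch (values are illustrative only):
+//
+//   const String s = String::Format("%d %s", 3, "blobs");  // s == "3 blobs"
+//
+// Anything that does not fit into the 4096-byte buffer above degrades to the
+// literal string "<formatting error or buffer exceeded>".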
+
+// Converts the buffer in a stringstream to a String, converting NUL
+// bytes to "\\0" along the way.
+String StringStreamToString(::std::stringstream* ss) {
+  const ::std::string& str = ss->str();
+  const char* const start = str.c_str();
+  const char* const end = start + str.length();
+
+  // We need to use a helper stringstream to do this transformation
+  // because String doesn't support push_back().
+  ::std::stringstream helper;
+  for (const char* ch = start; ch != end; ++ch) {
+    if (*ch == '\0') {
+      helper << "\\0";  // Replaces NUL with "\\0";
+    } else {
+      helper.put(*ch);
+    }
+  }
+
+  return String(helper.str().c_str());
+}
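+
+// Usage sketch (illustrative only):
+//
+//   ::std::stringstream ss;
+//   ss << "ab" << '\0' << "cd";
+//   // StringStreamToString(&ss) rewrites the embedded NUL byte as the two
+//   // characters '\' and '0', so the result prints literally as: ab\0cd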
+
+// Appends the user-supplied message to the Google-Test-generated message.
+String AppendUserMessage(const String& gtest_msg,
+                         const Message& user_msg) {
+  // Appends the user message if it's non-empty.
+  const String user_msg_string = user_msg.GetString();
+  if (user_msg_string.empty()) {
+    return gtest_msg;
+  }
+
+  Message msg;
+  msg << gtest_msg << "\n" << user_msg_string;
+
+  return msg.GetString();
+}
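+
+// This helper is what joins the message streamed into an assertion with the
+// assertion's own failure text, e.g. (blob is a made-up object):
+//
+//   EXPECT_EQ(4, blob.num_axes()) << "unexpected blob shape";
+//
+// If the check fails, the Google-Test-generated text and the user string
+// "unexpected blob shape" end up separated by a single newline, as done above.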
+
+}  // namespace internal
+
+// class TestResult
+
+// Creates an empty TestResult.
+TestResult::TestResult()
+    : death_test_count_(0),
+      elapsed_time_(0) {
+}
+
+// D'tor.
+TestResult::~TestResult() {
+}
+
+// Returns the i-th test part result among all the results. i can
+// range from 0 to total_part_count() - 1. If i is not in that range,
+// aborts the program.
+const TestPartResult& TestResult::GetTestPartResult(int i) const {
+  if (i < 0 || i >= total_part_count())
+    internal::posix::Abort();
+  return test_part_results_.at(i);
+}
+
+// Returns the i-th test property. i can range from 0 to
+// test_property_count() - 1. If i is not in that range, aborts the
+// program.
+const TestProperty& TestResult::GetTestProperty(int i) const {
+  if (i < 0 || i >= test_property_count())
+    internal::posix::Abort();
+  return test_properties_.at(i);
+}
+
+// Clears the test part results.
+void TestResult::ClearTestPartResults() {
+  test_part_results_.clear();
+}
+
+// Adds a test part result to the list.
+void TestResult::AddTestPartResult(const TestPartResult& test_part_result) {
+  test_part_results_.push_back(test_part_result);
+}
+
+// Adds a test property to the list. If a property with the same key as the
+// supplied property is already represented, the value of this test_property
+// replaces the old value for that key.
+void TestResult::RecordProperty(const TestProperty& test_property) {
+  if (!ValidateTestProperty(test_property)) {
+    return;
+  }
+  internal::MutexLock lock(&test_properites_mutex_);
+  const std::vector<TestProperty>::iterator property_with_matching_key =
+      std::find_if(test_properties_.begin(), test_properties_.end(),
+                   internal::TestPropertyKeyIs(test_property.key()));
+  if (property_with_matching_key == test_properties_.end()) {
+    test_properties_.push_back(test_property);
+    return;
+  }
+  property_with_matching_key->SetValue(test_property.value());
+}
+
+// Adds a failure if the key is a reserved attribute of Google Test
+// testcase tags.  Returns true if the property is valid.
+bool TestResult::ValidateTestProperty(const TestProperty& test_property) {
+  internal::String key(test_property.key());
+  if (key == "name" || key == "status" || key == "time" || key == "classname") {
+    ADD_FAILURE()
+        << "Reserved key used in RecordProperty(): "
+        << key
+        << " ('name', 'status', 'time', and 'classname' are reserved by "
+        << GTEST_NAME_ << ")";
+    return false;
+  }
+  return true;
+}
+
+// Clears the object.
+void TestResult::Clear() {
+  test_part_results_.clear();
+  test_properties_.clear();
+  death_test_count_ = 0;
+  elapsed_time_ = 0;
+}
+
+// Returns true iff the test failed.
+bool TestResult::Failed() const {
+  for (int i = 0; i < total_part_count(); ++i) {
+    if (GetTestPartResult(i).failed())
+      return true;
+  }
+  return false;
+}
+
+// Returns true iff the test part fatally failed.
+static bool TestPartFatallyFailed(const TestPartResult& result) {
+  return result.fatally_failed();
+}
+
+// Returns true iff the test fatally failed.
+bool TestResult::HasFatalFailure() const {
+  return CountIf(test_part_results_, TestPartFatallyFailed) > 0;
+}
+
+// Returns true iff the test part non-fatally failed.
+static bool TestPartNonfatallyFailed(const TestPartResult& result) {
+  return result.nonfatally_failed();
+}
+
+// Returns true iff the test has a non-fatal failure.
+bool TestResult::HasNonfatalFailure() const {
+  return CountIf(test_part_results_, TestPartNonfatallyFailed) > 0;
+}
+
+// Gets the number of all test parts.  This is the sum of the number
+// of successful test parts and the number of failed test parts.
+int TestResult::total_part_count() const {
+  return static_cast<int>(test_part_results_.size());
+}
+
+// Returns the number of the test properties.
+int TestResult::test_property_count() const {
+  return static_cast<int>(test_properties_.size());
+}
+
+// class Test
+
+// Creates a Test object.
+
+// The c'tor saves the values of all Google Test flags.
+Test::Test()
+    : gtest_flag_saver_(new internal::GTestFlagSaver) {
+}
+
+// The d'tor restores the values of all Google Test flags.
+Test::~Test() {
+  delete gtest_flag_saver_;
+}
+
+// Sets up the test fixture.
+//
+// A sub-class may override this.
+void Test::SetUp() {
+}
+
+// Tears down the test fixture.
+//
+// A sub-class may override this.
+void Test::TearDown() {
+}
+
+// Allows user supplied key value pairs to be recorded for later output.
+void Test::RecordProperty(const char* key, const char* value) {
+  UnitTest::GetInstance()->RecordPropertyForCurrentTest(key, value);
+}
+
+// Allows user supplied key value pairs to be recorded for later output.
+void Test::RecordProperty(const char* key, int value) {
+  Message value_message;
+  value_message << value;
+  RecordProperty(key, value_message.GetString().c_str());
+}
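+
+// Usage sketch (NetTest and the keys are made-up names):
+//
+//   TEST_F(NetTest, Forward) {
+//     RecordProperty("backend", "CPU");   // const char* overload
+//     RecordProperty("iterations", 50);   // int overload, stringified above
+//   }
+//
+// The recorded key/value pairs are emitted as extra attributes of the
+// corresponding <testcase> element in the XML report.  Note that "name",
+// "status", "time", and "classname" are reserved keys and are rejected.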
+
+namespace internal {
+
+void ReportFailureInUnknownLocation(TestPartResult::Type result_type,
+                                    const String& message) {
+  // This function is a friend of UnitTest and as such has access to
+  // AddTestPartResult.
+  UnitTest::GetInstance()->AddTestPartResult(
+      result_type,
+      NULL,  // No info about the source file where the exception occurred.
+      -1,    // We have no info on which line caused the exception.
+      message,
+      String());  // No stack trace, either.
+}
+
+}  // namespace internal
+
+// Google Test requires all tests in the same test case to use the same test
+// fixture class.  This function checks if the current test has the
+// same fixture class as the first test in the current test case.  If
+// yes, it returns true; otherwise it generates a Google Test failure and
+// returns false.
+bool Test::HasSameFixtureClass() {
+  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+  const TestCase* const test_case = impl->current_test_case();
+
+  // Info about the first test in the current test case.
+  const TestInfo* const first_test_info = test_case->test_info_list()[0];
+  const internal::TypeId first_fixture_id = first_test_info->fixture_class_id_;
+  const char* const first_test_name = first_test_info->name();
+
+  // Info about the current test.
+  const TestInfo* const this_test_info = impl->current_test_info();
+  const internal::TypeId this_fixture_id = this_test_info->fixture_class_id_;
+  const char* const this_test_name = this_test_info->name();
+
+  if (this_fixture_id != first_fixture_id) {
+    // Is the first test defined using TEST?
+    const bool first_is_TEST = first_fixture_id == internal::GetTestTypeId();
+    // Is this test defined using TEST?
+    const bool this_is_TEST = this_fixture_id == internal::GetTestTypeId();
+
+    if (first_is_TEST || this_is_TEST) {
+      // The user mixed TEST and TEST_F in this test case - we'll tell
+      // him/her how to fix it.
+
+      // Gets the name of the TEST and the name of the TEST_F.  Note
+      // that first_is_TEST and this_is_TEST cannot both be true, as
+      // the fixture IDs are different for the two tests.
+      const char* const TEST_name =
+          first_is_TEST ? first_test_name : this_test_name;
+      const char* const TEST_F_name =
+          first_is_TEST ? this_test_name : first_test_name;
+
+      ADD_FAILURE()
+          << "All tests in the same test case must use the same test fixture\n"
+          << "class, so mixing TEST_F and TEST in the same test case is\n"
+          << "illegal.  In test case " << this_test_info->test_case_name()
+          << ",\n"
+          << "test " << TEST_F_name << " is defined using TEST_F but\n"
+          << "test " << TEST_name << " is defined using TEST.  You probably\n"
+          << "want to change the TEST to TEST_F or move it to another test\n"
+          << "case.";
+    } else {
+      // The user defined two fixture classes with the same name in
+      // two namespaces - we'll tell him/her how to fix it.
+      ADD_FAILURE()
+          << "All tests in the same test case must use the same test fixture\n"
+          << "class.  However, in test case "
+          << this_test_info->test_case_name() << ",\n"
+          << "you defined test " << first_test_name
+          << " and test " << this_test_name << "\n"
+          << "using two different test fixture classes.  This can happen if\n"
+          << "the two classes are from different namespaces or translation\n"
+          << "units and have the same name.  You should probably rename one\n"
+          << "of the classes to put the tests into different test cases.";
+    }
+    return false;
+  }
+
+  return true;
+}
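+
+// The mistake diagnosed above typically looks like this (CoordTest is a
+// made-up name):
+//
+//   class CoordTest : public ::testing::Test { ... };
+//   TEST_F(CoordTest, Rotate) { ... }  // fixture: CoordTest
+//   TEST(CoordTest, Scale) { ... }     // fixture: the implicit TEST class
+//
+// Both tests land in test case "CoordTest" but use different fixture
+// classes, so running the second one reports the TEST/TEST_F failure above.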
+
+#if GTEST_HAS_SEH
+
+// Adds an "exception thrown" fatal failure to the current test.  This
+// function returns its result via an output parameter pointer because VC++
+// prohibits creation of objects with destructors on stack in functions
+// using __try (see error C2712).
+static internal::String* FormatSehExceptionMessage(DWORD exception_code,
+                                                   const char* location) {
+  Message message;
+  message << "SEH exception with code 0x" << std::setbase(16) <<
+    exception_code << std::setbase(10) << " thrown in " << location << ".";
+
+  return new internal::String(message.GetString());
+}
+
+#endif  // GTEST_HAS_SEH
+
+#if GTEST_HAS_EXCEPTIONS
+
+// Adds an "exception thrown" fatal failure to the current test.
+static internal::String FormatCxxExceptionMessage(const char* description,
+                                                  const char* location) {
+  Message message;
+  if (description != NULL) {
+    message << "C++ exception with description \"" << description << "\"";
+  } else {
+    message << "Unknown C++ exception";
+  }
+  message << " thrown in " << location << ".";
+
+  return message.GetString();
+}
+
+static internal::String PrintTestPartResultToString(
+    const TestPartResult& test_part_result);
+
+// A failed Google Test assertion will throw an exception of this type when
+// GTEST_FLAG(throw_on_failure) is true (if exceptions are enabled).  We
+// derive it from std::runtime_error, which is for errors presumably
+// detectable only at run time.  Since std::runtime_error inherits from
+// std::exception, many testing frameworks know how to extract and print the
+// message inside it.
+class GoogleTestFailureException : public ::std::runtime_error {
+ public:
+  explicit GoogleTestFailureException(const TestPartResult& failure)
+      : ::std::runtime_error(PrintTestPartResultToString(failure).c_str()) {}
+};
+#endif  // GTEST_HAS_EXCEPTIONS
+
+namespace internal {
+// We put these helper functions in the internal namespace as IBM's xlC
+// compiler rejects the code if they were declared static.
+
+// Runs the given method and handles SEH exceptions it throws, when
+// SEH is supported; returns the 0-value for type Result in case of an
+// SEH exception.  (Microsoft compilers cannot handle SEH and C++
+// exceptions in the same function.  Therefore, we provide a separate
+// wrapper function for handling SEH exceptions.)
+template <class T, typename Result>
+Result HandleSehExceptionsInMethodIfSupported(
+    T* object, Result (T::*method)(), const char* location) {
+#if GTEST_HAS_SEH
+  __try {
+    return (object->*method)();
+  } __except (internal::UnitTestOptions::GTestShouldProcessSEH(  // NOLINT
+      GetExceptionCode())) {
+    // We create the exception message on the heap because VC++ prohibits
+    // creation of objects with destructors on stack in functions using __try
+    // (see error C2712).
+    internal::String* exception_message = FormatSehExceptionMessage(
+        GetExceptionCode(), location);
+    internal::ReportFailureInUnknownLocation(TestPartResult::kFatalFailure,
+                                             *exception_message);
+    delete exception_message;
+    return static_cast<Result>(0);
+  }
+#else
+  (void)location;
+  return (object->*method)();
+#endif  // GTEST_HAS_SEH
+}
+
+// Runs the given method and catches and reports C++ and/or SEH-style
+// exceptions, if they are supported; returns the 0-value for type
+// Result in case of an SEH exception.
+template <class T, typename Result>
+Result HandleExceptionsInMethodIfSupported(
+    T* object, Result (T::*method)(), const char* location) {
+  // NOTE: The user code can affect the way in which Google Test handles
+  // exceptions by setting GTEST_FLAG(catch_exceptions), but only before
+  // RUN_ALL_TESTS() starts. It is technically possible to check the flag
+  // after the exception is caught and either report or re-throw the
+  // exception based on the flag's value:
+  //
+  // try {
+  //   // Perform the test method.
+  // } catch (...) {
+  //   if (GTEST_FLAG(catch_exceptions))
+  //     // Report the exception as failure.
+  //   else
+  //     throw;  // Re-throws the original exception.
+  // }
+  //
+  // However, the purpose of this flag is to allow the program to drop into
+  // the debugger when the exception is thrown. On most platforms, once the
+  // control enters the catch block, the exception origin information is
+  // lost and the debugger will stop the program at the point of the
+  // re-throw in this function -- instead of at the point of the original
+  // throw statement in the code under test.  For this reason, we perform
+  // the check early, sacrificing the ability to affect Google Test's
+  // exception handling in the method where the exception is thrown.
+  if (internal::GetUnitTestImpl()->catch_exceptions()) {
+#if GTEST_HAS_EXCEPTIONS
+    try {
+      return HandleSehExceptionsInMethodIfSupported(object, method, location);
+    } catch (const GoogleTestFailureException&) {  // NOLINT
+      // This exception doesn't originate in code under test. It makes no
+      // sense to report it as a test failure.
+      throw;
+    } catch (const std::exception& e) {  // NOLINT
+      internal::ReportFailureInUnknownLocation(
+          TestPartResult::kFatalFailure,
+          FormatCxxExceptionMessage(e.what(), location));
+    } catch (...) {  // NOLINT
+      internal::ReportFailureInUnknownLocation(
+          TestPartResult::kFatalFailure,
+          FormatCxxExceptionMessage(NULL, location));
+    }
+    return static_cast<Result>(0);
+#else
+    return HandleSehExceptionsInMethodIfSupported(object, method, location);
+#endif  // GTEST_HAS_EXCEPTIONS
+  } else {
+    return (object->*method)();
+  }
+}
+
+}  // namespace internal
+
+// Runs the test and updates the test result.
+void Test::Run() {
+  if (!HasSameFixtureClass()) return;
+
+  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+  impl->os_stack_trace_getter()->UponLeavingGTest();
+  internal::HandleExceptionsInMethodIfSupported(this, &Test::SetUp, "SetUp()");
+  // We will run the test only if SetUp() was successful.
+  if (!HasFatalFailure()) {
+    impl->os_stack_trace_getter()->UponLeavingGTest();
+    internal::HandleExceptionsInMethodIfSupported(
+        this, &Test::TestBody, "the test body");
+  }
+
+  // However, we want to clean up as much as possible.  Hence we will
+  // always call TearDown(), even if SetUp() or the test body has
+  // failed.
+  impl->os_stack_trace_getter()->UponLeavingGTest();
+  internal::HandleExceptionsInMethodIfSupported(
+      this, &Test::TearDown, "TearDown()");
+}
+
+// Returns true iff the current test has a fatal failure.
+bool Test::HasFatalFailure() {
+  return internal::GetUnitTestImpl()->current_test_result()->HasFatalFailure();
+}
+
+// Returns true iff the current test has a non-fatal failure.
+bool Test::HasNonfatalFailure() {
+  return internal::GetUnitTestImpl()->current_test_result()->
+      HasNonfatalFailure();
+}
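+
+// These two accessors enable the usual subroutine pattern (CheckShape, Blob,
+// and BlobTest are made-up names): an ASSERT_* failure inside a helper only
+// aborts the helper, not its caller, so the caller bails out explicitly:
+//
+//   void CheckShape(const Blob& b) { ASSERT_EQ(4, b.num_axes()); }
+//
+//   TEST(BlobTest, Shape) {
+//     Blob blob(2, 3, 4, 5);
+//     CheckShape(blob);
+//     if (HasFatalFailure()) return;  // stop if CheckShape() ASSERT-failed
+//     ...
+//   }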
+
+// class TestInfo
+
+// Constructs a TestInfo object. It assumes ownership of the test factory
+// object.
+// TODO(vladl@google.com): Make a_test_case_name and a_name const string&'s
+// to signify they cannot be NULLs.
+TestInfo::TestInfo(const char* a_test_case_name,
+                   const char* a_name,
+                   const char* a_type_param,
+                   const char* a_value_param,
+                   internal::TypeId fixture_class_id,
+                   internal::TestFactoryBase* factory)
+    : test_case_name_(a_test_case_name),
+      name_(a_name),
+      type_param_(a_type_param ? new std::string(a_type_param) : NULL),
+      value_param_(a_value_param ? new std::string(a_value_param) : NULL),
+      fixture_class_id_(fixture_class_id),
+      should_run_(false),
+      is_disabled_(false),
+      matches_filter_(false),
+      factory_(factory),
+      result_() {}
+
+// Destructs a TestInfo object.
+TestInfo::~TestInfo() { delete factory_; }
+
+namespace internal {
+
+// Creates a new TestInfo object and registers it with Google Test;
+// returns the created object.
+//
+// Arguments:
+//
+//   test_case_name:   name of the test case
+//   name:             name of the test
+//   type_param:       the name of the test's type parameter, or NULL if
+//                     this is not a typed or a type-parameterized test.
+//   value_param:      text representation of the test's value parameter,
+//                     or NULL if this is not a value-parameterized test.
+//   fixture_class_id: ID of the test fixture class
+//   set_up_tc:        pointer to the function that sets up the test case
+//   tear_down_tc:     pointer to the function that tears down the test case
+//   factory:          pointer to the factory that creates a test object.
+//                     The newly created TestInfo instance will assume
+//                     ownership of the factory object.
+TestInfo* MakeAndRegisterTestInfo(
+    const char* test_case_name, const char* name,
+    const char* type_param,
+    const char* value_param,
+    TypeId fixture_class_id,
+    SetUpTestCaseFunc set_up_tc,
+    TearDownTestCaseFunc tear_down_tc,
+    TestFactoryBase* factory) {
+  TestInfo* const test_info =
+      new TestInfo(test_case_name, name, type_param, value_param,
+                   fixture_class_id, factory);
+  GetUnitTestImpl()->AddTestInfo(set_up_tc, tear_down_tc, test_info);
+  return test_info;
+}
+
+#if GTEST_HAS_PARAM_TEST
+void ReportInvalidTestCaseType(const char* test_case_name,
+                               const char* file, int line) {
+  Message errors;
+  errors
+      << "Attempted redefinition of test case " << test_case_name << ".\n"
+      << "All tests in the same test case must use the same test fixture\n"
+      << "class.  However, in test case " << test_case_name << ", you tried\n"
+      << "to define a test using a fixture class different from the one\n"
+      << "used earlier. This can happen if the two fixture classes are\n"
+      << "from different namespaces and have the same name. You should\n"
+      << "probably rename one of the classes to put the tests into different\n"
+      << "test cases.";
+
+  fprintf(stderr, "%s %s", FormatFileLocation(file, line).c_str(),
+          errors.GetString().c_str());
+}
+#endif  // GTEST_HAS_PARAM_TEST
+
+}  // namespace internal
+
+namespace {
+
+// A predicate that checks the test name of a TestInfo against a known
+// value.
+//
+// This is used for implementation of the TestCase class only.  We put
+// it in the anonymous namespace to prevent polluting the outer
+// namespace.
+//
+// TestNameIs is copyable.
+class TestNameIs {
+ public:
+  // Constructor.
+  //
+  // TestNameIs has NO default constructor.
+  explicit TestNameIs(const char* name)
+      : name_(name) {}
+
+  // Returns true iff the test name of test_info matches name_.
+  bool operator()(const TestInfo * test_info) const {
+    return test_info && internal::String(test_info->name()).Compare(name_) == 0;
+  }
+
+ private:
+  internal::String name_;
+};
+
+}  // namespace
+
+namespace internal {
+
+// This method expands all parameterized tests registered with macros TEST_P
+// and INSTANTIATE_TEST_CASE_P into regular tests and registers those.
+// This will be done just once during the program runtime.
+void UnitTestImpl::RegisterParameterizedTests() {
+#if GTEST_HAS_PARAM_TEST
+  if (!parameterized_tests_registered_) {
+    parameterized_test_registry_.RegisterTests();
+    parameterized_tests_registered_ = true;
+  }
+#endif
+}
+
+}  // namespace internal
+
+// Creates the test object, runs it, records its result, and then
+// deletes it.
+void TestInfo::Run() {
+  if (!should_run_) return;
+
+  // Tells UnitTest where to store test result.
+  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+  impl->set_current_test_info(this);
+
+  TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();
+
+  // Notifies the unit test event listeners that a test is about to start.
+  repeater->OnTestStart(*this);
+
+  const TimeInMillis start = internal::GetTimeInMillis();
+
+  impl->os_stack_trace_getter()->UponLeavingGTest();
+
+  // Creates the test object.
+  Test* const test = internal::HandleExceptionsInMethodIfSupported(
+      factory_, &internal::TestFactoryBase::CreateTest,
+      "the test fixture's constructor");
+
+  // Runs the test only if the test object was created and its
+  // constructor didn't generate a fatal failure.
+  if ((test != NULL) && !Test::HasFatalFailure()) {
+    // This doesn't throw, as all user code that can throw is wrapped in
+    // exception handling code.
+    test->Run();
+  }
+
+  // Deletes the test object.
+  impl->os_stack_trace_getter()->UponLeavingGTest();
+  internal::HandleExceptionsInMethodIfSupported(
+      test, &Test::DeleteSelf_, "the test fixture's destructor");
+
+  result_.set_elapsed_time(internal::GetTimeInMillis() - start);
+
+  // Notifies the unit test event listener that a test has just finished.
+  repeater->OnTestEnd(*this);
+
+  // Tells UnitTest to stop associating assertion results to this
+  // test.
+  impl->set_current_test_info(NULL);
+}
+
+// class TestCase
+
+// Gets the number of successful tests in this test case.
+int TestCase::successful_test_count() const {
+  return CountIf(test_info_list_, TestPassed);
+}
+
+// Gets the number of failed tests in this test case.
+int TestCase::failed_test_count() const {
+  return CountIf(test_info_list_, TestFailed);
+}
+
+// Gets the number of disabled tests in this test case.
+int TestCase::disabled_test_count() const {
+  return CountIf(test_info_list_, TestDisabled);
+}
+
+// Gets the number of tests in this test case that should run.
+int TestCase::test_to_run_count() const {
+  return CountIf(test_info_list_, ShouldRunTest);
+}
+
+// Gets the number of all tests.
+int TestCase::total_test_count() const {
+  return static_cast<int>(test_info_list_.size());
+}
+
+// Creates a TestCase with the given name.
+//
+// Arguments:
+//
+//   name:         name of the test case
+//   a_type_param: the name of the test case's type parameter, or NULL if
+//                 this is not a typed or a type-parameterized test case.
+//   set_up_tc:    pointer to the function that sets up the test case
+//   tear_down_tc: pointer to the function that tears down the test case
+TestCase::TestCase(const char* a_name, const char* a_type_param,
+                   Test::SetUpTestCaseFunc set_up_tc,
+                   Test::TearDownTestCaseFunc tear_down_tc)
+    : name_(a_name),
+      type_param_(a_type_param ? new std::string(a_type_param) : NULL),
+      set_up_tc_(set_up_tc),
+      tear_down_tc_(tear_down_tc),
+      should_run_(false),
+      elapsed_time_(0) {
+}
+
+// Destructor of TestCase.
+TestCase::~TestCase() {
+  // Deletes every Test in the collection.
+  ForEach(test_info_list_, internal::Delete<TestInfo>);
+}
+
+// Returns the i-th test among all the tests. i can range from 0 to
+// total_test_count() - 1. If i is not in that range, returns NULL.
+const TestInfo* TestCase::GetTestInfo(int i) const {
+  const int index = GetElementOr(test_indices_, i, -1);
+  return index < 0 ? NULL : test_info_list_[index];
+}
+
+// Returns the i-th test among all the tests. i can range from 0 to
+// total_test_count() - 1. If i is not in that range, returns NULL.
+TestInfo* TestCase::GetMutableTestInfo(int i) {
+  const int index = GetElementOr(test_indices_, i, -1);
+  return index < 0 ? NULL : test_info_list_[index];
+}
+
+// Adds a test to this test case.  Will delete the test upon
+// destruction of the TestCase object.
+void TestCase::AddTestInfo(TestInfo * test_info) {
+  test_info_list_.push_back(test_info);
+  test_indices_.push_back(static_cast<int>(test_indices_.size()));
+}
+
+// Runs every test in this TestCase.
+void TestCase::Run() {
+  if (!should_run_) return;
+
+  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+  impl->set_current_test_case(this);
+
+  TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();
+
+  repeater->OnTestCaseStart(*this);
+  impl->os_stack_trace_getter()->UponLeavingGTest();
+  internal::HandleExceptionsInMethodIfSupported(
+      this, &TestCase::RunSetUpTestCase, "SetUpTestCase()");
+
+  const internal::TimeInMillis start = internal::GetTimeInMillis();
+  for (int i = 0; i < total_test_count(); i++) {
+    GetMutableTestInfo(i)->Run();
+  }
+  elapsed_time_ = internal::GetTimeInMillis() - start;
+
+  impl->os_stack_trace_getter()->UponLeavingGTest();
+  internal::HandleExceptionsInMethodIfSupported(
+      this, &TestCase::RunTearDownTestCase, "TearDownTestCase()");
+
+  repeater->OnTestCaseEnd(*this);
+  impl->set_current_test_case(NULL);
+}
+
+// Clears the results of all tests in this test case.
+void TestCase::ClearResult() {
+  ForEach(test_info_list_, TestInfo::ClearTestResult);
+}
+
+// Shuffles the tests in this test case.
+void TestCase::ShuffleTests(internal::Random* random) {
+  Shuffle(random, &test_indices_);
+}
+
+// Restores the test order to before the first shuffle.
+void TestCase::UnshuffleTests() {
+  for (size_t i = 0; i < test_indices_.size(); i++) {
+    test_indices_[i] = static_cast<int>(i);
+  }
+}
+
+// Formats a countable noun.  Depending on its quantity, either the
+// singular form or the plural form is used. e.g.
+//
+// FormatCountableNoun(1, "formula", "formulae") returns "1 formula".
+// FormatCountableNoun(5, "book", "books") returns "5 books".
+static internal::String FormatCountableNoun(int count,
+                                            const char * singular_form,
+                                            const char * plural_form) {
+  return internal::String::Format("%d %s", count,
+                                  count == 1 ? singular_form : plural_form);
+}
+
+// Formats the count of tests.
+static internal::String FormatTestCount(int test_count) {
+  return FormatCountableNoun(test_count, "test", "tests");
+}
+
+// Formats the count of test cases.
+static internal::String FormatTestCaseCount(int test_case_count) {
+  return FormatCountableNoun(test_case_count, "test case", "test cases");
+}
+
+// Converts a TestPartResult::Type enum to human-friendly string
+// representation.  Both kNonFatalFailure and kFatalFailure are translated
+// to "Failure", as the user usually doesn't care about the difference
+// between the two when viewing the test result.
+static const char * TestPartResultTypeToString(TestPartResult::Type type) {
+  switch (type) {
+    case TestPartResult::kSuccess:
+      return "Success";
+
+    case TestPartResult::kNonFatalFailure:
+    case TestPartResult::kFatalFailure:
+#ifdef _MSC_VER
+      return "error: ";
+#else
+      return "Failure\n";
+#endif
+    default:
+      return "Unknown result type";
+  }
+}
+
+// Prints a TestPartResult to a String.
+static internal::String PrintTestPartResultToString(
+    const TestPartResult& test_part_result) {
+  return (Message()
+          << internal::FormatFileLocation(test_part_result.file_name(),
+                                          test_part_result.line_number())
+          << " " << TestPartResultTypeToString(test_part_result.type())
+          << test_part_result.message()).GetString();
+}
+
+// Prints a TestPartResult.
+static void PrintTestPartResult(const TestPartResult& test_part_result) {
+  const internal::String& result =
+      PrintTestPartResultToString(test_part_result);
+  printf("%s\n", result.c_str());
+  fflush(stdout);
+  // If the test program runs in Visual Studio or a debugger, the
+  // following statements add the test part result message to the Output
+  // window such that the user can double-click on it to jump to the
+  // corresponding source code location; otherwise they do nothing.
+#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
+  // We don't call OutputDebugString*() on Windows Mobile, as printing
+  // to stdout is done by OutputDebugString() there already - we don't
+  // want the same message printed twice.
+  ::OutputDebugStringA(result.c_str());
+  ::OutputDebugStringA("\n");
+#endif
+}
+
+// class PrettyUnitTestResultPrinter
+
+namespace internal {
+
+enum GTestColor {
+  COLOR_DEFAULT,
+  COLOR_RED,
+  COLOR_GREEN,
+  COLOR_YELLOW
+};
+
+#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
+
+// Returns the character attribute for the given color.
+WORD GetColorAttribute(GTestColor color) {
+  switch (color) {
+    case COLOR_RED:    return FOREGROUND_RED;
+    case COLOR_GREEN:  return FOREGROUND_GREEN;
+    case COLOR_YELLOW: return FOREGROUND_RED | FOREGROUND_GREEN;
+    default:           return 0;
+  }
+}
+
+#else
+
+// Returns the ANSI color code for the given color.  COLOR_DEFAULT is
+// an invalid input.
+const char* GetAnsiColorCode(GTestColor color) {
+  switch (color) {
+    case COLOR_RED:     return "1";
+    case COLOR_GREEN:   return "2";
+    case COLOR_YELLOW:  return "3";
+    default:            return NULL;
+  }
+}
+
+#endif  // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
+
+// Returns true iff Google Test should use colors in the output.
+bool ShouldUseColor(bool stdout_is_tty) {
+  const char* const gtest_color = GTEST_FLAG(color).c_str();
+
+  if (String::CaseInsensitiveCStringEquals(gtest_color, "auto")) {
+#if GTEST_OS_WINDOWS
+    // On Windows the TERM variable is usually not set, but the
+    // console there does support colors.
+    return stdout_is_tty;
+#else
+    // On non-Windows platforms, we rely on the TERM variable.
+    const char* const term = posix::GetEnv("TERM");
+    const bool term_supports_color =
+        String::CStringEquals(term, "xterm") ||
+        String::CStringEquals(term, "xterm-color") ||
+        String::CStringEquals(term, "xterm-256color") ||
+        String::CStringEquals(term, "screen") ||
+        String::CStringEquals(term, "linux") ||
+        String::CStringEquals(term, "cygwin");
+    return stdout_is_tty && term_supports_color;
+#endif  // GTEST_OS_WINDOWS
+  }
+
+  // We take "yes", "true", "t", and "1" as meaning "yes".  If the
+  // value is neither one of these nor "auto", we treat it as "no" to
+  // be conservative.
+  return String::CaseInsensitiveCStringEquals(gtest_color, "yes") ||
+      String::CaseInsensitiveCStringEquals(gtest_color, "true") ||
+      String::CaseInsensitiveCStringEquals(gtest_color, "t") ||
+      String::CStringEquals(gtest_color, "1");
+}
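+
+// The color decision can be forced from the command line or the environment,
+// e.g. (binary name is illustrative):
+//
+//   ./test_all.testbin --gtest_color=yes    # always colorize
+//   GTEST_COLOR=no ./test_all.testbin       # never colorize
+//
+// With the default "auto", color is used only when stdout is a TTY and, on
+// non-Windows platforms, TERM looks color-capable (see the list above).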
+
+// Helpers for printing colored strings to stdout. Note that on Windows, we
+// cannot simply emit special characters and have the terminal change colors.
+// This routine must actually emit the characters rather than return a string
+// that would be colored when printed, as can be done on Linux.
+void ColoredPrintf(GTestColor color, const char* fmt, ...) {
+  va_list args;
+  va_start(args, fmt);
+
+#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_SYMBIAN || GTEST_OS_ZOS
+  const bool use_color = false;
+#else
+  static const bool in_color_mode =
+      ShouldUseColor(posix::IsATTY(posix::FileNo(stdout)) != 0);
+  const bool use_color = in_color_mode && (color != COLOR_DEFAULT);
+#endif  // GTEST_OS_WINDOWS_MOBILE || GTEST_OS_SYMBIAN || GTEST_OS_ZOS
+  // The '!= 0' comparison is necessary to satisfy MSVC 7.1.
+
+  if (!use_color) {
+    vprintf(fmt, args);
+    va_end(args);
+    return;
+  }
+
+#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
+  const HANDLE stdout_handle = GetStdHandle(STD_OUTPUT_HANDLE);
+
+  // Gets the current text color.
+  CONSOLE_SCREEN_BUFFER_INFO buffer_info;
+  GetConsoleScreenBufferInfo(stdout_handle, &buffer_info);
+  const WORD old_color_attrs = buffer_info.wAttributes;
+
+  // We need to flush the stream buffers into the console before each
+  // SetConsoleTextAttribute call lest it affect the text that is already
+  // printed but has not yet reached the console.
+  fflush(stdout);
+  SetConsoleTextAttribute(stdout_handle,
+                          GetColorAttribute(color) | FOREGROUND_INTENSITY);
+  vprintf(fmt, args);
+
+  fflush(stdout);
+  // Restores the text color.
+  SetConsoleTextAttribute(stdout_handle, old_color_attrs);
+#else
+  printf("\033[0;3%sm", GetAnsiColorCode(color));
+  vprintf(fmt, args);
+  printf("\033[m");  // Resets the terminal to default.
+#endif  // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
+  va_end(args);
+}
+
+void PrintFullTestCommentIfPresent(const TestInfo& test_info) {
+  const char* const type_param = test_info.type_param();
+  const char* const value_param = test_info.value_param();
+
+  if (type_param != NULL || value_param != NULL) {
+    printf(", where ");
+    if (type_param != NULL) {
+      printf("TypeParam = %s", type_param);
+      if (value_param != NULL)
+        printf(" and ");
+    }
+    if (value_param != NULL) {
+      printf("GetParam() = %s", value_param);
+    }
+  }
+}
+
+// This class implements the TestEventListener interface.
+//
+// Class PrettyUnitTestResultPrinter is copyable.
+class PrettyUnitTestResultPrinter : public TestEventListener {
+ public:
+  PrettyUnitTestResultPrinter() {}
+  static void PrintTestName(const char * test_case, const char * test) {
+    printf("%s.%s", test_case, test);
+  }
+
+  // The following methods override what's in the TestEventListener class.
+  virtual void OnTestProgramStart(const UnitTest& /*unit_test*/) {}
+  virtual void OnTestIterationStart(const UnitTest& unit_test, int iteration);
+  virtual void OnEnvironmentsSetUpStart(const UnitTest& unit_test);
+  virtual void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) {}
+  virtual void OnTestCaseStart(const TestCase& test_case);
+  virtual void OnTestStart(const TestInfo& test_info);
+  virtual void OnTestPartResult(const TestPartResult& result);
+  virtual void OnTestEnd(const TestInfo& test_info);
+  virtual void OnTestCaseEnd(const TestCase& test_case);
+  virtual void OnEnvironmentsTearDownStart(const UnitTest& unit_test);
+  virtual void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) {}
+  virtual void OnTestIterationEnd(const UnitTest& unit_test, int iteration);
+  virtual void OnTestProgramEnd(const UnitTest& /*unit_test*/) {}
+
+ private:
+  static void PrintFailedTests(const UnitTest& unit_test);
+
+  internal::String test_case_name_;
+};
+
+// Fired before each iteration of tests starts.
+void PrettyUnitTestResultPrinter::OnTestIterationStart(
+    const UnitTest& unit_test, int iteration) {
+  if (GTEST_FLAG(repeat) != 1)
+    printf("\nRepeating all tests (iteration %d) . . .\n\n", iteration + 1);
+
+  const char* const filter = GTEST_FLAG(filter).c_str();
+
+  // Prints the filter if it's not *.  This reminds the user that some
+  // tests may be skipped.
+  if (!internal::String::CStringEquals(filter, kUniversalFilter)) {
+    ColoredPrintf(COLOR_YELLOW,
+                  "Note: %s filter = %s\n", GTEST_NAME_, filter);
+  }
+
+  if (internal::ShouldShard(kTestTotalShards, kTestShardIndex, false)) {
+    const Int32 shard_index = Int32FromEnvOrDie(kTestShardIndex, -1);
+    ColoredPrintf(COLOR_YELLOW,
+                  "Note: This is test shard %d of %s.\n",
+                  static_cast<int>(shard_index) + 1,
+                  internal::posix::GetEnv(kTestTotalShards));
+  }
+
+  if (GTEST_FLAG(shuffle)) {
+    ColoredPrintf(COLOR_YELLOW,
+                  "Note: Randomizing tests' orders with a seed of %d .\n",
+                  unit_test.random_seed());
+  }
+
+  ColoredPrintf(COLOR_GREEN,  "[==========] ");
+  printf("Running %s from %s.\n",
+         FormatTestCount(unit_test.test_to_run_count()).c_str(),
+         FormatTestCaseCount(unit_test.test_case_to_run_count()).c_str());
+  fflush(stdout);
+}
+
+void PrettyUnitTestResultPrinter::OnEnvironmentsSetUpStart(
+    const UnitTest& /*unit_test*/) {
+  ColoredPrintf(COLOR_GREEN,  "[----------] ");
+  printf("Global test environment set-up.\n");
+  fflush(stdout);
+}
+
+void PrettyUnitTestResultPrinter::OnTestCaseStart(const TestCase& test_case) {
+  test_case_name_ = test_case.name();
+  const internal::String counts =
+      FormatCountableNoun(test_case.test_to_run_count(), "test", "tests");
+  ColoredPrintf(COLOR_GREEN, "[----------] ");
+  printf("%s from %s", counts.c_str(), test_case_name_.c_str());
+  if (test_case.type_param() == NULL) {
+    printf("\n");
+  } else {
+    printf(", where TypeParam = %s\n", test_case.type_param());
+  }
+  fflush(stdout);
+}
+
+void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) {
+  ColoredPrintf(COLOR_GREEN,  "[ RUN      ] ");
+  PrintTestName(test_case_name_.c_str(), test_info.name());
+  printf("\n");
+  fflush(stdout);
+}
+
+// Called after an assertion failure.
+void PrettyUnitTestResultPrinter::OnTestPartResult(
+    const TestPartResult& result) {
+  // If the test part succeeded, we don't need to do anything.
+  if (result.type() == TestPartResult::kSuccess)
+    return;
+
+  // Print failure message from the assertion (e.g. expected this and got that).
+  PrintTestPartResult(result);
+  fflush(stdout);
+}
+
+void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) {
+  if (test_info.result()->Passed()) {
+    ColoredPrintf(COLOR_GREEN, "[       OK ] ");
+  } else {
+    ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
+  }
+  PrintTestName(test_case_name_.c_str(), test_info.name());
+  if (test_info.result()->Failed())
+    PrintFullTestCommentIfPresent(test_info);
+
+  if (GTEST_FLAG(print_time)) {
+    printf(" (%s ms)\n", internal::StreamableToString(
+           test_info.result()->elapsed_time()).c_str());
+  } else {
+    printf("\n");
+  }
+  fflush(stdout);
+}
+
+void PrettyUnitTestResultPrinter::OnTestCaseEnd(const TestCase& test_case) {
+  if (!GTEST_FLAG(print_time)) return;
+
+  test_case_name_ = test_case.name();
+  const internal::String counts =
+      FormatCountableNoun(test_case.test_to_run_count(), "test", "tests");
+  ColoredPrintf(COLOR_GREEN, "[----------] ");
+  printf("%s from %s (%s ms total)\n\n",
+         counts.c_str(), test_case_name_.c_str(),
+         internal::StreamableToString(test_case.elapsed_time()).c_str());
+  fflush(stdout);
+}
+
+void PrettyUnitTestResultPrinter::OnEnvironmentsTearDownStart(
+    const UnitTest& /*unit_test*/) {
+  ColoredPrintf(COLOR_GREEN,  "[----------] ");
+  printf("Global test environment tear-down\n");
+  fflush(stdout);
+}
+
+// Internal helper for printing the list of failed tests.
+void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
+  const int failed_test_count = unit_test.failed_test_count();
+  if (failed_test_count == 0) {
+    return;
+  }
+
+  for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
+    const TestCase& test_case = *unit_test.GetTestCase(i);
+    if (!test_case.should_run() || (test_case.failed_test_count() == 0)) {
+      continue;
+    }
+    for (int j = 0; j < test_case.total_test_count(); ++j) {
+      const TestInfo& test_info = *test_case.GetTestInfo(j);
+      if (!test_info.should_run() || test_info.result()->Passed()) {
+        continue;
+      }
+      ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
+      printf("%s.%s", test_case.name(), test_info.name());
+      PrintFullTestCommentIfPresent(test_info);
+      printf("\n");
+    }
+  }
+}
+
+void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
+                                                     int /*iteration*/) {
+  ColoredPrintf(COLOR_GREEN,  "[==========] ");
+  printf("%s from %s ran.",
+         FormatTestCount(unit_test.test_to_run_count()).c_str(),
+         FormatTestCaseCount(unit_test.test_case_to_run_count()).c_str());
+  if (GTEST_FLAG(print_time)) {
+    printf(" (%s ms total)",
+           internal::StreamableToString(unit_test.elapsed_time()).c_str());
+  }
+  printf("\n");
+  ColoredPrintf(COLOR_GREEN,  "[  PASSED  ] ");
+  printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str());
+
+  int num_failures = unit_test.failed_test_count();
+  if (!unit_test.Passed()) {
+    const int failed_test_count = unit_test.failed_test_count();
+    ColoredPrintf(COLOR_RED,  "[  FAILED  ] ");
+    printf("%s, listed below:\n", FormatTestCount(failed_test_count).c_str());
+    PrintFailedTests(unit_test);
+    printf("\n%2d FAILED %s\n", num_failures,
+                        num_failures == 1 ? "TEST" : "TESTS");
+  }
+
+  int num_disabled = unit_test.disabled_test_count();
+  if (num_disabled && !GTEST_FLAG(also_run_disabled_tests)) {
+    if (!num_failures) {
+      printf("\n");  // Add a spacer if no FAILURE banner is displayed.
+    }
+    ColoredPrintf(COLOR_YELLOW,
+                  "  YOU HAVE %d DISABLED %s\n\n",
+                  num_disabled,
+                  num_disabled == 1 ? "TEST" : "TESTS");
+  }
+  // Ensure that Google Test output is printed before, e.g., heapchecker output.
+  fflush(stdout);
+}
+
+// End PrettyUnitTestResultPrinter
+
+// class TestEventRepeater
+//
+// This class forwards events to other event listeners.
+class TestEventRepeater : public TestEventListener {
+ public:
+  TestEventRepeater() : forwarding_enabled_(true) {}
+  virtual ~TestEventRepeater();
+  void Append(TestEventListener *listener);
+  TestEventListener* Release(TestEventListener* listener);
+
+  // Controls whether events will be forwarded to listeners_. Set to false
+  // in death test child processes.
+  bool forwarding_enabled() const { return forwarding_enabled_; }
+  void set_forwarding_enabled(bool enable) { forwarding_enabled_ = enable; }
+
+  virtual void OnTestProgramStart(const UnitTest& unit_test);
+  virtual void OnTestIterationStart(const UnitTest& unit_test, int iteration);
+  virtual void OnEnvironmentsSetUpStart(const UnitTest& unit_test);
+  virtual void OnEnvironmentsSetUpEnd(const UnitTest& unit_test);
+  virtual void OnTestCaseStart(const TestCase& test_case);
+  virtual void OnTestStart(const TestInfo& test_info);
+  virtual void OnTestPartResult(const TestPartResult& result);
+  virtual void OnTestEnd(const TestInfo& test_info);
+  virtual void OnTestCaseEnd(const TestCase& test_case);
+  virtual void OnEnvironmentsTearDownStart(const UnitTest& unit_test);
+  virtual void OnEnvironmentsTearDownEnd(const UnitTest& unit_test);
+  virtual void OnTestIterationEnd(const UnitTest& unit_test, int iteration);
+  virtual void OnTestProgramEnd(const UnitTest& unit_test);
+
+ private:
+  // Controls whether events will be forwarded to listeners_. Set to false
+  // in death test child processes.
+  bool forwarding_enabled_;
+  // The list of listeners that receive events.
+  std::vector<TestEventListener*> listeners_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestEventRepeater);
+};
+
+TestEventRepeater::~TestEventRepeater() {
+  ForEach(listeners_, Delete<TestEventListener>);
+}
+
+void TestEventRepeater::Append(TestEventListener *listener) {
+  listeners_.push_back(listener);
+}
+
+// TODO(vladl@google.com): Factor the search functionality into Vector::Find.
+TestEventListener* TestEventRepeater::Release(TestEventListener *listener) {
+  for (size_t i = 0; i < listeners_.size(); ++i) {
+    if (listeners_[i] == listener) {
+      listeners_.erase(listeners_.begin() + i);
+      return listener;
+    }
+  }
+
+  return NULL;
+}
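+
+// The repeater above is what ultimately serves user-installed listeners,
+// registered through the public API, e.g. (MyListener is a made-up class
+// deriving from ::testing::EmptyTestEventListener):
+//
+//   ::testing::TestEventListeners& listeners =
+//       ::testing::UnitTest::GetInstance()->listeners();
+//   listeners.Append(new MyListener);  // forwarded every On*() event
+//
+// Release() is used when the caller wants a listener back (for example, to
+// remove the default result printer) without it being deleted here.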
+
+// Since most methods are very similar, use macros to reduce boilerplate.
+// This defines a member that forwards the call to all listeners.
+#define GTEST_REPEATER_METHOD_(Name, Type) \
+void TestEventRepeater::Name(const Type& parameter) { \
+  if (forwarding_enabled_) { \
+    for (size_t i = 0; i < listeners_.size(); i++) { \
+      listeners_[i]->Name(parameter); \
+    } \
+  } \
+}
+// This defines a member that forwards the call to all listeners in reverse
+// order.
+#define GTEST_REVERSE_REPEATER_METHOD_(Name, Type) \
+void TestEventRepeater::Name(const Type& parameter) { \
+  if (forwarding_enabled_) { \
+    for (int i = static_cast<int>(listeners_.size()) - 1; i >= 0; i--) { \
+      listeners_[i]->Name(parameter); \
+    } \
+  } \
+}
+
+GTEST_REPEATER_METHOD_(OnTestProgramStart, UnitTest)
+GTEST_REPEATER_METHOD_(OnEnvironmentsSetUpStart, UnitTest)
+GTEST_REPEATER_METHOD_(OnTestCaseStart, TestCase)
+GTEST_REPEATER_METHOD_(OnTestStart, TestInfo)
+GTEST_REPEATER_METHOD_(OnTestPartResult, TestPartResult)
+GTEST_REPEATER_METHOD_(OnEnvironmentsTearDownStart, UnitTest)
+GTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsSetUpEnd, UnitTest)
+GTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsTearDownEnd, UnitTest)
+GTEST_REVERSE_REPEATER_METHOD_(OnTestEnd, TestInfo)
+GTEST_REVERSE_REPEATER_METHOD_(OnTestCaseEnd, TestCase)
+GTEST_REVERSE_REPEATER_METHOD_(OnTestProgramEnd, UnitTest)
+
+#undef GTEST_REPEATER_METHOD_
+#undef GTEST_REVERSE_REPEATER_METHOD_
+
+void TestEventRepeater::OnTestIterationStart(const UnitTest& unit_test,
+                                             int iteration) {
+  if (forwarding_enabled_) {
+    for (size_t i = 0; i < listeners_.size(); i++) {
+      listeners_[i]->OnTestIterationStart(unit_test, iteration);
+    }
+  }
+}
+
+void TestEventRepeater::OnTestIterationEnd(const UnitTest& unit_test,
+                                           int iteration) {
+  if (forwarding_enabled_) {
+    for (int i = static_cast<int>(listeners_.size()) - 1; i >= 0; i--) {
+      listeners_[i]->OnTestIterationEnd(unit_test, iteration);
+    }
+  }
+}
+
+// End TestEventRepeater
+
+// This class generates an XML output file.
+class XmlUnitTestResultPrinter : public EmptyTestEventListener {
+ public:
+  explicit XmlUnitTestResultPrinter(const char* output_file);
+
+  virtual void OnTestIterationEnd(const UnitTest& unit_test, int iteration);
+
+ private:
+  // Is c a whitespace character that is normalized to a space character
+  // when it appears in an XML attribute value?
+  static bool IsNormalizableWhitespace(char c) {
+    return c == 0x9 || c == 0xA || c == 0xD;
+  }
+
+  // May c appear in a well-formed XML document?
+  static bool IsValidXmlCharacter(char c) {
+    return IsNormalizableWhitespace(c) || c >= 0x20;
+  }
+
+  // Returns an XML-escaped copy of the input string str.  If
+  // is_attribute is true, the text is meant to appear as an attribute
+  // value, and normalizable whitespace is preserved by replacing it
+  // with character references.
+  static String EscapeXml(const char* str, bool is_attribute);
+
+  // Returns the given string with all characters invalid in XML removed.
+  static string RemoveInvalidXmlCharacters(const string& str);
+
+  // Convenience wrapper around EscapeXml when str is an attribute value.
+  static String EscapeXmlAttribute(const char* str) {
+    return EscapeXml(str, true);
+  }
+
+  // Convenience wrapper around EscapeXml when str is not an attribute value.
+  static String EscapeXmlText(const char* str) { return EscapeXml(str, false); }
+
+  // Streams an XML CDATA section, escaping invalid CDATA sequences as needed.
+  static void OutputXmlCDataSection(::std::ostream* stream, const char* data);
+
+  // Streams an XML representation of a TestInfo object.
+  static void OutputXmlTestInfo(::std::ostream* stream,
+                                const char* test_case_name,
+                                const TestInfo& test_info);
+
+  // Prints an XML representation of a TestCase object
+  static void PrintXmlTestCase(FILE* out, const TestCase& test_case);
+
+  // Prints an XML summary of unit_test to output stream out.
+  static void PrintXmlUnitTest(FILE* out, const UnitTest& unit_test);
+
+  // Produces a string representing the test properties in a result as space
+  // delimited XML attributes based on the property key="value" pairs.
+  // When the String is not empty, it includes a space at the beginning,
+  // to delimit this attribute from prior attributes.
+  static String TestPropertiesAsXmlAttributes(const TestResult& result);
+
+  // The output file.
+  const String output_file_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(XmlUnitTestResultPrinter);
+};
+
+// Creates a new XmlUnitTestResultPrinter.
+XmlUnitTestResultPrinter::XmlUnitTestResultPrinter(const char* output_file)
+    : output_file_(output_file) {
+  if (output_file_.c_str() == NULL || output_file_.empty()) {
+    fprintf(stderr, "XML output file may not be null\n");
+    fflush(stderr);
+    exit(EXIT_FAILURE);
+  }
+}
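+
+// This printer is normally installed via the output flag, e.g. (binary name
+// is illustrative):
+//
+//   ./test_all.testbin --gtest_output=xml:./gtest_report.xml
+//
+// or via the GTEST_OUTPUT environment variable with the same "xml:<path>"
+// syntax.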
+
+// Called after the unit test ends.
+void XmlUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
+                                                  int /*iteration*/) {
+  FILE* xmlout = NULL;
+  FilePath output_file(output_file_);
+  FilePath output_dir(output_file.RemoveFileName());
+
+  if (output_dir.CreateDirectoriesRecursively()) {
+    xmlout = posix::FOpen(output_file_.c_str(), "w");
+  }
+  if (xmlout == NULL) {
+    // TODO(wan): report the reason of the failure.
+    //
+    // We don't do it for now as:
+    //
+    //   1. There is no urgent need for it.
+    //   2. It's a bit involved to make the errno variable thread-safe on
+    //      all three operating systems (Linux, Windows, and Mac OS).
+    //   3. To interpret the meaning of errno in a thread-safe way,
+    //      we need the strerror_r() function, which is not available on
+    //      Windows.
+    fprintf(stderr,
+            "Unable to open file \"%s\"\n",
+            output_file_.c_str());
+    fflush(stderr);
+    exit(EXIT_FAILURE);
+  }
+  PrintXmlUnitTest(xmlout, unit_test);
+  fclose(xmlout);
+}
+
+// Returns an XML-escaped copy of the input string str.  If is_attribute
+// is true, the text is meant to appear as an attribute value, and
+// normalizable whitespace is preserved by replacing it with character
+// references.
+//
+// Invalid XML characters in str, if any, are stripped from the output.
+// It is expected that most, if not all, of the text processed by this
+// module will consist of ordinary English text.
+// If this module is ever modified to produce version 1.1 XML output,
+// most invalid characters can be retained using character references.
+// TODO(wan): It might be nice to have a minimally invasive, human-readable
+// escaping scheme for invalid characters, rather than dropping them.
+String XmlUnitTestResultPrinter::EscapeXml(const char* str, bool is_attribute) {
+  Message m;
+
+  if (str != NULL) {
+    for (const char* src = str; *src; ++src) {
+      switch (*src) {
+        case '<':
+          m << "&lt;";
+          break;
+        case '>':
+          m << "&gt;";
+          break;
+        case '&':
+          m << "&amp;";
+          break;
+        case '\'':
+          if (is_attribute)
+            m << "&apos;";
+          else
+            m << '\'';
+          break;
+        case '"':
+          if (is_attribute)
+            m << "&quot;";
+          else
+            m << '"';
+          break;
+        default:
+          if (IsValidXmlCharacter(*src)) {
+            if (is_attribute && IsNormalizableWhitespace(*src))
+              m << String::Format("&#x%02X;", unsigned(*src));
+            else
+              m << *src;
+          }
+          break;
+      }
+    }
+  }
+
+  return m.GetString();
+}
+
+// Returns the given string with all characters invalid in XML removed.
+// Currently invalid characters are dropped from the string. An
+// alternative is to replace them with certain characters such as . or ?.
+string XmlUnitTestResultPrinter::RemoveInvalidXmlCharacters(const string& str) {
+  string output;
+  output.reserve(str.size());
+  for (string::const_iterator it = str.begin(); it != str.end(); ++it)
+    if (IsValidXmlCharacter(*it))
+      output.push_back(*it);
+
+  return output;
+}
+
+// The following routines generate an XML representation of a UnitTest
+// object.
+//
+// This is how Google Test concepts map to the DTD:
+//
+// <testsuites name="AllTests">        <-- corresponds to a UnitTest object
+//   <testsuite name="testcase-name">  <-- corresponds to a TestCase object
+//     <testcase name="test-name">     <-- corresponds to a TestInfo object
+//       <failure message="...">...</failure>
+//       <failure message="...">...</failure>
+//       <failure message="...">...</failure>
+//                                     <-- individual assertion failures
+//     </testcase>
+//   </testsuite>
+// </testsuites>
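+//
+// For illustration only (names and timings below are hypothetical), a run
+// with a single failing test FooTest.Bar would produce output shaped like:
+//
+//   <testsuites tests="1" failures="1" disabled="0" errors="0" time="0.005"
+//               name="AllTests">
+//     <testsuite name="FooTest" tests="1" failures="1" disabled="0"
+//                errors="0" time="0.005">
+//       <testcase name="Bar" status="run" time="0.005" classname="FooTest">
+//         <failure message="..." type=""><![CDATA[...]]></failure>
+//       </testcase>
+//     </testsuite>
+//   </testsuites>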
+
+// Formats the given time in milliseconds as seconds.
+std::string FormatTimeInMillisAsSeconds(TimeInMillis ms) {
+  ::std::stringstream ss;
+  ss << ms/1000.0;
+  return ss.str();
+}
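+
+// For example (illustrative values), FormatTimeInMillisAsSeconds(1500)
+// returns "1.5" and FormatTimeInMillisAsSeconds(3) returns "0.003".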
+
+// Streams an XML CDATA section, escaping invalid CDATA sequences as needed.
+void XmlUnitTestResultPrinter::OutputXmlCDataSection(::std::ostream* stream,
+                                                     const char* data) {
+  const char* segment = data;
+  *stream << "<![CDATA[";
+  for (;;) {
+    const char* const next_segment = strstr(segment, "]]>");
+    if (next_segment != NULL) {
+      stream->write(
+          segment, static_cast<std::streamsize>(next_segment - segment));
+      *stream << "]]>]]><![CDATA[";
+      segment = next_segment + strlen("]]>");
+    } else {
+      *stream << segment;
+      break;
+    }
+  }
+  *stream << "]]>";
+}
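+
+// As an illustration, data containing the CDATA terminator is split across
+// sections: OutputXmlCDataSection(&os, "ab]]>cd") streams
+//
+//   <![CDATA[ab]]>]]><![CDATA[cd]]>
+//
+// so that no single CDATA section contains the "]]>" sequence from the input.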
+
+// Prints an XML representation of a TestInfo object.
+// TODO(wan): There is also value in printing properties with the plain printer.
+void XmlUnitTestResultPrinter::OutputXmlTestInfo(::std::ostream* stream,
+                                                 const char* test_case_name,
+                                                 const TestInfo& test_info) {
+  const TestResult& result = *test_info.result();
+  *stream << "    <testcase name=\""
+          << EscapeXmlAttribute(test_info.name()).c_str() << "\"";
+
+  if (test_info.value_param() != NULL) {
+    *stream << " value_param=\"" << EscapeXmlAttribute(test_info.value_param())
+            << "\"";
+  }
+  if (test_info.type_param() != NULL) {
+    *stream << " type_param=\"" << EscapeXmlAttribute(test_info.type_param())
+            << "\"";
+  }
+
+  *stream << " status=\""
+          << (test_info.should_run() ? "run" : "notrun")
+          << "\" time=\""
+          << FormatTimeInMillisAsSeconds(result.elapsed_time())
+          << "\" classname=\"" << EscapeXmlAttribute(test_case_name).c_str()
+          << "\"" << TestPropertiesAsXmlAttributes(result).c_str();
+
+  int failures = 0;
+  for (int i = 0; i < result.total_part_count(); ++i) {
+    const TestPartResult& part = result.GetTestPartResult(i);
+    if (part.failed()) {
+      if (++failures == 1)
+        *stream << ">\n";
+      *stream << "      <failure message=\""
+              << EscapeXmlAttribute(part.summary()).c_str()
+              << "\" type=\"\">";
+      const string location = internal::FormatCompilerIndependentFileLocation(
+          part.file_name(), part.line_number());
+      const string message = location + "\n" + part.message();
+      OutputXmlCDataSection(stream,
+                            RemoveInvalidXmlCharacters(message).c_str());
+      *stream << "</failure>\n";
+    }
+  }
+
+  if (failures == 0)
+    *stream << " />\n";
+  else
+    *stream << "    </testcase>\n";
+}
+
+// Prints an XML representation of a TestCase object
+void XmlUnitTestResultPrinter::PrintXmlTestCase(FILE* out,
+                                                const TestCase& test_case) {
+  fprintf(out,
+          "  <testsuite name=\"%s\" tests=\"%d\" failures=\"%d\" "
+          "disabled=\"%d\" ",
+          EscapeXmlAttribute(test_case.name()).c_str(),
+          test_case.total_test_count(),
+          test_case.failed_test_count(),
+          test_case.disabled_test_count());
+  fprintf(out,
+          "errors=\"0\" time=\"%s\">\n",
+          FormatTimeInMillisAsSeconds(test_case.elapsed_time()).c_str());
+  for (int i = 0; i < test_case.total_test_count(); ++i) {
+    ::std::stringstream stream;
+    OutputXmlTestInfo(&stream, test_case.name(), *test_case.GetTestInfo(i));
+    fprintf(out, "%s", StringStreamToString(&stream).c_str());
+  }
+  fprintf(out, "  </testsuite>\n");
+}
+
+// Prints an XML summary of unit_test to output stream out.
+void XmlUnitTestResultPrinter::PrintXmlUnitTest(FILE* out,
+                                                const UnitTest& unit_test) {
+  fprintf(out, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
+  fprintf(out,
+          "<testsuites tests=\"%d\" failures=\"%d\" disabled=\"%d\" "
+          "errors=\"0\" time=\"%s\" ",
+          unit_test.total_test_count(),
+          unit_test.failed_test_count(),
+          unit_test.disabled_test_count(),
+          FormatTimeInMillisAsSeconds(unit_test.elapsed_time()).c_str());
+  if (GTEST_FLAG(shuffle)) {
+    fprintf(out, "random_seed=\"%d\" ", unit_test.random_seed());
+  }
+  fprintf(out, "name=\"AllTests\">\n");
+  for (int i = 0; i < unit_test.total_test_case_count(); ++i)
+    PrintXmlTestCase(out, *unit_test.GetTestCase(i));
+  fprintf(out, "</testsuites>\n");
+}
+
+// Produces a string representing the test properties in a result as space
+// delimited XML attributes based on the property key="value" pairs.
+String XmlUnitTestResultPrinter::TestPropertiesAsXmlAttributes(
+    const TestResult& result) {
+  Message attributes;
+  for (int i = 0; i < result.test_property_count(); ++i) {
+    const TestProperty& property = result.GetTestProperty(i);
+    attributes << " " << property.key() << "="
+        << "\"" << EscapeXmlAttribute(property.value()) << "\"";
+  }
+  return attributes.GetString();
+}
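+
+// For illustration, a test that records the (hypothetical) property
+// ("MaximumWidgets", "12") causes the attribute MaximumWidgets="12", with a
+// leading space, to be appended to its <testcase> element.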
+
+// End XmlUnitTestResultPrinter
+
+#if GTEST_CAN_STREAM_RESULTS_
+
+// Streams test results to the given port on the given host machine.
+class StreamingListener : public EmptyTestEventListener {
+ public:
+  // Escapes '=', '&', '%', and '\n' characters in str as "%xx".
+  static string UrlEncode(const char* str);
+
+  StreamingListener(const string& host, const string& port)
+      : sockfd_(-1), host_name_(host), port_num_(port) {
+    MakeConnection();
+    Send("gtest_streaming_protocol_version=1.0\n");
+  }
+
+  virtual ~StreamingListener() {
+    if (sockfd_ != -1)
+      CloseConnection();
+  }
+
+  void OnTestProgramStart(const UnitTest& /* unit_test */) {
+    Send("event=TestProgramStart\n");
+  }
+
+  void OnTestProgramEnd(const UnitTest& unit_test) {
+    // Note that Google Test currently only reports elapsed time for each
+    // test iteration, not for the entire test program.
+    Send(String::Format("event=TestProgramEnd&passed=%d\n",
+                        unit_test.Passed()));
+
+    // Notify the streaming server to stop.
+    CloseConnection();
+  }
+
+  void OnTestIterationStart(const UnitTest& /* unit_test */, int iteration) {
+    Send(String::Format("event=TestIterationStart&iteration=%d\n",
+                        iteration));
+  }
+
+  void OnTestIterationEnd(const UnitTest& unit_test, int /* iteration */) {
+    Send(String::Format("event=TestIterationEnd&passed=%d&elapsed_time=%sms\n",
+                        unit_test.Passed(),
+                        StreamableToString(unit_test.elapsed_time()).c_str()));
+  }
+
+  void OnTestCaseStart(const TestCase& test_case) {
+    Send(String::Format("event=TestCaseStart&name=%s\n", test_case.name()));
+  }
+
+  void OnTestCaseEnd(const TestCase& test_case) {
+    Send(String::Format("event=TestCaseEnd&passed=%d&elapsed_time=%sms\n",
+                        test_case.Passed(),
+                        StreamableToString(test_case.elapsed_time()).c_str()));
+  }
+
+  void OnTestStart(const TestInfo& test_info) {
+    Send(String::Format("event=TestStart&name=%s\n", test_info.name()));
+  }
+
+  void OnTestEnd(const TestInfo& test_info) {
+    Send(String::Format(
+        "event=TestEnd&passed=%d&elapsed_time=%sms\n",
+        (test_info.result())->Passed(),
+        StreamableToString((test_info.result())->elapsed_time()).c_str()));
+  }
+
+  void OnTestPartResult(const TestPartResult& test_part_result) {
+    const char* file_name = test_part_result.file_name();
+    if (file_name == NULL)
+      file_name = "";
+    Send(String::Format("event=TestPartResult&file=%s&line=%d&message=",
+                        UrlEncode(file_name).c_str(),
+                        test_part_result.line_number()));
+    Send(UrlEncode(test_part_result.message()) + "\n");
+  }
+
+ private:
+  // Creates a client socket and connects to the server.
+  void MakeConnection();
+
+  // Closes the socket.
+  void CloseConnection() {
+    GTEST_CHECK_(sockfd_ != -1)
+        << "CloseConnection() can be called only when there is a connection.";
+
+    close(sockfd_);
+    sockfd_ = -1;
+  }
+
+  // Sends a string to the socket.
+  void Send(const string& message) {
+    GTEST_CHECK_(sockfd_ != -1)
+        << "Send() can be called only when there is a connection.";
+
+    const int len = static_cast<int>(message.length());
+    if (write(sockfd_, message.c_str(), len) != len) {
+      GTEST_LOG_(WARNING)
+          << "stream_result_to: failed to stream to "
+          << host_name_ << ":" << port_num_;
+    }
+  }
+
+  int sockfd_;   // socket file descriptor
+  const string host_name_;
+  const string port_num_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(StreamingListener);
+};  // class StreamingListener
+
+// Checks if str contains '=', '&', '%' or '\n' characters. If yes,
+// replaces them by "%xx" where xx is their hexadecimal value. For
+// example, replaces "=" with "%3D".  This algorithm is O(strlen(str))
+// in both time and space -- important as the input str may contain an
+// arbitrarily long test failure message and stack trace.
+string StreamingListener::UrlEncode(const char* str) {
+  string result;
+  result.reserve(strlen(str) + 1);
+  for (char ch = *str; ch != '\0'; ch = *++str) {
+    switch (ch) {
+      case '%':
+      case '=':
+      case '&':
+      case '\n':
+        result.append(String::Format("%%%02x", static_cast<unsigned char>(ch)));
+        break;
+      default:
+        result.push_back(ch);
+        break;
+    }
+  }
+  return result;
+}
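+
+// For example, UrlEncode("a=1&b=2\n") returns "a%3d1%26b%3d2%0a"
+// (hexadecimal digits are emitted in lower case by the %02x format).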
+
+void StreamingListener::MakeConnection() {
+  GTEST_CHECK_(sockfd_ == -1)
+      << "MakeConnection() can't be called when there is already a connection.";
+
+  addrinfo hints;
+  memset(&hints, 0, sizeof(hints));
+  hints.ai_family = AF_UNSPEC;    // To allow both IPv4 and IPv6 addresses.
+  hints.ai_socktype = SOCK_STREAM;
+  addrinfo* servinfo = NULL;
+
+  // Use the getaddrinfo() to get a linked list of IP addresses for
+  // the given host name.
+  const int error_num = getaddrinfo(
+      host_name_.c_str(), port_num_.c_str(), &hints, &servinfo);
+  if (error_num != 0) {
+    GTEST_LOG_(WARNING) << "stream_result_to: getaddrinfo() failed: "
+                        << gai_strerror(error_num);
+  }
+
+  // Loop through all the results and connect to the first we can.
+  for (addrinfo* cur_addr = servinfo; sockfd_ == -1 && cur_addr != NULL;
+       cur_addr = cur_addr->ai_next) {
+    sockfd_ = socket(
+        cur_addr->ai_family, cur_addr->ai_socktype, cur_addr->ai_protocol);
+    if (sockfd_ != -1) {
+      // Connect the client socket to the server socket.
+      if (connect(sockfd_, cur_addr->ai_addr, cur_addr->ai_addrlen) == -1) {
+        close(sockfd_);
+        sockfd_ = -1;
+      }
+    }
+  }
+
+  freeaddrinfo(servinfo);  // all done with this structure
+
+  if (sockfd_ == -1) {
+    GTEST_LOG_(WARNING) << "stream_result_to: failed to connect to "
+                        << host_name_ << ":" << port_num_;
+  }
+}
+
+// End of class StreamingListener
+#endif  // GTEST_CAN_STREAM_RESULTS_
+
+// Class ScopedTrace
+
+// Pushes the given source file location and message onto a per-thread
+// trace stack maintained by Google Test.
+// L < UnitTest::mutex_
+ScopedTrace::ScopedTrace(const char* file, int line, const Message& message) {
+  TraceInfo trace;
+  trace.file = file;
+  trace.line = line;
+  trace.message = message.GetString();
+
+  UnitTest::GetInstance()->PushGTestTrace(trace);
+}
+
+// Pops the info pushed by the c'tor.
+// L < UnitTest::mutex_
+ScopedTrace::~ScopedTrace() {
+  UnitTest::GetInstance()->PopGTestTrace();
+}
+
+
+// class OsStackTraceGetter
+
+// Returns the current OS stack trace as a String.  Parameters:
+//
+//   max_depth  - the maximum number of stack frames to be included
+//                in the trace.
+//   skip_count - the number of top frames to be skipped; doesn't count
+//                against max_depth.
+//
+// L < mutex_
+// We use "L < mutex_" to denote that the function may acquire mutex_.
+String OsStackTraceGetter::CurrentStackTrace(int, int) {
+  return String("");
+}
+
+// L < mutex_
+void OsStackTraceGetter::UponLeavingGTest() {
+}
+
+const char* const
+OsStackTraceGetter::kElidedFramesMarker =
+    "... " GTEST_NAME_ " internal frames ...";
+
+}  // namespace internal
+
+// class TestEventListeners
+
+TestEventListeners::TestEventListeners()
+    : repeater_(new internal::TestEventRepeater()),
+      default_result_printer_(NULL),
+      default_xml_generator_(NULL) {
+}
+
+TestEventListeners::~TestEventListeners() { delete repeater_; }
+
+// Appends the given event listener to the end of the listener list.  Google
+// Test assumes ownership of the listener (i.e. it will delete the listener
+// when the test program finishes), unless the listener is later removed from
+// the list with Release, which transfers ownership back to the caller.
+void TestEventListeners::Append(TestEventListener* listener) {
+  repeater_->Append(listener);
+}
+
+// Removes the given event listener from the list and returns it.  It then
+// becomes the caller's responsibility to delete the listener. Returns
+// NULL if the listener is not found in the list.
+TestEventListener* TestEventListeners::Release(TestEventListener* listener) {
+  if (listener == default_result_printer_)
+    default_result_printer_ = NULL;
+  else if (listener == default_xml_generator_)
+    default_xml_generator_ = NULL;
+  return repeater_->Release(listener);
+}
+
+// Returns repeater that broadcasts the TestEventListener events to all
+// subscribers.
+TestEventListener* TestEventListeners::repeater() { return repeater_; }
+
+// Sets the default_result_printer attribute to the provided listener.
+// The listener is also added to the listener list and previous
+// default_result_printer is removed from it and deleted. The listener can
+// also be NULL in which case it will not be added to the list. Does
+// nothing if the previous and the current listener objects are the same.
+void TestEventListeners::SetDefaultResultPrinter(TestEventListener* listener) {
+  if (default_result_printer_ != listener) {
+    // It is an error to pass this method a listener that is already in the
+    // list.
+    delete Release(default_result_printer_);
+    default_result_printer_ = listener;
+    if (listener != NULL)
+      Append(listener);
+  }
+}
+
+// Sets the default_xml_generator attribute to the provided listener.  The
+// listener is also added to the listener list and previous
+// default_xml_generator is removed from it and deleted. The listener can
+// also be NULL in which case it will not be added to the list. Does
+// nothing if the previous and the current listener objects are the same.
+void TestEventListeners::SetDefaultXmlGenerator(TestEventListener* listener) {
+  if (default_xml_generator_ != listener) {
+    // It is an error to pass this method a listener that is already in the
+    // list.
+    delete Release(default_xml_generator_);
+    default_xml_generator_ = listener;
+    if (listener != NULL)
+      Append(listener);
+  }
+}
+
+// Controls whether events will be forwarded by the repeater to the
+// listeners in the list.
+bool TestEventListeners::EventForwardingEnabled() const {
+  return repeater_->forwarding_enabled();
+}
+
+void TestEventListeners::SuppressEventForwarding() {
+  repeater_->set_forwarding_enabled(false);
+}
+
+// class UnitTest
+
+// Gets the singleton UnitTest object.  The first time this method is
+// called, a UnitTest object is constructed and returned.  Consecutive
+// calls will return the same object.
+//
+// We don't protect this under mutex_ as a user is not supposed to
+// call this before main() starts, from which point on the return
+// value will never change.
+UnitTest * UnitTest::GetInstance() {
+  // When compiled with MSVC 7.1 in optimized mode, destroying the
+  // UnitTest object upon exiting the program messes up the exit code,
+  // causing successful tests to appear failed.  We have to use a
+  // different implementation in this case to bypass the compiler bug.
+  // This implementation makes the compiler happy, at the cost of
+  // leaking the UnitTest object.
+
+  // CodeGear C++Builder insists on a public destructor for the
+  // default implementation.  Use this implementation to keep good OO
+  // design with private destructor.
+
+#if (_MSC_VER == 1310 && !defined(_DEBUG)) || defined(__BORLANDC__)
+  static UnitTest* const instance = new UnitTest;
+  return instance;
+#else
+  static UnitTest instance;
+  return &instance;
+#endif  // (_MSC_VER == 1310 && !defined(_DEBUG)) || defined(__BORLANDC__)
+}
+
+// Gets the number of successful test cases.
+int UnitTest::successful_test_case_count() const {
+  return impl()->successful_test_case_count();
+}
+
+// Gets the number of failed test cases.
+int UnitTest::failed_test_case_count() const {
+  return impl()->failed_test_case_count();
+}
+
+// Gets the number of all test cases.
+int UnitTest::total_test_case_count() const {
+  return impl()->total_test_case_count();
+}
+
+// Gets the number of all test cases that contain at least one test
+// that should run.
+int UnitTest::test_case_to_run_count() const {
+  return impl()->test_case_to_run_count();
+}
+
+// Gets the number of successful tests.
+int UnitTest::successful_test_count() const {
+  return impl()->successful_test_count();
+}
+
+// Gets the number of failed tests.
+int UnitTest::failed_test_count() const { return impl()->failed_test_count(); }
+
+// Gets the number of disabled tests.
+int UnitTest::disabled_test_count() const {
+  return impl()->disabled_test_count();
+}
+
+// Gets the number of all tests.
+int UnitTest::total_test_count() const { return impl()->total_test_count(); }
+
+// Gets the number of tests that should run.
+int UnitTest::test_to_run_count() const { return impl()->test_to_run_count(); }
+
+// Gets the elapsed time, in milliseconds.
+internal::TimeInMillis UnitTest::elapsed_time() const {
+  return impl()->elapsed_time();
+}
+
+// Returns true iff the unit test passed (i.e. all test cases passed).
+bool UnitTest::Passed() const { return impl()->Passed(); }
+
+// Returns true iff the unit test failed (i.e. some test case failed
+// or something outside of all tests failed).
+bool UnitTest::Failed() const { return impl()->Failed(); }
+
+// Gets the i-th test case among all the test cases. i can range from 0 to
+// total_test_case_count() - 1. If i is not in that range, returns NULL.
+const TestCase* UnitTest::GetTestCase(int i) const {
+  return impl()->GetTestCase(i);
+}
+
+// Gets the i-th test case among all the test cases. i can range from 0 to
+// total_test_case_count() - 1. If i is not in that range, returns NULL.
+TestCase* UnitTest::GetMutableTestCase(int i) {
+  return impl()->GetMutableTestCase(i);
+}
+
+// Returns the list of event listeners that can be used to track events
+// inside Google Test.
+TestEventListeners& UnitTest::listeners() {
+  return *impl()->listeners();
+}
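+
+// A rough sketch of typical user code (MyPrinter is a hypothetical
+// TestEventListener subclass; appended listeners are owned by Google Test):
+//
+//   TestEventListeners& listeners = UnitTest::GetInstance()->listeners();
+//   delete listeners.Release(listeners.default_result_printer());
+//   listeners.Append(new MyPrinter);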
+
+// Registers and returns a global test environment.  When a test
+// program is run, all global test environments will be set-up in the
+// order they were registered.  After all tests in the program have
+// finished, all global test environments will be torn-down in the
+// *reverse* order they were registered.
+//
+// The UnitTest object takes ownership of the given environment.
+//
+// We don't protect this under mutex_, as we only support calling it
+// from the main thread.
+Environment* UnitTest::AddEnvironment(Environment* env) {
+  if (env == NULL) {
+    return NULL;
+  }
+
+  impl_->environments().push_back(env);
+  return env;
+}
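+
+// User code normally reaches this through ::testing::AddGlobalTestEnvironment,
+// e.g. (FooEnvironment being a hypothetical Environment subclass):
+//
+//   ::testing::AddGlobalTestEnvironment(new FooEnvironment);
+//
+// called in main() before RUN_ALL_TESTS().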
+
+// Adds a TestPartResult to the current TestResult object.  All Google Test
+// assertion macros (e.g. ASSERT_TRUE, EXPECT_EQ, etc) eventually call
+// this to report their results.  The user code should use the
+// assertion macros instead of calling this directly.
+// L < mutex_
+void UnitTest::AddTestPartResult(TestPartResult::Type result_type,
+                                 const char* file_name,
+                                 int line_number,
+                                 const internal::String& message,
+                                 const internal::String& os_stack_trace) {
+  Message msg;
+  msg << message;
+
+  internal::MutexLock lock(&mutex_);
+  if (impl_->gtest_trace_stack().size() > 0) {
+    msg << "\n" << GTEST_NAME_ << " trace:";
+
+    for (int i = static_cast<int>(impl_->gtest_trace_stack().size());
+         i > 0; --i) {
+      const internal::TraceInfo& trace = impl_->gtest_trace_stack()[i - 1];
+      msg << "\n" << internal::FormatFileLocation(trace.file, trace.line)
+          << " " << trace.message;
+    }
+  }
+
+  if (os_stack_trace.c_str() != NULL && !os_stack_trace.empty()) {
+    msg << internal::kStackTraceMarker << os_stack_trace;
+  }
+
+  const TestPartResult result =
+    TestPartResult(result_type, file_name, line_number,
+                   msg.GetString().c_str());
+  impl_->GetTestPartResultReporterForCurrentThread()->
+      ReportTestPartResult(result);
+
+  if (result_type != TestPartResult::kSuccess) {
+    // gtest_break_on_failure takes precedence over
+    // gtest_throw_on_failure.  This allows a user to set the latter
+    // in the code (perhaps in order to use Google Test assertions
+    // with another testing framework) and specify the former on the
+    // command line for debugging.
+    if (GTEST_FLAG(break_on_failure)) {
+#if GTEST_OS_WINDOWS
+      // Using DebugBreak on Windows allows gtest to still break into a debugger
+      // when a failure happens and both the --gtest_break_on_failure and
+      // the --gtest_catch_exceptions flags are specified.
+      DebugBreak();
+#else
+      // Dereference NULL through a volatile pointer to prevent the compiler
+      // from removing. We use this rather than abort() or __builtin_trap() for
+      // portability: Symbian doesn't implement abort() well, and some debuggers
+      // don't correctly trap abort().
+      *static_cast<volatile int*>(NULL) = 1;
+#endif  // GTEST_OS_WINDOWS
+    } else if (GTEST_FLAG(throw_on_failure)) {
+#if GTEST_HAS_EXCEPTIONS
+      throw GoogleTestFailureException(result);
+#else
+      // We cannot call abort() as it generates a pop-up in debug mode
+      // that cannot be suppressed in VC 7.1 or below.
+      exit(1);
+#endif
+    }
+  }
+}
+
+// Creates and adds a property to the current TestResult.  If a property with
+// the same key already exists, updates its value instead.
+void UnitTest::RecordPropertyForCurrentTest(const char* key,
+                                            const char* value) {
+  const TestProperty test_property(key, value);
+  impl_->current_test_result()->RecordProperty(test_property);
+}
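+
+// Illustrative sketch: inside a test body, something like
+//
+//   RecordProperty("MaximumWidgets", "12");  // hypothetical key/value
+//
+// should end up as a MaximumWidgets="12" attribute on the corresponding
+// <testcase> element of the XML report.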
+
+// Runs all tests in this UnitTest object and prints the result.
+// Returns 0 if successful, or 1 otherwise.
+//
+// We don't protect this under mutex_, as we only support calling it
+// from the main thread.
+int UnitTest::Run() {
+  // Captures the value of GTEST_FLAG(catch_exceptions).  This value will be
+  // used for the duration of the program.
+  impl()->set_catch_exceptions(GTEST_FLAG(catch_exceptions));
+
+#if GTEST_HAS_SEH
+  const bool in_death_test_child_process =
+      internal::GTEST_FLAG(internal_run_death_test).length() > 0;
+
+  // Either the user wants Google Test to catch exceptions thrown by the
+  // tests or this is executing in the context of death test child
+  // process. In either case the user does not want to see pop-up dialogs
+  // about crashes - they are expected.
+  if (impl()->catch_exceptions() || in_death_test_child_process) {
+
+# if !GTEST_OS_WINDOWS_MOBILE
+    // SetErrorMode doesn't exist on CE.
+    SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOALIGNMENTFAULTEXCEPT |
+                 SEM_NOGPFAULTERRORBOX | SEM_NOOPENFILEERRORBOX);
+# endif  // !GTEST_OS_WINDOWS_MOBILE
+
+# if (defined(_MSC_VER) || GTEST_OS_WINDOWS_MINGW) && !GTEST_OS_WINDOWS_MOBILE
+    // Death test children can be terminated with _abort().  On Windows,
+    // _abort() can show a dialog with a warning message.  This forces the
+    // abort message to go to stderr instead.
+    _set_error_mode(_OUT_TO_STDERR);
+# endif
+
+# if _MSC_VER >= 1400 && !GTEST_OS_WINDOWS_MOBILE
+    // In the debug version, Visual Studio pops up a separate dialog
+    // offering a choice to debug the aborted program. We need to suppress
+    // this dialog or it will pop up for every EXPECT/ASSERT_DEATH statement
+    // executed. Google Test will notify the user of any unexpected
+    // failure via stderr.
+    //
+    // VC++ doesn't define _set_abort_behavior() prior to the version 8.0.
+    // Users of prior VC versions shall suffer the agony and pain of
+    // clicking through the countless debug dialogs.
+    // TODO(vladl at google.com): find a way to suppress the abort dialog() in the
+    // debug mode when compiled with VC 7.1 or lower.
+    if (!GTEST_FLAG(break_on_failure))
+      _set_abort_behavior(
+          0x0,                                    // Clear the following flags:
+          _WRITE_ABORT_MSG | _CALL_REPORTFAULT);  // pop-up window, core dump.
+# endif
+
+  }
+#endif  // GTEST_HAS_SEH
+
+  return internal::HandleExceptionsInMethodIfSupported(
+      impl(),
+      &internal::UnitTestImpl::RunAllTests,
+      "auxiliary test code (environments or event listeners)") ? 0 : 1;
+}
+
+// Returns the working directory when the first TEST() or TEST_F() was
+// executed.
+const char* UnitTest::original_working_dir() const {
+  return impl_->original_working_dir_.c_str();
+}
+
+// Returns the TestCase object for the test that's currently running,
+// or NULL if no test is running.
+// L < mutex_
+const TestCase* UnitTest::current_test_case() const {
+  internal::MutexLock lock(&mutex_);
+  return impl_->current_test_case();
+}
+
+// Returns the TestInfo object for the test that's currently running,
+// or NULL if no test is running.
+// L < mutex_
+const TestInfo* UnitTest::current_test_info() const {
+  internal::MutexLock lock(&mutex_);
+  return impl_->current_test_info();
+}
+
+// Returns the random seed used at the start of the current test run.
+int UnitTest::random_seed() const { return impl_->random_seed(); }
+
+#if GTEST_HAS_PARAM_TEST
+// Returns ParameterizedTestCaseRegistry object used to keep track of
+// value-parameterized tests and instantiate and register them.
+// L < mutex_
+internal::ParameterizedTestCaseRegistry&
+    UnitTest::parameterized_test_registry() {
+  return impl_->parameterized_test_registry();
+}
+#endif  // GTEST_HAS_PARAM_TEST
+
+// Creates an empty UnitTest.
+UnitTest::UnitTest() {
+  impl_ = new internal::UnitTestImpl(this);
+}
+
+// Destructor of UnitTest.
+UnitTest::~UnitTest() {
+  delete impl_;
+}
+
+// Pushes a trace defined by SCOPED_TRACE() on to the per-thread
+// Google Test trace stack.
+// L < mutex_
+void UnitTest::PushGTestTrace(const internal::TraceInfo& trace) {
+  internal::MutexLock lock(&mutex_);
+  impl_->gtest_trace_stack().push_back(trace);
+}
+
+// Pops a trace from the per-thread Google Test trace stack.
+// L < mutex_
+void UnitTest::PopGTestTrace() {
+  internal::MutexLock lock(&mutex_);
+  impl_->gtest_trace_stack().pop_back();
+}
+
+namespace internal {
+
+UnitTestImpl::UnitTestImpl(UnitTest* parent)
+    : parent_(parent),
+#ifdef _MSC_VER
+# pragma warning(push)                    // Saves the current warning state.
+# pragma warning(disable:4355)            // Temporarily disables warning 4355
+                                         // (using this in initializer).
+      default_global_test_part_result_reporter_(this),
+      default_per_thread_test_part_result_reporter_(this),
+# pragma warning(pop)                     // Restores the warning state again.
+#else
+      default_global_test_part_result_reporter_(this),
+      default_per_thread_test_part_result_reporter_(this),
+#endif  // _MSC_VER
+      global_test_part_result_repoter_(
+          &default_global_test_part_result_reporter_),
+      per_thread_test_part_result_reporter_(
+          &default_per_thread_test_part_result_reporter_),
+#if GTEST_HAS_PARAM_TEST
+      parameterized_test_registry_(),
+      parameterized_tests_registered_(false),
+#endif  // GTEST_HAS_PARAM_TEST
+      last_death_test_case_(-1),
+      current_test_case_(NULL),
+      current_test_info_(NULL),
+      ad_hoc_test_result_(),
+      os_stack_trace_getter_(NULL),
+      post_flag_parse_init_performed_(false),
+      random_seed_(0),  // Will be overridden by the flag before first use.
+      random_(0),  // Will be reseeded before first use.
+      elapsed_time_(0),
+#if GTEST_HAS_DEATH_TEST
+      internal_run_death_test_flag_(NULL),
+      death_test_factory_(new DefaultDeathTestFactory),
+#endif
+      // Will be overridden by the flag before first use.
+      catch_exceptions_(false) {
+  listeners()->SetDefaultResultPrinter(new PrettyUnitTestResultPrinter);
+}
+
+UnitTestImpl::~UnitTestImpl() {
+  // Deletes every TestCase.
+  ForEach(test_cases_, internal::Delete<TestCase>);
+
+  // Deletes every Environment.
+  ForEach(environments_, internal::Delete<Environment>);
+
+  delete os_stack_trace_getter_;
+}
+
+#if GTEST_HAS_DEATH_TEST
+// Disables event forwarding if the control is currently in a death test
+// subprocess. Must not be called before InitGoogleTest.
+void UnitTestImpl::SuppressTestEventsIfInSubprocess() {
+  if (internal_run_death_test_flag_.get() != NULL)
+    listeners()->SuppressEventForwarding();
+}
+#endif  // GTEST_HAS_DEATH_TEST
+
+// Initializes event listeners performing XML output as specified by
+// UnitTestOptions. Must not be called before InitGoogleTest.
+void UnitTestImpl::ConfigureXmlOutput() {
+  const String& output_format = UnitTestOptions::GetOutputFormat();
+  if (output_format == "xml") {
+    listeners()->SetDefaultXmlGenerator(new XmlUnitTestResultPrinter(
+        UnitTestOptions::GetAbsolutePathToOutputFile().c_str()));
+  } else if (output_format != "") {
+    printf("WARNING: unrecognized output format \"%s\" ignored.\n",
+           output_format.c_str());
+    fflush(stdout);
+  }
+}
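+
+// For illustration, running the test binary with
+// --gtest_output=xml:my_report.xml (my_report.xml being an arbitrary path)
+// makes GetOutputFormat() return "xml", so an XmlUnitTestResultPrinter
+// writing to that file is installed as the default XML generator.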
+
+#if GTEST_CAN_STREAM_RESULTS_
+// Initializes event listeners for streaming test results in String form.
+// Must not be called before InitGoogleTest.
+void UnitTestImpl::ConfigureStreamingOutput() {
+  const string& target = GTEST_FLAG(stream_result_to);
+  if (!target.empty()) {
+    const size_t pos = target.find(':');
+    if (pos != string::npos) {
+      listeners()->Append(new StreamingListener(target.substr(0, pos),
+                                                target.substr(pos+1)));
+    } else {
+      printf("WARNING: unrecognized streaming target \"%s\" ignored.\n",
+             target.c_str());
+      fflush(stdout);
+    }
+  }
+}
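+
+// For illustration, --gtest_stream_result_to=localhost:9091 (host and port
+// are arbitrary here) results in Append(new StreamingListener("localhost",
+// "9091")) above.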
+#endif  // GTEST_CAN_STREAM_RESULTS_
+
+// Performs initialization dependent upon flag values obtained in
+// ParseGoogleTestFlagsOnly.  Is called from InitGoogleTest after the call to
+// ParseGoogleTestFlagsOnly.  In case a user neglects to call InitGoogleTest
+// this function is also called from RunAllTests.  Since this function can be
+// called more than once, it has to be idempotent.
+void UnitTestImpl::PostFlagParsingInit() {
+  // Ensures that this function does not execute more than once.
+  if (!post_flag_parse_init_performed_) {
+    post_flag_parse_init_performed_ = true;
+
+#if GTEST_HAS_DEATH_TEST
+    InitDeathTestSubprocessControlInfo();
+    SuppressTestEventsIfInSubprocess();
+#endif  // GTEST_HAS_DEATH_TEST
+
+    // Registers parameterized tests. This makes parameterized tests
+    // available to the UnitTest reflection API without running
+    // RUN_ALL_TESTS.
+    RegisterParameterizedTests();
+
+    // Configures listeners for XML output. This makes it possible for users
+    // to shut down the default XML output before invoking RUN_ALL_TESTS.
+    ConfigureXmlOutput();
+
+#if GTEST_CAN_STREAM_RESULTS_
+    // Configures listeners for streaming test results to the specified server.
+    ConfigureStreamingOutput();
+#endif  // GTEST_CAN_STREAM_RESULTS_
+  }
+}
+
+// A predicate that checks the name of a TestCase against a known
+// value.
+//
+// This is used for implementation of the UnitTest class only.  We put
+// it in the anonymous namespace to prevent polluting the outer
+// namespace.
+//
+// TestCaseNameIs is copyable.
+class TestCaseNameIs {
+ public:
+  // Constructor.
+  explicit TestCaseNameIs(const String& name)
+      : name_(name) {}
+
+  // Returns true iff the name of test_case matches name_.
+  bool operator()(const TestCase* test_case) const {
+    return test_case != NULL && strcmp(test_case->name(), name_.c_str()) == 0;
+  }
+
+ private:
+  String name_;
+};
+
+// Finds and returns a TestCase with the given name.  If one doesn't
+// exist, creates one and returns it.  It's the CALLER'S
+// RESPONSIBILITY to ensure that this function is only called WHEN THE
+// TESTS ARE NOT SHUFFLED.
+//
+// Arguments:
+//
+//   test_case_name: name of the test case
+//   type_param:     the name of the test case's type parameter, or NULL if
+//                   this is not a typed or a type-parameterized test case.
+//   set_up_tc:      pointer to the function that sets up the test case
+//   tear_down_tc:   pointer to the function that tears down the test case
+TestCase* UnitTestImpl::GetTestCase(const char* test_case_name,
+                                    const char* type_param,
+                                    Test::SetUpTestCaseFunc set_up_tc,
+                                    Test::TearDownTestCaseFunc tear_down_tc) {
+  // Can we find a TestCase with the given name?
+  const std::vector<TestCase*>::const_iterator test_case =
+      std::find_if(test_cases_.begin(), test_cases_.end(),
+                   TestCaseNameIs(test_case_name));
+
+  if (test_case != test_cases_.end())
+    return *test_case;
+
+  // No.  Let's create one.
+  TestCase* const new_test_case =
+      new TestCase(test_case_name, type_param, set_up_tc, tear_down_tc);
+
+  // Is this a death test case?
+  if (internal::UnitTestOptions::MatchesFilter(String(test_case_name),
+                                               kDeathTestCaseFilter)) {
+    // Yes.  Inserts the test case after the last death test case
+    // defined so far.  This only works when the test cases haven't
+    // been shuffled.  Otherwise we may end up running a death test
+    // after a non-death test.
+    ++last_death_test_case_;
+    test_cases_.insert(test_cases_.begin() + last_death_test_case_,
+                       new_test_case);
+  } else {
+    // No.  Appends to the end of the list.
+    test_cases_.push_back(new_test_case);
+  }
+
+  test_case_indices_.push_back(static_cast<int>(test_case_indices_.size()));
+  return new_test_case;
+}
+
+// Helpers for setting up / tearing down the given environment.  They
+// are for use in the ForEach() function.
+static void SetUpEnvironment(Environment* env) { env->SetUp(); }
+static void TearDownEnvironment(Environment* env) { env->TearDown(); }
+
+// Runs all tests in this UnitTest object, prints the result, and
+// returns true if all tests are successful.  If any exception is
+// thrown during a test, the test is considered to be failed, but the
+// rest of the tests will still be run.
+//
+// When parameterized tests are enabled, it expands and registers
+// parameterized tests first in RegisterParameterizedTests().
+// All other functions called from RunAllTests() may safely assume that
+// parameterized tests are ready to be counted and run.
+bool UnitTestImpl::RunAllTests() {
+  // Makes sure InitGoogleTest() was called.
+  if (!GTestIsInitialized()) {
+    printf("%s",
+           "\nThis test program did NOT call ::testing::InitGoogleTest "
+           "before calling RUN_ALL_TESTS().  Please fix it.\n");
+    return false;
+  }
+
+  // Do not run any test if the --help flag was specified.
+  if (g_help_flag)
+    return true;
+
+  // Repeats the call to the post-flag parsing initialization in case the
+  // user didn't call InitGoogleTest.
+  PostFlagParsingInit();
+
+  // Even if sharding is not on, test runners may want to use the
+  // GTEST_SHARD_STATUS_FILE to query whether the test supports the sharding
+  // protocol.
+  internal::WriteToShardStatusFileIfNeeded();
+
+  // True iff we are in a subprocess for running a thread-safe-style
+  // death test.
+  bool in_subprocess_for_death_test = false;
+
+#if GTEST_HAS_DEATH_TEST
+  in_subprocess_for_death_test = (internal_run_death_test_flag_.get() != NULL);
+#endif  // GTEST_HAS_DEATH_TEST
+
+  const bool should_shard = ShouldShard(kTestTotalShards, kTestShardIndex,
+                                        in_subprocess_for_death_test);
+
+  // Compares the full test names with the filter to decide which
+  // tests to run.
+  const bool has_tests_to_run = FilterTests(should_shard
+                                              ? HONOR_SHARDING_PROTOCOL
+                                              : IGNORE_SHARDING_PROTOCOL) > 0;
+
+  // Lists the tests and exits if the --gtest_list_tests flag was specified.
+  if (GTEST_FLAG(list_tests)) {
+    // This must be called *after* FilterTests() has been called.
+    ListTestsMatchingFilter();
+    return true;
+  }
+
+  random_seed_ = GTEST_FLAG(shuffle) ?
+      GetRandomSeedFromFlag(GTEST_FLAG(random_seed)) : 0;
+
+  // True iff at least one test has failed.
+  bool failed = false;
+
+  TestEventListener* repeater = listeners()->repeater();
+
+  repeater->OnTestProgramStart(*parent_);
+
+  // How many times to repeat the tests?  We don't want to repeat them
+  // when we are inside the subprocess of a death test.
+  const int repeat = in_subprocess_for_death_test ? 1 : GTEST_FLAG(repeat);
+  // Repeats forever if the repeat count is negative.
+  const bool forever = repeat < 0;
+  for (int i = 0; forever || i != repeat; i++) {
+    // We want to preserve failures generated by ad-hoc test
+    // assertions executed before RUN_ALL_TESTS().
+    ClearNonAdHocTestResult();
+
+    const TimeInMillis start = GetTimeInMillis();
+
+    // Shuffles test cases and tests if requested.
+    if (has_tests_to_run && GTEST_FLAG(shuffle)) {
+      random()->Reseed(random_seed_);
+      // This should be done before calling OnTestIterationStart(),
+      // such that a test event listener can see the actual test order
+      // in the event.
+      ShuffleTests();
+    }
+
+    // Tells the unit test event listeners that the tests are about to start.
+    repeater->OnTestIterationStart(*parent_, i);
+
+    // Runs each test case if there is at least one test to run.
+    if (has_tests_to_run) {
+      // Sets up all environments beforehand.
+      repeater->OnEnvironmentsSetUpStart(*parent_);
+      ForEach(environments_, SetUpEnvironment);
+      repeater->OnEnvironmentsSetUpEnd(*parent_);
+
+      // Runs the tests only if there was no fatal failure during global
+      // set-up.
+      if (!Test::HasFatalFailure()) {
+        for (int test_index = 0; test_index < total_test_case_count();
+             test_index++) {
+          GetMutableTestCase(test_index)->Run();
+        }
+      }
+
+      // Tears down all environments in reverse order afterwards.
+      repeater->OnEnvironmentsTearDownStart(*parent_);
+      std::for_each(environments_.rbegin(), environments_.rend(),
+                    TearDownEnvironment);
+      repeater->OnEnvironmentsTearDownEnd(*parent_);
+    }
+
+    elapsed_time_ = GetTimeInMillis() - start;
+
+    // Tells the unit test event listener that the tests have just finished.
+    repeater->OnTestIterationEnd(*parent_, i);
+
+    // Gets the result and clears it.
+    if (!Passed()) {
+      failed = true;
+    }
+
+    // Restores the original test order after the iteration.  This
+    // allows the user to quickly repro a failure that happens in the
+    // N-th iteration without repeating the first (N - 1) iterations.
+    // This is not enclosed in "if (GTEST_FLAG(shuffle)) { ... }", in
+    // case the user somehow changes the value of the flag somewhere
+    // (it's always safe to unshuffle the tests).
+    UnshuffleTests();
+
+    if (GTEST_FLAG(shuffle)) {
+      // Picks a new random seed for each iteration.
+      random_seed_ = GetNextRandomSeed(random_seed_);
+    }
+  }
+
+  repeater->OnTestProgramEnd(*parent_);
+
+  return !failed;
+}
+
+// Reads the GTEST_SHARD_STATUS_FILE environment variable, and creates the file
+// if the variable is present. If a file already exists at this location, this
+// function will write over it. If the variable is present, but the file cannot
+// be created, prints an error and exits.
+void WriteToShardStatusFileIfNeeded() {
+  const char* const test_shard_file = posix::GetEnv(kTestShardStatusFile);
+  if (test_shard_file != NULL) {
+    FILE* const file = posix::FOpen(test_shard_file, "w");
+    if (file == NULL) {
+      ColoredPrintf(COLOR_RED,
+                    "Could not write to the test shard status file \"%s\" "
+                    "specified by the %s environment variable.\n",
+                    test_shard_file, kTestShardStatusFile);
+      fflush(stdout);
+      exit(EXIT_FAILURE);
+    }
+    fclose(file);
+  }
+}
+
+// Checks whether sharding is enabled by examining the relevant
+// environment variable values. If the variables are present,
+// but inconsistent (i.e., shard_index >= total_shards), prints
+// an error and exits. If in_subprocess_for_death_test, sharding is
+// disabled because it must only be applied to the original test
+// process. Otherwise, we could filter out death tests we intended to execute.
+bool ShouldShard(const char* total_shards_env,
+                 const char* shard_index_env,
+                 bool in_subprocess_for_death_test) {
+  if (in_subprocess_for_death_test) {
+    return false;
+  }
+
+  const Int32 total_shards = Int32FromEnvOrDie(total_shards_env, -1);
+  const Int32 shard_index = Int32FromEnvOrDie(shard_index_env, -1);
+
+  if (total_shards == -1 && shard_index == -1) {
+    return false;
+  } else if (total_shards == -1 && shard_index != -1) {
+    const Message msg = Message()
+      << "Invalid environment variables: you have "
+      << kTestShardIndex << " = " << shard_index
+      << ", but have left " << kTestTotalShards << " unset.\n";
+    ColoredPrintf(COLOR_RED, msg.GetString().c_str());
+    fflush(stdout);
+    exit(EXIT_FAILURE);
+  } else if (total_shards != -1 && shard_index == -1) {
+    const Message msg = Message()
+      << "Invalid environment variables: you have "
+      << kTestTotalShards << " = " << total_shards
+      << ", but have left " << kTestShardIndex << " unset.\n";
+    ColoredPrintf(COLOR_RED, msg.GetString().c_str());
+    fflush(stdout);
+    exit(EXIT_FAILURE);
+  } else if (shard_index < 0 || shard_index >= total_shards) {
+    const Message msg = Message()
+      << "Invalid environment variables: we require 0 <= "
+      << kTestShardIndex << " < " << kTestTotalShards
+      << ", but you have " << kTestShardIndex << "=" << shard_index
+      << ", " << kTestTotalShards << "=" << total_shards << ".\n";
+    ColoredPrintf(COLOR_RED, msg.GetString().c_str());
+    fflush(stdout);
+    exit(EXIT_FAILURE);
+  }
+
+  return total_shards > 1;
+}
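+
+// Illustrative sketch: a test runner that splits the work across three
+// machines would typically export the sharding variables before launching
+// each copy of the binary, e.g. GTEST_TOTAL_SHARDS=3 and GTEST_SHARD_INDEX=0,
+// 1, or 2 (assuming those are the values of kTestTotalShards and
+// kTestShardIndex), making ShouldShard() return true on every shard.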
+
+// Parses the environment variable var as an Int32. If it is unset,
+// returns default_val. If it is not an Int32, prints an error
+// and aborts.
+Int32 Int32FromEnvOrDie(const char* var, Int32 default_val) {
+  const char* str_val = posix::GetEnv(var);
+  if (str_val == NULL) {
+    return default_val;
+  }
+
+  Int32 result;
+  if (!ParseInt32(Message() << "The value of environment variable " << var,
+                  str_val, &result)) {
+    exit(EXIT_FAILURE);
+  }
+  return result;
+}
+
+// Given the total number of shards, the shard index, and the test id,
+// returns true iff the test should be run on this shard. The test id is
+// some arbitrary but unique non-negative integer assigned to each test
+// method. Assumes that 0 <= shard_index < total_shards.
+bool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) {
+  return (test_id % total_shards) == shard_index;
+}
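+
+// For example, with total_shards == 3 and shard_index == 1, the tests with
+// ids 1, 4, 7, ... are selected, so each test runs on exactly one shard.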
+
+// Compares the name of each test with the user-specified filter to
+// decide whether the test should be run, then records the result in
+// each TestCase and TestInfo object.
+// If shard_tests == true, further filters tests based on sharding
+// variables in the environment - see
+// http://code.google.com/p/googletest/wiki/GoogleTestAdvancedGuide.
+// Returns the number of tests that should run.
+int UnitTestImpl::FilterTests(ReactionToSharding shard_tests) {
+  const Int32 total_shards = shard_tests == HONOR_SHARDING_PROTOCOL ?
+      Int32FromEnvOrDie(kTestTotalShards, -1) : -1;
+  const Int32 shard_index = shard_tests == HONOR_SHARDING_PROTOCOL ?
+      Int32FromEnvOrDie(kTestShardIndex, -1) : -1;
+
+  // num_runnable_tests is the number of tests that will run across all
+  // shards (i.e., tests that match the filter and are not disabled).
+  // num_selected_tests is the number of tests to be run on
+  // this shard.
+  int num_runnable_tests = 0;
+  int num_selected_tests = 0;
+  for (size_t i = 0; i < test_cases_.size(); i++) {
+    TestCase* const test_case = test_cases_[i];
+    const String &test_case_name = test_case->name();
+    test_case->set_should_run(false);
+
+    for (size_t j = 0; j < test_case->test_info_list().size(); j++) {
+      TestInfo* const test_info = test_case->test_info_list()[j];
+      const String test_name(test_info->name());
+      // A test is disabled if test case name or test name matches
+      // kDisableTestFilter.
+      const bool is_disabled =
+          internal::UnitTestOptions::MatchesFilter(test_case_name,
+                                                   kDisableTestFilter) ||
+          internal::UnitTestOptions::MatchesFilter(test_name,
+                                                   kDisableTestFilter);
+      test_info->is_disabled_ = is_disabled;
+
+      const bool matches_filter =
+          internal::UnitTestOptions::FilterMatchesTest(test_case_name,
+                                                       test_name);
+      test_info->matches_filter_ = matches_filter;
+
+      const bool is_runnable =
+          (GTEST_FLAG(also_run_disabled_tests) || !is_disabled) &&
+          matches_filter;
+
+      const bool is_selected = is_runnable &&
+          (shard_tests == IGNORE_SHARDING_PROTOCOL ||
+           ShouldRunTestOnShard(total_shards, shard_index,
+                                num_runnable_tests));
+
+      num_runnable_tests += is_runnable;
+      num_selected_tests += is_selected;
+
+      test_info->should_run_ = is_selected;
+      test_case->set_should_run(test_case->should_run() || is_selected);
+    }
+  }
+  return num_selected_tests;
+}
+
+// Prints the names of the tests matching the user-specified filter flag.
+void UnitTestImpl::ListTestsMatchingFilter() {
+  for (size_t i = 0; i < test_cases_.size(); i++) {
+    const TestCase* const test_case = test_cases_[i];
+    bool printed_test_case_name = false;
+
+    for (size_t j = 0; j < test_case->test_info_list().size(); j++) {
+      const TestInfo* const test_info =
+          test_case->test_info_list()[j];
+      if (test_info->matches_filter_) {
+        if (!printed_test_case_name) {
+          printed_test_case_name = true;
+          printf("%s.\n", test_case->name());
+        }
+        printf("  %s\n", test_info->name());
+      }
+    }
+  }
+  fflush(stdout);
+}
+
+// Sets the OS stack trace getter.
+//
+// Does nothing if the input and the current OS stack trace getter are
+// the same; otherwise, deletes the old getter and makes the input the
+// current getter.
+void UnitTestImpl::set_os_stack_trace_getter(
+    OsStackTraceGetterInterface* getter) {
+  if (os_stack_trace_getter_ != getter) {
+    delete os_stack_trace_getter_;
+    os_stack_trace_getter_ = getter;
+  }
+}
+
+// Returns the current OS stack trace getter if it is not NULL;
+// otherwise, creates an OsStackTraceGetter, makes it the current
+// getter, and returns it.
+OsStackTraceGetterInterface* UnitTestImpl::os_stack_trace_getter() {
+  if (os_stack_trace_getter_ == NULL) {
+    os_stack_trace_getter_ = new OsStackTraceGetter;
+  }
+
+  return os_stack_trace_getter_;
+}
+
+// Returns the TestResult for the test that's currently running, or
+// the TestResult for the ad hoc test if no test is running.
+TestResult* UnitTestImpl::current_test_result() {
+  return current_test_info_ ?
+      &(current_test_info_->result_) : &ad_hoc_test_result_;
+}
+
+// Shuffles all test cases, and the tests within each test case,
+// making sure that death tests are still run first.
+void UnitTestImpl::ShuffleTests() {
+  // Shuffles the death test cases.
+  ShuffleRange(random(), 0, last_death_test_case_ + 1, &test_case_indices_);
+
+  // Shuffles the non-death test cases.
+  ShuffleRange(random(), last_death_test_case_ + 1,
+               static_cast<int>(test_cases_.size()), &test_case_indices_);
+
+  // Shuffles the tests inside each test case.
+  for (size_t i = 0; i < test_cases_.size(); i++) {
+    test_cases_[i]->ShuffleTests(random());
+  }
+}
+
+// Restores the test cases and tests to their order before the first shuffle.
+void UnitTestImpl::UnshuffleTests() {
+  for (size_t i = 0; i < test_cases_.size(); i++) {
+    // Unshuffles the tests in each test case.
+    test_cases_[i]->UnshuffleTests();
+    // Resets the index of each test case.
+    test_case_indices_[i] = static_cast<int>(i);
+  }
+}
+
+// Returns the current OS stack trace as a String.
+//
+// The maximum number of stack frames to be included is specified by
+// the gtest_stack_trace_depth flag.  The skip_count parameter
+// specifies the number of top frames to be skipped, which doesn't
+// count against the number of frames to be included.
+//
+// For example, if Foo() calls Bar(), which in turn calls
+// GetCurrentOsStackTraceExceptTop(..., 1), Foo() will be included in
+// the trace but Bar() and GetCurrentOsStackTraceExceptTop() won't.
+String GetCurrentOsStackTraceExceptTop(UnitTest* /*unit_test*/,
+                                       int skip_count) {
+  // We pass skip_count + 1 to skip this wrapper function in addition
+  // to what the user really wants to skip.
+  return GetUnitTestImpl()->CurrentOsStackTraceExceptTop(skip_count + 1);
+}
+
+// Used by the GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_ macro to
+// suppress unreachable code warnings.
+namespace {
+class ClassUniqueToAlwaysTrue {};
+}
+
+bool IsTrue(bool condition) { return condition; }
+
+bool AlwaysTrue() {
+#if GTEST_HAS_EXCEPTIONS
+  // This condition is always false so AlwaysTrue() never actually throws,
+  // but it makes the compiler think that it may throw.
+  if (IsTrue(false))
+    throw ClassUniqueToAlwaysTrue();
+#endif  // GTEST_HAS_EXCEPTIONS
+  return true;
+}
+
+// If *pstr starts with the given prefix, modifies *pstr to be right
+// past the prefix and returns true; otherwise leaves *pstr unchanged
+// and returns false.  None of pstr, *pstr, and prefix can be NULL.
+bool SkipPrefix(const char* prefix, const char** pstr) {
+  const size_t prefix_len = strlen(prefix);
+  if (strncmp(*pstr, prefix, prefix_len) == 0) {
+    *pstr += prefix_len;
+    return true;
+  }
+  return false;
+}
+
+// Parses a string as a command line flag.  The string should have
+// the format "--flag=value".  When def_optional is true, the "=value"
+// part can be omitted.
+//
+// Returns the value of the flag, or NULL if the parsing failed.
+const char* ParseFlagValue(const char* str,
+                           const char* flag,
+                           bool def_optional) {
+  // str and flag must not be NULL.
+  if (str == NULL || flag == NULL) return NULL;
+
+  // The flag must start with "--" followed by GTEST_FLAG_PREFIX_.
+  const String flag_str = String::Format("--%s%s", GTEST_FLAG_PREFIX_, flag);
+  const size_t flag_len = flag_str.length();
+  if (strncmp(str, flag_str.c_str(), flag_len) != 0) return NULL;
+
+  // Skips the flag name.
+  const char* flag_end = str + flag_len;
+
+  // When def_optional is true, it's OK to not have a "=value" part.
+  if (def_optional && (flag_end[0] == '\0')) {
+    return flag_end;
+  }
+
+  // If def_optional is true and there are more characters after the
+  // flag name, or if def_optional is false, there must be a '=' after
+  // the flag name.
+  if (flag_end[0] != '=') return NULL;
+
+  // Returns the string after "=".
+  return flag_end + 1;
+}
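+
+// Illustrative calls (assuming GTEST_FLAG_PREFIX_ is "gtest_"):
+//
+//   ParseFlagValue("--gtest_repeat=100", "repeat", false);     // -> "100"
+//   ParseFlagValue("--gtest_list_tests", "list_tests", true);  // -> ""
+//   ParseFlagValue("--gtest_repeat", "repeat", false);         // -> NULL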
+
+// Parses a string for a bool flag, in the form of either
+// "--flag=value" or "--flag".
+//
+// In the former case, the value is taken as true as long as it does
+// not start with '0', 'f', or 'F'.
+//
+// In the latter case, the value is taken as true.
+//
+// On success, stores the value of the flag in *value, and returns
+// true.  On failure, returns false without changing *value.
+bool ParseBoolFlag(const char* str, const char* flag, bool* value) {
+  // Gets the value of the flag as a string.
+  const char* const value_str = ParseFlagValue(str, flag, true);
+
+  // Aborts if the parsing failed.
+  if (value_str == NULL) return false;
+
+  // Converts the string value to a bool.
+  *value = !(*value_str == '0' || *value_str == 'f' || *value_str == 'F');
+  return true;
+}
+
+// Parses a string for an Int32 flag, in the form of
+// "--flag=value".
+//
+// On success, stores the value of the flag in *value, and returns
+// true.  On failure, returns false without changing *value.
+bool ParseInt32Flag(const char* str, const char* flag, Int32* value) {
+  // Gets the value of the flag as a string.
+  const char* const value_str = ParseFlagValue(str, flag, false);
+
+  // Aborts if the parsing failed.
+  if (value_str == NULL) return false;
+
+  // Sets *value to the value of the flag.
+  return ParseInt32(Message() << "The value of flag --" << flag,
+                    value_str, value);
+}
+
+// Parses a string for a string flag, in the form of
+// "--flag=value".
+//
+// On success, stores the value of the flag in *value, and returns
+// true.  On failure, returns false without changing *value.
+bool ParseStringFlag(const char* str, const char* flag, String* value) {
+  // Gets the value of the flag as a string.
+  const char* const value_str = ParseFlagValue(str, flag, false);
+
+  // Aborts if the parsing failed.
+  if (value_str == NULL) return false;
+
+  // Sets *value to the value of the flag.
+  *value = value_str;
+  return true;
+}
+
+// Determines whether a string has a prefix that Google Test uses for its
+// flags, i.e., starts with GTEST_FLAG_PREFIX_ or GTEST_FLAG_PREFIX_DASH_.
+// If Google Test detects that a command line flag has its prefix but is not
+// recognized, it will print its help message. Flags starting with
+// GTEST_INTERNAL_PREFIX_ followed by "internal_" are considered Google Test
+// internal flags and do not trigger the help message.
+static bool HasGoogleTestFlagPrefix(const char* str) {
+  return (SkipPrefix("--", &str) ||
+          SkipPrefix("-", &str) ||
+          SkipPrefix("/", &str)) &&
+         !SkipPrefix(GTEST_FLAG_PREFIX_ "internal_", &str) &&
+         (SkipPrefix(GTEST_FLAG_PREFIX_, &str) ||
+          SkipPrefix(GTEST_FLAG_PREFIX_DASH_, &str));
+}
+
+// Prints a string containing code-encoded text.  The following escape
+// sequences can be used in the string to control the text color:
+//
+//   @@    prints a single '@' character.
+//   @R    changes the color to red.
+//   @G    changes the color to green.
+//   @Y    changes the color to yellow.
+//   @D    changes to the default terminal text color.
+//
+// TODO(wan@google.com): Write tests for this once we add stdout
+// capturing to Google Test.
+static void PrintColorEncoded(const char* str) {
+  GTestColor color = COLOR_DEFAULT;  // The current color.
+
+  // Conceptually, we split the string into segments divided by escape
+  // sequences.  Then we print one segment at a time.  At the end of
+  // each iteration, the str pointer advances to the beginning of the
+  // next segment.
+  for (;;) {
+    const char* p = strchr(str, '@');
+    if (p == NULL) {
+      ColoredPrintf(color, "%s", str);
+      return;
+    }
+
+    ColoredPrintf(color, "%s", String(str, p - str).c_str());
+
+    const char ch = p[1];
+    str = p + 2;
+    if (ch == '@') {
+      ColoredPrintf(color, "@");
+    } else if (ch == 'D') {
+      color = COLOR_DEFAULT;
+    } else if (ch == 'R') {
+      color = COLOR_RED;
+    } else if (ch == 'G') {
+      color = COLOR_GREEN;
+    } else if (ch == 'Y') {
+      color = COLOR_YELLOW;
+    } else {
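+      // Unknown escape sequence: the '@' is dropped and the following
+      // character is printed literally on the next pass.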
+      --str;
+    }
+  }
+}
+
+static const char kColorEncodedHelpMessage[] =
+"This program contains tests written using " GTEST_NAME_ ". You can use the\n"
+"following command line flags to control its behavior:\n"
+"\n"
+"Test Selection:\n"
+"  @G--" GTEST_FLAG_PREFIX_ "list_tests at D\n"
+"      List the names of all tests instead of running them. The name of\n"
+"      TEST(Foo, Bar) is \"Foo.Bar\".\n"
+"  @G--" GTEST_FLAG_PREFIX_ "filter=@YPOSTIVE_PATTERNS"
+    "[@G- at YNEGATIVE_PATTERNS]@D\n"
+"      Run only the tests whose name matches one of the positive patterns but\n"
+"      none of the negative patterns. '?' matches any single character; '*'\n"
+"      matches any substring; ':' separates two patterns.\n"
+"  @G--" GTEST_FLAG_PREFIX_ "also_run_disabled_tests at D\n"
+"      Run all disabled tests too.\n"
+"\n"
+"Test Execution:\n"
+"  @G--" GTEST_FLAG_PREFIX_ "repeat=@Y[COUNT]@D\n"
+"      Run the tests repeatedly; use a negative count to repeat forever.\n"
+"  @G--" GTEST_FLAG_PREFIX_ "shuffle at D\n"
+"      Randomize tests' orders on every iteration.\n"
+"  @G--" GTEST_FLAG_PREFIX_ "random_seed=@Y[NUMBER]@D\n"
+"      Random number seed to use for shuffling test orders (between 1 and\n"
+"      99999, or 0 to use a seed based on the current time).\n"
+"\n"
+"Test Output:\n"
+"  @G--" GTEST_FLAG_PREFIX_ "color=@Y(@Gyes at Y|@Gno at Y|@Gauto at Y)@D\n"
+"      Enable/disable colored output. The default is @Gauto at D.\n"
+"  - at G-" GTEST_FLAG_PREFIX_ "print_time=0 at D\n"
+"      Don't print the elapsed time of each test.\n"
+"  @G--" GTEST_FLAG_PREFIX_ "output=xml at Y[@G:@YDIRECTORY_PATH at G"
+    GTEST_PATH_SEP_ "@Y|@G:@YFILE_PATH]@D\n"
+"      Generate an XML report in the given directory or with the given file\n"
+"      name. @YFILE_PATH at D defaults to @Gtest_details.xml at D.\n"
+#if GTEST_CAN_STREAM_RESULTS_
+"  @G--" GTEST_FLAG_PREFIX_ "stream_result_to=@YHOST at G:@YPORT at D\n"
+"      Stream test results to the given server.\n"
+#endif  // GTEST_CAN_STREAM_RESULTS_
+"\n"
+"Assertion Behavior:\n"
+#if GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS
+"  @G--" GTEST_FLAG_PREFIX_ "death_test_style=@Y(@Gfast at Y|@Gthreadsafe at Y)@D\n"
+"      Set the default death test style.\n"
+#endif  // GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS
+"  @G--" GTEST_FLAG_PREFIX_ "break_on_failure at D\n"
+"      Turn assertion failures into debugger break-points.\n"
+"  @G--" GTEST_FLAG_PREFIX_ "throw_on_failure at D\n"
+"      Turn assertion failures into C++ exceptions.\n"
+"  @G--" GTEST_FLAG_PREFIX_ "catch_exceptions=0 at D\n"
+"      Do not report exceptions as test failures. Instead, allow them\n"
+"      to crash the program or throw a pop-up (on Windows).\n"
+"\n"
+"Except for @G--" GTEST_FLAG_PREFIX_ "list_tests at D, you can alternatively set "
+    "the corresponding\n"
+"environment variable of a flag (all letters in upper-case). For example, to\n"
+"disable colored text output, you can either specify @G--" GTEST_FLAG_PREFIX_
+    "color=no at D or set\n"
+"the @G" GTEST_FLAG_PREFIX_UPPER_ "COLOR at D environment variable to @Gno at D.\n"
+"\n"
+"For more information, please read the " GTEST_NAME_ " documentation at\n"
+"@G" GTEST_PROJECT_URL_ "@D. If you find a bug in " GTEST_NAME_ "\n"
+"(not one in your own code or tests), please report it to\n"
+"@G<" GTEST_DEV_EMAIL_ ">@D.\n";
+
+// Parses the command line for Google Test flags, without initializing
+// other parts of Google Test.  The type parameter CharType can be
+// instantiated to either char or wchar_t.
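+//
+// For example (illustrative), given argv = { "prog", "--gtest_repeat=2",
+// "data.txt", NULL } and *argc == 3, this leaves argv as { "prog",
+// "data.txt", NULL }, sets *argc to 2, and sets GTEST_FLAG(repeat) to 2.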
+template <typename CharType>
+void ParseGoogleTestFlagsOnlyImpl(int* argc, CharType** argv) {
+  for (int i = 1; i < *argc; i++) {
+    const String arg_string = StreamableToString(argv[i]);
+    const char* const arg = arg_string.c_str();
+
+    using internal::ParseBoolFlag;
+    using internal::ParseInt32Flag;
+    using internal::ParseStringFlag;
+
+    // Do we see a Google Test flag?
+    if (ParseBoolFlag(arg, kAlsoRunDisabledTestsFlag,
+                      &GTEST_FLAG(also_run_disabled_tests)) ||
+        ParseBoolFlag(arg, kBreakOnFailureFlag,
+                      &GTEST_FLAG(break_on_failure)) ||
+        ParseBoolFlag(arg, kCatchExceptionsFlag,
+                      &GTEST_FLAG(catch_exceptions)) ||
+        ParseStringFlag(arg, kColorFlag, &GTEST_FLAG(color)) ||
+        ParseStringFlag(arg, kDeathTestStyleFlag,
+                        &GTEST_FLAG(death_test_style)) ||
+        ParseBoolFlag(arg, kDeathTestUseFork,
+                      &GTEST_FLAG(death_test_use_fork)) ||
+        ParseStringFlag(arg, kFilterFlag, &GTEST_FLAG(filter)) ||
+        ParseStringFlag(arg, kInternalRunDeathTestFlag,
+                        &GTEST_FLAG(internal_run_death_test)) ||
+        ParseBoolFlag(arg, kListTestsFlag, &GTEST_FLAG(list_tests)) ||
+        ParseStringFlag(arg, kOutputFlag, &GTEST_FLAG(output)) ||
+        ParseBoolFlag(arg, kPrintTimeFlag, &GTEST_FLAG(print_time)) ||
+        ParseInt32Flag(arg, kRandomSeedFlag, &GTEST_FLAG(random_seed)) ||
+        ParseInt32Flag(arg, kRepeatFlag, &GTEST_FLAG(repeat)) ||
+        ParseBoolFlag(arg, kShuffleFlag, &GTEST_FLAG(shuffle)) ||
+        ParseInt32Flag(arg, kStackTraceDepthFlag,
+                       &GTEST_FLAG(stack_trace_depth)) ||
+        ParseStringFlag(arg, kStreamResultToFlag,
+                        &GTEST_FLAG(stream_result_to)) ||
+        ParseBoolFlag(arg, kThrowOnFailureFlag,
+                      &GTEST_FLAG(throw_on_failure))
+        ) {
+      // Yes.  Shift the remainder of the argv list left by one.  Note
+      // that argv has (*argc + 1) elements, the last one always being
+      // NULL.  The following loop moves the trailing NULL element as
+      // well.
+      for (int j = i; j != *argc; j++) {
+        argv[j] = argv[j + 1];
+      }
+
+      // Decrements the argument count.
+      (*argc)--;
+
+      // We also need to decrement the iterator as we just removed
+      // an element.
+      i--;
+    } else if (arg_string == "--help" || arg_string == "-h" ||
+               arg_string == "-?" || arg_string == "/?" ||
+               HasGoogleTestFlagPrefix(arg)) {
+      // Both help flag and unrecognized Google Test flags (excluding
+      // internal ones) trigger help display.
+      g_help_flag = true;
+    }
+  }
+
+  if (g_help_flag) {
+    // We print the help here instead of in RUN_ALL_TESTS(), as the
+    // latter may not be called at all if the user is using Google
+    // Test with another testing framework.
+    PrintColorEncoded(kColorEncodedHelpMessage);
+  }
+}
+
+// Parses the command line for Google Test flags, without initializing
+// other parts of Google Test.
+void ParseGoogleTestFlagsOnly(int* argc, char** argv) {
+  ParseGoogleTestFlagsOnlyImpl(argc, argv);
+}
+void ParseGoogleTestFlagsOnly(int* argc, wchar_t** argv) {
+  ParseGoogleTestFlagsOnlyImpl(argc, argv);
+}
+
+// The internal implementation of InitGoogleTest().
+//
+// The type parameter CharType can be instantiated to either char or
+// wchar_t.
+template <typename CharType>
+void InitGoogleTestImpl(int* argc, CharType** argv) {
+  g_init_gtest_count++;
+
+  // We don't want to run the initialization code twice.
+  if (g_init_gtest_count != 1) return;
+
+  if (*argc <= 0) return;
+
+  internal::g_executable_path = internal::StreamableToString(argv[0]);
+
+#if GTEST_HAS_DEATH_TEST
+
+  g_argvs.clear();
+  for (int i = 0; i != *argc; i++) {
+    g_argvs.push_back(StreamableToString(argv[i]));
+  }
+
+#endif  // GTEST_HAS_DEATH_TEST
+
+  ParseGoogleTestFlagsOnly(argc, argv);
+  GetUnitTestImpl()->PostFlagParsingInit();
+}
+
+}  // namespace internal
+
+// Initializes Google Test.  This must be called before calling
+// RUN_ALL_TESTS().  In particular, it parses a command line for the
+// flags that Google Test recognizes.  Whenever a Google Test flag is
+// seen, it is removed from argv, and *argc is decremented.
+//
+// No value is returned.  Instead, the Google Test flag variables are
+// updated.
+//
+// Calling the function for the second time has no user-visible effect.
+void InitGoogleTest(int* argc, char** argv) {
+  internal::InitGoogleTestImpl(argc, argv);
+}
+
+// This overloaded version can be used in Windows programs compiled in
+// UNICODE mode.
+void InitGoogleTest(int* argc, wchar_t** argv) {
+  internal::InitGoogleTestImpl(argc, argv);
+}
+
+}  // namespace testing
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan), vladl@google.com (Vlad Losev)
+//
+// This file implements death tests.
+
+
+#if GTEST_HAS_DEATH_TEST
+
+# if GTEST_OS_MAC
+#  include <crt_externs.h>
+# endif  // GTEST_OS_MAC
+
+# include <errno.h>
+# include <fcntl.h>
+# include <limits.h>
+# include <stdarg.h>
+
+# if GTEST_OS_WINDOWS
+#  include <windows.h>
+# else
+#  include <sys/mman.h>
+#  include <sys/wait.h>
+# endif  // GTEST_OS_WINDOWS
+
+#endif  // GTEST_HAS_DEATH_TEST
+
+
+// Indicates that this translation unit is part of Google Test's
+// implementation.  It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error.  This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#undef GTEST_IMPLEMENTATION_
+
+namespace testing {
+
+// Constants.
+
+// The default death test style.
+static const char kDefaultDeathTestStyle[] = "fast";
+
+GTEST_DEFINE_string_(
+    death_test_style,
+    internal::StringFromGTestEnv("death_test_style", kDefaultDeathTestStyle),
+    "Indicates how to run a death test in a forked child process: "
+    "\"threadsafe\" (child process re-executes the test binary "
+    "from the beginning, running only the specific death test) or "
+    "\"fast\" (child process runs the death test immediately "
+    "after forking).");
+
+GTEST_DEFINE_bool_(
+    death_test_use_fork,
+    internal::BoolFromGTestEnv("death_test_use_fork", false),
+    "Instructs to use fork()/_exit() instead of clone() in death tests. "
+    "Ignored and always uses fork() on POSIX systems where clone() is not "
+    "implemented. Useful when running under valgrind or similar tools if "
+    "those do not support clone(). Valgrind 3.3.1 will just fail if "
+    "it sees an unsupported combination of clone() flags. "
+    "It is not recommended to use this flag w/o valgrind though it will "
+    "work in 99% of the cases. Once valgrind is fixed, this flag will "
+    "most likely be removed.");
+
+namespace internal {
+GTEST_DEFINE_string_(
+    internal_run_death_test, "",
+    "Indicates the file, line number, temporal index of "
+    "the single death test to run, and a file descriptor to "
+    "which a success code may be sent, all separated by "
+    "colons.  This flag is specified if and only if the current "
+    "process is a sub-process launched for running a thread-safe "
+    "death test.  FOR INTERNAL USE ONLY.");
+}  // namespace internal
+
+#if GTEST_HAS_DEATH_TEST
+
+// ExitedWithCode constructor.
+ExitedWithCode::ExitedWithCode(int exit_code) : exit_code_(exit_code) {
+}
+
+// ExitedWithCode function-call operator.
+bool ExitedWithCode::operator()(int exit_status) const {
+# if GTEST_OS_WINDOWS
+
+  return exit_status == exit_code_;
+
+# else
+
+  return WIFEXITED(exit_status) && WEXITSTATUS(exit_status) == exit_code_;
+
+# endif  // GTEST_OS_WINDOWS
+}
+
+# if !GTEST_OS_WINDOWS
+// KilledBySignal constructor.
+KilledBySignal::KilledBySignal(int signum) : signum_(signum) {
+}
+
+// KilledBySignal function-call operator.
+bool KilledBySignal::operator()(int exit_status) const {
+  return WIFSIGNALED(exit_status) && WTERMSIG(exit_status) == signum_;
+}
+# endif  // !GTEST_OS_WINDOWS
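+
+// Example usage (illustrative): these predicates are typically passed to the
+// EXPECT_EXIT/ASSERT_EXIT macros, e.g.
+//   EXPECT_EXIT(DoCleanup(), ::testing::ExitedWithCode(0), "cleanup done");
+//   EXPECT_EXIT(Crash(), ::testing::KilledBySignal(SIGSEGV), ".*");
+// where DoCleanup() and Crash() are hypothetical functions under test.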
+
+namespace internal {
+
+// Utilities needed for death tests.
+
+// Generates a textual description of a given exit code, in the format
+// specified by wait(2).
+static String ExitSummary(int exit_code) {
+  Message m;
+
+# if GTEST_OS_WINDOWS
+
+  m << "Exited with exit status " << exit_code;
+
+# else
+
+  if (WIFEXITED(exit_code)) {
+    m << "Exited with exit status " << WEXITSTATUS(exit_code);
+  } else if (WIFSIGNALED(exit_code)) {
+    m << "Terminated by signal " << WTERMSIG(exit_code);
+  }
+#  ifdef WCOREDUMP
+  if (WCOREDUMP(exit_code)) {
+    m << " (core dumped)";
+  }
+#  endif
+# endif  // GTEST_OS_WINDOWS
+
+  return m.GetString();
+}
+
+// Returns true if exit_status describes a process that was terminated
+// by a signal, or exited normally with a nonzero exit code.
+bool ExitedUnsuccessfully(int exit_status) {
+  return !ExitedWithCode(0)(exit_status);
+}
+
+# if !GTEST_OS_WINDOWS
+// Generates a textual failure message when a death test finds more than
+// one thread running, or cannot determine the number of threads, prior
+// to executing the given statement.  It is the responsibility of the
+// caller not to pass a thread_count of 1.
+static String DeathTestThreadWarning(size_t thread_count) {
+  Message msg;
+  msg << "Death tests use fork(), which is unsafe particularly"
+      << " in a threaded context. For this test, " << GTEST_NAME_ << " ";
+  if (thread_count == 0)
+    msg << "couldn't detect the number of threads.";
+  else
+    msg << "detected " << thread_count << " threads.";
+  return msg.GetString();
+}
+# endif  // !GTEST_OS_WINDOWS
+
+// Flag characters for reporting a death test that did not die.
+static const char kDeathTestLived = 'L';
+static const char kDeathTestReturned = 'R';
+static const char kDeathTestThrew = 'T';
+static const char kDeathTestInternalError = 'I';
+
+// An enumeration describing all of the possible ways that a death test can
+// conclude.  DIED means that the process died while executing the test
+// code; LIVED means that the process lived beyond the end of the test code;
+// RETURNED means that the test statement attempted to execute a return
+// statement, which is not allowed; THREW means that the test statement
+// returned control by throwing an exception.  IN_PROGRESS means the test
+// has not yet concluded.
+// TODO(vladl@google.com): Unify names and possibly values for
+// AbortReason, DeathTestOutcome, and flag characters above.
+enum DeathTestOutcome { IN_PROGRESS, DIED, LIVED, RETURNED, THREW };
+
+// Routine for aborting the program which is safe to call from an
+// exec-style death test child process, in which case the error
+// message is propagated back to the parent process.  Otherwise, the
+// message is simply printed to stderr.  In either case, the program
+// then exits with status 1.
+void DeathTestAbort(const String& message) {
+  // On a POSIX system, this function may be called from a threadsafe-style
+  // death test child process, which operates on a very small stack.  Use
+  // the heap for any additional non-minuscule memory requirements.
+  const InternalRunDeathTestFlag* const flag =
+      GetUnitTestImpl()->internal_run_death_test_flag();
+  if (flag != NULL) {
+    FILE* parent = posix::FDOpen(flag->write_fd(), "w");
+    fputc(kDeathTestInternalError, parent);
+    fprintf(parent, "%s", message.c_str());
+    fflush(parent);
+    _exit(1);
+  } else {
+    fprintf(stderr, "%s", message.c_str());
+    fflush(stderr);
+    posix::Abort();
+  }
+}
+
+// A replacement for CHECK that calls DeathTestAbort if the assertion
+// fails.
+# define GTEST_DEATH_TEST_CHECK_(expression) \
+  do { \
+    if (!::testing::internal::IsTrue(expression)) { \
+      DeathTestAbort(::testing::internal::String::Format( \
+          "CHECK failed: File %s, line %d: %s", \
+          __FILE__, __LINE__, #expression)); \
+    } \
+  } while (::testing::internal::AlwaysFalse())
+
+// This macro is similar to GTEST_DEATH_TEST_CHECK_, but it is meant for
+// evaluating any system call that fulfills two conditions: it must return
+// -1 on failure, and set errno to EINTR when it is interrupted and
+// should be tried again.  The macro expands to a loop that repeatedly
+// evaluates the expression as long as it evaluates to -1 and sets
+// errno to EINTR.  If the expression evaluates to -1 but errno is
+// something other than EINTR, DeathTestAbort is called.
+# define GTEST_DEATH_TEST_CHECK_SYSCALL_(expression) \
+  do { \
+    int gtest_retval; \
+    do { \
+      gtest_retval = (expression); \
+    } while (gtest_retval == -1 && errno == EINTR); \
+    if (gtest_retval == -1) { \
+      DeathTestAbort(::testing::internal::String::Format( \
+          "CHECK failed: File %s, line %d: %s != -1", \
+          __FILE__, __LINE__, #expression)); \
+    } \
+  } while (::testing::internal::AlwaysFalse())
+
+// Returns the message describing the last system error in errno.
+String GetLastErrnoDescription() {
+    return String(errno == 0 ? "" : posix::StrError(errno));
+}
+
+// This is called from a death test parent process to read a failure
+// message from the death test child process and log it with the FATAL
+// severity. On Windows, the message is read from a pipe handle. On other
+// platforms, it is read from a file descriptor.
+static void FailFromInternalError(int fd) {
+  Message error;
+  char buffer[256];
+  int num_read;
+
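+  // Reads the child's message until EOF; if a read is interrupted by a
+  // signal (EINTR), retry.  One byte of the buffer is reserved for the
+  // terminating '\0'.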
+  do {
+    while ((num_read = posix::Read(fd, buffer, 255)) > 0) {
+      buffer[num_read] = '\0';
+      error << buffer;
+    }
+  } while (num_read == -1 && errno == EINTR);
+
+  if (num_read == 0) {
+    GTEST_LOG_(FATAL) << error.GetString();
+  } else {
+    const int last_error = errno;
+    GTEST_LOG_(FATAL) << "Error while reading death test internal: "
+                      << GetLastErrnoDescription() << " [" << last_error << "]";
+  }
+}
+
+// Death test constructor.  Increments the running death test count
+// for the current test.
+DeathTest::DeathTest() {
+  TestInfo* const info = GetUnitTestImpl()->current_test_info();
+  if (info == NULL) {
+    DeathTestAbort("Cannot run a death test outside of a TEST or "
+                   "TEST_F construct");
+  }
+}
+
+// Creates and returns a death test by dispatching to the current
+// death test factory.
+bool DeathTest::Create(const char* statement, const RE* regex,
+                       const char* file, int line, DeathTest** test) {
+  return GetUnitTestImpl()->death_test_factory()->Create(
+      statement, regex, file, line, test);
+}
+
+const char* DeathTest::LastMessage() {
+  return last_death_test_message_.c_str();
+}
+
+void DeathTest::set_last_death_test_message(const String& message) {
+  last_death_test_message_ = message;
+}
+
+String DeathTest::last_death_test_message_;
+
+// Provides cross platform implementation for some death functionality.
+class DeathTestImpl : public DeathTest {
+ protected:
+  DeathTestImpl(const char* a_statement, const RE* a_regex)
+      : statement_(a_statement),
+        regex_(a_regex),
+        spawned_(false),
+        status_(-1),
+        outcome_(IN_PROGRESS),
+        read_fd_(-1),
+        write_fd_(-1) {}
+
+  // read_fd_ is expected to be closed and cleared by a derived class.
+  ~DeathTestImpl() { GTEST_DEATH_TEST_CHECK_(read_fd_ == -1); }
+
+  void Abort(AbortReason reason);
+  virtual bool Passed(bool status_ok);
+
+  const char* statement() const { return statement_; }
+  const RE* regex() const { return regex_; }
+  bool spawned() const { return spawned_; }
+  void set_spawned(bool is_spawned) { spawned_ = is_spawned; }
+  int status() const { return status_; }
+  void set_status(int a_status) { status_ = a_status; }
+  DeathTestOutcome outcome() const { return outcome_; }
+  void set_outcome(DeathTestOutcome an_outcome) { outcome_ = an_outcome; }
+  int read_fd() const { return read_fd_; }
+  void set_read_fd(int fd) { read_fd_ = fd; }
+  int write_fd() const { return write_fd_; }
+  void set_write_fd(int fd) { write_fd_ = fd; }
+
+  // Called in the parent process only. Reads the result code of the death
+  // test child process via a pipe, interprets it to set the outcome_
+  // member, and closes read_fd_.  Outputs diagnostics and terminates in
+  // case of unexpected codes.
+  void ReadAndInterpretStatusByte();
+
+ private:
+  // The textual content of the code this object is testing.  This class
+  // doesn't own this string and should not attempt to delete it.
+  const char* const statement_;
+  // The regular expression which test output must match.  DeathTestImpl
+  // doesn't own this object and should not attempt to delete it.
+  const RE* const regex_;
+  // True if the death test child process has been successfully spawned.
+  bool spawned_;
+  // The exit status of the child process.
+  int status_;
+  // How the death test concluded.
+  DeathTestOutcome outcome_;
+  // Descriptor to the read end of the pipe to the child process.  It is
+  // always -1 in the child process.  The child keeps its write end of the
+  // pipe in write_fd_.
+  int read_fd_;
+  // Descriptor to the child's write end of the pipe to the parent process.
+  // It is always -1 in the parent process.  The parent keeps its end of the
+  // pipe in read_fd_.
+  int write_fd_;
+};
+
+// Called in the parent process only. Reads the result code of the death
+// test child process via a pipe, interprets it to set the outcome_
+// member, and closes read_fd_.  Outputs diagnostics and terminates in
+// case of unexpected codes.
+void DeathTestImpl::ReadAndInterpretStatusByte() {
+  char flag;
+  int bytes_read;
+
+  // The read() here blocks until data is available (signifying the
+  // failure of the death test) or until the pipe is closed (signifying
+  // its success), so it's okay to call this in the parent before
+  // the child process has exited.
+  do {
+    bytes_read = posix::Read(read_fd(), &flag, 1);
+  } while (bytes_read == -1 && errno == EINTR);
+
+  if (bytes_read == 0) {
+    set_outcome(DIED);
+  } else if (bytes_read == 1) {
+    switch (flag) {
+      case kDeathTestReturned:
+        set_outcome(RETURNED);
+        break;
+      case kDeathTestThrew:
+        set_outcome(THREW);
+        break;
+      case kDeathTestLived:
+        set_outcome(LIVED);
+        break;
+      case kDeathTestInternalError:
+        FailFromInternalError(read_fd());  // Does not return.
+        break;
+      default:
+        GTEST_LOG_(FATAL) << "Death test child process reported "
+                          << "unexpected status byte ("
+                          << static_cast<unsigned int>(flag) << ")";
+    }
+  } else {
+    GTEST_LOG_(FATAL) << "Read from death test child process failed: "
+                      << GetLastErrnoDescription();
+  }
+  GTEST_DEATH_TEST_CHECK_SYSCALL_(posix::Close(read_fd()));
+  set_read_fd(-1);
+}
+
+// Signals that the death test code which should have exited, didn't.
+// Should be called only in a death test child process.
+// Writes a status byte to the child's status file descriptor, then
+// calls _exit(1).
+void DeathTestImpl::Abort(AbortReason reason) {
+  // The parent process considers the death test to be a failure if
+  // it finds any data in our pipe.  So, here we write a single flag byte
+  // to the pipe, then exit.
+  const char status_ch =
+      reason == TEST_DID_NOT_DIE ? kDeathTestLived :
+      reason == TEST_THREW_EXCEPTION ? kDeathTestThrew : kDeathTestReturned;
+
+  GTEST_DEATH_TEST_CHECK_SYSCALL_(posix::Write(write_fd(), &status_ch, 1));
+  // We are leaking the descriptor here because on some platforms (e.g.,
+  // when built as a Windows DLL), destructors of global objects will still
+  // run after calling _exit(). On such systems, write_fd_ will be
+  // indirectly closed from the destructor of UnitTestImpl, causing double
+  // close if it is also closed here. On debug configurations, double close
+  // may assert. As there are no in-process buffers to flush here, we are
+  // relying on the OS to close the descriptor after the process terminates
+  // when the destructors are not run.
+  _exit(1);  // Exits w/o any normal exit hooks (we were supposed to crash)
+}
+
+// Returns an indented copy of stderr output for a death test.
+// This makes distinguishing death test output lines from regular log lines
+// much easier.
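+// For example, "line1\nline2\n" becomes
+// "[  DEATH   ] line1\n[  DEATH   ] line2\n[  DEATH   ] ".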
+static ::std::string FormatDeathTestOutput(const ::std::string& output) {
+  ::std::string ret;
+  for (size_t at = 0; ; ) {
+    const size_t line_end = output.find('\n', at);
+    ret += "[  DEATH   ] ";
+    if (line_end == ::std::string::npos) {
+      ret += output.substr(at);
+      break;
+    }
+    ret += output.substr(at, line_end + 1 - at);
+    at = line_end + 1;
+  }
+  return ret;
+}
+
+// Assesses the success or failure of a death test, using both private
+// members which have previously been set, and one argument:
+//
+// Private data members:
+//   outcome:  An enumeration describing how the death test
+//             concluded: DIED, LIVED, THREW, or RETURNED.  The death test
+//             fails in the latter three cases.
+//   status:   The exit status of the child process. On *nix, it is in the
+//             format specified by wait(2). On Windows, this is the
+//             value supplied to the ExitProcess() API or a numeric code
+//             of the exception that terminated the program.
+//   regex:    A regular expression object to be applied to
+//             the test's captured standard error output; the death test
+//             fails if it does not match.
+//
+// Argument:
+//   status_ok: true if exit_status is acceptable in the context of
+//              this particular death test, which fails if it is false
+//
+// Returns true iff all of the above conditions are met.  Otherwise, the
+// first failing condition, in the order given above, is the one that is
+// reported. Also sets the last death test message string.
+bool DeathTestImpl::Passed(bool status_ok) {
+  if (!spawned())
+    return false;
+
+  const String error_message = GetCapturedStderr();
+
+  bool success = false;
+  Message buffer;
+
+  buffer << "Death test: " << statement() << "\n";
+  switch (outcome()) {
+    case LIVED:
+      buffer << "    Result: failed to die.\n"
+             << " Error msg:\n" << FormatDeathTestOutput(error_message);
+      break;
+    case THREW:
+      buffer << "    Result: threw an exception.\n"
+             << " Error msg:\n" << FormatDeathTestOutput(error_message);
+      break;
+    case RETURNED:
+      buffer << "    Result: illegal return in test statement.\n"
+             << " Error msg:\n" << FormatDeathTestOutput(error_message);
+      break;
+    case DIED:
+      if (status_ok) {
+        const bool matched = RE::PartialMatch(error_message.c_str(), *regex());
+        if (matched) {
+          success = true;
+        } else {
+          buffer << "    Result: died but not with expected error.\n"
+                 << "  Expected: " << regex()->pattern() << "\n"
+                 << "Actual msg:\n" << FormatDeathTestOutput(error_message);
+        }
+      } else {
+        buffer << "    Result: died but not with expected exit code:\n"
+               << "            " << ExitSummary(status()) << "\n"
+               << "Actual msg:\n" << FormatDeathTestOutput(error_message);
+      }
+      break;
+    case IN_PROGRESS:
+    default:
+      GTEST_LOG_(FATAL)
+          << "DeathTest::Passed somehow called before conclusion of test";
+  }
+
+  DeathTest::set_last_death_test_message(buffer.GetString());
+  return success;
+}
+
+# if GTEST_OS_WINDOWS
+// WindowsDeathTest implements death tests on Windows. Due to the
+// specifics of starting new processes on Windows, death tests there are
+// always threadsafe, and Google Test considers the
+// --gtest_death_test_style=fast setting to be equivalent to
+// --gtest_death_test_style=threadsafe there.
+//
+// A few implementation notes:  Like the Linux version, the Windows
+// implementation uses pipes for child-to-parent communication. But due to
+// the specifics of pipes on Windows, some extra steps are required:
+//
+// 1. The parent creates a communication pipe and stores handles to both
+//    ends of it.
+// 2. The parent starts the child and provides it with the information
+//    necessary to acquire the handle to the write end of the pipe.
+// 3. The child acquires the write end of the pipe and signals the parent
+//    using a Windows event.
+// 4. Now the parent can release the write end of the pipe on its side. If
+//    this is done before step 3, the object's reference count goes down to
+//    0 and it is destroyed, preventing the child from acquiring it. The
+//    parent now has to release it, or read operations on the read end of
+//    the pipe will not return when the child terminates.
+// 5. The parent reads the child's output (outcome code and any possible
+//    error messages) from the pipe and the child's stderr, and then
+//    determines whether to fail the test.
+//
+// Note: to distinguish Win32 API calls from the local method and function
+// calls, the former are explicitly resolved in the global namespace.
+//
+class WindowsDeathTest : public DeathTestImpl {
+ public:
+  WindowsDeathTest(const char* a_statement,
+                   const RE* a_regex,
+                   const char* file,
+                   int line)
+      : DeathTestImpl(a_statement, a_regex), file_(file), line_(line) {}
+
+  // All of these virtual functions are inherited from DeathTest.
+  virtual int Wait();
+  virtual TestRole AssumeRole();
+
+ private:
+  // The name of the file in which the death test is located.
+  const char* const file_;
+  // The line number on which the death test is located.
+  const int line_;
+  // Handle to the write end of the pipe to the child process.
+  AutoHandle write_handle_;
+  // Child process handle.
+  AutoHandle child_handle_;
+  // Event the child process uses to signal the parent that it has
+  // acquired the handle to the write end of the pipe. After seeing this
+  // event the parent can release its own handles to make sure its
+  // ReadFile() calls return when the child terminates.
+  AutoHandle event_handle_;
+};
+
+// Waits for the child in a death test to exit, returning its exit
+// status, or 0 if no child process exists.  As a side effect, sets the
+// outcome data member.
+int WindowsDeathTest::Wait() {
+  if (!spawned())
+    return 0;
+
+  // Wait until the child either signals that it has acquired the write end
+  // of the pipe or it dies.
+  const HANDLE wait_handles[2] = { child_handle_.Get(), event_handle_.Get() };
+  switch (::WaitForMultipleObjects(2,
+                                   wait_handles,
+                                   FALSE,  // Waits for any of the handles.
+                                   INFINITE)) {
+    case WAIT_OBJECT_0:
+    case WAIT_OBJECT_0 + 1:
+      break;
+    default:
+      GTEST_DEATH_TEST_CHECK_(false);  // Should not get here.
+  }
+
+  // The child has acquired the write end of the pipe or exited.
+  // We release the handle on our side and continue.
+  write_handle_.Reset();
+  event_handle_.Reset();
+
+  ReadAndInterpretStatusByte();
+
+  // Waits for the child process to exit if it hasn't already. This
+  // returns immediately if the child has already exited, regardless of
+  // whether previous calls to WaitForMultipleObjects synchronized on this
+  // handle or not.
+  GTEST_DEATH_TEST_CHECK_(
+      WAIT_OBJECT_0 == ::WaitForSingleObject(child_handle_.Get(),
+                                             INFINITE));
+  DWORD status_code;
+  GTEST_DEATH_TEST_CHECK_(
+      ::GetExitCodeProcess(child_handle_.Get(), &status_code) != FALSE);
+  child_handle_.Reset();
+  set_status(static_cast<int>(status_code));
+  return status();
+}
+
+// The AssumeRole process for a Windows death test.  It creates a child
+// process with the same executable as the current process to run the
+// death test.  The child process is given the --gtest_filter and
+// --gtest_internal_run_death_test flags such that it knows to run the
+// current death test only.
+DeathTest::TestRole WindowsDeathTest::AssumeRole() {
+  const UnitTestImpl* const impl = GetUnitTestImpl();
+  const InternalRunDeathTestFlag* const flag =
+      impl->internal_run_death_test_flag();
+  const TestInfo* const info = impl->current_test_info();
+  const int death_test_index = info->result()->death_test_count();
+
+  if (flag != NULL) {
+    // ParseInternalRunDeathTestFlag() has performed all the necessary
+    // processing.
+    set_write_fd(flag->write_fd());
+    return EXECUTE_TEST;
+  }
+
+  // WindowsDeathTest uses an anonymous pipe to communicate results of
+  // a death test.
+  SECURITY_ATTRIBUTES handles_are_inheritable = {
+    sizeof(SECURITY_ATTRIBUTES), NULL, TRUE };
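+  // The TRUE above is the bInheritHandle member: the pipe handles may be
+  // inherited by the child process.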
+  HANDLE read_handle, write_handle;
+  GTEST_DEATH_TEST_CHECK_(
+      ::CreatePipe(&read_handle, &write_handle, &handles_are_inheritable,
+                   0)  // Default buffer size.
+      != FALSE);
+  set_read_fd(::_open_osfhandle(reinterpret_cast<intptr_t>(read_handle),
+                                O_RDONLY));
+  write_handle_.Reset(write_handle);
+  event_handle_.Reset(::CreateEvent(
+      &handles_are_inheritable,
+      TRUE,    // The event is manual-reset: it stays signaled until reset.
+      FALSE,   // The initial state is non-signaled.
+      NULL));  // The event is unnamed.
+  GTEST_DEATH_TEST_CHECK_(event_handle_.Get() != NULL);
+  const String filter_flag = String::Format("--%s%s=%s.%s",
+                                            GTEST_FLAG_PREFIX_, kFilterFlag,
+                                            info->test_case_name(),
+                                            info->name());
+  const String internal_flag = String::Format(
+    "--%s%s=%s|%d|%d|%u|%Iu|%Iu",
+      GTEST_FLAG_PREFIX_,
+      kInternalRunDeathTestFlag,
+      file_, line_,
+      death_test_index,
+      static_cast<unsigned int>(::GetCurrentProcessId()),
+      // size_t has the same width as pointers on both 32-bit and 64-bit
+      // Windows platforms.
+      // See http://msdn.microsoft.com/en-us/library/tcxf1dw6.aspx.
+      reinterpret_cast<size_t>(write_handle),
+      reinterpret_cast<size_t>(event_handle_.Get()));
+
+  char executable_path[_MAX_PATH + 1];  // NOLINT
+  GTEST_DEATH_TEST_CHECK_(
+      _MAX_PATH + 1 != ::GetModuleFileNameA(NULL,
+                                            executable_path,
+                                            _MAX_PATH));
+
+  String command_line = String::Format("%s %s \"%s\"",
+                                       ::GetCommandLineA(),
+                                       filter_flag.c_str(),
+                                       internal_flag.c_str());
+
+  DeathTest::set_last_death_test_message("");
+
+  CaptureStderr();
+  // Flush the log buffers since the log streams are shared with the child.
+  FlushInfoLog();
+
+  // The child process will share the standard handles with the parent.
+  STARTUPINFOA startup_info;
+  memset(&startup_info, 0, sizeof(STARTUPINFO));
+  startup_info.dwFlags = STARTF_USESTDHANDLES;
+  startup_info.hStdInput = ::GetStdHandle(STD_INPUT_HANDLE);
+  startup_info.hStdOutput = ::GetStdHandle(STD_OUTPUT_HANDLE);
+  startup_info.hStdError = ::GetStdHandle(STD_ERROR_HANDLE);
+
+  PROCESS_INFORMATION process_info;
+  GTEST_DEATH_TEST_CHECK_(::CreateProcessA(
+      executable_path,
+      const_cast<char*>(command_line.c_str()),
+      NULL,   // Returned process handle is not inheritable.
+      NULL,   // Returned thread handle is not inheritable.
+      TRUE,   // Child inherits all inheritable handles (for write_handle_).
+      0x0,    // Default creation flags.
+      NULL,   // Inherit the parent's environment.
+      UnitTest::GetInstance()->original_working_dir(),
+      &startup_info,
+      &process_info) != FALSE);
+  child_handle_.Reset(process_info.hProcess);
+  ::CloseHandle(process_info.hThread);
+  set_spawned(true);
+  return OVERSEE_TEST;
+}
+# else  // We are not on Windows.
+
+// ForkingDeathTest provides implementations for most of the abstract
+// methods of the DeathTest interface.  Only the AssumeRole method is
+// left undefined.
+class ForkingDeathTest : public DeathTestImpl {
+ public:
+  ForkingDeathTest(const char* statement, const RE* regex);
+
+  // All of these virtual functions are inherited from DeathTest.
+  virtual int Wait();
+
+ protected:
+  void set_child_pid(pid_t child_pid) { child_pid_ = child_pid; }
+
+ private:
+  // PID of child process during death test; 0 in the child process itself.
+  pid_t child_pid_;
+};
+
+// Constructs a ForkingDeathTest.
+ForkingDeathTest::ForkingDeathTest(const char* a_statement, const RE* a_regex)
+    : DeathTestImpl(a_statement, a_regex),
+      child_pid_(-1) {}
+
+// Waits for the child in a death test to exit, returning its exit
+// status, or 0 if no child process exists.  As a side effect, sets the
+// outcome data member.
+int ForkingDeathTest::Wait() {
+  if (!spawned())
+    return 0;
+
+  ReadAndInterpretStatusByte();
+
+  int status_value;
+  GTEST_DEATH_TEST_CHECK_SYSCALL_(waitpid(child_pid_, &status_value, 0));
+  set_status(status_value);
+  return status_value;
+}
+
+// A concrete death test class that forks, then immediately runs the test
+// in the child process.
+class NoExecDeathTest : public ForkingDeathTest {
+ public:
+  NoExecDeathTest(const char* a_statement, const RE* a_regex) :
+      ForkingDeathTest(a_statement, a_regex) { }
+  virtual TestRole AssumeRole();
+};
+
+// The AssumeRole process for a fork-and-run death test.  It implements a
+// straightforward fork, with a simple pipe to transmit the status byte.
+DeathTest::TestRole NoExecDeathTest::AssumeRole() {
+  const size_t thread_count = GetThreadCount();
+  if (thread_count != 1) {
+    GTEST_LOG_(WARNING) << DeathTestThreadWarning(thread_count);
+  }
+
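+  // pipe_fd[0] is the read end (kept by the parent) and pipe_fd[1] is the
+  // write end (used by the child to report the status byte).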
+  int pipe_fd[2];
+  GTEST_DEATH_TEST_CHECK_(pipe(pipe_fd) != -1);
+
+  DeathTest::set_last_death_test_message("");
+  CaptureStderr();
+  // When we fork the process below, the log file buffers are copied, but the
+  // file descriptors are shared.  We flush all log files here so that closing
+  // the file descriptors in the child process doesn't throw off the
+  // synchronization between descriptors and buffers in the parent process.
+  // This is as close to the fork as possible to avoid a race condition in case
+  // there are multiple threads running before the death test, and another
+  // thread writes to the log file.
+  FlushInfoLog();
+
+  const pid_t child_pid = fork();
+  GTEST_DEATH_TEST_CHECK_(child_pid != -1);
+  set_child_pid(child_pid);
+  if (child_pid == 0) {
+    GTEST_DEATH_TEST_CHECK_SYSCALL_(close(pipe_fd[0]));
+    set_write_fd(pipe_fd[1]);
+    // Redirects all logging to stderr in the child process to prevent
+    // concurrent writes to the log files.  We capture stderr in the parent
+    // process and append the child process' output to a log.
+    LogToStderr();
+    // Event forwarding to the listeners of the event listener API must be shut
+    // down in death test subprocesses.
+    GetUnitTestImpl()->listeners()->SuppressEventForwarding();
+    return EXECUTE_TEST;
+  } else {
+    GTEST_DEATH_TEST_CHECK_SYSCALL_(close(pipe_fd[1]));
+    set_read_fd(pipe_fd[0]);
+    set_spawned(true);
+    return OVERSEE_TEST;
+  }
+}
+
+// A concrete death test class that forks and re-executes the main
+// program from the beginning, with command-line flags set that cause
+// only this specific death test to be run.
+class ExecDeathTest : public ForkingDeathTest {
+ public:
+  ExecDeathTest(const char* a_statement, const RE* a_regex,
+                const char* file, int line) :
+      ForkingDeathTest(a_statement, a_regex), file_(file), line_(line) { }
+  virtual TestRole AssumeRole();
+ private:
+  // The name of the file in which the death test is located.
+  const char* const file_;
+  // The line number on which the death test is located.
+  const int line_;
+};
+
+// Utility class for accumulating command-line arguments.
+class Arguments {
+ public:
+  Arguments() {
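+    // args_ is kept NULL-terminated, as required by the exec() family;
+    // AddArgument() and AddArguments() insert before the trailing NULL.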
+    args_.push_back(NULL);
+  }
+
+  ~Arguments() {
+    for (std::vector<char*>::iterator i = args_.begin(); i != args_.end();
+         ++i) {
+      free(*i);
+    }
+  }
+  void AddArgument(const char* argument) {
+    args_.insert(args_.end() - 1, posix::StrDup(argument));
+  }
+
+  template <typename Str>
+  void AddArguments(const ::std::vector<Str>& arguments) {
+    for (typename ::std::vector<Str>::const_iterator i = arguments.begin();
+         i != arguments.end();
+         ++i) {
+      args_.insert(args_.end() - 1, posix::StrDup(i->c_str()));
+    }
+  }
+  char* const* Argv() {
+    return &args_[0];
+  }
+ private:
+  std::vector<char*> args_;
+};
+
+// A struct that encompasses the arguments to the child process of a
+// threadsafe-style death test process.
+struct ExecDeathTestArgs {
+  char* const* argv;  // Command-line arguments for the child's call to exec
+  int close_fd;       // File descriptor to close; the read end of a pipe
+};
+
+#  if GTEST_OS_MAC
+inline char** GetEnviron() {
+  // When Google Test is built as a framework on MacOS X, the environ variable
+  // is unavailable. Apple's documentation (man environ) recommends using
+  // _NSGetEnviron() instead.
+  return *_NSGetEnviron();
+}
+#  else
+// Some POSIX platforms expect you to declare environ. extern "C" makes
+// it reside in the global namespace.
+extern "C" char** environ;
+inline char** GetEnviron() { return environ; }
+#  endif  // GTEST_OS_MAC
+
+// The main function for a threadsafe-style death test child process.
+// This function is called in a clone()-ed process and thus must avoid
+// any potentially unsafe operations like malloc or libc functions.
+static int ExecDeathTestChildMain(void* child_arg) {
+  ExecDeathTestArgs* const args = static_cast<ExecDeathTestArgs*>(child_arg);
+  GTEST_DEATH_TEST_CHECK_SYSCALL_(close(args->close_fd));
+
+  // We need to execute the test program in the same environment where
+  // it was originally invoked.  Therefore we change to the original
+  // working directory first.
+  const char* const original_dir =
+      UnitTest::GetInstance()->original_working_dir();
+  // We can safely call chdir() as it's a direct system call.
+  if (chdir(original_dir) != 0) {
+    DeathTestAbort(String::Format("chdir(\"%s\") failed: %s",
+                                  original_dir,
+                                  GetLastErrnoDescription().c_str()));
+    return EXIT_FAILURE;
+  }
+
+  // We can safely call execve() as it's a direct system call.  We
+  // cannot use execvp() as it's a libc function and thus potentially
+  // unsafe.  Since execve() doesn't search the PATH, the user must
+  // invoke the test program via a valid path that contains at least
+  // one path separator.
+  execve(args->argv[0], args->argv, GetEnviron());
+  DeathTestAbort(String::Format("execve(%s, ...) in %s failed: %s",
+                                args->argv[0],
+                                original_dir,
+                                GetLastErrnoDescription().c_str()));
+  return EXIT_FAILURE;
+}
+
+// Two utility routines that together determine the direction the stack
+// grows.
+// This could be accomplished more elegantly by a single recursive
+// function, but we want to guard against the unlikely possibility of
+// a smart compiler optimizing the recursion away.
+//
+// GTEST_NO_INLINE_ is required to prevent GCC 4.6 from inlining
+// StackLowerThanAddress into StackGrowsDown, which then doesn't give
+// the correct answer.
+bool StackLowerThanAddress(const void* ptr) GTEST_NO_INLINE_;
+bool StackLowerThanAddress(const void* ptr) {
+  int dummy;
+  return &dummy < ptr;
+}
+
+bool StackGrowsDown() {
+  int dummy;
+  return StackLowerThanAddress(&dummy);
+}
+
+// A threadsafe implementation of fork(2) for threadsafe-style death tests
+// that uses clone(2).  It dies with an error message if anything goes
+// wrong.
+static pid_t ExecDeathTestFork(char* const* argv, int close_fd) {
+  ExecDeathTestArgs args = { argv, close_fd };
+  pid_t child_pid = -1;
+
+#  if GTEST_HAS_CLONE
+  const bool use_fork = GTEST_FLAG(death_test_use_fork);
+
+  if (!use_fork) {
+    static const bool stack_grows_down = StackGrowsDown();
+    const size_t stack_size = getpagesize();
+    // MAP_ANONYMOUS is not defined on Mac, so we use MAP_ANON instead.
+    void* const stack = mmap(NULL, stack_size, PROT_READ | PROT_WRITE,
+                             MAP_ANON | MAP_PRIVATE, -1, 0);
+    GTEST_DEATH_TEST_CHECK_(stack != MAP_FAILED);
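+    // clone() expects a pointer to the initial stack; when the stack grows
+    // downward, that is the high end of the mapping.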
+    void* const stack_top =
+        static_cast<char*>(stack) + (stack_grows_down ? stack_size : 0);
+
+    child_pid = clone(&ExecDeathTestChildMain, stack_top, SIGCHLD, &args);
+
+    GTEST_DEATH_TEST_CHECK_(munmap(stack, stack_size) != -1);
+  }
+#  else
+  const bool use_fork = true;
+#  endif  // GTEST_HAS_CLONE
+
+  if (use_fork && (child_pid = fork()) == 0) {
+      ExecDeathTestChildMain(&args);
+      _exit(0);
+  }
+
+  GTEST_DEATH_TEST_CHECK_(child_pid != -1);
+  return child_pid;
+}
+
+// The AssumeRole process for a fork-and-exec death test.  It re-executes the
+// main program from the beginning, setting the --gtest_filter
+// and --gtest_internal_run_death_test flags to cause only the current
+// death test to be re-run.
+DeathTest::TestRole ExecDeathTest::AssumeRole() {
+  const UnitTestImpl* const impl = GetUnitTestImpl();
+  const InternalRunDeathTestFlag* const flag =
+      impl->internal_run_death_test_flag();
+  const TestInfo* const info = impl->current_test_info();
+  const int death_test_index = info->result()->death_test_count();
+
+  if (flag != NULL) {
+    set_write_fd(flag->write_fd());
+    return EXECUTE_TEST;
+  }
+
+  int pipe_fd[2];
+  GTEST_DEATH_TEST_CHECK_(pipe(pipe_fd) != -1);
+  // Clear the close-on-exec flag on the write end of the pipe, lest
+  // it be closed when the child process does an exec:
+  GTEST_DEATH_TEST_CHECK_(fcntl(pipe_fd[1], F_SETFD, 0) != -1);
+
+  const String filter_flag =
+      String::Format("--%s%s=%s.%s",
+                     GTEST_FLAG_PREFIX_, kFilterFlag,
+                     info->test_case_name(), info->name());
+  const String internal_flag =
+      String::Format("--%s%s=%s|%d|%d|%d",
+                     GTEST_FLAG_PREFIX_, kInternalRunDeathTestFlag,
+                     file_, line_, death_test_index, pipe_fd[1]);
+  Arguments args;
+  args.AddArguments(GetArgvs());
+  args.AddArgument(filter_flag.c_str());
+  args.AddArgument(internal_flag.c_str());
+
+  DeathTest::set_last_death_test_message("");
+
+  CaptureStderr();
+  // See the comment in NoExecDeathTest::AssumeRole for why the next line
+  // is necessary.
+  FlushInfoLog();
+
+  const pid_t child_pid = ExecDeathTestFork(args.Argv(), pipe_fd[0]);
+  GTEST_DEATH_TEST_CHECK_SYSCALL_(close(pipe_fd[1]));
+  set_child_pid(child_pid);
+  set_read_fd(pipe_fd[0]);
+  set_spawned(true);
+  return OVERSEE_TEST;
+}
+
+# endif  // !GTEST_OS_WINDOWS
+
+// Creates a concrete DeathTest-derived class that depends on the
+// --gtest_death_test_style flag, and sets the pointer pointed to
+// by the "test" argument to its address.  If the test should be
+// skipped, sets that pointer to NULL.  Returns true, unless the
+// flag is set to an invalid value.
+bool DefaultDeathTestFactory::Create(const char* statement, const RE* regex,
+                                     const char* file, int line,
+                                     DeathTest** test) {
+  UnitTestImpl* const impl = GetUnitTestImpl();
+  const InternalRunDeathTestFlag* const flag =
+      impl->internal_run_death_test_flag();
+  const int death_test_index = impl->current_test_info()
+      ->increment_death_test_count();
+
+  if (flag != NULL) {
+    if (death_test_index > flag->index()) {
+      DeathTest::set_last_death_test_message(String::Format(
+          "Death test count (%d) somehow exceeded expected maximum (%d)",
+          death_test_index, flag->index()));
+      return false;
+    }
+
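+    // This process was spawned to run a single death test; create a real
+    // DeathTest object only for the test identified by the flag and skip
+    // all others.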
+    if (!(flag->file() == file && flag->line() == line &&
+          flag->index() == death_test_index)) {
+      *test = NULL;
+      return true;
+    }
+  }
+
+# if GTEST_OS_WINDOWS
+
+  if (GTEST_FLAG(death_test_style) == "threadsafe" ||
+      GTEST_FLAG(death_test_style) == "fast") {
+    *test = new WindowsDeathTest(statement, regex, file, line);
+  }
+
+# else
+
+  if (GTEST_FLAG(death_test_style) == "threadsafe") {
+    *test = new ExecDeathTest(statement, regex, file, line);
+  } else if (GTEST_FLAG(death_test_style) == "fast") {
+    *test = new NoExecDeathTest(statement, regex);
+  }
+
+# endif  // GTEST_OS_WINDOWS
+
+  else {  // NOLINT - this is more readable than unbalanced brackets inside #if.
+    DeathTest::set_last_death_test_message(String::Format(
+        "Unknown death test style \"%s\" encountered",
+        GTEST_FLAG(death_test_style).c_str()));
+    return false;
+  }
+
+  return true;
+}
+
+// Splits a given string on a given delimiter, populating a given
+// vector with the fields.  GTEST_HAS_DEATH_TEST implies that we have
+// ::std::string, so we can use it here.
+static void SplitString(const ::std::string& str, char delimiter,
+                        ::std::vector< ::std::string>* dest) {
+  ::std::vector< ::std::string> parsed;
+  ::std::string::size_type pos = 0;
+  while (::testing::internal::AlwaysTrue()) {
+    const ::std::string::size_type colon = str.find(delimiter, pos);
+    if (colon == ::std::string::npos) {
+      parsed.push_back(str.substr(pos));
+      break;
+    } else {
+      parsed.push_back(str.substr(pos, colon - pos));
+      pos = colon + 1;
+    }
+  }
+  dest->swap(parsed);
+}
+
+# if GTEST_OS_WINDOWS
+// Recreates the pipe and event handles from the provided parameters,
+// signals the event, and returns a file descriptor wrapped around the pipe
+// handle. This function is called in the child process only.
+int GetStatusFileDescriptor(unsigned int parent_process_id,
+                            size_t write_handle_as_size_t,
+                            size_t event_handle_as_size_t) {
+  AutoHandle parent_process_handle(::OpenProcess(PROCESS_DUP_HANDLE,
+                                                   FALSE,  // Non-inheritable.
+                                                   parent_process_id));
+  if (parent_process_handle.Get() == INVALID_HANDLE_VALUE) {
+    DeathTestAbort(String::Format("Unable to open parent process %u",
+                                  parent_process_id));
+  }
+
+  // TODO(vladl@google.com): Replace the following check with a
+  // compile-time assertion when available.
+  GTEST_CHECK_(sizeof(HANDLE) <= sizeof(size_t));
+
+  const HANDLE write_handle =
+      reinterpret_cast<HANDLE>(write_handle_as_size_t);
+  HANDLE dup_write_handle;
+
+  // The newly initialized handle is accessible only in the parent
+  // process. To obtain one accessible within the child, we need to use
+  // DuplicateHandle.
+  if (!::DuplicateHandle(parent_process_handle.Get(), write_handle,
+                         ::GetCurrentProcess(), &dup_write_handle,
+                         0x0,    // Requested privileges ignored since
+                                 // DUPLICATE_SAME_ACCESS is used.
+                         FALSE,  // Request a non-inheritable handle.
+                         DUPLICATE_SAME_ACCESS)) {
+    DeathTestAbort(String::Format(
+        "Unable to duplicate the pipe handle %Iu from the parent process %u",
+        write_handle_as_size_t, parent_process_id));
+  }
+
+  const HANDLE event_handle = reinterpret_cast<HANDLE>(event_handle_as_size_t);
+  HANDLE dup_event_handle;
+
+  if (!::DuplicateHandle(parent_process_handle.Get(), event_handle,
+                         ::GetCurrentProcess(), &dup_event_handle,
+                         0x0,
+                         FALSE,
+                         DUPLICATE_SAME_ACCESS)) {
+    DeathTestAbort(String::Format(
+        "Unable to duplicate the event handle %Iu from the parent process %u",
+        event_handle_as_size_t, parent_process_id));
+  }
+
+  const int write_fd =
+      ::_open_osfhandle(reinterpret_cast<intptr_t>(dup_write_handle), O_APPEND);
+  if (write_fd == -1) {
+    DeathTestAbort(String::Format(
+        "Unable to convert pipe handle %Iu to a file descriptor",
+        write_handle_as_size_t));
+  }
+
+  // Signals the parent that the write end of the pipe has been acquired
+  // so the parent can release its own write end.
+  ::SetEvent(dup_event_handle);
+
+  return write_fd;
+}
+# endif  // GTEST_OS_WINDOWS
+
+// Returns a newly created InternalRunDeathTestFlag object with fields
+// initialized from the GTEST_FLAG(internal_run_death_test) flag if
+// the flag is specified; otherwise returns NULL.
+InternalRunDeathTestFlag* ParseInternalRunDeathTestFlag() {
+  if (GTEST_FLAG(internal_run_death_test) == "") return NULL;
+
+  // GTEST_HAS_DEATH_TEST implies that we have ::std::string, so we
+  // can use it here.
+  int line = -1;
+  int index = -1;
+  ::std::vector< ::std::string> fields;
+  SplitString(GTEST_FLAG(internal_run_death_test).c_str(), '|', &fields);
+  int write_fd = -1;
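+  // The flag value is a '|'-separated list: file|line|index|write_fd on
+  // POSIX systems, and file|line|index|parent_process_id|write_handle|
+  // event_handle on Windows.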
+
+# if GTEST_OS_WINDOWS
+
+  unsigned int parent_process_id = 0;
+  size_t write_handle_as_size_t = 0;
+  size_t event_handle_as_size_t = 0;
+
+  if (fields.size() != 6
+      || !ParseNaturalNumber(fields[1], &line)
+      || !ParseNaturalNumber(fields[2], &index)
+      || !ParseNaturalNumber(fields[3], &parent_process_id)
+      || !ParseNaturalNumber(fields[4], &write_handle_as_size_t)
+      || !ParseNaturalNumber(fields[5], &event_handle_as_size_t)) {
+    DeathTestAbort(String::Format(
+        "Bad --gtest_internal_run_death_test flag: %s",
+        GTEST_FLAG(internal_run_death_test).c_str()));
+  }
+  write_fd = GetStatusFileDescriptor(parent_process_id,
+                                     write_handle_as_size_t,
+                                     event_handle_as_size_t);
+# else
+
+  if (fields.size() != 4
+      || !ParseNaturalNumber(fields[1], &line)
+      || !ParseNaturalNumber(fields[2], &index)
+      || !ParseNaturalNumber(fields[3], &write_fd)) {
+    DeathTestAbort(String::Format(
+        "Bad --gtest_internal_run_death_test flag: %s",
+        GTEST_FLAG(internal_run_death_test).c_str()));
+  }
+
+# endif  // GTEST_OS_WINDOWS
+
+  return new InternalRunDeathTestFlag(fields[0], line, index, write_fd);
+}
+
+}  // namespace internal
+
+#endif  // GTEST_HAS_DEATH_TEST
+
+}  // namespace testing
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: keith.ray@gmail.com (Keith Ray)
+
+
+#include <stdlib.h>
+
+#if GTEST_OS_WINDOWS_MOBILE
+# include <windows.h>
+#elif GTEST_OS_WINDOWS
+# include <direct.h>
+# include <io.h>
+#elif GTEST_OS_SYMBIAN || GTEST_OS_NACL
+// Symbian OpenC and NaCl have PATH_MAX in sys/syslimits.h
+# include <sys/syslimits.h>
+#else
+# include <limits.h>
+# include <climits>  // Some Linux distributions define PATH_MAX here.
+#endif  // GTEST_OS_WINDOWS_MOBILE
+
+#if GTEST_OS_WINDOWS
+# define GTEST_PATH_MAX_ _MAX_PATH
+#elif defined(PATH_MAX)
+# define GTEST_PATH_MAX_ PATH_MAX
+#elif defined(_XOPEN_PATH_MAX)
+# define GTEST_PATH_MAX_ _XOPEN_PATH_MAX
+#else
+# define GTEST_PATH_MAX_ _POSIX_PATH_MAX
+#endif  // GTEST_OS_WINDOWS
+
+
+namespace testing {
+namespace internal {
+
+#if GTEST_OS_WINDOWS
+// On Windows, '\\' is the standard path separator, but many tools and the
+// Windows API also accept '/' as an alternate path separator. Unless otherwise
+// noted, a file path can contain either kind of path separator, or a
+// mixture of the two.
+const char kPathSeparator = '\\';
+const char kAlternatePathSeparator = '/';
+const char kPathSeparatorString[] = "\\";
+const char kAlternatePathSeparatorString[] = "/";
+# if GTEST_OS_WINDOWS_MOBILE
+// Windows CE doesn't have a current directory. You should not use
+// the current directory in tests on Windows CE, but this at least
+// provides a reasonable fallback.
+const char kCurrentDirectoryString[] = "\\";
+// Windows CE doesn't define INVALID_FILE_ATTRIBUTES
+const DWORD kInvalidFileAttributes = 0xffffffff;
+# else
+const char kCurrentDirectoryString[] = ".\\";
+# endif  // GTEST_OS_WINDOWS_MOBILE
+#else
+const char kPathSeparator = '/';
+const char kPathSeparatorString[] = "/";
+const char kCurrentDirectoryString[] = "./";
+#endif  // GTEST_OS_WINDOWS
+
+// Returns whether the given character is a valid path separator.
+static bool IsPathSeparator(char c) {
+#if GTEST_HAS_ALT_PATH_SEP_
+  return (c == kPathSeparator) || (c == kAlternatePathSeparator);
+#else
+  return c == kPathSeparator;
+#endif
+}
+
+// Returns the current working directory, or "" if unsuccessful.
+FilePath FilePath::GetCurrentDir() {
+#if GTEST_OS_WINDOWS_MOBILE
+  // Windows CE doesn't have a current directory, so we just return
+  // something reasonable.
+  return FilePath(kCurrentDirectoryString);
+#elif GTEST_OS_WINDOWS
+  char cwd[GTEST_PATH_MAX_ + 1] = { '\0' };
+  return FilePath(_getcwd(cwd, sizeof(cwd)) == NULL ? "" : cwd);
+#else
+  char cwd[GTEST_PATH_MAX_ + 1] = { '\0' };
+  return FilePath(getcwd(cwd, sizeof(cwd)) == NULL ? "" : cwd);
+#endif  // GTEST_OS_WINDOWS_MOBILE
+}
+
+// Returns a copy of the FilePath with the case-insensitive extension removed.
+// Example: FilePath("dir/file.exe").RemoveExtension("EXE") returns
+// FilePath("dir/file"). If a case-insensitive extension is not
+// found, returns a copy of the original FilePath.
+FilePath FilePath::RemoveExtension(const char* extension) const {
+  String dot_extension(String::Format(".%s", extension));
+  if (pathname_.EndsWithCaseInsensitive(dot_extension.c_str())) {
+    return FilePath(String(pathname_.c_str(),
+                           pathname_.length() - dot_extension.length()));
+  }
+  return *this;
+}
+
+// Returns a pointer to the last occurrence of a valid path separator in
+// the FilePath. On Windows, for example, both '/' and '\' are valid path
+// separators. Returns NULL if no path separator was found.
+const char* FilePath::FindLastPathSeparator() const {
+  const char* const last_sep = strrchr(c_str(), kPathSeparator);
+#if GTEST_HAS_ALT_PATH_SEP_
+  const char* const last_alt_sep = strrchr(c_str(), kAlternatePathSeparator);
+  // Comparing two pointers of which only one is NULL is undefined.
+  if (last_alt_sep != NULL &&
+      (last_sep == NULL || last_alt_sep > last_sep)) {
+    return last_alt_sep;
+  }
+#endif
+  return last_sep;
+}
+
+// Returns a copy of the FilePath with the directory part removed.
+// Example: FilePath("path/to/file").RemoveDirectoryName() returns
+// FilePath("file"). If there is no directory part ("just_a_file"), it returns
+// the FilePath unmodified. If there is no file part ("just_a_dir/") it
+// returns an empty FilePath ("").
+// On Windows platform, '\' is the path separator, otherwise it is '/'.
+FilePath FilePath::RemoveDirectoryName() const {
+  const char* const last_sep = FindLastPathSeparator();
+  return last_sep ? FilePath(String(last_sep + 1)) : *this;
+}
+
+// RemoveFileName returns the directory path with the filename removed.
+// Example: FilePath("path/to/file").RemoveFileName() returns "path/to/".
+// If the FilePath is "a_file" or "/a_file", RemoveFileName returns
+// FilePath("./") or, on Windows, FilePath(".\\"). If the filepath does
+// not have a file, like "just/a/dir/", it returns the FilePath unmodified.
+// On Windows platform, '\' is the path separator, otherwise it is '/'.
+FilePath FilePath::RemoveFileName() const {
+  const char* const last_sep = FindLastPathSeparator();
+  String dir;
+  if (last_sep) {
+    dir = String(c_str(), last_sep + 1 - c_str());
+  } else {
+    dir = kCurrentDirectoryString;
+  }
+  return FilePath(dir);
+}
+
+// Helper functions for naming files in a directory for xml output.
+
+// Given directory = "dir", base_name = "test", number = 0,
+// extension = "xml", returns "dir/test.xml". If number is greater
+// than zero (e.g., 12), returns "dir/test_12.xml".
+// On Windows platform, uses \ as the separator rather than /.
+FilePath FilePath::MakeFileName(const FilePath& directory,
+                                const FilePath& base_name,
+                                int number,
+                                const char* extension) {
+  String file;
+  if (number == 0) {
+    file = String::Format("%s.%s", base_name.c_str(), extension);
+  } else {
+    file = String::Format("%s_%d.%s", base_name.c_str(), number, extension);
+  }
+  return ConcatPaths(directory, FilePath(file));
+}
+
+// Given directory = "dir", relative_path = "test.xml", returns "dir/test.xml".
+// On Windows, uses \ as the separator rather than /.
+FilePath FilePath::ConcatPaths(const FilePath& directory,
+                               const FilePath& relative_path) {
+  if (directory.IsEmpty())
+    return relative_path;
+  const FilePath dir(directory.RemoveTrailingPathSeparator());
+  return FilePath(String::Format("%s%c%s", dir.c_str(), kPathSeparator,
+                                 relative_path.c_str()));
+}
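+
+// Illustrative examples (derived from the logic above; paths are made up):
+//   ConcatPaths(FilePath("dir"),  FilePath("test.xml")) -> "dir/test.xml"
+//   ConcatPaths(FilePath("dir/"), FilePath("test.xml")) -> "dir/test.xml"
+//   ConcatPaths(FilePath(""),     FilePath("test.xml")) -> "test.xml"
+// On Windows the separator inserted between the two parts is '\\'.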
+
+// Returns true if pathname describes something findable in the file-system,
+// either a file, directory, or whatever.
+bool FilePath::FileOrDirectoryExists() const {
+#if GTEST_OS_WINDOWS_MOBILE
+  LPCWSTR unicode = String::AnsiToUtf16(pathname_.c_str());
+  const DWORD attributes = GetFileAttributes(unicode);
+  delete [] unicode;
+  return attributes != kInvalidFileAttributes;
+#else
+  posix::StatStruct file_stat;
+  return posix::Stat(pathname_.c_str(), &file_stat) == 0;
+#endif  // GTEST_OS_WINDOWS_MOBILE
+}
+
+// Returns true if pathname describes a directory in the file-system
+// that exists.
+bool FilePath::DirectoryExists() const {
+  bool result = false;
+#if GTEST_OS_WINDOWS
+  // Don't strip off trailing separator if path is a root directory on
+  // Windows (like "C:\\").
+  const FilePath& path(IsRootDirectory() ? *this :
+                                           RemoveTrailingPathSeparator());
+#else
+  const FilePath& path(*this);
+#endif
+
+#if GTEST_OS_WINDOWS_MOBILE
+  LPCWSTR unicode = String::AnsiToUtf16(path.c_str());
+  const DWORD attributes = GetFileAttributes(unicode);
+  delete [] unicode;
+  if ((attributes != kInvalidFileAttributes) &&
+      (attributes & FILE_ATTRIBUTE_DIRECTORY)) {
+    result = true;
+  }
+#else
+  posix::StatStruct file_stat;
+  result = posix::Stat(path.c_str(), &file_stat) == 0 &&
+      posix::IsDir(file_stat);
+#endif  // GTEST_OS_WINDOWS_MOBILE
+
+  return result;
+}
+
+// Returns true if pathname describes a root directory. (Windows has one
+// root directory per disk drive.)
+bool FilePath::IsRootDirectory() const {
+#if GTEST_OS_WINDOWS
+  // TODO(wan@google.com): on Windows a network share like
+  // \\server\share can be a root directory, although it cannot be the
+  // current directory.  Handle this properly.
+  return pathname_.length() == 3 && IsAbsolutePath();
+#else
+  return pathname_.length() == 1 && IsPathSeparator(pathname_.c_str()[0]);
+#endif
+}
+
+// Returns true if pathname describes an absolute path.
+bool FilePath::IsAbsolutePath() const {
+  const char* const name = pathname_.c_str();
+#if GTEST_OS_WINDOWS
+  return pathname_.length() >= 3 &&
+     ((name[0] >= 'a' && name[0] <= 'z') ||
+      (name[0] >= 'A' && name[0] <= 'Z')) &&
+     name[1] == ':' &&
+     IsPathSeparator(name[2]);
+#else
+  return IsPathSeparator(name[0]);
+#endif
+}
+
+// Returns a pathname for a file that does not currently exist. The pathname
+// will be directory/base_name.extension or
+// directory/base_name_<number>.extension if directory/base_name.extension
+// already exists. The number will be incremented until a pathname is found
+// that does not already exist.
+// Examples: 'dir/foo_test.xml' or 'dir/foo_test_1.xml'.
+// There could be a race condition if two or more processes are calling this
+// function at the same time -- they could both pick the same filename.
+FilePath FilePath::GenerateUniqueFileName(const FilePath& directory,
+                                          const FilePath& base_name,
+                                          const char* extension) {
+  FilePath full_pathname;
+  int number = 0;
+  do {
+    full_pathname.Set(MakeFileName(directory, base_name, number++, extension));
+  } while (full_pathname.FileOrDirectoryExists());
+  return full_pathname;
+}
+
+// Returns true if FilePath ends with a path separator, which indicates that
+// it is intended to represent a directory. Returns false otherwise.
+// This does NOT check that a directory (or file) actually exists.
+bool FilePath::IsDirectory() const {
+  return !pathname_.empty() &&
+         IsPathSeparator(pathname_.c_str()[pathname_.length() - 1]);
+}
+
+// Create directories so that path exists. Returns true if successful or if
+// the directories already exist; returns false if unable to create directories
+// for any reason.
+bool FilePath::CreateDirectoriesRecursively() const {
+  if (!this->IsDirectory()) {
+    return false;
+  }
+
+  if (pathname_.length() == 0 || this->DirectoryExists()) {
+    return true;
+  }
+
+  const FilePath parent(this->RemoveTrailingPathSeparator().RemoveFileName());
+  return parent.CreateDirectoriesRecursively() && this->CreateFolder();
+}
+
+// Create the directory so that path exists. Returns true if successful or
+// if the directory already exists; returns false if unable to create the
+// directory for any reason, including if the parent directory does not
+// exist. Not named "CreateDirectory" because that's a macro on Windows.
+bool FilePath::CreateFolder() const {
+#if GTEST_OS_WINDOWS_MOBILE
+  FilePath removed_sep(this->RemoveTrailingPathSeparator());
+  LPCWSTR unicode = String::AnsiToUtf16(removed_sep.c_str());
+  int result = CreateDirectory(unicode, NULL) ? 0 : -1;
+  delete [] unicode;
+#elif GTEST_OS_WINDOWS
+  int result = _mkdir(pathname_.c_str());
+#else
+  int result = mkdir(pathname_.c_str(), 0777);
+#endif  // GTEST_OS_WINDOWS_MOBILE
+
+  if (result == -1) {
+    return this->DirectoryExists();  // An error is OK if the directory exists.
+  }
+  return true;  // No error.
+}
+
+// If input name has a trailing separator character, remove it and return the
+// name, otherwise return the name string unmodified.
+// On Windows platform, uses \ as the separator, other platforms use /.
+FilePath FilePath::RemoveTrailingPathSeparator() const {
+  return IsDirectory()
+      ? FilePath(String(pathname_.c_str(), pathname_.length() - 1))
+      : *this;
+}
+
+// Removes any redundant separators that might be in the pathname.
+// For example, "bar///foo" becomes "bar/foo". Does not eliminate other
+// redundancies that might be in a pathname involving "." or "..".
+// TODO(wan@google.com): handle Windows network shares (e.g. \\server\share).
+void FilePath::Normalize() {
+  if (pathname_.c_str() == NULL) {
+    pathname_ = "";
+    return;
+  }
+  const char* src = pathname_.c_str();
+  char* const dest = new char[pathname_.length() + 1];
+  char* dest_ptr = dest;
+  memset(dest_ptr, 0, pathname_.length() + 1);
+
+  while (*src != '\0') {
+    *dest_ptr = *src;
+    if (!IsPathSeparator(*src)) {
+      src++;
+    } else {
+#if GTEST_HAS_ALT_PATH_SEP_
+      if (*dest_ptr == kAlternatePathSeparator) {
+        *dest_ptr = kPathSeparator;
+      }
+#endif
+      while (IsPathSeparator(*src))
+        src++;
+    }
+    dest_ptr++;
+  }
+  *dest_ptr = '\0';
+  pathname_ = dest;
+  delete[] dest;
+}
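+
+// Illustrative examples (inputs are hypothetical): on POSIX, "bar///foo"
+// normalizes to "bar/foo".  When GTEST_HAS_ALT_PATH_SEP_ is defined
+// (Windows), where '/' is accepted as an alternate separator, a mixed run
+// such as "foo/\\bar" collapses to "foo\\bar": the first separator of each
+// run is kept (converted to '\\') and the rest are dropped.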
+
+}  // namespace internal
+}  // namespace testing
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+
+#include <limits.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+
+#if GTEST_OS_WINDOWS_MOBILE
+# include <windows.h>  // For TerminateProcess()
+#elif GTEST_OS_WINDOWS
+# include <io.h>
+# include <sys/stat.h>
+#else
+# include <unistd.h>
+#endif  // GTEST_OS_WINDOWS_MOBILE
+
+#if GTEST_OS_MAC
+# include <mach/mach_init.h>
+# include <mach/task.h>
+# include <mach/vm_map.h>
+#endif  // GTEST_OS_MAC
+
+
+// Indicates that this translation unit is part of Google Test's
+// implementation.  It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error.  This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#undef GTEST_IMPLEMENTATION_
+
+namespace testing {
+namespace internal {
+
+#if defined(_MSC_VER) || defined(__BORLANDC__)
+// MSVC and C++Builder do not provide a definition of STDERR_FILENO.
+const int kStdOutFileno = 1;
+const int kStdErrFileno = 2;
+#else
+const int kStdOutFileno = STDOUT_FILENO;
+const int kStdErrFileno = STDERR_FILENO;
+#endif  // _MSC_VER
+
+#if GTEST_OS_MAC
+
+// Returns the number of threads running in the process, or 0 to indicate that
+// we cannot detect it.
+size_t GetThreadCount() {
+  const task_t task = mach_task_self();
+  mach_msg_type_number_t thread_count;
+  thread_act_array_t thread_list;
+  const kern_return_t status = task_threads(task, &thread_list, &thread_count);
+  if (status == KERN_SUCCESS) {
+    // task_threads allocates resources in thread_list and we need to free them
+    // to avoid leaks.
+    vm_deallocate(task,
+                  reinterpret_cast<vm_address_t>(thread_list),
+                  sizeof(thread_t) * thread_count);
+    return static_cast<size_t>(thread_count);
+  } else {
+    return 0;
+  }
+}
+
+#else
+
+size_t GetThreadCount() {
+  // There's no portable way to detect the number of threads, so we just
+  // return 0 to indicate that we cannot detect it.
+  return 0;
+}
+
+#endif  // GTEST_OS_MAC
+
+#if GTEST_USES_POSIX_RE
+
+// Implements RE.  Currently only needed for death tests.
+
+RE::~RE() {
+  if (is_valid_) {
+    // regfree'ing an invalid regex might crash because the content
+    // of the regex is undefined. Since the regexes are essentially
+    // the same, one cannot be valid (or invalid) without the other
+    // being so too.
+    regfree(&partial_regex_);
+    regfree(&full_regex_);
+  }
+  free(const_cast<char*>(pattern_));
+}
+
+// Returns true iff regular expression re matches the entire str.
+bool RE::FullMatch(const char* str, const RE& re) {
+  if (!re.is_valid_) return false;
+
+  regmatch_t match;
+  return regexec(&re.full_regex_, str, 1, &match, 0) == 0;
+}
+
+// Returns true iff regular expression re matches a substring of str
+// (including str itself).
+bool RE::PartialMatch(const char* str, const RE& re) {
+  if (!re.is_valid_) return false;
+
+  regmatch_t match;
+  return regexec(&re.partial_regex_, str, 1, &match, 0) == 0;
+}
+
+// Initializes an RE from its string representation.
+void RE::Init(const char* regex) {
+  pattern_ = posix::StrDup(regex);
+
+  // Reserves enough bytes to hold the regular expression used for a
+  // full match.
+  const size_t full_regex_len = strlen(regex) + 10;
+  char* const full_pattern = new char[full_regex_len];
+
+  snprintf(full_pattern, full_regex_len, "^(%s)$", regex);
+  is_valid_ = regcomp(&full_regex_, full_pattern, REG_EXTENDED) == 0;
+  // We want to call regcomp(&partial_regex_, ...) even if the
+  // previous expression returns false.  Otherwise partial_regex_ may
+  // not be properly initialized and may cause trouble when it's
+  // freed.
+  //
+  // Some implementations of POSIX regex (e.g. on at least some
+  // versions of Cygwin) don't accept the empty string as a valid
+  // regex.  We change it to an equivalent form "()" to be safe.
+  if (is_valid_) {
+    const char* const partial_regex = (*regex == '\0') ? "()" : regex;
+    is_valid_ = regcomp(&partial_regex_, partial_regex, REG_EXTENDED) == 0;
+  }
+  EXPECT_TRUE(is_valid_)
+      << "Regular expression \"" << regex
+      << "\" is not a valid POSIX Extended regular expression.";
+
+  delete[] full_pattern;
+}
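+
+// Illustrative usage sketch, assuming the RE(const char*) constructor
+// declared in the gtest headers (the patterns below are made up):
+//   const RE re("a.*c");
+//   RE::FullMatch("abc", re);       // true  - the entire string matches.
+//   RE::FullMatch("xabcx", re);     // false - only a substring matches.
+//   RE::PartialMatch("xabcx", re);  // true  - a substring is enough.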
+
+#elif GTEST_USES_SIMPLE_RE
+
+// Returns true iff ch appears anywhere in str (excluding the
+// terminating '\0' character).
+bool IsInSet(char ch, const char* str) {
+  return ch != '\0' && strchr(str, ch) != NULL;
+}
+
+// Returns true iff ch belongs to the given classification.  Unlike
+// similar functions in <ctype.h>, these aren't affected by the
+// current locale.
+bool IsAsciiDigit(char ch) { return '0' <= ch && ch <= '9'; }
+bool IsAsciiPunct(char ch) {
+  return IsInSet(ch, "^-!\"#$%&'()*+,./:;<=>?@[\\]_`{|}~");
+}
+bool IsRepeat(char ch) { return IsInSet(ch, "?*+"); }
+bool IsAsciiWhiteSpace(char ch) { return IsInSet(ch, " \f\n\r\t\v"); }
+bool IsAsciiWordChar(char ch) {
+  return ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z') ||
+      ('0' <= ch && ch <= '9') || ch == '_';
+}
+
+// Returns true iff "\\c" is a supported escape sequence.
+bool IsValidEscape(char c) {
+  return (IsAsciiPunct(c) || IsInSet(c, "dDfnrsStvwW"));
+}
+
+// Returns true iff the given atom (specified by escaped and pattern)
+// matches ch.  The result is undefined if the atom is invalid.
+bool AtomMatchesChar(bool escaped, char pattern_char, char ch) {
+  if (escaped) {  // "\\p" where p is pattern_char.
+    switch (pattern_char) {
+      case 'd': return IsAsciiDigit(ch);
+      case 'D': return !IsAsciiDigit(ch);
+      case 'f': return ch == '\f';
+      case 'n': return ch == '\n';
+      case 'r': return ch == '\r';
+      case 's': return IsAsciiWhiteSpace(ch);
+      case 'S': return !IsAsciiWhiteSpace(ch);
+      case 't': return ch == '\t';
+      case 'v': return ch == '\v';
+      case 'w': return IsAsciiWordChar(ch);
+      case 'W': return !IsAsciiWordChar(ch);
+    }
+    return IsAsciiPunct(pattern_char) && pattern_char == ch;
+  }
+
+  return (pattern_char == '.' && ch != '\n') || pattern_char == ch;
+}
+
+// Helper function used by ValidateRegex() to format error messages.
+String FormatRegexSyntaxError(const char* regex, int index) {
+  return (Message() << "Syntax error at index " << index
+          << " in simple regular expression \"" << regex << "\": ").GetString();
+}
+
+// Generates non-fatal failures and returns false if regex is invalid;
+// otherwise returns true.
+bool ValidateRegex(const char* regex) {
+  if (regex == NULL) {
+    // TODO(wan@google.com): fix the source file location in the
+    // assertion failures to match where the regex is used in user
+    // code.
+    ADD_FAILURE() << "NULL is not a valid simple regular expression.";
+    return false;
+  }
+
+  bool is_valid = true;
+
+  // True iff ?, *, or + can follow the previous atom.
+  bool prev_repeatable = false;
+  for (int i = 0; regex[i]; i++) {
+    if (regex[i] == '\\') {  // An escape sequence
+      i++;
+      if (regex[i] == '\0') {
+        ADD_FAILURE() << FormatRegexSyntaxError(regex, i - 1)
+                      << "'\\' cannot appear at the end.";
+        return false;
+      }
+
+      if (!IsValidEscape(regex[i])) {
+        ADD_FAILURE() << FormatRegexSyntaxError(regex, i - 1)
+                      << "invalid escape sequence \"\\" << regex[i] << "\".";
+        is_valid = false;
+      }
+      prev_repeatable = true;
+    } else {  // Not an escape sequence.
+      const char ch = regex[i];
+
+      if (ch == '^' && i > 0) {
+        ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
+                      << "'^' can only appear at the beginning.";
+        is_valid = false;
+      } else if (ch == '$' && regex[i + 1] != '\0') {
+        ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
+                      << "'$' can only appear at the end.";
+        is_valid = false;
+      } else if (IsInSet(ch, "()[]{}|")) {
+        ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
+                      << "'" << ch << "' is unsupported.";
+        is_valid = false;
+      } else if (IsRepeat(ch) && !prev_repeatable) {
+        ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
+                      << "'" << ch << "' can only follow a repeatable token.";
+        is_valid = false;
+      }
+
+      prev_repeatable = !IsInSet(ch, "^$?*+");
+    }
+  }
+
+  return is_valid;
+}
+
+// Matches a repeated regex atom followed by a valid simple regular
+// expression.  The regex atom is defined as c if escaped is false,
+// or \c otherwise.  repeat is the repetition meta character (?, *,
+// or +).  The behavior is undefined if str contains too many
+// characters to be indexable by size_t, in which case the test will
+// probably time out anyway.  We are fine with this limitation as
+// std::string has it too.
+bool MatchRepetitionAndRegexAtHead(
+    bool escaped, char c, char repeat, const char* regex,
+    const char* str) {
+  const size_t min_count = (repeat == '+') ? 1 : 0;
+  const size_t max_count = (repeat == '?') ? 1 :
+      static_cast<size_t>(-1) - 1;
+  // We cannot call numeric_limits::max() as it conflicts with the
+  // max() macro on Windows.
+
+  for (size_t i = 0; i <= max_count; ++i) {
+    // We know that the atom matches each of the first i characters in str.
+    if (i >= min_count && MatchRegexAtHead(regex, str + i)) {
+      // We have enough matches at the head, and the tail matches too.
+      // Since we only care about *whether* the pattern matches str
+      // (as opposed to *how* it matches), there is no need to find a
+      // greedy match.
+      return true;
+    }
+    if (str[i] == '\0' || !AtomMatchesChar(escaped, c, str[i]))
+      return false;
+  }
+  return false;
+}
+
+// Returns true iff regex matches a prefix of str.  regex must be a
+// valid simple regular expression and not start with "^", or the
+// result is undefined.
+bool MatchRegexAtHead(const char* regex, const char* str) {
+  if (*regex == '\0')  // An empty regex matches a prefix of anything.
+    return true;
+
+  // "$" only matches the end of a string.  Note that regex being
+  // valid guarantees that there's nothing after "$" in it.
+  if (*regex == '$')
+    return *str == '\0';
+
+  // Is the first thing in regex an escape sequence?
+  const bool escaped = *regex == '\\';
+  if (escaped)
+    ++regex;
+  if (IsRepeat(regex[1])) {
+    // MatchRepetitionAndRegexAtHead() calls MatchRegexAtHead(), so
+    // here's an indirect recursion.  It terminates as the regex gets
+    // shorter in each recursion.
+    return MatchRepetitionAndRegexAtHead(
+        escaped, regex[0], regex[1], regex + 2, str);
+  } else {
+    // regex isn't empty, isn't "$", and doesn't start with a
+    // repetition.  We match the first atom of regex with the first
+    // character of str and recurse.
+    return (*str != '\0') && AtomMatchesChar(escaped, *regex, *str) &&
+        MatchRegexAtHead(regex + 1, str + 1);
+  }
+}
+
+// Returns true iff regex matches any substring of str.  regex must be
+// a valid simple regular expression, or the result is undefined.
+//
+// The algorithm is recursive, but the recursion depth doesn't exceed
+// the regex length, so we won't need to worry about running out of
+// stack space normally.  In rare cases the time complexity can be
+// exponential with respect to the regex length + the string length,
+// but usually it's much faster (often close to linear).
+bool MatchRegexAnywhere(const char* regex, const char* str) {
+  if (regex == NULL || str == NULL)
+    return false;
+
+  if (*regex == '^')
+    return MatchRegexAtHead(regex + 1, str);
+
+  // A successful match can be anywhere in str.
+  do {
+    if (MatchRegexAtHead(regex, str))
+      return true;
+  } while (*str++ != '\0');
+  return false;
+}
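+
+// Summary of the simple-regex dialect implemented above (examples are
+// illustrative): atoms are literal characters, '.', or the escapes
+// accepted by IsValidEscape(); an atom may be followed by '?', '*' or
+// '+', and the pattern may be anchored with '^' and '$'.  For instance,
+// "^ab*c$" matches "ac" and "abbbc" but not "abd", while "(ab)+" is
+// rejected by ValidateRegex() because grouping is unsupported.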
+
+// Implements the RE class.
+
+RE::~RE() {
+  free(const_cast<char*>(pattern_));
+  free(const_cast<char*>(full_pattern_));
+}
+
+// Returns true iff regular expression re matches the entire str.
+bool RE::FullMatch(const char* str, const RE& re) {
+  return re.is_valid_ && MatchRegexAnywhere(re.full_pattern_, str);
+}
+
+// Returns true iff regular expression re matches a substring of str
+// (including str itself).
+bool RE::PartialMatch(const char* str, const RE& re) {
+  return re.is_valid_ && MatchRegexAnywhere(re.pattern_, str);
+}
+
+// Initializes an RE from its string representation.
+void RE::Init(const char* regex) {
+  pattern_ = full_pattern_ = NULL;
+  if (regex != NULL) {
+    pattern_ = posix::StrDup(regex);
+  }
+
+  is_valid_ = ValidateRegex(regex);
+  if (!is_valid_) {
+    // No need to calculate the full pattern when the regex is invalid.
+    return;
+  }
+
+  const size_t len = strlen(regex);
+  // Reserves enough bytes to hold the regular expression used for a
+  // full match: we need space to prepend a '^', append a '$', and
+  // terminate the string with '\0'.
+  char* buffer = static_cast<char*>(malloc(len + 3));
+  full_pattern_ = buffer;
+
+  if (*regex != '^')
+    *buffer++ = '^';  // Makes sure full_pattern_ starts with '^'.
+
+  // We don't use snprintf or strncpy, as they trigger a warning when
+  // compiled with VC++ 8.0.
+  memcpy(buffer, regex, len);
+  buffer += len;
+
+  if (len == 0 || regex[len - 1] != '$')
+    *buffer++ = '$';  // Makes sure full_pattern_ ends with '$'.
+
+  *buffer = '\0';
+}
+
+#endif  // GTEST_USES_POSIX_RE
+
+const char kUnknownFile[] = "unknown file";
+
+// Formats a source file path and a line number as they would appear
+// in an error message from the compiler used to compile this code.
+GTEST_API_ ::std::string FormatFileLocation(const char* file, int line) {
+  const char* const file_name = file == NULL ? kUnknownFile : file;
+
+  if (line < 0) {
+    return String::Format("%s:", file_name).c_str();
+  }
+#ifdef _MSC_VER
+  return String::Format("%s(%d):", file_name, line).c_str();
+#else
+  return String::Format("%s:%d:", file_name, line).c_str();
+#endif  // _MSC_VER
+}
+
+// Formats a file location for compiler-independent XML output.
+// Although this function is not platform dependent, we put it next to
+// FormatFileLocation in order to contrast the two functions.
+// Note that FormatCompilerIndependentFileLocation() does NOT append a colon
+// to the file location it produces, unlike FormatFileLocation().
+GTEST_API_ ::std::string FormatCompilerIndependentFileLocation(
+    const char* file, int line) {
+  const char* const file_name = file == NULL ? kUnknownFile : file;
+
+  if (line < 0)
+    return file_name;
+  else
+    return String::Format("%s:%d", file_name, line).c_str();
+}
+
+
+GTestLog::GTestLog(GTestLogSeverity severity, const char* file, int line)
+    : severity_(severity) {
+  const char* const marker =
+      severity == GTEST_INFO ?    "[  INFO ]" :
+      severity == GTEST_WARNING ? "[WARNING]" :
+      severity == GTEST_ERROR ?   "[ ERROR ]" : "[ FATAL ]";
+  GetStream() << ::std::endl << marker << " "
+              << FormatFileLocation(file, line).c_str() << ": ";
+}
+
+// Flushes the buffers and, if severity is GTEST_FATAL, aborts the program.
+GTestLog::~GTestLog() {
+  GetStream() << ::std::endl;
+  if (severity_ == GTEST_FATAL) {
+    fflush(stderr);
+    posix::Abort();
+  }
+}
+// Disable Microsoft deprecation warnings for POSIX functions called from
+// this class (creat, dup, dup2, and close)
+#ifdef _MSC_VER
+# pragma warning(push)
+# pragma warning(disable: 4996)
+#endif  // _MSC_VER
+
+#if GTEST_HAS_STREAM_REDIRECTION
+
+// Object that captures an output stream (stdout/stderr).
+class CapturedStream {
+ public:
+  // The ctor redirects the stream to a temporary file.
+  CapturedStream(int fd) : fd_(fd), uncaptured_fd_(dup(fd)) {
+
+# if GTEST_OS_WINDOWS
+    char temp_dir_path[MAX_PATH + 1] = { '\0' };  // NOLINT
+    char temp_file_path[MAX_PATH + 1] = { '\0' };  // NOLINT
+
+    ::GetTempPathA(sizeof(temp_dir_path), temp_dir_path);
+    const UINT success = ::GetTempFileNameA(temp_dir_path,
+                                            "gtest_redir",
+                                            0,  // Generate unique file name.
+                                            temp_file_path);
+    GTEST_CHECK_(success != 0)
+        << "Unable to create a temporary file in " << temp_dir_path;
+    const int captured_fd = creat(temp_file_path, _S_IREAD | _S_IWRITE);
+    GTEST_CHECK_(captured_fd != -1) << "Unable to open temporary file "
+                                    << temp_file_path;
+    filename_ = temp_file_path;
+# else
+    // There's no guarantee that a test has write access to the
+    // current directory, so we create the temporary file in the /tmp
+    // directory instead.
+    char name_template[] = "/tmp/captured_stream.XXXXXX";
+    const int captured_fd = mkstemp(name_template);
+    filename_ = name_template;
+# endif  // GTEST_OS_WINDOWS
+    fflush(NULL);
+    dup2(captured_fd, fd_);
+    close(captured_fd);
+  }
+
+  ~CapturedStream() {
+    remove(filename_.c_str());
+  }
+
+  String GetCapturedString() {
+    if (uncaptured_fd_ != -1) {
+      // Restores the original stream.
+      fflush(NULL);
+      dup2(uncaptured_fd_, fd_);
+      close(uncaptured_fd_);
+      uncaptured_fd_ = -1;
+    }
+
+    FILE* const file = posix::FOpen(filename_.c_str(), "r");
+    const String content = ReadEntireFile(file);
+    posix::FClose(file);
+    return content;
+  }
+
+ private:
+  // Reads the entire content of a file as a String.
+  static String ReadEntireFile(FILE* file);
+
+  // Returns the size (in bytes) of a file.
+  static size_t GetFileSize(FILE* file);
+
+  const int fd_;  // A stream to capture.
+  int uncaptured_fd_;
+  // Name of the temporary file holding the stderr output.
+  ::std::string filename_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(CapturedStream);
+};
+
+// Returns the size (in bytes) of a file.
+size_t CapturedStream::GetFileSize(FILE* file) {
+  fseek(file, 0, SEEK_END);
+  return static_cast<size_t>(ftell(file));
+}
+
+// Reads the entire content of a file as a string.
+String CapturedStream::ReadEntireFile(FILE* file) {
+  const size_t file_size = GetFileSize(file);
+  char* const buffer = new char[file_size];
+
+  size_t bytes_last_read = 0;  // # of bytes read in the last fread()
+  size_t bytes_read = 0;       // # of bytes read so far
+
+  fseek(file, 0, SEEK_SET);
+
+  // Keeps reading the file until we cannot read further or the
+  // pre-determined file size is reached.
+  do {
+    bytes_last_read = fread(buffer+bytes_read, 1, file_size-bytes_read, file);
+    bytes_read += bytes_last_read;
+  } while (bytes_last_read > 0 && bytes_read < file_size);
+
+  const String content(buffer, bytes_read);
+  delete[] buffer;
+
+  return content;
+}
+
+# ifdef _MSC_VER
+#  pragma warning(pop)
+# endif  // _MSC_VER
+
+static CapturedStream* g_captured_stderr = NULL;
+static CapturedStream* g_captured_stdout = NULL;
+
+// Starts capturing an output stream (stdout/stderr).
+void CaptureStream(int fd, const char* stream_name, CapturedStream** stream) {
+  if (*stream != NULL) {
+    GTEST_LOG_(FATAL) << "Only one " << stream_name
+                      << " capturer can exist at a time.";
+  }
+  *stream = new CapturedStream(fd);
+}
+
+// Stops capturing the output stream and returns the captured string.
+String GetCapturedStream(CapturedStream** captured_stream) {
+  const String content = (*captured_stream)->GetCapturedString();
+
+  delete *captured_stream;
+  *captured_stream = NULL;
+
+  return content;
+}
+
+// Starts capturing stdout.
+void CaptureStdout() {
+  CaptureStream(kStdOutFileno, "stdout", &g_captured_stdout);
+}
+
+// Starts capturing stderr.
+void CaptureStderr() {
+  CaptureStream(kStdErrFileno, "stderr", &g_captured_stderr);
+}
+
+// Stops capturing stdout and returns the captured string.
+String GetCapturedStdout() { return GetCapturedStream(&g_captured_stdout); }
+
+// Stops capturing stderr and returns the captured string.
+String GetCapturedStderr() { return GetCapturedStream(&g_captured_stderr); }
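+
+// Illustrative usage sketch (hypothetical call site):
+//   CaptureStdout();
+//   printf("hello\n");
+//   const String output = GetCapturedStdout();  // output == "hello\n"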
+
+#endif  // GTEST_HAS_STREAM_REDIRECTION
+
+#if GTEST_HAS_DEATH_TEST
+
+// A copy of all command line arguments.  Set by InitGoogleTest().
+::std::vector<String> g_argvs;
+
+// Returns the command line as a vector of strings.
+const ::std::vector<String>& GetArgvs() { return g_argvs; }
+
+#endif  // GTEST_HAS_DEATH_TEST
+
+#if GTEST_OS_WINDOWS_MOBILE
+namespace posix {
+void Abort() {
+  DebugBreak();
+  TerminateProcess(GetCurrentProcess(), 1);
+}
+}  // namespace posix
+#endif  // GTEST_OS_WINDOWS_MOBILE
+
+// Returns the name of the environment variable corresponding to the
+// given flag.  For example, FlagToEnvVar("foo") will return
+// "GTEST_FOO" in the open-source version.
+static String FlagToEnvVar(const char* flag) {
+  const String full_flag =
+      (Message() << GTEST_FLAG_PREFIX_ << flag).GetString();
+
+  Message env_var;
+  for (size_t i = 0; i != full_flag.length(); i++) {
+    env_var << ToUpper(full_flag.c_str()[i]);
+  }
+
+  return env_var.GetString();
+}
+
+// Parses 'str' for a 32-bit signed integer.  If successful, writes
+// the result to *value and returns true; otherwise leaves *value
+// unchanged and returns false.
+bool ParseInt32(const Message& src_text, const char* str, Int32* value) {
+  // Parses the environment variable as a decimal integer.
+  char* end = NULL;
+  const long long_value = strtol(str, &end, 10);  // NOLINT
+
+  // Has strtol() consumed all characters in the string?
+  if (*end != '\0') {
+    // No - an invalid character was encountered.
+    Message msg;
+    msg << "WARNING: " << src_text
+        << " is expected to be a 32-bit integer, but actually"
+        << " has value \"" << str << "\".\n";
+    printf("%s", msg.GetString().c_str());
+    fflush(stdout);
+    return false;
+  }
+
+  // Is the parsed value in the range of an Int32?
+  const Int32 result = static_cast<Int32>(long_value);
+  if (long_value == LONG_MAX || long_value == LONG_MIN ||
+      // The parsed value overflows as a long.  (strtol() returns
+      // LONG_MAX or LONG_MIN when the input overflows.)
+      result != long_value
+      // The parsed value overflows as an Int32.
+      ) {
+    Message msg;
+    msg << "WARNING: " << src_text
+        << " is expected to be a 32-bit integer, but actually"
+        << " has value " << str << ", which overflows.\n";
+    printf("%s", msg.GetString().c_str());
+    fflush(stdout);
+    return false;
+  }
+
+  *value = result;
+  return true;
+}
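+
+// Illustrative outcomes (inputs are hypothetical):
+//   ParseInt32(msg, "123", &value)         -> true,  value == 123
+//   ParseInt32(msg, "12abc", &value)       -> false (trailing garbage)
+//   ParseInt32(msg, "9123456789", &value)  -> false (overflows an Int32)
+// where msg is any Message describing the source of the text.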
+
+// Reads and returns the Boolean environment variable corresponding to
+// the given flag; if it's not set, returns default_value.
+//
+// The value is considered true iff it's not "0".
+bool BoolFromGTestEnv(const char* flag, bool default_value) {
+  const String env_var = FlagToEnvVar(flag);
+  const char* const string_value = posix::GetEnv(env_var.c_str());
+  return string_value == NULL ?
+      default_value : strcmp(string_value, "0") != 0;
+}
+
+// Reads and returns a 32-bit integer stored in the environment
+// variable corresponding to the given flag; if it isn't set or
+// doesn't represent a valid 32-bit integer, returns default_value.
+Int32 Int32FromGTestEnv(const char* flag, Int32 default_value) {
+  const String env_var = FlagToEnvVar(flag);
+  const char* const string_value = posix::GetEnv(env_var.c_str());
+  if (string_value == NULL) {
+    // The environment variable is not set.
+    return default_value;
+  }
+
+  Int32 result = default_value;
+  if (!ParseInt32(Message() << "Environment variable " << env_var,
+                  string_value, &result)) {
+    printf("The default value %s is used.\n",
+           (Message() << default_value).GetString().c_str());
+    fflush(stdout);
+    return default_value;
+  }
+
+  return result;
+}
+
+// Reads and returns the string environment variable corresponding to
+// the given flag; if it's not set, returns default_value.
+const char* StringFromGTestEnv(const char* flag, const char* default_value) {
+  const String env_var = FlagToEnvVar(flag);
+  const char* const value = posix::GetEnv(env_var.c_str());
+  return value == NULL ? default_value : value;
+}
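+
+// Illustrative examples (the flag names are just examples):
+//   FlagToEnvVar("output") yields "GTEST_OUTPUT";
+//   BoolFromGTestEnv("print_time", true) is false only when
+//   GTEST_PRINT_TIME is set to "0";
+//   Int32FromGTestEnv("random_seed", 0) returns 0 when GTEST_RANDOM_SEED
+//   is unset or does not hold a valid 32-bit integer.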
+
+}  // namespace internal
+}  // namespace testing
+// Copyright 2007, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+
+// Google Test - The Google C++ Testing Framework
+//
+// This file implements a universal value printer that can print a
+// value of any type T:
+//
+//   void ::testing::internal::UniversalPrinter<T>::Print(value, ostream_ptr);
+//
+// It uses the << operator when possible, and prints the bytes in the
+// object otherwise.  A user can override its behavior for a class
+// type Foo by defining either operator<<(::std::ostream&, const Foo&)
+// or void PrintTo(const Foo&, ::std::ostream*) in the namespace that
+// defines Foo.
+
+#include <ctype.h>
+#include <stdio.h>
+#include <ostream>  // NOLINT
+#include <string>
+
+namespace testing {
+
+namespace {
+
+using ::std::ostream;
+
+#if GTEST_OS_WINDOWS_MOBILE  // Windows CE does not define _snprintf_s.
+# define snprintf _snprintf
+#elif _MSC_VER >= 1400  // VC 8.0 and later deprecate snprintf and _snprintf.
+# define snprintf _snprintf_s
+#elif _MSC_VER
+# define snprintf _snprintf
+#endif  // GTEST_OS_WINDOWS_MOBILE
+
+// Prints a segment of bytes in the given object.
+void PrintByteSegmentInObjectTo(const unsigned char* obj_bytes, size_t start,
+                                size_t count, ostream* os) {
+  char text[5] = "";
+  for (size_t i = 0; i != count; i++) {
+    const size_t j = start + i;
+    if (i != 0) {
+      // Organizes the bytes into groups of 2 for easy parsing by
+      // humans.
+      if ((j % 2) == 0)
+        *os << ' ';
+      else
+        *os << '-';
+    }
+    snprintf(text, sizeof(text), "%02X", obj_bytes[j]);
+    *os << text;
+  }
+}
+
+// Prints the bytes in the given value to the given ostream.
+void PrintBytesInObjectToImpl(const unsigned char* obj_bytes, size_t count,
+                              ostream* os) {
+  // Tells the user how big the object is.
+  *os << count << "-byte object <";
+
+  const size_t kThreshold = 132;
+  const size_t kChunkSize = 64;
+  // If the object size is bigger than kThreshold, we'll have to omit
+  // some details by printing only the first and the last kChunkSize
+  // bytes.
+  // TODO(wan): let the user control the threshold using a flag.
+  if (count < kThreshold) {
+    PrintByteSegmentInObjectTo(obj_bytes, 0, count, os);
+  } else {
+    PrintByteSegmentInObjectTo(obj_bytes, 0, kChunkSize, os);
+    *os << " ... ";
+    // Rounds up to 2-byte boundary.
+    const size_t resume_pos = (count - kChunkSize + 1)/2*2;
+    PrintByteSegmentInObjectTo(obj_bytes, resume_pos, count - resume_pos, os);
+  }
+  *os << ">";
+}
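+
+// Illustrative output (bytes are hypothetical): a 4-byte object whose
+// bytes are 0xDE 0xAD 0xBE 0xEF is printed as
+//   4-byte object <DE-AD BE-EF>
+// Objects of 132 bytes or more show only the first 64 bytes and
+// approximately the last 64 bytes, separated by " ... ".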
+
+}  // namespace
+
+namespace internal2 {
+
+// Delegates to PrintBytesInObjectToImpl() to print the bytes in the
+// given object.  The delegation simplifies the implementation, which
+// uses the << operator and thus is easier done outside of the
+// ::testing::internal namespace, which contains a << operator that
+// sometimes conflicts with the one in STL.
+void PrintBytesInObjectTo(const unsigned char* obj_bytes, size_t count,
+                          ostream* os) {
+  PrintBytesInObjectToImpl(obj_bytes, count, os);
+}
+
+}  // namespace internal2
+
+namespace internal {
+
+// Depending on the value of a char (or wchar_t), we print it in one
+// of three formats:
+//   - as is if it's a printable ASCII (e.g. 'a', '2', ' '),
+//   - as a hexadecimal escape sequence (e.g. '\x7F'), or
+//   - as a special escape sequence (e.g. '\r', '\n').
+enum CharFormat {
+  kAsIs,
+  kHexEscape,
+  kSpecialEscape
+};
+
+// Returns true if c is a printable ASCII character.  We test the
+// value of c directly instead of calling isprint(), which is buggy on
+// Windows Mobile.
+inline bool IsPrintableAscii(wchar_t c) {
+  return 0x20 <= c && c <= 0x7E;
+}
+
+// Prints a wide or narrow char c as a character literal without the
+// quotes, escaping it when necessary; returns how c was formatted.
+// The template argument UnsignedChar is the unsigned version of Char,
+// which is the type of c.
+template <typename UnsignedChar, typename Char>
+static CharFormat PrintAsCharLiteralTo(Char c, ostream* os) {
+  switch (static_cast<wchar_t>(c)) {
+    case L'\0':
+      *os << "\\0";
+      break;
+    case L'\'':
+      *os << "\\'";
+      break;
+    case L'\\':
+      *os << "\\\\";
+      break;
+    case L'\a':
+      *os << "\\a";
+      break;
+    case L'\b':
+      *os << "\\b";
+      break;
+    case L'\f':
+      *os << "\\f";
+      break;
+    case L'\n':
+      *os << "\\n";
+      break;
+    case L'\r':
+      *os << "\\r";
+      break;
+    case L'\t':
+      *os << "\\t";
+      break;
+    case L'\v':
+      *os << "\\v";
+      break;
+    default:
+      if (IsPrintableAscii(c)) {
+        *os << static_cast<char>(c);
+        return kAsIs;
+      } else {
+        *os << String::Format("\\x%X", static_cast<UnsignedChar>(c));
+        return kHexEscape;
+      }
+  }
+  return kSpecialEscape;
+}
+
+// Prints a char c as if it's part of a string literal, escaping it when
+// necessary; returns how c was formatted.
+static CharFormat PrintAsWideStringLiteralTo(wchar_t c, ostream* os) {
+  switch (c) {
+    case L'\'':
+      *os << "'";
+      return kAsIs;
+    case L'"':
+      *os << "\\\"";
+      return kSpecialEscape;
+    default:
+      return PrintAsCharLiteralTo<wchar_t>(c, os);
+  }
+}
+
+// Prints a char c as if it's part of a string literal, escaping it when
+// necessary; returns how c was formatted.
+static CharFormat PrintAsNarrowStringLiteralTo(char c, ostream* os) {
+  return PrintAsWideStringLiteralTo(static_cast<unsigned char>(c), os);
+}
+
+// Prints a wide or narrow character c and its code.  '\0' is printed
+// as "'\\0'", other unprintable characters are also properly escaped
+// using the standard C++ escape sequence.  The template argument
+// UnsignedChar is the unsigned version of Char, which is the type of c.
+template <typename UnsignedChar, typename Char>
+void PrintCharAndCodeTo(Char c, ostream* os) {
+  // First, print c as a literal in the most readable form we can find.
+  *os << ((sizeof(c) > 1) ? "L'" : "'");
+  const CharFormat format = PrintAsCharLiteralTo<UnsignedChar>(c, os);
+  *os << "'";
+
+  // To aid user debugging, we also print c's code in decimal, unless
+  // it's 0 (in which case c was printed as '\\0', making the code
+  // obvious).
+  if (c == 0)
+    return;
+  *os << " (" << String::Format("%d", c).c_str();
+
+  // For more convenience, we print c's code again in hexadecimal,
+  // unless c was already printed in the form '\x##' or the code is in
+  // [1, 9].
+  if (format == kHexEscape || (1 <= c && c <= 9)) {
+    // Do nothing.
+  } else {
+    *os << String::Format(", 0x%X",
+                          static_cast<UnsignedChar>(c)).c_str();
+  }
+  *os << ")";
+}
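+
+// Illustrative output of the PrintTo overloads below (characters are
+// hypothetical):
+//   PrintTo(static_cast<unsigned char>('a'), os)   prints  'a' (97, 0x61)
+//   PrintTo(L'\n', os)                             prints  L'\n' (10, 0xA)
+//   PrintTo(static_cast<unsigned char>('\0'), os)  prints  '\0'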
+
+void PrintTo(unsigned char c, ::std::ostream* os) {
+  PrintCharAndCodeTo<unsigned char>(c, os);
+}
+void PrintTo(signed char c, ::std::ostream* os) {
+  PrintCharAndCodeTo<unsigned char>(c, os);
+}
+
+// Prints a wchar_t as a character literal, escaping it if it is not
+// printable, and also prints its numeric code.  L'\0' is printed as "L'\\0'".
+void PrintTo(wchar_t wc, ostream* os) {
+  PrintCharAndCodeTo<wchar_t>(wc, os);
+}
+
+// Prints the given array of characters to the ostream.
+// The array starts at *begin, the length is len, it may include '\0' characters
+// and may not be null-terminated.
+static void PrintCharsAsStringTo(const char* begin, size_t len, ostream* os) {
+  *os << "\"";
+  bool is_previous_hex = false;
+  for (size_t index = 0; index < len; ++index) {
+    const char cur = begin[index];
+    if (is_previous_hex && IsXDigit(cur)) {
+      // Previous character is of '\x..' form and this character can be
+      // interpreted as another hexadecimal digit in its number. Break string to
+      // disambiguate.
+      *os << "\" \"";
+    }
+    is_previous_hex = PrintAsNarrowStringLiteralTo(cur, os) == kHexEscape;
+  }
+  *os << "\"";
+}
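+
+// Illustrative output (input is hypothetical): the 3-character array
+// { 'a', '\x12', 'b' } is printed as
+//   "a\x12" "b"
+// The string is split after the hex escape because 'b' is itself a hex
+// digit and would otherwise be read as part of the escape.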
+
+// Prints a (const) char array of 'len' elements, starting at address 'begin'.
+void UniversalPrintArray(const char* begin, size_t len, ostream* os) {
+  PrintCharsAsStringTo(begin, len, os);
+}
+
+// Prints the given array of wide characters to the ostream.
+// The array starts at *begin, the length is len, it may include L'\0'
+// characters and may not be null-terminated.
+static void PrintWideCharsAsStringTo(const wchar_t* begin, size_t len,
+                                     ostream* os) {
+  *os << "L\"";
+  bool is_previous_hex = false;
+  for (size_t index = 0; index < len; ++index) {
+    const wchar_t cur = begin[index];
+    if (is_previous_hex && isascii(cur) && IsXDigit(static_cast<char>(cur))) {
+      // Previous character is of '\x..' form and this character can be
+      // interpreted as another hexadecimal digit in its number. Break string to
+      // disambiguate.
+      *os << "\" L\"";
+    }
+    is_previous_hex = PrintAsWideStringLiteralTo(cur, os) == kHexEscape;
+  }
+  *os << "\"";
+}
+
+// Prints the given C string to the ostream.
+void PrintTo(const char* s, ostream* os) {
+  if (s == NULL) {
+    *os << "NULL";
+  } else {
+    *os << ImplicitCast_<const void*>(s) << " pointing to ";
+    PrintCharsAsStringTo(s, strlen(s), os);
+  }
+}
+
+// The MSVC compiler can be configured to define wchar_t as a typedef
+// of unsigned short. Defining an overload for const wchar_t* in that case
+// would cause pointers to unsigned shorts to be printed as wide strings,
+// possibly accessing more memory than intended and causing invalid
+// memory accesses. MSVC defines _NATIVE_WCHAR_T_DEFINED symbol when
+// wchar_t is implemented as a native type.
+#if !defined(_MSC_VER) || defined(_NATIVE_WCHAR_T_DEFINED)
+// Prints the given wide C string to the ostream.
+void PrintTo(const wchar_t* s, ostream* os) {
+  if (s == NULL) {
+    *os << "NULL";
+  } else {
+    *os << ImplicitCast_<const void*>(s) << " pointing to ";
+    PrintWideCharsAsStringTo(s, wcslen(s), os);
+  }
+}
+#endif  // wchar_t is native
+
+// Prints a ::string object.
+#if GTEST_HAS_GLOBAL_STRING
+void PrintStringTo(const ::string& s, ostream* os) {
+  PrintCharsAsStringTo(s.data(), s.size(), os);
+}
+#endif  // GTEST_HAS_GLOBAL_STRING
+
+void PrintStringTo(const ::std::string& s, ostream* os) {
+  PrintCharsAsStringTo(s.data(), s.size(), os);
+}
+
+// Prints a ::wstring object.
+#if GTEST_HAS_GLOBAL_WSTRING
+void PrintWideStringTo(const ::wstring& s, ostream* os) {
+  PrintWideCharsAsStringTo(s.data(), s.size(), os);
+}
+#endif  // GTEST_HAS_GLOBAL_WSTRING
+
+#if GTEST_HAS_STD_WSTRING
+void PrintWideStringTo(const ::std::wstring& s, ostream* os) {
+  PrintWideCharsAsStringTo(s.data(), s.size(), os);
+}
+#endif  // GTEST_HAS_STD_WSTRING
+
+}  // namespace internal
+
+}  // namespace testing
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: mheule@google.com (Markus Heule)
+//
+// The Google C++ Testing Framework (Google Test)
+
+
+// Indicates that this translation unit is part of Google Test's
+// implementation.  It must come before gtest-internal-inl.h is
+// included, or there will be a compiler error.  This trick is to
+// prevent a user from accidentally including gtest-internal-inl.h in
+// his code.
+#define GTEST_IMPLEMENTATION_ 1
+#undef GTEST_IMPLEMENTATION_
+
+namespace testing {
+
+using internal::GetUnitTestImpl;
+
+// Gets the summary of the failure message by omitting the stack trace
+// in it.
+internal::String TestPartResult::ExtractSummary(const char* message) {
+  const char* const stack_trace = strstr(message, internal::kStackTraceMarker);
+  return stack_trace == NULL ? internal::String(message) :
+      internal::String(message, stack_trace - message);
+}
+
+// Prints a TestPartResult object.
+std::ostream& operator<<(std::ostream& os, const TestPartResult& result) {
+  return os
+      << result.file_name() << ":" << result.line_number() << ": "
+      << (result.type() == TestPartResult::kSuccess ? "Success" :
+          result.type() == TestPartResult::kFatalFailure ? "Fatal failure" :
+          "Non-fatal failure") << ":\n"
+      << result.message() << std::endl;
+}
+
+// Appends a TestPartResult to the array.
+void TestPartResultArray::Append(const TestPartResult& result) {
+  array_.push_back(result);
+}
+
+// Returns the TestPartResult at the given index (0-based).
+const TestPartResult& TestPartResultArray::GetTestPartResult(int index) const {
+  if (index < 0 || index >= size()) {
+    printf("\nInvalid index (%d) into TestPartResultArray.\n", index);
+    internal::posix::Abort();
+  }
+
+  return array_[index];
+}
+
+// Returns the number of TestPartResult objects in the array.
+int TestPartResultArray::size() const {
+  return static_cast<int>(array_.size());
+}
+
+namespace internal {
+
+HasNewFatalFailureHelper::HasNewFatalFailureHelper()
+    : has_new_fatal_failure_(false),
+      original_reporter_(GetUnitTestImpl()->
+                         GetTestPartResultReporterForCurrentThread()) {
+  GetUnitTestImpl()->SetTestPartResultReporterForCurrentThread(this);
+}
+
+HasNewFatalFailureHelper::~HasNewFatalFailureHelper() {
+  GetUnitTestImpl()->SetTestPartResultReporterForCurrentThread(
+      original_reporter_);
+}
+
+void HasNewFatalFailureHelper::ReportTestPartResult(
+    const TestPartResult& result) {
+  if (result.fatally_failed())
+    has_new_fatal_failure_ = true;
+  original_reporter_->ReportTestPartResult(result);
+}
+
+}  // namespace internal
+
+}  // namespace testing
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan at google.com (Zhanyong Wan)
+
+
+namespace testing {
+namespace internal {
+
+#if GTEST_HAS_TYPED_TEST_P
+
+// Skips to the first non-space char in str. Returns an empty string if str
+// contains only whitespace characters.
+static const char* SkipSpaces(const char* str) {
+  while (IsSpace(*str))
+    str++;
+  return str;
+}
+
+// Verifies that registered_tests match the test names in
+// defined_test_names_; returns registered_tests if successful, or
+// aborts the program otherwise.
+const char* TypedTestCasePState::VerifyRegisteredTestNames(
+    const char* file, int line, const char* registered_tests) {
+  typedef ::std::set<const char*>::const_iterator DefinedTestIter;
+  registered_ = true;
+
+  // Skip initial whitespace in registered_tests since some
+  // preprocessors prefix stringizied literals with whitespace.
+  registered_tests = SkipSpaces(registered_tests);
+
+  Message errors;
+  ::std::set<String> tests;
+  for (const char* names = registered_tests; names != NULL;
+       names = SkipComma(names)) {
+    const String name = GetPrefixUntilComma(names);
+    if (tests.count(name) != 0) {
+      errors << "Test " << name << " is listed more than once.\n";
+      continue;
+    }
+
+    bool found = false;
+    for (DefinedTestIter it = defined_test_names_.begin();
+         it != defined_test_names_.end();
+         ++it) {
+      if (name == *it) {
+        found = true;
+        break;
+      }
+    }
+
+    if (found) {
+      tests.insert(name);
+    } else {
+      errors << "No test named " << name
+             << " can be found in this test case.\n";
+    }
+  }
+
+  for (DefinedTestIter it = defined_test_names_.begin();
+       it != defined_test_names_.end();
+       ++it) {
+    if (tests.count(*it) == 0) {
+      errors << "You forgot to list test " << *it << ".\n";
+    }
+  }
+
+  const String& errors_str = errors.GetString();
+  if (errors_str != "") {
+    fprintf(stderr, "%s %s", FormatFileLocation(file, line).c_str(),
+            errors_str.c_str());
+    fflush(stderr);
+    posix::Abort();
+  }
+
+  return registered_tests;
+}
+
+#endif  // GTEST_HAS_TYPED_TEST_P
+
+}  // namespace internal
+}  // namespace testing
diff --git a/src/gtest/gtest.h b/src/gtest/gtest.h
new file mode 100644
index 0000000..3143bd6
--- /dev/null
+++ b/src/gtest/gtest.h
@@ -0,0 +1,19537 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan at google.com (Zhanyong Wan)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This header file defines the public API for Google Test.  It should be
+// included by any test program that uses Google Test.
+//
+// IMPORTANT NOTE: Due to limitation of the C++ language, we have to
+// leave some internal implementation details in this header file.
+// They are clearly marked by comments like this:
+//
+//   // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+//
+// Such code is NOT meant to be used by a user directly, and is subject
+// to CHANGE WITHOUT NOTICE.  Therefore DO NOT DEPEND ON IT in a user
+// program!
+//
+// Acknowledgment: Google Test borrowed the idea of automatic test
+// registration from Barthelemy Dagenais' (barthelemy at prologique.com)
+// easyUnit framework.
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
+#define GTEST_INCLUDE_GTEST_GTEST_H_
+
+#include <limits>
+#include <vector>
+
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: wan at google.com (Zhanyong Wan), eefacm at gmail.com (Sean Mcafee)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This header file declares functions and macros used internally by
+// Google Test.  They are subject to change without notice.
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_INTERNAL_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_INTERNAL_H_
+
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: wan at google.com (Zhanyong Wan)
+//
+// Low-level types and utilities for porting Google Test to various
+// platforms.  They are subject to change without notice.  DO NOT USE
+// THEM IN USER CODE.
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_H_
+
+// The user can define the following macros in the build script to
+// control Google Test's behavior.  If the user doesn't define a macro
+// in this list, Google Test will define it.
+//
+//   GTEST_HAS_CLONE          - Define it to 1/0 to indicate that clone(2)
+//                              is/isn't available.
+//   GTEST_HAS_EXCEPTIONS     - Define it to 1/0 to indicate that exceptions
+//                              are enabled.
+//   GTEST_HAS_GLOBAL_STRING  - Define it to 1/0 to indicate that ::string
+//                              is/isn't available (some systems define
+//                              ::string, which is different to std::string).
+//   GTEST_HAS_GLOBAL_WSTRING - Define it to 1/0 to indicate that ::wstring
+//                              is/isn't available (some systems define
+//                              ::wstring, which is different to std::wstring).
+//   GTEST_HAS_POSIX_RE       - Define it to 1/0 to indicate that POSIX regular
+//                              expressions are/aren't available.
+//   GTEST_HAS_PTHREAD        - Define it to 1/0 to indicate that <pthread.h>
+//                              is/isn't available.
+//   GTEST_HAS_RTTI           - Define it to 1/0 to indicate that RTTI is/isn't
+//                              enabled.
+//   GTEST_HAS_STD_WSTRING    - Define it to 1/0 to indicate that
+//                              std::wstring does/doesn't work (Google Test can
+//                              be used where std::wstring is unavailable).
+//   GTEST_HAS_TR1_TUPLE      - Define it to 1/0 to indicate tr1::tuple
+//                              is/isn't available.
+//   GTEST_HAS_SEH            - Define it to 1/0 to indicate whether the
+//                              compiler supports Microsoft's "Structured
+//                              Exception Handling".
+//   GTEST_HAS_STREAM_REDIRECTION
+//                            - Define it to 1/0 to indicate whether the
+//                              platform supports I/O stream redirection using
+//                              dup() and dup2().
+//   GTEST_USE_OWN_TR1_TUPLE  - Define it to 1/0 to indicate whether Google
+//                              Test's own tr1 tuple implementation should be
+//                              used.  Unused when the user sets
+//                              GTEST_HAS_TR1_TUPLE to 0.
+//   GTEST_LINKED_AS_SHARED_LIBRARY
+//                            - Define to 1 when compiling tests that use
+//                              Google Test as a shared library (known as
+//                              DLL on Windows).
+//   GTEST_CREATE_SHARED_LIBRARY
+//                            - Define to 1 when compiling Google Test itself
+//                              as a shared library.
+
+// This header defines the following utilities:
+//
+// Macros indicating the current platform (defined to 1 if compiled on
+// the given platform; otherwise undefined):
+//   GTEST_OS_AIX      - IBM AIX
+//   GTEST_OS_CYGWIN   - Cygwin
+//   GTEST_OS_HPUX     - HP-UX
+//   GTEST_OS_LINUX    - Linux
+//     GTEST_OS_LINUX_ANDROID - Google Android
+//   GTEST_OS_MAC      - Mac OS X
+//   GTEST_OS_NACL     - Google Native Client (NaCl)
+//   GTEST_OS_SOLARIS  - Sun Solaris
+//   GTEST_OS_SYMBIAN  - Symbian
+//   GTEST_OS_WINDOWS  - Windows (Desktop, MinGW, or Mobile)
+//     GTEST_OS_WINDOWS_DESKTOP  - Windows Desktop
+//     GTEST_OS_WINDOWS_MINGW    - MinGW
+//     GTEST_OS_WINDOWS_MOBILE   - Windows Mobile
+//   GTEST_OS_ZOS      - z/OS
+//
+// Among the platforms, Cygwin, Linux, Mac OS X, and Windows have the
+// most stable support.  Since core members of the Google Test project
+// don't have access to other platforms, support for them may be less
+// stable.  If you notice any problems on your platform, please notify
+// googletestframework at googlegroups.com (patches for fixing them are
+// even more welcome!).
+//
+// Note that it is possible that none of the GTEST_OS_* macros are defined.
+//
+// Macros indicating available Google Test features (defined to 1 if
+// the corresponding feature is supported; otherwise undefined):
+//   GTEST_HAS_COMBINE      - the Combine() function (for value-parameterized
+//                            tests)
+//   GTEST_HAS_DEATH_TEST   - death tests
+//   GTEST_HAS_PARAM_TEST   - value-parameterized tests
+//   GTEST_HAS_TYPED_TEST   - typed tests
+//   GTEST_HAS_TYPED_TEST_P - type-parameterized tests
+//   GTEST_USES_POSIX_RE    - enhanced POSIX regex is used. Do not confuse with
+//                            GTEST_HAS_POSIX_RE (see above) which users can
+//                            define themselves.
+//   GTEST_USES_SIMPLE_RE   - our own simple regex is used;
+//                            the above two are mutually exclusive.
+//   GTEST_CAN_COMPARE_NULL - accepts untyped NULL in EXPECT_EQ().
+//
+// Macros for basic C++ coding:
+//   GTEST_AMBIGUOUS_ELSE_BLOCKER_ - for disabling a gcc warning.
+//   GTEST_ATTRIBUTE_UNUSED_  - declares that a class' instances or a
+//                              variable don't have to be used.
+//   GTEST_DISALLOW_ASSIGN_   - disables operator=.
+//   GTEST_DISALLOW_COPY_AND_ASSIGN_ - disables copy ctor and operator=.
+//   GTEST_MUST_USE_RESULT_   - declares that a function's result must be used.
+//
+// Synchronization:
+//   Mutex, MutexLock, ThreadLocal, GetThreadCount()
+//                  - synchronization primitives.
+//   GTEST_IS_THREADSAFE - defined to 1 to indicate that the above
+//                         synchronization primitives have real implementations
+//                         and Google Test is thread-safe; or 0 otherwise.
+//
+// Template meta programming:
+//   is_pointer     - as in TR1; needed on Symbian and IBM XL C/C++ only.
+//   IteratorTraits - partial implementation of std::iterator_traits, which
+//                    is not available in libCstd when compiled with Sun C++.
+//
+// Smart pointers:
+//   scoped_ptr     - as in TR2.
+//
+// Regular expressions:
+//   RE             - a simple regular expression class using the POSIX
+//                    Extended Regular Expression syntax on UNIX-like
+//                    platforms, or a reduced regular expression syntax on
+//                    other platforms, including Windows.
+//
+// Logging:
+//   GTEST_LOG_()   - logs messages at the specified severity level.
+//   LogToStderr()  - directs all log messages to stderr.
+//   FlushInfoLog() - flushes informational log messages.
+//
+// Stdout and stderr capturing:
+//   CaptureStdout()     - starts capturing stdout.
+//   GetCapturedStdout() - stops capturing stdout and returns the captured
+//                         string.
+//   CaptureStderr()     - starts capturing stderr.
+//   GetCapturedStderr() - stops capturing stderr and returns the captured
+//                         string.
+//
+// Integer types:
+//   TypeWithSize   - maps a size in bytes to an int type of that size.
+//   Int32, UInt32, Int64, UInt64, TimeInMillis
+//                  - integers of known sizes.
+//   BiggestInt     - the biggest signed integer type.
+//
+// Command-line utilities:
+//   GTEST_FLAG()       - references a flag.
+//   GTEST_DECLARE_*()  - declares a flag.
+//   GTEST_DEFINE_*()   - defines a flag.
+//   GetArgvs()         - returns the command line as a vector of strings.
+//
+// Environment variable utilities:
+//   GetEnv()             - gets the value of an environment variable.
+//   BoolFromGTestEnv()   - parses a bool environment variable.
+//   Int32FromGTestEnv()  - parses an Int32 environment variable.
+//   StringFromGTestEnv() - parses a string environment variable.
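+//
+// For instance, GTEST_FLAG(output) names the flag variable behind
+// --gtest_output, and BoolFromGTestEnv("print_time", true) reads the
+// GTEST_PRINT_TIME environment variable, defaulting to true (illustrative).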
+
+#include <ctype.h>   // for isspace, etc
+#include <stddef.h>  // for ptrdiff_t
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#ifndef _WIN32_WCE
+# include <sys/types.h>
+# include <sys/stat.h>
+#endif  // !_WIN32_WCE
+
+#include <iostream>  // NOLINT
+#include <sstream>  // NOLINT
+#include <string>  // NOLINT
+
+#define GTEST_DEV_EMAIL_ "googletestframework@@googlegroups.com"
+#define GTEST_FLAG_PREFIX_ "gtest_"
+#define GTEST_FLAG_PREFIX_DASH_ "gtest-"
+#define GTEST_FLAG_PREFIX_UPPER_ "GTEST_"
+#define GTEST_NAME_ "Google Test"
+#define GTEST_PROJECT_URL_ "http://code.google.com/p/googletest/"
+
+// Determines the version of gcc that is used to compile this.
+#ifdef __GNUC__
+// 40302 means version 4.3.2.
+# define GTEST_GCC_VER_ \
+    (__GNUC__*10000 + __GNUC_MINOR__*100 + __GNUC_PATCHLEVEL__)
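+// For example, under GCC 4.6.1 this evaluates to
+// 4*10000 + 6*100 + 1 = 40601 (illustrative arithmetic only).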
+#endif  // __GNUC__
+
+// Determines the platform on which Google Test is compiled.
+#ifdef __CYGWIN__
+# define GTEST_OS_CYGWIN 1
+#elif defined __SYMBIAN32__
+# define GTEST_OS_SYMBIAN 1
+#elif defined _WIN32
+# define GTEST_OS_WINDOWS 1
+# ifdef _WIN32_WCE
+#  define GTEST_OS_WINDOWS_MOBILE 1
+# elif defined(__MINGW__) || defined(__MINGW32__)
+#  define GTEST_OS_WINDOWS_MINGW 1
+# else
+#  define GTEST_OS_WINDOWS_DESKTOP 1
+# endif  // _WIN32_WCE
+#elif defined __APPLE__
+# define GTEST_OS_MAC 1
+#elif defined __linux__
+# define GTEST_OS_LINUX 1
+# ifdef ANDROID
+#  define GTEST_OS_LINUX_ANDROID 1
+# endif  // ANDROID
+#elif defined __MVS__
+# define GTEST_OS_ZOS 1
+#elif defined(__sun) && defined(__SVR4)
+# define GTEST_OS_SOLARIS 1
+#elif defined(_AIX)
+# define GTEST_OS_AIX 1
+#elif defined(__hpux)
+# define GTEST_OS_HPUX 1
+#elif defined __native_client__
+# define GTEST_OS_NACL 1
+#endif  // __CYGWIN__
+
+// Brings in definitions for functions used in the testing::internal::posix
+// namespace (read, write, close, chdir, isatty, stat). We do not currently
+// use them on Windows Mobile.
+#if !GTEST_OS_WINDOWS
+// This assumes that non-Windows OSes provide unistd.h. For OSes where this
+// is not the case, we need to include headers that provide the functions
+// mentioned above.
+# include <unistd.h>
+# if !GTEST_OS_NACL
+// TODO(vladl at google.com): Remove this condition when Native Client SDK adds
+// strings.h (tracked in
+// http://code.google.com/p/nativeclient/issues/detail?id=1175).
+#  include <strings.h>  // Native Client doesn't provide strings.h.
+# endif
+#elif !GTEST_OS_WINDOWS_MOBILE
+# include <direct.h>
+# include <io.h>
+#endif
+
+// Determines whether Google Test can use POSIX regular expressions.
+#ifndef GTEST_HAS_POSIX_RE
+# define GTEST_HAS_POSIX_RE (!GTEST_OS_WINDOWS)
+#endif
+
+#if GTEST_HAS_POSIX_RE
+
+// On some platforms, <regex.h> needs someone to define size_t, and
+// won't compile otherwise.  We can #include it here as we already
+// included <stdlib.h>, which is guaranteed to define size_t through
+// <stddef.h>.
+# include <regex.h>  // NOLINT
+
+# define GTEST_USES_POSIX_RE 1
+
+#elif GTEST_OS_WINDOWS
+
+// <regex.h> is not available on Windows.  Use our own simple regex
+// implementation instead.
+# define GTEST_USES_SIMPLE_RE 1
+
+#else
+
+// <regex.h> may not be available on this platform.  Use our own
+// simple regex implementation instead.
+# define GTEST_USES_SIMPLE_RE 1
+
+#endif  // GTEST_HAS_POSIX_RE
+
+#ifndef GTEST_HAS_EXCEPTIONS
+// The user didn't tell us whether exceptions are enabled, so we need
+// to figure it out.
+# if defined(_MSC_VER) || defined(__BORLANDC__)
+// MSVC's and C++Builder's implementations of the STL use the _HAS_EXCEPTIONS
+// macro to enable exceptions, so we'll do the same.
+// Assumes that exceptions are enabled by default.
+#  ifndef _HAS_EXCEPTIONS
+#   define _HAS_EXCEPTIONS 1
+#  endif  // _HAS_EXCEPTIONS
+#  define GTEST_HAS_EXCEPTIONS _HAS_EXCEPTIONS
+# elif defined(__GNUC__) && __EXCEPTIONS
+// gcc defines __EXCEPTIONS to 1 iff exceptions are enabled.
+#  define GTEST_HAS_EXCEPTIONS 1
+# elif defined(__SUNPRO_CC)
+// Sun Pro CC supports exceptions.  However, there is no compile-time way of
+// detecting whether they are enabled or not.  Therefore, we assume that
+// they are enabled unless the user tells us otherwise.
+#  define GTEST_HAS_EXCEPTIONS 1
+# elif defined(__IBMCPP__) && __EXCEPTIONS
+// xlC defines __EXCEPTIONS to 1 iff exceptions are enabled.
+#  define GTEST_HAS_EXCEPTIONS 1
+# elif defined(__HP_aCC)
+// Exception handling is in effect by default in HP aCC compiler. It has to
+// be turned off by the +noeh compiler option if desired.
+#  define GTEST_HAS_EXCEPTIONS 1
+# else
+// For other compilers, we assume exceptions are disabled to be
+// conservative.
+#  define GTEST_HAS_EXCEPTIONS 0
+# endif  // defined(_MSC_VER) || defined(__BORLANDC__)
+#endif  // GTEST_HAS_EXCEPTIONS
+
+#if !defined(GTEST_HAS_STD_STRING)
+// Even though we don't use this macro any longer, we keep it in case
+// some clients still depend on it.
+# define GTEST_HAS_STD_STRING 1
+#elif !GTEST_HAS_STD_STRING
+// The user told us that ::std::string isn't available.
+# error "Google Test cannot be used where ::std::string isn't available."
+#endif  // !defined(GTEST_HAS_STD_STRING)
+
+#ifndef GTEST_HAS_GLOBAL_STRING
+// The user didn't tell us whether ::string is available, so we need
+// to figure it out.
+
+# define GTEST_HAS_GLOBAL_STRING 0
+
+#endif  // GTEST_HAS_GLOBAL_STRING
+
+#ifndef GTEST_HAS_STD_WSTRING
+// The user didn't tell us whether ::std::wstring is available, so we need
+// to figure it out.
+// TODO(wan at google.com): use autoconf to detect whether ::std::wstring
+//   is available.
+
+// Cygwin 1.7 and below doesn't support ::std::wstring.
+// Solaris' libc++ doesn't support it either.  Android has
+// no support for it at least as recent as Froyo (2.2).
+# define GTEST_HAS_STD_WSTRING \
+    (!(GTEST_OS_LINUX_ANDROID || GTEST_OS_CYGWIN || GTEST_OS_SOLARIS))
+
+#endif  // GTEST_HAS_STD_WSTRING
+
+#ifndef GTEST_HAS_GLOBAL_WSTRING
+// The user didn't tell us whether ::wstring is available, so we need
+// to figure it out.
+# define GTEST_HAS_GLOBAL_WSTRING \
+    (GTEST_HAS_STD_WSTRING && GTEST_HAS_GLOBAL_STRING)
+#endif  // GTEST_HAS_GLOBAL_WSTRING
+
+// Determines whether RTTI is available.
+#ifndef GTEST_HAS_RTTI
+// The user didn't tell us whether RTTI is enabled, so we need to
+// figure it out.
+
+# ifdef _MSC_VER
+
+#  ifdef _CPPRTTI  // MSVC defines this macro iff RTTI is enabled.
+#   define GTEST_HAS_RTTI 1
+#  else
+#   define GTEST_HAS_RTTI 0
+#  endif
+
+// Starting with version 4.3.2, gcc defines __GXX_RTTI iff RTTI is enabled.
+# elif defined(__GNUC__) && (GTEST_GCC_VER_ >= 40302)
+
+#  ifdef __GXX_RTTI
+#   define GTEST_HAS_RTTI 1
+#  else
+#   define GTEST_HAS_RTTI 0
+#  endif  // __GXX_RTTI
+
+// Starting with version 9.0 IBM Visual Age defines __RTTI_ALL__ to 1 if
+// both the typeid and dynamic_cast features are present.
+# elif defined(__IBMCPP__) && (__IBMCPP__ >= 900)
+
+#  ifdef __RTTI_ALL__
+#   define GTEST_HAS_RTTI 1
+#  else
+#   define GTEST_HAS_RTTI 0
+#  endif
+
+# else
+
+// For all other compilers, we assume RTTI is enabled.
+#  define GTEST_HAS_RTTI 1
+
+# endif  // _MSC_VER
+
+#endif  // GTEST_HAS_RTTI
+
+// It's this header's responsibility to #include <typeinfo> when RTTI
+// is enabled.
+#if GTEST_HAS_RTTI
+# include <typeinfo>
+#endif
+
+// Determines whether Google Test can use the pthreads library.
+#ifndef GTEST_HAS_PTHREAD
+// The user didn't tell us explicitly, so we assume pthreads support is
+// available on Linux, Mac OS X, and HP-UX.
+//
+// To disable threading support in Google Test, add -DGTEST_HAS_PTHREAD=0
+// to your compiler flags.
+# define GTEST_HAS_PTHREAD (GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_HPUX)
+#endif  // GTEST_HAS_PTHREAD
+
+#if GTEST_HAS_PTHREAD
+// gtest-port.h guarantees to #include <pthread.h> when GTEST_HAS_PTHREAD is
+// true.
+# include <pthread.h>  // NOLINT
+
+// For timespec and nanosleep, used below.
+# include <time.h>  // NOLINT
+#endif
+
+// Determines whether Google Test can use tr1/tuple.  You can define
+// this macro to 0 to prevent Google Test from using tuple (any
+// feature depending on tuple will be disabled in this mode).
+#ifndef GTEST_HAS_TR1_TUPLE
+// The user didn't tell us not to do it, so we assume it's OK.
+# define GTEST_HAS_TR1_TUPLE 1
+#endif  // GTEST_HAS_TR1_TUPLE
+
+// Determines whether Google Test's own tr1 tuple implementation
+// should be used.
+#ifndef GTEST_USE_OWN_TR1_TUPLE
+// The user didn't tell us, so we need to figure it out.
+
+// We use our own TR1 tuple if we aren't sure the user has an
+// implementation of it already.  At this time, GCC 4.0.0+ and MSVC
+// 2010 are the only mainstream compilers that come with a TR1 tuple
+// implementation.  NVIDIA's CUDA NVCC compiler pretends to be GCC by
+// defining __GNUC__ and friends, but cannot compile GCC's tuple
+// implementation.  MSVC 2008 (9.0) provides TR1 tuple in a 323 MB
+// Feature Pack download, which we cannot assume the user has.
+# if (defined(__GNUC__) && !defined(__CUDACC__) && (GTEST_GCC_VER_ >= 40000)) \
+    || _MSC_VER >= 1600
+#  define GTEST_USE_OWN_TR1_TUPLE 0
+# else
+#  define GTEST_USE_OWN_TR1_TUPLE 1
+# endif
+
+#endif  // GTEST_USE_OWN_TR1_TUPLE
+
+// To avoid conditional compilation everywhere, we make it
+// gtest-port.h's responsibility to #include the header implementing
+// tr1/tuple.
+#if GTEST_HAS_TR1_TUPLE
+
+# if GTEST_USE_OWN_TR1_TUPLE
+// This file was GENERATED by a script.  DO NOT EDIT BY HAND!!!
+
+// Copyright 2009 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan at google.com (Zhanyong Wan)
+
+// Implements a subset of TR1 tuple needed by Google Test and Google Mock.
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TUPLE_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TUPLE_H_
+
+#include <utility>  // For ::std::pair.
+
+// The compiler used in Symbian has a bug that prevents us from declaring the
+// tuple template as a friend (it complains that tuple is redefined).  This
+// hack bypasses the bug by declaring the members that should otherwise be
+// private as public.
+// Sun Studio versions < 12 also have the above bug.
+#if defined(__SYMBIAN32__) || (defined(__SUNPRO_CC) && __SUNPRO_CC < 0x590)
+# define GTEST_DECLARE_TUPLE_AS_FRIEND_ public:
+#else
+# define GTEST_DECLARE_TUPLE_AS_FRIEND_ \
+    template <GTEST_10_TYPENAMES_(U)> friend class tuple; \
+   private:
+#endif
+
+// GTEST_n_TUPLE_(T) is the type of an n-tuple.
+#define GTEST_0_TUPLE_(T) tuple<>
+#define GTEST_1_TUPLE_(T) tuple<T##0, void, void, void, void, void, void, \
+    void, void, void>
+#define GTEST_2_TUPLE_(T) tuple<T##0, T##1, void, void, void, void, void, \
+    void, void, void>
+#define GTEST_3_TUPLE_(T) tuple<T##0, T##1, T##2, void, void, void, void, \
+    void, void, void>
+#define GTEST_4_TUPLE_(T) tuple<T##0, T##1, T##2, T##3, void, void, void, \
+    void, void, void>
+#define GTEST_5_TUPLE_(T) tuple<T##0, T##1, T##2, T##3, T##4, void, void, \
+    void, void, void>
+#define GTEST_6_TUPLE_(T) tuple<T##0, T##1, T##2, T##3, T##4, T##5, void, \
+    void, void, void>
+#define GTEST_7_TUPLE_(T) tuple<T##0, T##1, T##2, T##3, T##4, T##5, T##6, \
+    void, void, void>
+#define GTEST_8_TUPLE_(T) tuple<T##0, T##1, T##2, T##3, T##4, T##5, T##6, \
+    T##7, void, void>
+#define GTEST_9_TUPLE_(T) tuple<T##0, T##1, T##2, T##3, T##4, T##5, T##6, \
+    T##7, T##8, void>
+#define GTEST_10_TUPLE_(T) tuple<T##0, T##1, T##2, T##3, T##4, T##5, T##6, \
+    T##7, T##8, T##9>
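+// For instance, GTEST_2_TUPLE_(T) expands to
+//   tuple<T0, T1, void, void, void, void, void, void, void, void>;
+// unused slots are padded with void (illustrative expansion).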
+
+// GTEST_n_TYPENAMES_(T) declares a list of n typenames.
+#define GTEST_0_TYPENAMES_(T)
+#define GTEST_1_TYPENAMES_(T) typename T##0
+#define GTEST_2_TYPENAMES_(T) typename T##0, typename T##1
+#define GTEST_3_TYPENAMES_(T) typename T##0, typename T##1, typename T##2
+#define GTEST_4_TYPENAMES_(T) typename T##0, typename T##1, typename T##2, \
+    typename T##3
+#define GTEST_5_TYPENAMES_(T) typename T##0, typename T##1, typename T##2, \
+    typename T##3, typename T##4
+#define GTEST_6_TYPENAMES_(T) typename T##0, typename T##1, typename T##2, \
+    typename T##3, typename T##4, typename T##5
+#define GTEST_7_TYPENAMES_(T) typename T##0, typename T##1, typename T##2, \
+    typename T##3, typename T##4, typename T##5, typename T##6
+#define GTEST_8_TYPENAMES_(T) typename T##0, typename T##1, typename T##2, \
+    typename T##3, typename T##4, typename T##5, typename T##6, typename T##7
+#define GTEST_9_TYPENAMES_(T) typename T##0, typename T##1, typename T##2, \
+    typename T##3, typename T##4, typename T##5, typename T##6, \
+    typename T##7, typename T##8
+#define GTEST_10_TYPENAMES_(T) typename T##0, typename T##1, typename T##2, \
+    typename T##3, typename T##4, typename T##5, typename T##6, \
+    typename T##7, typename T##8, typename T##9
+
+// In theory, defining stuff in the ::std namespace is undefined
+// behavior.  We can do this as we are playing the role of a standard
+// library vendor.
+namespace std {
+namespace tr1 {
+
+template <typename T0 = void, typename T1 = void, typename T2 = void,
+    typename T3 = void, typename T4 = void, typename T5 = void,
+    typename T6 = void, typename T7 = void, typename T8 = void,
+    typename T9 = void>
+class tuple;
+
+// Anything in namespace gtest_internal is Google Test's INTERNAL
+// IMPLEMENTATION DETAIL and MUST NOT BE USED DIRECTLY in user code.
+namespace gtest_internal {
+
+// ByRef<T>::type is T if T is a reference; otherwise it's const T&.
+template <typename T>
+struct ByRef { typedef const T& type; };  // NOLINT
+template <typename T>
+struct ByRef<T&> { typedef T& type; };  // NOLINT
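+// For example, ByRef<int>::type is const int&, while ByRef<int&>::type is
+// int& (illustrative).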
+
+// A handy wrapper for ByRef.
+#define GTEST_BY_REF_(T) typename ::std::tr1::gtest_internal::ByRef<T>::type
+
+// AddRef<T>::type is T if T is a reference; otherwise it's T&.  This
+// is the same as tr1::add_reference<T>::type.
+template <typename T>
+struct AddRef { typedef T& type; };  // NOLINT
+template <typename T>
+struct AddRef<T&> { typedef T& type; };  // NOLINT
+
+// A handy wrapper for AddRef.
+#define GTEST_ADD_REF_(T) typename ::std::tr1::gtest_internal::AddRef<T>::type
+
+// A helper for implementing get<k>().
+template <int k> class Get;
+
+// A helper for implementing tuple_element<k, T>.  kIndexValid is true
+// iff k < the number of fields in tuple type T.
+template <bool kIndexValid, int kIndex, class Tuple>
+struct TupleElement;
+
+template <GTEST_10_TYPENAMES_(T)>
+struct TupleElement<true, 0, GTEST_10_TUPLE_(T)> { typedef T0 type; };
+
+template <GTEST_10_TYPENAMES_(T)>
+struct TupleElement<true, 1, GTEST_10_TUPLE_(T)> { typedef T1 type; };
+
+template <GTEST_10_TYPENAMES_(T)>
+struct TupleElement<true, 2, GTEST_10_TUPLE_(T)> { typedef T2 type; };
+
+template <GTEST_10_TYPENAMES_(T)>
+struct TupleElement<true, 3, GTEST_10_TUPLE_(T)> { typedef T3 type; };
+
+template <GTEST_10_TYPENAMES_(T)>
+struct TupleElement<true, 4, GTEST_10_TUPLE_(T)> { typedef T4 type; };
+
+template <GTEST_10_TYPENAMES_(T)>
+struct TupleElement<true, 5, GTEST_10_TUPLE_(T)> { typedef T5 type; };
+
+template <GTEST_10_TYPENAMES_(T)>
+struct TupleElement<true, 6, GTEST_10_TUPLE_(T)> { typedef T6 type; };
+
+template <GTEST_10_TYPENAMES_(T)>
+struct TupleElement<true, 7, GTEST_10_TUPLE_(T)> { typedef T7 type; };
+
+template <GTEST_10_TYPENAMES_(T)>
+struct TupleElement<true, 8, GTEST_10_TUPLE_(T)> { typedef T8 type; };
+
+template <GTEST_10_TYPENAMES_(T)>
+struct TupleElement<true, 9, GTEST_10_TUPLE_(T)> { typedef T9 type; };
+
+}  // namespace gtest_internal
+
+template <>
+class tuple<> {
+ public:
+  tuple() {}
+  tuple(const tuple& /* t */)  {}
+  tuple& operator=(const tuple& /* t */) { return *this; }
+};
+
+template <GTEST_1_TYPENAMES_(T)>
+class GTEST_1_TUPLE_(T) {
+ public:
+  template <int k> friend class gtest_internal::Get;
+
+  tuple() : f0_() {}
+
+  explicit tuple(GTEST_BY_REF_(T0) f0) : f0_(f0) {}
+
+  tuple(const tuple& t) : f0_(t.f0_) {}
+
+  template <GTEST_1_TYPENAMES_(U)>
+  tuple(const GTEST_1_TUPLE_(U)& t) : f0_(t.f0_) {}
+
+  tuple& operator=(const tuple& t) { return CopyFrom(t); }
+
+  template <GTEST_1_TYPENAMES_(U)>
+  tuple& operator=(const GTEST_1_TUPLE_(U)& t) {
+    return CopyFrom(t);
+  }
+
+  GTEST_DECLARE_TUPLE_AS_FRIEND_
+
+  template <GTEST_1_TYPENAMES_(U)>
+  tuple& CopyFrom(const GTEST_1_TUPLE_(U)& t) {
+    f0_ = t.f0_;
+    return *this;
+  }
+
+  T0 f0_;
+};
+
+template <GTEST_2_TYPENAMES_(T)>
+class GTEST_2_TUPLE_(T) {
+ public:
+  template <int k> friend class gtest_internal::Get;
+
+  tuple() : f0_(), f1_() {}
+
+  explicit tuple(GTEST_BY_REF_(T0) f0, GTEST_BY_REF_(T1) f1) : f0_(f0),
+      f1_(f1) {}
+
+  tuple(const tuple& t) : f0_(t.f0_), f1_(t.f1_) {}
+
+  template <GTEST_2_TYPENAMES_(U)>
+  tuple(const GTEST_2_TUPLE_(U)& t) : f0_(t.f0_), f1_(t.f1_) {}
+  template <typename U0, typename U1>
+  tuple(const ::std::pair<U0, U1>& p) : f0_(p.first), f1_(p.second) {}
+
+  tuple& operator=(const tuple& t) { return CopyFrom(t); }
+
+  template <GTEST_2_TYPENAMES_(U)>
+  tuple& operator=(const GTEST_2_TUPLE_(U)& t) {
+    return CopyFrom(t);
+  }
+  template <typename U0, typename U1>
+  tuple& operator=(const ::std::pair<U0, U1>& p) {
+    f0_ = p.first;
+    f1_ = p.second;
+    return *this;
+  }
+
+  GTEST_DECLARE_TUPLE_AS_FRIEND_
+
+  template <GTEST_2_TYPENAMES_(U)>
+  tuple& CopyFrom(const GTEST_2_TUPLE_(U)& t) {
+    f0_ = t.f0_;
+    f1_ = t.f1_;
+    return *this;
+  }
+
+  T0 f0_;
+  T1 f1_;
+};
+
+template <GTEST_3_TYPENAMES_(T)>
+class GTEST_3_TUPLE_(T) {
+ public:
+  template <int k> friend class gtest_internal::Get;
+
+  tuple() : f0_(), f1_(), f2_() {}
+
+  explicit tuple(GTEST_BY_REF_(T0) f0, GTEST_BY_REF_(T1) f1,
+      GTEST_BY_REF_(T2) f2) : f0_(f0), f1_(f1), f2_(f2) {}
+
+  tuple(const tuple& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_) {}
+
+  template <GTEST_3_TYPENAMES_(U)>
+  tuple(const GTEST_3_TUPLE_(U)& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_) {}
+
+  tuple& operator=(const tuple& t) { return CopyFrom(t); }
+
+  template <GTEST_3_TYPENAMES_(U)>
+  tuple& operator=(const GTEST_3_TUPLE_(U)& t) {
+    return CopyFrom(t);
+  }
+
+  GTEST_DECLARE_TUPLE_AS_FRIEND_
+
+  template <GTEST_3_TYPENAMES_(U)>
+  tuple& CopyFrom(const GTEST_3_TUPLE_(U)& t) {
+    f0_ = t.f0_;
+    f1_ = t.f1_;
+    f2_ = t.f2_;
+    return *this;
+  }
+
+  T0 f0_;
+  T1 f1_;
+  T2 f2_;
+};
+
+template <GTEST_4_TYPENAMES_(T)>
+class GTEST_4_TUPLE_(T) {
+ public:
+  template <int k> friend class gtest_internal::Get;
+
+  tuple() : f0_(), f1_(), f2_(), f3_() {}
+
+  explicit tuple(GTEST_BY_REF_(T0) f0, GTEST_BY_REF_(T1) f1,
+      GTEST_BY_REF_(T2) f2, GTEST_BY_REF_(T3) f3) : f0_(f0), f1_(f1), f2_(f2),
+      f3_(f3) {}
+
+  tuple(const tuple& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_), f3_(t.f3_) {}
+
+  template <GTEST_4_TYPENAMES_(U)>
+  tuple(const GTEST_4_TUPLE_(U)& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_),
+      f3_(t.f3_) {}
+
+  tuple& operator=(const tuple& t) { return CopyFrom(t); }
+
+  template <GTEST_4_TYPENAMES_(U)>
+  tuple& operator=(const GTEST_4_TUPLE_(U)& t) {
+    return CopyFrom(t);
+  }
+
+  GTEST_DECLARE_TUPLE_AS_FRIEND_
+
+  template <GTEST_4_TYPENAMES_(U)>
+  tuple& CopyFrom(const GTEST_4_TUPLE_(U)& t) {
+    f0_ = t.f0_;
+    f1_ = t.f1_;
+    f2_ = t.f2_;
+    f3_ = t.f3_;
+    return *this;
+  }
+
+  T0 f0_;
+  T1 f1_;
+  T2 f2_;
+  T3 f3_;
+};
+
+template <GTEST_5_TYPENAMES_(T)>
+class GTEST_5_TUPLE_(T) {
+ public:
+  template <int k> friend class gtest_internal::Get;
+
+  tuple() : f0_(), f1_(), f2_(), f3_(), f4_() {}
+
+  explicit tuple(GTEST_BY_REF_(T0) f0, GTEST_BY_REF_(T1) f1,
+      GTEST_BY_REF_(T2) f2, GTEST_BY_REF_(T3) f3,
+      GTEST_BY_REF_(T4) f4) : f0_(f0), f1_(f1), f2_(f2), f3_(f3), f4_(f4) {}
+
+  tuple(const tuple& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_), f3_(t.f3_),
+      f4_(t.f4_) {}
+
+  template <GTEST_5_TYPENAMES_(U)>
+  tuple(const GTEST_5_TUPLE_(U)& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_),
+      f3_(t.f3_), f4_(t.f4_) {}
+
+  tuple& operator=(const tuple& t) { return CopyFrom(t); }
+
+  template <GTEST_5_TYPENAMES_(U)>
+  tuple& operator=(const GTEST_5_TUPLE_(U)& t) {
+    return CopyFrom(t);
+  }
+
+  GTEST_DECLARE_TUPLE_AS_FRIEND_
+
+  template <GTEST_5_TYPENAMES_(U)>
+  tuple& CopyFrom(const GTEST_5_TUPLE_(U)& t) {
+    f0_ = t.f0_;
+    f1_ = t.f1_;
+    f2_ = t.f2_;
+    f3_ = t.f3_;
+    f4_ = t.f4_;
+    return *this;
+  }
+
+  T0 f0_;
+  T1 f1_;
+  T2 f2_;
+  T3 f3_;
+  T4 f4_;
+};
+
+template <GTEST_6_TYPENAMES_(T)>
+class GTEST_6_TUPLE_(T) {
+ public:
+  template <int k> friend class gtest_internal::Get;
+
+  tuple() : f0_(), f1_(), f2_(), f3_(), f4_(), f5_() {}
+
+  explicit tuple(GTEST_BY_REF_(T0) f0, GTEST_BY_REF_(T1) f1,
+      GTEST_BY_REF_(T2) f2, GTEST_BY_REF_(T3) f3, GTEST_BY_REF_(T4) f4,
+      GTEST_BY_REF_(T5) f5) : f0_(f0), f1_(f1), f2_(f2), f3_(f3), f4_(f4),
+      f5_(f5) {}
+
+  tuple(const tuple& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_), f3_(t.f3_),
+      f4_(t.f4_), f5_(t.f5_) {}
+
+  template <GTEST_6_TYPENAMES_(U)>
+  tuple(const GTEST_6_TUPLE_(U)& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_),
+      f3_(t.f3_), f4_(t.f4_), f5_(t.f5_) {}
+
+  tuple& operator=(const tuple& t) { return CopyFrom(t); }
+
+  template <GTEST_6_TYPENAMES_(U)>
+  tuple& operator=(const GTEST_6_TUPLE_(U)& t) {
+    return CopyFrom(t);
+  }
+
+  GTEST_DECLARE_TUPLE_AS_FRIEND_
+
+  template <GTEST_6_TYPENAMES_(U)>
+  tuple& CopyFrom(const GTEST_6_TUPLE_(U)& t) {
+    f0_ = t.f0_;
+    f1_ = t.f1_;
+    f2_ = t.f2_;
+    f3_ = t.f3_;
+    f4_ = t.f4_;
+    f5_ = t.f5_;
+    return *this;
+  }
+
+  T0 f0_;
+  T1 f1_;
+  T2 f2_;
+  T3 f3_;
+  T4 f4_;
+  T5 f5_;
+};
+
+template <GTEST_7_TYPENAMES_(T)>
+class GTEST_7_TUPLE_(T) {
+ public:
+  template <int k> friend class gtest_internal::Get;
+
+  tuple() : f0_(), f1_(), f2_(), f3_(), f4_(), f5_(), f6_() {}
+
+  explicit tuple(GTEST_BY_REF_(T0) f0, GTEST_BY_REF_(T1) f1,
+      GTEST_BY_REF_(T2) f2, GTEST_BY_REF_(T3) f3, GTEST_BY_REF_(T4) f4,
+      GTEST_BY_REF_(T5) f5, GTEST_BY_REF_(T6) f6) : f0_(f0), f1_(f1), f2_(f2),
+      f3_(f3), f4_(f4), f5_(f5), f6_(f6) {}
+
+  tuple(const tuple& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_), f3_(t.f3_),
+      f4_(t.f4_), f5_(t.f5_), f6_(t.f6_) {}
+
+  template <GTEST_7_TYPENAMES_(U)>
+  tuple(const GTEST_7_TUPLE_(U)& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_),
+      f3_(t.f3_), f4_(t.f4_), f5_(t.f5_), f6_(t.f6_) {}
+
+  tuple& operator=(const tuple& t) { return CopyFrom(t); }
+
+  template <GTEST_7_TYPENAMES_(U)>
+  tuple& operator=(const GTEST_7_TUPLE_(U)& t) {
+    return CopyFrom(t);
+  }
+
+  GTEST_DECLARE_TUPLE_AS_FRIEND_
+
+  template <GTEST_7_TYPENAMES_(U)>
+  tuple& CopyFrom(const GTEST_7_TUPLE_(U)& t) {
+    f0_ = t.f0_;
+    f1_ = t.f1_;
+    f2_ = t.f2_;
+    f3_ = t.f3_;
+    f4_ = t.f4_;
+    f5_ = t.f5_;
+    f6_ = t.f6_;
+    return *this;
+  }
+
+  T0 f0_;
+  T1 f1_;
+  T2 f2_;
+  T3 f3_;
+  T4 f4_;
+  T5 f5_;
+  T6 f6_;
+};
+
+template <GTEST_8_TYPENAMES_(T)>
+class GTEST_8_TUPLE_(T) {
+ public:
+  template <int k> friend class gtest_internal::Get;
+
+  tuple() : f0_(), f1_(), f2_(), f3_(), f4_(), f5_(), f6_(), f7_() {}
+
+  explicit tuple(GTEST_BY_REF_(T0) f0, GTEST_BY_REF_(T1) f1,
+      GTEST_BY_REF_(T2) f2, GTEST_BY_REF_(T3) f3, GTEST_BY_REF_(T4) f4,
+      GTEST_BY_REF_(T5) f5, GTEST_BY_REF_(T6) f6,
+      GTEST_BY_REF_(T7) f7) : f0_(f0), f1_(f1), f2_(f2), f3_(f3), f4_(f4),
+      f5_(f5), f6_(f6), f7_(f7) {}
+
+  tuple(const tuple& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_), f3_(t.f3_),
+      f4_(t.f4_), f5_(t.f5_), f6_(t.f6_), f7_(t.f7_) {}
+
+  template <GTEST_8_TYPENAMES_(U)>
+  tuple(const GTEST_8_TUPLE_(U)& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_),
+      f3_(t.f3_), f4_(t.f4_), f5_(t.f5_), f6_(t.f6_), f7_(t.f7_) {}
+
+  tuple& operator=(const tuple& t) { return CopyFrom(t); }
+
+  template <GTEST_8_TYPENAMES_(U)>
+  tuple& operator=(const GTEST_8_TUPLE_(U)& t) {
+    return CopyFrom(t);
+  }
+
+  GTEST_DECLARE_TUPLE_AS_FRIEND_
+
+  template <GTEST_8_TYPENAMES_(U)>
+  tuple& CopyFrom(const GTEST_8_TUPLE_(U)& t) {
+    f0_ = t.f0_;
+    f1_ = t.f1_;
+    f2_ = t.f2_;
+    f3_ = t.f3_;
+    f4_ = t.f4_;
+    f5_ = t.f5_;
+    f6_ = t.f6_;
+    f7_ = t.f7_;
+    return *this;
+  }
+
+  T0 f0_;
+  T1 f1_;
+  T2 f2_;
+  T3 f3_;
+  T4 f4_;
+  T5 f5_;
+  T6 f6_;
+  T7 f7_;
+};
+
+template <GTEST_9_TYPENAMES_(T)>
+class GTEST_9_TUPLE_(T) {
+ public:
+  template <int k> friend class gtest_internal::Get;
+
+  tuple() : f0_(), f1_(), f2_(), f3_(), f4_(), f5_(), f6_(), f7_(), f8_() {}
+
+  explicit tuple(GTEST_BY_REF_(T0) f0, GTEST_BY_REF_(T1) f1,
+      GTEST_BY_REF_(T2) f2, GTEST_BY_REF_(T3) f3, GTEST_BY_REF_(T4) f4,
+      GTEST_BY_REF_(T5) f5, GTEST_BY_REF_(T6) f6, GTEST_BY_REF_(T7) f7,
+      GTEST_BY_REF_(T8) f8) : f0_(f0), f1_(f1), f2_(f2), f3_(f3), f4_(f4),
+      f5_(f5), f6_(f6), f7_(f7), f8_(f8) {}
+
+  tuple(const tuple& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_), f3_(t.f3_),
+      f4_(t.f4_), f5_(t.f5_), f6_(t.f6_), f7_(t.f7_), f8_(t.f8_) {}
+
+  template <GTEST_9_TYPENAMES_(U)>
+  tuple(const GTEST_9_TUPLE_(U)& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_),
+      f3_(t.f3_), f4_(t.f4_), f5_(t.f5_), f6_(t.f6_), f7_(t.f7_), f8_(t.f8_) {}
+
+  tuple& operator=(const tuple& t) { return CopyFrom(t); }
+
+  template <GTEST_9_TYPENAMES_(U)>
+  tuple& operator=(const GTEST_9_TUPLE_(U)& t) {
+    return CopyFrom(t);
+  }
+
+  GTEST_DECLARE_TUPLE_AS_FRIEND_
+
+  template <GTEST_9_TYPENAMES_(U)>
+  tuple& CopyFrom(const GTEST_9_TUPLE_(U)& t) {
+    f0_ = t.f0_;
+    f1_ = t.f1_;
+    f2_ = t.f2_;
+    f3_ = t.f3_;
+    f4_ = t.f4_;
+    f5_ = t.f5_;
+    f6_ = t.f6_;
+    f7_ = t.f7_;
+    f8_ = t.f8_;
+    return *this;
+  }
+
+  T0 f0_;
+  T1 f1_;
+  T2 f2_;
+  T3 f3_;
+  T4 f4_;
+  T5 f5_;
+  T6 f6_;
+  T7 f7_;
+  T8 f8_;
+};
+
+template <GTEST_10_TYPENAMES_(T)>
+class tuple {
+ public:
+  template <int k> friend class gtest_internal::Get;
+
+  tuple() : f0_(), f1_(), f2_(), f3_(), f4_(), f5_(), f6_(), f7_(), f8_(),
+      f9_() {}
+
+  explicit tuple(GTEST_BY_REF_(T0) f0, GTEST_BY_REF_(T1) f1,
+      GTEST_BY_REF_(T2) f2, GTEST_BY_REF_(T3) f3, GTEST_BY_REF_(T4) f4,
+      GTEST_BY_REF_(T5) f5, GTEST_BY_REF_(T6) f6, GTEST_BY_REF_(T7) f7,
+      GTEST_BY_REF_(T8) f8, GTEST_BY_REF_(T9) f9) : f0_(f0), f1_(f1), f2_(f2),
+      f3_(f3), f4_(f4), f5_(f5), f6_(f6), f7_(f7), f8_(f8), f9_(f9) {}
+
+  tuple(const tuple& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_), f3_(t.f3_),
+      f4_(t.f4_), f5_(t.f5_), f6_(t.f6_), f7_(t.f7_), f8_(t.f8_), f9_(t.f9_) {}
+
+  template <GTEST_10_TYPENAMES_(U)>
+  tuple(const GTEST_10_TUPLE_(U)& t) : f0_(t.f0_), f1_(t.f1_), f2_(t.f2_),
+      f3_(t.f3_), f4_(t.f4_), f5_(t.f5_), f6_(t.f6_), f7_(t.f7_), f8_(t.f8_),
+      f9_(t.f9_) {}
+
+  tuple& operator=(const tuple& t) { return CopyFrom(t); }
+
+  template <GTEST_10_TYPENAMES_(U)>
+  tuple& operator=(const GTEST_10_TUPLE_(U)& t) {
+    return CopyFrom(t);
+  }
+
+  GTEST_DECLARE_TUPLE_AS_FRIEND_
+
+  template <GTEST_10_TYPENAMES_(U)>
+  tuple& CopyFrom(const GTEST_10_TUPLE_(U)& t) {
+    f0_ = t.f0_;
+    f1_ = t.f1_;
+    f2_ = t.f2_;
+    f3_ = t.f3_;
+    f4_ = t.f4_;
+    f5_ = t.f5_;
+    f6_ = t.f6_;
+    f7_ = t.f7_;
+    f8_ = t.f8_;
+    f9_ = t.f9_;
+    return *this;
+  }
+
+  T0 f0_;
+  T1 f1_;
+  T2 f2_;
+  T3 f3_;
+  T4 f4_;
+  T5 f5_;
+  T6 f6_;
+  T7 f7_;
+  T8 f8_;
+  T9 f9_;
+};
+
+// 6.1.3.2 Tuple creation functions.
+
+// Known limitations: we don't support passing an
+// std::tr1::reference_wrapper<T> to make_tuple().  And we don't
+// implement tie().
+
+inline tuple<> make_tuple() { return tuple<>(); }
+
+template <GTEST_1_TYPENAMES_(T)>
+inline GTEST_1_TUPLE_(T) make_tuple(const T0& f0) {
+  return GTEST_1_TUPLE_(T)(f0);
+}
+
+template <GTEST_2_TYPENAMES_(T)>
+inline GTEST_2_TUPLE_(T) make_tuple(const T0& f0, const T1& f1) {
+  return GTEST_2_TUPLE_(T)(f0, f1);
+}
+
+template <GTEST_3_TYPENAMES_(T)>
+inline GTEST_3_TUPLE_(T) make_tuple(const T0& f0, const T1& f1, const T2& f2) {
+  return GTEST_3_TUPLE_(T)(f0, f1, f2);
+}
+
+template <GTEST_4_TYPENAMES_(T)>
+inline GTEST_4_TUPLE_(T) make_tuple(const T0& f0, const T1& f1, const T2& f2,
+    const T3& f3) {
+  return GTEST_4_TUPLE_(T)(f0, f1, f2, f3);
+}
+
+template <GTEST_5_TYPENAMES_(T)>
+inline GTEST_5_TUPLE_(T) make_tuple(const T0& f0, const T1& f1, const T2& f2,
+    const T3& f3, const T4& f4) {
+  return GTEST_5_TUPLE_(T)(f0, f1, f2, f3, f4);
+}
+
+template <GTEST_6_TYPENAMES_(T)>
+inline GTEST_6_TUPLE_(T) make_tuple(const T0& f0, const T1& f1, const T2& f2,
+    const T3& f3, const T4& f4, const T5& f5) {
+  return GTEST_6_TUPLE_(T)(f0, f1, f2, f3, f4, f5);
+}
+
+template <GTEST_7_TYPENAMES_(T)>
+inline GTEST_7_TUPLE_(T) make_tuple(const T0& f0, const T1& f1, const T2& f2,
+    const T3& f3, const T4& f4, const T5& f5, const T6& f6) {
+  return GTEST_7_TUPLE_(T)(f0, f1, f2, f3, f4, f5, f6);
+}
+
+template <GTEST_8_TYPENAMES_(T)>
+inline GTEST_8_TUPLE_(T) make_tuple(const T0& f0, const T1& f1, const T2& f2,
+    const T3& f3, const T4& f4, const T5& f5, const T6& f6, const T7& f7) {
+  return GTEST_8_TUPLE_(T)(f0, f1, f2, f3, f4, f5, f6, f7);
+}
+
+template <GTEST_9_TYPENAMES_(T)>
+inline GTEST_9_TUPLE_(T) make_tuple(const T0& f0, const T1& f1, const T2& f2,
+    const T3& f3, const T4& f4, const T5& f5, const T6& f6, const T7& f7,
+    const T8& f8) {
+  return GTEST_9_TUPLE_(T)(f0, f1, f2, f3, f4, f5, f6, f7, f8);
+}
+
+template <GTEST_10_TYPENAMES_(T)>
+inline GTEST_10_TUPLE_(T) make_tuple(const T0& f0, const T1& f1, const T2& f2,
+    const T3& f3, const T4& f4, const T5& f5, const T6& f6, const T7& f7,
+    const T8& f8, const T9& f9) {
+  return GTEST_10_TUPLE_(T)(f0, f1, f2, f3, f4, f5, f6, f7, f8, f9);
+}
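+// Illustrative usage: make_tuple(1, 'a') yields a 2-field tuple whose
+// get<0>() is 1 and get<1>() is 'a'.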
+
+// 6.1.3.3 Tuple helper classes.
+
+template <typename Tuple> struct tuple_size;
+
+template <GTEST_0_TYPENAMES_(T)>
+struct tuple_size<GTEST_0_TUPLE_(T)> { static const int value = 0; };
+
+template <GTEST_1_TYPENAMES_(T)>
+struct tuple_size<GTEST_1_TUPLE_(T)> { static const int value = 1; };
+
+template <GTEST_2_TYPENAMES_(T)>
+struct tuple_size<GTEST_2_TUPLE_(T)> { static const int value = 2; };
+
+template <GTEST_3_TYPENAMES_(T)>
+struct tuple_size<GTEST_3_TUPLE_(T)> { static const int value = 3; };
+
+template <GTEST_4_TYPENAMES_(T)>
+struct tuple_size<GTEST_4_TUPLE_(T)> { static const int value = 4; };
+
+template <GTEST_5_TYPENAMES_(T)>
+struct tuple_size<GTEST_5_TUPLE_(T)> { static const int value = 5; };
+
+template <GTEST_6_TYPENAMES_(T)>
+struct tuple_size<GTEST_6_TUPLE_(T)> { static const int value = 6; };
+
+template <GTEST_7_TYPENAMES_(T)>
+struct tuple_size<GTEST_7_TUPLE_(T)> { static const int value = 7; };
+
+template <GTEST_8_TYPENAMES_(T)>
+struct tuple_size<GTEST_8_TUPLE_(T)> { static const int value = 8; };
+
+template <GTEST_9_TYPENAMES_(T)>
+struct tuple_size<GTEST_9_TUPLE_(T)> { static const int value = 9; };
+
+template <GTEST_10_TYPENAMES_(T)>
+struct tuple_size<GTEST_10_TUPLE_(T)> { static const int value = 10; };
+
+template <int k, class Tuple>
+struct tuple_element {
+  typedef typename gtest_internal::TupleElement<
+      k < (tuple_size<Tuple>::value), k, Tuple>::type type;
+};
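+// For example, tuple_element<1, tuple<int, char> >::type is char
+// (illustrative; the remaining slots of the 10-field tuple default to void).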
+
+#define GTEST_TUPLE_ELEMENT_(k, Tuple) typename tuple_element<k, Tuple >::type
+
+// 6.1.3.4 Element access.
+
+namespace gtest_internal {
+
+template <>
+class Get<0> {
+ public:
+  template <class Tuple>
+  static GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_(0, Tuple))
+  Field(Tuple& t) { return t.f0_; }  // NOLINT
+
+  template <class Tuple>
+  static GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_(0, Tuple))
+  ConstField(const Tuple& t) { return t.f0_; }
+};
+
+template <>
+class Get<1> {
+ public:
+  template <class Tuple>
+  static GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_(1, Tuple))
+  Field(Tuple& t) { return t.f1_; }  // NOLINT
+
+  template <class Tuple>
+  static GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_(1, Tuple))
+  ConstField(const Tuple& t) { return t.f1_; }
+};
+
+template <>
+class Get<2> {
+ public:
+  template <class Tuple>
+  static GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_(2, Tuple))
+  Field(Tuple& t) { return t.f2_; }  // NOLINT
+
+  template <class Tuple>
+  static GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_(2, Tuple))
+  ConstField(const Tuple& t) { return t.f2_; }
+};
+
+template <>
+class Get<3> {
+ public:
+  template <class Tuple>
+  static GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_(3, Tuple))
+  Field(Tuple& t) { return t.f3_; }  // NOLINT
+
+  template <class Tuple>
+  static GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_(3, Tuple))
+  ConstField(const Tuple& t) { return t.f3_; }
+};
+
+template <>
+class Get<4> {
+ public:
+  template <class Tuple>
+  static GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_(4, Tuple))
+  Field(Tuple& t) { return t.f4_; }  // NOLINT
+
+  template <class Tuple>
+  static GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_(4, Tuple))
+  ConstField(const Tuple& t) { return t.f4_; }
+};
+
+template <>
+class Get<5> {
+ public:
+  template <class Tuple>
+  static GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_(5, Tuple))
+  Field(Tuple& t) { return t.f5_; }  // NOLINT
+
+  template <class Tuple>
+  static GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_(5, Tuple))
+  ConstField(const Tuple& t) { return t.f5_; }
+};
+
+template <>
+class Get<6> {
+ public:
+  template <class Tuple>
+  static GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_(6, Tuple))
+  Field(Tuple& t) { return t.f6_; }  // NOLINT
+
+  template <class Tuple>
+  static GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_(6, Tuple))
+  ConstField(const Tuple& t) { return t.f6_; }
+};
+
+template <>
+class Get<7> {
+ public:
+  template <class Tuple>
+  static GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_(7, Tuple))
+  Field(Tuple& t) { return t.f7_; }  // NOLINT
+
+  template <class Tuple>
+  static GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_(7, Tuple))
+  ConstField(const Tuple& t) { return t.f7_; }
+};
+
+template <>
+class Get<8> {
+ public:
+  template <class Tuple>
+  static GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_(8, Tuple))
+  Field(Tuple& t) { return t.f8_; }  // NOLINT
+
+  template <class Tuple>
+  static GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_(8, Tuple))
+  ConstField(const Tuple& t) { return t.f8_; }
+};
+
+template <>
+class Get<9> {
+ public:
+  template <class Tuple>
+  static GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_(9, Tuple))
+  Field(Tuple& t) { return t.f9_; }  // NOLINT
+
+  template <class Tuple>
+  static GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_(9, Tuple))
+  ConstField(const Tuple& t) { return t.f9_; }
+};
+
+}  // namespace gtest_internal
+
+template <int k, GTEST_10_TYPENAMES_(T)>
+GTEST_ADD_REF_(GTEST_TUPLE_ELEMENT_(k, GTEST_10_TUPLE_(T)))
+get(GTEST_10_TUPLE_(T)& t) {
+  return gtest_internal::Get<k>::Field(t);
+}
+
+template <int k, GTEST_10_TYPENAMES_(T)>
+GTEST_BY_REF_(GTEST_TUPLE_ELEMENT_(k,  GTEST_10_TUPLE_(T)))
+get(const GTEST_10_TUPLE_(T)& t) {
+  return gtest_internal::Get<k>::ConstField(t);
+}
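+// Illustrative usage: get<0>(make_tuple(5, 'x')) returns 5 and
+// get<1>(make_tuple(5, 'x')) returns 'x'.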
+
+// 6.1.3.5 Relational operators
+
+// We only implement == and !=, as we don't have a need for the rest yet.
+
+namespace gtest_internal {
+
+// SameSizeTuplePrefixComparator<k, k>::Eq(t1, t2) returns true if the
+// first k fields of t1 equal the first k fields of t2.
+// SameSizeTuplePrefixComparator(k1, k2) would be a compiler error if
+// k1 != k2.
+template <int kSize1, int kSize2>
+struct SameSizeTuplePrefixComparator;
+
+template <>
+struct SameSizeTuplePrefixComparator<0, 0> {
+  template <class Tuple1, class Tuple2>
+  static bool Eq(const Tuple1& /* t1 */, const Tuple2& /* t2 */) {
+    return true;
+  }
+};
+
+template <int k>
+struct SameSizeTuplePrefixComparator<k, k> {
+  template <class Tuple1, class Tuple2>
+  static bool Eq(const Tuple1& t1, const Tuple2& t2) {
+    return SameSizeTuplePrefixComparator<k - 1, k - 1>::Eq(t1, t2) &&
+        ::std::tr1::get<k - 1>(t1) == ::std::tr1::get<k - 1>(t2);
+  }
+};
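+// For example, comparing two 2-field tuples expands to
+//   SameSizeTuplePrefixComparator<1, 1>::Eq(t1, t2) &&
+//       ::std::tr1::get<1>(t1) == ::std::tr1::get<1>(t2),
+// which bottoms out at the <0, 0> base case above (illustrative).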
+
+}  // namespace gtest_internal
+
+template <GTEST_10_TYPENAMES_(T), GTEST_10_TYPENAMES_(U)>
+inline bool operator==(const GTEST_10_TUPLE_(T)& t,
+                       const GTEST_10_TUPLE_(U)& u) {
+  return gtest_internal::SameSizeTuplePrefixComparator<
+      tuple_size<GTEST_10_TUPLE_(T)>::value,
+      tuple_size<GTEST_10_TUPLE_(U)>::value>::Eq(t, u);
+}
+
+template <GTEST_10_TYPENAMES_(T), GTEST_10_TYPENAMES_(U)>
+inline bool operator!=(const GTEST_10_TUPLE_(T)& t,
+                       const GTEST_10_TUPLE_(U)& u) { return !(t == u); }
+
+// 6.1.4 Pairs.
+// Unimplemented.
+
+}  // namespace tr1
+}  // namespace std
+
+#undef GTEST_0_TUPLE_
+#undef GTEST_1_TUPLE_
+#undef GTEST_2_TUPLE_
+#undef GTEST_3_TUPLE_
+#undef GTEST_4_TUPLE_
+#undef GTEST_5_TUPLE_
+#undef GTEST_6_TUPLE_
+#undef GTEST_7_TUPLE_
+#undef GTEST_8_TUPLE_
+#undef GTEST_9_TUPLE_
+#undef GTEST_10_TUPLE_
+
+#undef GTEST_0_TYPENAMES_
+#undef GTEST_1_TYPENAMES_
+#undef GTEST_2_TYPENAMES_
+#undef GTEST_3_TYPENAMES_
+#undef GTEST_4_TYPENAMES_
+#undef GTEST_5_TYPENAMES_
+#undef GTEST_6_TYPENAMES_
+#undef GTEST_7_TYPENAMES_
+#undef GTEST_8_TYPENAMES_
+#undef GTEST_9_TYPENAMES_
+#undef GTEST_10_TYPENAMES_
+
+#undef GTEST_DECLARE_TUPLE_AS_FRIEND_
+#undef GTEST_BY_REF_
+#undef GTEST_ADD_REF_
+#undef GTEST_TUPLE_ELEMENT_
+
+#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TUPLE_H_
+# elif GTEST_OS_SYMBIAN
+
+// On Symbian, BOOST_HAS_TR1_TUPLE causes Boost's TR1 tuple library to
+// use STLport's tuple implementation, which unfortunately doesn't
+// work as the copy of STLport distributed with Symbian is incomplete.
+// By making sure BOOST_HAS_TR1_TUPLE is undefined, we force Boost to
+// use its own tuple implementation.
+#  ifdef BOOST_HAS_TR1_TUPLE
+#   undef BOOST_HAS_TR1_TUPLE
+#  endif  // BOOST_HAS_TR1_TUPLE
+
+// This prevents <boost/tr1/detail/config.hpp>, which defines
+// BOOST_HAS_TR1_TUPLE, from being #included by Boost's <tuple>.
+#  define BOOST_TR1_DETAIL_CONFIG_HPP_INCLUDED
+#  include <tuple>
+
+# elif defined(__GNUC__) && (GTEST_GCC_VER_ >= 40000)
+// GCC 4.0+ implements tr1/tuple in the <tr1/tuple> header.  This does
+// not conform to the TR1 spec, which requires the header to be <tuple>.
+
+#  if !GTEST_HAS_RTTI && GTEST_GCC_VER_ < 40302
+// Until version 4.3.2, gcc has a bug that causes <tr1/functional>,
+// which is #included by <tr1/tuple>, to not compile when RTTI is
+// disabled.  _TR1_FUNCTIONAL is the header guard for
+// <tr1/functional>.  Hence the following #define is a hack to prevent
+// <tr1/functional> from being included.
+#   define _TR1_FUNCTIONAL 1
+#   include <tr1/tuple>
+#   undef _TR1_FUNCTIONAL  // Allows the user to #include
+                        // <tr1/functional> if he chooses to.
+#  else
+#   include <tr1/tuple>  // NOLINT
+#  endif  // !GTEST_HAS_RTTI && GTEST_GCC_VER_ < 40302
+
+# else
+// If the compiler is not GCC 4.0+, we assume the user is using a
+// spec-conforming TR1 implementation.
+#  include <tuple>  // NOLINT
+# endif  // GTEST_USE_OWN_TR1_TUPLE
+
+#endif  // GTEST_HAS_TR1_TUPLE
+
+// Determines whether clone(2) is supported.
+// Usually it will only be available on Linux, excluding
+// Linux on the Itanium architecture.
+// Also see http://linux.die.net/man/2/clone.
+#ifndef GTEST_HAS_CLONE
+// The user didn't tell us, so we need to figure it out.
+
+# if GTEST_OS_LINUX && !defined(__ia64__)
+#  define GTEST_HAS_CLONE 1
+# else
+#  define GTEST_HAS_CLONE 0
+# endif  // GTEST_OS_LINUX && !defined(__ia64__)
+
+#endif  // GTEST_HAS_CLONE
+
+// Determines whether to support stream redirection. This is used to test
+// output correctness and to implement death tests.
+#ifndef GTEST_HAS_STREAM_REDIRECTION
+// By default, we assume that stream redirection is supported on all
+// platforms except known mobile ones.
+# if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_SYMBIAN
+#  define GTEST_HAS_STREAM_REDIRECTION 0
+# else
+#  define GTEST_HAS_STREAM_REDIRECTION 1
+# endif  // !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_SYMBIAN
+#endif  // GTEST_HAS_STREAM_REDIRECTION
+
+// Determines whether to support death tests.
+// Google Test does not support death tests for VC 7.1 and earlier as
+// abort() in a VC 7.1 application compiled as GUI in debug config
+// pops up a dialog window that cannot be suppressed programmatically.
+#if (GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_CYGWIN || GTEST_OS_SOLARIS || \
+     (GTEST_OS_WINDOWS_DESKTOP && _MSC_VER >= 1400) || \
+     GTEST_OS_WINDOWS_MINGW || GTEST_OS_AIX || GTEST_OS_HPUX)
+# define GTEST_HAS_DEATH_TEST 1
+# include <vector>  // NOLINT
+#endif
+
+// We don't support MSVC 7.1 with exceptions disabled now.  Therefore
+// all the compilers we care about are adequate for supporting
+// value-parameterized tests.
+#define GTEST_HAS_PARAM_TEST 1
+
+// Determines whether to support type-driven tests.
+
+// Typed tests need <typeinfo> and variadic macros, which GCC, VC++ 8.0,
+// Sun Pro CC, IBM Visual Age, and HP aCC support.
+#if defined(__GNUC__) || (_MSC_VER >= 1400) || defined(__SUNPRO_CC) || \
+    defined(__IBMCPP__) || defined(__HP_aCC)
+# define GTEST_HAS_TYPED_TEST 1
+# define GTEST_HAS_TYPED_TEST_P 1
+#endif
+
+// Determines whether to support Combine(). This only makes sense when
+// value-parameterized tests are enabled.  The implementation doesn't
+// work on Sun Studio since it doesn't understand templated conversion
+// operators.
+#if GTEST_HAS_PARAM_TEST && GTEST_HAS_TR1_TUPLE && !defined(__SUNPRO_CC)
+# define GTEST_HAS_COMBINE 1
+#endif
+
+// Determines whether the system compiler uses UTF-16 for encoding wide strings.
+#define GTEST_WIDE_STRING_USES_UTF16_ \
+    (GTEST_OS_WINDOWS || GTEST_OS_CYGWIN || GTEST_OS_SYMBIAN || GTEST_OS_AIX)
+
+// Determines whether test results can be streamed to a socket.
+#if GTEST_OS_LINUX
+# define GTEST_CAN_STREAM_RESULTS_ 1
+#endif
+
+// Defines some utility macros.
+
+// The GNU compiler emits a warning if nested "if" statements are followed by
+// an "else" statement and braces are not used to explicitly disambiguate the
+// "else" binding.  This leads to problems with code like:
+//
+//   if (gate)
+//     ASSERT_*(condition) << "Some message";
+//
+// The "switch (0) case 0:" idiom is used to suppress this.
+#ifdef __INTEL_COMPILER
+# define GTEST_AMBIGUOUS_ELSE_BLOCKER_
+#else
+# define GTEST_AMBIGUOUS_ELSE_BLOCKER_ switch (0) case 0: default:  // NOLINT
+#endif
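+
+// For illustration, a rough sketch of why the idiom works (the names
+// 'gate', 'condition', and 'fail_stream' are hypothetical):
+//
+//   if (gate)
+//     switch (0) case 0: default:      // GTEST_AMBIGUOUS_ELSE_BLOCKER_
+//       if (condition)
+//         ;                            // success: do nothing
+//       else
+//         fail_stream << "Some message";
+//
+// Because the inner if/else is now the body of a switch rather than the
+// direct body of the outer 'if', GCC no longer suggests braces to
+// disambiguate the 'else'.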
+
+// Use this annotation at the end of a struct/class definition to
+// prevent the compiler from optimizing away instances that are never
+// used.  This is useful when all interesting logic happens inside the
+// c'tor and / or d'tor.  Example:
+//
+//   struct Foo {
+//     Foo() { ... }
+//   } GTEST_ATTRIBUTE_UNUSED_;
+//
+// Also use it after a variable or parameter declaration to tell the
+// compiler the variable/parameter does not have to be used.
+#if defined(__GNUC__) && !defined(COMPILER_ICC)
+# define GTEST_ATTRIBUTE_UNUSED_ __attribute__ ((unused))
+#else
+# define GTEST_ATTRIBUTE_UNUSED_
+#endif
+
+// A macro to disallow operator=
+// This should be used in the private: declarations for a class.
+#define GTEST_DISALLOW_ASSIGN_(type)\
+  void operator=(type const &)
+
+// A macro to disallow copy constructor and operator=
+// This should be used in the private: declarations for a class.
+#define GTEST_DISALLOW_COPY_AND_ASSIGN_(type)\
+  type(type const &);\
+  GTEST_DISALLOW_ASSIGN_(type)
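+
+// For illustration, a hypothetical class using the macro above:
+//
+//   class Widget {
+//    public:
+//     Widget() {}
+//
+//    private:
+//     GTEST_DISALLOW_COPY_AND_ASSIGN_(Widget);  // declares (but never defines)
+//   };                                          // the copy c'tor and operator=,
+//                                               // so copying cannot compile/link.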
+
+// Tell the compiler to warn about unused return values for functions declared
+// with this macro.  The macro should be used on function declarations
+// following the argument list:
+//
+//   Sprocket* AllocateSprocket() GTEST_MUST_USE_RESULT_;
+#if defined(__GNUC__) && (GTEST_GCC_VER_ >= 30400) && !defined(COMPILER_ICC)
+# define GTEST_MUST_USE_RESULT_ __attribute__ ((warn_unused_result))
+#else
+# define GTEST_MUST_USE_RESULT_
+#endif  // __GNUC__ && (GTEST_GCC_VER_ >= 30400) && !COMPILER_ICC
+
+// Determine whether the compiler supports Microsoft's Structured Exception
+// Handling.  This is supported by several Windows compilers but generally
+// does not exist on any other system.
+#ifndef GTEST_HAS_SEH
+// The user didn't tell us, so we need to figure it out.
+
+# if defined(_MSC_VER) || defined(__BORLANDC__)
+// These two compilers are known to support SEH.
+#  define GTEST_HAS_SEH 1
+# else
+// Assume no SEH.
+#  define GTEST_HAS_SEH 0
+# endif
+
+#endif  // GTEST_HAS_SEH
+
+#ifdef _MSC_VER
+
+# if GTEST_LINKED_AS_SHARED_LIBRARY
+#  define GTEST_API_ __declspec(dllimport)
+# elif GTEST_CREATE_SHARED_LIBRARY
+#  define GTEST_API_ __declspec(dllexport)
+# endif
+
+#endif  // _MSC_VER
+
+#ifndef GTEST_API_
+# define GTEST_API_
+#endif
+
+#ifdef __GNUC__
+// Ask the compiler to never inline a given function.
+# define GTEST_NO_INLINE_ __attribute__((noinline))
+#else
+# define GTEST_NO_INLINE_
+#endif
+
+namespace testing {
+
+class Message;
+
+namespace internal {
+
+class String;
+
+// The GTEST_COMPILE_ASSERT_ macro can be used to verify that a compile time
+// expression is true. For example, you could use it to verify the
+// size of a static array:
+//
+//   GTEST_COMPILE_ASSERT_(ARRAYSIZE(content_type_names) == CONTENT_NUM_TYPES,
+//                         content_type_names_incorrect_size);
+//
+// or to make sure a struct is smaller than a certain size:
+//
+//   GTEST_COMPILE_ASSERT_(sizeof(foo) < 128, foo_too_large);
+//
+// The second argument to the macro must be a valid C++ identifier; it is
+// used as the name of a typedef.  If the expression is false, most
+// compilers will issue a warning/error containing that name.
+
+template <bool>
+struct CompileAssert {
+};
+
+#define GTEST_COMPILE_ASSERT_(expr, msg) \
+  typedef ::testing::internal::CompileAssert<(bool(expr))> \
+      msg[bool(expr) ? 1 : -1]
+
+// Implementation details of GTEST_COMPILE_ASSERT_:
+//
+// - GTEST_COMPILE_ASSERT_ works by defining an array type that has -1
+//   elements (and thus is invalid) when the expression is false.
+//
+// - The simpler definition
+//
+//    #define GTEST_COMPILE_ASSERT_(expr, msg) typedef char msg[(expr) ? 1 : -1]
+//
+//   does not work, as gcc supports variable-length arrays whose sizes
+//   are determined at run-time (this is gcc's extension and not part
+//   of the C++ standard).  As a result, gcc fails to reject the
+//   following code with the simple definition:
+//
+//     int foo;
+//     GTEST_COMPILE_ASSERT_(foo, msg); // not supposed to compile as foo is
+//                                      // not a compile-time constant.
+//
+// - By using the type CompileAssert<(bool(expr))>, we ensure that
+//   expr is a compile-time constant.  (Template arguments must be
+//   determined at compile-time.)
+//
+// - The outer parentheses in CompileAssert<(bool(expr))> are necessary
+//   to work around a bug in gcc 3.4.4 and 4.0.1.  If we had written
+//
+//     CompileAssert<bool(expr)>
+//
+//   instead, these compilers would refuse to compile
+//
+//     GTEST_COMPILE_ASSERT_(5 > 0, some_message);
+//
+//   (They seem to think the ">" in "5 > 0" marks the end of the
+//   template argument list.)
+//
+// - The array size is (bool(expr) ? 1 : -1), instead of simply
+//
+//     ((expr) ? 1 : -1).
+//
+//   This is to avoid running into a bug in MS VC 7.1, which
+//   causes ((0.0) ? 1 : -1) to incorrectly evaluate to 1.
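+
+// For illustration, GTEST_COMPILE_ASSERT_(sizeof(foo) < 128, foo_too_large)
+// from the example above expands (roughly) to:
+//
+//   typedef ::testing::internal::CompileAssert<(bool(sizeof(foo) < 128))>
+//       foo_too_large[bool(sizeof(foo) < 128) ? 1 : -1];
+//
+// When the expression is false the array size is -1, the typedef is
+// ill-formed, and the diagnostic mentions 'foo_too_large'.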
+
+// StaticAssertTypeEqHelper is used by StaticAssertTypeEq defined in gtest.h.
+//
+// This template is declared, but intentionally undefined.
+template <typename T1, typename T2>
+struct StaticAssertTypeEqHelper;
+
+template <typename T>
+struct StaticAssertTypeEqHelper<T, T> {};
+
+#if GTEST_HAS_GLOBAL_STRING
+typedef ::string string;
+#else
+typedef ::std::string string;
+#endif  // GTEST_HAS_GLOBAL_STRING
+
+#if GTEST_HAS_GLOBAL_WSTRING
+typedef ::wstring wstring;
+#elif GTEST_HAS_STD_WSTRING
+typedef ::std::wstring wstring;
+#endif  // GTEST_HAS_GLOBAL_WSTRING
+
+// A helper for suppressing warnings on a constant condition.  It just
+// returns 'condition'.
+GTEST_API_ bool IsTrue(bool condition);
+
+// Defines scoped_ptr.
+
+// This implementation of scoped_ptr is PARTIAL - it only contains
+// enough stuff to satisfy Google Test's need.
+template <typename T>
+class scoped_ptr {
+ public:
+  typedef T element_type;
+
+  explicit scoped_ptr(T* p = NULL) : ptr_(p) {}
+  ~scoped_ptr() { reset(); }
+
+  T& operator*() const { return *ptr_; }
+  T* operator->() const { return ptr_; }
+  T* get() const { return ptr_; }
+
+  T* release() {
+    T* const ptr = ptr_;
+    ptr_ = NULL;
+    return ptr;
+  }
+
+  void reset(T* p = NULL) {
+    if (p != ptr_) {
+      if (IsTrue(sizeof(T) > 0)) {  // Makes sure T is a complete type.
+        delete ptr_;
+      }
+      ptr_ = p;
+    }
+  }
+ private:
+  T* ptr_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(scoped_ptr);
+};
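+
+// For illustration, a typical use of scoped_ptr ('Foo' is a hypothetical
+// type):
+//
+//   {
+//     scoped_ptr<Foo> foo(new Foo);
+//     foo->DoSomething();          // operator-> forwards to the raw pointer.
+//     Foo* raw = foo.release();    // gives up ownership; foo now holds NULL.
+//     delete raw;                  // the caller is now responsible for this.
+//   }                              // without release(), ~scoped_ptr deletes it.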
+
+// Defines RE.
+
+// A simple C++ wrapper for <regex.h>.  It uses the POSIX Extended
+// Regular Expression syntax.
+class GTEST_API_ RE {
+ public:
+  // A copy constructor is required by the Standard to initialize object
+  // references from r-values.
+  RE(const RE& other) { Init(other.pattern()); }
+
+  // Constructs an RE from a string.
+  RE(const ::std::string& regex) { Init(regex.c_str()); }  // NOLINT
+
+#if GTEST_HAS_GLOBAL_STRING
+
+  RE(const ::string& regex) { Init(regex.c_str()); }  // NOLINT
+
+#endif  // GTEST_HAS_GLOBAL_STRING
+
+  RE(const char* regex) { Init(regex); }  // NOLINT
+  ~RE();
+
+  // Returns the string representation of the regex.
+  const char* pattern() const { return pattern_; }
+
+  // FullMatch(str, re) returns true iff regular expression re matches
+  // the entire str.
+  // PartialMatch(str, re) returns true iff regular expression re
+  // matches a substring of str (including str itself).
+  //
+  // TODO(wan@google.com): make FullMatch() and PartialMatch() work
+  // when str contains NUL characters.
+  static bool FullMatch(const ::std::string& str, const RE& re) {
+    return FullMatch(str.c_str(), re);
+  }
+  static bool PartialMatch(const ::std::string& str, const RE& re) {
+    return PartialMatch(str.c_str(), re);
+  }
+
+#if GTEST_HAS_GLOBAL_STRING
+
+  static bool FullMatch(const ::string& str, const RE& re) {
+    return FullMatch(str.c_str(), re);
+  }
+  static bool PartialMatch(const ::string& str, const RE& re) {
+    return PartialMatch(str.c_str(), re);
+  }
+
+#endif  // GTEST_HAS_GLOBAL_STRING
+
+  static bool FullMatch(const char* str, const RE& re);
+  static bool PartialMatch(const char* str, const RE& re);
+
+ private:
+  void Init(const char* regex);
+
+  // We use a const char* instead of a string, as Google Test may be used
+  // where string is not available.  We also do not use Google Test's own
+  // String type here, in order to simplify dependencies between the
+  // files.
+  const char* pattern_;
+  bool is_valid_;
+
+#if GTEST_USES_POSIX_RE
+
+  regex_t full_regex_;     // For FullMatch().
+  regex_t partial_regex_;  // For PartialMatch().
+
+#else  // GTEST_USES_SIMPLE_RE
+
+  const char* full_pattern_;  // For FullMatch();
+
+#endif
+
+  GTEST_DISALLOW_ASSIGN_(RE);
+};
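+
+// For illustration ("a.*z" is an arbitrary example pattern):
+//
+//   const RE re("a.*z");
+//   RE::FullMatch("abcz", re);       // true  - the entire string matches.
+//   RE::FullMatch("xabcz", re);      // false - the leading 'x' is not matched.
+//   RE::PartialMatch("xabczx", re);  // true  - "abcz" matches a substring.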
+
+// Formats a source file path and a line number as they would appear
+// in an error message from the compiler used to compile this code.
+GTEST_API_ ::std::string FormatFileLocation(const char* file, int line);
+
+// Formats a file location for compiler-independent XML output.
+// Although this function is not platform dependent, we put it next to
+// FormatFileLocation in order to contrast the two functions.
+GTEST_API_ ::std::string FormatCompilerIndependentFileLocation(const char* file,
+                                                               int line);
+
+// Defines logging utilities:
+//   GTEST_LOG_(severity) - logs messages at the specified severity level. The
+//                          message itself is streamed into the macro.
+//   LogToStderr()  - directs all log messages to stderr.
+//   FlushInfoLog() - flushes informational log messages.
+
+enum GTestLogSeverity {
+  GTEST_INFO,
+  GTEST_WARNING,
+  GTEST_ERROR,
+  GTEST_FATAL
+};
+
+// Formats log entry severity, provides a stream object for streaming the
+// log message, and terminates the message with a newline when going out of
+// scope.
+class GTEST_API_ GTestLog {
+ public:
+  GTestLog(GTestLogSeverity severity, const char* file, int line);
+
+  // Flushes the buffers and, if severity is GTEST_FATAL, aborts the program.
+  ~GTestLog();
+
+  ::std::ostream& GetStream() { return ::std::cerr; }
+
+ private:
+  const GTestLogSeverity severity_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(GTestLog);
+};
+
+#define GTEST_LOG_(severity) \
+    ::testing::internal::GTestLog(::testing::internal::GTEST_##severity, \
+                                  __FILE__, __LINE__).GetStream()
+
+inline void LogToStderr() {}
+inline void FlushInfoLog() { fflush(NULL); }
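+
+// For illustration (the streamed values are hypothetical):
+//
+//   GTEST_LOG_(WARNING) << "Unexpected state: " << state;
+//       // Streams a message at WARNING severity; a newline is appended
+//       // when the temporary GTestLog goes out of scope.
+//   GTEST_LOG_(FATAL) << "Cannot continue.";
+//       // Same, but the GTestLog d'tor also aborts the program.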
+
+// INTERNAL IMPLEMENTATION - DO NOT USE.
+//
+// GTEST_CHECK_ is an all-mode assert. It aborts the program if the condition
+// is not satisfied.
+//  Synopsis:
+//    GTEST_CHECK_(boolean_condition);
+//     or
+//    GTEST_CHECK_(boolean_condition) << "Additional message";
+//
+//    This checks the condition and, if it is not satisfied, prints a
+//    message about the violation, including the condition itself plus any
+//    additional message streamed into it, and then aborts the program.
+//    It aborts regardless of whether the binary is built in debug mode
+//    or not.
+#define GTEST_CHECK_(condition) \
+    GTEST_AMBIGUOUS_ELSE_BLOCKER_ \
+    if (::testing::internal::IsTrue(condition)) \
+      ; \
+    else \
+      GTEST_LOG_(FATAL) << "Condition " #condition " failed. "
+
+// An all-mode assert to verify that the given POSIX-style function
+// call returns 0 (indicating success).  Known limitation: this
+// doesn't expand to a balanced 'if' statement, so enclose the macro
+// in {} if you need to use it as the only statement in an 'if'
+// branch.
+#define GTEST_CHECK_POSIX_SUCCESS_(posix_call) \
+  if (const int gtest_error = (posix_call)) \
+    GTEST_LOG_(FATAL) << #posix_call << " failed with error " \
+                      << gtest_error
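+
+// For illustration of the limitation above ('mu' is a hypothetical
+// pthread_mutex_t and 'locked' a hypothetical flag):
+//
+//   if (locked) {
+//     GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_unlock(&mu));
+//   }
+//
+// The braces are required because the macro expands to an unbalanced 'if'
+// with no matching 'else'.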
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// Use ImplicitCast_ as a safe version of static_cast for upcasting in
+// the type hierarchy (e.g. casting a Foo* to a SuperclassOfFoo* or a
+// const Foo*).  When you use ImplicitCast_, the compiler checks that
+// the cast is safe.  Such explicit ImplicitCast_s are necessary in
+// surprisingly many situations where C++ demands an exact type match
+// instead of an argument type convertible to a target type.
+//
+// The syntax for using ImplicitCast_ is the same as for static_cast:
+//
+//   ImplicitCast_<ToType>(expr)
+//
+// ImplicitCast_ would have been part of the C++ standard library,
+// but the proposal was submitted too late.  It will probably make
+// its way into the language in the future.
+//
+// This relatively ugly name is intentional. It prevents clashes with
+// similar functions users may have (e.g., implicit_cast). The internal
+// namespace alone is not enough because the function can be found by ADL.
+template<typename To>
+inline To ImplicitCast_(To x) { return x; }
+
+// When you upcast (that is, cast a pointer from type Foo to type
+// SuperclassOfFoo), it's fine to use ImplicitCast_<>, since upcasts
+// always succeed.  When you downcast (that is, cast a pointer from
+// type Foo to type SubclassOfFoo), static_cast<> isn't safe, because
+// how do you know the pointer is really of type SubclassOfFoo?  It
+// could be a bare Foo, or of type DifferentSubclassOfFoo.  Thus,
+// when you downcast, you should use this macro.  In debug mode, we
+// use dynamic_cast<> to double-check the downcast is legal (we die
+// if it's not).  In normal mode, we do the efficient static_cast<>
+// instead.  Thus, it's important to test in debug mode to make sure
+// the cast is legal!
+//    This is the only place in the code we should use dynamic_cast<>.
+// In particular, you SHOULDN'T be using dynamic_cast<> in order to
+// do RTTI (e.g. code like this:
+//    if (dynamic_cast<Subclass1>(foo)) HandleASubclass1Object(foo);
+//    if (dynamic_cast<Subclass2>(foo)) HandleASubclass2Object(foo);
+// ).  You should design the code some other way so that this is not needed.
+//
+// This relatively ugly name is intentional. It prevents clashes with
+// similar functions users may have (e.g., down_cast). The internal
+// namespace alone is not enough because the function can be found by ADL.
+template<typename To, typename From>  // use like this: DownCast_<T*>(foo);
+inline To DownCast_(From* f) {  // so we only accept pointers
+  // Ensures that To is a sub-type of From *.  This test is here only
+  // for compile-time type checking, and has no overhead in an
+  // optimized build at run-time, as it will be optimized away
+  // completely.
+  if (false) {
+    const To to = NULL;
+    ::testing::internal::ImplicitCast_<From*>(to);
+  }
+
+#if GTEST_HAS_RTTI
+  // RTTI: debug mode only!
+  GTEST_CHECK_(f == NULL || dynamic_cast<To>(f) != NULL);
+#endif
+  return static_cast<To>(f);
+}
+
+// Downcasts the pointer of type Base to Derived.
+// Derived must be a subclass of Base. The parameter MUST
+// point to a class of type Derived, not any subclass of it.
+// When RTTI is available, the function performs a runtime
+// check to enforce this.
+template <class Derived, class Base>
+Derived* CheckedDowncastToActualType(Base* base) {
+#if GTEST_HAS_RTTI
+  GTEST_CHECK_(typeid(*base) == typeid(Derived));
+  return dynamic_cast<Derived*>(base);  // NOLINT
+#else
+  return static_cast<Derived*>(base);  // Poor man's downcast.
+#endif
+}
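+
+// For illustration ('Animal' and 'Dog' are hypothetical types):
+//
+//   class Animal { public: virtual ~Animal() {} };
+//   class Dog : public Animal { public: void Bark() {} };
+//
+//   Dog dog;
+//   Animal* animal = ImplicitCast_<Animal*>(&dog);   // upcast, always safe.
+//   Dog* same_dog = CheckedDowncastToActualType<Dog>(animal);
+//   same_dog->Bark();   // *animal really is a Dog, so the RTTI check (when
+//                       // enabled) passes and the downcast is legitimate.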
+
+#if GTEST_HAS_STREAM_REDIRECTION
+
+// Defines the stdout/stderr capturers:
+//   CaptureStdout     - starts capturing stdout.
+//   GetCapturedStdout - stops capturing stdout and returns the captured string.
+//   CaptureStderr     - starts capturing stderr.
+//   GetCapturedStderr - stops capturing stderr and returns the captured string.
+//
+GTEST_API_ void CaptureStdout();
+GTEST_API_ String GetCapturedStdout();
+GTEST_API_ void CaptureStderr();
+GTEST_API_ String GetCapturedStderr();
+
+#endif  // GTEST_HAS_STREAM_REDIRECTION
+
+
+#if GTEST_HAS_DEATH_TEST
+
+// A copy of all command line arguments.  Set by InitGoogleTest().
+extern ::std::vector<String> g_argvs;
+
+// GTEST_HAS_DEATH_TEST implies we have ::std::string.
+const ::std::vector<String>& GetArgvs();
+
+#endif  // GTEST_HAS_DEATH_TEST
+
+// Defines synchronization primitives.
+
+#if GTEST_HAS_PTHREAD
+
+// Sleeps for (roughly) n milliseconds.  This function is only for
+// testing Google Test's own constructs.  Don't use it in user tests,
+// either directly or indirectly.
+inline void SleepMilliseconds(int n) {
+  const timespec time = {
+    0,                  // 0 seconds.
+    n * 1000L * 1000L,  // And n ms.
+  };
+  nanosleep(&time, NULL);
+}
+
+// Allows a controller thread to pause execution of newly created
+// threads until notified.  Instances of this class must be created
+// and destroyed in the controller thread.
+//
+// This class is only for testing Google Test's own constructs. Do not
+// use it in user tests, either directly or indirectly.
+class Notification {
+ public:
+  Notification() : notified_(false) {}
+
+  // Notifies all threads created with this notification to start. Must
+  // be called from the controller thread.
+  void Notify() { notified_ = true; }
+
+  // Blocks until the controller thread notifies. Must be called from a test
+  // thread.
+  void WaitForNotification() {
+    while (!notified_) {
+      SleepMilliseconds(10);
+    }
+  }
+
+ private:
+  volatile bool notified_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(Notification);
+};
+
+// As a C-function, ThreadFuncWithCLinkage cannot be templated itself.
+// Consequently, it cannot select a correct instantiation of ThreadWithParam
+// in order to call its Run(). Introducing ThreadWithParamBase as a
+// non-templated base class for ThreadWithParam allows us to bypass this
+// problem.
+class ThreadWithParamBase {
+ public:
+  virtual ~ThreadWithParamBase() {}
+  virtual void Run() = 0;
+};
+
+// pthread_create() accepts a pointer to a function type with C linkage.
+// According to the Standard (7.5/1), function types with different linkages
+// are different even if they are otherwise identical.  Some compilers (for
+// example, SunStudio) treat them as different types.  Since class methods
+// cannot be defined with C linkage, we need to define a free C-function to
+// pass into pthread_create().
+extern "C" inline void* ThreadFuncWithCLinkage(void* thread) {
+  static_cast<ThreadWithParamBase*>(thread)->Run();
+  return NULL;
+}
+
+// Helper class for testing Google Test's multi-threading constructs.
+// To use it, write:
+//
+//   void ThreadFunc(int param) { /* Do things with param */ }
+//   Notification thread_can_start;
+//   ...
+//   // The thread_can_start parameter is optional; you can supply NULL.
+//   ThreadWithParam<int> thread(&ThreadFunc, 5, &thread_can_start);
+//   thread_can_start.Notify();
+//
+// These classes are only for testing Google Test's own constructs. Do
+// not use them in user tests, either directly or indirectly.
+template <typename T>
+class ThreadWithParam : public ThreadWithParamBase {
+ public:
+  typedef void (*UserThreadFunc)(T);
+
+  ThreadWithParam(
+      UserThreadFunc func, T param, Notification* thread_can_start)
+      : func_(func),
+        param_(param),
+        thread_can_start_(thread_can_start),
+        finished_(false) {
+    ThreadWithParamBase* const base = this;
+    // The thread can be created only after all fields except thread_
+    // have been initialized.
+    GTEST_CHECK_POSIX_SUCCESS_(
+        pthread_create(&thread_, 0, &ThreadFuncWithCLinkage, base));
+  }
+  ~ThreadWithParam() { Join(); }
+
+  void Join() {
+    if (!finished_) {
+      GTEST_CHECK_POSIX_SUCCESS_(pthread_join(thread_, 0));
+      finished_ = true;
+    }
+  }
+
+  virtual void Run() {
+    if (thread_can_start_ != NULL)
+      thread_can_start_->WaitForNotification();
+    func_(param_);
+  }
+
+ private:
+  const UserThreadFunc func_;  // User-supplied thread function.
+  const T param_;  // User-supplied parameter to the thread function.
+  // When non-NULL, used to block execution until the controller thread
+  // notifies.
+  Notification* const thread_can_start_;
+  bool finished_;  // true iff we know that the thread function has finished.
+  pthread_t thread_;  // The native thread object.
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadWithParam);
+};
+
+// MutexBase and Mutex implement mutex on pthreads-based platforms. They
+// are used in conjunction with class MutexLock:
+//
+//   Mutex mutex;
+//   ...
+//   MutexLock lock(&mutex);  // Acquires the mutex and releases it at the end
+//                            // of the current scope.
+//
+// MutexBase implements behavior for both statically and dynamically
+// allocated mutexes.  Do not use MutexBase directly.  Instead, write
+// the following to define a static mutex:
+//
+//   GTEST_DEFINE_STATIC_MUTEX_(g_some_mutex);
+//
+// You can forward declare a static mutex like this:
+//
+//   GTEST_DECLARE_STATIC_MUTEX_(g_some_mutex);
+//
+// To create a dynamic mutex, just define an object of type Mutex.
+class MutexBase {
+ public:
+  // Acquires this mutex.
+  void Lock() {
+    GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_lock(&mutex_));
+    owner_ = pthread_self();
+  }
+
+  // Releases this mutex.
+  void Unlock() {
+    // We don't protect writing to owner_ here, as it's the caller's
+    // responsibility to ensure that the current thread holds the
+    // mutex when this is called.
+    owner_ = 0;
+    GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_unlock(&mutex_));
+  }
+
+  // Does nothing if the current thread holds the mutex. Otherwise, crashes
+  // with high probability.
+  void AssertHeld() const {
+    GTEST_CHECK_(owner_ == pthread_self())
+        << "The current thread is not holding the mutex @" << this;
+  }
+
+  // A static mutex may be used before main() is entered.  It may even
+  // be used before the dynamic initialization stage.  Therefore we
+  // must be able to initialize a static mutex object at link time.
+  // This means MutexBase has to be a POD and its member variables
+  // have to be public.
+ public:
+  pthread_mutex_t mutex_;  // The underlying pthread mutex.
+  pthread_t owner_;  // The thread holding the mutex; 0 means no one holds it.
+};
+
+// Forward-declares a static mutex.
+# define GTEST_DECLARE_STATIC_MUTEX_(mutex) \
+    extern ::testing::internal::MutexBase mutex
+
+// Defines and statically (i.e. at link time) initializes a static mutex.
+# define GTEST_DEFINE_STATIC_MUTEX_(mutex) \
+    ::testing::internal::MutexBase mutex = { PTHREAD_MUTEX_INITIALIZER, 0 }
+
+// The Mutex class can only be used for mutexes created at runtime. It
+// shares its API with MutexBase otherwise.
+class Mutex : public MutexBase {
+ public:
+  Mutex() {
+    GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_init(&mutex_, NULL));
+    owner_ = 0;
+  }
+  ~Mutex() {
+    GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_destroy(&mutex_));
+  }
+
+ private:
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(Mutex);
+};
+
+// We cannot name this class MutexLock as the ctor declaration would
+// conflict with a macro named MutexLock, which is defined on some
+// platforms.  Hence the typedef trick below.
+class GTestMutexLock {
+ public:
+  explicit GTestMutexLock(MutexBase* mutex)
+      : mutex_(mutex) { mutex_->Lock(); }
+
+  ~GTestMutexLock() { mutex_->Unlock(); }
+
+ private:
+  MutexBase* const mutex_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(GTestMutexLock);
+};
+
+typedef GTestMutexLock MutexLock;
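+
+// For illustration ('g_counter_mutex', 'g_counter', and Increment() are
+// hypothetical):
+//
+//   // At namespace scope, statically initialized at link time:
+//   GTEST_DEFINE_STATIC_MUTEX_(g_counter_mutex);
+//
+//   void Increment() {
+//     MutexLock lock(&g_counter_mutex);  // acquired here, released when
+//     ++g_counter;                       // 'lock' goes out of scope.
+//   }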
+
+// Helpers for ThreadLocal.
+
+// pthread_key_create() requires DeleteThreadLocalValue() to have
+// C-linkage.  Therefore it cannot be templatized to access
+// ThreadLocal<T>.  Hence the need for class
+// ThreadLocalValueHolderBase.
+class ThreadLocalValueHolderBase {
+ public:
+  virtual ~ThreadLocalValueHolderBase() {}
+};
+
+// Called by pthread to delete thread-local data stored by
+// pthread_setspecific().
+extern "C" inline void DeleteThreadLocalValue(void* value_holder) {
+  delete static_cast<ThreadLocalValueHolderBase*>(value_holder);
+}
+
+// Implements thread-local storage on pthreads-based systems.
+//
+//   // Thread 1
+//   ThreadLocal<int> tl(100);  // 100 is the default value for each thread.
+//
+//   // Thread 2
+//   tl.set(150);  // Changes the value for thread 2 only.
+//   EXPECT_EQ(150, tl.get());
+//
+//   // Thread 1
+//   EXPECT_EQ(100, tl.get());  // In thread 1, tl has the original value.
+//   tl.set(200);
+//   EXPECT_EQ(200, tl.get());
+//
+// The template type argument T must have a public copy constructor.
+// In addition, the default ThreadLocal constructor requires T to have
+// a public default constructor.
+//
+// An object managed for a thread by a ThreadLocal instance is deleted
+// when the thread exits.  Or, if the ThreadLocal instance dies in
+// that thread, when the ThreadLocal dies.  It's the user's
+// responsibility to ensure that all other threads using a ThreadLocal
+// have exited when it dies, or the per-thread objects for those
+// threads will not be deleted.
+//
+// Google Test only uses global ThreadLocal objects.  That means they
+// will die after main() has returned.  Therefore, no per-thread
+// object managed by Google Test will be leaked as long as all threads
+// using Google Test have exited when main() returns.
+template <typename T>
+class ThreadLocal {
+ public:
+  ThreadLocal() : key_(CreateKey()),
+                  default_() {}
+  explicit ThreadLocal(const T& value) : key_(CreateKey()),
+                                         default_(value) {}
+
+  ~ThreadLocal() {
+    // Destroys the managed object for the current thread, if any.
+    DeleteThreadLocalValue(pthread_getspecific(key_));
+
+    // Releases resources associated with the key.  This will *not*
+    // delete managed objects for other threads.
+    GTEST_CHECK_POSIX_SUCCESS_(pthread_key_delete(key_));
+  }
+
+  T* pointer() { return GetOrCreateValue(); }
+  const T* pointer() const { return GetOrCreateValue(); }
+  const T& get() const { return *pointer(); }
+  void set(const T& value) { *pointer() = value; }
+
+ private:
+  // Holds a value of type T.
+  class ValueHolder : public ThreadLocalValueHolderBase {
+   public:
+    explicit ValueHolder(const T& value) : value_(value) {}
+
+    T* pointer() { return &value_; }
+
+   private:
+    T value_;
+    GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolder);
+  };
+
+  static pthread_key_t CreateKey() {
+    pthread_key_t key;
+    // When a thread exits, DeleteThreadLocalValue() will be called on
+    // the object managed for that thread.
+    GTEST_CHECK_POSIX_SUCCESS_(
+        pthread_key_create(&key, &DeleteThreadLocalValue));
+    return key;
+  }
+
+  T* GetOrCreateValue() const {
+    ThreadLocalValueHolderBase* const holder =
+        static_cast<ThreadLocalValueHolderBase*>(pthread_getspecific(key_));
+    if (holder != NULL) {
+      return CheckedDowncastToActualType<ValueHolder>(holder)->pointer();
+    }
+
+    ValueHolder* const new_holder = new ValueHolder(default_);
+    ThreadLocalValueHolderBase* const holder_base = new_holder;
+    GTEST_CHECK_POSIX_SUCCESS_(pthread_setspecific(key_, holder_base));
+    return new_holder->pointer();
+  }
+
+  // A key pthreads uses for looking up per-thread values.
+  const pthread_key_t key_;
+  const T default_;  // The default value for each thread.
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadLocal);
+};
+
+# define GTEST_IS_THREADSAFE 1
+
+#else  // GTEST_HAS_PTHREAD
+
+// A dummy implementation of synchronization primitives (mutex, lock,
+// and thread-local variable).  Necessary for compiling Google Test where
+// mutex is not supported - using Google Test in multiple threads is not
+// supported on such platforms.
+
+class Mutex {
+ public:
+  Mutex() {}
+  void AssertHeld() const {}
+};
+
+# define GTEST_DECLARE_STATIC_MUTEX_(mutex) \
+  extern ::testing::internal::Mutex mutex
+
+# define GTEST_DEFINE_STATIC_MUTEX_(mutex) ::testing::internal::Mutex mutex
+
+class GTestMutexLock {
+ public:
+  explicit GTestMutexLock(Mutex*) {}  // NOLINT
+};
+
+typedef GTestMutexLock MutexLock;
+
+template <typename T>
+class ThreadLocal {
+ public:
+  ThreadLocal() : value_() {}
+  explicit ThreadLocal(const T& value) : value_(value) {}
+  T* pointer() { return &value_; }
+  const T* pointer() const { return &value_; }
+  const T& get() const { return value_; }
+  void set(const T& value) { value_ = value; }
+ private:
+  T value_;
+};
+
+// The above synchronization primitives have dummy implementations.
+// Therefore Google Test is not thread-safe.
+# define GTEST_IS_THREADSAFE 0
+
+#endif  // GTEST_HAS_PTHREAD
+
+// Returns the number of threads running in the process, or 0 to indicate that
+// we cannot detect it.
+GTEST_API_ size_t GetThreadCount();
+
+// Passing non-POD classes through ellipsis (...) crashes the ARM
+// compiler and generates a warning in Sun Studio.  The Nokia Symbian
+// and the IBM XL C/C++ compiler try to instantiate a copy constructor
+// for objects passed through ellipsis (...), failing for uncopyable
+// objects.  We define this to ensure that only POD is passed through
+// ellipsis on these systems.
+#if defined(__SYMBIAN32__) || defined(__IBMCPP__) || defined(__SUNPRO_CC)
+// We lose support for NULL detection where the compiler doesn't like
+// passing non-POD classes through ellipsis (...).
+# define GTEST_ELLIPSIS_NEEDS_POD_ 1
+#else
+# define GTEST_CAN_COMPARE_NULL 1
+#endif
+
+// The Nokia Symbian and IBM XL C/C++ compilers cannot decide between
+// const T& and const T* in a function template.  These compilers
+// _can_ decide between class template specializations for T and T*,
+// so a tr1::type_traits-like is_pointer works.
+#if defined(__SYMBIAN32__) || defined(__IBMCPP__)
+# define GTEST_NEEDS_IS_POINTER_ 1
+#endif
+
+template <bool bool_value>
+struct bool_constant {
+  typedef bool_constant<bool_value> type;
+  static const bool value = bool_value;
+};
+template <bool bool_value> const bool bool_constant<bool_value>::value;
+
+typedef bool_constant<false> false_type;
+typedef bool_constant<true> true_type;
+
+template <typename T>
+struct is_pointer : public false_type {};
+
+template <typename T>
+struct is_pointer<T*> : public true_type {};
+
+template <typename Iterator>
+struct IteratorTraits {
+  typedef typename Iterator::value_type value_type;
+};
+
+template <typename T>
+struct IteratorTraits<T*> {
+  typedef T value_type;
+};
+
+template <typename T>
+struct IteratorTraits<const T*> {
+  typedef T value_type;
+};
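+
+// For illustration:
+//
+//   bool_constant<true>::value                                    // true
+//   is_pointer<int*>::value                                       // true
+//   is_pointer<int>::value                                        // false
+//   IteratorTraits< ::std::vector<int>::iterator >::value_type    // int
+//   IteratorTraits<const char*>::value_type                       // char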
+
+#if GTEST_OS_WINDOWS
+# define GTEST_PATH_SEP_ "\\"
+# define GTEST_HAS_ALT_PATH_SEP_ 1
+// The biggest signed integer type the compiler supports.
+typedef __int64 BiggestInt;
+#else
+# define GTEST_PATH_SEP_ "/"
+# define GTEST_HAS_ALT_PATH_SEP_ 0
+typedef long long BiggestInt;  // NOLINT
+#endif  // GTEST_OS_WINDOWS
+
+// Utilities for char.
+
+// isspace(int ch) and friends accept an unsigned char or EOF.  char
+// may be signed, depending on the compiler (or compiler flags).
+// Therefore we need to cast a char to unsigned char before calling
+// isspace(), etc.
+
+inline bool IsAlpha(char ch) {
+  return isalpha(static_cast<unsigned char>(ch)) != 0;
+}
+inline bool IsAlNum(char ch) {
+  return isalnum(static_cast<unsigned char>(ch)) != 0;
+}
+inline bool IsDigit(char ch) {
+  return isdigit(static_cast<unsigned char>(ch)) != 0;
+}
+inline bool IsLower(char ch) {
+  return islower(static_cast<unsigned char>(ch)) != 0;
+}
+inline bool IsSpace(char ch) {
+  return isspace(static_cast<unsigned char>(ch)) != 0;
+}
+inline bool IsUpper(char ch) {
+  return isupper(static_cast<unsigned char>(ch)) != 0;
+}
+inline bool IsXDigit(char ch) {
+  return isxdigit(static_cast<unsigned char>(ch)) != 0;
+}
+
+inline char ToLower(char ch) {
+  return static_cast<char>(tolower(static_cast<unsigned char>(ch)));
+}
+inline char ToUpper(char ch) {
+  return static_cast<char>(toupper(static_cast<unsigned char>(ch)));
+}
+
+// The testing::internal::posix namespace holds wrappers for common
+// POSIX functions.  These wrappers hide the differences between
+// Windows/MSVC and POSIX systems.  Since some compilers define these
+// standard functions as macros, the wrapper cannot have the same name
+// as the wrapped function.
+
+namespace posix {
+
+// Functions with a different name on Windows.
+
+#if GTEST_OS_WINDOWS
+
+typedef struct _stat StatStruct;
+
+# ifdef __BORLANDC__
+inline int IsATTY(int fd) { return isatty(fd); }
+inline int StrCaseCmp(const char* s1, const char* s2) {
+  return stricmp(s1, s2);
+}
+inline char* StrDup(const char* src) { return strdup(src); }
+# else  // !__BORLANDC__
+#  if GTEST_OS_WINDOWS_MOBILE
+inline int IsATTY(int /* fd */) { return 0; }
+#  else
+inline int IsATTY(int fd) { return _isatty(fd); }
+#  endif  // GTEST_OS_WINDOWS_MOBILE
+inline int StrCaseCmp(const char* s1, const char* s2) {
+  return _stricmp(s1, s2);
+}
+inline char* StrDup(const char* src) { return _strdup(src); }
+# endif  // __BORLANDC__
+
+# if GTEST_OS_WINDOWS_MOBILE
+inline int FileNo(FILE* file) { return reinterpret_cast<int>(_fileno(file)); }
+// Stat(), RmDir(), and IsDir() are not needed on Windows CE at this
+// time and thus not defined there.
+# else
+inline int FileNo(FILE* file) { return _fileno(file); }
+inline int Stat(const char* path, StatStruct* buf) { return _stat(path, buf); }
+inline int RmDir(const char* dir) { return _rmdir(dir); }
+inline bool IsDir(const StatStruct& st) {
+  return (_S_IFDIR & st.st_mode) != 0;
+}
+# endif  // GTEST_OS_WINDOWS_MOBILE
+
+#else
+
+typedef struct stat StatStruct;
+
+inline int FileNo(FILE* file) { return fileno(file); }
+inline int IsATTY(int fd) { return isatty(fd); }
+inline int Stat(const char* path, StatStruct* buf) { return stat(path, buf); }
+inline int StrCaseCmp(const char* s1, const char* s2) {
+  return strcasecmp(s1, s2);
+}
+inline char* StrDup(const char* src) { return strdup(src); }
+inline int RmDir(const char* dir) { return rmdir(dir); }
+inline bool IsDir(const StatStruct& st) { return S_ISDIR(st.st_mode); }
+
+#endif  // GTEST_OS_WINDOWS
+
+// Functions deprecated by MSVC 8.0.
+
+#ifdef _MSC_VER
+// Temporarily disable warning 4996 (deprecated function).
+# pragma warning(push)
+# pragma warning(disable:4996)
+#endif
+
+inline const char* StrNCpy(char* dest, const char* src, size_t n) {
+  return strncpy(dest, src, n);
+}
+
+// ChDir(), FReopen(), FDOpen(), Read(), Write(), Close(), and
+// StrError() aren't needed on Windows CE at this time and thus not
+// defined there.
+
+#if !GTEST_OS_WINDOWS_MOBILE
+inline int ChDir(const char* dir) { return chdir(dir); }
+#endif
+inline FILE* FOpen(const char* path, const char* mode) {
+  return fopen(path, mode);
+}
+#if !GTEST_OS_WINDOWS_MOBILE
+inline FILE *FReopen(const char* path, const char* mode, FILE* stream) {
+  return freopen(path, mode, stream);
+}
+inline FILE* FDOpen(int fd, const char* mode) { return fdopen(fd, mode); }
+#endif
+inline int FClose(FILE* fp) { return fclose(fp); }
+#if !GTEST_OS_WINDOWS_MOBILE
+inline int Read(int fd, void* buf, unsigned int count) {
+  return static_cast<int>(read(fd, buf, count));
+}
+inline int Write(int fd, const void* buf, unsigned int count) {
+  return static_cast<int>(write(fd, buf, count));
+}
+inline int Close(int fd) { return close(fd); }
+inline const char* StrError(int errnum) { return strerror(errnum); }
+#endif
+inline const char* GetEnv(const char* name) {
+#if GTEST_OS_WINDOWS_MOBILE
+  // We are on Windows CE, which has no environment variables.
+  return NULL;
+#elif defined(__BORLANDC__) || defined(__SunOS_5_8) || defined(__SunOS_5_9)
+  // Environment variables which we programmatically clear will be set to the
+  // empty string rather than unset (NULL).  Handle that case.
+  const char* const env = getenv(name);
+  return (env != NULL && env[0] != '\0') ? env : NULL;
+#else
+  return getenv(name);
+#endif
+}
+
+#ifdef _MSC_VER
+# pragma warning(pop)  // Restores the warning state.
+#endif
+
+#if GTEST_OS_WINDOWS_MOBILE
+// Windows CE has no C library. The abort() function is used in
+// several places in Google Test. This implementation provides a reasonable
+// imitation of standard behaviour.
+void Abort();
+#else
+inline void Abort() { abort(); }
+#endif  // GTEST_OS_WINDOWS_MOBILE
+
+}  // namespace posix
+
+// The maximum number a BiggestInt can represent.  This definition
+// works whether BiggestInt is represented in one's complement or
+// two's complement.
+//
+// We cannot rely on numeric_limits in STL, as __int64 and long long
+// are not part of standard C++ and numeric_limits doesn't need to be
+// defined for them.
+const BiggestInt kMaxBiggestInt =
+    ~(static_cast<BiggestInt>(1) << (8*sizeof(BiggestInt) - 1));
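+
+// For illustration: on the common 64-bit, two's-complement targets this
+// evaluates to ~(static_cast<BiggestInt>(1) << 63), whose bit pattern is
+// ~0x8000000000000000 == 0x7FFFFFFFFFFFFFFF, i.e. 2^63 - 1.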
+
+// This template class serves as a compile-time function from size to
+// type.  It maps a size in bytes to a primitive type with that
+// size. e.g.
+//
+//   TypeWithSize<4>::UInt
+//
+// is typedef-ed to be unsigned int (unsigned integer made up of 4
+// bytes).
+//
+// Such functionality should belong to STL, but I cannot find it
+// there.
+//
+// Google Test uses this class in the implementation of floating-point
+// comparison.
+//
+// For now it only handles UInt (unsigned int) as that's all Google Test
+// needs.  Other types can be easily added in the future if need
+// arises.
+template <size_t size>
+class TypeWithSize {
+ public:
+  // This prevents the user from using TypeWithSize<N> with incorrect
+  // values of N.
+  typedef void UInt;
+};
+
+// The specialization for size 4.
+template <>
+class TypeWithSize<4> {
+ public:
+  // unsigned int has size 4 in both gcc and MSVC.
+  //
+  // As base/basictypes.h doesn't compile on Windows, we cannot use
+  // uint32, uint64, etc. here.
+  typedef int Int;
+  typedef unsigned int UInt;
+};
+
+// The specialization for size 8.
+template <>
+class TypeWithSize<8> {
+ public:
+
+#if GTEST_OS_WINDOWS
+  typedef __int64 Int;
+  typedef unsigned __int64 UInt;
+#else
+  typedef long long Int;  // NOLINT
+  typedef unsigned long long UInt;  // NOLINT
+#endif  // GTEST_OS_WINDOWS
+};
+
+// Integer types of known sizes.
+typedef TypeWithSize<4>::Int Int32;
+typedef TypeWithSize<4>::UInt UInt32;
+typedef TypeWithSize<8>::Int Int64;
+typedef TypeWithSize<8>::UInt UInt64;
+typedef TypeWithSize<8>::Int TimeInMillis;  // Represents time in milliseconds.
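+
+// For illustration ('checksum', 'big', and 'start_ms' are hypothetical
+// variables):
+//
+//   UInt32 checksum = 0xFFFFFFFFU;            // exactly 32 bits everywhere.
+//   Int64 big = static_cast<Int64>(1) << 40;  // exactly 64 bits everywhere.
+//   TimeInMillis start_ms = 0;                // elapsed time, in milliseconds.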
+
+// Utilities for command line flags and environment variables.
+
+// Macro for referencing flags.
+#define GTEST_FLAG(name) FLAGS_gtest_##name
+
+// Macros for declaring flags.
+#define GTEST_DECLARE_bool_(name) GTEST_API_ extern bool GTEST_FLAG(name)
+#define GTEST_DECLARE_int32_(name) \
+    GTEST_API_ extern ::testing::internal::Int32 GTEST_FLAG(name)
+#define GTEST_DECLARE_string_(name) \
+    GTEST_API_ extern ::testing::internal::String GTEST_FLAG(name)
+
+// Macros for defining flags.
+#define GTEST_DEFINE_bool_(name, default_val, doc) \
+    GTEST_API_ bool GTEST_FLAG(name) = (default_val)
+#define GTEST_DEFINE_int32_(name, default_val, doc) \
+    GTEST_API_ ::testing::internal::Int32 GTEST_FLAG(name) = (default_val)
+#define GTEST_DEFINE_string_(name, default_val, doc) \
+    GTEST_API_ ::testing::internal::String GTEST_FLAG(name) = (default_val)
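+
+// For illustration, a flag is typically wired up roughly like this (using
+// gtest's own print_time flag as the example name; the doc string here is
+// only a placeholder):
+//
+//   // In a header:
+//   GTEST_DECLARE_bool_(print_time);
+//   // In a .cc file:
+//   GTEST_DEFINE_bool_(print_time, true, "Print the elapsed time of each test.");
+//   // At use sites:
+//   if (GTEST_FLAG(print_time)) { /* print timing info */ }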
+
+// Parses 'str' for a 32-bit signed integer.  If successful, writes the result
+// to *value and returns true; otherwise leaves *value unchanged and returns
+// false.
+// TODO(chandlerc): Find a better way to refactor flag and environment parsing
+// out of both gtest-port.cc and gtest.cc to avoid exporting this utility
+// function.
+bool ParseInt32(const Message& src_text, const char* str, Int32* value);
+
+// Parses a bool/Int32/string from the environment variable
+// corresponding to the given Google Test flag.
+bool BoolFromGTestEnv(const char* flag, bool default_val);
+GTEST_API_ Int32 Int32FromGTestEnv(const char* flag, Int32 default_val);
+const char* StringFromGTestEnv(const char* flag, const char* default_val);
+
+}  // namespace internal
+}  // namespace testing
+
+#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_H_
+
+#if GTEST_OS_LINUX
+# include <stdlib.h>
+# include <sys/types.h>
+# include <sys/wait.h>
+# include <unistd.h>
+#endif  // GTEST_OS_LINUX
+
+#include <ctype.h>
+#include <string.h>
+#include <iomanip>
+#include <limits>
+#include <set>
+
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: wan@google.com (Zhanyong Wan), eefacm@gmail.com (Sean Mcafee)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This header file declares the String class and functions used internally by
+// Google Test.  They are subject to change without notice. They should not be used
+// by code external to Google Test.
+//
+// This header file is #included by <gtest/internal/gtest-internal.h>.
+// It should not be #included by other files.
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_STRING_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_STRING_H_
+
+#ifdef __BORLANDC__
+// string.h is not guaranteed to provide strcpy on C++ Builder.
+# include <mem.h>
+#endif
+
+#include <string.h>
+
+#include <string>
+
+namespace testing {
+namespace internal {
+
+// String - a UTF-8 string class.
+//
+// For historic reasons, we don't use std::string.
+//
+// TODO(wan@google.com): replace this class with std::string or
+// implement it in terms of the latter.
+//
+// Note that String can represent both NULL and the empty string,
+// while std::string cannot represent NULL.
+//
+// NULL and the empty string are considered different.  NULL is less
+// than anything (including the empty string) except itself.
+//
+// This class only provides the minimum functionality necessary for
+// implementing Google Test.  We do not intend to implement a full-fledged
+// string class here.
+//
+// Since the purpose of this class is to provide a substitute for
+// std::string on platforms where it cannot be used, we define a copy
+// constructor and assignment operators such that we don't need
+// conditional compilation in a lot of places.
+//
+// In order to make the representation efficient, the d'tor of String
+// is not virtual.  Therefore DO NOT INHERIT FROM String.
+class GTEST_API_ String {
+ public:
+  // Static utility methods
+
+  // Returns the input enclosed in double quotes if it's not NULL;
+  // otherwise returns "(null)".  For example, "\"Hello\"" is returned
+  // for input "Hello".
+  //
+  // This is useful for printing a C string in the syntax of a literal.
+  //
+  // Known issue: escape sequences are not handled yet.
+  static String ShowCStringQuoted(const char* c_str);
+
+  // Clones a 0-terminated C string, allocating memory using new.  The
+  // caller is responsible for deleting the return value using
+  // delete[].  Returns the cloned string, or NULL if the input is
+  // NULL.
+  //
+  // This is different from strdup() in string.h, which allocates
+  // memory using malloc().
+  static const char* CloneCString(const char* c_str);
+
+#if GTEST_OS_WINDOWS_MOBILE
+  // Windows CE does not have the 'ANSI' versions of Win32 APIs. To be
+  // able to pass strings to Win32 APIs on CE we need to convert them
+  // to 'Unicode', UTF-16.
+
+  // Creates a UTF-16 wide string from the given ANSI string, allocating
+  // memory using new. The caller is responsible for deleting the return
+  // value using delete[]. Returns the wide string, or NULL if the
+  // input is NULL.
+  //
+  // The wide string is created using the ANSI codepage (CP_ACP) to
+  // match the behaviour of the ANSI versions of Win32 calls and the
+  // C runtime.
+  static LPCWSTR AnsiToUtf16(const char* c_str);
+
+  // Creates an ANSI string from the given wide string, allocating
+  // memory using new. The caller is responsible for deleting the return
+  // value using delete[]. Returns the ANSI string, or NULL if the
+  // input is NULL.
+  //
+  // The returned string is created using the ANSI codepage (CP_ACP) to
+  // match the behaviour of the ANSI versions of Win32 calls and the
+  // C runtime.
+  static const char* Utf16ToAnsi(LPCWSTR utf16_str);
+#endif
+
+  // Compares two C strings.  Returns true iff they have the same content.
+  //
+  // Unlike strcmp(), this function can handle NULL argument(s).  A
+  // NULL C string is considered different to any non-NULL C string,
+  // including the empty string.
+  static bool CStringEquals(const char* lhs, const char* rhs);
+
+  // Converts a wide C string to a String using the UTF-8 encoding.
+  // NULL will be converted to "(null)".  If an error occurred during
+  // the conversion, "(failed to convert from wide string)" is
+  // returned.
+  static String ShowWideCString(const wchar_t* wide_c_str);
+
+  // Similar to ShowWideCString(), except that this function encloses
+  // the converted string in double quotes.
+  static String ShowWideCStringQuoted(const wchar_t* wide_c_str);
+
+  // Compares two wide C strings.  Returns true iff they have the same
+  // content.
+  //
+  // Unlike wcscmp(), this function can handle NULL argument(s).  A
+  // NULL C string is considered different to any non-NULL C string,
+  // including the empty string.
+  static bool WideCStringEquals(const wchar_t* lhs, const wchar_t* rhs);
+
+  // Compares two C strings, ignoring case.  Returns true iff they
+  // have the same content.
+  //
+  // Unlike strcasecmp(), this function can handle NULL argument(s).
+  // A NULL C string is considered different to any non-NULL C string,
+  // including the empty string.
+  static bool CaseInsensitiveCStringEquals(const char* lhs,
+                                           const char* rhs);
+
+  // Compares two wide C strings, ignoring case.  Returns true iff they
+  // have the same content.
+  //
+  // Unlike wcscasecmp(), this function can handle NULL argument(s).
+  // A NULL C string is considered different to any non-NULL wide C string,
+  // including the empty string.
+  // NB: The implementations on different platforms slightly differ.
+  // On windows, this method uses _wcsicmp which compares according to LC_CTYPE
+  // environment variable. On GNU platform this method uses wcscasecmp
+  // which compares according to LC_CTYPE category of the current locale.
+  // On MacOS X, it uses towlower, which also uses LC_CTYPE category of the
+  // current locale.
+  static bool CaseInsensitiveWideCStringEquals(const wchar_t* lhs,
+                                               const wchar_t* rhs);
+
+  // Formats a list of arguments to a String, using the same format
+  // spec string as for printf.
+  //
+  // We do not use the StringPrintf class as it is not universally
+  // available.
+  //
+  // The result is limited to 4096 characters (including the trailing
+  // 0).  If 4096 characters are not enough to format the input,
+  // "<buffer exceeded>" is returned.
+  static String Format(const char* format, ...);
+
+  // C'tors
+
+  // The default c'tor constructs a NULL string.
+  String() : c_str_(NULL), length_(0) {}
+
+  // Constructs a String by cloning a 0-terminated C string.
+  String(const char* a_c_str) {  // NOLINT
+    if (a_c_str == NULL) {
+      c_str_ = NULL;
+      length_ = 0;
+    } else {
+      ConstructNonNull(a_c_str, strlen(a_c_str));
+    }
+  }
+
+  // Constructs a String by copying a given number of chars from a
+  // buffer.  E.g. String("hello", 3) creates the string "hel",
+  // String("a\0bcd", 4) creates "a\0bc", String(NULL, 0) creates "",
+  // and String(NULL, 1) results in an access violation.
+  String(const char* buffer, size_t a_length) {
+    ConstructNonNull(buffer, a_length);
+  }
+
+  // The copy c'tor creates a new copy of the string.  The two
+  // String objects do not share content.
+  String(const String& str) : c_str_(NULL), length_(0) { *this = str; }
+
+  // D'tor.  String is intended to be a final class, so the d'tor
+  // doesn't need to be virtual.
+  ~String() { delete[] c_str_; }
+
+  // Allows a String to be implicitly converted to an ::std::string or
+  // ::string, and vice versa.  Converting a String containing a NULL
+  // pointer to ::std::string or ::string is undefined behavior.
+  // Converting a ::std::string or ::string containing an embedded NUL
+  // character to a String will result in the prefix up to the first
+  // NUL character.
+  String(const ::std::string& str) {
+    ConstructNonNull(str.c_str(), str.length());
+  }
+
+  operator ::std::string() const { return ::std::string(c_str(), length()); }
+
+#if GTEST_HAS_GLOBAL_STRING
+  String(const ::string& str) {
+    ConstructNonNull(str.c_str(), str.length());
+  }
+
+  operator ::string() const { return ::string(c_str(), length()); }
+#endif  // GTEST_HAS_GLOBAL_STRING
+
+  // Returns true iff this is an empty string (i.e. "").
+  bool empty() const { return (c_str() != NULL) && (length() == 0); }
+
+  // Compares this with another String.
+  // Returns < 0 if this is less than rhs, 0 if this is equal to rhs, or > 0
+  // if this is greater than rhs.
+  int Compare(const String& rhs) const;
+
+  // Returns true iff this String equals the given C string.  A NULL
+  // string and a non-NULL string are considered not equal.
+  bool operator==(const char* a_c_str) const { return Compare(a_c_str) == 0; }
+
+  // Returns true iff this String is less than the given String.  A
+  // NULL string is considered less than "".
+  bool operator<(const String& rhs) const { return Compare(rhs) < 0; }
+
+  // Returns true iff this String doesn't equal the given C string.  A NULL
+  // string and a non-NULL string are considered not equal.
+  bool operator!=(const char* a_c_str) const { return !(*this == a_c_str); }
+
+  // Returns true iff this String ends with the given suffix.  *Any*
+  // String is considered to end with a NULL or empty suffix.
+  bool EndsWith(const char* suffix) const;
+
+  // Returns true iff this String ends with the given suffix, not considering
+  // case. Any String is considered to end with a NULL or empty suffix.
+  bool EndsWithCaseInsensitive(const char* suffix) const;
+
+  // Returns the length of the encapsulated string, or 0 if the
+  // string is NULL.
+  size_t length() const { return length_; }
+
+  // Gets the 0-terminated C string this String object represents.
+  // The String object still owns the string.  Therefore the caller
+  // should NOT delete the return value.
+  const char* c_str() const { return c_str_; }
+
+  // Assigns a C string to this object.  Self-assignment works.
+  const String& operator=(const char* a_c_str) {
+    return *this = String(a_c_str);
+  }
+
+  // Assigns a String object to this object.  Self-assignment works.
+  const String& operator=(const String& rhs) {
+    if (this != &rhs) {
+      delete[] c_str_;
+      if (rhs.c_str() == NULL) {
+        c_str_ = NULL;
+        length_ = 0;
+      } else {
+        ConstructNonNull(rhs.c_str(), rhs.length());
+      }
+    }
+
+    return *this;
+  }
+
+ private:
+  // Constructs a non-NULL String from the given content.  This
+  // function can only be called when c_str_ has not been allocated.
+  // ConstructNonNull(NULL, 0) results in an empty string ("").
+  // ConstructNonNull(NULL, non_zero) is undefined behavior.
+  void ConstructNonNull(const char* buffer, size_t a_length) {
+    char* const str = new char[a_length + 1];
+    memcpy(str, buffer, a_length);
+    str[a_length] = '\0';
+    c_str_ = str;
+    length_ = a_length;
+  }
+
+  const char* c_str_;
+  size_t length_;
+};  // class String
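+
+// Illustrative usage sketch (not part of upstream Google Test; the helper
+// name below is hypothetical): exercises the String members declared above,
+// following their documented semantics -- deep copies, embedded NULs
+// preserved, NULL-aware comparisons.
+inline void StringUsageSketchForDocs() {
+  const String s("a\0bc", 4);              // four chars, embedded NUL kept
+  const String t = s;                      // copy c'tor: independent storage
+  const bool equal = (s.Compare(t) == 0);  // 0 means the contents are equal
+  const bool any_suffix = s.EndsWith("");  // any String ends with ""
+  (void)equal;
+  (void)any_suffix;
+}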
+
+// Streams a String to an ostream.  Each '\0' character in the String
+// is replaced with "\\0".
+inline ::std::ostream& operator<<(::std::ostream& os, const String& str) {
+  if (str.c_str() == NULL) {
+    os << "(null)";
+  } else {
+    const char* const c_str = str.c_str();
+    for (size_t i = 0; i != str.length(); i++) {
+      if (c_str[i] == '\0') {
+        os << "\\0";
+      } else {
+        os << c_str[i];
+      }
+    }
+  }
+  return os;
+}
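+
+// Illustrative sketch (not upstream code; the helper name is hypothetical):
+// with the escaping documented above, streaming String("a\0b", 3) writes the
+// four characters a \ 0 b.
+inline void StreamStringSketchForDocs(::std::ostream& os) {
+  os << String("a\0b", 3);
+}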
+
+// Gets the content of the stringstream's buffer as a String.  Each '\0'
+// character in the buffer is replaced with "\\0".
+GTEST_API_ String StringStreamToString(::std::stringstream* stream);
+
+// Converts a streamable value to a String.  A NULL pointer is
+// converted to "(null)".  When the input value is a ::string,
+// ::std::string, ::wstring, or ::std::wstring object, each NUL
+// character in it is replaced with "\\0".
+
+// Declared here but defined in gtest.h, so that it has access
+// to the definition of the Message class, required by the ARM
+// compiler.
+template <typename T>
+String StreamableToString(const T& streamable);
+
+}  // namespace internal
+}  // namespace testing
+
+#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_STRING_H_
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: keith.ray at gmail.com (Keith Ray)
+//
+// Google Test filepath utilities
+//
+// This header file declares classes and functions used internally by
+// Google Test.  They are subject to change without notice.
+//
+// This file is #included in <gtest/internal/gtest-internal.h>.
+// Do not include this header file separately!
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_FILEPATH_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_FILEPATH_H_
+
+
+namespace testing {
+namespace internal {
+
+// FilePath - a class for file and directory pathname manipulation which
+// handles platform-specific conventions (like the pathname separator).
+// Used by helper functions that name files in a directory for XML output.
+// Except for Set methods, all methods are const or static, which provides an
+// "immutable value object" -- useful for peace of mind.
+// A FilePath with a value ending in a path separator ("like/this/") represents
+// a directory, otherwise it is assumed to represent a file. In either case,
+// it may or may not represent an actual file or directory in the file system.
+// Names are NOT checked for syntax correctness -- no checking for illegal
+// characters, malformed paths, etc.
+
+class GTEST_API_ FilePath {
+ public:
+  FilePath() : pathname_("") { }
+  FilePath(const FilePath& rhs) : pathname_(rhs.pathname_) { }
+
+  explicit FilePath(const char* pathname) : pathname_(pathname) {
+    Normalize();
+  }
+
+  explicit FilePath(const String& pathname) : pathname_(pathname) {
+    Normalize();
+  }
+
+  FilePath& operator=(const FilePath& rhs) {
+    Set(rhs);
+    return *this;
+  }
+
+  void Set(const FilePath& rhs) {
+    pathname_ = rhs.pathname_;
+  }
+
+  String ToString() const { return pathname_; }
+  const char* c_str() const { return pathname_.c_str(); }
+
+  // Returns the current working directory, or "" if unsuccessful.
+  static FilePath GetCurrentDir();
+
+  // Given directory = "dir", base_name = "test", number = 0,
+  // extension = "xml", returns "dir/test.xml". If number is greater
+  // than zero (e.g., 12), returns "dir/test_12.xml".
+  // On Windows platform, uses \ as the separator rather than /.
+  static FilePath MakeFileName(const FilePath& directory,
+                               const FilePath& base_name,
+                               int number,
+                               const char* extension);
+
+  // Given directory = "dir", relative_path = "test.xml",
+  // returns "dir/test.xml".
+  // On Windows, uses \ as the separator rather than /.
+  static FilePath ConcatPaths(const FilePath& directory,
+                              const FilePath& relative_path);
+
+  // Returns a pathname for a file that does not currently exist. The pathname
+  // will be directory/base_name.extension or
+  // directory/base_name_<number>.extension if directory/base_name.extension
+  // already exists. The number will be incremented until a pathname is found
+  // that does not already exist.
+  // Examples: 'dir/foo_test.xml' or 'dir/foo_test_1.xml'.
+  // There could be a race condition if two or more processes are calling this
+  // function at the same time -- they could both pick the same filename.
+  static FilePath GenerateUniqueFileName(const FilePath& directory,
+                                         const FilePath& base_name,
+                                         const char* extension);
+
+  // Returns true iff the path is NULL or "".
+  bool IsEmpty() const { return c_str() == NULL || *c_str() == '\0'; }
+
+  // If input name has a trailing separator character, removes it and returns
+  // the name; otherwise returns the name string unmodified.
+  // On Windows platform, uses \ as the separator, other platforms use /.
+  FilePath RemoveTrailingPathSeparator() const;
+
+  // Returns a copy of the FilePath with the directory part removed.
+  // Example: FilePath("path/to/file").RemoveDirectoryName() returns
+  // FilePath("file"). If there is no directory part ("just_a_file"), it returns
+  // the FilePath unmodified. If there is no file part ("just_a_dir/") it
+  // returns an empty FilePath ("").
+  // On Windows platform, '\' is the path separator, otherwise it is '/'.
+  FilePath RemoveDirectoryName() const;
+
+  // RemoveFileName returns the directory path with the filename removed.
+  // Example: FilePath("path/to/file").RemoveFileName() returns "path/to/".
+  // If the FilePath is "a_file" or "/a_file", RemoveFileName returns
+  // FilePath("./") or, on Windows, FilePath(".\\"). If the filepath does
+  // not have a file, like "just/a/dir/", it returns the FilePath unmodified.
+  // On Windows platform, '\' is the path separator, otherwise it is '/'.
+  FilePath RemoveFileName() const;
+
+  // Returns a copy of the FilePath with the case-insensitive extension removed.
+  // Example: FilePath("dir/file.exe").RemoveExtension("EXE") returns
+  // FilePath("dir/file"). If a case-insensitive extension is not
+  // found, returns a copy of the original FilePath.
+  FilePath RemoveExtension(const char* extension) const;
+
+  // Creates directories so that path exists. Returns true if successful or if
+  // the directories already exist; returns false if unable to create
+  // directories for any reason. Will also return false if the FilePath does
+  // not represent a directory (that is, it doesn't end with a path separator).
+  bool CreateDirectoriesRecursively() const;
+
+  // Create the directory so that path exists. Returns true if successful or
+  // if the directory already exists; returns false if unable to create the
+  // directory for any reason, including if the parent directory does not
+  // exist. Not named "CreateDirectory" because that's a macro on Windows.
+  bool CreateFolder() const;
+
+  // Returns true if FilePath describes something in the file-system,
+  // either a file, directory, or whatever, and that something exists.
+  bool FileOrDirectoryExists() const;
+
+  // Returns true if pathname describes a directory in the file-system
+  // that exists.
+  bool DirectoryExists() const;
+
+  // Returns true if FilePath ends with a path separator, which indicates that
+  // it is intended to represent a directory. Returns false otherwise.
+  // This does NOT check that a directory (or file) actually exists.
+  bool IsDirectory() const;
+
+  // Returns true if pathname describes a root directory. (Windows has one
+  // root directory per disk drive.)
+  bool IsRootDirectory() const;
+
+  // Returns true if pathname describes an absolute path.
+  bool IsAbsolutePath() const;
+
+ private:
+  // Replaces multiple consecutive separators with a single separator.
+  // For example, "bar///foo" becomes "bar/foo". Does not eliminate other
+  // redundancies that might be in a pathname involving "." or "..".
+  //
+  // A pathname with multiple consecutive separators may occur either through
+  // user error or as a result of some scripts or APIs that generate a pathname
+  // with a trailing separator. On other platforms the same API or script
+  // may NOT generate a pathname with a trailing "/". Then elsewhere that
+  // pathname may have another "/" and pathname components added to it,
+  // without checking for the separator already being there.
+  // The script language and operating system may allow paths like "foo//bar"
+  // but some of the functions in FilePath will not handle that correctly. In
+  // particular, RemoveTrailingPathSeparator() only removes one separator, and
+  // it is called in CreateDirectoriesRecursively() assuming that it will change
+  // a pathname from directory syntax (trailing separator) to filename syntax.
+  //
+  // On Windows this method also replaces the alternate path separator '/' with
+  // the primary path separator '\\', so that for example "bar\\/\\foo" becomes
+  // "bar\\foo".
+
+  void Normalize();
+
+  // Returns a pointer to the last occurrence of a valid path separator in
+  // the FilePath. On Windows, for example, both '/' and '\' are valid path
+  // separators. Returns NULL if no path separator was found.
+  const char* FindLastPathSeparator() const;
+
+  String pathname_;
+};  // class FilePath
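+
+// Illustrative usage sketch (not part of upstream Google Test; the helper
+// name is hypothetical): shows how the static helpers and Remove* members
+// documented above compose.  Results are shown with '/' separators; Windows
+// uses '\\' instead.
+inline void FilePathUsageSketchForDocs() {
+  const FilePath dir("output/xml");
+  // MakeFileName(dir, "report", 12, "xml") yields "output/xml/report_12.xml";
+  // with number == 0 the "_<number>" part is omitted.
+  const FilePath report =
+      FilePath::MakeFileName(dir, FilePath("report"), 12, "xml");
+  // RemoveDirectoryName() keeps only the file part ("report_12.xml"), while
+  // RemoveFileName() on the same path would keep only "output/xml/".
+  const FilePath base = report.RemoveDirectoryName();
+  (void)base;
+}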
+
+}  // namespace internal
+}  // namespace testing
+
+#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_FILEPATH_H_
+// This file was GENERATED by command:
+//     pump.py gtest-type-util.h.pump
+// DO NOT EDIT BY HAND!!!
+
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan at google.com (Zhanyong Wan)
+
+// Type utilities needed for implementing typed and type-parameterized
+// tests.  This file is generated by a SCRIPT.  DO NOT EDIT BY HAND!
+//
+// Currently we support at most 50 types in a list, and at most 50
+// type-parameterized tests in one type-parameterized test case.
+// Please contact googletestframework at googlegroups.com if you need
+// more.
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TYPE_UTIL_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TYPE_UTIL_H_
+
+
+// #ifdef __GNUC__ is too general here.  It is possible to use gcc without using
+// libstdc++ (which is where cxxabi.h comes from).
+# ifdef __GLIBCXX__
+#  include <cxxabi.h>
+# elif defined(__HP_aCC)
+#  include <acxx_demangle.h>
+# endif  // __GLIBCXX__
+
+namespace testing {
+namespace internal {
+
+// GetTypeName<T>() returns a human-readable name of type T.
+// NB: This function is also used in Google Mock, so don't move it inside of
+// the typed-test-only section below.
+template <typename T>
+String GetTypeName() {
+# if GTEST_HAS_RTTI
+
+  const char* const name = typeid(T).name();
+#  if defined(__GLIBCXX__) || defined(__HP_aCC)
+  int status = 0;
+  // gcc's implementation of typeid(T).name() mangles the type name,
+  // so we have to demangle it.
+#   ifdef __GLIBCXX__
+  using abi::__cxa_demangle;
+#   endif // __GLIBCXX__
+  char* const readable_name = __cxa_demangle(name, 0, 0, &status);
+  const String name_str(status == 0 ? readable_name : name);
+  free(readable_name);
+  return name_str;
+#  else
+  return name;
+#  endif  // __GLIBCXX__ || __HP_aCC
+
+# else
+
+  return "<type>";
+
+# endif  // GTEST_HAS_RTTI
+}
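+
+// Illustrative sketch (not upstream code; the helper name is hypothetical):
+// with RTTI and libstdc++'s demangler available, GetTypeName<int>() yields
+// "int"; without RTTI it falls back to the generic placeholder "<type>".
+inline String GetTypeNameSketchForDocs() { return GetTypeName<int>(); }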
+
+#if GTEST_HAS_TYPED_TEST || GTEST_HAS_TYPED_TEST_P
+
+// AssertTypeEq<T1, T2>::type is defined iff T1 and T2 are the same
+// type.  This can be used as a compile-time assertion to ensure that
+// two types are equal.
+
+template <typename T1, typename T2>
+struct AssertTypeEq;
+
+template <typename T>
+struct AssertTypeEq<T, T> {
+  typedef bool type;
+};
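+
+// Illustrative sketch (not upstream code; the typedef name is hypothetical):
+// AssertTypeEq<int, int>::type is well-formed, whereas
+// AssertTypeEq<int, long>::type would fail to compile because only the
+// T == T specialization defines 'type'.
+typedef AssertTypeEq<int, int>::type AssertTypeEqSketchForDocs;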
+
+// A unique type used as the default value for the arguments of class
+// template Types.  This allows us to simulate variadic templates
+// (e.g. Types<int>, Types<int, double>, etc.), which C++ doesn't
+// support directly.
+struct None {};
+
+// The following family of struct and struct templates are used to
+// represent type lists.  In particular, TypesN<T1, T2, ..., TN>
+// represents a type list with N types (T1, T2, ..., and TN) in it.
+// Except for Types0, every struct in the family has two member types:
+// Head for the first type in the list, and Tail for the rest of the
+// list.
+
+// The empty type list.
+struct Types0 {};
+
+// Type lists of length 1, 2, 3, and so on.
+
+template <typename T1>
+struct Types1 {
+  typedef T1 Head;
+  typedef Types0 Tail;
+};
+template <typename T1, typename T2>
+struct Types2 {
+  typedef T1 Head;
+  typedef Types1<T2> Tail;
+};
+
+template <typename T1, typename T2, typename T3>
+struct Types3 {
+  typedef T1 Head;
+  typedef Types2<T2, T3> Tail;
+};
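+
+// Illustrative sketch (not upstream code; the typedef names are
+// hypothetical): walking a short list with the Head/Tail members described
+// above.
+typedef Types3<int, char, bool> SketchTypeListForDocs;          // [int, char, bool]
+typedef SketchTypeListForDocs::Head SketchFirstForDocs;         // int
+typedef SketchTypeListForDocs::Tail::Head SketchSecondForDocs;  // char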
+
+template <typename T1, typename T2, typename T3, typename T4>
+struct Types4 {
+  typedef T1 Head;
+  typedef Types3<T2, T3, T4> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5>
+struct Types5 {
+  typedef T1 Head;
+  typedef Types4<T2, T3, T4, T5> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6>
+struct Types6 {
+  typedef T1 Head;
+  typedef Types5<T2, T3, T4, T5, T6> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7>
+struct Types7 {
+  typedef T1 Head;
+  typedef Types6<T2, T3, T4, T5, T6, T7> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8>
+struct Types8 {
+  typedef T1 Head;
+  typedef Types7<T2, T3, T4, T5, T6, T7, T8> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9>
+struct Types9 {
+  typedef T1 Head;
+  typedef Types8<T2, T3, T4, T5, T6, T7, T8, T9> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10>
+struct Types10 {
+  typedef T1 Head;
+  typedef Types9<T2, T3, T4, T5, T6, T7, T8, T9, T10> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11>
+struct Types11 {
+  typedef T1 Head;
+  typedef Types10<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12>
+struct Types12 {
+  typedef T1 Head;
+  typedef Types11<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13>
+struct Types13 {
+  typedef T1 Head;
+  typedef Types12<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14>
+struct Types14 {
+  typedef T1 Head;
+  typedef Types13<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15>
+struct Types15 {
+  typedef T1 Head;
+  typedef Types14<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16>
+struct Types16 {
+  typedef T1 Head;
+  typedef Types15<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17>
+struct Types17 {
+  typedef T1 Head;
+  typedef Types16<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18>
+struct Types18 {
+  typedef T1 Head;
+  typedef Types17<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19>
+struct Types19 {
+  typedef T1 Head;
+  typedef Types18<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20>
+struct Types20 {
+  typedef T1 Head;
+  typedef Types19<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21>
+struct Types21 {
+  typedef T1 Head;
+  typedef Types20<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22>
+struct Types22 {
+  typedef T1 Head;
+  typedef Types21<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23>
+struct Types23 {
+  typedef T1 Head;
+  typedef Types22<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24>
+struct Types24 {
+  typedef T1 Head;
+  typedef Types23<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25>
+struct Types25 {
+  typedef T1 Head;
+  typedef Types24<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26>
+struct Types26 {
+  typedef T1 Head;
+  typedef Types25<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27>
+struct Types27 {
+  typedef T1 Head;
+  typedef Types26<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28>
+struct Types28 {
+  typedef T1 Head;
+  typedef Types27<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29>
+struct Types29 {
+  typedef T1 Head;
+  typedef Types28<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30>
+struct Types30 {
+  typedef T1 Head;
+  typedef Types29<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31>
+struct Types31 {
+  typedef T1 Head;
+  typedef Types30<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32>
+struct Types32 {
+  typedef T1 Head;
+  typedef Types31<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33>
+struct Types33 {
+  typedef T1 Head;
+  typedef Types32<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34>
+struct Types34 {
+  typedef T1 Head;
+  typedef Types33<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35>
+struct Types35 {
+  typedef T1 Head;
+  typedef Types34<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36>
+struct Types36 {
+  typedef T1 Head;
+  typedef Types35<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37>
+struct Types37 {
+  typedef T1 Head;
+  typedef Types36<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38>
+struct Types38 {
+  typedef T1 Head;
+  typedef Types37<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39>
+struct Types39 {
+  typedef T1 Head;
+  typedef Types38<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38, T39> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40>
+struct Types40 {
+  typedef T1 Head;
+  typedef Types39<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41>
+struct Types41 {
+  typedef T1 Head;
+  typedef Types40<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42>
+struct Types42 {
+  typedef T1 Head;
+  typedef Types41<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43>
+struct Types43 {
+  typedef T1 Head;
+  typedef Types42<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42,
+      T43> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44>
+struct Types44 {
+  typedef T1 Head;
+  typedef Types43<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+      T44> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45>
+struct Types45 {
+  typedef T1 Head;
+  typedef Types44<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+      T44, T45> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46>
+struct Types46 {
+  typedef T1 Head;
+  typedef Types45<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+      T44, T45, T46> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47>
+struct Types47 {
+  typedef T1 Head;
+  typedef Types46<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+      T44, T45, T46, T47> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47, typename T48>
+struct Types48 {
+  typedef T1 Head;
+  typedef Types47<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+      T44, T45, T46, T47, T48> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47, typename T48, typename T49>
+struct Types49 {
+  typedef T1 Head;
+  typedef Types48<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+      T44, T45, T46, T47, T48, T49> Tail;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47, typename T48, typename T49, typename T50>
+struct Types50 {
+  typedef T1 Head;
+  typedef Types49<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+      T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+      T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+      T44, T45, T46, T47, T48, T49, T50> Tail;
+};
+
+
+}  // namespace internal
+
+// We don't want to require the users to write TypesN<...> directly,
+// as that would require them to count the length.  Types<...> is much
+// easier to write, but generates horrible messages when there is a
+// compiler error, as gcc insists on printing out each template
+// argument, even if it has the default value (this means Types<int>
+// will appear as Types<int, None, None, ..., None> in the compiler
+// errors).
+//
+// Our solution is to combine the best part of the two approaches: a
+// user would write Types<T1, ..., TN>, and Google Test will translate
+// that to TypesN<T1, ..., TN> internally to make error messages
+// readable.  The translation is done by the 'type' member of the
+// Types template.
+template <typename T1 = internal::None, typename T2 = internal::None,
+    typename T3 = internal::None, typename T4 = internal::None,
+    typename T5 = internal::None, typename T6 = internal::None,
+    typename T7 = internal::None, typename T8 = internal::None,
+    typename T9 = internal::None, typename T10 = internal::None,
+    typename T11 = internal::None, typename T12 = internal::None,
+    typename T13 = internal::None, typename T14 = internal::None,
+    typename T15 = internal::None, typename T16 = internal::None,
+    typename T17 = internal::None, typename T18 = internal::None,
+    typename T19 = internal::None, typename T20 = internal::None,
+    typename T21 = internal::None, typename T22 = internal::None,
+    typename T23 = internal::None, typename T24 = internal::None,
+    typename T25 = internal::None, typename T26 = internal::None,
+    typename T27 = internal::None, typename T28 = internal::None,
+    typename T29 = internal::None, typename T30 = internal::None,
+    typename T31 = internal::None, typename T32 = internal::None,
+    typename T33 = internal::None, typename T34 = internal::None,
+    typename T35 = internal::None, typename T36 = internal::None,
+    typename T37 = internal::None, typename T38 = internal::None,
+    typename T39 = internal::None, typename T40 = internal::None,
+    typename T41 = internal::None, typename T42 = internal::None,
+    typename T43 = internal::None, typename T44 = internal::None,
+    typename T45 = internal::None, typename T46 = internal::None,
+    typename T47 = internal::None, typename T48 = internal::None,
+    typename T49 = internal::None, typename T50 = internal::None>
+struct Types {
+  typedef internal::Types50<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+      T41, T42, T43, T44, T45, T46, T47, T48, T49, T50> type;
+};
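+
+// Illustrative sketch (not upstream code; the typedef and the FooTest
+// fixture named in the comment are hypothetical): a user writes the
+// variadic-looking form below, and its 'type' member resolves to
+// internal::Types3<int, double, char> through the partial specializations
+// that follow -- the form the typed-test machinery consumes, e.g.
+//   TYPED_TEST_CASE(FooTest, SketchUserTypesForDocs);
+typedef Types<int, double, char> SketchUserTypesForDocs;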
+
+template <>
+struct Types<internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types0 type;
+};
+template <typename T1>
+struct Types<T1, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types1<T1> type;
+};
+template <typename T1, typename T2>
+struct Types<T1, T2, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None> {
+  typedef internal::Types2<T1, T2> type;
+};
+template <typename T1, typename T2, typename T3>
+struct Types<T1, T2, T3, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None> {
+  typedef internal::Types3<T1, T2, T3> type;
+};
+template <typename T1, typename T2, typename T3, typename T4>
+struct Types<T1, T2, T3, T4, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None> {
+  typedef internal::Types4<T1, T2, T3, T4> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5>
+struct Types<T1, T2, T3, T4, T5, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None> {
+  typedef internal::Types5<T1, T2, T3, T4, T5> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6>
+struct Types<T1, T2, T3, T4, T5, T6, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types6<T1, T2, T3, T4, T5, T6> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7>
+struct Types<T1, T2, T3, T4, T5, T6, T7, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None> {
+  typedef internal::Types7<T1, T2, T3, T4, T5, T6, T7> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None> {
+  typedef internal::Types8<T1, T2, T3, T4, T5, T6, T7, T8> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None> {
+  typedef internal::Types9<T1, T2, T3, T4, T5, T6, T7, T8, T9> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None> {
+  typedef internal::Types10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None> {
+  typedef internal::Types12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None> {
+  typedef internal::Types13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None> {
+  typedef internal::Types14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None> {
+  typedef internal::Types15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None> {
+  typedef internal::Types17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None> {
+  typedef internal::Types18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None> {
+  typedef internal::Types19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None> {
+  typedef internal::Types20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None> {
+  typedef internal::Types21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None> {
+  typedef internal::Types23<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None> {
+  typedef internal::Types24<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None> {
+  typedef internal::Types25<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None> {
+  typedef internal::Types26<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types27<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types28<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None> {
+  typedef internal::Types29<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None> {
+  typedef internal::Types30<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None> {
+  typedef internal::Types31<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types32<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types33<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None> {
+  typedef internal::Types34<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None> {
+  typedef internal::Types35<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None> {
+  typedef internal::Types36<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None> {
+  typedef internal::Types37<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, T38, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types38<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, T38, T39, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types39<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None> {
+  typedef internal::Types40<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39,
+      T40> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None, internal::None> {
+  typedef internal::Types41<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+      T41> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, internal::None,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None> {
+  typedef internal::Types42<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+      T41, T42> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None, internal::None> {
+  typedef internal::Types43<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+      T41, T42, T43> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43, T44,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None, internal::None> {
+  typedef internal::Types44<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+      T41, T42, T43, T44> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43, T44, T45,
+    internal::None, internal::None, internal::None, internal::None,
+    internal::None> {
+  typedef internal::Types45<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+      T41, T42, T43, T44, T45> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43, T44, T45,
+    T46, internal::None, internal::None, internal::None, internal::None> {
+  typedef internal::Types46<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+      T41, T42, T43, T44, T45, T46> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43, T44, T45,
+    T46, T47, internal::None, internal::None, internal::None> {
+  typedef internal::Types47<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+      T41, T42, T43, T44, T45, T46, T47> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47, typename T48>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43, T44, T45,
+    T46, T47, T48, internal::None, internal::None> {
+  typedef internal::Types48<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+      T41, T42, T43, T44, T45, T46, T47, T48> type;
+};
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47, typename T48, typename T49>
+struct Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
+    T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29, T30,
+    T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43, T44, T45,
+    T46, T47, T48, T49, internal::None> {
+  typedef internal::Types49<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+      T41, T42, T43, T44, T45, T46, T47, T48, T49> type;
+};
+
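Taken together, the specializations above simply strip trailing internal::None
arguments, so the public Types<...> template resolves to the internal TypesN
list whose arity matches the number of types actually written out.  A minimal
usage sketch (the typedef names below are hypothetical; Head and Tail are the
members defined by the TypesN structs earlier in this header):

    // Types<int, char>::type is internal::Types2<int, char>, a Head/Tail
    // list holding exactly the two types that were supplied.
    typedef ::testing::Types<int, char>::type TwoTypes;
    typedef TwoTypes::Head        First;   // int
    typedef TwoTypes::Tail::Head  Second;  // char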
+namespace internal {
+
+# define GTEST_TEMPLATE_ template <typename T> class
+
+// The template "selector" struct TemplateSel<Tmpl> is used to
+// represent Tmpl, which must be a class template with one type
+// parameter, as a type.  TemplateSel<Tmpl>::Bind<T>::type is defined
+// as the type Tmpl<T>.  This allows us to actually instantiate the
+// template "selected" by TemplateSel<Tmpl>.
+//
+// This trick is necessary for simulating typedef for class templates,
+// which C++ doesn't support directly.
+template <GTEST_TEMPLATE_ Tmpl>
+struct TemplateSel {
+  template <typename T>
+  struct Bind {
+    typedef Tmpl<T> type;
+  };
+};
+
+# define GTEST_BIND_(TmplSel, T) \
+  TmplSel::template Bind<T>::type
+
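As an illustrative sketch of how these two pieces fit together (not part of
the upstream header; MyFixture and UseSelected are hypothetical names), a
class template can be wrapped in TemplateSel, handed around as an ordinary
type, and later re-instantiated through GTEST_BIND_, much as gtest's own
type-parameterized test machinery does:

    template <typename T>
    struct MyFixture { T value; };         // any one-parameter class template

    template <typename TmplSel>            // TmplSel is some TemplateSel<...>
    int UseSelected() {
      // In this dependent context GTEST_BIND_ expands to
      // TmplSel::template Bind<int>::type, i.e. MyFixture<int> below.
      typedef typename GTEST_BIND_(TmplSel, int) Selected;
      Selected s;
      s.value = 42;
      return s.value;
    }

    // Usage:  UseSelected< ::testing::internal::TemplateSel<MyFixture> >();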
+// A unique struct template used as the default value for the
+// arguments of class template Templates.  This allows us to simulate
+// variadic templates (e.g. Templates<int>, Templates<int, double>,
+// etc.), which C++ doesn't support directly.
+template <typename T>
+struct NoneT {};
+
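The same default-argument idea can be shown in miniature (a hedged sketch
with hypothetical names, using plain type parameters instead of the template
template parameters taken by Templates, purely for brevity):

    struct Sentinel {};                       // stands in for NoneT/None

    // Primary template: fixed arity, unused slots default to the sentinel.
    template <typename A = Sentinel, typename B = Sentinel>
    struct List { enum { kLength = 2 }; };

    // Specializations peel off trailing sentinels, so the effective arity
    // is the number of arguments the user actually wrote.
    template <typename A>
    struct List<A, Sentinel> { enum { kLength = 1 }; };

    template <>
    struct List<Sentinel, Sentinel> { enum { kLength = 0 }; };

    // List<int>::kLength == 1, List<int, double>::kLength == 2.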
+// The following family of struct and struct templates are used to
+// represent template lists.  In particular, TemplatesN<T1, T2, ...,
+// TN> represents a list of N templates (T1, T2, ..., and TN).  Except
+// for Templates0, every struct in the family has two member types:
+// Head for the selector of the first template in the list, and Tail
+// for the rest of the list.
+
+// The empty template list.
+struct Templates0 {};
+
+// Template lists of length 1, 2, 3, and so on.
+
+template <GTEST_TEMPLATE_ T1>
+struct Templates1 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates0 Tail;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2>
+struct Templates2 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates1<T2> Tail;
+};
+
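Because every non-empty template list exposes Head and Tail in this way,
generic code can walk a list recursively.  A minimal sketch (Length is a
hypothetical metafunction, not part of gtest, shown only to illustrate the
Head/Tail recursion):

    template <typename TemplateList>
    struct Length {
      // One for the Head, plus however many templates remain in the Tail.
      enum { value = 1 + Length<typename TemplateList::Tail>::value };
    };

    template <>
    struct Length<Templates0> {               // empty list ends the recursion
      enum { value = 0 };
    };

    // For a one-parameter class template Tmpl:
    //   Length<Templates1<Tmpl> >::value == 1
    //   Length<Templates2<Tmpl, Tmpl> >::value == 2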
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3>
+struct Templates3 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates2<T2, T3> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4>
+struct Templates4 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates3<T2, T3, T4> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5>
+struct Templates5 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates4<T2, T3, T4, T5> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6>
+struct Templates6 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates5<T2, T3, T4, T5, T6> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7>
+struct Templates7 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates6<T2, T3, T4, T5, T6, T7> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8>
+struct Templates8 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates7<T2, T3, T4, T5, T6, T7, T8> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9>
+struct Templates9 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates8<T2, T3, T4, T5, T6, T7, T8, T9> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10>
+struct Templates10 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates9<T2, T3, T4, T5, T6, T7, T8, T9, T10> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11>
+struct Templates11 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates10<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12>
+struct Templates12 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates11<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13>
+struct Templates13 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates12<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14>
+struct Templates14 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates13<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15>
+struct Templates15 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates14<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16>
+struct Templates16 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates15<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17>
+struct Templates17 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates16<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18>
+struct Templates18 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates17<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19>
+struct Templates19 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates18<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20>
+struct Templates20 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates19<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21>
+struct Templates21 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates20<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22>
+struct Templates22 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates21<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23>
+struct Templates23 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates22<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24>
+struct Templates24 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates23<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25>
+struct Templates25 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates24<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26>
+struct Templates26 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates25<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27>
+struct Templates27 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates26<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28>
+struct Templates28 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates27<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29>
+struct Templates29 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates28<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30>
+struct Templates30 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates29<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31>
+struct Templates31 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates30<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32>
+struct Templates32 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates31<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33>
+struct Templates33 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates32<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34>
+struct Templates34 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates33<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35>
+struct Templates35 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates34<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36>
+struct Templates36 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates35<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37>
+struct Templates37 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates36<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38>
+struct Templates38 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates37<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39>
+struct Templates39 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates38<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40>
+struct Templates40 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates39<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41>
+struct Templates41 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates40<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42>
+struct Templates42 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates41<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41,
+      T42> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43>
+struct Templates43 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates42<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42,
+      T43> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44>
+struct Templates44 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates43<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42,
+      T43, T44> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44, GTEST_TEMPLATE_ T45>
+struct Templates45 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates44<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42,
+      T43, T44, T45> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44, GTEST_TEMPLATE_ T45,
+    GTEST_TEMPLATE_ T46>
+struct Templates46 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates45<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42,
+      T43, T44, T45, T46> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44, GTEST_TEMPLATE_ T45,
+    GTEST_TEMPLATE_ T46, GTEST_TEMPLATE_ T47>
+struct Templates47 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates46<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42,
+      T43, T44, T45, T46, T47> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44, GTEST_TEMPLATE_ T45,
+    GTEST_TEMPLATE_ T46, GTEST_TEMPLATE_ T47, GTEST_TEMPLATE_ T48>
+struct Templates48 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates47<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42,
+      T43, T44, T45, T46, T47, T48> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44, GTEST_TEMPLATE_ T45,
+    GTEST_TEMPLATE_ T46, GTEST_TEMPLATE_ T47, GTEST_TEMPLATE_ T48,
+    GTEST_TEMPLATE_ T49>
+struct Templates49 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates48<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42,
+      T43, T44, T45, T46, T47, T48, T49> Tail;
+};
+
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44, GTEST_TEMPLATE_ T45,
+    GTEST_TEMPLATE_ T46, GTEST_TEMPLATE_ T47, GTEST_TEMPLATE_ T48,
+    GTEST_TEMPLATE_ T49, GTEST_TEMPLATE_ T50>
+struct Templates50 {
+  typedef TemplateSel<T1> Head;
+  typedef Templates49<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+      T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+      T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42,
+      T43, T44, T45, T46, T47, T48, T49, T50> Tail;
+};
+
+
+// We don't want to require the users to write TemplatesN<...> directly,
+// as that would require them to count the length.  Templates<...> is much
+// easier to write, but generates horrible messages when there is a
+// compiler error, as gcc insists on printing out each template
+// argument, even if it has the default value (this means Templates<list>
+// will appear as Templates<list, NoneT, NoneT, ..., NoneT> in the compiler
+// errors).
+//
+// Our solution is to combine the best part of the two approaches: a
+// user would write Templates<T1, ..., TN>, and Google Test will translate
+// that to TemplatesN<T1, ..., TN> internally to make error messages
+// readable.  The translation is done by the 'type' member of the
+// Templates template.
+template <GTEST_TEMPLATE_ T1 = NoneT, GTEST_TEMPLATE_ T2 = NoneT,
+    GTEST_TEMPLATE_ T3 = NoneT, GTEST_TEMPLATE_ T4 = NoneT,
+    GTEST_TEMPLATE_ T5 = NoneT, GTEST_TEMPLATE_ T6 = NoneT,
+    GTEST_TEMPLATE_ T7 = NoneT, GTEST_TEMPLATE_ T8 = NoneT,
+    GTEST_TEMPLATE_ T9 = NoneT, GTEST_TEMPLATE_ T10 = NoneT,
+    GTEST_TEMPLATE_ T11 = NoneT, GTEST_TEMPLATE_ T12 = NoneT,
+    GTEST_TEMPLATE_ T13 = NoneT, GTEST_TEMPLATE_ T14 = NoneT,
+    GTEST_TEMPLATE_ T15 = NoneT, GTEST_TEMPLATE_ T16 = NoneT,
+    GTEST_TEMPLATE_ T17 = NoneT, GTEST_TEMPLATE_ T18 = NoneT,
+    GTEST_TEMPLATE_ T19 = NoneT, GTEST_TEMPLATE_ T20 = NoneT,
+    GTEST_TEMPLATE_ T21 = NoneT, GTEST_TEMPLATE_ T22 = NoneT,
+    GTEST_TEMPLATE_ T23 = NoneT, GTEST_TEMPLATE_ T24 = NoneT,
+    GTEST_TEMPLATE_ T25 = NoneT, GTEST_TEMPLATE_ T26 = NoneT,
+    GTEST_TEMPLATE_ T27 = NoneT, GTEST_TEMPLATE_ T28 = NoneT,
+    GTEST_TEMPLATE_ T29 = NoneT, GTEST_TEMPLATE_ T30 = NoneT,
+    GTEST_TEMPLATE_ T31 = NoneT, GTEST_TEMPLATE_ T32 = NoneT,
+    GTEST_TEMPLATE_ T33 = NoneT, GTEST_TEMPLATE_ T34 = NoneT,
+    GTEST_TEMPLATE_ T35 = NoneT, GTEST_TEMPLATE_ T36 = NoneT,
+    GTEST_TEMPLATE_ T37 = NoneT, GTEST_TEMPLATE_ T38 = NoneT,
+    GTEST_TEMPLATE_ T39 = NoneT, GTEST_TEMPLATE_ T40 = NoneT,
+    GTEST_TEMPLATE_ T41 = NoneT, GTEST_TEMPLATE_ T42 = NoneT,
+    GTEST_TEMPLATE_ T43 = NoneT, GTEST_TEMPLATE_ T44 = NoneT,
+    GTEST_TEMPLATE_ T45 = NoneT, GTEST_TEMPLATE_ T46 = NoneT,
+    GTEST_TEMPLATE_ T47 = NoneT, GTEST_TEMPLATE_ T48 = NoneT,
+    GTEST_TEMPLATE_ T49 = NoneT, GTEST_TEMPLATE_ T50 = NoneT>
+struct Templates {
+  typedef Templates50<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41,
+      T42, T43, T44, T45, T46, T47, T48, T49, T50> type;
+};
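
The comment above describes how the user-facing Templates<...> form, with its NoneT defaults, is collapsed to the fixed-arity TemplatesN form through the nested 'type' member of the partial specializations that follow. As a minimal illustration of that translation, here is a hypothetical, self-contained sketch (not part of the imported header): the include path, the FixtureA/FixtureB names, and the use of a C++11 static_assert are assumptions made purely for demonstration.

    // Hypothetical sketch, assuming the fused gtest header from this tree is on the
    // include path (a standalone Google Test checkout would instead expose
    // "gtest/internal/gtest-type-util.h") and the file is compiled as C++11.
    #include <type_traits>
    #include "gtest/gtest.h"

    // Two toy single-parameter class templates standing in for the templated test
    // fixtures a user would register with Templates<...>.
    template <typename T> class FixtureA {};
    template <typename T> class FixtureB {};

    namespace ti = testing::internal;

    // Writing Templates<FixtureA, FixtureB> leaves T3..T50 at their NoneT defaults,
    // so the two-argument partial specialization is selected and its nested 'type'
    // member names the fixed-arity Templates2<FixtureA, FixtureB> form used
    // internally, keeping compiler errors free of the trailing NoneT arguments.
    static_assert(
        std::is_same<ti::Templates<FixtureA, FixtureB>::type,
                     ti::Templates2<FixtureA, FixtureB> >::value,
        "Templates<...> collapses to the fixed-arity TemplatesN form");

    int main() { return 0; }
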
+
+template <>
+struct Templates<NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT> {
+  typedef Templates0 type;
+};
+template <GTEST_TEMPLATE_ T1>
+struct Templates<T1, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT> {
+  typedef Templates1<T1> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2>
+struct Templates<T1, T2, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT> {
+  typedef Templates2<T1, T2> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3>
+struct Templates<T1, T2, T3, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates3<T1, T2, T3> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4>
+struct Templates<T1, T2, T3, T4, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates4<T1, T2, T3, T4> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5>
+struct Templates<T1, T2, T3, T4, T5, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates5<T1, T2, T3, T4, T5> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6>
+struct Templates<T1, T2, T3, T4, T5, T6, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates6<T1, T2, T3, T4, T5, T6> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates7<T1, T2, T3, T4, T5, T6, T7> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates8<T1, T2, T3, T4, T5, T6, T7, T8> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates9<T1, T2, T3, T4, T5, T6, T7, T8, T9> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT> {
+  typedef Templates22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT> {
+  typedef Templates23<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT> {
+  typedef Templates24<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT> {
+  typedef Templates25<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT> {
+  typedef Templates26<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT> {
+  typedef Templates27<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT> {
+  typedef Templates28<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT> {
+  typedef Templates29<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates30<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates31<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates32<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates33<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates34<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates35<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates36<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, NoneT, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates37<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, T38, NoneT, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates38<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates39<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, NoneT, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates40<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, NoneT, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates41<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+      T41> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, NoneT,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates42<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41,
+      T42> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates43<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41,
+      T42, T43> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43, T44,
+    NoneT, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates44<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41,
+      T42, T43, T44> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44, GTEST_TEMPLATE_ T45>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43, T44,
+    T45, NoneT, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates45<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41,
+      T42, T43, T44, T45> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44, GTEST_TEMPLATE_ T45,
+    GTEST_TEMPLATE_ T46>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43, T44,
+    T45, T46, NoneT, NoneT, NoneT, NoneT> {
+  typedef Templates46<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41,
+      T42, T43, T44, T45, T46> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44, GTEST_TEMPLATE_ T45,
+    GTEST_TEMPLATE_ T46, GTEST_TEMPLATE_ T47>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43, T44,
+    T45, T46, T47, NoneT, NoneT, NoneT> {
+  typedef Templates47<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41,
+      T42, T43, T44, T45, T46, T47> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44, GTEST_TEMPLATE_ T45,
+    GTEST_TEMPLATE_ T46, GTEST_TEMPLATE_ T47, GTEST_TEMPLATE_ T48>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43, T44,
+    T45, T46, T47, T48, NoneT, NoneT> {
+  typedef Templates48<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41,
+      T42, T43, T44, T45, T46, T47, T48> type;
+};
+template <GTEST_TEMPLATE_ T1, GTEST_TEMPLATE_ T2, GTEST_TEMPLATE_ T3,
+    GTEST_TEMPLATE_ T4, GTEST_TEMPLATE_ T5, GTEST_TEMPLATE_ T6,
+    GTEST_TEMPLATE_ T7, GTEST_TEMPLATE_ T8, GTEST_TEMPLATE_ T9,
+    GTEST_TEMPLATE_ T10, GTEST_TEMPLATE_ T11, GTEST_TEMPLATE_ T12,
+    GTEST_TEMPLATE_ T13, GTEST_TEMPLATE_ T14, GTEST_TEMPLATE_ T15,
+    GTEST_TEMPLATE_ T16, GTEST_TEMPLATE_ T17, GTEST_TEMPLATE_ T18,
+    GTEST_TEMPLATE_ T19, GTEST_TEMPLATE_ T20, GTEST_TEMPLATE_ T21,
+    GTEST_TEMPLATE_ T22, GTEST_TEMPLATE_ T23, GTEST_TEMPLATE_ T24,
+    GTEST_TEMPLATE_ T25, GTEST_TEMPLATE_ T26, GTEST_TEMPLATE_ T27,
+    GTEST_TEMPLATE_ T28, GTEST_TEMPLATE_ T29, GTEST_TEMPLATE_ T30,
+    GTEST_TEMPLATE_ T31, GTEST_TEMPLATE_ T32, GTEST_TEMPLATE_ T33,
+    GTEST_TEMPLATE_ T34, GTEST_TEMPLATE_ T35, GTEST_TEMPLATE_ T36,
+    GTEST_TEMPLATE_ T37, GTEST_TEMPLATE_ T38, GTEST_TEMPLATE_ T39,
+    GTEST_TEMPLATE_ T40, GTEST_TEMPLATE_ T41, GTEST_TEMPLATE_ T42,
+    GTEST_TEMPLATE_ T43, GTEST_TEMPLATE_ T44, GTEST_TEMPLATE_ T45,
+    GTEST_TEMPLATE_ T46, GTEST_TEMPLATE_ T47, GTEST_TEMPLATE_ T48,
+    GTEST_TEMPLATE_ T49>
+struct Templates<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14,
+    T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28, T29,
+    T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43, T44,
+    T45, T46, T47, T48, T49, NoneT> {
+  typedef Templates49<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+      T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+      T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41,
+      T42, T43, T44, T45, T46, T47, T48, T49> type;
+};
+
+// The TypeList template makes it possible to use either a single type
+// or a Types<...> list in TYPED_TEST_CASE() and
+// INSTANTIATE_TYPED_TEST_CASE_P().
+
+template <typename T>
+struct TypeList { typedef Types1<T> type; };
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47, typename T48, typename T49, typename T50>
+struct TypeList<Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+    T44, T45, T46, T47, T48, T49, T50> > {
+  typedef typename Types<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+      T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+      T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+      T41, T42, T43, T44, T45, T46, T47, T48, T49, T50>::type type;
+};
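+
+// For illustration, a sketch of what TypeList normalizes to (the concrete
+// types int and char below are arbitrary examples, not part of the API):
+//
+//   TypeList<int>::type                  // Types1<int>
+//   TypeList<Types<int, char> >::type    // Types2<int, char>
+//
+// i.e. a bare type and a Types<...> list both reduce to the same internal
+// TypesN<...> representation used by the typed-test machinery.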
+
+#endif  // GTEST_HAS_TYPED_TEST || GTEST_HAS_TYPED_TEST_P
+
+}  // namespace internal
+}  // namespace testing
+
+#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TYPE_UTIL_H_
+
+// Due to C++ preprocessor weirdness, we need double indirection to
+// concatenate two tokens when one of them is __LINE__.  Writing
+//
+//   foo ## __LINE__
+//
+// will result in the token foo__LINE__, instead of foo followed by
+// the current line number.  For more details, see
+// http://www.parashift.com/c++-faq-lite/misc-technical-issues.html#faq-39.6
+#define GTEST_CONCAT_TOKEN_(foo, bar) GTEST_CONCAT_TOKEN_IMPL_(foo, bar)
+#define GTEST_CONCAT_TOKEN_IMPL_(foo, bar) foo ## bar
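+
+// For illustration, a sketch of the macro in use (MY_UNIQUE_NAME is a
+// hypothetical helper, not defined by Google Test):
+//
+//   #define MY_UNIQUE_NAME(prefix) GTEST_CONCAT_TOKEN_(prefix, __LINE__)
+//   int MY_UNIQUE_NAME(counter_);   // on line 42 this declares counter_42
+//
+// With a single-level `prefix ## __LINE__` it would instead declare the
+// identifier counter___LINE__, as explained above.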
+
+// Google Test defines the testing::Message class to allow construction of
+// test messages via the << operator.  The idea is that anything
+// streamable to std::ostream can be streamed to a testing::Message.
+// This allows a user to use his own types in Google Test assertions by
+// overloading the << operator.
+//
+// util/gtl/stl_logging-inl.h overloads << for STL containers.  These
+// overloads cannot be defined in the std namespace, as that will be
+// undefined behavior.  Therefore, they are defined in the global
+// namespace instead.
+//
+// C++'s symbol lookup rule (i.e. Koenig lookup) says that these
+// overloads are visible in either the std namespace or the global
+// namespace, but not other namespaces, including the testing
+// namespace which Google Test's Message class is in.
+//
+// To allow STL containers (and other types that have a << operator
+// defined in the global namespace) to be used in Google Test assertions,
+// testing::Message must access the custom << operator from the global
+// namespace.  Hence this helper function.
+//
+// Note: Jeffrey Yasskin suggested an alternative fix by "using
+// ::operator<<;" in the definition of Message's operator<<.  That fix
+// doesn't require a helper function, but unfortunately doesn't
+// compile with MSVC.
+template <typename T>
+inline void GTestStreamToHelper(std::ostream* os, const T& val) {
+  *os << val;
+}
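+
+// For illustration, a sketch of why the helper lives at global scope (the
+// Point type below is hypothetical):
+//
+//   struct Point { int x, y; };
+//   inline std::ostream& operator<<(std::ostream& os, const Point& p) {
+//     return os << "(" << p.x << ", " << p.y << ")";
+//   }
+//
+// Because GTestStreamToHelper is itself defined in the global namespace,
+// the unqualified `*os << val` above can find such global operator<<
+// overloads (including the STL-container overloads mentioned earlier).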
+
+class ProtocolMessage;
+namespace proto2 { class Message; }
+
+namespace testing {
+
+// Forward declarations.
+
+class AssertionResult;                 // Result of an assertion.
+class Message;                         // Represents a failure message.
+class Test;                            // Represents a test.
+class TestInfo;                        // Information about a test.
+class TestPartResult;                  // Result of a test part.
+class UnitTest;                        // A collection of test cases.
+
+template <typename T>
+::std::string PrintToString(const T& value);
+
+namespace internal {
+
+struct TraceInfo;                      // Information about a trace point.
+class ScopedTrace;                     // Implements scoped trace.
+class TestInfoImpl;                    // Opaque implementation of TestInfo
+class UnitTestImpl;                    // Opaque implementation of UnitTest
+
+// How many times InitGoogleTest() has been called.
+extern int g_init_gtest_count;
+
+// The text used in failure messages to indicate the start of the
+// stack trace.
+GTEST_API_ extern const char kStackTraceMarker[];
+
+// A secret type that Google Test users don't know about.  It has no
+// definition on purpose.  Therefore it's impossible to create a
+// Secret object, which is what we want.
+class Secret;
+
+// Two overloaded helpers for checking at compile time whether an
+// expression is a null pointer literal (i.e. NULL or any 0-valued
+// compile-time integral constant).  Their return values have
+// different sizes, so we can use sizeof() to test which version is
+// picked by the compiler.  These helpers have no implementations, as
+// we only need their signatures.
+//
+// Given IsNullLiteralHelper(x), the compiler will pick the first
+// version if x can be implicitly converted to Secret*, and pick the
+// second version otherwise.  Since Secret is a secret and incomplete
+// type, the only expression a user can write that has type Secret* is
+// a null pointer literal.  Therefore, we know that x is a null
+// pointer literal if and only if the first version is picked by the
+// compiler.
+char IsNullLiteralHelper(Secret* p);
+char (&IsNullLiteralHelper(...))[2];  // NOLINT
+
+// A compile-time bool constant that is true if and only if x is a
+// null pointer literal (i.e. NULL or any 0-valued compile-time
+// integral constant).
+#ifdef GTEST_ELLIPSIS_NEEDS_POD_
+// We lose support for NULL detection where the compiler doesn't like
+// passing non-POD classes through ellipsis (...).
+# define GTEST_IS_NULL_LITERAL_(x) false
+#else
+# define GTEST_IS_NULL_LITERAL_(x) \
+    (sizeof(::testing::internal::IsNullLiteralHelper(x)) == 1)
+#endif  // GTEST_ELLIPSIS_NEEDS_POD_
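+
+// For illustration (assuming GTEST_ELLIPSIS_NEEDS_POD_ is not defined, and
+// with a hypothetical `int* ptr`):
+//
+//   GTEST_IS_NULL_LITERAL_(NULL)   // true:  NULL converts to Secret*
+//   GTEST_IS_NULL_LITERAL_(0)      // true:  0 is a null pointer literal
+//   GTEST_IS_NULL_LITERAL_(ptr)    // false: int* cannot convert to Secret*,
+//                                  //        so the ellipsis overload wins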
+
+// Appends the user-supplied message to the Google-Test-generated message.
+GTEST_API_ String AppendUserMessage(const String& gtest_msg,
+                                    const Message& user_msg);
+
+// A helper class for creating scoped traces in user programs.
+class GTEST_API_ ScopedTrace {
+ public:
+  // The c'tor pushes the given source file location and message onto
+  // a trace stack maintained by Google Test.
+  ScopedTrace(const char* file, int line, const Message& message);
+
+  // The d'tor pops the info pushed by the c'tor.
+  //
+  // Note that the d'tor is not virtual in order to be efficient.
+  // Don't inherit from ScopedTrace!
+  ~ScopedTrace();
+
+ private:
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(ScopedTrace);
+} GTEST_ATTRIBUTE_UNUSED_;  // A ScopedTrace object does its job in its
+                            // c'tor and d'tor.  Therefore it doesn't
+                            // need to be used otherwise.
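+
+// For illustration: the public SCOPED_TRACE(message) macro builds on this
+// class, constructing a uniquely named ScopedTrace object roughly like
+//
+//   ::testing::internal::ScopedTrace gtest_trace_42(
+//       __FILE__, __LINE__, ::testing::Message() << (message));
+//
+// so the trace is pushed for the enclosing scope and popped automatically
+// when that scope exits (see the SCOPED_TRACE definition for the exact
+// expansion).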
+
+// Converts a streamable value to a String.  A NULL pointer is
+// converted to "(null)".  When the input value is a ::string,
+// ::std::string, ::wstring, or ::std::wstring object, each NUL
+// character in it is replaced with "\\0".
+// Declared here but defined in gtest.h, so that it has access
+// to the definition of the Message class, required by the ARM
+// compiler.
+template <typename T>
+String StreamableToString(const T& streamable);
+
+// The Symbian compiler has a bug that prevents it from selecting the
+// correct overload of FormatForComparisonFailureMessage (see below)
+// unless we pass the first argument by reference.  If we do that,
+// however, Visual Age C++ 10.1 generates a compiler error.  Therefore
+// we only apply the work-around for Symbian.
+#if defined(__SYMBIAN32__)
+# define GTEST_CREF_WORKAROUND_ const&
+#else
+# define GTEST_CREF_WORKAROUND_
+#endif
+
+// When this operand is a const char* or char*, if the other operand
+// is a ::std::string or ::string, we print this operand as a C string
+// rather than a pointer (we do the same for wide strings); otherwise
+// we print it as a pointer to be safe.
+
+// This internal macro is used to avoid duplicated code.
+#define GTEST_FORMAT_IMPL_(operand2_type, operand1_printer)\
+inline String FormatForComparisonFailureMessage(\
+    operand2_type::value_type* GTEST_CREF_WORKAROUND_ str, \
+    const operand2_type& /*operand2*/) {\
+  return operand1_printer(str);\
+}\
+inline String FormatForComparisonFailureMessage(\
+    const operand2_type::value_type* GTEST_CREF_WORKAROUND_ str, \
+    const operand2_type& /*operand2*/) {\
+  return operand1_printer(str);\
+}
+
+GTEST_FORMAT_IMPL_(::std::string, String::ShowCStringQuoted)
+#if GTEST_HAS_STD_WSTRING
+GTEST_FORMAT_IMPL_(::std::wstring, String::ShowWideCStringQuoted)
+#endif  // GTEST_HAS_STD_WSTRING
+
+#if GTEST_HAS_GLOBAL_STRING
+GTEST_FORMAT_IMPL_(::string, String::ShowCStringQuoted)
+#endif  // GTEST_HAS_GLOBAL_STRING
+#if GTEST_HAS_GLOBAL_WSTRING
+GTEST_FORMAT_IMPL_(::wstring, String::ShowWideCStringQuoted)
+#endif  // GTEST_HAS_GLOBAL_WSTRING
+
+#undef GTEST_FORMAT_IMPL_
+
+// The next four overloads handle the case where the operand being
+// printed is a char/wchar_t pointer and the other operand is not a
+// string/wstring object.  In such cases, we just print the operand as
+// a pointer to be safe.
+#define GTEST_FORMAT_CHAR_PTR_IMPL_(CharType)                       \
+  template <typename T>                                             \
+  String FormatForComparisonFailureMessage(CharType* GTEST_CREF_WORKAROUND_ p, \
+                                           const T&) { \
+    return PrintToString(static_cast<const void*>(p));              \
+  }
+
+GTEST_FORMAT_CHAR_PTR_IMPL_(char)
+GTEST_FORMAT_CHAR_PTR_IMPL_(const char)
+GTEST_FORMAT_CHAR_PTR_IMPL_(wchar_t)
+GTEST_FORMAT_CHAR_PTR_IMPL_(const wchar_t)
+
+#undef GTEST_FORMAT_CHAR_PTR_IMPL_
+
+// Constructs and returns the message for an equality assertion
+// (e.g. ASSERT_EQ, EXPECT_STREQ, etc) failure.
+//
+// The first four parameters are the expressions used in the assertion
+// and their values, as strings.  For example, for ASSERT_EQ(foo, bar)
+// where foo is 5 and bar is 6, we have:
+//
+//   expected_expression: "foo"
+//   actual_expression:   "bar"
+//   expected_value:      "5"
+//   actual_value:        "6"
+//
+// The ignoring_case parameter is true iff the assertion is a
+// *_STRCASEEQ*.  When it's true, the string " (ignoring case)" will
+// be inserted into the message.
+GTEST_API_ AssertionResult EqFailure(const char* expected_expression,
+                                     const char* actual_expression,
+                                     const String& expected_value,
+                                     const String& actual_value,
+                                     bool ignoring_case);
+
+// Constructs a failure message for Boolean assertions such as EXPECT_TRUE.
+GTEST_API_ String GetBoolAssertionFailureMessage(
+    const AssertionResult& assertion_result,
+    const char* expression_text,
+    const char* actual_predicate_value,
+    const char* expected_predicate_value);
+
+// This template class represents an IEEE floating-point number
+// (either single-precision or double-precision, depending on the
+// template parameters).
+//
+// The purpose of this class is to do more sophisticated number
+// comparison.  (Due to round-off error, etc, it's very unlikely that
+// two floating-points will be equal exactly.  Hence a naive
+// comparison by the == operation often doesn't work.)
+//
+// Format of IEEE floating-point:
+//
+//   The most-significant bit being the leftmost, an IEEE
+//   floating-point looks like
+//
+//     sign_bit exponent_bits fraction_bits
+//
+//   Here, sign_bit is a single bit that designates the sign of the
+//   number.
+//
+//   For float, there are 8 exponent bits and 23 fraction bits.
+//
+//   For double, there are 11 exponent bits and 52 fraction bits.
+//
+//   More details can be found at
+//   http://en.wikipedia.org/wiki/IEEE_floating-point_standard.
+//
+// Template parameter:
+//
+//   RawType: the raw floating-point type (either float or double)
+template <typename RawType>
+class FloatingPoint {
+ public:
+  // Defines the unsigned integer type that has the same size as the
+  // floating point number.
+  typedef typename TypeWithSize<sizeof(RawType)>::UInt Bits;
+
+  // Constants.
+
+  // # of bits in a number.
+  static const size_t kBitCount = 8*sizeof(RawType);
+
+  // # of fraction bits in a number.
+  static const size_t kFractionBitCount =
+    std::numeric_limits<RawType>::digits - 1;
+
+  // # of exponent bits in a number.
+  static const size_t kExponentBitCount = kBitCount - 1 - kFractionBitCount;
+
+  // The mask for the sign bit.
+  static const Bits kSignBitMask = static_cast<Bits>(1) << (kBitCount - 1);
+
+  // The mask for the fraction bits.
+  static const Bits kFractionBitMask =
+    ~static_cast<Bits>(0) >> (kExponentBitCount + 1);
+
+  // The mask for the exponent bits.
+  static const Bits kExponentBitMask = ~(kSignBitMask | kFractionBitMask);
+
+  // How many ULP's (Units in the Last Place) we want to tolerate when
+  // comparing two numbers.  The larger the value, the more error we
+  // allow.  A 0 value means that two numbers must be exactly the same
+  // to be considered equal.
+  //
+  // The maximum error of a single floating-point operation is 0.5
+  // units in the last place.  On Intel CPU's, all floating-point
+  // calculations are done with 80-bit precision, while double has 64
+  // bits.  Therefore, 4 should be enough for ordinary use.
+  //
+  // See the following article for more details on ULP:
+  // http://www.cygnus-software.com/papers/comparingfloats/comparingfloats.htm.
+  static const size_t kMaxUlps = 4;
+
+  // Constructs a FloatingPoint from a raw floating-point number.
+  //
+  // On an Intel CPU, passing a non-normalized NAN (Not a Number)
+  // around may change its bits, although the new value is guaranteed
+  // to be also a NAN.  Therefore, don't expect this constructor to
+  // preserve the bits in x when x is a NAN.
+  explicit FloatingPoint(const RawType& x) { u_.value_ = x; }
+
+  // Static methods
+
+  // Reinterprets a bit pattern as a floating-point number.
+  //
+  // This function is needed to test the AlmostEquals() method.
+  static RawType ReinterpretBits(const Bits bits) {
+    FloatingPoint fp(0);
+    fp.u_.bits_ = bits;
+    return fp.u_.value_;
+  }
+
+  // Returns the floating-point number that represents positive infinity.
+  static RawType Infinity() {
+    return ReinterpretBits(kExponentBitMask);
+  }
+
+  // Non-static methods
+
+  // Returns the bits that represent this number.
+  const Bits &bits() const { return u_.bits_; }
+
+  // Returns the exponent bits of this number.
+  Bits exponent_bits() const { return kExponentBitMask & u_.bits_; }
+
+  // Returns the fraction bits of this number.
+  Bits fraction_bits() const { return kFractionBitMask & u_.bits_; }
+
+  // Returns the sign bit of this number.
+  Bits sign_bit() const { return kSignBitMask & u_.bits_; }
+
+  // Returns true iff this is NAN (not a number).
+  bool is_nan() const {
+    // It's a NAN if the exponent bits are all ones and the fraction
+    // bits are not entirely zeros.
+    return (exponent_bits() == kExponentBitMask) && (fraction_bits() != 0);
+  }
+
+  // Returns true iff this number is at most kMaxUlps ULP's away from
+  // rhs.  In particular, this function:
+  //
+  //   - returns false if either number is (or both are) NAN.
+  //   - treats really large numbers as almost equal to infinity.
+  //   - thinks +0.0 and -0.0 are 0 ULP's apart.
+  bool AlmostEquals(const FloatingPoint& rhs) const {
+    // The IEEE standard says that any comparison operation involving
+    // a NAN must return false.
+    if (is_nan() || rhs.is_nan()) return false;
+
+    return DistanceBetweenSignAndMagnitudeNumbers(u_.bits_, rhs.u_.bits_)
+        <= kMaxUlps;
+  }
+
+ private:
+  // The data type used to store the actual floating-point number.
+  union FloatingPointUnion {
+    RawType value_;  // The raw floating-point number.
+    Bits bits_;      // The bits that represent the number.
+  };
+
+  // Converts an integer from the sign-and-magnitude representation to
+  // the biased representation.  More precisely, let N be 2 to the
+  // power of (kBitCount - 1); then an integer x is represented by the
+  // unsigned number x + N.
+  //
+  // For instance,
+  //
+  //   -N + 1 (the most negative number representable using
+  //          sign-and-magnitude) is represented by 1;
+  //   0      is represented by N; and
+  //   N - 1  (the biggest number representable using
+  //          sign-and-magnitude) is represented by 2N - 1.
+  //
+  // Read http://en.wikipedia.org/wiki/Signed_number_representations
+  // for more details on signed number representations.
+  static Bits SignAndMagnitudeToBiased(const Bits &sam) {
+    if (kSignBitMask & sam) {
+      // sam represents a negative number.
+      return ~sam + 1;
+    } else {
+      // sam represents a positive number.
+      return kSignBitMask | sam;
+    }
+  }
+
+  // Given two numbers in the sign-and-magnitude representation,
+  // returns the distance between them as an unsigned number.
+  static Bits DistanceBetweenSignAndMagnitudeNumbers(const Bits &sam1,
+                                                     const Bits &sam2) {
+    const Bits biased1 = SignAndMagnitudeToBiased(sam1);
+    const Bits biased2 = SignAndMagnitudeToBiased(sam2);
+    return (biased1 >= biased2) ? (biased1 - biased2) : (biased2 - biased1);
+  }
+
+  FloatingPointUnion u_;
+};
+
+// Typedefs the instances of the FloatingPoint template class that we
+// care to use.
+typedef FloatingPoint<float> Float;
+typedef FloatingPoint<double> Double;
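+
+// For illustration, a sketch of the ULP-based comparison in use (the values
+// below are arbitrary examples):
+//
+//   const float f = 1.0f;
+//   const float g = 1.0f + 2 * std::numeric_limits<float>::epsilon();
+//   Float(f).AlmostEquals(Float(g));     // true: the bit patterns are only
+//                                        // 2 ULPs apart, within kMaxUlps (4)
+//   Float(f).AlmostEquals(Float(2.0f));  // false: millions of ULPs apart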
+
+// In order to catch the mistake of putting tests that use different
+// test fixture classes in the same test case, we need to assign
+// unique IDs to fixture classes and compare them.  The TypeId type is
+// used to hold such IDs.  The user should treat TypeId as an opaque
+// type: the only operation allowed on TypeId values is to compare
+// them for equality using the == operator.
+typedef const void* TypeId;
+
+template <typename T>
+class TypeIdHelper {
+ public:
+  // dummy_ must not have a const type.  Otherwise an overly eager
+  // compiler (e.g. MSVC 7.1 & 8.0) may try to merge
+  // TypeIdHelper<T>::dummy_ for different Ts as an "optimization".
+  static bool dummy_;
+};
+
+template <typename T>
+bool TypeIdHelper<T>::dummy_ = false;
+
+// GetTypeId<T>() returns the ID of type T.  Different values will be
+// returned for different types.  Calling the function twice with the
+// same type argument is guaranteed to return the same ID.
+template <typename T>
+TypeId GetTypeId() {
+  // The compiler is required to allocate a different
+  // TypeIdHelper<T>::dummy_ variable for each T used to instantiate
+  // the template.  Therefore, the address of dummy_ is guaranteed to
+  // be unique.
+  return &(TypeIdHelper<T>::dummy_);
+}
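+
+// For illustration (MyFixture is a hypothetical class):
+//
+//   TypeId a = GetTypeId<MyFixture>();
+//   TypeId b = GetTypeId<MyFixture>();
+//   TypeId c = GetTypeId<int>();
+//   // a == b (same instantiation, hence same dummy_ address), while a != c.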
+
+// Returns the type ID of ::testing::Test.  Always call this instead
+// of GetTypeId< ::testing::Test>() to get the type ID of
+// ::testing::Test, as the latter may give the wrong result due to a
+// suspected linker bug when compiling Google Test as a Mac OS X
+// framework.
+GTEST_API_ TypeId GetTestTypeId();
+
+// Defines the abstract factory interface that creates instances
+// of a Test object.
+class TestFactoryBase {
+ public:
+  virtual ~TestFactoryBase() {}
+
+  // Creates a test instance to run. The instance is both created and destroyed
+  // within TestInfoImpl::Run()
+  virtual Test* CreateTest() = 0;
+
+ protected:
+  TestFactoryBase() {}
+
+ private:
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestFactoryBase);
+};
+
+// This class provides an implementation of the TestFactoryBase interface.
+// It is used in TEST and TEST_F macros.
+template <class TestClass>
+class TestFactoryImpl : public TestFactoryBase {
+ public:
+  virtual Test* CreateTest() { return new TestClass; }
+};
+
+#if GTEST_OS_WINDOWS
+
+// Predicate-formatters for implementing the HRESULT checking macros
+// {ASSERT|EXPECT}_HRESULT_{SUCCEEDED|FAILED}
+// We pass a long instead of HRESULT to avoid causing an
+// include dependency for the HRESULT type.
+GTEST_API_ AssertionResult IsHRESULTSuccess(const char* expr,
+                                            long hr);  // NOLINT
+GTEST_API_ AssertionResult IsHRESULTFailure(const char* expr,
+                                            long hr);  // NOLINT
+
+#endif  // GTEST_OS_WINDOWS
+
+// Types of SetUpTestCase() and TearDownTestCase() functions.
+typedef void (*SetUpTestCaseFunc)();
+typedef void (*TearDownTestCaseFunc)();
+
+// Creates a new TestInfo object and registers it with Google Test;
+// returns the created object.
+//
+// Arguments:
+//
+//   test_case_name:   name of the test case
+//   name:             name of the test
+//   type_param        the name of the test's type parameter, or NULL if
+//                     this is not a typed or a type-parameterized test.
+//   value_param       text representation of the test's value parameter,
+//                     or NULL if this is not a value-parameterized test.
+//   fixture_class_id: ID of the test fixture class
+//   set_up_tc:        pointer to the function that sets up the test case
+//   tear_down_tc:     pointer to the function that tears down the test case
+//   factory:          pointer to the factory that creates a test object.
+//                     The newly created TestInfo instance will assume
+//                     ownership of the factory object.
+GTEST_API_ TestInfo* MakeAndRegisterTestInfo(
+    const char* test_case_name, const char* name,
+    const char* type_param,
+    const char* value_param,
+    TypeId fixture_class_id,
+    SetUpTestCaseFunc set_up_tc,
+    TearDownTestCaseFunc tear_down_tc,
+    TestFactoryBase* factory);
+
+// If *pstr starts with the given prefix, modifies *pstr to be right
+// past the prefix and returns true; otherwise leaves *pstr unchanged
+// and returns false.  None of pstr, *pstr, and prefix can be NULL.
+GTEST_API_ bool SkipPrefix(const char* prefix, const char** pstr);
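+
+// For illustration (str is a hypothetical variable):
+//
+//   const char* str = "DISABLED_Foo";
+//   SkipPrefix("DISABLED_", &str);  // returns true;  str now points at "Foo"
+//   SkipPrefix("Bar_", &str);       // returns false; str is left unchanged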
+
+#if GTEST_HAS_TYPED_TEST || GTEST_HAS_TYPED_TEST_P
+
+// State of the definition of a type-parameterized test case.
+class GTEST_API_ TypedTestCasePState {
+ public:
+  TypedTestCasePState() : registered_(false) {}
+
+  // Adds the given test name to defined_test_names_ and returns true
+  // if the test case hasn't been registered; otherwise aborts the
+  // program.
+  bool AddTestName(const char* file, int line, const char* case_name,
+                   const char* test_name) {
+    if (registered_) {
+      fprintf(stderr, "%s Test %s must be defined before "
+              "REGISTER_TYPED_TEST_CASE_P(%s, ...).\n",
+              FormatFileLocation(file, line).c_str(), test_name, case_name);
+      fflush(stderr);
+      posix::Abort();
+    }
+    defined_test_names_.insert(test_name);
+    return true;
+  }
+
+  // Verifies that registered_tests match the test names in
+  // defined_test_names_; returns registered_tests if successful, or
+  // aborts the program otherwise.
+  const char* VerifyRegisteredTestNames(
+      const char* file, int line, const char* registered_tests);
+
+ private:
+  bool registered_;
+  ::std::set<const char*> defined_test_names_;
+};
+
+// Skips to the first non-space char after the first comma in 'str';
+// returns NULL if no comma is found in 'str'.
+inline const char* SkipComma(const char* str) {
+  const char* comma = strchr(str, ',');
+  if (comma == NULL) {
+    return NULL;
+  }
+  while (IsSpace(*(++comma))) {}
+  return comma;
+}
+
+// Returns the prefix of 'str' before the first comma in it; returns
+// the entire string if it contains no comma.
+inline String GetPrefixUntilComma(const char* str) {
+  const char* comma = strchr(str, ',');
+  return comma == NULL ? String(str) : String(str, comma - str);
+}
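+
+// For illustration, how the two helpers above split a comma-separated list
+// of registered test names (the literal below is an arbitrary example):
+//
+//   GetPrefixUntilComma("DoesFoo, DoesBar")   // "DoesFoo"
+//   SkipComma("DoesFoo, DoesBar")             // pointer to "DoesBar"
+//   SkipComma("DoesFoo")                      // NULL (no comma present)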
+
+// TypeParameterizedTest<Fixture, TestSel, Types>::Register()
+// registers a list of type-parameterized tests with Google Test.  The
+// return value is insignificant - we just need to return something
+// such that we can call this function in a namespace scope.
+//
+// Implementation note: The GTEST_TEMPLATE_ macro declares a template
+// template parameter.  It's defined in gtest-type-util.h.
+template <GTEST_TEMPLATE_ Fixture, class TestSel, typename Types>
+class TypeParameterizedTest {
+ public:
+  // 'index' is the index of the test in the type list 'Types'
+  // specified in INSTANTIATE_TYPED_TEST_CASE_P(Prefix, TestCase,
+  // Types).  Valid values for 'index' are [0, N - 1] where N is the
+  // length of Types.
+  static bool Register(const char* prefix, const char* case_name,
+                       const char* test_names, int index) {
+    typedef typename Types::Head Type;
+    typedef Fixture<Type> FixtureClass;
+    typedef typename GTEST_BIND_(TestSel, Type) TestClass;
+
+    // First, registers the first type-parameterized test in the type
+    // list.
+    MakeAndRegisterTestInfo(
+        String::Format("%s%s%s/%d", prefix, prefix[0] == '\0' ? "" : "/",
+                       case_name, index).c_str(),
+        GetPrefixUntilComma(test_names).c_str(),
+        GetTypeName<Type>().c_str(),
+        NULL,  // No value parameter.
+        GetTypeId<FixtureClass>(),
+        TestClass::SetUpTestCase,
+        TestClass::TearDownTestCase,
+        new TestFactoryImpl<TestClass>);
+
+    // Next, recurses (at compile time) with the tail of the type list.
+    return TypeParameterizedTest<Fixture, TestSel, typename Types::Tail>
+        ::Register(prefix, case_name, test_names, index + 1);
+  }
+};
+
+// The base case for the compile time recursion.
+template <GTEST_TEMPLATE_ Fixture, class TestSel>
+class TypeParameterizedTest<Fixture, TestSel, Types0> {
+ public:
+  static bool Register(const char* /*prefix*/, const char* /*case_name*/,
+                       const char* /*test_names*/, int /*index*/) {
+    return true;
+  }
+};
+
+// TypeParameterizedTestCase<Fixture, Tests, Types>::Register()
+// registers *all combinations* of 'Tests' and 'Types' with Google
+// Test.  The return value is insignificant - we just need to return
+// something such that we can call this function in a namespace scope.
+template <GTEST_TEMPLATE_ Fixture, typename Tests, typename Types>
+class TypeParameterizedTestCase {
+ public:
+  static bool Register(const char* prefix, const char* case_name,
+                       const char* test_names) {
+    typedef typename Tests::Head Head;
+
+    // First, registers the first test in 'Tests' for each type in 'Types'.
+    TypeParameterizedTest<Fixture, Head, Types>::Register(
+        prefix, case_name, test_names, 0);
+
+    // Next, recurses (at compile time) with the tail of the test list.
+    return TypeParameterizedTestCase<Fixture, typename Tests::Tail, Types>
+        ::Register(prefix, case_name, SkipComma(test_names));
+  }
+};
+
+// The base case for the compile time recursion.
+template <GTEST_TEMPLATE_ Fixture, typename Types>
+class TypeParameterizedTestCase<Fixture, Templates0, Types> {
+ public:
+  static bool Register(const char* /*prefix*/, const char* /*case_name*/,
+                       const char* /*test_names*/) {
+    return true;
+  }
+};
+
+#endif  // GTEST_HAS_TYPED_TEST || GTEST_HAS_TYPED_TEST_P
+
+// Returns the current OS stack trace as a String.
+//
+// The maximum number of stack frames to be included is specified by
+// the gtest_stack_trace_depth flag.  The skip_count parameter
+// specifies the number of top frames to be skipped, which doesn't
+// count against the number of frames to be included.
+//
+// For example, if Foo() calls Bar(), which in turn calls
+// GetCurrentOsStackTraceExceptTop(..., 1), Foo() will be included in
+// the trace but Bar() and GetCurrentOsStackTraceExceptTop() won't.
+GTEST_API_ String GetCurrentOsStackTraceExceptTop(UnitTest* unit_test,
+                                                  int skip_count);
+
+// Helpers for suppressing warnings on unreachable code or constant
+// condition.
+
+// Always returns true.
+GTEST_API_ bool AlwaysTrue();
+
+// Always returns false.
+inline bool AlwaysFalse() { return !AlwaysTrue(); }
+
+// Helper for suppressing false warning from Clang on a const char*
+// variable declared in a conditional expression always being NULL in
+// the else branch.
+struct GTEST_API_ ConstCharPtr {
+  ConstCharPtr(const char* str) : value(str) {}
+  operator bool() const { return true; }
+  const char* value;
+};
+
+// A simple Linear Congruential Generator for generating random
+// numbers with a uniform distribution.  Unlike rand() and srand(), it
+// doesn't use global state (and therefore can't interfere with user
+// code).  Unlike rand_r(), it's portable.  An LCG isn't very random,
+// but it's good enough for our purposes.
+class GTEST_API_ Random {
+ public:
+  static const UInt32 kMaxRange = 1u << 31;
+
+  explicit Random(UInt32 seed) : state_(seed) {}
+
+  void Reseed(UInt32 seed) { state_ = seed; }
+
+  // Generates a random number from [0, range).  Crashes if 'range' is
+  // 0 or greater than kMaxRange.
+  UInt32 Generate(UInt32 range);
+
+ private:
+  UInt32 state_;
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(Random);
+};
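+
+// Usage sketch (illustrative, not part of the upstream header):
+//
+//   Random rng(42);                 // Seeds the generator deterministically.
+//   UInt32 die = rng.Generate(6);   // Yields a value in [0, 6).
+//   rng.Reseed(42);                 // Restarts the same sequence.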
+
+// Defining a variable of type CompileAssertTypesEqual<T1, T2> will cause a
+// compiler error iff T1 and T2 are different types.
+template <typename T1, typename T2>
+struct CompileAssertTypesEqual;
+
+template <typename T>
+struct CompileAssertTypesEqual<T, T> {
+};
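+
+// Usage sketch (illustrative): declaring a variable of this type compiles
+// only when both type arguments are identical.
+//
+//   CompileAssertTypesEqual<int, int> ok;    // Compiles fine.
+//   CompileAssertTypesEqual<int, long> bad;  // Error: only the specialization
+//                                            // for identical types is defined.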
+
+// Removes the reference from a type if it is a reference type,
+// otherwise leaves it unchanged.  This is the same as
+// tr1::remove_reference, which is not widely available yet.
+template <typename T>
+struct RemoveReference { typedef T type; };  // NOLINT
+template <typename T>
+struct RemoveReference<T&> { typedef T type; };  // NOLINT
+
+// A handy wrapper around RemoveReference that works when the argument
+// T depends on template parameters.
+#define GTEST_REMOVE_REFERENCE_(T) \
+    typename ::testing::internal::RemoveReference<T>::type
+
+// Removes const from a type if it is a const type, otherwise leaves
+// it unchanged.  This is the same as tr1::remove_const, which is not
+// widely available yet.
+template <typename T>
+struct RemoveConst { typedef T type; };  // NOLINT
+template <typename T>
+struct RemoveConst<const T> { typedef T type; };  // NOLINT
+
+// MSVC 8.0, Sun C++, and IBM XL C++ have a bug which causes the above
+// definition to fail to remove the const in 'const int[3]' and 'const
+// char[3][4]'.  The following specialization works around the bug.
+// However, it causes trouble with GCC and thus needs to be
+// conditionally compiled.
+#if defined(_MSC_VER) || defined(__SUNPRO_CC) || defined(__IBMCPP__)
+template <typename T, size_t N>
+struct RemoveConst<const T[N]> {
+  typedef typename RemoveConst<T>::type type[N];
+};
+#endif
+
+// A handy wrapper around RemoveConst that works when the argument
+// T depends on template parameters.
+#define GTEST_REMOVE_CONST_(T) \
+    typename ::testing::internal::RemoveConst<T>::type
+
+// Turns const U&, U&, const U, and U all into U.
+#define GTEST_REMOVE_REFERENCE_AND_CONST_(T) \
+    GTEST_REMOVE_CONST_(GTEST_REMOVE_REFERENCE_(T))
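+
+// Usage sketch (illustrative) of the traits above:
+//
+//   RemoveReference<int&>::type    // int
+//   RemoveConst<const int>::type   // int
+//
+// The GTEST_REMOVE_* macros wrap these traits with 'typename' and are meant
+// to be used inside templates where the argument type is dependent, e.g.
+// GTEST_REMOVE_REFERENCE_AND_CONST_(T) yields T stripped of any top-level
+// reference and const.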
+
+// Adds reference to a type if it is not a reference type,
+// otherwise leaves it unchanged.  This is the same as
+// tr1::add_reference, which is not widely available yet.
+template <typename T>
+struct AddReference { typedef T& type; };  // NOLINT
+template <typename T>
+struct AddReference<T&> { typedef T& type; };  // NOLINT
+
+// A handy wrapper around AddReference that works when the argument T
+// depends on template parameters.
+#define GTEST_ADD_REFERENCE_(T) \
+    typename ::testing::internal::AddReference<T>::type
+
+// Adds a reference to const on top of T as necessary.  For example,
+// it transforms
+//
+//   char         ==> const char&
+//   const char   ==> const char&
+//   char&        ==> const char&
+//   const char&  ==> const char&
+//
+// The argument T must depend on some template parameters.
+#define GTEST_REFERENCE_TO_CONST_(T) \
+    GTEST_ADD_REFERENCE_(const GTEST_REMOVE_REFERENCE_(T))
+
+// ImplicitlyConvertible<From, To>::value is a compile-time bool
+// constant that's true iff type From can be implicitly converted to
+// type To.
+template <typename From, typename To>
+class ImplicitlyConvertible {
+ private:
+  // We need the following helper functions only for their types.
+  // They have no implementations.
+
+  // MakeFrom() is an expression whose type is From.  We cannot simply
+  // use From(), as the type From may not have a public default
+  // constructor.
+  static From MakeFrom();
+
+  // These two functions are overloaded.  Given an expression
+  // Helper(x), the compiler will pick the first version if x can be
+  // implicitly converted to type To; otherwise it will pick the
+  // second version.
+  //
+  // The first version returns a value of size 1, and the second
+  // version returns a value of size 2.  Therefore, by checking the
+  // size of Helper(x), which can be done at compile time, we can tell
+  // which version of Helper() is used, and hence whether x can be
+  // implicitly converted to type To.
+  static char Helper(To);
+  static char (&Helper(...))[2];  // NOLINT
+
+  // We have to put the 'public' section after the 'private' section,
+  // or MSVC refuses to compile the code.
+ public:
+  // MSVC warns about implicitly converting from double to int for
+  // possible loss of data, so we need to temporarily disable the
+  // warning.
+#ifdef _MSC_VER
+# pragma warning(push)          // Saves the current warning state.
+# pragma warning(disable:4244)  // Temporarily disables warning 4244.
+
+  static const bool value =
+      sizeof(Helper(ImplicitlyConvertible::MakeFrom())) == 1;
+# pragma warning(pop)           // Restores the warning state.
+#elif defined(__BORLANDC__)
+  // C++Builder cannot use member overload resolution during template
+  // instantiation.  The simplest workaround is to use its C++0x type traits
+  // functions (C++Builder 2009 and above only).
+  static const bool value = __is_convertible(From, To);
+#else
+  static const bool value =
+      sizeof(Helper(ImplicitlyConvertible::MakeFrom())) == 1;
+#endif  // _MSC_VER
+};
+template <typename From, typename To>
+const bool ImplicitlyConvertible<From, To>::value;
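+
+// Usage sketch (illustrative):
+//
+//   ImplicitlyConvertible<int, double>::value     // true
+//   ImplicitlyConvertible<double*, void*>::value  // true
+//   ImplicitlyConvertible<void*, double*>::value  // false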
+
+// IsAProtocolMessage<T>::value is a compile-time bool constant that's
+// true iff T is type ProtocolMessage, proto2::Message, or a subclass
+// of those.
+template <typename T>
+struct IsAProtocolMessage
+    : public bool_constant<
+  ImplicitlyConvertible<const T*, const ::ProtocolMessage*>::value ||
+  ImplicitlyConvertible<const T*, const ::proto2::Message*>::value> {
+};
+
+// When the compiler sees expression IsContainerTest<C>(0), if C is an
+// STL-style container class, the first overload of IsContainerTest
+// will be viable (since both C::iterator* and C::const_iterator* are
+// valid types and NULL can be implicitly converted to them).  It will
+// be picked over the second overload as 'int' is a perfect match for
+// the type of argument 0.  If C::iterator or C::const_iterator is not
+// a valid type, the first overload is not viable, and the second
+// overload will be picked.  Therefore, we can determine whether C is
+// a container class by checking the type of IsContainerTest<C>(0).
+// The value of the expression is insignificant.
+//
+// Note that we look for both C::iterator and C::const_iterator.  The
+// reason is that C++ injects the name of a class as a member of the
+// class itself (e.g. you can refer to class iterator as either
+// 'iterator' or 'iterator::iterator').  If we look for C::iterator
+// only, for example, we would mistakenly think that a class named
+// iterator is an STL container.
+//
+// Also note that the simpler approach of overloading
+// IsContainerTest(typename C::const_iterator*) and
+// IsContainerTest(...) doesn't work with Visual Age C++ and Sun C++.
+typedef int IsContainer;
+template <class C>
+IsContainer IsContainerTest(int /* dummy */,
+                            typename C::iterator* /* it */ = NULL,
+                            typename C::const_iterator* /* const_it */ = NULL) {
+  return 0;
+}
+
+typedef char IsNotContainer;
+template <class C>
+IsNotContainer IsContainerTest(long /* dummy */) { return '\0'; }
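+
+// Usage sketch (illustrative): the detection happens entirely at compile
+// time by comparing the size of the selected overload's return type.
+//
+//   sizeof(IsContainerTest<std::vector<int> >(0)) == sizeof(IsContainer)
+//   sizeof(IsContainerTest<int>(0))               == sizeof(IsNotContainer)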
+
+// EnableIf<Cond>::type is void when 'Cond' is true, and
+// undefined when 'Cond' is false.  To use SFINAE to make a function
+// overload only apply when a particular expression is true, add
+// "typename EnableIf<expression>::type* = 0" as the last parameter.
+template<bool> struct EnableIf;
+template<> struct EnableIf<true> { typedef void type; };  // NOLINT
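+
+// Usage sketch (illustrative): the overload below takes part in overload
+// resolution only when the (hypothetical) predicate SomeTrait<T>::value is
+// true; otherwise EnableIf<false>::type is undefined and SFINAE removes it.
+//
+//   template <typename T>
+//   void DoSomething(T value,
+//                    typename EnableIf<SomeTrait<T>::value>::type* = 0);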
+
+// Utilities for native arrays.
+
+// ArrayEq() compares two k-dimensional native arrays using the
+// elements' operator==, where k can be any integer >= 0.  When k is
+// 0, ArrayEq() degenerates into comparing a single pair of values.
+
+template <typename T, typename U>
+bool ArrayEq(const T* lhs, size_t size, const U* rhs);
+
+// This generic version is used when k is 0.
+template <typename T, typename U>
+inline bool ArrayEq(const T& lhs, const U& rhs) { return lhs == rhs; }
+
+// This overload is used when k >= 1.
+template <typename T, typename U, size_t N>
+inline bool ArrayEq(const T(&lhs)[N], const U(&rhs)[N]) {
+  return internal::ArrayEq(lhs, N, rhs);
+}
+
+// This helper reduces code bloat.  If we instead put its logic inside
+// the previous ArrayEq() function, arrays with different sizes would
+// lead to different copies of the template code.
+template <typename T, typename U>
+bool ArrayEq(const T* lhs, size_t size, const U* rhs) {
+  for (size_t i = 0; i != size; i++) {
+    if (!internal::ArrayEq(lhs[i], rhs[i]))
+      return false;
+  }
+  return true;
+}
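+
+// Usage sketch (illustrative): ArrayEq() recurses element-wise, so it also
+// works for multi-dimensional native arrays.
+//
+//   int a[2][3] = { { 1, 2, 3 }, { 4, 5, 6 } };
+//   int b[2][3] = { { 1, 2, 3 }, { 4, 5, 6 } };
+//   bool same = internal::ArrayEq(a, b);  // true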
+
+// Finds the first element in the iterator range [begin, end) that
+// equals elem.  Element may be a native array type itself.
+template <typename Iter, typename Element>
+Iter ArrayAwareFind(Iter begin, Iter end, const Element& elem) {
+  for (Iter it = begin; it != end; ++it) {
+    if (internal::ArrayEq(*it, elem))
+      return it;
+  }
+  return end;
+}
+
+// CopyArray() copies a k-dimensional native array using the elements'
+// operator=, where k can be any integer >= 0.  When k is 0,
+// CopyArray() degenerates into copying a single value.
+
+template <typename T, typename U>
+void CopyArray(const T* from, size_t size, U* to);
+
+// This generic version is used when k is 0.
+template <typename T, typename U>
+inline void CopyArray(const T& from, U* to) { *to = from; }
+
+// This overload is used when k >= 1.
+template <typename T, typename U, size_t N>
+inline void CopyArray(const T(&from)[N], U(*to)[N]) {
+  internal::CopyArray(from, N, *to);
+}
+
+// This helper reduces code bloat.  If we instead put its logic inside
+// the previous CopyArray() function, arrays with different sizes
+// would lead to different copies of the template code.
+template <typename T, typename U>
+void CopyArray(const T* from, size_t size, U* to) {
+  for (size_t i = 0; i != size; i++) {
+    internal::CopyArray(from[i], to + i);
+  }
+}
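+
+// Usage sketch (illustrative): like ArrayEq(), CopyArray() handles nested
+// native arrays by recursing on each element.
+//
+//   const int src[2][2] = { { 1, 2 }, { 3, 4 } };
+//   int dst[2][2];
+//   internal::CopyArray(src, &dst);  // dst is now an element-wise copy of src.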
+
+// The relation between a NativeArray object (see below) and the
+// native array it represents.
+enum RelationToSource {
+  kReference,  // The NativeArray references the native array.
+  kCopy        // The NativeArray makes a copy of the native array and
+               // owns the copy.
+};
+
+// Adapts a native array to a read-only STL-style container.  Instead
+// of the complete STL container concept, this adaptor only implements
+// members useful for Google Mock's container matchers.  New members
+// should be added as needed.  To simplify the implementation, we only
+// support Element being a raw type (i.e. having no top-level const or
+// reference modifier).  It's the client's responsibility to satisfy
+// this requirement.  Element can be an array type itself (hence
+// multi-dimensional arrays are supported).
+template <typename Element>
+class NativeArray {
+ public:
+  // STL-style container typedefs.
+  typedef Element value_type;
+  typedef Element* iterator;
+  typedef const Element* const_iterator;
+
+  // Constructs from a native array.
+  NativeArray(const Element* array, size_t count, RelationToSource relation) {
+    Init(array, count, relation);
+  }
+
+  // Copy constructor.
+  NativeArray(const NativeArray& rhs) {
+    Init(rhs.array_, rhs.size_, rhs.relation_to_source_);
+  }
+
+  ~NativeArray() {
+    // Ensures that the user doesn't instantiate NativeArray with a
+    // const or reference type.
+    static_cast<void>(StaticAssertTypeEqHelper<Element,
+        GTEST_REMOVE_REFERENCE_AND_CONST_(Element)>());
+    if (relation_to_source_ == kCopy)
+      delete[] array_;
+  }
+
+  // STL-style container methods.
+  size_t size() const { return size_; }
+  const_iterator begin() const { return array_; }
+  const_iterator end() const { return array_ + size_; }
+  bool operator==(const NativeArray& rhs) const {
+    return size() == rhs.size() &&
+        ArrayEq(begin(), size(), rhs.begin());
+  }
+
+ private:
+  // Initializes this object; makes a copy of the input array if
+  // 'relation' is kCopy.
+  void Init(const Element* array, size_t a_size, RelationToSource relation) {
+    if (relation == kReference) {
+      array_ = array;
+    } else {
+      Element* const copy = new Element[a_size];
+      CopyArray(array, a_size, copy);
+      array_ = copy;
+    }
+    size_ = a_size;
+    relation_to_source_ = relation;
+  }
+
+  const Element* array_;
+  size_t size_;
+  RelationToSource relation_to_source_;
+
+  GTEST_DISALLOW_ASSIGN_(NativeArray);
+};
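+
+// Usage sketch (illustrative, not part of the upstream header):
+//
+//   const int values[] = { 1, 2, 3 };
+//   NativeArray<int> view(values, 3, kReference);  // References 'values'.
+//   NativeArray<int> copy(values, 3, kCopy);       // Owns its own copy.
+//   // view.begin()/view.end() can then be used like any read-only STL range.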
+
+}  // namespace internal
+}  // namespace testing
+
+#define GTEST_MESSAGE_AT_(file, line, message, result_type) \
+  ::testing::internal::AssertHelper(result_type, file, line, message) \
+    = ::testing::Message()
+
+#define GTEST_MESSAGE_(message, result_type) \
+  GTEST_MESSAGE_AT_(__FILE__, __LINE__, message, result_type)
+
+#define GTEST_FATAL_FAILURE_(message) \
+  return GTEST_MESSAGE_(message, ::testing::TestPartResult::kFatalFailure)
+
+#define GTEST_NONFATAL_FAILURE_(message) \
+  GTEST_MESSAGE_(message, ::testing::TestPartResult::kNonFatalFailure)
+
+#define GTEST_SUCCESS_(message) \
+  GTEST_MESSAGE_(message, ::testing::TestPartResult::kSuccess)
+
+// Suppresses MSVC warning 4702 (unreachable code) for the code following
+// 'statement' if it returns or throws (or doesn't return or throw in some
+// situations).
+#define GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement) \
+  if (::testing::internal::AlwaysTrue()) { statement; }
+
+#define GTEST_TEST_THROW_(statement, expected_exception, fail) \
+  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \
+  if (::testing::internal::ConstCharPtr gtest_msg = "") { \
+    bool gtest_caught_expected = false; \
+    try { \
+      GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \
+    } \
+    catch (expected_exception const&) { \
+      gtest_caught_expected = true; \
+    } \
+    catch (...) { \
+      gtest_msg.value = \
+          "Expected: " #statement " throws an exception of type " \
+          #expected_exception ".\n  Actual: it throws a different type."; \
+      goto GTEST_CONCAT_TOKEN_(gtest_label_testthrow_, __LINE__); \
+    } \
+    if (!gtest_caught_expected) { \
+      gtest_msg.value = \
+          "Expected: " #statement " throws an exception of type " \
+          #expected_exception ".\n  Actual: it throws nothing."; \
+      goto GTEST_CONCAT_TOKEN_(gtest_label_testthrow_, __LINE__); \
+    } \
+  } else \
+    GTEST_CONCAT_TOKEN_(gtest_label_testthrow_, __LINE__): \
+      fail(gtest_msg.value)
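+
+// Illustrative note: elsewhere in Google Test the public exception
+// assertions are built on this helper, roughly as
+//
+//   EXPECT_THROW(statement, expected_exception)
+//       ==> GTEST_TEST_THROW_(statement, expected_exception,
+//                             GTEST_NONFATAL_FAILURE_)
+//   ASSERT_THROW(statement, expected_exception)
+//       ==> GTEST_TEST_THROW_(statement, expected_exception,
+//                             GTEST_FATAL_FAILURE_)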
+
+#define GTEST_TEST_NO_THROW_(statement, fail) \
+  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \
+  if (::testing::internal::AlwaysTrue()) { \
+    try { \
+      GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \
+    } \
+    catch (...) { \
+      goto GTEST_CONCAT_TOKEN_(gtest_label_testnothrow_, __LINE__); \
+    } \
+  } else \
+    GTEST_CONCAT_TOKEN_(gtest_label_testnothrow_, __LINE__): \
+      fail("Expected: " #statement " doesn't throw an exception.\n" \
+           "  Actual: it throws.")
+
+#define GTEST_TEST_ANY_THROW_(statement, fail) \
+  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \
+  if (::testing::internal::AlwaysTrue()) { \
+    bool gtest_caught_any = false; \
+    try { \
+      GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \
+    } \
+    catch (...) { \
+      gtest_caught_any = true; \
+    } \
+    if (!gtest_caught_any) { \
+      goto GTEST_CONCAT_TOKEN_(gtest_label_testanythrow_, __LINE__); \
+    } \
+  } else \
+    GTEST_CONCAT_TOKEN_(gtest_label_testanythrow_, __LINE__): \
+      fail("Expected: " #statement " throws an exception.\n" \
+           "  Actual: it doesn't.")
+
+
+// Implements Boolean test assertions such as EXPECT_TRUE.  'expression' can
+// be either a boolean expression or an AssertionResult.  'text' is a textual
+// representation of 'expression' as it was passed into EXPECT_TRUE.
+#define GTEST_TEST_BOOLEAN_(expression, text, actual, expected, fail) \
+  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \
+  if (const ::testing::AssertionResult gtest_ar_ = \
+      ::testing::AssertionResult(expression)) \
+    ; \
+  else \
+    fail(::testing::internal::GetBoolAssertionFailureMessage(\
+        gtest_ar_, text, #actual, #expected).c_str())
+
+#define GTEST_TEST_NO_FATAL_FAILURE_(statement, fail) \
+  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \
+  if (::testing::internal::AlwaysTrue()) { \
+    ::testing::internal::HasNewFatalFailureHelper gtest_fatal_failure_checker; \
+    GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \
+    if (gtest_fatal_failure_checker.has_new_fatal_failure()) { \
+      goto GTEST_CONCAT_TOKEN_(gtest_label_testnofatal_, __LINE__); \
+    } \
+  } else \
+    GTEST_CONCAT_TOKEN_(gtest_label_testnofatal_, __LINE__): \
+      fail("Expected: " #statement " doesn't generate new fatal " \
+           "failures in the current thread.\n" \
+           "  Actual: it does.")
+
+// Expands to the name of the class that implements the given test.
+#define GTEST_TEST_CLASS_NAME_(test_case_name, test_name) \
+  test_case_name##_##test_name##_Test
+
+// Helper macro for defining tests.
+#define GTEST_TEST_(test_case_name, test_name, parent_class, parent_id)\
+class GTEST_TEST_CLASS_NAME_(test_case_name, test_name) : public parent_class {\
+ public:\
+  GTEST_TEST_CLASS_NAME_(test_case_name, test_name)() {}\
+ private:\
+  virtual void TestBody();\
+  static ::testing::TestInfo* const test_info_ GTEST_ATTRIBUTE_UNUSED_;\
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(\
+      GTEST_TEST_CLASS_NAME_(test_case_name, test_name));\
+};\
+\
+::testing::TestInfo* const GTEST_TEST_CLASS_NAME_(test_case_name, test_name)\
+  ::test_info_ =\
+    ::testing::internal::MakeAndRegisterTestInfo(\
+        #test_case_name, #test_name, NULL, NULL, \
+        (parent_id), \
+        parent_class::SetUpTestCase, \
+        parent_class::TearDownTestCase, \
+        new ::testing::internal::TestFactoryImpl<\
+            GTEST_TEST_CLASS_NAME_(test_case_name, test_name)>);\
+void GTEST_TEST_CLASS_NAME_(test_case_name, test_name)::TestBody()
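+
+// Illustrative note: the public TEST() macro is defined elsewhere in Google
+// Test in terms of this helper, roughly as
+//
+//   TEST(test_case_name, test_name)
+//       ==> GTEST_TEST_(test_case_name, test_name,
+//                       ::testing::Test, ::testing::internal::GetTestTypeId())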
+
+#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_INTERNAL_H_
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan at google.com (Zhanyong Wan)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This header file defines the public API for death tests.  It is
+// #included by gtest.h so a user doesn't need to include this
+// directly.
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_
+#define GTEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_
+
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: wan at google.com (Zhanyong Wan), eefacm at gmail.com (Sean Mcafee)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This header file defines internal utilities needed for implementing
+// death tests.  They are subject to change without notice.
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_DEATH_TEST_INTERNAL_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_DEATH_TEST_INTERNAL_H_
+
+
+#include <stdio.h>
+
+namespace testing {
+namespace internal {
+
+GTEST_DECLARE_string_(internal_run_death_test);
+
+// Names of the flags (needed for parsing Google Test flags).
+const char kDeathTestStyleFlag[] = "death_test_style";
+const char kDeathTestUseFork[] = "death_test_use_fork";
+const char kInternalRunDeathTestFlag[] = "internal_run_death_test";
+
+#if GTEST_HAS_DEATH_TEST
+
+// DeathTest is a class that hides much of the complexity of the
+// GTEST_DEATH_TEST_ macro.  It is abstract; its static Create method
+// returns a concrete class that depends on the prevailing death test
+// style, as defined by the --gtest_death_test_style and/or
+// --gtest_internal_run_death_test flags.
+
+// In describing the results of death tests, these terms are used with
+// the corresponding definitions:
+//
+// exit status:  The integer exit information in the format specified
+//               by wait(2)
+// exit code:    The integer code passed to exit(3), _exit(2), or
+//               returned from main()
+class GTEST_API_ DeathTest {
+ public:
+  // Create returns false if there was an error determining the
+  // appropriate action to take for the current death test; for example,
+  // if the gtest_death_test_style flag is set to an invalid value.
+  // The LastMessage method will return a more detailed message in that
+  // case.  Otherwise, the DeathTest pointer pointed to by the "test"
+  // argument is set.  If the death test should be skipped, the pointer
+  // is set to NULL; otherwise, it is set to the address of a new concrete
+  // DeathTest object that controls the execution of the current test.
+  static bool Create(const char* statement, const RE* regex,
+                     const char* file, int line, DeathTest** test);
+  DeathTest();
+  virtual ~DeathTest() { }
+
+  // A helper class that aborts a death test when it's deleted.
+  class ReturnSentinel {
+   public:
+    explicit ReturnSentinel(DeathTest* test) : test_(test) { }
+    ~ReturnSentinel() { test_->Abort(TEST_ENCOUNTERED_RETURN_STATEMENT); }
+   private:
+    DeathTest* const test_;
+    GTEST_DISALLOW_COPY_AND_ASSIGN_(ReturnSentinel);
+  } GTEST_ATTRIBUTE_UNUSED_;
+
+  // An enumeration of possible roles that may be taken when a death
+  // test is encountered.  EXECUTE means that the death test logic should
+  // be executed immediately.  OVERSEE means that the program should prepare
+  // the appropriate environment for a child process to execute the death
+  // test, then wait for it to complete.
+  enum TestRole { OVERSEE_TEST, EXECUTE_TEST };
+
+  // An enumeration of the three reasons that a test might be aborted.
+  enum AbortReason {
+    TEST_ENCOUNTERED_RETURN_STATEMENT,
+    TEST_THREW_EXCEPTION,
+    TEST_DID_NOT_DIE
+  };
+
+  // Assumes one of the above roles.
+  virtual TestRole AssumeRole() = 0;
+
+  // Waits for the death test to finish and returns its status.
+  virtual int Wait() = 0;
+
+  // Returns true if the death test passed; that is, the test process
+  // exited during the test, its exit status matches a user-supplied
+  // predicate, and its stderr output matches a user-supplied regular
+  // expression.
+  // The user-supplied predicate may be a macro expression rather
+  // than a function pointer or functor, or else Wait and Passed could
+  // be combined.
+  virtual bool Passed(bool exit_status_ok) = 0;
+
+  // Signals that the death test did not die as expected.
+  virtual void Abort(AbortReason reason) = 0;
+
+  // Returns a human-readable outcome message regarding the outcome of
+  // the last death test.
+  static const char* LastMessage();
+
+  static void set_last_death_test_message(const String& message);
+
+ private:
+  // A string containing a description of the outcome of the last death test.
+  static String last_death_test_message_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(DeathTest);
+};
+
+// Factory interface for death tests.  May be mocked out for testing.
+class DeathTestFactory {
+ public:
+  virtual ~DeathTestFactory() { }
+  virtual bool Create(const char* statement, const RE* regex,
+                      const char* file, int line, DeathTest** test) = 0;
+};
+
+// A concrete DeathTestFactory implementation for normal use.
+class DefaultDeathTestFactory : public DeathTestFactory {
+ public:
+  virtual bool Create(const char* statement, const RE* regex,
+                      const char* file, int line, DeathTest** test);
+};
+
+// Returns true if exit_status describes a process that was terminated
+// by a signal, or exited normally with a nonzero exit code.
+GTEST_API_ bool ExitedUnsuccessfully(int exit_status);
+
+// Traps C++ exceptions escaping statement and reports them as test
+// failures. Note that trapping SEH exceptions is not implemented here.
+# if GTEST_HAS_EXCEPTIONS
+#  define GTEST_EXECUTE_DEATH_TEST_STATEMENT_(statement, death_test) \
+  try { \
+    GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \
+  } catch (const ::std::exception& gtest_exception) { \
+    fprintf(\
+        stderr, \
+        "\n%s: Caught std::exception-derived exception escaping the " \
+        "death test statement. Exception message: %s\n", \
+        ::testing::internal::FormatFileLocation(__FILE__, __LINE__).c_str(), \
+        gtest_exception.what()); \
+    fflush(stderr); \
+    death_test->Abort(::testing::internal::DeathTest::TEST_THREW_EXCEPTION); \
+  } catch (...) { \
+    death_test->Abort(::testing::internal::DeathTest::TEST_THREW_EXCEPTION); \
+  }
+
+# else
+#  define GTEST_EXECUTE_DEATH_TEST_STATEMENT_(statement, death_test) \
+  GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement)
+
+# endif
+
+// This macro is for implementing ASSERT_DEATH*, EXPECT_DEATH*,
+// ASSERT_EXIT*, and EXPECT_EXIT*.
+# define GTEST_DEATH_TEST_(statement, predicate, regex, fail) \
+  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \
+  if (::testing::internal::AlwaysTrue()) { \
+    const ::testing::internal::RE& gtest_regex = (regex); \
+    ::testing::internal::DeathTest* gtest_dt; \
+    if (!::testing::internal::DeathTest::Create(#statement, &gtest_regex, \
+        __FILE__, __LINE__, &gtest_dt)) { \
+      goto GTEST_CONCAT_TOKEN_(gtest_label_, __LINE__); \
+    } \
+    if (gtest_dt != NULL) { \
+      ::testing::internal::scoped_ptr< ::testing::internal::DeathTest> \
+          gtest_dt_ptr(gtest_dt); \
+      switch (gtest_dt->AssumeRole()) { \
+        case ::testing::internal::DeathTest::OVERSEE_TEST: \
+          if (!gtest_dt->Passed(predicate(gtest_dt->Wait()))) { \
+            goto GTEST_CONCAT_TOKEN_(gtest_label_, __LINE__); \
+          } \
+          break; \
+        case ::testing::internal::DeathTest::EXECUTE_TEST: { \
+          ::testing::internal::DeathTest::ReturnSentinel \
+              gtest_sentinel(gtest_dt); \
+          GTEST_EXECUTE_DEATH_TEST_STATEMENT_(statement, gtest_dt); \
+          gtest_dt->Abort(::testing::internal::DeathTest::TEST_DID_NOT_DIE); \
+          break; \
+        } \
+        default: \
+          break; \
+      } \
+    } \
+  } else \
+    GTEST_CONCAT_TOKEN_(gtest_label_, __LINE__): \
+      fail(::testing::internal::DeathTest::LastMessage())
+// The symbol "fail" here expands to something into which a message
+// can be streamed.
+
+// A class representing the parsed contents of the
+// --gtest_internal_run_death_test flag, as it existed when
+// RUN_ALL_TESTS was called.
+class InternalRunDeathTestFlag {
+ public:
+  InternalRunDeathTestFlag(const String& a_file,
+                           int a_line,
+                           int an_index,
+                           int a_write_fd)
+      : file_(a_file), line_(a_line), index_(an_index),
+        write_fd_(a_write_fd) {}
+
+  ~InternalRunDeathTestFlag() {
+    if (write_fd_ >= 0)
+      posix::Close(write_fd_);
+  }
+
+  String file() const { return file_; }
+  int line() const { return line_; }
+  int index() const { return index_; }
+  int write_fd() const { return write_fd_; }
+
+ private:
+  String file_;
+  int line_;
+  int index_;
+  int write_fd_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(InternalRunDeathTestFlag);
+};
+
+// Returns a newly created InternalRunDeathTestFlag object with fields
+// initialized from the GTEST_FLAG(internal_run_death_test) flag if
+// the flag is specified; otherwise returns NULL.
+InternalRunDeathTestFlag* ParseInternalRunDeathTestFlag();
+
+#else  // GTEST_HAS_DEATH_TEST
+
+// This macro is used for implementing macros such as
+// EXPECT_DEATH_IF_SUPPORTED and ASSERT_DEATH_IF_SUPPORTED on systems where
+// death tests are not supported. Those macros must compile on such systems
+// iff EXPECT_DEATH and ASSERT_DEATH compile with the same parameters on
+// systems that support death tests. This allows one to write such a macro
+// on a system that does not support death tests and be sure that it will
+// compile on a death-test supporting system.
+//
+// Parameters:
+//   statement -  A statement that a macro such as EXPECT_DEATH would test
+//                for program termination. This macro has to make sure this
+//                statement is compiled but not executed, to ensure that
+//                EXPECT_DEATH_IF_SUPPORTED compiles with a certain
+//                parameter iff EXPECT_DEATH compiles with it.
+//   regex     -  A regex that a macro such as EXPECT_DEATH would use to test
+//                the output of statement.  This parameter has to be
+//                compiled but not evaluated by this macro, to ensure that
+//                this macro only accepts expressions that a macro such as
+//                EXPECT_DEATH would accept.
+//   terminator - Must be an empty statement for EXPECT_DEATH_IF_SUPPORTED
+//                and a return statement for ASSERT_DEATH_IF_SUPPORTED.
+//                This ensures that ASSERT_DEATH_IF_SUPPORTED will not
+//                compile inside functions where ASSERT_DEATH doesn't
+//                compile.
+//
+//  The branch that has an always false condition is used to ensure that
+//  statement and regex are compiled (and thus syntactically correct) but
+//  never executed. The unreachable code macro protects the terminator
+//  statement from generating an 'unreachable code' warning in case
+//  statement unconditionally returns or throws. The Message constructor at
+//  the end allows the syntax of streaming additional messages into the
+//  macro, for compile-time compatibility with EXPECT_DEATH/ASSERT_DEATH.
+# define GTEST_UNSUPPORTED_DEATH_TEST_(statement, regex, terminator) \
+    GTEST_AMBIGUOUS_ELSE_BLOCKER_ \
+    if (::testing::internal::AlwaysTrue()) { \
+      GTEST_LOG_(WARNING) \
+          << "Death tests are not supported on this platform.\n" \
+          << "Statement '" #statement "' cannot be verified."; \
+    } else if (::testing::internal::AlwaysFalse()) { \
+      ::testing::internal::RE::PartialMatch(".*", (regex)); \
+      GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \
+      terminator; \
+    } else \
+      ::testing::Message()
+
+#endif  // GTEST_HAS_DEATH_TEST
+
+}  // namespace internal
+}  // namespace testing
+
+#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_DEATH_TEST_INTERNAL_H_
+
+namespace testing {
+
+// This flag controls the style of death tests.  Valid values are "threadsafe",
+// meaning that the death test child process will re-execute the test binary
+// from the start, running only a single death test, or "fast",
+// meaning that the child process will execute the test logic immediately
+// after forking.
+GTEST_DECLARE_string_(death_test_style);
+
+#if GTEST_HAS_DEATH_TEST
+
+// The following macros are useful for writing death tests.
+
+// Here's what happens when an ASSERT_DEATH* or EXPECT_DEATH* is
+// executed:
+//
+//   1. It generates a warning if there is more than one active
+//   thread.  This is because it's safe to fork() or clone() only
+//   when there is a single thread.
+//
+//   2. The parent process clone()s a sub-process and runs the death
+//   test in it; the sub-process exits with code 0 at the end of the
+//   death test, if it hasn't exited already.
+//
+//   3. The parent process waits for the sub-process to terminate.
+//
+//   4. The parent process checks the exit code and error message of
+//   the sub-process.
+//
+// Examples:
+//
+//   ASSERT_DEATH(server.SendMessage(56, "Hello"), "Invalid port number");
+//   for (int i = 0; i < 5; i++) {
+//     EXPECT_DEATH(server.ProcessRequest(i),
+//                  "Invalid request .* in ProcessRequest()")
+//         << "Failed to die on request " << i);
+//   }
+//
+//   ASSERT_EXIT(server.ExitNow(), ::testing::ExitedWithCode(0), "Exiting");
+//
+//   bool KilledBySIGHUP(int exit_code) {
+//     return WIFSIGNALED(exit_code) && WTERMSIG(exit_code) == SIGHUP;
+//   }
+//
+//   ASSERT_EXIT(client.HangUpServer(), KilledBySIGHUP, "Hanging up!");
+//
+// On the regular expressions used in death tests:
+//
+//   On POSIX-compliant systems (*nix), we use the <regex.h> library,
+//   which uses the POSIX extended regex syntax.
+//
+//   On other platforms (e.g. Windows), we only support a simple regex
+//   syntax implemented as part of Google Test.  This limited
+//   implementation should be enough most of the time when writing
+//   death tests; though it lacks many features you can find in PCRE
+//   or POSIX extended regex syntax.  For example, we don't support
+//   union ("x|y"), grouping ("(xy)"), brackets ("[xy]"), and
+//   repetition count ("x{5,7}"), among others.
+//
+//   Below is the syntax that we do support.  We chose it to be a
+//   subset of both PCRE and POSIX extended regex, so it's easy to
+//   learn wherever you come from.  In the following: 'A' denotes a
+//   literal character, period (.), or a single \\ escape sequence;
+//   'x' and 'y' denote regular expressions; 'm' and 'n' are for
+//   natural numbers.
+//
+//     c     matches any literal character c
+//     \\d   matches any decimal digit
+//     \\D   matches any character that's not a decimal digit
+//     \\f   matches \f
+//     \\n   matches \n
+//     \\r   matches \r
+//     \\s   matches any ASCII whitespace, including \n
+//     \\S   matches any character that's not a whitespace
+//     \\t   matches \t
+//     \\v   matches \v
+//     \\w   matches any letter, _, or decimal digit
+//     \\W   matches any character that \\w doesn't match
+//     \\c   matches any literal character c, which must be a punctuation
+//     .     matches any single character except \n
+//     A?    matches 0 or 1 occurrences of A
+//     A*    matches 0 or many occurrences of A
+//     A+    matches 1 or many occurrences of A
+//     ^     matches the beginning of a string (not that of each line)
+//     $     matches the end of a string (not that of each line)
+//     xy    matches x followed by y
+//
+//   If you accidentally use PCRE or POSIX extended regex features
+//   not implemented by us, you will get a run-time failure.  In that
+//   case, please try to rewrite your regular expression within the
+//   above syntax.
+//
+//   This implementation is *not* meant to be as highly tuned or robust
+//   as a compiled regex library, but should perform well enough for a
+//   death test, which already incurs significant overhead by launching
+//   a child process.
+//
+// Known caveats:
+//
+//   A "threadsafe" style death test obtains the path to the test
+//   program from argv[0] and re-executes it in the sub-process.  For
+//   simplicity, the current implementation doesn't search the PATH
+//   when launching the sub-process.  This means that the user must
+//   invoke the test program via a path that contains at least one
+//   path separator (e.g. path/to/foo_test and
+//   /absolute/path/to/bar_test are fine, but foo_test is not).  This
+//   is rarely a problem as people usually don't put the test binary
+//   directory in PATH.
+//
+// TODO(wan at google.com): make thread-safe death tests search the PATH.
+
+// Asserts that a given statement causes the program to exit, with an
+// integer exit status that satisfies predicate, and emitting error output
+// that matches regex.
+# define ASSERT_EXIT(statement, predicate, regex) \
+    GTEST_DEATH_TEST_(statement, predicate, regex, GTEST_FATAL_FAILURE_)
+
+// Like ASSERT_EXIT, but continues on to successive tests in the
+// test case, if any:
+# define EXPECT_EXIT(statement, predicate, regex) \
+    GTEST_DEATH_TEST_(statement, predicate, regex, GTEST_NONFATAL_FAILURE_)
+
+// Asserts that a given statement causes the program to exit, either by
+// explicitly exiting with a nonzero exit code or being killed by a
+// signal, and emitting error output that matches regex.
+# define ASSERT_DEATH(statement, regex) \
+    ASSERT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, regex)
+
+// Like ASSERT_DEATH, but continues on to successive tests in the
+// test case, if any:
+# define EXPECT_DEATH(statement, regex) \
+    EXPECT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, regex)
+
+// Two predicate classes that can be used in {ASSERT,EXPECT}_EXIT*:
+
+// Tests that an exit code describes a normal exit with a given exit code.
+class GTEST_API_ ExitedWithCode {
+ public:
+  explicit ExitedWithCode(int exit_code);
+  bool operator()(int exit_status) const;
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ExitedWithCode& other);
+
+  const int exit_code_;
+};
+
+# if !GTEST_OS_WINDOWS
+// Tests that an exit code describes an exit due to termination by a
+// given signal.
+class GTEST_API_ KilledBySignal {
+ public:
+  explicit KilledBySignal(int signum);
+  bool operator()(int exit_status) const;
+ private:
+  const int signum_;
+};
+# endif  // !GTEST_OS_WINDOWS
+
+// EXPECT_DEBUG_DEATH asserts that the given statements die in debug mode.
+// The death testing framework causes this to have interesting semantics,
+// since the side effects of the call are only visible in opt mode, and not
+// in debug mode.
+//
+// In practice, this can be used to test functions that utilize the
+// LOG(DFATAL) macro using the following style:
+//
+// int DieInDebugOr12(int* sideeffect) {
+//   if (sideeffect) {
+//     *sideeffect = 12;
+//   }
+//   LOG(DFATAL) << "death";
+//   return 12;
+// }
+//
+// TEST(TestCase, TestDieOr12WorksInDgbAndOpt) {
+//   int sideeffect = 0;
+//   // Only asserts in dbg.
+//   EXPECT_DEBUG_DEATH(DieInDebugOr12(&sideeffect), "death");
+//
+// #ifdef NDEBUG
+//   // opt-mode has sideeffect visible.
+//   EXPECT_EQ(12, sideeffect);
+// #else
+//   // dbg-mode no visible sideeffect.
+//   EXPECT_EQ(0, sideeffect);
+// #endif
+// }
+//
+// This will assert that DieInDebugOr12() crashes in debug
+// mode, usually due to a DCHECK or LOG(DFATAL), but returns the
+// appropriate fallback value (12 in this case) in opt mode. If you
+// need to test that a function has appropriate side-effects in opt
+// mode, include assertions against the side-effects.  A general
+// pattern for this is:
+//
+// EXPECT_DEBUG_DEATH({
+//   // Side-effects here will have an effect after this statement in
+//   // opt mode, but none in debug mode.
+//   EXPECT_EQ(12, DieInDebugOr12(&sideeffect));
+// }, "death");
+//
+# ifdef NDEBUG
+
+#  define EXPECT_DEBUG_DEATH(statement, regex) \
+  do { statement; } while (::testing::internal::AlwaysFalse())
+
+#  define ASSERT_DEBUG_DEATH(statement, regex) \
+  do { statement; } while (::testing::internal::AlwaysFalse())
+
+# else
+
+#  define EXPECT_DEBUG_DEATH(statement, regex) \
+  EXPECT_DEATH(statement, regex)
+
+#  define ASSERT_DEBUG_DEATH(statement, regex) \
+  ASSERT_DEATH(statement, regex)
+
+# endif  // NDEBUG for EXPECT_DEBUG_DEATH
+#endif  // GTEST_HAS_DEATH_TEST
+
+// EXPECT_DEATH_IF_SUPPORTED(statement, regex) and
+// ASSERT_DEATH_IF_SUPPORTED(statement, regex) expand to real death tests if
+// death tests are supported; otherwise they just issue a warning.  This is
+// useful when you are combining death test assertions with normal test
+// assertions in one test.
+#if GTEST_HAS_DEATH_TEST
+# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \
+    EXPECT_DEATH(statement, regex)
+# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \
+    ASSERT_DEATH(statement, regex)
+#else
+# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \
+    GTEST_UNSUPPORTED_DEATH_TEST_(statement, regex, )
+# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \
+    GTEST_UNSUPPORTED_DEATH_TEST_(statement, regex, return)
+#endif
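+
+// Usage sketch (illustrative):
+//
+//   TEST(MyDeathTest, DiesOnNullInput) {
+//     EXPECT_DEATH_IF_SUPPORTED(ProcessInput(NULL), "input must not be NULL");
+//   }
+//
+// ProcessInput() and the expected message are hypothetical, used only to
+// show the macro's shape; on platforms without death test support this
+// degenerates into a warning.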
+
+}  // namespace testing
+
+#endif  // GTEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan at google.com (Zhanyong Wan)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This header file defines the Message class.
+//
+// IMPORTANT NOTE: Due to a limitation of the C++ language, we have to
+// leave some internal implementation details in this header file.
+// They are clearly marked by comments like this:
+//
+//   // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+//
+// Such code is NOT meant to be used by a user directly, and is subject
+// to CHANGE WITHOUT NOTICE.  Therefore DO NOT DEPEND ON IT in a user
+// program!
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_MESSAGE_H_
+#define GTEST_INCLUDE_GTEST_GTEST_MESSAGE_H_
+
+#include <limits>
+
+
+namespace testing {
+
+// The Message class works like an ostream repeater.
+//
+// Typical usage:
+//
+//   1. You stream a bunch of values to a Message object.
+//      It will remember the text in a stringstream.
+//   2. Then you stream the Message object to an ostream.
+//      This causes the text in the Message to be streamed
+//      to the ostream.
+//
+// For example,
+//
+//   testing::Message foo;
+//   foo << 1 << " != " << 2;
+//   std::cout << foo;
+//
+// will print "1 != 2".
+//
+// Message is not intended to be inherited from.  In particular, its
+// destructor is not virtual.
+//
+// Note that stringstream behaves differently in gcc and in MSVC.  You
+// can stream a NULL char pointer to it in the former, but not in the
+// latter (it causes an access violation if you do).  The Message
+// class hides this difference by treating a NULL char pointer as
+// "(null)".
+class GTEST_API_ Message {
+ private:
+  // The type of basic IO manipulators (endl, ends, and flush) for
+  // narrow streams.
+  typedef std::ostream& (*BasicNarrowIoManip)(std::ostream&);
+
+ public:
+  // Constructs an empty Message.
+  // We allocate the stringstream separately because otherwise each use of
+  // ASSERT/EXPECT in a procedure adds over 200 bytes to the procedure's
+  // stack frame, leading to huge stack frames in some cases; gcc does not reuse
+  // the stack space.
+  Message() : ss_(new ::std::stringstream) {
+    // By default, we want there to be enough precision when printing
+    // a double to a Message.
+    *ss_ << std::setprecision(std::numeric_limits<double>::digits10 + 2);
+  }
+
+  // Copy constructor.
+  Message(const Message& msg) : ss_(new ::std::stringstream) {  // NOLINT
+    *ss_ << msg.GetString();
+  }
+
+  // Constructs a Message from a C-string.
+  explicit Message(const char* str) : ss_(new ::std::stringstream) {
+    *ss_ << str;
+  }
+
+#if GTEST_OS_SYMBIAN
+  // Streams a value (either a pointer or not) to this object.
+  template <typename T>
+  inline Message& operator <<(const T& value) {
+    StreamHelper(typename internal::is_pointer<T>::type(), value);
+    return *this;
+  }
+#else
+  // Streams a non-pointer value to this object.
+  template <typename T>
+  inline Message& operator <<(const T& val) {
+    ::GTestStreamToHelper(ss_.get(), val);
+    return *this;
+  }
+
+  // Streams a pointer value to this object.
+  //
+  // This function is an overload of the previous one.  When you
+  // stream a pointer to a Message, this definition will be used as it
+  // is more specialized.  (The C++ Standard, section
+  // [temp.func.order].)  If you stream a non-pointer, then the
+  // previous definition will be used.
+  //
+  // The reason for this overload is that streaming a NULL pointer to
+  // ostream is undefined behavior.  Depending on the compiler, you
+  // may get "0", "(nil)", "(null)", or an access violation.  To
+  // ensure consistent result across compilers, we always treat NULL
+  // as "(null)".
+  template <typename T>
+  inline Message& operator <<(T* const& pointer) {  // NOLINT
+    if (pointer == NULL) {
+      *ss_ << "(null)";
+    } else {
+      ::GTestStreamToHelper(ss_.get(), pointer);
+    }
+    return *this;
+  }
+#endif  // GTEST_OS_SYMBIAN
+
+  // Since the basic IO manipulators are overloaded for both narrow
+  // and wide streams, we have to provide this specialized definition
+  // of operator <<, even though its body is the same as the
+  // templatized version above.  Without this definition, streaming
+  // endl or other basic IO manipulators to Message will confuse the
+  // compiler.
+  Message& operator <<(BasicNarrowIoManip val) {
+    *ss_ << val;
+    return *this;
+  }
+
+  // Instead of 1/0, we want to see true/false for bool values.
+  Message& operator <<(bool b) {
+    return *this << (b ? "true" : "false");
+  }
+
+  // These two overloads allow streaming a wide C string to a Message
+  // using the UTF-8 encoding.
+  Message& operator <<(const wchar_t* wide_c_str) {
+    return *this << internal::String::ShowWideCString(wide_c_str);
+  }
+  Message& operator <<(wchar_t* wide_c_str) {
+    return *this << internal::String::ShowWideCString(wide_c_str);
+  }
+
+#if GTEST_HAS_STD_WSTRING
+  // Converts the given wide string to a narrow string using the UTF-8
+  // encoding, and streams the result to this Message object.
+  Message& operator <<(const ::std::wstring& wstr);
+#endif  // GTEST_HAS_STD_WSTRING
+
+#if GTEST_HAS_GLOBAL_WSTRING
+  // Converts the given wide string to a narrow string using the UTF-8
+  // encoding, and streams the result to this Message object.
+  Message& operator <<(const ::wstring& wstr);
+#endif  // GTEST_HAS_GLOBAL_WSTRING
+
+  // Gets the text streamed to this object so far as a String.
+  // Each '\0' character in the buffer is replaced with "\\0".
+  //
+  // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+  internal::String GetString() const {
+    return internal::StringStreamToString(ss_.get());
+  }
+
+ private:
+
+#if GTEST_OS_SYMBIAN
+  // These are needed as the Nokia Symbian Compiler cannot decide between
+  // const T& and const T* in a function template. The Nokia compiler _can_
+  // decide between class template specializations for T and T*, so a
+  // tr1::type_traits-like is_pointer works, and we can overload on that.
+  template <typename T>
+  inline void StreamHelper(internal::true_type /*dummy*/, T* pointer) {
+    if (pointer == NULL) {
+      *ss_ << "(null)";
+    } else {
+      ::GTestStreamToHelper(ss_.get(), pointer);
+    }
+  }
+  template <typename T>
+  inline void StreamHelper(internal::false_type /*dummy*/, const T& value) {
+    ::GTestStreamToHelper(ss_.get(), value);
+  }
+#endif  // GTEST_OS_SYMBIAN
+
+  // We'll hold the text streamed to this object here.
+  const internal::scoped_ptr< ::std::stringstream> ss_;
+
+  // We declare (but don't implement) this to prevent the compiler
+  // from implementing the assignment operator.
+  void operator=(const Message&);
+};
+
+// Streams a Message to an ostream.
+inline std::ostream& operator <<(std::ostream& os, const Message& sb) {
+  return os << sb.GetString();
+}
+
+}  // namespace testing
+
+#endif  // GTEST_INCLUDE_GTEST_GTEST_MESSAGE_H_
+// This file was GENERATED by command:
+//     pump.py gtest-param-test.h.pump
+// DO NOT EDIT BY HAND!!!
+
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: vladl at google.com (Vlad Losev)
+//
+// Macros and functions for implementing parameterized tests
+// in Google C++ Testing Framework (Google Test)
+//
+// This file is generated by a SCRIPT.  DO NOT EDIT BY HAND!
+//
+#ifndef GTEST_INCLUDE_GTEST_GTEST_PARAM_TEST_H_
+#define GTEST_INCLUDE_GTEST_GTEST_PARAM_TEST_H_
+
+
+// Value-parameterized tests allow you to test your code with different
+// parameters without writing multiple copies of the same test.
+//
+// Here is how you use value-parameterized tests:
+
+#if 0
+
+// To write value-parameterized tests, first you should define a fixture
+// class. It is usually derived from testing::TestWithParam<T> (see below for
+// another inheritance scheme that's sometimes useful in more complicated
+// class hierarchies), where T is the type of your parameter values.
+// TestWithParam<T> is itself derived from testing::Test. T can be any
+// copyable type. If it's a raw pointer, you are responsible for managing the
+// lifespan of the pointed values.
+
+class FooTest : public ::testing::TestWithParam<const char*> {
+  // You can implement all the usual class fixture members here.
+};
+
+// Then, use the TEST_P macro to define as many parameterized tests
+// for this fixture as you want. The _P suffix is for "parameterized"
+// or "pattern", whichever you prefer to think.
+
+TEST_P(FooTest, DoesBlah) {
+  // Inside a test, access the test parameter with the GetParam() method
+  // of the TestWithParam<T> class:
+  EXPECT_TRUE(foo.Blah(GetParam()));
+  ...
+}
+
+TEST_P(FooTest, HasBlahBlah) {
+  ...
+}
+
+// Finally, you can use INSTANTIATE_TEST_CASE_P to instantiate the test
+// case with any set of parameters you want. Google Test defines a number
+// of functions for generating test parameters. They return what we call
+// (surprise!) parameter generators. Here is a summary of them, which
+// are all in the testing namespace:
+//
+//
+//  Range(begin, end [, step]) - Yields values {begin, begin+step,
+//                               begin+step+step, ...}. The values do not
+//                               include end. step defaults to 1.
+//  Values(v1, v2, ..., vN)    - Yields values {v1, v2, ..., vN}.
+//  ValuesIn(container)        - Yields values from a C-style array, an STL
+//  ValuesIn(begin,end)          container, or an iterator range [begin, end).
+//  Bool()                     - Yields sequence {false, true}.
+//  Combine(g1, g2, ..., gN)   - Yields all combinations (the Cartesian product
+//                               for the math savvy) of the values generated
+//                               by the N generators.
+//
+// For more details, see comments at the definitions of these functions below
+// in this file.
+//
+// The following statement will instantiate tests from the FooTest test case
+// each with parameter values "meeny", "miny", and "moe".
+
+INSTANTIATE_TEST_CASE_P(InstantiationName,
+                        FooTest,
+                        Values("meeny", "miny", "moe"));
+
+// To distinguish different instances of the pattern (yes, you
+// can instantiate it more than once), the first argument to the
+// INSTANTIATE_TEST_CASE_P macro is a prefix that will be added to the
+// actual test case name. Remember to pick unique prefixes for different
+// instantiations. The tests from the instantiation above will have
+// these names:
+//
+//    * InstantiationName/FooTest.DoesBlah/0 for "meeny"
+//    * InstantiationName/FooTest.DoesBlah/1 for "miny"
+//    * InstantiationName/FooTest.DoesBlah/2 for "moe"
+//    * InstantiationName/FooTest.HasBlahBlah/0 for "meeny"
+//    * InstantiationName/FooTest.HasBlahBlah/1 for "miny"
+//    * InstantiationName/FooTest.HasBlahBlah/2 for "moe"
+//
+// You can use these names in --gtest_filter.
+//
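+// For example, passing --gtest_filter=InstantiationName/FooTest.DoesBlah/*
+// on the command line runs just the three DoesBlah instances listed above.
+//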
+// This statement will instantiate all tests from FooTest again, each
+// with parameter values "cat" and "dog":
+
+const char* pets[] = {"cat", "dog"};
+INSTANTIATE_TEST_CASE_P(AnotherInstantiationName, FooTest, ValuesIn(pets));
+
+// The tests from the instantiation above will have these names:
+//
+//    * AnotherInstantiationName/FooTest.DoesBlah/0 for "cat"
+//    * AnotherInstantiationName/FooTest.DoesBlah/1 for "dog"
+//    * AnotherInstantiationName/FooTest.HasBlahBlah/0 for "cat"
+//    * AnotherInstantiationName/FooTest.HasBlahBlah/1 for "dog"
+//
+// Please note that INSTANTIATE_TEST_CASE_P will instantiate all tests
+// in the given test case, whether their definitions come before or
+// AFTER the INSTANTIATE_TEST_CASE_P statement.
+//
+// Please also note that generator expressions (including parameters to the
+// generators) are evaluated in InitGoogleTest(), after main() has started.
+// This allows the user, on the one hand, to adjust generator parameters in
+// order to determine the set of tests to run dynamically, and on the other
+// hand, to inspect the generated tests with the Google Test reflection API
+// before RUN_ALL_TESTS() is executed.
+//
+// You can see samples/sample7_unittest.cc and samples/sample8_unittest.cc
+// for more examples.
+//
+// In the future, we plan to publish the API for defining new parameter
+// generators. But for now this interface remains part of the internal
+// implementation and is subject to change.
+//
+//
+// A parameterized test fixture must be derived from testing::Test and from
+// testing::WithParamInterface<T>, where T is the type of the parameter
+// values. Inheriting from TestWithParam<T> satisfies that requirement because
+// TestWithParam<T> inherits from both Test and WithParamInterface. In more
+// complicated hierarchies, however, it is occasionally useful to inherit
+// separately from Test and WithParamInterface. For example:
+
+class BaseTest : public ::testing::Test {
+  // You can inherit all the usual members for a non-parameterized test
+  // fixture here.
+};
+
+class DerivedTest : public BaseTest, public ::testing::WithParamInterface<int> {
+  // The usual test fixture members go here too.
+};
+
+TEST_F(BaseTest, HasFoo) {
+  // This is an ordinary non-parameterized test.
+}
+
+TEST_P(DerivedTest, DoesBlah) {
+  // GetParam works just the same here as if you inherit from TestWithParam.
+  EXPECT_TRUE(foo.Blah(GetParam()));
+}
+
+#endif  // 0
+
+
+#if !GTEST_OS_SYMBIAN
+# include <utility>
+#endif
+
+// scripts/fuse_gtest.py depends on gtest's own header being #included
+// *unconditionally*.  Therefore these #includes cannot be moved
+// inside #if GTEST_HAS_PARAM_TEST.
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: vladl at google.com (Vlad Losev)
+
+// Type and function utilities for implementing parameterized tests.
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_H_
+
+#include <iterator>
+#include <utility>
+#include <vector>
+
+// scripts/fuse_gtest.py depends on gtest's own header being #included
+// *unconditionally*.  Therefore these #includes cannot be moved
+// inside #if GTEST_HAS_PARAM_TEST.
+// Copyright 2003 Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: Dan Egnor (egnor at google.com)
+//
+// A "smart" pointer type with reference tracking.  Every pointer to a
+// particular object is kept on a circular linked list.  When the last pointer
+// to an object is destroyed or reassigned, the object is deleted.
+//
+// Used properly, this deletes the object when the last reference goes away.
+// There are several caveats:
+// - Like all reference counting schemes, cycles lead to leaks.
+// - Each smart pointer is actually two pointers (twice the size of a raw pointer).
+// - Every time a pointer is assigned, the entire list of pointers to that
+//   object is traversed.  This class is therefore NOT SUITABLE when there
+//   will often be more than two or three pointers to a particular object.
+// - References are only tracked as long as linked_ptr<> objects are copied.
+//   If a linked_ptr<> is converted to a raw pointer and back, BAD THINGS
+//   will happen (double deletion).
+//
+// A good use of this class is storing object references in STL containers.
+// You can safely put linked_ptr<> in a vector<>.
+// Other uses may not be as good.
+//
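+// A minimal usage sketch (Foo here is a hypothetical user type):
+//
+//   linked_ptr<Foo> a(new Foo);   // a owns the new Foo
+//   linked_ptr<Foo> b = a;        // a and b now share ownership
+//   a.reset();                    // the Foo survives; b still references it
+//   b.reset();                    // the last reference departs; Foo is deleted
+//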
+// Note: If you use an incomplete type with linked_ptr<>, the class
+// *containing* linked_ptr<> must have a constructor and destructor (even
+// if they do nothing!).
+//
+// Bill Gibbons suggested we use something like this.
+//
+// Thread Safety:
+//   Unlike other linked_ptr implementations, in this implementation
+//   a linked_ptr object is thread-safe in the sense that:
+//     - it's safe to copy linked_ptr objects concurrently,
+//     - it's safe to copy *from* a linked_ptr and read its underlying
+//       raw pointer (e.g. via get()) concurrently, and
+//     - it's safe to write to two linked_ptrs that point to the same
+//       shared object concurrently.
+// TODO(wan at google.com): rename this to safe_linked_ptr to avoid
+// confusion with normal linked_ptr.
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_LINKED_PTR_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_LINKED_PTR_H_
+
+#include <stdlib.h>
+#include <assert.h>
+
+
+namespace testing {
+namespace internal {
+
+// Protects copying of all linked_ptr objects.
+GTEST_API_ GTEST_DECLARE_STATIC_MUTEX_(g_linked_ptr_mutex);
+
+// This is used internally by all instances of linked_ptr<>.  It needs to be
+// a non-template class because different types of linked_ptr<> can refer to
+// the same object (linked_ptr<Superclass>(obj) vs linked_ptr<Subclass>(obj)).
+// Different types of linked_ptr must therefore be able to participate
+// in the same circular linked list, which requires a single class type here.
+//
+// DO NOT USE THIS CLASS DIRECTLY YOURSELF.  Use linked_ptr<T>.
+class linked_ptr_internal {
+ public:
+  // Create a new circle that includes only this instance.
+  void join_new() {
+    next_ = this;
+  }
+
+  // Many linked_ptr operations may change p.link_ for some linked_ptr
+  // variable p in the same circle as this object.  Therefore we need
+  // to prevent two such operations from occurring concurrently.
+  //
+  // Note that different types of linked_ptr objects can coexist in a
+  // circle (e.g. linked_ptr<Base>, linked_ptr<Derived1>, and
+  // linked_ptr<Derived2>).  Therefore we must use a single mutex to
+  // protect all linked_ptr objects.  This can create serious
+  // contention in production code, but is acceptable in a testing
+  // framework.
+
+  // Join an existing circle.
+  // L < g_linked_ptr_mutex
+  void join(linked_ptr_internal const* ptr) {
+    MutexLock lock(&g_linked_ptr_mutex);
+
+    linked_ptr_internal const* p = ptr;
+    while (p->next_ != ptr) p = p->next_;
+    p->next_ = this;
+    next_ = ptr;
+  }
+
+  // Leave whatever circle we're part of.  Returns true if we were the
+  // last member of the circle.  Once this is done, you can join() another.
+  // L < g_linked_ptr_mutex
+  bool depart() {
+    MutexLock lock(&g_linked_ptr_mutex);
+
+    if (next_ == this) return true;
+    linked_ptr_internal const* p = next_;
+    while (p->next_ != this) p = p->next_;
+    p->next_ = next_;
+    return false;
+  }
+
+ private:
+  mutable linked_ptr_internal const* next_;
+};
+
+template <typename T>
+class linked_ptr {
+ public:
+  typedef T element_type;
+
+  // Take over ownership of a raw pointer.  This should happen as soon as
+  // possible after the object is created.
+  explicit linked_ptr(T* ptr = NULL) { capture(ptr); }
+  ~linked_ptr() { depart(); }
+
+  // Copy an existing linked_ptr<>, adding ourselves to the list of references.
+  template <typename U> linked_ptr(linked_ptr<U> const& ptr) { copy(&ptr); }
+  linked_ptr(linked_ptr const& ptr) {  // NOLINT
+    assert(&ptr != this);
+    copy(&ptr);
+  }
+
+  // Assignment releases the old value and acquires the new.
+  template <typename U> linked_ptr& operator=(linked_ptr<U> const& ptr) {
+    depart();
+    copy(&ptr);
+    return *this;
+  }
+
+  linked_ptr& operator=(linked_ptr const& ptr) {
+    if (&ptr != this) {
+      depart();
+      copy(&ptr);
+    }
+    return *this;
+  }
+
+  // Smart pointer members.
+  void reset(T* ptr = NULL) {
+    depart();
+    capture(ptr);
+  }
+  T* get() const { return value_; }
+  T* operator->() const { return value_; }
+  T& operator*() const { return *value_; }
+
+  bool operator==(T* p) const { return value_ == p; }
+  bool operator!=(T* p) const { return value_ != p; }
+  template <typename U>
+  bool operator==(linked_ptr<U> const& ptr) const {
+    return value_ == ptr.get();
+  }
+  template <typename U>
+  bool operator!=(linked_ptr<U> const& ptr) const {
+    return value_ != ptr.get();
+  }
+
+ private:
+  template <typename U>
+  friend class linked_ptr;
+
+  T* value_;
+  linked_ptr_internal link_;
+
+  void depart() {
+    if (link_.depart()) delete value_;
+  }
+
+  void capture(T* ptr) {
+    value_ = ptr;
+    link_.join_new();
+  }
+
+  template <typename U> void copy(linked_ptr<U> const* ptr) {
+    value_ = ptr->get();
+    if (value_)
+      link_.join(&ptr->link_);
+    else
+      link_.join_new();
+  }
+};
+
+template<typename T> inline
+bool operator==(T* ptr, const linked_ptr<T>& x) {
+  return ptr == x.get();
+}
+
+template<typename T> inline
+bool operator!=(T* ptr, const linked_ptr<T>& x) {
+  return ptr != x.get();
+}
+
+// A function to convert T* into linked_ptr<T>.
+// Doing e.g. make_linked_ptr(new FooBarBaz<type>(arg)) is a shorter notation
+// for linked_ptr<FooBarBaz<type> >(new FooBarBaz<type>(arg)).
+template <typename T>
+linked_ptr<T> make_linked_ptr(T* ptr) {
+  return linked_ptr<T>(ptr);
+}
+
+}  // namespace internal
+}  // namespace testing
+
+#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_LINKED_PTR_H_
+// Copyright 2007, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan at google.com (Zhanyong Wan)
+
+// Google Test - The Google C++ Testing Framework
+//
+// This file implements a universal value printer that can print a
+// value of any type T:
+//
+//   void ::testing::internal::UniversalPrinter<T>::Print(value, ostream_ptr);
+//
+// A user can teach this function how to print a class type T by
+// defining either operator<<() or PrintTo() in the namespace that
+// defines T.  More specifically, the FIRST defined function in the
+// following list will be used (assuming T is defined in namespace
+// foo):
+//
+//   1. foo::PrintTo(const T&, ostream*)
+//   2. operator<<(ostream&, const T&) defined in either foo or the
+//      global namespace.
+//
+// If none of the above is defined, it will print the debug string of
+// the value if it is a protocol buffer, or print the raw bytes in the
+// value otherwise.
+//
+// To aid debugging: when T is a reference type, the address of the
+// value is also printed; when T is a (const) char pointer, both the
+// pointer value and the NUL-terminated string it points to are
+// printed.
+//
+// We also provide some convenient wrappers:
+//
+//   // Prints a value to a string.  For a (const or not) char
+//   // pointer, the NUL-terminated string (but not the pointer) is
+//   // printed.
+//   std::string ::testing::PrintToString(const T& value);
+//
+//   // Prints a value tersely: for a reference type, the referenced
+//   // value (but not the address) is printed; for a (const or not) char
+//   // pointer, the NUL-terminated string (but not the pointer) is
+//   // printed.
+//   void ::testing::internal::UniversalTersePrint(const T& value, ostream*);
+//
+//   // Prints a value using the type inferred by the compiler.  The difference
+//   // from UniversalTersePrint() is that this function prints both the
+//   // pointer and the NUL-terminated string for a (const or not) char pointer.
+//   void ::testing::internal::UniversalPrint(const T& value, ostream*);
+//
+//   // Prints the fields of a tuple tersely to a string vector, one
+//   // element for each field. Tuple support must be enabled in
+//   // gtest-port.h.
+//   std::vector<string> UniversalTersePrintTupleFieldsToStrings(
+//       const Tuple& value);
+//
+// Known limitation:
+//
+// The print primitives print the elements of an STL-style container
+// using the compiler-inferred type of *iter where iter is a
+// const_iterator of the container.  When const_iterator is an input
+// iterator but not a forward iterator, this inferred type may not
+// match value_type, and the print output may be incorrect.  In
+// practice, this is rarely a problem as for most containers
+// const_iterator is a forward iterator.  We'll fix this if there's an
+// actual need for it.  Note that this fix cannot rely on value_type
+// being defined as many user-defined container types don't have
+// value_type.
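+//
+// A minimal sketch of teaching the printer about a user-defined type
+// (the namespace foo and struct Bar below are hypothetical):
+//
+//   namespace foo {
+//   struct Bar { int x; };
+//   void PrintTo(const Bar& bar, ::std::ostream* os) {
+//     *os << "Bar(" << bar.x << ")";
+//   }
+//   }  // namespace foo
+//
+// With this in place, ::testing::PrintToString() and assertion failure
+// messages involving foo::Bar values use this format instead of printing
+// raw bytes.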
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_PRINTERS_H_
+#define GTEST_INCLUDE_GTEST_GTEST_PRINTERS_H_
+
+#include <ostream>  // NOLINT
+#include <sstream>
+#include <string>
+#include <utility>
+#include <vector>
+
+namespace testing {
+
+// Definitions in the 'internal' and 'internal2' namespaces are
+// subject to change without notice.  DO NOT USE THEM IN USER CODE!
+namespace internal2 {
+
+// Prints the given number of bytes in the given object to the given
+// ostream.
+GTEST_API_ void PrintBytesInObjectTo(const unsigned char* obj_bytes,
+                                     size_t count,
+                                     ::std::ostream* os);
+
+// For selecting which printer to use when a given type has neither <<
+// nor PrintTo().
+enum TypeKind {
+  kProtobuf,              // a protobuf type
+  kConvertibleToInteger,  // a type implicitly convertible to BiggestInt
+                          // (e.g. a named or unnamed enum type)
+  kOtherType              // anything else
+};
+
+// TypeWithoutFormatter<T, kTypeKind>::PrintValue(value, os) is called
+// by the universal printer to print a value of type T when neither
+// operator<< nor PrintTo() is defined for T, where kTypeKind is the
+// "kind" of T as defined by enum TypeKind.
+template <typename T, TypeKind kTypeKind>
+class TypeWithoutFormatter {
+ public:
+  // This default version is called when kTypeKind is kOtherType.
+  static void PrintValue(const T& value, ::std::ostream* os) {
+    PrintBytesInObjectTo(reinterpret_cast<const unsigned char*>(&value),
+                         sizeof(value), os);
+  }
+};
+
+// We print a protobuf using its ShortDebugString() when the string
+// doesn't exceed this many characters; otherwise we print it using
+// DebugString() for better readability.
+const size_t kProtobufOneLinerMaxLength = 50;
+
+template <typename T>
+class TypeWithoutFormatter<T, kProtobuf> {
+ public:
+  static void PrintValue(const T& value, ::std::ostream* os) {
+    const ::testing::internal::string short_str = value.ShortDebugString();
+    const ::testing::internal::string pretty_str =
+        short_str.length() <= kProtobufOneLinerMaxLength ?
+        short_str : ("\n" + value.DebugString());
+    *os << ("<" + pretty_str + ">");
+  }
+};
+
+template <typename T>
+class TypeWithoutFormatter<T, kConvertibleToInteger> {
+ public:
+  // Since T has no << operator or PrintTo() but can be implicitly
+  // converted to BiggestInt, we print it as a BiggestInt.
+  //
+  // Most likely T is an enum type (either named or unnamed), in which
+  // case printing it as an integer is the desired behavior.  In case
+  // T is not an enum, printing it as an integer is the best we can do
+  // given that it has no user-defined printer.
+  static void PrintValue(const T& value, ::std::ostream* os) {
+    const internal::BiggestInt kBigInt = value;
+    *os << kBigInt;
+  }
+};
+
+// Prints the given value to the given ostream.  If the value is a
+// protocol message, its debug string is printed; if it's an enum or
+// of a type implicitly convertible to BiggestInt, it's printed as an
+// integer; otherwise the bytes in the value are printed.  This is
+// what UniversalPrinter<T>::Print() does when it knows nothing about
+// type T and T has neither << operator nor PrintTo().
+//
+// A user can override this behavior for a class type Foo by defining
+// a << operator in the namespace where Foo is defined.
+//
+// We put this operator in namespace 'internal2' instead of 'internal'
+// to simplify the implementation, as much code in 'internal' needs to
+// use << in STL, which would conflict with our own << were it defined
+// in 'internal'.
+//
+// Note that this operator<< takes a generic std::basic_ostream<Char,
+// CharTraits> type instead of the more restricted std::ostream.  If
+// we define it to take an std::ostream instead, we'll get an
+// "ambiguous overloads" compiler error when trying to print a type
+// Foo that supports streaming to std::basic_ostream<Char,
+// CharTraits>, as the compiler cannot tell whether
+// operator<<(std::ostream&, const T&) or
+// operator<<(std::basic_stream<Char, CharTraits>, const Foo&) is more
+// specific.
+template <typename Char, typename CharTraits, typename T>
+::std::basic_ostream<Char, CharTraits>& operator<<(
+    ::std::basic_ostream<Char, CharTraits>& os, const T& x) {
+  TypeWithoutFormatter<T,
+      (internal::IsAProtocolMessage<T>::value ? kProtobuf :
+       internal::ImplicitlyConvertible<const T&, internal::BiggestInt>::value ?
+       kConvertibleToInteger : kOtherType)>::PrintValue(x, &os);
+  return os;
+}
+
+}  // namespace internal2
+}  // namespace testing
+
+// This namespace MUST NOT BE NESTED IN ::testing, or the name look-up
+// magic needed for implementing UniversalPrinter won't work.
+namespace testing_internal {
+
+// Used to print a value that is not an STL-style container when the
+// user doesn't define PrintTo() for it.
+template <typename T>
+void DefaultPrintNonContainerTo(const T& value, ::std::ostream* os) {
+  // With the following statement, during unqualified name lookup,
+  // testing::internal2::operator<< appears as if it was declared in
+  // the nearest enclosing namespace that contains both
+  // ::testing_internal and ::testing::internal2, i.e. the global
+  // namespace.  For more details, refer to the C++ Standard section
+  // 7.3.4-1 [namespace.udir].  This allows us to fall back onto
+  // testing::internal2::operator<< in case T doesn't come with a <<
+  // operator.
+  //
+  // We cannot write 'using ::testing::internal2::operator<<;', which
+  // gcc 3.3 fails to compile due to a compiler bug.
+  using namespace ::testing::internal2;  // NOLINT
+
+  // Assuming T is defined in namespace foo, in the next statement,
+  // the compiler will consider all of:
+  //
+  //   1. foo::operator<< (thanks to Koenig look-up),
+  //   2. ::operator<< (as the current namespace is enclosed in ::),
+  //   3. testing::internal2::operator<< (thanks to the using statement above).
+  //
+  // The operator<< whose type matches T best will be picked.
+  //
+  // We deliberately allow #2 to be a candidate, as sometimes it's
+  // impossible to define #1 (e.g. when foo is ::std, defining
+  // anything in it is undefined behavior unless you are a compiler
+  // vendor).
+  *os << value;
+}
+
+}  // namespace testing_internal
+
+namespace testing {
+namespace internal {
+
+// UniversalPrinter<T>::Print(value, ostream_ptr) prints the given
+// value to the given ostream.  The caller must ensure that
+// 'ostream_ptr' is not NULL, or the behavior is undefined.
+//
+// We define UniversalPrinter as a class template (as opposed to a
+// function template), as we need to partially specialize it for
+// reference types, which cannot be done with function templates.
+template <typename T>
+class UniversalPrinter;
+
+template <typename T>
+void UniversalPrint(const T& value, ::std::ostream* os);
+
+// Used to print an STL-style container when the user doesn't define
+// a PrintTo() for it.
+template <typename C>
+void DefaultPrintTo(IsContainer /* dummy */,
+                    false_type /* is not a pointer */,
+                    const C& container, ::std::ostream* os) {
+  const size_t kMaxCount = 32;  // The maximum number of elements to print.
+  *os << '{';
+  size_t count = 0;
+  for (typename C::const_iterator it = container.begin();
+       it != container.end(); ++it, ++count) {
+    if (count > 0) {
+      *os << ',';
+      if (count == kMaxCount) {  // Enough has been printed.
+        *os << " ...";
+        break;
+      }
+    }
+    *os << ' ';
+    // We cannot call PrintTo(*it, os) here as PrintTo() doesn't
+    // handle *it being a native array.
+    internal::UniversalPrint(*it, os);
+  }
+
+  if (count > 0) {
+    *os << ' ';
+  }
+  *os << '}';
+}
+
+// Used to print a pointer that is neither a char pointer nor a member
+// pointer, when the user doesn't define PrintTo() for it.  (A member
+// variable pointer or member function pointer doesn't really point to
+// a location in the address space.  Its representation is
+// implementation-defined.  Therefore it will be printed as raw
+// bytes.)
+template <typename T>
+void DefaultPrintTo(IsNotContainer /* dummy */,
+                    true_type /* is a pointer */,
+                    T* p, ::std::ostream* os) {
+  if (p == NULL) {
+    *os << "NULL";
+  } else {
+    // C++ doesn't allow casting from a function pointer to any object
+    // pointer.
+    //
+    // IsTrue() silences warnings: "Condition is always true",
+    // "unreachable code".
+    if (IsTrue(ImplicitlyConvertible<T*, const void*>::value)) {
+      // T is not a function type.  We just call << to print p,
+      // relying on ADL to pick up user-defined << for their pointer
+      // types, if any.
+      *os << p;
+    } else {
+      // T is a function type, so '*os << p' doesn't do what we want
+      // (it just prints p as bool).  We want to print p as a const
+      // void*.  However, we cannot cast it to const void* directly,
+      // even using reinterpret_cast, as earlier versions of gcc
+      // (e.g. 3.4.5) cannot compile the cast when p is a function
+      // pointer.  Casting to UInt64 first solves the problem.
+      *os << reinterpret_cast<const void*>(
+          reinterpret_cast<internal::UInt64>(p));
+    }
+  }
+}
+
+// Used to print a non-container, non-pointer value when the user
+// doesn't define PrintTo() for it.
+template <typename T>
+void DefaultPrintTo(IsNotContainer /* dummy */,
+                    false_type /* is not a pointer */,
+                    const T& value, ::std::ostream* os) {
+  ::testing_internal::DefaultPrintNonContainerTo(value, os);
+}
+
+// Prints the given value using the << operator if it has one;
+// otherwise prints the bytes in it.  This is what
+// UniversalPrinter<T>::Print() does when PrintTo() is not specialized
+// or overloaded for type T.
+//
+// A user can override this behavior for a class type Foo by defining
+// an overload of PrintTo() in the namespace where Foo is defined.  We
+// give the user this option as sometimes defining a << operator for
+// Foo is not desirable (e.g. the coding style may prevent doing it,
+// or there is already a << operator but it doesn't do what the user
+// wants).
+template <typename T>
+void PrintTo(const T& value, ::std::ostream* os) {
+  // DefaultPrintTo() is overloaded.  The type of its first two
+  // arguments determines which version will be picked.  If T is an
+  // STL-style container, the version for containers will be called; if
+  // T is a pointer, the pointer version will be called; otherwise the
+  // generic version will be called.
+  //
+  // Note that we check for container types here, before we check
+  // for protocol message types in our operator<<.  The rationale is:
+  //
+  // For protocol messages, we want to give people a chance to
+  // override Google Mock's format by defining a PrintTo() or
+  // operator<<.  For STL containers, other formats can be
+  // incompatible with Google Mock's format for the container
+  // elements; therefore we check for container types here to ensure
+  // that our format is used.
+  //
+  // The second argument of DefaultPrintTo() is needed to bypass a bug
+  // in Symbian's C++ compiler that prevents it from picking the right
+  // overload between:
+  //
+  //   PrintTo(const T& x, ...);
+  //   PrintTo(T* x, ...);
+  DefaultPrintTo(IsContainerTest<T>(0), is_pointer<T>(), value, os);
+}
+
+// The following list of PrintTo() overloads tells
+// UniversalPrinter<T>::Print() how to print standard types (built-in
+// types, strings, plain arrays, and pointers).
+
+// Overloads for various char types.
+GTEST_API_ void PrintTo(unsigned char c, ::std::ostream* os);
+GTEST_API_ void PrintTo(signed char c, ::std::ostream* os);
+inline void PrintTo(char c, ::std::ostream* os) {
+  // When printing a plain char, we always treat it as unsigned.  This
+  // way, the output won't be affected by whether the compiler thinks
+  // char is signed or not.
+  PrintTo(static_cast<unsigned char>(c), os);
+}
+
+// Overloads for other simple built-in types.
+inline void PrintTo(bool x, ::std::ostream* os) {
+  *os << (x ? "true" : "false");
+}
+
+// Overload for wchar_t type.
+// Prints a wchar_t as a symbol if it is printable, or as its internal
+// code otherwise, and in both cases also as its decimal code (except for
+// L'\0').  The L'\0' char is printed as "L'\\0'".  The decimal code is
+// printed as a signed integer when wchar_t is implemented by the compiler
+// as a signed type, and as an unsigned integer when wchar_t is
+// implemented as an unsigned type.
+GTEST_API_ void PrintTo(wchar_t wc, ::std::ostream* os);
+
+// Overloads for C strings.
+GTEST_API_ void PrintTo(const char* s, ::std::ostream* os);
+inline void PrintTo(char* s, ::std::ostream* os) {
+  PrintTo(ImplicitCast_<const char*>(s), os);
+}
+
+// signed/unsigned char is often used for representing binary data, so
+// we print pointers to it as void* to be safe.
+inline void PrintTo(const signed char* s, ::std::ostream* os) {
+  PrintTo(ImplicitCast_<const void*>(s), os);
+}
+inline void PrintTo(signed char* s, ::std::ostream* os) {
+  PrintTo(ImplicitCast_<const void*>(s), os);
+}
+inline void PrintTo(const unsigned char* s, ::std::ostream* os) {
+  PrintTo(ImplicitCast_<const void*>(s), os);
+}
+inline void PrintTo(unsigned char* s, ::std::ostream* os) {
+  PrintTo(ImplicitCast_<const void*>(s), os);
+}
+
+// MSVC can be configured to define wchar_t as a typedef of unsigned
+// short.  It defines _NATIVE_WCHAR_T_DEFINED when wchar_t is a native
+// type.  When wchar_t is a typedef, defining an overload for const
+// wchar_t* would cause unsigned short* to be printed as a wide string,
+// possibly causing invalid memory accesses.
+#if !defined(_MSC_VER) || defined(_NATIVE_WCHAR_T_DEFINED)
+// Overloads for wide C strings
+GTEST_API_ void PrintTo(const wchar_t* s, ::std::ostream* os);
+inline void PrintTo(wchar_t* s, ::std::ostream* os) {
+  PrintTo(ImplicitCast_<const wchar_t*>(s), os);
+}
+#endif
+
+// Overload for C arrays.  Multi-dimensional arrays are printed
+// properly.
+
+// Prints the given number of elements in an array, without printing
+// the curly braces.
+template <typename T>
+void PrintRawArrayTo(const T a[], size_t count, ::std::ostream* os) {
+  UniversalPrint(a[0], os);
+  for (size_t i = 1; i != count; i++) {
+    *os << ", ";
+    UniversalPrint(a[i], os);
+  }
+}
+
+// Overloads for ::string and ::std::string.
+#if GTEST_HAS_GLOBAL_STRING
+GTEST_API_ void PrintStringTo(const ::string&s, ::std::ostream* os);
+inline void PrintTo(const ::string& s, ::std::ostream* os) {
+  PrintStringTo(s, os);
+}
+#endif  // GTEST_HAS_GLOBAL_STRING
+
+GTEST_API_ void PrintStringTo(const ::std::string&s, ::std::ostream* os);
+inline void PrintTo(const ::std::string& s, ::std::ostream* os) {
+  PrintStringTo(s, os);
+}
+
+// Overloads for ::wstring and ::std::wstring.
+#if GTEST_HAS_GLOBAL_WSTRING
+GTEST_API_ void PrintWideStringTo(const ::wstring&s, ::std::ostream* os);
+inline void PrintTo(const ::wstring& s, ::std::ostream* os) {
+  PrintWideStringTo(s, os);
+}
+#endif  // GTEST_HAS_GLOBAL_WSTRING
+
+#if GTEST_HAS_STD_WSTRING
+GTEST_API_ void PrintWideStringTo(const ::std::wstring&s, ::std::ostream* os);
+inline void PrintTo(const ::std::wstring& s, ::std::ostream* os) {
+  PrintWideStringTo(s, os);
+}
+#endif  // GTEST_HAS_STD_WSTRING
+
+#if GTEST_HAS_TR1_TUPLE
+// Overload for ::std::tr1::tuple.  Needed for printing function arguments,
+// which are packed as tuples.
+
+// Helper function for printing a tuple.  T must be instantiated with
+// a tuple type.
+template <typename T>
+void PrintTupleTo(const T& t, ::std::ostream* os);
+
+// Overloaded PrintTo() for tuples of various arities.  We support
+// tuples of up to 10 fields.  The following implementation works
+// regardless of whether tr1::tuple is implemented using the
+// non-standard variadic template feature or not.
+
+inline void PrintTo(const ::std::tr1::tuple<>& t, ::std::ostream* os) {
+  PrintTupleTo(t, os);
+}
+
+template <typename T1>
+void PrintTo(const ::std::tr1::tuple<T1>& t, ::std::ostream* os) {
+  PrintTupleTo(t, os);
+}
+
+template <typename T1, typename T2>
+void PrintTo(const ::std::tr1::tuple<T1, T2>& t, ::std::ostream* os) {
+  PrintTupleTo(t, os);
+}
+
+template <typename T1, typename T2, typename T3>
+void PrintTo(const ::std::tr1::tuple<T1, T2, T3>& t, ::std::ostream* os) {
+  PrintTupleTo(t, os);
+}
+
+template <typename T1, typename T2, typename T3, typename T4>
+void PrintTo(const ::std::tr1::tuple<T1, T2, T3, T4>& t, ::std::ostream* os) {
+  PrintTupleTo(t, os);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5>
+void PrintTo(const ::std::tr1::tuple<T1, T2, T3, T4, T5>& t,
+             ::std::ostream* os) {
+  PrintTupleTo(t, os);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+          typename T6>
+void PrintTo(const ::std::tr1::tuple<T1, T2, T3, T4, T5, T6>& t,
+             ::std::ostream* os) {
+  PrintTupleTo(t, os);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+          typename T6, typename T7>
+void PrintTo(const ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7>& t,
+             ::std::ostream* os) {
+  PrintTupleTo(t, os);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+          typename T6, typename T7, typename T8>
+void PrintTo(const ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7, T8>& t,
+             ::std::ostream* os) {
+  PrintTupleTo(t, os);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+          typename T6, typename T7, typename T8, typename T9>
+void PrintTo(const ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7, T8, T9>& t,
+             ::std::ostream* os) {
+  PrintTupleTo(t, os);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+          typename T6, typename T7, typename T8, typename T9, typename T10>
+void PrintTo(
+    const ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>& t,
+    ::std::ostream* os) {
+  PrintTupleTo(t, os);
+}
+#endif  // GTEST_HAS_TR1_TUPLE
+
+// Overload for std::pair.
+template <typename T1, typename T2>
+void PrintTo(const ::std::pair<T1, T2>& value, ::std::ostream* os) {
+  *os << '(';
+  // We cannot use UniversalPrint(value.first, os) here, as T1 may be
+  // a reference type.  The same for printing value.second.
+  UniversalPrinter<T1>::Print(value.first, os);
+  *os << ", ";
+  UniversalPrinter<T2>::Print(value.second, os);
+  *os << ')';
+}
+
+// Implements printing a non-reference type T by letting the compiler
+// pick the right overload of PrintTo() for T.
+template <typename T>
+class UniversalPrinter {
+ public:
+  // MSVC warns about adding const to a function type, so we want to
+  // disable the warning.
+#ifdef _MSC_VER
+# pragma warning(push)          // Saves the current warning state.
+# pragma warning(disable:4180)  // Temporarily disables warning 4180.
+#endif  // _MSC_VER
+
+  // Note: we deliberately don't call this PrintTo(), as that name
+  // conflicts with ::testing::internal::PrintTo in the body of the
+  // function.
+  static void Print(const T& value, ::std::ostream* os) {
+    // By default, ::testing::internal::PrintTo() is used for printing
+    // the value.
+    //
+    // Thanks to Koenig look-up, if T is a class and has its own
+    // PrintTo() function defined in its namespace, that function will
+    // be visible here.  Since it is more specific than the generic ones
+    // in ::testing::internal, it will be picked by the compiler in the
+    // following statement - exactly what we want.
+    PrintTo(value, os);
+  }
+
+#ifdef _MSC_VER
+# pragma warning(pop)           // Restores the warning state.
+#endif  // _MSC_VER
+};
+
+// UniversalPrintArray(begin, len, os) prints an array of 'len'
+// elements, starting at address 'begin'.
+template <typename T>
+void UniversalPrintArray(const T* begin, size_t len, ::std::ostream* os) {
+  if (len == 0) {
+    *os << "{}";
+  } else {
+    *os << "{ ";
+    const size_t kThreshold = 18;
+    const size_t kChunkSize = 8;
+    // If the array has more than kThreshold elements, we'll have to
+    // omit some details by printing only the first and the last
+    // kChunkSize elements.
+    // TODO(wan at google.com): let the user control the threshold using a flag.
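+    //
+    // For example, with these constants a 20-element int array holding
+    // 0..19 is printed as
+    // "{ 0, 1, 2, 3, 4, 5, 6, 7, ..., 12, 13, 14, 15, 16, 17, 18, 19 }".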
+    if (len <= kThreshold) {
+      PrintRawArrayTo(begin, len, os);
+    } else {
+      PrintRawArrayTo(begin, kChunkSize, os);
+      *os << ", ..., ";
+      PrintRawArrayTo(begin + len - kChunkSize, kChunkSize, os);
+    }
+    *os << " }";
+  }
+}
+// This overload prints a (const) char array compactly.
+GTEST_API_ void UniversalPrintArray(const char* begin,
+                                    size_t len,
+                                    ::std::ostream* os);
+
+// Implements printing an array type T[N].
+template <typename T, size_t N>
+class UniversalPrinter<T[N]> {
+ public:
+  // Prints the given array, omitting some elements when there are too
+  // many.
+  static void Print(const T (&a)[N], ::std::ostream* os) {
+    UniversalPrintArray(a, N, os);
+  }
+};
+
+// Implements printing a reference type T&.
+template <typename T>
+class UniversalPrinter<T&> {
+ public:
+  // MSVC warns about adding const to a function type, so we want to
+  // disable the warning.
+#ifdef _MSC_VER
+# pragma warning(push)          // Saves the current warning state.
+# pragma warning(disable:4180)  // Temporarily disables warning 4180.
+#endif  // _MSC_VER
+
+  static void Print(const T& value, ::std::ostream* os) {
+    // Prints the address of the value.  We use reinterpret_cast here
+    // as static_cast doesn't compile when T is a function type.
+    *os << "@" << reinterpret_cast<const void*>(&value) << " ";
+
+    // Then prints the value itself.
+    UniversalPrint(value, os);
+  }
+
+#ifdef _MSC_VER
+# pragma warning(pop)           // Restores the warning state.
+#endif  // _MSC_VER
+};
+
+// Prints a value tersely: for a reference type, the referenced value
+// (but not the address) is printed; for a (const) char pointer, the
+// NUL-terminated string (but not the pointer) is printed.
+template <typename T>
+void UniversalTersePrint(const T& value, ::std::ostream* os) {
+  UniversalPrint(value, os);
+}
+inline void UniversalTersePrint(const char* str, ::std::ostream* os) {
+  if (str == NULL) {
+    *os << "NULL";
+  } else {
+    UniversalPrint(string(str), os);
+  }
+}
+inline void UniversalTersePrint(char* str, ::std::ostream* os) {
+  UniversalTersePrint(static_cast<const char*>(str), os);
+}
+
+// Prints a value using the type inferred by the compiler.  The
+// difference between this and UniversalTersePrint() is that for a
+// (const) char pointer, this prints both the pointer and the
+// NUL-terminated string.
+template <typename T>
+void UniversalPrint(const T& value, ::std::ostream* os) {
+  UniversalPrinter<T>::Print(value, os);
+}
+
+#if GTEST_HAS_TR1_TUPLE
+typedef ::std::vector<string> Strings;
+
+// This helper template allows PrintTo() for tuples and
+// UniversalTersePrintTupleFieldsToStrings() to be defined by
+// induction on the number of tuple fields.  The idea is that
+// TuplePrefixPrinter<N>::PrintPrefixTo(t, os) prints the first N
+// fields in tuple t, and can be defined in terms of
+// TuplePrefixPrinter<N - 1>.
+
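+// For example, under this scheme a tuple holding (1, 2, 3) is rendered by
+// PrintTupleTo() below as "(1, 2, 3)": TuplePrefixPrinter<3> prints the
+// first two fields via TuplePrefixPrinter<2> and then appends ", " followed
+// by the third field.
+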
+// The inductive case.
+template <size_t N>
+struct TuplePrefixPrinter {
+  // Prints the first N fields of a tuple.
+  template <typename Tuple>
+  static void PrintPrefixTo(const Tuple& t, ::std::ostream* os) {
+    TuplePrefixPrinter<N - 1>::PrintPrefixTo(t, os);
+    *os << ", ";
+    UniversalPrinter<typename ::std::tr1::tuple_element<N - 1, Tuple>::type>
+        ::Print(::std::tr1::get<N - 1>(t), os);
+  }
+
+  // Tersely prints the first N fields of a tuple to a string vector,
+  // one element for each field.
+  template <typename Tuple>
+  static void TersePrintPrefixToStrings(const Tuple& t, Strings* strings) {
+    TuplePrefixPrinter<N - 1>::TersePrintPrefixToStrings(t, strings);
+    ::std::stringstream ss;
+    UniversalTersePrint(::std::tr1::get<N - 1>(t), &ss);
+    strings->push_back(ss.str());
+  }
+};
+
+// Base cases.
+template <>
+struct TuplePrefixPrinter<0> {
+  template <typename Tuple>
+  static void PrintPrefixTo(const Tuple&, ::std::ostream*) {}
+
+  template <typename Tuple>
+  static void TersePrintPrefixToStrings(const Tuple&, Strings*) {}
+};
+// We have to specialize the entire TuplePrefixPrinter<> class
+// template here, even though the definition of
+// TersePrintPrefixToStrings() is the same as the generic version, as
+// Embarcadero (formerly CodeGear, formerly Borland) C++ doesn't
+// support specializing a method template of a class template.
+template <>
+struct TuplePrefixPrinter<1> {
+  template <typename Tuple>
+  static void PrintPrefixTo(const Tuple& t, ::std::ostream* os) {
+    UniversalPrinter<typename ::std::tr1::tuple_element<0, Tuple>::type>::
+        Print(::std::tr1::get<0>(t), os);
+  }
+
+  template <typename Tuple>
+  static void TersePrintPrefixToStrings(const Tuple& t, Strings* strings) {
+    ::std::stringstream ss;
+    UniversalTersePrint(::std::tr1::get<0>(t), &ss);
+    strings->push_back(ss.str());
+  }
+};
+
+// Helper function for printing a tuple.  T must be instantiated with
+// a tuple type.
+template <typename T>
+void PrintTupleTo(const T& t, ::std::ostream* os) {
+  *os << "(";
+  TuplePrefixPrinter< ::std::tr1::tuple_size<T>::value>::
+      PrintPrefixTo(t, os);
+  *os << ")";
+}
+
+// Prints the fields of a tuple tersely to a string vector, one
+// element for each field.  See the comment before
+// UniversalTersePrint() for how we define "tersely".
+template <typename Tuple>
+Strings UniversalTersePrintTupleFieldsToStrings(const Tuple& value) {
+  Strings result;
+  TuplePrefixPrinter< ::std::tr1::tuple_size<Tuple>::value>::
+      TersePrintPrefixToStrings(value, &result);
+  return result;
+}
+#endif  // GTEST_HAS_TR1_TUPLE
+
+}  // namespace internal
+
+template <typename T>
+::std::string PrintToString(const T& value) {
+  ::std::stringstream ss;
+  internal::UniversalTersePrint(value, &ss);
+  return ss.str();
+}
+
+}  // namespace testing
+
+#endif  // GTEST_INCLUDE_GTEST_GTEST_PRINTERS_H_
+
+#if GTEST_HAS_PARAM_TEST
+
+namespace testing {
+namespace internal {
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// Outputs a message explaining invalid registration of a different
+// fixture class for the same test case. This may happen when the
+// TEST_P macro is used to define two tests with the same name
+// but in different namespaces.
+GTEST_API_ void ReportInvalidTestCaseType(const char* test_case_name,
+                                          const char* file, int line);
+
+template <typename> class ParamGeneratorInterface;
+template <typename> class ParamGenerator;
+
+// Interface for iterating over elements provided by an implementation
+// of ParamGeneratorInterface<T>.
+template <typename T>
+class ParamIteratorInterface {
+ public:
+  virtual ~ParamIteratorInterface() {}
+  // A pointer to the base generator instance.
+  // Used only for the purposes of iterator comparison
+  // to make sure that two iterators belong to the same generator.
+  virtual const ParamGeneratorInterface<T>* BaseGenerator() const = 0;
+  // Advances iterator to point to the next element
+  // provided by the generator. The caller is responsible
+  // for not calling Advance() on an iterator equal to
+  // BaseGenerator()->End().
+  virtual void Advance() = 0;
+  // Clones the iterator object. Used for implementing copy semantics
+  // of ParamIterator<T>.
+  virtual ParamIteratorInterface* Clone() const = 0;
+  // Dereferences the current iterator and provides (read-only) access
+  // to the pointed value. It is the caller's responsibility not to call
+  // Current() on an iterator equal to BaseGenerator()->End().
+  // Used for implementing ParamGenerator<T>::operator*().
+  virtual const T* Current() const = 0;
+  // Determines whether the given iterator and other point to the same
+  // element in the sequence generated by the generator.
+  // Used for implementing ParamGenerator<T>::operator==().
+  virtual bool Equals(const ParamIteratorInterface& other) const = 0;
+};
+
+// Class iterating over elements provided by an implementation of
+// ParamGeneratorInterface<T>. It wraps ParamIteratorInterface<T>
+// and implements the const forward iterator concept.
+template <typename T>
+class ParamIterator {
+ public:
+  typedef T value_type;
+  typedef const T& reference;
+  typedef ptrdiff_t difference_type;
+
+  // ParamIterator assumes ownership of the impl_ pointer.
+  ParamIterator(const ParamIterator& other) : impl_(other.impl_->Clone()) {}
+  ParamIterator& operator=(const ParamIterator& other) {
+    if (this != &other)
+      impl_.reset(other.impl_->Clone());
+    return *this;
+  }
+
+  const T& operator*() const { return *impl_->Current(); }
+  const T* operator->() const { return impl_->Current(); }
+  // Prefix version of operator++.
+  ParamIterator& operator++() {
+    impl_->Advance();
+    return *this;
+  }
+  // Postfix version of operator++.
+  ParamIterator operator++(int /*unused*/) {
+    ParamIteratorInterface<T>* clone = impl_->Clone();
+    impl_->Advance();
+    return ParamIterator(clone);
+  }
+  bool operator==(const ParamIterator& other) const {
+    return impl_.get() == other.impl_.get() || impl_->Equals(*other.impl_);
+  }
+  bool operator!=(const ParamIterator& other) const {
+    return !(*this == other);
+  }
+
+ private:
+  friend class ParamGenerator<T>;
+  explicit ParamIterator(ParamIteratorInterface<T>* impl) : impl_(impl) {}
+  scoped_ptr<ParamIteratorInterface<T> > impl_;
+};
+
+// ParamGeneratorInterface<T> is the binary interface to access generators
+// defined in other translation units.
+template <typename T>
+class ParamGeneratorInterface {
+ public:
+  typedef T ParamType;
+
+  virtual ~ParamGeneratorInterface() {}
+
+  // Generator interface definition
+  virtual ParamIteratorInterface<T>* Begin() const = 0;
+  virtual ParamIteratorInterface<T>* End() const = 0;
+};
+
+// Wraps ParamGeneratorInterface<T> and provides general generator syntax
+// compatible with the STL Container concept.
+// This class implements copy initialization semantics and the contained
+// ParamGeneratorInterface<T> instance is shared among all copies
+// of the original object. This is possible because that instance is immutable.
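+//
+// A minimal usage sketch (gen is assumed to be some ParamGenerator<int>,
+// and DoSomethingWith() is a hypothetical helper):
+//
+//   for (ParamGenerator<int>::iterator it = gen.begin();
+//        it != gen.end(); ++it) {
+//     DoSomethingWith(*it);
+//   }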
+template<typename T>
+class ParamGenerator {
+ public:
+  typedef ParamIterator<T> iterator;
+
+  explicit ParamGenerator(ParamGeneratorInterface<T>* impl) : impl_(impl) {}
+  ParamGenerator(const ParamGenerator& other) : impl_(other.impl_) {}
+
+  ParamGenerator& operator=(const ParamGenerator& other) {
+    impl_ = other.impl_;
+    return *this;
+  }
+
+  iterator begin() const { return iterator(impl_->Begin()); }
+  iterator end() const { return iterator(impl_->End()); }
+
+ private:
+  linked_ptr<const ParamGeneratorInterface<T> > impl_;
+};
+
+// Generates values from a range of two comparable values. Can be used to
+// generate sequences of user-defined types that implement operator+() and
+// operator<().
+// This class is used in the Range() function.
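+// For example, a Range(0, 10, 3) call is expected to be backed by a
+// RangeGenerator<int, int>(0, 10, 3), which yields 0, 3, 6, and 9
+// (the end value 10 is excluded).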
+template <typename T, typename IncrementT>
+class RangeGenerator : public ParamGeneratorInterface<T> {
+ public:
+  RangeGenerator(T begin, T end, IncrementT step)
+      : begin_(begin), end_(end),
+        step_(step), end_index_(CalculateEndIndex(begin, end, step)) {}
+  virtual ~RangeGenerator() {}
+
+  virtual ParamIteratorInterface<T>* Begin() const {
+    return new Iterator(this, begin_, 0, step_);
+  }
+  virtual ParamIteratorInterface<T>* End() const {
+    return new Iterator(this, end_, end_index_, step_);
+  }
+
+ private:
+  class Iterator : public ParamIteratorInterface<T> {
+   public:
+    Iterator(const ParamGeneratorInterface<T>* base, T value, int index,
+             IncrementT step)
+        : base_(base), value_(value), index_(index), step_(step) {}
+    virtual ~Iterator() {}
+
+    virtual const ParamGeneratorInterface<T>* BaseGenerator() const {
+      return base_;
+    }
+    virtual void Advance() {
+      value_ = value_ + step_;
+      index_++;
+    }
+    virtual ParamIteratorInterface<T>* Clone() const {
+      return new Iterator(*this);
+    }
+    virtual const T* Current() const { return &value_; }
+    virtual bool Equals(const ParamIteratorInterface<T>& other) const {
+      // Having the same base generator guarantees that the other
+      // iterator is of the same type and we can downcast.
+      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())
+          << "The program attempted to compare iterators "
+          << "from different generators." << std::endl;
+      const int other_index =
+          CheckedDowncastToActualType<const Iterator>(&other)->index_;
+      return index_ == other_index;
+    }
+
+   private:
+    Iterator(const Iterator& other)
+        : ParamIteratorInterface<T>(),
+          base_(other.base_), value_(other.value_), index_(other.index_),
+          step_(other.step_) {}
+
+    // No implementation - assignment is unsupported.
+    void operator=(const Iterator& other);
+
+    const ParamGeneratorInterface<T>* const base_;
+    T value_;
+    int index_;
+    const IncrementT step_;
+  };  // class RangeGenerator::Iterator
+
+  static int CalculateEndIndex(const T& begin,
+                               const T& end,
+                               const IncrementT& step) {
+    int end_index = 0;
+    for (T i = begin; i < end; i = i + step)
+      end_index++;
+    return end_index;
+  }
+
+  // No implementation - assignment is unsupported.
+  void operator=(const RangeGenerator& other);
+
+  const T begin_;
+  const T end_;
+  const IncrementT step_;
+  // The index for the end() iterator. All the elements in the generated
+  // sequence are indexed (0-based) to aid iterator comparison.
+  const int end_index_;
+};  // class RangeGenerator
+
+
+// Generates values from a pair of STL-style iterators. Used in the
+// ValuesIn() function. The elements are copied from the source range
+// since the source can be located on the stack, and the generator
+// is likely to persist beyond that stack frame.
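+// For instance (reusing the hypothetical FooTest fixture from the
+// documentation above), given
+//
+//   const char* pets[] = {"cat", "dog"};
+//   INSTANTIATE_TEST_CASE_P(Pets, FooTest, ValuesIn(pets));
+//
+// the pointer elements are copied into the generator's internal vector, so
+// the generator remains valid even after the stack array goes out of scope.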
+template <typename T>
+class ValuesInIteratorRangeGenerator : public ParamGeneratorInterface<T> {
+ public:
+  template <typename ForwardIterator>
+  ValuesInIteratorRangeGenerator(ForwardIterator begin, ForwardIterator end)
+      : container_(begin, end) {}
+  virtual ~ValuesInIteratorRangeGenerator() {}
+
+  virtual ParamIteratorInterface<T>* Begin() const {
+    return new Iterator(this, container_.begin());
+  }
+  virtual ParamIteratorInterface<T>* End() const {
+    return new Iterator(this, container_.end());
+  }
+
+ private:
+  typedef typename ::std::vector<T> ContainerType;
+
+  class Iterator : public ParamIteratorInterface<T> {
+   public:
+    Iterator(const ParamGeneratorInterface<T>* base,
+             typename ContainerType::const_iterator iterator)
+        : base_(base), iterator_(iterator) {}
+    virtual ~Iterator() {}
+
+    virtual const ParamGeneratorInterface<T>* BaseGenerator() const {
+      return base_;
+    }
+    virtual void Advance() {
+      ++iterator_;
+      value_.reset();
+    }
+    virtual ParamIteratorInterface<T>* Clone() const {
+      return new Iterator(*this);
+    }
+    // We need to use the cached value referenced by iterator_ because
+    // *iterator_ can return a temporary object (and of a type other than T),
+    // so just having "return &*iterator_;" doesn't work.
+    // value_ is updated here and not in Advance() because Advance()
+    // can advance iterator_ beyond the end of the range, and we cannot
+    // detect that fact. The client code, on the other hand, is
+    // responsible for not calling Current() on an out-of-range iterator.
+    virtual const T* Current() const {
+      if (value_.get() == NULL)
+        value_.reset(new T(*iterator_));
+      return value_.get();
+    }
+    virtual bool Equals(const ParamIteratorInterface<T>& other) const {
+      // Having the same base generator guarantees that the other
+      // iterator is of the same type and we can downcast.
+      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())
+          << "The program attempted to compare iterators "
+          << "from different generators." << std::endl;
+      return iterator_ ==
+          CheckedDowncastToActualType<const Iterator>(&other)->iterator_;
+    }
+
+   private:
+    Iterator(const Iterator& other)
+          // The explicit constructor call suppresses a false warning
+          // emitted by gcc when supplied with the -Wextra option.
+        : ParamIteratorInterface<T>(),
+          base_(other.base_),
+          iterator_(other.iterator_) {}
+
+    const ParamGeneratorInterface<T>* const base_;
+    typename ContainerType::const_iterator iterator_;
+    // A cached value of *iterator_. We keep it here to allow access by
+    // pointer in the wrapping iterator's operator->().
+    // value_ needs to be mutable to be accessed in Current().
+    // Use of scoped_ptr helps manage cached value's lifetime,
+    // which is bound by the lifespan of the iterator itself.
+    mutable scoped_ptr<const T> value_;
+  };  // class ValuesInIteratorRangeGenerator::Iterator
+
+  // No implementation - assignment is unsupported.
+  void operator=(const ValuesInIteratorRangeGenerator& other);
+
+  const ContainerType container_;
+};  // class ValuesInIteratorRangeGenerator
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// Stores a parameter value and later creates tests parameterized with that
+// value.
+template <class TestClass>
+class ParameterizedTestFactory : public TestFactoryBase {
+ public:
+  typedef typename TestClass::ParamType ParamType;
+  explicit ParameterizedTestFactory(ParamType parameter) :
+      parameter_(parameter) {}
+  virtual Test* CreateTest() {
+    TestClass::SetParam(&parameter_);
+    return new TestClass();
+  }
+
+ private:
+  const ParamType parameter_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestFactory);
+};
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// TestMetaFactoryBase is a base class for meta-factories that create
+// test factories for passing into MakeAndRegisterTestInfo function.
+template <class ParamType>
+class TestMetaFactoryBase {
+ public:
+  virtual ~TestMetaFactoryBase() {}
+
+  virtual TestFactoryBase* CreateTestFactory(ParamType parameter) = 0;
+};
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// TestMetaFactory creates test factories for passing into
+// MakeAndRegisterTestInfo function. Since MakeAndRegisterTestInfo receives
+// ownership of the test factory pointer, the same factory object cannot be
+// passed into that method twice. But ParameterizedTestCaseInfo is going to
+// call it for each Test/Parameter value combination. Thus it needs a meta
+// factory creator class.
+template <class TestCase>
+class TestMetaFactory
+    : public TestMetaFactoryBase<typename TestCase::ParamType> {
+ public:
+  typedef typename TestCase::ParamType ParamType;
+
+  TestMetaFactory() {}
+
+  virtual TestFactoryBase* CreateTestFactory(ParamType parameter) {
+    return new ParameterizedTestFactory<TestCase>(parameter);
+  }
+
+ private:
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestMetaFactory);
+};
+
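+// Ownership sketch (an assumption about call order for illustration):
+// because MakeAndRegisterTestInfo() takes ownership of each TestFactoryBase*,
+// the meta factory mints a fresh factory object per parameter value:
+//
+//   TestMetaFactory<FooTest> meta;                    // FooTest: hypothetical fixture
+//   TestFactoryBase* f1 = meta.CreateTestFactory(1);  // handed to gtest, owned there
+//   TestFactoryBase* f2 = meta.CreateTestFactory(2);  // a distinct object for value 2
+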
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// ParameterizedTestCaseInfoBase is a generic interface
+// to ParameterizedTestCaseInfo classes. ParameterizedTestCaseInfoBase
+// accumulates test information provided by TEST_P macro invocations
+// and generators provided by INSTANTIATE_TEST_CASE_P macro invocations
+// and uses that information to register all resulting test instances
+// in RegisterTests method. The ParameterizedTestCaseRegistry class holds
+// a collection of pointers to the ParameterizedTestCaseInfo objects
+// and calls RegisterTests() on each of them when asked.
+class ParameterizedTestCaseInfoBase {
+ public:
+  virtual ~ParameterizedTestCaseInfoBase() {}
+
+  // Base part of test case name for display purposes.
+  virtual const string& GetTestCaseName() const = 0;
+  // Test case id to verify identity.
+  virtual TypeId GetTestCaseTypeId() const = 0;
+  // UnitTest class invokes this method to register tests in this
+  // test case right before running them in RUN_ALL_TESTS macro.
+  // This method should not be called more than once on any single
+  // instance of a ParameterizedTestCaseInfoBase derived class.
+  virtual void RegisterTests() = 0;
+
+ protected:
+  ParameterizedTestCaseInfoBase() {}
+
+ private:
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestCaseInfoBase);
+};
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// ParameterizedTestCaseInfo accumulates tests obtained from TEST_P
+// macro invocations for a particular test case and generators
+// obtained from INSTANTIATE_TEST_CASE_P macro invocations for that
+// test case. It registers tests with all values generated by all
+// generators when asked.
+template <class TestCase>
+class ParameterizedTestCaseInfo : public ParameterizedTestCaseInfoBase {
+ public:
+  // ParamType and GeneratorCreationFunc are private types but are required
+  // for declarations of public methods AddTestPattern() and
+  // AddTestCaseInstantiation().
+  typedef typename TestCase::ParamType ParamType;
+  // A function that returns an instance of appropriate generator type.
+  typedef ParamGenerator<ParamType>(GeneratorCreationFunc)();
+
+  explicit ParameterizedTestCaseInfo(const char* name)
+      : test_case_name_(name) {}
+
+  // Test case base name for display purposes.
+  virtual const string& GetTestCaseName() const { return test_case_name_; }
+  // Test case id to verify identity.
+  virtual TypeId GetTestCaseTypeId() const { return GetTypeId<TestCase>(); }
+  // TEST_P macro uses AddTestPattern() to record information
+  // about a single test in a LocalTestInfo structure.
+  // test_case_name is the base name of the test case (without invocation
+  // prefix). test_base_name is the name of an individual test without
+  // parameter index. For the test SequenceA/FooTest.DoBar/1, FooTest is the
+  // test case base name and DoBar is the test base name.
+  void AddTestPattern(const char* test_case_name,
+                      const char* test_base_name,
+                      TestMetaFactoryBase<ParamType>* meta_factory) {
+    tests_.push_back(linked_ptr<TestInfo>(new TestInfo(test_case_name,
+                                                       test_base_name,
+                                                       meta_factory)));
+  }
+  // INSTANTIATE_TEST_CASE_P macro uses AddGenerator() to record information
+  // about a generator.
+  int AddTestCaseInstantiation(const string& instantiation_name,
+                               GeneratorCreationFunc* func,
+                               const char* /* file */,
+                               int /* line */) {
+    instantiations_.push_back(::std::make_pair(instantiation_name, func));
+    return 0;  // Return value used only to run this method in namespace scope.
+  }
+  // UnitTest class invokes this method to register tests in this test case
+  // right before running them in the RUN_ALL_TESTS macro.
+  // This method should not be called more than once on any single
+  // instance of a ParameterizedTestCaseInfoBase derived class.
+  // UnitTest has a guard to prevent it from calling this method more than once.
+  virtual void RegisterTests() {
+    for (typename TestInfoContainer::iterator test_it = tests_.begin();
+         test_it != tests_.end(); ++test_it) {
+      linked_ptr<TestInfo> test_info = *test_it;
+      for (typename InstantiationContainer::iterator gen_it =
+               instantiations_.begin(); gen_it != instantiations_.end();
+               ++gen_it) {
+        const string& instantiation_name = gen_it->first;
+        ParamGenerator<ParamType> generator((*gen_it->second)());
+
+        Message test_case_name_stream;
+        if ( !instantiation_name.empty() )
+          test_case_name_stream << instantiation_name << "/";
+        test_case_name_stream << test_info->test_case_base_name;
+
+        int i = 0;
+        for (typename ParamGenerator<ParamType>::iterator param_it =
+                 generator.begin();
+             param_it != generator.end(); ++param_it, ++i) {
+          Message test_name_stream;
+          test_name_stream << test_info->test_base_name << "/" << i;
+          MakeAndRegisterTestInfo(
+              test_case_name_stream.GetString().c_str(),
+              test_name_stream.GetString().c_str(),
+              NULL,  // No type parameter.
+              PrintToString(*param_it).c_str(),
+              GetTestCaseTypeId(),
+              TestCase::SetUpTestCase,
+              TestCase::TearDownTestCase,
+              test_info->test_meta_factory->CreateTestFactory(*param_it));
+        }  // for param_it
+      }  // for gen_it
+    }  // for test_it
+  }  // RegisterTests
+
+ private:
+  // LocalTestInfo structure keeps information about a single test registered
+  // with TEST_P macro.
+  struct TestInfo {
+    TestInfo(const char* a_test_case_base_name,
+             const char* a_test_base_name,
+             TestMetaFactoryBase<ParamType>* a_test_meta_factory) :
+        test_case_base_name(a_test_case_base_name),
+        test_base_name(a_test_base_name),
+        test_meta_factory(a_test_meta_factory) {}
+
+    const string test_case_base_name;
+    const string test_base_name;
+    const scoped_ptr<TestMetaFactoryBase<ParamType> > test_meta_factory;
+  };
+  typedef ::std::vector<linked_ptr<TestInfo> > TestInfoContainer;
+  // Keeps pairs of <Instantiation name, Sequence generator creation function>
+  // received from INSTANTIATE_TEST_CASE_P macros.
+  typedef ::std::vector<std::pair<string, GeneratorCreationFunc*> >
+      InstantiationContainer;
+
+  const string test_case_name_;
+  TestInfoContainer tests_;
+  InstantiationContainer instantiations_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestCaseInfo);
+};  // class ParameterizedTestCaseInfo
+
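+// Putting the pieces together (illustrative, reusing the hypothetical FooTest
+// example above): TEST_P records one pattern via AddTestPattern(), each
+// INSTANTIATE_TEST_CASE_P records one generator via AddTestCaseInstantiation(),
+// and RegisterTests() crosses the two sets, yielding full test names such as
+//
+//   MyRange/FooTest.IsNonNegative/0
+//   MyRange/FooTest.IsNonNegative/1
+//   MyRange/FooTest.IsNonNegative/2
+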
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// ParameterizedTestCaseRegistry contains a map of ParameterizedTestCaseInfoBase
+// classes accessed by test case names. TEST_P and INSTANTIATE_TEST_CASE_P
+// macros use it to locate their corresponding ParameterizedTestCaseInfo
+// descriptors.
+class ParameterizedTestCaseRegistry {
+ public:
+  ParameterizedTestCaseRegistry() {}
+  ~ParameterizedTestCaseRegistry() {
+    for (TestCaseInfoContainer::iterator it = test_case_infos_.begin();
+         it != test_case_infos_.end(); ++it) {
+      delete *it;
+    }
+  }
+
+  // Looks up or creates and returns a structure containing information about
+  // tests and instantiations of a particular test case.
+  template <class TestCase>
+  ParameterizedTestCaseInfo<TestCase>* GetTestCasePatternHolder(
+      const char* test_case_name,
+      const char* file,
+      int line) {
+    ParameterizedTestCaseInfo<TestCase>* typed_test_info = NULL;
+    for (TestCaseInfoContainer::iterator it = test_case_infos_.begin();
+         it != test_case_infos_.end(); ++it) {
+      if ((*it)->GetTestCaseName() == test_case_name) {
+        if ((*it)->GetTestCaseTypeId() != GetTypeId<TestCase>()) {
+          // Complain about incorrect usage of Google Test facilities
+          // and terminate the program since we cannot guarantee correct
+          // test case setup and tear-down in this case.
+          ReportInvalidTestCaseType(test_case_name, file, line);
+          posix::Abort();
+        } else {
+          // At this point we are sure that the object we found is of the same
+          // type we are looking for, so we downcast it to that type
+          // without further checks.
+          typed_test_info = CheckedDowncastToActualType<
+              ParameterizedTestCaseInfo<TestCase> >(*it);
+        }
+        break;
+      }
+    }
+    if (typed_test_info == NULL) {
+      typed_test_info = new ParameterizedTestCaseInfo<TestCase>(test_case_name);
+      test_case_infos_.push_back(typed_test_info);
+    }
+    return typed_test_info;
+  }
+  void RegisterTests() {
+    for (TestCaseInfoContainer::iterator it = test_case_infos_.begin();
+         it != test_case_infos_.end(); ++it) {
+      (*it)->RegisterTests();
+    }
+  }
+
+ private:
+  typedef ::std::vector<ParameterizedTestCaseInfoBase*> TestCaseInfoContainer;
+
+  TestCaseInfoContainer test_case_infos_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestCaseRegistry);
+};
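+
+// Lookup sketch (hedged; FooTest remains the hypothetical fixture used above):
+// GetTestCasePatternHolder() is lookup-or-create, so TEST_P and
+// INSTANTIATE_TEST_CASE_P expansions can run in either order and still share
+// a single ParameterizedTestCaseInfo<FooTest> keyed by the name "FooTest":
+//
+//   ParameterizedTestCaseRegistry registry;
+//   registry.GetTestCasePatternHolder<FooTest>("FooTest", __FILE__, __LINE__)
+//       ->AddTestPattern("FooTest", "IsNonNegative",
+//                        new TestMetaFactory<FooTest>());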
+
+}  // namespace internal
+}  // namespace testing
+
+#endif  //  GTEST_HAS_PARAM_TEST
+
+#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_H_
+// This file was GENERATED by command:
+//     pump.py gtest-param-util-generated.h.pump
+// DO NOT EDIT BY HAND!!!
+
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: vladl at google.com (Vlad Losev)
+
+// Type and function utilities for implementing parameterized tests.
+// This file is generated by a SCRIPT.  DO NOT EDIT BY HAND!
+//
+// Currently Google Test supports at most 50 arguments in Values,
+// and at most 10 arguments in Combine. Please contact
+// googletestframework at googlegroups.com if you need more.
+// Please note that the number of arguments to Combine is limited
+// by the maximum arity of the implementation of tr1::tuple which is
+// currently set at 10.
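+//
+// A short usage sketch of those limits (hypothetical fixtures, not defined in
+// this file; Combine requires tuple support):
+//
+//   INSTANTIATE_TEST_CASE_P(Small, FooTest, ::testing::Values(1, 2, 3));
+//   INSTANTIATE_TEST_CASE_P(Grid, BarTest,
+//                           ::testing::Combine(::testing::Values(1, 2),
+//                                              ::testing::Bool()));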
+
+#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_GENERATED_H_
+#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_GENERATED_H_
+
+// scripts/fuse_gtest.py depends on gtest's own header being #included
+// *unconditionally*.  Therefore these #includes cannot be moved
+// inside #if GTEST_HAS_PARAM_TEST.
+
+#if GTEST_HAS_PARAM_TEST
+
+namespace testing {
+
+// Forward declarations of ValuesIn(), which is implemented in
+// include/gtest/gtest-param-test.h.
+template <typename ForwardIterator>
+internal::ParamGenerator<
+  typename ::testing::internal::IteratorTraits<ForwardIterator>::value_type>
+ValuesIn(ForwardIterator begin, ForwardIterator end);
+
+template <typename T, size_t N>
+internal::ParamGenerator<T> ValuesIn(const T (&array)[N]);
+
+template <class Container>
+internal::ParamGenerator<typename Container::value_type> ValuesIn(
+    const Container& container);
+
+namespace internal {
+
+// Used in the Values() function to provide polymorphic capabilities.
+template <typename T1>
+class ValueArray1 {
+ public:
+  explicit ValueArray1(T1 v1) : v1_(v1) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const { return ValuesIn(&v1_, &v1_ + 1); }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray1& other);
+
+  const T1 v1_;
+};
+
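+// How a ValueArrayN is consumed (a sketch of the mechanism, assuming the
+// Values() wrappers declared in gtest-param-test.h): Values(v1, ..., vN)
+// returns a ValueArrayN, and the templated conversion operator lets that
+// object bind to whatever ParamType the instantiated test case uses:
+//
+//   ValueArray3<int, int, int> a = Values(1, 2, 3);  // hypothetical explicit spelling
+//   ParamGenerator<double> g = a;                    // elements converted to double here
+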
+template <typename T1, typename T2>
+class ValueArray2 {
+ public:
+  ValueArray2(T1 v1, T2 v2) : v1_(v1), v2_(v2) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray2& other);
+
+  const T1 v1_;
+  const T2 v2_;
+};
+
+template <typename T1, typename T2, typename T3>
+class ValueArray3 {
+ public:
+  ValueArray3(T1 v1, T2 v2, T3 v3) : v1_(v1), v2_(v2), v3_(v3) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray3& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4>
+class ValueArray4 {
+ public:
+  ValueArray4(T1 v1, T2 v2, T3 v3, T4 v4) : v1_(v1), v2_(v2), v3_(v3),
+      v4_(v4) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray4& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5>
+class ValueArray5 {
+ public:
+  ValueArray5(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5) : v1_(v1), v2_(v2), v3_(v3),
+      v4_(v4), v5_(v5) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray5& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6>
+class ValueArray6 {
+ public:
+  ValueArray6(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6) : v1_(v1), v2_(v2),
+      v3_(v3), v4_(v4), v5_(v5), v6_(v6) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray6& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7>
+class ValueArray7 {
+ public:
+  ValueArray7(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7) : v1_(v1),
+      v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray7& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8>
+class ValueArray8 {
+ public:
+  ValueArray8(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7,
+      T8 v8) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray8& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9>
+class ValueArray9 {
+ public:
+  ValueArray9(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8,
+      T9 v9) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8), v9_(v9) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray9& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10>
+class ValueArray10 {
+ public:
+  ValueArray10(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8), v9_(v9), v10_(v10) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray10& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11>
+class ValueArray11 {
+ public:
+  ValueArray11(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6),
+      v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray11& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12>
+class ValueArray12 {
+ public:
+  ValueArray12(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5),
+      v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray12& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13>
+class ValueArray13 {
+ public:
+  ValueArray13(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13) : v1_(v1), v2_(v2), v3_(v3), v4_(v4),
+      v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11),
+      v12_(v12), v13_(v13) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray13& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14>
+class ValueArray14 {
+ public:
+  ValueArray14(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14) : v1_(v1), v2_(v2), v3_(v3),
+      v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10),
+      v11_(v11), v12_(v12), v13_(v13), v14_(v14) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray14& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15>
+class ValueArray15 {
+ public:
+  ValueArray15(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15) : v1_(v1), v2_(v2),
+      v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10),
+      v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray15& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16>
+class ValueArray16 {
+ public:
+  ValueArray16(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16) : v1_(v1),
+      v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9),
+      v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15),
+      v16_(v16) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray16& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17>
+class ValueArray17 {
+ public:
+  ValueArray17(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16,
+      T17 v17) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14),
+      v15_(v15), v16_(v16), v17_(v17) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray17& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18>
+class ValueArray18 {
+ public:
+  ValueArray18(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14),
+      v15_(v15), v16_(v16), v17_(v17), v18_(v18) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray18& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19>
+class ValueArray19 {
+ public:
+  ValueArray19(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6),
+      v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13),
+      v14_(v14), v15_(v15), v16_(v16), v17_(v17), v18_(v18), v19_(v19) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray19& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20>
+class ValueArray20 {
+ public:
+  ValueArray20(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5),
+      v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12),
+      v13_(v13), v14_(v14), v15_(v15), v16_(v16), v17_(v17), v18_(v18),
+      v19_(v19), v20_(v20) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray20& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21>
+class ValueArray21 {
+ public:
+  ValueArray21(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21) : v1_(v1), v2_(v2), v3_(v3), v4_(v4),
+      v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11),
+      v12_(v12), v13_(v13), v14_(v14), v15_(v15), v16_(v16), v17_(v17),
+      v18_(v18), v19_(v19), v20_(v20), v21_(v21) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray21& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22>
+class ValueArray22 {
+ public:
+  ValueArray22(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22) : v1_(v1), v2_(v2), v3_(v3),
+      v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10),
+      v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15), v16_(v16),
+      v17_(v17), v18_(v18), v19_(v19), v20_(v20), v21_(v21), v22_(v22) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray22& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23>
+class ValueArray23 {
+ public:
+  ValueArray23(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23) : v1_(v1), v2_(v2),
+      v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10),
+      v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15), v16_(v16),
+      v17_(v17), v18_(v18), v19_(v19), v20_(v20), v21_(v21), v22_(v22),
+      v23_(v23) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_,
+        v23_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray23& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24>
+class ValueArray24 {
+ public:
+  ValueArray24(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24) : v1_(v1),
+      v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9),
+      v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15),
+      v16_(v16), v17_(v17), v18_(v18), v19_(v19), v20_(v20), v21_(v21),
+      v22_(v22), v23_(v23), v24_(v24) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray24& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25>
+class ValueArray25 {
+ public:
+  ValueArray25(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24,
+      T25 v25) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14),
+      v15_(v15), v16_(v16), v17_(v17), v18_(v18), v19_(v19), v20_(v20),
+      v21_(v21), v22_(v22), v23_(v23), v24_(v24), v25_(v25) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray25& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26>
+class ValueArray26 {
+ public:
+  ValueArray26(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14),
+      v15_(v15), v16_(v16), v17_(v17), v18_(v18), v19_(v19), v20_(v20),
+      v21_(v21), v22_(v22), v23_(v23), v24_(v24), v25_(v25), v26_(v26) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray26& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27>
+class ValueArray27 {
+ public:
+  ValueArray27(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6),
+      v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13),
+      v14_(v14), v15_(v15), v16_(v16), v17_(v17), v18_(v18), v19_(v19),
+      v20_(v20), v21_(v21), v22_(v22), v23_(v23), v24_(v24), v25_(v25),
+      v26_(v26), v27_(v27) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray27& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28>
+class ValueArray28 {
+ public:
+  ValueArray28(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5),
+      v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12),
+      v13_(v13), v14_(v14), v15_(v15), v16_(v16), v17_(v17), v18_(v18),
+      v19_(v19), v20_(v20), v21_(v21), v22_(v22), v23_(v23), v24_(v24),
+      v25_(v25), v26_(v26), v27_(v27), v28_(v28) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray28& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29>
+class ValueArray29 {
+ public:
+  ValueArray29(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29) : v1_(v1), v2_(v2), v3_(v3), v4_(v4),
+      v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11),
+      v12_(v12), v13_(v13), v14_(v14), v15_(v15), v16_(v16), v17_(v17),
+      v18_(v18), v19_(v19), v20_(v20), v21_(v21), v22_(v22), v23_(v23),
+      v24_(v24), v25_(v25), v26_(v26), v27_(v27), v28_(v28), v29_(v29) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray29& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30>
+class ValueArray30 {
+ public:
+  ValueArray30(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30) : v1_(v1), v2_(v2), v3_(v3),
+      v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10),
+      v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15), v16_(v16),
+      v17_(v17), v18_(v18), v19_(v19), v20_(v20), v21_(v21), v22_(v22),
+      v23_(v23), v24_(v24), v25_(v25), v26_(v26), v27_(v27), v28_(v28),
+      v29_(v29), v30_(v30) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray30& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31>
+class ValueArray31 {
+ public:
+  ValueArray31(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31) : v1_(v1), v2_(v2),
+      v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10),
+      v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15), v16_(v16),
+      v17_(v17), v18_(v18), v19_(v19), v20_(v20), v21_(v21), v22_(v22),
+      v23_(v23), v24_(v24), v25_(v25), v26_(v26), v27_(v27), v28_(v28),
+      v29_(v29), v30_(v30), v31_(v31) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray31& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32>
+class ValueArray32 {
+ public:
+  ValueArray32(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32) : v1_(v1),
+      v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9),
+      v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15),
+      v16_(v16), v17_(v17), v18_(v18), v19_(v19), v20_(v20), v21_(v21),
+      v22_(v22), v23_(v23), v24_(v24), v25_(v25), v26_(v26), v27_(v27),
+      v28_(v28), v29_(v29), v30_(v30), v31_(v31), v32_(v32) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray32& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33>
+class ValueArray33 {
+ public:
+  ValueArray33(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32,
+      T33 v33) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14),
+      v15_(v15), v16_(v16), v17_(v17), v18_(v18), v19_(v19), v20_(v20),
+      v21_(v21), v22_(v22), v23_(v23), v24_(v24), v25_(v25), v26_(v26),
+      v27_(v27), v28_(v28), v29_(v29), v30_(v30), v31_(v31), v32_(v32),
+      v33_(v33) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray33& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34>
+class ValueArray34 {
+ public:
+  ValueArray34(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14),
+      v15_(v15), v16_(v16), v17_(v17), v18_(v18), v19_(v19), v20_(v20),
+      v21_(v21), v22_(v22), v23_(v23), v24_(v24), v25_(v25), v26_(v26),
+      v27_(v27), v28_(v28), v29_(v29), v30_(v30), v31_(v31), v32_(v32),
+      v33_(v33), v34_(v34) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray34& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35>
+class ValueArray35 {
+ public:
+  ValueArray35(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6),
+      v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13),
+      v14_(v14), v15_(v15), v16_(v16), v17_(v17), v18_(v18), v19_(v19),
+      v20_(v20), v21_(v21), v22_(v22), v23_(v23), v24_(v24), v25_(v25),
+      v26_(v26), v27_(v27), v28_(v28), v29_(v29), v30_(v30), v31_(v31),
+      v32_(v32), v33_(v33), v34_(v34), v35_(v35) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_,
+        v35_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray35& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36>
+class ValueArray36 {
+ public:
+  ValueArray36(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5),
+      v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12),
+      v13_(v13), v14_(v14), v15_(v15), v16_(v16), v17_(v17), v18_(v18),
+      v19_(v19), v20_(v20), v21_(v21), v22_(v22), v23_(v23), v24_(v24),
+      v25_(v25), v26_(v26), v27_(v27), v28_(v28), v29_(v29), v30_(v30),
+      v31_(v31), v32_(v32), v33_(v33), v34_(v34), v35_(v35), v36_(v36) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray36& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37>
+class ValueArray37 {
+ public:
+  ValueArray37(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37) : v1_(v1), v2_(v2), v3_(v3), v4_(v4),
+      v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11),
+      v12_(v12), v13_(v13), v14_(v14), v15_(v15), v16_(v16), v17_(v17),
+      v18_(v18), v19_(v19), v20_(v20), v21_(v21), v22_(v22), v23_(v23),
+      v24_(v24), v25_(v25), v26_(v26), v27_(v27), v28_(v28), v29_(v29),
+      v30_(v30), v31_(v31), v32_(v32), v33_(v33), v34_(v34), v35_(v35),
+      v36_(v36), v37_(v37) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray37& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38>
+class ValueArray38 {
+ public:
+  ValueArray38(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38) : v1_(v1), v2_(v2), v3_(v3),
+      v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10),
+      v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15), v16_(v16),
+      v17_(v17), v18_(v18), v19_(v19), v20_(v20), v21_(v21), v22_(v22),
+      v23_(v23), v24_(v24), v25_(v25), v26_(v26), v27_(v27), v28_(v28),
+      v29_(v29), v30_(v30), v31_(v31), v32_(v32), v33_(v33), v34_(v34),
+      v35_(v35), v36_(v36), v37_(v37), v38_(v38) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray38& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39>
+class ValueArray39 {
+ public:
+  ValueArray39(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39) : v1_(v1), v2_(v2),
+      v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10),
+      v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15), v16_(v16),
+      v17_(v17), v18_(v18), v19_(v19), v20_(v20), v21_(v21), v22_(v22),
+      v23_(v23), v24_(v24), v25_(v25), v26_(v26), v27_(v27), v28_(v28),
+      v29_(v29), v30_(v30), v31_(v31), v32_(v32), v33_(v33), v34_(v34),
+      v35_(v35), v36_(v36), v37_(v37), v38_(v38), v39_(v39) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_, v39_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray39& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+  const T39 v39_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40>
+class ValueArray40 {
+ public:
+  ValueArray40(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40) : v1_(v1),
+      v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9),
+      v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15),
+      v16_(v16), v17_(v17), v18_(v18), v19_(v19), v20_(v20), v21_(v21),
+      v22_(v22), v23_(v23), v24_(v24), v25_(v25), v26_(v26), v27_(v27),
+      v28_(v28), v29_(v29), v30_(v30), v31_(v31), v32_(v32), v33_(v33),
+      v34_(v34), v35_(v35), v36_(v36), v37_(v37), v38_(v38), v39_(v39),
+      v40_(v40) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_, v39_, v40_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray40& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+  const T39 v39_;
+  const T40 v40_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41>
+class ValueArray41 {
+ public:
+  ValueArray41(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40,
+      T41 v41) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14),
+      v15_(v15), v16_(v16), v17_(v17), v18_(v18), v19_(v19), v20_(v20),
+      v21_(v21), v22_(v22), v23_(v23), v24_(v24), v25_(v25), v26_(v26),
+      v27_(v27), v28_(v28), v29_(v29), v30_(v30), v31_(v31), v32_(v32),
+      v33_(v33), v34_(v34), v35_(v35), v36_(v36), v37_(v37), v38_(v38),
+      v39_(v39), v40_(v40), v41_(v41) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_, v39_, v40_, v41_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray41& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+  const T39 v39_;
+  const T40 v40_;
+  const T41 v41_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42>
+class ValueArray42 {
+ public:
+  ValueArray42(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41,
+      T42 v42) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14),
+      v15_(v15), v16_(v16), v17_(v17), v18_(v18), v19_(v19), v20_(v20),
+      v21_(v21), v22_(v22), v23_(v23), v24_(v24), v25_(v25), v26_(v26),
+      v27_(v27), v28_(v28), v29_(v29), v30_(v30), v31_(v31), v32_(v32),
+      v33_(v33), v34_(v34), v35_(v35), v36_(v36), v37_(v37), v38_(v38),
+      v39_(v39), v40_(v40), v41_(v41), v42_(v42) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_, v39_, v40_, v41_, v42_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray42& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+  const T39 v39_;
+  const T40 v40_;
+  const T41 v41_;
+  const T42 v42_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43>
+class ValueArray43 {
+ public:
+  ValueArray43(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41,
+      T42 v42, T43 v43) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6),
+      v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13),
+      v14_(v14), v15_(v15), v16_(v16), v17_(v17), v18_(v18), v19_(v19),
+      v20_(v20), v21_(v21), v22_(v22), v23_(v23), v24_(v24), v25_(v25),
+      v26_(v26), v27_(v27), v28_(v28), v29_(v29), v30_(v30), v31_(v31),
+      v32_(v32), v33_(v33), v34_(v34), v35_(v35), v36_(v36), v37_(v37),
+      v38_(v38), v39_(v39), v40_(v40), v41_(v41), v42_(v42), v43_(v43) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_, v39_, v40_, v41_, v42_, v43_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray43& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+  const T39 v39_;
+  const T40 v40_;
+  const T41 v41_;
+  const T42 v42_;
+  const T43 v43_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44>
+class ValueArray44 {
+ public:
+  ValueArray44(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41,
+      T42 v42, T43 v43, T44 v44) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5),
+      v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12),
+      v13_(v13), v14_(v14), v15_(v15), v16_(v16), v17_(v17), v18_(v18),
+      v19_(v19), v20_(v20), v21_(v21), v22_(v22), v23_(v23), v24_(v24),
+      v25_(v25), v26_(v26), v27_(v27), v28_(v28), v29_(v29), v30_(v30),
+      v31_(v31), v32_(v32), v33_(v33), v34_(v34), v35_(v35), v36_(v36),
+      v37_(v37), v38_(v38), v39_(v39), v40_(v40), v41_(v41), v42_(v42),
+      v43_(v43), v44_(v44) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_, v39_, v40_, v41_, v42_, v43_, v44_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray44& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+  const T39 v39_;
+  const T40 v40_;
+  const T41 v41_;
+  const T42 v42_;
+  const T43 v43_;
+  const T44 v44_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45>
+class ValueArray45 {
+ public:
+  ValueArray45(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41,
+      T42 v42, T43 v43, T44 v44, T45 v45) : v1_(v1), v2_(v2), v3_(v3), v4_(v4),
+      v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10), v11_(v11),
+      v12_(v12), v13_(v13), v14_(v14), v15_(v15), v16_(v16), v17_(v17),
+      v18_(v18), v19_(v19), v20_(v20), v21_(v21), v22_(v22), v23_(v23),
+      v24_(v24), v25_(v25), v26_(v26), v27_(v27), v28_(v28), v29_(v29),
+      v30_(v30), v31_(v31), v32_(v32), v33_(v33), v34_(v34), v35_(v35),
+      v36_(v36), v37_(v37), v38_(v38), v39_(v39), v40_(v40), v41_(v41),
+      v42_(v42), v43_(v43), v44_(v44), v45_(v45) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_, v39_, v40_, v41_, v42_, v43_, v44_, v45_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray45& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+  const T39 v39_;
+  const T40 v40_;
+  const T41 v41_;
+  const T42 v42_;
+  const T43 v43_;
+  const T44 v44_;
+  const T45 v45_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46>
+class ValueArray46 {
+ public:
+  ValueArray46(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41,
+      T42 v42, T43 v43, T44 v44, T45 v45, T46 v46) : v1_(v1), v2_(v2), v3_(v3),
+      v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10),
+      v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15), v16_(v16),
+      v17_(v17), v18_(v18), v19_(v19), v20_(v20), v21_(v21), v22_(v22),
+      v23_(v23), v24_(v24), v25_(v25), v26_(v26), v27_(v27), v28_(v28),
+      v29_(v29), v30_(v30), v31_(v31), v32_(v32), v33_(v33), v34_(v34),
+      v35_(v35), v36_(v36), v37_(v37), v38_(v38), v39_(v39), v40_(v40),
+      v41_(v41), v42_(v42), v43_(v43), v44_(v44), v45_(v45), v46_(v46) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_, v39_, v40_, v41_, v42_, v43_, v44_, v45_, v46_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray46& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+  const T39 v39_;
+  const T40 v40_;
+  const T41 v41_;
+  const T42 v42_;
+  const T43 v43_;
+  const T44 v44_;
+  const T45 v45_;
+  const T46 v46_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47>
+class ValueArray47 {
+ public:
+  ValueArray47(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41,
+      T42 v42, T43 v43, T44 v44, T45 v45, T46 v46, T47 v47) : v1_(v1), v2_(v2),
+      v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9), v10_(v10),
+      v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15), v16_(v16),
+      v17_(v17), v18_(v18), v19_(v19), v20_(v20), v21_(v21), v22_(v22),
+      v23_(v23), v24_(v24), v25_(v25), v26_(v26), v27_(v27), v28_(v28),
+      v29_(v29), v30_(v30), v31_(v31), v32_(v32), v33_(v33), v34_(v34),
+      v35_(v35), v36_(v36), v37_(v37), v38_(v38), v39_(v39), v40_(v40),
+      v41_(v41), v42_(v42), v43_(v43), v44_(v44), v45_(v45), v46_(v46),
+      v47_(v47) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_, v39_, v40_, v41_, v42_, v43_, v44_, v45_, v46_,
+        v47_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray47& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+  const T39 v39_;
+  const T40 v40_;
+  const T41 v41_;
+  const T42 v42_;
+  const T43 v43_;
+  const T44 v44_;
+  const T45 v45_;
+  const T46 v46_;
+  const T47 v47_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47, typename T48>
+class ValueArray48 {
+ public:
+  ValueArray48(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41,
+      T42 v42, T43 v43, T44 v44, T45 v45, T46 v46, T47 v47, T48 v48) : v1_(v1),
+      v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7), v8_(v8), v9_(v9),
+      v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14), v15_(v15),
+      v16_(v16), v17_(v17), v18_(v18), v19_(v19), v20_(v20), v21_(v21),
+      v22_(v22), v23_(v23), v24_(v24), v25_(v25), v26_(v26), v27_(v27),
+      v28_(v28), v29_(v29), v30_(v30), v31_(v31), v32_(v32), v33_(v33),
+      v34_(v34), v35_(v35), v36_(v36), v37_(v37), v38_(v38), v39_(v39),
+      v40_(v40), v41_(v41), v42_(v42), v43_(v43), v44_(v44), v45_(v45),
+      v46_(v46), v47_(v47), v48_(v48) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_, v39_, v40_, v41_, v42_, v43_, v44_, v45_, v46_, v47_,
+        v48_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray48& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+  const T39 v39_;
+  const T40 v40_;
+  const T41 v41_;
+  const T42 v42_;
+  const T43 v43_;
+  const T44 v44_;
+  const T45 v45_;
+  const T46 v46_;
+  const T47 v47_;
+  const T48 v48_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47, typename T48, typename T49>
+class ValueArray49 {
+ public:
+  ValueArray49(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41,
+      T42 v42, T43 v43, T44 v44, T45 v45, T46 v46, T47 v47, T48 v48,
+      T49 v49) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14),
+      v15_(v15), v16_(v16), v17_(v17), v18_(v18), v19_(v19), v20_(v20),
+      v21_(v21), v22_(v22), v23_(v23), v24_(v24), v25_(v25), v26_(v26),
+      v27_(v27), v28_(v28), v29_(v29), v30_(v30), v31_(v31), v32_(v32),
+      v33_(v33), v34_(v34), v35_(v35), v36_(v36), v37_(v37), v38_(v38),
+      v39_(v39), v40_(v40), v41_(v41), v42_(v42), v43_(v43), v44_(v44),
+      v45_(v45), v46_(v46), v47_(v47), v48_(v48), v49_(v49) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_, v39_, v40_, v41_, v42_, v43_, v44_, v45_, v46_, v47_,
+        v48_, v49_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray49& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+  const T39 v39_;
+  const T40 v40_;
+  const T41 v41_;
+  const T42 v42_;
+  const T43 v43_;
+  const T44 v44_;
+  const T45 v45_;
+  const T46 v46_;
+  const T47 v47_;
+  const T48 v48_;
+  const T49 v49_;
+};
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47, typename T48, typename T49, typename T50>
+class ValueArray50 {
+ public:
+  ValueArray50(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+      T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+      T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+      T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+      T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41,
+      T42 v42, T43 v43, T44 v44, T45 v45, T46 v46, T47 v47, T48 v48, T49 v49,
+      T50 v50) : v1_(v1), v2_(v2), v3_(v3), v4_(v4), v5_(v5), v6_(v6), v7_(v7),
+      v8_(v8), v9_(v9), v10_(v10), v11_(v11), v12_(v12), v13_(v13), v14_(v14),
+      v15_(v15), v16_(v16), v17_(v17), v18_(v18), v19_(v19), v20_(v20),
+      v21_(v21), v22_(v22), v23_(v23), v24_(v24), v25_(v25), v26_(v26),
+      v27_(v27), v28_(v28), v29_(v29), v30_(v30), v31_(v31), v32_(v32),
+      v33_(v33), v34_(v34), v35_(v35), v36_(v36), v37_(v37), v38_(v38),
+      v39_(v39), v40_(v40), v41_(v41), v42_(v42), v43_(v43), v44_(v44),
+      v45_(v45), v46_(v46), v47_(v47), v48_(v48), v49_(v49), v50_(v50) {}
+
+  template <typename T>
+  operator ParamGenerator<T>() const {
+    const T array[] = {v1_, v2_, v3_, v4_, v5_, v6_, v7_, v8_, v9_, v10_, v11_,
+        v12_, v13_, v14_, v15_, v16_, v17_, v18_, v19_, v20_, v21_, v22_, v23_,
+        v24_, v25_, v26_, v27_, v28_, v29_, v30_, v31_, v32_, v33_, v34_, v35_,
+        v36_, v37_, v38_, v39_, v40_, v41_, v42_, v43_, v44_, v45_, v46_, v47_,
+        v48_, v49_, v50_};
+    return ValuesIn(array);
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const ValueArray50& other);
+
+  const T1 v1_;
+  const T2 v2_;
+  const T3 v3_;
+  const T4 v4_;
+  const T5 v5_;
+  const T6 v6_;
+  const T7 v7_;
+  const T8 v8_;
+  const T9 v9_;
+  const T10 v10_;
+  const T11 v11_;
+  const T12 v12_;
+  const T13 v13_;
+  const T14 v14_;
+  const T15 v15_;
+  const T16 v16_;
+  const T17 v17_;
+  const T18 v18_;
+  const T19 v19_;
+  const T20 v20_;
+  const T21 v21_;
+  const T22 v22_;
+  const T23 v23_;
+  const T24 v24_;
+  const T25 v25_;
+  const T26 v26_;
+  const T27 v27_;
+  const T28 v28_;
+  const T29 v29_;
+  const T30 v30_;
+  const T31 v31_;
+  const T32 v32_;
+  const T33 v33_;
+  const T34 v34_;
+  const T35 v35_;
+  const T36 v36_;
+  const T37 v37_;
+  const T38 v38_;
+  const T39 v39_;
+  const T40 v40_;
+  const T41 v41_;
+  const T42 v42_;
+  const T43 v43_;
+  const T44 v44_;
+  const T45 v45_;
+  const T46 v46_;
+  const T47 v47_;
+  const T48 v48_;
+  const T49 v49_;
+  const T50 v50_;
+};
+
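The fifty ValueArrayN templates above are the storage behind the public ::testing::Values(v1, ..., vN) helper declared in the companion gtest-param-test.h: Values() returns the matching ValueArrayN, and the templated conversion operator copies the stored values into a ParamGenerator<T> through ValuesIn() the moment an instantiation asks for a concrete element type. A minimal usage sketch, purely for illustration (it assumes only the public gtest API -- TEST_P, INSTANTIATE_TEST_CASE_P, ::testing::Values -- and a hypothetical ShapeTest fixture; none of these test names come from the patch):

    #include "gtest/gtest.h"

    // Hypothetical value-parameterized test; the parameter type is int.
    class ShapeTest : public ::testing::TestWithParam<int> {};

    TEST_P(ShapeTest, DimensionIsPositive) {
      EXPECT_GT(GetParam(), 0);
    }

    // Values(1, 2, 4, 8) builds a ValueArray4<int, int, int, int>;
    // binding it here triggers operator ParamGenerator<int>(), which
    // packs the four values into an array and hands it to ValuesIn.
    INSTANTIATE_TEST_CASE_P(SmallShapes, ShapeTest,
                            ::testing::Values(1, 2, 4, 8));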
+# if GTEST_HAS_COMBINE
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// Generates values from the Cartesian product of values produced
+// by the argument generators.
+//
+template <typename T1, typename T2>
+class CartesianProductGenerator2
+    : public ParamGeneratorInterface< ::std::tr1::tuple<T1, T2> > {
+ public:
+  typedef ::std::tr1::tuple<T1, T2> ParamType;
+
+  CartesianProductGenerator2(const ParamGenerator<T1>& g1,
+      const ParamGenerator<T2>& g2)
+      : g1_(g1), g2_(g2) {}
+  virtual ~CartesianProductGenerator2() {}
+
+  virtual ParamIteratorInterface<ParamType>* Begin() const {
+    return new Iterator(this, g1_, g1_.begin(), g2_, g2_.begin());
+  }
+  virtual ParamIteratorInterface<ParamType>* End() const {
+    return new Iterator(this, g1_, g1_.end(), g2_, g2_.end());
+  }
+
+ private:
+  class Iterator : public ParamIteratorInterface<ParamType> {
+   public:
+    Iterator(const ParamGeneratorInterface<ParamType>* base,
+      const ParamGenerator<T1>& g1,
+      const typename ParamGenerator<T1>::iterator& current1,
+      const ParamGenerator<T2>& g2,
+      const typename ParamGenerator<T2>::iterator& current2)
+        : base_(base),
+          begin1_(g1.begin()), end1_(g1.end()), current1_(current1),
+          begin2_(g2.begin()), end2_(g2.end()), current2_(current2)    {
+      ComputeCurrentValue();
+    }
+    virtual ~Iterator() {}
+
+    virtual const ParamGeneratorInterface<ParamType>* BaseGenerator() const {
+      return base_;
+    }
+    // Advance() must not be called on an iterator that is already past the
+    // end of its range, so no component iterator may be past its end either.
+    virtual void Advance() {
+      assert(!AtEnd());
+      ++current2_;
+      if (current2_ == end2_) {
+        current2_ = begin2_;
+        ++current1_;
+      }
+      ComputeCurrentValue();
+    }
+    virtual ParamIteratorInterface<ParamType>* Clone() const {
+      return new Iterator(*this);
+    }
+    virtual const ParamType* Current() const { return &current_value_; }
+    virtual bool Equals(const ParamIteratorInterface<ParamType>& other) const {
+      // Having the same base generator guarantees that the other
+      // iterator is of the same type and we can downcast.
+      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())
+          << "The program attempted to compare iterators "
+          << "from different generators." << std::endl;
+      const Iterator* typed_other =
+          CheckedDowncastToActualType<const Iterator>(&other);
+      // We must report iterators equal if they both point beyond their
+      // respective ranges. That can happen in a variety of fashions,
+      // so we have to consult AtEnd().
+      return (AtEnd() && typed_other->AtEnd()) ||
+         (
+          current1_ == typed_other->current1_ &&
+          current2_ == typed_other->current2_);
+    }
+
+   private:
+    Iterator(const Iterator& other)
+        : base_(other.base_),
+        begin1_(other.begin1_),
+        end1_(other.end1_),
+        current1_(other.current1_),
+        begin2_(other.begin2_),
+        end2_(other.end2_),
+        current2_(other.current2_) {
+      ComputeCurrentValue();
+    }
+
+    void ComputeCurrentValue() {
+      if (!AtEnd())
+        current_value_ = ParamType(*current1_, *current2_);
+    }
+    bool AtEnd() const {
+      // We must report the iterator as past the end of the range as soon as
+      // any of the component iterators has reached the end of its range.
+      return
+          current1_ == end1_ ||
+          current2_ == end2_;
+    }
+
+    // No implementation - assignment is unsupported.
+    void operator=(const Iterator& other);
+
+    const ParamGeneratorInterface<ParamType>* const base_;
+    // begin[i]_ and end[i]_ define the i-th range that Iterator traverses.
+    // current[i]_ is the actual traversing iterator.
+    const typename ParamGenerator<T1>::iterator begin1_;
+    const typename ParamGenerator<T1>::iterator end1_;
+    typename ParamGenerator<T1>::iterator current1_;
+    const typename ParamGenerator<T2>::iterator begin2_;
+    const typename ParamGenerator<T2>::iterator end2_;
+    typename ParamGenerator<T2>::iterator current2_;
+    ParamType current_value_;
+  };  // class CartesianProductGenerator2::Iterator
+
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductGenerator2& other);
+
+  const ParamGenerator<T1> g1_;
+  const ParamGenerator<T2> g2_;
+};  // class CartesianProductGenerator2
+
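CartesianProductGenerator2 is what the public ::testing::Combine(g1, g2) helper instantiates when GTEST_HAS_COMBINE is enabled: Iterator::Advance() steps the last component fastest and carries over like an odometer, and AtEnd() reports the end as soon as any component range is exhausted. A minimal sketch of reaching it from test code (assuming the public Combine, Values and Bool helpers and a hypothetical PadTest fixture; the parameter type mirrors ParamType above, ::std::tr1::tuple<int, bool>):

    #include "gtest/gtest.h"

    class PadTest
        : public ::testing::TestWithParam< ::std::tr1::tuple<int, bool> > {};

    TEST_P(PadTest, AcceptsConfiguration) {
      const int size = ::std::tr1::get<0>(GetParam());
      const bool in_place = ::std::tr1::get<1>(GetParam());
      EXPECT_GE(size, 1);
      (void)in_place;  // Exercised only through the parameter sweep.
    }

    // Combine() enumerates the Cartesian product in odometer order:
    // (1,false), (1,true), (2,false), (2,true), (3,false), (3,true).
    INSTANTIATE_TEST_CASE_P(Sweep, PadTest,
        ::testing::Combine(::testing::Values(1, 2, 3), ::testing::Bool()));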
+
+template <typename T1, typename T2, typename T3>
+class CartesianProductGenerator3
+    : public ParamGeneratorInterface< ::std::tr1::tuple<T1, T2, T3> > {
+ public:
+  typedef ::std::tr1::tuple<T1, T2, T3> ParamType;
+
+  CartesianProductGenerator3(const ParamGenerator<T1>& g1,
+      const ParamGenerator<T2>& g2, const ParamGenerator<T3>& g3)
+      : g1_(g1), g2_(g2), g3_(g3) {}
+  virtual ~CartesianProductGenerator3() {}
+
+  virtual ParamIteratorInterface<ParamType>* Begin() const {
+    return new Iterator(this, g1_, g1_.begin(), g2_, g2_.begin(), g3_,
+        g3_.begin());
+  }
+  virtual ParamIteratorInterface<ParamType>* End() const {
+    return new Iterator(this, g1_, g1_.end(), g2_, g2_.end(), g3_, g3_.end());
+  }
+
+ private:
+  class Iterator : public ParamIteratorInterface<ParamType> {
+   public:
+    Iterator(const ParamGeneratorInterface<ParamType>* base,
+      const ParamGenerator<T1>& g1,
+      const typename ParamGenerator<T1>::iterator& current1,
+      const ParamGenerator<T2>& g2,
+      const typename ParamGenerator<T2>::iterator& current2,
+      const ParamGenerator<T3>& g3,
+      const typename ParamGenerator<T3>::iterator& current3)
+        : base_(base),
+          begin1_(g1.begin()), end1_(g1.end()), current1_(current1),
+          begin2_(g2.begin()), end2_(g2.end()), current2_(current2),
+          begin3_(g3.begin()), end3_(g3.end()), current3_(current3)    {
+      ComputeCurrentValue();
+    }
+    virtual ~Iterator() {}
+
+    virtual const ParamGeneratorInterface<ParamType>* BaseGenerator() const {
+      return base_;
+    }
+    // Advance() must not be called on an iterator that is already past the
+    // end of its range, so no component iterator may be past its end either.
+    virtual void Advance() {
+      assert(!AtEnd());
+      ++current3_;
+      if (current3_ == end3_) {
+        current3_ = begin3_;
+        ++current2_;
+      }
+      if (current2_ == end2_) {
+        current2_ = begin2_;
+        ++current1_;
+      }
+      ComputeCurrentValue();
+    }
+    virtual ParamIteratorInterface<ParamType>* Clone() const {
+      return new Iterator(*this);
+    }
+    virtual const ParamType* Current() const { return &current_value_; }
+    virtual bool Equals(const ParamIteratorInterface<ParamType>& other) const {
+      // Having the same base generator guarantees that the other
+      // iterator is of the same type and we can downcast.
+      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())
+          << "The program attempted to compare iterators "
+          << "from different generators." << std::endl;
+      const Iterator* typed_other =
+          CheckedDowncastToActualType<const Iterator>(&other);
+      // We must report iterators equal if they both point beyond their
+      // respective ranges. That can happen in a variety of fashions,
+      // so we have to consult AtEnd().
+      return (AtEnd() && typed_other->AtEnd()) ||
+         (
+          current1_ == typed_other->current1_ &&
+          current2_ == typed_other->current2_ &&
+          current3_ == typed_other->current3_);
+    }
+
+   private:
+    Iterator(const Iterator& other)
+        : base_(other.base_),
+        begin1_(other.begin1_),
+        end1_(other.end1_),
+        current1_(other.current1_),
+        begin2_(other.begin2_),
+        end2_(other.end2_),
+        current2_(other.current2_),
+        begin3_(other.begin3_),
+        end3_(other.end3_),
+        current3_(other.current3_) {
+      ComputeCurrentValue();
+    }
+
+    void ComputeCurrentValue() {
+      if (!AtEnd())
+        current_value_ = ParamType(*current1_, *current2_, *current3_);
+    }
+    bool AtEnd() const {
+      // We must report the iterator as past the end of the range as soon as
+      // any of the component iterators has reached the end of its range.
+      return
+          current1_ == end1_ ||
+          current2_ == end2_ ||
+          current3_ == end3_;
+    }
+
+    // No implementation - assignment is unsupported.
+    void operator=(const Iterator& other);
+
+    const ParamGeneratorInterface<ParamType>* const base_;
+    // begin[i]_ and end[i]_ define the i-th range that Iterator traverses.
+    // current[i]_ is the actual traversing iterator.
+    const typename ParamGenerator<T1>::iterator begin1_;
+    const typename ParamGenerator<T1>::iterator end1_;
+    typename ParamGenerator<T1>::iterator current1_;
+    const typename ParamGenerator<T2>::iterator begin2_;
+    const typename ParamGenerator<T2>::iterator end2_;
+    typename ParamGenerator<T2>::iterator current2_;
+    const typename ParamGenerator<T3>::iterator begin3_;
+    const typename ParamGenerator<T3>::iterator end3_;
+    typename ParamGenerator<T3>::iterator current3_;
+    ParamType current_value_;
+  };  // class CartesianProductGenerator3::Iterator
+
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductGenerator3& other);
+
+  const ParamGenerator<T1> g1_;
+  const ParamGenerator<T2> g2_;
+  const ParamGenerator<T3> g3_;
+};  // class CartesianProductGenerator3
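+
+// NOTE (editorial sketch, not part of the upstream header): Advance() above
+// steps the component iterators like an odometer, with the last component
+// varying fastest.  Assuming three hypothetical generators r1, r2 and r3,
+// CartesianProductGenerator3 visits the same tuples, in the same order, as:
+//
+//   for (i1 = r1.begin(); i1 != r1.end(); ++i1)
+//     for (i2 = r2.begin(); i2 != r2.end(); ++i2)
+//       for (i3 = r3.begin(); i3 != r3.end(); ++i3)
+//         use(::std::tr1::make_tuple(*i1, *i2, *i3));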
+
+
+template <typename T1, typename T2, typename T3, typename T4>
+class CartesianProductGenerator4
+    : public ParamGeneratorInterface< ::std::tr1::tuple<T1, T2, T3, T4> > {
+ public:
+  typedef ::std::tr1::tuple<T1, T2, T3, T4> ParamType;
+
+  CartesianProductGenerator4(const ParamGenerator<T1>& g1,
+      const ParamGenerator<T2>& g2, const ParamGenerator<T3>& g3,
+      const ParamGenerator<T4>& g4)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4) {}
+  virtual ~CartesianProductGenerator4() {}
+
+  virtual ParamIteratorInterface<ParamType>* Begin() const {
+    return new Iterator(this, g1_, g1_.begin(), g2_, g2_.begin(), g3_,
+        g3_.begin(), g4_, g4_.begin());
+  }
+  virtual ParamIteratorInterface<ParamType>* End() const {
+    return new Iterator(this, g1_, g1_.end(), g2_, g2_.end(), g3_, g3_.end(),
+        g4_, g4_.end());
+  }
+
+ private:
+  class Iterator : public ParamIteratorInterface<ParamType> {
+   public:
+    Iterator(const ParamGeneratorInterface<ParamType>* base,
+      const ParamGenerator<T1>& g1,
+      const typename ParamGenerator<T1>::iterator& current1,
+      const ParamGenerator<T2>& g2,
+      const typename ParamGenerator<T2>::iterator& current2,
+      const ParamGenerator<T3>& g3,
+      const typename ParamGenerator<T3>::iterator& current3,
+      const ParamGenerator<T4>& g4,
+      const typename ParamGenerator<T4>::iterator& current4)
+        : base_(base),
+          begin1_(g1.begin()), end1_(g1.end()), current1_(current1),
+          begin2_(g2.begin()), end2_(g2.end()), current2_(current2),
+          begin3_(g3.begin()), end3_(g3.end()), current3_(current3),
+          begin4_(g4.begin()), end4_(g4.end()), current4_(current4)    {
+      ComputeCurrentValue();
+    }
+    virtual ~Iterator() {}
+
+    virtual const ParamGeneratorInterface<ParamType>* BaseGenerator() const {
+      return base_;
+    }
+    // Advance() should never be called on an iterator that is already past
+    // the end of the range, so none of the component iterators can be past
+    // the end of its range either.
+    virtual void Advance() {
+      assert(!AtEnd());
+      ++current4_;
+      if (current4_ == end4_) {
+        current4_ = begin4_;
+        ++current3_;
+      }
+      if (current3_ == end3_) {
+        current3_ = begin3_;
+        ++current2_;
+      }
+      if (current2_ == end2_) {
+        current2_ = begin2_;
+        ++current1_;
+      }
+      ComputeCurrentValue();
+    }
+    virtual ParamIteratorInterface<ParamType>* Clone() const {
+      return new Iterator(*this);
+    }
+    virtual const ParamType* Current() const { return &current_value_; }
+    virtual bool Equals(const ParamIteratorInterface<ParamType>& other) const {
+      // Having the same base generator guarantees that the other
+      // iterator is of the same type and we can downcast.
+      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())
+          << "The program attempted to compare iterators "
+          << "from different generators." << std::endl;
+      const Iterator* typed_other =
+          CheckedDowncastToActualType<const Iterator>(&other);
+      // We must report iterators equal if they both point beyond their
+      // respective ranges. That can happen in a variety of fashions,
+      // so we have to consult AtEnd().
+      return (AtEnd() && typed_other->AtEnd()) ||
+         (
+          current1_ == typed_other->current1_ &&
+          current2_ == typed_other->current2_ &&
+          current3_ == typed_other->current3_ &&
+          current4_ == typed_other->current4_);
+    }
+
+   private:
+    Iterator(const Iterator& other)
+        : base_(other.base_),
+        begin1_(other.begin1_),
+        end1_(other.end1_),
+        current1_(other.current1_),
+        begin2_(other.begin2_),
+        end2_(other.end2_),
+        current2_(other.current2_),
+        begin3_(other.begin3_),
+        end3_(other.end3_),
+        current3_(other.current3_),
+        begin4_(other.begin4_),
+        end4_(other.end4_),
+        current4_(other.current4_) {
+      ComputeCurrentValue();
+    }
+
+    void ComputeCurrentValue() {
+      if (!AtEnd())
+        current_value_ = ParamType(*current1_, *current2_, *current3_,
+            *current4_);
+    }
+    bool AtEnd() const {
+      // We must report the iterator as past the end of the range when any of
+      // the component iterators has reached the end of its range.
+      return
+          current1_ == end1_ ||
+          current2_ == end2_ ||
+          current3_ == end3_ ||
+          current4_ == end4_;
+    }
+
+    // No implementation - assignment is unsupported.
+    void operator=(const Iterator& other);
+
+    const ParamGeneratorInterface<ParamType>* const base_;
+    // begin[i]_ and end[i]_ define the i-th range that Iterator traverses.
+    // current[i]_ is the actual traversing iterator.
+    const typename ParamGenerator<T1>::iterator begin1_;
+    const typename ParamGenerator<T1>::iterator end1_;
+    typename ParamGenerator<T1>::iterator current1_;
+    const typename ParamGenerator<T2>::iterator begin2_;
+    const typename ParamGenerator<T2>::iterator end2_;
+    typename ParamGenerator<T2>::iterator current2_;
+    const typename ParamGenerator<T3>::iterator begin3_;
+    const typename ParamGenerator<T3>::iterator end3_;
+    typename ParamGenerator<T3>::iterator current3_;
+    const typename ParamGenerator<T4>::iterator begin4_;
+    const typename ParamGenerator<T4>::iterator end4_;
+    typename ParamGenerator<T4>::iterator current4_;
+    ParamType current_value_;
+  };  // class CartesianProductGenerator4::Iterator
+
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductGenerator4& other);
+
+  const ParamGenerator<T1> g1_;
+  const ParamGenerator<T2> g2_;
+  const ParamGenerator<T3> g3_;
+  const ParamGenerator<T4> g4_;
+};  // class CartesianProductGenerator4
+
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5>
+class CartesianProductGenerator5
+    : public ParamGeneratorInterface< ::std::tr1::tuple<T1, T2, T3, T4, T5> > {
+ public:
+  typedef ::std::tr1::tuple<T1, T2, T3, T4, T5> ParamType;
+
+  CartesianProductGenerator5(const ParamGenerator<T1>& g1,
+      const ParamGenerator<T2>& g2, const ParamGenerator<T3>& g3,
+      const ParamGenerator<T4>& g4, const ParamGenerator<T5>& g5)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4), g5_(g5) {}
+  virtual ~CartesianProductGenerator5() {}
+
+  virtual ParamIteratorInterface<ParamType>* Begin() const {
+    return new Iterator(this, g1_, g1_.begin(), g2_, g2_.begin(), g3_,
+        g3_.begin(), g4_, g4_.begin(), g5_, g5_.begin());
+  }
+  virtual ParamIteratorInterface<ParamType>* End() const {
+    return new Iterator(this, g1_, g1_.end(), g2_, g2_.end(), g3_, g3_.end(),
+        g4_, g4_.end(), g5_, g5_.end());
+  }
+
+ private:
+  class Iterator : public ParamIteratorInterface<ParamType> {
+   public:
+    Iterator(const ParamGeneratorInterface<ParamType>* base,
+      const ParamGenerator<T1>& g1,
+      const typename ParamGenerator<T1>::iterator& current1,
+      const ParamGenerator<T2>& g2,
+      const typename ParamGenerator<T2>::iterator& current2,
+      const ParamGenerator<T3>& g3,
+      const typename ParamGenerator<T3>::iterator& current3,
+      const ParamGenerator<T4>& g4,
+      const typename ParamGenerator<T4>::iterator& current4,
+      const ParamGenerator<T5>& g5,
+      const typename ParamGenerator<T5>::iterator& current5)
+        : base_(base),
+          begin1_(g1.begin()), end1_(g1.end()), current1_(current1),
+          begin2_(g2.begin()), end2_(g2.end()), current2_(current2),
+          begin3_(g3.begin()), end3_(g3.end()), current3_(current3),
+          begin4_(g4.begin()), end4_(g4.end()), current4_(current4),
+          begin5_(g5.begin()), end5_(g5.end()), current5_(current5)    {
+      ComputeCurrentValue();
+    }
+    virtual ~Iterator() {}
+
+    virtual const ParamGeneratorInterface<ParamType>* BaseGenerator() const {
+      return base_;
+    }
+    // Advance() should never be called on an iterator that is already past
+    // the end of the range, so none of the component iterators can be past
+    // the end of its range either.
+    virtual void Advance() {
+      assert(!AtEnd());
+      ++current5_;
+      if (current5_ == end5_) {
+        current5_ = begin5_;
+        ++current4_;
+      }
+      if (current4_ == end4_) {
+        current4_ = begin4_;
+        ++current3_;
+      }
+      if (current3_ == end3_) {
+        current3_ = begin3_;
+        ++current2_;
+      }
+      if (current2_ == end2_) {
+        current2_ = begin2_;
+        ++current1_;
+      }
+      ComputeCurrentValue();
+    }
+    virtual ParamIteratorInterface<ParamType>* Clone() const {
+      return new Iterator(*this);
+    }
+    virtual const ParamType* Current() const { return &current_value_; }
+    virtual bool Equals(const ParamIteratorInterface<ParamType>& other) const {
+      // Having the same base generator guarantees that the other
+      // iterator is of the same type and we can downcast.
+      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())
+          << "The program attempted to compare iterators "
+          << "from different generators." << std::endl;
+      const Iterator* typed_other =
+          CheckedDowncastToActualType<const Iterator>(&other);
+      // We must report iterators equal if they both point beyond their
+      // respective ranges. That can happen in a variety of fashions,
+      // so we have to consult AtEnd().
+      return (AtEnd() && typed_other->AtEnd()) ||
+         (
+          current1_ == typed_other->current1_ &&
+          current2_ == typed_other->current2_ &&
+          current3_ == typed_other->current3_ &&
+          current4_ == typed_other->current4_ &&
+          current5_ == typed_other->current5_);
+    }
+
+   private:
+    Iterator(const Iterator& other)
+        : base_(other.base_),
+        begin1_(other.begin1_),
+        end1_(other.end1_),
+        current1_(other.current1_),
+        begin2_(other.begin2_),
+        end2_(other.end2_),
+        current2_(other.current2_),
+        begin3_(other.begin3_),
+        end3_(other.end3_),
+        current3_(other.current3_),
+        begin4_(other.begin4_),
+        end4_(other.end4_),
+        current4_(other.current4_),
+        begin5_(other.begin5_),
+        end5_(other.end5_),
+        current5_(other.current5_) {
+      ComputeCurrentValue();
+    }
+
+    void ComputeCurrentValue() {
+      if (!AtEnd())
+        current_value_ = ParamType(*current1_, *current2_, *current3_,
+            *current4_, *current5_);
+    }
+    bool AtEnd() const {
+      // We must report the iterator as past the end of the range when any of
+      // the component iterators has reached the end of its range.
+      return
+          current1_ == end1_ ||
+          current2_ == end2_ ||
+          current3_ == end3_ ||
+          current4_ == end4_ ||
+          current5_ == end5_;
+    }
+
+    // No implementation - assignment is unsupported.
+    void operator=(const Iterator& other);
+
+    const ParamGeneratorInterface<ParamType>* const base_;
+    // begin[i]_ and end[i]_ define the i-th range that Iterator traverses.
+    // current[i]_ is the actual traversing iterator.
+    const typename ParamGenerator<T1>::iterator begin1_;
+    const typename ParamGenerator<T1>::iterator end1_;
+    typename ParamGenerator<T1>::iterator current1_;
+    const typename ParamGenerator<T2>::iterator begin2_;
+    const typename ParamGenerator<T2>::iterator end2_;
+    typename ParamGenerator<T2>::iterator current2_;
+    const typename ParamGenerator<T3>::iterator begin3_;
+    const typename ParamGenerator<T3>::iterator end3_;
+    typename ParamGenerator<T3>::iterator current3_;
+    const typename ParamGenerator<T4>::iterator begin4_;
+    const typename ParamGenerator<T4>::iterator end4_;
+    typename ParamGenerator<T4>::iterator current4_;
+    const typename ParamGenerator<T5>::iterator begin5_;
+    const typename ParamGenerator<T5>::iterator end5_;
+    typename ParamGenerator<T5>::iterator current5_;
+    ParamType current_value_;
+  };  // class CartesianProductGenerator5::Iterator
+
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductGenerator5& other);
+
+  const ParamGenerator<T1> g1_;
+  const ParamGenerator<T2> g2_;
+  const ParamGenerator<T3> g3_;
+  const ParamGenerator<T4> g4_;
+  const ParamGenerator<T5> g5_;
+};  // class CartesianProductGenerator5
+
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6>
+class CartesianProductGenerator6
+    : public ParamGeneratorInterface< ::std::tr1::tuple<T1, T2, T3, T4, T5,
+        T6> > {
+ public:
+  typedef ::std::tr1::tuple<T1, T2, T3, T4, T5, T6> ParamType;
+
+  CartesianProductGenerator6(const ParamGenerator<T1>& g1,
+      const ParamGenerator<T2>& g2, const ParamGenerator<T3>& g3,
+      const ParamGenerator<T4>& g4, const ParamGenerator<T5>& g5,
+      const ParamGenerator<T6>& g6)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4), g5_(g5), g6_(g6) {}
+  virtual ~CartesianProductGenerator6() {}
+
+  virtual ParamIteratorInterface<ParamType>* Begin() const {
+    return new Iterator(this, g1_, g1_.begin(), g2_, g2_.begin(), g3_,
+        g3_.begin(), g4_, g4_.begin(), g5_, g5_.begin(), g6_, g6_.begin());
+  }
+  virtual ParamIteratorInterface<ParamType>* End() const {
+    return new Iterator(this, g1_, g1_.end(), g2_, g2_.end(), g3_, g3_.end(),
+        g4_, g4_.end(), g5_, g5_.end(), g6_, g6_.end());
+  }
+
+ private:
+  class Iterator : public ParamIteratorInterface<ParamType> {
+   public:
+    Iterator(const ParamGeneratorInterface<ParamType>* base,
+      const ParamGenerator<T1>& g1,
+      const typename ParamGenerator<T1>::iterator& current1,
+      const ParamGenerator<T2>& g2,
+      const typename ParamGenerator<T2>::iterator& current2,
+      const ParamGenerator<T3>& g3,
+      const typename ParamGenerator<T3>::iterator& current3,
+      const ParamGenerator<T4>& g4,
+      const typename ParamGenerator<T4>::iterator& current4,
+      const ParamGenerator<T5>& g5,
+      const typename ParamGenerator<T5>::iterator& current5,
+      const ParamGenerator<T6>& g6,
+      const typename ParamGenerator<T6>::iterator& current6)
+        : base_(base),
+          begin1_(g1.begin()), end1_(g1.end()), current1_(current1),
+          begin2_(g2.begin()), end2_(g2.end()), current2_(current2),
+          begin3_(g3.begin()), end3_(g3.end()), current3_(current3),
+          begin4_(g4.begin()), end4_(g4.end()), current4_(current4),
+          begin5_(g5.begin()), end5_(g5.end()), current5_(current5),
+          begin6_(g6.begin()), end6_(g6.end()), current6_(current6)    {
+      ComputeCurrentValue();
+    }
+    virtual ~Iterator() {}
+
+    virtual const ParamGeneratorInterface<ParamType>* BaseGenerator() const {
+      return base_;
+    }
+    // Advance() should never be called on an iterator that is already past
+    // the end of the range, so none of the component iterators can be past
+    // the end of its range either.
+    virtual void Advance() {
+      assert(!AtEnd());
+      ++current6_;
+      if (current6_ == end6_) {
+        current6_ = begin6_;
+        ++current5_;
+      }
+      if (current5_ == end5_) {
+        current5_ = begin5_;
+        ++current4_;
+      }
+      if (current4_ == end4_) {
+        current4_ = begin4_;
+        ++current3_;
+      }
+      if (current3_ == end3_) {
+        current3_ = begin3_;
+        ++current2_;
+      }
+      if (current2_ == end2_) {
+        current2_ = begin2_;
+        ++current1_;
+      }
+      ComputeCurrentValue();
+    }
+    virtual ParamIteratorInterface<ParamType>* Clone() const {
+      return new Iterator(*this);
+    }
+    virtual const ParamType* Current() const { return &current_value_; }
+    virtual bool Equals(const ParamIteratorInterface<ParamType>& other) const {
+      // Having the same base generator guarantees that the other
+      // iterator is of the same type and we can downcast.
+      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())
+          << "The program attempted to compare iterators "
+          << "from different generators." << std::endl;
+      const Iterator* typed_other =
+          CheckedDowncastToActualType<const Iterator>(&other);
+      // We must report iterators equal if they both point beyond their
+      // respective ranges. That can happen in a variety of fashions,
+      // so we have to consult AtEnd().
+      return (AtEnd() && typed_other->AtEnd()) ||
+         (
+          current1_ == typed_other->current1_ &&
+          current2_ == typed_other->current2_ &&
+          current3_ == typed_other->current3_ &&
+          current4_ == typed_other->current4_ &&
+          current5_ == typed_other->current5_ &&
+          current6_ == typed_other->current6_);
+    }
+
+   private:
+    Iterator(const Iterator& other)
+        : base_(other.base_),
+        begin1_(other.begin1_),
+        end1_(other.end1_),
+        current1_(other.current1_),
+        begin2_(other.begin2_),
+        end2_(other.end2_),
+        current2_(other.current2_),
+        begin3_(other.begin3_),
+        end3_(other.end3_),
+        current3_(other.current3_),
+        begin4_(other.begin4_),
+        end4_(other.end4_),
+        current4_(other.current4_),
+        begin5_(other.begin5_),
+        end5_(other.end5_),
+        current5_(other.current5_),
+        begin6_(other.begin6_),
+        end6_(other.end6_),
+        current6_(other.current6_) {
+      ComputeCurrentValue();
+    }
+
+    void ComputeCurrentValue() {
+      if (!AtEnd())
+        current_value_ = ParamType(*current1_, *current2_, *current3_,
+            *current4_, *current5_, *current6_);
+    }
+    bool AtEnd() const {
+      // We must report the iterator as past the end of the range when any of
+      // the component iterators has reached the end of its range.
+      return
+          current1_ == end1_ ||
+          current2_ == end2_ ||
+          current3_ == end3_ ||
+          current4_ == end4_ ||
+          current5_ == end5_ ||
+          current6_ == end6_;
+    }
+
+    // No implementation - assignment is unsupported.
+    void operator=(const Iterator& other);
+
+    const ParamGeneratorInterface<ParamType>* const base_;
+    // begin[i]_ and end[i]_ define the i-th range that Iterator traverses.
+    // current[i]_ is the actual traversing iterator.
+    const typename ParamGenerator<T1>::iterator begin1_;
+    const typename ParamGenerator<T1>::iterator end1_;
+    typename ParamGenerator<T1>::iterator current1_;
+    const typename ParamGenerator<T2>::iterator begin2_;
+    const typename ParamGenerator<T2>::iterator end2_;
+    typename ParamGenerator<T2>::iterator current2_;
+    const typename ParamGenerator<T3>::iterator begin3_;
+    const typename ParamGenerator<T3>::iterator end3_;
+    typename ParamGenerator<T3>::iterator current3_;
+    const typename ParamGenerator<T4>::iterator begin4_;
+    const typename ParamGenerator<T4>::iterator end4_;
+    typename ParamGenerator<T4>::iterator current4_;
+    const typename ParamGenerator<T5>::iterator begin5_;
+    const typename ParamGenerator<T5>::iterator end5_;
+    typename ParamGenerator<T5>::iterator current5_;
+    const typename ParamGenerator<T6>::iterator begin6_;
+    const typename ParamGenerator<T6>::iterator end6_;
+    typename ParamGenerator<T6>::iterator current6_;
+    ParamType current_value_;
+  };  // class CartesianProductGenerator6::Iterator
+
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductGenerator6& other);
+
+  const ParamGenerator<T1> g1_;
+  const ParamGenerator<T2> g2_;
+  const ParamGenerator<T3> g3_;
+  const ParamGenerator<T4> g4_;
+  const ParamGenerator<T5> g5_;
+  const ParamGenerator<T6> g6_;
+};  // class CartesianProductGenerator6
+
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7>
+class CartesianProductGenerator7
+    : public ParamGeneratorInterface< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6,
+        T7> > {
+ public:
+  typedef ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7> ParamType;
+
+  CartesianProductGenerator7(const ParamGenerator<T1>& g1,
+      const ParamGenerator<T2>& g2, const ParamGenerator<T3>& g3,
+      const ParamGenerator<T4>& g4, const ParamGenerator<T5>& g5,
+      const ParamGenerator<T6>& g6, const ParamGenerator<T7>& g7)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4), g5_(g5), g6_(g6), g7_(g7) {}
+  virtual ~CartesianProductGenerator7() {}
+
+  virtual ParamIteratorInterface<ParamType>* Begin() const {
+    return new Iterator(this, g1_, g1_.begin(), g2_, g2_.begin(), g3_,
+        g3_.begin(), g4_, g4_.begin(), g5_, g5_.begin(), g6_, g6_.begin(), g7_,
+        g7_.begin());
+  }
+  virtual ParamIteratorInterface<ParamType>* End() const {
+    return new Iterator(this, g1_, g1_.end(), g2_, g2_.end(), g3_, g3_.end(),
+        g4_, g4_.end(), g5_, g5_.end(), g6_, g6_.end(), g7_, g7_.end());
+  }
+
+ private:
+  class Iterator : public ParamIteratorInterface<ParamType> {
+   public:
+    Iterator(const ParamGeneratorInterface<ParamType>* base,
+      const ParamGenerator<T1>& g1,
+      const typename ParamGenerator<T1>::iterator& current1,
+      const ParamGenerator<T2>& g2,
+      const typename ParamGenerator<T2>::iterator& current2,
+      const ParamGenerator<T3>& g3,
+      const typename ParamGenerator<T3>::iterator& current3,
+      const ParamGenerator<T4>& g4,
+      const typename ParamGenerator<T4>::iterator& current4,
+      const ParamGenerator<T5>& g5,
+      const typename ParamGenerator<T5>::iterator& current5,
+      const ParamGenerator<T6>& g6,
+      const typename ParamGenerator<T6>::iterator& current6,
+      const ParamGenerator<T7>& g7,
+      const typename ParamGenerator<T7>::iterator& current7)
+        : base_(base),
+          begin1_(g1.begin()), end1_(g1.end()), current1_(current1),
+          begin2_(g2.begin()), end2_(g2.end()), current2_(current2),
+          begin3_(g3.begin()), end3_(g3.end()), current3_(current3),
+          begin4_(g4.begin()), end4_(g4.end()), current4_(current4),
+          begin5_(g5.begin()), end5_(g5.end()), current5_(current5),
+          begin6_(g6.begin()), end6_(g6.end()), current6_(current6),
+          begin7_(g7.begin()), end7_(g7.end()), current7_(current7)    {
+      ComputeCurrentValue();
+    }
+    virtual ~Iterator() {}
+
+    virtual const ParamGeneratorInterface<ParamType>* BaseGenerator() const {
+      return base_;
+    }
+    // Advance() should never be called on an iterator that is already past
+    // the end of the range, so none of the component iterators can be past
+    // the end of its range either.
+    virtual void Advance() {
+      assert(!AtEnd());
+      ++current7_;
+      if (current7_ == end7_) {
+        current7_ = begin7_;
+        ++current6_;
+      }
+      if (current6_ == end6_) {
+        current6_ = begin6_;
+        ++current5_;
+      }
+      if (current5_ == end5_) {
+        current5_ = begin5_;
+        ++current4_;
+      }
+      if (current4_ == end4_) {
+        current4_ = begin4_;
+        ++current3_;
+      }
+      if (current3_ == end3_) {
+        current3_ = begin3_;
+        ++current2_;
+      }
+      if (current2_ == end2_) {
+        current2_ = begin2_;
+        ++current1_;
+      }
+      ComputeCurrentValue();
+    }
+    virtual ParamIteratorInterface<ParamType>* Clone() const {
+      return new Iterator(*this);
+    }
+    virtual const ParamType* Current() const { return &current_value_; }
+    virtual bool Equals(const ParamIteratorInterface<ParamType>& other) const {
+      // Having the same base generator guarantees that the other
+      // iterator is of the same type and we can downcast.
+      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())
+          << "The program attempted to compare iterators "
+          << "from different generators." << std::endl;
+      const Iterator* typed_other =
+          CheckedDowncastToActualType<const Iterator>(&other);
+      // We must report iterators equal if they both point beyond their
+      // respective ranges. That can happen in a variety of fashions,
+      // so we have to consult AtEnd().
+      return (AtEnd() && typed_other->AtEnd()) ||
+         (
+          current1_ == typed_other->current1_ &&
+          current2_ == typed_other->current2_ &&
+          current3_ == typed_other->current3_ &&
+          current4_ == typed_other->current4_ &&
+          current5_ == typed_other->current5_ &&
+          current6_ == typed_other->current6_ &&
+          current7_ == typed_other->current7_);
+    }
+
+   private:
+    Iterator(const Iterator& other)
+        : base_(other.base_),
+        begin1_(other.begin1_),
+        end1_(other.end1_),
+        current1_(other.current1_),
+        begin2_(other.begin2_),
+        end2_(other.end2_),
+        current2_(other.current2_),
+        begin3_(other.begin3_),
+        end3_(other.end3_),
+        current3_(other.current3_),
+        begin4_(other.begin4_),
+        end4_(other.end4_),
+        current4_(other.current4_),
+        begin5_(other.begin5_),
+        end5_(other.end5_),
+        current5_(other.current5_),
+        begin6_(other.begin6_),
+        end6_(other.end6_),
+        current6_(other.current6_),
+        begin7_(other.begin7_),
+        end7_(other.end7_),
+        current7_(other.current7_) {
+      ComputeCurrentValue();
+    }
+
+    void ComputeCurrentValue() {
+      if (!AtEnd())
+        current_value_ = ParamType(*current1_, *current2_, *current3_,
+            *current4_, *current5_, *current6_, *current7_);
+    }
+    bool AtEnd() const {
+      // We must report the iterator as past the end of the range when any of
+      // the component iterators has reached the end of its range.
+      return
+          current1_ == end1_ ||
+          current2_ == end2_ ||
+          current3_ == end3_ ||
+          current4_ == end4_ ||
+          current5_ == end5_ ||
+          current6_ == end6_ ||
+          current7_ == end7_;
+    }
+
+    // No implementation - assignment is unsupported.
+    void operator=(const Iterator& other);
+
+    const ParamGeneratorInterface<ParamType>* const base_;
+    // begin[i]_ and end[i]_ define the i-th range that Iterator traverses.
+    // current[i]_ is the actual traversing iterator.
+    const typename ParamGenerator<T1>::iterator begin1_;
+    const typename ParamGenerator<T1>::iterator end1_;
+    typename ParamGenerator<T1>::iterator current1_;
+    const typename ParamGenerator<T2>::iterator begin2_;
+    const typename ParamGenerator<T2>::iterator end2_;
+    typename ParamGenerator<T2>::iterator current2_;
+    const typename ParamGenerator<T3>::iterator begin3_;
+    const typename ParamGenerator<T3>::iterator end3_;
+    typename ParamGenerator<T3>::iterator current3_;
+    const typename ParamGenerator<T4>::iterator begin4_;
+    const typename ParamGenerator<T4>::iterator end4_;
+    typename ParamGenerator<T4>::iterator current4_;
+    const typename ParamGenerator<T5>::iterator begin5_;
+    const typename ParamGenerator<T5>::iterator end5_;
+    typename ParamGenerator<T5>::iterator current5_;
+    const typename ParamGenerator<T6>::iterator begin6_;
+    const typename ParamGenerator<T6>::iterator end6_;
+    typename ParamGenerator<T6>::iterator current6_;
+    const typename ParamGenerator<T7>::iterator begin7_;
+    const typename ParamGenerator<T7>::iterator end7_;
+    typename ParamGenerator<T7>::iterator current7_;
+    ParamType current_value_;
+  };  // class CartesianProductGenerator7::Iterator
+
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductGenerator7& other);
+
+  const ParamGenerator<T1> g1_;
+  const ParamGenerator<T2> g2_;
+  const ParamGenerator<T3> g3_;
+  const ParamGenerator<T4> g4_;
+  const ParamGenerator<T5> g5_;
+  const ParamGenerator<T6> g6_;
+  const ParamGenerator<T7> g7_;
+};  // class CartesianProductGenerator7
+
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8>
+class CartesianProductGenerator8
+    : public ParamGeneratorInterface< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6,
+        T7, T8> > {
+ public:
+  typedef ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7, T8> ParamType;
+
+  CartesianProductGenerator8(const ParamGenerator<T1>& g1,
+      const ParamGenerator<T2>& g2, const ParamGenerator<T3>& g3,
+      const ParamGenerator<T4>& g4, const ParamGenerator<T5>& g5,
+      const ParamGenerator<T6>& g6, const ParamGenerator<T7>& g7,
+      const ParamGenerator<T8>& g8)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4), g5_(g5), g6_(g6), g7_(g7),
+          g8_(g8) {}
+  virtual ~CartesianProductGenerator8() {}
+
+  virtual ParamIteratorInterface<ParamType>* Begin() const {
+    return new Iterator(this, g1_, g1_.begin(), g2_, g2_.begin(), g3_,
+        g3_.begin(), g4_, g4_.begin(), g5_, g5_.begin(), g6_, g6_.begin(), g7_,
+        g7_.begin(), g8_, g8_.begin());
+  }
+  virtual ParamIteratorInterface<ParamType>* End() const {
+    return new Iterator(this, g1_, g1_.end(), g2_, g2_.end(), g3_, g3_.end(),
+        g4_, g4_.end(), g5_, g5_.end(), g6_, g6_.end(), g7_, g7_.end(), g8_,
+        g8_.end());
+  }
+
+ private:
+  class Iterator : public ParamIteratorInterface<ParamType> {
+   public:
+    Iterator(const ParamGeneratorInterface<ParamType>* base,
+      const ParamGenerator<T1>& g1,
+      const typename ParamGenerator<T1>::iterator& current1,
+      const ParamGenerator<T2>& g2,
+      const typename ParamGenerator<T2>::iterator& current2,
+      const ParamGenerator<T3>& g3,
+      const typename ParamGenerator<T3>::iterator& current3,
+      const ParamGenerator<T4>& g4,
+      const typename ParamGenerator<T4>::iterator& current4,
+      const ParamGenerator<T5>& g5,
+      const typename ParamGenerator<T5>::iterator& current5,
+      const ParamGenerator<T6>& g6,
+      const typename ParamGenerator<T6>::iterator& current6,
+      const ParamGenerator<T7>& g7,
+      const typename ParamGenerator<T7>::iterator& current7,
+      const ParamGenerator<T8>& g8,
+      const typename ParamGenerator<T8>::iterator& current8)
+        : base_(base),
+          begin1_(g1.begin()), end1_(g1.end()), current1_(current1),
+          begin2_(g2.begin()), end2_(g2.end()), current2_(current2),
+          begin3_(g3.begin()), end3_(g3.end()), current3_(current3),
+          begin4_(g4.begin()), end4_(g4.end()), current4_(current4),
+          begin5_(g5.begin()), end5_(g5.end()), current5_(current5),
+          begin6_(g6.begin()), end6_(g6.end()), current6_(current6),
+          begin7_(g7.begin()), end7_(g7.end()), current7_(current7),
+          begin8_(g8.begin()), end8_(g8.end()), current8_(current8)    {
+      ComputeCurrentValue();
+    }
+    virtual ~Iterator() {}
+
+    virtual const ParamGeneratorInterface<ParamType>* BaseGenerator() const {
+      return base_;
+    }
+    // Advance() should never be called on an iterator that is already past
+    // the end of the range, so none of the component iterators can be past
+    // the end of its range either.
+    virtual void Advance() {
+      assert(!AtEnd());
+      ++current8_;
+      if (current8_ == end8_) {
+        current8_ = begin8_;
+        ++current7_;
+      }
+      if (current7_ == end7_) {
+        current7_ = begin7_;
+        ++current6_;
+      }
+      if (current6_ == end6_) {
+        current6_ = begin6_;
+        ++current5_;
+      }
+      if (current5_ == end5_) {
+        current5_ = begin5_;
+        ++current4_;
+      }
+      if (current4_ == end4_) {
+        current4_ = begin4_;
+        ++current3_;
+      }
+      if (current3_ == end3_) {
+        current3_ = begin3_;
+        ++current2_;
+      }
+      if (current2_ == end2_) {
+        current2_ = begin2_;
+        ++current1_;
+      }
+      ComputeCurrentValue();
+    }
+    virtual ParamIteratorInterface<ParamType>* Clone() const {
+      return new Iterator(*this);
+    }
+    virtual const ParamType* Current() const { return &current_value_; }
+    virtual bool Equals(const ParamIteratorInterface<ParamType>& other) const {
+      // Having the same base generator guarantees that the other
+      // iterator is of the same type and we can downcast.
+      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())
+          << "The program attempted to compare iterators "
+          << "from different generators." << std::endl;
+      const Iterator* typed_other =
+          CheckedDowncastToActualType<const Iterator>(&other);
+      // We must report iterators equal if they both point beyond their
+      // respective ranges. That can happen in a variety of fashions,
+      // so we have to consult AtEnd().
+      return (AtEnd() && typed_other->AtEnd()) ||
+         (
+          current1_ == typed_other->current1_ &&
+          current2_ == typed_other->current2_ &&
+          current3_ == typed_other->current3_ &&
+          current4_ == typed_other->current4_ &&
+          current5_ == typed_other->current5_ &&
+          current6_ == typed_other->current6_ &&
+          current7_ == typed_other->current7_ &&
+          current8_ == typed_other->current8_);
+    }
+
+   private:
+    Iterator(const Iterator& other)
+        : base_(other.base_),
+        begin1_(other.begin1_),
+        end1_(other.end1_),
+        current1_(other.current1_),
+        begin2_(other.begin2_),
+        end2_(other.end2_),
+        current2_(other.current2_),
+        begin3_(other.begin3_),
+        end3_(other.end3_),
+        current3_(other.current3_),
+        begin4_(other.begin4_),
+        end4_(other.end4_),
+        current4_(other.current4_),
+        begin5_(other.begin5_),
+        end5_(other.end5_),
+        current5_(other.current5_),
+        begin6_(other.begin6_),
+        end6_(other.end6_),
+        current6_(other.current6_),
+        begin7_(other.begin7_),
+        end7_(other.end7_),
+        current7_(other.current7_),
+        begin8_(other.begin8_),
+        end8_(other.end8_),
+        current8_(other.current8_) {
+      ComputeCurrentValue();
+    }
+
+    void ComputeCurrentValue() {
+      if (!AtEnd())
+        current_value_ = ParamType(*current1_, *current2_, *current3_,
+            *current4_, *current5_, *current6_, *current7_, *current8_);
+    }
+    bool AtEnd() const {
+      // We must report the iterator as past the end of the range when any of
+      // the component iterators has reached the end of its range.
+      return
+          current1_ == end1_ ||
+          current2_ == end2_ ||
+          current3_ == end3_ ||
+          current4_ == end4_ ||
+          current5_ == end5_ ||
+          current6_ == end6_ ||
+          current7_ == end7_ ||
+          current8_ == end8_;
+    }
+
+    // No implementation - assignment is unsupported.
+    void operator=(const Iterator& other);
+
+    const ParamGeneratorInterface<ParamType>* const base_;
+    // begin[i]_ and end[i]_ define the i-th range that Iterator traverses.
+    // current[i]_ is the actual traversing iterator.
+    const typename ParamGenerator<T1>::iterator begin1_;
+    const typename ParamGenerator<T1>::iterator end1_;
+    typename ParamGenerator<T1>::iterator current1_;
+    const typename ParamGenerator<T2>::iterator begin2_;
+    const typename ParamGenerator<T2>::iterator end2_;
+    typename ParamGenerator<T2>::iterator current2_;
+    const typename ParamGenerator<T3>::iterator begin3_;
+    const typename ParamGenerator<T3>::iterator end3_;
+    typename ParamGenerator<T3>::iterator current3_;
+    const typename ParamGenerator<T4>::iterator begin4_;
+    const typename ParamGenerator<T4>::iterator end4_;
+    typename ParamGenerator<T4>::iterator current4_;
+    const typename ParamGenerator<T5>::iterator begin5_;
+    const typename ParamGenerator<T5>::iterator end5_;
+    typename ParamGenerator<T5>::iterator current5_;
+    const typename ParamGenerator<T6>::iterator begin6_;
+    const typename ParamGenerator<T6>::iterator end6_;
+    typename ParamGenerator<T6>::iterator current6_;
+    const typename ParamGenerator<T7>::iterator begin7_;
+    const typename ParamGenerator<T7>::iterator end7_;
+    typename ParamGenerator<T7>::iterator current7_;
+    const typename ParamGenerator<T8>::iterator begin8_;
+    const typename ParamGenerator<T8>::iterator end8_;
+    typename ParamGenerator<T8>::iterator current8_;
+    ParamType current_value_;
+  };  // class CartesianProductGenerator8::Iterator
+
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductGenerator8& other);
+
+  const ParamGenerator<T1> g1_;
+  const ParamGenerator<T2> g2_;
+  const ParamGenerator<T3> g3_;
+  const ParamGenerator<T4> g4_;
+  const ParamGenerator<T5> g5_;
+  const ParamGenerator<T6> g6_;
+  const ParamGenerator<T7> g7_;
+  const ParamGenerator<T8> g8_;
+};  // class CartesianProductGenerator8
+
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9>
+class CartesianProductGenerator9
+    : public ParamGeneratorInterface< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6,
+        T7, T8, T9> > {
+ public:
+  typedef ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7, T8, T9> ParamType;
+
+  CartesianProductGenerator9(const ParamGenerator<T1>& g1,
+      const ParamGenerator<T2>& g2, const ParamGenerator<T3>& g3,
+      const ParamGenerator<T4>& g4, const ParamGenerator<T5>& g5,
+      const ParamGenerator<T6>& g6, const ParamGenerator<T7>& g7,
+      const ParamGenerator<T8>& g8, const ParamGenerator<T9>& g9)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4), g5_(g5), g6_(g6), g7_(g7), g8_(g8),
+          g9_(g9) {}
+  virtual ~CartesianProductGenerator9() {}
+
+  virtual ParamIteratorInterface<ParamType>* Begin() const {
+    return new Iterator(this, g1_, g1_.begin(), g2_, g2_.begin(), g3_,
+        g3_.begin(), g4_, g4_.begin(), g5_, g5_.begin(), g6_, g6_.begin(), g7_,
+        g7_.begin(), g8_, g8_.begin(), g9_, g9_.begin());
+  }
+  virtual ParamIteratorInterface<ParamType>* End() const {
+    return new Iterator(this, g1_, g1_.end(), g2_, g2_.end(), g3_, g3_.end(),
+        g4_, g4_.end(), g5_, g5_.end(), g6_, g6_.end(), g7_, g7_.end(), g8_,
+        g8_.end(), g9_, g9_.end());
+  }
+
+ private:
+  class Iterator : public ParamIteratorInterface<ParamType> {
+   public:
+    Iterator(const ParamGeneratorInterface<ParamType>* base,
+      const ParamGenerator<T1>& g1,
+      const typename ParamGenerator<T1>::iterator& current1,
+      const ParamGenerator<T2>& g2,
+      const typename ParamGenerator<T2>::iterator& current2,
+      const ParamGenerator<T3>& g3,
+      const typename ParamGenerator<T3>::iterator& current3,
+      const ParamGenerator<T4>& g4,
+      const typename ParamGenerator<T4>::iterator& current4,
+      const ParamGenerator<T5>& g5,
+      const typename ParamGenerator<T5>::iterator& current5,
+      const ParamGenerator<T6>& g6,
+      const typename ParamGenerator<T6>::iterator& current6,
+      const ParamGenerator<T7>& g7,
+      const typename ParamGenerator<T7>::iterator& current7,
+      const ParamGenerator<T8>& g8,
+      const typename ParamGenerator<T8>::iterator& current8,
+      const ParamGenerator<T9>& g9,
+      const typename ParamGenerator<T9>::iterator& current9)
+        : base_(base),
+          begin1_(g1.begin()), end1_(g1.end()), current1_(current1),
+          begin2_(g2.begin()), end2_(g2.end()), current2_(current2),
+          begin3_(g3.begin()), end3_(g3.end()), current3_(current3),
+          begin4_(g4.begin()), end4_(g4.end()), current4_(current4),
+          begin5_(g5.begin()), end5_(g5.end()), current5_(current5),
+          begin6_(g6.begin()), end6_(g6.end()), current6_(current6),
+          begin7_(g7.begin()), end7_(g7.end()), current7_(current7),
+          begin8_(g8.begin()), end8_(g8.end()), current8_(current8),
+          begin9_(g9.begin()), end9_(g9.end()), current9_(current9)    {
+      ComputeCurrentValue();
+    }
+    virtual ~Iterator() {}
+
+    virtual const ParamGeneratorInterface<ParamType>* BaseGenerator() const {
+      return base_;
+    }
+    // Advance() should never be called on an iterator that is already past
+    // the end of the range, so none of the component iterators can be past
+    // the end of its range either.
+    virtual void Advance() {
+      assert(!AtEnd());
+      ++current9_;
+      if (current9_ == end9_) {
+        current9_ = begin9_;
+        ++current8_;
+      }
+      if (current8_ == end8_) {
+        current8_ = begin8_;
+        ++current7_;
+      }
+      if (current7_ == end7_) {
+        current7_ = begin7_;
+        ++current6_;
+      }
+      if (current6_ == end6_) {
+        current6_ = begin6_;
+        ++current5_;
+      }
+      if (current5_ == end5_) {
+        current5_ = begin5_;
+        ++current4_;
+      }
+      if (current4_ == end4_) {
+        current4_ = begin4_;
+        ++current3_;
+      }
+      if (current3_ == end3_) {
+        current3_ = begin3_;
+        ++current2_;
+      }
+      if (current2_ == end2_) {
+        current2_ = begin2_;
+        ++current1_;
+      }
+      ComputeCurrentValue();
+    }
+    virtual ParamIteratorInterface<ParamType>* Clone() const {
+      return new Iterator(*this);
+    }
+    virtual const ParamType* Current() const { return &current_value_; }
+    virtual bool Equals(const ParamIteratorInterface<ParamType>& other) const {
+      // Having the same base generator guarantees that the other
+      // iterator is of the same type and we can downcast.
+      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())
+          << "The program attempted to compare iterators "
+          << "from different generators." << std::endl;
+      const Iterator* typed_other =
+          CheckedDowncastToActualType<const Iterator>(&other);
+      // We must report iterators equal if they both point beyond their
+      // respective ranges. That can happen in a variety of fashions,
+      // so we have to consult AtEnd().
+      return (AtEnd() && typed_other->AtEnd()) ||
+         (
+          current1_ == typed_other->current1_ &&
+          current2_ == typed_other->current2_ &&
+          current3_ == typed_other->current3_ &&
+          current4_ == typed_other->current4_ &&
+          current5_ == typed_other->current5_ &&
+          current6_ == typed_other->current6_ &&
+          current7_ == typed_other->current7_ &&
+          current8_ == typed_other->current8_ &&
+          current9_ == typed_other->current9_);
+    }
+
+   private:
+    Iterator(const Iterator& other)
+        : base_(other.base_),
+        begin1_(other.begin1_),
+        end1_(other.end1_),
+        current1_(other.current1_),
+        begin2_(other.begin2_),
+        end2_(other.end2_),
+        current2_(other.current2_),
+        begin3_(other.begin3_),
+        end3_(other.end3_),
+        current3_(other.current3_),
+        begin4_(other.begin4_),
+        end4_(other.end4_),
+        current4_(other.current4_),
+        begin5_(other.begin5_),
+        end5_(other.end5_),
+        current5_(other.current5_),
+        begin6_(other.begin6_),
+        end6_(other.end6_),
+        current6_(other.current6_),
+        begin7_(other.begin7_),
+        end7_(other.end7_),
+        current7_(other.current7_),
+        begin8_(other.begin8_),
+        end8_(other.end8_),
+        current8_(other.current8_),
+        begin9_(other.begin9_),
+        end9_(other.end9_),
+        current9_(other.current9_) {
+      ComputeCurrentValue();
+    }
+
+    void ComputeCurrentValue() {
+      if (!AtEnd())
+        current_value_ = ParamType(*current1_, *current2_, *current3_,
+            *current4_, *current5_, *current6_, *current7_, *current8_,
+            *current9_);
+    }
+    bool AtEnd() const {
+      // We must report the iterator as past the end of the range when any of
+      // the component iterators has reached the end of its range.
+      return
+          current1_ == end1_ ||
+          current2_ == end2_ ||
+          current3_ == end3_ ||
+          current4_ == end4_ ||
+          current5_ == end5_ ||
+          current6_ == end6_ ||
+          current7_ == end7_ ||
+          current8_ == end8_ ||
+          current9_ == end9_;
+    }
+
+    // No implementation - assignment is unsupported.
+    void operator=(const Iterator& other);
+
+    const ParamGeneratorInterface<ParamType>* const base_;
+    // begin[i]_ and end[i]_ define the i-th range that Iterator traverses.
+    // current[i]_ is the actual traversing iterator.
+    const typename ParamGenerator<T1>::iterator begin1_;
+    const typename ParamGenerator<T1>::iterator end1_;
+    typename ParamGenerator<T1>::iterator current1_;
+    const typename ParamGenerator<T2>::iterator begin2_;
+    const typename ParamGenerator<T2>::iterator end2_;
+    typename ParamGenerator<T2>::iterator current2_;
+    const typename ParamGenerator<T3>::iterator begin3_;
+    const typename ParamGenerator<T3>::iterator end3_;
+    typename ParamGenerator<T3>::iterator current3_;
+    const typename ParamGenerator<T4>::iterator begin4_;
+    const typename ParamGenerator<T4>::iterator end4_;
+    typename ParamGenerator<T4>::iterator current4_;
+    const typename ParamGenerator<T5>::iterator begin5_;
+    const typename ParamGenerator<T5>::iterator end5_;
+    typename ParamGenerator<T5>::iterator current5_;
+    const typename ParamGenerator<T6>::iterator begin6_;
+    const typename ParamGenerator<T6>::iterator end6_;
+    typename ParamGenerator<T6>::iterator current6_;
+    const typename ParamGenerator<T7>::iterator begin7_;
+    const typename ParamGenerator<T7>::iterator end7_;
+    typename ParamGenerator<T7>::iterator current7_;
+    const typename ParamGenerator<T8>::iterator begin8_;
+    const typename ParamGenerator<T8>::iterator end8_;
+    typename ParamGenerator<T8>::iterator current8_;
+    const typename ParamGenerator<T9>::iterator begin9_;
+    const typename ParamGenerator<T9>::iterator end9_;
+    typename ParamGenerator<T9>::iterator current9_;
+    ParamType current_value_;
+  };  // class CartesianProductGenerator9::Iterator
+
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductGenerator9& other);
+
+  const ParamGenerator<T1> g1_;
+  const ParamGenerator<T2> g2_;
+  const ParamGenerator<T3> g3_;
+  const ParamGenerator<T4> g4_;
+  const ParamGenerator<T5> g5_;
+  const ParamGenerator<T6> g6_;
+  const ParamGenerator<T7> g7_;
+  const ParamGenerator<T8> g8_;
+  const ParamGenerator<T9> g9_;
+};  // class CartesianProductGenerator9
+
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10>
+class CartesianProductGenerator10
+    : public ParamGeneratorInterface< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6,
+        T7, T8, T9, T10> > {
+ public:
+  typedef ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> ParamType;
+
+  CartesianProductGenerator10(const ParamGenerator<T1>& g1,
+      const ParamGenerator<T2>& g2, const ParamGenerator<T3>& g3,
+      const ParamGenerator<T4>& g4, const ParamGenerator<T5>& g5,
+      const ParamGenerator<T6>& g6, const ParamGenerator<T7>& g7,
+      const ParamGenerator<T8>& g8, const ParamGenerator<T9>& g9,
+      const ParamGenerator<T10>& g10)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4), g5_(g5), g6_(g6), g7_(g7), g8_(g8),
+          g9_(g9), g10_(g10) {}
+  virtual ~CartesianProductGenerator10() {}
+
+  virtual ParamIteratorInterface<ParamType>* Begin() const {
+    return new Iterator(this, g1_, g1_.begin(), g2_, g2_.begin(), g3_,
+        g3_.begin(), g4_, g4_.begin(), g5_, g5_.begin(), g6_, g6_.begin(), g7_,
+        g7_.begin(), g8_, g8_.begin(), g9_, g9_.begin(), g10_, g10_.begin());
+  }
+  virtual ParamIteratorInterface<ParamType>* End() const {
+    return new Iterator(this, g1_, g1_.end(), g2_, g2_.end(), g3_, g3_.end(),
+        g4_, g4_.end(), g5_, g5_.end(), g6_, g6_.end(), g7_, g7_.end(), g8_,
+        g8_.end(), g9_, g9_.end(), g10_, g10_.end());
+  }
+
+ private:
+  class Iterator : public ParamIteratorInterface<ParamType> {
+   public:
+    Iterator(const ParamGeneratorInterface<ParamType>* base,
+      const ParamGenerator<T1>& g1,
+      const typename ParamGenerator<T1>::iterator& current1,
+      const ParamGenerator<T2>& g2,
+      const typename ParamGenerator<T2>::iterator& current2,
+      const ParamGenerator<T3>& g3,
+      const typename ParamGenerator<T3>::iterator& current3,
+      const ParamGenerator<T4>& g4,
+      const typename ParamGenerator<T4>::iterator& current4,
+      const ParamGenerator<T5>& g5,
+      const typename ParamGenerator<T5>::iterator& current5,
+      const ParamGenerator<T6>& g6,
+      const typename ParamGenerator<T6>::iterator& current6,
+      const ParamGenerator<T7>& g7,
+      const typename ParamGenerator<T7>::iterator& current7,
+      const ParamGenerator<T8>& g8,
+      const typename ParamGenerator<T8>::iterator& current8,
+      const ParamGenerator<T9>& g9,
+      const typename ParamGenerator<T9>::iterator& current9,
+      const ParamGenerator<T10>& g10,
+      const typename ParamGenerator<T10>::iterator& current10)
+        : base_(base),
+          begin1_(g1.begin()), end1_(g1.end()), current1_(current1),
+          begin2_(g2.begin()), end2_(g2.end()), current2_(current2),
+          begin3_(g3.begin()), end3_(g3.end()), current3_(current3),
+          begin4_(g4.begin()), end4_(g4.end()), current4_(current4),
+          begin5_(g5.begin()), end5_(g5.end()), current5_(current5),
+          begin6_(g6.begin()), end6_(g6.end()), current6_(current6),
+          begin7_(g7.begin()), end7_(g7.end()), current7_(current7),
+          begin8_(g8.begin()), end8_(g8.end()), current8_(current8),
+          begin9_(g9.begin()), end9_(g9.end()), current9_(current9),
+          begin10_(g10.begin()), end10_(g10.end()), current10_(current10)    {
+      ComputeCurrentValue();
+    }
+    virtual ~Iterator() {}
+
+    virtual const ParamGeneratorInterface<ParamType>* BaseGenerator() const {
+      return base_;
+    }
+    // Advance() should never be called on an iterator that is already past
+    // the end of the range, so none of the component iterators can be past
+    // the end of its range either.
+    virtual void Advance() {
+      assert(!AtEnd());
+      ++current10_;
+      if (current10_ == end10_) {
+        current10_ = begin10_;
+        ++current9_;
+      }
+      if (current9_ == end9_) {
+        current9_ = begin9_;
+        ++current8_;
+      }
+      if (current8_ == end8_) {
+        current8_ = begin8_;
+        ++current7_;
+      }
+      if (current7_ == end7_) {
+        current7_ = begin7_;
+        ++current6_;
+      }
+      if (current6_ == end6_) {
+        current6_ = begin6_;
+        ++current5_;
+      }
+      if (current5_ == end5_) {
+        current5_ = begin5_;
+        ++current4_;
+      }
+      if (current4_ == end4_) {
+        current4_ = begin4_;
+        ++current3_;
+      }
+      if (current3_ == end3_) {
+        current3_ = begin3_;
+        ++current2_;
+      }
+      if (current2_ == end2_) {
+        current2_ = begin2_;
+        ++current1_;
+      }
+      ComputeCurrentValue();
+    }
+    virtual ParamIteratorInterface<ParamType>* Clone() const {
+      return new Iterator(*this);
+    }
+    virtual const ParamType* Current() const { return &current_value_; }
+    virtual bool Equals(const ParamIteratorInterface<ParamType>& other) const {
+      // Having the same base generator guarantees that the other
+      // iterator is of the same type and we can downcast.
+      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())
+          << "The program attempted to compare iterators "
+          << "from different generators." << std::endl;
+      const Iterator* typed_other =
+          CheckedDowncastToActualType<const Iterator>(&other);
+      // We must report iterators equal if they both point beyond their
+      // respective ranges. That can happen in a variety of fashions,
+      // so we have to consult AtEnd().
+      return (AtEnd() && typed_other->AtEnd()) ||
+         (
+          current1_ == typed_other->current1_ &&
+          current2_ == typed_other->current2_ &&
+          current3_ == typed_other->current3_ &&
+          current4_ == typed_other->current4_ &&
+          current5_ == typed_other->current5_ &&
+          current6_ == typed_other->current6_ &&
+          current7_ == typed_other->current7_ &&
+          current8_ == typed_other->current8_ &&
+          current9_ == typed_other->current9_ &&
+          current10_ == typed_other->current10_);
+    }
+
+   private:
+    Iterator(const Iterator& other)
+        : base_(other.base_),
+        begin1_(other.begin1_),
+        end1_(other.end1_),
+        current1_(other.current1_),
+        begin2_(other.begin2_),
+        end2_(other.end2_),
+        current2_(other.current2_),
+        begin3_(other.begin3_),
+        end3_(other.end3_),
+        current3_(other.current3_),
+        begin4_(other.begin4_),
+        end4_(other.end4_),
+        current4_(other.current4_),
+        begin5_(other.begin5_),
+        end5_(other.end5_),
+        current5_(other.current5_),
+        begin6_(other.begin6_),
+        end6_(other.end6_),
+        current6_(other.current6_),
+        begin7_(other.begin7_),
+        end7_(other.end7_),
+        current7_(other.current7_),
+        begin8_(other.begin8_),
+        end8_(other.end8_),
+        current8_(other.current8_),
+        begin9_(other.begin9_),
+        end9_(other.end9_),
+        current9_(other.current9_),
+        begin10_(other.begin10_),
+        end10_(other.end10_),
+        current10_(other.current10_) {
+      ComputeCurrentValue();
+    }
+
+    void ComputeCurrentValue() {
+      if (!AtEnd())
+        current_value_ = ParamType(*current1_, *current2_, *current3_,
+            *current4_, *current5_, *current6_, *current7_, *current8_,
+            *current9_, *current10_);
+    }
+    bool AtEnd() const {
+      // We must report the iterator as past the end of the range when any of
+      // the component iterators has reached the end of its range.
+      return
+          current1_ == end1_ ||
+          current2_ == end2_ ||
+          current3_ == end3_ ||
+          current4_ == end4_ ||
+          current5_ == end5_ ||
+          current6_ == end6_ ||
+          current7_ == end7_ ||
+          current8_ == end8_ ||
+          current9_ == end9_ ||
+          current10_ == end10_;
+    }
+
+    // No implementation - assignment is unsupported.
+    void operator=(const Iterator& other);
+
+    const ParamGeneratorInterface<ParamType>* const base_;
+    // begin[i]_ and end[i]_ define the i-th range that Iterator traverses.
+    // current[i]_ is the actual traversing iterator.
+    const typename ParamGenerator<T1>::iterator begin1_;
+    const typename ParamGenerator<T1>::iterator end1_;
+    typename ParamGenerator<T1>::iterator current1_;
+    const typename ParamGenerator<T2>::iterator begin2_;
+    const typename ParamGenerator<T2>::iterator end2_;
+    typename ParamGenerator<T2>::iterator current2_;
+    const typename ParamGenerator<T3>::iterator begin3_;
+    const typename ParamGenerator<T3>::iterator end3_;
+    typename ParamGenerator<T3>::iterator current3_;
+    const typename ParamGenerator<T4>::iterator begin4_;
+    const typename ParamGenerator<T4>::iterator end4_;
+    typename ParamGenerator<T4>::iterator current4_;
+    const typename ParamGenerator<T5>::iterator begin5_;
+    const typename ParamGenerator<T5>::iterator end5_;
+    typename ParamGenerator<T5>::iterator current5_;
+    const typename ParamGenerator<T6>::iterator begin6_;
+    const typename ParamGenerator<T6>::iterator end6_;
+    typename ParamGenerator<T6>::iterator current6_;
+    const typename ParamGenerator<T7>::iterator begin7_;
+    const typename ParamGenerator<T7>::iterator end7_;
+    typename ParamGenerator<T7>::iterator current7_;
+    const typename ParamGenerator<T8>::iterator begin8_;
+    const typename ParamGenerator<T8>::iterator end8_;
+    typename ParamGenerator<T8>::iterator current8_;
+    const typename ParamGenerator<T9>::iterator begin9_;
+    const typename ParamGenerator<T9>::iterator end9_;
+    typename ParamGenerator<T9>::iterator current9_;
+    const typename ParamGenerator<T10>::iterator begin10_;
+    const typename ParamGenerator<T10>::iterator end10_;
+    typename ParamGenerator<T10>::iterator current10_;
+    ParamType current_value_;
+  };  // class CartesianProductGenerator10::Iterator
+
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductGenerator10& other);
+
+  const ParamGenerator<T1> g1_;
+  const ParamGenerator<T2> g2_;
+  const ParamGenerator<T3> g3_;
+  const ParamGenerator<T4> g4_;
+  const ParamGenerator<T5> g5_;
+  const ParamGenerator<T6> g6_;
+  const ParamGenerator<T7> g7_;
+  const ParamGenerator<T8> g8_;
+  const ParamGenerator<T9> g9_;
+  const ParamGenerator<T10> g10_;
+};  // class CartesianProductGenerator10
+
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// Helper classes providing Combine() with polymorphic features. They allow
+// casting CartesianProductGeneratorN<T> to ParamGenerator<U> if T is
+// convertible to U.
+//
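+// A minimal usage sketch in comments (hypothetical test and type names; the
+// Combine() front-end itself is declared elsewhere in gtest-param-test.h):
+// combining two generators yields every pair of their Cartesian product as a
+// tr1::tuple parameter.
+//
+//   class PairTest
+//       : public ::testing::TestWithParam< ::std::tr1::tuple<int, bool> > {};
+//
+//   TEST_P(PairTest, HandlesPair) {
+//     const int number = ::std::tr1::get<0>(GetParam());
+//     const bool flag = ::std::tr1::get<1>(GetParam());
+//     // ... assertions using number and flag ...
+//   }
+//
+//   INSTANTIATE_TEST_CASE_P(Pairs, PairTest,
+//                           ::testing::Combine(::testing::Values(1, 2, 3),
+//                                              ::testing::Bool()));
+//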
+template <class Generator1, class Generator2>
+class CartesianProductHolder2 {
+ public:
+  CartesianProductHolder2(const Generator1& g1, const Generator2& g2)
+      : g1_(g1), g2_(g2) {}
+  template <typename T1, typename T2>
+  operator ParamGenerator< ::std::tr1::tuple<T1, T2> >() const {
+    return ParamGenerator< ::std::tr1::tuple<T1, T2> >(
+        new CartesianProductGenerator2<T1, T2>(
+        static_cast<ParamGenerator<T1> >(g1_),
+        static_cast<ParamGenerator<T2> >(g2_)));
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductHolder2& other);
+
+  const Generator1 g1_;
+  const Generator2 g2_;
+};  // class CartesianProductHolder2
+
+template <class Generator1, class Generator2, class Generator3>
+class CartesianProductHolder3 {
+ public:
+  CartesianProductHolder3(const Generator1& g1, const Generator2& g2,
+      const Generator3& g3)
+      : g1_(g1), g2_(g2), g3_(g3) {}
+  template <typename T1, typename T2, typename T3>
+  operator ParamGenerator< ::std::tr1::tuple<T1, T2, T3> >() const {
+    return ParamGenerator< ::std::tr1::tuple<T1, T2, T3> >(
+        new CartesianProductGenerator3<T1, T2, T3>(
+        static_cast<ParamGenerator<T1> >(g1_),
+        static_cast<ParamGenerator<T2> >(g2_),
+        static_cast<ParamGenerator<T3> >(g3_)));
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductHolder3& other);
+
+  const Generator1 g1_;
+  const Generator2 g2_;
+  const Generator3 g3_;
+};  // class CartesianProductHolder3
+
+template <class Generator1, class Generator2, class Generator3,
+    class Generator4>
+class CartesianProductHolder4 {
+ public:
+  CartesianProductHolder4(const Generator1& g1, const Generator2& g2,
+      const Generator3& g3, const Generator4& g4)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4) {}
+  template <typename T1, typename T2, typename T3, typename T4>
+  operator ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4> >() const {
+    return ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4> >(
+        new CartesianProductGenerator4<T1, T2, T3, T4>(
+        static_cast<ParamGenerator<T1> >(g1_),
+        static_cast<ParamGenerator<T2> >(g2_),
+        static_cast<ParamGenerator<T3> >(g3_),
+        static_cast<ParamGenerator<T4> >(g4_)));
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductHolder4& other);
+
+  const Generator1 g1_;
+  const Generator2 g2_;
+  const Generator3 g3_;
+  const Generator4 g4_;
+};  // class CartesianProductHolder4
+
+template <class Generator1, class Generator2, class Generator3,
+    class Generator4, class Generator5>
+class CartesianProductHolder5 {
+ public:
+  CartesianProductHolder5(const Generator1& g1, const Generator2& g2,
+      const Generator3& g3, const Generator4& g4, const Generator5& g5)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4), g5_(g5) {}
+  template <typename T1, typename T2, typename T3, typename T4, typename T5>
+  operator ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4, T5> >() const {
+    return ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4, T5> >(
+        new CartesianProductGenerator5<T1, T2, T3, T4, T5>(
+        static_cast<ParamGenerator<T1> >(g1_),
+        static_cast<ParamGenerator<T2> >(g2_),
+        static_cast<ParamGenerator<T3> >(g3_),
+        static_cast<ParamGenerator<T4> >(g4_),
+        static_cast<ParamGenerator<T5> >(g5_)));
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductHolder5& other);
+
+  const Generator1 g1_;
+  const Generator2 g2_;
+  const Generator3 g3_;
+  const Generator4 g4_;
+  const Generator5 g5_;
+};  // class CartesianProductHolder5
+
+template <class Generator1, class Generator2, class Generator3,
+    class Generator4, class Generator5, class Generator6>
+class CartesianProductHolder6 {
+ public:
+  CartesianProductHolder6(const Generator1& g1, const Generator2& g2,
+      const Generator3& g3, const Generator4& g4, const Generator5& g5,
+      const Generator6& g6)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4), g5_(g5), g6_(g6) {}
+  template <typename T1, typename T2, typename T3, typename T4, typename T5,
+      typename T6>
+  operator ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6> >() const {
+    return ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6> >(
+        new CartesianProductGenerator6<T1, T2, T3, T4, T5, T6>(
+        static_cast<ParamGenerator<T1> >(g1_),
+        static_cast<ParamGenerator<T2> >(g2_),
+        static_cast<ParamGenerator<T3> >(g3_),
+        static_cast<ParamGenerator<T4> >(g4_),
+        static_cast<ParamGenerator<T5> >(g5_),
+        static_cast<ParamGenerator<T6> >(g6_)));
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductHolder6& other);
+
+  const Generator1 g1_;
+  const Generator2 g2_;
+  const Generator3 g3_;
+  const Generator4 g4_;
+  const Generator5 g5_;
+  const Generator6 g6_;
+};  // class CartesianProductHolder6
+
+template <class Generator1, class Generator2, class Generator3,
+    class Generator4, class Generator5, class Generator6, class Generator7>
+class CartesianProductHolder7 {
+ public:
+  CartesianProductHolder7(const Generator1& g1, const Generator2& g2,
+      const Generator3& g3, const Generator4& g4, const Generator5& g5,
+      const Generator6& g6, const Generator7& g7)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4), g5_(g5), g6_(g6), g7_(g7) {}
+  template <typename T1, typename T2, typename T3, typename T4, typename T5,
+      typename T6, typename T7>
+  operator ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6,
+      T7> >() const {
+    return ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7> >(
+        new CartesianProductGenerator7<T1, T2, T3, T4, T5, T6, T7>(
+        static_cast<ParamGenerator<T1> >(g1_),
+        static_cast<ParamGenerator<T2> >(g2_),
+        static_cast<ParamGenerator<T3> >(g3_),
+        static_cast<ParamGenerator<T4> >(g4_),
+        static_cast<ParamGenerator<T5> >(g5_),
+        static_cast<ParamGenerator<T6> >(g6_),
+        static_cast<ParamGenerator<T7> >(g7_)));
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductHolder7& other);
+
+  const Generator1 g1_;
+  const Generator2 g2_;
+  const Generator3 g3_;
+  const Generator4 g4_;
+  const Generator5 g5_;
+  const Generator6 g6_;
+  const Generator7 g7_;
+};  // class CartesianProductHolder7
+
+template <class Generator1, class Generator2, class Generator3,
+    class Generator4, class Generator5, class Generator6, class Generator7,
+    class Generator8>
+class CartesianProductHolder8 {
+ public:
+  CartesianProductHolder8(const Generator1& g1, const Generator2& g2,
+      const Generator3& g3, const Generator4& g4, const Generator5& g5,
+      const Generator6& g6, const Generator7& g7, const Generator8& g8)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4), g5_(g5), g6_(g6), g7_(g7),
+          g8_(g8) {}
+  template <typename T1, typename T2, typename T3, typename T4, typename T5,
+      typename T6, typename T7, typename T8>
+  operator ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7,
+      T8> >() const {
+    return ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7, T8> >(
+        new CartesianProductGenerator8<T1, T2, T3, T4, T5, T6, T7, T8>(
+        static_cast<ParamGenerator<T1> >(g1_),
+        static_cast<ParamGenerator<T2> >(g2_),
+        static_cast<ParamGenerator<T3> >(g3_),
+        static_cast<ParamGenerator<T4> >(g4_),
+        static_cast<ParamGenerator<T5> >(g5_),
+        static_cast<ParamGenerator<T6> >(g6_),
+        static_cast<ParamGenerator<T7> >(g7_),
+        static_cast<ParamGenerator<T8> >(g8_)));
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductHolder8& other);
+
+  const Generator1 g1_;
+  const Generator2 g2_;
+  const Generator3 g3_;
+  const Generator4 g4_;
+  const Generator5 g5_;
+  const Generator6 g6_;
+  const Generator7 g7_;
+  const Generator8 g8_;
+};  // class CartesianProductHolder8
+
+template <class Generator1, class Generator2, class Generator3,
+    class Generator4, class Generator5, class Generator6, class Generator7,
+    class Generator8, class Generator9>
+class CartesianProductHolder9 {
+ public:
+  CartesianProductHolder9(const Generator1& g1, const Generator2& g2,
+      const Generator3& g3, const Generator4& g4, const Generator5& g5,
+      const Generator6& g6, const Generator7& g7, const Generator8& g8,
+      const Generator9& g9)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4), g5_(g5), g6_(g6), g7_(g7), g8_(g8),
+          g9_(g9) {}
+  template <typename T1, typename T2, typename T3, typename T4, typename T5,
+      typename T6, typename T7, typename T8, typename T9>
+  operator ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7, T8,
+      T9> >() const {
+    return ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7, T8,
+        T9> >(
+        new CartesianProductGenerator9<T1, T2, T3, T4, T5, T6, T7, T8, T9>(
+        static_cast<ParamGenerator<T1> >(g1_),
+        static_cast<ParamGenerator<T2> >(g2_),
+        static_cast<ParamGenerator<T3> >(g3_),
+        static_cast<ParamGenerator<T4> >(g4_),
+        static_cast<ParamGenerator<T5> >(g5_),
+        static_cast<ParamGenerator<T6> >(g6_),
+        static_cast<ParamGenerator<T7> >(g7_),
+        static_cast<ParamGenerator<T8> >(g8_),
+        static_cast<ParamGenerator<T9> >(g9_)));
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductHolder9& other);
+
+  const Generator1 g1_;
+  const Generator2 g2_;
+  const Generator3 g3_;
+  const Generator4 g4_;
+  const Generator5 g5_;
+  const Generator6 g6_;
+  const Generator7 g7_;
+  const Generator8 g8_;
+  const Generator9 g9_;
+};  // class CartesianProductHolder9
+
+template <class Generator1, class Generator2, class Generator3,
+    class Generator4, class Generator5, class Generator6, class Generator7,
+    class Generator8, class Generator9, class Generator10>
+class CartesianProductHolder10 {
+ public:
+  CartesianProductHolder10(const Generator1& g1, const Generator2& g2,
+      const Generator3& g3, const Generator4& g4, const Generator5& g5,
+      const Generator6& g6, const Generator7& g7, const Generator8& g8,
+      const Generator9& g9, const Generator10& g10)
+      : g1_(g1), g2_(g2), g3_(g3), g4_(g4), g5_(g5), g6_(g6), g7_(g7), g8_(g8),
+          g9_(g9), g10_(g10) {}
+  template <typename T1, typename T2, typename T3, typename T4, typename T5,
+      typename T6, typename T7, typename T8, typename T9, typename T10>
+  operator ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7, T8,
+      T9, T10> >() const {
+    return ParamGenerator< ::std::tr1::tuple<T1, T2, T3, T4, T5, T6, T7, T8,
+        T9, T10> >(
+        new CartesianProductGenerator10<T1, T2, T3, T4, T5, T6, T7, T8, T9,
+            T10>(
+        static_cast<ParamGenerator<T1> >(g1_),
+        static_cast<ParamGenerator<T2> >(g2_),
+        static_cast<ParamGenerator<T3> >(g3_),
+        static_cast<ParamGenerator<T4> >(g4_),
+        static_cast<ParamGenerator<T5> >(g5_),
+        static_cast<ParamGenerator<T6> >(g6_),
+        static_cast<ParamGenerator<T7> >(g7_),
+        static_cast<ParamGenerator<T8> >(g8_),
+        static_cast<ParamGenerator<T9> >(g9_),
+        static_cast<ParamGenerator<T10> >(g10_)));
+  }
+
+ private:
+  // No implementation - assignment is unsupported.
+  void operator=(const CartesianProductHolder10& other);
+
+  const Generator1 g1_;
+  const Generator2 g2_;
+  const Generator3 g3_;
+  const Generator4 g4_;
+  const Generator5 g5_;
+  const Generator6 g6_;
+  const Generator7 g7_;
+  const Generator8 g8_;
+  const Generator9 g9_;
+  const Generator10 g10_;
+};  // class CartesianProductHolder10
+
+# endif  // GTEST_HAS_COMBINE
+
+}  // namespace internal
+}  // namespace testing
+
+#endif  //  GTEST_HAS_PARAM_TEST
+
+#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_GENERATED_H_
+
+#if GTEST_HAS_PARAM_TEST
+
+namespace testing {
+
+// Functions producing parameter generators.
+//
+// Google Test uses these generators to produce parameters for value-
+// parameterized tests. When a parameterized test case is instantiated
+// with a particular generator, Google Test creates and runs tests
+// for each element in the sequence produced by the generator.
+//
+// In the following sample, the tests from test case FooTest are each
+// instantiated three times with parameter values 3, 5, and 8:
+//
+// class FooTest : public TestWithParam<int> { ... };
+//
+// TEST_P(FooTest, TestThis) {
+// }
+// TEST_P(FooTest, TestThat) {
+// }
+// INSTANTIATE_TEST_CASE_P(TestSequence, FooTest, Values(3, 5, 8));
+//
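+// A slightly fuller sketch of a test body (same hypothetical FooTest): inside
+// a TEST_P body the current parameter is retrieved with GetParam().
+//
+//   TEST_P(FooTest, HandlesParameter) {
+//     const int n = GetParam();  // 3, 5, or 8 - one value per generated test.
+//     EXPECT_GE(n, 3);
+//   }
+//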
+
+// Range() returns generators providing sequences of values in a range.
+//
+// Synopsis:
+// Range(start, end)
+//   - returns a generator producing a sequence of values {start, start+1,
+//     start+2, ...}.
+// Range(start, end, step)
+//   - returns a generator producing a sequence of values {start, start+step,
+//     start+step+step, ...}.
+// Notes:
+//   * The generated sequences never include end. For example, Range(1, 5)
+//     returns a generator producing a sequence {1, 2, 3, 4}. Range(1, 9, 2)
+//     returns a generator producing {1, 3, 5, 7}.
+//   * start and end must have the same type. That type may be any integral or
+//     floating-point type or a user defined type satisfying these conditions:
+//     * It must be assignable (have operator=() defined).
+//     * It must have operator+() defined (operator+(int-compatible type) for
+//       the two-operand version).
+//     * It must have operator<() defined.
+//     Elements in the resulting sequences will also have that type.
+//   * Condition start < end must be satisfied in order for resulting sequences
+//     to contain any elements.
+//
+template <typename T, typename IncrementT>
+internal::ParamGenerator<T> Range(T start, T end, IncrementT step) {
+  return internal::ParamGenerator<T>(
+      new internal::RangeGenerator<T, IncrementT>(start, end, step));
+}
+
+template <typename T>
+internal::ParamGenerator<T> Range(T start, T end) {
+  return Range(start, end, 1);
+}
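+
+// A minimal usage sketch (hypothetical test case name MyNumericTest):
+// instantiating a value-parameterized test with every even number in [0, 10).
+//
+//   INSTANTIATE_TEST_CASE_P(Evens, MyNumericTest, ::testing::Range(0, 10, 2));
+//
+// This runs the tests in MyNumericTest with the parameters 0, 2, 4, 6, and 8.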
+
+// The ValuesIn() function allows generating tests with parameters coming from
+// a container.
+//
+// Synopsis:
+// ValuesIn(const T (&array)[N])
+//   - returns a generator producing sequences with elements from
+//     a C-style array.
+// ValuesIn(const Container& container)
+//   - returns a generator producing sequences with elements from
+//     an STL-style container.
+// ValuesIn(Iterator begin, Iterator end)
+//   - returns a generator producing sequences with elements from
+//     a range [begin, end) defined by a pair of STL-style iterators. These
+//     iterators can also be plain C pointers.
+//
+// Please note that ValuesIn copies the values from the containers
+// passed in and keeps them to generate tests in RUN_ALL_TESTS().
+//
+// Examples:
+//
+// This instantiates tests from test case StringTest
+// each with C-string values of "foo", "bar", and "baz":
+//
+// const char* strings[] = {"foo", "bar", "baz"};
+// INSTANTIATE_TEST_CASE_P(StringSequence, StringTest, ValuesIn(strings));
+//
+// This instantiates tests from test case StlStringTest
+// each with STL strings with values "a" and "b":
+//
+// ::std::vector< ::std::string> GetParameterStrings() {
+//   ::std::vector< ::std::string> v;
+//   v.push_back("a");
+//   v.push_back("b");
+//   return v;
+// }
+//
+// INSTANTIATE_TEST_CASE_P(CharSequence,
+//                         StlStringTest,
+//                         ValuesIn(GetParameterStrings()));
+//
+//
+// This will also instantiate tests from CharTest
+// each with parameter values 'a' and 'b':
+//
+// ::std::list<char> GetParameterChars() {
+//   ::std::list<char> list;
+//   list.push_back('a');
+//   list.push_back('b');
+//   return list;
+// }
+// ::std::list<char> l = GetParameterChars();
+// INSTANTIATE_TEST_CASE_P(CharSequence2,
+//                         CharTest,
+//                         ValuesIn(l.begin(), l.end()));
+//
+template <typename ForwardIterator>
+internal::ParamGenerator<
+  typename ::testing::internal::IteratorTraits<ForwardIterator>::value_type>
+ValuesIn(ForwardIterator begin, ForwardIterator end) {
+  typedef typename ::testing::internal::IteratorTraits<ForwardIterator>
+      ::value_type ParamType;
+  return internal::ParamGenerator<ParamType>(
+      new internal::ValuesInIteratorRangeGenerator<ParamType>(begin, end));
+}
+
+template <typename T, size_t N>
+internal::ParamGenerator<T> ValuesIn(const T (&array)[N]) {
+  return ValuesIn(array, array + N);
+}
+
+template <class Container>
+internal::ParamGenerator<typename Container::value_type> ValuesIn(
+    const Container& container) {
+  return ValuesIn(container.begin(), container.end());
+}
+
+// Values() allows generating tests from an explicitly specified list of
+// parameters.
+//
+// Synopsis:
+// Values(T v1, T v2, ..., T vN)
+//   - returns a generator producing sequences with elements v1, v2, ..., vN.
+//
+// For example, this instantiates tests from test case BarTest each
+// with values "one", "two", and "three":
+//
+// INSTANTIATE_TEST_CASE_P(NumSequence, BarTest, Values("one", "two", "three"));
+//
+// This instantiates tests from test case BazTest each with values 1, 2, 3.5.
+// The exact type of values will depend on the type of parameter in BazTest.
+//
+// INSTANTIATE_TEST_CASE_P(FloatingNumbers, BazTest, Values(1, 2, 3.5));
+//
+// Currently, Values() supports from 1 to 50 parameters.
+//
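+// As a sketch of the type-conversion note above (hypothetical declaration):
+// if BazTest is declared as
+//
+//   class BazTest : public ::testing::TestWithParam<double> { ... };
+//
+// then Values(1, 2, 3.5) above supplies the double parameters 1.0, 2.0,
+// and 3.5.
+//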
+template <typename T1>
+internal::ValueArray1<T1> Values(T1 v1) {
+  return internal::ValueArray1<T1>(v1);
+}
+
+template <typename T1, typename T2>
+internal::ValueArray2<T1, T2> Values(T1 v1, T2 v2) {
+  return internal::ValueArray2<T1, T2>(v1, v2);
+}
+
+template <typename T1, typename T2, typename T3>
+internal::ValueArray3<T1, T2, T3> Values(T1 v1, T2 v2, T3 v3) {
+  return internal::ValueArray3<T1, T2, T3>(v1, v2, v3);
+}
+
+template <typename T1, typename T2, typename T3, typename T4>
+internal::ValueArray4<T1, T2, T3, T4> Values(T1 v1, T2 v2, T3 v3, T4 v4) {
+  return internal::ValueArray4<T1, T2, T3, T4>(v1, v2, v3, v4);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5>
+internal::ValueArray5<T1, T2, T3, T4, T5> Values(T1 v1, T2 v2, T3 v3, T4 v4,
+    T5 v5) {
+  return internal::ValueArray5<T1, T2, T3, T4, T5>(v1, v2, v3, v4, v5);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6>
+internal::ValueArray6<T1, T2, T3, T4, T5, T6> Values(T1 v1, T2 v2, T3 v3,
+    T4 v4, T5 v5, T6 v6) {
+  return internal::ValueArray6<T1, T2, T3, T4, T5, T6>(v1, v2, v3, v4, v5, v6);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7>
+internal::ValueArray7<T1, T2, T3, T4, T5, T6, T7> Values(T1 v1, T2 v2, T3 v3,
+    T4 v4, T5 v5, T6 v6, T7 v7) {
+  return internal::ValueArray7<T1, T2, T3, T4, T5, T6, T7>(v1, v2, v3, v4, v5,
+      v6, v7);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8>
+internal::ValueArray8<T1, T2, T3, T4, T5, T6, T7, T8> Values(T1 v1, T2 v2,
+    T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8) {
+  return internal::ValueArray8<T1, T2, T3, T4, T5, T6, T7, T8>(v1, v2, v3, v4,
+      v5, v6, v7, v8);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9>
+internal::ValueArray9<T1, T2, T3, T4, T5, T6, T7, T8, T9> Values(T1 v1, T2 v2,
+    T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9) {
+  return internal::ValueArray9<T1, T2, T3, T4, T5, T6, T7, T8, T9>(v1, v2, v3,
+      v4, v5, v6, v7, v8, v9);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10>
+internal::ValueArray10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Values(T1 v1,
+    T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10) {
+  return internal::ValueArray10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>(v1,
+      v2, v3, v4, v5, v6, v7, v8, v9, v10);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11>
+internal::ValueArray11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10,
+    T11> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+    T10 v10, T11 v11) {
+  return internal::ValueArray11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10,
+      T11>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12>
+internal::ValueArray12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+    T12> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+    T10 v10, T11 v11, T12 v12) {
+  return internal::ValueArray12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13>
+internal::ValueArray13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
+    T13> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+    T10 v10, T11 v11, T12 v12, T13 v13) {
+  return internal::ValueArray13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14>
+internal::ValueArray14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+    T10 v10, T11 v11, T12 v12, T13 v13, T14 v14) {
+  return internal::ValueArray14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13,
+      v14);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15>
+internal::ValueArray15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8,
+    T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15) {
+  return internal::ValueArray15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12,
+      v13, v14, v15);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16>
+internal::ValueArray16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7,
+    T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15,
+    T16 v16) {
+  return internal::ValueArray16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11,
+      v12, v13, v14, v15, v16);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17>
+internal::ValueArray17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7,
+    T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15,
+    T16 v16, T17 v17) {
+  return internal::ValueArray17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10,
+      v11, v12, v13, v14, v15, v16, v17);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18>
+internal::ValueArray18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6,
+    T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15,
+    T16 v16, T17 v17, T18 v18) {
+  return internal::ValueArray18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18>(v1, v2, v3, v4, v5, v6, v7, v8, v9,
+      v10, v11, v12, v13, v14, v15, v16, v17, v18);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19>
+internal::ValueArray19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5,
+    T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14,
+    T15 v15, T16 v16, T17 v17, T18 v18, T19 v19) {
+  return internal::ValueArray19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19>(v1, v2, v3, v4, v5, v6, v7, v8,
+      v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20>
+internal::ValueArray20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20> Values(T1 v1, T2 v2, T3 v3, T4 v4,
+    T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13,
+    T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20) {
+  return internal::ValueArray20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20>(v1, v2, v3, v4, v5, v6, v7,
+      v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21>
+internal::ValueArray21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21> Values(T1 v1, T2 v2, T3 v3, T4 v4,
+    T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13,
+    T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20, T21 v21) {
+  return internal::ValueArray21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>(v1, v2, v3, v4, v5, v6,
+      v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22>
+internal::ValueArray22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22> Values(T1 v1, T2 v2, T3 v3,
+    T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12,
+    T13 v13, T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20,
+    T21 v21, T22 v22) {
+  return internal::ValueArray22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>(v1, v2, v3, v4,
+      v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19,
+      v20, v21, v22);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23>
+internal::ValueArray23<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Values(T1 v1, T2 v2,
+    T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12,
+    T13 v13, T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20,
+    T21 v21, T22 v22, T23 v23) {
+  return internal::ValueArray23<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>(v1, v2, v3,
+      v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19,
+      v20, v21, v22, v23);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24>
+internal::ValueArray24<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Values(T1 v1, T2 v2,
+    T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12,
+    T13 v13, T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20,
+    T21 v21, T22 v22, T23 v23, T24 v24) {
+  return internal::ValueArray24<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>(v1, v2,
+      v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18,
+      v19, v20, v21, v22, v23, v24);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25>
+internal::ValueArray25<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Values(T1 v1,
+    T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11,
+    T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19,
+    T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25) {
+  return internal::ValueArray25<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>(v1,
+      v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17,
+      v18, v19, v20, v21, v22, v23, v24, v25);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26>
+internal::ValueArray26<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+    T26> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+    T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+    T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+    T26 v26) {
+  return internal::ValueArray26<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15,
+      v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27>
+internal::ValueArray27<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26,
+    T27> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+    T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+    T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+    T26 v26, T27 v27) {
+  return internal::ValueArray27<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14,
+      v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28>
+internal::ValueArray28<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27,
+    T28> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+    T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+    T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+    T26 v26, T27 v27, T28 v28) {
+  return internal::ValueArray28<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13,
+      v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27,
+      v28);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29>
+internal::ValueArray29<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+    T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+    T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+    T26 v26, T27 v27, T28 v28, T29 v29) {
+  return internal::ValueArray29<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12,
+      v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26,
+      v27, v28, v29);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30>
+internal::ValueArray30<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8,
+    T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16,
+    T17 v17, T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24,
+    T25 v25, T26 v26, T27 v27, T28 v28, T29 v29, T30 v30) {
+  return internal::ValueArray30<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11,
+      v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25,
+      v26, v27, v28, v29, v30);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31>
+internal::ValueArray31<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7,
+    T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15,
+    T16 v16, T17 v17, T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23,
+    T24 v24, T25 v25, T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31) {
+  return internal::ValueArray31<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10,
+      v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24,
+      v25, v26, v27, v28, v29, v30, v31);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32>
+internal::ValueArray32<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7,
+    T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15,
+    T16 v16, T17 v17, T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23,
+    T24 v24, T25 v25, T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31,
+    T32 v32) {
+  return internal::ValueArray32<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32>(v1, v2, v3, v4, v5, v6, v7, v8, v9,
+      v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23,
+      v24, v25, v26, v27, v28, v29, v30, v31, v32);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33>
+internal::ValueArray33<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6,
+    T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15,
+    T16 v16, T17 v17, T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23,
+    T24 v24, T25 v25, T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31,
+    T32 v32, T33 v33) {
+  return internal::ValueArray33<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33>(v1, v2, v3, v4, v5, v6, v7, v8,
+      v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23,
+      v24, v25, v26, v27, v28, v29, v30, v31, v32, v33);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34>
+internal::ValueArray34<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5,
+    T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14,
+    T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20, T21 v21, T22 v22,
+    T23 v23, T24 v24, T25 v25, T26 v26, T27 v27, T28 v28, T29 v29, T30 v30,
+    T31 v31, T32 v32, T33 v33, T34 v34) {
+  return internal::ValueArray34<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34>(v1, v2, v3, v4, v5, v6, v7,
+      v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22,
+      v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35>
+internal::ValueArray35<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35> Values(T1 v1, T2 v2, T3 v3, T4 v4,
+    T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13,
+    T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20, T21 v21,
+    T22 v22, T23 v23, T24 v24, T25 v25, T26 v26, T27 v27, T28 v28, T29 v29,
+    T30 v30, T31 v31, T32 v32, T33 v33, T34 v34, T35 v35) {
+  return internal::ValueArray35<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35>(v1, v2, v3, v4, v5, v6,
+      v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21,
+      v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36>
+internal::ValueArray36<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36> Values(T1 v1, T2 v2, T3 v3, T4 v4,
+    T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13,
+    T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20, T21 v21,
+    T22 v22, T23 v23, T24 v24, T25 v25, T26 v26, T27 v27, T28 v28, T29 v29,
+    T30 v30, T31 v31, T32 v32, T33 v33, T34 v34, T35 v35, T36 v36) {
+  return internal::ValueArray36<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36>(v1, v2, v3, v4,
+      v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19,
+      v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33,
+      v34, v35, v36);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37>
+internal::ValueArray37<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37> Values(T1 v1, T2 v2, T3 v3,
+    T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12,
+    T13 v13, T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20,
+    T21 v21, T22 v22, T23 v23, T24 v24, T25 v25, T26 v26, T27 v27, T28 v28,
+    T29 v29, T30 v30, T31 v31, T32 v32, T33 v33, T34 v34, T35 v35, T36 v36,
+    T37 v37) {
+  return internal::ValueArray37<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37>(v1, v2, v3,
+      v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19,
+      v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33,
+      v34, v35, v36, v37);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38>
+internal::ValueArray38<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38> Values(T1 v1, T2 v2,
+    T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12,
+    T13 v13, T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20,
+    T21 v21, T22 v22, T23 v23, T24 v24, T25 v25, T26 v26, T27 v27, T28 v28,
+    T29 v29, T30 v30, T31 v31, T32 v32, T33 v33, T34 v34, T35 v35, T36 v36,
+    T37 v37, T38 v38) {
+  return internal::ValueArray38<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38>(v1, v2,
+      v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18,
+      v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32,
+      v33, v34, v35, v36, v37, v38);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39>
+internal::ValueArray39<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39> Values(T1 v1, T2 v2,
+    T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12,
+    T13 v13, T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20,
+    T21 v21, T22 v22, T23 v23, T24 v24, T25 v25, T26 v26, T27 v27, T28 v28,
+    T29 v29, T30 v30, T31 v31, T32 v32, T33 v33, T34 v34, T35 v35, T36 v36,
+    T37 v37, T38 v38, T39 v39) {
+  return internal::ValueArray39<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39>(v1,
+      v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17,
+      v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31,
+      v32, v33, v34, v35, v36, v37, v38, v39);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40>
+internal::ValueArray40<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40> Values(T1 v1,
+    T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11,
+    T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19,
+    T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25, T26 v26, T27 v27,
+    T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33, T34 v34, T35 v35,
+    T36 v36, T37 v37, T38 v38, T39 v39, T40 v40) {
+  return internal::ValueArray40<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39,
+      T40>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15,
+      v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29,
+      v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41>
+internal::ValueArray41<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40,
+    T41> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+    T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+    T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+    T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+    T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41) {
+  return internal::ValueArray41<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39,
+      T40, T41>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14,
+      v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28,
+      v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42>
+internal::ValueArray42<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41,
+    T42> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+    T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+    T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+    T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+    T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41,
+    T42 v42) {
+  return internal::ValueArray42<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39,
+      T40, T41, T42>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13,
+      v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27,
+      v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41,
+      v42);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43>
+internal::ValueArray43<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42,
+    T43> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+    T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+    T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+    T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+    T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41,
+    T42 v42, T43 v43) {
+  return internal::ValueArray43<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39,
+      T40, T41, T42, T43>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12,
+      v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26,
+      v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40,
+      v41, v42, v43);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44>
+internal::ValueArray44<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+    T44> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8, T9 v9,
+    T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16, T17 v17,
+    T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24, T25 v25,
+    T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32, T33 v33,
+    T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40, T41 v41,
+    T42 v42, T43 v43, T44 v44) {
+  return internal::ValueArray44<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39,
+      T40, T41, T42, T43, T44>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11,
+      v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25,
+      v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39,
+      v40, v41, v42, v43, v44);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45>
+internal::ValueArray45<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+    T44, T45> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7, T8 v8,
+    T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15, T16 v16,
+    T17 v17, T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23, T24 v24,
+    T25 v25, T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31, T32 v32,
+    T33 v33, T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39, T40 v40,
+    T41 v41, T42 v42, T43 v43, T44 v44, T45 v45) {
+  return internal::ValueArray45<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39,
+      T40, T41, T42, T43, T44, T45>(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10,
+      v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24,
+      v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38,
+      v39, v40, v41, v42, v43, v44, v45);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46>
+internal::ValueArray46<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+    T44, T45, T46> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7,
+    T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15,
+    T16 v16, T17 v17, T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23,
+    T24 v24, T25 v25, T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31,
+    T32 v32, T33 v33, T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39,
+    T40 v40, T41 v41, T42 v42, T43 v43, T44 v44, T45 v45, T46 v46) {
+  return internal::ValueArray46<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39,
+      T40, T41, T42, T43, T44, T45, T46>(v1, v2, v3, v4, v5, v6, v7, v8, v9,
+      v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23,
+      v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37,
+      v38, v39, v40, v41, v42, v43, v44, v45, v46);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47>
+internal::ValueArray47<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+    T44, T45, T46, T47> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6, T7 v7,
+    T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15,
+    T16 v16, T17 v17, T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23,
+    T24 v24, T25 v25, T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31,
+    T32 v32, T33 v33, T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39,
+    T40 v40, T41 v41, T42 v42, T43 v43, T44 v44, T45 v45, T46 v46, T47 v47) {
+  return internal::ValueArray47<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39,
+      T40, T41, T42, T43, T44, T45, T46, T47>(v1, v2, v3, v4, v5, v6, v7, v8,
+      v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23,
+      v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37,
+      v38, v39, v40, v41, v42, v43, v44, v45, v46, v47);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47, typename T48>
+internal::ValueArray48<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+    T44, T45, T46, T47, T48> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6,
+    T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14, T15 v15,
+    T16 v16, T17 v17, T18 v18, T19 v19, T20 v20, T21 v21, T22 v22, T23 v23,
+    T24 v24, T25 v25, T26 v26, T27 v27, T28 v28, T29 v29, T30 v30, T31 v31,
+    T32 v32, T33 v33, T34 v34, T35 v35, T36 v36, T37 v37, T38 v38, T39 v39,
+    T40 v40, T41 v41, T42 v42, T43 v43, T44 v44, T45 v45, T46 v46, T47 v47,
+    T48 v48) {
+  return internal::ValueArray48<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39,
+      T40, T41, T42, T43, T44, T45, T46, T47, T48>(v1, v2, v3, v4, v5, v6, v7,
+      v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22,
+      v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36,
+      v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47, typename T48, typename T49>
+internal::ValueArray49<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+    T44, T45, T46, T47, T48, T49> Values(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5,
+    T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13, T14 v14,
+    T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20, T21 v21, T22 v22,
+    T23 v23, T24 v24, T25 v25, T26 v26, T27 v27, T28 v28, T29 v29, T30 v30,
+    T31 v31, T32 v32, T33 v33, T34 v34, T35 v35, T36 v36, T37 v37, T38 v38,
+    T39 v39, T40 v40, T41 v41, T42 v42, T43 v43, T44 v44, T45 v45, T46 v46,
+    T47 v47, T48 v48, T49 v49) {
+  return internal::ValueArray49<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39,
+      T40, T41, T42, T43, T44, T45, T46, T47, T48, T49>(v1, v2, v3, v4, v5, v6,
+      v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21,
+      v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35,
+      v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49);
+}
+
+template <typename T1, typename T2, typename T3, typename T4, typename T5,
+    typename T6, typename T7, typename T8, typename T9, typename T10,
+    typename T11, typename T12, typename T13, typename T14, typename T15,
+    typename T16, typename T17, typename T18, typename T19, typename T20,
+    typename T21, typename T22, typename T23, typename T24, typename T25,
+    typename T26, typename T27, typename T28, typename T29, typename T30,
+    typename T31, typename T32, typename T33, typename T34, typename T35,
+    typename T36, typename T37, typename T38, typename T39, typename T40,
+    typename T41, typename T42, typename T43, typename T44, typename T45,
+    typename T46, typename T47, typename T48, typename T49, typename T50>
+internal::ValueArray50<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13,
+    T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26, T27, T28,
+    T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39, T40, T41, T42, T43,
+    T44, T45, T46, T47, T48, T49, T50> Values(T1 v1, T2 v2, T3 v3, T4 v4,
+    T5 v5, T6 v6, T7 v7, T8 v8, T9 v9, T10 v10, T11 v11, T12 v12, T13 v13,
+    T14 v14, T15 v15, T16 v16, T17 v17, T18 v18, T19 v19, T20 v20, T21 v21,
+    T22 v22, T23 v23, T24 v24, T25 v25, T26 v26, T27 v27, T28 v28, T29 v29,
+    T30 v30, T31 v31, T32 v32, T33 v33, T34 v34, T35 v35, T36 v36, T37 v37,
+    T38 v38, T39 v39, T40 v40, T41 v41, T42 v42, T43 v43, T44 v44, T45 v45,
+    T46 v46, T47 v47, T48 v48, T49 v49, T50 v50) {
+  return internal::ValueArray50<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,
+      T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25,
+      T26, T27, T28, T29, T30, T31, T32, T33, T34, T35, T36, T37, T38, T39,
+      T40, T41, T42, T43, T44, T45, T46, T47, T48, T49, T50>(v1, v2, v3, v4,
+      v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19,
+      v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33,
+      v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47,
+      v48, v49, v50);
+}
+
+// Bool() allows generating tests with parameters in a set of (false, true).
+//
+// Synopsis:
+// Bool()
+//   - returns a generator producing sequences with elements {false, true}.
+//
+// It is useful when testing code that depends on Boolean flags. Combinations
+// of multiple flags can be tested when several Bool()'s are combined using
+// the Combine() function.
+//
+// In the following example all tests in the test case FlagDependentTest
+// will be instantiated twice with parameters false and true.
+//
+// class FlagDependentTest : public testing::TestWithParam<bool> {
+//   virtual void SetUp() {
+//     external_flag = GetParam();
+//   }
+// };
+// INSTANTIATE_TEST_CASE_P(BoolSequence, FlagDependentTest, Bool());
+//
+inline internal::ParamGenerator<bool> Bool() {
+  return Values(false, true);
+}
+
+# if GTEST_HAS_COMBINE
+// Combine() allows the user to combine two or more sequences to produce
+// values of a Cartesian product of those sequences' elements.
+//
+// Synopsis:
+// Combine(gen1, gen2, ..., genN)
+//   - returns a generator producing sequences with elements coming from
+//     the Cartesian product of elements from the sequences generated by
+//     gen1, gen2, ..., genN. The sequence elements will have a type of
+//     tuple<T1, T2, ..., TN> where T1, T2, ..., TN are the types
+//     of elements from sequences produced by gen1, gen2, ..., genN.
+//
+// Combine can have up to 10 arguments. This number is currently limited
+// by the maximum number of elements in the tuple implementation used by Google
+// Test.
+//
+// Example:
+//
+// This will instantiate tests in test case AnimalTest, each with
+// the parameter values tuple("cat", BLACK), tuple("cat", WHITE),
+// tuple("dog", BLACK), and tuple("dog", WHITE):
+//
+// enum Color { BLACK, GRAY, WHITE };
+// class AnimalTest
+//     : public testing::TestWithParam<tuple<const char*, Color> > {...};
+//
+// TEST_P(AnimalTest, AnimalLooksNice) {...}
+//
+// INSTANTIATE_TEST_CASE_P(AnimalVariations, AnimalTest,
+//                         Combine(Values("cat", "dog"),
+//                                 Values(BLACK, WHITE)));
+//
+// This will instantiate tests in FlagDependentTest with all variations of two
+// Boolean flags:
+//
+// class FlagDependentTest
+//     : public testing::TestWithParam<tuple<bool, bool> > {
+//   virtual void SetUp() {
+//     // Assigns external_flag_1 and external_flag_2 values from the tuple.
+//     tie(external_flag_1, external_flag_2) = GetParam();
+//   }
+// };
+//
+// TEST_P(FlagDependentTest, TestFeature1) {
+//   // Test your code using external_flag_1 and external_flag_2 here.
+// }
+// INSTANTIATE_TEST_CASE_P(TwoBoolSequence, FlagDependentTest,
+//                         Combine(Bool(), Bool()));
+//
+template <typename Generator1, typename Generator2>
+internal::CartesianProductHolder2<Generator1, Generator2> Combine(
+    const Generator1& g1, const Generator2& g2) {
+  return internal::CartesianProductHolder2<Generator1, Generator2>(
+      g1, g2);
+}
+
+template <typename Generator1, typename Generator2, typename Generator3>
+internal::CartesianProductHolder3<Generator1, Generator2, Generator3> Combine(
+    const Generator1& g1, const Generator2& g2, const Generator3& g3) {
+  return internal::CartesianProductHolder3<Generator1, Generator2, Generator3>(
+      g1, g2, g3);
+}
+
+template <typename Generator1, typename Generator2, typename Generator3,
+    typename Generator4>
+internal::CartesianProductHolder4<Generator1, Generator2, Generator3,
+    Generator4> Combine(
+    const Generator1& g1, const Generator2& g2, const Generator3& g3,
+        const Generator4& g4) {
+  return internal::CartesianProductHolder4<Generator1, Generator2, Generator3,
+      Generator4>(
+      g1, g2, g3, g4);
+}
+
+template <typename Generator1, typename Generator2, typename Generator3,
+    typename Generator4, typename Generator5>
+internal::CartesianProductHolder5<Generator1, Generator2, Generator3,
+    Generator4, Generator5> Combine(
+    const Generator1& g1, const Generator2& g2, const Generator3& g3,
+        const Generator4& g4, const Generator5& g5) {
+  return internal::CartesianProductHolder5<Generator1, Generator2, Generator3,
+      Generator4, Generator5>(
+      g1, g2, g3, g4, g5);
+}
+
+template <typename Generator1, typename Generator2, typename Generator3,
+    typename Generator4, typename Generator5, typename Generator6>
+internal::CartesianProductHolder6<Generator1, Generator2, Generator3,
+    Generator4, Generator5, Generator6> Combine(
+    const Generator1& g1, const Generator2& g2, const Generator3& g3,
+        const Generator4& g4, const Generator5& g5, const Generator6& g6) {
+  return internal::CartesianProductHolder6<Generator1, Generator2, Generator3,
+      Generator4, Generator5, Generator6>(
+      g1, g2, g3, g4, g5, g6);
+}
+
+template <typename Generator1, typename Generator2, typename Generator3,
+    typename Generator4, typename Generator5, typename Generator6,
+    typename Generator7>
+internal::CartesianProductHolder7<Generator1, Generator2, Generator3,
+    Generator4, Generator5, Generator6, Generator7> Combine(
+    const Generator1& g1, const Generator2& g2, const Generator3& g3,
+        const Generator4& g4, const Generator5& g5, const Generator6& g6,
+        const Generator7& g7) {
+  return internal::CartesianProductHolder7<Generator1, Generator2, Generator3,
+      Generator4, Generator5, Generator6, Generator7>(
+      g1, g2, g3, g4, g5, g6, g7);
+}
+
+template <typename Generator1, typename Generator2, typename Generator3,
+    typename Generator4, typename Generator5, typename Generator6,
+    typename Generator7, typename Generator8>
+internal::CartesianProductHolder8<Generator1, Generator2, Generator3,
+    Generator4, Generator5, Generator6, Generator7, Generator8> Combine(
+    const Generator1& g1, const Generator2& g2, const Generator3& g3,
+        const Generator4& g4, const Generator5& g5, const Generator6& g6,
+        const Generator7& g7, const Generator8& g8) {
+  return internal::CartesianProductHolder8<Generator1, Generator2, Generator3,
+      Generator4, Generator5, Generator6, Generator7, Generator8>(
+      g1, g2, g3, g4, g5, g6, g7, g8);
+}
+
+template <typename Generator1, typename Generator2, typename Generator3,
+    typename Generator4, typename Generator5, typename Generator6,
+    typename Generator7, typename Generator8, typename Generator9>
+internal::CartesianProductHolder9<Generator1, Generator2, Generator3,
+    Generator4, Generator5, Generator6, Generator7, Generator8,
+    Generator9> Combine(
+    const Generator1& g1, const Generator2& g2, const Generator3& g3,
+        const Generator4& g4, const Generator5& g5, const Generator6& g6,
+        const Generator7& g7, const Generator8& g8, const Generator9& g9) {
+  return internal::CartesianProductHolder9<Generator1, Generator2, Generator3,
+      Generator4, Generator5, Generator6, Generator7, Generator8, Generator9>(
+      g1, g2, g3, g4, g5, g6, g7, g8, g9);
+}
+
+template <typename Generator1, typename Generator2, typename Generator3,
+    typename Generator4, typename Generator5, typename Generator6,
+    typename Generator7, typename Generator8, typename Generator9,
+    typename Generator10>
+internal::CartesianProductHolder10<Generator1, Generator2, Generator3,
+    Generator4, Generator5, Generator6, Generator7, Generator8, Generator9,
+    Generator10> Combine(
+    const Generator1& g1, const Generator2& g2, const Generator3& g3,
+        const Generator4& g4, const Generator5& g5, const Generator6& g6,
+        const Generator7& g7, const Generator8& g8, const Generator9& g9,
+        const Generator10& g10) {
+  return internal::CartesianProductHolder10<Generator1, Generator2, Generator3,
+      Generator4, Generator5, Generator6, Generator7, Generator8, Generator9,
+      Generator10>(
+      g1, g2, g3, g4, g5, g6, g7, g8, g9, g10);
+}
+# endif  // GTEST_HAS_COMBINE
+
+
+
+# define TEST_P(test_case_name, test_name) \
+  class GTEST_TEST_CLASS_NAME_(test_case_name, test_name) \
+      : public test_case_name { \
+   public: \
+    GTEST_TEST_CLASS_NAME_(test_case_name, test_name)() {} \
+    virtual void TestBody(); \
+   private: \
+    static int AddToRegistry() { \
+      ::testing::UnitTest::GetInstance()->parameterized_test_registry(). \
+          GetTestCasePatternHolder<test_case_name>(\
+              #test_case_name, __FILE__, __LINE__)->AddTestPattern(\
+                  #test_case_name, \
+                  #test_name, \
+                  new ::testing::internal::TestMetaFactory< \
+                      GTEST_TEST_CLASS_NAME_(test_case_name, test_name)>()); \
+      return 0; \
+    } \
+    static int gtest_registering_dummy_; \
+    GTEST_DISALLOW_COPY_AND_ASSIGN_(\
+        GTEST_TEST_CLASS_NAME_(test_case_name, test_name)); \
+  }; \
+  int GTEST_TEST_CLASS_NAME_(test_case_name, \
+                             test_name)::gtest_registering_dummy_ = \
+      GTEST_TEST_CLASS_NAME_(test_case_name, test_name)::AddToRegistry(); \
+  void GTEST_TEST_CLASS_NAME_(test_case_name, test_name)::TestBody()
+
+# define INSTANTIATE_TEST_CASE_P(prefix, test_case_name, generator) \
+  ::testing::internal::ParamGenerator<test_case_name::ParamType> \
+      gtest_##prefix##test_case_name##_EvalGenerator_() { return generator; } \
+  int gtest_##prefix##test_case_name##_dummy_ = \
+      ::testing::UnitTest::GetInstance()->parameterized_test_registry(). \
+          GetTestCasePatternHolder<test_case_name>(\
+              #test_case_name, __FILE__, __LINE__)->AddTestCaseInstantiation(\
+                  #prefix, \
+                  &gtest_##prefix##test_case_name##_EvalGenerator_, \
+                  __FILE__, __LINE__)
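+
+// A minimal usage sketch of the two macros above (illustrative only;
+// FooParamTest, IsNonNegative and the Small prefix are hypothetical names,
+// not part of this header):
+//
+//   class FooParamTest : public ::testing::TestWithParam<int> {};
+//
+//   TEST_P(FooParamTest, IsNonNegative) {
+//     EXPECT_GE(GetParam(), 0);
+//   }
+//
+//   INSTANTIATE_TEST_CASE_P(Small, FooParamTest,
+//                           ::testing::Values(0, 1, 2));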
+
+}  // namespace testing
+
+#endif  // GTEST_HAS_PARAM_TEST
+
+#endif  // GTEST_INCLUDE_GTEST_GTEST_PARAM_TEST_H_
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan at google.com (Zhanyong Wan)
+//
+// Google C++ Testing Framework definitions useful in production code.
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_PROD_H_
+#define GTEST_INCLUDE_GTEST_GTEST_PROD_H_
+
+// When you need to test the private or protected members of a class,
+// use the FRIEND_TEST macro to declare your tests as friends of the
+// class.  For example:
+//
+// class MyClass {
+//  private:
+//   void MyMethod();
+//   FRIEND_TEST(MyClassTest, MyMethod);
+// };
+//
+// class MyClassTest : public testing::Test {
+//   // ...
+// };
+//
+// TEST_F(MyClassTest, MyMethod) {
+//   // Can call MyClass::MyMethod() here.
+// }
+
+#define FRIEND_TEST(test_case_name, test_name)\
+friend class test_case_name##_##test_name##_Test
+
+#endif  // GTEST_INCLUDE_GTEST_GTEST_PROD_H_
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: mheule at google.com (Markus Heule)
+//
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_TEST_PART_H_
+#define GTEST_INCLUDE_GTEST_GTEST_TEST_PART_H_
+
+#include <iosfwd>
+#include <vector>
+
+namespace testing {
+
+// A copyable object representing the result of a test part (i.e. an
+// assertion or an explicit FAIL(), ADD_FAILURE(), or SUCCEED()).
+//
+// Don't inherit from TestPartResult as its destructor is not virtual.
+class GTEST_API_ TestPartResult {
+ public:
+  // The possible outcomes of a test part (i.e. an assertion or an
+  // explicit SUCCEED(), FAIL(), or ADD_FAILURE()).
+  enum Type {
+    kSuccess,          // Succeeded.
+    kNonFatalFailure,  // Failed but the test can continue.
+    kFatalFailure      // Failed and the test should be terminated.
+  };
+
+  // C'tor.  TestPartResult does NOT have a default constructor.
+  // Always use this constructor (with parameters) to create a
+  // TestPartResult object.
+  TestPartResult(Type a_type,
+                 const char* a_file_name,
+                 int a_line_number,
+                 const char* a_message)
+      : type_(a_type),
+        file_name_(a_file_name),
+        line_number_(a_line_number),
+        summary_(ExtractSummary(a_message)),
+        message_(a_message) {
+  }
+
+  // Gets the outcome of the test part.
+  Type type() const { return type_; }
+
+  // Gets the name of the source file where the test part took place, or
+  // NULL if it's unknown.
+  const char* file_name() const { return file_name_.c_str(); }
+
+  // Gets the line in the source file where the test part took place,
+  // or -1 if it's unknown.
+  int line_number() const { return line_number_; }
+
+  // Gets the summary of the failure message.
+  const char* summary() const { return summary_.c_str(); }
+
+  // Gets the message associated with the test part.
+  const char* message() const { return message_.c_str(); }
+
+  // Returns true iff the test part passed.
+  bool passed() const { return type_ == kSuccess; }
+
+  // Returns true iff the test part failed.
+  bool failed() const { return type_ != kSuccess; }
+
+  // Returns true iff the test part non-fatally failed.
+  bool nonfatally_failed() const { return type_ == kNonFatalFailure; }
+
+  // Returns true iff the test part fatally failed.
+  bool fatally_failed() const { return type_ == kFatalFailure; }
+
+ private:
+  Type type_;
+
+  // Gets the summary of the failure message by omitting the stack
+  // trace in it.
+  static internal::String ExtractSummary(const char* message);
+
+  // The name of the source file where the test part took place, or
+  // NULL if the source file is unknown.
+  internal::String file_name_;
+  // The line in the source file where the test part took place, or -1
+  // if the line number is unknown.
+  int line_number_;
+  internal::String summary_;  // The test failure summary.
+  internal::String message_;  // The test failure message.
+};
+
+// Prints a TestPartResult object.
+std::ostream& operator<<(std::ostream& os, const TestPartResult& result);
+
+// An array of TestPartResult objects.
+//
+// Don't inherit from TestPartResultArray as its destructor is not
+// virtual.
+class GTEST_API_ TestPartResultArray {
+ public:
+  TestPartResultArray() {}
+
+  // Appends the given TestPartResult to the array.
+  void Append(const TestPartResult& result);
+
+  // Returns the TestPartResult at the given index (0-based).
+  const TestPartResult& GetTestPartResult(int index) const;
+
+  // Returns the number of TestPartResult objects in the array.
+  int size() const;
+
+ private:
+  std::vector<TestPartResult> array_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestPartResultArray);
+};
+
+// This interface knows how to report a test part result.
+class TestPartResultReporterInterface {
+ public:
+  virtual ~TestPartResultReporterInterface() {}
+
+  virtual void ReportTestPartResult(const TestPartResult& result) = 0;
+};
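+
+// A minimal sketch of a reporter implementing the interface above
+// (PrintingReporter is a hypothetical name; most programs simply rely on the
+// reporters that Google Test installs by default):
+//
+//   class PrintingReporter : public testing::TestPartResultReporterInterface {
+//    public:
+//     virtual void ReportTestPartResult(const testing::TestPartResult& result) {
+//       if (result.failed())
+//         printf("Failure: %s\n", result.summary());
+//     }
+//   };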
+
+namespace internal {
+
+// This helper class is used by {ASSERT|EXPECT}_NO_FATAL_FAILURE to check if a
+// statement generates new fatal failures. To do so it registers itself as the
+// current test part result reporter. Besides checking if fatal failures were
+// reported, it simply delegates the reporting to the original result reporter.
+// The original result reporter is restored in the destructor.
+// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+class GTEST_API_ HasNewFatalFailureHelper
+    : public TestPartResultReporterInterface {
+ public:
+  HasNewFatalFailureHelper();
+  virtual ~HasNewFatalFailureHelper();
+  virtual void ReportTestPartResult(const TestPartResult& result);
+  bool has_new_fatal_failure() const { return has_new_fatal_failure_; }
+ private:
+  bool has_new_fatal_failure_;
+  TestPartResultReporterInterface* original_reporter_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(HasNewFatalFailureHelper);
+};
+
+}  // namespace internal
+
+}  // namespace testing
+
+#endif  // GTEST_INCLUDE_GTEST_GTEST_TEST_PART_H_
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan at google.com (Zhanyong Wan)
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_TYPED_TEST_H_
+#define GTEST_INCLUDE_GTEST_GTEST_TYPED_TEST_H_
+
+// This header implements typed tests and type-parameterized tests.
+
+// Typed (aka type-driven) tests repeat the same test for types in a
+// list.  You must know which types you want to test with when writing
+// typed tests. Here's how you do it:
+
+#if 0
+
+// First, define a fixture class template.  It should be parameterized
+// by a type.  Remember to derive it from testing::Test.
+template <typename T>
+class FooTest : public testing::Test {
+ public:
+  ...
+  typedef std::list<T> List;
+  static T shared_;
+  T value_;
+};
+
+// Next, associate a list of types with the test case, which will be
+// repeated for each type in the list.  The typedef is necessary for
+// the macro to parse correctly.
+typedef testing::Types<char, int, unsigned int> MyTypes;
+TYPED_TEST_CASE(FooTest, MyTypes);
+
+// If the type list contains only one type, you can write that type
+// directly without Types<...>:
+//   TYPED_TEST_CASE(FooTest, int);
+
+// Then, use TYPED_TEST() instead of TEST_F() to define as many typed
+// tests for this test case as you want.
+TYPED_TEST(FooTest, DoesBlah) {
+  // Inside a test, refer to TypeParam to get the type parameter.
+//   // Since we are inside a derived class template, C++ requires us to
+  // visit the members of FooTest via 'this'.
+  TypeParam n = this->value_;
+
+  // To visit static members of the fixture, add the TestFixture::
+  // prefix.
+  n += TestFixture::shared_;
+
+  // To refer to typedefs in the fixture, add the "typename
+  // TestFixture::" prefix.
+  typename TestFixture::List values;
+  values.push_back(n);
+  ...
+}
+
+TYPED_TEST(FooTest, HasPropertyA) { ... }
+
+#endif  // 0
+
+// Type-parameterized tests are abstract test patterns parameterized
+// by a type.  Compared with typed tests, type-parameterized tests
+// allow you to define the test pattern without knowing what the type
+// parameters are.  The defined pattern can be instantiated with
+// different types any number of times, in any number of translation
+// units.
+//
+// If you are designing an interface or concept, you can define a
+// suite of type-parameterized tests to verify properties that any
+// valid implementation of the interface/concept should have.  Then,
+// each implementation can easily instantiate the test suite to verify
+// that it conforms to the requirements, without having to write
+// similar tests repeatedly.  Here's an example:
+
+#if 0
+
+// First, define a fixture class template.  It should be parameterized
+// by a type.  Remember to derive it from testing::Test.
+template <typename T>
+class FooTest : public testing::Test {
+  ...
+};
+
+// Next, declare that you will define a type-parameterized test case
+// (the _P suffix is for "parameterized" or "pattern", whichever you
+// prefer):
+TYPED_TEST_CASE_P(FooTest);
+
+// Then, use TYPED_TEST_P() to define as many type-parameterized tests
+// for this type-parameterized test case as you want.
+TYPED_TEST_P(FooTest, DoesBlah) {
+  // Inside a test, refer to TypeParam to get the type parameter.
+  TypeParam n = 0;
+  ...
+}
+
+TYPED_TEST_P(FooTest, HasPropertyA) { ... }
+
+// Now the tricky part: you need to register all test patterns before
+// you can instantiate them.  The first argument of the macro is the
+// test case name; the rest are the names of the tests in this test
+// case.
+REGISTER_TYPED_TEST_CASE_P(FooTest,
+                           DoesBlah, HasPropertyA);
+
+// Finally, you are free to instantiate the pattern with the types you
+// want.  If you put the above code in a header file, you can #include
+// it in multiple C++ source files and instantiate it multiple times.
+//
+// To distinguish different instances of the pattern, the first
+// argument to the INSTANTIATE_* macro is a prefix that will be added
+// to the actual test case name.  Remember to pick unique prefixes for
+// different instances.
+typedef testing::Types<char, int, unsigned int> MyTypes;
+INSTANTIATE_TYPED_TEST_CASE_P(My, FooTest, MyTypes);
+
+// If the type list contains only one type, you can write that type
+// directly without Types<...>:
+//   INSTANTIATE_TYPED_TEST_CASE_P(My, FooTest, int);
+
+#endif  // 0
+
+
+// Implements typed tests.
+
+#if GTEST_HAS_TYPED_TEST
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// Expands to the name of the typedef for the type parameters of the
+// given test case.
+# define GTEST_TYPE_PARAMS_(TestCaseName) gtest_type_params_##TestCaseName##_
+
+// The 'Types' template argument below must have spaces around it
+// since some compilers may choke on '>>' when passing a template
+// instance (e.g. Types<int>)
+# define TYPED_TEST_CASE(CaseName, Types) \
+  typedef ::testing::internal::TypeList< Types >::type \
+      GTEST_TYPE_PARAMS_(CaseName)
+
+# define TYPED_TEST(CaseName, TestName) \
+  template <typename gtest_TypeParam_> \
+  class GTEST_TEST_CLASS_NAME_(CaseName, TestName) \
+      : public CaseName<gtest_TypeParam_> { \
+   private: \
+    typedef CaseName<gtest_TypeParam_> TestFixture; \
+    typedef gtest_TypeParam_ TypeParam; \
+    virtual void TestBody(); \
+  }; \
+  bool gtest_##CaseName##_##TestName##_registered_ GTEST_ATTRIBUTE_UNUSED_ = \
+      ::testing::internal::TypeParameterizedTest< \
+          CaseName, \
+          ::testing::internal::TemplateSel< \
+              GTEST_TEST_CLASS_NAME_(CaseName, TestName)>, \
+          GTEST_TYPE_PARAMS_(CaseName)>::Register(\
+              "", #CaseName, #TestName, 0); \
+  template <typename gtest_TypeParam_> \
+  void GTEST_TEST_CLASS_NAME_(CaseName, TestName)<gtest_TypeParam_>::TestBody()
+
+#endif  // GTEST_HAS_TYPED_TEST
+
+// Implements type-parameterized tests.
+
+#if GTEST_HAS_TYPED_TEST_P
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// Expands to the namespace name that the type-parameterized tests for
+// the given type-parameterized test case are defined in.  The exact
+// name of the namespace is subject to change without notice.
+# define GTEST_CASE_NAMESPACE_(TestCaseName) \
+  gtest_case_##TestCaseName##_
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+//
+// Expands to the name of the variable used to remember the names of
+// the defined tests in the given test case.
+# define GTEST_TYPED_TEST_CASE_P_STATE_(TestCaseName) \
+  gtest_typed_test_case_p_state_##TestCaseName##_
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE DIRECTLY.
+//
+// Expands to the name of the variable used to remember the names of
+// the registered tests in the given test case.
+# define GTEST_REGISTERED_TEST_NAMES_(TestCaseName) \
+  gtest_registered_test_names_##TestCaseName##_
+
+// The variables defined in the type-parameterized test macros are
+// static as typically these macros are used in a .h file that can be
+// #included in multiple translation units linked together.
+# define TYPED_TEST_CASE_P(CaseName) \
+  static ::testing::internal::TypedTestCasePState \
+      GTEST_TYPED_TEST_CASE_P_STATE_(CaseName)
+
+# define TYPED_TEST_P(CaseName, TestName) \
+  namespace GTEST_CASE_NAMESPACE_(CaseName) { \
+  template <typename gtest_TypeParam_> \
+  class TestName : public CaseName<gtest_TypeParam_> { \
+   private: \
+    typedef CaseName<gtest_TypeParam_> TestFixture; \
+    typedef gtest_TypeParam_ TypeParam; \
+    virtual void TestBody(); \
+  }; \
+  static bool gtest_##TestName##_defined_ GTEST_ATTRIBUTE_UNUSED_ = \
+      GTEST_TYPED_TEST_CASE_P_STATE_(CaseName).AddTestName(\
+          __FILE__, __LINE__, #CaseName, #TestName); \
+  } \
+  template <typename gtest_TypeParam_> \
+  void GTEST_CASE_NAMESPACE_(CaseName)::TestName<gtest_TypeParam_>::TestBody()
+
+# define REGISTER_TYPED_TEST_CASE_P(CaseName, ...) \
+  namespace GTEST_CASE_NAMESPACE_(CaseName) { \
+  typedef ::testing::internal::Templates<__VA_ARGS__>::type gtest_AllTests_; \
+  } \
+  static const char* const GTEST_REGISTERED_TEST_NAMES_(CaseName) = \
+      GTEST_TYPED_TEST_CASE_P_STATE_(CaseName).VerifyRegisteredTestNames(\
+          __FILE__, __LINE__, #__VA_ARGS__)
+
+// The 'Types' template argument below must have spaces around it
+// since some compilers may choke on '>>' when passing a template
+// instance (e.g. Types<int>)
+# define INSTANTIATE_TYPED_TEST_CASE_P(Prefix, CaseName, Types) \
+  bool gtest_##Prefix##_##CaseName GTEST_ATTRIBUTE_UNUSED_ = \
+      ::testing::internal::TypeParameterizedTestCase<CaseName, \
+          GTEST_CASE_NAMESPACE_(CaseName)::gtest_AllTests_, \
+          ::testing::internal::TypeList< Types >::type>::Register(\
+              #Prefix, #CaseName, GTEST_REGISTERED_TEST_NAMES_(CaseName))
+
+#endif  // GTEST_HAS_TYPED_TEST_P
+
+#endif  // GTEST_INCLUDE_GTEST_GTEST_TYPED_TEST_H_
+
+// Depending on the platform, different string classes are available.
+// On Linux, in addition to ::std::string, Google also makes use of
+// class ::string, which has the same interface as ::std::string, but
+// has a different implementation.
+//
+// The user can define GTEST_HAS_GLOBAL_STRING to 1 to indicate that
+// ::string is available AND is a distinct type from ::std::string, or
+// define it to 0 to indicate otherwise.
+//
+// If the user's ::std::string and ::string are the same class due to
+// aliasing, he should define GTEST_HAS_GLOBAL_STRING to 0.
+//
+// If the user doesn't define GTEST_HAS_GLOBAL_STRING, it is defined
+// heuristically.
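+//
+// For example, a build system that knows ::string is merely an alias of
+// ::std::string might pass the (illustrative) compiler flag
+//
+//   -DGTEST_HAS_GLOBAL_STRING=0
+//
+// to state that explicitly instead of relying on the heuristic.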
+
+namespace testing {
+
+// Declares the flags.
+
+// This flag temporarily enables disabled tests.
+GTEST_DECLARE_bool_(also_run_disabled_tests);
+
+// This flag makes Google Test break into the debugger on assertion failure.
+GTEST_DECLARE_bool_(break_on_failure);
+
+// This flag controls whether Google Test catches all test-thrown exceptions
+// and logs them as failures.
+GTEST_DECLARE_bool_(catch_exceptions);
+
+// This flag enables using colors in terminal output. Available values are
+// "yes" to enable colors, "no" (disable colors), or "auto" (the default)
+// to let Google Test decide.
+GTEST_DECLARE_string_(color);
+
+// This flag sets the filter that selects, by name and using a glob pattern,
+// the tests to run. If the filter is not given, all tests are executed.
+GTEST_DECLARE_string_(filter);
+
+// This flag causes Google Test to list the available tests. None of the
+// listed tests are actually run if the flag is provided.
+GTEST_DECLARE_bool_(list_tests);
+
+// This flag controls whether Google Test emits a detailed XML report to a file
+// in addition to its normal textual output.
+GTEST_DECLARE_string_(output);
+
+// This flag controls whether Google Test prints the elapsed time for each
+// test.
+GTEST_DECLARE_bool_(print_time);
+
+// This flag specifies the random number seed.
+GTEST_DECLARE_int32_(random_seed);
+
+// This flag sets how many times the tests are repeated. The default value
+// is 1. If the value is -1, the tests are repeated forever.
+GTEST_DECLARE_int32_(repeat);
+
+// This flag controls whether Google Test includes Google Test internal
+// stack frames in failure stack traces.
+GTEST_DECLARE_bool_(show_internal_stack_frames);
+
+// When this flag is specified, tests' order is randomized on every iteration.
+GTEST_DECLARE_bool_(shuffle);
+
+// This flag specifies the maximum number of stack frames to be
+// printed in a failure message.
+GTEST_DECLARE_int32_(stack_trace_depth);
+
+// When this flag is specified, a failed assertion will throw an
+// exception if exceptions are enabled, or exit the program with a
+// non-zero code otherwise.
+GTEST_DECLARE_bool_(throw_on_failure);
+
+// When this flag is set with a "host:port" string, on supported
+// platforms test results are streamed to the specified port on
+// the specified host machine.
+GTEST_DECLARE_string_(stream_result_to);
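+
+// As a sketch of how the flags above are typically consumed (the binary name
+// my_test is hypothetical), each GTEST_DECLARE_* corresponds to a --gtest_*
+// command line option or a GTEST_* environment variable, e.g.:
+//
+//   ./my_test --gtest_filter=FooTest.* --gtest_repeat=3 --gtest_shuffle
+//   GTEST_COLOR=no ./my_test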
+
+// The upper limit for valid stack trace depths.
+const int kMaxStackTraceDepth = 100;
+
+namespace internal {
+
+class AssertHelper;
+class DefaultGlobalTestPartResultReporter;
+class ExecDeathTest;
+class NoExecDeathTest;
+class FinalSuccessChecker;
+class GTestFlagSaver;
+class TestResultAccessor;
+class TestEventListenersAccessor;
+class TestEventRepeater;
+class WindowsDeathTest;
+class UnitTestImpl* GetUnitTestImpl();
+void ReportFailureInUnknownLocation(TestPartResult::Type result_type,
+                                    const String& message);
+
+// Converts a streamable value to a String.  A NULL pointer is
+// converted to "(null)".  When the input value is a ::string,
+// ::std::string, ::wstring, or ::std::wstring object, each NUL
+// character in it is replaced with "\\0".
+// Declared in gtest-internal.h but defined here, so that it has access
+// to the definition of the Message class, required by the ARM
+// compiler.
+template <typename T>
+String StreamableToString(const T& streamable) {
+  return (Message() << streamable).GetString();
+}
+
+}  // namespace internal
+
+// The friend relationship of some of these classes is cyclic.
+// If we don't forward-declare them, the compiler might confuse the classes
+// in friendship clauses with same-named classes in the surrounding scope.
+class Test;
+class TestCase;
+class TestInfo;
+class UnitTest;
+
+// A class for indicating whether an assertion was successful.  When
+// the assertion wasn't successful, the AssertionResult object
+// remembers a non-empty message that describes how it failed.
+//
+// To create an instance of this class, use one of the factory functions
+// (AssertionSuccess() and AssertionFailure()).
+//
+// This class is useful for two purposes:
+//   1. Defining predicate functions to be used with Boolean test assertions
+//      EXPECT_TRUE/EXPECT_FALSE and their ASSERT_ counterparts
+//   2. Defining predicate-format functions to be
+//      used with predicate assertions (ASSERT_PRED_FORMAT*, etc).
+//
+// For example, if you define IsEven predicate:
+//
+//   testing::AssertionResult IsEven(int n) {
+//     if ((n % 2) == 0)
+//       return testing::AssertionSuccess();
+//     else
+//       return testing::AssertionFailure() << n << " is odd";
+//   }
+//
+// Then the failed expectation EXPECT_TRUE(IsEven(Fib(5)))
+// will print the message
+//
+//   Value of: IsEven(Fib(5))
+//     Actual: false (5 is odd)
+//   Expected: true
+//
+// instead of a more opaque
+//
+//   Value of: IsEven(Fib(5))
+//     Actual: false
+//   Expected: true
+//
+// in case IsEven is a simple Boolean predicate.
+//
+// If you expect your predicate to be reused and want to support informative
+// messages in EXPECT_FALSE and ASSERT_FALSE (negative assertions show up
+// about half as often as positive ones in our tests), supply messages for
+// both success and failure cases:
+//
+//   testing::AssertionResult IsEven(int n) {
+//     if ((n % 2) == 0)
+//       return testing::AssertionSuccess() << n << " is even";
+//     else
+//       return testing::AssertionFailure() << n << " is odd";
+//   }
+//
+// Then a statement EXPECT_FALSE(IsEven(Fib(6))) will print
+//
+//   Value of: IsEven(Fib(6))
+//     Actual: true (8 is even)
+//   Expected: false
+//
+// NB: Predicates that support negative Boolean assertions have reduced
+// performance in positive ones, so be careful not to use them in tests
+// that have lots (tens of thousands) of positive Boolean assertions.
+//
+// To use this class with EXPECT_PRED_FORMAT assertions such as:
+//
+//   // Verifies that Foo() returns an even number.
+//   EXPECT_PRED_FORMAT1(IsEven, Foo());
+//
+// you need to define:
+//
+//   testing::AssertionResult IsEven(const char* expr, int n) {
+//     if ((n % 2) == 0)
+//       return testing::AssertionSuccess();
+//     else
+//       return testing::AssertionFailure()
+//         << "Expected: " << expr << " is even\n  Actual: it's " << n;
+//   }
+//
+// If Foo() returns 5, you will see the following message:
+//
+//   Expected: Foo() is even
+//     Actual: it's 5
+//
+class GTEST_API_ AssertionResult {
+ public:
+  // Copy constructor.
+  // Used in EXPECT_TRUE/FALSE(assertion_result).
+  AssertionResult(const AssertionResult& other);
+  // Used in the EXPECT_TRUE/FALSE(bool_expression).
+  explicit AssertionResult(bool success) : success_(success) {}
+
+  // Returns true iff the assertion succeeded.
+  operator bool() const { return success_; }  // NOLINT
+
+  // Returns the assertion's negation. Used with EXPECT/ASSERT_FALSE.
+  AssertionResult operator!() const;
+
+  // Returns the text streamed into this AssertionResult. Test assertions
+  // use it when they fail (i.e., the predicate's outcome doesn't match the
+  // assertion's expectation). When nothing has been streamed into the
+  // object, returns an empty string.
+  const char* message() const {
+    return message_.get() != NULL ?  message_->c_str() : "";
+  }
+  // TODO(vladl at google.com): Remove this after making sure no clients use it.
+  // Deprecated; please use message() instead.
+  const char* failure_message() const { return message(); }
+
+  // Streams a custom failure message into this object.
+  template <typename T> AssertionResult& operator<<(const T& value) {
+    AppendMessage(Message() << value);
+    return *this;
+  }
+
+  // Allows streaming basic output manipulators such as endl or flush into
+  // this object.
+  AssertionResult& operator<<(
+      ::std::ostream& (*basic_manipulator)(::std::ostream& stream)) {
+    AppendMessage(Message() << basic_manipulator);
+    return *this;
+  }
+
+ private:
+  // Appends the contents of message to message_.
+  void AppendMessage(const Message& a_message) {
+    if (message_.get() == NULL)
+      message_.reset(new ::std::string);
+    message_->append(a_message.GetString().c_str());
+  }
+
+  // Stores result of the assertion predicate.
+  bool success_;
+  // Stores the message describing the condition in case the expectation
+  // construct is not satisfied with the predicate's outcome.
+  // Referenced via a pointer to avoid taking too much stack frame space
+  // with test assertions.
+  internal::scoped_ptr< ::std::string> message_;
+
+  GTEST_DISALLOW_ASSIGN_(AssertionResult);
+};
+
+// Makes a successful assertion result.
+GTEST_API_ AssertionResult AssertionSuccess();
+
+// Makes a failed assertion result.
+GTEST_API_ AssertionResult AssertionFailure();
+
+// Makes a failed assertion result with the given failure message.
+// Deprecated; use AssertionFailure() << msg.
+GTEST_API_ AssertionResult AssertionFailure(const Message& msg);
+
+// The abstract class that all tests inherit from.
+//
+// In Google Test, a unit test program contains one or many TestCases, and
+// each TestCase contains one or many Tests.
+//
+// When you define a test using the TEST macro, you don't need to
+// explicitly derive from Test - the TEST macro automatically does
+// this for you.
+//
+// The only time you derive from Test is when defining a test fixture
+// to be used in a TEST_F.  For example:
+//
+//   class FooTest : public testing::Test {
+//    protected:
+//     virtual void SetUp() { ... }
+//     virtual void TearDown() { ... }
+//     ...
+//   };
+//
+//   TEST_F(FooTest, Bar) { ... }
+//   TEST_F(FooTest, Baz) { ... }
+//
+// Test is not copyable.
+class GTEST_API_ Test {
+ public:
+  friend class TestInfo;
+
+  // Defines types for pointers to functions that set up and tear down
+  // a test case.
+  typedef internal::SetUpTestCaseFunc SetUpTestCaseFunc;
+  typedef internal::TearDownTestCaseFunc TearDownTestCaseFunc;
+
+  // The d'tor is virtual as we intend to inherit from Test.
+  virtual ~Test();
+
+  // Sets up the stuff shared by all tests in this test case.
+  //
+  // Google Test will call Foo::SetUpTestCase() before running the first
+  // test in test case Foo.  Hence a sub-class can define its own
+  // SetUpTestCase() method to shadow the one defined in the super
+  // class.
+  static void SetUpTestCase() {}
+
+  // Tears down the stuff shared by all tests in this test case.
+  //
+  // Google Test will call Foo::TearDownTestCase() after running the last
+  // test in test case Foo.  Hence a sub-class can define its own
+  // TearDownTestCase() method to shadow the one defined in the super
+  // class.
+  static void TearDownTestCase() {}
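+
+  // A sketch of how a fixture might use these two hooks (FooTest, Resource
+  // and shared_resource_ are hypothetical names):
+  //
+  //   class FooTest : public testing::Test {
+  //    protected:
+  //     static void SetUpTestCase() { shared_resource_ = new Resource; }
+  //     static void TearDownTestCase() {
+  //       delete shared_resource_;
+  //       shared_resource_ = NULL;
+  //     }
+  //     static Resource* shared_resource_;
+  //   };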
+
+  // Returns true iff the current test has a fatal failure.
+  static bool HasFatalFailure();
+
+  // Returns true iff the current test has a non-fatal failure.
+  static bool HasNonfatalFailure();
+
+  // Returns true iff the current test has a (either fatal or
+  // non-fatal) failure.
+  static bool HasFailure() { return HasFatalFailure() || HasNonfatalFailure(); }
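+
+  // For example (a sketch; FooTest, Subroutine and DoSomething are
+  // hypothetical), a test can bail out early when a helper it called has
+  // failed fatally:
+  //
+  //   void Subroutine() { ASSERT_TRUE(DoSomething()); }
+  //
+  //   TEST_F(FooTest, Bar) {
+  //     Subroutine();
+  //     if (HasFatalFailure()) return;
+  //     // Assertions that rely on Subroutine() having succeeded go here.
+  //   }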
+
+  // Logs a property for the current test.  Only the last value for a given
+  // key is remembered.
+  // These are public static so they can be called from utility functions
+  // that are not members of the test fixture.
+  // The arguments are const char* instead of strings, as Google Test is
+  // used on platforms where string doesn't compile.
+  //
+  // Note that a driving consideration for these RecordProperty methods
+  // was to produce xml output suited to the Greenspan charting utility,
+  // which at present will only chart values that fit in a 32-bit int. It
+  // is the user's responsibility to restrict their values to 32-bit ints
+  // if they intend them to be used with Greenspan.
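+  //
+  // A usage sketch (the keys and values are illustrative only):
+  //
+  //   TEST_F(FooTest, RecordsWidgetCounts) {
+  //     RecordProperty("MaximumWidgets", 12);
+  //     RecordProperty("MinimumWidgets", "9");
+  //   }
+  //
+  // The recorded key/value pairs are reported together with the test's
+  // result (e.g. as attributes in the XML output).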
+  static void RecordProperty(const char* key, const char* value);
+  static void RecordProperty(const char* key, int value);
+
+ protected:
+  // Creates a Test object.
+  Test();
+
+  // Sets up the test fixture.
+  virtual void SetUp();
+
+  // Tears down the test fixture.
+  virtual void TearDown();
+
+ private:
+  // Returns true iff the current test has the same fixture class as
+  // the first test in the current test case.
+  static bool HasSameFixtureClass();
+
+  // Runs the test after the test fixture has been set up.
+  //
+  // A sub-class must implement this to define the test logic.
+  //
+  // DO NOT OVERRIDE THIS FUNCTION DIRECTLY IN A USER PROGRAM.
+  // Instead, use the TEST or TEST_F macro.
+  virtual void TestBody() = 0;
+
+  // Sets up, executes, and tears down the test.
+  void Run();
+
+  // Deletes self.  We deliberately pick an unusual name for this
+  // internal method to avoid clashing with names used in user TESTs.
+  void DeleteSelf_() { delete this; }
+
+  // Uses a GTestFlagSaver to save and restore all Google Test flags.
+  const internal::GTestFlagSaver* const gtest_flag_saver_;
+
+  // Often a user mis-spells SetUp() as Setup() and spends a long time
+  // wondering why it is never called by Google Test.  The declaration of
+  // the following method is solely for catching such an error at
+  // compile time:
+  //
+  //   - The return type is deliberately chosen to be not void, so it
+  //   will be a conflict if a user declares void Setup() in his test
+  //   fixture.
+  //
+  //   - This method is private, so it will be another compiler error
+  //   if a user calls it from his test fixture.
+  //
+  // DO NOT OVERRIDE THIS FUNCTION.
+  //
+  // If you see an error about overriding the following function or
+  // about it being private, you have mis-spelled SetUp() as Setup().
+  struct Setup_should_be_spelled_SetUp {};
+  virtual Setup_should_be_spelled_SetUp* Setup() { return NULL; }
+
+  // We disallow copying Tests.
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(Test);
+};
+
+typedef internal::TimeInMillis TimeInMillis;
+
+// A copyable object representing a user specified test property which can be
+// output as a key/value string pair.
+//
+// Don't inherit from TestProperty as its destructor is not virtual.
+class TestProperty {
+ public:
+  // C'tor.  TestProperty does NOT have a default constructor.
+  // Always use this constructor (with parameters) to create a
+  // TestProperty object.
+  TestProperty(const char* a_key, const char* a_value) :
+    key_(a_key), value_(a_value) {
+  }
+
+  // Gets the user supplied key.
+  const char* key() const {
+    return key_.c_str();
+  }
+
+  // Gets the user supplied value.
+  const char* value() const {
+    return value_.c_str();
+  }
+
+  // Sets a new value, overriding the one supplied in the constructor.
+  void SetValue(const char* new_value) {
+    value_ = new_value;
+  }
+
+ private:
+  // The key supplied by the user.
+  internal::String key_;
+  // The value supplied by the user.
+  internal::String value_;
+};
+
+// The result of a single Test.  This includes a list of
+// TestPartResults, a list of TestProperties, a count of how many
+// death tests there are in the Test, and how much time it took to run
+// the Test.
+//
+// TestResult is not copyable.
+class GTEST_API_ TestResult {
+ public:
+  // Creates an empty TestResult.
+  TestResult();
+
+  // D'tor.  Do not inherit from TestResult.
+  ~TestResult();
+
+  // Gets the number of all test parts.  This is the sum of the number
+  // of successful test parts and the number of failed test parts.
+  int total_part_count() const;
+
+  // Returns the number of test properties.
+  int test_property_count() const;
+
+  // Returns true iff the test passed (i.e. no test part failed).
+  bool Passed() const { return !Failed(); }
+
+  // Returns true iff the test failed.
+  bool Failed() const;
+
+  // Returns true iff the test fatally failed.
+  bool HasFatalFailure() const;
+
+  // Returns true iff the test has a non-fatal failure.
+  bool HasNonfatalFailure() const;
+
+  // Returns the elapsed time, in milliseconds.
+  TimeInMillis elapsed_time() const { return elapsed_time_; }
+
+  // Returns the i-th test part result among all the results. i can range
+  // from 0 to total_part_count() - 1. If i is not in that range, aborts
+  // the program.
+  const TestPartResult& GetTestPartResult(int i) const;
+
+  // Returns the i-th test property. i can range from 0 to
+  // test_property_count() - 1. If i is not in that range, aborts the
+  // program.
+  const TestProperty& GetTestProperty(int i) const;
+
+ private:
+  friend class TestInfo;
+  friend class UnitTest;
+  friend class internal::DefaultGlobalTestPartResultReporter;
+  friend class internal::ExecDeathTest;
+  friend class internal::TestResultAccessor;
+  friend class internal::UnitTestImpl;
+  friend class internal::WindowsDeathTest;
+
+  // Gets the vector of TestPartResults.
+  const std::vector<TestPartResult>& test_part_results() const {
+    return test_part_results_;
+  }
+
+  // Gets the vector of TestProperties.
+  const std::vector<TestProperty>& test_properties() const {
+    return test_properties_;
+  }
+
+  // Sets the elapsed time.
+  void set_elapsed_time(TimeInMillis elapsed) { elapsed_time_ = elapsed; }
+
+  // Adds a test property to the list. The property is validated and may add
+  // a non-fatal failure if invalid (e.g., if it conflicts with reserved
+  // key names). If a property is already recorded for the same key, the
+  // value will be updated, rather than storing multiple values for the same
+  // key.
+  void RecordProperty(const TestProperty& test_property);
+
+  // Adds a failure if the key is a reserved attribute of Google Test
+  // testcase tags.  Returns true if the property is valid.
+  // TODO(russr): Validate attribute names are legal and human readable.
+  static bool ValidateTestProperty(const TestProperty& test_property);
+
+  // Adds a test part result to the list.
+  void AddTestPartResult(const TestPartResult& test_part_result);
+
+  // Returns the death test count.
+  int death_test_count() const { return death_test_count_; }
+
+  // Increments the death test count, returning the new count.
+  int increment_death_test_count() { return ++death_test_count_; }
+
+  // Clears the test part results.
+  void ClearTestPartResults();
+
+  // Clears the object.
+  void Clear();
+
+  // Protects mutable state of the property vector and of owned
+  // properties, whose values may be updated.
+  internal::Mutex test_properites_mutex_;
+
+  // The vector of TestPartResults
+  std::vector<TestPartResult> test_part_results_;
+  // The vector of TestProperties
+  std::vector<TestProperty> test_properties_;
+  // Running count of death tests.
+  int death_test_count_;
+  // The elapsed time, in milliseconds.
+  TimeInMillis elapsed_time_;
+
+  // We disallow copying TestResult.
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestResult);
+};  // class TestResult
+
+// A TestInfo object stores the following information about a test:
+//
+//   Test case name
+//   Test name
+//   Whether the test should be run
+//   A function pointer that creates the test object when invoked
+//   Test result
+//
+// The constructor of TestInfo registers itself with the UnitTest
+// singleton such that the RUN_ALL_TESTS() macro knows which tests to
+// run.
+class GTEST_API_ TestInfo {
+ public:
+  // Destructs a TestInfo object.  This function is not virtual, so
+  // don't inherit from TestInfo.
+  ~TestInfo();
+
+  // Returns the test case name.
+  const char* test_case_name() const { return test_case_name_.c_str(); }
+
+  // Returns the test name.
+  const char* name() const { return name_.c_str(); }
+
+  // Returns the name of the parameter type, or NULL if this is not a typed
+  // or a type-parameterized test.
+  const char* type_param() const {
+    if (type_param_.get() != NULL)
+      return type_param_->c_str();
+    return NULL;
+  }
+
+  // Returns the text representation of the value parameter, or NULL if this
+  // is not a value-parameterized test.
+  const char* value_param() const {
+    if (value_param_.get() != NULL)
+      return value_param_->c_str();
+    return NULL;
+  }
+
+  // Returns true if this test should run, that is if the test is not disabled
+  // (or it is disabled but the also_run_disabled_tests flag has been specified)
+  // and its full name matches the user-specified filter.
+  //
+  // Google Test allows the user to filter the tests by their full names.
+  // The full name of a test Bar in test case Foo is defined as
+  // "Foo.Bar".  Only the tests that match the filter will run.
+  //
+  // A filter is a colon-separated list of glob (not regex) patterns,
+  // optionally followed by a '-' and a colon-separated list of
+  // negative patterns (tests to exclude).  A test is run if it
+  // matches one of the positive patterns and does not match any of
+  // the negative patterns.
+  //
+  // For example, *A*:Foo.* is a filter that matches any string that
+  // contains the character 'A' or starts with "Foo.".
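+  //
+  // For instance (an illustrative invocation, not part of this header),
+  // running the test binary as
+  //
+  //   ./foo_test --gtest_filter=FooTest.*-FooTest.Bar
+  //
+  // runs every test in test case FooTest except FooTest.Bar.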
+  bool should_run() const { return should_run_; }
+
+  // Returns the result of the test.
+  const TestResult* result() const { return &result_; }
+
+ private:
+
+#if GTEST_HAS_DEATH_TEST
+  friend class internal::DefaultDeathTestFactory;
+#endif  // GTEST_HAS_DEATH_TEST
+  friend class Test;
+  friend class TestCase;
+  friend class internal::UnitTestImpl;
+  friend TestInfo* internal::MakeAndRegisterTestInfo(
+      const char* test_case_name, const char* name,
+      const char* type_param,
+      const char* value_param,
+      internal::TypeId fixture_class_id,
+      Test::SetUpTestCaseFunc set_up_tc,
+      Test::TearDownTestCaseFunc tear_down_tc,
+      internal::TestFactoryBase* factory);
+
+  // Constructs a TestInfo object. The newly constructed instance assumes
+  // ownership of the factory object.
+  TestInfo(const char* test_case_name, const char* name,
+           const char* a_type_param,
+           const char* a_value_param,
+           internal::TypeId fixture_class_id,
+           internal::TestFactoryBase* factory);
+
+  // Increments the number of death tests encountered in this test so
+  // far.
+  int increment_death_test_count() {
+    return result_.increment_death_test_count();
+  }
+
+  // Creates the test object, runs it, records its result, and then
+  // deletes it.
+  void Run();
+
+  static void ClearTestResult(TestInfo* test_info) {
+    test_info->result_.Clear();
+  }
+
+  // These fields are immutable properties of the test.
+  const std::string test_case_name_;     // Test case name
+  const std::string name_;               // Test name
+  // Name of the parameter type, or NULL if this is not a typed or a
+  // type-parameterized test.
+  const internal::scoped_ptr<const ::std::string> type_param_;
+  // Text representation of the value parameter, or NULL if this is not a
+  // value-parameterized test.
+  const internal::scoped_ptr<const ::std::string> value_param_;
+  const internal::TypeId fixture_class_id_;   // ID of the test fixture class
+  bool should_run_;                 // True iff this test should run
+  bool is_disabled_;                // True iff this test is disabled
+  bool matches_filter_;             // True if this test matches the
+                                    // user-specified filter.
+  internal::TestFactoryBase* const factory_;  // The factory that creates
+                                              // the test object
+
+  // This field is mutable and needs to be reset before running the
+  // test for the second time.
+  TestResult result_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestInfo);
+};
+
+// A test case, which consists of a vector of TestInfos.
+//
+// TestCase is not copyable.
+class GTEST_API_ TestCase {
+ public:
+  // Creates a TestCase with the given name.
+  //
+  // TestCase does NOT have a default constructor.  Always use this
+  // constructor to create a TestCase object.
+  //
+  // Arguments:
+  //
+  //   name:         name of the test case
+  //   a_type_param: the name of the test's type parameter, or NULL if
+  //                 this is not a type-parameterized test.
+  //   set_up_tc:    pointer to the function that sets up the test case
+  //   tear_down_tc: pointer to the function that tears down the test case
+  TestCase(const char* name, const char* a_type_param,
+           Test::SetUpTestCaseFunc set_up_tc,
+           Test::TearDownTestCaseFunc tear_down_tc);
+
+  // Destructor of TestCase.
+  virtual ~TestCase();
+
+  // Gets the name of the TestCase.
+  const char* name() const { return name_.c_str(); }
+
+  // Returns the name of the parameter type, or NULL if this is not a
+  // type-parameterized test case.
+  const char* type_param() const {
+    if (type_param_.get() != NULL)
+      return type_param_->c_str();
+    return NULL;
+  }
+
+  // Returns true if any test in this test case should run.
+  bool should_run() const { return should_run_; }
+
+  // Gets the number of successful tests in this test case.
+  int successful_test_count() const;
+
+  // Gets the number of failed tests in this test case.
+  int failed_test_count() const;
+
+  // Gets the number of disabled tests in this test case.
+  int disabled_test_count() const;
+
+  // Gets the number of tests in this test case that should run.
+  int test_to_run_count() const;
+
+  // Gets the number of all tests in this test case.
+  int total_test_count() const;
+
+  // Returns true iff the test case passed.
+  bool Passed() const { return !Failed(); }
+
+  // Returns true iff the test case failed.
+  bool Failed() const { return failed_test_count() > 0; }
+
+  // Returns the elapsed time, in milliseconds.
+  TimeInMillis elapsed_time() const { return elapsed_time_; }
+
+  // Returns the i-th test among all the tests. i can range from 0 to
+  // total_test_count() - 1. If i is not in that range, returns NULL.
+  const TestInfo* GetTestInfo(int i) const;
+
+ private:
+  friend class Test;
+  friend class internal::UnitTestImpl;
+
+  // Gets the (mutable) vector of TestInfos in this TestCase.
+  std::vector<TestInfo*>& test_info_list() { return test_info_list_; }
+
+  // Gets the (immutable) vector of TestInfos in this TestCase.
+  const std::vector<TestInfo*>& test_info_list() const {
+    return test_info_list_;
+  }
+
+  // Returns the i-th test among all the tests. i can range from 0 to
+  // total_test_count() - 1. If i is not in that range, returns NULL.
+  TestInfo* GetMutableTestInfo(int i);
+
+  // Sets the should_run member.
+  void set_should_run(bool should) { should_run_ = should; }
+
+  // Adds a TestInfo to this test case.  Will delete the TestInfo upon
+  // destruction of the TestCase object.
+  void AddTestInfo(TestInfo * test_info);
+
+  // Clears the results of all tests in this test case.
+  void ClearResult();
+
+  // Clears the results of all tests in the given test case.
+  static void ClearTestCaseResult(TestCase* test_case) {
+    test_case->ClearResult();
+  }
+
+  // Runs every test in this TestCase.
+  void Run();
+
+  // Runs SetUpTestCase() for this TestCase.  This wrapper is needed
+  // for catching exceptions thrown from SetUpTestCase().
+  void RunSetUpTestCase() { (*set_up_tc_)(); }
+
+  // Runs TearDownTestCase() for this TestCase.  This wrapper is
+  // needed for catching exceptions thrown from TearDownTestCase().
+  void RunTearDownTestCase() { (*tear_down_tc_)(); }
+
+  // Returns true iff test passed.
+  static bool TestPassed(const TestInfo* test_info) {
+    return test_info->should_run() && test_info->result()->Passed();
+  }
+
+  // Returns true iff test failed.
+  static bool TestFailed(const TestInfo* test_info) {
+    return test_info->should_run() && test_info->result()->Failed();
+  }
+
+  // Returns true iff test is disabled.
+  static bool TestDisabled(const TestInfo* test_info) {
+    return test_info->is_disabled_;
+  }
+
+  // Returns true if the given test should run.
+  static bool ShouldRunTest(const TestInfo* test_info) {
+    return test_info->should_run();
+  }
+
+  // Shuffles the tests in this test case.
+  void ShuffleTests(internal::Random* random);
+
+  // Restores the test order to before the first shuffle.
+  void UnshuffleTests();
+
+  // Name of the test case.
+  internal::String name_;
+  // Name of the parameter type, or NULL if this is not a typed or a
+  // type-parameterized test.
+  const internal::scoped_ptr<const ::std::string> type_param_;
+  // The vector of TestInfos in their original order.  It owns the
+  // elements in the vector.
+  std::vector<TestInfo*> test_info_list_;
+  // Provides a level of indirection for the test list to allow easy
+  // shuffling and restoring the test order.  The i-th element in this
+  // vector is the index of the i-th test in the shuffled test list.
+  std::vector<int> test_indices_;
+  // Pointer to the function that sets up the test case.
+  Test::SetUpTestCaseFunc set_up_tc_;
+  // Pointer to the function that tears down the test case.
+  Test::TearDownTestCaseFunc tear_down_tc_;
+  // True iff any test in this test case should run.
+  bool should_run_;
+  // Elapsed time, in milliseconds.
+  TimeInMillis elapsed_time_;
+
+  // We disallow copying TestCases.
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestCase);
+};
+
+// An Environment object is capable of setting up and tearing down an
+// environment.  The user should subclass this to define his own
+// environment(s).
+//
+// An Environment object does the set-up and tear-down in virtual
+// methods SetUp() and TearDown() instead of the constructor and the
+// destructor, as:
+//
+//   1. You cannot safely throw from a destructor.  This is a problem
+//      as in some cases Google Test is used where exceptions are enabled, and
+//      we may want to implement ASSERT_* using exceptions where they are
+//      available.
+//   2. You cannot use ASSERT_* directly in a constructor or
+//      destructor.
+class Environment {
+ public:
+  // The d'tor is virtual as we need to subclass Environment.
+  virtual ~Environment() {}
+
+  // Override this to define how to set up the environment.
+  virtual void SetUp() {}
+
+  // Override this to define how to tear down the environment.
+  virtual void TearDown() {}
+ private:
+  // If you see an error about overriding the following function or
+  // about it being private, you have mis-spelled SetUp() as Setup().
+  struct Setup_should_be_spelled_SetUp {};
+  virtual Setup_should_be_spelled_SetUp* Setup() { return NULL; }
+};
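+
+// A sketch of a custom environment (FooEnvironment and its body are
+// illustrative, not part of Google Test):
+//
+//   class FooEnvironment : public testing::Environment {
+//    public:
+//     virtual void SetUp()    { /* acquire a shared resource */ }
+//     virtual void TearDown() { /* release it */ }
+//   };
+//
+// See AddGlobalTestEnvironment() below for how to register it.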
+
+// The interface for tracing execution of tests. The methods are organized in
+// the order the corresponding events are fired.
+class TestEventListener {
+ public:
+  virtual ~TestEventListener() {}
+
+  // Fired before any test activity starts.
+  virtual void OnTestProgramStart(const UnitTest& unit_test) = 0;
+
+  // Fired before each iteration of tests starts.  There may be more than
+  // one iteration if GTEST_FLAG(repeat) is set. iteration is the iteration
+  // index, starting from 0.
+  virtual void OnTestIterationStart(const UnitTest& unit_test,
+                                    int iteration) = 0;
+
+  // Fired before environment set-up for each iteration of tests starts.
+  virtual void OnEnvironmentsSetUpStart(const UnitTest& unit_test) = 0;
+
+  // Fired after environment set-up for each iteration of tests ends.
+  virtual void OnEnvironmentsSetUpEnd(const UnitTest& unit_test) = 0;
+
+  // Fired before the test case starts.
+  virtual void OnTestCaseStart(const TestCase& test_case) = 0;
+
+  // Fired before the test starts.
+  virtual void OnTestStart(const TestInfo& test_info) = 0;
+
+  // Fired after a failed assertion or a SUCCEED() invocation.
+  virtual void OnTestPartResult(const TestPartResult& test_part_result) = 0;
+
+  // Fired after the test ends.
+  virtual void OnTestEnd(const TestInfo& test_info) = 0;
+
+  // Fired after the test case ends.
+  virtual void OnTestCaseEnd(const TestCase& test_case) = 0;
+
+  // Fired before environment tear-down for each iteration of tests starts.
+  virtual void OnEnvironmentsTearDownStart(const UnitTest& unit_test) = 0;
+
+  // Fired after environment tear-down for each iteration of tests ends.
+  virtual void OnEnvironmentsTearDownEnd(const UnitTest& unit_test) = 0;
+
+  // Fired after each iteration of tests finishes.
+  virtual void OnTestIterationEnd(const UnitTest& unit_test,
+                                  int iteration) = 0;
+
+  // Fired after all test activities have ended.
+  virtual void OnTestProgramEnd(const UnitTest& unit_test) = 0;
+};
+
+// The convenience class for users who need to override just one or two
+// methods, accepting that a change to the signature of an overridden
+// method may go unnoticed at build time.  For comments about each method
+// please see the definition of TestEventListener above.
+class EmptyTestEventListener : public TestEventListener {
+ public:
+  virtual void OnTestProgramStart(const UnitTest& /*unit_test*/) {}
+  virtual void OnTestIterationStart(const UnitTest& /*unit_test*/,
+                                    int /*iteration*/) {}
+  virtual void OnEnvironmentsSetUpStart(const UnitTest& /*unit_test*/) {}
+  virtual void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) {}
+  virtual void OnTestCaseStart(const TestCase& /*test_case*/) {}
+  virtual void OnTestStart(const TestInfo& /*test_info*/) {}
+  virtual void OnTestPartResult(const TestPartResult& /*test_part_result*/) {}
+  virtual void OnTestEnd(const TestInfo& /*test_info*/) {}
+  virtual void OnTestCaseEnd(const TestCase& /*test_case*/) {}
+  virtual void OnEnvironmentsTearDownStart(const UnitTest& /*unit_test*/) {}
+  virtual void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) {}
+  virtual void OnTestIterationEnd(const UnitTest& /*unit_test*/,
+                                  int /*iteration*/) {}
+  virtual void OnTestProgramEnd(const UnitTest& /*unit_test*/) {}
+};
+
+// TestEventListeners lets users add listeners to track events in Google Test.
+class GTEST_API_ TestEventListeners {
+ public:
+  TestEventListeners();
+  ~TestEventListeners();
+
+  // Appends an event listener to the end of the list. Google Test assumes
+  // the ownership of the listener (i.e. it will delete the listener when
+  // the test program finishes).
+  void Append(TestEventListener* listener);
+
+  // Removes the given event listener from the list and returns it.  It then
+  // becomes the caller's responsibility to delete the listener. Returns
+  // NULL if the listener is not found in the list.
+  TestEventListener* Release(TestEventListener* listener);
+
+  // Returns the standard listener responsible for the default console
+  // output.  Can be removed from the listeners list to shut down default
+  // console output.  Note that removing this object from the listener list
+  // with Release transfers its ownership to the caller and makes this
+  // function return NULL the next time.
+  TestEventListener* default_result_printer() const {
+    return default_result_printer_;
+  }
+
+  // Returns the standard listener responsible for the default XML output
+  // controlled by the --gtest_output=xml flag.  Can be removed from the
+  // listeners list by users who want to shut down the default XML output
+  // controlled by this flag and substitute it with custom one.  Note that
+  // removing this object from the listener list with Release transfers its
+  // ownership to the caller and makes this function return NULL the next
+  // time.
+  TestEventListener* default_xml_generator() const {
+    return default_xml_generator_;
+  }
+
+ private:
+  friend class TestCase;
+  friend class TestInfo;
+  friend class internal::DefaultGlobalTestPartResultReporter;
+  friend class internal::NoExecDeathTest;
+  friend class internal::TestEventListenersAccessor;
+  friend class internal::UnitTestImpl;
+
+  // Returns repeater that broadcasts the TestEventListener events to all
+  // subscribers.
+  TestEventListener* repeater();
+
+  // Sets the default_result_printer attribute to the provided listener.
+  // The listener is also added to the listener list and previous
+  // default_result_printer is removed from it and deleted. The listener can
+  // also be NULL in which case it will not be added to the list. Does
+  // nothing if the previous and the current listener objects are the same.
+  void SetDefaultResultPrinter(TestEventListener* listener);
+
+  // Sets the default_xml_generator attribute to the provided listener.  The
+  // listener is also added to the listener list and previous
+  // default_xml_generator is removed from it and deleted. The listener can
+  // also be NULL in which case it will not be added to the list. Does
+  // nothing if the previous and the current listener objects are the same.
+  void SetDefaultXmlGenerator(TestEventListener* listener);
+
+  // Controls whether events will be forwarded by the repeater to the
+  // listeners in the list.
+  bool EventForwardingEnabled() const;
+  void SuppressEventForwarding();
+
+  // The actual list of listeners.
+  internal::TestEventRepeater* repeater_;
+  // Listener responsible for the standard result output.
+  TestEventListener* default_result_printer_;
+  // Listener responsible for the creation of the XML output file.
+  TestEventListener* default_xml_generator_;
+
+  // We disallow copying TestEventListeners.
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestEventListeners);
+};
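+
+// A usage sketch (MinimalistPrinter is illustrative, not part of Google
+// Test): derive from EmptyTestEventListener, override only the events you
+// care about, and hand the listener to Google Test, which takes ownership.
+//
+//   class MinimalistPrinter : public testing::EmptyTestEventListener {
+//     virtual void OnTestStart(const testing::TestInfo& test_info) {
+//       printf("*** Test %s.%s starting.\n",
+//              test_info.test_case_name(), test_info.name());
+//     }
+//   };
+//
+//   testing::TestEventListeners& listeners =
+//       testing::UnitTest::GetInstance()->listeners();
+//   listeners.Append(new MinimalistPrinter);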
+
+// A UnitTest consists of a vector of TestCases.
+//
+// This is a singleton class.  The only instance of UnitTest is
+// created when UnitTest::GetInstance() is first called.  This
+// instance is never deleted.
+//
+// UnitTest is not copyable.
+//
+// This class is thread-safe as long as the methods are called
+// according to their specification.
+class GTEST_API_ UnitTest {
+ public:
+  // Gets the singleton UnitTest object.  The first time this method
+  // is called, a UnitTest object is constructed and returned.
+  // Consecutive calls will return the same object.
+  static UnitTest* GetInstance();
+
+  // Runs all tests in this UnitTest object and prints the result.
+  // Returns 0 if successful, or 1 otherwise.
+  //
+  // This method can only be called from the main thread.
+  //
+  // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+  int Run() GTEST_MUST_USE_RESULT_;
+
+  // Returns the working directory when the first TEST() or TEST_F()
+  // was executed.  The UnitTest object owns the string.
+  const char* original_working_dir() const;
+
+  // Returns the TestCase object for the test that's currently running,
+  // or NULL if no test is running.
+  const TestCase* current_test_case() const;
+
+  // Returns the TestInfo object for the test that's currently running,
+  // or NULL if no test is running.
+  const TestInfo* current_test_info() const;
+
+  // Returns the random seed used at the start of the current test run.
+  int random_seed() const;
+
+#if GTEST_HAS_PARAM_TEST
+  // Returns the ParameterizedTestCaseRegistry object used to keep track of
+  // value-parameterized tests and instantiate and register them.
+  //
+  // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+  internal::ParameterizedTestCaseRegistry& parameterized_test_registry();
+#endif  // GTEST_HAS_PARAM_TEST
+
+  // Gets the number of successful test cases.
+  int successful_test_case_count() const;
+
+  // Gets the number of failed test cases.
+  int failed_test_case_count() const;
+
+  // Gets the number of all test cases.
+  int total_test_case_count() const;
+
+  // Gets the number of all test cases that contain at least one test
+  // that should run.
+  int test_case_to_run_count() const;
+
+  // Gets the number of successful tests.
+  int successful_test_count() const;
+
+  // Gets the number of failed tests.
+  int failed_test_count() const;
+
+  // Gets the number of disabled tests.
+  int disabled_test_count() const;
+
+  // Gets the number of all tests.
+  int total_test_count() const;
+
+  // Gets the number of tests that should run.
+  int test_to_run_count() const;
+
+  // Gets the elapsed time, in milliseconds.
+  TimeInMillis elapsed_time() const;
+
+  // Returns true iff the unit test passed (i.e. all test cases passed).
+  bool Passed() const;
+
+  // Returns true iff the unit test failed (i.e. some test case failed
+  // or something outside of all tests failed).
+  bool Failed() const;
+
+  // Gets the i-th test case among all the test cases. i can range from 0 to
+  // total_test_case_count() - 1. If i is not in that range, returns NULL.
+  const TestCase* GetTestCase(int i) const;
+
+  // Returns the list of event listeners that can be used to track events
+  // inside Google Test.
+  TestEventListeners& listeners();
+
+ private:
+  // Registers and returns a global test environment.  When a test
+  // program is run, all global test environments will be set-up in
+  // the order they were registered.  After all tests in the program
+  // have finished, all global test environments will be torn-down in
+  // the *reverse* order they were registered.
+  //
+  // The UnitTest object takes ownership of the given environment.
+  //
+  // This method can only be called from the main thread.
+  Environment* AddEnvironment(Environment* env);
+
+  // Adds a TestPartResult to the current TestResult object.  All
+  // Google Test assertion macros (e.g. ASSERT_TRUE, EXPECT_EQ, etc)
+  // eventually call this to report their results.  The user code
+  // should use the assertion macros instead of calling this directly.
+  void AddTestPartResult(TestPartResult::Type result_type,
+                         const char* file_name,
+                         int line_number,
+                         const internal::String& message,
+                         const internal::String& os_stack_trace);
+
+  // Adds a TestProperty to the current TestResult object. If the result already
+  // contains a property with the same key, the value will be updated.
+  void RecordPropertyForCurrentTest(const char* key, const char* value);
+
+  // Gets the i-th test case among all the test cases. i can range from 0 to
+  // total_test_case_count() - 1. If i is not in that range, returns NULL.
+  TestCase* GetMutableTestCase(int i);
+
+  // Accessors for the implementation object.
+  internal::UnitTestImpl* impl() { return impl_; }
+  const internal::UnitTestImpl* impl() const { return impl_; }
+
+  // These classes and functions are friends as they need to access private
+  // members of UnitTest.
+  friend class Test;
+  friend class internal::AssertHelper;
+  friend class internal::ScopedTrace;
+  friend Environment* AddGlobalTestEnvironment(Environment* env);
+  friend internal::UnitTestImpl* internal::GetUnitTestImpl();
+  friend void internal::ReportFailureInUnknownLocation(
+      TestPartResult::Type result_type,
+      const internal::String& message);
+
+  // Creates an empty UnitTest.
+  UnitTest();
+
+  // D'tor
+  virtual ~UnitTest();
+
+  // Pushes a trace defined by SCOPED_TRACE() on to the per-thread
+  // Google Test trace stack.
+  void PushGTestTrace(const internal::TraceInfo& trace);
+
+  // Pops a trace from the per-thread Google Test trace stack.
+  void PopGTestTrace();
+
+  // Protects mutable state in *impl_.  This is mutable as some const
+  // methods need to lock it too.
+  mutable internal::Mutex mutex_;
+
+  // Opaque implementation object.  This field is never changed once
+  // the object is constructed.  We don't mark it as const here, as
+  // doing so will cause a warning in the constructor of UnitTest.
+  // Mutable state in *impl_ is protected by mutex_.
+  internal::UnitTestImpl* impl_;
+
+  // We disallow copying UnitTest.
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(UnitTest);
+};
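+
+// A usage sketch (the printf format is illustrative): code outside the
+// framework can query the currently running test through the singleton.
+//
+//   const testing::TestInfo* const info =
+//       testing::UnitTest::GetInstance()->current_test_info();
+//   if (info != NULL)
+//     printf("Now running %s.%s\n", info->test_case_name(), info->name());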
+
+// A convenient wrapper for adding an environment for the test
+// program.
+//
+// You should call this before RUN_ALL_TESTS() is called, probably in
+// main().  If you use gtest_main, you need to call this before main()
+// starts for it to take effect.  For example, you can define a global
+// variable like this:
+//
+//   testing::Environment* const foo_env =
+//       testing::AddGlobalTestEnvironment(new FooEnvironment);
+//
+// However, we strongly recommend you to write your own main() and
+// call AddGlobalTestEnvironment() there, as relying on initialization
+// of global variables makes the code harder to read and may cause
+// problems when you register multiple environments from different
+// translation units and the environments have dependencies among them
+// (remember that the compiler doesn't guarantee the order in which
+// global variables from different translation units are initialized).
+inline Environment* AddGlobalTestEnvironment(Environment* env) {
+  return UnitTest::GetInstance()->AddEnvironment(env);
+}
+
+// Initializes Google Test.  This must be called before calling
+// RUN_ALL_TESTS().  In particular, it parses a command line for the
+// flags that Google Test recognizes.  Whenever a Google Test flag is
+// seen, it is removed from argv, and *argc is decremented.
+//
+// No value is returned.  Instead, the Google Test flag variables are
+// updated.
+//
+// Calling the function for the second time has no user-visible effect.
+GTEST_API_ void InitGoogleTest(int* argc, char** argv);
+
+// This overloaded version can be used in Windows programs compiled in
+// UNICODE mode.
+GTEST_API_ void InitGoogleTest(int* argc, wchar_t** argv);
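+
+// A typical main(), sketched for illustration (FooEnvironment is the
+// hypothetical environment from the sketch above; linking against
+// gtest_main provides an equivalent main() for you):
+//
+//   int main(int argc, char** argv) {
+//     testing::InitGoogleTest(&argc, argv);
+//     testing::AddGlobalTestEnvironment(new FooEnvironment);  // Optional.
+//     return RUN_ALL_TESTS();
+//   }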
+
+namespace internal {
+
+// Formats a comparison assertion (e.g. ASSERT_EQ, EXPECT_LT, etc.)
+// operand to be used in a failure message.  The type (but not value)
+// of the other operand may affect the format.  This allows us to
+// print a char* as a raw pointer when it is compared against another
+// char*, and print it as a C string when it is compared against an
+// std::string object, for example.
+//
+// The default implementation ignores the type of the other operand.
+// Some specialized versions are used to handle formatting wide or
+// narrow C strings.
+//
+// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+template <typename T1, typename T2>
+String FormatForComparisonFailureMessage(const T1& value,
+                                         const T2& /* other_operand */) {
+  // C++Builder compiles this incorrectly if the namespace isn't explicitly
+  // given.
+  return ::testing::PrintToString(value);
+}
+
+// The helper function for {ASSERT|EXPECT}_EQ.
+template <typename T1, typename T2>
+AssertionResult CmpHelperEQ(const char* expected_expression,
+                            const char* actual_expression,
+                            const T1& expected,
+                            const T2& actual) {
+#ifdef _MSC_VER
+# pragma warning(push)          // Saves the current warning state.
+# pragma warning(disable:4389)  // Temporarily disables warning on
+                                // signed/unsigned mismatch.
+#endif
+
+  if (expected == actual) {
+    return AssertionSuccess();
+  }
+
+#ifdef _MSC_VER
+# pragma warning(pop)          // Restores the warning state.
+#endif
+
+  return EqFailure(expected_expression,
+                   actual_expression,
+                   FormatForComparisonFailureMessage(expected, actual),
+                   FormatForComparisonFailureMessage(actual, expected),
+                   false);
+}
+
+// With this overloaded version, we allow anonymous enums to be used
+// in {ASSERT|EXPECT}_EQ when compiled with gcc 4, as anonymous enums
+// can be implicitly cast to BiggestInt.
+GTEST_API_ AssertionResult CmpHelperEQ(const char* expected_expression,
+                                       const char* actual_expression,
+                                       BiggestInt expected,
+                                       BiggestInt actual);
+
+// The helper class for {ASSERT|EXPECT}_EQ.  The template argument
+// lhs_is_null_literal is true iff the first argument to ASSERT_EQ()
+// is a null pointer literal.  The following default implementation is
+// for lhs_is_null_literal being false.
+template <bool lhs_is_null_literal>
+class EqHelper {
+ public:
+  // This templatized version is for the general case.
+  template <typename T1, typename T2>
+  static AssertionResult Compare(const char* expected_expression,
+                                 const char* actual_expression,
+                                 const T1& expected,
+                                 const T2& actual) {
+    return CmpHelperEQ(expected_expression, actual_expression, expected,
+                       actual);
+  }
+
+  // With this overloaded version, we allow anonymous enums to be used
+  // in {ASSERT|EXPECT}_EQ when compiled with gcc 4, as anonymous
+  // enums can be implicitly cast to BiggestInt.
+  //
+  // Even though its body looks the same as the above version, we
+  // cannot merge the two, as it will make anonymous enums unhappy.
+  static AssertionResult Compare(const char* expected_expression,
+                                 const char* actual_expression,
+                                 BiggestInt expected,
+                                 BiggestInt actual) {
+    return CmpHelperEQ(expected_expression, actual_expression, expected,
+                       actual);
+  }
+};
+
+// This specialization is used when the first argument to ASSERT_EQ()
+// is a null pointer literal, like NULL, false, or 0.
+template <>
+class EqHelper<true> {
+ public:
+  // We define two overloaded versions of Compare().  The first
+  // version will be picked when the second argument to ASSERT_EQ() is
+  // NOT a pointer, e.g. ASSERT_EQ(0, AnIntFunction()) or
+  // EXPECT_EQ(false, a_bool).
+  template <typename T1, typename T2>
+  static AssertionResult Compare(
+      const char* expected_expression,
+      const char* actual_expression,
+      const T1& expected,
+      const T2& actual,
+      // The following line prevents this overload from being considered if T2
+      // is not a pointer type.  We need this because ASSERT_EQ(NULL, my_ptr)
+      // expands to Compare("", "", NULL, my_ptr), which requires a conversion
+      // to match the Secret* in the other overload, which would otherwise make
+      // this template match better.
+      typename EnableIf<!is_pointer<T2>::value>::type* = 0) {
+    return CmpHelperEQ(expected_expression, actual_expression, expected,
+                       actual);
+  }
+
+  // This version will be picked when the second argument to ASSERT_EQ() is a
+  // pointer, e.g. ASSERT_EQ(NULL, a_pointer).
+  template <typename T>
+  static AssertionResult Compare(
+      const char* expected_expression,
+      const char* actual_expression,
+      // We used to have a second template parameter instead of Secret*.  That
+      // template parameter would deduce to 'long', making this a better match
+      // than the first overload even without the first overload's EnableIf.
+      // Unfortunately, gcc with -Wconversion-null warns when "passing NULL to
+      // non-pointer argument" (even a deduced integral argument), so the old
+      // implementation caused warnings in user code.
+      Secret* /* expected (NULL) */,
+      T* actual) {
+    // We already know that 'expected' is a null pointer.
+    return CmpHelperEQ(expected_expression, actual_expression,
+                       static_cast<T*>(NULL), actual);
+  }
+};
+
+// A macro for implementing the helper functions needed to implement
+// ASSERT_?? and EXPECT_??.  It is here just to avoid copy-and-paste
+// of similar code.
+//
+// For each templatized helper function, we also define an overloaded
+// version for BiggestInt in order to reduce code bloat and allow
+// anonymous enums to be used with {ASSERT|EXPECT}_?? when compiled
+// with gcc 4.
+//
+// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+#define GTEST_IMPL_CMP_HELPER_(op_name, op)\
+template <typename T1, typename T2>\
+AssertionResult CmpHelper##op_name(const char* expr1, const char* expr2, \
+                                   const T1& val1, const T2& val2) {\
+  if (val1 op val2) {\
+    return AssertionSuccess();\
+  } else {\
+    return AssertionFailure() \
+        << "Expected: (" << expr1 << ") " #op " (" << expr2\
+        << "), actual: " << FormatForComparisonFailureMessage(val1, val2)\
+        << " vs " << FormatForComparisonFailureMessage(val2, val1);\
+  }\
+}\
+GTEST_API_ AssertionResult CmpHelper##op_name(\
+    const char* expr1, const char* expr2, BiggestInt val1, BiggestInt val2)
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+
+// Implements the helper function for {ASSERT|EXPECT}_NE
+GTEST_IMPL_CMP_HELPER_(NE, !=);
+// Implements the helper function for {ASSERT|EXPECT}_LE
+GTEST_IMPL_CMP_HELPER_(LE, <=);
+// Implements the helper function for {ASSERT|EXPECT}_LT
+GTEST_IMPL_CMP_HELPER_(LT, < );
+// Implements the helper function for {ASSERT|EXPECT}_GE
+GTEST_IMPL_CMP_HELPER_(GE, >=);
+// Implements the helper function for {ASSERT|EXPECT}_GT
+GTEST_IMPL_CMP_HELPER_(GT, > );
+
+#undef GTEST_IMPL_CMP_HELPER_
+
+// The helper function for {ASSERT|EXPECT}_STREQ.
+//
+// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+GTEST_API_ AssertionResult CmpHelperSTREQ(const char* expected_expression,
+                                          const char* actual_expression,
+                                          const char* expected,
+                                          const char* actual);
+
+// The helper function for {ASSERT|EXPECT}_STRCASEEQ.
+//
+// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+GTEST_API_ AssertionResult CmpHelperSTRCASEEQ(const char* expected_expression,
+                                              const char* actual_expression,
+                                              const char* expected,
+                                              const char* actual);
+
+// The helper function for {ASSERT|EXPECT}_STRNE.
+//
+// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+GTEST_API_ AssertionResult CmpHelperSTRNE(const char* s1_expression,
+                                          const char* s2_expression,
+                                          const char* s1,
+                                          const char* s2);
+
+// The helper function for {ASSERT|EXPECT}_STRCASENE.
+//
+// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+GTEST_API_ AssertionResult CmpHelperSTRCASENE(const char* s1_expression,
+                                              const char* s2_expression,
+                                              const char* s1,
+                                              const char* s2);
+
+
+// Helper function for *_STREQ on wide strings.
+//
+// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+GTEST_API_ AssertionResult CmpHelperSTREQ(const char* expected_expression,
+                                          const char* actual_expression,
+                                          const wchar_t* expected,
+                                          const wchar_t* actual);
+
+// Helper function for *_STRNE on wide strings.
+//
+// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+GTEST_API_ AssertionResult CmpHelperSTRNE(const char* s1_expression,
+                                          const char* s2_expression,
+                                          const wchar_t* s1,
+                                          const wchar_t* s2);
+
+}  // namespace internal
+
+// IsSubstring() and IsNotSubstring() are intended to be used as the
+// first argument to {EXPECT,ASSERT}_PRED_FORMAT2(), not by
+// themselves.  They check whether needle is a substring of haystack
+// (NULL is considered a substring of itself only), and return an
+// appropriate error message when they fail.
+//
+// The {needle,haystack}_expr arguments are the stringified
+// expressions that generated the two real arguments.
+GTEST_API_ AssertionResult IsSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const char* needle, const char* haystack);
+GTEST_API_ AssertionResult IsSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const wchar_t* needle, const wchar_t* haystack);
+GTEST_API_ AssertionResult IsNotSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const char* needle, const char* haystack);
+GTEST_API_ AssertionResult IsNotSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const wchar_t* needle, const wchar_t* haystack);
+GTEST_API_ AssertionResult IsSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const ::std::string& needle, const ::std::string& haystack);
+GTEST_API_ AssertionResult IsNotSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const ::std::string& needle, const ::std::string& haystack);
+
+#if GTEST_HAS_STD_WSTRING
+GTEST_API_ AssertionResult IsSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const ::std::wstring& needle, const ::std::wstring& haystack);
+GTEST_API_ AssertionResult IsNotSubstring(
+    const char* needle_expr, const char* haystack_expr,
+    const ::std::wstring& needle, const ::std::wstring& haystack);
+#endif  // GTEST_HAS_STD_WSTRING
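+
+// A usage sketch (the string literals are illustrative only): these
+// functions are meant to be passed as the first argument of the
+// two-argument predicate-format macros.
+//
+//   EXPECT_PRED_FORMAT2(testing::IsSubstring, "needle", "two needles");
+//   EXPECT_PRED_FORMAT2(testing::IsNotSubstring, "needle", "haystack");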
+
+namespace internal {
+
+// Helper template function for comparing floating-points.
+//
+// Template parameter:
+//
+//   RawType: the raw floating-point type (either float or double)
+//
+// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+template <typename RawType>
+AssertionResult CmpHelperFloatingPointEQ(const char* expected_expression,
+                                         const char* actual_expression,
+                                         RawType expected,
+                                         RawType actual) {
+  const FloatingPoint<RawType> lhs(expected), rhs(actual);
+
+  if (lhs.AlmostEquals(rhs)) {
+    return AssertionSuccess();
+  }
+
+  ::std::stringstream expected_ss;
+  expected_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2)
+              << expected;
+
+  ::std::stringstream actual_ss;
+  actual_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2)
+            << actual;
+
+  return EqFailure(expected_expression,
+                   actual_expression,
+                   StringStreamToString(&expected_ss),
+                   StringStreamToString(&actual_ss),
+                   false);
+}
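+
+// The user-facing wrappers of this helper are EXPECT_FLOAT_EQ and
+// EXPECT_DOUBLE_EQ, declared later in this header.  A usage sketch
+// (SquareRoot is illustrative, not part of Google Test):
+//
+//   EXPECT_FLOAT_EQ(2.0f, SquareRoot(4.0f));
+//   EXPECT_DOUBLE_EQ(2.0, SquareRoot(4.0));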
+
+// Helper function for implementing ASSERT_NEAR.
+//
+// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+GTEST_API_ AssertionResult DoubleNearPredFormat(const char* expr1,
+                                                const char* expr2,
+                                                const char* abs_error_expr,
+                                                double val1,
+                                                double val2,
+                                                double abs_error);
+
+// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
+// A class that enables one to stream messages to assertion macros
+class GTEST_API_ AssertHelper {
+ public:
+  // Constructor.
+  AssertHelper(TestPartResult::Type type,
+               const char* file,
+               int line,
+               const char* message);
+  ~AssertHelper();
+
+  // Message assignment is a semantic trick to enable assertion
+  // streaming; see the GTEST_MESSAGE_ macro below.
+  void operator=(const Message& message) const;
+
+ private:
+  // We put our data in a struct so that the size of the AssertHelper class can
+  // be as small as possible.  This is important because gcc is incapable of
+  // re-using stack space even for temporary variables, so every EXPECT_EQ
+  // reserves stack space for another AssertHelper.
+  struct AssertHelperData {
+    AssertHelperData(TestPartResult::Type t,
+                     const char* srcfile,
+                     int line_num,
+                     const char* msg)
+        : type(t), file(srcfile), line(line_num), message(msg) { }
+
+    TestPartResult::Type const type;
+    const char*        const file;
+    int                const line;
+    String             const message;
+
+   private:
+    GTEST_DISALLOW_COPY_AND_ASSIGN_(AssertHelperData);
+  };
+
+  AssertHelperData* const data_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(AssertHelper);
+};
+
+}  // namespace internal
+
+#if GTEST_HAS_PARAM_TEST
+// The pure interface class that all value-parameterized tests inherit from.
+// A value-parameterized class must inherit from both ::testing::Test and
+// ::testing::WithParamInterface. In most cases that just means inheriting
+// from ::testing::TestWithParam, but more complicated test hierarchies
+// may need to inherit from Test and WithParamInterface at different levels.
+//
+// This interface has support for accessing the test parameter value via
+// the GetParam() method.
+//
+// Use it with one of the parameter generator defining functions, like Range(),
+// Values(), ValuesIn(), Bool(), and Combine().
+//
+// class FooTest : public ::testing::TestWithParam<int> {
+//  protected:
+//   FooTest() {
+//     // Can use GetParam() here.
+//   }
+//   virtual ~FooTest() {
+//     // Can use GetParam() here.
+//   }
+//   virtual void SetUp() {
+//     // Can use GetParam() here.
+//   }
+//   virtual void TearDown() {
+//     // Can use GetParam() here.
+//   }
+// };
+// TEST_P(FooTest, DoesBar) {
+//   // Can use GetParam() method here.
+//   Foo foo;
+//   ASSERT_TRUE(foo.DoesBar(GetParam()));
+// }
+// INSTANTIATE_TEST_CASE_P(OneToTenRange, FooTest, ::testing::Range(1, 10));
+
+template <typename T>
+class WithParamInterface {
+ public:
+  typedef T ParamType;
+  virtual ~WithParamInterface() {}
+
+  // The current parameter value. It is also available in the test fixture's
+  // constructor. This member function is non-static, even though it only
+  // references static data, to reduce the opportunity for incorrect uses
+  // like writing 'WithParamInterface<bool>::GetParam()' for a test that
+  // uses a fixture whose parameter type is int.
+  const ParamType& GetParam() const { return *parameter_; }
+
+ private:
+  // Sets parameter value. The caller is responsible for making sure the value
+  // remains alive and unchanged throughout the current test.
+  static void SetParam(const ParamType* parameter) {
+    parameter_ = parameter;
+  }
+
+  // Static value used for accessing parameter during a test lifetime.
+  static const ParamType* parameter_;
+
+  // TestClass must be a subclass of WithParamInterface<T> and Test.
+  template <class TestClass> friend class internal::ParameterizedTestFactory;
+};
+
+template <typename T>
+const T* WithParamInterface<T>::parameter_ = NULL;
+
+// Most value-parameterized classes can ignore the existence of
+// WithParamInterface, and can just inherit from ::testing::TestWithParam.
+
+template <typename T>
+class TestWithParam : public Test, public WithParamInterface<T> {
+};
+
+#endif  // GTEST_HAS_PARAM_TEST
+
+// Macros for indicating success/failure in test code.
+
+// ADD_FAILURE unconditionally adds a failure to the current test.
+// SUCCEED generates a success - it doesn't automatically make the
+// current test successful, as a test is only successful when it has
+// no failure.
+//
+// EXPECT_* verifies that a certain condition is satisfied.  If not,
+// it behaves like ADD_FAILURE.  In particular:
+//
+//   EXPECT_TRUE  verifies that a Boolean condition is true.
+//   EXPECT_FALSE verifies that a Boolean condition is false.
+//
+// FAIL and ASSERT_* are similar to ADD_FAILURE and EXPECT_*, except
+// that they will also abort the current function on failure.  People
+// usually want the fail-fast behavior of FAIL and ASSERT_*, but those
+// writing data-driven tests often find themselves using ADD_FAILURE
+// and EXPECT_* more.
+//
+// Examples:
+//
+//   EXPECT_TRUE(server.StatusIsOK());
+//   ASSERT_FALSE(server.HasPendingRequest(port))
+//       << "There are still pending requests " << "on port " << port;
+
+// Generates a nonfatal failure with a generic message.
+#define ADD_FAILURE() GTEST_NONFATAL_FAILURE_("Failed")
+
+// Generates a nonfatal failure at the given source file location with
+// a generic message.
+#define ADD_FAILURE_AT(file, line) \
+  GTEST_MESSAGE_AT_(file, line, "Failed", \
+                    ::testing::TestPartResult::kNonFatalFailure)
+
+// Generates a fatal failure with a generic message.
+#define GTEST_FAIL() GTEST_FATAL_FAILURE_("Failed")
+
+// Define this macro to 1 to omit the definition of FAIL(), which is a
+// generic name and clashes with some other libraries.
+#if !GTEST_DONT_DEFINE_FAIL
+# define FAIL() GTEST_FAIL()
+#endif
+
+// Generates a success with a generic message.
+#define GTEST_SUCCEED() GTEST_SUCCESS_("Succeeded")
+
+// Define this macro to 1 to omit the definition of SUCCEED(), which
+// is a generic name and clashes with some other libraries.
+#if !GTEST_DONT_DEFINE_SUCCEED
+# define SUCCEED() GTEST_SUCCEED()
+#endif
+
+// Macros for testing exceptions.
+//
+//    * {ASSERT|EXPECT}_THROW(statement, expected_exception):
+//         Tests that the statement throws the expected exception.
+//    * {ASSERT|EXPECT}_NO_THROW(statement):
+//         Tests that the statement doesn't throw any exception.
+//    * {ASSERT|EXPECT}_ANY_THROW(statement):
+//         Tests that the statement throws an exception.
+
+#define EXPECT_THROW(statement, expected_exception) \
+  GTEST_TEST_THROW_(statement, expected_exception, GTEST_NONFATAL_FAILURE_)
+#define EXPECT_NO_THROW(statement) \
+  GTEST_TEST_NO_THROW_(statement, GTEST_NONFATAL_FAILURE_)
+#define EXPECT_ANY_THROW(statement) \
+  GTEST_TEST_ANY_THROW_(statement, GTEST_NONFATAL_FAILURE_)
+#define ASSERT_THROW(statement, expected_exception) \
+  GTEST_TEST_THROW_(statement, expected_exception, GTEST_FATAL_FAILURE_)
+#define ASSERT_NO_THROW(statement) \
+  GTEST_TEST_NO_THROW_(statement, GTEST_FATAL_FAILURE_)
+#define ASSERT_ANY_THROW(statement) \
+  GTEST_TEST_ANY_THROW_(statement, GTEST_FATAL_FAILURE_)
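+
+// A usage sketch (ThrowingFunc and the exception type are illustrative,
+// not part of Google Test):
+//
+//   EXPECT_THROW(ThrowingFunc(-1), std::out_of_range);
+//   EXPECT_NO_THROW(ThrowingFunc(0));
+//   EXPECT_ANY_THROW(ThrowingFunc(42));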
+
+// Boolean assertions. Condition can be either a Boolean expression or an
+// AssertionResult. For more information on how to use AssertionResult with
+// these macros see comments on that class.
+#define EXPECT_TRUE(condition) \
+  GTEST_TEST_BOOLEAN_(condition, #condition, false, true, \
+                      GTEST_NONFATAL_FAILURE_)
+#define EXPECT_FALSE(condition) \
+  GTEST_TEST_BOOLEAN_(!(condition), #condition, true, false, \
+                      GTEST_NONFATAL_FAILURE_)
+#define ASSERT_TRUE(condition) \
+  GTEST_TEST_BOOLEAN_(condition, #condition, false, true, \
+                      GTEST_FATAL_FAILURE_)
+#define ASSERT_FALSE(condition) \
+  GTEST_TEST_BOOLEAN_(!(condition), #condition, true, false, \
+                      GTEST_FATAL_FAILURE_)
+
+// Includes the auto-generated header that implements a family of
+// generic predicate assertion macros.
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This file is AUTOMATICALLY GENERATED on 09/24/2010 by command
+// 'gen_gtest_pred_impl.py 5'.  DO NOT EDIT BY HAND!
+//
+// Implements a family of generic predicate assertion macros.
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
+#define GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
+
+// Makes sure this header is not included before gtest.h.
+#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
+# error Do not include gtest_pred_impl.h directly.  Include gtest.h instead.
+#endif  // GTEST_INCLUDE_GTEST_GTEST_H_
+
+// This header implements a family of generic predicate assertion
+// macros:
+//
+//   ASSERT_PRED_FORMAT1(pred_format, v1)
+//   ASSERT_PRED_FORMAT2(pred_format, v1, v2)
+//   ...
+//
+// where pred_format is a function or functor that takes n (in the
+// case of ASSERT_PRED_FORMATn) values and their source expression
+// text, and returns a testing::AssertionResult.  See the definition
+// of ASSERT_EQ in gtest.h for an example.
+//
+// If you don't care about formatting, you can use the more
+// restrictive version:
+//
+//   ASSERT_PRED1(pred, v1)
+//   ASSERT_PRED2(pred, v1, v2)
+//   ...
+//
+// where pred is an n-ary function or functor that returns bool,
+// and the values v1, v2, ..., must support the << operator for
+// streaming to std::ostream.
+//
+// We also define the EXPECT_* variations.
+//
+// For now we only support predicates whose arity is at most 5.
+// Please email googletestframework at googlegroups.com if you need
+// support for higher arities.
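+//
+// An illustrative sketch (editor's addition, not upstream gtest text;
+// IsPositive() and SumsTo() are hypothetical helpers):
+//
+//   bool IsPositive(int n) { return n > 0; }
+//   bool SumsTo(int a, int b, int total) { return a + b == total; }
+//
+//   EXPECT_PRED1(IsPositive, 5);
+//   ASSERT_PRED3(SumsTo, 2, 3, 5);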
+
+// GTEST_ASSERT_ is the basic statement to which all of the assertions
+// in this file reduce.  Don't use this in your code.
+
+#define GTEST_ASSERT_(expression, on_failure) \
+  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \
+  if (const ::testing::AssertionResult gtest_ar = (expression)) \
+    ; \
+  else \
+    on_failure(gtest_ar.failure_message())
+
+
+// Helper function for implementing {EXPECT|ASSERT}_PRED1.  Don't use
+// this in your code.
+template <typename Pred,
+          typename T1>
+AssertionResult AssertPred1Helper(const char* pred_text,
+                                  const char* e1,
+                                  Pred pred,
+                                  const T1& v1) {
+  if (pred(v1)) return AssertionSuccess();
+
+  return AssertionFailure() << pred_text << "("
+                            << e1 << ") evaluates to false, where"
+                            << "\n" << e1 << " evaluates to " << v1;
+}
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT1.
+// Don't use this in your code.
+#define GTEST_PRED_FORMAT1_(pred_format, v1, on_failure)\
+  GTEST_ASSERT_(pred_format(#v1, v1),\
+                on_failure)
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED1.  Don't use
+// this in your code.
+#define GTEST_PRED1_(pred, v1, on_failure)\
+  GTEST_ASSERT_(::testing::AssertPred1Helper(#pred, \
+                                             #v1, \
+                                             pred, \
+                                             v1), on_failure)
+
+// Unary predicate assertion macros.
+#define EXPECT_PRED_FORMAT1(pred_format, v1) \
+  GTEST_PRED_FORMAT1_(pred_format, v1, GTEST_NONFATAL_FAILURE_)
+#define EXPECT_PRED1(pred, v1) \
+  GTEST_PRED1_(pred, v1, GTEST_NONFATAL_FAILURE_)
+#define ASSERT_PRED_FORMAT1(pred_format, v1) \
+  GTEST_PRED_FORMAT1_(pred_format, v1, GTEST_FATAL_FAILURE_)
+#define ASSERT_PRED1(pred, v1) \
+  GTEST_PRED1_(pred, v1, GTEST_FATAL_FAILURE_)
+
+
+
+// Helper function for implementing {EXPECT|ASSERT}_PRED2.  Don't use
+// this in your code.
+template <typename Pred,
+          typename T1,
+          typename T2>
+AssertionResult AssertPred2Helper(const char* pred_text,
+                                  const char* e1,
+                                  const char* e2,
+                                  Pred pred,
+                                  const T1& v1,
+                                  const T2& v2) {
+  if (pred(v1, v2)) return AssertionSuccess();
+
+  return AssertionFailure() << pred_text << "("
+                            << e1 << ", "
+                            << e2 << ") evaluates to false, where"
+                            << "\n" << e1 << " evaluates to " << v1
+                            << "\n" << e2 << " evaluates to " << v2;
+}
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT2.
+// Don't use this in your code.
+#define GTEST_PRED_FORMAT2_(pred_format, v1, v2, on_failure)\
+  GTEST_ASSERT_(pred_format(#v1, #v2, v1, v2),\
+                on_failure)
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED2.  Don't use
+// this in your code.
+#define GTEST_PRED2_(pred, v1, v2, on_failure)\
+  GTEST_ASSERT_(::testing::AssertPred2Helper(#pred, \
+                                             #v1, \
+                                             #v2, \
+                                             pred, \
+                                             v1, \
+                                             v2), on_failure)
+
+// Binary predicate assertion macros.
+#define EXPECT_PRED_FORMAT2(pred_format, v1, v2) \
+  GTEST_PRED_FORMAT2_(pred_format, v1, v2, GTEST_NONFATAL_FAILURE_)
+#define EXPECT_PRED2(pred, v1, v2) \
+  GTEST_PRED2_(pred, v1, v2, GTEST_NONFATAL_FAILURE_)
+#define ASSERT_PRED_FORMAT2(pred_format, v1, v2) \
+  GTEST_PRED_FORMAT2_(pred_format, v1, v2, GTEST_FATAL_FAILURE_)
+#define ASSERT_PRED2(pred, v1, v2) \
+  GTEST_PRED2_(pred, v1, v2, GTEST_FATAL_FAILURE_)
+
+
+
+// Helper function for implementing {EXPECT|ASSERT}_PRED3.  Don't use
+// this in your code.
+template <typename Pred,
+          typename T1,
+          typename T2,
+          typename T3>
+AssertionResult AssertPred3Helper(const char* pred_text,
+                                  const char* e1,
+                                  const char* e2,
+                                  const char* e3,
+                                  Pred pred,
+                                  const T1& v1,
+                                  const T2& v2,
+                                  const T3& v3) {
+  if (pred(v1, v2, v3)) return AssertionSuccess();
+
+  return AssertionFailure() << pred_text << "("
+                            << e1 << ", "
+                            << e2 << ", "
+                            << e3 << ") evaluates to false, where"
+                            << "\n" << e1 << " evaluates to " << v1
+                            << "\n" << e2 << " evaluates to " << v2
+                            << "\n" << e3 << " evaluates to " << v3;
+}
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT3.
+// Don't use this in your code.
+#define GTEST_PRED_FORMAT3_(pred_format, v1, v2, v3, on_failure)\
+  GTEST_ASSERT_(pred_format(#v1, #v2, #v3, v1, v2, v3),\
+                on_failure)
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED3.  Don't use
+// this in your code.
+#define GTEST_PRED3_(pred, v1, v2, v3, on_failure)\
+  GTEST_ASSERT_(::testing::AssertPred3Helper(#pred, \
+                                             #v1, \
+                                             #v2, \
+                                             #v3, \
+                                             pred, \
+                                             v1, \
+                                             v2, \
+                                             v3), on_failure)
+
+// Ternary predicate assertion macros.
+#define EXPECT_PRED_FORMAT3(pred_format, v1, v2, v3) \
+  GTEST_PRED_FORMAT3_(pred_format, v1, v2, v3, GTEST_NONFATAL_FAILURE_)
+#define EXPECT_PRED3(pred, v1, v2, v3) \
+  GTEST_PRED3_(pred, v1, v2, v3, GTEST_NONFATAL_FAILURE_)
+#define ASSERT_PRED_FORMAT3(pred_format, v1, v2, v3) \
+  GTEST_PRED_FORMAT3_(pred_format, v1, v2, v3, GTEST_FATAL_FAILURE_)
+#define ASSERT_PRED3(pred, v1, v2, v3) \
+  GTEST_PRED3_(pred, v1, v2, v3, GTEST_FATAL_FAILURE_)
+
+
+
+// Helper function for implementing {EXPECT|ASSERT}_PRED4.  Don't use
+// this in your code.
+template <typename Pred,
+          typename T1,
+          typename T2,
+          typename T3,
+          typename T4>
+AssertionResult AssertPred4Helper(const char* pred_text,
+                                  const char* e1,
+                                  const char* e2,
+                                  const char* e3,
+                                  const char* e4,
+                                  Pred pred,
+                                  const T1& v1,
+                                  const T2& v2,
+                                  const T3& v3,
+                                  const T4& v4) {
+  if (pred(v1, v2, v3, v4)) return AssertionSuccess();
+
+  return AssertionFailure() << pred_text << "("
+                            << e1 << ", "
+                            << e2 << ", "
+                            << e3 << ", "
+                            << e4 << ") evaluates to false, where"
+                            << "\n" << e1 << " evaluates to " << v1
+                            << "\n" << e2 << " evaluates to " << v2
+                            << "\n" << e3 << " evaluates to " << v3
+                            << "\n" << e4 << " evaluates to " << v4;
+}
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT4.
+// Don't use this in your code.
+#define GTEST_PRED_FORMAT4_(pred_format, v1, v2, v3, v4, on_failure)\
+  GTEST_ASSERT_(pred_format(#v1, #v2, #v3, #v4, v1, v2, v3, v4),\
+                on_failure)
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED4.  Don't use
+// this in your code.
+#define GTEST_PRED4_(pred, v1, v2, v3, v4, on_failure)\
+  GTEST_ASSERT_(::testing::AssertPred4Helper(#pred, \
+                                             #v1, \
+                                             #v2, \
+                                             #v3, \
+                                             #v4, \
+                                             pred, \
+                                             v1, \
+                                             v2, \
+                                             v3, \
+                                             v4), on_failure)
+
+// 4-ary predicate assertion macros.
+#define EXPECT_PRED_FORMAT4(pred_format, v1, v2, v3, v4) \
+  GTEST_PRED_FORMAT4_(pred_format, v1, v2, v3, v4, GTEST_NONFATAL_FAILURE_)
+#define EXPECT_PRED4(pred, v1, v2, v3, v4) \
+  GTEST_PRED4_(pred, v1, v2, v3, v4, GTEST_NONFATAL_FAILURE_)
+#define ASSERT_PRED_FORMAT4(pred_format, v1, v2, v3, v4) \
+  GTEST_PRED_FORMAT4_(pred_format, v1, v2, v3, v4, GTEST_FATAL_FAILURE_)
+#define ASSERT_PRED4(pred, v1, v2, v3, v4) \
+  GTEST_PRED4_(pred, v1, v2, v3, v4, GTEST_FATAL_FAILURE_)
+
+
+
+// Helper function for implementing {EXPECT|ASSERT}_PRED5.  Don't use
+// this in your code.
+template <typename Pred,
+          typename T1,
+          typename T2,
+          typename T3,
+          typename T4,
+          typename T5>
+AssertionResult AssertPred5Helper(const char* pred_text,
+                                  const char* e1,
+                                  const char* e2,
+                                  const char* e3,
+                                  const char* e4,
+                                  const char* e5,
+                                  Pred pred,
+                                  const T1& v1,
+                                  const T2& v2,
+                                  const T3& v3,
+                                  const T4& v4,
+                                  const T5& v5) {
+  if (pred(v1, v2, v3, v4, v5)) return AssertionSuccess();
+
+  return AssertionFailure() << pred_text << "("
+                            << e1 << ", "
+                            << e2 << ", "
+                            << e3 << ", "
+                            << e4 << ", "
+                            << e5 << ") evaluates to false, where"
+                            << "\n" << e1 << " evaluates to " << v1
+                            << "\n" << e2 << " evaluates to " << v2
+                            << "\n" << e3 << " evaluates to " << v3
+                            << "\n" << e4 << " evaluates to " << v4
+                            << "\n" << e5 << " evaluates to " << v5;
+}
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT5.
+// Don't use this in your code.
+#define GTEST_PRED_FORMAT5_(pred_format, v1, v2, v3, v4, v5, on_failure)\
+  GTEST_ASSERT_(pred_format(#v1, #v2, #v3, #v4, #v5, v1, v2, v3, v4, v5),\
+                on_failure)
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED5.  Don't use
+// this in your code.
+#define GTEST_PRED5_(pred, v1, v2, v3, v4, v5, on_failure)\
+  GTEST_ASSERT_(::testing::AssertPred5Helper(#pred, \
+                                             #v1, \
+                                             #v2, \
+                                             #v3, \
+                                             #v4, \
+                                             #v5, \
+                                             pred, \
+                                             v1, \
+                                             v2, \
+                                             v3, \
+                                             v4, \
+                                             v5), on_failure)
+
+// 5-ary predicate assertion macros.
+#define EXPECT_PRED_FORMAT5(pred_format, v1, v2, v3, v4, v5) \
+  GTEST_PRED_FORMAT5_(pred_format, v1, v2, v3, v4, v5, GTEST_NONFATAL_FAILURE_)
+#define EXPECT_PRED5(pred, v1, v2, v3, v4, v5) \
+  GTEST_PRED5_(pred, v1, v2, v3, v4, v5, GTEST_NONFATAL_FAILURE_)
+#define ASSERT_PRED_FORMAT5(pred_format, v1, v2, v3, v4, v5) \
+  GTEST_PRED_FORMAT5_(pred_format, v1, v2, v3, v4, v5, GTEST_FATAL_FAILURE_)
+#define ASSERT_PRED5(pred, v1, v2, v3, v4, v5) \
+  GTEST_PRED5_(pred, v1, v2, v3, v4, v5, GTEST_FATAL_FAILURE_)
+
+
+
+#endif  // GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
+
+// Macros for testing equalities and inequalities.
+//
+//    * {ASSERT|EXPECT}_EQ(expected, actual): Tests that expected == actual
+//    * {ASSERT|EXPECT}_NE(v1, v2):           Tests that v1 != v2
+//    * {ASSERT|EXPECT}_LT(v1, v2):           Tests that v1 < v2
+//    * {ASSERT|EXPECT}_LE(v1, v2):           Tests that v1 <= v2
+//    * {ASSERT|EXPECT}_GT(v1, v2):           Tests that v1 > v2
+//    * {ASSERT|EXPECT}_GE(v1, v2):           Tests that v1 >= v2
+//
+// When they are not, Google Test prints both the tested expressions and
+// their actual values.  The values must be compatible built-in types,
+// or you will get a compiler error.  By "compatible" we mean that the
+// values can be compared by the respective operator.
+//
+// Note:
+//
+//   1. It is possible to make a user-defined type work with
+//   {ASSERT|EXPECT}_??(), but that requires overloading the
+//   comparison operators and is thus discouraged by the Google C++
+//   Usage Guide.  Therefore, you are advised to use the
+//   {ASSERT|EXPECT}_TRUE() macro to assert that two objects are
+//   equal.
+//
+//   2. The {ASSERT|EXPECT}_??() macros do pointer comparisons on
+//   pointers (in particular, C strings).  Therefore, if you use it
+//   with two C strings, you are testing how their locations in memory
+//   are related, not how their content is related.  To compare two C
+//   strings by content, use {ASSERT|EXPECT}_STR*().
+//
+//   3. {ASSERT|EXPECT}_EQ(expected, actual) is preferred to
+//   {ASSERT|EXPECT}_TRUE(expected == actual), as the former tells you
+//   what the actual value is when it fails, and similarly for the
+//   other comparisons.
+//
+//   4. Do not depend on the order in which {ASSERT|EXPECT}_??()
+//   evaluate their arguments, which is undefined.
+//
+//   5. These macros evaluate their arguments exactly once.
+//
+// Examples:
+//
+//   EXPECT_NE(5, Foo());
+//   EXPECT_EQ(NULL, a_pointer);
+//   ASSERT_LT(i, array_size);
+//   ASSERT_GT(records.size(), 0) << "There is no record left.";
+
+#define EXPECT_EQ(expected, actual) \
+  EXPECT_PRED_FORMAT2(::testing::internal:: \
+                      EqHelper<GTEST_IS_NULL_LITERAL_(expected)>::Compare, \
+                      expected, actual)
+#define EXPECT_NE(expected, actual) \
+  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperNE, expected, actual)
+#define EXPECT_LE(val1, val2) \
+  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperLE, val1, val2)
+#define EXPECT_LT(val1, val2) \
+  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperLT, val1, val2)
+#define EXPECT_GE(val1, val2) \
+  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperGE, val1, val2)
+#define EXPECT_GT(val1, val2) \
+  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperGT, val1, val2)
+
+#define GTEST_ASSERT_EQ(expected, actual) \
+  ASSERT_PRED_FORMAT2(::testing::internal:: \
+                      EqHelper<GTEST_IS_NULL_LITERAL_(expected)>::Compare, \
+                      expected, actual)
+#define GTEST_ASSERT_NE(val1, val2) \
+  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperNE, val1, val2)
+#define GTEST_ASSERT_LE(val1, val2) \
+  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperLE, val1, val2)
+#define GTEST_ASSERT_LT(val1, val2) \
+  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperLT, val1, val2)
+#define GTEST_ASSERT_GE(val1, val2) \
+  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperGE, val1, val2)
+#define GTEST_ASSERT_GT(val1, val2) \
+  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperGT, val1, val2)
+
+// Define macro GTEST_DONT_DEFINE_ASSERT_XY to 1 to omit the definition of
+// ASSERT_XY(), which clashes with some users' own code.
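+//
+// For example (editor's illustration, not upstream gtest text), a project
+// whose own headers already define ASSERT_EQ can still use gtest by
+// defining the guard macro before this header is included:
+//
+//   #define GTEST_DONT_DEFINE_ASSERT_EQ 1
+//   #include "gtest/gtest.h"
+//
+// and writing GTEST_ASSERT_EQ(expected, actual) in its tests instead.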
+
+#if !GTEST_DONT_DEFINE_ASSERT_EQ
+# define ASSERT_EQ(val1, val2) GTEST_ASSERT_EQ(val1, val2)
+#endif
+
+#if !GTEST_DONT_DEFINE_ASSERT_NE
+# define ASSERT_NE(val1, val2) GTEST_ASSERT_NE(val1, val2)
+#endif
+
+#if !GTEST_DONT_DEFINE_ASSERT_LE
+# define ASSERT_LE(val1, val2) GTEST_ASSERT_LE(val1, val2)
+#endif
+
+#if !GTEST_DONT_DEFINE_ASSERT_LT
+# define ASSERT_LT(val1, val2) GTEST_ASSERT_LT(val1, val2)
+#endif
+
+#if !GTEST_DONT_DEFINE_ASSERT_GE
+# define ASSERT_GE(val1, val2) GTEST_ASSERT_GE(val1, val2)
+#endif
+
+#if !GTEST_DONT_DEFINE_ASSERT_GT
+# define ASSERT_GT(val1, val2) GTEST_ASSERT_GT(val1, val2)
+#endif
+
+// C String Comparisons.  All tests treat NULL and any non-NULL string
+// as different.  Two NULLs are equal.
+//
+//    * {ASSERT|EXPECT}_STREQ(s1, s2):     Tests that s1 == s2
+//    * {ASSERT|EXPECT}_STRNE(s1, s2):     Tests that s1 != s2
+//    * {ASSERT|EXPECT}_STRCASEEQ(s1, s2): Tests that s1 == s2, ignoring case
+//    * {ASSERT|EXPECT}_STRCASENE(s1, s2): Tests that s1 != s2, ignoring case
+//
+// For wide or narrow string objects, you can use the
+// {ASSERT|EXPECT}_??() macros.
+//
+// Don't depend on the order in which the arguments are evaluated,
+// which is undefined.
+//
+// These macros evaluate their arguments exactly once.
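+//
+// An illustrative sketch (editor's addition, not upstream gtest text):
+//
+//   const char* greeting = "hello";
+//   EXPECT_STREQ("hello", greeting);       // compares contents, not pointers
+//   EXPECT_STRCASEEQ("HELLO", greeting);   // case-insensitive match
+//   EXPECT_STRNE("hello", "world");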
+
+#define EXPECT_STREQ(expected, actual) \
+  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperSTREQ, expected, actual)
+#define EXPECT_STRNE(s1, s2) \
+  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperSTRNE, s1, s2)
+#define EXPECT_STRCASEEQ(expected, actual) \
+  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperSTRCASEEQ, expected, actual)
+#define EXPECT_STRCASENE(s1, s2)\
+  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperSTRCASENE, s1, s2)
+
+#define ASSERT_STREQ(expected, actual) \
+  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperSTREQ, expected, actual)
+#define ASSERT_STRNE(s1, s2) \
+  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperSTRNE, s1, s2)
+#define ASSERT_STRCASEEQ(expected, actual) \
+  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperSTRCASEEQ, expected, actual)
+#define ASSERT_STRCASENE(s1, s2)\
+  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperSTRCASENE, s1, s2)
+
+// Macros for comparing floating-point numbers.
+//
+//    * {ASSERT|EXPECT}_FLOAT_EQ(expected, actual):
+//         Tests that two float values are almost equal.
+//    * {ASSERT|EXPECT}_DOUBLE_EQ(expected, actual):
+//         Tests that two double values are almost equal.
+//    * {ASSERT|EXPECT}_NEAR(v1, v2, abs_error):
+//         Tests that v1 and v2 are within the given distance to each other.
+//
+// Google Test uses ULP-based comparison to automatically pick a default
+// error bound that is appropriate for the operands.  See the
+// FloatingPoint template class in gtest-internal.h if you are
+// interested in the implementation details.
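+//
+// An illustrative sketch (editor's addition, not upstream gtest text):
+//
+//   EXPECT_FLOAT_EQ(1.0f / 3.0f, 0.333333343f);  // ULP-based default bound
+//   EXPECT_NEAR(3.14159, 22.0 / 7.0, 0.0013);    // explicit absolute error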
+
+#define EXPECT_FLOAT_EQ(expected, actual)\
+  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperFloatingPointEQ<float>, \
+                      expected, actual)
+
+#define EXPECT_DOUBLE_EQ(expected, actual)\
+  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperFloatingPointEQ<double>, \
+                      expected, actual)
+
+#define ASSERT_FLOAT_EQ(expected, actual)\
+  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperFloatingPointEQ<float>, \
+                      expected, actual)
+
+#define ASSERT_DOUBLE_EQ(expected, actual)\
+  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperFloatingPointEQ<double>, \
+                      expected, actual)
+
+#define EXPECT_NEAR(val1, val2, abs_error)\
+  EXPECT_PRED_FORMAT3(::testing::internal::DoubleNearPredFormat, \
+                      val1, val2, abs_error)
+
+#define ASSERT_NEAR(val1, val2, abs_error)\
+  ASSERT_PRED_FORMAT3(::testing::internal::DoubleNearPredFormat, \
+                      val1, val2, abs_error)
+
+// These predicate format functions work on floating-point values, and
+// can be used in {ASSERT|EXPECT}_PRED_FORMAT2*(), e.g.
+//
+//   EXPECT_PRED_FORMAT2(testing::DoubleLE, Foo(), 5.0);
+
+// Asserts that val1 is less than, or almost equal to, val2.  Fails
+// otherwise.  In particular, it fails if either val1 or val2 is NaN.
+GTEST_API_ AssertionResult FloatLE(const char* expr1, const char* expr2,
+                                   float val1, float val2);
+GTEST_API_ AssertionResult DoubleLE(const char* expr1, const char* expr2,
+                                    double val1, double val2);
+
+
+#if GTEST_OS_WINDOWS
+
+// Macros that test for HRESULT failure and success.  These are only useful
+// on Windows and rely on Windows SDK macros and APIs to compile.
+//
+//    * {ASSERT|EXPECT}_HRESULT_{SUCCEEDED|FAILED}(expr)
+//
+// When expr unexpectedly fails or succeeds, Google Test prints the
+// expected result and the actual result with both a human-readable
+// string representation of the error, if available, as well as the
+// hex result code.
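+//
+// An illustrative sketch (editor's addition; CoInitialize is a standard
+// Windows COM API that returns an HRESULT):
+//
+//   ASSERT_HRESULT_SUCCEEDED(::CoInitialize(NULL));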
+# define EXPECT_HRESULT_SUCCEEDED(expr) \
+    EXPECT_PRED_FORMAT1(::testing::internal::IsHRESULTSuccess, (expr))
+
+# define ASSERT_HRESULT_SUCCEEDED(expr) \
+    ASSERT_PRED_FORMAT1(::testing::internal::IsHRESULTSuccess, (expr))
+
+# define EXPECT_HRESULT_FAILED(expr) \
+    EXPECT_PRED_FORMAT1(::testing::internal::IsHRESULTFailure, (expr))
+
+# define ASSERT_HRESULT_FAILED(expr) \
+    ASSERT_PRED_FORMAT1(::testing::internal::IsHRESULTFailure, (expr))
+
+#endif  // GTEST_OS_WINDOWS
+
+// Macros that execute statement and check that it doesn't generate new fatal
+// failures in the current thread.
+//
+//   * {ASSERT|EXPECT}_NO_FATAL_FAILURE(statement);
+//
+// Examples:
+//
+//   EXPECT_NO_FATAL_FAILURE(Process());
+//   ASSERT_NO_FATAL_FAILURE(Process()) << "Process() failed";
+//
+#define ASSERT_NO_FATAL_FAILURE(statement) \
+    GTEST_TEST_NO_FATAL_FAILURE_(statement, GTEST_FATAL_FAILURE_)
+#define EXPECT_NO_FATAL_FAILURE(statement) \
+    GTEST_TEST_NO_FATAL_FAILURE_(statement, GTEST_NONFATAL_FAILURE_)
+
+// Causes a trace (including the source file path, the current line
+// number, and the given message) to be included in every test failure
+// message generated by code in the current scope.  The effect is
+// undone when the control leaves the current scope.
+//
+// The message argument can be anything streamable to std::ostream.
+//
+// In the implementation, we include the current line number as part
+// of the dummy variable name, thus allowing multiple SCOPED_TRACE()s
+// to appear in the same block - as long as they are on different
+// lines.
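+//
+// An illustrative sketch (editor's addition, not upstream gtest text;
+// CheckValue() is a hypothetical helper containing EXPECT_* calls):
+//
+//   TEST(FooTest, HandlesBothInputs) {
+//     {
+//       SCOPED_TRACE("while checking input 1");  // added to failures below
+//       CheckValue(1);
+//     }
+//     CheckValue(9);  // failures here carry no extra trace
+//   }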
+#define SCOPED_TRACE(message) \
+  ::testing::internal::ScopedTrace GTEST_CONCAT_TOKEN_(gtest_trace_, __LINE__)(\
+    __FILE__, __LINE__, ::testing::Message() << (message))
+
+// Compile-time assertion for type equality.
+// StaticAssertTypeEq<type1, type2>() compiles iff type1 and type2 are
+// the same type.  The value it returns is not interesting.
+//
+// Instead of making StaticAssertTypeEq a class template, we make it a
+// function template that invokes a helper class template.  This
+// prevents a user from misusing StaticAssertTypeEq<T1, T2> by
+// defining objects of that type.
+//
+// CAVEAT:
+//
+// When used inside a method of a class template,
+// StaticAssertTypeEq<T1, T2>() is effective ONLY IF the method is
+// instantiated.  For example, given:
+//
+//   template <typename T> class Foo {
+//    public:
+//     void Bar() { testing::StaticAssertTypeEq<int, T>(); }
+//   };
+//
+// the code:
+//
+//   void Test1() { Foo<bool> foo; }
+//
+// will NOT generate a compiler error, as Foo<bool>::Bar() is never
+// actually instantiated.  Instead, you need:
+//
+//   void Test2() { Foo<bool> foo; foo.Bar(); }
+//
+// to cause a compiler error.
+template <typename T1, typename T2>
+bool StaticAssertTypeEq() {
+  (void)internal::StaticAssertTypeEqHelper<T1, T2>();
+  return true;
+}
+
+// Defines a test.
+//
+// The first parameter is the name of the test case, and the second
+// parameter is the name of the test within the test case.
+//
+// The convention is to end the test case name with "Test".  For
+// example, a test case for the Foo class can be named FooTest.
+//
+// The user should put his test code between braces after using this
+// macro.  Example:
+//
+//   TEST(FooTest, InitializesCorrectly) {
+//     Foo foo;
+//     EXPECT_TRUE(foo.StatusIsOK());
+//   }
+
+// Note that we call GetTestTypeId() instead of GetTypeId<
+// ::testing::Test>() here to get the type ID of testing::Test.  This
+// is to work around a suspected linker bug when using Google Test as
+// a framework on Mac OS X.  The bug causes GetTypeId<
+// ::testing::Test>() to return different values depending on whether
+// the call is from the Google Test framework itself or from user test
+// code.  GetTestTypeId() is guaranteed to always return the same
+// value, as it always calls GetTypeId<>() from the Google Test
+// framework.
+#define GTEST_TEST(test_case_name, test_name)\
+  GTEST_TEST_(test_case_name, test_name, \
+              ::testing::Test, ::testing::internal::GetTestTypeId())
+
+// Define this macro to 1 to omit the definition of TEST(), which
+// is a generic name and clashes with some other libraries.
+#if !GTEST_DONT_DEFINE_TEST
+# define TEST(test_case_name, test_name) GTEST_TEST(test_case_name, test_name)
+#endif
+
+// Defines a test that uses a test fixture.
+//
+// The first parameter is the name of the test fixture class, which
+// also doubles as the test case name.  The second parameter is the
+// name of the test within the test case.
+//
+// A test fixture class must be declared earlier.  The user should put
+// his test code between braces after using this macro.  Example:
+//
+//   class FooTest : public testing::Test {
+//    protected:
+//     virtual void SetUp() { b_.AddElement(3); }
+//
+//     Foo a_;
+//     Foo b_;
+//   };
+//
+//   TEST_F(FooTest, InitializesCorrectly) {
+//     EXPECT_TRUE(a_.StatusIsOK());
+//   }
+//
+//   TEST_F(FooTest, ReturnsElementCountCorrectly) {
+//     EXPECT_EQ(0, a_.size());
+//     EXPECT_EQ(1, b_.size());
+//   }
+
+#define TEST_F(test_fixture, test_name)\
+  GTEST_TEST_(test_fixture, test_name, test_fixture, \
+              ::testing::internal::GetTypeId<test_fixture>())
+
+// Use this macro in main() to run all tests.  It returns 0 if all
+// tests are successful, or 1 otherwise.
+//
+// RUN_ALL_TESTS() should be invoked after the command line has been
+// parsed by InitGoogleTest().
+
+#define RUN_ALL_TESTS()\
+  (::testing::UnitTest::GetInstance()->Run())
+
+}  // namespace testing
+
+#endif  // GTEST_INCLUDE_GTEST_GTEST_H_
diff --git a/src/gtest/gtest_main.cc b/src/gtest/gtest_main.cc
new file mode 100644
index 0000000..a09bbe0
--- /dev/null
+++ b/src/gtest/gtest_main.cc
@@ -0,0 +1,39 @@
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include <iostream>
+
+#include "gtest/gtest.h"
+
+GTEST_API_ int main(int argc, char **argv) {
+  std::cout << "Running main() from gtest_main.cc\n";
+
+  testing::InitGoogleTest(&argc, argv);
+  return RUN_ALL_TESTS();
+}
diff --git a/tools/CMakeLists.txt b/tools/CMakeLists.txt
new file mode 100644
index 0000000..02fbd5c
--- /dev/null
+++ b/tools/CMakeLists.txt
@@ -0,0 +1,29 @@
+# Collect source files
+file(GLOB_RECURSE srcs ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
+
+# Build each source file independently
+foreach(source ${srcs})
+  get_filename_component(name ${source} NAME_WE)
+
+  # caffe target already exists
+  if(name MATCHES "caffe")
+    set(name ${name}.bin)
+  endif()
+
+  # target
+  add_executable(${name} ${source})
+  target_link_libraries(${name} ${Caffe_LINK})
+  caffe_default_properties(${name})
+
+  # set back RUNTIME_OUTPUT_DIRECTORY
+  caffe_set_runtime_directory(${name} "${PROJECT_BINARY_DIR}/tools")
+  caffe_set_solution_folder(${name} tools)
+
+  # restore output name without suffix
+  if(name MATCHES "caffe.bin")
+    set_target_properties(${name} PROPERTIES OUTPUT_NAME caffe)
+  endif()
+
+  # Install
+  install(TARGETS ${name} DESTINATION bin)
+endforeach(source)
diff --git a/tools/caffe.cpp b/tools/caffe.cpp
new file mode 100644
index 0000000..0b7523f
--- /dev/null
+++ b/tools/caffe.cpp
@@ -0,0 +1,311 @@
+#include <glog/logging.h>
+
+#include <cstring>
+#include <map>
+#include <string>
+#include <vector>
+
+#include "boost/algorithm/string.hpp"
+#include "caffe/caffe.hpp"
+
+using caffe::Blob;
+using caffe::Caffe;
+using caffe::Net;
+using caffe::Layer;
+using caffe::shared_ptr;
+using caffe::Timer;
+using caffe::vector;
+
+
+DEFINE_int32(gpu, -1,
+    "Run in GPU mode on given device ID.");
+DEFINE_string(solver, "",
+    "The solver definition protocol buffer text file.");
+DEFINE_string(model, "",
+    "The model definition protocol buffer text file..");
+DEFINE_string(snapshot, "",
+    "Optional; the snapshot solver state to resume training.");
+DEFINE_string(weights, "",
+    "Optional; the pretrained weights to initialize finetuning. "
+    "Cannot be set simultaneously with snapshot.");
+DEFINE_int32(iterations, 50,
+    "The number of iterations to run.");
+
+// A simple registry for caffe commands.
+typedef int (*BrewFunction)();
+typedef std::map<caffe::string, BrewFunction> BrewMap;
+BrewMap g_brew_map;
+
+#define RegisterBrewFunction(func) \
+namespace { \
+class __Registerer_##func { \
+ public: /* NOLINT */ \
+  __Registerer_##func() { \
+    g_brew_map[#func] = &func; \
+  } \
+}; \
+__Registerer_##func g_registerer_##func; \
+}
+
+static BrewFunction GetBrewFunction(const caffe::string& name) {
+  if (g_brew_map.count(name)) {
+    return g_brew_map[name];
+  } else {
+    LOG(ERROR) << "Available caffe actions:";
+    for (BrewMap::iterator it = g_brew_map.begin();
+         it != g_brew_map.end(); ++it) {
+      LOG(ERROR) << "\t" << it->first;
+    }
+    LOG(FATAL) << "Unknown action: " << name;
+    return NULL;  // not reachable, just to suppress old compiler warnings.
+  }
+}
+
+// caffe commands, invoked as
+//     caffe <command> <args>
+//
+// To add a command, define a function "int command()" and register it with
+// RegisterBrewFunction(command);
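+//
+// For example (editor's illustration; "version" is a hypothetical command,
+// not part of this file):
+//
+//   int version() {
+//     LOG(INFO) << "Caffe command-line tool";
+//     return 0;
+//   }
+//   RegisterBrewFunction(version);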
+
+// Device Query: show diagnostic information for a GPU device.
+int device_query() {
+  CHECK_GT(FLAGS_gpu, -1) << "Need a device ID to query.";
+  LOG(INFO) << "Querying device ID = " << FLAGS_gpu;
+  caffe::Caffe::SetDevice(FLAGS_gpu);
+  caffe::Caffe::DeviceQuery();
+  return 0;
+}
+RegisterBrewFunction(device_query);
+
+// Load the weights from the specified caffemodel(s) into the train and
+// test nets.
+void CopyLayers(caffe::Solver<float>* solver, const std::string& model_list) {
+  std::vector<std::string> model_names;
+  boost::split(model_names, model_list, boost::is_any_of(",") );
+  for (int i = 0; i < model_names.size(); ++i) {
+    LOG(INFO) << "Finetuning from " << model_names[i];
+    solver->net()->CopyTrainedLayersFrom(model_names[i]);
+    for (int j = 0; j < solver->test_nets().size(); ++j) {
+      solver->test_nets()[j]->CopyTrainedLayersFrom(model_names[i]);
+    }
+  }
+}
+
+// Train / Finetune a model.
+int train() {
+  CHECK_GT(FLAGS_solver.size(), 0) << "Need a solver definition to train.";
+  CHECK(!FLAGS_snapshot.size() || !FLAGS_weights.size())
+      << "Give a snapshot to resume training or weights to finetune "
+      "but not both.";
+
+  caffe::SolverParameter solver_param;
+  caffe::ReadProtoFromTextFileOrDie(FLAGS_solver, &solver_param);
+
+  // If the gpu flag is not provided, allow the mode and device to be set
+  // in the solver prototxt.
+  if (FLAGS_gpu < 0
+      && solver_param.solver_mode() == caffe::SolverParameter_SolverMode_GPU) {
+    FLAGS_gpu = solver_param.device_id();
+  }
+
+  // Set device id and mode
+  if (FLAGS_gpu >= 0) {
+    LOG(INFO) << "Use GPU with device ID " << FLAGS_gpu;
+    Caffe::SetDevice(FLAGS_gpu);
+    Caffe::set_mode(Caffe::GPU);
+  } else {
+    LOG(INFO) << "Use CPU.";
+    Caffe::set_mode(Caffe::CPU);
+  }
+
+  LOG(INFO) << "Starting Optimization";
+  shared_ptr<caffe::Solver<float> >
+    solver(caffe::GetSolver<float>(solver_param));
+
+  if (FLAGS_snapshot.size()) {
+    LOG(INFO) << "Resuming from " << FLAGS_snapshot;
+    solver->Solve(FLAGS_snapshot);
+  } else if (FLAGS_weights.size()) {
+    CopyLayers(&*solver, FLAGS_weights);
+    solver->Solve();
+  } else {
+    solver->Solve();
+  }
+  LOG(INFO) << "Optimization Done.";
+  return 0;
+}
+RegisterBrewFunction(train);
+
+
+// Test: score a model.
+int test() {
+  CHECK_GT(FLAGS_model.size(), 0) << "Need a model definition to score.";
+  CHECK_GT(FLAGS_weights.size(), 0) << "Need model weights to score.";
+
+  // Set device id and mode
+  if (FLAGS_gpu >= 0) {
+    LOG(INFO) << "Use GPU with device ID " << FLAGS_gpu;
+    Caffe::SetDevice(FLAGS_gpu);
+    Caffe::set_mode(Caffe::GPU);
+  } else {
+    LOG(INFO) << "Use CPU.";
+    Caffe::set_mode(Caffe::CPU);
+  }
+  // Instantiate the caffe net.
+  Net<float> caffe_net(FLAGS_model, caffe::TEST);
+  caffe_net.CopyTrainedLayersFrom(FLAGS_weights);
+  LOG(INFO) << "Running for " << FLAGS_iterations << " iterations.";
+
+  vector<Blob<float>* > bottom_vec;
+  vector<int> test_score_output_id;
+  vector<float> test_score;
+  float loss = 0;
+  for (int i = 0; i < FLAGS_iterations; ++i) {
+    float iter_loss;
+    const vector<Blob<float>*>& result =
+        caffe_net.Forward(bottom_vec, &iter_loss);
+    loss += iter_loss;
+    int idx = 0;
+    for (int j = 0; j < result.size(); ++j) {
+      const float* result_vec = result[j]->cpu_data();
+      for (int k = 0; k < result[j]->count(); ++k, ++idx) {
+        const float score = result_vec[k];
+        if (i == 0) {
+          test_score.push_back(score);
+          test_score_output_id.push_back(j);
+        } else {
+          test_score[idx] += score;
+        }
+        const std::string& output_name = caffe_net.blob_names()[
+            caffe_net.output_blob_indices()[j]];
+        LOG(INFO) << "Batch " << i << ", " << output_name << " = " << score;
+      }
+    }
+  }
+  loss /= FLAGS_iterations;
+  LOG(INFO) << "Loss: " << loss;
+  for (int i = 0; i < test_score.size(); ++i) {
+    const std::string& output_name = caffe_net.blob_names()[
+        caffe_net.output_blob_indices()[test_score_output_id[i]]];
+    const float loss_weight = caffe_net.blob_loss_weights()[
+        caffe_net.output_blob_indices()[test_score_output_id[i]]];
+    std::ostringstream loss_msg_stream;
+    const float mean_score = test_score[i] / FLAGS_iterations;
+    if (loss_weight) {
+      loss_msg_stream << " (* " << loss_weight
+                      << " = " << loss_weight * mean_score << " loss)";
+    }
+    LOG(INFO) << output_name << " = " << mean_score << loss_msg_stream.str();
+  }
+
+  return 0;
+}
+RegisterBrewFunction(test);
+
+
+// Time: benchmark the execution time of a model.
+int time() {
+  CHECK_GT(FLAGS_model.size(), 0) << "Need a model definition to time.";
+
+  // Set device id and mode
+  if (FLAGS_gpu >= 0) {
+    LOG(INFO) << "Use GPU with device ID " << FLAGS_gpu;
+    Caffe::SetDevice(FLAGS_gpu);
+    Caffe::set_mode(Caffe::GPU);
+  } else {
+    LOG(INFO) << "Use CPU.";
+    Caffe::set_mode(Caffe::CPU);
+  }
+  // Instantiate the caffe net.
+  Net<float> caffe_net(FLAGS_model, caffe::TRAIN);
+
+  // Do a clean forward and backward pass so that memory allocations are done
+  // and future iterations will be more stable.
+  LOG(INFO) << "Performing Forward";
+  // Note that for the speed benchmark, we will assume that the network does
+  // not take any input blobs.
+  float initial_loss;
+  caffe_net.Forward(vector<Blob<float>*>(), &initial_loss);
+  LOG(INFO) << "Initial loss: " << initial_loss;
+  LOG(INFO) << "Performing Backward";
+  caffe_net.Backward();
+
+  const vector<shared_ptr<Layer<float> > >& layers = caffe_net.layers();
+  const vector<vector<Blob<float>*> >& bottom_vecs = caffe_net.bottom_vecs();
+  const vector<vector<Blob<float>*> >& top_vecs = caffe_net.top_vecs();
+  const vector<vector<bool> >& bottom_need_backward =
+      caffe_net.bottom_need_backward();
+  LOG(INFO) << "*** Benchmark begins ***";
+  LOG(INFO) << "Testing for " << FLAGS_iterations << " iterations.";
+  Timer total_timer;
+  total_timer.Start();
+  Timer forward_timer;
+  Timer backward_timer;
+  Timer timer;
+  std::vector<double> forward_time_per_layer(layers.size(), 0.0);
+  std::vector<double> backward_time_per_layer(layers.size(), 0.0);
+  double forward_time = 0.0;
+  double backward_time = 0.0;
+  for (int j = 0; j < FLAGS_iterations; ++j) {
+    Timer iter_timer;
+    iter_timer.Start();
+    forward_timer.Start();
+    for (int i = 0; i < layers.size(); ++i) {
+      timer.Start();
+      layers[i]->Forward(bottom_vecs[i], top_vecs[i]);
+      forward_time_per_layer[i] += timer.MicroSeconds();
+    }
+    forward_time += forward_timer.MicroSeconds();
+    backward_timer.Start();
+    for (int i = layers.size() - 1; i >= 0; --i) {
+      timer.Start();
+      layers[i]->Backward(top_vecs[i], bottom_need_backward[i],
+                          bottom_vecs[i]);
+      backward_time_per_layer[i] += timer.MicroSeconds();
+    }
+    backward_time += backward_timer.MicroSeconds();
+    LOG(INFO) << "Iteration: " << j + 1 << " forward-backward time: "
+      << iter_timer.MilliSeconds() << " ms.";
+  }
+  LOG(INFO) << "Average time per layer: ";
+  for (int i = 0; i < layers.size(); ++i) {
+    const caffe::string& layername = layers[i]->layer_param().name();
+    LOG(INFO) << std::setfill(' ') << std::setw(10) << layername <<
+      "\tforward: " << forward_time_per_layer[i] / 1000 /
+      FLAGS_iterations << " ms.";
+    LOG(INFO) << std::setfill(' ') << std::setw(10) << layername  <<
+      "\tbackward: " << backward_time_per_layer[i] / 1000 /
+      FLAGS_iterations << " ms.";
+  }
+  total_timer.Stop();
+  LOG(INFO) << "Average Forward pass: " << forward_time / 1000 /
+    FLAGS_iterations << " ms.";
+  LOG(INFO) << "Average Backward pass: " << backward_time / 1000 /
+    FLAGS_iterations << " ms.";
+  LOG(INFO) << "Average Forward-Backward: " << total_timer.MilliSeconds() /
+    FLAGS_iterations << " ms.";
+  LOG(INFO) << "Total Time: " << total_timer.MilliSeconds() << " ms.";
+  LOG(INFO) << "*** Benchmark ends ***";
+  return 0;
+}
+RegisterBrewFunction(time);
+
+int main(int argc, char** argv) {
+  // Print output to stderr (while still logging).
+  FLAGS_alsologtostderr = 1;
+  // Usage message.
+  gflags::SetUsageMessage("command line brew\n"
+      "usage: caffe <command> <args>\n\n"
+      "commands:\n"
+      "  train           train or finetune a model\n"
+      "  test            score a model\n"
+      "  device_query    show GPU diagnostic information\n"
+      "  time            benchmark model execution time");
+  // Run tool or show usage.
+  caffe::GlobalInit(&argc, &argv);
+  if (argc == 2) {
+    return GetBrewFunction(caffe::string(argv[1]))();
+  } else {
+    gflags::ShowUsageWithFlagsRestrict(argv[0], "tools/caffe");
+  }
+}
diff --git a/tools/compute_image_mean.cpp b/tools/compute_image_mean.cpp
new file mode 100644
index 0000000..b1fc7ca
--- /dev/null
+++ b/tools/compute_image_mean.cpp
@@ -0,0 +1,119 @@
+#include <stdint.h>
+#include <algorithm>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "boost/scoped_ptr.hpp"
+#include "gflags/gflags.h"
+#include "glog/logging.h"
+
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/db.hpp"
+#include "caffe/util/io.hpp"
+
+using namespace caffe;  // NOLINT(build/namespaces)
+
+using std::max;
+using std::pair;
+using boost::scoped_ptr;
+
+DEFINE_string(backend, "lmdb",
+        "The backend {leveldb, lmdb} containing the images");
+
+int main(int argc, char** argv) {
+  ::google::InitGoogleLogging(argv[0]);
+
+#ifndef GFLAGS_GFLAGS_H_
+  namespace gflags = google;
+#endif
+
+  gflags::SetUsageMessage("Compute the mean_image of a set of images given by"
+        " a leveldb/lmdb\n"
+        "Usage:\n"
+        "    compute_image_mean [FLAGS] INPUT_DB [OUTPUT_FILE]\n");
+
+  gflags::ParseCommandLineFlags(&argc, &argv, true);
+
+  if (argc < 2 || argc > 3) {
+    gflags::ShowUsageWithFlagsRestrict(argv[0], "tools/compute_image_mean");
+    return 1;
+  }
+
+  scoped_ptr<db::DB> db(db::GetDB(FLAGS_backend));
+  db->Open(argv[1], db::READ);
+  scoped_ptr<db::Cursor> cursor(db->NewCursor());
+
+  BlobProto sum_blob;
+  int count = 0;
+  // load first datum
+  Datum datum;
+  datum.ParseFromString(cursor->value());
+
+  if (DecodeDatumNative(&datum)) {
+    LOG(INFO) << "Decoding Datum";
+  }
+
+  sum_blob.set_num(1);
+  sum_blob.set_channels(datum.channels());
+  sum_blob.set_height(datum.height());
+  sum_blob.set_width(datum.width());
+  const int data_size = datum.channels() * datum.height() * datum.width();
+  int size_in_datum = std::max<int>(datum.data().size(),
+                                    datum.float_data_size());
+  for (int i = 0; i < size_in_datum; ++i) {
+    sum_blob.add_data(0.);
+  }
+  LOG(INFO) << "Starting Iteration";
+  while (cursor->valid()) {
+    Datum datum;
+    datum.ParseFromString(cursor->value());
+    DecodeDatumNative(&datum);
+
+    const std::string& data = datum.data();
+    size_in_datum = std::max<int>(datum.data().size(),
+        datum.float_data_size());
+    CHECK_EQ(size_in_datum, data_size) << "Incorrect data field size " <<
+        size_in_datum;
+    if (data.size() != 0) {
+      CHECK_EQ(data.size(), size_in_datum);
+      for (int i = 0; i < size_in_datum; ++i) {
+        sum_blob.set_data(i, sum_blob.data(i) + (uint8_t)data[i]);
+      }
+    } else {
+      CHECK_EQ(datum.float_data_size(), size_in_datum);
+      for (int i = 0; i < size_in_datum; ++i) {
+        sum_blob.set_data(i, sum_blob.data(i) +
+            static_cast<float>(datum.float_data(i)));
+      }
+    }
+    ++count;
+    if (count % 10000 == 0) {
+      LOG(INFO) << "Processed " << count << " files.";
+    }
+    cursor->Next();
+  }
+
+  if (count % 10000 != 0) {
+    LOG(INFO) << "Processed " << count << " files.";
+  }
+  for (int i = 0; i < sum_blob.data_size(); ++i) {
+    sum_blob.set_data(i, sum_blob.data(i) / count);
+  }
+  // Write to disk
+  if (argc == 3) {
+    LOG(INFO) << "Write to " << argv[2];
+    WriteProtoToBinaryFile(sum_blob, argv[2]);
+  }
+  const int channels = sum_blob.channels();
+  const int dim = sum_blob.height() * sum_blob.width();
+  std::vector<float> mean_values(channels, 0.0);
+  LOG(INFO) << "Number of channels: " << channels;
+  for (int c = 0; c < channels; ++c) {
+    for (int i = 0; i < dim; ++i) {
+      mean_values[c] += sum_blob.data(dim * c + i);
+    }
+    LOG(INFO) << "mean_value channel [" << c << "]:" << mean_values[c] / dim;
+  }
+  return 0;
+}
diff --git a/tools/convert_imageset.cpp b/tools/convert_imageset.cpp
new file mode 100644
index 0000000..816a91f
--- /dev/null
+++ b/tools/convert_imageset.cpp
@@ -0,0 +1,152 @@
+// This program converts a set of images to an lmdb/leveldb database by
+// storing them as Datum proto buffers.
+// Usage:
+//   convert_imageset [FLAGS] ROOTFOLDER/ LISTFILE DB_NAME
+//
+// where ROOTFOLDER is the root folder that holds all the images, and LISTFILE
+// lists the image files and their labels, one per line, in the format
+//   subfolder1/file1.JPEG 7
+//   ....
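+//
+// Example invocation (editor's illustration; paths and names are hypothetical):
+//   convert_imageset --shuffle --resize_height=256 --resize_width=256 \
+//     /path/to/imagenet/train/ train.txt ilsvrc12_train_lmdb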
+
+#include <algorithm>
+#include <fstream>  // NOLINT(readability/streams)
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "boost/scoped_ptr.hpp"
+#include "gflags/gflags.h"
+#include "glog/logging.h"
+
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/db.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/rng.hpp"
+
+using namespace caffe;  // NOLINT(build/namespaces)
+using std::pair;
+using boost::scoped_ptr;
+
+DEFINE_bool(gray, false,
+    "When this option is on, treat images as grayscale ones");
+DEFINE_bool(shuffle, false,
+    "Randomly shuffle the order of images and their labels");
+DEFINE_string(backend, "lmdb",
+        "The backend {lmdb, leveldb} for storing the result");
+DEFINE_int32(resize_width, 0, "Width images are resized to");
+DEFINE_int32(resize_height, 0, "Height images are resized to");
+DEFINE_bool(check_size, false,
+    "When this option is on, check that all the datum have the same size");
+DEFINE_bool(encoded, false,
+    "When this option is on, the encoded image will be save in datum");
+DEFINE_string(encode_type, "",
+    "Optional: What type should we encode the image as ('png','jpg',...).");
+
+int main(int argc, char** argv) {
+  ::google::InitGoogleLogging(argv[0]);
+
+#ifndef GFLAGS_GFLAGS_H_
+  namespace gflags = google;
+#endif
+
+  gflags::SetUsageMessage("Convert a set of images to the leveldb/lmdb\n"
+        "format used as input for Caffe.\n"
+        "Usage:\n"
+        "    convert_imageset [FLAGS] ROOTFOLDER/ LISTFILE DB_NAME\n"
+        "The ImageNet dataset for the training demo is at\n"
+        "    http://www.image-net.org/download-images\n");
+  gflags::ParseCommandLineFlags(&argc, &argv, true);
+
+  if (argc < 4) {
+    gflags::ShowUsageWithFlagsRestrict(argv[0], "tools/convert_imageset");
+    return 1;
+  }
+
+  const bool is_color = !FLAGS_gray;
+  const bool check_size = FLAGS_check_size;
+  const bool encoded = FLAGS_encoded;
+  const string encode_type = FLAGS_encode_type;
+
+  std::ifstream infile(argv[2]);
+  std::vector<std::pair<std::string, int> > lines;
+  std::string filename;
+  int label;
+  while (infile >> filename >> label) {
+    lines.push_back(std::make_pair(filename, label));
+  }
+  if (FLAGS_shuffle) {
+    // randomly shuffle data
+    LOG(INFO) << "Shuffling data";
+    shuffle(lines.begin(), lines.end());
+  }
+  LOG(INFO) << "A total of " << lines.size() << " images.";
+
+  if (encode_type.size() && !encoded)
+    LOG(INFO) << "encode_type specified, assuming encoded=true.";
+
+  int resize_height = std::max<int>(0, FLAGS_resize_height);
+  int resize_width = std::max<int>(0, FLAGS_resize_width);
+
+  // Create new DB
+  scoped_ptr<db::DB> db(db::GetDB(FLAGS_backend));
+  db->Open(argv[3], db::NEW);
+  scoped_ptr<db::Transaction> txn(db->NewTransaction());
+
+  // Storing to db
+  std::string root_folder(argv[1]);
+  Datum datum;
+  int count = 0;
+  const int kMaxKeyLength = 256;
+  char key_cstr[kMaxKeyLength];
+  int data_size = 0;
+  bool data_size_initialized = false;
+
+  for (int line_id = 0; line_id < lines.size(); ++line_id) {
+    bool status;
+    std::string enc = encode_type;
+    if (encoded && !enc.size()) {
+      // Guess the encoding type from the file name
+      string fn = lines[line_id].first;
+      size_t p = fn.rfind('.');
+      if ( p == fn.npos )
+        LOG(WARNING) << "Failed to guess the encoding of '" << fn << "'";
+      enc = fn.substr(p);
+      std::transform(enc.begin(), enc.end(), enc.begin(), ::tolower);
+    }
+    status = ReadImageToDatum(root_folder + lines[line_id].first,
+        lines[line_id].second, resize_height, resize_width, is_color,
+        enc, &datum);
+    if (status == false) continue;
+    if (check_size) {
+      if (!data_size_initialized) {
+        data_size = datum.channels() * datum.height() * datum.width();
+        data_size_initialized = true;
+      } else {
+        const std::string& data = datum.data();
+        CHECK_EQ(data.size(), data_size) << "Incorrect data field size "
+            << data.size();
+      }
+    }
+    // sequential
+    int length = snprintf(key_cstr, kMaxKeyLength, "%08d_%s", line_id,
+        lines[line_id].first.c_str());
+
+    // Put in db
+    string out;
+    CHECK(datum.SerializeToString(&out));
+    txn->Put(string(key_cstr, length), out);
+
+    if (++count % 1000 == 0) {
+      // Commit db
+      txn->Commit();
+      txn.reset(db->NewTransaction());
+      LOG(ERROR) << "Processed " << count << " files.";
+    }
+  }
+  // write the last batch
+  if (count % 1000 != 0) {
+    txn->Commit();
+    LOG(ERROR) << "Processed " << count << " files.";
+  }
+  return 0;
+}
diff --git a/tools/device_query.cpp b/tools/device_query.cpp
new file mode 100644
index 0000000..03799e5
--- /dev/null
+++ b/tools/device_query.cpp
@@ -0,0 +1,7 @@
+#include "caffe/common.hpp"
+
+int main(int argc, char** argv) {
+  LOG(FATAL) << "Deprecated. Use caffe device_query "
+                "[--device_id=0] instead.";
+  return 0;
+}
diff --git a/tools/extra/extract_seconds.py b/tools/extra/extract_seconds.py
new file mode 100755
index 0000000..591a51f
--- /dev/null
+++ b/tools/extra/extract_seconds.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+import datetime
+import os
+import sys
+
+def extract_datetime_from_line(line, year):
+    # Expected format: I0210 13:39:22.381027 25210 solver.cpp:204] Iteration 100, lr = 0.00992565
+    line = line.strip().split()
+    month = int(line[0][1:3])
+    day = int(line[0][3:])
+    timestamp = line[1]
+    pos = timestamp.rfind('.')
+    ts = [int(x) for x in timestamp[:pos].split(':')]
+    hour = ts[0]
+    minute = ts[1]
+    second = ts[2]
+    microsecond = int(timestamp[pos + 1:])
+    dt = datetime.datetime(year, month, day, hour, minute, second, microsecond)
+    return dt
+
+
+def get_log_created_year(input_file):
+    """Get year from log file system timestamp
+    """
+
+    log_created_time = os.path.getctime(input_file)
+    log_created_year = datetime.datetime.fromtimestamp(log_created_time).year
+    return log_created_year
+
+
+def get_start_time(line_iterable, year):
+    """Find start time from group of lines
+    """
+
+    start_datetime = None
+    for line in line_iterable:
+        line = line.strip()
+        if line.find('Solving') != -1:
+            start_datetime = extract_datetime_from_line(line, year)
+            break
+    return start_datetime
+
+
+def extract_seconds(input_file, output_file):
+    with open(input_file, 'r') as f:
+        lines = f.readlines()
+    log_created_year = get_log_created_year(input_file)
+    start_datetime = get_start_time(lines, log_created_year)
+    assert start_datetime, 'Start time not found'
+
+    out = open(output_file, 'w')
+    for line in lines:
+        line = line.strip()
+        if line.find('Iteration') != -1:
+            dt = extract_datetime_from_line(line, log_created_year)
+            elapsed_seconds = (dt - start_datetime).total_seconds()
+            out.write('%f\n' % elapsed_seconds)
+    out.close()
+
+if __name__ == '__main__':
+    if len(sys.argv) < 3:
+        print('Usage: ./extract_seconds input_file output_file')
+        exit(1)
+    extract_seconds(sys.argv[1], sys.argv[2])
diff --git a/tools/extra/launch_resize_and_crop_images.sh b/tools/extra/launch_resize_and_crop_images.sh
new file mode 100755
index 0000000..84ca858
--- /dev/null
+++ b/tools/extra/launch_resize_and_crop_images.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+#### https://github.com/Yangqing/mincepie/wiki/Launch-Your-Mapreducer
+
+# If you encounter an "address already in use" error, kill the server process.
+# 11235 is the port used by the server process:
+# https://github.com/Yangqing/mincepie/blob/master/mincepie/mince.py
+#     sudo netstat -ap | grep 11235
+# The last column of the output is PID/Program name:
+#     kill -9 PID
+# Alternatively:
+#     nmap localhost
+#     fuser -k 11235/tcp
+# Or just wait a few seconds.
+
+## Launch your Mapreduce locally
+# num_clients: number of processes
+# image_lib: OpenCV or PIL, case insensitive. The default value is the faster OpenCV.
+# input: a file listing one image path per line, relative to input_folder
+# input_folder: the directory containing the original images
+# output_folder: where to save the resized and cropped images
+./resize_and_crop_images.py --num_clients=8 --image_lib=opencv --input=/home/user/Datasets/ImageNet/ILSVRC2010/ILSVRC2010_images.txt --input_folder=/home/user/Datasets/ImageNet/ILSVRC2010/ILSVRC2010_images_train/ --output_folder=/home/user/Datasets/ImageNet/ILSVRC2010/ILSVRC2010_images_train_resized/
+
+## Launch your Mapreduce with MPI
+# mpirun -n 8 --launch=mpi resize_and_crop_images.py --image_lib=opencv --input=/home/user/Datasets/ImageNet/ILSVRC2010/ILSVRC2010_images.txt --input_folder=/home/user/Datasets/ImageNet/ILSVRC2010/ILSVRC2010_images_train/ --output_folder=/home/user/Datasets/ImageNet/ILSVRC2010/ILSVRC2010_images_train_resized/
diff --git a/tools/extra/parse_log.py b/tools/extra/parse_log.py
new file mode 100755
index 0000000..09ea216
--- /dev/null
+++ b/tools/extra/parse_log.py
@@ -0,0 +1,196 @@
+#!/usr/bin/env python
+
+"""
+Parse training log
+
+Evolved from parse_log.sh
+"""
+
+import os
+import re
+import extract_seconds
+import argparse
+import csv
+from collections import OrderedDict
+
+
+def parse_log(path_to_log):
+    """Parse log file
+    Returns (train_dict_list, train_dict_names, test_dict_list, test_dict_names)
+
+    train_dict_list and test_dict_list are lists of dicts that define the table
+    rows
+
+    train_dict_names and test_dict_names are ordered tuples of the column names
+    for the two dict_lists
+    """
+
+    regex_iteration = re.compile('Iteration (\d+)')
+    regex_train_output = re.compile('Train net output #(\d+): (\S+) = ([\.\deE+-]+)')
+    regex_test_output = re.compile('Test net output #(\d+): (\S+) = ([\.\deE+-]+)')
+    regex_learning_rate = re.compile('lr = ([\.\d]+)')
+
+    # Pick out lines of interest
+    iteration = -1
+    learning_rate = float('NaN')
+    train_dict_list = []
+    test_dict_list = []
+    train_row = None
+    test_row = None
+
+    logfile_year = extract_seconds.get_log_created_year(path_to_log)
+    with open(path_to_log) as f:
+        start_time = extract_seconds.get_start_time(f, logfile_year)
+
+        for line in f:
+            iteration_match = regex_iteration.search(line)
+            if iteration_match:
+                iteration = float(iteration_match.group(1))
+            if iteration == -1:
+                # Only start parsing for other stuff if we've found the first
+                # iteration
+                continue
+
+            time = extract_seconds.extract_datetime_from_line(line,
+                                                              logfile_year)
+            seconds = (time - start_time).total_seconds()
+
+            learning_rate_match = regex_learning_rate.search(line)
+            if learning_rate_match:
+                learning_rate = float(learning_rate_match.group(1))
+
+            train_dict_list, train_row = parse_line_for_net_output(
+                regex_train_output, train_row, train_dict_list,
+                line, iteration, seconds, learning_rate
+            )
+            test_dict_list, test_row = parse_line_for_net_output(
+                regex_test_output, test_row, test_dict_list,
+                line, iteration, seconds, learning_rate
+            )
+
+    fix_initial_nan_learning_rate(train_dict_list)
+    fix_initial_nan_learning_rate(test_dict_list)
+
+    return train_dict_list, test_dict_list
+
+
+def parse_line_for_net_output(regex_obj, row, row_dict_list,
+                              line, iteration, seconds, learning_rate):
+    """Parse a single line for training or test output
+
+    Returns a tuple with (row_dict_list, row)
+    row: may be either a new row or an augmented version of the current row
+    row_dict_list: may be either the current row_dict_list or an augmented
+    version of the current row_dict_list
+    """
+
+    output_match = regex_obj.search(line)
+    if output_match:
+        if not row or row['NumIters'] != iteration:
+            # Push the last row and start a new one
+            if row:
+                # If we're on a new iteration, push the last row
+                # This will probably only happen for the first row; otherwise
+                # the full row checking logic below will push and clear full
+                # rows
+                row_dict_list.append(row)
+
+            row = OrderedDict([
+                ('NumIters', iteration),
+                ('Seconds', seconds),
+                ('LearningRate', learning_rate)
+            ])
+
+        # output_num is not used; may be used in the future
+        # output_num = output_match.group(1)
+        output_name = output_match.group(2)
+        output_val = output_match.group(3)
+        row[output_name] = float(output_val)
+
+    if row and len(row_dict_list) >= 1 and len(row) == len(row_dict_list[0]):
+        # The row is full, based on the fact that it has the same number of
+        # columns as the first row; append it to the list
+        row_dict_list.append(row)
+        row = None
+
+    return row_dict_list, row
+
+
+def fix_initial_nan_learning_rate(dict_list):
+    """Correct initial value of learning rate
+
+    Learning rate is normally not printed until after the initial test and
+    training step, which means the initial testing and training rows have
+    LearningRate = NaN. Fix this by copying over the LearningRate from the
+    second row, if it exists.
+    """
+
+    if len(dict_list) > 1:
+        dict_list[0]['LearningRate'] = dict_list[1]['LearningRate']
+
+
+def save_csv_files(logfile_path, output_dir, train_dict_list, test_dict_list,
+                   delimiter=',', verbose=False):
+    """Save CSV files to output_dir
+
+    If the input log file is, e.g., caffe.INFO, the names will be
+    caffe.INFO.train and caffe.INFO.test
+    """
+
+    log_basename = os.path.basename(logfile_path)
+    train_filename = os.path.join(output_dir, log_basename + '.train')
+    write_csv(train_filename, train_dict_list, delimiter, verbose)
+
+    test_filename = os.path.join(output_dir, log_basename + '.test')
+    write_csv(test_filename, test_dict_list, delimiter, verbose)
+
+
+def write_csv(output_filename, dict_list, delimiter, verbose=False):
+    """Write a CSV file
+    """
+
+    dialect = csv.excel
+    dialect.delimiter = delimiter
+
+    with open(output_filename, 'w') as f:
+        dict_writer = csv.DictWriter(f, fieldnames=dict_list[0].keys(),
+                                     dialect=dialect)
+        dict_writer.writeheader()
+        dict_writer.writerows(dict_list)
+    if verbose:
+        print 'Wrote %s' % output_filename
+
+
+def parse_args():
+    description = ('Parse a Caffe training log into two CSV files '
+                   'containing training and testing information')
+    parser = argparse.ArgumentParser(description=description)
+
+    parser.add_argument('logfile_path',
+                        help='Path to log file')
+
+    parser.add_argument('output_dir',
+                        help='Directory in which to place output CSV files')
+
+    parser.add_argument('--verbose',
+                        action='store_true',
+                        help='Print some extra info (e.g., output filenames)')
+
+    parser.add_argument('--delimiter',
+                        default=',',
+                        help=('Column delimiter in output files '
+                              '(default: \'%(default)s\')'))
+
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    args = parse_args()
+    train_dict_list, test_dict_list = parse_log(args.logfile_path)
+    save_csv_files(args.logfile_path, args.output_dir, train_dict_list,
+                   test_dict_list, delimiter=args.delimiter)
+
+
+if __name__ == '__main__':
+    main()
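+
+# Example invocation (hypothetical paths):
+#     ./parse_log.py /path/to/caffe.log /path/to/output_dir --delimiter=';'
+# This writes /path/to/output_dir/caffe.log.train and caffe.log.test.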
diff --git a/tools/extra/parse_log.sh b/tools/extra/parse_log.sh
new file mode 100755
index 0000000..98ef0a0
--- /dev/null
+++ b/tools/extra/parse_log.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+# Usage: parse_log.sh caffe.log
+# It creates the following two text files, each containing a table:
+#     caffe.log.test (columns: '#Iters Seconds TestAccuracy TestLoss')
+#     caffe.log.train (columns: '#Iters Seconds TrainingLoss LearningRate')
+
+
+# get the dirname of the script
+DIR="$( cd "$(dirname "$0")" ; pwd -P )"
+
+if [ "$#" -lt 1 ]
+then
+echo "Usage parse_log.sh /path/to/your.log"
+exit
+fi
+LOG=`basename $1`
+grep -B 1 'Test ' $1 > aux.txt
+grep 'Iteration ' aux.txt | sed  's/.*Iteration \([[:digit:]]*\).*/\1/g' > aux0.txt
+grep 'Test net output #0' aux.txt | awk '{print $11}' > aux1.txt
+grep 'Test net output #1' aux.txt | awk '{print $11}' > aux2.txt
+
+# Extracting elapsed seconds
+# The 'Solving' line is included because it contains the start time
+grep '] Solving ' $1 > aux3.txt
+grep 'Testing net' $1 >> aux3.txt
+$DIR/extract_seconds.py aux3.txt aux4.txt
+
+# Generating the test table
+echo '#Iters Seconds TestAccuracy TestLoss'> $LOG.test
+paste aux0.txt aux4.txt aux1.txt aux2.txt | column -t >> $LOG.test
+rm aux.txt aux0.txt aux1.txt aux2.txt aux3.txt aux4.txt
+
+# The 'Solving' line is included because it contains the start time
+grep '] Solving ' $1 > aux.txt
+grep ', loss = ' $1 >> aux.txt
+grep 'Iteration ' aux.txt | sed  's/.*Iteration \([[:digit:]]*\).*/\1/g' > aux0.txt
+grep ', loss = ' $1 | awk '{print $9}' > aux1.txt
+grep ', lr = ' $1 | awk '{print $9}' > aux2.txt
+
+# Extracting elapsed seconds
+$DIR/extract_seconds.py aux.txt aux3.txt
+
+# Generating the train table
+echo '#Iters Seconds TrainingLoss LearningRate'> $LOG.train
+paste aux0.txt aux3.txt aux1.txt aux2.txt | column -t >> $LOG.train
+rm aux.txt aux0.txt aux1.txt aux2.txt  aux3.txt
diff --git a/tools/extra/plot_log.gnuplot.example b/tools/extra/plot_log.gnuplot.example
new file mode 100644
index 0000000..748b96e
--- /dev/null
+++ b/tools/extra/plot_log.gnuplot.example
@@ -0,0 +1,69 @@
+# These snippets serve only as basic examples.
+# Customization is a must.
+# You can copy, paste, edit them in whatever way you want.
+# Be warned that the fields in the training log may change in the future.
+# You had better check the data files before designing your own plots.
+
+# Please generate the necessary data files with
+# /path/to/caffe/tools/extra/parse_log.sh before plotting.
+# Example usage: 
+#     ./parse_log.sh mnist.log
+# Now you have mnist.log.train and mnist.log.test.
+#     gnuplot mnist.gnuplot
+
+# The fields present in the data files that usually make sense to plot along
+# the y axis are test accuracy, test loss, training loss, and learning rate.
+# Those that should be plotted along the x axis are training iterations and seconds.
+# Possible combinations:
+# 1. Test accuracy (test score 0) vs. training iterations / time;
+# 2. Test loss (test score 1) vs. training iterations / time;
+# 3. Training loss vs. training iterations / time;
+# 4. Learning rate vs. training iterations / time;
+# A rarer one: Training time vs. iterations.
+
+# What is the difference between plotting against iterations and time?
+# If the per-iteration overhead is high, one algorithm might appear faster in
+# terms of progress per iteration yet slower when measured against wall-clock
+# time, and the reverse is also possible. Thus, some papers choose to publish
+# only the more favorable view. It is up to you to decide what to plot.
+
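+# A minimal sketch of comparing two runs on one chart, assuming a second
+# parsed log named mnist2.log.train (hypothetical) exists:
+#     plot "mnist.log.train"  using 1:3 title "run 1", \
+#          "mnist2.log.train" using 1:3 title "run 2"
+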
+reset
+set terminal png
+set output "your_chart_name.png"
+set style data lines
+set key right
+
+###### Fields in the data file your_log_name.log.train are
+###### Iters Seconds TrainingLoss LearningRate
+
+# Training loss vs. training iterations
+set title "Training loss vs. training iterations"
+set xlabel "Training iterations"
+set ylabel "Training loss"
+plot "mnist.log.train" using 1:3 title "mnist"
+
+# Training loss vs. training time
+# plot "mnist.log.train" using 2:3 title "mnist"
+
+# Learning rate vs. training iterations;
+# plot "mnist.log.train" using 1:4 title "mnist"
+
+# Learning rate vs. training time;
+# plot "mnist.log.train" using 2:4 title "mnist"
+
+
+###### Fields in the data file your_log_name.log.test are
+###### Iters Seconds TestAccuracy TestLoss
+
+# Test loss vs. training iterations
+# plot "mnist.log.test" using 1:4 title "mnist"
+
+# Test accuracy vs. training iterations
+# plot "mnist.log.test" using 1:3 title "mnist"
+
+# Test loss vs. training time
+# plot "mnist.log.test" using 2:4 title "mnist"
+
+# Test accuracy vs. training time
+# plot "mnist.log.test" using 2:3 title "mnist"
diff --git a/tools/extra/plot_training_log.py.example b/tools/extra/plot_training_log.py.example
new file mode 100755
index 0000000..b6fda54
--- /dev/null
+++ b/tools/extra/plot_training_log.py.example
@@ -0,0 +1,187 @@
+#!/usr/bin/env python
+import inspect
+import os
+import random
+import sys
+import matplotlib.cm as cmx
+import matplotlib.colors as colors
+import matplotlib.pyplot as plt
+import matplotlib.legend as lgd
+import matplotlib.markers as mks
+
+def get_log_parsing_script():
+    dirname = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
+    return dirname + '/parse_log.sh'
+
+def get_log_file_suffix():
+    return '.log'
+
+def get_chart_type_description_separator():
+    return '  vs. '
+
+def is_x_axis_field(field):
+    x_axis_fields = ['Iters', 'Seconds']
+    return field in x_axis_fields
+
+def create_field_index():
+    train_key = 'Train'
+    test_key = 'Test'
+    field_index = {train_key:{'Iters':0, 'Seconds':1, train_key + ' loss':2,
+                              train_key + ' learning rate':3},
+                   test_key:{'Iters':0, 'Seconds':1, test_key + ' accuracy':2,
+                             test_key + ' loss':3}}
+    fields = set()
+    for data_file_type in field_index.keys():
+        fields = fields.union(set(field_index[data_file_type].keys()))
+    fields = list(fields)
+    fields.sort()
+    return field_index, fields
+
+def get_supported_chart_types():
+    field_index, fields = create_field_index()
+    num_fields = len(fields)
+    supported_chart_types = []
+    for i in xrange(num_fields):
+        if not is_x_axis_field(fields[i]):
+            for j in xrange(num_fields):
+                if i != j and is_x_axis_field(fields[j]):
+                    supported_chart_types.append('%s%s%s' % (
+                        fields[i], get_chart_type_description_separator(),
+                        fields[j]))
+    return supported_chart_types
+
+def get_chart_type_description(chart_type):
+    supported_chart_types = get_supported_chart_types()
+    chart_type_description = supported_chart_types[chart_type]
+    return chart_type_description
+
+def get_data_file_type(chart_type):
+    description = get_chart_type_description(chart_type)
+    data_file_type = description.split()[0]
+    return data_file_type
+
+def get_data_file(chart_type, path_to_log):
+    return os.path.basename(path_to_log) + '.' + get_data_file_type(chart_type).lower()
+
+def get_field_descriptions(chart_type):
+    description = get_chart_type_description(chart_type).split(
+        get_chart_type_description_separator())
+    y_axis_field = description[0]
+    x_axis_field = description[1]
+    return x_axis_field, y_axis_field    
+
+def get_field_indices(chart_type, x_axis_field, y_axis_field):
+    data_file_type = get_data_file_type(chart_type)
+    fields = create_field_index()[0][data_file_type]
+    return fields[x_axis_field], fields[y_axis_field]
+
+def load_data(data_file, field_idx0, field_idx1):
+    data = [[], []]
+    with open(data_file, 'r') as f:
+        for line in f:
+            line = line.strip()
+            if line[0] != '#':
+                fields = line.split()
+                data[0].append(float(fields[field_idx0].strip()))
+                data[1].append(float(fields[field_idx1].strip()))
+    return data
+
+def random_marker():
+    markers = mks.MarkerStyle.markers
+    num = len(markers.values())
+    idx = random.randint(0, num - 1)
+    return markers.values()[idx]
+
+def get_data_label(path_to_log):
+    label = path_to_log[path_to_log.rfind('/')+1 : path_to_log.rfind(
+        get_log_file_suffix())]
+    return label
+
+def get_legend_loc(chart_type):
+    x_axis, y_axis = get_field_descriptions(chart_type)
+    loc = 'lower right'
+    if y_axis.find('accuracy') != -1:
+        pass
+    if y_axis.find('loss') != -1 or y_axis.find('learning rate') != -1:
+        loc = 'upper right'
+    return loc
+
+def plot_chart(chart_type, path_to_png, path_to_log_list):
+    for path_to_log in path_to_log_list:
+        os.system('%s %s' % (get_log_parsing_script(), path_to_log))
+        data_file = get_data_file(chart_type, path_to_log)
+        x_axis_field, y_axis_field = get_field_descriptions(chart_type)
+        x, y = get_field_indices(chart_type, x_axis_field, y_axis_field)
+        data = load_data(data_file, x, y)
+        ## TODO: more systematic color cycle for lines
+        color = [random.random(), random.random(), random.random()]
+        label = get_data_label(path_to_log)
+        linewidth = 0.75
+        ## If there are too many data points, do not use a marker.
+##        use_marker = False
+        use_marker = True
+        if not use_marker:
+            plt.plot(data[0], data[1], label = label, color = color,
+                     linewidth = linewidth)
+        else:
+            ok = False
+            ## Some markers throw ValueError: Unrecognized marker style
+            while not ok:
+                try:
+                    marker = random_marker()
+                    plt.plot(data[0], data[1], label = label, color = color,
+                             marker = marker, linewidth = linewidth)
+                    ok = True
+                except ValueError:
+                    pass
+    legend_loc = get_legend_loc(chart_type)
+    plt.legend(loc = legend_loc, ncol = 1) # adjust ncol to fit the space
+    plt.title(get_chart_type_description(chart_type))
+    plt.xlabel(x_axis_field)
+    plt.ylabel(y_axis_field)  
+    plt.savefig(path_to_png)     
+    plt.show()
+
+def print_help():
+    print """This script mainly serves as the basis of your customizations.
+Customization is a must.
+You can copy, paste, edit them in whatever way you want.
+Be warned that the fields in the training log may change in the future.
+You had better check the data files and change the mapping from field name to
+ field index in create_field_index before designing your own plots.
+Usage:
+    ./plot_log.sh chart_type[0-%s] /where/to/save.png /path/to/first.log ...
+Notes:
+    1. Supporting multiple logs.
+    2. Log file name must end with the lower-cased "%s".
+Supported chart types:""" % (len(get_supported_chart_types()) - 1,
+                             get_log_file_suffix())
+    supported_chart_types = get_supported_chart_types()
+    num = len(supported_chart_types)
+    for i in xrange(num):
+        print '    %d: %s' % (i, supported_chart_types[i])
+    sys.exit(1)
+
+def is_valid_chart_type(chart_type):
+    return chart_type >= 0 and chart_type < len(get_supported_chart_types())
+  
+if __name__ == '__main__':
+    if len(sys.argv) < 4:
+        print_help()
+    else:
+        chart_type = int(sys.argv[1])
+        if not is_valid_chart_type(chart_type):
+            print_help()
+        path_to_png = sys.argv[2]
+        if not path_to_png.endswith('.png'):
+            print 'Path must end with .png: %s' % path_to_png
+            sys.exit(1)
+        path_to_logs = sys.argv[3:]
+        for path_to_log in path_to_logs:
+            if not os.path.exists(path_to_log):
+                print 'Path does not exist: %s' % path_to_log
+                sys.exit(1)
+            if not path_to_log.endswith(get_log_file_suffix()):
+                print_help()
+        ## plot_chart accepts multiple path_to_logs
+        plot_chart(chart_type, path_to_png, path_to_logs)
diff --git a/tools/extra/resize_and_crop_images.py b/tools/extra/resize_and_crop_images.py
new file mode 100755
index 0000000..c844f59
--- /dev/null
+++ b/tools/extra/resize_and_crop_images.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+from mincepie import mapreducer, launcher
+import gflags
+import os
+import cv2
+from PIL import Image
+
+# gflags
+gflags.DEFINE_string('image_lib', 'opencv',
+                     'OpenCV or PIL, case insensitive. The default value is the faster OpenCV.')
+gflags.DEFINE_string('input_folder', '',
+                     'The folder that contains all input images, organized in synsets.')
+gflags.DEFINE_integer('output_side_length', 256,
+                     'Expected side length of the output image.')
+gflags.DEFINE_string('output_folder', '',
+                     'The folder to which the resized and cropped output images are written')
+FLAGS = gflags.FLAGS
+
+class OpenCVResizeCrop:
+    def resize_and_crop_image(self, input_file, output_file, output_side_length = 256):
+        '''Takes an image name, resizes it, and crops the center square
+        '''
+        img = cv2.imread(input_file)
+        height, width, depth = img.shape
+        new_height = output_side_length
+        new_width = output_side_length
+        if height > width:
+            new_height = output_side_length * height / width
+        else:
+            new_width = output_side_length * width / height
+        resized_img = cv2.resize(img, (new_width, new_height))
+        height_offset = (new_height - output_side_length) / 2
+        width_offset = (new_width - output_side_length) / 2
+        cropped_img = resized_img[height_offset:height_offset + output_side_length,
+                                  width_offset:width_offset + output_side_length]
+        cv2.imwrite(output_file, cropped_img)
+
+class PILResizeCrop:
+## http://united-coders.com/christian-harms/image-resizing-tips-every-coder-should-know/
+    def resize_and_crop_image(self, input_file, output_file, output_side_length = 256, fit = True):
+        '''Downsample the image.
+        '''
+        img = Image.open(input_file)
+        box = (output_side_length, output_side_length)
+        #preresize image with factor 2, 4, 8 and fast algorithm
+        factor = 1
+        while img.size[0]/factor > 2*box[0] and img.size[1]*2/factor > 2*box[1]:
+            factor *=2
+        if factor > 1:
+            img.thumbnail((img.size[0]/factor, img.size[1]/factor), Image.NEAREST)
+
+        #calculate the cropping box and get the cropped part
+        if fit:
+            x1 = y1 = 0
+            x2, y2 = img.size
+            wRatio = 1.0 * x2/box[0]
+            hRatio = 1.0 * y2/box[1]
+            if hRatio > wRatio:
+                y1 = int(y2/2-box[1]*wRatio/2)
+                y2 = int(y2/2+box[1]*wRatio/2)
+            else:
+                x1 = int(x2/2-box[0]*hRatio/2)
+                x2 = int(x2/2+box[0]*hRatio/2)
+            img = img.crop((x1,y1,x2,y2))
+
+        #Resize the image with best quality algorithm ANTI-ALIAS
+        img.thumbnail(box, Image.ANTIALIAS)
+
+        #save it into a file-like object
+        with open(output_file, 'wb') as out:
+            img.save(out, 'JPEG', quality=75)
+
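+# A minimal sketch of direct (non-mapreduce) use; 'in.jpg' and 'out.jpg' are
+# hypothetical local paths:
+#     PILResizeCrop().resize_and_crop_image('in.jpg', 'out.jpg', output_side_length=256)
+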
+class ResizeCropImagesMapper(mapreducer.BasicMapper):
+    '''The ImageNet Compute mapper.
+    Each input value is one image path (a line of the listing file), relative to input_folder.
+    '''
+    def map(self, key, value):
+        if type(value) is not str:
+            value = str(value)
+        files = [value]
+        image_lib = FLAGS.image_lib.lower()
+        if image_lib == 'pil':
+            resize_crop = PILResizeCrop()
+        else:
+            resize_crop = OpenCVResizeCrop()
+        for i, line in enumerate(files):
+            try:
+                line = line.replace(FLAGS.input_folder, '').strip()
+                line = line.split()
+                image_file_name = line[0]
+                input_file = os.path.join(FLAGS.input_folder, image_file_name)
+                output_file = os.path.join(FLAGS.output_folder, image_file_name)
+                output_dir = output_file[:output_file.rfind('/')]
+                if not os.path.exists(output_dir):
+                    os.makedirs(output_dir)
+                feat = resize_crop.resize_and_crop_image(input_file, output_file,
+                                                              FLAGS.output_side_length)
+            except Exception, e:
+                # we ignore the exception (maybe the image is corrupted?)
+                print line, Exception, e
+        yield value, FLAGS.output_folder
+
+mapreducer.REGISTER_DEFAULT_MAPPER(ResizeCropImagesMapper)
+
+mapreducer.REGISTER_DEFAULT_READER(mapreducer.FileReader)
+mapreducer.REGISTER_DEFAULT_WRITER(mapreducer.FileWriter)
+ 
+if __name__ == '__main__':
+    launcher.launch()
diff --git a/tools/extract_features.cpp b/tools/extract_features.cpp
new file mode 100644
index 0000000..365dd49
--- /dev/null
+++ b/tools/extract_features.cpp
@@ -0,0 +1,189 @@
+#include <stdio.h>  // for snprintf
+#include <string>
+#include <vector>
+
+#include "boost/algorithm/string.hpp"
+#include "google/protobuf/text_format.h"
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/net.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/db.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/vision_layers.hpp"
+
+using caffe::Blob;
+using caffe::Caffe;
+using caffe::Datum;
+using caffe::Net;
+using boost::shared_ptr;
+using std::string;
+namespace db = caffe::db;
+
+template<typename Dtype>
+int feature_extraction_pipeline(int argc, char** argv);
+
+int main(int argc, char** argv) {
+  return feature_extraction_pipeline<float>(argc, argv);
+//  return feature_extraction_pipeline<double>(argc, argv);
+}
+
+template<typename Dtype>
+int feature_extraction_pipeline(int argc, char** argv) {
+  ::google::InitGoogleLogging(argv[0]);
+  const int num_required_args = 7;
+  if (argc < num_required_args) {
+    LOG(ERROR)<<
+    "This program takes in a trained network and an input data layer, and then"
+    " extract features of the input data produced by the net.\n"
+    "Usage: extract_features  pretrained_net_param"
+    "  feature_extraction_proto_file  extract_feature_blob_name1[,name2,...]"
+    "  save_feature_dataset_name1[,name2,...]  num_mini_batches  db_type"
+    "  [CPU/GPU] [DEVICE_ID=0]\n"
+    "Note: you can extract multiple features in one pass by specifying"
+    " multiple feature blob names and dataset names seperated by ','."
+    " The names cannot contain white space characters and the number of blobs"
+    " and datasets must be equal.";
+    return 1;
+  }
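+  // Example invocation (hypothetical file names):
+  //   extract_features net.caffemodel extract.prototxt fc7 fc7_db 10 lmdb GPU 0
+  // Blob names and dataset names are parallel, comma-separated lists.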
+  int arg_pos = num_required_args;
+
+  arg_pos = num_required_args;
+  if (argc > arg_pos && strcmp(argv[arg_pos], "GPU") == 0) {
+    LOG(ERROR)<< "Using GPU";
+    uint device_id = 0;
+    if (argc > arg_pos + 1) {
+      device_id = atoi(argv[arg_pos + 1]);
+      CHECK_GE(device_id, 0);
+    }
+    LOG(ERROR) << "Using Device_id=" << device_id;
+    Caffe::SetDevice(device_id);
+    Caffe::set_mode(Caffe::GPU);
+  } else {
+    LOG(ERROR) << "Using CPU";
+    Caffe::set_mode(Caffe::CPU);
+  }
+
+  arg_pos = 0;  // the name of the executable
+  std::string pretrained_binary_proto(argv[++arg_pos]);
+
+  // Expected prototxt contains at least one data layer such as
+  //  the layer data_layer_name and one feature blob such as the
+  //  fc7 top blob to extract features.
+  /*
+   layers {
+     name: "data_layer_name"
+     type: DATA
+     data_param {
+       source: "/path/to/your/images/to/extract/feature/images_leveldb"
+       mean_file: "/path/to/your/image_mean.binaryproto"
+       batch_size: 128
+       crop_size: 227
+       mirror: false
+     }
+     top: "data_blob_name"
+     top: "label_blob_name"
+   }
+   layers {
+     name: "drop7"
+     type: DROPOUT
+     dropout_param {
+       dropout_ratio: 0.5
+     }
+     bottom: "fc7"
+     top: "fc7"
+   }
+   */
+  std::string feature_extraction_proto(argv[++arg_pos]);
+  shared_ptr<Net<Dtype> > feature_extraction_net(
+      new Net<Dtype>(feature_extraction_proto, caffe::TEST));
+  feature_extraction_net->CopyTrainedLayersFrom(pretrained_binary_proto);
+
+  std::string extract_feature_blob_names(argv[++arg_pos]);
+  std::vector<std::string> blob_names;
+  boost::split(blob_names, extract_feature_blob_names, boost::is_any_of(","));
+
+  std::string save_feature_dataset_names(argv[++arg_pos]);
+  std::vector<std::string> dataset_names;
+  boost::split(dataset_names, save_feature_dataset_names,
+               boost::is_any_of(","));
+  CHECK_EQ(blob_names.size(), dataset_names.size()) <<
+      " the number of blob names and dataset names must be equal";
+  size_t num_features = blob_names.size();
+
+  for (size_t i = 0; i < num_features; i++) {
+    CHECK(feature_extraction_net->has_blob(blob_names[i]))
+        << "Unknown feature blob name " << blob_names[i]
+        << " in the network " << feature_extraction_proto;
+  }
+
+  int num_mini_batches = atoi(argv[++arg_pos]);
+
+  std::vector<shared_ptr<db::DB> > feature_dbs;
+  std::vector<shared_ptr<db::Transaction> > txns;
+  const char* db_type = argv[++arg_pos];
+  for (size_t i = 0; i < num_features; ++i) {
+    LOG(INFO)<< "Opening dataset " << dataset_names[i];
+    shared_ptr<db::DB> db(db::GetDB(db_type));
+    db->Open(dataset_names.at(i), db::NEW);
+    feature_dbs.push_back(db);
+    shared_ptr<db::Transaction> txn(db->NewTransaction());
+    txns.push_back(txn);
+  }
+
+  LOG(ERROR)<< "Extacting Features";
+
+  Datum datum;
+  const int kMaxKeyStrLength = 100;
+  char key_str[kMaxKeyStrLength];
+  std::vector<Blob<float>*> input_vec;
+  std::vector<int> image_indices(num_features, 0);
+  for (int batch_index = 0; batch_index < num_mini_batches; ++batch_index) {
+    feature_extraction_net->Forward(input_vec);
+    for (int i = 0; i < num_features; ++i) {
+      const shared_ptr<Blob<Dtype> > feature_blob = feature_extraction_net
+          ->blob_by_name(blob_names[i]);
+      int batch_size = feature_blob->num();
+      int dim_features = feature_blob->count() / batch_size;
+      const Dtype* feature_blob_data;
+      for (int n = 0; n < batch_size; ++n) {
+        datum.set_height(feature_blob->height());
+        datum.set_width(feature_blob->width());
+        datum.set_channels(feature_blob->channels());
+        datum.clear_data();
+        datum.clear_float_data();
+        feature_blob_data = feature_blob->cpu_data() +
+            feature_blob->offset(n);
+        for (int d = 0; d < dim_features; ++d) {
+          datum.add_float_data(feature_blob_data[d]);
+        }
+        int length = snprintf(key_str, kMaxKeyStrLength, "%010d",
+            image_indices[i]);
+        string out;
+        CHECK(datum.SerializeToString(&out));
+        txns.at(i)->Put(std::string(key_str, length), out);
+        ++image_indices[i];
+        if (image_indices[i] % 1000 == 0) {
+          txns.at(i)->Commit();
+          txns.at(i).reset(feature_dbs.at(i)->NewTransaction());
+          LOG(ERROR)<< "Extracted features of " << image_indices[i] <<
+              " query images for feature blob " << blob_names[i];
+        }
+      }  // for (int n = 0; n < batch_size; ++n)
+    }  // for (int i = 0; i < num_features; ++i)
+  }  // for (int batch_index = 0; batch_index < num_mini_batches; ++batch_index)
+  // write the last batch
+  for (int i = 0; i < num_features; ++i) {
+    if (image_indices[i] % 1000 != 0) {
+      txns.at(i)->Commit();
+    }
+    LOG(ERROR)<< "Extracted features of " << image_indices[i] <<
+        " query images for feature blob " << blob_names[i];
+    feature_dbs.at(i)->Close();
+  }
+
+  LOG(ERROR)<< "Successfully extracted the features!";
+  return 0;
+}
+
diff --git a/tools/finetune_net.cpp b/tools/finetune_net.cpp
new file mode 100644
index 0000000..81c0c35
--- /dev/null
+++ b/tools/finetune_net.cpp
@@ -0,0 +1,7 @@
+#include "caffe/caffe.hpp"
+
+int main(int argc, char** argv) {
+  LOG(FATAL) << "Deprecated. Use caffe train --solver=... "
+                "[--weights=...] instead.";
+  return 0;
+}
diff --git a/tools/net_speed_benchmark.cpp b/tools/net_speed_benchmark.cpp
new file mode 100644
index 0000000..cd16e8d
--- /dev/null
+++ b/tools/net_speed_benchmark.cpp
@@ -0,0 +1,7 @@
+#include "caffe/caffe.hpp"
+
+int main(int argc, char** argv) {
+  LOG(FATAL) << "Deprecated. Use caffe time --model=... "
+             "[--iterations=50] [--gpu] [--device_id=0]";
+  return 0;
+}
diff --git a/tools/test_net.cpp b/tools/test_net.cpp
new file mode 100644
index 0000000..92e14ee
--- /dev/null
+++ b/tools/test_net.cpp
@@ -0,0 +1,7 @@
+#include "caffe/caffe.hpp"
+
+int main(int argc, char** argv) {
+  LOG(FATAL) << "Deprecated. Use caffe test --model=... "
+      "--weights=... instead.";
+  return 0;
+}
diff --git a/tools/train_net.cpp b/tools/train_net.cpp
new file mode 100644
index 0000000..622bca3
--- /dev/null
+++ b/tools/train_net.cpp
@@ -0,0 +1,7 @@
+#include "caffe/caffe.hpp"
+
+int main(int argc, char** argv) {
+  LOG(FATAL) << "Deprecated. Use caffe train --solver=... "
+                "[--snapshot=...] instead.";
+  return 0;
+}
diff --git a/tools/upgrade_net_proto_binary.cpp b/tools/upgrade_net_proto_binary.cpp
new file mode 100644
index 0000000..8a0dd7a
--- /dev/null
+++ b/tools/upgrade_net_proto_binary.cpp
@@ -0,0 +1,49 @@
+// This is a script to upgrade "V0" network prototxts to the new format.
+// Usage:
+//    upgrade_net_proto_binary v0_net_proto_file_in net_proto_file_out
+
+#include <cstring>
+#include <fstream>  // NOLINT(readability/streams)
+#include <iostream>  // NOLINT(readability/streams)
+#include <string>
+
+#include "caffe/caffe.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/upgrade_proto.hpp"
+
+using std::ofstream;
+
+using namespace caffe;  // NOLINT(build/namespaces)
+
+int main(int argc, char** argv) {
+  ::google::InitGoogleLogging(argv[0]);
+  if (argc != 3) {
+    LOG(ERROR) << "Usage: "
+        << "upgrade_net_proto_binary v0_net_proto_file_in net_proto_file_out";
+    return 1;
+  }
+
+  NetParameter net_param;
+  string input_filename(argv[1]);
+  if (!ReadProtoFromBinaryFile(input_filename, &net_param)) {
+    LOG(ERROR) << "Failed to parse input binary file as NetParameter: "
+               << input_filename;
+    return 2;
+  }
+  bool need_upgrade = NetNeedsUpgrade(net_param);
+  bool success = true;
+  if (need_upgrade) {
+    success = UpgradeNetAsNeeded(input_filename, &net_param);
+    if (!success) {
+      LOG(ERROR) << "Encountered error(s) while upgrading prototxt; "
+                 << "see details above.";
+    }
+  } else {
+    LOG(ERROR) << "File already in V1 proto format: " << argv[1];
+  }
+
+  WriteProtoToBinaryFile(net_param, argv[2]);
+
+  LOG(ERROR) << "Wrote upgraded NetParameter binary proto to " << argv[2];
+  return !success;
+}
diff --git a/tools/upgrade_net_proto_text.cpp b/tools/upgrade_net_proto_text.cpp
new file mode 100644
index 0000000..9200431
--- /dev/null
+++ b/tools/upgrade_net_proto_text.cpp
@@ -0,0 +1,55 @@
+// This is a script to upgrade "V0" network prototxts to the new format.
+// Usage:
+//    upgrade_net_proto_text v0_net_proto_file_in net_proto_file_out
+
+#include <cstring>
+#include <fstream>  // NOLINT(readability/streams)
+#include <iostream>  // NOLINT(readability/streams)
+#include <string>
+
+#include "caffe/caffe.hpp"
+#include "caffe/util/io.hpp"
+#include "caffe/util/upgrade_proto.hpp"
+
+using std::ofstream;
+
+using namespace caffe;  // NOLINT(build/namespaces)
+
+int main(int argc, char** argv) {
+  ::google::InitGoogleLogging(argv[0]);
+  if (argc != 3) {
+    LOG(ERROR) << "Usage: "
+        << "upgrade_net_proto_text v0_net_proto_file_in net_proto_file_out";
+    return 1;
+  }
+
+  NetParameter net_param;
+  string input_filename(argv[1]);
+  if (!ReadProtoFromTextFile(input_filename, &net_param)) {
+    LOG(ERROR) << "Failed to parse input text file as NetParameter: "
+               << input_filename;
+    return 2;
+  }
+  bool need_upgrade = NetNeedsUpgrade(net_param);
+  bool need_data_upgrade = NetNeedsDataUpgrade(net_param);
+  bool success = true;
+  if (need_upgrade) {
+    success = UpgradeNetAsNeeded(input_filename, &net_param);
+    if (!success) {
+      LOG(ERROR) << "Encountered error(s) while upgrading prototxt; "
+                 << "see details above.";
+    }
+  } else {
+    LOG(ERROR) << "File already in latest proto format: " << input_filename;
+  }
+
+  if (need_data_upgrade) {
+    UpgradeNetDataTransformation(&net_param);
+  }
+
+  // Save new format prototxt.
+  WriteProtoToTextFile(net_param, argv[2]);
+
+  LOG(ERROR) << "Wrote upgraded NetParameter text proto to " << argv[2];
+  return !success;
+}

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/caffe-contrib.git


